c++, c, qt, ffmpeg, h.264

Issue opening an 8MP H.264 stream from a TCP server


I am facing an issue opening a raw H.264 stream at 8MP resolution, served over TCP from an Android device, in a Qt application. To open the stream in ffplay, I run the following command in a terminal and it plays the stream:

ffplay -f h264 -codec:v h264 -probesize 32M <tcp://ipaddress:port>

But when I try to open the stream in the Qt application, avformat_open_input() returns the error "Invalid data found while processing input". Below is the code I am using in the Qt application:

    av_register_all();
    avcodec_register_all();
    avformat_network_init();

    AVFormatContext *refrenceFormatCtx = NULL;
    SwsContext *img_convert_ctx;
    AVIOContext *avio_ctx = NULL;
    int video_stream_index = 0;
    AVCodecContext* codec_ctx = NULL;
    AVSampleFormat *fmt = NULL;
    char errorsdef[100];

    AVDictionary *options = NULL;
    av_dict_set(&options, "video_size", "3264x2448", 0);
    av_dict_set(&options, "pixel_format", "yuv420p", 0);
    av_dict_set(&options, "f", "h264", 0);
    av_dict_set(&options, "codec:v", "h264", 0);
    av_dict_set(&options, "codec:a", "aac", 0);
    av_dict_set(&options, "probesize", "32M", 0);

    int err = avformat_open_input(&refrenceFormatCtx, "tcp://192.168.42.129:2226", NULL, &options);
    av_strerror(err, errorsdef, 100);
    qDebug() << "Opening stream error: " << err << " " << errorsdef;
    if (err < 0)
        abort();
    av_dict_free(&options);

Is the way I am passing the options to avformat_open_input() correct? Are the parameters I am setting correct?


Solution

  • I found the answer to my question above. The code that fixes the issue and extracts RGB frames from the raw 8MP H.264 stream is as follows:

    avcodec_register_all();
    av_register_all();
    avformat_network_init();
    
    AVDictionary *options = NULL;
    AVFormatContext *refrenceFormatCtx = NULL;
    AVInputFormat *fmts = av_find_input_format("h264");
    char errorsdef[100];
    AVCodecContext* codec_ctx = NULL;
    int video_stream_index = 0;    
    SwsContext *img_convert_ctx = NULL;
    AVFrame* picture_yuv = NULL;
    AVFrame* picture_rgb = NULL;
    uint8_t* picture_buffer_rgb;
    uint8_t *rgb_image_data;
    int sizeofrgbpicture = 0;
    int initialize_rgb_requirements=1;
    picture_yuv = av_frame_alloc();
    
    refrenceFormatCtx = avformat_alloc_context();   // allocate the context first so the av_opt_set() calls below have a valid object to act on
    av_dict_set(&options, "flags", "bicubic", 0);
    av_opt_set(refrenceFormatCtx,"f","h264", AV_OPT_SEARCH_CHILDREN);
    av_opt_set(refrenceFormatCtx,"codec:v","h264",AV_OPT_SEARCH_CHILDREN);
    av_opt_set(refrenceFormatCtx,"probesize","32M", AV_OPT_SEARCH_CHILDREN);
    
    // Open video file
    int err = avformat_open_input(&refrenceFormatCtx,"tcp://192.168.42.129:2226", fmts, &options);
    if (options) {
        // any entries still left in the dictionary were not consumed by avformat_open_input()
        int dict_count = av_dict_count(options);
        qDebug() << "dict_count " << dict_count;
    }
    av_strerror(err,errorsdef,100);
    qDebug() << "Opening stream error: "<< err << " "<< errorsdef;
    if (refrenceFormatCtx!=NULL){
        err = avformat_find_stream_info(refrenceFormatCtx, &options);
        if( err< 0){
            av_strerror(err,errorsdef,100);
            qDebug() << "Not able to find stream: "<< err << " "<< errorsdef;
        }
    }else{
        qDebug() << "referencecontext null";
        exit(1);
    }
    //search video stream
    for (int i = 0; i < (int)refrenceFormatCtx->nb_streams; i++) {
        AVStream* s = refrenceFormatCtx->streams[i];
        if (s->codec == NULL){
            continue;
        }
        codec_ctx = (s->codec);
        if (codec_ctx->codec_type == AVMEDIA_TYPE_VIDEO){
            video_stream_index = i;
        }
    }
    AVPacket packet;
    av_init_packet(&packet);
    
    // allocate an output format context (only used to create a stream below; no file is written)
    AVFormatContext* output_ctx = avformat_alloc_context();
    AVStream* stream = NULL;
    
    //start reading packets from stream and emit data pointer to slot
    av_read_play(refrenceFormatCtx);    // tell the demuxer to start reading from the TCP stream
    avcodec_copy_context(codec_ctx, refrenceFormatCtx->streams[video_stream_index]->codec);
    
    if (avcodec_open2(codec_ctx, avcodec_find_decoder(AV_CODEC_ID_H264), NULL) < 0){
        qDebug() << "avcodec_open2 failed";
    }
    while (av_read_frame(refrenceFormatCtx, &packet) >= 0) {
        if (packet.stream_index == video_stream_index) {    //packet is video
            if (stream == NULL) {    //create stream in file
                stream = avformat_new_stream(output_ctx, refrenceFormatCtx->streams[video_stream_index]->codec->codec);
                avcodec_copy_context(stream->codec, refrenceFormatCtx->streams[video_stream_index]->codec);
                stream->sample_aspect_ratio = refrenceFormatCtx->streams[video_stream_index]->codec->sample_aspect_ratio;
            }
            int check = 0;
            packet.stream_index = stream->id;
            int result = avcodec_decode_video2(codec_ctx, picture_yuv, &check, &packet);
            av_packet_unref(&packet);   // av_free_packet() is deprecated; a single unref releases the packet
            if(result <= 0 || check == 0){
                continue;
            }
            if(initialize_rgb_requirements)
            {
                sizeofrgbpicture = avpicture_get_size(AV_PIX_FMT_RGB24, codec_ctx->width, codec_ctx->height);
                picture_rgb = av_frame_alloc();
                picture_buffer_rgb = (uint8_t*) (av_malloc(sizeofrgbpicture));
                avpicture_fill((AVPicture *) picture_rgb, picture_buffer_rgb, AV_PIX_FMT_RGB24, codec_ctx->width, codec_ctx->height);
                img_convert_ctx = sws_getContext(codec_ctx->width, codec_ctx->height, AV_PIX_FMT_YUV420P, codec_ctx->width, codec_ctx->height, AV_PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL);
                initialize_rgb_requirements=0;
            }
            int height = 0;
            if(picture_yuv->data[0] != NULL)   // check the first plane pointer; the data array itself is never NULL
            {
                height = sws_scale(img_convert_ctx, ((AVPicture*)picture_yuv)->data, ((AVPicture*)picture_yuv)->linesize, 0, codec_ctx->height, ((AVPicture*)picture_rgb)->data,((AVPicture*)picture_rgb)->linesize);
            }
            rgb_image_data = (uint8_t *)malloc(sizeofrgbpicture * sizeof(uint8_t));   // not freed here: the slot receiving imageQueued() must free it
            int ret = avpicture_layout((AVPicture *)picture_rgb, AV_PIX_FMT_RGB24, codec_ctx->width, codec_ctx->height, rgb_image_data, sizeofrgbpicture);
            emit imageQueued(rgb_image_data, codec_ctx->width,codec_ctx->height);
        }
        msleep(1);
    }
    av_freep(&picture_buffer_rgb);
    av_frame_free(&picture_rgb);
    avio_close(output_ctx->pb);
    avformat_free_context(output_ctx);
    avformat_close_input(&refrenceFormatCtx);
    

    I came to know that for a raw H.264 stream we have to tell FFmpeg that the input format is h264. For that I used an AVInputFormat; to set other options such as the video codec and probesize, I used av_opt_set(), and to set the default flags I used av_dict_set(). I emit the data pointer to the slot where I need it. If anyone wants to write the frames to disk, a .ppm file can be generated from this pointer, as sketched below.
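
    A minimal sketch of such a slot is shown below. It assumes the emitted buffer is tightly packed RGB24 (width * height * 3 bytes), which is what avpicture_layout() produces above; the function name writePpmFrame and the output file name are only illustrative and not part of my original code.

    // Hypothetical receiver for imageQueued(): writes one frame as a binary PPM (P6)
    // and releases the malloc'd buffer. Assumes tightly packed RGB24 data.
    #include <cstdio>
    #include <cstdlib>
    #include <cstdint>

    void writePpmFrame(uint8_t *rgb_image_data, int width, int height)
    {
        FILE *f = fopen("frame.ppm", "wb");
        if (f) {
            fprintf(f, "P6\n%d %d\n255\n", width, height);            // PPM header: magic, dimensions, max value
            fwrite(rgb_image_data, 1, (size_t)width * height * 3, f); // 3 bytes (R, G, B) per pixel, row by row
            fclose(f);
        }
        free(rgb_image_data);   // buffer was allocated with malloc() in the decoding loop
    }

    In the actual application this would be a slot on the receiving QObject, connected to the imageQueued() signal.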