Search code examples
c++gstreamerrtspdecodingmjpeg

GStreamer: wrong frame colors on output of rtpjpegdepay/jpegdec


MJPEG RTSP stream server has been run using VLC. Then I have run one more VLC to determine that frames from stream have correct colors. All colors were correct.

My task is to capture MJPEG frames at the output of the rtpjpegdepay plugin without any decompression. I have created a C++ application with the following pipeline:

rtspsrc -> rtpjpegdepay -> appsink 

Source URL was set using the line below:

g_object_set(G_OBJECT(m_source), "location", url.c_str(), NULL);

Each JPEG frame was caught with the method below:

g_signal_connect(m_sink, "new-sample", G_CALLBACK(captureGstBuffer), this);

But the received image was very odd (It is the road with a dividing strip, only colors are incorrect):

Jpeg with wrong colors

Next I tried another pipeline:

rtspsrc -> rtpjpegdepay -> jpegdec -> appsink

Using the same "new-sample" handler I received a YUV420 image with the same wrong colors as with rtpjpegdepay alone.

I have tried different versions of GStreamer, from 1.8.3 to 1.14.2.

What can be the reason of that behaviour?

Here is the code (YUV420):

/// "pad-added" callback for rtspsrc: rtspsrc creates its source pad only
/// after the RTSP session is negotiated, so the link to the downstream
/// depayloader (passed via `data`) has to be made here, at pad creation time.
static void OnPadAdded(GstElement *element, GstPad *pad, void *data)
{
    GstElement *rtpjpeg = GST_ELEMENT(data);
    GstPad *sinkpad;

    sinkpad = gst_element_get_static_pad(rtpjpeg, "sink");
    // BUG FIX: previously both the NULL pad and the link result were ignored,
    // so a failed link left the pipeline silently unconnected (no data flow,
    // no error). Report the failure instead.
    if (sinkpad == NULL) {
        g_warning("OnPadAdded: downstream element has no \"sink\" pad");
        return;
    }
    if (gst_pad_link(pad, sinkpad) != GST_PAD_LINK_OK) {
        g_warning("OnPadAdded: failed to link rtspsrc pad to depayloader");
    }
    gst_object_unref(sinkpad);
}

// Sequence number for the dumped frame files. NOTE(review): not thread-safe;
// fine as long as only the single appsink callback thread calls WriteToFile —
// confirm if more writers are added.
static int fileind = 0;

/// Dumps one raw frame buffer to an incrementally numbered file
/// (D:\Temp\file<N>.yuv).
/// @param pBuffer    frame bytes to write
/// @param dwBufSize  number of bytes in pBuffer
void WriteToFile(BYTE *pBuffer, DWORD dwBufSize)
{
    fileind++;

    std::stringstream ssFileName;
    ssFileName << "D:\\Temp\\file" << fileind << ".yuv";
    FILE* fp = fopen(ssFileName.str().c_str(), "wb+");
    // BUG FIX: fopen's result was unchecked — if D:\Temp is missing or not
    // writable, fwrite(NULL-stream) crashed. Skip the frame instead.
    if (fp == NULL) {
        return;
    }
    fwrite(pBuffer, dwBufSize, 1, fp);
    fclose(fp);
}

/// appsink "new-sample" callback: pulls the next sample and dumps the raw
/// frame bytes to disk via WriteToFile. Always returns GST_FLOW_OK so a
/// missed/unmappable sample does not stop the pipeline.
static GstFlowReturn CaptureGstBuffer(GstElement *sink, void *data) {
    GstSample *sample;

    g_signal_emit_by_name(sink, "pull-sample", &sample);
    if (sample) {

        GstBuffer *buffer = gst_sample_get_buffer(sample);

        GstMapInfo map;
        // BUG FIX: a NULL buffer and a failed gst_buffer_map() were ignored,
        // which would hand an uninitialized map.data/map.size to WriteToFile.
        // Skip the frame on failure; still unref the sample.
        if (buffer != NULL && gst_buffer_map(buffer, &map, GST_MAP_READ)) {
            WriteToFile((BYTE *)map.data, map.size);
            gst_buffer_unmap(buffer, &map);
        }
        gst_sample_unref(sample);
    }

    return GST_FLOW_OK;
}

long RTSPClientDevice::StartClient()
{
   m_loop = g_main_loop_new(NULL, FALSE);

   m_pipeline = gst_pipeline_new("mjpeg-catcher");
   g_assert(m_pipeline);
   m_source = gst_element_factory_make("rtspsrc", "Source");
   g_assert(m_source);
   m_depay = gst_element_factory_make("rtpjpegdepay", "Depay");
   g_assert(m_depay);
   m_decoder = gst_element_factory_make("jpegdec", "Decoder");
   g_assert(m_decoder);
   m_sink = gst_element_factory_make("appsink", "Output");
   g_assert(m_sink);

   if (!m_pipeline || !m_source || !m_depay || !m_decoder || !m_sink) {
        return Z_ERR;
   }

   std::string url = "";
   GetClientURL(url);

   g_object_set(G_OBJECT(m_source), "location", url.c_str(), NULL);
   g_object_set(G_OBJECT (m_source), "do-rtcp", 1, NULL);
   g_object_set(G_OBJECT(m_source), "latency", 0, NULL);
   g_object_set(G_OBJECT(m_source), "probation", 1, NULL);

   m_bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
   gst_bus_add_watch(m_bus, bus_call, m_loop);
   gst_object_unref(m_bus);

   gst_bin_add_many(GST_BIN(m_pipeline), m_source, m_depay, m_decoder , 
m_sink, NULL);

   if (!gst_element_link(m_source, m_depay)) {
      return Z_ERR;
   }

   if (!gst_element_link(m_depay, m_decoder)) {
      return Z_ERR;
   }

   if (!gst_element_link(m_decoder, m_sink)) {
      return Z_ERR;
   }

   if (!g_signal_connect(m_source, "pad-added", G_CALLBACK(OnPadAdded), 
   m_capsfilter))
   {
      return Z_ERR;
   }

   g_object_set(G_OBJECT(m_sink), "emit-signals", TRUE, "sync", FALSE, NULL);
   g_object_set(G_OBJECT(m_sink), "max-buffers", (guint)1, NULL);
   g_object_set(G_OBJECT(m_sink), "drop", (guint)1, NULL);
   g_object_set(G_OBJECT(m_sink), "sync", (guint)0, NULL);
   g_object_set(G_OBJECT(m_sink), "max_lateness", G_GINT64_CONSTANT(-1), NULL);
   g_object_set(G_OBJECT(m_sink), "qos", (guint)1, NULL);


   /*GstCaps *caps = gst_caps_from_string("video/x-raw,encoding-name=RGB,format=(fourcc)YUV444,width=1280,height=720");
   g_object_set(m_videoconvert, "caps", caps, NULL);
   gst_caps_unref(caps);*/

   if (g_signal_connect(m_sink, "new-sample", G_CALLBACK(CaptureGstBuffer), this) <= 0)
   {
      return Z_ERR;
   }

   gst_element_set_state(m_pipeline, GST_STATE_PLAYING);

   ControlThreadStart(); //Place for g_main_loop_run

   m_isStarted = true;

   return Z_OK;
 }

 /// Stops the pipeline started by StartClient and releases the main loop.
 /// Safe to call when the client was never started (returns Z_OK immediately).
 /// Order matters: quit the loop first, then drop the pipeline to NULL state,
 /// then release the references.
 long RTSPClientDevice::StopClient()
 {
   if(!m_isStarted)
   {
     return Z_OK;
   }

   if (g_main_loop_is_running(m_loop))
   {
       g_main_loop_quit(m_loop);
       // Wake the loop's context in case the loop thread is blocked in poll,
       // so it notices the quit promptly.
       g_main_context_wakeup(g_main_loop_get_context(m_loop));
   }

   // Tear the pipeline down to NULL before unreffing; this stops streaming
   // threads and prevents callbacks firing into a destroyed object.
   gst_element_set_state(m_pipeline, GST_STATE_NULL);
   gst_object_unref(GST_OBJECT(m_pipeline));
   //TODO: unref plugins

   g_main_loop_unref(m_loop);

   m_isStarted = false;

   return Z_OK;
 }

EDITED:

I have tried the following command:

gst-launch-1.0.exe -v rtspsrc location = rtsp://127.0.0.1:554/Streaming/Channels/101 ! rtpjpegdepay ! jpegparse ! multifilesink post-messages=true location="frame%d.jpg"

Result the same - wrong colors.

Command:

gst-launch-1.0.exe -v rtspsrc location = rtsp://127.0.0.1:554/Streaming/Channels/101 ! rtpjpegdepay ! multifilesink post-messages=true location="frame%d.jpg"

also produces the same frames

I have also captured logs from gst-launch-1.0 (I do not see any errors — only INFO and DEBUG messages) and a Wireshark capture (I do not see any problems there either). I will try to analyze them more deeply. That AVI was captured from a Hikvision camera using VLC.


Solution

  • I had been using the VLC MJPEG server for debugging purposes, and that was the problem. My client, based on the GStreamer rtpjpegdepay plugin, works without any problem when it connects directly to the Hikvision MJPEG stream. So presumably it is either a VLC bug or a breach of the standard on one side.