The command is:
gst-launch-1.0 filesrc location=/home/pi/Videos/watch.mp4 ! qtdemux name=demux \
demux.audio_0 ! queue ! decodebin ! audioconvert ! audioresample ! autoaudiosink \
demux.video_0 ! queue ! decodebin ! videoconvert ! videoscale ! video/x-raw,width=800,height=480 ! avenc_bmp ! fakesink
It's a little difficult for me to link them together because both 'qtdemux' and 'decodebin' are used in this example.
I have tried with 'tee', but it's obviously slow.
Could anyone give me some help? Thank you.
#include <stdio.h>
#include <gstreamer-1.0/gst/gst.h>
#include <stdbool.h>
/* Holds every element of the pipeline:
 * filesrc ! qtdemux, then two dynamically-linked branches
 * (qtdemux pads appear at runtime via the "pad-added" signal). */
typedef struct {
GstElement *pipeline;
GstElement *filesrc;
GstElement *qtdemux;
/* Video branch: queue ! decodebin ! videoconvert ! videoscale
 * ! capsfilter(800x480) ! avenc_bmp ! fakesink */
struct{
GstElement *queue;
GstElement *decode;
GstElement *convert;
GstElement *scale;
GstElement *capsfilter;
GstElement *enc_bmp;
GstElement *fakesink;
}video;
/* Audio branch: queue ! decodebin ! audioconvert ! audioresample
 * ! autoaudiosink */
struct{
GstElement *queue;
GstElement *decode;
GstElement *convert;
GstElement *resample;
GstElement *sink;
}audio;
} gstreamer_t;
/* "pad-added" callback for qtdemux: routes the demuxer's dynamic src pads
 * to the matching branch queue (H.264 -> video queue, MPEG audio -> audio
 * queue). Any other pad type is reported and ignored. */
static void pad_added_handler (GstElement *src, GstPad *pad, gstreamer_t* data)
{
GstCaps *caps;
GstStructure *pad_sct;
const gchar *name;
gchar *src_name;
caps = gst_pad_get_current_caps(pad);
if (caps == NULL) {
/* Pad has no negotiated caps yet; nothing we can route. */
printf("Pad has no caps, ignoring.\r\n");
return;
}
pad_sct = gst_caps_get_structure( caps, 0 );
name = gst_structure_get_name(pad_sct);
/* gst_element_get_name() returns a newly allocated copy; free it. */
src_name = gst_element_get_name(src);
printf( "src name = %s\r\n", src_name );
g_free (src_name);
printf( "pad name = %s\r\n", name);
printf( "Received new pad '%s' from '%s'.\r\n", GST_PAD_NAME (pad), GST_ELEMENT_NAME (src));
GstPad *sinkpad = NULL;
if(g_str_has_prefix (name, "video/x-h264")) {
sinkpad = gst_element_get_static_pad(data->video.queue, "sink");
/* GST_PAD_LINK_OK is 0; the original code negated the return value
 * before comparing, which inverted the error check. */
if(gst_pad_link(pad, sinkpad) != GST_PAD_LINK_OK )
printf("not link !!\n\n");
gst_object_unref (sinkpad);
}else if(g_str_has_prefix( name, "audio/mpeg")){
sinkpad = gst_element_get_static_pad ( data->audio.queue, "sink");
if(gst_pad_link( pad, sinkpad) != GST_PAD_LINK_OK )
printf("not link !!\n\n");
gst_object_unref (sinkpad);
}
else
printf("Another Pad: %s.\r\n", name);
gst_caps_unref (caps);
}
int main(int argc, char *argv[]) {
gstreamer_t gstreamer;
GstBus *bus;
GstMessage *msg;
GstStateChangeReturn ret;
gboolean terminate = FALSE;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
gstreamer.filesrc = gst_element_factory_make ("filesrc", "filesrc");
g_object_set (gstreamer.filesrc, "location", "../../Videos/watch.mp4", NULL);
gstreamer.qtdemux = gst_element_factory_make ("qtdemux", "qtdemux");
/* Video */
gstreamer.video.queue = gst_element_factory_make("queue", "video");
gstreamer.video.decode = gst_element_factory_make ("decodebin", "decodebin");
gstreamer.video.convert = gst_element_factory_make ("videoconvert", "videoconvert");
gstreamer.video.scale = gst_element_factory_make ("videoscale", "videoscale");
gstreamer.video.capsfilter = gst_element_factory_make ("capsfilter", "capsfilter");
GstCaps *Caps = gst_caps_from_string("video/x-raw,width=800,height=480");
g_object_set(G_OBJECT(gstreamer.video.capsfilter), "caps", Caps, NULL);
gst_caps_unref(Caps);
gstreamer.video.enc_bmp = gst_element_factory_make ("avenc_bmp", "avenc_bmp");
gstreamer.video.fakesink = gst_element_factory_make ("fakesink", "fakesink");
g_object_set (gstreamer.video.fakesink, "sync", true, NULL);
/* Audio */
gstreamer.audio.queue = gst_element_factory_make("queue", "queue_audio");
gstreamer.audio.decode = gst_element_factory_make("decodebin", "decodebin");
gstreamer.audio.convert = gst_element_factory_make("audioconvert", "audioconvert");
gstreamer.audio.resample = gst_element_factory_make("audioresample", "audioresample");
gstreamer.audio.sink = gst_element_factory_make("autoaudiosink", "autoaudiosink");
/* Create the empty pipeline */
gstreamer.pipeline = gst_pipeline_new ("gstreamer-pipeline");
if (!gstreamer.pipeline || !gstreamer.filesrc || !gstreamer.qtdemux ||
!gstreamer.video.queue || !gstreamer.video.decode || !gstreamer.video.convert || !gstreamer.video.scale || !gstreamer.video.capsfilter || !gstreamer.video.enc_bmp || !gstreamer.video.fakesink||
!gstreamer.audio.queue || !gstreamer.audio.decode || !gstreamer.audio.convert || !gstreamer.audio.resample ||!gstreamer.audio.sink)
{
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Build the pipeline. Note that we are NOT linking the source at this
* point. We will do it later. */
gst_bin_add_many (GST_BIN (gstreamer.pipeline), gstreamer.filesrc, gstreamer.qtdemux,
gstreamer.video.queue, gstreamer.video.decode, gstreamer.video.convert, gstreamer.video.scale, gstreamer.video.capsfilter, gstreamer.video.enc_bmp, gstreamer.video.fakesink,
gstreamer.audio.queue, gstreamer.audio.decode, gstreamer.audio.convert, gstreamer.audio.resample, gstreamer.audio.sink, NULL);
if (!gst_element_link (gstreamer.filesrc, gstreamer.qtdemux)) {
g_printerr ("Elements filesrc and qtdemux could not be linked.\n");
gst_object_unref (gstreamer.pipeline);
return -1;
}
if (!gst_element_link_many (gstreamer.video.convert, gstreamer.video.scale, gstreamer.video.capsfilter, gstreamer.video.enc_bmp, gstreamer.video.fakesink, NULL)) {
g_printerr ("Video elements could not be linked.\n");
gst_object_unref (gstreamer.pipeline);
return -1;
}
if (!gst_element_link_many (gstreamer.audio.convert, gstreamer.audio.resample, gstreamer.audio.sink, NULL)) {
g_printerr ("Audio elements could not be linked.\n");
gst_object_unref (gstreamer.pipeline);
return -1;
}
g_signal_connect (gstreamer.qtdemux, "pad-added", G_CALLBACK (pad_added_handler), &gstreamer);
/* Start playing */
ret = gst_element_set_state (gstreamer.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (gstreamer.pipeline);
return -1;
}
/* Listen to the bus */
bus = gst_element_get_bus (gstreamer.pipeline);
do {
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
g_clear_error (&err);
g_free (debug_info);
terminate = TRUE;
break;
case GST_MESSAGE_EOS:
g_print ("\nEnd-Of-Stream reached.\n");
terminate = TRUE;
break;
case GST_MESSAGE_STATE_CHANGED:
/* We are only interested in state-changed messages from the pipeline */
if (GST_MESSAGE_SRC (msg) == GST_OBJECT (gstreamer.pipeline)) {
GstState old_state, new_state, pending_state;
gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
g_print ("Pipeline state changed from %s to %s:\n",
gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
}
break;
break;
default:
/* We should not reach here */
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
} while (!terminate);
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (gstreamer.pipeline, GST_STATE_NULL);
gst_object_unref (gstreamer.pipeline);
return 0;
}
The output is:
(stream:5337): GStreamer-WARNING **: 02:28:38.279: Name 'decodebin' is not unique in bin 'gstreamer-pipeline', not adding Pipeline state changed from NULL to READY: src name = qtdemux pad name = video/x-h264 Received new pad 'video_0' from 'qtdemux'. not link !!
src name = qtdemux pad name = audio/mpeg Received new pad 'audio_0' from 'qtdemux'. Error received from element qtdemux: Internal data stream error. Debugging information: ../gst/isomp4/qtdemux.c(6545): gst_qtdemux_loop (): /GstPipeline:gstreamer-pipeline/GstQTDemux:qtdemux: streaming stopped, reason not-linked (-1)
I don't know how to link the video and audio.
The problem you're facing is with respect to decodebin. One single pipeline cannot contain two elements that have the same name.
In your case,
gstreamer.video.decode = gst_element_factory_make ("decodebin", "decodebin");
and
gstreamer.audio.decode = gst_element_factory_make("decodebin", "decodebin");
have the same name, decodebin.
That's why one of the decodebins is not even being added to the pipeline.
You can change it to something else. For example
gstreamer.audio.decode = gst_element_factory_make("decodebin", "decodebin-audio");
and
gstreamer.video.decode = gst_element_factory_make("decodebin", "decodebin-video");
This rule applies to any parent element that is directly added to your pipeline.