Hi good people,
I'm new to gstreamer and I have a problem of using decodebin as a decoder to play video file. Below is my sample code that I use. The code below runs fine for one stream of media. But if I want to play both streams(audio and video), it doesn't work. Can anyone tell me what I did wrong here? (Please don't suggest to use playbin) :) Thank you. Fariq. ####Sample code#### #include <gst/gst.h> #include <glib-2.0/glib/gprintf.h> #include <gstreamer-0.10/gst/gstelement.h> #include <glib-2.0/glib/gmessages.h> GstElement *pipeline, *audio,*video,*audioqueue,*videoqueue; static void cb_newpad (GstElement *decodebin, GstPad *pad, gboolean last, gpointer data) { GstPad *audiopad,*videopad; /* link audiopad */ audiopad = gst_element_get_static_pad (audio, "sink"); if (!GST_PAD_IS_LINKED (audiopad)) { gst_pad_link (pad, audiopad); g_print("audiopad has been linked"); g_object_unref (audiopad); return; } /* link videopad */ videopad = gst_element_get_static_pad(video,"sink"); if (!GST_PAD_IS_LINKED (videopad)) { gst_pad_link(pad,videopad); gst_element_link(videoqueue,video); g_object_unref (videopad); return; } } gint main (gint argc, gchar *argv[]) { GMainLoop *loop; GstElement *src, *dec, *conv, *sink, *typefind,*typefind2;//*myplugin; GstElement *convVid,*videosink; GstPad *audiopad,*videopad; GstBus *bus; /* init GStreamer */ gst_init (&argc, &argv); loop = g_main_loop_new (NULL, FALSE); /* make sure we have input */ if (argc != 2) { g_print ("Usage: %s <filename>\n", argv[0]); return -1; } g_print("Playing %s \n",argv[1]); /* setup */ pipeline = gst_pipeline_new ("pipeline"); bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); gst_bus_add_watch (bus, my_bus_callback, loop); gst_object_unref (bus); src = gst_element_factory_make ("filesrc", "source"); g_object_set (G_OBJECT (src), "location", argv[1], NULL); typefind = gst_element_factory_make("typefind","typefinder"); g_signal_connect(typefind,"have-type",G_CALLBACK(typefound_cb),loop); dec = gst_element_factory_make 
("decodebin", "decoder"); g_signal_connect (dec, "new-decoded-pad", G_CALLBACK (cb_newpad), NULL); //myplugin = gst_element_factory_make("myplugin","MyPlugin"); audioqueue = gst_element_factory_make("queue","audioqueue"); videoqueue = gst_element_factory_make("queue","videoqueue"); gst_bin_add_many (GST_BIN (pipeline), src,typefind,dec,NULL); gst_element_link_many (src,typefind,dec,NULL); /* create audio output */ audio = gst_bin_new ("audiobin"); conv = gst_element_factory_make ("audioconvert", "aconv"); typefind2 = gst_element_factory_make("typefind","typefinder2"); //g_signal_connect(typefind2,"have-type",G_CALLBACK(typefound_cb),loop); audiopad = gst_element_get_static_pad (audioqueue, "sink"); sink = gst_element_factory_make ("osssink", "sink"); gst_bin_add_many (GST_BIN (audio),audioqueue,conv,typefind2, sink, NULL); gst_element_link_many (audioqueue,conv,typefind2, sink,NULL); gst_element_add_pad (audio, gst_ghost_pad_new ("sink", audiopad)); gst_object_unref (audiopad); gst_bin_add (GST_BIN (pipeline), audio); /* create video output */ video = gst_bin_new("videobin"); convVid = gst_element_factory_make("ffmpegcolorspace","converter"); videopad = gst_element_get_static_pad(videoqueue,"sink"); videosink = gst_element_factory_make("xvimagesink","videosink"); gst_bin_add_many(GST_BIN(video),videoqueue,convVid,videosink,NULL); gst_element_link_many(videoqueue,convVid,videosink,NULL); gst_element_add_pad(video,gst_ghost_pad_new("sink",videopad)); gst_object_unref(videopad); gst_bin_add(GST_BIN(pipeline),video); /* run */ gst_element_set_state (pipeline, GST_STATE_PLAYING); g_timeout_add (200, (GSourceFunc) cb_print_position, pipeline); g_main_loop_run (loop); /* cleanup */ gst_element_set_state (pipeline, GST_STATE_NULL); gst_object_unref (GST_OBJECT (pipeline)); return 0; } ------------------------------------------------------------------------------ _______________________________________________ gstreamer-devel mailing list [hidden email] 
https://lists.sourceforge.net/lists/listinfo/gstreamer-devel |
Fariq Izwan wrote:
> Hi good people, > > I'm new to gstreamer and I have a problem of using decodebin as a > decoder to play video file. Below is my sample code that I use. The > code below runs fine for one stream of media. But if I want to play > both streams(audio and video), it doesn't work. > Can anyone tell me what I did wrong here? (Please don't suggest to use > playbin) :) > > Thank you. > > Fariq. > > ####Sample code#### > > #include <gst/gst.h> > #include <glib-2.0/glib/gprintf.h> > #include <gstreamer-0.10/gst/gstelement.h> > #include <glib-2.0/glib/gmessages.h> > > GstElement *pipeline, *audio,*video,*audioqueue,*videoqueue; > > static void > cb_newpad (GstElement *decodebin, > GstPad *pad, > gboolean last, > gpointer data) > { > GstPad *audiopad,*videopad; > > /* link audiopad */ > audiopad = gst_element_get_static_pad (audio, "sink"); > if (!GST_PAD_IS_LINKED (audiopad)) { > gst_pad_link (pad, audiopad); > g_print("audiopad has been linked"); > g_object_unref (audiopad); > return; > } You need to check the media type on the provided pad. (Get the caps from the pad, the structure from the caps, and check the name of the structure (audio/video/something-else).) Depending on that, you link the audio- or videobin. Also, in the above code you leak the audiopad if it is linked already. The above code might work if you check the result of gst_pad_link() and only return if that worked; otherwise you continue and try the next pad. 
Stefan > /* link videopad */ > videopad = gst_element_get_static_pad(video,"sink"); > if (!GST_PAD_IS_LINKED (videopad)) { > gst_pad_link(pad,videopad); > gst_element_link(videoqueue,video); > g_object_unref (videopad); > return; > } > } > > gint > main (gint argc, > gchar *argv[]) > { > GMainLoop *loop; > GstElement *src, *dec, *conv, *sink, *typefind,*typefind2;//*myplugin; > GstElement *convVid,*videosink; > GstPad *audiopad,*videopad; > GstBus *bus; > > /* init GStreamer */ > gst_init (&argc, &argv); > loop = g_main_loop_new (NULL, FALSE); > > /* make sure we have input */ > if (argc != 2) { > g_print ("Usage: %s <filename>\n", argv[0]); > return -1; > } > > g_print("Playing %s \n",argv[1]); > > /* setup */ > pipeline = gst_pipeline_new ("pipeline"); > > bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); > gst_bus_add_watch (bus, my_bus_callback, loop); > gst_object_unref (bus); > > src = gst_element_factory_make ("filesrc", "source"); > g_object_set (G_OBJECT (src), "location", argv[1], NULL); > typefind = gst_element_factory_make("typefind","typefinder"); > g_signal_connect(typefind,"have-type",G_CALLBACK(typefound_cb),loop); > dec = gst_element_factory_make ("decodebin", "decoder"); > g_signal_connect (dec, "new-decoded-pad", G_CALLBACK (cb_newpad), NULL); > //myplugin = gst_element_factory_make("myplugin","MyPlugin"); > audioqueue = gst_element_factory_make("queue","audioqueue"); > videoqueue = gst_element_factory_make("queue","videoqueue"); > gst_bin_add_many (GST_BIN (pipeline), src,typefind,dec,NULL); > gst_element_link_many (src,typefind,dec,NULL); > > /* create audio output */ > audio = gst_bin_new ("audiobin"); > conv = gst_element_factory_make ("audioconvert", "aconv"); > typefind2 = gst_element_factory_make("typefind","typefinder2"); > //g_signal_connect(typefind2,"have-type",G_CALLBACK(typefound_cb),loop); > audiopad = gst_element_get_static_pad (audioqueue, "sink"); > sink = gst_element_factory_make ("osssink", "sink"); > gst_bin_add_many 
(GST_BIN (audio),audioqueue,conv,typefind2, sink, > NULL); > gst_element_link_many (audioqueue,conv,typefind2, sink,NULL); > gst_element_add_pad (audio, > gst_ghost_pad_new ("sink", audiopad)); > gst_object_unref (audiopad); > gst_bin_add (GST_BIN (pipeline), audio); > > /* create video output */ > video = gst_bin_new("videobin"); > convVid = gst_element_factory_make("ffmpegcolorspace","converter"); > videopad = gst_element_get_static_pad(videoqueue,"sink"); > videosink = gst_element_factory_make("xvimagesink","videosink"); > gst_bin_add_many(GST_BIN(video),videoqueue,convVid,videosink,NULL); > gst_element_link_many(videoqueue,convVid,videosink,NULL); > gst_element_add_pad(video,gst_ghost_pad_new("sink",videopad)); > gst_object_unref(videopad); > gst_bin_add(GST_BIN(pipeline),video); > > /* run */ > gst_element_set_state (pipeline, GST_STATE_PLAYING); > g_timeout_add (200, (GSourceFunc) cb_print_position, pipeline); > g_main_loop_run (loop); > > /* cleanup */ > gst_element_set_state (pipeline, GST_STATE_NULL); > gst_object_unref (GST_OBJECT (pipeline)); > > return 0; > } > ------------------------------------------------------------------------ > > ------------------------------------------------------------------------------ > > ------------------------------------------------------------------------ > > _______________________________________________ > gstreamer-devel mailing list > [hidden email] > https://lists.sourceforge.net/lists/listinfo/gstreamer-devel > ------------------------------------------------------------------------------ _______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.sourceforge.net/lists/listinfo/gstreamer-devel |
In reply to this post by Fariq Izwan
Thank you very much. It works now. :)
Here is what I did: static void cb_newpad (GstElement *decodebin, GstPad *pad, gboolean last, gpointer data) { GstCaps *caps; GstStructure *str; GstPad *audiopad,*videopad; gchar *tex; /* check media type */ caps = gst_pad_get_caps (pad); str = gst_caps_get_structure (caps, 0); tex = gst_structure_get_name(str); if(g_strrstr(tex,"audio")) { audiopad = gst_element_get_static_pad (audio, "sink"); if(GST_PAD_IS_LINKED(audiopad)) { g_object_unref(audiopad); return; } else { gst_pad_link(pad,audiopad); g_object_unref(audiopad); } return; } Fariq.
------------------------------------------------------------------------------ _______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.sourceforge.net/lists/listinfo/gstreamer-devel |
Free forum by Nabble | Edit this page |