translating command to code for playing an avi


translating command to code for playing an avi

rossana
Hi, I'm trying to play an AVI file. I know there's the playbin2 option, but I want to gain more control and understand some issues as well.

I can play an AVI from the console with this command:
gst-launch filesrc location=/home/rossana/video11.avi ! avidemux name=demux  demux.audio_00 ! decodebin ! queue ! audioconvert ! audioresample ! autoaudiosink   demux.video_00 ! decodebin ! queue ! ffmpegcolorspace ! videoscale ! autovideosink

I attempted to translate it into C/C++ code, but I got "caps incompatible" errors.

Thanks

Rossana


void on_pad_added (GstElement *element, GstPad *pad, GstElement *data)
{
        g_debug ("Signal: pad-added");
        GstCaps *caps;
        GstStructure *str;

        caps = gst_pad_get_caps (pad);
        g_assert (caps != NULL);
        str = gst_caps_get_structure (caps, 0);
        g_assert (str != NULL);

        cout << "linking pads" << endl << endl;
        if (g_strrstr (gst_structure_get_name (str), "video")) {
                g_debug ("Linking video pad to dec_vd");
                // actually link it
                //GstPad *targetsink = gst_element_get_pad (decvd, "sink");
                GstPad *targetsink = gst_element_get_pad (data, "sink");
                cout << "linking video queue" << endl << endl;
                g_assert (targetsink != NULL);
                gst_pad_link (pad, targetsink);
                gst_object_unref (targetsink);
        }

        if (g_strrstr (gst_structure_get_name (str), "audio")) {
                g_debug ("Linking audio pad to dec_ad");
                // actually link it
                //GstPad *targetsink = gst_element_get_pad (decad, "sink");
                GstPad *targetsink = gst_element_get_pad (data, "sink");
                cout << "linking audio queue" << endl << endl;
                g_assert (targetsink != NULL);
                gst_pad_link (pad, targetsink);
                gst_object_unref (targetsink);
        }

        gst_caps_unref (caps);
}



int main (int   argc, char *argv[])
{
  GMainLoop *loop;

  GstElement *pipeline;
  GstBus *bus;

  /* Initialisation */
  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, FALSE);


  /* Check input arguments */
  if (argc != 1) {
    g_printerr ("Usage: %s <AVI filename>\n", argv[0]);
    return -1;
  }

  /* Create gstreamer elements */
  pipeline = gst_pipeline_new ("media-player");
  source = gst_element_factory_make ("filesrc", "file-source");
  demuxer = gst_element_factory_make ("avidemux", "avi-demuxer");
  decvd = gst_element_factory_make ("decodebin2", "decvd");
  decad = gst_element_factory_make ("decodebin2", "decad");
  aconvert = gst_element_factory_make ("audioconvert", "aconvert");
  asample = gst_element_factory_make ("audioresample", "asample");
  vdsink = gst_element_factory_make ("autovideosink", "video-sink");
  vdqueue = gst_element_factory_make ("queue", "video-queue");
  adqueue = gst_element_factory_make ("queue", "audio-queue");
  adsink = gst_element_factory_make ("autoaudiosink", "audio-sink");
  color = gst_element_factory_make("ffmpegcolorspace","color");
  vscale = gst_element_factory_make("videoscale","vscale");

  if (!pipeline || !source || !demuxer || !decvd || !decad || !vdsink || !vdqueue || !adqueue || !adsink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* Set up the pipeline */

  /* we set the input filename to the source element */
   g_object_set (G_OBJECT (source), "location", "/home/videos/video11.avi", NULL);

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* we add all elements into the pipeline */
  /* file-source | ogg-demuxer | vorbis-decoder | converter | alsa-output */
  //GstElement *clk = gst_element_factory_make("clockoverlay","clk");


  gst_bin_add_many (GST_BIN (pipeline),source, demuxer, decvd, decad, aconvert, vscale,asample,color,adqueue, vdqueue, vdsink, adsink,  NULL);


  /* we link the elements together */
  /* file-source -> demuxer ~> {video-decoder -> video-output} ~> {audio-decoder -> alsa-output} */
  gst_element_link (source, demuxer);
 

  GstPad *targetsrc = gst_element_get_pad(demuxer, "video_%02");

  GstPad *targetsrc2 = gst_element_get_pad(demuxer, "audio_%02");


  GstPad *padV = gst_element_get_static_pad(decvd,"sink");
  GstPad *padA = gst_element_get_static_pad(decad,"sink");
 

  gst_pad_link (targetsrc,padV);
  gst_object_unref (targetsrc);


  g_signal_connect (decvd, "pad-added", G_CALLBACK (on_pad_added), vdqueue);
  cout << "decvd - vdqueue" << endl << endl;
  gst_element_link (vdqueue,color);
  gst_element_link (color,vscale);
  gst_element_link (vscale,vdsink);

  gst_pad_link (targetsrc2,padA);
  gst_object_unref (targetsrc2);
  g_signal_connect (decad, "pad-added", G_CALLBACK (on_pad_added), adqueue);
  cout << "decad - adqueue" << endl << endl;
  gst_element_link (adqueue, aconvert);
  gst_element_link (aconvert, asample);
  gst_element_link (asample, adsink);

   

  /* Set the pipeline to "playing" state*/
  g_print ("Playing: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL,"playerdec1");


  g_main_loop_run (loop);


  /* Out of the main loop, clean up nicely */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));
  //g_print ("Deleting pipeline\n");


  return 0;
}







Re: translating command to code for playing an avi

Victor Jaquez
On Tue, Jan 03, 2012 at 04:21:32AM -0200, Rossana Guerra wrote:

> Hi, I'm trying to play an AVI file. I know there's the playbin2 option, but I
> want to gain more control and understand some issues as well.
>
> I can play an AVI from the console with this command:
> gst-launch filesrc location=/home/rossana/video11.avi ! avidemux
> name=demux  demux.audio_00 ! decodebin ! queue ! audioconvert !
> audioresample ! autoaudiosink   demux.video_00 ! decodebin ! queue !
> ffmpegcolorspace ! videoscale ! autovideosink
>
> I attempted to translate it into C/C++ code, but I got "caps incompatible" errors.

I guess you could use gst_parse_launch

http://gstreamer.freedesktop.org/data/doc/gstreamer/head/gstreamer/html/gstreamer-GstParse.html#gst-parse-launch
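For reference, gst_parse_launch takes the same description string as gst-launch. A minimal, self-contained sketch against the 0.10-style API (reusing the command line from the first post; quit-on-EOS/error handling on the bus is omitted):

#include <gst/gst.h>

int main (int argc, char *argv[])
{
  GError *error = NULL;
  GstElement *pipeline;
  GMainLoop *loop;

  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* same description string as the gst-launch command line */
  pipeline = gst_parse_launch (
      "filesrc location=/home/rossana/video11.avi ! avidemux name=demux "
      "demux.audio_00 ! decodebin ! queue ! audioconvert ! audioresample ! autoaudiosink "
      "demux.video_00 ! decodebin ! queue ! ffmpegcolorspace ! videoscale ! autovideosink",
      &error);
  if (pipeline == NULL) {
    g_printerr ("Parse error: %s\n", error->message);
    g_error_free (error);
    return -1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);   /* add a bus watch to quit on EOS/error, as in the hand-built version */

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}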

vmjl

>
> [code snipped]


Re: translating command to code for playing an avi

rossana
Yes, I did that; sorry, I forgot to mention it. But I want to build the pipeline myself so I can customize it.
Any suggestion about what I am doing wrong in the code? The "caps are incompatible" error seems to appear in the link between the queue (which carries the video) and the decodebin.

Thanks


On 3 January 2012 at 08:23, Víctor M. Jáquez L. <[hidden email]> wrote:
On Tue, Jan 03, 2012 at 04:21:32AM -0200, Rossana Guerra wrote:
> [...]

I guess you could use gst_parse_launch

http://gstreamer.freedesktop.org/data/doc/gstreamer/head/gstreamer/html/gstreamer-GstParse.html#gst-parse-launch

vmjl

>
> Thanks
>
> Rossana
>
>
> void on_pad_added (GstElement *element, GstPad *pad, GstElement *data)
> {
>         g_debug ("Signal: pad-added");
>         GstCaps *caps;
>         GstStructure *str;
>
>         caps = gst_pad_get_caps (pad);
>         g_assert (caps != NULL);
>         str = gst_caps_get_structure (caps, 0);
>         g_assert (str != NULL);
>
>         cout << "enlazando enlazara pads" << endl << endl;
>         if (g_strrstr (gst_structure_get_name (str), "video")) {
>                 g_debug ("Linking video pad to dec_vd");
>                 // Link it actually
>                 //GstPad *targetsink = gst_element_get_pad (decvd, "sink");
>                 GstPad *targetsink = gst_element_get_pad (data, "sink");
>                 cout << "enlazando cola de video" << endl << endl;
>                 g_assert (targetsink != NULL);
>                 gst_pad_link (pad, targetsink);
>                 gst_object_unref (targetsink);
>         }
>
>         if (g_strrstr (gst_structure_get_name (str), "audio")) {
>                 g_debug ("Linking audio pad to dec_ad");
>                 // Link it actually
>                 //GstPad *targetsink = gst_element_get_pad (decad, "sink");
>                 GstPad *targetsink = gst_element_get_pad (data, "sink");
>                 cout << "enlazando cola de audio" << endl << endl;
>                 g_assert (targetsink != NULL);
>                 gst_pad_link (pad, targetsink);
>                 gst_object_unref (targetsink);
>         }
>
>         gst_caps_unref (caps);
> }
>
>
>
> int main (int   argc, char *argv[])
> {
>   GMainLoop *loop;
>
>   GstElement *pipeline;
>   GstBus *bus;
>
>   /* Initialisation */
>   gst_init (&argc, &argv);
>
>   loop = g_main_loop_new (NULL, FALSE);
>
>
>   /* Check input arguments */
>   if (argc != 1) {
>     g_printerr ("Usage: %s <AVI filename>\n", argv[0]);
>     return -1;
>   }
>
>   /* Create gstreamer elements */
>   pipeline = gst_pipeline_new ("media-player");
>   source = gst_element_factory_make ("filesrc", "file-source");
>   demuxer = gst_element_factory_make ("avidemux", "avi-demuxer");
>   decvd = gst_element_factory_make ("decodebin2", "decvd");
>   decad = gst_element_factory_make ("decodebin2", "decad");
>   aconvert = gst_element_factory_make ("audioconvert", "aconvert");
>   asample = gst_element_factory_make ("audioresample", "asample");
>   vdsink = gst_element_factory_make ("autovideosink", "video-sink");
>   vdqueue = gst_element_factory_make ("queue", "video-queue");
>   adqueue = gst_element_factory_make ("queue", "audio-queue");
>   adsink = gst_element_factory_make ("autoaudiosink", "audio-sink");
>   color = gst_element_factory_make("ffmpegcolorspace","color");
>   vscale = gst_element_factory_make("videoscale","vscale");
>
>   if (!pipeline || !source || !demuxer || !decvd || !decad || !vdsink ||
> !vdqueue || !adqueue || !adsink) {
>     g_printerr ("One element could not be created. Exiting.\n");
>     return -1;
>   }
>
>   /* Set up the pipeline */
>
>   /* we set the input filename to the source element */
>    g_object_set (G_OBJECT (source), "location", "/home/videos/video11.avi",
> NULL);
>
>   /* we add a message handler */
>   bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
>   gst_bus_add_watch (bus, bus_call, loop);
>   gst_object_unref (bus);
>
>   /* we add all elements into the pipeline */
>   /* file-source | ogg-demuxer | vorbis-decoder | converter | alsa-output */
>   //GstElement *clk = gst_element_factory_make("clockoverlay","clk");
>
>
>   gst_bin_add_many (GST_BIN (pipeline),source, demuxer, decvd, decad,
> aconvert, vscale,asample,color,adqueue, vdqueue, vdsink, adsink,  NULL);
>
>
>   /* we link the elements together */
>   /* file-source -> demuxer ~> {video-decoder -> video-output} ~>
> {audio-decoder -> alsa-output} */
>   gst_element_link (source, demuxer);
>
>
>   GstPad *targetsrc = gst_element_get_pad(demuxer, "video_%02");
>
>   GstPad *targetsrc2 = gst_element_get_pad(demuxer, "audio_%02");
>
>
>   GstPad *padV = gst_element_get_static_pad(decvd,"sink");
>   GstPad *padA = gst_element_get_static_pad(decad,"sink");
>
>
>   gst_pad_link (targetsrc,padV);
>   gst_object_unref (targetsrc);
>
>
>   g_signal_connect (decvd, "pad-added", G_CALLBACK (on_pad_added), vdqueue);
>   cout << "decvd - vdqueue" << endl << endl;
>   gst_element_link (vdqueue,color);
>   gst_element_link (color,vscale);
>   gst_element_link (vscale,vdsink);
>
>   gst_pad_link (targetsrc2,padA);
>   gst_object_unref (targetsrc2);
>   g_signal_connect (decad, "pad-added", G_CALLBACK (on_pad_added), adqueue);
>   cout << "decad - adqueue" << endl << endl;
>   gst_element_link (adqueue, aconvert);
>   gst_element_link (aconvert, asample);
>   gst_element_link (asample, adsink);
>
>
>
>   /* Set the pipeline to "playing" state*/
>   g_print ("Playing: %s\n", argv[1]);
>   gst_element_set_state (pipeline, GST_STATE_PLAYING);
>
>   GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline),
> GST_DEBUG_GRAPH_SHOW_ALL,"playerdec1");
>
>
>   g_main_loop_run (loop);
>
>
>   /* Out of the main loop, clean up nicely */
>   gst_element_set_state (pipeline, GST_STATE_NULL);
>   gst_object_unref (GST_OBJECT (pipeline));
>   //g_print ("Deleting pipeline\n");
>
>
>   return 0;
> }

> _______________________________________________
> gstreamer-devel mailing list
> [hidden email]
> http://lists.freedesktop.org/mailman/listinfo/gstreamer-devel


Re: translating command to code for playing an avi

rossana
I need to point out that it works when I use gst_parse_launch. It fails when I try to create the pipeline myself.
Thanks

2012/1/3 Rossana Guerra <[hidden email]>
[...]




Re: translating command to code for playing an avi

Nathanael D. Noblet
On 01/03/2012 11:21 AM, Rossana Guerra wrote:
> I need to point out that it works when I use gst_parse_launch. It fails
> when I try to create the pipeline myself.

Hello,

   I haven't read this whole thread; however, when going from a gst-launch
command to code that builds a pipeline, the most common mistake is trying
to link a demuxer or decoder to the rest of the pipeline statically. The issue
is that until the element has seen some data, it doesn't know how many streams
the file contains (a file could have multiple audio streams for one video
stream, for example), so its source pads don't exist yet. You have to use the
'pad-added' signal to link things dynamically once the pads appear. The
GStreamer "hello world" example shows how to do exactly that (a rough sketch
follows below). Hopefully that helps if this is the issue; otherwise feel free
to disregard this message...
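A minimal sketch of that pattern against the 0.10 API (the element and variable names here are generic placeholders, not taken from the code posted earlier):

static void on_new_pad (GstElement *demux, GstPad *srcpad, gpointer user_data)
{
  /* user_data is the downstream element we want to feed, e.g. a queue */
  GstElement *downstream = GST_ELEMENT (user_data);
  GstPad *sinkpad = gst_element_get_static_pad (downstream, "sink");

  if (!gst_pad_is_linked (sinkpad))
    gst_pad_link (srcpad, sinkpad);

  gst_object_unref (sinkpad);
}

/* ... while building the pipeline: */
g_signal_connect (demux, "pad-added", G_CALLBACK (on_new_pad), queue);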

--
Nathanael d. Noblet
t 403.875.4613

Re: translating command to code for playing an avi

rossana
Actually, I do use the "pad-added" signal; it was posted in the thread :) But I must be doing it wrong, since I get the "caps are incompatible" message.
Thanks!

2012/1/3 Nathanael D. Noblet <[hidden email]>
[...]


Re: translating command to code for playing an avi

Nicolai Hess


2012/1/3 Rossana Guerra <[hidden email]>
[...]


Hi Rossana,
I think you need an additional pad-added callback for your avidemux element.
I don't get the "caps are incompatible" error message; instead, the
targetsrc and targetsrc2 pads from
GstPad *targetsrc = gst_element_get_pad(demuxer, "video_%02");
GstPad *targetsrc2 = gst_element_get_pad(demuxer, "audio_%02");
are NULL, because the demuxer's src pads are "sometimes" pads and
are not available at this point.
So, add another pad-added callback and link your video and audio
decodebins there, depending on the type of the pad that was added.


Something like this:

// register the callback
g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_demuxer_pad_added), pipeline);

// the demuxer "pad-added" callback
void on_demuxer_pad_added (GstElement *demuxer, GstPad *pad, GstElement *pipeline)
{
  char* pad_name = gst_pad_get_name(pad);
  if(g_strrstr(pad_name, "video_00"))
  {
    // look up the video decodebin by the name it was created with
    GstElement* decvd = gst_bin_get_by_name(GST_BIN(pipeline), "decvd");
    GstPad *padV = gst_element_get_static_pad(decvd, "sink");
    gst_pad_link(pad, padV);
    gst_object_unref(padV);
    gst_object_unref(decvd);
  }
  else if(g_strrstr(pad_name, "audio_00"))
  {
    // look up the audio decodebin by the name it was created with
    GstElement* decad = gst_bin_get_by_name(GST_BIN(pipeline), "decad");
    GstPad *padA = gst_element_get_static_pad(decad, "sink");
    gst_pad_link(pad, padA);
    gst_object_unref(padA);
    gst_object_unref(decad);
  }
  g_free(pad_name);
}
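With that callback in place, the explicit gst_element_get_pad()/gst_pad_link() calls on the demuxer in main() should be dropped (they were operating on NULL pads). The linking part of main() then reduces to roughly this sketch, reusing the element variables and the two callbacks already shown in this thread:

gst_element_link (source, demuxer);

/* the demuxer's src pads appear only once data flows; link them in the callback */
g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_demuxer_pad_added), pipeline);

/* the decodebins also create their src pads dynamically; link them to the queues */
g_signal_connect (decvd, "pad-added", G_CALLBACK (on_pad_added), vdqueue);
g_signal_connect (decad, "pad-added", G_CALLBACK (on_pad_added), adqueue);

/* the rest of the chains are static and can be linked up front */
gst_element_link_many (vdqueue, color, vscale, vdsink, NULL);
gst_element_link_many (adqueue, aconvert, asample, adsink, NULL);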
