gstreamer video playback, cannot play


gstreamer video playback, cannot play

aero
Hi,

I just started with GStreamer. I have gone through the example in the Application Development Manual; it is for playing audio and it works well.

I have done some study and tried to do the same for video/audio with the following code, but it does not work, and I am getting the error below.
Can someone point out the mistake, please?
I just don't know how to connect the audio/video pipeline.

Now playing: ../Songs/nob.ogg
Running...
Dynamic pad created, linking demuxer/decoder
Error: Internal data stream error.
Returned, stopping playback
Deleting pipeline

#include <gst/gst.h>
#include <glib.h>

static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
        GstPad *sinkpad;
        GstElement *decoder = (GstElement *) data;
        g_print ("Dynamic pad created, linking demuxer/decoder\n");
        sinkpad = gst_element_get_static_pad (decoder, "sink");
        gst_pad_link (pad, sinkpad);
        gst_object_unref (sinkpad);
}

static gboolean cb_print_position (GstElement *pipeline)
{
        GstFormat fmt = GST_FORMAT_TIME;
        gint64 pos, len;
        if (gst_element_query_position (pipeline, &fmt, &pos)
                && gst_element_query_duration (pipeline, &fmt, &len)) {
                g_print ("Time: %" GST_TIME_FORMAT " / %" GST_TIME_FORMAT "\r",
                        GST_TIME_ARGS (pos), GST_TIME_ARGS (len));
        }
        /* call me again */
        return TRUE;
}

static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
        GMainLoop *loop = (GMainLoop *) data;
        switch (GST_MESSAGE_TYPE (msg)) {
                case GST_MESSAGE_EOS:
                        g_print ("End of stream\n");
                        g_main_loop_quit (loop);
                        break;
                case GST_MESSAGE_ERROR: {
                        gchar *debug;
                        GError *error;
                        gst_message_parse_error (msg, &error, &debug);
                        g_free (debug);
                        g_printerr ("Error: %s\n", error->message);
                        g_error_free (error);
                        g_main_loop_quit (loop);
                        break;
                }
                default:
                break;
        }
        return TRUE;
}


int main (int argc, char *argv[])
{
        GMainLoop *loop;
        GstElement *pipeline, *source, *demuxer, *decoder, *conv, *sink;
        GstBus *bus;
        gst_init (&argc, &argv);

        loop = g_main_loop_new (NULL, FALSE);
        if (argc != 2) {
                g_printerr ("Usage: %s <Ogg/Vorbis filename>\n", argv[0]);
                return -1;
        }
        pipeline = gst_pipeline_new ("video-player");
        source = gst_element_factory_make ("filesrc", "file-source");
        demuxer = gst_element_factory_make ("oggdemux", "ogg-demuxer");
        decoder = gst_element_factory_make ("vorbisdec", "vorbis-decoder");
        conv = gst_element_factory_make ("autovideoconvert", "converter");
        sink = gst_element_factory_make ("autovideosink", "video-output");
        if (!pipeline || !source || !demuxer || !decoder || !conv || !sink) {
                g_printerr ("One element could not be created. Exiting.\n");
                return -1;
        }
        g_object_set (G_OBJECT (source), "location", argv[1], NULL);
        bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
        gst_bus_add_watch (bus, bus_call, loop);
        gst_object_unref (bus);
        gst_bin_add_many (GST_BIN (pipeline), source, demuxer, decoder, conv, sink, NULL);
        gst_element_link (source, demuxer);
        gst_element_link_many (decoder, conv, sink, NULL);
        g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), decoder);
        g_print ("Now playing: %s\n", argv[1]);

        gst_element_set_state (pipeline, GST_STATE_PLAYING);
        g_print ("Running...\n");
        g_timeout_add (200, (GSourceFunc) cb_print_position, pipeline);
        g_main_loop_run (loop);
        g_print ("Returned, stopping playback\n");
        gst_element_set_state (pipeline, GST_STATE_NULL);
        g_print ("Deleting pipeline\n");
        gst_object_unref (GST_OBJECT (pipeline));
        return 0;
}

Re: gstreamer video playback, cannot play

Stefan Sauer
On 10.03.2013 20:29, aero wrote:

> [original question snipped]
1.) Check return codes (e.g. from gst_pad_link()).
2.) You don't link the video part, and unless your ../Songs/nob.ogg has a
video track there is nothing to feed a video branch anyway.
3.) Either feed the 'right' kind of files to your app, use playbin, which
handles the complexity for you, or implement that yourself (only add
autovideosink if you need it).
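
For example, a minimal playbin-based player could look like the sketch
below (untested; on 0.10 the element is called "playbin2", on 1.0 it is
"playbin"):

#include <gst/gst.h>
#include <glib.h>

int main (int argc, char *argv[])
{
        GstElement *play;
        GMainLoop *loop;
        gchar *uri;

        gst_init (&argc, &argv);
        if (argc != 2) {
                g_printerr ("Usage: %s <filename>\n", argv[0]);
                return -1;
        }
        loop = g_main_loop_new (NULL, FALSE);

        play = gst_element_factory_make ("playbin2", "play");
        /* playbin takes a URI, not a plain path; note that
         * g_filename_to_uri() needs an absolute filename */
        uri = g_filename_to_uri (argv[1], NULL, NULL);
        g_object_set (play, "uri", uri, NULL);
        g_free (uri);

        /* a bus watch (like your bus_call) should quit the loop on
         * EOS or error */
        gst_element_set_state (play, GST_STATE_PLAYING);
        g_main_loop_run (loop);

        gst_element_set_state (play, GST_STATE_NULL);
        gst_object_unref (play);
        return 0;
}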

Stefan


Re: gstreamer video playback, cannot play

aero
Thanks, Stefan, for your reply.
I actually figured out that I do not understand how the elements need to be connected.
My question is: how do I know the proper elements to connect when I want to play audio or video, starting from the demuxer down to the sink elements? I can understand some elements like demuxer, decoder and converter, but what about filesrc, fakesrc, fakesink and videotestsrc? The documentation just says that gst_element_factory_make (factory, name) creates an element of that type. What does each element mean, how do I understand its properties, and in what ways can it be used?

In brief, my question is: which elements are required to play audio and video, and how do I connect them?
If the elements depend on the format of the file (Ogg, MP3, AVI, etc.), how do I know what to use for which type?

I just want to get a basic idea of element properties before I do any programming.




Re: gstreamer video playback, cannot play

Ian Davidson

There are sounds that we hear and things that we see. In order to be able to record these things, they need to be converted to some digital format – and there are many choices for these. For example, a sound could be stored as a WAV file, a WMA file, an OGG file (and many more). The sound could have been recorded at 44100 samples per second (quite typical) or perhaps 1100 samples per second (very poor quality).

Many of the elements in a GStreamer pipeline are used to manipulate the data from one format to another. A typical element will have a 'sink' (where you can 'pour in some data') and a 'src' (which will supply data to the next element in the sequence).

Obviously, the data will need to come from somewhere – so if you are reading a file, you will have a 'filesrc' which will pick up the data from a file and pass it on to the next element; such an element will have a 'location' parameter to tell it where to find the file.

At the other end of the pipeline you will want to do something with this data you have been manipulating, so you will choose a 'sink' such as 'alsasink' to play the sound.

In between, you will need to manipulate the data that you read from disk such that it can be played – so if your input is a WAV file, you will need 'wavparse' to convert the data.

It may be that 'wavparse' is not always ready to accept data when your source wants to give it, so it is a good idea to include a 'queue' between elements to handle such problems.

I don't know exactly what you want to do. Go to http://gstreamer.freedesktop.org/documentation/ and follow the link to “Overview of all Plug-ins” to see what plug-ins are available. Many of the descriptions give an example of how you might use them.

Start by building a test pipeline using gst-launch. When you have got that working, then look at writing a program to replicate what you have tested with gst-launch.
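
For example, something along these lines should play an Ogg file that
contains Theora video and Vorbis audio (a sketch using 0.10-era element
names; adjust the decoders to whatever your file actually contains):

gst-launch-0.10 filesrc location=../Songs/nob.ogg ! oggdemux name=demux \
        demux. ! queue ! vorbisdec ! audioconvert ! autoaudiosink \
        demux. ! queue ! theoradec ! ffmpegcolorspace ! autovideosink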

On 12/03/2013 02:06, aero wrote:

[...]

Re: gstreamer video playback, cannot play

Chuck Crisler-2
Another answer is buried in the GStreamer docs. Elements are connected by pads. Pads have caps (capabilities). Those caps describe the stream, all of the relevant information is there. That is how decodebin works - it examines the caps and then 'does the right thing'. If you write code to parse the caps and then switch to use the right elements, you will be duplicating decodebin, though it is a good learning exercise.

You also really need to learn about caps to understand how to build encoding pipelines. Many of the elements are very restrictive in what they accept. If you don't give them what they need, it won't work, and it may not be obvious why until you turn on detailed logging.

The short answer is: caps.
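
For example, the pad-added callback from the original post could check
the caps before linking, so that a Vorbis pad never ends up connected to
a video branch (an untested 0.10-style sketch; in 1.0 use
gst_pad_query_caps() instead of gst_pad_get_caps()):

static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
        GstElement *decoder = (GstElement *) data;
        GstCaps *caps;
        const gchar *name;
        GstPad *sinkpad;

        caps = gst_pad_get_caps (pad);
        name = gst_structure_get_name (gst_caps_get_structure (caps, 0));
        g_print ("new pad with caps %s\n", name);  /* e.g. "audio/x-vorbis" */

        if (g_str_has_prefix (name, "audio/")) {
                sinkpad = gst_element_get_static_pad (decoder, "sink");
                /* check the return code, as suggested earlier */
                if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
                        g_printerr ("failed to link pad (%s)\n", name);
                gst_object_unref (sinkpad);
        }
        gst_caps_unref (caps);
}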

On Tue, Mar 12, 2013 at 5:53 AM, Ian Davidson <[hidden email]> wrote:

[...]

Re: gstreamer video playback, cannot play

David Röthlisberger
In reply to this post by Ian Davidson
I'll repeat this just because it's the most important part of Ian's advice:

    Start by building a test pipeline using gst-launch.


On 12 Mar 2013, at 09:53, Ian Davidson wrote:

> [...]

Re: gstreamer video playback, cannot play

Tim-Philipp Müller-2
In reply to this post by Chuck Crisler-2
On Tue, 2013-03-12 at 10:17 -0400, Chuck Crisler wrote:

> [...]

Most of these basic concepts are also explained in the Application
Developer's Manual:

http://gstreamer.freedesktop.org/data/doc/gstreamer/head/manual/html/index.html

It is the first link on

  http://gstreamer.freedesktop.org/documentation/

the one marked with "Read this first". Buried in a rather unsightly HTML
table, admittedly.

Cheers
 -Tim


Re: gstreamer video playback, cannot play

parthlathiya
In reply to this post by aero
If you don't have much idea of what to use for which action, you can try a high-level element like uridecodebin or playbin and then, using the debugging tools, generate a pipeline graph that clearly shows which elements are used to play a given audio/video file; see the sketch below.

Once you have the element names, inspect each one with gst-inspect-1.0 (e.g. gst-inspect-1.0 rtspsrc), which will print the full description of that element.
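
For example (a sketch; the URI is a placeholder, Graphviz must be
installed to render the graph, and the exact .dot file names may differ):

GST_DEBUG_DUMP_DOT_DIR=/tmp gst-launch-1.0 playbin uri=file:///path/to/file.ogg
# one .dot file is dumped per state change; render one of them:
dot -Tpng /tmp/*PAUSED_PLAYING.dot -o pipeline.png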