Hi everybody,
I have started the development of a multimedia player based on GStreamer pipelines and I have encountered some problems. I work on a Debian platform using Nvidia drivers, and therefore VDPAU.
after reading the documentations on the gstreamer mechanisms and the different example codes, i have started to make pipelines using gst-launch, and then i have written some C source code. i don't know if it's the better way, but i use the playbin2 element, with setting video-sink property to a "vdpau sink" pipeline. in commandLine, i use this : gst-launch-0.10 -v -m playbin2 uri=file:///Movies/test.mkv video-sink="vdpauvideopostprocess ! vdpausink" audio-sink="alsasink" this works fine, except that i can't set the Rank of the vdpau decoder, and so it hangs...
i tried to make the same in C source code, and the results are not convincing... a piece of my code is below : /*************************************************************************************************************************/
int main (int argc, char *argv[]) { /***************************/ /*Variables Declaration*/ /***************************/ //GStreamer main event loop
GMainLoop *loop; //elementary elements GstElement *playbin;
GstElement *videoOutputBin; //VideoSink elements for VDPau; GstElement *vdpauSink;
GstElement *vdpauVideoPostProcess; //AudioSink elements for Alsa;
GstElement *alsaSink; //Gstreamer bus to communicate with elements
GstBus *bus; //pad for videoOutputBin GstPad *vdpauBinPad;
/********************/ /*Real code start*/ /********************/ //Gstreamer initialisation gst_init(&argc, &argv);
loop = g_main_loop_new(NULL, FALSE); //Sanity check on parameters
if(argc != 2) { g_printerr ("Usage: %s <Multimedia filename>\n", argv[0]);
return -1; } //elements initialisation videoOutputBin = gst_bin_new("vdpau-output-chain");
playbin = gst_element_factory_make("playbin2","play-bin");
vdpauSink = gst_element_factory_make("vdpausink","vdpau-sink");
vdpauVideoPostProcess = gst_element_factory_make("vdpauvideopostprocess","post-process");
alsaSink = gst_element_factory_make("alsasink","alsa-sink");
//Sanity check on elements validity if(!playbin || !vdpauSink || !alsaSink || !audioConverter ||!vdpauVideoPostProcess || !videoOutputBin)
{ g_printerr("Some gstreamer elements could'nt itialized exit\n");
return -1; } //set up the pipeline
//1 - We set the URI input filename to the fileSource element g_object_set(G_OBJECT(playbin),"uri",argv[1],NULL);
//2 - We add a message Handler bus = gst_pipeline_get_bus (GST_PIPELINE(playbin));
gst_bus_add_watch(bus, bus_callback, loop); gst_object_unref(bus); //3 - define specific rank for VDpau decoders GstElementFactory *vdpauh264dec = gst_element_factory_find("vdpauh264dec");
gst_plugin_feature_set_rank (GST_PLUGIN_FEATURE(vdpauh264dec), GST_RANK_PRIMARY+2); GstElementFactory *vdpaumpeg4dec = gst_element_factory_find("vdpaumpeg4dec");
gst_plugin_feature_set_rank (GST_PLUGIN_FEATURE(vdpaumpeg4dec), GST_RANK_PRIMARY+2); GstElementFactory *vdpaumpegdec = gst_element_factory_find("vdpaumpegdec");
gst_plugin_feature_set_rank (GST_PLUGIN_FEATURE(vdpaumpegdec), GST_RANK_PRIMARY+2); //4 - create the videoOutputBin
g_signal_connect (G_OBJECT(videoOutputBin), "pad-added", G_CALLBACK (on_pad_added), NULL); g_signal_connect (G_OBJECT(videoOutputBin), "pad-removed", G_CALLBACK (on_pad_removed), NULL);
gst_bin_add_many(GST_BIN(videoOutputBin),vdpauVideoPostProcess,vdpauSink, NULL); gst_element_link(vdpauVideoPostProcess,vdpauSink);
vdpauBinPad = gst_ghost_pad_new("sink",gst_element_get_static_pad(vdpauVideoPostProcess,"sink")); if(!vdpauBinPad) { g_printerr("vdpauBinPad is null\n");
exit -1; } else {
g_print("vpaudBinPad Exist\n"); gst_element_add_pad(videoOutputBin,vdpauBinPad);
} //4 - set playbin properties g_object_set(G_OBJECT(playbin),"video-sink",videoOutputBin,"audio-sink",alsaSink,NULL);
/* Set the pipeline to "playing" state*/ g_print ("Now playing: %s\n", argv[1]);
gst_element_set_state (playbin, GST_STATE_PLAYING); /* Iterate */
g_print ("Running...\n"); g_timeout_add (200, (GSourceFunc) cb_print_position, playbin);
g_main_loop_run (loop); /* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n"); gst_element_set_state (playbin, GST_STATE_NULL);
g_print ("Deleting pipeline\n"); gst_object_unref (GST_OBJECT (playbin));
return 0; } /*************************************************************************************************************************/
As you can see, i have made a GstBin to encapsulate the post process element, and the vdpauSink, and created a ghost pad to vdpauvideopostprocess. but i don't know if it is the right way to do this, because on certain streams, video is displayed, but in other cases, gstreamer pipeline hang... and when it hangs, in the gstreamer debug traces, i have a lot of warnings, and some errors like this :
0:00:04.675157784 669 0x8783ff0 ERROR h264dpb h264/gsth264dpb.c:145:gst_h264_dpb_add:<GstH264DPB@0x8659ea8> Couldn't make room in DPB 0:00:04.696232757 669 0x8783ff0 WARN basetransform gstbasetransform.c:1065:gst_base_transform_acceptcaps_default:<passthrough-identity> transform could not transform video/x-vdpau-video, chroma-type=(int)0, width=(int)1920, height=(int)1080, framerate=(fraction)1710843747/71356439, pixel-aspect-ratio=(fraction)1/1, interlaced=(boolean)false in anything we support
0:00:04.729182133 669 0x8783ff0 WARN basetransform gstbasetransform.c:1065:gst_base_transform_acceptcaps_default:<passthrough-identity> transform could not transform video/x-vdpau-video, chroma-type=(int)0, width=(int)1920, height=(int)1080, framerate=(fraction)1710843747/71356439, pixel-aspect-ratio=(fraction)1/1, interlaced=(boolean)false in anything we support
0:00:04.729352573 669 0x8783ff0 ERROR h264dpb h264/gsth264dpb.c:145:gst_h264_dpb_add:<GstH264DPB@0x8659ea8> Couldn't make room in DPB 0:00:04.749110902 669 0x8783ff0 WARN basetransform gstbasetransform.c:1065:gst_base_transform_acceptcaps_default:<passthrough-identity> transform could not transform video/x-vdpau-video, chroma-type=(int)0, width=(int)1920, height=(int)1080, framerate=(fraction)1710843747/71356439, pixel-aspect-ratio=(fraction)1/1, interlaced=(boolean)false in anything we support
As it works on the command line, I think I have made some errors, but I don't see them right now. If necessary I can give you some more information.
Best regards -- Arnaud Tonda
_______________________________________________ gstreamer-devel mailing list [hidden email] http://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
Hi Arnaud,
I am actually working on a similar application... the purpose is to build a video server that accelerate the decoding phase by taking advantage of GP-GPU capabilities.
My question is : do you integrate your code in the context of the appsrc-stream.c example?
I'll try your code and tell you more if I can... There is no "audioConverter", right? Have you tried your C application with a first-step example: no ranking, just like your command-line example?
" //Sanity check on elements validity
if(!playbin || !vdpauSink || !alsaSink || !audioConverter?? || !vdpauVideoPostProcess || !videoOutputBin)
{
....
} "
Regards.
Jérémy.
_____________
2011/6/30 arnaud tonda <[hidden email]> Hi everybody, _______________________________________________ gstreamer-devel mailing list [hidden email] http://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
Hi jérémy,
Thanks for your reply. Concerning the audioConverter element, it's a mistake: I did some code cleaning and forgot to remove that piece of code. I haven't tested mine in appsrc-stream.c because I had not seen this example code... I will test in this context as soon as possible. I have made different tests, but none have been conclusive.
firstly i have removed the ranking set, but it seems the problem is not on the way. i also tried to remove the add_watch on bus messages, which in certain cases here, cause some violent crashes with double free or corruption traces.
i think the problem is concentrated on the fact i have forgotten some thing on the videoOutputBin creation (synchronization with playbin element or other things), but this is not really clear for me.
Another thing that worries me with the native GStreamer VDPAU is that it seems not to have any support for VC-1 hardware decoding, but that is another subject. Thanks for testing this.
Best regards 2011/7/1 Jérémy Lauraire <[hidden email]>
-- Arnaud Tonda téléphone : 06 34 23 57 78 _______________________________________________ gstreamer-devel mailing list [hidden email] http://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
This post was updated on .
Hi Arnaud,
As I told you in our last mail, I've tried to integrate your implementation in the context of the appsrc-stream.c example. The result is not what I expected : I don't obtain HW acceleration... :/ I've tried the code on mpeg2 and h264 coded video. The video are displayed but CPU is still at work!! Here is the result of GST_DEBUG : it tends to confirm that the problem comes from the link from playbin2 to videoOutputBin element trought the ghost pad..... :/ jeremy@Serveur-$ GST_DEBUG="*:2" ./client /home/jeremy/Bureau/video_mpeg2.mpg 0:00:00.014865328 11122 0x15ff4a0 WARN GST_REGISTRY gstregistry.c:1178:gst_registry_scan_path_level:<registry0> ignoring old plugin /usr/lib/gstreamer-0.10/libgstvalve.so which has been merged into the corelements plugin vdpauBinPad created! Dynamic pad created... (client:11122): GStreamer-CRITICAL **: gst_element_get_static_pad: assertion `GST_IS_ELEMENT (element)' failed (client:11122): GStreamer-CRITICAL **: gst_pad_link_full: assertion `GST_PAD_IS_SRC (srcpad)' failed (client:11122): GStreamer-CRITICAL **: gst_object_unref: assertion `object != NULL' failed Playing... 0:00:00.046855179 11122 0x187fc20 WARN basevideodecoder gstbasevideodecoder.c:319:gst_base_video_decoder_sink_event: new segment: format 3 rate 1 start 0:00:00.660000000 stop 99:99:99.999999999 position 0:00:00.000000000 update 0 0:00:00.072390442 11122 0x1a635d0 ERROR vdpauvideopostprocess gstvdpvideopostprocess.c:575:gst_vdp_vpp_sink_setcaps:<vdpau-video-post-process> Got EMPTY caps from gst_pad_get_allowed_caps 0:00:00.072951449 11122 0x16babd0 WARN basesrc gstbasesrc.c:2582:gst_base_src_loop:<source> error: Erreur interne de flux de données. 0:00:00.072988796 11122 0x16babd0 WARN basesrc gstbasesrc.c:2582:gst_base_src_loop:<source> error: streaming task paused, reason not-negotiated (-4) ** ERROR **: received error aborting... Abandon Here is the code... I let you see if I've made a mistake during translation... which is more than possible! 
/ ***************** CODE developped from appsrc-stream.c example ***********************/ /* GStreamer * * appsrc-stream.c: example for using appsrc in streaming mode. * * Copyright (C) 2008 Wim Taymans <wim.taymans@gmail.com> and Lauraire Jeremy.... :) */ #ifdef HAVE_CONFIG_H #include "config.h" #endif #include <gst/gst.h> #include <stdio.h> #include <string.h> #include <stdlib.h> GST_DEBUG_CATEGORY (appsrc_playbin_debug); #define GST_CAT_DEFAULT appsrc_playbin_debug /* * an example application of using appsrc in streaming push mode. We simply push * buffers into appsrc. The size of the buffers we push can be any size we * choose. * * This example is very close to how one would deal with a streaming webserver * that does not support range requests or does not report the total file size. * * Some optimisations are done so that we don't push too much data. We connect * to the need-data and enough-data signals to start/stop sending buffers. * * Appsrc in streaming mode (the default) does not support seeking so we don't * have to handle any seek callbacks. * * Some formats are able to estimate the duration of the media file based on the * file length (mp3, mpeg,..), others report an unknown length (ogg,..). */ void on_pad_added (GstElement *element, GstPad *pad, gpointer data); typedef struct _App App; struct _App { GstElement *playbin; GstElement *appsrc; GMainLoop *loop; guint sourceid; GMappedFile *file; guint8 *data; gsize length; guint64 offset; }; App s_app; #define CHUNK_SIZE 4096 /* This method is called by the idle GSource in the mainloop. We feed CHUNK_SIZE * bytes into appsrc. * The ide handler is added to the mainloop when appsrc requests us to start * sending data (need-data signal) and is removed when appsrc has enough data * (enough-data signal). */ static gboolean read_data (App * app) { // printf("Reading data... 
\n"); GstBuffer *buffer; guint len; GstFlowReturn ret; buffer = gst_buffer_new (); if (app->offset >= app->length) { /* we are EOS, send end-of-stream and remove the source */ g_signal_emit_by_name (app->appsrc, "end-of-stream", &ret); return FALSE; } /* read the next chunk */ len = CHUNK_SIZE; if (app->offset + len > app->length) len = app->length - app->offset; GST_BUFFER_DATA (buffer) = app->data + app->offset; GST_BUFFER_SIZE (buffer) = len; GST_DEBUG ("feed buffer %p, offset %" G_GUINT64_FORMAT "-%u", buffer, app->offset, len); g_signal_emit_by_name (app->appsrc, "push-buffer", buffer, &ret); gst_buffer_unref (buffer); if (ret != GST_FLOW_OK) { /* some error, stop sending data */ return FALSE; } app->offset += len; return TRUE; } /* This signal callback is called when appsrc needs data, we add an idle handler * to the mainloop to start pushing data into the appsrc */ static void start_feed (GstElement * playbin, guint size, App * app) { // printf("Feeding... \n"); if (app->sourceid == 0) { GST_DEBUG ("start feeding"); app->sourceid = g_idle_add ((GSourceFunc) read_data, app); } } /* This callback is called when appsrc has enough data and we can stop sending. * We remove the idle handler from the mainloop */ static void stop_feed (GstElement * playbin, App * app) { // printf("Stop feeding... \n"); if (app->sourceid != 0) { GST_DEBUG ("stop feeding"); g_source_remove (app->sourceid); app->sourceid = 0; } } /* this callback is called when playbin2 has constructed a source object to read * from. Since we provided the appsrc:// uri to playbin2, this will be the * appsrc that we must handle. We set up some signals to start and stop pushing * data into appsrc */ static void found_source (GObject * object, GObject * orig, GParamSpec * pspec, App * app) { // printf("Source found... \n"); /* get a handle to the appsrc */ g_object_get (orig, pspec->name, &app->appsrc, NULL); GST_DEBUG ("got appsrc %p", app->appsrc); /* we can set the length in appsrc. 
This allows some elements to estimate the * total duration of the stream. It's a good idea to set the property when you * can but it's not required. */ g_object_set (app->appsrc, "size", (gint64) app->length, NULL); /* configure the appsrc, we will push data into the appsrc from the * mainloop. */ g_signal_connect (app->appsrc, "need-data", G_CALLBACK (start_feed), app); g_signal_connect (app->appsrc, "enough-data", G_CALLBACK (stop_feed), app); } static gboolean bus_message (GstBus * bus, GstMessage * message, App * app) { GST_DEBUG ("got message %s", gst_message_type_get_name (GST_MESSAGE_TYPE (message))); switch (GST_MESSAGE_TYPE (message)) { case GST_MESSAGE_ERROR: g_error ("received error"); g_main_loop_quit (app->loop); break; case GST_MESSAGE_EOS: g_main_loop_quit (app->loop); break; default: break; } return TRUE; } int main (int argc, char *argv[]) { App *app = &s_app; GError *error = NULL; GstBus *bus; gst_init (&argc, &argv); GST_DEBUG_CATEGORY_INIT (appsrc_playbin_debug, "appsrc-playbin", 0, "appsrc playbin example"); if (argc < 2) { g_print ("usage: %s <filename>\n", argv[0]); return -1; } /* try to open the file as an mmapped file */ app->file = g_mapped_file_new (argv[1], FALSE, &error); if (error) { g_print ("failed to open file: %s\n", error->message); g_error_free (error); return -2; } /* get some vitals, this will be used to read data from the mmapped file and * feed it to appsrc. */ app->length = g_mapped_file_get_length (app->file); app->data = (guint8 *) g_mapped_file_get_contents (app->file); app->offset = 0; /* create a mainloop to get messages and to handle the idle handler that will * feed data to appsrc. 
*/ app->loop = g_main_loop_new (NULL, TRUE); GstElement *playbin, *videoOutputBin, *vdpauSink, *vdpauVideoPostProcess; GstPad *vdpauBinPad; videoOutputBin = gst_bin_new("vdpau-output-chain"); playbin = gst_element_factory_make("playbin2", "play-bin"); vdpauSink = gst_element_factory_make ("autovideosink", "vdpau-sink"); vdpauVideoPostProcess = gst_element_factory_make ("vdpauvideopostprocess", "vdpau-video-post-process"); if(!playbin || !videoOutputBin || !vdpauSink || !vdpauVideoPostProcess) { printf("Error while building elements... \n"); return 0; } /* set to read from appsrc */ //g_object_set (playbin, "uri", "udp://192.168.1.14:10308", NULL); g_object_set (playbin, "uri", "appsrc://", NULL); /* define specific RANK for vdpau decoders * if RANK=0 => no autoplugging by playbin */ GstElementFactory *vdpauh264dec = gst_element_factory_find("vdpauh264dec"); gst_plugin_feature_set_rank (GST_PLUGIN_FEATURE(vdpauh264dec), GST_RANK_PRIMARY+2); // or GST_RANK_MARGINAL GstElementFactory *vdpaumpeg4dec = gst_element_factory_find("vdpaumpeg4dec"); gst_plugin_feature_set_rank (GST_PLUGIN_FEATURE(vdpaumpeg4dec), GST_RANK_PRIMARY+2); GstElementFactory *vdpaumpegdec = gst_element_factory_find("vdpaumpegdec"); gst_plugin_feature_set_rank (GST_PLUGIN_FEATURE(vdpaumpegdec), GST_RANK_PRIMARY+2); /* create the videoOutputBin */ g_signal_connect (G_OBJECT(videoOutputBin), "pad-added", G_CALLBACK (on_pad_added), NULL); g_signal_connect (G_OBJECT(videoOutputBin), "pad-removed", G_CALLBACK (on_pad_removed), NULL); gst_bin_add_many (GST_BIN (videoOutputBin), vdpauVideoPostProcess, vdpauSink, NULL); gst_element_link(vdpauVideoPostProcess, vdpauSink); /* add ghostpad */ vdpauBinPad = gst_element_get_static_pad (vdpauVideoPostProcess, "sink"); if(!vdpauBinPad) { g_printerr ("vdpauBinPad is null! \n"); return -1; } else { g_print ("vdpauBinPad created! 
\n"); gst_element_add_pad (videoOutputBin, gst_ghost_pad_new ("sink", vdpauBinPad)); } /* set playbin properties */ g_object_set (G_OBJECT(playbin), "video-sink", videoOutputBin, NULL); app->playbin = playbin; g_assert(app->playbin); /* message handler gestion*/ bus = gst_pipeline_get_bus (GST_PIPELINE (app->playbin)); gst_bus_add_watch (bus, (GstBusFunc) bus_message, app); gst_object_unref(bus); /* get notification when the source is created so that we get a handle to it * and can configure it */ g_signal_connect (app->playbin, "deep-notify::source", (GCallback) found_source, app); /* go to playing and wait in a mainloop. */ g_print ("Playing... \n"); gst_element_set_state (app->playbin, GST_STATE_PLAYING); /* this mainloop is stopped when we receive an error or EOS */ g_main_loop_run (app->loop); GST_DEBUG ("stopping"); gst_element_set_state (app->playbin, GST_STATE_NULL); /* free the file */ g_mapped_file_unref (app->file); gst_object_unref (bus); g_main_loop_unref (app->loop); return 0; } /** * \brief on_pad_added */ void on_pad_added (GstElement *element, GstPad *pad, gpointer data) { GstPad *sinkpad; GstElement *decoder = (GstElement *) data; /* We can now link this pad with the vorbis-decoder sink pad */ g_print ("Dynamic pad created, linking demuxer/decoder \n"); sinkpad = gst_element_get_static_pad (decoder, "sink"); gst_pad_link (pad, sinkpad); gst_object_unref (sinkpad); } |
Free forum by Nabble | Edit this page |