Hi, I am a beginner with GStreamer and I am still unclear about the behavior and role of some GStreamer elements.
I am trying to write a player with a crossfade transition effect. It works for AVI files, but without sound. My pipeline consisted of the composition, queue and videosink elements. I was told I needed to add an avidemux, and I did, but now it does not work at all. I used this example as a base: http://lists.freedesktop.org/archives/gstreamer-devel/2008-October/019240.html, but none of my adaptations worked. This is the runtime error I get:

    GStreamer-CRITICAL **: gst_pad_link_full: assertion `GST_IS_PAD (sinkpad)' failed

It seems it cannot get the pad between the composition and the avidemux. I have also been experimenting with decoders, but I get pad linking errors there as well. Thanks for any suggestions.

Regards,
Rossana

This is the code; the pipeline layout is below it.

#include <gst/gst.h>
#include <gst/controller/gstcontroller.h>
#include <iostream>

using namespace std;

const gint dur_crossfade = 500;

static GstElement *play = NULL;
static GstElement *comp = NULL;
static GstElement *op = NULL;
static GstElement *video1, *video2;

// error handler
static gboolean
bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  // code for handling errors
  return TRUE;
}

static void
on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
  GstPad *sinkpad = NULL;
  GstElement *queue = (GstElement *) data;

  g_print ("Dynamic pad created, linking queue\n");
  sinkpad = gst_element_get_compatible_pad (queue, pad, gst_pad_get_caps (pad));
  gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);
}

void
on_pad_added2 (GstElement *element, GstPad *pad, GstElement *dec)
{
  g_debug ("Signal: pad-added");

  GstCaps *caps;
  GstStructure *str;

  caps = gst_pad_get_caps (pad);
  g_assert (caps != NULL);
  str = gst_caps_get_structure (caps, 0);
  g_assert (str != NULL);

  if (g_strrstr (gst_structure_get_name (str), "video")) {
    g_debug ("Linking video pad to dec_vd");
    // Actually link it
    GstPad *targetsink = gst_element_get_pad (dec, "sink");
    g_assert (targetsink != NULL);
    gst_pad_link (pad, targetsink);
    gst_object_unref (targetsink);
  }

  if (g_strrstr (gst_structure_get_name (str), "audio")) {
    g_debug ("Linking audio pad to dec_ad");
    // Actually link it
    GstPad *targetsink = gst_element_get_pad (dec, "sink");
    g_assert (targetsink != NULL);
    gst_pad_link (pad, targetsink);
    gst_object_unref (targetsink);
  }

  gst_caps_unref (caps);
}

GstElement *
getBin (const gchar *nomBin, GstElement *&alfa1, GstElement *&smpte,
        GstElement *&color, gint transicion = 1)
{
  GstElement *bin = gst_bin_new (nomBin);
  if (!bin) {
    g_printerr ("Could not create the bin. Exiting\n");
    return NULL;
  }

  alfa1 = gst_element_factory_make ("alpha", "alfa1");
  smpte = gst_element_factory_make ("smptealpha", "smpte");
  color = gst_element_factory_make ("ffmpegcolorspace", "color");
  GstElement *mixer = gst_element_factory_make ("videomixer", "mixer");
  if ((!alfa1) || (!smpte) || (!color) || (!mixer)) {
    g_printerr ("One of the bin's elements could not be created. Exiting\n");
    return NULL;
  }

  // Add the elements to the bin
  gst_bin_add_many (GST_BIN (bin), alfa1, smpte, mixer, color, NULL);
  gst_element_link (alfa1, mixer);
  gst_element_link (smpte, mixer);
  gst_element_link (mixer, color);
  g_object_set (smpte, "type", transicion, NULL);
  return bin;
}

void
getAndSetController (GstElement *smpte, gdouble duracion)
{
  GstController *ctrl = NULL;
  if (!(ctrl = gst_controller_new (G_OBJECT (smpte), "alpha", NULL))) {
    GST_WARNING ("Cannot control the source element\n");
    return;
  }

  GValue val_double = { 0, };
  g_value_init (&val_double, G_TYPE_DOUBLE);

  GstInterpolationControlSource *csource = gst_interpolation_control_source_new ();
  gst_controller_set_control_source (ctrl, "alpha", GST_CONTROL_SOURCE (csource));
  gst_interpolation_control_source_set_interpolation_mode (csource, GST_INTERPOLATE_LINEAR);

  // Set the first value
  g_value_set_double (&val_double, 0.0);
  gst_interpolation_control_source_set (csource, (0 * GST_MSECOND), &val_double);
  // Set the second value
  g_value_set_double (&val_double, duracion);
  gst_interpolation_control_source_set (csource, (1 * GST_MSECOND), &val_double);

  g_object_unref (csource);
}

void
addGhostPadsToBin (GstElement *alfa1, GstElement *smpte, GstElement *color,
                   GstElement *bin)
{
  /* add ghost pads */
  GstPad *pad1 = gst_element_get_static_pad (alfa1, "sink");
  gst_element_add_pad (bin, gst_ghost_pad_new ("alfasink1", pad1));
  gst_object_unref (GST_OBJECT (pad1));

  GstPad *pad2 = gst_element_get_static_pad (smpte, "sink");
  gst_element_add_pad (bin, gst_ghost_pad_new ("alfasink2", pad2));
  gst_object_unref (GST_OBJECT (pad2));

  GstPad *pad3 = gst_element_get_static_pad (color, "src");
  gst_element_add_pad (bin, gst_ghost_pad_new ("colorsrc", pad3));
  gst_object_unref (GST_OBJECT (pad3));
}

void
crossTransicion (gdouble duracion, GstElement *&bin, gint transicion = 1)
{
  // returns the bin
  GstElement *alfa1, *smpte, *color;
  alfa1 = 0;
  smpte = 0;
  color = 0;
  bin = getBin ("bin", alfa1, smpte, color, transicion);  // creates the bin and its elements
  getAndSetController (smpte, duracion);
  addGhostPadsToBin (alfa1, smpte, color, bin);
}

GstElement *
crearVideo (const gchar *nomVideo, GstElement *&comp)
{
  GstElement *video = NULL;
  if ((video = gst_element_factory_make ("gnlfilesource", nomVideo)) == NULL) {
    printf ("\n Failed to create the gnlfilesource \n");
    return NULL;
  }
  if (gst_bin_add (GST_BIN (comp), video) == FALSE) {
    printf ("\n Could not add video %s to comp \n", nomVideo);
    return NULL;
  }
  return video;
}

void
crearBinyCrossfade (GstElement *&op, GstElement *&bin2)
{
  crossTransicion (dur_crossfade, bin2, 1);
  op = gst_element_factory_make ("gnloperation", "op");
  if (gst_bin_add (GST_BIN (op), bin2) == FALSE) {
    printf ("\n Could not add the bin to the gnloperation op \n");
    return;
  }
  // comp is global
  if (gst_bin_add (GST_BIN (comp), op) == FALSE) {
    printf ("\n Could not add the gnloperation to the gnlcomposition \n");
    return;
  }
}

void
crearCompYoper (GstElement *&compo, GstElement *&op, GstElement *&bin2)
{
  if ((compo = gst_element_factory_make ("gnlcomposition", "micomposicion")) == NULL) {
    printf ("\n Failed to create the gnlcomposition \n");
    return;
  }
  // The bin is created here
  crossTransicion (dur_crossfade, bin2, 1);
  op = gst_element_factory_make ("gnloperation", "op");
  if (gst_bin_add (GST_BIN (op), bin2) == FALSE) {
    printf ("\n Could not add the bin to the gnloperation op \n");
    return;
  }
  if (gst_bin_add (GST_BIN (compo), op) == FALSE) {
    printf ("\n Could not add the gnloperation to the gnlcomposition \n");
    return;
  }
}

/// pipeline3 is built here
GstElement *
getPipeline3 (gchar *argv[], GstElement *&video1, gint dur1,
              GstElement *&video2, GstElement *&comp, GstElement *&op)
{
  GstElement *queueA, *queueV, *sinkV, *sinkA, *decA, *decV, *demux,
             *pipeline, *aconvert, *bin;

  crearCompYoper (comp, op, bin);
  video1 = crearVideo ("video1", comp);
  video2 = crearVideo ("video2", comp);

  g_object_set (video1, "location", argv[1], NULL);
  g_object_set (video1, "uri", argv[1], NULL);
  g_object_set (video1, "start", 0 * GST_MSECOND, NULL);
  g_object_set (video1, "duration", dur1 * GST_MSECOND, NULL);
  g_object_set (video1, "media-start", 0 * GST_MSECOND, NULL);
  g_object_set (video1, "media-duration", dur1 * GST_MSECOND, NULL);
  g_object_set (video1, "priority", 1, NULL);

  // setup the backend viewer
  queueA = gst_element_factory_make ("queue", "queueA");
  queueV = gst_element_factory_make ("queue", "queueV");
  sinkV = gst_element_factory_make ("autovideosink", "sink1");
  sinkA = gst_element_factory_make ("alsasink", "sink2");
  demux = gst_element_factory_make ("avidemux", "demux");

  pipeline = gst_pipeline_new ("video-player");

  /* Add the elements to the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), comp, demux, queueV, sinkV, queueA,
                    sinkA, decA, NULL);

  g_signal_connect (comp, "pad-added", G_CALLBACK (on_pad_added), demux);
  g_signal_connect (demux, "pad-added", G_CALLBACK (on_pad_added2), queueA);
  g_signal_connect (demux, "pad-added", G_CALLBACK (on_pad_added2), queueV);

  gst_element_link (comp, demux);
  gst_element_link (queueV, sinkV);
  gst_element_link (queueA, sinkA);

  return pipeline;
}

void
playSigVideo (gchar *argv[], GstElement *&video1, gint dur1,
              GstElement *&video2, gint dur2, GstElement *&op, GstElement *pip)
{
  g_object_set (op, "start", (dur1 - dur_crossfade) * GST_MSECOND, NULL);
  g_object_set (op, "duration", dur_crossfade * GST_MSECOND, NULL);
  g_object_set (op, "media-start", 0 * GST_MSECOND, NULL);
  g_object_set (op, "media-duration", dur_crossfade * GST_MSECOND, NULL);
  g_object_set (op, "priority", 0, NULL);

  g_object_set (video2, "location", argv[2], NULL);
  g_object_set (video2, "uri", argv[2], NULL);
  g_object_set (video2, "start", (dur1 - dur_crossfade) * GST_MSECOND, NULL);
  g_object_set (video2, "duration", dur2 * GST_MSECOND, NULL);
  g_object_set (video2, "media-start", 0 * GST_MSECOND, NULL);
  g_object_set (video2, "media-duration", dur2 * GST_MSECOND, NULL);
  g_object_set (video2, "priority", 2, NULL);
  //g_object_set (video2, "caps", gst_caps_from_string ("video/x-raw-yuv;video/x-raw-rgb"), NULL);
  //g_object_set (video2, "caps", gst_caps_from_string ("audio/x-dv"), NULL);
}

void
startPlay (GstElement *pip)
{
  /* Set the pipeline to "playing" state */
  gst_element_set_state (pip, GST_STATE_PLAYING);
}

int
main (gint argc, gchar *argv[])
{
  gint dur1 = 9000;  // duration (in ms) to play of the first clip
  gint dur2 = 8000;  // duration (in ms) to play of the second clip
  GMainLoop *loop = NULL;

  /* init GStreamer */
  gst_init (&argc, &argv);
  gst_controller_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* check the command-line syntax */
  if (argc != 3) {
    g_print ("Usage: %s <URI1> <URI2>\n", argv[0]);
    return -1;
  }

  play = getPipeline3 (argv, video1, dur1, video2, comp, op);
  playSigVideo (argv, video1, dur1, video2, dur2, op, play);

  GstBus *bus2 = gst_pipeline_get_bus (GST_PIPELINE (play));
  gst_bus_add_watch (bus2, bus_call, loop);
  gst_object_unref (bus2);

  cout << "...PLAY" << endl;
  startPlay (play);

  /* now run */
  g_main_loop_run (loop);

  /* also clean up */
  gst_element_set_state (play, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (play));

  return 0;
}
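In case it helps to see what I have been trying: below is a stripped-down, defensive variant of my on_pad_added handler (the name on_pad_added_checked and the NULL check are just my experiment, not from the example I linked). My assumption is that the CRITICAL appears because gst_element_get_compatible_pad() returns NULL and gst_pad_link() is then handed that NULL sink pad.

#include <gst/gst.h>

/* Experimental variant of on_pad_added: instead of linking blindly, check
 * whether a compatible sink pad was actually found.  (My assumption: the
 * GST_IS_PAD (sinkpad) assertion fires because this lookup returns NULL.) */
static void
on_pad_added_checked (GstElement *element, GstPad *pad, gpointer data)
{
  GstElement *target = GST_ELEMENT (data);
  GstCaps *caps = gst_pad_get_caps (pad);               /* 0.10 API */
  GstPad *sinkpad = gst_element_get_compatible_pad (target, pad, caps);

  if (sinkpad == NULL) {
    gchar *s = gst_caps_to_string (caps);
    g_printerr ("No compatible sink pad found on %s for caps %s\n",
                GST_ELEMENT_NAME (target), s);
    g_free (s);
  } else {
    if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
      g_printerr ("gst_pad_link failed\n");
    gst_object_unref (sinkpad);
  }
  gst_caps_unref (caps);
}

I connect it exactly like on_pad_added above, e.g. g_signal_connect (comp, "pad-added", G_CALLBACK (on_pad_added_checked), demux);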
bin

  -----------------------------------------------------------------
   alpha ---------------+
                        +--- videomixer --- ffmpegcolorspace
   smptealpha ----------+
        ^
  ------|----------------------------------------------------------
        |
    Controller      // it controls the start-duration property of smptealpha

composition

  -----------------------------------------------------------------
   gnlfilesource
      |
   gnloperation     // contains the *bin* described above
      |
   gnlfilesource
  -----------------------------------------------------------------
  (there is one gnlfilesource for each video)

pipeline

   ____________________
  |                    |
  |    composition     |---- avidemux ----+---- queueV ---- sinkV
  |    (operation)     |                  |
  |____________________|                  +---- queueA ---- sinkA
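For completeness, this is the timing I am aiming for; the numbers simply restate dur1 = 9000, dur2 = 8000 and dur_crossfade = 500 from the code above, all in milliseconds.

/* Intended gnl timeline, in ms (derived from dur1, dur2 and dur_crossfade):
 *
 *   video1   priority 1   start    0   duration 9000   media-start 0
 *   op       priority 0   start 8500   duration  500   media-start 0
 *   video2   priority 2   start 8500   duration 8000   media-start 0
 *
 * i.e. the gnloperation should overlap the last 500 ms of video1 and the
 * first 500 ms of video2.
 */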