I am trying to link the audio and video queues to an rtspsrc element using its "name" property. The pipeline is:
gst-launch-1.0 rtspsrc location="rtsp://<file path>" latency=0 name=demux \
  demux. ! queue ! rtpmp4gdepay ! aacparse ! avdec_aac ! audioconvert ! audioresample ! autoaudiosink \
  demux. ! queue ! rtph264depay ! h264parse ! omxh264dec ! videoconvert ! videoscale ! video/x-raw,width=176,height=144 ! ximagesink

I set the value of the "name" property using g_object_set(source, "name", "demux", NULL); but I am not able to link the audio and video queues to it. The relevant part of the code follows.

Audio bin:

audio = gst_bin_new ("audiobin");
audioQueue = gst_element_factory_make ("queue", "audio-queue");
audioDepay = gst_element_factory_make ("rtpmp4gdepay", "audio-depayer");
audioParse = gst_element_factory_make ("aacparse", "audio-parser");
audioDecode = gst_element_factory_make ("avdec_aac", "audio-decoder");
audioConvert = gst_element_factory_make ("audioconvert", "aconv");
audioResample = gst_element_factory_make ("audioresample", "audio-resample");
audioSink = gst_element_factory_make ("autoaudiosink", "audiosink");

Video bin:

video = gst_bin_new ("videobin");
videoQueue = gst_element_factory_make ("queue", "video-queue");
videoDepay = gst_element_factory_make ("rtph264depay", "video-depayer");
videoParser = gst_element_factory_make ("h264parse", "video-parser");
videoDecode = gst_element_factory_make ("omxh264dec", "video-decoder");
videoConvert = gst_element_factory_make("videoconvert", "convert");
videoScale = gst_element_factory_make("videoscale", "video-scale");
videoSink = gst_element_factory_make("ximagesink", "video-sink");
capsFilter = gst_caps_new_simple("video/x-raw",
    "width", G_TYPE_INT, 176,
    "height", G_TYPE_INT, 144,
    NULL);

Linking procedure:

/* Linking filter element to videoScale and videoSink */
link_ok = gst_element_link_filtered(videoScale, videoSink, capsFilter);
gst_caps_unref (capsFilter);
if (!link_ok) {
    g_warning ("Failed to link element1 and element2!");
}

/* Linking video elements internally */
if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoConvert, NULL))
{
    g_printerr("Cannot link videoDepay and videoParser \n");
    return 0;
}

if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
{
    g_printerr("Cannot link audioDepay and audioParse \n");
    return 0;
}

Help is highly appreciated.
On Mon, 2017-01-23 at 03:50 -0800, rajvik wrote:
Hi,

Could you clarify what fails exactly? It's not quite clear to me from your comments. Things to check:

- Did you gst_bin_add() the elements to the bin before linking? (Although this would not cause a linking failure.)
- Are you handling rtspsrc's "pad-added" signal? (When created, rtspsrc does not have any pads yet, so you can only link it later, once it knows what streams it will output and once it has added pads for each stream.)

Cheers
 -Tim

--
Tim Müller, Centricular Ltd - http://www.centricular.com
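As a rough sketch of what that looks like in code (the callback name on_rtspsrc_pad_added is just a placeholder, and the queues are assumed to already be created, added to the pipeline and linked to their downstream elements):

/* Called once per stream when rtspsrc has created a source pad for it. */
static void on_rtspsrc_pad_added (GstElement *src, GstPad *new_pad, gpointer user_data)
{
    /* inspect new_pad (its caps) here and link it to the audio or video branch */
}

/* ... in main(), before setting the pipeline to PLAYING: */
g_signal_connect (source, "pad-added", G_CALLBACK (on_rtspsrc_pad_added), NULL);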
In reply to this post by rajvik
Hi,
rtspsrc emits the "pad-added" signal twice: once for audio and once for video. We can use g_signal_connect() to catch each pad and link it to our video and audio queues separately in a callback function, e.g.:

//Callback function to link the pads from rtspsrc
void sig_call_back (GstElement *element, GstPad *pad, gpointer data)
{
    //Check the type of the pad using 'pad'
    //If it is an audio pad, link it with audioQueue
    //If it is a video pad, link it with videoQueue
    //Declaring audioQueue and videoQueue as globals would be the easier way, or use the 'data' pointer sent by g_signal_connect()
}

main()
{
    ...
    ...
    rtsp_source= gst_element_ ....
    ....
    /* Linking procedure */
    g_signal_connect(rtsp_source," .....
    ...
}

On 23-Jan-2017 5:35 PM, "rajvik" <[hidden email]> wrote:
I am trying to link audio and video queues using the rtspsrc element property
Thanks Tarun for the response.
I get the idea of linking using g_signal_connect(). But I am still wondering how I should use the "name" property together with it. The code follows.

Here I create the rtspsrc element:

source = gst_element_factory_make ("rtspsrc", "rtsp-source");
g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
g_object_set(source, "latency", 0, NULL);
g_object_set(source, "name", "demux", NULL);

Here I look up the "demux0" element so that I can link it to the audio queue:

audio = gst_bin_get_by_name(GST_BIN(pipeline), "demux0");

Here I link all the elements with the audio bin:

gst_bin_add_many(GST_BIN(audio), audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL);
if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
{
    g_printerr("Cannot link audioDepay and audioParse \n");
    return 0;
}

The same is applicable to the video queue as well. The question is: how shall I use signal connect, and with which element shall I use it (audioQueue or some other element)?
In reply to this post by Tim Müller
Hi Tim, thanks for the response.
1. I have added all the elements using gst_bin_add_many():

gst_bin_add_many(GST_BIN(audio), audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL);

2. I haven't used the pad-added signal, because I am confused about how to write the callback function, since I am already using the rtspsrc "name" property. The code snippet for the audio and video queues follows:

/* audio bin */
audio = gst_bin_get_by_name(GST_BIN(pipeline), "demux0");
audioQueue = gst_element_factory_make ("queuea", "queuea");
audioDepay = gst_element_factory_make ("rtpmp4gdepay", "audio-depayer");
audioParse = gst_element_factory_make ("aacparse", "audio-parser");
audioDecode = gst_element_factory_make ("avdec_aac", "audio-decoder");
audioConvert = gst_element_factory_make ("audioconvert", "aconv");
audioResample = gst_element_factory_make ("audioresample", "audio-resample");
audioSink = gst_element_factory_make ("autoaudiosink", "audiosink");

Failing here, saying the audio bin is NULL:

if (!audio || !audioDepay || !audioParse || !audioConvert || !audioResample || !audioSink)
{
    g_printerr("Cannot create audio elements \n");
    return 0;
}

gst_bin_add_many(GST_BIN(audio), audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL);
if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
{
    g_printerr("Cannot link audioDepay and audioParse \n");
    return 0;
}

/* Video bin */
video = gst_bin_get_by_name(GST_BIN(pipeline), "demux.");
videoQueue = gst_element_factory_make ("queuev", "queuev");
videoDepay = gst_element_factory_make ("rtph264depay", "video-depayer");
videoParser = gst_element_factory_make ("h264parse", "video-parser");
videoDecode = gst_element_factory_make ("omxh264dec", "video-decoder");
videoConvert = gst_element_factory_make("videoconvert", "convert");
videoScale = gst_element_factory_make("videoscale", "video-scale");
videoSink = gst_element_factory_make("ximagesink", "video-sink");
capsFilter = gst_caps_new_simple("video/x-raw",
    "width", G_TYPE_INT, 176,
    "height", G_TYPE_INT, 144,
    NULL);

if (!video || !videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
{
    g_printerr("Cannot create video elements \n");
    return 0;
}

/* Adding video elements to video bin */
gst_bin_add_many(GST_BIN(video), videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale, videoSink, NULL);

/* Linking filter element to videoScale and videoSink */
link_ok = gst_element_link_filtered(videoScale, videoSink, capsFilter);
gst_caps_unref (capsFilter);
if (!link_ok) {
    g_warning ("Failed to link element1 and element2!");
}

/* Linking video elements internally */
if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoConvert, NULL))
{
    g_printerr("Cannot link videoDepay and videoParser \n");
    return 0;
}
In reply to this post by rajvik
> Thanks Tarun for the response.
> g_object_set(source, "name", "demux", NULL)

I don't think setting the "name" property on the rtspsrc element helps here. Moreover, we don't have to use "demux." while building a GStreamer pipeline programmatically; I believe we only need it when passing the pipeline description to the gst-launch command.
Typical usage is:

g_signal_connect(source, "pad-added", G_CALLBACK(user_function), data_ptr);

The signature of the pad-added callback function is:

void user_function (GstElement* object, GstPad* pad, gpointer user_data);

> And with which element shall I use signal connect with audioQueue or any other element?

'user_function' is the name of the callback function you define for linking the pads, and you connect it on "source", the variable for the rtspsrc you have defined above. 'data_ptr' can be NULL, or you can send the audioQueue and videoQueue as a pointer; it is up to you how you want to use it.

void user_function(GstElement* object, GstPad* pad, gpointer user_data)
{
    //read the 'pad' caps and convert them to a string
    //gst_pad_get_current_caps(pad)
    //gst_caps_get_structure()
    //gst_structure_get_name()
    //gst_caps_to_string()

    //if the above string contains "audio"
    //    dynamic_pad = gst_element_get_static_pad(audioQueue, "sink");
    //    gst_pad_link (pad, dynamic_pad);

    //if the above string contains "video"
    //    dynamic_pad = gst_element_get_static_pad(videoQueue, "sink");
    //    gst_pad_link (pad, dynamic_pad);

    //note: audioQueue and videoQueue are the same pointers as you have defined in your program. It is good to have these as global pointers.
}

On Tue, Jan 24, 2017 at 11:37 AM, rajvik <[hidden email]> wrote:
> Thanks Tarun for the response.
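For what it's worth, a sketch of the data_ptr variant described above could look like this; the PadTargets struct and its field names are invented for the example and are not from the thread:

/* Illustrative only: bundle both queues so the callback does not need globals. */
typedef struct {
    GstElement *audio_queue;
    GstElement *video_queue;
} PadTargets;

static void user_function (GstElement *object, GstPad *pad, gpointer user_data)
{
    PadTargets *t = (PadTargets *) user_data;
    GstCaps *caps = gst_pad_get_current_caps (pad);
    gchar *s;
    GstElement *target;
    GstPad *sinkpad;

    if (caps == NULL)
        return;

    s = gst_caps_to_string (caps);
    /* rtspsrc pads carry application/x-rtp caps containing media=(string)audio or media=(string)video */
    target = g_strrstr (s, "media=(string)audio") ? t->audio_queue : t->video_queue;

    sinkpad = gst_element_get_static_pad (target, "sink");
    if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
        g_printerr ("pad-added: could not link to %s\n", GST_ELEMENT_NAME (target));

    gst_object_unref (sinkpad);
    g_free (s);
    gst_caps_unref (caps);
}

/* usage, once audioQueue and videoQueue exist (keep 'targets' alive while the pipeline runs):
 *   static PadTargets targets;
 *   targets.audio_queue = audioQueue;
 *   targets.video_queue = videoQueue;
 *   g_signal_connect (source, "pad-added", G_CALLBACK (user_function), &targets);
 */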
Thank you so much for the insights. I did incorporate what you asked me to, but I am still getting errors:

0:00:00.602211533 2055 0x188380 WARN basesrc gstbasesrc.c:2933:gst_base_src_loop:<udpsrc3> error: Internal data flow error.
0:00:00.618291500 2055 0x188380 WARN basesrc gstbasesrc.c:2933:gst_base_src_loop:<udpsrc3> error: streaming task paused, reason not-linked (-1)
0:00:00.638866649 2055 0x188430 WARN basesrc gstbasesrc.c:2933:gst_base_src_loop:<udpsrc0> error: Internal data flow error.
0:00:00.655050073 2055 0x188430 WARN basesrc gstbasesrc.c:2933:gst_base_src_loop:<udpsrc0> error: streaming task paused, reason not-linked (-1)

Please find the code snippet below. I have taken audioQueue and videoQueue as global variables; the callback function is:

static void onPadAdded(GstElement *element, GstPad *pad, gpointer data)
{
    GstCaps *caps;
    const char *name;
    char *capsName;

    caps = gst_pad_get_current_caps(pad);
    GstStructure *str = gst_caps_get_structure(caps, 0);
    name = gst_structure_get_name(str);
    g_debug("name of caps struct string: %s", name);
    capsName = gst_caps_to_string(caps);
    g_debug("name of caps string: %s", capsName);

    if (g_str_has_prefix(capsName, "audio"))
    {
        GstPad *dynamic_pad = gst_element_get_static_pad(audioQueue, "sink");
        gst_pad_link(pad, dynamic_pad);
    }
    else if (g_str_has_prefix(capsName, "video"))
    {
        GstPad *video_dynamic_pad = gst_element_get_static_pad(videoQueue, "sink");
        gst_pad_link(pad, video_dynamic_pad);
    }
    g_free(capsName);
}

Following is the g_signal_connect operation:

gst_bin_add(GST_BIN(pipeline), source);
gst_bin_add_many(GST_BIN(pipeline), audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink,
                 videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale, videoSink, NULL);

if (!gst_element_link_many(audioQueue, audioDepay, NULL))
{
    g_printerr("Cannot link audioQueue and audioDepay \n");
    return 0;
}
if (!gst_element_link_many(audioParse, audioDecode, NULL))
{
    g_printerr("Cannot link audioParse and audioDecode \n");
    return 0;
}
if (!gst_element_link_many(audioConvert, audioResample, audioSink, NULL))
{
    g_printerr("Cannot link audioConvert, audioResample, audioSink \n");
    return 0;
}

/* Linking filter element to videoScale and videoSink */
link_ok = gst_element_link_filtered(videoScale, videoSink, capsFilter);
gst_caps_unref(capsFilter);
if (!link_ok)
{
    g_warning ("Failed to link element1 and element2!");
}

/* Linking video elements internally */
if (!gst_element_link_many(videoQueue, videoDepay, NULL))
{
    g_printerr("Cannot link videoQueue and videoDepay \n");
    return 0;
}
if (!gst_element_link_many(videoParser, videoDecode, videoConvert, NULL))
{
    g_printerr("Cannot link videoParser, videoDecode, videoConvert \n");
    return 0;
}

g_signal_connect(source, "pad-added", G_CALLBACK(onPadAdded), NULL);
I am not sure, but can you try without having the queues? Can you please see how it works when you link the pads with audioDepay and videoDepay directly?

Tarun

On 24-Jan-2017 4:31 PM, "rajvik" <[hidden email]> wrote:
> Thank you so much for the insights. I did incorporate what you asked me to,
The problem is that it is not getting into the if/else statement:

if (g_str_has_prefix(capsName, "audio"))
{
    g_print("Here 6th .....\n");
    GstPad *dynamic_pad = gst_element_get_static_pad(audioQueue, "sink");
    gst_pad_link(pad, dynamic_pad);
}
else if (g_str_has_prefix(capsName, "video"))
{
    g_print("Here 7th .....\n");
    GstPad *video_dynamic_pad = gst_element_get_static_pad(videoQueue, "sink");
    gst_pad_link(pad, video_dynamic_pad);
}
g_free(capsName);

The reason is that capsName does not start with "audio" or "video"; it is:

name of caps string: application/x-rtp, media=(string)audio, payload=(int)96, clock-rate=(int)44100, encoding-name=(string)MPEG4-GENERIC, encoding-params=(string)2, streamtype=(string)5, profile-level-id=(string)15, mode=(string)AAC-hbr, config=(string)1210, sizelength=(string)13, indexlength=(string)3, indexdeltalength=(string)3, profile=(string)1, a-tool=(string)"vlc\ 2.1.6", a-recvonly=(string)"", a-type=(string)broadcast, a-charset=(string)UTF-8, ssrc=(uint)398000458, clock-base=(uint)687408962, seqnum-base=(uint)15402, npt-start=(guint64)4219024000, play-speed=(double)1, play-scale=(double)1

Is there any API which would do a string compare for one word within the whole string?

On Tue, Jan 24, 2017 at 5:14 PM, Tarun Tej K [via GStreamer-devel] <[hidden email]> wrote:
Try g_strrstr() instead of g_str_has_prefix().

On 24-Jan-2017 6:16 PM, "rajvik" <[hidden email]> wrote:
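Another option that avoids searching the whole caps string is to read the "media" field of the RTP caps directly. A sketch, assuming str is the GstStructure already obtained from the pad caps as in the callback above:

const gchar *media = gst_structure_get_string (str, "media");   /* "audio" or "video" */

if (g_strcmp0 (media, "audio") == 0) {
    /* link to audioQueue */
} else if (g_strcmp0 (media, "video") == 0) {
    /* link to videoQueue */
}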
It gets into the if/else condition now, but I am still facing errors:

name of caps string: application/x-rtp, media=(string)video, payload=(int)96, clock-rate=(int)90000, encoding-name=(string)H264, packetization-mode=(string)1, profile-level-id=(string)42801e, sprop-parameter-sets=(string)"Z0KAHpZWDY/yf/gACAAKhAAAD6QAA6mDgAAC3GAAFuNvxjg7QsXc\,aMqNSA\=\=", a-tool=(string)"vlc\ 2.1.6", a-recvonly=(string)"", a-type=(string)broadcast, a-charset=(string)UTF-8, ssrc=(uint)3197239660, clock-base=(uint)2281054233, seqnum-base=(uint)15975, npt-start=(guint64)7813930284000, play-speed=(double)1, play-scale=(double)1
name of caps struct string: application/x-rtp
name of caps string: application/x-rtp, media=(string)audio, payload=(int)96, clock-rate=(int)44100, encoding-name=(string)MPEG4-GENERIC, encoding-params=(string)2, streamtype=(string)5, profile-level-id=(string)15, mode=(string)AAC-hbr, config=(string)1210, sizelength=(string)13, indexlength=(string)3, indexdeltalength=(string)3, profile=(string)1, a-tool=(string)"vlc\ 2.1.6", a-recvonly=(string)"", a-type=(string)broadcast, a-charset=(string)UTF-8, ssrc=(uint)358833443, clock-base=(uint)1031817228, seqnum-base=(uint)16576, npt-start=(guint64)7813930284000, play-speed=(double)1, play-scale=(double)1
0:00:00.795704054 2728 0x188430 WARN basesrc gstbasesrc.c:2933:gst_base_src_loop:<udpsrc1> error: Internal data flow error.
0:00:00.812515845 2728 0x188430 WARN basesrc gstbasesrc.c:2933:gst_base_src_loop:<udpsrc1> error: streaming task paused, reason not-linked (-1)

Any idea as to how to move forward with this? Code snippet:

GstElement *audioQueue, *videoQueue;
GstElement *source, *audio, *video, *convert, *pipeline, *audioDepay, *audioParse, *audioDecode, *audioConvert, *audioResample,
           *audioSink, *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale, *videoSink;

static void onPadAdded(GstElement *element, GstPad *pad, gpointer data)
{
    GstCaps *caps;
    const char *name;
    char *capsName;

    caps = gst_pad_get_current_caps(pad);
    GstStructure *str = gst_caps_get_structure(caps, 0);
    name = gst_structure_get_name(str);
    g_print("name of caps struct string: %s \n", name);
    capsName = gst_caps_to_string(caps);
    g_print("name of caps string: %s \n", capsName);

    if (g_strrstr(capsName, "audio"))
    {
        GstPad *dynamic_pad = gst_element_get_static_pad(audioQueue, "sink");
        gst_pad_link(pad, dynamic_pad);
    }
    else if (g_strrstr(capsName, "video"))
    {
        GstPad *video_dynamic_pad = gst_element_get_static_pad(videoQueue, "sink");
        gst_pad_link(pad, video_dynamic_pad);
    }
    g_free(capsName);
}

int main(int argc, char *argv[])
{
    GstCaps *capsFilter;
    GstBus *bus;
    GstMessage *msg;
    GstPad *pad;
    gboolean link_ok;
    GstStateChangeReturn ret;

    /* Initialize GStreamer */
    gst_init (&argc, &argv);

    /* Create Elements */
    pipeline = gst_pipeline_new("rtsp-pipeline");
    source = gst_element_factory_make ("rtspsrc", "rtsp-source");
    g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
    g_object_set(source, "latency", 0, NULL);

    audioQueue = gst_element_factory_make ("queue", "audio-queue");
    audioDepay = gst_element_factory_make ("rtpmp4gdepay", "audio-depayer");
    audioParse = gst_element_factory_make ("aacparse", "audio-parser");
    audioDecode = gst_element_factory_make ("avdec_aac", "audio-decoder");
    audioConvert = gst_element_factory_make ("audioconvert", "aconv");
    audioResample = gst_element_factory_make ("audioresample", "audio-resample");
    audioSink = gst_element_factory_make ("autoaudiosink", "audiosink");

    if (!audioQueue || !audioDepay || !audioParse || !audioConvert || !audioResample || !audioSink)
    {
        g_printerr("Cannot create audio elements \n");
        return 0;
    }

    videoQueue = gst_element_factory_make ("queue", "video-queue");
    videoDepay = gst_element_factory_make ("rtph264depay", "video-depay");
    videoParser = gst_element_factory_make ("h264parse", "video-parser");
    videoDecode = gst_element_factory_make ("omxh264dec", "video-decoder");
    videoConvert = gst_element_factory_make("videoconvert", "convert");
    videoScale = gst_element_factory_make("videoscale", "video-scale");
    videoSink = gst_element_factory_make("ximagesink", "video-sink");
    capsFilter = gst_caps_new_simple("video/x-raw",
        "width", G_TYPE_INT, 176,
        "height", G_TYPE_INT, 144,
        NULL);

    if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
    {
        g_printerr("Cannot create video elements \n");
        return 0;
    }

    gst_bin_add(GST_BIN(pipeline), source);
    gst_bin_add_many(GST_BIN(pipeline), audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink,
                     videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale, videoSink, NULL);

    if (!gst_element_link_many(audioQueue, audioDepay, NULL))
    {
        g_printerr("Cannot link audioQueue and audioDepay \n");
        return 0;
    }
    if (!gst_element_link_many(audioParse, audioDecode, NULL))
    {
        g_printerr("Cannot link audioParse and audioDecode \n");
        return 0;
    }
    if (!gst_element_link_many(audioConvert, audioResample, audioSink, NULL))
    {
        g_printerr("Cannot link audioConvert, audioResample, audioSink \n");
        return 0;
    }

    /* Linking filter element to videoScale and videoSink */
    link_ok = gst_element_link_filtered(videoScale, videoSink, capsFilter);
    gst_caps_unref (capsFilter);
    if (!link_ok)
    {
        g_warning ("Failed to link element1 and element2!");
        return 0;
    }

    /* Linking video elements internally */
    if (!gst_element_link_many(videoQueue, videoDepay, NULL))
    {
        g_printerr("Cannot link videoQueue and videoDepay \n");
        return 0;
    }
    if (!gst_element_link_many(videoParser, videoDecode, videoConvert, NULL))
    {
        g_printerr("Cannot link videoParser, videoDecode, videoConvert \n");
        return 0;
    }

    g_signal_connect(source, "pad-added", G_CALLBACK(onPadAdded), NULL);

    /* Start playing */
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* Wait until error or EOS */
    bus = gst_element_get_bus (pipeline);
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

    /* Free resources */
    if (msg != NULL)
        gst_message_unref (msg);
    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    return 0;
}

On Tue, Jan 24, 2017 at 6:29 PM, Tarun Tej K [via GStreamer-devel] <[hidden email]> wrote:
I may be way off course here - but when you are notified that a pad has been added, don't you want to find the name of the PAD, not the name of the CAPS?

(See https://gstreamer.freedesktop.org/documentation/application-development/basics/pads.html)

Ian

On 24/01/2017 14:45, rajvik wrote:
> SNIP SNIP
> Any idea as to how to move forward with this?
> Code snippet:
>
> GstElement *audioQueue, *videoQueue;
> GstElement *source, *audio, *video, *convert, *pipeline, *audioDepay,
> *audioParse, *audioDecode, *audioConvert, *audioResample,
> *audioSink, *videoDepay, *videoParser, *videoDecode, *videoConvert,
> *videoScale, *videoSink;
>
> static void onPadAdded(GstElement *element, GstPad *pad, gpointer data)
> {
>     GstCaps *caps;
>     const char *name;
>     char *capsName;
>     caps = gst_pad_get_current_caps(pad);
>     GstStructure *str = gst_caps_get_structure(caps, 0);
>     name = gst_structure_get_name(str);
>     g_print("name of caps struct string: %s \n", name);
>     capsName = gst_caps_to_string(caps);
>     g_print("name of caps string: %s \n", capsName);
>     if (g_strrstr(capsName,"audio"))
>     {
>         GstPad *dynamic_pad = gst_element_get_static_pad(audioQueue, "sink");
>         gst_pad_link(pad, dynamic_pad);
>     }
>     else if (g_strrstr(capsName, "video"))
>     {
>         GstPad *video_dynamic_pad = gst_element_get_static_pad(videoQueue, "sink");
>         gst_pad_link(pad, video_dynamic_pad);
>     }
>     g_free(capsName);
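For reference, the pad name could be inspected like this (a sketch); note that, as the later logs in this thread show, rtspsrc names its pads along the lines of recv_rtp_src_1_279848886_96, so the "media" field of the caps is usually the more telling piece of information:

gchar *pad_name = gst_pad_get_name (pad);   /* caller owns the returned string */
g_print ("pad-added: %s\n", pad_name);
g_free (pad_name);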
When I try to get the name of the pad, it says EMPTY. I do not know how to find out whether a pad has been created or not.

Rajvi

On Tue, Jan 24, 2017 at 10:45 PM, Ian Davidson [via GStreamer-devel] <[hidden email]> wrote:
> I may be way off course here - but when you are notified that a pad has
The pad gets created and that is what you see after the 'name of caps string' in your debug info.
In the code snippet you've linked audioQueue and videoQueue with the pads. Have you tried linking the respective pads with audioDepay and videoDepay directly, without using the queues at all? Since you're not reading the audio and video into different sinks (audiosink and ximagesink), we don't need the queue anymore. Use the pads of rtspsrc to link with the respective depayloaders.

Tarun

On 25-Jan-2017 9:33 AM, "rajvik" <[hidden email]> wrote:
I am getting the same error even when linking with audioDepay and videoDepay. Error logs:

0:00:00.526193431 2163 0xb440e180 WARN rtpjitterbuffer rtpjitterbuffer.c:185:rtp_jitter_buffer_set_clock_rate: Clock rate changed from 0 to 90000
0:00:00.552156505 2163 0xb440e230 WARN rtpjitterbuffer rtpjitterbuffer.c:185:rtp_jitter_buffer_set_clock_rate: Clock rate changed from 0 to 44100
name of caps struct string: application/x-rtp
name of caps string: application/x-rtp, media=(string)video, payload=(int)96, clock-rate=(int)90000, encoding-name=(string)H264, packetization-mode=(string)1, profile-level-id=(string)42801e, sprop-parameter-sets=(string)"Z0KAHpZWDY/yf/gACAAKhAAAD6QAA6mDgAAC3GAAFuNvxjg7QsXc\,aMqNSA\=\=", a-tool=(string)"vlc\ 2.1.6", a-recvonly=(string)"", a-type=(string)broadcast, a-charset=(string)UTF-8, ssrc=(uint)3625393740, clock-base=(uint)2709244179, seqnum-base=(uint)46478, npt-start=(guint64)60293455191000, play-speed=(double)1, play-scale=(double)1
name of caps struct string: application/x-rtp
name of caps string: application/x-rtp, media=(string)audio, payload=(int)96, clock-rate=(int)44100, encoding-name=(string)MPEG4-GENERIC, encoding-params=(string)2, streamtype=(string)5, profile-level-id=(string)15, mode=(string)AAC-hbr, config=(string)1210, sizelength=(string)13, indexlength=(string)3, indexdeltalength=(string)3, profile=(string)1, a-tool=(string)"vlc\ 2.1.6", a-recvonly=(string)"", a-type=(string)broadcast, a-charset=(string)UTF-8, ssrc=(uint)3051532542, clock-base=(uint)3346164277, seqnum-base=(uint)2401, npt-start=(guint64)60293455191000, play-speed=(double)1, play-scale=(double)1
here .....2
here .....1
0:00:00.714840440 2163 0xb440e180 WARN basesrc gstbasesrc.c:2933:gst_base_src_loop:<udpsrc4> error: Internal data flow error.
0:00:00.733348867 2163 0xb440e180 WARN basesrc gstbasesrc.c:2933:gst_base_src_loop:<udpsrc4> error: streaming task paused, reason not-linked (-1)
^C

Code snippet:

static void onPadAdded(GstElement *element, GstPad *pad, gpointer data)
{
#if 1
    GstCaps *caps;
    const char *name;
    char *capsName;
    caps = gst_pad_get_current_caps(pad);
    GstStructure *str = gst_caps_get_structure(caps, 0);
    name = gst_structure_get_name(str);
    g_print("name of caps struct string: %s \n", name);
    capsName = gst_caps_to_string(caps);
    g_print("name of caps string: %s \n", capsName);
#endif
    if (g_strrstr(capsName, "audio"))
    {
        g_print("here .....1 \n");
        GstPad *dynamic_pad = gst_element_get_static_pad(audioDepay, "sink");
        gst_pad_link(pad, dynamic_pad);
    }
    else if (g_strrstr(capsName, "video"))
    {
        g_print("here .....2 \n");
        GstPad *video_dynamic_pad = gst_element_get_static_pad(videoDepay, "sink");
        gst_pad_link(pad, video_dynamic_pad);
    }
    g_free(capsName);
}

On Wed, Jan 25, 2017 at 10:30 AM, Tarun Tej K [via GStreamer-devel] <[hidden email]> wrote:
Ok, as you suspected, maybe the pad is not really getting created. How about not linking if the dynamic pad is NULL? See below:

if (g_strrstr(capsName, "audio"))
{
    GstPad *dynamic_pad = gst_element_get_static_pad(audioDepay, "sink");
    g_assert(dynamic_pad != NULL);
    g_print("pad created .....1 \n");
    gst_pad_link(pad, dynamic_pad);
    gst_object_unref(dynamic_pad);
}
else if (g_strrstr(capsName, "video"))
{
    GstPad *video_dynamic_pad = gst_element_get_static_pad(videoDepay, "sink");
    g_assert(video_dynamic_pad != NULL);
    g_print("pad created .....2 \n");
    gst_pad_link(pad, video_dynamic_pad);
    gst_object_unref(video_dynamic_pad);
}
g_free(capsName);

On 25-Jan-2017 11:02 AM, "rajvik" <[hidden email]> wrote:
Nope, you are right; the pads are actually getting created. I debugged more by increasing the debug level and found that there is some issue with pushing the data buffer. I cannot figure out what it exactly means though. Attaching the logs, if you can help:

0:00:16.349383662 2327 0xb30060c0 INFO GST_ELEMENT_PADS gstelement.c:643:gst_element_add_pad:<rtpptdemux1> adding pad 'src_96'
0:00:16.349645086 2327 0xb30060c0 INFO GST_PADS gstpad.c:2186:gst_pad_link_prepare: trying to link rtpptdemux1:src_96 and recv_rtp_src_1_279848886_96:proxypad9
0:00:16.349711009 2327 0xb30060c0 INFO GST_PADS gstpad.c:2388:gst_pad_link_full: linked rtpptdemux1:src_96 and recv_rtp_src_1_279848886_96:proxypad9, successful
0:00:16.349766663 2327 0xb30060c0 INFO GST_EVENT gstevent.c:1373:gst_event_new_reconfigure: creating reconfigure event
0:00:16.349919971 2327 0xb30060c0 INFO GST_ELEMENT_PADS gstelement.c:643:gst_element_add_pad:<manager> adding pad 'recv_rtp_src_1_279848886_96'
0:00:16.681462663 2327 0xb30060c0 INFO GST_PADS gstpad.c:2186:gst_pad_link_prepare: trying to link manager:recv_rtp_src_1_279848886_96 and recv_rtp_src_1_279848886_96:proxypad10
0:00:16.681531163 2327 0xb30060c0 INFO GST_PADS gstpad.c:2388:gst_pad_link_full: linked manager:recv_rtp_src_1_279848886_96 and recv_rtp_src_1_279848886_96:proxypad10, successful
0:00:16.681588625 2327 0xb30060c0 INFO GST_EVENT gstevent.c:1373:gst_event_new_reconfigure: creating reconfigure event
0:00:16.681726702 2327 0xb30060c0 INFO GST_ELEMENT_PADS gstelement.c:643:gst_element_add_pad:<rtsp-source> adding pad 'recv_rtp_src_1_279848886_96'
name of caps struct string: application/x-rtp
name of caps string: application/x-rtp, media=(string)audio, payload=(int)96, clock-rate=(int)44100, encoding-name=(string)MPEG4-GENERIC, encoding-params=(string)2, streamtype=(string)5, profile-level-id=(string)15, mode=(string)AAC-hbr, config=(string)1210, sizelength=(string)13, indexlength=(string)3, indexdeltalength=(string)3, profile=(string)1, a-tool=(string)"vlc\ 2.1.6", a-recvonly=(string)"", a-type=(string)broadcast, a-charset=(string)UTF-8, ssrc=(uint)279848886, clock-base=(uint)3447297483, seqnum-base=(uint)21447, npt-start=(guint64)62586725174000, play-speed=(double)1, play-scale=(double)1
here .....1
0:00:16.682087741 2327 0xb30060c0 INFO GST_ELEMENT_PADS gstelement.c:895:gst_element_get_static_pad: found pad audio-depayer:sink
0:00:16.682160472 2327 0xb30060c0 INFO GST_PADS gstpad.c:2186:gst_pad_link_prepare: trying to link rtsp-source:recv_rtp_src_1_279848886_96 and audio-depayer:sink
0:00:16.682366550 2327 0xb30060c0 INFO GST_PADS gstpad.c:2388:gst_pad_link_full: linked rtsp-source:recv_rtp_src_1_279848886_96 and audio-depayer:sink, successful
0:00:16.682424358 2327 0xb30060c0 INFO GST_EVENT gstevent.c:1373:gst_event_new_reconfigure: creating reconfigure event
Link succeeded (type 'application/x-rtp, media=(string)audio, payload=(int)96, clock-rate=(int)44100, encoding-name=(string)MPEG4-GENERIC, encoding-params=(string)2, streamtype=(string)5, profile-level-id=(string)15, mode=(string)AAC-hbr, config=(string)1210, sizelength=(string)13, indexlength=(string)3, indexdeltalength=(string)3, profile=(string)1, a-tool=(string)"vlc\ 2.1.6", a-recvonly=(string)"", a-type=(string)broadcast, a-charset=(string)UTF-8, ssrc=(uint)279848886, clock-base=(uint)3447297483, seqnum-base=(uint)21447, npt-start=(guint64)62586725174000, play-speed=(double)1, play-scale=(double)1').
0:00:16.683200668 2327 0xb30060c0 INFO GST_EVENT gstevent.c:678:gst_event_new_caps: creating caps event audio/mpeg, mpegversion=(int)4, stream-format=(string)raw, codec_data=(buffer)1210 0:00:16.683362207 2327 0xb30060c0 INFO GST_PADS gstpad.c:3745:gst_pad_peer_query:<audio-parser:src> pad has no peer 0:00:16.683574938 2327 0xb30060c0 INFO GST_EVENT gstevent.c:678:gst_event_new_caps: creating caps event audio/mpeg, mpegversion=(int)4, stream-format=(string)raw, codec_data=(buffer)1210, framed=(boolea n)true, level=(string)2, base-profile=(string)lc, profile=(string)lc, rate=(int)44100, channels=(int)2 0:00:16.683692208 2327 0xb30060c0 INFO baseparse gstbaseparse.c:3585:gst_base_parse_set_passthrough:<audio-parser> passthrough: yes 0:00:17.015041706 2327 0xb30060c0 INFO GST_EVENT gstevent.c:759:gst_event_new_segment: creating segment event time segment start=0:00:00.000000000, offset=0:00:00.000000000, stop=99:99:99.999999999, rat e=1.000000, applied_rate=1.000000, flags=0x00, time=17:23:06.725174000, base=0:00:00.000000000, position 0:00:00.000000000, duration 99:99:99.999999999 0:00:17.015638093 2327 0xb30060c0 INFO GST_EVENT gstevent.c:678:gst_event_new_caps: creating caps event application/x-rtp, media=(string)audio, payload=(int)96, clock-rate=(int)44100, encoding-name=(str ing)MPEG4-GENERIC, encoding-params=(string)2, streamtype=(string)5, profile-level-id=(string)15, mode=(string)AAC-hbr, config=(string)1210, sizelength=(string)13, indexlength=(string)3, indexdeltalength=(string)3, pro file=(string)1, a-tool=(string)"vlc\ 2.1.6", a-recvonly=(string)"", a-type=(string)broadcast, a-charset=(string)UTF-8, ssrc=(uint)279848886, clock-base=(uint)3447297483, seqnum-base=(uint)21447, npt-start=(guint64)625 86725174000, play-speed=(double)1, play-scale=(double)1 0:00:17.143650771 2327 0xb430ec00 INFO basesrc gstbasesrc.c:2841:gst_base_src_loop:<udpsrc1> pausing after gst_pad_push() = not-linked 0:00:17.159881511 2327 0xb430ec00 WARN basesrc gstbasesrc.c:2933:gst_base_src_loop:<udpsrc1> error: Internal data flow error. 0:00:17.175436364 2327 0xb430ec00 WARN basesrc gstbasesrc.c:2933:gst_base_src_loop:<udpsrc1> error: streaming task paused, reason not-linked (-1) 0:00:17.192878876 2327 0xb430ec00 INFO GST_ERROR_SYSTEM gstelement.c:1835:gst_element_message_full:<udpsrc1> posting message: Internal data flow error. 0:00:17.210512927 2327 0xb430ec00 INFO GST_ERROR_SYSTEM gstelement.c:1858:gst_element_message_full:<udpsrc1> posted error message: Internal data flow error. 
0:00:17.228713441 2327 0xb430ec00 INFO task gsttask.c:300:gst_task_func:<udpsrc1:src> Task going to paused 0:00:17.158788123 2327 0xb430ec30 WARN rtpjitterbuffer rtpjitterbuffer.c:494:calculate_skew: delta - skew: 0:00:01.458641493 too big, reset skew 0:00:17.243162944 2327 0xb430ec30 INFO GST_EVENT gstevent.c:678:gst_event_new_caps: creating caps event application/x-rtp, media=(string)audio, payload=(int)96, clock-rate=(int)44100, encoding-name=(str ing)MPEG4-GENERIC, encoding-params=(string)2, streamtype=(string)5, profile-level-id=(string)15, mode=(string)AAC-hbr, config=(string)1210, sizelength=(string)13, indexlength=(string)3, indexdeltalength=(string)3, pro file=(string)1, a-tool=(string)"vlc\ 2.1.6", a-recvonly=(string)"", a-type=(string)broadcast, a-charset=(string)UTF-8, ssrc=(uint)279848886, clock-base=(uint)3447297483, seqnum-base=(uint)21447, npt-start=(guint64)625 86725174000, play-speed=(double)1, play-scale=(double)1 0:00:17.243837600 2327 0xb430ec30 INFO GST_EVENT gstevent.c:678:gst_event_new_caps: creating caps event application/x-rtp, media=(string)audio, payload=(int)96, clock-rate=(int)44100, encoding-name=(str ing)MPEG4-GENERIC, encoding-params=(string)2, streamtype=(string)5, profile-level-id=(string)15, mode=(string)AAC-hbr, config=(string)1210, sizelength=(string)13, indexlength=(string)3, indexdeltalength=(string)3, pro file=(string)1, a-tool=(string)"vlc\ 2.1.6", a-recvonly=(string)"", a-type=(string)broadcast, a-charset=(string)UTF-8, ssrc=(uint)279848886, clock-base=(uint)3447297483, seqnum-base=(uint)21447, npt-start=(guint64)625 86725174000, play-speed=(double)1, play-scale=(double)1 0:00:17.245265643 2327 0xb30060c0 INFO GST_EVENT gstevent.c:678:gst_event_new_caps: creating caps event application/x-rtp, media=(string)audio, payload=(int)96, clock-rate=(int)44100, encoding-name=(str ing)MPEG4-GENERIC, encoding-params=(string)2, streamtype=(string)5, profile-level-id=(string)15, mode=(string)AAC-hbr, config=(string)1210, sizelength=(string)13, indexlength=(string)3, indexdeltalength=(string)3, pro file=(string)1, a-tool=(string)"vlc\ 2.1.6", a-recvonly=(string)"", a-type=(string)broadcast, a-charset=(string)UTF-8, ssrc=(uint)279848886, clock-base=(uint)3447297483, seqnum-base=(uint)21447, npt-start=(guint64)625 86725174000, play-speed=(double)1, play-scale=(double)1 0:00:17.775943951 2327 0xb430ec30 WARN rtpjitterbuffer rtpjitterbuffer.c:494:calculate_skew: delta - skew: 0:00:02.080126874 too big, reset skew 0:00:18.401815460 2327 0xb430ec90 INFO GST_EVENT gstevent.c:759:gst_event_new_segment: creating segment event time segment start=0:00:00.000000000, offset=0:00:00.000000000, stop=99:99:99.999999999, rat e=1.000000, applied_rate=1.000000, flags=0x00, time=0:00:00.000000000, base=0:00:00.000000000, position 0:00:00.000000000, duration 99:99:99.999999999 0:00:18.402094922 2327 0xb430ec90 INFO basesrc gstbasesrc.c:2828:gst_base_src_loop:<udpsrc5> marking pending DISCONT 0:00:18.402501115 2327 0xb430ec90 INFO GST_EVENT gstevent.c:678:gst_event_new_caps: creating caps event application/x-rtcp, ssrc=(uint)279848886 0:00:18.402832655 2327 0x19200 INFO GST_BUS gstbus.c:549:gst_bus_timed_pop_filtered:<bus1> we got woken up, recheck for message 0:00:18.486812322 2327 0xb430ec60 INFO GST_EVENT gstevent.c:759:gst_event_new_segment: creating segment event time segment start=0:00:00.000000000, offset=0:00:00.000000000, stop=99:99:99.999999999, rat e=1.000000, applied_rate=1.000000, flags=0x00, time=0:00:00.000000000, base=0:00:00.000000000, position 0:00:00.000000000, 
duration 99:99:99.999999999 0:00:18.487228515 2327 0xb430ec60 INFO basesrc gstbasesrc.c:2828:gst_base_src_loop:<udpsrc2> marking pending DISCONT 0:00:18.487556593 2327 0xb430ec60 INFO GST_EVENT gstevent.c:678:gst_event_new_caps: creating caps event application/x-rtcp, ssrc=(uint)315363940 0:00:18.487809248 2327 0x19200 INFO GST_BUS gstbus.c:549:gst_bus_timed_pop_filtered:<bus1> we got woken up, recheck for message 0:02:54.569395311 2327 0xb430ec60 INFO basesrc gstbasesrc.c:2724:gst_base_src_loop:<udpsrc2> pausing after gst_base_src_get_range() = eos 0:02:54.586390906 2327 0xb430ec30 INFO basesrc gstbasesrc.c:2724:gst_base_src_loop:<udpsrc4> pausing after gst_base_src_get_range() = eos 0:02:54.603107924 2327 0xb430ec60 INFO task gsttask.c:300:gst_task_func:<udpsrc2:src> Task going to paused 0:02:54.617068779 2327 0xb430ec90 INFO basesrc gstbasesrc.c:2724:gst_base_src_loop:<udpsrc5> pausing after gst_base_src_get_range() = eos 0:02:54.633440757 2327 0xb30060c0 INFO task gsttask.c:300:gst_task_func:<rtpjitterbuffer1:src> Task going to paused 0:02:54.633540296 2327 0xb430ec30 INFO task gsttask.c:300:gst_task_func:<udpsrc4:src> Task going to paused 0:02:54.662663240 2327 0xb430ec90 INFO task gsttask.c:300:gst_task_func:<udpsrc5:src> Task going to paused On Wed, Jan 25, 2017 at 11:30 AM, Tarun Tej K [via GStreamer-devel] <[hidden email]> wrote:
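One generic way to see which pad is actually left unlinked when a not-linked error like the one above appears is to dump the pipeline graph. A sketch, assuming the GST_DEBUG_DUMP_DOT_DIR environment variable points at a writable directory and Graphviz is available to render the resulting .dot file:

/* e.g. right after the pad-added callback has run, or once the pipeline is PLAYING */
GST_DEBUG_BIN_TO_DOT_FILE (GST_BIN (pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "rtsp-pipeline");

The file can then be rendered with Graphviz (dot -Tpng), and any unlinked pads show up immediately in the resulting diagram.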
In reply to this post by Tarun Tej K
I am almost there, now getting some errors with alsa, given below from the debug logs: coda7542_os_api_pcontmem_alloc() succeed, pMemInfo 0xb2822f80, vAddr 0xab71e000, pAddr 0x40000000, helper 0x17 0:00:00.861131345 1934 0x138e60 WARN alsa conf.c:4563:parse_args: alsalib error: Unknown parameter AES0 coda7542_os_api_pcontmem_alloc() succeed, pMemInfo 0xb281f080, vAddr 0xab6f8000, pAddr 0x40040000, helper 0x18 0:00:00.886618614 1934 0x138e60 WARN alsa conf.c:4723:snd_config_expand: alsalib error: Parse arguments error: No such file or directory coda7542_os_api_pcontmem_alloc() succeed, pMemInfo 0xb281b778, vAddr 0xab6d2000, pAddr 0x40080000, helper 0x19 0:00:00.915113345 1934 0x138e60 WARN alsa pcm.c:2267:snd_pcm_open_noupdate: alsalib error: Unknown PCM default:{AES0 0x02 AES1 0x82 AES2 0x00 AES3 0x02} coda7542_os_api_pcontmem_alloc() succeed, pMemInfo 0xb2825c90, vAddr 0xab6ac000, pAddr 0x400c0000, helper 0x1a coda7542_os_api_pcontmem_alloc() succeed, pMemInfo 0xb2825d58, vAddr 0xab686000, pAddr 0x40100000, helper 0x1b coda7542_os_api_pcontmem_alloc() succeed, pMemInfo 0x1b2e90, vAddr 0xab506000, pAddr 0x40200000, helper 0x1c coda7542_os_driver_init is called _openfd_coda7542 succeed, fd 29 VpuInformCodecType(0x1b2ee0, 0x10000) is called get_mem_size is called get_mem_size is called get_mem_addr is called _VpuCoda7542GetBufAddr: VPU has no 2nd AXI _VpuCoda7542GetBufAddr() get firmware mem vpuio addr 0x27e00000, vir addr 0xab406000, sz 1048576 0:00:01.030366037 1934 0x138e60 WARN alsa pcm_hw.c:1234:snd_pcm_hw_get_chmap: alsalib error: Cannot read Channel Map ctl : No such file or directory download firmware in VpuCoda7542Device_Get(), rt 0 VpuCoda7542Slot_Get(0x1b2ee0) return 0 VpuCoda7542Slot_Rel(0x1b2ee0, 0) is called VpuCoda7542Device_Rel: munmap firmware, sram and reg base addr _VpuCoda7542RelBufAddr: VirAddrr = 0, index =2 coda7542_os_driver_clean is called 0x1b2ee0 _closefd_coda7542 is calling, fd 29 coda7542_os_driver_init is called _openfd_coda7542 succeed, fd 29 VpuInformCodecType(0x187400, 0x10000) is called get_mem_size is called get_mem_size is called get_mem_addr is called _VpuCoda7542GetBufAddr: VPU has no 2nd AXI _VpuCoda7542GetBufAddr() get firmware mem vpuio addr 0x27e00000, vir addr 0xab406000, sz 1048576 download firmware in VpuCoda7542Device_Get(), rt 0 VpuCoda7542Slot_Get(0x187400) return 0 coda7542_os_api_pcontmem_alloc() succeed, pMemInfo 0xb2827f98, vAddr 0xab71e000, pAddr 0x40000000, helper 0x1b coda7542_os_api_pcontmem_alloc() succeed, pMemInfo 0xb283cf90, vAddr 0xab6f8000, pAddr 0x40040000, helper 0x1e coda7542_os_api_pcontmem_alloc() succeed, pMemInfo 0xb281b778, vAddr 0xab6d2000, pAddr 0x40080000, helper 0x1f coda7542_os_api_pcontmem_alloc() succeed, pMemInfo 0xb2846d10, vAddr 0xab6ac000, pAddr 0x400c0000, helper 0x20 coda7542_os_api_pcontmem_alloc() succeed, pMemInfo 0xb2832f90, vAddr 0xab686000, pAddr 0x40100000, helper 0x21 coda7542_os_api_pcontmem_alloc() succeed, pMemInfo 0x1b2ee0, vAddr 0xab3e6000, pAddr 0x40140000, helper 0x22 VpuCoda7542Sram_Get is called Could not get 2nd AXI SRAM for Coda7542! 0:00:01.259776576 1934 0x188430 WARN basesrc gstbasesrc.c:2933:gst_base_src_loop:<udpsrc1> error: Internal data flow error. 
0:00:01.275732345 1934 0x188430 WARN basesrc gstbasesrc.c:2933:gst_base_src_loop:<udpsrc1> error: streaming task paused, reason not-linked (-1)

Updated source code:

#include <gst/gst.h>
#include <glib.h>
#include <glib/gprintf.h>

GstElement *audioQueue, *videoQueue;
GstElement *source, *audio, *video, *convert, *pipeline, *audioDepay, *audioParse, *audioDecode, *audioConvert, *audioResample,
           *audioSink, *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale, *videoSink;

static void onPadAdded(GstElement *element, GstPad *pad, gpointer data)
{
#if 1
    GstCaps *caps;
    const char *name;
    char *capsName;
    caps = gst_pad_get_current_caps(pad);
    GstStructure *str = gst_caps_get_structure(caps, 0);
    name = gst_structure_get_name(str);
    g_print("name of caps struct string: %s \n", name);
    capsName = gst_caps_to_string(caps);
    g_print("name of caps string: %s \n", capsName);
    GstPadLinkReturn ret;
#endif
    if (g_strrstr(capsName, "audio"))
    {
        g_print("here .....1 \n");
        GstPad *dynamic_pad = gst_element_get_static_pad(audioQueue, "sink");
        ret = gst_pad_link(pad, dynamic_pad);
        if (GST_PAD_LINK_FAILED (ret)) {
            g_print (" Type is '%s' but link failed.\n", capsName);
        } else {
            g_print (" Link succeeded (type '%s').\n", capsName);
        }
        gst_object_unref(dynamic_pad);
    }
    else if (g_strrstr(capsName, "video"))
    {
        g_print("here .....2 \n");
        GstPad *video_dynamic_pad = gst_element_get_static_pad(videoQueue, "sink");
        ret = gst_pad_link(pad, video_dynamic_pad);
        if (GST_PAD_LINK_FAILED (ret)) {
            g_print (" Type is '%s' but link failed.\n", capsName);
        } else {
            g_print (" Link succeeded (type '%s').\n", capsName);
        }
        gst_object_unref(video_dynamic_pad);
    }
    g_free(capsName);
    gst_caps_unref(caps);
}

int main(int argc, char *argv[])
{
    GstCaps *capsFilter;
    GstBus *bus;
    GstMessage *msg;
    GstPad *pad;
    gboolean link_ok;
    GstStateChangeReturn ret;

    /* Initialize GStreamer */
    gst_init (&argc, &argv);

    /* Create Elements */
    pipeline = gst_pipeline_new("rtsp-pipeline");
    source = gst_element_factory_make ("rtspsrc", "rtsp-source");
    g_object_set(source, "location", "rtsp://192.168.3.30:8554/rajvi", NULL);
    g_object_set(source, "latency", 0, NULL);

    audioQueue = gst_element_factory_make ("queue", "audio-queue");
    audioDepay = gst_element_factory_make ("rtpmp4gdepay", "audio-depayer");
    audioParse = gst_element_factory_make ("aacparse", "audio-parser");
    audioDecode = gst_element_factory_make ("avdec_aac", "audio-decoder");
    audioConvert = gst_element_factory_make ("audioconvert", "aconv");
    audioResample = gst_element_factory_make ("audioresample", "audio-resample");
    audioSink = gst_element_factory_make ("autoaudiosink", "audiosink");

    if (!audioQueue || !audioDepay || !audioParse || !audioConvert || !audioResample || !audioSink)
    {
        g_printerr("Cannot create audio elements \n");
        return 0;
    }

    videoQueue = gst_element_factory_make ("queue", "video-queue");
    videoDepay = gst_element_factory_make ("rtph264depay", "video-depay");
    videoParser = gst_element_factory_make ("h264parse", "video-parser");
    videoDecode = gst_element_factory_make ("omxh264dec", "video-decoder");
    videoConvert = gst_element_factory_make("videoconvert", "convert");
    videoScale = gst_element_factory_make("videoscale", "video-scale");
    videoSink = gst_element_factory_make("ximagesink", "video-sink");
    capsFilter = gst_caps_new_simple("video/x-raw",
        "width", G_TYPE_INT, 176,
        "height", G_TYPE_INT, 144,
        NULL);

    if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
    {
        g_printerr("Cannot create video elements \n");
        return 0;
    }

    gst_bin_add_many(GST_BIN(pipeline), source, audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink,
                     videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale, videoSink, NULL);

    if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
    {
        g_printerr("Cannot link audio elements \n");
        return 0;
    }

    /* Linking filter element to videoScale and videoSink */
    link_ok = gst_element_link_filtered(videoScale, videoSink, capsFilter);
    gst_caps_unref (capsFilter);
    if (!link_ok)
    {
        g_warning ("Failed to link element1 and element2!");
        return 0;
    }

    if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoConvert, NULL))
    {
        g_printerr("Cannot link video elements \n");
        return 0;
    }

    g_signal_connect(source, "pad-added", G_CALLBACK(onPadAdded), NULL);

    /* Start playing */
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* Wait until error or EOS */
    bus = gst_element_get_bus (pipeline);
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

    /* Free resources */
    if (msg != NULL)
        gst_message_unref (msg);
    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    return 0;
}

On Wed, Jan 25, 2017 at 11:42 AM, Rajvi Kamdar <[hidden email]> wrote:
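Since the program above pops the error message from the bus but discards it without reading it, a small addition along these lines (a sketch using the standard GstBus API) would print the actual error and debug details before shutting down:

if (msg != NULL && GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR) {
    GError *err = NULL;
    gchar *dbg = NULL;

    gst_message_parse_error (msg, &err, &dbg);
    g_printerr ("Error from %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
    g_printerr ("Debug info: %s\n", dbg ? dbg : "none");
    g_error_free (err);
    g_free (dbg);
}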