RTSP client application with source, audio and video bins

classic Classic list List threaded Threaded
1 message Options
Reply | Threaded
Open this post in threaded view
|

RTSP client application with source, audio and video bins

rajvik
This post was updated on .
Hi,

Getting errors of: 'GST_IS_ELEMENT (element)' failed, as the pad creation and linking has some problem.
Help is highly appreciated.
I am not able to link pads between source, audiobin and videobin. Following
is the pipeline I want to convert to application:

gst-launch-1.0 rtspsrc location="rtsp:<filepath>"  latency=0 name=demux
demux. ! queue !  rtpmp4gdepay ! aacparse ! avdec_aac !  audioconvert !
audioresample ! autoaudiosink demux. ! queue ! rtph264depay ! h264parse !
omxh264dec ! videoconvert ! videoscale ! video/x-raw,width=176, height=144
! ximagesink

Following is the code implemented till date:

#include <gst/gst.h>

/*
 * pad-added handler for the audio branch.
 *
 * rtspsrc emits "pad-added" once per RTP stream (audio AND video), so we
 * first inspect the pad caps and only handle pads whose "media" field is
 * "audio".  `data` is expected to be the audio bin, which must expose a
 * ghost pad named "audiosink" (created in main) targeting the audio
 * queue's sink pad.
 */
static void onPadAdded(GstElement *element, GstPad *pad, gpointer data)
{
        GstElement *audiobin = GST_ELEMENT(data);
        GstCaps *caps;
        const GstStructure *str;
        const gchar *media;
        GstPad *targetsink;

        (void)element;

        /* Prefer the negotiated caps; fall back to a caps query. */
        caps = gst_pad_get_current_caps(pad);
        if (caps == NULL)
                caps = gst_pad_query_caps(pad, NULL);
        str = gst_caps_get_structure(caps, 0);
        media = gst_structure_get_string(str, "media");
        /* Ignore anything that is not the audio RTP stream. */
        if (media == NULL || g_strcmp0(media, "audio") != 0) {
                gst_caps_unref(caps);
                return;
        }
        gst_caps_unref(caps);

        g_debug("Linking audio pad to audiobin");

        targetsink = gst_element_get_static_pad(audiobin, "audiosink");
        if (targetsink == NULL) {
                /* Without the ghost pad the original code tripped the
                 * GST_IS_ELEMENT/GST_IS_PAD assertions here. */
                g_printerr("audiobin has no 'audiosink' ghost pad\n");
                return;
        }
        if (gst_pad_is_linked(targetsink)) {
                gst_object_unref(targetsink);
                return;
        }
        if (GST_PAD_LINK_FAILED(gst_pad_link(pad, targetsink)))
                g_printerr("Failed to link rtspsrc audio pad to audiobin\n");
        gst_object_unref(targetsink);
}

/*
 * pad-added handler for the video branch.
 *
 * Mirror of onPadAdded(): rtspsrc fires this for every stream, so we only
 * act on pads whose caps carry media=(string)video.  `data` is expected to
 * be the video bin, which must expose a ghost pad named "videosink"
 * (created in main) targeting the video queue's sink pad.
 */
static void on_pad_added(GstElement *element, GstPad *pad, gpointer data)
{
        GstElement *videobin = GST_ELEMENT(data);
        GstCaps *caps;
        const GstStructure *str;
        const gchar *media;
        GstPad *targetsink;

        (void)element;

        caps = gst_pad_get_current_caps(pad);
        if (caps == NULL)
                caps = gst_pad_query_caps(pad, NULL);
        str = gst_caps_get_structure(caps, 0);
        media = gst_structure_get_string(str, "media");
        /* Ignore anything that is not the video RTP stream. */
        if (media == NULL || g_strcmp0(media, "video") != 0) {
                gst_caps_unref(caps);
                return;
        }
        gst_caps_unref(caps);

        g_debug("Linking video pad to videobin");

        targetsink = gst_element_get_static_pad(videobin, "videosink");
        if (targetsink == NULL) {
                g_printerr("videobin has no 'videosink' ghost pad\n");
                return;
        }
        if (gst_pad_is_linked(targetsink)) {
                gst_object_unref(targetsink);
                return;
        }
        if (GST_PAD_LINK_FAILED(gst_pad_link(pad, targetsink)))
                g_printerr("Failed to link rtspsrc video pad to videobin\n");
        gst_object_unref(targetsink);
}

int main(int argc, char *argv[]) {
        GstElement *source, *audio, *video, *convert, *pipeline,
*audioDepay, *audioQueue, *videoQueue,
           *audioParse, *audioDecode, *audioConvert, *audioResample,
*audioSink, *videoDepay, *videoParser, *videoDecode, *videoConvert,
*videoScale, *videoSink;
        GstCaps *capsFilter;
        GstBus *bus;
        GstMessage *msg;
        GstPad *pad;
        GstPad *sinkpad,*ghost_sinkpad;
        gboolean link_ok;
        GstStateChangeReturn ret;
      /* Initialize GStreamer */
        gst_init (&argc, &argv);
        /* Create Elements */
        pipeline = gst_pipeline_new("rtsp-pipeline");
        source = gst_element_factory_make ("rtspsrc", "source");
        /*audio bin*/
        audio = gst_bin_new ("audiobin");
        audioQueue = gst_element_factory_make ("queue", "audio-queue");
        audioDepay = gst_element_factory_make ("rtpmp4gdepay",
"audio-depayer");
        audioParse = gst_element_factory_make ("aacparse", "audio-parser");
        audioDecode = gst_element_factory_make ("avdec_aac",
"audio-decoder");
        audioConvert = gst_element_factory_make ("audioconvert", "aconv");
        audioResample = gst_element_factory_make ("audioresample",
"audio-resample");
        audioSink = gst_element_factory_make ("autoaudiosink", "audiosink");

        if (!audioQueue || !audioDepay || !audioParse || !audioConvert ||
!audioResample || !audioSink)
        {
                g_printerr("Cannot create audio elements \n");
                return 0;
        }
        g_object_set(source, "location", "rtsp://<file path>", NULL);
        g_object_set(source, "latency", 0, NULL);
        g_object_set(source, "name", "demux", NULL);

        video  = gst_bin_new ("videobin");
        videoQueue = gst_element_factory_make ("queue", "video-queue");
        videoDepay= gst_element_factory_make ("rtph264depay",
"video-depayer");
        videoParser = gst_element_factory_make ("h264parse",
"video-parser");
        videoDecode = gst_element_factory_make ("omxh264dec",
"video-decoder");
        videoConvert = gst_element_factory_make("videoconvert", "convert");
        videoScale = gst_element_factory_make("videoscale", "video-scale");
        videoSink = gst_element_factory_make("ximagesink", "video-sink");
        capsFilter = gst_caps_new_simple("video/x-raw",
                        "width", G_TYPE_INT, 176,
                        "height", G_TYPE_INT, 144,
                        NULL);

        if (!videoQueue || !videoDepay || !videoParser || !videoDecode ||
!videoConvert || !videoScale || !videoSink || !capsFilter)
        {
                g_printerr("Cannot create video elements \n");
                return 0;

        gst_bin_add_many(GST_BIN(audio),
                        audioQueue, audioDepay, audioParse,
audioDecode,audioConvert, audioResample, audioSink, NULL);

        /* set property value */
        if (!gst_element_link(audioDepay, audioParse))
        {
                g_printerr("Cannot link audioDepay and audioParse \n");
                return 0;
        }
        if (!gst_element_link(audioParse, audioDecode))
        {
                g_printerr("Cannot link audioParse and audioDecode \n");
                return 0;
        }
        if (!gst_element_link(audioDecode, audioConvert))
        {
                g_printerr("Cannot link audioDecode and audioConvert \n");
                return 0;
        }
        if (!gst_element_link(audioConvert, audioResample))
        {
                g_printerr("Cannot link audioConvert and audioResample \n");
                return 0;
        }
        if (!gst_element_link(audioResample, audioSink))
        {
                g_printerr("Cannot link audioResample and  audioSink \n");
                return 0;
        }
        g_signal_connect(G_OBJECT(source), "pad-added",
G_CALLBACK(onPadAdded), audioQueue);
        if (!gst_element_link(videoDepay, videoParser))
        {
                g_printerr("Cannot link videoDepay and videoParser \n");
                return 0;
        }
        if (!gst_element_link(videoParser, videoDecode))
        {
                g_printerr("Cannot link videoParser and videoConvert \n");
                return 0;
        }
        if (!gst_element_link(videoDecode, videoConvert))
        {
                g_printerr("Cannot link videoDecode and videoConvert \n");
                return 0;
        }
        g_signal_connect(G_OBJECT(source), "pad-added",
G_CALLBACK(on_pad_added), videoQueue);

        gst_bin_add_many(GST_BIN(pipeline), source, audio, video, NULL);
          /* Start playing */
        gst_element_set_state ( pipeline, GST_STATE_PLAYING);

        /* Wait until error or EOS */
        bus = gst_element_get_bus (pipeline);
        msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

        /* Free resources */
        if (msg != NULL)
                gst_message_unref (msg);
        gst_object_unref (bus);
        gst_element_set_state (pipeline, GST_STATE_NULL);
        gst_object_unref (pipeline);
        return 0;
}

Thanks
Rajvi