GStreamer (1.14) Pipeline in Python3

GStreamer (1.14) Pipeline in Python3

Jon bae

Hello,

I'm trying to understand how Python and GStreamer work together. I can build a video-only pipeline that plays just fine, but when I add audio the pipeline stops at the first frame and only shows that frame on the screen.

Can you tell me what is wrong with my code here?

#!/usr/bin/env python3

import gi
gi.require_version('Gst', '1.0')
gi.require_version('GLib', '2.0')
from gi.repository import GLib, Gst

# enable GStreamer debug output
Gst.debug_set_active(True)
Gst.debug_set_default_threshold(3)


# callback: link each new decodebin pad to the matching queue
def on_new_decoded_pad(dbin, pad):
    caps = pad.query_caps(None).to_string()
    decode = pad.get_parent()
    pipeline = decode.get_parent()

    if caps.startswith("video"):
        video_queue = pipeline.get_by_name("video_queue")
        decode.link(video_queue)
        print("linked video")
    if caps.startswith("audio"):
        audio_queue = pipeline.get_by_name("audio_queue")
        decode.link(audio_queue)
        print("linked audio")


def main():
    Gst.init(None)

    # create new pipeline
    pipe = Gst.Pipeline.new('pipeline')

    # file source and decoder
    src = Gst.ElementFactory.make('filesrc', "src")
    decode = Gst.ElementFactory.make('decodebin', 'decode')

    # video queue and sink
    video_queue = Gst.ElementFactory.make('queue', 'video_queue')
    video_sink = Gst.ElementFactory.make('autovideosink', 'video_sink')

    # audio queue, converter and sink
    audio_queue = Gst.ElementFactory.make('queue', 'audio_queue')
    audio_convert = Gst.ElementFactory.make('audioconvert', 'audio_convert')
    audio_sink = Gst.ElementFactory.make('autovideosink', 'audio_sink')

    # set some properties
    src.set_property('location', 'test.mp4')
    video_sink.set_property('sync', True)
    audio_sink.set_property('sync', True)

    # add all elements to the pipeline
    pipe.add(src)
    pipe.add(decode)
    pipe.add(video_queue)
    pipe.add(video_sink)
    pipe.add(audio_queue)
    pipe.add(audio_convert)
    pipe.add(audio_sink)

    # link the elements that have static pads
    src.link(decode)
    video_queue.link(video_sink)
    audio_queue.link(audio_convert)
    audio_convert.link(audio_sink)

    # decodebin's source pads appear dynamically; link them in the pad-added callback
    decode.connect('pad-added', on_new_decoded_pad)

    loop = GLib.MainLoop()

    # start pipeline in play mode
    pipe.set_state(Gst.State.PLAYING)
    loop.run()

    # cleanup
    pipe.set_state(Gst.State.NULL)


if __name__ == '__main__':
    main()


I hope it is OK to post this much code here; if not, I can post a gist link next time.


Regards

Jonathan


_______________________________________________
gstreamer-devel mailing list
[hidden email]
https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel
Re: GStreamer (1.14) Pipeline in Python3

BGraaf

Hi,

You are using the wrong audio sink:

audio_sink = Gst.ElementFactory.make('autovideosink', 'audio_sink')  →  autoaudiosink?
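
In other words, something along these lines (untested sketch, keeping your element names):

    # the audio branch needs an audio sink, not a video sink
    audio_sink = Gst.ElementFactory.make('autoaudiosink', 'audio_sink')

Depending on the source material, an audioresample between audioconvert and the sink does not hurt either.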

 

Bernhard

 


From: gstreamer-devel [mailto:[hidden email]] On behalf of Jonathan Baecker
Sent: Wednesday, 26 December 2018 13:22
To: [hidden email]
Subject: GStreamer (1.14) Pipeline in Python3

 


Re: GStreamer (1.14) Pipeline in Python3

Jon bae
On 26.12.18 14:58, Bernhard Graaf wrote:

Hi,

You are using the wrong audio sink:

audio_sink = Gst.ElementFactory.make('autovideosink', 'audio_sink')  →  autoaudiosink?

Bernhard

Oops... how silly of me - yes, you are right! That was the problem. I probably copied that line and forgot to change the sink...

Thank you for pointing this out!

Jonathan
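
P.S. For the archive: a bus watch would probably have surfaced the error right away instead of the pipeline just sitting there. A minimal sketch (untested; it would go into main() before loop.run(), and the callback name is only illustrative):

    # quit the main loop when an error is posted on the pipeline bus
    def on_error(bus, msg):
        err, debug = msg.parse_error()
        print("Error:", err.message, debug)
        loop.quit()

    bus = pipe.get_bus()
    bus.add_signal_watch()
    bus.connect('message::error', on_error)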


 

