Timestamping problem with gdppay element

Timestamping problem with gdppay element

Alexey Chernov
Hello,

I'm continuing to work on the complex pipeline I wrote about
previously ("Problems with video, tee and queues"). The solution proposed
by Marco works just fine for the pipeline I described there:

                     queue -- xvimagesink
                  /
playbin -- tee
                  \
                    queue -- fakesink

converted dynamically to:

                     queue -- xvimagesink
                  /
playbin -- tee
                  \
                     queue -- xvimagesink

But the final goal of my experiments is the following case:


                     queue -- xvimagesink
                  /
playbin -- tee
                  \
                    queue -- fakesink

converted dynamically to:

                     queue -- xvimagesink
                  /
playbin -- tee
                  \
                     queue -- gdppay -- filesink

with the remote end like this:

filesrc -- gdpdepay -- decodebin -- ffmpegcolorspace -- xvimagesink

And for this case it doesn't work.

Well, here is what I have so far:
1. Everything works fine if the final pipeline is hardcoded from the
beginning (i.e. gdppay is there right from the start).
2. Everything works fine without gdppay (with xvimagesink or something similar instead).

But when I start the desired configuration, the following happens:
1. The Xv window of the remote end appears and the playback stalls, but
the window starts to refresh slowly (several frames are shown rapidly at
intervals of 4-5 secs).
2. The Xv window of the main pipeline stops playing properly, too, and
behaves just the same way as the remote one.
3. The main pipeline prints the following warnings to the console:

0:00:07.134552825 19490  0x95be9c8 WARN  gdppay
gstgdppay.c:594:gst_gdp_pay_chain:<gdppay0> did not receive
new-segment before first buffer
0:00:14.095700940 19490  0x948d2b0 WARN  ffmpeg
gstffmpegdec.c:2002:gst_ffmpegdec_video_frame:<ffdec_h2640> Dropping
non-keyframe (seek/init)
0:00:14.691007420 19490  0x95b4978 WARN  basesink
gstbasesink.c:2686:gst_base_sink_is_too_late:<xvimagesink0> warning: A
lot of buffers are being dropped.
0:00:14.691055331 19490  0x95b4978 WARN  basesink
gstbasesink.c:2686:gst_base_sink_is_too_late:<xvimagesink0> warning:
There may be a timestamping problem, or this computer is too slow.

Here's the source code of the main app:

#include <gst/gst.h>
#include <glib.h>

GstElement *_pipeline, *_fakesink, *_remotesink, *_valve, *_mpoint,
*_videobin, *_pipe;

void create_pipeline_bin()
{
        GstElement *tee, *queue1, *queue2, *xvimagesink;

        _pipeline      = gst_element_factory_make("playbin",  NULL);

        tee = gst_element_factory_make("tee",  NULL);
        xvimagesink   = gst_element_factory_make("xvimagesink", NULL);

        GstPad* pad;

        _videobin = gst_bin_new("videobin");
        queue1 = gst_element_factory_make("queue", NULL);
        queue2 = gst_element_factory_make("queue", NULL);
        _valve = gst_element_factory_make("valve", NULL);

        _fakesink = gst_element_factory_make("fakesink", NULL);

        /* keep an extra ref so the fakesink survives gst_bin_remove() later */
        gst_object_ref(_fakesink);

        _mpoint = _valve;

        gst_bin_add_many(GST_BIN(_videobin), tee, queue1, queue2, _valve,
xvimagesink, _fakesink, NULL);

        gst_element_link_many(tee, queue1, xvimagesink, NULL);
        gst_element_link_many(tee, queue2, _valve, _fakesink, NULL);
        pad = gst_element_get_static_pad (tee, "sink");
        gst_element_add_pad (_videobin, gst_ghost_pad_new ("sink", pad));
        gst_object_unref (GST_OBJECT (pad));

        g_object_set(G_OBJECT (_pipeline), "uri", "file:///home/alex/test.mkv", NULL);
        g_object_set(G_OBJECT (_pipeline), "video-sink", _videobin, NULL);
        g_object_set(G_OBJECT (_pipeline), "audio-sink", NULL, NULL);

        gst_element_set_state (_pipeline, GST_STATE_PLAYING);
}

void create_remote_bin()
{
        GstElement *gdppay;
        GstPad* pad;

        gdppay = gst_element_factory_make("gdppay",  NULL);
        _pipe = gst_element_factory_make("filesink",  NULL);

        g_object_set(G_OBJECT (_pipe), "location",
"/home/alex/work/playground/test.gdp", NULL);

        _remotesink = gst_bin_new("gdpbin");
        gst_bin_add_many(GST_BIN(_remotesink), gdppay, _pipe, NULL);
        gst_element_link_many(gdppay, _pipe, NULL);

        pad = gst_element_get_static_pad (gdppay, "sink");
        gst_element_add_pad (_remotesink, gst_ghost_pad_new ("sink", pad));
        gst_object_unref (GST_OBJECT (pad));
}

void connect_remote_client()
{
        if (_remotesink && _mpoint && _videobin)
        {
                /* close the valve so nothing flows through the second branch
                 * while it is being relinked */
                g_object_set(G_OBJECT (_valve), "drop", TRUE, NULL);

                /* swap the fakesink for the gdppay/filesink bin */
                gst_element_unlink(_mpoint, _fakesink);
                gst_bin_remove(GST_BIN(_videobin), _fakesink);
                gst_element_set_state (_fakesink, GST_STATE_NULL);

                gst_bin_add(GST_BIN(_videobin), _remotesink);
                gst_element_link(_mpoint, _remotesink);

                /* bring the new bin up to the pipeline's state */
                gst_element_sync_state_with_parent(_remotesink);

                /* resume playback and reopen the valve */
                gst_element_set_state (_pipeline, GST_STATE_PLAYING);
                g_object_set(G_OBJECT (_valve), "drop", FALSE, NULL);
        }
}

gboolean connect_callback(gpointer data)
{
        connect_remote_client();
        return FALSE;
}

int main(int argc, char *argv[])
{
        gst_init(&argc, &argv);

        gst_debug_set_active(TRUE);
        gst_debug_set_default_threshold(GST_LEVEL_WARNING);

        create_remote_bin();
        create_pipeline_bin();

        g_timeout_add(7000, connect_callback, NULL);

        GMainLoop *loop;

        loop = g_main_loop_new (NULL, FALSE);

        g_print ("Running...\n");
        g_main_loop_run (loop);

        return 0;
}

And on the remote end there's a gst-launch command:

gst-launch -v filesrc location=/home/alex/work/playground/test.gdp !
gdpdepay ! decodebin ! ffmpegcolorspace ! xvimagesink

What is the essential problem here? Is it possible to keep the "main"
branch (i.e. the one with xvimagesink in the main pipeline) playing while the
second one establishes a connection via the GDP protocol, and then keep them
synced? I had high hopes for the valve element, and it works perfectly for
a pair of xvimagesinks, but with the GDP elements it doesn't work; everything
is almost stalled.

Re: Timestamping problem with gdppay element

Nathanael D. Noblet
On 11/16/2010 08:19 AM, 4ernov wrote:

Not likely to solve your problem however...

> Here's the source code of the main app:
> gst_element_link_many(tee, queue1, xvimagesink, NULL);

Can this even work??? I thought you had to request a pad from the tee...

Re: Timestamping problem with gdppay element

Alexey Chernov
In reply to this post by Alexey Chernov
It works because the tee element has two outputs out of the box. I also
tried the complex pad-request connection at first, but this turned out to be much
simpler.
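
For reference, the explicit variant would be to request a src pad from the tee
by hand; a rough sketch with the 0.10 API (tee's src pad template is named
"src%d" there), applied to the code below:

GstPad *tee_src = gst_element_get_request_pad (tee, "src%d");
GstPad *queue_sink = gst_element_get_static_pad (queue1, "sink");
gst_pad_link (tee_src, queue_sink);
gst_object_unref (queue_sink);
/* release tee_src with gst_element_release_request_pad() when unlinking */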

> On 11/16/2010 08:19 AM, 4ernov wrote:
>
> Not likely to solve your problem however...
>
> > Here's the source code of the main app:
> > gst_element_link_many(tee, queue1, xvimagesink, NULL);
>
> Can this even work??? I thought you had to request a pad from the tee...

Re: Timestamping problem with gdppay element

Marco Ballesio
In reply to this post by Alexey Chernov
Hi,

On Tue, Nov 16, 2010 at 5:19 PM, 4ernov <[hidden email]> wrote:

..snip..

Are you transferring this somehow over a network? Using gdppay and
filesink/filesource for this is not really an optimal setup. Maybe you
could have better luck with (de)payloaders and udp elements + jitter
buffer.
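
For illustration, a rough local sketch of that kind of setup (element names,
caps and the port are only examples and may need tuning for your GStreamer
version):

sender branch (instead of gdppay ! filesink):
... ! queue ! jpegenc ! rtpjpegpay ! udpsink host=127.0.0.1 port=5000

receiver:
gst-launch -v udpsrc port=5000 caps="application/x-rtp, media=video, encoding-name=JPEG, payload=26, clock-rate=90000" ! rtpjpegdepay ! jpegdec ! ffmpegcolorspace ! xvimagesink

An RTP jitter buffer (gstrtpbin/rtpbin, depending on the version) could then be
added on the receiving side.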

> 2. The Xv window of the main pipeline stops playing properly, too, and
> behaves just the same way as the remote one.
> 3. The main pipeline prints the following warnings to the console:
>
> 0:00:07.134552825 19490  0x95be9c8 WARN  gdppay
> gstgdppay.c:594:gst_gdp_pay_chain:<gdppay0> did not receive
> new-segment before first buffer
> 0:00:14.095700940 19490  0x948d2b0 WARN  ffmpeg
> gstffmpegdec.c:2002:gst_ffmpegdec_video_frame:<ffdec_h2640> Dropping
> non-keyframe (seek/init)
> 0:00:14.691007420 19490  0x95b4978 WARN  basesink
> gstbasesink.c:2686:gst_base_sink_is_too_late:<xvimagesink0> warning: A
> lot of buffers are being dropped.
> 0:00:14.691055331 19490  0x95b4978 WARN  basesink
> gstbasesink.c:2686:gst_base_sink_is_too_late:<xvimagesink0> warning:
> There may be a timestamping problem, or this computer is too slow.

This message is pretty self-explanatory: xvimagesink is not rendering
anything because buffers are too late. Just to try, what's the effect
if you set sync=false on that element? What if you set the
max-lateness property to something like e.g. a few seconds?
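
For example, a minimal sketch against the 0.10 API, applied to the xvimagesink
created in create_pipeline_bin (max-lateness is in nanoseconds):

/* render frames regardless of how late they are */
g_object_set (G_OBJECT (xvimagesink), "sync", FALSE, NULL);

/* or keep sync but tolerate buffers up to 3 seconds late */
g_object_set (G_OBJECT (xvimagesink), "max-lateness", (gint64) (3 * GST_SECOND), NULL);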

Regards

..snip..

Re: Timestamping problem with gdppay element

Alexey Chernov
In reply to this post by Alexey Chernov
Hello, Marco,

thanks for the answer.

> Hi,
>
> On Tue, Nov 16, 2010 at 5:19 PM, 4ernov <[hidden email]> wrote:
..snip..
>
> Are you transferring this somehow over a network? Using gdppay and
> filesink/filesource for this is not really an optimal setup. Maybe you
> could have better luck with (de)payloaders and udp elements + jitter
> buffer.

Not really, everything is on the same machine, locally. The result I
need is to show the video stream in two windows on two separate X servers
(Xephyr servers, to be exact, i.e. one window on the 1st server and
another on the 2nd one), and the second window should be pluggable
during playback. All I wanted was to stream internal GStreamer data to a
second pipeline through gdppay/gdpdepay to be able to show it on
another X server (two ximagesinks don't work because of an X error). Yes,
I've considered some variants with the elements you mentioned, but they seem
much more complex and I'd like to avoid them by using the gdp elements if
possible.

..snip..
> This message is pretty self-explanatory: xvimagesink is not rendering
> anything because buffers are too late. Just to try, what's the effect
> if you set sync=false on that element? What if you set the
> max-lateness property to something like e.g. a few seconds?

Thank you for the suggestions, I will try them. Yes, I guess it's something
to do with buffer timings, but it seems that gdppay/gdpdepay just can't pump
data through the FIFO fast enough when connected during playback. I think
so because if I put a timeoverlay element after gdpdepay, it shows that
time flows _very_ sporadically. What is also interesting is that if I
change gdppay to a "jpegenc ! avimux" connection and gdpdepay to
decodebin, then everything starts playing fine (just with some delay
accumulating during playback). I decided that it's a problem with the gdp
elements and filed a bug here:

https://bugzilla.gnome.org/show_bug.cgi?id=635226

Maybe I'm mistaken, but anyway, thanks for the help. I'll try your
suggestions and write the results here.
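
In other words, the working variant is roughly (reusing the same path/FIFO):

sender branch: ... ! queue ! jpegenc ! avimux ! filesink location=/home/alex/work/playground/test.gdp
receiver: gst-launch -v filesrc location=/home/alex/work/playground/test.gdp ! decodebin ! ffmpegcolorspace ! xvimagesink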

..snip..

Re: Timestamping problem with gdppay element

Marco Ballesio
Hi,

On Tue, Nov 23, 2010 at 10:20 AM, 4ernov <[hidden email]> wrote:

..snip..
> Not really, everything is on the same machine, locally. The result I
> need is to show the video stream in two windows on two separate X servers
> (Xephyr servers, to be exact, i.e. one window on the 1st server and
> another on the 2nd one), and the second window should be pluggable
> during playback. All I wanted was to stream internal GStreamer data to a
> second pipeline through gdppay/gdpdepay to be able to show it on
> another X server (two ximagesinks don't work because of an X error). Yes,
> I've considered some variants with the elements you mentioned, but they seem
> much more complex and I'd like to avoid them by using the gdp elements if
> possible.
>

have you ruled out the possibility to use native X facilities for
this? For instance, you could use the "display" property of the second
xvimagesink to perform the rendering on a different server, or the
"device" one if you actually are on the same device

note: on some systems it requires to enable tcp connections for the X server.
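
For example, in the code above the second sink could be pointed at the other
server with something like this (a sketch; ":1" is just a placeholder for the
second server's display name):

GstElement *xvimagesink2 = gst_element_factory_make ("xvimagesink", NULL);
g_object_set (G_OBJECT (xvimagesink2), "display", ":1", NULL);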

Regards

..snip..

Re: Timestamping problem with gdppay element

Alexey Chernov
In reply to this post by Alexey Chernov
> Hi,
>
> On Tue, Nov 23, 2010 at 10:20 AM, 4ernov <[hidden email]> wrote:
..snip..
>
> have you ruled out the possibility to use native X facilities for
> this? For instance, you could use the "display" property of the second
> xvimagesink to perform the rendering on a different server, or the
> "device" one if you actually are on the same device
>
> note: on some systems it requires to enable tcp connections for the X server.

Yes, I tried to set the "display" property but with no success. I don't
actually know whether tcp connections are enabled on my X server; I will
try that and the "device" property, too, thank you.
By the way, setting sync to "false" as you suggested really
works, but as expected synchronization is lost. "max-lateness"
doesn't make any difference (I tried everything from small intervals, e.g. 200,
to bigger ones like 15000000) with no success.

..snip..

Re: Timestamping problem with gdppay element

Marco Ballesio
Hi,

On Wed, Nov 24, 2010 at 12:58 PM, 4ernov <[hidden email]> wrote:

..snip..

>>
>> have you ruled out the possibility to use native X facilities for
>> this? For instance, you could use the "display" property of the second
>> xvimagesink to perform the rendering on a different server, or the
>> "device" one if you actually are on the same device
>>
>> note: on some systems it requires to enable tcp connections for the X server.
>
> Yes, I tried to set the "display" property but with no success. I don't
> actually know whether tcp connections are enabled on my X server; I will
> try that and the "device" property, too, thank you.

On Ubuntu, have a look at /etc/gdm/gdm.schemas and set
"security/DisallowTCP" to "false".

Restart your X server and then, from a terminal where the DISPLAY
variable is properly set, run:

xhost +

Note that the above allows access to your X server from any
device on the network (and beyond), so it should really be something to
use only for debugging purposes. For stricter security, check the
documentation about xhost and gdm.
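
If both ends run on the same machine, as in this case, a somewhat safer
variant than "xhost +" is to allow only local, non-network clients:

xhost +local: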

> By the way, setting sync to "false" as you suggested really
> works, but as expected synchronization is lost. "max-lateness"
> doesn't make any difference (I tried everything from small intervals, e.g. 200,
> to bigger ones like 15000000) with no success.
>

The max-lateness option is in ns, so even your bigger value (15000000) is only
15 ms. What if you try something more substantial, let's say more than
one second?

(not that I'm saying this is the best way to proceed, but I'm just curious).
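
For reference, 2 seconds is 2000000000 ns, so on the receiving side the test
could look like this (a sketch based on the pipeline quoted earlier):

gst-launch -v filesrc location=/home/alex/work/playground/test.gdp ! gdpdepay ! decodebin ! ffmpegcolorspace ! xvimagesink max-lateness=2000000000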

Regards

..snip..

Re: Timestamping problem with gdppay element

Alexey Chernov
In reply to this post by Alexey Chernov
> Hi,
>
> On Wed, Nov 24, 2010 at 12:58 PM, 4ernov <[hidden email]> wrote:
>
> ..snip..
>
> On Ubuntu, have a look at /etc/gdm/gdm.schemas and set
> "security/DisallowTCP" to "false".
>
> Restart your X server and then, from a terminal where the DISPLAY
> variable is properly set, run:
>
> xhost +
>
> Note that the above allows access to your X server from any
> device on the network (and beyond), so it should really be something to
> use only for debugging purposes. For stricter security, check the
> documentation about xhost and gdm.

Yes, thank you for the detailed instructions. TCP connections were indeed
disabled (X was started with the -nolisten tcp option); I changed the kdm
settings (I use Kubuntu) and restarted. Everything's right now, X runs without
-nolisten tcp, and I also executed 'xhost +' on each display, but still no
success in terms of GStreamer playback. Here's the pipeline:

gst-launch filesrc location=/home/alex/test.mp4 ! decodebin !
ffmpegcolorspace ! tee name=dist ! queue ! xvimagesink display=":1"
dist. ! queue ! xvimagesink display=":2"

And the output is:

Setting pipeline to PAUSED ...
Pipeline is PREROLLING ...
X Error of failed request:  BadShmSeg (invalid shared segment parameter)
  Major opcode of failed request:  135 (XVideo)
  Minor opcode of failed request:  19 ()
  Segment id in failed request:  0x3e
  Serial number of failed request:  40
  Current serial number in output stream:  41

If I use ximagesink instead of xvimagesink, the X error can also be:

X Error of failed request:  BadShmSeg (invalid shared segment parameter)
  Major opcode of failed request:  130 (MIT-SHM)
  Minor opcode of failed request:  3 (X_ShmPutImage)
  Segment id in failed request:  0x200003
  Serial number of failed request:  29
  Current serial number in output stream:  30

I should say that everything plays just fine if I use a single xvimagesink
element with the display property set (the video really does play on the other
server without problems), or if both elements use the same display option. But
when I set them to different displays, it fails with this error.

..snip..
> The max-lateness option is in ns, so even your bigger value (15000000) is only
> 15 ms. What if you try something more substantial, let's say more than
> one second?
>
> (not that I'm saying this is the best way to proceed, but I'm just curious).

No problem) I also tried 2 and 20 seconds (values of 2000000000 and
20000000000), but the result is still the same. I think the problem is some
fluctuation in the first moments after the connection. Can I avoid these
'bad' buffers somehow? Or is the stream simply out of time already and
unrecoverable?
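
For reference, here is a minimal C sketch of the same setting done
programmatically, assuming a GstBaseSink-based sink such as xvimagesink;
GST_SECOND is GStreamer's one-second constant in nanoseconds, so
2 * GST_SECOND is the 2000000000 tried above:

#include <gst/gst.h>

/* Allow buffers on this sink to be up to two seconds late before the
 * sink considers them "too late"; the max-lateness property of
 * GstBaseSink is expressed in nanoseconds. */
static void
set_max_lateness (GstElement *xvimagesink)
{
        g_object_set (xvimagesink,
                      "max-lateness", (gint64) (2 * GST_SECOND),
                      NULL);
}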


Re: Timestamping problem with gdppay element

Marco Ballesio
Hi,

On Wed, Nov 24, 2010 at 2:43 PM, 4ernov <[hidden email]> wrote:

>> ..snip..
>
> Yes, thank you for the detailed instructions. TCP connections were indeed
> disabled (X was started with the -nolisten tcp option), so I changed the kdm
> settings (I use Kubuntu) and restarted. Now X runs without -nolisten tcp and
> I've executed 'xhost +' on each display, but still no success as far as
> GStreamer playback goes. Here's the pipeline:
>
> gst-launch filesrc location=/home/alex/test.mp4 ! decodebin !
> ffmpegcolorspace ! tee name=dist ! queue ! xvimagesink display=":1"
> dist. ! queue ! xvimagesink display=":2"
>
> And the output is:
>
> Setting pipeline to PAUSED ...
> Pipeline is PREROLLING ...
> X Error of failed request:  BadShmSeg (invalid shared segment parameter)
>  Major opcode of failed request:  135 (XVideo)
>  Minor opcode of failed request:  19 ()
>  Segment id in failed request:  0x3e
>  Serial number of failed request:  40
>  Current serial number in output stream:  41
>

Right, the two X servers run as separate processes and it's not
possible to share the same buffer between the two while in overlay
(not that I understand very well the inner reason of this :/ ).

An easy but expensive solution is to copy the buffer before rendering
it, so your pipe should become something like:

gst-launch filesrc location=/home/alex/test.mp4 ! decodebin !
ffmpegcolorspace ! tee name=dist ! queue ! xvimagesink display=":1"
dist. ! queue ! videomixer ! xvimagesink display=":2"

we're lucky videomixers always copy the buffers :).

Regards,
Marco
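
As a minimal C sketch of the same branch built programmatically (in the
spirit of the application discussed earlier in this thread; the function and
variable names here are purely illustrative):

#include <gst/gst.h>

/* queue ! videomixer ! xvimagesink display=<display>
 * videomixer copies its input buffers, so this branch renders from its
 * own copy and does not share an XShm segment with the first sink. */
static GstElement *
make_copying_branch (GstElement *videobin, GstElement *tee, const gchar *display)
{
        GstElement *queue, *mixer, *sink;

        queue = gst_element_factory_make ("queue", NULL);
        mixer = gst_element_factory_make ("videomixer", NULL);
        sink  = gst_element_factory_make ("xvimagesink", NULL);

        g_object_set (sink, "display", display, NULL);

        gst_bin_add_many (GST_BIN (videobin), queue, mixer, sink, NULL);
        if (!gst_element_link_many (tee, queue, mixer, sink, NULL))
                return NULL;    /* linking failed */

        return sink;
}

The per-frame copy inside videomixer is the "expensive" part mentioned above,
and it only affects this branch.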



Re: Timestamping problem with gdppay element

Alexey Chernov
On Wednesday 24 November 2010 22:11:42 you wrote:

> ..snip..
>
> Right, the two X servers run as separate processes and it's not
> possible to share the same buffer between the two while in overlay
> (not that I understand very well the inner reason of this :/ ).

me too actually :) but it's our client's idea and so I've got to do it..

>
> An easy but expensive solution is to copy the buffer before rendering
> it, so your pipe should become something like:
>
> gst-launch filesrc location=/home/alex/test.mp4 ! decodebin !
> ffmpegcolorspace ! tee name=dist ! queue ! xvimagesink display=":1"
> dist. ! queue ! videomixer ! xvimagesink display=":2"
>
> we're lucky videomixers always copy the buffers :).

Thank you very much, Marco! I just tried it and it works just fine. Very
smooth; I just hope there's enough performance headroom for the extra buffer
copies. Thank you!



Re: Timestamping problem with gdppay element

Alexey Chernov
I seem to have found and fixed a bug in the gdpdepay element's code which
caused this behaviour in the gdppay/gdpdepay configuration. It was a
timestamping problem in the depaying process: the timestamps on the depayed
buffers are left "as is", i.e. they are correct in the primary pipeline but
not in the secondary one. More details and the patch are in my comment on
this bug report:

https://bugzilla.gnome.org/show_bug.cgi?id=635226

I hope it will be accepted into the main tree.
Big thanks to everyone for the help with this problem!
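
For readers who cannot apply the patch, the general idea of rebasing the
incoming timestamps for the receiving pipeline can be sketched with a
GStreamer 0.10 buffer probe. This is only an illustration (it assumes
monotonically increasing timestamps and skips making the buffer writable);
the actual fix in the bug report changes gdpdepay itself:

#include <gst/gst.h>

/* Shift incoming timestamps so that the first buffer starts at zero in
 * the receiving pipeline. */
static GstClockTime base_ts = GST_CLOCK_TIME_NONE;

static gboolean
retimestamp_probe (GstPad *pad, GstBuffer *buf, gpointer user_data)
{
        if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
                if (base_ts == GST_CLOCK_TIME_NONE)
                        base_ts = GST_BUFFER_TIMESTAMP (buf);
                GST_BUFFER_TIMESTAMP (buf) -= base_ts;
        }
        return TRUE;    /* pass the buffer on */
}

/* attach it to the depayloader's source pad, e.g.:
 *   GstPad *src = gst_element_get_static_pad (gdpdepay, "src");
 *   gst_pad_add_buffer_probe (src, G_CALLBACK (retimestamp_probe), NULL);
 *   gst_object_unref (src);
 */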

