x264enc and variable rate appsrc


x264enc and variable rate appsrc

David Jaggard
I have created a test pipeline (see code below) using a custom appsrc that creates an H.264 stream and sends it over RTP.

I am testing it with the following command line to receive and play the video stream:
gst-launch-1.0 -v udpsrc port=5000 caps = "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96" ! rtph264depay ! decodebin ! videoconvert ! autovideosink

The pipeline description used inside the application looks like this:
appsrc name=MyAppSrc ! video/x-raw,width=640,height=360,framerate=10/1 ! videoconvert ! identity name=ident ! x264enc zerolatency=true speed-preset=superfast ! rtph264pay ! udpsink host=127.0.0.1 port=5000

At the moment the appsrc is generating frames at precisely 10 fps and I am setting the buffer timestamps correctly. However, unless I also specify the framerate (in the caps string following the appsrc) as 10/1, the client viewer constantly complains:
WARNING: from element /GstPipeline:pipeline0/GstAutoVideoSink:autovideosink0/GstD3D11VideoSinkBin:autovideosink0-actual-sink-d3d11video/GstD3D11VideoSink:d3d11videosink0: A lot of buffers are being dropped.
Additional debug info:
../libs/gst/base/gstbasesink.c(3134): gst_base_sink_is_too_late (): /GstPipeline:pipeline0/GstAutoVideoSink:autovideosink0/GstD3D11VideoSinkBin:autovideosink0-actual-sink-d3d11video/GstD3D11VideoSink:d3d11videosink0:
There may be a timestamping problem, or this computer is too slow.


The problem is:
In the real application, the appsrc will produce frames at a variable rate - between about 2 and 20 fps. How can I get this to work if I'm required to set a fixed framerate?

Provided the buffer timestamps are set, why does the framerate caps field have any bearing on the pipeline?

Here is the application code:

// NativeBasicPipeline.cpp : This file contains the 'main' function. Program execution begins and ends there.
//

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <string.h>   /* memset */

GTimer* timer;

GstAppSrc* appSrc;

guint sourceid = 0;

int width = 1;

int colour = 255;

/* Idle callback that feeds the appsrc: pushes one frame roughly every 100 ms. */
static gboolean read_data(gpointer user_data)
{
    static GstClockTime time = 0;
    static guint64 offset = 0;

    GstFlowReturn ret;
    auto elapsed = g_timer_elapsed(timer, NULL);    /* seconds since last restart */

    if (elapsed > 1.0 / 10.0) {
        /* One 640x360 RGB frame (3 bytes per pixel), filled with a changing grey level. */
        auto size = 640 * 3 * 360;

        gpointer copy = g_malloc(size);

        memset(copy, colour % 255, size);

        colour += 10;

        GstBuffer* buffer = gst_buffer_new_wrapped(copy, size);


        //GST_BUFFER_TIMESTAMP(buffer) = pts;
        GST_BUFFER_PTS(buffer) = time;
        GST_BUFFER_DTS(buffer) = time;
        GST_BUFFER_DURATION(buffer) = 100 * GST_MSECOND;

        GST_BUFFER_OFFSET(buffer) = offset++;
        GST_BUFFER_OFFSET_END(buffer) = offset;

        time += 100 * GST_MSECOND;

        /* The "push-buffer" action signal does NOT take ownership of the buffer,
         * so we unref it below. gst_app_src_push_buffer() would take ownership,
         * in which case the unref must be dropped. */
        g_signal_emit_by_name(appSrc, "push-buffer", buffer, &ret);

        //ret = gst_app_src_push_buffer(appSrc, buffer);
        gst_buffer_unref(buffer);

        g_timer_start(timer);

        return TRUE;
    }

    /* Keep the idle source installed until stop_feed() removes it. */
    return TRUE;
}

/* appsrc "need-data" callback: start feeding frames from an idle source. */
void start_feed(GstElement* pipeline, guint size, void* unused)
{
    if (sourceid == 0) {
        sourceid = g_idle_add(read_data, NULL);
    }
}

/* appsrc "enough-data" callback: stop feeding frames. */
void stop_feed(GstElement* pipeline, void* unused)
{
    if (sourceid != 0) {
        g_source_remove(sourceid);
        sourceid = 0;
    }
}

static gboolean print_field(GQuark field, const GValue* value, gpointer pfx) {
    gchar* str = gst_value_serialize(value);

    g_print("%s  %15s: %s\n", (gchar*)pfx, g_quark_to_string(field), str);
    g_free(str);
    return TRUE;
}

static void print_caps(const GstCaps* caps, const gchar* pfx) {
    guint i;

    g_return_if_fail(caps != NULL);

    if (gst_caps_is_any(caps)) {
        g_print("%sANY\n", pfx);
        return;
    }
    if (gst_caps_is_empty(caps)) {
        g_print("%sEMPTY\n", pfx);
        return;
    }

    for (i = 0; i < gst_caps_get_size(caps); i++) {
        GstStructure* structure = gst_caps_get_structure(caps, i);

        g_print("%s%s\n", pfx, gst_structure_get_name(structure));
        gst_structure_foreach(structure, print_field, (gpointer)pfx);
    }
}

void handoff(GstElement* pipeline, GstBuffer* buffer, void* unused)
{
    static int count = 0;
    static GstBuffer* buffers[50];

    //auto ident = gst_bin_get_by_name(GST_BIN(pipeline), "ident");
    auto pads = GST_ELEMENT_PADS(pipeline);

    auto pad0 = GST_PAD(pads->data);
    auto pad1 = GST_PAD(pads->next->data);

    auto caps = gst_pad_get_current_caps(pad1);

    print_caps(caps, "");

    if (caps)
        gst_caps_unref(caps);   /* gst_pad_get_current_caps() returns a new ref */

    if (count < 50)
    {
        GstBuffer* copy = gst_buffer_copy(buffer);

        buffers[count] = copy;

        ++count;
    }
    else
    {
        count = 100;


    }
}

int main()
{
    GstElement* pipeline;
    GstBus* bus;
    GstMessage* msg;

    timer = g_timer_new();


    /* Initialize GStreamer */
    gst_init(NULL, NULL);

    /* Build the pipeline */

    GError* err = 0;

    //auto udpPipe = "videotestsrc pattern=smpte ! video/x-raw,width=640,height=360,framerate=10/1 ! videoscale ! videoconvert ! identity name=ident ! x264enc zerolatency=true speed-preset=superfast ! rtph264pay ! udpsink host=127.0.0.1 port=5000";

    //auto videoPipe = "videotestsrc pattern=smpte ! video/x-raw,width=640,height=360,framerate=10/1 ! videoscale ! videoconvert ! autovideosink";

    auto appSrcPipe = "appsrc name=MyAppSrc ! video/x-raw,width=640,height=360 ! videoconvert ! identity name=ident ! x264enc zerolatency=true speed-preset=superfast ! rtph264pay ! udpsink host=127.0.0.1 port=5000";

    //auto appSrcPipeVideo = "appsrc name=MyAppSrc ! videoscale ! videoconvert ! autovideosink";

    pipeline = gst_parse_launch(appSrcPipe, &err);

    if (!pipeline) {
        g_printerr("Failed to create pipeline: %s\n", err ? err->message : "unknown error");
        return -1;
    }

    appSrc = (GstAppSrc*)gst_bin_get_by_name(GST_BIN(pipeline), "MyAppSrc");

    g_object_set(G_OBJECT(appSrc), "format", GST_FORMAT_TIME, NULL);

    g_signal_connect(appSrc, "need-data", G_CALLBACK(start_feed), 0);
    g_signal_connect(appSrc, "enough-data", G_CALLBACK(stop_feed), 0);

    /* Note: no framerate field is set here (the subject of this post).
     * "bpp" and "depth" are GStreamer 0.10-era fields and are not needed
     * for video/x-raw caps in 1.x. */
    auto caps = gst_caps_new_simple("video/x-raw",
        "format", G_TYPE_STRING, "RGB",
        "bpp", G_TYPE_INT, 24,
        "depth", G_TYPE_INT, 24,
        "width", G_TYPE_INT, 640,
        "height", G_TYPE_INT, 360,
        NULL);

    gst_app_src_set_caps((GstAppSrc*)appSrc, caps);

    //GstPad* pad = gst_element_get_static_pad(appSrc, "src");

    //auto ident = gst_bin_get_by_name(GST_BIN(pipeline), "ident");

    //g_signal_connect(ident, "handoff", G_CALLBACK(handoff), 0);


    /* Start playing */
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    /* g_main_loop_run() blocks until the loop is quit, so the bus wait below
     * is only reached after g_main_loop_quit() is called from elsewhere. */
    auto main_loop = g_main_loop_new(NULL, FALSE);
    g_main_loop_run(main_loop);

    /* Wait until error or EOS */
    bus = gst_element_get_bus(pipeline);
    msg =
        gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
            (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

    /* Free resources */
    if (msg != NULL)
        gst_message_unref(msg);
    gst_object_unref(bus);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}
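
For the real variable-rate source, one option (a sketch only, not part of the code above) is to stamp each buffer with the pipeline running time instead of a fixed 100 ms step; appsrc can also do this automatically if its do-timestamp property is set to TRUE. The helper name below is made up for illustration:

/* Hypothetical helper: timestamp a buffer with the current running time.
 * Assumes appSrc sits in a PLAYING pipeline so a clock and base time exist. */
static void timestamp_from_clock(GstBuffer* buffer)
{
    GstClock* clock = gst_element_get_clock(GST_ELEMENT(appSrc));
    if (!clock)
        return;   /* no clock yet (pipeline not PLAYING) */

    GstClockTime now = gst_clock_get_time(clock);
    GstClockTime base = gst_element_get_base_time(GST_ELEMENT(appSrc));

    GST_BUFFER_PTS(buffer) = now - base;                 /* running time */
    GST_BUFFER_DTS(buffer) = GST_BUFFER_PTS(buffer);
    GST_BUFFER_DURATION(buffer) = GST_CLOCK_TIME_NONE;   /* unknown at variable rate */

    gst_object_unref(clock);
}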




Re: x264enc and variable rate appsrc

Gary Metalle

Hi


What happens if you use a framerate of 0/1? This is the way to define a variable framerate.


Maybe the issue is with your decoder pipeline and not with the encoding. Have you tried setting 'sync=FALSE' on the video sink you're using?
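
For illustration, a minimal sketch (assuming the 640x360 RGB setup from the posted code) of what variable-framerate caps on the appsrc could look like; a framerate of 0/1 marks the stream as variable rate:

/* Sketch only: variable-framerate caps for the appsrc. */
GstCaps* caps = gst_caps_new_simple("video/x-raw",
    "format", G_TYPE_STRING, "RGB",
    "width", G_TYPE_INT, 640,
    "height", G_TYPE_INT, 360,
    "framerate", GST_TYPE_FRACTION, 0, 1,
    NULL);
gst_app_src_set_caps(appSrc, caps);
gst_caps_unref(caps);   /* gst_app_src_set_caps() does not take ownership */

On the receiving side, sync=false goes on the sink, e.g. ... ! videoconvert ! autovideosink sync=false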



Re: x264enc and variable rate appsrc

David Jaggard
Thanks! I like a simple fix.... autovideosink sync=false fixed it!

FYI, I had tried framerate 0/1 - I think this is the default if you don't set anything manually.


Re: x264enc and variable rate appsrc

Nicolas Dufresne-5
In reply to this post by David Jaggard
RTP does not carry the framerate, which is needed to properly compute
the latency. That could explain why the missing framerate causes buffers
to be late (no latency being reported by elements). If you truly
target a variable framerate (rather than this just being for testing), you can
increase the sink's processing-deadline to compensate for the worst case,
or simply set the framerate to the lowest expected rate.

Selecting a zero-latency decoder, like avdec_h264 or, I believe, the
stateless decoders in 1.18, might also work.
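
For illustration, a minimal sketch of raising that deadline, assuming a receiver built in code where sink is the actual video sink element (processing-deadline is a GstBaseSink property in nanoseconds, and autovideosink may not proxy it, so it would go on the concrete sink):

/* Sketch only: give the pipeline up to 500 ms of processing headroom before
 * frames count as late; the 500 ms value is an assumption sized for ~2 fps. */
g_object_set(sink, "processing-deadline", (guint64)(500 * GST_MSECOND), NULL);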


Re: x264enc and variable rate appsrc

Nicolas Dufresne-5
In reply to this post by David Jaggard
On Tuesday, 22 September 2020 at 12:34 +0100, David J wrote:
> Thanks! I like a simple fix.... autovideosink sync=false fixed it!

Be aware that sync=false might cause stutter or uneven playback; it
depends on the level of "smoothness" you are looking for.

