Hello
In a C++ application, I split a pipeline with the help of appsink-appsrc. v4l2src device=/dev/video0 ! 'video/x-raw,format=(string)YV12,width=1280,height=720,framerate=(fraction)25/1' ! vaapih265enc ! appsink & appsrc ! vaapidecode ! xvimagesink Problem: I am facing an unexpected delay in video-streaming of around ~500ms. I am sure that this delay is not due to data transmission, because both applications run on the same PC. To confirm this, I time-stamped the h.265 bytes and calculated the data-transmission delay (~1ms). I suspect that I have to tweak the appsink-appsrc elements, maybe by changing their properties. My code snippets look like this: *Appsink* _sink = gst_element_factory_make("appsink", "sink"); g_object_set(_sink, "emit-signals", TRUE, "max-buffers", 1, "drop", TRUE, "sync", FALSE, NULL); g_signal_connect( _sink, "new-sample", G_CALLBACK(onNewBuffer), this); static GstFlowReturn onNewBuffer (GstAppSink *appsink, gpointer userData) { h265msg::h265 msg; // A rosmsg to carry h.265 byte-stream frame GstMapInfo map; GstSample *sample; g_signal_emit_by_name(appsink, "pull-sample", &sample); if (sample){ GstBuffer *buffer = gst_sample_get_buffer(sample); if (gst_buffer_map (buffer, &map, GST_MAP_READ)) { msg.data.resize( map.size ); memcpy( &msg.data[0], map.data, map.size ); gst_buffer_unmap(buffer, &map); } msg.header.stamp = ros::Time::now(); msg.presentation_ts = buffer->pts; msg.decoding_ts = buffer->dts; msg.duration = buffer->duration; msg.offset = buffer->offset; msg.offset_end = buffer->offset_end; _pub.publish(msg); // publish the msg on network for 2nd appsrc application gst_sample_unref(sample); return GST_FLOW_OK; } return GST_FLOW_ERROR; } *Appsrc* _source = gst_element_factory_make ("appsrc", "appsrc"); g_object_set (_source, "caps", gst_caps_new_simple("video/x-h265", "stream-format", G_TYPE_STRING, "byte-stream", "alignment", G_TYPE_STRING, "au", "width", G_TYPE_INT, 1280, "height", G_TYPE_INT, 720, "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1, "framerate", 
GST_TYPE_FRACTION, 25, 1, NULL ), "stream-type", 0, "format", GST_FORMAT_TIME, "min-latency", 0, "max-latency", 1000000, "do-timestamp", TRUE, //"is-live", TRUE, // if is-live is true, it displays only 1 frame per second NULL); void getBuffer(const h265msg::h265ConstPtr &msg) { GstBuffer *buffer = gst_buffer_new_allocate (NULL, msg->data.size(), NULL); gst_buffer_fill(buffer, 0, &msg->data[0], msg->data.size()); GST_BUFFER_PTS (buffer) = msg->presentation_ts; GST_BUFFER_DURATION (buffer) = msg->duration; GST_BUFFER_DTS(buffer) = msg->decoding_ts; GST_BUFFER_OFFSET(buffer) = msg->offset; GST_BUFFER_OFFSET_END(buffer) = msg->offset_end; GstFlowReturn ret; g_signal_emit_by_name(_source, "push-buffer", buffer, &ret); gst_buffer_unref (buffer); } -- Sent from: http://gstreamer-devel.966125.n4.nabble.com/ _______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
Free forum by Nabble | Edit this page |