This post was updated.
Hello,
I'm interfacing a camera with a variable frame rate depending of the light conditions. The camera is interfaced using a third party API so I'm using the appsrc element and push the frames into the pipeline. The goal is to encode the raw frames and record a .h264 file. The problem is that when I'm playing the resulting output.h264 file in VLC, all frames seem to play at once (too quick), also there's not video duration shown in the VLC player... I ran: "ffmpeg -i output.h264" with the next output: Input #0, h264, from 'output.h264': Duration: N/A, bitrate: N/A Stream #0:0: Video: h264 (High), yuv420p(tv, bt470bg), 1280x1024, 42 fps, 42 tbr, 1200k tbn, 84 tbc And the code is: static void cb_need_data (GstElement *appsrc, guint unused_size, GstElement *vrate) { static gboolean white = FALSE; static GstClockTime timestamp = 0; GstBuffer *buffer; guint buffer_size; GstFlowReturn ret; GstCaps *caps = 0; float fps=0.0; float fps_prev=0.0; unsigned char rate; // image buffer IMG image; memset(&image,0,sizeof(image)); image.size = sizeof(IMG); //Get camera image GetImage(handler, 0, &image); //Get new camera framerate GetFramerate(handler, FRAMERATE, &fps); //Set buffer size buffer_size = image.width * image.height * 2; buffer = gst_buffer_new(); gst_buffer_insert_memory(buffer, -1, gst_memory_new_wrapped(GST_MEMORY_FLAG_READONLY, (guint8*)image.bp, buffer_size, 0, buffer_size, NULL, NULL)); GST_BUFFER_PTS (buffer) = timestamp; GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, (guint8) fps); timestamp += GST_BUFFER_DURATION (buffer); //Push buffer into appsrc g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret); if (ret != GST_FLOW_OK ) { /* something wrong, stop pushing */ gst_app_src_end_of_stream(GST_APP_SRC(appsrc)); g_main_loop_quit (loop); } } int main(int argc, char **argv) { IMG image; /* init GStreamer */ gst_init (&argc, &argv); // Retrieving a handle to the camera device stat = OpenDevice(0, &handler); GstElement *appsrc, 
*video_rate, *filesink, *omxh264, *enc_queue, *enc_capsfilter; GstCaps *enc_caps; float fps=0.0; // Setting Exposure Time parameter (10ms) stat = SetParameter(handler, AEAG, 1); // AEAG HandleResult(stat,"SetParameter (exposure time set)"); // Start acquisition stat = StartCam(handler); HandleResult(stat, GetImage(handler, 0, &image); HandleResult(stat,"GetImage"); loop = g_main_loop_new (NULL, FALSE); /* setup pipeline */ pipeline = gst_pipeline_new ("pipeline"); appsrc = gst_element_factory_make ("appsrc", "source"); video_rate = gst_element_factory_make ("videorate", "video_rate"); omxh264 = gst_element_factory_make ("omxh264enc", "omxh264enc"); enc_queue = gst_element_factory_make ("queue", "Encoder Queue"); enc_capsfilter = gst_element_factory_make ("capsfilter", "Encoder output caps"); filesink = gst_element_factory_make ("filesink", "filesink"); g_object_set(filesink, "location", "media/test/output.h264", NULL); g_object_set (G_OBJECT (appsrc), "is-live", TRUE,NULL); g_object_set(G_OBJECT(appsrc), "do-timestamp", TRUE, NULL); enc_caps = gst_caps_new_simple ("video/x-h264", "profile", G_TYPE_STRING, "high", NULL); g_object_set (G_OBJECT (enc_capsfilter), "caps", enc_caps, NULL); g_object_set (G_OBJECT (omxh264), "target-bitrate", 5000, "b-frames", 0, "control-rate", 1, "gop-length", 30, NULL); /* setup */ g_object_set (G_OBJECT (appsrc), "caps", gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING, "NV12", "width", G_TYPE_INT, image.width, "height", G_TYPE_INT, image.height, "framerate", GST_TYPE_FRACTION, 0, 1, NULL), NULL); gst_bin_add_many (GST_BIN (pipeline), appsrc, video_rate, omxh264 , enc_capsfilter, enc_queue, filesink , NULL); gst_element_link_many (appsrc, video_rate, omxh264, enc_queue,enc_capsfilter, filesink, NULL); /* setup appsrc */ g_object_set (G_OBJECT (appsrc), "stream-type", 0, "format", GST_FORMAT_TIME, NULL); g_signal_connect (appsrc, "need-data", G_CALLBACK (cb_need_data),video_rate); /* play */ gst_element_set_state (pipeline, 
GST_STATE_PLAYING); g_main_loop_run (loop); /* clean up */ finish: printf("Stop Pipeline\r\n "); gst_element_set_state (pipeline, GST_STATE_NULL); gst_object_unref (pipeline); g_main_loop_unref (loop); StopCam(handler); // Close device CloseDevice(handler); return 0; } Any ideas/clues would be appreciated. Thanks. -- Sent from: http://gstreamer-devel.966125.n4.nabble.com/ _______________________________________________ gstreamer-devel mailing list gstreamer-devel@lists.freedesktop.org https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
H264 elementary streams do not have timestamp information (PTS).
So VLC plays it faster. Put the stream into an mp4 container to get smooth playback. -- Sent from: http://gstreamer-devel.966125.n4.nabble.com/ _______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
Hi, The timestamps are added by the muxer to the file. Try adding qtmux or mp4mux to your pipeline and writing an mp4 file.
You might also need an h264parser to force the output to stream-format=avc if your encoder produces stream-format=byte-stream -----Ursprüngliche Nachricht----- Von: gstreamer-devel <[hidden email]> Im Auftrag von jles Gesendet: Donnerstag, 11. Juli 2019 10:56 An: [hidden email] Betreff: Re: playback H264 encoded file too fast Hi Vinod, Thanks for your answer. I'd like to understand this better.... I'm already adding timestamps: (........) g_object_set(G_OBJECT(appsrc), "do-timestamp", TRUE, NULL); (........) GST_BUFFER_PTS (buffer) = timestamp; GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, (guint8) fps); timestamp += GST_BUFFER_DURATION (buffer); What happens with them? are they deleted during H264 encoding? I'm using a hardware video codec unit, and only allows H264/H265 compression, if the above is true, what would it be the best way to add the timestamps after encoding? -- Sent from: http://gstreamer-devel.966125.n4.nabble.com/ _______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel _______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
Hi, Thanks for that.
I tried both qtmux and mp4mux, but I've got an empty output.mp4 file. The pipeline looks now like this: gst_bin_add_many (GST_BIN (pipeline), appsrc, video_rate,videoconvert,capsfilter, omxh264 , enc_queue, mp4mux, filesink , NULL); gst_element_link_many (appsrc, video_rate, videoconvert,capsfilter, omxh264,enc_queue, mp4mux, filesink, NULL); Any ideas why I might have an empty file? -- Sent from: http://gstreamer-devel.966125.n4.nabble.com/ _______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
In reply to this post by Vinod Kesti
H264 normally doesn't store timestamps. The buffers going into your encoder are in PTS order (presentation time). The buffers coming out of the encoder are in DTS order (decode time). If you add a muxer to your pipeline, this information will be stored in the mp4 header blocks. When it is played, the demuxer takes this information and passes it to the decoder so that your recording will be played at the proper speed.
-----Ursprüngliche Nachricht----- Von: gstreamer-devel <[hidden email]> Im Auftrag von jles Gesendet: Donnerstag, 11. Juli 2019 12:50 An: [hidden email] Betreff: Re: playback H264 encoded file too fast Hi Vinod, Thanks for your answer. I see what's the issue but I'd like to understand this a bit better.... I'm already adding timestamps here: (........) g_object_set(G_OBJECT(appsrc), "do-timestamp", TRUE, NULL); (........) GST_BUFFER_PTS (buffer) = timestamp; GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, (guint8) fps); timestamp += GST_BUFFER_DURATION (buffer); That timestamps look like are added to the GST buffer right? so can I assume that they are only use in the gstreamer flow and they are not added to the frames during H264 encoding? So a solutions seems to be to add the H264 stream into a mp4 container right? Would it possible to do that with the generated H264 file, without the time stamp info? or what would it be the "best way" to do it? -- Sent from: http://gstreamer-devel.966125.n4.nabble.com/ _______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel _______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
In reply to this post by Thornton, Keith
Hi, Thanks for that.
I added to the pipeline: (....)-> h264parse->mp4mux->filesink, and it generates a mp4 file. But unfortunately still happening the "too fast playing" issue. Running: ffprobe -i output.mp4 I've got: Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'output.mp4': Metadata: major_brand : mp42 minor_version : 0 compatible_brands: mp42mp41isomiso2 creation_time : 2019-07-10 08:55:51 Duration: 00:00:00.86, start: 0.000000, bitrate: 42973 kb/s Stream #0:0(eng): Video: h264 (Constrained Baseline) (avc1 / 0x31637661), yuv420p(tv, bt470bg), 1280x1024, 42910 kb/s, SAR 1:1 DAR 5:4, 66 fps, 66 tbr, 6600 tbn, 132 tbc (default) Metadata: creation_time : 2019-07-10 08:55:51 handler_name : VideoHandler Any ideas? Thanks -- Sent from: http://gstreamer-devel.966125.n4.nabble.com/ _______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
Le jeudi 11 juillet 2019 à 12:17 -0500, jles a écrit :
> Hi, Thanks for that. > > I added to the pipeline: (....)-> h264parse->mp4mux->filesink, and it > generates a mp4 file. > > But unfortunately still happening the "too fast playing" issue. > > Running: ffprobe -i output.mp4 I've got: > > Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'output.mp4': > Metadata: > major_brand : mp42 > minor_version : 0 > compatible_brands: mp42mp41isomiso2 > creation_time : 2019-07-10 08:55:51 > Duration: 00:00:00.86, start: 0.000000, bitrate: 42973 kb/s Could be your timestamp being a factor of 10 off, GStreamer timestamp are in nanosecond, let's say you had used us instead it would mean this .86 second stream should have been 8.6s > Stream #0:0(eng): Video: h264 (Constrained Baseline) (avc1 / > 0x31637661), yuv420p(tv, bt470bg), 1280x1024, 42910 kb/s, SAR 1:1 DAR 5:4, > 66 fps, 66 tbr, 6600 tbn, 132 tbc (default) > Metadata: > creation_time : 2019-07-10 08:55:51 > handler_name : VideoHandler > > > Any ideas? > Thanks > > > > > > -- > Sent from: http://gstreamer-devel.966125.n4.nabble.com/ > _______________________________________________ > gstreamer-devel mailing list > [hidden email] > https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel _______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
Hi Nicolas, thanks for the reply,
I checked in debug if there's something different to nanoseconds and timestamps look fine (in nanoseconds). One interesting experiment which made me to be now even more confused, was to divide fps by 10 before GST_BUFFER_PTS: ...... fps/=10; GST_BUFFER_PTS (buffer) = timestamp; ....... The resulting video now the video plays in slow motion instead fast, so no sure where could it be the problem now.... -- Sent from: http://gstreamer-devel.966125.n4.nabble.com/ _______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
Le ven. 12 juill. 2019 07 h 40, jles <[hidden email]> a écrit : Hi Nicolas, thanks for the reply, Just trace and share a couple of values you have set in GST_BUFFER_PTS and DURATION. We'll debug this together.
_______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
Hi Nicolas,
Thanks for the help, Just FYI I'm using now mpegtsmux element instead mp4mux but results/issues are identical. Piece of code where the timestamps are done: ..... GST_BUFFER_PTS (buffer) = timestamp; GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, (guint8) fps); timestamp += GST_BUFFER_DURATION (buffer); ..... Here there are a few the time stamp values and frame rates, for all capture images: timestamp (nanoseconds): 77961018 fps: 29 timestamp (nanoseconds): 112443776 fps: 29 timestamp (nanoseconds): 146926534 fps: 29 timestamp (nanoseconds): 181409292 fps: 29 timestamp (nanoseconds): 215892050 fps: 29 .... And running "gst-discoverer-1.0 --verbose output.ts" I've got: Analyzing file:///media/test/output.ts Done discovering file:///media/test/output.ts Topology: container: video/mpegts, systemstream=(boolean)true, packetsize=(int)188 video: video/x-h264, stream-format=(string)byte-stream, alignment=(string)au, width=(int)1280, height=(int)1024, framerate=(fraction)46/1, interlace-mode=(string)progressive, chroma-format=(string)4:2:0, bit-depth-luma=(uint)8, bit-depth-chroma=(uint)8, parsed=(boolean)true, profile=(string)constrained-baseline, level=(string)4 Tags: video codec: H.264 Codec: video/x-h264, stream-format=(string)byte-stream, alignment=(string)au, width=(int)1280, height=(int)1024, framerate=(fraction)46/1, interlace-mode=(string)progressive, chroma-format=(string)4:2:0, bit-depth-luma=(uint)8, bit-depth-chroma=(uint)8, parsed=(boolean)true, profile=(string)constrained-baseline, level=(string)4 Additional info: None Stream ID: f623b2db448562f44acc3c45398a9f6ff43288c3244e8cdf2d776c577693743d:1/00000041 Width: 1280 Height: 1024 Depth: 24 Frame rate: 46/1 Pixel aspect ratio: 1/1 Interlaced: false Bitrate: 0 Max bitrate: 0 Properties: Duration: 0:00:00.487500111 Seekable: yes Tags: video codec: H.264 -- Sent from: http://gstreamer-devel.966125.n4.nabble.com/ _______________________________________________ gstreamer-devel mailing list 
[hidden email] https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
I was going through your thread, and you made no mention of how you configured your appsrc. Have you forgot to set the appsrc format to time ? Default is bytes. Le ven. 12 juill. 2019 17 h 40, jles <[hidden email]> a écrit : Hi Nicolas, _______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
Hi,
Quick update: I did changed the appsrc format to time, but it didn't help, still same issue. Debugging the application I've realised that if encoding the frame rate time is faster than the time stamp the resulting video plays too fast but in the other way around everything looks at normal speed. I think that the problem is that the camera takes a few frames (2-3) to get a stable fps (after that it changes the fps depending of the light) and the encoder takes a fix encoding frame rate from the first pushed frame into the pipeline. E.g if for the first captured frame the rate is 40fps the encoder starts working at that speed regardless if the next frames are 14fps so it doesn't adjust speed for next captured frames even if framerate in caps and timestamps are updated ...so the resulting video plays too fast.... Any ideas how to fix this? -- Sent from: http://gstreamer-devel.966125.n4.nabble.com/ _______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
Le lundi 15 juillet 2019 à 10:22 -0500, jles a écrit :
> Hi, > > Quick update: > > I did changed the appsrc format to time, but it didn't help, still same > issue. > > Debugging the application I've realised that if encoding the frame rate time > is faster than the time stamp the resulting video plays too fast but in the > other way around everything looks at normal speed. > > I think that the problem is that the camera takes a few frames (2-3) to get > a stable fps (after that it changes the fps depending of the light) and the > encoder takes a fix encoding frame rate from the first pushed frame into the > pipeline. > > E.g if for the first captured frame the rate is 40fps the encoder starts > working at that speed regardless if the next frames are 14fps so it doesn't > adjust speed for next captured frames even if framerate in caps and > timestamps are updated ...so the resulting video plays too fast.... > > Any ideas how to fix this? in time format, otherwise it won't produce a time segment hence your generated timestamp will have no meaning. That being said, I've surprise it works in the first place. > > > > > > > -- > Sent from: http://gstreamer-devel.966125.n4.nabble.com/ > _______________________________________________ > gstreamer-devel mailing list > [hidden email] > https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel _______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel signature.asc (201 bytes) Download Attachment |
This post was updated.
Hi,
It actually didn't work as I thought, it looked at normal speed but I test it with different light conditions and unfortunately still the issue... This is the code: static void cb_need_data (GstElement *appsrc, guint unused_size, GstElement *vrate) { clock_t start, end; struct timespec ts_start; struct timespec ts_end; static GstClockTime timestamp = 0; guint buffer_size; GstFlowReturn ret; static float fps=0.0; static float fps_prev=0.0; // image buffer IMG image; memset(&image,0,sizeof(image)); image.size = sizeof(IMG); // start = clock(); GetImage(Handle,&image); // end = clock(); GetCamParam(Handle, FRAMERATE, &fps); // printf("The execution time is : %f fps: %f\r\n", ((double)(end - start)) / CLOCKS_PER_SEC, fps); buffer_size = image.width * image.height * 2; GstBuffer *buffer = gst_buffer_new_wrapped_full( (GstBufferFlags)0 , (guint8*)image.bp, buffer_size, 0, buffer_size, NULL, NULL ); /*Set timestamp*/ GST_BUFFER_PTS (buffer) = timestamp; GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, fps); timestamp += GST_BUFFER_DURATION (buffer); /*Push buffer*/ g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret); gst_buffer_unref (buffer); if (ret != GST_FLOW_OK ) { finish: /* something wrong, stop pushing */ gst_app_src_end_of_stream(GST_APP_SRC(appsrc)); g_main_loop_quit (loop); } } int main(int argc, char **argv) { IMG image; /* init GStreamer */ gst_init (&argc, &argv); clock_t start, end; static float fps=0.0; static float fps_prev=0.0; GstElement *appsrc, *video_rate, *filesink, *omxh264, *enc_queue,*src_queue, *enc_capsfilter, *videoconvert,*capsfilter,*mp4mux, *h264parser; // Retrieving a handle to the camera device stat = OpenDevice(0, &Handle); HandleResult(stat,"OpenDevice"); // Start acquisition StartAcquisition(Handle); GetImage(Handle,&image); loop = g_main_loop_new (NULL, FALSE); /* Pipeline elements */ pipeline = gst_pipeline_new ("pipeline"); appsrc = gst_element_factory_make ("appsrc", "source"); video_rate = 
gst_element_factory_make ("videorate", "video_rate"); omxh264 = gst_element_factory_make ("omxh264enc", "omxh264enc"); enc_queue = gst_element_factory_make ("queue", "Encoder Queue"); src_queue = gst_element_factory_make ("queue", "Source Queue"); videoconvert = gst_element_factory_make ("videoconvert", "video convert"); capsfilter = gst_element_factory_make ("capsfilter", "caps filter"); enc_capsfilter = gst_element_factory_make ("capsfilter", "Encoder output caps"); h264parser = gst_element_factory_make ("h264parse", "h264 parser"); mp4mux = gst_element_factory_make ("mpegtsmux", "mp4 mux"); filesink = gst_element_factory_make ("filesink", "filesink"); /* setup */ g_object_set(filesink, "location", "media/test/output.ts", NULL); g_object_set (G_OBJECT (appsrc), "is-live", TRUE,NULL); g_object_set (G_OBJECT (appsrc), "stream-type", 0, "format", GST_FORMAT_TIME, NULL); g_object_set(G_OBJECT(appsrc), "do-timestamp", TRUE, NULL); g_object_set (G_OBJECT (enc_capsfilter), "caps", gst_caps_new_simple ("video/x-h264", "profile", G_TYPE_STRING, "high", NULL), NULL); g_object_set (G_OBJECT(omxh264), "b-frames", 0, "control-rate", 1, "gop-length",0, NULL); g_object_set (G_OBJECT (appsrc), "caps", gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING, "GRAY8", "width", G_TYPE_INT, image.width, "height", G_TYPE_INT, image.height, "framerate", GST_TYPE_FRACTION, 0, 1, NULL), NULL); g_object_set (G_OBJECT (capsfilter), "caps", gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING, "NV12", "width", G_TYPE_INT, image.width, "height", G_TYPE_INT, image.height, NULL), NULL); /*Build pipeline*/ gst_bin_add_many (GST_BIN (pipeline), appsrc, video_rate,videoconvert,capsfilter,src_queue, omxh264 , enc_queue,enc_capsfilter, h264parser , mp4mux,filesink , NULL); gst_element_link_many (appsrc, video_rate, videoconvert,capsfilter,src_queue, omxh264, enc_queue,enc_capsfilter, h264parser , mp4mux, filesink, NULL); /* setup appsrc */ g_signal_connect (appsrc, "need-data", 
G_CALLBACK (cb_need_data),video_rate); /* play */ gst_element_set_state (pipeline, GST_STATE_PLAYING); g_main_loop_run (loop); /* clean up */ finish: printf("Stop Pipeline\r\n "); gst_element_set_state (pipeline, GST_STATE_NULL); gst_object_unref (pipeline); g_main_loop_unref (loop); StopAcquisition(Handle); // Close device if (Handle) CloseDevice(Handle); return 0; } I think the main issue is how the timestamp is calculated.... -- Sent from: http://gstreamer-devel.966125.n4.nabble.com/ _______________________________________________ gstreamer-devel mailing list gstreamer-devel@lists.freedesktop.org https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
Free forum by Nabble | Edit this page |