I've implemented the equivalent of the following pipeline in C++ to read a camera stream and write it to disk as MP4:
gst-launch-1.0 v4l2src device=/dev/video0 ! video/x-raw, width=1280, height=720, framerate=10/1 ! clockoverlay halignment=right valignment=bottom text="Device Time:" shaded-background=true font-desc="Sans, 12" ! nvvidconv ! 'video/x-raw(memory:NVMM),format=(string)I420' ! nvv4l2h265enc bitrate=2000000 ! 'video/x-h265, stream-format=(string)byte-stream'! h265parse ! qtmux ! filesink location=test265.mp4 -e the gst-launch-1.0 command works alright when i ctrl-c out of it. my c++ program does start, it reads the camera and writes a file.. however if i ctrl-c out of the program the generated file is unreadable. So i'm now listening to SIGINT and sending an EOS message to any of the GstElement* but none of them make a difference.. the file is still corrupted. where do i need to send my : GstBus* bus = gst_element_get_bus(sd->pipeline); gst_bus_post (bus, gst_message_new_eos(GST_OBJECT(sd->mux))); to get it to nicely cleanup ? ---- setup of the stream follows: bool PipelineManager::start_stream( Ref<CameraDetails> cd) { bool ret_value = false; Ref<StreamDetails> sd = mkRef<StreamDetails>(); sd->m_serial = cd->get_serial(); sd->m_running = false; sd->m_resolution = cd->m_prefered_resolution; sd->loop = m_loop; // start stream // v4l2src device=/dev/video0 ! // video/x-raw, width=1280, height=720, framerate=10/1 ! // clockoverlay halignment=right valignment=bottom text="Device Time:" shaded-background=true font-desc="Sans, 12" ! // nvvidconv ! // 'video/x-raw(memory:NVMM),format=(string)I420' ! // nvv4l2h265enc bitrate=2000000 ! // 'video/x-h265, stream-format=(string)byte-stream'! // h265parse ! // qtmux ! 
// filesink location=test265.mp4 -e // gstream setup GstMessage *msg; GstStateChangeReturn ret; // initialise if ( !m_initialized) { m_initialized = true; gst_init (&m_argc, &m_argv); } Log::log("create a pipeline");// create a pipeline sd->pipeline = gst_pipeline_new (combine("mtdata-pipeline", sd->m_serial).c_str()); if ( sd->pipeline ) { sd->bus = gst_element_get_bus(sd->pipeline); gst_bus_add_signal_watch (sd->bus); g_signal_connect (sd->bus, "message", (GCallback) cb_message, sd.get()); //-------------------------------------------------------------------- // v4l2src device=/dev/video1 ! sd->source = create_plugin("v4l2src", sd->m_serial); if( G_IS_OBJECT(sd->source) ) { string devicestring = "/dev/" + cd->get_device_name(); //g_object_set (sd->source, "device", devicestring.c_str(), NULL); g_object_set (G_OBJECT(sd->source), "device", "/dev/video0", NULL); //-------------------------------------------------------------------- // video/x-raw,width=1280,height=720,framerate=10/1 ! GstCaps* caps_source = gst_caps_from_string("video/x-raw,width=1024,height=576,framerate=10/1"); sd->source_caps_filter = create_plugin("capsfilter", sd->m_serial, "source"); g_object_set (G_OBJECT(sd->source_caps_filter), "caps", caps_source, NULL); //-------------------------------------------------------------------- // clockoverlay halignment=right valignment=bottom text="Device Time:" // shaded-background=true font-desc="Sans, 12" ! sd->clockoverlay = create_plugin("clockoverlay", sd->m_serial); if( G_IS_OBJECT(sd->clockoverlay) ) { g_object_set (sd->clockoverlay, "halignment", 2, // 2 = right "valignment", 1, // 1 = bottom "text", "Device Time:", "shaded-background", true, "font-desc" ,"Sans, 12", NULL); // nvvidconv ! sd->convert = create_plugin("nvvideoconvert", sd->m_serial); if( G_IS_OBJECT(sd->convert) ) { sd->convert_caps_filter = create_plugin("capsfilter", sd->m_serial, "convert"); // 'video/x-raw(memory:NVMM),format=(string)I420' ! 
GstCaps* caps1 = gst_caps_from_string("video/x-raw(memory:NVMM),format=(string)I420"); g_object_set (G_OBJECT(sd->convert_caps_filter), "caps",caps1, NULL); //-------------------------------------------------------------------- // nvv4l2h265enc bitrate=2000000 ! sd->resample = create_plugin("nvv4l2h265enc", sd->m_serial, "resample"); if( G_IS_OBJECT(sd->resample) ) { g_object_set (sd->resample, "bitrate", 2000000, NULL); // 'video/x-h265, stream-format=(string)byte-stream'! GstCaps* caps2 = gst_caps_from_string("video/x-h265, stream-format=(string)byte-stream"); sd->resample_caps_filter = create_plugin("capsfilter", sd->m_serial, "resample"); g_object_set (G_OBJECT(sd->resample_caps_filter), "caps", caps2, NULL); //-------------------------------------------------------------------- // h265parse ! sd->parse = create_plugin("h265parse", sd->m_serial, "parse"); if( G_IS_OBJECT(sd->parse ) ) { //-------------------------------------------------------------------- // qtmux ! sd->mux = create_plugin("qtmux", sd->m_serial, "mux"); if( G_IS_OBJECT(sd->mux )) { //-------------------------------------------------------------------- // filesink location=test265.mp4 -e ! 
sd->filesink = create_plugin("filesink", sd->m_serial); if( G_IS_OBJECT(sd->filesink ) ) { string filename = create_new_filename(sd->m_serial); g_object_set (sd->filesink, "location", filename.c_str(), NULL); gst_bin_add_many( GST_BIN (sd->pipeline), sd->source, sd->source_caps_filter, sd->clockoverlay, sd->convert, sd->convert_caps_filter, sd->resample, sd->resample_caps_filter, sd->parse, sd->mux, sd->filesink, NULL); gst_element_link_many( sd->source, sd->source_caps_filter, sd->clockoverlay, sd->convert, sd->convert_caps_filter, sd->resample, sd->resample_caps_filter, sd->parse, sd->mux, sd->filesink, NULL); Log::log("starting"); gst_element_set_state(sd->pipeline, GST_STATE_PLAYING); ret_value = true; } } } } } } } } else { g_printerr ("pipeline could not be created.\n"); } if( ret_value) { sd->m_running = true; // add it to the collection m_stream_details_map[sd->m_serial] = sd; } return ret_value; } |
please ignore.
http://gstreamer-devel.966125.n4.nabble.com/EOS-which-element-should-receive-this-signal-tt973021.html#none has the answer (once I actually read it instead of just skimming it): `gst_element_send_event (sd->pipeline, gst_event_new_eos());` worked. |
Free forum by Nabble | Edit this page |