Hello Everyone, I am using a framegrabber to capture frames from multiple v4l2 source cameras. So here is how i am doing it... starts playing it. Here is the pipeline ... 3) And then the fetch() function is used to pull the buffer from the appsink and use the buffer for further processing. However I am facing a problem here. This application works perfectly for the following configurations of v4l2src caps and app-sink caps respectively. v4l2srccaps appsink-caps 1) YUY2 RGB 2) RGB YUY2 3) RGB RGBA However for the following configurations, it captures properly for the first time in all the devices but fails to capture for the second time... no error messages though ....it just keeps waiting v4l2src-caps appsink-caps 1) YUY2 YUY2 2) RGB RGB Any help would be appreciated :) The following is the code for the pipeline initialization. bool GStreamerCameraFrameSourceImpl::InitializeGstPipeLine() { GstStateChangeReturn status; end = true; pipeline = GST_PIPELINE(gst_pipeline_new(NULL)); if (pipeline == NULL) { NVXIO_PRINT("Cannot create Gstreamer pipeline"); return false; } bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline)); // create v4l2src GstElement * v4l2src = gst_element_factory_make("v4l2src", NULL); if (v4l2src == NULL) { NVXIO_PRINT("Cannot create v4l2src"); FinalizeGstPipeLine(); return false; } std::ostringstream cameraDev; cameraDev << "/dev/video" << cameraIdx; g_object_set(G_OBJECT(v4l2src), "device", cameraDev.str().c_str(), NULL); gst_bin_add(GST_BIN(pipeline), v4l2src); // create color convert element GstElement * color = gst_element_factory_make(COLOR_ELEM, NULL); if (color == NULL) { NVXIO_PRINT("Cannot create %s element", COLOR_ELEM); FinalizeGstPipeLine(); return false; } gst_bin_add(GST_BIN(pipeline), color); /*GstElement * scale = gst_element_factory_make("videoscale", NULL); if (scale == NULL) { NVXIO_PRINT("Cannot create videoscale element"); FinalizeGstPipeLine(); return false; } gst_bin_add(GST_BIN(pipeline), scale); */ // create appsink element sink = gst_element_factory_make("appsink", NULL); if (sink == NULL) { NVXIO_PRINT("Cannot create appsink element"); FinalizeGstPipeLine(); return false; } gst_bin_add(GST_BIN(pipeline), sink); // if initial values for FrameSource::Parameters are not // specified, let's set them manually to prevent very huge images if (configuration.frameWidth == (vx_uint32)-1) configuration.frameWidth = 1920; if (configuration.frameHeight == (vx_uint32)-1) configuration.frameHeight = 1080; if (configuration.fps == (vx_uint32)-1) configuration.fps = 30; #if GST_VERSION_MAJOR == 0 std::unique_ptr<GstCaps, GStreamerObjectDeleter> caps_v42lsrc( gst_caps_new_simple ("video/x-raw-rgb", "width", GST_TYPE_INT_RANGE, 1, (int)configuration.frameWidth, "height", GST_TYPE_INT_RANGE, 1, (int)configuration.frameHeight, "framerate", GST_TYPE_FRACTION, (int)configuration.fps, NULL)); #else std::ostringstream stream; /*stream << "video/x-raw, format=(string){RGB, GRAY8}, width=[1," << configuration.frameWidth << "], height=[1," << configuration.frameHeight << "], framerate=" << configuration.fps << "/1;";*/ stream << "video/x-raw, format=(string)RGB, width=" << configuration.frameWidth << ", height=" << configuration.frameHeight << ";"; std::unique_ptr<GstCaps, GStreamerObjectDeleter> caps_v42lsrc(gst_caps_from_string(stream.str().c_str())); #endif if (!caps_v42lsrc) { NVXIO_PRINT("Failed to create caps"); FinalizeGstPipeLine(); return false; } // link elements if (!gst_element_link_filtered(v4l2src, color, caps_v42lsrc.get())) { NVXIO_PRINT("GStreamer: cannot link v4l2src -> color using caps"); FinalizeGstPipeLine(); return false; } // link elements /*if (!gst_element_link(color, scale)) { NVXIO_PRINT("GStreamer: cannot link color -> scale"); FinalizeGstPipeLine(); std::cout << "Cannot link color and scale\n"; return false; }*/ if (!gst_element_link(color, sink)) { NVXIO_PRINT("GStreamer: cannot link color -> appsink"); FinalizeGstPipeLine(); return false; } gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1); gst_app_sink_set_drop (GST_APP_SINK(sink), true); // do not emit signals: all calls will be synchronous and blocking gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0); #if GST_VERSION_MAJOR == 0 std::unique_ptr<GstCaps, GStreamerObjectDeleter> caps_appsink( gst_caps_new_simple("video/x-raw-rgb", "bpp", G_TYPE_INT, 24, "red_mask", G_TYPE_INT, 0xFF0000, "green_mask", G_TYPE_INT, 0x00FF00, "blue_mask", G_TYPE_INT, 0x0000FF, NULL)); #else // support 1 and 4 channel 8 bit data std::unique_ptr<GstCaps, GStreamerObjectDeleter> caps_appsink( gst_caps_from_string("video/x-raw, format=(string){RGBA, GRAY8};")); #endif gst_app_sink_set_caps(GST_APP_SINK(sink), caps_appsink.get()); // Force pipeline to play video as fast as possible, ignoring system clock gst_pipeline_use_clock(pipeline, NULL); status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING); handleGStreamerMessages(); if (status == GST_STATE_CHANGE_ASYNC) { // wait for status update status = gst_element_get_state(GST_ELEMENT(pipeline), NULL, NULL, GST_CLOCK_TIME_NONE); } if (status == GST_STATE_CHANGE_FAILURE) { NVXIO_PRINT("GStreamer: unable to start playback"); FinalizeGstPipeLine(); std::cout << "Cannot initialize Gstreamer pipeline because of STATE_CHANGE_FAILURE" << std::endl; return false; } // explicitly set params to -1 to ensure // their update in the updateConfiguration() function configuration.frameWidth = (vx_uint32)-1; configuration.frameHeight = (vx_uint32)-1; if (!updateConfiguration(color, configuration)) { FinalizeGstPipeLine(); return false; } end = false; return true; } } _______________________________________________ gstreamer-devel mailing list [hidden email] https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel |
Free forum by Nabble | Edit this page |