Hello,
I am trying to record video with GStreamer on an N950 MeeGo device (from a Qt application). For first tests I adapted this sample code: http://maemo.org/maemo_release_documentation/maemo4.1.x/node9.html

The adapted code looks like this:

#####
/* Initialize GStreamer */
gst_init(NULL, NULL);

/* Create pipeline and attach a callback to its message bus */
pipeline = gst_pipeline_new("test-camera");

bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_add_watch(bus, (GstBusFunc)bus_callback, this);
gst_object_unref(GST_OBJECT(bus));

/* Create elements */
/* Camera video stream comes from a Video4Linux driver */
camera_src = gst_element_factory_make("autovideosrc", "camera_src"); // also tested: subdevsrc, v4l2camsrc, v4l2src
/* Colorspace filter is needed to make sure that the sinks understand
 * the stream coming from the camera */
csp_filter = gst_element_factory_make("ffmpegcolorspace", "capsfilter");
/* Tee that copies the stream to multiple outputs */
tee = gst_element_factory_make("tee", "tee");
/* Queue creates a new thread for the stream */
screen_queue = gst_element_factory_make("queue", "screen_queue");
/* Sink that shows the image on screen. Xephyr doesn't support the XVideo
 * extension, so there it needs to be ximagesink, but the device uses
 * xvimagesink */
screen_sink = gst_element_factory_make("xvimagesink", "screen_sink");
/* Creates a separate thread for the stream from which the image
 * is captured */
image_queue = gst_element_factory_make("queue", "image_queue");
/* Filter to convert the stream to a format that the gdkpixbuf library
 * can use */
image_filter = gst_element_factory_make("ffmpegcolorspace", "image_filter");
/* A dummy sink for the image stream. Goes to bitheaven */
image_sink = gst_element_factory_make("fakesink", "image_sink");

/* Check that elements are correctly initialized */
if(!(pipeline && camera_src && screen_sink && csp_filter && screen_queue
     && image_queue && image_filter && image_sink))
{
    qDebug() << "Couldn't create pipeline elements";
    QApplication::exit(0);
}

/* Set image sink to emit handoff-signal before throwing away
 * its buffer */
g_object_set(G_OBJECT(image_sink),
             "signal-handoffs", TRUE, NULL);

/* Add elements to the pipeline. This has to be done prior to
 * linking them */
gst_bin_add_many(GST_BIN(pipeline), camera_src, csp_filter,
                 tee, screen_queue, screen_sink, image_queue,
                 image_filter, image_sink, NULL);

/* Specify what kind of video is wanted from the camera */
caps = gst_caps_new_simple("video/x-raw-rgb", // also tested: x-raw-yuv
                           "width", G_TYPE_INT, 640,
                           "height", G_TYPE_INT, 480,
                           NULL);

if (!caps)
{
    qDebug() << "caps NULL";
}

/* Link the camera source and colorspace filter using the capabilities
 * specified */
if(!gst_element_link_filtered(camera_src, csp_filter, caps))
{
    qDebug() << "gst_element_link_filtered caps error";
    // QApplication::exit(0);
}
gst_caps_unref(caps);

/* Connect Colorspace Filter -> Tee -> Screen Queue -> Screen Sink.
 * This finalizes the initialization of the screen part of the pipeline */
if(!gst_element_link_many(csp_filter, tee, screen_queue, screen_sink, NULL))
{
    qDebug() << "gst_element_link_many tee error";
    QApplication::exit(0);
}

/* gdkpixbuf requires 8 bits per sample, which is 24 bits per pixel */
caps = gst_caps_new_simple("video/x-raw-rgb",
                           "width", G_TYPE_INT, 640,
                           "height", G_TYPE_INT, 480,
                           "bpp", G_TYPE_INT, 24,
                           "depth", G_TYPE_INT, 24,
                           "framerate", GST_TYPE_FRACTION, 15, 1,
                           NULL);

/* Link the image branch of the pipeline. The pipeline is
 * ready after this */
if(!gst_element_link_many(tee, image_queue, image_filter, NULL))
{
    qDebug() << "gst_element_link_many image_queue error";
    QApplication::exit(0);
}

if(!gst_element_link_filtered(image_filter, image_sink, caps))
{
    qDebug() << "gst_element_link_filtered image_sink error";
    QApplication::exit(0);
}

gst_caps_unref(caps);

/* As soon as the screen is exposed, the window ID will be advised to the sink */
//g_signal_connect(this, "expose-event", G_CALLBACK(expose_cb),
//                 screen_sink);

GstStateChangeReturn sret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
if(sret == GST_STATE_CHANGE_FAILURE)
{
    qDebug() << "Error GST_STATE_PLAYING: " << sret;
}
else
{
    qDebug() << "Ok GST_STATE_PLAYING: " << sret;
}
#####

Now the problem is that I just get "GST_STATE_CHANGE_FAILURE" as the result and no preview (and no error/debug messages).
I have already tested different parameters and looked at the Qt Mobility source code for reference (which uses GStreamer too), all without success.

Is there something wrong with the code, or can someone provide a simple working example of how to make the camera preview visible (in Qt/MeeGo)?

Regards,
Peter
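The bus_callback attached above is not shown in the post; a minimal sketch of one (the body here is an assumption, using the GStreamer 0.10 API) that at least surfaces pipeline errors when the state change fails:

/* Sketch of a bus callback: log errors from the pipeline so a
 * GST_STATE_CHANGE_FAILURE at least produces a message. */
static gboolean bus_callback(GstBus *bus, GstMessage *msg, gpointer data)
{
    if (GST_MESSAGE_TYPE(msg) == GST_MESSAGE_ERROR) {
        GError *err = NULL;
        gchar *debug = NULL;
        gst_message_parse_error(msg, &err, &debug);
        qDebug() << "GStreamer error:" << err->message << (debug ? debug : "");
        g_error_free(err);
        g_free(debug);
    }
    return TRUE; /* keep the watch installed */
}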
On Mon, 2011-09-05 at 10:45 +0200, Peter Staab wrote:
> Now the problem is that I just get "GST_STATE_CHANGE_FAILURE" as result
> and no preview (no error/debug messages).

Did you try setting any of the debugging flags? They help to track down what isn't working as you expect.

http://gstreamer.freedesktop.org/data/doc/gstreamer/head/gstreamer/html/gst-running.html

--
Kaj-Michael Lang <[hidden email]>
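For reference, the page above documents the GST_DEBUG environment variable; the same thresholds can also be raised from code. The "v4l2*" category pattern below is only an illustration:

/* Raise GStreamer's log thresholds from code; equivalent to setting the
 * GST_DEBUG environment variable described on the page above. */
gst_debug_set_default_threshold(GST_LEVEL_WARNING);
gst_debug_set_threshold_for_name("v4l2*", GST_LEVEL_DEBUG);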
In reply to this post by Ha-P.
On 09/05/11 10:45, Peter Staab wrote:
> Hello,
>
> I am trying to record video with gstreamer on a N950 Meego device (from a Qt application). For first tests I adopted this sample code: http://maemo.org/maemo_release_documentation/maemo4.1.x/node9.html

You don't want to send RGB to xvimagesink. I'd also suggest using autovideosink.

Stefan
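A sketch of how that suggestion could be applied to Peter's code (variable names from the original post; whether the camera can actually deliver 640x480 YUV is an assumption):

/* Request YUV from the camera instead of RGB, and let autovideosink
 * pick a suitable sink (GStreamer 0.10 caps). */
screen_sink = gst_element_factory_make("autovideosink", "screen_sink");
caps = gst_caps_new_simple("video/x-raw-yuv",
                           "width", G_TYPE_INT, 640,
                           "height", G_TYPE_INT, 480,
                           NULL);
if(!gst_element_link_filtered(camera_src, csp_filter, caps))
{
    qDebug() << "link with YUV caps failed";
}
gst_caps_unref(caps);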
In reply to this post by Ha-P.
I managed to get GStreamer to work on the N9. The pipeline uses an appsink element to capture images frame by frame from the camera, and I am using it coupled with OpenCV. The NewFrameAdded() callback is only called for the first frame; for performance reasons I rather poll the later frames myself with the PoolFrame function.
CODE:

static gboolean InitializePipeline(int *argc, char ***argv, int width, int height, IplImage **_frame3C)
{
    GstElement *image_sink, *csp_filter, *image_filter;
    GstCaps *caps;

    buffer_width = width;
    buffer_hight = height;

    /* Allocate memory for the frames */
    frame3C = cvCreateImageHeader(cvSize(buffer_width, buffer_hight), IPL_DEPTH_8U, 3); // color image
    frame1 = cvCreateImage(cvSize(buffer_width, buffer_hight), IPL_DEPTH_8U, 1); // grey, current
    frame2 = cvCreateImage(cvSize(buffer_width, buffer_hight), IPL_DEPTH_8U, 1); // grey, previous

    /* Initialize flags */
    cold_start = true;

    /* Initialize GStreamer */
    gst_init(argc, argv);

    /* Create elements */
    /* Camera video stream comes from a Video4Linux driver */
    camera_src = gst_element_factory_make("v4l2camsrc", "camera_src");
    g_object_set(G_OBJECT(camera_src), "driver-name", "omap3cam", NULL); // thanks BBNS_ @ maemo irc aka Yun-Ta Tsai
    /* Colorspace filter is needed to make sure that the sinks understand the stream coming from the camera */
    csp_filter = gst_element_factory_make("ffmpegcolorspace", "csp_filter");
    /* Filter to convert the stream to a format that the gdkpixbuf library can use */
    image_filter = gst_element_factory_make("ffmpegcolorspace", "image_filter");
    /* An AppSink for the image stream */
    image_sink = gst_element_factory_make("appsink", "image_sink");

    /* Create pipeline and attach a callback to its message bus */
    pipeline = gst_pipeline_new("test-camera");
    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));

    /* Check that elements are correctly initialized */
    if(!(pipeline && camera_src && csp_filter && image_sink && image_filter))
    {
        g_critical("Couldn't create pipeline elements");
        return FALSE;
    }

    /* Add elements to the pipeline. This has to be done prior to linking them */
    gst_bin_add_many(GST_BIN(pipeline), camera_src, csp_filter, image_filter, image_sink, NULL);

    /* Specify what kind of video is wanted from the camera,
     * e.g. video/x-raw-yuv,format=(fourcc)UYVY,width=400,height=240 */
    char caps_str[100];
    sprintf(caps_str, "video/x-raw-yuv,format=(fourcc)UYVY,width=%i,height=%i", width, height);
    printf("CAPS STR: %s\n", caps_str);
    caps = gst_caps_from_string(caps_str); // framerate=[1/30,30/1]

    /* Link the camera source and colorspace filter using the capabilities specified */
    if(!gst_element_link_filtered(camera_src, csp_filter, caps))
    {
        return FALSE;
    }
    gst_caps_unref(caps);

    caps = gst_caps_new_simple("video/x-raw-rgb",
                               "width", G_TYPE_INT, width,
                               "height", G_TYPE_INT, height,
                               NULL);

    /* Link the image branch of the pipeline */
    if(!gst_element_link_many(csp_filter, image_filter, NULL))
        return FALSE;
    if(!gst_element_link_filtered(image_filter, image_sink, caps))
        return FALSE;

    /* Clean up */
    gst_caps_unref(caps);

    /* Set the image sink to emit a signal for each new buffer */
    g_object_set(G_OBJECT(image_sink), "emit-signals", TRUE, NULL);
    g_signal_connect(image_sink, "new-buffer", G_CALLBACK(NewFrameAdded), NULL);

    return TRUE;
}

void NewFrameAdded(GstAppSink *_appsink)
{
    if(cold_start)
    {
        /* Initialize the appsink on the first frame */
        printf("\nAppsink initialized!\n");
        appsink = _appsink;
        gst_app_sink_set_drop(appsink, true);
        gst_app_sink_set_emit_signals(appsink, false); // do not emit a signal on new frames; we poll them instead
        gst_app_sink_set_max_buffers(appsink, 1);
        cold_start = false;
        scanning_status = true;
    }
    buffer = gst_app_sink_pull_buffer(appsink);
}

IplImage *PoolFrame(void)
{
    /* Block until the appsink becomes available */
    while(appsink == NULL);

    if(appsink != NULL)
    {
        UnrefGstFrame();

        unsigned char *data;
        buffer = gst_app_sink_pull_buffer(appsink);
        data = (unsigned char *) GST_BUFFER_DATA(buffer);

        if(!buffer_swap)
        {
            cvSetData(frame3C, data, buffer_width * 3);
            cvCvtColor(frame3C, frame1, CV_RGB2GRAY); // can be optimized
            currentFrame = frame1;
        }
        else
        {
            cvSetData(frame3C, data, buffer_width * 3);
            cvCvtColor(frame3C, frame2, CV_RGB2GRAY); // can be optimized
            currentFrame = frame2;
        }
        buffer_swap = !buffer_swap;

        if(focusStatus == GST_PHOTOGRAPHY_FOCUS_STATUS_RUNNING)
        {
            PoolAutoFocusBusMessage();
        }
        return currentFrame;
    }
    return currentFrame;
}

Hope it will be useful.

Cheers,
K
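Hypothetical usage of the code above (names taken from the post; note that the posted snippet never sets the pipeline to PLAYING, so the assumption here is that the caller does it, with pipeline being a global as in the post):

int main(int argc, char **argv)
{
    IplImage *frame3C = NULL;

    if(!InitializePipeline(&argc, &argv, 640, 480, &frame3C))
        return 1;

    /* Start the pipeline; the posted code does not do this itself */
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    /* Blocks until the appsink has delivered its first frame */
    IplImage *grey = PoolFrame();

    /* ... process grey frames with OpenCV ... */

    return 0;
}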
In reply to this post by Ha-P.
When running the application from the command line on the device, you need to be logged in as the developer user, because neither a normal user nor root has the appropriate privileges to create a GStreamer pipeline.

Cheers,
Klen