Read frames from GStreamer pipeline in opencv (cv::Mat)

Read frames from GStreamer pipeline in opencv (cv::Mat)

pchaurasia
Hi Folks,

I am looking to read frames from my GStreamer pipeline into an OpenCV data structure. I am using the 'new_sample' signal of appsink to read frames. The map derived from the sample read off the appsink does not seem to give the right size of the frame.

  if (!gst_buffer_map ((buffer), &map_info, GST_MAP_READ)) {
    gst_buffer_unmap ((buffer), &map_info);
    gst_sample_unref(sample);
    return GST_FLOW_ERROR;
  }

I believe map_info.size should give the size of the frame, but the value I see does not appear correct.

Following is my code -

#include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv/cv.h"
#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gst/gstelement.h>
#include <stdio.h>
#include <stdlib.h>

#include <unistd.h>
#include <pthread.h>


using namespace cv;
GstSample* buffer;        
cv::Mat frame;

int sampleno = 0;
GstFlowReturn CaptureGstBuffer(GstAppSink *sink, gpointer user_data)
{            

  //prog_data* pd = (prog_data*)user_data;


  GstSample* sample = gst_app_sink_pull_sample(sink);

  if(sample == NULL) {
    return GST_FLOW_ERROR;
  }

  GstBuffer* buffer = gst_sample_get_buffer(sample);
  GstMapInfo map_info;


  if (!gst_buffer_map ((buffer), &map_info, GST_MAP_READ)) {
    gst_buffer_unmap ((buffer), &map_info);
    gst_sample_unref(sample);
    return GST_FLOW_ERROR;
  }

  //render using map_info.data
//  frame = Mat::zeros(1080, 1920, CV_8UC3);
 // frame = cv::Mat(1080, 1920, CV_8UC3, (char *)map_info.data, cv::Mat::AUTO_STEP);
  //memcpy(frame.data,map_info.data,map_info.size);

    //Mat grey;
    //cvtColor(frame, grey, CV_BGR2GRAY);
 

//if (!frame.empty())
//  imshow("test-gstreamer-video",grey);
//  waitKey(1);

  fprintf(stderr,"Got sample no  %d  %d\n",sampleno++,(int)map_info.size);

  gst_buffer_unmap ((buffer), &map_info);
  //gst_memory_unmap(memory, &map_info);
  //gst_memory_unref(memory);
  gst_sample_unref(sample);

  return GST_FLOW_OK;
}
 

int main(int argc, char *argv[]) {
  GstElement *pipeline, *source, *caps, *sink;
  GstBus *bus;
  GstCaps *filtercaps;
  GstElement *tee, *vq1;
  GstMessage *msg;
  GstBin     *recorder;
  GstStateChangeReturn ret;
  GstPad      *srcpad,*sinkpad;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  source        = gst_element_factory_make ("nvcamerasrc", "source");
  sink          = gst_element_factory_make ("appsink", "sink");
  tee           = gst_element_factory_make ("tee", "videotee");
  vq1           = gst_element_factory_make ("queue", "q1");

  recorder = GST_BIN(gst_bin_new("recording-bin"));

  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");

  if (!pipeline || !source || !sink || !tee ||  !vq1 ) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }


  caps = gst_element_factory_make ("capsfilter", "filter");
  g_assert (caps != NULL); /* should always exist */


  filtercaps = gst_caps_from_string("video/x-raw(memory:NVMM), width=(int)1920, height=(int)1080, format=(string)I420, framerate=(fraction)30/1 ");
  g_object_set (G_OBJECT (caps), "caps", filtercaps, NULL);
  gst_caps_unref (filtercaps);


  /* Modify the source's properties */
  //g_object_set (source, "pattern", 0, NULL);
  g_object_set (sink, "drop" , TRUE, NULL);
  g_object_set (sink, "new_sample" , TRUE, NULL);
  g_object_set (sink, "max-buffers" , 1, NULL);



   GstAppSinkCallbacks* appsink_callbacks = (GstAppSinkCallbacks*)malloc(sizeof(GstAppSinkCallbacks));
   appsink_callbacks->eos = NULL;
   appsink_callbacks->new_preroll = NULL;
   appsink_callbacks->new_sample = CaptureGstBuffer;
   gst_app_sink_set_callbacks(GST_APP_SINK(sink), appsink_callbacks,   (gpointer)NULL, free);


  /* Build the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), source, caps, tee, vq1, sink, NULL);
  if (gst_element_link_many (source,caps,tee, vq1, sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked1.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  srcpad                = gst_element_get_request_pad(tee,"src_%u");
  sinkpad               = gst_element_get_static_pad(vq1,"sink");
  gst_pad_link(srcpad,sinkpad);
  gst_element_link(vq1,sink);


  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;

    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* We should not reach here because we only asked for ERRORs and EOS */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
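
For reference, here is a minimal sketch (an illustrative example, not the code from this thread) of the usual appsink wiring. Note that appsink has no "new_sample" property; the property that enables the "new-sample" signal is "emit-signals", and when gst_app_sink_set_callbacks() is used no signal needs to be enabled at all.

#include <gst/gst.h>
#include <gst/app/gstappsink.h>

static GstFlowReturn
on_new_sample (GstAppSink *sink, gpointer user_data)
{
  GstSample *sample = gst_app_sink_pull_sample (sink);
  if (sample == NULL)
    return GST_FLOW_ERROR;

  /* ... inspect gst_sample_get_buffer (sample) here ... */

  gst_sample_unref (sample);
  return GST_FLOW_OK;
}

static void
setup_appsink (GstElement *appsink)
{
  /* The callbacks struct is copied by gst_app_sink_set_callbacks(),
   * so a stack variable is fine; eos and new_preroll are left unset. */
  GstAppSinkCallbacks callbacks = { NULL, NULL, on_new_sample };

  g_object_set (appsink, "max-buffers", 1, "drop", TRUE, NULL);
  gst_app_sink_set_callbacks (GST_APP_SINK (appsink), &callbacks, NULL, NULL);
}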

Re: Read frames from GStreamer pipeline in opencv (cv::Mat)

Tim Müller
On Mon, 2017-05-29 at 12:03 -0700, pchaurasia wrote:

Hi,

> I am looking to read frames from my Gstreamer pipeline into opencv data
> structure. I am using 'new_sample' signal of appsink to read frames. The
> map derived from the sample, read off of appsink - does not seem to give
> right size of the frame.
>
>   if (!gst_buffer_map ((buffer), &map_info, GST_MAP_READ)) {
>     gst_buffer_unmap ((buffer), &map_info);
>     gst_sample_unref(sample);
>     return GST_FLOW_ERROR;
>   }
>
> I believe map_info.size --> should give size of the frame. The frame
> size that I see does not appear correct.

How is the size not correct? What frame size in bytes do you get for what resolution in what format, and what did you expect it to be?

You'll usually want to map video buffers with gst_video_frame_map(), so you also get the stride and such.
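
A minimal sketch of what that could look like inside the new-sample callback (it uses the caps attached to the sample so width, height, and strides match the actual buffer layout, and assumes the buffer is in system memory rather than NVMM):

#include <gst/video/video.h>

static void
inspect_sample (GstSample *sample)
{
  GstBuffer *buffer = gst_sample_get_buffer (sample);
  GstCaps *caps = gst_sample_get_caps (sample);
  GstVideoInfo vinfo;
  GstVideoFrame vframe;

  if (caps == NULL || !gst_video_info_from_caps (&vinfo, caps))
    return;

  if (gst_video_frame_map (&vframe, &vinfo, buffer, GST_MAP_READ)) {
    /* Stride-aware access to the first (Y) plane. */
    guint8 *y_plane = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
    gint y_stride   = GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);

    g_print ("%dx%d frame, Y stride %d\n",
        GST_VIDEO_FRAME_WIDTH (&vframe),
        GST_VIDEO_FRAME_HEIGHT (&vframe), y_stride);
    (void) y_plane;

    gst_video_frame_unmap (&vframe);
  }
}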

As for cv::Mat, maybe have a look at the GStreamer OpenCV plugin; there are a bunch of elements that use cv::Mat as well.
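
For getting the data into a cv::Mat, one possible sketch (assuming the appsink has been given plain video/x-raw, format=BGR caps so a single packed plane maps directly onto CV_8UC3; clone() the Mat if it must outlive the mapped frame):

#include <opencv2/core/core.hpp>
#include <gst/video/video.h>

// Wrap plane 0 of a mapped BGR GstVideoFrame without copying.
// The returned Mat is only valid until gst_video_frame_unmap().
static cv::Mat
wrap_bgr_frame (GstVideoFrame *vframe)
{
  return cv::Mat (GST_VIDEO_FRAME_HEIGHT (vframe),
                  GST_VIDEO_FRAME_WIDTH (vframe),
                  CV_8UC3,
                  GST_VIDEO_FRAME_PLANE_DATA (vframe, 0),
                  GST_VIDEO_FRAME_PLANE_STRIDE (vframe, 0));
}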


Cheers
-Tim

-- 
Tim Müller, Centricular Ltd - http://www.centricular.com

Re: Read frames from GStreamer pipeline in opencv (cv::Mat)

pchaurasia
Hi Tim,
Thanks for your response and help.

I expect map_info.size to give me the frame size as per:

 filtercaps = gst_caps_from_string("video/x-raw(memory:NVMM), width=(int)1920, height=(int)1080, format=(string)I420, framerate=(fraction)30/1 ")

Thus, I am expecting the size to be 1920*1080*1.5 = 3110400 bytes (I420 format). However, the value of map_info.size that I am printing is about 776 bytes. Sorry, I should have provided this information along with my problem statement. I will try the other suggestions and update.
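
For what it's worth, the expected size for those caps can also be computed with GstVideoInfo rather than by hand (a sketch; for unpadded 1920x1080 I420 this gives 3110400 bytes):

#include <gst/gst.h>
#include <gst/video/video.h>

/* Sketch: print the raw frame size GStreamer expects for 1080p I420. */
static void
print_expected_size (void)
{
  GstVideoInfo vinfo;
  GstCaps *caps = gst_caps_from_string (
      "video/x-raw, format=I420, width=1920, height=1080, framerate=30/1");

  if (gst_video_info_from_caps (&vinfo, caps))
    g_print ("expected frame size: %" G_GSIZE_FORMAT " bytes\n",
        GST_VIDEO_INFO_SIZE (&vinfo));
  gst_caps_unref (caps);
}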

Thanks,

Re: Read frames from GStreamer pipeline in opencv (cv::Mat)

Tim Müller
On Tue, 2017-05-30 at 10:08 -0700, pchaurasia wrote:

Hi,

> I expect map_info.size to give me frame size as per - 
>
>  filtercaps = gst_caps_from_string("video/x-raw(memory:NVMM),
> width=(int)1920, height=(int)1080, format=(string)I420,
> framerate=(fraction)30/1 ")
>
> Thus, I am expecting size to be 1920*1080*1.5 (420 format). However,
> the value of map_info.size, that I am printing/reading is about 776
> bytes.

Ah, that's a bit odd; it looks like a bug or oversight in the way NVIDIA
have implemented this.

Have you tried using an 'nvvidconv' element after your source? I believe
that should convert it.
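
A sketch of what that could look like (element names as used in this thread; the caps after nvvidconv are an assumption and deliberately omit memory:NVMM so the appsink receives system-memory buffers):

/* Sketch: nvcamerasrc (NVMM) -> nvvidconv -> capsfilter (system memory) -> appsink */
static gboolean
build_capture_branch (GstElement *pipeline, GstElement *appsink)
{
  GstElement *source  = gst_element_factory_make ("nvcamerasrc", "source");
  GstElement *convert = gst_element_factory_make ("nvvidconv", "convert");
  GstElement *capsf   = gst_element_factory_make ("capsfilter", "sysmem-caps");
  GstCaps *sys_caps;

  if (!source || !convert || !capsf)
    return FALSE;

  sys_caps = gst_caps_from_string (
      "video/x-raw, format=I420, width=1920, height=1080, framerate=30/1");
  g_object_set (capsf, "caps", sys_caps, NULL);
  gst_caps_unref (sys_caps);

  gst_bin_add_many (GST_BIN (pipeline), source, convert, capsf, appsink, NULL);
  return gst_element_link_many (source, convert, capsf, appsink, NULL);
}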

Cheers
-Tim

--
Tim Müller, Centricular Ltd - http://www.centricular.com

Re: Read frames from GStreamer pipeline in opencv (cv::Mat)

pchaurasia
Hi Tim,

I tried nvvidconv, although without success (map_info.size is still 776 bytes instead of 1920*1080*1.5).

Thanks,

Following is my code.

#include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv/cv.h"
#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gst/gstelement.h>
#include <gst/video/video.h>
#include <stdio.h>
#include <stdlib.h>

#include <unistd.h>
#include <pthread.h>


using namespace cv;
GstSample* buffer;        
cv::Mat frame;
GstVideoInfo vinfo;
int sampleno = 0;

GstFlowReturn CaptureGstBuffer(GstAppSink *sink, gpointer user_data)
{            

  //prog_data* pd = (prog_data*)user_data;


  GstSample* sample = gst_app_sink_pull_sample(sink);

  if(sample == NULL) {
    return GST_FLOW_ERROR;
  }

  GstBuffer* buffer = gst_sample_get_buffer(sample);
  GstMapInfo map_info;


  if (!gst_buffer_map ((buffer), &map_info, GST_MAP_READ)) {
    gst_buffer_unmap ((buffer), &map_info);
    gst_sample_unref(sample);
    return GST_FLOW_ERROR;
  }

  //render using map_info.data
//  frame = Mat::zeros(1080, 1920, CV_8UC3);
 // frame = cv::Mat(1080, 1920, CV_8UC3, (char *)map_info.data, cv::Mat::AUTO_STEP);
  //memcpy(frame.data,map_info.data,map_info.size);

    //Mat grey;
    //cvtColor(frame, grey, CV_BGR2GRAY);
 

//if (!frame.empty())
//  imshow("test-gstreamer-video",grey);
//  waitKey(1);
  GstVideoFrame vframe;
  if (gst_video_frame_map (&vframe, &vinfo, buffer, GST_MAP_READ)) {
     fprintf(stderr,"I am able to map vframe\n");
     gst_video_frame_unmap (&vframe);
  }
 
  fprintf(stderr,"Got sample no  %d  %d\n",sampleno++,(int)map_info.size);

  gst_buffer_unmap ((buffer), &map_info);
  //gst_memory_unmap(memory, &map_info);
  //gst_memory_unref(memory);
  gst_sample_unref(sample);

  return GST_FLOW_OK;
}
 

int main(int argc, char *argv[]) {
  GstElement *pipeline, *source, *caps, *convert, *sink, *capssrc;
  GstBus *bus;
  GstCaps *filtercaps, *srcfiltercaps;
  GstElement *tee, *vq1;
  GstMessage *msg;
  GstStateChangeReturn ret;
  GstPad      *srcpad,*sinkpad;


  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  source        = gst_element_factory_make ("nvcamerasrc", "source");
  sink          = gst_element_factory_make ("appsink", "sink");
  convert       = gst_element_factory_make ("nvvidconv","videoconvert");
 
  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");

  if (!pipeline || !source || !sink || !convert ) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }


  caps    = gst_element_factory_make ("capsfilter", "filter");
  capssrc = gst_element_factory_make ("capsfilter", "filter1");
  g_assert (caps != NULL); /* should always exist */
  g_assert (capssrc != NULL); /* should always exist */


  srcfiltercaps = gst_caps_from_string("video/x-raw, width=(int)1920, height=(int)1080, format=(string)UYVY, framerate=(fraction)30/1 ");
  filtercaps    = gst_caps_from_string("video/x-raw(memory:NVMM), width=(int)1920, height=(int)1080, format=(string)I420, framerate=(fraction)30/1 ");
  g_object_set (G_OBJECT (capssrc), "caps-src", srcfiltercaps, NULL);
  g_object_set (G_OBJECT (caps), "caps", filtercaps, NULL);
  gst_app_sink_set_caps(GST_APP_SINK(sink),filtercaps);

  gst_video_info_init(&vinfo);
  if (!gst_video_info_from_caps(&vinfo,filtercaps)){
    g_printerr ("Unable to find video info from caps\n");
    return -1;
  }
  gst_caps_unref (filtercaps);
  gst_caps_unref (srcfiltercaps);


  /* Modify the source's properties */
  //g_object_set (source, "pattern", 0, NULL);
  g_object_set (sink, "drop" , TRUE, NULL);
  g_object_set (sink, "new_sample" , FALSE, NULL);
  g_object_set (sink, "max-buffers" , 1, NULL);

   GstAppSinkCallbacks* appsink_callbacks = (GstAppSinkCallbacks*)malloc(sizeof(GstAppSinkCallbacks));
   appsink_callbacks->eos = NULL;
   appsink_callbacks->new_preroll = NULL;
   appsink_callbacks->new_sample = CaptureGstBuffer;
   gst_app_sink_set_callbacks(GST_APP_SINK(sink), appsink_callbacks,   (gpointer)NULL, free);

  gst_app_sink_set_emit_signals((GstAppSink*)sink,false);



  /* Build the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), source, capssrc, convert, caps, sink, NULL);
  if (gst_element_link_many (source,convert,caps,sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked1.\n");
    gst_object_unref (pipeline);
    return -1;
  }


#if 1

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;

    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* We should not reach here because we only asked for ERRORs and EOS */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
#endif
  return 0;
}

Re: Read frames from GStreamer pipeline in opencv (cv::Mat)

Tim Müller
On Wed, 2017-05-31 at 12:58 -0700, pchaurasia wrote:

Hi,

> I tried nvvidconv. Although without success (i.e. the map_info.size
> is 776 bytes instead of 1920*1080*1.5). 
> ...
>   filtercaps    = gst_caps_from_string("video/x-raw(memory:NVMM),
> width=(int)1920, height=(int)1080, format=(string)I420,
> framerate=(fraction)30/1 ");
> ..
>   gst_app_sink_set_caps(GST_APP_SINK(sink),filtercaps);

I think you want caps without the 'memory:NVMM' here (at the sink /
after nvvidconv), to force nvvidconv to convert to system memory?
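
In code, that could mean keeping the memory:NVMM caps only upstream of nvvidconv and giving the appsink plain system-memory caps, roughly like this (a sketch):

#include <gst/app/gstappsink.h>

static void
configure_caps (GstElement *caps_nvmm_filter, GstElement *appsink)
{
  /* Upstream of nvvidconv: device (NVMM) memory. */
  GstCaps *nvmm_caps = gst_caps_from_string (
      "video/x-raw(memory:NVMM), format=I420, width=1920, "
      "height=1080, framerate=30/1");

  /* On the appsink, after nvvidconv: plain system memory, no (memory:NVMM). */
  GstCaps *sys_caps = gst_caps_from_string (
      "video/x-raw, format=I420, width=1920, height=1080, framerate=30/1");

  g_object_set (caps_nvmm_filter, "caps", nvmm_caps, NULL);
  gst_app_sink_set_caps (GST_APP_SINK (appsink), sys_caps);

  gst_caps_unref (nvmm_caps);
  gst_caps_unref (sys_caps);
}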

Cheers
 -Tim

--
Tim Müller, Centricular Ltd - http://www.centricular.com

Re: Read frames from GStreamer pipeline in opencv (cv::Mat)

pchaurasia
Hi Tim

I tried:

1. Removing NVMM.
2. Trying it out with videotestsrc (upon a suggestion from Martin).

However, with both #1 and #2 the problem still persists. I feel there is something fundamentally wrong, either in my code below or in one of the NVIDIA plugins.

Thanks


#include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv/cv.h"
#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gst/gstelement.h>
#include <gst/video/video.h>
#include <stdio.h>
#include <stdlib.h>

#include <unistd.h>
#include <pthread.h>


using namespace cv;
GstSample* buffer;        
cv::Mat frame;
GstVideoInfo vinfo;
int sampleno = 0;

GstFlowReturn CaptureGstBuffer(GstAppSink *sink, gpointer user_data)
{            

  //prog_data* pd = (prog_data*)user_data;


  GstSample* sample = gst_app_sink_pull_sample(sink);

  if(sample == NULL) {
    return GST_FLOW_ERROR;
  }

  GstBuffer* buffer = gst_sample_get_buffer(sample);
  GstMapInfo map_info;


  if (!gst_buffer_map ((buffer), &map_info, GST_MAP_READ)) {
    gst_buffer_unmap ((buffer), &map_info);
    gst_sample_unref(sample);
    return GST_FLOW_ERROR;
  }

  //render using map_info.data
//  frame = Mat::zeros(1080, 1920, CV_8UC3);
 // frame = cv::Mat(1080, 1920, CV_8UC3, (char *)map_info.data, cv::Mat::AUTO_STEP);
  //memcpy(frame.data,map_info.data,map_info.size);

    //Mat grey;
    //cvtColor(frame, grey, CV_BGR2GRAY);
 

//if (!frame.empty())
//  imshow("test-gstreamer-video",grey);
//  waitKey(1);
//  GstVideoFrame vframe;
//  if (gst_video_frame_map (&vframe, &vinfo, buffer, GST_MAP_READ)) {
//     fprintf(stderr,"I am able to map vframe\n");
//     gst_video_frame_unmap (&vframe);
//  }
 
  fprintf(stderr,"Got sample no  %d  %d\n",sampleno++,(int)map_info.size);

  gst_buffer_unmap ((buffer), &map_info);
  //gst_memory_unmap(memory, &map_info);
  //gst_memory_unref(memory);
  gst_sample_unref(sample);

  return GST_FLOW_OK;
}
 

int main(int argc, char *argv[]) {
  GstElement *pipeline, *source, *convert, *sink, *capssrc;
  GstBus *bus;
  GstCaps *filtercaps, *srcfiltercaps;
  GstElement *tee, *vq1;
  GstMessage *msg;
  GstStateChangeReturn ret;
  GstPad      *srcpad,*sinkpad;


  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  source        = gst_element_factory_make ("videotestsrc", "source");
  sink          = gst_element_factory_make ("ximagesink", "sink");
  convert       = gst_element_factory_make ("nvvidconv","videoconvert");
 
  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");

  if (!pipeline || !source || !sink || !convert ) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }



  capssrc = gst_element_factory_make ("capsfilter", "filter1");
  g_assert (capssrc != NULL); /* should always exist */


  srcfiltercaps = gst_caps_from_string("video/x-raw, width=(int)1920, height=(int)1080, format=(string)I420");
  g_object_set (G_OBJECT (capssrc), "caps-src", srcfiltercaps, NULL);


  //gst_video_info_init(&vinfo);
  //if (!gst_video_info_from_caps(&vinfo,filtercaps)){
  //  g_printerr ("Unable to find video info from caps\n");
  //  return -1;
  //}
  //gst_caps_unref (filtercaps);
  gst_caps_unref (srcfiltercaps);


  /* Modify the source's properties */
  //g_object_set (source, "pattern", 0, NULL);
  g_object_set (sink, "drop" , TRUE, NULL);
  g_object_set (sink, "new_sample" , FALSE, NULL);
  g_object_set (sink, "max-buffers" , 1, NULL);

   GstAppSinkCallbacks* appsink_callbacks = (GstAppSinkCallbacks*)malloc(sizeof(GstAppSinkCallbacks));
   appsink_callbacks->eos = NULL;
   appsink_callbacks->new_preroll = NULL;
   appsink_callbacks->new_sample = CaptureGstBuffer;
   gst_app_sink_set_callbacks(GST_APP_SINK(sink), appsink_callbacks,   (gpointer)NULL, free);

  gst_app_sink_set_emit_signals((GstAppSink*)sink,false);



  /* Build the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), source, capssrc, convert, sink, NULL);
  if (gst_element_link_many (source,capssrc,sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked1.\n");
    gst_object_unref (pipeline);
    return -1;
  }


#if 1

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;

    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* We should not reach here because we only asked for ERRORs and EOS */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
#endif
  return 0;
}
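
For comparison, here is a minimal self-contained videotestsrc-to-appsink program (an editor's sketch, not code from this thread) that prints map_info.size for each frame; for unpadded 640x480 I420 the expected value is 640*480*3/2 = 460800 bytes.

/* Build with:
 *   gcc appsink-size.c -o appsink-size \
 *       $(pkg-config --cflags --libs gstreamer-1.0 gstreamer-app-1.0)
 */
#include <gst/gst.h>
#include <gst/app/gstappsink.h>

static GstFlowReturn
on_new_sample (GstAppSink *sink, gpointer user_data)
{
  GstSample *sample = gst_app_sink_pull_sample (sink);
  GstBuffer *buffer;
  GstMapInfo map_info;

  if (sample == NULL)
    return GST_FLOW_ERROR;

  buffer = gst_sample_get_buffer (sample);
  if (gst_buffer_map (buffer, &map_info, GST_MAP_READ)) {
    g_print ("buffer of %" G_GSIZE_FORMAT " bytes\n", map_info.size);
    gst_buffer_unmap (buffer, &map_info);
  }
  gst_sample_unref (sample);
  return GST_FLOW_OK;
}

int
main (int argc, char *argv[])
{
  GstElement *pipeline, *source, *capsf, *sink;
  GstCaps *caps;
  GstAppSinkCallbacks callbacks = { NULL, NULL, on_new_sample };
  GstBus *bus;
  GstMessage *msg;

  gst_init (&argc, &argv);

  source   = gst_element_factory_make ("videotestsrc", "source");
  capsf    = gst_element_factory_make ("capsfilter", "caps");
  sink     = gst_element_factory_make ("appsink", "sink");
  pipeline = gst_pipeline_new ("size-test");

  if (!pipeline || !source || !capsf || !sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  caps = gst_caps_from_string ("video/x-raw, format=I420, width=640, height=480");
  g_object_set (capsf, "caps", caps, NULL);
  gst_caps_unref (caps);

  g_object_set (source, "num-buffers", 100, NULL);   /* stop after 100 frames */
  g_object_set (sink, "max-buffers", 1, "drop", TRUE, NULL);
  gst_app_sink_set_callbacks (GST_APP_SINK (sink), &callbacks, NULL, NULL);

  gst_bin_add_many (GST_BIN (pipeline), source, capsf, sink, NULL);
  if (!gst_element_link_many (source, capsf, sink, NULL)) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until error or EOS, as in the thread's examples. */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      (GstMessageType) (GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
  if (msg != NULL)
    gst_message_unref (msg);

  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}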