NVMM memory

Hi Folks,

Where can we learn more about NVMM memory, in terms of its architectural location — on the Tegra SoC or in external DDR? I would like to understand where buffers are created when we use GStreamer caps like -

filtercaps = gst_caps_from_string("video/x-raw(memory:NVMM), width=(int)1920, height=(int)1080, format=(string)I420, framerate=(fraction)30/1 ");

I need to transfer contents of Gstreamer - GstBuffer to opencv CV::Mat. While doing so I run into problems/issues related to NVMM (I think).

Here is the scoop.

My GStreamer pipeline looks like -

nvcamerasrc --> caps --> tee --> queue --> appsink
|
--> encoder_q --> encoder --> parser --> mux --> fsink

When I read GStreamer buffer from appsink and convert it to cv::Mat - I get segmentation fault while accessing cv::Mat. However If I use following pipeline -

nvcamerasrc → caps → tee → queue → nvoverlaysink
|
→ encoder_q → encoder → parser → mux → fsink

Then I do not get segmentation fault while accessing cv::Mat. Could someone please explain ?

Thanks

Hi dumbogeorge,

Similar topic here:

https://devtalk.nvidia.com/default/topic/987537/videocapture-fails-to-open-onboard-camera-l4t-24-2-1-opencv-3-1/

Hope this could help.

Hi Wayne,
I do not get enough insight from the thread you quoted about NVMM, and about the differences that I observed in my pipeline with nvoverlaysink versus appsink.
Could you please help ?
Thanks

Hi dumbogeorge,

appsink is an interface sink for openCV pipeline to get the frame from gst ----> nvcamerasrc.

How do you get cv::Mat in the below pipeline?

Hi Wayne,

I am attaching code below to clarify how frames are loaded into cv::Mat and later used for processing.

  1. I am draining data off of “sink” using a callback ( gst_app_sink_pull_sample ).
  2. I get segfault with my code when -
sink          = gst_element_factory_make ("appsink", "sink");

and there is no segfault when

sink          = gst_element_factory_make ("nvoverlaysink", "sink");

The segfault happens at imshow(), call.

Thanks,

#include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv/cv.h"
#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gst/gstelement.h>
#include <stdio.h>
#include <stdlib.h>

#include <unistd.h>
#include <pthread.h>


using namespace cv;
// File-scope state shared with the appsink callback (single pipeline, single thread assumed).
GstSample* buffer;        // NOTE(review): never used -- CaptureGstBuffer shadows it with a local
cv::Mat frame;            // frame built from the mapped GstBuffer in CaptureGstBuffer

int sampleno = 0;         // running count of samples pulled from appsink
/* appsink "new-sample" callback: pull one sample, convert the mapped I420
 * payload to BGR and display it.
 *
 * BUG FIXES vs. original:
 *  - gst_buffer_unmap() was called on a buffer whose gst_buffer_map() had
 *    FAILED (undefined behavior).
 *  - The mapped data was wrapped as CV_8UC3 (3 bytes/pixel) although the
 *    negotiated caps are I420 (1.5 bytes/pixel), and then memcpy'd onto
 *    itself (frame.data == map_info.data).  imshow() therefore read far
 *    past the end of the mapping -- the reported segfault.  We now wrap
 *    the data with its true I420 geometry and convert to BGR. */
GstFlowReturn CaptureGstBuffer(GstAppSink *sink, gpointer user_data)
{
  GstSample* sample = gst_app_sink_pull_sample(sink);
  if (sample == NULL) {
    return GST_FLOW_ERROR;
  }

  GstBuffer* buf = gst_sample_get_buffer(sample);
  GstMapInfo map_info;

  if (!gst_buffer_map(buf, &map_info, GST_MAP_READ)) {
    /* do NOT unmap: the map failed, there is nothing to undo */
    gst_sample_unref(sample);
    return GST_FLOW_ERROR;
  }

  const int width = 1920, height = 1080;           /* matches the capsfilter */
  if (map_info.size >= (size_t)(width * height * 3 / 2)) {
    /* I420: Y plane (h rows) + U,V planes (h/2 rows total) => h*3/2 rows. */
    cv::Mat i420(height * 3 / 2, width, CV_8UC1, (void *)map_info.data);
    cv::cvtColor(i420, frame, cv::COLOR_YUV2BGR_I420);   /* deep copy into frame */

    if (!frame.empty())
      imshow("test-gstreamer-video", frame);
    waitKey(1);
  } else {
    /* An NVMM buffer arrives as a small hardware descriptor, not raw
     * pixels; displaying it would crash.  Skip the frame instead. */
    fprintf(stderr, "Unexpected buffer size %d, skipping frame\n", (int)map_info.size);
  }

  gst_buffer_unmap(buf, &map_info);
  gst_sample_unref(sample);

  fprintf(stderr,"Got sample no  %d\n",sampleno++);
  return GST_FLOW_OK;
}
 

int main(int argc, char *argv[]) {
  GstElement *pipeline, *source, *caps, *sink, *fsink;
  GstBus *bus;
  GstCaps *filtercaps;
  GstElement *tee, *encoder_q,*encoder_qmux, *vq1, *vq2;
  GstElement *encoder;
  GstElement *parser;
  GstElement *mux;
  GstMessage *msg;
  GstBin     *recorder;
  GstStateChangeReturn ret;
  GstPad      *srcpad,*sinkpad; 

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  source        = gst_element_factory_make ("nvcamerasrc", "source");
  sink          = gst_element_factory_make ("nvoverlaysink", "sink");
  tee           = gst_element_factory_make ("tee", "videotee");
  encoder_q     = gst_element_factory_make ("queue", "encoderq");
  encoder_qmux  = gst_element_factory_make ("queue", "muxq");
  vq1           = gst_element_factory_make ("queue", "q1");
  vq2           = gst_element_factory_make ("queue", "q2");
  encoder       = gst_element_factory_make ("omxh265enc" , "h265encoder");
  parser        = gst_element_factory_make ("h265parse", "parser-h265");
  mux           = gst_element_factory_make ("matroskamux", "muxer");
  fsink         = gst_element_factory_make ("filesink", "destination");  

  recorder = GST_BIN(gst_bin_new("recording-bin"));

  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");

  if (!pipeline || !source || !sink || !tee || !encoder_q || !vq1 || !vq2|| !encoder || !parser || !mux || !fsink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }


  caps = gst_element_factory_make ("capsfilter", "filter");
  g_assert (caps != NULL); /* should always exist */


  filtercaps = gst_caps_from_string("video/x-raw(memory:NVMM), width=(int)1920, height=(int)1080, format=(string)I420, framerate=(fraction)30/1 ");
  g_object_set (G_OBJECT (caps), "caps", filtercaps, NULL);
  gst_caps_unref (filtercaps);


  /* Modify the source's properties */
  //g_object_set (source, "pattern", 0, NULL);
  g_object_set (sink, "drop" , TRUE, NULL);
  g_object_set (sink, "emit-signals" , TRUE, NULL);
  g_object_set (sink, "max-buffers" , 1, NULL);
  g_object_set (encoder, "iframeinterval" , 24, "bitrate" , 10000000, NULL);
  g_object_set (mux,     "name" , "mux", NULL);
  g_object_set (fsink,   "location", "/home/ubuntu/cameracapture2.mkv", NULL);
  g_object_set (encoder_qmux,      "name", "queenc", NULL);


  /* connect appsink signals */
  /*if(g_signal_connect(sink, "new-sample", G_CALLBACK(CaptureGstBuffer), NULL) <= 0)
  {
      g_printerr("Could not connect signal handler.\n");
      exit(1);
  }*/


   GstAppSinkCallbacks* appsink_callbacks = (GstAppSinkCallbacks*)malloc(sizeof(GstAppSinkCallbacks));
   appsink_callbacks->eos = NULL;
   appsink_callbacks->new_preroll = NULL;
   appsink_callbacks->new_sample = CaptureGstBuffer;
   gst_app_sink_set_callbacks(GST_APP_SINK(sink), appsink_callbacks,   (gpointer)NULL, free);


  /* Build the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), source, caps, tee, vq1, sink , encoder_q, encoder, parser, mux, fsink, NULL);
  //if (gst_element_link_many (source,caps,tee, vq1, sink, NULL) != TRUE) {
  if (gst_element_link_many (source,caps,tee, NULL) != TRUE) {
    g_printerr ("Elements could not be linked1.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  srcpad                = gst_element_get_request_pad(tee,"src_%u");
  sinkpad               = gst_element_get_static_pad(encoder_q,"sink");
  gst_pad_link(srcpad,sinkpad);
  srcpad                = gst_element_get_request_pad(tee,"src_%u");
  sinkpad               = gst_element_get_static_pad(vq1,"sink");
  gst_pad_link(srcpad,sinkpad);
  gst_element_link(vq1,sink);



  if (gst_element_link_many (encoder_q,encoder, parser, NULL) != TRUE) {
    g_printerr ("Elements could not be linked1.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  if (!gst_element_link_pads(parser, "src", mux, "video_%u")) {
    g_printerr ("Elements could not be linked2.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  if (gst_element_link_many (mux,fsink ,NULL) != TRUE) {
    g_printerr ("Elements could not be linked3.\n");
    gst_object_unref (pipeline);
    return -1;
  }



  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;

    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* We should not reach here because we only asked for ERRORs and EOS */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}

dumbogeorge,
For quick check, which openCV version are you using?

Hi WayneWWW,
I am using 3.1.0.
Thanks

Hi Folks,

I am not able to get right frame size - from map_info in my code below. My ‘sink’ is ‘appsink’. I am expecting to read frame out of gst_app_sink_pull_sample. Its size should be 1920*1080, however, it is just about 768 bytes.

Would be great if someone could spot my mistake ?

Thanks,

GstSample* sample = gst_app_sink_pull_sample(sink);

  if(sample == NULL) {
    return GST_FLOW_ERROR;
  }

  GstBuffer* buffer = gst_sample_get_buffer(sample);

  //GstMemory* memory = gst_buffer_get_all_memory(buffer);
  GstMapInfo map_info;


  if (!gst_buffer_map ((buffer), &map_info, GST_MAP_READ)) {
    gst_buffer_unmap ((buffer), &map_info);
    gst_sample_unref(sample);
    return GST_FLOW_ERROR;
  }

Hi Nvidia folks,

Any idea why my GstMapInfo map_info.size would be wrong? Why would it read 768 bytes when I am programming the frame size to be 1080p? I am expecting the size to be 1920*1080*1.5 bytes, as per -

filtercaps = gst_caps_from_string("video/x-raw(memory:NVMM), width=(int)1920, height=(int)1080, format=(string)I420, framerate=(fraction)30/1 ");

Thanks

Hi,
Please use nvvidconv to get the frames:

Hi DaneLLL

I would like to extract the frame in C/C++ code for further processing. Any idea how to extract frames from xvimagesink, into cv::Mat ?

Thanks,

Hi,
In your case, it should be

Actually, this command line gets stuck and does not show anything.

gst-launch-1.0 nvcamerasrc ! nvvidconv ! 'video/x-raw,format=I420' ! appsink 
Setting pipeline to PAUSED ...

Available Sensor modes : 
3840 x 2160 FR=60.000000 CF=0xf09208a10 SensorModeType=4 CSIPixelBitDepth=10 DynPixelBitDepth=10
1920 x 1080 FR=60.000000 CF=0xf09208a10 SensorModeType=4 CSIPixelBitDepth=10 DynPixelBitDepth=10
1280 x 540 FR=240.000000 CF=0xf09208a10 SensorModeType=4 CSIPixelBitDepth=10 DynPixelBitDepth=10
Pipeline is live and does not need PREROLL ...
Setting pipeline to PLAYING ...
New clock: GstSystemClock

NvCameraSrc: Trying To Set Default Camera Resolution. Selected 640x480 FrameRate = 30.000000 ...

^Chandling interrupt.
Interrupt: Stopping pipeline ...
Execution ended after 0:00:11.576271032
Setting pipeline to PAUSED ...
Setting pipeline to READY ...
Setting pipeline to NULL ...
Freeing pipeline ...

Furthermore I tried this command line in my code below, but my frame size (i.e. the value of map_info.size) is coming out incorrect. It is 776 bytes instead of 1920*1080*1.5.

Any idea what could be wrong in my code ? Could it be an issue with one of nvidia plugins ?

#include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv/cv.h"
#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gst/gstelement.h>
#include <gst/video/video.h>
#include <stdio.h>
#include <stdlib.h>

#include <unistd.h>
#include <pthread.h>


using namespace cv;
// File-scope state shared with the appsink callback (single pipeline, single thread assumed).
GstSample* buffer;        // NOTE(review): never used -- the callback uses its own local
cv::Mat frame;            // destination frame (currently only written by commented-out code)
GstVideoInfo vinfo;       // video layout parsed from the filter caps in main()
int sampleno = 0;         // running count of samples pulled from appsink

/* appsink "new-sample" callback: pull a sample, map it read-only and log
 * the mapped size (diagnosing why map_info.size is ~768 bytes instead of
 * 1920*1080*1.5 -- see the NVMM discussion in this thread).
 *
 * BUG FIX vs. original: gst_buffer_unmap() was called in the branch where
 * gst_buffer_map() had FAILED; unmapping an unmapped buffer is undefined
 * behavior.  Dead commented-out rendering code removed. */
GstFlowReturn CaptureGstBuffer(GstAppSink *sink, gpointer user_data)
{
  GstSample* sample = gst_app_sink_pull_sample(sink);

  if(sample == NULL) {
    return GST_FLOW_ERROR;
  }

  GstBuffer* buffer = gst_sample_get_buffer(sample);
  GstMapInfo map_info;

  if (!gst_buffer_map ((buffer), &map_info, GST_MAP_READ)) {
    /* nothing to unmap: the map failed */
    gst_sample_unref(sample);
    return GST_FLOW_ERROR;
  }

  /* Diagnostic: vinfo was derived from the NVMM caps in main(); if the
   * buffer really carried raw I420 pixels this map would succeed. */
  GstVideoFrame vframe;
  if (gst_video_frame_map (&vframe, &vinfo, buffer, GST_MAP_READ)) {
     fprintf(stderr,"I am able to map vframe\n");
     gst_video_frame_unmap (&vframe);
  }

  fprintf(stderr,"Got sample no  %d  %d\n",sampleno++,(int)map_info.size);

  gst_buffer_unmap ((buffer), &map_info);
  gst_sample_unref(sample);

  return GST_FLOW_OK;
}
 

int main(int argc, char *argv[]) {
  GstElement *pipeline, *source, *caps, *convert, *sink, *capssrc;
  GstBus *bus;
  GstCaps *filtercaps, *srcfiltercaps;
  GstElement *tee, *vq1;
  GstMessage *msg;
  GstStateChangeReturn ret;
  GstPad      *srcpad,*sinkpad; 


  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  source        = gst_element_factory_make ("nvcamerasrc", "source");
  sink          = gst_element_factory_make ("appsink", "sink");
  convert       = gst_element_factory_make ("nvvidconv","videoconvert");
 
  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");

  if (!pipeline || !source || !sink || !convert ) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }


  caps    = gst_element_factory_make ("capsfilter", "filter");
  capssrc = gst_element_factory_make ("capsfilter", "filter1");
  g_assert (caps != NULL); /* should always exist */
  g_assert (capssrc != NULL); /* should always exist */


  srcfiltercaps = gst_caps_from_string("video/x-raw, width=(int)1920, height=(int)1080, format=(string)UYVY, framerate=(fraction)30/1 ");
  filtercaps    = gst_caps_from_string("video/x-raw(memory:NVMM), width=(int)1920, height=(int)1080, format=(string)I420, framerate=(fraction)30/1 ");
  g_object_set (G_OBJECT (capssrc), "caps-src", srcfiltercaps, NULL);
  g_object_set (G_OBJECT (caps), "caps", filtercaps, NULL);
  gst_app_sink_set_caps(GST_APP_SINK(sink),filtercaps);

  gst_video_info_init(&vinfo);
  if (!gst_video_info_from_caps(&vinfo,filtercaps)){
    g_printerr ("Unable to find video info from caps\n");
    return -1;
  }
  gst_caps_unref (filtercaps);
  gst_caps_unref (srcfiltercaps);


  /* Modify the source's properties */
  //g_object_set (source, "pattern", 0, NULL);
  g_object_set (sink, "drop" , TRUE, NULL);
  g_object_set (sink, "new_sample" , FALSE, NULL);
  g_object_set (sink, "max-buffers" , 1, NULL);

   GstAppSinkCallbacks* appsink_callbacks = (GstAppSinkCallbacks*)malloc(sizeof(GstAppSinkCallbacks));
   appsink_callbacks->eos = NULL;
   appsink_callbacks->new_preroll = NULL;
   appsink_callbacks->new_sample = CaptureGstBuffer;
   gst_app_sink_set_callbacks(GST_APP_SINK(sink), appsink_callbacks,   (gpointer)NULL, free);

  gst_app_sink_set_emit_signals((GstAppSink*)sink,false);



  /* Build the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), source, capssrc, convert, caps, sink, NULL);
  if (gst_element_link_many (source,convert,capssrc,sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked1.\n");
    gst_object_unref (pipeline);
    return -1;
  }


#if 1

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;

    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* We should not reach here because we only asked for ERRORs and EOS */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
#endif
  return 0;
}

Hi,
The issue in your code is you have to configure output cap of nvvidconv as “video/x-raw, width=(int)1920, height=(int)1080, format=(string)I420”

Hi DaneLLL

I changed the caps per your suggestion. It does not seem to fix the problem. The observation about map_info.size is same.

Thanks,

The code is below -

#include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv/cv.h"
#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gst/gstelement.h>
#include <gst/video/video.h>
#include <stdio.h>
#include <stdlib.h>

#include <unistd.h>
#include <pthread.h>


using namespace cv;
// File-scope state shared with the appsink callback (single pipeline, single thread assumed).
GstSample* buffer;        // NOTE(review): never used -- the callback uses its own local
cv::Mat frame;            // destination frame (only referenced by commented-out code)
GstVideoInfo vinfo;       // video layout; initialization is commented out in this revision
int sampleno = 0;         // running count of samples pulled from appsink

/* appsink "new-sample" callback: pull a sample, map it read-only and log
 * the sample index plus mapped byte count.
 *
 * BUG FIX vs. original: gst_buffer_unmap() was called in the branch where
 * gst_buffer_map() had FAILED; unmapping an unmapped buffer is undefined
 * behavior.  Large blocks of dead commented-out code removed. */
GstFlowReturn CaptureGstBuffer(GstAppSink *sink, gpointer user_data)
{
  GstSample* sample = gst_app_sink_pull_sample(sink);

  if(sample == NULL) {
    return GST_FLOW_ERROR;
  }

  GstBuffer* buffer = gst_sample_get_buffer(sample);
  GstMapInfo map_info;

  if (!gst_buffer_map ((buffer), &map_info, GST_MAP_READ)) {
    /* nothing to unmap: the map failed */
    gst_sample_unref(sample);
    return GST_FLOW_ERROR;
  }

  fprintf(stderr,"Got sample no  %d  %d\n",sampleno++,(int)map_info.size);

  gst_buffer_unmap ((buffer), &map_info);
  gst_sample_unref(sample);

  return GST_FLOW_OK;
}
 

int main(int argc, char *argv[]) {
  GstElement *pipeline, *source, *convert, *sink, *capssrc;
  GstBus *bus;
  GstCaps *filtercaps, *srcfiltercaps;
  GstElement *tee, *vq1;
  GstMessage *msg;
  GstStateChangeReturn ret;
  GstPad      *srcpad,*sinkpad; 


  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  source        = gst_element_factory_make ("nvcamerasrc", "source");
  sink          = gst_element_factory_make ("appsink", "sink");
  convert       = gst_element_factory_make ("nvvidconv","videoconvert");
 
  /* Create the empty pipeline */
  pipeline = gst_pipeline_new ("test-pipeline");

  if (!pipeline || !source || !sink || !convert ) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }



  capssrc = gst_element_factory_make ("capsfilter", "filter1");
  g_assert (capssrc != NULL); /* should always exist */


  srcfiltercaps = gst_caps_from_string("video/x-raw, width=(int)1920, height=(int)1080, format=(string)I420");
  g_object_set (G_OBJECT (capssrc), "caps-src", srcfiltercaps, NULL);


  //gst_video_info_init(&vinfo);
  //if (!gst_video_info_from_caps(&vinfo,filtercaps)){
  //  g_printerr ("Unable to find video info from caps\n");
  //  return -1;
  //}
  //gst_caps_unref (filtercaps);
  gst_caps_unref (srcfiltercaps);


  /* Modify the source's properties */
  //g_object_set (source, "pattern", 0, NULL);
  g_object_set (sink, "drop" , TRUE, NULL);
  g_object_set (sink, "new_sample" , FALSE, NULL);
  g_object_set (sink, "max-buffers" , 1, NULL);

   GstAppSinkCallbacks* appsink_callbacks = (GstAppSinkCallbacks*)malloc(sizeof(GstAppSinkCallbacks));
   appsink_callbacks->eos = NULL;
   appsink_callbacks->new_preroll = NULL;
   appsink_callbacks->new_sample = CaptureGstBuffer;
   gst_app_sink_set_callbacks(GST_APP_SINK(sink), appsink_callbacks,   (gpointer)NULL, free);

  gst_app_sink_set_emit_signals((GstAppSink*)sink,false);



  /* Build the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), source, capssrc, convert, sink, NULL);
  if (gst_element_link_many (source,convert,capssrc,sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked1.\n");
    gst_object_unref (pipeline);
    return -1;
  }


#if 1

  /* Start playing */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

  /* Parse message */
  if (msg != NULL) {
    GError *err;
    gchar *debug_info;

    switch (GST_MESSAGE_TYPE (msg)) {
      case GST_MESSAGE_ERROR:
        gst_message_parse_error (msg, &err, &debug_info);
        g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
        g_clear_error (&err);
        g_free (debug_info);
        break;
      case GST_MESSAGE_EOS:
        g_print ("End-Of-Stream reached.\n");
        break;
      default:
        /* We should not reach here because we only asked for ERRORs and EOS */
        g_printerr ("Unexpected message received.\n");
        break;
    }
    gst_message_unref (msg);
  }

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
#endif
  return 0;
}

Furthermore, I was suspecting that nvcamerasrc could be an issue. So I tried “videotestsrc” (instead of nvcamerasrc) and the observation is same.

Are you able to get correct size of BGR if you run the case of reference post?

Sorry, I did not understand - “if you run the case of reference post” ?
With the attached code I am not able to get the correct size.

Please refer to the sample code attached and apply to your case.

ubuntu@tegra-ubuntu:~$ ./test
Using launch string: nvcamerasrc ! video/x-raw(memory:NVMM), width=1920, height=1080, framerate=30/1 ! nvvidconv ! video/x-raw, format=I420, width=1920, height=1080 ! appsink name=mysink

Available Sensor modes :
2592 x 1944 FR=30.000000 CF=0x1009208a10 SensorModeType=4 CSIPixelBitDepth=10 DynPixelBitDepth=10
2592 x 1458 FR=30.000000 CF=0x1009208a10 SensorModeType=4 CSIPixelBitDepth=10 DynPixelBitDepth=10
1280 x 720 FR=120.000000 CF=0x1009208a10 SensorModeType=4 CSIPixelBitDepth=10 DynPixelBitDepth=10

NvCameraSrc: Trying To Set Default Camera Resolution. Selected 1920x1080 FrameRate = 30.000000 ...

map.size = 3110400
map.size = 3110400
map.size = 3110400
map.size = 3110400
map.size = 3110400
(......skip)

README2.txt

# Install 
sudo apt-get install libegl1-mesa-dev
sudo apt-get install libgstreamer1.0-dev
sudo apt-get install libgstreamer-plugins-base1.0-dev

# compile with below command
g++ -Wall -std=c++11  test2.cpp -o test $(pkg-config --cflags --libs gstreamer-app-1.0) -ldl

test2.cpp

#include <cstdlib>
#include <iostream>
#include <sstream>
#include <thread>

#include <dlfcn.h>
#include <unistd.h>   /* sleep() -- previously relied on transitive inclusion */

#include <glib-unix.h>
#include <gst/gst.h>
#include <gst/gstinfo.h>
#include <gst/app/gstappsink.h>

using namespace std;

// Marks a variable as intentionally unused (suppresses -Wunused warnings).
#define USE(x) ((void)(x))

static GstPipeline *gst_pipeline = nullptr;  // pipeline built from launch_string in main()
static string launch_string;   // gst-launch-style description of the capture pipeline

/* appsink EOS callback: report end-of-stream on stdout.
 * Quitting the main loop is intentionally left disabled -- main() uses a
 * fixed sleep() instead of running the loop. */
static void appsink_eos(GstAppSink *appsink, gpointer user_data)
{
    (void)appsink;
    (void)user_data;
    printf("app sink receive eos\n");
    /* g_main_loop_quit (hpipe->loop); */
}

/* appsink "new-sample" callback: pull one sample via the pull-sample
 * signal, map its buffer read-only and print the mapped size.
 *
 * BUG FIXES vs. original:
 *  - NULL caps were reported but then dereferenced anyway through
 *    gst_caps_get_structure() -- guaranteed crash on that path.
 *  - gst_buffer_map() return value was ignored; an unmapped buffer was
 *    then unmapped (undefined behavior).
 *  - The unused gst_caps_get_structure() result is dropped. */
static GstFlowReturn new_buffer(GstAppSink *appsink, gpointer user_data)
{
    GstSample *sample = NULL;

    g_signal_emit_by_name (appsink, "pull-sample", &sample,NULL);

    if (sample)
    {
        GstBuffer *buffer = NULL;
        GstMapInfo map    = {0};

        GstCaps *caps = gst_sample_get_caps (sample);
        if (!caps)
        {
            printf("could not get snapshot format\n");
        }

        buffer = gst_sample_get_buffer (sample);
        if (gst_buffer_map (buffer, &map, GST_MAP_READ))
        {
            printf("map.size = %lu\n", map.size);
            gst_buffer_unmap(buffer, &map);
        }

        gst_sample_unref (sample);
    }
    else
    {
        g_print ("could not make snapshot\n");
    }

    return GST_FLOW_OK;
}

int main(int argc, char** argv) {
    USE(argc);
    USE(argv);

    gst_init (&argc, &argv);

    GMainLoop *main_loop;
    main_loop = g_main_loop_new (NULL, FALSE);
    ostringstream launch_stream;
    int w = 1920;
    int h = 1080;
    GstAppSinkCallbacks callbacks = {appsink_eos, NULL, new_buffer};

    launch_stream
    << "nvcamerasrc ! "
    << "video/x-raw(memory:NVMM), width="<< w <<", height="<< h <<", framerate=30/1 ! " 
    << "nvvidconv ! "
    << "video/x-raw, format=I420, width="<< w <<", height="<< h <<" ! "
    << "appsink name=mysink ";

    launch_string = launch_stream.str();

    g_print("Using launch string: %s\n", launch_string.c_str());

    GError *error = nullptr;
    gst_pipeline  = (GstPipeline*) gst_parse_launch(launch_string.c_str(), &error);

    if (gst_pipeline == nullptr) {
        g_print( "Failed to parse launch: %s\n", error->message);
        return -1;
    }
    if(error) g_error_free(error);

    GstElement *appsink_ = gst_bin_get_by_name(GST_BIN(gst_pipeline), "mysink");
    gst_app_sink_set_callbacks (GST_APP_SINK(appsink_), &callbacks, NULL, NULL);

    gst_element_set_state((GstElement*)gst_pipeline, GST_STATE_PLAYING); 

    sleep(10);
    //g_main_loop_run (main_loop);

    gst_element_set_state((GstElement*)gst_pipeline, GST_STATE_NULL);
    gst_object_unref(GST_OBJECT(gst_pipeline));
    g_main_loop_unref(main_loop);

    g_print("going to exit \n");
    return 0;
}