CLOSED: GStreamer encoding pipeline with frame processing using CUDA and libargus

Hi Nvidia,

I have tried your excellent syncSensor, gstEncode and cudaHistogram samples from the Tegra MM API separately. They work perfectly.

But I am interested in this pipeline, using GPU memory:

CameraFrame1 → \
                 → ProcessFrames → GstEncode
CameraFrame2 → /

Is this possible (excluding V4L2 and GStreamer data callbacks, using only libargus stream/consumer redirection)?

Best regards, Viktor.

Hi,
Please refer to the 10_camera_recording sample and the APIs in nvbuf_utils.h.

Yes, it records H.264.
But I need GStreamer for encoding, because I need an ALSA microphone source.

  1. Can I redirect the streams from two cameras into another stream for frame processing, and then push the processed frames as a final frame into a GStreamer pipeline (nveglstreamsrc together with alsasrc), using libargus?

I also saw libnvsample_cudaprocess.so for GStreamer frame processing, but it is for a single nvcamera source.
2. How can I add a second camera to this pipeline?

I also found the videomixer GStreamer element.
3. Will it work?
gst-launch -e videomixer name=mix ! nvtee ! alsasrc ! queue ! m. ! nvivafilter cuda-process=true pre-process=true post-process=true customer-lib-name="libnvsample_cudaprocess.so" ! 'video/x-raw(memory:NVMM), format=(string)I420' ! qtmux name=m ! filesink location=out.mp4
nvgstcamerasrc sensor-id=0 ! video/x-raw, framerate=5/1, width=640, height=480 ! videobox border-alpha=0 top=0 left=330 ! mix.
nvgstcamerasrc sensor-id=1 ! video/x-raw, framerate=5/1, width=640, height=480 ! mix.

Please refer to the patch:

diff --git a/multimedia_api/ll_samples/samples/10_camera_recording/Makefile b/multimedia_api/ll_samples/samples/10_camera_recording/Makefile
index ac0a494..ca019bb 100644
--- a/multimedia_api/ll_samples/samples/10_camera_recording/Makefile
+++ b/multimedia_api/ll_samples/samples/10_camera_recording/Makefile
@@ -41,14 +41,30 @@ SRCS := \
 
 OBJS := $(SRCS:.cpp=.o)
 
+OBJS += \
+    $(ALGO_CUDA_DIR)/NvAnalysis.o \
+    $(ALGO_CUDA_DIR)/NvCudaProc.o
+
+CPPFLAGS += \
+    -I"$(ALGO_CUDA_DIR)"
+
 CPPFLAGS += \
-	-I"$(ARGUS_UTILS_DIR)"
+	-I"$(ARGUS_UTILS_DIR)" \
+    `pkg-config --cflags gstreamer-app-1.0`
 
 LDFLAGS += \
-	-lnveglstream_camconsumer -largus
+	-lnveglstream_camconsumer -largus \
+    `pkg-config --libs gstreamer-app-1.0`
+
+LDFLAGS += \
+       -L"$(CUDA_PATH)/targets/aarch64-linux/lib" \
+       -lcuda -lcudart
 
 all: $(APP)
 
+$(ALGO_CUDA_DIR)/%.o: $(ALGO_CUDA_DIR)/%.cu
+	$(MAKE) -C $(ALGO_CUDA_DIR)
+
 $(CLASS_DIR)/%.o: $(CLASS_DIR)/%.cpp
 	$(MAKE) -C $(CLASS_DIR)
 
diff --git a/multimedia_api/ll_samples/samples/10_camera_recording/main.cpp b/multimedia_api/ll_samples/samples/10_camera_recording/main.cpp
index 369bbce..42d01fc 100644
--- a/multimedia_api/ll_samples/samples/10_camera_recording/main.cpp
+++ b/multimedia_api/ll_samples/samples/10_camera_recording/main.cpp
@@ -42,6 +42,9 @@
 #include <iostream>
 #include <fstream>
 
+#include <gst/app/gstappsrc.h>
+#include "NvCudaProc.h"
+
 using namespace Argus;
 using namespace EGLStream;
 
@@ -81,7 +84,7 @@ namespace ArgusSamples
 class ConsumerThread : public Thread
 {
 public:
-    explicit ConsumerThread(OutputStream* stream);
+    explicit ConsumerThread(OutputStream* stream, GstElement *appsrc_);
     ~ConsumerThread();
 
     bool isInError()
@@ -111,14 +114,22 @@ private:
     NvVideoEncoder *m_VideoEncoder;
     std::ofstream *m_outputFile;
     bool m_gotError;
+
+    GstElement *m_appsrc_;
+    GstClockTime timestamp;
+    EGLDisplay egl_display;
 };
 
-ConsumerThread::ConsumerThread(OutputStream* stream) :
+ConsumerThread::ConsumerThread(OutputStream* stream, GstElement *appsrc_) :
         m_stream(stream),
         m_VideoEncoder(NULL),
         m_outputFile(NULL),
-        m_gotError(false)
+        m_gotError(false),
+        m_appsrc_(appsrc_),
+        timestamp(0)
 {
+    egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+    eglInitialize(egl_display, NULL, NULL);
 }
 
 ConsumerThread::~ConsumerThread()
@@ -132,6 +143,8 @@ ConsumerThread::~ConsumerThread()
 
     if (m_outputFile)
         delete m_outputFile;
+
+    eglTerminate(egl_display);
 }
 
 bool ConsumerThread::threadInitialize()
@@ -253,10 +266,20 @@ bool ConsumerThread::threadExecute()
             ORIGINATE_ERROR("IImageNativeBuffer not supported by Image.");
         fd = iNativeBuffer->createNvBuffer(STREAM_SIZE,
                                            NvBufferColorFormat_YUV420,
-                                           NvBufferLayout_BlockLinear);
+                                           NvBufferLayout_Pitch);
         if (VERBOSE_ENABLE)
             CONSUMER_PRINT("Acquired Frame. %d\n", fd);
 
+        EGLImageKHR egl_image = NULL;
+        egl_image = NvEGLImageFromFd(egl_display, fd);
+        if (egl_image == NULL)
+        {
+            fprintf(stderr, "Error while mapping dmabuf fd (0x%X) to EGLImage\n",
+                     fd);
+        }
+        HandleEGLImage(&egl_image);
+        NvDestroyEGLImage(egl_display, egl_image);
+
         // Push the frame into V4L2.
         v4l2_buf.m.planes[0].m.fd = fd;
         v4l2_buf.m.planes[0].bytesused = 1; // byteused must be non-zero
@@ -372,10 +395,29 @@ bool ConsumerThread::encoderCapturePlaneDqCallback(struct v4l2_buffer *v4l2_buf,
         thiz->abort();
         ORIGINATE_ERROR("Failed to dequeue buffer from encoder capture plane");
     }
-
+#if 1
+    if (buffer->planes[0].bytesused > 0) {
+        GstBuffer *gstbuf;
+        GstMapInfo map = {0};
+        GstFlowReturn ret;
+        gstbuf = gst_buffer_new_allocate (NULL, buffer->planes[0].bytesused, NULL);
+        gstbuf->pts = thiz->timestamp;
+        thiz->timestamp += 33333333; // ns
+
+        gst_buffer_map (gstbuf, &map, GST_MAP_WRITE);
+        memcpy(map.data, buffer->planes[0].data , buffer->planes[0].bytesused);
+        gst_buffer_unmap(gstbuf, &map);
+
+        g_signal_emit_by_name (thiz->m_appsrc_, "push-buffer", gstbuf, &ret);
+        gst_buffer_unref(gstbuf);
+    } else {
+        gst_app_src_end_of_stream((GstAppSrc *)thiz->m_appsrc_);
+        sleep(1);
+    }
+#else
     thiz->m_outputFile->write((char *) buffer->planes[0].data,
                               buffer->planes[0].bytesused);
-
+#endif
     if (thiz->m_VideoEncoder->capture_plane.qBuffer(*v4l2_buf, NULL) < 0)
     {
         thiz->abort();
@@ -401,6 +443,25 @@ bool ConsumerThread::encoderCapturePlaneDqCallback(struct v4l2_buffer *v4l2_buf,
  ******************************************************************************/
 static bool execute()
 {
+    GMainLoop *main_loop;
+    GstPipeline *gst_pipeline = NULL;
+    GError *err = NULL;
+    GstElement *appsrc_;
+
+    gst_init (0, NULL);
+    main_loop = g_main_loop_new (NULL, FALSE);
+    char launch_string_[1024];
+
+    sprintf(launch_string_,
+            "appsrc name=mysource ! video/x-h264,width=%d,height=%d,stream-format=byte-stream !",
+            STREAM_SIZE.width(), STREAM_SIZE.height());
+    sprintf(launch_string_ + strlen(launch_string_),
+                " h264parse ! qtmux ! filesink location=a.mp4 ");
+    gst_pipeline = (GstPipeline*)gst_parse_launch(launch_string_, &err);
+    appsrc_ = gst_bin_get_by_name(GST_BIN(gst_pipeline), "mysource");
+    gst_app_src_set_stream_type(GST_APP_SRC(appsrc_), GST_APP_STREAM_TYPE_STREAM);
+    gst_element_set_state((GstElement*)gst_pipeline, GST_STATE_PLAYING);
+
     // Create the CameraProvider object and get the core interface.
     UniqueObj<CameraProvider> cameraProvider = UniqueObj<CameraProvider>(CameraProvider::create());
     ICameraProvider *iCameraProvider = interface_cast<ICameraProvider>(cameraProvider);
@@ -433,7 +494,7 @@ static bool execute()
 
     // Launch the FrameConsumer thread to consume frames from the OutputStream.
     PRODUCER_PRINT("Launching consumer thread\n");
-    ConsumerThread frameConsumerThread(outputStream.get());
+    ConsumerThread frameConsumerThread(outputStream.get(), appsrc_);
     PROPAGATE_ERROR(frameConsumerThread.initialize());
 
     // Wait until the consumer is connected to the stream.
@@ -470,6 +531,11 @@ static bool execute()
     // Wait for the consumer thread to complete.
     PROPAGATE_ERROR(frameConsumerThread.shutdown());
 
+    gst_element_set_state((GstElement*)gst_pipeline, GST_STATE_NULL);
+    gst_object_unref(GST_OBJECT(gst_pipeline));
+    g_main_loop_unref(main_loop);
+    gst_deinit();
+
     PRODUCER_PRINT("Done -- exiting.\n");
 
     return true;

This makes the camera capture and hardware encoding feed the GStreamer pipeline through appsrc.
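
For reference, the HandleEGLImage() hook wired in above (declared in NvCudaProc.h from the common sample code under samples/common/algorithm/cuda) maps the EGLImage into CUDA along these lines. This is only a simplified sketch, not the exact sample source; the kernel launch is a placeholder:

// Simplified sketch of the CUDA/EGL mapping done inside HandleEGLImage();
// a CUDA context must already be initialized.
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <cuda.h>
#include <cudaEGL.h>

static void processEglImageWithCuda(EGLImageKHR egl_image)
{
    CUgraphicsResource resource = NULL;
    CUeglFrame eglFrame;

    // Register the EGLImage with CUDA to get direct access to the frame planes.
    if (cuGraphicsEGLRegisterImage(&resource, egl_image,
                                   CU_GRAPHICS_MAP_RESOURCE_FLAGS_NONE) != CUDA_SUCCESS)
        return;

    if (cuGraphicsResourceGetMappedEglFrame(&eglFrame, resource, 0, 0) == CUDA_SUCCESS &&
        eglFrame.frameType == CU_EGL_FRAME_TYPE_PITCH)
    {
        // eglFrame.frame.pPitch[0] is the Y plane in device memory (pitch-linear,
        // which is why the patch switches createNvBuffer() to NvBufferLayout_Pitch).
        // A CUDA kernel runs here, e.g. the box drawing from NvAnalysis.cu or a
        // custom two-camera compose:
        // myKernel((unsigned char *)eglFrame.frame.pPitch[0],
        //          eglFrame.width, eglFrame.height, eglFrame.pitch);
    }

    cuCtxSynchronize();
    cuGraphicsUnregisterResource(resource);
}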

I have added a second camera OutputStream to ConsumerThread and alsasrc to the launch string. The resulting example can acquire frames from two cameras, process/compose them with CUDA in a single loop, and encode the final frame to MP4 together with the microphone audio via GStreamer.
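
A rough sketch of the two-camera part of that change, with hypothetical names (iFrameConsumer0/iFrameConsumer1 for the two stream consumers, composeFramesWithCuda() for the compose kernel); only the single-camera path appears in the patch above:

// Hypothetical sketch: one consumer loop acquiring a frame from each camera,
// composing them with CUDA, then queueing the composed buffer to the encoder.
UniqueObj<Frame> frame0(iFrameConsumer0->acquireFrame());
UniqueObj<Frame> frame1(iFrameConsumer1->acquireFrame());
IFrame *iFrame0 = interface_cast<IFrame>(frame0);
IFrame *iFrame1 = interface_cast<IFrame>(frame1);
if (!iFrame0 || !iFrame1)
    break;

// Export both images as pitch-linear dmabuf fds so CUDA can read/write them.
NV::IImageNativeBuffer *iBuf0 = interface_cast<NV::IImageNativeBuffer>(iFrame0->getImage());
NV::IImageNativeBuffer *iBuf1 = interface_cast<NV::IImageNativeBuffer>(iFrame1->getImage());
int fd0 = iBuf0->createNvBuffer(STREAM_SIZE, NvBufferColorFormat_YUV420, NvBufferLayout_Pitch);
int fd1 = iBuf1->createNvBuffer(STREAM_SIZE, NvBufferColorFormat_YUV420, NvBufferLayout_Pitch);

// Map both fds to EGLImages and run a compose kernel that writes the result into fd0.
EGLImageKHR img0 = NvEGLImageFromFd(egl_display, fd0);
EGLImageKHR img1 = NvEGLImageFromFd(egl_display, fd1);
composeFramesWithCuda(img0, img1);   // hypothetical CUDA routine (e.g. blend/side-by-side)
NvDestroyEGLImage(egl_display, img1);
NvDestroyEGLImage(egl_display, img0);

// fd0 now holds the composed frame; queue it to the encoder output plane
// exactly as in the single-camera patch.
v4l2_buf.m.planes[0].m.fd = fd0;
v4l2_buf.m.planes[0].bytesused = 1;
CHECK_ERROR(m_VideoEncoder->output_plane.qBuffer(v4l2_buf, NULL));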
Thank you.

Closed.

Hi vsw,

I went through your post and it helped me a lot in solving many of my issues.
Can you guide/help me to get the muxed data from the GStreamer pipeline (on TX2) into a local buffer?

Details are in the link below, with code.

The only modification I added is:

sprintf(launch_string_,
        "appsrc name=mysource ! video/x-h264,width=%d,height=%d,stream-format=byte-stream !",
        STREAM_SIZE.width(), STREAM_SIZE.height());
sprintf(launch_string_ + strlen(launch_string_),
        " h264parse ! flvmux name=mux alsasrc device=plughw:2 ! audioresample ! audio/x-raw,rate=48000,channels=1 ! queue ! voaacenc bitrate=32000 ! queue ! mux. mux.  ! queue ! filesink location=a.mp4 ");
printf("\n cmd of gstreamer = %s \n", launch_string_);

Kindly help,

Regards,
Raza

Hi,

I am using this:

std::string strRecord
    {
        "alsasrc device = default do-timestamp = true format = 3 is-live = true ! "
        "queue max-size-buffers = 0 max-size-time = 0 nax-size-bytes = 0 min-threshold-time = 0000000000 ! audioconvert ! "
        "queue ! audio/x-raw, format = S16LE, rate = 48000, channels = 2 ! "
        "queue ! voaacenc ! "
        "queue ! mux. "
        "appsrc name = videosrc do-timestamp = true format = 3 ! "
        "queue max-size-buffers = 0 max-size-time = 0 nax-size-bytes = 0 min-threshold-time = 0 ! video/x-h264, stream-format = byte-stream, width = " + std::to_string( videoWidth ) + ", height = " + std::to_string( videoHeight ) + " ! "
        "queue ! h264parse ! "
        "queue ! qtmux name = mux ! "
        "queue ! filesink location = " + fileName
    };

    main_loop = g_main_loop_new( NULL, FALSE );

    gst_pipeline = ( GstPipeline * ) gst_parse_launch( strRecord.c_str(), & err );

But it is not working very well yet.

Thanks a lot, vsw. I will try it and get back with results. Currently I am facing a sync issue because the recorded file reports a frame rate of 1000 fps.

Hi vsw,

I tried the above command and integrated it into both examples, 10_camera_recording and 12_camera_v4l2_cuda.

I am recording audio from a USB camera ("alsasrc device=plughw:2") and video from a Sony 4K camera for 12_camera_v4l2_cuda, and from the onboard camera for 10_camera_recording.

The good thing is that it is in lip sync, but there are problems:

  1. Muxing with qtmux to .mp4 is not working (the audio loop control does not progress), so I changed to flvmux for the .mp4 file format.

Note: I am copying the muxed output data to a local buffer and then writing it to the .mp4 file (using the FLV muxer).

  2. FPS is still showing 1000 in VLC.

But it solved the lip sync issue, thanks a lot.

I am still trying to refine the command below. Let me know if you come up with an updated command.

sprintf(launch_string_, "alsasrc device = plughw:2 do-timestamp = true format = 3 is-live = true ! "
    "queue max-size-buffers = 0 max-size-time = 0 nax-size-bytes = 0 min-threshold-time = 0000000000 ! audioconvert ! "
    "queue ! audio/x-raw, format = S16LE, rate = 48000, channels = 2 ! "
    "queue ! voaacenc ! "
    "queue ! mux. "
    "appsrc name = mysource do-timestamp = true format = 3 ! "
    "queue max-size-buffers = 0 max-size-time = 0 nax-size-bytes = 0 min-threshold-time = 0 ! video/x-h264, stream-format = byte-stream, width =%d, height =%d ! "
    "queue ! h264parse ! "
    "queue ! flvmux name = mux ! "
    "queue ! appsink name=sink",m_pstInputSrcData->nWidth, m_pstInputSrcData->nHeight);

For qtmux (it is better for a standard format) you need to remove all DTS timestamps in the need-data callback.
And don't forget to implement EOS handling for the whole pipeline. The NVIDIA sample only has the appsrc EOS; remove that as well.
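
A minimal sketch of those two changes against the first appsrc patch above (variable names follow that patch):

/* 1. When pushing encoded buffers, set only PTS and leave DTS unset so that
 *    qtmux can derive it (a newly allocated buffer's DTS is GST_CLOCK_TIME_NONE): */
gstbuf->pts = thiz->timestamp;
gstbuf->dts = GST_CLOCK_TIME_NONE;

/* 2. Before tearing the pipeline down, wait for the pipeline-wide EOS message
 *    on the bus rather than relying on the appsrc EOS alone: */
GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(gst_pipeline));
gst_bus_poll(bus, GST_MESSAGE_EOS, GST_CLOCK_TIME_NONE);
gst_object_unref(bus);
gst_element_set_state((GstElement *)gst_pipeline, GST_STATE_NULL);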

Hi vsw,
Thanks for the quick reply and the info.
I removed all DTS timestamps (GstClockTime timestamp;) but I am facing difficulty adding the EOS handling.
Any reference, please?

Sharing a patch for r28.2:

diff --git a/multimedia_api/ll_samples/samples/10_camera_recording/Makefile b/multimedia_api/ll_samples/samples/10_camera_recording/Makefile
index 2e14575..07f1ee7 100644
--- a/multimedia_api/ll_samples/samples/10_camera_recording/Makefile
+++ b/multimedia_api/ll_samples/samples/10_camera_recording/Makefile
@@ -41,14 +41,30 @@ SRCS := \
 
 OBJS := $(SRCS:.cpp=.o)
 
+OBJS += \
+    $(ALGO_CUDA_DIR)/NvAnalysis.o \
+    $(ALGO_CUDA_DIR)/NvCudaProc.o
+
+CPPFLAGS += \
+    -I"$(ALGO_CUDA_DIR)"
+
 CPPFLAGS += \
-	-I"$(ARGUS_UTILS_DIR)"
+	-I"$(ARGUS_UTILS_DIR)" \
+    `pkg-config --cflags gstreamer-1.0`
 
 LDFLAGS += \
-	-lnveglstream_camconsumer -largus
+	-lnveglstream_camconsumer -largus \
+    `pkg-config --libs gstreamer-app-1.0`
+
+LDFLAGS += \
+       -L"$(CUDA_PATH)/targets/aarch64-linux/lib" \
+       -lcuda -lcudart
 
 all: $(APP)
 
+$(ALGO_CUDA_DIR)/%.o: $(ALGO_CUDA_DIR)/%.cu
+	$(MAKE) -C $(ALGO_CUDA_DIR)
+
 $(CLASS_DIR)/%.o: $(CLASS_DIR)/%.cpp
 	$(AT)$(MAKE) -C $(CLASS_DIR)
 
diff --git a/multimedia_api/ll_samples/samples/10_camera_recording/main.cpp b/multimedia_api/ll_samples/samples/10_camera_recording/main.cpp
index 6f531b8..da56327 100644
--- a/multimedia_api/ll_samples/samples/10_camera_recording/main.cpp
+++ b/multimedia_api/ll_samples/samples/10_camera_recording/main.cpp
@@ -42,6 +42,9 @@
 #include <iostream>
 #include <fstream>
 
+#include <gst/app/gstappsrc.h>
+#include "NvCudaProc.h"
+
 using namespace Argus;
 using namespace EGLStream;
 
@@ -71,6 +74,7 @@ static uint32_t     ENCODER_PIXFMT = V4L2_PIX_FMT_H264;
 static bool         DO_STAT = false;
 static bool         VERBOSE_ENABLE = false;
 static bool         DO_CPU_PROCESS = false;
+static EGLDisplay   egl_display = nullptr;
 
 // Debug print macros.
 #define PRODUCER_PRINT(...) printf("PRODUCER: " __VA_ARGS__)
@@ -96,7 +100,7 @@ namespace ArgusSamples
 class ConsumerThread : public Thread
 {
 public:
-    explicit ConsumerThread(OutputStream* stream);
+    explicit ConsumerThread(OutputStream* stream, GstElement *appsrc_, EGLDisplay egl_display);
     ~ConsumerThread();
 
     bool isInError()
@@ -126,13 +130,20 @@ private:
     NvVideoEncoder *m_VideoEncoder;
     std::ofstream *m_outputFile;
     bool m_gotError;
+
+    GstElement *m_appsrc_;
+    GstClockTime init_ts;
+    EGLDisplay m_eglDisplay;
 };
 
-ConsumerThread::ConsumerThread(OutputStream* stream) :
+ConsumerThread::ConsumerThread(OutputStream* stream, GstElement *appsrc_, EGLDisplay egl_display) :
         m_stream(stream),
         m_VideoEncoder(NULL),
         m_outputFile(NULL),
-        m_gotError(false)
+        m_gotError(false),
+        m_appsrc_(appsrc_),
+        m_eglDisplay(egl_display),
+        init_ts(0)
 {
 }
 
@@ -260,6 +271,16 @@ bool ConsumerThread::threadExecute()
             CHECK_ERROR(m_VideoEncoder->output_plane.qBuffer(v4l2_buf, NULL));
             break;
         }
+        // Print out some capture metadata from the frame.
+        IArgusCaptureMetadata *iArgusCaptureMetadata = interface_cast<IArgusCaptureMetadata>(frame);
+        if (!iArgusCaptureMetadata)
+            ORIGINATE_ERROR("Failed to get IArgusCaptureMetadata interface.");
+        CaptureMetadata *metadata = iArgusCaptureMetadata->getMetadata();
+        ICaptureMetadata *iMetadata = interface_cast<ICaptureMetadata>(metadata);
+        if (!iMetadata)
+            ORIGINATE_ERROR("Failed to get ICaptureMetadata interface.");
+        unsigned long long ts;
+        ts = static_cast<unsigned long long>(iMetadata->getSensorTimestamp());
 
         // Get the IImageNativeBuffer extension interface and create the fd.
         NV::IImageNativeBuffer *iNativeBuffer =
@@ -268,7 +289,7 @@ bool ConsumerThread::threadExecute()
             ORIGINATE_ERROR("IImageNativeBuffer not supported by Image.");
         fd = iNativeBuffer->createNvBuffer(STREAM_SIZE,
                                            NvBufferColorFormat_YUV420,
-                                           (DO_CPU_PROCESS)?NvBufferLayout_Pitch:NvBufferLayout_BlockLinear);
+                                           NvBufferLayout_Pitch);
         if (VERBOSE_ENABLE)
             CONSUMER_PRINT("Acquired Frame. %d\n", fd);
 
@@ -296,11 +317,24 @@ bool ConsumerThread::threadExecute()
             }
             NvBufferMemSyncForDevice (fd, Y_INDEX, &ptr_y);
             NvBufferMemUnMap(fd, Y_INDEX, &ptr_y);
+        } else {
+            EGLImageKHR egl_image = NULL;
+            egl_image = NvEGLImageFromFd(m_eglDisplay, fd);
+            if (egl_image == NULL)
+            {
+                fprintf(stderr, "Error while mapping dmabuf fd (0x%X) to EGLImage\n",
+                         fd);
+            }
+            HandleEGLImage(&egl_image);
+            NvDestroyEGLImage(m_eglDisplay, egl_image);
         }
 
         // Push the frame into V4L2.
         v4l2_buf.m.planes[0].m.fd = fd;
         v4l2_buf.m.planes[0].bytesused = 1; // byteused must be non-zero
+        v4l2_buf.flags |= V4L2_BUF_FLAG_TIMESTAMP_COPY;
+        v4l2_buf.timestamp.tv_sec = ts / 1000000000;;
+        v4l2_buf.timestamp.tv_usec = (ts / 1000) % 1000000;;
         CHECK_ERROR(m_VideoEncoder->output_plane.qBuffer(v4l2_buf, NULL));
     }
 
@@ -413,9 +447,34 @@ bool ConsumerThread::encoderCapturePlaneDqCallback(struct v4l2_buffer *v4l2_buf,
         thiz->abort();
         ORIGINATE_ERROR("Failed to dequeue buffer from encoder capture plane");
     }
+#if 1
+    if (buffer->planes[0].bytesused > 0) {
+        GstBuffer *gstbuf;
+        GstMapInfo map = {0};
+        GstFlowReturn ret;
+        GstClockTime timestamp;
+        gstbuf = gst_buffer_new_allocate (NULL, buffer->planes[0].bytesused, NULL);
+
+        timestamp = v4l2_buf->timestamp.tv_sec*1000000000 +
+                    v4l2_buf->timestamp.tv_usec*1000;
+        if (thiz->init_ts == 0)
+            thiz->init_ts = timestamp;
+        gstbuf->pts = timestamp - thiz->init_ts; // ns
+        gstbuf->dts = gstbuf->pts;
+        if (VERBOSE_ENABLE)
+            CONSUMER_PRINT("PTS %lu \n", gstbuf->pts);
+
+        gst_buffer_map (gstbuf, &map, GST_MAP_WRITE);
+        memcpy(map.data, buffer->planes[0].data , buffer->planes[0].bytesused);
+        gst_buffer_unmap(gstbuf, &map);
 
+        g_signal_emit_by_name (thiz->m_appsrc_, "push-buffer", gstbuf, &ret);
+        gst_buffer_unref(gstbuf);
+    }
+#else
     thiz->m_outputFile->write((char *) buffer->planes[0].data,
                               buffer->planes[0].bytesused);
+#endif
 
     if (thiz->m_VideoEncoder->capture_plane.qBuffer(*v4l2_buf, NULL) < 0)
     {
@@ -427,6 +486,7 @@ bool ConsumerThread::encoderCapturePlaneDqCallback(struct v4l2_buffer *v4l2_buf,
     // GOT EOS from m_VideoEncoderoder. Stop dqthread.
     if (buffer->planes[0].bytesused == 0)
     {
+        gst_app_src_end_of_stream((GstAppSrc *)thiz->m_appsrc_);
         CONSUMER_PRINT("Got EOS, exiting...\n");
         return false;
     }
@@ -442,6 +502,25 @@ bool ConsumerThread::encoderCapturePlaneDqCallback(struct v4l2_buffer *v4l2_buf,
  ******************************************************************************/
 static bool execute()
 {
+    GMainLoop *main_loop;
+    GstPipeline *gst_pipeline = NULL;
+    GError *err = NULL;
+    GstElement *appsrc_;
+
+    gst_init (0, NULL);
+    main_loop = g_main_loop_new (NULL, FALSE);
+    char launch_string_[1024];
+
+    sprintf(launch_string_,
+            "appsrc name=mysource ! video/x-h264,width=%d,height=%d,stream-format=byte-stream !",
+            STREAM_SIZE.width(), STREAM_SIZE.height());
+    sprintf(launch_string_ + strlen(launch_string_),
+                " h264parse ! qtmux ! filesink location=a.mp4 ");
+    gst_pipeline = (GstPipeline*)gst_parse_launch(launch_string_, &err);
+    appsrc_ = gst_bin_get_by_name(GST_BIN(gst_pipeline), "mysource");
+    gst_app_src_set_stream_type(GST_APP_SRC(appsrc_), GST_APP_STREAM_TYPE_STREAM);
+    gst_element_set_state((GstElement*)gst_pipeline, GST_STATE_PLAYING);
+
     // Create the CameraProvider object and get the core interface.
     UniqueObj<CameraProvider> cameraProvider = UniqueObj<CameraProvider>(CameraProvider::create());
     ICameraProvider *iCameraProvider = interface_cast<ICameraProvider>(cameraProvider);
@@ -469,12 +548,14 @@ static bool execute()
         ORIGINATE_ERROR("Failed to get IOutputStreamSettings interface");
 
     iStreamSettings->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
+    iStreamSettings->setEGLDisplay(egl_display);
     iStreamSettings->setResolution(STREAM_SIZE);
+    iStreamSettings->setMetadataEnable(true);
     UniqueObj<OutputStream> outputStream(iCaptureSession->createOutputStream(streamSettings.get()));
 
     // Launch the FrameConsumer thread to consume frames from the OutputStream.
     PRODUCER_PRINT("Launching consumer thread\n");
-    ConsumerThread frameConsumerThread(outputStream.get());
+    ConsumerThread frameConsumerThread(outputStream.get(), appsrc_, egl_display);
     PROPAGATE_ERROR(frameConsumerThread.initialize());
 
     // Wait until the consumer is connected to the stream.
@@ -511,6 +592,15 @@ static bool execute()
     // Wait for the consumer thread to complete.
     PROPAGATE_ERROR(frameConsumerThread.shutdown());
 
+    // Wait for EOS message
+    GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(gst_pipeline));
+    gst_bus_poll(bus, GST_MESSAGE_EOS, GST_CLOCK_TIME_NONE);
+
+    gst_element_set_state((GstElement*)gst_pipeline, GST_STATE_NULL);
+    gst_object_unref(GST_OBJECT(gst_pipeline));
+    g_main_loop_unref(main_loop);
+    gst_deinit();
+
     PRODUCER_PRINT("Done -- exiting.\n");
 
     return true;
@@ -589,6 +679,8 @@ int main(int argc, char *argv[])
         printHelp();
         return EXIT_FAILURE;
     }
+    egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+    eglInitialize(egl_display, NULL, NULL);
 
     NvApplicationProfiler &profiler = NvApplicationProfiler::getProfilerInstance();
 
@@ -597,6 +689,7 @@ int main(int argc, char *argv[])
 
     profiler.stop();
     profiler.printProfilerData(std::cout);
+    eglTerminate(egl_display);
 
     return EXIT_SUCCESS;
 }

This patch hardcodes the output file name in lines 162-163, breaking the command-line options:

+    sprintf(launch_string_ + strlen(launch_string_),
+                " h264parse ! qtmux ! filesink location=a.mp4 ");

It is simple to fix this and keep the options working:

+    sprintf(launch_string_ + strlen(launch_string_),
+                " h264parse ! qtmux ! filesink location=%s ", OUTPUT_FILENAME.c_str());

Is the patch valid for R28.4 as well?

Hi,

The patch should be valid for r28.4. Please give it a try.

There seem to be problems with the patch; I get lots of rejections in the Makefile and some in the cpp file. Could you attach the resulting files, or check/update the patch?

NvAnalysis and NvCudaProc are missing as well.

Hi,
We have checked 10_camera_recording and don't see much deviation between r28.2 and r28.4. NvAnalysis and NvCudaProc are still in

tegra_multimedia_api/samples/common/algorithm/cuda

Please check whether you downloaded the correct version. If the patch cannot be applied directly, it should not be difficult to do a manual merge. You may give it a try.