Sharing a patch for r28.2:
diff --git a/multimedia_api/ll_samples/samples/10_camera_recording/Makefile b/multimedia_api/ll_samples/samples/10_camera_recording/Makefile
index 2e14575..07f1ee7 100644
--- a/multimedia_api/ll_samples/samples/10_camera_recording/Makefile
+++ b/multimedia_api/ll_samples/samples/10_camera_recording/Makefile
@@ -41,14 +41,30 @@ SRCS := \
OBJS := $(SRCS:.cpp=.o)
+OBJS += \
+ $(ALGO_CUDA_DIR)/NvAnalysis.o \
+ $(ALGO_CUDA_DIR)/NvCudaProc.o
+
+CPPFLAGS += \
+ -I"$(ALGO_CUDA_DIR)"
+
CPPFLAGS += \
- -I"$(ARGUS_UTILS_DIR)"
+ -I"$(ARGUS_UTILS_DIR)" \
+ `pkg-config --cflags gstreamer-1.0`
LDFLAGS += \
- -lnveglstream_camconsumer -largus
+ -lnveglstream_camconsumer -largus \
+ `pkg-config --libs gstreamer-app-1.0`
+
+LDFLAGS += \
+ -L"$(CUDA_PATH)/targets/aarch64-linux/lib" \
+ -lcuda -lcudart
all: $(APP)
+$(ALGO_CUDA_DIR)/%.o: $(ALGO_CUDA_DIR)/%.cu
+ $(MAKE) -C $(ALGO_CUDA_DIR)
+
$(CLASS_DIR)/%.o: $(CLASS_DIR)/%.cpp
$(AT)$(MAKE) -C $(CLASS_DIR)
diff --git a/multimedia_api/ll_samples/samples/10_camera_recording/main.cpp b/multimedia_api/ll_samples/samples/10_camera_recording/main.cpp
index 6f531b8..da56327 100644
--- a/multimedia_api/ll_samples/samples/10_camera_recording/main.cpp
+++ b/multimedia_api/ll_samples/samples/10_camera_recording/main.cpp
@@ -42,6 +42,9 @@
#include <iostream>
#include <fstream>
+#include <gst/app/gstappsrc.h>
+#include "NvCudaProc.h"
+
using namespace Argus;
using namespace EGLStream;
@@ -71,6 +74,7 @@ static uint32_t ENCODER_PIXFMT = V4L2_PIX_FMT_H264;
static bool DO_STAT = false;
static bool VERBOSE_ENABLE = false;
static bool DO_CPU_PROCESS = false;
+static EGLDisplay egl_display = nullptr;
// Debug print macros.
#define PRODUCER_PRINT(...) printf("PRODUCER: " __VA_ARGS__)
@@ -96,7 +100,7 @@ namespace ArgusSamples
class ConsumerThread : public Thread
{
public:
- explicit ConsumerThread(OutputStream* stream);
+ explicit ConsumerThread(OutputStream* stream, GstElement *appsrc_, EGLDisplay egl_display);
~ConsumerThread();
bool isInError()
@@ -126,13 +130,20 @@ private:
NvVideoEncoder *m_VideoEncoder;
std::ofstream *m_outputFile;
bool m_gotError;
+
+ GstElement *m_appsrc_;
+ GstClockTime init_ts;
+ EGLDisplay m_eglDisplay;
};
-ConsumerThread::ConsumerThread(OutputStream* stream) :
+ConsumerThread::ConsumerThread(OutputStream* stream, GstElement *appsrc_, EGLDisplay egl_display) :
m_stream(stream),
m_VideoEncoder(NULL),
m_outputFile(NULL),
- m_gotError(false)
+ m_gotError(false),
+ m_appsrc_(appsrc_),
+ init_ts(0),
+ m_eglDisplay(egl_display)
{
}
@@ -260,6 +271,16 @@ bool ConsumerThread::threadExecute()
CHECK_ERROR(m_VideoEncoder->output_plane.qBuffer(v4l2_buf, NULL));
break;
}
+ // Print out some capture metadata from the frame.
+ IArgusCaptureMetadata *iArgusCaptureMetadata = interface_cast<IArgusCaptureMetadata>(frame);
+ if (!iArgusCaptureMetadata)
+ ORIGINATE_ERROR("Failed to get IArgusCaptureMetadata interface.");
+ CaptureMetadata *metadata = iArgusCaptureMetadata->getMetadata();
+ ICaptureMetadata *iMetadata = interface_cast<ICaptureMetadata>(metadata);
+ if (!iMetadata)
+ ORIGINATE_ERROR("Failed to get ICaptureMetadata interface.");
+ unsigned long long ts;
+ ts = static_cast<unsigned long long>(iMetadata->getSensorTimestamp());
// Get the IImageNativeBuffer extension interface and create the fd.
NV::IImageNativeBuffer *iNativeBuffer =
@@ -268,7 +289,7 @@ bool ConsumerThread::threadExecute()
ORIGINATE_ERROR("IImageNativeBuffer not supported by Image.");
fd = iNativeBuffer->createNvBuffer(STREAM_SIZE,
NvBufferColorFormat_YUV420,
- (DO_CPU_PROCESS)?NvBufferLayout_Pitch:NvBufferLayout_BlockLinear);
+ NvBufferLayout_Pitch);
if (VERBOSE_ENABLE)
CONSUMER_PRINT("Acquired Frame. %d\n", fd);
@@ -296,11 +317,24 @@ bool ConsumerThread::threadExecute()
}
NvBufferMemSyncForDevice (fd, Y_INDEX, &ptr_y);
NvBufferMemUnMap(fd, Y_INDEX, &ptr_y);
+ } else {
+ EGLImageKHR egl_image = NULL;
+ egl_image = NvEGLImageFromFd(m_eglDisplay, fd);
+ if (egl_image == NULL)
+ {
+ fprintf(stderr, "Error while mapping dmabuf fd (0x%X) to EGLImage\n", fd);
+ } else {
+ HandleEGLImage(&egl_image);
+ NvDestroyEGLImage(m_eglDisplay, egl_image);
+ }
}
// Push the frame into V4L2.
v4l2_buf.m.planes[0].m.fd = fd;
v4l2_buf.m.planes[0].bytesused = 1; // byteused must be non-zero
+ v4l2_buf.flags |= V4L2_BUF_FLAG_TIMESTAMP_COPY;
+ v4l2_buf.timestamp.tv_sec = ts / 1000000000;
+ v4l2_buf.timestamp.tv_usec = (ts / 1000) % 1000000;
CHECK_ERROR(m_VideoEncoder->output_plane.qBuffer(v4l2_buf, NULL));
}
@@ -413,9 +447,34 @@ bool ConsumerThread::encoderCapturePlaneDqCallback(struct v4l2_buffer *v4l2_buf,
thiz->abort();
ORIGINATE_ERROR("Failed to dequeue buffer from encoder capture plane");
}
+#if 1
+ if (buffer->planes[0].bytesused > 0) {
+ GstBuffer *gstbuf;
+ GstMapInfo map = {0};
+ GstFlowReturn ret;
+ GstClockTime timestamp;
+ gstbuf = gst_buffer_new_allocate (NULL, buffer->planes[0].bytesused, NULL);
+
+ timestamp = v4l2_buf->timestamp.tv_sec*1000000000 +
+ v4l2_buf->timestamp.tv_usec*1000;
+ if (thiz->init_ts == 0)
+ thiz->init_ts = timestamp;
+ gstbuf->pts = timestamp - thiz->init_ts; // ns
+ gstbuf->dts = gstbuf->pts;
+ if (VERBOSE_ENABLE)
+ CONSUMER_PRINT("PTS %lu \n", gstbuf->pts);
+
+ gst_buffer_map (gstbuf, &map, GST_MAP_WRITE);
+ memcpy(map.data, buffer->planes[0].data , buffer->planes[0].bytesused);
+ gst_buffer_unmap(gstbuf, &map);
+ g_signal_emit_by_name (thiz->m_appsrc_, "push-buffer", gstbuf, &ret);
+ gst_buffer_unref(gstbuf);
+ }
+#else
thiz->m_outputFile->write((char *) buffer->planes[0].data,
buffer->planes[0].bytesused);
+#endif
if (thiz->m_VideoEncoder->capture_plane.qBuffer(*v4l2_buf, NULL) < 0)
{
@@ -427,6 +486,7 @@ bool ConsumerThread::encoderCapturePlaneDqCallback(struct v4l2_buffer *v4l2_buf,
// GOT EOS from m_VideoEncoderoder. Stop dqthread.
if (buffer->planes[0].bytesused == 0)
{
+ gst_app_src_end_of_stream((GstAppSrc *)thiz->m_appsrc_);
CONSUMER_PRINT("Got EOS, exiting...\n");
return false;
}
@@ -442,6 +502,25 @@ bool ConsumerThread::encoderCapturePlaneDqCallback(struct v4l2_buffer *v4l2_buf,
******************************************************************************/
static bool execute()
{
+ GMainLoop *main_loop;
+ GstPipeline *gst_pipeline = NULL;
+ GError *err = NULL;
+ GstElement *appsrc_;
+
+ gst_init (0, NULL);
+ main_loop = g_main_loop_new (NULL, FALSE);
+ char launch_string_[1024];
+
+ sprintf(launch_string_,
+ "appsrc name=mysource ! video/x-h264,width=%d,height=%d,stream-format=byte-stream !",
+ STREAM_SIZE.width(), STREAM_SIZE.height());
+ sprintf(launch_string_ + strlen(launch_string_),
+ " h264parse ! qtmux ! filesink location=a.mp4 ");
+ gst_pipeline = (GstPipeline*)gst_parse_launch(launch_string_, &err);
+ appsrc_ = gst_bin_get_by_name(GST_BIN(gst_pipeline), "mysource");
+ gst_app_src_set_stream_type(GST_APP_SRC(appsrc_), GST_APP_STREAM_TYPE_STREAM);
+ gst_element_set_state((GstElement*)gst_pipeline, GST_STATE_PLAYING);
+
// Create the CameraProvider object and get the core interface.
UniqueObj<CameraProvider> cameraProvider = UniqueObj<CameraProvider>(CameraProvider::create());
ICameraProvider *iCameraProvider = interface_cast<ICameraProvider>(cameraProvider);
@@ -469,12 +548,14 @@ static bool execute()
ORIGINATE_ERROR("Failed to get IOutputStreamSettings interface");
iStreamSettings->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
+ iStreamSettings->setEGLDisplay(egl_display);
iStreamSettings->setResolution(STREAM_SIZE);
+ iStreamSettings->setMetadataEnable(true);
UniqueObj<OutputStream> outputStream(iCaptureSession->createOutputStream(streamSettings.get()));
// Launch the FrameConsumer thread to consume frames from the OutputStream.
PRODUCER_PRINT("Launching consumer thread\n");
- ConsumerThread frameConsumerThread(outputStream.get());
+ ConsumerThread frameConsumerThread(outputStream.get(), appsrc_, egl_display);
PROPAGATE_ERROR(frameConsumerThread.initialize());
// Wait until the consumer is connected to the stream.
@@ -511,6 +592,15 @@ static bool execute()
// Wait for the consumer thread to complete.
PROPAGATE_ERROR(frameConsumerThread.shutdown());
+ // Wait for EOS message
+ GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(gst_pipeline));
+ gst_bus_poll(bus, GST_MESSAGE_EOS, GST_CLOCK_TIME_NONE);
+
+ gst_element_set_state((GstElement*)gst_pipeline, GST_STATE_NULL);
+ gst_object_unref(GST_OBJECT(gst_pipeline));
+ g_main_loop_unref(main_loop);
+ gst_deinit();
+
PRODUCER_PRINT("Done -- exiting.\n");
return true;
@@ -589,6 +679,8 @@ int main(int argc, char *argv[])
printHelp();
return EXIT_FAILURE;
}
+ egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+ eglInitialize(egl_display, NULL, NULL);
NvApplicationProfiler &profiler = NvApplicationProfiler::getProfilerInstance();
@@ -597,6 +689,7 @@ int main(int argc, char *argv[])
profiler.stop();
profiler.printProfilerData(std::cout);
+ eglTerminate(egl_display);
return EXIT_SUCCESS;
}