OpenCV VideoWriter to nvoverlaysink using GStreamer?

Dear all,

How do I write an OpenCV image to nvoverlaysink (with GStreamer)? I can get data from nvcamerasrc and display the image with cv2.imshow(), but I want to display directly to the monitor (HDMI cable to the Jetson TX2).

Regards,

cap = cv2.VideoCapture("nvcamerasrc ! \
video/x-raw(memory:NVMM), width=(int)1280, height=(int)720, format=(string)I420, framerate=(fraction)120/1 ! \
nvvidconv flip-method=0 ! video/x-raw, format=(string)BGRx ! \
videoconvert ! video/x-raw, format=(string)BGR ! appsink")

w = cv2.VideoWriter('appsrc ! nvhdmioverlaysink -e', 0, 20.0, (640, 480), True)
if cap.isOpened():
    cv2.namedWindow("demo", cv2.WINDOW_AUTOSIZE)
    while True:
        ret_val, img = cap.read()
        img = cv2.resize(img, (640, 480))
        cv2.imshow('demo', img)
        w.write(img)
        cv2.waitKey(int(1000 / 100))

This gst-launch pipeline works; basically I want to do the same thing from Python 3 OpenCV. Reading from the camera is OK, but I still need the HDMI monitor display (from OpenCV):

gst-launch-1.0 nvcamerasrc fpsRange="30.0 30.0" ! 'video/x-raw(memory:NVMM), width=(int)640, height=(int)480, format=(string)I420, framerate=(fraction)30/1' ! nvvidconv flip-method=2 ! 'video/x-raw(memory:NVMM), format=(string)I420' ! nvoverlaysink -e

Hi raph38130,
Please check if nvivafilter helps in your case:
gstreamer NVMM <-> opencv gpuMat - Jetson TX2 - NVIDIA Developer Forums

This just reverses the capture pipeline; there are probably better solutions, but you may try this as a starting point:

#include <iostream>

#include <opencv2/opencv.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>

int main()
{
    // capture: camera -> NVMM I420 -> BGRx (nvvidconv) -> BGR (videoconvert) -> appsink
    const char* gst = "nvcamerasrc  ! video/x-raw(memory:NVMM), format=(string)I420, width=(int)640, height=(int)480, framerate=(fraction)30/1 ! \
                       nvvidconv    ! video/x-raw,              format=(string)BGRx ! \
                       videoconvert ! video/x-raw,              format=(string)BGR  ! \
                       appsink";

    // writer: appsrc (BGR) -> I420 (videoconvert) -> NVMM (nvvidconv) -> nvoverlaysink
    const char* gst_writer = "appsrc       ! video/x-raw, format=(string)BGR ! \
                              videoconvert ! video/x-raw, format=(string)I420, framerate=(fraction)30/1 ! \
                              nvvidconv    ! video/x-raw(memory:NVMM) ! \
                              nvoverlaysink";

    cv::VideoCapture cap(gst);
    if (!cap.isOpened()) {
        std::cout << "Failed to open camera." << std::endl;
        return -1;
    }

    unsigned int width  = cap.get(cv::CAP_PROP_FRAME_WIDTH);
    unsigned int height = cap.get(cv::CAP_PROP_FRAME_HEIGHT);
    unsigned int fps    = cap.get(cv::CAP_PROP_FPS);
    unsigned int pixels = width * height;
    std::cout << " Frame size : " << width << " x " << height << ", " << pixels << " Pixels " << fps << " FPS" << std::endl;

    cv::VideoWriter writer(gst_writer, static_cast<int>(cap.get(cv::CAP_PROP_FOURCC)), fps, cv::Size(width, height));
    if (!writer.isOpened()) {
        std::cout << "Failed to open writer." << std::endl;
        return -2;
    }

    cv::Mat frame_in;
    while (true) {
        if (!cap.read(frame_in)) {
            std::cout << "Capture read error" << std::endl;
            break;
        }
        writer.write(frame_in);
    }

    cap.release();
    writer.release();

    return 0;
}
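The same writer string should also work from Python: passing the pipeline to cv2.VideoWriter(gst_writer, 0, fps, (width, height), True) is the usual way to hand OpenCV a GStreamer sink pipeline, assuming your OpenCV build has GStreamer support (with a pipeline string the fourcc is not used to pick an encoder, so 0 is fine).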

Thank you, this is exactly what I want.

I am working on a project that pushes frames from GPU memory (1920x1080 at ~25 fps) out of the HDMI port. As a starting point, the above code from @Honey_Patouceul definitely works. However, I see relatively large latency on the pipeline. To investigate, my first step is to take OpenCV out of the path, so I have the following two versions:

#include <iostream>
#include <string>

#include <gst/gst.h>
#include <gst/app/gstappsrc.h>

#include <opencv2/opencv.hpp>

#include <boost/date_time/posix_time/posix_time.hpp>

// ngv is a project-specific helper namespace (timestamps etc.)
using namespace ngv;
using namespace cv;
using namespace std;

#define MY_IMG_W  1280
#define MY_IMG_H  720
#define MY_FPS    25
#define FEED_BY_CB        0
#define USE_GBUF_HD_COPY  1

GstClockTime   g_timestamp = 0;
// frame interval in nanoseconds: GST_SECOND / MY_FPS = 40 ms at 25 fps
const guint64  g_frmInterval = gst_util_uint64_scale_int(1, GST_SECOND, MY_FPS);
const uint32_t g_rgbImgSz = MY_IMG_H * MY_IMG_W * 3;  // bytes per BGR frame


static void cb_need_data(GstElement *appsrc, guint unused, void *data)
{
	static int fn = 0;
	GstBuffer *buffer = 0;
	GstFlowReturn ret;

	boost::posix_time::ptime t0 = POSIX_LOCAL_TIME;
	std::string ts0 = "fn=" + std::to_string(fn) + ", ts0=" + getPrettyTimeStamp(t0);

	// generate a noise image with the frame number burnt in
	cv::Mat I(MY_IMG_H, MY_IMG_W, CV_8UC3);
	cv::randu(I, cv::Scalar(0, 0, 0), cv::Scalar(255, 255, 255));
	cv::putText(I, "fn=" + std::to_string(fn), cv::Point(10, 200), cv::FONT_HERSHEY_SCRIPT_SIMPLEX, 1, cv::Scalar(255, 255, 255), 2);

	// copy <I> into <buffer>
#if USE_GBUF_HD_COPY
	buffer = gst_buffer_new_allocate(NULL, g_rgbImgSz, NULL);
	gst_buffer_fill(buffer, 0, (gconstpointer)I.data, (gsize)g_rgbImgSz);
#else
	buffer = gst_buffer_new_wrapped((gpointer)I.data, g_rgbImgSz);
#endif

	/* increment g_timestamp by 1/25 second per frame */
	GST_BUFFER_PTS(buffer) = g_timestamp;
	GST_BUFFER_DURATION(buffer) = g_frmInterval;
	g_timestamp += g_frmInterval;

	/* the "push-buffer" action signal does NOT take ownership of the buffer,
	   so it must be unreffed after emitting */
	g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);

#if USE_GBUF_HD_COPY
	gst_buffer_unref(buffer);
#endif

	printf("%s, ts1=%s, ts1-ts0=%d, g_timestamp=%" G_GUINT64_FORMAT "\n", ts0.c_str(), getPrettyTimeStamp().c_str(), timeIntervalMillisec(t0), g_timestamp);

	fn++;
}

int test_hdmi_out_v1(int argc, char *argv[])
{
	/* init GStreamer */
	gst_init(&argc, &argv);

	GMainLoop *loop = g_main_loop_new(NULL, FALSE);

	/* setup pipeline */
	GstElement *pipeline = gst_parse_launch("appsrc name=mySrc ! video/x-raw, format=(string)BGR, width=(int)1280, height=(int)720 ! \
			       videoconvert ! video/x-raw, format=(string)I420, framerate=(fraction)25/1 ! \
			       nvvidconv    ! video/x-raw(memory:NVMM) ! \
			       nvoverlaysink",	NULL);

	GstElement *appsrc = gst_bin_get_by_name(GST_BIN(pipeline), "mySrc");
	g_assert(appsrc != 0);

	/* setup appsrc: it emits need-data whenever its internal queue runs low,
	   so cb_need_data() generates frames on demand, paced by the pipeline */
	g_object_set(G_OBJECT(appsrc), "format", GST_FORMAT_TIME, NULL);
	g_signal_connect(appsrc, "need-data", G_CALLBACK(cb_need_data), NULL);

	/* play */
	gst_element_set_state(pipeline, GST_STATE_PLAYING);

	g_print("PLAY\n");
	g_main_loop_run(loop);

	/* clean up */
	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(GST_OBJECT(pipeline));
	g_main_loop_unref(loop);

	return 0;
}

int test_hdmi_out_v2(int argc, char *argv[])
{
	/* init GStreamer */
	gst_init(&argc, &argv);

	/* setup pipeline */
	GstElement *pipeline = gst_parse_launch("appsrc name=mySrc ! video/x-raw, format=(string)BGR, width=(int)1280, height=(int)720 ! \
			       videoconvert ! video/x-raw, format=(string)I420, framerate=(fraction)25/1 ! \
			       nvvidconv    ! video/x-raw(memory:NVMM) ! \
			       nvoverlaysink", NULL);

	GstElement *appsrc = gst_bin_get_by_name(GST_BIN(pipeline), "mySrc");
	g_assert(appsrc != 0);

	/* setup appsrc */
	g_object_set(G_OBJECT(appsrc), "format", GST_FORMAT_TIME, NULL);
	//g_signal_connect(appsrc, "need-data", G_CALLBACK(cb_need_data), NULL);

	/* play */
	GstStateChangeReturn state_ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
	g_assert(state_ret == GST_STATE_CHANGE_ASYNC);

	g_print("PLAY\n");
	//Push the data from buffer to gstpipeline 100 times
	g_timestamp = 0;

#if USE_GBUF_HD_COPY
	GstBuffer *pushbuffer = gst_buffer_new_allocate(NULL, g_rgbImgSz, NULL);
#endif
	for (int i = 0; i < 1000; i++) {
		cout << "---- i =" << i << "-----------" <<  endl;
		//generate a noise image
		cv::Mat I(MY_IMG_H, MY_IMG_W, CV_8UC3);
		cv::randu(I, cv::Scalar(0, 0, 0), cv::Scalar(255, 255, 255));
		cv::putText(I, "fn=" + std::to_string(i), cv::Point(10, 200), cv::FONT_HERSHEY_SCRIPT_SIMPLEX, 1, cv::Scalar(255, 255, 255), 2);
		//ngv::myImgWrite(I, "./", "tmp", i, 0);

#if USE_GBUF_HD_COPY
		gst_buffer_fill(pushbuffer, 0, (gconstpointer)I.data, g_rgbImgSz);
#else
		GstBuffer *pushbuffer = gst_buffer_new_wrapped(I.data, g_rgbImgSz);  //Wrap the data
#endif
		GST_BUFFER_PTS(pushbuffer) = g_timestamp;
		GST_BUFFER_DTS(pushbuffer) = g_timestamp;
		GST_BUFFER_DURATION(pushbuffer) = g_frmInterval;
		g_timestamp += g_frmInterval;

		GstFlowReturn ret = gst_app_src_push_buffer((GstAppSrc*)appsrc, pushbuffer);     //Push data into pipeline
		g_assert(ret == GST_FLOW_OK);
	
		cout << "i =" << i << ",g_timestamp=" << g_timestamp << ",refCnt=" << pushbuffer->mini_object.refcount << endl;
	}
	// clean up 
#if USE_GBUF_HD_COPY
	gst_buffer_unref(pushbuffer);
#endif
	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(GST_OBJECT(pipeline));

	return 0;
}

The callback version test_hdmi_out_v1() works as expected, but the for-loop version test_hdmi_out_v2() aborts with a run-time error:

PLAY
---- i =0 --------------
i =0,g_timestamp=40000000,refCnt=1
---- i =1 --------------
i =1,g_timestamp=80000000,refCnt=1
---- i =2 --------------
**
ERROR:gstappsrc.c:1225:gst_app_src_create: assertion failed: (GST_IS_BUFFER_LIST (obj))
Aborted (core dumped)

What did I miss in test_hdmi_out_v2()? Thank you very much!

Hi,
Here is a sample similar to your use case; please take a look:
[url]https://devtalk.nvidia.com/default/topic/1031734/jetson-tx2/appsrc-link-to-nvvidconv-error-with-reason-not-negotiated-4-/post/5250184/#5250184[/url]
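For reference, the likely cause of the assertion in test_hdmi_out_v2() is buffer ownership: gst_app_src_push_buffer() takes ownership of the buffer it is given, while the "push-buffer" action signal used in v1 does not (which is why v1 correctly unrefs after emitting). So the USE_GBUF_HD_COPY path in v2 keeps refilling and re-pushing a GstBuffer that the pipeline already owns. A minimal sketch of a corrected loop body, allocating a fresh buffer per frame (untested, reusing the globals from the post above):

for (int i = 0; i < 1000; i++) {
	// generate a noise image as before
	cv::Mat I(MY_IMG_H, MY_IMG_W, CV_8UC3);
	cv::randu(I, cv::Scalar(0, 0, 0), cv::Scalar(255, 255, 255));

	// allocate a NEW buffer for every frame and copy the image into it
	GstBuffer *buf = gst_buffer_new_allocate(NULL, g_rgbImgSz, NULL);
	gst_buffer_fill(buf, 0, (gconstpointer)I.data, (gsize)g_rgbImgSz);

	GST_BUFFER_PTS(buf) = g_timestamp;
	GST_BUFFER_DURATION(buf) = g_frmInterval;
	g_timestamp += g_frmInterval;

	// gst_app_src_push_buffer() takes ownership of buf: do not reuse or unref it afterwards
	GstFlowReturn ret = gst_app_src_push_buffer(GST_APP_SRC(appsrc), buf);
	if (ret != GST_FLOW_OK)
		break;
}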

Thanks DaneLLL, the code in this link works for me!

How can I compile this code? Thanks.
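A typical build line for the examples above, assuming the OpenCV and GStreamer development packages are installed (file and output names here are placeholders; the pkg-config module may be opencv4 on newer releases, and the ngv/Boost helpers from the latency test would have to be provided separately):

g++ -std=c++11 main.cpp -o hdmi_out $(pkg-config --cflags --libs gstreamer-1.0 gstreamer-app-1.0 opencv)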