V4L2 camera

Hi:
When I use V4L2 to capture frames bypassing the ISP, the captured frame size is larger than the one I set. Is there any V4L2 bypass-ISP sample?
thanks!

hello ming1988,

please refer to [NVIDIA Tegra Linux Driver Package]-> [Release 28.2 Development Guide]->
[Camera Software Development Solution]-> [Applications Using V4L2 IOCTL Directly]
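for example, that chapter demonstrates raw capture directly through V4L2 with a command along these lines (device node, resolution, and output file here are illustrative):

v4l2-ctl -d /dev/video0 --set-fmt-video=width=1920,height=1080,pixelformat=RG10 --stream-mmap --stream-count=1 --stream-to=ov5693.raw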
thanks

thanks!
that is just a command line to capture frames; is there any sample code?

hello ming1988,

could you please illustrate your use case?
may I also know your sensor capability?
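for instance, the following command lists the formats and frame sizes your sensor driver exposes (assuming the sensor registers as /dev/video0):

v4l2-ctl -d /dev/video0 --list-formats-ext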
thanks

#include <unistd.h>
#include <error.h>
#include <errno.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <pthread.h>
#include <linux/videodev2.h>
#include <sys/mman.h>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include <iostream>	/* the angle-bracket header names were stripped by the forum; */
#include <string>	/* the code below needs at least these two */

using namespace std;

#define CLEAR(x) memset(&(x), 0, sizeof(x))

#define IMAGEWIDTH 1920
#define IMAGEHEIGHT 1080

class V4L2Capture {
public:
V4L2Capture(char *devName, int width, int height);
virtual ~V4L2Capture();

int openDevice();
int closeDevice();
int initDevice();
int startCapture();
int stopCapture();
int freeBuffers();
int getFrame(void **,size_t *);
int backFrame();
static void test();

private:
int initBuffers();

struct cam_buffer
{
	void* start;
	unsigned int length;
};
char *devName;
int capW;
int capH;
int fd_cam;
cam_buffer *buffers;
unsigned int n_buffers;
int frameIndex;

};

V4L2Capture::V4L2Capture(char *devName, int width, int height) {
// TODO Auto-generated constructor stub
this->devName = devName;
this->fd_cam = -1;
this->buffers = NULL;
this->n_buffers = 0;
this->frameIndex = -1;
this->capW=width;
this->capH=height;
}

V4L2Capture::~V4L2Capture() {
// TODO Auto-generated destructor stub
}

int V4L2Capture::openDevice() {
/* open the device */
printf("video dev : %s\n", devName);
fd_cam = open(devName, O_RDWR);
if (fd_cam < 0) {
perror("Can't open video device");
}
return 0;
}

int V4L2Capture::closeDevice() {
if (fd_cam > 0) {
int ret = 0;
if ((ret = close(fd_cam)) < 0) {
perror("Can't close video device");
}
return 0;
} else {
return -1;
}
}

int V4L2Capture::initDevice() {
int ret;
struct v4l2_capability cam_cap;		// device capability information
struct v4l2_cropcap cam_cropcap;	// camera cropping capability
struct v4l2_fmtdesc cam_fmtdesc;	// enumerate supported formats: VIDIOC_ENUM_FMT
struct v4l2_crop cam_crop;		// image cropping/scaling
struct v4l2_format cam_format;		// video standard, frame format, etc.

/* Use the VIDIOC_QUERYCAP ioctl to get basic camera information */
ret = ioctl(fd_cam, VIDIOC_QUERYCAP, &cam_cap);
if (ret < 0) {
	perror("Can't get device information: VIDIOCGCAP");
}
printf(
		"Driver Name:%s\nCard Name:%s\nBus info:%s\nDriver Version:%u.%u.%u\n",
		cam_cap.driver, cam_cap.card, cam_cap.bus_info,
		(cam_cap.version >> 16) & 0XFF, (cam_cap.version >> 8) & 0XFF,
		cam_cap.version & 0XFF);

/* Use the VIDIOC_ENUM_FMT ioctl to enumerate all formats the camera supports */
cam_fmtdesc.index = 0;
cam_fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
printf("Support format:\n");
while (ioctl(fd_cam, VIDIOC_ENUM_FMT, &cam_fmtdesc) != -1) {
	printf("\t%d.%s\n", cam_fmtdesc.index + 1, cam_fmtdesc.description);
	cam_fmtdesc.index++;
}

/* Use the VIDIOC_CROPCAP ioctl to query the camera's cropping capability */
cam_cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (0 == ioctl(fd_cam, VIDIOC_CROPCAP, &cam_cropcap)) {
	printf("Default rec:\n\tleft:%d\n\ttop:%d\n\twidth:%d\n\theight:%d\n",
			cam_cropcap.defrect.left, cam_cropcap.defrect.top,
			cam_cropcap.defrect.width, cam_cropcap.defrect.height);
	/* Use the VIDIOC_S_CROP ioctl to set the capture window */
	cam_crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	cam_crop.c = cam_cropcap.defrect;		// default capture window
	if (-1 == ioctl(fd_cam, VIDIOC_S_CROP, &cam_crop)) {
		//printf("Can't set crop para\n");
	}
} else {
	printf("Can't set cropcap para\n");
}

/* Use the VIDIOC_S_FMT ioctl to set the frame format */
cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
cam_format.fmt.pix.width = capW;
cam_format.fmt.pix.height = capH;
cam_format.fmt.pix.pixelformat = V4L2_PIX_FMT_SBGGR10;	// must match a format the sensor supports
cam_format.fmt.pix.field = V4L2_FIELD_NONE;		// raw sensor output is progressive
ret = ioctl(fd_cam, VIDIOC_S_FMT, &cam_format);
if (ret < 0) {
	perror("Can't set frame information");
}
/* Use the VIDIOC_G_FMT ioctl to read back the frame format actually in effect */
cam_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ret = ioctl(fd_cam, VIDIOC_G_FMT, &cam_format);
if (ret < 0) {
	perror("Can't get frame information");
}
printf("Current data format information:\n\twidth:%d\n\theight:%d\n",
		cam_format.fmt.pix.width, cam_format.fmt.pix.height);
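/* note (diagnostic aid, added for this thread): the driver may pad each
 * line out to its stride alignment, so the frame in memory can be larger
 * than width * height * bytes-per-pixel; bytesperline and sizeimage
 * report the actual layout */
printf("\tbytesperline:%d\n\tsizeimage:%d\n",
		cam_format.fmt.pix.bytesperline, cam_format.fmt.pix.sizeimage);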
ret = initBuffers();
if (ret < 0) {
	perror("Buffers init error");
	//exit(-1);
}
return 0;

}

int V4L2Capture::initBuffers() {
int ret;
/* Use the VIDIOC_REQBUFS ioctl to request frame buffers */
struct v4l2_requestbuffers req;
CLEAR(req);
req.count = 4;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
ret = ioctl(fd_cam, VIDIOC_REQBUFS, &req);
if (ret < 0) {
perror("Request frame buffers failed");
}
if (req.count < 2) {
perror("Insufficient buffer memory");
}
buffers = (struct cam_buffer*) calloc(req.count, sizeof(*buffers));
if (!buffers) {
perror("Out of memory");
}
for (n_buffers = 0; n_buffers < req.count; n_buffers++) {
struct v4l2_buffer buf;
CLEAR(buf);
// Query buffer n_buffers to get its offset and length
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = n_buffers;
ret = ioctl(fd_cam, VIDIOC_QUERYBUF, &buf);
if (ret < 0) {
printf("VIDIOC_QUERYBUF %d failed\n", n_buffers);
return -1;
}
buffers[n_buffers].length = buf.length;
//printf("buf.length= %d\n", buf.length);
// map the buffer into the application's address space
buffers[n_buffers].start = mmap(
NULL, // start anywhere
buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd_cam,
buf.m.offset);
if (MAP_FAILED == buffers[n_buffers].start) {
printf("mmap buffer%d failed\n", n_buffers);
return -1;
}
}
return 0;
}

int V4L2Capture::startCapture() {
unsigned int i;
for (i = 0; i < n_buffers; i++) {
struct v4l2_buffer buf;
CLEAR(buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (-1 == ioctl(fd_cam, VIDIOC_QBUF, &buf)) {
printf("VIDIOC_QBUF buffer%d failed\n", i);
return -1;
}
}
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == ioctl(fd_cam, VIDIOC_STREAMON, &type)) {
printf("VIDIOC_STREAMON error\n");
return -1;
}
return 0;
}

int V4L2Capture::stopCapture() {
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (-1 == ioctl(fd_cam, VIDIOC_STREAMOFF, &type)) {
printf("VIDIOC_STREAMOFF error\n");
return -1;
}
return 0;
}

int V4L2Capture::freeBuffers() {
unsigned int i;
for (i = 0; i < n_buffers; ++i) {
if (-1 == munmap(buffers[i].start, buffers[i].length)) {
printf("munmap buffer%d failed\n", i);
return -1;
}
}
free(buffers);
return 0;
}

int V4L2Capture::getFrame(void **frame_buf, size_t *len) {
struct v4l2_buffer queue_buf;
CLEAR(queue_buf);
queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
queue_buf.memory = V4L2_MEMORY_MMAP;
if (-1 == ioctl(fd_cam, VIDIOC_DQBUF, &queue_buf)) {
printf("VIDIOC_DQBUF error\n");
return -1;
}
*frame_buf = buffers[queue_buf.index].start;
*len = buffers[queue_buf.index].length;
frameIndex = queue_buf.index;
return 0;
}

int V4L2Capture::backFrame() {
if (frameIndex != -1) {
struct v4l2_buffer queue_buf;
CLEAR(queue_buf);
queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
queue_buf.memory = V4L2_MEMORY_MMAP;
queue_buf.index = frameIndex;
if (-1 == ioctl(fd_cam, VIDIOC_QBUF, &queue_buf)) {
printf("VIDIOC_QBUF error\n");
return -1;
}
return 0;
}
return -1;
}

void V4L2Capture::test() {
unsigned char *rawframe = NULL;
unsigned long rawframeSize = 0;

string videoDev="/dev/video0";
V4L2Capture *vcap = new V4L2Capture(const_cast<char*>(videoDev.c_str()),
		1920, 1080);
vcap->openDevice();
vcap->initDevice();
vcap->startCapture();
vcap->getFrame((void **) &rawframe, (size_t *)&rawframeSize);

vcap->backFrame();
vcap->stopCapture();
vcap->freeBuffers();
vcap->closeDevice();
delete vcap;

}

void VideoPlayer() {
unsigned char *rawframe = NULL;
unsigned long rawframeSize = 0;

string videoDev = "/dev/video0";
V4L2Capture *vcap = new V4L2Capture(const_cast<char*>(videoDev.c_str()), 1920, 1080);
vcap->openDevice();
vcap->initDevice();
vcap->startCapture();

cvNamedWindow("Capture", CV_WINDOW_AUTOSIZE);
IplImage* img;
CvMat cvmat;
double t;
int index = 0;
FILE *f = fopen("./test.raw", "wb+");
//while(1){
	t = (double)cvGetTickCount();
	vcap->getFrame((void **) &rawframe, (size_t *)&rawframeSize);
	fwrite(rawframe, rawframeSize, 1, f);
	
	#if 1
	cvmat = cvMat(IMAGEHEIGHT, IMAGEWIDTH, CV_8UC3, (void*)rawframe);

	// decode -- note: cvDecodeImage expects a compressed (e.g. MJPEG) buffer,
	// so this display path cannot render raw Bayer data as-is
	img = cvDecodeImage(&cvmat, 1);
	if(!img){
		printf("DecodeImage error!\n");
	}
	
	cvShowImage("Capture",img);
	cvReleaseImage(&img);

	vcap->backFrame();
	if((cvWaitKey(1)&255) == 27){
		exit(0);
	}
	t = (double)cvGetTickCount() - t;
	printf("Used time is %g ms\n",( t / (cvGetTickFrequency()*1000)));
	#endif
//}		
vcap->stopCapture();
vcap->freeBuffers();
vcap->closeDevice();
delete vcap;

}

int main() {
VideoPlayer();
return 0;
}
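For reference, a build command along these lines should work, assuming the OpenCV 2.x development packages are installed and the source is saved as v4l2_capture.cpp (the file name is illustrative):

g++ v4l2_capture.cpp -o v4l2_capture `pkg-config --cflags --libs opencv` -lpthread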

hi, JerryChang:
we use the OV5693 camera on the TX2 board

hello ming1988,

so, you would like to access the sensor via standard V4L2 controls.
please refer to the kernel source below for an example, thanks

sources/kernel/kernel-4.4/drivers/media/platform/tegra/camera/vi/channel.c

hi, JerryChang,
I used "v4l2-ctl --set-fmt-video=width=1920,height=1080,pixelformat=RG10 --stream-mmap --stream-count=1 -d /dev/video0 --stream-to=ov5693.raw" to capture a raw frame, but the ov5693.raw file was still wrong.

hello ming1988,

may I know what's wrong with your ov5693 raw files?
you could use 3rd-party tools to view the raw file, such as 7yuv.
please also try to increase stream-count to dump more frames for confirmation.
thanks

hi, JerryChang:
thanks! I have used Photoshop to view the raw data.
There are many black stripes, as if some data was not collected.

hi, JerryChang:
Did you ever try to capture OV5693 raw data bypassing the ISP, using only standard V4L2?

hello ming1988,

yes, we have verified the raw dump functionality.
could you please share which JetPack release you're working on?
please also share the raw files with us if you are still having issues.
thanks

hi, JerryChang:
Below are the raw data links; ov5693.raw was captured with v4l2-ctl, and test.raw was captured with the standard V4L2 program.
test.raw (Baidu Netdisk download link)
ov5693.raw (Baidu Netdisk download link)

JetPack is JetPack-L4T-3.2-linux-x64_b196.run

thanks!

hi, JerryChang:
For the results above, I had plugged a TX2 module into a TX1 developer-kit carrier board.
I just tried a TX1 module on the TX1 carrier board and captured raw data again, and it is actually correct!
So, can we plug a TX2 module into a TX1 kit carrier board? Or is the error above caused by bugs in the CSI/VI driver?

hello ming1988,

I have taken a brief look at the hex values of these two raw files.
may I know what's the resolution of these two raw files?

in addition, what do you mean by "I used the TX2 core board to plug into the TX1 kit bottom board"?
there are indeed different VI drivers for TX1 and TX2,
and you need to flash TX1 and TX2 boards with different flashing configurations.
please also refer to [NVIDIA Tegra Linux Driver Package] and check [Release 28.2 Development Guide]-> [Quick Start Guide]-> [To flash the root file system to internal eMMC]
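for example, a typical eMMC flashing invocation from that guide looks like the following (jetson-tx2 is the TX2 board configuration; TX1 uses its own):

sudo ./flash.sh jetson-tx2 mmcblk0p1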
thanks

the default resolution is 2592x1944

hi, JerryChang:
with which JetPack had you verified the raw dump functionality? indeed, I have tried many times on TX2, using v4l2-ctl.

thanks!

hello ming1988,

we'll look into this with JetPack-3.2.
could you please try to dump the raw files with different resolutions?
for example,

v4l2-ctl -d /dev/video0 --set-fmt-video=width=1920,height=1080,pixelformat=RG10 --set-ctrl bypass_mode=0 --stream-user --stream-count=5

hello ming1988,

may I have a status update about capturing with a lower resolution?

besides, it should be an application limitation that the 256-byte stride alignment is not supported.
please modify the stride alignment as below to analyze your raw dump.
thanks

diff --git a/drivers/media/platform/tegra/camera/vi/core.h b/drivers/media/platform/tegra/camera/vi/core.h
index 56b24fd..3d5461d 100644
--- a/drivers/media/platform/tegra/camera/vi/core.h
+++ b/drivers/media/platform/tegra/camera/vi/core.h
@@ -23,7 +23,7 @@
 /* Width alignment */
 #define TEGRA_WIDTH_ALIGNMENT  1
 /* Stride alignment is 256, for VIC worse case */
-#define TEGRA_STRIDE_ALIGNMENT 256
+#define TEGRA_STRIDE_ALIGNMENT 64
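
alternatively, you can keep the default 256-byte alignment and strip the padding offline. with the default 2592x1944 mode, each line carries 2592 * 2 = 5184 payload bytes padded up to 5376, so 192 trailing bytes per line appear as black stripes. below is a minimal sketch to unpad such a dump (the resolution, the 2-bytes-per-pixel RG10 layout, and the 256-byte stride are assumptions to adjust for your mode):

/* unpad.c -- strip the per-line stride padding from a raw Bayer dump.
 * Assumptions: 10-bit Bayer stored in a 16-bit container (2 bytes/pixel)
 * and a line stride padded up to TEGRA_STRIDE_ALIGNMENT (256). */
#include <stdio.h>
#include <stdlib.h>

int main(int argc, char **argv)
{
	const int width = 2592, height = 1944;	/* sensor default mode */
	const int bpp = 2;			/* bytes per pixel for RG10 */
	const int align = 256;			/* TEGRA_STRIDE_ALIGNMENT */
	const int row = width * bpp;		/* 5184 payload bytes per line */
	const int stride = ((row + align - 1) / align) * align;	/* 5376 */
	unsigned char *line;
	FILE *in, *out;
	int y;

	if (argc != 3) {
		fprintf(stderr, "usage: %s in.raw out.raw\n", argv[0]);
		return 1;
	}
	in = fopen(argv[1], "rb");
	out = fopen(argv[2], "wb");
	if (!in || !out) {
		perror("fopen");
		return 1;
	}
	line = (unsigned char *) malloc(stride);
	for (y = 0; y < height; y++) {
		if (fread(line, 1, stride, in) != (size_t) stride)
			break;			/* short or truncated file */
		fwrite(line, 1, row, out);	/* keep payload, drop padding */
	}
	free(line);
	fclose(in);
	fclose(out);
	return 0;
}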