Gladly. Below is the bare bones of the code. I kept most comments but got rid of my error checking and diagnostic prints. It captures a frame, uses mmap and cv::Mat to place the pixel values into a Mat array, and then does some calculations.
For a test image, I placed in front of the built-in camera a black circle drawn on white paper. The calculation finds the minimum pixel intensity value in each row (and column). If the minimum intensity value in a column (row) is below a threshold (of 75), that column (row) is assumed to contain the black dot. Once the rows and columns of the black dot are determined, its intensity-weighted center of mass is computed.
The output gives, in order, (1) the first column containing the object, (2) the x-position of the center of mass (units of pixels), and (3) the last column containing the object. Similarly for the rows.
On my TX1, it goes haywire around 550 captures.
#include <stdio.h>
#include <stdlib.h>
#include <sys/mman.h>

#include <chrono>
#include <vector>

#include <Argus/Argus.h>
#include <EGLStream/EGLStream.h>
#include <EGLStream/NV/ImageNativeBuffer.h>
#include <nvbuf_utils.h>
#include <NvUtils.h>

#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>
/* Print MSG and return EXIT_FAILURE from the calling function when VAL is
 * null/zero.  Arguments are parenthesized so expression arguments
 * (e.g. `a || b`) expand correctly.  The body stays a bare brace block --
 * NOT do{}while(0) -- because existing call sites omit the trailing
 * semicolon. */
#define EXIT_IF_NULL(val,msg) \
{if (!(val)) {printf("%s\n",(msg)); return EXIT_FAILURE;}}
/* Print MSG and return EXIT_FAILURE when an Argus call did not report
 * STATUS_OK.  VAL is parenthesized so comparison/ternary expressions are
 * compared as a whole against Argus::STATUS_OK. */
#define EXIT_IF_NOT_OK(val,msg) \
{if ((val)!=Argus::STATUS_OK) {printf("%s\n",(msg)); return EXIT_FAILURE;}}
using namespace std;
using namespace Argus;
int main(int argc, char** argv)
{
const uint64_t FIVE_SECONDS_IN_NANOSECONDS = 5000000000;
std::vector<Argus::CameraDevice*> cameraDevices;
vector<cv::Mat> ycbcr_split(3);
/*
* Set up Argus API Framework, identify available camera devices, and create
* a capture session for the first available device
*/
Argus::UniqueObj<CameraProvider> cameraProvider(CameraProvider::create());
Argus::ICameraProvider *iCameraProvider = Argus::interface_cast<Argus::ICameraProvider>(cameraProvider);
EXIT_IF_NULL(iCameraProvider, "Cannot get core camera provider interface");
Argus::Status status = iCameraProvider->getCameraDevices(&cameraDevices);
EXIT_IF_NOT_OK(status, "Failed to get camera devices");
EXIT_IF_NULL(cameraDevices.size(), "No camera devices available");
Argus::UniqueObj<Argus::CaptureSession> captureSession(iCameraProvider->createCaptureSession(cameraDevices[0], &status));
Argus::ICaptureSession *iCaptureSession = Argus::interface_cast<Argus::ICaptureSession>(captureSession);
EXIT_IF_NULL(iCaptureSession, "Cannot get Capture Session Interface");
/*
* Creates the stream between the Argus camera image capturing
* sub-system (producer) and the image acquisition code (consumer). A consumer object is
* created from the stream to be used to request the image frame. A successfully submitted
* capture request activates the stream's functionality to eventually make a frame available
* for acquisition.
*/
Argus::UniqueObj<Argus::OutputStreamSettings> outputStreamSettings(iCaptureSession->createOutputStreamSettings());
Argus::IOutputStreamSettings *iOutputStreamSettings = Argus::interface_cast<Argus::IOutputStreamSettings>(outputStreamSettings);
EXIT_IF_NULL(iOutputStreamSettings, "Cannot get OutputStreamSettings Interface");
iOutputStreamSettings->setPixelFormat(Argus::PIXEL_FMT_YCbCr_420_888);
iOutputStreamSettings->setResolution(Argus::Size(640, 480));
/* parameters */
uint nImageWidth = 640;
uint nImageHeight = 480;
uint maxCount = 1000; // maximum count value
/* set image resolution to stream */
iOutputStreamSettings->setResolution(Argus::Size(nImageWidth, nImageHeight));
Argus::UniqueObj<Argus::OutputStream> outputStream(
iCaptureSession->createOutputStream(outputStreamSettings.get()));
Argus::IStream *iOutputStream = Argus::interface_cast<Argus::IStream>(outputStream);
EXIT_IF_NULL(iOutputStream, "Cannot get OutputStream Interface");
Argus::UniqueObj<EGLStream::FrameConsumer> consumer(EGLStream::FrameConsumer::create(outputStream.get()));
EGLStream::IFrameConsumer *iFrameConsumer = Argus::interface_cast<EGLStream::IFrameConsumer>(consumer);
EXIT_IF_NULL(iFrameConsumer, "Failed to initialize Consumer");
Argus::UniqueObj<Argus::Request> request(iCaptureSession->createRequest(Argus::CAPTURE_INTENT_STILL_CAPTURE));
Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(request);
EXIT_IF_NULL(iRequest, "Failed to get capture request interface");
status = iRequest->enableOutputStream(outputStream.get());
EXIT_IF_NOT_OK(status, "Failed to enable stream in capture request");
// indicate repeat capture
iCaptureSession -> repeat(request.get());
// get the start time
double startTime = clock();
// initialize counter of frame captures
int iCount = 0;
// create an array to hold the object center positions
double tCenter[maxCount] = {0.0};
double xCenter[maxCount] = {0.0};
double yCenter[maxCount] = {0.0};
while(iCount <= maxCount)
{
// issue capture request
uint32_t requestId = iCaptureSession->capture(request.get());
EXIT_IF_NULL(requestId, "Failed to submit capture request");
/*
* Acquire a frame generated by the capture request, get the image from the frame
* and create a .JPG file of the captured image
*/
Argus::UniqueObj<EGLStream::Frame> frame(iFrameConsumer->acquireFrame(FIVE_SECONDS_IN_NANOSECONDS, &status));
EGLStream::IFrame *iFrame = Argus::interface_cast<EGLStream::IFrame>(frame);
EXIT_IF_NULL(iFrame, "Failed to get IFrame interface");
// get the image from the frame
EGLStream::Image *image = iFrame->getImage();
EXIT_IF_NULL(image, "Failed to get Image from iFrame->getImage()");
/* write image data to NvBuffer */
EGLStream::NV::IImageNativeBuffer *iImageNativeBuffer = interface_cast<EGLStream::NV::IImageNativeBuffer>(image);
EXIT_IF_NULL(iImageNativeBuffer,"Failed to create an IImageNativeBuffer");
int dmabuf_fd = iImageNativeBuffer->createNvBuffer(Size {nImageWidth, nImageHeight}, NvBufferColorFormat_YUV420, NvBufferLayout_Pitch, &status);
EXIT_IF_NOT_OK(status,"Failed to create NvBuffer")
NvBufferParams params;
int ret = NvBufferGetParams(dmabuf_fd, ¶ms);
if(ret < 0)
{
printf("Failed to get native buffer parameters\n"); return EXIT_FAILURE;
}
/*
* convert image data in the NvBuffer to YUV image data
*/
for(int i = 0 ; i<params.num_planes ; i++)
{
int32_t width = params.width[i];
int32_t height = params.height[i];
int32_t pitch = params.pitch[i];
size_t fsize = pitch*height;
uint8_t* data_mem = (uint8_t*)mmap(0, fsize, PROT_READ | PROT_WRITE, MAP_SHARED, dmabuf_fd, params.offset[i]);
ycbcr_split[i] = cv::Mat (height, width, CV_8UC1, data_mem, pitch);
}
// free memory for NvBuffer
NvBufferDestroy(dmabuf_fd);
/*
* convert raw YUV data to grayscale pixel values
*/
// use the first channel of the YUV values as a substitute for grayscale values
cv::Mat grayScaleValues = ycbcr_split[0].clone();
// define vectors to hold column and row-wise maximum pixel values
cv::Mat minValuesByColumn;
cv::Mat minValuesByRow;
// define variables for the last column and row numbers for iterating
int lastColumn = nImageWidth - 1;
int lastRow = nImageHeight - 1;
// determine minimum pixel values in each column
cv::reduce(grayScaleValues,minValuesByColumn,0,CV_REDUCE_MIN,-1);
// determine minimum pixel values in each row
cv::reduce(grayScaleValues,minValuesByRow,1,CV_REDUCE_MIN,-1);
// check for a zero row in the grayScaleValues array
cv::Mat minValuesInGray;
cv::reduce(minValuesByColumn,minValuesInGray,1,CV_REDUCE_MAX,-1);
/*************************************************/
/*
/* begin searching through pixels for object
/*
/*************************************************/
// initialize values
int iColStart = 0; // column number where the object starts
int iColEnd = 0; // column number were the object ends
int thresholdPixelValue = 75;
/*
*
* determine the columns containing the object
*
*/
for(int iC = 0 ; iC <= lastColumn ; iC++)
{
if(minValuesByColumn.at<uint8_t>(0,iC) <= thresholdPixelValue)
{
// a pixel from the object was detected
iColStart = iC;
break;
}
}
// set the end column to the current column
iColEnd = iColStart;
// only continue if a pixel from the object was detected before the last column
for(int iC = iColStart ; iC <= lastColumn ; iC++)
{
if(minValuesByColumn.at<uint8_t>(0,iC) <= thresholdPixelValue)
{
// still part of the object
iColEnd = iC;
}
else
{
break;
}
}
/*
*
* determine the rows containing the object
*
*/
int iRowStart = 0; // row number where the object starts
int iRowEnd = 0;
for(int iR = 0 ; iR <= lastRow ; iR++)
{
if(minValuesByRow.at<uint8_t>(0,iR) <= thresholdPixelValue)
{
// a pixel from the object was detected
iRowStart = iR;
break;
}
}
// set the end column to the current column
iRowEnd = iRowStart;
// only continue if a pixel from the object was detected before the last column
for(int iR = iRowStart ; iR <= lastRow ; iR++)
{
if(minValuesByRow.at<uint8_t>(0,iR) <= thresholdPixelValue)
{
// still part of the object
iRowEnd = iR;
}
else
{
break;
}
}
/************************************************/
/*
/* compute object center of mass
/*
/************************************************/
double pixelIntensity = 0.0;
double columnPositionTimesIntensity = 0.0;
double rowPositionTimesIntensity = 0.0;
for(int iC = iColStart ; iC <= iColEnd ; iC++)
{
for(int iR = iRowStart ; iR <= iRowEnd ; iR++)
{
pixelIntensity += grayScaleValues.at<uint8_t>(iR,iC);
columnPositionTimesIntensity += grayScaleValues.at<uint8_t>(iR,iC)*iC;
rowPositionTimesIntensity += grayScaleValues.at<uint8_t>(iR,iC)*iR;
}
}
// save current values
tCenter[iCount] = (clock()-startTime)/1000000.0; // time in seconds
xCenter[iCount] = columnPositionTimesIntensity/pixelIntensity; // x-position center of mass
yCenter[iCount] = rowPositionTimesIntensity/pixelIntensity; // y-position center of mass
if(tCenter[iCount] < 10)
{
printf("For image %i at %f s: Column center = [%i\t%f\t%i] and row center = [%i\t%f\t%i]\n",iCount,tCenter[iCount],iColStart,xCenter[iCount],iColEnd,iRowStart,yCenter[iCount],iRowEnd);
}
else
{
printf("For image %i at %f s: Column center = [%i\t%f\t%i] and row center = [%i\t%f\t%i]\n",iCount,tCenter[iCount],iColStart,xCenter[iCount],iColEnd,iRowStart,yCenter[iCount],iRowEnd);
}
/* Original oneShot code to write image data to a file */
EGLStream::IImageJPEG *iImageJPEG = Argus::interface_cast<EGLStream::IImageJPEG>(image);
EXIT_IF_NULL(iImageJPEG, "Failed to get ImageJPEG Interface");
if(iCount == maxCount)
{
status = iImageJPEG->writeJPEG("oneShot.jpg");
EXIT_IF_NOT_OK(status,"Failed to write JPEG");
}
if(iCount == 0)
{
// establish the autocontrol object
//Argus::UniqueObj<Argus::Request> autoControlSettings(iRequest->getAutocontrolSettings());
// get autocontrol interface
Argus::IAutoControlSettings *iAutoControlSettings = Argus::interface_cast<Argus::IAutoControlSettings>(iRequest->getAutoControlSettings());
// set autocontrol settings
iAutoControlSettings -> setAeLock(true);
iAutoControlSettings -> setAwbLock(true);
}
iCaptureSession->repeat(request.get());
iCount++;
}
iCaptureSession -> stopRepeat();
printf("Captured %i frames in %f seconds at a rate of %f frames/sec\n", iCount-1, (clock()-startTime)/1000000.0,(iCount-1)/(clock()-startTime)*1000000.0);
return EXIT_SUCCESS;
}