I’ve created a new class for grabbing frames from an MJPEG V4L2 camera. It builds a GStreamer pipeline: v4l2src → nvjpegdec → nvvidconv → nvvideosink. I couldn’t get it working until I realised that nvvideosink needs its “outcaps” property set, and that it must be NV12 or I420 — despite the element claiming RGBA output! The source for my class is included below.
Now the code works: I can create a frame source and load frames in the VisionWorks tracking example. For the first 10–20 seconds the video is fine, but then it suddenly becomes choppy and doesn’t recover until I restart. CPU and memory usage look normal. Using the same camera through the standard V4L2 frame source works fine, and a similar pipeline launched via gst-launch into nvoverlaysink also runs without slowdown. Oddly, the algorithm and display timers still report high FPS while the output is visibly choppy.
#include "GStreamerMJPEGFrameSourceImpl.hpp"
#include <NVX/FrameSource/GStreamer/GStreamerEGLStreamSinkFrameSourceImpl.hpp>
#include <sstream>
namespace nvidiaio
{
// Frame source that captures MJPEG from a V4L2 camera and decodes it on the GPU.
// cameraIdx_ selects the device node: the pipeline opens /dev/video<cameraIdx_>
// (see InitializeGstPipeLine).
// The base class is configured as a CAMERA_SOURCE named "GStreamerMJPEGFrameSource";
// the third argument presumably selects the EGL stream's FIFO mode
// (false = mailbox/latest-frame) — TODO confirm against
// GStreamerEGLStreamSinkFrameSourceImpl, since it is later passed to the sink's
// "fifo" property via the inherited fifoMode member.
GStreamerMJPEGFrameSourceImpl::GStreamerMJPEGFrameSourceImpl(uint cameraIdx_) :
GStreamerEGLStreamSinkFrameSourceImpl(nvxio::FrameSource::CAMERA_SOURCE, "GStreamerMJPEGFrameSource", false),
cameraIdx(cameraIdx_)
{
}
bool GStreamerMJPEGFrameSourceImpl::setConfiguration(const FrameSource::Parameters& params)
{
NVXIO_ASSERT(end);
configuration.frameHeight = params.frameHeight;
configuration.frameWidth = params.frameWidth;
configuration.fps = params.fps;
NVXIO_ASSERT((params.format == NVXCU_DF_IMAGE_NV12) ||
(params.format == NVXCU_DF_IMAGE_U8) ||
(params.format == NVXCU_DF_IMAGE_RGB) ||
(params.format == NVXCU_DF_IMAGE_RGBX)||
(params.format == NVXCU_DF_IMAGE_NONE));
configuration.format = params.format;
return true;
}
// Builds and starts the capture pipeline:
//   v4l2src ! image/jpeg ! nvjpegdec ! video/x-raw(I420) !
//   nvvidconv ! video/x-raw(memory:NVMM, NV12) ! nvvideosink
// nvvideosink acts as an EGLStream producer feeding context.stream.
// On any failure the partially built pipeline is torn down via
// FinalizeGstPipeLine() and false is returned.
bool GStreamerMJPEGFrameSourceImpl::InitializeGstPipeLine()
{
    // Apply defaults for any configuration values left unset ((vx_uint32)-1).
    if (configuration.frameWidth == (vx_uint32)-1)
        configuration.frameWidth = 1920;
    if (configuration.frameHeight == (vx_uint32)-1)
        configuration.frameHeight = 1080;
    if (configuration.fps == (vx_uint32)-1)
        configuration.fps = 60;

    GstStateChangeReturn status;
    end = true;

    pipeline = GST_PIPELINE(gst_pipeline_new(nullptr));
    if (!pipeline)
    {
        NVXIO_PRINT("Cannot create Gstreamer pipeline");
        return false;
    }

    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));

    // v4l2src: capture MJPEG from /dev/video<cameraIdx>.
    GstElement * v4l2src = gst_element_factory_make("v4l2src", nullptr);
    if (!v4l2src)
    {
        NVXIO_PRINT("Cannot create v4l2src");
        FinalizeGstPipeLine();
        return false;
    }

    std::ostringstream cameraDev;
    cameraDev << "/dev/video" << cameraIdx;
    g_object_set(G_OBJECT(v4l2src), "device", cameraDev.str().c_str(), nullptr);

    gst_bin_add(GST_BIN(pipeline), v4l2src);

    // nvjpegdec: hardware JPEG decode.
    GstElement * nvjpegdec = gst_element_factory_make("nvjpegdec", nullptr);
    if (!nvjpegdec)
    {
        NVXIO_PRINT("Cannot create nvjpegdec element");
        FinalizeGstPipeLine();
        return false;
    }

    // BUGFIX: "max-errors" is a gint property and "idct-method" is an enum
    // (collected as int), so plain int values must go through the g_object_set
    // varargs. The previous G_GINT64_CONSTANT() arguments pushed 64-bit values
    // where 32-bit ones are collected — undefined varargs behaviour.
    //   max-errors = -1: keep decoding after corrupt frames, never error out.
    //   idct-method = 2: float IDCT (same value as before).
    g_object_set(G_OBJECT(nvjpegdec), "max-errors", -1, "idct-method", 2, nullptr);

    gst_bin_add(GST_BIN(pipeline), nvjpegdec);

    // nvvidconv: convert/copy frames into NVMM (device) memory for the sink.
    GstElement * nvvidconv = gst_element_factory_make("nvvidconv", nullptr);
    if (!nvvidconv)
    {
        NVXIO_PRINT("Cannot create nvvidconv");
        FinalizeGstPipeLine();
        return false;
    }

    gst_bin_add(GST_BIN(pipeline), nvvidconv);

    // nvvideosink: EGLStream producer tied to context.display / context.stream.
    GstElement * nvvideosink = gst_element_factory_make("nvvideosink", nullptr);
    if (!nvvideosink)
    {
        NVXIO_PRINT("Cannot create nvvideosink element");
        FinalizeGstPipeLine();
        return false;
    }

    // "outcaps" must describe NVMM NV12 (or I420) even though the sink
    // advertises RGBA output.
    std::ostringstream stream;
    stream << "video/x-raw(memory:NVMM), width=" << configuration.frameWidth
           << ", height=" << configuration.frameHeight
           << ", format=(string)NV12, framerate=" << configuration.fps << "/1;";

    std::unique_ptr<GstCaps, GStreamerObjectDeleter> caps_out(gst_caps_from_string(stream.str().c_str()));
    if (!caps_out) // previously unchecked; a malformed caps string would pass NULL to g_object_set
    {
        NVXIO_PRINT("Failed to create outcaps for nvvideosink");
        FinalizeGstPipeLine();
        return false;
    }

    // sync/qos disabled and no lateness limit: deliver every frame as fast as
    // possible instead of dropping for deadline misses.
    // NOTE(review): if playback turns choppy after running for a while, suspect
    // frames backing up between the EGLStream producer and a slower consumer —
    // verify fifoMode (and the sink's fifo depth) against the consumer's
    // acquire rate rather than the pipeline itself.
    g_object_set(G_OBJECT(nvvideosink),
                 "display", context.display,
                 "stream", context.stream,
                 "fifo", fifoMode,
                 "max-lateness", G_GINT64_CONSTANT(-1),  // gint64: unlimited
                 "throttle-time", G_GUINT64_CONSTANT(0), // guint64: no throttle
                 "render-delay", G_GUINT64_CONSTANT(0),  // guint64
                 "qos", FALSE,
                 "sync", FALSE,
                 "async", TRUE,
                 nullptr);
    g_object_set(G_OBJECT(nvvideosink), "outcaps", caps_out.get(), nullptr);

    gst_bin_add(GST_BIN(pipeline), nvvideosink);

    // Caps for v4l2src -> nvjpegdec: MJPEG, any size up to the configured one
    // (ranges let the driver pick the nearest supported mode).
    stream.str(std::string());
    stream << "image/jpeg, width=[1," << configuration.frameWidth
           << "], height=[1," << configuration.frameHeight
           << "], framerate=" << configuration.fps << "/1;";

    std::unique_ptr<GstCaps, GStreamerObjectDeleter> caps_v4l2src(gst_caps_from_string(stream.str().c_str()));
    if (!caps_v4l2src)
    {
        NVXIO_PRINT("Failed to create caps v4lsrc");
        FinalizeGstPipeLine();
        return false;
    }

    if (!gst_element_link_filtered(v4l2src, nvjpegdec, caps_v4l2src.get()))
    {
        NVXIO_PRINT("GStreamer: cannot link v4l2src -> nvjpegdec using caps");
        FinalizeGstPipeLine();
        return false;
    }

    // Caps for nvjpegdec -> nvvidconv: decoded planar I420 in system memory.
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> caps_nvjpegdec(gst_caps_from_string("video/x-raw, format=(string)I420"));
    if (!caps_nvjpegdec)
    {
        NVXIO_PRINT("Failed to create caps nvjpegdec");
        FinalizeGstPipeLine();
        return false;
    }

    if (!gst_element_link_filtered(nvjpegdec, nvvidconv, caps_nvjpegdec.get()))
    {
        NVXIO_PRINT("GStreamer: cannot link nvjpegdec -> nvvidconv using caps");
        FinalizeGstPipeLine();
        return false;
    }

    // Caps for nvvidconv -> nvvideosink: NV12 in NVMM memory, matching outcaps.
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> caps_nvvidconv(gst_caps_from_string("video/x-raw(memory:NVMM), format=(string)NV12"));
    if (!caps_nvvidconv)
    {
        NVXIO_PRINT("Failed to create caps nvvidconv");
        FinalizeGstPipeLine();
        return false;
    }

    if (!gst_element_link_filtered(nvvidconv, nvvideosink, caps_nvvidconv.get()))
    {
        NVXIO_PRINT("GStreamer: cannot link nvvidconv -> nvvideosink using caps");
        FinalizeGstPipeLine();
        return false;
    }

    // Run without a clock so frames flow as fast as the camera delivers them,
    // ignoring the system clock entirely.
    gst_pipeline_use_clock(pipeline, nullptr);

    status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    handleGStreamerMessages();
    if (status == GST_STATE_CHANGE_ASYNC)
    {
        // Block until the transition completes (camera start-up can be slow).
        status = gst_element_get_state(GST_ELEMENT(pipeline), nullptr, nullptr, GST_CLOCK_TIME_NONE);
    }
    if (status == GST_STATE_CHANGE_FAILURE)
    {
        NVXIO_PRINT("GStreamer: unable to start playback");
        FinalizeGstPipeLine();
        return false;
    }

    // Read the actually negotiated width/height/fps back into `configuration`.
    if (!updateConfiguration(nvvideosink, v4l2src, configuration))
    {
        FinalizeGstPipeLine();
        return false;
    }

    end = false;
    return true;
}
} // namespace nvidiaio