First, let me clear my throat.
$cat /etc/nv_tegra_release
# R28 (release), REVISION: 1.0, GCID: 9379712, BOARD: t186ref, EABI: aarch64, DATE: Thu Jul 20 07:59:31 UTC 2017
7f8fb47183cbd3d4cacc5eb50fc7869aacdefe40 */usr/lib/aarch64-linux-gnu/libv4l/plugins/libv4l2_nvvideocodec.so
c33af0b5f9a88ab8ff62b31c16c635aa2744902d */usr/lib/aarch64-linux-gnu/libv4l/plugins/libv4l2_nvvidconv.so
678e42e35687f11c9b5b602a539a56cc3de1188f */usr/lib/aarch64-linux-gnu/tegra/libnvomx.so
ccecdd04fb3ef95308a25a96ccf6670310400ba1 */usr/lib/aarch64-linux-gnu/tegra/libnveglstreamproducer.so
1653e5d266b7b030dc882d226b03f0c0157b4f3a */usr/lib/aarch64-linux-gnu/tegra/libnvtx_helper.so
fc646aa1d227d10ee3c338373e721ffade0b36d5 */usr/lib/aarch64-linux-gnu/tegra/libnvddk_vic.so
c255aeebc742731b2a1e796816178b3b8ffa7dea */usr/lib/aarch64-linux-gnu/tegra/libglx.so
6cc4acdeee4908f9c4ff4f4dcfedef71be190354 */usr/lib/aarch64-linux-gnu/tegra/libargus_socketserver.so
5b66cf6e49430ca8918835a8ea287b51c8b3b941 */usr/lib/aarch64-linux-gnu/tegra/libnvmmlite.so
9f8b91a4b08d160d5f473b43e9874a5c24c66e88 */usr/lib/aarch64-linux-gnu/tegra/libnvddk_2d_v2.so
a3a2931e3fe2e5d40f6783d7d6ce3639ac88f0ed */usr/lib/aarch64-linux-gnu/tegra/libnvwinsys.so
7259ca958e6e595bfd3f50b914b9f51b12419ba8 */usr/lib/aarch64-linux-gnu/tegra/libargus.so
1a599b8a1f7e5abbbc8b3e51d3f48bcc5124f51a */usr/lib/aarch64-linux-gnu/tegra/libnvmm.so
97cf051cc8ac5aecf158bc3c85feb83b89fefdd9 */usr/lib/aarch64-linux-gnu/tegra/libnvjpeg.so
3c48f3b81b1b7b333df5b261dc920736194e1f95 */usr/lib/aarch64-linux-gnu/tegra/libnvexif.so
cbe774108f73fe79b48ae7357e08e9413344dabc */usr/lib/aarch64-linux-gnu/tegra/libnvdc.so
8a9db15bf96f8c89967e96f1f55a116f0ad8853c */usr/lib/aarch64-linux-gnu/tegra/libnvavp.so
429e26afbfaf88b8ace2e0fa35207a2499cec9e1 */usr/lib/aarch64-linux-gnu/tegra/libnvtestresults.so
3f48c67f1c1650df51586078d1d8d3bd3740b025 */usr/lib/aarch64-linux-gnu/tegra/libargus_socketclient.so
4afd017ec9e9a16138da168c29a70f6bef4bd868 */usr/lib/aarch64-linux-gnu/tegra/libnvmm_utils.so
4336787797e9727d6fead71a027a5b5a10105a21 */usr/lib/aarch64-linux-gnu/tegra/libnvfnet.so
0eeac3a25c46c2095db087f69fcb2da8e7c51855 */usr/lib/aarch64-linux-gnu/tegra/libnvll.so
1f2fc2642f5cd373b5db26921d85014b6abf840d */usr/lib/aarch64-linux-gnu/tegra/libnvcameratools.so
d5c04359d52a3d594fa091a18653426262a7197a */usr/lib/aarch64-linux-gnu/tegra/libnvapputil.so
8f47a0da6cc1e75b4ebd2ae33f83503249b6d5ee */usr/lib/aarch64-linux-gnu/tegra/libnveglstream_camconsumer.so
0667aa3962b8eda69023ac17830a2efc016cad4f */usr/lib/aarch64-linux-gnu/tegra/libnvrm.so
8653db228561f903a452d4cab1cc632ca3315881 */usr/lib/aarch64-linux-gnu/tegra/libnvcam_imageencoder.so
fd427c65f562573a5826fd29fc5410f0290ad52f */usr/lib/aarch64-linux-gnu/tegra/libtegrav4l2.so
183da5b0281e0ee120545e2eaa99f56a0bb89d02 */usr/lib/aarch64-linux-gnu/tegra/libnvparser.so
beb1786a7d0e9464e98bdf3dda5d11c994069b8a */usr/lib/aarch64-linux-gnu/tegra/libnvtvmr.so
07c1e569a35cb39c77728ecbb7212f6339c8fd68 */usr/lib/aarch64-linux-gnu/tegra/libnvrm_gpu.so
ae214a66a4fe6ef66c15ea40a0a03dadb8055f72 */usr/lib/aarch64-linux-gnu/tegra/libnvtnr.so
ad4f99d3c3a6daa5829678a0defd1b2345b1c3b1 */usr/lib/aarch64-linux-gnu/tegra/libnvcamerautils.so
cb725c103def5f9c8f0e25205b3b39eab4642721 */usr/lib/aarch64-linux-gnu/tegra/libnvidia-egl-wayland.so
9673606cfb805c3e1563fcdf1256cfb6c95fecc9 */usr/lib/aarch64-linux-gnu/tegra/libnvfnetstoredefog.so
f213ecec058176a1830e0621907f28176f57ff7e */usr/lib/aarch64-linux-gnu/tegra/libnvodm_imager.so
a54283f9ed83ef15d6bd433d97e6a53e73176219 */usr/lib/aarch64-linux-gnu/tegra/libnvmmlite_utils.so
532626aba510a1b8d586c04b23011ad7f48ff351 */usr/lib/aarch64-linux-gnu/tegra/libnvcolorutil.so
cc9f715b1fd1b9719ff845f1b9c07c3f3162fe2e */usr/lib/aarch64-linux-gnu/tegra/libnvmmlite_video.so
24237d67a163325fa3bf5194c5934bb944f71b58 */usr/lib/aarch64-linux-gnu/tegra/libnvmmlite_image.so
24a0ba50281234b4fe3205032a39eb88b89d2fd5 */usr/lib/aarch64-linux-gnu/tegra/libnvmm_contentpipe.so
cde93d596b8976787dc3f5f5bff1a3ec49dc8a13 */usr/lib/aarch64-linux-gnu/tegra/libnvimp.so
340c90216c7662a2ae6df0d9f5db961c9c7b3752 */usr/lib/aarch64-linux-gnu/tegra/libnvos.so
331481e2895586a29de020f40a1a288e2fc8d58b */usr/lib/aarch64-linux-gnu/tegra/libnvrm_graphics.so
56d4dd97a4073b605a4c906caaee0224affda2a4 */usr/lib/aarch64-linux-gnu/tegra/libnvmedia.so
7c627627fbc26a280c5f395ba04ab01891f4341f */usr/lib/aarch64-linux-gnu/tegra/libnvfnetstorehdfx.so
0c3deb2a856368700fcc226110238e3299054b1b */usr/lib/aarch64-linux-gnu/tegra/libnvomxilclient.so
4f20b8cc95d69177ce108423cf5bac116e58a8c9 */usr/lib/aarch64-linux-gnu/tegra/libnvcamlog.so
1afa41bd35fc74e4f978875c0a6db0a8997201e5 */usr/lib/aarch64-linux-gnu/tegra/libnvmm_parser.so
d95121ac07e17d56500763b74d2fb29159fea85d */usr/lib/aarch64-linux-gnu/tegra/libscf.so
65acd5f0844c6dc12b71cf6fa46baf0d6c8e9a70 */usr/lib/aarch64-linux-gnu/tegra/libnvosd.so
3ef04ac64cac4cbe8f5c25414d2b71373d3a99a0 */usr/lib/xorg/modules/drivers/nvidia_drv.so
c255aeebc742731b2a1e796816178b3b8ffa7dea */usr/lib/xorg/modules/extensions/libglx.so
In short: “What takes RGB data as input and outputs H264 video, using hardware encoding?”
Next, I would like to list a few concepts I have about encoding video and see if the kind community would mind confirming or denying.
- There are four main ways to encode H264 on the jetson.
- Tegra Multimedia API - using Video4Linux2 as the data source
- Gstreamer 1.0 command line, specifying input and output files and capability/format strings
- Gstreamer 1.0 appsink/appsrc, using C++ code to interface with gstreamer
- OpenMAX - interfacing directly with the OpenMAX hardware driver and delivering data.
Are all of the above accurate?
If you feel I have a grasp on my surroundings, please help me find a solution to this issue:
- I am using c++ to write my program.
- I have a pointer to a data block of known length containing formatted RGB data.
- I would like to pass this pointer to any of the above methods of encoding.
- The source data at the pointer can be a single frame, or multiple frames in sequence
- The output would not be returned to the program, but written to a file.
For argument sake, lets say the RGB data is ARGB8888, meaning 4 bytes to each pixel. Each byte represents a single color channel (0-255). This could easily be any other format that will work as input for the encoder.
The most promising approach I have found is gstreamer appsrc reading data from a C program buffer, but I am very unclear on how to implement that. I have checked the documentation on that (not nvidia’s realm) and fail to see what I am missing. It frankly starts talking about things I don’t understand.
I would like to use the Tegra MultiMedia API (high level) but am unsure how to eliminate the v4L2 as input, and provide my own data. I see no function in the docs that would lead me to believe I can insert data in this way.
For clarity I am able to use gstreamer on the command line with the omxh264enc module to produce a correct h264 video, using a different test file as input.
Would any experienced heads care to clear up some of my misunderstandings?
Additionally, this is just a test case. I will work from any solution I find into delivering source data frame-by-frame to the H264 encoder.
– Below is some code I am working with to achieve my solution, but am failing :'(
– These are snippets of a larger program i am writing.
-- Unsure how to get rid of V4L2, as it seems that's all the Tegra MM API understands.
int colorWidth = 1280;
int colorHeight = 720;
context_t encoderctx;
int ret = 0;
int error = 0;
bool eos = false;
//Give it what it needs to breathe, man
memset(&encoderctx, 0, sizeof(context_t));
encoderctx.in_file_path = "sourceframes.data";
encoderctx.width = colorWidth;
encoderctx.height = colorHeight;
//V4L2_ARGB-8888 format... (Dont ask why its BA24...)
unsigned int formatInput = (
( __u32)('B') |
( (__u32)('A') << 8 ) |
( (__u32)('2') << 16) |
( (__u32)('4') << 24) );
encoderctx.encoder_pixfmt = formatInput;
//encoderctc.encoder_pixfmt = V4L2_PIX_FMT_H264
encoderctx.out_file_path = "video.h264";
encoderctx.fps_n = 1;
encoderctx.fps_d = 1;
encoderctx.bitrate = 4 * 1024 * 1024;
encoderctx.in_file = new ifstream(encoderctx.in_file_path);
encoderctx.out_file = new ofstream(encoderctx.out_file_path);
encoderctx.enc = NvVideoEncoder::createVideoEncoder("enc0");
// Set encoder capture plane format
ret = encoderctx.enc->setCapturePlaneFormat( encoderctx.encoder_pixfmt,
encoderctx.width, encoderctx.height,
2 * 1024 * 1024 );
// Set encoder output plane format
unsigned int outputFormat = (
( __u32)('Y') |
( (__u32)('M') << 8 ) |
( (__u32)('1') << 16) |
( (__u32)('2') << 24) );
ret = encoderctx.enc->setOutputPlaneFormat( outputFormat ,
encoderctx.width, encoderctx.height );
ret = encoderctx.enc->setBitrate(encoderctx.bitrate);
//if (encoderctx.encoder_pixfmt == V4L2_PIX_FMT_H264) {
// ret = encoderctx.enc->setProfile(V4L2_MPEG_VIDEO_H264_PROFILE_HIGH);
//}
//else {
ret = encoderctx.enc->setProfile(V4L2_MPEG_VIDEO_H265_PROFILE_MAIN);
//}
//if (encoderctx.encoder_pixfmt == V4L2_PIX_FMT_H264) {
// ret = encoderctx.enc->setLevel(V4L2_MPEG_VIDEO_H264_LEVEL_5_0);
//}
ret = encoderctx.enc->setFrameRate(encoderctx.fps_n, encoderctx.fps_d);
// Query, Export and Map the output plane buffers so that we can read
// raw data into the buffers
ret = encoderctx.enc->output_plane.setupPlane(V4L2_MEMORY_MMAP, 10, true, false);
// Query, Export and Map the output plane buffers so that we can write
// encoded data from the buffers
ret = encoderctx.enc->capture_plane.setupPlane(V4L2_MEMORY_MMAP, 10, true, false);
// output plane STREAMON
ret = encoderctx.enc->output_plane.setStreamStatus(true);
// capture plane STREAMON
ret = encoderctx.enc->capture_plane.setStreamStatus(true);
encoderctx.enc->capture_plane.
setDQThreadCallback(encoder_capture_plane_dq_callback);
// startDQThread starts a thread internally which calls the
// encoder_capture_plane_dq_callback whenever a buffer is dequeued
// on the plane
encoderctx.enc->capture_plane.startDQThread(&encoderctx);
// Enqueue all the empty capture plane buffers
for (uint32_t i = 0; i < encoderctx.enc->capture_plane.getNumBuffers(); i++) {
struct v4l2_buffer v4l2_buf;
struct v4l2_plane planes[MAX_PLANES];
memset(&v4l2_buf, 0, sizeof(v4l2_buf));
memset(planes, 0, MAX_PLANES * sizeof(struct v4l2_plane));
v4l2_buf.index = i;
v4l2_buf.m.planes = planes;
///! Inject data here?
ret = encoderctx.enc->capture_plane.qBuffer(v4l2_buf, NULL);
if (ret < 0) {
printf("Error while queueing buffer at capture plane\n");
abort(&encoderctx);
goto cleanup;
}
}
// Read video frame and queue all the output plane buffers
for ( uint32_t i = 0;
i < encoderctx.enc->output_plane.getNumBuffers() && !encoderctx.got_error;
i++ ) {
struct v4l2_buffer v4l2_buf;
struct v4l2_plane planes[MAX_PLANES];
NvBuffer *buffer = encoderctx.enc->output_plane.getNthBuffer(i);
memset(&v4l2_buf, 0, sizeof(v4l2_buf));
memset(planes, 0, MAX_PLANES * sizeof(struct v4l2_plane));
v4l2_buf.index = i;
v4l2_buf.m.planes = planes;
if (read_video_frame(encoderctx.in_file, *buffer) < 0) {
printf("Could not read complete frame from input file\n");
v4l2_buf.m.planes[0].bytesused = 0;
}
ret = encoderctx.enc->output_plane.qBuffer(v4l2_buf, NULL);
if (ret < 0) {
printf("Error while queueing buffer at output plane\n");
abort(&encoderctx);
goto cleanup;
}
if (v4l2_buf.m.planes[0].bytesused == 0) {
printf("File read complete.\n");
eos = true;
break;
}
}
-- GStreamer pipeline with appsrc, looks simple, but it's not to me. :''(
app = new _App();

// BUG FIX: app->pipeline was NULL-checked below but never created.
// (If _App's constructor already builds the pipeline, drop this line.)
app->pipeline = gst_pipeline_new("encode-pipeline");

app->src = (GstAppSrc *) gst_element_factory_make("appsrc", "source");

// BUG FIX: the element is "omxh264enc" — "omx264enc" does not exist, so
// gst_element_factory_make() returned NULL and the function bailed out.
app->encoder = gst_element_factory_make("omxh264enc", "encoder");

// NOTE(review): this element is a filesink, not an appsink — the cast only
// satisfies the field's declared type; no GstAppSink API is used on it.
app->sink = (GstAppSink *) gst_element_factory_make("filesink", "sink");

if (!app->pipeline || !app->src || !app->encoder || !app->sink)
    return;

app->bus = gst_pipeline_get_bus(GST_PIPELINE(app->pipeline));
g_assert(app->bus);
gst_bus_add_watch(app->bus, (GstBusFunc) BusMessage, this);

gst_bin_add_many(GST_BIN(app->pipeline),
                 (GstElement *) app->src, app->encoder, app->sink, NULL);

// BUG FIX: the elements were added to the bin but never linked, so no data
// could flow from appsrc through the encoder into the file.
if (!gst_element_link_many((GstElement *) app->src, app->encoder, app->sink, NULL))
    return;

// SETUP ELEMENTS
g_object_set(app->src,
             "stream-type", 0,          // GST_APP_STREAM_TYPE_STREAM
             "format", GST_FORMAT_TIME, // recommended for live push sources
             "is-live", true,
             "block", true,
             NULL);

// BUG FIX: appsrc pushes RAW frames INTO the encoder, so its caps must
// describe the raw format; "video/x-h264" is what comes OUT of the encoder,
// and advertising it on the source made the src->encoder link unnegotiable.
// NOTE(review): omxh264enc may not accept RGBA directly — insert
// nvvidconv/videoconvert before the encoder if negotiation fails; confirm
// on the target board.
g_object_set(app->src, "caps",
             gst_caps_new_simple("video/x-raw",
                                 "format", G_TYPE_STRING, "RGBA",
                                 "width", G_TYPE_INT, 1280,
                                 "height", G_TYPE_INT, 720,
                                 "framerate", GST_TYPE_FRACTION, 1, 1,
                                 NULL),
             NULL);

g_signal_connect(app->src, "need-data", G_CALLBACK(StartFeed), this);
g_signal_connect(app->src, "enough-data", G_CALLBACK(StopFeed), this);

g_object_set(app->sink,
             "location", GenerateFileName().c_str(),
             "buffer-mode", 0,
             NULL);

// PLAY
GstStateChangeReturn ret = gst_element_set_state(app->pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE)
{
    gst_object_unref(app->pipeline);
    return;
}
I would be happy to paypal anyone some beer money for an outright solution. This may be as simple as “have you seen the blurpaderp method on the doodanger class in the tegraMMAPI?”
Thanks for reading my first nvidia post. :^)