OK — now it’s reporting an “Internal data flow error”. But I still think this should be doable. What am I doing wrong? Thanks!
#include <Argus/Argus.h>
#include <gst/gst.h>
#include <stdlib.h>
#include <unistd.h>
#include "Error.h"
#include "Options.h"
#include "PreviewConsumer.h"
#include "GLContext.h"
#include <dirent.h>
#include <string>
namespace ArgusMesa
{
// Globals
static ArgusSamples::EGLDisplayHolder g_display;
static const Argus::Size2D<uint32_t> PREVIEW_STREAM_SIZE(3840, 2160);
class GstFramework
{
protected:
GstState gst_state;
GstElement *m_pipeline;
public:
GMainLoop *loop;
GstFramework()
: gst_state(GST_STATE_NULL)
, m_pipeline(NULL)
, loop(g_main_loop_new(NULL,FALSE)) {}
~GstFramework()
{
shutdown();
}
/**
* Initialize the GStreamer video encoder pipeline.
* @param[in] eglStream --- The EGLStream to consume frames from.
*/
bool initialize(EGLStreamKHR eglStream)
{
// Initialize GStreamer.
gst_init(NULL, NULL);
loop = g_main_loop_new(NULL, FALSE);
GstBus *bus;
guint bus_watch_id;
GstElement *src, *srcfilter, *nvconv;
GstCaps *caps;
/* Create GStreamer Elements */
m_pipeline = gst_pipeline_new(NULL);
src = gst_element_factory_make("nveglstreamsrc", NULL);
srcfilter = gst_element_factory_make("capsfilter", NULL);
nvconv = gst_element_factory_make("nvvidconv", NULL);
GstElement *overlay = gst_element_factory_make("clockoverlay", NULL);
GstElement *enc = gst_element_factory_make("omxh264enc",NULL);
GstElement *filesink = gst_element_factory_make("splitmuxsink", NULL);
if (!m_pipeline || !src || !srcfilter || !nvconv || !overlay || !enc || !filesink) {
g_printerr("One element count not be created.\n");
return false;
}
/* Set up GStreamer Bus Message Handler */
bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
bus_watch_id = gst_bus_add_watch(bus, bus_callback, loop);
gst_object_unref(bus);
/* Configure GStreamer Element properties */
g_object_set(G_OBJECT(src), "display", g_display.get(), NULL);
g_object_set(G_OBJECT(src), "eglstream", eglStream, NULL);
caps = gst_caps_from_string("video/x-raw(memory:NVMM), width=2592, height=1944, framerate=30/1, format=I420");
g_object_set(srcfilter, "caps", caps, NULL);
gst_caps_unref(caps);
g_object_set(filesink, "location", "%05d.264", NULL);
/* Add all elements to the pipeline */
gst_bin_add_many(GST_BIN(m_pipeline),
src, srcfilter, nvconv, overlay, enc, filesink, NULL);
/* Link elements */
gst_element_link_many(src, srcfilter, nvconv, overlay, enc, filesink, NULL);
printf("GST Intialization done\n");
return true;
}
/**
* Watches for messages on the pipeline bus.
* @param[in] bus --- The GStreamer bus that is being watched.
* @param[in] msg --- The message that was received on the bus.
* @param[in] data --- Any user data to be passed into the callback.
*/
static gboolean bus_callback(GstBus *bus, GstMessage *msg, gpointer data)
{
GMainLoop *lp = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_EOS: {
g_main_loop_quit (lp);
break;
}
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error(msg, &error, &debug);
g_free (debug);
g_printerr("Error: %s\n", error->message);
g_error_free(error);
g_main_loop_quit(lp);
break;
}
default:
break;
}
return TRUE;
}
/**
* Stops the pipeline and frees resources.
*/
void shutdown()
{
if (gst_state == GST_STATE_PLAYING)
stopRecording();
if (m_pipeline)
gst_object_unref(m_pipeline);
m_pipeline = NULL;
}
/**
* Start GStreamer pipeline.
*/
bool startRecording()
{
if (!m_pipeline)
ORIGINATE_ERROR("GStreamer pipeline not initialized");
if (gst_state != GST_STATE_NULL)
ORIGINATE_ERROR("GStreamer pipeline already running");
if (gst_element_set_state(m_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE)
ORIGINATE_ERROR("Failed to start pipeline");
gst_state = GST_STATE_PLAYING;
return true;
}
/**
* Stop GStreamer pipeline.
*/
bool stopRecording()
{
if (!m_pipeline)
ORIGINATE_ERROR("GStreamer pipeline not initialized");
if (gst_state != GST_STATE_PLAYING)
ORIGINATE_ERROR("GStreamer pipeline not running");
if (gst_element_set_state(m_pipeline, GST_STATE_NULL) == GST_STATE_CHANGE_FAILURE)
ORIGINATE_ERROR("Failed to stop pipeline");
gst_state = GST_STATE_NULL;
return true;
}
}; // class GstFramework
struct ExecuteOptions
{
uint32_t cameraIndex;
uint32_t captureSeconds;
};
/**
* Executes the GStreamer pipeline with options.
* @param[in] options --- ExecuteOptions struct with
*/
static bool execute(const ExecuteOptions& options)
{
using namespace Argus;
PROPAGATE_ERROR(g_display.initialize(EGL_DEFAULT_DISPLAY));
/* Create CameraProvider */
UniqueObj<CameraProvider> cameraProvider(CameraProvider::create());
ICameraProvider *iCameraProvider = interface_cast<ICameraProvider>(cameraProvider);
printf("Argus Version: %s\n", iCameraProvider->getVersion().c_str());
if (!iCameraProvider)
ORIGINATE_ERROR("Failed to open CameraProvider");
/* Get/use the first available CameraDevice */
std::vector<CameraDevice*> cameraDevices;
if (iCameraProvider->getCameraDevices(&cameraDevices) != STATUS_OK)
ORIGINATE_ERROR("Failed to get CameraDevices");
if (cameraDevices.size() == 0)
ORIGINATE_ERROR("No CameraDevices available");
if (cameraDevices.size() <= options.cameraIndex)
ORIGINATE_ERROR("Camera %d not available; there are %d cameras",
options.cameraIndex, (unsigned)cameraDevices.size());
CameraDevice *cameraDevice = cameraDevices[options.cameraIndex];
ICameraProperties *iCameraProperties = interface_cast<ICameraProperties>(cameraDevice);
if (!iCameraProperties)
ORIGINATE_ERROR("Failed to get ICameraProperties interface");
/* Create CaptureSession */
UniqueObj<CaptureSession> captureSession(iCameraProvider->createCaptureSession(cameraDevice));
ICaptureSession *iSession = interface_cast<ICaptureSession>(captureSession);
if (!iSession)
ORIGINATE_ERROR("Failed to create CaptureSession");
// Get the sensor mode to determine the video output stream resolution.
std::vector<Argus::SensorMode*> sensorModes;
iCameraProperties->getBasicSensorModes(&sensorModes);
if (sensorModes.size() == 0)
ORIGINATE_ERROR("Failed to get sensor modes");
ISensorMode *iSensorMode = interface_cast<ISensorMode>(sensorModes[0]);
printf("Capture Resolution: %dx%d\n", iSensorMode->getResolution().width(), iSensorMode->getResolution().height());
if (!iSensorMode)
ORIGINATE_ERROR("Failed to get sensor mode interface");
/* Set common output stream settings */
UniqueObj<OutputStreamSettings> streamSettings(iSession->createOutputStreamSettings());
IOutputStreamSettings *iStreamSettings = interface_cast<IOutputStreamSettings>(streamSettings);
if (!iStreamSettings)
ORIGINATE_ERROR("Failed to create OutputStreamSettings");
iStreamSettings->setPixelFormat(PIXEL_FMT_YCbCr_420_888);
iStreamSettings->setEGLDisplay(g_display.get());
/* Create video encoder stream */
iStreamSettings->setResolution(PREVIEW_STREAM_SIZE);
UniqueObj<OutputStream> videoStream(iSession->createOutputStream(streamSettings.get()));
IStream *iVideoStream = interface_cast<IStream>(videoStream);
if (!iVideoStream)
ORIGINATE_ERROR("Failed to create video stream");
/* Create capture Request and enable the streams in the Request */
UniqueObj<Request> request(iSession->createRequest(CAPTURE_INTENT_VIDEO_RECORD));
IRequest *iRequest = interface_cast<IRequest>(request);
if (!iRequest)
ORIGINATE_ERROR("Failed to create Request");
if (iRequest->enableOutputStream(videoStream.get()) != STATUS_OK)
ORIGINATE_ERROR("Failed to enable video stream in Request");
/* Initialize the GStreamer video encoder consumer */
GstFramework gstPipeline;
if (!gstPipeline.initialize(iVideoStream->getEGLStream()))
ORIGINATE_ERROR("Failed to initialize gstPipeline EGLStream consumer");
if (!gstPipeline.startRecording())
ORIGINATE_ERROR("Failed to start video recording");
/* Perform repeat capture requests for requested number of seconds */
if (iSession->repeat(request.get()) != STATUS_OK)
ORIGINATE_ERROR("Failed to start repeat capture requests");
else
g_main_loop_run(gstPipeline.loop);
/* If the GMainLoop stops running, the code below will execute */
iSession->stopRepeat();
/* Wait until all frames have completed before stopping recording. */
/// @todo: Not doing this may cause a deadlock.
iSession->waitForIdle();
/* Stop the pipeline */
if (!gstPipeline.stopRecording())
ORIGINATE_ERROR("Failed to stop pipeline");
gstPipeline.shutdown();
videoStream.reset();
return true;
}
}; // namespace ArgusMesa
int main(int argc, char **argv)
{
printf("Executing: %s\n", basename(argv[0]));
ArgusSamples::Value<uint32_t> cameraIndex(0);
ArgusSamples::Value<uint32_t> captureTime(10);
ArgusMesa::ExecuteOptions executeOptions;
executeOptions.cameraIndex = cameraIndex.get();
executeOptions.captureSeconds = captureTime.get();
if (!ArgusMesa::execute(executeOptions))
return EXIT_FAILURE;
return EXIT_SUCCESS;
}