Hello everyone!
I have encountered a confusing problem on jetson-TX1.It works well(real-time decoding 25fps),when i run gst-launch-1.0 command on terminal.
nvidia@tegra-ubuntu:~$ gst-launch-1.0 rtspsrc location=rtsp://admin:admin12345@192.168.0.64:554/Streaming/Channels/101?transportmode=unicastprofile=Profile_1 protocols=tcp latency=0 ! decodebin ! videoconvert ! xvimagesink
Setting pipeline to PAUSED ...
Pipeline is live and does not need PREROLL ...
Progress: (open) Opening Stream
Progress: (connect) Connecting to rtsp://admin:admin12345@192.168.0.64:554/Streaming/Channels/101?transportmode=unicastprofile=Profile_1
Progress: (open) Retrieving server options
Progress: (open) Retrieving media info
Progress: (request) SETUP stream 0
Progress: (request) SETUP stream 1
Progress: (open) Opened Stream
Setting pipeline to PLAYING ...
New clock: GstSystemClock
Progress: (request) Sending PLAY request
Progress: (request) Sending PLAY request
Progress: (request) Sent PLAY request
Inside NvxLiteH264DecoderLowLatencyInitNvxLiteH264DecoderLowLatencyInit set DPB and MjstreamingInside NvxLiteH265DecoderLowLatencyInitNvxLiteH265DecoderLowLatencyInit set DPB and MjstreamingNvMMLiteOpen : Block : BlockType = 261
TVMR: NvMMLiteTVMRDecBlockOpen: 7818: NvMMLiteBlockOpen
NvMMLiteBlockCreate : Block : BlockType = 261
TVMR: cbBeginSequence: 1190: BeginSequence 1920x1088, bVPR = 0
TVMR: LowCorner Frequency = 180000
TVMR: cbBeginSequence: 1583: DecodeBuffers = 5, pnvsi->eCodec = 4, codec = 0
TVMR: cbBeginSequence: 1654: Display Resolution : (1920x1080)
TVMR: cbBeginSequence: 1655: Display Aspect Ratio : (1920x1080)
TVMR: cbBeginSequence: 1697: ColorFormat : 5
TVMR: cbBeginSequence:1702 ColorSpace = NvColorSpace_YCbCr709_ER
TVMR: cbBeginSequence: 1839: SurfaceLayout = 3
TVMR: cbBeginSequence: 1936: NumOfSurfaces = 9, InteraceStream = 0, InterlaceEnabled = 0, bSecure = 0, MVC = 0 Semiplanar = 1, bReinit = 1, BitDepthForSurface = 8 LumaBitDepth = 8, ChromaBitDepth = 8, ChromaFormat = 5
TVMR: cbBeginSequence: 1938: BeginSequence ColorPrimaries = 1, TransferCharacteristics = 1, MatrixCoefficients = 1
Allocating new output: 1920x1088 (x 9), ThumbnailMode = 0
TVMR: FrameRate = 25
TVMR: NVDEC LowCorner Freq = (150000 * 1024)
---> TVMR: Video-conferencing detected !!!!!!!!!
TVMR: FrameRate = 25.000000
TVMR: FrameRate = 25.000000
TVMR: FrameRate = 25.000000
TVMR: FrameRate = 25.000000
TVMR: FrameRate = 25.000000
TVMR: FrameRate = 25.000000
TVMR: FrameRate = 25.000000
TVMR: FrameRate = 25.000000
^Chandling interrupt.
Interrupt: Stopping pipeline ...
Execution ended after 0:00:39.169351505
Setting pipeline to PAUSED ...
Setting pipeline to READY ...
TVMR: cbDisplayPicture: 3889: Retunred NULL Frame Buffer
TVMR: TVMRFrameStatusReporting: 6266: Closing TVMR Frame Status Thread -------------
TVMR: TVMRVPRFloorSizeSettingThread: 6084: Closing TVMRVPRFloorSizeSettingThread -------------
TVMR: TVMRFrameDelivery: 6116: Closing TVMR Frame Delivery Thread -------------
TVMR: NvMMLiteTVMRDecBlockClose: 8018: Done
Setting pipeline to NULL ...
Freeing pipeline ...
But,When i tested the follow code which used appsink callback and the result is that video is not real-time and the delaying is accumulating over time.The result is unacceptable in my real-time project.This is the part code.(gstIPCamera.cpp and gst-camera.cpp)
/*
* Author:Chen
* Date:2017/05/17
*/
#include "gstIPCamera.h"
#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <stdio.h>
#include <sstream>
#include <unistd.h>
#include <string.h>
#include <stdlib.h>
#include <QMutex>
#include <QWaitCondition>
#include <opencv2/opencv.hpp>
using namespace cv;
// constructor
gstIPCamera::gstIPCamera()
{
mAppSink = NULL;
mBus = NULL;
mPipeline = NULL;
mWaitEvent = new QWaitCondition();
mWaitMutex = new QMutex();
mRingMutex = new QMutex();
mLatestRingbuffer = 0;
mLatestRetrieved = false;
for( uint32_t n=0; n < NUM_RINGBUFFERS; n++ )
{
mRingbufferCPU[n] = NULL;
}
}
// destructor
gstIPCamera::~gstIPCamera()
{
if(NULL != mRingbufferCPU[0]){
for( uint32_t n=0; n < NUM_RINGBUFFERS; n++ )
{
free(mRingbufferCPU[n]);
mRingbufferCPU[n] = NULL;
}
}
}
// onEOS
void gstIPCamera::onEOS(_GstAppSink* sink, void* user_data)
{
printf( "gstreamer decoder onEOS\n");
}
// onPreroll
GstFlowReturn gstIPCamera::onPreroll(_GstAppSink* sink, void* user_data)
{
printf( "gstreamer decoder onPreroll\n");
return GST_FLOW_OK;
}
// onBuffer
GstFlowReturn gstIPCamera::onBuffer(_GstAppSink* sink, void* user_data)
{
printf( "gstreamer decoder onBuffer\n");
if( !user_data )
return GST_FLOW_OK;
gstIPCamera* dec = (gstIPCamera*)user_data;
dec->checkBuffer();
//dec->checkMsgBus();
return GST_FLOW_OK;
}
// Capture
bool gstIPCamera::Capture( void ** cpu, unsigned long timeout )
{
mWaitMutex->lock();
const bool wait_result = mWaitEvent->wait(mWaitMutex, timeout);
mWaitMutex->unlock();
if( !wait_result )
{
printf("Failed to wait result!\n");
return false;
}
mRingMutex->lock();
const uint32_t latest = mLatestRingbuffer;
const bool retrieved = mLatestRetrieved;
mLatestRetrieved = true;
mRingMutex->unlock();
// skip if it was already retrieved
if( retrieved )
return false;
if( cpu != NULL )
*cpu = mRingbufferCPU[latest];
return true;
}
#define release_return { gst_sample_unref(gstSample); return; }
// checkBuffer
void gstIPCamera::checkBuffer()
{
printf( "Get IP camera frame data!\n");
if( !mAppSink )
return;
// block waiting for the buffer
GstSample* gstSample = gst_app_sink_pull_sample(mAppSink);
if( !gstSample )
{
printf( "gstreamer camera -- gst_app_sink_pull_sample() returned NULL...\n");
return;
}
GstBuffer* gstBuffer = gst_sample_get_buffer(gstSample);
if( !gstBuffer )
{
printf( "gstreamer camera -- gst_sample_get_buffer() returned NULL...\n");
return;
}
// retrieve
GstMapInfo map;
if( !gst_buffer_map(gstBuffer, &map, GST_MAP_READ) )
{
printf( "gstreamer camera -- gst_buffer_map() failed...\n");
return;
}
//gst_util_dump_mem(map.data, map.size);
void* gstData = map.data; //GST_BUFFER_DATA(gstBuffer);
const uint32_t gstSize = map.size; //GST_BUFFER_SIZE(gstBuffer);
if( !gstData )
{
printf( "gstreamer camera -- gst_buffer had NULL data pointer...\n");
release_return;
}
// retrieve caps
GstCaps* gstCaps = gst_sample_get_caps(gstSample);
if( !gstCaps )
{
printf( "gstreamer camera -- gst_buffer had NULL caps...\n");
release_return;
}
GstStructure* gstCapsStruct = gst_caps_get_structure(gstCaps, 0);
if( !gstCapsStruct )
{
printf( "gstreamer camera -- gst_caps had NULL structure...\n");
release_return;
}
// get width & height of the buffer
int width = 0;
int height = 0;
if( !gst_structure_get_int(gstCapsStruct, "width", &width) ||
!gst_structure_get_int(gstCapsStruct, "height", &height) )
{
printf( "gstreamer camera -- gst_caps missing width/height...\n");
release_return;
}
if( width < 1 || height < 1 )
release_return;
mWidth = width;
mHeight = height;
mDepth = (gstSize * 8) / (width * height);
mSize = gstSize;
mPitch = gstSize / height;
cv::Mat frame(Size(width,height),CV_8UC3,(char*)gstData,Mat::AUTO_STEP);
if(!frame.empty())
{
cv::imshow("frame111",frame);
cv::waitKey(1);
}
printf( "gstreamer camera recieved %ix%i frame (%u bytes, %u bpp)\n", width, height, gstSize, mDepth);
//make sure mFrameBuffer is allocated
if( NULL == mRingbufferCPU[0])
{
for( uint32_t n=0; n < NUM_RINGBUFFERS; ++n ){
if(!(mRingbufferCPU[n] = (void*)malloc(gstSize)))
printf( "gstreamer camera -- failed to allocate mRingbufferCPU %u (size=%u)\n", n, gstSize);
}
printf( "gstreamer camera -- allocated %u mFrameBuffer, %u bytes each\n",NUM_RINGBUFFERS, gstSize);
}
// copy to next ringbuffer
const uint32_t nextRingbuffer = (mLatestRingbuffer + 1) % NUM_RINGBUFFERS;
printf( "gstreamer camera -- using ringbuffer #%u for next frame\n", nextRingbuffer);
memcpy(mRingbufferCPU[nextRingbuffer], gstData, gstSize);
gst_buffer_unmap(gstBuffer, &map);
//gst_buffer_unref(gstBuffer);
gst_sample_unref(gstSample);
usleep(100000);
// update and signal sleeping threads
mRingMutex->lock();
mLatestRingbuffer = nextRingbuffer;
mLatestRetrieved = false;
mRingMutex->unlock();
mWaitEvent->wakeAll();
}
// buildLaunchStr
bool gstIPCamera::buildLaunchStr()
{
std::ostringstream ss;
//open ip camera
char ipRtsp[260];
if(mCamType == 0)
{
sprintf(ipRtsp,"rtspsrc location=rtsp://%s:%s@%s:%s/Streaming/Channels/101?transportmode=unicast&profile=Profile_1 !",mUserName,mPassword,mIP,mPorts);
}
else if(mCamType == 1)
{
sprintf(ipRtsp,"rtspsrc location=rtsp://%s:%s@%s:%s/cam/realmonitor?channel=1&subtype=0&unicast=true !",mIP,mPorts,mUserName,mPassword);
}
else
{
sprintf(ipRtsp,"rtspsrc location=rtsp://%s:%s/user=admin_password=tlJwpbo6_channel=1_stream=0.sdp?real_stream protocols=tcp latency=0 !",mIP,mPorts);
}
ss << ipRtsp;
ss << " rtpjitterbuffer ! rtph264depay ! h264parse ! omxh264dec !";
ss << " nvvidconv !";
ss << " videoconvert ! ";
ss << " appsink name=sink caps=video/x-raw,format=BGR,fFrameRate=25/1 sync=false";
//ss << " decodebin ! nvvidconv !";
//ss << " videoconvert !";
//ss << " appsink name=sink caps=video/x-raw,width=1920,height=1080,format=BGR,fFrameRate=25/1 sync=false";
mLaunchStr = ss.str();
printf( "gstreamer decoder pipeline string:\n");
printf("%s\n", mLaunchStr.c_str());
return true;
}
// init
bool gstIPCamera::init(char *ip, char *userName,char *password, char *port,int camType)
{
int argc = 0;
//char* argv[] = { "none" };
if( !gst_init_check(&argc, NULL, NULL) )
{
printf( "failed to initialize gstreamer library with gst_init()\n");
return false;
}
mIP = ip;
mUserName = userName;
mPassword = password;
mPorts = port;
mCamType = camType;
GError* err = NULL;
// build pipeline string
if( !buildLaunchStr() )
{
printf( "gstreamer decoder failed to build pipeline string\n");
return false;
}
// launch pipeline
mPipeline = gst_parse_launch(mLaunchStr.c_str(), &err);
if( err != NULL )
{
printf( "gstreamer decoder failed to create pipeline\n");
printf( " (%s)\n", err->message);
g_error_free(err);
return false;
}
GstPipeline* pipeline = GST_PIPELINE(mPipeline);
if( !pipeline )
{
printf( "gstreamer failed to cast GstElement into GstPipeline\n");
return false;
}
// retrieve pipeline bus
/*GstBus**/ mBus = gst_pipeline_get_bus(pipeline);
if( !mBus )
{
printf( "gstreamer failed to retrieve GstBus from pipeline\n");
return false;
}
// add watch for messages (disabled when we poll the bus ourselves, instead of gmainloop)
//gst_bus_add_watch(mBus, (GstBusFunc)gst_message_print, NULL);
// get the appsrc
GstElement* appsinkElement = gst_bin_get_by_name(GST_BIN(pipeline), "sink");
GstAppSink* appsink = GST_APP_SINK(appsinkElement);
if( !appsinkElement || !appsink)
{
printf( "gstreamer failed to retrieve AppSink element from pipeline\n");
return false;
}
mAppSink = appsink;
// setup callbacks
GstAppSinkCallbacks cb;
memset(&cb, 0, sizeof(GstAppSinkCallbacks));
cb.eos = onEOS;
cb.new_preroll = onPreroll;
cb.new_sample = onBuffer;
gst_app_sink_set_callbacks(mAppSink, &cb, (void*)this, NULL);
return true;
}
// Open
bool gstIPCamera::Open(char *ip, char *userName,char *password, char *port,int camType)
{
//init
init(ip, userName,password, port,camType);
// transition pipline to STATE_PLAYING
printf( "gstreamer transitioning pipeline to GST_STATE_PLAYING\n");
const GstStateChangeReturn result = gst_element_set_state(mPipeline, GST_STATE_PLAYING);
if( result == GST_STATE_CHANGE_ASYNC )
{
#if 0
GstMessage* asyncMsg = gst_bus_timed_pop_filtered(mBus, 5 * GST_SECOND,
(GstMessageType)(GST_MESSAGE_ASYNC_DONE|GST_MESSAGE_ERROR));
if( asyncMsg != NULL )
{
gst_message_print(mBus, asyncMsg, this);
gst_message_unref(asyncMsg);
}
else
printf( "gstreamer NULL message after transitioning pipeline to PLAYING...\n");
#endif
}
else if( result != GST_STATE_CHANGE_SUCCESS )
{
printf( "gstreamer failed to set pipeline state to PLAYING (error %u)\n", result);
return false;
}
//checkMsgBus();
usleep(100*1000);
//checkMsgBus();
return true;
}
// Close
bool gstIPCamera::Close()
{
// stop pipeline
printf( "gstreamer transitioning pipeline to GST_STATE_NULL\n");
const GstStateChangeReturn result = gst_element_set_state(mPipeline, GST_STATE_NULL);
if( result != GST_STATE_CHANGE_SUCCESS )
printf( "gstreamer failed to set pipeline state to PLAYING (error %u)\n", result);
usleep(250*1000);
return true;
}
/*
* inference-decode
*/
#include "gstIPCamera.h"
#include <stdio.h>
#include <signal.h>
#include <unistd.h>
#include <opencv2/opencv.hpp>
using namespace cv;
bool signal_recieved = false;
void sig_handler(int signo)
{
if( signo == SIGINT )
{
printf("received SIGINT\n");
signal_recieved = true;
}
}
int main( int argc, char** argv )
{
printf("gst-ipcamera\n args (%i): ", argc);
for( int i=0; i < argc; i++ )
printf("%i [%s] ", i, argv[i]);
printf("\n");
if( signal(SIGINT, sig_handler) == SIG_ERR )
printf("\ncan't catch SIGINT\n");
/*
* create the ipcamera device
*/
gstIPCamera* ipcamera = new gstIPCamera();
/*
* start streaming
*/
if( !ipcamera->Open("192.168.0.64","admin","admin12345","554",0) )
{
printf("\ngst-ipcamera: failed to open camera for streaming\n");
return 0;
}
printf("\ngst-ipcamera: successfully initialized video device\n");
printf(" width: %u\n", ipcamera->GetWidth());
printf(" height: %u\n", ipcamera->GetHeight());
printf(" depth: %u (bpp)\n", ipcamera->GetPixelDepth());
while(!signal_recieved)
{
//printf("\ngst-ipcamera: start display frame!\n");
void* img = NULL;
// get the latest frame
if( !ipcamera->Capture(&img, 100) )
printf("\ngst-ipcamera: failed to capture frame\n");
else
printf("gst-ipcamera: recieved new frame =0x%p\n", img);
//convert from frame to Mat
if(NULL != img){
cv::Mat frame(Size(ipcamera->GetWidth(),ipcamera->GetHeight()),CV_8UC3,(char*)img,Mat::AUTO_STEP);
if(!frame.empty()){
cv::imshow("frame",frame);
cv::waitKey(1);
}
}
}
printf("\ngst-ipcamera: un-initializing video device\n");
/*
* shutdown the ip camera device
*/
if( ipcamera != NULL )
{
delete ipcamera;
ipcamera = NULL;
}
printf("gst-ipcamera: video device has been un-initialized.\n");
printf("gst-ipcamera: this concludes the test of the video device.\n");
return 0;
}
The decode speed is only 10fps,and the result is unacceptable.
Could you give some suggestions?Thanks for you in advance.