gstomx source code for GStreamer 1.0

Hi,

I came across the gstomx source files on the TK1 support page, but they appear to be limited to GStreamer 0.10 (and they also do not compile without errors right out of the box).
There's an nvgstplayer-1.0 binary, so I'm guessing the sources should already have been ported to GStreamer 1.0…

Is there a specific reason why the 1.0 sources have not been made available?


What about this one:
http://cgit.freedesktop.org/gstreamer/gst-omx/
?

Can you test multiple simultaneous decoding/encoding with the GStreamer OpenMAX plugins, please?
I would like to know whether the K1's DSP supports this through the OpenMAX API.
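
In case it is useful, here is roughly how I would test it myself: a minimal sketch (untested; the clip paths, the fakesink output and the ten-second run are placeholder assumptions of mine) that starts two omxh264dec pipelines in the same process:

#include <gst/gst.h>

/* Build a simple hardware-decode pipeline for one clip (fakesink discards frames). */
static GstElement *
make_decode_pipeline (const char *path)
{
  gchar *desc = g_strdup_printf (
      "filesrc location=%s ! qtdemux ! h264parse ! omxh264dec ! fakesink sync=false",
      path);
  GstElement *p = gst_parse_launch (desc, NULL);
  g_free (desc);
  return p;
}

int
main (int argc, char *argv[])
{
  gst_init (&argc, &argv);

  /* Two decoders running at the same time; the clip paths are placeholders. */
  GstElement *p1 = make_decode_pipeline ("/tmp/a.mp4");
  GstElement *p2 = make_decode_pipeline ("/tmp/b.mp4");

  gst_element_set_state (p1, GST_STATE_PLAYING);
  gst_element_set_state (p2, GST_STATE_PLAYING);

  /* Let both run for a while, then tear down. */
  g_usleep (10 * G_USEC_PER_SEC);

  gst_element_set_state (p1, GST_STATE_NULL);
  gst_element_set_state (p2, GST_STATE_NULL);
  gst_object_unref (p1);
  gst_object_unref (p2);

  return 0;
}

If both pipelines reach PLAYING and keep running without errors, that would at least show that more than one hardware decoder instance can be open at the same time.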

More complete GStreamer 1.0 support should come in the next release.

Thanks for the update, kulve!
Will it come with sources for nvgstplayer/nvgstcapture?
I know it's not extremely difficult to write myself, but it would definitely save some time :)

#include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv/cv.h"

using namespace std;
using namespace cv;
#define CAPS "video/x-raw,format=RGB,width=1920,pixel-aspect-ratio=1/1"

#define BUFF_SIZE (1024)

/* Feed data into appsrc whenever it asks for more. */
void cb_need_data (GstElement *appsrc, guint unused_size, gpointer user_data)
{
  GstBuffer *buffer;
  gsize size = 0;
  GstFlowReturn ret;
  GstMapInfo map;

  /* NOTE: the file is reopened on every callback, so this always re-reads
   * the first BUFF_SIZE bytes of the file. */
  FILE *fp = fopen ("/home/nvidia/test_gstream/test.mp4", "rb");
  if (fp == NULL) {
    g_print ("could not open input file\n");
    return;
  }

  buffer = gst_buffer_new_and_alloc (BUFF_SIZE);
  GST_BUFFER_OFFSET (buffer) = ftell (fp);

  /* map for writing so fread() can fill the buffer's memory */
  gst_buffer_map (buffer, &map, GST_MAP_WRITE);
  size = fread (map.data, 1, BUFF_SIZE, fp);
  g_print ("read %" G_GSIZE_FORMAT " bytes\n", size);
  gst_buffer_unmap (buffer, &map);

  gst_buffer_set_size (buffer, size);
  GST_BUFFER_OFFSET_END (buffer) = GST_BUFFER_OFFSET (buffer) + size;

  /* the "push-buffer" action signal does not take ownership, so unref afterwards */
  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
  gst_buffer_unref (buffer);

  fclose (fp);
}

int main (int argc, char *argv[])
{
  GstElement *pipeline, *sink, *app_src;
  gint width, height;
  GstSample *sample = NULL;
  gchar *descr;
  GError *error = NULL;
  GstStateChangeReturn ret;
  gboolean res;
  GstMapInfo map;

  gst_init (&argc, &argv);

  descr =
      g_strdup_printf ("appsrc is-live=TRUE name=source caps=video/x-raw,format=RGB,width=1920,pixel-aspect-ratio=1/1 ! "
      "qtdemux name=demux ! queue ! h264parse ! omxh264dec ! videoconvert ! appsink name=sink sync=false");

  pipeline = gst_parse_launch (descr, &error);

  if (error != NULL) {
    g_print ("could not construct pipeline: %s\n", error->message);
    g_clear_error (&error);
    exit (-1);
  }

  /* get appsrc and hook up the need-data callback */
  //g_object_set (G_OBJECT (app_src), "stream-type", 0, "format", GST_FORMAT_TIME, NULL);
  app_src = gst_bin_get_by_name (GST_BIN (pipeline), "source");
  g_signal_connect (app_src, "need-data", G_CALLBACK (cb_need_data), app_src);

  /* get sink */
  sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");

  /* set to PLAYING so frames start arriving in the sink */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  switch (ret) {
    case GST_STATE_CHANGE_FAILURE:
      g_print ("failed to play the file---------\n");
      exit (-1);
    case GST_STATE_CHANGE_NO_PREROLL:
      g_print ("live sources not supported yet\n");
      exit (-1);
    default:
      break;
  }


  /* This can block for up to 5 seconds. If your machine is really overloaded,
   * it might time out before the pipeline prerolled and we generate an error. A
   * better way is to run a mainloop and catch errors there. */
  ret = gst_element_get_state (pipeline, NULL, NULL, 1 * GST_SECOND);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_print ("failed to play the file==========\n");
    exit (-1);
  }

  /* step through the stream one buffer at a time */
  gst_element_send_event (pipeline,
      gst_event_new_step (GST_FORMAT_BUFFERS, 1, 1, TRUE, FALSE));

  clock_t start;
  int count = 0;
  double sum = 0.0;
 
  while (true) {
    /* pull the next prerolled sample */
    start = clock ();
    g_signal_emit_by_name (sink, "pull-preroll", &sample, NULL);

    if (sample) {
      GstBuffer *buffer;
      GstCaps *caps;
      GstStructure *s;

      caps = gst_sample_get_caps (sample);
      if (!caps) {
        g_print ("could not get snapshot format\n");
        exit (-1);
      }
      s = gst_caps_get_structure (caps, 0);

      /* we need to get the final caps on the buffer to get the size */
      res = gst_structure_get_int (s, "width", &width);
      res |= gst_structure_get_int (s, "height", &height);
      if (!res) {
        g_print ("could not get snapshot dimension\n");
        exit (-1);
      }

      /* create pixmap from buffer and save, gstreamer video buffers have a stride
       * that is rounded up to the nearest multiple of 4 */
      buffer = gst_sample_get_buffer (sample);

      /* Mapping a buffer can fail (non-readable) */
      if (gst_buffer_map (buffer, &map, GST_MAP_READ)) {
        double duration = (double) (clock () - start) / CLOCKS_PER_SEC;
        sum += duration;
        g_print ("%f ms average over %d pics\n", sum / (count + 1) * 1000.0, count + 1);

        /* wrap the mapped RGB data in a cv::Mat (no copy) */
        Mat show = cv::Mat (1080, 1920, CV_8UC3, (char *) map.data, cv::Mat::AUTO_STEP);
        Mat frame;
        char name[128];
        sprintf (name, "./test_pic/%d.jpg", count);
        cvtColor (show, frame, CV_RGB2BGR);
        if (!show.empty () && count % 5000 == 0) {
          imwrite (name, frame);
          //imshow ("test-gstreamer-video", show);
          waitKey (1);
        }

        gst_buffer_unmap (buffer, &map);
      }

      gst_sample_unref (sample);
    }

    if (gst_app_sink_is_eos ((GstAppSink *) sink)) {
      g_print ("is end of stream\n");
      break;
    }

    /* request the next buffer */
    gst_element_send_event (pipeline,
        gst_event_new_step (GST_FORMAT_BUFFERS, 1, 1, TRUE, FALSE));

    count++;
  }

  /* cleanup and exit */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  exit (0);
}

I want to decode an .mp4 file, but when I run this code it fails with:

failed to play the file==========

right here:

  ret = gst_element_get_state (pipeline, NULL, NULL, 1 * GST_SECOND);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_print ("failed to play the file==========\n");
    exit (-1);
  }
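
(My guess, and it is only an assumption: the appsrc in the failing pipeline advertises caps=video/x-raw,format=RGB,... while the need-data callback pushes raw bytes of the .mp4 file into qtdemux, which as far as I know only accepts video/quicktime on its sink pad, so caps negotiation fails before anything prerolls. A minimal, untested sketch of the kind of change I mean, with the container caps being the assumption:

  /* sketch only: if appsrc pushes raw .mp4 bytes, the caps in front of qtdemux
   * would have to describe the container, not raw video (assumption, not tested) */
  descr =
      g_strdup_printf ("appsrc is-live=TRUE name=source caps=video/quicktime ! "
      "qtdemux name=demux ! queue ! h264parse ! omxh264dec ! videoconvert ! "
      "appsink name=sink sync=false");

Even then, pushing the same first BUFF_SIZE bytes on every need-data call would also have to be fixed before qtdemux could parse the file.)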

However, when I write the code like this, it works well:

#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gtk/gtk.h>

#include <stdlib.h>
#include <time.h>

#include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv/cv.h"

using namespace std;
using namespace cv;
#define CAPS "video/x-raw,format=RGB,width=3840,pixel-aspect-ratio=1/1"


int main (int argc, char *argv[])
{
  GstElement *pipeline, *sink;
  gint width, height;
  GstSample *sample = NULL;
  gchar *descr;
  GError *error = NULL;
  GstStateChangeReturn ret;
  gboolean res;
  GstMapInfo map;

  gst_init (&argc, &argv);

  descr =
      //g_strdup_printf ("filesrc location=%s ! qtdemux name=demux demux.video_0 ! queue ! h264parse ! omxh264dec ! videoconvert ! appsink name=sink sync=false "
      //"caps=\"" CAPS "\"", argv[1]);
      g_strdup_printf ("tcpclientsrc host=192.168.6.72 port=5000 ! gdpdepay ! rtph264depay ! h264parse ! omxh264dec ! videoconvert ! appsink name=sink sync=false "
      "caps=\"" CAPS "\"");

  pipeline = gst_parse_launch (descr, &error);

  if (error != NULL) {
    g_print ("could not construct pipeline: %s\n", error->message);
    g_clear_error (&error);
    exit (-1);
  }

  /* get sink */
  sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");

  /* set to PLAYING so frames start arriving in the sink */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  switch (ret) {
    case GST_STATE_CHANGE_FAILURE:
      g_print ("failed to play the file---------\n");
      exit (-1);
    case GST_STATE_CHANGE_NO_PREROLL:
      g_print ("live sources not supported yet\n");
      exit (-1);
    default:
      break;
  }


  /* This can block for up to 5 seconds. If your machine is really overloaded,
   * it might time out before the pipeline prerolled and we generate an error. A
   * better way is to run a mainloop and catch errors there. */
  ret = gst_element_get_state (pipeline, NULL, NULL, 1 * GST_SECOND);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_print ("failed to play the file==========\n");
    exit (-1);
  }

  /* step through the stream one buffer at a time */
  gst_element_send_event (pipeline,
      gst_event_new_step (GST_FORMAT_BUFFERS, 1, 1, TRUE, FALSE));

  clock_t start;
  int count = 0;
  double sum = 0.0;
 
  while (true) {
    /* pull the next prerolled sample */
    start = clock ();
    g_signal_emit_by_name (sink, "pull-preroll", &sample, NULL);

    if (sample) {
      GstBuffer *buffer;
      GstCaps *caps;
      GstStructure *s;

      caps = gst_sample_get_caps (sample);
      if (!caps) {
        g_print ("could not get snapshot format\n");
        exit (-1);
      }
      s = gst_caps_get_structure (caps, 0);

      /* we need to get the final caps on the buffer to get the size */
      res = gst_structure_get_int (s, "width", &width);
      res |= gst_structure_get_int (s, "height", &height);
      if (!res) {
        g_print ("could not get snapshot dimension\n");
        exit (-1);
      }

      /* create pixmap from buffer and save, gstreamer video buffers have a stride
       * that is rounded up to the nearest multiple of 4 */
      buffer = gst_sample_get_buffer (sample);

      /* Mapping a buffer can fail (non-readable) */
      if (gst_buffer_map (buffer, &map, GST_MAP_READ)) {
        double duration = (double) (clock () - start) / CLOCKS_PER_SEC;
        sum += duration;
        g_print ("%f ms average over %d pics\n", sum / (count + 1) * 1000.0, count + 1);

        /* wrap the mapped RGB data in a cv::Mat (no copy) */
        Mat show = cv::Mat (1080, 3840, CV_8UC3, (char *) map.data, cv::Mat::AUTO_STEP);
        Mat frame;
        char name[128];
        sprintf (name, "./test_pic/%d.jpg", count);
        cvtColor (show, frame, CV_RGB2BGR);
        if (!show.empty () && count % 5000 == 0) {
          imwrite (name, frame);
          //imshow ("test-gstreamer-video", show);
          //waitKey (1);
        }

        gst_buffer_unmap (buffer, &map);
      }

      gst_sample_unref (sample);
    }

    if (gst_app_sink_is_eos ((GstAppSink *) sink)) {
      g_print ("is end of stream\n");
      break;
    }

    /* request the next buffer */
    gst_element_send_event (pipeline,
        gst_event_new_step (GST_FORMAT_BUFFERS, 1, 1, TRUE, FALSE));

    count++;
  }

  /* cleanup and exit */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  exit (0);
}

Can anyone tell me how to fix this? Any advice will be appreciated. Thanks!

Sorry, this code works well:

#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gtk/gtk.h>

#include <stdio.h>
#include <stdlib.h>
#include <time.h>

#include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv/cv.h"

using namespace std;
using namespace cv;
#define CAPS "video/x-raw,format=RGB,width=1920,pixel-aspect-ratio=1/1"

#define BUFF_SIZE (1024)

/* Not used in this version: the need-data connection in main() is commented out. */
void cb_need_data (GstElement *appsrc, guint unused_size, gpointer user_data)
{
  GstBuffer *buffer;
  gsize size = 0;
  GstFlowReturn ret;
  GstMapInfo map;
  FILE *fp = fopen ("/home/nvidia/test_gstream/test.mp4", "rb");

  buffer = gst_buffer_new_and_alloc (BUFF_SIZE);
  GST_BUFFER_OFFSET (buffer) = ftell (fp);

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  size = gst_buffer_get_size (buffer);
  g_print ("size = %" G_GSIZE_FORMAT "\n", size);
  gst_buffer_unmap (buffer, &map);

  GST_BUFFER_OFFSET_END (buffer) = GST_BUFFER_OFFSET (buffer) + size;

  /* the "push-buffer" action signal does not take ownership, so unref afterwards */
  g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
  gst_buffer_unref (buffer);

  fclose (fp);
}

int main (int argc, char *argv[])
{
  GstElement *pipeline, *sink;
  gint width, height;
  GstSample *sample = NULL;
  gchar *descr;
  GError *error = NULL;
  GstStateChangeReturn ret;
  gboolean res;
  GstMapInfo map;

  gst_init (&argc, &argv);

  if (argc != 2) {
    g_print ("usage: %s <mp4 file>\n", argv[0]);
    exit (-1);
  }

  descr =
      g_strdup_printf ("filesrc location=%s ! qtdemux name=demux ! queue ! h264parse ! omxh264dec ! videoconvert ! appsink name=sink sync=false "
      "caps=\"" CAPS "\"", argv[1]);

  pipeline = gst_parse_launch (descr, &error);

  if (error != NULL) {
    g_print ("could not construct pipeline: %s\n", error->message);
    g_clear_error (&error);
    exit (-1);
  }

  /* the appsrc/need-data path is not used here; filesrc reads the file directly */
  //app_src = gst_bin_get_by_name (GST_BIN (pipeline), "source");
  //g_signal_connect (app_src, "need-data", G_CALLBACK (cb_need_data), app_src);

  /* get sink */
  sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");

  /* set to PLAYING so frames start arriving in the sink */
  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  switch (ret) {
    case GST_STATE_CHANGE_FAILURE:
      g_print ("failed to play the file---------\n");
      exit (-1);
    case GST_STATE_CHANGE_NO_PREROLL:
      g_print ("live sources not supported yet\n");
      exit (-1);
    default:
      break;
  }


  /* This can block for up to 5 seconds. If your machine is really overloaded,
   * it might time out before the pipeline prerolled and we generate an error. A
   * better way is to run a mainloop and catch errors there. */
  ret = gst_element_get_state (pipeline, NULL, NULL, 1 * GST_SECOND);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_print ("failed to play the file==========\n");
    exit (-1);
  }

  /* step through the stream one buffer at a time */
  gst_element_send_event (pipeline,
      gst_event_new_step (GST_FORMAT_BUFFERS, 1, 1, TRUE, FALSE));

  clock_t start;
  int count = 0;
  double sum = 0.0;
 
  while (true) {
    /* pull the next prerolled sample */
    start = clock ();
    g_signal_emit_by_name (sink, "pull-preroll", &sample, NULL);

    if (sample) {
      GstBuffer *buffer;
      GstCaps *caps;
      GstStructure *s;

      caps = gst_sample_get_caps (sample);
      if (!caps) {
        g_print ("could not get snapshot format\n");
        exit (-1);
      }
      s = gst_caps_get_structure (caps, 0);

      /* we need to get the final caps on the buffer to get the size */
      res = gst_structure_get_int (s, "width", &width);
      res |= gst_structure_get_int (s, "height", &height);
      if (!res) {
        g_print ("could not get snapshot dimension\n");
        exit (-1);
      }

      /* create pixmap from buffer and save, gstreamer video buffers have a stride
       * that is rounded up to the nearest multiple of 4 */
      buffer = gst_sample_get_buffer (sample);

      /* Mapping a buffer can fail (non-readable) */
      if (gst_buffer_map (buffer, &map, GST_MAP_READ)) {
        double duration = (double) (clock () - start) / CLOCKS_PER_SEC;
        sum += duration;
        g_print ("%f ms average over %d pics\n", sum / (count + 1) * 1000.0, count + 1);

        /* wrap the mapped RGB data in a cv::Mat (no copy) */
        Mat show = cv::Mat (1080, 1920, CV_8UC3, (char *) map.data, cv::Mat::AUTO_STEP);
        Mat frame;
        char name[128];
        sprintf (name, "./test_pic/%d.jpg", count);
        cvtColor (show, frame, CV_RGB2BGR);
        if (!show.empty () && count % 5000 == 0) {
          imwrite (name, frame);
          //imshow ("test-gstreamer-video", show);
          waitKey (1);
        }

        gst_buffer_unmap (buffer, &map);
      }

      gst_sample_unref (sample);
    }

    if (gst_app_sink_is_eos ((GstAppSink *) sink)) {
      g_print ("is end of stream\n");
      break;
    }

    /* request the next buffer */
    gst_element_send_event (pipeline,
        gst_event_new_step (GST_FORMAT_BUFFERS, 1, 1, TRUE, FALSE));

    count++;
  }

  /* cleanup and exit */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  exit (0);
}
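
For reference, this working version takes the clip path as its only argument, so it is run as something like ./decode_test /home/nvidia/test_gstream/test.mp4 (the binary name here is just an example; the path is the one from my earlier appsrc test).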