我正在为IMX nitrogen 6x编写GStreamer应用程序以编码为h264,解码和渲染。这适用于下面提到的Gstreamer命令。
#gst-launch-1.0 videotestsrc ! imxvpuenc_h264 ! queue ! imxvpudec ! imx2dvideosink
但Gstreamer应用程序代码不起作用。面对以下错误:
> 0:00:00.229253000   245  0x9d1ee0 ERROR  imxeglplatform_fb egl_platform_fb.c:72:gst_imx_egl_viv_sink_egl_platform_create: eglGetDisplay failed: Bad alloc
请问有人能帮我解决这个问题吗?
#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gst/app/gstappsrc.h>
#include <stdlib.h>
//#define CAPS "video/x-raw,format=RGB,width=160,pixel-aspect-ratio=1/1"
#define CAPS "video/x-h264, stream-format=(string)byte-stream, alignment=(string)au, level=(string)2, profile=(string)high, width=(int)320, height=(int)240, pixel-aspect-ratio=(fraction)1/1, framerate=(fraction)30/1"
int
main (int argc, char *argv[])
{
GstElement *pipeline, *sink, *dec_pipeline, *source;
gint width, height;
GstSample *sample;
GstBuffer *buf;
gchar *descr, *dec_descr;
GError *error = NULL;
gint64 duration, position;
GstStateChangeReturn ret;
gboolean res;
GstMapInfo map;
gint itr;
gst_init (&argc, &argv);
/* create a new pipeline */
descr =
g_strdup_printf ("videotestsrc horizontal-speed=5 ! imxvpuenc_h264 ! tee name=t t. ! queue ! appsink name=sink caps=\"" CAPS "\"");
pipeline = gst_parse_launch (descr, &error);
if (error != NULL) {
g_print ("could not construct pipeline: %s\n", error->message);
g_clear_error (&error);
exit (-1);
}
dec_pipeline = gst_parse_launch ("appsrc name=source caps=\"" CAPS "\" ! queue ! imxvpudec ! imx2dvideosink", &error);
if (error != NULL) {
g_print ("could not construct decoder pipeline: %s\n", error->message);
g_clear_error (&error);
exit (-1);
}
/* get sink and source from respective pipeline*/
sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");
source = gst_bin_get_by_name (GST_BIN (dec_pipeline), "source");
if( !source ) {
g_print("Get source from pipeline is failed");
}
/* set to PAUSED to make the first frame arrive in the sink */
ret = gst_element_set_state (pipeline, GST_STATE_PAUSED);
switch (ret) {
case GST_STATE_CHANGE_FAILURE:
g_print ("failed to play the file\n");
exit (-1);
case GST_STATE_CHANGE_NO_PREROLL:
/* for live sources, we need to set the pipeline to PLAYING before we can
* receive a buffer. We don't do that yet */
g_print ("live sources not supported yet\n");
exit (-1);
default:
break;
}
gst_element_set_state (dec_pipeline, GST_STATE_PAUSED);
g_print ("dec pipeline moved to pause state successfully\n");
/* This can block for up to 5 seconds. If your machine is really overloaded,
* it might time out before the pipeline prerolled and we generate an error. A
* better way is to run a mainloop and catch errors there. */
ret = gst_element_get_state (pipeline, NULL, NULL, 5 * GST_SECOND);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_print ("failed to play the file\n");
exit (-1);
}
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
sample = gst_app_sink_pull_preroll(GST_APP_SINK(sink));
//handle_sample_buffer(sample);
gst_element_set_state (dec_pipeline, GST_STATE_PLAYING);
buf = gst_sample_get_buffer (sample);
g_print("Pushing buffer to app src\n");
ret = gst_app_src_push_buffer(source, buf);
if (ret == GST_FLOW_FLUSHING) {
g_print ("appsrc is not paused\n");
} else
if (ret == GST_FLOW_EOS) {
g_print("EOS occured\n");
} else {
g_print("Buffer is not queued successfully\n");
}
do {
/* if we have a buffer now, convert it to a pixbuf. It's possible that we
* don't have a buffer because we went EOS right away or had an error. */
sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
buf = gst_sample_get_buffer (sample);
//handle_sample_buffer(sample);
ret = gst_app_src_push_buffer(source, buf);
}while(1);
/* cleanup and exit */
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
gst_element_set_state (dec_pipeline, GST_STATE_NULL);
gst_object_unref (dec_pipeline);
exit (0);
}