在Qt中使用Gstreamer在c ++中出现问题:流程错误

时间:2017-03-21 18:50:31

标签: c++ opencv gstreamer

我是Gstreamer的初学者,我需要从UDP摄像头获取帧并将其转换为 cv::Mat (OpenCV)。 我像这样运行我的相机流:

cv::Mat

在另一个终端我可以得到这样的流(并且它可以工作):

gst-launch-1.0 v4l2src device=/dev/video0 ! \
   h264parse ! rtph264pay ! udpsink host=XXX.XXX.XXX.XXX port=5000

所以,以下是我的C++代码:

gst-launch-1.0 udpsrc port=5000 caps='application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264' ! \
rtph264depay ! avdec_h264 ! autovideosink

GstFlowReturn new_preroll(GstAppSink *appsink, gpointer data) { g_print ("Got preroll!\n"); return GST_FLOW_OK; } GstFlowReturn new_sample(GstAppSink *appsink, gpointer data) { static int framecount = 0; framecount++; GstSample *sample = gst_app_sink_pull_sample(appsink); GstCaps *caps = gst_sample_get_caps(sample); GstBuffer *buffer = gst_sample_get_buffer(sample); const GstStructure *info = gst_sample_get_info(sample); // ---- Read frame and convert to opencv format --------------- GstMapInfo map; gst_buffer_map (buffer, &map, GST_MAP_READ); // convert gstreamer data to OpenCV Mat, you could actually // resolve height / width from caps... Mat frame(Size(320, 240), CV_8UC3, (char*)map.data, Mat::AUTO_STEP); int frameSize = map.size; // TODO: synchronize this.... frameQueue.push_back(frame); gst_buffer_unmap(buffer, &map); // ------------------------------------------------------------ // print dot every 30 frames if (framecount%30 == 0) { g_print ("."); } // show caps on first frame if (framecount == 1) { g_print ("%s\n", gst_caps_to_string(caps)); } gst_sample_unref (sample); return GST_FLOW_OK; } static gboolean my_bus_callback (GstBus *bus, GstMessage *message, gpointer data) { g_print ("Got %s message\n", GST_MESSAGE_TYPE_NAME (message)); switch (GST_MESSAGE_TYPE (message)) { case GST_MESSAGE_ERROR: { GError *err; gchar *debug; gst_message_parse_error (message, &err, &debug); g_print ("Error: %s\n", err->message); g_error_free (err); g_free (debug); break; } case GST_MESSAGE_EOS: /* end-of-stream */ break; default: /* unhandled message */ break; } /* we want to be notified again the next time there is a message * on the bus, so returning TRUE (FALSE means we want to stop watching * for messages on the bus and our callback should not be called again) */ return TRUE; } int main (int argc, char *argv[]) { GError *error = NULL; gst_init (&argc, &argv); gchar *descr = g_strdup( "udpsrc port=5000 ! 
" "caps=application/x-rtp, media=(string)video, clock-rate=(int)9000, encoding-name=(string)H264 ! " "rtph264depay ! " "avdec_h264 ! " "videoconvert ! " "appsink name=sink " ); GstElement *pipeline = gst_parse_launch (descr, &error); if (pipeline== NULL) { g_print ("could not construct pipeline: %s\n", error->message); g_error_free (error); exit (-1); } /* get sink */ GstElement *sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink"); gst_app_sink_set_emit_signals((GstAppSink*)sink, true); gst_app_sink_set_drop((GstAppSink*)sink, true); gst_app_sink_set_max_buffers((GstAppSink*)sink, 1); GstAppSinkCallbacks callbacks = { NULL, new_preroll, new_sample }; gst_app_sink_set_callbacks (GST_APP_SINK(sink), &callbacks, NULL, NULL); GstBus *bus; guint bus_watch_id; bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); bus_watch_id = gst_bus_add_watch (bus, my_bus_callback, NULL); gst_object_unref (bus); GstStateChangeReturn test=gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING); qDebug() <<test<< " this is the test"; namedWindow("edges",1); while(1) { g_main_iteration(false); // TODO: synchronize... if (frameQueue.size() >10) { // this lags pretty badly even when grabbing frames from webcam Mat frame = frameQueue.front(); imshow("edges", frame); cv::waitKey(30); frameQueue.clear(); } } gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL); gst_object_unref (GST_OBJECT (pipeline)); return 0; }

/* NOTE(review): this is a duplicated paste of the same snippet shown
 * above on this page. Issues visible in this copy (see the accepted
 * answer): a stray "!" after "udpsrc port=5000" before the caps=
 * property (caps is a property of udpsrc, not a pipeline element);
 * the g_strdup() string literal is broken across a raw line break,
 * which is ill-formed C++; and clock-rate=(int)9000 here differs from
 * the 90000 used by the working gst-launch receive command —
 * presumably a typo, confirm against the sender. Also note the Mat
 * built from map.data dangles once gst_buffer_unmap() runs. */
GstFlowReturn new_preroll(GstAppSink *appsink, gpointer data) { g_print ("Got preroll!\n"); return GST_FLOW_OK; } GstFlowReturn new_sample(GstAppSink *appsink, gpointer data) { static int framecount = 0; framecount++; GstSample *sample = gst_app_sink_pull_sample(appsink); GstCaps *caps = gst_sample_get_caps(sample); GstBuffer *buffer = gst_sample_get_buffer(sample); const GstStructure *info = gst_sample_get_info(sample); // ---- Read frame and convert to opencv format --------------- GstMapInfo map; gst_buffer_map (buffer, &map, GST_MAP_READ); // convert gstreamer data to OpenCV Mat, you could actually // resolve height / width from caps... Mat frame(Size(320, 240), CV_8UC3, (char*)map.data, Mat::AUTO_STEP); int frameSize = map.size; // TODO: synchronize this.... frameQueue.push_back(frame); gst_buffer_unmap(buffer, &map); // ------------------------------------------------------------ // print dot every 30 frames if (framecount%30 == 0) { g_print ("."); } // show caps on first frame if (framecount == 1) { g_print ("%s\n", gst_caps_to_string(caps)); } gst_sample_unref (sample); return GST_FLOW_OK; } static gboolean my_bus_callback (GstBus *bus, GstMessage *message, gpointer data) { g_print ("Got %s message\n", GST_MESSAGE_TYPE_NAME (message)); switch (GST_MESSAGE_TYPE (message)) { case GST_MESSAGE_ERROR: { GError *err; gchar *debug; gst_message_parse_error (message, &err, &debug); g_print ("Error: %s\n", err->message); g_error_free (err); g_free (debug); break; } case GST_MESSAGE_EOS: /* end-of-stream */ break; default: /* unhandled message */ break; } /* we want to be notified again the next time there is a message * on the bus, so returning TRUE (FALSE means we want to stop watching * for messages on the bus and our callback should not be called again) */ return TRUE; } int main (int argc, char *argv[]) { GError *error = NULL; gst_init (&argc, &argv); gchar *descr = g_strdup( "udpsrc port=5000 ! 
" "caps=application/x-rtp, media=(string)video, clock-rate=(int)9000, encoding-name=(string)H264 ! " "rtph264depay ! " "avdec_h264 ! " "videoconvert ! " "appsink name=sink " ); GstElement *pipeline = gst_parse_launch (descr, &error); if (pipeline== NULL) { g_print ("could not construct pipeline: %s\n", error->message); g_error_free (error); exit (-1); } /* get sink */ GstElement *sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink"); gst_app_sink_set_emit_signals((GstAppSink*)sink, true); gst_app_sink_set_drop((GstAppSink*)sink, true); gst_app_sink_set_max_buffers((GstAppSink*)sink, 1); GstAppSinkCallbacks callbacks = { NULL, new_preroll, new_sample }; gst_app_sink_set_callbacks (GST_APP_SINK(sink), &callbacks, NULL, NULL); GstBus *bus; guint bus_watch_id; bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); bus_watch_id = gst_bus_add_watch (bus, my_bus_callback, NULL); gst_object_unref (bus); GstStateChangeReturn test=gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING); qDebug() <<test<< " this is the test"; namedWindow("edges",1); while(1) { g_main_iteration(false); // TODO: synchronize... if (frameQueue.size() >10) { // this lags pretty badly even when grabbing frames from webcam Mat frame = frameQueue.front(); imshow("edges", frame); cv::waitKey(30); frameQueue.clear(); } } gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL); gst_object_unref (GST_OBJECT (pipeline)); return 0; }

我得到了错误:

  

错误:内部数据流错误。

我认为这是我对我的管道的声明,但我找不到它的错误。 有什么建议吗?

1 个答案:

答案 0 :(得分:1)

您在udpsrc port=5000之后多了一个"!"。你的原始(终端)管道中没有那个。我没有进一步检查;建议打印出管道字符串并仔细检查它是否符合预期。