背景:我有一个应用程序,它通过一条管道从 USB 摄像头采集视频并把输出保存到文件。现在我想在保存之前,先在应用程序里对每一帧做处理。所以在测试期间我搭建了如下结构:主管道从 camera 经 tee 分别接到 filesink 和 appsink,次管道则从 appsrc(其数据来自前面提到的 appsink)接到另一个 filesink。于是我有:
流水线一:camera------>tee--------->filesink1
tee--------->appsink
管道二:(appsink->)appsource------>filesink2
目标:去掉第一个管道中的 tee,把第一个管道简化为 camera--->appsink;然后把每个样本复制出来(并在其上做一些处理)后推入第二个管道。
流水线一:camera------>appsink
管道二:(appsink->some work->)appsource------>filesink
问题:创建了两个测试视频,大小相等。但是从 appsrc->filesink 管道创建的那个声称有 0s 长度,不管视频实际有多长。谁能猜到为什么?
代码:
GstElement *pipeline_main; //main pipeline FROM the camera
GstElement *pipeline_saveVideo; //secondary pipeline: appsrc -> filesink, fed from the appsink callback
GstElement *tee; //splitter
GstCaps *caps; //caps currently set to the camera
GstElement *videoApssrc; //source for the pipeline to SAVE the video;
bool record; // written by endVideo(), read by the appsink callback — NOTE(review): no synchronization; assumes single streaming thread, confirm
/* "new-sample" callback for the main pipeline's appsink.
 * Pulls the next JPEG sample and, while recording, forwards it to the
 * appsrc that feeds the save pipeline.
 *
 * Ownership: gst_app_sink_pull_sample() returns a sample we OWN, and
 * gst_app_src_push_sample() does NOT take ownership (it refs internally),
 * so the sample must be unreffed here — the original leaked one sample
 * per frame. */
static GstFlowReturn new_sample_jpeg(GstElement * elt)
{
    GstSample *sample = gst_app_sink_pull_sample (GST_APP_SINK (elt));
    if (sample == NULL)
        return GST_FLOW_EOS; /* appsink is draining / went to NULL */

    //if recording, send the sample to recording sink
    if (record)
        gst_app_src_push_sample (GST_APP_SRC (videoApssrc), sample);

    // ... (application-specific processing elided in the original source)

    gst_sample_unref (sample); /* fix: release our reference */
    return GST_FLOW_OK;
}
/* Builds and starts the save pipeline: appsrc -> queue -> avimux -> filesink.
 * @param path  output file location for the recorded AVI.
 * @return true on success, false if element creation/linking or the state
 *         change failed (the pipeline is unreffed on failure).
 *
 * Root cause of the "video claims 0s length" symptom: the avimux element
 * was created but NEVER added to the bin nor linked, so the filesink wrote
 * a raw JPEG byte dump with no AVI container (same size, but no index or
 * duration metadata). It is now linked between the queue and the sink. */
bool createPipelineVideo(std::string path){
    GstStateChangeReturn ret;
    GstElement *muxer, *sink, *queue;

    videoApssrc = gst_element_factory_make ("appsrc", "saveVideoSource");
    muxer = gst_element_factory_make ("avimux", "avimux");
    queue = gst_element_factory_make ("queue", "rcr_queue");
    sink  = gst_element_factory_make ("filesink", "sink");
    /* fix: 'path' is a std::string, not a callable — was path().c_str() */
    g_object_set (sink, "location", path.c_str(), NULL);

    pipeline_saveVideo = gst_pipeline_new ("pipeline_vid");
    if (!pipeline_saveVideo || !videoApssrc || !muxer || !queue || !sink) {
        g_printerr ("Not all elements could be created.\n");
        return false;
    }

    gst_app_src_set_caps (GST_APP_SRC (videoApssrc), caps);
    gst_app_src_set_stream_type (GST_APP_SRC (videoApssrc), GST_APP_STREAM_TYPE_STREAM);
    gst_app_src_set_latency (GST_APP_SRC (videoApssrc), -1, 0);
    /* Run appsrc in TIME format and let it timestamp incoming buffers
     * against THIS pipeline's clock; otherwise the muxer sees timestamps
     * from the other pipeline (or none) and cannot compute a duration.
     * Also removed gst_app_src_set_duration(..., GST_TIME_AS_MSECONDS(80)):
     * GST_TIME_AS_MSECONDS converts ns->ms, so that call advertised a
     * (bogus) 0 ns stream duration. */
    g_object_set (videoApssrc,
                  "format", GST_FORMAT_TIME,
                  "do-timestamp", TRUE,
                  NULL);

    /* fix: muxer added and linked into the chain */
    gst_bin_add_many (GST_BIN (pipeline_saveVideo), videoApssrc, queue, muxer, sink, NULL);
    if (gst_element_link_many (videoApssrc, queue, muxer, sink, NULL) != TRUE) {
        g_printerr ("Elements could not be linked.\n");
        gst_object_unref (pipeline_saveVideo);
        return false;
    }

    ret = gst_element_set_state (pipeline_saveVideo, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr ("Unable to set the pipeline to the playing state.\n");
        gst_object_unref (pipeline_saveVideo);
        return false;
    }
    return true;
}
void startCapturing(){
if (!gst_is_initialized()) {
qWarning()<<"initializing GST";
setenv("GST_DEBUG", ("*:" + std::to_string(3)).c_str(), 1);
gst_init(nullptr, nullptr);
}
GstStateChangeReturn ret;
GstElement *source, *muxer, *sink, *queue_rcr, *queue_app, *appsink;
source = gst_element_factory_make ("v4l2src", "source");
g_object_set (source, "device", "/dev/video1", NULL);
muxer = gst_element_factory_make ("avimux", "avimux");
tee = gst_element_factory_make("tee", "tee");
sink = gst_element_factory_make ("filesink", "sink");
queue_rcr = gst_element_factory_make ("queue", "record_queue");
queue_app = gst_element_factory_make ("queue", "app_queue");
appsink = gst_element_factory_make("appsink", "appsink");
g_object_set (sink, "location", "/mnt/test1.avi", NULL);
pipeline_main = gst_pipeline_new ("pipeline_src");
if (!pipeline_main || !source || !muxer || !sink || !queue_rcr || !appsink) {
g_printerr ("Not all elements could be created.\n");
return;
}
caps = gst_caps_new_simple ("image/jpeg",
"width", G_TYPE_INT, 1920,
"height", G_TYPE_INT, 1080,
"io-mode", G_TYPE_INT, 4,
"framerate", GST_TYPE_FRACTION, 30, 1,
"pixel-aspect-ratio", GST_TYPE_FRACTION, 1,1,
"interlace-mode", G_TYPE_STRING, "progresive",
NULL);
gst_bin_add_many (GST_BIN (pipeline_main), source, muxer,tee, sink,queue_rcr, appsink, queue_app, NULL);
if (gst_element_link_filtered(source, tee, caps) != TRUE) {
gstFail("Elements could not be linked or caps set.\n");
return;
}
if (gst_element_link_many(tee, queue_rcr, muxer, sink, NULL) != TRUE) {
gstFail("Elements could not be linked-recording line\n");
return;
}
if (gst_element_link_many(tee, queue_app, appsink, NULL) != TRUE) {
gstFail("Elements could not be linked-recording line\n");
return;
}
gst_app_sink_set_emit_signals(GST_APP_SINK(appsink), true);
g_signal_connect (appsink, "new-sample", G_CALLBACK (new_sample_jpeg));
ret = gst_element_set_state (pipeline_main, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
gstFail("Unable to set the pipeline to the playing state.\n");
return;
}
// Start playing
createPipelineVideo("/mnt/test2.avi");
record = true;
return;
}
void endVideo(){
record = false;
GstMessage *message = gst_message_new_eos(nullptr);
/* Free resources */
if (message != NULL)
gst_message_unref (message);
gst_element_set_state (pipeline_main, GST_STATE_PAUSED);
gst_element_set_state (pipeline_main, GST_STATE_NULL);
sleep(1);
gst_object_unref (pipeline_main);
//push the image in the pipeline
GstMessage *EndMessage = gst_message_new_eos(&pipeline_saveVideo->object);
gst_bus_post(pipeline_saveVideo->bus, EndMessage);
/* Free resources */
if (message != NULL)
gst_message_unref (EndMessage);
GstFlowReturn status = GstFlowReturn::GST_FLOW_OK;
status = gst_app_src_end_of_stream(GST_APP_SRC(videoApssrc));
//end the pipeline
usleep(500000);
gst_element_set_state (pipeline_saveVideo, GST_STATE_PAUSED);
gst_element_set_state (pipeline_saveVideo, GST_STATE_NULL);
gst_object_unref (pipeline_saveVideo);
}