Gstreamer C代码失败,流停止,原因未协商(-4)

时间:2019-05-10 09:10:26

标签: c video gstreamer gstreamer-1.0

我正在学习Gstreamer,无论我通过Gstreamer工具取得了什么成就,我都在尝试使用C语言在gstreamer应用程序中实现同样的功能。

以下命令成功流式传输了mp4视频文件: gst-launch-1.0.exe -v filesrc location=file.mp4 ! qtdemux ! h264parse ! avdec_h264 ! videoconvert ! autovideosink

我用C代码尝试了同样的操作,并使用元素的 "pad-added" 信号:当 qtdemux 动态创建 pad 时,将其链接到下一个元素,即解析器(h264parse)。

结果它失败了:流停止,原因是协商失败(not-negotiated,错误码 -4)。

完整输出: Now playing: file.mp4 / Running... / A new pad video_0 was created for demux / element demux will be linked to h264-parser / Error: Internal data stream error. / Debug Information: ../gst/isomp4/qtdemux.c(6607): gst_qtdemux_loop(): /GstPipeline:video-play/GstQTDemux:demux: streaming stopped, reason not-negotiated (-4) / Returned, stopping playback... / Freeing pipeline... / Completed. Goodbye!

#include <gst/gst.h>
#include <stdlib.h>
#include <string.h>

#define INPUT_FILE "file.mp4"

/*
 * Bus message handler installed via gst_bus_add_watch().
 *
 * bus:  the pipeline bus (unused; required by the GstBusFunc signature)
 * msg:  the message being delivered
 * data: the GMainLoop to quit on EOS or ERROR
 *
 * Returns TRUE so the watch remains installed.
 */
static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
    GMainLoop *loop = (GMainLoop *)data;

    switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS:
        g_print("End of stream\n");
        g_main_loop_quit(loop);
        break;

    case GST_MESSAGE_ERROR: {
        /* Declared (and initialized) inside the case: the original
         * declared these in switch scope, where they were never
         * initialized before the jump to a case label. */
        gchar  *debug = NULL;
        GError *error = NULL;

        gst_message_parse_error(msg, &error, &debug);

        g_printerr("Error: %s\n", error->message);
        /* BUG FIX: the original called g_free(debug) BEFORE printing
         * debug — a use-after-free. Print first, then free. */
        g_printerr("Debug Information: %s\n", debug ? debug : "none");
        g_free(debug);
        g_error_free(error);

        g_main_loop_quit(loop);
        break;
    }
    default:
        break;
    }

    return TRUE;
}


/*
 * "pad-added" callback for qtdemux: links the demuxer to the parser
 * once the demuxer has exposed a source pad.
 *
 * element: the demuxer that emitted the signal
 * pad:     the newly created pad (name is logged for diagnostics)
 * data:    the downstream h264parse element to link to
 *
 * NOTE(review): gst_element_link() tries ANY compatible pad pair, so an
 * audio pad from the demuxer can also trigger a (failing) link attempt;
 * filtering on the pad caps (see the answers below) is the robust fix.
 */
static void on_pad_added(GstElement *element, GstPad *pad, gpointer data)
{
    GstElement *parse = (GstElement *)data;

    /* gst_pad_get_name()/gst_element_get_name() return newly-allocated
     * strings the caller must free — the original leaked the element
     * names by calling gst_element_get_name() inline in g_print(). */
    gchar *pad_name   = gst_pad_get_name(pad);
    gchar *elem_name  = gst_element_get_name(element);
    gchar *parse_name = gst_element_get_name(parse);

    g_print("A new pad %s was created for %s\n", pad_name, elem_name);
    g_print("element %s will be linked to %s\n", elem_name, parse_name);

    /* BUG FIX: the original ignored the link result, hiding the exact
     * cause of the not-negotiated failure. */
    if (!gst_element_link(element, parse))
        g_printerr("Failed to link %s to %s\n", elem_name, parse_name);

    g_free(pad_name);
    g_free(elem_name);
    g_free(parse_name);
}

/*
 * Builds and runs the pipeline:
 *   filesrc -> qtdemux -> h264parse -> avdec_h264 -> d3dvideosink
 * The demuxer's source pad appears only after the stream starts, so it
 * is linked at runtime via the "pad-added" signal.
 */
int main(int argc, char *argv[])
{
    GMainLoop *loop;
    GstElement *pipeline, *source, *demux, *parser, *decoder, *sink;
    GstBus *bus;
    guint bus_watch_id;

    const gchar *input_file = INPUT_FILE;

    /* Initialization */
    gst_init(&argc, &argv);
    loop = g_main_loop_new(NULL, FALSE);

    /* Create gstreamer elements */
    pipeline = gst_pipeline_new("video-play");
    source = gst_element_factory_make("filesrc", "file-source");
    demux = gst_element_factory_make("qtdemux", "demux");
    parser = gst_element_factory_make("h264parse", "h264-parser");
    decoder = gst_element_factory_make("avdec_h264", "decoder");
    /* NOTE(review): Windows-only sink; the gst-launch line in the
     * question used autovideosink, which would be more portable. */
    sink = gst_element_factory_make("d3dvideosink", "video-output");

    if (!pipeline || !source || !demux || !parser || !decoder || !sink) {
        g_printerr("One element could not be created. Exiting.\n");
        return -1;
    }

    /* Set input video file for source element */
    g_object_set(G_OBJECT(source), "location", input_file, NULL);

    /* Add a message handler; the bus ref returned by
     * gst_pipeline_get_bus() is released right here — the original
     * unreffed it a second time after the main loop (double unref). */
    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    bus_watch_id = gst_bus_add_watch(bus, bus_call, loop);
    gst_object_unref(bus);

    /* Add all elements into the pipeline:
     * pipeline---[ filesrc + qtdemux + h264parse + avdec_h264 + d3dvideosink ] */
    gst_bin_add_many(GST_BIN(pipeline), source, demux, parser, decoder, sink, NULL);

    /* Static link: filesrc -> qtdemux */
    if (gst_element_link(source, demux) != TRUE) {
        g_printerr("Element source->demux could not be linked.\n");
        gst_object_unref(pipeline);
        return -1;
    }

    /* Static link: h264parse -> avdec_h264 -> d3dvideosink */
    if (gst_element_link_many(parser, decoder, sink, NULL) != TRUE) {
        g_printerr("Many Elements could not be linked.\n");
        gst_object_unref(pipeline);
        return -1;
    }

    /* Dynamic link: qtdemux's pads appear only once the file is parsed. */
    g_signal_connect(demux, "pad-added", G_CALLBACK(on_pad_added), parser);

    /* Set the pipeline to "playing" state */
    g_print("Now playing: %s\n", input_file);
    if (gst_element_set_state(pipeline,
        GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
        g_printerr("Unable to set the pipeline to the playing state.\n");
        gst_object_unref(pipeline);
        return -1;
    }

    g_print("Running...\n");
    g_main_loop_run(loop);

    /* Free resources and change state to NULL.
     * (Original also leaked the bus watch and the main loop.) */
    g_print("Returned, stopping playback...\n");
    gst_element_set_state(pipeline, GST_STATE_NULL);
    g_print("Freeing pipeline...\n");
    gst_object_unref(GST_OBJECT(pipeline));
    g_source_remove(bus_watch_id);
    g_main_loop_unref(loop);
    g_print("Completed. Goodbye!\n");
    return 0;
}

请告诉我如何将这些 pad 链接到 h264parse 元素以流式传输视频文件。如有可能,请说明这些 pad 在 GStreamer 命令行工具和 C 应用程序中分别是如何工作的。

2 个答案:

答案 0 :(得分:0)

  

以下命令成功流式传输了mp4视频文件: gst-launch-1.0.exe -v filesrc location=file.mp4 ! qtdemux ! h264parse ! avdec_h264 ! videoconvert ! autovideosink

理想情况下,您的管道应该是:

  

gst-launch-1.0.exe -v filesrc location=file.mp4 ! qtdemux name=d d.video_0 ! queue ! h264parse ! avdec_h264 ! videoconvert ! autovideosink

如果您检查 qtdemux(gst-inspect-1.0 qtdemux),您会注意到其 SINK pad 具有以下 caps:

Capabilities:
      video/quicktime
      video/mj2
      audio/x-m4a
      application/x-3gp

如果检查 h264parse(gst-inspect-1.0 h264parse),您会注意到其 SRC pad 模板具有以下 caps:

SRC template: "src"
    Availability: Always
    Capabilities:
      video/x-h264
          parsed: true
          stream-format: { (string)avc, (string)avc3, (string)byte-stream }
          alignment: { (string)au, (string)nal }

当您尝试将 qtdemux 的 src pad 链接到 h264parse 的 sink pad 时,可能需要先读取新 pad 的视频 caps,以确认它能与 h264parse 连接。

我已使用以下代码在 "pad-added" 信号回调中将 qtdemux 与 h264parse 链接起来:

/*
 * "pad-added" callback: inspects the new pad's caps and links it to the
 * parser only when it carries H.264 video.
 *
 * src:     the demuxer that emitted the signal
 * new_pad: the newly exposed pad
 * data:    application context (App is declared elsewhere; presumably
 *          it holds/locates sink_pad_video — confirm against caller)
 */
static void pad_added_handler(GstElement *src, GstPad *new_pad, App *data) {
   GstPadLinkReturn ret;
   GstCaps *new_pad_caps = NULL;
   GstStructure *new_pad_struct = NULL;
   const gchar *new_pad_type = NULL;
   gchar *caps_str = NULL;

   /* Check the new pad's type */
   new_pad_caps = gst_pad_get_current_caps(new_pad);
   new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
   new_pad_type = gst_structure_get_name(new_pad_struct);

   /* BUG FIX: the original lost the head of this statement, leaving a
    * bare argument list; also gst_caps_to_string() allocates and the
    * original leaked the result. */
   caps_str = gst_caps_to_string(new_pad_caps);
   g_print("New pad '%s' has type '%s', caps '%s'\n",
           GST_PAD_NAME(new_pad), new_pad_type, caps_str);
   g_free(caps_str);

   if (g_str_has_prefix(new_pad_type, "video/x-h264")) {
      /* sink_pad_video is expected to be in scope from the enclosing
       * code (e.g. h264parse's sink pad) — TODO confirm. */
      ret = gst_pad_link(new_pad, sink_pad_video);
      if (GST_PAD_LINK_FAILED(ret))
         g_printerr("Video pad link failed (%d)\n", ret);
   }

   /* Release the caps ref taken by gst_pad_get_current_caps(). */
   gst_caps_unref(new_pad_caps);
}

注意: 您可能需要在链路中插入一个 capsfilter,以筛选视频源所需的 caps;或者只使用更宽松的判断条件: if (g_str_has_prefix(new_pad_type, "video")) {}

但是我不确定添加队列如何解决您的问题。

希望这会有所帮助。

答案 1 :(得分:0)

我刚刚编辑并修复了Shafeer的答案。

这是有效的代码:

/*
 * "pad-added" callback: links the demuxer's new pad to h264parse's sink
 * pad when the pad carries H.264 video.
 *
 * src:     the demuxer that emitted the signal
 * new_pad: the newly exposed pad
 * data:    the h264parse element, passed as user data to g_signal_connect()
 */
static void pad_added_handler(GstElement *src, GstPad *new_pad, gpointer *data) {
   GstPadLinkReturn ret;
   /* BUG FIX: `nullptr` is C++ (C23 at the earliest) — this is a C
    * file, so use NULL. */
   GstCaps *new_pad_caps = NULL;
   GstStructure *new_pad_struct = NULL;
   const gchar *new_pad_type = NULL;
   GstElement *h264parse = (GstElement *) data;

   /* Check the new pad's type; current caps may be NULL if the pad has
    * not negotiated yet. */
   new_pad_caps = gst_pad_get_current_caps(new_pad);
   if (new_pad_caps == NULL)
      return;
   new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
   new_pad_type = gst_structure_get_name(new_pad_struct);

   if (g_str_has_prefix(new_pad_type, "video/x-h264")) {
      /* gst_element_get_static_pad() returns a ref the caller must
       * release — the original leaked it (and the caps). */
      GstPad *sink_pad_video = gst_element_get_static_pad(h264parse, "sink");
      ret = gst_pad_link(new_pad, sink_pad_video);
      if (GST_PAD_LINK_FAILED(ret))
         g_printerr("Video pad link failed (%d)\n", ret);
      gst_object_unref(sink_pad_video);
   }

   gst_caps_unref(new_pad_caps);
}