How to implement a GStreamer tee in C code

Posted: 2017-12-07 13:47:17

Tags: gstreamer

I have the following working pipeline. It has already been tested with the command-line tool gst-launch-1.0: videotestsrc ! video/x-raw,width=640,height=480 ! videocrop left=80 right=80 ! tee name=t ! queue ! glupload ! glimagesink t. ! queue ! jpegenc ! avimux ! filesink location=output.avi, and it works in both cases.

static gboolean busCallback(GstBus *bus, GstMessage *message, gpointer data);

GStreamer::GStreamer(QQuickItem *parent) : QQuickItem(parent)
{
    qDebug() << "Constructed GSteamer";        
}    

void GStreamer::createPipeline()
{
    qDebug() << "Creating pipeline";      

    if(m_source.isEmpty()){
        qDebug() << "Error: Missing source property for GStreamer component";
        return;
    }

    if(m_videoItem.isEmpty()){
        qDebug() << "Error: Missing videoItem property for GStreamer component";
        return;
    }

    m_pipeline = gst_pipeline_new(NULL);
    m_sink = NULL;

    QByteArray ba = m_source.toLatin1();
    m_src = gst_element_factory_make(ba.data(), NULL);
    g_assert(m_src);


    m_filter = gst_element_factory_make("capsfilter", "filter");
    g_assert(m_filter);

    GstCaps *caps = gst_caps_new_simple("video/x-raw",
        "width", G_TYPE_INT, 640,
        "height", G_TYPE_INT, 480,
        NULL);
    g_object_set(G_OBJECT(m_filter), "caps", caps, NULL);
    gst_caps_unref(caps);   // g_object_set() takes its own reference to the caps

    m_convert = gst_element_factory_make("videoconvert", NULL);
    g_assert(m_convert);

    m_crop = gst_element_factory_make("videocrop", "crop");
    g_assert(m_crop);

    g_object_set(G_OBJECT (m_crop), "left", 80, "right", 80, NULL);

    // Tee
    m_tee = gst_element_factory_make("tee", "videotee");
    g_assert(m_tee);

    // Display queue
    m_displayQueue = gst_element_factory_make("queue", "displayQueue");
    g_assert(m_displayQueue);    

    m_upload = gst_element_factory_make("glupload", NULL);
    g_assert(m_upload);    

    m_sink = gst_element_factory_make("qmlglsink", NULL);
    g_assert(m_sink);

    // Record queue
    m_recordQueue = gst_element_factory_make("queue", "recordQueue");
    g_assert(m_recordQueue);

    m_encode = gst_element_factory_make("jpegenc", NULL);
    g_assert(m_encode);

    m_mux = gst_element_factory_make("avimux", NULL);
    g_assert(m_mux);

    m_filesink = gst_element_factory_make("filesink", NULL);
    g_assert(m_filesink);

    g_object_set(G_OBJECT(m_filesink), "location", "output.avi", NULL);    

    gst_bin_add_many(GST_BIN (m_pipeline), m_src, m_filter, m_convert, m_crop, m_upload, m_sink, NULL);
    gst_bin_add_many(GST_BIN(m_pipeline), m_tee, m_displayQueue, m_recordQueue, m_encode, m_mux, m_filesink, NULL);

    // If I only link this simple pipeline, it works fine
    /*
   if(!gst_element_link_many(m_src, m_filter, m_convert, m_crop, m_upload, m_sink, NULL)){
        qDebug() << "Unable to link source";
    }
    */

    if(!gst_element_link_many(m_src, m_filter, m_convert, m_crop, m_tee, NULL)){
        qDebug() << "Unable to link source";
    }
    if(!gst_element_link_many(m_displayQueue, m_upload, m_sink, NULL)){
        qDebug() << "Unable to link display queue";
    }
    if(!gst_element_link_many(m_recordQueue, m_encode, m_mux, m_filesink, NULL)){
        qDebug() << "Unable to link record queue";
    }    

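    // Note: the tee's source pads are request pads ("src_%u"); they have to be
    // requested explicitly and should eventually be released again with
    // gst_element_release_request_pad().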
    GstPad *teeDisplayPad = gst_element_get_request_pad(m_tee, "src_%u");
    GstPad *queueDisplayPad = gst_element_get_static_pad(m_displayQueue, "sink");

    GstPad *teeRecordPad = gst_element_get_request_pad(m_tee, "src_%u");
    GstPad *queueRecordPad = gst_element_get_static_pad(m_recordQueue, "sink");

    if(gst_pad_link(teeDisplayPad, queueDisplayPad) != GST_PAD_LINK_OK){
        qDebug() << "Unable to link display tee";
    }

    if(gst_pad_link(teeRecordPad, queueRecordPad) != GST_PAD_LINK_OK){
        qDebug() << "Unable to link record tee";
    }

    //gst_object_unref(teeDisplayPad);
    gst_object_unref(queueDisplayPad);
    //gst_object_unref(teeRecordPad);
    gst_object_unref(queueRecordPad);    

    QQuickItem *videoItem = window()->findChild<QQuickItem *> (m_videoItem);
    g_object_set(m_sink, "widget", videoItem, NULL);

    // This will call gst_element_set_state(m_pipeline, GST_STATE_PLAYING) when the window is ready
    window()->scheduleRenderJob (new SetPlaying (m_pipeline), QQuickWindow::BeforeSynchronizingStage);    

    // Create the main loop before adding the bus watch, so that busCallback
    // receives a valid GMainLoop pointer as its user data.
    m_loop = g_main_loop_new(NULL, FALSE);

    m_bus = gst_element_get_bus(m_pipeline);
    gst_bus_add_watch(m_bus, busCallback, m_loop);   // m_bus is unreffed in the destructor

    g_main_loop_run(m_loop);
}

static gboolean busCallback(GstBus *bus, GstMessage *message, gpointer data){
    qDebug() << "Callback function reached";
    switch(GST_MESSAGE_TYPE(message)){
        case GST_MESSAGE_ERROR: {
            GError *error = NULL;
            gchar *debugInfo = NULL;
            gst_message_parse_error(message, &error, &debugInfo);
            qDebug() << "Error received from element" << GST_OBJECT_NAME(message->src) << ":" << error->message;
            qDebug() << "Debugging information:" << (debugInfo ? debugInfo : "none");
            g_clear_error(&error);
            g_free(debugInfo);

            g_main_loop_quit(static_cast<GMainLoop *>(data));
            break;
        }
        case GST_MESSAGE_EOS:
            qDebug() << "End-Of-Stream reached.";
            g_main_loop_quit(static_cast<GMainLoop *>(data)); 
            break;
        default:
            qDebug() << "Unexpected message received."; 
            break;
    }
    return TRUE;
}

/**
The rest of the code is probably not relevant. It contains 
only destructor and some getters and setters.
**/

GStreamer::~GStreamer()
{
    gst_object_unref(m_bus);
    gst_element_set_state(m_pipeline, GST_STATE_NULL);
    gst_object_unref(m_pipeline);
}

QString GStreamer::source() const
{
    return m_source;
}

void GStreamer::setSource(const QString &source)
{
    if(source != m_source){
        m_source = source;
    }
}

QString GStreamer::videoItem() const
{
    return m_videoItem;
}

void GStreamer::setVideoItem(const QString &videoItem)
{
    if(videoItem != m_videoItem){
        m_videoItem = videoItem;
    }
}

I tried to set this up manually in code, but I am now stuck on the following error (the application opens, but no video is displayed):

Error received from element videotestsrc0: Internal data stream error.

Debugging information: gstbasesrc.c(2948): gst_base_src_loop (): /GstPipeline:pipeline0/GstVideoTestSrc:videotestsrc0: streaming task paused, reason not-negotiated (-4)

I am using GStreamer in a Qt application, where the video is linked to a QML type via qmlglsink. All of the GStreamer-related code lives in a class called GStreamer. The whole .cpp file is posted above, in case the problem is somewhere I cannot guess. I apologise for the unrelated code.

All member variables are defined in the .h file.

If I do not add the tee element to the bin and link it into the pipeline, the video shows up on screen as expected. So I guess I have messed up the pads on the tee element.

I have been following the tutorials in the GStreamer documentation, so I do not understand why it is not working.

Hopefully someone can help.

1 Answer:

Answer (score: 2):

OK, the difference between the gst-launch line provided and the application code is the use of the qmlglsink element instead of glimagesink.

The problem is that qmlglsink only accepts RGBA-formatted video buffers, while jpegenc in the other branch of the tee does not accept RGBA-formatted video buffers. This causes a negotiation problem, because there is no common format that both branches of the tee support.

The fix is to add a videoconvert element before jpegenc, or a glcolorconvert element before qmlglsink, so that both branches of the tee can negotiate the same video format; a sketch of this is shown below.
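For illustration, here is a minimal sketch of that fix applied to the createPipeline() code from the question, replacing the two branch-linking calls. The local variable names recordConvert and displayConvert are assumptions made for this example, not part of the original code.

// Hypothetical converter elements, one per branch of the tee (names are illustrative only).
GstElement *recordConvert = gst_element_factory_make("videoconvert", "recordConvert");
GstElement *displayConvert = gst_element_factory_make("glcolorconvert", "displayConvert");
g_assert(recordConvert);
g_assert(displayConvert);

gst_bin_add_many(GST_BIN(m_pipeline), recordConvert, displayConvert, NULL);

// Display branch: queue ! glupload ! glcolorconvert ! qmlglsink
if(!gst_element_link_many(m_displayQueue, m_upload, displayConvert, m_sink, NULL)){
    qDebug() << "Unable to link display queue";
}

// Record branch: queue ! videoconvert ! jpegenc ! avimux ! filesink
if(!gst_element_link_many(m_recordQueue, recordConvert, m_encode, m_mux, m_filesink, NULL)){
    qDebug() << "Unable to link record queue";
}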

Side note: internally, glimagesink is actually glupload ! glcolorconvert ! actual-sink, so it already converts the video format.