Error linking audio and video bins in C

Time: 2017-01-19 12:55:05

Tags: c gstreamer rtsp-client

I am getting an error when linking the audio and video bins: gst_pad_set_active: assertion 'GST_IS_PAD (pad)' failed

I am trying to convert the following pipeline into a C application:

  

gst-launch-1.0 rtspsrc location="rtsp://" latency=0 name=demux demux. ! queue ! rtpmp4gdepay ! aacparse ! avdec_aac ! audioconvert ! audioresample ! autoaudiosink demux. ! queue ! rtph264depay ! h264parse ! omxh264dec ! videoconvert ! videoscale ! video/x-raw,width=176,height=144 ! ximagesink

Here is the code implementation:

#include <gst/gst.h>

int main(int argc, char *argv[]) {
    GstElement *source, *audio, *video, *convert, *pipeline, *audioDepay, *audioQueue, *videoQueue,
               *audioParse, *audioDecode, *audioConvert, *audioResample, *audioSink,
               *videoDepay, *videoParser, *videoDecode, *videoConvert, *videoScale,
               *videoSink;

    GstCaps *capsFilter;
    GstBus *bus;
    GstMessage *msg;
    GstPad *pad;
    GstPad *sinkpad,*ghost_sinkpad;
    gboolean link_ok;
    GstStateChangeReturn ret;

    /* Initialize GStreamer */
    gst_init (&argc, &argv);


    /* Create Elements */
    pipeline = gst_pipeline_new("rtsp-pipeline");
    source = gst_element_factory_make ("rtspsrc", "source");


    /*audio bin*/
    audio = gst_bin_new ("audiobin");
    audioQueue = gst_element_factory_make ("queue", "audio-queue");
    audioDepay = gst_element_factory_make ("rtpmp4gdepay", "audio-depayer");
    audioParse = gst_element_factory_make ("aacparse", "audio-parser");
    audioDecode = gst_element_factory_make ("avdec_aac", "audio-decoder");
    audioConvert = gst_element_factory_make ("audioconvert", "aconv");
    audioResample = gst_element_factory_make ("audioresample", "audio-resample");
    audioSink = gst_element_factory_make ("autoaudiosink", "audiosink");

    if (!audioQueue || !audioDepay || !audioParse || !audioDecode || !audioConvert || !audioResample || !audioSink)
    {
            g_printerr("Cannot create audio elements \n");
            return 0;
    }

    /*Setting rtsp source elements values */
    g_object_set(source, "location", "rtsp://", NULL);
    g_object_set(source, "latency", 0, NULL);
    g_object_set(source, "name", "demux", NULL);
    /*Adding audio elements to audio bin */
    gst_bin_add_many(GST_BIN(audio),
                    audioQueue, audioDepay, audioParse, audioDecode,audioConvert, audioResample, audioSink, NULL);
    /*Linking audio elements internally*/
    if (!gst_element_link_many(audioQueue, audioDepay, audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL))
    {
            g_printerr("Cannot link audioDepay and audioParse \n");
            return 0;
    }
    /* Adding pad for audio Queue */
    GstPad *audio_sinkpad, *ghost_audio_sinkpad;
    audio_sinkpad = gst_element_get_static_pad(audioQueue, "sink");
    ghost_sinkpad = gst_ghost_pad_new("sink", audio_sinkpad);
    gst_pad_set_active (ghost_audio_sinkpad, TRUE);
    gst_element_add_pad(audio, ghost_audio_sinkpad);

    gst_bin_add_many(GST_BIN(pipeline), source, audio, NULL);

    gst_element_set_state(audio, GST_STATE_PAUSED);

    /*Video Bin */
    video  = gst_bin_new ("videobin");
    videoQueue = gst_element_factory_make ("queue", "video-queue");
    videoDepay= gst_element_factory_make ("rtph264depay", "video-depayer");
    videoParser = gst_element_factory_make ("h264parse", "video-parser");
    videoDecode = gst_element_factory_make ("omxh264dec", "video-decoder");
    videoConvert = gst_element_factory_make("videoconvert", "convert");
    videoScale = gst_element_factory_make("videoscale", "video-scale");
    videoSink = gst_element_factory_make("ximagesink", "video-sink");
    capsFilter = gst_caps_new_simple("video/x-raw",
                    "width", G_TYPE_INT, 176,
                    "height", G_TYPE_INT, 144,
                    NULL);

    if (!videoQueue || !videoDepay || !videoParser || !videoDecode || !videoConvert || !videoScale || !videoSink || !capsFilter)
    {
            g_printerr("Cannot create video elements \n");
            return 0;
    }

    /*Adding video elements to video bin */
    gst_bin_add_many(GST_BIN(video),
                    videoQueue, videoDepay, videoParser, videoDecode, videoConvert, videoScale, videoSink, NULL);

    /*Linking filter element to videoScale and videoSink */
    link_ok = gst_element_link_filtered(videoScale,videoSink, capsFilter);
    gst_caps_unref (capsFilter);
    if (!link_ok) {
            g_warning ("Failed to link element1 and element2!");
    }
    /* Linking video elements internally */
    if (!gst_element_link_many(videoQueue, videoDepay, videoParser, videoDecode, videoConvert, NULL))
    {
            g_printerr("Cannot link videoDepay and videoParser \n");
            return 0;
    }

    /* Creating dynamic pad between source and videoqueue */
    sinkpad = gst_element_get_static_pad (videoQueue, "sink");
    ghost_sinkpad = gst_ghost_pad_new ("sink", sinkpad);
    gst_pad_set_active (ghost_sinkpad, TRUE);
    gst_element_add_pad (video, ghost_sinkpad);

    gst_bin_add_many(GST_BIN(pipeline), video, NULL);
    /* Start playing */
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* Wait until error or EOS */
    bus = gst_element_get_bus (pipeline);
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

    /* Free resources */
    if (msg != NULL)
            gst_message_unref (msg);
    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    return 0;

}

1 Answer:

Answer 0 (score: 1):

The bug is here:

ghost_sinkpad = gst_ghost_pad_new("sink", audio_sinkpad);
gst_pad_set_active (ghost_audio_sinkpad, TRUE);

You assign the pad to ghost_sinkpad, but then pass ghost_audio_sinkpad, which is never initialized, to gst_pad_set_active; that is what triggers the GST_IS_PAD assertion. I think it should be something like this:

ghost_audio_sinkpad = gst_ghost_pad_new("sink", audio_sinkpad);
gst_pad_set_active (ghost_audio_sinkpad, TRUE);
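
As a side note: fixing the variable name removes the assertion, but rtspsrc only creates its source pads at runtime, so the audio and video bins also have to be linked from a "pad-added" handler, which the code above never connects. A minimal sketch of such a handler (assuming the bin layout from the question; the struct and function names here are only illustrative) could look like:

typedef struct {
    GstElement *audio_bin;   /* the "audiobin" created above */
    GstElement *video_bin;   /* the "videobin" created above */
} Bins;

static void on_pad_added (GstElement *src, GstPad *new_pad, gpointer user_data)
{
    Bins *bins = (Bins *) user_data;
    GstCaps *caps = gst_pad_get_current_caps (new_pad);
    if (caps == NULL)
        return;

    /* rtspsrc pads carry application/x-rtp caps with a "media" field ("audio" or "video") */
    const GstStructure *s = gst_caps_get_structure (caps, 0);
    const gchar *media = gst_structure_get_string (s, "media");
    GstElement *target = (g_strcmp0 (media, "audio") == 0) ? bins->audio_bin : bins->video_bin;

    /* Link the new rtspsrc pad to the ghost "sink" pad that was added to the bin */
    GstPad *sink = gst_element_get_static_pad (target, "sink");
    if (!gst_pad_is_linked (sink))
        gst_pad_link (new_pad, sink);

    gst_object_unref (sink);
    gst_caps_unref (caps);
}

The handler would be connected before the pipeline is set to PLAYING, for example with g_signal_connect (source, "pad-added", G_CALLBACK (on_pad_added), &bins);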