How do I convert a GStreamer program that streams video over UDP into a Qt program?

Asked: 2010-03-05 06:46:18

Tags: c++ c linux gstreamer

I have a GStreamer program that streams video from a UDP source. I need to implement the same program in a Qt application. Please let me know how I can do this.

The program I am using is shown below.

    #include <gst/gst.h>
    #include <stdio.h>
    #include <stdlib.h> 



    GstElement *pipeline,
               *source,
               *decoder,
               *video_sink,
               *text,
               *audio_sink;


    static gboolean
    bus_call (GstBus     *bus,
              GstMessage *msg,
              gpointer    data)
    {
      GMainLoop *loop = (GMainLoop *) data;

      switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_EOS:
          g_print ("End-of-stream\n");
          g_main_loop_quit (loop);
          break;
        case GST_MESSAGE_ERROR:
        {
          gchar *debug;
          GError *err;

          gst_message_parse_error (msg, &err, &debug);
          g_free (debug);

          g_print ("Error: %s\n", err->message);
          g_error_free (err);

          g_main_loop_quit (loop);
          break;
        }
        default:
          break;
      }

      return TRUE;
    }

    static void
    new_pad (GstElement *element,
             GstPad     *pad,
             gpointer    data)
    {
            GstPad *sinkpad = NULL;
            const gchar *mime;
            GstCaps *caps;

            // get capabilities
            caps = gst_pad_get_caps (pad);


            // get mime type
            mime = gst_structure_get_name (gst_caps_get_structure (caps, 0));

            g_print ("Dynamic pad %s:%s created with mime-type %s\n", GST_OBJECT_NAME (element), GST_OBJECT_NAME (pad), mime);

            if (g_strrstr (mime, "video"))
            {
                    g_print ("Linking video...\n");
                    sinkpad = gst_element_get_static_pad (text, "video_sink");
            }

            if (g_strrstr (mime, "audio"))
            {
                    g_print ("Linking audio...\n");
                    sinkpad = gst_element_get_static_pad (audio_sink, "sink");
            }

            if(sinkpad!=NULL)
            {
                    // link
                    gst_pad_link (pad, sinkpad);
                    gst_object_unref (sinkpad);
            }

            gst_caps_unref (caps);
    }


    int main (int argc, char *argv[])
    {

            GMainLoop *loop;
            GstBus *bus;


            // initialize GStreamer
            gst_init (&argc, &argv);
            printf("step 0\n");
            loop = g_main_loop_new (NULL, FALSE);

            /* check input arguments
            if (argc != 2)
            {
                    g_print ("Usage: %s <filename>\n", argv[0]);
                    return -1;
            }*/
          // argv[1]="http://192.168.0.247:1234/Documents/6.mpg";
//"udp://192.168.0.247:1234";
//"/home/quarkgluon/Documents/rajesh/gstreamer/Serenity.mp4";
            printf("step 1\n");
            // create elements
            pipeline   = gst_pipeline_new ("video-player");
            source     = gst_element_factory_make ("udpsrc", "source");
            decoder    = gst_element_factory_make ("decodebin2", "decoder");
            text = gst_element_factory_make ("textoverlay", "text");
            video_sink = gst_element_factory_make ("xvimagesink", "vsink");
            audio_sink = gst_element_factory_make ("alsasink", "asink");
            if (!pipeline || !source || !decoder || !video_sink || !text || !audio_sink)
            {
            g_print ("One element could not be created\n");
                    return -1;
            }

            // set the UDP port on the source. Also add a message
            // handler.
            g_object_set (G_OBJECT (source),"port",1234, NULL);

           // g_object_set (G_OBJECT (text), "text", "hello world awertyuiop!!!", NULL);
         //   g_object_set (G_OBJECT (text), "italic", 1, NULL);
        //g_object_set (G_OBJECT (text), "bold", 1, NULL);

            bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
            gst_bus_add_watch (bus, bus_call, loop);
            gst_object_unref (bus);

            // put all elements in a bin
            gst_bin_add_many (GST_BIN (pipeline), source, decoder, video_sink, text, audio_sink, NULL);

            // link together - note that we cannot link the decoder and
            // sink yet, because the decoder uses dynamic pads. For that,
            // we set a pad-added signal handler.
            gst_element_link (source, decoder);
            gst_element_link (text, video_sink);
            g_signal_connect (decoder, "pad-added", G_CALLBACK (new_pad), NULL);

            printf("step 2\n");



            // Now set to playing and iterate.
            g_print ("Setting to PLAYING\n");
            gst_element_set_state (pipeline, GST_STATE_PLAYING);
            g_print ("Running\n");
            g_main_loop_run (loop);

            // clean up nicely
            g_print ("Returned, stopping playback\n");
            gst_element_set_state (pipeline, GST_STATE_NULL);
            g_print ("Deleting pipeline\n");
            gst_object_unref (GST_OBJECT (pipeline));

            return 0;
    }
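
For reference, the Qt-specific part is mostly a matter of handing the video sink a native window to draw into; the pipeline construction and the pad-added handling stay the same as in the C program above. Below is a minimal, untested sketch assuming GStreamer 0.10 on X11 and Qt 4 (using the GstXOverlay interface from gst-plugins-base); the widget setup and element names are illustrative, and the dynamic-pad linking is abbreviated.

    // Sketch: render the GStreamer video inside a Qt widget (GStreamer 0.10, X11).
    #include <QApplication>
    #include <QWidget>
    #include <gst/gst.h>
    #include <gst/interfaces/xoverlay.h>   // GstXOverlay (gstreamer interfaces library)

    int main (int argc, char *argv[])
    {
        gst_init (&argc, &argv);               // initialize GStreamer first
        QApplication app (argc, argv);

        QWidget window;                        // plain widget used as the video surface
        window.resize (640, 480);
        window.show ();

        // Same kind of pipeline as in the C program; error checks and the
        // pad-added handler for decodebin2 are omitted here for brevity.
        GstElement *pipeline  = gst_pipeline_new ("qt-player");
        GstElement *source    = gst_element_factory_make ("udpsrc", "source");
        GstElement *decoder   = gst_element_factory_make ("decodebin2", "decoder");
        GstElement *videosink = gst_element_factory_make ("xvimagesink", "vsink");
        g_object_set (G_OBJECT (source), "port", 1234, NULL);
        gst_bin_add_many (GST_BIN (pipeline), source, decoder, videosink, NULL);
        gst_element_link (source, decoder);

        // Hand the widget's native X window to the sink so the video is drawn
        // inside the Qt widget instead of in a separate X window.
        gst_x_overlay_set_xwindow_id (GST_X_OVERLAY (videosink),
                                      window.winId ());

        gst_element_set_state (pipeline, GST_STATE_PLAYING);
        int ret = app.exec ();                 // Qt event loop replaces g_main_loop_run()

        gst_element_set_state (pipeline, GST_STATE_NULL);
        gst_object_unref (GST_OBJECT (pipeline));
        return ret;
    }

On most Linux builds of Qt the default event dispatcher already integrates the GLib main context, so a gst_bus_add_watch() callback can still be used as in the C program; otherwise the bus can be polled periodically (for example from a QTimer) with gst_bus_pop().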

1 Answer:

Answer 1 (score: 3):