How to write a video mixer using the GStreamer C API

Date: 2012-01-04 03:02:41

Tags: c gstreamer

I am trying to replicate the following GStreamer pipeline using the C API:

gst-launch -e videomixer name=mix ! ffmpegcolorspace ! xvimagesink \
   videotestsrc pattern=1  ! video/x-raw-yuv,width=100,height=100 ! videobox border-alpha=0 top=0 left=0 ! mix. \
   videotestsrc pattern=0  ! video/x-raw-yuv,width=100,height=100 ! videobox border-alpha=0 top=0 left=-100 ! mix. 

So far I have:

#include <gst/gst.h>
#include <glib.h>


static gboolean
bus_call (GstBus     *bus,
          GstMessage *msg,
          gpointer    data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {

    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;

    case GST_MESSAGE_ERROR: {
      gchar  *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);

      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);

      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }

  return TRUE;
}


int
main (int   argc,
      char *argv[])
{
  GMainLoop *loop;

  GstElement *pipeline;
  GstElement *source1,*source2;
  GstElement *scale,*filter;
  GstElement *videobox1,*videobox2; //just one.
  GstElement *mixer,*clrspace,*sink;
  GstCaps *filtercaps;
  GstBus *bus;

  /* Initialisation */
  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, FALSE);


  /* Check input arguments */
  /*if (argc != 2) {
    g_printerr ("Usage: %s <Ogg/Vorbis filename>\n", argv[0]);
    return -1;
  }*/

 //gst-launch videotestsrc pattern=snow ! ximagesink

  /* Create gstreamer elements */
  pipeline = gst_pipeline_new ("player");
  source1   = gst_element_factory_make ("videotestsrc",       "source1");
  source2   = gst_element_factory_make ("videotestsrc",       "source2");
//  source2   = gst_element_factory_make ("uridecodebin",       "file-source2");
  scale   = gst_element_factory_make ("videoscale",       "scale");
  filter = gst_element_factory_make("capsfilter","filter");
  videobox1 = gst_element_factory_make ("videobox",       "videobox1");
  videobox2 = gst_element_factory_make ("videobox",       "videobox2");
  mixer = gst_element_factory_make ("videomixer",       "mixer");
  clrspace  = gst_element_factory_make ("ffmpegcolorspace",       "clrspace");

//  demuxer  = gst_element_factory_make ("oggdemux",      "ogg-demuxer");
//  decoder  = gst_element_factory_make ("vorbisdec",     "vorbis-decoder");
//  conv     = gst_element_factory_make ("audioconvert",  "converter");
  sink     = gst_element_factory_make ("xvimagesink", "sink");

  /*if (!pipeline || !source || !demuxer || !decoder || !conv || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }*/
  if (!pipeline || !source1 || !source2 || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  filtercaps = gst_caps_new_simple ("video/x-raw-yuv",
          "width", G_TYPE_INT, 200,
          "height", G_TYPE_INT, 100,          
          NULL);
  g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
  //gst_caps_unref (filtercaps);

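  /* a negative videobox "left" value adds border on that side rather than
     cropping, shifting the second stream right so the two sit side by side */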
  g_object_set(videobox1,"border-alpha",0,"top",0,"left",0,NULL);
  g_object_set(videobox2,"border-alpha",0,"top",0,"left",-200,NULL);


  /* Set up the pipeline */

  /* we set the test patterns on the two source elements */
  g_object_set (G_OBJECT (source1), "pattern", 0, NULL);
  g_object_set (G_OBJECT (source2), "pattern", 1, NULL);

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* we add all elements into the pipeline */
  gst_bin_add_many (GST_BIN (pipeline),
                    source1,filter,videobox1,mixer,clrspace, sink, source2,videobox2, NULL);

  /* we link the elements together */
  //gst_element_link_many (source1, scale, filter, videobox1, mixer, clrspace, sink);
  //gst_element_link_many (source2, scale, filter, videobox2, mixer, clrspace, sink);
  gst_element_link_many (source1, filter, videobox1, mixer, clrspace, sink);
  gst_element_link_many (source2, filter, videobox2, mixer, clrspace, sink);


  /* Set the pipeline to "playing" state*/
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Iterate */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;
}

I have also enabled debug output with: export GST_DEBUG=3

When I run the program, I get the following error:

Running...
0:00:00.178663884  4797  0x8937020 WARN                 basesrc gstbasesrc.c:2582:gst_base_src_loop:<source2> error: Internal data flow error.
0:00:00.178766444  4797  0x8937020 WARN                 basesrc gstbasesrc.c:2582:gst_base_src_loop:<source2> error: streaming task paused, reason not-linked (-1)
Error: Internal data flow error.
Returned, stopping playback
0:00:00.202571746  4797  0x893ae00 WARN           basetransform gstbasetransform.c:1627:gst_base_transform_prepare_output_buffer:<clrspace> pad-alloc failed: wrong-state
0:00:00.202645907  4797  0x893ae00 WARN           basetransform gstbasetransform.c:2335:gst_base_transform_handle_buffer:<clrspace> could not get buffer from pool: wrong-state
Deleting pipeline

Why is it complaining that source2 is not linked?

2 Answers:

Answer 0 (score: 1)

gst_element_link_many (source1, filter, videobox1, mixer, clrspace, sink, NULL);
gst_element_link_many (source2, filter, videobox2, mixer, NULL);
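
Note that both calls above still route the two branches through the same capsfilter instance; a pad can only be linked once, so the second call will fail at source2 just as before. A minimal sketch of giving each branch its own capsfilter (element names invented here, otherwise reusing the question's variables):

GstElement *filter1 = gst_element_factory_make ("capsfilter", "filter1");
GstElement *filter2 = gst_element_factory_make ("capsfilter", "filter2");

/* the same GstCaps object can safely be set on both filters */
g_object_set (G_OBJECT (filter1), "caps", filtercaps, NULL);
g_object_set (G_OBJECT (filter2), "caps", filtercaps, NULL);

gst_bin_add_many (GST_BIN (pipeline), filter1, filter2, NULL);

/* NULL-terminate the varargs lists and link each branch through its own filter */
gst_element_link_many (source1, filter1, videobox1, mixer, clrspace, sink, NULL);
gst_element_link_many (source2, filter2, videobox2, mixer, NULL);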

Also, have a look at the x, y and z properties on the videomixer pads; that way you can spare the videobox elements and gain some performance.
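
For example, a minimal sketch of positioning the second stream via the mixer pad itself, assuming the GStreamer 0.10 videomixer, whose sink pads expose xpos/ypos/zorder properties:

/* request a sink pad on the mixer and position the stream on it,
 * instead of shifting it with videobox */
GstPad *pad = gst_element_get_request_pad (mixer, "sink_%d");
g_object_set (pad,
              "xpos",   200, /* horizontal offset in pixels */
              "ypos",   0,   /* vertical offset in pixels */
              "zorder", 1,   /* stacking order if streams overlap */
              NULL);
/* ...link the branch's src pad to "pad" here... */
gst_object_unref (pad); /* the mixer keeps its own reference to the pad */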

Answer 1 (score: 1)

A bit late, but it might help:

If you look at the documentation for the videomixer element, you will see that the mixer's sink pads are request pads. You need to create these pads before linking them. (The sink pad template is named "sink_%u" in GStreamer 1.0; in 0.10, which this pipeline targets, it is "sink_%d".)

/* Manually link the mixer, which has "Request" pads */
GstPadTemplate *mixer_sink_pad_template;
GstPad *mixer_sink_pad, *src_pad;

mixer_sink_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mixer), "sink_%u");
mixer_sink_pad = gst_element_request_pad (mixer, mixer_sink_pad_template, NULL, NULL);

/* link the src pad of the element feeding the mixer (videobox1 in the question's pipeline) */
src_pad = gst_element_get_static_pad (videobox1, "src");
gst_pad_link (src_pad, mixer_sink_pad);

In the same way, create request pads for any number of streams.
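
A rough sketch of that loop, assuming the upstream branches end in elements collected in a hypothetical branches[] array of size n_branches:

/* request one mixer sink pad per input branch and link it */
guint i;
for (i = 0; i < n_branches; i++) {
  GstPad *mixer_pad = gst_element_request_pad (mixer, mixer_sink_pad_template, NULL, NULL);
  GstPad *branch_src = gst_element_get_static_pad (branches[i], "src");

  if (gst_pad_link (branch_src, mixer_pad) != GST_PAD_LINK_OK)
    g_printerr ("Failed to link branch %u to the mixer\n", i);

  gst_object_unref (branch_src);
}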