如何在C中使用Gstreamer在视频流上叠加图片?

时间:2018-01-15 16:16:01

标签: overlay gstreamer

我想使用 Gstreamer 在来自 IP 摄像机的视频流上叠加一张 ".png" 图片。在我的硬件上可以工作的管道是:

gst-launch-1.0 
rtspsrc location=rtsp://user:pass@IP:port/channel latency=400 ! rtph264depay ! 
vpudec use-vpu-memory=false ! imxvideoconvert_ipu 
! video/x-raw,format=I420 ! gdkpixbufoverlay 
location=/home/user/folder/image.png offset-x=100 offset-y=100 ! overlaysink

当我尝试用 C 实现此管道时出现了问题。我为此管道编写的代码可以运行,但显示器上没有任何视频播放。播放器在把管道设置为 "playing" 状态之前就卡住了。下面是 C 实现的一个简化版本:

#include <gst/gst.h>
#include <glib.h>
#include <iostream>

/* Bundles every element of the pipeline plus the bus and main loop,
 * so a single pointer can be handed to callbacks (e.g. pad-added). */
typedef struct _CustomData {
  GstElement *source;                                    /* rtspsrc */
  GstElement *rtp;                                       /* rtph264depay */
  GstElement *sink;                                      /* overlaysink */
  GstElement *vpudec;                                    /* HW H.264 decoder */
  GstElement *converter, *gdkpixbufoverlay, *capsfilter ; /* convert ! caps ! overlay */
  GstBus *bus;                /* pipeline bus — intended for bus_call watch */
  GstElement    *pipeline;
  GMainLoop     *loop;
} CustomData;


/*
 * Bus watch callback: reports EOS and error messages coming from the
 * pipeline bus and quits the main loop so main() can tear everything down.
 *
 * bus  - the bus the watch was installed on (unused)
 * msg  - the message being delivered
 * data - the GMainLoop supplied when the watch was added
 *
 * Returns TRUE to keep the watch installed.
 */
static gboolean bus_call (GstBus *bus,
      GstMessage *msg,
      gpointer    data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS: {
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    }
    case GST_MESSAGE_ERROR: {
      gchar  *debug = NULL;
      GError *error = NULL;

      gst_message_parse_error (msg, &error, &debug);
      g_printerr ("Error: %s\n", error->message);
      /* FIX: the debug string used to be freed without ever being printed;
       * it often contains the exact element/caps that failed. */
      if (debug != NULL)
        g_printerr ("Debug info: %s\n", debug);
      g_free (debug);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

/*
 * "pad-added" handler for rtspsrc: rtspsrc creates its source pads only
 * after stream negotiation, so the link to the depayloader must be made
 * here at runtime.  Only pads carrying application/x-rtp are linked.
 */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
  GstPad *depay_sink = gst_element_get_static_pad (data->rtp, "sink");
  GstCaps *pad_caps = NULL;
  const gchar *media_type = NULL;

  /* Nothing to do if the depayloader is already connected. */
  if (gst_pad_is_linked (depay_sink))
    goto done;

  /* Inspect the first caps structure to learn the pad's media type. */
  pad_caps = gst_pad_query_caps (new_pad, NULL);
  media_type = gst_structure_get_name (gst_caps_get_structure (pad_caps, 0));

  if (!g_str_has_prefix (media_type, "application/x-rtp")) {
    g_print ("  It has type '%s' which is not x-rtp . Ignoring.\n", 
    media_type);
    goto done;
  }

  if (GST_PAD_LINK_FAILED (gst_pad_link (new_pad, depay_sink)))
    g_print("  Type is '%s' but link failed.\n", media_type);
  else
    g_print ("  Link succeeded (type '%s').\n", media_type);

done:

  if (pad_caps != NULL)
    gst_caps_unref (pad_caps);
  gst_object_unref (depay_sink);
}



int main (int   argc, char *argv[]){

CustomData data;

gst_init (NULL, NULL);

data.loop = g_main_loop_new (NULL, FALSE);

// Create gstreamer elements
data.pipeline = gst_pipeline_new ("player");

data.source           = gst_element_factory_make ("rtspsrc",  "source");
data.rtp              = gst_element_factory_make ("rtph264depay","rtp");
data.vpudec           = gst_element_factory_make ("vpudec","vpudec");
data.converter        = gst_element_factory_make 
("imxcompositor_ipu","converter");
data.capsfilter       = gst_element_factory_make ("capsfilter", "video-
rate");
data.gdkpixbufoverlay = gst_element_factory_make 
("gdkpixbufoverlay","overlaytool");
data.sink             = gst_element_factory_make ("overlaysink", 
"videoSink");


if (!data.pipeline || !data.source || !data.rtp || !data.vpudec || 
!data.converter || !data.capsfilter || !data.gdkpixbufoverlay || !data.sink) 
{
 g_printerr ("One element could not be created. Exiting.\n");
 return -1;
}

g_object_set (data.source, "location","rtsp://user:pass@IP:port/channel", 
NULL);
g_object_set (data.source,"latency", 400 , NULL);
g_object_set (data.vpudec, "use-vpu-memory", false, NULL);

g_object_set (data.gdkpixbufoverlay, 
"location","/home/user/folder/image.png", NULL);
g_object_set (data.gdkpixbufoverlay, "offset-x", 100 , NULL);
g_object_set (data.gdkpixbufoverlay, "offset-y", 100 , NULL);


GstCaps *capsFormat = gst_caps_from_string ("video/x-raw,format=I420");
g_object_set ( data.capsfilter, "caps", capsFormat, NULL);
gst_caps_unref(capsFormat);

//add all elements into the pipeline
gst_bin_add_many (GST_BIN (data.pipeline),
          data.source,
          data.rtp,
          data.vpudec,
          data.converter,
          data.capsfilter,
          data.gdkpixbufoverlay,
          data.sink,
          NULL);

// link all elements
gst_element_link_many ( data.rtp, data.vpudec , data.converter , 
data.capsfilter, data.gdkpixbufoverlay, data.sink, NULL);

g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), 
&data);

// Set the pipeline to "playing" state
GstStateChangeReturn ret;
ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);

if (ret == GST_STATE_CHANGE_FAILURE) {
   g_printerr("Unable to set the pipeline to the playing state.\n");
   gst_object_unref (data.pipeline);
   return -1;
}


// Iterate
g_main_loop_run (data.loop);

 // Out of the main loop, clean
 g_print ("Returned, stopping playback\n");
 gst_element_set_state (data.pipeline, GST_STATE_NULL);

 g_print ("Deleting pipeline\n");
 gst_object_unref (GST_OBJECT (data.pipeline));

 return 0;
}

有没有人看到这个问题?

谢谢

1 个答案:

答案 0(得分:0):

经过多次尝试,我发现在我发布的 C 代码中选择了错误的元素。正确的 data.converter 应该这样创建:

data.converter = gst_element_factory_make("imxvideoconvert_ipu","converter");

而非 imxcompositor_ipu