Live streaming to VLC through appsrc

Time: 2021-01-11 07:41:53

Tags: gstreamer gstreamer-1.0

I need to live-stream a webcam to VLC over UDP. I used https://gist.github.com/beeender/d539734794606a38d4e3 as a reference, but I was not able to produce the expected output. Video frames are pushed into the pipeline using appsrc.

The live stream is displayed successfully with the following pipeline:

appsrc ! videoconvert ! autovideosink

Since I need to stream live to VLC over UDP, I updated the pipeline as follows:

appsrc ! videoconvert ! avenc_mpeg4 ! rtpmp4vpay ! udpsink

With the above pipeline I am able to stream video to VLC over UDP, but the quality of the video shown in VLC is very low: the picture is blurry, objects are unrecognizable, and green and red blocks appear in several places. I have tried several options, but could not produce a usable output. Any suggestions would be very helpful. Thanks in advance.
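A likely cause of the blockiness is the encoder's default bitrate: gst-libav video encoders such as avenc_mpeg4 default to roughly 200 kbit/s, far too little for 1280x720 at 30 fps. As a sketch (assuming avenc_mpeg4 exposes libav's "bitrate" property in bits per second, applied to the videoenc variable from the code below):

/* Assumption: avenc_mpeg4 maps libav's "bitrate" option, in bits per second.
 * The ~200 kbit/s default produces heavy blocking at 720p30. */
g_object_set (G_OBJECT (videoenc), "bitrate", 4000000, NULL);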

Please find below the code used to create the pipeline.

void start_stream(void)
{

  /* Pointer to hold bus address */
  GstBus *bus = nullptr;

  gst_init (nullptr, nullptr);
  loop = g_main_loop_new (NULL, FALSE);
  
  pipeline = gst_pipeline_new ("pipeline");
  appsrc = gst_element_factory_make ("appsrc", "source");
  conv = gst_element_factory_make ("videoconvert", "conv");
  
  #if(0)
  /* TEST WITH autovideosink */
  videosink = gst_element_factory_make ("autovideosink", "videosink");
  #else
  /* TEST WITH udpsink */
  videoenc = gst_element_factory_make ("avenc_mpeg4", "videoenc");
  payloader = gst_element_factory_make ("rtpmp4vpay", "payloader");
  videosink = gst_element_factory_make ("udpsink", "videosink");
  g_object_set(G_OBJECT(videosink), "host", "127.0.0.1", "port", 5000, NULL);
  g_object_set(G_OBJECT(payloader), "config-interval", 60, NULL);
  #endif

  /* setup */
  g_object_set (G_OBJECT (appsrc), "caps",
    gst_caps_new_simple ("video/x-raw",
    "format", G_TYPE_STRING, "RGB",
    "width", G_TYPE_INT, 1280,
    "height", G_TYPE_INT, 720,
    "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
    "framerate", GST_TYPE_FRACTION, 30, 1,
    NULL), NULL);

  #if(0)
  /* TEST WITH autovideosink */
  /* WORKING AS EXPECTED, ABLE TO SEE LIVE STREAMING ON DISPLAY */
  gst_bin_add_many (GST_BIN (pipeline), appsrc, conv, videosink, NULL);
  gst_element_link_many (appsrc, conv, videosink, NULL);
  #else
  /* TEST WITH udpsink */
  /* NOT WORKING AS EXPECTED, DISPLAYING A BLURRY VIDEO */
  gst_bin_add_many (GST_BIN (pipeline), appsrc, conv, videoenc, payloader, videosink, NULL);
  gst_element_link_many (appsrc, conv, videoenc, payloader, videosink, NULL);
  #endif
  
  g_object_set (G_OBJECT (appsrc),
    "stream-type", 0,
    "is-live", TRUE,
    "format", GST_FORMAT_TIME, NULL);
    
   bus = gst_element_get_bus(pipeline);
   gst_bus_add_watch (bus, BusWatch, loop);
   gst_object_unref(bus);
 
   gst_element_set_state(pipeline, GST_STATE_PLAYING);

   g_main_loop_run(loop);

   gst_element_set_state(pipeline, GST_STATE_NULL);
   gst_object_unref(pipeline);
}

1 Answer:

Answer 0 (score: -1)

I tried this pipeline and it worked; you can write its C equivalent:

gst-launch-1.0 videotestsrc ! video/x-raw,format=I420,framerate=5/1,width=1280,height=720 ! x265enc option-string="repeat-headers=yes" speed-preset=5 ! rtph265pay ! udpsink host=127.0.0.1 port=5000 sync=false


In the code below, I pull frames out of one pipeline with appsink. I use videotestsrc and jpegenc to simulate the scenario where you receive JPEG frames from a webcam or similar source. Each frame is then pushed into a second pipeline through appsrc, which encodes it and sends it out through udpsink.

To view this UDP stream, we can use the following pipeline:

gst-launch-1.0 udpsrc uri=udp://127.0.0.1 port=5000 caps="application/x-rtp,media=video,payload=96" ! rtph265depay ! h265parse ! avdec_h265 ! autovideosink
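Schematically, the code below runs two pipelines in separate threads, connected through the shared frameData buffer:

capture: videotestsrc -> capsfilter(30fps) -> videorate -> jpegenc -> capsfilter(JPEG) -> jpegdec -> videoconvert -> capsfilter(RGB) -> videoconvert -> appsink
stream:  appsrc(RGB) -> videoconvert -> capsfilter(I420) -> x265enc -> rtph265pay -> udpsink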

#include <iostream>
#include <string>
#include <cstring>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <thread>
#include <mutex>

GMainLoop *loopVideoCapture;
GMainLoop *loopVideoStream;

GstElement *Stream_pipeline, *Stream_appsrc, *Stream_conv, *Stream_videosink, *Stream_videoenc, *Stream_payloader, *Stream_filter;
GstCaps *convertCaps;

#define AUTO_VIDEO_SINK  0
#define OLD_ALGO         0
#define APPSRC_WIDTH 320
#define APPSRC_HEIGHT 240
#define APPSRC_FRAMERATE 5
std::mutex mtx;

struct FrameData
{
    guchar *data;       /* shared RGB frame: written by appsink, read by appsrc */
    unsigned int size;  /* size of one RGB frame in bytes */
};

FrameData frameData;

/* Copy the latest frame from the capture pipeline into the shared buffer.
 * Called from NewFrame() with mtx held. */
void fill_appsrc(const unsigned char *DataPtr, gsize size)
{
    memcpy(frameData.data, DataPtr, MIN(size, frameData.size));
}


static void
cb_need_data (GstElement *appsrc,
          guint unused_size, gpointer user_data)
{
      static GstClockTime timestamp = 0;
      guint size;
      GstFlowReturn ret;
      GstMapInfo map;

      mtx.lock();

      FrameData *frameData = (FrameData*)user_data;
      size = frameData->size;

      /* Copy the shared frame into a fresh buffer and timestamp it. */
      GstBuffer *buffer = gst_buffer_new_allocate (NULL, size, NULL);
      gst_buffer_map (buffer, &map, GST_MAP_WRITE);
      memcpy (map.data, frameData->data, size);
      gst_buffer_unmap (buffer, &map);

      GST_BUFFER_PTS (buffer) = timestamp;
      GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, APPSRC_FRAMERATE);
      timestamp += GST_BUFFER_DURATION (buffer);

      /* The "push-buffer" action signal does not take ownership of the
       * buffer, so it must be unreffed after the emit. */
      g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
      gst_buffer_unref (buffer);

      mtx.unlock();
}
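As an alternative to the "push-buffer" action signal, gst_app_src_push_buffer() from <gst/app/gstappsrc.h> (already included above) can push the frame directly. Note the ownership difference: unlike the signal, the direct call takes ownership of the buffer, so it must not be unreffed afterwards. A minimal sketch:

/* gst_app_src_push_buffer() takes ownership of the buffer reference,
 * so no gst_buffer_unref() after this call. */
GstFlowReturn ret = gst_app_src_push_buffer (GST_APP_SRC (appsrc), buffer);
if (ret != GST_FLOW_OK)
    g_printerr ("push-buffer failed: %d\n", (int) ret);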

void VideoStream(void)
{
  gst_init (nullptr, nullptr);
  loopVideoStream = g_main_loop_new (NULL, FALSE);

  Stream_pipeline = gst_pipeline_new ("pipeline");
  Stream_appsrc = gst_element_factory_make ("appsrc", "source");
  Stream_conv = gst_element_factory_make ("videoconvert", "conv");

  #if(AUTO_VIDEO_SINK == 1)
  Stream_videosink = gst_element_factory_make ("autovideosink", "videosink");

  gst_bin_add_many (GST_BIN (Stream_pipeline), Stream_appsrc, Stream_conv, Stream_videosink, NULL);
  gst_element_link_many (Stream_appsrc, Stream_conv, Stream_videosink, NULL);

  #else

  Stream_videosink = gst_element_factory_make ("udpsink", "videosink");
  g_object_set(G_OBJECT(Stream_videosink), "host", "127.0.0.1", "port", 5000, NULL);
  Stream_videoenc = gst_element_factory_make ("x265enc", "videoenc");
  /* repeat-headers makes x265 emit VPS/SPS/PPS with every keyframe so a
   * late-joining receiver such as VLC can sync; bitrate is in kbit/s. */
  g_object_set (G_OBJECT (Stream_videoenc), "option-string", "repeat-headers=yes", NULL);
  g_object_set (G_OBJECT (Stream_videoenc), "bitrate", 200, NULL);

  #if(OLD_ALGO == 1)
  Stream_payloader = gst_element_factory_make ("rtpmp4vpay", "payloader");
  g_object_set(G_OBJECT(Stream_payloader), "config-interval", 60, NULL);

  gst_bin_add_many (GST_BIN (Stream_pipeline), Stream_appsrc, Stream_conv, Stream_videoenc, Stream_payloader, Stream_videosink, NULL);
  gst_element_link_many (Stream_appsrc, Stream_conv, Stream_videoenc, Stream_payloader, Stream_videosink, NULL);

  #else

  Stream_filter = gst_element_factory_make ("capsfilter", "converter-caps");
  Stream_payloader = gst_element_factory_make ("rtph265pay", "rtp-payloader");

  convertCaps = gst_caps_new_simple ("video/x-raw", "format", G_TYPE_STRING, "I420",
    "width", G_TYPE_INT, APPSRC_WIDTH,
    "height", G_TYPE_INT, APPSRC_HEIGHT,
    "framerate", GST_TYPE_FRACTION, APPSRC_FRAMERATE, 1,
    NULL);

  g_object_set (G_OBJECT (Stream_filter), "caps", convertCaps, NULL);

  gst_bin_add_many (GST_BIN (Stream_pipeline), Stream_appsrc, Stream_conv, Stream_filter,  Stream_videoenc, Stream_payloader, Stream_videosink, NULL);
  gst_element_link_many (Stream_appsrc, Stream_conv, Stream_filter , Stream_videoenc, Stream_payloader, Stream_videosink, NULL);
  #endif

  #endif

  g_object_set (G_OBJECT (Stream_appsrc), "caps",
    gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, "RGB",
      "width", G_TYPE_INT, APPSRC_WIDTH,
      "height", G_TYPE_INT, APPSRC_HEIGHT,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
      "framerate", GST_TYPE_FRACTION, APPSRC_FRAMERATE, 1,
      NULL),
    NULL);

  g_object_set (G_OBJECT (Stream_appsrc),
    "stream-type", 0,
    "is-live", TRUE,
    "block", FALSE,
    "format", GST_FORMAT_TIME,
    NULL);

  g_signal_connect (Stream_appsrc, "need-data", G_CALLBACK (cb_need_data), &frameData);


   gst_element_set_state(Stream_pipeline, GST_STATE_PLAYING);
   g_main_loop_run(loopVideoStream);

   gst_element_set_state(Stream_pipeline, GST_STATE_NULL);
   gst_object_unref(Stream_pipeline);
}

GstFlowReturn NewFrame(GstElement *sink, void *data) {

  GstSample *sample = nullptr;
  GstBuffer *buffer = nullptr;
  GstMapInfo map;
  g_signal_emit_by_name(sink, "pull-sample", &sample);

  if (!sample) {
    g_printerr("Error: Could not obtain sample.\n");
    return GST_FLOW_ERROR;
  }
//  printf("NewFrame\n");

  buffer = gst_sample_get_buffer (sample);
  gst_buffer_map(buffer, &map, GST_MAP_READ);

  mtx.lock();
  fill_appsrc(map.data, map.size);
  mtx.unlock();

  gst_buffer_unmap(buffer, &map);
  gst_sample_unref(sample);

  return GST_FLOW_OK;
}

void VideoCapture(void)
{
  GstElement *pipeline, *source, *videorate, *testsrccaps, *jpegencode, *mjpegcaps, *decodemjpef, *convertmjpeg, *rgbcaps, *convertrgb, *sink;

  gst_init(nullptr, nullptr);

  pipeline     = gst_pipeline_new ("new-pipeline");
  source       = gst_element_factory_make("videotestsrc", "source");
  videorate    = gst_element_factory_make("videorate", "video-rate");
  testsrccaps  = gst_element_factory_make("capsfilter", "test-src-caps");
  jpegencode   = gst_element_factory_make("jpegenc", "jpeg-encoder");
  mjpegcaps    = gst_element_factory_make("capsfilter", "mjpegcaps");
  decodemjpef  = gst_element_factory_make("jpegdec", "decodemjpef");
  convertmjpeg = gst_element_factory_make("videoconvert", "convertmjpeg");
  rgbcaps      = gst_element_factory_make("capsfilter", "rgbcaps");
  convertrgb   = gst_element_factory_make("videoconvert", "convertrgb");
  sink         = gst_element_factory_make("appsink", "sink");

//  g_object_set(source, "device", "/dev/video0", NULL);

  g_object_set (G_OBJECT(testsrccaps), "caps",
    gst_caps_new_simple (
    "video/x-raw",
      "framerate", GST_TYPE_FRACTION, 30, 1,
      NULL),
    NULL);

  g_object_set (G_OBJECT(mjpegcaps), "caps",
    gst_caps_new_simple (
      "image/jpeg",
      "width", G_TYPE_INT, APPSRC_WIDTH,
      "height", G_TYPE_INT, APPSRC_HEIGHT,
      "framerate", GST_TYPE_FRACTION, 30, 1,
      NULL),
    NULL);

  g_object_set (G_OBJECT(rgbcaps), "caps",
    gst_caps_new_simple (
      "video/x-raw",
      "format", G_TYPE_STRING, "RGB",
      "framerate", GST_TYPE_FRACTION, 30, 1,
      NULL),
    NULL);

  g_object_set(sink, "emit-signals", TRUE, "sync", TRUE, NULL);
  g_signal_connect(sink, "new-sample", G_CALLBACK(NewFrame), nullptr);

  gst_bin_add_many(GST_BIN(pipeline), source, testsrccaps, videorate, jpegencode, mjpegcaps, decodemjpef, convertmjpeg, rgbcaps, convertrgb, sink, NULL);
  if (!gst_element_link_many(source, testsrccaps, videorate, jpegencode, mjpegcaps, decodemjpef, convertmjpeg, rgbcaps, convertrgb, sink, NULL)) {
    g_printerr("Error: Elements could not be linked.\n");
  }


  gst_element_set_state(pipeline, GST_STATE_PLAYING);
  loopVideoCapture = g_main_loop_new(NULL, FALSE);
  g_main_loop_run(loopVideoCapture);

  gst_element_set_state(pipeline, GST_STATE_NULL);
  gst_object_unref(pipeline);
}

int main(void){

  /* RGB is 3 bytes per pixel, so one frame is width * height * 3 bytes. */
  frameData.size = APPSRC_WIDTH * APPSRC_HEIGHT * 3;
  frameData.data = new guchar[frameData.size];

  std::thread ThreadStream(VideoStream);
  std::thread ThreadCapture(VideoCapture);

  /* Both main loops run until the process is killed; join blocks forever. */
  ThreadStream.join();
  ThreadCapture.join();

  return 0;
}
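To build and run this, a command along these lines should work, assuming the GStreamer core and app pkg-config files are installed (the file name stream.cpp is hypothetical):

g++ stream.cpp -o stream $(pkg-config --cflags --libs gstreamer-1.0 gstreamer-app-1.0) -pthread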

To view in VLC:

Make a .sdp file, e.g. rtp.sdp, and paste the following into it (matching the stream above: H.265 over RTP, payload type 96, port 5000 on 127.0.0.1):

v=0
m=video 5000 RTP/AVP 96
c=IN IP4 127.0.0.1
a=rtpmap:96 H265/90000

Open this file in VLC (Ctrl+O, Open File), wait a moment, and the video will start playing in VLC.
