I'm trying to get video frames into OpenCV, do some processing on them (aruco detection, to be exact), and then package the resulting frames into an RTSP stream with GStreamer.
I've seen this problem solved in Python, but I'm having trouble translating that solution to C++.
Here is my attempt at recreating the SensorFactory class:
#include <glib-object.h>
#include <iostream>
#include "SensorFactory.h"
SensorFactory::SensorFactory(std::string launch) {
launchString = launch;
cap = cv::VideoCapture(0);
// should be incremented once on each frame for timestamping
numberFrames = 0;
// fill the cvData member: a simple struct holding pointers to cap (cv::VideoCapture*), lastFrame (cv::Mat*) and numberFrames (int*)
cvData.cap = &cap;
cvData.lastFrame = &lastFrame;
cvData.numberFrames = &numberFrames;
}
GstFlowReturn SensorFactory::on_need_data(GstElement *src, CVData *datum) {
if (datum->cap->isOpened()) {
if (datum->cap->read(*(datum->lastFrame))) {
// copy the raw BGR frame bytes into a fresh GStreamer buffer
gsize size = datum->lastFrame->total() * datum->lastFrame->elemSize();
GstBuffer *buf = gst_buffer_new_allocate(nullptr, size, nullptr);
gst_buffer_fill(buf, 0, datum->lastFrame->data, size);
buf->duration = static_cast<GstClockTime>(duration);
GstClockTimeDiff timestamp = *(datum->numberFrames) * duration;
buf->pts = buf->dts = static_cast<GstClockTime>(timestamp);
buf->offset = static_cast<guint64>(timestamp);
int *numf = datum->numberFrames;
*numf += 1;
g_signal_emit_by_name(src, "push-buffer", buf);
gst_buffer_unref(buf);
return GST_FLOW_OK;
}
}
// reached only if the capture is closed or a frame read fails
return GST_FLOW_NOT_LINKED;
}
GstElement *SensorFactory::create_element(const GstRTSPUrl *url) { return gst_parse_launch(launchString.c_str(), nullptr); }
void SensorFactory::configure(GstRTSPMedia *rtspMedia) {
numberFrames = 0;
// gst_rtsp_media_get_element() returns the whole pipeline; fetch the appsrc by the name given in the launch string
GstElement *pipeline = gst_rtsp_media_get_element(rtspMedia);
GstElement *appsrc = gst_bin_get_by_name(GST_BIN(pipeline), "source");
g_signal_connect(appsrc, "need-data", (GCallback) on_need_data, &cvData);
}
The SensorFactory header is nothing special:
#include <string>
#include <gst/rtsp-server/rtsp-media-factory.h>
#include <gst/rtsp-server/rtsp-media.h>
#include <gst/app/gstappsrc.h>
#include <opencv2/videoio.hpp>
class SensorFactory : public GstRTSPMediaFactory {
public:
typedef struct _CVData {
cv::VideoCapture *cap;
cv::Mat *lastFrame;
int *numberFrames;
} CVData;
CVData cvData;
std::string launchString;
cv::VideoCapture cap;
cv::Mat lastFrame;
int numberFrames = 0;
const static int framerate = 30;
const static GstClockTimeDiff duration = GST_SECOND / framerate; // note: 1 / framerate is integer division and truncates to 0
explicit SensorFactory(std::string launch);
static GstFlowReturn on_need_data(GstElement *src, CVData *datum);
GstElement *create_element(const GstRTSPUrl *url);
void configure(GstRTSPMedia *media);
};
Then main.cpp looks like this:
#include <gst/gst.h>
#include "src/SensorFactory.h"
int main() {
gst_init(nullptr, nullptr);
GstRTSPServer *server;
server = gst_rtsp_server_new();
SensorFactory sensorFactory("appsrc name=source is-live=true block=true format=GST_FORMAT_TIME"
"caps=video/x-raw,format=BGR ! "
"videoconvert ! video/x-raw,format=I420 ! "
"x264enc speed-preset=ultrafast tune=zerolatency ! rtph264pay name=pay0");
g_print("setting shared\n");
gst_rtsp_media_factory_set_shared(&sensorFactory, true);
g_print("set shared\n");
GstRTSPMountPoints *mounts;
mounts = gst_rtsp_server_get_mount_points(server);
gst_rtsp_mount_points_add_factory(mounts, "/test", &sensorFactory);
// attach the server to the default main context so it actually serves
gst_rtsp_server_attach(server, nullptr);
GMainLoop *loop;
loop = g_main_loop_new(nullptr, false);
g_main_loop_run(loop);
}
The program compiles fine and even starts running, but it segfaults on gst_rtsp_media_factory_set_shared(&sensorFactory, true);. There is no other hacky memory management anywhere in this program.
Answer 0 (score: 0)
You can try the following steps to write the stream out to RTMP.
if (platform is "Windows") {
// if the platform is windows, then add the head data of the video
// otherwise it will not work on the HTML flash player
headData = " ! video/x-h264,profile=high";
}
// to rtmp (media server e.g: NGINX)
rtmpUrl = "appsrc ! videoconvert ! x264enc speed-preset=ultrafast tune=zerolatency "+headData+" ! flvmux ! rtmpsink location=rtmp://192.168.1.25/mylive/test";
// using UDP broadcast to all 1~255 IPs
rtmpUrl = "appsrc ! videoconvert ! x264enc speed-preset=ultrafast tune=zerolatency "+headData+" ! flvmux ! udpsink host=192.168.1.255 port=5000";
// using UDP broadcast specific IP
rtmpUrl = "appsrc ! videoconvert ! x264enc speed-preset=ultrafast tune=zerolatency "+headData+" ! flvmux ! udpsink host=192.168.1.25 port=5000";
// give the FPS and the size of the video
VideoWriter writer = new VideoWriter(rtmpUrl, Videoio.CAP_GSTREAMER, FOURCC, currentFps, new Size(width, height));
// then you can write the video using writer
Note: make sure your OpenCV build has GStreamer support.
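Since the question is in C++, a minimal sketch of the same idea with OpenCV's C++ API might look like the following (the device index, frame size, framerate and RTMP URL are assumptions you will need to adapt):
#include <opencv2/opencv.hpp>

int main() {
    cv::VideoCapture cap(0); // same capture device as in the question
    int width = 640, height = 480; // assumed; must match the frames you actually write
    double fps = 30.0;
    cv::VideoWriter writer(
            "appsrc ! videoconvert ! x264enc speed-preset=ultrafast tune=zerolatency"
            " ! flvmux ! rtmpsink location=rtmp://192.168.1.25/mylive/test",
            cv::CAP_GSTREAMER, 0, fps, cv::Size(width, height), true);
    cv::Mat frame;
    while (cap.read(frame)) {
        // run the aruco detection on frame here, then push the result
        writer.write(frame);
    }
    return 0;
}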
Answer 1 (score: 0)
Here is an alternative approach. For now, keep your SensorFactory separate from the RTSP code.
Start your SensorFactory with this pipeline:
appsrc name=source is-live=true block=true format=GST_FORMAT_TIME caps=video/x-raw,format=BGR,width=640,height=480,framerate=30/1 ! videoconvert ! video/x-raw,format=I420 ! x264enc speed-preset=ultrafast tune=zerolatency ! udpsink port=5050
We end the pipeline by pushing the h264 into a udpsink on port 5050.
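If you don't need the hand-rolled appsrc plumbing for this half, the sender can also be sketched with cv::VideoWriter (a swapped-in alternative to the SensorFactory, assuming the 640x480 at 30 fps caps from the pipeline above):
#include <opencv2/opencv.hpp>

int main() {
    cv::VideoCapture cap(0);
    // OpenCV takes the place of the appsrc at the head of the pipeline above
    cv::VideoWriter writer(
            "appsrc ! videoconvert ! video/x-raw,format=I420"
            " ! x264enc speed-preset=ultrafast tune=zerolatency ! udpsink port=5050",
            cv::CAP_GSTREAMER, 0, 30.0, cv::Size(640, 480), true);
    cv::Mat frame;
    while (cap.read(frame)) {
        cv::resize(frame, frame, cv::Size(640, 480)); // keep the frame size the caps promise
        writer.write(frame);
    }
    return 0;
}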
Then compile the GStreamer RTSP server example here and start it with the pipeline:
./test-launch "( udpsrc port=5050 ! rtph264pay name=pay0 pt=96 )"
Assuming your SensorFactory works as intended, you should then have an RTSP stream being served at rtsp://localhost:8554/test.
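As a quick sanity check (assuming the stock GStreamer plugins are installed), you can play the stream back with gst-launch:
gst-launch-1.0 rtspsrc location=rtsp://localhost:8554/test latency=100 ! rtph264depay ! avdec_h264 ! videoconvert ! autovideosink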