An MJPEG RTSP stream server is running in VLC. I then started a second VLC instance to check that the frames in the stream have correct colors, and they all do.
My task is to capture the MJPEG frames at the output of the rtpjpegdepay plugin, without any decompression. I have written a C++ application that uses the following pipeline:
rtspsrc -> rtpjpegdepay -> appsink
The source URL is set with the following line:
g_object_set(G_OBJECT(m_source), "location", url.c_str(), NULL);
Each JPEG frame is captured with:
g_signal_connect(m_sink, "new-sample", G_CALLBACK(CaptureGstBuffer), this);
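For reference, here is a minimal sketch of such a "new-sample" handler that also prints the negotiated caps, which is handy for confirming exactly what the appsink receives (image/jpeg from rtpjpegdepay, or video/x-raw,format=I420 once jpegdec is added). This is not the exact code used (that appears in full below); it assumes "emit-signals" is enabled on the appsink:
// Sketch: a "new-sample" handler that logs the negotiated caps before using the data.
static GstFlowReturn OnNewSampleDebug(GstElement *sink, gpointer user_data)
{
    GstSample *sample = NULL;
    g_signal_emit_by_name(sink, "pull-sample", &sample);
    if (!sample)
        return GST_FLOW_ERROR;

    // Log the caps negotiated on the appsink pad, e.g. "image/jpeg, width=..., height=..."
    GstCaps *caps = gst_sample_get_caps(sample);
    if (caps) {
        gchar *desc = gst_caps_to_string(caps);
        g_print("appsink caps: %s\n", desc);
        g_free(desc);
    }

    GstBuffer *buffer = gst_sample_get_buffer(sample);
    GstMapInfo map;
    if (gst_buffer_map(buffer, &map, GST_MAP_READ)) {
        // map.data / map.size hold one complete depayloaded (or decoded) frame
        gst_buffer_unmap(buffer, &map);
    }
    gst_sample_unref(sample);
    return GST_FLOW_OK;
}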
But the images I receive look very strange (it is a road with lane dividers, only the colors are wrong):
Next, I tried a different pipeline:
rtspsrc -> rtpjpegdepay -> jpegdec -> appsink
Using the same "new-sample" approach, I receive YUV420 images with the same wrong colors as with rtpjpegdepay alone.
I have tried different GStreamer versions, from 1.8.3 to 1.14.2.
What could be the cause of this behavior?
Here is the code (for the YUV420 pipeline):
static void OnPadAdded(GstElement *element, GstPad *pad, void *data)
{
GstElement *rtpjpeg = GST_ELEMENT(data);
GstPad *sinkpad;
sinkpad = gst_element_get_static_pad(rtpjpeg, "sink");
gst_pad_link(pad, sinkpad);
gst_object_unref(sinkpad);
}
static int fileind = 0;
void WriteToFile(BYTE *pBuffer, DWORD dwBufSize)
{
fileind++;
std::stringstream ssFileName;
ssFileName << "D:\\Temp\\file" << fileind << ".yuv";
FILE* fp = fopen(ssFileName.str().c_str(), "wb+");
if (!fp)
    return;
fwrite(pBuffer, dwBufSize, 1, fp);
fclose(fp);
}
static GstFlowReturn CaptureGstBuffer(GstElement *sink, void *data) {
GstSample *sample;
g_signal_emit_by_name(sink, "pull-sample", &sample);
if (sample) {
GstBuffer *buffer = gst_sample_get_buffer(sample);
GstMapInfo map;
gst_buffer_map(buffer, &map, GST_MAP_READ);
WriteToFile((BYTE *)map.data, map.size);
gst_buffer_unmap(buffer, &map);
gst_sample_unref(sample);
}
return GST_FLOW_OK;
}
long RTSPClientDevice::StartClient()
{
m_loop = g_main_loop_new(NULL, FALSE);
m_pipeline = gst_pipeline_new("mjpeg-catcher");
g_assert(m_pipeline);
m_source = gst_element_factory_make("rtspsrc", "Source");
g_assert(m_source);
m_depay = gst_element_factory_make("rtpjpegdepay", "Depay");
g_assert(m_depay);
m_decoder = gst_element_factory_make("jpegdec", "Decoder");
g_assert(m_decoder);
m_sink = gst_element_factory_make("appsink", "Output");
g_assert(m_sink);
if (!m_pipeline || !m_source || !m_depay || !m_decoder || !m_sink) {
return Z_ERR;
}
std::string url = "";
GetClientURL(url);
g_object_set(G_OBJECT(m_source), "location", url.c_str(), NULL);
g_object_set(G_OBJECT (m_source), "do-rtcp", 1, NULL);
g_object_set(G_OBJECT(m_source), "latency", 0, NULL);
g_object_set(G_OBJECT(m_source), "probation", 1, NULL);
m_bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
gst_bus_add_watch(m_bus, bus_call, m_loop);
gst_object_unref(m_bus);
gst_bin_add_many(GST_BIN(m_pipeline), m_source, m_depay, m_decoder ,
m_sink, NULL);
// Note: rtspsrc has no static source pads, so it cannot be linked here;
// its src pad is linked to the depayloader in the "pad-added" callback.
if (!gst_element_link(m_depay, m_decoder)) {
return Z_ERR;
}
if (!gst_element_link(m_decoder, m_sink)) {
return Z_ERR;
}
if (!g_signal_connect(m_source, "pad-added", G_CALLBACK(OnPadAdded),
    m_depay)) // pass the depayloader so OnPadAdded can link rtspsrc's new pad to its sink pad
{
    return Z_ERR;
}
g_object_set(G_OBJECT(m_sink), "emit-signals", TRUE, "sync", FALSE, NULL);
g_object_set(G_OBJECT(m_sink), "max-buffers", (guint)1, NULL);
g_object_set(G_OBJECT(m_sink), "drop", (guint)1, NULL);
g_object_set(G_OBJECT(m_sink), "sync", (guint)0, NULL);
g_object_set(G_OBJECT(m_sink), "max_lateness", G_GINT64_CONSTANT(-1), NULL);
g_object_set(G_OBJECT(m_sink), "qos", (guint)1, NULL);
/*GstCaps *caps = gst_caps_from_string("video/x-raw,encoding-name=RGB,format=(fourcc)YUV444,width=1280,height=720");
g_object_set(m_videoconvert, "caps", caps, NULL);
gst_caps_unref(caps);*/
if (g_signal_connect(m_sink, "new-sample", G_CALLBACK(CaptureGstBuffer), this) <= 0)
{
return Z_ERR;
}
gst_element_set_state(m_pipeline, GST_STATE_PLAYING);
ControlThreadStart(); //Place for g_main_loop_run
m_isStarted = true;
return Z_OK;
}
long RTSPClientDevice::StopClient()
{
if(!m_isStarted)
{
return Z_OK;
}
if (g_main_loop_is_running(m_loop))
{
g_main_loop_quit(m_loop);
g_main_context_wakeup(g_main_loop_get_context(m_loop));
}
gst_element_set_state(m_pipeline, GST_STATE_NULL);
gst_object_unref(GST_OBJECT(m_pipeline));
//TODO: unref plugins
g_main_loop_unref(m_loop);
m_isStarted = false;
return Z_OK;
}
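A side note on the commented-out caps block in StartClient: it mixes GStreamer 0.10 syntax (fourcc) with RTP fields (encoding-name), and it targets a videoconvert that is not in the pipeline. In 1.x, raw-video caps look like video/x-raw,format=..., and a fixed output format is normally obtained by inserting a videoconvert after jpegdec and setting the caps on the appsink (or a capsfilter). The following is only a minimal sketch of that variant under these assumptions; the "Convert" element name and the re-linking are illustrative, not part of the original code:
/* Sketch: force a known raw format at the appsink by adding a videoconvert
 * (jpegdec -> videoconvert -> appsink) and setting caps on the appsink.
 * This replaces the direct gst_element_link(m_decoder, m_sink) call. */
GstElement *convert = gst_element_factory_make("videoconvert", "Convert");
gst_bin_add(GST_BIN(m_pipeline), convert);

GstCaps *caps = gst_caps_new_simple("video/x-raw",
                                    "format", G_TYPE_STRING, "RGB",
                                    NULL);
g_object_set(G_OBJECT(m_sink), "caps", caps, NULL);
gst_caps_unref(caps);

if (!gst_element_link_many(m_decoder, convert, m_sink, NULL)) {
    return Z_ERR;
}
With these caps the appsink delivers packed RGB frames of width x height x 3 bytes, instead of I420 frames of width x height x 3/2 bytes.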
Edit: I tried the following command:
gst-launch-1.0.exe -v rtspsrc location=rtsp://127.0.0.1:554/Streaming/Channels/101 ! rtpjpegdepay ! jpegparse ! multifilesink post-messages=true location="frame%d.jpg"
The result is the same: wrong colors.
The command
gst-launch-1.0.exe -v rtspsrc location=rtsp://127.0.0.1:554/Streaming/Channels/101 ! rtpjpegdepay ! multifilesink post-messages=true location="frame%d.jpg"
also produces the same frames.
I also captured logs from gst-launch-1.0 (no errors, only INFO and DEBUG messages) and a Wireshark trace (nothing suspicious there either). I will try to analyze them more deeply. The AVI was captured with VLC from a Hikvision camera.
Answer (score: 1)
I was using the VLC MJPEG server for debugging purposes, and that turned out to be the problem. The GStreamer client based on the rtpjpegdepay plugin works correctly when connected directly to the Hikvision MJPEG stream. So it is presumably a VLC bug, or VLC violates the standard in some way.
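For reference, a direct-to-camera check can be run with the same command-line pipeline used above, just pointed at the camera instead of the VLC server; the address below is a placeholder:
gst-launch-1.0.exe -v rtspsrc location=rtsp://<camera-ip>:554/Streaming/Channels/101 ! rtpjpegdepay ! jpegparse ! multifilesink post-messages=true location="frame%d.jpg"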