I have a small Qt/C++ program that uses GStreamer. It works fine on Debian 64-bit, but not on a Raspberry Pi 3 running Raspbian.
The program displays the USB webcam at /dev/video0 and streams it to a specified IP address. (For testing I use the loopback address, with an RTP receiver program on the same machine to display the stream.)
I have Qt Creator installed on both machines, and the program is built on the target machine (no cross-compiling).
When run on the Raspberry Pi, the GstStateChangeReturn comes back as GST_STATE_CHANGE_FAILURE.
Here is the complete code:
gstreamer_stream_and_display_test.pro
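A minimal qmake project file for this program only needs the Qt Widgets module plus the GStreamer 1.0 libraries pulled in via pkg-config. The sketch below is an approximation based on the sources (package names, target name and file list are assumptions), not necessarily the exact .pro used:
# Approximate qmake project file (assumed, not the verbatim original):
# Qt Widgets for the GUI, GStreamer 1.0 linked through pkg-config.
QT        += core gui widgets
TARGET     = gstreamer_stream_and_display_test
TEMPLATE   = app
CONFIG    += link_pkgconfig
PKGCONFIG += gstreamer-1.0 gstreamer-video-1.0
SOURCES   += main.cpp mainwindow.cpp
HEADERS   += mainwindow.h
FORMS     += mainwindow.ui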
mainwindow.h
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QMainWindow>
#include <glib.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
namespace Ui {
class MainWindow;
}
class MainWindow : public QMainWindow
{
Q_OBJECT
public:
explicit MainWindow(QWidget *parent = nullptr);
~MainWindow();
private:
Ui::MainWindow *ui;
GstElement *pipeline0;
GstStateChangeReturn sret0;
};
#endif // MAINWINDOW_H
main.cpp
#include "mainwindow.h"
#include <QApplication>
#include <glib.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
gst_init (&argc, &argv);
MainWindow w;
w.show();
return a.exec();
}
mainwindow.cpp
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include <QApplication>
#include <QWidget>
#include <QDebug>
#include <glib.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
MainWindow::MainWindow(QWidget *parent) :
QMainWindow(parent),
ui(new Ui::MainWindow)
{
ui->setupUi(this);
setFixedSize(1280,720);
setWindowTitle("Stream and Display Test");
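// native window handle (X11 window id on Linux), used below for gst_video_overlay_set_window_handle()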
WId xwinid = winId();
pipeline0 = gst_pipeline_new("myPipeline");
GstElement *v4l2src = gst_element_factory_make("v4l2src", "v4l2src");
GstElement *capsfilter = gst_element_factory_make("capsfilter", "capsfilter");
GstElement *tee = gst_element_factory_make("tee", "tee");
GstElement *encoder = gst_element_factory_make("jpegenc", "encoder");
GstElement *videoConverter = gst_element_factory_make("videoconvert", "videoConverter");
GstElement *rtp = gst_element_factory_make("rtpjpegpay", "rtp");
GstElement *queueDisplay = gst_element_factory_make("queue", "queueDisplay");
GstElement *queueStreamer = gst_element_factory_make("queue", "queueStreamer");
GstElement *udpsink = gst_element_factory_make("udpsink", "udpsink");
GstElement *videoSink = gst_element_factory_make("xvimagesink", "videoSink");
//                               / queue -> jpegenc -> rtpjpegpay -> udpsink
// v4l2src -> capsfilter -> tee <
//                               \ queue -> videoconvert -> xvimagesink
g_object_set(udpsink, "port", 5200, NULL);
g_object_set(udpsink, "host", "127.0.0.1", NULL);
GstCaps *caps = gst_caps_from_string("video/x-raw, width=(int)1280, height=(int)720");
g_object_set(capsfilter, "caps", caps, NULL);
gst_caps_unref(caps); // the capsfilter keeps its own reference
//add elements to bin
gst_bin_add_many(GST_BIN(pipeline0), v4l2src, capsfilter, tee, encoder, queueDisplay, queueStreamer, videoConverter, rtp, udpsink, videoSink, NULL);
//Link source elements
if(!gst_element_link_many(v4l2src, capsfilter, tee, NULL))
{
qDebug()<<"Unable to link source elements";
}
//Link streaming elements
if(!gst_element_link_many(queueStreamer, encoder, rtp, udpsink, NULL))
{
qDebug()<<"Unable to link streaming elements";
}
//Link display elements
if(!gst_element_link_many(queueDisplay, videoConverter, videoSink, NULL))
{
qDebug()<<"Unable to link display elements";
}
//Create Tee Pads
GstPad *teeStreamerPad = gst_element_get_request_pad(tee, "src_%u");
GstPad *queueStreamerPad = gst_element_get_static_pad(queueStreamer, "sink");
GstPad *teeDisplayPad = gst_element_get_request_pad(tee, "src_%u");
GstPad *queueDisplayPad= gst_element_get_static_pad(queueDisplay, "sink");
//Link Tee and Queue Pads
if(gst_pad_link(teeStreamerPad, queueStreamerPad) != GST_PAD_LINK_OK)
{
qDebug()<<"Unable to link streamer pads";
}
if(gst_pad_link(teeDisplayPad, queueDisplayPad) != GST_PAD_LINK_OK)
{
qDebug()<<"Unable to link display pads";
}
gst_object_unref(queueStreamerPad);
gst_object_unref(queueDisplayPad);
//Setup Gui Display Area
gint viewX = 0;
gint viewY = 0;
gint camResX = 1280;
gint camResY = 720;
gst_video_overlay_set_window_handle(GST_VIDEO_OVERLAY (videoSink), xwinid);
gst_video_overlay_set_render_rectangle(GST_VIDEO_OVERLAY (videoSink), viewX, viewY, camResX, camResY);
//start pipeline
sret0 = gst_element_set_state(pipeline0, GST_STATE_PLAYING);
if(sret0 == GST_STATE_CHANGE_FAILURE)
{
qDebug()<<"Error: GST_STATE_CHANGE_FAILURE";
close();
}
else
{
qDebug()<<sret0;
}
}
MainWindow::~MainWindow()
{
gst_element_set_state (pipeline0, GST_STATE_NULL);
gst_object_unref(pipeline0);
delete ui;
qDebug()<<"Closed correctly";
}
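To see why the state change fails on the Pi, it helps to read the actual error message from the pipeline bus instead of only printing GST_STATE_CHANGE_FAILURE. A small helper along these lines (hypothetical, not part of the program above) could be called from the failure branch of the constructor:
// Hypothetical debugging helper (not part of the original program): pop the
// first ERROR message from the pipeline bus so the real cause of the failed
// state change (missing plugin, no Xv adaptor for xvimagesink, caps mismatch,
// v4l2 device problem, ...) gets printed instead of just the return code.
#include <QDebug>
#include <gst/gst.h>

static void printPipelineError(GstElement *pipeline)
{
    GstBus *bus = gst_element_get_bus(pipeline);
    // Wait briefly; the ERROR message can arrive slightly after
    // gst_element_set_state() has already returned GST_STATE_CHANGE_FAILURE.
    GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_SECOND, GST_MESSAGE_ERROR);
    if (msg) {
        GError *err = nullptr;
        gchar *debugInfo = nullptr;
        gst_message_parse_error(msg, &err, &debugInfo);
        qDebug() << "GStreamer error:" << (err ? err->message : "unknown");
        if (debugInfo)
            qDebug() << "Debug details:" << debugInfo;
        g_clear_error(&err);
        g_free(debugInfo);
        gst_message_unref(msg);
    } else {
        qDebug() << "No ERROR message on the bus within the timeout";
    }
    gst_object_unref(bus);
}
Calling printPipelineError(pipeline0) right after the GST_STATE_CHANGE_FAILURE check usually reveals which element refuses to start.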
Answer (score: 0)
I think you are making this more complicated than it needs to be by using GStreamer directly.
It should be easier to use the Qt QCamera class from the Multimedia module: it lets you play the webcam stream (and also record it, capture frames, query webcam information, and so on).
You can even write your own QAbstractVideoSurface to handle the video frames yourself.
This example is certainly a good starting point.
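For reference, a minimal sketch of the QCamera approach could look like this (Qt 5 is assumed, with QT += multimedia multimediawidgets in the .pro; this is an illustration, not code taken from the linked example):
// Minimal QCamera viewer sketch (assumes Qt 5 Multimedia): shows the default
// webcam (/dev/video0 on Linux) in a QCameraViewfinder widget.
#include <QApplication>
#include <QCamera>
#include <QCameraViewfinder>

int main(int argc, char *argv[])
{
    QApplication app(argc, argv);

    QCameraViewfinder viewfinder;
    viewfinder.setWindowTitle("QCamera test");
    viewfinder.resize(1280, 720);

    QCamera camera;                    // default camera device
    camera.setViewfinder(&viewfinder); // render the frames into the widget
    camera.start();

    viewfinder.show();
    return app.exec();
}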