I want to play a YUV video sequence with Qt. Right now I use a QPixmap and call DrawPixel on it pixel by pixel, but it cannot play the video in real time. How can I improve the speed?
Answer 0 (score: 1)
Answer 1 (score: 0)
Going pixel by pixel is the slowest possible way to build an image. If you prepare the image data beforehand and use QPixmap's loadFromData() method, performance improves a lot.
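One caveat for raw frames: QPixmap::loadFromData() expects encoded image data (PNG, JPEG, ...), so for a decoded video frame the closer equivalent is to convert it to RGB and wrap the buffer in a QImage. A minimal sketch along those lines, assuming the frame has already been converted into a tightly packed RGB888 buffer (the function name and widget are just for illustration):

#include <QLabel>
#include <QImage>
#include <QPixmap>

// Show one frame on a QLabel. 'rgb' is assumed to be a packed RGB888 buffer
// of width * height pixels that stays alive for the duration of this call.
void showFrame(QLabel* view, const uchar* rgb, int width, int height)
{
    QImage frame(rgb, width, height, width * 3, QImage::Format_RGB888);
    view->setPixmap(QPixmap::fromImage(frame)); // fromImage() copies the pixels
}

Even so, converting and scaling every frame on the CPU can still be the bottleneck, which is why the OpenGL approach in the answer below is usually faster.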
Answer 2 (score: 0)
Well, DrawPixel is definitely the worst-performing solution.
QOpenGLWidget (in today's Qt 5) can be used to render video frames as a texture. Depending on the video pixel format, this is either a plain texture draw, or a pixel-format conversion done in a shader followed by drawing the texture.
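Since the widget below uploads the frame as GL_RGBA, a planar YUV source has to be converted somewhere first. The shader route mentioned above is the faster option; as a simpler illustration, here is a rough CPU-side sketch (my addition, not part of the original answer) that converts one YUV420p frame to RGBA using BT.601 coefficients:

#include <vector>
#include <cstdint>
#include <cstddef>
#include <algorithm>

// Rough CPU-side YUV420p (I420) -> RGBA8888 conversion, BT.601 coefficients.
// 'yuv' points to the Y plane, immediately followed by the U and V planes.
std::vector<uint8_t> yuv420pToRgba(const uint8_t* yuv, int width, int height)
{
    std::vector<uint8_t> rgba(static_cast<std::size_t>(width) * height * 4);
    const uint8_t* yPlane = yuv;
    const uint8_t* uPlane = yPlane + width * height;
    const uint8_t* vPlane = uPlane + (width / 2) * (height / 2);

    auto clamp = [](double x) {
        return static_cast<uint8_t>(std::min(255.0, std::max(0.0, x)));
    };

    for (int row = 0; row < height; ++row) {
        for (int col = 0; col < width; ++col) {
            const double y = yPlane[row * width + col];
            const double u = uPlane[(row / 2) * (width / 2) + col / 2] - 128.0;
            const double v = vPlane[(row / 2) * (width / 2) + col / 2] - 128.0;

            const std::size_t i = (static_cast<std::size_t>(row) * width + col) * 4;
            rgba[i + 0] = clamp(y + 1.402 * v);              // R
            rgba[i + 1] = clamp(y - 0.344 * u - 0.714 * v);  // G
            rgba[i + 2] = clamp(y + 1.772 * u);              // B
            rgba[i + 3] = 255;                               // opaque alpha
        }
    }
    return rgba;
}

Doing this per frame on the GUI thread can easily eat the time budget, so in practice it belongs on a worker thread, or better, in the fragment shader as suggested above.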
This question is old, so I am leaving a rough solution here only because it took me some time to figure this out myself. The simplest approach (not the best one, since a lot of optimization is possible) is:
OpenGLDisplayRGB.h
#pragma once
#include <QOpenGLWidget>
#include <QOpenGLFunctions>
#include <QScopedPointer>
#include <QException>
/*!
 * \brief The OpenGLDisplayRGB class
 * Simple OpenGL display widget that renders an RGBA frame as a texture
 */
class OpenGLDisplayRGB : public QOpenGLWidget, public QOpenGLFunctions
{
    Q_OBJECT
public:
    explicit OpenGLDisplayRGB(QWidget* parent = nullptr);
    ~OpenGLDisplayRGB() override;

protected:
    void initializeGL() override;
    void resizeGL(int w, int h) override;
    void paintGL() override;
    void closeEvent(QCloseEvent* e) override;

public:
    void DisplayVideoFrame(unsigned char* data, int frameWidth, int frameHeight);

    Q_SIGNAL void closed();

private:
    struct OpenGLDisplayRGBImpl;
    QScopedPointer<OpenGLDisplayRGBImpl> impl;
};
OpenGLDisplayRGB.cpp
#include "OpenGLDisplayRGB.h"
#include <QOpenGLShader>
#include <QOpenGLTexture>
#include <QCoreApplication>
#include <QResizeEvent>
#include <QTimer>
#include <QDebug>
#define ATTRIB_VERTEX 0
#define ATTRIB_TEXTURE 1
namespace
{
    // Vertex positions of the full-screen quad (drawn as a triangle strip)
    static const GLfloat vertexVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

    // Matching texture coordinates for the vertices above
    static const GLfloat textureVertices[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };
}
struct OpenGLDisplayRGB::OpenGLDisplayRGBImpl
{
    OpenGLDisplayRGBImpl(QObject* ownerPtr)
        : mBufRGB(nullptr)
        //, mRepaintTimer(new QTimer(ownerPtr))
        , mEnabled(true)
        , mShaderProgram(new QOpenGLShaderProgram(ownerPtr))
        , mTexture(new QOpenGLTexture(QOpenGLTexture::Target2D))
    { }

    unsigned char*                 mBufRGB;
    //QTimer*                      mRepaintTimer;
    bool                           mEnabled;
    QOpenGLShader*                 mVShader;
    QOpenGLShader*                 mFShader;
    QOpenGLShaderProgram*          mShaderProgram;
    QScopedPointer<QOpenGLTexture> mTexture;

    int                            mTextureUniform;
    GLsizei                        mVideoW, mVideoH;
};
/*************************************************************************/
OpenGLDisplayRGB::OpenGLDisplayRGB(QWidget* parent)
    : QOpenGLWidget(parent)
    , impl(new OpenGLDisplayRGBImpl(this))
{
    setAttribute(Qt::WA_OpaquePaintEvent);
//  setAttribute(Qt::WA_PaintOnScreen);
    setAttribute(Qt::WA_NoSystemBackground);

    /*
    impl->mRepaintTimer->setInterval(50);
    connect(impl->mRepaintTimer, SIGNAL(timeout()), this, SLOT(update()));
    impl->mRepaintTimer->start();
    */
}
OpenGLDisplayRGB::~OpenGLDisplayRGB()
{
    // Make the GL context current so the QOpenGLTexture owned by impl
    // is destroyed with a valid context.
    makeCurrent();
    impl.reset();
    doneCurrent();
}
void OpenGLDisplayRGB::DisplayVideoFrame(unsigned char* data, int frameWidth, int frameHeight)
{
    // The buffer is not copied; the caller must keep it alive until the next paintGL().
    impl->mVideoW = frameWidth;
    impl->mVideoH = frameHeight;
    impl->mBufRGB = data;
    update();
}
void OpenGLDisplayRGB::initializeGL()
{
    initializeOpenGLFunctions();
    glEnable(GL_DEPTH_TEST);

    /* The modern OpenGL rendering pipeline relies on shaders to process incoming data.
     * A shader is a small program written in the OpenGL Shading Language (GLSL). */
    impl->mEnabled = impl->mShaderProgram->addShaderFromSourceFile(QOpenGLShader::Vertex, ":/OpenGL/simple_vertex_shader.v.glsl");
    if (!impl->mEnabled)
        qDebug() << QString("[Error] Vertex shader failed: %1").arg(impl->mShaderProgram->log());

    impl->mEnabled = impl->mEnabled && impl->mShaderProgram->addShaderFromSourceFile(QOpenGLShader::Fragment, ":/OpenGL/simple_texture_shader.f.glsl");
    if (!impl->mEnabled)
        qDebug() << QString("[Error] Fragment shader failed: %1").arg(impl->mShaderProgram->log());

    // Bind the attribute vertexIn to the fixed location ATTRIB_VERTEX;
    // the attribute is declared in the vertex shader source.
    impl->mShaderProgram->bindAttributeLocation("vertexIn", ATTRIB_VERTEX);
    // Bind the attribute textureIn to the fixed location ATTRIB_TEXTURE;
    // it is also declared in the vertex shader source.
    impl->mShaderProgram->bindAttributeLocation("textureIn", ATTRIB_TEXTURE);

    // Link all the shaders added above and activate the program.
    impl->mShaderProgram->link();
    impl->mShaderProgram->bind();

    // Look up the location of the sampler uniform declared in the fragment shader.
    impl->mTextureUniform = impl->mShaderProgram->uniformLocation("uSampler");

    // Upload the vertex positions for ATTRIB_VERTEX and the texture coordinates
    // for ATTRIB_TEXTURE, then enable both attribute arrays (disabled by default).
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 0, vertexVertices);
    glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, GL_FALSE, 0, textureVertices);
    glEnableVertexAttribArray(ATTRIB_VERTEX);
    glEnableVertexAttribArray(ATTRIB_TEXTURE);

    impl->mTexture->create();
    impl->mTexture->setMinMagFilters(QOpenGLTexture::Linear, QOpenGLTexture::Linear);
    impl->mTexture->setWrapMode(QOpenGLTexture::ClampToEdge);

    glClearColor(1.0f, 0.0f, 1.0f, 1.0f);   // background color (magenta, easy to spot)
}
void OpenGLDisplayRGB::resizeGL(int w, int h)
{
    if (h == 0)             // prevent division by zero
        h = 1;

    // Set the viewport
    glViewport(0, 0, w, h);
}
void OpenGLDisplayRGB::paintGL()
{
    if (!impl->mEnabled || !impl->mBufRGB)
        return;

    // Activate texture unit GL_TEXTURE0 and bind our texture to it.
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, impl->mTexture->textureId());

    // Upload the current RGBA frame from mBufRGB into the bound texture.
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, impl->mVideoW, impl->mVideoH, 0, GL_RGBA, GL_UNSIGNED_BYTE, impl->mBufRGB);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // Point the sampler uniform at texture unit 0. The value is the index of the
    // texture unit (0 for GL_TEXTURE0, 1 for GL_TEXTURE1, ...), not the texture id.
    glUniform1i(impl->mTextureUniform, 0);

    // Draw the full-screen quad as a triangle strip.
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
void OpenGLDisplayRGB::closeEvent(QCloseEvent* e)
{
    emit closed();
    e->accept();
}
simple_texture_shader.f.glsl
varying vec2 vTextureCoord;
uniform sampler2D uSampler;

void main(void)
{
    gl_FragColor = texture2D(uSampler, vTextureCoord);
}
simple_vertex_shader.v.glsl
attribute vec4 vertexIn;
attribute vec2 textureIn;
varying vec2 vTextureCoord;

void main(void)
{
    gl_Position = vertexIn;
    vTextureCoord = textureIn;
}
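For reference, a rough usage sketch (my addition, not part of the original answer): it assumes the frames have already been converted to RGBA, for example with something like the yuv420pToRgba() helper sketched earlier, and pushes one frame per timer tick from a raw file of consecutive RGBA frames. The file name, frame size, and interval are placeholders.

#include <QApplication>
#include <QTimer>
#include <QFile>
#include <QByteArray>
#include "OpenGLDisplayRGB.h"

int main(int argc, char* argv[])
{
    QApplication app(argc, argv);

    const int width = 640, height = 480;       // assumed frame size
    OpenGLDisplayRGB display;
    display.resize(width, height);
    display.show();

    QFile source("frames.rgba");                // hypothetical file of raw RGBA frames
    if (!source.open(QIODevice::ReadOnly))
        return 1;

    // The widget only stores the pointer passed to DisplayVideoFrame(),
    // so the frame buffer must stay alive until the next paint.
    QByteArray frame;

    QTimer timer;
    QObject::connect(&timer, &QTimer::timeout, [&]() {
        frame = source.read(qint64(width) * height * 4);
        if (frame.size() < width * height * 4) {
            timer.stop();                       // end of stream
            return;
        }
        display.DisplayVideoFrame(reinterpret_cast<unsigned char*>(frame.data()),
                                  width, height);
    });
    timer.start(40);                            // roughly 25 fps

    return app.exec();
}

A real player would take the frame size and frame rate from the stream and decode off the GUI thread; this is only meant to show how the widget is driven.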