I'm trying to get images from my camera using QCamera.
I thought I should derive from QAbstractVideoSurface and implement present(), which receives the QVideoFrame representing the current image captured by the camera as a parameter.
Since I need to do some processing, I tried to map() my frame, get the data with bits(), do whatever I have to do, and then unmap() it. But I get a crash (SIGSEGV) on map(). Here is the error I get:
W libTest.so: (null):0 ((null)): Unsupported viewfinder pixel format
D SensorManager: registerListener :: 6, LSM6DSL Acceleration Sensor, 200000, 0,
E libEGL : call to OpenGL ES API with no current context (logged once per thread)
E GLConsumer: [SurfaceTexture-0-546-0] attachToContext: invalid current EGLDisplay
F libc : Fatal signal 11 (SIGSEGV), code 1, fault addr 0x4 in tid 663 (qtMainLoopThrea)
What am I doing wrong?
Here is the full application code:
///////////////////////////////////////////////
//main.cpp
///////////////////////////////////////////////
#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include <QQmlContext>
#include "camera_engine.h"
int main(int argc, char *argv[])
{
    QCoreApplication::setAttribute(Qt::AA_EnableHighDpiScaling);
    QGuiApplication app(argc, argv);

    QQmlApplicationEngine engine;

    CameraEngine camEngine;
    engine.rootContext()->setContextProperty("cameraEngine", &camEngine);

    engine.load(QUrl(QStringLiteral("qrc:/main.qml")));
    if (engine.rootObjects().isEmpty())
        return -1;

    return app.exec();
}
///////////////////////////////////////////////
//camera_engine.h
///////////////////////////////////////////////
#ifndef __CAMERA_ENGINE_H__
#define __CAMERA_ENGINE_H__
#include <QCamera>
#include "image_reader.h"
class CameraEngine : public QObject {
    Q_OBJECT
public:
    explicit CameraEngine(QCamera::Position pos = QCamera::BackFace);
    ~CameraEngine();

public slots:
    void start();

private:
    QCamera mCamera;
    ImageReader mImageReader;
};
#endif // __CAMERA_ENGINE_H__
///////////////////////////////////////////////
//camera_engine.cpp
///////////////////////////////////////////////
#include "camera_engine.h"
CameraEngine::CameraEngine(QCamera::Position pos) : mCamera(pos)
{
    mCamera.setViewfinder(&mImageReader);

    QCameraViewfinderSettings viewFinderSettings;
    viewFinderSettings.setResolution(640, 480);
    viewFinderSettings.setMinimumFrameRate(30);
    viewFinderSettings.setMaximumFrameRate(30);
    viewFinderSettings.setPixelFormat(QVideoFrame::Format_RGB24);
    mCamera.setViewfinderSettings(viewFinderSettings);
}

CameraEngine::~CameraEngine()
{
    if (mCamera.state() == QCamera::ActiveState) {
        mCamera.stop();
    }
}

void CameraEngine::start()
{
    mCamera.start();
}
///////////////////////////////////////////////
//image_reader.h
///////////////////////////////////////////////
#ifndef CAMERA_IMAGE_READER_H
#define CAMERA_IMAGE_READER_H
#include <QAbstractVideoSurface>
class ImageReader : public QAbstractVideoSurface {
    Q_OBJECT
public:
    ImageReader() = default;
    ~ImageReader() = default;

    virtual bool present(const QVideoFrame &frame);
    virtual QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType type) const;
};
#endif // CAMERA_IMAGE_READER_H
///////////////////////////////////////////////
//image_reader.cpp
///////////////////////////////////////////////
#include "image_reader.h"
#include <QDebug>
bool ImageReader::present(const QVideoFrame &frame)
{
    QVideoFrame currentFrame = frame;
    currentFrame.map(QAbstractVideoBuffer::ReadOnly); // crashes here
    // Do something
    currentFrame.unmap();
    return true;
}

QList<QVideoFrame::PixelFormat> ImageReader::supportedPixelFormats(QAbstractVideoBuffer::HandleType type) const
{
    Q_UNUSED(type)
    return QList<QVideoFrame::PixelFormat>() << QVideoFrame::Format_RGB24;
}
///////////////////////////////////////////////
//main.qml
///////////////////////////////////////////////
import QtQuick 2.11
import QtQuick.Controls 2.2
ApplicationWindow {
    id: window
    visible: true
    width: 640
    height: 480

    Component.onCompleted: cameraEngine.start()
}
Edit: So, I think this may be because my QVideoFrame is stored as an OpenGL texture and my present() function does not run on the OpenGL thread, so no OpenGL ES context can be found.
Is there a way to make sure it runs on the right thread?
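(Not part of the original post: a minimal sketch showing that checking the frame's handle type before mapping at least avoids the crash, although it still gives no access to the pixels of a GPU-backed frame.)
// Sketch only: skip frames that are stored as an OpenGL texture instead of CPU memory
bool ImageReader::present(const QVideoFrame &frame)
{
    QVideoFrame currentFrame = frame;

    // CPU-mappable frames report NoHandle; GPU-backed frames report e.g. GLTextureHandle
    if (currentFrame.handleType() != QAbstractVideoBuffer::NoHandle)
        return true; // nothing we can safely map() here

    if (!currentFrame.map(QAbstractVideoBuffer::ReadOnly))
        return true; // map() can also fail without crashing

    // ... use currentFrame.bits() and currentFrame.bytesPerLine() here ...

    currentFrame.unmap();
    return true;
}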
Edit2: I found this: http://doc.qt.io/qt-5/qtquick-scenegraph-openglunderqml-example.html
Maybe I can adapt this code to obtain a valid OpenGL ES context. Unfortunately I don't have time to do that right now. Unless someone has a better suggestion, I'll try it on Monday and let you know the result.
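(Not part of the original post: the core of that example is connecting to QQuickWindow::beforeRendering with Qt::DirectConnection, so the connected code runs on the scene graph's render thread where the QML OpenGL ES context is current. A hedged sketch of that idea; the helper name is made up.)
#include <QQuickWindow>
#include <QOpenGLContext>

// Sketch: run a callback on the render thread, where the scene graph's OpenGL ES context is current.
// "window" is assumed to be the application's QQuickWindow (for example the root QML object).
void hookRenderThread(QQuickWindow *window)
{
    QObject::connect(window, &QQuickWindow::beforeRendering, window, []() {
        if (QOpenGLContext::currentContext()) {
            // OpenGL calls (for example reading back a camera texture) are valid here
        }
    }, Qt::DirectConnection);
}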
Edit3: So, apparently my solution was not a good one; I was getting a crash (SIGSEGV) when calling initializeOpenGLFunctions() from present(). I saw Antonio Dias's answer using a VideoOutput with grabToImage and tried it. It seems to work, but if I understand correctly, grabToImage "draws" the VideoOutput into CPU memory, and in the process I lose some metadata that I intended to use.
I also tried using the NDK directly, but the camera requires an API level of at least 24, and even after setting every related option I could find, it did not seem to be used.
Edit4: I don't really know what I did, but my program now ends up using the correct API level, so I will go with the NDK solution.
Answer 0: (score: 0)
...unless someone has a better suggestion...
A simple way to capture images from the Camera object in QML is to call grabToImage and pass the image to C++ for processing.
This approach does not force you to process every frame of the active camera; you can grab a frame only when you are ready or when you need one.
Keep in mind, especially on Android, that the VideoOutput object must be visible in order to grab anything; for example, you cannot grab an image while the Android screen is off. You do not need to fill the whole screen, though: it is enough to reserve some area of the screen for the VideoOutput and keep it visible and on top.
Also note that shrinking the VideoOutput does not reduce the maximum size you can capture; for example, you can still capture a 1280x720 image with a small VideoOutput.
When you resize the VideoOutput, you must also preserve the aspect ratio so that the captured image keeps its aspect ratio.
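(Not part of the original answer: a small sketch of one way to compute a grab size that keeps the source aspect ratio, assuming you know the viewfinder resolution.)
#include <QSize>

// Sketch: fit a requested grab size to the source's aspect ratio,
// e.g. a 1280x720 source and a 320x240 request give 320x180
QSize grabSizeKeepingAspect(const QSize &source, const QSize &requested)
{
    return source.scaled(requested, Qt::KeepAspectRatio);
}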
Code:
You can see the complete source here!
main.cpp:
#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include "camerahelper.h"
int main(int argc, char *argv[])
{
#if defined(Q_OS_WIN)
    QCoreApplication::setAttribute(Qt::AA_EnableHighDpiScaling);
#endif
    QGuiApplication app(argc, argv);

    QQmlApplicationEngine engine;
    engine.load(QUrl(QStringLiteral("qrc:/main.qml")));
    if (engine.rootObjects().isEmpty())
        return -1;

    CameraHelper camera_helper;
    camera_helper.start(engine.rootObjects().first());

    return app.exec();
}
camerahelper.h:
#ifndef CAMERAHELPER_H
#define CAMERAHELPER_H
#include <QtCore>
#include <QtGui>
#include <QtQuick>
class CameraHelper : public QObject
{
    Q_OBJECT
public:
    explicit CameraHelper(QObject *parent = nullptr);

signals:

public slots:
    void start(QObject *qml_obj);
    void grab();

private slots:
    void frameReady(const QVariant &frame_variant);

private:
    QObject *m_qml_obj;
};
camerahelper.cpp:
#include "camerahelper.h"
CameraHelper::CameraHelper(QObject *parent) : QObject(parent)
{
}

void CameraHelper::start(QObject *qml_obj)
{
    m_qml_obj = qml_obj;

    // Connect the QML frameReady SIGNAL to our frameReady SLOT
    connect(m_qml_obj, SIGNAL(frameReady(QVariant)), this, SLOT(frameReady(QVariant)));

    // Do the first grab
    grab();
}

void CameraHelper::grab()
{
    // Size of the captured image
    QSize size = QSize(320, 240);

    // Pass the grab size to QML and wait for the captured image on the frameReady SIGNAL
    QMetaObject::invokeMethod(m_qml_obj, "grab", Q_ARG(QVariant, size.width()), Q_ARG(QVariant, size.height()));
}

void CameraHelper::frameReady(const QVariant &frame_variant)
{
    QQuickItemGrabResult *grab_result = qvariant_cast<QQuickItemGrabResult*>(frame_variant); // Cast from QVariant
    QImage frame = grab_result->image(); // Get the QImage
    grab_result->deleteLater(); // Release the QQuickItemGrabResult

    // Depending on the OS the image can have different formats;
    // use convertToFormat to unify all possible formats into one
    frame = frame.convertToFormat(QImage::Format_RGB32);

    // Frame is ready to use

    grab(); // Do the next frame grab
}
main.qml:
import QtQuick 2.10
import QtQuick.Window 2.10
import QtMultimedia 5.8
Window
{
    visible: true
    color: "black"
    width: 640
    height: 480
    title: qsTr("Hello World")

    signal frameReady(var frame)

    Camera
    {
        id: camera
        viewfinder.resolution: "320x240"
    }

    VideoOutput
    {
        id: videoOutput
        source: camera
        autoOrientation: true
        visible: true
        anchors.fill: parent
    }

    function grab(grab_width, grab_height)
    {
        if (!visible)
            return

        videoOutput.grabToImage(function(frame)
        {
            frameReady(frame) // Emit the frameReady SIGNAL
        }, Qt.size(grab_width, grab_height))
    }
}
Answer 1: (score: 0)
I finally managed to get it working. Apparently QCamera has some problems on Android, so I had to use the QML Camera type instead, display it with a VideoOutput, and apply a filter to it to grab the images.
The filter comes in two parts: one derived from QAbstractVideoFilter and the other from QVideoFilterRunnable.
Here is the code:
////////////////////////////////////////////////////////////////////
// main.cpp
////////////////////////////////////////////////////////////////////
#include <QGuiApplication>
#include <QQmlApplicationEngine>
#include "myfilter.hpp"
int main(int argc, char *argv[])
{
    QCoreApplication::setAttribute(Qt::AA_EnableHighDpiScaling);
    QGuiApplication app(argc, argv);

    QQmlApplicationEngine engine;
    qmlRegisterType<MyFilter>("example.myfilter", 1, 0, "MyFilter");

    engine.load(QUrl(QStringLiteral("qrc:/main.qml")));
    if (engine.rootObjects().isEmpty())
        return -1;

    return app.exec();
}
////////////////////////////////////////////////////////////////////
// myfilter.hpp
////////////////////////////////////////////////////////////////////
#ifndef MYFILTER_H
#define MYFILTER_H
#include <QAbstractVideoFilter>
class MyFilterRunnable : public QVideoFilterRunnable {
public:
    QVideoFrame run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat, RunFlags flags);
};

class MyFilter : public QAbstractVideoFilter
{
public:
    QVideoFilterRunnable *createFilterRunnable();
};
////////////////////////////////////////////////////////////////////
// myfilter.cpp
////////////////////////////////////////////////////////////////////
#include "myfilter.hpp"
#include <QOpenGLContext>
#include <QOpenGLFunctions>
QVideoFrame MyFilterRunnable::run(QVideoFrame *input, const QVideoSurfaceFormat &surfaceFormat, QVideoFilterRunnable::RunFlags flags)
{
    Q_UNUSED(surfaceFormat)
    Q_UNUSED(flags)

    QImage img(input->width(), input->height(), QImage::Format_RGBA8888);
    bool success = false;

    if (input->handleType() == QAbstractVideoBuffer::GLTextureHandle) {
        // The frame lives in GPU memory as an OpenGL texture: attach it to a
        // temporary framebuffer object and read the pixels back into the QImage
        GLuint textureId = input->handle().toUInt();
        QOpenGLContext *ctx = QOpenGLContext::currentContext();
        QOpenGLFunctions *f = ctx->functions();

        GLuint fbo;
        f->glGenFramebuffers(1, &fbo);
        GLuint prevFbo;
        f->glGetIntegerv(GL_FRAMEBUFFER_BINDING, (GLint *) &prevFbo);
        f->glBindFramebuffer(GL_FRAMEBUFFER, fbo);
        f->glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureId, 0);
        f->glReadPixels(0, 0, input->width(), input->height(), GL_RGBA, GL_UNSIGNED_BYTE, img.bits());

        // Restore the previously bound framebuffer and release the temporary one
        f->glBindFramebuffer(GL_FRAMEBUFFER, prevFbo);
        f->glDeleteFramebuffers(1, &fbo);
        success = true;
    } // else handle other types

    if (success) {
        // Process the image here
        return QVideoFrame(img);
    } else {
        return *input; // Could not apply the filter, return the unmodified input
    }
}

QVideoFilterRunnable *MyFilter::createFilterRunnable()
{
    return new MyFilterRunnable;
}
////////////////////////////////////////////////////////////////////
// main.qml
////////////////////////////////////////////////////////////////////
import QtQuick 2.11
import QtQuick.Controls 2.2
import QtMultimedia 5.9
import example.myfilter 1.0
ApplicationWindow {
    id: window
    visible: true
    width: 640
    height: 480

    Camera {
        id: camera
    }

    MyFilter {
        id: filter
    }

    VideoOutput {
        source: camera
        autoOrientation: true
        filters: [ filter ]
        anchors.fill: parent
    }
}
(My run implementation is adapted from here: http://code.qt.io/cgit/qt/qtmultimedia.git/tree/examples/multimedia/video/qmlvideofilter_opencl/rgbframehelper.h)