我在Dragonboard 820上运行Android API 23的原生Android应用,该应用读取 /storage/emulated/0/ 目录中的视频文件。它使用我手动配置的JavaCV库,将视频文件读取为Frame对象,然后将其转换为Mat对象。该Mat对象作为参数传递给本机代码,程序在其中获取帧并生成该Mat对象的灰度图。本机代码还返回转换是否成功。然后将该灰度Mat对象转换回Frame对象后,使用OpenCVFrameRecorder()进行录制。此外,我使用AsyncTask在单独的线程中运行JavaCV代码。代码在下面
import android.Manifest;
import android.content.pm.PackageManager;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.widget.TextView;

import org.bytedeco.javacpp.opencv_core;
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameGrabber;
import org.bytedeco.javacv.FrameRecorder;
import org.bytedeco.javacv.OpenCVFrameConverter;
import org.bytedeco.javacv.OpenCVFrameRecorder;

import org.opencv.core.CvType;
import org.opencv.core.Mat;
public class MainActivity extends AppCompatActivity {
Mat inImage,outImage;
opencv_core.Mat mat,mat2;
int state;
Frame outFrame = null;
Frame vFrame = null;
FrameGrabber videoGrabber;
FrameRecorder videoRecorder;
// Used to load the 'native-lib' library on application startup.
static {
System.loadLibrary("native-lib");
System.loadLibrary("opencv_java3");
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
TextView tv = (TextView) findViewById(R.id.sample_text);
tv.setText(R.string.msg);
MyTask myTask = new MyTask();
myTask.execute("Hello");
}
private class MyTask extends AsyncTask<String, String, Void> {
String url;
@Override
protected Void doInBackground(String... params) {
OpenCVFrameConverter.ToMat converterToMat = new OpenCVFrameConverter.ToMat();
try
{
videoGrabber.start();
Log.d("Status Check : ","Video Grabber started");
} catch (Exception e){
Log.e("Status Check :", "Failed to start grabber" + e);
}
try {
videoRecorder.start();
} catch (FrameRecorder.Exception e) {
e.printStackTrace();
}
do
try {
vFrame = videoGrabber.grabFrame();
if (vFrame != null) {
mat = converterToMat.convert(vFrame);
//if (mat != null) {
Log.d("Status Check :", "Working...");
inImage = new Mat(mat.address());
state = processStateFromJNI(inImage.getNativeObjAddr(), outImage.getNativeObjAddr());
if (state == 1) {
Log.d("Status Check :", "Starting process...");
mat2 = new opencv_core.Mat() { { address = outImage.getNativeObjAddr(); } };
Log.d("Status Check :", "Got mat2...");
outFrame = converterToMat.convert(mat2);
if(outFrame != null){
Log.d("Status Check :", "Starting to record video...");
videoRecorder.record(outFrame);
Log.d("Status Check :", "\n Process completed!!!");
}
} else if (state == 0) {
Log.d("Status Check :", "\n Process not completed!!!");
// }
}
}
} catch (Exception e) {
Log.e("Status Check :", "video grabFrame failed: " + e);
} while(vFrame != null);
return null;
}
@Override
protected void onPostExecute(Void result) {
super.onPostExecute(result);
TextView tv = (TextView) findViewById(R.id.sample_text);
tv.setText(tv.getText()+"\nonPostExecute!!!!!");
try
{
videoGrabber.stop();
videoRecorder.stop();
}catch (Exception e) {
Log.e("Status Check :", "failed to stop video grabber", e);
}
Log.d("Status Check", "Ended video processing...");
if (state == 1) {
tv.setText(tv.getText() + "\n Process completed!!!");
} else if (state == 0) {
tv.setText(tv.getText() + "\n Process Error!!!");
}
}
@Override
protected void onPreExecute() {
inImage = new Mat(480,640, CvType.CV_8UC4);
outImage = new Mat(480,640, CvType.CV_8UC1);
videoRecorder = new OpenCVFrameRecorder(Environment.getExternalStorageDirectory().getPath()+"/output.avi",640,480);
//videoRecorder.setVideoCodec(CV_FOURCC('M','J','P','G'));
videoRecorder.setFrameRate(30);
videoRecorder.setPixelFormat(1);
videoGrabber = new FFmpegFrameGrabber(Environment.getExternalStorageDirectory().toString()+"/vtest.avi");
videoGrabber.setFormat("avi");
TextView tv = (TextView) findViewById(R.id.sample_text);
tv.setText(tv.getText()+"\nonPreExecute!!!!!");
}
}
/**
* A native method that is implemented by the 'native-lib' native library,
* which is packaged with this application.
*/
public native int processStateFromJNI(long matAddrRgba, long matAddrGray);
}
本机代码是
#include <jni.h>
#include <string>
#include <stdexcept>
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/objdetect.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/video.hpp>
#include <opencv2/videoio.hpp>
#include <android/log.h>
#define LOG_TAG "FaceDetection/DetectionBasedTracker"
#define LOGD(...) (__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
using namespace cv;
using namespace std;
int toGray(Mat img, Mat& gray);
// JNI bridge: receives the native addresses of two cv::Mat objects from the
// Java side and reports the grayscale-conversion status back as a jint
// (1 = success, 0 = failure — see toGray).
extern "C" JNIEXPORT jint
JNICALL
Java_com_example_iq3_computervisionapp_MainActivity_processStateFromJNI(
        JNIEnv *env,
        jobject /*this*/, jlong addrRgba, jlong addrGray) {
    // Reinterpret the Java-held handles as references to the real matrices.
    Mat &input = *reinterpret_cast<Mat *>(addrRgba);
    Mat &output = *reinterpret_cast<Mat *>(addrGray);
    LOGD("Before starting...");
    return static_cast<jint>(toGray(input, output));
}
// Converts img to a single-channel grayscale image in gray.
// Returns 1 on success, 0 on failure (empty input, unsupported channel
// count, or an OpenCV error) — the status contract the Java caller expects.
// Signature kept by-value to match the forward declaration above.
int toGray(Mat img, Mat& gray){
    if (img.empty()) {
        return 0;
    }
    try {
        // Frames arriving from FFmpeg/JavaCV are often 3-channel, not RGBA;
        // picking the conversion by channel count avoids cvtColor throwing
        // (which would otherwise abort straight through the JNI boundary).
        switch (img.channels()) {
            case 4: cvtColor(img, gray, COLOR_RGBA2GRAY); break;
            case 3: cvtColor(img, gray, COLOR_BGR2GRAY); break;
            case 1: img.copyTo(gray); break;
            default: return 0;
        }
    } catch (const cv::Exception&) {
        return 0; // report failure instead of crashing the app process
    }
    return (gray.rows == img.rows && gray.cols == img.cols) ? 1 : 0;
}
我面临的问题是程序可以完整地运行,但是没有生成视频文件 "output.avi"。我已经在AndroidManifest.xml中声明了外部存储的读写权限。请告诉我我哪里做错了。欢迎提供任何链接和建议。