我正在Android Studio中使用OpenCV
此代码(DetectActivity.java)在我的其他应用程序中成功运行
但是我将此DetectActivity.java和相应的xml复制到另一个应用程序中,它将崩溃
Logcat显示
CvException [org.opencv.core.CvException: cv::Exception: /build/master_pack-android/opencv/modules/core/src/matrix.cpp:489: error: (-215) 0 <= _rowRange.start && _rowRange.start <= _rowRange.end && _rowRange.end <= m.rows in function cv::Mat::Mat(const cv::Mat&, const cv::Range&, const cv::Range&) ] at org.opencv.core.Mat.n_Mat(Native Method) at org.opencv.core.Mat.<init>(Mat.java:111) at com.example.achic.merge.DetectActivity.onCameraFrame(DetectActivity.java:204) at org.opencv.android.CameraBridgeViewBase.deliverAndDrawFrame(CameraBridgeViewBase.java:392) at org.opencv.android.JavaCameraView$CameraWorker.run(JavaCameraView.java:373) at java.lang.Thread.run(Thread.java:764)
我在DetectActivity中的第201-204行是
Rect rect = new Rect(
new Point(mGray.width() / 2 - 300, mGray.height() / 2 - 300),
new Size(600, 600));
mGray = new Mat(mGray, rect);//CRASH HERE
我尝试通过添加来解决此错误
android:theme="@style/Theme.AppCompat.Light.NoActionBar"
android:screenOrientation="landscape"
在AndroidManifest.xml中的DetectActivity活动标签上
并刷新我的xml,但它仍然崩溃。
此外,我尝试将值-300更改为-100或其他数字,但仍然如此。
这是我的xml(test.xml)
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent">
<org.opencv.android.JavaCameraView
android:id="@+id/fd_activity_surface_view"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<ImageView
android:layout_width="match_parent"
android:layout_height="match_parent"
/>
</RelativeLayout>
这是我的全部代码
package com.example.achic.merge;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.WindowManager;
import android.widget.Toast;
import com.example.achic.merge.R;
import com.example.achic.merge.DatabaseHelper;
import com.example.achic.merge.UserInfo;
import com.example.achic.merge.FaceMatcher;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
public class DetectActivity extends AppCompatActivity implements
CvCameraViewListener2 {
private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
private UserInfo user;
public final static int FLAG_REGISTER = 1;
public final static int FLAG_VERIFY = 2;
public static final int JAVA_DETECTOR = 0;
public static final int NATIVE_DETECTOR = 1;
private Mat mRgba;
private DetectionBasedTracker mNativeDetector;
private Mat mGray;
private Bitmap shibiebitmap;
private CascadeClassifier mJavaDetector;
private float mRelativeFaceSize = 0.2f;
private int mAbsoluteFaceSize = 0;
private CameraBridgeViewBase mOpenCvCameraView;
List<UserInfo> userList;
private int mDetectorType = JAVA_DETECTOR;
private Bitmap mDetectedFace;
private FaceMatcher matcher;
private Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
Intent intent;
switch (msg.what) {
case FLAG_REGISTER:
if (mDetectedFace == null) {
mDetectedFace = (Bitmap) msg.obj;
int result = matcher.histogramMatch(mDetectedFace);
if (result == matcher.UNFINISHED) {
mDetectedFace = null;
} else if (result == matcher.NO_MATCHER) {
user.setName("01");
DatabaseHelper helper = new DatabaseHelper(DetectActivity.this);
Bitmap bitmap = getIntent().getParcelableExtra("Face");
String path = helper.saveBitmapToLocal(bitmap);
user.setPath(path);
helper.insert(user);
helper.close();
Toast.makeText(DetectActivity.this, "Register Successfully", Toast.LENGTH_SHORT).show();
finish();
}
} else {
intent = new Intent();
setResult(RESULT_OK, intent);
finish();
}
break;
case FLAG_VERIFY:
if (mDetectedFace == null) {
mDetectedFace = (Bitmap) msg.obj;
int result = matcher.histogramMatch(mDetectedFace);
if (result == matcher.UNFINISHED) {
mDetectedFace = null;
} else if (result == matcher.NO_MATCHER) {
intent = new Intent();
setResult(RESULT_CANCELED, intent);
finish();
} else {
intent = new Intent();
intent.putExtra("USER_ID", result);
setResult(RESULT_OK, intent);
finish();
}
}
break;
default:
break;
}
}
};
static {
System.loadLibrary("opencv_java3");
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
configWindowSetting();
setContentView(R.layout.test);
mOpenCvCameraView = (CameraBridgeViewBase)
findViewById(R.id.fd_activity_surface_view);
mOpenCvCameraView.setVisibility(CameraBridgeViewBase.VISIBLE);
mOpenCvCameraView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_FRONT);
mOpenCvCameraView.setCvCameraViewListener(this);
init();
DatabaseHelper helper = new DatabaseHelper(DetectActivity.this);
userList = helper.query();
matcher = new FaceMatcher(userList);
helper.close();
}
private void configWindowSetting() {
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
}
private void init() {
try {
InputStream is = getResources()
.openRawResource(R.raw.lbpcascade_frontalface);
File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
File cascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
FileOutputStream os = new FileOutputStream(cascadeFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
mJavaDetector = new CascadeClassifier(cascadeFile.getAbsolutePath());
if (mJavaDetector.empty()) {
mJavaDetector = null;
}
cascadeDir.delete();
} catch (IOException e) {
e.printStackTrace();
}
mOpenCvCameraView.enableView();
}
@Override
public void onPause() {
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mGray = new Mat();
mRgba = new Mat();
}
public void onCameraViewStopped() {
mGray.release();
mRgba.release();
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
mGray = inputFrame.gray();
Core.flip(mRgba, mRgba, 1);
Core.flip(mGray, mGray, 1);
Rect rect = new Rect(
new Point(mGray.width() / 2 - 300, mGray.height() / 2 - 300),
new Size(600, 600));
mGray = new Mat(mGray, rect); //**CRASH HERE**
if (mAbsoluteFaceSize == 0) {
int height = mGray.rows();
if (Math.round(height * mRelativeFaceSize) > 0) {
mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
}
}
MatOfRect faces = new MatOfRect();
if (mJavaDetector != null) {
mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2,
new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
}
Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++) {
Point point = new Point(facesArray[i].x + 680, facesArray[i].y + 250);
facesArray[i] = new Rect(point, facesArray[i].size());
if (facesArray[i].height > 400 && facesArray[i].height < 500) {
Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(),
FACE_RECT_COLOR, 3);
Mat faceMat = new Mat(mRgba, facesArray[i]);
Imgproc.resize(faceMat, faceMat, new Size(320, 320));
Bitmap bitmap = Bitmap.createBitmap(faceMat.width(),faceMat.height(), Bitmap.Config.ARGB_8888);
Utils.matToBitmap(faceMat, bitmap);
Message message = Message.obtain();
message.what = getIntent().getIntExtra("flag", 0);
message.obj = bitmap;
mHandler.sendMessage(message);
}
}
return mRgba;
}
}
如何解决?我尝试谷歌它,但仍然找不到答案。
谢谢。
更新
当输入DetectActivity.java时,我的模拟器将像这样旋转90度
我不确定是否是导致错误的问题,但是我已经添加了
android:screenOrientation="landscape"
AndroidManifest.xml中此行的DetectActivity标签
,并且在onCameraFrame()中也调用了flip()函数
更新
我更改了201-204行中的值
Rect rect = new Rect(
new Point(mGray.width() / 2 -50, mGray.height() / 2 -50),
new Size(100, 100));
mGray = new Mat(mGray, rect);
这不会崩溃。但是它无法检测到我的脸。