I am developing a scientific Android application that uses the OpenCV library to detect eye pupil movement. I am working with the code below, which runs fine in an app Activity. The goal, however, is for the app to do the image processing in the background so that a VR scene can run in the foreground. So the scenario is: a VR scene displaying something in the foreground, with the pupil movement detector running in the background.
The problem: I am trying to use the same functionality from this code inside an Android Service, but there does not seem to be a way to do it.
I have been searching for several days now, and the conclusion I have reached is that I can neither get a CameraBridgeViewBase to run in the background nor pull frames from the camera without one.
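To make the goal concrete, below is a bare-bones sketch of the kind of Service I would like the frame processing to end up in (PupilDetectionService and everything in it are just placeholder names I made up, not working code). Loading OpenCV and the cascade is not the issue; the missing piece is a frame source that could replace the onCameraFrame() callback of the Activity shown further down.

import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.os.IBinder;
import android.util.Log;

import org.opencv.android.OpenCVLoader;
import org.opencv.objdetect.CascadeClassifier;

import java.io.File;

public class PupilDetectionService extends Service {

    private static final String TAG = "pupilDetectorService";
    private CascadeClassifier mJavaDetectorLeftEye;

    @Override
    public void onCreate() {
        super.onCreate();
        // Loading the native library and the cascade works the same way as in the Activity
        if (OpenCVLoader.initDebug()) {
            File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
            // mJavaDetectorLeftEye = loadClassifier(R.raw.haarcascade_lefteye_2splits,
            //         "haarcascade_eye_left.xml", cascadeDir); // same helper as in the Activity
        } else {
            Log.e(TAG, "OpenCV initialization failed");
        }
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        // The missing piece: without a CameraBridgeViewBase there is no
        // onCameraFrame() callback here, so there is nothing to pass to
        // detectEye() / detectPupil().
        return START_STICKY;
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }
}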
Here is the Activity code:
public class MainActivity extends Activity implements CameraBridgeViewBase.CvCameraViewListener2 {

    private static final String TAG = "pupilDetector";
    public static final int JAVA_DETECTOR = 0;

    public static Mat mRgba;
    public static Mat mGray;
    public static CascadeClassifier mJavaDetectorLeftEye;
    public static CameraBridgeViewBase mOpenCvCameraView;
    public static double xCenter = -1;
    public static double yCenter = -1;
    public static Mat mIntermediateMat;
    public static Mat hierarchy;
    public static boolean captureFrame;
    public static int count = 0;
    public static Mat mZoomWindow;

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS: {
                    Log.i(TAG, "OpenCV loaded successfully");
                    // load cascade file from application resources
                    File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
                    mJavaDetectorLeftEye = loadClassifier(R.raw.haarcascade_lefteye_2splits, "haarcascade_eye_left.xml",
                            cascadeDir);
                    cascadeDir.delete();

                    // Use front Camera
                    mOpenCvCameraView.setCameraIndex(1);
                    mOpenCvCameraView.enableFpsMeter();
                    mOpenCvCameraView.enableView();
                    mOpenCvCameraView.setMaxFrameSize(640, 360); // best resolution for real time processing on Lenovo K6
                }
                break;
                default: {
                    super.onManagerConnected(status);
                }
                break;
            }
        }
    };
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setContentView(R.layout.activity_main);
        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.eyes_activity_surface_view);
        mOpenCvCameraView.setCvCameraViewListener(this);
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    @Override
    public void onResume() {
        super.onResume();
        if (!OpenCVLoader.initDebug()) {
            Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
            OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_11, this, mLoaderCallback);
        } else {
            Log.d(TAG, "OpenCV library found inside package. Using it!");
            mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
        }
    }

    @Override
    protected void onStop() {
        super.onStop();
    }

    public void onDestroy() {
        super.onDestroy();
        mOpenCvCameraView.disableView();
    }
    public void onCameraViewStarted(int width, int height) {
        mRgba = new Mat(height, width, CvType.CV_8UC4);
        mIntermediateMat = new Mat(height, width, CvType.CV_8UC4);
        mGray = new Mat(height, width, CvType.CV_8UC1);
        hierarchy = new Mat();
    }
    public void onCameraViewStopped() {
        mGray.release();
        mRgba.release();
        mIntermediateMat.release();
        hierarchy.release();
        if (mZoomWindow != null) // created lazily in onCameraFrame, so it may still be null here
            mZoomWindow.release();
    }
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        mRgba = inputFrame.rgba();
        mGray = inputFrame.gray();

        if (mZoomWindow == null)
            createAuxiliaryMats();

        Rect area = new Rect(new Point(20, 20), new Point(mGray.width() - 20, mGray.height() - 20));
        detectEye(mJavaDetectorLeftEye, area, 100);

        if (captureFrame) {
            saveImage();
            captureFrame = false;
        }
        return mRgba;
    }
    private void createAuxiliaryMats() {
        if (mGray.empty())
            return;

        int rows = mGray.rows();
        int cols = mGray.cols();
        if (mZoomWindow == null) {
            mZoomWindow = mRgba.submat(rows / 2 + rows / 10, rows, cols / 2 + cols / 10, cols);
        }
    }
    private Mat detectEye(CascadeClassifier clasificator, Rect area, int size) {
        Mat template = new Mat();
        Mat mROI = mGray.submat(area);
        MatOfRect eyes = new MatOfRect();
        Point iris = new Point();

        // isolate the eyes first
        clasificator.detectMultiScale(mROI, eyes, 1.15, 2, Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                | Objdetect.CASCADE_SCALE_IMAGE, new Size(30, 30), new Size());
        Rect[] eyesArray = eyes.toArray();
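        // the method returns inside this loop, so only the first detection is processed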
        for (int i = 0; i < eyesArray.length; ) {
            Rect e = eyesArray[i];
            e.x = area.x + e.x;
            e.y = area.y + e.y;
            Rect eye_only_rectangle = new Rect((int) e.tl().x, (int) (e.tl().y + e.height * 0.4), (int) e.width,
                    (int) (e.height * 0.6));
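            // minMaxLoc finds the darkest point of the search ROI; it is stored as a rough
            // iris position estimate (the value is not used further in this method)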
            Core.MinMaxLocResult mmG = Core.minMaxLoc(mROI);
            iris.x = mmG.minLoc.x + eye_only_rectangle.x;
            iris.y = mmG.minLoc.y + eye_only_rectangle.y;
            Core.rectangle(mRgba, eye_only_rectangle.tl(), eye_only_rectangle.br(), new Scalar(255, 255, 0, 255), 2);

            // find the pupil inside the eye rect
            detectPupil(eye_only_rectangle);

            return template;
        }
        return template;
    }
    protected void detectPupil(Rect eyeRect) {
        hierarchy = new Mat();
        Mat img = mRgba.submat(eyeRect);
        Mat img_hue = new Mat();
        Mat circles = new Mat();

        // Convert to HSV, threshold to a dark-colour range, and blur to remove false circles
        Imgproc.cvtColor(img, img_hue, Imgproc.COLOR_RGB2HSV);
        Core.inRange(img_hue, new Scalar(0, 0, 0), new Scalar(255, 255, 32), img_hue);
        Imgproc.erode(img_hue, img_hue, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3)));
        Imgproc.dilate(img_hue, img_hue, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(6, 6)));
        Imgproc.Canny(img_hue, img_hue, 170, 220);
        Imgproc.GaussianBlur(img_hue, img_hue, new Size(9, 9), 2, 2);

        // Apply Hough Transform to find the circles
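        // dp = 3, minDist = image height (so effectively at most one circle),
        // Canny high threshold 200, accumulator threshold 75, radius 10-25 px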
        Imgproc.HoughCircles(img_hue, circles, Imgproc.CV_HOUGH_GRADIENT, 3, img_hue.rows(), 200, 75, 10, 25);

        if (circles.cols() > 0)
            for (int x = 0; x < circles.cols(); x++) {
                double vCircle[] = circles.get(0, x);
                if (vCircle == null)
                    break;

                Point pt = new Point(Math.round(vCircle[0]), Math.round(vCircle[1]));
                int radius = (int) Math.round(vCircle[2]);

                // draw the found circle
                Core.circle(img, pt, radius, new Scalar(0, 255, 0), 2);
                Core.circle(img, pt, 3, new Scalar(0, 0, 255), 2);
                Log.i("Coo", String.valueOf(pt.x) + ", " + String.valueOf(pt.y));
            }
    }
    private CascadeClassifier loadClassifier(int rawResId, String filename, File cascadeDir) {
        CascadeClassifier classifier = null;
        try {
            InputStream is = getResources().openRawResource(rawResId);
            File cascadeFile = new File(cascadeDir, filename);
            FileOutputStream os = new FileOutputStream(cascadeFile);

            byte[] buffer = new byte[4096];
            int bytesRead;
            while ((bytesRead = is.read(buffer)) != -1) {
                os.write(buffer, 0, bytesRead);
            }
            is.close();
            os.close();

            classifier = new CascadeClassifier(cascadeFile.getAbsolutePath());
            if (classifier.empty()) {
                Log.e(TAG, "Failed to load cascade classifier");
                classifier = null;
            } else {
                Log.i(TAG, "Loaded cascade classifier from " + cascadeFile.getAbsolutePath());
            }
        } catch (IOException e) {
            e.printStackTrace();
            Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
        }
        return classifier;
    }
    public void onRecreateClick(View v) {
        captureFrame = true;
    }

    public void saveImage() {
        Mat mIntermediateMat = new Mat();
        Imgproc.cvtColor(mRgba, mIntermediateMat, Imgproc.COLOR_RGBA2BGR, 3);

        File path = new File(Environment.getExternalStorageDirectory() + "/OpenCV/");
        path.mkdirs();
        File file = new File(path, "image" + count + ".png");
        count++;

        String filename = file.toString();
        Boolean bool = Highgui.imwrite(filename, mIntermediateMat);
        if (bool)
            Log.i(TAG, "SUCCESS writing image to external storage");
        else
            Log.i(TAG, "Fail writing image to external storage");
    }
}