OpenCV Face Tracking Android Application

Posted: 2014-08-13 17:33:39

Tags: java android eclipse opencv

I'm new to this site, and new to this whole Java / Android / OpenCV world! But I really need help and hope you can help me! I'm developing an Android application to track faces. I detect the face and eyes with Haar classifiers, and then use a template matching method on the eyes to track the face. I have written the code, but I'm running into problems starting the camera!

With CameraBridgeViewBase or JavaCameraView I only get a black screen; the camera captures frames (the log says "preview frame received") but nothing happens (no face or eyes detected...), and it says "OpenGL context could not be created"!

When I use VideoCapture, it crashes with the error "Fatal signal 11"!

I've been searching for a solution to either problem but haven't been able to fix this! I'll post the code below.

Thanks in advance for your time!

Here is my main activity:

package com.example.newfacetracker;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2;
import org.opencv.android.JavaCameraView;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;

import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SurfaceView;
import android.view.WindowManager;
import android.widget.ImageView;

public class MainActivity extends Activity implements CvCameraViewListener2{

        public String TAG = "Main Activity";

        private JavaCameraView mOpenCvCameraView;

        private CascadeClassifier faceCascade;
        private CascadeClassifier leftEyeCascade;
        private CascadeClassifier rightEyeCascade;
        private CascadeClassifier eyesAreaCascade;

        int screen_w, screen_h;

        private Mat gray, frame, lowRes;

        private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
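                // Invoked by the async OpenCV loader once the native library is available;
                // the camera view may only be enabled after this point.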
                @Override
                public void onManagerConnected(int status) {
                        switch (status) {
                        case LoaderCallbackInterface.SUCCESS:
                        {
                                Log.i(TAG, "OpenCV loaded successfully");
                                //Intent cameraIntent = new Intent(android.provider.MediaStore.ACTION_VIDEO_CAPTURE);
                                //startActivityForResult(cameraIntent, 0);
                            mOpenCvCameraView.setCameraIndex(0);
                            mOpenCvCameraView.enableFpsMeter();
                                mOpenCvCameraView.enableView();
                        loadFaceCascade();
                        loadLeftEyeCascade();
                        loadRightEyeCascade();
                        loadEyesAreaCascade();
                        } break;
                        default:
                        {
                                super.onManagerConnected(status);
                        } break;
                        }
                }
        };

        public MainActivity() {}

        @Override
        protected void onCreate(Bundle savedInstanceState) {
                super.onCreate(savedInstanceState);
                getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

                //setContentView(R.layout.activity_main);
            //mOpenCvCameraView = (JavaCameraView) findViewById(R.id.tutorial2_activity_surface_view);
            //mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
            //mOpenCvCameraView.setCvCameraViewListener(this);

            mOpenCvCameraView = new JavaCameraView(this, -1);
        setContentView(mOpenCvCameraView);
        mOpenCvCameraView.setCvCameraViewListener(this);
        }

        @Override
        public void onPause()
        {
                super.onPause();
                if (mOpenCvCameraView != null)
                        mOpenCvCameraView.disableView();
        }

        @Override
        public void onResume()
        {
                super.onResume();
                OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_9, this, mLoaderCallback);
        }

        public void onDestroy() {
                super.onDestroy();
                if (mOpenCvCameraView != null)
                mOpenCvCameraView.disableView();
        }

        public void onCameraViewStarted(int width, int height) {
                screen_w=width;
                screen_h=height;
                gray = new Mat(screen_h, screen_w, CvType.CV_8UC1);  // Mat's constructor takes (rows, cols, type)
                frame = new Mat(screen_h, screen_w, CvType.CV_8UC4);
                lowRes = new Mat();
        }

        public void onCameraViewStopped() {
                frame.release();
                gray.release();
        }

        public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
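            // Called on the camera's worker thread for every delivered frame;
            // it must return quickly, since the preview waits on its return value.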

            MatOfRect face = new MatOfRect();
        MatOfRect leftEye = new MatOfRect();
        MatOfRect rightEye = new MatOfRect();
        MatOfRect eyesArea = new MatOfRect();

            //Capturing a frame
            //VideoCapture videoCapture = new VideoCapture();
            //videoCapture.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, 320);
            //videoCapture.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, 240);

            //frame = new Mat(320, 240, CvType.CV_8UC4);
            //gray = new Mat(frame.width(), frame.height(), CvType.CV_8UC1);

        while (face.empty() && leftEye.empty() && rightEye.empty())
            {
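                //NOTE: inputFrame refers to the single frame delivered to this callback,
                //so rgba() returns the same image on every iteration of this loop.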
                frame = inputFrame.rgba();
                Imgproc.resize(frame, lowRes, new Size(), 0.25, 0.25, Imgproc.INTER_LINEAR);
                Imgproc.cvtColor(lowRes, gray, Imgproc.COLOR_RGBA2GRAY);  // rgba() delivers RGBA, not BGRA

                //videoCapture.read(frame);

                if (frame.empty()) {Log.i(TAG, "Frame Empty");}

                //Resizing to reduce the resolution
                //Imgproc.resize(frame, lowRes, new Size(), 0.25, 0.25, Imgproc.INTER_LINEAR);

                //Detecting the face and eyes in the first frame
                //Imgproc.cvtColor(lowRes, gray, Imgproc.COLOR_BGR2GRAY);
                //Log.i(TAG, "First Frame Converted To Grayscale");

                faceCascade.detectMultiScale(gray, face);
                //Rect[] faceArray = face.toArray();

                //Verification : eyes area inside face
                eyesAreaCascade.detectMultiScale(gray, eyesArea);
                //Rect[] eyesAreaArray = eyesArea.toArray();
                //if ((eyesAreaArray[0].x < faceArray[0].x) || (eyesAreaArray[0].x + eyesAreaArray[0].width > faceArray[0].x + faceArray[0].width)
                //        || (eyesAreaArray[0].y < faceArray[0].y) || (eyesAreaArray[0].y + eyesAreaArray[0].height > faceArray[0].y + faceArray[0].height))
                //{ face.empty(); }

                    //Verification : left eye inside eyes area
                    leftEyeCascade.detectMultiScale(gray, leftEye);
                    //Rect[] leftEyeArray = leftEye.toArray();
                    //if ((leftEyeArray[0].x < eyesAreaArray[0].x) || (leftEyeArray[0].x + leftEyeArray[0].width > eyesAreaArray[0].x + eyesAreaArray[0].width)
                    //        || (leftEyeArray[0].y < eyesAreaArray[0].y) || (leftEyeArray[0].y + leftEyeArray[0].height > eyesAreaArray[0].y + eyesAreaArray[0].height))
                    //{ face.empty(); }

                    //Verification : right eye to the right of left eye
                    rightEyeCascade.detectMultiScale(gray, rightEye);
                    //Rect[] rightEyeArray = rightEye.toArray();
                    //if (rightEyeArray[0].x < leftEyeArray[0].x + leftEyeArray[0].width) { face.empty(); }
            }

            Rect[] faceArray = face.toArray();
            Rect[] leftEyeArray = leftEye.toArray();
            Rect[] rightEyeArray = rightEye.toArray();
            Rect[] eyesAreaArray = eyesArea.toArray();

            Point faceCenter = new Point(faceArray[0].x + faceArray[0].width * 0.5, faceArray[0].y + faceArray[0].height * 0.5);
            Size faceAxes = new Size(faceArray[0].width * 0.5, faceArray[0].height * 0.5);
            Scalar faceColor = new Scalar(255, 0, 255);
            Core.ellipse(lowRes, faceCenter, faceAxes, 0, 0, 360, faceColor, 4, 8, 0);

            Point leftEyeCenter = new Point(leftEyeArray[0].x + leftEyeArray[0].width * 0.5, leftEyeArray[0].y + leftEyeArray[0].height * 0.5);
            Size leftEyeAxes = new Size(leftEyeArray[0].width * 0.5, leftEyeArray[0].height * 0.5);
            Scalar leftEyeColor = new Scalar(0, 0, 255);
            Core.ellipse(lowRes, leftEyeCenter, leftEyeAxes, 0, 0, 360, leftEyeColor, 4, 8, 0);

            Point rightEyeCenter = new Point(rightEyeArray[0].x + rightEyeArray[0].width * 0.5, rightEyeArray[0].y + rightEyeArray[0].height * 0.5);
            Size rightEyeAxes = new Size(rightEyeArray[0].width * 0.5, rightEyeArray[0].height * 0.5);
            Scalar rightEyeColor = new Scalar(255, 0, 0);
            Core.ellipse(lowRes, rightEyeCenter, rightEyeAxes, 0, 0, 360, rightEyeColor, 4, 8, 0);

            Point eyesAreaCenter = new Point(eyesAreaArray[0].x + eyesAreaArray[0].width * 0.5, eyesAreaArray[0].y + eyesAreaArray[0].height * 0.5);
            Size eyesAreaAxes = new Size(eyesAreaArray[0].width * 0.5, eyesAreaArray[0].height * 0.5);
            Scalar eyesAreaColor = new Scalar(0, 255, 0);
            Core.ellipse(lowRes, eyesAreaCenter, eyesAreaAxes, 0, 0, 360, eyesAreaColor, 4, 8, 0);

            Bitmap bm = Bitmap.createBitmap(lowRes.cols(), lowRes.rows(), Bitmap.Config.ARGB_8888);
            Utils.matToBitmap(lowRes, bm);
            ImageView imageView = (ImageView) findViewById(R.id.tutorial2_activity_surface_view);
            imageView.setImageBitmap(bm);

                Log.i(TAG, "Ellipses Drawn");

                //Getting the initial positions of the eyes
                ArrayList<Double> lx = new ArrayList<Double>();
                ArrayList<Double> ly = new ArrayList<Double>();
                ArrayList<Double> rx = new ArrayList<Double>();
                ArrayList<Double> ry = new ArrayList<Double>();
                lx.add((double) leftEyeArray[0].x);
                ly.add((double) leftEyeArray[0].y);
                rx.add((double) rightEyeArray[0].x);
                ry.add((double) rightEyeArray[0].y);

                //Calculating the distance between the eyes at initial position
                ArrayList<Double> eyesDist = new ArrayList<Double>();
                eyesDist.add(Math.sqrt((lx.get(0)-rx.get(0))*(lx.get(0)-rx.get(0)) + (ly.get(0)-ry.get(0))*(ly.get(0)-ry.get(0)))); // squared terms must be summed, not multiplied

                //Calculating the slope for the eyes at initial position
                ArrayList<Double> slope = new ArrayList<Double>();
                slope.add(Math.atan((ry.get(0)-ly.get(0))/(rx.get(0)-lx.get(0))));

                //Created for the minimum values for matching template, adding an element to match the iterations
                ArrayList<Point> leftEyeMinLoc = new ArrayList<Point>();
                ArrayList<Point> rightEyeMinLoc = new ArrayList<Point>();
                leftEyeMinLoc.add(null);
                rightEyeMinLoc.add(null);

                Log.i(TAG, "Calculations Done");

                Mat newLowRes = new Mat();

                for (int i=1; mOpenCvCameraView.isActivated(); i++)
                {
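                        //NOTE: like the while loop above, this loop re-reads the same inputFrame;
                        //new frames only arrive through new onCameraFrame calls.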

                        //Capturing a new frame
                        Mat newFrame = new Mat();
                        newFrame = inputFrame.rgba();
                Imgproc.resize(newFrame, newLowRes, new Size(), 0.25, 0.25, Imgproc.INTER_LINEAR);
                Imgproc.equalizeHist(newLowRes, newLowRes);

                        //videoCapture.read(newFrame);
                        //Imgproc.resize(newFrame, newLowRes, new Size(), 0.25, 0.25, Imgproc.INTER_LINEAR);
                        //Imgproc.equalizeHist(newLowRes, newLowRes); //For illumination robustness

                        Log.i(TAG, "New Frame Captured");

                        //Matching templates to get the new eyes positions
                        ArrayList<Mat> leftEyeResults = new ArrayList<Mat>();
                        ArrayList<Mat> rightEyeResults = new ArrayList<Mat>();
                        Imgproc.matchTemplate(newLowRes, leftEye, leftEyeResults.get(i), Imgproc.TM_SQDIFF_NORMED);
                        Imgproc.matchTemplate(newLowRes, rightEye, rightEyeResults.get(i), Imgproc.TM_SQDIFF_NORMED);

                        //Locating the minimum values for matching results (matching with SQDIFF method)
                        leftEyeMinLoc.add(Core.minMaxLoc(leftEyeResults.get(i)).minLoc);
                        rightEyeMinLoc.add(Core.minMaxLoc(rightEyeResults.get(i)).minLoc);

                        //Getting the new positions of the eyes
                        lx.add(leftEyeMinLoc.get(i).x);
                        ly.add(leftEyeMinLoc.get(i).y);
                        rx.add(rightEyeMinLoc.get(i).x);
                        ry.add(rightEyeMinLoc.get(i).y);

                        //Calculating the new distance between the eyes
                        eyesDist.add(Math.sqrt((lx.get(i)-rx.get(i))*(lx.get(i)-rx.get(i)) + (ly.get(i)-ry.get(i))*(ly.get(i)-ry.get(i)))); // sum of squares

                        //Calculating the new slope for the eyes
                        slope.add(Math.atan((ry.get(i)-ly.get(i))/(rx.get(i)-lx.get(i))));

                        //Calculating the translation
                        double tx = ((lx.get(i)-lx.get(i-1))+(rx.get(i)-rx.get(i-1)))/2;
                        double ty = ((ly.get(i)-ly.get(i-1))+(ry.get(i)-ry.get(i-1)))/2;

                        //Calculating the rotation
                        double theta = slope.get(i-1) - slope.get(i);

                        //Calculating the scale
                        double scale = eyesDist.get(i)/eyesDist.get(i-1);

                        //Constructing the matrix for the transformation
                        Matrix trans = new Matrix();
                        trans.setValues(new float[] {
                                (float) (scale * Math.cos(theta)), (float) (scale * Math.sin(theta)), (float) tx,
                                (float) (-scale * Math.sin(theta)), (float) (scale * Math.cos(theta)), (float) ty,
                                0, 0, 1});

                        //Multiplying the initial square found by the face detector by the matrix, then the updated square by the newest matrix
                        RectF faceArrayF = new RectF(faceArray[0].x, faceArray[0].y, faceArray[0].width, faceArray[0].height);
                        RectF newPosition = new RectF();
                        if (i==1)
                        {
                                trans.mapRect(faceArrayF, newPosition);
                        }
                        else
                        {
                                trans.mapRect(newPosition, newPosition);
                        }

                        //Drawing ellipses on the new frame, no need to draw on the eyes area
                        Point newFaceCenter = new Point(newPosition.centerX(), newPosition.centerY());
                        Size newFaceAxes = new Size(newPosition.width() * 0.5, newPosition.height() * 0.5);
                        Core.ellipse(newLowRes, newFaceCenter, newFaceAxes, 0, 0, 360, faceColor, 4, 8, 0);

                        Point newLeftEyeCenter = new Point(lx.get(i), ly.get(i));
                        Size newLeftEyeAxes = new Size(leftEyeArray[0].width * 0.5, leftEyeArray[0].height * 0.5);
                        Core.ellipse(newLowRes, newLeftEyeCenter, newLeftEyeAxes, 0, 0, 360, leftEyeColor, 4, 8, 0);

                        Point newRightEyeCenter = new Point(rx.get(i), ry.get(i));
                        Size newRightEyeAxes = new Size(rightEyeArray[0].width * 0.5, rightEyeArray[0].height * 0.5);
                        Core.ellipse(newLowRes, newRightEyeCenter, newRightEyeAxes, 0, 0, 360, rightEyeColor, 4, 8, 0);

                        //Showing the frames in an ImageView
                        //Bitmap bmi = Bitmap.createBitmap(newLowRes.cols(), newLowRes.rows(), Bitmap.Config.ARGB_8888);
                        //Utils.matToBitmap(newLowRes, bmi);
                        //imageView.setImageBitmap(bmi);
                }

                return newLowRes;

        }

        @Override
        public boolean onCreateOptionsMenu(Menu menu) {

                // Inflate the menu; this adds items to the action bar if it is present.
                getMenuInflater().inflate(R.menu.main, menu);
                return true;
        }

        @Override
        public boolean onOptionsItemSelected(MenuItem item) {
                // Handle action bar item clicks here. The action bar will
                // automatically handle clicks on the Home/Up button, so long
                // as you specify a parent activity in AndroidManifest.xml.
                int id = item.getItemId();
                if (id == R.id.action_settings) {
                        return true;
                }
                return super.onOptionsItemSelected(item);
        }

        public void loadFaceCascade(){
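                // Cascade XML files in res/raw aren't readable by OpenCV directly,
                // so copy the resource into app-private storage and load it by file path.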
                try {
                        InputStream is = getResources().openRawResource(R.raw.haarcascade_frontalface_alt2);
                        File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
                        File mCascadeFile = new File(cascadeDir, "haarcascade_frontalface_alt2.xml");
                        FileOutputStream os = new FileOutputStream(mCascadeFile);

                        byte[] buffer = new byte[4096];
                        int bytesRead;
                        while ((bytesRead = is.read(buffer)) != -1) {
                                os.write(buffer, 0, bytesRead);
                                }
                        is.close();
                        os.close();

                        faceCascade = new CascadeClassifier(mCascadeFile.getAbsolutePath());
                        if(faceCascade.empty())
                        {
                                Log.v("MyActivity","--(!)Error loading A\n");
                                return;
                        }
                        else
                        {
                                Log.v("MyActivity",
                                                "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
                        }
                } catch (IOException e) {
                        e.printStackTrace();
                        Log.v("MyActivity", "Failed to load cascade. Exception thrown: " + e);
                }
        }

        public void loadLeftEyeCascade(){
                try {
                        InputStream is = getResources().openRawResource(R.raw.haarcascade_mcs_lefteye);
                        File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
                        File mCascadeFile = new File(cascadeDir, "haarcascade_mcs_lefteye.xml");
                        FileOutputStream os = new FileOutputStream(mCascadeFile);

                        byte[] buffer = new byte[4096];
                        int bytesRead;
                        while ((bytesRead = is.read(buffer)) != -1) {
                                os.write(buffer, 0, bytesRead);
                                }
                        is.close();
                        os.close();

                        leftEyeCascade = new CascadeClassifier(mCascadeFile.getAbsolutePath());
                        if(leftEyeCascade.empty())
                        {
                                Log.v("MyActivity","--(!)Error loading A\n");
                                return;
                        }
                        else
                        {
                                Log.v("MyActivity",
                                                "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
                        }
                } catch (IOException e) {
                        e.printStackTrace();
                        Log.v("MyActivity", "Failed to load cascade. Exception thrown: " + e);
                }
        }

        public void loadRightEyeCascade(){
                try {
                        InputStream is = getResources().openRawResource(R.raw.haarcascade_mcs_righteye);
                        File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
                        File mCascadeFile = new File(cascadeDir, "haarcascade_mcs_righteye.xml");
                        FileOutputStream os = new FileOutputStream(mCascadeFile);

                        byte[] buffer = new byte[4096];
                        int bytesRead;
                        while ((bytesRead = is.read(buffer)) != -1) {
                                os.write(buffer, 0, bytesRead);
                                }
                        is.close();
                        os.close();

                        rightEyeCascade = new CascadeClassifier(mCascadeFile.getAbsolutePath());
                        if(rightEyeCascade.empty())
                        {
                                Log.v("MyActivity","--(!)Error loading A\n");
                                return;
                        }
                        else
                        {
                                Log.v("MyActivity",
                                                "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
                        }
                } catch (IOException e) {
                        e.printStackTrace();
                        Log.v("MyActivity", "Failed to load cascade. Exception thrown: " + e);
                }
        }

        public void loadEyesAreaCascade(){
                try {
                        InputStream is = getResources().openRawResource(R.raw.haarcascade_mcs_eyepair_small);
                        File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
                        File mCascadeFile = new File(cascadeDir, "haarcascade_mcs_eyepair_small.xml");
                        FileOutputStream os = new FileOutputStream(mCascadeFile);

                        byte[] buffer = new byte[4096];
                        int bytesRead;
                        while ((bytesRead = is.read(buffer)) != -1) {
                                os.write(buffer, 0, bytesRead);
                                }
                        is.close();
                        os.close();

                        eyesAreaCascade = new CascadeClassifier(mCascadeFile.getAbsolutePath());
                        if(eyesAreaCascade.empty())
                        {
                                Log.v("MyActivity","--(!)Error loading A\n");
                                return;
                        }
                        else
                        {
                                Log.v("MyActivity",
                                                "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
                        }
                } catch (IOException e) {
                        e.printStackTrace();
                        Log.v("MyActivity", "Failed to load cascade. Exception thrown: " + e);
                }
        }
}

...and my layout:

    <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
        xmlns:tools="http://schemas.android.com/tools"
        xmlns:opencv="http://schemas.android.com/apk/res-auto"
        android:orientation="horizontal"
        android:layout_width="match_parent"
        android:layout_height="match_parent" >

        <org.opencv.android.JavaCameraView
            android:layout_width="match_parent"
            android:layout_height="match_parent"
            android:id="@+id/tutorial2_activity_surface_view"
            opencv:show_fps="true"
            opencv:camera_id="front"/>

    </LinearLayout>

...and my manifest:

    <?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.newfacetracker"
    android:versionCode="1"
    android:versionName="1.0" >

     <supports-screens android:resizeable="true"
            android:smallScreens="true"
            android:normalScreens="true"
            android:largeScreens="true"
            android:anyDensity="true" />

    <uses-sdk
        android:minSdkVersion="8"
        android:targetSdkVersion="19" />

    <uses-permission android:name="android.permission.CAMERA"/>
    <uses-feature android:name="android.hardware.camera" android:required="false"/>
    <uses-feature android:name="android.hardware.camera.autofocus" android:required="false"/>
    <uses-feature android:name="android.hardware.camera.front" android:required="false"/>
    <uses-feature android:name="android.hardware.camera.front.autofocus" android:required="false"/>

    <application
        android:allowBackup="true"
        android:icon="@drawable/ic_launcher"
        android:label="@string/app_name"
        android:theme="@style/AppTheme" >
        <activity
            android:name="com.example.newfacetracker.MainActivity"
            android:label="@string/app_name"
            android:screenOrientation="landscape" >
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>

</manifest>

1 Answer:

Answer 0 (score: 0)

You don't need VideoCapture on Android. Instead, retrieve the images from the CvCameraViewFrame:

public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
     Mat rgba = inputFrame.rgba();
     Mat gray = inputFrame.gray();
     // now process that...

     return rgba; // finally return the image for blitting
}
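
For example, a minimal detection pass inside that callback could look like the sketch below. It assumes a faceCascade field loaded the same way as the question's loadFaceCascade(); the drawing color is illustrative:

public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
     Mat rgba = inputFrame.rgba();  // color frame, used for drawing and display
     Mat gray = inputFrame.gray();  // grayscale frame, ready for the classifier

     MatOfRect faces = new MatOfRect();
     if (faceCascade != null && !faceCascade.empty()) {
         // one detection pass per frame; no loops that would block the callback
         faceCascade.detectMultiScale(gray, faces);
         for (Rect r : faces.toArray()) {
             Core.rectangle(rgba, r.tl(), r.br(), new Scalar(255, 0, 255), 3);
         }
     }
     return rgba; // the returned Mat is what the camera view blits
}

Note that this callback must return promptly: the question's onCameraFrame loops until a face is found and then iterates without ever returning, which by itself would stall the preview.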

"then using a template matching method on the eyes to track the face" - that sounds like a broken idea.

Template matching usually does not work well on images coming from a camera, due to perspective distortion, rotation, and scaling.
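
For reference, Imgproc.matchTemplate also expects an image patch (a Mat) as the template, not the MatOfRect of detections that the question's code passes in. A single tracking step would have to look roughly like this sketch (firstGrayFrame, newGrayFrame and leftEyeRect are placeholder names, not from the original code):

// Cut the eye template out of the first frame, using the rect from the detector
Mat eyeTemplate = firstGrayFrame.submat(leftEyeRect).clone();

// Search each new frame for that patch; with TM_SQDIFF_NORMED the best
// match is the minimum of the result map
Mat result = new Mat();
Imgproc.matchTemplate(newGrayFrame, eyeTemplate, result, Imgproc.TM_SQDIFF_NORMED);
Core.MinMaxLocResult mm = Core.minMaxLoc(result);
Point newEyeTopLeft = mm.minLoc;

Even with that fixed, the match degrades as soon as the head rotates, scales, or tilts out of plane, which is exactly why a plain template is a poor tracker here.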