How can I detect faces in video frames in real time while recording video from a Service on Android?

Time: 2016-08-17 06:33:40

Tags: android service

I have created a service that records video when someone fails an attempt to unlock the phone. While the service is running and capturing video, it also grabs the preview frames in real time and detects faces in them.

The problem is that while the service is detecting faces in the video frames, more than 150 frames get skipped, as shown in logcat.
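For reference, this is presumably Android's standard Choreographer warning; the frame count below is illustrative:

I/Choreographer: Skipped 150 frames!  The application may be doing too much work on its main thread.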

How can I detect faces efficiently so that so many frames are not skipped?

Here is my code:

public class Background_Recording extends Service implements SurfaceHolder.Callback,Camera.PreviewCallback {
    private WindowManager windowManager;
    public static int MAX_FACES = 5;
    boolean stopped = false;
    Timer t;
    Bitmap bitmaper;
    Handler handler;
    ArrayList<Bitmap> bit_collect = new ArrayList<Bitmap>();
    private SurfaceView surfaceView;
    private Camera camera = null;
    int camera_type = 1;
    private MediaRecorder mediaRecorder = null;

    @Override
    public void onCreate() {
        windowManager = (WindowManager) this.getSystemService(Context.WINDOW_SERVICE);
        surfaceView = new SurfaceView(this);
        WindowManager.LayoutParams layoutParams = new WindowManager.LayoutParams(1, 1, WindowManager.LayoutParams.TYPE_SYSTEM_OVERLAY,
                WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH,
                PixelFormat.TRANSLUCENT
        );
        layoutParams.gravity = Gravity.LEFT | Gravity.TOP;
        windowManager.addView(surfaceView, layoutParams);
        surfaceView.getHolder().addCallback(this);
    }

    @Override
    public void surfaceCreated(SurfaceHolder surfaceHolder) {
        camera = Camera.open(camera_type);
        mediaRecorder = new MediaRecorder();
        camera.unlock();
        mediaRecorder.setPreviewDisplay(surfaceHolder.getSurface());
        mediaRecorder.setCamera(camera);
        mediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
        mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
        mediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH));
        mediaRecorder.setOutputFile("/sdcard/unlock.mp4");
        try {
            mediaRecorder.prepare();
            mediaRecorder.start();
            camera.setPreviewCallback(this);
            t = new Timer();
            t.schedule(new TimerTask() {
                @Override
                public void run() {
                    camera.setPreviewCallback(null);
                    stopSelf();
                    stopped = true;
                }
            }, 8000);
        } catch (Exception e) {
            Toast.makeText(getApplicationContext(), "getting exception ", Toast.LENGTH_LONG).show();
        }
    }

    public int onStartCommand(Intent intent, int flags, int flagID) {
        handler = new Handler(Looper.getMainLooper());
        return super.onStartCommand(intent, flags, flagID);
    }

    @Override
    public void onDestroy() {
        mediaRecorder.stop();
        mediaRecorder.reset();
        mediaRecorder.release();
        camera.lock();
        camera.release();
        windowManager.removeView(surfaceView);
    }

    @Override
    public void surfaceChanged(SurfaceHolder surfaceHolder, int format, int width, int height) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        try {
            Camera.Parameters parameters = camera.getParameters();
            Camera.Size size = parameters.getPreviewSize();
            YuvImage image = new YuvImage(data, parameters.getPreviewFormat(), size.width, size.height, null);
            ByteArrayOutputStream os = new ByteArrayOutputStream();
            image.compressToJpeg(new Rect(0, 0, size.width, size.height), 100, os);
            byte[] jpegByteArray = os.toByteArray();
            bitmaper = BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length);
            bit_collect.add(bitmaper);
            handler.post(new Runnable() {
                @Override
                public void run() {
                    processing(bitmaper);
                }
            });
        } catch (Exception e) {
            Toast.makeText(getApplicationContext(), "no frames ", Toast.LENGTH_SHORT).show();
        }
    }

    public void processing(Bitmap final_byte) {
        if (final_byte != null) {
            int width = final_byte.getWidth();
            int height = final_byte.getHeight();
            FaceDetector detector = new FaceDetector(width, height, Background_Recording.MAX_FACES);
            FaceDetector.Face[] faces = new FaceDetector.Face[Background_Recording.MAX_FACES];
            int facesFound = detector.findFaces(final_byte, faces);
            if (facesFound > 0) {
                Toast.makeText(getApplicationContext(), "face found", Toast.LENGTH_SHORT).show();
            } else {
                final_byte.recycle();
                Toast.makeText(getApplicationContext(), "no face found", Toast.LENGTH_SHORT).show();
            }
        }
    }
}

1 Answer:

Answer 0 (score: 1)

You are running face detection on the UI thread, because your handler is attached to the main thread's Looper. You should move all of that work to a background thread. Try the code below, and see my comments explaining the changes. It may not work 100% as-is, but it should be very close.

//at the beginning of your class
private HandlerThread handlerThread;

//...
@Override
public int onStartCommand(Intent intent, int flags, int flagID) {
    //Create a new thread whose Looper the Handler will use to
    //process the image data and run face detection
    handlerThread = new HandlerThread("faceDetectionThread");
    handlerThread.start();
    Looper looper = handlerThread.getLooper();
    //Assign to the existing handler field; a local variable here
    //would leave the field used in onPreviewFrame null
    handler = new Handler(looper);
    return super.onStartCommand(intent, flags, flagID);
}

@Override
public void onPreviewFrame(final byte[] data, final Camera camera) {
    try {
        //This solution only processes the most recent frame.
        //It waits 100ms, and if no new frame arrives in that time,
        //processing starts. Otherwise the previously posted
        //Runnable is cancelled and replaced with the new frame.
        handler.removeCallbacksAndMessages(null);
        handler.postDelayed(new Runnable() {
            @Override
            public void run() {
                processing(data, camera);
            }
        }, 100);
    } catch (Exception e) {
        Toast.makeText(getApplicationContext(), "no frames ", Toast.LENGTH_SHORT).show();
    }
}

public void processing(final byte[] data, final Camera camera) {

    // Bitmap processing must be on a background thread!
    // Moved this from the onPreviewFrame method
    Camera.Parameters parameters = camera.getParameters();
    Camera.Size size = parameters.getPreviewSize();
    YuvImage image = new YuvImage(data, parameters.getPreviewFormat(), size.width, size.height, null);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    image.compressToJpeg(new Rect(0, 0, size.width, size.height), 100, os);
    byte[] jpegByteArray = os.toByteArray();
    //FaceDetector.findFaces() only accepts RGB_565 bitmaps, so
    //decode with that config explicitly
    BitmapFactory.Options options = new BitmapFactory.Options();
    options.inPreferredConfig = Bitmap.Config.RGB_565;
    bitmaper = BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length, options);
    bit_collect.add(bitmaper);

    if (bitmaper != null) {
        int width = bitmaper.getWidth();
        int height = bitmaper.getHeight();
        FaceDetector detector = new FaceDetector(width, height, Background_Recording.MAX_FACES);
        FaceDetector.Face[] faces = new FaceDetector.Face[Background_Recording.MAX_FACES];
        int facesFound = detector.findFaces(bitmaper, faces);
        if (facesFound > 0) {
            Toast.makeText(getApplicationContext(), "face found", Toast.LENGTH_SHORT).show();
        } else {
            bitmaper.recycle();
            Toast.makeText(getApplicationContext(), "no face found", Toast.LENGTH_SHORT).show();
        }
    }
}

//...

@Override
public void onDestroy() {
    mediaRecorder.stop();
    mediaRecorder.reset();
    mediaRecorder.release();
    camera.lock();
    camera.release();
    windowManager.removeView(surfaceView);
    handlerThread.quit(); //Don't forget this!
}

Also, I'd guess face detection will be faster if you reduce MAX_FACES to 1.
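If it is still too slow after that, here is a rough, untested sketch of one more idea: detection cost scales with bitmap size, so you could decode the JPEG bytes at reduced resolution before running FaceDetector. The helper name and the inSampleSize of 4 below are assumptions to tune against your preview size; findFaces() requires an RGB_565 bitmap whose width is even:

//Hypothetical helper: decode the JPEG bytes at reduced resolution
//for faster face detection. inSampleSize = 4 (1/4 width and height)
//is an assumed starting point; tune it for your preview size.
private Bitmap decodeScaledForDetection(byte[] jpegByteArray) {
    BitmapFactory.Options options = new BitmapFactory.Options();
    options.inSampleSize = 4;
    options.inPreferredConfig = Bitmap.Config.RGB_565; //required by FaceDetector
    Bitmap scaled = BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length, options);
    //FaceDetector also requires an even bitmap width; crop one pixel if needed
    if (scaled != null && scaled.getWidth() % 2 != 0) {
        scaled = Bitmap.createBitmap(scaled, 0, 0, scaled.getWidth() - 1, scaled.getHeight());
    }
    return scaled;
}

You would then construct the FaceDetector with the scaled bitmap's width and height rather than the full preview size.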