我正在尝试在Android中为我的论文开发一个样本面部检测应用程序。我已尝试使用Android SDK本身提供的FaceDetector类,它可以正常工作。但我想让它在后台运行,因为我想制作一个应用程序:当用户盯着手机并快速行走或跑步时,发出声音或其他提醒。所以我需要在后台进行人脸检测。我让它作为一项服务运行,但每当我关闭应用程序时,线程仍然在运行,而面部检测却停止了,因为预览停止了。
检查捕获图片的线程... 这是我的代码......提前谢谢
public final class FaceDetectRGBActivity extends AppCompatActivity implements SurfaceHolder.Callback, Camera.PreviewCallback
{
public static final String TAG = FaceDetectRGBActivity.class.getSimpleName();
private static final int MAX_FACE = 10;
private final CameraErrorCallback mErrorCallback = new CameraErrorCallback();
long start, end;
int counter = 0;
double fps;
private int numberOfCameras;
private Camera mCamera;
private int cameraId = 1;
private int mDisplayRotation;
private int mDisplayOrientation;
private int previewWidth;
private int previewHeight;
private SurfaceView mView;
private FaceOverlayView mFaceView;
private boolean isThreadWorking = false;
private Handler handler;
private FaceDetectTask detectTask = null;
private int prevSettingWidth;
private int prevSettingHeight;
private android.media.FaceDetector fdet;
private FaceResult faces[];
private FaceResult faces_previous[];
private int Id = 0;
private String BUNDLE_CAMERA_ID = "camera";
private HashMap<Integer, Integer> facesCount = new HashMap<>();
private float speed;
private accelerator ac= new accelerator();;
private StepCounter steps=new StepCounter();;
private ArrayList<Bitmap> facesBitmap;
/**
* Initializes the UI and initiates the creation of a face detector.
*/
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.activity_camera_viewer);
mView = (SurfaceView) findViewById(R.id.surfaceview);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
// Now create the OverlayView:
mFaceView = new FaceOverlayView(this);
addContentView(mFaceView, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
// Create and Start the OrientationListener:
handler = new Handler();
faces = new FaceResult[MAX_FACE];
faces_previous = new FaceResult[MAX_FACE];
for (int i = 0; i < MAX_FACE; i++) {
faces[i] = new FaceResult();
faces_previous[i] = new FaceResult();
}
getSupportActionBar().setDisplayShowTitleEnabled(true);
getSupportActionBar().setHomeButtonEnabled(true);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setTitle("Face Detect RGB");
if (icicle != null)
cameraId = icicle.getInt(BUNDLE_CAMERA_ID, 1);
}
@Override
protected void onPostCreate(Bundle savedInstanceState) {
super.onPostCreate(savedInstanceState);
// Check for the camera permission before accessing the camera. If the
// permission is not granted yet, request permission.
SurfaceHolder holder = mView.getHolder();
holder.addCallback(this);
holder.setFormat(ImageFormat.NV21);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.menu_camera, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
super.onBackPressed();
return true;
case R.id.switchCam:
if (numberOfCameras == 1) {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Switch Camera").setMessage("Your device have one camera").setNeutralButton("Close", null);
AlertDialog alert = builder.create();
alert.show();
return true;
}
cameraId = (cameraId + 1) % numberOfCameras;
recreate();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
/**
* Restarts the camera.
*/
@Override
protected void onResume() {
super.onResume();
Log.i(TAG, "onResume");
startPreview();
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putInt(BUNDLE_CAMERA_ID, cameraId);
}
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
//Find the total number of cameras available
numberOfCameras = Camera.getNumberOfCameras();
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
if (cameraId == 0) cameraId = i;
}
}
mCamera = Camera.open(cameraId);
Camera.getCameraInfo(cameraId, cameraInfo);
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
mFaceView.setFront(true);
}
try {
mCamera.setPreviewDisplay(mView.getHolder());
} catch (Exception e) {
Log.e(TAG, "Could not preview the image.", e);
}
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int format, int width, int height) {
// We have no surface, return immediately:
if (surfaceHolder.getSurface() == null) {
// return;
}
try {
// mCamera.stopPreview();
} catch (Exception e) {
// Ignore...
}
configureCamera(width, height);
setDisplayOrientation();
setErrorCallback();
// Create media.FaceDetector
float aspect = (float) previewHeight / (float) previewWidth;
fdet = new android.media.FaceDetector(prevSettingWidth, (int) (prevSettingWidth * aspect), MAX_FACE);
// Everything is configured! Finally start the camera preview again:
startPreview();
}
private void setErrorCallback() {
mCamera.setErrorCallback(mErrorCallback);
}
private void setDisplayOrientation() {
// Now set the display orientation:
mDisplayRotation = Util.getDisplayRotation(FaceDetectRGBActivity.this);
mDisplayOrientation = Util.getDisplayOrientation(mDisplayRotation, cameraId);
mCamera.setDisplayOrientation(mDisplayOrientation);
if (mFaceView != null) {
mFaceView.setDisplayOrientation(mDisplayOrientation);
}
}
private void configureCamera(int width, int height) {
Camera.Parameters parameters = mCamera.getParameters();
// Set the PreviewSize and AutoFocus:
setOptimalPreviewSize(parameters, width, height);
setAutoFocus(parameters);
// And set the parameters:
mCamera.setParameters(parameters);
}
private void setOptimalPreviewSize(Camera.Parameters cameraParameters, int width, int height) {
List<Camera.Size> previewSizes = cameraParameters.getSupportedPreviewSizes();
float targetRatio = (float) width / height;
Camera.Size previewSize = Util.getOptimalPreviewSize(this, previewSizes, targetRatio);
previewWidth = previewSize.width;
previewHeight = previewSize.height;
Log.e(TAG, "previewWidth" + previewWidth);
Log.e(TAG, "previewHeight" + previewHeight);
/**
* Calculate size to scale full frame bitmap to smaller bitmap
* Detect face in scaled bitmap have high performance than full bitmap.
* The smaller image size -> detect faster, but distance to detect face shorter,
* so calculate the size follow your purpose
*/
if (previewWidth / 4 > 360) {
prevSettingWidth = 360;
prevSettingHeight = 270;
} else if (previewWidth / 4 > 320) {
prevSettingWidth = 320;
prevSettingHeight = 240;
} else if (previewWidth / 4 > 240) {
prevSettingWidth = 240;
prevSettingHeight = 160;
} else {
prevSettingWidth = 160;
prevSettingHeight = 120;
}
cameraParameters.setPreviewSize(previewSize.width, previewSize.height);
mFaceView.setPreviewWidth(previewWidth);
mFaceView.setPreviewHeight(previewHeight);
}
private void setAutoFocus(Camera.Parameters cameraParameters) {
List<String> focusModes = cameraParameters.getSupportedFocusModes();
if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE))
cameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
private void startPreview() {
if (mCamera != null) {
//isThreadWorking = false;
mCamera.startPreview();
mCamera.setPreviewCallback(this);
counter = 0;
}
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
mCamera.setPreviewCallbackWithBuffer(null);
mCamera.setErrorCallback(null);
mCamera.release();
// mCamera = null;
}
@Override
public void onPreviewFrame(byte[] _data, Camera _camera) {
if (!isThreadWorking) {
if (counter == 0)
start = System.currentTimeMillis();
isThreadWorking = true;
waitForFdetThreadComplete();
detectTask = new FaceDetectTask(handler, this);
detectTask.execute(_data);
}
new Timer().scheduleAtFixedRate(new TimerTask() {
@Override
public void run() {
isCameraUsebyApp();
Log.e("Hello", "HELLO");
if (detectTask.getStatus() == AsyncTask.Status.RUNNING) {
Log.e("ALIVE", "ALIVE");
} else Log.e("DEAD", "DEAD");
}
}, 0, 1000);//put here time 1000 milliseconds=1 second
}
public boolean isCameraUsebyApp() {
Camera camera = null;
try {
camera = Camera.open();
} catch (RuntimeException e) {
return true;
} finally {
if (camera != null) camera.release();
}
return false;
}
private void waitForFdetThreadComplete() {
if (detectTask == null) {
return;
}
if (detectTask.getStatus() == AsyncTask.Status.RUNNING) {
detectTask = null;
}
}
private class FaceDetectTask extends AsyncTask<byte[], Integer, String> {
private Handler handler;
private byte[] data = null;
private Context ctx;
private Bitmap faceCroped;
public FaceDetectTask(Handler handler, Context ctx) {
this.ctx = ctx;
this.handler = handler;
}
@Override
protected String doInBackground(byte[]... params) {
data = params[0];
Log.e("FaceDetectThread", "running");
float aspect = (float) previewHeight / (float) previewWidth;
int w = prevSettingWidth;
int h = (int) (prevSettingWidth * aspect);
Bitmap bitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.RGB_565);
YuvImage yuv = new YuvImage(data, ImageFormat.NV21,
bitmap.getWidth(), bitmap.getHeight(), null);above
Rect rectImage = new Rect(0, 0, bitmap.getWidth(), bitmap.getHeight());
ByteArrayOutputStream baout = new ByteArrayOutputStream();
if (!yuv.compressToJpeg(rectImage, 100, baout))
{
Log.e("CreateBitmap", "compressToJpeg failed");
}
BitmapFactory.Options bfo = new BitmapFactory.Options();
bfo.inPreferredConfig = Bitmap.Config.RGB_565;
bitmap = BitmapFactory.decodeStream(
new ByteArrayInputStream(baout.toByteArray()), null, bfo);
Bitmap bmp = Bitmap.createScaledBitmap(bitmap, w, h, false);
float xScale = (float) previewWidth / (float) prevSettingWidth;
float yScale = (float) previewHeight / (float) h;
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(cameraId, info);
int rotate = mDisplayOrientation;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT && mDisplayRotation % 180 == 0) {
if (rotate + 180 > 360) {
rotate = rotate - 180;
} else
rotate = rotate + 180;
}
switch (rotate) {
case 90:
bmp = ImageUtils.rotate(bmp, 90);
xScale = (float) previewHeight / bmp.getWidth();
yScale = (float) previewWidth / bmp.getHeight();
break;
case 180:
bmp = ImageUtils.rotate(bmp, 180);
break;
case 270:
bmp = ImageUtils.rotate(bmp, 270);
xScale = (float) previewHeight / (float) h;
yScale = (float) previewWidth / (float) prevSettingWidth;
break;
}
android.media.FaceDetector.Face[] fullResults = new android.media.FaceDetector.Face[MAX_FACE];
for (int i = 0; i < MAX_FACE; i++) {
if (fullResults[i] == null) {
faces[i].clear();
} else {
PointF mid = new PointF();
fullResults[i].getMidPoint(mid);
mid.x *= xScale;
mid.y *= yScale;
float eyesDis = fullResults[i].eyesDistance() * xScale;
float confidence = fullResults[i].confidence();
float pose = fullResults[i].pose(android.media.FaceDetector.Face.EULER_Y);
int idFace = Id;
Rect rect = new Rect(
(int) (mid.x - eyesDis * 1.20f),
(int) (mid.y - eyesDis * 0.55f),
(int) (mid.x + eyesDis * 1.20f),
(int) (mid.y + eyesDis * 1.85f));
if (rect.height() * rect.width() > 100 * 100) {
for (int j = 0; j < MAX_FACE; j++) {
float eyesDisPre = faces_previous[j].eyesDistance();
PointF midPre = new PointF();
faces_previous[j].getMidPoint(midPre);
RectF rectCheck = new RectF(
(midPre.x - eyesDisPre * 1.5f),
(midPre.y - eyesDisPre * 1.15f),
(midPre.x + eyesDisPre * 1.5f),
(midPre.y + eyesDisPre * 1.85f));
if (rectCheck.contains(mid.x, mid.y) && (System.currentTimeMillis() - faces_previous[j].getTime()) < 1000) {
idFace = faces_previous[j].getId();
break;
}
}
if (idFace == Id) Id++;
faces[i].setFace(idFace, mid, eyesDis, confidence, pose, System.currentTimeMillis());
faces_previous[i].set(faces[i].getId(), faces[i].getMidEye(), faces[i].eyesDistance(), faces[i].getConfidence(), faces[i].getPose(), faces[i].getTime());
if (facesCount.get(idFace) == null) {
facesCount.put(idFace, 0);
Log.d("Photo Taken!!", "PHOTO");
Log.e("Photo Taken!!", "PHOTO");
} else {
int count = facesCount.get(idFace) + 1;
if (count <= 5) {
Log.e("Photo Taken!!", "PHOTO");
Handler handler = new Handler(Looper.getMainLooper());
Log.e("Photo Taken!!", "PHOTO");
}
}
}
}
}
Log.e("Hello there", "Hi");
Thread thread = new Thread() {
@Override
public void run() {
//send face to FaceView to draw rect
mFaceView.setFaces(faces);
//calculate FPS
end = System.currentTimeMillis();
counter++;
double time = (double) (end - start) / 1000;
if (time != 0)
fps = counter / time;
mFaceView.setFPS(fps);
if (counter == (Integer.MAX_VALUE - 1000))
counter = 0;
isThreadWorking = false;
}
};
thread.start();
return null;
}
@Override
protected void onPostExecute(String result) {
super.onPostExecute(result);
}
}
}