我尝试制作应用程序以跟踪相机检测到的面部数量,但是,通过logcat,我发现程序从不调用onPreviewFrame方法,我不知道原因。
public class MainActivity extends Activity {
    // Application context handed to the face-detection helper (used to query
    // the display rotation inside FDR.setCameraDisplayOrientation()).
    Context context = null;
    // Background worker that polls the detector once per second.
    BackGround bg = new BackGround();
    // Face-detection helper wrapping the Camera / FaceDetector APIs.
    FDR fdr = new FDR();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        context = this.getApplicationContext();
        bg.execute();
    }

    /**
     * Worker that repeatedly kicks off a face-detection pass and logs the
     * latest face count, until the task is cancelled. Runs off the UI
     * thread because camera setup and bitmap decoding are slow.
     */
    private class BackGround extends AsyncTask<Void, Integer, Void> {
        @Override
        protected Void doInBackground(Void... params) {
            while (!isCancelled()) {
                fdr.startFaceDetection(context);
                Log.d("result", String.valueOf(fdr.getnumberOfFaceDetected()));
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    // cancel(true) interrupts this thread. Restore the
                    // interrupt flag and leave the loop promptly instead of
                    // swallowing the interrupt and sleeping again.
                    Thread.currentThread().interrupt();
                    break;
                }
            }
            fdr.FreeCamera();
            return null;
        }

        @Override
        protected void onProgressUpdate(Integer... progress) {
            // No UI progress reporting is needed; results are logged only.
        }
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        bg.cancel(true);
        // FreeCamera() is null-checked, so a second call is a no-op.
        // NOTE(review): this may run concurrently with the worker's own
        // FreeCamera()/startFaceDetection() calls — consider synchronizing
        // access to FDR if tearing down while the worker is mid-pass.
        fdr.FreeCamera();
    }
}
以下类的代码基本上是来自其他网站的示例代码。
public class FDR {
protected Camera mCameraDevice = null;
private long mScanBeginTime = 0;
private long mScanEndTime = 0;
private long mSpecPreviewTime = 0;
private long mSpecStopTime = 0;
private long mSpecCameraTime = 0;
private static final String TAG = null;
private int orientionOfCamera ;
private Context con=null;
private int numberOfFaceDetected;
public void startFaceDetection(Context cont) {
con=cont;
try {
FreeCamera();
mCameraDevice = Camera.open(1);
if (mCameraDevice != null)
Log.i(TAG, "open cameradevice success! ");
} catch (Exception e) {
mCameraDevice = null;
Log.w(TAG, "open cameraFail");
return;
}
Log.i(TAG, "startFaceDetection");
Camera.Parameters parameters = mCameraDevice.getParameters();
setCameraDisplayOrientation(1,mCameraDevice);
mCameraDevice.setPreviewCallback(new PreviewCallback(){
public void onPreviewFrame(byte[] data, Camera camera){
mScanEndTime = System.currentTimeMillis();
mSpecPreviewTime = mScanEndTime - mScanBeginTime;
Log.i(TAG, "onPreviewFrame and mSpecPreviewTime = " + String.valueOf(mSpecPreviewTime));
Camera.Size localSize = camera.getParameters().getPreviewSize();
YuvImage localYuvImage = new YuvImage(data, 17, localSize.width, localSize.height, null);
ByteArrayOutputStream localByteArrayOutputStream = new ByteArrayOutputStream();
localYuvImage.compressToJpeg(new Rect(0, 0, localSize.width, localSize.height), 80, localByteArrayOutputStream);
byte[] arrayOfByte = localByteArrayOutputStream.toByteArray();
FreeCamera();
StoreByteImage(arrayOfByte);
}
});
mCameraDevice.startPreview();
Log.i(TAG,"StartPreviewed");
mScanBeginTime = System.currentTimeMillis();
}
public void setCameraDisplayOrientation(int paramInt, Camera paramCamera){
CameraInfo info = new CameraInfo();
Camera.getCameraInfo(paramInt, info);
int rotation = ((WindowManager)con.getSystemService("window")).getDefaultDisplay().getRotation();
int degrees = 0;
Log.i(TAG,"getRotation's rotation is " + String.valueOf(rotation));
switch (rotation) {
case Surface.ROTATION_0: degrees = 0; break;
case Surface.ROTATION_90: degrees = 90; break;
case Surface.ROTATION_180: degrees = 180; break;
case Surface.ROTATION_270: degrees = 270; break;
}
orientionOfCamera = info.orientation;
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (info.orientation - degrees + 360) % 360;
}
paramCamera.setDisplayOrientation(result);
}
public void StoreByteImage(byte[] paramArrayOfByte){
mSpecStopTime = System.currentTimeMillis();
mSpecCameraTime = mSpecStopTime - mScanBeginTime;
Log.i(TAG, "StoreByteImage and mSpecCameraTime is " + String.valueOf(mSpecCameraTime));
BitmapFactory.Options localOptions = new BitmapFactory.Options();
Bitmap localBitmap1 = BitmapFactory.decodeByteArray(paramArrayOfByte, 0, paramArrayOfByte.length, localOptions);
int i = localBitmap1.getWidth();
int j = localBitmap1.getHeight();
Matrix localMatrix = new Matrix();
//int k = cameraResOr;
Bitmap localBitmap2 = null;
FaceDetector localFaceDetector = null;
switch(orientionOfCamera){
case 0:
localFaceDetector = new FaceDetector(i, j, 1);
localMatrix.postRotate(0.0F, i / 2, j / 2);
localBitmap2 = Bitmap.createBitmap(i, j, Bitmap.Config.RGB_565);
break;
case 90:
localFaceDetector = new FaceDetector(j, i, 1);
localMatrix.postRotate(-270.0F, j / 2, i / 2);
localBitmap2 = Bitmap.createBitmap(i, j, Bitmap.Config.RGB_565);
break;
case 180:
localFaceDetector = new FaceDetector(i, j, 1);
localMatrix.postRotate(-180.0F, i / 2, j / 2);
localBitmap2 = Bitmap.createBitmap(i, j, Bitmap.Config.RGB_565);
break;
case 270:
localFaceDetector = new FaceDetector(j, i, 1);
localMatrix.postRotate(-90.0F, j / 2, i / 2);
localBitmap2 = Bitmap.createBitmap(j, i, Bitmap.Config.RGB_565);
break;
}
FaceDetector.Face[] arrayOfFace = new FaceDetector.Face[1];
Paint localPaint1 = new Paint();
Paint localPaint2 = new Paint();
localPaint1.setDither(true);
localPaint2.setColor(-65536);
localPaint2.setStyle(Paint.Style.STROKE);
localPaint2.setStrokeWidth(2.0F);
Canvas localCanvas = new Canvas();
localCanvas.setBitmap(localBitmap2);
localCanvas.setMatrix(localMatrix);
localCanvas.drawBitmap(localBitmap1, 0.0F, 0.0F, localPaint1);
numberOfFaceDetected = localFaceDetector.findFaces(localBitmap2, arrayOfFace);
localBitmap2.recycle();
localBitmap1.recycle();
}
public int getnumberOfFaceDetected(){
return numberOfFaceDetected;
}
public void FreeCamera() {
if (mCameraDevice != null) {
// Call stopPreview() to stop updating the preview surface.
// Important: Call release() to release the camera for use by other
// applications. Applications should release the camera immediately
// during onPause() and re-open() it during onResume()).
mCameraDevice.stopPreview();
mCameraDevice.release();
mCameraDevice = null;
}
}
}
答案 0（得分：0）：
要接收预览帧回调，您必须先调用 setPreviewDisplay()（或 setPreviewTexture()）为相机附加一个预览目标。
考虑在循环之前打开一次相机,并使用setOneShotPreviewCallback()运行面部检测
将YUV预览帧转换为RGB位图比使用JPEG中间码更有效。
Android 相机具有相当快速且强大的内置面部检测器，您可以通过调用 startFaceDetection() 启动它，然后通过回调接收检测结果。