我为我的项目制作了一个Android应用程序。 我的应用目的是检测眼睛眨眼。
我使用移动视觉API。 我正在使用关于眨眼检测代码的移动视觉面部检测的示例代码。
我的第一个问题是移动视觉API可以用于后台服务吗?
我的第二个问题是将相机api与移动视觉API相结合的原因是什么? 相机api可以用于后台服务,但它不能用于移动视觉人脸检测类(FaceDetector)。
我的应用程序的服务代码是
public class ServiceClass extends Service{
private static final String TAG = "FaceTracker";
public CameraSource mCameraSource = null;
public CameraSourcePreview mPreview;
private GraphicOverlay mGraphicOverlay;
private static final int RC_HANDLE_GMS = 9001;
// permission request codes need to be < 256
private static final int RC_HANDLE_CAMERA_PERM = 2;
static int x=0;
//Camera variables
//a surface holder
private SurfaceHolder sHolder;
//a variable to control the camera
private Camera mCamera;
//the camera parameters
private Camera.Parameters parameters;
/** Called when the activity is first created. */
@Override
public void onCreate()
{
super.onCreate();
Log.d("SERVICE", "onCreate" );
// Check for the camera permission before accessing the camera. If the
// permission is not granted yet, request permission.
int rc = ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
if (rc == PackageManager.PERMISSION_GRANTED) {
createCameraSource();
} else {
//requestCameraPermission();
Toast.makeText(getApplicationContext(), "Need a permission", Toast.LENGTH_LONG).show();
}
}
/**
* Creates and starts the camera. Note that this uses a higher resolution in comparison
* to other detection examples to enable the barcode detector to detect small barcodes
* at long distances.
*/
public void createCameraSource() {
Log.d("SERVICE", "createCameraSource" );
Context context = getApplicationContext();
FaceDetector detector = new FaceDetector.Builder(context)
.setProminentFaceOnly(true) //Detection for front camera
.setClassificationType(FaceDetector.ALL_CLASSIFICATIONS)
.build();
detector.setProcessor(
new LargestFaceFocusingProcessor(detector, new BlinkTracker()));
//new MultiProcessor.Builder<>(new GraphicFaceTrackerFactory())
//.build());
if (!detector.isOperational()) {
// Note: The first time that an app using face API is installed on a device, GMS will
// download a native library to the device in order to do detection. Usually this
// completes before the app is run for the first time. But if that download has not yet
// completed, then the above call will not detect any faces.
//
// isOperational() can be used to check if the required native library is currently
// available. The detector will automatically become operational once the library
// download completes on device.
Log.w(TAG, "Face detector dependencies are not yet available.");
}
mCameraSource = new CameraSource.Builder(context, detector)
.setRequestedPreviewSize(640, 480)
.setFacing(CameraSource.CAMERA_FACING_FRONT)//using front camera
.setRequestedFps(30.0f)
.build();
Log.i("createCameraSource",mCameraSource.toString());
}
/**
* starts the camera.
*/
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
Toast.makeText(this, "service starting", Toast.LENGTH_SHORT).show();
startCameraSource();
// If we get killed, after returning from here, restart
return START_STICKY;
}
/*
@Override
public void onStart(Intent intent, int startId) {
// TODO Auto-generated method stub
super.onStart(intent, startId);
Log.d("SERVICE", "onStart" );
startCameraSource();
//startCameraSource();
// mCamera = Camera.open(1);
}*/
//==============================================================================================
// Camera Source Preview
//==============================================================================================
/**
* Starts or restarts the camera source, if it exists. If the camera source doesn't exist yet
* (e.g., because onResume was called before the camera source was created), this will be called
* again when the camera source is created.
*/
public void startCameraSource() {
Log.d("SERVICE", "startCameraSource" );
// check that the device has play services available.
int code = GoogleApiAvailability.getInstance().isGooglePlayServicesAvailable(
getApplicationContext());
/*if (code != ConnectionResult.SUCCESS) {
Dialog dlg =
GoogleApiAvailability.getInstance().getErrorDialog(this, code, RC_HANDLE_GMS);
dlg.show();
}*/
if (mCameraSource != null) {
try {
Log.d("startCameraSource", "log!" );
Log.i("startCameraSource1",mCameraSource.toString());
mPreview.start(mCameraSource);
} catch (IOException e) {
Log.e(TAG, "Unable to start camera source.", e);
mCameraSource.release();
mCameraSource = null;
}
}
}
@Override
public IBinder onBind(Intent intent) {
// TODO Auto-generated method stub
return null;
}
//==============================================================================================
// Eye Blink Tracker
//==============================================================================================
public class BlinkTracker extends Tracker<Face> {
private final double OPEN_THRESHOLD = 0.85;
private final double CLOSE_THRESHOLD = 0.50;
private int state = 0;
public void onUpdate(Detector.Detections<Face> detections, Face face) {
Log.i("BlinkTracker", "eye tracker start");
float left = face.getIsLeftEyeOpenProbability();
float right = face.getIsRightEyeOpenProbability();
if ((left == Face.UNCOMPUTED_PROBABILITY) ||
(right == Face.UNCOMPUTED_PROBABILITY)) {
// At least one of the eyes was not detected.
return;
}
switch (state) {
case 0:
if ((left > OPEN_THRESHOLD) && (right > OPEN_THRESHOLD)) {
// Both eyes are initially open
Log.i("BlinkTracker", "eye open");
state = 1;
}
break;
case 1:
if ((left < CLOSE_THRESHOLD) && (right < CLOSE_THRESHOLD)) {
// Both eyes become closed
Log.i("BlinkTracker", "blink occurred!");
state = 0;
}
break;
/*case 2:
if ((left > OPEN_THRESHOLD) && (right > OPEN_THRESHOLD)) {
// Both eyes are open again
Log.i("BlinkTracker", "blink occurred!");
state = 0;
}
break;*/
}
}
}
}
logcat输出:
07-13 09:55:04.716 17707-17707/com.hyechon.etrackermv E/AndroidRuntime: FATAL EXCEPTION: main
Process: com.hyechon.etrackermv, PID: 17707
java.lang.RuntimeException: Unable to start service com.hyechon.etrackermv.ServiceClass@7566889 with Intent { cmp=com.hyechon.etrackermv/.ServiceClass }: java.lang.NullPointerException: Attempt to invoke virtual method 'void com.hyechon.etrackermv.camera.CameraSourcePreview.start(com.google.android.gms.vision.CameraSource)' on a null object reference
at android.app.ActivityThread.handleServiceArgs(ActivityThread.java:4079)
at android.app.ActivityThread.access$2400(ActivityThread.java:221)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1897)
at android.os.Handler.dispatchMessage(Handler.java:102)
at android.os.Looper.loop(Looper.java:158)
at android.app.ActivityThread.main(ActivityThread.java:7225)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1230)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1120)
Caused by: java.lang.NullPointerException: Attempt to invoke virtual method 'void com.hyechon.etrackermv.camera.CameraSourcePreview.start(com.google.android.gms.vision.CameraSource)' on a null object reference
at com.hyechon.etrackermv.ServiceClass.startCameraSource(ServiceClass.java:255)
at com.hyechon.etrackermv.ServiceClass.onStartCommand(ServiceClass.java:136)
at android.app.ActivityThread.handleServiceArgs(ActivityThread.java:4062)
at android.app.ActivityThread.access$2400(ActivityThread.java:221)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1897)
at android.os.Handler.dispatchMessage(Handler.java:102)
at android.os.Looper.loop(Looper.java:158)
at android.app.ActivityThread.main(ActivityThread.java:7225)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1230)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1120)
07-13 09:55:05.666 17707-17777/com.hyechon.etrackermv I/Vision: 连接错误: null
07-13 10:00:04.761 17707-17707/com.hyechon.etrackermv I/Process: 发送信号。PID: 17707 SIG: 9