I am using the Android "OpenCV Samples - Face Detection" sample to detect faces, and it works well. Now I would like to add eye detection on top of the face detection. Is that possible? If so, could anyone point me to a sample that detects eyes? I have searched the internet but could not find one.
Answer 0 (score: 1)
You can use this code to detect the eyes of each detected face:
public void setFace() {
    FaceDetector fd;
    FaceDetector.Face[] faces = new FaceDetector.Face[MAX_FACES];
    PointF eyescenter = new PointF();
    float eyesdist = 0.0f;
    int[] fpx = null;
    int[] fpy = null;

    // FaceDetector requires an RGB_565 bitmap
    mFaceBitmap = mBitmap.copy(Bitmap.Config.RGB_565, true);
    mFaceWidth = mFaceBitmap.getWidth();
    mFaceHeight = mFaceBitmap.getHeight();

    try {
        Log.e("setFace(): ", "try");
        fd = new FaceDetector(mFaceWidth, mFaceHeight, MAX_FACES);
        count = fd.findFaces(mFaceBitmap, faces);
    } catch (Exception e) {
        return;
    }

    // check if we detected any faces
    if (count > 0) {
        // two points per face: left eye and right eye
        fpx = new int[count * 2];
        fpy = new int[count * 2];

        for (int i = 0; i < count; i++) {
            try {
                Log.e("setFace(): ", "loop");
                faces[i].getMidPoint(eyescenter);
                eyesdist = faces[i].eyesDistance();

                // left eye: half the eye distance to the left of the midpoint
                fpx[2 * i] = (int) (eyescenter.x - eyesdist / 2);
                fpy[2 * i] = (int) eyescenter.y;

                // right eye: half the eye distance to the right of the midpoint
                fpx[2 * i + 1] = (int) (eyescenter.x + eyesdist / 2);
                fpy[2 * i + 1] = (int) eyescenter.y;
            } catch (Exception e) {
            }
        }
        MainActivity.clicked = true;
    }
}
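Note that this answer uses android.media.FaceDetector, which only reports the midpoint between the eyes and the eye distance, so the left/right eye positions above are estimated as half the eye distance to either side of that midpoint. As a minimal sketch of how those coordinates might be consumed, here is a hypothetical overlay view that draws a circle at each estimated eye position; the class name and the way fpx/fpy are passed in are illustrative, not part of the original answer:

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.view.View;

// Hypothetical overlay view: draws a small circle at each estimated eye position.
// fpx/fpy are assumed to be the arrays filled by setFace() above (two entries per face).
public class EyeOverlayView extends View {
    private final int[] fpx;
    private final int[] fpy;
    private final Paint paint = new Paint();

    public EyeOverlayView(Context context, int[] fpx, int[] fpy) {
        super(context);
        this.fpx = fpx;
        this.fpy = fpy;
        paint.setColor(Color.GREEN);
        paint.setStyle(Paint.Style.STROKE);
        paint.setStrokeWidth(3f);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        for (int i = 0; i < fpx.length; i++) {
            canvas.drawCircle(fpx[i], fpy[i], 10f, paint);
        }
    }
}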
Answer 1 (score: 0)
I think it is possible. You need to load another cascade (a nested cascade) and call detectMultiScale on the ROI detected by the face cascade. Have a look at the facedetect.cpp example in the C samples.
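To make the nested-cascade idea concrete, here is a minimal sketch in the same OpenCV 2.4 Java API used in the other answers. It assumes a face cascade and an eye cascade (for example haarcascade_eye.xml) have already been loaded into CascadeClassifier objects, and the detection parameters are only illustrative:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.objdetect.CascadeClassifier;

// Detect faces on the full grayscale frame, then detect eyes only inside each face ROI.
void detectFacesAndEyes(Mat gray, Mat rgba,
        CascadeClassifier faceCascade, CascadeClassifier eyeCascade, int minFaceSize) {
    MatOfRect faces = new MatOfRect();
    faceCascade.detectMultiScale(gray, faces, 1.1, 2, 2,
            new Size(minFaceSize, minFaceSize), new Size());

    for (Rect face : faces.toArray()) {
        // the "nested cascade": restrict the eye search to the face rectangle
        Mat faceROI = gray.submat(face);
        MatOfRect eyes = new MatOfRect();
        eyeCascade.detectMultiScale(faceROI, eyes, 1.1, 2, 2,
                new Size(20, 20), new Size());

        for (Rect eye : eyes.toArray()) {
            // eye coordinates are relative to the ROI, so offset them by the face origin
            Point tl = new Point(face.x + eye.x, face.y + eye.y);
            Point br = new Point(face.x + eye.x + eye.width, face.y + eye.y + eye.height);
            Core.rectangle(rgba, tl, br, new Scalar(0, 255, 0, 255), 2);
        }
    }
}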
Answer 2 (score: 0)
Yes, you can do this. There is a generic eye detector as well as dedicated left-eye and right-eye detectors.
You can reuse the same Haar cascade code you use for the face and simply swap in an eye cascade.
In practice, I have found it best to try both approaches: start with the generic eye detector, and if nothing is found in a reasonable position, move on to the left-eye and right-eye detectors.
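As a rough illustration of that fallback strategy, a sketch (again in the 2.4-era Java API) might look like the following. It assumes the generic cascade is haarcascade_eye.xml and the side-specific ones are haarcascade_lefteye_2splits.xml / haarcascade_righteye_2splits.xml, which ship with OpenCV; the "reasonable position" check here is a deliberately crude placeholder:

import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Size;
import org.opencv.objdetect.CascadeClassifier;

// Try the generic eye cascade first; if nothing plausible is found inside the face ROI,
// fall back to the dedicated left-eye and right-eye cascades and merge their results.
MatOfRect detectEyes(Mat faceROI, CascadeClassifier genericEyes,
        CascadeClassifier leftEye, CascadeClassifier rightEye) {
    MatOfRect eyes = new MatOfRect();
    genericEyes.detectMultiScale(faceROI, eyes, 1.1, 2, 2, new Size(20, 20), new Size());

    // crude plausibility check: at least one eye should sit in the upper half of the face
    boolean plausible = false;
    for (Rect e : eyes.toArray()) {
        if (e.y + e.height / 2 < faceROI.rows() / 2) {
            plausible = true;
            break;
        }
    }
    if (plausible) {
        return eyes;
    }

    // fallback: run the side-specific detectors and merge what they find
    MatOfRect left = new MatOfRect();
    MatOfRect right = new MatOfRect();
    leftEye.detectMultiScale(faceROI, left, 1.1, 2, 2, new Size(20, 20), new Size());
    rightEye.detectMultiScale(faceROI, right, 1.1, 2, 2, new Size(20, 20), new Size());

    List<Rect> merged = new ArrayList<Rect>(left.toList());
    merged.addAll(right.toList());
    MatOfRect result = new MatOfRect();
    result.fromList(merged);
    return result;
}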
Answer 3 (score: 0)
You can go through this code:
public FdView(Context context) {
    super(context);
    this.context = context;
    try {
        // ---------------- load the face classifier ----------------
        InputStream is = context.getResources().openRawResource(
                R.raw.lbpcascade_frontalface);
        File cascadeDir = context.getDir("cascade", Context.MODE_PRIVATE);
        mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
        FileOutputStream os = new FileOutputStream(mCascadeFile);

        byte[] buffer = new byte[4096];
        int bytesRead;
        while ((bytesRead = is.read(buffer)) != -1) {
            os.write(buffer, 0, bytesRead);
        }
        is.close();
        os.close();

        // ---------------- load the right-eye classifier ----------------
        // (note: both eye classifiers below are copied from the same
        // haarcascade_lefteye_2splits raw resource)
        InputStream iser = context.getResources().openRawResource(
                R.raw.haarcascade_lefteye_2splits);
        File cascadeDirER = context.getDir("cascadeER", Context.MODE_PRIVATE);
        File cascadeFileER = new File(cascadeDirER, "haarcascade_eye_right.xml");
        FileOutputStream oser = new FileOutputStream(cascadeFileER);

        byte[] bufferER = new byte[4096];
        int bytesReadER;
        while ((bytesReadER = iser.read(bufferER)) != -1) {
            oser.write(bufferER, 0, bytesReadER);
        }
        iser.close();
        oser.close();

        // ---------------- load the left-eye classifier ----------------
        InputStream isel = context.getResources().openRawResource(
                R.raw.haarcascade_lefteye_2splits);
        File cascadeDirEL = context.getDir("cascadeEL", Context.MODE_PRIVATE);
        File cascadeFileEL = new File(cascadeDirEL, "haarcascade_eye_left.xml");
        FileOutputStream osel = new FileOutputStream(cascadeFileEL);

        byte[] bufferEL = new byte[4096];
        int bytesReadEL;
        while ((bytesReadEL = isel.read(bufferEL)) != -1) {
            osel.write(bufferEL, 0, bytesReadEL);
        }
        isel.close();
        osel.close();

        // ---------------- create the classifiers ----------------
        mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
        mCascadeER = new CascadeClassifier(cascadeFileER.getAbsolutePath());
        mCascadeEL = new CascadeClassifier(cascadeFileEL.getAbsolutePath());

        if (mJavaDetector.empty() || mCascadeER.empty() || mCascadeEL.empty()) {
            Log.e(TAG, "Failed to load cascade classifier");
            mJavaDetector = null;
            mCascadeER = null;
            mCascadeEL = null;
        } else {
            Log.i(TAG, "Loaded cascade classifier from "
                    + mCascadeFile.getAbsolutePath());
        }

        mNativeDetector = new DetectionBasedTracker(
                mCascadeFile.getAbsolutePath(), 0);

        // the temporary files and directories are no longer needed
        cascadeDir.delete();
        cascadeFileER.delete();
        cascadeDirER.delete();
        cascadeFileEL.delete();
        cascadeDirEL.delete();
    } catch (IOException e) {
        e.printStackTrace();
        Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
    }
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
    synchronized (this) {
        // initialize Mats before usage
        mGray = new Mat();
        mRgba = new Mat();
    }
    super.surfaceCreated(holder);
}
@Override
protected Bitmap processFrame(VideoCapture capture) {
    capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

    // compute the minimum face size once, relative to the frame height
    if (mAbsoluteFaceSize == 0) {
        int height = mGray.rows();
        if (Math.round(height * mRelativeFaceSize) > 0) {
            mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
        }
        mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
    }

    MatOfRect faces = new MatOfRect();

    if (mDetectorType == JAVA_DETECTOR) {
        if (mJavaDetector != null)
            mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2,
                    new Size(mAbsoluteFaceSize, mAbsoluteFaceSize),
                    new Size());

        /*
         * if (mZoomCorner == null || mZoomWindow == null)
         *     CreateAuxiliaryMats();
         */

        Rect[] facesArray = faces.toArray();
        for (int i = 0; i < facesArray.length; i++) {
            Rect r = facesArray[i];
            Core.rectangle(mGray, r.tl(), r.br(), new Scalar(255, 0, 255, 0), 3);
            Core.rectangle(mRgba, r.tl(), r.br(), new Scalar(46, 139, 87, 5), 3);

            // eyearea = new Rect(r.x + r.width / 8, (int) (r.y + (r.height / 4.5)),
            //         r.width - 10 * r.width / 8, (int) (r.height / 3.0));
            // Core.rectangle(mRgba, eyearea.tl(), eyearea.br(), new Scalar(255, 0, 0, 255), 3);

            // split the upper part of the face into a right-eye and a left-eye region
            Rect eyearea_right = new Rect(r.x + r.width / 16,
                    (int) (r.y + (r.height / 4.5)),
                    (r.width - 2 * r.width / 16) / 2, (int) (r.height / 3.0));
            Rect eyearea_left = new Rect(r.x + r.width / 16 + (r.width - 2 * r.width / 16) / 2,
                    (int) (r.y + (r.height / 4.5)),
                    (r.width - 2 * r.width / 16) / 2, (int) (r.height / 3.0));

            // outer rectangles
            // Core.rectangle(mRgba, eyearea_left.tl(), eyearea_left.br(), new Scalar(0, 255, 255, 255), 3);
            // Core.rectangle(mRgba, eyearea_right.tl(), eyearea_right.br(), new Scalar(0, 255, 255, 255), 3);

            teplateR = get_template(mCascadeER, eyearea_right, 3);
            teplateL = get_template(mCascadeEL, eyearea_left, 3);
            learn_frames++;

            /*
             * match_value = match_eye(eyearea_right, teplateR, FdActivity.method);
             * match_value = match_eye(eyearea_left, teplateL, FdActivity.method);
             */

            /*
             * Imgproc.resize(mRgba.submat(eyearea_left), mZoomWindow2, mZoomWindow2.size());
             * Imgproc.resize(mRgba.submat(eyearea_right), mZoomWindow, mZoomWindow.size());
             */
        }
    } else if (mDetectorType == NATIVE_DETECTOR) {
        if (mNativeDetector != null)
            mNativeDetector.detect(mGray, faces);
    } else {
        Log.e(TAG, "Detection method is not selected!");
    }

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
        Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(),
                FACE_RECT_COLOR, 3);

    Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(),
            Bitmap.Config.ARGB_8888);
    try {
        Utils.matToBitmap(mRgba, bmp);
    } catch (Exception e) {
        Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
        bmp.recycle();
        bmp = null;
    }
    return bmp;
}
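get_template() and match_eye() are not shown in this snippet (they belong to the rest of the original FdView class, where the commented-out match_eye calls suggest the detected eye is also kept as a template for later matching). If you only need the eye rectangles, a hypothetical stand-in for that step, using nothing beyond the mCascadeER / mCascadeEL classifiers loaded above, could look like this; the helper name findEyeIn is made up for illustration and is not the original get_template:

// Hypothetical helper (not the original get_template): runs an eye cascade inside the
// given eye area of mGray and returns the detected eye rectangle in frame coordinates,
// or null if nothing was found. Requires org.opencv.objdetect.Objdetect for the flag.
private Rect findEyeIn(CascadeClassifier eyeCascade, Rect eyeArea) {
    Mat roi = mGray.submat(eyeArea);
    MatOfRect eyes = new MatOfRect();
    eyeCascade.detectMultiScale(roi, eyes, 1.15, 2,
            Objdetect.CASCADE_FIND_BIGGEST_OBJECT, new Size(20, 20), new Size());

    for (Rect e : eyes.toArray()) {
        // translate from ROI coordinates back to full-frame coordinates
        return new Rect(eyeArea.x + e.x, eyeArea.y + e.y, e.width, e.height);
    }
    return null;
}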
Hope this helps.