在一个专业项目中,我需要从照片中识别出某个人,因此我为每张检测到的脸获取一个 CIFaceFeature。
然后我想为每个检测到的面部获取一个唯一的标识值。
我看了一下 trackingID,但它总是等于 0。我还尝试根据眼睛和嘴巴的位置做一些计算,但这些数值会随脸部与 iPhone 之间距离的变化而变化,无法作为稳定的标识。
/// Runs CoreImage face detection on the given image, logs the detected
/// feature positions, and adds a marker subview for the face bounds and
/// each available eye/mouth position.
///
/// @param facePicture The image to scan for faces. Must have a valid CGImage.
///
/// NOTE(review): `trackingID` is always 0 here for two reasons:
///   1. The detector must be created with `CIDetectorTracking: @YES` in its
///      options for tracking IDs to be assigned at all.
///   2. Tracking IDs are only meaningful across consecutive *video* frames;
///      for a single still image they do not identify a person. They cannot
///      be used to recognize the same face across different photos.
- (void)detectForFacesInUIImage:(UIImage *)facePicture
{
    CIImage *image = [CIImage imageWithCGImage:facePicture.CGImage];
    // `detector` is created elsewhere (the original creation is shown below).
    // To get non-zero tracking IDs on video frames, it would need the
    // CIDetectorTracking option, e.g.:
    // CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace
    //     context:nil
    //     options:@{CIDetectorAccuracy : CIDetectorAccuracyLow,
    //               CIDetectorTracking : @YES}];
    NSArray *features = [detector featuresInImage:image];
    for (CIFaceFeature *faceObject in features)
    {
        // CoreImage uses a bottom-left origin; UIKit uses top-left.
        // Flip the Y coordinate so the marker frame lands correctly in UIKit.
        CGRect modifiedFaceBounds = faceObject.bounds;
        modifiedFaceBounds.origin.y = facePicture.size.height - faceObject.bounds.size.height - faceObject.bounds.origin.y;

        CGPoint c = [faceObject leftEyePosition];
        NSLog(@"left eye position %f %f = %f", c.x, c.y, c.y - c.x);
        c = [faceObject rightEyePosition];
        NSLog(@"right eye position %f %f = %f", c.x, c.y, c.y - c.x);
        c = [faceObject mouthPosition];
        // Fixed log label: was "mouth eye position".
        NSLog(@"mouth position %f %f = %f", c.x, c.y, c.y - c.x);
        CGRect d = [faceObject bounds];
        // Fixed log label: these values are the bounds' width/height, not a position.
        NSLog(@"bounds size %f %f", d.size.width, d.size.height);
        NSLog(@"trackID = %d", faceObject.trackingID);
        NSLog(@"\n\n");

        [self addSubViewWithFrame:modifiedFaceBounds];

        // NOTE(review): the feature point is used as the rect *origin*, so each
        // 10x10 marker is offset down-right by half its size from the feature.
        // Subtract 5 from x/y to center the marker — confirm against how
        // addSubViewWithFrame: positions its view before changing.
        if (faceObject.hasLeftEyePosition)
        {
            CGRect leftEye = CGRectMake(faceObject.leftEyePosition.x,
                                        facePicture.size.height - faceObject.leftEyePosition.y,
                                        10, 10);
            [self addSubViewWithFrame:leftEye];
        }
        if (faceObject.hasRightEyePosition)
        {
            CGRect rightEye = CGRectMake(faceObject.rightEyePosition.x,
                                         facePicture.size.height - faceObject.rightEyePosition.y,
                                         10, 10);
            [self addSubViewWithFrame:rightEye];
        }
        if (faceObject.hasMouthPosition)
        {
            CGRect mouth = CGRectMake(faceObject.mouthPosition.x,
                                      facePicture.size.height - faceObject.mouthPosition.y,
                                      10, 10);
            [self addSubViewWithFrame:mouth];
        }
    }
}