I am calling CIDetector many times:
-(NSArray *)detect:(UIImage *)inimage
{
    UIImage *inputimage = inimage;
    UIImageOrientation exifOrientation = inimage.imageOrientation;
    NSNumber *orientation = [NSNumber numberWithInt:exifOrientation];
    NSDictionary *imageOptions = [NSDictionary dictionaryWithObject:orientation
                                                             forKey:CIDetectorImageOrientation];
    CIImage *ciimage = [CIImage imageWithCGImage:inputimage.CGImage options:imageOptions];

    NSDictionary *detectorOptions = [NSDictionary dictionaryWithObject:orientation
                                                                forKey:CIDetectorImageOrientation];
    NSArray *features = [self.detector featuresInImage:ciimage options:detectorOptions];
    if (features.count == 0)
    {
        PXLog(@"no face found");
    }
    ciimage = nil;

    NSMutableArray *returnArray = [NSMutableArray new];
    for (CIFaceFeature *feature in features)
    {
        // Core Image uses a bottom-left origin, so flip the y-axis to get
        // UIKit's top-left-origin coordinates.
        CGRect rect = feature.bounds;
        CGRect r = CGRectMake(rect.origin.x,
                              inputimage.size.height - rect.origin.y - rect.size.height,
                              rect.size.width,
                              rect.size.height);
        FaceFeatures *ff = [[FaceFeatures alloc] initWithLeftEye:CGPointMake(feature.leftEyePosition.x,
                                                                             inputimage.size.height - feature.leftEyePosition.y)
                                                        rightEye:CGPointMake(feature.rightEyePosition.x,
                                                                             inputimage.size.height - feature.rightEyePosition.y)
                                                           mouth:CGPointMake(feature.mouthPosition.x,
                                                                             inputimage.size.height - feature.mouthPosition.y)];
        Face *ob = [[Face alloc] initFaceInRect:r withFaceFeatures:ff];
        [returnArray addObject:ob];
    }
    features = nil;

    return returnArray;
}
-(CIContext *)context
{
    if (!_context) {
        _context = [CIContext contextWithOptions:nil];
    }
    return _context;
}
-(CIDetector *)detector
{
    if (!_detector)
    {
        // 1 for high 0 for low
#warning not checking for fast/slow detection operation
        NSString *str = @"fast"; //[SettingsFunctions retrieveFromUserDefaults:@"face_detection_accuracy"];
        if ([str isEqualToString:@"slow"])
        {
            //DDLogInfo(@"faceDetection: -I- Setting accuracy to high");
            _detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                           context:nil
                                           options:[NSDictionary dictionaryWithObject:CIDetectorAccuracyHigh
                                                                               forKey:CIDetectorAccuracy]];
        } else {
            //DDLogInfo(@"faceDetection: -I- Setting accuracy to low");
            _detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                           context:nil
                                           options:[NSDictionary dictionaryWithObject:CIDetectorAccuracyLow
                                                                               forKey:CIDetectorAccuracy]];
        }
    }
    return _detector;
}
But after running into various memory problems, Instruments suggests that the array returned by

NSArray *features = [self.detector featuresInImage:ciimage options:detectorOptions];

is never released.

Is there a memory leak in my code?
Answer 0 (score: 4)
I ran into the same problem, and it appears to be a bug (or perhaps by design, for caching purposes) in reusing a CIDetector.

I was able to get around it by not reusing the CIDetector; instead I instantiate one as needed and then release it when detection is done (or, in ARC terms, just don't keep a reference around). There is some cost to this, but if, as you say, you are running detection on a background thread, that cost is probably worth it compared to unbounded memory growth. A sketch of the workaround follows.
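Here is a minimal Objective-C sketch of that workaround, assuming ARC; the low-accuracy option mirrors the question's getter, and the Face-building code from the question is elided:

- (NSArray *)detect:(UIImage *)inimage
{
    // Throwaway detector: under ARC it is deallocated when the local
    // reference goes out of scope, so nothing accumulates across calls.
    CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                              context:nil
                                              options:@{CIDetectorAccuracy : CIDetectorAccuracyLow}];
    CIImage *ciimage = [CIImage imageWithCGImage:inimage.CGImage];
    NSNumber *orientation = [NSNumber numberWithInt:inimage.imageOrientation];
    NSArray *features = [detector featuresInImage:ciimage
                                          options:@{CIDetectorImageOrientation : orientation}];
    // ... build and return the Face objects exactly as in the question ...
    return features;
}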
Perhaps a better solution, if you are detecting multiple images in a row, is to create one detector and use it for all of them (or, if the growth gets too large, release it and create a new one every N images). You'll have to experiment to see what N should be; a sketch of that variant follows.
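A sketch of the every-N variant under the same assumptions; the images array and the value of kImagesPerDetector are illustrative:

static const NSUInteger kImagesPerDetector = 50; // hypothetical N; tune experimentally

CIDetector *detector = nil;
NSUInteger processed = 0;
for (UIImage *image in images) {
    // Replace the detector every N images so whatever it caches is released.
    if (processed % kImagesPerDetector == 0) {
        detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                      context:nil
                                      options:@{CIDetectorAccuracy : CIDetectorAccuracyLow}];
    }
    CIImage *ciimage = [CIImage imageWithCGImage:image.CGImage];
    NSArray *features = [detector featuresInImage:ciimage options:nil];
    // ... process features ...
    processed++;
}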
I've filed a Radar with Apple about this issue: http://openradar.appspot.com/radar?id=6645353252126720
Answer 1 (score: 1)
I have solved this problem: you should call the detect method inside an autorelease pool. The answer's original example was in Swift, using autoreleasepool { ... }.
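A minimal sketch of that pattern in Objective-C, assuming the detect: method from the question; the images loop is illustrative:

for (UIImage *image in images) {
    // Drain the autoreleased Core Image objects after each detection
    // instead of letting them pile up for the whole batch.
    @autoreleasepool {
        NSArray *faces = [self detect:image];
        // ... consume faces inside the pool ...
    }
}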