This line of code, which uses CGBitmapContextCreateImage from the Quartz framework, produces a major memory leak:

CGImageRef imgRef = CGBitmapContextCreateImage(bitmapContext);

Is there an alternative approach, or code that can replace it?

The entire method is below:
- (void)captureImageWithCompletionHander:(void(^)(NSString *imageFilePath))completionHandler
{
    dispatch_suspend(_captureQueue);
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in self.stillImageOutput.connections)
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:AVMediaTypeVideo])
            {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) break;
    }
    __weak typeof(self) weakSelf = self;
    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
    {
        if (error)
        {
            dispatch_resume(_captureQueue);
            return;
        }
        __block NSString *filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"Image 1"];
        @autoreleasepool
        {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
            CIImage *ciImage = [[CIImage alloc] initWithData:imageData options:@{kCIImageColorSpace:[NSNull null]}];
            enhancedImage = [self filteredImageUsingContrastFilterOnImage:ciImage];
            if (weakSelf.isBorderDetectionEnabled && rectangleDetectionConfidenceHighEnough(_imageDedectionConfidence))
            {
                CIRectangleFeature *rectangleFeature = [self biggestRectangleInRectangles:[[self highAccuracyRectangleDetector] featuresInImage:enhancedImage]];
                if (rectangleFeature)
                {
                    [self correctPerspectiveForImageWithFeatures:rectangleFeature];
                }
            }
            CIFilter *transform = [CIFilter filterWithName:@"CIAffineTransform"];
            [transform setValue:enhancedImage forKey:kCIInputImageKey];
            NSValue *rotation = [NSValue valueWithCGAffineTransform:CGAffineTransformMakeRotation(-90 * (M_PI/180))];
            [transform setValue:rotation forKey:@"inputTransform"];
            enhancedImage = [transform outputImage];
            if (!enhancedImage || CGRectIsEmpty(enhancedImage.extent)) return;
            static CIContext *ctx = nil;
            if (!ctx)
            {
                ctx = [CIContext contextWithOptions:@{kCIContextWorkingColorSpace:[NSNull null]}];
            }
            CGSize bounds = enhancedImage.extent.size;
            bounds = CGSizeMake(floorf(bounds.width / 4) * 4, floorf(bounds.height / 4) * 4);
            CGRect extent = CGRectMake(enhancedImage.extent.origin.x, enhancedImage.extent.origin.y, bounds.width, bounds.height);
            static int bytesPerPixel = 8;
            uint rowBytes = bytesPerPixel * bounds.width;
            uint totalBytes = rowBytes * bounds.height;
            uint8_t *byteBuffer = (uint8_t *)malloc(totalBytes);
            CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
            [ctx render:enhancedImage toBitmap:byteBuffer rowBytes:rowBytes bounds:extent format:kCIFormatRGBA8 colorSpace:colorSpace];
            CGContextRef bitmapContext = CGBitmapContextCreate(byteBuffer, bounds.width, bounds.height, bytesPerPixel, rowBytes, colorSpace, kCGImageAlphaNoneSkipLast);
            free(byteBuffer);
            CGImageRef imgRef = CGBitmapContextCreateImage(bitmapContext);
            CGContextRelease(bitmapContext);
            CGColorSpaceRelease(colorSpace);
            saveCGImageAsJPEGToFilePath(imgRef, filePath);
            CFRelease(imgRef);
            dispatch_async(dispatch_get_main_queue(), ^
            {
                completionHandler(filePath);
                dispatch_resume(_captureQueue);
            });
            _imageDedectionConfidence = 0.0f;
        }
    }];
}
Answer 0 (score: 1)
You create a CGImage with CGBitmapContextCreateImage(), but that CGImage is never released.
It will help if you release the CGImageRef once you have obtained the UIImage from it.
Get the image from the CGImageRef:

CGImageRef imgRef = CGBitmapContextCreateImage(bmContext);
image = [UIImage imageWithCGImage:imgRef];
CGImageRelease(imgRef);
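
More generally, this is Core Foundation's Create Rule: a function with "Create" or "Copy" in its name returns a reference you own and must balance with a release. A minimal sketch (the sizes and names here are illustrative, not from the original post):

// Create Rule sketch: every *Create* call is balanced by a matching release.
CGColorSpaceRef cs = CGColorSpaceCreateDeviceRGB();                  // +1, we own it
CGContextRef bmContext = CGBitmapContextCreate(NULL, 64, 64, 8, 64 * 4, cs,
                                               kCGImageAlphaPremultipliedLast); // +1, we own it
CGImageRef imgRef = CGBitmapContextCreateImage(bmContext);           // +1, we own it
UIImage *image = [UIImage imageWithCGImage:imgRef];                  // UIImage retains the CGImage itself
CGImageRelease(imgRef);      // balances CGBitmapContextCreateImage
CGContextRelease(bmContext); // balances CGBitmapContextCreate
CGColorSpaceRelease(cs);     // balances CGColorSpaceCreateDeviceRGB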
Answer 1 (score: 0)
I fixed the problem by making the code leaner and removing some unnecessary parts, including the leaking line above.
- (void)captureImageWithCompletionHander:(void(^)(NSString *imageFilePath))completionHandler
{
    dispatch_suspend(_captureQueue);
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in self.stillImageOutput.connections)
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:AVMediaTypeVideo])
            {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) break;
    }
    __weak typeof(self) weakSelf = self;
    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
    {
        if (error)
        {
            dispatch_resume(_captureQueue);
            return;
        }
        __block NSString *filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"Image 1"];
        @autoreleasepool
        {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
            UIImage *img = [UIImage imageWithData:imageData];
            imageData = nil;
            CGSize newSize = CGSizeMake(img.size.width, img.size.height);
            UIGraphicsBeginImageContext(newSize);
            [img drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
            UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();
            CIImage *ciImage = [[CIImage alloc] initWithImage:newImage];
            enhancedImage = [self filteredImageUsingContrastFilterOnImage:ciImage];
            if (weakSelf.isBorderDetectionEnabled && rectangleDetectionConfidenceHighEnough(_imageDedectionConfidence))
            {
                CIRectangleFeature *rectangleFeature = [self biggestRectangleInRectangles:[[self highAccuracyRectangleDetector] featuresInImage:enhancedImage]];
                if (rectangleFeature)
                {
                    enhancedImage = [self correctPerspectiveForImage:enhancedImage withFeatures:rectangleFeature];
                }
            }
            if (!enhancedImage || CGRectIsEmpty(enhancedImage.extent)) return;
            CGRect rect = [enhancedImage extent];
            static CIContext *ctx = nil; // declaration carried over from the original method
            if (!ctx)
            {
                ctx = [CIContext contextWithOptions:@{kCIContextWorkingColorSpace:[NSNull null]}];
            }
            // createCGImage: also follows the Create Rule; balanced by CFRelease below
            CGImageRef imgRef = [ctx createCGImage:enhancedImage fromRect:rect];
            saveCGImageAsJPEGToFilePath(imgRef, filePath);
            CFRelease(imgRef);
            dispatch_async(dispatch_get_main_queue(), ^
            {
                completionHandler(filePath);
                dispatch_resume(_captureQueue);
            });
            _imageDedectionConfidence = 0.0f;
        }
    }];
}
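
Neither snippet shows the saveCGImageAsJPEGToFilePath helper. Assuming it is a plain ImageIO wrapper, a minimal sketch might look like this (the implementation below is a guess for illustration, not the original helper):

#import <ImageIO/ImageIO.h>
#import <MobileCoreServices/MobileCoreServices.h>

// Hypothetical helper: writes a CGImage to disk as a JPEG via ImageIO.
static BOOL saveCGImageAsJPEGToFilePath(CGImageRef imgRef, NSString *filePath)
{
    NSURL *url = [NSURL fileURLWithPath:filePath];
    CGImageDestinationRef dest = CGImageDestinationCreateWithURL((__bridge CFURLRef)url, kUTTypeJPEG, 1, NULL);
    if (!dest) return NO;
    NSDictionary *options = @{(__bridge NSString *)kCGImageDestinationLossyCompressionQuality : @0.9};
    CGImageDestinationAddImage(dest, imgRef, (__bridge CFDictionaryRef)options);
    BOOL ok = CGImageDestinationFinalize(dest);
    CFRelease(dest); // Create Rule again: we created the destination, so we release it
    return ok;
}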