我正在通过相机使用AVFoundation捕获图像,然后通过segue将其显示到另一个具有UIImageView
的viewController。图像大小为1936 x 2592,屏幕在viewWillAppear
被调用后用图像更新需要5.5秒。
这是正常的,仅仅是因为图像的大小?这是在运行iOS6的iPhone4上。这种延迟是不可接受的,所以我必须找到一个解决方案,可能首先尝试缩小图像,但我想在跳过篮球之前我会问。
更新
将图像尺寸缩小到484 x 648甚至更长:5.6秒。评论中有一个我正在使用的捕获代码的请求。它位于这篇文章的最底层。此外,我只捕获一个图像,然后显示它。
//控制台输出:
有图像
2013-03-03 11:37:16.741 RPFaceCamera [4867:907] viewWillAppear - 所用时间:0.000215>
2013-03-03 11:37:22.249 RPFaceCamera [4867:907] viewDidAppear - 所用时间:5.507458>
//代码
- (void)viewWillAppear:(BOOL)animated
{
[super viewWillAppear:animated];
if (self.faceImage != nil){
start = [NSDate date];
printf("have image \n");
self.faceImageView.image = self.faceImage;
NSLog(@"viewWillAppear - time taken: %f", -[start timeIntervalSinceNow]);
}
}
- (void)viewDidAppear:(BOOL)animated
{
[super viewDidAppear:animated];
NSLog(@"viewDidAppear - time taken: %f", -[start timeIntervalSinceNow]);
}
//捕获代码 - (我想我从Apple的AVDemo中抄袭了这个)。 Objective-C的格式在可读性方面有很多不足之处。
- (void)captureImage
{
AVCaptureConnection *connection;
connection = [stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
[stillImageOutput setOutputSettings:[NSDictionary
dictionaryWithObject:[NSNumber numberWithInt:kCMPixelFormat_32BGRA]
forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
[stillImageOutput captureStillImageAsynchronouslyFromConnection:connection
completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if(error)
NSLog(@"captureImage failed");
else{
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(imageDataSampleBuffer);
CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault,
imageDataSampleBuffer,
kCMAttachmentMode_ShouldPropagate);
CIImage *ciImage =
[[CIImage alloc] initWithCVPixelBuffer:pixelBuffer
options:(__bridge NSDictionary *)attachments];
NSNumber *orientation = (__bridge NSNumber *)(CMGetAttachment(imageDataSampleBuffer,
kCGImagePropertyOrientation, NULL));
printf("original orientation %d\n", [orientation intValue]);
self.faceImage = [UIImage imageWithCIImage:ciImage scale:1.0f orientation:[orientation intValue]];
printf("self.faceImage %d\n", self.faceImage.imageOrientation);
printf("self.faceImage width: %f height: %f\n", self.faceImage.size.width, self.faceImage.size.height);
__weak __typeof__(self) weakSelf = self;
[weakSelf performSelectorOnMainThread:@selector(showFaceView) withObject:nil waitUntilDone:NO];
}
}
];
}
//捕获完成时在主线程上调用的方法 - (void)showFaceView { 的NSLog(@ “showfaceView”);
[self destroyAVCapture];
[self.delegate dismissCameraWithImage:self.faceImage];
}