I want to capture the overlay and the image in real time, much like Cube Dog does. For the complete code, see http://www.musicalgeometry.com/?p=1681
I know how to overlay an image on the preview layer and capture a still image, and I have looked at Apple's sample code, which saves a red square to the camera roll when a face is detected.
EDIT
I want to save the capture at 1920 x 1080 from the back camera (and 1280 x 960 from the front). The code below saves the overlay and the image in real time, but the alignment is off and I can't work out why. Can anyone help?
Cheers
This is the preview layer:
This is after capture:
- (id)init {
    if ((self = [super init])) {
        [self setCaptureSession:[[AVCaptureSession alloc] init]];
        [self.captureSession setSessionPreset:AVCaptureSessionPresetHigh];
    }
    NSLog(@"init called");
    return self;
}
- (void)takePictureWithOverlay:(UIImage *)overlay andRect:(CGRect)overlayRect
{
    // Find out the current orientation and tell the still image output.
    AVCaptureConnection *stillImageConnection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    //UIDeviceOrientation curDeviceOrientation = [[UIDevice currentDevice] orientation];
    //AVCaptureVideoOrientation avcaptureOrientation = [self avOrientationForDeviceOrientation:curDeviceOrientation];
    [stillImageConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
    [stillImageConnection setVideoScaleAndCropFactor:self.effectiveScale];

    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:stillImageConnection
        completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
            if (error) {
                [self displayErrorOnMainQueue:error withMessage:@"Take picture failed"];
            }
            else {
                // Trivial simple JPEG case.
                NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
                UIImage *image = [[UIImage alloc] initWithData:jpegData];

                CGSize imageSize = [image size];
                CGSize overlaySize = [overlay size];
                UIGraphicsBeginImageContext(imageSize);
                [image drawInRect:CGRectMake(0, 0, imageSize.width, imageSize.height)];

                NSLog(@"application frame width %f", [UIScreen mainScreen].applicationFrame.size.width);
                NSLog(@"application frame height %f", [UIScreen mainScreen].applicationFrame.size.height);
                NSLog(@"screen bounds height %f", [[UIScreen mainScreen] bounds].size.height);

                // Scale factors are hardcoded against a 320 x 568 point (4-inch) screen.
                CGFloat xScaleFactor = imageSize.width / 320;
                CGFloat yScaleFactor = imageSize.height / 568; //480 on a 3.5-inch screen
                NSLog(@"xScaleFactor %f", xScaleFactor);
                NSLog(@"yScaleFactor %f", yScaleFactor);

                // Rect used in AROverlayViewController was (30, 100, 260, 200).
                [overlay drawInRect:CGRectMake(overlayRect.origin.x * xScaleFactor,
                                               overlayRect.origin.y * yScaleFactor,
                                               overlaySize.width * xScaleFactor,
                                               overlaySize.height * yScaleFactor)];

                UIImage *combinedImage = UIGraphicsGetImageFromCurrentImageContext();
                [self setStillImage:combinedImage];
                UIGraphicsEndImageContext();
            }
            [[NSNotificationCenter defaultCenter] postNotificationName:kImageCapturedSuccessfully object:nil];
        }];
}
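The misalignment most likely comes from the hardcoded 320/568 divisors above: they assume the captured photo shows exactly what a 320 x 568 point screen shows, but with an aspect-fill preview the photo contains more of the scene than the preview layer displays, so the two coordinate spaces drift apart. Below is a minimal sketch of a gravity-aware conversion; it assumes the preview layer uses AVLayerVideoGravityResizeAspectFill, and the helper name and parameters are illustrative, not from the original post.

// Hypothetical helper: map a rect from preview-layer points into
// captured-image pixels, assuming AVLayerVideoGravityResizeAspectFill.
// With aspect-fill the image is drawn at MIN(imageW/layerW, imageH/layerH)
// pixels per point, and the overshoot is cropped equally on both sides.
- (CGRect)imageRectForPreviewRect:(CGRect)previewRect
                        imageSize:(CGSize)imageSize
                        layerSize:(CGSize)layerSize
{
    CGFloat scale = MIN(imageSize.width / layerSize.width,
                        imageSize.height / layerSize.height);
    // Pixels cropped off the photo, split evenly between the two edges.
    CGFloat cropX = (imageSize.width - layerSize.width * scale) / 2.0;
    CGFloat cropY = (imageSize.height - layerSize.height * scale) / 2.0;
    return CGRectMake(previewRect.origin.x * scale + cropX,
                      previewRect.origin.y * scale + cropY,
                      previewRect.size.width * scale,
                      previewRect.size.height * scale);
}

On iOS 6 and later, AVCaptureVideoPreviewLayer's metadataOutputRectOfInterestForRect: performs a similar layer-to-capture-output conversion (returning a rect normalized to 0..1), which avoids hand-rolling the gravity math.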
Answer 0 (score: 1)
I found my answer here: http://developer.apple.com/library/ios/#qa/qa1714/_index.html (Apple Technical Q&A QA1714).
// Render the UIView into the CGContextRef using the
// CALayer/-renderInContext: method.
- (void)renderView:(UIView *)view inContext:(CGContextRef)context
{
    // -renderInContext: renders in the coordinate space of the layer,
    // so we must first apply the layer's geometry to the graphics context.
    CGContextSaveGState(context);
    // Center the context around the view's anchor point.
    CGContextTranslateCTM(context, [view center].x, [view center].y);
    // Apply the view's transform about the anchor point.
    CGContextConcatCTM(context, [view transform]);
    // Offset by the portion of the bounds left of and above the anchor point.
    CGContextTranslateCTM(context,
                          -[view bounds].size.width * [[view layer] anchorPoint].x,
                          -[view bounds].size.height * [[view layer] anchorPoint].y);
    // Render the layer hierarchy to the current context.
    [[view layer] renderInContext:context];
    // Restore the context.
    CGContextRestoreGState(context);
}
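Note that -renderInContext: draws the overlay at its on-screen point coordinates. That is why the capture method below sizes its bitmap context to [[UIScreen mainScreen] bounds].size rather than to the photo's pixel dimensions: it trades the full photo resolution for exact alignment with what was displayed on screen.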
- (void)takePictureWithOverlay:(UIView *)overlay andRect:(CGRect)overlayRect
{
    // Find out the current orientation and tell the still image output.
    // Note: overlayRect is unused in this version; the overlay view's own
    // geometry determines where it is rendered.
    self.videoConnection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
    //UIDeviceOrientation curDeviceOrientation = [[UIDevice currentDevice] orientation];
    //AVCaptureVideoOrientation avcaptureOrientation = [self avOrientationForDeviceOrientation:curDeviceOrientation];
    [self.videoConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
    [self.videoConnection setVideoScaleAndCropFactor:self.effectiveScale];

    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:self.videoConnection
        completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
            if (error) {
                [self displayErrorOnMainQueue:error withMessage:@"Take picture failed"];
            }
            else {
                // Trivial simple JPEG case.
                NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
                UIImage *image = [[UIImage alloc] initWithData:jpegData];
                NSLog(@"size of captured image %@", NSStringFromCGSize(image.size));

                // Size the context to the screen, not the photo, so the overlay
                // view renders at the same coordinates it occupies on screen.
                CGSize imageSize = [[UIScreen mainScreen] bounds].size;
                NSLog(@"size of context %@", NSStringFromCGSize(imageSize));

                UIGraphicsBeginImageContextWithOptions(imageSize, NO, 0);
                CGContextRef context = UIGraphicsGetCurrentContext();

                // Draw the image returned by the camera sample buffer into the
                // context, scaled into the same sized rectangle as the view
                // displayed on the screen.
                UIGraphicsPushContext(context);
                [image drawInRect:CGRectMake(0, 0, imageSize.width, imageSize.height)];
                UIGraphicsPopContext();

                // Render the camera overlay view into the graphics context created above.
                [self renderView:overlay inContext:context];

                // Retrieve the screenshot image containing both the camera content
                // and the overlay view.
                UIImage *screenshot = UIGraphicsGetImageFromCurrentImageContext();
                [self setStillImage:screenshot];
                UIGraphicsEndImageContext();
            }
            [[NSNotificationCenter defaultCenter] postNotificationName:kImageCapturedSuccessfully object:nil];
        }];
}
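For completeness, here is a sketch of how a view controller might drive this capture path. The captureManager and overlayView property names and the observer wiring are illustrative assumptions, not from the original post; only kImageCapturedSuccessfully and stillImage come from the code above.

// Hypothetical caller: property names are illustrative, not from the post.
- (void)shutterButtonTapped
{
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(imageCaptured:)
                                                 name:kImageCapturedSuccessfully
                                               object:nil];
    [self.captureManager takePictureWithOverlay:self.overlayView
                                        andRect:self.overlayView.frame];
}

- (void)imageCaptured:(NSNotification *)note
{
    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:kImageCapturedSuccessfully
                                                  object:nil];
    // stillImage is set in the completion handler before the notification is
    // posted, so it is safe to read here. Note the notification arrives on the
    // capture completion queue; hop to the main queue before touching UIKit.
    UIImageWriteToSavedPhotosAlbum(self.captureManager.stillImage, nil, nil, nil);
}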