Here is the Swift-language version. (Source: Swift Version.)
This is my code. Can anyone help me figure out why it isn't working?
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
@interface ViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate>{
AVCaptureSession *captureSession;
AVCaptureDevice *captureDevice;
AVCaptureVideoPreviewLayer *previewLayer;
UIImage *resultImage;
BOOL isStart;
}
@end
@implementation ViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    isStart = NO;
    [self isStartTrue];

    captureSession = [[AVCaptureSession alloc] init];
    captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    captureSession.sessionPreset = AVCaptureSessionPresetLow;

    // Look for the front-facing camera and start the session once it is found.
    NSArray *devices = [AVCaptureDevice devices];
    for (AVCaptureDevice *device in devices) {
        if ([device hasMediaType:AVMediaTypeVideo] && device.position == AVCaptureDevicePositionFront) {
            captureDevice = device;
            NSLog(@"Capture Device found");
            [self beginSession];
        }
    }
}

- (void)isStartTrue {
    isStart = YES;
}
- (void)beginSession {
    NSError *error = nil;
    AVCaptureDeviceInput *captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
    if (error != nil) {
        NSLog(@"Failed to create device input: %@", error);
        return;
    }
    [captureSession addInput:captureDeviceInput];

    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    dispatch_queue_t cameraQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
    [output setSampleBufferDelegate:self queue:cameraQueue];

    // Ask for BGRA frames so they can be drawn directly with Core Graphics.
    output.videoSettings = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
    [captureSession addOutput:output];

    previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspect; // use the constant, not a string literal
    previewLayer.frame = self.view.bounds;
    [self.view.layer addSublayer:previewLayer];

    [captureSession startRunning];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (!isStart) {
        return;
    }
    resultImage = [self sampleBufferToImage:sampleBuffer];

    // Note: creating the context and detector on every frame is expensive;
    // in practice they should be created once and reused.
    CIContext *context = [CIContext contextWithOptions:@{ kCIContextUseSoftwareRenderer : @YES }];
    CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace context:context options:@{ CIDetectorAccuracy : CIDetectorAccuracyHigh }];
    CIImage *ciImage = [CIImage imageWithCGImage:resultImage.CGImage];

    // previewIV is a UIImageView (declared elsewhere) used to inspect the converted frames.
    dispatch_async(dispatch_get_main_queue(), ^{
        previewIV.image = resultImage;
    });

    // 6 is the EXIF orientation (row 0 on the right) matching the rotated camera frames.
    NSArray *results = [detector featuresInImage:ciImage options:@{ CIDetectorImageOrientation : @6 }];
    for (CIFaceFeature *face in results) {
        CGImageRef faceCGImage = [context createCGImage:ciImage fromRect:face.bounds];
        UIImage *faceImage = [UIImage imageWithCGImage:faceCGImage scale:1.0 orientation:UIImageOrientationRight];
        CGImageRelease(faceCGImage); // createCGImage: follows the Create rule, so release it
        NSLog(@" ====%@", NSStringFromCGRect(face.bounds));
    }
}
- (UIImage *)sampleBufferToImage:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    void *baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    int bitsPerComponent = 8;
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, bitsPerComponent, bytesPerRow, colorSpace, (kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst));
    CGImageRef imageRef = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    UIImage *result = [[UIImage alloc] initWithCGImage:imageRef scale:1.0 orientation:UIImageOrientationRight];
    CGImageRelease(imageRef);
    return result;
}
- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    [captureSession stopRunning];
    // Dispose of any resources that can be recreated.
}
@end
This is the complete code for this detection view controller.
You can copy it into a project and link AVFoundation.framework and CoreMedia.framework.
Answer 0 (score: 1):
If you look at your previewIV, you will see that it is an empty image.
So I changed your sampleBufferToImage method to the version below, and it works correctly.
- (UIImage *)sampleBufferToImage:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
    CGImageRef videoImage = [temporaryContext createCGImage:ciImage
                                                   fromRect:CGRectMake(0, 0,
                                                                       CVPixelBufferGetWidth(imageBuffer),
                                                                       CVPixelBufferGetHeight(imageBuffer))];
    UIImage *result = [[UIImage alloc] initWithCGImage:videoImage];
    CGImageRelease(videoImage);
    return result;
}
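For context: the original conversion fails because CVPixelBufferGetBaseAddressOfPlane is called without first locking the pixel buffer, so the bitmap context is built from an invalid pointer (and on a non-planar 32BGRA buffer, the planar accessor typically returns NULL anyway). Below is a minimal sketch, not from the original answer, of how the CGBitmapContext approach could be kept by locking the buffer instead; the method name sampleBufferToImageLocked is made up for illustration, and it assumes the 32BGRA output format configured above.

- (UIImage *)sampleBufferToImageLocked:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // The base address is only valid between the lock and unlock calls.
    CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);

    // 32BGRA buffers are not planar, so use the plain base-address accessor.
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef imageRef = CGBitmapContextCreateImage(context);

    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);

    UIImage *result = [[UIImage alloc] initWithCGImage:imageRef scale:1.0 orientation:UIImageOrientationRight];
    CGImageRelease(imageRef);
    return result;
}

Either version should work: the answer's CIContext route is shorter, while the locked CGBitmapContext route avoids creating a Core Image context on every frame.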