我正在开发一个使用 AVCaptureSession 的应用程序，并且已经打开了前置摄像头。现在我想在 AVCaptureSession 的视频画面上调节亮度（曝光）。
// Build and start the capture pipeline: front camera -> still-image output +
// BGRA video-data output, with a preview layer covering the view.
AVCaptureSession *session = [[AVCaptureSession alloc] init];
if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone)
    [session setSessionPreset:AVCaptureSessionPresetHigh];
else
    [session setSessionPreset:AVCaptureSessionPresetPhoto];

// FIX: -defaultDeviceWithMediaType: returns the BACK camera. The stated goal
// is the front camera, so search the device list for the front position and
// fall back to the default device only if no front camera exists.
AVCaptureDevice *device = nil;
for (AVCaptureDevice *candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if (candidate.position == AVCaptureDevicePositionFront) {
        device = candidate;
        break;
    }
}
if (!device)
    device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

// "Brightness" on a capture session is done at the device level via exposure
// target bias (iOS 8+). Positive values brighten the feed, negative darken it.
// The device must be locked for configuration before any exposure change.
if ([device respondsToSelector:@selector(setExposureTargetBias:completionHandler:)]) {
    NSError *configurationError = nil;
    if ([device lockForConfiguration:&configurationError]) {
        // 1.0 EV of extra brightness; clamp to [minExposureTargetBias,
        // maxExposureTargetBias] if you expose this as a user control.
        [device setExposureTargetBias:1.0f completionHandler:nil];
        [device unlockForConfiguration];
    }
}

AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
require( error == nil, bail );
{
    if ( [session canAddInput:deviceInput] )
        [session addInput:deviceInput];

    // Still-image capture as JPEG. NOTE(review): AVCaptureStillImageOutput is
    // deprecated since iOS 10 — consider AVCapturePhotoOutput when the
    // deployment target allows it.
    stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [stillImageOutput setOutputSettings:outputSettings];
    // Observe capture-in-flight so the UI can flash/animate; context pointer
    // distinguishes this observation in -observeValueForKeyPath:.
    [stillImageOutput addObserver:self forKeyPath:@"capturingStillImage" options:NSKeyValueObservingOptionNew context:(__bridge void *)(AVCaptureStillImageIsCapturingStillImageContext)];
    if ( [session canAddOutput:stillImageOutput] )
        [session addOutput:stillImageOutput];
    [[stillImageOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];

    // Per-frame BGRA buffers for processing (e.g. CoreImage brightness filters
    // would be applied to these sample buffers in the delegate callback).
    videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    NSDictionary *rgbOutputSettings = [NSDictionary dictionaryWithObject:
                                       [NSNumber numberWithInt:kCMPixelFormat_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    [videoDataOutput setVideoSettings:rgbOutputSettings];
    // Drop late frames rather than queueing them — keeps preview latency low.
    [videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];
    videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
    if ( [session canAddOutput:videoDataOutput] )
        [session addOutput:videoDataOutput];
    [[videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];

    effectiveScale = 1.0;

    // Preview layer sized to the full view, aspect-fit on a black background.
    chromeLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    [chromeLayer setBackgroundColor:[[UIColor blackColor] CGColor]];
    [chromeLayer setVideoGravity:AVLayerVideoGravityResizeAspect];
    CALayer *rootLayer = [chromeView layer];
    [rootLayer setMasksToBounds:YES];
    [chromeLayer setFrame:CGRectMake(self.view.frame.origin.x, self.view.frame.origin.y, self.view.frame.size.width, self.view.frame.size.height)];
    [rootLayer addSublayer:chromeLayer];
    [session startRunning];
我已经实现了与 AVCaptureSession 相关的委托（delegate）和数据源方法。欢迎任何帮助！