I have an AVCaptureSession app that worked fine on earlier iOS versions, but when I run it on an iOS 8 device the app crashes, and the problem is still unresolved. It throws an exception at "[session addInput:input];". Please review my code below and advise how to fix it. At "[session addInput:input];" I get this error printed:

Error Domain=AVFoundationErrorDomain Code=-11852 "Cannot use Back Camera" UserInfo=0x17c076e0 {NSLocalizedDescription=Cannot use Back Camera, AVErrorDeviceKey=, NSLocalizedFailureReason=This app is not authorized to use Back Camera.}
#import "CameraViewController.h"
#import "MAImagePickerControllerAdjustViewController.h"
#import "PopupViewController.h"
#import "MAImagePickerFinalViewController.h"
@implementation CameraViewController
@synthesize vImagePreview;
@synthesize vImage;
@synthesize stillImageOutput;
@synthesize lFrameCount;
@synthesize session;
@synthesize device;
@synthesize oneOff;
@synthesize captureManager = _captureManager;
@synthesize flashButton = _flashButton;
@synthesize vImage1;
@synthesize vImage2;
@synthesize vImage3;
@synthesize vImage4;
@synthesize vImage5;
@synthesize vImage6;
/////////////////////////////////////////////////////////////////////
#pragma mark - UI Actions
/////////////////////////////////////////////////////////////////////
- (IBAction)captureNow
{
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in stillImageOutput.connections)
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:AVMediaTypeVideo])
            {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) { break; }
    }

    NSLog(@"about to request a capture from: %@", stillImageOutput);
    [stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
    {
        CFDictionaryRef exifAttachments = CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
        if (exifAttachments)
        {
            // Do something with the attachments.
            NSLog(@"attachments: %@", exifAttachments);
        }
        else
        {
            NSLog(@"no attachments");
        }

        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:imageData];

        NSUserDefaults *standardUserDefaults = [NSUserDefaults standardUserDefaults];
        NSString *val1 = nil;
        if (standardUserDefaults)
        {
            val1 = [standardUserDefaults objectForKey:@"clickTypeTwo"];
        }

        if ([val1 isEqualToString:@"cameraType"])
        {
            dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
                [session stopRunning];
            });

            FinalViewController *finalView = [[FinalViewController alloc] initWithNibName:IS_IPAD() ? @"FinalViewController_iPad" : @"FinalViewController" bundle:nil];
            finalView.sourceImage = image;
            //finalView.imageFrameEdited = YES;

            CATransition *transition = [CATransition animation];
            transition.duration = 0.4;
            transition.type = kCATransitionFade;
            transition.subtype = kCATransitionFromBottom;
            [self.navigationController.view.layer addAnimation:transition forKey:kCATransition];
            [self.navigationController pushViewController:finalView animated:NO];
        }
        else
        {
            [session stopRunning];

            AdjustViewController *adjustViewController = [[AdjustViewController alloc] initWithNibName:IS_IPAD() ? @"AdjustViewController_iPad" : @"AdjustViewController" bundle:nil];
            adjustViewController.sourceImage = image;

            CATransition *transition = [CATransition animation];
            transition.duration = 0.4;
            transition.type = kCATransitionFade;
            transition.subtype = kCATransitionFromBottom;
            [self.navigationController.view.layer addAnimation:transition forKey:kCATransition];
            [self.navigationController pushViewController:adjustViewController animated:NO];
        }
    }];
}
- (void)cropImageViewControllerDidFinished:(UIImage *)image
{
    MAImagePickerFinalViewController *finalView = [[MAImagePickerFinalViewController alloc] initWithNibName:IS_IPAD() ? @"FinalViewController_iPad" : @"FinalViewController" bundle:nil];
    finalView.sourceImage = image;
    //finalView.imageFrameEdited = YES;

    CATransition *transition = [CATransition animation];
    transition.duration = 0.4;
    transition.type = kCATransitionFade;
    transition.subtype = kCATransitionFromBottom;
    [self.navigationController.view.layer addAnimation:transition forKey:kCATransition];
    [self.navigationController pushViewController:finalView animated:NO];
}
/////////////////////////////////////////////////////////////////////
#pragma mark - Video Frame Delegate
/////////////////////////////////////////////////////////////////////
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    //NSLog(@"got frame");
    iFrameCount++;

    // Update the display. We are running in the context of the capture
    // session's queue; to update the UI in real time we have to hop to the main thread.
    NSString *frameCountString = [[NSString alloc] initWithFormat:@"%4.4d", iFrameCount];
    [lFrameCount performSelectorOnMainThread:@selector(setText:) withObject:frameCountString waitUntilDone:YES];
    //NSLog(@"frame count %d", iFrameCount);
}
- (IBAction)showLeftSideBar
{
    //[self dismissModalViewControllerAnimated:YES];
    if ([[SidebarViewController share] respondsToSelector:@selector(showSideBarControllerWithDirection:)]) {
        [[SidebarViewController share] showSideBarControllerWithDirection:SideBarShowDirectionLeft];
    }
}

- (IBAction)showRightSideBar:(id)sender
{
}
static BOOL isUsingFrontFacingCamera = NO;

- (IBAction)flipCamera:(id)sender
{
    AVCaptureDevicePosition desiredPosition;
    if (isUsingFrontFacingCamera)
        desiredPosition = AVCaptureDevicePositionBack;
    else
        desiredPosition = AVCaptureDevicePositionFront;

    for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([d position] == desiredPosition) {
            [[self session] beginConfiguration];
            AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:d error:nil];
            for (AVCaptureInput *oldInput in [[self session] inputs]) {
                [[self session] removeInput:oldInput];
            }
            [[self session] addInput:input];
            [[self session] commitConfiguration];
            break;
        }
    }
    isUsingFrontFacingCamera = !isUsingFrontFacingCamera;
}
/////////////////////////////////////////////////////////////////////
#pragma mark - Guts
/////////////////////////////////////////////////////////////////////
- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Release any cached data, images, etc. that aren't in use.
}
/////////////////////////////////////////////////////////////////////
#pragma mark - View lifecycle
/////////////////////////////////////////////////////////////////////
- (void)viewDidLoad
{
    [super viewDidLoad];
}

- (void)viewDidUnload
{
    [super viewDidUnload];
    // Release any retained subviews of the main view.
    // e.g. self.myOutlet = nil;
}

- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
}
- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    flashIsOn = YES;

    /////////////////////////////////////////////////////////////////////////////
    // Create a preview layer that has a capture session attached to it.
    // Stick this preview layer into our UIView.
    /////////////////////////////////////////////////////////////////////////////
    session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPreset640x480;

    CALayer *viewLayer = self.vImagePreview.layer;
    NSLog(@"viewLayer = %@", viewLayer);
    // viewLayer.frame = CGRectMake(-70, 150, 480, 336);
    // UIGraphicsBeginImageContextWithOptions(CGSizeMake(400, 400), NO, 1);

    AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    CGRect bounds = vImagePreview.layer.bounds;
    captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    captureVideoPreviewLayer.bounds = bounds;
    captureVideoPreviewLayer.position = CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
    captureVideoPreviewLayer.frame = self.vImagePreview.bounds;
    [self.vImagePreview.layer addSublayer:captureVideoPreviewLayer];

    //[self addVideoInputFrontCamera:YES]; // set to YES for Front Camera, NO for Back Camera
    device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // Handle the error appropriately.
        NSLog(@"ERROR: trying to open camera: %@", error);
    }
    [session addInput:input];

    /////////////////////////////////////////////////////////////
    // OUTPUT #1: Still Image
    /////////////////////////////////////////////////////////////
    // Add an output object to our session so we can get a still image.
    // We retain a handle to the still image output and use it when we capture an image.
    stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [stillImageOutput setOutputSettings:outputSettings];
    [session addOutput:stillImageOutput];

    /////////////////////////////////////////////////////////////
    // OUTPUT #2: Video Frames
    /////////////////////////////////////////////////////////////
    // Create a video frame output that will send each frame to our delegate.
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
    //captureOutput.minFrameDuration = CMTimeMake(1, 3); // deprecated in iOS 5

    // We need to create a queue to funnel the frames to our delegate.
    dispatch_queue_t queue;
    queue = dispatch_queue_create("cameraQueue", NULL);
    [captureOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Set the video output to store frames in BGRA (it is supposed to be faster).
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    // let's try some different keys,
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];
    [session addOutput:captureOutput];

    /////////////////////////////////////////////////////////////
    // Start the capture session.
    [session startRunning];

    /////////////////////////////////////////////////////////////////////////////
    // Initialize the frame counter.
    iFrameCount = 0;
}
- (void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
}

- (void)viewDidDisappear:(BOOL)animated
{
    [super viewDidDisappear:animated];
    [session stopRunning];
}

- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    // Return YES for supported orientations
    if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) {
        return (interfaceOrientation != UIInterfaceOrientationPortraitUpsideDown);
    } else {
        return YES;
    }
}
- (IBAction)cancelButton:(id)sender
{
}

- (IBAction)flashOn:(id)sender
{
    Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice");
    if (captureDeviceClass != nil) {
        if ([device hasTorch] && [device hasFlash]) {
            [device lockForConfiguration:nil];
            if (flashIsOn) {
                [device setTorchMode:AVCaptureTorchModeOn];
                [device setFlashMode:AVCaptureFlashModeOn];
                oneOff.text = @"On";
                [_flashButton setImage:[UIImage imageNamed:@"flash-on-button"]];
                _flashButton.accessibilityLabel = @"Disable Camera Flash";
                flashIsOn = NO; // define as a variable/property if you need to know the status
            } else {
                [_flashButton setImage:[UIImage imageNamed:@"flash-off-button"]];
                _flashButton.accessibilityLabel = @"Enable Camera Flash";
                oneOff.text = @"Off";
                [device setTorchMode:AVCaptureTorchModeOff];
                [device setFlashMode:AVCaptureFlashModeOff];
                flashIsOn = YES;
            }
            [device unlockForConfiguration];
        }
    }
}
- (void)dealloc
{
    [[self session] stopRunning];
    [super dealloc];
}

- (void)storeFlashSettingWithBool:(BOOL)flashSetting
{
    [[NSUserDefaults standardUserDefaults] setBool:flashSetting forKey:kCameraFlashDefaultsKey];
    [[NSUserDefaults standardUserDefaults] synchronize];
}
@end
Answer 0 (score: 10)

Please check your device settings: go to Settings > Privacy > Camera, find your app's entry, and turn it on.
Then run the app. It works. Cheers.
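If you prefer to handle this in code rather than only pointing the user at Settings, a minimal sketch (assuming the question's session and input variables and an iOS 7+ deployment target) is to check the authorization status and request access before adding the input:

    // Sketch: request camera permission before configuring the session.
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusAuthorized) {
        [session addInput:input];
    } else if (status == AVAuthorizationStatusNotDetermined) {
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            // The handler can run on an arbitrary queue; hop to main before touching the session/UI.
            dispatch_async(dispatch_get_main_queue(), ^{
                if (granted) {
                    [session addInput:input];
                } else {
                    NSLog(@"Camera access was denied by the user.");
                }
            });
        }];
    } else {
        // Denied or restricted: direct the user to Settings > Privacy > Camera.
        NSLog(@"Camera access denied or restricted; enable it in Settings.");
    }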
Answer 1 (score: 3)

Change the dealloc method (note that this answer uses its own property names, captureSession, videoInput, and videoOutput, which do not appear in the question's code):
[self.captureSession removeInput:self.videoInput];
[self.captureSession removeOutput:self.videoOutput];
self.captureSession = nil;
self.videoOutput = nil;
self.videoInput = nil;
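Mapped onto the question's actual properties (a sketch, assuming session and stillImageOutput are the objects to tear down; the answer's captureSession, videoInput, and videoOutput are its own names), the same teardown could look like:

    - (void)dealloc {
        [session stopRunning];
        // Detach inputs and outputs before letting go of the session.
        for (AVCaptureInput *oldInput in [session inputs]) {
            [session removeInput:oldInput];
        }
        [session removeOutput:stillImageOutput];
        [super dealloc];
    }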
Answer 2 (score: 1)

We ran into this problem today. Starting with iOS 8.0.2 and later, camera access requires the Camera privacy setting to be granted (not Camera Roll); once that setting was enabled, the code worked.
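As a defensive complement (a sketch assuming the question's device and session variables, not part of the original answer), you can also guard the failing call by checking the returned error and -canAddInput: so a denied camera fails gracefully instead of throwing:

    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (input && [session canAddInput:input]) {
        [session addInput:input];
    } else {
        // Typically surfaces error -11852 here when camera access is not authorized.
        NSLog(@"Could not add camera input: %@", error);
    }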
Answer 3 (score: 1)

I saw the same error in my app today. I handle it with an alert whose "Settings" action is a shortcut to the app's privacy settings:
do {
    let captureInput: AVCaptureDeviceInput = try AVCaptureDeviceInput(device: self.device)
    ...
} catch let error as NSError {
    let alert = UIAlertController(title: error.localizedDescription, message: error.localizedFailureReason, preferredStyle: .Alert)
    let settingsAction = UIAlertAction(title: "Settings", style: .Default) { (action) in
        UIApplication.sharedApplication().openURL(NSURL(string: UIApplicationOpenSettingsURLString)!)
    }
    alert.addAction(settingsAction)
    self.presentViewController(alert, animated: true, completion: nil)
}