I just want to place a simple camera view in my view controller. I imported AVFoundation at the top, and the class adopts UIImagePickerControllerDelegate and UINavigationControllerDelegate. However, whenever I try to use AVCaptureStillImageOutput, Xcode tells me it was deprecated in iOS 10 and that I should use AVCapturePhotoOutput instead. That would be fine, except that as soon as I want to call stillImageOutput.outputSettings, .outputSettings itself isn't available there. So I have to fall back to AVCaptureStillImageOutput to use it, but then I get multiple warnings because it is deprecated in iOS 10.
I've searched and searched but can't really find a solution. I would sincerely appreciate any help. I'm still learning, so any explanation would be great! The code is below.
import UIKit
import AVFoundation

class CameraView: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate {

    var captureSession : AVCaptureSession?
    var stillImageOutput : AVCaptureStillImageOutput?
    var previewLayer : AVCaptureVideoPreviewLayer?

    @IBOutlet var cameraView: UIView!

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        captureSession = AVCaptureSession()
        captureSession?.sessionPreset = AVCaptureSessionPreset1920x1080

        var backCamera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
        var error : NSError?

        do {
            var input = try! AVCaptureDeviceInput(device: backCamera)
            if (error == nil && captureSession?.canAddInput(input) != nil) {
                captureSession?.addInput(input)

                stillImageOutput = AVCaptureStillImageOutput()
                stillImageOutput?.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]

                if (captureSession?.canAddOutput(stillImageOutput) != nil) {
                    captureSession?.addOutput(stillImageOutput)

                    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                    previewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
                    previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                    cameraView.layer.addSublayer(previewLayer!)

                    captureSession?.startRunning()
                }
            }
        } catch {
        }
    }
}
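For context on the API change being asked about: AVCapturePhotoOutput has no persistent outputSettings property; instead, each shot is described by an AVCapturePhotoSettings object passed to capturePhoto(with:delegate:). The following is only a rough sketch of that shape (iOS 10-era Swift; the names photoOutput and takePhoto() are illustrative, and self is assumed to conform to AVCapturePhotoCaptureDelegate with the session already configured), not a complete camera setup:

// Sketch only: the AVCapturePhotoOutput replacement for stillImageOutput.outputSettings.
let photoOutput = AVCapturePhotoOutput()   // added to the session in place of AVCaptureStillImageOutput

func takePhoto() {
    // Per-capture settings replace the old persistent outputSettings dictionary.
    let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecJPEG])
    photoOutput.capturePhoto(with: settings, delegate: self)   // self: AVCapturePhotoCaptureDelegate
}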
Answer 0 (score: 11)
My full implementation:
import UIKit
import AVFoundation

class ViewController: UIViewController, AVCapturePhotoCaptureDelegate {

    var captureSesssion : AVCaptureSession!
    var cameraOutput : AVCapturePhotoOutput!
    var previewLayer : AVCaptureVideoPreviewLayer!

    @IBOutlet weak var capturedImage: UIImageView!
    @IBOutlet weak var previewView: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()

        captureSesssion = AVCaptureSession()
        captureSesssion.sessionPreset = AVCaptureSessionPresetPhoto
        cameraOutput = AVCapturePhotoOutput()

        let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)

        if let input = try? AVCaptureDeviceInput(device: device) {
            if captureSesssion.canAddInput(input) {
                captureSesssion.addInput(input)
                if captureSesssion.canAddOutput(cameraOutput) {
                    captureSesssion.addOutput(cameraOutput)
                    previewLayer = AVCaptureVideoPreviewLayer(session: captureSesssion)
                    previewLayer.frame = previewView.bounds
                    previewView.layer.addSublayer(previewLayer)
                    captureSesssion.startRunning()
                }
            } else {
                print("issue here : captureSesssion.canAddInput")
            }
        } else {
            print("some problem here")
        }
    }

    // Take picture button
    @IBAction func didPressTakePhoto(_ sender: UIButton) {
        let settings = AVCapturePhotoSettings()
        let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
        let previewFormat = [
            kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
            kCVPixelBufferWidthKey as String: 160,
            kCVPixelBufferHeightKey as String: 160
        ]
        settings.previewPhotoFormat = previewFormat
        cameraOutput.capturePhoto(with: settings, delegate: self)
    }

    // callBack from take picture
    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
        if let error = error {
            print("error occure : \(error.localizedDescription)")
        }

        if let sampleBuffer = photoSampleBuffer,
            let previewBuffer = previewPhotoSampleBuffer,
            let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {
            print(UIImage(data: dataImage)?.size as Any)

            let dataProvider = CGDataProvider(data: dataImage as CFData)
            let cgImageRef: CGImage! = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent)
            let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right)

            self.capturedImage.image = image
        } else {
            print("some error here")
        }
    }

    // This method you can use somewhere you need to know camera permission state
    func askPermission() {
        print("here")
        let cameraPermissionStatus = AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo)

        switch cameraPermissionStatus {
        case .authorized:
            print("Already Authorized")
        case .denied:
            print("denied")

            let alert = UIAlertController(title: "Sorry :(", message: "But could you please grant permission for camera within device settings", preferredStyle: .alert)
            let action = UIAlertAction(title: "Ok", style: .cancel, handler: nil)
            alert.addAction(action)
            present(alert, animated: true, completion: nil)
        case .restricted:
            print("restricted")
        default:
            AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo) {
                [weak self]
                (granted: Bool) -> Void in

                if granted == true {
                    // User granted
                    print("User granted")
                    DispatchQueue.main.async() {
                        // Do smth that you need in main thread
                    }
                } else {
                    // User Rejected
                    print("User Rejected")
                    DispatchQueue.main.async() {
                        let alert = UIAlertController(title: "WHY?", message: "Camera it is the main feature of our application", preferredStyle: .alert)
                        let action = UIAlertAction(title: "Ok", style: .cancel, handler: nil)
                        alert.addAction(action)
                        self?.present(alert, animated: true, completion: nil)
                    }
                }
            }
        }
    }
}
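Note that the capture(_:didFinishProcessingPhotoSampleBuffer:...) callback used above was itself deprecated in iOS 11 in favor of photoOutput(_:didFinishProcessingPhoto:error:), which hands you an AVCapturePhoto instead of sample buffers. A minimal sketch of that newer callback, assuming an iOS 11+ deployment target and the same class and outlets as above:

// iOS 11+ variant of the capture callback: AVCapturePhoto replaces the raw sample buffers.
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    if let error = error {
        print("error occurred: \(error.localizedDescription)")
        return
    }
    // fileDataRepresentation() returns the encoded image data (JPEG/HEIF), ready for UIImage.
    if let data = photo.fileDataRepresentation() {
        capturedImage.image = UIImage(data: data)
    }
}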
Answer 1 (score: 7)
AVCaptureStillImageOutput being deprecated means you can keep using it in iOS 10, but there are limits: you can shoot wide color with AVCaptureStillImageOutput, though wide color is much easier with AVCapturePhotoOutput, and for RAW capture or Live Photos, AVCapturePhotoOutput is the only game in town.
If you're happy to move forward despite the deprecation, your problem isn't that outputSettings has been removed - it's still there.
Something to be aware of for beta 6 and later (though it turns out this isn't the problem here): APIs that use NSDictionary without explicit key and value types come into Swift 3 as [AnyHashable: Any], and the Foundation or CoreFoundation types you might put in the dictionary are no longer implicitly bridged to Swift types. (Some of the other questions about beta 6 dictionary conversions may point you in the right direction.)
However, I don't get any compile errors when setting outputSettings, either in your full code or when it's reduced to the essential parts of that line:
var stillImageOutput : AVCaptureStillImageOutput?
stillImageOutput = AVCaptureStillImageOutput()
stillImageOutput?.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
...the only warning I see is about the deprecation.
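As an aside on the RAW point above, RAW capture is only exposed through AVCapturePhotoOutput. A rough sketch, assuming a recent SDK where availableRawPhotoPixelFormatTypes bridges as [OSType], a hypothetical photoOutput: AVCapturePhotoOutput already added to a running session, and self conforming to AVCapturePhotoCaptureDelegate:

// Sketch: requesting a RAW (DNG) capture, which is only possible with AVCapturePhotoOutput.
if let rawFormat = photoOutput.availableRawPhotoPixelFormatTypes.first {
    let rawSettings = AVCapturePhotoSettings(rawPixelFormatType: rawFormat)
    photoOutput.capturePhoto(with: rawSettings, delegate: self)
}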
Answer 2 (score: -2)
I'm posting the Objective-C version, since Aleksey Timoshchenko's answer is the correct one. Just to help others.
@interface CameraGalleryViewController ()

@property (weak, nonatomic) IBOutlet UIView *viewCamera;
@property (weak, nonatomic) IBOutlet UICollectionView *collectionView;

@property (strong, nonatomic) AVCaptureSession *session;
@property (strong, nonatomic) AVCapturePhotoOutput *cameraOutput;
@property (strong, nonatomic) AVCaptureVideoPreviewLayer *previewLayer;

@end

@implementation CameraGalleryViewController

#pragma mark - Lifecycle
// ==================================================================================
// Lifecycle

- (void)viewDidLoad {
    [super viewDidLoad];
    [self.viewModel viewModelDidLoad];
}

- (void)viewWillAppear:(BOOL)animated {
    [super viewWillAppear:animated];
}

- (void)viewDidAppear:(BOOL)animated {
    [super viewDidAppear:animated];
    [self initVars];
}

- (void)viewWillTransitionToSize:(CGSize)size withTransitionCoordinator:(id<UIViewControllerTransitionCoordinator>)coordinator {
    [super viewWillTransitionToSize:size withTransitionCoordinator:coordinator];
    [coordinator animateAlongsideTransition:^(id<UIViewControllerTransitionCoordinatorContext> _Nonnull context) {
    } completion:^(id<UIViewControllerTransitionCoordinatorContext> _Nonnull context) {
        [self changeOrientation];
    }];
}

#pragma mark - IBActions
// ==================================================================================
// IBActions

- (IBAction)takePhoto:(UIButton *)sender {
    AVCapturePhotoSettings *settings = [[AVCapturePhotoSettings alloc] init];

    NSNumber *previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.firstObject;
    NSString *formatTypeKey = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSString *widthKey = (NSString *)kCVPixelBufferWidthKey;
    NSString *heightKey = (NSString *)kCVPixelBufferHeightKey;

    NSDictionary *previewFormat = @{formatTypeKey: previewPixelType,
                                    widthKey: @1024,
                                    heightKey: @768};

    settings.previewPhotoFormat = previewFormat;
    [self.cameraOutput capturePhotoWithSettings:settings delegate:self];
}

#pragma mark - Public methods
// ==================================================================================
// Public methods

- (void)setupView {
    [self.collectionView reloadData];
}

#pragma mark - Private methods
// ==================================================================================
// Private methods

- (void)initVars {
    [self.collectionView registerNib:[CameraGalleryViewCell cellNib] forCellWithReuseIdentifier:[CameraGalleryViewCell cellId]];
    self.collectionView.dataSource = self;
    self.collectionView.delegate = self;

    self.session = [[AVCaptureSession alloc] init];
    [self.session setSessionPreset:AVCaptureSessionPresetPhoto];
    self.cameraOutput = [[AVCapturePhotoOutput alloc] init];

    AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error;
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error];

    if ([self.session canAddInput:deviceInput]) {
        [self.session addInput:deviceInput];
        if ([self.session canAddOutput:self.cameraOutput]) {
            [self.session addOutput:self.cameraOutput];

            self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
            [self.previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
            self.previewLayer.frame = CGRectMake(0, 0, self.view.bounds.size.width, self.viewCamera.bounds.size.height);
            [self.viewCamera.layer addSublayer:self.previewLayer];

            [self changeOrientation];
            [self.session startRunning];
        }
    }
}

- (void)changeOrientation {
    UIInterfaceOrientation orientation = [UIApplication sharedApplication].statusBarOrientation;
    CGRect size = [UIScreen mainScreen].bounds;

    if (size.size.height > size.size.width) {
        if (orientation == UIInterfaceOrientationPortrait) {
            self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
        } else {
            self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown;
        }
    } else {
        if (orientation == UIInterfaceOrientationLandscapeRight) {
            self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
        } else {
            self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft;
        }
    }
}

#pragma mark - CollectionView delegate
// ==================================================================================
// CollectionView delegate

- (NSInteger)collectionView:(UICollectionView *)collectionView numberOfItemsInSection:(NSInteger)section {
    NSInteger numItems = [self.viewModel imageListCount];
    self.collectionView.hidden = !(numItems > 0);
    return numItems;
}

- (UICollectionViewCell *)collectionView:(UICollectionView *)collectionView cellForItemAtIndexPath:(NSIndexPath *)indexPath {
    CameraGalleryViewCell *cell = [collectionView dequeueReusableCellWithReuseIdentifier:[CameraGalleryViewCell cellId] forIndexPath:indexPath];
    [cell imageForImageView:[self.viewModel imageFromListWithIndex:indexPath.row]];
    return cell;
}

#pragma mark - Camera delegate
// ==================================================================================
// Camera delegate

- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhotoSampleBuffer:(CMSampleBufferRef)photoSampleBuffer previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(AVCaptureBracketedStillImageSettings *)bracketSettings error:(NSError *)error {
    if (error) {
        return;
    }

    if (photoSampleBuffer && previewPhotoSampleBuffer) {
        NSData *imageData = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer previewPhotoSampleBuffer:previewPhotoSampleBuffer];
        [self.viewModel addImageToListAndRefresh:[UIImage imageWithData:imageData]];
    }
}

@end