我按照 Apple 文档的建议,把捕获会话(capture session)以及其他相机相关的设置代码放到了后台队列上,并尝试把相机会话嵌入 collectionView 的单元格内。我不确定自己的做法是否正确,因为在单元格之间滑动时性能仍然不理想。请问如何在 collectionView 中正确地实现相机会话?
重要
startRunning()方法是一个阻塞调用,它可能要花费一些时间,因此您应该在串行队列上执行会话设置,以使主队列不被阻塞(使UI保持响应)。有关实现示例,请参见AVCam-iOS:使用AVFoundation捕获图像和电影。 AVCapture Session Documentation
放在后台线程上的代码
// Serial queue so the blocking startRunning() call never stalls the main queue.
//attempt to speed things up
let backgroundWorker2 = DispatchQueue(label:"BackgroundWorker2",qos: .userInteractive)
// Runs the entire camera pipeline on the background queue as the cell loads.
// NOTE(review): setupPreviewLayer() inserts a CALayer into the view hierarchy,
// and UIKit/CALayer work must happen on the main thread — doing it here is a
// likely cause of the reported problems. Only session configuration and
// startRunning() belong on this queue.
override func awakeFromNib() {
super.awakeFromNib()
self.backgroundWorker2.async {
self.setupCaptureSession()
self.setupDevice()
self.setupInput()
self.setupPreviewLayer()
self.startRunningCaptureSession()
}
}
完整的代码
import UIKit
import AVFoundation
class MainCameraCollectionViewCell: UICollectionViewCell {

    // NOTE(review): running one AVCaptureSession per cell is very expensive and is
    // the likely cause of the poor swiping performance — consider one shared
    // session/preview layer that is re-parented into whichever cell is on screen.

    var captureSession = AVCaptureSession()
    private var sessionQueue: DispatchQueue!   // unused in the visible code; kept for compatibility
    var captureConnection = AVCaptureConnection()
    var backCamera: AVCaptureDevice?
    var frontCamera: AVCaptureDevice?
    var currentCamera: AVCaptureDevice?
    var photoOutPut: AVCapturePhotoOutput?
    var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
    var image: UIImage?
    var usingFrontCamera = false

    /// Serial queue that owns session configuration and start/stop, so the
    /// blocking `startRunning()` call never stalls the main queue.
    let backgroundWorker2 = DispatchQueue(label: "BackgroundWorker2", qos: .userInteractive)

    override func awakeFromNib() {
        super.awakeFromNib()
        backgroundWorker2.async {
            // Session configuration is safe off the main thread.
            self.setupCaptureSession()
            self.setupDevice()
            self.setupInput()
            // The preview layer is part of the view hierarchy: it must be
            // created/inserted on the main thread. Then hop back to the
            // session queue for the blocking startRunning() call.
            DispatchQueue.main.async {
                self.setupPreviewLayer()
                self.backgroundWorker2.async {
                    self.startRunningCaptureSession()
                }
            }
        }
    }

    /// Configures the session for full-resolution still-photo capture.
    func setupCaptureSession() {
        captureSession.sessionPreset = .photo
    }

    /// Selects the wide-angle camera matching `usingFrontCamera`.
    /// Fix: the original loop could overwrite an already-found front camera
    /// with the back camera (result depended on device enumeration order).
    func setupDevice(usingFrontCamera: Bool = false) {
        let discovery = AVCaptureDevice.DiscoverySession(
            deviceTypes: [.builtInWideAngleCamera],
            mediaType: .video,
            position: .unspecified)
        let wanted: AVCaptureDevice.Position = usingFrontCamera ? .front : .back
        if let device = discovery.devices.first(where: { $0.position == wanted }) {
            currentCamera = device
        }
    }

    /// Attaches the selected camera as input and an `AVCapturePhotoOutput` to
    /// the session. Safe to call from the session queue.
    func setupInput() {
        // Fix: no force-unwrap — bail out cleanly if setupDevice() found nothing
        // (e.g. Simulator, or camera permission denied).
        guard let camera = currentCamera else {
            print("setupInput: no camera available — was setupDevice() called?")
            return
        }
        do {
            let input = try AVCaptureDeviceInput(device: camera)
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
                print("input added")
            }
            let output = AVCapturePhotoOutput()
            if captureSession.canAddOutput(output) {
                captureSession.addOutput(output)
                print("output added")
            }
            // Fix: prepare settings AFTER the output has joined the session,
            // so it has a connection to allocate resources against.
            output.setPreparedPhotoSettingsArray(
                [AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])],
                completionHandler: nil)
            photoOutPut = output
        } catch {
            print(error)
        }
    }

    /// Builds and installs the preview layer. CALayer/UIKit work: must run on
    /// the main thread (the fixed awakeFromNib dispatches it there).
    func setupPreviewLayer() {
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.videoGravity = .resizeAspectFill
        previewLayer.connection?.videoOrientation = .portrait
        // NOTE(review): sized to the screen to match the original code; if the
        // cell is smaller than the screen, `self.bounds` is probably intended.
        previewLayer.frame = CGRect(x: 0, y: 0,
                                    width: UIScreen.main.bounds.width,
                                    height: UIScreen.main.bounds.height)
        layer.insertSublayer(previewLayer, at: 0)
        cameraPreviewLayer = previewLayer
    }

    /// Starts the session. `startRunning()` blocks — call only from
    /// `backgroundWorker2`, never from the main queue.
    func startRunningCaptureSession() {
        captureSession.startRunning()
    }

    @IBAction func cameraButton_TouchUpInside(_ sender: Any) {
        print("Camera button tapped")
        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        settings.isAutoStillImageStabilizationEnabled = true
        // Fix: no force-unwrap of the preview connection's orientation.
        if let photoOutputConnection = photoOutPut?.connection(with: .video),
           let previewOrientation = cameraPreviewLayer?.connection?.videoOrientation {
            photoOutputConnection.videoOrientation = previewOrientation
        }
        // TODO(review): nothing is actually captured here — a call such as
        // photoOutPut?.capturePhoto(with: settings, delegate: ...) with a
        // delegate conforming to AVCapturePhotoCaptureDelegate is missing.
    }
}