Camera app freezes during a phone call

Asked: 2018-02-20 04:50:47

Tags: ios swift avcapturesession avcapturedevice

I have a bug in my camera app. If you open the app while on a phone call, the entire app freezes. I tried using the AVCaptureSessionWasInterrupted and AVCaptureSessionInterruptionEnded notifications to manage the audio input during phone calls, but had no luck solving the problem. When I comment out the audio input setup, the app no longer freezes during a call, so I'm fairly confident the problem lies somewhere in the audio handling.
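
For reference, the interruption notification's userInfo carries a reason code, so the handler can tell a phone call (the audio device being claimed by another client) apart from other interruptions. This is a minimal sketch of reading it, assuming the same session and notificationQueue as in the code below; what to actually do for each case is left open:

NotificationCenter.default.addObserver(forName: Notification.Name.AVCaptureSessionWasInterrupted, object: session, queue: notificationQueue) { notification in
    // The reason arrives as a raw integer under AVCaptureSessionInterruptionReasonKey
    if let reasonValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as? Int,
        let reason = AVCaptureSession.InterruptionReason(rawValue: reasonValue) {
        switch reason {
        case .audioDeviceInUseByAnotherClient:
            // A phone call (or another app) has taken the audio device;
            // only the audio input is affected in this case
            print("audio device in use by another client")
        default:
            print("session interrupted: \(reason)")
        }
    }
}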

Why does the app freeze during a phone call, and how can I fix it?

Thanks in advance!

Relevant code:

import UIKit
import AVFoundation
import Photos

class CameraManager: NSObject {
    static let shared = CameraManager()

    private let notificationQueue = OperationQueue.main

    var delegate: CameraManagerDelegate? = nil

    let session = AVCaptureSession()
    var captureDeviceInput: AVCaptureDeviceInput? = nil
    var audioInput: AVCaptureDeviceInput? = nil
    let photoOutput = AVCapturePhotoOutput()
    let videoOutput = AVCaptureMovieFileOutput()

    var isRecording: Bool {
        return videoOutput.isRecording
    }

    func getCurrentVideoCaptureDevice() throws -> AVCaptureDevice {
        guard let device = self.captureDeviceInput?.device else {
            throw CameraManagerError.missingCaptureDeviceInput
        }
        return device
    }

    func getZoomFactor() throws -> CGFloat {
        return try getCurrentVideoCaptureDevice().videoZoomFactor
    }

    func getMaxZoomFactor() throws -> CGFloat {
        return try getCurrentVideoCaptureDevice().activeFormat.videoMaxZoomFactor
    }

    override init() {
        super.init()

        NotificationCenter.default.addObserver(forName: Notification.Name.UIApplicationDidBecomeActive, object: nil, queue: notificationQueue) { [unowned self] (notification) in
            self.session.startRunning()
            try? self.setupCamera()
            try? self.setZoomLevel(zoomLevel: 1.0)

            if Settings.shared.autoRecord {
                try? self.startRecording()
            }
        }

        NotificationCenter.default.addObserver(forName: Notification.Name.UIApplicationWillResignActive, object: nil, queue: notificationQueue) { [unowned self] (notification) in
            self.stopRecording()
            self.session.stopRunning()
        }

        NotificationCenter.default.addObserver(forName: Notification.Name.AVCaptureSessionWasInterrupted, object: nil, queue: notificationQueue) { [unowned self] (notification) in
            if let audioInput = self.audioInput {
                self.session.removeInput(audioInput)
            }
        }

        NotificationCenter.default.addObserver(forName: Notification.Name.AVCaptureSessionInterruptionEnded, object: nil, queue: notificationQueue) { [unowned self] (notification) in
            try? self.setupAudio()
        }

        try? self.setupSession()
    }

    func setupSession() throws {
        session.sessionPreset = .high

        if !session.isRunning {
            session.startRunning()
        }

        if Utils.checkPermissions() {
            try setupInputs()
            setupOutputs()
        }
    }

    func setupInputs() throws {
        try setupCamera()
        try setupAudio()
    }

    func setupCamera() throws {
        do {
            try setCamera(position: Settings.shared.defaultCamera)
        } catch CameraManagerError.unableToFindCaptureDevice(let position) {
            //some devices don't have a front camera, so try the back for setup
            if position == .front {
                try setCamera(position: .back)
            }
        }
    }

    func setupAudio() throws {
        if let audioInput = self.audioInput {
            self.session.removeInput(audioInput)
        }

        guard let audioDevice = AVCaptureDevice.default(for: .audio) else {
            throw CameraManagerError.unableToGetAudioDevice
        }

        let audioInput = try AVCaptureDeviceInput(device: audioDevice)

        if session.canAddInput(audioInput) {
            session.addInput(audioInput)
            self.audioInput = audioInput
        } else {
            self.delegate?.unableToAddAudioInput()
        }
    }

    func setupOutputs() {
        self.photoOutput.isHighResolutionCaptureEnabled = true
        guard session.canAddOutput(self.photoOutput) else {
            //error
            return
        }

        session.addOutput(self.photoOutput)

        guard session.canAddOutput(self.videoOutput) else {
            //error
            return
        }
        session.addOutput(self.videoOutput)
    }

    func startRecording() throws {
        if !self.videoOutput.isRecording {
            let documentDirectory = try FileManager.default.url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: false)
            let url = documentDirectory.appendingPathComponent(UUID().uuidString + ".mov")

            self.videoOutput.startRecording(to: url, recordingDelegate: self)
        }
    }

    func stopRecording() {
        if self.videoOutput.isRecording {
            self.videoOutput.stopRecording()
        }
    }

    func setZoomLevel(zoomLevel: CGFloat) throws {
        guard let captureDevice = self.captureDeviceInput?.device else {
            throw CameraManagerError.missingCaptureDevice
        }

        try captureDevice.lockForConfiguration()
        captureDevice.videoZoomFactor = zoomLevel
        captureDevice.unlockForConfiguration()
    }

    func capturePhoto() {
        let photoOutputSettings = AVCapturePhotoSettings()
        photoOutputSettings.flashMode = Settings.shared.flash
        photoOutputSettings.isAutoStillImageStabilizationEnabled = true
        photoOutputSettings.isHighResolutionPhotoEnabled = true

        self.photoOutput.capturePhoto(with: photoOutputSettings, delegate: self)
    }

    func toggleCamera() throws {
        if let captureDeviceInput = self.captureDeviceInput,
            captureDeviceInput.device.position == .back {
            try setCamera(position: .front)
        } else {
            try setCamera(position: .back)
        }
    }

    func setCamera(position: AVCaptureDevice.Position) throws {
        if let captureDeviceInput = self.captureDeviceInput {
            if captureDeviceInput.device.position == position {
                return
            } else {
                session.removeInput(captureDeviceInput)
            }
        }

        var device: AVCaptureDevice? = nil

        switch position {
        case .front:
            device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
        default:
            device = AVCaptureDevice.default(for: .video)
        }

        guard let nonNilDevice = device else {
            throw CameraManagerError.unableToFindCaptureDevice(position)
        }

        try nonNilDevice.lockForConfiguration()

        if nonNilDevice.isFocusModeSupported(.continuousAutoFocus) {
            nonNilDevice.focusMode = .continuousAutoFocus
        }

        if nonNilDevice.isExposureModeSupported(.continuousAutoExposure) {
            nonNilDevice.exposureMode = .continuousAutoExposure
        }

        nonNilDevice.unlockForConfiguration()

        let input = try AVCaptureDeviceInput(device: nonNilDevice)

        guard session.canAddInput(input) else {
            throw CameraManagerError.unableToAddCaptureDeviceInput
        }

        session.addInput(input)

        self.captureDeviceInput = input
    }

    func setFocus(point: CGPoint) throws {
        guard let device = self.captureDeviceInput?.device else {
            throw CameraManagerError.missingCaptureDeviceInput
        }

        guard device.isFocusPointOfInterestSupported && device.isFocusModeSupported(.autoFocus) else {
            throw CameraManagerError.notSupportedByDevice
        }

        try device.lockForConfiguration()

        device.focusPointOfInterest = point
        device.focusMode = .autoFocus

        device.unlockForConfiguration()
    }

    func setExposure(point: CGPoint) throws {
        guard let device = self.captureDeviceInput?.device else {
            throw CameraManagerError.missingCaptureDeviceInput
        }

        guard device.isExposurePointOfInterestSupported && device.isExposureModeSupported(.autoExpose) else {
            throw CameraManagerError.notSupportedByDevice
        }

        try device.lockForConfiguration()

        device.exposurePointOfInterest = point
        device.exposureMode = .autoExpose

        device.unlockForConfiguration()
    }
}

extension CameraManager: AVCapturePhotoCaptureDelegate {

    func photoOutput(_ output: AVCapturePhotoOutput, willCapturePhotoFor resolvedSettings: AVCaptureResolvedPhotoSettings) {
        self.delegate?.cameraManagerWillCapturePhoto()
    }

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard let imageData = photo.fileDataRepresentation() else {
            //error
            return
        }

        let capturedImage = UIImage(data: imageData, scale: 1.0)
        if let image = capturedImage {
            UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
        }

        self.delegate?.cameraManagerDidFinishProcessingPhoto()
    }
}

extension CameraManager: AVCaptureFileOutputRecordingDelegate {

    func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
        self.delegate?.cameraManagerDidStartRecording()
    }

    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {

        self.delegate?.cameraManagerDidFinishRecording()

        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputFileURL)
        }) { saved, error in
            if saved {
                do {
                    try FileManager.default.removeItem(at: outputFileURL)
                } catch _ as NSError {
                    //error
                }
            }
        }
    }
}

0 Answers:

No answers yet.