当我尝试运行摄像机编码时,当前不支持多个音频/视频AVCaptureInputs

时间:2019-05-10 22:10:38

标签: ios swift camera

当我尝试运行摄像机编码时,会收到以下错误消息

  

“ 2019-05-09 23:15:48.446844 + 0200 testing2 [514:31963]由于未捕获的异常'NSInvalidArgumentException'终止了应用程序,原因:' *-[AVCaptureSession addInput:]多个音频/视频AVCaptureInputs目前不受支持”   * 第一个调用堆栈:   libc ++ abi.dylib:以类型为NSException的未捕获异常终止   (lldb)“

我找不到问题出在哪里,并尝试使用断点来了解但没有解决。

还尝试过使用info.plist解决方案,因此它与此无关。

let captureSession = AVCaptureSession()

var previewLayer:CALayer!

var captureDevice:AVCaptureDevice!

var takePhoto = false

override func viewDidLoad() {
    super.viewDidLoad()
}

override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    prepareCamera()
}


func prepareCamera() {
    captureSession.sessionPreset = AVCaptureSession.Preset.photo

    let availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back).devices
    captureDevice = availableDevices.first
    beginSession()


}

func beginSession () {
    do {
        let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)

        captureSession.addInput(captureDeviceInput)

    }catch {
        print(error.localizedDescription)
    }


    let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    self.previewLayer = previewLayer
    self.view.layer.addSublayer(self.previewLayer)
    self.previewLayer.frame = self.view.layer.frame
    captureSession.startRunning()

    let dataOutput = AVCaptureVideoDataOutput()
    dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString):NSNumber(value:kCVPixelFormatType_32BGRA)] as [String : Any]

    dataOutput.alwaysDiscardsLateVideoFrames = true

    if captureSession.canAddOutput(dataOutput) {
        captureSession.addOutput(dataOutput)
    }

    captureSession.commitConfiguration()


    let queue = DispatchQueue(label: "com.brianadvent.captureQueue")
    dataOutput.setSampleBufferDelegate(self, queue: queue)





}

@IBAction func takePhoto(_ sender: Any) {
    takePhoto = true

}

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {

    if takePhoto {
        takePhoto = false

        if let image = self.getImageFromSampleBuffer(buffer: sampleBuffer) {

            let photoVC =  UIStoryboard(name: "Main", bundle:  nil).instantiateViewController(withIdentifier: "PhotoVC") as! Viewcontroller2

            photoVC.takenPhoto = image

            DispatchQueue.main.async {
                self.present(photoVC, animated: true, completion: {
                    self.stopCaptureSession()
                })

            }
        }


    }
}


func getImageFromSampleBuffer (buffer:CMSampleBuffer) -> UIImage? {
    if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        let context = CIContext()

        let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))

        if let image = context.createCGImage(ciImage, from: imageRect) {
            return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .right)
        }

    }

    return nil
}

func stopCaptureSession () {
    self.captureSession.stopRunning()

    if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
        for input in inputs {
            self.captureSession.removeInput(input)
        }
    }

}


override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()

}



















struct Constants {
    static let apiKey = "AIzaSyDtaJ5eU24rbnHsG9pb1STOizDJvqcaj5E"
    static let bundleId = "com.felibundle"
    static let searchEngineId = "016628067786358079133:2gm9usqzouc"
}
@IBAction func pish(_ sender: Any) {
    googleSearch(term: "George Bush") { results in
        print(results)}
}
func googleSearch(term: String, callback:@escaping ([(title: String, url: String)]?) -> Void) {
    let urlString = String(format: "https://cse.google.com/cse?cx=016628067786358079133:2gm9usqzouc", term, Constants.searchEngineId, Constants.apiKey)
    let encodedUrl = urlString.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed)

    guard let url = URL(string: encodedUrl ?? urlString) else {
        print("invalid url \(urlString)")
        return
    }

    let request = NSMutableURLRequest(url: url, cachePolicy: .useProtocolCachePolicy, timeoutInterval: 10)
    request.httpMethod = "GET"
    request.setValue(Constants.bundleId, forHTTPHeaderField: "X-Ios-Bundle-Identifier")

    let session = URLSession.shared
    let datatask = session.dataTask(with: request as URLRequest) { (data, response, error) in
        guard
            error == nil,
            let data = data,
            let json = try? JSONSerialization.jsonObject(with: data, options: .allowFragments) as? [String : Any]
            else {

                callback(nil)
                return
        }

        guard let items = json?["items"] as? [[String : Any]], items.count > 0 else {
            print("no results")
            return
        }

        callback(items.map { ($0["title"] as! String, $0["formattedUrl"] as! String) })
    }


    datatask.resume()
}

2 个答案:

答案 0 :(得分:0)

可能是这个

override func viewWillAppear(_ animated: Bool) {
   super.viewWillAppear(animated)
   prepareCamera()
}
在当前vc上方显示一个vc并将其关闭时,会多次调用

,因此可以将其放在viewDidLoad

答案 1 :(得分:0)

这应该可以解决问题。...

if captureSession.inputs.isEmpty {
    self.captureSession.addInput(deviceInput)
}