Problem displaying my image from the first VC in the second VC! Instead, the second VC turns into a new camera view

Asked: 2019-05-11 20:48:39

Tags: ios swift camera

When I try to display the image that was taken in the first VC in the second VC, the second VC instead turns into a camera view just like the first one, which is not what I want. I also get this error:

reason: '*** -[AVCaptureSession addInput:] Multiple audio/video AVCaptureInputs are not currently supported'
*** First throw call stack:
libc++abi.dylib: terminating with uncaught exception of type NSException
(lldb)
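The symptom (the "photo" screen behaving exactly like the camera screen) suggests the "PhotoVC" storyboard identifier may actually resolve to the camera controller's class rather than Viewcontroller2. A quick diagnostic sketch of that check, reusing the identifier and class name from the code below (the assert itself is hypothetical, not part of the project):

let vc = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "PhotoVC")
assert(vc is Viewcontroller2, "PhotoVC resolves to \(type(of: vc)), not Viewcontroller2")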

I have already tried swapping in new camera code, but I still hit the same problem, so I went through older questions as well as a few YouTube tutorials, but that did not help either.

FirstViewController

import UIKit
import AVFoundation

// NOTE: this class must conform to AVCaptureVideoDataOutputSampleBufferDelegate
// for the sample-buffer callback below to be delivered.
let captureSession = AVCaptureSession()
var previewLayer: CALayer!
var captureDevice: AVCaptureDevice!
var takePhoto = false

override func viewDidLoad() {
    super.viewDidLoad()
     prepareCamera()
}

override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)

}


func prepareCamera() {
    captureSession.sessionPreset = AVCaptureSession.Preset.photo

    let availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back).devices
    // Guard against devices with no back camera (e.g. the simulator).
    guard let device = availableDevices.first else { return }
    captureDevice = device
    beginSession()


}

func beginSession() {
    do {
        let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)

        // Only add the input if the session will accept it; adding a second
        // camera input raises the "Multiple audio/video AVCaptureInputs are
        // not currently supported" exception from the question.
        if captureSession.canAddInput(captureDeviceInput) {
            captureSession.addInput(captureDeviceInput)
        }
    } catch {
        print(error.localizedDescription)
    }


    let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    self.previewLayer = previewLayer
    self.view.layer.addSublayer(self.previewLayer)
    self.previewLayer.frame = self.view.layer.frame

    let dataOutput = AVCaptureVideoDataOutput()
    dataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA)]
    dataOutput.alwaysDiscardsLateVideoFrames = true

    // Pair commitConfiguration() with beginConfiguration(); the original code
    // committed a configuration block it never began.
    captureSession.beginConfiguration()
    if captureSession.canAddOutput(dataOutput) {
        captureSession.addOutput(dataOutput)
    }
    captureSession.commitConfiguration()

    let queue = DispatchQueue(label: "com.brianadvent.captureQueue")
    dataOutput.setSampleBufferDelegate(self, queue: queue)

    // Start the session only after inputs and outputs are configured.
    captureSession.startRunning()

}

@IBAction func takePhoto(_ sender: Any) {
    takePhoto = true

}

// Swift 4+ delegate signature; the old captureOutput(_:didOutputSampleBuffer:from:)
// form is never called by AVCaptureVideoDataOutput.
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

    if takePhoto {
        takePhoto = false

        if let image = self.getImageFromSampleBuffer(buffer: sampleBuffer) {

            let photoVC = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "PhotoVC") as! Viewcontroller2

            photoVC.takenPhoto = image

            DispatchQueue.main.async {
                self.present(photoVC, animated: true, completion: {
                    self.stopCaptureSession()
                })

            }
        }


    }
}


// Convert a raw video sample buffer into a UIImage via Core Image.
func getImageFromSampleBuffer(buffer: CMSampleBuffer) -> UIImage? {
    if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        let context = CIContext()

        let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))

        if let image = context.createCGImage(ciImage, from: imageRect) {
            return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .right)
        }

    }

    return nil
}

// Stop the session and remove its inputs so the camera is released.
func stopCaptureSession() {
    self.captureSession.stopRunning()

    if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
        for input in inputs {
            self.captureSession.removeInput(input)
        }
    }

}
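Since stopCaptureSession() removes the camera input when the photo screen is presented, coming back to this controller leaves the session without an input, and running prepareCamera() a second time is exactly what would trigger the addInput exception. A minimal sketch of re-adding the input at most once, replacing the empty viewWillAppear above (my assumption, not part of the original code):

override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    // Re-add the camera input only if the session has none; a second
    // input is what raises the addInput exception.
    if captureSession.inputs.isEmpty, let device = captureDevice,
       let input = try? AVCaptureDeviceInput(device: device) {
        captureSession.addInput(input)
        captureSession.startRunning()
    }
}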

SecondViewController

import UIKit

var takenPhoto: UIImage?

@IBOutlet weak var imageView: UIImageView!

override func viewDidLoad() {
    super.viewDidLoad()

    if let availableImage = takenPhoto {
        imageView.image = availableImage
    }


}


@IBAction func goBack(_ sender: Any) {

    self.dismiss(animated: true, completion: nil)

}
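Worth noting: takenPhoto is only read once, in viewDidLoad, so it has to be assigned before the controller is presented, as the first VC does. A defensive variant using a property observer, sketched here as an alternative to the plain declaration above (not the original code):

var takenPhoto: UIImage? {
    didSet {
        // Update the image view if the photo is assigned after the view loads.
        if isViewLoaded {
            imageView.image = takenPhoto
        }
    }
}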

0 Answers:

There are no answers.