自定义条形码扫描仪,点击以聚焦

时间:2019-01-23 18:13:57

标签: ios swift barcode-scanner

在这里参考这些问题: How to implement tap to focus on barcode scanner app using swift?

Set Camera Focus On Tap Point With Swift

https://stackoverflow.com/a/41796603/8272698

上面的链接很旧而且过时。我试图使用上面提供的答案,但无济于事...下面是我对它们的尝试。

读取条形码时,我需要点击屏幕以实现对视图中对象的聚焦。

这是我的代码尝试

var captureDevice: AVCaptureDevice? //capture device Is this right?

override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    let screenSize = videoPreviewLayer!.bounds.size
    if let touchPoint = touches.first {
        let x = touchPoint.location(in: self.view).y / screenSize.height
        let y = 1.0 - touchPoint.location(in: self.view).x / screenSize.width
        let focusPoint = CGPoint(x: x, y: y)

        if let device = captureDevice {
            do {
                try device.lockForConfiguration()

                device.focusPointOfInterest = focusPoint
                //device.focusMode = .continuousAutoFocus
                device.focusMode = .autoFocus
                //device.focusMode = .locked
                device.exposurePointOfInterest = focusPoint
                device.exposureMode = AVCaptureDevice.ExposureMode.continuousAutoExposure
                device.unlockForConfiguration()
            }
            catch {
                // just ignore
            }
        }
    }
}

此代码不起作用,因为当我点击时没有发生聚焦。

这是我其余的相机代码。

import UIKit
import AVFoundation


class BarcodeScanVC: UIViewController {

    struct GlobalVariable{
        static var senderTags = 0
    }



    var captureSession = AVCaptureSession()
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var qrCodeFrameView: UIView?
    var row = 0
    var senderTag = 0

    var waybillData: String = ""
    var diagnosticErrorCodeData: String = ""
    var hddSerialNumberData: String = ""


    var scanRectView: UIView?
    var delegate: BarcodeScanDelegate?
    var captureDevice: AVCaptureDevice?


    private let supportedCodeTypes = [AVMetadataObject.ObjectType.upce,
                                      AVMetadataObject.ObjectType.code39,
                                      AVMetadataObject.ObjectType.code39Mod43,
                                      AVMetadataObject.ObjectType.code93,
                                      AVMetadataObject.ObjectType.code128,
                                      AVMetadataObject.ObjectType.ean8,
                                      AVMetadataObject.ObjectType.ean13,
                                      AVMetadataObject.ObjectType.aztec,
                                      AVMetadataObject.ObjectType.pdf417,
                                      AVMetadataObject.ObjectType.itf14,
                                      AVMetadataObject.ObjectType.dataMatrix,
                                      AVMetadataObject.ObjectType.interleaved2of5,
                                      AVMetadataObject.ObjectType.qr]

    override func viewDidLoad() {
        super.viewDidLoad()

        // Get the back-facing camera for capturing videos
        //let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera], mediaType: AVMediaType.video, position: .back)
        let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back)

        guard let captureDevice = deviceDiscoverySession.devices.first else {
            print("Failed to get the camera device")
            return
        }

        do {
            // Get an instance of the AVCaptureDeviceInput class using the previous device object.
            let input = try AVCaptureDeviceInput(device: captureDevice)

            // Set the input device on the capture session.
            captureSession.addInput(input)

            // Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
            let captureMetadataOutput = AVCaptureMetadataOutput()
            captureSession.addOutput(captureMetadataOutput)

            // Set delegate and use the default dispatch queue to execute the call back
            captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            captureMetadataOutput.metadataObjectTypes = supportedCodeTypes
            //            captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]

        } catch {
            // If any error occurs, simply print it out and don't continue any more.
            print(error)
            return
        }

        captureSession.commitConfiguration()
        // Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
        videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
        videoPreviewLayer?.frame = view.layer.bounds
        //videoPreviewLayer?.frame

//        let height: CGFloat = ((videoPreviewLayer?.frame.size.width)!)/2
//        let width: CGFloat = ((videoPreviewLayer?.frame.size.width)!)/2

        let height: CGFloat = (view.frame.size.height)/2
        let width: CGFloat = (view.frame.size.width) - 200
        let path = UIBezierPath()
        //Corner1
        path.move(to: CGPoint(x: 5, y: 50))
        path.addLine(to: CGPoint(x: 5, y: 5))
        path.addLine(to: CGPoint(x: 50, y: 5))
        //Corner2
        path.move(to: CGPoint(x: height - 55, y: 5))
        path.addLine(to: CGPoint(x: height - 5, y: 5))
        path.addLine(to: CGPoint(x: height - 5, y: 55))
        //Corner3
        path.move(to: CGPoint(x: 5, y: width - 55))
        path.addLine(to: CGPoint(x: 5, y: width - 5))
        path.addLine(to: CGPoint(x: 55, y: width - 5))
        //Corner4 -bottom right
        path.move(to: CGPoint(x: height - 5, y: width - 55))
        path.addLine(to: CGPoint(x: height - 5, y: width - 5))
        path.addLine(to: CGPoint(x: height - 55, y: width - 5))
        let shape = CAShapeLayer()
        shape.path = path.cgPath
        shape.strokeColor = UIColor.white.cgColor
        shape.lineWidth = 5
        shape.frame.origin.x = 20
        shape.frame.origin.y = 180
        shape.fillColor = UIColor.clear.cgColor
        videoPreviewLayer?.addSublayer(shape)
        view.layer.addSublayer(videoPreviewLayer!)
        //videoPreviewLayer?.anchorPoint.centerXAnchor.constraint(equalTo: view.centerXAnchor).isActive = true
        //view.layer.addSublayer(scanRectView)

        // Start video capture.
        captureSession.startRunning()

        // Move the message label and top bar to the front
        //view.bringSubview(toFront: messageLabel)
        //view.bringSubview(toFront: topbar)

        // Initialize QR Code Frame to highlight the QR code
        qrCodeFrameView = UIView()

        if let qrCodeFrameView = qrCodeFrameView {
            qrCodeFrameView.layer.borderColor = UIColor.green.cgColor
            qrCodeFrameView.layer.borderWidth = 2
            view.addSubview(qrCodeFrameView)
            view.bringSubviewToFront(qrCodeFrameView)
        }
    }

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
        let screenSize = videoPreviewLayer!.bounds.size
        if let touchPoint = touches.first {
            let x = touchPoint.location(in: self.view).y / screenSize.height
            let y = 1.0 - touchPoint.location(in: self.view).x / screenSize.width
            let focusPoint = CGPoint(x: x, y: y)

            if let device = captureDevice {
                do {
                    try device.lockForConfiguration()

                    device.focusPointOfInterest = focusPoint
                    //device.focusMode = .continuousAutoFocus
                    device.focusMode = .autoFocus
                    //device.focusMode = .locked
                    device.exposurePointOfInterest = focusPoint
                    device.exposureMode = AVCaptureDevice.ExposureMode.continuousAutoExposure
                    device.unlockForConfiguration()
                }
                catch {
                    // just ignore
                }
            }
        }
    }


    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    func launchApp(barcodeScan: String) {

//        if presentedViewController != nil {
//            return
//        }

        guard presentedViewController == nil else {
            return
        }


        let alertPrompt = UIAlertController(title: "Barcode Found", message: "\(barcodeScan)", preferredStyle: .actionSheet)
        let confirmAction = UIAlertAction(title: "Confirm", style: UIAlertAction.Style.default, handler: { (action) -> Void in

            if self.senderTag == 1 {
                GlobalVariable.senderTags = 1
                self.delegate?.didScan(barcodeData: barcodeScan)
                self.navigationController?.popViewController(animated: true)
            }
            if self.senderTag == 2 {
                GlobalVariable.senderTags = 2
                self.delegate?.didScan(barcodeData: barcodeScan)
                self.navigationController?.popViewController(animated: true)
            }
            if self.senderTag == 3 {
                GlobalVariable.senderTags = 3
                self.delegate?.didScan(barcodeData: barcodeScan)
                self.navigationController?.popViewController(animated: true)
            }
            if self.senderTag != 1 && self.senderTag != 2 && self.senderTag != 3  {
                let indexPath = IndexPath(row: self.row, section: 0)
                let cell: PartsOrderRequestTableCell = globalPartsOrderRequestTableVC?.tableView.cellForRow(at: indexPath) as! PartsOrderRequestTableCell
                cell.diagnosticCodeLabel.text = barcodeScan
                cell.diagnosticCodeLabel.endEditing(true)

                self.navigationController?.popViewController(animated: true)
                //return
            }
        })

        let cancelAction = UIAlertAction(title: "Cancel", style: UIAlertAction.Style.cancel, handler: nil)

        alertPrompt.addAction(confirmAction)
        alertPrompt.addAction(cancelAction)

        present(alertPrompt, animated: true, completion: nil)
    }

}

extension BarcodeScanVC: AVCaptureMetadataOutputObjectsDelegate {

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        // Check if the metadataObjects array is not nil and it contains at least one object.
        if metadataObjects.count == 0 {
            qrCodeFrameView?.frame = CGRect.zero
            //messageLabel.text = "No QR code is detected"
            return
        }

        // Get the metadata object.
        let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject

        if supportedCodeTypes.contains(metadataObj.type) {
            // If the found metadata is equal to the QR code metadata (or barcode) then update the status label's text and set the bounds
            let barCodeObject = videoPreviewLayer?.transformedMetadataObject(for: metadataObj)
            qrCodeFrameView?.frame = barCodeObject!.bounds

            if metadataObj.stringValue != nil {
                launchApp(barcodeScan: metadataObj.stringValue!)
                //messageLabel.text = metadataObj.stringValue
            }
        }
    }

    private func updatePreviewLayer(layer: AVCaptureConnection, orientation: AVCaptureVideoOrientation) {

        layer.videoOrientation = orientation

        videoPreviewLayer?.frame = self.view.bounds

    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()

        if let connection =  self.videoPreviewLayer?.connection  {

            let currentDevice: UIDevice = UIDevice.current

            let orientation: UIDeviceOrientation = currentDevice.orientation

            let previewLayerConnection : AVCaptureConnection = connection

            if previewLayerConnection.isVideoOrientationSupported {

                switch (orientation) {
                case .portrait: updatePreviewLayer(layer: previewLayerConnection, orientation: .portrait)

                    break

                case .landscapeRight: updatePreviewLayer(layer: previewLayerConnection, orientation: .landscapeLeft)

                    break

                case .landscapeLeft: updatePreviewLayer(layer: previewLayerConnection, orientation: .landscapeRight)

                    break

                case .portraitUpsideDown: updatePreviewLayer(layer: previewLayerConnection, orientation: .portraitUpsideDown)

                    break

                default: updatePreviewLayer(layer: previewLayerConnection, orientation: .portrait)

                    break
                }
            }
        }
    }
}

我缺少明显的东西吗? 是否有一个简单的答案,而无需更改我已经拥有的许多代码?

谢谢!

1 个答案:

答案 0 :(得分:1)

您在正确的轨道上。陷入

if let device = captureDevice

captureDevice始终为零。您是在viewDidLoad中本地设置的,但是touchesBegan方法无法访问它。

guard let captureDevice = deviceDiscoverySession.devices.first else {
    print("Failed to get the camera device")
    return
}

从保护程序中更改代码,让CaptureDevice进行以下操作:

captureDevice = deviceDiscoverySession.devices.first

,当您使用captureDevice时,请在需要时测试nil。

编辑:

override func viewDidLoad() {
    super.viewDidLoad()

    // Get the back-facing camera for capturing videos
    //let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera], mediaType: AVMediaType.video, position: .back)
    let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back)

    captureDevice = deviceDiscoverySession.devices.first

    if let captureDevice = captureDevice {
        do {
            // Get an instance of the AVCaptureDeviceInput class using the previous device object.
            let input = try AVCaptureDeviceInput(device: captureDevice)

            // Set the input device on the capture session.
            captureSession.addInput(input)

            // Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
            let captureMetadataOutput = AVCaptureMetadataOutput()
            captureSession.addOutput(captureMetadataOutput)

            // Set delegate and use the default dispatch queue to execute the call back
            captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            captureMetadataOutput.metadataObjectTypes = supportedCodeTypes
            //            captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]

        } catch {
            // If any error occurs, simply print it out and don't continue any more.
            print(error)
            return
        }
    }
    ..... Method cut short as no other changes.