iOS Swift - Custom Camera Overlay

Date: 2015-12-30 18:55:29

Tags: ios swift camera

Hi, I want to open the camera in my app.

I only want to open the camera in the middle section of the screen, so that the user can only take a photo inside the rectangular area.

The code I am using is this:

import UIKit
import AVFoundation

class TakeProductPhotoController: UIViewController {

    let captureSession = AVCaptureSession()
    var previewLayer : AVCaptureVideoPreviewLayer?

    // If we find a device we'll store it here for later use
    var captureDevice : AVCaptureDevice?

    override func viewDidLoad() {
        super.viewDidLoad()

        // Do any additional setup after loading the view, typically from a nib.
        captureSession.sessionPreset = AVCaptureSessionPresetHigh

        let devices = AVCaptureDevice.devices()

        // Loop through all the capture devices on this phone
        for device in devices {
            // Make sure this particular device supports video
            if (device.hasMediaType(AVMediaTypeVideo)) {
                // Finally check the position and confirm we've got the back camera
                if(device.position == AVCaptureDevicePosition.Back) {
                    captureDevice = device as? AVCaptureDevice
                    if captureDevice != nil {
                        print("Capture device found")
                        beginSession()
                    }
                }
            }
        }

    }
    func updateDeviceSettings(focusValue: Float, isoValue: Float) {
        if let device = captureDevice {
            do {
                try device.lockForConfiguration()
            } catch let error as NSError {
                print("lockForConfiguration failed: \(error.localizedDescription)")
                return
            }

            // Lock the lens at the requested position (0.0 ... 1.0)
            device.setFocusModeLockedWithLensPosition(focusValue, completionHandler: nil)

            // Clamp the ISO between minISO and maxISO of the active format
            let minISO = device.activeFormat.minISO
            let maxISO = device.activeFormat.maxISO
            let clampedISO = isoValue * (maxISO - minISO) + minISO

            device.setExposureModeCustomWithDuration(AVCaptureExposureDurationCurrent, ISO: clampedISO, completionHandler: nil)

            device.unlockForConfiguration()
        }
    }

    func touchPercent(touch : UITouch) -> CGPoint {
        // Get the dimensions of the screen in points
        let screenSize = UIScreen.mainScreen().bounds.size

        // Create an empty CGPoint object set to 0, 0
        var touchPer = CGPointZero

        // Set the x and y values to be the value of the tapped position, divided by the width/height of the screen
        touchPer.x = touch.locationInView(self.view).x / screenSize.width
        touchPer.y = touch.locationInView(self.view).y / screenSize.height

        // Return the populated CGPoint
        return touchPer
    }

    func focusTo(value: Float) {
        if let device = captureDevice {
            do {
                try device.lockForConfiguration()
            } catch let error as NSError {
                print("lockForConfiguration failed: \(error.localizedDescription)")
                return
            }

            device.setFocusModeLockedWithLensPosition(value, completionHandler: nil)
            device.unlockForConfiguration()
        }
    }

    let screenWidth = UIScreen.mainScreen().bounds.size.width

    override func touchesBegan(touches: Set<UITouch>, withEvent event: UIEvent?) {
        if let touch = touches.first {
            let touchPer = touchPercent(touch)
            updateDeviceSettings(Float(touchPer.x), isoValue: Float(touchPer.y))
        }
        super.touchesBegan(touches, withEvent: event)
    }

    override func touchesMoved(touches: Set<UITouch>, withEvent event: UIEvent?) {
        if let touch = touches.first {
            let touchPer = touchPercent(touch)
            updateDeviceSettings(Float(touchPer.x), isoValue: Float(touchPer.y))
        }
        super.touchesMoved(touches, withEvent: event)
    }

    func configureDevice() {
        if let device = captureDevice {
            do {
                try device.lockForConfiguration()
            } catch let error as NSError {
                print("lockForConfiguration failed: \(error.localizedDescription)")
                return
            }

            device.focusMode = .Locked
            device.unlockForConfiguration()
        }
    }

    func beginSession() {
        configureDevice()

        let deviceInput: AVCaptureDeviceInput
        do {
            deviceInput = try AVCaptureDeviceInput(device: captureDevice)
        } catch let error as NSError {
            print("error: \(error.localizedDescription)")
            return
        }

        captureSession.addInput(deviceInput)

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)

        self.view.layer.addSublayer(previewLayer!)
        previewLayer?.frame = self.view.layer.frame
        captureSession.startRunning()
    }
}

With this code, the camera preview covers the entire screen.

1 Answer:

Answer 0 (score: 10):

If you want to start the camera inside a custom UIView, you need to change the AVCaptureVideoPreviewLayer. You can change its bounds and its position, and you can also add a mask to it.
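For example, here is a minimal sketch of clipping the preview layer to a rectangle with a CAShapeLayer mask. The rectangle values below are made up for illustration; use whatever frame your layout actually needs.

    // Sketch: show only a rectangular window of the camera preview.
    // maskRect is an assumed example value, not taken from the question.
    let maskRect = CGRectMake(20, 150, self.view.bounds.width - 40, 200)

    let maskLayer = CAShapeLayer()
    maskLayer.path = UIBezierPath(rect: maskRect).CGPath

    // Everything outside maskRect becomes transparent; the layer keeps its full frame.
    previewLayer?.mask = maskLayer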

Coming to your problem, the capture layer is shown full screen because you have:

 previewLayer?.frame = self.view.layer.frame

Change this line to use the overlay view's frame:

  previewLayer?.frame = self.overLayView.layer.frame 

Alternatively, if you want to position the camera layer manually with raw values:

  previewLayer?.frame = CGRectMake(x,y,width,height)

Also note that if you want to start the camera inside an overlay view, you need to add the preview layer as a sublayer of that overlay view.

So this line:

     self.view.layer.addSublayer(previewLayer!)

becomes:

    self.overLayView.layer.addSublayer(previewLayer!)
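Putting both changes together, here is a minimal sketch of how beginSession could end up, assuming overLayView is an outlet to the rectangular view in your storyboard (the name comes from the snippets above). Note that once the layer is a sublayer of overLayView, its frame is expressed in that view's own coordinate space, so overLayView.bounds is used instead of its frame:

    func beginSession() {
        configureDevice()

        let deviceInput: AVCaptureDeviceInput
        do {
            deviceInput = try AVCaptureDeviceInput(device: captureDevice)
        } catch let error as NSError {
            print("error: \(error.localizedDescription)")
            return
        }
        captureSession.addInput(deviceInput)

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)

        // Add the preview to the overlay view instead of the root view,
        // and size it to that view's bounds so the camera fills only the rectangle.
        self.overLayView.layer.addSublayer(previewLayer!)
        previewLayer?.frame = self.overLayView.bounds

        captureSession.startRunning()
    }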

To stretch / fit the preview layer:

    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)

    let bounds = cameraView.layer.frame
    previewLayer!.videoGravity = AVLayerVideoGravityResizeAspectFill
    previewLayer!.bounds = bounds
    previewLayer!.position = CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds))

    self.view.layer.addSublayer(previewLayer!)
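With this setup the layer is centered on cameraView, and AVLayerVideoGravityResizeAspectFill scales the video to fill that rectangle while preserving its aspect ratio, cropping whatever does not fit. Keep in mind this only affects the on-screen preview; a photo captured afterwards still contains the full camera frame, so you would crop the image after capture if the saved photo should also be limited to the rectangle.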