> REQ: Swift refactoring / code layout / review

Date: 2015-04-24 05:47:34

Tags: swift

I've made a mess of my translation from Obj-C to Swift, so I would really appreciate a refactoring/code-layout review. The curly braces are really throwing me. Are there any Xcode plugins or other tools that could help me manage my code blocks better?

Some of my functions and calculations are probably also inefficient, so suggestions in those areas would be welcome too. For example, if you use or have seen a better filtering algorithm, I'd like to hear about it.
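For reference, this is the kind of thing I mean by a simple band-pass filter. It is only a rough sketch, not my actual `Filter` class; the moving-average approach and the window size of 10 are assumptions for illustration:

    // A minimal moving-average filter sketch: subtracting the running mean
    // removes the DC component of the signal (window size of 10 is assumed)
    class MovingAverageFilter {
        var window: [Float] = []
        let size = 10

        func processValue(value: Float) -> Float {
            window.append(value)
            if window.count > size {
                window.removeAtIndex(0)
            }
            let mean = window.reduce(0, combine: +) / Float(window.count)
            return value - mean
        }
    }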

P.S. Thanks, Martin.

import UIKit
import Foundation
import AVFoundation
import CoreMedia
import CoreVideo

let minFramesForFilterToSettle = 10 // skip the first frames while the filter output stabilises


class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    let captureSession = AVCaptureSession()
    // If we find a suitable device we'll store it here for later use
    var captureDevice: AVCaptureDevice?

    var validFrameCounter: Int = 0
    var detector: Detector!
    var filter: Filter!

    // the two states the app can be in while running
    enum CurrentState {
        case statePaused
        case stateSampling
    }
    var currentState = CurrentState.statePaused

    override func viewDidLoad() {
        super.viewDidLoad()
        self.detector = Detector()
        self.filter = Filter()

        // sessionPreset is configured in beginSession(), so there's no need
        // to set AVCaptureSessionPresetHigh here only to override it later

        // Loop through all the capture devices on this phone
        for device in AVCaptureDevice.devices() {
            // Make sure this particular device supports video
            if device.hasMediaType(AVMediaTypeVideo) {
                // Check the position and confirm we've got the front camera
                if device.position == AVCaptureDevicePosition.Front {
                    if let videoDevice = device as? AVCaptureDevice {
                        captureDevice = videoDevice
                        beginSession()
                    }
                }
            }
        }
    } // end of viewDidLoad



    // configure the capture device; the focus mode is currently unused
    func configureDevice() {
        if let device = captureDevice {
            device.lockForConfiguration(nil)
            //device.focusMode = .Locked
            device.unlockForConfiguration()
        }
    }


    // start capturing frames
    func beginSession() {
        configureDevice()

        // Create an AVCaptureDeviceInput with the camera device
        var err: NSError? = nil
        let deviceInput = AVCaptureDeviceInput.deviceInputWithDevice(captureDevice, error: &err) as? AVCaptureDeviceInput
        if deviceInput == nil {
            println("error: \(err?.localizedDescription)")
            return
        }

        // Switch the torch on at full intensity
        if captureDevice!.hasTorch {
            // lock the device for configuration
            captureDevice!.lockForConfiguration(nil)
            // setting the level to 1.0 also turns the torch on, so there's
            // no need to toggle torchMode separately first
            captureDevice!.setTorchModeOnWithLevel(1.0, error: nil)
            // unlock the device
            captureDevice!.unlockForConfiguration()
        }

        // Set up the video output
        let videoOutput = AVCaptureVideoDataOutput()

        // create a queue to run the capture on
        let captureQueue: dispatch_queue_t = dispatch_queue_create("captureQueue", nil)

        // set ourself up as the capture delegate
        videoOutput.setSampleBufferDelegate(self, queue: captureQueue)

        // configure the pixel format (kCVPixelBufferPixelFormatTypeKey is a CFString)
        videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_32BGRA)]

        // cap the frame rate at 10 fps (a minimum frame *duration* of 1/10 s);
        // the device must be locked while changing this
        captureDevice!.lockForConfiguration(nil)
        captureDevice!.activeVideoMinFrameDuration = CMTimeMake(1, 10)
        captureDevice!.unlockForConfiguration()

        // and the size of the frames we want - we'll use the smallest frame size available
        captureSession.sessionPreset = AVCaptureSessionPresetLow

        // Add the input and output (once each -- adding the input a second
        // time, as the earlier version did, makes the session throw)
        captureSession.addInput(deviceInput)
        captureSession.addOutput(videoOutput)

        // Start the session
        captureSession.startRunning()

        // we're now sampling from the camera; setState also stops
        // the app from sleeping
        setState(CurrentState.stateSampling)

        // update our UI on a timer every 0.1 seconds
        // (assumes an update() method exists elsewhere in this class)
        NSTimer.scheduledTimerWithTimeInterval(0.1, target: self, selector: Selector("update"), userInfo: nil, repeats: true)
    } // end of beginSession -- the state enum and the functions below used to
      // be nested inside here, which is what tangled the braces

    func setState(state: CurrentState) {
        currentState = state
        switch state {
        case .statePaused:
            // let the application go to sleep if the phone is idle
            UIApplication.sharedApplication().idleTimerDisabled = false
        case .stateSampling:
            // stop the app from sleeping
            UIApplication.sharedApplication().idleTimerDisabled = true
        }
    }

    func stopCameraCapture() {
        // captureSession is a let constant, so it can't be set to nil here;
        // stopping the session is enough
        captureSession.stopRunning()
    }


    // MARK: Pause and Resume of detection

    func pause() {
        if currentState == CurrentState.statePaused {
            return
        }

        // switch off the torch
        if captureDevice!.isTorchModeSupported(AVCaptureTorchMode.On) {
            captureDevice!.lockForConfiguration(nil)
            captureDevice!.torchMode = AVCaptureTorchMode.Off
            captureDevice!.unlockForConfiguration()
        }
        // pausing also re-enables the idle timer (see setState)
        setState(CurrentState.statePaused)
    }

    func resume() {
        if currentState != CurrentState.statePaused {
            return
        }

        // switch on the torch
        if captureDevice!.isTorchModeSupported(AVCaptureTorchMode.On) {
            captureDevice!.lockForConfiguration(nil)
            captureDevice!.torchMode = AVCaptureTorchMode.On
            captureDevice!.unlockForConfiguration()
        }
        // resuming also disables the idle timer (see setState)
        setState(CurrentState.stateSampling)
    }

    // beginning of paste

    // r,g,b values are from 0 to 1; h = [0,360], s = [0,1], v = [0,1]
    // if s == 0, then h = -1 (undefined)
    func RGBtoHSV(r: Float, g: Float, b: Float, inout h: Float, inout s: Float, inout v: Float) {
        let rgbMin = min(r, g, b)
        let rgbMax = max(r, g, b)
        let delta = rgbMax - rgbMin

        v = rgbMax

        if rgbMax == 0 {
            // r = g = b = 0: black, so saturation is 0 and hue is undefined
            // (checking before dividing avoids the division by zero the
            // original s = delta/rgbMax line could hit)
            s = 0
            h = -1
            return
        }
        s = delta / rgbMax

        if delta == 0 {
            // grey: saturation is 0, so hue is undefined
            h = -1
            return
        }

        if r == rgbMax {
            h = (g - b) / delta         // between yellow and magenta
        } else if g == rgbMax {
            h = 2 + (b - r) / delta     // between cyan and yellow
        } else {
            h = 4 + (r - g) / delta     // between magenta and cyan
        }
        h *= 60                         // convert to degrees (was "h = 60",
                                        // which overwrote the computed hue)
        if h < 0 {
            h += 360
        }
    }
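    // A quick sanity check for RGBtoHSV (hypothetical values, shown only
    // to illustrate the expected output of the conversion above):
    //
    //   var h = Float(), s = Float(), v = Float()
    //   RGBtoHSV(1.0, g: 0.0, b: 0.0, h: &h, s: &s, v: &v)
    //   // pure red -> h == 0, s == 1, v == 1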



    // process each frame of video
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        // if we're paused don't do anything
        if currentState == CurrentState.statePaused {
            // reset our frame counter
            self.validFrameCounter = 0
            return
        }

        // this is the image buffer
        let cvimgRef: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)
        // Lock the image buffer while we read from it
        CVPixelBufferLockBaseAddress(cvimgRef, 0)
        // access the data
        let width = CVPixelBufferGetWidth(cvimgRef)
        let height = CVPixelBufferGetHeight(cvimgRef)
        // get the raw image bytes
        var buf = UnsafeMutablePointer<UInt8>(CVPixelBufferGetBaseAddress(cvimgRef))
        let bprow = CVPixelBufferGetBytesPerRow(cvimgRef)

        // sum each channel over the whole frame; the format is BGRA,
        // so byte 0 is blue, byte 1 is green and byte 2 is red
        var r = 0, g = 0, b = 0
        for var y = 0; y < height; y++ {
            for var x = 0; x < width * 4; x += 4 {
                b += Int(buf[x])
                g += Int(buf[x + 1])
                r += Int(buf[x + 2])
            }
            // step to the next row (rows may be padded, hence bytes-per-row)
            buf = buf + bprow
        }
        // unlock the buffer once we're done reading it
        CVPixelBufferUnlockBaseAddress(cvimgRef, 0)

        // average and normalise to 0...1 in floating point -- the original
        // integer division (r /= 255 * (width*height)) always truncated to 0
        let pixelCount = Float(width * height)
        let rAvg = Float(r) / (255 * pixelCount)
        let gAvg = Float(g) / (255 * pixelCount)
        let bAvg = Float(b) / (255 * pixelCount)

        // convert from rgb to hsv colourspace
        var h = Float()
        var s = Float()
        var v = Float()
        RGBtoHSV(rAvg, g: gAvg, b: bAvg, h: &h, s: &s, v: &v)

        // do a sanity check for blackness
        if s > 0.5 && v > 0.5 {
            // increment the valid frame count
            validFrameCounter++
            // filter the hue value - the filter is a simple band pass filter
            // that removes any DC component and any high frequency noise
            let filtered: Float = filter.processValue(h)
            // have we collected enough frames for the filter to settle?
            if validFrameCounter > minFramesForFilterToSettle {
                // add the new value to the detector
                detector.addNewValue(filtered, atTime: CACurrentMediaTime())
            }
        } else {
            validFrameCounter = 0
            // clear the detector - we only really need to do this once,
            // just before we start adding valid samples
            detector.reset()
        }
    }
} // end of ViewController -- this closing brace was missing in the original
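On the efficiency side, one idea is that neighbouring pixels are highly correlated, so the averaging loop probably doesn't need to read every pixel. A rough sketch of a subsampling version, pulled out into its own function (the stride of 8 pixels and the function shape are assumptions for illustration, not part of the code above):

    // Sketch: average the red channel of a BGRA buffer, sampling only every
    // 8th pixel per row -- an arbitrary stride chosen just for illustration
    func averageRed(buf: UnsafeMutablePointer<UInt8>, width: Int, height: Int, bytesPerRow: Int) -> Float {
        let strideBytes = 8 * 4                 // 8 pixels * 4 bytes per BGRA pixel
        var sum = 0
        var sampled = 0
        var row = buf
        for var y = 0; y < height; y++ {
            for var x = 0; x < width * 4; x += strideBytes {
                sum += Int(row[x + 2])          // red is byte 2 of each BGRA pixel
                sampled++
            }
            row = row + bytesPerRow             // rows may be padded past width*4
        }
        // normalise by the number of samples actually taken
        return Float(sum) / (255 * Float(sampled))
    }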
