如何将消息从captureOutput发送到其他uiviewcontroller

时间:2019-06-05 05:00:48

标签: swift xcode10 capture-output

我正在尝试进行“眨眼”捕获,以引起用户的注意并对其进行处理。我设法检测到用户何时眨眼,但是没能在两个类之间传递消息。第1类捕捉眨眼,而第2类需要接收该消息并进行处理。在第1类中,我捕获实时视频并分析结果,然后需要将结果发送到第2类:

第1类:

import AVFoundation

/// Captures live front-camera video, feeds face rectangles to the Dlib
/// wrapper, and reports a "blink" event once it has been seen on enough
/// consecutive frames.
///
/// The presenting view controller assigns `onBlink` to receive that event
/// (this is the standard way to send a message from a capture delegate to
/// another object without coupling the two classes).
class SessionHandler: NSObject,
                      AVCaptureVideoDataOutputSampleBufferDelegate,
                      AVCaptureMetadataOutputObjectsDelegate {
    var session = AVCaptureSession()
    let layer = AVSampleBufferDisplayLayer()
    // Separate serial queues so video frames and face metadata callbacks
    // never block each other.
    let sampleQueue = DispatchQueue(label: "com.zweigraf.DisplayLiveSamples.sampleQueue", attributes: [])
    let faceQueue = DispatchQueue(label: "com.zweigraf.DisplayLiveSamples.faceQueue", attributes: [])
    let wrapper = DlibWrapper()
    // Set by `wrapper.doWork` (inout) to indicate a blink on the current frame.
    var message: DarwinBoolean!
    // Latest face-metadata objects, written on `faceQueue`, read on `sampleQueue`.
    var currentMetadata: [AnyObject]
    // Number of consecutive frames on which a blink has been reported.
    var count = 0

    /// Called on `sampleQueue` once a blink has persisted for 4 consecutive
    /// frames. Assign from the owning view controller; hop to the main queue
    /// inside the closure before touching UI.
    var onBlink: (() -> Void)?

    override init() {
        currentMetadata = []
        super.init()
    }

    /// Configures the capture session (front camera, BGRA video frames,
    /// face metadata) and starts it running.
    func openSession() {
        self.message = false

        // Previously force-unwrapped; return gracefully when no front camera
        // exists (e.g. on the Simulator) instead of crashing.
        guard let device = AVCaptureDevice.devices(for: AVMediaType.video)
            .first(where: { $0.position == .front }) else {
            print("SessionHandler: no front camera available")
            return
        }

        guard let input = try? AVCaptureDeviceInput(device: device) else {
            print("SessionHandler: could not create camera input")
            return
        }

        let output = AVCaptureVideoDataOutput()
        output.setSampleBufferDelegate(self, queue: sampleQueue)

        let metaOutput = AVCaptureMetadataOutput()
        metaOutput.setMetadataObjectsDelegate(self, queue: faceQueue)

        session.beginConfiguration()

        if session.canAddInput(input) {
            session.addInput(input)
        }
        if session.canAddOutput(output) {
            session.addOutput(output)
        }
        if session.canAddOutput(metaOutput) {
            session.addOutput(metaOutput)
        }

        session.commitConfiguration()

        // Dlib expects 32-bit BGRA pixel buffers. Built directly as
        // [String: Any] instead of the original double cast through AnyHashable.
        output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
        // Metadata types may only be set AFTER the output is added to a session.
        metaOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.face]

        wrapper?.prepare()

        session.startRunning()
    }

    // MARK: AVCaptureVideoDataOutputSampleBufferDelegate

    // Original code was missing the closing parenthesis of this signature,
    // which is a compile error — fixed here.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

        if !currentMetadata.isEmpty {
            // Convert each face's metadata rect into the output's coordinate
            // space. `compactMap` drops faces whose conversion fails instead
            // of force-unwrapping and crashing.
            let boundsArray = currentMetadata
                .compactMap { $0 as? AVMetadataFaceObject }
                .compactMap { output.transformedMetadataObject(for: $0, connection: connection) }
                .map { NSValue(cgRect: $0.bounds) }

            wrapper?.doWork(on: sampleBuffer, inRects: boundsArray, message: &message)
            if message == true {
                count += 1
                if count >= 4 {
                    // Blink confirmed on 4 consecutive frames: notify the
                    // listener (e.g. the view controller) and start over so
                    // one blink produces one event.
                    onBlink?()
                    count = 0
                }
            } else {
                count = 0
            }
        }

        layer.enqueue(sampleBuffer)
    }

    func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        connection.videoOrientation = AVCaptureVideoOrientation.portrait
        print("DidDropSampleBuffer")
    }

    // MARK: AVCaptureMetadataOutputObjectsDelegate

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        currentMetadata = metadataObjects as [AnyObject]
    }
}

第2类:

import UIKit
import AVFoundation

/// Hosts the live camera preview produced by `SessionHandler`, embedding its
/// sample-buffer display layer inside the `preview` outlet view.
class DlibSwift: UIViewController {
    let sessionHandler = SessionHandler()
    var message: DarwinBoolean!

    @IBOutlet weak var preview: UIView!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    override func viewDidAppear(_ animated: Bool) {
        // Call super FIRST, per UIKit convention (original called it after
        // mutating state).
        super.viewDidAppear(animated)
        message = false

        sessionHandler.openSession()

        let layer = sessionHandler.layer
        layer.frame = preview.bounds
        // viewDidAppear can fire multiple times; only install the preview
        // layer once.
        if layer.superlayer == nil {
            preview.layer.addSublayer(layer)
        }
        view.layoutIfNeeded()
    }
}

我尝试在网上搜索,但没有成功。我也尝试了 @escaping 闭包,但视频预览就不显示了。我真的不知道该怎么办了。

0 个答案:

没有答案
相关问题