I am working with ARFaceTrackingConfiguration to detect faces, and that part is easy to do with the Vision framework. Now I want to detect whether the user is looking left, right, up, or down, and display something on screen based on that direction. The problem is that I can't figure out how to detect this!

What have I tried so far?
extension FaceDetectionViewController: ARSCNViewDelegate {
    // Implement ARSCNViewDelegate functions for things like error tracking.
    func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
        doFaceScan()
        if let faceAnchor = anchor as? ARFaceAnchor {
            DispatchQueue.main.async {
                // These values change as I move the camera, so I can't build the detection logic on them.
                let position = faceAnchor.transform.position()
                self.lblX.text = "X: \(node.position.x)----Y: \(node.position.y)"
                self.lblY.text = "X: \(position.x)----Y: \(position.y)"
            }
        }
        // The same problem appears here too.
        DispatchQueue.main.async {
            self.lblX.text = " \(node.eulerAngles.x) "
            self.lblY.text = " \(node.eulerAngles.y) "
        }
    }
}
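
I suspect the values above change because node.position and node.eulerAngles are in world coordinates, so they move together with the device. Here is a minimal sketch of what I think should fix that (untested; the helper name headPoseRelativeToCamera and the pitch/yaw interpretation of the angles are my assumptions): express the face anchor's transform relative to the camera before reading the angles.

import ARKit
import SceneKit

// Sketch: head pose relative to the camera, independent of device movement.
func headPoseRelativeToCamera(faceAnchor: ARFaceAnchor, frame: ARFrame) -> (pitch: Float, yaw: Float) {
    // Both transforms are in world space; multiplying by the camera's
    // inverse expresses the face in the camera's coordinate space.
    let relative = simd_mul(simd_inverse(frame.camera.transform), faceAnchor.transform)
    let node = SCNNode()
    node.simdTransform = relative
    // Assumption: eulerAngles.x ≈ pitch (up/down), eulerAngles.y ≈ yaw
    // (left/right); thresholds on these would classify the head direction.
    return (node.eulerAngles.x, node.eulerAngles.y)
}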
extension matrix_float4x4 {
    func position() -> SCNVector3 {
        return SCNVector3(columns.3.x, columns.3.y, columns.3.z)
    }
}
@objc
private func doFaceScan() {
    // Get the captured image of the ARSession's current frame.
    guard let capturedImage = sceneView.session.currentFrame?.capturedImage else { return }
    let image = CIImage(cvPixelBuffer: capturedImage)
    let detectFaceRequest = VNDetectFaceRectanglesRequest { (request, error) in
        DispatchQueue.main.async {
            // Loop through the resulting faces and add a red UIView on top of each one.
            if let faces = request.results as? [VNFaceObservation] {
                for face in faces {
                    self.faceFrame(from: face.boundingBox)
                }
            }
        }
    }
    try? VNImageRequestHandler(ciImage: image, orientation: self.imageOrientation).perform([detectFaceRequest])
}
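
VNDetectFaceRectanglesRequest only gives me a bounding box, which says nothing about gaze. One idea I'm considering (a sketch only; the GazeDirection enum, the threshold value, and the left/right sign mapping are unverified assumptions): on iOS 12+, ARFaceAnchor exposes lookAtPoint, a point in face coordinate space estimating where the eyes are focused, and its x/y signs should give a coarse left/right/up/down classification.

import ARKit

enum GazeDirection { case left, right, up, down, center }

func gazeDirection(for faceAnchor: ARFaceAnchor, threshold: Float = 0.1) -> GazeDirection {
    // lookAtPoint is in face coordinate space; its x/y components indicate
    // the horizontal/vertical offset of the estimated gaze target.
    let look = faceAnchor.lookAtPoint
    if abs(look.x) > abs(look.y) {
        guard abs(look.x) > threshold else { return .center }
        // Assumption: positive x maps to "right"; the front camera mirrors
        // the image, so this may need flipping in practice.
        return look.x > 0 ? .right : .left
    } else {
        guard abs(look.y) > threshold else { return .center }
        return look.y > 0 ? .up : .down
    }
}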
Can anyone help?