我正在与ARKit一起使用,以扫描面部并通过每隔1秒钟调用func scanForFaces()
来获取ARSession的当前帧,然后仅通过裁剪图像来裁剪该图像以使其面向面部,但是我正在裁剪图像从图像的顶部而不是从具有图像的面部
private func scanForFaces() {
//remove the test views and empty the array that was keeping a reference to them
_ = scannedFaceViews.map { $0.removeFromSuperview() }
scannedFaceViews.removeAll()
//get the captured image of the ARSession's current frame
guard let capturedImage = sceneView.session.currentFrame?.capturedImage else { return }
let image = CIImage.init(cvPixelBuffer: capturedImage)
let cimage = self.convertCIImageToCGImage(inputImage: image)
let detectFaceRequest = VNDetectFaceRectanglesRequest { (request, error) in
DispatchQueue.main.async {
//Loop through the resulting faces and add a red UIView on top of them.
if let faces = request.results as? [VNFaceObservation] {
for face in faces {
let faceView = UIView(frame: self.faceFrame(from: face.boundingBox))
faceView.backgroundColor = .red
self.sceneView.addSubview(faceView)
let rotatedimage1 = UIImage(cgImage: cimage!).rotate(radians: .pi/2).fixImageOrientation()
var frame = self.faceFrame(from: faceView.bounds)
frame.origin = faceView.frame.origin
let imagefix = self.crop(imageToCrop: rotatedimage1!, toRect: frame)
print(frame)
let abcview = self.storyboard?.instantiateViewController(withIdentifier: "MyViewController") as! MyViewController
abcview.image = imagefix
self.navigationController?.pushViewController(abcview, animated: true)
self.scannedFaceViews.append(faceView)
}
}
}
}
DispatchQueue.global().async {
try? VNImageRequestHandler(ciImage: image, orientation: self.imageOrientation).perform([detectFaceRequest])
}
}
private func faceFrame(from boundingBox: CGRect) -> CGRect {
//translate camera frame to frame inside the ARSKView
let origin = CGPoint(x: boundingBox.minX * sceneView.bounds.width, y: (1 - boundingBox.maxY) * sceneView.bounds.height)
let size = CGSize(width: boundingBox.width * sceneView.bounds.width, height: boundingBox.height * sceneView.bounds.height)
return CGRect(origin: origin, size: size)
}
}
extension FaceDetectionViewController: ARSCNViewDelegate {
func cropImage(image: CGImage, toRect: CGRect) -> UIImage? {
// Cropping is available trhough CGGraphics
let cgImage :CGImage! = image
let croppedCGImage: CGImage! = cgImage.cropping(to: toRect)
return UIImage(cgImage: croppedCGImage)
}
func convertCIImageToCGImage(inputImage: CIImage) -> CGImage? {
let context = CIContext(options: nil)
if let cgImage = context.createCGImage(inputImage, from: inputImage.extent) {
return cgImage
}
return nil
}
func crop(imageToCrop:UIImage, toRect rect:CGRect) -> UIImage{
let imageRef:CGImage = imageToCrop.cgImage!.cropping(to: rect)!
let cropped:UIImage = UIImage(cgImage:imageRef)
return cropped
}
//implement ARSCNViewDelegate functions for things like error tracking
}
extension UIImage {
func rotate(radians: CGFloat) -> UIImage {
let rotatedSize = CGRect(origin: .zero, size: size)
.applying(CGAffineTransform(rotationAngle: CGFloat(radians)))
.integral.size
UIGraphicsBeginImageContext(rotatedSize)
if let context = UIGraphicsGetCurrentContext() {
let origin = CGPoint(x: rotatedSize.width / 2.0,
y: rotatedSize.height / 2.0)
context.translateBy(x: origin.x, y: origin.y)
context.rotate(by: radians)
draw(in: CGRect(x: -origin.x, y: -origin.y,
width: size.width, height: size.height))
let rotatedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
return rotatedImage ?? self
}
return self
}