Color keying a video with GPUImage on an SCNPlane in ARKit

Date: 2017-12-06 17:29:42

Tags: swift sprite-kit scenekit gpuimage arkit

I am trying to play a video with transparency in an ARSCNView. An SCNPlane serves as the projection surface for the video, and I am trying to chroma key the video with GPUImage.

I followed this example. Unfortunately, I have not found a way to get the filtered video back onto my videoSpriteKitNode: the filter renders into a GPUImageView, while an SKVideoNode expects an AVPlayer.

I am not sure whether what I am trying to do is even possible, so if anyone could share their insight I would be very grateful!

import UIKit
import ARKit
import SpriteKit
import AVFoundation
import GPUImage

class ARTransVC: UIViewController{

@IBOutlet weak var sceneView: ARSCNView!
let configuration = ARWorldTrackingConfiguration()
var movie: GPUImageMovie!
var filter: GPUImageChromaKeyBlendFilter!
var sourcePicture: GPUImagePicture!
var player = AVPlayer()
var gpuImageView: GPUImageView!


override func viewDidLoad() {
    super.viewDidLoad()
    self.sceneView.debugOptions = [ARSCNDebugOptions.showWorldOrigin, ARSCNDebugOptions.showFeaturePoints]
    self.sceneView.session.run(configuration)

    self.gpuImageView = GPUImageView()
    self.gpuImageView.translatesAutoresizingMaskIntoConstraints = false

    //a delay for ARKit to capture the surroundings
    DispatchQueue.main.asyncAfter(deadline: .now() + 3) {

        // A SpriteKit scene to contain the SpriteKit video node
        let spriteKitScene = SKScene(size: CGSize(width: self.sceneView.frame.width, height: self.sceneView.frame.height))
        spriteKitScene.scaleMode = .aspectFit

        // Create a video player, which will be responsible for the playback of the video material
        guard let url = Bundle.main.url(forResource: "FY3A4278", withExtension: "mp4") else { return }
        let playerItem = AVPlayerItem(url: url)
        self.player.replaceCurrentItem(with: playerItem)

        // chroma key filter for transparency
        self.filter = GPUImageChromaKeyBlendFilter()
        self.filter.thresholdSensitivity = 0.15
        self.filter.smoothing = 0.3
        self.filter.setColorToReplaceRed(0.322, green: 0.455, blue: 0.831)

        self.movie = GPUImageMovie(playerItem: playerItem)
        self.movie.playAtActualSpeed = true
        self.movie.addTarget(self.filter)
        self.movie.startProcessing()

        let backgroundImage = UIImage(named: "transparent.png")
        self.sourcePicture = GPUImagePicture(image: backgroundImage, smoothlyScaleOutput: true)!
        self.sourcePicture.addTarget(self.filter)
        self.sourcePicture.processImage()

        // HERE: don't know how to continue; how do I get the filtered
        // output back onto the videoSpriteKitNode instead of a GPUImageView?
        self.filter.addTarget(self.gpuImageView)


        // To make the video loop
        self.player.actionAtItemEnd = .none
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(ARTransVC.playerItemDidReachEnd),
            name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
            object:  self.player.currentItem)

        // Create the SpriteKit video node, containing the video player
        let videoSpriteKitNode = SKVideoNode(avPlayer: self.player)
        videoSpriteKitNode.position = CGPoint(x: spriteKitScene.size.width / 2.0, y: spriteKitScene.size.height / 2.0)
        videoSpriteKitNode.size = spriteKitScene.size
        videoSpriteKitNode.yScale = -1.0
        videoSpriteKitNode.play()
        spriteKitScene.addChild(videoSpriteKitNode)

        // Create the SceneKit scene
        let scene = SCNScene()
        self.sceneView.scene = scene
        self.sceneView.isPlaying = true

        // Create a SceneKit plane and add the SpriteKit scene as its material
        let background = SCNPlane(width: CGFloat(1), height: CGFloat(1))
        background.firstMaterial?.diffuse.contents = spriteKitScene
        let backgroundNode = SCNNode(geometry: background)
        backgroundNode.geometry?.firstMaterial?.isDoubleSided = true

        backgroundNode.position = SCNVector3(0,0,-2.0)
        scene.rootNode.addChildNode(backgroundNode)
    }
}

@objc func playerItemDidReachEnd(notification: NSNotification) {
    if let playerItem: AVPlayerItem = notification.object as? AVPlayerItem {
        playerItem.seek(to: kCMTimeZero, completionHandler: nil)
    }
}
}

2 Answers:

Answer 0 (score: 1)

Try clearing the background and setting the scale mode:

backgroundColor = .clear
scaleMode = .aspectFit
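
A minimal sketch of where those two lines would go, assuming the spriteKitScene setup from the question (the names are taken from the code above):

// In the question's viewDidLoad, when building the SpriteKit scene:
let spriteKitScene = SKScene(size: CGSize(width: self.sceneView.frame.width, height: self.sceneView.frame.height))
spriteKitScene.backgroundColor = .clear   // transparent instead of the default opaque gray
spriteKitScene.scaleMode = .aspectFit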

Answer 1 (score: 1)

So, I managed to chroma key the video and play it with transparency in the ARSCNView, but it is still a somewhat hacky solution.

I ditched my previous approach and implemented the ChromaKeyMaterial from Lësha Turkowski!

Here it is, adjusted to the color I want to key out:

import SceneKit

public class ChromaKeyMaterial: SCNMaterial {

public var backgroundColor: UIColor {
    didSet { didSetBackgroundColor() }
}

public var thresholdSensitivity: Float {
    didSet { didSetThresholdSensitivity() }
}

public var smoothing: Float  {
    didSet { didSetSmoothing() }
}

public init(backgroundColor: UIColor = .green, thresholdSensitivity: Float = 0.15, smoothing: Float = 0.0) {

    self.backgroundColor = backgroundColor
    self.thresholdSensitivity = thresholdSensitivity
    self.smoothing = smoothing

    super.init()

    didSetBackgroundColor()
    didSetThresholdSensitivity()
    didSetSmoothing()

    // chroma key shader is based on GPUImage
    // https://github.com/BradLarson/GPUImage/blob/master/framework/Source/GPUImageChromaKeyFilter.m

    let surfaceShader =
    """
    uniform vec3 c_colorToReplace;
    uniform float c_thresholdSensitivity;
    uniform float c_smoothing;

    #pragma transparent
    #pragma body

    // Convert the key color and the incoming texel to YCrCb and compare
    // only the chroma components (Cr, Cb), so brightness variation in
    // the keyed region does not break the matte.
    vec3 textureColor = _surface.diffuse.rgb;

    float maskY = 0.2989 * c_colorToReplace.r + 0.5866 * c_colorToReplace.g + 0.1145 * c_colorToReplace.b;
    float maskCr = 0.7132 * (c_colorToReplace.r - maskY);
    float maskCb = 0.5647 * (c_colorToReplace.b - maskY);

    float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;
    float Cr = 0.7132 * (textureColor.r - Y);
    float Cb = 0.5647 * (textureColor.b - Y);

    // blendValue ramps from 0 (chroma within the threshold, fully keyed
    // out) to 1 over the smoothing range and is used as the alpha.
    float blendValue = smoothstep(c_thresholdSensitivity, c_thresholdSensitivity + c_smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));

    float a = blendValue;
    _surface.transparent.a = a;
    """

    shaderModifiers = [
        .surface: surfaceShader,
    ]
}

required public init?(coder aDecoder: NSCoder) {
    fatalError("init(coder:) has not been implemented")
}

//setting the background color to be keyed out
//note: the hard-coded vector below overrides the `backgroundColor`
//property; the commented-out lines show how to derive it instead
private func didSetBackgroundColor() {
    //let rgb = backgroundColor.cgColor.components!.map{Float($0)}
    //let vector = SCNVector3(x: rgb[0], y: rgb[1], z: rgb[2])
    let vector = SCNVector3(x: 0.216, y: 0.357, z: 0.663)
    setValue(vector, forKey: "c_colorToReplace")
}

private func didSetSmoothing() {
    setValue(smoothing, forKey: "c_smoothing")
}

private func didSetThresholdSensitivity() {
    setValue(thresholdSensitivity, forKey: "c_thresholdSensitivity")
}
}
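
The essential usage is to hand the AVPlayer to the material's diffuse contents and put the material on the plane's geometry; a condensed sketch (the names mirror the full code below):

// Condensed usage sketch; `player` stands for the AVPlayer created below
let plane = SCNPlane(width: 1, height: 1)
let material = ChromaKeyMaterial()
material.diffuse.contents = player      // SceneKit pulls the video frames from the player
material.isDoubleSided = true
plane.materials = [material]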

Here is the code that plays the keyed video in ARKit on an SCNPlane:

import UIKit
import ARKit
import SpriteKit
import AVFoundation

class ARTransVC: UIViewController{

@IBOutlet weak var arSceneView: ARSCNView!
let configuration = ARWorldTrackingConfiguration()

private var player: AVPlayer = {
    guard let url = Bundle.main.url(forResource: "FY3A4278", withExtension: "mp4") else { fatalError() }
    return AVPlayer(url: url)
}()

override func viewDidLoad() {
    super.viewDidLoad()
    self.arSceneView.debugOptions = [ARSCNDebugOptions.showWorldOrigin, ARSCNDebugOptions.showFeaturePoints]
    self.arSceneView.session.run(configuration)

    //a delay for ARKit to capture the surroundings
    DispatchQueue.main.asyncAfter(deadline: .now() + 3) {

        // A SpriteKit scene to contain the SpriteKit video node
        let spriteKitScene = SKScene(size: CGSize(width: self.arSceneView.frame.width, height: self.arSceneView.frame.height))
        spriteKitScene.scaleMode = .aspectFit
        spriteKitScene.backgroundColor = .clear

        //Create the SpriteKit video node, containing the video player
        let videoSpriteKitNode = SKVideoNode(avPlayer: self.player)
        videoSpriteKitNode.position = CGPoint(x: spriteKitScene.size.width / 2.0, y: spriteKitScene.size.height / 2.0)
        videoSpriteKitNode.size = spriteKitScene.size
        videoSpriteKitNode.yScale = -1.0
        videoSpriteKitNode.play()
        spriteKitScene.addChild(videoSpriteKitNode)

        // To make the video loop
        self.player.actionAtItemEnd = .none
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(ARTransVC.playerItemDidReachEnd),
            name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
            object:  self.player.currentItem)

        // Create the SceneKit scene
        let scene = SCNScene()
        self.arSceneView.scene = scene

        //Create a SceneKit plane; the chroma key material (with the
        //AVPlayer as its diffuse contents) is used as its material,
        //so the SpriteKit scene above is no longer assigned to it
        let background = SCNPlane(width: CGFloat(1), height: CGFloat(1))
        let chromaKeyMaterial = ChromaKeyMaterial()
        chromaKeyMaterial.diffuse.contents = self.player
        //set double-sidedness on the chroma key material itself,
        //since assigning `materials` replaces the default material
        chromaKeyMaterial.isDoubleSided = true

        let backgroundNode = SCNNode(geometry: background)
        backgroundNode.geometry!.materials = [chromaKeyMaterial]

        backgroundNode.position = SCNVector3(0,0,-2.0)
        scene.rootNode.addChildNode(backgroundNode)

        //video does not start without delaying the player
        //playing the video before just results in [SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef)
        DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
            self.player.seek(to:CMTimeMakeWithSeconds(1, 1000))
            self.player.play()
        }
    }
}

@objc func playerItemDidReachEnd(notification: NSNotification) {
    if let playerItem: AVPlayerItem = notification.object as? AVPlayerItem {
        playerItem.seek(to: kCMTimeZero, completionHandler: nil)
    }
}
}

I was getting [SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef), which is apparently fixed in iOS 11.2. For now I ended up with a rather imperfect workaround: restarting the video after a one-second delay. A better approach would be much appreciated.
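
A possible refinement of that workaround (my own sketch, not part of the original answer): rather than a fixed one-second delay, start playback once the AVPlayerItem reports it is ready, via block-based key-value observation. The statusObservation property is hypothetical and would be added to ARTransVC to keep the observation token alive:

// Hypothetical property on ARTransVC to retain the KVO token
var statusObservation: NSKeyValueObservation?

// Replaces the DispatchQueue.main.asyncAfter(deadline: .now() + 1) workaround
statusObservation = player.currentItem?.observe(\.status, options: [.new]) { [weak self] item, _ in
    if item.status == .readyToPlay {
        self?.player.play()
    }
}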