How to add an image/subtitle overlay on top of a video (AVPlayer) in Swift 4.0

Date: 2019-01-22 10:14:15

Tags: swift video avfoundation avplayer

I'm interested in building an app that can overlay stickers/text on top of a video for a specified time period. Ideally, the UI should be simple enough that the user can drag and drop a sticker and easily change how long it stays on screen.

Right now I'm just getting started with AVFoundation / AVPlayer. What is the best way to approach this? Here is my prototype code using AVMutableVideoComposition:

My initial plan was to use AVMutableVideoComposition. My concern, though, is whether rendering will be fast enough for the user to adjust a sticker (position/duration) interactively after dragging and dropping it.
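Re-exporting on every adjustment is likely too slow for an interactive editor. A common pattern is to keep edits live during playback by attaching the sticker layer to an AVSynchronizedLayer, whose Core Animation timeline follows the player item, and to bake the overlay into the file with AVVideoCompositionCoreAnimationTool only at export time. Below is a minimal sketch of that preview path; the function name attachPreviewSticker and the player/containerView/image parameters are illustrative, not part of the original code.

import AVFoundation
import UIKit

// Live preview: drive the sticker layer from the player item's timeline,
// so position/duration edits never require an export.
func attachPreviewSticker(to player: AVPlayer, in containerView: UIView, image: UIImage,
                          start: CFTimeInterval, duration: CFTimeInterval) {
    guard let item = player.currentItem else { return }

    // Layer whose timing (beginTime, speed, timeOffset) mirrors item playback.
    let syncLayer = AVSynchronizedLayer(playerItem: item)
    syncLayer.frame = containerView.bounds

    let stickerLayer = CALayer()
    stickerLayer.contents = image.cgImage
    stickerLayer.contentsGravity = .resizeAspect
    stickerLayer.frame = CGRect(x: 20, y: 20, width: 80, height: 80)
    stickerLayer.opacity = 0   // hidden outside the chosen time window

    // Show the sticker only during [start, start + duration] in movie time.
    let show = CABasicAnimation(keyPath: "opacity")
    show.fromValue = 1.0
    show.toValue = 1.0
    // A beginTime of 0 means "now" to Core Animation; AVFoundation's constant means movie time zero.
    show.beginTime = start == 0 ? AVCoreAnimationBeginTimeAtZero : start
    show.duration = duration
    // Keep the animation attached so the window still applies after seeking.
    show.isRemovedOnCompletion = false
    stickerLayer.add(show, forKey: "visibleWindow")

    syncLayer.addSublayer(stickerLayer)
    containerView.layer.addSublayer(syncLayer)
}

With this split, dragging a sticker only moves a CALayer on screen; the slow AVAssetExportSession pass happens once, when the user is done editing.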

// Add a sticker by creating a CALayer instance and compositing it over the video
func addSticker(_ sticker: Int, at position: Int) {
    addStickerView?.removeFromSuperview()

    // Create an AVMutableComposition for editing
    let mutableComposition = getVideoComposition()

    // Create a CALayer instance and configure it
    let stickerLayer = CALayer()
    stickerLayer.contents = UIImage(named: "sticker\(sticker)")?.cgImage
    stickerLayer.contentsGravity = CALayerContentsGravity.resizeAspect
    stickerLayer.opacity = 0

    // Fade-in animation. Keep it attached and frozen at its final value after it
    // finishes: the layer's model opacity is 0, so removing the animation would
    // hide the sticker again once the one-second fade completes.
    let animation = CABasicAnimation(keyPath: "opacity")
    animation.delegate = self
    animation.fillMode = .forwards
    animation.isRemovedOnCompletion = false
    animation.duration = 1
    animation.fromValue = CGFloat(0.0)
    animation.toValue = CGFloat(1.0)
    // A beginTime of 0 means "now" to Core Animation; this constant means movie time zero.
    animation.beginTime = AVCoreAnimationBeginTimeAtZero
    stickerLayer.add(animation, forKey: "opacity")

    let videoSize = videoAsset.tracks(withMediaType: AVMediaType.video)[0].naturalSize
    let videoWidth = videoSize.width
    let videoHeight = videoSize.height
    let stickerWidth = videoWidth/6
    let stickerX = videoWidth * CGFloat(5 * (position % 3)) / 12
    let stickerY = videoHeight * CGFloat(position / 3) / 3
    stickerLayer.frame = CGRect(x: stickerX, y: stickerY, width: stickerWidth, height: stickerWidth)

    // Export the video with the sticker layer composited on top
    exportWithAddedLayer(mutableComposition, with: stickerLayer, doneEditing: false)
}

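The fade-in above always starts at movie time zero and the sticker then stays visible, but the stated goal is to show it only during a user-chosen time range. In the export path this can be expressed as an opacity animation whose beginTime is the start of the range in movie-time seconds. A sketch of a hypothetical helper (name and parameters are illustrative; it assumes the same file, with AVFoundation and UIKit imported):

// Hypothetical variant: show the sticker only during `timeRange` of the movie.
func makeTimedStickerLayer(image: UIImage, frame: CGRect, timeRange: CMTimeRange) -> CALayer {
    let layer = CALayer()
    layer.contents = image.cgImage
    layer.contentsGravity = .resizeAspect
    layer.frame = frame
    layer.opacity = 0   // hidden outside the chosen range

    let start = CMTimeGetSeconds(timeRange.start)
    let visible = CABasicAnimation(keyPath: "opacity")
    visible.fromValue = 1.0
    visible.toValue = 1.0
    // Core Animation treats beginTime == 0 as "now"; use AVFoundation's
    // constant to mean movie time zero instead.
    visible.beginTime = start == 0 ? AVCoreAnimationBeginTimeAtZero : start
    visible.duration = CMTimeGetSeconds(timeRange.duration)
    // Must survive completion so frames inside the range still show the sticker.
    visible.isRemovedOnCompletion = false
    layer.add(visible, forKey: "visibleDuringRange")
    return layer
}

The returned layer can be passed to exportWithAddedLayer(_:with:doneEditing:) in place of stickerLayer.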

func exportWithAddedLayer(_ mutableComposition: AVMutableComposition, with addedLayer: CALayer, doneEditing: Bool) {

    self.view.addSubview(processingLabel)
    processingLabel.frame = self.toolsTableView.frame

    let videoTrack: AVAssetTrack = mutableComposition.tracks(withMediaType: AVMediaType.video)[0]
    let videoSize = videoTrack.naturalSize


    let videoLayer = CALayer()
    videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)

    let containerLayer = CALayer()
    containerLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
    containerLayer.addSublayer(videoLayer)
    containerLayer.addSublayer(addedLayer)

    let layerComposition = AVMutableVideoComposition()
    layerComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    layerComposition.renderSize = videoSize
    layerComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: containerLayer)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: mutableComposition.duration)
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    instruction.layerInstructions = [layerInstruction]
    layerComposition.instructions = [instruction]

    let exportUrl = generateExportUrl()
    // Set up exporter
    guard let exporter = AVAssetExportSession(asset: mutableComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
    exporter.videoComposition = layerComposition
    exporter.outputURL = exportUrl as URL
    exporter.outputFileType = AVFileType.mov
    exporter.exportAsynchronously {
        DispatchQueue.main.async {
            // Only hand the file back if the export actually succeeded.
            guard exporter.status == .completed, let url = exporter.outputURL else {
                print("Export failed: \(exporter.error?.localizedDescription ?? "unknown error")")
                return
            }
            self.exportDidComplete(exportURL: url, doneEditing: doneEditing)
        }
    }
}
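For the subtitle/text part of the overlay, the same export path works with a CATextLayer instead of an image layer. Note that AVVideoCompositionCoreAnimationTool is only valid for offline rendering/export; for on-screen preview during playback, use an AVSynchronizedLayer as sketched earlier. A minimal subtitle-layer sketch (the helper name, font, and styling values are illustrative; it assumes AVFoundation and UIKit are imported):

// Hypothetical helper: build a caption layer spanning the width of the video frame.
func makeSubtitleLayer(text: String, videoSize: CGSize) -> CATextLayer {
    let textLayer = CATextLayer()
    textLayer.string = text
    // CATextLayer.font accepts a font name (or a CTFont/CGFont); the size comes from fontSize.
    textLayer.font = "HelveticaNeue-Bold" as CFTypeRef
    textLayer.fontSize = 36
    textLayer.foregroundColor = UIColor.white.cgColor
    textLayer.alignmentMode = .center
    textLayer.isWrapped = true
    // Render at the display scale so the text is not blurry in the exported video.
    textLayer.contentsScale = UIScreen.main.scale
    textLayer.frame = CGRect(x: 0, y: 20, width: videoSize.width, height: 60)
    return textLayer
}

The resulting layer can be handed to exportWithAddedLayer(_:with:doneEditing:) exactly like the sticker layer, and limited to a time range with the same kind of opacity animation shown above.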

0 Answers:

No answers yet