How to add fade in and fade out effects to a video AVAsset in iOS with Swift 3

Date: 2018-07-03 06:24:03

Tags: ios video swift3 avfoundation avasset

I am developing a Swift 3 iOS application that works with video. I have to convert some text into a video, add fade in and fade out effects to it, and then upload the faded video to a server. I am not allowed to use any third-party library for the fade effect.

I can already convert the text into a video; my question is how to add the fade in and fade out effects to the video AVAsset.

Can anyone suggest how to achieve this? I could not find an up-to-date answer to this question. Thanks for your help!

2 Answers:

Answer 0 (score: 1)

AVVideoCompositionLayerInstruction

An array of AVVideoCompositionLayerInstruction instances that specifies how video frames from the source tracks should be layered and composed.

AVMutableVideoCompositionInstruction

An AVVideoComposition object maintains an array of instructions that describe how to perform its composition.
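
To see how the two types fit together before the full example: you build one layer instruction per composition track, group the layer instructions in a composition instruction, and hand the instructions to the video composition. A minimal sketch (it assumes compositionVideoTrack and asset already exist; the opacity ramp is what produces the fade):

// One layer instruction per composition track; the opacity ramp produces the fade.
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack)
layerInstruction.setOpacityRamp(fromStartOpacity: 0.0, toEndOpacity: 1.0,
                                timeRange: CMTimeRangeMake(kCMTimeZero, CMTimeMake(1, 1)))

// The instruction groups layer instructions over a time range...
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)
instruction.layerInstructions = [layerInstruction]

// ...and the video composition holds the instructions used at render/export time
// (renderSize and frameDuration also need to be set; see the full example below).
let videoComposition = AVMutableVideoComposition()
videoComposition.instructions = [instruction]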

Swift 4 example: I merge videos with a fade-in/fade-out transition between them and also reorder the audio:

import AVFoundation

// Note: `Completion`, `orientationFromTransform`, `videoCompositionInstructionForTrack`,
// `FileManager.removeItemIfExisted` and `exportDidFinish` are helpers that are not part of
// this listing; possible sketches of them follow after it.
func doMerge(arrayVideos:[AVAsset], arrayAudios:[AVAsset], animation:Bool, completion:@escaping Completion) -> Void {

    var insertTime = kCMTimeZero
    var audioInsertTime = kCMTimeZero
    var arrayLayerInstructions:[AVMutableVideoCompositionLayerInstruction] = []
    var outputSize = CGSize.init(width: 0, height: 0)

    // Determine video output size
    for videoAsset in arrayVideos {
        let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video)[0]
        let assetInfo = orientationFromTransform(transform: videoTrack.preferredTransform)
        var videoSize = videoTrack.naturalSize
        if assetInfo.isPortrait == true {
            videoSize.width = videoTrack.naturalSize.height
            videoSize.height = videoTrack.naturalSize.width
        }
        outputSize = videoSize
    }

    // Init composition
    let mixComposition = AVMutableComposition.init()

    for index in 0..<arrayVideos.count {
        // Get video track
        guard let videoTrack = arrayVideos[index].tracks(withMediaType: AVMediaType.video).first else { continue }

        // Get audio track
        var audioTrack:AVAssetTrack?
        if index < arrayAudios.count {
            if arrayAudios[index].tracks(withMediaType: AVMediaType.audio).count > 0 {
                audioTrack = arrayAudios[index].tracks(withMediaType: AVMediaType.audio).first
            }
        }
        // Init video & audio composition track
        let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

        let audioCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

        do {
            let startTime = kCMTimeZero
            let duration = arrayVideos[index].duration

            // Add video track to video composition at specific time
            try videoCompositionTrack?.insertTimeRange(CMTimeRangeMake(startTime, duration), of: videoTrack, at: insertTime)

            // Add audio track to audio composition at specific time
            var audioDuration = kCMTimeZero
            if index < arrayAudios.count   {
                 audioDuration = arrayAudios[index].duration
            }

            if let audioTrack = audioTrack {
                do {
                    try audioCompositionTrack?.insertTimeRange(CMTimeRangeMake(startTime, audioDuration), of: audioTrack, at: audioInsertTime)
                }
                catch {
                    print(error.localizedDescription)
                }
            }

            // Add instruction for video track
            let layerInstruction = videoCompositionInstructionForTrack(track: videoCompositionTrack!, asset: arrayVideos[index], standardSize: outputSize, atTime: insertTime)

            // Hide video track before changing to new track
            let endTime = CMTimeAdd(insertTime, duration)

            if animation {
                let timeScale = arrayVideos[index].duration.timescale
                let durationAnimation = CMTime.init(seconds: 1, preferredTimescale: timeScale)

                // Fade the clip out over its final second (the ramp must end at the clip boundary to be visible)
                layerInstruction.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: CMTimeRange.init(start: CMTimeSubtract(endTime, durationAnimation), duration: durationAnimation))
            }
            else {
                layerInstruction.setOpacity(0, at: endTime)
            }

            arrayLayerInstructions.append(layerInstruction)

            // Increase the insert time
            audioInsertTime = CMTimeAdd(audioInsertTime, audioDuration)
            insertTime = CMTimeAdd(insertTime, duration)
        }
        catch {
            print("Load track error")
        }
    }

    // Main video composition instruction
    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, insertTime)
    mainInstruction.layerInstructions = arrayLayerInstructions

    // Main video composition
    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(1, 30)
    mainComposition.renderSize = outputSize

    // Export to file
    let path = NSTemporaryDirectory().appending("mergedVideo.mp4")
    let exportURL = URL.init(fileURLWithPath: path)

    // Remove file if existed
    FileManager.default.removeItemIfExisted(exportURL)

    // Init exporter
    let exporter = AVAssetExportSession.init(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
    exporter?.outputURL = exportURL
    exporter?.outputFileType = AVFileType.mp4
    exporter?.shouldOptimizeForNetworkUse = true
    exporter?.videoComposition = mainComposition

    // Do export
    exporter?.exportAsynchronously(completionHandler: {
        DispatchQueue.main.async {
            self.exportDidFinish(exporter: exporter, videoURL: exportURL, completion: completion)
        }
    })

}
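
The listing above relies on several helpers that the answer does not show: Completion, orientationFromTransform, videoCompositionInstructionForTrack, FileManager.removeItemIfExisted and exportDidFinish. Below is a possible sketch of them, based on the common preferredTransform orientation check; the names come from the call sites, but the bodies are assumptions, and in the original they would be members of the same class rather than free functions:

import AVFoundation
import UIKit

typealias Completion = (URL?, Error?) -> Void // assumed completion signature

extension FileManager {
    func removeItemIfExisted(_ url: URL) {
        if fileExists(atPath: url.path) {
            try? removeItem(at: url)
        }
    }
}

// Detects whether the track is portrait from its preferredTransform.
// (UIImageOrientation is UIImage.Orientation in Swift 4.2+.)
func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
    var assetOrientation = UIImageOrientation.up
    var isPortrait = false
    if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
        assetOrientation = .right
        isPortrait = true
    } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
        assetOrientation = .left
        isPortrait = true
    } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
        assetOrientation = .up
    } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
        assetOrientation = .down
    }
    return (assetOrientation, isPortrait)
}

// Builds a layer instruction that applies the source track's transform, scaled to the output size.
func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset, standardSize: CGSize, atTime: CMTime) -> AVMutableVideoCompositionLayerInstruction {
    let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    guard let assetTrack = asset.tracks(withMediaType: AVMediaType.video).first else { return instruction }

    let transform = assetTrack.preferredTransform
    let assetInfo = orientationFromTransform(transform: transform)

    // Scale so the (possibly rotated) video fills the output width or height.
    let scale = assetInfo.isPortrait
        ? standardSize.height / assetTrack.naturalSize.width
        : standardSize.width / assetTrack.naturalSize.width
    let scaleFactor = CGAffineTransform(scaleX: scale, y: scale)
    instruction.setTransform(transform.concatenating(scaleFactor), at: atTime)

    return instruction
}

// Assumed to simply forward the export result to the caller.
func exportDidFinish(exporter: AVAssetExportSession?, videoURL: URL, completion: Completion) {
    if exporter?.status == .completed {
        completion(videoURL, nil)
    } else {
        completion(nil, exporter?.error)
    }
}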

Answer 1 (score: 0)

Fade out effect

let parentLayer = CALayer()
let fadeOut = CABasicAnimation(keyPath: "opacity")
fadeOut.fromValue = 1.0
fadeOut.toValue = 0.0
fadeOut.duration = 5.0 // set this to the video's duration
fadeOut.setValue("video", forKey: "fadeOut")
fadeOut.isRemovedOnCompletion = false
fadeOut.fillMode = CAMediaTimingFillMode.forwards
parentLayer.add(fadeOut, forKey: "opacity")

Fade in effect

let fadeIn = CABasicAnimation(keyPath: "opacity")
fadeIn.fromValue = 0.0
fadeIn.toValue = 1.0

Add it to the player (for preview)

self.playerView?.playerLayer?.add(fadeOut, forKey: nil)
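
Note that adding the animation to parentLayer only affects an export if the layer tree is handed to the video composition through AVVideoCompositionCoreAnimationTool; for on-screen preview an AVSynchronizedLayer would be used instead. A rough sketch of the export wiring (it assumes a videoComposition built as in the first answer, with renderSize already set):

// The video frames are rendered into videoLayer; parentLayer (carrying the animation) wraps it.
let videoLayer = CALayer()
videoLayer.frame = CGRect(origin: .zero, size: videoComposition.renderSize)
parentLayer.frame = videoLayer.frame
parentLayer.addSublayer(videoLayer)

// Inside a video composition, "time zero" is AVCoreAnimationBeginTimeAtZero (a plain 0.0 means "now"),
// so set beginTime before adding the animation to the layer.
fadeOut.beginTime = AVCoreAnimationBeginTimeAtZero
parentLayer.add(fadeOut, forKey: "fadeOut")

videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(
    postProcessingAsVideoLayer: videoLayer,
    in: parentLayer
)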

Add it to your asset

var startTime = CMTime.zero
var timeDuration = CMTimeMake(value: 3, timescale: 1)
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)

//MARK: Fade in effect
layerInstruction.setOpacityRamp(fromStartOpacity: 0.0, toEndOpacity: 1.0, timeRange: CMTimeRange(start: startTime, duration: timeDuration))

//MARK: Fade out effect
startTime = CMTimeSubtract(mutableComposition.duration, CMTimeMake(value: 3, timescale: 1))
timeDuration = CMTimeMake(value: 3, timescale: 1)
layerInstruction.setOpacityRamp(
    fromStartOpacity: 1.0,
    toEndOpacity: 0.0,
    timeRange: CMTimeRangeMake(start: startTime, duration: timeDuration)
)
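
For these ramps to show up in the exported file, the layer instruction still has to be wrapped in a composition instruction and the resulting video composition passed to the export session. A minimal sketch (mutableComposition, videoTrack and layerInstruction as above; the output path is only an example):

let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRange(start: .zero, duration: mutableComposition.duration)
instruction.layerInstructions = [layerInstruction]

let videoComposition = AVMutableVideoComposition()
videoComposition.instructions = [instruction]
videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
videoComposition.renderSize = videoTrack.naturalSize

let outputURL = URL(fileURLWithPath: NSTemporaryDirectory().appending("fadedVideo.mp4"))
try? FileManager.default.removeItem(at: outputURL)

let exporter = AVAssetExportSession(asset: mutableComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = outputURL
exporter?.outputFileType = .mp4
exporter?.videoComposition = videoComposition
exporter?.exportAsynchronously {
    // Check exporter?.status (.completed / .failed) and exporter?.error here.
}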