AVFoundation - Merging multiple videos - Adding animations between videos

Asked: 2019-06-26 14:24:26

Tags: ios swift avfoundation avasset avmutablecomposition

I am trying to merge images and video clips together, and I provide an option to add an animation between the videos and images. There are a few options such as fade in, fade out, rotate, slide up, slide down, slide left, slide right, and so on. For images I can add the animations, but how do I animate the videos? Specifically, I want to add an animation at the point where one video clip finishes and the next one starts playing. My merge function currently works well; I only need to add animations between the videos.

What I have tried:

instruction.setOpacityRamp(fromStartOpacity: <#T##Float#>, toEndOpacity: <#T##Float#>, timeRange: <#T##CMTimeRange#>)

but this option only produces a fade-in/fade-out effect. Where and how do I add the other custom animation effects?
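
For reference, an opacity ramp only reads as a transition if the two clips actually overlap in the composition. A common AVFoundation pattern (a minimal sketch, not this project's code; the two-track layout and the 1-second default are assumptions) places consecutive clips on two alternating tracks and ramps the top track's opacity during each overlap:

import AVFoundation
import CoreGraphics

/// A minimal cross-dissolve sketch: consecutive clips are laid out on two
/// alternating composition tracks so each clip overlaps the next by
/// `transition`, and opacity ramps on the top track produce the dissolve.
/// Assumes every clip is longer than 2x `transition` and clips share one size.
func crossDissolveComposition(between assets: [AVAsset],
                              transition: CMTime = CMTime(seconds: 1, preferredTimescale: 600))
    throws -> (AVMutableComposition, AVMutableVideoComposition) {

    let composition = AVMutableComposition()
    guard
        let trackA = composition.addMutableTrack(withMediaType: .video,
                                                 preferredTrackID: kCMPersistentTrackID_Invalid),
        let trackB = composition.addMutableTrack(withMediaType: .video,
                                                 preferredTrackID: kCMPersistentTrackID_Invalid)
    else { throw NSError(domain: "CrossDissolve", code: -1) }

    let tracks = [trackA, trackB]
    let layerA = AVMutableVideoCompositionLayerInstruction(assetTrack: trackA)
    let layerB = AVMutableVideoCompositionLayerInstruction(assetTrack: trackB)

    var cursor = CMTime.zero
    for (index, asset) in assets.enumerated() {
        guard let sourceTrack = asset.tracks(withMediaType: .video).first else { continue }
        try tracks[index % 2].insertTimeRange(CMTimeRange(start: .zero, duration: asset.duration),
                                              of: sourceTrack, at: cursor)

        // Only the top track (A) needs opacity ramps: fading A in reveals it
        // over B, fading A out reveals B underneath.
        if index % 2 == 0 {
            if index > 0 {                  // fade in over the overlap with the previous clip
                layerA.setOpacityRamp(fromStartOpacity: 0.0, toEndOpacity: 1.0,
                                      timeRange: CMTimeRange(start: cursor, duration: transition))
            }
            if index < assets.count - 1 {   // fade out over the overlap with the next clip
                layerA.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0,
                                      timeRange: CMTimeRange(start: cursor + asset.duration - transition,
                                                             duration: transition))
            }
        }
        // The next clip starts `transition` before this one ends, creating the overlap.
        cursor = cursor + asset.duration - (index < assets.count - 1 ? transition : .zero)
    }

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRange(start: .zero, duration: cursor)
    instruction.layerInstructions = [layerA, layerB]   // array order puts A on top

    let videoComposition = AVMutableVideoComposition()
    videoComposition.instructions = [instruction]
    videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    videoComposition.renderSize = assets.first?.tracks(withMediaType: .video).first?.naturalSize
        ?? CGSize(width: 1920, height: 1080)            // assumption: clips share one size
    return (composition, videoComposition)
}

The same overlap layout is the basis for the other transition styles: ramping the top track's transform (setTransformRamp) instead of its opacity gives slide/push effects, and setCropRectangleRamp gives wipes.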

Here is the source code I use for merging. There are many related functions in the code, but I am only posting the merge function. I have marked the spot with //HERE TO ADD THE ANIMATION so you can jump straight to where I want to add the animation.

func merge(allAssets: [MovieAssetPresentable], isHDR: Bool, success: @escaping (URL?) -> (Void), progress: @escaping (CGFloat) -> (Void), failed: @escaping (String?) -> (Void)) {
    cancelExport()
    let defaultSize = isHDR ? self.videoOutputResolution.HD : self.videoOutputResolution.lowQuality
    let videoPresetName = self.getPresetName(resolution: defaultSize)

    self.mergeSuccess = success
    self.mergeError = failed
    self.mergeProgress = progress

    let mixComposition = AVMutableComposition()

    let mainInstruction = AVMutableVideoCompositionInstruction()

    var layerInstructions = [AVMutableVideoCompositionLayerInstruction]()

    guard let urlVideoForBackground = Bundle.main.url(forResource: "black", withExtension: "mov") else {
        self.mergeError("Need black background video !")
        return
    }

    let assetForBackground = AVAsset(url: urlVideoForBackground)

    let trackForBackground = assetForBackground.tracks(withMediaType: AVMediaType.video).first

    //Set output size
    var outputSize = CGSize.zero

    for asset in allAssets.filter({$0.assetType! == .video}) {
        guard let videoAsset = asset.asset else { continue }

        // Get video track
        guard let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first else { continue }

        let assetInfo = self.orientationFromTransform(videoTrack.preferredTransform)

        var videoSize = videoTrack.naturalSize

        if assetInfo.isPortrait == true {
            videoSize.width = videoTrack.naturalSize.height
            videoSize.height = videoTrack.naturalSize.width
        }

        if videoSize.height > outputSize.height {
            outputSize = CGSize(width: defaultSize.width, height: ((videoSize.height / videoSize.width) * defaultSize.width))
        }
    }

    if outputSize == CGSize.zero {
        outputSize = defaultSize
    }

    debugPrint("OUTPUT SIZE: \(outputSize)")
    let layerContentsGravity = VideoSettings.shared.fetchVideoFitClips()
    var layerImages = [CALayer]()

    var insertTime = CMTime.zero

    var audioMixInputParameters = [AVMutableAudioMixInputParameters]()


    // Init Video layer
    let videoLayer = CALayer()
    videoLayer.frame = CGRect.init(x: 0, y: 0, width: outputSize.width, height: outputSize.height)
    videoLayer.contentsGravity = layerContentsGravity

    let parentlayer = CALayer()
    parentlayer.frame = CGRect.init(x: 0, y: 0, width: outputSize.width, height: outputSize.height)
    parentlayer.addSublayer(videoLayer)


    for asset in allAssets.filter({$0.assetType! == .image || $0.assetType! == .video}) {
        //Video speed level
        let videoSpeed = Double(asset.videoSpeedLevel!)
        if asset.assetType! == .video {
            //Video asset
            let ast = asset.asset!
            let duration = asset.endTime! - asset.beginTime! //ast.duration

            //Create AVMutableCompositionTrack object
            guard let track = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
                                                             preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {
                                                                self.mergeError("Unable to create track.")
                                                                continue
            }

            //Add original video sound track
            let originalSoundTrack: AVMutableCompositionTrack?
            if asset.asset!.tracks(withMediaType: .audio).count > 0 {
                originalSoundTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
                do {
                    try originalSoundTrack?.insertTimeRange(CMTimeRange(start: asset.beginTime ?? CMTime.zero, duration: duration), of: ast.tracks(withMediaType: AVMediaType.audio)[0], at: insertTime)
                } catch {
                    self.mergeError("Unable to create original audio track.")
                    continue
                }
                //Set video original sound track speed
                originalSoundTrack?.scaleTimeRange(CMTimeRange(start: asset.beginTime ?? CMTime.zero, duration: duration), toDuration: CMTime(value: CMTimeValue(Double(duration.value) * videoSpeed), timescale: duration.timescale))

                let audioInputParams = AVMutableAudioMixInputParameters(track: originalSoundTrack)
                audioInputParams.setVolume(asset.videoOriginalVolume!, at: CMTime.zero)
                audioInputParams.trackID = originalSoundTrack?.trackID ?? kCMPersistentTrackID_Invalid
                audioMixInputParameters.append(audioInputParams)
            }

            //Set time range
            do {
                try track.insertTimeRange(CMTimeRange(start: asset.beginTime ?? CMTime.zero, duration: duration),
                                          of: ast.tracks(withMediaType: AVMediaType.video)[0],
                                          at: insertTime)
            } catch let err {
                self.mergeError("Failed to load track: \(err.localizedDescription)")
                continue
            }
            //Set video speed
            track.scaleTimeRange(CMTimeRange(start: asset.beginTime ?? CMTime.zero, duration: duration), toDuration: CMTime(value: CMTimeValue(Double(duration.value) * videoSpeed), timescale: duration.timescale))

            insertTime = CMTimeAdd(insertTime, duration)


            let instruction = self.videoCompositionInstruction(track, asset: ast, outputSize: outputSize)
            //                let instruction = videoCompositionInstructionForTrack(track: t, asset: ast, standardSize: outputSize, atTime: insertTime)
            instruction.setOpacity(0.0, at: insertTime)

            //HERE TO ADD THE ANIMATION
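            // A hedged sketch (not verified in this project): besides setOpacityRamp,
            // AVMutableVideoCompositionLayerInstruction offers
            // setTransformRamp(fromStart:toEnd:timeRange:) for slide/push style
            // transitions and setCropRectangleRamp(fromStartCropRectangle:
            // toEndCropRectangle:timeRange:) for wipes. For example, to slide this
            // clip out to the left over its final second:
            //
            //     let rampDuration = CMTime(seconds: 1, preferredTimescale: 600)
            //     let rampRange = CMTimeRange(start: CMTimeSubtract(insertTime, rampDuration),
            //                                 duration: rampDuration)
            //     let slideLeft = CGAffineTransform(translationX: -outputSize.width, y: 0)
            //     instruction.setTransformRamp(fromStart: .identity, toEnd: slideLeft,
            //                                  timeRange: rampRange)
            //
            // Two caveats: insertTime has already been advanced past this clip (and
            // setOpacity(0.0, at: insertTime) above hides the track from then on),
            // and the ramp transforms would need to be concatenated with the scaling
            // transform applied inside videoCompositionInstruction(_:asset:outputSize:).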

            layerInstructions.append(instruction)
        } else {
            //Image data
            let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
                                                                       preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

            let defaultImageTime = CMTimeGetSeconds(asset.endTime!) - CMTimeGetSeconds(asset.beginTime!)
            let duration = CMTime.init(seconds:defaultImageTime, preferredTimescale: assetForBackground.duration.timescale)
            do {
                try videoCompositionTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: duration),
                                                           of: trackForBackground!,
                                                           at: insertTime)
            }
            catch {
                self.mergeError("Background time range error")
            }

            guard let image = UIImage(data: asset.imageData!) else { continue }

            // Create Image layer
            let imageLayer = CALayer()
            imageLayer.frame = CGRect.init(origin: CGPoint.zero, size: outputSize)
            imageLayer.contents = image.cgImage
            imageLayer.opacity = 0
            imageLayer.contentsGravity = layerContentsGravity

            self.setOrientation(image: image, onLayer: imageLayer)

            // Add Fade in & Fade out animation
            let fadeInAnimation = CABasicAnimation.init(keyPath: "opacity")
            fadeInAnimation.duration = 1
            fadeInAnimation.fromValue = NSNumber(value: 0)
            fadeInAnimation.toValue = NSNumber(value: 1)
            fadeInAnimation.isRemovedOnCompletion = false
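            // Core Animation treats beginTime == 0 as "start now", so an animation
            // meant to start at t = 0 on the video timeline needs a small non-zero
            // value (AVCoreAnimationBeginTimeAtZero exists for exactly this case).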
            fadeInAnimation.beginTime = CMTimeGetSeconds(insertTime) == 0 ? 0.05: CMTimeGetSeconds(insertTime)
            fadeInAnimation.fillMode = CAMediaTimingFillMode.forwards
            imageLayer.add(fadeInAnimation, forKey: "opacityIN")

            let fadeOutAnimation = CABasicAnimation.init(keyPath: "opacity")
            fadeOutAnimation.duration = 1
            fadeOutAnimation.fromValue = NSNumber(value: 1)
            fadeOutAnimation.toValue = NSNumber(value: 0)
            fadeOutAnimation.isRemovedOnCompletion = false
            fadeOutAnimation.beginTime = CMTimeGetSeconds(CMTimeAdd(insertTime, duration))
            fadeOutAnimation.fillMode = CAMediaTimingFillMode.forwards
            imageLayer.add(fadeOutAnimation, forKey: "opacityOUT")

            layerImages.append(imageLayer)

            // Increase the insert time
            insertTime = CMTimeAdd(insertTime, duration)
        }
    }


    // Add Image layers
    for layer in layerImages {
        parentlayer.addSublayer(layer)
    }

    //Add Water mark if Subscription not activated
    if !AddManager.shared.hasActiveSubscription {

        let imglogo = UIImage(named: "watermark")
        let waterMarklayer = CALayer()
        waterMarklayer.contents = imglogo?.cgImage
        let sizeOfWaterMark = Utility.getWaterMarkSizeWithVideoSize(videoSize: outputSize, defaultSize: waterMarkSize)
        debugPrint("sizeOfWaterMark=\(sizeOfWaterMark)")
        waterMarklayer.frame = CGRect(x: outputSize.width - (sizeOfWaterMark.width+10), y: 5, width: sizeOfWaterMark.width, height: sizeOfWaterMark.height)
        waterMarklayer.contentsGravity = .resizeAspect
        waterMarklayer.opacity = 1.0
        parentlayer.addSublayer(waterMarklayer)
    }

    mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: insertTime)
    mainInstruction.layerInstructions = layerInstructions
    mainInstruction.backgroundColor = VideoSettings.shared.fetchVideoBackgroundColor().color.cgColor

    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    mainComposition.renderSize = outputSize
    mainComposition.renderScale = 1.0
    mainComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentlayer)

    for audioAsset in allAssets.filter({$0.assetType! == .audio}) {
        //NOTE: To increase or decrease the audio fade-in/fade-out effect time, change the fadeInFadeOutEffectTiming value (in seconds)
        let fadeInFadeOutEffectTiming = Double(3) //seconds

        let volumeLevel = audioAsset.audioVolumeLevel!
        let isFadeIn = audioAsset.audioFadeInEffect!
        let isFadeOut = audioAsset.audioFadeOutEffect!
        var audioBeginTime = audioAsset.beginTime!
        var audioEndTime = audioAsset.endTime!
        var audioTrackTime = audioAsset.audioTrackStartTime!
        var trimmedAudioDuration = CMTimeSubtract(audioEndTime, audioBeginTime)

        //If the audio starting position (in seconds) is greater than or equal to zero (relative to the video timeline)

        if CMTimeGetSeconds(CMTimeAdd(audioTrackTime, audioBeginTime)) >= 0 {
            //If the audio starting position (seconds) is beyond the video length, e.g. the video is 20 seconds long but the audio starts at 24 seconds, we should not add the audio
            if CMTimeCompare(CMTimeAdd(audioTrackTime, audioBeginTime), insertTime) == 1 {
                trimmedAudioDuration = CMTime.zero
            } else {
                //If audio start position (seconds) + crop length exceeds the total video length, we should add only the part that falls within the video
                if CMTimeCompare(CMTimeAdd(CMTimeAdd(audioTrackTime, audioBeginTime), trimmedAudioDuration), insertTime) == 1 {
                    audioTrackTime = CMTimeAdd(audioTrackTime, audioBeginTime)
                    trimmedAudioDuration = CMTimeSubtract(insertTime, audioTrackTime)
                } else {
                    audioTrackTime = CMTimeAdd(audioTrackTime, audioBeginTime)
                }
            }
        }
            //If the audio start time is negative (seconds)
        else {
            //If the audio crop length is negative (seconds)
            if CMTimeCompare(CMTimeAdd(CMTimeAdd(audioTrackTime, audioBeginTime), trimmedAudioDuration), CMTime.zero) == -1 {
                trimmedAudioDuration = CMTime.zero
            } else {
                audioBeginTime = CMTime(seconds: abs(CMTimeGetSeconds(audioTrackTime)), preferredTimescale: audioTrackTime.timescale)
                audioTrackTime = CMTime.zero
                trimmedAudioDuration = CMTimeSubtract(audioEndTime, audioBeginTime)
                if CMTimeCompare(trimmedAudioDuration, insertTime) == 1 {
                    trimmedAudioDuration = insertTime
                }
            }
        }

        if trimmedAudioDuration != CMTime.zero {
            audioEndTime = CMTimeAdd(audioTrackTime, trimmedAudioDuration)
            let audioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
            do {
                try audioTrack?.insertTimeRange(CMTimeRangeMake(start: audioBeginTime , duration: trimmedAudioDuration),
                                                of: audioAsset.asset!.tracks(withMediaType: AVMediaType.audio)[0] ,
                                                at: audioTrackTime)
                let audioInputParams = AVMutableAudioMixInputParameters(track: audioTrack)

                var effectTime = CMTime(seconds: fadeInFadeOutEffectTiming, preferredTimescale: 600)
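                // If the trimmed clip is shorter than two effect windows, shrink
                // each ramp to half the clip so fade-in and fade-out never overlap.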
                if CMTimeCompare(trimmedAudioDuration, CMTimeMultiply(effectTime, multiplier: 2)) == -1 {
                    effectTime = CMTime(seconds: CMTimeGetSeconds(trimmedAudioDuration) / 2, preferredTimescale: 600)
                }

                //Fade in effect
                audioInputParams.setVolumeRamp(fromStartVolume: isFadeIn ? 0 : volumeLevel, toEndVolume: volumeLevel, timeRange: CMTimeRange(start: audioTrackTime, duration: effectTime))
                //Fade out effect
                audioInputParams.setVolumeRamp(fromStartVolume: volumeLevel, toEndVolume: isFadeOut ? 0 : volumeLevel, timeRange: CMTimeRange(start: CMTimeSubtract(audioEndTime, effectTime), duration: effectTime))

                audioInputParams.trackID = audioTrack?.trackID ?? kCMPersistentTrackID_Invalid
                audioMixInputParameters.append(audioInputParams)
            } catch {
                print("Failed to load Audio track")
            }
        }
    }

    // 4 - Get path
    guard let url = Utility.createFileAtDocumentDirectory(name: "mergeVideo-\(Date().timeIntervalSince1970).mp4") else {
        debugPrint("Unable to file at document directory")
        return
    }

    // 5 - Create Exporter
    self.exporter = AVAssetExportSession(asset: mixComposition, presetName: videoPresetName)
    guard let exp = self.exporter else {
        debugPrint("Unable to export.")
        return
    }
    let audioMix = AVMutableAudioMix()
    audioMix.inputParameters = audioMixInputParameters
    exp.outputURL = url
    exp.outputFileType = AVFileType.mp4
    exp.shouldOptimizeForNetworkUse = true
    exp.videoComposition = mainComposition
    exp.audioMix = audioMix

    //self.viewPieProgress.setProgress(0.0, animated: false)
    //viewPieProgress.isHidden = isHDR

    //timer for progress
    self.timer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(self.updateExportingProgress(timer:)), userInfo: exp, repeats: true)

    // 6 - Perform the Export
    exp.exportAsynchronously() {
        DispatchQueue.main.async {
            self.exportDidFinish(exp)
        }
    }
}
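
For completeness: the merge function calls helper functions that were not posted. The orientationFromTransform(_:) helper used above is commonly implemented as follows (an assumed, typical implementation, not the author's actual code):

import UIKit

// Inspects a video track's preferredTransform to work out how the
// footage was recorded (rotation) and whether it is portrait.
func orientationFromTransform(_ transform: CGAffineTransform)
    -> (orientation: UIImage.Orientation, isPortrait: Bool) {
    var assetOrientation = UIImage.Orientation.up
    var isPortrait = false
    if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
        assetOrientation = .right          // rotated 90 degrees clockwise: portrait
        isPortrait = true
    } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
        assetOrientation = .left           // rotated 90 degrees counter-clockwise: portrait
        isPortrait = true
    } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
        assetOrientation = .up             // no rotation: landscape
    } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
        assetOrientation = .down           // rotated 180 degrees: upside-down landscape
    }
    return (assetOrientation, isPortrait)
}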

I have tried many options, but nothing does what I need. Please help me.

If you need any additional information from me, feel free to comment on this post.

Thank you.

0 Answers:

No answers yet.