AVExportSession导出视频超级慢

时间:2020-10-05 07:37:44

标签: ios swift avfoundation avasset avmutablecomposition

我正试图简单地提高合并视频的导出速度。

这里是代码。从网上和 SO 的广泛研究中,我大致了解到 PassThrough 预设可以使导出变得非常快,但正如我在代码注释中所写的那样,合并的代码似乎无法使用该预设进行导出 :/

            /// Builds a layer instruction that scales `asset`'s first video track to
            /// fit `targetWidth` points (by width for landscape, by height for
            /// portrait) and applies the track's preferred transform so the clip
            /// renders upright in the composition.
            ///
            /// - Parameters:
            ///   - track: The composition track this instruction applies to.
            ///   - asset: Source asset; must contain at least one video track.
            ///   - targetWidth: Width the clip is scaled to fit. Defaults to 1080,
            ///     matching the 1080x1920 render size used by `mergeVideosTestSQ`.
            /// - Returns: A layer instruction with its transform set at time zero.
            static func videoCompositionInstruction(_ track: AVCompositionTrack, asset: AVAsset, targetWidth: CGFloat = 1080)
            -> AVMutableVideoCompositionLayerInstruction {
                let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
                // Will crash if the asset has no video track; callers guard for this.
                let assetTrack = asset.tracks(withMediaType: .video)[0]

                let transform = assetTrack.preferredTransform
                let assetInfo = orientationFromTransform(transform)

                var scaleToFitRatio = targetWidth / assetTrack.naturalSize.width
                if assetInfo.isPortrait {
                    // Portrait clips are stored rotated, so naturalSize.height is the
                    // on-screen width — fit by height instead.
                    scaleToFitRatio = targetWidth / assetTrack.naturalSize.height
                    let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
                    var finalTransform = assetTrack.preferredTransform.concatenating(scaleFactor)
                    // NOTE(review): shifts mirrored (front-camera) clips by the transform's
                    // y-translation; the original author found it needed when black bars
                    // appeared — confirm per capture source.
                    if assetInfo.orientation == .rightMirrored || assetInfo.orientation == .leftMirrored {
                        finalTransform = finalTransform.translatedBy(x: -transform.ty, y: 0)
                    }
                    instruction.setTransform(finalTransform, at: CMTime.zero)
                } else {
                    let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
                    // Landscape: scale, then push down by half the screen width —
                    // presumably to vertically center inside a portrait render canvas;
                    // TODO confirm against the actual render size.
                    var concat = assetTrack.preferredTransform.concatenating(scaleFactor)
                        .concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
                    if assetInfo.orientation == .down {
                        // Upside-down clip: rotate 180 degrees and translate back into frame.
                        let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
                        let windowBounds = UIScreen.main.bounds
                        let yFix = assetTrack.naturalSize.height + windowBounds.height
                        let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
                        concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
                    }
                    instruction.setTransform(concat, at: CMTime.zero)
                }

                return instruction
        }

        /// Classifies a video track's `preferredTransform` matrix as an image
        /// orientation, and reports whether the clip is portrait.
        ///
        /// - Parameter transform: The track's preferred transform.
        /// - Returns: The matching orientation (`.up` when the matrix is identity
        ///   or unrecognized) and `true` when the clip is rotated into portrait.
        static func orientationFromTransform(_ transform: CGAffineTransform)
            -> (orientation: UIImage.Orientation, isPortrait: Bool) {
                // Match the rotation/mirror component (a, b, c, d) of the matrix
                // against the four 90-degree variants and the 180-degree flip.
                switch (transform.a, transform.b, transform.c, transform.d) {
                case (0, 1.0, -1.0, 0):
                    return (.right, true)
                case (0, 1.0, 1.0, 0):
                    return (.rightMirrored, true)
                case (0, -1.0, 1.0, 0):
                    return (.left, true)
                case (0, -1.0, -1.0, 0):
                    return (.leftMirrored, true)
                case (-1.0, 0, 0, -1.0):
                    return (.down, false)
                default:
                    // Identity (a == d == 1) and any unrecognized matrix fall
                    // through to upright, exactly as the original ladder did.
                    return (.up, false)
                }
        }

    /// Concatenates the given assets back-to-back into a single 1080x1920
    /// composition and exports it to a temporary `merge.mp4` file.
    ///
    /// - Parameters:
    ///   - arrayVideos: Source assets, merged in order. Each must contain a
    ///     video track; an audio track is optional.
    ///   - completion: Invoked exactly once — with the exported file URL on
    ///     success, or `nil` plus an error on failure.
    func mergeVideosTestSQ(arrayVideos:[AVAsset], completion:@escaping (URL?, Error?) -> ()) {

        let mixComposition = AVMutableComposition()

        var instructions: [AVMutableVideoCompositionLayerInstruction] = []
        var insertTime = CMTime.zero

        // Append each asset's video (and audio, when present) to the composition.
        for sourceAsset in arrayVideos {

            let frameRange = CMTimeRange(start: .zero, duration: sourceAsset.duration)

            guard
                let nthVideoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
                let nthAudioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid),
                let assetVideoTrack = sourceAsset.tracks(withMediaType: .video).first
            else {
                // Previously this printed and returned without calling completion,
                // leaving the caller waiting forever.
                completion(nil, NSError(domain: "mergeVideosTestSQ", code: -1,
                                        userInfo: [NSLocalizedDescriptionKey: "Could not create composition tracks or find a video track."]))
                return
            }

            do {
                try nthVideoTrack.insertTimeRange(frameRange, of: assetVideoTrack, at: insertTime)

                // Not every clip carries audio — skip it instead of force-unwrapping
                // (the original `assetAudioTrack!` crashed on silent clips).
                if let assetAudioTrack = sourceAsset.tracks(withMediaType: .audio).first {
                    try nthAudioTrack.insertTimeRange(frameRange, of: assetAudioTrack, at: insertTime)
                }

                // Hide this clip once its segment ends so the next clip shows through.
                let nthInstruction = MainCamVC.videoCompositionInstruction(nthVideoTrack, asset: sourceAsset)
                nthInstruction.setOpacity(0.0, at: CMTimeAdd(insertTime, sourceAsset.duration))

                instructions.append(nthInstruction)
                insertTime = insertTime + sourceAsset.duration
            } catch {
                completion(nil, error)
                return
            }
        }

        // A single top-level instruction spanning the whole timeline carries
        // every per-clip layer instruction.
        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = CMTimeRange(start: .zero, duration: insertTime)
        mainInstruction.layerInstructions = instructions

        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [mainInstruction]
        mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 30) // 30 fps
        mainComposition.renderSize = CGSize(width: 1080, height: 1920)

        let outputFileURL = URL(fileURLWithPath: NSTemporaryDirectory() + "merge.mp4")

        // Remove any leftover output; the export session fails if the file exists.
        try? FileManager.default.removeItem(at: outputFileURL)

        // AVAssetExportPresetPassthrough cannot be used here: a videoComposition
        // forces re-encoding, which passthrough sessions do not support.
        guard let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else {
            completion(nil, NSError(domain: "mergeVideosTestSQ", code: -2,
                                    userInfo: [NSLocalizedDescriptionKey: "Could not create an export session."]))
            return
        }

        // .mp4 to match the "merge.mp4" extension (was .mov, which mismatched the URL).
        exportSession.outputFileType = .mp4
        exportSession.outputURL = outputFileURL
        exportSession.videoComposition = mainComposition
        exportSession.shouldOptimizeForNetworkUse = true

        exportSession.exportAsynchronously {
            // Dispatch on status so completion fires exactly once — the original
            // handler could call it twice when outputURL and error were both set.
            switch exportSession.status {
            case .completed:
                completion(exportSession.outputURL, nil)
            default:
                completion(nil, exportSession.error)
            }
        }
    }

请注意,我有说明以保持正确的方向。

感谢您的帮助!我只需要更快,它大约需要videoDuration / 2秒才能及时导出...

1 个答案:

答案 0 :(得分:2)

将代码实现到我的项目中之后,发现导致导出速度变慢的原因可能是 renderSize 与视频分辨率不匹配的处理方式。最重要的是,使用较低的导出预设质量可以显著提高导出速度。

我特别要注意这部分:

    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRange(start: CMTime(seconds: 0, preferredTimescale: 1), duration: insertTime)

    mainInstruction.layerInstructions = instructions

    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    mainComposition.renderSize = CGSize(width: 1080, height: 1920)

将 renderSize 更改为视频轨道的自然尺寸(例如 videoComposition.renderSize = 视频的 naturalSize,变量名可能与您的项目不同)就可以解决问题。

然后在出口处,建议更改此部分:

    /// try to start an export session and set the path and file type
    if let exportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) { //DOES NOT WORK WITH AVAssetExportPresetPassthrough
        exportSession.outputFileType = .mov
        exportSession.outputURL = outputFileURL
        exportSession.videoComposition = mainComposition
        exportSession.shouldOptimizeForNetworkUse = true

        /// try to export the file and handle the status cases
        exportSession.exportAsynchronously {
            if let url = exportSession.outputURL{
              completion(url, nil)
          }
            if let error = exportSession.error {
              completion(nil, error)
          }
        }
        
    }

正如我之前所说,将导出预设质量降低一档(例如从 HighestQuality 改为较低的预设),速度将大大提高!另外,代码中有些变换被重复应用了,也需要注意!

相关问题