Getting Code = -11841 when trying to merge videos in Swift

Asked: 2018-10-22 09:43:33

Tags: ios swift

I am trying to build an app that merges multiple videos into one long 1920x1080 video. The problem is that whenever I run the code below, I get the following error:

  

Optional(Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedFailureReason=The video could not be composed., NSLocalizedDescription=Operation Stopped, NSUnderlyingError=0x283ae3b10 {Error Domain=NSOSStatusErrorDomain Code=-17390 "(null)"}})

func merge( videoArray: [AVAsset] ){

        // 1 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
        let mixComposition = AVMutableComposition()

        var instructions: [AVVideoCompositionInstruction] = []

        var totalDuration = CMTime.zero
        var currentTime = CMTime.zero
        var highestFramerate: __int32_t = 0

        var i = 1;

        let videoTacks = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        let audioTracks = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: 0)

        for video: AVAsset in videoArray {

            print("Merging Video - \(i) ")

            let videoAseetTrack: AVAssetTrack = video.tracks(withMediaType: AVMediaType.video)[0]


            let currentFramerate: __int32_t = __int32_t(roundf(videoAseetTrack.nominalFrameRate))
            if(currentFramerate > highestFramerate){
                highestFramerate = currentFramerate
            }


            do {
                try videoTacks?.insertTimeRange(CMTimeRange(start: CMTime.zero, duration: video.duration),
                                               of: videoAseetTrack,
                                               at: currentTime)
            } catch {
                print("Failed to load first track")
            }

            let audioResult = video.tracks(withMediaType: AVMediaType.audio).indices.contains(0)

            if(audioResult == true){

                do {
                    try audioTracks?.insertTimeRange(CMTimeRange(start: CMTime.zero, end: video.duration),
                                                    of: video.tracks(withMediaType: AVMediaType.audio)[0] ,
                                                    at: currentTime)
                } catch {
                    print("Failed to load Audio track")
                }

            }

            let videoCompositionInstruction = AVMutableVideoCompositionInstruction()
            videoCompositionInstruction.timeRange = CMTimeRange(start: currentTime, duration: video.duration)

            let layerInstruction = AVMutableVideoCompositionLayerInstruction.init(assetTrack:  videoAseetTrack)


            ////////////////////

            let transform = videoAseetTrack.preferredTransform
            let assetInfo = orientationFromTransform(transform)

            if assetInfo.isPortrait {

                let s = 1080 / videoAseetTrack.naturalSize.width

                let scaledWidth = videoAseetTrack.naturalSize.height * s

                let x = (1920 / 2) - (scaledWidth / 2)

                let transform: CGAffineTransform = videoAseetTrack.preferredTransform

                let new: CGAffineTransform = transform.concatenating(CGAffineTransform(scaleX: s, y: s))

                let degrees : Double = 90; //the value in degrees
                let rotate: CGAffineTransform = new.concatenating(CGAffineTransform(rotationAngle: CGFloat(degrees * .pi/180) ))

                let newer: CGAffineTransform = new.concatenating(CGAffineTransform(translationX: x, y: 0))

                layerInstruction.setTransform(newer, at: CMTime.zero)

            }else {


                let s = 1920 / videoAseetTrack.naturalSize.width

                let transform: CGAffineTransform = videoAseetTrack.preferredTransform

                let new: CGAffineTransform = transform.concatenating(CGAffineTransform(scaleX: s, y: s))

                let newer: CGAffineTransform = new.concatenating(CGAffineTransform(translationX: 0, y: 0))

                layerInstruction.setTransform(newer, at: CMTime.zero)


            }

            layerInstruction.setTransform(transform, at: CMTime.zero)



            ///////////////////


            videoCompositionInstruction.layerInstructions.append(layerInstruction)
            instructions.append(videoCompositionInstruction)

            totalDuration = CMTimeAdd(totalDuration, video.duration)
            currentTime = CMTimeAdd(currentTime, video.duration)

            i = i + 1
        }


        // 2.3

        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = instructions
        mainComposition.frameDuration = CMTime(value: 1, timescale: highestFramerate)
        mainComposition.renderSize = CGSize(width: 1920, height: 1080)


        // 4 - Get path
        guard let documentDirectory = FileManager.default.urls(for: .documentDirectory,
                                                               in: .userDomainMask).first else {
                                                                return
        }
        let dateFormatter = DateFormatter()
        dateFormatter.dateStyle = .long
        dateFormatter.timeStyle = .short
        let date = dateFormatter.string(from: Date())
        let randomInt = Int.random(in: 0..<6)
        let url = documentDirectory.appendingPathComponent("mergeVideo-\(date)-\(randomInt).mov")

        // 5 - Create Exporter
        guard let exporter = AVAssetExportSession(asset: mixComposition,
                                                  presetName: AVAssetExportPreset1920x1080) else {
                                                    return
        }
        exporter.outputURL = url
        exporter.outputFileType = AVFileType.mov
        exporter.shouldOptimizeForNetworkUse = true
        exporter.videoComposition = mainComposition

        // 6 - Perform the Export
        exporter.exportAsynchronously() {

            if(exporter.status == AVAssetExportSession.Status.completed){

                DispatchQueue.main.async {

                    PHPhotoLibrary.shared().performChanges({
                        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url)
                    }) { saved, error in
                        if saved {
                            let alertController = UIAlertController(title: "Your video was successfully saved", message: nil, preferredStyle: .alert)
                            let defaultAction = UIAlertAction(title: "OK", style: .default, handler: nil)
                            alertController.addAction(defaultAction)
                            self.present(alertController, animated: true, completion: nil)
                        }
                    }

                    //self.exportDidFinish(exporter)
                }

            }else {

                print(exporter.status.rawValue)
                print(exporter.error)
            }
        }


    }
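
For reference, the orientationFromTransform helper called above isn't shown; it is essentially the standard helper from the usual AVFoundation merging tutorials, roughly along these lines (a sketch, not my exact implementation):

    func orientationFromTransform(_ transform: CGAffineTransform) -> (orientation: UIImage.Orientation, isPortrait: Bool) {
        var assetOrientation = UIImage.Orientation.up
        var isPortrait = false
        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            // Rotated 90 degrees clockwise - typical portrait capture from the back camera
            assetOrientation = .right
            isPortrait = true
        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
            // Rotated 90 degrees counter-clockwise
            assetOrientation = .left
            isPortrait = true
        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
            // No rotation - landscape
            assetOrientation = .up
        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            // Rotated 180 degrees
            assetOrientation = .down
        }
        return (assetOrientation, isPortrait)
    }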

I think the problem may be related to the video timing, but as far as I can tell I have set it up correctly. If anyone can spot the issue it would be a big help, as this is really blocking me.
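
One thing that may help narrow down the timing question: AVMutableVideoComposition can validate its own instruction time ranges, which should flag any gaps or overlaps before the export even starts. A minimal check, using mainComposition and mixComposition from the code above, would be something like:

    // Placed just before calling exportAsynchronously().
    // Asks AVFoundation whether the instruction time ranges cover the composition
    // without gaps or overlaps; mismatched instruction ranges are a common cause of -11841.
    let fullRange = CMTimeRange(start: .zero, duration: mixComposition.duration)
    let isValid = mainComposition.isValid(for: mixComposition,
                                          timeRange: fullRange,
                                          validationDelegate: nil)
    print("Video composition valid: \(isValid)")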

Thanks

UPDATE

After some more digging, the problem seems to come from merging videos that were themselves already merged with the code above. If I run two fresh videos through it, they merge and export fine. If I then try to create a new video using one of the already-exported videos, the error is thrown. Does anyone know why already-exported videos would behave this way?
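
To compare what actually differs between a fresh recording and a file produced by the export above, a small diagnostic along these lines should help (a sketch only; dumpVideoInfo is just a throwaway helper name, and it simply prints the properties the merge code depends on):

    // Dump the attributes the merge code is sensitive to, so a freshly recorded
    // asset can be compared against one that has already been exported.
    func dumpVideoInfo(_ asset: AVAsset, label: String) {
        guard let track = asset.tracks(withMediaType: .video).first else {
            print("\(label): no video track")
            return
        }
        print("\(label): duration \(asset.duration.seconds)s (timescale \(asset.duration.timescale))")
        print("\(label): naturalSize \(track.naturalSize)")
        print("\(label): preferredTransform \(track.preferredTransform)")
        print("\(label): nominalFrameRate \(track.nominalFrameRate)")
        print("\(label): track duration \(track.timeRange.duration.seconds)s")
    }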

0 Answers
