Merging multiple videos from a directory

Date: 2017-02-01 06:30:45

Tags: ios swift swift3 avcomposition

I need to load all the videos from the documents directory, merge them into a single video, and then upload that video to a server. But the code below only merges two videos. How can I merge more than two?

func mergeVideo(index: Int) {
    print("index: \(index)")

    let item: AVPlayerItem!

    Utility.showActivityIndicator()

    // `fileManager` here holds the documents-directory path string.
    let url = fileManager.appending(videoFileNames[index] + ".MOV")
    let url2 = fileManager.appending(videoFileNames[index + 1] + ".MOV")

    let avAsset = AVAsset(url: URL(fileURLWithPath: url))
    let avAsset2 = AVAsset(url: URL(fileURLWithPath: url2))
    firstAsset = avAsset
    secondAsset = avAsset2
    if let firstAsset = firstAsset, let secondAsset = secondAsset {
        // 1 - Create an AVMutableComposition to hold the AVMutableCompositionTrack instances.
        let mixComposition = AVMutableComposition()

        // 2 - Create two video tracks
        let firstTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration), of: firstAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: kCMTimeZero)
        } catch {
            print("Failed to load first track")
        }

        let secondTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            // The second clip starts where the first one ends.
            try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration), of: secondAsset.tracks(withMediaType: AVMediaTypeVideo)[0], at: firstAsset.duration)
        } catch {
            print("Failed to load second track")
        }

        // 2.1 - Create a main instruction spanning both clips.
        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration))

        // 2.2 - Layer instructions; hide the first track once its clip ends.
        let firstInstruction = videoCompositionInstructionForTrack(firstTrack, asset: firstAsset)
        firstInstruction.setOpacity(0.0, at: firstAsset.duration)
        let secondInstruction = videoCompositionInstructionForTrack(secondTrack, asset: secondAsset)

        // 2.3 - Combine the instructions into a video composition.
        mainInstruction.layerInstructions = [firstInstruction, secondInstruction]
        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [mainInstruction]
        mainComposition.frameDuration = CMTimeMake(1, 30)
        mainComposition.renderSize = CGSize(width: UIScreen.main.bounds.width, height: UIScreen.main.bounds.height)

        item = AVPlayerItem(asset: mixComposition)
        item.videoComposition = mainComposition

        // 3 - Audio track (currently disabled)
        /*
        if let loadedAudioAsset = audioAsset {
            let audioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: 0)
            do {
                try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration)),
                                               of: loadedAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0],
                                               at: kCMTimeZero)
            } catch {
                print("Failed to load Audio track")
            }
        }*/

        // 4 - Build a unique output path in the documents directory.
        let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
        let dateFormatter = DateFormatter()
        dateFormatter.dateStyle = .long
        dateFormatter.timeStyle = .short
        let date = dateFormatter.string(from: Date())
        let savePath = (documentDirectory as NSString).appendingPathComponent("mergeVideo-\(date).mov")

        MergeURL = URL(fileURLWithPath: savePath)

        // 5 - Create the exporter.
        guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
        exporter.outputURL = MergeURL
        exporter.outputFileType = AVFileTypeQuickTimeMovie
        exporter.shouldOptimizeForNetworkUse = true
        exporter.videoComposition = mainComposition

        // 6 - Perform the export.
        exporter.exportAsynchronously {
            DispatchQueue.main.async {
                self.exportDidFinish(exporter)
            }
        }
    }
}

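// Derives the asset's capture orientation (and whether it is portrait) from its preferred transform.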
func orientationFromTransform(_ transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
    var assetOrientation = UIImageOrientation.up
    var isPortrait = false
    if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
        assetOrientation = .right
        isPortrait = true
    } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
        assetOrientation = .left
        isPortrait = true
    } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
        assetOrientation = .up
    } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
        assetOrientation = .down
    }
    return (assetOrientation, isPortrait)
}

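// Builds a layer instruction that scales the track to the screen width and applies its preferred transform.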
func videoCompositionInstructionForTrack(_ track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
    let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]

    let transform = assetTrack.preferredTransform
    let assetInfo = orientationFromTransform(transform)

    var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
    if assetInfo.isPortrait {
        scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
        let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
        instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor),
                                 at: kCMTimeZero)
    } else {
        let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
        var concat = assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
        if assetInfo.orientation == .down {
            let fixUpsideDown = CGAffineTransform(rotationAngle: .pi)
            let windowBounds = UIScreen.main.bounds
            let yFix = assetTrack.naturalSize.height + windowBounds.height
            let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
            concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
        }
        instruction.setTransform(concat, at: kCMTimeZero)
    }

    return instruction
}

1 Answer:

Answer 0 (score: 4):
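The function below generalizes the two-asset merge to any number of clips: it loops over an [AVAsset] array, appends each asset's video track to a single AVMutableComposition at a running start time, and builds one layer instruction per clip with a short fade between clips. Constants such as VIDEO_WIDTH, VIDEO_HEIGHT, MERGED_VIDEO, ERROR_LOADING_VIDEO and AVEXPORT_PRESET_NAME come from the answerer's project.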

func mergeVideo(_ mAssetsList: [AVAsset]) {
    let mainComposition = AVMutableVideoComposition()
    var startDuration: CMTime = kCMTimeZero // Running start time for the next clip.
    let mainInstruction = AVMutableVideoCompositionInstruction()
    let mixComposition = AVMutableComposition()
    var allVideoInstruction = [AVMutableVideoCompositionLayerInstruction]()

    let assets = mAssetsList

    for i in 0 ..< assets.count {
        let currentAsset: AVAsset = assets[i] // Current asset.

        let currentTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
        do {
            try currentTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, currentAsset.duration),
                                             of: currentAsset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                             at: startDuration)

            // Create the layer instruction for the current clip.
            let currentInstruction: AVMutableVideoCompositionLayerInstruction = videoCompositionInstructionForTrack(currentTrack, asset: currentAsset)

            // Fade in over the first second of the clip.
            currentInstruction.setOpacityRamp(fromStartOpacity: 0.0,
                                              toEndOpacity: 1.0,
                                              timeRange: CMTimeRangeMake(startDuration,
                                                                         CMTimeMake(1, 1)))
            if i != assets.count - 1 {
                // Fade out over the last second of the clip (skipped for the final clip).
                currentInstruction.setOpacityRamp(fromStartOpacity: 1.0,
                                                  toEndOpacity: 0.0,
                                                  timeRange: CMTimeRangeMake(
                                                    CMTimeSubtract(
                                                        CMTimeAdd(currentAsset.duration, startDuration),
                                                        CMTimeMake(1, 1)),
                                                    CMTimeMake(1, 1)))
            }

            // VIDEO_WIDTH / VIDEO_HEIGHT are project-defined constants for the output size.
            let transform: CGAffineTransform = currentTrack.preferredTransform
            if orientationFromTransform(transform).isPortrait {
                let outputSize = CGSize(width: VIDEO_WIDTH, height: VIDEO_HEIGHT)
                let horizontalRatio = outputSize.width / currentTrack.naturalSize.width
                let verticalRatio = outputSize.height / currentTrack.naturalSize.height
                let scaleToFitRatio = max(horizontalRatio, verticalRatio) // scaleAspectFill
                let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
                if currentAsset.g_orientation == .landscapeLeft {
                    // Upside-down capture: rotate 180° and translate back into the frame.
                    let rotation = CGAffineTransform(rotationAngle: .pi)
                    let translateToCenter = CGAffineTransform(translationX: VIDEO_WIDTH, y: VIDEO_HEIGHT)
                    let mixedTransform = rotation.concatenating(translateToCenter)
                    currentInstruction.setTransform(currentTrack.preferredTransform.concatenating(scaleFactor).concatenating(mixedTransform), at: kCMTimeZero)
                } else {
                    currentInstruction.setTransform(currentTrack.preferredTransform.concatenating(scaleFactor), at: kCMTimeZero)
                }
            }

            allVideoInstruction.append(currentInstruction) // Add the clip's layer instruction to the array.

            startDuration = CMTimeAdd(startDuration, currentAsset.duration) // Advance the running start time.
        } catch {
            print(ERROR_LOADING_VIDEO) // Project-defined error string.
        }
    }


    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, startDuration)
    mainInstruction.layerInstructions = allVideoInstruction

    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(1, 30)
    mainComposition.renderSize = CGSize(width: 640, height: 480)

    // Create the output path; MERGED_VIDEO, getDocumentsDirectory() and
    // deleteFileAtPath(_:) are project-defined helpers.
    let savePath = (getDocumentsDirectory() as NSString).appendingPathComponent("\(MERGED_VIDEO).mp4")
    let url = URL(fileURLWithPath: savePath)
    deleteFileAtPath(savePath)

    // AVEXPORT_PRESET_NAME is a project-defined export preset constant
    // (e.g. AVAssetExportPresetHighestQuality).
    guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVEXPORT_PRESET_NAME) else { return }
    exporter.outputURL = url
    exporter.outputFileType = AVFileTypeMPEG4 // Match the .mp4 extension used above.
    exporter.shouldOptimizeForNetworkUse = false
    exporter.videoComposition = mainComposition

    // Perform the Export
    exporter.exportAsynchronously {
        DispatchQueue.main.async {
            self.exportDidFinish(exporter)
        }
    }
}

func exportDidFinish(_ session: AVAssetExportSession) {
    if session.status == .completed, let outputURL = session.outputURL {
        print(outputURL)
    }
}
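
The question also asks about uploading the merged video to a server, which the answer does not cover. A minimal sketch of that step using URLSession, assuming a hypothetical endpoint (https://example.com/upload is a placeholder, not part of the original answer):

func uploadMergedVideo(at fileURL: URL) {
    // Placeholder endpoint; replace with the real server URL.
    var request = URLRequest(url: URL(string: "https://example.com/upload")!)
    request.httpMethod = "POST"
    request.setValue("video/mp4", forHTTPHeaderField: "Content-Type")
    // Stream the exported file from disk rather than loading it into memory.
    let task = URLSession.shared.uploadTask(with: request, fromFile: fileURL) { _, response, error in
        if let error = error {
            print("Upload failed: \(error)")
        } else if let http = response as? HTTPURLResponse {
            print("Upload finished with status \(http.statusCode)")
        }
    }
    task.resume()
}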

func videoCompositionInstructionForTrack(_ track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
    let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    return instruction
}

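// Helpers (from the answerer's project) exposing a video asset's natural size
// and inferring its capture orientation from the preferred transform's translation.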
extension AVAsset {
    var g_size: CGSize {
        return tracks(withMediaType: AVMediaTypeVideo).first?.naturalSize ?? .zero
    }
    var g_orientation: UIInterfaceOrientation {
        guard let transform = tracks(withMediaType: AVMediaTypeVideo).first?.preferredTransform else {
            return .portrait
        }
        switch (transform.tx, transform.ty) {
        case (0, 0):
            return .landscapeRight
        case (g_size.width, g_size.height):
            return .landscapeLeft
        case (0, g_size.width):
            return .portraitUpsideDown
        default:
            return .portrait
        }
    }
}

Create an array of AVAssets from all the videos in the documents directory and pass it to the mergeVideo function to merge them all.
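
For example, a minimal sketch of that step, assuming the mergeVideo(_:) function above is in scope and every .MOV file in the documents directory should be included:

func mergeAllVideosInDocuments() {
    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let fileURLs = (try? FileManager.default.contentsOfDirectory(at: documentsURL,
                                                                 includingPropertiesForKeys: nil,
                                                                 options: [])) ?? []
    // Collect every .MOV file into an [AVAsset] and hand it to mergeVideo(_:).
    let assets = fileURLs
        .filter { $0.pathExtension.lowercased() == "mov" }
        .map { AVAsset(url: $0) }
    mergeVideo(assets)
}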