Rotating a video 90 degrees in Swift 4

Posted: 2018-08-06 21:10:59

Tags: swift video rotation avasset

I've been searching all day, but every answer I find targets an older version of Swift or Obj-C.

I tried layer instructions, but AVMutableComposition has no instructions member. I remember it used to be easy with just an affineTransform, but now I can't find where to do it anymore.

            var mainVideoURL:URL!

            let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
            let tempPath = paths[0] + "/mainVideo.mp4"

            if(FileManager.default.fileExists(atPath: tempPath)){
                guard (try? FileManager.default.removeItem(atPath: tempPath)) != nil else {
                    print("remove path failed")
                    self.enableButtons(enabled:true)
                    return
                }
            }
            mainVideoURL = URL(fileURLWithPath: tempPath)

            let firstAsset = AVURLAsset(url: fileURL)

            let mixComposition = AVMutableComposition()


            // repeat video number of times

            let videoRepeat = photoVideoRepeats

            for i in 0 ... videoRepeat - 1 {

                do {

                    try mixComposition.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration),
                                                       of: firstAsset,
                                                       at: CMTimeMultiply(firstAsset.duration, Int32(i)))

                } catch _ {
                    print("Failed to load first track")
                }

            }

            guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
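
For reference, the "layer instructions" mentioned in the question live on AVMutableVideoComposition (via AVMutableVideoCompositionInstruction and AVMutableVideoCompositionLayerInstruction), not on AVMutableComposition itself. Below is a minimal, untested sketch of rotating the output 90 degrees that way, assuming the mixComposition and exporter from the snippet above and keeping the pre-Swift-4.2 constants the question uses:

// Hypothetical sketch: rotate the composed video 90 degrees with a layer instruction.
let videoTrack = mixComposition.tracks(withMediaType: .video)[0]
let naturalSize = videoTrack.naturalSize

let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
// Rotate 90 degrees, then shift right so the rotated frame stays inside the render area.
let rotation = CGAffineTransform(rotationAngle: .pi / 2)
    .concatenating(CGAffineTransform(translationX: naturalSize.height, y: 0))
layerInstruction.setTransform(rotation, at: kCMTimeZero)

let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
instruction.layerInstructions = [layerInstruction]

let videoComposition = AVMutableVideoComposition()
videoComposition.instructions = [instruction]
videoComposition.frameDuration = CMTimeMake(1, 30)
// Width and height swap because the output is rotated.
videoComposition.renderSize = CGSize(width: naturalSize.height, height: naturalSize.width)

exporter.videoComposition = videoComposition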

1 Answer:

Answer 0 (score: 1)

After recording the video, I was able to apply a CGAffineTransform to the AVMutableCompositionTrack.

In my case I needed to merge an audio track with the video, but you can see where the transform happens:

func mergeVideoAndAudio(videoUrl: URL,
                        audioUrl: URL) -> AVAsset {

    let mixComposition = AVMutableComposition()
    var mutableCompositionVideoTrack = [AVMutableCompositionTrack]()
    var mutableCompositionAudioTrack = [AVMutableCompositionTrack]()
    var mutableCompositionAudioOfVideoTrack = [AVMutableCompositionTrack]()

    //start merge

    let aVideoAsset = AVAsset(url: videoUrl)
    let aAudioAsset = AVAsset(url: audioUrl)

    let compositionAddVideo = mixComposition.addMutableTrack(withMediaType: .video,
                                                             preferredTrackID: kCMPersistentTrackID_Invalid)
    let compositionAddAudio = mixComposition.addMutableTrack(withMediaType: .audio,
                                                             preferredTrackID: kCMPersistentTrackID_Invalid)
    let compositionAddAudioOfVideo = mixComposition.addMutableTrack(withMediaType: .audio,
                                                                    preferredTrackID: kCMPersistentTrackID_Invalid)

    let aVideoAssetTrack: AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaType.video)[0]
    let aAudioOfVideoAssetTrack: AVAssetTrack? = aVideoAsset.tracks(withMediaType: AVMediaType.audio).first
    let aAudioAssetTrack: AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaType.audio)[0]

    // By default, keep the source video track's preferred transform

    compositionAddVideo?.preferredTransform = aVideoAssetTrack.preferredTransform

    var transforms = aVideoAssetTrack.preferredTransform

    // Note: the 1280/720 offsets below assume the video was recorded at 1280x720;
    // for other sizes they could instead be derived from aVideoAssetTrack.naturalSize.
    if UIDevice.current.orientation == UIDeviceOrientation.landscapeLeft {
        transforms = transforms.concatenating(CGAffineTransform(rotationAngle: CGFloat(-90.0 * .pi / 180)))
        transforms = transforms.concatenating(CGAffineTransform(translationX: 1280, y: 0))
    }
    else if UIDevice.current.orientation == UIDeviceOrientation.landscapeRight {
        transforms = transforms.concatenating(CGAffineTransform(rotationAngle: CGFloat(90.0 * .pi / 180)))
        transforms = transforms.concatenating(CGAffineTransform(translationX: 1280, y: 0))
    }
    else if UIDevice.current.orientation == UIDeviceOrientation.portraitUpsideDown {
        transforms = transforms.concatenating(CGAffineTransform(rotationAngle: CGFloat(180.0 * .pi / 180)))
        transforms = transforms.concatenating(CGAffineTransform(translationX: 0, y: 720))
    }

    compositionAddVideo?.preferredTransform = transforms


    mutableCompositionVideoTrack.append(compositionAddVideo!)
    mutableCompositionAudioTrack.append(compositionAddAudio!)
    mutableCompositionAudioOfVideoTrack.append(compositionAddAudioOfVideo!)

    do {

        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
                                                                            duration: aVideoAssetTrack.timeRange.duration),
                                                            of: aVideoAssetTrack,
                                                            at: CMTime.zero)

        // In my case the audio file is longer than the video file, so I used the
        // video asset's duration instead of the audio asset's duration
        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
                                                                            duration: aVideoAssetTrack.timeRange.duration),
                                                            of: aAudioAssetTrack,
                                                            at: CMTime.zero)

        // adding audio (of the video if exists) asset to the final composition
        if let aAudioOfVideoAssetTrack = aAudioOfVideoAssetTrack {
            try mutableCompositionAudioOfVideoTrack[0].insertTimeRange(CMTimeRangeMake(start: CMTime.zero,
                                                                                       duration: aVideoAssetTrack.timeRange.duration),
                                                                       of: aAudioOfVideoAssetTrack,
                                                                       at: CMTime.zero)
        }
    } catch {
        print(error.localizedDescription)
    }

    return mixComposition

}
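
A sketch of how the returned composition might be exported (the wrapper function and URL names are placeholders, not part of the original answer). Because the rotation is carried by the composition track's preferredTransform, this approach needs no separate AVMutableVideoComposition at export time:

func exportMergedVideo(videoURL: URL, audioURL: URL, to outputURL: URL) {
    // Merge with the function above, then export the result.
    let composition = mergeVideoAndAudio(videoUrl: videoURL, audioUrl: audioURL)

    guard let exporter = AVAssetExportSession(asset: composition,
                                              presetName: AVAssetExportPresetHighestQuality) else { return }
    exporter.outputURL = outputURL
    exporter.outputFileType = .mp4
    exporter.shouldOptimizeForNetworkUse = true

    exporter.exportAsynchronously {
        switch exporter.status {
        case .completed:
            print("Export finished: \(outputURL)")
        case .failed, .cancelled:
            print("Export failed: \(String(describing: exporter.error))")
        default:
            break
        }
    }
}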