Video orientation after merging audio and video

Time: 2017-09-11 11:49:43

Tags: ios swift video avfoundation

I am trying to merge an audio file and a video. The merged video does get saved to my photo library, and I have followed a few tutorials, but none of them gave a proper answer.

The problem is that the saved video is not in portrait mode.

So how can I get the video into my library in portrait orientation? Here is what I tried:

func mergeFilesWithUrl(videoUrl:NSURL, audioUrl:NSURL)
{
    let mixComposition : AVMutableComposition = AVMutableComposition()
    var mutableCompositionVideoTrack : [AVMutableCompositionTrack] = []
    var mutableCompositionAudioTrack : [AVMutableCompositionTrack] = []
    let totalVideoCompositionInstruction : AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()


    //start merge

    let aVideoAsset : AVAsset = AVAsset(url: videoUrl as URL)
    let aAudioAsset : AVAsset = AVAsset(url: audioUrl as URL)

    mutableCompositionVideoTrack.append(mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid))
    mutableCompositionAudioTrack.append( mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid))

    let aVideoAssetTrack : AVAssetTrack = aVideoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
    let aAudioAssetTrack : AVAssetTrack = aAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0]



    do{
        try mutableCompositionVideoTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aVideoAssetTrack, at: kCMTimeZero)

        //In my case the audio file is longer than the video file, so I use the videoAsset duration
        //instead of the audioAsset duration

        try mutableCompositionAudioTrack[0].insertTimeRange(CMTimeRangeMake(kCMTimeZero, aVideoAssetTrack.timeRange.duration), of: aAudioAssetTrack, at: kCMTimeZero)

    }catch{
        print("failed to insert time ranges: \(error)")
    }

    totalVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,aVideoAssetTrack.timeRange.duration )

    let mutableVideoComposition : AVMutableVideoComposition = AVMutableVideoComposition()
    mutableVideoComposition.frameDuration = CMTimeMake(1, 30)

    mutableVideoComposition.renderSize = CGSize(width: 1280, height: 720)



    //find your exported video at this URL


    let savePathUrl : NSURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/newVideo.mp4")

    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)!
    assetExport.outputFileType = AVFileTypeMPEG4

    assetExport.outputURL = savePathUrl as URL
    assetExport.shouldOptimizeForNetworkUse = true

    assetExport.exportAsynchronously { () -> Void in
        switch assetExport.status {

        case AVAssetExportSessionStatus.completed:


           //here I store the result into the asset library (ALAssetsLibrary needs `import AssetsLibrary` and is deprecated since iOS 9)

            let assetsLib = ALAssetsLibrary()
            assetsLib.writeVideoAtPath(toSavedPhotosAlbum: savePathUrl as URL, completionBlock: nil)

            print("success")
        case  AVAssetExportSessionStatus.failed:
            print("failed \(String(describing: assetExport.error))")
        case AVAssetExportSessionStatus.cancelled:
            print("cancelled \(String(describing: assetExport.error))")
        default:
            print("complete")
        }
    }

}
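For context, the root cause is visible if you inspect the source track: iPhone footage is usually stored with a landscape naturalSize plus a rotation in preferredTransform, and the composition above never copies that transform onto its video track (note also that totalVideoCompositionInstruction and mutableVideoComposition are created but never attached to the export session, so they have no effect). A minimal diagnostic sketch, with a hypothetical helper name, just to show where the orientation information lives:

import AVFoundation

// Illustration only: the track's naturalSize is typically landscape even for
// portrait recordings, and the 90° rotation is carried in preferredTransform.
// Dropping that transform is what makes the exported movie come out sideways.
func logOrientation(of asset: AVAsset) {
    guard let track = asset.tracks(withMediaType: AVMediaTypeVideo).first else { return }
    print("naturalSize: \(track.naturalSize)")                // e.g. 1920 x 1080
    print("preferredTransform: \(track.preferredTransform)")  // rotation for portrait clips
}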

1 Answer:

Answer 0 (score: 0):

There are two ways to solve this problem.

First approach, without AVMutableVideoComposition:

let mainComposition = AVMutableComposition()

let videoTrack = mainComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let videoAssetTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first!
try? videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAssetTrack, at: kCMTimeZero)
videoTrack.preferredTransform = videoAssetTrack.preferredTransform // THIS LINE IS IMPORTANT

let audioTrack = mainComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
let audioAssetTrack = audioAsset.tracks(withMediaType: AVMediaTypeAudio).first!
try? audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, audioAsset.duration), of: audioAssetTrack, at: kCMTimeZero)

let exportSession = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality)
exportSession?.outputURL = outputURL
exportSession?.outputFileType = AVFileTypeQuickTimeMovie
exportSession?.shouldOptimizeForNetworkUse = true
exportSession?.exportAsynchronously {

}
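This first approach does not rotate any pixels; it only copies the rotation metadata onto the composition's video track, so players and the Photos app display the export in portrait. If you want to confirm the metadata survived the export, a quick check could look like the sketch below, run inside the export session's completion handler (assuming outputURL is the same file URL handed to the export session above):

// Sketch only: verify the exported file still carries the rotation transform.
let exported = AVAsset(url: outputURL)
let transform = exported.tracks(withMediaType: AVMediaTypeVideo).first?.preferredTransform
print("exported preferredTransform: \(String(describing: transform))")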

Second approach, with AVMutableVideoComposition:
let mainComposition = AVMutableComposition()

let videoTrack = mainComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
let videoAssetTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo).first!
try? videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAssetTrack, at: kCMTimeZero)

let audioTrack = mainComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
let audioAssetTrack = audioAsset.tracks(withMediaType: AVMediaTypeAudio).first!
try? audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, audioAsset.duration), of: audioAssetTrack, at: kCMTimeZero)

let videoCompositionLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
videoCompositionLayerInstruction.setTransform(videoAssetTrack.preferredTransform, at: kCMTimeZero)

let videoCompositionInstruction = AVMutableVideoCompositionInstruction()
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mainComposition.duration)
videoCompositionInstruction.layerInstructions = [ videoCompositionLayerInstruction ]

var renderSize = videoAssetTrack.naturalSize
renderSize = renderSize.applying(videoAssetTrack.preferredTransform)
renderSize = CGSize(width: fabs(renderSize.width), height: fabs(renderSize.height))

let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = renderSize
videoComposition.frameDuration = CMTimeMake(1, 30)
videoComposition.instructions = [ videoCompositionInstruction ]

let exportSession = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality)
exportSession?.videoComposition = videoComposition
exportSession?.outputURL = outputURL
exportSession?.outputFileType = AVFileTypeQuickTimeMovie
exportSession?.shouldOptimizeForNetworkUse = false
exportSession?.exportAsynchronously {

}
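The second approach actually renders the rotation into the frames: the layer instruction applies preferredTransform, and the render size is the naturalSize mapped through that transform, so a portrait recording whose naturalSize is 1920 x 1080 exports with a 1080 x 1920 render size. Either way, to get the result into the photo library you could save the exported file with PHPhotoLibrary instead of the deprecated ALAssetsLibrary used in the question. A hedged sketch, assuming outputURL is the exported file, the export finished successfully, and photo-library permission has been granted:

import Photos

// Sketch only: save the exported movie to the user's photo library.
PHPhotoLibrary.shared().performChanges({
    _ = PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL)
}, completionHandler: { saved, error in
    print("saved to Photos: \(saved), error: \(String(describing: error))")
})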