Merging two videos in an iOS app while keeping each video's orientation?

Date: 2014-08-03 11:33:19

Tags: ios objective-c merge video-capture

I am working on merging two videos and have run into a strange problem: when I try to merge a portrait video with a landscape video, the output shows a landscape version of the portrait video, which is not what I want to happen. I'm pretty sure I'm missing something basic about handling orientation.

Here is my code:

NSMutableArray *videoClipPaths = [[NSMutableArray alloc] init];

[videoClipPaths addObject:[NSURL URLWithString:videoBundleURL1]];
[videoClipPaths addObject:[NSURL URLWithString:videoBundleURL2]];
[videoClipPaths addObject:[NSURL URLWithString:videoBundleURL1]];

float start_time = startSeconds;
float end_time = endSeconds;

AVMutableComposition *mixComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *compositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionTrack2 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
__block CMTime time = kCMTimeZero;
__block CGAffineTransform translate;
__block CGSize size;

for (int i=0; i<[videoClipPaths count]; i++)
{
    AVURLAsset *assetClip = [AVURLAsset URLAssetWithURL:[videoClipPaths objectAtIndex:i] options:nil];
    AVAssetTrack *clipVideoTrackB = [[assetClip tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVAsset *anAsset = [[AVURLAsset alloc] initWithURL:[videoClipPaths objectAtIndex:i] options:nil];
    CMTime start;
    CMTime duration;
    CMTimeRange video_timeRange;
    if (i==0){
        start = CMTimeMakeWithSeconds(0.0f, anAsset.duration.timescale);
        duration = CMTimeMakeWithSeconds(start_time, anAsset.duration.timescale);
        video_timeRange = CMTimeRangeMake(kCMTimeZero,duration);
        [compositionTrack insertTimeRange:video_timeRange ofTrack:clipVideoTrackB atTime:start error:nil];

        size = CGSizeMake(clipVideoTrackB.naturalSize.height, clipVideoTrackB.naturalSize.height);

        translate = CGAffineTransformMakeTranslation(-420, 0);
        CGAffineTransform newTransform = CGAffineTransformConcat(translate, clipVideoTrackB.preferredTransform);
        [compositionTrack setPreferredTransform:newTransform];
        time = CMTimeAdd(time, duration);

    }else if (i==1){
        CMTime duration = anAsset.duration;
        float seconds = CMTimeGetSeconds(duration);

        start = CMTimeMakeWithSeconds(start_time, anAsset.duration.timescale);
        duration = CMTimeMakeWithSeconds(seconds, anAsset.duration.timescale);
        video_timeRange = CMTimeRangeMake(kCMTimeZero,duration);
        [compositionTrack insertTimeRange:video_timeRange ofTrack:clipVideoTrackB atTime:start error:nil];

        translate = CGAffineTransformMakeTranslation(-420, 0);
        CGAffineTransform newTransform = CGAffineTransformConcat(translate, clipVideoTrackB.preferredTransform);
        [compositionTrack setPreferredTransform:newTransform];
        time = CMTimeAdd(time, duration);
    }
    else if (i==2){
        CMTime duration = anAsset.duration;
        float seconds = CMTimeGetSeconds(duration);

        start = CMTimeMakeWithSeconds(end_time, anAsset.duration.timescale);
        duration = CMTimeMakeWithSeconds(seconds-start_time, anAsset.duration.timescale);
        //            duration = CMTimeMakeWithSeconds(anAsset.duration.timescale, anAsset.duration.timescale);
        video_timeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(start_time, anAsset.duration.timescale),duration);
        [compositionTrack insertTimeRange:video_timeRange ofTrack:clipVideoTrackB atTime:kCMTimeInvalid error:nil];

        translate = CGAffineTransformMakeTranslation(-420, 0);
        CGAffineTransform newTransform = CGAffineTransformConcat(translate, clipVideoTrackB.preferredTransform);
        [compositionTrack setPreferredTransform:newTransform];
        time = CMTimeAdd(time, duration);
    }
    //        video_timeRange = CMTimeRangeMake(start,duration);

    //merge audio of video files
    AVAssetTrack *clipVideoTrackB1 = [[assetClip tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    /*CMTime start1;
     CMTime duration1;
     CMTimeRange video_timeRange1;
     if (i==0){
     start1 = CMTimeMakeWithSeconds(0.0, anAsset.duration.timescale);
     duration1 = CMTimeMakeWithSeconds(start_time, anAsset.duration.timescale);

     }else if (i==1){
     start1 = CMTimeMakeWithSeconds(start_time, anAsset.duration.timescale);
     duration1 = CMTimeMakeWithSeconds(end_time-start_time, anAsset.duration.timescale);
     }
     else if (i==2){
     start1 = CMTimeMakeWithSeconds(end_time, anAsset.duration.timescale);
     duration1 = CMTimeMakeWithSeconds(anAsset.duration.timescale, anAsset.duration.timescale);
     }
     video_timeRange1 = CMTimeRangeMake(start,duration);*/
    [compositionTrack2 insertTimeRange:video_timeRange ofTrack:clipVideoTrackB1 atTime:start error:nil];

}

The code above works out the time ranges for three clips (there are really only two videos), because I have to embed the second video into the first one over a predetermined interval.

The following code creates the video composition for all the assets I have:

AVMutableVideoCompositionInstruction *vtemp = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
vtemp.timeRange = CMTimeRangeMake(kCMTimeZero, time);
NSLog(@"\nInstruction vtemp's time range is %f %f", CMTimeGetSeconds( vtemp.timeRange.start),
      CMTimeGetSeconds(vtemp.timeRange.duration));

// Also tried videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack
AVMutableVideoCompositionLayerInstruction *vLayerInstruction = [AVMutableVideoCompositionLayerInstruction
                                                                videoCompositionLayerInstructionWithAssetTrack:compositionTrack];


[vLayerInstruction setTransform:compositionTrack.preferredTransform atTime:kCMTimeZero];
vtemp.layerInstructions = @[vLayerInstruction];

AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = size;
videoComposition.frameDuration = CMTimeMake(1,30);
videoComposition.instructions = @[vtemp];

AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
NSParameterAssert(exporter != nil);

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsPath = [paths objectAtIndex:0]; //Get the docs directory
documentsPath=[documentsPath stringByAppendingString:@"/MergeVideos"];

..... the rest of the code saves the video to the photo library

Thanks in advance!!

1 answer:

Answer 0 (score: 0)

I have a Swift version that works perfectly for me. After a few hours of coding I finally got this working. Sorry for posting the answer in Swift; I hope you find it helpful.

readyVideoURLs contains all the video URLs. You can use this code to merge any number of videos (a short usage sketch follows at the end of this answer).

I am making square videos.

mainComposition.renderSize = CGSize(width: 600, height: 600)
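
The 600×600 square render size is this answer's choice. If you instead want the output to keep a clip's natural aspect, one option (a minimal sketch, not part of the original answer) is to derive the render size from the track's natural size with its preferred transform applied:

import AVFoundation
import CoreGraphics

// Hypothetical helper (assumption, not from the answer): compute a render size that
// respects the track's recorded orientation instead of hard-coding a square.
func orientedRenderSize(for track: AVAssetTrack) -> CGSize {
    let transformed = track.naturalSize.applying(track.preferredTransform)
    return CGSize(width: abs(transformed.width), height: abs(transformed.height))
}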

func mergeVideos() {

        let composition = AVMutableComposition()
        let compositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)

        var current = kCMTimeZero
        var assetArr = [AVURLAsset]()

        let mainInstruction = AVMutableVideoCompositionInstruction()

        for url in readyVideoURLs {
            assetArr.append(AVURLAsset(url: url))
            print("readyVideoURL vid url:- \(url)")

        }

        for asset in assetArr {
            do {
                // Insert each clip at the current end of the track (not kCMTimeZero) so the clips play back in order
                try compositionTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: asset.tracks(withMediaType: AVMediaTypeVideo)[0], at: current)

            } catch let error {
                print(error.localizedDescription)
            }


            let instruction = videoCompositionInstructionForTrack(track: compositionTrack, asset: asset)
            mainInstruction.layerInstructions.append(instruction)
            mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(current, asset.duration))
            current = CMTimeAdd(current, asset.duration)
        }


        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [mainInstruction]
        mainComposition.frameDuration = CMTimeMake(1, 30)
        mainComposition.renderSize = CGSize(width: 600, height: 600)


        //add audio track
//        if let loadedAudioAsset = audioAsset {
//            let audioTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: 0)
//            do {
//                try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, current),
//                                               of: loadedAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0], at: kCMTimeZero)
//            } catch _ {
//                print("Failed to load Audio track")
//            }
//        }

        let finalVideoPath = NSHomeDirectory().appending("/Documents/finalDailyVideo.mp4")
        if FileManager.default.fileExists(atPath: finalVideoPath) {
            do {
                try FileManager.default.removeItem(atPath: finalVideoPath)
            } catch let error {
                print(error.localizedDescription)
            }
        }

        guard let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else { return }
        exporter.outputURL = URL(fileURLWithPath: finalVideoPath)
        exporter.outputFileType = AVFileTypeMPEG4
        exporter.shouldOptimizeForNetworkUse = true
        // Assign the video composition so the per-clip layer instructions and render size actually apply;
        // without this the orientation transforms built above have no effect on the exported file.
        exporter.videoComposition = mainComposition

        saveToAlbum(mergedVidUrl: URL(fileURLWithPath: finalVideoPath), exporter: exporter)

    }

Saving to the photo album:

func saveToAlbum(mergedVidUrl : URL, exporter : AVAssetExportSession)  {
        exportFinalVideo(exporter: exporter) { (completed) in
            if completed {

                let assetLib = ALAssetsLibrary()
                assetLib.writeVideoAtPath(toSavedPhotosAlbum: mergedVidUrl, completionBlock: { (assetUrl, error) in
                    if error == nil {
                        DispatchQueue.main.async {
                            self.showAlert(title: "Congrats..",message: "Your daily Vlog was saved in albums", actionTitle: "Got it")
                        }
                    }

                })
            }
        }
    }
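
ALAssetsLibrary has been deprecated since iOS 9. If you prefer the Photos framework, a minimal sketch of the same save step could look like the following (an assumption, not part of the original answer; it expects a file URL to the exported video and simply prints the result to stay self-contained):

import Photos

// Alternative save using the Photos framework (a sketch, not the answer's code).
// Call this after the export has completed, passing the exported file URL.
func saveToPhotosLibrary(mergedVidUrl: URL) {
    // Requires photo library permission (NSPhotoLibraryUsageDescription in Info.plist)
    PHPhotoLibrary.shared().performChanges({
        // Create a new video asset in the user's photo library from the exported file
        _ = PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: mergedVidUrl)
    }, completionHandler: { success, error in
        DispatchQueue.main.async {
            // Surface the result however your app reports it; printing keeps the sketch self-contained
            print(success ? "Saved to Photos" : "Save failed: \(String(describing: error))")
        }
    })
}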

Waiting for the exporter to finish:

func exportFinalVideo(exporter: AVAssetExportSession, completion:@escaping(Bool) -> ())  {
        exporter.exportAsynchronously() {

            if exporter.status == .exporting {
                print("EXPORTING...")

            } else if exporter.status == .completed {
                print("merged video exporting DONE")
                DispatchQueue.main.async {
                    self.dismiss(animated: true, completion: nil)
                }
                completion(true)
            } else if exporter.status == .failed {
                completion(false)
                self.showAlert(title: "Oops!", message: "Something went wrong. Video could not be created.", actionTitle: "Okay!")

            }
        }

    }

The video layer instruction used for orientation:

func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]

        let transform = assetTrack.preferredTransform
        let assetInfo = orientationFromTransform(transform: transform)

        var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
        if assetInfo.isPortrait {
            scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
            let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
            instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor),
                                     at: kCMTimeZero)
        } else {
            let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
            var concat = assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
            if assetInfo.orientation == .down {
                let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
                let windowBounds = UIScreen.main.bounds
                let yFix = assetTrack.naturalSize.height + windowBounds.height
                let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
                concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
            }
            instruction.setTransform(concat, at: kCMTimeZero)
        }

        return instruction
    }

    func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
        var assetOrientation = UIImageOrientation.up
        var isPortrait = false
        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            assetOrientation = .right
            isPortrait = true
        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
            assetOrientation = .left
            isPortrait = true
        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
            assetOrientation = .up
        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            assetOrientation = .down
        }
        return (assetOrientation, isPortrait)
    }
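
Usage sketch (an assumption, not part of the original answer): a method on the same class as mergeVideos(), where readyVideoURLs is the [URL] property the answer refers to. The clip names "clip1"/"clip2" are placeholders purely for illustration.

    // Hypothetical call site: collect the clip URLs, then kick off the merge.
    func mergeBundledClips() {
        readyVideoURLs.removeAll()
        if let first = Bundle.main.url(forResource: "clip1", withExtension: "mp4"),
           let second = Bundle.main.url(forResource: "clip2", withExtension: "mp4") {
            readyVideoURLs = [first, second]
        }
        mergeVideos()
    }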