Video appears cracked when merging videos with AVMutableComposition

Time: 2015-02-16 14:57:32

Tags: ios objective-c video avassetexportsession avmutablecomposition

I am merging videos with AVMutableComposition using the code below:

- (void)MergeAndSave_internal{

    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1,30);
    videoComposition.renderScale = 1.0;

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];

    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];

    NSLog(@"%@",videoPathArray);

    float time = 0;
    CMTime startTime = kCMTimeZero;

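    // Append each clip's video and audio tracks to the composition back to back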
    for (int i = 0; i<videoPathArray.count; i++) {

        AVURLAsset *sourceAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[videoPathArray objectAtIndex:i]] options:[NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey]];

        NSError *error = nil;

        BOOL ok = NO;
        AVAssetTrack *sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        AVAssetTrack *sourceAudioTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

        CGSize temp = CGSizeApplyAffineTransform(sourceVideoTrack.naturalSize, sourceVideoTrack.preferredTransform);
        CGSize size = CGSizeMake(fabsf(temp.width), fabsf(temp.height));
        CGAffineTransform transform = sourceVideoTrack.preferredTransform;

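        // Render at the source track's natural size; portrait clips are scaled by width/height and shifted to centre them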
        videoComposition.renderSize = sourceVideoTrack.naturalSize;
        if (size.width > size.height) {

            [layerInstruction setTransform:transform atTime:CMTimeMakeWithSeconds(time, 30)];
        } else {


            float s = size.width/size.height;


            CGAffineTransform newe = CGAffineTransformConcat(transform, CGAffineTransformMakeScale(s,s));

            float x = (size.height - size.width*s)/2;

            CGAffineTransform newer = CGAffineTransformConcat(newe, CGAffineTransformMakeTranslation(x, 0));

            [layerInstruction setTransform:newer atTime:CMTimeMakeWithSeconds(time, 30)];
        }
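        // Adopt the first clip's orientation for the composition track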
        if(i==0){
             [compositionVideoTrack setPreferredTransform:sourceVideoTrack.preferredTransform];
        }
        ok = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [sourceAsset duration]) ofTrack:sourceVideoTrack atTime:startTime error:&error];


        ok = [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [sourceAsset duration]) ofTrack:sourceAudioTrack atTime:startTime error:nil];

        if (!ok) {
            [radialView4 setHidden:YES];
            NSLog(@"Export failed: %@", [[self.exportSession error] localizedDescription]);
            UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Something Went Wrong :(" delegate:nil cancelButtonTitle:@"Ok" otherButtonTitles:nil];
            [alert show];
            break;
        }

        startTime = CMTimeAdd(startTime, [sourceAsset duration]);

    }


    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    instruction.timeRange = compositionVideoTrack.timeRange;

    videoComposition.instructions = [NSArray arrayWithObject:instruction];

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs =  [documentsDirectory stringByAppendingPathComponent:
                             [NSString stringWithFormat:@"RampMergedVideo.mov"]];
    unlink([myPathDocs UTF8String]);
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                      presetName:AVAssetExportPreset1280x720];
    exporter.outputURL=url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{

            switch ([exporter status]) {
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"Export failed: %@", [exporter error]);
                    break;
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"Export canceled");
                    break;
                case AVAssetExportSessionStatusCompleted:
                    NSLog(@"Export completed successfully");
                    break;
                default:
                    break;
            }
            if (exporter.status != AVAssetExportSessionStatusCompleted){
                NSLog(@"Retry export");

            }

        });
    }];

}

But the video looks cracked when it is saved to the device and played in QuickTime Player. I think the problem lies in the CGAffineTransform I apply. Can anyone please advise?

Here are screenshots of the cracked frames from the middle of the video: [screenshot] [screenshot]

1 Answer:

Answer 0 (score: 5)

You haven't assigned the videoComposition to the AVAssetExportSession. Try adding exporter.videoComposition = videoComposition;. I haven't tried it myself, but it should work.
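For reference, a minimal sketch of how the export setup from the question would look with that one-line fix applied, reusing the composition, videoComposition, and url objects built earlier in MergeAndSave_internal (everything else in the method is assumed to stay as it is):

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                      presetName:AVAssetExportPreset1280x720];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    // The missing line: without it the exporter ignores the layer instructions
    // (and their transforms), so frames come out with the raw track geometry.
    exporter.videoComposition = videoComposition;

    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            if (exporter.status == AVAssetExportSessionStatusCompleted) {
                NSLog(@"Export completed");
            } else {
                NSLog(@"Export failed: %@", exporter.error);
            }
        });
    }];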