Problem merging two videos on iOS 8

Posted: 2014-12-09 11:34:30

Tags: ios iphone video ios8 video-processing

My application merges two videos.

I merge the two videos with an AVVideoComposition, using the following code:

- (void)buildSequenceComposition:(AVMutableComposition *)mixComposition andVideoComposition:(AVMutableVideoComposition *)videoComposition withAudioMix:(AVMutableAudioMix *)audioMix
{
    CMTime nextClipStartTime = kCMTimeZero;
    NSInteger i;

    // No transitions: append each clip on its own video/audio track pair, one after another.
    AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    NSMutableArray *arrLayerInstruction = [NSMutableArray array];
    NSMutableArray *arrAudioMixParameters = [NSMutableArray array];

    for (i = 0; i < [_clips count]; i++ )
    {
        AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        AVURLAsset *asset = [[_clips objectAtIndex:i] objectForKey:@"videoURL"];

        CMTimeRange timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]);

        AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        NSError *err = nil;
        [compositionVideoTrack insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:&err];
        if (err) {
            NSLog(@"Failed to insert video track: %@", err);
        }

        if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] != 0)
        {
            AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

            [compositionAudioTrack insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil];


            AVMutableAudioMixInputParameters *exportAudioMixInputParameters = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:compositionAudioTrack];
            [exportAudioMixInputParameters setVolume:[[[_clips objectAtIndex:i] objectForKey:@"videoSoundLevel"] floatValue] atTime:nextClipStartTime];
            // Collect one parameter set per clip; the full array is assigned to the mix after the loop.
            [arrAudioMixParameters addObject:exportAudioMixInputParameters];

        }
        //FIXING ORIENTATION//
        AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];

        UIImageOrientation FirstAssetOrientation_  = UIImageOrientationUp;
        BOOL  isFirstAssetPortrait_  = NO;
        CGAffineTransform firstTransform = clipVideoTrack.preferredTransform;
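        // Infer the clip's recorded orientation from its preferredTransform rotation matrix.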
        if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0)
        {
            FirstAssetOrientation_= UIImageOrientationRight;
            isFirstAssetPortrait_ = YES;
        }
        if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0)
        {
            FirstAssetOrientation_ =  UIImageOrientationLeft;
            isFirstAssetPortrait_ = YES;
        }
        if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0)
        {
            FirstAssetOrientation_ =  UIImageOrientationUp;
        }
        if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0)
        {
            FirstAssetOrientation_ = UIImageOrientationDown;
        }

        CGFloat tHeight = [clipVideoTrack naturalSize].height;
        CGFloat tWidth = [clipVideoTrack naturalSize].width;

        if (isFirstAssetPortrait_)
        {
            // Portrait clips report a landscape naturalSize, so swap width and height.
            CGFloat temp = tHeight;
            tHeight = tWidth;
            tWidth = temp;
        }

        CGFloat FirstAssetScaleToFitRatioWidth = [mixComposition naturalSize].width/tWidth;
        CGFloat FirstAssetScaleToFitRatioHeight = [mixComposition naturalSize].height/tHeight;


        // Aspect-fit: scale by the smaller ratio so the whole clip fits inside the composition.
        CGFloat FirstAssetScaleToFitRatio = MIN(FirstAssetScaleToFitRatioWidth, FirstAssetScaleToFitRatioHeight);
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
        CGSize naturalSize = CGSizeApplyAffineTransform(CGSizeMake(tWidth, tHeight), FirstAssetScaleFactor);
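        // naturalSize is the clip's footprint after the aspect-fit scale is applied.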

        CGAffineTransform transform = CGAffineTransformIdentity;

        CGSize translateSize = CGSizeMake(0, 0);

        if (FirstAssetScaleToFitRatioWidth < FirstAssetScaleToFitRatioHeight)
        {
            // Letterbox: center the scaled clip vertically.
            transform = CGAffineTransformMakeTranslation(0, ([mixComposition naturalSize].height - naturalSize.height) / 2);
            translateSize.height = ([mixComposition naturalSize].height - naturalSize.height) / 2;
        }
        else if (FirstAssetScaleToFitRatioWidth > FirstAssetScaleToFitRatioHeight)
        {
            // Pillarbox: center the scaled clip horizontally.
            transform = CGAffineTransformMakeTranslation(([mixComposition naturalSize].width - naturalSize.width) / 2, 0);
            translateSize.width = ([mixComposition naturalSize].width - naturalSize.width) / 2;
        }
        // Equal ratios fill the frame exactly; no translation is needed.

        [FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(clipVideoTrack.preferredTransform, FirstAssetScaleFactor),transform) atTime:kCMTimeZero];

        // Show the layer only during its clip's time range, then hide it so the next clip's layer becomes visible.
        [FirstlayerInstruction setOpacity:1.0 atTime:nextClipStartTime];
        [FirstlayerInstruction setOpacity:0.0 atTime:CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration)];

        [arrLayerInstruction addObject:FirstlayerInstruction];
        nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
    }
    MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, nextClipStartTime);
    MainInstruction.layerInstructions = arrLayerInstruction;
    audioMix.inputParameters = arrAudioMixParameters;
    videoComposition.instructions = [NSArray arrayWithObject:MainInstruction];
}
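
The export call is not shown above; for reference, this is roughly how the composition, video composition, and audio mix are handed to AVAssetExportSession. A minimal sketch — the 30 fps frame duration, render size, and output path are assumed values, not the actual ones:

    // Sketch only: frameDuration, renderSize, and outputURL are assumptions.
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.renderSize = [mixComposition naturalSize];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetHighestQuality];
    exporter.videoComposition = videoComposition;
    exporter.audioMix = audioMix;
    exporter.outputURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"merged.mov"]];
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        if (exporter.status == AVAssetExportSessionStatusFailed) {
            NSLog(@"Export failed: %@", exporter.error);
        }
    }];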

This works fine on iOS 7, but on iOS 8 exporting the video with the AVVideoComposition fails with the following error:

Error Domain=AVFoundationErrorDomain Code=-11821 "Cannot Decode" {NSLocalizedFailureReason=The media data could not be decoded. It may be damaged.}

The same code works on iOS 7 and earlier iOS versions, but not on iOS 8.

I also tried Apple's sample code from AVSampleEditor, and it produces the same error when exporting on iOS 8.

Please help me resolve this issue. Thanks.

1 Answer:

Answer 0 (score: 1)

Check the demo code. It works for me.
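
The demo itself is not reproduced here. A fix often suggested for this -11821 error on iOS 8 is to insert every clip into one shared video track and one shared audio track, instead of creating a new track pair per clip as the question's code does. A minimal sketch of that idea (an assumption; the linked demo may take a different approach):

    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    // One shared track pair for all clips, rather than one pair per clip.
    AVMutableCompositionTrack *videoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *audioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];

    CMTime cursor = kCMTimeZero;
    for (NSDictionary *clip in _clips) {
        AVURLAsset *asset = clip[@"videoURL"];
        CMTimeRange range = CMTimeRangeMake(kCMTimeZero, asset.duration);
        // Append the clip's video (and audio, if present) at the current cursor.
        [videoTrack insertTimeRange:range
                            ofTrack:[asset tracksWithMediaType:AVMediaTypeVideo][0]
                             atTime:cursor
                              error:nil];
        if ([asset tracksWithMediaType:AVMediaTypeAudio].count > 0) {
            [audioTrack insertTimeRange:range
                                ofTrack:[asset tracksWithMediaType:AVMediaTypeAudio][0]
                                 atTime:cursor
                                  error:nil];
        }
        cursor = CMTimeAdd(cursor, asset.duration);
    }

With a single pair of tracks, the layer instructions would then target videoTrack for every clip.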