iOS AVAssetExportSession fails with Code = -11820 only on iPhone 5(c)

Asked: 2016-03-23 08:50:53

Tags: ios objective-c iphone avfoundation avassetexportsession

I want to export a video file from a composition that contains two videos (with their audio) and one music track. It works fine on the iPhone 5s and newer, but it fails on an iPhone 5c (iOS 9.2.1). The error is returned here:

[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    if (AVAssetExportSessionStatusCompleted == _assetExport.status) {
        [self performSelectorOnMainThread:@selector(videoIsDone) withObject:nil waitUntilDone:YES];
    } else {
        NSLog(@"Export error: %@", _assetExport.error);
        [self performSelectorOnMainThread:@selector(videoHasFailed) withObject:nil waitUntilDone:YES];
    }
}];

The log it prints:

Export error: Error Domain=AVFoundationErrorDomain Code=-11820 "Cannot Complete Export" UserInfo={NSLocalizedRecoverySuggestion=Try exporting again., NSLocalizedDescription=Cannot Complete Export}

As mentioned above, it works perfectly on my iPhone 5s, 6, and 6s; only my iPhone 5c returns this error. Hopefully someone has experience with this.
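The -11820 error itself is fairly generic. As a diagnostic aid, here is a minimal sketch of digging a more specific cause out of the failed session, assuming the same _assetExport session as above (AVFoundation only attaches NSUnderlyingErrorKey when it has a more specific cause to report):

// Sketch: log whatever detail AVFoundation attached to the failure.
// Assumes the _assetExport session from the snippet above.
NSError *exportError = _assetExport.error;
NSLog(@"Export error: %@ userInfo: %@", exportError.localizedDescription, exportError.userInfo);
NSError *underlying = exportError.userInfo[NSUnderlyingErrorKey];
if (underlying != nil) {
    NSLog(@"Underlying error: %@", underlying);
}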

The full code that creates the tracks and the composition:

- (void) generateVideoWithInputPath:(NSString*)inputVideo andAudioFileName:(NSString*)audioFileName andVolume:(float)volume {
NSString* introVideoPath = [[NSBundle mainBundle] pathForResource:@"IntroVideo" ofType:@"mp4"];
NSURL* introVideoUrl = [NSURL fileURLWithPath:introVideoPath];
NSURL* video_inputFileUrl = [NSURL fileURLWithPath:inputVideo];

self.outputAssetURL = nil;
self.outputFilePath = finalVideoPath;
NSURL* outputFileUrl = [NSURL fileURLWithPath:self.outputFilePath];
unlink([self.outputFilePath UTF8String]); // remove existing result

// Create composition
AVMutableComposition* mixComposition = [AVMutableComposition composition];

// Create Asset for introVideo
AVURLAsset* introVideoAsset = [[AVURLAsset alloc] initWithURL:introVideoUrl options:nil];

// Create time ranges
CMTime introStartTime = kCMTimeZero;
CMTime introEndTime = introVideoAsset.duration;
CMTimeRange introVideo_timeRange = CMTimeRangeMake(introStartTime, introEndTime);

//add VideoTrack of introVideo to composition
NSArray*        introVideoAssetTracks = [introVideoAsset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack*   introVideoAssetTrack = ([introVideoAssetTracks count] > 0 ? [introVideoAssetTracks objectAtIndex:0] : nil);

AVMutableCompositionTrack* b_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[b_compositionVideoTrack insertTimeRange:introVideo_timeRange ofTrack:introVideoAssetTrack atTime:introStartTime error:nil];

// Add AudioTrack of introVideo to composition
NSArray*        audioAssetTracksIntro = [introVideoAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack*   audioAssetTrackIntro = ([audioAssetTracksIntro count] > 0 ? [audioAssetTracksIntro objectAtIndex:0] : nil);
AVMutableCompositionTrack* a_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionAudioTrack insertTimeRange:introVideo_timeRange ofTrack:audioAssetTrackIntro atTime:introStartTime error:nil];

// Create Asset for inputVideo
CMTime nextClipStartTime = introEndTime;
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];

// Create time ranges
CMTime videoStartTime = kCMTimeZero;
CMTime videoEndTime = videoAsset.duration;
if (CMTIME_IS_INVALID(videoEndTime)) {
    NSLog(@"videoEndTime is invalid");
}
CMTimeRange mainVideo_timeRange = CMTimeRangeMake(videoStartTime, videoEndTime);

// Add VideoTrack of inputVideo to composition
NSArray*       videoAssetTracks2 = [videoAsset tracksWithMediaType:AVMediaTypeVideo];
AVAssetTrack*  videoAssetTrack2 = ([videoAssetTracks2 count] > 0 ? [videoAssetTracks2 objectAtIndex:0] : nil);
//    CMTime         audioDurationFix = CMTimeAdd(videoAsset.duration, CMTimeMakeWithSeconds(-1.0f, 1));
//    CMTimeRange    video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
//    CMTimeRange    audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioDurationFix);

AVMutableCompositionTrack* a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:mainVideo_timeRange ofTrack:videoAssetTrack2 atTime:nextClipStartTime error:nil];

// Add AudioTrack of inputVideo to composition
NSArray*        audioAssetTracks2 = [videoAsset tracksWithMediaType:AVMediaTypeAudio];
AVAssetTrack*   audioAssetTrack2 = ([audioAssetTracks2 count] > 0 ? [audioAssetTracks2 objectAtIndex:0] : nil);
//AVMutableCompositionTrack* a_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionAudioTrack insertTimeRange:mainVideo_timeRange ofTrack:audioAssetTrack2 atTime:nextClipStartTime error:nil];

AVMutableAudioMix* audioMix = nil;

if (audioFileName) {
    NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:audioFileName];


    // Create Asset for audio (song)
    AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];

    // Add Audio of song to composition
    NSArray* audioAssetTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio];
    AVAssetTrack* audioAssetTrack = ([audioAssetTracks count] > 0 ? [audioAssetTracks objectAtIndex:0] : nil);

    AVMutableCompositionTrack* b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:mainVideo_timeRange ofTrack:audioAssetTrack atTime:nextClipStartTime error:nil];

    // Set Volume of song
    NSArray *tracksToDuck = [mixComposition tracksWithMediaType:AVMediaTypeAudio];
    NSMutableArray *trackMixArray = [NSMutableArray array];
//        for (int i = 0; i < [tracksToDuck count]; i++) {
    AVAssetTrack *leTrack = [tracksToDuck objectAtIndex:0];
    AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:leTrack];
    [trackMix setVolume:1 atTime:kCMTimeZero];
    [trackMixArray addObject:trackMix];

    AVAssetTrack *leTrack2 = [tracksToDuck objectAtIndex:1];
    AVMutableAudioMixInputParameters *trackMix2 = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:leTrack2];
    [trackMix2 setVolume:volume atTime:kCMTimeZero];
    [trackMixArray addObject:trackMix2];
//        }

    audioMix = [AVMutableAudioMix audioMix];
    audioMix.inputParameters = trackMixArray;

}

// Export composition to videoFile
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = AVFileTypeQuickTimeMovie; //@"com.apple.quicktime-movie";
_assetExport.outputURL = outputFileUrl;
_assetExport.videoComposition = [self getVideoComposition:videoAsset intro:introVideoAsset composition:mixComposition];
// Set song volume audio
if (audioMix != nil) {
    _assetExport.audioMix = audioMix;
}
[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    if (AVAssetExportSessionStatusCompleted == _assetExport.status) {
        [self performSelectorOnMainThread:@selector(videoIsDone) withObject:nil waitUntilDone:YES];
    } else {
        NSLog(@"Export error: %@", _assetExport.error);
        [self performSelectorOnMainThread:@selector(videoHasFailed) withObject:nil waitUntilDone:YES];
    }
}];


}

- (AVMutableVideoComposition *)getVideoComposition:(AVAsset *)asset intro:(AVAsset *)intro composition:(AVMutableComposition *)composition {

  AVMutableCompositionTrack *compositionIntroTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

  AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

  NSArray *introVideoTracksArray = [intro tracksWithMediaType:AVMediaTypeVideo];
  AVAssetTrack *introTrack;
  if (introVideoTracksArray.count > 0) {
    introTrack = [introVideoTracksArray objectAtIndex:0];
    [compositionIntroTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, intro.duration) ofTrack:introTrack atTime:kCMTimeZero error:nil];
  }

  NSArray *videoTracksArray = [asset tracksWithMediaType:AVMediaTypeVideo];
  AVAssetTrack *videoTrack;
  if (videoTracksArray.count > 0) {
    videoTrack = [videoTracksArray objectAtIndex:0];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:intro.duration error:nil];
  }

  AVMutableVideoCompositionLayerInstruction *firstLayerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionIntroTrack];

  AVMutableVideoCompositionLayerInstruction *secondLayerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];

  CGSize videoSize;
  if (videoTrack && introTrack) {
    CGSize trackDimensions = [videoTrack naturalSize];
    // turn around for portrait
    if (trackDimensions.height>trackDimensions.width) {
        videoSize = CGSizeMake(trackDimensions.width, trackDimensions.height);
    } else {
        videoSize = CGSizeMake(trackDimensions.height, trackDimensions.width);
    }

    CGAffineTransform transform = videoTrack.preferredTransform;
    CGAffineTransform scale = CGAffineTransformMakeScale((videoSize.width/introTrack.naturalSize.width),(videoSize.height/introTrack.naturalSize.height));
    [firstLayerInst setTransform:scale atTime:kCMTimeZero];
    [secondLayerInst setTransform:transform atTime:kCMTimeZero];
  } else {
    videoSize = [[FilteringClass sharedFilteringClass] getVideoSize];
  }

  CMTime totalTime = CMTimeAdd(asset.duration, intro.duration);
  NSLog(@"Total videotime: %lld", totalTime.value);

  AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
  inst.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime);
  inst.layerInstructions = [NSArray arrayWithObjects:firstLayerInst, secondLayerInst, nil];


  AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
  videoComposition.instructions = [NSArray arrayWithObject:inst];

  videoComposition.renderSize = videoSize;
  videoComposition.frameDuration = CMTimeMake(1, 30);
  videoComposition.renderScale = 1.0;
  return videoComposition;
}
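A quick per-device sanity check is to ask AVAssetExportSession which presets it actually offers for the asset; a minimal sketch, assuming the mixComposition built in the code above:

// Sketch: list the export presets this particular device supports for the
// composition (assumes the mixComposition built in the code above).
NSArray *presets = [AVAssetExportSession exportPresetsCompatibleWithAsset:mixComposition];
NSLog(@"Compatible presets: %@", presets);
if (![presets containsObject:AVAssetExportPresetHighestQuality]) {
    NSLog(@"AVAssetExportPresetHighestQuality is not available on this device for this asset");
}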

1 Answer

Answer 0 (score: 2)

It looks to me like you are hitting the decoder limit set in AVFoundation. The decoder limit was 4 in iOS 5 and 16 in iOS 6, so try exporting a small video first: if that works, it means the problem is in your video file... it is probably exceeding the decoder limit.
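Worth noting in that light: the code in the question inserts each clip twice, once in generateVideoWithInputPath and again in getVideoComposition, so the composition ends up with four video tracks plus the audio tracks, and each track needs its own decoder during export. Below is a minimal sketch of appending both clips to a single composition video track instead; introAsset and mainAsset are hypothetical stand-ins for the intro and input assets from the question:

// Sketch: build the sequence on ONE video track so the export needs fewer
// simultaneous decoders. introAsset and mainAsset are placeholder names.
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoTrack =
    [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                             preferredTrackID:kCMPersistentTrackID_Invalid];

NSError *error = nil;
CMTime cursor = kCMTimeZero;
for (AVAsset *asset in @[introAsset, mainAsset]) {
    AVAssetTrack *source = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (source == nil) { continue; }
    CMTimeRange range = CMTimeRangeMake(kCMTimeZero, asset.duration);
    if (![videoTrack insertTimeRange:range ofTrack:source atTime:cursor error:&error]) {
        NSLog(@"insertTimeRange failed: %@", error);
    }
    cursor = CMTimeAdd(cursor, asset.duration);
}

The same pattern applies to the audio tracks. Every extra AVMutableCompositionTrack can mean another decoder running concurrently during export, which is exactly the resource an older device like the 5c runs out of first.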