AVAssetExportSession有时会导出而没有音频

时间:2019-05-13 16:59:34

标签: ios cocoa avfoundation avassetexportsession

使用AVAssetExportSession从iPad上记录的quicktime文件中导出MP4。

大约每导出 6 次,就会有一次得到没有音频的导出文件,并且该问题仅在某些特定的 iPad 机型上出现。

// Export a recorded QuickTime movie to MP4.
//
// BUG FIX (intermittent missing audio): the export session must be created
// with `mixComposition` — the composition that actually contains the inserted
// audio track — not with the original `asset`. Exporting the raw asset means
// the composed audio track is ignored, and the videoComposition's layer
// instructions reference composition track IDs that do not exist in the
// source asset, which drops audio on some devices.
AVAsset *asset = [AVAsset assetWithURL:videoUrl];
// Reuse `asset` rather than re-loading the URL a second time.
AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                    preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                    preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime insertTime = kCMTimeZero;
CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);

// Check the BOOL return and surface insertion failures instead of passing
// error:nil — a silent failure here is exactly how a track goes missing.
NSError *insertError = nil;
if (![videoTrack insertTimeRange:timeRange
                         ofTrack:assetTrack
                          atTime:insertTime
                           error:&insertError]) {
    NSLog(@"Failed to insert video track: %@", insertError);
}
// firstObject is nil-safe; nil means the recording simply has no audio.
AVAssetTrack *sourceAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
if (sourceAudioTrack) {
    insertError = nil;
    if (![audioTrack insertTimeRange:timeRange
                             ofTrack:sourceAudioTrack
                              atTime:insertTime
                               error:&insertError]) {
        NSLog(@"Failed to insert audio track: %@", insertError);
    }
}
insertTime = CMTimeAdd(insertTime, asset.duration);
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [self layerInstructionAfterFixingOrientationForAsset:asset forTrack:videoTrack atTime:insertTime defaultAsset:assetTrack];
mainInstruction.layerInstructions = @[videolayerInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, insertTime);
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];

// A portrait recording stores its pixels rotated; swap width/height so the
// render size matches the displayed orientation.
BOOL isVideoAssetPortrait_ = [self videoAssetPortraitFromTransform:assetTrack];
CGSize naturalSize;
if (isVideoAssetPortrait_) {
    naturalSize = CGSizeMake(assetTrack.naturalSize.height, assetTrack.naturalSize.width);
} else {
    naturalSize = assetTrack.naturalSize;
}

mainCompositionInst.renderSize = CGSizeMake(naturalSize.width, naturalSize.height);
mainCompositionInst.instructions = @[mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);

// KEY FIX: export the composition (video + audio tracks), not the raw asset.
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                       presetName:AVAssetExportPreset1280x720];
exportSession.videoComposition = mainCompositionInst;
exportSession.fileLengthLimit = 6500000;
exportSession.shouldOptimizeForNetworkUse = YES;
exportSession.outputFileType = AVFileTypeMPEG4;
NSString *myPathDocs = [[Utils getDataDirectory] stringByAppendingPathComponent:@"sourceVideo.mp4"];
NSURL *assetURL = [NSURL fileURLWithPath:myPathDocs];
// The export fails outright if a file already exists at the output URL.
[[NSFileManager defaultManager] removeItemAtURL:assetURL error:nil];
exportSession.outputURL = assetURL;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    // The completion handler runs on an arbitrary background queue; hop to
    // the main queue before notifying the delegate (likely drives UI).
    dispatch_async(dispatch_get_main_queue(), ^{
        switch ([exportSession status]) {
            case AVAssetExportSessionStatusCompleted:
                [self.delegate didSucceedMovieProcessing:assetURL];
                break;
            default:
                // Log the underlying error so failures are diagnosable.
                NSLog(@"Export failed (%ld): %@", (long)[exportSession status], exportSession.error);
                if (self.delegate && [self.delegate respondsToSelector:@selector(didFailedMovieProcessing)]) {
                    [self.delegate didFailedMovieProcessing];
                }
                break;
        }
    });
}];

0 个答案:

没有答案