iOS AVFoundation export session is missing audio

Asked: 2012-12-24 23:55:37

Tags: ios audio video export avfoundation

I'm using the iOS AVFoundation framework and can successfully combine video tracks, image overlays, and text overlays. However, my output file does not keep the audio from the original source video.

How can I make sure that the audio from one of my source videos is carried over into the new video I create?

Edit

The code below shows how to build the video while keeping the original audio. It was not obvious to me that the audio track has to be added separately when working with video in AVFoundation. Hopefully this helps someone else.

    // Note: in this snippet `url` is the source AVAsset (despite its name),
    // `videoComposition` is the AVMutableComposition being built, and
    // `error` is an NSError * declared elsewhere.
    AVAssetTrack *videoTrack = nil;
    AVAssetTrack *audioTrack = nil;
    CMTime insertionPoint = kCMTimeZero;

    if([[url tracksWithMediaType:AVMediaTypeVideo] count] != 0) {
        videoTrack = [url tracksWithMediaType:AVMediaTypeVideo][0];
    }

    if([[url tracksWithMediaType:AVMediaTypeAudio] count] != 0) {
        audioTrack = [url tracksWithMediaType:AVMediaTypeAudio][0];
    }

    // Insert the video and audio tracks from AVAsset
    if (videoTrack != nil) {
        AVMutableCompositionTrack *compositionVideoTrack = [videoComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [url duration]) ofTrack:videoTrack atTime:insertionPoint error:&error];
    }
    if (audioTrack != nil) {
        AVMutableCompositionTrack *compositionAudioTrack = [videoComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [url duration]) ofTrack:audioTrack atTime:insertionPoint error:&error];
    }
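
For completeness, here is a minimal sketch of the export step that would follow the composition code above. It assumes `videoComposition` is the AVMutableComposition built in the snippet and that `outputURL` is a writable file URL chosen by the caller (both names are placeholders, not from the original post):

    // Minimal export sketch (assumption: `videoComposition` is the
    // AVMutableComposition built above, `outputURL` is a writable file URL).
    AVAssetExportSession *exportSession =
        [[AVAssetExportSession alloc] initWithAsset:videoComposition
                                         presetName:AVAssetExportPresetHighestQuality];
    exportSession.outputURL = outputURL;
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;

    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (exportSession.status == AVAssetExportSessionStatusCompleted) {
            NSLog(@"Export finished; both video and audio tracks were included");
        } else {
            NSLog(@"Export failed: %@", exportSession.error);
        }
    }];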

3 Answers:

Answer 0 (score: 13)

Here is the complete code that solves this problem; it combines two videos together with their audio:

AVURLAsset *video1 = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:path1] options:nil];

AVURLAsset *video2 = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:path2] options:nil];

if (video1 != nil && video2 != nil) {

    // 1 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    // 2 - Video track

    AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *firstTrackAudio = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];

    [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, video1.duration)
                        ofTrack:[[video1 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
    [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, video2.duration)
                        ofTrack:[[video2 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:video1.duration error:nil];

    // If video1 has an audio track

    if ([[video1 tracksWithMediaType:AVMediaTypeAudio] count] > 0)
    {
        AVAssetTrack *clipAudioTrack = [[video1 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        [firstTrackAudio insertTimeRange:CMTimeRangeMake(kCMTimeZero, video1.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
    }

    // If video2 has an audio track

    if ([[video2 tracksWithMediaType:AVMediaTypeAudio] count] > 0)
    {
        AVAssetTrack *clipAudioTrack = [[video2 tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        [firstTrackAudio insertTimeRange:CMTimeRangeMake(kCMTimeZero, video2.duration) ofTrack:clipAudioTrack atTime:video1.duration error:nil];
    }

    // Export session

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetHighestQuality];

    //Creates the path to export to  - Saving to temporary directory
    NSString* filename = [NSString stringWithFormat:@"Video_%d.mov",arc4random() % 1000];
    NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];

    //Checks if there is already a file at the output URL.  
    if ([[NSFileManager defaultManager] fileExistsAtPath:path])
    {
        NSLog(@"Removing item at path: %@", path);
        [[NSFileManager defaultManager] removeItemAtPath:path error:nil];
    }

    exporter.outputURL = [NSURL fileURLWithPath:path];
    //Set the output file type
    exporter.outputFileType = AVFileTypeQuickTimeMovie;


    // App-specific bookkeeping: remember the output path for later use
    path3 = path;
    [arr_StoredDocumentoryUrls addObject:path3];

    //Exports!
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        switch (exporter.status) {
            case AVAssetExportSessionStatusCompleted:{
                NSLog(@"Export Complete");

                break;
            }
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Export Error: %@", [exporter.error description]);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export Cancelled");
                break;
            default:
                break;
        }
    }];

}
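
As a quick sanity check (not part of the original answer), you can reload the exported file and confirm that it actually contains an audio track, for example from the AVAssetExportSessionStatusCompleted branch once the export has finished. In this sketch `path` is the output path used above:

    // Sanity-check sketch, assuming `path` is the output path used above and
    // this runs after the export has completed successfully.
    AVURLAsset *exportedAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:path] options:nil];
    NSUInteger audioTrackCount = [[exportedAsset tracksWithMediaType:AVMediaTypeAudio] count];
    NSLog(@"Exported file contains %lu audio track(s)", (unsigned long)audioTrackCount);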

Answer 1 (score: 0)

Swift 4 version, based on @Ashish's answer

let video1 = AVURLAsset(url: videoURL1)
let video2 = AVURLAsset(url: videoURL2)

// Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
let mixComposition = AVMutableComposition()

guard let firstTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {return}

guard let firstAudioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio,preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {return}
do {
  try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, video1.duration),
                                 of: video1.tracks(withMediaType: AVMediaType.video)[0],
                                 at: kCMTimeZero)
} catch {
  print("error handling video1")
}

do {
  try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, video2.duration),
                                 of: video2.tracks(withMediaType: AVMediaType.video)[0],
                                 at: video1.duration)
} catch {
  print("error handling video2")
}

// if video 1 has an audio track
if video1.tracks(withMediaType: AVMediaType.audio).count > 0 {
  let clipAudioTrack = video1.tracks(withMediaType: AVMediaType.audio)[0]
  do {
    try firstAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, video1.duration), of: clipAudioTrack, at: kCMTimeZero)
  } catch {
    print("error inserting audio track 1")
  }

}

// if video 2 has an audio track
if video2.tracks(withMediaType: AVMediaType.audio).count > 0 {
  let clipAudioTrack = video2.tracks(withMediaType: AVMediaType.audio)[0]
  do {
    try firstAudioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, video2.duration), of: clipAudioTrack, at: video1.duration)
  } catch {
    print("error inserting audio track 2")
  }

}

// Create Exporter
guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else {return}

// Build the output path in the Documents directory
guard let documentDirectory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else {return}
let documentUrl = documentDirectory.appendingPathComponent("mergeVideo.mov")

// Checks if there is already a file at the output URL and removes it
if FileManager.default.fileExists(atPath: documentUrl.path) {
  try? FileManager.default.removeItem(at: documentUrl)
}

// Set the output URL and file type
exporter.outputURL = documentUrl
exporter.outputFileType = AVFileType.mov

// Exports!
exporter.exportAsynchronously {
  switch exporter.status {
  case .completed:
    print("export completed")

  case .failed:
    print("export failed")

  case .cancelled:
    print("export candelled")

  default:
    break
  }
}

Answer 2 (score: -2)

Try adding MobileCoreServices and running it again.
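
This answer does not explain what role MobileCoreServices plays here. If you do want to try it, adding the framework to the target's "Link Binary With Libraries" build phase and importing its header is usually all that is involved; a generic sketch (not from the original answer) would look like this:

    // Sketch: import MobileCoreServices after linking the framework to the target.
    #import <MobileCoreServices/MobileCoreServices.h>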