Combining two .caf files on iPhone

Asked: 2011-10-05 03:49:40

Tags: iphone objective-c avfoundation

I have looked around and searched for an answer, but I can't seem to find one. Plenty of people have asked this, but nobody seems to have gotten an answer. I have an app that records audio using AVAudioRecorder. Now I just want to combine two or more of those recordings into a single file that I can send via email. Does anyone know how to do this?

(This answer suggests using something called Audio Queue Services, but I know nothing about that.)

2 Answers:

Answer 0 (score: 8):

This isn't as easy as you might think. I used the AVFoundation framework to do exactly what you're asking for when creating iAmRingtones. It requires creating AVAssets from the audio files and setting up an AVAssetExportSession. The end result was great, but it certainly took some work. Here is more or less how we created the export feature in our app:

- (void) setUpAndAddAudioAtPath:(NSURL*)assetURL toComposition:(AVMutableComposition *)composition {

    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];  

    AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *sourceAudioTrack = [[songAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    NSError *error = nil;
    BOOL ok = NO;

    CMTime startTime = CMTimeMakeWithSeconds(0, 1);
    CMTime trackDuration = songAsset.duration;
    CMTime longestTime = CMTimeMake(848896, 44100); //(19.24 seconds); note: not used further in this snippet
    CMTimeRange tRange = CMTimeRangeMake(startTime, trackDuration);

    //Set Volume
    AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
    [trackMix setVolume:0.8f atTime:startTime];
    [audioMixParams addObject:trackMix];

    //Insert audio into track
    ok = [track insertTimeRange:tRange ofTrack:sourceAudioTrack atTime:CMTimeMake(0, 44100) error:&error];
    if (!ok) {
        NSLog(@"Failed to insert audio from %@: %@", assetURL, error);
    }
}
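Note that audioMixParams is not a local variable: the snippet assumes it is an instance variable of the class that owns these two methods, used to collect the AVMutableAudioMixInputParameters for each track. A minimal declaration might look like the following (the class name here is hypothetical, not part of the original answer):

@interface AudioExporter : NSObject {
    NSMutableArray *audioMixParams;   // one AVMutableAudioMixInputParameters per composition track
}
@end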

The method above is called twice (once per audio track) from the following method:

- (void) exportAudio {

    AVMutableComposition *composition = [AVMutableComposition composition];
    audioMixParams = [[NSMutableArray alloc] initWithObjects:nil];

    //Add Audio Tracks to Composition
    NSString *URLPath1 = pathToYourAudioFile1;
    NSURL *assetURL1 = [NSURL fileURLWithPath:URLPath1];
    [self setUpAndAddAudioAtPath:assetURL1 toComposition:composition];

    NSString *URLPath2 = pathToYourAudioFile2;
    NSURL *assetURL2 = [NSURL fileURLWithPath:URLPath2];
    [self setUpAndAddAudioAtPath:assetURL2 toComposition:composition];

    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
    audioMix.inputParameters = [NSArray arrayWithArray:audioMixParams];

    //If you need to query what formats you can export to, here's a way to find out
    NSLog (@"compatible presets for songAsset: %@",
            [AVAssetExportSession exportPresetsCompatibleWithAsset:composition]);

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc]
                                  initWithAsset: composition
                                  presetName: AVAssetExportPresetAppleM4A];
    exporter.audioMix = audioMix;
    exporter.outputFileType = @"com.apple.m4a-audio";
    NSString *fileName = @"someFilename";
    NSString *exportFile = [[util getDocumentsDirectory] stringByAppendingFormat: @"/%@.m4a", fileName];    

    // set up export 
    myDeleteFile(exportFile);
    NSURL *exportURL = [NSURL fileURLWithPath:exportFile];
    exporter.outputURL = exportURL; 

    // do the export
    [exporter exportAsynchronouslyWithCompletionHandler:^{
            int exportStatus = exporter.status;
            switch (exportStatus) {
                case AVAssetExportSessionStatusFailed: {
                    NSError *exportError = exporter.error;
                    NSLog (@"AVAssetExportSessionStatusFailed: %@", exportError);
                    break;
                }

                case AVAssetExportSessionStatusCompleted: NSLog (@"AVAssetExportSessionStatusCompleted"); break;
                case AVAssetExportSessionStatusUnknown: NSLog (@"AVAssetExportSessionStatusUnknown"); break;
                case AVAssetExportSessionStatusExporting: NSLog (@"AVAssetExportSessionStatusExporting"); break;
                case AVAssetExportSessionStatusCancelled: NSLog (@"AVAssetExportSessionStatusCancelled"); break;
                case AVAssetExportSessionStatusWaiting: NSLog (@"AVAssetExportSessionStatusWaiting"); break;
                default:  NSLog (@"didn't get export status"); break;
            }
    }];

    // start up the export progress bar
    progressView.hidden = NO;
    progressView.progress = 0.0;
    [NSTimer scheduledTimerWithTimeInterval:0.1
                                 target:self
                               selector:@selector (updateExportProgress:)
                               userInfo:exporter
                                repeats:YES];

}
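The export code above relies on a few helpers that are not shown in the answer: myDeleteFile(), [util getDocumentsDirectory], and the updateExportProgress: selector driven by the timer. As a rough sketch of what they could look like, using only standard Foundation / AVFoundation APIs (the names and placement are assumptions, not part of the original answer):

// Hypothetical stand-in: remove any previous file at the export path so
// AVAssetExportSession can write to a clean URL.
static void myDeleteFile(NSString *path) {
    if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
        [[NSFileManager defaultManager] removeItemAtPath:path error:NULL];
    }
}

// Presumably [util getDocumentsDirectory] returns something like this.
- (NSString *) getDocumentsDirectory {
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    return [paths objectAtIndex:0];
}

// The timer's updateExportProgress: selector can poll exporter.progress,
// which AVAssetExportSession exposes as a float from 0.0 to 1.0.
- (void) updateExportProgress:(NSTimer *)timer {
    AVAssetExportSession *exporter = (AVAssetExportSession *)[timer userInfo];
    progressView.progress = exporter.progress;
    if (exporter.status != AVAssetExportSessionStatusExporting &&
        exporter.status != AVAssetExportSessionStatusWaiting) {
        [timer invalidate];
        progressView.hidden = YES;
    }
}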

Answer 1 (score: 1):

Here is how to sequentially merge any number of audio files whose paths are contained in an array named recordingsArray:

#pragma mark mergeRecording

- (void) mergeRecording
{
        AVMutableComposition *composition = [AVMutableComposition composition];
        [self buildSequenceComposition:composition];   //given Below 

        NSLog (@"compatible presets for songAsset: %@",[AVAssetExportSession exportPresetsCompatibleWithAsset:composition]);

        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset: composition presetName: AVAssetExportPresetAppleM4A];
        exporter.outputFileType = @"com.apple.m4a-audio";

        //File Name

        NSString *recordingFileName = [self setRecordingFileName];
        self.recordingTimeLbl.text = @"00:00:00";
        NSString *exportFile = [NSTemporaryDirectory() stringByAppendingFormat: @"/%@.m4a", recordingFileName];

        // set up export 
        [[NSFileManager defaultManager] removeItemAtPath:exportFile error:NULL];
        NSURL *exportURL = [NSURL fileURLWithPath:exportFile];
        exporter.outputURL = exportURL;

        [exporter exportAsynchronouslyWithCompletionHandler:^{
            int exportStatus = exporter.status;
            switch (exportStatus) {
                case AVAssetExportSessionStatusFailed: 
                    NSLog (@"AVAssetExportSessionStatusFailed: %@", exporter.error);
                    break;

                case AVAssetExportSessionStatusCompleted: NSLog (@"AVAssetExportSessionStatusCompleted"); break;
                case AVAssetExportSessionStatusUnknown: NSLog (@"AVAssetExportSessionStatusUnknown"); break;
                case AVAssetExportSessionStatusExporting: NSLog (@"AVAssetExportSessionStatusExporting"); break;
                case AVAssetExportSessionStatusCancelled: NSLog (@"AVAssetExportSessionStatusCancelled"); break;
                case AVAssetExportSessionStatusWaiting: NSLog (@"AVAssetExportSessionStatusWaiting"); break;
                default:  NSLog (@"didn't get export status"); break;
            }
        }];

        // start up the export progress bar
        [NSTimer scheduledTimerWithTimeInterval:0.1 target:self selector:@selector (updateProgress:) userInfo:exporter repeats:NO];
}


- (NSString *) setRecordingFileName
{
    NSDate *todaysDate = [NSDate date];

    NSDateFormatter *dateFormat = [[NSDateFormatter alloc] init];
    [dateFormat setDateFormat:@"dd-MM-yyyy"];
    NSString *dateString11 = [dateFormat stringFromDate:todaysDate];
    [dateFormat release];

    NSCalendar *gregorian = [[NSCalendar alloc] initWithCalendarIdentifier:NSGregorianCalendar];
    NSDateComponents *dateComponents = [gregorian components:(NSHourCalendarUnit  | NSMinuteCalendarUnit | NSSecondCalendarUnit) fromDate:todaysDate];
    NSInteger hour = [dateComponents hour];
    NSInteger minute = [dateComponents minute];
    NSInteger second = [dateComponents second];
    [gregorian release];

    NSLog(@"Date: %@  \n Time : %@-%@-%@",dateString11,[NSString stringWithFormat:@"%i",hour],[NSString stringWithFormat:@"%i",minute],[NSString stringWithFormat:@"%i",second]);


    NSString *recordingFileName = @"Any Name";
    if(recordingFileName.length > 0)
    {
            recordingFileName = [NSString stringWithFormat:@"%@AND%@AND%@-%@-%@", recordingFileName, dateString11, [NSString stringWithFormat:@"%i",hour], [NSString stringWithFormat:@"%i",minute], [NSString stringWithFormat:@"%i",second]];
    }
    else
    {
            recordingFileName = [NSString stringWithFormat:@"%@AND%@-%@-%@",dateString11,[NSString stringWithFormat:@"%i",hour],[NSString stringWithFormat:@"%i",minute],[NSString stringWithFormat:@"%i",second]];
    }
    return recordingFileName;
}


- (void)updateProgress:(id)timer
{
    AVAssetExportSession *session;
    if([timer isKindOfClass:[NSTimer class]])
        session = (AVAssetExportSession *)[timer userInfo];
    else if([timer isKindOfClass:[AVAssetExportSession class]])
        session = timer;

    if (session.status == AVAssetExportSessionStatusExporting) 
    {

        NSArray *modes = [[[NSArray alloc] initWithObjects:NSDefaultRunLoopMode, UITrackingRunLoopMode, nil] autorelease];
        [self performSelector:@selector(updateProgress:) withObject:session afterDelay:0.5 inModes:modes];

    }
    else if(session.status == AVAssetExportSessionStatusCompleted)
    {
        NSLog(@"Exporting Ended");
        NSURL  *exportURL =  session.outputURL;
        NSData *sound1Data = [[NSData alloc] initWithContentsOfURL: exportURL];
        NSLog(@"Length %i \n Path %@",sound1Data.length,exportURL);
        [sound1Data release];

        [self.activityIndicator stopAnimating];
        self.activityIndicator.hidden = YES;
        NSLog(@"Merging Complete");

        for(int x = 0 ; x < [recordingsArray count] ; x++)
        {
                NSURL   *recordingPathUrl = [recordingsArray objectAtIndex:x];
                BOOL yes = [[NSFileManager defaultManager] removeItemAtPath:recordingPathUrl.relativePath error:NULL];
                if (yes) 
                {
                    NSLog(@"File Removed at Path %@",recordingPathUrl.relativePath);
                }
                else
                {
                    NSLog(@"Problem During Removal of Recording At Path %@",recordingPathUrl.relativePath);
                }

        }

        NSString *exportFile = [NSString stringWithFormat:@"%@",exportURL];
        NSString *recordingFileName = [self setRecordingFileName];
        BOOL isInserted = [[DbFile sharedDatabase] insertRecordingDataIntoTable:recordingFileName recordingPath:exportFile];

        if(isInserted)
        {
            NSLog(@"Recording Inserted In Database");
        }
        else
        {
            NSLog(@"Recording Not Inserted In Database");
        }


         if([timer isKindOfClass:[NSTimer class]])
            [timer invalidate];

    }
    else if(session.status == AVAssetExportSessionStatusFailed)
    {

            [self.activityIndicator stopAnimating];
            NSLog(@"Recording Export Failed");

            UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Recording Export Failed" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles: nil];
            [alertView show];
            [alertView release];

            if([timer isKindOfClass:[NSTimer class]])
                [timer invalidate];

    }
    else if(session.status == AVAssetExportSessionStatusCancelled)
    {

            [self.activityIndicator stopAnimating];
            NSLog(@"Recording Export Cancelled");

            UIAlertView *alertView = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Recording Export Cancelled" delegate:nil cancelButtonTitle:@"OK" otherButtonTitles: nil];
            [alertView show];
            [alertView release];
            if([timer isKindOfClass:[NSTimer class]])
                [timer invalidate];
    }
}


- (void) buildSequenceComposition:(AVMutableComposition *)composition
{
    AVMutableCompositionTrack *audioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio 
                                                                      preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTime nextClipStartTime = kCMTimeZero;

    for (NSURL *recordingURL in recordingsArray)
    {
        AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:recordingURL options:nil];
        CMTimeRange timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [audioAsset duration]);

        AVAssetTrack *clipAudioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        [audioTrack1 insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil];
        nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
        [audioAsset release];
    }
}
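
For completeness: this answer assumes recordingsArray already contains the NSURL of each .caf recording, but never shows how it gets filled. Since the question records with AVAudioRecorder, one plausible way, sketched here as an assumption rather than part of the original answer, is to collect each recorder's url in the AVAudioRecorderDelegate callback:

// Hypothetical: append each finished recording's file URL so that
// buildSequenceComposition: can later stitch them together in order.
- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder successfully:(BOOL)flag {
    if (flag) {
        if (recordingsArray == nil) {
            recordingsArray = [[NSMutableArray alloc] init];
        }
        [recordingsArray addObject:recorder.url];   // URL of the .caf file just written
    }
}

The exported .m4a at the session's outputURL can then be attached to an email, for example via MFMailComposeViewController's addAttachmentData:mimeType:fileName:, which covers the "send via email" part of the question.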