我想将两个 .wav 录音文件组合在一起。有人能帮我弄清楚如何实现吗？我尝试过直接拼接数据，但文件头（header）出了问题。有没有办法正确地合并两个波形文件？
这就是我目前合并（combine）文件的方式：
// Naive merge: append the raw bytes of two WAV files and write the result out.
// NOTE(review): this keeps the second file's 44-byte RIFF header embedded in the
// audio stream and leaves the first header's size fields stale — the header must
// be rewritten for the combined file to be a valid WAV (see answer 0 below).
NSMutableData *datas = [NSMutableData data];  // was [NSMutableData alloc] with no -init: messaging an uninitialized object
NSData *data1 = [NSData dataWithContentsOfFile:[recordedTmpFile1 path]];
NSData *data2 = [NSData dataWithContentsOfFile:[recordedTmpFile2 path]];
// -length is NSUInteger; %d is the wrong conversion on 64-bit — use %lu with a cast.
NSLog(@"file1 size : %lu", (unsigned long)[data1 length]);
NSLog(@"file2 size : %lu", (unsigned long)[data2 length]);
[datas appendData:data1];
[datas appendData:data2];
NSLog(@"file3 size : %lu", (unsigned long)[datas length]);
// Build the destination URL directly; the original [NSURL alloc] was discarded (leak under MRC).
NSURL *combinedPath = [NSURL fileURLWithPath:
    [NSTemporaryDirectory() stringByAppendingPathComponent:@"1_20111215.wav"]];
[[NSFileManager defaultManager] createFileAtPath:[combinedPath path] contents:datas attributes:nil];
NSFileManager *fm = [NSFileManager defaultManager];
// Best-effort cleanup of the second temp recording; error deliberately ignored.
[fm removeItemAtPath:[recordedTmpFile2 path] error:nil];
答案 0（得分：10）
我通过下面的代码实现了两个 .wav 录音文件的合并：
// Merge two WAV recordings: strip each file's 44-byte canonical header, rebuild a
// single RIFF/WAVE header for the combined PCM payload, and overwrite the original
// recording with the merged result.
// Assumes both inputs are canonical 44-byte-header WAV files with matching format:
// PCM, 2 channels, 11025 Hz, 16-bit (the constants below) — TODO confirm for other inputs.
NSURL *originalFileName = [NSURL fileURLWithPath:
    [appDelegate.RecordingPath stringByAppendingPathComponent:@"RecordingFile.wav"]];
NSLog(@"LocalRecoding Path :%@", originalFileName);
NSURL *temporaryFileName = [NSURL fileURLWithPath:
    [appDelegate.RecordingPath stringByAppendingPathComponent:@"tempRecordingFile.wav"]];
long totalAudioLen = 0;
long totalDataLen = 0;
long longSampleRate = 11025;          // sample rate in Hz
int channels = 2;
long byteRate = 16 * 11025 * channels / 8;  // bits-per-sample * rate * channels / 8
NSData *wav1Data = [NSData dataWithContentsOfFile:[originalFileName path]];
NSData *wav2Data = [NSData dataWithContentsOfFile:[temporaryFileName path]];
if ([wav1Data length] > 44 && [wav2Data length] > 44)  // must contain more than just a header
{
    NSUInteger wav1DataSize = [wav1Data length] - 44;
    NSLog(@"WAV I:%lu", (unsigned long)wav1DataSize);
    NSUInteger wav2DataSize = [wav2Data length] - 44;
    NSLog(@"WAV II:%lu", (unsigned long)wav2DataSize);
    // Strip the 44-byte canonical WAV header from each file, keeping raw PCM only.
    NSData *wave1 = [wav1Data subdataWithRange:NSMakeRange(44, wav1DataSize)];
    NSData *wave2 = [wav2Data subdataWithRange:NSMakeRange(44, wav2DataSize)];
    NSLog(@"WAV 1:%lu", (unsigned long)[wave1 length]);
    NSLog(@"WAV 2:%lu", (unsigned long)[wave2 length]);
    totalAudioLen = [wave1 length] + [wave2 length];
    totalDataLen = totalAudioLen + 44;
    // Build a fresh 44-byte RIFF/WAVE header; all multi-byte fields little-endian.
    Byte *header = (Byte *)malloc(44);
    header[0] = 'R'; // "RIFF" chunk id
    header[1] = 'I';
    header[2] = 'F';
    header[3] = 'F';
    header[4] = (Byte)(totalDataLen & 0xff);         // RIFF chunk size
    header[5] = (Byte)((totalDataLen >> 8) & 0xff);
    header[6] = (Byte)((totalDataLen >> 16) & 0xff);
    header[7] = (Byte)((totalDataLen >> 24) & 0xff);
    header[8] = 'W'; // "WAVE" format
    header[9] = 'A';
    header[10] = 'V';
    header[11] = 'E';
    header[12] = 'f'; // "fmt " sub-chunk
    header[13] = 'm';
    header[14] = 't';
    header[15] = ' ';
    header[16] = 16; // fmt chunk size (16 for PCM)
    header[17] = 0;
    header[18] = 0;
    header[19] = 0;
    header[20] = 1; // audio format = 1 (PCM)
    header[21] = 0;
    header[22] = (Byte)channels;
    header[23] = 0;
    header[24] = (Byte)(longSampleRate & 0xff);
    header[25] = (Byte)((longSampleRate >> 8) & 0xff);
    header[26] = (Byte)((longSampleRate >> 16) & 0xff);
    header[27] = (Byte)((longSampleRate >> 24) & 0xff);
    header[28] = (Byte)(byteRate & 0xff);
    header[29] = (Byte)((byteRate >> 8) & 0xff);
    header[30] = (Byte)((byteRate >> 16) & 0xff);
    header[31] = (Byte)((byteRate >> 24) & 0xff);
    header[32] = (Byte)(2 * 8 / 8); // block align (NOTE(review): should be channels * bitsPerSample/8 = 4 for 2ch/16-bit — kept as original)
    header[33] = 0;
    header[34] = 16; // bits per sample
    header[35] = 0;
    header[36] = 'd'; // "data" sub-chunk
    header[37] = 'a';
    header[38] = 't';
    header[39] = 'a';
    header[40] = (Byte)(totalAudioLen & 0xff);       // data chunk size
    header[41] = (Byte)((totalAudioLen >> 8) & 0xff);
    header[42] = (Byte)((totalAudioLen >> 16) & 0xff);
    header[43] = (Byte)((totalAudioLen >> 24) & 0xff);
    NSData *headerData = [NSData dataWithBytes:header length:44];
    free(header);  // was leaked in the original
    // Assemble header + both PCM payloads.
    // Was [NSMutableData alloc] with no -init — messaging an uninitialized object.
    NSMutableData *soundFileData = [NSMutableData data];
    [soundFileData appendData:headerData];
    [soundFileData appendData:wave1];
    [soundFileData appendData:wave2];
    // Delete the original file, then recreate it under the same name with the merged audio.
    NSFileManager *fm = [NSFileManager defaultManager];
    [fm removeItemAtPath:[originalFileName path] error:nil];
    // NOTE: original passed lastPathComponent as a stringWithFormat: format string —
    // a '%' in the filename would crash; use the component directly.
    NSURL *mergedFilePath = [NSURL fileURLWithPath:
        [appDelegate.RecordingPath stringByAppendingPathComponent:[originalFileName lastPathComponent]]];
    [[NSFileManager defaultManager] createFileAtPath:[mergedFilePath path] contents:soundFileData attributes:nil];
    NSLog(@"COMBINED FILE PATH :%@", mergedFilePath);
}
答案 1（得分：3）
此代码会剥离文件头，仅返回包含 PCM 数据的 NSData。
/// Strips everything up to and including the 'data' chunk header of a CAF file
/// and returns only the raw PCM payload.
/// @param path Filesystem path of the CAF file to read.
/// @return The PCM bytes, or nil if the file can't be read or has no 'data' chunk.
- (NSData *)stripCAFHeader:(NSString *)path
{   // NOTE: the original was missing this opening brace and would not compile.
    NSFileManager *fileMgr = [NSFileManager defaultManager];
    // Load the whole file into memory.
    NSData *dataBuffer = [fileMgr contentsAtPath:path];
    // Locate the 'data' chunk marker.
    NSData *searchString = [NSData dataWithBytes:"data" length:4];
    NSUInteger dataWordStart = [dataBuffer rangeOfData:searchString
                                               options:0
                                                 range:NSMakeRange(0, [dataBuffer length])].location;
    // Guard: without this, a missing marker made the original compute a garbage
    // range from NSNotFound and throw in -subdataWithRange:.
    if (dataWordStart == NSNotFound) {
        return nil;
    }
    // Skip 4 bytes for the 'data' word, 8 bytes for the chunk length,
    // and 4 bytes for the edit count (16 bytes total, as in the original).
    NSUInteger payloadStart = dataWordStart + 4 + 4 + 8;
    NSRange dataRange = NSMakeRange(payloadStart, [dataBuffer length] - payloadStart);
    return [dataBuffer subdataWithRange:dataRange];
}
答案 2（得分：0）
我刚刚在自己的项目中实现了这一点，用的是 AVMutableComposition。该方法需要 #import <AVFoundation/AVFoundation.h>。我在应用程序的文档目录中有两个 .wav 文件，并将它们合并成一个新的 .wav 文件。
// Concatenate two audio assets back-to-back with AVMutableComposition and
// export the result as a single WAV in the Documents directory.
NSError *error = nil;  // was used via &error below but never declared in the snippet
AVAsset *audio1 = [AVURLAsset URLAssetWithURL:audioFilePath options:nil];
// NOTE(review): both assets load the same audioFilePath here — the second one
// should presumably point at the second recording; verify against the caller.
AVAsset *audio2 = [AVURLAsset URLAssetWithURL:audioFilePath options:nil];
AVMutableComposition *composition = [[AVMutableComposition alloc] init];
AVMutableCompositionTrack *audioCombinedTrack =
    [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                             preferredTrackID:kCMPersistentTrackID_Invalid];
// First clip at time zero, second clip starting where the first ends.
[audioCombinedTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [audio1 duration])
                            ofTrack:[audio1.tracks objectAtIndex:0]
                             atTime:kCMTimeZero
                              error:&error];
[audioCombinedTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [audio2 duration])
                            ofTrack:[audio2.tracks objectAtIndex:0]
                             atTime:[audio1 duration]
                              error:&error];
AVAssetExportSession *exportSession =
    [[AVAssetExportSession alloc] initWithAsset:composition
                                     presetName:AVAssetExportPresetPassthrough];
NSString *exportPath = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)
                            objectAtIndex:0] stringByAppendingPathComponent:@"Combined.wav"];
exportSession.outputURL = [NSURL fileURLWithPath:exportPath];
exportSession.outputFileType = AVFileTypeWAVE;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    NSLog(@"Exporting. status is %ld", (long)exportSession.status);
    switch (exportSession.status) {
        case AVAssetExportSessionStatusFailed:
            // BUG in original: Failed fell through into the Completed case
            // and logged "export done" even when the export failed.
            NSLog(@"export failed: %@", exportSession.error);
            break;
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"export done");
            break;
        default:
            break;
    }
}];