我是通过AudioUnitRender从AVAudioEngineOutput创建音频文件。在iPhone上,这种实现工作正常,但在iPad上我得到了无效的音频文件。为什么会这样?
主要方法
// Offline-render the engine output and write it to `audioFile`:
// render in fixed-size chunks, then one final partial chunk for the
// remainder, and report the result.
NSTimeInterval duration = CMTimeGetSeconds(asset.duration);
NSUInteger lengthInFrames = (NSUInteger) (duration * audioDescription->mSampleRate);
const NSUInteger kBufferLength = 1024; // frames rendered per chunk (was 3756)
AudioBufferList *bufferList = AEAllocateAndInitAudioBufferList(*audioDescription, (int)kBufferLength);
AudioTimeStamp timeStamp;
memset (&timeStamp, 0, sizeof(timeStamp));
timeStamp.mFlags = kAudioTimeStampSampleTimeValid;
OSStatus status = noErr;
// `i` tracks the running total of frames rendered; stop before overrunning
// the asset length. `timeStamp.mSampleTime` is advanced by the render call.
for (NSUInteger i = kBufferLength; i < lengthInFrames; i += kBufferLength) {
    status = [self renderToBufferList:bufferList writeToFile:audioFile bufferLength:kBufferLength timeStamp:&timeStamp];
    if (status != noErr)
        break;
}
// Render whatever partial chunk remains after the fixed-size loop.
if (status == noErr && timeStamp.mSampleTime < lengthInFrames) {
    NSUInteger restBufferLength = (NSUInteger) (lengthInFrames - timeStamp.mSampleTime);
    // BUG FIX: the frame count parameter is an int — the previous (Float32)
    // cast converted an integer frame count through float for no reason.
    AudioBufferList *restBufferList = AEAllocateAndInitAudioBufferList(*audioDescription, (int)restBufferLength);
    if (restBufferList) {  // guard: allocator returns NULL on failure
        status = [self renderToBufferList:restBufferList writeToFile:audioFile bufferLength:restBufferLength timeStamp:&timeStamp];
        AEFreeAudioBufferList(restBufferList);
    }
}
// Query the frames actually written, for diagnostics.
SInt64 fileLengthInFrames;
UInt32 size = sizeof(SInt64);
ExtAudioFileGetProperty(audioFile, kExtAudioFileProperty_FileLengthFrames, &size, &fileLengthInFrames);
AEFreeAudioBufferList(bufferList);
ExtAudioFileDispose(audioFile);
if (status != noErr)
    [self showAlertWithTitle:@"Error" message:@"See logs for details"];
else {
    NSLog(@"Finished writing to file at path: %@ \n File size must be %f Mb", path,(tmpData.length/1024.0)/1024.0);
    [self showAlertWithTitle:@"Success!" message:@"Now you can play a result file"];
}
分配缓冲区
/// Allocates an AudioBufferList holding `frameCount` frames of `audioFormat`.
/// Non-interleaved formats get one buffer per channel; interleaved formats get
/// a single buffer carrying all channels. Sample storage is zero-filled.
/// Returns NULL if any allocation fails (nothing is leaked on failure).
AudioBufferList *AEAllocateAndInitAudioBufferList(AudioStreamBasicDescription audioFormat, int frameCount) {
    int nonInterleaved = (audioFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved) != 0;
    int bufferCount     = nonInterleaved ? audioFormat.mChannelsPerFrame : 1;
    int channelsEach    = nonInterleaved ? 1 : audioFormat.mChannelsPerFrame;
    int bytesEach       = audioFormat.mBytesPerFrame * frameCount;

    // AudioBufferList already embeds one AudioBuffer; add room for the rest.
    AudioBufferList *list = malloc(sizeof(AudioBufferList) + (bufferCount - 1) * sizeof(AudioBuffer));
    if (!list) {
        return NULL;
    }
    list->mNumberBuffers = bufferCount;

    for (int i = 0; i < bufferCount; i++) {
        void *storage = NULL;
        if (bytesEach > 0) {
            storage = calloc(bytesEach, 1);
            if (!storage) {
                // Unwind every buffer allocated so far, then fail cleanly.
                while (i-- > 0) {
                    free(list->mBuffers[i].mData);
                }
                free(list);
                return NULL;
            }
        }
        list->mBuffers[i].mData           = storage;
        list->mBuffers[i].mDataByteSize   = bytesEach;
        list->mBuffers[i].mNumberChannels = channelsEach;
    }
    return list;
}
渲染方法
/// Renders `bufferLength` frames from the engine's output unit into
/// `bufferList`, appends the rendered bytes to `tmpData`, writes the buffer
/// to `audioFile`, and advances `timeStamp->mSampleTime` on success.
/// Returns the first failing OSStatus, or noErr.
- (OSStatus)renderToBufferList:(AudioBufferList *)bufferList
                   writeToFile:(ExtAudioFileRef)audioFile
                  bufferLength:(NSUInteger)bufferLength
                     timeStamp:(AudioTimeStamp *)timeStamp {
    [self clearBufferList:bufferList];
    AudioUnit outputUnit = self.engine.outputNode.audioUnit;
    OSStatus status = AudioUnitRender(outputUnit, 0, timeStamp, 0, (UInt32)bufferLength, bufferList);
    // BUG FIX: check the render result BEFORE reading the buffer — on
    // failure the buffer contents are meaningless.
    if (status != noErr) {
        NSLog(@"Can not render audio unit");
        return status;
    }
    // BUG FIX: append the buffer's byte size, not the frame count — the old
    // code copied only `bufferLength` BYTES (a fraction of each buffer).
    [tmpData appendBytes:bufferList->mBuffers[0].mData
                  length:bufferList->mBuffers[0].mDataByteSize];
    // Debug trace of non-silent samples. Guarded so an interleaved layout
    // (single buffer) no longer reads past mBuffers[]; sample count derived
    // from the byte size instead of a hard-coded /4.
    if (bufferList->mNumberBuffers > 1) {
        float *data1 = bufferList->mBuffers[0].mData;
        float *data2 = bufferList->mBuffers[1].mData;
        NSUInteger sampleCount = bufferList->mBuffers[0].mDataByteSize / sizeof(float);
        for (NSUInteger i = 0; i < sampleCount; i++) {
            // Observed symptom: on iPad data1[i] == 0 and data2[i] == 0
            if (data1[i] != 0 || data2[i] != 0)
                NSLog(@"%f - %f", data1[i], data2[i]);
        }
    }
    timeStamp->mSampleTime += bufferLength;
    status = ExtAudioFileWrite(audioFile, (UInt32)bufferLength, bufferList);
    if (status != noErr)
        NSLog(@"Can not write audio to file");
    return status;
}
问题出现在渲染方法中:在 iPad 上,AudioUnitRender 返回 noErr,但缓冲区里的采样数据全部为 0(见上面循环中的注释),因此写出的文件无效。