I'm trying to do some streaming with Audio Unit Services and Audio Converter Services, but I'm a bit stuck. My goal is to capture microphone input (PCM), convert it to AAC on the fly and send the packets over the network. It seems to work, but the audio sounds terrible and the app crashes after about 4 seconds.
// Audio Unit initialization:
OSStatus status;

AudioComponentDescription desc;
desc.componentType = kAudioUnitType_Output;
desc.componentSubType = kAudioUnitSubType_RemoteIO;
desc.componentFlags = 0;
desc.componentFlagsMask = 0;
desc.componentManufacturer = kAudioUnitManufacturer_Apple;

AudioComponent component = AudioComponentFindNext(NULL, &desc);
status = AudioComponentInstanceNew(component, &_audioUnit);
NSLog(@"status instance new: %lu", status);

UInt32 flag = 1;
status = AudioUnitSetProperty(_audioUnit,
                              kAudioOutputUnitProperty_EnableIO,
                              kAudioUnitScope_Input,
                              1,
                              &flag,
                              sizeof(flag));
NSLog(@"status AudioUnitSetProperty input: %lu", status);

AudioStreamBasicDescription audioFormat;
memset(&audioFormat, 0, sizeof(AudioStreamBasicDescription));
audioFormat.mSampleRate       = 44100;
audioFormat.mFormatID         = kAudioFormatLinearPCM;
audioFormat.mFormatFlags      = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
audioFormat.mFramesPerPacket  = 1;
audioFormat.mChannelsPerFrame = 1;
audioFormat.mBitsPerChannel   = 16;
audioFormat.mBytesPerPacket   =
audioFormat.mBytesPerFrame    = audioFormat.mChannelsPerFrame * sizeof(SInt16);

status = AudioUnitSetProperty(_audioUnit,
                              kAudioUnitProperty_StreamFormat,
                              kAudioUnitScope_Input,
                              0,
                              &audioFormat,
                              sizeof(audioFormat));
NSLog(@"status AudioUnitSetProperty audioFormat: %lu", status);

AURenderCallbackStruct renderCallbackInfo;
renderCallbackInfo.inputProc       = recordingCallback;
renderCallbackInfo.inputProcRefCon = NULL;
AudioUnitSetProperty(_audioUnit,
                     kAudioOutputUnitProperty_SetInputCallback,
                     kAudioUnitScope_Global,
                     1,
                     &renderCallbackInfo,
                     sizeof(renderCallbackInfo));

float aBufferLength = 0.005; // in seconds
AudioSessionSetProperty(kAudioSessionProperty_PreferredHardwareIOBufferDuration,
                        sizeof(aBufferLength), &aBufferLength);

_converter = [[Converter alloc] initWithFormat];

status = AudioUnitInitialize(_audioUnit);
NSLog(@"status AudioUnit initialize: %lu", status);
// Recording callback:
static OSStatus recordingCallback(void                       *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp       *inTimeStamp,
                                  UInt32                      inBusNumber,
                                  UInt32                      inNumberFrames,
                                  AudioBufferList            *ioData) {
    // mData == NULL lets the audio unit supply its own buffer in AudioUnitRender
    AudioBufferList bufferList;
    bufferList.mNumberBuffers = 1;
    bufferList.mBuffers[0].mNumberChannels = 1;
    bufferList.mBuffers[0].mData = NULL;
    bufferList.mBuffers[0].mDataByteSize = inNumberFrames * sizeof(SInt16);

    OSStatus status = AudioUnitRender(_audioUnit, ioActionFlags, inTimeStamp, 1, inNumberFrames, &bufferList);

    AudioBuffer pcm;
    AudioBuffer aac;
    pcm.mData = malloc(inNumberFrames * sizeof(SInt16));
    unsigned char *p = malloc(inNumberFrames * sizeof(SInt16));
    memcpy(p, bufferList.mBuffers[0].mData, inNumberFrames * sizeof(SInt16));
    memcpy(pcm.mData, p, bufferList.mBuffers[0].mDataByteSize); // dunno why I can't memcpy the buffer list data directly into the pcm struct
    pcm.mDataByteSize = inNumberFrames * sizeof(SInt16);
    pcm.mNumberChannels = 1;

    int osstatus = [_converter convertAudioBuffer:&pcm EncodedAudioBuffer:&aac];

    // send to the network
    NSData *data = [[NSData alloc] initWithBytes:aac.mData length:aac.mDataByteSize];
    dispatch_async(myQueue, ^{ _pts += 1024; [sender sendBuf2:data withTime:_pts]; });

    return noErr;
}
// Converter side:
- (int)convertAudioBuffer:(AudioBuffer *)inSamples EncodedAudioBuffer:(AudioBuffer *)outData {
    memset(_buffer, 0, _converterSettings.maxPacketSize);
    _converterSettings.buffer = *inSamples;
    //_converterSettings.bytesToEncode = inSamples->mDataByteSize;

    UInt32 ioOutputDataPackets = 1;
    AudioStreamPacketDescription outPacketDesc[1];

    AudioBufferList convertedData;
    convertedData.mNumberBuffers = 1;
    convertedData.mBuffers[0].mNumberChannels = 1;
    convertedData.mBuffers[0].mDataByteSize = _converterSettings.maxPacketSize;
    convertedData.mBuffers[0].mData = _buffer;

    OSStatus error = AudioConverterFillComplexBuffer(_audioConverter,
                                                     MyAudioConverterCallback,
                                                     &_converterSettings,
                                                     &ioOutputDataPackets,
                                                     &convertedData,
                                                     outPacketDesc);
    if (error != noErr)
    {
        NSError *err = [NSError errorWithDomain:NSOSStatusErrorDomain code:error userInfo:nil];
        NSLog(@"%ld", convertedData.mBuffers[0].mDataByteSize);
    }
    NSLog(@"%ld", convertedData.mBuffers[0].mDataByteSize);

    /* Set the output data */
    outData->mNumberChannels = convertedData.mBuffers[0].mNumberChannels;
    outData->mDataByteSize = convertedData.mBuffers[0].mDataByteSize;
    outData->mData = convertedData.mBuffers[0].mData;

    return 0;
}
And the converter's input proc callback:
OSStatus MyAudioConverterCallback(AudioConverterRef              inAudioConverter,
                                  UInt32                        *ioDataPacketCount,
                                  AudioBufferList               *ioData,
                                  AudioStreamPacketDescription **outDataPacketDescription,
                                  void                          *inUserData)
{
    if (outDataPacketDescription)
    {
        *outDataPacketDescription = NULL;
    }

    Settings *audioConverterSettings = (Settings *)inUserData;

    ioData->mBuffers[0].mData = audioConverterSettings->buffer.mData;
    ioData->mBuffers[0].mDataByteSize = audioConverterSettings->buffer.mDataByteSize;
    ioData->mBuffers[0].mNumberChannels = audioConverterSettings->buffer.mNumberChannels;

    return noErr;
}
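One thing worth noting about the proc above: it hands the converter the same buffer on every call and never reports that the input has been consumed, which is usually what the commented-out bytesToEncode line in convertAudioBuffer: is for. For reference, a sketch of a variant that tracks how much input is left (it assumes Settings has a bytesToEncode field that convertAudioBuffer: sets to inSamples->mDataByteSize before each conversion; this is not the code from the question):

// Sketch only: assumes Settings has a UInt32 bytesToEncode field that
// convertAudioBuffer: resets before each call to AudioConverterFillComplexBuffer.
OSStatus MyAudioConverterCallback(AudioConverterRef              inAudioConverter,
                                  UInt32                        *ioDataPacketCount,
                                  AudioBufferList               *ioData,
                                  AudioStreamPacketDescription **outDataPacketDescription,
                                  void                          *inUserData)
{
    Settings *settings = (Settings *)inUserData;

    if (outDataPacketDescription)
    {
        *outDataPacketDescription = NULL;
    }

    if (settings->bytesToEncode == 0)
    {
        // Nothing left: any non-zero status just tells the converter there is no more
        // input for now; AudioConverterFillComplexBuffer returns it along with
        // whatever output it has already produced.
        *ioDataPacketCount = 0;
        return 1;
    }

    ioData->mBuffers[0].mData           = settings->buffer.mData;
    ioData->mBuffers[0].mDataByteSize   = settings->buffer.mDataByteSize;
    ioData->mBuffers[0].mNumberChannels = settings->buffer.mNumberChannels;

    // 2 bytes per frame (16-bit mono PCM), 1 frame per packet
    *ioDataPacketCount = settings->bytesToEncode / 2;
    settings->bytesToEncode = 0;

    return noErr;
}

With that in place, convertAudioBuffer: can request more than one output packet per call and check ioOutputDataPackets afterwards instead of assuming exactly one packet came out.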
And finally the output:
2013-07-16 16:58:57.192 AudioUnitAAC[84656:c07] status instance new: 0
2013-07-16 16:58:57.195 AudioUnitAAC[84656:c07] status AudioUnitSetProperty input: 0
2013-07-16 16:58:57.197 AudioUnitAAC[84656:c07] status AudioUnitSetProperty audioFormat: 0
2013-07-16 16:58:57.235 AudioUnitAAC[84656:c07] status AudioUnit initialize: 0
2013-07-16 16:58:58.182 AudioUnitAAC[84656:c07] start : 0
2013-07-16 16:58:58.200 AudioUnitAAC[84656:6e07] 4 bytes encoded
2013-07-16 16:58:58.211 AudioUnitAAC[84656:6e07] 152 bytes encoded
2013-07-16 16:58:58.223 AudioUnitAAC[84656:6e07] 169 bytes encoded
2013-07-16 16:58:58.235 AudioUnitAAC[84656:6e07] 157 bytes encoded
2013-07-16 16:58:58.246 AudioUnitAAC[84656:6e07] 160 bytes encoded
2013-07-16 16:58:58.258 AudioUnitAAC[84656:6e07] 164 bytes encoded
....
...until it crashes for what seems like a random reason (bad access inside AudioConverterFillComplexBuffer, bad instruction on the NSLog(@"%ld", convertedData.mBuffers[0].mDataByteSize) line, ...).
I'm new to Apple Core Audio, so any help would be appreciated :)
Answer 0 (score: 0):
Thanks to this post I got it working! I replaced:
pcm.mData = malloc(inNumberFrames * sizeof(SInt16));
unsigned char *p = malloc(inNumberFrames * sizeof(SInt16));
memcpy(p, bufferList.mBuffers[0].mData, inNumberFrames * sizeof(SInt16));
memcpy(pcm.mData, p, bufferList.mBuffers[0].mDataByteSize); // dunno why I can't memcpy the buffer list data directly into the pcm struct
pcm.mDataByteSize = inNumberFrames * sizeof(SInt16);
pcm.mNumberChannels = 1;
with:
memset(&audioBuffer, 0, sizeof(AudioBufferList));
memset(&audioBuffer.mBuffers[0], 0, sizeof(AudioBuffer));
audioBuffer.mNumberBuffers = 1;
audioBuffer.mBuffers[0].mNumberChannels = bufferList->mBuffers[0].mNumberChannels;
audioBuffer.mBuffers[0].mDataByteSize = bufferList->mBuffers[0].mDataByteSize;
audioBuffer.mBuffers[0].mData = bufferList->mBuffers[0].mData;
and in the converter's input proc callback I now do:
ioData->mBuffers[0].mData = audioBuffer.mBuffers[0].mData;
ioData->mBuffers[0].mDataByteSize = audioBuffer.mBuffers[0].mDataByteSize;
ioData->mBuffers[0].mNumberChannels = audioBuffer.mBuffers[0].mNumberChannels;
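For completeness: audioBuffer here has to be an AudioBufferList (despite its name) that both the recording callback and MyAudioConverterCallback can see; its declaration isn't shown in the answer, so presumably something along these lines (placement assumed):

// Assumed declaration, e.g. at file scope, shared by recordingCallback and
// MyAudioConverterCallback: the recording callback points it at the buffer that
// AudioUnitRender just filled (no copy, no malloc), and the input proc hands that
// same memory to AudioConverterFillComplexBuffer.
static AudioBufferList audioBuffer;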