iOS RemoteIO - AudioUnitAddRenderNotify callback

Time: 2012-11-07 04:18:40

Tags: core-audio recording remoteio

I am trying to record from RemoteIO using AudioUnitAddRenderNotify. Basically, I cannot get samples from bus 1, which is my input bus. recordingCallback never gets past this check:

if (*ioActionFlags & kAudioUnitRenderAction_PostRender || inBusNumber != 1) {
    return noErr;
}
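
A quick diagnostic (just a sketch, assuming the same callback signature shown further down) would be to log what each notification actually carries before that early return filters it out:

// Hypothetical diagnostic logging, placed just before the early return above
NSLog(@"notify: bus=%lu flags=0x%x frames=%lu",
      (unsigned long)inBusNumber,
      (unsigned int)*ioActionFlags,
      (unsigned long)inNumberFrames);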

But I was told that the recording callback should be invoked for every bus on each render cycle, i.e. called once with inBusNumber == 0 and then with inBusNumber == 1, which are the output (RemoteIO out) and the input (recording bus) respectively.

What can I do to get recordingCallback invoked on my input bus so that I can record? Thanks.

Pier.

Here is the callback.

static OSStatus recordingCallback(void *inRefCon,
                              AudioUnitRenderActionFlags *ioActionFlags,
                              const AudioTimeStamp *inTimeStamp,
                              UInt32 inBusNumber,
                              UInt32 inNumberFrames,
                              AudioBufferList *ioData) {
NSLog(@"Entered recording callback");

// Only do pre render on bus 1
if (*ioActionFlags & kAudioUnitRenderAction_PostRender || inBusNumber != 1) {
    return noErr;
}

RIO *rio = (RIO*)inRefCon;
AudioUnit rioUnit = rio->theAudioUnit;
//ExtAudioFileRef eaf = rio->outEAF;
AudioBufferList abl = rio->audioBufferList;

SInt16 samples[NUMBER_OF_SAMPLES]; // A large enough size to not have to worry about buffer overrun; SInt16 to match the 16-bit stream format
abl.mNumberBuffers = 1;
abl.mBuffers[0].mData = samples;
abl.mBuffers[0].mNumberChannels = 1;
abl.mBuffers[0].mDataByteSize = inNumberFrames * sizeof(SInt16);

OSStatus result;
result = AudioUnitRender(rioUnit,
                         ioActionFlags,
                         inTimeStamp,
                         inBusNumber, 
                         inNumberFrames,
                         &abl);

if (noErr != result) { NSLog(@"Obtain recorded samples error! Error : %ld", result); }

NSLog(@"Bus %ld", inBusNumber); 

// React to a recording flag, if recording, save the abl into own buffer, else ignore
if (rio->recording)
{
    TPCircularBufferProduceBytes(&rio->buffer, abl.mBuffers[0].mData, inNumberFrames * sizeof(SInt16));
    //rio->timeIncurred += ((float)inNumberFrames) / 44100.0;
    //NSLog(@"Self-calculated time incurred: %f", rio->timeIncurred);
}

return noErr;

}
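
For what it's worth, the sketch below is a variant sometimes used for RemoteIO recording that does not wait for a pass with inBusNumber == 1: it reacts to the pre-render notification (which arrives for the output element) and pulls the input element (bus 1) explicitly with AudioUnitRender. It is only a sketch; the RIO struct, NUMBER_OF_SAMPLES and TPCircularBufferProduceBytes are assumed to be the same ones used in recordingCallback above, and it would be installed with AudioUnitAddRenderNotify in the same way.

// Hypothetical variant of the notify proc: pull the mic element explicitly
// instead of waiting for a notification with inBusNumber == 1.
static OSStatus notifyAndPullInput(void *inRefCon,
                                   AudioUnitRenderActionFlags *ioActionFlags,
                                   const AudioTimeStamp *inTimeStamp,
                                   UInt32 inBusNumber,
                                   UInt32 inNumberFrames,
                                   AudioBufferList *ioData) {
    // Act once per render cycle, on the pre-render notification.
    if (!(*ioActionFlags & kAudioUnitRenderAction_PreRender)) {
        return noErr;
    }

    RIO *rio = (RIO *)inRefCon;

    SInt16 samples[NUMBER_OF_SAMPLES];      // 16-bit mono, matching the stream format
    AudioBufferList abl;
    abl.mNumberBuffers = 1;
    abl.mBuffers[0].mNumberChannels = 1;
    abl.mBuffers[0].mDataByteSize = inNumberFrames * sizeof(SInt16);
    abl.mBuffers[0].mData = samples;

    AudioUnitRenderActionFlags flags = 0;   // fresh flags for the explicit render call
    OSStatus result = AudioUnitRender(rio->theAudioUnit,
                                      &flags,
                                      inTimeStamp,
                                      1,                // bus 1 = the input (mic) element
                                      inNumberFrames,
                                      &abl);

    if (noErr == result && rio->recording) {
        TPCircularBufferProduceBytes(&rio->buffer, abl.mBuffers[0].mData,
                                     inNumberFrames * sizeof(SInt16));
    }
    return result;
}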

Here is the code that sets up the callback.

- (void)setupAudioUnitRemoteIO {

UInt32 framesPerSlice = 0;
UInt32 framesPerSlicePropertySize = sizeof (framesPerSlice);
UInt32 sampleRatePropertySize = sizeof (_graphSampleRate);

// Describe audio component
AudioComponentDescription desc;
desc.componentType = kAudioUnitType_Output;
desc.componentSubType = kAudioUnitSubType_RemoteIO;
desc.componentFlags = 0;
desc.componentFlagsMask = 0;
desc.componentManufacturer = kAudioUnitManufacturer_Apple;

// Get component
AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);

// Get audio units
status = AudioComponentInstanceNew(inputComponent, &_remoteIOUnit);

if (noErr != status) { NSLog(@"Get audio units error"); return; }

// Enable IO for recording
UInt32 flag = 1;
status = AudioUnitSetProperty(_remoteIOUnit,
                              kAudioOutputUnitProperty_EnableIO, 
                              kAudioUnitScope_Input, 
                              kInputBus,
                              &flag, 
                              sizeof(flag));
if (noErr != status) { NSLog(@"Enable IO for recording error"); return; }

// Enable IO for playback
status = AudioUnitSetProperty(_remoteIOUnit, 
                              kAudioOutputUnitProperty_EnableIO, 
                              kAudioUnitScope_Output, 
                              kOutputBus,
                              &flag, 
                              sizeof(flag));

if (noErr != status) { NSLog(@"Enable IO for playback error"); return; }

// Obtain the value of the maximum-frames-per-slice from the I/O unit.
status = AudioUnitGetProperty(_remoteIOUnit,
                              kAudioUnitProperty_MaximumFramesPerSlice,
                              kAudioUnitScope_Global,
                              0,
                              &framesPerSlice,
                              &framesPerSlicePropertySize);

if (noErr != status) { NSLog(@"Get maximum frames per slice error"); return; }

// Describe format

audioFormat.mSampleRate         = 44100.00;
audioFormat.mFormatID           = kAudioFormatLinearPCM;
audioFormat.mFormatFlags        = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
audioFormat.mFramesPerPacket    = 1;
audioFormat.mChannelsPerFrame   = 1;
audioFormat.mBitsPerChannel     = 16;
audioFormat.mBytesPerPacket     = 2;
audioFormat.mBytesPerFrame      = 2;

// Apply format: first to the output scope of the input bus (mic data delivered to the app), then to the input scope of the output bus (data the app feeds to the speaker)
status = AudioUnitSetProperty(_remoteIOUnit, 
                              kAudioUnitProperty_StreamFormat, 
                              kAudioUnitScope_Output, 
                              kInputBus, 
                              &audioFormat, 
                              sizeof(audioFormat));

if (noErr != status) { NSLog(@"Apply format to input bus error"); return; }

status = AudioUnitSetProperty(_remoteIOUnit, 
                              kAudioUnitProperty_StreamFormat, 
                              kAudioUnitScope_Input, 
                              kOutputBus, 
                              &audioFormat, 
                              sizeof(audioFormat));
if (noErr != status) { NSLog(@"Apply format to output bus error"); return; }

rio.theAudioUnit = _remoteIOUnit; // Need this, as used in callbacks to refer to remoteIO

AURenderCallbackStruct callbackStruct;
callbackStruct.inputProc = recordingCallback;
callbackStruct.inputProcRefCon = &rio;
status = AudioUnitAddRenderNotify(_remoteIOUnit, callbackStruct.inputProc,  callbackStruct.inputProcRefCon);

NSAssert (status == noErr, @"Problem adding recordingCallback to RemoteIO. Error code: %d '%.4s'", (int) status, (const char *)&status);
}
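
For comparison, this is a minimal sketch of the input-callback route that is commonly used for RemoteIO recording instead of (or alongside) a render notify; it would go in setupAudioUnitRemoteIO. It assumes the same _remoteIOUnit, rio and recordingCallback as above, and that kInputBus is 1 (the microphone element). With this property set, the callback is driven once per capture buffer with inBusNumber == 1, so the AudioUnitRender call inside it can pull the samples.

// Sketch: register recordingCallback as the input (microphone) callback
AURenderCallbackStruct inputCallbackStruct;
inputCallbackStruct.inputProc = recordingCallback;
inputCallbackStruct.inputProcRefCon = &rio;

status = AudioUnitSetProperty(_remoteIOUnit,
                              kAudioOutputUnitProperty_SetInputCallback,
                              kAudioUnitScope_Global,
                              kInputBus,                 // bus 1 = microphone element
                              &inputCallbackStruct,
                              sizeof(inputCallbackStruct));
if (noErr != status) { NSLog(@"Set input callback error"); }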

0 Answers:

There are no answers yet.