Clipping with a kAudioUnitType_Output audio unit of subtype kAudioUnitSubType_VoiceProcessingIO

Date: 2015-04-17 14:47:08

Tags: ios audio core-audio audiounit

I am building a recording and playback app using a kAudioUnitType_Output audio unit with the kAudioUnitSubType_VoiceProcessingIO subtype. Sometimes it works fine, but sometimes there is a lot of clipping. I suspect ambient noise plays a role, but I can't tell whether the clipping is a side effect of the AEC or the result of a mistake in my audio unit setup.

Here are my setup functions:

struct CallbackData {
    AudioUnit               rioUnit;

    BOOL*                   audioChainIsBeingReconstructed;

    CallbackData(): rioUnit(NULL), audioChainIsBeingReconstructed(NULL){}
} cd;  

static OSStatus performRender (void                         *inRefCon,
                               AudioUnitRenderActionFlags   *ioActionFlags,
                               const AudioTimeStamp         *inTimeStamp,
                               UInt32                       inBusNumber,
                               UInt32                       inNumberFrames,
                               AudioBufferList              *ioData)
{
    OSStatus err = noErr;
    if (*cd.audioChainIsBeingReconstructed == NO)
    {
        // Pull the captured (voice-processed) input from bus 1 into ioData.
        err = AudioUnitRender(cd.rioUnit, ioActionFlags, inTimeStamp, 1, inNumberFrames, ioData);

        // The stream format configured in setupIOUnit is interleaved Float32.
        float *inputFrames = (float*)(ioData->mBuffers->mData);
        //engine_process_iOS(inputFrames, ioData->mBuffers->mNumberChannels * inNumberFrames);
    }

    return err;
}
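
Since the stream format configured in setupIOUnit below is interleaved Float32, one quick way to tell whether the distortion is already present in the rendered buffer is to scan the samples for digital full scale right after AudioUnitRender. This is only a diagnostic sketch; scanForClipping is a name introduced here for illustration, not part of any API.

#include <math.h>   // fabsf
#include <stdio.h>  // printf

// Diagnostic sketch: count samples at or beyond digital full scale (|x| >= 1.0)
// in an interleaved Float32 buffer such as the one performRender receives.
static void scanForClipping(const AudioBufferList *ioData, UInt32 inNumberFrames)
{
    const float *samples = (const float *)ioData->mBuffers[0].mData;
    UInt32 sampleCount = inNumberFrames * ioData->mBuffers[0].mNumberChannels;
    UInt32 clipped = 0;
    for (UInt32 i = 0; i < sampleCount; ++i) {
        if (fabsf(samples[i]) >= 1.0f) {
            ++clipped;
        }
    }
    if (clipped > 0) {
        // printf from the render thread is not production-safe,
        // but is acceptable for a one-off check.
        printf("clipped %u of %u samples\n", (unsigned)clipped, (unsigned)sampleCount);
    }
}

Calling scanForClipping(ioData, inNumberFrames) right after AudioUnitRender shows whether full-scale samples are already coming out of the voice-processing unit or are introduced later in the chain.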


- (void)setupAudioSession
{
try {
    // Configure the audio session
    AVAudioSession *sessionInstance = [AVAudioSession sharedInstance];

    NSError *error = nil;
    [sessionInstance setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:AVAudioSessionCategoryOptionAllowBluetooth  error:&error];

    NSTimeInterval bufferDuration = .005;
    [sessionInstance setPreferredIOBufferDuration:bufferDuration error:&error];

    [sessionInstance setPreferredSampleRate:44100 error:&error];

    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(handleInterruption:)
                                                 name:AVAudioSessionInterruptionNotification
                                               object:sessionInstance];

    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(handleRouteChange:)
                                                 name:AVAudioSessionRouteChangeNotification
                                               object:sessionInstance];

    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(handleMediaServerReset:)
                                                 name:AVAudioSessionMediaServicesWereResetNotification
                                               object:sessionInstance];


    [[AVAudioSession sharedInstance] setActive:YES error:&error];
}

catch (NSException *e) {
    NSLog(@"Error returned from setupAudioSession");
}
catch (...) {
    NSLog(@"Unknown error returned from setupAudioSession");
}

return;
}
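
One thing worth adding at the end of setupAudioSession (a suggestion, not part of the original code) is a check of what the session actually granted after activation: setPreferredIOBufferDuration: and setPreferredSampleRate: are only requests, and a hardware sample rate different from the 44100 Hz stream format set in setupIOUnit means an extra resampling stage.

// Log the values iOS actually granted; the preferred* setters are requests,
// not guarantees.
NSLog(@"session sample rate: %.0f Hz, IO buffer duration: %.4f s",
      sessionInstance.sampleRate, sessionInstance.IOBufferDuration);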

- (void)setupIOUnit
{
try {
    // Create a new instance of the voice-processing I/O unit

    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_VoiceProcessingIO ;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;

    AudioComponent comp = AudioComponentFindNext(NULL, &desc);
    AudioComponentInstanceNew(comp, &_rioUnit);

    UInt32 one = 1;
    AudioUnitSetProperty(_rioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof(one));
    AudioUnitSetProperty(_rioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, 0, &one, sizeof(one));

    CAStreamBasicDescription ioFormat = CAStreamBasicDescription(44100, 2, CAStreamBasicDescription::kPCMFormatFloat32, true);

    AudioUnitSetProperty(_rioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &ioFormat, sizeof(ioFormat));
    AudioUnitSetProperty(_rioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &ioFormat, sizeof(ioFormat));

    UInt32 maxFramesPerSlice = 4096;
    AudioUnitSetProperty(_rioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maxFramesPerSlice, sizeof(UInt32));

    UInt32 propSize = sizeof(UInt32);
    AudioUnitGetProperty(_rioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maxFramesPerSlice, &propSize);

    cd.rioUnit = _rioUnit;
    cd.audioChainIsBeingReconstructed = &_audioChainIsBeingReconstructed;

    // Set the render callback on the I/O unit
    AURenderCallbackStruct renderCallback;
    renderCallback.inputProc = performRender;
    renderCallback.inputProcRefCon = NULL;
    AudioUnitSetProperty(_rioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &renderCallback, sizeof(renderCallback));

    // Initialize the I/O unit
    AudioUnitInitialize(_rioUnit);
    //if (err) NSLog(@"couldn't start AURemoteIO: %d", (int)err);

}

catch (NSException *e) {
    NSLog(@"Error returned from setupIOUnit");
}
catch (...) {
    NSLog(@"Unknown error returned from setupIOUnit");
}

return;
}
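
To narrow down whether the voice-processing DSP itself (AEC/AGC) is driving the signal into clipping, one option is to temporarily bypass the processing, or to disable only its automatic gain control, and compare. The sketch below is a diagnostic that could go inside setupIOUnit before AudioUnitInitialize; the global scope / element 0 choice follows common usage of these properties and may need adjusting.

// Diagnostic only: bypass all voice processing so the unit behaves like a
// plain RemoteIO. If the clipping disappears, the DSP chain is involved.
UInt32 bypass = 1;
AudioUnitSetProperty(_rioUnit, kAUVoiceIOProperty_BypassVoiceProcessing,
                     kAudioUnitScope_Global, 0, &bypass, sizeof(bypass));

// Alternatively, keep the AEC but turn off only the automatic gain control,
// which is the stage most likely to push a noisy input toward full scale.
UInt32 enableAGC = 0;
AudioUnitSetProperty(_rioUnit, kAUVoiceIOProperty_VoiceProcessingEnableAGC,
                     kAudioUnitScope_Global, 0, &enableAGC, sizeof(enableAGC));

Testing the two settings one at a time makes it clearer whether the clipping comes from the processing chain or from somewhere else (a format mismatch or an overloaded analog input).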

What could be causing this clipping?

1 Answer:

Answer 0 (score: 0):

The samples in the audio buffer in the callback should be SInt16. Try casting it:

SInt16 *inputFrames = (SInt16 *)(ioData->mBuffers[0].mData);
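
Since the question configures the unit for 32-bit float, it may be worth reading the format back from the unit before changing the cast. The snippet below (an addition for verification, not from the original answer) checks whether the samples on the output side of the input bus are actually floating point or integer.

// Read back the format in effect on the output scope of the input bus (bus 1)
// and check whether the samples are floating point.
AudioStreamBasicDescription asbd = {0};
UInt32 size = sizeof(asbd);
AudioUnitGetProperty(_rioUnit, kAudioUnitProperty_StreamFormat,
                     kAudioUnitScope_Output, 1, &asbd, &size);
BOOL isFloat = (asbd.mFormatFlags & kAudioFormatFlagIsFloat) != 0;
NSLog(@"bits per channel: %u, float samples: %d", (unsigned)asbd.mBitsPerChannel, (int)isFloat);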