How do I encode PCM data to AAC in the iPhone SDK? (iphone dev / Audio)

Date: 2012-01-13 08:25:36

Tags: iphone encoding pcm aac audio-converter

I guess AudioConverterFillComplexBuffer is the solution, but I'm not sure this approach is right.

1. AudioUnit

I initialize the AudioUnit; recordingCallback is its callback. The output format is PCM, and I record it to a file (I can play the recorded file back).

2. AudioConverter

I added AudioConverterFillComplexBuffer, but I don't really understand how it works.

3. Problem

The audioConverterComplexInputDataProc callback is only ever called once.

How do I use the AudioConverter API correctly?

My code is attached below.

#import "AACAudioRecorder.h"
#define kOutputBus 0
#define kInputBus 1
@implementation AACAudioRecorder

This is the callback for AudioConverterFillComplexBuffer.

static OSStatus audioConverterComplexInputDataProc(  AudioConverterRef               inAudioConverter,
                                      UInt32*                         ioNumberDataPackets,
                                      AudioBufferList*                ioData,
                                      AudioStreamPacketDescription**  outDataPacketDescription,
                                      void*                           inUserData){
    ioData = (AudioBufferList*)inUserData;
    return 0;
}

This is the AudioUnit input callback.

static OSStatus recordingCallback(void *inRefCon, 
                                        AudioUnitRenderActionFlags *ioActionFlags, 
                                        const AudioTimeStamp *inTimeStamp, 
                                        UInt32 inBusNumber, 
                                        UInt32 inNumberFrames, 
                                        AudioBufferList *ioData) {
   @autoreleasepool {

       AudioBufferList *bufferList;

       AACAudioRecorder *THIS = (AACAudioRecorder *)inRefCon;
       OSStatus err = AudioUnitRender(THIS->m_audioUnit,
                                      ioActionFlags,
                                      inTimeStamp, 1, inNumberFrames, bufferList);

       if (err) { NSLog(@"%s AudioUnitRender error %d\n",__FUNCTION__, (int)err); return err; }

       NSString *recordFile = 
                       [NSTemporaryDirectory() stringByAppendingPathComponent: @"auioBuffer.pcm"];
       FILE *fp;
       fp = fopen([recordFile UTF8String], "a+");
       fwrite(bufferList->mBuffers[0].mData, sizeof(Byte),
              bufferList->mBuffers[0].mDataByteSize, fp);
       fclose(fp);    

       [THIS convert:bufferList ioOutputDataPacketSize:&inNumberFrames];

       if (err) { NSLog(@"%s : AudioFormat Convert error %d\n", __FUNCTION__, (int)err); }
    }
    return noErr;
}

Status-check helper

static void checkStatus(OSStatus status, const char* str){
    if (status != noErr) {
        NSLog(@"%s %s error : %ld ",__FUNCTION__, str, status);
    }
}

Conversion method: PCM -> AAC

- (void)convert:(AudioBufferList*)input_bufferList ioOutputDataPacketSize:(UInt32*)packetSizeRef 
{
    UInt32 size = sizeof(UInt32);
    UInt32 maxOutputSize;
    AudioConverterGetProperty(m_audioConverterRef, 
                              kAudioConverterPropertyMaximumOutputPacketSize, 
                              &size, 
                              &maxOutputSize);

    AudioBufferList *output_bufferList = (AudioBufferList *)malloc(sizeof(AudioBufferList));

    output_bufferList->mNumberBuffers               = 1;
    output_bufferList->mBuffers[0].mNumberChannels  = 1;
    output_bufferList->mBuffers[0].mDataByteSize    = *packetSizeRef * 2;
    output_bufferList->mBuffers[0].mData  = (AudioUnitSampleType *)malloc(*packetSizeRef * 2);

    OSStatus        err;
    err = AudioConverterFillComplexBuffer(
                                          m_audioConverterRef,
                                          audioConverterComplexInputDataProc,
                                          input_bufferList,
                                          packetSizeRef,
                                          output_bufferList,
                                          NULL
                                          );


    if (err) {NSLog(@"%s : AudioFormat Convert error %d\n",__FUNCTION__, (int)err);  }
}

This is the initialization method.

- (void)initialize
{ 
    // ...


    OSStatus status;

    // Describe audio component
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;

    // Get component
    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);

    // Get audio units
    status = AudioComponentInstanceNew(inputComponent, &m_audioUnit);
    checkStatus(status,"AudioComponentInstanceNew");

    // Enable IO for recording
    UInt32 flag = 1;
    status = AudioUnitSetProperty(m_audioUnit, 
                                  kAudioOutputUnitProperty_EnableIO, 
                                  kAudioUnitScope_Input, 
                                  kInputBus,
                                  &flag, 
                                  sizeof(flag));
    checkStatus(status,"Enable IO for recording");

    // Enable IO for playback
    status = AudioUnitSetProperty(m_audioUnit, 
                                  kAudioOutputUnitProperty_EnableIO, 
                                  kAudioUnitScope_Output, 
                                  kOutputBus,
                                  &flag, 
                                  sizeof(flag));
    checkStatus(status,"Enable IO for playback");

    // Describe format
    AudioStreamBasicDescription audioFormat;
    audioFormat.mSampleRate   = 44100.00;
    audioFormat.mFormatID   = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags  = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    audioFormat.mFramesPerPacket = 1;
    audioFormat.mChannelsPerFrame = 1;
    audioFormat.mBitsPerChannel  = 16;
    audioFormat.mBytesPerPacket  = 2;
    audioFormat.mBytesPerFrame  = 2;

    // Apply format
    status = AudioUnitSetProperty(m_audioUnit, 
                                  kAudioUnitProperty_StreamFormat, 
                                  kAudioUnitScope_Output, 
                                  kInputBus, 
                                  &audioFormat, 
                                  sizeof(audioFormat));
    checkStatus(status,"Apply format1");
    status = AudioUnitSetProperty(m_audioUnit, 
                                  kAudioUnitProperty_StreamFormat, 
                                  kAudioUnitScope_Input, 
                                  kOutputBus, 
                                  &audioFormat, 
                                  sizeof(audioFormat));
    checkStatus(status,"Apply format2");


    // Set input callback
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = recordingCallback;
    callbackStruct.inputProcRefCon = self;
    status = AudioUnitSetProperty(m_audioUnit, 
                                  kAudioOutputUnitProperty_SetInputCallback, 
                                  kAudioUnitScope_Global, 
                                  kInputBus, 
                                  &callbackStruct, 
                                  sizeof(callbackStruct));
    checkStatus(status,"Set input callback");

    // Initialise
    status = AudioUnitInitialize(m_audioUnit);
    checkStatus(status,"AudioUnitInitialize");

    // Set ASBD For converting Output Stream

    AudioStreamBasicDescription outputFormat;
    memset(&outputFormat, 0, sizeof(outputFormat));
    outputFormat.mSampleRate  = 44100.00;
    outputFormat.mFormatID   = kAudioFormatMPEG4AAC;
    outputFormat.mFormatFlags       = kMPEG4Object_AAC_Main; 
    outputFormat.mFramesPerPacket = 1024;
    outputFormat.mChannelsPerFrame = 1;
    outputFormat.mBitsPerChannel = 0;
    outputFormat.mBytesPerFrame = 0;
    outputFormat.mBytesPerPacket = 0;

    //Create An Audio Converter
    status = AudioConverterNew( &audioFormat, &outputFormat, &m_audioConverterRef );
    checkStatus(status,"Create An Audio Converter");
    if(m_audioConverterRef) NSLog(@"m_audioConverterRef is created");

}

AudioOutputUnitStart

- (void)StartRecord
{
    OSStatus status = AudioOutputUnitStart(m_audioUnit);
    checkStatus(status,"AudioOutputUnitStart");
}

AudioOutputUnitStop

- (void)StopRecord
{
    OSStatus status = AudioOutputUnitStop(m_audioUnit);
    checkStatus(status,"AudioOutputUnitStop");
}

Finish

- (void)finish
{
    AudioUnitUninitialize(m_audioUnit);
}


@end

2 answers:

Answer 0 (score: 1)

It took me a long time to figure out AudioConverterFillComplexBuffer, in particular how to use it to convert audio in real time. I posted my approach here: How do I use CoreAudio's AudioConverter to encode AAC in real-time?
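For reference, here is a minimal sketch of the input-proc contract (my own sketch, not the linked answer's exact code, using a hypothetical ConverterFeedContext helper): the proc must point ioData->mBuffers at the PCM you actually have, set *ioNumberDataPackets to how many packets that is, and report 0 when nothing is left. Assigning something to the local ioData parameter, as in the code above, has no effect on the converter.

#import <AudioToolbox/AudioToolbox.h>

// Hypothetical helper for this sketch: PCM captured by the render callback,
// tracked as a sample pointer plus a remaining packet (frame) count.
typedef struct {
    const SInt16 *pcmSamples;    // 16-bit mono PCM: one packet == one frame == one SInt16
    UInt32        packetsLeft;
} ConverterFeedContext;

static OSStatus feedPCMToConverter(AudioConverterRef              inAudioConverter,
                                   UInt32                        *ioNumberDataPackets,
                                   AudioBufferList               *ioData,
                                   AudioStreamPacketDescription **outDataPacketDescription,
                                   void                          *inUserData)
{
    ConverterFeedContext *ctx = (ConverterFeedContext *)inUserData;

    if (ctx->packetsLeft == 0) {
        // No PCM left: report zero packets so the converter stops pulling.
        *ioNumberDataPackets = 0;
        return noErr;
    }

    // Give the converter at most what it asked for, and at most what we have.
    UInt32 packetsToGive = *ioNumberDataPackets;
    if (packetsToGive > ctx->packetsLeft) packetsToGive = ctx->packetsLeft;

    // Point the converter's buffer list at our PCM (2 bytes per packet for 16-bit mono).
    ioData->mNumberBuffers              = 1;
    ioData->mBuffers[0].mNumberChannels = 1;
    ioData->mBuffers[0].mData           = (void *)ctx->pcmSamples;
    ioData->mBuffers[0].mDataByteSize   = packetsToGive * sizeof(SInt16);

    // Advance our bookkeeping and tell the converter how much it actually got.
    ctx->pcmSamples      += packetsToGive;
    ctx->packetsLeft     -= packetsToGive;
    *ioNumberDataPackets  = packetsToGive;

    if (outDataPacketDescription) {
        *outDataPacketDescription = NULL;   // linear PCM needs no packet descriptions
    }
    return noErr;
}

When the context runs dry, AudioConverterFillComplexBuffer comes back with fewer output packets than requested; for real-time capture the usual pattern is to buffer incoming PCM (e.g. in a ring buffer) and only report packets you really have.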

Answer 1 (score: 0)

See https://developer.apple.com/library/ios/samplecode/iPhoneACFileConvertTest/Introduction/Intro.html

It demonstrates how to use the Audio Converter API to convert from a PCM audio format to a compressed format, including AAC.
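The sample is built around a pull loop. Below is a stripped-down sketch of that pattern (my simplification, not the sample's actual code; it reuses the hypothetical feedPCMToConverter and ConverterFeedContext names from the sketch above, and writes raw AAC packets, so a playable file would still need ADTS headers or the AudioFile/ExtAudioFile APIs).

#import <AudioToolbox/AudioToolbox.h>
#include <stdio.h>
#include <stdlib.h>

static void convertAllPCM(AudioConverterRef     converter,
                          ConverterFeedContext *feedContext,
                          FILE                 *aacOutputFile)
{
    // Ask the converter how big one encoded packet can get, and size the output buffer to that.
    UInt32 maxOutputPacketSize = 0;
    UInt32 propSize = sizeof(maxOutputPacketSize);
    AudioConverterGetProperty(converter,
                              kAudioConverterPropertyMaximumOutputPacketSize,
                              &propSize, &maxOutputPacketSize);

    void *outputBuffer = malloc(maxOutputPacketSize);
    AudioStreamPacketDescription outPacketDesc;

    for (;;) {
        AudioBufferList outList;
        outList.mNumberBuffers              = 1;
        outList.mBuffers[0].mNumberChannels = 1;
        outList.mBuffers[0].mDataByteSize   = maxOutputPacketSize;
        outList.mBuffers[0].mData           = outputBuffer;

        // Pull one encoded AAC packet at a time to keep the bookkeeping simple.
        UInt32 outPackets = 1;
        OSStatus err = AudioConverterFillComplexBuffer(converter,
                                                       feedPCMToConverter,
                                                       feedContext,
                                                       &outPackets,
                                                       &outList,
                                                       &outPacketDesc);
        if (err != noErr || outPackets == 0) {
            break;  // error, or the input proc reported that no PCM is left
        }

        // Write the raw AAC packet bytes the converter produced this round.
        fwrite(outList.mBuffers[0].mData, 1, outList.mBuffers[0].mDataByteSize, aacOutputFile);
    }
    free(outputBuffer);
}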