我使用TPAACAudioConverter将CAF转换为AAC格式,但转换后的文件持续时间为0

时间:2013-07-25 13:38:54

标签: iphone ios objective-c

我正在使用AVAudioRecorder以caf格式录音。之后,我使用TPAACAudioConverter将caf文件转换为aac格式。转换本身可以正常完成,但转换后文件的持续时间显示为00:00。有没有办法获得aac音频文件的正确持续时间?

2 个答案:

答案 0 :(得分:2)

某些模拟器不支持AAC格式。请在真机上测试:在设备上它会正常工作,你应该能得到AAC音频文件的持续时间。

答案 1 :(得分:0)

你可以播放caf文件吗?

如果您只想录制麦克风到aac文件的声音,可以使用音频队列服务(我可以发布一些代码)

编辑:下面是基于Apple开发者教程的一个实现。由于我为了贴合你的问题做了一些修改,代码可能存在错误。

//AudioQ.mm
@implementation AudioQ
// Number of capture buffers kept in flight on the queue (Apple's Audio
// Queue Services guide recommends three: one filling, one draining, one spare).
static const int nBuffer = 3;

// Bundle of everything the recording callbacks need, mirroring the
// AQRecorderState struct from Apple's Audio Queue Services Programming Guide.
struct AQRecorderState{
    AudioStreamBasicDescription mDataFormat;      // output (AAC) format description
    AudioQueueRef               mQueue;           // the recording audio queue
    AudioQueueBufferRef         mBuffers[nBuffer]; // capture buffers owned by the queue
    AudioFileID                 mAudioFile;       // destination audio file object
    UInt32                      bufferByteSize;   // size of each capture buffer, in bytes
    SInt64                      mCurrentPacket;   // next packet index to write to the file
    bool                        mIsRunning;       // cleared by -stop; callback stops re-enqueuing
};

// NOTE(review): file-scope shared state means only one recorder instance
// can safely exist at a time — consider making these ivars.
AQRecorderState aqData;
CFURLRef url; // NOTE(review): never assigned in this snippet — presumably leftover
CFURLRef url;
/*
 * AudioFile_WriteProc installed via AudioFileInitializeWithCallbacks.
 * Called whenever the audio file object flushes encoded bytes; `buffer`
 * holds raw AAC/ADTS data that could be forwarded (e.g. for broadcasting).
 *
 * BUG FIX: a write proc MUST report how many bytes it consumed through
 * *actualCount (per AudioFile.h). The original returned without setting
 * it, leaving the file object with an indeterminate byte count.
 */
static OSStatus BufferFilledHandler(
                                    void *                               inUserData,
                                    SInt64                               inPosition,
                                    UInt32                               requestCount,
                                    const void *                         buffer,
                                    UInt32 *                             actualCount
                                    ){
    // callback when you write to the file
    // you can handle audio packet and send them for broadcasting
    if (actualCount) {
        *actualCount = requestCount; // claim all bytes were consumed
    }
    return noErr;
}

/*
 * Audio queue input callback: invoked by the system each time a capture
 * buffer has been filled with encoded packets. Appends the packets to the
 * output file and re-enqueues the buffer while recording is active.
 */
static void HandleInputBuffer(
                              void                              *aqData,
                              AudioQueueRef                     inAq,
                              AudioQueueBufferRef                   inBuffer,
                              const AudioTimeStamp              *inStartTime,
                              UInt32                                inNumPackets,
                              const AudioStreamPacketDescription    *inPacketDesc
                              ) {
    AQRecorderState *pAqData = (AQRecorderState*) aqData;

    // For a CBR format the queue may report 0 packets; derive the count
    // from the byte size (per Apple's Audio Queue Services guide). For
    // AAC here (VBR, mBytesPerPacket == 0) this is a no-op.
    if (inNumPackets == 0 && pAqData->mDataFormat.mBytesPerPacket != 0) {
        inNumPackets = inBuffer->mAudioDataByteSize / pAqData->mDataFormat.mBytesPerPacket;
    }

    OSStatus status = AudioFileWritePackets (
                               pAqData->mAudioFile,
                               false,                         // don't use caching
                               inBuffer->mAudioDataByteSize,  // bytes of audio data
                               inPacketDesc,                  // VBR packet descriptions
                               pAqData->mCurrentPacket,       // starting packet index
                               &inNumPackets,                 // in/out: packets written
                               inBuffer->mAudioData);
    if (status == noErr) {
        pAqData->mCurrentPacket += inNumPackets; // advance file write position
    } else {
        NSLog(@"err writing packet");
    }

    // Once -stop has cleared mIsRunning, stop feeding buffers back to the queue.
    if (pAqData->mIsRunning == 0)
        return;
    AudioQueueEnqueueBuffer(pAqData->mQueue,inBuffer,0,NULL);
}

/// Configures the output format, creates the input audio queue, opens the
/// callback-backed destination file, and primes the capture buffers.
/// Returns noErr on success or the first Core Audio error encountered.
-(OSStatus) initializeAQ{

    //--- set the output format: mono AAC @ 22.05 kHz ---//
    // For a compressed (VBR) format, bytes-per-packet/frame and
    // bits-per-channel are 0 and the codec fills them in.
    aqData.mDataFormat.mSampleRate = 22050;
    aqData.mDataFormat.mFormatID = kAudioFormatMPEG4AAC;
    aqData.mDataFormat.mFormatFlags = kMPEG4Object_AAC_Main;
    aqData.mDataFormat.mBytesPerPacket = 0;
    aqData.mDataFormat.mFramesPerPacket = 1024; // AAC always encodes 1024 frames per packet
    aqData.mDataFormat.mBytesPerFrame = 0;
    aqData.mDataFormat.mChannelsPerFrame = 1;
    aqData.mDataFormat.mBitsPerChannel = 0;
    AudioFileTypeID fileType = kAudioFileAAC_ADTSType;

    aqData.bufferByteSize = 0x5000; // ~20 KB per capture buffer

    OSStatus status = AudioQueueNewInput(&aqData.mDataFormat, HandleInputBuffer, &aqData,
                                         CFRunLoopGetMain(), kCFRunLoopCommonModes, 0,
                                         &aqData.mQueue);
    if (status != noErr) return status;
    aqData.mCurrentPacket = 0;
    aqData.mIsRunning = true;

    //--- record in a file get the callback when writing ---//
    // BUG FIX: the client-data argument must be the AQRecorderState itself.
    // The original passed (void*)&pAqData — the address of a LOCAL pointer
    // variable — so callbacks received a dangling AQRecorderState**.
    status = AudioFileInitializeWithCallbacks(&aqData,
                                     nil,                 // no read proc (write-only file)
                                     BufferFilledHandler, // write proc
                                     nil,
                                     nil,
                                     fileType,
                                     &aqData.mDataFormat,
                                     kAudioFileFlags_EraseFile,
                                     &aqData.mAudioFile);
    if (status != noErr) return status;

    //--- prepare set of audio queue buffers ---//
    for(int i = 0 ; i < nBuffer ; i++){
        AudioQueueAllocateBuffer(aqData.mQueue, aqData.bufferByteSize, &aqData.mBuffers[i]);
        AudioQueueEnqueueBuffer(aqData.mQueue, aqData.mBuffers[i], 0, NULL);
    }
    return noErr;
}

/// Begins capturing. A NULL start time means "start as soon as possible".
-(void) start{
    AudioQueueStart(aqData.mQueue, NULL);       
}

/// Stops recording and tears everything down. Order follows Apple's
/// Audio Queue Services guide: stop the queue synchronously (flushing any
/// buffered data through the callback), mark not-running so the callback
/// stops re-enqueuing, dispose the queue, then close the file so its
/// packet table / duration is finalized.
-(void) stop{
    NSLog(@"stopping"); // typo fix: was "stoping"
    AudioQueueStop(aqData.mQueue, true); // true = stop immediately (synchronous)
    aqData.mIsRunning = false;
    AudioQueueDispose (aqData.mQueue,true);
    AudioFileClose (aqData.mAudioFile);
}
@end

AudioQ.h

// Audio queue input callback — see the implementation file for details.
// NOTE(review): declaring `static` functions in a header gives every
// importing translation unit its own copy; these prototypes presumably
// belong in AudioQ.mm instead — verify before shipping.
static void HandleInputBuffer(
                              void                                  *aqData,
                              AudioQueueRef                         inAq,
                              AudioQueueBufferRef                   inBuffer,
                              const AudioTimeStamp                  *inStartTime,
                              UInt32                                inNumPackets,
                              const AudioStreamPacketDescription    *inPacketDesc
                              );

// AudioFile_WriteProc used by AudioFileInitializeWithCallbacks.
static OSStatus BufferFilledHandler(
                                    void *                               inUserData,
                                    SInt64                               inPosition,
                                    UInt32                               requestCount,
                                    const void *                         buffer,
                                    UInt32 *                             actualCount
                                    );
// Public API (assumed to sit inside an @interface AudioQ block not shown here).
-(OSStatus)initializeAQ;
-(void)stop;
-(void)start;