Clicks/pops between AudioQueue buffers

Asked: 2012-07-09 18:54:22

Tags: ios, audio, audioqueue

As you can see from the code, in my callback I pull the audio data out of the buffer, put it into an NSData object, and send it to another class that uploads it to the server. This all works, in the sense that the server receives and plays the audio data. However, there is a clicking or tapping noise between buffers. I'm hoping someone can tell me what is causing it and how to fix it. I've read the other related posts, but they all seem to describe cases where only one buffer was used and adding more buffers was the fix; I'm already using 3 buffers and have tried adjusting that number, and it did not fix the problem.

AQRecorder.mm

#include "AQRecorder.h"
RestClient * restClient;
NSData* data;


// ____________________________________________________________________________________
// Determine the size, in bytes, of a buffer necessary to represent the supplied number
// of seconds of audio data.
int AQRecorder::ComputeRecordBufferSize(const AudioStreamBasicDescription *format, float seconds)
{
    int packets, frames, bytes = 0;
    try {
        frames = (int)ceil(seconds * format->mSampleRate);

        if (format->mBytesPerFrame > 0)
            bytes = frames * format->mBytesPerFrame;
        else {
            UInt32 maxPacketSize;
            if (format->mBytesPerPacket > 0)
                maxPacketSize = format->mBytesPerPacket;    // constant packet size
            else {
                UInt32 propertySize = sizeof(maxPacketSize);
                XThrowIfError(AudioQueueGetProperty(mQueue, kAudioQueueProperty_MaximumOutputPacketSize, &maxPacketSize,
                                                 &propertySize), "couldn't get queue's maximum output packet size");
            }
            if (format->mFramesPerPacket > 0)
                packets = frames / format->mFramesPerPacket;
            else
                packets = frames;   // worst-case scenario: 1 frame in a packet
            if (packets == 0)       // sanity check
                packets = 1;
            bytes = packets * maxPacketSize;
        }
    } catch (CAXException e) {
        char buf[256];
        fprintf(stderr, "Error: %s (%s)\n", e.mOperation, e.FormatError(buf));
        return 0;
    }   
    return bytes;
}

// ____________________________________________________________________________________
// AudioQueue callback function, called when an input buffer has been filled.
void AQRecorder::MyInputBufferHandler(  void *                              inUserData,
                                        AudioQueueRef                       inAQ,
                                        AudioQueueBufferRef                 inBuffer,
                                        const AudioTimeStamp *              inStartTime,
                                        UInt32                              inNumPackets,
                                        const AudioStreamPacketDescription* inPacketDesc)
{
    AQRecorder *aqr = (AQRecorder *)inUserData;


    try {
        if (inNumPackets > 0) {
            // write packets to file
//          XThrowIfError(AudioFileWritePackets(aqr->mRecordFile, FALSE, inBuffer->mAudioDataByteSize,
//                                           inPacketDesc, aqr->mRecordPacket, &inNumPackets, inBuffer->mAudioData),
//                     "AudioFileWritePackets failed");
            aqr->mRecordPacket += inNumPackets;



//            int numBytes = inBuffer->mAudioDataByteSize;       
//            SInt8 *testBuffer = (SInt8*)inBuffer->mAudioData;
//            
//            for (int i=0; i < numBytes; i++)
//            {
//                SInt8 currentData = testBuffer[i];
//                printf("Current data in testbuffer is %d", currentData);
//                
//                NSData * temp = [NSData dataWithBytes:currentData length:sizeof(currentData)];
//            }


            data=[[NSData dataWithBytes:inBuffer->mAudioData length:inBuffer->mAudioDataByteSize]retain];

            [restClient uploadAudioData:data url:nil];

        }


        // if we're not stopping, re-enqueue the buffer so that it gets filled again
        if (aqr->IsRunning())
            XThrowIfError(AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL), "AudioQueueEnqueueBuffer failed");
    } catch (CAXException e) {
        char buf[256];
        fprintf(stderr, "Error: %s (%s)\n", e.mOperation, e.FormatError(buf));
    }

}

AQRecorder::AQRecorder()
{
    mIsRunning = false;
    mRecordPacket = 0;

    data = [[NSData alloc]init];
    restClient = [[RestClient sharedManager]retain];
}

AQRecorder::~AQRecorder()
{
    AudioQueueDispose(mQueue, TRUE);
    AudioFileClose(mRecordFile);

    if (mFileName){
     CFRelease(mFileName);   
    }

    [restClient release];
    [data release];
}

// ____________________________________________________________________________________
// Copy a queue's encoder's magic cookie to an audio file.
void AQRecorder::CopyEncoderCookieToFile()
{
    UInt32 propertySize;
    // get the magic cookie, if any, from the converter     
    OSStatus err = AudioQueueGetPropertySize(mQueue, kAudioQueueProperty_MagicCookie, &propertySize);

    // we can get a noErr result and also a propertySize == 0
    // -- if the file format does support magic cookies, but this file doesn't have one.
    if (err == noErr && propertySize > 0) {
        Byte *magicCookie = new Byte[propertySize];
        UInt32 magicCookieSize;
        XThrowIfError(AudioQueueGetProperty(mQueue, kAudioQueueProperty_MagicCookie, magicCookie, &propertySize), "get audio converter's magic cookie");
        magicCookieSize = propertySize; // the converter lies and tells us the wrong size

        // now set the magic cookie on the output file
        UInt32 willEatTheCookie = false;
        // the converter wants to give us one; will the file take it?
        err = AudioFileGetPropertyInfo(mRecordFile, kAudioFilePropertyMagicCookieData, NULL, &willEatTheCookie);
        if (err == noErr && willEatTheCookie) {
            err = AudioFileSetProperty(mRecordFile, kAudioFilePropertyMagicCookieData, magicCookieSize, magicCookie);
            XThrowIfError(err, "set audio file's magic cookie");
        }
        delete[] magicCookie;
    }
}

void AQRecorder::SetupAudioFormat(UInt32 inFormatID)
{
    memset(&mRecordFormat, 0, sizeof(mRecordFormat));

    UInt32 size = sizeof(mRecordFormat.mSampleRate);
    XThrowIfError(AudioSessionGetProperty(  kAudioSessionProperty_CurrentHardwareSampleRate,
                                        &size, 
                                        &mRecordFormat.mSampleRate), "couldn't get hardware sample rate");

    // override the sample rate to 8 kHz instead of the device's hardware sample rate

    mRecordFormat.mSampleRate = 8000.0;

    size = sizeof(mRecordFormat.mChannelsPerFrame);
    XThrowIfError(AudioSessionGetProperty(  kAudioSessionProperty_CurrentHardwareInputNumberChannels, 
                                        &size, 
                                        &mRecordFormat.mChannelsPerFrame), "couldn't get input channel count");


//    mRecordFormat.mChannelsPerFrame = 1;

    mRecordFormat.mFormatID = inFormatID;
    if (inFormatID == kAudioFormatLinearPCM)
    {
        // if we want pcm, default to signed 16-bit little-endian
        mRecordFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
        mRecordFormat.mBitsPerChannel = 16;
        mRecordFormat.mBytesPerPacket = mRecordFormat.mBytesPerFrame = (mRecordFormat.mBitsPerChannel / 8) * mRecordFormat.mChannelsPerFrame;
        mRecordFormat.mFramesPerPacket = 1;
    }

    if (inFormatID == kAudioFormatULaw) {
        NSLog(@"is ulaw");
        mRecordFormat.mSampleRate = 8000.0;
        mRecordFormat.mFormatFlags = 0;
        mRecordFormat.mFramesPerPacket = 1;
        mRecordFormat.mChannelsPerFrame = 1;
        mRecordFormat.mBitsPerChannel = 8;
        mRecordFormat.mBytesPerPacket = 1;
        mRecordFormat.mBytesPerFrame = 1;
    }
}

NSString * GetDocumentDirectory(void)
{    
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    return basePath;
}


void AQRecorder::StartRecord(CFStringRef inRecordFile)
{
    int i, bufferByteSize;
    UInt32 size;
    CFURLRef url;

    try {       
        mFileName = CFStringCreateCopy(kCFAllocatorDefault, inRecordFile);

        // specify the recording format
        SetupAudioFormat(kAudioFormatULaw /*kAudioFormatLinearPCM*/);

        // create the queue
        XThrowIfError(AudioQueueNewInput(
                                      &mRecordFormat,
                                      MyInputBufferHandler,
                                      this /* userData */,
                                      NULL /* run loop */, NULL /* run loop mode */,
                                      0 /* flags */, &mQueue), "AudioQueueNewInput failed");

        // get the record format back from the queue's audio converter --
        // the file may require a more specific stream description than was necessary to create the encoder.
        mRecordPacket = 0;

        size = sizeof(mRecordFormat);
        XThrowIfError(AudioQueueGetProperty(mQueue, kAudioQueueProperty_StreamDescription,  
                                         &mRecordFormat, &size), "couldn't get queue's format");

        NSString *basePath = GetDocumentDirectory();
        NSString *recordFile = [basePath /*NSTemporaryDirectory()*/ stringByAppendingPathComponent: (NSString*)inRecordFile];   

        url = CFURLCreateWithString(kCFAllocatorDefault, (CFStringRef)recordFile, NULL);

        // create the audio file
        XThrowIfError(AudioFileCreateWithURL(url, kAudioFileCAFType, &mRecordFormat, kAudioFileFlags_EraseFile,
                                          &mRecordFile), "AudioFileCreateWithURL failed");
        CFRelease(url);

        // copy the cookie first to give the file object as much info as we can about the data going in
        // not necessary for pcm, but required for some compressed audio
        CopyEncoderCookieToFile();


        // allocate and enqueue buffers
        bufferByteSize = ComputeRecordBufferSize(&mRecordFormat, kBufferDurationSeconds);   // enough bytes for half a second
        for (i = 0; i < kNumberRecordBuffers; ++i) {
            XThrowIfError(AudioQueueAllocateBuffer(mQueue, bufferByteSize, &mBuffers[i]),
                       "AudioQueueAllocateBuffer failed");
            XThrowIfError(AudioQueueEnqueueBuffer(mQueue, mBuffers[i], 0, NULL),
                       "AudioQueueEnqueueBuffer failed");
        }
        // start the queue
        mIsRunning = true;
        XThrowIfError(AudioQueueStart(mQueue, NULL), "AudioQueueStart failed");
    }
    catch (CAXException &e) {
        char buf[256];
        fprintf(stderr, "Error: %s (%s)\n", e.mOperation, e.FormatError(buf));
    }
    catch (...) {
        fprintf(stderr, "An unknown error occurred\n");
    }   

}

void AQRecorder::StopRecord()
{
    // end recording
    mIsRunning = false;
//    XThrowIfError(AudioQueueReset(mQueue), "AudioQueueStop failed");  
    XThrowIfError(AudioQueueStop(mQueue, true), "AudioQueueStop failed");   
    // a codec may update its cookie at the end of an encoding session, so reapply it to the file now
    CopyEncoderCookieToFile();
    if (mFileName)
    {
        CFRelease(mFileName);
        mFileName = NULL;
    }
    AudioQueueDispose(mQueue, true);
    AudioFileClose(mRecordFile);
}

1 Answer:

Answer 0 (score: 0)

I changed #define kBufferDurationSeconds from .5 to 5.0. The clicking is still there, but it is much less noticeable.
If you have a suggestion or an actual answer, please still post it, because this is not a real fix, just a workaround that sounds better than before.
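
For reference, the only code change was the buffer-duration constant (a minimal sketch; the constant is assumed here to live in AQRecorder.h as a #define, as in Apple's SpeakHere sample):

// AQRecorder.h -- assumed location of the constant
// #define kBufferDurationSeconds .5    // original value: half-second buffers
#define kBufferDurationSeconds 5.0      // larger buffers mean fewer buffer boundaries, so fewer audible clicks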

I also tried appending several buffers' worth of data together before sending it to the server, and that seemed to help as well.
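
Below is a minimal sketch of that "append several buffers before sending" idea, written against the callback above. The pendingData accumulator, the buffersSinceUpload counter, and the threshold of 4 are assumptions for illustration only; restClient and uploadAudioData:url: are the same ones used in MyInputBufferHandler, and the file is non-ARC like the rest of AQRecorder.mm.

// Inside MyInputBufferHandler, in place of the per-buffer upload:
static NSMutableData *pendingData = nil;   // hypothetical accumulator for several buffers of audio
static int buffersSinceUpload = 0;         // hypothetical count of buffers collected so far

if (pendingData == nil)
    pendingData = [[NSMutableData alloc] init];

// Append this callback's audio rather than uploading it immediately
[pendingData appendBytes:inBuffer->mAudioData length:inBuffer->mAudioDataByteSize];

if (++buffersSinceUpload >= 4) {           // threshold is a guess; tune by ear and by network latency
    // dataWithData: returns an autoreleased copy, so nothing extra to release here
    [restClient uploadAudioData:[NSData dataWithData:pendingData] url:nil];
    [pendingData setLength:0];
    buffersSinceUpload = 0;
}

Batching like this trades some latency for fewer, larger uploads, which is presumably why the clicks became less frequent rather than disappearing.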