How to play music with AudioUnit and AUGraph in the iPhone SDK

Date: 2012-04-12 13:17:40

Tags: iphone core-audio

I am new to the AudioUnit and AUGraph services. I need to play a single sound file using them. I downloaded the MixerHost sample project from Apple's developer site and changed some of the code, but now it crashes.

My modified code is:

#import "JKAudioPlayer.h"
#pragma mark Mixer input bus render callback

// Declared as AURenderCallback in AudioUnit/AUComponent.h. See Audio Unit Component Services Reference.
static OSStatus inputRenderCallback (

                                     void                        *inRefCon,  
                                     AudioUnitRenderActionFlags  *ioActionFlags, 
                                     const AudioTimeStamp        *inTimeStamp,  
                                     UInt32                      inBusNumber,  
                                     UInt32                      inNumberFrames,
                                     AudioBufferList             *ioData   
                                     ) {
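    // The refCon is treated as an array of soundStructs, one element per mixer input bus.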

    soundStructPtr    soundStructPointerArray   = (soundStructPtr) inRefCon;
    UInt32            frameTotalForSound        = soundStructPointerArray[inBusNumber].frameCount;
    BOOL              isStereo                  = soundStructPointerArray[inBusNumber].isStereo;

    AudioUnitSampleType *dataInLeft;
    AudioUnitSampleType *dataInRight;

    dataInLeft                 = soundStructPointerArray[inBusNumber].audioDataLeft;
    if (isStereo) dataInRight  = soundStructPointerArray[inBusNumber].audioDataRight;

    AudioUnitSampleType *outSamplesChannelLeft;
    AudioUnitSampleType *outSamplesChannelRight;

    outSamplesChannelLeft                 = (AudioUnitSampleType *) ioData->mBuffers[0].mData;
    if (isStereo) outSamplesChannelRight  = (AudioUnitSampleType *) ioData->mBuffers[1].mData;

    UInt32 sampleNumber = soundStructPointerArray[0].sampleNumber;

    for (UInt32 frameNumber = 0; frameNumber < inNumberFrames; ++frameNumber) {
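        // Copy one sample per output frame, wrapping back to the start of the sound at the end of the file.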
        outSamplesChannelLeft[frameNumber]                 = dataInLeft[sampleNumber];
        if (isStereo) outSamplesChannelRight[frameNumber]  = dataInRight[sampleNumber];
        sampleNumber++;
        if (sampleNumber >= frameTotalForSound) sampleNumber = 0;
    }
    soundStructPointerArray[inBusNumber].sampleNumber = sampleNumber;
    return noErr;
}

#pragma mark -
#pragma mark Audio route change listener callback
static void audioRouteChangeListenerCallback (
                                              void                      *inUserData,
                                              AudioSessionPropertyID    inPropertyID,
                                              UInt32                    inPropertyValueSize,
                                              const void                *inPropertyValue
                                              ) {
    if (inPropertyID != kAudioSessionProperty_AudioRouteChange) return;
    JKAudioPlayer *audioObject = (__bridge_transfer JKAudioPlayer *) inUserData;  
    if (NO == audioObject.isPlaying) {
        NSLog (@"Audio route change while application audio is stopped.");
        return;
    } else {
        CFDictionaryRef routeChangeDictionary = inPropertyValue;
        CFNumberRef routeChangeReasonRef =
        CFDictionaryGetValue (
                              routeChangeDictionary,
                              CFSTR (kAudioSession_AudioRouteChangeKey_Reason)
                              );

        SInt32 routeChangeReason;

        CFNumberGetValue (
                          routeChangeReasonRef,
                          kCFNumberSInt32Type,
                          &routeChangeReason
                          );        
        if (routeChangeReason == kAudioSessionRouteChangeReason_OldDeviceUnavailable) {

            NSLog (@"Audio output device was removed; stopping audio playback.");
            NSString *MixerHostAudioObjectPlaybackStateDidChangeNotification = @"MixerHostAudioObjectPlaybackStateDidChangeNotification";
            [[NSNotificationCenter defaultCenter] postNotificationName: MixerHostAudioObjectPlaybackStateDidChangeNotification object: audioObject]; 

        } else {

            NSLog (@"A route change occurred that does not require stopping application audio.");
        }
    }
}

@implementation JKAudioPlayer
@synthesize monoStreamFormat;           // mono format for use in buffer and mixer input for "beats" sound
@synthesize graphSampleRate;            // sample rate to use throughout audio processing chain
@synthesize mixerUnit;                  // the Multichannel Mixer unit
@synthesize playing;                    // Boolean flag to indicate whether audio is playing or not
@synthesize interruptedDuringPlayback;  // Boolean flag to indicate whether audio was playing when an interruption arrived
- (id) init {

    self = [super init];

    if (!self) return nil;
    NSURL *beatsLoop   = [[NSBundle mainBundle] URLForResource: @"beatsMono"
                                                 withExtension: @"caf"];
    sourceURL   = (__bridge_retained  CFURLRef) beatsLoop;
    self.interruptedDuringPlayback = NO;
    [self setupAudioSession];    
    [self setupMonoStreamFormat];
    [self readAudioFilesIntoMemory];
    [self configureAndInitializeAudioProcessingGraph];
    [self enableMixerInput: 0 isOn: YES];
    [self setMixerOutputGain:1];
    [self setMixerInput: 0 gain:YES];
    return self;
}
- (void) setupAudioSession {
    AVAudioSession *mySession = [AVAudioSession sharedInstance];
    [mySession setDelegate: self];
    NSError *audioSessionError = nil;
    [mySession setCategory: AVAudioSessionCategoryPlayback
                     error: &audioSessionError];

    if (audioSessionError != nil) {

        NSLog (@"Error setting audio session category.");
        return;
    }
    self.graphSampleRate = 44100.0;    // Hertz
    [mySession setPreferredHardwareSampleRate: graphSampleRate
                                        error: &audioSessionError];
    if (audioSessionError != nil) {

        NSLog (@"Error setting preferred hardware sample rate.");
        return;
    }
    [mySession setActive: YES
                   error: &audioSessionError];

    if (audioSessionError != nil) {

        NSLog (@"Error activating audio session during initial setup.");
        return;
    }    
    self.graphSampleRate = [mySession currentHardwareSampleRate];
    AudioSessionAddPropertyListener (
                                     kAudioSessionProperty_AudioRouteChange,
                                     audioRouteChangeListenerCallback,
                                     (__bridge void*)self
                                     );
}
- (void) setupMonoStreamFormat {
    size_t bytesPerSample = sizeof (AudioUnitSampleType);
    monoStreamFormat.mFormatID          = kAudioFormatLinearPCM;
    monoStreamFormat.mFormatFlags       = kAudioFormatFlagsAudioUnitCanonical;
    monoStreamFormat.mBytesPerPacket    = bytesPerSample;
    monoStreamFormat.mFramesPerPacket   = 1;
    monoStreamFormat.mBytesPerFrame     = bytesPerSample;
    monoStreamFormat.mChannelsPerFrame  = 1;                  // 1 indicates mono
    monoStreamFormat.mBitsPerChannel    = 8 * bytesPerSample;
    monoStreamFormat.mSampleRate        = graphSampleRate;
}


- (void) readAudioFilesIntoMemory {
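    // Open the source audio file and read its entire contents into memory in the canonical AudioUnit sample format.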
    ExtAudioFileRef audioFileObject = 0;
    OSStatus result = ExtAudioFileOpenURL (sourceURL, &audioFileObject);
    if (noErr != result || NULL == audioFileObject) {NSLog(@"ExtAudioFileOpenURL failed %ld", result); return;}
    UInt64 totalFramesInFile = 0;
    UInt32 frameLengthPropertySize = sizeof (totalFramesInFile);
    result =    ExtAudioFileGetProperty (
                                         audioFileObject,
                                         kExtAudioFileProperty_FileLengthFrames,
                                         &frameLengthPropertySize,
                                         &totalFramesInFile
                                         );

    if (noErr != result) {NSLog(@"ExtAudioFileGetProperty (audio file length in frames %ld",result); return;}
    soundStruct.frameCount = totalFramesInFile;
    AudioStreamBasicDescription fileAudioFormat = {0};
    UInt32 formatPropertySize = sizeof (fileAudioFormat);
    result =    ExtAudioFileGetProperty (
                                         audioFileObject,
                                         kExtAudioFileProperty_FileDataFormat,
                                         &formatPropertySize,
                                         &fileAudioFormat
                                         );

    if (noErr != result) {NSLog( @"ExtAudioFileGetProperty (file audio format)%ld",result); return;}

    UInt32 channelCount = fileAudioFormat.mChannelsPerFrame;
    soundStruct.audioDataLeft =
    (AudioUnitSampleType *) calloc (totalFramesInFile, sizeof (AudioUnitSampleType));

    AudioStreamBasicDescription importFormat = {0};
    if (1 == channelCount) {

        soundStruct.isStereo = NO;
        importFormat = monoStreamFormat;

    } else {

        NSLog (@"*** WARNING: File format not supported - wrong number of channels");
        ExtAudioFileDispose (audioFileObject);
        return;
    }
    result =    ExtAudioFileSetProperty (
                                         audioFileObject,
                                         kExtAudioFileProperty_ClientDataFormat,
                                         sizeof (importFormat),
                                         &importFormat
                                         );

    if (noErr != result) {NSLog( @"ExtAudioFileSetProperty (client data format %ld", result); return;}
    AudioBufferList *bufferList;
    bufferList = (AudioBufferList *) malloc (
                                             sizeof (AudioBufferList) + sizeof (AudioBuffer) * (channelCount - 1)
                                             );

    if (NULL == bufferList) {NSLog (@"*** malloc failure for allocating bufferList memory"); return;}

    bufferList->mNumberBuffers = channelCount;
    AudioBuffer emptyBuffer = {0};
    size_t arrayIndex;
    for (arrayIndex = 0; arrayIndex < channelCount; arrayIndex++) {
        bufferList->mBuffers[arrayIndex] = emptyBuffer;
    }
    bufferList->mBuffers[0].mNumberChannels  = 1;
    bufferList->mBuffers[0].mDataByteSize    = totalFramesInFile * sizeof (AudioUnitSampleType);
    bufferList->mBuffers[0].mData            = soundStruct.audioDataLeft;

    if (2 == channelCount) {
        bufferList->mBuffers[1].mNumberChannels  = 1;
        bufferList->mBuffers[1].mDataByteSize    = totalFramesInFile * sizeof (AudioUnitSampleType);
        bufferList->mBuffers[1].mData            = soundStruct.audioDataRight;
    }

    UInt32 numberOfPacketsToRead = (UInt32) totalFramesInFile;

    result = ExtAudioFileRead (
                               audioFileObject,
                               &numberOfPacketsToRead,
                               bufferList
                               );

    free (bufferList);

    if (noErr != result) {

        NSLog( @"ExtAudioFileRead failure - %ld " , result);
        free (soundStruct.audioDataLeft);
        soundStruct.audioDataLeft = 0;

        if (2 == channelCount) {
            free (soundStruct.audioDataRight);
            soundStruct.audioDataRight = 0;
        }

        ExtAudioFileDispose (audioFileObject);            
        return;
    }

    NSLog (@"Finished reading file  into memory");
    soundStruct.sampleNumber = 0;
    ExtAudioFileDispose (audioFileObject);
}

- (void) configureAndInitializeAudioProcessingGraph {

    NSLog (@"Configuring and then initializing audio processing graph");
    OSStatus result = noErr;
    result = NewAUGraph (&processingGraph);

    if (noErr != result) {[self printErrorMessage: @"NewAUGraph" withStatus: result]; return;}

    // I/O unit
    AudioComponentDescription iOUnitDescription;
    iOUnitDescription.componentType          = kAudioUnitType_Output;
    iOUnitDescription.componentSubType       = kAudioUnitSubType_RemoteIO;
    iOUnitDescription.componentManufacturer  = kAudioUnitManufacturer_Apple;
    iOUnitDescription.componentFlags         = 0;
    iOUnitDescription.componentFlagsMask     = 0;

    // Multichannel mixer unit
    AudioComponentDescription MixerUnitDescription;
    MixerUnitDescription.componentType          = kAudioUnitType_Mixer;
    MixerUnitDescription.componentSubType       = kAudioUnitSubType_MultiChannelMixer;
    MixerUnitDescription.componentManufacturer  = kAudioUnitManufacturer_Apple;
    MixerUnitDescription.componentFlags         = 0;
    MixerUnitDescription.componentFlagsMask     = 0;


    //............................................................................
    // Add nodes to the audio processing graph.
    NSLog (@"Adding nodes to audio processing graph");

    AUNode   iONode;         // node for I/O unit
    AUNode   mixerNode;      // node for Multichannel Mixer unit

    // Add the nodes to the audio processing graph
    result =    AUGraphAddNode (
                                processingGraph,
                                &iOUnitDescription,
                                &iONode);

    if (noErr != result) {[self printErrorMessage: @"AUGraphNewNode failed for I/O unit" withStatus: result]; return;}


    result =    AUGraphAddNode (
                                processingGraph,
                                &MixerUnitDescription,
                                &mixerNode
                                );

    if (noErr != result) {[self printErrorMessage: @"AUGraphNewNode failed for Mixer unit" withStatus: result]; return;}
    result = AUGraphOpen (processingGraph);

    if (noErr != result) {[self printErrorMessage: @"AUGraphOpen" withStatus: result]; return;}
    result = AUGraphNodeInfo (
                              processingGraph,
                              mixerNode,
                              NULL,
                              &mixerUnit
                              );

    if (noErr != result) {[self printErrorMessage: @"AUGraphNodeInfo" withStatus: result]; return;}
    UInt32 busCount   = 2;    // bus count for mixer unit input

    UInt32 beatsBus   = 1;    // mixer unit bus 1 will be mono and will take the beats sound

    NSLog (@"Setting mixer unit input bus count to: %lu", busCount);
    result = AudioUnitSetProperty (
                                   mixerUnit,
                                   kAudioUnitProperty_ElementCount,
                                   kAudioUnitScope_Input,
                                   0,
                                   &busCount,
                                   sizeof (busCount)
                                   );

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetProperty (set mixer unit bus count)" withStatus: result]; return;}
    UInt32 maximumFramesPerSlice = 4096;

    result = AudioUnitSetProperty (
                                   mixerUnit,
                                   kAudioUnitProperty_MaximumFramesPerSlice,
                                   kAudioUnitScope_Global,
                                   0,
                                   &maximumFramesPerSlice,
                                   sizeof (maximumFramesPerSlice)
                                   );

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetProperty (set mixer unit input stream format)" withStatus: result]; return;}
    for (UInt16 busNumber = 0; busNumber < busCount; ++busNumber) {

        // Set up the structure that contains the input render callback
        AURenderCallbackStruct inputCallbackStruct;
        inputCallbackStruct.inputProc        = &inputRenderCallback;
        inputCallbackStruct.inputProcRefCon  = &soundStruct;
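        // Note: the same single soundStruct is passed as the refCon for every bus in this loop.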

        NSLog (@"Registering the render callback with mixer unit input bus %u", busNumber);
        // Set a callback for the specified node's specified input
        result = AUGraphSetNodeInputCallback (
                                              processingGraph,
                                              mixerNode,
                                              busNumber,
                                              &inputCallbackStruct
                                              );

        if (noErr != result) {[self printErrorMessage: @"AUGraphSetNodeInputCallback" withStatus: result]; return;}
    }


    NSLog (@"Setting mono stream format for mixer unit \"beats\" input bus");
    result = AudioUnitSetProperty (
                                   mixerUnit,
                                   kAudioUnitProperty_StreamFormat,
                                   kAudioUnitScope_Input,
                                   beatsBus,
                                   &monoStreamFormat,
                                   sizeof (monoStreamFormat)
                                   );

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetProperty (set mixer unit beats input bus stream format)" withStatus: result];return;}


    NSLog (@"Setting sample rate for mixer unit output scope");
    result = AudioUnitSetProperty (
                                   mixerUnit,
                                   kAudioUnitProperty_SampleRate,
                                   kAudioUnitScope_Output,
                                   0,
                                   &graphSampleRate,
                                   sizeof (graphSampleRate)
                                   );

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetProperty (set mixer unit output stream format)" withStatus: result]; return;}
     NSLog (@"Connecting the mixer output to the input of the I/O unit output element");

    result = AUGraphConnectNodeInput (
                                      processingGraph,
                                      mixerNode,         // source node
                                      0,                 // source node output bus number
                                      iONode,            // destination node
                                      0                  // destination node input bus number
                                      );

    if (noErr != result) {[self printErrorMessage: @"AUGraphConnectNodeInput" withStatus: result]; return;}
    CAShow (processingGraph);
    result = AUGraphInitialize (processingGraph);

    if (noErr != result) {[self printErrorMessage: @"AUGraphInitialize" withStatus: result]; return;}
}

// Start playback
- (void) startAUGraph  {
    OSStatus result = AUGraphStart (processingGraph);
    if (noErr != result) {[self printErrorMessage: @"AUGraphStart" withStatus: result]; return;}

    self.playing = YES;
}

// Stop playback
- (void) stopAUGraph {
    Boolean isRunning = false;
    OSStatus result = AUGraphIsRunning (processingGraph, &isRunning);
    if (noErr != result) {[self printErrorMessage: @"AUGraphIsRunning" withStatus: result]; return;}

    if (isRunning) {

        result = AUGraphStop (processingGraph);
        if (noErr != result) {[self printErrorMessage: @"AUGraphStop" withStatus: result]; return;}
        self.playing = NO;
    }
}

- (void) enableMixerInput: (UInt32) inputBus isOn: (AudioUnitParameterValue) isOnValue {
    OSStatus result = AudioUnitSetParameter (
                                             mixerUnit,
                                             kMultiChannelMixerParam_Enable,
                                             kAudioUnitScope_Input,
                                             inputBus,
                                             isOnValue,
                                             0
                                             );

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetParameter (enable the mixer unit)" withStatus: result]; return;}


}

- (void) setMixerInput: (UInt32) inputBus gain: (AudioUnitParameterValue) newGain {
    NSLog(@"mixer input %lu gain %f",inputBus,newGain);

    OSStatus result = AudioUnitSetParameter (
                                             mixerUnit,
                                             kMultiChannelMixerParam_Volume,
                                             kAudioUnitScope_Input,
                                             inputBus,
                                             newGain,
                                             0
                                             );

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetParameter (set mixer unit input volume)" withStatus: result]; return;}

}
- (void) setMixerOutputGain: (AudioUnitParameterValue) newGain {
    NSLog(@"mixer output  gain %f",newGain);
    OSStatus result = AudioUnitSetParameter (
                                             mixerUnit,
                                             kMultiChannelMixerParam_Volume,
                                             kAudioUnitScope_Output,
                                             0,
                                             newGain,
                                             0
                                             );

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetParameter (set mixer unit output volume)" withStatus: result]; return;}

}
- (void) printErrorMessage: (NSString *) errorString withStatus: (OSStatus) result {

    char resultString[5];
    UInt32 swappedResult = CFSwapInt32HostToBig (result);
    bcopy (&swappedResult, resultString, 4);
    resultString[4] = '\0';

    NSLog (
           @"*** %@ error: %s",
           errorString,
           resultString
           );
}

@end

The application crashes at:

outSamplesChannelLeft[frameNumber] = dataInLeft[sampleNumber];
if (isStereo) outSamplesChannelRight[frameNumber] = dataInRight[sampleNumber];

What is the error in my code? Can anyone help me?

1 Answer:

Answer 0 (score: 0):

The following SO question contains working code for playing a single file with audio units (even with a mixer in the middle, which you probably do not need). Although that question says the code does not work, it worked for me out of the box; just add an AUGraphStart(_graph) call at the end. It uses the AudioFilePlayer unit, so there is no need to write your own render callback.
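For reference, below is a minimal sketch of that approach, written against the documented AUGraph and AudioFilePlayer APIs rather than copied from the linked question. The helper name startFilePlayback and the static variables are illustrative, and error checking is omitted; each call returns an OSStatus that real code should check.

#import <AudioToolbox/AudioToolbox.h>
#import <AudioUnit/AudioUnit.h>

static AUGraph     _graph;
static AudioUnit   _filePlayerUnit;
static AudioFileID _audioFile;

// Build an AUGraph of AudioFilePlayer -> RemoteIO, schedule the whole file, and start.
static void startFilePlayback (CFURLRef fileURL) {
    NewAUGraph (&_graph);

    AudioComponentDescription playerDesc = {
        .componentType          = kAudioUnitType_Generator,
        .componentSubType       = kAudioUnitSubType_AudioFilePlayer,
        .componentManufacturer  = kAudioUnitManufacturer_Apple
    };
    AudioComponentDescription ioDesc = {
        .componentType          = kAudioUnitType_Output,
        .componentSubType       = kAudioUnitSubType_RemoteIO,
        .componentManufacturer  = kAudioUnitManufacturer_Apple
    };

    AUNode playerNode, ioNode;
    AUGraphAddNode (_graph, &playerDesc, &playerNode);
    AUGraphAddNode (_graph, &ioDesc, &ioNode);
    AUGraphOpen (_graph);
    AUGraphNodeInfo (_graph, playerNode, NULL, &_filePlayerUnit);

    // Player output 0 feeds the I/O unit's output element; no mixer is required.
    AUGraphConnectNodeInput (_graph, playerNode, 0, ioNode, 0);
    AUGraphInitialize (_graph);

    // Tell the file player which file to play.
    AudioFileOpenURL (fileURL, kAudioFileReadPermission, 0, &_audioFile);
    AudioUnitSetProperty (_filePlayerUnit, kAudioUnitProperty_ScheduledFileIDs,
                          kAudioUnitScope_Global, 0, &_audioFile, sizeof (_audioFile));

    // Schedule the entire file as one region, played once (mLoopCount = 0).
    UInt64 packetCount = 0;
    UInt32 propertySize = sizeof (packetCount);
    AudioFileGetProperty (_audioFile, kAudioFilePropertyAudioDataPacketCount,
                          &propertySize, &packetCount);
    AudioStreamBasicDescription fileFormat = {0};
    propertySize = sizeof (fileFormat);
    AudioFileGetProperty (_audioFile, kAudioFilePropertyDataFormat,
                          &propertySize, &fileFormat);

    ScheduledAudioFileRegion region = {0};
    region.mTimeStamp.mFlags      = kAudioTimeStampSampleTimeValid;
    region.mTimeStamp.mSampleTime = 0;
    region.mAudioFile             = _audioFile;
    region.mLoopCount             = 0;
    region.mStartFrame            = 0;
    region.mFramesToPlay          = (UInt32) (packetCount * fileFormat.mFramesPerPacket);
    AudioUnitSetProperty (_filePlayerUnit, kAudioUnitProperty_ScheduledFileRegion,
                          kAudioUnitScope_Global, 0, &region, sizeof (region));

    // Prime the player; 0 means the unit's default number of frames.
    UInt32 defaultNumberOfFrames = 0;
    AudioUnitSetProperty (_filePlayerUnit, kAudioUnitProperty_ScheduledFilePrime,
                          kAudioUnitScope_Global, 0,
                          &defaultNumberOfFrames, sizeof (defaultNumberOfFrames));

    // A start time of -1 means "start on the next render cycle".
    AudioTimeStamp startTime = {0};
    startTime.mFlags      = kAudioTimeStampSampleTimeValid;
    startTime.mSampleTime = -1;
    AudioUnitSetProperty (_filePlayerUnit, kAudioUnitProperty_ScheduleStartTimeStamp,
                          kAudioUnitScope_Global, 0, &startTime, sizeof (startTime));

    // The step the answer calls out: actually start the graph.
    AUGraphStart (_graph);
}

Called with the same URL the question loads (the beatsMono.caf bundle resource), this should play the file once, with no hand-written render callback and no soundStruct bookkeeping.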