EZMicrophone EXC_BAD_ACCESS

时间:2014-01-13 08:38:54

标签: ios objective-c audio

我正在开发一个录制并回放音频的项目。为了录制音频，我使用了 EZAudio 库，它可以显示波形。这个 ViewController 被推入（push）到一个 UINavigationController 中，但不幸的是，当我返回上一级页面时，程序抛出了 EXC_BAD_ACCESS，我不知道该如何解决……我怀疑问题出在 UINavigationController 上……

这是代码接口:

#import <UIKit/UIKit.h>
#import "EZAudio.h"
#import <AVFoundation/AVFoundation.h>



    @interface AudioViewController : UIViewController<AVAudioPlayerDelegate, EZMicrophoneDelegate>
    {

    }
    @property (nonatomic, weak) IBOutlet EZAudioPlotGL *plotGL;
    @property (nonatomic,assign) BOOL isRecording;
    @property (nonatomic,retain) EZMicrophone *microphone;
    @property (nonatomic,retain) EZRecorder *recorder;
    @property (nonatomic,retain) IBOutlet UIButton *buttonRecorder;

    -(IBAction)recordingAudio:(id)sender;

    @end

以下是代码实现:

@implementation AudioViewController
// NOTE: the explicit @synthesize list was removed — auto-synthesis generates
// the same accessors, and no code in this class touches ivars directly.

#pragma mark - Actions

/// Placeholder kept so any existing Interface Builder wiring stays valid.
- (void)toggleRecording:(id)sender {
}

/// Flips the recording flag; delegate callbacks only append buffers while
/// it is YES. (Avoids the `== YES` comparison — BOOL is a signed char, any
/// nonzero value is true.)
- (void)recordingAudio:(id)sender {
    self.isRecording = !self.isRecording;
}

#pragma mark - View lifecycle

- (void)viewDidLoad {
    [super viewDidLoad];

    // Create the microphone and start delivering audio to this controller.
    self.microphone = [EZMicrophone microphoneWithDelegate:self];

    // Configure the waveform plot.
    self.plotGL.backgroundColor = [UIColor whiteColor];
    self.plotGL.color           = [UIColor yellowColor];
    self.plotGL.plotType        = EZPlotTypeBuffer;
    self.plotGL.shouldFill      = NO;
    self.plotGL.shouldMirror    = NO;

    [self.microphone startFetchingAudio];

    // Debug trace identifying whoever presented this controller.
    NSArray *syms = [NSThread callStackSymbols];
    if ([syms count] > 1) {
        NSLog(@"<%@ %p> %@ - caller: %@ ", [self class], self, NSStringFromSelector(_cmd),[syms objectAtIndex:1]);
    } else {
        NSLog(@"<%@ %p> %@", [self class], self, NSStringFromSelector(_cmd));
    }
}

/// Fix for the reported EXC_BAD_ACCESS: when the controller is popped off the
/// UINavigationController stack, the microphone's audio thread keeps running
/// and keeps messaging its delegate (self) after the controller has been
/// deallocated. Stop fetching before the controller can go away.
- (void)viewDidDisappear:(BOOL)animated {
    [super viewDidDisappear:animated];
    [self.microphone stopFetchingAudio];
    self.isRecording = NO;
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
}

#pragma mark - EZMicrophoneDelegate

/// Called on the audio thread with raw float samples; forwards channel 0 to
/// the plot on the main queue (UI work must run on main).
- (void)microphone:(EZMicrophone *)microphone
  hasAudioReceived:(float **)buffer
    withBufferSize:(UInt32)bufferSize
withNumberOfChannels:(UInt32)numberOfChannels {
    dispatch_async(dispatch_get_main_queue(), ^{
        [self.plotGL updateBuffer:buffer[0] withBufferSize:bufferSize];
    });
}

/// Invoked once the microphone knows its stream format; creates the recorder
/// with a matching source format so appended buffers are interpreted correctly.
- (void)microphone:(EZMicrophone *)microphone hasAudioStreamBasicDescription:(AudioStreamBasicDescription)audioStreamBasicDescription {
    [EZAudio printASBD:audioStreamBasicDescription];
    self.recorder = [EZRecorder recorderWithDestinationURL:[self getAudioFile]
                                           andSourceFormat:audioStreamBasicDescription];
}

/// Called on the audio thread with the raw AudioBufferList; appends it to the
/// recorder only while recording is active.
- (void)microphone:(EZMicrophone *)microphone
     hasBufferList:(AudioBufferList *)bufferList
    withBufferSize:(UInt32)bufferSize
withNumberOfChannels:(UInt32)numberOfChannels {
    if (self.isRecording) {
        [self.recorder appendDataFromBufferList:bufferList
                                 withBufferSize:bufferSize];
    }
}

#pragma mark - AVAudioPlayerDelegate

/// Stops microphone capture once playback finishes.
- (void)audioPlayerDidFinishPlaying:(AVAudioPlayer *)player successfully:(BOOL)flag {
    [self.microphone stopFetchingAudio];
}

#pragma mark - Utility

/// All candidate Documents directories for this app (usually exactly one).
- (NSArray *)applicationDocuments {
    return NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
}

/// The app's Documents directory path, or nil if it cannot be resolved
/// (-firstObject is nil-safe on an empty array).
- (NSString *)applicationDocumentsDirectory {
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *basePath = [paths firstObject];
    NSLog(@"basePath %@",basePath);
    return basePath;
}

/// Destination file URL for the recording (Documents/audio.wav).
- (NSURL *)getAudioFile {
    NSString *fileName = @"audio.wav";
    // stringByAppendingPathComponent: handles the separator correctly instead
    // of hand-building the path with a format string.
    NSString *path = [[self applicationDocumentsDirectory] stringByAppendingPathComponent:fileName];
    return [NSURL fileURLWithPath:path];
}

@end

1 个答案:

答案 0（得分：0）

请将 plotGL 属性的声明从 weak 改为 retain：

@property (nonatomic, weak) IBOutlet EZAudioPlotGL *plotGL; ---> @property (nonatomic, retain) IBOutlet EZAudioPlotGL *plotGL;