嗨,我有一个应用程序,你必须按住一个按钮,吹进手机,它发出声音,这是代码:
#import "AirInstrumentViewController.h"
@implementation AirInstrumentViewController
@synthesize audiOfA;
- (void)didReceiveMemoryWarning
{
/ / Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
#pragma mark - View lifecycle
- (void)viewDidLoad
{
[super viewDidLoad];
NSURL *url = [NSURL fileURLWithPath:@"/dev/null"];
NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithFloat: 44100.0], AVSampleRateKey,
[NSNumber numberWithInt: kAudioFormatAppleLossless], AVFormatIDKey,
[NSNumber numberWithInt: 1], AVNumberOfChannelsKey,
[NSNumber numberWithInt: AVAudioQualityMax], AVEncoderAudioQualityKey,
nil];
NSError *error;
recorder = [[AVAudioRecorder alloc] initWithURL:url settings:settings error:&error];
if (recorder) {
[recorder prepareToRecord];
recorder.meteringEnabled = YES;
[recorder record];
levelTimer = [NSTimer scheduledTimerWithTimeInterval: 0.03 target: self selector: @selector(levelTimerCallback:) userInfo: nil repeats: YES];
} else
NSLog([error description]);
}
- (void)levelTimerCallback:(NSTimer *)timer {
[recorder updateMeters];
const double ALPHA = 0.05;
double peakPowerForChannel = pow(10, (0.05 * [recorder peakPowerForChannel:0]));
lowPassResults = ALPHA * peakPowerForChannel + (1.0 - ALPHA) * lowPassResults;
if (lowPassResults > 0.55) {
NSLog(@"Mic blow detected");
if (aIsBeingTouched == YES) {
NSString *musicString = [[NSBundle mainBundle] pathForResource:@"A" ofType:@"aifc"];
audiOfA = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:musicString] error:NULL];
[audiOfA play];
} }else {
[audiOfA stop];
}
}
-(void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
UITouch *touch = [[event allTouches] anyObject];
CGPoint touchLocation = [touch locationInView:self.view];
if (CGRectContainsPoint(aImage.frame, touchLocation)) {
aImage.image = [UIImage imageNamed:@"active.png"];
aIsBeingTouched = YES;
}
}
-(void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event {
[audiOfA stop];
aImage.image = [UIImage imageNamed:@"a.png"];
aIsBeingTouched = NO;
}
- (void)viewDidUnload
{
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
@end
我希望你明白我想做什么。在这里: if(lowPassResults> 0.55){ NSLog(@“麦克风检测”);
if (aIsBeingTouched == YES) {
NSString *musicString = [[NSBundle mainBundle] pathForResource:@"A" ofType:@"aifc"];
audiOfA = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:musicString] error:NULL];
[audiOfA play];
} }else {
[audiOfA stop];
}
}
声音持续时间更长,然后停止。触摸结束时也一样。如果我吹 虽然没有按住图像,但是当我最终按住图像时,即使我没有吹也会播放,因为当它没有被按下时它被吹了,它可能已经记录了它或者有些东西!我不知道如何解决所有这些问题,请帮忙!
答案 0 :(得分:1)
NSTimer
是非常不可靠的。根据{{3}},引用:
定时器的时间间隔的有效分辨率限制在50-100毫秒的量级
可能是因为你如此快速地调用它们(0.03秒或30毫秒非常快),NSTimer
没有按照确切的间隔进行调用,因此它正在跳过。
您可能希望尝试多线程处理,以便与UI
相关的所有内容在主线程上完成,AVAudio
部分在单独的线程上完成。 Apple Documentation是一个解释NSOperation
的链接,这是一种简单的方法,并且鉴于您提供的代码,它应该适合您。
希望有帮助!