I am using Siri speech-to-text, but the audioEngine does not respond the way I expect.
The audioEngine starts once and stops after the first piece of speech is recognized, whereas I want it to keep recognizing speech over a period of time. Any suggestions are appreciated.
Here is the code I have so far:
NSLocale *locale = [NSLocale localeWithLocaleIdentifier:@"en-US"];
speechRecognizer = [[SFSpeechRecognizer alloc] initWithLocale:locale];
audioEngine = [[AVAudioEngine alloc] init];
if (speechRecognitionTask != nil) {
    [audioEngine stop];
    [speechRecognitionTask cancel];
    speechRecognitionTask = nil;
}
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
NSError *error = nil;
if (error == nil) {
    [audioSession setCategory:AVAudioSessionCategoryRecord error:&error];
    [audioSession setMode:AVAudioSessionModeMeasurement error:&error];
    [audioSession setActive:true error:&error];
} else {
    NSLog(@"audioSession properties weren't set because of an error.");
}
recognitionRequest = [[SFSpeechAudioBufferRecognitionRequest alloc] init];
AVAudioInputNode *inputNode = audioEngine.inputNode;
if (inputNode == nil) {
    NSLog(@"AudioEngine has no input node");
    return;
}
recognitionRequest.shouldReportPartialResults = true;
error = nil;
speechRecognitionTask = [speechRecognizer recognitionTaskWithRequest:recognitionRequest resultHandler:^(SFSpeechRecognitionResult *result, NSError *error1) {
    BOOL isFinal = false;
    if (result != nil) {
        self.textView.text = result.bestTranscription.formattedString;
        isFinal = ![result isFinal];
        [audioEngine stop];
        [inputNode removeTapOnBus:0];
        recognitionRequest = nil;
        speechRecognitionTask = nil;
    }
}];
AVAudioFormat *recordingFormat = [inputNode outputFormatForBus:0];
[inputNode installTapOnBus:0 bufferSize:1024 format:recordingFormat block:^(AVAudioPCMBuffer * _Nonnull buffer, AVAudioTime * _Nonnull when) {
    [recognitionRequest appendAudioPCMBuffer:buffer];
}];
NSError *error1;
[audioEngine prepare];
[audioEngine startAndReturnError:&error1];
if (error1 != nil) {
    NSLog(@"Error description: %@", error1.description);
}
Answer 0 (score: 0):

This version stops the engine only when the recognizer reports a final result, and uses a one-shot NSTimer to end recording a few seconds after speech has been recognized:
- (void)startRecording
{
    audioEngine = [[AVAudioEngine alloc] init];
    if (speechRecognitionTask != nil) {
        // Cancel the previous task before dropping the reference to it.
        [speechRecognitionTask cancel];
        speechRecognitionTask = nil;
    }
    [self showActivityIndicator];
    self.textView.text = @"Please speak the business";
    audioSession = [AVAudioSession sharedInstance];
    NSError *error = nil;
    if (error == nil) {
        [audioSession setCategory:AVAudioSessionCategoryRecord error:&error];
        [audioSession setMode:AVAudioSessionModeMeasurement error:&error];
        [audioSession setActive:true withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation error:&error];
    } else {
        NSLog(@"audioSession properties weren't set because of an error.");
    }
    recognitionRequest = [[SFSpeechAudioBufferRecognitionRequest alloc] init];
    inputNode = audioEngine.inputNode;
    if (inputNode == nil) {
        NSLog(@"AudioEngine has no input node");
    }
    if (recognitionRequest == nil) {
        NSLog(@"Unable to create an SFSpeechAudioBufferRecognitionRequest object");
    }
    recognitionRequest.shouldReportPartialResults = YES;
    error = nil;
    speechRecognitionTask = [speechRecognizer recognitionTaskWithRequest:recognitionRequest resultHandler:^(SFSpeechRecognitionResult * _Nullable result, NSError * _Nullable errorl) {
        BOOL isFinal;
        if (result != nil) {
            NSLog(@"Formatted String: %@ ", result.bestTranscription.formattedString);
            userFeedback.text = result.bestTranscription.formattedString;
            // Schedule a one-shot timer to stop listening 4 seconds after speech was recognized.
            [NSTimer scheduledTimerWithTimeInterval:4 target:self selector:@selector(endRecordingAudio) userInfo:nil repeats:NO];
            isFinal = [result isFinal];
            if (isFinal) {
                // Only tear everything down once the recognizer reports a final result.
                [audioEngine stop];
                [inputNode removeTapOnBus:0];
                recognitionRequest = nil;
                speechRecognitionTask = nil;
            }
            [self.microphoneButton setEnabled:true];
        }
        if (errorl) {
            NSLog(@"Error Description: %@", errorl);
        }
    }];
    AVAudioFormat *recordingFormat = [inputNode outputFormatForBus:0];
    [inputNode installTapOnBus:0 bufferSize:1024 format:recordingFormat block:^(AVAudioPCMBuffer * _Nonnull buffer, AVAudioTime * _Nonnull when) {
        [recognitionRequest appendAudioPCMBuffer:buffer];
    }];
    NSError *error1;
    [audioEngine prepare];
    [audioEngine startAndReturnError:&error1];
    if (error1 != nil) {
        NSLog(@"Error description: %@", error1.description);
        [self hideActivityIndicator];
    }
    userFeedback.text = @"Say something, I am listening!";
    NSLog(@"Say something, I am listening!");
}
- (void)endRecordingAudio
{
    NSLog(@"AudioEngine stopped");
    [audioEngine stop];
    [inputNode removeTapOnBus:0];
    recognitionRequest = nil;
    speechRecognitionTask = nil;
}
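As a side note, neither snippet shows the Speech framework authorization step, which has to succeed before startRecording can deliver any results. Below is a minimal sketch of that step, assuming the microphoneButton outlet from the answer's code and the usual usage-description keys in Info.plist; it is only an illustration, not part of the original answer.

#import <Speech/Speech.h>

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Keep the record button disabled until the user grants permission.
    self.microphoneButton.enabled = NO;

    // Requires NSSpeechRecognitionUsageDescription (and NSMicrophoneUsageDescription)
    // entries in Info.plist.
    [SFSpeechRecognizer requestAuthorization:^(SFSpeechRecognizerAuthorizationStatus status) {
        dispatch_async(dispatch_get_main_queue(), ^{
            // The completion handler may run on a background queue, so hop to the
            // main queue before touching UI.
            self.microphoneButton.enabled = (status == SFSpeechRecognizerAuthorizationStatusAuthorized);
        });
    }];
}

With that in place, startRecording can be wired to the microphone button's action, and endRecordingAudio is triggered by the timer as in the answer above.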