I am building an Android speech recognition feature. It works, but the Google voice dialog keeps popping up and I want to get rid of it; in other words, I want to run the speech recognition in the background as a service. I know there are plenty of SO posts that answer this, but all of the code in them is native Java, while I am developing with PhoneGap, so I have not been able to port it over. My code is below. Any help would be appreciated! I am using this plugin. Thanks in advance.
package com.phonegap.plugins.speech;

import java.util.ArrayList;
import java.util.Locale;

import org.json.JSONArray;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CallbackContext;

import android.util.Log;
import android.app.Activity;
import android.content.Intent;
import android.speech.RecognizerIntent;

/**
 * Style and such borrowed from the TTS and PhoneListener plugins
 */
public class SpeechRecognizer extends CordovaPlugin {

    private static final String LOG_TAG = SpeechRecognizer.class.getSimpleName();
    private static int REQUEST_CODE = 1001;

    private CallbackContext callbackContext;
    private LanguageDetailsChecker languageDetailsChecker;

    //@Override
    public boolean execute(String action, JSONArray args, CallbackContext callbackContext) {
        Boolean isValidAction = true;
        this.callbackContext = callbackContext;

        // Action selector
        if ("startRecognize".equals(action)) {
            // recognize speech
            startSpeechRecognitionActivity(args);
        } else if ("getSupportedLanguages".equals(action)) {
            getSupportedLanguages();
        } else {
            // Invalid action
            this.callbackContext.error("Unknown action: " + action);
            isValidAction = false;
        }
        return isValidAction;
    }

    // Get the list of supported languages
    private void getSupportedLanguages() {
        if (languageDetailsChecker == null) {
            languageDetailsChecker = new LanguageDetailsChecker(callbackContext);
        }
        // Create and launch get languages intent
        Intent detailsIntent = new Intent(RecognizerIntent.ACTION_GET_LANGUAGE_DETAILS);
        cordova.getActivity().sendOrderedBroadcast(detailsIntent, null, languageDetailsChecker, null, Activity.RESULT_OK, null, null);
    }

    /**
     * Fire an intent to start the speech recognition activity.
     *
     * @param args Argument array with the following string args: [req code][number of matches][prompt string]
     */
    private void startSpeechRecognitionActivity(JSONArray args) {
        int maxMatches = 0;
        String prompt = "";
        String language = Locale.getDefault().toString();

        try {
            if (args.length() > 0) {
                // Maximum number of matches, 0 means the recognizer decides
                String temp = args.getString(0);
                maxMatches = Integer.parseInt(temp);
            }
            if (args.length() > 1) {
                // Optional text prompt
                prompt = args.getString(1);
            }
            if (args.length() > 2) {
                // Optional language specified
                language = args.getString(2);
            }
        }
        catch (Exception e) {
            Log.e(LOG_TAG, String.format("startSpeechRecognitionActivity exception: %s", e.toString()));
        }

        // Create the intent and set parameters
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
        if (maxMatches > 0)
            intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);
        if (!prompt.equals(""))
            intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
        cordova.startActivityForResult(this, intent, REQUEST_CODE);
    }

    /**
     * Handle the results from the recognition activity.
     */
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (resultCode == Activity.RESULT_OK) {
            // Fill the list view with the strings the recognizer thought it could have heard
            ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
            returnSpeechResults(matches);
        }
        else {
            // Failure - Let the caller know
            this.callbackContext.error(Integer.toString(resultCode));
        }
        super.onActivityResult(requestCode, resultCode, data);
    }

    private void returnSpeechResults(ArrayList<String> matches) {
        JSONArray jsonMatches = new JSONArray(matches);
        this.callbackContext.success(jsonMatches);
    }
}
Answer 0 (score: 0)
Since what you need is slightly different from what the plugin provides, it looks like you will have to write your own plugin. You can certainly use the plugin's source (in particular plugin.xml, SpeechRecognizer.js, and SpeechRecognizer.java) as an example of how to put one together in general.
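The key change such a custom plugin would make is to drop the ACTION_RECOGNIZE_SPEECH activity (which is what shows the Google dialog) and instead drive android.speech.SpeechRecognizer directly with a RecognitionListener. Below is a minimal sketch of that recognition piece, under a few assumptions: the class name BackgroundRecognizer and its start() method are illustrative only (not part of the existing plugin), the RECORD_AUDIO permission is declared in the manifest, and the object is created and used on the UI thread (from a Cordova plugin you would typically wrap it in cordova.getActivity().runOnUiThread(...)).

import java.util.ArrayList;

import org.apache.cordova.CallbackContext;
import org.json.JSONArray;

import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;

public class BackgroundRecognizer implements RecognitionListener {

    private final SpeechRecognizer recognizer;
    private final CallbackContext callbackContext;

    public BackgroundRecognizer(Context context, CallbackContext callbackContext) {
        this.callbackContext = callbackContext;
        // Must be created and called on the main thread
        this.recognizer = SpeechRecognizer.createSpeechRecognizer(context);
        this.recognizer.setRecognitionListener(this);
    }

    public void start(String language, int maxMatches) {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, language);
        if (maxMatches > 0) {
            intent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, maxMatches);
        }
        // startListening() does not show the Google dialog
        recognizer.startListening(intent);
    }

    @Override
    public void onResults(Bundle results) {
        // Same shape of result the original plugin returns: a JSON array of matches
        ArrayList<String> matches =
                results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        callbackContext.success(new JSONArray(matches));
    }

    @Override
    public void onError(int error) {
        callbackContext.error(error);
    }

    // The remaining RecognitionListener callbacks are not needed for this sketch
    @Override public void onReadyForSpeech(Bundle params) {}
    @Override public void onBeginningOfSpeech() {}
    @Override public void onRmsChanged(float rmsdB) {}
    @Override public void onBufferReceived(byte[] buffer) {}
    @Override public void onEndOfSpeech() {}
    @Override public void onPartialResults(Bundle partialResults) {}
    @Override public void onEvent(int eventType, Bundle params) {}
}

Your plugin's execute() could then create one of these instead of calling startActivityForResult(), and the JavaScript side of the existing plugin could stay largely the same, since the success callback still receives an array of candidate strings.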