I am building an application with the Google SpeechClient, which requires the GOOGLE_APPLICATION_CREDENTIALS environment variable to be set; once that variable is set, the Speech-to-Text API can be used.
My application has to run on both Linux and Windows. On Linux it works perfectly, but on Windows, when I run the project, it throws the exception com.google.api.gax.rpc.UnavailableException: "io.grpc.StatusRuntimeException: UNAVAILABLE: Credentials failed to obtain metadata" while running this thread:
package Controller.Runnables;

import Controller.GUI.VoxSpeechGUIController;
import Model.SpokenTextHistory;

import com.google.api.gax.rpc.ClientStream;
import com.google.api.gax.rpc.ResponseObserver;
import com.google.api.gax.rpc.StreamController;
import com.google.cloud.speech.v1.*;
import com.google.protobuf.ByteString;

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.TargetDataLine;
import java.io.IOException;
import java.util.ArrayList;

public class SpeechRecognizerRunnable implements Runnable {

    private VoxSpeechGUIController controller;

    public SpeechRecognizerRunnable(VoxSpeechGUIController voxSpeechGUIController) {
        this.controller = voxSpeechGUIController;
    }

    @Override
    public void run() {
        MicrofoneRunnable micrunnable = MicrofoneRunnable.getInstance();
        Thread micThread = new Thread(micrunnable);

        ResponseObserver<StreamingRecognizeResponse> responseObserver = null;

        try (SpeechClient client = SpeechClient.create()) {
            ClientStream<StreamingRecognizeRequest> clientStream;

            responseObserver =
                new ResponseObserver<StreamingRecognizeResponse>() {
                    ArrayList<StreamingRecognizeResponse> responses = new ArrayList<>();

                    public void onStart(StreamController controller) {}

                    public void onResponse(StreamingRecognizeResponse response) {
                        try {
                            responses.add(response);
                            StreamingRecognitionResult result = response.getResultsList().get(0);
                            // There can be several alternative transcripts for a given chunk of speech. Just
                            // use the first (most likely) one here.
                            SpeechRecognitionAlternative alternative = result.getAlternativesList().get(0);
                            String transcript = alternative.getTranscript();
                            System.out.printf("Transcript : %s\n", transcript);

                            String newText = SpokenTextHistory.getInstance().getActualSpeechString() + " " + transcript;
                            SpokenTextHistory.getInstance().setActualSpeechString(newText);
                            controller.setLabelText(newText);
                        } catch (Exception ex) {
                            System.out.println(ex.getMessage());
                            ex.printStackTrace();
                        }
                    }

                    public void onComplete() {
                    }

                    public void onError(Throwable t) {
                        System.out.println(t);
                    }
                };

            clientStream = client.streamingRecognizeCallable().splitCall(responseObserver);

            RecognitionConfig recognitionConfig =
                RecognitionConfig.newBuilder()
                    .setEncoding(RecognitionConfig.AudioEncoding.LINEAR16)
                    .setLanguageCode("pt-BR")
                    .setSampleRateHertz(16000)
                    .build();
            StreamingRecognitionConfig streamingRecognitionConfig =
                StreamingRecognitionConfig.newBuilder().setConfig(recognitionConfig).build();

            StreamingRecognizeRequest request =
                StreamingRecognizeRequest.newBuilder()
                    .setStreamingConfig(streamingRecognitionConfig)
                    .build(); // The first request in a streaming call has to be a config

            clientStream.send(request);

            try {
                // SampleRate:16000Hz, SampleSizeInBits: 16, Number of channels: 1, Signed: true,
                // bigEndian: false
                AudioFormat audioFormat = new AudioFormat(16000, 16, 1, true, false);
                DataLine.Info targetInfo =
                    new DataLine.Info(
                        TargetDataLine.class,
                        audioFormat); // Set the system information to read from the microphone audio stream

                if (!AudioSystem.isLineSupported(targetInfo)) {
                    System.out.println("Microphone not supported");
                    System.exit(0);
                }

                // Target data line captures the audio stream the microphone produces.
                micrunnable.targetDataLine = (TargetDataLine) AudioSystem.getLine(targetInfo);
                micrunnable.targetDataLine.open(audioFormat);
                micThread.start();

                long startTime = System.currentTimeMillis();

                while (!micrunnable.stopFlag) {
                    long estimatedTime = System.currentTimeMillis() - startTime;
                    if (estimatedTime >= 55000) {
                        clientStream.closeSend();
                        clientStream = client.streamingRecognizeCallable().splitCall(responseObserver);
                        request =
                            StreamingRecognizeRequest.newBuilder()
                                .setStreamingConfig(streamingRecognitionConfig)
                                .build();
                        startTime = System.currentTimeMillis();
                    } else {
                        request =
                            StreamingRecognizeRequest.newBuilder()
                                .setAudioContent(ByteString.copyFrom(micrunnable.sharedQueue.take()))
                                .build();
                    }
                    clientStream.send(request);
                }
            } catch (Exception e) {
                System.out.println(e);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
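To narrow this down, a check along the following lines should show whether the credentials are actually resolvable inside the running process (a minimal sketch; the CredentialsCheck class name is just for illustration, and it loads Application Default Credentials roughly the way SpeechClient.create() does):

import com.google.auth.oauth2.GoogleCredentials;

public class CredentialsCheck {
    public static void main(String[] args) throws Exception {
        // Print the path the running process actually sees for the variable.
        System.out.println(System.getenv("GOOGLE_APPLICATION_CREDENTIALS"));
        // Try to load Application Default Credentials from that path; on failure
        // this throws an IOException describing what could not be found or read.
        GoogleCredentials credentials = GoogleCredentials.getApplicationDefault();
        System.out.println("Application Default Credentials loaded: " + credentials);
    }
}

The idea is only to see whether the path set at runtime is visible to the process and whether the auth library can read the key file from it.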
I have been at this for hours and have not found a solution to my problem. It is worth mentioning that the environment variable is set correctly.
Has anyone run into this problem with Google before? What should I do to solve it? Thanks a lot.
PS: I have already tried all of Google's alternative ways of authenticating the credentials, and every one of them comes back with an error. This is the class that sets up my environment variable:
package Controller.Autentication;

import java.io.*;
import java.lang.reflect.Field;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class GoogleAuthentication {

    private static final String GOOGLE_APPLICATION_CREDENTIALS = "GOOGLE_APPLICATION_CREDENTIALS";
    private static final String VoxSpeechFolder = ".vox";
    private static final String GoogleAuthenticationJsonFile = "VoxAuthentication.json";

    public static void setupGoogleCredentials() {
        // Make sure the .vox folder exists in the platform-specific default directory.
        String directory = defaultDirectory();
        directory += File.separator + VoxSpeechFolder;
        File voxPath = new File(directory);
        if (!voxPath.exists()) {
            voxPath.mkdirs();
        }

        // Copy the bundled service-account JSON from the classpath into the .vox folder.
        ClassLoader classLoader = GoogleAuthentication.class.getClassLoader();
        File srcFile = new File(classLoader.getResource(GoogleAuthenticationJsonFile).getFile());
        if (srcFile.exists()) {
            try {
                String voxDestPath = defaultDirectory() + File.separator + VoxSpeechFolder + File.separator + GoogleAuthenticationJsonFile;
                File destFile = new File(voxDestPath);
                copyFile(srcFile, destFile);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        // Point GOOGLE_APPLICATION_CREDENTIALS at the copied JSON for the current process.
        try {
            Map<String, String> googleEnv = new HashMap<>();
            String path = defaultDirectory() + File.separator + VoxSpeechFolder + File.separator + GoogleAuthenticationJsonFile;
            googleEnv.put(GOOGLE_APPLICATION_CREDENTIALS, path);
            setGoogleEnv(googleEnv);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    static void copyFile(File sourceFile, File destFile) throws IOException {
        InputStream inStream;
        OutputStream outStream;
        System.out.println(destFile.getPath());
        // Only copy when the destination file does not exist yet.
        if (destFile.createNewFile()) {
            inStream = new FileInputStream(sourceFile);
            outStream = new FileOutputStream(destFile);

            byte[] buffer = new byte[1024];
            int length;
            while ((length = inStream.read(buffer)) > 0) {
                outStream.write(buffer, 0, length);
            }
            inStream.close();
            outStream.close();
        }
    }

    static String defaultDirectory() {
        String OS = getOperationSystem();
        if (OS.contains("WIN"))
            return System.getenv("APPDATA");
        else if (OS.contains("MAC"))
            return System.getProperty("user.home") + "/Library/Application " + "Support";
        else if (OS.contains("LINUX")) {
            return System.getProperty("user.home");
        }
        return System.getProperty("user.dir");
    }

    static String getOperationSystem() {
        return System.getProperty("os.name").toUpperCase();
    }

    // Patch the in-memory copy of the process environment via reflection so that
    // System.getenv(GOOGLE_APPLICATION_CREDENTIALS) returns the new path.
    protected static void setGoogleEnv(Map<String, String> newenv) throws Exception {
        try {
            Class<?> processEnvironmentClass = Class.forName("java.lang.ProcessEnvironment");
            Field theEnvironmentField = processEnvironmentClass.getDeclaredField("theEnvironment");
            theEnvironmentField.setAccessible(true);
            Map<String, String> env = (Map<String, String>) theEnvironmentField.get(null);
            env.putAll(newenv);
            Field theCaseInsensitiveEnvironmentField = processEnvironmentClass.getDeclaredField("theCaseInsensitiveEnvironment");
            theCaseInsensitiveEnvironmentField.setAccessible(true);
            Map<String, String> cienv = (Map<String, String>) theCaseInsensitiveEnvironmentField.get(null);
            cienv.putAll(newenv);
        } catch (NoSuchFieldException e) {
            // Fallback: patch the unmodifiable map returned by System.getenv().
            Class[] classes = Collections.class.getDeclaredClasses();
            Map<String, String> env = System.getenv();
            for (Class cl : classes) {
                if ("java.util.Collections$UnmodifiableMap".equals(cl.getName())) {
                    Field field = cl.getDeclaredField("m");
                    field.setAccessible(true);
                    Object obj = field.get(env);
                    Map<String, String> map = (Map<String, String>) obj;
                    map.clear();
                    map.putAll(newenv);
                }
            }
        }
        String genv = System.getenv(GOOGLE_APPLICATION_CREDENTIALS);
        System.out.println(genv);
    }
}
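To make the PS above concrete, the alternatives I mean are along these lines, e.g. handing the key file to the client explicitly instead of going through GOOGLE_APPLICATION_CREDENTIALS (a minimal sketch; the ExplicitCredentialsExample class name is just for illustration, and the path is assumed to be the VoxAuthentication.json copied by GoogleAuthentication):

import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.oauth2.GoogleCredentials;
import com.google.cloud.speech.v1.SpeechClient;
import com.google.cloud.speech.v1.SpeechSettings;

import java.io.FileInputStream;
import java.io.IOException;

public class ExplicitCredentialsExample {
    public static SpeechClient createClient(String jsonKeyPath) throws IOException {
        // Load the service-account key directly from disk instead of relying on
        // the GOOGLE_APPLICATION_CREDENTIALS environment variable.
        GoogleCredentials credentials;
        try (FileInputStream keyStream = new FileInputStream(jsonKeyPath)) {
            credentials = GoogleCredentials.fromStream(keyStream);
        }
        SpeechSettings settings =
            SpeechSettings.newBuilder()
                .setCredentialsProvider(FixedCredentialsProvider.create(credentials))
                .build();
        return SpeechClient.create(settings);
    }
}

A client created this way would take the place of the plain SpeechClient.create() call in SpeechRecognizerRunnable.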