I am trying to create a port of the TensorFlow speech commands example and send its results to my VR Unity3D game. First I did some research on how to run a background service with Android and Unity, and I managed to get the speech commands example working separately from Unity. The only thing I am stuck on now is the TensorFlowInferenceInterface Java class, which does the actual word recognition: it cannot be found in my Unity APK build, and the log looks like this:
(Filename: D Line: 0)
06-08 21:47:40.212 4729-4729/com.usn.unityplugin V/SpeechRecognitionService: Reading labels from: conv_actions_labels.txt
06-08 21:47:40.216 4729-4729/com.usn.unityplugin D/AndroidRuntime: Shutting down VM
06-08 21:47:40.231 4729-4729/com.usn.unityplugin E/AndroidRuntime: FATAL EXCEPTION: main
Process: com.usn.unityplugin, PID: 4729
java.lang.Error: FATAL EXCEPTION [main]
Unity version : 2017.1.1f1
Device model : samsung SM-G610F
Device fingerprint: samsung/on7xeltedd/on7xelte:7.0/NRD90M/G610FDDU1BRD1:user/release-keys
Caused by: java.lang.NoClassDefFoundError: Failed resolution of: Lorg/tensorflow/contrib/android/TensorFlowInferenceInterface;
at app.test.pluginservice.SpeechRecognitionService.onCreate(SpeechRecognitionService.java:107)
at android.app.ActivityThread.handleCreateService(ActivityThread.java:3534)
at android.app.ActivityThread.-wrap6(ActivityThread.java)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1732)
at android.os.Handler.dispatchMessage(Handler.java:102)
at android.os.Looper.loop(Looper.java:154)
at android.app.ActivityThread.main(ActivityThread.java:6776)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1518)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1408)
**Caused by: java.lang.ClassNotFoundException: Didn't find class "org.tensorflow.contrib.android.TensorFlowInferenceInterface" on path: DexPathList[[zip file "/data/app/com.usn.unityplugin-2/base.apk"],nativeLibraryDirectories=[/data/app/com.usn.unityplugin-2/lib/arm, /data/app/com.usn.unityplugin-2/base.apk!/lib/armeabi-v7a, /system/lib, /vendor/lib]]**
at dalvik.system.BaseDexClassLoader.findClass(BaseDexClassLoader.java:56)
at java.lang.ClassLoader.loadClass(ClassLoader.java:380)
at java.lang.ClassLoader.loadClass(ClassLoader.java:312)
at app.test.pluginservice.SpeechRecognitionService.onCreate(SpeechRecognitionService.java:107)
at android.app.ActivityThread.handleCreateService(ActivityThread.java:3534)
at android.app.ActivityThread.-wrap6(ActivityThread.java)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1732)
at android.os.Handler.dispatchMessage(Handler.java:102)
at android.os.Looper.loop(Looper.java:154)
at android.app.ActivityThread.main(ActivityThread.java:6776)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1518)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1408)
I am using a TensorFlow model (Kaldi) that takes a float[] as input and produces a string label as output, and I want Unity to receive that string value every few seconds. So I figured it would be best to get the example working on plain Android first and then port it to Unity as an AAR plugin. That is what I did, and it is what led to the error above and to this question.
By the way, I am new to Android development :) I am just a game developer; I need to get this working and receive the result label in Unity.
Here is the C# code:
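To make the role of that missing class clearer, here is a minimal sketch of the kind of inference call the service runs, assuming the standard org.tensorflow.contrib.android API; the helper class, tensor names and shape here are placeholders, not the actual ones from my graph:

import org.tensorflow.contrib.android.TensorFlowInferenceInterface;

public class LabelInference {
    // Placeholder tensor names: a real graph uses its own input/output node names
    private static final String INPUT_NAME = "input";
    private static final String OUTPUT_NAME = "output";

    // Feed a float[] into the graph and map the highest-scoring output to a label
    public static String recognize(TensorFlowInferenceInterface tf, float[] input, String[] labels) {
        float[] scores = new float[labels.length];
        tf.feed(INPUT_NAME, input, 1, input.length); // shape [1, N]
        tf.run(new String[] {OUTPUT_NAME});
        tf.fetch(OUTPUT_NAME, scores);
        int best = 0;
        for (int i = 1; i < scores.length; i++) {
            if (scores[i] > scores[best]) {
                best = i;
            }
        }
        return labels[best];
    }
}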
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;

public class ImportJava : MonoBehaviour
{
    AndroidJavaClass unityClass;
    AndroidJavaObject unityActivity;
    AndroidJavaObject TestTensorObject;
    AndroidJavaClass customClass;
    public Text txt;

    // Use this for initialization
    void Start()
    {
        // Replace with your full package name
        Debug.Log("Getting JAVA DATA");
        sendActivityReference("app.test.pluginservice.TestServiceActivity");
        startService();
    }

    void Update()
    {
        if (Time.frameCount % 30 == 0)
        {
            //GetString();
        }
    }

    private void GetString()
    {
        var str = customClass.CallStatic<string>("GetStr");
        Debug.Log(" +++GetString()++++ " + str);
    }

    private void GetDate()
    {
        string str = customClass.CallStatic<string>("getTestData");
        Debug.Log("unityActivity.Get<string>(getTestData);" + str);
    }

    void sendActivityReference(string packageName)
    {
        Debug.Log("entered sendActivityReference");
        unityClass = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
        unityActivity = unityClass.GetStatic<AndroidJavaObject>("currentActivity");
        customClass = new AndroidJavaClass(packageName);
        customClass.CallStatic("receiveActivityInstance", unityActivity);
    }

    void startService()
    {
        Debug.Log(" entered startService");
        customClass.CallStatic("StartServiceTestServiceActivityClass");
    }
}
And the Java service code:
package app.test.pluginservice;

import android.app.Activity;
import android.content.Intent;
import android.util.Log;

public class TestServiceActivity {

    static String resultString = "EmptyCommand";
    static Activity myActivity;

    // Called from C# to get the Activity instance
    public static void receiveActivityInstance(Activity tempActivity) {
        myActivity = tempActivity;
    }

    public static void StartServiceTestServiceActivityClass() {
        myActivity.startService(new Intent(myActivity, SpeechRecognitionService.class));
    }

    public static String GetStr() {
        resultString = SpeechRecognitionService.getmInstance().getRecognitionOutput();
        Log.d("wat?", "GetStr: " + resultString);
        return resultString;
    }

    public static void stopService() {
        SpeechRecognitionService.getmInstance().stopService();
    }
}
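And, stripped down to just the pieces referenced above (the methods TestServiceActivity calls, plus the onCreate that crashes at SpeechRecognitionService.java:107), the service side looks roughly like this; this is not my full service, the model path is a placeholder and the actual recording/recognition loop is omitted:

package app.test.pluginservice;

import android.app.Service;
import android.content.Intent;
import android.os.IBinder;
import org.tensorflow.contrib.android.TensorFlowInferenceInterface;

public class SpeechRecognitionService extends Service {

    // Placeholder model path
    private static final String MODEL_FILE = "file:///android_asset/my_model.pb";

    private static SpeechRecognitionService mInstance;
    private TensorFlowInferenceInterface inferenceInterface;
    private volatile String recognitionOutput = "EmptyCommand";

    public static SpeechRecognitionService getmInstance() {
        return mInstance;
    }

    @Override
    public void onCreate() {
        super.onCreate();
        mInstance = this;
        // First reference to TensorFlowInferenceInterface: this is where the
        // NoClassDefFoundError is thrown in the Unity build
        inferenceInterface = new TensorFlowInferenceInterface(getAssets(), MODEL_FILE);
        // ... start recording audio and run recognition on a background thread ...
    }

    public String getRecognitionOutput() {
        return recognitionOutput;
    }

    public void stopService() {
        stopSelf();
    }

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }
}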