我正在尝试移植 TensorFlow 语音命令(Speech Commands)示例,并把识别结果发送到我的 VR Unity3D 游戏中。首先,我研究了如何在 Android 中配合 Unity 创建后台服务,并成功让语音命令示例独立于 Unity 运行。我现在唯一卡住的地方是 TensorFlowInferenceInterface 这个 Java 类——它是示例中负责识别语音输入的核心——在我用 Unity 构建的 APK 中找不到该类,日志如下:
(Filename: D Line: 0)
06-08 21:47:40.212 4729-4729/com.usn.unityplugin V/SpeechRecognitionService: Reading labels from: conv_actions_labels.txt
06-08 21:47:40.216 4729-4729/com.usn.unityplugin D/AndroidRuntime: Shutting down VM
06-08 21:47:40.231 4729-4729/com.usn.unityplugin E/AndroidRuntime: FATAL EXCEPTION: main
Process: com.usn.unityplugin, PID: 4729
java.lang.Error: FATAL EXCEPTION [main]
Unity version : 2017.1.1f1
Device model : samsung SM-G610F
Device fingerprint: samsung/on7xeltedd/on7xelte:7.0/NRD90M/G610FDDU1BRD1:user/release-keys
Caused by: java.lang.NoClassDefFoundError: Failed resolution of: Lorg/tensorflow/contrib/android/TensorFlowInferenceInterface;
at app.test.pluginservice.SpeechRecognitionService.onCreate(SpeechRecognitionService.java:107)
at android.app.ActivityThread.handleCreateService(ActivityThread.java:3534)
at android.app.ActivityThread.-wrap6(ActivityThread.java)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1732)
at android.os.Handler.dispatchMessage(Handler.java:102)
at android.os.Looper.loop(Looper.java:154)
at android.app.ActivityThread.main(ActivityThread.java:6776)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1518)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1408)
**Caused by: java.lang.ClassNotFoundException: Didn't find class "org.tensorflow.contrib.android.TensorFlowInferenceInterface" on path: DexPathList[[zip file "/data/app/com.usn.unityplugin-2/base.apk"],nativeLibraryDirectories=[/data/app/com.usn.unityplugin-2/lib/arm, /data/app/com.usn.unityplugin-2/base.apk!/lib/armeabi-v7a, /system/lib, /vendor/lib]]**
at dalvik.system.BaseDexClassLoader.findClass(BaseDexClassLoader.java:56)
at java.lang.ClassLoader.loadClass(ClassLoader.java:380)
at java.lang.ClassLoader.loadClass(ClassLoader.java:312)
at app.test.pluginservice.SpeechRecognitionService.onCreate(SpeechRecognitionService.java:107)
at android.app.ActivityThread.handleCreateService(ActivityThread.java:3534)
at android.app.ActivityThread.-wrap6(ActivityThread.java)
at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1732)
at android.os.Handler.dispatchMessage(Handler.java:102)
at android.os.Looper.loop(Looper.java:154)
at android.app.ActivityThread.main(ActivityThread.java:6776)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1518)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1408)
我使用的是一个 TensorFlow 模型(Kaldi),输入为 float[],输出为字符串标签。我希望 Unity 每隔几秒接收一次这个字符串值,所以我的思路是:先在 Android 上把示例跑通,再把它打包成 AAR 插件移植到 Unity。这正是我所做的,也是我遇到上述错误并提问的原因。
顺便说一句,我是 Android 开发的新手 :) 只是个游戏开发者,我需要完成这项工作,并在 Unity 中接收识别结果标签。
这是 C# 代码:
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
/// <summary>
/// Unity-side bridge to the Android AAR plugin: hands the Unity activity to the
/// Java bridge class, starts the speech-recognition service, and can poll the
/// latest recognized label via a static Java method.
/// </summary>
public class ImportJava : MonoBehaviour
{
    // JNI handles resolved in Start(); all are null until sendActivityReference runs.
    AndroidJavaClass unityClass;
    AndroidJavaObject unityActivity;
    AndroidJavaClass customClass;

    // UI label the recognized command could be written to.
    public Text txt;

    // Use this for initialization
    void Start()
    {
        // Fully-qualified name of the Java bridge class shipped inside the AAR.
        Debug.Log("Getting JAVA DATA");
        sendActivityReference("app.test.pluginservice.TestServiceActivity");
        startService();
    }

    void Update()
    {
        // Poll the Java side every 30 frames (~0.5 s at 60 fps) for a new label.
        if (Time.frameCount % 30 == 0)
        {
            //GetString();
        }
    }

    /// <summary>Fetches the latest recognition result from the Java bridge.</summary>
    private void GetString()
    {
        var str = customClass.CallStatic<string>("GetStr");
        Debug.Log(" +++GetString()++++ " + str);
    }

    /// <summary>Debug helper: fetches test data from the Java bridge.</summary>
    private void GetDate()
    {
        string str = customClass.CallStatic<string>("getTestData");
        Debug.Log("unityActivity.Get<string>(getTestData);" + str);
    }

    /// <summary>
    /// Resolves the Unity player activity and passes it to the Java bridge class
    /// so the plugin has a Context to start its service with.
    /// </summary>
    /// <param name="javaClassName">Fully-qualified Java class name of the bridge
    /// (not just a package name).</param>
    void sendActivityReference(string javaClassName)
    {
        Debug.Log("entered sendActivityReference");
        unityClass = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
        unityActivity = unityClass.GetStatic<AndroidJavaObject>("currentActivity");
        customClass = new AndroidJavaClass(javaClassName);
        customClass.CallStatic("receiveActivityInstance", unityActivity);
    }

    /// <summary>Asks the Java bridge to start the speech-recognition service.</summary>
    void startService()
    {
        Debug.Log(" entered startService");
        customClass.CallStatic("StartServiceTestServiceActivityClass");
    }
}
以及 Java 端的桥接类代码(由它启动语音识别服务):
package app.test.pluginservice;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.TextView;
import java.util.Timer;
import java.util.TimerTask;
/**
 * Static bridge between the Unity C# layer and the Android speech-recognition
 * service. Unity calls these methods reflectively via
 * {@code AndroidJavaClass.CallStatic}, so the method names must not change.
 */
public class TestServiceActivity {

    /** Latest recognized label; placeholder until the service reports one. */
    static String resultString = "EmptyCommand";

    /** Unity player activity handed over from C#; used as the Context to start the service. */
    static Activity myActivity;

    private TestServiceActivity() {
        // Pure static bridge — never instantiated.
    }

    /**
     * Called from C# to hand over the Unity activity instance.
     *
     * @param tempActivity the current Unity player activity
     */
    public static void receiveActivityInstance(Activity tempActivity) {
        myActivity = tempActivity;
    }

    /**
     * Starts the speech-recognition background service.
     *
     * @throws IllegalStateException if {@link #receiveActivityInstance(Activity)}
     *     has not been called first (previously this failed with a bare NPE)
     */
    public static void StartServiceTestServiceActivityClass() {
        if (myActivity == null) {
            throw new IllegalStateException(
                    "receiveActivityInstance must be called before starting the service");
        }
        myActivity.startService(new Intent(myActivity, SpeechRecognitionService.class));
    }

    /**
     * Polled from C#: returns the most recent recognition output from the service.
     *
     * @return the latest recognized label
     */
    public static String GetStr() {
        resultString = SpeechRecognitionService.getmInstance().getRecognitionOutput();
        Log.d("wat?", "GetStr: " + resultString);
        return resultString;
    }

    /** Stops the speech-recognition service via its singleton instance. */
    public static void stopService() {
        SpeechRecognitionService.getmInstance().stopService();
    }
}