I am trying to integrate Sphinx into my PhoneGap application, following pocketsphinx-android-demo, but I get a RuntimeException at startup. Here are the details:

E/OADemo  (15835): java.lang.RuntimeException: Decoder_setSearch returned -1
E/OADemo  (15835):  at edu.cmu.pocketsphinx.PocketSphinxJNI.Decoder_setSearch(Native Method)
E/OADemo  (15835):  at edu.cmu.pocketsphinx.Decoder.setSearch(Unknown Source)
E/OADemo  (15835):  at edu.cmu.pocketsphinx.SpeechRecognizer.startListening(Unknown Source)
E/OADemo  (15835):  at cn.fsll.oademo.OADemo.switchSearch(OADemo.java:103)
E/OADemo  (15835):  at cn.fsll.oademo.OADemo.access$100(OADemo.java:22)
E/OADemo  (15835):  at cn.fsll.oademo.OADemo$1.onPostExecute(OADemo.java:53)
E/OADemo  (15835):  at cn.fsll.oademo.OADemo$1.onPostExecute(OADemo.java:34)
E/OADemo  (15835):  at android.os.AsyncTask.finish(AsyncTask.java:632)
E/OADemo  (15835):  at android.os.AsyncTask.access$600(AsyncTask.java:177)
E/OADemo  (15835):  at android.os.AsyncTask$InternalHandler.handleMessage(AsyncTask.java:645)
E/OADemo  (15835):  at android.os.Handler.dispatchMessage(Handler.java:102)
E/OADemo  (15835):  at android.os.Looper.loop(Looper.java:136)
E/OADemo  (15835):  at android.app.ActivityThread.main(ActivityThread.java:5050)
E/OADemo  (15835):  at java.lang.reflect.Method.invoke(Native Method)
E/OADemo  (15835):  at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:789)
E/OADemo  (15835):  at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:605)

So, how can I fix this? Any help is appreciated. My activity code is below:

package cn.fsll.oademo;

import static android.widget.Toast.makeText;
import static edu.cmu.pocketsphinx.SpeechRecognizerSetup.defaultSetup;

import android.content.Context;
import android.os.Bundle;
import android.os.AsyncTask;
import android.widget.Toast;
import android.util.Log;

import java.io.File;
import java.io.IOException;

import edu.cmu.pocketsphinx.Assets;
import edu.cmu.pocketsphinx.Hypothesis;
import edu.cmu.pocketsphinx.RecognitionListener;
import edu.cmu.pocketsphinx.SpeechRecognizer;

import org.apache.cordova.*;

public class OADemo extends CordovaActivity implements RecognitionListener {
    private static final String KWS_SEARCH = "wakeup";
    private static final String KEYPHRASE = "oa";

    private final String LOGTAG = "OADemo";
    private SpeechRecognizer recognizer;

    @Override
    public void onCreate(Bundle savedInstanceState) {

        super.onCreate(savedInstanceState);
        super.init();

        // Copy the bundled recognizer assets to storage and set up the recognizer off the UI thread.
        new AsyncTask<Void, Void, Exception>() {
            @Override
            protected Exception doInBackground(Void... params) {
                try {
                    Assets assets = new Assets(OADemo.this);
                    File assetDir = assets.syncAssets();

                    setupRecognizer(assetDir);
                } catch (IOException ex) {
                    Log.e(LOGTAG, Log.getStackTraceString(ex));
                    return ex;
                }

                return null;
            }

            @Override
            protected void onPostExecute(Exception ex) {
                if(null == ex) {
                    switchSearch(KWS_SEARCH);
                } else {
                    Log.e(LOGTAG, Log.getStackTraceString(ex));
                }
            }
        }.execute();

        super.loadUrl(Config.getStartUrl());
    }

    @Override
    public void onPartialResult(Hypothesis hypothesis) {
        Log.d(LOGTAG, "onPartialResult");
        onResult(hypothesis);
    }

    @Override
    public void onResult(Hypothesis hypothesis) {
        Log.d(LOGTAG, "onResult");

        String text = hypothesis.getHypstr();

        if(text.equals(KEYPHRASE)) {
            recognizer.stop();
            // TODO: start iflytek speech recognizator
        }
    }

    @Override
    public void onBeginningOfSpeech() {
        Log.d(LOGTAG, "onBeginningOfSpeech");
    }

    @Override
    public void onEndOfSpeech() {
        Log.d(LOGTAG, "onEndOfSpeech");

        switchSearch(KWS_SEARCH);
    }

    private void toast(final String msg) {
        makeText(getApplicationContext(), msg, Toast.LENGTH_SHORT).show();
        Log.e(LOGTAG, msg);
    }

    private void switchSearch(final String searchName) {
        Log.d(LOGTAG, "switchSearch");

        try {
            recognizer.stop();
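            // startListening() is where Decoder_setSearch fails, according to the stack trace above.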
            recognizer.startListening(searchName);
        } catch(Exception ex) {
            Log.e(LOGTAG, Log.getStackTraceString(ex));
        }
    }

    private void setupRecognizer(final File assetsDir) {
        Log.d(LOGTAG, "setupRecognizer");

        File modelsDir = new File(assetsDir, "models");
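        // Model and dictionary paths are relative to the assets synced in doInBackground().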
        recognizer = defaultSetup()
            .setAcousticModel(new File(modelsDir, "hmm/en-us-semi"))
            .setDictionary(new File(modelsDir, "dict/cmu07a.dic"))
            .setRawLogDir(assetsDir).setKeywordThreshold(1e-20f)
            .getRecognizer();

        recognizer.addListener(this);
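        // Register the keyword-spotting search; switchSearch() starts it by this name (KWS_SEARCH).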
        recognizer.addKeyphraseSearch(KWS_SEARCH, KEYPHRASE);
    }
}
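
One thing I have not verified yet is whether the keyphrase "oa" is actually listed in cmu07a.dic. In case it helps to rule that out, this is the quick check I had in mind (my own diagnostic sketch, not code from the demo; the paths mirror setupRecognizer() above):

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;

// Diagnostic sketch only: returns true if "word" appears as an entry in the synced dictionary.
static boolean dictionaryContains(File assetsDir, String word) throws IOException {
    File dict = new File(new File(assetsDir, "models"), "dict/cmu07a.dic");
    BufferedReader reader = new BufferedReader(new FileReader(dict));
    try {
        String line;
        while ((line = reader.readLine()) != null) {
            // Each line is "<word> <phonemes...>"; compare the leading token, ignoring case
            // since I am not sure whether the entries are upper- or lower-case.
            String head = line.split("\\s+", 2)[0];
            if (head.equalsIgnoreCase(word)) {
                return true;
            }
        }
    } finally {
        reader.close();
    }
    return false;
}

I do not know whether a missing dictionary entry could explain Decoder_setSearch returning -1, so please point me in the right direction if that check is beside the point.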