I have a voice input class that works really well. However, I want the users of my app to be able to use the voice input code on any page of the app. What I need is a button in each XML layout that lets me use my voice input code without copying all of the voice input code into every single class. How can I reference my code so that I can simply add something that says: if I press this button, run that activity from the other class? The code below is in this order: first the voice input Java class, then the Java class where I want a button to use that code. (I've also put a rough sketch of what I'm imagining at the bottom of this post.)
package com.example.com.proto1;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.os.Bundle;
import android.speech.RecognizerIntent;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import java.util.ArrayList;
import java.util.List;
/**
 * Sample code that invokes the speech recognition intent API.
 */
public class VoiceRecognition extends Activity implements OnClickListener {

    public static final int VOICE_RECOGNITION_REQUEST_CODE = 1234;

    public ListView mList;
    public Button speakButton;

    /**
     * Called when the activity is first created.
     */
    @Override
    public void onCreate(Bundle voiceinput) {
        super.onCreate(voiceinput);
        // Inflate our UI from its XML layout description.
        setContentView(R.layout.voice_recognition);
        // Get display items for later interaction
        voiceinputbuttons();
        // Check to see if a recognition activity is present
        PackageManager pm = getPackageManager();
        List<ResolveInfo> activities = pm.queryIntentActivities(new Intent(
                RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
        if (activities.size() != 0) {
            speakButton.setOnClickListener(this);
        } else {
            speakButton.setEnabled(false);
            speakButton.setText("Recognizer not present");
        }
    }

    public void voiceinputbuttons() {
        // Look up the speak button and the results list from the layout
        speakButton = (Button) findViewById(R.id.btn_speak);
        mList = (ListView) findViewById(R.id.list);
    }
    /**
     * Handle the click on the start recognition button.
     */
    public void onClick(View v) {
        if (v.getId() == R.id.btn_speak) {
            startVoiceRecognitionActivity();
        }
    }

    /**
     * Fire an intent to start the speech recognition activity.
     */
    public void startVoiceRecognitionActivity() {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT,
                "Speech recognition demo");
        startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
    }

    /**
     * Handle the results from the recognition activity.
     */
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == VOICE_RECOGNITION_REQUEST_CODE
                && resultCode == RESULT_OK) {
            // Fill the list view with the strings the recognizer thought it
            // could have heard
            ArrayList<String> matches = data
                    .getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
            mList.setAdapter(new ArrayAdapter<String>(this,
                    android.R.layout.simple_list_item_1, matches));
            // matches is the result of voice input: a list of what the user possibly said.
            // Checking the list for a keyword lets you launch any activity when that keyword matches.
            // You can map several keywords to the same activity, so more than one word will work
            // (the user doesn't have to memorize words from a list).
            // To launch an activity from the voice input results, use the following format:
            // if (matches.contains("keyword")) { startActivity(new Intent("name.of.manifest.ACTIVITY")); }
            if (matches.contains("information")) {
                startActivity(new Intent("android.intent.action.INFOSCREEN"));
            }
            if (matches.contains("home")) {
                startActivity(new Intent("android.intent.action.MENU"));
            }
        }
        super.onActivityResult(requestCode, resultCode, data);
    }
}
And here is the class where I want a button to use that code:

package com.example.com.proto1;
import android.app.Activity;
import android.content.Intent;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.speech.RecognizerIntent;
import android.view.View.OnClickListener;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.speech.tts.TextToSpeech;
import java.util.ArrayList;
import java.util.List;
public class menu extends Activity implements TextToSpeech.OnInitListener {

    MediaPlayer sep, aep, vpm;
    TextToSpeech mTts;

    public void onInit(int i) {
        // Announce the menu once the TTS engine has finished initializing
        mTts.speak("EyePhone Main Menu", TextToSpeech.QUEUE_FLUSH, null);
    }
    @Override
    protected void onCreate(Bundle aboutmenu) {
        super.onCreate(aboutmenu);
        setContentView(R.layout.mainx);
        // Create the TTS engine; onInit() is called once it is ready
        mTts = new TextToSpeech(this, this);
        // Setting up the button references
        Button info = (Button) findViewById(R.id.aboutbutton);
        Button voice = (Button) findViewById(R.id.voicebutton);
        Button speakButton = (Button) findViewById(R.id.btn_speak);
        info.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                startActivity(new Intent("android.intent.action.INFOSCREEN"));
            }
        });
        voice.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                try {
                    Intent voiceIntent = new Intent(
                            "android.intent.action.RECOGNITIONMENU");
                    startActivity(voiceIntent);
                } catch (Exception e) {
                }
            }
        });
        speakButton.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                try {
                    // this is the place where I thought the code should go that I am asking about
                } catch (Exception e) {
                }
            }
        });
    }
}
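
For what it's worth, this is roughly what I was imagining: pull the recognizer launch and the keyword handling out into a small helper that any of my activities can call, so the speak button on each page just forwards to it. The class name VoiceInputHelper and its method names are only placeholders I made up for this sketch, not code I actually have, so I don't know if this is the right approach:

package com.example.com.proto1;

import android.app.Activity;
import android.content.Intent;
import android.speech.RecognizerIntent;
import java.util.ArrayList;

// Hypothetical helper class (the name and methods are placeholders for this sketch).
public class VoiceInputHelper {

    public static final int VOICE_RECOGNITION_REQUEST_CODE = 1234;

    // Launch the speech recognizer on behalf of whatever Activity calls this.
    public static void startVoiceRecognition(Activity activity, String prompt) {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, prompt);
        activity.startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
    }

    // Map the recognizer results onto activities, using the same keywords as above.
    public static void handleResults(Activity activity, ArrayList<String> matches) {
        if (matches == null) {
            return;
        }
        if (matches.contains("information")) {
            activity.startActivity(new Intent("android.intent.action.INFOSCREEN"));
        }
        if (matches.contains("home")) {
            activity.startActivity(new Intent("android.intent.action.MENU"));
        }
    }
}

Then the speak button in menu (or any other page) would just call VoiceInputHelper.startVoiceRecognition(menu.this, "Speech recognition demo"), and that activity's onActivityResult would pass the RecognizerIntent.EXTRA_RESULTS list to VoiceInputHelper.handleResults. Is something like that the right way to do it, or is there a better pattern for sharing this code?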