This is the error I am getting in Logcat:
04-23 08:00:07.524: E/AndroidRuntime(1384): FATAL EXCEPTION: main
04-23 08:00:07.524: E/AndroidRuntime(1384): java.lang.RuntimeException: Unable to start activity ComponentInfo{com.datumdroid.android.ocr.simple/com.datumdroid.android.ocr.simple.SimpleAndroidOCRActivity}: java.lang.NullPointerException
04-23 08:00:07.524: E/AndroidRuntime(1384): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2180)
04-23 08:00:07.524: E/AndroidRuntime(1384): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2230)
04-23 08:00:07.524: E/AndroidRuntime(1384): at android.app.ActivityThread.access$600(ActivityThread.java:141)
04-23 08:00:07.524: E/AndroidRuntime(1384): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1234)
04-23 08:00:07.524: E/AndroidRuntime(1384): at android.os.Handler.dispatchMessage(Handler.java:99)
04-23 08:00:07.524: E/AndroidRuntime(1384): at android.os.Looper.loop(Looper.java:137)
04-23 08:00:07.524: E/AndroidRuntime(1384): at android.app.ActivityThread.main(ActivityThread.java:5039)
04-23 08:00:07.524: E/AndroidRuntime(1384): at java.lang.reflect.Method.invokeNative(Native Method)
04-23 08:00:07.524: E/AndroidRuntime(1384): at java.lang.reflect.Method.invoke(Method.java:511)
04-23 08:00:07.524: E/AndroidRuntime(1384): at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:793)
04-23 08:00:07.524: E/AndroidRuntime(1384): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:560)
04-23 08:00:07.524: E/AndroidRuntime(1384): at dalvik.system.NativeStart.main(Native Method)
04-23 08:00:07.524: E/AndroidRuntime(1384): Caused by: java.lang.NullPointerException
04-23 08:00:07.524: E/AndroidRuntime(1384): at com.datumdroid.android.ocr.simple.SimpleAndroidOCRActivity.onCreate(SimpleAndroidOCRActivity.java:68)
04-23 08:00:07.524: E/AndroidRuntime(1384): at android.app.Activity.performCreate(Activity.java:5104)
04-23 08:00:07.524: E/AndroidRuntime(1384): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1080)
04-23 08:00:07.524: E/AndroidRuntime(1384): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2144)
Splash screen activity:
public class SplashAct extends Activity {

    /** Called when the activity is first created. */
    private final Handler mHandler = new Handler();
    private static final int duration = 1500;

    @Override
    protected void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.splash);
        mHandler.postDelayed(mPendingLauncherRunnable, SplashAct.duration);
    }

    @Override
    protected void onPause() {
        super.onPause();
        mHandler.removeCallbacks(mPendingLauncherRunnable);
    }

    private final Runnable mPendingLauncherRunnable = new Runnable() {
        public void run() {
            final Intent intent = new Intent(SplashAct.this, SimpleAndroidOCRActivity.class);
            startActivity(intent);
            finish();
        }
    };
}
Main activity:
public class SimpleAndroidOCRActivity extends Activity implements OnInitListener {

    public static final String PACKAGE_NAME = "com.datumdroid.android.ocr.simple";
    public static final String DATA_PATH = Environment
            .getExternalStorageDirectory().toString() + "/SimpleAndroidOCR/";

    // You should have the trained data file in assets folder
    // You can get them at:
    // http://code.google.com/p/tesseract-ocr/downloads/list
    public static final String lang = "eng";

    private static final String TAG = "SimpleAndroidOCR.java";

    protected Button _button, btnSpeak;
    // protected ImageView _image;
    protected EditText _field;
    protected String _path;
    protected boolean _taken;
    protected static final String PHOTO_TAKEN = "photo_taken";

    // tts
    private TextToSpeech tts;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // tts
        tts = new TextToSpeech(this, this);
        btnSpeak = (Button) findViewById(R.id.button1);
        btnSpeak.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View arg0) {
                speakOut();
            }
        });

        // ocr
        String[] paths = new String[] { DATA_PATH, DATA_PATH + "tessdata/" };
        for (String path : paths) {
            File dir = new File(path);
            if (!dir.exists()) {
                if (!dir.mkdirs()) {
                    Log.v(TAG, "ERROR: Creation of directory " + path + " on sdcard failed");
                    return;
                } else {
                    Log.v(TAG, "Created directory " + path + " on sdcard");
                }
            }
        }

        // lang.traineddata file with the app (in assets folder)
        // You can get them at:
        // http://code.google.com/p/tesseract-ocr/downloads/list
        // This area needs work and optimization
        if (!(new File(DATA_PATH + "tessdata/" + lang + ".traineddata")).exists()) {
            try {
                AssetManager assetManager = getAssets();
                InputStream in = assetManager.open("tessdata/eng.traineddata");
                //GZIPInputStream gin = new GZIPInputStream(in);
                OutputStream out = new FileOutputStream(DATA_PATH + "tessdata/eng.traineddata");
                // Transfer bytes from in to out
                byte[] buf = new byte[1024];
                int len;
                //while ((lenf = gin.read(buff)) > 0) {
                while ((len = in.read(buf)) > 0) {
                    out.write(buf, 0, len);
                }
                in.close();
                //gin.close();
                out.close();
                Log.v(TAG, "Copied " + lang + " traineddata");
            } catch (IOException e) {
                Log.e(TAG, "Was unable to copy " + lang + " traineddata " + e.toString());
            }
        }

        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);

        // _image = (ImageView) findViewById(R.id.image);
        _field = (EditText) findViewById(R.id.field);
        _button = (Button) findViewById(R.id.button);
        _button.setOnClickListener(new ButtonClickHandler());

        _path = DATA_PATH + "/ocr.jpg";
    }
    public class ButtonClickHandler implements View.OnClickListener {
        public void onClick(View view) {
            Log.v(TAG, "Starting Camera app");
            startCameraActivity();
        }
    }

    // Simple android photo capture:
    // http://labs.makemachine.net/2010/03/simple-android-photo-capture/
    protected void startCameraActivity() {
        File file = new File(_path);
        Uri outputFileUri = Uri.fromFile(file);
        final Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
        intent.putExtra(MediaStore.EXTRA_OUTPUT, outputFileUri);
        startActivityForResult(intent, 0);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        Log.i(TAG, "resultCode: " + resultCode);
        if (resultCode == -1) {
            onPhotoTaken();
        } else {
            Log.v(TAG, "User cancelled");
        }
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        outState.putBoolean(SimpleAndroidOCRActivity.PHOTO_TAKEN, _taken);
    }

    @Override
    protected void onRestoreInstanceState(Bundle savedInstanceState) {
        Log.i(TAG, "onRestoreInstanceState()");
        if (savedInstanceState.getBoolean(SimpleAndroidOCRActivity.PHOTO_TAKEN)) {
            onPhotoTaken();
        }
    }
    protected void onPhotoTaken() {
        _taken = true;

        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inSampleSize = 4;
        Bitmap bitmap = BitmapFactory.decodeFile(_path, options);

        try {
            ExifInterface exif = new ExifInterface(_path);
            int exifOrientation = exif.getAttributeInt(
                    ExifInterface.TAG_ORIENTATION,
                    ExifInterface.ORIENTATION_NORMAL);
            Log.v(TAG, "Orient: " + exifOrientation);

            int rotate = 0;
            switch (exifOrientation) {
                case ExifInterface.ORIENTATION_ROTATE_90:
                    rotate = 90;
                    break;
                case ExifInterface.ORIENTATION_ROTATE_180:
                    rotate = 180;
                    break;
                case ExifInterface.ORIENTATION_ROTATE_270:
                    rotate = 270;
                    break;
            }
            Log.v(TAG, "Rotation: " + rotate);

            if (rotate != 0) {
                // Getting width & height of the given image.
                int w = bitmap.getWidth();
                int h = bitmap.getHeight();
                // Setting pre rotate
                Matrix mtx = new Matrix();
                mtx.preRotate(rotate);
                // Rotating Bitmap
                bitmap = Bitmap.createBitmap(bitmap, 0, 0, w, h, mtx, false);
            }

            // Convert to ARGB_8888, required by tess
            bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
        } catch (IOException e) {
            Log.e(TAG, "Couldn't correct orientation: " + e.toString());
        }

        // _image.setImageBitmap( bitmap );

        Log.v(TAG, "Before baseApi");
        TessBaseAPI baseApi = new TessBaseAPI();
        baseApi.setDebug(true);
        baseApi.init(DATA_PATH, lang);
        baseApi.setImage(bitmap);
        String recognizedText = baseApi.getUTF8Text();
        baseApi.end();

        // You now have the text in recognizedText var, you can do anything with it.
        // We will display a stripped out trimmed alpha-numeric version of it (if lang is eng)
        // so that garbage doesn't make it to the display.
        Log.v(TAG, "OCRED TEXT: " + recognizedText);

        if (lang.equalsIgnoreCase("eng")) {
            recognizedText = recognizedText.replaceAll("[^a-zA-Z0-9]+", " ");
        }
        recognizedText = recognizedText.trim();

        if (recognizedText.length() != 0) {
            _field.setText(_field.getText().toString().length() == 0 ? recognizedText : _field.getText() + " " + recognizedText);
            _field.setSelection(_field.getText().toString().length());
        }

        // Cycle done.
    }
    // tts
    @Override
    public void onDestroy() {
        // Don't forget to shutdown tts!
        if (tts != null) {
            tts.stop();
            tts.shutdown();
        }
        super.onDestroy();
    }

    @Override
    public void onInit(int status) {
        if (status == TextToSpeech.SUCCESS) {
            int result = tts.setLanguage(Locale.US);
            if (result == TextToSpeech.LANG_MISSING_DATA
                    || result == TextToSpeech.LANG_NOT_SUPPORTED) {
                Log.e("TTS", "This Language is not supported");
            } else {
                btnSpeak.setEnabled(true);
                speakOut();
            }
        } else {
            Log.e("TTS", "Initilization Failed!");
        }
    }

    private void speakOut() {
        String text = _field.getText().toString();
        tts.speak(text, TextToSpeech.QUEUE_FLUSH, null);
    }
}
These are my class files. Technically the OCR part of the code comes from the sample project, so where am I going wrong? I get through the splash screen activity without any problem, and then I hit this NullPointerException. If I comment out the btnSpeak line and its View.OnClickListener, I get a super.onCreate error instead, and adding super.onCreate doesn't help either. I'm out of ideas.
By the way, button, button1 and field are the ids of the Button, the Button and the EditText respectively.
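For what it's worth, my understanding of the ordering Android expects inside onCreate is the minimal sketch below (the class name OrderingSketch is only illustrative; R.layout.main and R.id.button1 are the layout and id from my project): setContentView has to inflate the layout before findViewById can return the view, and super.onCreate should be called exactly once. I'm not sure whether that is what is biting me here.

import android.app.Activity;
import android.os.Bundle;
import android.widget.Button;

// Minimal sketch of the onCreate ordering, not my real activity.
// Assumes res/layout/main.xml contains a Button with the id "button1".
public class OrderingSketch extends Activity {

    private Button btnSpeak;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);              // called exactly once
        setContentView(R.layout.main);                   // inflate the layout first...
        btnSpeak = (Button) findViewById(R.id.button1);  // ...then look the view up;
                                                         // before setContentView this returns null
        btnSpeak.setEnabled(true);                       // only now is it safe to call methods on the view
    }
}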