I am developing an application that uses OCR for text detection. I took the source code from https://github.com/GautamGupta/Simple-Android-OCR. With the latest Android Studio, however, I cannot import TessBaseAPI. I tried adding tess-two to the project as a module, but it did not work. How can I get this running? The source code I wrote is shown below.
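Roughly, this is how I tried to wire tess-two in as a module (the module name and the dependency line are just my attempt from memory, so they may well be wrong):

settings.gradle:
include ':app', ':tess-two'

app/build.gradle:
dependencies {
    compile project(':tess-two')   // the tess-two library module copied into the project
}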
package com.acs.ocr;
import android.content.Intent;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import com.google.android.gms.appindexing.Action;
import com.google.android.gms.appindexing.AppIndex;
import com.google.android.gms.common.api.GoogleApiClient;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
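// NOTE: this is the import Android Studio cannot resolve; in tess-two the class
// lives in the com.googlecode.tesseract.android package.
// import com.googlecode.tesseract.android.TessBaseAPI;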
public class MainActivity extends AppCompatActivity {
public static final String PACKAGE_NAME = "com.datumdroid.ocr";
public static final String DATA_PATH = Environment
.getExternalStorageDirectory().toString() + "/SimpleAndroidOCR/";
public static final String lang = "eng";
private static final String TAG = "MainActivity.java";
protected Button _button;
// protected ImageView _image;
protected EditText _field;
protected String _path;
protected boolean _taken;
protected static final String PHOTO_TAKEN = "photo_taken";
/**
* ATTENTION: This was auto-generated to implement the App Indexing API.
* See https://g.co/AppIndexing/AndroidStudio for more information.
*/
private GoogleApiClient client;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
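// Create the working directory and its tessdata/ subdirectory on external storage;
// Tesseract expects <DATA_PATH>/tessdata/<lang>.traineddata to exist there.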
String[] paths = new String[]{DATA_PATH, DATA_PATH + "tessdata/"};
for (String path : paths) {
File dir = new File(path);
if (!dir.exists()) {
if (!dir.mkdirs()) {
Log.v(TAG, "ERROR: Creation of directory " + path + " on sdcard failed");
return;
} else {
Log.v(TAG, "Created directory " + path + " on sdcard");
}
}
}
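// Copy eng.traineddata from the app's assets into the tessdata directory on first run.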
if (!(new File(DATA_PATH + "tessdata/" + lang + ".traineddata")).exists()) {
try {
AssetManager assetManager = getAssets();
InputStream in = assetManager.open("tessdata/" + lang + ".traineddata");
//GZIPInputStream gin = new GZIPInputStream(in);
OutputStream out = new FileOutputStream(DATA_PATH
+ "tessdata/" + lang + ".traineddata");
// Transfer bytes from in to out
byte[] buf = new byte[1024];
int len;
//while ((lenf = gin.read(buff)) > 0) {
while ((len = in.read(buf)) > 0) {
out.write(buf, 0, len);
}
in.close();
//gin.close();
out.close();
Log.v(TAG, "Copied " + lang + " traineddata");
} catch (IOException e) {
Log.e(TAG, "Was unable to copy " + lang + " traineddata " + e.toString());
}
}
// _image = (ImageView) findViewById(R.id.image);
_field = (EditText) findViewById(R.id.field);
_button = (Button) findViewById(R.id.button);
_button.setOnClickListener(new ButtonClickHandler());
_path = DATA_PATH + "/ocr.jpg";
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
client = new GoogleApiClient.Builder(this).addApi(AppIndex.API).build();
}
@Override
public void onStart() {
super.onStart();
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
client.connect();
Action viewAction = Action.newAction(
Action.TYPE_VIEW, // TODO: choose an action type.
"Main Page", // TODO: Define a title for the content shown.
// TODO: If you have web page content that matches this app activity's content,
// make sure this auto-generated web page URL is correct.
// Otherwise, set the URL to null.
Uri.parse("http://host/path"),
// TODO: Make sure this auto-generated app URL is correct.
Uri.parse("android-app://com.acs.ocr/http/host/path")
);
AppIndex.AppIndexApi.start(client, viewAction);
}
@Override
public void onStop() {
super.onStop();
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
Action viewAction = Action.newAction(
Action.TYPE_VIEW, // TODO: choose an action type.
"Main Page", // TODO: Define a title for the content shown.
// TODO: If you have web page content that matches this app activity's content,
// make sure this auto-generated web page URL is correct.
// Otherwise, set the URL to null.
Uri.parse("http://host/path"),
// TODO: Make sure this auto-generated app URL is correct.
Uri.parse("android-app://com.acs.ocr/http/host/path")
);
AppIndex.AppIndexApi.end(client, viewAction);
client.disconnect();
}
public class ButtonClickHandler implements View.OnClickListener {
public void onClick(View view) {
Log.v(TAG, "Starting Camera app");
startCameraActivity();
}
}
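// Launch the system camera app and ask it to save the full-size photo to _path.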
protected void startCameraActivity() {
File file = new File(_path);
Uri outputFileUri = Uri.fromFile(file);
final Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
intent.putExtra(MediaStore.EXTRA_OUTPUT, outputFileUri);
startActivityForResult(intent, 0);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
Log.i(TAG, "resultCode: " + resultCode);
if (resultCode == RESULT_OK) {
onPhotoTaken();
} else {
Log.v(TAG, "User cancelled");
}
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
outState.putBoolean(MainActivity.PHOTO_TAKEN, _taken);
}
@Override
protected void onRestoreInstanceState(Bundle savedInstanceState) {
super.onRestoreInstanceState(savedInstanceState);
Log.i(TAG, "onRestoreInstanceState()");
if (savedInstanceState.getBoolean(MainActivity.PHOTO_TAKEN)) {
onPhotoTaken();
}
}
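// Decode the captured photo, fix its rotation using the EXIF data,
// convert it to ARGB_8888 and run Tesseract OCR on it.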
protected void onPhotoTaken() {
_taken = true;
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 4;
Bitmap bitmap = BitmapFactory.decodeFile(_path, options);
try {
ExifInterface exif = new ExifInterface(_path);
int exifOrientation = exif.getAttributeInt(
ExifInterface.TAG_ORIENTATION,
ExifInterface.ORIENTATION_NORMAL);
Log.v(TAG, "Orient: " + exifOrientation);
int rotate = 0;
switch (exifOrientation) {
case ExifInterface.ORIENTATION_ROTATE_90:
rotate = 90;
break;
case ExifInterface.ORIENTATION_ROTATE_180:
rotate = 180;
break;
case ExifInterface.ORIENTATION_ROTATE_270:
rotate = 270;
break;
}
Log.v(TAG, "Rotation: " + rotate);
if (rotate != 0) {
// Getting width & height of the given image.
int w = bitmap.getWidth();
int h = bitmap.getHeight();
// Setting pre rotate
Matrix mtx = new Matrix();
mtx.preRotate(rotate);
// Rotating Bitmap
bitmap = Bitmap.createBitmap(bitmap, 0, 0, w, h, mtx, false);
}
// Convert to ARGB_8888, required by tess
bitmap = bitmap.copy(Bitmap.Config.ARGB_8888, true);
} catch (IOException e) {
Log.e(TAG, "Couldn't correct orientation: " + e.toString());
}
// _image.setImageBitmap( bitmap );
Log.v(TAG, "Before baseApi");
TessBaseAPI baseApi = new TessBaseAPI();
baseApi.setDebug(true);
baseApi.init(DATA_PATH, lang);
baseApi.setImage(bitmap);
String recognizedText = baseApi.getUTF8Text();
baseApi.end();
// You now have the text in recognizedText var, you can do anything with it.
// We will display a stripped out trimmed alpha-numeric version of it (if lang is eng)
// so that garbage doesn't make it to the display.
Log.v(TAG, "OCRED TEXT: " + recognizedText);
if (lang.equalsIgnoreCase("eng")) {
recognizedText = recognizedText.replaceAll("[^a-zA-Z0-9]+", " ");
}
recognizedText = recognizedText.trim();
if (recognizedText.length() != 0) {
_field.setText(_field.getText().toString().length() == 0 ? recognizedText : _field.getText() + " " + recognizedText);
_field.setSelection(_field.getText().toString().length());
}
// Cycle done.
}
}
In this code, TessBaseAPI is not imported because Android Studio cannot resolve the class. How can this be fixed?