I'm trying to implement the Canny edge detection algorithm in my Android app using Tom Gibara's implementation (modified for Android). The app uses the camera to take a photo; the camera works fine and the photo is captured. The problem comes when I try to pass the photo to the algorithm and display the result: instead of processing the image, detecting the edges and showing them, the app just returns to the home screen. Below is the activity I have so far, followed by the LogCat output. I can provide any additional code that is needed. Can you help me?
/**
 * ShootActivity demonstrates capturing camera images
 * - user presses button to capture an image using the device camera
 */
public class ShootActivity extends Activity {

    public static final int ACTION_TAKE_PHOTO_B = 1;
    public String mCurrentPhotoPath;
    public static final String BITMAP_STORAGE_KEY = "viewbitmap";
    public static final String IMAGEVIEW_VISIBILITY_STORAGE_KEY = "imageviewvisibility";
    public ImageView mImageView;
    public Bitmap mImageBitmap;
    public static final String JPEG_FILE_PREFIX = "IMG_";
    public static final String JPEG_FILE_SUFFIX = ".jpg";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mImageView = (ImageView) findViewById(R.id.imageView1);
        mImageBitmap = null;
        Button picBtn = (Button) findViewById(R.id.btnIntend);
        setBtnListenerOrDisable(
                picBtn,
                mTakePicOnClickListener,
                MediaStore.ACTION_IMAGE_CAPTURE);
    }

    /**
     * Click listener that handles the user pressing the button to launch the camera
     */
    Button.OnClickListener mTakePicOnClickListener =
            new Button.OnClickListener() {
                public void onClick(View v) {
                    dispatchTakePictureIntent(ACTION_TAKE_PHOTO_B);
                }
            };

    /** Create a uniquely named temporary file for the captured JPEG */
    public File createImageFile() throws IOException {
        // Create an image file name
        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
        String imageFileName = JPEG_FILE_PREFIX + timeStamp + "_";
        //File albumF = getAlbumDir();
        File imageF = File.createTempFile(imageFileName, JPEG_FILE_SUFFIX);
        return imageF;
    }

    /** Create the photo file and remember its absolute path */
    public File setUpPhotoFile() throws IOException {
        File f = createImageFile();
        mCurrentPhotoPath = f.getAbsolutePath();
        return f;
    }

    /** Decode the captured photo, run edge detection on it and show the result */
    public void setPic() {
        /* There isn't enough memory to open up more than a couple of camera photos */
        /* So pre-scale the target bitmap into which the file is decoded */

        /* Get the size of the ImageView */
        int targetW = mImageView.getWidth();
        int targetH = mImageView.getHeight();

        /* Get the size of the image */
        BitmapFactory.Options bmOptions = new BitmapFactory.Options();
        bmOptions.inJustDecodeBounds = true;
        BitmapFactory.decodeFile(mCurrentPhotoPath, bmOptions);
        int photoW = bmOptions.outWidth;
        int photoH = bmOptions.outHeight;

        /* Figure out which way needs to be reduced less */
        int scaleFactor = 2;
        if ((targetW > 0) || (targetH > 0)) {
            scaleFactor = Math.min(photoW / targetW, photoH / targetH);
        }

        /* Set bitmap options to scale the image decode target */
        bmOptions.inJustDecodeBounds = false;
        bmOptions.inSampleSize = scaleFactor;
        bmOptions.inPurgeable = true;

        /* Decode the JPEG file into a Bitmap */
        mImageBitmap = BitmapFactory.decodeFile(mCurrentPhotoPath, bmOptions);

        /* Run the Canny edge detector on the decoded bitmap */
        CannyEdgeDetection detector = new CannyEdgeDetection();
        detector.setLowThreshold(0.5f);
        detector.setHighThreshold(1f);
        detector.setSourceImage(mImageBitmap);
        detector.process();
        Bitmap edges = detector.getEdgesImage();

        /* Associate the Bitmap to the ImageView */
        mImageView.setImageBitmap(edges);
        mImageView.setVisibility(View.VISIBLE);
    }

    /** Ask the media scanner to index the new photo so it appears in the gallery */
    public void galleryAddPic() {
        Intent mediaScanIntent = new Intent("android.intent.action.MEDIA_SCANNER_SCAN_FILE");
        File f = new File(mCurrentPhotoPath);
        Uri contentUri = Uri.fromFile(f);
        mediaScanIntent.setData(contentUri);
        this.sendBroadcast(mediaScanIntent);
    }

    /** Launch the camera app, telling it to write the full-size photo to our file */
    public void dispatchTakePictureIntent(int actionCode) {
        Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
        File f = null;
        try {
            f = setUpPhotoFile();
            mCurrentPhotoPath = f.getAbsolutePath();
            takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(f));
        } catch (IOException e) {
            e.printStackTrace();
            f = null;
            mCurrentPhotoPath = null;
        }
        startActivityForResult(takePictureIntent, 1102);
    }

    /** Process the full-size photo once the camera activity has returned */
    public void handleBigCameraPhoto() {
        if (mCurrentPhotoPath != null) {
            setPic();
            galleryAddPic();
            mCurrentPhotoPath = null;
        }
    }

    /**
     * Handle the user returning from capturing the image
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == 1102 && resultCode == Activity.RESULT_OK) {
            if (data != null) {
                handleBigCameraPhoto();
            }
        }
    }

    /** Keep the processed bitmap across configuration changes */
    @Override
    protected void onSaveInstanceState(Bundle outState) {
        outState.putParcelable(BITMAP_STORAGE_KEY, mImageBitmap);
        outState.putBoolean(IMAGEVIEW_VISIBILITY_STORAGE_KEY, (mImageBitmap != null));
        super.onSaveInstanceState(outState);
    }

    @Override
    protected void onRestoreInstanceState(Bundle savedInstanceState) {
        super.onRestoreInstanceState(savedInstanceState);
        mImageBitmap = savedInstanceState.getParcelable(BITMAP_STORAGE_KEY);
        mImageView.setImageBitmap(mImageBitmap);
        mImageView.setVisibility(
                savedInstanceState.getBoolean(IMAGEVIEW_VISIBILITY_STORAGE_KEY)
                        ? ImageView.VISIBLE : ImageView.INVISIBLE);
    }

    /** Check whether any activity on the device can handle the given intent action */
    public static boolean isIntentAvailable(Context context, String action) {
        final PackageManager packageManager = context.getPackageManager();
        final Intent intent = new Intent(action);
        List<ResolveInfo> list =
                packageManager.queryIntentActivities(intent, PackageManager.MATCH_DEFAULT_ONLY);
        return list.size() > 0;
    }

    /** Wire up the button if the camera intent can be handled, otherwise disable it */
    public void setBtnListenerOrDisable(
            Button btn,
            Button.OnClickListener onClickListener,
            String intentName) {
        if (isIntentAvailable(this, intentName)) {
            btn.setOnClickListener(onClickListener);
        } else {
            btn.setText(getText(R.string.cannot).toString() + " " + btn.getText());
            btn.setClickable(false);
        }
    }
}
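For reference, here is a minimal sketch of the CannyEdgeDetection interface as setPic() uses it. The real class is my port of Tom Gibara's CannyEdgeDetector with Bitmap in place of BufferedImage; the field names and default values in this sketch are placeholders, and I can post the full class if it helps:

import android.graphics.Bitmap;

/* Sketch only -- not the full port of the detector. */
public class CannyEdgeDetection {

    private Bitmap sourceImage;   // input photo supplied by the caller
    private Bitmap edgesImage;    // edge map produced by process()
    private float lowThreshold = 2.5f;   // placeholder default
    private float highThreshold = 7.5f;  // placeholder default

    public void setLowThreshold(float threshold) {
        this.lowThreshold = threshold;
    }

    public void setHighThreshold(float threshold) {
        this.highThreshold = threshold;
    }

    public void setSourceImage(Bitmap image) {
        // Line 75 of my actual class is inside this method, which is where the
        // NullPointerException in the LogCat below points.
        this.sourceImage = image;
    }

    public void process() {
        // Gaussian smoothing, gradient computation, non-maximum suppression and
        // hysteresis thresholding, as in Tom Gibara's original detector (elided here).
    }

    public Bitmap getEdgesImage() {
        return edgesImage;
    }
}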
This is the LogCat output from when the app crashes and returns to the home screen:

03-27 02:47:37.310: E/AndroidRuntime(6513): FATAL EXCEPTION: main
03-27 02:47:37.310: E/AndroidRuntime(6513): java.lang.RuntimeException: Failure delivering result ResultInfo{who=null, request=1102, result=-1, data=null} to activity {com.example.f_y_p/com.example.f_y_p.ShootActivity}: java.lang.NullPointerException
03-27 02:47:37.310: E/AndroidRuntime(6513): at android.app.ActivityThread.deliverResults(ActivityThread.java:3182)
03-27 02:47:37.310: E/AndroidRuntime(6513): at android.app.ActivityThread.handleSendResult(ActivityThread.java:3225)
03-27 02:47:37.310: E/AndroidRuntime(6513): at android.app.ActivityThread.access$1100(ActivityThread.java:140)
03-27 02:47:37.310: E/AndroidRuntime(6513): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1275)
03-27 02:47:37.310: E/AndroidRuntime(6513): at android.os.Handler.dispatchMessage(Handler.java:99)
03-27 02:47:37.310: E/AndroidRuntime(6513): at android.os.Looper.loop(Looper.java:137)
03-27 02:47:37.310: E/AndroidRuntime(6513): at android.app.ActivityThread.main(ActivityThread.java:4898)
03-27 02:47:37.310: E/AndroidRuntime(6513): at java.lang.reflect.Method.invokeNative(Native Method)
03-27 02:47:37.310: E/AndroidRuntime(6513): at java.lang.reflect.Method.invoke(Method.java:511)
03-27 02:47:37.310: E/AndroidRuntime(6513): at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1006)
03-27 02:47:37.310: E/AndroidRuntime(6513): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:773)
03-27 02:47:37.310: E/AndroidRuntime(6513): at dalvik.system.NativeStart.main(Native Method)
03-27 02:47:37.310: E/AndroidRuntime(6513): Caused by: java.lang.NullPointerException
03-27 02:47:37.310: E/AndroidRuntime(6513): at com.example.f_y_p.CannyEdgeDetection.setSourceImage(CannyEdgeDetection.java:75)
03-27 02:47:37.310: E/AndroidRuntime(6513): at com.example.f_y_p.ShootActivity.setPic(ShootActivity.java:114)
03-27 02:47:37.310: E/AndroidRuntime(6513): at com.example.f_y_p.ShootActivity.handleBigCameraPhoto(ShootActivity.java:153)
03-27 02:47:37.310: E/AndroidRuntime(6513): at com.example.f_y_p.ShootActivity.onActivityResult(ShootActivity.java:166)
03-27 02:47:37.310: E/AndroidRuntime(6513): at android.app.Activity.dispatchActivityResult(Activity.java:5390)
03-27 02:47:37.310: E/AndroidRuntime(6513): at android.app.ActivityThread.deliverResults(ActivityThread.java:3178)
03-27 02:47:37.310: E/AndroidRuntime(6513): ... 11 more
03-27 02:47:48.535: I/Process(6513): Sending signal. PID: 6513 SIG: 9