I am trying to freeze the camera preview on a SurfaceView, but I am getting the error mentioned below.
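By "freeze" I mean stopping the preview so that the last frame stays visible on the SurfaceView while I keep a copy of its data for scanning. Condensed from the full code below, the freeze path in the activity amounts to this (a minimal sketch; the complete classes follow):

// Condensed sketch of the intended freeze path (full code below).
private void freezePreview() {
    preview.stopPreview();         // cancels autofocus and calls Camera.stopPreview()
    preview.PreviewCallbackScan(); // re-registers the preview callback
}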
**My Preview class**
package com.example.android;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.hardware.Camera.Size;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.WindowManager;
import android.widget.Toast;
public class Preview extends SurfaceView implements SurfaceHolder.Callback {
private boolean DEBUG = true;
private static final String TAG = Preview.class.getSimpleName();
private static long AUTO_FOCUS_INTERVAL = 1500;
public static final int CMD_SCAN = 1;
public static final int CMD_IMAGE_COPIED = 2;
private SurfaceHolder mHolder;
private Handler mHandler;
Camera mCamera;
Size mPreviewSize;
ScanningHandler mPreviewHandler;
Thread mPreviewThread;
AtomicBoolean mPreviewThreadRun = new AtomicBoolean(false);
private int angle;
private Thread mAutofocusThread;
private Boolean mAutoFocus;
private byte[] mLastFrameCopy;
private FrameReceiver mFrameReceiver;
private Size mFramePreviewSize;
public interface FrameReceiver {
public void onFrameReceived(byte[] frameBuffer, Size framePreviewSize);
}
public Preview(Context context) {
this(context, null);
}
public Preview(Context context, AttributeSet attrs) {
super(context, attrs);
mHandler = new Handler();
mHolder = getHolder();
mHolder.addCallback(this);
// this is needed for old Android versions
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
private Size getOptimalSize(List<Size> sizes, int w, int h) {
final double ASPECT_TOLERANCE = 0.2;
double targetRatio = (double) w / h;
if (DEBUG)
Log.d(TAG, "target view size: " + w + "x" + h + ", target ratio="
+ targetRatio);
if (sizes == null)
return null;
Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
int targetHeight = h;
int targetWidth = w;
// Try to find a size that matches the aspect ratio and fits the view
for (Size size : sizes) {
double ratio = (double) size.width / size.height;
boolean fitToView = size.width <= w && size.height <= h;
if (DEBUG)
Log.d(TAG, "Supported preview size: " + size.width + "x"
+ size.height + ", ratio=" + ratio + ", fitToView="
+ fitToView);
if (!fitToView) {
// we cannot use a preview size bigger than the surface dimensions; skip it
continue;
}
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) {
continue;
}
double hypot = Math.hypot(size.height - targetHeight, size.width
- targetWidth);
if (hypot < minDiff) {
optimalSize = size;
minDiff = hypot;
}
}
if (optimalSize == null) {
if (DEBUG)
Log.d(TAG,
"Cannot find a preview size that matches the aspect ratio; ignoring the aspect ratio requirement");
minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (size.width > w || size.height > h) {
// we cannot use a preview size bigger than the surface dimensions
continue;
}
double hypot = Math.hypot(size.height - targetHeight,
size.width - targetWidth);
if (hypot < minDiff) {
optimalSize = size;
minDiff = hypot;
}
}
}
if (optimalSize == null) {
throw new RuntimeException(
"Unable to determine optimal preview size");
}
if (DEBUG)
Log.d(TAG, "optimalSize.width=" + optimalSize.width
+ ", optimalSize.height=" + optimalSize.height);
return optimalSize;
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
if (mCamera == null) {
if (DEBUG)
Log.e(TAG, "mCamera == null !");
return;
}
Camera.Parameters params = mCamera.getParameters();
Display display = ((WindowManager) getContext().getSystemService(
Context.WINDOW_SERVICE)).getDefaultDisplay();
switch (display.getRotation()) {
case Surface.ROTATION_0:
angle = 90;
break;
case Surface.ROTATION_90:
angle = 0;
break;
case Surface.ROTATION_180:
angle = 270;
break;
case Surface.ROTATION_270:
angle = 180;
break;
default:
throw new AssertionError("Wrong surface rotation value");
}
setDisplayOrientation(params, angle);
if (mPreviewSize == null) {
// h and w get inverted on purpose
mPreviewSize = getOptimalSize(params.getSupportedPreviewSizes(),
width > height ? width : height, width > height ? height
: width);
}
params.setPreviewSize(mPreviewSize.width, mPreviewSize.height);
List<String> focusModes = params.getSupportedFocusModes();
if (focusModes != null
&& focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
}
mCamera.setParameters(params);
try {
mCamera.setPreviewDisplay(holder);
} catch (IOException e) {
Log.e(TAG, "Can't set preview display", e);
}
startPreview();
mFramePreviewSize = mCamera.getParameters().getPreviewSize();
int bitsPerPixel = 12;
mLastFrameCopy = new byte[mFramePreviewSize.height
* mFramePreviewSize.width * bitsPerPixel / 8];
PreviewCallbackScan();
mPreviewThreadRun.set(true);
scan();
}
class AutoFocusRunnable implements Runnable {
@Override
public void run() {
if (mAutoFocus) {
if (mCamera != null) {
try {
mCamera.autoFocus(new AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success,
Camera camera) {
mHandler.postDelayed(AutoFocusRunnable.this,
AUTO_FOCUS_INTERVAL);
}
});
} catch (Exception e) {
Log.w(TAG, "Unable to auto-focus", e);
mHandler.postDelayed(AutoFocusRunnable.this,
AUTO_FOCUS_INTERVAL);
}
}
}
}
};
void startAutofocus() {
mAutoFocus = true;
mAutofocusThread = new Thread(new AutoFocusRunnable(),
"Autofocus Thread");
mAutofocusThread.start();
}
void stopPreview() {
mAutoFocus = false;
if (mCamera != null)
mCamera.cancelAutoFocus();
mAutofocusThread = null;
if (mCamera != null)
mCamera.stopPreview();
}
void startPreview() {
if (mCamera != null) {
mCamera.startPreview();
startAutofocus();
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
if (mCamera == null) {
try {
mCamera = Camera.open();
} catch (RuntimeException e) {
Toast.makeText(
getContext(),
"Unable to connect to camera. "
+ "Perhaps it's being used by another app.",
Toast.LENGTH_LONG).show();
}
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
if (mCamera != null) {
synchronized (this) {
mCamera.setPreviewCallback(null);
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
}
private void setDisplayOrientation(Camera.Parameters params, int angle) {
try {
Method method = mCamera.getClass().getMethod(
"setDisplayOrientation", new Class[] { int.class });
if (method != null)
method.invoke(mCamera, new Object[] { angle });
} catch (Exception e) {
if (DEBUG)
Log.d(TAG,
"Can't call Camera.setDisplayOrientation on this device, trying another way");
if (angle == 90 || angle == 270)
params.set("orientation", "portrait");
else if (angle == 0 || angle == 180)
params.set("orientation", "landscape");
}
params.setRotation(angle);
}
public class PreviewThread extends Thread {
public PreviewThread(String string) {
super(string);
}
@Override
public void run() {
Looper.prepare();
Thread.currentThread().setPriority(MIN_PRIORITY);
mPreviewHandler = new ScanningHandler();
Looper.loop();
};
}
public void setFrameReceiver(FrameReceiver receiver) {
if (DEBUG)
Log.d(TAG, "set Frame Receiver");
mFrameReceiver = receiver;
}
private Object mLastFrameCopyLock = new Object();
public void copyLastFrame(byte[] frame) {
synchronized (mLastFrameCopyLock) {
if (DEBUG)
Log.d(TAG, "copying frame");
System.arraycopy(frame, 0, mLastFrameCopy, 0, frame.length);
}
mPreviewHandler.obtainMessage(CMD_IMAGE_COPIED).sendToTarget();
}
public byte[] getLastFrameCopy() {
synchronized (mLastFrameCopyLock) {
return mLastFrameCopy;
}
}
public void scan() {
if (DEBUG)
Log.d(TAG, "<<<<<<<<<<<<<< scan called >>>>>>>>>>>>>>>>");
removeAllMessages();
mPreviewHandler.obtainMessage(CMD_SCAN).sendToTarget();
}
/**
* @return the default angle of the camera
*/
public int getAngle() {
return angle;
}
public void PreviewCallbackScan() {
mCamera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (data == null) {
return;
}
copyLastFrame(data);
}
});
}
public class ScanningHandler extends Handler {
@Override
public void handleMessage(Message message) {
switch (message.what) {
case (CMD_SCAN):
if (mPreviewThreadRun.get()) {
mCamera.addCallbackBuffer(mLastFrameCopy);
break;
}
break;
case (CMD_IMAGE_COPIED):
if (mPreviewThreadRun.get()) {
if (DEBUG)
Log.d(TAG, "frame copied");
mFrameReceiver.onFrameReceived(getLastFrameCopy(),
mFramePreviewSize);
break;
}
break;
}
}
}
public void removeAllMessages() {
mPreviewHandler.removeMessages(CMD_SCAN);
mPreviewHandler.removeMessages(CMD_IMAGE_COPIED);
}
}
**My ScanActivity**
package com.example.android;
import java.util.concurrent.atomic.AtomicBoolean;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.graphics.YuvImage;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.view.Gravity;
import android.view.SurfaceView;
import android.view.View;
import android.webkit.WebView;
import android.widget.Toast;
import com.moodstocks.android.MoodstocksError;
import com.moodstocks.android.Result;
import com.moodstocks.android.ScannerSession;
import com.moodstocks.android.core.CameraManager;
public class ScanActivity extends Activity implements CameraManager.Listener,
ScannerSession.Listener, View.OnClickListener,
ProgressDialog.OnCancelListener {
// -----------------------------------
// Interface implemented by overlays
// -----------------------------------
public static interface Listener {
/* send a new result to Overlay */
public void onResult(ScannerSession session, Result result);
/* send any other information in a Bundle */
public void onStatusUpdate(Bundle status);
}
private AlertDialog ad;
// private QueryProgressDialog pd;
private AtomicBoolean activityRunning = new AtomicBoolean(false);
private Preview.FrameReceiver mreceiver;
private WebView webViewer;
// Enabled scanning types: configure it according to your needs.
// Here we allow image recognition, EAN-13, Datamatrix and QR code decoding.
// Feel free to add `EAN8` if you also want to decode EAN-8.
private int ScanOptions = Result.Type.IMAGE | Result.Type.EAN13
| Result.Type.QRCODE | Result.Type.DATAMATRIX;
public static final String TAG = "Main";
private ScannerSession session;
private Overlay overlay;
private View touch;
private Bundle status;
private ProgressDialog searching;
static final boolean DEBUG = true;
// Activates the local search.
static final boolean SEARCH_OBJECT_LOCAL = false;
// Activates the barcode scanning
static boolean SEARCH_OBJECT_BARCODE = true;
// Activates the scan search.
static boolean SEARCH_OBJECT_SCAN = true;
// Activates the snap search
static boolean SEARCH_OBJECT_SNAP = true;
// Activates the remote search.
static final boolean SEARCH_OBJECT_REMOTE = true;
private Handler handler;
private Preview preview;
Camera Camera;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
preview.mPreviewThread = preview.new PreviewThread("Preview Thread");
preview.mPreviewThread.start();
// initialize the overlay, which will display results and information
overlay = (Overlay) findViewById(R.id.overlay);
overlay.init();
// initialize the tap-on-screen
touch = findViewById(R.id.touch);
touch.setOnClickListener(this);
// get the camera preview surface
SurfaceView preview = (SurfaceView) findViewById(R.id.preview);
// Create a scanner session
try {
session = new ScannerSession(this, this, preview);
} catch (MoodstocksError e) {
e.log();
}
// set session options
// session.setOptions(ScanOptions);
mreceiver = new DemoFrameReceiver();
}
@Override
protected void onResume() {
super.onResume();
activityRunning.set(true);
// start scanning!
// session.resume();
// preview.setFrameReceiver(mreceiver);
// Send information to the overlay
status = new Bundle();
status.putBoolean("decode_ean_8", (ScanOptions & Result.Type.EAN8) != 0);
status.putBoolean("decode_ean_13",
(ScanOptions & Result.Type.EAN13) != 0);
status.putBoolean("decode_qrcode",
(ScanOptions & Result.Type.QRCODE) != 0);
status.putBoolean("decode_datamatrix",
(ScanOptions & Result.Type.DATAMATRIX) != 0);
overlay.onStatusUpdate(status);
}
@Override
protected void onPause() {
super.onPause();
session.pause();
}
@Override
protected void onDestroy() {
super.onDestroy();
session.close();
}
@Override
public void onBackPressed() {
if (overlay.result != null) {
overlay.onResult(session, null);
} else {
super.onBackPressed();
}
}
// -------------------------
// ScannerSession.Listener
// -------------------------
@Override
public void onScanComplete(Result result) {
if (result != null) {
// pause scanning session
session.pause();
// result found, send to overlay
overlay.onResult(session, result);
}
}
@Override
public void onScanFailed(MoodstocksError error) {
// in this sample code, we just log the errors.
error.log();
}
@Override
public void onApiSearchStart() {
// inform user
searching = ProgressDialog.show(this, "", "Searching...", true, true,
this);
}
@Override
public void onApiSearchComplete(Result result) {
searching.dismiss();
if (result != null) {
// pause scanning session
session.pause();
// result found, send to overlay
overlay.onResult(session, result);
} else {
// no result found, inform user
Toast t = Toast
.makeText(this, "No match found", Toast.LENGTH_SHORT);
t.setGravity(Gravity.CENTER_HORIZONTAL | Gravity.BOTTOM, 0, 200);
t.show();
}
}
@Override
public void onApiSearchFailed(MoodstocksError e) {
searching.dismiss();
// A problem occurred, e.g. there is no available network. Inform user:
Toast t = Toast.makeText(this, e.getMessage(), Toast.LENGTH_SHORT);
t.setGravity(Gravity.CENTER_HORIZONTAL | Gravity.BOTTOM, 0, 200);
t.show();
}
// ----------------------
// View.OnClickListener
// ----------------------
// Intercept tap-on-screen:
@Override
public void onClick(View v) {
if (v == touch) {
// session.resume();
}
}
private void stopScanning() {
if (DEBUG)
Log.d(TAG, "stop scanning");
preview.mPreviewThreadRun.set(false);
}
// ---------------------------------
// ProgressDialog.OnCancelListener
// ---------------------------------
// User cancelled snap
@Override
public void onCancel(DialogInterface dialog) {
if (dialog == this.searching) {
session.cancel();
}
}
private void freezePreview() {
// on old devices, freezing the preview only shows a black screen
if (DEBUG)
Log.d(TAG, "preview is freezed");
preview.stopPreview();
preview.PreviewCallbackScan();
}
private void processImageScan(final YuvImage yuv) {
if (DEBUG)
Log.d(TAG, "scan decode message");
// iqe.goScan();
// Message.obtain(iqe, IQE.CMD_DECODE, IQE.scan, 0, yuv).sendToTarget();
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
// TODO Auto-generated method stub
}
@Override
public void onPreviewInfoFound(int w, int h, boolean front_facing) {
// TODO Auto-generated method stub
}
@Override
public void onCameraOpenFailed(int e) {
// TODO Auto-generated method stub
}
class DemoFrameReceiver implements Preview.FrameReceiver {
/**
* Starts the continuous local search with the displayed frames.
*
* @param frameBuffer
* the frame's data as a {@code byte} array.
* @param framePreviewSize
* A {@link Size}, the frame dimensions.
*/
@Override
public void onFrameReceived(byte[] frameBuffer, Size framePreviewSize) {
if (!preview.mPreviewThreadRun.get()) {
return;
}
if (frameBuffer == null) {
if (DEBUG)
Log.d(TAG, "no picture");
return;
}
// 17 == ImageFormat.NV21, the default camera preview format
YuvImage yuvImage = new YuvImage(frameBuffer, 17,
framePreviewSize.width, framePreviewSize.height, null);
// analyze the picture.
processImageScan(yuvImage);
}
}
}
**Error log**
10-11 16:21:25.429: E/AndroidRuntime(4441): FATAL EXCEPTION: main
10-11 16:21:25.429: E/AndroidRuntime(4441): java.lang.RuntimeException: Unable to start activity ComponentInfo{com.example.android/com.example.android.ScanActivity}: java.lang.NullPointerException
10-11 16:21:25.429: E/AndroidRuntime(4441): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2517)
10-11 16:21:25.429: E/AndroidRuntime(4441): at android.app.ActivityThread.handleLaunchActivity(ActivityThread.java:2574)
10-11 16:21:25.429: E/AndroidRuntime(4441): at android.app.ActivityThread.access$600(ActivityThread.java:162)
10-11 16:21:25.429: E/AndroidRuntime(4441): at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1413)
10-11 16:21:25.429: E/AndroidRuntime(4441): at android.os.Handler.dispatchMessage(Handler.java:99)
10-11 16:21:25.429: E/AndroidRuntime(4441): at android.os.Looper.loop(Looper.java:158)
10-11 16:21:25.429: E/AndroidRuntime(4441): at android.app.ActivityThread.main(ActivityThread.java:5789)
10-11 16:21:25.429: E/AndroidRuntime(4441): at java.lang.reflect.Method.invokeNative(Native Method)
10-11 16:21:25.429: E/AndroidRuntime(4441): at java.lang.reflect.Method.invoke(Method.java:525)
10-11 16:21:25.429: E/AndroidRuntime(4441): at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1027)
10-11 16:21:25.429: E/AndroidRuntime(4441): at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:843)
10-11 16:21:25.429: E/AndroidRuntime(4441): at dalvik.system.NativeStart.main(Native Method)
10-11 16:21:25.429: E/AndroidRuntime(4441): Caused by: java.lang.NullPointerException
10-11 16:21:25.429: E/AndroidRuntime(4441): at com.example.android.ScanActivity.onCreate(ScanActivity.java:93)
10-11 16:21:25.429: E/AndroidRuntime(4441): at android.app.Activity.performCreate(Activity.java:5195)
10-11 16:21:25.429: E/AndroidRuntime(4441): at android.app.Instrumentation.callActivityOnCreate(Instrumentation.java:1111)
10-11 16:21:25.429: E/AndroidRuntime(4441): at android.app.ActivityThread.performLaunchActivity(ActivityThread.java:2473)
10-11 16:21:25.429: E/AndroidRuntime(4441): ... 11 more
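As far as I can tell, line 93 of ScanActivity.java (the line the stack trace points at) corresponds to the first statement in onCreate() that touches the preview field:

// ScanActivity.onCreate() - the statement I believe ScanActivity.java:93 refers to
// (this is the first place the `preview` field is dereferenced).
preview.mPreviewThread = preview.new PreviewThread("Preview Thread");
preview.mPreviewThread.start();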