I have managed to get the camera preview working with custom filters (grayscale, hue, etc.). The filter is applied via the preview callback: I manipulate the RGB array, draw it back onto a canvas, and then show it on a SurfaceView.
The downside is that my FPS is very low. At this low FPS the app does too much work on the UI thread unless I move the processing into a background thread, so I tried doing the camera work inside an AsyncTask (my main goal is to keep the UI fully responsive even while the heavy work in the camera preview callback is running).
But even after switching to an AsyncTask it did not help much, so I am wondering: is my implementation wrong, or is the UI thread still affected even when an AsyncTask is used?
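(For reference, my current understanding is that Camera preview callbacks are delivered on the event thread of whichever thread called Camera.open(), so I have also been considering opening the camera from a HandlerThread, along the lines of the sketch below. This is only an illustrative sketch, not code I am running, and the names in it are made up.)

// Illustrative sketch only (not part of my current code): open the camera from a
// HandlerThread so that onPreviewFrame() is delivered on that thread's Looper
// rather than on the UI thread. "CameraThread" is just a made-up name.
HandlerThread cameraThread = new HandlerThread("CameraThread");
cameraThread.start();
Handler cameraHandler = new Handler(cameraThread.getLooper());
cameraHandler.post(new Runnable() {
    @Override
    public void run() {
        // Because Camera.open() is called here, preview callbacks registered on this
        // camera arrive on this background thread, so the YUV decode and filtering
        // would no longer block the UI thread.
        Camera camera = Camera.open();
        // ... setParameters(...), setPreviewDisplay(holder), setPreviewCallback(...), startPreview()
    }
});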
My code snippets are below:
CameraActivity.java
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    Log.d("ACTIVITY_LIFECYCLE", "CameraActivity: onCreate");
    setContentView(R.layout.camera_layout);
}

@TargetApi(Build.VERSION_CODES.HONEYCOMB)
@Override
protected void onResume() {
    Log.d("ACTIVITY_LIFECYCLE", "CameraActivity: onResume");
    if (preview == null) {
        preview = new CameraPreviewAsync(this, camera);
        preview.execute();
    }
    super.onResume();
}

@Override
protected void onPause() {
    Log.d("ACTIVITY_LIFECYCLE", "CameraActivity: onPause");
    if (preview != null) {
        preview.cancel(true);
        camera = preview.getCamera();
        if (camera != null) {
            camera.stopPreview();
            camera.setPreviewCallback(null);
            camera.release();
            camera = null;
            preview.setCamera(camera);
        }
        preview = null;
    }
    super.onPause();
}

@Override
public void onDestroy() {
    Log.d("ACTIVITY_LIFECYCLE", "CameraActivity: onDestroy");
    super.onDestroy();
}
CameraPreviewAsync.java:
private final String TAG = "CameraPreviewAsync";
private CameraActivity camAct;
private Camera mCamera;
private int cameraId;
private SurfaceView mSurfaceView;
private SurfaceHolder mHolder;
private boolean isPreviewRunning = false;
private int[] rgbints;
private int width;
private int height;
private Bitmap mBitmap;

public CameraPreviewAsync(CameraActivity act, Camera cam) {
    this.camAct = act;
    this.mCamera = cam;
    this.mSurfaceView = (SurfaceView) act.findViewById(R.id.surfaceView);
}

public void resetSurface() {
    if (mCamera != null) {
        mCamera.stopPreview();
        mCamera.setPreviewCallback(null);
        mCamera.release();
        mCamera = null;
    }
    int tempId = R.id.surfaceView;
    RelativeLayout buttonBar = (RelativeLayout) camAct.findViewById(R.id.buttonBar);
    ((RelativeLayout) camAct.findViewById(R.id.preview)).removeAllViews();
    SurfaceView newSurface = new SurfaceView(camAct);
    newSurface.setId(tempId);
    RelativeLayout.LayoutParams layParams = new RelativeLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
    layParams.alignWithParent = true;
    newSurface.setLayoutParams(layParams);
    ((RelativeLayout) camAct.findViewById(R.id.preview)).addView(newSurface);
    ((RelativeLayout) camAct.findViewById(R.id.preview)).addView(buttonBar);
}

@Override
protected void onPreExecute() {
    // Runs on the UI thread before doInBackground() is executed.
    Log.d(TAG, "onPreExecute");
    RelativeLayout.LayoutParams layParams = new RelativeLayout.LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
    layParams.alignWithParent = true;
    mSurfaceView.setLayoutParams(layParams);
    // Check the number of cameras on the device; if fewer than 2, remove the swap button.
    // (removeView() takes the View itself; removeViewAt() would expect a child index, not a resource id.)
    if (Camera.getNumberOfCameras() < 2) {
        ((RelativeLayout) camAct.findViewById(R.id.buttonBar)).removeView(camAct.findViewById(R.id.cameraSwap));
    }
    // Open the camera (still on the UI thread).
    cameraId = findBackFacingCamera();
    if (cameraId < 0) {
        cameraId = findFrontFacingCamera();
        if (cameraId < 0)
            Toast.makeText(camAct, "No camera found.", Toast.LENGTH_LONG).show();
        else
            mCamera = Camera.open(cameraId);
    } else {
        mCamera = Camera.open(cameraId);
    }
    // Invalidate the menu bar so the menu is shown appropriately.
    camAct.invalidateOptionsMenu();
    // Get the camera parameters and enable auto focus if it is supported.
    if (mCamera != null) {
        Camera.Parameters params = mCamera.getParameters();
        List<String> focusModes = params.getSupportedFocusModes();
        if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
            // set the focus mode
            params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
            // set Camera parameters
            mCamera.setParameters(params);
        }
    }
    super.onPreExecute();
}

@Override
protected Void doInBackground(Void... params) {
    // Runs on the background thread, but all it does is register the SurfaceHolder
    // callback and return immediately -- no long-running work actually happens here.
    Log.d(TAG, "doInBackground");
    mHolder = mSurfaceView.getHolder();
    mHolder.addCallback(surfaceCallback);
    return null;
}

@Override
protected void onPostExecute(Void values) {
    // Runs on the UI thread after doInBackground() has finished.
    Log.d(TAG, "onPostExecute");
}

@Override
protected void onCancelled() {
    super.onCancelled();
}
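
// For completeness: findBackFacingCamera() / findFrontFacingCamera(), which are called
// from onPreExecute() above, are simple lookups over Camera.getCameraInfo(). This is a
// sketch of what they do (my actual helpers may differ slightly in detail):
private int findBackFacingCamera() {
    Camera.CameraInfo info = new Camera.CameraInfo();
    for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
        Camera.getCameraInfo(i, info);
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
            return i; // index of the first back-facing camera
        }
    }
    return -1; // no back-facing camera found
}

private int findFrontFacingCamera() {
    Camera.CameraInfo info = new Camera.CameraInfo();
    for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
        Camera.getCameraInfo(i, info);
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            return i; // index of the first front-facing camera
        }
    }
    return -1; // no front-facing camera found
}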
/*
* ************************************************************************************
* SURFACEHOLDER CALLBACK
* ************************************************************************************
*/
SurfaceHolder.Callback surfaceCallback = new SurfaceHolder.Callback() {
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        Log.d(TAG, "surfaceCreated!!");
        if (CameraActivity.filterMode == CameraActivity.NORMAL_FILTER) {
            try {
                if (mCamera != null) {
                    // Attach the preview to the surface before starting it.
                    mCamera.setPreviewDisplay(holder);
                    mCamera.startPreview();
                } else {
                    Log.d(TAG, "CAMERA IS NULL in surfaceCreated!!");
                }
            } catch (IOException exception) {
                Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
            }
        } else {
            synchronized (mSurfaceView) {
                if (isPreviewRunning) {
                    return;
                } else {
                    mSurfaceView.setWillNotDraw(false);
                    if (mCamera != null) {
                        isPreviewRunning = true;
                        Camera.Parameters p = mCamera.getParameters();
                        List<Size> sizes = p.getSupportedPreviewSizes();
                        Size size = p.getPreviewSize();
                        width = size.width;
                        height = size.height;
                        p.setPreviewFormat(ImageFormat.NV21);
                        showSupportedCameraFormats(p);
                        mCamera.setParameters(p);
                        rgbints = new int[width * height];
                        mCamera.startPreview();
                        // Note: preview callbacks are delivered on the event thread that
                        // called Camera.open(); here the camera was opened in onPreExecute(),
                        // i.e. on the UI thread.
                        mCamera.setPreviewCallback(previewCallback);
                    }
                }
            }
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.d(TAG, "surfaceDestroyed!");
        if (CameraActivity.filterMode == CameraActivity.NORMAL_FILTER) {
            if (mCamera != null) {
                mCamera.stopPreview();
                isPreviewRunning = false;
            }
        } else {
            synchronized (mSurfaceView) {
                if (mCamera != null) {
                    mCamera.setPreviewCallback(null);
                    mCamera.stopPreview();
                    isPreviewRunning = false;
                }
            }
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        Log.d(TAG, "surfaceChanged!");
    }
};
/*
* ************************************************************************************
* CAMERA PREVIEW CALLBACK
* ************************************************************************************
*/
Camera.PreviewCallback previewCallback = new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (!isPreviewRunning)
            return;
        Canvas resCanvas = null;
        if (mHolder == null) {
            return;
        }
        try {
            synchronized (mHolder) {
                resCanvas = mHolder.lockCanvas(null);
                int resCanvasW = resCanvas.getWidth();
                int resCanvasH = resCanvas.getHeight();
                if (mBitmap == null) {
                    mBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
                }
                // Convert the NV21 preview frame into ARGB pixels (see decodeYUV sketch below).
                decodeYUV(rgbints, data, width, height);
                Canvas canvas = new Canvas(mBitmap);
                // Apply the selected filter to the RGB array.
                if (camAct.getCustomFilter().equalsIgnoreCase("NORMAL")) ; // don't change the rgb values
                if (camAct.getCustomFilter().equalsIgnoreCase("GRAYSCALE")) rgbints = grayscale(rgbints);
                if (camAct.getCustomFilter().equalsIgnoreCase("INVERT")) rgbints = invert(rgbints);
                if (camAct.getCustomFilter().equalsIgnoreCase("BOOSTRED")) rgbints = boostColor(rgbints, 1);
                if (camAct.getCustomFilter().equalsIgnoreCase("BOOSTGREEN")) rgbints = boostColor(rgbints, 2);
                if (camAct.getCustomFilter().equalsIgnoreCase("BOOSTBLUE")) rgbints = boostColor(rgbints, 3);
                if (camAct.getCustomFilter().equalsIgnoreCase("NOISE")) rgbints = noise(rgbints);
                if (camAct.getCustomFilter().equalsIgnoreCase("HUE")) rgbints = hue(rgbints);
                if (camAct.getCustomFilter().equalsIgnoreCase("SATURATION")) rgbints = saturation(rgbints);
                if (camAct.getCustomFilter().equalsIgnoreCase("ENGRAVE")) rgbints = engrave(rgbints);
                if (camAct.getCustomFilter().equalsIgnoreCase("EMBOSS")) rgbints = emboss(rgbints);
                // Draw the decoded image, centered on the canvas.
                canvas.drawBitmap(rgbints, 0, width, 0, 0, width, height, false, null);
                resCanvas.drawBitmap(mBitmap, resCanvasW - ((width + resCanvasW) >> 1), resCanvasH - ((height + resCanvasH) >> 1), null);
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Do this in a finally block so that if an exception is thrown
            // above, we don't leave the Surface in an inconsistent state.
            if (resCanvas != null) {
                mHolder.unlockCanvasAndPost(resCanvas);
            }
        }
    }
};
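For reference, decodeYUV() is the usual NV21-to-ARGB conversion and the filter helpers (grayscale() etc.) just rewrite the rgbints array in place. Below is a stripped-down sketch of that kind of code; my real helpers may differ in detail, but this is the per-pixel work that runs on every frame.

// Sketch of the NV21 -> ARGB_8888 decode used above (standard decodeYUV420SP-style loop).
void decodeYUV(int[] rgb, byte[] yuv, int width, int height) {
    final int frameSize = width * height;
    for (int j = 0, yp = 0; j < height; j++) {
        int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
        for (int i = 0; i < width; i++, yp++) {
            int y = (0xff & yuv[yp]) - 16;
            if (y < 0) y = 0;
            if ((i & 1) == 0) { // NV21 stores interleaved V/U for each 2x2 block
                v = (0xff & yuv[uvp++]) - 128;
                u = (0xff & yuv[uvp++]) - 128;
            }
            int y1192 = 1192 * y;
            int r = y1192 + 1634 * v;
            int g = y1192 - 833 * v - 400 * u;
            int b = y1192 + 2066 * u;
            r = Math.max(0, Math.min(r, 262143));
            g = Math.max(0, Math.min(g, 262143));
            b = Math.max(0, Math.min(b, 262143));
            rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
        }
    }
}

// Sketch of one of the filters (grayscale): average the channels and repack in place.
int[] grayscale(int[] pixels) {
    for (int i = 0; i < pixels.length; i++) {
        int p = pixels[i];
        int r = (p >> 16) & 0xff, g = (p >> 8) & 0xff, b = p & 0xff;
        int avg = (r + g + b) / 3;
        pixels[i] = 0xff000000 | (avg << 16) | (avg << 8) | avg;
    }
    return pixels;
}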
Any help is much appreciated! :) Thanks in advance!