1

我有一个应用程序处理来自相机的帧并显示在布局上,捕获和管理相机帧的类如下:

package org.opencv.face;

import java.io.IOException;
import java.util.List;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView";

public Camera              mCamera;
private SurfaceHolder       mHolder;
private int                 mFrameWidth;
private int                 mFrameHeight;
private byte[]              mFrame;
private boolean             mThreadRun;
private byte[]              mBuffer;


public SampleViewBase(Context context) {
    super(context);
    mHolder = getHolder();
    mHolder.addCallback(this);
    Log.i(TAG, "Instantiated new " + this.getClass());
}

public int getFrameWidth() {
    return mFrameWidth;
}

public int getFrameHeight() {
    return mFrameHeight;
}

public void setPreview() throws IOException {
        mCamera.setPreviewDisplay(null);
}

public boolean openCamera() {
    Log.i(TAG, "openCamera");
    releaseCamera();
    mCamera = Camera.open();
    if(mCamera == null) {
        Log.e(TAG, "Can't open camera!");
        return false;
    }

    mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
        public void onPreviewFrame(byte[] data, Camera camera) {
            synchronized (SampleViewBase.this) {
                System.arraycopy(data, 0, mFrame, 0, data.length);
                SampleViewBase.this.notify(); 
            }
            camera.addCallbackBuffer(mBuffer);
        }
    });
    return true;
}

public void releaseCamera() {
    Log.i(TAG, "releaseCamera");
    mThreadRun = false;
    synchronized (this) {
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.setPreviewCallback(null);
            mCamera.release();
            mCamera = null;
        }
    }
    onPreviewStopped();
}

public void setupCamera(int width, int height) {
    Log.i(TAG, "setupCamera");
    synchronized (this) {
        if (mCamera != null) {
            Camera.Parameters params = mCamera.getParameters();
            List<Camera.Size> sizes = params.getSupportedPreviewSizes();
            mFrameWidth = width;
            mFrameHeight = height;

            // selecting optimal camera preview size
            {
                int  minDiff = Integer.MAX_VALUE;
                for (Camera.Size size : sizes) {
                    if (Math.abs(size.height - height) < minDiff) {
                        mFrameWidth = size.width;
                        mFrameHeight = size.height;
                        minDiff = Math.abs(size.height - height);
                    }
                }
            }

            params.setPreviewSize(getFrameWidth(), getFrameHeight());

            List<String> FocusModes = params.getSupportedFocusModes();
            if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
            {
                params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
            }            

            mCamera.setParameters(params);

            /* Now allocate the buffer */
            params = mCamera.getParameters();
            int size = params.getPreviewSize().width * params.getPreviewSize().height;
            size  = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8 * 2;
            mBuffer = new byte[size];
            /* The buffer where the current frame will be copied */
            mFrame = new byte [size];
            mCamera.addCallbackBuffer(mBuffer);

            try {
                setPreview();
            } catch (IOException e) {
                Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
            }

            /* Notify that the preview is about to be started and deliver preview size */
            onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);

            /* Now we can start a preview */
            mCamera.startPreview();
        }
    }
}

public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
    Log.i(TAG, "surfaceChanged");
    setupCamera(width, height);
}

public void surfaceCreated(SurfaceHolder holder) {
    Log.i(TAG, "surfaceCreated");
    (new Thread(this)).start();
}

public void surfaceDestroyed(SurfaceHolder holder) {
    Log.i(TAG, "surfaceDestroyed");
    releaseCamera();
}


/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
protected abstract Bitmap processFrame(byte[] data);

/**
 * This method is called when the preview process is being started. It is called before the first frame delivered and processFrame is called
 * It is called with the width and height parameters of the preview process. It can be used to prepare the data needed during the frame processing.
 * @param previewWidth - the width of the preview frames that will be delivered via processFrame
 * @param previewHeight - the height of the preview frames that will be delivered via processFrame
 */
protected abstract void onPreviewStarted(int previewWidtd, int previewHeight);

/**
 * This method is called when preview is stopped. When this method is called the preview stopped and all the processing of frames already completed.
 * If the Bitmap object returned via processFrame is cached - it is a good time to recycle it.
 * Any other resources used during the preview can be released.
 */
protected abstract void onPreviewStopped();

public void run() {
    mThreadRun = true;
    Log.i(TAG, "Starting processing thread");
    while (mThreadRun) {
        Bitmap bmp = null;

        synchronized (this) {
            try {
                this.wait();
                bmp = processFrame(mFrame);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }

        if (bmp != null) {
            Canvas canvas = mHolder.lockCanvas();
            if (canvas != null) {
                canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
                mHolder.unlockCanvasAndPost(canvas);
            }
        }
    }
}

}

屏幕不显示任何内容,但将方法“setPreview()”替换为以下内容:

@TargetApi(11)
public void setPreview() throws IOException {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        mSf = new SurfaceTexture(10);
        mCamera.setPreviewTexture( mSf );
    }
    else
        mCamera.setPreviewDisplay(null);

现在可以正常工作,但仅适用于 3.0 以上的 Android 版本,而我想要的是适用于任何版本。

我怎样才能解决这个问题?

4

1 回答 1

0

如果你想接收预览回调,就必须实际显示预览,也就是用非空参数调用 setPreviewDisplay()。该 API 的设计意图就是强制你在屏幕上显示预览。任何通过创建一个从不真正渲染的虚拟 SurfaceView 来绕过这一限制的做法,都可能在某些设备上失效,或在下一次系统升级后失效。

在 Honeycomb 之前,可以把预览用的 SurfaceView 放到屏幕之外(例如把它的位置设到屏幕右边界以外),这样预览就不会被看到。这个漏洞后来被修复了。

幸运的是,在 Android 3.0(API 11)及以上版本你可以改用 setPreviewTexture(),而平台无法强制你真正把这个纹理显示出来。

于 2013-11-12T08:57:51.773 回答