I have the exact same problem and am also trying to figure it out. I'm trying to display the image without the SurfaceView taking up the entire screen. I read that you can't have your camera-handling class and your SurfaceView as separate, linked classes, so I crammed everything into one class.
So at the moment I have the camera displaying onto the SurfaceView, and I copy each frame's data into an mFrame variable. Basically I'm just trying to get mFrame processed (in a separate thread, in run()) and drawn back onto the SurfaceView.
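In case it helps to see that handoff on its own, here is a stripped-down sketch of the pattern I'm going for. The class and field names are purely illustrative (they are not part of the actual sample); the key point is that the preview callback and the worker thread must wait()/notify() on the same monitor object:

import android.hardware.Camera;

class FrameHandoffSketch implements Camera.PreviewCallback, Runnable {
    private final byte[] latestFrame;   // newest preview bytes, written by the camera thread
    private boolean running = true;

    FrameHandoffSketch(int bufferSize) {
        latestFrame = new byte[bufferSize];
    }

    // producer: called on the camera's callback thread
    public void onPreviewFrame(byte[] data, Camera camera) {
        synchronized (this) {
            System.arraycopy(data, 0, latestFrame, 0, data.length);
            this.notify();              // wake the worker waiting on this same monitor
        }
        camera.addCallbackBuffer(data); // hand the buffer back so the camera can reuse it
    }

    // consumer: the worker thread's loop
    public void run() {
        while (true) {
            synchronized (this) {
                if (!running) return;
                try {
                    this.wait();        // sleep until onPreviewFrame() notifies
                } catch (InterruptedException e) {
                    return;
                }
                // latestFrame now holds the newest frame: process it and draw the result here
            }
        }
    }

    void stop() {
        synchronized (this) {
            running = false;
            this.notify();              // wake the worker so it can exit
        }
    }
}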
Anyway, here is the code I have, in case you think it will help (please excuse the formatting, as my code is also a work in progress):
package org.opencv.samples.tutorial3;
import java.io.IOException;
import java.util.List;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;
import android.widget.TextView;
public class Sample3Native extends Activity implements SurfaceHolder.Callback, Runnable {
    // Camera variables
    private Camera cam;
    private boolean previewing = false;
    private SurfaceHolder mHolder;
    private SurfaceView mViewer;
    private int mFrameWidth;
    private int mFrameHeight;
    private byte[] mFrame;
    private boolean mThreadRun;
    private byte[] mBuffer;
    Sample3View viewclass;
    TextView text;
    int value = 0;
    //==========
    int framecount = 0;

    private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS:
                {
                    // Load native library after(!) OpenCV initialization
                    System.loadLibrary("native_sample");
                    // constructor for viewclass that works on frames
                    viewclass = new Sample3View();
                    //setContentView(mView);
                    //OpenCam();
                    //setContentView(R.layout.main);
                    // Create and set View
                    CameraConstruct();
                    Camopen();
                } break;
                default:
                {
                    super.onManagerConnected(status);
                } break;
            }
        }
    };

    public Sample3Native()
    {}

    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.main);
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack);
    }

    // Camera construction
    public void CameraConstruct()
    {
        mViewer = (SurfaceView) findViewById(R.id.camera_view);
        text = (TextView) findViewById(R.id.text);
        mHolder = mViewer.getHolder();
        mHolder.addCallback(this);
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    // calls camera screen setup when the screen surface changes
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
    {
        CamStartDisplay();
    }

    public void Camclose()
    {
        if (cam != null && previewing)
        {
            cam.setPreviewCallback(null);
            cam.stopPreview();
            cam.release();
            cam = null;
            previewing = false;
        }
        mThreadRun = false;
        viewclass.PreviewStopped();
    }

    // only open camera, and get frame data
    public void Camopen()
    {
        if (!previewing)
        {
            cam = Camera.open();
            if (cam != null)
            {
                // rotate display
                cam.setDisplayOrientation(90);
                // copy each viewed frame into mFrame
                cam.setPreviewCallbackWithBuffer(new PreviewCallback()
                {
                    public void onPreviewFrame(byte[] data, Camera camera)
                    {
                        // synchronize and notify on the outer Activity: that is the same
                        // monitor run() waits on. Using plain "this" here would only lock
                        // the anonymous callback object and the worker would never wake.
                        synchronized (Sample3Native.this)
                        {
                            System.arraycopy(data, 0, mFrame, 0, data.length);
                            Sample3Native.this.notify();
                        }
                        //text.setText(Integer.toString(value++));
                        camera.addCallbackBuffer(mBuffer);
                    }
                });
            }
        } // if not previewing
    }

    // start preview
    public void CamStartDisplay()
    {
        synchronized (this)
        {
            if (cam != null)
            {
                // stop previewing till after settings is changed
                if (previewing == true)
                {
                    cam.stopPreview();
                    previewing = false;
                }

                Camera.Parameters p = cam.getParameters();
                // pick the first supported preview size
                for (Camera.Size s : p.getSupportedPreviewSizes())
                {
                    p.setPreviewSize(s.width, s.height);
                    mFrameWidth = s.width;
                    mFrameHeight = s.height;
                    break;
                }
                p.setPreviewSize(mFrameWidth, mFrameHeight);

                List<String> FocusModes = p.getSupportedFocusModes();
                if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
                {
                    p.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
                }
                cam.setParameters(p);

                // set the width and height for processing
                viewclass.setFrame(mFrameWidth, mFrameHeight);

                int size = mFrameWidth * mFrameHeight;
                size = size * ImageFormat.getBitsPerPixel(p.getPreviewFormat()) / 8;
                mBuffer = new byte[size];
                mFrame = new byte[size];
                cam.addCallbackBuffer(mBuffer);
                viewclass.PreviewStarted(mFrameWidth, mFrameHeight);

                // start display streaming
                try
                {
                    //cam.setPreviewDisplay(null);
                    cam.setPreviewDisplay(mHolder);
                    cam.startPreview();
                    previewing = true;
                }
                catch (IOException e)
                {
                    e.printStackTrace();
                }
            } // end of if cam != null
        } // synchronising
    }

    // thread gets started when the screen surface is created
    public void surfaceCreated(SurfaceHolder holder)
    {
        //Camopen();
        //CamStartDisplay();
        (new Thread(this)).start();
    }

    // called when the screen surface is stopped
    public void surfaceDestroyed(SurfaceHolder holder)
    {
        Camclose();
    }

    // this is the function that is run by the worker thread
    public void run()
    {
        mThreadRun = true;
        while (mThreadRun)
        {
            //text.setText(Integer.toString(value++));
            Bitmap bmp = null;
            synchronized (this)
            {
                try
                {
                    this.wait();
                    bmp = viewclass.processFrame(mFrame);
                }
                catch (InterruptedException e) {}
            }
            if (bmp != null)
            {
                Canvas canvas = mHolder.lockCanvas();
                if (canvas != null)
                {
                    canvas.drawBitmap(bmp, (canvas.getWidth() - mFrameWidth) / 2, (canvas.getHeight() - mFrameHeight) / 2, null);
                    mHolder.unlockCanvasAndPost(canvas);
                }
            } // if bmp != null
        } // while thread in run
    }
} // end class
The Sample3View class used here contains just the processFrame() function:
package org.opencv.samples.tutorial3;

import android.content.Context;
import android.graphics.Bitmap;
import android.widget.TextView;

class Sample3View {
    private int mFrameSize;
    private Bitmap mBitmap;
    private int[] mRGBA;
    private int frameWidth;
    private int frameHeight;
    private int count = 0;
    Sample3Native samp;

    // constructor
    public Sample3View()
    {
    }

    public void setFrame(int width, int height)
    {
        frameWidth = width;
        frameHeight = height;
    }

    public void PreviewStarted(int previewWidth, int previewHeight) {
        mFrameSize = previewWidth * previewHeight;
        mRGBA = new int[mFrameSize];
        mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
    }

    public void PreviewStopped() {
        if (mBitmap != null) {
            mBitmap.recycle();
            mBitmap = null;
        }
        mRGBA = null;
    }

    public Bitmap processFrame(byte[] data) {
        int[] rgba = mRGBA;
        FindFeatures(frameWidth, frameHeight, data, rgba);
        Bitmap bmp = mBitmap;
        bmp.setPixels(rgba, 0, frameWidth, 0, 0, frameWidth, frameHeight);
        //samp.setValue(count++);
        return bmp;
    }

    public native void FindFeatures(int width, int height, byte yuv[], int[] rgba);
}
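As a side note, FindFeatures() is the native (JNI) function from the OpenCV sample, but if you just want to check the rest of the pipeline without the native library, you could temporarily swap in a pure-Java conversion for processFrame(). This is only a sketch: it assumes the preview is in the default NV21 format, and the JPEG round trip makes it far too slow for real use, so treat it as a debugging stand-in rather than a replacement:

import java.io.ByteArrayOutputStream;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;

class YuvDebugHelper {
    // Converts one NV21 preview frame to a Bitmap using only framework classes.
    // Assumes the camera is delivering NV21 (the default preview format).
    static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) {
        YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
        ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
        yuv.compressToJpeg(new Rect(0, 0, width, height), 90, jpegStream);
        byte[] jpeg = jpegStream.toByteArray();
        return BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
    }
}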
So yeah, hopefully this helps. If I get the complete solution working I'll post it too. If you get there first, please post your solution as well! Enjoy.