In my application I get data from the camera, create a Bitmap object from it, and have to display it with OpenGL, so I have a Square class and a GlRenderer class. The problem is that it runs very slowly: each time it shows an image, then a black screen, then another image, and so on. Here are my classes.
Square class
package com.example.videochat;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.graphics.Bitmap;
import android.opengl.GLUtils;

public class Square {

    private FloatBuffer vertexBuffer;   // buffer holding the vertices
    private float vertices[] = {
        -1.0f, -1.0f, 0.0f,  // V1 - bottom left
        -1.0f,  1.0f, 0.0f,  // V2 - top left
         1.0f, -1.0f, 0.0f,  // V3 - bottom right
         1.0f,  1.0f, 0.0f   // V4 - top right
    };

    private Bitmap m_bitmap;

    private FloatBuffer textureBuffer;  // buffer holding the texture coordinates
    private float texture[] = {
        // Texture coordinates, one pair per vertex, in the same order as
        // the vertex array. Bitmaps are uploaded top row first, so t = 0
        // is the top of the image and t = 1 the bottom.
        0.0f, 1.0f,  // V1 - bottom left
        0.0f, 0.0f,  // V2 - top left
        1.0f, 1.0f,  // V3 - bottom right
        1.0f, 0.0f   // V4 - top right
    };

    /** The texture pointer */
    private int[] textures = new int[1];

    public Square(Bitmap bitmap) {
        m_bitmap = bitmap;
        // a float has 4 bytes, so we allocate 4 bytes per coordinate
        ByteBuffer byteBuffer = ByteBuffer.allocateDirect(vertices.length * 4);
        byteBuffer.order(ByteOrder.nativeOrder());
        // allocate the memory from the byte buffer
        vertexBuffer = byteBuffer.asFloatBuffer();
        // fill the vertexBuffer with the vertices
        vertexBuffer.put(vertices);
        // set the cursor position to the beginning of the buffer
        vertexBuffer.position(0);

        byteBuffer = ByteBuffer.allocateDirect(texture.length * 4);
        byteBuffer.order(ByteOrder.nativeOrder());
        textureBuffer = byteBuffer.asFloatBuffer();
        textureBuffer.put(texture);
        textureBuffer.position(0);
    }

    public void loadGLTexture(GL10 gl, Context context) {
        // generate one texture pointer...
        gl.glGenTextures(1, textures, 0);
        // ...and bind it to our array
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);

        // create nearest-filtered texture
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);

        // Other possible texture parameters, e.g. GL10.GL_CLAMP_TO_EDGE
        // gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_REPEAT);
        // gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_REPEAT);

        // Use Android's GLUtils to specify a two-dimensional texture image from our bitmap
        GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, m_bitmap, 0);

        // Clean up: the bitmap is no longer needed once it has been uploaded
        m_bitmap.recycle();
    }

    /** The draw method for the square with the GL context */
    public void draw(GL10 gl) {
        // bind the previously generated texture
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);

        // Point to our buffers
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);

        // Set the face winding
        gl.glFrontFace(GL10.GL_CW);

        // Point to our vertex and texture-coordinate buffers
        gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
        gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer);

        // Draw the vertices as a triangle strip
        gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, vertices.length / 3);

        // Disable the client state before leaving
        gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
    }
}
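Since loadGLTexture recycles the bitmap and uploads it only once, pushing a new camera frame onto the screen would have to go through the texture that already exists. For comparison, here is a minimal sketch of such a helper; updateTexture is a hypothetical method that is not part of the class above, and it assumes it runs on the GL thread, after loadGLTexture, with a bitmap of the same dimensions as the original upload.

    // Hypothetical helper, not part of the original class: overwrite the
    // pixels of the already-created texture instead of generating a new one.
    // Assumes the GL thread, a texture created by loadGLTexture(), and a
    // bitmap with the same dimensions as the first upload.
    public void updateTexture(GL10 gl, Bitmap bitmap) {
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
        // texSubImage2D reuses the existing texture storage, so nothing
        // is reallocated for each camera frame
        GLUtils.texSubImage2D(GL10.GL_TEXTURE_2D, 0, 0, 0, bitmap);
    }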
GlRenderer class
package com.example.videochat;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.graphics.Bitmap;
import android.opengl.GLU;
import android.opengl.GLSurfaceView.Renderer;

public class GlRenderer implements Renderer {

    private Square square;   // the square
    private Context context;

    /** Constructor to keep the handed-over context */
    public GlRenderer(Context context, Bitmap bitmap) {
        this.context = context;
        // initialise the square
        this.square = new Square(bitmap);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        // clear the screen and depth buffer
        gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);

        // reset the modelview matrix
        gl.glLoadIdentity();

        // Drawing
        gl.glTranslatef(0.0f, 0.0f, -5.0f); // move 5 units INTO the screen,
                                            // the same as moving the camera 5 units away
        // gl.glScalef(0.5f, 0.5f, 0.5f);   // scale the square to 50%,
                                            // otherwise it will be too large
        square.draw(gl);                    // draw the square
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        if (height == 0) {                   // prevent a divide by zero
            height = 1;                      // by making the height equal to one
        }
        gl.glViewport(0, 0, width, height);  // reset the current viewport
        gl.glMatrixMode(GL10.GL_PROJECTION); // select the projection matrix
        gl.glLoadIdentity();                 // reset the projection matrix

        // calculate the aspect ratio of the window
        GLU.gluPerspective(gl, 45.0f, (float) width / (float) height, 0.1f, 100.0f);

        gl.glMatrixMode(GL10.GL_MODELVIEW);  // select the modelview matrix
        gl.glLoadIdentity();                 // reset the modelview matrix
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // load the texture for the square
        square.loadGLTexture(gl, this.context);

        gl.glEnable(GL10.GL_TEXTURE_2D);         // enable texture mapping
        gl.glShadeModel(GL10.GL_SMOOTH);         // enable smooth shading
        gl.glClearColor(0.0f, 0.0f, 0.0f, 0.5f); // black background
        gl.glClearDepthf(1.0f);                  // depth buffer setup
        gl.glEnable(GL10.GL_DEPTH_TEST);         // enable depth testing
        gl.glDepthFunc(GL10.GL_LEQUAL);          // the type of depth testing to do
        // really nice perspective calculations
        gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_NICEST);
    }
}
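By default a GLSurfaceView redraws continuously. When new content only arrives with each camera frame, on-demand rendering is an option; a minimal sketch, assuming glSurfaceView is the view created in initGlSurfaceView below:

    // render only when requestRender() is called, instead of continuously;
    // setRenderMode() must be called after setRenderer()
    glSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    // ...later, whenever a new camera frame is ready:
    glSurfaceView.requestRender();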
And here is the relevant part of my main class:
public void initGlSurfaceView(Bitmap bmp, SurfaceView VideoCaptureBitmap) {
    glSurfaceView = new GLSurfaceView(this);
    glSurfaceView.setRenderer(new GlRenderer(this, bmp));
    setContentView(glSurfaceView);
}

private void startVideo() {
    if (m_Camera != null) return;
    SurfaceHolder videoCaptureViewHolder = null;
    try {
        m_Camera = Camera.open();
    } catch (RuntimeException e) {
        Log.e("CameraTest", "Camera open failed");
        return;
    }
    m_Camera.setErrorCallback(new ErrorCallback() {
        public void onError(int error, Camera camera) {
        }
    });

    Camera.Parameters parameters = m_Camera.getParameters();
    // parameters.setPreviewFrameRate(30);

    // log all preview sizes the camera supports
    List<Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
    Iterator<Size> supportedPreviewSizesIterator = supportedPreviewSizes.iterator();
    while (supportedPreviewSizesIterator.hasNext()) {
        Size tmpSize = supportedPreviewSizesIterator.next();
        Log.v("CameraTest", "supportedPreviewSize.width = " + tmpSize.width
                + ", supportedPreviewSize.height = " + tmpSize.height);
    }

    m_Camera.setParameters(parameters);
    if (null != m_VideoCaptureView)
        videoCaptureViewHolder = m_VideoCaptureView.getHolder();
    try {
        m_Camera.setPreviewDisplay(videoCaptureViewHolder);
    } catch (Throwable t) {
        // ignored
    }

    Log.v("CameraTest", "Camera PreviewFrameRate = " + m_Camera.getParameters().getPreviewFrameRate());

    // allocate three callback buffers, each sized for one preview frame
    Size previewSize = m_Camera.getParameters().getPreviewSize();
    int dataBufferSize = (int) (previewSize.height * previewSize.width
            * (ImageFormat.getBitsPerPixel(m_Camera.getParameters().getPreviewFormat()) / 8.0));
    m_Camera.addCallbackBuffer(new byte[dataBufferSize]);
    m_Camera.addCallbackBuffer(new byte[dataBufferSize]);
    m_Camera.addCallbackBuffer(new byte[dataBufferSize]);

    m_Camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
        private long timestamp = 0;

        public synchronized void onPreviewFrame(byte[] data, Camera camera) {
            // Log.v("CameraTest", "Time Gap = " + (System.currentTimeMillis() - timestamp));
            // int size = data.length;
            // Bitmap btmp = BitmapFactory.decodeByteArray(data, 0, size);
            // m_VideCaptureBitmap.setImageBitmap(btmp);

            // convert the NV21 preview frame to a JPEG, then decode it to a Bitmap
            Size previewSize = camera.getParameters().getPreviewSize();
            YuvImage yuvimage = new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null);
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            yuvimage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 80, baos);
            byte[] jdata = baos.toByteArray();
            Bitmap bmp = BitmapFactory.decodeByteArray(jdata, 0, jdata.length);
            // m_VideCaptureBitmap.setImageBitmap(bmp);

            // build a new GLSurfaceView and renderer for this frame and
            // make it the content view
            initGlSurfaceView(bmp, m_VideCaptureBitmap);
            // setContentView(glSurfaceView);

            Log.v("CameraTest", "Frame size = " + data.length);
            timestamp = System.currentTimeMillis();

            // hand the buffer back to the camera for reuse
            try {
                camera.addCallbackBuffer(data);
            } catch (Exception e) {
                Log.e("CameraTest", "addCallbackBuffer error");
            }
        }
    });

    try {
        m_Camera.startPreview();
    } catch (Throwable e) {
        m_Camera.release();
        m_Camera = null;
    }
}
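One thing I notice is that onPreviewFrame calls initGlSurfaceView for every frame, so each camera frame constructs a fresh GLSurfaceView, GlRenderer, and Square and swaps the content view, which seems consistent with the image-then-black-screen symptom. For comparison, here is a minimal sketch of a callback that reuses one surface created at startup; m_Renderer is a hypothetical field holding the renderer created once (e.g. in onCreate) with RENDERMODE_WHEN_DIRTY, and setNextFrame is a hypothetical GlRenderer method that stores the bitmap for onDrawFrame to upload via the hypothetical Square.updateTexture() sketched earlier:

    // Sketch only: assumes glSurfaceView and m_Renderer were created once,
    // not per frame, and that GlRenderer.setNextFrame(Bitmap) exists.
    public void onPreviewFrame(byte[] data, Camera camera) {
        Size previewSize = camera.getParameters().getPreviewSize();
        YuvImage yuvimage = new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        yuvimage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 80, baos);
        byte[] jdata = baos.toByteArray();
        Bitmap bmp = BitmapFactory.decodeByteArray(jdata, 0, jdata.length);

        m_Renderer.setNextFrame(bmp);   // hypothetical: hand the frame to the existing renderer
        glSurfaceView.requestRender();  // redraw once with the new frame

        camera.addCallbackBuffer(data); // hand the buffer back to the camera
    }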
Can anyone help me? Thanks and regards.