I have a class called MarkerCustom. MarkerCustom has a constructor that takes three variables of different types:
public MarkerCustom(int myInt, String myString, Bitmap myBitmap) {
From my main activity I want to load a GLSurfaceView that is handed an ArrayList of every MarkerCustom I want to load into the GLSurface, along with the data for each instance that gets passed into the MarkerCustom constructor.
Let's call the ArrayList myMarkers; I need myMarkers to look something like this:
myMarkers[0] = [1, "hello", bitmapData1]
myMarkers[1] = [66, "johnHandy", bitmapData2]
I'm still fairly new to Java, and its casting etc. confuses me a bit, coming from AS3.
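To make that concrete, here is roughly what I think building the list looks like in Java, using the simplified three-argument constructor from above (my real class further down takes more parameters, and bitmap1/bitmap2 are just placeholder Bitmap objects):

// Sketch only: bitmap1/bitmap2 are placeholder Bitmaps, and the constructor
// signature is the simplified example from above.
ArrayList<MarkerCustom> myMarkers = new ArrayList<MarkerCustom>();
myMarkers.add(new MarkerCustom(1, "hello", bitmap1));
myMarkers.add(new MarkerCustom(66, "johnHandy", bitmap2));

// Entries come back out with get() rather than square brackets:
MarkerCustom first = myMarkers.get(0);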
EDIT
So, following AKhill's answer below, I edited my GLSurfaceView to accept the ArrayList, as shown below. But I now need to create each MarkerCustom listed in that ArrayList from the constructor, in a way that lets me get at them in onSurfaceCreated and onDrawFrame of the GLSurfaceView. See those methods and the for loops/comments in the class below; my best guess at what those loops should look like follows after the class:
import java.util.ArrayList;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.opengl.GLSurfaceView;
import android.opengl.GLSurfaceView.Renderer;
import android.opengl.GLU;
import android.view.SurfaceHolder;

public class GLLayer extends GLSurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback, Renderer {

    private Context context;
    ArrayList<MarkerCustom> locationTags;
    private PhoneOrientation phoneOri;

    public GLLayer(Context context, int orientation, ArrayList<MarkerCustom> custMarkers) {
        super(context);
        locationTags = custMarkers;
        this.context = context;
        phoneOri = new PhoneOrientation(context); // sensor manager and interpreter

        for (int i = 0; i < locationTags.size(); i++) {
            // Need to create an instance of each MarkerCustom here,
            // something like: MarkerCustom locationTags[i] = new MarkerCustom(its params);
        }

        this.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
        this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
        this.setRenderer(this);
        phoneOri.start(context, orientation);
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        gl.glEnable(GL10.GL_TEXTURE_2D);
        gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
        gl.glLoadIdentity();
        GLU.gluLookAt(gl, 0, 0, 0, 0, 0, 0, 0, 0, 0);

        float floatMat[] = phoneOri.getMatrix();
        gl.glMatrixMode(GL10.GL_MODELVIEW);
        gl.glLoadMatrixf(floatMat, 0);

        for (int i = 0; i < locationTags.size(); i++) {
            gl.glPushMatrix();
            //locationTags[i].draw(gl);
            gl.glLoadMatrixf(floatMat, 0);
        }
        gl.glPushMatrix();
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        if (height == 0) {
            height = 1;
        }
        float ratio = (float) width / height;
        gl.glViewport(0, 0, width, height);
        gl.glMatrixMode(GL10.GL_PROJECTION);
        gl.glLoadIdentity();
        GLU.gluPerspective(gl, 35.0f, (float) width / (float) height, 5.0f, 200.0f);
        gl.glMatrixMode(GL10.GL_MODELVIEW);
        gl.glLoadIdentity();
        GLU.gluLookAt(gl, 0, 1.0f, 5.0f, 0, 0, 0, 0, 1.0f, 0);
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig arg1) {
        for (int i = 0; i < locationTags.size(); i++) {
            // call each MarkerCustom's loadGLTexture method
            //locationTags[i].loadGLTexture(gl, this.context);
        }
        gl.glEnable(GL10.GL_TEXTURE_2D);
        gl.glShadeModel(GL10.GL_SMOOTH);
        gl.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
        gl.glClearDepthf(1.0f);
        gl.glEnable(GL10.GL_DEPTH_TEST);
        gl.glDepthFunc(GL10.GL_LEQUAL);
        gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_SMOOTH);
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
    }
}
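For what it's worth, this is my guess at what the commented-out loops are supposed to become, using ArrayList's get() instead of the AS3-style square brackets (untested, so the matrix handling may well be off):

@Override
public void onSurfaceCreated(GL10 gl, EGLConfig arg1) {
    // Guess: load each marker's texture; locationTags already holds MarkerCustom objects
    for (int i = 0; i < locationTags.size(); i++) {
        locationTags.get(i).loadGLTexture(gl, this.context);
    }
    // ... rest of the GL setup as above ...
}

@Override
public void onDrawFrame(GL10 gl) {
    // ... same clear / orientation-matrix setup as above ...
    float[] floatMat = phoneOri.getMatrix();
    gl.glMatrixMode(GL10.GL_MODELVIEW);
    for (int i = 0; i < locationTags.size(); i++) {
        gl.glPushMatrix();
        gl.glLoadMatrixf(floatMat, 0);
        locationTags.get(i).draw(gl);  // draw each marker
        gl.glPopMatrix();              // balance the push before the next marker
    }
}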
FYI, here is my MarkerCustom class:
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.graphics.Bitmap;
import android.opengl.GLUtils;

public class MarkerCustom {

    public float xPos;
    public float yPos;
    public float zPos;
    public float yaw;
    public float pitch;
    public Bitmap tagImage;

    private FloatBuffer vertexBuffer;   // buffer holding the vertices
    private float vertices[] = {
            0.0f, -10.0f, -10.0f,   // V1 - bottom left
            0.0f, -10.0f,  10.0f,   // V2 - top left
            0.0f,  10.0f, -10.0f,   // V3 - bottom right
            0.0f,  10.0f,  10.0f    // V4 - top right
    };

    private FloatBuffer textureBuffer;  // buffer holding the texture coordinates
    private float texture[] = {
            // Mapping coordinates for the vertices
            0.0f, 1.0f,     // top left     (V2)
            0.0f, 0.0f,     // bottom left  (V1)
            1.0f, 1.0f,     // top right    (V4)
            1.0f, 0.0f      // bottom right (V3)
    };

    public MarkerCustom(float x, float y, float z, float yawAngle, float pitchAngle, Bitmap bitmap) {
        xPos = x;
        yPos = y;
        zPos = z;
        yaw = yawAngle;
        pitch = pitchAngle;
        tagImage = bitmap;

        ByteBuffer byteBuffer = ByteBuffer.allocateDirect(vertices.length * 4);
        byteBuffer.order(ByteOrder.nativeOrder());
        vertexBuffer = byteBuffer.asFloatBuffer();
        vertexBuffer.put(vertices);
        vertexBuffer.position(0);

        byteBuffer = ByteBuffer.allocateDirect(texture.length * 4);
        byteBuffer.order(ByteOrder.nativeOrder());
        textureBuffer = byteBuffer.asFloatBuffer();
        textureBuffer.put(texture);
        textureBuffer.position(0);
    }

    /** The texture pointer */
    private int[] textures = new int[1];

    public void loadGLTexture(GL10 gl, Context context) {
        // loading texture
        // Enable blending using premultiplied alpha.
        gl.glEnable(GL10.GL_BLEND);
        gl.glBlendFunc(GL10.GL_ONE, GL10.GL_ONE_MINUS_SRC_ALPHA);

        // generate one texture pointer
        gl.glGenTextures(1, textures, 0);
        // ...and bind it to our array
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);

        // create nearest filtered texture
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);

        // Use Android GLUtils to specify a two-dimensional texture image from our bitmap
        GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, tagImage, 0);

        // Clean up
        tagImage.recycle();
    }

    public void draw(GL10 gl) {
        // bind the previously generated texture
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);

        // Point to our buffers
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);

        // Set the face rotation
        gl.glFrontFace(GL10.GL_CW);

        // Point to our vertex buffer
        gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
        gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer);

        // Draw the vertices as triangle strip
        gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, vertices.length / 3);

        // Disable the client state before leaving
        gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
    }
}
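And to tie it together, this is roughly how I picture the activity side, using the real six-argument constructor above (the position/angle values and bitmap1/bitmap2 are just placeholders, and orientation is whatever the camera orientation int ends up being):

// Activity side, roughly (placeholder values; bitmap1/bitmap2 would be real Bitmaps):
ArrayList<MarkerCustom> myMarkers = new ArrayList<MarkerCustom>();
myMarkers.add(new MarkerCustom(0f, 0f, -20f, 0f, 0f, bitmap1));
myMarkers.add(new MarkerCustom(5f, 0f, -40f, 90f, 0f, bitmap2));

GLLayer glView = new GLLayer(this, orientation, myMarkers);
setContentView(glView);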