OK, let me start by saying that I'm very new to OpenGL as it relates to Android; I've been reading up on it for a while, but I can't get over this hump in my coding.
Right now I'm trying to write a class that loads a texture from a .png file in my drawables folder onto a .obj model I made in Blender. I UV-unwrapped the model in Blender and then used the UV layout as a guide for the .png file.
The current problem is that I can get the texture onto the model, but it shows up as a single solid colour that seems to come from the texture file. Clearly I don't know enough about UV texturing in Blender, but there are so many different OpenGL libraries, and so much changes between PC and Android, that I'm having a hard time working out what applies where.
If anyone can help me out I'd really appreciate it. Here is some of the relevant code; I'll post more as needed:
From TextureLoader:
public Texture getTexture(GL10 gl, final int ref) throws IOException {
    // return the cached texture if this resource has already been loaded
    Texture tex = (Texture) table.get(ref);
    if (tex != null) {
        return tex;
    }
    Log.i("Textures:", "Loading texture: " + ref);
    tex = getTexture(gl, ref,
            GL10.GL_TEXTURE_2D, // target
            GL10.GL_RGBA,       // dst pixel format
            GL10.GL_LINEAR,     // min filter (unused)
            GL10.GL_NEAREST);   // mag filter
    table.put(ref, tex);
    return tex;
}
public Texture getTexture(GL10 gl, final int ref,
        int target,
        int dstPixelFormat,
        int minFilter,
        int magFilter) throws IOException {
    if (!sReady) {
        throw new RuntimeException("Texture Loader not prepared");
    }
    int srcPixelFormat = 0;
    // create the texture ID for this texture
    int id = createID(gl);
    Texture texture = new Texture(target, id);
    // bind this texture
    gl.glBindTexture(target, id);
    Bitmap bitmap = loadImage(ref);
    texture.setWidth(bitmap.getWidth());
    texture.setHeight(bitmap.getHeight());
    if (bitmap.hasAlpha()) {
        srcPixelFormat = GL10.GL_RGBA;
    } else {
        srcPixelFormat = GL10.GL_RGB;
    }
    // convert that image into a byte buffer of texture data
    // (srcPixelFormat and textureBuffer are only used by the commented-out glTexImage2D call below)
    ByteBuffer textureBuffer = convertImageData(bitmap);
    if (target == GL10.GL_TEXTURE_2D) {
        gl.glTexParameterf(target, GL10.GL_TEXTURE_MIN_FILTER, minFilter);
        gl.glTexParameterf(target, GL10.GL_TEXTURE_MAG_FILTER, magFilter);
    }
    // upload the bitmap to the currently bound texture
    GLUtils.texImage2D(target, 0, bitmap, 0);
    /*gl.glTexImage2D(target,
            0,
            dstPixelFormat,
            get2Fold(bitmap.getWidth()),
            get2Fold(bitmap.getHeight()),
            0,
            srcPixelFormat,
            GL10.GL_UNSIGNED_BYTE,
            textureBuffer);*/
    bitmap.recycle();
    return texture;
}
/**
 * Get the smallest power of 2 that is greater than or equal to the given number
 * (never less than 2)
 *
 * @param fold The target number
 * @return The power of 2
 */
private int get2Fold(int fold) {
    int ret = 2;
    while (ret < fold) {
        ret *= 2;
    }
    return ret;
}
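// Worked example (my addition, not part of the original class):
// get2Fold(1) == 2, get2Fold(300) == 512, get2Fold(512) == 512,
// i.e. sizes are rounded up to the next power of two, which OpenGL ES 1.x
// requires for texture dimensions.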
/**
 * Convert the bitmap into a byte buffer of texture data
 *
 * @param bitmap The image to convert to a texture
 * @return A buffer containing the data
 */
private ByteBuffer convertImageData(Bitmap bitmap) {
    ByteBuffer imageBuffer = null;
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    // compress the bitmap to PNG and copy the encoded bytes into a direct buffer
    bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream);
    byte[] data = stream.toByteArray();
    imageBuffer = ByteBuffer.allocateDirect(data.length);
    imageBuffer.order(ByteOrder.nativeOrder());
    imageBuffer.put(data, 0, data.length);
    imageBuffer.flip();
    return imageBuffer;
}
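// Note: the buffer returned above holds PNG-encoded bytes, which is fine while the
// GLUtils.texImage2D path is used (it uploads from the Bitmap directly, so the buffer
// is never read). If the commented-out gl.glTexImage2D call were re-enabled it would
// need raw pixel data instead; a sketch of that (my assumption, not part of the
// original class):
private ByteBuffer getRawPixelBuffer(Bitmap bitmap) {
    // enough room for every row of the bitmap (4 bytes per pixel for ARGB_8888)
    ByteBuffer pixelBuffer = ByteBuffer.allocateDirect(bitmap.getRowBytes() * bitmap.getHeight());
    pixelBuffer.order(ByteOrder.nativeOrder());
    // copies pixels in the bitmap's internal byte order (RGBA for ARGB_8888, matching GL_RGBA)
    bitmap.copyPixelsToBuffer(pixelBuffer);
    pixelBuffer.position(0);
    return pixelBuffer;
}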
/**
 * Creates an integer buffer to hold the specified number of ints
 * - strictly a utility method
 *
 * @param size how many ints to contain
 * @return the created IntBuffer
 */
protected IntBuffer createIntBuffer(int size) {
    ByteBuffer temp = ByteBuffer.allocateDirect(4 * size);
    temp.order(ByteOrder.nativeOrder());
    return temp.asIntBuffer();
}
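// createID(gl), referenced in getTexture above, isn't included in the snippet; a
// typical implementation (a sketch, the real method may differ) just asks GL for a
// fresh texture name using the helper above:
private int createID(GL10 gl) {
    IntBuffer tmp = createIntBuffer(1);
    // have OpenGL generate one unused texture name
    gl.glGenTextures(1, tmp);
    return tmp.get(0);
}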
private Bitmap loadImage(int ref) {
    Bitmap bitmap = null;
    // flip the bitmap vertically so it matches OpenGL's bottom-left texture origin
    Matrix flip = new Matrix();
    flip.postScale(1f, -1f);
    // This will tell the BitmapFactory to not scale based on the device's pixel density:
    BitmapFactory.Options opts = new BitmapFactory.Options();
    opts.inScaled = false;
    Bitmap temp = BitmapFactory.decodeResource(sContext.getResources(), ref, opts);
    bitmap = Bitmap.createBitmap(temp, 0, 0, temp.getWidth(), temp.getHeight(), flip, true);
    temp.recycle();
    return bitmap;
}
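For reference, this is roughly how I'd expect the loader to be called from the GL renderer; the field and resource names here (textureLoader, R.drawable.model_texture) are placeholders, and the sReady flag checked in getTexture would have to be set by whatever prepares the loader with a Context beforehand:
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    try {
        // look up (or load and cache) the texture for the drawable resource
        texture = textureLoader.getTexture(gl, R.drawable.model_texture);
    } catch (IOException e) {
        Log.e("Textures:", "Failed to load texture", e);
    }
}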
From Texture:
public void bind(GL10 gl) {
    gl.glBindTexture(target, textureID);
    gl.glEnable(GL10.GL_TEXTURE_2D);
}
And where it gets called:
public void render() {
    // Clear screen and depth buffer
    gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
    gl.glEnable(GL10.GL_LIGHTING);
    gl.glPushMatrix();
    gl.glTranslatef(0.0f, -1.2f, z); // Move down 1.2 units and into the screen by z
    gl.glRotatef(xrot, 1.0f, 0.0f, 0.0f); // X
    gl.glRotatef(yrot, 0.0f, 1.0f, 0.0f); // Y
    texture.bind(gl);
    model.draw(gl);
    gl.glPopMatrix();
}
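For what it's worth, my understanding is that binding the texture only has a visible effect if model.draw(gl) also hands OpenGL the UV coordinates from the .obj file; if the texture-coordinate array is never enabled or supplied, every vertex samples the same texel, which could look exactly like the flat colour taken from the texture file that I'm seeing. A rough sketch of what I believe the draw path needs (vertexBuffer, texCoordBuffer and vertexCount are placeholders for the buffers built from the .obj data, not my actual code):
// enable the vertex and texture-coordinate arrays for this draw call
gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);     // x, y, z per vertex
gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, texCoordBuffer); // u, v per vertex
gl.glDrawArrays(GL10.GL_TRIANGLES, 0, vertexCount);
// restore client state
gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);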