My render method currently looks like this:
void Renderer::render() {
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    checkGlError("glClear");
    EntityCamera* camera = (EntityCamera*) resourceManager_->getResource(GHOST_CAMERA);
    mat4 proj;
    Matrix::projection3D(proj, 45.0f,
        (float) nScreenWidth_ / nScreenHeight_, GHOST_NEAR_DISTANCE, GHOST_FAR_DISTANCE);
    mat4 view;
    Matrix::multiply(proj, camera_->getMatrix(), view);
    camera->extractPlanes(view);
    for (vector<Node*>::const_iterator it = renderArray_.begin();
            it != renderArray_.end(); it++) {
        Node* node = *it;
        if (!node->isRenderable()) {
            continue;
        }
        if (node->hasBV()
                && node->getBV()->isInFrustum(camera, node) == BoundingVolume::OUTSIDE) {
            LOGI("Node %s is outside :O", node->getName().c_str());
            continue;
        }
        EntityModel* entity = static_cast<EntityModel*>(
            resourceManager_->getResource((*it)->getEntity()));
        if (entity == 0 || entity->getVertices() == 0 || entity->getVertices()->size() == 0) {
            LOGI("Empty entity %s.", node->getName().c_str());
            continue;
        }
        Resource* resource = resourceManager_->getResource(node->getShader());
        Shader* shader = static_cast<Shader*>(resource);
        Resource* resource2 = resourceManager_->getResource(entity->getTexture());
        Image* image = static_cast<Image*>(resource2);
        mat4 res;
        Matrix::multiply(view, node->getMatrix(), res);
        // Select shader program to use.
        glUseProgram(shader->getId());
        checkGlError("glUseProgram");
        int matrix = glGetUniformLocation(shader->getId(), "uWVP");
        int texture = glGetUniformLocation(shader->getId(), "texture_0");
        checkGlError("glGetUniformLocation");
        int textureCoords = glGetAttribLocation(shader->getId(), "attrTexCoords");
        int vertices = glGetAttribLocation(shader->getId(), "attrPos");
        checkGlError("glGetAttribLocation");
        // Specify WVP matrix.
        glUniformMatrix4fv(matrix, 1, false, res);
        checkGlError("glUniformMatrix4fv");
        // Load vertex positions.
        if (!entity->isCompiled()) {
            //LOGI("Entity %s, not compiled.", entity->getName().c_str());
            continue;
        }
        glEnableVertexAttribArray(vertices);
        checkGlError("glEnableVertexAttribArray");
        //glVertexAttribPointer(vertices, 3, GL_FLOAT, GL_FALSE, 0,
        //    &(*entity->getVertices())[0]);
        //LOGI("%s vbo id: %d", node->getName().c_str(), entity->getVBO());
        glBindBuffer(GL_ARRAY_BUFFER, entity->getVBO());
        checkGlError("glBindBuffer");
        glVertexAttribPointer(vertices, 3, GL_FLOAT, GL_FALSE, 0, 0);
        checkGlError("glVertexAttribPointer");
        // Load UV coordinates.
        glEnableVertexAttribArray(textureCoords);
        checkGlError("glEnableVertexAttribArray");
        glVertexAttribPointer(textureCoords, 2, GL_FLOAT, GL_FALSE, 0,
            &(*entity->getTextureCoords())[0]);
        checkGlError("glVertexAttribPointer");
        // Bind the texture.
        glActiveTexture(GL_TEXTURE0);
        checkGlError("glActiveTexture");
        glBindTexture(GL_TEXTURE_2D, image->getId());
        checkGlError("glBindTexture");
        glUniform1i(texture, 0);
        checkGlError("glUniform1i");
        if (entity->hasIndices()) {
            vector<vector<GLushort>*>* indices = entity->getIndices();
            for (unsigned int i = 0; i < indices->size(); i++) {
                if (entity->hasBoundingVolumes()) {
                    BoundingVolume* volume = (*entity->getBoundingVolumes())[i];
                    if (volume->isInFrustum(camera, node) == BoundingVolume::OUTSIDE) {
                        continue;
                    }
                }
                vector<GLushort>* ind = (*indices)[i];
                glDrawElements(GL_TRIANGLES, ind->size(), GL_UNSIGNED_SHORT, &(*ind)[0]);
                checkGlError("glDrawElements");
            }
        } else {
            glDrawArrays(GL_TRIANGLES, 0, entity->getVertices()->size() / 3);
            checkGlError("glDrawArrays");
        }
        glBindBuffer(GL_ARRAY_BUFFER, 0);
        checkGlError("glBindBuffer");
    }
}
I have only recently started experimenting with VBOs. Before, when I sent the vertex data directly from client memory, everything worked and the texture was mapped correctly. Now that I have switched the vertex array over to a VBO, the geometry still renders, but the texture is no longer applied and I only see black objects.
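For completeness: the VBO is filled once in a separate compile step (the isCompiled()/getVBO() calls above), and the render loop only binds it. I have not pasted that code, but it is assumed to boil down to roughly the following upload of tightly packed GLfloat positions (a simplified sketch, not the exact code):

GLuint vbo = 0;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
// Upload the vertex positions once; afterwards only the buffer id is kept on the entity.
glBufferData(GL_ARRAY_BUFFER,
             entity->getVertices()->size() * sizeof(GLfloat),
             &(*entity->getVertices())[0],
             GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);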
- What could be wrong with my texturing?
- Why do I get mangled geometry when I swap the order of the glBindBuffer(GL_ARRAY_BUFFER, entity->getVBO()); and glVertexAttribPointer(vertices, 3, GL_FLOAT, GL_FALSE, 0, 0); lines? Is the call order I am using correct?
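My understanding of the call order, as a sketch only (reusing the handle names from the code above): glVertexAttribPointer reads whatever buffer is bound to GL_ARRAY_BUFFER at the moment of the call, and its last argument is then a byte offset into that buffer; with buffer 0 bound it is treated as a client-memory pointer instead.

// VBO case: bind the buffer first, then the "pointer" is an offset into it.
glBindBuffer(GL_ARRAY_BUFFER, entity->getVBO());
glVertexAttribPointer(vertices, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*) 0);
glEnableVertexAttribArray(vertices);

// Client-array case: no buffer bound, the pointer is actual application memory.
glBindBuffer(GL_ARRAY_BUFFER, 0);
glVertexAttribPointer(textureCoords, 2, GL_FLOAT, GL_FALSE, 0,
                      &(*entity->getTextureCoords())[0]);
glEnableVertexAttribArray(textureCoords);

Is this the ordering that is expected, or am I misunderstanding how the binding and the pointer call interact?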