I'm currently developing a camera application for iPhone in which I take the camera input, convert it to an OpenGL texture, and map it onto a 3D object (for simplicity, currently a plane in a perspective projection). After mapping the camera input onto this 3D plane, I render that 3D scene to a texture, which is then used as a new texture on a plane in orthographic space (where I apply additional filters in my fragment shader).
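To make that flow concrete, per frame I essentially do the following (a simplified sketch; drawCameraPlaneInPerspective()/drawFilteredPlaneInOrtho(), screenWidth/screenHeight and the hard-coded sizes are placeholders, not my actual code):

void renderFrame()
{
    // Pass 1: draw the camera-textured plane with the perspective
    // projection into the offscreen 2048x2048 texture render target.
    textureRenderTarget->bind();
    glViewport(0, 0, 2048, 2048);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    drawCameraPlaneInPerspective();   // placeholder for the perspective pass

    textureRenderTarget->unbind();

    // Pass 2: draw a plane in orthographic projection into the on-screen
    // framebuffer, sampling the texture from pass 1 and applying the
    // extra filters in the fragment shader.
    glBindFramebuffer(GL_FRAMEBUFFER, defaultFrameBuffer);
    glViewport(0, 0, screenWidth, screenHeight);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    drawFilteredPlaneInOrtho();       // placeholder for the ortho/filter pass

    glBindRenderbuffer(GL_RENDERBUFFER, colorRenderBuffer);
    [context presentRenderbuffer:GL_RENDERBUFFER];
}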
As long as I keep everything in an orthographic projection, the resolution of my rendered texture is quite high. But from the moment I put my plane into a perspective projection, the resolution of the rendered texture is very low.
For comparison:
As you can see, the last image has a very low resolution compared to the other two, so I guess I'm doing something wrong.
I'm currently not using multisampling on any of my framebuffers, and I doubt I need it to solve this, since the orthographic scene works just fine.
The texture I render into is 2048x2048 (it will eventually be output as an image to the iPhone camera roll).
Here are the parts of the source code that I think might be relevant:
The code that creates the framebuffer used for output to the screen:
// Color renderbuffer
glGenRenderbuffers(1, &colorRenderBuffer);
glBindRenderbuffer(GL_RENDERBUFFER, colorRenderBuffer);
[context renderbufferStorage:GL_RENDERBUFFER
                fromDrawable:(CAEAGLLayer*)glView.layer];

// Depth renderbuffer
glGenRenderbuffers(1, &depthRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, depthRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, width, height);

// Framebuffer
glGenFramebuffers(1, &defaultFrameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, defaultFrameBuffer);

// Associate renderbuffers with framebuffer
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
                          GL_RENDERBUFFER, colorRenderBuffer);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT,
                          GL_RENDERBUFFER, depthRenderbuffer);
The TextureRenderTarget class:
void TextureRenderTarget::init()
{
    // Color renderbuffer
    glGenRenderbuffers(1, &colorRenderBuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, colorRenderBuffer);
    glRenderbufferStorage(GL_RENDERBUFFER, GL_RGB8_OES,
                          width, height);

    // Depth renderbuffer
    glGenRenderbuffers(1, &depthRenderbuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, depthRenderbuffer);
    glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16,
                          width, height);

    // Framebuffer
    glGenFramebuffers(1, &framebuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);

    // Associate renderbuffers with framebuffer
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
                              GL_RENDERBUFFER, colorRenderBuffer);
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT,
                              GL_RENDERBUFFER, depthRenderbuffer);

    // Texture and associate with framebuffer
    texture = new RenderTexture(width, height);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
                           GL_TEXTURE_2D, texture->getHandle(), 0);

    // Check for errors
    checkStatus();
}

void TextureRenderTarget::bind() const
{
    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, colorRenderBuffer);
}

void TextureRenderTarget::unbind() const
{
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    glBindRenderbuffer(GL_RENDERBUFFER, 0);
}
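(checkStatus() isn't shown above; it's essentially the standard glCheckFramebufferStatus completeness check, roughly along these lines rather than the exact code:)

void TextureRenderTarget::checkStatus() const
{
    // Standard completeness check; log the status code if the FBO is incomplete
    GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    if(status != GL_FRAMEBUFFER_COMPLETE)
    {
        cout << "TextureRenderTarget::init -> Framebuffer incomplete: 0x"
             << hex << status << dec << endl;
    }
}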
And finally, the snippets showing how I create the render texture and fill it with pixels:
void Texture::generate()
{
    // Create texture to render into
    glActiveTexture(unit);
    glGenTextures(1, &handle);
    glBindTexture(GL_TEXTURE_2D, handle);

    // Configure texture
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}

void Texture::setPixels(const GLvoid* pixels)
{
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA,
                 GL_UNSIGNED_BYTE, pixels);
    updateMipMaps();
}

void Texture::updateMipMaps() const
{
    glBindTexture(GL_TEXTURE_2D, handle);
    glGenerateMipmap(GL_TEXTURE_2D);
}

void Texture::bind(GLenum unit)
{
    this->unit = unit;
    if(unit != -1)
    {
        glActiveTexture(unit);
        glBindTexture(GL_TEXTURE_2D, handle);
    }
    else
    {
        cout << "Texture::bind -> Couldn't activate unit -1" << endl;
    }
}

void Texture::unbind()
{
    glBindTexture(GL_TEXTURE_2D, 0);
}