I'm trying to do some image processing on the iPhone, based on Apple's GLImageProcessing sample.
What I ultimately want to do is load an image into a texture, perform one or more of the operations from the sample code (hue, saturation, brightness, etc.), and then read the resulting image back out for later processing/saving. For the most part this would never need to touch the screen, so I figured an FBO might be the way to go.
To start with, I put together a small example that creates an offscreen FBO, draws to it, and then reads the data back as an image. I was thrilled when this worked perfectly in the simulator, and then deflated when I realized that on the actual device all I got was black.
Disclaimer: my OpenGL is old enough that there has been a fair learning curve getting to OpenGL ES, and I was never much of a texture wizard. I do know that the device has different characteristics from the simulator when it comes to framebuffer access (mandatory offscreen FBO and swap on the device, direct access in the simulator), but I haven't been able to find what I'm doing wrong, even after fairly extensive searching.
Any suggestions?
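(For context, the eventual texture-upload step I have in mind is roughly the sketch below; it is not part of the failing test case, and the helper name is just mine. The failing test case itself follows after it.)
// Rough sketch of getting a UIImage into a GL_TEXTURE_2D under OpenGL ES 1.1.
// Assumes power-of-two dimensions, as the GLImageProcessing sample does.
static GLuint createTextureFromImage(UIImage *image) {
    CGImageRef cgImage = image.CGImage;
    size_t width = CGImageGetWidth(cgImage);
    size_t height = CGImageGetHeight(cgImage);
    // Redraw the image into a known RGBA byte layout for glTexImage2D.
    GLubyte *pixels = (GLubyte *)calloc(width * height * 4, 1);
    CGColorSpaceRef rgb = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(pixels, width, height, 8, width * 4,
                                             rgb, kCGImageAlphaPremultipliedLast);
    CGContextDrawImage(ctx, CGRectMake(0, 0, width, height), cgImage);
    CGContextRelease(ctx);
    CGColorSpaceRelease(rgb);
    GLuint texture = 0;
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (GLsizei)width, (GLsizei)height,
                 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
    free(pixels);
    return texture;
}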
// set up the offscreen FBO sizes
int renderBufferWidth = 1280;
int renderBufferHeight = 720;
// now the FBO
GLuint fbo = 0;
glGenFramebuffersOES(1, &fbo);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, fbo);
GLuint renderBuffer = 0;
glGenRenderbuffersOES(1, &renderBuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, renderBuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES,
                         GL_RGBA8_OES,
                         renderBufferWidth,
                         renderBufferHeight);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES,
                             GL_COLOR_ATTACHMENT0_OES,
                             GL_RENDERBUFFER_OES,
                             renderBuffer);
GLenum status = glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES);
if (status != GL_FRAMEBUFFER_COMPLETE_OES) {
    NSLog(@"Problem with OpenGL framebuffer after specifying color render buffer: %x", status);
}
// throw in a test drawing
glClearColor(0.5f, 0.5f, 0.5f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
static const GLfloat triangleVertices[] = {
    -0.5f, -0.33f,
     0.5f, -0.33f,
    -0.5f,  0.33f
};
static const GLfloat triangleColors[] = {
    1.0, 0.0, 0.0, 0.5,
    0.0, 1.0, 0.0, 0.5,
    0.0, 0.0, 1.0, 0.5
};
GLint backingWidth = 320;
GLint backingHeight = 480;
NSLog(@"setting up view/model matrices");
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glVertexPointer(2, GL_FLOAT, 0, triangleVertices);
glEnableClientState(GL_VERTEX_ARRAY);
glColorPointer(4, GL_FLOAT, 0, triangleColors);
glEnableClientState(GL_COLOR_ARRAY);
// draw the triangle
glDrawArrays(GL_TRIANGLE_STRIP, 0, 3);
// Extract the resulting rendering as an image
int samplesPerPixel = 4; // R, G, B and A
int rowBytes = samplesPerPixel * renderBufferWidth;
char* bufferData = (char*)malloc(rowBytes * renderBufferHeight);
if (bufferData == NULL) {
    NSLog(@"Unable to allocate buffer for image extraction.");
}
// works on simulator with GL_BGRA, but not on device
glReadPixels(0, 0, renderBufferWidth,
             renderBufferHeight,
             GL_BGRA,
             GL_UNSIGNED_BYTE, bufferData);
NSLog(@"reading pixels from framebuffer");
// Flip it vertically - images read from OpenGL buffers are upside-down
char* flippedBuffer = (char*)malloc(rowBytes * renderBufferHeight);
if (flippedBuffer == NULL) {
    NSLog(@"Unable to allocate flipped buffer for corrected image.");
}
for (int i = 0; i < renderBufferHeight; i++) {
    bcopy(bufferData + i * rowBytes,
          flippedBuffer + (renderBufferHeight - i - 1) * rowBytes,
          rowBytes);
}
// unbind my FBO
glBindFramebufferOES(GL_FRAMEBUFFER_OES, 0);
// Output the image to a file
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
int bitsPerComponent = 8;
CGBitmapInfo bitmapInfo = kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Host;
CGContextRef contextRef = CGBitmapContextCreate(flippedBuffer,
                                                renderBufferWidth,
                                                renderBufferHeight,
                                                bitsPerComponent,
                                                rowBytes, colorSpace, bitmapInfo);
if (contextRef == nil) {
    NSLog(@"Unable to create CGContextRef.");
}
CGImageRef imageRef = CGBitmapContextCreateImage(contextRef);
if (imageRef == nil) {
    NSLog(@"Unable to create CGImageRef.");
} else {
    if (savedImage == NO) {
        UIImage *myImage = [UIImage imageWithCGImage:imageRef];
        UIImageWriteToSavedPhotosAlbum(myImage, nil, nil, nil);
        savedImage = YES;
    }
}
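(Unrelated to the black-image problem, but for completeness: once the readback and flip are done I also tear things down, roughly like this.)
// bufferData is no longer needed once the flip loop has copied it, and the
// offscreen GL objects can be deleted; flippedBuffer and the CG objects are
// kept alive until the photo-album write finishes, since the CGImage may
// still reference that memory.
free(bufferData);
glDeleteRenderbuffersOES(1, &renderBuffer);
glDeleteFramebuffersOES(1, &fbo);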
EDIT:
The answer, of course, is that the pixel format should be GL_RGBA, not GL_BGRA:
// GL_RGBA works on the device as well as in the simulator
glReadPixels(0, 0, renderBufferWidth,
             renderBufferHeight,
             GL_RGBA,
             GL_UNSIGNED_BYTE, bufferData);
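In hindsight this could have been caught by asking GL what it actually supports: GL_RGBA/GL_UNSIGNED_BYTE is the only pair glReadPixels is required to accept, plus one implementation-defined pair that can be queried via the OES_read_format tokens. A quick sanity check along these lines:
// Query the one implementation-defined format/type pair that glReadPixels
// supports in addition to the always-available GL_RGBA / GL_UNSIGNED_BYTE.
GLint readFormat = 0, readType = 0;
glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_FORMAT_OES, &readFormat);
glGetIntegerv(GL_IMPLEMENTATION_COLOR_READ_TYPE_OES, &readType);
NSLog(@"glReadPixels also accepts format 0x%x with type 0x%x", readFormat, readType);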