6

我正在使用 OpenGL ES 渲染一些特殊效果,我不想向用户展示,我只想将结果保存为 UIImage,有人可以帮我吗?

这是我正在使用的代码,我可以获得包含我使用的红色透明颜色的图像,但没有显示几何图形。

#import "RendererGL.h"
#import <GLKit/GLKit.h>
#import <UIKit/UIKit.h>
#import <OpenGLES/EAGL.h>
#import <OpenGLES/EAGLDrawable.h>
#import <OpenGLES/ES2/glext.h>
#import <QuartzCore/QuartzCore.h>

// Offscreen render target size in pixels.
static NSInteger WIDTH_IN_PIXEL = 400;
static NSInteger HEIGHT_IN_PIXEL = 300;

// One vertex of the scene: position only (no normals, colors or texcoords).
typedef struct {
    GLKVector3 positionCoords;
}
SceneVertex;

// A single triangle covering the lower-left quadrant of clip space.
static const SceneVertex vertices[] =
{
    {{-0.5f, -0.5f, 0.0}}, // lower left corner
    {{ 0.5f, -0.5f, 0.0}}, // lower right corner
    {{-0.5f,  0.5f, 0.0}}  // upper left corner
};

@implementation RendererGL
{
    EAGLContext* _myContext;        // offscreen ES2 context, never attached to a CAEAGLLayer

    GLuint _framebuffer;            // offscreen FBO
    GLuint _colorRenderbuffer;      // color attachment (read back by saveImage)
    GLuint _depthRenderbuffer;      // depth attachment

    GLuint _vertexBufferID;         // VBO holding the triangle

    GLKBaseEffect *_baseEffect;     // GLKit effect providing the shader program
}

// Creates the GL context, renders one triangle into an offscreen FBO and
// saves the result as a JPEG in the Documents directory. Nothing is shown
// on screen.
- (id) init
{
    self = [super init];
    if (self)
    {
        _myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        [EAGLContext setCurrentContext:_myContext];
        [self setupOffscreenBuffer];
        [self setUpEffect];
        [self renderImage];
        [self saveImage];
    }

    return self;
}

// Configures a GLKBaseEffect that draws all geometry in opaque blue.
-(void)setUpEffect
{
    _baseEffect = [[GLKBaseEffect alloc] init];
    _baseEffect.useConstantColor = GL_TRUE;
    _baseEffect.constantColor = GLKVector4Make(0.0f, 0.0f, 1.0f, 1.0f);
}

// Builds the offscreen framebuffer: a color renderbuffer plus a 16-bit depth
// renderbuffer, both sized WIDTH_IN_PIXEL x HEIGHT_IN_PIXEL.
// (Based on Apple's offscreen-framebuffer sample code.)
-(void)setupOffscreenBuffer
{
    glGenFramebuffers(1, &_framebuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);

    // NOTE(review): GL_RGBA4 is only 4 bits per channel; switch to
    // GL_RGBA8_OES if banding shows up in the saved image.
    glGenRenderbuffers(1, &_colorRenderbuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderbuffer);
    glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA4, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL);
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorRenderbuffer);

    glGenRenderbuffers(1, &_depthRenderbuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, _depthRenderbuffer);
    glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL);
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, _depthRenderbuffer);

    GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    if (status != GL_FRAMEBUFFER_COMPLETE) {
        NSLog(@"failed to make complete framebuffer object %x", status);
    }
}

// Draws the triangle into the offscreen FBO.
- (void) renderImage
{
    GLenum error = GL_NO_ERROR;

    // BUG FIX: the viewport was never set. A context that has no drawable
    // attached has no meaningful default viewport, so the geometry was
    // clipped away and only the clear color survived in the saved image.
    glViewport(0, 0, (GLsizei)WIDTH_IN_PIXEL, (GLsizei)HEIGHT_IN_PIXEL);

    glClearColor(1, 0, 0, 1);   // red clear color — the only thing visible before the fix
    // BUG FIX: depth testing is enabled below, so the depth buffer must be
    // cleared as well; testing against uninitialized depth values can reject
    // every fragment of the triangle.
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    glEnable(GL_DEPTH_TEST);

    [_baseEffect prepareToDraw];

    // One-shot VBO upload; fine here because renderImage runs exactly once.
    glGenBuffers(1, &_vertexBufferID);
    glBindBuffer(GL_ARRAY_BUFFER, _vertexBufferID);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
    error = glGetError();
    if (error != GL_NO_ERROR) {
        NSLog(@"error happend, error is %d, line %d", error, __LINE__);
    }

    glEnableVertexAttribArray(GLKVertexAttribPosition);
    glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE, sizeof(SceneVertex), NULL);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    error = glGetError();
    if (error != GL_NO_ERROR) {
        NSLog(@"error happend, error is %d, line %d", error, __LINE__);
    }

    // Block until the GPU has finished so glReadPixels sees the final pixels.
    glFinish();
    error = glGetError();
    if (error != GL_NO_ERROR) {
        NSLog(@"error happend, error is %d, line %d", error, __LINE__);
    }
}

// Reads the FBO back with glReadPixels and writes the pixels to
// Documents/<n>.jpg via a CGImage and a bitmap graphics context.
-(void)saveImage
{
    GLenum error = GL_NO_ERROR;

    NSInteger x = 0, y = 0;
    NSInteger dataLength = WIDTH_IN_PIXEL * HEIGHT_IN_PIXEL * 4;
    GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));

    glPixelStorei(GL_PACK_ALIGNMENT, 4);
    glReadPixels(x, y, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL, GL_RGBA, GL_UNSIGNED_BYTE, data);

    error = glGetError();
    if (error != GL_NO_ERROR) {
        NSLog(@"error happend, error is %d, line %d", error, __LINE__);
    }

    // NOTE(review): glReadPixels returns rows bottom-up while CGImage rows
    // are top-down, so the saved JPEG is vertically flipped — confirm whether
    // that matters for the caller.
    CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
    CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
    CGImageRef iref = CGImageCreate(WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL, 8, 32, WIDTH_IN_PIXEL * 4, colorspace,
                                    kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
                                    ref, NULL, true, kCGRenderingIntentDefault);

    UIGraphicsBeginImageContext(CGSizeMake(WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL));
    CGContextRef cgcontext = UIGraphicsGetCurrentContext();
    CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
    CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL), iref);
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();

    NSData *d = UIImageJPEGRepresentation(image, 1);
    NSString *documentDirPath = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
    static NSInteger imageNO = 1;
    imageNO++;
    // BUG FIX: NSInteger is "long" on 64-bit platforms; %d was the wrong
    // format specifier for imageNO.
    NSString *savingPath = [documentDirPath stringByAppendingPathComponent:[NSString stringWithFormat:@"%ld.jpg", (long)imageNO]];
    // BUG FIX: the write result was computed and silently discarded.
    BOOL succ = [d writeToFile:savingPath atomically:NO];
    if (!succ) {
        NSLog(@"failed to write image to %@", savingPath);
    }

    UIGraphicsEndImageContext();

    free(data);
    CFRelease(ref);
    CFRelease(colorspace);
    CGImageRelease(iref);
}

@end
4

2 回答 2

6

我遇到过一个非常相似的问题——渲染一些线条并得到 UIImage。我使用的是 OpenGL ES 1.1 和多重采样。我删除了一些与渲染无关的附加代码以及部分 OpenGL 错误检查。你可以在这里找到完整的代码:OSPRendererGL。另外,很抱歉我把所有逻辑都堆在了一个方法里。

// Offscreen OpenGL ES 1.1 renderer: draws into a 4x multisampled FBO and
// resolves into a plain FBO for pixel readback.
// BUG FIX (transcription): the superclass and the closing @end were missing,
// which makes the following @implementation uncompilable.
@interface OSPRendererGL : NSObject
{
    EAGLContext* myContext;
    GLuint framebuffer;             // resolve target (single-sampled)
    GLuint colorRenderbuffer;
    GLuint depthRenderbuffer;
    GLuint _vertexArray;
    GLuint _vertexBuffer;
    GLuint resolveFramebuffer;
    GLuint msaaFramebuffer, msaaRenderbuffer, msaaDepthbuffer;  // 4x MSAA targets

    int width;                      // render target size in pixels
    int height;
}
@end

@implementation OSPRendererGL

// Creates the ES 1.1 context and the offscreen framebuffers, then detaches
// the context from the current thread.
- (id) init
{
    self = [super init];
    if (self)
    {
        myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
        [EAGLContext setCurrentContext:myContext];
        // BUG FIX: width/height must be assigned BEFORE setupOpenGL runs —
        // setupOpenGL sizes every renderbuffer from these ivars, and in the
        // original order they were still 0, yielding 0x0 storage.
        width = 256;
        height = 256;
        [self setupOpenGL];
        [EAGLContext setCurrentContext:nil];
    }
    return self;
}

// Creates two framebuffers: a plain FBO (framebuffer + colorRenderbuffer +
// depthRenderbuffer) used as the resolve/readback target, and a 4x
// multisampled FBO (msaaFramebuffer + msaaRenderbuffer + msaaDepthbuffer)
// that the scene is actually drawn into.
// NOTE(review): width and height must already hold the target size when this
// runs — every glRenderbufferStorage* call below is sized from them.
-(void) setupOpenGL
{
    // Plain (single-sampled) FBO: color + 16-bit depth.
    glGenFramebuffersOES(1, &framebuffer);
    glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer);

    glGenRenderbuffersOES(1, &colorRenderbuffer);
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, colorRenderbuffer);
    glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_RGBA8_OES, width, height);
    glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, colorRenderbuffer);

    glGenRenderbuffersOES(1, &depthRenderbuffer);
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
    glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, width, height);
    glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);

    // Multisampled FBO (4 samples) via the APPLE_framebuffer_multisample
    // extension; resolved into the plain FBO after rendering.
    glGenFramebuffersOES(1, &msaaFramebuffer); 
    glGenRenderbuffersOES(1, &msaaRenderbuffer);

    glBindFramebufferOES(GL_FRAMEBUFFER_OES, msaaFramebuffer); 
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaRenderbuffer);   

    glRenderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER_OES, 4, GL_RGBA8_OES, width, height); 
    glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, msaaRenderbuffer); 

    glGenRenderbuffersOES(1, &msaaDepthbuffer);   
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaDepthbuffer); 
    glRenderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER_OES, 4, GL_DEPTH_COMPONENT16_OES, width, height); 
    glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, msaaDepthbuffer);

}

// Renders the map area into the MSAA framebuffer, resolves it into the plain
// framebuffer, reads the pixels back and returns them as a UIImage.
// (The actual drawing calls were elided by the author — see "rendering here".)
-(UIImage *) renderImageAtZoom:(int)zoom
{
    CGRect b = CGRectMake(0, 0, width, height);
    OSPCoordinateRect r = OSPRectForMapAreaInRect([self mapArea], b);

    // BUG FIX (transcription): "double_scale = ..." had lost the space in the
    // declaration of _scale, leaving an undeclared identifier.
    double _scale = b.size.width / r.size.x;
    double scale = 1.0 / _scale;   // presumably used by the elided rendering code — TODO confirm

    [EAGLContext setCurrentContext:myContext];

    // Draw into the multisampled framebuffer.
    // CONSISTENCY FIX: use the OES entry points/enums throughout (ES 1.1).
    glBindFramebufferOES(GL_FRAMEBUFFER_OES, msaaFramebuffer);
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaRenderbuffer);

    glViewport(0, 0, width, height);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    // Top-left origin orthographic projection over a 256x256 tile.
    glOrthof(0.0f, 256.0f, 256.0f, 0.0f, 1.0f, -1.0f);
    glMatrixMode(GL_MODELVIEW);

    glPushMatrix();

    glScalef(_scale, _scale, 1);
    glTranslatef(-r.origin.x, -r.origin.y, 0);

    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    glEnable(GL_LINE_SMOOTH);
    glEnable(GL_POINT_SMOOTH);
    glEnable(GL_BLEND);

    glClearColor(1, 1, 1, 1);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // rendering here

    glPopMatrix();

    // Resolve the multisampled framebuffer into the single-sampled one.
    glBindFramebufferOES(GL_READ_FRAMEBUFFER_APPLE, msaaFramebuffer);
    glBindFramebufferOES(GL_DRAW_FRAMEBUFFER_APPLE, framebuffer);

    glResolveMultisampleFramebufferAPPLE();

    glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer);
    glBindRenderbufferOES(GL_RENDERBUFFER_OES, colorRenderbuffer);

    // Grab the resolved image from the FBO.
    GLint backingWidth, backingHeight;

    glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
    glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);

    NSInteger x = 0, y = 0;
    NSInteger dataLength = width * height * 4;
    GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));

    glPixelStorei(GL_PACK_ALIGNMENT, 4);
    glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);

    CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
    CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
    CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace,
                                    kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
                                    ref, NULL, true, kCGRenderingIntentDefault);

    UIGraphicsBeginImageContext(CGSizeMake(width, height));
    CGContextRef cgcontext = UIGraphicsGetCurrentContext();
    CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
    CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, width, height), iref);
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    free(data);
    CFRelease(ref);
    CFRelease(colorspace);
    CGImageRelease(iref);

    [EAGLContext setCurrentContext:nil];

    return image;
}
于 2013-08-19T08:42:34.660 回答
1

我没有看到对 eglSwapBuffers() 的调用。这是在 PowerVR 上开始渲染帧所必需的,即使在渲染到 Renderbuffer 时也是如此。请参阅以下示例代码:

http://processors.wiki.ti.com/index.php/Render_to_Texture_with_OpenGL_ES

于 2013-08-16T19:59:36.160 回答