
I'm rewriting a large part of my texturing code. I would like to be able to specify certain internal formats: GL_RGB8I, GL_RGB8UI, GL_RGB16I, GL_RGB16UI, GL_RGB32I, and GL_RGB32UI. These tokens do not exist in OpenGL 2.

When specifying these internal formats as arguments to glTexImage2D, the texturing fails (the texture appears as white). When checking for errors, I get [EDIT:] 1282 ("invalid operation"). I take this to mean that OpenGL is still using OpenGL 2 for glTexImage2D, and so the call is failing. Obviously, it will need to use a newer version to succeed. Enums like GL_RGB, GL_RGBA, and (oddly) GL_RGB32F, GL_RGBA32F work as expected.

I have the project configured to use either GLEW or GLee for extensions. I can use OpenGL 4 calls with no problem elsewhere (e.g., glPatchParameteri, glBindFramebuffer, etc.), and the enums in question certainly exist. For completeness, glGetString(GL_VERSION) returns "4.2.0". My question: can I force one of these extension libraries to use the OpenGL 4.2 version? If so, how?

EDIT: The code is too complicated to post, but here is a simple, self-contained example using GLee that also demonstrates the problem:

#include <GLee5_4/GLee.h>
#include <GL/gl.h>
#include <GL/glu.h>
#include <GL/glut.h>
//For Windows
#pragma comment(lib,"GLee.lib")
#pragma comment(lib,"opengl32.lib")
#pragma comment(lib,"glu32.lib")
#pragma comment(lib,"glut32.lib")

#include <stdlib.h>
#include <stdio.h>

const int screen_size[2] = {512,512};
#define TEXTURE_SIZE 64

//Choose a selection.  If you see black, then texturing is working.  If you see red, then the quad isn't drawing.  If you see white, texturing has failed.
#define TYPE 1

void error_check(void) {
    GLenum error_code = glGetError();
    const GLubyte* error_string = gluErrorString(error_code);
    (error_string==NULL) ? printf("%d = (unrecognized error--an extension error?)\n",error_code) : printf("%d = \"%s\"\n",error_code,error_string);
}

#if   TYPE==1 //############ 8-BIT TESTS ############
    inline GLenum get_type(int which) { return (which==1)?    GL_RGB8: GL_RGB; } //works
#elif TYPE==2
    inline GLenum get_type(int which) { return (which==1)?   GL_RGBA8:GL_RGBA; } //works
#elif TYPE==3
    inline GLenum get_type(int which) { return (which==1)?  GL_RGB8UI: GL_RGB; } //doesn't work (invalid op)
#elif TYPE==4
    inline GLenum get_type(int which) { return (which==1)?   GL_RGB8I: GL_RGB; } //doesn't work (invalid op)
#elif TYPE==5
    inline GLenum get_type(int which) { return (which==1)? GL_RGBA8UI:GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==6
    inline GLenum get_type(int which) { return (which==1)?  GL_RGBA8I:GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==7 //############ 16-BIT TESTS ############
    inline GLenum get_type(int which) { return (which==1)?   GL_RGB16: GL_RGB; } //works
#elif TYPE==8
    inline GLenum get_type(int which) { return (which==1)?  GL_RGBA16:GL_RGBA; } //works
#elif TYPE==9
    inline GLenum get_type(int which) { return (which==1)? GL_RGB16UI: GL_RGB; } //doesn't work (invalid op)
#elif TYPE==10
    inline GLenum get_type(int which) { return (which==1)?  GL_RGB16I: GL_RGB; } //doesn't work (invalid op)
#elif TYPE==11
    inline GLenum get_type(int which) { return (which==1)?GL_RGBA16UI:GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==12
    inline GLenum get_type(int which) { return (which==1)? GL_RGBA16I:GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==13 //############ 32-BIT TESTS ############
    inline GLenum get_type(int which) { return (which==1)?   GL_RGB32: GL_RGB; } //token doesn't exist
#elif TYPE==14
    inline GLenum get_type(int which) { return (which==1)?  GL_RGBA32:GL_RGBA; } //token doesn't exist
#elif TYPE==15
    inline GLenum get_type(int which) { return (which==1)? GL_RGB32UI: GL_RGB; } //doesn't work (invalid op)
#elif TYPE==16
    inline GLenum get_type(int which) { return (which==1)?  GL_RGB32I: GL_RGB; } //doesn't work (invalid op)
#elif TYPE==17
    inline GLenum get_type(int which) { return (which==1)?GL_RGBA32UI:GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==18
    inline GLenum get_type(int which) { return (which==1)? GL_RGBA32I:GL_RGBA; } //doesn't work (invalid op)
#elif TYPE==19 //############ 32-BIT FLOAT ############
    inline GLenum get_type(int which) { return (which==1)?  GL_RGB32F: GL_RGB; } //works
#elif TYPE==20
    inline GLenum get_type(int which) { return (which==1)? GL_RGBA32F:GL_RGBA; } //works
#endif

GLuint texture;
void create_texture(void) {
    printf("    Status before texture setup: "); error_check();

    glGenTextures(1,&texture);
    glBindTexture(GL_TEXTURE_2D,texture);

    printf("    Status after texture created: "); error_check();

    GLenum data_type = GL_UNSIGNED_BYTE;
    int data_length = TEXTURE_SIZE*TEXTURE_SIZE*4; //maximum number of channels, so it will work for everything
    unsigned char* data = new unsigned char[data_length];
    for (int i=0;i<data_length;++i) {
        data[i] = (unsigned char)(0);
    }

    glTexImage2D(GL_TEXTURE_2D,0,get_type(1), TEXTURE_SIZE,TEXTURE_SIZE, 0,get_type(2),data_type,data);

    printf("    Status after glTexImage2D: "); error_check();

    delete [] data;

    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);

    printf("    Status after texture filters defined: "); error_check();
}

void keyboard(unsigned char key, int x, int y) {
    switch (key) {
        case 27: //esc
            exit(0);
            break;
    }
}

void draw(void) {
    glClearColor(1.0,0.0,0.0,1.0); //in case the quad doesn't draw
    glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);

    glViewport(0,0,screen_size[0],screen_size[1]);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluOrtho2D(0,screen_size[0],0,screen_size[1]);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();

    glBegin(GL_QUADS);
    glTexCoord2f(0,0); glVertex2f(0,0);
    glTexCoord2f(2,0); glVertex2f(screen_size[0],0);
    glTexCoord2f(2,2); glVertex2f(screen_size[0],screen_size[1]);
    glTexCoord2f(0,2); glVertex2f(0,screen_size[1]);
    glEnd();

    glutSwapBuffers();
}

int main(int argc, char* argv[]) {
    glutInit(&argc,argv);
    glutInitWindowSize(screen_size[0],screen_size[1]);
    glutInitDisplayMode(GLUT_RGB|GLUT_DOUBLE|GLUT_DEPTH);
    glutCreateWindow("Texture Types - Ian Mallett");

    glEnable(GL_DEPTH_TEST);
    glEnable(GL_TEXTURE_2D);

    printf("Status after OpenGL setup: "); error_check();

    create_texture();

    printf("Status after texture setup: "); error_check();

    glutDisplayFunc(draw);
    glutIdleFunc(draw);
    glutKeyboardFunc(keyboard);

    glutMainLoop();

    return 0;
}

1 Answer


"When checking for errors, I get [EDIT:] 1282 ("invalid operation"). I take this to mean that OpenGL is still using OpenGL 2 for glTexImage2D, and so the call is failing."

OpenGL errors are not that complicated to understand. GL_INVALID_ENUM/VALUE are thrown when you pass an enum or value that is unexpected, unsupported, or out of range. If you pass "17" as the internal format to glTexImage2D, you get GL_INVALID_ENUM, because 17 is not a valid enumerator for an internal format. If you pass 103,422 as the width to glTexImage2D, you get GL_INVALID_VALUE, because 103,422 is almost certainly larger than GL_MAX_TEXTURE_SIZE.
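For reference, a small mapping helper along these lines (my own illustrative sketch; gl_error_name is not part of the question's code) turns the raw codes back into these names; the 1282 reported in the question is GL_INVALID_OPERATION (0x0502):

/* Sketch: map a glGetError() code back to the category names discussed here. */
static const char* gl_error_name(GLenum err) {
    switch (err) {
        case GL_NO_ERROR:          return "GL_NO_ERROR";
        case GL_INVALID_ENUM:      return "GL_INVALID_ENUM";      /* 0x0500 = 1280 */
        case GL_INVALID_VALUE:     return "GL_INVALID_VALUE";     /* 0x0501 = 1281 */
        case GL_INVALID_OPERATION: return "GL_INVALID_OPERATION"; /* 0x0502 = 1282 */
        case GL_OUT_OF_MEMORY:     return "GL_OUT_OF_MEMORY";     /* 0x0505 = 1285 */
        default:                   return "(unrecognized error)";
    }
}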

GL_INVALID_OPERATION is always about a combination of state that has gone wrong. Either some previously set context state does not match the function you are calling, or two or more parameters in combination cause a problem. The latter is your case here.

If your implementation did not support integer textures at all, you would get INVALID_ENUM (because the internal format would not be a valid format). Getting INVALID_OPERATION means something else is wrong.

Namely, this:

glTexImage2D(GL_TEXTURE_2D,0,get_type(1), TEXTURE_SIZE,TEXTURE_SIZE, 0,get_type(2),data_type,data);

Your get_type(2) call returns GL_RGB or GL_RGBA in all cases. However, when using integer image formats, you must use a pixel transfer format that ends in _INTEGER.

So your get_type(2) needs to be something like this:

inline GLenum get_type(int which) { return (which==1)? GL_RGB16UI: GL_RGB_INTEGER; }

And the same goes for the other integral image formats.
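Applied to, say, the TYPE==9 case in the question, the corrected get_type and the upload call might look roughly like this (a sketch based on the question's own code, keeping its GL_UNSIGNED_BYTE source data, which is a valid type for GL_RGB_INTEGER transfers):

//Sketch: GL_RGB16UI internal format paired with the GL_RGB_INTEGER transfer format
inline GLenum get_type(int which) { return (which==1)? GL_RGB16UI:GL_RGB_INTEGER; }

//...inside create_texture(), unchanged except for the formats get_type() now returns:
glTexImage2D(GL_TEXTURE_2D,0,get_type(1), //internal format: GL_RGB16UI
    TEXTURE_SIZE,TEXTURE_SIZE, 0,
    get_type(2),                          //pixel transfer format: GL_RGB_INTEGER
    GL_UNSIGNED_BYTE,data);               //integer (unsigned byte) source data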

Answered 2012-08-02T05:41:45.233