
OpenGL texture 2d array won't render with depth > 1

The code below uploads my texture data as described by the passed parameters. When vPixelData holds only one item/texture it renders properly, but once there are two or more, nothing shows up.

glGetError() returns GL_INVALID_OPERATION immediately after the glTexSubImage3D() call, but only when vPixelData.size() is greater than 1.

/*virtual*/ uint32 HyOpenGL::AddTextureArray(uint32 uiNumColorChannels, uint32 uiWidth, uint32 uiHeight, vector<unsigned char *> &vPixelData)
{
    GLenum eInternalFormat = uiNumColorChannels == 4 ? GL_RGBA8 : (uiNumColorChannels == 3 ? GL_RGB8 : GL_R8);
    GLenum eFormat = uiNumColorChannels == 4 ? GL_RGBA : (uiNumColorChannels == 3 ? GL_RGB : GL_RED);

    glTexImage3D(GL_TEXTURE_2D_ARRAY, 0, eInternalFormat, uiWidth, uiHeight, static_cast<uint32>(vPixelData.size()), 0, eFormat, GL_UNSIGNED_BYTE, NULL);

    GLuint hGLTextureArray;
    glGenTextures(1, &hGLTextureArray);
    //glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D_ARRAY, hGLTextureArray);

    // Create storage for the texture
    glTexStorage3D(GL_TEXTURE_2D_ARRAY,
                    1,                      // Number of mipmaps
                    eInternalFormat,        // Internal format
                    uiWidth, uiHeight,      // width, height
                    static_cast<uint32>(vPixelData.size()));

    for(unsigned int i = 0; i != vPixelData.size(); ++i)
    {
        // Write each texture into storage
        glTexSubImage3D(GL_TEXTURE_2D_ARRAY,
                        0,                                      // Mipmap number
                        0, 0, i,                                // xoffset, yoffset, zoffset
                        uiWidth, uiHeight, 1,                   // width, height, depth (of texture you're copying in)
                        eFormat,                                // format
                        GL_UNSIGNED_BYTE,                       // type
                        vPixelData[i]);                         // pointer to pixel data

        GLenum eError = glGetError(); // Getting 'GL_INVALID_OPERATION' when > 1 texture depth. It's 'GL_NO_ERROR' otherwise
    }

    glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    return hGLTextureArray;
}

(In my current call, uiNumColorChannels == 4, and uiWidth and uiHeight are both 512.)
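
For context, the call site looks roughly like this (the layer buffers and the pRenderer pointer are hypothetical stand-ins; only the parameter values above come from my actual code):

// Hypothetical call site: two 512x512 RGBA layers (4 color channels each).
// The solid-white pixel buffers stand in for my real image data.
std::vector<std::vector<unsigned char>> layerStorage(2, std::vector<unsigned char>(512 * 512 * 4, 0xFF));
std::vector<unsigned char *> vPixelData;
for(auto &layer : layerStorage)
    vPixelData.push_back(layer.data());

uint32 hTexArray = pRenderer->AddTextureArray(4, 512, 512, vPixelData); // pRenderer: a HyOpenGL instance (assumed)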

Apparently everything works if I use:

glTexImage3D(GL_TEXTURE_2D_ARRAY,
             0,                      // Mipmap level
             eFormat,                // Internal format
             uiWidth, uiHeight,      // width, height
             uiNumTextures,          // depth (number of layers)
             0,                      // border (must be 0)
             eFormat,                // format
             GL_UNSIGNED_BYTE,       // type
             NULL);                  // no initial pixel data

instead of:

glTexStorage3D(GL_TEXTURE_2D_ARRAY,
               1,                      // Number of mipmaps
               eInternalFormat,        // Internal format
               uiWidth, uiHeight,      // width, height
               static_cast<uint32>(vPixelData.size()));
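
For reference, here is a minimal, self-contained sketch of the glTexImage3D path as I understand it, written as a free function rather than the HyOpenGL member (the function name is hypothetical, a GL header/loader is assumed, and the allocation call simply takes the place of glTexStorage3D after the texture is generated and bound):

GLuint CreateTextureArraySketch(uint32 uiNumColorChannels, uint32 uiWidth, uint32 uiHeight, vector<unsigned char *> &vPixelData)
{
    GLenum eFormat = uiNumColorChannels == 4 ? GL_RGBA : (uiNumColorChannels == 3 ? GL_RGB : GL_RED);

    GLuint hGLTextureArray;
    glGenTextures(1, &hGLTextureArray);
    glBindTexture(GL_TEXTURE_2D_ARRAY, hGLTextureArray);

    // Allocate every layer up front with a NULL data pointer (mutable storage).
    glTexImage3D(GL_TEXTURE_2D_ARRAY, 0, eFormat,
                 uiWidth, uiHeight, static_cast<GLsizei>(vPixelData.size()),
                 0, eFormat, GL_UNSIGNED_BYTE, NULL);

    // Upload each image into its own zoffset slice.
    for(GLsizei i = 0; i < static_cast<GLsizei>(vPixelData.size()); ++i)
        glTexSubImage3D(GL_TEXTURE_2D_ARRAY, 0,
                        0, 0, i,
                        uiWidth, uiHeight, 1,
                        eFormat, GL_UNSIGNED_BYTE, vPixelData[i]);

    glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    return hGLTextureArray;
}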
