
OpenGL GLFW + GLAD Program not drawing triangle

I am trying to draw a triangle with GLFW and GLAD, but the screen won't display anything (I can only clear the screen).

I am able to open a window and clear the screen to a dark blue color (or any other color).

But I cannot draw a triangle on the screen, and I do not know why.

I thought maybe my GPU didn't have a default shader.

#include <iostream>
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <string>
#include <cstdint>
#include <vector>

void Log(const char* str) {
    std::cout << str << std::endl;
}


void error_callback(int error, const char* description)
{
    std::cout << "Error Code ["<< stderr << "] Error: \n" << description << std::endl;
}

class Shader {

public:
    Shader(const std::string& vertexSrc, const std::string& fragSrc) {
        // Create an empty vertex shader handle
        GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);

        const GLchar* source = vertexSrc.c_str();
        glShaderSource(vertexShader, 1, &source, 0);

        glCompileShader(vertexShader);

        GLint isCompiled = 0;
        glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &isCompiled);
        if (isCompiled == GL_FALSE)
        {
            GLint maxLength = 0;
            glGetShaderiv(vertexShader, GL_INFO_LOG_LENGTH, &maxLength);

            std::vector<GLchar> infoLog(maxLength);
            glGetShaderInfoLog(vertexShader, maxLength, &maxLength, &infoLog[0]);
            Log(infoLog.data()); // print the compile log so the failure is visible

            glDeleteShader(vertexShader);

            return;
        }

        GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);

        source = fragSrc.c_str();
        glShaderSource(fragmentShader, 1, &source, 0);

        glCompileShader(fragmentShader);

        glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &isCompiled);
        if (isCompiled == GL_FALSE)
        {
            GLint maxLength = 0;
            glGetShaderiv(fragmentShader, GL_INFO_LOG_LENGTH, &maxLength);

            std::vector<GLchar> infoLog(maxLength);
            glGetShaderInfoLog(fragmentShader, maxLength, &maxLength, &infoLog[0]);
            Log(infoLog.data()); // print the compile log so the failure is visible

            glDeleteShader(fragmentShader);
            glDeleteShader(vertexShader);

            return;
        }

        m_RendererID = glCreateProgram();

        glAttachShader(m_RendererID, vertexShader);
        glAttachShader(m_RendererID, fragmentShader);

        glLinkProgram(m_RendererID);

        // Note the different functions here: glGetProgram* instead of glGetShader*.
        GLint isLinked = 0;
        glGetProgramiv(m_RendererID, GL_LINK_STATUS, (int*)&isLinked);
        if (isLinked == GL_FALSE)
        {
            GLint maxLength = 0;
            glGetProgramiv(m_RendererID, GL_INFO_LOG_LENGTH, &maxLength);

            std::vector<GLchar> infoLog(maxLength);
            glGetProgramInfoLog(m_RendererID, maxLength, &maxLength, &infoLog[0]);
            Log(infoLog.data()); // print the link log so the failure is visible

            glDeleteProgram(m_RendererID);
            glDeleteShader(vertexShader);
            glDeleteShader(fragmentShader);

            return;
        }

        Log("Shader compiled Successfully at:");
        std::cout << m_RendererID << std::endl;

        glDetachShader(m_RendererID, vertexShader);
        glDetachShader(m_RendererID, fragmentShader);
    }
    ~Shader() {
        glDeleteProgram(m_RendererID);
    }

    void Bind() const {
        glUseProgram(m_RendererID);
    }
    void Unbind() const {
        glUseProgram(0);
    }
private:
    uint32_t m_RendererID = 0;
};

int main() {

    Log("Initializing...");

    glfwSetErrorCallback(error_callback);

    if (!glfwInit())
    {
        // Initialization failed
        Log("Initialization Failed");
    }

    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);

    GLFWwindow* window = glfwCreateWindow(1080, 720, "GLFW_GLAD TEST", NULL, NULL);
    if (!window)
    {
        glfwTerminate();
        Log("Window or OpenGL Context Failed");
    }
    glfwMakeContextCurrent(window);
    if (!gladLoadGL()) {
        Log("Failed to load OpenGL functions with glad");
        return -1;
    }
    glfwSwapInterval(1);

    int width, height;
    glfwGetFramebufferSize(window, &width, &height);
    glViewport(0, 0, width, height);

    //APP CODE
    unsigned int m_VertexArray;
    unsigned int m_VertexBuffer;
    unsigned int m_IndexBuffer;

    float vertices[9] = {
        -0.5f, -0.5f, 0.0f,
         0.5f, -0.5f, 0.0f,
         0.0f,  0.5f, 0.0f
    };

    // Vertex Array
    glGenVertexArrays(1, &m_VertexArray);
    glBindVertexArray(m_VertexArray);

    // Vertex Buffer
    glGenBuffers(1, &m_VertexBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, m_VertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

    glDisableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), nullptr);


    // Index Buffer
    glGenBuffers(1, &m_IndexBuffer);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_IndexBuffer);

    unsigned int indices[3] = {
        0, 1, 2,
    };
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);

    std::string vertexSrc = R"(
        #version 330
    
        layout(location = 0) in vec3 a_Pos;
        void main()
        {
            gl_Position = vec4(a_Pos, 1);
        }
    )";
    std::string fragmentSrc = R"(
        #version 330
        
        layout(location = 0) out vec4 FragColor;
        void main()
        {
            FragColor = vec4(0.8f, 0.5f, 0.2f, 1.0f);
        }
    )";

    Shader* shader = new Shader(vertexSrc, fragmentSrc);

    shader->Bind();


    while (!glfwWindowShouldClose(window))
    {
        glViewport(0, 0, width, height);
        glClearColor(0.1f, .1f, .14f, 1);
        glClear(GL_COLOR_BUFFER_BIT);

        glBindVertexArray(m_VertexArray);
        glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_INT, nullptr);


        glfwSwapBuffers(window);
        glfwPollEvents();
    }

    delete shader;
    glfwDestroyWindow(window);
    glfwTerminate();
    return 0;
}

This here is definitely wrong:

 glDisableVertexAttribArray(0);

You meant glEnableVertexAttribArray here, so that the attribute pointer you are setting up is actually used. For disabled attribute arrays, all shader invocations will get a constant value.
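
For reference, the corrected attribute setup would look like this (a minimal sketch, reusing the names from your code):

 glEnableVertexAttribArray(0);
 glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), nullptr);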

Also your shaders use #version 330 and actually require GLSL 330 syntax, however:

 glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
 glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);

you request only a GL 2.0 context, and GL 2.0 only mandates support for GLSL 1.10. Whether this is going to work is completely up to your driver; you might get some >= 3.3 compatibility profile, but you can't count on it.
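
If you want to see what your driver actually handed you, you can query the context right after glfwMakeContextCurrent (a quick diagnostic sketch, not part of your original code):

 std::cout << "GL version:   " << glGetString(GL_VERSION) << std::endl;
 std::cout << "GLSL version: " << glGetString(GL_SHADING_LANGUAGE_VERSION) << std::endl;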

You should create a 3.3 context explicitly, and also explicitly request a core profile.
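
With GLFW that means setting the hints before glfwCreateWindow, for example (the forward-compat hint is only strictly required on macOS):

 glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
 glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
 glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
 glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GLFW_TRUE);

Note that a core profile makes a bound VAO mandatory for drawing; your code already creates and binds one, so it should work unchanged.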
