This question is not a duplicate: in the other question the user had no issues with the default shader, whereas I am having the same issue with both my custom shader and the default shader, so I believe the error in my code runs deeper than the shader itself.
I have been following this tutorial on YouTube by The Cherno: https://www.youtube.com/watch?v=71BLZwRGUJE&list=PLlrATfBNZ98foTJPJ_Ev03o2oq3-GGOS2&index=7
The tutorial explains how to set up a shader, and I have largely copied the code written in it (except that, unlike the tutor, I used a raw string literal to hold the GLSL source).
Here is my code:
#include <iostream>
#include <string>
#include <GL/glew.h>
#include <GLFW/glfw3.h>

static unsigned int CompileShader(unsigned int type, const std::string& source)
{
    unsigned int id = glCreateShader(type);
    const char* src = source.c_str();
    glShaderSource(id, 1, &src, nullptr);
    glCompileShader(id);

    int result;
    glGetShaderiv(id, GL_COMPILE_STATUS, &result);
    if (result == GL_FALSE)
    {
        int length;
        glGetShaderiv(id, GL_INFO_LOG_LENGTH, &length);
        char* message = (char*)_malloca(length * sizeof(char));
        glGetShaderInfoLog(id, length, &length, message); //fills message with the compile log
        if (type == GL_FRAGMENT_SHADER)
        {
            std::cout << "Could not compile fragment shader\n";
        }
        else if (type == GL_VERTEX_SHADER)
        {
            std::cout << "Could not compile vertex shader\n";
        }
        std::cout << message << std::endl;
        glDeleteShader(id);
        return 0;
    }
    else
    {
        if (type == GL_FRAGMENT_SHADER)
        {
            std::cout << "compiled fragment shader\n";
        }
        else if (type == GL_VERTEX_SHADER)
        {
            std::cout << "compiled vertex shader\n";
        }
    }
    return id;
}

static unsigned int CreateShader(const std::string& vertexShader, const std::string& fragmentShader)
{
    unsigned int program = glCreateProgram();
    unsigned int vs = CompileShader(GL_VERTEX_SHADER, vertexShader);
    unsigned int fs = CompileShader(GL_FRAGMENT_SHADER, fragmentShader);

    glAttachShader(program, vs);
    glAttachShader(program, fs);
    glLinkProgram(program);
    glValidateProgram(program);

    glDeleteShader(vs); //deletes the intermediate shader objects
    glDeleteShader(fs);

    return program;
}

int main(void)
{
    GLFWwindow* window;

    /* Initialize the library */
    if (!glfwInit())
        return -1;

    /* Create a windowed mode window and its OpenGL context */
    window = glfwCreateWindow(640, 480, "Hello World", NULL, NULL);
    if (!window)
    {
        glfwTerminate();
        return -1;
    }

    /* Make the window's context current */
    glfwMakeContextCurrent(window);

    if (glewInit() != GLEW_OK)
    {
        std::cout << "Error\n";
    }

    float position[] = { -0.5f, -0.5f,
                          0.0f, -0.5f,
                          0.5f, -0.5f }; //The positions of the vertices that will be put in the vertex buffer

    unsigned int buffer;
    glGenBuffers(1, &buffer); //Generates the buffer
    glBindBuffer(GL_ARRAY_BUFFER, buffer); //Binds the buffer so it can be accessed by the GL state machine
    glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), position, GL_STATIC_DRAW); //Initialises the vertex buffer

    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
    glEnableVertexAttribArray(0);

    std::string vertexShader = R"glsl(
        #version 330 core
        layout(location = 0) in vec4 position;
        void main()
        {
            gl_Position = position;
        }
    )glsl";

    std::string fragmentShader = R"glsl(
        #version 330 core
        layout(location = 0) out vec4 color;
        void main()
        {
            color = vec4(1.0, 0.0, 0.0, 1.0);
        }
    )glsl";

    unsigned int shader = CreateShader(vertexShader, fragmentShader);
    glUseProgram(shader);

    /* Loop until the user closes the window */
    while (!glfwWindowShouldClose(window))
    {
        /* Render here */
        glClear(GL_COLOR_BUFFER_BIT);

        glDrawArrays(GL_TRIANGLES, 0, 3); //Draws without an index buffer

        /* Swap front and back buffers */
        glfwSwapBuffers(window);

        /* Poll for and process events */
        glfwPollEvents();
    }

    glfwTerminate();
    return 0;
}
The tutor's code seemed to work, but mine only displays a black screen. It does not work with the default shader either: whichever shader I use, the window stays black.
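In case it helps with diagnosing this, here is a small error-checking sketch I could add to the render loop to drain and print the OpenGL error queue after the draw call. The PrintGLErrors helper is just an illustrative name and is not part of my program above; it assumes the same includes (<iostream>, GL/glew.h).

static void PrintGLErrors(const char* label)
{
    //glGetError returns GL_NO_ERROR (0) once the error queue is empty,
    //so this loop prints every pending error code and then stops
    while (GLenum err = glGetError())
    {
        std::cout << "[OpenGL error] (" << err << ") after " << label << std::endl;
    }
}

//usage, right after the draw call in the render loop:
//    glDrawArrays(GL_TRIANGLES, 0, 3);
//    PrintGLErrors("glDrawArrays");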