Hello, I drew an OpenGL triangle that worked fine with 3-float color vertex attributes; the alpha component was set in the shader.
Now in this version I'm trying to send color attributes with 4 floats, but the colors are weird and the third vertex is always black. The programming language is Nim.
type
  OGLfloat = float32
  OGLuint = uint32
  OGLint = int32
# Glfw3 and Opengl constants
const
  GLFW_CONTEXT_VERSION_MAJOR = 0x00022002
  GLFW_CONTEXT_VERSION_MINOR = 0x00022003
  GLFW_OPENGL_PROFILE = 0x00022008
  GLFW_OPENGL_CORE_PROFILE = 0x00032001

const
  GL_COLOR_BUFFER_BIT = 0x00004000
  GL_DEPTH_BUFFER_BIT = 0x00000100
  GL_ACCUM_BUFFER_BIT = 0x00000200
  GL_STENCIL_BUFFER_BIT = 0x00000400
  GL_ARRAY_BUFFER = 0x8892
  GL_ELEMENT_ARRAY_BUFFER = 0x8893
  GL_FALSE = 0.char
  GL_STATIC_DRAW = 0x88E4
  GL_FLOAT = 0x1406
  GL_VERTEX_SHADER = 0x8B31
  GL_COMPILE_STATUS = 0x8B81
  GL_INFO_LOG_LENGTH = 0x8B84
  GL_FRAGMENT_SHADER = 0x8B30
  GL_LINK_STATUS = 0x8B82
  GL_TRIANGLES = 0x0004
  GL_UNSIGNED_INT = 0x1405
  GL_VERSION = 0x1F02
# My own constants
const
  POSITION_LENGTH = 3.OGLint
  COLOR_LENGTH = 4.OGLint

const
  WINDOW_W = 640
  WINDOW_H = 480

let
  colorDataOffset = COLOR_LENGTH * OGLint(sizeof(OGLfloat))
# I didn't paste the OpenGL imports here to save some space
# Opengl imports...
var
  # I expect a black triangle, but the first two vertices are blue on screen.
  vertices = @[OGLfloat(0.0), 0.5, 0, 0, 0, 0, 1,
               0.5, -0.5, 0, 0, 0, 0, 1,
               -0.5, -0.5, 0, 0, 0, 0, 1]
  indices = @[OGLuint(0), 1, 2]
var glfwErr = glfwInit()
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3)
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3)
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE)
var winHandle = glfwCreateWindow(WINDOW_W, WINDOW_H)
glfwMakeContextCurrent(winHandle)
var glewErr = glewInit()
var
  shadID: OGLuint
  vertSrc: cstring = """
#version 330 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec4 aColor;
out vec4 vColor;
void main()
{
    gl_Position = vec4(aPos, 1.0f);
    vColor = aColor;
}
"""
  fragSrc: cstring = """
#version 330 core
out vec4 FragColor;
in vec4 vColor;
void main()
{
    FragColor = vColor;
}
"""
proc send_src(vert: var cstring, frag: var cstring): OGLuint =
  var success: OGLint
  # vertex
  var vertexShader = glCreateShader(GL_VERTEX_SHADER)
  glShaderSource(vertexShader, 1, addr vert, nil)
  glCompileShader(vertexShader)
  # Check compilation errors.
  glGetShaderiv(vertexShader, GL_COMPILE_STATUS, addr success)
  if bool(success) == false:
    echo("vertex shader compilation failed (send_src)")
  else:
    echo("vertexShader compiled (send_src)")
  # fragment
  var fragmentShader = glCreateShader(GL_FRAGMENT_SHADER)
  glShaderSource(fragmentShader, 1, addr frag, nil)
  glCompileShader(fragmentShader)
  # Check compilation errors.
  glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, addr success)
  if bool(success) == false:
    echo("fragment shader compilation failed (send_src)")
  else:
    echo("fragmentShader compiled (send_src)")
  # Shader program
  result = glCreateProgram()
  glAttachShader(result, vertexShader)
  glAttachShader(result, fragmentShader)
  glLinkProgram(result)
  # Check for linkage errors.
  glGetProgramiv(result, GL_LINK_STATUS, addr success)
  if success == 0:
    echo("program linking failed (send_src)")
  else:
    echo("shader linked (send_src)")
  glDeleteShader(vertexShader)
  glDeleteShader(fragmentShader)
glViewport(0, 0, WINDOW_W, WINDOW_H)
shadID = send_src(vertSrc, fragSrc)
var VAO, VBO, EBO: OGLuint
glGenVertexArrays(1, addr VAO)
glGenBuffers(1, addr VBO)
glGenBuffers(1, addr EBO)
glBindVertexArray(VAO)
glBindBuffer(GL_ARRAY_BUFFER, VBO)
glBufferData(GL_ARRAY_BUFFER, vertices.len * sizeof(OGLfloat),
             addr vertices[0], GL_STATIC_DRAW)
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO)
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.len * sizeof(OGLuint),
             addr indices[0], GL_STATIC_DRAW)
# Position layout
glVertexAttribPointer(0, POSITION_LENGTH, GL_FLOAT, GL_FALSE,
                      (POSITION_LENGTH + COLOR_LENGTH) * OGLint(sizeof(OGLfloat)),
                      nil)
glEnableVertexAttribArray(0)
# Color layout
glVertexAttribPointer(1, COLOR_LENGTH, GL_FLOAT, GL_FALSE,
                      (POSITION_LENGTH + COLOR_LENGTH) * OGLint(sizeof(OGLfloat)),
                      cast[pointer](colorDataOffset))
glEnableVertexAttribArray(1)
glBindBuffer(GL_ARRAY_BUFFER, 0)
glBindVertexArray(0)
glUseProgram(shadID)
while bool(glfwWindowShouldClose(winHandle)) == false:
  glClearColor(0.2, 0.3, 0.3, 1.0)
  glClear(GL_COLOR_BUFFER_BIT)
  glBindVertexArray(VAO)
  glDrawElements(GL_TRIANGLES, OGLint(indices.len), GL_UNSIGNED_INT, nil)
  glfwSwapBuffers(winHandle)
  glfwPollEvents()
glDeleteVertexArrays(1, addr VAO)
glDeleteBuffers(1, addr VBO)
glDeleteBuffers(1, addr EBO)
glfwDestroyWindow(winHandle)
glfwTerminate()
I can't figure out what is wrong.
To set up the pointer for your color attribute, you are using:
glVertexAttribPointer(1, COLOR_LENGTH, GL_FLOAT, GL_FALSE, (POSITION_LENGTH + COLOR_LENGTH) * OGLint(sizeof(OGLfloat)), cast[pointer](colorDataOffset))
This means that your first color attribute begins at byte offset colorDataOffset.
Since your vertex format is (3 * 4 bytes position | 4 * 4 bytes color), the correct offset would be 12, which skips the position part of the very first vertex. However, you set it to:
colorDataOffset = COLOR_LENGTH * OGLint(sizeof(OGLfloat))
which evaluates to 16, so you actually mix in the position data of the next vertex.
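To illustrate, this is the byte layout of your interleaved buffer (offsets derived from the declarations above, 4 bytes per float):

byte:   0     4     8     12    16    20    24    28
        | x   | y   | z   | r   | g   | b   | a   | next x ...

With an offset of 16, the first color fetch starts at g, so the shader receives (g, b, a, next x) as (r, g, b, a). Since your colors are all (0, 0, 0, 1), the alpha value 1 lands in the blue channel, which matches the blue vertices you see; for the last vertex the fetch runs past the end of the buffer, which is undefined and here apparently comes back as black.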
You need to use POSITION_LENGTH here instead:
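A minimal sketch of the fix, keeping your names (only the offset changes, the stride stays the same):

let
  colorDataOffset = POSITION_LENGTH * OGLint(sizeof(OGLfloat)) # 3 * 4 = 12 bytes

# Color layout: same stride, corrected offset.
glVertexAttribPointer(1, COLOR_LENGTH, GL_FLOAT, GL_FALSE,
                      (POSITION_LENGTH + COLOR_LENGTH) * OGLint(sizeof(OGLfloat)),
                      cast[pointer](colorDataOffset))
glEnableVertexAttribArray(1)

Each color fetch then starts 12 bytes into its vertex and advances by the 28-byte stride.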