Rendering with RGB32UI internalFormat and reading with glReadPixels() not giving proper pixels


For fun I was testing the RGB32UI format for rendering. I managed to solve the problem in a somewhat fancy way, and I'm curious whether it's possible to do it "normally".

Here is my code to draw a quad into the render buffer:

virtual long Run()
{
    long result = NO_ERROR;

    glBindTexture(GL_TEXTURE_2D, m_texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    m_program = CreateProgram(VertexShader(), "", "", "", FragmentShader());
    glLinkProgram(m_program);

    const float vertex_data[] = { -1, -1, 0, 1, 1, -1, 1, 1, 1, 1, 1, 0, -1, 1, 0, 0 };
    glBindBuffer(GL_ARRAY_BUFFER, m_vbo);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertex_data), vertex_data, GL_STATIC_DRAW);

    glBindVertexArray(m_vao);
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), 0);
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void*)(2 * sizeof(GLfloat)));
    glEnableVertexAttribArray(0);
    glEnableVertexAttribArray(1);

    glUseProgram(m_program);

    glUniform1f(glGetUniformLocation(m_program, "normalizer"), 127.0);
    if (Execute<GLbyte>(GL_RGBA8I, GL_BGRA_INTEGER, GL_BYTE, 0x00, 101, 127, 85) == ERROR)
    {
        Output(2, "Fail: Internal format GL_RGBA8I\t\tType GL_BYTE.\n");
        result |= ERROR;
    }
    glUniform1f(glGetUniformLocation(m_program, "normalizer"), 32767.0);
    if (Execute<GLshort>(GL_RGBA16I, GL_BGRA_INTEGER, GL_SHORT, 0x0000, 26214, 32767, 21845) == ERROR)
    {
        Output(2, "Fail: Internal format GL_RGBA16I\t\tType GL_SHORT.\n");
        result |= ERROR;
    }
    glUniform1f(glGetUniformLocation(m_program, "normalizer"), 2147483647.0);
    if (Execute<GLint>(GL_RGBA32I, GL_BGRA_INTEGER, GL_INT, 0x00000000, 1717987071, 2147483647, 1431655807) == ERROR)
    {
        Output(2, "Fail: Internal format GL_RGBA32I\t\tType GL_INT.\n");
        result |= ERROR;
    }

    return result;
}
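The framebuffer setup isn't shown in the snippet above. For writing un-normalized integers from the fragment shader (the approach described at the end of the question), the color attachment itself would have to use an integer internal format. Below is a minimal sketch of what such an attachment might look like; the names fbo and colorTex are made up for illustration, and the 100x100 size just mirrors the glReadPixels calls further down.

    // Sketch only -- not code from the question.
    // A 100x100 GL_RGBA32I color attachment so the fragment shader can
    // write raw signed integers instead of normalized floats.
    GLuint fbo = 0, colorTex = 0;
    glGenTextures(1, &colorTex);
    glBindTexture(GL_TEXTURE_2D, colorTex);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA32I, 100, 100, 0,
                 GL_RGBA_INTEGER, GL_INT, nullptr);
    // Integer textures are only texture-complete with NEAREST filtering.
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);

    glGenFramebuffers(1, &fbo);
    glBindFramebuffer(GL_FRAMEBUFFER, fbo);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
                           GL_TEXTURE_2D, colorTex, 0);
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
    {
        /* handle incomplete framebuffer */
    }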

template<typename T>
long Execute(const long& internalFormat, const long& format, const long& type, const long& B, const long& G, const long& R, const long& A)
{
    long result = NO_ERROR;

    std::vector<T> dataIN(8192 * 4);
    std::vector<T> dataOUT(8192 * 4);

    for (int i = 0; i < 8192; i++)
    {
        dataIN[4 * i] = B;
        dataIN[4 * i + 1] = G;
        dataIN[4 * i + 2] = R;
        dataIN[4 * i + 3] = A;
    }

    glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, 8192, 1, 0, format, type, &dataIN[0]);
    glGetTexImage(GL_TEXTURE_2D, 0, format, type, &dataOUT[0]);

    if (memcmp(&dataIN[0], &dataOUT[0], sizeof(T) * 8192 * 4) != 0)
    {
        Output(3, "Texel read from bound texture (%d, %d, %d, %d) is not equal to texel wrote to bound texture (%d, %d, %d, %d).\n",
            dataOUT[0], dataOUT[1], dataOUT[2], dataOUT[3], dataIN[0], dataIN[1], dataIN[2], dataIN[3]);
        result = ERROR;
    }

    glClear(GL_COLOR_BUFFER_BIT);
    glDrawArrays(GL_QUADS, 0, 4);

    std::vector<vec4> pix(100 * 100);
    glReadPixels(0, 0, 100, 100, GL_RGBA, GL_FLOAT, &pix[0]);
    DisplayData(100, 100, pix);

    result |= CheckRectColor(pix, 100, 0, 0, 100, 100, vec4(1.0f, 0.8f, 0.0f, 0.666666f)) ? NO_ERROR : ERROR;

    return result;
}
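(Not part of the original test code: when a transfer call such as glTexImage2D, glGetTexImage or glReadPixels silently does nothing, checking glGetError() right after the call usually reveals a GL_INVALID_OPERATION or GL_INVALID_ENUM. A small debugging helper along these lines is only a sketch:)

    #include <cstdio>

    // Debugging sketch only: drain and report any pending GL errors.
    static void CheckGLError(const char* where)
    {
        for (GLenum err = glGetError(); err != GL_NO_ERROR; err = glGetError())
            std::fprintf(stderr, "GL error 0x%04X at %s\n", err, where);
    }

    // e.g. right after the readback:
    // glReadPixels(0, 0, 100, 100, GL_RGBA, GL_FLOAT, &pix[0]);
    // CheckGLError("glReadPixels");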

My fancy FragmentShader:

std::string FragmentShader()
{
    return
        CONF_SHADER_BEGIN(430)
        NL "in vec2 texCoord;"
        NL "out vec4 out_color;"
        NL "uniform isampler2D tex;"
        NL "uniform float normalizer;"
        NL "void main() {"
        NL "    ivec4 color = texture(tex, texCoord);"
        NL "    out_color = vec4(color.rgb / normalizer, color.a / normalizer);"
        NL "}";
}

As you can see, I'd like to upload the texture with the GL_BGRA_INTEGER pixel format (internal formats GL_RGBA8I, GL_RGBA16I, GL_RGBA32I), render it to a buffer, and then read the pixels back with glReadPixels(). I tried reading with the GL_FLOAT and GL_INT types, but they give me nothing... The only way I managed to make it work is by normalizing the values inside the shader. The way I'd like to do it is with the FragmentShader below:

virtual std::string FragmentShader()
{
    return
        CONF_SHADER_BEGIN(430)
        NL "in vec2 texCoord;"
        NL "out ivec4 out_color;"
        NL "uniform isampler2D tex;"
        NL "void main() {"
        NL "    out_color = texture(tex, texCoord);"
        NL "}";
}

And read pixels with:

std::vector<ivec4> pix(100 * 100);
glReadPixels(0, 0, 100, 100, GL_RGBA, GL_INT, &pix[0]);
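For what it's worth, the GL specification ties the glReadPixels format to the kind of read buffer: reading from a color buffer with an integer internal format requires one of the *_INTEGER pixel formats, and using a non-integer format (such as GL_RGBA with GL_FLOAT or GL_INT) on such a buffer raises GL_INVALID_OPERATION and writes nothing, which matches the symptom described above. Assuming the read framebuffer's attachment really is GL_RGBA32I, the readback would look roughly like this (a sketch, not code from the question):

    // Sketch: raw signed-integer readback from a GL_RGBA32I color attachment.
    // GL_RGBA_INTEGER (not GL_RGBA) is required when the buffer is integer-valued.
    std::vector<GLint> ipix(100 * 100 * 4);
    glReadPixels(0, 0, 100, 100, GL_RGBA_INTEGER, GL_INT, ipix.data());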
Tags: c++, opengl
asked on Stack Overflow Dec 19, 2019 by Takeren • edited Dec 19, 2019 by genpfault

0 Answers

Nobody has answered this question yet.


User contributions licensed under CC BY-SA 3.0