Compositor is blending OpenGL even when fragment alphas are 1.0


The following code (a bare minimal example) produces a triangle on top of a plain-color screen. Both the triangle and the background colors have alpha values of 1.0, yet when the program is run with the compositor enabled (X11/KDE system settings) the colors are still merged with whatever is behind the window. The effect looks as if the window had an opacity setting (except for the titlebar, so it is not the window opacity value).

I tried my best to avoid this behavior: changing X visuals, changing EGL configurations and depths, disabling OpenGL blending, changing blend functions, etc. The only way I found to prevent it is to switch the visual to 24 bits, but then the window is not composited at all, and I do want compositing: I want some parts of the output to be blended, just not everything and without any control over it.
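For reference, this is roughly the blending-related GL state I experimented with in the render loop (a sketch from memory, assuming a current OpenGL context; none of it changed the result):

// Blend-state variations tried; neither prevented the compositor from blending the window.
glDisable(GL_BLEND);             // draw with blending disabled entirely
// ...or keep blending enabled but force source pixels to replace the destination:
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_ZERO);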

Here is a minimal reproducible example:

//
//compilation example:   g++ test.cpp -o test -lEGL -lX11 -lGLX -lOpenGL
//
#include  <iostream>
#include  <cstdlib>
#include  <cstring>

#include  <cmath>
#include  <sys/time.h>

#include  <X11/Xlib.h>
#include  <X11/Xatom.h>
#include  <X11/Xutil.h>

#define GL_GLEXT_PROTOTYPES
#include <GL/gl.h>
#include <GL/glext.h>
#include  <EGL/egl.h>

const char *vertexShaderSource=
    "#version 330 core\n"
    "layout (location=0) in vec3 aPos;\n"
    "void main(){gl_Position = vec4 (aPos.x,aPos.y,aPos.z,1.0);}"
    ;

const char *fragmentShaderSource =
    "#version 330 core\n"
    "out vec4 FragColor;\n"
    "void main(){FragColor = vec4 (1.0,0.5f,0.2f,1.0f);}"
    ;

float vertices[] = {    -0.5f, -0.5f, 0.0f, 0.5f, -0.5f, 0.0f, 0.0f,  0.5f, 0.0f  };

GLuint load_shader ( const char  *shader_source, GLenum type )
{
   GLuint  shader = glCreateShader( type );
   glShaderSource  ( shader , 1 , &shader_source , nullptr );
   glCompileShader ( shader );
   return shader;
}
bool someerror=false;
template<typename T>
T &showError(const char * err,T&t)
{
    if (!t)
    {
        someerror=true;
        std::cerr<<err<<std::endl;
    }
    return t;
}
template<typename T>
const T &showError(const char * err,const T&t)
{
    if (!t)
    {
        someerror=true;
        std::cerr<<err<<std::endl;
    }
    return t;
}

int main(int, char**)
{
    auto display=XOpenDisplay(nullptr);
    showError("Error opening display",display);
    auto egl_display=eglGetDisplay(EGL_CAST(EGLNativeDisplayType,display));
    showError("Error opening egl display",egl_display);
    showError("Error initializing egl",eglInitialize(egl_display,nullptr,nullptr));
    showError("Error binding api",eglBindAPI(EGL_OPENGL_API));


    auto screen=DefaultScreen(display);
    Window parentWin=RootWindow(display,screen);


    XVisualInfo vinfo;

    Window win;
    XSetWindowAttributes setWAtt;

    //
    // Change UseDepth to 24 to avoid compositing altogether
    //
    auto UseDepth=32;
    if (!XMatchVisualInfo(display, screen, UseDepth, TrueColor, &vinfo))
    {
        win=XCreateWindow(display,parentWin,0,0,800,480,0,CopyFromParent,InputOutput,CopyFromParent,0,nullptr);
    }
    else
    {
        setWAtt.colormap = XCreateColormap(display, parentWin, vinfo.visual, AllocNone);
        setWAtt.background_pixel = 0xffffffff;
        setWAtt.border_pixel = 0xffffffff;
        auto valueMask=CWColormap|CWBorderPixel|CWBackPixel;
        win=XCreateWindow(display,parentWin,0,0,800,480,0,vinfo.depth, InputOutput, vinfo.visual,valueMask,&setWAtt);
    }
    showError("Error creating window",win);

    XWMHints hints;
    hints.input = true;
    hints.flags = InputHint;
    XSetWMHints(display, win, &hints);

    XMapWindow ( display , win );

    XSync(display,false);

    // Request an EGL config with at least 8 bits per red, green and blue channel
    EGLint attr[] = {EGL_RED_SIZE,8,EGL_GREEN_SIZE,8,EGL_BLUE_SIZE,8,EGL_NONE};

    EGLConfig  ecfg[200];
    EGLint     num_config;
    showError("Error choosing egl config",eglChooseConfig( egl_display, attr, ecfg, 200, &num_config ));
    EGLSurface egl_surface=EGL_NO_SURFACE; // initialize so the check below is valid even if no config works
    int matchConfig=0;
    for (;matchConfig<num_config;matchConfig++)
    {
        egl_surface = eglCreateWindowSurface ( egl_display, ecfg[matchConfig], win, nullptr );
        if ( egl_surface != EGL_NO_SURFACE )
            break;
    }
    showError("Error creating surface",egl_surface);

    EGLint ctxattr[] = { EGL_NONE  };
    auto egl_context = eglCreateContext ( egl_display, ecfg[matchConfig], EGL_NO_CONTEXT, ctxattr );
    showError("Error creating context",egl_context);

    eglMakeCurrent( egl_display, egl_surface, egl_surface, egl_context );

    GLuint vertexShader   = load_shader ( vertexShaderSource , GL_VERTEX_SHADER  );
    GLuint fragmentShader = load_shader ( fragmentShaderSource , GL_FRAGMENT_SHADER );

    GLuint shaderProgram  = glCreateProgram ();
    glAttachShader ( shaderProgram, fragmentShader );
    glAttachShader ( shaderProgram, vertexShader );

    glLinkProgram ( shaderProgram );
    glUseProgram  ( shaderProgram );


    // Render loop: clear to an opaque color and draw one opaque triangle each frame
    while (!someerror)
    {
        glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
        glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT);
        glVertexAttribPointer ( 0, 3, GL_FLOAT, false, 0, vertices );
        glEnableVertexAttribArray ( 0 );
        glDrawArrays(GL_TRIANGLES,0,3);
        eglSwapBuffers ( egl_display, egl_surface );
    }

    return 0;
}

The output produced, with the compositor enabled on the left and disabled on the right:

[Screenshot: Compositor On / Compositor Off]

My goal is to produce something like this with the compositor enabled:

[Screenshot: goal with compositor enabled]

My testing platform:

  • OpenSuse Tumbleweed 20201018
  • KDE/Plasma 5.19.5
  • GeForce RTX 2070 SUPER (with nvidia drivers)
  • Xorg server 1.20.9
  • Kernel 5.8.14
Tags: c++, opengl, x11, egl, glx
asked on Stack Overflow Oct 13, 2020 by Pablo Yaggi

1 Answer


Well, it turns out that the EGL configuration used to create the egl_surface has no alpha bits; in that case the output is merged with whatever is behind the window, at least on my hardware/software. So to fix that behavior, this needs to be changed:

//    EGLint attr[] = {EGL_RED_SIZE,8,EGL_GREEN_SIZE,8,EGL_BLUE_SIZE,8,EGL_NONE};
    EGLint attr[] = {EGL_RED_SIZE,8,EGL_GREEN_SIZE,8,EGL_BLUE_SIZE,8,EGL_ALPHA_SIZE,8,EGL_NONE};

The way I see it, the compositor gets output with the alpha stripped, and because the window surface has alpha bits it decides to do that blending anyway. I don't know whether this is intended and/or documented.
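If you want to confirm which configuration eglChooseConfig actually returned, a quick diagnostic (not part of the example above, just a sketch reusing its egl_display, ecfg and matchConfig variables and the standard eglGetConfigAttrib query) is to print the channel sizes of the chosen config:

// Diagnostic sketch: verify that the chosen EGLConfig really has 8 alpha bits
EGLint r = 0, g = 0, b = 0, a = 0;
eglGetConfigAttrib(egl_display, ecfg[matchConfig], EGL_RED_SIZE,   &r);
eglGetConfigAttrib(egl_display, ecfg[matchConfig], EGL_GREEN_SIZE, &g);
eglGetConfigAttrib(egl_display, ecfg[matchConfig], EGL_BLUE_SIZE,  &b);
eglGetConfigAttrib(egl_display, ecfg[matchConfig], EGL_ALPHA_SIZE, &a);
std::cerr << "EGLConfig RGBA bits: " << r << " " << g << " " << b << " " << a << std::endl;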

answered on Stack Overflow Oct 13, 2020 by Pablo Yaggi • edited Oct 14, 2020 by Pablo Yaggi

User contributions licensed under CC BY-SA 3.0