It looks like FBOs are incorrectly shared between contexts: a framebuffer object created in one context is still reported by glIsFramebuffer() in a sibling context of the same share group. I’m judging by the lifetime of the objects, not their availability.
Here is the example:
#include <EGL/egl.h>
#include <GLES2/gl2.h>
#include <stdio.h>
#include <assert.h>
int main(int argc, char** argv) {
EGLDisplay d = eglGetDisplay(EGL_DEFAULT_DISPLAY);
EGLBoolean b = eglInitialize(d, NULL, NULL);
const EGLint attrs[] = {
EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
EGL_NONE
};
EGLint num_cfg;
EGLConfig cfg;
eglChooseConfig(d, attrs, &cfg, 1, &num_cfg);
const EGLint pbuf_attrs[] = {
EGL_WIDTH, 1, EGL_HEIGHT, 1,
EGL_NONE
};
EGLSurface sfc = eglCreatePbufferSurface(d, cfg, pbuf_attrs);
const EGLint ctx_attrs[] = {
EGL_CONTEXT_CLIENT_VERSION, 2,
EGL_NONE
};
EGLContext share_ctx = eglCreateContext(d, cfg, EGL_NO_CONTEXT, ctx_attrs);
EGLContext ctx1 = eglCreateContext(d, cfg, share_ctx, ctx_attrs);
EGLContext ctx2 = eglCreateContext(d, cfg, share_ctx, ctx_attrs);
eglMakeCurrent(d, sfc, sfc, ctx1);
GLuint tex;
GLint fmt=GL_RGBA, mWidth=1280, mHeight=800, type=GL_UNSIGNED_BYTE;
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, fmt, mWidth, mHeight, 0, fmt, type, NULL);
glBindTexture(GL_TEXTURE_2D, 0);
GLuint fb;
glGenFramebuffers(1, &fb);
glBindFramebuffer(GL_FRAMEBUFFER, fb);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex, 0);
glBindFramebuffer(GL_FRAMEBUFFER, GL_NONE);
assert(glIsFramebuffer(fb) == 1);
eglMakeCurrent(d, sfc, sfc, ctx2);
glDeleteTextures(1, &tex);
eglDestroyContext(d, ctx1);
assert(glIsFramebuffer(fb) == 0); // <--- Assert fails here
return 0;
}
Tested on Ubuntu 16.04, nvidia-381, running on AWS G3 instance (Tesla M60).