Make EGL pixel format selection follow the same procedure as the other context types and make OpenGL context version parsing more tolerant of garbage data. Fixes #2395

Jonathan De Wachter 2016-09-28 18:43:10 +07:00 committed by Chris Thrasher
parent 7004db1cd9
commit 6a3feda5ab
3 changed files with 118 additions and 65 deletions

src/SFML/Graphics/RenderTextureImplFBO.cpp

@@ -263,15 +263,17 @@ bool RenderTextureImplFBO::create(const Vector2u& size, unsigned int textureId,
                                                 static_cast<GLsizei>(size.x),
                                                 static_cast<GLsizei>(size.y)));
 
+            m_stencil = true;
+
 #else
 
+            m_stencil = false;
+
             err() << "Impossible to create render texture (failed to create the attached depth/stencil buffer)"
                   << std::endl;
             return false;
 
 #endif // SFML_OPENGL_ES
-
-            m_stencil = true;
         }
         else if (settings.depthBits)
         {
@@ -355,14 +357,16 @@ bool RenderTextureImplFBO::create(const Vector2u& size, unsigned int textureId,
                                                                static_cast<GLsizei>(size.y)));
             }
 
+            m_multisample = true;
+
 #else
 
+            m_multisample = false;
+
             err() << "Impossible to create render texture (failed to create the multisample render buffers)" << std::endl;
             return false;
 
 #endif // SFML_OPENGL_ES
-
-            m_multisample = true;
 
         }
     }
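
Both hunks above make the same fix: the m_stencil / m_multisample flags are now set inside the non-GLES branch that actually created the buffers, and explicitly cleared before the GLES branch reports failure, rather than being assigned unconditionally after the #endif. A minimal standalone sketch of that shape, with a hypothetical helper and a plain bool standing in for SFML's members:

#include <iostream>

namespace
{
// Hypothetical stand-in for the GL render buffer creation calls
bool createDepthStencilBuffer()
{
    return true;
}

bool stencilAvailable = false; // stands in for the m_stencil member
} // namespace

bool attachDepthStencilSketch()
{
#ifndef SFML_OPENGL_ES
    if (!createDepthStencilBuffer())
        return false;

    stencilAvailable = true; // set only once the buffer really exists
#else
    stencilAvailable = false; // this build cannot create the combined buffer

    std::cerr << "failed to create the attached depth/stencil buffer" << std::endl;
    return false;
#endif

    return true;
}

The point of the reordering is that a failed or skipped creation path can no longer leave the flag claiming a buffer that was never attached.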

src/SFML/Window/EglContext.cpp

@@ -31,6 +31,7 @@
 #include <SFML/Window/EglContext.hpp>
 #include <SFML/Window/WindowImpl.hpp>
 
+#include <memory>
 #include <mutex>
 #include <ostream>
 #ifdef SFML_SYSTEM_ANDROID
@@ -80,18 +81,27 @@ EGLDisplay getInitializedDisplay()
 ////////////////////////////////////////////////////////////
 void ensureInit()
 {
-    static bool initialized = false;
-    if (!initialized)
-    {
-        initialized = true;
+    static std::once_flag flag;
 
-        // We don't check the return value since the extension
-        // flags are cleared even if loading fails
-        gladLoaderLoadEGL(EGL_NO_DISPLAY);
+    std::call_once(flag,
+                   []()
+                   {
+                       if (!gladLoaderLoadEGL(EGL_NO_DISPLAY))
+                       {
+                           // At this point, the failure is unrecoverable
+                           // Dump a message to the console and let the application terminate
+                           sf::err() << "Failed to load EGL entry points" << std::endl;
+                           assert(false);
+                           return false;
+                       }
 
-        // Continue loading with a display
-        gladLoaderLoadEGL(getInitializedDisplay());
-    }
+                       // Continue loading with a display
+                       gladLoaderLoadEGL(getInitializedDisplay());
+                       return true;
+                   });
 }
 } // namespace EglContextImpl
 } // namespace
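
The once-only initialization introduced here is plain std::call_once with a function-local std::once_flag. A standalone sketch of the same pattern, with a hypothetical loadEntryPoints() standing in for gladLoaderLoadEGL:

#include <cassert>
#include <iostream>
#include <mutex>

namespace
{
// Hypothetical stand-in for gladLoaderLoadEGL(EGL_NO_DISPLAY); returns false on failure
bool loadEntryPoints()
{
    return true;
}
} // namespace

void ensureInitSketch()
{
    static std::once_flag flag;

    std::call_once(flag,
                   []
                   {
                       if (!loadEntryPoints())
                       {
                           // Unrecoverable: report it and trip the assert in debug builds
                           std::cerr << "Failed to load EGL entry points" << std::endl;
                           assert(false);
                           return;
                       }

                       // ...continue loading against a real display, as the diff above does
                   });
}

Unlike the old static bool guard, std::call_once is safe when two threads trigger the first initialization at the same time. Note that std::call_once discards the callable's return value, so the return false; / return true; in the real lambda only serve to leave it early.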
@@ -287,67 +297,100 @@ EGLConfig EglContext::getBestConfig(EGLDisplay display, unsigned int bitsPerPixel,
 {
     EglContextImpl::ensureInit();
 
-    // Set our video settings constraint
-    const EGLint attributes[] =
-        {EGL_BUFFER_SIZE,
-         static_cast<EGLint>(bitsPerPixel),
-         EGL_DEPTH_SIZE,
-         static_cast<EGLint>(settings.depthBits),
-         EGL_STENCIL_SIZE,
-         static_cast<EGLint>(settings.stencilBits),
-         EGL_SAMPLE_BUFFERS,
-         settings.antialiasingLevel ? 1 : 0,
-         EGL_SAMPLES,
-         static_cast<EGLint>(settings.antialiasingLevel),
-         EGL_SURFACE_TYPE,
-         EGL_WINDOW_BIT | EGL_PBUFFER_BIT,
-         EGL_RENDERABLE_TYPE,
-         EGL_OPENGL_ES_BIT,
-         EGL_NONE};
+    // Determine the number of available configs
+    EGLint configCount;
+    eglCheck(eglGetConfigs(display, nullptr, 0, &configCount));
 
-    EGLint    configCount;
-    EGLConfig configs[1];
+    // Retrieve the list of available configs
+    auto configs = std::make_unique<EGLConfig[]>(static_cast<std::size_t>(configCount));
 
-    // Ask EGL for the best config matching our video settings
-    eglCheck(eglChooseConfig(display, attributes, configs, 1, &configCount));
+    eglCheck(eglGetConfigs(display, configs.get(), configCount, &configCount));
 
-    // TODO: This should check EGL_CONFORMANT and pick the first conformant configuration.
+    // Evaluate all the returned configs, and pick the best one
+    int       bestScore = 0x7FFFFFFF;
+    EGLConfig bestConfig{};
 
-    return configs[0];
+    for (std::size_t i = 0; i < static_cast<std::size_t>(configCount); ++i)
+    {
+        // Check mandatory attributes
+        int surfaceType;
+        int renderableType;
+        eglCheck(eglGetConfigAttrib(display, configs[i], EGL_SURFACE_TYPE, &surfaceType));
+        eglCheck(eglGetConfigAttrib(display, configs[i], EGL_RENDERABLE_TYPE, &renderableType));
+        if (!(surfaceType & (EGL_WINDOW_BIT | EGL_PBUFFER_BIT)) || !(renderableType & EGL_OPENGL_ES_BIT))
+            continue;
+
+        // Extract the components of the current config
+        int red;
+        int green;
+        int blue;
+        int alpha;
+        int depth;
+        int stencil;
+        int multiSampling;
+        int samples;
+        int caveat;
+        eglCheck(eglGetConfigAttrib(display, configs[i], EGL_RED_SIZE, &red));
+        eglCheck(eglGetConfigAttrib(display, configs[i], EGL_GREEN_SIZE, &green));
+        eglCheck(eglGetConfigAttrib(display, configs[i], EGL_BLUE_SIZE, &blue));
+        eglCheck(eglGetConfigAttrib(display, configs[i], EGL_ALPHA_SIZE, &alpha));
+        eglCheck(eglGetConfigAttrib(display, configs[i], EGL_DEPTH_SIZE, &depth));
+        eglCheck(eglGetConfigAttrib(display, configs[i], EGL_STENCIL_SIZE, &stencil));
+        eglCheck(eglGetConfigAttrib(display, configs[i], EGL_SAMPLE_BUFFERS, &multiSampling));
+        eglCheck(eglGetConfigAttrib(display, configs[i], EGL_SAMPLES, &samples));
+        eglCheck(eglGetConfigAttrib(display, configs[i], EGL_CONFIG_CAVEAT, &caveat));
+
+        // Evaluate the config
+        int color = red + green + blue + alpha;
+        int score = evaluateFormat(bitsPerPixel, settings, color, depth, stencil, multiSampling ? samples : 0, caveat == EGL_NONE, false);
+
+        // If it's better than the current best, make it the new best
+        if (score < bestScore)
+        {
+            bestScore  = score;
+            bestConfig = configs[i];
+        }
+    }
+
+    assert(bestScore < 0x7FFFFFFF);
+
+    return bestConfig;
 }
 
 
 ////////////////////////////////////////////////////////////
 void EglContext::updateSettings()
 {
+    m_settings.majorVersion      = 1;
+    m_settings.minorVersion      = 1;
+    m_settings.attributeFlags    = ContextSettings::Default;
+    m_settings.depthBits         = 0;
+    m_settings.stencilBits       = 0;
+    m_settings.antialiasingLevel = 0;
+
     EGLBoolean result = EGL_FALSE;
     EGLint     tmp    = 0;
 
     // Update the internal context settings with the current config
     eglCheck(result = eglGetConfigAttrib(m_display, m_config, EGL_DEPTH_SIZE, &tmp));
 
-    if (result == EGL_FALSE)
-        err() << "Failed to retrieve EGL_DEPTH_SIZE" << std::endl;
-
-    m_settings.depthBits = static_cast<unsigned int>(tmp);
+    if (result != EGL_FALSE)
+        m_settings.depthBits = static_cast<unsigned int>(tmp);
 
     eglCheck(result = eglGetConfigAttrib(m_display, m_config, EGL_STENCIL_SIZE, &tmp));
 
-    if (result == EGL_FALSE)
-        err() << "Failed to retrieve EGL_STENCIL_SIZE" << std::endl;
-
-    m_settings.stencilBits = static_cast<unsigned int>(tmp);
-
-    eglCheck(result = eglGetConfigAttrib(m_display, m_config, EGL_SAMPLES, &tmp));
-
-    if (result == EGL_FALSE)
-        err() << "Failed to retrieve EGL_SAMPLES" << std::endl;
-
-    m_settings.antialiasingLevel = static_cast<unsigned int>(tmp);
-
-    m_settings.majorVersion   = 1;
-    m_settings.minorVersion   = 1;
-    m_settings.attributeFlags = ContextSettings::Default;
+    if (result != EGL_FALSE)
+        m_settings.stencilBits = static_cast<unsigned int>(tmp);
+
+    eglCheck(result = eglGetConfigAttrib(m_display, m_config, EGL_SAMPLE_BUFFERS, &tmp));
+
+    if ((result != EGL_FALSE) && tmp)
+    {
+        eglCheck(result = eglGetConfigAttrib(m_display, m_config, EGL_SAMPLES, &tmp));
+
+        if (result != EGL_FALSE)
+            m_settings.antialiasingLevel = static_cast<unsigned int>(tmp);
+    }
 }
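
The selection loop relies on evaluateFormat(), the scoring helper the other context backends (WGL/GLX) already use, which this diff does not show. A simplified sketch of the idea, under the assumption that lower scores are better and caveated configs should lose ties; the real helper weighs more criteria (sRGB, hardware acceleration) and uses different weights:

#include <cstdlib>

// Roughly what a "distance from the requested settings" score looks like:
// 0 is a perfect match, larger is worse.
struct RequestedSettings
{
    int colorBits;
    int depthBits;
    int stencilBits;
    int antialiasingLevel;
};

int scoreConfig(const RequestedSettings& wanted, int colorBits, int depthBits, int stencilBits, int samples, bool preferred)
{
    int score = std::abs(wanted.colorBits - colorBits) + std::abs(wanted.depthBits - depthBits) +
                std::abs(wanted.stencilBits - stencilBits) + std::abs(wanted.antialiasingLevel - samples);

    if (!preferred)
        score += 100000; // push caveated configs behind every clean one

    return score;
}

getBestConfig() keeps whichever config produced the smallest score, and the assert documents the expectation that at least one window- or pbuffer-capable OpenGL ES config exists on the display.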

src/SFML/Window/GlContext.cpp

@@ -275,8 +275,6 @@ std::vector<std::string> extensions;
 // Load our extensions vector with the supported extensions
 void loadExtensions()
 {
-    extensions.clear();
-
     auto glGetErrorFunc = reinterpret_cast<glGetErrorFuncType>(sf::priv::GlContext::getFunction("glGetError"));
     auto glGetIntegervFunc = reinterpret_cast<glGetIntegervFuncType>(sf::priv::GlContext::getFunction("glGetIntegerv"));
     auto glGetStringFunc = reinterpret_cast<glGetStringFuncType>(sf::priv::GlContext::getFunction("glGetString"));
@@ -290,21 +288,25 @@ void loadExtensions()
     auto glGetStringiFunc = reinterpret_cast<glGetStringiFuncType>(sf::priv::GlContext::getFunction("glGetStringi"));
 
-    if (glGetErrorFunc() == GL_INVALID_ENUM || !glGetStringiFunc)
+    if (glGetErrorFunc() == GL_INVALID_ENUM || !majorVersion || !glGetStringiFunc)
     {
         // Try to load the < 3.0 way
         const char* extensionString = reinterpret_cast<const char*>(glGetStringFunc(GL_EXTENSIONS));
 
-        assert(extensionString);
-
-        do
+        if (extensionString)
         {
-            const char* extension = extensionString;
+            extensions.clear();
 
-            while (*extensionString && (*extensionString != ' '))
-                ++extensionString;
+            do
+            {
+                const char* extension = extensionString;
 
-            extensions.emplace_back(extension, extensionString);
-        } while (*extensionString++);
+                while (*extensionString && (*extensionString != ' '))
+                    ++extensionString;
+
+                extensions.emplace_back(extension, extensionString);
+            } while (*extensionString++);
+        }
     }
     else
     {
@@ -314,10 +316,14 @@ void loadExtensions()
         if (numExtensions)
         {
+            extensions.clear();
+
             for (unsigned int i = 0; i < static_cast<unsigned int>(numExtensions); ++i)
             {
                 const char* extensionString = reinterpret_cast<const char*>(glGetStringiFunc(GL_EXTENSIONS, i));
-                extensions.emplace_back(extensionString);
+
+                if (extensionString)
+                    extensions.emplace_back(extensionString);
             }
         }
     }
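
Both extension paths now follow the same defensive rule: only touch the extensions vector once a non-null string has actually been obtained, so a driver returning garbage no longer crashes the loader or empties the vector for nothing. A standalone sketch of the legacy space-separated parsing with that guard (the caller would pass glGetString(GL_EXTENSIONS); here it is just a raw pointer parameter):

#include <string>
#include <vector>

// Tokenize a GL_EXTENSIONS-style space-separated list, tolerating a null pointer
std::vector<std::string> parseExtensionString(const char* extensionString)
{
    std::vector<std::string> extensions;

    if (!extensionString) // absent data: return an empty list instead of dereferencing null
        return extensions;

    do
    {
        const char* extension = extensionString;

        while (*extensionString && (*extensionString != ' '))
            ++extensionString;

        extensions.emplace_back(extension, extensionString);
    } while (*extensionString++);

    return extensions;
}

// Example: parseExtensionString("GL_ARB_debug_output GL_EXT_texture_filter_anisotropic")
// yields {"GL_ARB_debug_output", "GL_EXT_texture_filter_anisotropic"}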
@@ -807,7 +813,7 @@ void GlContext::initialize(const ContextSettings& requestedSettings)
     glGetIntegervFunc(GL_MAJOR_VERSION, &majorVersion);
     glGetIntegervFunc(GL_MINOR_VERSION, &minorVersion);
 
-    if (glGetErrorFunc() != GL_INVALID_ENUM)
+    if ((glGetErrorFunc() != GL_INVALID_ENUM) && (majorVersion != 0))
     {
         m_settings.majorVersion = static_cast<unsigned int>(majorVersion);
         m_settings.minorVersion = static_cast<unsigned int>(minorVersion);
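
The added majorVersion != 0 test covers drivers that report no GL error for the GL_MAJOR_VERSION query yet never write the output variable, which could otherwise propagate a zero major version into m_settings. A sketch of that query-then-fall-back decision reduced to a pure function; the real code also checks glGetError() before trusting the integer query, and its string-based fallback sits in the branch not shown in this hunk:

#include <cstdio>
#include <utility>

// Derive (major, minor) tolerantly. majorFromQuery/minorFromQuery are whatever
// glGetIntegerv(GL_MAJOR_VERSION / GL_MINOR_VERSION, ...) left behind (0 if the
// driver wrote nothing), and versionString is glGetString(GL_VERSION), possibly null.
std::pair<unsigned int, unsigned int> resolveVersion(int majorFromQuery, int minorFromQuery, const char* versionString)
{
    // Trust the integer query only if it produced a plausible major version
    if (majorFromQuery != 0)
        return {static_cast<unsigned int>(majorFromQuery), static_cast<unsigned int>(minorFromQuery)};

    // Fall back to parsing the leading "major.minor" of the version string
    unsigned int major = 0;
    unsigned int minor = 0;
    if (versionString && std::sscanf(versionString, "%u.%u", &major, &minor) == 2)
        return {major, minor};

    // Last resort: assume a conservative 1.1 baseline
    return {1, 1};
}

// Example: resolveVersion(0, 0, "2.1 Mesa 23.1") -> {2, 1}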