diff --git a/src/SFML/Graphics/RenderQueue.cpp b/src/SFML/Graphics/RenderQueue.cpp
index 5161c9458..1d0a747fd 100644
--- a/src/SFML/Graphics/RenderQueue.cpp
+++ b/src/SFML/Graphics/RenderQueue.cpp
@@ -38,13 +38,22 @@ namespace
 {
     // Fast float to int conversion
     inline sf::Int32 Round(double value)
-    {
-        value += 6755399441055744.0;
+    {
+        // Use a union rather than reinterpret_cast, because it doesn't break strict-aliasing
+        // rules and results in a correct behaviour when compiling in optimized mode
+        union DoubleToInt
+        {
+            double d;
+            sf::Int32 i[2];
+        };
+
+        DoubleToInt u;
+        u.d = value + 6755399441055744.0;
 
     #if defined(SFML_ENDIAN_LITTLE)
-        return (reinterpret_cast<sf::Int32*>(&value))[0];
+        return u.i[0];
     #else
-        return (reinterpret_cast<sf::Int32*>(&value))[1];
+        return u.i[1];
     #endif
     }
 }
diff --git a/src/SFML/Window/Linux/ContextGLX.cpp b/src/SFML/Window/Linux/ContextGLX.cpp
index 08156bd16..d9f308b7b 100644
--- a/src/SFML/Window/Linux/ContextGLX.cpp
+++ b/src/SFML/Window/Linux/ContextGLX.cpp
@@ -275,7 +275,7 @@ void ContextGLX::CreateContext(ContextGLX* shared, unsigned int bitsPerPixel, co
     GLXContext toShare = shared ? shared->myContext : NULL;
 
     // Create the context -- first try an OpenGL 3.0 context if it is supported
-    const GLubyte* name = reinterpret_cast<const GLubyte*>("glXCreateContextAttribsARB");
+    /*const GLubyte* name = reinterpret_cast<const GLubyte*>("glXCreateContextAttribsARB");
     PFNGLXCREATECONTEXTATTRIBSARBPROC glXCreateContextAttribsARB = reinterpret_cast<PFNGLXCREATECONTEXTATTRIBSARBPROC>(glXGetProcAddress(name));
     if (glXCreateContextAttribsARB)
     {
@@ -295,7 +295,7 @@ void ContextGLX::CreateContext(ContextGLX* shared, unsigned int bitsPerPixel, co
             0, 0
         };
         myContext = glXCreateContextAttribsARB(myDisplay, configs[0], toShare, true, attributes);
-    }
+    }*/
 
     // If the OpenGL 3.0 context failed, create a regular OpenGL 1.x context
     if (!myContext)
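
For reference, the union-based rounding from the RenderQueue.cpp hunk can be exercised as a standalone sketch. This is only an illustration of the technique, not part of the patch: it assumes a little-endian target with IEEE-754 doubles, substitutes `std::int32_t` for `sf::Int32`, and the `main` driver is hypothetical.

```cpp
#include <cstdint>
#include <iostream>

// Sketch of the patched Round(): adding 2^52 + 2^51 pushes the nearest
// integer into the low 32 bits of the double's mantissa, which is then
// read back through a union instead of a pointer reinterpret_cast.
inline std::int32_t Round(double value)
{
    union DoubleToInt
    {
        double d;
        std::int32_t i[2];
    };

    DoubleToInt u;
    u.d = value + 6755399441055744.0; // 2^52 + 2^51, the "magic" rounding constant

    return u.i[0]; // low word on little-endian machines; i[1] on big-endian
}

int main()
{
    std::cout << Round(3.7)  << '\n'; // prints 4
    std::cout << Round(-2.3) << '\n'; // prints -2
}
```

Note that reading a union member other than the one last written is well-defined in C but, strictly speaking, only a widely supported compiler extension in C++ (GCC documents it as allowed under -fstrict-aliasing), which is why it behaves correctly in optimized builds where the old reinterpret_cast did not.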