author    Johannes Schickel  2016-01-04 11:00:58 +0100
committer Johannes Schickel  2016-03-16 20:29:27 +0100
commit    0b46af2f0e5eef939daa73d5b38b6b817c78c7d8
tree      0ef26fea0924c578f0efa84dcb86a00423359a80
parent    472dbc4a84997e5efe83331f6e2e1e5c079f26ca
OPENGL: Don't prefix maxTextureSize variable for consistency.
 backends/graphics/opengl/context.cpp         |  6 +++---
 backends/graphics/opengl/opengl-graphics.cpp | 12 ++++++------
 backends/graphics/opengl/opengl-sys.h        |  2 +-
 3 files changed, 10 insertions(+), 10 deletions(-)
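
This commit is a mechanical rename: the Context struct member _maxTextureSize loses its underscore prefix to match the struct's other unprefixed members (NPOTSupported, shadersSupported). For orientation, a sketch of the struct after the change, abridged to the members this diff touches (not part of the commit itself):

struct Context {
	void reset();

	/** The maximum texture size supported by the context. */
	GLint maxTextureSize;

	/** Whether GL_ARB_texture_non_power_of_two is available or not. */
	bool NPOTSupported;

	// shadersSupported and the remaining members are elided here;
	// Context::reset() below clears all of them to their defaults.
};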
diff --git a/backends/graphics/opengl/context.cpp b/backends/graphics/opengl/context.cpp
index d9c40859dc..3678abfd09 100644
--- a/backends/graphics/opengl/context.cpp
+++ b/backends/graphics/opengl/context.cpp
@@ -32,7 +32,7 @@
 namespace OpenGL {
 
 void Context::reset() {
-	_maxTextureSize = 0;
+	maxTextureSize = 0;
 
 	NPOTSupported = false;
 	shadersSupported = false;
@@ -121,8 +121,8 @@ void OpenGLGraphicsManager::initializeGLContext() {
 #undef LOAD_FUNC
 
 	// Obtain maximum texture size.
-	GL_CALL(glGetIntegerv(GL_MAX_TEXTURE_SIZE, &g_context._maxTextureSize));
-	debug(5, "OpenGL maximum texture size: %d", g_context._maxTextureSize);
+	GL_CALL(glGetIntegerv(GL_MAX_TEXTURE_SIZE, &g_context.maxTextureSize));
+	debug(5, "OpenGL maximum texture size: %d", g_context.maxTextureSize);
 
 	const char *extString = (const char *)g_context.glGetString(GL_EXTENSIONS);
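
The hunk above reads the context's texture size limit with the standard glGetIntegerv query. As a standalone illustration without ScummVM's GL_CALL error-checking macro and debug() logger (the header name and the helper function are assumptions, not code from the commit):

#include <GL/gl.h> // platform-dependent; any header exposing the GL 1.x API works
#include <cstdio>

// Assumes a GL context is current on this thread; if the query fails,
// the variable keeps its initial value of 0.
GLint queryMaxTextureSize() {
	GLint maxTextureSize = 0;
	glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize);
	std::printf("OpenGL maximum texture size: %d\n", maxTextureSize);
	return maxTextureSize;
}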
diff --git a/backends/graphics/opengl/opengl-graphics.cpp b/backends/graphics/opengl/opengl-graphics.cpp
index 36fc7b88aa..8832597f33 100644
--- a/backends/graphics/opengl/opengl-graphics.cpp
+++ b/backends/graphics/opengl/opengl-graphics.cpp
@@ -220,8 +220,8 @@ OSystem::TransactionError OpenGLGraphicsManager::endGFXTransaction() {
 	    // a context existing before, which means we don't know the maximum
 	    // supported texture size before this. Thus, we check whether the
 	    // requested game resolution is supported over here.
-	    || (   _currentState.gameWidth > (uint)g_context._maxTextureSize
-	        || _currentState.gameHeight > (uint)g_context._maxTextureSize)) {
+	    || (   _currentState.gameWidth > (uint)g_context.maxTextureSize
+	        || _currentState.gameHeight > (uint)g_context.maxTextureSize)) {
 		if (_transactionMode == kTransactionActive) {
 			// Try to setup the old state in case its valid and is
 			// actually different from the new one.
@@ -792,15 +792,15 @@ void OpenGLGraphicsManager::setActualScreenSize(uint width, uint height) {
 	// possible and then scale it to the physical display size. This sounds
 	// bad but actually all recent chips should support full HD resolution
 	// anyway. Thus, it should not be a real issue for modern hardware.
-	if (   overlayWidth > (uint)g_context._maxTextureSize
-	    || overlayHeight > (uint)g_context._maxTextureSize) {
+	if (   overlayWidth > (uint)g_context.maxTextureSize
+	    || overlayHeight > (uint)g_context.maxTextureSize) {
 		const frac_t outputAspect = intToFrac(_outputScreenWidth) / _outputScreenHeight;
 
 		if (outputAspect > (frac_t)FRAC_ONE) {
-			overlayWidth = g_context._maxTextureSize;
+			overlayWidth = g_context.maxTextureSize;
 			overlayHeight = intToFrac(overlayWidth) / outputAspect;
 		} else {
-			overlayHeight = g_context._maxTextureSize;
+			overlayHeight = g_context.maxTextureSize;
 			overlayWidth = fracToInt(overlayHeight * outputAspect);
 		}
 	}
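
This hunk shrinks the overlay to fit the largest supported texture while preserving the physical screen's aspect ratio, computed with ScummVM's frac_t fixed-point helpers. A self-contained floating-point rendition of the same idea (the function name and the use of double are my own; the original derives outputAspect from _outputScreenWidth and _outputScreenHeight exactly as shown above):

#include <cstdint>

// Shrink (width, height) so both fit within maxTextureSize, preserving
// the given output aspect ratio (screen width divided by screen height).
void clampToMaxTextureSize(uint32_t &width, uint32_t &height,
                           uint32_t maxTextureSize, double outputAspect) {
	if (width <= maxTextureSize && height <= maxTextureSize)
		return; // already fits, nothing to do

	if (outputAspect > 1.0) {
		// Landscape output: width is the limiting dimension.
		width = maxTextureSize;
		height = static_cast<uint32_t>(width / outputAspect);
	} else {
		// Portrait (or square) output: height is the limiting dimension.
		height = maxTextureSize;
		width = static_cast<uint32_t>(height * outputAspect);
	}
}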
diff --git a/backends/graphics/opengl/opengl-sys.h b/backends/graphics/opengl/opengl-sys.h
index ffc80a23dc..ce4f0bfa40 100644
--- a/backends/graphics/opengl/opengl-sys.h
+++ b/backends/graphics/opengl/opengl-sys.h
@@ -106,7 +106,7 @@ struct Context {
 	void reset();
 
 	/** The maximum texture size supported by the context. */
-	GLint _maxTextureSize;
+	GLint maxTextureSize;
 
 	/** Whether GL_ARB_texture_non_power_of_two is available or not. */
 	bool NPOTSupported;
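
Both call sites in opengl-graphics.cpp read the renamed member through the global g_context object and cast it to uint before comparing against unsigned dimensions. A tiny helper in that style (fitsInOneTexture is hypothetical, not part of the commit):

extern OpenGL::Context g_context; // the backend's global context descriptor

// True when a width x height surface fits into a single texture.
static bool fitsInOneTexture(uint width, uint height) {
	return width <= (uint)g_context.maxTextureSize
	    && height <= (uint)g_context.maxTextureSize;
}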