OpenGL: fix max texture anisotropy check

Query max supported on init, use that to clamp user-set values.
This commit is contained in:
Mike Erwin 2016-01-07 11:21:08 -05:00
parent 1aff22b81d
commit 0df1bdc268
3 changed files with 15 additions and 2 deletions

View File

@@ -51,6 +51,7 @@ bool GPU_instanced_drawing_support(void);
int GPU_max_texture_size(void);
int GPU_max_textures(void);
float GPU_max_texture_anisotropy(void);
int GPU_max_color_texture_samples(void);
int GPU_color_depth(void);
void GPU_get_dfdy_factors(float fac[2]);

View File

@@ -365,8 +365,9 @@ void GPU_set_anisotropic(float value)
GPU_free_images();
/* Clamp value to the maximum value the graphics card supports */
if (value > GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT)
value = GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT;
const float max = GPU_max_texture_anisotropy();
if (value > max)
value = max;
GTS.anisotropic = value;
}

View File

@@ -84,6 +84,7 @@ static struct GPUGlobal {
float dfdyfactors[2]; /* workaround for different calculation of dfdy factors on GPUs. Some GPUs/drivers
calculate dfdy in shader differently when drawing to an offscreen buffer. First
number is factor on screen and second is off-screen */
float max_anisotropy;
} GG = {1, 0};
/* GPU Types */
@@ -110,6 +111,11 @@ int GPU_max_textures(void)
return GG.maxtextures;
}
/* Maximum texture anisotropy supported by the GPU, cached at init time
 * by gpu_extensions_init() (1.0f when the extension is unavailable). */
float GPU_max_texture_anisotropy(void)
{
	const float max_aniso = GG.max_anisotropy;
	return max_aniso;
}
int GPU_max_color_texture_samples(void)
{
return GG.samples_color_texture_max;
@@ -129,6 +135,11 @@ void gpu_extensions_init(void)
glGetIntegerv(GL_MAX_TEXTURE_SIZE, &GG.maxtexsize);
if (GLEW_EXT_texture_filter_anisotropic)
glGetFloatv(GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT, &GG.max_anisotropy);
else
GG.max_anisotropy = 1.0f;
GLint r, g, b;
glGetIntegerv(GL_RED_BITS, &r);
glGetIntegerv(GL_GREEN_BITS, &g);