[client] opengl: use util_hasGLExt instead of custom logic

This commit is contained in:
Quantum 2021-09-30 06:16:25 -04:00 committed by Geoffrey McRae
parent 4a76401c34
commit 3f72de78da

View File

@@ -401,12 +401,8 @@ bool opengl_renderStartup(LG_Renderer * renderer, bool useDMA)
DEBUG_INFO("Renderer: %s", glGetString(GL_RENDERER));
DEBUG_INFO("Version : %s", glGetString(GL_VERSION ));
GLint n;
glGetIntegerv(GL_NUM_EXTENSIONS, &n);
for(GLint i = 0; i < n; ++i)
{
const GLubyte *ext = glGetStringi(GL_EXTENSIONS, i);
if (strcmp((const char *)ext, "GL_AMD_pinned_memory") == 0)
const char * exts = (const char *)glGetString(GL_EXTENSIONS);
if (util_hasGLExt(exts, "GL_AMD_pinned_memory"))
{
if (this->opt.amdPinnedMem)
{
@@ -415,8 +411,6 @@ bool opengl_renderStartup(LG_Renderer * renderer, bool useDMA)
}
else
DEBUG_INFO("GL_AMD_pinned_memory is available but not in use");
break;
}
}
glEnable(GL_TEXTURE_2D);