[client] opengl: use util_hasGLExt instead of custom logic

Quantum 2021-09-30 06:16:25 -04:00 committed by Geoffrey McRae
parent 4a76401c34
commit 3f72de78da


@@ -401,22 +401,16 @@ bool opengl_renderStartup(LG_Renderer * renderer, bool useDMA)
   DEBUG_INFO("Renderer: %s", glGetString(GL_RENDERER));
   DEBUG_INFO("Version : %s", glGetString(GL_VERSION ));
 
-  GLint n;
-  glGetIntegerv(GL_NUM_EXTENSIONS, &n);
-  for(GLint i = 0; i < n; ++i)
+  const char * exts = (const char *)glGetString(GL_EXTENSIONS);
+  if (util_hasGLExt(exts, "GL_AMD_pinned_memory"))
   {
-    const GLubyte *ext = glGetStringi(GL_EXTENSIONS, i);
-    if (strcmp((const char *)ext, "GL_AMD_pinned_memory") == 0)
+    if (this->opt.amdPinnedMem)
     {
-      if (this->opt.amdPinnedMem)
-      {
-        this->amdPinnedMemSupport = true;
-        DEBUG_INFO("Using GL_AMD_pinned_memory");
-      }
-      else
-        DEBUG_INFO("GL_AMD_pinned_memory is available but not in use");
-      break;
+      this->amdPinnedMemSupport = true;
+      DEBUG_INFO("Using GL_AMD_pinned_memory");
     }
+    else
+      DEBUG_INFO("GL_AMD_pinned_memory is available but not in use");
   }
 
   glEnable(GL_TEXTURE_2D);
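
The diff calls util_hasGLExt but does not include its definition. Below is a minimal sketch of what such a helper typically looks like, assuming it matches a complete space-delimited token in the string returned by glGetString(GL_EXTENSIONS); the project's actual implementation may differ. A bare strstr() is not enough for this job, because one extension name can be a prefix of another.

#include <stdbool.h>
#include <string.h>

// Sketch of a GL extension lookup, assuming `exts` is the space-separated
// list returned by glGetString(GL_EXTENSIONS). Returns true only when `ext`
// appears as a whole token, not merely as a substring of a longer name.
static bool hasGLExt(const char * exts, const char * ext)
{
  if (!exts || !ext || !*ext)
    return false;

  const size_t len = strlen(ext);
  const char * p   = exts;

  while ((p = strstr(p, ext)) != NULL)
  {
    // the match must start at the beginning of the list or after a space,
    // and must end at a space or at the end of the list
    const bool startOk = (p == exts)     || (p[-1]  == ' ');
    const bool endOk   = (p[len] == ' ') || (p[len] == '\0');
    if (startOk && endOk)
      return true;
    p += len; // skip this partial match and keep scanning
  }
  return false;
}

Note the design trade-off the commit makes: the removed code used the core-profile indexed query (glGetIntegerv(GL_NUM_EXTENSIONS) plus glGetStringi), while the replacement parses the single aggregate string from glGetString(GL_EXTENSIONS), presumably acceptable here because this renderer runs in a context where that legacy query is still available.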