author     Ian Romanick <[email protected]>    2005-03-17 01:36:24 +0000
committer  Ian Romanick <[email protected]>    2005-03-17 01:36:24 +0000
commit     9eae0b9f19f3e71dec6d6440db69dd5d140228de (patch)
tree       0412acb67a25fc92d5eef1e7c6e9c5d498986f5d /src/glx/x11/glxextensions.c
parent     c1aa3d1e6f6fd3971df93b11d4db139bec824c91 (diff)
Hard-code the client-side maximum GL version. It turns out that there are some flaws in the calculation code when the highest version listed in known_gl_extensions is supported. This code would also have problems with some of the new GL 2.0 features that don't have an associated extension.
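The commit message doesn't spell out the flaw, but one plausible reading, based on the code removed below, is that the bit-scan for the lowest unsupported core version never finds a clear bit once every version listed in the tables is supported, so it walks past the meaningful entries of major_table/minor_table. The following stand-alone sketch is not the Mesa code itself: the extension list is hypothetical and a bounds guard has been added so the loop terminates, but the ver_bit formula, the tables, and the scan mirror the deleted lines.

#include <stdio.h>

int main(void)
{
   /* Same version tables as the removed code: bit i maps to
    * major_table[i].minor_table[i]; entries past index 6 are zero. */
   static const char major_table[32] = { 1, 1, 1, 1, 1, 1, 2, };
   static const char minor_table[32] = { 0, 1, 2, 3, 4, 5, 0, };

   /* Hypothetical stand-in for known_gl_extensions: the core version each
    * extension is required for, plus a client-support flag.  Assume every
    * extension up to the highest listed version (1.5) is supported. */
   struct { unsigned major; unsigned minor; unsigned supported; } known[] = {
      { 1, 1, 1 }, { 1, 2, 1 }, { 1, 3, 1 }, { 1, 4, 1 }, { 1, 5, 1 },
   };

   unsigned full_support = ~0u;
   unsigned i;

   /* Clear the bit for any core version whose required extension is
    * missing (same ver_bit calculation as the deleted code). */
   for (i = 0; i < sizeof(known) / sizeof(known[0]); i++) {
      if (!known[i].supported && known[i].major != 0) {
         unsigned ver_bit = (6 * (known[i].major - 1)) + known[i].minor;
         full_support &= ~(1u << ver_bit);
      }
   }

   /* The removed loop scans for the lowest clear bit.  When nothing was
    * cleared, there is no clear bit to find; the guard added here keeps
    * the sketch from shifting by 32 or more. */
   for (i = 0; i < 32 && (full_support & (1u << i)) != 0; i++)
      /* empty */ ;

   if (i == 0 || i >= 32 || major_table[i - 1] == 0)
      printf("version scan ran past the known version table (i = %u)\n", i);
   else
      printf("max client GL version: %u.%u\n",
             (unsigned) major_table[i - 1], (unsigned) minor_table[i - 1]);

   return 0;
}

Without the guard, the original loop would eventually evaluate 1 << 32, which is undefined behavior in C, or read the zeroed tail of the tables. Hard-coding gl_major/gl_minor to 1.4, as the patch below does, sidesteps the whole computation.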
Diffstat (limited to 'src/glx/x11/glxextensions.c')
-rw-r--r--  src/glx/x11/glxextensions.c  |  26
1 file changed, 2 insertions(+), 24 deletions(-)
diff --git a/src/glx/x11/glxextensions.c b/src/glx/x11/glxextensions.c
index 56ba5c9b47e..31f7aee5cc6 100644
--- a/src/glx/x11/glxextensions.c
+++ b/src/glx/x11/glxextensions.c
@@ -266,8 +266,8 @@ static unsigned char direct_glx_support[8];
/**
* Highest core GL version that can be supported for indirect rendering.
*/
-static unsigned gl_major = 0;
-static unsigned gl_minor = 0;
+static const unsigned gl_major = 1;
+static const unsigned gl_minor = 4;
/* client extensions string */
static const char * __glXGLXClientExtensions = NULL;
@@ -386,11 +386,8 @@ __glXScrEnableExtension( __GLXscreenConfigs *psc, const char * name )
static void
__glXExtensionsCtr( void )
{
- static const char major_table[32] = { 1, 1, 1, 1, 1, 1, 2, };
- static const char minor_table[32] = { 0, 1, 2, 3, 4, 5, 0, };
unsigned i;
static GLboolean ext_list_first_time = GL_TRUE;
- unsigned full_support = ~0;
if ( ext_list_first_time ) {
@@ -430,31 +427,12 @@ __glXExtensionsCtr( void )
if ( known_gl_extensions[i].client_support ) {
SET_BIT( client_gl_support, bit );
}
- else if ( known_gl_extensions[i].version_major != 0 ) {
- /* If an extension that is required for some core GL version is
- * not supported, clear the bit for that core GL version as well.
- */
-
- unsigned ver_bit = (6 * (known_gl_extensions[i].version_major - 1))
- + (known_gl_extensions[i].version_minor);
-
- full_support &= ~(1U << ver_bit);
- }
if ( known_gl_extensions[i].client_only ) {
SET_BIT( client_gl_only, bit );
}
}
- /* Determine the lowest unsupported core GL version. The version before
- * that is, therefore, the highest supported core GL version.
- */
- for ( i = 0 ; (full_support & (1 << i)) != 0 ; i++ )
- /* empty */ ;
-
- i--;
- gl_major = major_table[i];
- gl_minor = minor_table[i];
#if 0
fprintf( stderr, "[%s:%u] Maximum client library version: %u.%u\n",
__func__, __LINE__, gl_major, gl_minor );