glx: fix regression with copy sub buffer disappearing
So copy sub buffer isn't a core extension, it's a driver extension,
which means we are using totally the wrong interface to query for it
here, and bad things happen when you roll out this code: for
instance, MESA_copy_sub_buffer stops working.
This is just the hack I'm sticking in Fedora to avoid the regression
for now, but hopefully it will inspire us.
Reviewed-by: Adam Jackson <ajax@redhat.com>
Signed-off-by: Dave Airlie <airlied@redhat.com>
(cherry picked from commit 6da3f5d04f)
@@ -396,6 +396,9 @@ initializeExtensions(__GLXDRIscreen * screen)
     const __DRIextension **extensions;
     int i;
 
+    __glXEnableExtension(screen->glx_enable_bits, "GLX_MESA_copy_sub_buffer");
+    LogMessage(X_INFO, "AIGLX: enabled GLX_MESA_copy_sub_buffer\n");
+
     if (screen->swrast->base.version >= 3) {
         __glXEnableExtension(screen->glx_enable_bits,
                              "GLX_ARB_create_context");
@@ -416,8 +419,6 @@ initializeExtensions(__GLXDRIscreen * screen)
         if (strcmp(extensions[i]->name, __DRI_COPY_SUB_BUFFER) == 0) {
            screen->copySubBuffer =
                (const __DRIcopySubBufferExtension *) extensions[i];
-            __glXEnableExtension(screen->glx_enable_bits,
-                                 "GLX_MESA_copy_sub_buffer");
         }
 
         if (strcmp(extensions[i]->name, __DRI_TEX_BUFFER) == 0) {
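After this change the GLX extension string advertises GLX_MESA_copy_sub_buffer unconditionally, while screen->copySubBuffer is still only set when the driver actually exports __DRI_COPY_SUB_BUFFER. A hedged sketch of how the cached pointer is consumed at request time; the function and parameter names are illustrative, with only the __DRIcopySubBufferExtension::copySubBuffer signature taken from dri_interface.h.

    /* Illustrative consumer of the cached extension pointer. */
    static void
    copy_sub_buffer(__GLXDRIscreen *screen, __DRIdrawable *draw,
                    int x, int y, int w, int h)
    {
        /* Only call through if the driver exported the extension;
         * otherwise the advertised request effectively becomes a
         * no-op rather than a crash. */
        if (screen->copySubBuffer)
            screen->copySubBuffer->copySubBuffer(draw, x, y, w, h);
    }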