-rw-r--r--  src/mesa/drivers/dri/common/xmlpool.h        |  6
-rw-r--r--  src/mesa/drivers/dri/r200/r200_context.c     | 13
-rw-r--r--  src/mesa/drivers/dri/r200/r200_screen.c      |  3
-rw-r--r--  src/mesa/drivers/dri/r200/r200_screen.h      |  1
-rw-r--r--  src/mesa/drivers/dri/radeon/radeon_context.c |  7
-rw-r--r--  src/mesa/drivers/dri/radeon/radeon_screen.c  |  3
6 files changed, 28 insertions(+), 5 deletions(-)
diff --git a/src/mesa/drivers/dri/common/xmlpool.h b/src/mesa/drivers/dri/common/xmlpool.h
index 29b5bf597d..dfa5f49524 100644
--- a/src/mesa/drivers/dri/common/xmlpool.h
+++ b/src/mesa/drivers/dri/common/xmlpool.h
@@ -293,6 +293,11 @@ DRI_CONF_OPT_BEGIN_V(texture_units,int,def, # min ":" # max ) \
DRI_CONF_DESC(de,"Anzahl der Textureinheiten") \
DRI_CONF_OPT_END
+#define DRI_CONF_TEXTURE_LEVEL_HACK(def) \
+DRI_CONF_OPT_BEGIN(texture_level_hack,bool,def) \
+ DRI_CONF_DESC(en,"Enable texture level hack on radeon/r200, useful for playing games with compressed textures") \
+DRI_CONF_OPT_END
+
#define DRI_CONF_TEXTURE_HEAPS_ALL 0
#define DRI_CONF_TEXTURE_HEAPS_CARD 1
#define DRI_CONF_TEXTURE_HEAPS_GART 2
@@ -331,4 +336,5 @@ DRI_CONF_OPT_BEGIN(nv_vertex_program,bool,def) \
DRI_CONF_DESC(fr,"Activer GL_NV_vertex_program") \
DRI_CONF_OPT_END
+
#endif
diff --git a/src/mesa/drivers/dri/r200/r200_context.c b/src/mesa/drivers/dri/r200/r200_context.c
index 25f9d2cb96..bc50f3de68 100644
--- a/src/mesa/drivers/dri/r200/r200_context.c
+++ b/src/mesa/drivers/dri/r200/r200_context.c
@@ -365,6 +365,13 @@ GLboolean r200CreateContext( const __GLcontextModes *glVisual,
12,
GL_FALSE );
+ /* Adjust the max texture size a bit. This is a hack, but larger textures will
+ work just fine in 99.999999% of all cases, especially with texture compression... */
+ if (driQueryOptionb( &rmesa->optionCache, "texture_level_hack" ))
+ {
+ if (ctx->Const.MaxTextureLevels < 12) ctx->Const.MaxTextureLevels += 1;
+ }
+
ctx->Const.MaxTextureMaxAnisotropy = 16.0;
/* No wide points.
@@ -415,9 +422,9 @@ GLboolean r200CreateContext( const __GLcontextModes *glVisual,
_math_matrix_set_identity( &rmesa->tmpmat );
driInitExtensions( ctx, card_extensions, GL_TRUE );
- if (rmesa->r200Screen->chipset & R200_CHIPSET_REAL_R200) {
- /* yuv textures only work with r200 chips for unknown reasons, the
- others get the bit ordering right but don't actually do YUV-RGB conversion */
+ if (!(rmesa->r200Screen->chipset & R200_CHIPSET_YCBCR_BROKEN)) {
+ /* YUV textures don't work on some chips - R200 / RV280 are okay so far;
+ the others get the bit ordering right but don't actually do YUV->RGB conversion */
_mesa_enable_extension( ctx, "GL_MESA_ycbcr_texture" );
}
if (rmesa->glCtx->Mesa_DXTn) {
diff --git a/src/mesa/drivers/dri/r200/r200_screen.c b/src/mesa/drivers/dri/r200/r200_screen.c
index 76d7016092..103d5d3384 100644
--- a/src/mesa/drivers/dri/r200/r200_screen.c
+++ b/src/mesa/drivers/dri/r200/r200_screen.c
@@ -73,6 +73,7 @@ DRI_CONF_BEGIN
DRI_CONF_COLOR_REDUCTION(DRI_CONF_COLOR_REDUCTION_DITHER)
DRI_CONF_ROUND_MODE(DRI_CONF_ROUND_TRUNC)
DRI_CONF_DITHER_MODE(DRI_CONF_DITHER_XERRORDIFF)
+ DRI_CONF_TEXTURE_LEVEL_HACK(false)
DRI_CONF_SECTION_END
DRI_CONF_SECTION_DEBUG
DRI_CONF_NO_RAST(false)
@@ -82,7 +83,7 @@ DRI_CONF_BEGIN
DRI_CONF_NV_VERTEX_PROGRAM(false)
DRI_CONF_SECTION_END
DRI_CONF_END;
-static const GLuint __driNConfigOptions = 15;
+static const GLuint __driNConfigOptions = 16;
#if 1
/* Including xf86PciInfo.h introduces a bunch of errors...
diff --git a/src/mesa/drivers/dri/r200/r200_screen.h b/src/mesa/drivers/dri/r200/r200_screen.h
index fa8c33faad..fdfb21a6c5 100644
--- a/src/mesa/drivers/dri/r200/r200_screen.h
+++ b/src/mesa/drivers/dri/r200/r200_screen.h
@@ -52,6 +52,7 @@ typedef struct {
/* chipset features */
#define R200_CHIPSET_TCL (1 << 0)
#define R200_CHIPSET_REAL_R200 (1 << 1)
+#define R200_CHIPSET_YCBCR_BROKEN (1 << 2)
#define R200_NR_TEX_HEAPS 2
diff --git a/src/mesa/drivers/dri/radeon/radeon_context.c b/src/mesa/drivers/dri/radeon/radeon_context.c
index 6a2f3dc353..fb00e59b46 100644
--- a/src/mesa/drivers/dri/radeon/radeon_context.c
+++ b/src/mesa/drivers/dri/radeon/radeon_context.c
@@ -342,6 +342,13 @@ radeonCreateContext( const __GLcontextModes *glVisual,
12,
GL_FALSE );
+ /* Adjust the max texture size a bit. This is a hack, but larger textures will
+ work just fine in 99.999999% of all cases, especially with texture compression... */
+ if (driQueryOptionb( &rmesa->optionCache, "texture_level_hack" ))
+ {
+ if (ctx->Const.MaxTextureLevels < 12) ctx->Const.MaxTextureLevels += 1;
+ }
+
ctx->Const.MaxTextureMaxAnisotropy = 16.0;
/* No wide points.
diff --git a/src/mesa/drivers/dri/radeon/radeon_screen.c b/src/mesa/drivers/dri/radeon/radeon_screen.c
index eba24d82d6..08c85fdf5c 100644
--- a/src/mesa/drivers/dri/radeon/radeon_screen.c
+++ b/src/mesa/drivers/dri/radeon/radeon_screen.c
@@ -70,12 +70,13 @@ DRI_CONF_BEGIN
DRI_CONF_COLOR_REDUCTION(DRI_CONF_COLOR_REDUCTION_DITHER)
DRI_CONF_ROUND_MODE(DRI_CONF_ROUND_TRUNC)
DRI_CONF_DITHER_MODE(DRI_CONF_DITHER_XERRORDIFF)
+ DRI_CONF_TEXTURE_LEVEL_HACK(false)
DRI_CONF_SECTION_END
DRI_CONF_SECTION_DEBUG
DRI_CONF_NO_RAST(false)
DRI_CONF_SECTION_END
DRI_CONF_END;
-static const GLuint __driNConfigOptions = 12;
+static const GLuint __driNConfigOptions = 13;
#if 1
/* Including xf86PciInfo.h introduces a bunch of errors...
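
Note on usage: the new option defaults to false in both drivers (DRI_CONF_TEXTURE_LEVEL_HACK(false)), so a user has to opt in through driconf. As a rough sketch, assuming the standard ~/.drirc XML format and the "r200" driver name (use "radeon" for the radeon driver; "Default" is just an informational application label), the hack could be enabled like this:

    <driconf>
        <!-- applies to every screen driven by the r200 driver -->
        <device driver="r200">
            <application name="Default">
                <!-- opt into the larger-texture-level hack added above -->
                <option name="texture_level_hack" value="true" />
            </application>
        </device>
    </driconf>

The same option name is queried via driQueryOptionb() in both r200CreateContext and radeonCreateContext, so one drirc entry per driver section is all that is needed.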