author    Marek Olšák <[email protected]>  2014-01-17 22:52:28 +0100
committer Marek Olšák <[email protected]>  2014-02-04 20:19:16 +0100
commit    0354b769c2ee865ed40e9994f2147f2d86e989b7 (patch)
tree      65d9f7e4d785a9199bc9aeaa2d2527d6a5647177 /src/gallium/drivers/radeonsi/si_pipe.c
parent    82c0914266ec53d59233b6d326bcfde7049da17b (diff)
gallium: remove PIPE_CAP_MAX_COMBINED_SAMPLERS
This can be derived from the shader caps. All GPUs from ATI/AMD, NVIDIA, and Intel have separate texture sampler slots for each shader stage.
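A minimal sketch of how a state tracker could derive the combined limit once the cap is gone, by summing the per-stage PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS values. The pipe_screen hook and the cap names are real Gallium interfaces of this era; the helper function name is hypothetical and this is not the actual Mesa state-tracker code.

```c
#include "pipe/p_defines.h"
#include "pipe/p_screen.h"

/* Hypothetical helper: derive the combined sampler count from the
 * per-stage shader caps instead of PIPE_CAP_MAX_COMBINED_SAMPLERS. */
static unsigned
derive_max_combined_samplers(struct pipe_screen *screen)
{
   unsigned total = 0;

   /* Sum the texture sampler slots reported by every shader stage;
    * hardware with separate per-stage slots reports each stage's limit here. */
   for (unsigned sh = 0; sh < PIPE_SHADER_TYPES; sh++) {
      int n = screen->get_shader_param(screen, sh,
                                       PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS);
      if (n > 0)
         total += n;
   }
   return total;
}
```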
Diffstat (limited to 'src/gallium/drivers/radeonsi/si_pipe.c')
-rw-r--r--  src/gallium/drivers/radeonsi/si_pipe.c | 2 --
1 file changed, 0 insertions(+), 2 deletions(-)
diff --git a/src/gallium/drivers/radeonsi/si_pipe.c b/src/gallium/drivers/radeonsi/si_pipe.c
index ff8788ea225..14dfd30bfb0 100644
--- a/src/gallium/drivers/radeonsi/si_pipe.c
+++ b/src/gallium/drivers/radeonsi/si_pipe.c
@@ -306,8 +306,6 @@ static int si_get_param(struct pipe_screen* pscreen, enum pipe_cap param)
return 15;
case PIPE_CAP_MAX_TEXTURE_ARRAY_LAYERS:
return 16384;
- case PIPE_CAP_MAX_COMBINED_SAMPLERS:
- return HAVE_LLVM >= 0x0305 ? 48 : 32;
/* Render targets. */
case PIPE_CAP_MAX_RENDER_TARGETS: