author     Jason Ekstrand <[email protected]>   2017-12-01 03:18:51 -0800
committer  Jason Ekstrand <[email protected]>   2017-12-08 15:43:25 -0800
commit     1bce04deb8e62e7fc1c19b1ecc7b6484e33ecf20
tree       cf0edb10e405b5fa38bcb91d93b1e288dcc15043 /src/intel/vulkan/anv_nir_apply_pipeline_layout.c
parent     3b34ed79f1dcc802d87191226a37f5d17cabbbc1
anv/pipeline: Translate vulkan_resource_index to a constant when possible
We want to call brw_nir_analyze_ubo_ranges immediately after
anv_nir_apply_pipeline_layout, and it badly wants constants.  We could run
an optimization step and let constant folding do it, but that's way more
expensive than needed.  It's really easy to just handle constants in
apply_pipeline_layout.

Reviewed-by: Jordan Justen <[email protected]>
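For context, here is a minimal standalone sketch of the idiom the message
describes.  This is not code from the commit; the helper name and parameter
list are hypothetical, and it assumes the NIR helpers nir_src_as_const_value(),
nir_imm_int(), nir_ssa_for_src(), nir_umin(), nir_iadd() and Mesa's MIN2 macro
as they existed around this time.

#include "nir/nir_builder.h"   /* assumed include, as in the anv NIR passes */

/* Sketch only (hypothetical helper): fold a descriptor-array index into an
 * immediate when the index is a compile-time constant, so a later pass such
 * as brw_nir_analyze_ubo_ranges sees a constant block index.
 */
static nir_ssa_def *
build_block_index(nir_builder *b, nir_intrinsic_instr *intrin,
                  uint32_t surface_index, uint32_t array_size,
                  bool add_bounds_checks)
{
   nir_const_value *const_index = nir_src_as_const_value(intrin->src[0]);

   if (const_index) {
      /* Constant path: clamp and add at compile time, emit one immediate. */
      unsigned idx = MIN2(const_index->u32[0], array_size - 1);
      return nir_imm_int(b, surface_index + idx);
   }

   /* Dynamic path: emit the clamp (if requested) and the add as real
    * instructions, as the pass did before this change.
    */
   nir_ssa_def *index = nir_ssa_for_src(b, intrin->src[0], 1);
   if (add_bounds_checks)
      index = nir_umin(b, index, nir_imm_int(b, array_size - 1));
   return nir_iadd(b, nir_imm_int(b, surface_index), index);
}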
Diffstat (limited to 'src/intel/vulkan/anv_nir_apply_pipeline_layout.c')
-rw-r--r--  src/intel/vulkan/anv_nir_apply_pipeline_layout.c | 17
1 file changed, 13 insertions(+), 4 deletions(-)
diff --git a/src/intel/vulkan/anv_nir_apply_pipeline_layout.c b/src/intel/vulkan/anv_nir_apply_pipeline_layout.c
index 612b3f7effe..978a8a57c70 100644
--- a/src/intel/vulkan/anv_nir_apply_pipeline_layout.c
+++ b/src/intel/vulkan/anv_nir_apply_pipeline_layout.c
@@ -116,12 +116,21 @@ lower_res_index_intrinsic(nir_intrinsic_instr *intrin,
    uint32_t array_size =
       state->layout->set[set].layout->binding[binding].array_size;
 
-   nir_ssa_def *block_index = nir_ssa_for_src(b, intrin->src[0], 1);
+   nir_const_value *const_array_index = nir_src_as_const_value(intrin->src[0]);
 
-   if (state->add_bounds_checks)
-      block_index = nir_umin(b, block_index, nir_imm_int(b, array_size - 1));
+   nir_ssa_def *block_index;
+   if (const_array_index) {
+      unsigned array_index = const_array_index->u32[0];
+      array_index = MIN2(array_index, array_size - 1);
+      block_index = nir_imm_int(b, surface_index + array_index);
+   } else {
+      block_index = nir_ssa_for_src(b, intrin->src[0], 1);
 
-   block_index = nir_iadd(b, nir_imm_int(b, surface_index), block_index);
+      if (state->add_bounds_checks)
+         block_index = nir_umin(b, block_index, nir_imm_int(b, array_size - 1));
+
+      block_index = nir_iadd(b, nir_imm_int(b, surface_index), block_index);
+   }
 
    assert(intrin->dest.is_ssa);
    nir_ssa_def_rewrite_uses(&intrin->dest.ssa, nir_src_for_ssa(block_index));
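One design point visible in the hunk: on the constant path the index is
clamped at compile time with MIN2 unconditionally and the addition is folded
into the immediate, so no nir_iadd or nir_umin instructions are emitted at
all; the dynamic path still emits the nir_umin clamp only when
state->add_bounds_checks is set, as before.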