summaryrefslogtreecommitdiffstats
path: root/src/compiler
diff options
context:
space:
mode:
authorEric Anholt <[email protected]>2019-12-20 09:02:07 -0800
committerEric Anholt <[email protected]>2020-01-21 10:06:23 -0800
commit10dc4ac4c5d6dbe3df1f2b75229804e7aa5f86f1 (patch)
treedb3ef64c6af9f850b7736f792254922e8246ee8d /src/compiler
parent2dc205515742d7da7ed3ca3b6005e1762a876254 (diff)
mesa: Make atomic lowering put atomics above SSBOs.
Gallium arbitrarily (it seems) put atomics below SSBOs, resulting in a bunch of extra index management, and surprising shader code when you would see your SSBOs up at index 16. It makes a lot more sense to see atomics converted to SSBOs appear as magic high numbers.

Reviewed-by: Marek Olšák <[email protected]>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/merge_requests/3240>
Diffstat (limited to 'src/compiler')
-rw-r--r--src/compiler/nir/nir_lower_atomics_to_ssbo.c69
1 file changed, 17 insertions, 52 deletions
diff --git a/src/compiler/nir/nir_lower_atomics_to_ssbo.c b/src/compiler/nir/nir_lower_atomics_to_ssbo.c
index 7ff0123b7bb..df6f959c4b5 100644
--- a/src/compiler/nir/nir_lower_atomics_to_ssbo.c
+++ b/src/compiler/nir/nir_lower_atomics_to_ssbo.c
@@ -32,17 +32,13 @@
#endif
/*
- * Remap atomic counters to SSBOs. Atomic counters get remapped to
- * SSBO binding points [0..ssbo_offset) and the original SSBOs are
- * remapped to [ssbo_offset..n) (mostly to align with what mesa/st
- * does.
+ * Remap atomic counters to SSBOs, starting from the passed in ssbo_offset.
*/
static bool
lower_instr(nir_intrinsic_instr *instr, unsigned ssbo_offset, nir_builder *b)
{
nir_intrinsic_op op;
- int idx_src;
b->cursor = nir_before_instr(&instr->instr);
@@ -54,32 +50,6 @@ lower_instr(nir_intrinsic_instr *instr, unsigned ssbo_offset, nir_builder *b)
instr->intrinsic = nir_intrinsic_memory_barrier_buffer;
return true;
- case nir_intrinsic_ssbo_atomic_add:
- case nir_intrinsic_ssbo_atomic_imin:
- case nir_intrinsic_ssbo_atomic_umin:
- case nir_intrinsic_ssbo_atomic_imax:
- case nir_intrinsic_ssbo_atomic_umax:
- case nir_intrinsic_ssbo_atomic_and:
- case nir_intrinsic_ssbo_atomic_or:
- case nir_intrinsic_ssbo_atomic_xor:
- case nir_intrinsic_ssbo_atomic_exchange:
- case nir_intrinsic_ssbo_atomic_comp_swap:
- case nir_intrinsic_ssbo_atomic_fadd:
- case nir_intrinsic_ssbo_atomic_fmin:
- case nir_intrinsic_ssbo_atomic_fmax:
- case nir_intrinsic_ssbo_atomic_fcomp_swap:
- case nir_intrinsic_store_ssbo:
- case nir_intrinsic_load_ssbo:
- case nir_intrinsic_get_buffer_size:
- /* easy case, keep same opcode and just remap SSBO buffer index: */
- op = instr->intrinsic;
- idx_src = (op == nir_intrinsic_store_ssbo) ? 1 : 0;
- nir_ssa_def *old_idx = nir_ssa_for_src(b, instr->src[idx_src], 1);
- nir_ssa_def *new_idx = nir_iadd(b, old_idx, nir_imm_int(b, ssbo_offset));
- nir_instr_rewrite_src(&instr->instr,
- &instr->src[idx_src],
- nir_src_for_ssa(new_idx));
- return true;
case nir_intrinsic_atomic_counter_inc:
case nir_intrinsic_atomic_counter_add:
case nir_intrinsic_atomic_counter_pre_dec:
@@ -115,7 +85,7 @@ lower_instr(nir_intrinsic_instr *instr, unsigned ssbo_offset, nir_builder *b)
return false;
}
- nir_ssa_def *buffer = nir_imm_int(b, nir_intrinsic_base(instr));
+ nir_ssa_def *buffer = nir_imm_int(b, ssbo_offset + nir_intrinsic_base(instr));
nir_ssa_def *temp = NULL;
nir_intrinsic_instr *new_instr =
nir_intrinsic_instr_create(ralloc_parent(instr), op);
@@ -231,7 +201,21 @@ nir_lower_atomics_to_ssbo(nir_shader *shader, unsigned ssbo_offset)
snprintf(name, sizeof(name), "counter%d", var->data.binding);
ssbo = nir_variable_create(shader, nir_var_mem_ssbo, type, name);
- ssbo->data.binding = var->data.binding;
+ ssbo->data.binding = ssbo_offset + var->data.binding;
+
+ /* We can't use num_abos, because it only represents the number of
+ * active atomic counters, and currently unlike SSBO's they aren't
+ * compacted so num_abos actually isn't a bound on the index passed
+ * to nir_intrinsic_atomic_counter_*. e.g. if we have a single atomic
+ * counter declared like:
+ *
+ * layout(binding=1) atomic_uint counter0;
+ *
+ * then when we lower accesses to it the atomic_counter_* intrinsics
+ * will have 1 as the index but num_abos will still be 1.
+ */
+ shader->info.num_ssbos = MAX2(shader->info.num_ssbos,
+ ssbo->data.binding + 1);
struct glsl_struct_field field = {
.type = type,
@@ -247,25 +231,6 @@ nir_lower_atomics_to_ssbo(nir_shader *shader, unsigned ssbo_offset)
}
}
- /* Make sure that shader->info.num_ssbos still reflects the maximum SSBO
- * index that can be used in the shader.
- */
- if (shader->info.num_ssbos > 0) {
- shader->info.num_ssbos += ssbo_offset;
- } else {
- /* We can't use num_abos, because it only represents the number of
- * active atomic counters, and currently unlike SSBO's they aren't
- * compacted so num_abos actually isn't a bound on the index passed
- * to nir_intrinsic_atomic_counter_*. e.g. if we have a single atomic
- * counter declared like:
- *
- * layout(binding=1) atomic_uint counter0;
- *
- * then when we lower accesses to it the atomic_counter_* intrinsics
- * will have 1 as the index but num_abos will still be 1.
- * */
- shader->info.num_ssbos = util_last_bit(replaced);
- }
shader->info.num_abos = 0;
}