author     Jason Ekstrand <[email protected]>   2018-12-14 18:20:00 -0600
committer  Jason Ekstrand <[email protected]>   2019-01-08 00:38:30 +0000
commit     be039cb467635c6e2a70e29a586de7a5e403c929 (patch)
tree       afbcce8a2b3af467c62cd244f06f0079062632db
parent     5c3cb9c3ce3f9fb05c22536c30a68a7b09300642 (diff)
spirv: Choose atomic deref type with pointer_uses_ssa_offset
Previously, we hard-coded the rule about workgroup variables and the
builder's lower_workgroup_access_to_offsets flag.  Instead, base it on
the handy helper we have for exactly this sort of thing.
Reviewed-by: Alejandro PiƱeiro <[email protected]>
Reviewed-by: Caio Marcelo de Oliveira Filho <[email protected]>
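For context, the helper the message refers to is vtn_pointer_uses_ssa_offset(),
which the patch exports from vtn_variables.c (see the last two hunks below).
Its body is not part of this diff; the following is only a plausible sketch of
such a predicate, assuming the vtn types and vtn_variable_mode enum from
vtn_private.h -- not the actual Mesa implementation:

    /* Sketch only, not the real body: returns true when a pointer is
     * lowered to an SSA index/offset pair rather than a NIR deref chain.
     * The SSBO and workgroup cases are implied by the patch below;
     * including UBO and push-constant pointers here is an assumption. */
    bool
    vtn_pointer_uses_ssa_offset(struct vtn_builder *b,
                                struct vtn_pointer *ptr)
    {
       return ptr->mode == vtn_variable_mode_ubo ||
              ptr->mode == vtn_variable_mode_ssbo ||
              ptr->mode == vtn_variable_mode_push_constant ||
              (ptr->mode == vtn_variable_mode_workgroup &&
               b->options->lower_workgroup_access_to_offsets);
    }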
-rw-r--r--   src/compiler/spirv/spirv_to_nir.c   | 76
-rw-r--r--   src/compiler/spirv/vtn_private.h    |  3
-rw-r--r--   src/compiler/spirv/vtn_variables.c  |  2

3 files changed, 41 insertions, 40 deletions
diff --git a/src/compiler/spirv/spirv_to_nir.c b/src/compiler/spirv/spirv_to_nir.c
index 2968c735f32..a36cd7206fc 100644
--- a/src/compiler/spirv/spirv_to_nir.c
+++ b/src/compiler/spirv/spirv_to_nir.c
@@ -2801,23 +2801,39 @@ vtn_handle_atomics(struct vtn_builder *b, SpvOp opcode,
          unreachable("Invalid SPIR-V atomic");
 
       }
-   } else if (ptr->mode == vtn_variable_mode_workgroup &&
-              !b->options->lower_workgroup_access_to_offsets) {
-      nir_deref_instr *deref = vtn_pointer_to_deref(b, ptr);
-      const struct glsl_type *deref_type = deref->type;
-      nir_intrinsic_op op = get_deref_nir_atomic_op(b, opcode);
+   } else if (vtn_pointer_uses_ssa_offset(b, ptr)) {
+      nir_ssa_def *offset, *index;
+      offset = vtn_pointer_to_offset(b, ptr, &index);
+
+      nir_intrinsic_op op;
+      if (ptr->mode == vtn_variable_mode_ssbo) {
+         op = get_ssbo_nir_atomic_op(b, opcode);
+      } else {
+         vtn_assert(ptr->mode == vtn_variable_mode_workgroup &&
+                    b->options->lower_workgroup_access_to_offsets);
+         op = get_shared_nir_atomic_op(b, opcode);
+      }
+
       atomic = nir_intrinsic_instr_create(b->nb.shader, op);
-      atomic->src[0] = nir_src_for_ssa(&deref->dest.ssa);
 
+      int src = 0;
       switch (opcode) {
       case SpvOpAtomicLoad:
-         atomic->num_components = glsl_get_vector_elements(deref_type);
+         atomic->num_components = glsl_get_vector_elements(ptr->type->type);
+         nir_intrinsic_set_align(atomic, 4, 0);
+         if (ptr->mode == vtn_variable_mode_ssbo)
+            atomic->src[src++] = nir_src_for_ssa(index);
+         atomic->src[src++] = nir_src_for_ssa(offset);
          break;
 
       case SpvOpAtomicStore:
-         atomic->num_components = glsl_get_vector_elements(deref_type);
+         atomic->num_components = glsl_get_vector_elements(ptr->type->type);
          nir_intrinsic_set_write_mask(atomic, (1 << atomic->num_components) - 1);
-         atomic->src[1] = nir_src_for_ssa(vtn_ssa_value(b, w[4])->def);
+         nir_intrinsic_set_align(atomic, 4, 0);
+         atomic->src[src++] = nir_src_for_ssa(vtn_ssa_value(b, w[4])->def);
+         if (ptr->mode == vtn_variable_mode_ssbo)
+            atomic->src[src++] = nir_src_for_ssa(index);
+         atomic->src[src++] = nir_src_for_ssa(offset);
          break;
 
       case SpvOpAtomicExchange:
@@ -2834,46 +2850,31 @@ vtn_handle_atomics(struct vtn_builder *b, SpvOp opcode,
       case SpvOpAtomicAnd:
       case SpvOpAtomicOr:
       case SpvOpAtomicXor:
-         fill_common_atomic_sources(b, opcode, w, &atomic->src[1]);
+         if (ptr->mode == vtn_variable_mode_ssbo)
+            atomic->src[src++] = nir_src_for_ssa(index);
+         atomic->src[src++] = nir_src_for_ssa(offset);
+         fill_common_atomic_sources(b, opcode, w, &atomic->src[src]);
          break;
 
       default:
          vtn_fail("Invalid SPIR-V atomic");
-
       }
    } else {
-      nir_ssa_def *offset, *index;
-      offset = vtn_pointer_to_offset(b, ptr, &index);
-
-      nir_intrinsic_op op;
-      if (ptr->mode == vtn_variable_mode_ssbo) {
-         op = get_ssbo_nir_atomic_op(b, opcode);
-      } else {
-         vtn_assert(ptr->mode == vtn_variable_mode_workgroup &&
-                    b->options->lower_workgroup_access_to_offsets);
-         op = get_shared_nir_atomic_op(b, opcode);
-      }
-
+      nir_deref_instr *deref = vtn_pointer_to_deref(b, ptr);
+      const struct glsl_type *deref_type = deref->type;
+      nir_intrinsic_op op = get_deref_nir_atomic_op(b, opcode);
       atomic = nir_intrinsic_instr_create(b->nb.shader, op);
+      atomic->src[0] = nir_src_for_ssa(&deref->dest.ssa);
 
-      int src = 0;
       switch (opcode) {
       case SpvOpAtomicLoad:
-         atomic->num_components = glsl_get_vector_elements(ptr->type->type);
-         nir_intrinsic_set_align(atomic, 4, 0);
-         if (ptr->mode == vtn_variable_mode_ssbo)
-            atomic->src[src++] = nir_src_for_ssa(index);
-         atomic->src[src++] = nir_src_for_ssa(offset);
+         atomic->num_components = glsl_get_vector_elements(deref_type);
          break;
 
       case SpvOpAtomicStore:
-         atomic->num_components = glsl_get_vector_elements(ptr->type->type);
+         atomic->num_components = glsl_get_vector_elements(deref_type);
          nir_intrinsic_set_write_mask(atomic, (1 << atomic->num_components) - 1);
-         nir_intrinsic_set_align(atomic, 4, 0);
-         atomic->src[src++] = nir_src_for_ssa(vtn_ssa_value(b, w[4])->def);
-         if (ptr->mode == vtn_variable_mode_ssbo)
-            atomic->src[src++] = nir_src_for_ssa(index);
-         atomic->src[src++] = nir_src_for_ssa(offset);
+         atomic->src[1] = nir_src_for_ssa(vtn_ssa_value(b, w[4])->def);
          break;
 
       case SpvOpAtomicExchange:
@@ -2890,10 +2891,7 @@ vtn_handle_atomics(struct vtn_builder *b, SpvOp opcode,
       case SpvOpAtomicAnd:
       case SpvOpAtomicOr:
       case SpvOpAtomicXor:
-         if (ptr->mode == vtn_variable_mode_ssbo)
-            atomic->src[src++] = nir_src_for_ssa(index);
-         atomic->src[src++] = nir_src_for_ssa(offset);
-         fill_common_atomic_sources(b, opcode, w, &atomic->src[src]);
+         fill_common_atomic_sources(b, opcode, w, &atomic->src[1]);
          break;
 
       default:
diff --git a/src/compiler/spirv/vtn_private.h b/src/compiler/spirv/vtn_private.h
index 342d4b74d71..8e3fa4af7f2 100644
--- a/src/compiler/spirv/vtn_private.h
+++ b/src/compiler/spirv/vtn_private.h
@@ -454,6 +454,9 @@ struct vtn_pointer {
    enum gl_access_qualifier access;
 };
 
+bool vtn_pointer_uses_ssa_offset(struct vtn_builder *b,
+                                 struct vtn_pointer *ptr);
+
 struct vtn_variable {
    enum vtn_variable_mode mode;
 
diff --git a/src/compiler/spirv/vtn_variables.c b/src/compiler/spirv/vtn_variables.c
index 7e80263abf3..97d3be75ea3 100644
--- a/src/compiler/spirv/vtn_variables.c
+++ b/src/compiler/spirv/vtn_variables.c
@@ -44,7 +44,7 @@ vtn_access_chain_create(struct vtn_builder *b, unsigned length)
    return chain;
 }
 
-static bool
+bool
 vtn_pointer_uses_ssa_offset(struct vtn_builder *b,
                             struct vtn_pointer *ptr)
 {