author     Ian Romanick <[email protected]>   2016-08-31 18:09:27 -0700
committer  Ian Romanick <[email protected]>   2016-10-04 16:53:31 -0700
commit     b7df52b106416c410d0e14dc6bba091831f9c786 (patch)
tree       21f6c1e646af6b4ef690dc883259909f1f0dbde4 /src
parent     5854de99b2735988f1fd7c08f367b4a8e58b270e (diff)
glsl: Use the ir_intrinsic_* enums instead of the __intrinsic_* name strings
   text   data   bss     dec    hex filename
6038043 283160 28608 6349811 60e3f3 lib64/i965_dri.so before
6036507 283160 28608 6348275 60ddf3 lib64/i965_dri.so after
v2: s/ir_intrinsic_atomic_sub/ir_intrinsic_atomic_counter_sub/. Noticed
by Ilia.
v3: Silence unhandled enum in switch warnings in st_glsl_to_tgsi.
Signed-off-by: Ian Romanick <[email protected]>
Acked-by: Ilia Mirkin <[email protected]>
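
The change itself is mechanical: every consumer that used to strcmp() the callee's "__intrinsic_*" name now switches on the ir_intrinsic_* enum the signature already carries, so the per-call-site string comparisons (and their string literals) disappear, which is where the ~1.5 KiB text-size reduction above comes from. Below is a minimal standalone sketch of the before/after pattern; the two enums are reduced stand-ins, not the real Mesa ir_intrinsic_id / NIR nir_intrinsic_op definitions.

    /* Standalone sketch only -- simplified stand-in enums, not Mesa's. */
    #include <cassert>
    #include <cstdio>
    #include <cstring>

    enum ir_intrinsic_id {
       ir_intrinsic_invalid,
       ir_intrinsic_atomic_counter_read,
       ir_intrinsic_atomic_counter_increment,
       ir_intrinsic_atomic_counter_predecrement,
    };

    enum nir_intrinsic_op {
       nir_intrinsic_atomic_counter_read_var,
       nir_intrinsic_atomic_counter_inc_var,
       nir_intrinsic_atomic_counter_dec_var,
    };

    /* Before: dispatch on the "__intrinsic_*" name string. */
    static nir_intrinsic_op map_by_name(const char *callee_name)
    {
       if (strcmp(callee_name, "__intrinsic_atomic_read") == 0)
          return nir_intrinsic_atomic_counter_read_var;
       else if (strcmp(callee_name, "__intrinsic_atomic_increment") == 0)
          return nir_intrinsic_atomic_counter_inc_var;
       else if (strcmp(callee_name, "__intrinsic_atomic_predecrement") == 0)
          return nir_intrinsic_atomic_counter_dec_var;
       assert(!"not reached");
       return nir_intrinsic_atomic_counter_read_var;
    }

    /* After: dispatch on the enum already stored on the signature, as the
     * glsl_to_nir.cpp hunk below does.  One integer compare per case. */
    static nir_intrinsic_op map_by_id(ir_intrinsic_id id)
    {
       switch (id) {
       case ir_intrinsic_atomic_counter_read:
          return nir_intrinsic_atomic_counter_read_var;
       case ir_intrinsic_atomic_counter_increment:
          return nir_intrinsic_atomic_counter_inc_var;
       case ir_intrinsic_atomic_counter_predecrement:
          return nir_intrinsic_atomic_counter_dec_var;
       default:
          assert(!"not reached");
          return nir_intrinsic_atomic_counter_read_var;
       }
    }

    int main()
    {
       /* Both mappings agree for the cases they both know about. */
       assert(map_by_name("__intrinsic_atomic_increment") ==
              map_by_id(ir_intrinsic_atomic_counter_increment));
       puts("ok");
       return 0;
    }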
Diffstat (limited to 'src')
-rw-r--r--  src/compiler/glsl/glsl_to_nir.cpp             | 172
-rw-r--r--  src/compiler/glsl/lower_shared_reference.cpp  |  18
-rw-r--r--  src/compiler/glsl/lower_ubo_reference.cpp     |  18
-rw-r--r--  src/mesa/state_tracker/st_glsl_to_tgsi.cpp    | 287
4 files changed, 276 insertions, 219 deletions
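
The v3 note ("Silence unhandled enum in switch warnings") refers to -Wswitch: when a switch over an enum has no default label, GCC and Clang warn for every enumerator that is not listed. That is why the last hunk in st_glsl_to_tgsi.cpp below also spells out the cases that backend never expects -- the ir_intrinsic_generic_* IDs and ir_intrinsic_shader_clock -- under unreachable("Invalid intrinsic"). A tiny illustration, using a made-up three-value enum rather than the real one:

    #include <cstdio>

    enum ir_intrinsic_id {        /* made-up, minimal stand-in */
       ir_intrinsic_invalid,
       ir_intrinsic_ssbo_load,
       ir_intrinsic_shader_clock,
    };

    /* With -Wall (-Wswitch) the compiler warns here, roughly:
     * "enumeration value 'ir_intrinsic_shader_clock' not handled in switch". */
    static int classify_warns(ir_intrinsic_id id)
    {
       switch (id) {
       case ir_intrinsic_invalid:
          return 0;
       case ir_intrinsic_ssbo_load:
          return 1;
       }
       return -1;
    }

    /* Listing every enumerator -- even ones this consumer never expects --
     * silences the warning without adding a default, so future additions to
     * the enum get flagged again.  This mirrors what the patch does with
     * unreachable("Invalid intrinsic") in st_glsl_to_tgsi.cpp. */
    static int classify_silent(ir_intrinsic_id id)
    {
       switch (id) {
       case ir_intrinsic_invalid:
          return 0;
       case ir_intrinsic_ssbo_load:
          return 1;
       case ir_intrinsic_shader_clock:
          break;
       }
       return -1;
    }

    int main()
    {
       printf("%d %d\n", classify_warns(ir_intrinsic_ssbo_load),
              classify_silent(ir_intrinsic_shader_clock));   /* prints "1 -1" */
       return 0;
    }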
diff --git a/src/compiler/glsl/glsl_to_nir.cpp b/src/compiler/glsl/glsl_to_nir.cpp index 01dcac4ee93..7ecbfe079a1 100644 --- a/src/compiler/glsl/glsl_to_nir.cpp +++ b/src/compiler/glsl/glsl_to_nir.cpp @@ -609,74 +609,75 @@ nir_visitor::visit(ir_call *ir) { if (ir->callee->is_intrinsic) { nir_intrinsic_op op; - if (strcmp(ir->callee_name(), "__intrinsic_atomic_read") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_atomic_counter_read); + + switch (ir->callee->intrinsic_id) { + case ir_intrinsic_atomic_counter_read: op = nir_intrinsic_atomic_counter_read_var; - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_increment") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_atomic_counter_increment); + break; + case ir_intrinsic_atomic_counter_increment: op = nir_intrinsic_atomic_counter_inc_var; - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_predecrement") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_atomic_counter_predecrement); + break; + case ir_intrinsic_atomic_counter_predecrement: op = nir_intrinsic_atomic_counter_dec_var; - } else if (strcmp(ir->callee_name(), "__intrinsic_image_load") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_image_load); + break; + case ir_intrinsic_image_load: op = nir_intrinsic_image_load; - } else if (strcmp(ir->callee_name(), "__intrinsic_image_store") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_image_store); + break; + case ir_intrinsic_image_store: op = nir_intrinsic_image_store; - } else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_add") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_add); + break; + case ir_intrinsic_image_atomic_add: op = nir_intrinsic_image_atomic_add; - } else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_min") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_min); + break; + case ir_intrinsic_image_atomic_min: op = nir_intrinsic_image_atomic_min; - } else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_max") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_max); + break; + case ir_intrinsic_image_atomic_max: op = nir_intrinsic_image_atomic_max; - } else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_and") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_and); + break; + case ir_intrinsic_image_atomic_and: op = nir_intrinsic_image_atomic_and; - } else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_or") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_or); + break; + case ir_intrinsic_image_atomic_or: op = nir_intrinsic_image_atomic_or; - } else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_xor") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_xor); + break; + case ir_intrinsic_image_atomic_xor: op = nir_intrinsic_image_atomic_xor; - } else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_exchange") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_exchange); + break; + case ir_intrinsic_image_atomic_exchange: op = nir_intrinsic_image_atomic_exchange; - } else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_comp_swap") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_comp_swap); + break; + case ir_intrinsic_image_atomic_comp_swap: op = nir_intrinsic_image_atomic_comp_swap; - } else if (strcmp(ir->callee_name(), "__intrinsic_memory_barrier") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_memory_barrier); + 
break; + case ir_intrinsic_memory_barrier: op = nir_intrinsic_memory_barrier; - } else if (strcmp(ir->callee_name(), "__intrinsic_image_size") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_image_size); + break; + case ir_intrinsic_image_size: op = nir_intrinsic_image_size; - } else if (strcmp(ir->callee_name(), "__intrinsic_image_samples") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_image_samples); + break; + case ir_intrinsic_image_samples: op = nir_intrinsic_image_samples; - } else if (strcmp(ir->callee_name(), "__intrinsic_store_ssbo") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_store); + break; + case ir_intrinsic_ssbo_store: op = nir_intrinsic_store_ssbo; - } else if (strcmp(ir->callee_name(), "__intrinsic_load_ssbo") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_load); + break; + case ir_intrinsic_ssbo_load: op = nir_intrinsic_load_ssbo; - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_add_ssbo") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_add); + break; + case ir_intrinsic_ssbo_atomic_add: op = nir_intrinsic_ssbo_atomic_add; - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_and_ssbo") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_and); + break; + case ir_intrinsic_ssbo_atomic_and: op = nir_intrinsic_ssbo_atomic_and; - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_or_ssbo") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_or); + break; + case ir_intrinsic_ssbo_atomic_or: op = nir_intrinsic_ssbo_atomic_or; - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_xor_ssbo") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_xor); + break; + case ir_intrinsic_ssbo_atomic_xor: op = nir_intrinsic_ssbo_atomic_xor; - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_min_ssbo") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_min); + break; + case ir_intrinsic_ssbo_atomic_min: assert(ir->return_deref); if (ir->return_deref->type == glsl_type::int_type) op = nir_intrinsic_ssbo_atomic_imin; @@ -684,8 +685,8 @@ nir_visitor::visit(ir_call *ir) op = nir_intrinsic_ssbo_atomic_umin; else unreachable("Invalid type"); - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_max_ssbo") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_max); + break; + case ir_intrinsic_ssbo_atomic_max: assert(ir->return_deref); if (ir->return_deref->type == glsl_type::int_type) op = nir_intrinsic_ssbo_atomic_imax; @@ -693,50 +694,50 @@ nir_visitor::visit(ir_call *ir) op = nir_intrinsic_ssbo_atomic_umax; else unreachable("Invalid type"); - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_exchange_ssbo") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_exchange); + break; + case ir_intrinsic_ssbo_atomic_exchange: op = nir_intrinsic_ssbo_atomic_exchange; - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_comp_swap_ssbo") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_comp_swap); + break; + case ir_intrinsic_ssbo_atomic_comp_swap: op = nir_intrinsic_ssbo_atomic_comp_swap; - } else if (strcmp(ir->callee_name(), "__intrinsic_shader_clock") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_shader_clock); + break; + case ir_intrinsic_shader_clock: op = nir_intrinsic_shader_clock; - } else if (strcmp(ir->callee_name(), "__intrinsic_group_memory_barrier") == 0) { - assert(ir->callee->intrinsic_id == 
ir_intrinsic_group_memory_barrier); + break; + case ir_intrinsic_group_memory_barrier: op = nir_intrinsic_group_memory_barrier; - } else if (strcmp(ir->callee_name(), "__intrinsic_memory_barrier_atomic_counter") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_memory_barrier_atomic_counter); + break; + case ir_intrinsic_memory_barrier_atomic_counter: op = nir_intrinsic_memory_barrier_atomic_counter; - } else if (strcmp(ir->callee_name(), "__intrinsic_memory_barrier_buffer") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_memory_barrier_buffer); + break; + case ir_intrinsic_memory_barrier_buffer: op = nir_intrinsic_memory_barrier_buffer; - } else if (strcmp(ir->callee_name(), "__intrinsic_memory_barrier_image") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_memory_barrier_image); + break; + case ir_intrinsic_memory_barrier_image: op = nir_intrinsic_memory_barrier_image; - } else if (strcmp(ir->callee_name(), "__intrinsic_memory_barrier_shared") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_memory_barrier_shared); + break; + case ir_intrinsic_memory_barrier_shared: op = nir_intrinsic_memory_barrier_shared; - } else if (strcmp(ir->callee_name(), "__intrinsic_load_shared") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_shared_load); + break; + case ir_intrinsic_shared_load: op = nir_intrinsic_load_shared; - } else if (strcmp(ir->callee_name(), "__intrinsic_store_shared") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_shared_store); + break; + case ir_intrinsic_shared_store: op = nir_intrinsic_store_shared; - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_add_shared") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_add); + break; + case ir_intrinsic_shared_atomic_add: op = nir_intrinsic_shared_atomic_add; - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_and_shared") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_and); + break; + case ir_intrinsic_shared_atomic_and: op = nir_intrinsic_shared_atomic_and; - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_or_shared") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_or); + break; + case ir_intrinsic_shared_atomic_or: op = nir_intrinsic_shared_atomic_or; - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_xor_shared") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_xor); + break; + case ir_intrinsic_shared_atomic_xor: op = nir_intrinsic_shared_atomic_xor; - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_min_shared") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_min); + break; + case ir_intrinsic_shared_atomic_min: assert(ir->return_deref); if (ir->return_deref->type == glsl_type::int_type) op = nir_intrinsic_shared_atomic_imin; @@ -744,8 +745,8 @@ nir_visitor::visit(ir_call *ir) op = nir_intrinsic_shared_atomic_umin; else unreachable("Invalid type"); - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_max_shared") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_max); + break; + case ir_intrinsic_shared_atomic_max: assert(ir->return_deref); if (ir->return_deref->type == glsl_type::int_type) op = nir_intrinsic_shared_atomic_imax; @@ -753,13 +754,14 @@ nir_visitor::visit(ir_call *ir) op = nir_intrinsic_shared_atomic_umax; else unreachable("Invalid type"); - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_exchange_shared") == 0) { - assert(ir->callee->intrinsic_id == 
ir_intrinsic_shared_atomic_exchange); + break; + case ir_intrinsic_shared_atomic_exchange: op = nir_intrinsic_shared_atomic_exchange; - } else if (strcmp(ir->callee_name(), "__intrinsic_atomic_comp_swap_shared") == 0) { - assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_comp_swap); + break; + case ir_intrinsic_shared_atomic_comp_swap: op = nir_intrinsic_shared_atomic_comp_swap; - } else { + break; + default: unreachable("not reached"); } diff --git a/src/compiler/glsl/lower_shared_reference.cpp b/src/compiler/glsl/lower_shared_reference.cpp index 49ee3774c68..a7e738e8fc4 100644 --- a/src/compiler/glsl/lower_shared_reference.cpp +++ b/src/compiler/glsl/lower_shared_reference.cpp @@ -450,15 +450,15 @@ lower_shared_reference_visitor::check_for_shared_atomic_intrinsic(ir_call *ir) if (!var || var->data.mode != ir_var_shader_shared) return ir; - const char *callee = ir->callee_name(); - if (!strcmp("__intrinsic_atomic_add", callee) || - !strcmp("__intrinsic_atomic_min", callee) || - !strcmp("__intrinsic_atomic_max", callee) || - !strcmp("__intrinsic_atomic_and", callee) || - !strcmp("__intrinsic_atomic_or", callee) || - !strcmp("__intrinsic_atomic_xor", callee) || - !strcmp("__intrinsic_atomic_exchange", callee) || - !strcmp("__intrinsic_atomic_comp_swap", callee)) { + const enum ir_intrinsic_id id = ir->callee->intrinsic_id; + if (id == ir_intrinsic_generic_atomic_add || + id == ir_intrinsic_generic_atomic_min || + id == ir_intrinsic_generic_atomic_max || + id == ir_intrinsic_generic_atomic_and || + id == ir_intrinsic_generic_atomic_or || + id == ir_intrinsic_generic_atomic_xor || + id == ir_intrinsic_generic_atomic_exchange || + id == ir_intrinsic_generic_atomic_comp_swap) { return lower_shared_atomic_intrinsic(ir); } diff --git a/src/compiler/glsl/lower_ubo_reference.cpp b/src/compiler/glsl/lower_ubo_reference.cpp index 0e6a02d868c..276e77fbc82 100644 --- a/src/compiler/glsl/lower_ubo_reference.cpp +++ b/src/compiler/glsl/lower_ubo_reference.cpp @@ -1063,15 +1063,15 @@ lower_ubo_reference_visitor::check_for_ssbo_atomic_intrinsic(ir_call *ir) if (!var || !var->is_in_shader_storage_block()) return ir; - const char *callee = ir->callee_name(); - if (!strcmp("__intrinsic_atomic_add", callee) || - !strcmp("__intrinsic_atomic_min", callee) || - !strcmp("__intrinsic_atomic_max", callee) || - !strcmp("__intrinsic_atomic_and", callee) || - !strcmp("__intrinsic_atomic_or", callee) || - !strcmp("__intrinsic_atomic_xor", callee) || - !strcmp("__intrinsic_atomic_exchange", callee) || - !strcmp("__intrinsic_atomic_comp_swap", callee)) { + const enum ir_intrinsic_id id = ir->callee->intrinsic_id; + if (id == ir_intrinsic_generic_atomic_add || + id == ir_intrinsic_generic_atomic_min || + id == ir_intrinsic_generic_atomic_max || + id == ir_intrinsic_generic_atomic_and || + id == ir_intrinsic_generic_atomic_or || + id == ir_intrinsic_generic_atomic_xor || + id == ir_intrinsic_generic_atomic_exchange || + id == ir_intrinsic_generic_atomic_comp_swap) { return lower_ssbo_atomic_intrinsic(ir); } diff --git a/src/mesa/state_tracker/st_glsl_to_tgsi.cpp b/src/mesa/state_tracker/st_glsl_to_tgsi.cpp index 81ea233a8ec..749008b0fe1 100644 --- a/src/mesa/state_tracker/st_glsl_to_tgsi.cpp +++ b/src/mesa/state_tracker/st_glsl_to_tgsi.cpp @@ -3144,7 +3144,6 @@ glsl_to_tgsi_visitor::get_function_signature(ir_function_signature *sig) void glsl_to_tgsi_visitor::visit_atomic_counter_intrinsic(ir_call *ir) { - const char *callee = ir->callee->function_name(); exec_node *param = ir->actual_parameters.get_head(); 
ir_dereference *deref = static_cast<ir_dereference *>(param); ir_variable *location = deref->variable_referenced(); @@ -3173,12 +3172,12 @@ glsl_to_tgsi_visitor::visit_atomic_counter_intrinsic(ir_call *ir) glsl_to_tgsi_instruction *inst; - if (!strcmp("__intrinsic_atomic_read", callee)) { + if (ir->callee->intrinsic_id == ir_intrinsic_atomic_counter_read) { inst = emit_asm(ir, TGSI_OPCODE_LOAD, dst, offset); - } else if (!strcmp("__intrinsic_atomic_increment", callee)) { + } else if (ir->callee->intrinsic_id == ir_intrinsic_atomic_counter_increment) { inst = emit_asm(ir, TGSI_OPCODE_ATOMUADD, dst, offset, st_src_reg_for_int(1)); - } else if (!strcmp("__intrinsic_atomic_predecrement", callee)) { + } else if (ir->callee->intrinsic_id == ir_intrinsic_atomic_counter_predecrement) { inst = emit_asm(ir, TGSI_OPCODE_ATOMUADD, dst, offset, st_src_reg_for_int(-1)); emit_asm(ir, TGSI_OPCODE_ADD, dst, this->result, st_src_reg_for_int(-1)); @@ -3189,34 +3188,46 @@ glsl_to_tgsi_visitor::visit_atomic_counter_intrinsic(ir_call *ir) st_src_reg data = this->result, data2 = undef_src; unsigned opcode; - if (!strcmp("__intrinsic_atomic_add", callee)) + switch (ir->callee->intrinsic_id) { + case ir_intrinsic_atomic_counter_add: opcode = TGSI_OPCODE_ATOMUADD; - else if (!strcmp("__intrinsic_atomic_min", callee)) + break; + case ir_intrinsic_atomic_counter_min: opcode = TGSI_OPCODE_ATOMIMIN; - else if (!strcmp("__intrinsic_atomic_max", callee)) + break; + case ir_intrinsic_atomic_counter_max: opcode = TGSI_OPCODE_ATOMIMAX; - else if (!strcmp("__intrinsic_atomic_and", callee)) + break; + case ir_intrinsic_atomic_counter_and: opcode = TGSI_OPCODE_ATOMAND; - else if (!strcmp("__intrinsic_atomic_or", callee)) + break; + case ir_intrinsic_atomic_counter_or: opcode = TGSI_OPCODE_ATOMOR; - else if (!strcmp("__intrinsic_atomic_xor", callee)) + break; + case ir_intrinsic_atomic_counter_xor: opcode = TGSI_OPCODE_ATOMXOR; - else if (!strcmp("__intrinsic_atomic_exchange", callee)) + break; + case ir_intrinsic_atomic_counter_exchange: opcode = TGSI_OPCODE_ATOMXCHG; - else if (!strcmp("__intrinsic_atomic_comp_swap", callee)) { + break; + case ir_intrinsic_atomic_counter_comp_swap: { opcode = TGSI_OPCODE_ATOMCAS; param = param->get_next(); val = ((ir_instruction *)param)->as_rvalue(); val->accept(this); data2 = this->result; - } else if (!strcmp("__intrinsic_atomic_sub", callee)) { + break; + } + case ir_intrinsic_atomic_counter_sub: { opcode = TGSI_OPCODE_ATOMUADD; st_src_reg res = get_temp(glsl_type::uvec4_type); st_dst_reg dstres = st_dst_reg(res); dstres.writemask = dst.writemask; emit_asm(ir, TGSI_OPCODE_INEG, dstres, data); data = res; - } else { + break; + } + default: assert(!"Unexpected intrinsic"); return; } @@ -3230,7 +3241,6 @@ glsl_to_tgsi_visitor::visit_atomic_counter_intrinsic(ir_call *ir) void glsl_to_tgsi_visitor::visit_ssbo_intrinsic(ir_call *ir) { - const char *callee = ir->callee->function_name(); exec_node *param = ir->actual_parameters.get_head(); ir_rvalue *block = ((ir_instruction *)param)->as_rvalue(); @@ -3266,11 +3276,11 @@ glsl_to_tgsi_visitor::visit_ssbo_intrinsic(ir_call *ir) glsl_to_tgsi_instruction *inst; - if (!strcmp("__intrinsic_load_ssbo", callee)) { + if (ir->callee->intrinsic_id == ir_intrinsic_ssbo_load) { inst = emit_asm(ir, TGSI_OPCODE_LOAD, dst, off); if (dst.type == GLSL_TYPE_BOOL) emit_asm(ir, TGSI_OPCODE_USNE, dst, st_src_reg(dst), st_src_reg_for_int(0)); - } else if (!strcmp("__intrinsic_store_ssbo", callee)) { + } else if (ir->callee->intrinsic_id == ir_intrinsic_ssbo_store) { 
param = param->get_next(); ir_rvalue *val = ((ir_instruction *)param)->as_rvalue(); val->accept(this); @@ -3289,27 +3299,36 @@ glsl_to_tgsi_visitor::visit_ssbo_intrinsic(ir_call *ir) st_src_reg data = this->result, data2 = undef_src; unsigned opcode; - if (!strcmp("__intrinsic_atomic_add_ssbo", callee)) + switch (ir->callee->intrinsic_id) { + case ir_intrinsic_ssbo_atomic_add: opcode = TGSI_OPCODE_ATOMUADD; - else if (!strcmp("__intrinsic_atomic_min_ssbo", callee)) + break; + case ir_intrinsic_ssbo_atomic_min: opcode = TGSI_OPCODE_ATOMIMIN; - else if (!strcmp("__intrinsic_atomic_max_ssbo", callee)) + break; + case ir_intrinsic_ssbo_atomic_max: opcode = TGSI_OPCODE_ATOMIMAX; - else if (!strcmp("__intrinsic_atomic_and_ssbo", callee)) + break; + case ir_intrinsic_ssbo_atomic_and: opcode = TGSI_OPCODE_ATOMAND; - else if (!strcmp("__intrinsic_atomic_or_ssbo", callee)) + break; + case ir_intrinsic_ssbo_atomic_or: opcode = TGSI_OPCODE_ATOMOR; - else if (!strcmp("__intrinsic_atomic_xor_ssbo", callee)) + break; + case ir_intrinsic_ssbo_atomic_xor: opcode = TGSI_OPCODE_ATOMXOR; - else if (!strcmp("__intrinsic_atomic_exchange_ssbo", callee)) + break; + case ir_intrinsic_ssbo_atomic_exchange: opcode = TGSI_OPCODE_ATOMXCHG; - else if (!strcmp("__intrinsic_atomic_comp_swap_ssbo", callee)) { + break; + case ir_intrinsic_ssbo_atomic_comp_swap: opcode = TGSI_OPCODE_ATOMCAS; param = param->get_next(); val = ((ir_instruction *)param)->as_rvalue(); val->accept(this); data2 = this->result; - } else { + break; + default: assert(!"Unexpected intrinsic"); return; } @@ -3341,41 +3360,46 @@ glsl_to_tgsi_visitor::visit_ssbo_intrinsic(ir_call *ir) void glsl_to_tgsi_visitor::visit_membar_intrinsic(ir_call *ir) { - const char *callee = ir->callee->function_name(); - - if (!strcmp("__intrinsic_memory_barrier", callee)) + switch (ir->callee->intrinsic_id) { + case ir_intrinsic_memory_barrier: emit_asm(ir, TGSI_OPCODE_MEMBAR, undef_dst, st_src_reg_for_int(TGSI_MEMBAR_SHADER_BUFFER | TGSI_MEMBAR_ATOMIC_BUFFER | TGSI_MEMBAR_SHADER_IMAGE | TGSI_MEMBAR_SHARED)); - else if (!strcmp("__intrinsic_memory_barrier_atomic_counter", callee)) + break; + case ir_intrinsic_memory_barrier_atomic_counter: emit_asm(ir, TGSI_OPCODE_MEMBAR, undef_dst, st_src_reg_for_int(TGSI_MEMBAR_ATOMIC_BUFFER)); - else if (!strcmp("__intrinsic_memory_barrier_buffer", callee)) + break; + case ir_intrinsic_memory_barrier_buffer: emit_asm(ir, TGSI_OPCODE_MEMBAR, undef_dst, st_src_reg_for_int(TGSI_MEMBAR_SHADER_BUFFER)); - else if (!strcmp("__intrinsic_memory_barrier_image", callee)) + break; + case ir_intrinsic_memory_barrier_image: emit_asm(ir, TGSI_OPCODE_MEMBAR, undef_dst, st_src_reg_for_int(TGSI_MEMBAR_SHADER_IMAGE)); - else if (!strcmp("__intrinsic_memory_barrier_shared", callee)) + break; + case ir_intrinsic_memory_barrier_shared: emit_asm(ir, TGSI_OPCODE_MEMBAR, undef_dst, st_src_reg_for_int(TGSI_MEMBAR_SHARED)); - else if (!strcmp("__intrinsic_group_memory_barrier", callee)) + break; + case ir_intrinsic_group_memory_barrier: emit_asm(ir, TGSI_OPCODE_MEMBAR, undef_dst, st_src_reg_for_int(TGSI_MEMBAR_SHADER_BUFFER | TGSI_MEMBAR_ATOMIC_BUFFER | TGSI_MEMBAR_SHADER_IMAGE | TGSI_MEMBAR_SHARED | TGSI_MEMBAR_THREAD_GROUP)); - else + break; + default: assert(!"Unexpected memory barrier intrinsic"); + } } void glsl_to_tgsi_visitor::visit_shared_intrinsic(ir_call *ir) { - const char *callee = ir->callee->function_name(); exec_node *param = ir->actual_parameters.get_head(); ir_rvalue *offset = ((ir_instruction *)param)->as_rvalue(); @@ -3395,10 +3419,10 @@ 
glsl_to_tgsi_visitor::visit_shared_intrinsic(ir_call *ir) glsl_to_tgsi_instruction *inst; - if (!strcmp("__intrinsic_load_shared", callee)) { + if (ir->callee->intrinsic_id == ir_intrinsic_shared_load) { inst = emit_asm(ir, TGSI_OPCODE_LOAD, dst, off); inst->buffer = buffer; - } else if (!strcmp("__intrinsic_store_shared", callee)) { + } else if (ir->callee->intrinsic_id == ir_intrinsic_shared_store) { param = param->get_next(); ir_rvalue *val = ((ir_instruction *)param)->as_rvalue(); val->accept(this); @@ -3418,27 +3442,36 @@ glsl_to_tgsi_visitor::visit_shared_intrinsic(ir_call *ir) st_src_reg data = this->result, data2 = undef_src; unsigned opcode; - if (!strcmp("__intrinsic_atomic_add_shared", callee)) + switch (ir->callee->intrinsic_id) { + case ir_intrinsic_shared_atomic_add: opcode = TGSI_OPCODE_ATOMUADD; - else if (!strcmp("__intrinsic_atomic_min_shared", callee)) + break; + case ir_intrinsic_shared_atomic_min: opcode = TGSI_OPCODE_ATOMIMIN; - else if (!strcmp("__intrinsic_atomic_max_shared", callee)) + break; + case ir_intrinsic_shared_atomic_max: opcode = TGSI_OPCODE_ATOMIMAX; - else if (!strcmp("__intrinsic_atomic_and_shared", callee)) + break; + case ir_intrinsic_shared_atomic_and: opcode = TGSI_OPCODE_ATOMAND; - else if (!strcmp("__intrinsic_atomic_or_shared", callee)) + break; + case ir_intrinsic_shared_atomic_or: opcode = TGSI_OPCODE_ATOMOR; - else if (!strcmp("__intrinsic_atomic_xor_shared", callee)) + break; + case ir_intrinsic_shared_atomic_xor: opcode = TGSI_OPCODE_ATOMXOR; - else if (!strcmp("__intrinsic_atomic_exchange_shared", callee)) + break; + case ir_intrinsic_shared_atomic_exchange: opcode = TGSI_OPCODE_ATOMXCHG; - else if (!strcmp("__intrinsic_atomic_comp_swap_shared", callee)) { + break; + case ir_intrinsic_shared_atomic_comp_swap: opcode = TGSI_OPCODE_ATOMCAS; param = param->get_next(); val = ((ir_instruction *)param)->as_rvalue(); val->accept(this); data2 = this->result; - } else { + break; + default: assert(!"Unexpected intrinsic"); return; } @@ -3451,7 +3484,6 @@ glsl_to_tgsi_visitor::visit_shared_intrinsic(ir_call *ir) void glsl_to_tgsi_visitor::visit_image_intrinsic(ir_call *ir) { - const char *callee = ir->callee->function_name(); exec_node *param = ir->actual_parameters.get_head(); ir_dereference *img = (ir_dereference *)param; @@ -3479,10 +3511,10 @@ glsl_to_tgsi_visitor::visit_image_intrinsic(ir_call *ir) glsl_to_tgsi_instruction *inst; - if (!strcmp("__intrinsic_image_size", callee)) { + if (ir->callee->intrinsic_id == ir_intrinsic_image_size) { dst.writemask = WRITEMASK_XYZ; inst = emit_asm(ir, TGSI_OPCODE_RESQ, dst); - } else if (!strcmp("__intrinsic_image_samples", callee)) { + } else if (ir->callee->intrinsic_id == ir_intrinsic_image_samples) { st_src_reg res = get_temp(glsl_type::ivec4_type); st_dst_reg dstres = st_dst_reg(res); dstres.writemask = WRITEMASK_W; @@ -3534,27 +3566,38 @@ glsl_to_tgsi_visitor::visit_image_intrinsic(ir_call *ir) assert(param->is_tail_sentinel()); unsigned opcode; - if (!strcmp("__intrinsic_image_load", callee)) + switch (ir->callee->intrinsic_id) { + case ir_intrinsic_image_load: opcode = TGSI_OPCODE_LOAD; - else if (!strcmp("__intrinsic_image_store", callee)) + break; + case ir_intrinsic_image_store: opcode = TGSI_OPCODE_STORE; - else if (!strcmp("__intrinsic_image_atomic_add", callee)) + break; + case ir_intrinsic_image_atomic_add: opcode = TGSI_OPCODE_ATOMUADD; - else if (!strcmp("__intrinsic_image_atomic_min", callee)) + break; + case ir_intrinsic_image_atomic_min: opcode = TGSI_OPCODE_ATOMIMIN; - else if 
(!strcmp("__intrinsic_image_atomic_max", callee)) + break; + case ir_intrinsic_image_atomic_max: opcode = TGSI_OPCODE_ATOMIMAX; - else if (!strcmp("__intrinsic_image_atomic_and", callee)) + break; + case ir_intrinsic_image_atomic_and: opcode = TGSI_OPCODE_ATOMAND; - else if (!strcmp("__intrinsic_image_atomic_or", callee)) + break; + case ir_intrinsic_image_atomic_or: opcode = TGSI_OPCODE_ATOMOR; - else if (!strcmp("__intrinsic_image_atomic_xor", callee)) + break; + case ir_intrinsic_image_atomic_xor: opcode = TGSI_OPCODE_ATOMXOR; - else if (!strcmp("__intrinsic_image_atomic_exchange", callee)) + break; + case ir_intrinsic_image_atomic_exchange: opcode = TGSI_OPCODE_ATOMXCHG; - else if (!strcmp("__intrinsic_image_atomic_comp_swap", callee)) + break; + case ir_intrinsic_image_atomic_comp_swap: opcode = TGSI_OPCODE_ATOMCAS; - else { + break; + default: assert(!"Unexpected intrinsic"); return; } @@ -3617,79 +3660,91 @@ glsl_to_tgsi_visitor::visit(ir_call *ir) { glsl_to_tgsi_instruction *call_inst; ir_function_signature *sig = ir->callee; - const char *callee = sig->function_name(); function_entry *entry; int i; /* Filter out intrinsics */ - if (!strcmp("__intrinsic_atomic_read", callee) || - !strcmp("__intrinsic_atomic_increment", callee) || - !strcmp("__intrinsic_atomic_predecrement", callee) || - !strcmp("__intrinsic_atomic_add", callee) || - !strcmp("__intrinsic_atomic_sub", callee) || - !strcmp("__intrinsic_atomic_min", callee) || - !strcmp("__intrinsic_atomic_max", callee) || - !strcmp("__intrinsic_atomic_and", callee) || - !strcmp("__intrinsic_atomic_or", callee) || - !strcmp("__intrinsic_atomic_xor", callee) || - !strcmp("__intrinsic_atomic_exchange", callee) || - !strcmp("__intrinsic_atomic_comp_swap", callee)) { + switch (sig->intrinsic_id) { + case ir_intrinsic_invalid: + break; + + case ir_intrinsic_atomic_counter_read: + case ir_intrinsic_atomic_counter_increment: + case ir_intrinsic_atomic_counter_predecrement: + case ir_intrinsic_atomic_counter_add: + case ir_intrinsic_atomic_counter_sub: + case ir_intrinsic_atomic_counter_min: + case ir_intrinsic_atomic_counter_max: + case ir_intrinsic_atomic_counter_and: + case ir_intrinsic_atomic_counter_or: + case ir_intrinsic_atomic_counter_xor: + case ir_intrinsic_atomic_counter_exchange: + case ir_intrinsic_atomic_counter_comp_swap: visit_atomic_counter_intrinsic(ir); return; - } - if (!strcmp("__intrinsic_load_ssbo", callee) || - !strcmp("__intrinsic_store_ssbo", callee) || - !strcmp("__intrinsic_atomic_add_ssbo", callee) || - !strcmp("__intrinsic_atomic_min_ssbo", callee) || - !strcmp("__intrinsic_atomic_max_ssbo", callee) || - !strcmp("__intrinsic_atomic_and_ssbo", callee) || - !strcmp("__intrinsic_atomic_or_ssbo", callee) || - !strcmp("__intrinsic_atomic_xor_ssbo", callee) || - !strcmp("__intrinsic_atomic_exchange_ssbo", callee) || - !strcmp("__intrinsic_atomic_comp_swap_ssbo", callee)) { + case ir_intrinsic_ssbo_load: + case ir_intrinsic_ssbo_store: + case ir_intrinsic_ssbo_atomic_add: + case ir_intrinsic_ssbo_atomic_min: + case ir_intrinsic_ssbo_atomic_max: + case ir_intrinsic_ssbo_atomic_and: + case ir_intrinsic_ssbo_atomic_or: + case ir_intrinsic_ssbo_atomic_xor: + case ir_intrinsic_ssbo_atomic_exchange: + case ir_intrinsic_ssbo_atomic_comp_swap: visit_ssbo_intrinsic(ir); return; - } - if (!strcmp("__intrinsic_memory_barrier", callee) || - !strcmp("__intrinsic_memory_barrier_atomic_counter", callee) || - !strcmp("__intrinsic_memory_barrier_buffer", callee) || - !strcmp("__intrinsic_memory_barrier_image", callee) || - 
!strcmp("__intrinsic_memory_barrier_shared", callee) || - !strcmp("__intrinsic_group_memory_barrier", callee)) { + case ir_intrinsic_memory_barrier: + case ir_intrinsic_memory_barrier_atomic_counter: + case ir_intrinsic_memory_barrier_buffer: + case ir_intrinsic_memory_barrier_image: + case ir_intrinsic_memory_barrier_shared: + case ir_intrinsic_group_memory_barrier: visit_membar_intrinsic(ir); return; - } - if (!strcmp("__intrinsic_load_shared", callee) || - !strcmp("__intrinsic_store_shared", callee) || - !strcmp("__intrinsic_atomic_add_shared", callee) || - !strcmp("__intrinsic_atomic_min_shared", callee) || - !strcmp("__intrinsic_atomic_max_shared", callee) || - !strcmp("__intrinsic_atomic_and_shared", callee) || - !strcmp("__intrinsic_atomic_or_shared", callee) || - !strcmp("__intrinsic_atomic_xor_shared", callee) || - !strcmp("__intrinsic_atomic_exchange_shared", callee) || - !strcmp("__intrinsic_atomic_comp_swap_shared", callee)) { + case ir_intrinsic_shared_load: + case ir_intrinsic_shared_store: + case ir_intrinsic_shared_atomic_add: + case ir_intrinsic_shared_atomic_min: + case ir_intrinsic_shared_atomic_max: + case ir_intrinsic_shared_atomic_and: + case ir_intrinsic_shared_atomic_or: + case ir_intrinsic_shared_atomic_xor: + case ir_intrinsic_shared_atomic_exchange: + case ir_intrinsic_shared_atomic_comp_swap: visit_shared_intrinsic(ir); return; - } - if (!strcmp("__intrinsic_image_load", callee) || - !strcmp("__intrinsic_image_store", callee) || - !strcmp("__intrinsic_image_atomic_add", callee) || - !strcmp("__intrinsic_image_atomic_min", callee) || - !strcmp("__intrinsic_image_atomic_max", callee) || - !strcmp("__intrinsic_image_atomic_and", callee) || - !strcmp("__intrinsic_image_atomic_or", callee) || - !strcmp("__intrinsic_image_atomic_xor", callee) || - !strcmp("__intrinsic_image_atomic_exchange", callee) || - !strcmp("__intrinsic_image_atomic_comp_swap", callee) || - !strcmp("__intrinsic_image_size", callee) || - !strcmp("__intrinsic_image_samples", callee)) { + case ir_intrinsic_image_load: + case ir_intrinsic_image_store: + case ir_intrinsic_image_atomic_add: + case ir_intrinsic_image_atomic_min: + case ir_intrinsic_image_atomic_max: + case ir_intrinsic_image_atomic_and: + case ir_intrinsic_image_atomic_or: + case ir_intrinsic_image_atomic_xor: + case ir_intrinsic_image_atomic_exchange: + case ir_intrinsic_image_atomic_comp_swap: + case ir_intrinsic_image_size: + case ir_intrinsic_image_samples: visit_image_intrinsic(ir); return; + + case ir_intrinsic_generic_load: + case ir_intrinsic_generic_store: + case ir_intrinsic_generic_atomic_add: + case ir_intrinsic_generic_atomic_and: + case ir_intrinsic_generic_atomic_or: + case ir_intrinsic_generic_atomic_xor: + case ir_intrinsic_generic_atomic_min: + case ir_intrinsic_generic_atomic_max: + case ir_intrinsic_generic_atomic_exchange: + case ir_intrinsic_generic_atomic_comp_swap: + case ir_intrinsic_shader_clock: + unreachable("Invalid intrinsic"); } entry = get_function_signature(sig); |