author     Ian Romanick <[email protected]>    2016-08-30 17:49:24 -0700
committer  Ian Romanick <[email protected]>    2016-10-04 16:53:31 -0700
commit     5854de99b2735988f1fd7c08f367b4a8e58b270e
tree       9fad096d319b67ecbf067143fca87a21e0f3fd6e
parent     c01f2bfc6c7abfbbb66b9d91a81b96518285ccf1
glsl: Track a unique intrinsic ID with each intrinsic function
   text    data     bss     dec     hex  filename
6037483  283160   28608 6349251  60e1c3  lib64/i965_dri.so before
6038043  283160   28608 6349811  60e3f3  lib64/i965_dri.so after

Signed-off-by: Ian Romanick <[email protected]>
Acked-by: Ilia Mirkin <[email protected]>
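With the MAKE_INTRINSIC change in builtin_functions.cpp below, every built-in intrinsic signature is stamped with the new enum value at creation time. For example, the call in _atomic_counter_intrinsic1() expands to roughly:

   ir_function_signature *sig =
      new_sig(glsl_type::uint_type, avail, 2, counter, data);
   sig->is_intrinsic = true;
   sig->intrinsic_id = id;   /* new: records which intrinsic this signature is */

The function linker and the shared/SSBO lowering passes propagate or set the same field on the signatures they copy or create, so every intrinsic call site carries an ID.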
-rw-r--r--  src/compiler/glsl/builtin_functions.cpp        208
-rw-r--r--  src/compiler/glsl/glsl_to_nir.cpp               42
-rw-r--r--  src/compiler/glsl/ir.cpp                         2
-rw-r--r--  src/compiler/glsl/ir.h                          88
-rw-r--r--  src/compiler/glsl/link_functions.cpp             1
-rw-r--r--  src/compiler/glsl/lower_shared_reference.cpp     6
-rw-r--r--  src/compiler/glsl/lower_ubo_reference.cpp        6
7 files changed, 280 insertions, 73 deletions
diff --git a/src/compiler/glsl/builtin_functions.cpp b/src/compiler/glsl/builtin_functions.cpp
index 102c0398e6f..daaa3a69e61 100644
--- a/src/compiler/glsl/builtin_functions.cpp
+++ b/src/compiler/glsl/builtin_functions.cpp
@@ -634,7 +634,8 @@ private:
const char *intrinsic_name,
image_prototype_ctr prototype,
unsigned num_arguments,
- unsigned flags);
+ unsigned flags,
+ enum ir_intrinsic_id id);
/**
* Create new functions for all known image built-ins and types.
@@ -821,9 +822,12 @@ private:
B1(interpolateAtOffset)
B1(interpolateAtSample)
- ir_function_signature *_atomic_counter_intrinsic(builtin_available_predicate avail);
- ir_function_signature *_atomic_counter_intrinsic1(builtin_available_predicate avail);
- ir_function_signature *_atomic_counter_intrinsic2(builtin_available_predicate avail);
+ ir_function_signature *_atomic_counter_intrinsic(builtin_available_predicate avail,
+ enum ir_intrinsic_id id);
+ ir_function_signature *_atomic_counter_intrinsic1(builtin_available_predicate avail,
+ enum ir_intrinsic_id id);
+ ir_function_signature *_atomic_counter_intrinsic2(builtin_available_predicate avail,
+ enum ir_intrinsic_id id);
ir_function_signature *_atomic_counter_op(const char *intrinsic,
builtin_available_predicate avail);
ir_function_signature *_atomic_counter_op1(const char *intrinsic,
@@ -832,12 +836,14 @@ private:
builtin_available_predicate avail);
ir_function_signature *_atomic_intrinsic2(builtin_available_predicate avail,
- const glsl_type *type);
+ const glsl_type *type,
+ enum ir_intrinsic_id id);
ir_function_signature *_atomic_op2(const char *intrinsic,
builtin_available_predicate avail,
const glsl_type *type);
ir_function_signature *_atomic_intrinsic3(builtin_available_predicate avail,
- const glsl_type *type);
+ const glsl_type *type,
+ enum ir_intrinsic_id id);
ir_function_signature *_atomic_op3(const char *intrinsic,
builtin_available_predicate avail,
const glsl_type *type);
@@ -859,10 +865,12 @@ private:
const glsl_type *image_type,
const char *intrinsic_name,
unsigned num_arguments,
- unsigned flags);
+ unsigned flags,
+ enum ir_intrinsic_id id);
ir_function_signature *_memory_barrier_intrinsic(
- builtin_available_predicate avail);
+ builtin_available_predicate avail,
+ enum ir_intrinsic_id id);
ir_function_signature *_memory_barrier(const char *intrinsic_name,
builtin_available_predicate avail);
@@ -980,94 +988,128 @@ void
builtin_builder::create_intrinsics()
{
add_function("__intrinsic_atomic_read",
- _atomic_counter_intrinsic(shader_atomic_counters),
+ _atomic_counter_intrinsic(shader_atomic_counters,
+ ir_intrinsic_atomic_counter_read),
NULL);
add_function("__intrinsic_atomic_increment",
- _atomic_counter_intrinsic(shader_atomic_counters),
+ _atomic_counter_intrinsic(shader_atomic_counters,
+ ir_intrinsic_atomic_counter_increment),
NULL);
add_function("__intrinsic_atomic_predecrement",
- _atomic_counter_intrinsic(shader_atomic_counters),
+ _atomic_counter_intrinsic(shader_atomic_counters,
+ ir_intrinsic_atomic_counter_predecrement),
NULL);
add_function("__intrinsic_atomic_add",
_atomic_intrinsic2(buffer_atomics_supported,
- glsl_type::uint_type),
+ glsl_type::uint_type,
+ ir_intrinsic_generic_atomic_add),
_atomic_intrinsic2(buffer_atomics_supported,
- glsl_type::int_type),
- _atomic_counter_intrinsic1(shader_atomic_counter_ops),
+ glsl_type::int_type,
+ ir_intrinsic_generic_atomic_add),
+ _atomic_counter_intrinsic1(shader_atomic_counter_ops,
+ ir_intrinsic_atomic_counter_add),
NULL);
add_function("__intrinsic_atomic_sub",
- _atomic_counter_intrinsic1(shader_atomic_counter_ops),
+ _atomic_counter_intrinsic1(shader_atomic_counter_ops,
+ ir_intrinsic_atomic_counter_sub),
NULL);
add_function("__intrinsic_atomic_min",
_atomic_intrinsic2(buffer_atomics_supported,
- glsl_type::uint_type),
+ glsl_type::uint_type,
+ ir_intrinsic_generic_atomic_min),
_atomic_intrinsic2(buffer_atomics_supported,
- glsl_type::int_type),
- _atomic_counter_intrinsic1(shader_atomic_counter_ops),
+ glsl_type::int_type,
+ ir_intrinsic_generic_atomic_min),
+ _atomic_counter_intrinsic1(shader_atomic_counter_ops,
+ ir_intrinsic_atomic_counter_min),
NULL);
add_function("__intrinsic_atomic_max",
_atomic_intrinsic2(buffer_atomics_supported,
- glsl_type::uint_type),
+ glsl_type::uint_type,
+ ir_intrinsic_generic_atomic_max),
_atomic_intrinsic2(buffer_atomics_supported,
- glsl_type::int_type),
- _atomic_counter_intrinsic1(shader_atomic_counter_ops),
+ glsl_type::int_type,
+ ir_intrinsic_generic_atomic_max),
+ _atomic_counter_intrinsic1(shader_atomic_counter_ops,
+ ir_intrinsic_atomic_counter_max),
NULL);
add_function("__intrinsic_atomic_and",
_atomic_intrinsic2(buffer_atomics_supported,
- glsl_type::uint_type),
+ glsl_type::uint_type,
+ ir_intrinsic_generic_atomic_and),
_atomic_intrinsic2(buffer_atomics_supported,
- glsl_type::int_type),
- _atomic_counter_intrinsic1(shader_atomic_counter_ops),
+ glsl_type::int_type,
+ ir_intrinsic_generic_atomic_and),
+ _atomic_counter_intrinsic1(shader_atomic_counter_ops,
+ ir_intrinsic_atomic_counter_and),
NULL);
add_function("__intrinsic_atomic_or",
_atomic_intrinsic2(buffer_atomics_supported,
- glsl_type::uint_type),
+ glsl_type::uint_type,
+ ir_intrinsic_generic_atomic_or),
_atomic_intrinsic2(buffer_atomics_supported,
- glsl_type::int_type),
- _atomic_counter_intrinsic1(shader_atomic_counter_ops),
+ glsl_type::int_type,
+ ir_intrinsic_generic_atomic_or),
+ _atomic_counter_intrinsic1(shader_atomic_counter_ops,
+ ir_intrinsic_atomic_counter_or),
NULL);
add_function("__intrinsic_atomic_xor",
_atomic_intrinsic2(buffer_atomics_supported,
- glsl_type::uint_type),
+ glsl_type::uint_type,
+ ir_intrinsic_generic_atomic_xor),
_atomic_intrinsic2(buffer_atomics_supported,
- glsl_type::int_type),
- _atomic_counter_intrinsic1(shader_atomic_counter_ops),
+ glsl_type::int_type,
+ ir_intrinsic_generic_atomic_xor),
+ _atomic_counter_intrinsic1(shader_atomic_counter_ops,
+ ir_intrinsic_atomic_counter_xor),
NULL);
add_function("__intrinsic_atomic_exchange",
_atomic_intrinsic2(buffer_atomics_supported,
- glsl_type::uint_type),
+ glsl_type::uint_type,
+ ir_intrinsic_generic_atomic_exchange),
_atomic_intrinsic2(buffer_atomics_supported,
- glsl_type::int_type),
- _atomic_counter_intrinsic1(shader_atomic_counter_ops),
+ glsl_type::int_type,
+ ir_intrinsic_generic_atomic_exchange),
+ _atomic_counter_intrinsic1(shader_atomic_counter_ops,
+ ir_intrinsic_atomic_counter_exchange),
NULL);
add_function("__intrinsic_atomic_comp_swap",
_atomic_intrinsic3(buffer_atomics_supported,
- glsl_type::uint_type),
+ glsl_type::uint_type,
+ ir_intrinsic_generic_atomic_comp_swap),
_atomic_intrinsic3(buffer_atomics_supported,
- glsl_type::int_type),
- _atomic_counter_intrinsic2(shader_atomic_counter_ops),
+ glsl_type::int_type,
+ ir_intrinsic_generic_atomic_comp_swap),
+ _atomic_counter_intrinsic2(shader_atomic_counter_ops,
+ ir_intrinsic_atomic_counter_comp_swap),
NULL);
add_image_functions(false);
add_function("__intrinsic_memory_barrier",
- _memory_barrier_intrinsic(shader_image_load_store),
+ _memory_barrier_intrinsic(shader_image_load_store,
+ ir_intrinsic_memory_barrier),
NULL);
add_function("__intrinsic_group_memory_barrier",
- _memory_barrier_intrinsic(compute_shader),
+ _memory_barrier_intrinsic(compute_shader,
+ ir_intrinsic_group_memory_barrier),
NULL);
add_function("__intrinsic_memory_barrier_atomic_counter",
- _memory_barrier_intrinsic(compute_shader),
+ _memory_barrier_intrinsic(compute_shader,
+ ir_intrinsic_memory_barrier_atomic_counter),
NULL);
add_function("__intrinsic_memory_barrier_buffer",
- _memory_barrier_intrinsic(compute_shader),
+ _memory_barrier_intrinsic(compute_shader,
+ ir_intrinsic_memory_barrier_buffer),
NULL);
add_function("__intrinsic_memory_barrier_image",
- _memory_barrier_intrinsic(compute_shader),
+ _memory_barrier_intrinsic(compute_shader,
+ ir_intrinsic_memory_barrier_image),
NULL);
add_function("__intrinsic_memory_barrier_shared",
- _memory_barrier_intrinsic(compute_shader),
+ _memory_barrier_intrinsic(compute_shader,
+ ir_intrinsic_memory_barrier_shared),
NULL);
add_function("__intrinsic_shader_clock",
@@ -2985,7 +3027,8 @@ builtin_builder::add_image_function(const char *name,
const char *intrinsic_name,
image_prototype_ctr prototype,
unsigned num_arguments,
- unsigned flags)
+ unsigned flags,
+ enum ir_intrinsic_id intrinsic_id)
{
static const glsl_type *const types[] = {
glsl_type::image1D_type,
@@ -3031,7 +3074,7 @@ builtin_builder::add_image_function(const char *name,
(types[i]->sampler_dimensionality == GLSL_SAMPLER_DIM_MS ||
!(flags & IMAGE_FUNCTION_MS_ONLY)))
f->add_signature(_image(prototype, types[i], intrinsic_name,
- num_arguments, flags));
+ num_arguments, flags, intrinsic_id));
}
shader->symbols->add_function(f);
@@ -3047,7 +3090,8 @@ builtin_builder::add_image_functions(bool glsl)
&builtin_builder::_image_prototype, 0,
(flags | IMAGE_FUNCTION_HAS_VECTOR_DATA_TYPE |
IMAGE_FUNCTION_SUPPORTS_FLOAT_DATA_TYPE |
- IMAGE_FUNCTION_READ_ONLY));
+ IMAGE_FUNCTION_READ_ONLY),
+ ir_intrinsic_image_load);
add_image_function(glsl ? "imageStore" : "__intrinsic_image_store",
"__intrinsic_image_store",
@@ -3055,56 +3099,67 @@ builtin_builder::add_image_functions(bool glsl)
(flags | IMAGE_FUNCTION_RETURNS_VOID |
IMAGE_FUNCTION_HAS_VECTOR_DATA_TYPE |
IMAGE_FUNCTION_SUPPORTS_FLOAT_DATA_TYPE |
- IMAGE_FUNCTION_WRITE_ONLY));
+ IMAGE_FUNCTION_WRITE_ONLY),
+ ir_intrinsic_image_store);
const unsigned atom_flags = flags | IMAGE_FUNCTION_AVAIL_ATOMIC;
add_image_function(glsl ? "imageAtomicAdd" : "__intrinsic_image_atomic_add",
"__intrinsic_image_atomic_add",
- &builtin_builder::_image_prototype, 1, atom_flags);
+ &builtin_builder::_image_prototype, 1, atom_flags,
+ ir_intrinsic_image_atomic_add);
add_image_function(glsl ? "imageAtomicMin" : "__intrinsic_image_atomic_min",
"__intrinsic_image_atomic_min",
- &builtin_builder::_image_prototype, 1, atom_flags);
+ &builtin_builder::_image_prototype, 1, atom_flags,
+ ir_intrinsic_image_atomic_min);
add_image_function(glsl ? "imageAtomicMax" : "__intrinsic_image_atomic_max",
"__intrinsic_image_atomic_max",
- &builtin_builder::_image_prototype, 1, atom_flags);
+ &builtin_builder::_image_prototype, 1, atom_flags,
+ ir_intrinsic_image_atomic_max);
add_image_function(glsl ? "imageAtomicAnd" : "__intrinsic_image_atomic_and",
"__intrinsic_image_atomic_and",
- &builtin_builder::_image_prototype, 1, atom_flags);
+ &builtin_builder::_image_prototype, 1, atom_flags,
+ ir_intrinsic_image_atomic_and);
add_image_function(glsl ? "imageAtomicOr" : "__intrinsic_image_atomic_or",
"__intrinsic_image_atomic_or",
- &builtin_builder::_image_prototype, 1, atom_flags);
+ &builtin_builder::_image_prototype, 1, atom_flags,
+ ir_intrinsic_image_atomic_or);
add_image_function(glsl ? "imageAtomicXor" : "__intrinsic_image_atomic_xor",
"__intrinsic_image_atomic_xor",
- &builtin_builder::_image_prototype, 1, atom_flags);
+ &builtin_builder::_image_prototype, 1, atom_flags,
+ ir_intrinsic_image_atomic_xor);
add_image_function((glsl ? "imageAtomicExchange" :
"__intrinsic_image_atomic_exchange"),
"__intrinsic_image_atomic_exchange",
&builtin_builder::_image_prototype, 1,
(flags | IMAGE_FUNCTION_AVAIL_ATOMIC_EXCHANGE |
- IMAGE_FUNCTION_SUPPORTS_FLOAT_DATA_TYPE));
+ IMAGE_FUNCTION_SUPPORTS_FLOAT_DATA_TYPE),
+ ir_intrinsic_image_atomic_exchange);
add_image_function((glsl ? "imageAtomicCompSwap" :
"__intrinsic_image_atomic_comp_swap"),
"__intrinsic_image_atomic_comp_swap",
- &builtin_builder::_image_prototype, 2, atom_flags);
+ &builtin_builder::_image_prototype, 2, atom_flags,
+ ir_intrinsic_image_atomic_comp_swap);
add_image_function(glsl ? "imageSize" : "__intrinsic_image_size",
"__intrinsic_image_size",
&builtin_builder::_image_size_prototype, 1,
- flags | IMAGE_FUNCTION_SUPPORTS_FLOAT_DATA_TYPE);
+ flags | IMAGE_FUNCTION_SUPPORTS_FLOAT_DATA_TYPE,
+ ir_intrinsic_image_size);
add_image_function(glsl ? "imageSamples" : "__intrinsic_image_samples",
"__intrinsic_image_samples",
&builtin_builder::_image_samples_prototype, 1,
flags | IMAGE_FUNCTION_SUPPORTS_FLOAT_DATA_TYPE |
- IMAGE_FUNCTION_MS_ONLY);
+ IMAGE_FUNCTION_MS_ONLY,
+ ir_intrinsic_image_samples);
}
ir_variable *
@@ -3208,10 +3263,11 @@ builtin_builder::new_sig(const glsl_type *return_type,
ir_factory body(&sig->body, mem_ctx); \
sig->is_defined = true;
-#define MAKE_INTRINSIC(return_type, avail, ...) \
+#define MAKE_INTRINSIC(return_type, id, avail, ...) \
ir_function_signature *sig = \
new_sig(return_type, avail, __VA_ARGS__); \
- sig->is_intrinsic = true;
+ sig->is_intrinsic = true; \
+ sig->intrinsic_id = id;
ir_function_signature *
builtin_builder::unop(builtin_available_predicate avail,
@@ -5218,50 +5274,55 @@ builtin_builder::_interpolateAtSample(const glsl_type *type)
}
ir_function_signature *
-builtin_builder::_atomic_counter_intrinsic(builtin_available_predicate avail)
+builtin_builder::_atomic_counter_intrinsic(builtin_available_predicate avail,
+ enum ir_intrinsic_id id)
{
ir_variable *counter = in_var(glsl_type::atomic_uint_type, "counter");
- MAKE_INTRINSIC(glsl_type::uint_type, avail, 1, counter);
+ MAKE_INTRINSIC(glsl_type::uint_type, id, avail, 1, counter);
return sig;
}
ir_function_signature *
-builtin_builder::_atomic_counter_intrinsic1(builtin_available_predicate avail)
+builtin_builder::_atomic_counter_intrinsic1(builtin_available_predicate avail,
+ enum ir_intrinsic_id id)
{
ir_variable *counter = in_var(glsl_type::atomic_uint_type, "counter");
ir_variable *data = in_var(glsl_type::uint_type, "data");
- MAKE_INTRINSIC(glsl_type::uint_type, avail, 2, counter, data);
+ MAKE_INTRINSIC(glsl_type::uint_type, id, avail, 2, counter, data);
return sig;
}
ir_function_signature *
-builtin_builder::_atomic_counter_intrinsic2(builtin_available_predicate avail)
+builtin_builder::_atomic_counter_intrinsic2(builtin_available_predicate avail,
+ enum ir_intrinsic_id id)
{
ir_variable *counter = in_var(glsl_type::atomic_uint_type, "counter");
ir_variable *compare = in_var(glsl_type::uint_type, "compare");
ir_variable *data = in_var(glsl_type::uint_type, "data");
- MAKE_INTRINSIC(glsl_type::uint_type, avail, 3, counter, compare, data);
+ MAKE_INTRINSIC(glsl_type::uint_type, id, avail, 3, counter, compare, data);
return sig;
}
ir_function_signature *
builtin_builder::_atomic_intrinsic2(builtin_available_predicate avail,
- const glsl_type *type)
+ const glsl_type *type,
+ enum ir_intrinsic_id id)
{
ir_variable *atomic = in_var(type, "atomic");
ir_variable *data = in_var(type, "data");
- MAKE_INTRINSIC(type, avail, 2, atomic, data);
+ MAKE_INTRINSIC(type, id, avail, 2, atomic, data);
return sig;
}
ir_function_signature *
builtin_builder::_atomic_intrinsic3(builtin_available_predicate avail,
- const glsl_type *type)
+ const glsl_type *type,
+ enum ir_intrinsic_id id)
{
ir_variable *atomic = in_var(type, "atomic");
ir_variable *data1 = in_var(type, "data1");
ir_variable *data2 = in_var(type, "data2");
- MAKE_INTRINSIC(type, avail, 3, atomic, data1, data2);
+ MAKE_INTRINSIC(type, id, avail, 3, atomic, data1, data2);
return sig;
}
@@ -5518,7 +5579,8 @@ builtin_builder::_image(image_prototype_ctr prototype,
const glsl_type *image_type,
const char *intrinsic_name,
unsigned num_arguments,
- unsigned flags)
+ unsigned flags,
+ enum ir_intrinsic_id id)
{
ir_function_signature *sig = (this->*prototype)(image_type,
num_arguments, flags);
@@ -5540,15 +5602,17 @@ builtin_builder::_image(image_prototype_ctr prototype,
} else {
sig->is_intrinsic = true;
+ sig->intrinsic_id = id;
}
return sig;
}
ir_function_signature *
-builtin_builder::_memory_barrier_intrinsic(builtin_available_predicate avail)
+builtin_builder::_memory_barrier_intrinsic(builtin_available_predicate avail,
+ enum ir_intrinsic_id id)
{
- MAKE_INTRINSIC(glsl_type::void_type, avail, 0);
+ MAKE_INTRINSIC(glsl_type::void_type, id, avail, 0);
return sig;
}
@@ -5566,7 +5630,7 @@ ir_function_signature *
builtin_builder::_shader_clock_intrinsic(builtin_available_predicate avail,
const glsl_type *type)
{
- MAKE_INTRINSIC(type, avail, 0);
+ MAKE_INTRINSIC(type, ir_intrinsic_shader_clock, avail, 0);
return sig;
}
diff --git a/src/compiler/glsl/glsl_to_nir.cpp b/src/compiler/glsl/glsl_to_nir.cpp
index 3ac21966563..01dcac4ee93 100644
--- a/src/compiler/glsl/glsl_to_nir.cpp
+++ b/src/compiler/glsl/glsl_to_nir.cpp
@@ -610,50 +610,73 @@ nir_visitor::visit(ir_call *ir)
if (ir->callee->is_intrinsic) {
nir_intrinsic_op op;
if (strcmp(ir->callee_name(), "__intrinsic_atomic_read") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_atomic_counter_read);
op = nir_intrinsic_atomic_counter_read_var;
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_increment") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_atomic_counter_increment);
op = nir_intrinsic_atomic_counter_inc_var;
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_predecrement") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_atomic_counter_predecrement);
op = nir_intrinsic_atomic_counter_dec_var;
} else if (strcmp(ir->callee_name(), "__intrinsic_image_load") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_image_load);
op = nir_intrinsic_image_load;
} else if (strcmp(ir->callee_name(), "__intrinsic_image_store") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_image_store);
op = nir_intrinsic_image_store;
} else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_add") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_add);
op = nir_intrinsic_image_atomic_add;
} else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_min") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_min);
op = nir_intrinsic_image_atomic_min;
} else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_max") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_max);
op = nir_intrinsic_image_atomic_max;
} else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_and") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_and);
op = nir_intrinsic_image_atomic_and;
} else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_or") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_or);
op = nir_intrinsic_image_atomic_or;
} else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_xor") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_xor);
op = nir_intrinsic_image_atomic_xor;
} else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_exchange") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_exchange);
op = nir_intrinsic_image_atomic_exchange;
} else if (strcmp(ir->callee_name(), "__intrinsic_image_atomic_comp_swap") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_image_atomic_comp_swap);
op = nir_intrinsic_image_atomic_comp_swap;
} else if (strcmp(ir->callee_name(), "__intrinsic_memory_barrier") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_memory_barrier);
op = nir_intrinsic_memory_barrier;
} else if (strcmp(ir->callee_name(), "__intrinsic_image_size") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_image_size);
op = nir_intrinsic_image_size;
} else if (strcmp(ir->callee_name(), "__intrinsic_image_samples") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_image_samples);
op = nir_intrinsic_image_samples;
} else if (strcmp(ir->callee_name(), "__intrinsic_store_ssbo") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_store);
op = nir_intrinsic_store_ssbo;
} else if (strcmp(ir->callee_name(), "__intrinsic_load_ssbo") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_load);
op = nir_intrinsic_load_ssbo;
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_add_ssbo") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_add);
op = nir_intrinsic_ssbo_atomic_add;
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_and_ssbo") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_and);
op = nir_intrinsic_ssbo_atomic_and;
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_or_ssbo") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_or);
op = nir_intrinsic_ssbo_atomic_or;
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_xor_ssbo") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_xor);
op = nir_intrinsic_ssbo_atomic_xor;
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_min_ssbo") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_min);
assert(ir->return_deref);
if (ir->return_deref->type == glsl_type::int_type)
op = nir_intrinsic_ssbo_atomic_imin;
@@ -662,6 +685,7 @@ nir_visitor::visit(ir_call *ir)
else
unreachable("Invalid type");
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_max_ssbo") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_max);
assert(ir->return_deref);
if (ir->return_deref->type == glsl_type::int_type)
op = nir_intrinsic_ssbo_atomic_imax;
@@ -670,34 +694,49 @@ nir_visitor::visit(ir_call *ir)
else
unreachable("Invalid type");
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_exchange_ssbo") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_exchange);
op = nir_intrinsic_ssbo_atomic_exchange;
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_comp_swap_ssbo") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_ssbo_atomic_comp_swap);
op = nir_intrinsic_ssbo_atomic_comp_swap;
} else if (strcmp(ir->callee_name(), "__intrinsic_shader_clock") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_shader_clock);
op = nir_intrinsic_shader_clock;
} else if (strcmp(ir->callee_name(), "__intrinsic_group_memory_barrier") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_group_memory_barrier);
op = nir_intrinsic_group_memory_barrier;
} else if (strcmp(ir->callee_name(), "__intrinsic_memory_barrier_atomic_counter") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_memory_barrier_atomic_counter);
op = nir_intrinsic_memory_barrier_atomic_counter;
} else if (strcmp(ir->callee_name(), "__intrinsic_memory_barrier_buffer") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_memory_barrier_buffer);
op = nir_intrinsic_memory_barrier_buffer;
} else if (strcmp(ir->callee_name(), "__intrinsic_memory_barrier_image") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_memory_barrier_image);
op = nir_intrinsic_memory_barrier_image;
} else if (strcmp(ir->callee_name(), "__intrinsic_memory_barrier_shared") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_memory_barrier_shared);
op = nir_intrinsic_memory_barrier_shared;
} else if (strcmp(ir->callee_name(), "__intrinsic_load_shared") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_shared_load);
op = nir_intrinsic_load_shared;
} else if (strcmp(ir->callee_name(), "__intrinsic_store_shared") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_shared_store);
op = nir_intrinsic_store_shared;
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_add_shared") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_add);
op = nir_intrinsic_shared_atomic_add;
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_and_shared") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_and);
op = nir_intrinsic_shared_atomic_and;
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_or_shared") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_or);
op = nir_intrinsic_shared_atomic_or;
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_xor_shared") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_xor);
op = nir_intrinsic_shared_atomic_xor;
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_min_shared") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_min);
assert(ir->return_deref);
if (ir->return_deref->type == glsl_type::int_type)
op = nir_intrinsic_shared_atomic_imin;
@@ -706,6 +745,7 @@ nir_visitor::visit(ir_call *ir)
else
unreachable("Invalid type");
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_max_shared") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_max);
assert(ir->return_deref);
if (ir->return_deref->type == glsl_type::int_type)
op = nir_intrinsic_shared_atomic_imax;
@@ -714,8 +754,10 @@ nir_visitor::visit(ir_call *ir)
else
unreachable("Invalid type");
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_exchange_shared") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_exchange);
op = nir_intrinsic_shared_atomic_exchange;
} else if (strcmp(ir->callee_name(), "__intrinsic_atomic_comp_swap_shared") == 0) {
+ assert(ir->callee->intrinsic_id == ir_intrinsic_shared_atomic_comp_swap);
op = nir_intrinsic_shared_atomic_comp_swap;
} else {
unreachable("not reached");
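The asserts added above only cross-check the new IDs against the existing name-based dispatch; the NIR opcode is still selected by strcmp on the callee name. A possible follow-up (not part of this patch) could key the dispatch on the enum instead, along these lines:

   switch (ir->callee->intrinsic_id) {
   case ir_intrinsic_atomic_counter_read:
      op = nir_intrinsic_atomic_counter_read_var;
      break;
   case ir_intrinsic_memory_barrier:
      op = nir_intrinsic_memory_barrier;
      break;
   /* ... one case per ir_intrinsic_* value ... */
   default:
      unreachable("not reached");
   }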
diff --git a/src/compiler/glsl/ir.cpp b/src/compiler/glsl/ir.cpp
index cb195b21ad6..e633018ff68 100644
--- a/src/compiler/glsl/ir.cpp
+++ b/src/compiler/glsl/ir.cpp
@@ -1618,7 +1618,7 @@ ir_function_signature::ir_function_signature(const glsl_type *return_type,
builtin_available_predicate b)
: ir_instruction(ir_type_function_signature),
return_type(return_type), is_defined(false), is_intrinsic(false),
- builtin_avail(b), _function(NULL)
+ intrinsic_id(ir_intrinsic_invalid), builtin_avail(b), _function(NULL)
{
this->origin = NULL;
}
diff --git a/src/compiler/glsl/ir.h b/src/compiler/glsl/ir.h
index a3b1a50229f..7333a53d215 100644
--- a/src/compiler/glsl/ir.h
+++ b/src/compiler/glsl/ir.h
@@ -1013,6 +1013,91 @@ public:
*/
typedef bool (*builtin_available_predicate)(const _mesa_glsl_parse_state *);
+#define MAKE_INTRINSIC_FOR_TYPE(op, t) \
+ ir_intrinsic_generic_ ## op - ir_intrinsic_generic_load + ir_intrinsic_ ## t ## _ ## load
+
+#define MAP_INTRINSIC_TO_TYPE(i, t) \
+ ir_intrinsic_id(int(i) - int(ir_intrinsic_generic_load) + int(ir_intrinsic_ ## t ## _ ## load))
+
+enum ir_intrinsic_id {
+ ir_intrinsic_invalid = 0,
+
+ /**
+ * \name Generic intrinsics
+ *
+ * Each of these intrinsics has a specific version for shared variables and
+ * SSBOs.
+ */
+ /*@{*/
+ ir_intrinsic_generic_load,
+ ir_intrinsic_generic_store,
+ ir_intrinsic_generic_atomic_add,
+ ir_intrinsic_generic_atomic_and,
+ ir_intrinsic_generic_atomic_or,
+ ir_intrinsic_generic_atomic_xor,
+ ir_intrinsic_generic_atomic_min,
+ ir_intrinsic_generic_atomic_max,
+ ir_intrinsic_generic_atomic_exchange,
+ ir_intrinsic_generic_atomic_comp_swap,
+ /*@}*/
+
+ ir_intrinsic_atomic_counter_read,
+ ir_intrinsic_atomic_counter_increment,
+ ir_intrinsic_atomic_counter_predecrement,
+ ir_intrinsic_atomic_counter_add,
+ ir_intrinsic_atomic_counter_sub,
+ ir_intrinsic_atomic_counter_and,
+ ir_intrinsic_atomic_counter_or,
+ ir_intrinsic_atomic_counter_xor,
+ ir_intrinsic_atomic_counter_min,
+ ir_intrinsic_atomic_counter_max,
+ ir_intrinsic_atomic_counter_exchange,
+ ir_intrinsic_atomic_counter_comp_swap,
+
+ ir_intrinsic_image_load,
+ ir_intrinsic_image_store,
+ ir_intrinsic_image_atomic_add,
+ ir_intrinsic_image_atomic_and,
+ ir_intrinsic_image_atomic_or,
+ ir_intrinsic_image_atomic_xor,
+ ir_intrinsic_image_atomic_min,
+ ir_intrinsic_image_atomic_max,
+ ir_intrinsic_image_atomic_exchange,
+ ir_intrinsic_image_atomic_comp_swap,
+ ir_intrinsic_image_size,
+ ir_intrinsic_image_samples,
+
+ ir_intrinsic_ssbo_load,
+ ir_intrinsic_ssbo_store = MAKE_INTRINSIC_FOR_TYPE(store, ssbo),
+ ir_intrinsic_ssbo_atomic_add = MAKE_INTRINSIC_FOR_TYPE(atomic_add, ssbo),
+ ir_intrinsic_ssbo_atomic_and = MAKE_INTRINSIC_FOR_TYPE(atomic_and, ssbo),
+ ir_intrinsic_ssbo_atomic_or = MAKE_INTRINSIC_FOR_TYPE(atomic_or, ssbo),
+ ir_intrinsic_ssbo_atomic_xor = MAKE_INTRINSIC_FOR_TYPE(atomic_xor, ssbo),
+ ir_intrinsic_ssbo_atomic_min = MAKE_INTRINSIC_FOR_TYPE(atomic_min, ssbo),
+ ir_intrinsic_ssbo_atomic_max = MAKE_INTRINSIC_FOR_TYPE(atomic_max, ssbo),
+ ir_intrinsic_ssbo_atomic_exchange = MAKE_INTRINSIC_FOR_TYPE(atomic_exchange, ssbo),
+ ir_intrinsic_ssbo_atomic_comp_swap = MAKE_INTRINSIC_FOR_TYPE(atomic_comp_swap, ssbo),
+
+ ir_intrinsic_memory_barrier,
+ ir_intrinsic_shader_clock,
+ ir_intrinsic_group_memory_barrier,
+ ir_intrinsic_memory_barrier_atomic_counter,
+ ir_intrinsic_memory_barrier_buffer,
+ ir_intrinsic_memory_barrier_image,
+ ir_intrinsic_memory_barrier_shared,
+
+ ir_intrinsic_shared_load,
+ ir_intrinsic_shared_store = MAKE_INTRINSIC_FOR_TYPE(store, shared),
+ ir_intrinsic_shared_atomic_add = MAKE_INTRINSIC_FOR_TYPE(atomic_add, shared),
+ ir_intrinsic_shared_atomic_and = MAKE_INTRINSIC_FOR_TYPE(atomic_and, shared),
+ ir_intrinsic_shared_atomic_or = MAKE_INTRINSIC_FOR_TYPE(atomic_or, shared),
+ ir_intrinsic_shared_atomic_xor = MAKE_INTRINSIC_FOR_TYPE(atomic_xor, shared),
+ ir_intrinsic_shared_atomic_min = MAKE_INTRINSIC_FOR_TYPE(atomic_min, shared),
+ ir_intrinsic_shared_atomic_max = MAKE_INTRINSIC_FOR_TYPE(atomic_max, shared),
+ ir_intrinsic_shared_atomic_exchange = MAKE_INTRINSIC_FOR_TYPE(atomic_exchange, shared),
+ ir_intrinsic_shared_atomic_comp_swap = MAKE_INTRINSIC_FOR_TYPE(atomic_comp_swap, shared),
+};
+
/*@{*/
/**
* The representation of a function instance; may be the full definition or
@@ -1108,6 +1193,9 @@ public:
*/
bool is_intrinsic;
+ /** Identifier for this intrinsic. */
+ enum ir_intrinsic_id intrinsic_id;
+
/** Whether or not a built-in is available for this shader. */
bool is_builtin_available(const _mesa_glsl_parse_state *state) const;
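The MAKE_INTRINSIC_FOR_TYPE / MAP_INTRINSIC_TO_TYPE arithmetic above relies on the ssbo and shared blocks keeping the same relative order as the generic block, each starting at its *_load value. A minimal illustration (not in the patch; C++11 static_assert is assumed here purely for the example):

   static_assert(MAP_INTRINSIC_TO_TYPE(ir_intrinsic_generic_atomic_add, ssbo) ==
                 ir_intrinsic_ssbo_atomic_add,
                 "ssbo intrinsics must mirror the generic block");
   static_assert(MAP_INTRINSIC_TO_TYPE(ir_intrinsic_generic_atomic_comp_swap, shared) ==
                 ir_intrinsic_shared_atomic_comp_swap,
                 "shared intrinsics must mirror the generic block");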
diff --git a/src/compiler/glsl/link_functions.cpp b/src/compiler/glsl/link_functions.cpp
index e4f77be717f..00dc345d751 100644
--- a/src/compiler/glsl/link_functions.cpp
+++ b/src/compiler/glsl/link_functions.cpp
@@ -162,6 +162,7 @@ public:
linked_sig->replace_parameters(&formal_parameters);
linked_sig->is_intrinsic = sig->is_intrinsic;
+ linked_sig->intrinsic_id = sig->intrinsic_id;
if (sig->is_defined) {
foreach_in_list(const ir_instruction, original, &sig->body) {
diff --git a/src/compiler/glsl/lower_shared_reference.cpp b/src/compiler/glsl/lower_shared_reference.cpp
index 091f4027320..49ee3774c68 100644
--- a/src/compiler/glsl/lower_shared_reference.cpp
+++ b/src/compiler/glsl/lower_shared_reference.cpp
@@ -285,6 +285,7 @@ lower_shared_reference_visitor::shared_store(void *mem_ctx,
assert(sig);
sig->replace_parameters(&sig_params);
sig->is_intrinsic = true;
+ sig->intrinsic_id = ir_intrinsic_shared_store;
ir_function *f = new(mem_ctx) ir_function("__intrinsic_store_shared");
f->add_signature(sig);
@@ -312,6 +313,7 @@ lower_shared_reference_visitor::shared_load(void *mem_ctx,
assert(sig);
sig->replace_parameters(&sig_params);
sig->is_intrinsic = true;
+ sig->intrinsic_id = ir_intrinsic_shared_load;
ir_function *f = new(mem_ctx) ir_function("__intrinsic_load_shared");
f->add_signature(sig);
@@ -406,6 +408,10 @@ lower_shared_reference_visitor::lower_shared_atomic_intrinsic(ir_call *ir)
sig->replace_parameters(&sig_params);
sig->is_intrinsic = true;
+ assert(ir->callee->intrinsic_id >= ir_intrinsic_generic_load);
+ assert(ir->callee->intrinsic_id <= ir_intrinsic_generic_atomic_comp_swap);
+ sig->intrinsic_id = MAP_INTRINSIC_TO_TYPE(ir->callee->intrinsic_id, shared);
+
char func_name[64];
sprintf(func_name, "%s_shared", ir->callee_name());
ir_function *f = new(mem_ctx) ir_function(func_name);
diff --git a/src/compiler/glsl/lower_ubo_reference.cpp b/src/compiler/glsl/lower_ubo_reference.cpp
index 1ed281c488a..0e6a02d868c 100644
--- a/src/compiler/glsl/lower_ubo_reference.cpp
+++ b/src/compiler/glsl/lower_ubo_reference.cpp
@@ -454,6 +454,7 @@ lower_ubo_reference_visitor::ssbo_store(void *mem_ctx,
assert(sig);
sig->replace_parameters(&sig_params);
sig->is_intrinsic = true;
+ sig->intrinsic_id = ir_intrinsic_ssbo_store;
ir_function *f = new(mem_ctx) ir_function("__intrinsic_store_ssbo");
f->add_signature(sig);
@@ -491,6 +492,7 @@ lower_ubo_reference_visitor::ssbo_load(void *mem_ctx,
assert(sig);
sig->replace_parameters(&sig_params);
sig->is_intrinsic = true;
+ sig->intrinsic_id = ir_intrinsic_ssbo_load;
ir_function *f = new(mem_ctx) ir_function("__intrinsic_load_ssbo");
f->add_signature(sig);
@@ -1018,6 +1020,10 @@ lower_ubo_reference_visitor::lower_ssbo_atomic_intrinsic(ir_call *ir)
sig->replace_parameters(&sig_params);
sig->is_intrinsic = true;
+ assert(ir->callee->intrinsic_id >= ir_intrinsic_generic_load);
+ assert(ir->callee->intrinsic_id <= ir_intrinsic_generic_atomic_comp_swap);
+ sig->intrinsic_id = MAP_INTRINSIC_TO_TYPE(ir->callee->intrinsic_id, ssbo);
+
char func_name[64];
sprintf(func_name, "%s_ssbo", ir->callee_name());
ir_function *f = new(mem_ctx) ir_function(func_name);