author     Jason Ekstrand <[email protected]>   2016-05-18 20:28:07 -0700
committer  Jason Ekstrand <[email protected]>   2016-05-23 19:12:34 -0700
commit     27b9481d03959a7bee6d906c62b4a519b6b1dc38 (patch)
tree       4578aca510eef5c8e2e27a00f83d7791377e3dd4 /src/compiler/glsl/lower_ubo_reference.cpp
parent     ac242aac3d2c38fd6843d85363a455271b1348c2 (diff)
glsl: Add an option to clamp block indices when lowering UBO/SSBOs
This prevents array overflow when the block is actually an array of UBOs
or SSBOs.  On some hardware such as i965, such overflows can cause GPU
hangs.

Reviewed-by: Ian Romanick <[email protected]>
Reviewed-by: Kenneth Graunke <[email protected]>
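For illustration only, the arithmetic that the patch's new clamp_to_array_bounds()
helper builds as IR can be sketched in plain C++. The sketch below is not part of
the patch; clamp_block_index() and the sample values are hypothetical, and the real
pass emits equivalent IR through the max2()/min2() builders instead of evaluating
integers directly:

    /* Standalone sketch of the clamping rule applied to a non-constant
     * block index; hypothetical, mirrors clamp_to_array_bounds(). */
    #include <algorithm>
    #include <cassert>
    #include <cstdio>

    /* Clamp a (possibly signed) block index into [0, array_size - 1]. */
    static int
    clamp_block_index(int index, unsigned array_size)
    {
       assert(array_size > 0);

       const int max_index = (int) array_size - 1;

       /* Signed indices are first raised to zero (the max2() step)... */
       if (index < 0)
          index = 0;

       /* ...and every index is lowered to the last valid element (min2()). */
       return std::min(index, max_index);
    }

    int
    main()
    {
       /* With a 4-element UBO array, an out-of-range index lands on the
        * last element instead of reading past the end of the array. */
       printf("%d\n", clamp_block_index(7, 4));   /* prints 3 */
       printf("%d\n", clamp_block_index(-2, 4));  /* prints 0 */
       return 0;
    }

Note the same asymmetry as in the patch: the lower clamp is only needed for signed
index types (the GLSL_TYPE_INT check), since an unsigned index can never be
negative.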
Diffstat (limited to 'src/compiler/glsl/lower_ubo_reference.cpp')
-rw-r--r--  src/compiler/glsl/lower_ubo_reference.cpp  36
1 file changed, 32 insertions, 4 deletions
diff --git a/src/compiler/glsl/lower_ubo_reference.cpp b/src/compiler/glsl/lower_ubo_reference.cpp
index 1a0140fad15..749deedcd1c 100644
--- a/src/compiler/glsl/lower_ubo_reference.cpp
+++ b/src/compiler/glsl/lower_ubo_reference.cpp
@@ -44,8 +44,10 @@ namespace {
class lower_ubo_reference_visitor :
public lower_buffer_access::lower_buffer_access {
public:
- lower_ubo_reference_visitor(struct gl_shader *shader)
- : shader(shader), struct_field(NULL), variable(NULL)
+ lower_ubo_reference_visitor(struct gl_shader *shader,
+ bool clamp_block_indices)
+ : shader(shader), clamp_block_indices(clamp_block_indices),
+ struct_field(NULL), variable(NULL)
{
}
@@ -104,6 +106,7 @@ public:
ir_visitor_status visit_enter(ir_call *ir);
struct gl_shader *shader;
+ bool clamp_block_indices;
struct gl_uniform_buffer_variable *ubo_var;
const struct glsl_struct_field *struct_field;
ir_variable *variable;
@@ -242,6 +245,26 @@ interface_field_name(void *mem_ctx, char *base_name, ir_rvalue *d,
return NULL;
}
+static ir_rvalue *
+clamp_to_array_bounds(void *mem_ctx, ir_rvalue *index, const glsl_type *type)
+{
+ assert(type->is_array());
+
+ const unsigned array_size = type->arrays_of_arrays_size();
+
+ ir_constant *max_index = new(mem_ctx) ir_constant(array_size - 1);
+ max_index->type = index->type;
+
+ ir_constant *zero = new(mem_ctx) ir_constant(0);
+ zero->type = index->type;
+
+ if (index->type->base_type == GLSL_TYPE_INT)
+ index = max2(index, zero);
+ index = min2(index, max_index);
+
+ return index;
+}
+
void
lower_ubo_reference_visitor::setup_for_load_or_store(void *mem_ctx,
ir_variable *var,
@@ -258,6 +281,11 @@ lower_ubo_reference_visitor::setup_for_load_or_store(void *mem_ctx,
interface_field_name(mem_ctx, (char *) var->get_interface_type()->name,
deref, &nonconst_block_index);
+ if (nonconst_block_index && clamp_block_indices) {
+ nonconst_block_index =
+ clamp_to_array_bounds(mem_ctx, nonconst_block_index, var->type);
+ }
+
/* Locate the block by interface name */
unsigned num_blocks;
struct gl_uniform_block **blocks;
@@ -1062,9 +1090,9 @@ lower_ubo_reference_visitor::visit_enter(ir_call *ir)
} /* unnamed namespace */
void
-lower_ubo_reference(struct gl_shader *shader)
+lower_ubo_reference(struct gl_shader *shader, bool clamp_block_indices)
{
- lower_ubo_reference_visitor v(shader);
+ lower_ubo_reference_visitor v(shader, clamp_block_indices);
/* Loop over the instructions lowering references, because we take
* a deref of a UBO array using a UBO dereference as the index will