author    Eric Anholt <[email protected]>  2020-05-29 10:21:02 -0700
committer Eric Anholt <[email protected]>  2020-06-05 13:36:29 -0700
commit    a25347ab92bbe66c0f0c8def4d5f413f396b228d (patch)
tree      ecc6a7d0fca88156488aaff46df6e7c73e78a5c0 /src/freedreno/ir3
parent    9e58ab09ffbd18355868000b2da90a5cd73b5c09 (diff)
freedreno/ir3: Stop shifting UBO 1 down to be UBO 0.
It turns out the GL uniforms file is larger than the hardware constant file, so we need to limit how many UBOs we lower to constbuf loads. To do actual UBO loads, we'll need to be able to upload UBO 0's pointer or descriptor.

No difference on nohw 1 UBO update drawoverhead case (n=35).

Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/5273>
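As a rough illustration of the new bookkeeping (a minimal standalone sketch, not the actual Mesa code; the helper name, the MAX2 macro and the sample block indices below are assumptions made up for this example): instead of shifting every load_ubo block index down by one, the pass now leaves the index alone and only records the highest bindful UBO still accessed through load_ubo, so the driver knows how many UBO descriptors the fast path must emit.

    #include <stdio.h>

    #define MAX2(a, b) ((a) > (b) ? (a) : (b))

    /* Hypothetical stand-in for track_ubo_use(): record that UBO "block"
     * is still read through load_ubo and bump the running count. */
    static void track_ubo_use_sketch(int block, int *num_ubos)
    {
       *num_ubos = MAX2(*num_ubos, block + 1);
    }

    int main(void)
    {
       /* Made-up set of UBO indices left as load_ubo after lowering. */
       const int blocks[] = { 1, 3, 2 };
       int num_ubos = 0;

       for (unsigned i = 0; i < sizeof(blocks) / sizeof(blocks[0]); i++)
          track_ubo_use_sketch(blocks[i], &num_ubos);

       /* With blocks 1..3 still in use, descriptors 0..3 are needed,
        * so this prints "num_ubos = 4". */
       printf("num_ubos = %d\n", num_ubos);
       return 0;
    }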
Diffstat (limited to 'src/freedreno/ir3')
-rw-r--r--  src/freedreno/ir3/ir3_nir_analyze_ubo_ranges.c  27
1 file changed, 9 insertions(+), 18 deletions(-)
diff --git a/src/freedreno/ir3/ir3_nir_analyze_ubo_ranges.c b/src/freedreno/ir3/ir3_nir_analyze_ubo_ranges.c
index ba72e7351ef..a4733cdb2ef 100644
--- a/src/freedreno/ir3/ir3_nir_analyze_ubo_ranges.c
+++ b/src/freedreno/ir3/ir3_nir_analyze_ubo_ranges.c
@@ -177,32 +177,23 @@ handle_partial_const(nir_builder *b, nir_ssa_def **srcp, int *offp)
}
}
+/* Tracks the maximum bindful UBO accessed so that we reduce the UBO
+ * descriptors emitted in the fast path for GL.
+ */
static void
-lower_ubo_block_decrement(nir_intrinsic_instr *instr, nir_builder *b, int *num_ubos)
+track_ubo_use(nir_intrinsic_instr *instr, nir_builder *b, int *num_ubos)
{
- /* Skip shifting things for turnip's bindless resources. */
if (ir3_bindless_resource(instr->src[0])) {
assert(!b->shader->info.first_ubo_is_default_ubo); /* only set for GL */
return;
}
- /* Shift all GL nir_intrinsic_load_ubo UBO indices down by 1, because we
- * have lowered block 0 off of load_ubo to constbuf and ir3_const only
- * uploads pointers for block 1-N. This is also where we update the NIR
- * num_ubos to reflect the UBOs that remain in use after others got
- * lowered to constbuf access.
- */
if (nir_src_is_const(instr->src[0])) {
- int block = nir_src_as_uint(instr->src[0]) - 1;
+ int block = nir_src_as_uint(instr->src[0]);
*num_ubos = MAX2(*num_ubos, block + 1);
} else {
- *num_ubos = b->shader->info.num_ubos - 1;
+ *num_ubos = b->shader->info.num_ubos;
}
-
- nir_ssa_def *old_idx = nir_ssa_for_src(b, instr->src[0], 1);
- nir_ssa_def *new_idx = nir_iadd_imm(b, old_idx, -1);
- nir_instr_rewrite_src(&instr->instr, &instr->src[0],
- nir_src_for_ssa(new_idx));
}
static void
@@ -217,7 +208,7 @@ lower_ubo_load_to_uniform(nir_intrinsic_instr *instr, nir_builder *b,
*/
struct ir3_ubo_range *range = get_existing_range(instr, state, false);
if (!range) {
- lower_ubo_block_decrement(instr, b, num_ubos);
+ track_ubo_use(instr, b, num_ubos);
return;
}
@@ -227,7 +218,7 @@ lower_ubo_load_to_uniform(nir_intrinsic_instr *instr, nir_builder *b,
* access, so for now just fall back to pulling.
*/
if (!nir_src_is_const(instr->src[1])) {
- lower_ubo_block_decrement(instr, b, num_ubos);
+ track_ubo_use(instr, b, num_ubos);
return;
}
@@ -236,7 +227,7 @@ lower_ubo_load_to_uniform(nir_intrinsic_instr *instr, nir_builder *b,
*/
const struct ir3_ubo_range r = get_ubo_load_range(instr, alignment);
if (!(range->start <= r.start && r.end <= range->end)) {
- lower_ubo_block_decrement(instr, b, num_ubos);
+ track_ubo_use(instr, b, num_ubos);
return;
}
}