diff options
author | Roland Scheidegger <[email protected]> | 2017-09-08 02:59:11 +0200 |
---|---|---|
committer | Roland Scheidegger <[email protected]> | 2017-09-09 03:06:10 +0200 |
commit | 57a341b0a94d37e2aee5380703d171c422d8550e (patch) | |
tree | c23dd782cb5a7c140b47ab62409b7be8e00a8708 /src/gallium/drivers | |
parent | 772f475351d63067f8fd0251e2fe6a33aedf1f56 (diff) |
llvmpipe, draw: improve shader cache debugging
With GALLIVM_DEBUG=perf set, output the relevant stats for shader cache usage
whenever we have to evict shader variants.
Also add some output when shaders are deleted (but not with the perf setting
to keep this one less noisy).
While here, also don't delete that many shaders when we have to evict. For fs,
there's potentially some cost if we have to evict due to the required flush,
however certainly shader recompiles have a high cost too so I don't think
evicting one quarter of the cache size makes sense (and, if we're evicting
based on IR count, we probably typically evict only very few or just one
shader too). For vs, I'm not sure it even makes sense to evict more than
one shader at a time, but keep the logic the same for now.
Reviewed-by: Jose Fonseca <[email protected]>
Reviewed-by: Brian Paul <[email protected]>
Diffstat (limited to 'src/gallium/drivers')
-rw-r--r-- | src/gallium/drivers/llvmpipe/lp_state_fs.c | 25 |
1 file changed, 16 insertions, 9 deletions
diff --git a/src/gallium/drivers/llvmpipe/lp_state_fs.c b/src/gallium/drivers/llvmpipe/lp_state_fs.c
index df3557a7d6f..9a43f01738b 100644
--- a/src/gallium/drivers/llvmpipe/lp_state_fs.c
+++ b/src/gallium/drivers/llvmpipe/lp_state_fs.c
@@ -2993,14 +2993,13 @@ void
 llvmpipe_remove_shader_variant(struct llvmpipe_context *lp,
                                struct lp_fragment_shader_variant *variant)
 {
-   if (gallivm_debug & GALLIVM_DEBUG_IR) {
-      debug_printf("llvmpipe: del fs #%u var #%u v created #%u v cached"
-                   " #%u v total cached #%u\n",
-                   variant->shader->no,
-                   variant->no,
+   if ((LP_DEBUG & DEBUG_FS) || (gallivm_debug & GALLIVM_DEBUG_IR)) {
+      debug_printf("llvmpipe: del fs #%u var %u v created %u v cached %u "
+                   "v total cached %u inst %u total inst %u\n",
+                   variant->shader->no, variant->no,
                    variant->shader->variants_created,
                    variant->shader->variants_cached,
-                   lp->nr_fs_variants);
+                   lp->nr_fs_variants, variant->nr_instrs, lp->nr_fs_instrs);
    }

    gallivm_destroy(variant->gallivm);
@@ -3357,7 +3356,7 @@ llvmpipe_update_fs(struct llvmpipe_context *lp)
    unsigned i;
    unsigned variants_to_cull;

-   if (0) {
+   if (LP_DEBUG & DEBUG_FS) {
       debug_printf("%u variants,\t%u instrs,\t%u instrs/variant\n",
                    lp->nr_fs_variants,
                    lp->nr_fs_instrs,
@@ -3365,14 +3364,22 @@ llvmpipe_update_fs(struct llvmpipe_context *lp)
    }

    /* First, check if we've exceeded the max number of shader variants.
-    * If so, free 25% of them (the least recently used ones).
+    * If so, free 6.25% of them (the least recently used ones).
     */
-   variants_to_cull = lp->nr_fs_variants >= LP_MAX_SHADER_VARIANTS ? LP_MAX_SHADER_VARIANTS / 4 : 0;
+   variants_to_cull = lp->nr_fs_variants >= LP_MAX_SHADER_VARIANTS ?
+      LP_MAX_SHADER_VARIANTS / 16 : 0;

    if (variants_to_cull ||
        lp->nr_fs_instrs >= LP_MAX_SHADER_INSTRUCTIONS) {
       struct pipe_context *pipe = &lp->pipe;

+      if (gallivm_debug & GALLIVM_DEBUG_PERF) {
+         debug_printf("Evicting FS: %u fs variants,\t%u total variants,"
+                      "\t%u instrs,\t%u instrs/variant\n",
+                      shader->variants_cached,
+                      lp->nr_fs_variants, lp->nr_fs_instrs,
+                      lp->nr_fs_instrs / lp->nr_fs_variants);
+      }
+
       /*
        * XXX: we need to flush the context until we have some sort of
        * reference counting in fragment shaders as they may still be binned