diff options
author | Chia-I Wu <[email protected]> | 2014-09-06 12:20:55 +0800 |
---|---|---|
committer | Chia-I Wu <[email protected]> | 2014-09-09 13:31:37 +0800 |
commit | d2acd673135318585fb956a2723a9a1ba89577d7 (patch) | |
tree | 97f5be7cf4bde77d96e7d95b454ddf7f3e6602f4 /src/gallium/drivers/ilo/ilo_shader.c | |
parent | 55f80a3290cb0e07db780265369eb504573b4e62 (diff) |
ilo: use ilo_builder for kernels and STATE_BASE_ADDRESS
Remove instruction buffer management from ilo_3d and adapt ilo_shader_cache to
upload kernels to ilo_builder. To be able to do that, we also let ilo_builder
manage STATE_BASE_ADDRESS.
Diffstat (limited to 'src/gallium/drivers/ilo/ilo_shader.c')
-rw-r--r-- | src/gallium/drivers/ilo/ilo_shader.c | 130 |
1 file changed, 28 insertions, 102 deletions
diff --git a/src/gallium/drivers/ilo/ilo_shader.c b/src/gallium/drivers/ilo/ilo_shader.c index b7e7a0a1439..ee796da7788 100644 --- a/src/gallium/drivers/ilo/ilo_shader.c +++ b/src/gallium/drivers/ilo/ilo_shader.c @@ -30,6 +30,7 @@ #include "intel_winsys.h" #include "shader/ilo_shader_internal.h" +#include "ilo_builder.h" #include "ilo_state.h" #include "ilo_shader.h" @@ -107,128 +108,53 @@ ilo_shader_cache_notify_change(struct ilo_shader_cache *shc, } /** - * Upload a managed shader to the bo. + * Upload managed shaders to the bo. Only shaders that are changed or added + * after the last upload are uploaded. */ -static int -ilo_shader_cache_upload_shader(struct ilo_shader_cache *shc, - struct ilo_shader_state *shader, - struct intel_bo *bo, unsigned offset, - bool incremental) +void +ilo_shader_cache_upload(struct ilo_shader_cache *shc, + struct ilo_builder *builder) { - const unsigned base = offset; - struct ilo_shader *sh; - - LIST_FOR_EACH_ENTRY(sh, &shader->variants, list) { - int err; - - if (incremental && sh->uploaded) - continue; - - /* kernels must be aligned to 64-byte */ - offset = align(offset, 64); - - err = intel_bo_pwrite(bo, offset, sh->kernel_size, sh->kernel); - if (unlikely(err)) - return -1; - - sh->uploaded = true; - sh->cache_offset = offset; - - offset += sh->kernel_size; - } + struct ilo_shader_state *shader, *next; - return (int) (offset - base); -} + LIST_FOR_EACH_ENTRY_SAFE(shader, next, &shc->changed, list) { + struct ilo_shader *sh; -/** - * Similar to ilo_shader_cache_upload(), except no upload happens. 
- */ -static int -ilo_shader_cache_get_upload_size(struct ilo_shader_cache *shc, - unsigned offset, - bool incremental) -{ - const unsigned base = offset; - struct ilo_shader_state *shader; + LIST_FOR_EACH_ENTRY(sh, &shader->variants, list) { + if (sh->uploaded) + continue; - if (!incremental) { - LIST_FOR_EACH_ENTRY(shader, &shc->shaders, list) { - struct ilo_shader *sh; + sh->cache_offset = ilo_builder_instruction_write(builder, + sh->kernel_size, sh->kernel); - /* see ilo_shader_cache_upload_shader() */ - LIST_FOR_EACH_ENTRY(sh, &shader->variants, list) { - if (!incremental || !sh->uploaded) - offset = align(offset, 64) + sh->kernel_size; - } + sh->uploaded = true; } - } - - LIST_FOR_EACH_ENTRY(shader, &shc->changed, list) { - struct ilo_shader *sh; - /* see ilo_shader_cache_upload_shader() */ - LIST_FOR_EACH_ENTRY(sh, &shader->variants, list) { - if (!incremental || !sh->uploaded) - offset = align(offset, 64) + sh->kernel_size; - } + list_del(&shader->list); + list_add(&shader->list, &shc->shaders); } - - /* - * From the Sandy Bridge PRM, volume 4 part 2, page 112: - * - * "Due to prefetch of the instruction stream, the EUs may attempt to - * access up to 8 instructions (128 bytes) beyond the end of the - * kernel program - possibly into the next memory page. Although - * these instructions will not be executed, software must account for - * the prefetch in order to avoid invalid page access faults." - */ - if (offset > base) - offset += 128; - - return (int) (offset - base); } /** - * Upload managed shaders to the bo. When incremental is true, only shaders - * that are changed or added after the last upload are uploaded. + * Invalidate all shaders so that they get uploaded in next + * ilo_shader_cache_upload(). 
*/ -int -ilo_shader_cache_upload(struct ilo_shader_cache *shc, - struct intel_bo *bo, unsigned offset, - bool incremental) +void +ilo_shader_cache_invalidate(struct ilo_shader_cache *shc) { struct ilo_shader_state *shader, *next; - int size = 0, s; - if (!bo) - return ilo_shader_cache_get_upload_size(shc, offset, incremental); - - if (!incremental) { - LIST_FOR_EACH_ENTRY(shader, &shc->shaders, list) { - s = ilo_shader_cache_upload_shader(shc, shader, - bo, offset, incremental); - if (unlikely(s < 0)) - return s; - - size += s; - offset += s; - } + LIST_FOR_EACH_ENTRY_SAFE(shader, next, &shc->shaders, list) { + list_del(&shader->list); + list_add(&shader->list, &shc->changed); } - LIST_FOR_EACH_ENTRY_SAFE(shader, next, &shc->changed, list) { - s = ilo_shader_cache_upload_shader(shc, shader, - bo, offset, incremental); - if (unlikely(s < 0)) - return s; - - size += s; - offset += s; + LIST_FOR_EACH_ENTRY(shader, &shc->changed, list) { + struct ilo_shader *sh; - list_del(&shader->list); - list_add(&shader->list, &shc->shaders); + LIST_FOR_EACH_ENTRY(sh, &shader->variants, list) + sh->uploaded = false; } - - return size; } /** |