Diffstat (limited to 'src')
-rw-r--r--  src/intel/vulkan/anv_blorp.c  |  88
1 file changed, 88 insertions(+), 0 deletions(-)
diff --git a/src/intel/vulkan/anv_blorp.c b/src/intel/vulkan/anv_blorp.c
index 90ee5ed43e2..56552ae2296 100644
--- a/src/intel/vulkan/anv_blorp.c
+++ b/src/intel/vulkan/anv_blorp.c
@@ -1058,6 +1058,88 @@ void anv_CmdClearAttachments(
blorp_batch_finish(&batch);
}
+enum subpass_stage {
+ SUBPASS_STAGE_LOAD,
+ SUBPASS_STAGE_DRAW,
+ SUBPASS_STAGE_RESOLVE,
+};
+
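+/* Returns true if the given attachment will be read (as an input attachment
+ * or as a resolve source) before it is next rendered to, in which case its
+ * cached contents must be flushed and the texture cache invalidated.
+ */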
+static bool
+attachment_needs_flush(struct anv_cmd_buffer *cmd_buffer,
+ struct anv_render_pass_attachment *att,
+ enum subpass_stage stage)
+{
+ struct anv_render_pass *pass = cmd_buffer->state.pass;
+ struct anv_subpass *subpass = cmd_buffer->state.subpass;
+ unsigned subpass_idx = subpass - pass->subpasses;
+ assert(subpass_idx < pass->subpass_count);
+
+ /* We handle this subpass specially based on the current stage */
+ enum anv_subpass_usage usage = att->subpass_usage[subpass_idx];
+ switch (stage) {
+ case SUBPASS_STAGE_LOAD:
+ if (usage & (ANV_SUBPASS_USAGE_INPUT | ANV_SUBPASS_USAGE_RESOLVE_SRC))
+ return true;
+ break;
+
+ case SUBPASS_STAGE_DRAW:
+ if (usage & ANV_SUBPASS_USAGE_RESOLVE_SRC)
+ return true;
+ break;
+
+ default:
+ break;
+ }
+
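+   /* Look ahead at how the remaining subpasses use this attachment. */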
+ for (uint32_t s = subpass_idx + 1; s < pass->subpass_count; s++) {
+ usage = att->subpass_usage[s];
+
+      /* If this attachment is going to be used as an input in a later
+       * subpass, then we need to flush its cache and invalidate the
+       * texture cache before that read.
+       */
+      if (usage & ANV_SUBPASS_USAGE_INPUT)
+         return true;
+
+ if (usage & (ANV_SUBPASS_USAGE_DRAW | ANV_SUBPASS_USAGE_RESOLVE_DST)) {
+ /* We found another subpass that draws to this attachment. We'll
+ * wait to resolve until then.
+ */
+ return false;
+ }
+ }
+
+ return false;
+}
+
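+/* Accumulate the cache flushes and invalidations needed so that later reads
+ * of this subpass's color and depth/stencil attachments see coherent data.
+ */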
+static void
+anv_cmd_buffer_flush_attachments(struct anv_cmd_buffer *cmd_buffer,
+ enum subpass_stage stage)
+{
+ struct anv_subpass *subpass = cmd_buffer->state.subpass;
+ struct anv_render_pass *pass = cmd_buffer->state.pass;
+
+ for (uint32_t i = 0; i < subpass->color_count; ++i) {
+ uint32_t att = subpass->color_attachments[i];
+ assert(att < pass->attachment_count);
+ if (attachment_needs_flush(cmd_buffer, &pass->attachments[att], stage)) {
+ cmd_buffer->state.pending_pipe_bits |=
+ ANV_PIPE_TEXTURE_CACHE_INVALIDATE_BIT |
+ ANV_PIPE_RENDER_TARGET_CACHE_FLUSH_BIT;
+ }
+ }
+
+ if (subpass->depth_stencil_attachment != VK_ATTACHMENT_UNUSED) {
+ uint32_t att = subpass->depth_stencil_attachment;
+ assert(att < pass->attachment_count);
+ if (attachment_needs_flush(cmd_buffer, &pass->attachments[att], stage)) {
+ cmd_buffer->state.pending_pipe_bits |=
+ ANV_PIPE_TEXTURE_CACHE_INVALIDATE_BIT |
+ ANV_PIPE_DEPTH_CACHE_FLUSH_BIT;
+ }
+ }
+}
+
static bool
subpass_needs_clear(const struct anv_cmd_buffer *cmd_buffer)
{
@@ -1137,6 +1219,8 @@ anv_cmd_buffer_clear_subpass(struct anv_cmd_buffer *cmd_buffer)
}
blorp_batch_finish(&batch);
+
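+   /* The clears above may be read as input attachments or resolve sources
+    * later in the render pass, so record any cache flushes they require.
+    */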
+ anv_cmd_buffer_flush_attachments(cmd_buffer, SUBPASS_STAGE_LOAD);
}
static void
@@ -1291,6 +1375,8 @@ anv_cmd_buffer_resolve_subpass(struct anv_cmd_buffer *cmd_buffer)
subpass->color_attachments[i]);
}
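+   /* Drawing to this subpass's attachments is done; flush any that are
+    * about to be used as resolve sources or read as input attachments.
+    */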
+ anv_cmd_buffer_flush_attachments(cmd_buffer, SUBPASS_STAGE_DRAW);
+
if (subpass->has_resolve) {
for (uint32_t i = 0; i < subpass->color_count; ++i) {
uint32_t src_att = subpass->color_attachments[i];
@@ -1336,6 +1422,8 @@ anv_cmd_buffer_resolve_subpass(struct anv_cmd_buffer *cmd_buffer)
ccs_resolve_attachment(cmd_buffer, &batch, dst_att);
}
+
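+   /* Flush again after the resolves so that any later input-attachment
+    * reads of this subpass's attachments see the final contents.
+    */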
+ anv_cmd_buffer_flush_attachments(cmd_buffer, SUBPASS_STAGE_RESOLVE);
}
blorp_batch_finish(&batch);