Diffstat (limited to 'src')
-rw-r--r-- | src/vulkan/anv_cmd_buffer.c  | 23
-rw-r--r-- | src/vulkan/anv_meta.c        |  2
-rw-r--r-- | src/vulkan/anv_pipeline.c    |  2
-rw-r--r-- | src/vulkan/anv_private.h     | 32
-rw-r--r-- | src/vulkan/gen7_cmd_buffer.c | 32
-rw-r--r-- | src/vulkan/gen8_cmd_buffer.c | 34
6 files changed, 63 insertions, 62 deletions
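
The change is mechanical: the loose ANV_*_DIRTY #defines become a named enum anv_cmd_dirty_bits plus an anv_cmd_dirty_mask_t typedef with the same bit values, so the dirty words in anv_cmd_state carry a self-documenting type instead of a bare uint32_t. The scheme itself is the usual dirty-flag pattern: each vkCmdSet*/vkCmdBind* entry point ORs a bit into the mask, and the flush path run at draw time tests combinations of bits, re-emits only the stale hardware state, and clears the mask. A minimal standalone sketch of that pattern follows; it borrows nothing from the driver, and the demo_* names are illustrative only.

#include <stdint.h>
#include <stdio.h>

/* Illustrative dirty bits, mirroring the shape of anv_cmd_dirty_bits. */
enum demo_dirty_bits {
   DEMO_DIRTY_DYNAMIC_VIEWPORT   = 1 << 0,
   DEMO_DIRTY_DYNAMIC_LINE_WIDTH = 1 << 1,
   DEMO_DIRTY_DYNAMIC_ALL        = (1 << 2) - 1,
   DEMO_DIRTY_PIPELINE           = 1 << 2,
};
typedef uint32_t demo_dirty_mask_t;

struct demo_cmd_state {
   demo_dirty_mask_t dirty;
   float line_width;
};

/* A vkCmdSet*-style entry point: record the value, mark it dirty. */
static void demo_set_line_width(struct demo_cmd_state *state, float width)
{
   state->line_width = width;
   state->dirty |= DEMO_DIRTY_DYNAMIC_LINE_WIDTH;
}

/* A draw-time flush: re-emit only what the tested bits say is stale. */
static void demo_flush(struct demo_cmd_state *state)
{
   if (state->dirty & (DEMO_DIRTY_PIPELINE | DEMO_DIRTY_DYNAMIC_LINE_WIDTH))
      printf("emit SF state, line width %.1f\n", state->line_width);

   if (state->dirty & DEMO_DIRTY_DYNAMIC_VIEWPORT)
      printf("emit viewport state\n");

   state->dirty = 0;   /* everything is in sync with the batch again */
}

int main(void)
{
   struct demo_cmd_state state = {
      .dirty = DEMO_DIRTY_DYNAMIC_ALL | DEMO_DIRTY_PIPELINE,
   };

   demo_flush(&state);                /* first draw: everything is emitted */
   demo_set_line_width(&state, 2.0f);
   demo_flush(&state);                /* second draw: only SF is re-emitted */
   return 0;
}

The driver's flush routines below (gen7_cmd_buffer_flush_state, gen8_cmd_buffer_flush_state) follow the same shape, just against real GENx state packets.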
diff --git a/src/vulkan/anv_cmd_buffer.c b/src/vulkan/anv_cmd_buffer.c
index b8783f9e212..78c7635ef6d 100644
--- a/src/vulkan/anv_cmd_buffer.c
+++ b/src/vulkan/anv_cmd_buffer.c
@@ -312,14 +312,14 @@ void anv_CmdBindPipeline(
    switch (pipelineBindPoint) {
    case VK_PIPELINE_BIND_POINT_COMPUTE:
       cmd_buffer->state.compute_pipeline = pipeline;
-      cmd_buffer->state.compute_dirty |= ANV_CMD_BUFFER_PIPELINE_DIRTY;
+      cmd_buffer->state.compute_dirty |= ANV_CMD_DIRTY_PIPELINE;
       cmd_buffer->state.push_constants_dirty |= VK_SHADER_STAGE_COMPUTE_BIT;
       break;

    case VK_PIPELINE_BIND_POINT_GRAPHICS:
       cmd_buffer->state.pipeline = pipeline;
       cmd_buffer->state.vb_dirty |= pipeline->vb_used;
-      cmd_buffer->state.dirty |= ANV_CMD_BUFFER_PIPELINE_DIRTY;
+      cmd_buffer->state.dirty |= ANV_CMD_DIRTY_PIPELINE;
       cmd_buffer->state.push_constants_dirty |= pipeline->active_stages;

       /* Apply the dynamic state from the pipeline */
@@ -346,7 +346,7 @@ void anv_CmdSetViewport(
    memcpy(cmd_buffer->state.dynamic.viewport.viewports, pViewports,
           viewportCount * sizeof(*pViewports));

-   cmd_buffer->state.dirty |= ANV_DYNAMIC_VIEWPORT_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_VIEWPORT;
 }

 void anv_CmdSetScissor(
@@ -360,7 +360,7 @@ void anv_CmdSetScissor(
    memcpy(cmd_buffer->state.dynamic.scissor.scissors, pScissors,
           scissorCount * sizeof(*pScissors));

-   cmd_buffer->state.dirty |= ANV_DYNAMIC_SCISSOR_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_SCISSOR;
 }

 void anv_CmdSetLineWidth(
@@ -370,8 +370,7 @@ void anv_CmdSetLineWidth(
    ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, cmdBuffer);

    cmd_buffer->state.dynamic.line_width = lineWidth;
-
-   cmd_buffer->state.dirty |= ANV_DYNAMIC_LINE_WIDTH_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH;
 }

 void anv_CmdSetDepthBias(
@@ -386,7 +385,7 @@ void anv_CmdSetDepthBias(
    cmd_buffer->state.dynamic.depth_bias.clamp = depthBiasClamp;
    cmd_buffer->state.dynamic.depth_bias.slope_scaled = slopeScaledDepthBias;

-   cmd_buffer->state.dirty |= ANV_DYNAMIC_DEPTH_BIAS_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS;
 }

 void anv_CmdSetBlendConstants(
@@ -398,7 +397,7 @@ void anv_CmdSetBlendConstants(
    memcpy(cmd_buffer->state.dynamic.blend_constants,
           blendConst, sizeof(float) * 4);

-   cmd_buffer->state.dirty |= ANV_DYNAMIC_BLEND_CONSTANTS_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS;
 }

 void anv_CmdSetDepthBounds(
@@ -411,7 +410,7 @@ void anv_CmdSetDepthBounds(
    cmd_buffer->state.dynamic.depth_bounds.min = minDepthBounds;
    cmd_buffer->state.dynamic.depth_bounds.max = maxDepthBounds;

-   cmd_buffer->state.dirty |= ANV_DYNAMIC_DEPTH_BOUNDS_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS;
 }

 void anv_CmdSetStencilCompareMask(
@@ -426,7 +425,7 @@ void anv_CmdSetStencilCompareMask(
    if (faceMask & VK_STENCIL_FACE_BACK_BIT)
       cmd_buffer->state.dynamic.stencil_compare_mask.back = stencilCompareMask;

-   cmd_buffer->state.dirty |= ANV_DYNAMIC_STENCIL_COMPARE_MASK_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK;
 }

 void anv_CmdSetStencilWriteMask(
@@ -441,7 +440,7 @@ void anv_CmdSetStencilWriteMask(
    if (faceMask & VK_STENCIL_FACE_BACK_BIT)
       cmd_buffer->state.dynamic.stencil_write_mask.back = stencilWriteMask;

-   cmd_buffer->state.dirty |= ANV_DYNAMIC_STENCIL_WRITE_MASK_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK;
 }

 void anv_CmdSetStencilReference(
@@ -456,7 +455,7 @@ void anv_CmdSetStencilReference(
    if (faceMask & VK_STENCIL_FACE_BACK_BIT)
       cmd_buffer->state.dynamic.stencil_reference.back = stencilReference;

-   cmd_buffer->state.dirty |= ANV_DYNAMIC_STENCIL_REFERENCE_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE;
 }

 void anv_CmdBindDescriptorSets(
diff --git a/src/vulkan/anv_meta.c b/src/vulkan/anv_meta.c
index 09d035b0b07..591a89928a6 100644
--- a/src/vulkan/anv_meta.c
+++ b/src/vulkan/anv_meta.c
@@ -345,7 +345,7 @@ anv_cmd_buffer_restore(struct anv_cmd_buffer *cmd_buffer,
           sizeof(state->old_vertex_bindings));

    cmd_buffer->state.vb_dirty |= (1 << NUM_VB_USED) - 1;
-   cmd_buffer->state.dirty |= ANV_CMD_BUFFER_PIPELINE_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_PIPELINE;
    cmd_buffer->state.descriptors_dirty |= VK_SHADER_STAGE_VERTEX_BIT;

    anv_dynamic_state_copy(&cmd_buffer->state.dynamic, &state->dynamic,
diff --git a/src/vulkan/anv_pipeline.c b/src/vulkan/anv_pipeline.c
index 41a4d06403b..fda382eee19 100644
--- a/src/vulkan/anv_pipeline.c
+++ b/src/vulkan/anv_pipeline.c
@@ -181,7 +181,7 @@ static void
 anv_pipeline_init_dynamic_state(struct anv_pipeline *pipeline,
                                 const VkGraphicsPipelineCreateInfo *pCreateInfo)
 {
-   uint32_t states = ANV_DYNAMIC_STATE_DIRTY_MASK;
+   anv_cmd_dirty_mask_t states = ANV_CMD_DIRTY_DYNAMIC_ALL;

    ANV_FROM_HANDLE(anv_render_pass, pass, pCreateInfo->renderPass);
    struct anv_subpass *subpass = &pass->subpasses[pCreateInfo->subpass];
diff --git a/src/vulkan/anv_private.h b/src/vulkan/anv_private.h
index f03620f92ec..aadedb8b53a 100644
--- a/src/vulkan/anv_private.h
+++ b/src/vulkan/anv_private.h
@@ -797,19 +797,21 @@ struct anv_buffer {
    VkDeviceSize offset;
 };

-/* The first 9 correspond to 1 << VK_DYNAMIC_STATE_FOO */
-#define ANV_DYNAMIC_VIEWPORT_DIRTY (1 << 0)
-#define ANV_DYNAMIC_SCISSOR_DIRTY (1 << 1)
-#define ANV_DYNAMIC_LINE_WIDTH_DIRTY (1 << 2)
-#define ANV_DYNAMIC_DEPTH_BIAS_DIRTY (1 << 3)
-#define ANV_DYNAMIC_BLEND_CONSTANTS_DIRTY (1 << 4)
-#define ANV_DYNAMIC_DEPTH_BOUNDS_DIRTY (1 << 5)
-#define ANV_DYNAMIC_STENCIL_COMPARE_MASK_DIRTY (1 << 6)
-#define ANV_DYNAMIC_STENCIL_WRITE_MASK_DIRTY (1 << 7)
-#define ANV_DYNAMIC_STENCIL_REFERENCE_DIRTY (1 << 8)
-#define ANV_DYNAMIC_STATE_DIRTY_MASK ((1 << 9) - 1)
-#define ANV_CMD_BUFFER_PIPELINE_DIRTY (1 << 9)
-#define ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY (1 << 10)
+enum anv_cmd_dirty_bits {
+   ANV_CMD_DIRTY_DYNAMIC_VIEWPORT = 1 << 0, /* VK_DYNAMIC_STATE_VIEWPORT */
+   ANV_CMD_DIRTY_DYNAMIC_SCISSOR = 1 << 1, /* VK_DYNAMIC_STATE_SCISSOR */
+   ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH = 1 << 2, /* VK_DYNAMIC_STATE_LINE_WIDTH */
+   ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS = 1 << 3, /* VK_DYNAMIC_STATE_DEPTH_BIAS */
+   ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS = 1 << 4, /* VK_DYNAMIC_STATE_BLEND_CONSTANTS */
+   ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS = 1 << 5, /* VK_DYNAMIC_STATE_DEPTH_BOUNDS */
+   ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK = 1 << 6, /* VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK */
+   ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK = 1 << 7, /* VK_DYNAMIC_STATE_STENCIL_WRITE_MASK */
+   ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE = 1 << 8, /* VK_DYNAMIC_STATE_STENCIL_REFERENCE */
+   ANV_CMD_DIRTY_DYNAMIC_ALL = (1 << 9) - 1,
+   ANV_CMD_DIRTY_PIPELINE = 1 << 9,
+   ANV_CMD_DIRTY_INDEX_BUFFER = 1 << 10,
+};
+typedef uint32_t anv_cmd_dirty_mask_t;

 struct anv_vertex_binding {
    struct anv_buffer * buffer;
@@ -892,8 +894,8 @@ void anv_dynamic_state_copy(struct anv_dynamic_state *dest,
 struct anv_cmd_state {
    uint32_t current_pipeline;
    uint32_t vb_dirty;
-   uint32_t dirty;
-   uint32_t compute_dirty;
+   anv_cmd_dirty_mask_t dirty;
+   anv_cmd_dirty_mask_t compute_dirty;
    VkShaderStageFlags descriptors_dirty;
    VkShaderStageFlags push_constants_dirty;
    uint32_t scratch_size;
diff --git a/src/vulkan/gen7_cmd_buffer.c b/src/vulkan/gen7_cmd_buffer.c
index 0106aa74aa6..a99881f2eb9 100644
--- a/src/vulkan/gen7_cmd_buffer.c
+++ b/src/vulkan/gen7_cmd_buffer.c
@@ -128,7 +128,7 @@ void gen7_CmdBindIndexBuffer(
    ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, cmdBuffer);
    ANV_FROM_HANDLE(anv_buffer, buffer, _buffer);

-   cmd_buffer->state.dirty |= ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_INDEX_BUFFER;
    cmd_buffer->state.gen7.index_buffer = buffer;
    cmd_buffer->state.gen7.index_type = vk_to_gen_index_type[indexType];
    cmd_buffer->state.gen7.index_offset = offset;
@@ -185,11 +185,11 @@ gen7_cmd_buffer_flush_compute_state(struct anv_cmd_buffer *cmd_buffer)
       cmd_buffer->state.current_pipeline = GPGPU;
    }

-   if (cmd_buffer->state.compute_dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY)
+   if (cmd_buffer->state.compute_dirty & ANV_CMD_DIRTY_PIPELINE)
       anv_batch_emit_batch(&cmd_buffer->batch, &pipeline->batch);

    if ((cmd_buffer->state.descriptors_dirty & VK_SHADER_STAGE_COMPUTE_BIT) ||
-       (cmd_buffer->state.compute_dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY)) {
+       (cmd_buffer->state.compute_dirty & ANV_CMD_DIRTY_PIPELINE)) {
       /* FIXME: figure out descriptors for gen7 */
       result = gen7_flush_compute_descriptor_set(cmd_buffer);
       assert(result == VK_SUCCESS);
@@ -242,7 +242,7 @@ gen7_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
       }
    }

-   if (cmd_buffer->state.dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY) {
+   if (cmd_buffer->state.dirty & ANV_CMD_DIRTY_PIPELINE) {
      /* If somebody compiled a pipeline after starting a command buffer the
       * scratch bo may have grown since we started this cmd buffer (and
       * emitted STATE_BASE_ADDRESS). If we're binding that pipeline now,
@@ -256,15 +256,15 @@ gen7_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
    if (cmd_buffer->state.descriptors_dirty)
       anv_flush_descriptor_sets(cmd_buffer);

-   if (cmd_buffer->state.dirty & ANV_DYNAMIC_VIEWPORT_DIRTY)
+   if (cmd_buffer->state.dirty & ANV_CMD_DIRTY_DYNAMIC_VIEWPORT)
       anv_cmd_buffer_emit_viewport(cmd_buffer);

-   if (cmd_buffer->state.dirty & ANV_DYNAMIC_SCISSOR_DIRTY)
+   if (cmd_buffer->state.dirty & ANV_CMD_DIRTY_DYNAMIC_SCISSOR)
       anv_cmd_buffer_emit_scissor(cmd_buffer);

-   if (cmd_buffer->state.dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY |
-                                  ANV_DYNAMIC_LINE_WIDTH_DIRTY |
-                                  ANV_DYNAMIC_DEPTH_BIAS_DIRTY)) {
+   if (cmd_buffer->state.dirty & (ANV_CMD_DIRTY_PIPELINE |
+                                  ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH |
+                                  ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS)) {
       bool enable_bias = cmd_buffer->state.dynamic.depth_bias.bias != 0.0f ||
          cmd_buffer->state.dynamic.depth_bias.slope_scaled != 0.0f;

@@ -285,8 +285,8 @@ gen7_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
       anv_batch_emit_merge(&cmd_buffer->batch, sf_dw, pipeline->gen7.sf);
    }

-   if (cmd_buffer->state.dirty & (ANV_DYNAMIC_BLEND_CONSTANTS_DIRTY |
-                                  ANV_DYNAMIC_STENCIL_REFERENCE_DIRTY)) {
+   if (cmd_buffer->state.dirty & (ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS |
+                                  ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE)) {
       struct anv_state cc_state =
          anv_cmd_buffer_alloc_dynamic_state(cmd_buffer,
                                             GEN7_COLOR_CALC_STATE_length, 64);
@@ -307,9 +307,9 @@ gen7_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
                      .ColorCalcStatePointer = cc_state.offset);
    }

-   if (cmd_buffer->state.dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY |
-                                  ANV_DYNAMIC_STENCIL_COMPARE_MASK_DIRTY |
-                                  ANV_DYNAMIC_STENCIL_WRITE_MASK_DIRTY)) {
+   if (cmd_buffer->state.dirty & (ANV_CMD_DIRTY_PIPELINE |
+                                  ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK |
+                                  ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK)) {
       uint32_t depth_stencil_dw[GEN7_DEPTH_STENCIL_STATE_length];

       struct GEN7_DEPTH_STENCIL_STATE depth_stencil = {
@@ -340,8 +340,8 @@ gen7_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
    }

    if (cmd_buffer->state.gen7.index_buffer &&
-       cmd_buffer->state.dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY |
-                                  ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY)) {
+       cmd_buffer->state.dirty & (ANV_CMD_DIRTY_PIPELINE |
+                                  ANV_CMD_DIRTY_INDEX_BUFFER)) {
       struct anv_buffer *buffer = cmd_buffer->state.gen7.index_buffer;
       uint32_t offset = cmd_buffer->state.gen7.index_offset;

diff --git a/src/vulkan/gen8_cmd_buffer.c b/src/vulkan/gen8_cmd_buffer.c
index a1db0170c09..f626cad2831 100644
--- a/src/vulkan/gen8_cmd_buffer.c
+++ b/src/vulkan/gen8_cmd_buffer.c
@@ -104,7 +104,7 @@ gen8_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
       }
    }

-   if (cmd_buffer->state.dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY) {
+   if (cmd_buffer->state.dirty & ANV_CMD_DIRTY_PIPELINE) {
      /* If somebody compiled a pipeline after starting a command buffer the
       * scratch bo may have grown since we started this cmd buffer (and
       * emitted STATE_BASE_ADDRESS). If we're binding that pipeline now,
@@ -121,14 +121,14 @@ gen8_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
    if (cmd_buffer->state.push_constants_dirty)
       gen8_cmd_buffer_flush_push_constants(cmd_buffer);

-   if (cmd_buffer->state.dirty & ANV_DYNAMIC_VIEWPORT_DIRTY)
+   if (cmd_buffer->state.dirty & ANV_CMD_DIRTY_DYNAMIC_VIEWPORT)
       anv_cmd_buffer_emit_viewport(cmd_buffer);

-   if (cmd_buffer->state.dirty & ANV_DYNAMIC_SCISSOR_DIRTY)
+   if (cmd_buffer->state.dirty & ANV_CMD_DIRTY_DYNAMIC_SCISSOR)
       anv_cmd_buffer_emit_scissor(cmd_buffer);

-   if (cmd_buffer->state.dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY |
-                                  ANV_DYNAMIC_LINE_WIDTH_DIRTY)) {
+   if (cmd_buffer->state.dirty & (ANV_CMD_DIRTY_PIPELINE |
+                                  ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH)) {
       uint32_t sf_dw[GEN8_3DSTATE_SF_length];
       struct GEN8_3DSTATE_SF sf = {
          GEN8_3DSTATE_SF_header,
@@ -138,8 +138,8 @@ gen8_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
       anv_batch_emit_merge(&cmd_buffer->batch, sf_dw, pipeline->gen8.sf);
    }

-   if (cmd_buffer->state.dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY |
-                                  ANV_DYNAMIC_DEPTH_BIAS_DIRTY)) {
+   if (cmd_buffer->state.dirty & (ANV_CMD_DIRTY_PIPELINE |
+                                  ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS)){
       bool enable_bias = cmd_buffer->state.dynamic.depth_bias.bias != 0.0f ||
          cmd_buffer->state.dynamic.depth_bias.slope_scaled != 0.0f;

@@ -158,8 +158,8 @@ gen8_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
                           pipeline->gen8.raster);
    }

-   if (cmd_buffer->state.dirty & (ANV_DYNAMIC_BLEND_CONSTANTS_DIRTY |
-                                  ANV_DYNAMIC_STENCIL_REFERENCE_DIRTY)) {
+   if (cmd_buffer->state.dirty & (ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS |
+                                  ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE)) {
       struct anv_state cc_state =
          anv_cmd_buffer_alloc_dynamic_state(cmd_buffer,
                                             GEN8_COLOR_CALC_STATE_length, 64);
@@ -181,9 +181,9 @@ gen8_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
                      .ColorCalcStatePointerValid = true);
    }

-   if (cmd_buffer->state.dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY |
-                                  ANV_DYNAMIC_STENCIL_COMPARE_MASK_DIRTY |
-                                  ANV_DYNAMIC_STENCIL_WRITE_MASK_DIRTY)) {
+   if (cmd_buffer->state.dirty & (ANV_CMD_DIRTY_PIPELINE |
+                                  ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK |
+                                  ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK)) {
       uint32_t wm_depth_stencil_dw[GEN8_3DSTATE_WM_DEPTH_STENCIL_length];

       struct GEN8_3DSTATE_WM_DEPTH_STENCIL wm_depth_stencil = {
@@ -210,8 +210,8 @@ gen8_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
                              pipeline->gen8.wm_depth_stencil);
    }

-   if (cmd_buffer->state.dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY |
-                                  ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY)) {
+   if (cmd_buffer->state.dirty & (ANV_CMD_DIRTY_PIPELINE |
+                                  ANV_CMD_DIRTY_INDEX_BUFFER)) {
       anv_batch_emit_merge(&cmd_buffer->batch, cmd_buffer->state.state_vf,
                            pipeline->gen8.vf);
    }
@@ -331,7 +331,7 @@ void gen8_CmdBindIndexBuffer(
    };
    GEN8_3DSTATE_VF_pack(NULL, cmd_buffer->state.state_vf, &vf);

-   cmd_buffer->state.dirty |= ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_INDEX_BUFFER;

    anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_INDEX_BUFFER,
                   .IndexFormat = vk_to_gen_index_type[indexType],
@@ -394,11 +394,11 @@ gen8_cmd_buffer_flush_compute_state(struct anv_cmd_buffer *cmd_buffer)
       cmd_buffer->state.current_pipeline = GPGPU;
    }

-   if (cmd_buffer->state.compute_dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY)
+   if (cmd_buffer->state.compute_dirty & ANV_CMD_DIRTY_PIPELINE)
       anv_batch_emit_batch(&cmd_buffer->batch, &pipeline->batch);

    if ((cmd_buffer->state.descriptors_dirty & VK_SHADER_STAGE_COMPUTE_BIT) ||
-       (cmd_buffer->state.compute_dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY)) {
+       (cmd_buffer->state.compute_dirty & ANV_CMD_DIRTY_PIPELINE)) {
       result = gen8_flush_compute_descriptor_set(cmd_buffer);
       assert(result == VK_SUCCESS);
       cmd_buffer->state.descriptors_dirty &= ~VK_SHADER_STAGE_COMPUTE;
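
The old block comment ("The first 9 correspond to 1 << VK_DYNAMIC_STATE_FOO") survives only as the per-member comments in the new enum, but the layout it described is still load-bearing: because the first nine dirty bits mirror the VK_DYNAMIC_STATE_* values, a dynamic-state enum value can be turned into its dirty bit with a plain shift, which is presumably why anv_pipeline_init_dynamic_state can seed its mask with ANV_CMD_DIRTY_DYNAMIC_ALL and then strip bits for the states the application declares dynamic. A standalone sketch of that correspondence follows, using illustrative stand-in values rather than the real Vulkan enum.

#include <assert.h>
#include <stdint.h>

/* Illustrative stand-ins for the first few VK_DYNAMIC_STATE_* values
 * (0, 1, 2, ...); the real enum comes from vulkan.h. */
enum demo_dynamic_state {
   DEMO_DYNAMIC_STATE_VIEWPORT   = 0,
   DEMO_DYNAMIC_STATE_SCISSOR    = 1,
   DEMO_DYNAMIC_STATE_LINE_WIDTH = 2,
};

/* Dirty bits laid out exactly as 1 << dynamic-state value, the invariant
 * the per-member comments in anv_cmd_dirty_bits document. */
enum demo_dirty_bits {
   DEMO_DIRTY_DYNAMIC_VIEWPORT   = 1 << 0,
   DEMO_DIRTY_DYNAMIC_SCISSOR    = 1 << 1,
   DEMO_DIRTY_DYNAMIC_LINE_WIDTH = 1 << 2,
   DEMO_DIRTY_DYNAMIC_ALL        = (1 << 3) - 1,
};

/* With the invariant in place, "everything except the states the
 * application declared dynamic" is one shift per declared state. */
static uint32_t
demo_pipeline_dirty_mask(const enum demo_dynamic_state *dyn, uint32_t count)
{
   uint32_t states = DEMO_DIRTY_DYNAMIC_ALL;
   for (uint32_t i = 0; i < count; i++)
      states &= ~(1u << dyn[i]);
   return states;
}

int main(void)
{
   const enum demo_dynamic_state dyn[] = { DEMO_DYNAMIC_STATE_VIEWPORT };

   /* Viewport is dynamic, so only scissor and line width stay baked in. */
   assert(demo_pipeline_dirty_mask(dyn, 1) ==
          (DEMO_DIRTY_DYNAMIC_SCISSOR | DEMO_DIRTY_DYNAMIC_LINE_WIDTH));
   return 0;
}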