author | Ville Syrjälä <[email protected]> | 2019-06-10 14:21:05 +0300 |
---|---|---|
committer | Jason Ekstrand <[email protected]> | 2019-06-11 16:25:07 -0500 |
commit | 6230bfeb656f1de598e8ed58cef548dc1c1781aa (patch) | |
tree | 2446fc4962501e2ddea710def03b7e83ad9d7047 | |
parent | 39f4dc23a5af1b78a58905cf91d56441b59eb80a (diff) |
anv/cmd_buffer: Reuse gen8 Cmd{Set, Reset}Event on gen7
Modern DXVK requires event support [1], but it looks like it only
uses vkCmdSetEvent() + vkGetEventStatus(). So we can just
borrow the relevant code from gen8, leaving CmdWaitEvents still
unimplemented.
[1] https://github.com/doitsujin/dxvk/commit/8c3900c533d83d12c970b905183d17a1d3e8df1f
v2: Also move CmdWaitEvents into genX_cmd_buffer.c (Jason)
Signed-off-by: Ville Syrjälä <[email protected]>
Reviewed-by: Jason Ekstrand <[email protected]>
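
For context, here is a minimal sketch of the vkCmdSetEvent() + vkGetEventStatus() usage pattern the commit message says modern DXVK relies on. It is not taken from DXVK itself; the `device`, `queue`, and `cmd_buf` handles are assumed to have been created elsewhere, and error handling is omitted.

```c
#include <vulkan/vulkan.h>

/* Hypothetical example: record vkCmdSetEvent() on the GPU timeline and poll
 * the event from the host with vkGetEventStatus().  No vkCmdWaitEvents() is
 * recorded anywhere. */
static void signal_and_poll_event(VkDevice device, VkQueue queue,
                                  VkCommandBuffer cmd_buf)
{
   VkEvent event;
   const VkEventCreateInfo event_info = {
      .sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
   };
   vkCreateEvent(device, &event_info, NULL, &event);

   /* Record: signal the event once all prior work in this command buffer
    * has reached the bottom of the pipe. */
   const VkCommandBufferBeginInfo begin_info = {
      .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
   };
   vkBeginCommandBuffer(cmd_buf, &begin_info);
   vkCmdSetEvent(cmd_buf, event, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
   vkEndCommandBuffer(cmd_buf);

   const VkSubmitInfo submit = {
      .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
      .commandBufferCount = 1,
      .pCommandBuffers = &cmd_buf,
   };
   vkQueueSubmit(queue, 1, &submit, VK_NULL_HANDLE);

   /* Host-side poll: a real application would do useful work between
    * status checks instead of spinning. */
   while (vkGetEventStatus(device, event) != VK_EVENT_SET)
      ;

   /* Make sure the submission is fully retired before destroying the event. */
   vkQueueWaitIdle(queue);
   vkDestroyEvent(device, event, NULL);
}
```

Because the event is consumed on the host rather than via vkCmdWaitEvents(), gen7 only needs the PIPE_CONTROL write-immediate paths for CmdSetEvent/CmdResetEvent, which is why the gen8 code can be shared while CmdWaitEvents stays a gen8+ path.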
-rw-r--r-- | src/intel/vulkan/gen7_cmd_buffer.c | 38
-rw-r--r-- | src/intel/vulkan/gen8_cmd_buffer.c | 102
-rw-r--r-- | src/intel/vulkan/genX_cmd_buffer.c | 107
3 files changed, 107 insertions(+), 140 deletions(-)
```diff
diff --git a/src/intel/vulkan/gen7_cmd_buffer.c b/src/intel/vulkan/gen7_cmd_buffer.c
index 380283bdd56..115d12b3536 100644
--- a/src/intel/vulkan/gen7_cmd_buffer.c
+++ b/src/intel/vulkan/gen7_cmd_buffer.c
@@ -286,41 +286,3 @@ genX(cmd_buffer_enable_pma_fix)(struct anv_cmd_buffer *cmd_buffer,
 {
    /* The NP PMA fix doesn't exist on gen7 */
 }
-
-void genX(CmdSetEvent)(
-    VkCommandBuffer commandBuffer,
-    VkEvent event,
-    VkPipelineStageFlags stageMask)
-{
-   anv_finishme("Implement events on gen7");
-}
-
-void genX(CmdResetEvent)(
-    VkCommandBuffer commandBuffer,
-    VkEvent event,
-    VkPipelineStageFlags stageMask)
-{
-   anv_finishme("Implement events on gen7");
-}
-
-void genX(CmdWaitEvents)(
-    VkCommandBuffer commandBuffer,
-    uint32_t eventCount,
-    const VkEvent* pEvents,
-    VkPipelineStageFlags srcStageMask,
-    VkPipelineStageFlags destStageMask,
-    uint32_t memoryBarrierCount,
-    const VkMemoryBarrier* pMemoryBarriers,
-    uint32_t bufferMemoryBarrierCount,
-    const VkBufferMemoryBarrier* pBufferMemoryBarriers,
-    uint32_t imageMemoryBarrierCount,
-    const VkImageMemoryBarrier* pImageMemoryBarriers)
-{
-   anv_finishme("Implement events on gen7");
-
-   genX(CmdPipelineBarrier)(commandBuffer, srcStageMask, destStageMask,
-                            false, /* byRegion */
-                            memoryBarrierCount, pMemoryBarriers,
-                            bufferMemoryBarrierCount, pBufferMemoryBarriers,
-                            imageMemoryBarrierCount, pImageMemoryBarriers);
-}
diff --git a/src/intel/vulkan/gen8_cmd_buffer.c b/src/intel/vulkan/gen8_cmd_buffer.c
index 6568d2c7511..2e6d9de9f4a 100644
--- a/src/intel/vulkan/gen8_cmd_buffer.c
+++ b/src/intel/vulkan/gen8_cmd_buffer.c
@@ -565,105 +565,3 @@ void genX(CmdBindIndexBuffer)(
 
    cmd_buffer->state.gfx.dirty |= ANV_CMD_DIRTY_INDEX_BUFFER;
 }
-
-/* Set of stage bits for which are pipelined, i.e. they get queued by the
- * command streamer for later execution.
- */
-#define ANV_PIPELINE_STAGE_PIPELINED_BITS \
-   (VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | \
-    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | \
-    VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT | \
-    VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | \
-    VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT | \
-    VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | \
-    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | \
-    VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | \
-    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | \
-    VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | \
-    VK_PIPELINE_STAGE_TRANSFER_BIT | \
-    VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT | \
-    VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT | \
-    VK_PIPELINE_STAGE_ALL_COMMANDS_BIT)
-
-void genX(CmdSetEvent)(
-    VkCommandBuffer commandBuffer,
-    VkEvent _event,
-    VkPipelineStageFlags stageMask)
-{
-   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
-   ANV_FROM_HANDLE(anv_event, event, _event);
-
-   anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {
-      if (stageMask & ANV_PIPELINE_STAGE_PIPELINED_BITS) {
-         pc.StallAtPixelScoreboard = true;
-         pc.CommandStreamerStallEnable = true;
-      }
-
-      pc.DestinationAddressType = DAT_PPGTT,
-      pc.PostSyncOperation = WriteImmediateData,
-      pc.Address = (struct anv_address) {
-         cmd_buffer->device->dynamic_state_pool.block_pool.bo,
-         event->state.offset
-      };
-      pc.ImmediateData = VK_EVENT_SET;
-   }
-}
-
-void genX(CmdResetEvent)(
-    VkCommandBuffer commandBuffer,
-    VkEvent _event,
-    VkPipelineStageFlags stageMask)
-{
-   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
-   ANV_FROM_HANDLE(anv_event, event, _event);
-
-   anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {
-      if (stageMask & ANV_PIPELINE_STAGE_PIPELINED_BITS) {
-         pc.StallAtPixelScoreboard = true;
-         pc.CommandStreamerStallEnable = true;
-      }
-
-      pc.DestinationAddressType = DAT_PPGTT;
-      pc.PostSyncOperation = WriteImmediateData;
-      pc.Address = (struct anv_address) {
-         cmd_buffer->device->dynamic_state_pool.block_pool.bo,
-         event->state.offset
-      };
-      pc.ImmediateData = VK_EVENT_RESET;
-   }
-}
-
-void genX(CmdWaitEvents)(
-    VkCommandBuffer commandBuffer,
-    uint32_t eventCount,
-    const VkEvent* pEvents,
-    VkPipelineStageFlags srcStageMask,
-    VkPipelineStageFlags destStageMask,
-    uint32_t memoryBarrierCount,
-    const VkMemoryBarrier* pMemoryBarriers,
-    uint32_t bufferMemoryBarrierCount,
-    const VkBufferMemoryBarrier* pBufferMemoryBarriers,
-    uint32_t imageMemoryBarrierCount,
-    const VkImageMemoryBarrier* pImageMemoryBarriers)
-{
-   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
-   for (uint32_t i = 0; i < eventCount; i++) {
-      ANV_FROM_HANDLE(anv_event, event, pEvents[i]);
-
-      anv_batch_emit(&cmd_buffer->batch, GENX(MI_SEMAPHORE_WAIT), sem) {
-         sem.WaitMode = PollingMode,
-         sem.CompareOperation = COMPARE_SAD_EQUAL_SDD,
-         sem.SemaphoreDataDword = VK_EVENT_SET,
-         sem.SemaphoreAddress = (struct anv_address) {
-            cmd_buffer->device->dynamic_state_pool.block_pool.bo,
-            event->state.offset
-         };
-      }
-   }
-
-   genX(CmdPipelineBarrier)(commandBuffer, srcStageMask, destStageMask,
-                            false, /* byRegion */
-                            memoryBarrierCount, pMemoryBarriers,
-                            bufferMemoryBarrierCount, pBufferMemoryBarriers,
-                            imageMemoryBarrierCount, pImageMemoryBarriers);
-}
diff --git a/src/intel/vulkan/genX_cmd_buffer.c b/src/intel/vulkan/genX_cmd_buffer.c
index f171d02dab7..c57c02a08da 100644
--- a/src/intel/vulkan/genX_cmd_buffer.c
+++ b/src/intel/vulkan/genX_cmd_buffer.c
@@ -4729,3 +4729,110 @@ void genX(CmdEndConditionalRenderingEXT)(
    cmd_state->conditional_render_enabled = false;
 }
 #endif
+
+/* Set of stage bits for which are pipelined, i.e. they get queued by the
+ * command streamer for later execution.
+ */
+#define ANV_PIPELINE_STAGE_PIPELINED_BITS \
+   (VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | \
+    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | \
+    VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT | \
+    VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | \
+    VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT | \
+    VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | \
+    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | \
+    VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | \
+    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | \
+    VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | \
+    VK_PIPELINE_STAGE_TRANSFER_BIT | \
+    VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT | \
+    VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT | \
+    VK_PIPELINE_STAGE_ALL_COMMANDS_BIT)
+
+void genX(CmdSetEvent)(
+    VkCommandBuffer commandBuffer,
+    VkEvent _event,
+    VkPipelineStageFlags stageMask)
+{
+   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
+   ANV_FROM_HANDLE(anv_event, event, _event);
+
+   anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {
+      if (stageMask & ANV_PIPELINE_STAGE_PIPELINED_BITS) {
+         pc.StallAtPixelScoreboard = true;
+         pc.CommandStreamerStallEnable = true;
+      }
+
+      pc.DestinationAddressType = DAT_PPGTT,
+      pc.PostSyncOperation = WriteImmediateData,
+      pc.Address = (struct anv_address) {
+         cmd_buffer->device->dynamic_state_pool.block_pool.bo,
+         event->state.offset
+      };
+      pc.ImmediateData = VK_EVENT_SET;
+   }
+}
+
+void genX(CmdResetEvent)(
+    VkCommandBuffer commandBuffer,
+    VkEvent _event,
+    VkPipelineStageFlags stageMask)
+{
+   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
+   ANV_FROM_HANDLE(anv_event, event, _event);
+
+   anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {
+      if (stageMask & ANV_PIPELINE_STAGE_PIPELINED_BITS) {
+         pc.StallAtPixelScoreboard = true;
+         pc.CommandStreamerStallEnable = true;
+      }
+
+      pc.DestinationAddressType = DAT_PPGTT;
+      pc.PostSyncOperation = WriteImmediateData;
+      pc.Address = (struct anv_address) {
+         cmd_buffer->device->dynamic_state_pool.block_pool.bo,
+         event->state.offset
+      };
+      pc.ImmediateData = VK_EVENT_RESET;
+   }
+}
+
+void genX(CmdWaitEvents)(
+    VkCommandBuffer commandBuffer,
+    uint32_t eventCount,
+    const VkEvent* pEvents,
+    VkPipelineStageFlags srcStageMask,
+    VkPipelineStageFlags destStageMask,
+    uint32_t memoryBarrierCount,
+    const VkMemoryBarrier* pMemoryBarriers,
+    uint32_t bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier* pBufferMemoryBarriers,
+    uint32_t imageMemoryBarrierCount,
+    const VkImageMemoryBarrier* pImageMemoryBarriers)
+{
+#if GEN_GEN >= 8
+   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
+
+   for (uint32_t i = 0; i < eventCount; i++) {
+      ANV_FROM_HANDLE(anv_event, event, pEvents[i]);
+
+      anv_batch_emit(&cmd_buffer->batch, GENX(MI_SEMAPHORE_WAIT), sem) {
+         sem.WaitMode = PollingMode,
+         sem.CompareOperation = COMPARE_SAD_EQUAL_SDD,
+         sem.SemaphoreDataDword = VK_EVENT_SET,
+         sem.SemaphoreAddress = (struct anv_address) {
+            cmd_buffer->device->dynamic_state_pool.block_pool.bo,
+            event->state.offset
+         };
+      }
+   }
+#else
+   anv_finishme("Implement events on gen7");
+#endif
+
+   genX(CmdPipelineBarrier)(commandBuffer, srcStageMask, destStageMask,
+                            false, /* byRegion */
+                            memoryBarrierCount, pMemoryBarriers,
+                            bufferMemoryBarrierCount, pBufferMemoryBarriers,
+                            imageMemoryBarrierCount, pImageMemoryBarriers);
+}
```