| | | |
|---|---|---|
| author | Ville Syrjälä <[email protected]> | 2019-06-10 14:21:05 +0300 |
| committer | Juan A. Suarez Romero <[email protected]> | 2019-06-25 16:06:33 +0000 |
| commit | 970cc023b03537e4e8b1fb65287bfc0a504a6556 (patch) | |
| tree | 1676b63933085934d5fdc9254fb92361285cdca3 /src | |
| parent | 2e83a64f64d6310665cab59ebde4b61b70daa361 (diff) | |
anv/cmd_buffer: Reuse gen8 Cmd{Set,Reset}Event on gen7
Modern DXVK requires event support [1], but it looks like it only
uses vkCmdSetEvent() + vkGetEventStatus(). So we can just borrow
the relevant code from gen8, leaving CmdWaitEvents still
unimplemented on gen7.
[1] https://github.com/doitsujin/dxvk/commit/8c3900c533d83d12c970b905183d17a1d3e8df1f
v2: Also move CmdWaitEvents into genX_cmd_buffer.c (Jason)
Signed-off-by: Ville Syrjälä <[email protected]>
Reviewed-by: Jason Ekstrand <[email protected]>
(cherry picked from commit 6230bfeb656f1de598e8ed58cef548dc1c1781aa)
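
For context (not part of the patch): below is a minimal host-side sketch of the vkCmdSetEvent() + vkGetEventStatus() pattern the commit message refers to. The device, queue and cmd_buf handles are assumed to have been created elsewhere, and error handling is omitted.

```c
#include <vulkan/vulkan.h>

/* Hypothetical sketch of the pattern the commit message describes: the GPU
 * signals an event (vkCmdSetEvent) and the host simply polls it
 * (vkGetEventStatus); no vkCmdWaitEvents is ever recorded.  The device,
 * queue and cmd_buf handles are assumed to come from elsewhere. */
static void
signal_and_poll_event(VkDevice device, VkQueue queue, VkCommandBuffer cmd_buf)
{
   const VkEventCreateInfo event_info = {
      .sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
   };
   VkEvent event;
   vkCreateEvent(device, &event_info, NULL, &event);

   const VkCommandBufferBeginInfo begin_info = {
      .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
   };
   vkBeginCommandBuffer(cmd_buf, &begin_info);

   /* In anv this becomes a PIPE_CONTROL with a post-sync immediate write
    * of VK_EVENT_SET into the event's dword. */
   vkCmdSetEvent(cmd_buf, event, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT);
   vkEndCommandBuffer(cmd_buf);

   const VkSubmitInfo submit = {
      .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
      .commandBufferCount = 1,
      .pCommandBuffers = &cmd_buf,
   };
   vkQueueSubmit(queue, 1, &submit, VK_NULL_HANDLE);

   /* Host-side poll; a real application would do useful work in between. */
   while (vkGetEventStatus(device, event) != VK_EVENT_SET)
      ;

   vkDestroyEvent(device, event, NULL);
}
```

Because the status is only ever polled from the host in this pattern, no GPU-side wait is required, which is what makes the gen7 shortcut in this patch viable.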
Diffstat (limited to 'src')
| | | |
|---|---|---|
| -rw-r--r-- | src/intel/vulkan/gen7_cmd_buffer.c | 38 |
| -rw-r--r-- | src/intel/vulkan/gen8_cmd_buffer.c | 102 |
| -rw-r--r-- | src/intel/vulkan/genX_cmd_buffer.c | 107 |
3 files changed, 107 insertions, 140 deletions
```diff
diff --git a/src/intel/vulkan/gen7_cmd_buffer.c b/src/intel/vulkan/gen7_cmd_buffer.c
index 380283bdd56..115d12b3536 100644
--- a/src/intel/vulkan/gen7_cmd_buffer.c
+++ b/src/intel/vulkan/gen7_cmd_buffer.c
@@ -286,41 +286,3 @@ genX(cmd_buffer_enable_pma_fix)(struct anv_cmd_buffer *cmd_buffer,
 {
    /* The NP PMA fix doesn't exist on gen7 */
 }
-
-void genX(CmdSetEvent)(
-    VkCommandBuffer                             commandBuffer,
-    VkEvent                                     event,
-    VkPipelineStageFlags                        stageMask)
-{
-   anv_finishme("Implement events on gen7");
-}
-
-void genX(CmdResetEvent)(
-    VkCommandBuffer                             commandBuffer,
-    VkEvent                                     event,
-    VkPipelineStageFlags                        stageMask)
-{
-   anv_finishme("Implement events on gen7");
-}
-
-void genX(CmdWaitEvents)(
-    VkCommandBuffer                             commandBuffer,
-    uint32_t                                    eventCount,
-    const VkEvent*                              pEvents,
-    VkPipelineStageFlags                        srcStageMask,
-    VkPipelineStageFlags                        destStageMask,
-    uint32_t                                    memoryBarrierCount,
-    const VkMemoryBarrier*                      pMemoryBarriers,
-    uint32_t                                    bufferMemoryBarrierCount,
-    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
-    uint32_t                                    imageMemoryBarrierCount,
-    const VkImageMemoryBarrier*                 pImageMemoryBarriers)
-{
-   anv_finishme("Implement events on gen7");
-
-   genX(CmdPipelineBarrier)(commandBuffer, srcStageMask, destStageMask,
-                            false, /* byRegion */
-                            memoryBarrierCount, pMemoryBarriers,
-                            bufferMemoryBarrierCount, pBufferMemoryBarriers,
-                            imageMemoryBarrierCount, pImageMemoryBarriers);
-}
diff --git a/src/intel/vulkan/gen8_cmd_buffer.c b/src/intel/vulkan/gen8_cmd_buffer.c
index 6568d2c7511..2e6d9de9f4a 100644
--- a/src/intel/vulkan/gen8_cmd_buffer.c
+++ b/src/intel/vulkan/gen8_cmd_buffer.c
@@ -565,105 +565,3 @@ void genX(CmdBindIndexBuffer)(
 
    cmd_buffer->state.gfx.dirty |= ANV_CMD_DIRTY_INDEX_BUFFER;
 }
-
-/* Set of stage bits for which are pipelined, i.e. they get queued by the
- * command streamer for later execution.
- */
-#define ANV_PIPELINE_STAGE_PIPELINED_BITS \
-   (VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | \
-    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | \
-    VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT | \
-    VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | \
-    VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT | \
-    VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | \
-    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | \
-    VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | \
-    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | \
-    VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | \
-    VK_PIPELINE_STAGE_TRANSFER_BIT | \
-    VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT | \
-    VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT | \
-    VK_PIPELINE_STAGE_ALL_COMMANDS_BIT)
-
-void genX(CmdSetEvent)(
-    VkCommandBuffer                             commandBuffer,
-    VkEvent                                     _event,
-    VkPipelineStageFlags                        stageMask)
-{
-   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
-   ANV_FROM_HANDLE(anv_event, event, _event);
-
-   anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {
-      if (stageMask & ANV_PIPELINE_STAGE_PIPELINED_BITS) {
-         pc.StallAtPixelScoreboard = true;
-         pc.CommandStreamerStallEnable = true;
-      }
-
-      pc.DestinationAddressType = DAT_PPGTT,
-      pc.PostSyncOperation = WriteImmediateData,
-      pc.Address = (struct anv_address) {
-         cmd_buffer->device->dynamic_state_pool.block_pool.bo,
-         event->state.offset
-      };
-      pc.ImmediateData = VK_EVENT_SET;
-   }
-}
-
-void genX(CmdResetEvent)(
-    VkCommandBuffer                             commandBuffer,
-    VkEvent                                     _event,
-    VkPipelineStageFlags                        stageMask)
-{
-   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
-   ANV_FROM_HANDLE(anv_event, event, _event);
-
-   anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {
-      if (stageMask & ANV_PIPELINE_STAGE_PIPELINED_BITS) {
-         pc.StallAtPixelScoreboard = true;
-         pc.CommandStreamerStallEnable = true;
-      }
-
-      pc.DestinationAddressType = DAT_PPGTT;
-      pc.PostSyncOperation = WriteImmediateData;
-      pc.Address = (struct anv_address) {
-         cmd_buffer->device->dynamic_state_pool.block_pool.bo,
-         event->state.offset
-      };
-      pc.ImmediateData = VK_EVENT_RESET;
-   }
-}
-
-void genX(CmdWaitEvents)(
-    VkCommandBuffer                             commandBuffer,
-    uint32_t                                    eventCount,
-    const VkEvent*                              pEvents,
-    VkPipelineStageFlags                        srcStageMask,
-    VkPipelineStageFlags                        destStageMask,
-    uint32_t                                    memoryBarrierCount,
-    const VkMemoryBarrier*                      pMemoryBarriers,
-    uint32_t                                    bufferMemoryBarrierCount,
-    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
-    uint32_t                                    imageMemoryBarrierCount,
-    const VkImageMemoryBarrier*                 pImageMemoryBarriers)
-{
-   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
-   for (uint32_t i = 0; i < eventCount; i++) {
-      ANV_FROM_HANDLE(anv_event, event, pEvents[i]);
-
-      anv_batch_emit(&cmd_buffer->batch, GENX(MI_SEMAPHORE_WAIT), sem) {
-         sem.WaitMode = PollingMode,
-         sem.CompareOperation = COMPARE_SAD_EQUAL_SDD,
-         sem.SemaphoreDataDword = VK_EVENT_SET,
-         sem.SemaphoreAddress = (struct anv_address) {
-            cmd_buffer->device->dynamic_state_pool.block_pool.bo,
-            event->state.offset
-         };
-      }
-   }
-
-   genX(CmdPipelineBarrier)(commandBuffer, srcStageMask, destStageMask,
-                            false, /* byRegion */
-                            memoryBarrierCount, pMemoryBarriers,
-                            bufferMemoryBarrierCount, pBufferMemoryBarriers,
-                            imageMemoryBarrierCount, pImageMemoryBarriers);
-}
diff --git a/src/intel/vulkan/genX_cmd_buffer.c b/src/intel/vulkan/genX_cmd_buffer.c
index 5534b2e39a5..1caa0a8ce43 100644
--- a/src/intel/vulkan/genX_cmd_buffer.c
+++ b/src/intel/vulkan/genX_cmd_buffer.c
@@ -4754,3 +4754,110 @@ void genX(CmdEndConditionalRenderingEXT)(
    cmd_state->conditional_render_enabled = false;
 }
 #endif
+
+/* Set of stage bits for which are pipelined, i.e. they get queued by the
+ * command streamer for later execution.
+ */
+#define ANV_PIPELINE_STAGE_PIPELINED_BITS \
+   (VK_PIPELINE_STAGE_VERTEX_INPUT_BIT | \
+    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | \
+    VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT | \
+    VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT | \
+    VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT | \
+    VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | \
+    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | \
+    VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT | \
+    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | \
+    VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | \
+    VK_PIPELINE_STAGE_TRANSFER_BIT | \
+    VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT | \
+    VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT | \
+    VK_PIPELINE_STAGE_ALL_COMMANDS_BIT)
+
+void genX(CmdSetEvent)(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     _event,
+    VkPipelineStageFlags                        stageMask)
+{
+   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
+   ANV_FROM_HANDLE(anv_event, event, _event);
+
+   anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {
+      if (stageMask & ANV_PIPELINE_STAGE_PIPELINED_BITS) {
+         pc.StallAtPixelScoreboard = true;
+         pc.CommandStreamerStallEnable = true;
+      }
+
+      pc.DestinationAddressType = DAT_PPGTT,
+      pc.PostSyncOperation = WriteImmediateData,
+      pc.Address = (struct anv_address) {
+         cmd_buffer->device->dynamic_state_pool.block_pool.bo,
+         event->state.offset
+      };
+      pc.ImmediateData = VK_EVENT_SET;
+   }
+}
+
+void genX(CmdResetEvent)(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     _event,
+    VkPipelineStageFlags                        stageMask)
+{
+   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
+   ANV_FROM_HANDLE(anv_event, event, _event);
+
+   anv_batch_emit(&cmd_buffer->batch, GENX(PIPE_CONTROL), pc) {
+      if (stageMask & ANV_PIPELINE_STAGE_PIPELINED_BITS) {
+         pc.StallAtPixelScoreboard = true;
+         pc.CommandStreamerStallEnable = true;
+      }
+
+      pc.DestinationAddressType = DAT_PPGTT;
+      pc.PostSyncOperation = WriteImmediateData;
+      pc.Address = (struct anv_address) {
+         cmd_buffer->device->dynamic_state_pool.block_pool.bo,
+         event->state.offset
+      };
+      pc.ImmediateData = VK_EVENT_RESET;
+   }
+}
+
+void genX(CmdWaitEvents)(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        destStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers)
+{
+#if GEN_GEN >= 8
+   ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, commandBuffer);
+
+   for (uint32_t i = 0; i < eventCount; i++) {
+      ANV_FROM_HANDLE(anv_event, event, pEvents[i]);
+
+      anv_batch_emit(&cmd_buffer->batch, GENX(MI_SEMAPHORE_WAIT), sem) {
+         sem.WaitMode = PollingMode,
+         sem.CompareOperation = COMPARE_SAD_EQUAL_SDD,
+         sem.SemaphoreDataDword = VK_EVENT_SET,
+         sem.SemaphoreAddress = (struct anv_address) {
+            cmd_buffer->device->dynamic_state_pool.block_pool.bo,
+            event->state.offset
+         };
+      }
+   }
+#else
+   anv_finishme("Implement events on gen7");
+#endif
+
+   genX(CmdPipelineBarrier)(commandBuffer, srcStageMask, destStageMask,
+                            false, /* byRegion */
+                            memoryBarrierCount, pMemoryBarriers,
+                            bufferMemoryBarrierCount, pBufferMemoryBarriers,
+                            imageMemoryBarrierCount, pImageMemoryBarriers);
+}
```
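
A closing note on how the two halves meet, with a hypothetical sketch (this is not code from the patch or from anv): CmdSetEvent/CmdResetEvent above write VK_EVENT_SET or VK_EVENT_RESET into a dword in the dynamic state pool BO via a PIPE_CONTROL post-sync immediate write, so a host-side status query only needs to read that dword back from CPU-mapped memory. Assuming a mapped pointer at event->state.offset, the query reduces to something like:

```c
#include <vulkan/vulkan.h>

/* Hypothetical illustration only (not the actual anv implementation): the
 * PIPE_CONTROL post-sync write stores VK_EVENT_SET or VK_EVENT_RESET into a
 * dword in the dynamic state pool BO, so a host-side status query can reduce
 * to reading that dword back.  'status' stands in for a pointer into the
 * CPU-mapped BO at event->state.offset. */
static VkResult
event_status_from_mapped_dword(const volatile uint32_t *status)
{
   /* VK_EVENT_SET and VK_EVENT_RESET are themselves VkResult values, which
    * is why the same constants can serve both as the immediate data the GPU
    * writes and as the return value of vkGetEventStatus(). */
   return (*status == VK_EVENT_SET) ? VK_EVENT_SET : VK_EVENT_RESET;
}
```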