author     Fredrik Höglund <[email protected]>   2017-03-29 19:19:47 +0200
committer  Fredrik Höglund <[email protected]>   2017-04-07 00:54:46 +0200
commit     c1f8c83cb66b765f496b7f41259109d0d546bf5c
tree       73877c23f42ba4890966bb85fbef9e1dfda4c599
parent     c6487bc48bb084ec3d4a3c331239d690dfc94436
radv: implement VK_KHR_descriptor_update_template
All offsets and strides are precomputed by
radv_CreateDescriptorUpdateTemplateKHR and stored in the template.
v2: Move the new struct declarations from radv_descriptor_set.h
to radv_private.h (Bas)
Signed-off-by: Fredrik Höglund <[email protected]>
Reviewed-by: Bas Nieuwenhuizen <[email protected]>
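For context, the sketch below shows roughly how an application consumes this extension; it is hypothetical and not part of the commit. The frame_data struct, the binding layout (binding 0 = uniform buffer, binding 1 = combined image sampler) and the update_set() helper are illustrative assumptions; the entry points are loaded through vkGetDeviceProcAddr() since this is an extension to Vulkan 1.0. The offset/stride fields of each VkDescriptorUpdateTemplateEntryKHR describe where the descriptor data lives in the application's memory, while the dst_* offsets precomputed by this commit are the driver-side counterpart.

/* Hypothetical application-side usage sketch (not part of this commit). */
#include <stddef.h>
#include <vulkan/vulkan.h>

struct frame_data {
	VkDescriptorBufferInfo ubo;     /* binding 0 */
	VkDescriptorImageInfo  texture; /* binding 1 */
};

static void
update_set(VkDevice device, VkDescriptorSetLayout set_layout,
           VkDescriptorSet set, const struct frame_data *data)
{
	const VkDescriptorUpdateTemplateEntryKHR entries[] = {
		{ .dstBinding = 0, .dstArrayElement = 0, .descriptorCount = 1,
		  .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
		  .offset = offsetof(struct frame_data, ubo),
		  .stride = sizeof(VkDescriptorBufferInfo) },
		{ .dstBinding = 1, .dstArrayElement = 0, .descriptorCount = 1,
		  .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
		  .offset = offsetof(struct frame_data, texture),
		  .stride = sizeof(VkDescriptorImageInfo) },
	};
	const VkDescriptorUpdateTemplateCreateInfoKHR info = {
		.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
		.descriptorUpdateEntryCount = 2,
		.pDescriptorUpdateEntries = entries,
		.templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR,
		.descriptorSetLayout = set_layout,
	};
	VkDescriptorUpdateTemplateKHR templ;

	/* Extension entry points are fetched at runtime in Vulkan 1.0. */
	PFN_vkCreateDescriptorUpdateTemplateKHR create_template =
		(PFN_vkCreateDescriptorUpdateTemplateKHR)
		vkGetDeviceProcAddr(device, "vkCreateDescriptorUpdateTemplateKHR");
	PFN_vkUpdateDescriptorSetWithTemplateKHR update_with_template =
		(PFN_vkUpdateDescriptorSetWithTemplateKHR)
		vkGetDeviceProcAddr(device, "vkUpdateDescriptorSetWithTemplateKHR");
	PFN_vkDestroyDescriptorUpdateTemplateKHR destroy_template =
		(PFN_vkDestroyDescriptorUpdateTemplateKHR)
		vkGetDeviceProcAddr(device, "vkDestroyDescriptorUpdateTemplateKHR");

	/* radv_CreateDescriptorUpdateTemplateKHR resolves each entry's
	 * dstBinding/dstArrayElement into set offsets once, here... */
	create_template(device, &info, NULL, &templ);
	/* ...so this update is a plain copy loop over the template entries. */
	update_with_template(device, set, templ, data);
	destroy_template(device, templ, NULL);
}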
 src/amd/vulkan/radv_cmd_buffer.c       |  24 +
 src/amd/vulkan/radv_descriptor_set.c   | 163 +
 src/amd/vulkan/radv_device.c           |   4 +
 src/amd/vulkan/radv_entrypoints_gen.py |   1 +
 src/amd/vulkan/radv_private.h          |  39 +
 5 files changed, 231 insertions(+), 0 deletions(-)
diff --git a/src/amd/vulkan/radv_cmd_buffer.c b/src/amd/vulkan/radv_cmd_buffer.c
index 46c139a8fcb..992dc123a21 100644
--- a/src/amd/vulkan/radv_cmd_buffer.c
+++ b/src/amd/vulkan/radv_cmd_buffer.c
@@ -1988,6 +1988,30 @@ void radv_CmdPushDescriptorSetKHR(
 	cmd_buffer->state.push_descriptors_dirty = true;
 }
 
+void radv_CmdPushDescriptorSetWithTemplateKHR(
+	VkCommandBuffer commandBuffer,
+	VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+	VkPipelineLayout _layout,
+	uint32_t set,
+	const void* pData)
+{
+	RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, commandBuffer);
+	RADV_FROM_HANDLE(radv_pipeline_layout, layout, _layout);
+	struct radv_descriptor_set *push_set = &cmd_buffer->push_descriptors.set;
+
+	assert(layout->set[set].layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
+
+	if (!radv_init_push_descriptor_set(cmd_buffer, push_set, layout->set[set].layout))
+		return;
+
+	radv_update_descriptor_set_with_template(cmd_buffer->device, cmd_buffer, push_set,
+						 descriptorUpdateTemplate, pData);
+
+	cmd_buffer->state.descriptors[set] = push_set;
+	cmd_buffer->state.descriptors_dirty |= (1 << set);
+	cmd_buffer->state.push_descriptors_dirty = true;
+}
+
 void radv_CmdPushConstants(VkCommandBuffer commandBuffer,
 			   VkPipelineLayout layout,
 			   VkShaderStageFlags stageFlags,
diff --git a/src/amd/vulkan/radv_descriptor_set.c b/src/amd/vulkan/radv_descriptor_set.c
index 0ff9aa206d2..c2bf006f342 100644
--- a/src/amd/vulkan/radv_descriptor_set.c
+++ b/src/amd/vulkan/radv_descriptor_set.c
@@ -742,3 +742,166 @@ void radv_UpdateDescriptorSets(
 	radv_update_descriptor_sets(device, NULL, VK_NULL_HANDLE, descriptorWriteCount, pDescriptorWrites,
 				    descriptorCopyCount, pDescriptorCopies);
 }
+
+VkResult radv_CreateDescriptorUpdateTemplateKHR(VkDevice _device,
+                                                const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo,
+                                                const VkAllocationCallbacks *pAllocator,
+                                                VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate)
+{
+	RADV_FROM_HANDLE(radv_device, device, _device);
+	RADV_FROM_HANDLE(radv_descriptor_set_layout, set_layout, pCreateInfo->descriptorSetLayout);
+	const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
+	const size_t size = sizeof(struct radv_descriptor_update_template) +
+		sizeof(struct radv_descriptor_update_template_entry) * entry_count;
+	struct radv_descriptor_update_template *templ;
+	uint32_t i;
+
+	templ = vk_alloc2(&device->alloc, pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+	if (!templ)
+		return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+
+	templ->entry_count = entry_count;
+
+	for (i = 0; i < entry_count; i++) {
+		const VkDescriptorUpdateTemplateEntryKHR *entry = &pCreateInfo->pDescriptorUpdateEntries[i];
+		const struct radv_descriptor_set_binding_layout *binding_layout =
+			set_layout->binding + entry->dstBinding;
+		const uint32_t buffer_offset = binding_layout->buffer_offset +
+			binding_layout->buffer_count * entry->dstArrayElement;
+		uint32_t *immutable_samplers = NULL;
+		uint16_t dst_offset;
+		uint16_t dst_stride;
+
+		/* dst_offset is an offset into dynamic_descriptors when the descriptor
+		   is dynamic, and an offset into mapped_ptr otherwise */
+		switch (entry->descriptorType) {
+		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+			assert(pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR);
+			dst_offset = binding_layout->dynamic_offset_offset + entry->dstArrayElement;
+			dst_stride = 0; /* Not used */
+			break;
+		default:
+			switch (entry->descriptorType) {
+			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+			case VK_DESCRIPTOR_TYPE_SAMPLER:
+				/* Immutable samplers are copied into push descriptors when they are pushed */
+				if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR &&
+				    binding_layout->immutable_samplers && !binding_layout->immutable_samplers_equal)
+					immutable_samplers = binding_layout->immutable_samplers + entry->dstArrayElement * 4;
+				break;
+			default:
+				break;
+			}
+			dst_offset = binding_layout->offset / 4 + binding_layout->size * entry->dstArrayElement / 4;
+			dst_stride = binding_layout->size / 4;
+			break;
+		}
+
+		templ->entry[i] = (struct radv_descriptor_update_template_entry) {
+			.descriptor_type = entry->descriptorType,
+			.descriptor_count = entry->descriptorCount,
+			.src_offset = entry->offset,
+			.src_stride = entry->stride,
+			.dst_offset = dst_offset,
+			.dst_stride = dst_stride,
+			.buffer_offset = buffer_offset,
+			.buffer_count = binding_layout->buffer_count,
+			.has_sampler = !binding_layout->immutable_samplers,
+			.immutable_samplers = immutable_samplers
+		};
+	}
+
+	*pDescriptorUpdateTemplate = radv_descriptor_update_template_to_handle(templ);
+	return VK_SUCCESS;
+}
+
+void radv_DestroyDescriptorUpdateTemplateKHR(VkDevice _device,
+                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                             const VkAllocationCallbacks *pAllocator)
+{
+	RADV_FROM_HANDLE(radv_device, device, _device);
+	RADV_FROM_HANDLE(radv_descriptor_update_template, templ, descriptorUpdateTemplate);
+
+	if (!templ)
+		return;
+
+	vk_free2(&device->alloc, pAllocator, templ);
+}
+
+void radv_update_descriptor_set_with_template(struct radv_device *device,
+                                              struct radv_cmd_buffer *cmd_buffer,
+                                              struct radv_descriptor_set *set,
+                                              VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                              const void *pData)
+{
+	RADV_FROM_HANDLE(radv_descriptor_update_template, templ, descriptorUpdateTemplate);
+	uint32_t i;
+
+	for (i = 0; i < templ->entry_count; ++i) {
+		struct radeon_winsys_bo **buffer_list = set->descriptors + templ->entry[i].buffer_offset;
+		uint32_t *pDst = set->mapped_ptr + templ->entry[i].dst_offset;
+		const uint8_t *pSrc = ((const uint8_t *) pData) + templ->entry[i].src_offset;
+		uint32_t j;
+
+		for (j = 0; j < templ->entry[i].descriptor_count; ++j) {
+			switch (templ->entry[i].descriptor_type) {
+			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
+				const unsigned idx = templ->entry[i].dst_offset + j;
+				assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
+				write_dynamic_buffer_descriptor(device, set->dynamic_descriptors + idx,
+								buffer_list, (struct VkDescriptorBufferInfo *) pSrc);
+				break;
+			}
+			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+				write_buffer_descriptor(device, cmd_buffer, pDst, buffer_list,
+							(struct VkDescriptorBufferInfo *) pSrc);
+				break;
+			case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+			case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+				write_texel_buffer_descriptor(device, cmd_buffer, pDst, buffer_list,
+							      *(VkBufferView *) pSrc);
+				break;
+			case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+			case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+			case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
+				write_image_descriptor(device, cmd_buffer, pDst, buffer_list,
+						       (struct VkDescriptorImageInfo *) pSrc);
+				break;
+			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+				write_combined_image_sampler_descriptor(device, cmd_buffer, pDst, buffer_list,
+									(struct VkDescriptorImageInfo *) pSrc,
+									templ->entry[i].has_sampler);
+				if (templ->entry[i].immutable_samplers)
+					memcpy(pDst + 16, templ->entry[i].immutable_samplers + 4 * j, 16);
+				break;
+			case VK_DESCRIPTOR_TYPE_SAMPLER:
+				if (templ->entry[i].has_sampler)
+					write_sampler_descriptor(device, pDst,
+								 (struct VkDescriptorImageInfo *) pSrc);
+				else if (templ->entry[i].immutable_samplers)
+					memcpy(pDst, templ->entry[i].immutable_samplers + 4 * j, 16);
+				break;
+			default:
+				unreachable("unimplemented descriptor type");
+				break;
+			}
+			pSrc += templ->entry[i].src_stride;
+			pDst += templ->entry[i].dst_stride;
+			buffer_list += templ->entry[i].buffer_count;
+		}
+	}
+}
+
+void radv_UpdateDescriptorSetWithTemplateKHR(VkDevice _device,
+                                             VkDescriptorSet descriptorSet,
+                                             VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                             const void *pData)
+{
+	RADV_FROM_HANDLE(radv_device, device, _device);
+	RADV_FROM_HANDLE(radv_descriptor_set, set, descriptorSet);
+
+	radv_update_descriptor_set_with_template(device, NULL, set, descriptorUpdateTemplate, pData);
+}
diff --git a/src/amd/vulkan/radv_device.c b/src/amd/vulkan/radv_device.c
index 059c013f359..9e8faa3da9a 100644
--- a/src/amd/vulkan/radv_device.c
+++ b/src/amd/vulkan/radv_device.c
@@ -92,6 +92,10 @@ static const VkExtensionProperties instance_extensions[] = {
 
 static const VkExtensionProperties common_device_extensions[] = {
 	{
+		.extensionName = VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME,
+		.specVersion = 1,
+	},
+	{
 		.extensionName = VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME,
 		.specVersion = 1,
 	},
diff --git a/src/amd/vulkan/radv_entrypoints_gen.py b/src/amd/vulkan/radv_entrypoints_gen.py
index 4651c63dcf8..3474c789ea8 100644
--- a/src/amd/vulkan/radv_entrypoints_gen.py
+++ b/src/amd/vulkan/radv_entrypoints_gen.py
@@ -30,6 +30,7 @@ max_api_version = 1.0
 supported_extensions = [
    'VK_AMD_draw_indirect_count',
    'VK_NV_dedicated_allocation',
+   'VK_KHR_descriptor_update_template',
    'VK_KHR_get_physical_device_properties2',
    'VK_KHR_incremental_present',
    'VK_KHR_maintenance1',
diff --git a/src/amd/vulkan/radv_private.h b/src/amd/vulkan/radv_private.h
index e39819fff3e..580c1197e64 100644
--- a/src/amd/vulkan/radv_private.h
+++ b/src/amd/vulkan/radv_private.h
@@ -565,6 +565,37 @@ struct radv_descriptor_pool {
 	struct list_head vram_list;
 };
 
+struct radv_descriptor_update_template_entry {
+	VkDescriptorType descriptor_type;
+
+	/* The number of descriptors to update */
+	uint16_t descriptor_count;
+
+	/* Into mapped_ptr or dynamic_descriptors, in units of the respective array */
+	uint16_t dst_offset;
+
+	/* In dwords. Not valid/used for dynamic descriptors */
+	uint16_t dst_stride;
+
+	uint16_t buffer_offset;
+	uint16_t buffer_count;
+
+	/* Only valid for combined image samplers and samplers */
+	uint16_t has_sampler;
+
+	/* In bytes */
+	size_t src_offset;
+	size_t src_stride;
+
+	/* For push descriptors */
+	uint32_t *immutable_samplers;
+};
+
+struct radv_descriptor_update_template {
+	uint32_t entry_count;
+	struct radv_descriptor_update_template_entry entry[0];
+};
+
 struct radv_buffer {
 	struct radv_device *	device;
 	VkDeviceSize		size;
@@ -1363,6 +1394,13 @@ radv_update_descriptor_sets(struct radv_device *device,
                             uint32_t descriptorCopyCount,
                             const VkCopyDescriptorSet *pDescriptorCopies);
 
+void
+radv_update_descriptor_set_with_template(struct radv_device *device,
+                                         struct radv_cmd_buffer *cmd_buffer,
+                                         struct radv_descriptor_set *set,
+                                         VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate,
+                                         const void *pData);
+
 void radv_initialise_cmask(struct radv_cmd_buffer *cmd_buffer,
 			   struct radv_image *image, uint32_t value);
 void radv_initialize_dcc(struct radv_cmd_buffer *cmd_buffer,
@@ -1419,6 +1457,7 @@ RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_buffer_view, VkBufferView)
 RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_descriptor_pool, VkDescriptorPool)
 RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_descriptor_set, VkDescriptorSet)
 RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_descriptor_set_layout, VkDescriptorSetLayout)
+RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_descriptor_update_template, VkDescriptorUpdateTemplateKHR)
 RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_device_memory, VkDeviceMemory)
 RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_fence, VkFence)
 RADV_DEFINE_NONDISP_HANDLE_CASTS(radv_event, VkEvent)
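The commit also wires the template path into push descriptors via radv_CmdPushDescriptorSetWithTemplateKHR above. The following is a hypothetical sketch of that path, assuming a template created with templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR (which additionally requires pipelineBindPoint, pipelineLayout and set in the create info), a compatible pipeline layout, and a command buffer in the recording state; push_frame_descriptors() and packed_data are illustrative names.

#include <vulkan/vulkan.h>

/* Hypothetical sketch: pushing descriptors through a template created with
 * VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR. */
static void
push_frame_descriptors(VkDevice device, VkCommandBuffer cmd,
                       VkDescriptorUpdateTemplateKHR templ,
                       VkPipelineLayout layout, const void *packed_data)
{
	PFN_vkCmdPushDescriptorSetWithTemplateKHR push_with_template =
		(PFN_vkCmdPushDescriptorSetWithTemplateKHR)
		vkGetDeviceProcAddr(device, "vkCmdPushDescriptorSetWithTemplateKHR");

	/* Lands in radv_CmdPushDescriptorSetWithTemplateKHR: the driver
	 * initializes its internal push descriptor set and reuses
	 * radv_update_descriptor_set_with_template() to fill it from
	 * packed_data using the precomputed offsets and strides. */
	push_with_template(cmd, templ, layout, 0 /* set */, packed_data);
}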