path: root/src/intel/vulkan/anv_formats.c
author    Samuel Iglesias Gonsálvez <[email protected]>    2016-11-15 12:49:38 +0100
committer Samuel Iglesias Gonsálvez <[email protected]>    2017-01-09 09:10:13 +0100
commit    3551a2d3ad2661477ba3b0a36a13eeb68e28fe85 (patch)
tree      98132511479140c3c6609218efee6a71dfe1b2ba /src/intel/vulkan/anv_formats.c
parent    1c9483f48e7ee7dd32847352e95c5151dfb0f3f2 (diff)
isl: fix VA64 support for double and dvecN vertex attributes
We use *64*_PASSTHRU formats to upload vertex attributes of 64 bits to
avoid conversions. From the BDW PRM, Volume 2d, page 586
(VERTEX_ELEMENT_STATE):

   "When SourceElementFormat is set to one of the *64*_PASSTHRU formats,
   64-bit components are stored in the URB without any conversion. In
   this case, vertex elements must be written as 128 or 256 bits, with
   VFCOMP_STORE_0 being used to pad the output as required. E.g., if
   R64_PASSTHRU is used to copy a 64-bit Red component into the URB,
   Component 1 must be specified as VFCOMP_STORE_0 (with Components 2,3
   set to VFCOMP_NOSTORE) in order to output a 128-bit vertex element,
   or Components 1-3 must be specified as VFCOMP_STORE_0 in order to
   output a 256-bit vertex element. Likewise, use of R64G64B64_PASSTHRU
   requires Component 3 to be specified as VFCOMP_STORE_0 in order to
   output a 256-bit vertex element."

v2, v3 (Jason):
- Don't delete unused formats.

Signed-off-by: Samuel Iglesias Gonsálvez <[email protected]>
Reviewed-by: Jason Ekstrand <[email protected]>
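To make the PRM requirement above concrete, here is a minimal, self-contained
C sketch of how the component controls for a *64*_PASSTHRU vertex element
could be chosen. The enum values echo the VFCOMP_* names from the quote; the
struct vertex_element_comps type and the passthru_components() helper are
hypothetical illustrations, not the structures anv actually programs through
its hardware packing code.

/*
 * Illustrative sketch only (not the driver's actual code): a simplified
 * stand-in for the VERTEX_ELEMENT_STATE component controls described in
 * the PRM quote above.  The enum values mirror the PRM names; the struct
 * and helper are hypothetical.
 */
enum vfcomp {
   VFCOMP_NOSTORE,
   VFCOMP_STORE_SRC,
   VFCOMP_STORE_0,
};

struct vertex_element_comps {
   enum vfcomp comp[4];
};

/* Pad a *64*_PASSTHRU vertex element to a legal 128- or 256-bit write. */
static struct vertex_element_comps
passthru_components(unsigned num_64bit_comps)
{
   struct vertex_element_comps ve = {
      .comp = { VFCOMP_NOSTORE, VFCOMP_NOSTORE,
                VFCOMP_NOSTORE, VFCOMP_NOSTORE },
   };

   switch (num_64bit_comps) {
   case 1: /* R64_PASSTHRU: one STORE_0 pads it to a 128-bit element */
      ve.comp[0] = VFCOMP_STORE_SRC;
      ve.comp[1] = VFCOMP_STORE_0;
      break;
   case 2: /* R64G64_PASSTHRU: already a full 128-bit element */
      ve.comp[0] = VFCOMP_STORE_SRC;
      ve.comp[1] = VFCOMP_STORE_SRC;
      break;
   case 3: /* R64G64B64_PASSTHRU: STORE_0 in component 3 -> 256 bits */
      ve.comp[0] = VFCOMP_STORE_SRC;
      ve.comp[1] = VFCOMP_STORE_SRC;
      ve.comp[2] = VFCOMP_STORE_SRC;
      ve.comp[3] = VFCOMP_STORE_0;
      break;
   case 4: /* R64G64B64A64_PASSTHRU: full 256-bit element, no padding */
      for (unsigned i = 0; i < 4; i++)
         ve.comp[i] = VFCOMP_STORE_SRC;
      break;
   }

   return ve;
}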
Diffstat (limited to 'src/intel/vulkan/anv_formats.c')
-rw-r--r--   src/intel/vulkan/anv_formats.c   8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/src/intel/vulkan/anv_formats.c b/src/intel/vulkan/anv_formats.c
index 1ee4d0fa191..a5d783e6897 100644
--- a/src/intel/vulkan/anv_formats.c
+++ b/src/intel/vulkan/anv_formats.c
@@ -156,16 +156,16 @@ static const struct anv_format anv_formats[] = {
fmt(VK_FORMAT_R32G32B32A32_SFLOAT, ISL_FORMAT_R32G32B32A32_FLOAT),
fmt(VK_FORMAT_R64_UINT, ISL_FORMAT_R64_PASSTHRU),
fmt(VK_FORMAT_R64_SINT, ISL_FORMAT_R64_PASSTHRU),
- fmt(VK_FORMAT_R64_SFLOAT, ISL_FORMAT_R64_FLOAT),
+ fmt(VK_FORMAT_R64_SFLOAT, ISL_FORMAT_R64_PASSTHRU),
fmt(VK_FORMAT_R64G64_UINT, ISL_FORMAT_R64G64_PASSTHRU),
fmt(VK_FORMAT_R64G64_SINT, ISL_FORMAT_R64G64_PASSTHRU),
- fmt(VK_FORMAT_R64G64_SFLOAT, ISL_FORMAT_R64G64_FLOAT),
+ fmt(VK_FORMAT_R64G64_SFLOAT, ISL_FORMAT_R64G64_PASSTHRU),
fmt(VK_FORMAT_R64G64B64_UINT, ISL_FORMAT_R64G64B64_PASSTHRU),
fmt(VK_FORMAT_R64G64B64_SINT, ISL_FORMAT_R64G64B64_PASSTHRU),
- fmt(VK_FORMAT_R64G64B64_SFLOAT, ISL_FORMAT_R64G64B64_FLOAT),
+ fmt(VK_FORMAT_R64G64B64_SFLOAT, ISL_FORMAT_R64G64B64_PASSTHRU),
fmt(VK_FORMAT_R64G64B64A64_UINT, ISL_FORMAT_R64G64B64A64_PASSTHRU),
fmt(VK_FORMAT_R64G64B64A64_SINT, ISL_FORMAT_R64G64B64A64_PASSTHRU),
- fmt(VK_FORMAT_R64G64B64A64_SFLOAT, ISL_FORMAT_R64G64B64A64_FLOAT),
+ fmt(VK_FORMAT_R64G64B64A64_SFLOAT, ISL_FORMAT_R64G64B64A64_PASSTHRU),
fmt(VK_FORMAT_B10G11R11_UFLOAT_PACK32, ISL_FORMAT_R11G11B10_FLOAT),
fmt(VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, ISL_FORMAT_R9G9B9E5_SHAREDEXP),