author      Ilia Mirkin <[email protected]>    2014-11-28 17:29:09 -0500
committer   Ilia Mirkin <[email protected]>    2014-11-29 12:15:43 -0500
commit      20fbf99595fc87efb4a7ecfd4c1bd9bed08208a5 (patch)
tree        ba51dfd5bef83d0516fcde1cccfde2a5c80d9451
parent      3338bfcf498aa93a256aaadaee25951d4864f78f (diff)
freedreno/a3xx: rename vertex/texture format enums to be more consistent
Switch both of them from independently inconsistent conventions to having UINT/SINT/UNORM/SNORM/FLOAT/FIXED suffixes.

Signed-off-by: Ilia Mirkin <[email protected]>
Reviewed-by: Rob Clark <[email protected]>
-rw-r--r--  src/gallium/drivers/freedreno/a3xx/a3xx.xml.h   | 223
-rw-r--r--  src/gallium/drivers/freedreno/a3xx/fd3_format.c | 126
2 files changed, 177 insertions, 172 deletions
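
For context, the renamed enums follow a "<width per component>_<type>" pattern, where the suffix is one of UNORM/SNORM/UINT/SINT/FLOAT/FIXED. The sketch below is a condensed, self-contained illustration of how a lookup like fd3_pipe2vtx() reads after the rename; the handful of enumerator values and old names are copied from the diff below, but the trimmed pipe_format subset, the VFMT_NONE marker, and the main() harness are simplified stand-ins added here purely for demonstration and are not part of the commit.

/* Condensed illustration of the renamed a3xx vertex-format enum and an
 * fd3_pipe2vtx()-style mapping.  Enumerator values are taken from the
 * diff; the pipe_format subset and main() are demonstration stand-ins.
 */
#include <stdio.h>

enum a3xx_vtx_fmt {
	VFMT_32_FLOAT = 0,    /* was VFMT_FLOAT_32 */
	VFMT_16_FLOAT = 4,    /* was VFMT_FLOAT_16 */
	VFMT_16_SINT  = 16,   /* was VFMT_SHORT_16 */
	VFMT_16_UNORM = 28,   /* was VFMT_NORM_USHORT_16 */
	VFMT_8_UINT   = 40,   /* was VFMT_UBYTE_8 */
	VFMT_8_SNORM  = 52,   /* was VFMT_NORM_BYTE_8 */
	VFMT_NONE     = ~0,   /* stand-in marker for "unsupported" */
};

/* Minimal stand-in for the Gallium pipe_format values used below. */
enum pipe_format {
	PIPE_FORMAT_R8_UINT,
	PIPE_FORMAT_R8_SNORM,
	PIPE_FORMAT_R16_FLOAT,
	PIPE_FORMAT_R32_FLOAT,
};

static enum a3xx_vtx_fmt
fd3_pipe2vtx(enum pipe_format format)
{
	switch (format) {
	case PIPE_FORMAT_R8_UINT:   return VFMT_8_UINT;
	case PIPE_FORMAT_R8_SNORM:  return VFMT_8_SNORM;
	case PIPE_FORMAT_R16_FLOAT: return VFMT_16_FLOAT;
	case PIPE_FORMAT_R32_FLOAT: return VFMT_32_FLOAT;
	default:                    return VFMT_NONE;
	}
}

int main(void)
{
	/* R8_UINT now maps to VFMT_8_UINT (formerly VFMT_UBYTE_8). */
	printf("R8_UINT -> %d\n", fd3_pipe2vtx(PIPE_FORMAT_R8_UINT));
	return 0;
}
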
diff --git a/src/gallium/drivers/freedreno/a3xx/a3xx.xml.h b/src/gallium/drivers/freedreno/a3xx/a3xx.xml.h
index 109e9a263da..bbaee436d64 100644
--- a/src/gallium/drivers/freedreno/a3xx/a3xx.xml.h
+++ b/src/gallium/drivers/freedreno/a3xx/a3xx.xml.h
@@ -8,13 +8,10 @@ http://github.com/freedreno/envytools/
git clone https://github.com/freedreno/envytools.git
The rules-ng-ng source files this header was generated from are:
-- /home/robclark/src/freedreno/envytools/rnndb/adreno.xml ( 364 bytes, from 2013-11-30 14:47:15)
-- /home/robclark/src/freedreno/envytools/rnndb/freedreno_copyright.xml ( 1453 bytes, from 2013-03-31 16:51:27)
-- /home/robclark/src/freedreno/envytools/rnndb/adreno/a2xx.xml ( 32901 bytes, from 2014-06-02 15:21:30)
-- /home/robclark/src/freedreno/envytools/rnndb/adreno/adreno_common.xml ( 10551 bytes, from 2014-11-13 22:44:30)
-- /home/robclark/src/freedreno/envytools/rnndb/adreno/adreno_pm4.xml ( 15053 bytes, from 2014-11-09 15:45:47)
-- /home/robclark/src/freedreno/envytools/rnndb/adreno/a3xx.xml ( 63169 bytes, from 2014-11-13 22:44:18)
-- /home/robclark/src/freedreno/envytools/rnndb/adreno/a4xx.xml ( 49097 bytes, from 2014-11-14 15:38:00)
+- /home/ilia/src/freedreno/envytools/rnndb/adreno/a3xx.xml ( 63311 bytes, from 2014-11-28 07:43:14)
+- /home/ilia/src/freedreno/envytools/rnndb/freedreno_copyright.xml ( 1453 bytes, from 2014-09-12 03:09:53)
+- /home/ilia/src/freedreno/envytools/rnndb/adreno/adreno_common.xml ( 10551 bytes, from 2014-11-28 03:50:42)
+- /home/ilia/src/freedreno/envytools/rnndb/adreno/adreno_pm4.xml ( 15053 bytes, from 2014-11-28 03:50:42)
Copyright (C) 2013-2014 by the following authors:
- Rob Clark <[email protected]> (robclark)
@@ -58,111 +55,117 @@ enum a3xx_cache_opcode {
};
enum a3xx_vtx_fmt {
- VFMT_FLOAT_32 = 0,
- VFMT_FLOAT_32_32 = 1,
- VFMT_FLOAT_32_32_32 = 2,
- VFMT_FLOAT_32_32_32_32 = 3,
- VFMT_FLOAT_16 = 4,
- VFMT_FLOAT_16_16 = 5,
- VFMT_FLOAT_16_16_16 = 6,
- VFMT_FLOAT_16_16_16_16 = 7,
- VFMT_FIXED_32 = 8,
- VFMT_FIXED_32_32 = 9,
- VFMT_FIXED_32_32_32 = 10,
- VFMT_FIXED_32_32_32_32 = 11,
- VFMT_SHORT_16 = 16,
- VFMT_SHORT_16_16 = 17,
- VFMT_SHORT_16_16_16 = 18,
- VFMT_SHORT_16_16_16_16 = 19,
- VFMT_USHORT_16 = 20,
- VFMT_USHORT_16_16 = 21,
- VFMT_USHORT_16_16_16 = 22,
- VFMT_USHORT_16_16_16_16 = 23,
- VFMT_NORM_SHORT_16 = 24,
- VFMT_NORM_SHORT_16_16 = 25,
- VFMT_NORM_SHORT_16_16_16 = 26,
- VFMT_NORM_SHORT_16_16_16_16 = 27,
- VFMT_NORM_USHORT_16 = 28,
- VFMT_NORM_USHORT_16_16 = 29,
- VFMT_NORM_USHORT_16_16_16 = 30,
- VFMT_NORM_USHORT_16_16_16_16 = 31,
- VFMT_UINT_32 = 32,
- VFMT_UINT_32_32 = 33,
- VFMT_UINT_32_32_32 = 34,
- VFMT_UINT_32_32_32_32 = 35,
- VFMT_INT_32 = 36,
- VFMT_INT_32_32 = 37,
- VFMT_INT_32_32_32 = 38,
- VFMT_INT_32_32_32_32 = 39,
- VFMT_UBYTE_8 = 40,
- VFMT_UBYTE_8_8 = 41,
- VFMT_UBYTE_8_8_8 = 42,
- VFMT_UBYTE_8_8_8_8 = 43,
- VFMT_NORM_UBYTE_8 = 44,
- VFMT_NORM_UBYTE_8_8 = 45,
- VFMT_NORM_UBYTE_8_8_8 = 46,
- VFMT_NORM_UBYTE_8_8_8_8 = 47,
- VFMT_BYTE_8 = 48,
- VFMT_BYTE_8_8 = 49,
- VFMT_BYTE_8_8_8 = 50,
- VFMT_BYTE_8_8_8_8 = 51,
- VFMT_NORM_BYTE_8 = 52,
- VFMT_NORM_BYTE_8_8 = 53,
- VFMT_NORM_BYTE_8_8_8 = 54,
- VFMT_NORM_BYTE_8_8_8_8 = 55,
- VFMT_UINT_10_10_10_2 = 60,
- VFMT_NORM_UINT_10_10_10_2 = 61,
- VFMT_INT_10_10_10_2 = 62,
- VFMT_NORM_INT_10_10_10_2 = 63,
+ VFMT_32_FLOAT = 0,
+ VFMT_32_32_FLOAT = 1,
+ VFMT_32_32_32_FLOAT = 2,
+ VFMT_32_32_32_32_FLOAT = 3,
+ VFMT_16_FLOAT = 4,
+ VFMT_16_16_FLOAT = 5,
+ VFMT_16_16_16_FLOAT = 6,
+ VFMT_16_16_16_16_FLOAT = 7,
+ VFMT_32_FIXED = 8,
+ VFMT_32_32_FIXED = 9,
+ VFMT_32_32_32_FIXED = 10,
+ VFMT_32_32_32_32_FIXED = 11,
+ VFMT_16_SINT = 16,
+ VFMT_16_16_SINT = 17,
+ VFMT_16_16_16_SINT = 18,
+ VFMT_16_16_16_16_SINT = 19,
+ VFMT_16_UINT = 20,
+ VFMT_16_16_UINT = 21,
+ VFMT_16_16_16_UINT = 22,
+ VFMT_16_16_16_16_UINT = 23,
+ VFMT_16_SNORM = 24,
+ VFMT_16_16_SNORM = 25,
+ VFMT_16_16_16_SNORM = 26,
+ VFMT_16_16_16_16_SNORM = 27,
+ VFMT_16_UNORM = 28,
+ VFMT_16_16_UNORM = 29,
+ VFMT_16_16_16_UNORM = 30,
+ VFMT_16_16_16_16_UNORM = 31,
+ VFMT_32_UINT = 32,
+ VFMT_32_32_UINT = 33,
+ VFMT_32_32_32_UINT = 34,
+ VFMT_32_32_32_32_UINT = 35,
+ VFMT_32_SINT = 36,
+ VFMT_32_32_SINT = 37,
+ VFMT_32_32_32_SINT = 38,
+ VFMT_32_32_32_32_SINT = 39,
+ VFMT_8_UINT = 40,
+ VFMT_8_8_UINT = 41,
+ VFMT_8_8_8_UINT = 42,
+ VFMT_8_8_8_8_UINT = 43,
+ VFMT_8_UNORM = 44,
+ VFMT_8_8_UNORM = 45,
+ VFMT_8_8_8_UNORM = 46,
+ VFMT_8_8_8_8_UNORM = 47,
+ VFMT_8_SINT = 48,
+ VFMT_8_8_SINT = 49,
+ VFMT_8_8_8_SINT = 50,
+ VFMT_8_8_8_8_SINT = 51,
+ VFMT_8_SNORM = 52,
+ VFMT_8_8_SNORM = 53,
+ VFMT_8_8_8_SNORM = 54,
+ VFMT_8_8_8_8_SNORM = 55,
+ VFMT_10_10_10_2_UINT = 60,
+ VFMT_10_10_10_2_UNORM = 61,
+ VFMT_10_10_10_2_SINT = 62,
+ VFMT_10_10_10_2_SNORM = 63,
};
enum a3xx_tex_fmt {
- TFMT_NORM_USHORT_565 = 4,
- TFMT_NORM_USHORT_5551 = 6,
- TFMT_NORM_USHORT_4444 = 7,
- TFMT_NORM_USHORT_Z16 = 9,
- TFMT_NORM_UINT_X8Z24 = 10,
- TFMT_FLOAT_Z32 = 11,
- TFMT_NORM_UINT_NV12_UV_TILED = 17,
- TFMT_NORM_UINT_NV12_Y_TILED = 19,
- TFMT_NORM_UINT_NV12_UV = 21,
- TFMT_NORM_UINT_NV12_Y = 23,
- TFMT_NORM_UINT_I420_Y = 24,
- TFMT_NORM_UINT_I420_U = 26,
- TFMT_NORM_UINT_I420_V = 27,
- TFMT_NORM_UINT_2_10_10_10 = 41,
- TFMT_FLOAT_9_9_9_E5 = 42,
- TFMT_FLOAT_10_11_11 = 43,
- TFMT_NORM_UINT_A8 = 44,
- TFMT_NORM_UINT_L8_A8 = 47,
- TFMT_NORM_UINT_8 = 48,
- TFMT_NORM_UINT_8_8 = 49,
- TFMT_NORM_UINT_8_8_8 = 50,
- TFMT_NORM_UINT_8_8_8_8 = 51,
- TFMT_NORM_SINT_8_8 = 53,
- TFMT_NORM_SINT_8_8_8_8 = 55,
- TFMT_UINT_8_8 = 57,
- TFMT_UINT_8_8_8_8 = 59,
- TFMT_SINT_8_8 = 61,
- TFMT_SINT_8_8_8_8 = 63,
- TFMT_FLOAT_16 = 64,
- TFMT_FLOAT_16_16 = 65,
- TFMT_FLOAT_16_16_16_16 = 67,
- TFMT_UINT_16 = 68,
- TFMT_UINT_16_16 = 69,
- TFMT_UINT_16_16_16_16 = 71,
- TFMT_SINT_16 = 72,
- TFMT_SINT_16_16 = 73,
- TFMT_SINT_16_16_16_16 = 75,
- TFMT_FLOAT_32 = 84,
- TFMT_FLOAT_32_32 = 85,
- TFMT_FLOAT_32_32_32_32 = 87,
- TFMT_UINT_32 = 88,
- TFMT_UINT_32_32 = 89,
- TFMT_UINT_32_32_32_32 = 91,
- TFMT_SINT_32 = 92,
- TFMT_SINT_32_32 = 93,
- TFMT_SINT_32_32_32_32 = 95,
+ TFMT_5_6_5_UNORM = 4,
+ TFMT_5_5_5_1_UNORM = 5,
+ TFMT_4_4_4_4_UNORM = 7,
+ TFMT_Z16_UNORM = 9,
+ TFMT_X8Z24_UNORM = 10,
+ TFMT_Z32_FLOAT = 11,
+ TFMT_NV12_UV_TILED = 17,
+ TFMT_NV12_Y_TILED = 19,
+ TFMT_NV12_UV = 21,
+ TFMT_NV12_Y = 23,
+ TFMT_I420_Y = 24,
+ TFMT_I420_U = 26,
+ TFMT_I420_V = 27,
+ TFMT_10_10_10_2_UNORM = 41,
+ TFMT_9_9_9_E5_FLOAT = 42,
+ TFMT_11_11_10_FLOAT = 43,
+ TFMT_A8_UNORM = 44,
+ TFMT_L8_A8_UNORM = 47,
+ TFMT_8_UNORM = 48,
+ TFMT_8_8_UNORM = 49,
+ TFMT_8_8_8_UNORM = 50,
+ TFMT_8_8_8_8_UNORM = 51,
+ TFMT_8_SNORM = 52,
+ TFMT_8_8_SNORM = 53,
+ TFMT_8_8_8_SNORM = 54,
+ TFMT_8_8_8_8_SNORM = 55,
+ TFMT_8_UINT = 56,
+ TFMT_8_8_UINT = 57,
+ TFMT_8_8_8_UINT = 58,
+ TFMT_8_8_8_8_UINT = 59,
+ TFMT_8_SINT = 60,
+ TFMT_8_8_SINT = 61,
+ TFMT_8_8_8_SINT = 62,
+ TFMT_8_8_8_8_SINT = 63,
+ TFMT_16_FLOAT = 64,
+ TFMT_16_16_FLOAT = 65,
+ TFMT_16_16_16_16_FLOAT = 67,
+ TFMT_16_UINT = 68,
+ TFMT_16_16_UINT = 69,
+ TFMT_16_16_16_16_UINT = 71,
+ TFMT_16_SINT = 72,
+ TFMT_16_16_SINT = 73,
+ TFMT_16_16_16_16_SINT = 75,
+ TFMT_32_FLOAT = 84,
+ TFMT_32_32_FLOAT = 85,
+ TFMT_32_32_32_32_FLOAT = 87,
+ TFMT_32_UINT = 88,
+ TFMT_32_32_UINT = 89,
+ TFMT_32_32_32_32_UINT = 91,
+ TFMT_32_SINT = 92,
+ TFMT_32_32_SINT = 93,
+ TFMT_32_32_32_32_SINT = 95,
};
enum a3xx_tex_fetchsize {
@@ -180,9 +183,11 @@ enum a3xx_color_fmt {
RB_R4G4B4A4_UNORM = 3,
RB_R8G8B8_UNORM = 4,
RB_R8G8B8A8_UNORM = 8,
+ RB_R8G8B8A8_SNORM = 9,
RB_R8G8B8A8_UINT = 10,
RB_R8G8B8A8_SINT = 11,
RB_R8G8_UNORM = 12,
+ RB_R8G8_SNORM = 13,
RB_R8_UINT = 14,
RB_R8_SINT = 15,
RB_R10G10B10A2_UNORM = 16,
diff --git a/src/gallium/drivers/freedreno/a3xx/fd3_format.c b/src/gallium/drivers/freedreno/a3xx/fd3_format.c
index a70acebd626..41e48d0af7c 100644
--- a/src/gallium/drivers/freedreno/a3xx/fd3_format.c
+++ b/src/gallium/drivers/freedreno/a3xx/fd3_format.c
@@ -38,209 +38,209 @@ fd3_pipe2vtx(enum pipe_format format)
switch (format) {
/* 8-bit buffers. */
case PIPE_FORMAT_R8_UNORM:
- return VFMT_NORM_UBYTE_8;
+ return VFMT_8_UNORM;
case PIPE_FORMAT_R8_SNORM:
- return VFMT_NORM_BYTE_8;
+ return VFMT_8_SNORM;
case PIPE_FORMAT_R8_UINT:
case PIPE_FORMAT_R8_USCALED:
- return VFMT_UBYTE_8;
+ return VFMT_8_UINT;
case PIPE_FORMAT_R8_SINT:
case PIPE_FORMAT_R8_SSCALED:
- return VFMT_BYTE_8;
+ return VFMT_8_SINT;
/* 16-bit buffers. */
case PIPE_FORMAT_R16_UNORM:
case PIPE_FORMAT_Z16_UNORM:
- return VFMT_NORM_USHORT_16;
+ return VFMT_16_UNORM;
case PIPE_FORMAT_R16_SNORM:
- return VFMT_NORM_SHORT_16;
+ return VFMT_16_SNORM;
case PIPE_FORMAT_R16_UINT:
case PIPE_FORMAT_R16_USCALED:
- return VFMT_USHORT_16;
+ return VFMT_16_UINT;
case PIPE_FORMAT_R16_SINT:
case PIPE_FORMAT_R16_SSCALED:
- return VFMT_SHORT_16;
+ return VFMT_16_SINT;
case PIPE_FORMAT_R16_FLOAT:
- return VFMT_FLOAT_16;
+ return VFMT_16_FLOAT;
case PIPE_FORMAT_R8G8_UNORM:
- return VFMT_NORM_UBYTE_8_8;
+ return VFMT_8_8_UNORM;
case PIPE_FORMAT_R8G8_SNORM:
- return VFMT_NORM_BYTE_8_8;
+ return VFMT_8_8_SNORM;
case PIPE_FORMAT_R8G8_UINT:
case PIPE_FORMAT_R8G8_USCALED:
- return VFMT_UBYTE_8_8;
+ return VFMT_8_8_UINT;
case PIPE_FORMAT_R8G8_SINT:
case PIPE_FORMAT_R8G8_SSCALED:
- return VFMT_BYTE_8_8;
+ return VFMT_8_8_SINT;
/* 24-bit buffers. */
case PIPE_FORMAT_R8G8B8_UNORM:
- return VFMT_NORM_UBYTE_8_8_8;
+ return VFMT_8_8_8_UNORM;
case PIPE_FORMAT_R8G8B8_SNORM:
- return VFMT_NORM_BYTE_8_8_8;
+ return VFMT_8_8_8_SNORM;
case PIPE_FORMAT_R8G8B8_UINT:
case PIPE_FORMAT_R8G8B8_USCALED:
- return VFMT_UBYTE_8_8_8;
+ return VFMT_8_8_8_UINT;
case PIPE_FORMAT_R8G8B8_SINT:
case PIPE_FORMAT_R8G8B8_SSCALED:
- return VFMT_BYTE_8_8_8;
+ return VFMT_8_8_8_SINT;
/* 32-bit buffers. */
case PIPE_FORMAT_A8B8G8R8_UNORM:
case PIPE_FORMAT_A8R8G8B8_UNORM:
case PIPE_FORMAT_B8G8R8A8_UNORM:
case PIPE_FORMAT_R8G8B8A8_UNORM:
- return VFMT_NORM_UBYTE_8_8_8_8;
+ return VFMT_8_8_8_8_UNORM;
case PIPE_FORMAT_R8G8B8A8_SNORM:
- return VFMT_NORM_BYTE_8_8_8_8;
+ return VFMT_8_8_8_8_SNORM;
case PIPE_FORMAT_R8G8B8A8_UINT:
case PIPE_FORMAT_R8G8B8A8_USCALED:
- return VFMT_UBYTE_8_8_8_8;
+ return VFMT_8_8_8_8_UINT;
case PIPE_FORMAT_R8G8B8A8_SINT:
case PIPE_FORMAT_R8G8B8A8_SSCALED:
- return VFMT_BYTE_8_8_8_8;
+ return VFMT_8_8_8_8_SINT;
case PIPE_FORMAT_R16G16_SSCALED:
case PIPE_FORMAT_R16G16_SINT:
- return VFMT_SHORT_16_16;
+ return VFMT_16_16_SINT;
case PIPE_FORMAT_R16G16_FLOAT:
- return VFMT_FLOAT_16_16;
+ return VFMT_16_16_FLOAT;
case PIPE_FORMAT_R16G16_UINT:
case PIPE_FORMAT_R16G16_USCALED:
- return VFMT_USHORT_16_16;
+ return VFMT_16_16_UINT;
case PIPE_FORMAT_R16G16_UNORM:
- return VFMT_NORM_USHORT_16_16;
+ return VFMT_16_16_UNORM;
case PIPE_FORMAT_R16G16_SNORM:
- return VFMT_NORM_SHORT_16_16;
+ return VFMT_16_16_SNORM;
case PIPE_FORMAT_R32_UINT:
case PIPE_FORMAT_R32_USCALED:
- return VFMT_UINT_32;
+ return VFMT_32_UINT;
case PIPE_FORMAT_R32_SINT:
case PIPE_FORMAT_R32_SSCALED:
- return VFMT_INT_32;
+ return VFMT_32_SINT;
case PIPE_FORMAT_R10G10B10A2_UNORM:
- return VFMT_NORM_UINT_10_10_10_2;
+ return VFMT_10_10_10_2_UNORM;
case PIPE_FORMAT_R10G10B10A2_SNORM:
- return VFMT_NORM_INT_10_10_10_2;
+ return VFMT_10_10_10_2_SNORM;
case PIPE_FORMAT_R10G10B10A2_UINT:
case PIPE_FORMAT_R10G10B10A2_USCALED:
- return VFMT_UINT_10_10_10_2;
+ return VFMT_10_10_10_2_UINT;
case PIPE_FORMAT_R10G10B10A2_SSCALED:
- return VFMT_INT_10_10_10_2;
+ return VFMT_10_10_10_2_SINT;
/* 48-bit buffers. */
case PIPE_FORMAT_R16G16B16_FLOAT:
- return VFMT_FLOAT_16_16_16;
+ return VFMT_16_16_16_FLOAT;
case PIPE_FORMAT_R16G16B16_SINT:
case PIPE_FORMAT_R16G16B16_SSCALED:
- return VFMT_SHORT_16_16_16;
+ return VFMT_16_16_16_SINT;
case PIPE_FORMAT_R16G16B16_UINT:
case PIPE_FORMAT_R16G16B16_USCALED:
- return VFMT_USHORT_16_16_16;
+ return VFMT_16_16_16_UINT;
case PIPE_FORMAT_R16G16B16_SNORM:
- return VFMT_NORM_SHORT_16_16_16;
+ return VFMT_16_16_16_SNORM;
case PIPE_FORMAT_R16G16B16_UNORM:
- return VFMT_NORM_USHORT_16_16_16;
+ return VFMT_16_16_16_UNORM;
case PIPE_FORMAT_R32_FLOAT:
case PIPE_FORMAT_Z32_FLOAT:
- return VFMT_FLOAT_32;
+ return VFMT_32_FLOAT;
case PIPE_FORMAT_R32_FIXED:
- return VFMT_FIXED_32;
+ return VFMT_32_FIXED;
/* 64-bit buffers. */
case PIPE_FORMAT_R16G16B16A16_UNORM:
- return VFMT_NORM_USHORT_16_16_16_16;
+ return VFMT_16_16_16_16_UNORM;
case PIPE_FORMAT_R16G16B16A16_SNORM:
- return VFMT_NORM_SHORT_16_16_16_16;
+ return VFMT_16_16_16_16_SNORM;
case PIPE_FORMAT_R16G16B16A16_UINT:
case PIPE_FORMAT_R16G16B16A16_USCALED:
- return VFMT_USHORT_16_16_16_16;
+ return VFMT_16_16_16_16_UINT;
case PIPE_FORMAT_R16G16B16A16_SINT:
case PIPE_FORMAT_R16G16B16A16_SSCALED:
- return VFMT_SHORT_16_16_16_16;
+ return VFMT_16_16_16_16_SINT;
case PIPE_FORMAT_R32G32_FLOAT:
- return VFMT_FLOAT_32_32;
+ return VFMT_32_32_FLOAT;
case PIPE_FORMAT_R32G32_FIXED:
- return VFMT_FIXED_32_32;
+ return VFMT_32_32_FIXED;
case PIPE_FORMAT_R16G16B16A16_FLOAT:
- return VFMT_FLOAT_16_16_16_16;
+ return VFMT_16_16_16_16_FLOAT;
case PIPE_FORMAT_R32G32_UINT:
case PIPE_FORMAT_R32G32_USCALED:
- return VFMT_UINT_32_32;
+ return VFMT_32_32_UINT;
case PIPE_FORMAT_R32G32_SINT:
case PIPE_FORMAT_R32G32_SSCALED:
- return VFMT_INT_32_32;
+ return VFMT_32_32_SINT;
/* 96-bit buffers. */
case PIPE_FORMAT_R32G32B32_FLOAT:
- return VFMT_FLOAT_32_32_32;
+ return VFMT_32_32_32_FLOAT;
case PIPE_FORMAT_R32G32B32_FIXED:
- return VFMT_FIXED_32_32_32;
+ return VFMT_32_32_32_FIXED;
case PIPE_FORMAT_R32G32B32_UINT:
case PIPE_FORMAT_R32G32B32_USCALED:
- return VFMT_UINT_32_32_32;
+ return VFMT_32_32_32_UINT;
case PIPE_FORMAT_R32G32B32_SINT:
case PIPE_FORMAT_R32G32B32_SSCALED:
- return VFMT_INT_32_32_32;
+ return VFMT_32_32_32_SINT;
/* 128-bit buffers. */
case PIPE_FORMAT_R32G32B32A32_FLOAT:
- return VFMT_FLOAT_32_32_32_32;
+ return VFMT_32_32_32_32_FLOAT;
case PIPE_FORMAT_R32G32B32A32_FIXED:
- return VFMT_FIXED_32_32_32_32;
+ return VFMT_32_32_32_32_FIXED;
case PIPE_FORMAT_R32G32B32A32_UINT:
case PIPE_FORMAT_R32G32B32A32_USCALED:
- return VFMT_UINT_32_32_32_32;
+ return VFMT_32_32_32_32_UINT;
case PIPE_FORMAT_R32G32B32A32_SINT:
case PIPE_FORMAT_R32G32B32A32_SSCALED:
- return VFMT_INT_32_32_32_32;
+ return VFMT_32_32_32_32_SINT;
/* TODO normalized 32bit int formats do not appear to be supported
* natively.. will require either shader variant or VFD_DECODE
@@ -262,10 +262,10 @@ fd3_pipe2tex(enum pipe_format format)
case PIPE_FORMAT_L8_UNORM:
case PIPE_FORMAT_A8_UNORM:
case PIPE_FORMAT_I8_UNORM:
- return TFMT_NORM_UINT_8;
+ return TFMT_8_UNORM;
case PIPE_FORMAT_R8G8_UNORM:
- return TFMT_NORM_UINT_8_8;
+ return TFMT_8_8_UNORM;
case PIPE_FORMAT_B8G8R8A8_UNORM:
case PIPE_FORMAT_B8G8R8X8_UNORM:
@@ -275,22 +275,22 @@ fd3_pipe2tex(enum pipe_format format)
case PIPE_FORMAT_B8G8R8X8_SRGB:
case PIPE_FORMAT_R8G8B8A8_SRGB:
case PIPE_FORMAT_R8G8B8X8_SRGB:
- return TFMT_NORM_UINT_8_8_8_8;
+ return TFMT_8_8_8_8_UNORM;
case PIPE_FORMAT_Z24X8_UNORM:
case PIPE_FORMAT_Z24_UNORM_S8_UINT:
- return TFMT_NORM_UINT_X8Z24;
+ return TFMT_X8Z24_UNORM;
case PIPE_FORMAT_Z16_UNORM:
- return TFMT_NORM_USHORT_Z16;
+ return TFMT_Z16_UNORM;
case PIPE_FORMAT_R16G16B16A16_FLOAT:
case PIPE_FORMAT_R16G16B16X16_FLOAT:
- return TFMT_FLOAT_16_16_16_16;
+ return TFMT_16_16_16_16_FLOAT;
case PIPE_FORMAT_R32G32B32A32_FLOAT:
case PIPE_FORMAT_R32G32B32X32_FLOAT:
- return TFMT_FLOAT_32_32_32_32;
+ return TFMT_32_32_32_32_FLOAT;
// TODO add more..