/*
 * Copyright © 2014 Rob Clark <[email protected]>
 * SPDX-License-Identifier: MIT
 *
 * Authors:
 *    Rob Clark <[email protected]>
 */

#include "pipe/p_defines.h"
#include "util/format/u_format.h"

#include "fd4_format.h"

/* Specifies the table of all the formats and their features. Also supplies
 * the helpers that look up various data in those tables.
 */

struct fd4_format {
   enum a4xx_vtx_fmt vtx;
   enum a4xx_tex_fmt tex;
   enum a4xx_color_fmt rb;
   enum a3xx_color_swap swap;
   bool present;
};

/* vertex + texture */
#define VT(pipe, fmt, rbfmt, swapfmt) \
   [PIPE_FORMAT_##pipe] = {.present = 1, \
                           .vtx = VFMT4_##fmt, \
                           .tex = TFMT4_##fmt, \
                           .rb = RB4_##rbfmt, \
                           .swap = swapfmt}

/* texture-only */
#define _T(pipe, fmt, rbfmt, swapfmt) \
   [PIPE_FORMAT_##pipe] = {.present = 1, \
                           .vtx = VFMT4_NONE, \
                           .tex = TFMT4_##fmt, \
                           .rb = RB4_##rbfmt, \
                           .swap = swapfmt}

/* vertex-only */
#define V_(pipe, fmt, rbfmt, swapfmt) \
   [PIPE_FORMAT_##pipe] = {.present = 1, \
                           .vtx = VFMT4_##fmt, \
                           .tex = TFMT4_NONE, \
                           .rb = RB4_##rbfmt, \
                           .swap = swapfmt}
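
/* For illustration: with the initializer macros above, a table entry such as
 * VT(R8_UNORM, 8_UNORM, R8_UNORM, WZYX) expands (roughly) to the designated
 * initializer below, so the array can be indexed directly by pipe_format:
 *
 *    [PIPE_FORMAT_R8_UNORM] = {.present = 1,
 *                              .vtx = VFMT4_8_UNORM,
 *                              .tex = TFMT4_8_UNORM,
 *                              .rb = RB4_R8_UNORM,
 *                              .swap = WZYX},
 */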

/* clang-format off */
static struct fd4_format formats[PIPE_FORMAT_COUNT] = {
   /* 8-bit */
   VT(R8_UNORM, 8_UNORM, R8_UNORM, WZYX),
   _T(R8_SRGB, 8_UNORM, R8_UNORM, WZYX),
   VT(R8_SNORM, 8_SNORM, R8_SNORM, WZYX),
   VT(R8_UINT, 8_UINT, R8_UINT, WZYX),
   VT(R8_SINT, 8_SINT, R8_SINT, WZYX),
   V_(R8_USCALED, 8_UINT, NONE, WZYX),
   V_(R8_SSCALED, 8_SINT, NONE, WZYX),

   _T(A8_UNORM, 8_UNORM, A8_UNORM, WZYX),
   _T(L8_UNORM, 8_UNORM, R8_UNORM, WZYX),
   _T(L8_SNORM, 8_SNORM, R8_SNORM, WZYX),
   _T(I8_UNORM, 8_UNORM, NONE, WZYX),
   _T(I8_SNORM, 8_SNORM, NONE, WZYX),

   _T(A8_UINT, 8_UINT, NONE, WZYX),
   _T(A8_SINT, 8_SINT, NONE, WZYX),
   _T(L8_UINT, 8_UINT, NONE, WZYX),
   _T(L8_SINT, 8_SINT, NONE, WZYX),
   _T(I8_UINT, 8_UINT, NONE, WZYX),
   _T(I8_SINT, 8_SINT, NONE, WZYX),

   _T(S8_UINT, 8_UINT, R8_UNORM, WZYX),

   /* 16-bit */
   VT(R16_UNORM, 16_UNORM, R16_UNORM, WZYX),
   VT(R16_SNORM, 16_SNORM, R16_SNORM, WZYX),
   VT(R16_UINT, 16_UINT, R16_UINT, WZYX),
   VT(R16_SINT, 16_SINT, R16_SINT, WZYX),
   V_(R16_USCALED, 16_UINT, NONE, WZYX),
   V_(R16_SSCALED, 16_SINT, NONE, WZYX),
   VT(R16_FLOAT, 16_FLOAT, R16_FLOAT, WZYX),

   _T(A16_UNORM, 16_UNORM, NONE, WZYX),
   _T(A16_SNORM, 16_SNORM, NONE, WZYX),
   _T(A16_UINT, 16_UINT, NONE, WZYX),
   _T(A16_SINT, 16_SINT, NONE, WZYX),
   _T(A16_FLOAT, 16_FLOAT, NONE, WZYX),
   _T(L16_UNORM, 16_UNORM, R16_UNORM, WZYX),
   _T(L16_SNORM, 16_SNORM, R16_SNORM, WZYX),
   _T(L16_UINT, 16_UINT, R16_UINT, WZYX),
   _T(L16_SINT, 16_SINT, R16_SINT, WZYX),
   _T(L16_FLOAT, 16_FLOAT, R16_FLOAT, WZYX),
   _T(I16_UNORM, 16_UNORM, NONE, WZYX),
   _T(I16_SNORM, 16_SNORM, NONE, WZYX),
   _T(I16_UINT, 16_UINT, NONE, WZYX),
   _T(I16_SINT, 16_SINT, NONE, WZYX),
   _T(I16_FLOAT, 16_FLOAT, NONE, WZYX),

   VT(R8G8_UNORM, 8_8_UNORM, R8G8_UNORM, WZYX),
   _T(R8G8_SRGB, 8_8_UNORM, R8G8_UNORM, WZYX),
   VT(R8G8_SNORM, 8_8_SNORM, R8G8_SNORM, WZYX),
   VT(R8G8_UINT, 8_8_UINT, R8G8_UINT, WZYX),
   VT(R8G8_SINT, 8_8_SINT, R8G8_SINT, WZYX),
   V_(R8G8_USCALED, 8_8_UINT, NONE, WZYX),
   V_(R8G8_SSCALED, 8_8_SINT, NONE, WZYX),

   _T(L8A8_UNORM, 8_8_UNORM, NONE, WZYX),
   _T(L8A8_SNORM, 8_8_SNORM, NONE, WZYX),
   _T(L8A8_UINT, 8_8_UINT, NONE, WZYX),
   _T(L8A8_SINT, 8_8_SINT, NONE, WZYX),

   _T(B5G6R5_UNORM, 5_6_5_UNORM, R5G6B5_UNORM, WXYZ),
   _T(B5G5R5A1_UNORM, 5_5_5_1_UNORM, R5G5B5A1_UNORM, WXYZ),
   _T(B5G5R5X1_UNORM, 5_5_5_1_UNORM, R5G5B5A1_UNORM, WXYZ),
   _T(B4G4R4A4_UNORM, 4_4_4_4_UNORM, R4G4B4A4_UNORM, WXYZ),

   /* 24-bit */
   V_(R8G8B8_UNORM, 8_8_8_UNORM, NONE, WZYX),
   V_(R8G8B8_SNORM, 8_8_8_SNORM, NONE, WZYX),
   V_(R8G8B8_UINT, 8_8_8_UINT, NONE, WZYX),
   V_(R8G8B8_SINT, 8_8_8_SINT, NONE, WZYX),
   V_(R8G8B8_USCALED, 8_8_8_UINT, NONE, WZYX),
   V_(R8G8B8_SSCALED, 8_8_8_SINT, NONE, WZYX),

   /* 32-bit */
   VT(R32_UINT, 32_UINT, R32_UINT, WZYX),
   VT(R32_SINT, 32_SINT, R32_SINT, WZYX),
   V_(R32_USCALED, 32_UINT, NONE, WZYX),
   V_(R32_SSCALED, 32_SINT, NONE, WZYX),
   VT(R32_FLOAT, 32_FLOAT, R32_FLOAT, WZYX),
   V_(R32_FIXED, 32_FIXED, NONE, WZYX),

   _T(A32_UINT, 32_UINT, NONE, WZYX),
   _T(A32_SINT, 32_SINT, NONE, WZYX),
   _T(A32_FLOAT, 32_FLOAT, NONE, WZYX),
   _T(L32_UINT, 32_UINT, R32_UINT, WZYX),
   _T(L32_SINT, 32_SINT, R32_SINT, WZYX),
   _T(L32_FLOAT, 32_FLOAT, R32_FLOAT, WZYX),
   _T(I32_UINT, 32_UINT, NONE, WZYX),
   _T(I32_SINT, 32_SINT, NONE, WZYX),
   _T(I32_FLOAT, 32_FLOAT, NONE, WZYX),

   VT(R16G16_UNORM, 16_16_UNORM, R16G16_UNORM, WZYX),
   VT(R16G16_SNORM, 16_16_SNORM, R16G16_SNORM, WZYX),
   VT(R16G16_UINT, 16_16_UINT, R16G16_UINT, WZYX),
   VT(R16G16_SINT, 16_16_SINT, R16G16_SINT, WZYX),
   V_(R16G16_USCALED, 16_16_UINT, NONE, WZYX),
   V_(R16G16_SSCALED, 16_16_SINT, NONE, WZYX),
   VT(R16G16_FLOAT, 16_16_FLOAT, R16G16_FLOAT, WZYX),

   _T(L16A16_UNORM, 16_16_UNORM, NONE, WZYX),
   _T(L16A16_SNORM, 16_16_SNORM, NONE, WZYX),
   _T(L16A16_UINT, 16_16_UINT, NONE, WZYX),
   _T(L16A16_SINT, 16_16_SINT, NONE, WZYX),
   _T(L16A16_FLOAT, 16_16_FLOAT, NONE, WZYX),

   VT(R8G8B8A8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WZYX),
   _T(R8G8B8X8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WZYX),
   _T(R8G8B8A8_SRGB, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WZYX),
   _T(R8G8B8X8_SRGB, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WZYX),
   VT(R8G8B8A8_SNORM, 8_8_8_8_SNORM, R8G8B8A8_SNORM, WZYX),
   VT(R8G8B8A8_UINT, 8_8_8_8_UINT, R8G8B8A8_UINT, WZYX),
   VT(R8G8B8A8_SINT, 8_8_8_8_SINT, R8G8B8A8_SINT, WZYX),
   V_(R8G8B8A8_USCALED, 8_8_8_8_UINT, NONE, WZYX),
   V_(R8G8B8A8_SSCALED, 8_8_8_8_SINT, NONE, WZYX),

   VT(B8G8R8A8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WXYZ),
   _T(B8G8R8X8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WXYZ),
   VT(B8G8R8A8_SRGB, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WXYZ),
   _T(B8G8R8X8_SRGB, 8_8_8_8_UNORM, R8G8B8A8_UNORM, WXYZ),

   VT(A8B8G8R8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, XYZW),
   _T(X8B8G8R8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, XYZW),
   _T(A8B8G8R8_SRGB, 8_8_8_8_UNORM, R8G8B8A8_UNORM, XYZW),
   _T(X8B8G8R8_SRGB, 8_8_8_8_UNORM, R8G8B8A8_UNORM, XYZW),

   VT(A8R8G8B8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, ZYXW),
   _T(X8R8G8B8_UNORM, 8_8_8_8_UNORM, R8G8B8A8_UNORM, ZYXW),
   _T(A8R8G8B8_SRGB, 8_8_8_8_UNORM, R8G8B8A8_UNORM, ZYXW),
   _T(X8R8G8B8_SRGB, 8_8_8_8_UNORM, R8G8B8A8_UNORM, ZYXW),

   VT(R10G10B10A2_UNORM, 10_10_10_2_UNORM, R10G10B10A2_UNORM, WZYX),
   VT(B10G10R10A2_UNORM, 10_10_10_2_UNORM, R10G10B10A2_UNORM, WXYZ),
   _T(B10G10R10X2_UNORM, 10_10_10_2_UNORM, R10G10B10A2_UNORM, WXYZ),
   V_(R10G10B10A2_SNORM, 10_10_10_2_SNORM, NONE, WZYX),
   V_(B10G10R10A2_SNORM, 10_10_10_2_SNORM, NONE, WXYZ),
   VT(R10G10B10A2_UINT, 10_10_10_2_UINT, R10G10B10A2_UINT, WZYX),
   VT(B10G10R10A2_UINT, 10_10_10_2_UINT, R10G10B10A2_UINT, WXYZ),
   V_(R10G10B10A2_USCALED, 10_10_10_2_UINT, NONE, WZYX),
   V_(B10G10R10A2_USCALED, 10_10_10_2_UINT, NONE, WXYZ),
   V_(R10G10B10A2_SSCALED, 10_10_10_2_SINT, NONE, WZYX),
   V_(B10G10R10A2_SSCALED, 10_10_10_2_SINT, NONE, WXYZ),

   VT(R11G11B10_FLOAT, 11_11_10_FLOAT, R11G11B10_FLOAT, WZYX),
   _T(R9G9B9E5_FLOAT, 9_9_9_E5_FLOAT, NONE, WZYX),

   _T(Z16_UNORM, 16_UNORM, R8G8_UNORM, WZYX),
   _T(Z24X8_UNORM, X8Z24_UNORM, R8G8B8A8_UNORM, WZYX),
   _T(X24S8_UINT, 8_8_8_8_UINT, R8G8B8A8_UINT, XYZW),
   _T(Z24_UNORM_S8_UINT, X8Z24_UNORM, R8G8B8A8_UNORM, WZYX),
   _T(Z32_FLOAT, 32_FLOAT, R8G8B8A8_UNORM, WZYX),
   _T(Z32_FLOAT_S8X24_UINT, 32_FLOAT, R8G8B8A8_UNORM, WZYX),
   _T(X32_S8X24_UINT, 8_UINT, R8_UINT, WZYX),

   /* 48-bit */
   V_(R16G16B16_UNORM, 16_16_16_UNORM, NONE, WZYX),
   V_(R16G16B16_SNORM, 16_16_16_SNORM, NONE, WZYX),
   V_(R16G16B16_UINT, 16_16_16_UINT, NONE, WZYX),
   V_(R16G16B16_SINT, 16_16_16_SINT, NONE, WZYX),
   V_(R16G16B16_USCALED, 16_16_16_UINT, NONE, WZYX),
   V_(R16G16B16_SSCALED, 16_16_16_SINT, NONE, WZYX),
   V_(R16G16B16_FLOAT, 16_16_16_FLOAT, NONE, WZYX),

   /* 64-bit */
   VT(R16G16B16A16_UNORM, 16_16_16_16_UNORM, R16G16B16A16_UNORM, WZYX),
   VT(R16G16B16X16_UNORM, 16_16_16_16_UNORM, R16G16B16A16_UNORM, WZYX),
   VT(R16G16B16A16_SNORM, 16_16_16_16_SNORM, R16G16B16A16_SNORM, WZYX),
   VT(R16G16B16X16_SNORM, 16_16_16_16_SNORM, R16G16B16A16_SNORM, WZYX),
   VT(R16G16B16A16_UINT, 16_16_16_16_UINT, R16G16B16A16_UINT, WZYX),
   _T(R16G16B16X16_UINT, 16_16_16_16_UINT, R16G16B16A16_UINT, WZYX),
   VT(R16G16B16A16_SINT, 16_16_16_16_SINT, R16G16B16A16_SINT, WZYX),
   _T(R16G16B16X16_SINT, 16_16_16_16_SINT, R16G16B16A16_SINT, WZYX),
   V_(R16G16B16A16_USCALED, 16_16_16_16_UINT, NONE, WZYX),
   V_(R16G16B16A16_SSCALED, 16_16_16_16_SINT, NONE, WZYX),
   VT(R16G16B16A16_FLOAT, 16_16_16_16_FLOAT, R16G16B16A16_FLOAT, WZYX),
   _T(R16G16B16X16_FLOAT, 16_16_16_16_FLOAT, R16G16B16A16_FLOAT, WZYX),

   VT(R32G32_UINT, 32_32_UINT, R32G32_UINT, WZYX),
   VT(R32G32_SINT, 32_32_SINT, R32G32_SINT, WZYX),
   V_(R32G32_USCALED, 32_32_UINT, NONE, WZYX),
   V_(R32G32_SSCALED, 32_32_SINT, NONE, WZYX),
   VT(R32G32_FLOAT, 32_32_FLOAT, R32G32_FLOAT, WZYX),
   V_(R32G32_FIXED, 32_32_FIXED, NONE, WZYX),

   _T(L32A32_UINT, 32_32_UINT, NONE, WZYX),
   _T(L32A32_SINT, 32_32_SINT, NONE, WZYX),
   _T(L32A32_FLOAT, 32_32_FLOAT, NONE, WZYX),

   /* 96-bit */
   VT(R32G32B32_UINT, 32_32_32_UINT, NONE, WZYX),
   VT(R32G32B32_SINT, 32_32_32_SINT, NONE, WZYX),
   V_(R32G32B32_USCALED, 32_32_32_UINT, NONE, WZYX),
   V_(R32G32B32_SSCALED, 32_32_32_SINT, NONE, WZYX),
   VT(R32G32B32_FLOAT, 32_32_32_FLOAT, NONE, WZYX),
   V_(R32G32B32_FIXED, 32_32_32_FIXED, NONE, WZYX),

   /* 128-bit */
   VT(R32G32B32A32_UINT, 32_32_32_32_UINT, R32G32B32A32_UINT, WZYX),
   _T(R32G32B32X32_UINT, 32_32_32_32_UINT, R32G32B32A32_UINT, WZYX),
   VT(R32G32B32A32_SINT, 32_32_32_32_SINT, R32G32B32A32_SINT, WZYX),
   _T(R32G32B32X32_SINT, 32_32_32_32_SINT, R32G32B32A32_SINT, WZYX),
   V_(R32G32B32A32_USCALED, 32_32_32_32_UINT, NONE, WZYX),
   V_(R32G32B32A32_SSCALED, 32_32_32_32_SINT, NONE, WZYX),
   VT(R32G32B32A32_FLOAT, 32_32_32_32_FLOAT, R32G32B32A32_FLOAT, WZYX),
   _T(R32G32B32X32_FLOAT, 32_32_32_32_FLOAT, R32G32B32A32_FLOAT, WZYX),
   V_(R32G32B32A32_FIXED, 32_32_32_32_FIXED, NONE, WZYX),

   /* compressed */
   _T(ETC1_RGB8, ETC1, NONE, WZYX),
   _T(ETC2_RGB8, ETC2_RGB8, NONE, WZYX),
   _T(ETC2_SRGB8, ETC2_RGB8, NONE, WZYX),
   _T(ETC2_RGB8A1, ETC2_RGB8A1, NONE, WZYX),
   _T(ETC2_SRGB8A1, ETC2_RGB8A1, NONE, WZYX),
   _T(ETC2_RGBA8, ETC2_RGBA8, NONE, WZYX),
   _T(ETC2_SRGBA8, ETC2_RGBA8, NONE, WZYX),
   _T(ETC2_R11_UNORM, ETC2_R11_UNORM, NONE, WZYX),
   _T(ETC2_R11_SNORM, ETC2_R11_SNORM, NONE, WZYX),
   _T(ETC2_RG11_UNORM, ETC2_RG11_UNORM, NONE, WZYX),
   _T(ETC2_RG11_SNORM, ETC2_RG11_SNORM, NONE, WZYX),

   _T(DXT1_RGB, DXT1, NONE, WZYX),
   _T(DXT1_SRGB, DXT1, NONE, WZYX),
   _T(DXT1_RGBA, DXT1, NONE, WZYX),
   _T(DXT1_SRGBA, DXT1, NONE, WZYX),
   _T(DXT3_RGBA, DXT3, NONE, WZYX),
   _T(DXT3_SRGBA, DXT3, NONE, WZYX),
   _T(DXT5_RGBA, DXT5, NONE, WZYX),
   _T(DXT5_SRGBA, DXT5, NONE, WZYX),

   _T(BPTC_RGBA_UNORM, BPTC, NONE, WZYX),
   _T(BPTC_SRGBA, BPTC, NONE, WZYX),
   _T(BPTC_RGB_FLOAT, BPTC_FLOAT, NONE, WZYX),
   _T(BPTC_RGB_UFLOAT, BPTC_UFLOAT, NONE, WZYX),

   _T(RGTC1_UNORM, RGTC1_UNORM, NONE, WZYX),
   _T(RGTC1_SNORM, RGTC1_SNORM, NONE, WZYX),
   _T(RGTC2_UNORM, RGTC2_UNORM, NONE, WZYX),
   _T(RGTC2_SNORM, RGTC2_SNORM, NONE, WZYX),
   _T(LATC1_UNORM, RGTC1_UNORM, NONE, WZYX),
   _T(LATC1_SNORM, RGTC1_SNORM, NONE, WZYX),
   _T(LATC2_UNORM, RGTC2_UNORM, NONE, WZYX),
   _T(LATC2_SNORM, RGTC2_SNORM, NONE, WZYX),

   _T(ASTC_4x4, ASTC_4x4, NONE, WZYX),
   _T(ASTC_5x4, ASTC_5x4, NONE, WZYX),
   _T(ASTC_5x5, ASTC_5x5, NONE, WZYX),
   _T(ASTC_6x5, ASTC_6x5, NONE, WZYX),
   _T(ASTC_6x6, ASTC_6x6, NONE, WZYX),
   _T(ASTC_8x5, ASTC_8x5, NONE, WZYX),
   _T(ASTC_8x6, ASTC_8x6, NONE, WZYX),
   _T(ASTC_8x8, ASTC_8x8, NONE, WZYX),
   _T(ASTC_10x5, ASTC_10x5, NONE, WZYX),
   _T(ASTC_10x6, ASTC_10x6, NONE, WZYX),
   _T(ASTC_10x8, ASTC_10x8, NONE, WZYX),
   _T(ASTC_10x10, ASTC_10x10, NONE, WZYX),
   _T(ASTC_12x10, ASTC_12x10, NONE, WZYX),
   _T(ASTC_12x12, ASTC_12x12, NONE, WZYX),

   _T(ASTC_4x4_SRGB, ASTC_4x4, NONE, WZYX),
   _T(ASTC_5x4_SRGB, ASTC_5x4, NONE, WZYX),
   _T(ASTC_5x5_SRGB, ASTC_5x5, NONE, WZYX),
   _T(ASTC_6x5_SRGB, ASTC_6x5, NONE, WZYX),
   _T(ASTC_6x6_SRGB, ASTC_6x6, NONE, WZYX),
   _T(ASTC_8x5_SRGB, ASTC_8x5, NONE, WZYX),
   _T(ASTC_8x6_SRGB, ASTC_8x6, NONE, WZYX),
   _T(ASTC_8x8_SRGB, ASTC_8x8, NONE, WZYX),
   _T(ASTC_10x5_SRGB, ASTC_10x5, NONE, WZYX),
   _T(ASTC_10x6_SRGB, ASTC_10x6, NONE, WZYX),
   _T(ASTC_10x8_SRGB, ASTC_10x8, NONE, WZYX),
   _T(ASTC_10x10_SRGB, ASTC_10x10, NONE, WZYX),
   _T(ASTC_12x10_SRGB, ASTC_12x10, NONE, WZYX),
   _T(ASTC_12x12_SRGB, ASTC_12x12, NONE, WZYX),
};
/* clang-format on */

/* convert pipe format to vertex buffer format: */
enum a4xx_vtx_fmt
fd4_pipe2vtx(enum pipe_format format)
{
   if (!formats[format].present)
      return VFMT4_NONE;
   return formats[format].vtx;
}

/* convert pipe format to texture sampler format: */
enum a4xx_tex_fmt
fd4_pipe2tex(enum pipe_format format)
{
   if (!formats[format].present)
      return TFMT4_NONE;
   return formats[format].tex;
}

/* convert pipe format to MRT / copydest format used for render-target: */
enum a4xx_color_fmt
fd4_pipe2color(enum pipe_format format)
{
   if (!formats[format].present)
      return RB4_NONE;
   return formats[format].rb;
}

enum a3xx_color_swap
fd4_pipe2swap(enum pipe_format format)
{
   if (!formats[format].present)
      return WZYX;
   return formats[format].swap;
}
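
/* Example (illustrative sketch, not part of the driver): a caller could
 * combine the color format and component swap from the table above, e.g.
 * for PIPE_FORMAT_B8G8R8A8_UNORM:
 *
 *    enum a4xx_color_fmt fmt = fd4_pipe2color(PIPE_FORMAT_B8G8R8A8_UNORM);
 *    enum a3xx_color_swap swap = fd4_pipe2swap(PIPE_FORMAT_B8G8R8A8_UNORM);
 *
 * which yields RB4_R8G8B8A8_UNORM plus a WXYZ swap per the table entry, i.e.
 * BGRA is handled as the RGBA render-target format with swapped components
 * rather than as a distinct hardware format.
 */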

enum a4xx_depth_format
fd4_pipe2depth(enum pipe_format format)
{
   switch (format) {
   case PIPE_FORMAT_Z16_UNORM:
      return DEPTH4_16;
   case PIPE_FORMAT_Z24X8_UNORM:
   case PIPE_FORMAT_Z24_UNORM_S8_UINT:
   case PIPE_FORMAT_X8Z24_UNORM:
   case PIPE_FORMAT_S8_UINT_Z24_UNORM:
      return DEPTH4_24_8;
   case PIPE_FORMAT_Z32_FLOAT:
   case PIPE_FORMAT_Z32_FLOAT_S8X24_UINT:
      return DEPTH4_32;
   default:
      return ~0;
   }
}
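
/* Illustrative sketch (assumption about usage, not from this file): since the
 * default case above returns ~0 for formats with no hardware depth encoding,
 * a hypothetical caller could use the helper both to pick the depth format
 * and as a support check:
 *
 *    enum a4xx_depth_format dfmt =
 *       fd4_pipe2depth(PIPE_FORMAT_Z24_UNORM_S8_UINT);
 *    if (dfmt == (enum a4xx_depth_format)~0)
 *       return false;   // not usable as a depth/stencil buffer
 */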

static inline enum a4xx_tex_swiz
tex_swiz(unsigned swiz)
{
   switch (swiz) {
   default:
   case PIPE_SWIZZLE_X:
      return A4XX_TEX_X;
   case PIPE_SWIZZLE_Y:
      return A4XX_TEX_Y;
   case PIPE_SWIZZLE_Z:
      return A4XX_TEX_Z;
   case PIPE_SWIZZLE_W:
      return A4XX_TEX_W;
   case PIPE_SWIZZLE_0:
      return A4XX_TEX_ZERO;
   case PIPE_SWIZZLE_1:
      return A4XX_TEX_ONE;
   }
}

uint32_t
fd4_tex_swiz(enum pipe_format format, unsigned swizzle_r, unsigned swizzle_g,
             unsigned swizzle_b, unsigned swizzle_a)
{
   const struct util_format_description *desc = util_format_description(format);
   unsigned char swiz[4] = {
      swizzle_r,
      swizzle_g,
      swizzle_b,
      swizzle_a,
   }, rswiz[4];

   util_format_compose_swizzles(desc->swizzle, swiz, rswiz);

   return A4XX_TEX_CONST_0_SWIZ_X(tex_swiz(rswiz[0])) |
          A4XX_TEX_CONST_0_SWIZ_Y(tex_swiz(rswiz[1])) |
          A4XX_TEX_CONST_0_SWIZ_Z(tex_swiz(rswiz[2])) |
          A4XX_TEX_CONST_0_SWIZ_W(tex_swiz(rswiz[3]));
}
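
/* Illustrative sketch (hypothetical caller, not from this file): with an
 * identity view swizzle the composed result is just the format's own
 * component layout from util_format_description(), e.g.:
 *
 *    uint32_t texconst0_swiz =
 *       fd4_tex_swiz(PIPE_FORMAT_B8G8R8A8_UNORM, PIPE_SWIZZLE_X,
 *                    PIPE_SWIZZLE_Y, PIPE_SWIZZLE_Z, PIPE_SWIZZLE_W);
 *
 * The returned value packs the four A4XX_TEX_CONST_0_SWIZ_* fields and can be
 * OR'd into the first texture constant dword.
 */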