xref: /aosp_15_r20/external/mesa3d/src/nouveau/vulkan/nvk_mme.h (revision 6104692788411f58d303aa86923a9ff6ecaded22)
1 /*
2  * Copyright © 2022 Collabora Ltd. and Red Hat Inc.
3  * SPDX-License-Identifier: MIT
4  */
5 #ifndef NVK_MME_H
6 #define NVK_MME_H 1
7 
8 #include "mme_builder.h"
9 #include "nvk_private.h"
10 
11 struct nv_device_info;
12 
/* Identifiers for the macros (MMEs) NVK uploads to the GPU's Macro Method
 * Expander.  nvk_build_mme() builds the program for one of these entries;
 * presumably each entry corresponds to the builder function of the same
 * name declared below — confirm against nvk_mme.c.
 */
enum nvk_mme {
   NVK_MME_SELECT_CB0,
   NVK_MME_BIND_CBUF_DESC,
   NVK_MME_CLEAR,
   NVK_MME_BIND_IB,
   NVK_MME_BIND_VB,
   NVK_MME_SET_VB_ENABLES,
   NVK_MME_SET_VB_STRIDE,
   NVK_MME_SET_TESS_PARAMS,
   NVK_MME_SET_ANTI_ALIAS,
   NVK_MME_DRAW,
   NVK_MME_DRAW_INDEXED,
   NVK_MME_DRAW_INDIRECT,
   NVK_MME_DRAW_INDEXED_INDIRECT,
   NVK_MME_DRAW_INDIRECT_COUNT,
   NVK_MME_DRAW_INDEXED_INDIRECT_COUNT,
   NVK_MME_ADD_CS_INVOCATIONS,
   NVK_MME_DISPATCH_INDIRECT,
   NVK_MME_WRITE_CS_INVOCATIONS,
   NVK_MME_XFB_COUNTER_LOAD,
   NVK_MME_XFB_DRAW_INDIRECT,
   NVK_MME_SET_PRIV_REG,
   NVK_MME_SET_WRITE_MASK,
   NVK_MME_SET_CONSERVATIVE_RASTER_STATE,
   NVK_MME_SET_VIEWPORT_MIN_MAX_Z,
   NVK_MME_SET_Z_CLAMP,

   /* Not a real macro: the number of entries above */
   NVK_MME_COUNT,
};
42 
43 enum nvk_mme_scratch {
44    /* These are reserved for communicating with FALCON */
45    NVK_MME_SCRATCH_FALCON_0 = 0,
46    NVK_MME_SCRATCH_FALCON_1 = 0,
47    NVK_MME_SCRATCH_FALCON_2 = 0,
48 
49    NVK_MME_SCRATCH_CS_INVOCATIONS_HI,
50    NVK_MME_SCRATCH_CS_INVOCATIONS_LO,
51    NVK_MME_SCRATCH_DRAW_BEGIN,
52    NVK_MME_SCRATCH_DRAW_COUNT,
53    NVK_MME_SCRATCH_DRAW_PAD_DW,
54    NVK_MME_SCRATCH_DRAW_IDX,
55    NVK_MME_SCRATCH_VIEW_MASK,
56    NVK_MME_SCRATCH_WRITE_MASK_DYN,
57    NVK_MME_SCRATCH_WRITE_MASK_PIPELINE,
58    NVK_MME_SCRATCH_CONSERVATIVE_RASTER_STATE,
59 
60    /* Bitfield of enabled vertex buffer bindings */
61    NVK_MME_SCRATCH_VB_ENABLES,
62 
63    /* Tessellation parameters */
64    NVK_MME_SCRATCH_TESS_PARAMS,
65 
66    /* Anti-aliasing state */
67    NVK_MME_SCRATCH_ANTI_ALIAS,
68 
69    /* Addres of cb0 */
70    NVK_MME_SCRATCH_CB0_ADDR_HI,
71    NVK_MME_SCRATCH_CB0_ADDR_LO,
72 
73    /* Addres of zero page */
74    NVK_MME_SCRATCH_ZERO_ADDR_HI,
75    NVK_MME_SCRATCH_ZERO_ADDR_LO,
76 
77    /* Shadow copies of values in CB0 */
78    NVK_MME_SCRATCH_CB0_FIRST_VERTEX,
79    NVK_MME_SCRATCH_CB0_DRAW_INDEX,
80    NVK_MME_SCRATCH_CB0_VIEW_INDEX,
81 
82    NVK_MME_SCRATCH_VIEWPORT0_MIN_Z,
83    NVK_MME_SCRATCH_VIEWPORT0_MAX_Z,
84    NVK_MME_SCRATCH_Z_CLAMP = NVK_MME_SCRATCH_VIEWPORT0_MIN_Z
85                              + (NVK_MAX_VIEWPORTS * 2),
86 
87    /* Must be at the end */
88    NVK_MME_NUM_SCRATCH,
89 };
90 
/* Method address of MME scratch slot S (base 0x3400, 4 bytes per slot);
 * S is the NVK_MME_SCRATCH_* suffix, e.g. NVK_SET_MME_SCRATCH(VB_ENABLES).
 */
#define NVK_SET_MME_SCRATCH(S) (0x3400 + (NVK_MME_SCRATCH_##S) * 4)
92 
93 static inline void
_nvk_mme_load_scratch_to(struct mme_builder * b,struct mme_value val,enum nvk_mme_scratch scratch)94 _nvk_mme_load_scratch_to(struct mme_builder *b, struct mme_value val,
95                          enum nvk_mme_scratch scratch)
96 {
97    mme_state_to(b, val, 0x3400 + scratch * 4);
98 }
99 #define nvk_mme_load_scratch_to(b, v, S) \
100    _nvk_mme_load_scratch_to(b, v, NVK_MME_SCRATCH_##S)
101 
102 static inline struct mme_value
_nvk_mme_load_scratch(struct mme_builder * b,enum nvk_mme_scratch scratch)103 _nvk_mme_load_scratch(struct mme_builder *b, enum nvk_mme_scratch scratch)
104 {
105    struct mme_value val = mme_alloc_reg(b);
106    _nvk_mme_load_scratch_to(b, val, scratch);
107    return val;
108 }
109 #define nvk_mme_load_scratch(b, S) \
110    _nvk_mme_load_scratch(b, NVK_MME_SCRATCH_##S)
111 
112 static inline void
_nvk_mme_store_scratch(struct mme_builder * b,enum nvk_mme_scratch scratch,struct mme_value data)113 _nvk_mme_store_scratch(struct mme_builder *b, enum nvk_mme_scratch scratch,
114                        struct mme_value data)
115 {
116    mme_mthd(b, 0x3400 + scratch * 4);
117    mme_emit(b, data);
118 }
119 #define nvk_mme_store_scratch(b, S, v) \
120    _nvk_mme_store_scratch(b, NVK_MME_SCRATCH_##S, v)
121 
122 static inline void
_nvk_mme_load_to_scratch(struct mme_builder * b,enum nvk_mme_scratch scratch)123 _nvk_mme_load_to_scratch(struct mme_builder *b, enum nvk_mme_scratch scratch)
124 {
125    struct mme_value val = mme_load(b);
126    _nvk_mme_store_scratch(b, scratch, val);
127    mme_free_reg(b, val);
128 }
129 #define nvk_mme_load_to_scratch(b, S) \
130    _nvk_mme_load_to_scratch(b, NVK_MME_SCRATCH_##S)
131 
/* Pack a 16-bit value and its 16-bit mask into one 32-bit word: mask in
 * the top 16 bits, value in the bottom 16.  This is the CPU-side encoding
 * consumed by nvk_mme_set_masked().
 */
static inline uint32_t
nvk_mme_val_mask(uint16_t val, uint16_t mask)
{
   /* Bits set in val but outside mask would be dropped on the MME side by
    * nvk_mme_set_masked(); that almost certainly indicates a programming
    * error on the CPU side, so assert on it here.
    */
   assert((val & ~mask) == 0);

   uint32_t packed = (uint32_t)mask << 16;
   packed |= (uint32_t)val;
   return packed;
}
143 
144 /* This is a common pattern in NVK.  The input val_mask is a value plus a mask
145  * where the top 16 bits are mask and the bottom 16 bits are data.  src is
146  * copied and the bits in the mask are replaced by the corresponding value
147  * bits in val_mask.
148  */
149 static inline struct mme_value
nvk_mme_set_masked(struct mme_builder * b,struct mme_value src,struct mme_value val_mask)150 nvk_mme_set_masked(struct mme_builder *b, struct mme_value src,
151                    struct mme_value val_mask)
152 {
153    struct mme_value mask = mme_merge(b, mme_zero(), val_mask, 0, 16, 16);
154    struct mme_value val = mme_and_not(b, src, mask);
155 
156    /* Re-use the mask reg for val_mask & mask */
157    mme_and_to(b, mask, val_mask, mask);
158    mme_or_to(b, val, val, mask);
159    mme_free_reg(b, mask);
160 
161    return val;
162 }
163 
164 static void
_nvk_mme_spill(struct mme_builder * b,enum nvk_mme_scratch scratch,struct mme_value val)165 _nvk_mme_spill(struct mme_builder *b, enum nvk_mme_scratch scratch,
166                struct mme_value val)
167 {
168    if (val.type == MME_VALUE_TYPE_REG) {
169       _nvk_mme_store_scratch(b, scratch, val);
170       mme_free_reg(b, val);
171    }
172 }
173 #define nvk_mme_spill(b, S, v) \
174    _nvk_mme_spill(b, NVK_MME_SCRATCH_##S, v)
175 
176 static void
_nvk_mme_unspill(struct mme_builder * b,enum nvk_mme_scratch scratch,struct mme_value val)177 _nvk_mme_unspill(struct mme_builder *b, enum nvk_mme_scratch scratch,
178                  struct mme_value val)
179 {
180    if (val.type == MME_VALUE_TYPE_REG) {
181       mme_realloc_reg(b, val);
182       _nvk_mme_load_scratch_to(b, val, scratch);
183    }
184 }
185 #define nvk_mme_unspill(b, S, v) \
186    _nvk_mme_unspill(b, NVK_MME_SCRATCH_##S, v)
187 
/* Signature shared by all MME builder functions: each emits one macro's
 * code into *b.
 */
typedef void (*nvk_mme_builder_func)(struct mme_builder *b);

/* Build the macro `mme` for the given device.  Returns the macro
 * instructions and stores their size in *size_out.  Ownership of the
 * returned buffer is presumably the caller's — confirm in nvk_mme.c.
 */
uint32_t *nvk_build_mme(const struct nv_device_info *devinfo,
                        enum nvk_mme mme, size_t *size_out);

/* Builder functions, one per enum nvk_mme entry above */
void nvk_mme_select_cb0(struct mme_builder *b);
void nvk_mme_bind_cbuf_desc(struct mme_builder *b);
void nvk_mme_clear(struct mme_builder *b);
void nvk_mme_bind_ib(struct mme_builder *b);
void nvk_mme_bind_vb(struct mme_builder *b);
void nvk_mme_set_vb_enables(struct mme_builder *b);
void nvk_mme_set_vb_stride(struct mme_builder *b);
void nvk_mme_set_tess_params(struct mme_builder *b);
void nvk_mme_set_anti_alias(struct mme_builder *b);
void nvk_mme_draw(struct mme_builder *b);
void nvk_mme_draw_indexed(struct mme_builder *b);
void nvk_mme_draw_indirect(struct mme_builder *b);
void nvk_mme_draw_indexed_indirect(struct mme_builder *b);
void nvk_mme_draw_indirect_count(struct mme_builder *b);
void nvk_mme_draw_indexed_indirect_count(struct mme_builder *b);
void nvk_mme_add_cs_invocations(struct mme_builder *b);
void nvk_mme_dispatch_indirect(struct mme_builder *b);
void nvk_mme_write_cs_invocations(struct mme_builder *b);
void nvk_mme_xfb_counter_load(struct mme_builder *b);
void nvk_mme_xfb_draw_indirect(struct mme_builder *b);
void nvk_mme_set_priv_reg(struct mme_builder *b);
void nvk_mme_set_write_mask(struct mme_builder *b);
void nvk_mme_set_conservative_raster_state(struct mme_builder *b);
void nvk_mme_set_viewport_min_max_z(struct mme_builder *b);
void nvk_mme_set_z_clamp(struct mme_builder *b);
218 
/* A single (method, data) pair used by the MME test harness below to
 * describe initial register state and expected macro output.
 */
struct nvk_mme_mthd_data {
   uint16_t mthd;   /* method address (e.g. a NVK_SET_MME_SCRATCH() value) */
   uint32_t data;   /* 32-bit payload written to / expected at that method */
};

/* Sentinel terminating an array of nvk_mme_mthd_data */
#define NVK_MME_MTHD_DATA_END ((struct nvk_mme_mthd_data) { 0, 0 })
225 
/* One unit test for an MME macro. */
struct nvk_mme_test_case {
   /* Initial method/data state; presumably NVK_MME_MTHD_DATA_END
    * terminated — confirm against the test runner.
    */
   const struct nvk_mme_mthd_data *init;

   /* Parameters fed to the macro */
   const uint32_t *params;

   /* Expected method/data writes produced by the macro */
   const struct nvk_mme_mthd_data *expected;

   /* Optional custom check, given the raw results */
   void (*check)(const struct nv_device_info *devinfo,
                 const struct nvk_mme_test_case *test,
                 const struct nvk_mme_mthd_data *results);
};

/* Per-macro test tables, defined alongside the corresponding builders */
extern const struct nvk_mme_test_case nvk_mme_clear_tests[];
extern const struct nvk_mme_test_case nvk_mme_bind_vb_tests[];
extern const struct nvk_mme_test_case nvk_mme_set_tess_params_tests[];
extern const struct nvk_mme_test_case nvk_mme_set_anti_alias_tests[];

/* Run every registered MME test for the given device */
void nvk_test_all_mmes(const struct nv_device_info *devinfo);
241 
242 #endif /* NVK_MME_H */
243