/*
 * Copyright © 2022 Collabora Ltd. and Red Hat Inc.
 * SPDX-License-Identifier: MIT
 */
#ifndef NVK_CMD_BUFFER_H
#define NVK_CMD_BUFFER_H 1

#include "nvk_private.h"

#include "nv_push.h"
#include "nvk_cmd_pool.h"
#include "nvk_descriptor_set.h"
#include "nvk_image.h"
#include "nvk_shader.h"

#include "util/u_dynarray.h"

#include "vk_command_buffer.h"

#include <stdio.h>

struct nvk_buffer;
struct nvk_cbuf;
struct nvk_cmd_mem;
struct nvk_cmd_buffer;
struct nvk_cmd_pool;
struct nvk_image_view;
struct nvk_push_descriptor_set;
struct nvk_shader;
struct vk_shader;

/** Root descriptor table.  This gets pushed to the GPU directly */
struct nvk_root_descriptor_table {
   union {
      struct {
         uint32_t base_vertex;
         uint32_t base_instance;
         uint32_t draw_index;
         uint32_t view_index;
         struct nak_sample_location sample_locations[NVK_MAX_SAMPLES];
         struct nak_sample_mask sample_masks[NVK_MAX_SAMPLES];
      } draw;
      struct {
         uint32_t base_group[3];
         uint32_t group_count[3];
      } cs;
   };

   /* Client push constants */
   uint8_t push[NVK_MAX_PUSH_SIZE];

   /* Descriptor set addresses */
   struct nvk_buffer_address sets[NVK_MAX_SETS];

   /* For each descriptor set, the index in dynamic_buffers where that set's
    * dynamic buffers start.  This is maintained for every set, regardless
    * of whether or not anything is bound there.
    */
   uint8_t set_dynamic_buffer_start[NVK_MAX_SETS];

   /* Dynamic buffer bindings */
   union nvk_buffer_descriptor dynamic_buffers[NVK_MAX_DYNAMIC_BUFFERS];

   /* Enforce alignment to 0x100, as needed pre-Pascal */
   uint8_t __padding[0x38];
};

/* Helper macro for computing root descriptor byte offsets */
#define nvk_root_descriptor_offset(member)\
   offsetof(struct nvk_root_descriptor_table, member)
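
/* For example (illustrative only), nvk_root_descriptor_offset(push) is the
 * byte offset of the client push constants within the root table, and
 * nvk_root_descriptor_offset(cs.base_group) is the offset of the compute
 * base workgroup.  These offsets are useful when writing individual pieces
 * of the root table into a GPU buffer.
 */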

enum ENUM_PACKED nvk_descriptor_set_type {
   NVK_DESCRIPTOR_SET_TYPE_NONE,
   NVK_DESCRIPTOR_SET_TYPE_SET,
   NVK_DESCRIPTOR_SET_TYPE_PUSH,
   NVK_DESCRIPTOR_SET_TYPE_BUFFER,
};

struct nvk_descriptor_set_binding {
   enum nvk_descriptor_set_type type;
   struct nvk_descriptor_set *set;
   struct nvk_push_descriptor_set *push;
};

struct nvk_descriptor_state {
   alignas(16) char root[sizeof(struct nvk_root_descriptor_table)];
   void (*flush_root)(struct nvk_cmd_buffer *cmd,
                      struct nvk_descriptor_state *desc,
                      size_t offset, size_t size);

   struct nvk_descriptor_set_binding sets[NVK_MAX_SETS];
   uint32_t push_dirty;
};

#define nvk_descriptor_state_get_root(desc, member, dst) do { \
   const struct nvk_root_descriptor_table *root = \
      (const struct nvk_root_descriptor_table *)(desc)->root; \
   *dst = root->member; \
} while (0)

#define nvk_descriptor_state_get_root_array(desc, member, \
                                            start, count, dst) do { \
   const struct nvk_root_descriptor_table *root = \
      (const struct nvk_root_descriptor_table *)(desc)->root; \
   unsigned _start = start; \
   assert(_start + count <= ARRAY_SIZE(root->member)); \
   for (unsigned i = 0; i < count; i++) \
      (dst)[i] = root->member[i + _start]; \
} while (0)

#define nvk_descriptor_state_set_root(cmd, desc, member, src) do { \
   struct nvk_descriptor_state *_desc = (desc); \
   struct nvk_root_descriptor_table *root = \
      (struct nvk_root_descriptor_table *)_desc->root; \
   root->member = (src); \
   if (_desc->flush_root != NULL) { \
      size_t offset = (char *)&root->member - (char *)root; \
      _desc->flush_root((cmd), _desc, offset, sizeof(root->member)); \
   } \
} while (0)

#define nvk_descriptor_state_set_root_array(cmd, desc, member, \
                                            start, count, src) do { \
   struct nvk_descriptor_state *_desc = (desc); \
   struct nvk_root_descriptor_table *root = \
      (struct nvk_root_descriptor_table *)_desc->root; \
   unsigned _start = start; \
   assert(_start + count <= ARRAY_SIZE(root->member)); \
   for (unsigned i = 0; i < count; i++) \
      root->member[i + _start] = (src)[i]; \
   if (_desc->flush_root != NULL) { \
      size_t offset = (char *)&root->member[_start] - (char *)root; \
      _desc->flush_root((cmd), _desc, offset, \
                        count * sizeof(root->member[0])); \
   } \
} while (0)
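
/* Usage sketch (illustrative only): the accessors above are the intended way
 * to touch the root table, since the setters also invoke flush_root (when
 * set) with the dirtied byte range:
 *
 *    uint32_t base_vertex;
 *    nvk_descriptor_state_get_root(desc, draw.base_vertex, &base_vertex);
 *    nvk_descriptor_state_set_root(cmd, desc, draw.base_vertex, 0);
 */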

struct nvk_attachment {
   VkFormat vk_format;
   struct nvk_image_view *iview;

   VkResolveModeFlagBits resolve_mode;
   struct nvk_image_view *resolve_iview;

   /* Needed to track the value of storeOp in case we need to copy images
    * for the DRM_FORMAT_MOD_LINEAR case
    */
   VkAttachmentStoreOp store_op;
};

struct nvk_rendering_state {
   VkRenderingFlagBits flags;

   VkRect2D area;
   uint32_t layer_count;
   uint32_t view_mask;
   uint32_t samples;

   uint32_t color_att_count;
   struct nvk_attachment color_att[NVK_MAX_RTS];
   struct nvk_attachment depth_att;
   struct nvk_attachment stencil_att;

   bool all_linear;
};

struct nvk_graphics_state {
   struct nvk_rendering_state render;
   struct nvk_descriptor_state descriptors;

   uint32_t shaders_dirty;
   struct nvk_shader *shaders[MESA_SHADER_MESH + 1];

   struct nvk_cbuf_group {
      uint16_t dirty;
      struct nvk_cbuf cbufs[16];
   } cbuf_groups[5];

   /* Used for meta save/restore */
   struct nvk_addr_range vb0;

   /* Needed by vk_command_buffer::dynamic_graphics_state */
   struct vk_vertex_input_state _dynamic_vi;
   struct vk_sample_locations_state _dynamic_sl;
};

struct nvk_compute_state {
   struct nvk_descriptor_state descriptors;
   struct nvk_shader *shader;
};

struct nvk_cmd_push {
   void *map;
   uint64_t addr;
   uint32_t range;
   bool no_prefetch;
};

struct nvk_cmd_buffer {
   struct vk_command_buffer vk;

   struct {
      uint64_t descriptor_buffers[NVK_MAX_SETS];
      struct nvk_graphics_state gfx;
      struct nvk_compute_state cs;
   } state;

   /** List of nvk_cmd_mem
    *
    * This list exists entirely for ownership tracking.  Everything in here
    * must also be in pushes or bo_refs if it is to be referenced by this
    * command buffer.
    */
   struct list_head owned_mem;
   struct list_head owned_gart_mem;

   struct nvk_cmd_mem *upload_mem;
   uint32_t upload_offset;

   struct nvk_cmd_mem *cond_render_gart_mem;
   uint32_t cond_render_gart_offset;

   struct nvk_cmd_mem *push_mem;
   uint32_t *push_mem_limit;
   struct nv_push push;

   /** Array of struct nvk_cmd_push
    *
    * This acts both as a BO reference and as a range in the buffer to use
    * as a pushbuf.
    */
   struct util_dynarray pushes;

   uint64_t tls_space_needed;
};

VK_DEFINE_HANDLE_CASTS(nvk_cmd_buffer, vk.base, VkCommandBuffer,
                       VK_OBJECT_TYPE_COMMAND_BUFFER)

extern const struct vk_command_buffer_ops nvk_cmd_buffer_ops;

static inline struct nvk_device *
nvk_cmd_buffer_device(struct nvk_cmd_buffer *cmd)
{
   return (struct nvk_device *)cmd->vk.base.device;
}

static inline struct nvk_cmd_pool *
nvk_cmd_buffer_pool(struct nvk_cmd_buffer *cmd)
{
   return (struct nvk_cmd_pool *)cmd->vk.pool;
}

void nvk_cmd_buffer_new_push(struct nvk_cmd_buffer *cmd);

#define NVK_CMD_BUFFER_MAX_PUSH 512

static inline struct nv_push *
nvk_cmd_buffer_push(struct nvk_cmd_buffer *cmd, uint32_t dw_count)
{
   assert(dw_count <= NVK_CMD_BUFFER_MAX_PUSH);

   /* Compare to the actual limit on our push bo */
   if (unlikely(cmd->push.end + dw_count > cmd->push_mem_limit))
      nvk_cmd_buffer_new_push(cmd);

   cmd->push.limit = cmd->push.end + dw_count;

   return &cmd->push;
}
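
/* Usage sketch (illustrative only; SET_DEPTH_TEST stands in for whatever
 * method is actually being encoded): reserve the worst-case dword count up
 * front, then encode methods with the nv_push helpers:
 *
 *    struct nv_push *p = nvk_cmd_buffer_push(cmd, 2);
 *    P_MTHD(p, NV9097, SET_DEPTH_TEST);
 *    P_NV9097_SET_DEPTH_TEST(p, V_TRUE);
 */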

void
nvk_cmd_buffer_push_indirect(struct nvk_cmd_buffer *cmd,
                             uint64_t addr, uint32_t dw_count);

void nvk_cmd_buffer_begin_graphics(struct nvk_cmd_buffer *cmd,
                                   const VkCommandBufferBeginInfo *pBeginInfo);
void nvk_cmd_buffer_begin_compute(struct nvk_cmd_buffer *cmd,
                                  const VkCommandBufferBeginInfo *pBeginInfo);

void nvk_cmd_invalidate_graphics_state(struct nvk_cmd_buffer *cmd);
void nvk_cmd_invalidate_compute_state(struct nvk_cmd_buffer *cmd);

void nvk_cmd_bind_shaders(struct vk_command_buffer *vk_cmd,
                          uint32_t stage_count,
                          const gl_shader_stage *stages,
                          struct vk_shader ** const shaders);

void nvk_cmd_bind_graphics_shader(struct nvk_cmd_buffer *cmd,
                                  const gl_shader_stage stage,
                                  struct nvk_shader *shader);

void nvk_cmd_bind_compute_shader(struct nvk_cmd_buffer *cmd,
                                 struct nvk_shader *shader);

void nvk_cmd_dirty_cbufs_for_descriptors(struct nvk_cmd_buffer *cmd,
                                         VkShaderStageFlags stages,
                                         uint32_t sets_start, uint32_t sets_end,
                                         uint32_t dyn_start, uint32_t dyn_end);
void nvk_cmd_bind_vertex_buffer(struct nvk_cmd_buffer *cmd, uint32_t vb_idx,
                                struct nvk_addr_range addr_range);

static inline struct nvk_descriptor_state *
nvk_get_descriptors_state(struct nvk_cmd_buffer *cmd,
                          VkPipelineBindPoint bind_point)
{
   switch (bind_point) {
   case VK_PIPELINE_BIND_POINT_GRAPHICS:
      return &cmd->state.gfx.descriptors;
   case VK_PIPELINE_BIND_POINT_COMPUTE:
      return &cmd->state.cs.descriptors;
   default:
      unreachable("Unhandled bind point");
   }
}

VkResult nvk_cmd_buffer_upload_alloc(struct nvk_cmd_buffer *cmd,
                                     uint32_t size, uint32_t alignment,
                                     uint64_t *addr, void **ptr);

VkResult nvk_cmd_buffer_upload_data(struct nvk_cmd_buffer *cmd,
                                    const void *data, uint32_t size,
                                    uint32_t alignment, uint64_t *addr);
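
/* Usage sketch (illustrative only; the alignment of 4 is an arbitrary
 * choice): stage a small piece of CPU data in the upload heap and get back
 * a GPU address for it:
 *
 *    uint64_t addr;
 *    VkResult result = nvk_cmd_buffer_upload_data(cmd, data, size, 4, &addr);
 *    if (result != VK_SUCCESS)
 *       return result;
 */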

VkResult nvk_cmd_buffer_cond_render_alloc(struct nvk_cmd_buffer *cmd,
                                          uint64_t *addr);

void nvk_cmd_flush_wait_dep(struct nvk_cmd_buffer *cmd,
                            const VkDependencyInfo *dep,
                            bool wait);

void nvk_cmd_invalidate_deps(struct nvk_cmd_buffer *cmd,
                             uint32_t dep_count,
                             const VkDependencyInfo *deps);

void
nvk_cmd_buffer_flush_push_descriptors(struct nvk_cmd_buffer *cmd,
                                      struct nvk_descriptor_state *desc);

bool
nvk_cmd_buffer_get_cbuf_addr(struct nvk_cmd_buffer *cmd,
                             const struct nvk_descriptor_state *desc,
                             const struct nvk_shader *shader,
                             const struct nvk_cbuf *cbuf,
                             struct nvk_buffer_address *addr_out);
uint64_t
nvk_cmd_buffer_get_cbuf_descriptor_addr(struct nvk_cmd_buffer *cmd,
                                        const struct nvk_descriptor_state *desc,
                                        const struct nvk_cbuf *cbuf);

void nvk_cmd_dispatch_shader(struct nvk_cmd_buffer *cmd,
                             struct nvk_shader *shader,
                             const void *push_data, size_t push_size,
                             uint32_t groupCountX,
                             uint32_t groupCountY,
                             uint32_t groupCountZ);

void nvk_meta_resolve_rendering(struct nvk_cmd_buffer *cmd,
                                const VkRenderingInfo *pRenderingInfo);

void nvk_cmd_buffer_dump(struct nvk_cmd_buffer *cmd, FILE *fp);

void nvk_linear_render_copy(struct nvk_cmd_buffer *cmd,
                            const struct nvk_image_view *iview,
                            VkRect2D copy_rect,
                            bool copy_to_tiled_shadow);

#endif