/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "util/mesa-sha1.h"
#include "vk_util.h"

#include "anv_private.h"

/*
 * Descriptor set layouts.
 */

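/* Compute the alignment required for the surface-data and sampler-data parts
 * of a descriptor carrying the given data flags, for the given layout type.
 */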
static void
anv_descriptor_data_alignment(enum anv_descriptor_data data,
                              enum anv_descriptor_set_layout_type layout_type,
                              unsigned *out_surface_align,
                              unsigned *out_sampler_align)
{
   unsigned surface_align = 1, sampler_align = 1;

   if (data & (ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE |
               ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE |
               ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE))
      surface_align = MAX2(surface_align, 8);

   if (data & ANV_DESCRIPTOR_SURFACE)
      surface_align = MAX2(surface_align, ANV_SURFACE_STATE_SIZE);

   if (data & ANV_DESCRIPTOR_SURFACE_SAMPLER) {
      surface_align = MAX2(surface_align, ANV_SURFACE_STATE_SIZE);
      if (layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT)
         sampler_align = MAX2(sampler_align, ANV_SAMPLER_STATE_SIZE);
   }

   if (data & ANV_DESCRIPTOR_SAMPLER) {
      if (layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT)
         sampler_align = MAX2(sampler_align, ANV_SAMPLER_STATE_SIZE);
      else
         surface_align = MAX2(surface_align, ANV_SAMPLER_STATE_SIZE);
   }

   if (data & ANV_DESCRIPTOR_INLINE_UNIFORM)
      surface_align = MAX2(surface_align, ANV_UBO_ALIGNMENT);

   *out_surface_align = surface_align;
   *out_sampler_align = sampler_align;
}

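/* Map a Vulkan descriptor type to the anv_descriptor_data flags used when
 * the physical device uses indirect descriptors.
 */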
static enum anv_descriptor_data
anv_indirect_descriptor_data_for_type(VkDescriptorType type)
{
   enum anv_descriptor_data data = 0;

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      data = ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
             ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
             ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
             ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
             ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
             ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE;
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
             ANV_DESCRIPTOR_BUFFER_VIEW;
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE;
      break;

   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
      data = ANV_DESCRIPTOR_INLINE_UNIFORM;
      break;

   case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
      data = ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE;
      break;

   default:
      unreachable("Unsupported descriptor type");
   }

   /* We also need to push SSBO address ranges so that we can use A64
    * messages in the shader.
    */
   if (type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ||
       type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
       type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
       type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)
      data |= ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE;

   return data;
}

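/* Map a Vulkan descriptor type to the anv_descriptor_data flags used with
 * direct descriptors, taking the set layout type and creation flags into
 * account.
 */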
static enum anv_descriptor_data
anv_direct_descriptor_data_for_type(const struct anv_physical_device *device,
                                    enum anv_descriptor_set_layout_type layout_type,
                                    VkDescriptorSetLayoutCreateFlags set_flags,
                                    VkDescriptorType type)
{
   enum anv_descriptor_data data = 0;

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      if (set_flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT)
         return 0;
      data = ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
             ANV_DESCRIPTOR_SAMPLER;
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      if (layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT) {
         data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
                ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
                ANV_DESCRIPTOR_SURFACE |
                ANV_DESCRIPTOR_SAMPLER;
      } else {
         data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
                ANV_DESCRIPTOR_BTI_SAMPLER_STATE |
                ANV_DESCRIPTOR_SURFACE_SAMPLER;
      }
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      data = ANV_DESCRIPTOR_BTI_SURFACE_STATE |
             ANV_DESCRIPTOR_SURFACE;
      break;

   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
      data = ANV_DESCRIPTOR_INLINE_UNIFORM;
      break;

   case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
      data = ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE;
      break;

   default:
      unreachable("Unsupported descriptor type");
   }

   if (layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_BUFFER) {
      if (set_flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR) {
         /* Push descriptors are special with descriptor buffers. On Gfx12.5+
          * they have their own pool and are not reachable by the binding
          * table. On previous generations, they are only reachable through
          * the binding table.
          */
         if (device->uses_ex_bso) {
            data &= ~(ANV_DESCRIPTOR_BTI_SURFACE_STATE |
                      ANV_DESCRIPTOR_BTI_SAMPLER_STATE);
         }
      } else {
         /* Non-push descriptor buffers cannot be accessed through the
          * binding table on any platform.
          */
         data &= ~(ANV_DESCRIPTOR_BTI_SURFACE_STATE |
                   ANV_DESCRIPTOR_BTI_SAMPLER_STATE);
      }
   }

   return data;
}

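/* Pick the descriptor data flags for a type, dispatching to the direct or
 * indirect variant depending on the layout type and on whether the device
 * uses indirect descriptors.
 */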
static enum anv_descriptor_data
anv_descriptor_data_for_type(const struct anv_physical_device *device,
                             enum anv_descriptor_set_layout_type layout_type,
                             VkDescriptorSetLayoutCreateFlags set_flags,
                             VkDescriptorType type)
{
   if (layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_BUFFER)
      return anv_direct_descriptor_data_for_type(device, layout_type, set_flags, type);
   else if (device->indirect_descriptors)
      return anv_indirect_descriptor_data_for_type(type);
   else
      return anv_direct_descriptor_data_for_type(device, layout_type, set_flags, type);
}

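/* Union of the descriptor data flags for every type a mutable binding can
 * hold: the types listed for the binding, or all supported types when no
 * type list is provided.
 */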
static enum anv_descriptor_data
anv_descriptor_data_for_mutable_type(const struct anv_physical_device *device,
                                     enum anv_descriptor_set_layout_type layout_type,
                                     VkDescriptorSetLayoutCreateFlags set_flags,
                                     const VkMutableDescriptorTypeCreateInfoEXT *mutable_info,
                                     int binding)
{
   enum anv_descriptor_data desc_data = 0;

   if (!mutable_info || mutable_info->mutableDescriptorTypeListCount <= binding) {
      for (VkDescriptorType i = 0; i <= VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT; i++) {
         if (i == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
             i == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
             i == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
            continue;

         desc_data |= anv_descriptor_data_for_type(device, layout_type, set_flags, i);
      }

      desc_data |= anv_descriptor_data_for_type(
         device, layout_type, set_flags, VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);

      return desc_data;
   }

   const VkMutableDescriptorTypeListEXT *type_list =
      &mutable_info->pMutableDescriptorTypeLists[binding];
   for (uint32_t i = 0; i < type_list->descriptorTypeCount; i++) {
      desc_data |=
         anv_descriptor_data_for_type(device, layout_type, set_flags,
                                      type_list->pDescriptorTypes[i]);
   }

   return desc_data;
}

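/* Compute the number of bytes of surface data and sampler data one
 * descriptor with the given data flags consumes in the set.
 */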
static void
anv_descriptor_data_size(enum anv_descriptor_data data,
                         enum anv_descriptor_set_layout_type layout_type,
                         uint16_t *out_surface_size,
                         uint16_t *out_sampler_size)
{
   unsigned surface_size = 0;
   unsigned sampler_size = 0;

   if (data & ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE)
      surface_size += sizeof(struct anv_sampled_image_descriptor);

   if (data & ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE)
      surface_size += sizeof(struct anv_storage_image_descriptor);

   if (data & ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE)
      surface_size += sizeof(struct anv_address_range_descriptor);

   if (data & ANV_DESCRIPTOR_SURFACE)
      surface_size += ANV_SURFACE_STATE_SIZE;

   /* Direct descriptors have sampler states stored separately */
   if (layout_type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT) {
      if (data & ANV_DESCRIPTOR_SAMPLER)
         sampler_size += ANV_SAMPLER_STATE_SIZE;

      if (data & ANV_DESCRIPTOR_SURFACE_SAMPLER) {
         surface_size += ANV_SURFACE_STATE_SIZE;
         sampler_size += ANV_SAMPLER_STATE_SIZE;
      }
   } else {
      if (data & ANV_DESCRIPTOR_SAMPLER)
         surface_size += ANV_SAMPLER_STATE_SIZE;

      if (data & ANV_DESCRIPTOR_SURFACE_SAMPLER) {
         surface_size += ALIGN(ANV_SURFACE_STATE_SIZE + ANV_SAMPLER_STATE_SIZE,
                               ANV_SURFACE_STATE_SIZE);
      }
   }

   *out_surface_size = surface_size;
   *out_sampler_size = sampler_size;
}

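/* Whether descriptors of this type require space in the set's descriptor
 * buffer(s): inline uniform data always does, other types only if they have
 * a non-zero surface or sampler data size.
 */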
static bool
anv_needs_descriptor_buffer(VkDescriptorType desc_type,
                            enum anv_descriptor_set_layout_type layout_type,
                            enum anv_descriptor_data desc_data)
{
   if (desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
      return true;

   uint16_t surface_size, sampler_size;
   anv_descriptor_data_size(desc_data, layout_type,
                            &surface_size, &sampler_size);
   return surface_size > 0 || sampler_size > 0;
}

/** Returns the size in bytes of each descriptor with the given layout */
static void
anv_descriptor_size(const struct anv_descriptor_set_binding_layout *layout,
                    enum anv_descriptor_set_layout_type layout_type,
                    uint16_t *out_surface_stride,
                    uint16_t *out_sampler_stride)
{
   if (layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM) {
      assert(layout->data == ANV_DESCRIPTOR_INLINE_UNIFORM);
      assert(layout->array_size <= UINT16_MAX);
      *out_surface_stride = layout->array_size;
      *out_sampler_stride = 0;
      return;
   }

   anv_descriptor_data_size(layout->data, layout_type,
                            out_surface_stride,
                            out_sampler_stride);
}

/** Returns the size in bytes of the biggest descriptor in the given layout */
static void
anv_descriptor_size_for_mutable_type(const struct anv_physical_device *device,
                                     enum anv_descriptor_set_layout_type layout_type,
                                     VkDescriptorSetLayoutCreateFlags set_flags,
                                     const VkMutableDescriptorTypeCreateInfoEXT *mutable_info,
                                     int binding,
                                     uint16_t *out_surface_stride,
                                     uint16_t *out_sampler_stride)
{
   *out_surface_stride = 0;
   *out_sampler_stride = 0;

   if (!mutable_info ||
       mutable_info->mutableDescriptorTypeListCount <= binding) {
      for (VkDescriptorType i = 0; i <= VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT; i++) {

         if (i == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
             i == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
             i == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
            continue;

         enum anv_descriptor_data desc_data =
            anv_descriptor_data_for_type(device, layout_type, set_flags, i);
         uint16_t surface_stride, sampler_stride;
         anv_descriptor_data_size(desc_data, layout_type,
                                  &surface_stride, &sampler_stride);

         *out_surface_stride = MAX2(*out_surface_stride, surface_stride);
         *out_sampler_stride = MAX2(*out_sampler_stride, sampler_stride);
      }

      enum anv_descriptor_data desc_data =
         anv_descriptor_data_for_type(device, layout_type, set_flags,
                                      VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
      uint16_t surface_stride, sampler_stride;
      anv_descriptor_data_size(desc_data, layout_type,
                               &surface_stride, &sampler_stride);

      *out_surface_stride = MAX2(*out_surface_stride, surface_stride);
      *out_sampler_stride = MAX2(*out_sampler_stride, sampler_stride);

      return;
   }

   const VkMutableDescriptorTypeListEXT *type_list =
      &mutable_info->pMutableDescriptorTypeLists[binding];
   for (uint32_t i = 0; i < type_list->descriptorTypeCount; i++) {
      enum anv_descriptor_data desc_data =
         anv_descriptor_data_for_type(device, layout_type, set_flags,
                                      type_list->pDescriptorTypes[i]);

      uint16_t surface_stride, sampler_stride;
      anv_descriptor_data_size(desc_data, layout_type,
                               &surface_stride, &sampler_stride);

      *out_surface_stride = MAX2(*out_surface_stride, surface_stride);
      *out_sampler_stride = MAX2(*out_sampler_stride, sampler_stride);
   }
}

static bool
anv_descriptor_data_supports_bindless(const struct anv_physical_device *pdevice,
                                      VkDescriptorSetLayoutCreateFlags set_flags,
                                      enum anv_descriptor_data data)
{
   if (set_flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT) {
      /* When using descriptor buffers, on platforms that don't have extended
       * bindless offset, all push descriptors have to go through the binding
       * tables.
       */
      if (!pdevice->uses_ex_bso &&
          (set_flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR)) {
         return data & (ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE |
                        ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE |
                        ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE);
      }

      /* Otherwise we can do bindless for everything */
      return true;
   } else {
      if (pdevice->indirect_descriptors) {
         return data & (ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE |
                        ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE |
                        ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE);
      }

      /* Direct descriptors support bindless for everything */
      return true;
   }
}

bool
anv_descriptor_supports_bindless(const struct anv_physical_device *pdevice,
                                 const struct anv_descriptor_set_layout *set,
                                 const struct anv_descriptor_set_binding_layout *binding)
{
   return anv_descriptor_data_supports_bindless(pdevice, set->flags, binding->data);
}

bool
anv_descriptor_requires_bindless(const struct anv_physical_device *pdevice,
                                 const struct anv_descriptor_set_layout *set,
                                 const struct anv_descriptor_set_binding_layout *binding)
{
   if (pdevice->always_use_bindless)
      return anv_descriptor_supports_bindless(pdevice, set, binding);

   if (set->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR)
      return false;

   if (set->flags & (VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT |
                     VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT))
      return true;

   static const VkDescriptorBindingFlagBits flags_requiring_bindless =
      VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT |
      VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |
      VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT;

   return (binding->flags & flags_requiring_bindless) != 0;
}

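/* Select the set layout type: descriptor-buffer layouts when
 * VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT is set,
 * otherwise indirect or direct descriptors depending on the device.
 */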
static enum anv_descriptor_set_layout_type
anv_descriptor_set_layout_type_for_flags(const struct anv_physical_device *device,
                                         const VkDescriptorSetLayoutCreateInfo *pCreateInfo)
{
   if (pCreateInfo->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT)
      return ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_BUFFER;
   else if (device->indirect_descriptors)
      return ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_INDIRECT;
   else
      return ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT;
}

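/* Whether the mutable type list of a binding includes the given type. An
 * absent or empty list is treated as including every type.
 */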
static bool
mutable_list_includes_type(const VkMutableDescriptorTypeCreateInfoEXT *mutable_info,
                           uint32_t binding, VkDescriptorType type)
{
   if (!mutable_info || mutable_info->mutableDescriptorTypeListCount == 0)
      return true;

   const VkMutableDescriptorTypeListEXT *type_list =
      &mutable_info->pMutableDescriptorTypeLists[binding];
   for (uint32_t i = 0; i < type_list->descriptorTypeCount; i++) {
      if (type_list->pDescriptorTypes[i] == type)
         return true;
   }

   return false;
}

void anv_GetDescriptorSetLayoutSupport(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    VkDescriptorSetLayoutSupport*               pSupport)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   const struct anv_physical_device *pdevice = device->physical;

   uint32_t surface_count[MESA_VULKAN_SHADER_STAGES] = { 0, };
   VkDescriptorType varying_desc_type = VK_DESCRIPTOR_TYPE_MAX_ENUM;
   bool needs_descriptor_buffer = false;

   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);
   const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);

   enum anv_descriptor_set_layout_type layout_type =
      anv_descriptor_set_layout_type_for_flags(pdevice, pCreateInfo);

   for (uint32_t b = 0; b < pCreateInfo->bindingCount; b++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[b];

      VkDescriptorBindingFlags flags = 0;
      if (binding_flags_info && binding_flags_info->bindingCount > 0) {
         assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
         flags = binding_flags_info->pBindingFlags[b];
      }

      /* Combined image/sampler descriptors are not supported with descriptor
       * buffers & mutable descriptor types because we cannot know from the
       * shader where to find the sampler structure. It can be written at the
       * beginning of the descriptor (at offset 0) or in the second part (at
       * offset 64 bytes).
       */
      if ((pCreateInfo->flags &
           VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT) &&
          binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT &&
          mutable_list_includes_type(mutable_info, b,
                                     VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)) {
         pSupport->supported = false;
         return;
      }

      enum anv_descriptor_data desc_data =
         binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
         anv_descriptor_data_for_mutable_type(pdevice, layout_type,
                                              pCreateInfo->flags,
                                              mutable_info, b) :
         anv_descriptor_data_for_type(pdevice, layout_type,
                                      pCreateInfo->flags,
                                      binding->descriptorType);

      if (anv_needs_descriptor_buffer(binding->descriptorType,
                                      layout_type, desc_data))
         needs_descriptor_buffer = true;

      if (flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)
         varying_desc_type = binding->descriptorType;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         /* There is no real limit on samplers */
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         /* Inline uniforms don't use a binding */
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         if (anv_descriptor_data_supports_bindless(pdevice,
                                                   pCreateInfo->flags,
                                                   desc_data))
            break;

         if (binding->pImmutableSamplers) {
            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               ANV_FROM_HANDLE(anv_sampler, sampler,
                               binding->pImmutableSamplers[i]);
               anv_foreach_stage(s, binding->stageFlags)
                  surface_count[s] += sampler->n_planes;
            }
         } else {
            anv_foreach_stage(s, binding->stageFlags)
               surface_count[s] += binding->descriptorCount;
         }
         break;

      default:
         if (anv_descriptor_data_supports_bindless(pdevice,
                                                   pCreateInfo->flags,
                                                   desc_data))
            break;

         anv_foreach_stage(s, binding->stageFlags)
            surface_count[s] += binding->descriptorCount;
         break;
      }
   }

   for (unsigned s = 0; s < ARRAY_SIZE(surface_count); s++) {
      if (needs_descriptor_buffer)
         surface_count[s] += 1;
   }

   VkDescriptorSetVariableDescriptorCountLayoutSupport *vdcls =
      vk_find_struct(pSupport->pNext,
                     DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT);
   if (vdcls != NULL) {
      if (varying_desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         vdcls->maxVariableDescriptorCount = MAX_INLINE_UNIFORM_BLOCK_SIZE;
      } else if (varying_desc_type != VK_DESCRIPTOR_TYPE_MAX_ENUM) {
         vdcls->maxVariableDescriptorCount = UINT16_MAX;
      } else {
         vdcls->maxVariableDescriptorCount = 0;
      }
   }

   bool supported = true;
   for (unsigned s = 0; s < ARRAY_SIZE(surface_count); s++) {
      /* Our maximum binding table size is 240 and we need to reserve 8 for
       * render targets.
       */
      if (surface_count[s] > MAX_BINDING_TABLE_SIZE - MAX_RTS)
         supported = false;
   }

   pSupport->supported = supported;
}

VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);

      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...]  If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored.
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
      if ((desc_type == VK_DESCRIPTOR_TYPE_SAMPLER ||
           desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
          pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   /* We need to allocate descriptor set layouts off the device allocator
    * with DEVICE scope because they are reference counted and may not be
    * destroyed when vkDestroyDescriptorSetLayout is called.
    */
   VK_MULTIALLOC(ma);
   VK_MULTIALLOC_DECL(&ma, struct anv_descriptor_set_layout, set_layout, 1);
   VK_MULTIALLOC_DECL(&ma, struct anv_descriptor_set_binding_layout,
                           bindings, num_bindings);
   VK_MULTIALLOC_DECL(&ma, struct anv_sampler *, samplers,
                           immutable_sampler_count);

   if (!vk_object_multizalloc(&device->vk, &ma, NULL,
                              VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT))
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->ref_cnt = 1;
   set_layout->binding_count = num_bindings;
   set_layout->flags = pCreateInfo->flags;
   set_layout->type = anv_descriptor_set_layout_type_for_flags(device->physical,
                                                               pCreateInfo);

   for (uint32_t b = 0; b < num_bindings; b++) {
      /* Initialize all binding_layout entries to -1 */
      memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));

      set_layout->binding[b].flags = 0;
      set_layout->binding[b].data = 0;
      set_layout->binding[b].max_plane_count = 0;
      set_layout->binding[b].array_size = 0;
      set_layout->binding[b].immutable_samplers = NULL;
   }

   /* Initialize all samplers to 0 */
   memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));

   uint32_t buffer_view_count = 0;
   uint32_t dynamic_offset_count = 0;
   uint32_t descriptor_buffer_surface_size = 0;
   uint32_t descriptor_buffer_sampler_size = 0;
   uint32_t sampler_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;
      /* We temporarily store pCreateInfo->pBindings[] index (plus one) in the
       * immutable_samplers pointer.  This provides us with a quick-and-dirty
       * way to sort the bindings by binding number.
       */
      set_layout->binding[b].immutable_samplers = (void *)(uintptr_t)(j + 1);
   }

   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);

   const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);

   for (uint32_t b = 0; b < num_bindings; b++) {
      /* We stashed the pCreateInfo->pBindings[] index (plus one) in the
       * immutable_samplers pointer.  Check for NULL (empty binding) and then
       * reset it and compute the index.
       */
      if (set_layout->binding[b].immutable_samplers == NULL)
         continue;
      const uint32_t info_idx =
         (uintptr_t)(void *)set_layout->binding[b].immutable_samplers - 1;
      set_layout->binding[b].immutable_samplers = NULL;

      const VkDescriptorSetLayoutBinding *binding =
         &pCreateInfo->pBindings[info_idx];

      if (binding->descriptorCount == 0)
         continue;

      set_layout->binding[b].type = binding->descriptorType;

      if (binding_flags_info && binding_flags_info->bindingCount > 0) {
         assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
         set_layout->binding[b].flags =
            binding_flags_info->pBindingFlags[info_idx];

         /* From the Vulkan spec:
          *
          *    "If VkDescriptorSetLayoutCreateInfo::flags includes
          *    VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, then
          *    all elements of pBindingFlags must not include
          *    VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
          *    VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, or
          *    VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT"
          */
         if (pCreateInfo->flags &
             VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR) {
            assert(!(set_layout->binding[b].flags &
               (VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT |
                VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |
                VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)));
         }
      }

      set_layout->binding[b].data =
         binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
         anv_descriptor_data_for_mutable_type(device->physical,
                                              set_layout->type,
                                              pCreateInfo->flags,
                                              mutable_info, b) :
         anv_descriptor_data_for_type(device->physical,
                                      set_layout->type,
                                      pCreateInfo->flags,
                                      binding->descriptorType);

      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->descriptor_count;
      set_layout->descriptor_count += binding->descriptorCount;

      if (set_layout->binding[b].data & ANV_DESCRIPTOR_BUFFER_VIEW) {
         set_layout->binding[b].buffer_view_index = buffer_view_count;
         buffer_view_count += binding->descriptorCount;
      }

      set_layout->binding[b].max_plane_count = 1;
      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_MUTABLE_EXT:
         if (binding->pImmutableSamplers) {
            set_layout->binding[b].immutable_samplers = samplers;
            samplers += binding->descriptorCount;

            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               ANV_FROM_HANDLE(anv_sampler, sampler,
                               binding->pImmutableSamplers[i]);

               set_layout->binding[b].immutable_samplers[i] = sampler;
               if (set_layout->binding[b].max_plane_count < sampler->n_planes)
                  set_layout->binding[b].max_plane_count = sampler->n_planes;
            }
         }
         break;

      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
         set_layout->dynamic_offset_stages[dynamic_offset_count] = binding->stageFlags;
         dynamic_offset_count += binding->descriptorCount;
         assert(dynamic_offset_count < MAX_DYNAMIC_BUFFERS);
         break;

      default:
         break;
      }

      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_MUTABLE_EXT) {
         anv_descriptor_size_for_mutable_type(
            device->physical, set_layout->type,
            pCreateInfo->flags, mutable_info, b,
            &set_layout->binding[b].descriptor_data_surface_size,
            &set_layout->binding[b].descriptor_data_sampler_size);
      } else {
         anv_descriptor_size(&set_layout->binding[b],
                             set_layout->type,
                             &set_layout->binding[b].descriptor_data_surface_size,
                             &set_layout->binding[b].descriptor_data_sampler_size);
      }

      /* For multi-planar bindings, we make every descriptor consume the maximum
       * number of planes so we don't have to bother with walking arrays and
       * adding things up every time.  Fortunately, YCbCr samplers aren't all
       * that common and likely won't be in the middle of big arrays.
       */
      set_layout->binding[b].descriptor_surface_stride =
         set_layout->binding[b].max_plane_count *
         set_layout->binding[b].descriptor_data_surface_size;
      set_layout->binding[b].descriptor_sampler_stride =
         set_layout->binding[b].max_plane_count *
         set_layout->binding[b].descriptor_data_sampler_size;

      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) {
         sampler_count += binding->descriptorCount *
                          set_layout->binding[b].max_plane_count;
      }

      unsigned surface_align, sampler_align;
      anv_descriptor_data_alignment(set_layout->binding[b].data,
                                    set_layout->type,
                                    &surface_align,
                                    &sampler_align);
      descriptor_buffer_surface_size =
         align(descriptor_buffer_surface_size, surface_align);
      descriptor_buffer_sampler_size =
         align(descriptor_buffer_sampler_size, sampler_align);

      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         set_layout->binding[b].descriptor_surface_offset = descriptor_buffer_surface_size;
         descriptor_buffer_surface_size += binding->descriptorCount;
      } else {
         set_layout->binding[b].descriptor_surface_offset = descriptor_buffer_surface_size;
         descriptor_buffer_surface_size +=
            set_layout->binding[b].descriptor_surface_stride * binding->descriptorCount;
      }

      set_layout->binding[b].descriptor_sampler_offset = descriptor_buffer_sampler_size;
      descriptor_buffer_sampler_size +=
         set_layout->binding[b].descriptor_sampler_stride * binding->descriptorCount;

      set_layout->shader_stages |= binding->stageFlags;
   }

   /* Sanity checks */
   assert(descriptor_buffer_sampler_size == 0 ||
          set_layout->type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT);

   set_layout->buffer_view_count = buffer_view_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;
   set_layout->descriptor_buffer_surface_size = descriptor_buffer_surface_size;
   set_layout->descriptor_buffer_sampler_size = descriptor_buffer_sampler_size;

   if (pCreateInfo->flags &
       VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT) {
      assert(set_layout->descriptor_buffer_surface_size == 0);
      assert(set_layout->descriptor_buffer_sampler_size == 0);
      set_layout->embedded_sampler_count = sampler_count;
   }

   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

void
anv_descriptor_set_layout_destroy(struct anv_device *device,
                                  struct anv_descriptor_set_layout *layout)
{
   assert(layout->ref_cnt == 0);
   vk_object_free(&device->vk, NULL, layout);
}

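/* Return the last binding of the layout if it was created with
 * VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT, NULL otherwise.
 */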
static const struct anv_descriptor_set_binding_layout *
set_layout_dynamic_binding(const struct anv_descriptor_set_layout *set_layout)
{
   if (set_layout->binding_count == 0)
      return NULL;

   const struct anv_descriptor_set_binding_layout *last_binding =
      &set_layout->binding[set_layout->binding_count - 1];
   if (!(last_binding->flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT))
      return NULL;

   return last_binding;
}

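/* Effective descriptor count of a set, shrinking the variable-count binding
 * (if any) down to the count requested at allocation time.
 */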
static uint32_t
set_layout_descriptor_count(const struct anv_descriptor_set_layout *set_layout,
                            uint32_t var_desc_count)
{
   const struct anv_descriptor_set_binding_layout *dynamic_binding =
      set_layout_dynamic_binding(set_layout);
   if (dynamic_binding == NULL)
      return set_layout->descriptor_count;

   assert(var_desc_count <= dynamic_binding->array_size);
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;

   if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
      return set_layout->descriptor_count;

   return set_layout->descriptor_count - shrink;
}

static uint32_t
set_layout_buffer_view_count(const struct anv_descriptor_set_layout *set_layout,
                             uint32_t var_desc_count)
{
   const struct anv_descriptor_set_binding_layout *dynamic_binding =
      set_layout_dynamic_binding(set_layout);
   if (dynamic_binding == NULL)
      return set_layout->buffer_view_count;

   assert(var_desc_count <= dynamic_binding->array_size);
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;

   if (!(dynamic_binding->data & ANV_DESCRIPTOR_BUFFER_VIEW))
      return set_layout->buffer_view_count;

   return set_layout->buffer_view_count - shrink;
}

static bool
anv_descriptor_set_layout_empty(const struct anv_descriptor_set_layout *set_layout)
{
   return set_layout->binding_count == 0;
}

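/* Compute the surface and sampler descriptor buffer sizes required for a set
 * with this layout, taking the variable descriptor count into account.
 */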
static void
anv_descriptor_set_layout_descriptor_buffer_size(const struct anv_descriptor_set_layout *set_layout,
                                                 uint32_t var_desc_count,
                                                 uint32_t *out_surface_size,
                                                 uint32_t *out_sampler_size)
{
   const struct anv_descriptor_set_binding_layout *dynamic_binding =
      set_layout_dynamic_binding(set_layout);
   if (dynamic_binding == NULL) {
      *out_surface_size = ALIGN(set_layout->descriptor_buffer_surface_size,
                                ANV_UBO_ALIGNMENT);
      *out_sampler_size = set_layout->descriptor_buffer_sampler_size;
      return;
   }

   assert(var_desc_count <= dynamic_binding->array_size);
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;
   uint32_t set_surface_size, set_sampler_size;

   if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
      /* Inline uniform blocks are specified to use the descriptor array
       * size as the size in bytes of the block.
       */
      set_surface_size = set_layout->descriptor_buffer_surface_size - shrink;
      set_sampler_size = 0;
   } else {
      set_surface_size =
         set_layout->descriptor_buffer_surface_size > 0 ?
         (set_layout->descriptor_buffer_surface_size -
          shrink * dynamic_binding->descriptor_surface_stride) : 0;
      set_sampler_size =
         set_layout->descriptor_buffer_sampler_size > 0 ?
         (set_layout->descriptor_buffer_sampler_size -
          shrink * dynamic_binding->descriptor_sampler_stride) : 0;
   }

   *out_surface_size = ALIGN(set_surface_size, ANV_UBO_ALIGNMENT);
   *out_sampler_size = set_sampler_size;
}

void anv_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   anv_descriptor_set_layout_unref(device, set_layout);
}

void
anv_descriptor_set_layout_print(const struct anv_descriptor_set_layout *layout)
{
   fprintf(stderr, "set layout:\n");
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      fprintf(stderr, "  binding%03u: offsets=0x%08x/0x%08x sizes=%04u/%04u strides=%03u/%03u planes=%hhu count=%03u\n",
              b,
              layout->binding[b].descriptor_surface_offset,
              layout->binding[b].descriptor_sampler_offset,
              layout->binding[b].descriptor_data_surface_size,
              layout->binding[b].descriptor_data_sampler_size,
              layout->binding[b].descriptor_surface_stride,
              layout->binding[b].descriptor_sampler_stride,
              layout->binding[b].max_plane_count,
              layout->binding[b].array_size);
   }
}

#define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x));

static void
sha1_update_immutable_sampler(struct mesa_sha1 *ctx,
                              bool embedded_sampler,
                              const struct anv_sampler *sampler)
{
   if (!sampler->vk.ycbcr_conversion)
      return;

   /* Hash the conversion if any as this affects the placement of descriptors
    * in the set due to the number of planes.
    */
   SHA1_UPDATE_VALUE(ctx, sampler->vk.ycbcr_conversion->state);

   /* For embedded samplers, we need to hash the sampler parameters as the
    * sampler handle is baked into the shader and this ultimately is part of
    * the shader hash key. We can only consider two shaders identical if all
    * their embedded sampler parameters are identical.
    */
   if (embedded_sampler)
      SHA1_UPDATE_VALUE(ctx, sampler->sha1);
}

static void
sha1_update_descriptor_set_binding_layout(struct mesa_sha1 *ctx,
                                          bool embedded_samplers,
                                          const struct anv_descriptor_set_binding_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->flags);
   SHA1_UPDATE_VALUE(ctx, layout->data);
   SHA1_UPDATE_VALUE(ctx, layout->max_plane_count);
   SHA1_UPDATE_VALUE(ctx, layout->array_size);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_index);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_index);
   SHA1_UPDATE_VALUE(ctx, layout->buffer_view_index);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_surface_offset);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_sampler_offset);

   if (layout->immutable_samplers) {
      for (uint16_t i = 0; i < layout->array_size; i++) {
         sha1_update_immutable_sampler(ctx, embedded_samplers,
                                       layout->immutable_samplers[i]);
      }
   }
}

static void
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
                                  const struct anv_descriptor_set_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->flags);
   SHA1_UPDATE_VALUE(ctx, layout->binding_count);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_count);
   SHA1_UPDATE_VALUE(ctx, layout->shader_stages);
   SHA1_UPDATE_VALUE(ctx, layout->buffer_view_count);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_count);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_buffer_surface_size);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_buffer_sampler_size);

   bool embedded_samplers =
      layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT;

   for (uint16_t i = 0; i < layout->binding_count; i++) {
      sha1_update_descriptor_set_binding_layout(ctx, embedded_samplers,
                                                &layout->binding[i]);
   }
}

/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */

void
anv_pipeline_sets_layout_init(struct anv_pipeline_sets_layout *layout,
                              struct anv_device *device,
                              bool independent_sets)
{
   memset(layout, 0, sizeof(*layout));

   layout->device = device;
   layout->push_descriptor_set_index = -1;
   layout->independent_sets = independent_sets;
}

void
anv_pipeline_sets_layout_add(struct anv_pipeline_sets_layout *layout,
                             uint32_t set_idx,
                             struct anv_descriptor_set_layout *set_layout)
{
   if (layout->set[set_idx].layout)
      return;

   /* Workaround CTS: Internal CTS issue 3584 */
   if (layout->independent_sets && anv_descriptor_set_layout_empty(set_layout))
      return;

   if (layout->type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_UNKNOWN)
      layout->type = set_layout->type;
   else
      assert(layout->type == set_layout->type);

   layout->num_sets = MAX2(set_idx + 1, layout->num_sets);

   layout->set[set_idx].layout =
      anv_descriptor_set_layout_ref(set_layout);

   layout->set[set_idx].dynamic_offset_start = layout->num_dynamic_buffers;
   layout->num_dynamic_buffers += set_layout->dynamic_offset_count;

   assert(layout->num_dynamic_buffers < MAX_DYNAMIC_BUFFERS);

   if (set_layout->flags &
       VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR) {
      assert(layout->push_descriptor_set_index == -1);
      layout->push_descriptor_set_index = set_idx;
   }
}
1153 
1154 uint32_t
anv_pipeline_sets_layout_embedded_sampler_count(const struct anv_pipeline_sets_layout * layout)1155 anv_pipeline_sets_layout_embedded_sampler_count(const struct anv_pipeline_sets_layout *layout)
1156 {
1157    uint32_t count = 0;
1158    for (unsigned s = 0; s < layout->num_sets; s++) {
1159       if (!layout->set[s].layout)
1160          continue;
1161       count += layout->set[s].layout->embedded_sampler_count;
1162    }
1163    return count;
1164 }
1165 
1166 void
anv_pipeline_sets_layout_hash(struct anv_pipeline_sets_layout * layout)1167 anv_pipeline_sets_layout_hash(struct anv_pipeline_sets_layout *layout)
1168 {
1169    struct mesa_sha1 ctx;
1170    _mesa_sha1_init(&ctx);
1171    for (unsigned s = 0; s < layout->num_sets; s++) {
1172       if (!layout->set[s].layout)
1173          continue;
1174       sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
1175       _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
1176                         sizeof(layout->set[s].dynamic_offset_start));
1177    }
1178    _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
1179    _mesa_sha1_final(&ctx, layout->sha1);
1180 }
1181 
1182 void
anv_pipeline_sets_layout_fini(struct anv_pipeline_sets_layout * layout)1183 anv_pipeline_sets_layout_fini(struct anv_pipeline_sets_layout *layout)
1184 {
1185    for (unsigned s = 0; s < layout->num_sets; s++) {
1186       if (!layout->set[s].layout)
1187          continue;
1188 
1189       anv_descriptor_set_layout_unref(layout->device, layout->set[s].layout);
1190    }
1191 }
1192 
1193 void
anv_pipeline_sets_layout_print(const struct anv_pipeline_sets_layout * layout)1194 anv_pipeline_sets_layout_print(const struct anv_pipeline_sets_layout *layout)
1195 {
1196    fprintf(stderr, "layout: dyn_count=%u sets=%u ind=%u\n",
1197            layout->num_dynamic_buffers,
1198            layout->num_sets,
1199            layout->independent_sets);
1200    for (unsigned s = 0; s < layout->num_sets; s++) {
1201       if (!layout->set[s].layout)
1202          continue;
1203 
1204       fprintf(stderr, "   set%i: dyn_start=%u flags=0x%x\n",
1205               s, layout->set[s].dynamic_offset_start, layout->set[s].layout->flags);
1206    }
1207 }
1208 
anv_CreatePipelineLayout(VkDevice _device,const VkPipelineLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkPipelineLayout * pPipelineLayout)1209 VkResult anv_CreatePipelineLayout(
1210     VkDevice                                    _device,
1211     const VkPipelineLayoutCreateInfo*           pCreateInfo,
1212     const VkAllocationCallbacks*                pAllocator,
1213     VkPipelineLayout*                           pPipelineLayout)
1214 {
1215    ANV_FROM_HANDLE(anv_device, device, _device);
1216    struct anv_pipeline_layout *layout;
1217 
1218    assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);
1219 
1220    layout = vk_object_zalloc(&device->vk, pAllocator, sizeof(*layout),
1221                              VK_OBJECT_TYPE_PIPELINE_LAYOUT);
1222    if (layout == NULL)
1223       return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
1224 
1225    anv_pipeline_sets_layout_init(&layout->sets_layout, device,
1226                                  pCreateInfo->flags & VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT);
1227 
1228    for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
1229       ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
1230                       pCreateInfo->pSetLayouts[set]);
1231 
1232       /* VUID-VkPipelineLayoutCreateInfo-graphicsPipelineLibrary-06753
1233        *
1234        *    "If graphicsPipelineLibrary is not enabled, elements of
1235        *     pSetLayouts must be valid VkDescriptorSetLayout objects"
1236        *
1237        * As a result of supporting graphicsPipelineLibrary, we need to allow
1238        * null descriptor set layouts.
1239        */
1240       if (set_layout == NULL)
1241          continue;
1242 
1243       anv_pipeline_sets_layout_add(&layout->sets_layout, set, set_layout);
1244    }
1245 
1246    anv_pipeline_sets_layout_hash(&layout->sets_layout);
1247 
1248    *pPipelineLayout = anv_pipeline_layout_to_handle(layout);
1249 
1250    return VK_SUCCESS;
1251 }
1252 
1253 void anv_DestroyPipelineLayout(
1254     VkDevice                                    _device,
1255     VkPipelineLayout                            _pipelineLayout,
1256     const VkAllocationCallbacks*                pAllocator)
1257 {
1258    ANV_FROM_HANDLE(anv_device, device, _device);
1259    ANV_FROM_HANDLE(anv_pipeline_layout, layout, _pipelineLayout);
1260 
1261    if (!layout)
1262       return;
1263 
1264    anv_pipeline_sets_layout_fini(&layout->sets_layout);
1265 
1266    vk_object_free(&device->vk, pAllocator, layout);
1267 }
1268 
1269 /*
1270  * Descriptor pools.
1271  *
1272  * These are implemented using a big pool of memory with a vma heap for the
1273  * host memory allocations, plus a state_stream and a free list for the
1274  * buffer view surface state. The spec allows us to fail to allocate due to
1275  * fragmentation in all cases but two: 1) after pool reset, allocating up
1276  * until the pool size with no freeing must succeed and 2) allocating and
1277  * freeing only descriptor sets with the same layout. Case 1) is easy enough,
1278  * and the vma heap ensures case 2).
1279  */
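/*
 * Editorial sketch (derived from the code below) of what a pool manages:
 *
 *    pool->host_heap : anv_descriptor_set structs, anv_descriptor arrays,
 *                      anv_buffer_view arrays (plus shadow surface-state
 *                      storage for host-only pools), carved out of a single
 *                      block allocated together with the pool object.
 *    pool->surfaces  : descriptor buffer data, BO-backed unless host-only.
 *    pool->samplers  : sampler descriptor data, BO-backed unless host-only.
 *    surface_state_stream / surface_state_free_list : internal surface
 *                      states for buffer views.
 */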
1280 
1281 /* The vma heap reserves 0 to mean NULL; we have to offset by some amount to
1282  * ensure we can allocate the entire BO without hitting zero.  The actual
1283  * amount doesn't matter.
1284  */
1285 #define POOL_HEAP_OFFSET 64
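/* For example: a vma offset of POOL_HEAP_OFFSET + 128 returned by
 * util_vma_heap_alloc() corresponds to byte offset 128 within the heap's BO
 * or host memory, once POOL_HEAP_OFFSET is subtracted again in
 * anv_descriptor_pool_heap_alloc().
 */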
1286 
1287 #define EMPTY 1
1288 
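/* A pool heap is backed either by plain host memory (host-only pools) or by
 * a mapped, capture-enabled BO; in both cases a util_vma_heap hands out
 * offsets into that backing store, starting at POOL_HEAP_OFFSET.
 */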
1289 static VkResult
1290 anv_descriptor_pool_heap_init(struct anv_device *device,
1291                               struct anv_descriptor_pool_heap *heap,
1292                               uint32_t size,
1293                               bool host_only,
1294                               bool samplers)
1295 {
1296    if (size == 0)
1297       return VK_SUCCESS;
1298 
1299    if (host_only) {
1300       heap->size = size;
1301       heap->host_mem = vk_zalloc(&device->vk.alloc, size, 8,
1302                                  VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1303       if (heap->host_mem == NULL)
1304          return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
1305    } else {
1306       const char *bo_name =
1307          device->physical->indirect_descriptors ? "indirect descriptors" :
1308          samplers ? "direct sampler" : "direct surfaces";
1309 
1310       heap->size = align(size, 4096);
1311 
1312       VkResult result = anv_device_alloc_bo(device,
1313                                             bo_name, heap->size,
1314                                             ANV_BO_ALLOC_CAPTURE |
1315                                             ANV_BO_ALLOC_MAPPED |
1316                                             ANV_BO_ALLOC_HOST_CACHED_COHERENT |
1317                                             (samplers ?
1318                                              ANV_BO_ALLOC_DYNAMIC_VISIBLE_POOL :
1319                                              ANV_BO_ALLOC_DESCRIPTOR_POOL),
1320                                             0 /* explicit_address */,
1321                                             &heap->bo);
1322       if (result != VK_SUCCESS)
1323          return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
1324    }
1325 
1326    util_vma_heap_init(&heap->heap, POOL_HEAP_OFFSET, heap->size);
1327 
1328    return VK_SUCCESS;
1329 }
1330 
1331 static void
1332 anv_descriptor_pool_heap_fini(struct anv_device *device,
1333                               struct anv_descriptor_pool_heap *heap)
1334 {
1335    if (heap->size == 0)
1336       return;
1337 
1338    util_vma_heap_finish(&heap->heap);
1339 
1340    if (heap->bo)
1341       anv_device_release_bo(device, heap->bo);
1342 
1343    if (heap->host_mem)
1344       vk_free(&device->vk.alloc, heap->host_mem);
1345 }
1346 
1347 static void
1348 anv_descriptor_pool_heap_reset(struct anv_device *device,
1349                                struct anv_descriptor_pool_heap *heap)
1350 {
1351    if (heap->size == 0)
1352       return;
1353 
1354    util_vma_heap_finish(&heap->heap);
1355    util_vma_heap_init(&heap->heap, POOL_HEAP_OFFSET, heap->size);
1356 }
1357 
1358 static VkResult
1359 anv_descriptor_pool_heap_alloc(struct anv_descriptor_pool *pool,
1360                                struct anv_descriptor_pool_heap *heap,
1361                                uint32_t size, uint32_t alignment,
1362                                struct anv_state *state)
1363 {
1364    uint64_t pool_vma_offset =
1365       util_vma_heap_alloc(&heap->heap, size, alignment);
1366    if (pool_vma_offset == 0)
1367       return vk_error(pool, VK_ERROR_FRAGMENTED_POOL);
1368 
1369    assert(pool_vma_offset >= POOL_HEAP_OFFSET &&
1370           pool_vma_offset - POOL_HEAP_OFFSET <= INT32_MAX);
1371 
1372    state->offset = pool_vma_offset - POOL_HEAP_OFFSET;
1373    state->alloc_size = size;
1374    if (heap->host_mem)
1375       state->map = heap->host_mem + state->offset;
1376    else
1377       state->map = heap->bo->map + state->offset;
1378 
1379    return VK_SUCCESS;
1380 }
1381 
1382 static void
1383 anv_descriptor_pool_heap_free(struct anv_descriptor_pool_heap *heap,
1384                               struct anv_state state)
1385 {
1386    util_vma_heap_free(&heap->heap,
1387                       (uint64_t)state.offset + POOL_HEAP_OFFSET,
1388                       state.alloc_size);
1389 }
1390 
1391 VkResult anv_CreateDescriptorPool(
1392     VkDevice                                    _device,
1393     const VkDescriptorPoolCreateInfo*           pCreateInfo,
1394     const VkAllocationCallbacks*                pAllocator,
1395     VkDescriptorPool*                           pDescriptorPool)
1396 {
1397    ANV_FROM_HANDLE(anv_device, device, _device);
1398    struct anv_descriptor_pool *pool;
1399 
1400    const VkDescriptorPoolInlineUniformBlockCreateInfo *inline_info =
1401       vk_find_struct_const(pCreateInfo->pNext,
1402                            DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO);
1403    const VkMutableDescriptorTypeCreateInfoEXT *mutable_info =
1404       vk_find_struct_const(pCreateInfo->pNext,
1405                            MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT);
1406 
1407    uint32_t descriptor_count = 0;
1408    uint32_t buffer_view_count = 0;
1409    uint32_t descriptor_bo_surface_size = 0;
1410    uint32_t descriptor_bo_sampler_size = 0;
1411 
1412    const enum anv_descriptor_set_layout_type layout_type =
1413       device->physical->indirect_descriptors ?
1414       ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_INDIRECT :
1415       ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT;
1416 
1417    for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
1418       enum anv_descriptor_data desc_data =
1419          pCreateInfo->pPoolSizes[i].type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
1420          anv_descriptor_data_for_mutable_type(device->physical, layout_type,
1421                                               pCreateInfo->flags,
1422                                               mutable_info, i) :
1423          anv_descriptor_data_for_type(device->physical, layout_type,
1424                                       pCreateInfo->flags,
1425                                       pCreateInfo->pPoolSizes[i].type);
1426 
1427       if (desc_data & ANV_DESCRIPTOR_BUFFER_VIEW)
1428          buffer_view_count += pCreateInfo->pPoolSizes[i].descriptorCount;
1429 
1430       uint16_t desc_surface_size, desc_sampler_size;
1431       if (pCreateInfo->pPoolSizes[i].type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT) {
1432          anv_descriptor_size_for_mutable_type(device->physical, layout_type,
1433                                               pCreateInfo->flags, mutable_info, i,
1434                                               &desc_surface_size, &desc_sampler_size);
1435       } else {
1436          anv_descriptor_data_size(desc_data, layout_type,
1437                                   &desc_surface_size, &desc_sampler_size);
1438       }
1439 
1440       uint32_t desc_data_surface_size =
1441          desc_surface_size * pCreateInfo->pPoolSizes[i].descriptorCount;
1442       uint32_t desc_data_sampler_size =
1443          desc_sampler_size * pCreateInfo->pPoolSizes[i].descriptorCount;
1444 
1445       /* Combined image sampler descriptors can take up to 3 slots if they
1446        * hold a YCbCr image.
1447        */
1448       if (pCreateInfo->pPoolSizes[i].type ==
1449           VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
1450          desc_data_surface_size *= 3;
1451          desc_data_sampler_size *= 3;
1452       }
1453 
1454       if (pCreateInfo->pPoolSizes[i].type ==
1455           VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
1456          /* Inline uniform blocks are specified to use the descriptor array
1457           * size as the size in bytes of the block.
1458           */
1459          assert(inline_info);
1460          desc_data_surface_size += pCreateInfo->pPoolSizes[i].descriptorCount;
1461       }
1462 
1463       descriptor_bo_surface_size += desc_data_surface_size;
1464       descriptor_bo_sampler_size += desc_data_sampler_size;
1465 
1466       descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
1467    }
1468    /* We have to align descriptor buffer allocations to 32B so that we can
1469     * push descriptor buffers.  This means that each descriptor buffer
1470     * allocated may burn up to 32B of extra space to get the right alignment.
1471     * (Technically, it's at most 28B because we're always going to start at
1472     * least 4B aligned but we're being conservative here.)  Allocate enough
1473     * extra space that we can chop it into maxSets pieces and align each one
1474     * of them to 32B.
1475     */
1476    descriptor_bo_surface_size += ANV_UBO_ALIGNMENT * pCreateInfo->maxSets;
1477    /* We align inline uniform blocks to ANV_UBO_ALIGNMENT */
1478    if (inline_info) {
1479       descriptor_bo_surface_size +=
1480          ANV_UBO_ALIGNMENT * inline_info->maxInlineUniformBlockBindings;
1481    }
1482 
1483    const bool host_only =
1484       pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT;
1485 
1486    /* For host_only pools, allocate some memory to hold the written surface
1487     * states of the internal anv_buffer_view. With normal pools, the memory
1488     * holding surface state is allocated from the device surface_state_pool.
1489     */
1490    const size_t host_mem_size =
1491       pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
1492       descriptor_count * sizeof(struct anv_descriptor) +
1493       buffer_view_count * sizeof(struct anv_buffer_view) +
1494       (host_only ? buffer_view_count * ANV_SURFACE_STATE_SIZE : 0);
1495 
1496    pool = vk_object_zalloc(&device->vk, pAllocator,
1497                            sizeof(*pool) + host_mem_size,
1498                            VK_OBJECT_TYPE_DESCRIPTOR_POOL);
1499    if (!pool)
1500       return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
1501 
1502    pool->host_mem_size = host_mem_size;
1503    util_vma_heap_init(&pool->host_heap, POOL_HEAP_OFFSET, host_mem_size);
1504 
1505    pool->host_only = host_only;
1506 
1507    VkResult result = anv_descriptor_pool_heap_init(device,
1508                                                    &pool->surfaces,
1509                                                    descriptor_bo_surface_size,
1510                                                    pool->host_only,
1511                                                    false /* samplers */);
1512    if (result != VK_SUCCESS) {
1513       vk_object_free(&device->vk, pAllocator, pool);
1514       return result;
1515    }
1516 
1517    result = anv_descriptor_pool_heap_init(device,
1518                                           &pool->samplers,
1519                                           descriptor_bo_sampler_size,
1520                                           pool->host_only,
1521                                           true /* samplers */);
1522    if (result != VK_SUCCESS) {
1523       anv_descriptor_pool_heap_fini(device, &pool->surfaces);
1524       vk_object_free(&device->vk, pAllocator, pool);
1525       return result;
1526    }
1527 
1528    /* All the surface states allocated by the descriptor pool are internal. We
1529     * have to allocate them to handle the fact that we do not have surface
1530     * states for VkBuffers.
1531     */
1532    anv_state_stream_init(&pool->surface_state_stream,
1533                          &device->internal_surface_state_pool, 4096);
1534    pool->surface_state_free_list = NULL;
1535 
1536    list_inithead(&pool->desc_sets);
1537 
1538    ANV_RMV(descriptor_pool_create, device, pCreateInfo, pool, false);
1539 
1540    *pDescriptorPool = anv_descriptor_pool_to_handle(pool);
1541 
1542    return VK_SUCCESS;
1543 }
1544 
1545 void anv_DestroyDescriptorPool(
1546     VkDevice                                    _device,
1547     VkDescriptorPool                            _pool,
1548     const VkAllocationCallbacks*                pAllocator)
1549 {
1550    ANV_FROM_HANDLE(anv_device, device, _device);
1551    ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);
1552 
1553    if (!pool)
1554       return;
1555 
1556    ANV_RMV(resource_destroy, device, pool);
1557 
1558    list_for_each_entry_safe(struct anv_descriptor_set, set,
1559                             &pool->desc_sets, pool_link) {
1560       anv_descriptor_set_layout_unref(device, set->layout);
1561    }
1562 
1563    util_vma_heap_finish(&pool->host_heap);
1564 
1565    anv_state_stream_finish(&pool->surface_state_stream);
1566 
1567    anv_descriptor_pool_heap_fini(device, &pool->surfaces);
1568    anv_descriptor_pool_heap_fini(device, &pool->samplers);
1569 
1570    vk_object_free(&device->vk, pAllocator, pool);
1571 }
1572 
1573 VkResult anv_ResetDescriptorPool(
1574     VkDevice                                    _device,
1575     VkDescriptorPool                            descriptorPool,
1576     VkDescriptorPoolResetFlags                  flags)
1577 {
1578    ANV_FROM_HANDLE(anv_device, device, _device);
1579    ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);
1580 
1581    list_for_each_entry_safe(struct anv_descriptor_set, set,
1582                             &pool->desc_sets, pool_link) {
1583       anv_descriptor_set_layout_unref(device, set->layout);
1584    }
1585    list_inithead(&pool->desc_sets);
1586 
1587    util_vma_heap_finish(&pool->host_heap);
1588    util_vma_heap_init(&pool->host_heap, POOL_HEAP_OFFSET, pool->host_mem_size);
1589 
1590    anv_descriptor_pool_heap_reset(device, &pool->surfaces);
1591    anv_descriptor_pool_heap_reset(device, &pool->samplers);
1592 
1593    anv_state_stream_finish(&pool->surface_state_stream);
1594    anv_state_stream_init(&pool->surface_state_stream,
1595                          &device->internal_surface_state_pool, 4096);
1596    pool->surface_state_free_list = NULL;
1597 
1598    return VK_SUCCESS;
1599 }
1600 
1601 static VkResult
1602 anv_descriptor_pool_alloc_set(struct anv_descriptor_pool *pool,
1603                               uint32_t size,
1604                               struct anv_descriptor_set **set)
1605 {
1606    uint64_t vma_offset = util_vma_heap_alloc(&pool->host_heap, size, 1);
1607 
1608    if (vma_offset == 0) {
1609       if (size <= pool->host_heap.free_size) {
1610          return VK_ERROR_FRAGMENTED_POOL;
1611       } else {
1612          return VK_ERROR_OUT_OF_POOL_MEMORY;
1613       }
1614    }
1615 
1616    assert(vma_offset >= POOL_HEAP_OFFSET);
1617    uint64_t host_mem_offset = vma_offset - POOL_HEAP_OFFSET;
1618 
1619    *set = (struct anv_descriptor_set *) (pool->host_mem + host_mem_offset);
1620    (*set)->size = size;
1621 
1622    return VK_SUCCESS;
1623 }
1624 
1625 static void
1626 anv_descriptor_pool_free_set(struct anv_descriptor_pool *pool,
1627                              struct anv_descriptor_set *set)
1628 {
1629    util_vma_heap_free(&pool->host_heap,
1630                       ((char *) set - pool->host_mem) + POOL_HEAP_OFFSET,
1631                       set->size);
1632 }
1633 
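/* Freed buffer-view surface states are recycled through an intrusive free
 * list: the entry below is stored in the mapped surface state memory
 * itself, so recycling needs no extra allocation.
 */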
1634 struct surface_state_free_list_entry {
1635    void *next;
1636    struct anv_state state;
1637 };
1638 
1639 static struct anv_state
1640 anv_descriptor_pool_alloc_state(struct anv_descriptor_pool *pool)
1641 {
1642    assert(!pool->host_only);
1643 
1644    struct surface_state_free_list_entry *entry =
1645       pool->surface_state_free_list;
1646 
1647    if (entry) {
1648       struct anv_state state = entry->state;
1649       pool->surface_state_free_list = entry->next;
1650       assert(state.alloc_size == ANV_SURFACE_STATE_SIZE);
1651       return state;
1652    } else {
1653       struct anv_state state =
1654          anv_state_stream_alloc(&pool->surface_state_stream,
1655                                 ANV_SURFACE_STATE_SIZE, 64);
1656       return state;
1657    }
1658 }
1659 
1660 static void
1661 anv_descriptor_pool_free_state(struct anv_descriptor_pool *pool,
1662                                struct anv_state state)
1663 {
1664    assert(state.alloc_size);
1665    /* Put the buffer view surface state back on the free list. */
1666    struct surface_state_free_list_entry *entry = state.map;
1667    entry->next = pool->surface_state_free_list;
1668    entry->state = state;
1669    pool->surface_state_free_list = entry;
1670 }
1671 
1672 static size_t
1673 anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout,
1674                                bool host_only, uint32_t var_desc_count)
1675 {
1676    const uint32_t descriptor_count =
1677       set_layout_descriptor_count(layout, var_desc_count);
1678    const uint32_t buffer_view_count =
1679       set_layout_buffer_view_count(layout, var_desc_count);
1680 
1681    return sizeof(struct anv_descriptor_set) +
1682           descriptor_count * sizeof(struct anv_descriptor) +
1683           buffer_view_count * sizeof(struct anv_buffer_view) +
1684           (host_only ? buffer_view_count * ANV_SURFACE_STATE_SIZE : 0);
1685 }
1686 
1687 static VkResult
1688 anv_descriptor_set_create(struct anv_device *device,
1689                           struct anv_descriptor_pool *pool,
1690                           struct anv_descriptor_set_layout *layout,
1691                           uint32_t var_desc_count,
1692                           struct anv_descriptor_set **out_set)
1693 {
1694    struct anv_descriptor_set *set;
1695    const size_t size = anv_descriptor_set_layout_size(layout,
1696                                                       pool->host_only,
1697                                                       var_desc_count);
1698 
1699    VkResult result = anv_descriptor_pool_alloc_set(pool, size, &set);
1700    if (result != VK_SUCCESS)
1701       return result;
1702 
1703    uint32_t descriptor_buffer_surface_size, descriptor_buffer_sampler_size;
1704    anv_descriptor_set_layout_descriptor_buffer_size(layout, var_desc_count,
1705                                                     &descriptor_buffer_surface_size,
1706                                                     &descriptor_buffer_sampler_size);
1707 
1708    set->desc_surface_state = ANV_STATE_NULL;
1709    set->is_push = false;
1710 
1711    if (descriptor_buffer_surface_size) {
1712       result = anv_descriptor_pool_heap_alloc(pool, &pool->surfaces,
1713                                               descriptor_buffer_surface_size,
1714                                               ANV_UBO_ALIGNMENT,
1715                                               &set->desc_surface_mem);
1716       if (result != VK_SUCCESS) {
1717          anv_descriptor_pool_free_set(pool, set);
1718          return result;
1719       }
1720 
1721       set->desc_surface_addr = (struct anv_address) {
1722          .bo = pool->surfaces.bo,
1723          .offset = set->desc_surface_mem.offset,
1724       };
1725       set->desc_offset = anv_address_physical(set->desc_surface_addr) -
1726                          device->physical->va.internal_surface_state_pool.addr;
1727 
1728       enum isl_format format =
1729          anv_isl_format_for_descriptor_type(device,
1730                                             VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
1731 
1732       if (!pool->host_only) {
1733          set->desc_surface_state = anv_descriptor_pool_alloc_state(pool);
1734          if (set->desc_surface_state.map == NULL) {
1735             anv_descriptor_pool_free_set(pool, set);
1736             return vk_error(pool, VK_ERROR_OUT_OF_DEVICE_MEMORY);
1737          }
1738 
1739          anv_fill_buffer_surface_state(device, set->desc_surface_state.map,
1740                                        format, ISL_SWIZZLE_IDENTITY,
1741                                        ISL_SURF_USAGE_CONSTANT_BUFFER_BIT,
1742                                        set->desc_surface_addr,
1743                                        descriptor_buffer_surface_size, 1);
1744       }
1745    } else {
1746       set->desc_surface_mem = ANV_STATE_NULL;
1747       set->desc_surface_addr = ANV_NULL_ADDRESS;
1748    }
1749 
1750    if (descriptor_buffer_sampler_size) {
1751       result = anv_descriptor_pool_heap_alloc(pool, &pool->samplers,
1752                                               descriptor_buffer_sampler_size,
1753                                               ANV_SAMPLER_STATE_SIZE,
1754                                               &set->desc_sampler_mem);
1755       if (result != VK_SUCCESS) {
1756          anv_descriptor_pool_free_set(pool, set);
1757          return result;
1758       }
1759 
1760       set->desc_sampler_addr = (struct anv_address) {
1761          .bo = pool->samplers.bo,
1762          .offset = set->desc_sampler_mem.offset,
1763       };
1764    } else {
1765       set->desc_sampler_mem = ANV_STATE_NULL;
1766       set->desc_sampler_addr = ANV_NULL_ADDRESS;
1767    }
1768 
1769    vk_object_base_init(&device->vk, &set->base,
1770                        VK_OBJECT_TYPE_DESCRIPTOR_SET);
1771    set->pool = pool;
1772    set->layout = layout;
1773    anv_descriptor_set_layout_ref(layout);
1774 
1775    set->buffer_view_count =
1776       set_layout_buffer_view_count(layout, var_desc_count);
1777    set->descriptor_count =
1778       set_layout_descriptor_count(layout, var_desc_count);
1779 
1780    set->buffer_views =
1781       (struct anv_buffer_view *) &set->descriptors[set->descriptor_count];
1782 
1783    /* By zeroing the descriptors now, we can later verify that
1784     * a descriptor has not been populated with user data.
1785     */
1786    memset(set->descriptors, 0,
1787           sizeof(struct anv_descriptor) * set->descriptor_count);
1788 
1789    /* Go through and fill out immutable samplers if we have any */
1790    for (uint32_t b = 0; b < layout->binding_count; b++) {
1791       if (layout->binding[b].immutable_samplers) {
1792          for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
1793             /* The type will get changed to COMBINED_IMAGE_SAMPLER in
1794              * UpdateDescriptorSets if needed.  However, if the descriptor
1795              * set has an immutable sampler, UpdateDescriptorSets may never
1796              * touch it, so we need to make sure it's 100% valid now.
1797              *
1798              * We don't need to actually provide a sampler because the helper
1799              * will always write in the immutable sampler regardless of what
1800              * is in the sampler parameter.
1801              */
1802             VkDescriptorImageInfo info = { };
1803             anv_descriptor_set_write_image_view(device, set, &info,
1804                                                 VK_DESCRIPTOR_TYPE_SAMPLER,
1805                                                 b, i);
1806          }
1807       }
1808    }
1809 
1810    /* Allocate surface states for real descriptor sets if we're using indirect
1811     * descriptors. For host-only sets, we just store the surface state data in
1812     * host (malloc'ed) memory.
1813     */
1814    if (device->physical->indirect_descriptors) {
1815       if (!pool->host_only) {
1816          for (uint32_t b = 0; b < set->buffer_view_count; b++) {
1817             set->buffer_views[b].general.state =
1818                anv_descriptor_pool_alloc_state(pool);
1819          }
1820       } else {
1821          void *host_surface_states =
1822             set->buffer_views + set->buffer_view_count;
1823          memset(host_surface_states, 0,
1824                 set->buffer_view_count * ANV_SURFACE_STATE_SIZE);
1825          for (uint32_t b = 0; b < set->buffer_view_count; b++) {
1826             set->buffer_views[b].general.state = (struct anv_state) {
1827                .alloc_size = ANV_SURFACE_STATE_SIZE,
1828                .map = host_surface_states + b * ANV_SURFACE_STATE_SIZE,
1829             };
1830          }
1831       }
1832    }
1833 
1834    list_addtail(&set->pool_link, &pool->desc_sets);
1835 
1836    *out_set = set;
1837 
1838    return VK_SUCCESS;
1839 }
1840 
1841 static void
1842 anv_descriptor_set_destroy(struct anv_device *device,
1843                            struct anv_descriptor_pool *pool,
1844                            struct anv_descriptor_set *set)
1845 {
1846    anv_descriptor_set_layout_unref(device, set->layout);
1847 
1848    if (set->desc_surface_mem.alloc_size) {
1849       anv_descriptor_pool_heap_free(&pool->surfaces, set->desc_surface_mem);
1850       if (set->desc_surface_state.alloc_size)
1851          anv_descriptor_pool_free_state(pool, set->desc_surface_state);
1852    }
1853 
1854    if (set->desc_sampler_mem.alloc_size)
1855       anv_descriptor_pool_heap_free(&pool->samplers, set->desc_sampler_mem);
1856 
1857    if (device->physical->indirect_descriptors) {
1858       if (!pool->host_only) {
1859          for (uint32_t b = 0; b < set->buffer_view_count; b++) {
1860             if (set->buffer_views[b].general.state.alloc_size) {
1861                anv_descriptor_pool_free_state(
1862                   pool, set->buffer_views[b].general.state);
1863             }
1864          }
1865       }
1866    }
1867 
1868    list_del(&set->pool_link);
1869 
1870    vk_object_base_finish(&set->base);
1871    anv_descriptor_pool_free_set(pool, set);
1872 }
1873 
1874 VkResult anv_AllocateDescriptorSets(
1875     VkDevice                                    _device,
1876     const VkDescriptorSetAllocateInfo*          pAllocateInfo,
1877     VkDescriptorSet*                            pDescriptorSets)
1878 {
1879    ANV_FROM_HANDLE(anv_device, device, _device);
1880    ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);
1881 
1882    VkResult result = VK_SUCCESS;
1883    struct anv_descriptor_set *set = NULL;
1884    uint32_t i;
1885 
1886    const VkDescriptorSetVariableDescriptorCountAllocateInfo *vdcai =
1887       vk_find_struct_const(pAllocateInfo->pNext,
1888                            DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO);
1889 
1890    for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
1891       ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
1892                       pAllocateInfo->pSetLayouts[i]);
1893 
1894       uint32_t var_desc_count = 0;
1895       if (vdcai != NULL && vdcai->descriptorSetCount > 0) {
1896          assert(vdcai->descriptorSetCount == pAllocateInfo->descriptorSetCount);
1897          var_desc_count = vdcai->pDescriptorCounts[i];
1898       }
1899 
1900       result = anv_descriptor_set_create(device, pool, layout,
1901                                          var_desc_count, &set);
1902       if (result != VK_SUCCESS)
1903          break;
1904 
1905       pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
1906    }
1907 
1908    if (result != VK_SUCCESS) {
1909       anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
1910                              i, pDescriptorSets);
1911       /* The Vulkan 1.3.228 spec, section 14.2.3. Allocation of Descriptor Sets:
1912        *
1913        *   "If the creation of any of those descriptor sets fails, then the
1914        *    implementation must destroy all successfully created descriptor
1915        *    set objects from this command, set all entries of the
1916        *    pDescriptorSets array to VK_NULL_HANDLE and return the error."
1917        */
1918       for (i = 0; i < pAllocateInfo->descriptorSetCount; i++)
1919          pDescriptorSets[i] = VK_NULL_HANDLE;
1920 
1921    }
1922 
1923    return result;
1924 }
1925 
1926 VkResult anv_FreeDescriptorSets(
1927     VkDevice                                    _device,
1928     VkDescriptorPool                            descriptorPool,
1929     uint32_t                                    count,
1930     const VkDescriptorSet*                      pDescriptorSets)
1931 {
1932    ANV_FROM_HANDLE(anv_device, device, _device);
1933    ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);
1934 
1935    for (uint32_t i = 0; i < count; i++) {
1936       ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);
1937 
1938       if (!set)
1939          continue;
1940 
1941       anv_descriptor_set_destroy(device, pool, set);
1942    }
1943 
1944    return VK_SUCCESS;
1945 }
1946 
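/* (Re)initialize the descriptor set backing a push-descriptor bind point.
 * The descriptor buffer memory is reallocated from the command buffer's
 * state streams whenever the previous allocation is still in use by the GPU
 * or too small for the new layout; otherwise it is reused and, when the
 * layout matches, the old descriptor contents are preserved (see the copy
 * logic below). Returns false on allocation failure.
 */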
1947 bool
1948 anv_push_descriptor_set_init(struct anv_cmd_buffer *cmd_buffer,
1949                              struct anv_push_descriptor_set *push_set,
1950                              struct anv_descriptor_set_layout *layout)
1951 {
1952    struct anv_descriptor_set *set = &push_set->set;
1953    /* Only copy the old descriptor data if needed:
1954     *    - not if there was no previous layout
1955     *    - not if the layout is different (the descriptor set data becomes
1956     *      undefined)
1957     *    - not if there is only one descriptor (we know the entire data
1958     *      will be replaced)
1959     *
1960     * TODO: we could optimize further: keep a copy of the old data on the
1961     *       host, copy only the bits that are not newly written, ...
1962     */
1963    const bool copy_old_descriptors = set->layout != NULL &&
1964                                      set->layout == layout &&
1965                                      layout->descriptor_count > 1;
1966 
1967    if (set->layout != layout) {
1968       if (set->layout) {
1969          anv_descriptor_set_layout_unref(cmd_buffer->device, set->layout);
1970       } else {
1971          /* one-time initialization */
1972          vk_object_base_init(&cmd_buffer->device->vk, &set->base,
1973                              VK_OBJECT_TYPE_DESCRIPTOR_SET);
1974          set->is_push = true;
1975          set->buffer_views = push_set->buffer_views;
1976       }
1977 
1978       anv_descriptor_set_layout_ref(layout);
1979       set->layout = layout;
1980       set->generate_surface_states = 0;
1981    }
1982 
1983    assert(set->is_push && set->buffer_views);
1984    set->size = anv_descriptor_set_layout_size(layout, false /* host_only */, 0);
1985    set->buffer_view_count = layout->buffer_view_count;
1986    set->descriptor_count = layout->descriptor_count;
1987 
1988    if (layout->descriptor_buffer_surface_size &&
1989        (push_set->set_used_on_gpu ||
1990         set->desc_surface_mem.alloc_size < layout->descriptor_buffer_surface_size)) {
1991       struct anv_physical_device *pdevice = cmd_buffer->device->physical;
1992       struct anv_state_stream *push_stream;
1993       uint64_t push_base_address;
1994 
1995       if (layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT) {
1996          push_stream = pdevice->uses_ex_bso ?
1997             &cmd_buffer->push_descriptor_buffer_stream :
1998             &cmd_buffer->surface_state_stream;
1999          push_base_address = pdevice->uses_ex_bso ?
2000             pdevice->va.push_descriptor_buffer_pool.addr :
2001             pdevice->va.internal_surface_state_pool.addr;
2002       } else {
2003          push_stream = pdevice->indirect_descriptors ?
2004             &cmd_buffer->indirect_push_descriptor_stream :
2005             &cmd_buffer->surface_state_stream;
2006          push_base_address = pdevice->indirect_descriptors ?
2007             pdevice->va.indirect_push_descriptor_pool.addr :
2008             pdevice->va.internal_surface_state_pool.addr;
2009       }
2010 
2011       uint32_t surface_size, sampler_size;
2012       anv_descriptor_set_layout_descriptor_buffer_size(layout, 0,
2013                                                        &surface_size,
2014                                                        &sampler_size);
2015 
2016       /* The previous buffer is either actively used by some GPU command (so
2017        * we can't modify it) or is too small.  Allocate a new one.
2018        */
2019       struct anv_state desc_surface_mem =
2020          anv_state_stream_alloc(push_stream, surface_size, ANV_UBO_ALIGNMENT);
2021       if (desc_surface_mem.map == NULL)
2022          return false;
2023 
2024       if (copy_old_descriptors) {
2025          memcpy(desc_surface_mem.map, set->desc_surface_mem.map,
2026                 MIN2(desc_surface_mem.alloc_size,
2027                      set->desc_surface_mem.alloc_size));
2028       }
2029       set->desc_surface_mem = desc_surface_mem;
2030 
2031       set->desc_surface_addr = anv_state_pool_state_address(
2032          push_stream->state_pool,
2033          set->desc_surface_mem);
2034       set->desc_offset = anv_address_physical(set->desc_surface_addr) -
2035                          push_base_address;
2036    }
2037 
2038    if (layout->descriptor_buffer_sampler_size &&
2039        (push_set->set_used_on_gpu ||
2040         set->desc_sampler_mem.alloc_size < layout->descriptor_buffer_sampler_size)) {
2041       struct anv_physical_device *pdevice = cmd_buffer->device->physical;
2042       assert(!pdevice->indirect_descriptors);
2043       struct anv_state_stream *push_stream = &cmd_buffer->dynamic_state_stream;
2044 
2045       uint32_t surface_size, sampler_size;
2046       anv_descriptor_set_layout_descriptor_buffer_size(layout, 0,
2047                                                        &surface_size,
2048                                                        &sampler_size);
2049 
2050       /* The previous buffer is either actively used by some GPU command (so
2051        * we can't modify it) or is too small.  Allocate a new one.
2052        */
2053       struct anv_state desc_sampler_mem =
2054          anv_state_stream_alloc(push_stream, sampler_size, ANV_SAMPLER_STATE_SIZE);
2055       if (desc_sampler_mem.map == NULL)
2056          return false;
2057 
2058       if (copy_old_descriptors) {
2059          memcpy(desc_sampler_mem.map, set->desc_sampler_mem.map,
2060                 MIN2(desc_sampler_mem.alloc_size,
2061                      set->desc_sampler_mem.alloc_size));
2062       }
2063       set->desc_sampler_mem = desc_sampler_mem;
2064 
2065       set->desc_sampler_addr = anv_state_pool_state_address(
2066          push_stream->state_pool,
2067          set->desc_sampler_mem);
2068    }
2069 
2070    if (push_set->set_used_on_gpu) {
2071       set->desc_surface_state = ANV_STATE_NULL;
2072       push_set->set_used_on_gpu = false;
2073    }
2074 
2075    return true;
2076 }
2077 
2078 void
2079 anv_push_descriptor_set_finish(struct anv_push_descriptor_set *push_set)
2080 {
2081    struct anv_descriptor_set *set = &push_set->set;
2082    if (set->layout) {
2083       struct anv_device *device =
2084          container_of(set->base.device, struct anv_device, vk);
2085       anv_descriptor_set_layout_unref(device, set->layout);
2086    }
2087 }
2088 
2089 static uint32_t
2090 anv_surface_state_to_handle(struct anv_physical_device *device,
2091                             struct anv_state state)
2092 {
2093    /* Bits 31:12 of the bindless surface offset in the extended message
2094     * descriptor are bits 25:6 of the byte-based address.
2095     */
2096    assert(state.offset >= 0);
2097    uint32_t offset = state.offset;
2098    if (device->uses_ex_bso) {
2099       assert((offset & 0x3f) == 0);
2100       return offset;
2101    } else {
2102       assert((offset & 0x3f) == 0 && offset < (1 << 26));
2103       return offset << 6;
2104    }
2105 }
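/* Example (hypothetical numbers): without ex_bso, a surface state at byte
 * offset 0x40 becomes the handle 0x40 << 6 = 0x1000; with ex_bso the byte
 * offset itself is the handle.
 */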
2106 
2107 static const void *
2108 anv_image_view_surface_data_for_plane_layout(struct anv_image_view *image_view,
2109                                              VkDescriptorType desc_type,
2110                                              unsigned plane,
2111                                              VkImageLayout layout)
2112 {
2113    if (desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
2114        desc_type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
2115        desc_type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT) {
2116       return layout == VK_IMAGE_LAYOUT_GENERAL ?
2117          &image_view->planes[plane].general_sampler.state_data :
2118          &image_view->planes[plane].optimal_sampler.state_data;
2119    }
2120 
2121    if (desc_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2122       return &image_view->planes[plane].storage.state_data;
2123 
2124    unreachable("Invalid descriptor type");
2125 }
2126 
2127 static const uint32_t *
2128 anv_sampler_state_for_descriptor_set(const struct anv_sampler *sampler,
2129                                      const struct anv_descriptor_set *set,
2130                                      uint32_t plane)
2131 {
2132    return sampler->state[plane];
2133 }
2134 
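/* Write an image and/or sampler descriptor into the set. The host-side
 * anv_descriptor is updated first, then the descriptor buffer contents are
 * written in whichever forms the binding's anv_descriptor_data flags
 * request: indirect sampled/storage-image structs, raw surface state,
 * sampler state, or interleaved surface+sampler pairs.
 */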
2135 void
2136 anv_descriptor_set_write_image_view(struct anv_device *device,
2137                                     struct anv_descriptor_set *set,
2138                                     const VkDescriptorImageInfo * const info,
2139                                     VkDescriptorType type,
2140                                     uint32_t binding,
2141                                     uint32_t element)
2142 {
2143    const struct anv_descriptor_set_binding_layout *bind_layout =
2144       &set->layout->binding[binding];
2145    struct anv_descriptor *desc =
2146       &set->descriptors[bind_layout->descriptor_index + element];
2147    struct anv_image_view *image_view = NULL;
2148    struct anv_sampler *sampler = NULL;
2149 
2150    /* We get called with just VK_DESCRIPTOR_TYPE_SAMPLER as part of descriptor
2151     * set initialization to set the bindless samplers.
2152     */
2153    assert(type == bind_layout->type ||
2154           type == VK_DESCRIPTOR_TYPE_SAMPLER ||
2155           bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT);
2156 
2157    switch (type) {
2158    case VK_DESCRIPTOR_TYPE_SAMPLER:
2159       sampler = bind_layout->immutable_samplers ?
2160                 bind_layout->immutable_samplers[element] :
2161                 anv_sampler_from_handle(info->sampler);
2162       break;
2163 
2164    case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2165       image_view = anv_image_view_from_handle(info->imageView);
2166       sampler = bind_layout->immutable_samplers ?
2167                 bind_layout->immutable_samplers[element] :
2168                 anv_sampler_from_handle(info->sampler);
2169       break;
2170 
2171    case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2172    case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2173    case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2174       image_view = anv_image_view_from_handle(info->imageView);
2175       break;
2176 
2177    default:
2178       unreachable("invalid descriptor type");
2179    }
2180 
2181    *desc = (struct anv_descriptor) {
2182       .type = type,
2183       .layout = info->imageLayout,
2184       .image_view = image_view,
2185       .sampler = sampler,
2186    };
2187 
2188    void *desc_surface_map = set->desc_surface_mem.map +
2189       bind_layout->descriptor_surface_offset +
2190       element * bind_layout->descriptor_surface_stride;
2191 
2192    enum anv_descriptor_data data =
2193       bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
2194       anv_descriptor_data_for_type(device->physical, set->layout->type,
2195                                    set->layout->flags, type) :
2196       bind_layout->data;
2197 
2198    if (data & ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE) {
2199       struct anv_sampled_image_descriptor desc_data[3];
2200       memset(desc_data, 0, sizeof(desc_data));
2201 
2202       if (image_view) {
2203          for (unsigned p = 0; p < image_view->n_planes; p++) {
2204             const struct anv_surface_state *sstate =
2205                anv_image_view_texture_surface_state(image_view, p,
2206                                                     desc->layout);
2207             desc_data[p].image =
2208                anv_surface_state_to_handle(device->physical, sstate->state);
2209          }
2210       }
2211 
2212       if (sampler) {
2213          for (unsigned p = 0; p < sampler->n_planes; p++)
2214             desc_data[p].sampler = sampler->bindless_state.offset + p * 32;
2215       }
2216 
2217       /* The binding's max_plane_count may exceed the number of planes written
2218        * above (the rest stay zeroed), but it is no more than ARRAY_SIZE(desc_data).
2219        */
2220       assert(bind_layout->max_plane_count <= ARRAY_SIZE(desc_data));
2221       memcpy(desc_surface_map, desc_data,
2222              bind_layout->max_plane_count * sizeof(desc_data[0]));
2223    }
2224 
2225    if (data & ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE) {
2226       if (image_view) {
2227          assert(image_view->n_planes == 1);
2228          struct anv_storage_image_descriptor desc_data = {
2229             .vanilla = anv_surface_state_to_handle(
2230                device->physical,
2231                anv_image_view_storage_surface_state(image_view)->state),
2232             .image_depth = image_view->vk.storage.z_slice_count,
2233          };
2234          memcpy(desc_surface_map, &desc_data, sizeof(desc_data));
2235       } else {
2236          memset(desc_surface_map, 0, bind_layout->descriptor_surface_stride);
2237       }
2238    }
2239 
2240    if (data & ANV_DESCRIPTOR_SAMPLER) {
2241       void *sampler_map =
2242          set->layout->type == ANV_PIPELINE_DESCRIPTOR_SET_LAYOUT_TYPE_DIRECT ?
2243          (set->desc_sampler_mem.map +
2244           bind_layout->descriptor_sampler_offset +
2245           element * bind_layout->descriptor_sampler_stride) : desc_surface_map;
2246       if (sampler) {
2247          for (unsigned p = 0; p < sampler->n_planes; p++) {
2248             memcpy(sampler_map + p * ANV_SAMPLER_STATE_SIZE,
2249                    anv_sampler_state_for_descriptor_set(sampler, set, p),
2250                    ANV_SAMPLER_STATE_SIZE);
2251          }
2252       } else {
2253          memset(sampler_map, 0, bind_layout->descriptor_sampler_stride);
2254       }
2255    }
2256 
2257    if (data & ANV_DESCRIPTOR_SURFACE) {
2258       unsigned max_plane_count = image_view ? image_view->n_planes : 1;
2259 
2260       for (unsigned p = 0; p < max_plane_count; p++) {
2261          void *plane_map = desc_surface_map + p * ANV_SURFACE_STATE_SIZE;
2262 
2263          if (image_view) {
2264             memcpy(plane_map,
2265                    anv_image_view_surface_data_for_plane_layout(image_view, type,
2266                                                                 p, desc->layout),
2267                    ANV_SURFACE_STATE_SIZE);
2268          } else {
2269             memcpy(plane_map, &device->host_null_surface_state, ANV_SURFACE_STATE_SIZE);
2270          }
2271       }
2272    }
2273 
2274    if (data & ANV_DESCRIPTOR_SURFACE_SAMPLER) {
2275       unsigned max_plane_count =
2276          MAX2(image_view ? image_view->n_planes : 1,
2277               sampler ? sampler->n_planes : 1);
2278 
2279       for (unsigned p = 0; p < max_plane_count; p++) {
2280          void *plane_map = desc_surface_map + p * 2 * ANV_SURFACE_STATE_SIZE;
2281 
2282          if (image_view) {
2283             memcpy(plane_map,
2284                    anv_image_view_surface_data_for_plane_layout(image_view, type,
2285                                                                 p, desc->layout),
2286                    ANV_SURFACE_STATE_SIZE);
2287          } else {
2288             memcpy(plane_map, &device->host_null_surface_state, ANV_SURFACE_STATE_SIZE);
2289          }
2290 
2291          if (sampler) {
2292             memcpy(plane_map + ANV_SURFACE_STATE_SIZE,
2293                    anv_sampler_state_for_descriptor_set(sampler, set, p),
2294                    ANV_SAMPLER_STATE_SIZE);
2295          } else {
2296             memset(plane_map + ANV_SURFACE_STATE_SIZE, 0,
2297                    ANV_SAMPLER_STATE_SIZE);
2298          }
2299       }
2300    }
2301 }
2302 
2303 static const void *
2304 anv_buffer_view_surface_data(struct anv_buffer_view *buffer_view,
2305                              VkDescriptorType desc_type)
2306 {
2307    if (desc_type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER)
2308       return &buffer_view->general.state_data;
2309 
2310    if (desc_type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)
2311       return &buffer_view->storage.state_data;
2312 
2313    unreachable("Invalid descriptor type");
2314 }
2315 
2316 void
2317 anv_descriptor_set_write_buffer_view(struct anv_device *device,
2318                                      struct anv_descriptor_set *set,
2319                                      VkDescriptorType type,
2320                                      struct anv_buffer_view *buffer_view,
2321                                      uint32_t binding,
2322                                      uint32_t element)
2323 {
2324    const struct anv_descriptor_set_binding_layout *bind_layout =
2325       &set->layout->binding[binding];
2326    struct anv_descriptor *desc =
2327       &set->descriptors[bind_layout->descriptor_index + element];
2328 
2329    assert(type == bind_layout->type ||
2330           bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT);
2331 
2332    *desc = (struct anv_descriptor) {
2333       .type = type,
2334       .buffer_view = buffer_view,
2335    };
2336 
2337    enum anv_descriptor_data data =
2338       bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
2339       anv_descriptor_data_for_type(device->physical, set->layout->type,
2340                                    set->layout->flags, type) :
2341       bind_layout->data;
2342 
2343    void *desc_map = set->desc_surface_mem.map +
2344                     bind_layout->descriptor_surface_offset +
2345                     element * bind_layout->descriptor_surface_stride;
2346 
2347    if (buffer_view == NULL) {
2348       if (data & ANV_DESCRIPTOR_SURFACE)
2349          memcpy(desc_map, &device->host_null_surface_state, ANV_SURFACE_STATE_SIZE);
2350       else
2351          memset(desc_map, 0, bind_layout->descriptor_surface_stride);
2352       return;
2353    }
2354 
2355    if (data & ANV_DESCRIPTOR_INDIRECT_SAMPLED_IMAGE) {
2356       struct anv_sampled_image_descriptor desc_data = {
2357          .image = anv_surface_state_to_handle(
2358             device->physical, buffer_view->general.state),
2359       };
2360       memcpy(desc_map, &desc_data, sizeof(desc_data));
2361    }
2362 
2363    if (data & ANV_DESCRIPTOR_INDIRECT_STORAGE_IMAGE) {
2364       struct anv_storage_image_descriptor desc_data = {
2365          .vanilla = anv_surface_state_to_handle(
2366             device->physical, buffer_view->storage.state),
2367       };
2368       memcpy(desc_map, &desc_data, sizeof(desc_data));
2369    }
2370 
2371    if (data & ANV_DESCRIPTOR_SURFACE) {
2372       memcpy(desc_map,
2373              anv_buffer_view_surface_data(buffer_view, type),
2374              ANV_SURFACE_STATE_SIZE);
2375    }
2376 }
2377 
2378 void
2379 anv_descriptor_write_surface_state(struct anv_device *device,
2380                                    struct anv_descriptor *desc,
2381                                    struct anv_state surface_state)
2382 {
2383    assert(surface_state.alloc_size);
2384 
2385    struct anv_buffer_view *bview = desc->buffer_view;
2386 
2387    bview->general.state = surface_state;
2388 
2389    isl_surf_usage_flags_t usage =
2390       anv_isl_usage_for_descriptor_type(desc->type);
2391 
2392    enum isl_format format =
2393       anv_isl_format_for_descriptor_type(device, desc->type);
2394    anv_fill_buffer_surface_state(device, bview->general.state.map,
2395                                  format, ISL_SWIZZLE_IDENTITY,
2396                                  usage, bview->address, bview->vk.range, 1);
2397 }
2398 
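/* Write a buffer descriptor (uniform/storage, static or dynamic). Depending
 * on the binding's data flags this emits an address/range struct for
 * indirect descriptors and/or a filled ISL buffer surface state, and for
 * bindings with buffer views it records the view, either deferring surface
 * state generation (push descriptors) or writing it immediately.
 */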
2399 void
2400 anv_descriptor_set_write_buffer(struct anv_device *device,
2401                                 struct anv_descriptor_set *set,
2402                                 VkDescriptorType type,
2403                                 struct anv_buffer *buffer,
2404                                 uint32_t binding,
2405                                 uint32_t element,
2406                                 VkDeviceSize offset,
2407                                 VkDeviceSize range)
2408 {
2409    const struct anv_descriptor_set_binding_layout *bind_layout =
2410       &set->layout->binding[binding];
2411    const uint32_t descriptor_index = bind_layout->descriptor_index + element;
2412    struct anv_descriptor *desc = &set->descriptors[descriptor_index];
2413 
2414    assert(type == bind_layout->type ||
2415           bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT);
2416 
2417    *desc = (struct anv_descriptor) {
2418       .type = type,
2419       .offset = offset,
2420       .range = range,
2421       .buffer = buffer,
2422    };
2423 
2424    enum anv_descriptor_data data =
2425       bind_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
2426       anv_descriptor_data_for_type(device->physical, set->layout->type,
2427                                    set->layout->flags, type) :
2428       bind_layout->data;
2429 
2430    void *desc_map = set->desc_surface_mem.map +
2431                     bind_layout->descriptor_surface_offset +
2432                     element * bind_layout->descriptor_surface_stride;
2433 
2434    if (buffer == NULL) {
2435       if (data & ANV_DESCRIPTOR_SURFACE)
2436          memcpy(desc_map, &device->host_null_surface_state, ANV_SURFACE_STATE_SIZE);
2437       else
2438          memset(desc_map, 0, bind_layout->descriptor_surface_stride);
2439       return;
2440    }
2441 
2442    struct anv_address bind_addr = anv_address_add(buffer->address, offset);
2443    desc->bind_range = vk_buffer_range(&buffer->vk, offset, range);
2444 
2445    /* We report a bounds checking alignment of 32B for the sake of block
2446     * messages, which read an entire register's worth at a time.
2447     */
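   /* For example (hypothetical size): a 20-byte uniform buffer range gets
    * rounded up to ANV_UBO_ALIGNMENT by the align64() below before being
    * stored as desc->bind_range.
    */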
2448    if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
2449        type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)
2450       desc->bind_range = align64(desc->bind_range, ANV_UBO_ALIGNMENT);
2451 
2452    if (data & ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE) {
2453       struct anv_address_range_descriptor desc_data = {
2454          .address = anv_address_physical(bind_addr),
2455          .range = desc->bind_range,
2456       };
2457       memcpy(desc_map, &desc_data, sizeof(desc_data));
2458    }
2459 
2460    if (data & ANV_DESCRIPTOR_SURFACE) {
2461       isl_surf_usage_flags_t usage =
2462          anv_isl_usage_for_descriptor_type(desc->type);
2463 
2464       enum isl_format format =
2465          anv_isl_format_for_descriptor_type(device, desc->type);
2466 
2467       if (bind_addr.bo && bind_addr.bo->alloc_flags & ANV_BO_ALLOC_PROTECTED)
2468          usage |= ISL_SURF_USAGE_PROTECTED_BIT;
2469       isl_buffer_fill_state(&device->isl_dev, desc_map,
2470                             .address = anv_address_physical(bind_addr),
2471                             .mocs = isl_mocs(&device->isl_dev, usage,
2472                                              bind_addr.bo && anv_bo_is_external(bind_addr.bo)),
2473                             .size_B = desc->bind_range,
2474                             .format = format,
2475                             .swizzle = ISL_SWIZZLE_IDENTITY,
2476                             .stride_B = 1);
2477    }
2478 
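   /* Dynamic uniform/storage buffers stop here: no buffer view is stored in
    * the set for them, presumably because the command buffer applies the
    * dynamic offset when it emits their surface state.
    */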
2479    if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
2480        type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)
2481       return;
2482 
2483    if (data & ANV_DESCRIPTOR_BUFFER_VIEW) {
2484       struct anv_buffer_view *bview =
2485          &set->buffer_views[bind_layout->buffer_view_index + element];
2486 
2487       desc->set_buffer_view = bview;
2488 
2489       bview->vk.range = desc->bind_range;
2490       bview->address = bind_addr;
2491 
2492       if (set->is_push) {
2493          set->generate_surface_states |= BITFIELD_BIT(descriptor_index);
2494          /* Reset the surface state to make sure
2495           * genX(cmd_buffer_emit_push_descriptor_surfaces) generates a new
2496           * one.
2497           */
2498          bview->general.state = ANV_STATE_NULL;
2499       } else {
2500          anv_descriptor_write_surface_state(device, desc, bview->general.state);
2501       }
2502    }
2503 }
2504 
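/* Inline uniform block data lives directly in the set's descriptor surface
 * memory; writing it is a plain memcpy at the binding's surface offset.
 */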
2505 void
2506 anv_descriptor_set_write_inline_uniform_data(struct anv_device *device,
2507                                              struct anv_descriptor_set *set,
2508                                              uint32_t binding,
2509                                              const void *data,
2510                                              size_t offset,
2511                                              size_t size)
2512 {
2513    const struct anv_descriptor_set_binding_layout *bind_layout =
2514       &set->layout->binding[binding];
2515 
2516    assert(bind_layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM);
2517 
2518    void *desc_map = set->desc_surface_mem.map +
2519                     bind_layout->descriptor_surface_offset;
2520 
2521    memcpy(desc_map + offset, data, size);
2522 }
2523 
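/* Acceleration structures are stored as an anv_address_range_descriptor
 * holding the acceleration structure's device address and size (or all
 * zeros for a NULL acceleration structure).
 */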
2524 void
2525 anv_descriptor_set_write_acceleration_structure(struct anv_device *device,
2526                                                 struct anv_descriptor_set *set,
2527                                                 struct vk_acceleration_structure *accel,
2528                                                 uint32_t binding,
2529                                                 uint32_t element)
2530 {
2531    const struct anv_descriptor_set_binding_layout *bind_layout =
2532       &set->layout->binding[binding];
2533    struct anv_descriptor *desc =
2534       &set->descriptors[bind_layout->descriptor_index + element];
2535 
2536    assert(bind_layout->data & ANV_DESCRIPTOR_INDIRECT_ADDRESS_RANGE);
2537    *desc = (struct anv_descriptor) {
2538       .type = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
2539       .accel_struct = accel,
2540    };
2541 
2542    struct anv_address_range_descriptor desc_data = { };
2543    if (accel != NULL) {
2544       desc_data.address = vk_acceleration_structure_get_va(accel);
2545       desc_data.range = accel->size;
2546    }
2547    assert(sizeof(desc_data) <= bind_layout->descriptor_surface_stride);
2548 
2549    void *desc_map = set->desc_surface_mem.map +
2550                     bind_layout->descriptor_surface_offset +
2551                     element * bind_layout->descriptor_surface_stride;
2552    memcpy(desc_map, &desc_data, sizeof(desc_data));
2553 }
2554 
2555 void
2556 anv_descriptor_set_write(struct anv_device *device,
2557                          struct anv_descriptor_set *set_override,
2558                          uint32_t write_count,
2559                          const VkWriteDescriptorSet *writes)
2560 {
2561    for (uint32_t i = 0; i < write_count; i++) {
2562       const VkWriteDescriptorSet *write = &writes[i];
2563       struct anv_descriptor_set *set = unlikely(set_override) ?
2564          set_override :
2565          anv_descriptor_set_from_handle(write->dstSet);
2566 
2567       switch (write->descriptorType) {
2568       case VK_DESCRIPTOR_TYPE_SAMPLER:
2569       case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2570       case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2571       case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2572       case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2573          for (uint32_t j = 0; j < write->descriptorCount; j++) {
2574             anv_descriptor_set_write_image_view(device, set,
2575                                                 write->pImageInfo + j,
2576                                                 write->descriptorType,
2577                                                 write->dstBinding,
2578                                                 write->dstArrayElement + j);
2579          }
2580          break;
2581 
2582       case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
2583       case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
2584          for (uint32_t j = 0; j < write->descriptorCount; j++) {
2585             ANV_FROM_HANDLE(anv_buffer_view, bview,
2586                             write->pTexelBufferView[j]);
2587 
2588             anv_descriptor_set_write_buffer_view(device, set,
2589                                                  write->descriptorType,
2590                                                  bview,
2591                                                  write->dstBinding,
2592                                                  write->dstArrayElement + j);
2593          }
2594          break;
2595 
2596       case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
2597       case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
2598       case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
2599       case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
2600          for (uint32_t j = 0; j < write->descriptorCount; j++) {
2601             ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);
2602 
2603             anv_descriptor_set_write_buffer(device, set,
2604                                             write->descriptorType,
2605                                             buffer,
2606                                             write->dstBinding,
2607                                             write->dstArrayElement + j,
2608                                             write->pBufferInfo[j].offset,
2609                                             write->pBufferInfo[j].range);
2610          }
2611          break;
2612 
2613       case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: {
2614          const VkWriteDescriptorSetInlineUniformBlock *inline_write =
2615             vk_find_struct_const(write->pNext,
2616                                  WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK);
2617          assert(inline_write->dataSize == write->descriptorCount);
2618          anv_descriptor_set_write_inline_uniform_data(device, set,
2619                                                       write->dstBinding,
2620                                                       inline_write->pData,
2621                                                       write->dstArrayElement,
2622                                                       inline_write->dataSize);
2623          break;
2624       }
2625 
2626       case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
2627          const VkWriteDescriptorSetAccelerationStructureKHR *accel_write =
2628             vk_find_struct_const(write, WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR);
2629          assert(accel_write->accelerationStructureCount ==
2630                 write->descriptorCount);
2631          for (uint32_t j = 0; j < write->descriptorCount; j++) {
2632             ANV_FROM_HANDLE(vk_acceleration_structure, accel,
2633                             accel_write->pAccelerationStructures[j]);
2634             anv_descriptor_set_write_acceleration_structure(device, set, accel,
2635                                                             write->dstBinding,
2636                                                             write->dstArrayElement + j);
2637          }
2638          break;
2639       }
2640 
2641       default:
2642          break;
2643       }
2644    }
2645 }
2646 
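/* Illustrative sketch (not part of the driver): an application-side
 * vkUpdateDescriptorSets() call like the one below is dispatched through the
 * entry point that follows and reaches anv_descriptor_set_write_buffer() via
 * anv_descriptor_set_write(). The handles (device, set, buf) are
 * hypothetical.
 *
 *    VkDescriptorBufferInfo buffer_info = {
 *       .buffer = buf,
 *       .offset = 0,
 *       .range  = VK_WHOLE_SIZE,
 *    };
 *    VkWriteDescriptorSet write = {
 *       .sType           = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
 *       .dstSet          = set,
 *       .dstBinding      = 0,
 *       .dstArrayElement = 0,
 *       .descriptorCount = 1,
 *       .descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .pBufferInfo     = &buffer_info,
 *    };
 *    vkUpdateDescriptorSets(device, 1, &write, 0, NULL);
 */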
2647 void anv_UpdateDescriptorSets(
2648     VkDevice                                    _device,
2649     uint32_t                                    descriptorWriteCount,
2650     const VkWriteDescriptorSet*                 pDescriptorWrites,
2651     uint32_t                                    descriptorCopyCount,
2652     const VkCopyDescriptorSet*                  pDescriptorCopies)
2653 {
2654    ANV_FROM_HANDLE(anv_device, device, _device);
2655 
2656    anv_descriptor_set_write(device, NULL, descriptorWriteCount,
2657                             pDescriptorWrites);
2658 
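   /* Descriptor copies have to duplicate both the GPU-visible descriptor
    * memory (surface and sampler halves) and the CPU-side anv_descriptor,
    * including any buffer view owned by the source descriptor.
    */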
2659    for (uint32_t i = 0; i < descriptorCopyCount; i++) {
2660       const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
2661       ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
2662       ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);
2663 
2664       const struct anv_descriptor_set_binding_layout *src_layout =
2665          &src->layout->binding[copy->srcBinding];
2666       const struct anv_descriptor_set_binding_layout *dst_layout =
2667          &dst->layout->binding[copy->dstBinding];
2668 
2669       if (src_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
2670          anv_descriptor_set_write_inline_uniform_data(device, dst,
2671                                                       copy->dstBinding,
2672                                                       src->desc_surface_mem.map +
2673                                                       src_layout->descriptor_surface_offset + copy->srcArrayElement,
2674                                                       copy->dstArrayElement,
2675                                                       copy->descriptorCount);
2676          continue;
2677       }
2678 
2679       uint32_t copy_surface_element_size =
2680          MIN2(src_layout->descriptor_surface_stride,
2681               dst_layout->descriptor_surface_stride);
2682       uint32_t copy_sampler_element_size =
2683          MIN2(src_layout->descriptor_sampler_stride,
2684               dst_layout->descriptor_sampler_stride);
2685       for (uint32_t j = 0; j < copy->descriptorCount; j++) {
2686          struct anv_descriptor *src_desc =
2687             &src->descriptors[src_layout->descriptor_index +
2688                               copy->srcArrayElement + j];
2689          struct anv_descriptor *dst_desc =
2690             &dst->descriptors[dst_layout->descriptor_index +
2691                               copy->dstArrayElement + j];
2692 
2693          /* Copy the memory containing one of the following structures read
2694           * by the shaders:
2695           *    - anv_sampled_image_descriptor
2696           *    - anv_storage_image_descriptor
2697           *    - anv_address_range_descriptor
2698           *    - RENDER_SURFACE_STATE
2699           *    - SAMPLER_STATE
2700           */
2701          memcpy(dst->desc_surface_mem.map +
2702                 dst_layout->descriptor_surface_offset +
2703                 (copy->dstArrayElement + j) * dst_layout->descriptor_surface_stride,
2704                 src->desc_surface_mem.map +
2705                 src_layout->descriptor_surface_offset +
2706                 (copy->srcArrayElement + j) * src_layout->descriptor_surface_stride,
2707                 copy_surface_element_size);
2708          memcpy(dst->desc_sampler_mem.map +
2709                 dst_layout->descriptor_sampler_offset +
2710                 (copy->dstArrayElement + j) * dst_layout->descriptor_sampler_stride,
2711                 src->desc_sampler_mem.map +
2712                 src_layout->descriptor_sampler_offset +
2713                 (copy->srcArrayElement + j) * src_layout->descriptor_sampler_stride,
2714                 copy_sampler_element_size);
2715 
2716          /* Copy the CPU-side anv_descriptor data. */
2717          *dst_desc = *src_desc;
2718 
2719          /* If the CPU-side descriptor may contain a buffer view, we need to
2720           * copy that as well.
2721           */
2722          const enum anv_descriptor_data data =
2723             src_layout->type == VK_DESCRIPTOR_TYPE_MUTABLE_EXT ?
2724             anv_descriptor_data_for_type(device->physical,
2725                                          src->layout->type,
2726                                          src->layout->flags,
2727                                          src_desc->type) :
2728             src_layout->data;
2729          if (data & ANV_DESCRIPTOR_BUFFER_VIEW) {
2730             struct anv_buffer_view *src_bview =
2731                &src->buffer_views[src_layout->buffer_view_index +
2732                                   copy->srcArrayElement + j];
2733             struct anv_buffer_view *dst_bview =
2734                &dst->buffer_views[dst_layout->buffer_view_index +
2735                                   copy->dstArrayElement + j];
2736 
2737             dst_desc->set_buffer_view = dst_bview;
2738 
2739             dst_bview->vk.range = src_bview->vk.range;
2740             dst_bview->address = src_bview->address;
2741 
2742             memcpy(dst_bview->general.state.map,
2743                    src_bview->general.state.map,
2744                    ANV_SURFACE_STATE_SIZE);
2745          }
2746       }
2747    }
2748 }
2749 
2750 /*
2751  * Descriptor update templates.
2752  */
2753 
2754 void
2755 anv_descriptor_set_write_template(struct anv_device *device,
2756                                   struct anv_descriptor_set *set,
2757                                   const struct vk_descriptor_update_template *template,
2758                                   const void *data)
2759 {
2760    for (uint32_t i = 0; i < template->entry_count; i++) {
2761       const struct vk_descriptor_template_entry *entry =
2762          &template->entries[i];
2763 
2764       switch (entry->type) {
2765       case VK_DESCRIPTOR_TYPE_SAMPLER:
2766       case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2767       case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2768       case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2769       case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2770          for (uint32_t j = 0; j < entry->array_count; j++) {
2771             const VkDescriptorImageInfo *info =
2772                data + entry->offset + j * entry->stride;
2773             anv_descriptor_set_write_image_view(device, set,
2774                                                 info, entry->type,
2775                                                 entry->binding,
2776                                                 entry->array_element + j);
2777          }
2778          break;
2779 
2780       case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
2781       case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
2782          for (uint32_t j = 0; j < entry->array_count; j++) {
2783             const VkBufferView *_bview =
2784                data + entry->offset + j * entry->stride;
2785             ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);
2786 
2787             anv_descriptor_set_write_buffer_view(device, set,
2788                                                  entry->type,
2789                                                  bview,
2790                                                  entry->binding,
2791                                                  entry->array_element + j);
2792          }
2793          break;
2794 
2795       case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
2796       case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
2797       case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
2798       case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
2799          for (uint32_t j = 0; j < entry->array_count; j++) {
2800             const VkDescriptorBufferInfo *info =
2801                data + entry->offset + j * entry->stride;
2802             ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);
2803 
2804             anv_descriptor_set_write_buffer(device, set,
2805                                             entry->type,
2806                                             buffer,
2807                                             entry->binding,
2808                                             entry->array_element + j,
2809                                             info->offset, info->range);
2810          }
2811          break;
2812 
2813       case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
2814          anv_descriptor_set_write_inline_uniform_data(device, set,
2815                                                       entry->binding,
2816                                                       data + entry->offset,
2817                                                       entry->array_element,
2818                                                       entry->array_count);
2819          break;
2820 
2821       case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
2822          for (uint32_t j = 0; j < entry->array_count; j++) {
2823             VkAccelerationStructureKHR *accel_obj =
2824                (VkAccelerationStructureKHR *)(data + entry->offset + j * entry->stride);
2825             ANV_FROM_HANDLE(vk_acceleration_structure, accel, *accel_obj);
2826 
2827             anv_descriptor_set_write_acceleration_structure(device, set,
2828                                                             accel,
2829                                                             entry->binding,
2830                                                             entry->array_element + j);
2831          }
2832          break;
2833 
2834       default:
2835          break;
2836       }
2837    }
2838 }
2839 
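/* Illustrative sketch (not part of the driver): an application-side update
 * template which, once dispatched through the entry point below, walks the
 * same per-entry loop as anv_descriptor_set_write_template(). The handles
 * (device, set, set_layout) and the host struct are hypothetical.
 *
 *    struct host_data { VkDescriptorBufferInfo ubo; } data = { ... };
 *    const VkDescriptorUpdateTemplateEntry entry = {
 *       .dstBinding      = 0,
 *       .dstArrayElement = 0,
 *       .descriptorCount = 1,
 *       .descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .offset          = offsetof(struct host_data, ubo),
 *       .stride          = sizeof(VkDescriptorBufferInfo),
 *    };
 *    const VkDescriptorUpdateTemplateCreateInfo info = {
 *       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
 *       .descriptorUpdateEntryCount = 1,
 *       .pDescriptorUpdateEntries   = &entry,
 *       .templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
 *       .descriptorSetLayout        = set_layout,
 *    };
 *    VkDescriptorUpdateTemplate template;
 *    vkCreateDescriptorUpdateTemplate(device, &info, NULL, &template);
 *    vkUpdateDescriptorSetWithTemplate(device, set, template, &data);
 */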
2840 void anv_UpdateDescriptorSetWithTemplate(
2841     VkDevice                                    _device,
2842     VkDescriptorSet                             descriptorSet,
2843     VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
2844     const void*                                 pData)
2845 {
2846    ANV_FROM_HANDLE(anv_device, device, _device);
2847    ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);
2848    VK_FROM_HANDLE(vk_descriptor_update_template, template,
2849                   descriptorUpdateTemplate);
2850 
2851    anv_descriptor_set_write_template(device, set, template, pData);
2852 }
2853 
2854 void anv_GetDescriptorSetLayoutSizeEXT(
2855     VkDevice                                    device,
2856     VkDescriptorSetLayout                       layout,
2857     VkDeviceSize*                               pLayoutSizeInBytes)
2858 {
2859    ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, layout);
2860 
2861    *pLayoutSizeInBytes = set_layout->descriptor_buffer_surface_size;
2862 }
2863 
2864 void anv_GetDescriptorSetLayoutBindingOffsetEXT(
2865     VkDevice                                    device,
2866     VkDescriptorSetLayout                       layout,
2867     uint32_t                                    binding,
2868     VkDeviceSize*                               pOffset)
2869 {
2870    ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, layout);
2871    assert(binding < set_layout->binding_count);
2872    const struct anv_descriptor_set_binding_layout *bind_layout =
2873       &set_layout->binding[binding];
2874 
2875    *pOffset = bind_layout->descriptor_surface_offset;
2876 }
2877 
2878 static bool
2879 address_info_is_null(const VkDescriptorAddressInfoEXT *addr_info)
2880 {
2881    return addr_info == NULL || addr_info->address == 0 || addr_info->range == 0;
2882 }
2883 
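/* Illustrative sketch (not part of the driver): with VK_EXT_descriptor_buffer
 * the application queries the layout size and binding offsets through the
 * entry points above, asks for each descriptor's opaque blob through the
 * entry point below, and writes it into host-visible descriptor-buffer
 * memory itself. The handles (device, set_layout), buffer_device_address,
 * descriptor_buffer_map, the 256-byte range and the descriptor size are
 * hypothetical; real sizes come from
 * VkPhysicalDeviceDescriptorBufferPropertiesEXT.
 *
 *    VkDeviceSize layout_size, binding_offset;
 *    vkGetDescriptorSetLayoutSizeEXT(device, set_layout, &layout_size);
 *    vkGetDescriptorSetLayoutBindingOffsetEXT(device, set_layout, 0,
 *                                             &binding_offset);
 *
 *    const VkDescriptorAddressInfoEXT addr_info = {
 *       .sType   = VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT,
 *       .address = buffer_device_address,
 *       .range   = 256,
 *    };
 *    const VkDescriptorGetInfoEXT get_info = {
 *       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT,
 *       .type  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .data.pUniformBuffer = &addr_info,
 *    };
 *    vkGetDescriptorEXT(device, &get_info, uniform_buffer_descriptor_size,
 *                       descriptor_buffer_map + binding_offset);
 */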
2884 void anv_GetDescriptorEXT(
2885     VkDevice                                    _device,
2886     const VkDescriptorGetInfoEXT*               pDescriptorInfo,
2887     size_t                                      dataSize,
2888     void*                                       pDescriptor)
2889 {
2890    ANV_FROM_HANDLE(anv_device, device, _device);
2891    struct anv_sampler *sampler;
2892    struct anv_image_view *image_view;
2893 
2894    switch (pDescriptorInfo->type) {
2895    case VK_DESCRIPTOR_TYPE_SAMPLER:
2896       if (pDescriptorInfo->data.pSampler &&
2897           (sampler = anv_sampler_from_handle(*pDescriptorInfo->data.pSampler))) {
2898          memcpy(pDescriptor, sampler->state[0], ANV_SAMPLER_STATE_SIZE);
2899       } else {
2900          memset(pDescriptor, 0, ANV_SAMPLER_STATE_SIZE);
2901       }
2902       break;
2903 
2904    case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
2905       for (uint32_t i = 0; i < dataSize / (2 * ANV_SURFACE_STATE_SIZE); i++) {
2906          uint32_t desc_offset = i * 2 * ANV_SURFACE_STATE_SIZE;
2907 
2908          if (pDescriptorInfo->data.pCombinedImageSampler &&
2909              (image_view = anv_image_view_from_handle(
2910                 pDescriptorInfo->data.pCombinedImageSampler->imageView))) {
2911             const VkImageLayout layout =
2912                pDescriptorInfo->data.pCombinedImageSampler->imageLayout;
2913             memcpy(pDescriptor + desc_offset,
2914                    anv_image_view_surface_data_for_plane_layout(image_view,
2915                                                                 pDescriptorInfo->type,
2916                                                                 i,
2917                                                                 layout),
2918                    ANV_SURFACE_STATE_SIZE);
2919          } else {
2920             memcpy(pDescriptor + desc_offset,
2921                    device->host_null_surface_state,
2922                    ANV_SURFACE_STATE_SIZE);
2923          }
2924 
2925          if (pDescriptorInfo->data.pCombinedImageSampler &&
2926              (sampler = anv_sampler_from_handle(
2927                 pDescriptorInfo->data.pCombinedImageSampler->sampler))) {
2928             memcpy(pDescriptor + desc_offset + ANV_SURFACE_STATE_SIZE,
2929                    sampler->state[i], ANV_SAMPLER_STATE_SIZE);
2930          } else {
2931             memset(pDescriptor + desc_offset + ANV_SURFACE_STATE_SIZE,
2932                    0, ANV_SAMPLER_STATE_SIZE);
2933          }
2934       }
2935       break;
2936 
2937    case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
2938    case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
2939    case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
2940       if (pDescriptorInfo->data.pSampledImage &&
2941           (image_view = anv_image_view_from_handle(
2942              pDescriptorInfo->data.pSampledImage->imageView))) {
2943          const VkImageLayout layout =
2944             pDescriptorInfo->data.pSampledImage->imageLayout;
2945 
2946          memcpy(pDescriptor,
2947                 anv_image_view_surface_data_for_plane_layout(image_view,
2948                                                              pDescriptorInfo->type,
2949                                                              0,
2950                                                              layout),
2951                 ANV_SURFACE_STATE_SIZE);
2952       } else {
2953          memcpy(pDescriptor, device->host_null_surface_state,
2954                 ANV_SURFACE_STATE_SIZE);
2955       }
2956       break;
2957 
2958    case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
2959       const VkDescriptorAddressInfoEXT *addr_info =
2960          pDescriptorInfo->data.pUniformTexelBuffer;
2961 
2962       if (!address_info_is_null(addr_info)) {
2963          struct anv_format_plane format =
2964             anv_get_format_plane(device->info,
2965                                  addr_info->format,
2966                                  0, VK_IMAGE_TILING_LINEAR);
2967          const uint32_t format_bs =
2968             isl_format_get_layout(format.isl_format)->bpb / 8;
2969 
2970          anv_fill_buffer_surface_state(device, pDescriptor,
2971                                        format.isl_format, format.swizzle,
2972                                        ISL_SURF_USAGE_TEXTURE_BIT,
2973                                        anv_address_from_u64(addr_info->address),
2974                                        align_down_npot_u32(addr_info->range, format_bs),
2975                                        format_bs);
2976       } else {
2977          memcpy(pDescriptor, device->host_null_surface_state,
2978                 ANV_SURFACE_STATE_SIZE);
2979       }
2980       break;
2981    }
2982 
2983    case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
2984       const VkDescriptorAddressInfoEXT *addr_info =
2985          pDescriptorInfo->data.pStorageTexelBuffer;
2986 
2987       if (!address_info_is_null(addr_info)) {
2988          struct anv_format_plane format =
2989             anv_get_format_plane(device->info,
2990                                  addr_info->format,
2991                                  0, VK_IMAGE_TILING_LINEAR);
2992          const uint32_t format_bs =
2993             isl_format_get_layout(format.isl_format)->bpb / 8;
2994 
2995          anv_fill_buffer_surface_state(device, pDescriptor,
2996                                        format.isl_format, format.swizzle,
2997                                        ISL_SURF_USAGE_STORAGE_BIT,
2998                                        anv_address_from_u64(addr_info->address),
2999                                        align_down_npot_u32(addr_info->range, format_bs),
3000                                        format_bs);
3001       } else {
3002          memcpy(pDescriptor, device->host_null_surface_state,
3003                 ANV_SURFACE_STATE_SIZE);
3004       }
3005       break;
3006    }
3007 
3008    case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
3009    case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: {
3010       const VkDescriptorAddressInfoEXT *addr_info =
3011          pDescriptorInfo->data.pStorageBuffer;
3012 
3013       if (!address_info_is_null(addr_info)) {
3014          VkDeviceSize range = addr_info->range;
3015 
3016          /* We report a bounds-checking alignment of 32B for the sake of block
3017           * messages, which read an entire register's worth at a time.
3018           */
3019          if (pDescriptorInfo->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER)
3020             range = align64(range, ANV_UBO_ALIGNMENT);
3021 
3022          isl_surf_usage_flags_t usage =
3023             pDescriptorInfo->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ?
3024             ISL_SURF_USAGE_CONSTANT_BUFFER_BIT :
3025             ISL_SURF_USAGE_STORAGE_BIT;
3026 
3027          enum isl_format format =
3028             anv_isl_format_for_descriptor_type(device, pDescriptorInfo->type);
3029 
3030          isl_buffer_fill_state(&device->isl_dev, pDescriptor,
3031                                .address = addr_info->address,
3032                                .mocs = isl_mocs(&device->isl_dev, usage, false),
3033                                .size_B = range,
3034                                .format = format,
3035                                .swizzle = ISL_SWIZZLE_IDENTITY,
3036                                .stride_B = 1);
3037       } else {
3038          memcpy(pDescriptor, device->host_null_surface_state,
3039                 ANV_SURFACE_STATE_SIZE);
3040       }
3041       break;
3042    }
3043 
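   /* For descriptor buffers the application hands us the acceleration
    * structure's device address directly; the range is left at 0 here,
    * presumably because only the address is consumed on this path.
    */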
3044    case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
3045       struct anv_address_range_descriptor desc_data = {
3046          .address = pDescriptorInfo->data.accelerationStructure,
3047          .range = 0,
3048       };
3049 
3050       memcpy(pDescriptor, &desc_data, sizeof(desc_data));
3051       break;
3052    }
3053 
3054    default:
3055       unreachable("Invalid descriptor type");
3056    }
3057 }
3058