/*
 * Copyright © 2024 Collabora Ltd.
 * SPDX-License-Identifier: MIT
 */

#include <assert.h>
#include <fcntl.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>

#include "util/mesa-sha1.h"
#include "vk_alloc.h"
#include "vk_descriptor_update_template.h"
#include "vk_descriptors.h"
#include "vk_format.h"
#include "vk_log.h"
#include "vk_util.h"

#include "util/bitset.h"

#include "genxml/gen_macros.h"

#include "panvk_buffer.h"
#include "panvk_buffer_view.h"
#include "panvk_descriptor_set.h"
#include "panvk_descriptor_set_layout.h"
#include "panvk_device.h"
#include "panvk_entrypoints.h"
#include "panvk_image.h"
#include "panvk_image_view.h"
#include "panvk_macros.h"
#include "panvk_priv_bo.h"
#include "panvk_sampler.h"

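/* Dynamic uniform/storage buffers don't live in the descriptor memory; they
 * are tracked in the set's dyn_bufs array, so most paths below special-case
 * them.
 */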
static inline bool
is_dynamic_buffer(VkDescriptorType type)
{
   return type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
          type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
}

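/* Return a host pointer to the descriptor slot backing (binding, elem) for
 * the given descriptor type.
 */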
static void *
get_desc_slot_ptr(struct panvk_descriptor_set *set, uint32_t binding,
                  uint32_t elem, VkDescriptorType type)
{
   const struct panvk_descriptor_set_binding_layout *binding_layout =
      &set->layout->bindings[binding];

   uint32_t offset = panvk_get_desc_index(binding_layout, elem, type);

   assert(offset < set->layout->desc_count);

   return (char *)set->descs.host + offset * PANVK_DESCRIPTOR_SIZE;
}

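/* Copy a packed descriptor into its slot, with a compile-time check that the
 * source matches the fixed descriptor slot size.
 */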
#define write_desc(set, binding, elem, desc, type)                             \
   do {                                                                        \
      static_assert(sizeof(*(desc)) == PANVK_DESCRIPTOR_SIZE,                  \
                    "wrong descriptor size");                                  \
      void *__dst = get_desc_slot_ptr(set, binding, elem, type);               \
      memcpy(__dst, (desc), PANVK_DESCRIPTOR_SIZE);                            \
   } while (0)

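/* Write a sampler descriptor. Bindings with immutable samplers are skipped
 * unless write_immutable is set, in which case the immutable sampler from the
 * set layout is written instead of the one in pImageInfo.
 */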
static void
write_sampler_desc(struct panvk_descriptor_set *set,
                   const VkDescriptorImageInfo *const pImageInfo,
                   uint32_t binding, uint32_t elem, bool write_immutable)
{
   const struct panvk_descriptor_set_binding_layout *binding_layout =
      &set->layout->bindings[binding];

   if (binding_layout->immutable_samplers && !write_immutable)
      return;

   const struct mali_sampler_packed *sampler_desc;

   if (binding_layout->immutable_samplers) {
      sampler_desc = &binding_layout->immutable_samplers[elem];
   } else {
      struct panvk_sampler *sampler = panvk_sampler_from_handle(
         pImageInfo ? pImageInfo->sampler : VK_NULL_HANDLE);

      sampler_desc = sampler ? &sampler->desc : NULL;
   }

   if (sampler_desc)
      write_desc(set, binding, elem, sampler_desc, VK_DESCRIPTOR_TYPE_SAMPLER);
}

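/* Write an image view descriptor. On PAN_ARCH <= 7, storage images use the
 * image view's img_attrib_buf descriptor; everything else uses the texture
 * descriptor.
 */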
static void
write_image_view_desc(struct panvk_descriptor_set *set,
                      const VkDescriptorImageInfo *const pImageInfo,
                      uint32_t binding, uint32_t elem, VkDescriptorType type)
{
   if (pImageInfo && pImageInfo->imageView != VK_NULL_HANDLE) {
      VK_FROM_HANDLE(panvk_image_view, view, pImageInfo->imageView);

#if PAN_ARCH <= 7
      if (type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
         write_desc(set, binding, elem, &view->descs.img_attrib_buf, type);
      else
         write_desc(set, binding, elem, &view->descs.tex, type);
#else
      write_desc(set, binding, elem, &view->descs.tex, type);
#endif
   }
}

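/* Write a uniform/storage buffer descriptor. On PAN_ARCH <= 7, storage
 * buffers are emitted as a (base address, size) pair and uniform buffers as a
 * UBO descriptor padded to the descriptor slot size; later architectures use
 * a plain buffer descriptor for both.
 */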
static void
write_buffer_desc(struct panvk_descriptor_set *set,
                  const VkDescriptorBufferInfo *const info, uint32_t binding,
                  uint32_t elem, VkDescriptorType type)
{
   VK_FROM_HANDLE(panvk_buffer, buffer, info->buffer);
   const uint64_t range = panvk_buffer_range(buffer, info->offset, info->range);
   assert(range <= UINT32_MAX);

#if PAN_ARCH <= 7
   if (type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) {
      struct panvk_ssbo_addr desc = {
         .base_addr = panvk_buffer_gpu_ptr(buffer, info->offset),
         .size = range,
      };

      write_desc(set, binding, elem, &desc, type);
   } else {
      struct {
         struct mali_uniform_buffer_packed ubo;
         uint32_t pad[6];
      } padded_desc = {0};

      pan_pack(&padded_desc.ubo, UNIFORM_BUFFER, cfg) {
         cfg.pointer = panvk_buffer_gpu_ptr(buffer, info->offset);
         cfg.entries = DIV_ROUND_UP(range, 16);
      }

      write_desc(set, binding, elem, &padded_desc, type);
   }
#else
   struct mali_buffer_packed desc;

   pan_pack(&desc, BUFFER, cfg) {
      cfg.address = panvk_buffer_gpu_ptr(buffer, info->offset);
      cfg.size = range;
   }
   write_desc(set, binding, elem, &desc, type);
#endif
}

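/* Dynamic buffers are not written to descriptor memory: the (address, size)
 * pair is recorded in the set's dyn_bufs array so dynamic offsets can be
 * applied when the set is bound.
 */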
static void
write_dynamic_buffer_desc(struct panvk_descriptor_set *set,
                          const VkDescriptorBufferInfo *const info,
                          uint32_t binding, uint32_t elem)
{
   VK_FROM_HANDLE(panvk_buffer, buffer, info->buffer);
   const struct panvk_descriptor_set_binding_layout *binding_layout =
      &set->layout->bindings[binding];
   uint32_t dyn_buf_idx = binding_layout->desc_idx + elem;
   const uint64_t range = panvk_buffer_range(buffer, info->offset, info->range);

   assert(range <= UINT32_MAX);
   assert(dyn_buf_idx < ARRAY_SIZE(set->dyn_bufs));

   set->dyn_bufs[dyn_buf_idx].dev_addr =
      panvk_buffer_gpu_ptr(buffer, info->offset);
   set->dyn_bufs[dyn_buf_idx].size = range;
}

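/* Write a texel buffer descriptor. On PAN_ARCH <= 7, storage texel buffers
 * use the buffer view's img_attrib_buf descriptor; everything else uses the
 * texture descriptor.
 */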
static void
write_buffer_view_desc(struct panvk_descriptor_set *set,
                       const VkBufferView bufferView, uint32_t binding,
                       uint32_t elem, VkDescriptorType type)
{
   if (bufferView != VK_NULL_HANDLE) {
      VK_FROM_HANDLE(panvk_buffer_view, view, bufferView);

#if PAN_ARCH <= 7
      if (type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)
         write_desc(set, binding, elem, &view->descs.img_attrib_buf, type);
      else
         write_desc(set, binding, elem, &view->descs.tex, type);
#else
      write_desc(set, binding, elem, &view->descs.tex, type);
#endif
   }
}

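/* Return a set to its pool: give its descriptor memory back to the pool heap,
 * drop the layout reference and mark the pool slot as free. Freeing an
 * already-free slot is a no-op.
 */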
static void
panvk_desc_pool_free_set(struct panvk_descriptor_pool *pool,
                         struct panvk_descriptor_set *set)
{
   uintptr_t set_idx = set - pool->sets;
   assert(set_idx < pool->max_sets);

   if (!BITSET_TEST(pool->free_sets, set_idx)) {
      if (set->desc_count)
         util_vma_heap_free(&pool->desc_heap, set->descs.dev,
                            set->desc_count * PANVK_DESCRIPTOR_SIZE);

      BITSET_SET(pool->free_sets, set_idx);

      /* Discard constness to call vk_descriptor_set_layout_unref(). */
      struct panvk_descriptor_set_layout *set_layout =
         (struct panvk_descriptor_set_layout *)set->layout;

      vk_descriptor_set_layout_unref(pool->base.device, &set_layout->vk);
      vk_object_base_finish(&set->base);
      memset(set, 0, sizeof(*set));
   }
}

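/* Free every set still owned by the pool before releasing the descriptor BO
 * and the pool object itself.
 */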
static void
panvk_destroy_descriptor_pool(struct panvk_device *device,
                              const VkAllocationCallbacks *pAllocator,
                              struct panvk_descriptor_pool *pool)
{
   for (uint32_t i = 0; i < pool->max_sets; i++)
      panvk_desc_pool_free_set(pool, &pool->sets[i]);

   if (pool->desc_bo) {
      util_vma_heap_finish(&pool->desc_heap);
      panvk_priv_bo_unref(pool->desc_bo);
   }

   vk_object_free(&device->vk, pAllocator, pool);
}

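/* A pool is a single host allocation holding the pool object, the free-set
 * bitset and the set array, plus one private BO managed with a VMA heap for
 * the descriptor memory. Dynamic buffer descriptors don't consume BO space.
 */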
VkResult
panvk_per_arch(CreateDescriptorPool)(
   VkDevice _device, const VkDescriptorPoolCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool)
{
   VK_FROM_HANDLE(panvk_device, device, _device);

   VK_MULTIALLOC(ma);
   VK_MULTIALLOC_DECL(&ma, struct panvk_descriptor_pool, pool, 1);
   VK_MULTIALLOC_DECL(&ma, BITSET_WORD, free_sets,
                      BITSET_WORDS(pCreateInfo->maxSets));
   VK_MULTIALLOC_DECL(&ma, struct panvk_descriptor_set, sets,
                      pCreateInfo->maxSets);

   if (!vk_object_multizalloc(&device->vk, &ma, pAllocator,
                              VK_OBJECT_TYPE_DESCRIPTOR_POOL))
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   uint32_t desc_count = 0;
   for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
      if (!is_dynamic_buffer(pCreateInfo->pPoolSizes[i].type))
         desc_count += panvk_get_desc_stride(pCreateInfo->pPoolSizes[i].type) *
                       pCreateInfo->pPoolSizes[i].descriptorCount;
   }

   /* initialize to all ones to indicate all sets are free */
   BITSET_SET_RANGE(free_sets, 0, pCreateInfo->maxSets - 1);
   pool->free_sets = free_sets;
   pool->sets = sets;
   pool->max_sets = pCreateInfo->maxSets;

   if (desc_count) {
      /* adjust desc_count to account for 1 dummy sampler per descriptor set */
      desc_count += pool->max_sets;

      uint64_t pool_size = desc_count * PANVK_DESCRIPTOR_SIZE;
      pool->desc_bo = panvk_priv_bo_create(device, pool_size, 0,
                                           VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (!pool->desc_bo) {
         panvk_destroy_descriptor_pool(device, pAllocator, pool);
         return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
      }
      uint64_t bo_size = pool->desc_bo->bo->size;
      assert(pool_size <= bo_size);
      util_vma_heap_init(&pool->desc_heap, pool->desc_bo->addr.dev, bo_size);
   }

   *pDescriptorPool = panvk_descriptor_pool_to_handle(pool);
   return VK_SUCCESS;
}

void
panvk_per_arch(DestroyDescriptorPool)(VkDevice _device, VkDescriptorPool _pool,
                                      const VkAllocationCallbacks *pAllocator)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_descriptor_pool, pool, _pool);

   if (pool)
      panvk_destroy_descriptor_pool(device, pAllocator, pool);
}

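/* Pre-populate sampler slots for bindings that carry immutable samplers. For
 * variable-count bindings, only the first variable_count elements are
 * written.
 */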
static void
desc_set_write_immutable_samplers(struct panvk_descriptor_set *set,
                                  uint32_t variable_count)
{
   const struct panvk_descriptor_set_layout *layout = set->layout;

   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->bindings[b].type != VK_DESCRIPTOR_TYPE_SAMPLER &&
          layout->bindings[b].type != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
         continue;

      if (layout->bindings[b].immutable_samplers == NULL)
         continue;

      uint32_t array_size = layout->bindings[b].desc_count;

      if (layout->bindings[b].flags &
          VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)
         array_size = variable_count;

      for (uint32_t j = 0; j < array_size; j++) {
         write_desc(set, b, j, &layout->bindings[b].immutable_samplers[j],
                    VK_DESCRIPTOR_TYPE_SAMPLER);
      }
   }
}

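/* Allocate one set from the pool: adjust the descriptor count for a trailing
 * variable-count binding, carve descriptor memory out of the pool heap, and
 * take a reference on the layout.
 */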
static VkResult
panvk_desc_pool_allocate_set(struct panvk_descriptor_pool *pool,
                             struct panvk_descriptor_set_layout *layout,
                             uint32_t variable_count,
                             struct panvk_descriptor_set **out)
{
   uint32_t num_descs = layout->desc_count;

   if (layout->binding_count) {
      uint32_t last_binding = layout->binding_count - 1;

      if ((layout->bindings[last_binding].flags &
           VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT) &&
          !is_dynamic_buffer(layout->bindings[last_binding].type)) {
         uint32_t desc_stride =
            panvk_get_desc_stride(layout->bindings[last_binding].type);

         num_descs -= layout->bindings[last_binding].desc_count * desc_stride;
         num_descs += variable_count * desc_stride;
      }
   }

   uint64_t descs_size = num_descs * PANVK_DESCRIPTOR_SIZE;
   uint32_t first_free_set =
      __bitset_ffs(pool->free_sets, BITSET_WORDS(pool->max_sets));
   if (first_free_set == 0 || pool->desc_heap.free_size < descs_size)
      return VK_ERROR_OUT_OF_POOL_MEMORY;

   uint64_t descs_dev_addr = 0;
   if (num_descs) {
      descs_dev_addr = util_vma_heap_alloc(&pool->desc_heap, descs_size,
                                           PANVK_DESCRIPTOR_SIZE);
      if (!descs_dev_addr)
         return VK_ERROR_FRAGMENTED_POOL;
   }
   struct panvk_descriptor_set *set = &pool->sets[first_free_set - 1];

   vk_object_base_init(pool->base.device, &set->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_SET);
   vk_descriptor_set_layout_ref(&layout->vk);
   set->layout = layout;
   set->desc_count = num_descs;
   if (pool->desc_bo) {
      set->descs.dev = descs_dev_addr;
      set->descs.host =
         pool->desc_bo->addr.host + set->descs.dev - pool->desc_bo->addr.dev;
   }
   desc_set_write_immutable_samplers(set, variable_count);
   BITSET_CLEAR(pool->free_sets, first_free_set - 1);

   *out = set;
   return VK_SUCCESS;
}

VkResult
panvk_per_arch(AllocateDescriptorSets)(
   VkDevice _device, const VkDescriptorSetAllocateInfo *pAllocateInfo,
   VkDescriptorSet *pDescriptorSets)
{
   VK_FROM_HANDLE(panvk_descriptor_pool, pool, pAllocateInfo->descriptorPool);
   VkResult result = VK_SUCCESS;
   unsigned i;

   struct panvk_descriptor_set *set = NULL;

   const VkDescriptorSetVariableDescriptorCountAllocateInfo *var_desc_count =
      vk_find_struct_const(
         pAllocateInfo->pNext,
         DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO);

   /* allocate one descriptor set per layout in pSetLayouts */
   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      VK_FROM_HANDLE(panvk_descriptor_set_layout, layout,
                     pAllocateInfo->pSetLayouts[i]);
      /* If descriptorSetCount is zero or this structure is not included in
       * the pNext chain, then the variable lengths are considered to be zero.
       */
      const uint32_t variable_count =
         var_desc_count && var_desc_count->descriptorSetCount > 0
            ? var_desc_count->pDescriptorCounts[i]
            : 0;

      result = panvk_desc_pool_allocate_set(pool, layout, variable_count, &set);
      if (result != VK_SUCCESS)
         goto err_free_sets;

      pDescriptorSets[i] = panvk_descriptor_set_to_handle(set);
   }

   return VK_SUCCESS;

err_free_sets:
   panvk_per_arch(FreeDescriptorSets)(_device, pAllocateInfo->descriptorPool, i,
                                      pDescriptorSets);
   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++)
      pDescriptorSets[i] = VK_NULL_HANDLE;

   return result;
}

VkResult
panvk_per_arch(FreeDescriptorSets)(VkDevice _device,
                                   VkDescriptorPool descriptorPool,
                                   uint32_t descriptorSetCount,
                                   const VkDescriptorSet *pDescriptorSets)
{
   VK_FROM_HANDLE(panvk_descriptor_pool, pool, descriptorPool);

   for (unsigned i = 0; i < descriptorSetCount; i++) {
      VK_FROM_HANDLE(panvk_descriptor_set, set, pDescriptorSets[i]);

      if (set)
         panvk_desc_pool_free_set(pool, set);
   }
   return VK_SUCCESS;
}

VkResult
panvk_per_arch(ResetDescriptorPool)(VkDevice _device, VkDescriptorPool _pool,
                                    VkDescriptorPoolResetFlags flags)
{
   VK_FROM_HANDLE(panvk_descriptor_pool, pool, _pool);

   for (uint32_t i = 0; i < pool->max_sets; i++)
      panvk_desc_pool_free_set(pool, &pool->sets[i]);

   BITSET_SET_RANGE(pool->free_sets, 0, pool->max_sets - 1);
   return VK_SUCCESS;
}

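/* Apply a single VkWriteDescriptorSet to a set. Combined image/sampler writes
 * emit both a sampler and a texture descriptor per element.
 */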
VkResult
panvk_per_arch(descriptor_set_write)(struct panvk_descriptor_set *set,
                                     const VkWriteDescriptorSet *write,
                                     bool write_immutable_samplers)
{
   switch (write->descriptorType) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      for (uint32_t j = 0; j < write->descriptorCount; j++) {
         write_sampler_desc(set, write->pImageInfo + j, write->dstBinding,
                            write->dstArrayElement + j,
                            write_immutable_samplers);
      }
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      for (uint32_t j = 0; j < write->descriptorCount; j++) {
         write_sampler_desc(set, write->pImageInfo + j, write->dstBinding,
                            write->dstArrayElement + j,
                            write_immutable_samplers);
         write_image_view_desc(set, write->pImageInfo + j, write->dstBinding,
                               write->dstArrayElement + j,
                               VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
      }
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      for (uint32_t j = 0; j < write->descriptorCount; j++) {
         write_image_view_desc(set, write->pImageInfo + j, write->dstBinding,
                               write->dstArrayElement + j,
                               write->descriptorType);
      }
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      for (uint32_t j = 0; j < write->descriptorCount; j++) {
         write_buffer_view_desc(set, write->pTexelBufferView[j],
                                write->dstBinding, write->dstArrayElement + j,
                                write->descriptorType);
      }
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      for (uint32_t j = 0; j < write->descriptorCount; j++) {
         write_buffer_desc(set, write->pBufferInfo + j, write->dstBinding,
                           write->dstArrayElement + j, write->descriptorType);
      }
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      for (uint32_t j = 0; j < write->descriptorCount; j++) {
         write_dynamic_buffer_desc(set, write->pBufferInfo + j,
                                   write->dstBinding,
                                   write->dstArrayElement + j);
      }
      break;

   default:
      unreachable("Unsupported descriptor type");
   }
   return VK_SUCCESS;
}

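/* Copy descriptors between two sets. Regular descriptors are copied one full
 * descriptor stride per array element; dynamic buffers are copied between the
 * dyn_bufs arrays.
 */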
static VkResult
panvk_descriptor_set_copy(const VkCopyDescriptorSet *copy)
{
   VK_FROM_HANDLE(panvk_descriptor_set, src_set, copy->srcSet);
   VK_FROM_HANDLE(panvk_descriptor_set, dst_set, copy->dstSet);

   const struct panvk_descriptor_set_binding_layout *dst_binding_layout =
      &dst_set->layout->bindings[copy->dstBinding];
   const struct panvk_descriptor_set_binding_layout *src_binding_layout =
      &src_set->layout->bindings[copy->srcBinding];

   assert(dst_binding_layout->type == src_binding_layout->type);

   switch (src_binding_layout->type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      for (uint32_t i = 0; i < copy->descriptorCount; i++) {
         void *dst = get_desc_slot_ptr(dst_set, copy->dstBinding,
                                       copy->dstArrayElement + i,
                                       dst_binding_layout->type);
         const void *src = get_desc_slot_ptr(src_set, copy->srcBinding,
                                             copy->srcArrayElement + i,
                                             src_binding_layout->type);

         memcpy(dst, src,
                PANVK_DESCRIPTOR_SIZE *
                   panvk_get_desc_stride(src_binding_layout->type));
      }
      break;

   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
      uint32_t dst_dyn_buf_idx =
         dst_binding_layout->desc_idx + copy->dstArrayElement;
      uint32_t src_dyn_buf_idx =
         src_binding_layout->desc_idx + copy->srcArrayElement;

      memcpy(
         &dst_set->dyn_bufs[dst_dyn_buf_idx],
         &src_set->dyn_bufs[src_dyn_buf_idx],
         copy->descriptorCount * sizeof(dst_set->dyn_bufs[dst_dyn_buf_idx]));
      break;
   }

   default:
      unreachable("Unsupported descriptor type");
   }

   return VK_SUCCESS;
}

void
panvk_per_arch(UpdateDescriptorSets)(
   VkDevice _device, uint32_t descriptorWriteCount,
   const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount,
   const VkCopyDescriptorSet *pDescriptorCopies)
{
   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      VK_FROM_HANDLE(panvk_descriptor_set, set, pDescriptorWrites[i].dstSet);

      panvk_per_arch(descriptor_set_write)(set, &pDescriptorWrites[i], false);
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++)
      panvk_descriptor_set_copy(&pDescriptorCopies[i]);
}

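/* Template-based equivalent of panvk_per_arch(descriptor_set_write): the
 * descriptor data is pulled out of the raw data blob using the template
 * entries' offset/stride.
 */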
void
panvk_per_arch(descriptor_set_write_template)(
   struct panvk_descriptor_set *set,
   const struct vk_descriptor_update_template *template, const void *data,
   bool write_immutable_samplers)
{
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct vk_descriptor_template_entry *entry = &template->entries[i];

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;

            write_sampler_desc(set, info, entry->binding,
                               entry->array_element + j,
                               write_immutable_samplers);
         }
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;
            write_sampler_desc(set, info, entry->binding,
                               entry->array_element + j,
                               write_immutable_samplers);
            write_image_view_desc(set, info, entry->binding,
                                  entry->array_element + j,
                                  VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;

            write_image_view_desc(set, info, entry->binding,
                                  entry->array_element + j, entry->type);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *bview =
               data + entry->offset + j * entry->stride;

            write_buffer_view_desc(set, *bview, entry->binding,
                                   entry->array_element + j, entry->type);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;

            write_buffer_desc(set, info, entry->binding,
                              entry->array_element + j, entry->type);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;

            write_dynamic_buffer_desc(set, info, entry->binding,
                                      entry->array_element + j);
         }
         break;
      default:
         unreachable("Unsupported descriptor type");
      }
   }
}

void
panvk_per_arch(UpdateDescriptorSetWithTemplate)(
   VkDevice _device, VkDescriptorSet descriptorSet,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void *pData)
{
   VK_FROM_HANDLE(panvk_descriptor_set, set, descriptorSet);
   VK_FROM_HANDLE(vk_descriptor_update_template, template,
                  descriptorUpdateTemplate);

   panvk_per_arch(descriptor_set_write_template)(set, template, pData, false);
}