/*
 * Copyright © 2019 Raspberry Pi Ltd
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "vk_descriptors.h"
#include "vk_util.h"

#include "v3dv_private.h"

/*
 * For a given descriptor, defined by the descriptor set it belongs to, its
 * binding layout, and its array index, returns the map region assigned to
 * it from the descriptor pool bo.
 */
static void *
descriptor_bo_map(struct v3dv_device *device,
                  struct v3dv_descriptor_set *set,
                  const struct v3dv_descriptor_set_binding_layout *binding_layout,
                  uint32_t array_index)
{
   /* Inline uniform blocks use BO memory to store UBO contents, not
    * descriptor data, so their descriptor BO size is 0 even though they
    * do use BO memory.
    */
   uint32_t bo_size = v3dv_X(device, descriptor_bo_size)(binding_layout->type);
   assert(bo_size > 0 ||
          binding_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK);

   return set->pool->bo->map +
      set->base_offset + binding_layout->descriptor_offset +
      array_index * binding_layout->plane_stride * bo_size;
}

static bool
descriptor_type_is_dynamic(VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      return true;
   default:
      return false;
   }
}

/*
 * Tries to get a real descriptor from the descriptor_state plus the
 * pipeline_layout, using a descriptor map index.
 */
struct v3dv_descriptor *
v3dv_descriptor_map_get_descriptor(struct v3dv_descriptor_state *descriptor_state,
                                   struct v3dv_descriptor_map *map,
                                   struct v3dv_pipeline_layout *pipeline_layout,
                                   uint32_t index,
                                   uint32_t *dynamic_offset)
{
   assert(index < map->num_desc);

   uint32_t set_number = map->set[index];
   assert(descriptor_state->valid & 1 << set_number);

   struct v3dv_descriptor_set *set =
      descriptor_state->descriptor_sets[set_number];
   assert(set);

   uint32_t binding_number = map->binding[index];
   assert(binding_number < set->layout->binding_count);

   const struct v3dv_descriptor_set_binding_layout *binding_layout =
      &set->layout->binding[binding_number];

   uint32_t array_index = map->array_index[index];
   assert(array_index < binding_layout->array_size);

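   /* Dynamic buffer descriptors carry an extra offset that the application
    * supplies at vkCmdBindDescriptorSets() time; report it to the caller.
    */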
   if (descriptor_type_is_dynamic(binding_layout->type)) {
      uint32_t dynamic_offset_index =
         pipeline_layout->set[set_number].dynamic_offset_start +
         binding_layout->dynamic_offset_index + array_index;

      *dynamic_offset = descriptor_state->dynamic_offsets[dynamic_offset_index];
   }

   return &set->descriptors[binding_layout->descriptor_index + array_index];
}

/* Equivalent to map_get_descriptor but it returns a reloc with the bo
 * associated with that descriptor (a suballocation of the descriptor pool
 * bo).
 *
 * It also returns the descriptor type, so the caller can do extra
 * validation or add extra offsets if the bo contains more than one field.
 */
struct v3dv_cl_reloc
v3dv_descriptor_map_get_descriptor_bo(struct v3dv_device *device,
                                      struct v3dv_descriptor_state *descriptor_state,
                                      struct v3dv_descriptor_map *map,
                                      struct v3dv_pipeline_layout *pipeline_layout,
                                      uint32_t index,
                                      VkDescriptorType *out_type)
{
   assert(index < map->num_desc);

   uint32_t set_number = map->set[index];
   assert(descriptor_state->valid & 1 << set_number);

   struct v3dv_descriptor_set *set =
      descriptor_state->descriptor_sets[set_number];
   assert(set);

   uint32_t binding_number = map->binding[index];
   assert(binding_number < set->layout->binding_count);

   const struct v3dv_descriptor_set_binding_layout *binding_layout =
      &set->layout->binding[binding_number];

   uint32_t bo_size = v3dv_X(device, descriptor_bo_size)(binding_layout->type);

   assert(binding_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK ||
          bo_size > 0);
   if (out_type)
      *out_type = binding_layout->type;

   uint32_t array_index = map->array_index[index];
   assert(array_index < binding_layout->array_size);

   struct v3dv_cl_reloc reloc = {
      .bo = set->pool->bo,
      .offset = set->base_offset + binding_layout->descriptor_offset +
                array_index * binding_layout->plane_stride * bo_size,
   };

   return reloc;
}

/*
 * The difference between this method and v3dv_descriptor_map_get_descriptor
 * is that, if the samplers were added as immutable when creating the set
 * layout, they are bound to the set layout and are not part of the
 * descriptor per se. This method returns early in that case.
 */
const struct v3dv_sampler *
v3dv_descriptor_map_get_sampler(struct v3dv_descriptor_state *descriptor_state,
                                struct v3dv_descriptor_map *map,
                                struct v3dv_pipeline_layout *pipeline_layout,
                                uint32_t index)
{
   assert(index < map->num_desc);

   uint32_t set_number = map->set[index];
   assert(descriptor_state->valid & 1 << set_number);

   struct v3dv_descriptor_set *set =
      descriptor_state->descriptor_sets[set_number];
   assert(set);

   uint32_t binding_number = map->binding[index];
   assert(binding_number < set->layout->binding_count);

   const struct v3dv_descriptor_set_binding_layout *binding_layout =
      &set->layout->binding[binding_number];

   uint32_t array_index = map->array_index[index];
   assert(array_index < binding_layout->array_size);

   if (binding_layout->immutable_samplers_offset != 0) {
      assert(binding_layout->type == VK_DESCRIPTOR_TYPE_SAMPLER ||
             binding_layout->type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);

      const struct v3dv_sampler *immutable_samplers =
         v3dv_immutable_samplers(set->layout, binding_layout);

      assert(immutable_samplers);
      const struct v3dv_sampler *sampler = &immutable_samplers[array_index];
      assert(sampler);

      return sampler;
   }

   struct v3dv_descriptor *descriptor =
      &set->descriptors[binding_layout->descriptor_index + array_index];

   assert(descriptor->type == VK_DESCRIPTOR_TYPE_SAMPLER ||
          descriptor->type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);

   assert(descriptor->sampler);

   return descriptor->sampler;
}


struct v3dv_cl_reloc
v3dv_descriptor_map_get_sampler_state(struct v3dv_device *device,
                                      struct v3dv_descriptor_state *descriptor_state,
                                      struct v3dv_descriptor_map *map,
                                      struct v3dv_pipeline_layout *pipeline_layout,
                                      uint32_t index)
{
   VkDescriptorType type;
   struct v3dv_cl_reloc reloc =
      v3dv_descriptor_map_get_descriptor_bo(device, descriptor_state, map,
                                            pipeline_layout,
                                            index, &type);

   assert(type == VK_DESCRIPTOR_TYPE_SAMPLER ||
          type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);

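   /* A combined image sampler descriptor packs both texture and sampler
    * shader state into the same BO region, so point the reloc at the
    * sampler state for the requested plane.
    */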
   if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
      reloc.offset += v3dv_X(device, combined_image_sampler_sampler_state_offset)(map->plane[index]);

   return reloc;
}

struct v3dv_bo *
v3dv_descriptor_map_get_texture_bo(struct v3dv_descriptor_state *descriptor_state,
                                   struct v3dv_descriptor_map *map,
                                   struct v3dv_pipeline_layout *pipeline_layout,
                                   uint32_t index)
{
   struct v3dv_descriptor *descriptor =
      v3dv_descriptor_map_get_descriptor(descriptor_state, map,
                                         pipeline_layout, index, NULL);

   switch (descriptor->type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      assert(descriptor->buffer_view);
      return descriptor->buffer_view->buffer->mem->bo;
   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
      assert(descriptor->image_view);
      struct v3dv_image *image =
         (struct v3dv_image *) descriptor->image_view->vk.image;
      assert(map->plane[index] < image->plane_count);
      return image->planes[map->plane[index]].mem->bo;
   }
   default:
      unreachable("descriptor type doesn't have a texture bo");
   }
}

struct v3dv_cl_reloc
v3dv_descriptor_map_get_texture_shader_state(struct v3dv_device *device,
                                             struct v3dv_descriptor_state *descriptor_state,
                                             struct v3dv_descriptor_map *map,
                                             struct v3dv_pipeline_layout *pipeline_layout,
                                             uint32_t index)
{
   VkDescriptorType type;
   struct v3dv_cl_reloc reloc =
      v3dv_descriptor_map_get_descriptor_bo(device,
                                            descriptor_state, map,
                                            pipeline_layout,
                                            index, &type);

   assert(type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
          type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
          type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT ||
          type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ||
          type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER ||
          type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);

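   /* For combined image samplers the texture shader state lives alongside
    * the sampler state in the same descriptor BO region, at a per-plane
    * offset.
    */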
   if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
      reloc.offset += v3dv_X(device, combined_image_sampler_texture_state_offset)(map->plane[index]);

   return reloc;
}

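/* The SHA1 helpers below hash the layout state that can affect compiled
 * pipelines, producing the hash stored in v3dv_pipeline_layout::sha1
 * (used, for example, as part of pipeline cache keys).
 */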
#define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x));

static void
sha1_update_ycbcr_conversion(struct mesa_sha1 *ctx,
                             const struct vk_ycbcr_conversion_state *conversion)
{
   SHA1_UPDATE_VALUE(ctx, conversion->format);
   SHA1_UPDATE_VALUE(ctx, conversion->ycbcr_model);
   SHA1_UPDATE_VALUE(ctx, conversion->ycbcr_range);
   SHA1_UPDATE_VALUE(ctx, conversion->mapping);
   SHA1_UPDATE_VALUE(ctx, conversion->chroma_offsets);
   SHA1_UPDATE_VALUE(ctx, conversion->chroma_reconstruction);
}

static void
sha1_update_descriptor_set_binding_layout(struct mesa_sha1 *ctx,
                                          const struct v3dv_descriptor_set_binding_layout *layout,
                                          const struct v3dv_descriptor_set_layout *set_layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->type);
   SHA1_UPDATE_VALUE(ctx, layout->array_size);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_index);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_count);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_index);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_offset);
   SHA1_UPDATE_VALUE(ctx, layout->immutable_samplers_offset);
   SHA1_UPDATE_VALUE(ctx, layout->plane_stride);

   if (layout->immutable_samplers_offset) {
      const struct v3dv_sampler *immutable_samplers =
         v3dv_immutable_samplers(set_layout, layout);

      for (unsigned i = 0; i < layout->array_size; i++) {
         const struct v3dv_sampler *sampler = &immutable_samplers[i];
         if (sampler->conversion)
            sha1_update_ycbcr_conversion(ctx, &sampler->conversion->state);
      }
   }
}

static void
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
                                  const struct v3dv_descriptor_set_layout *layout)
{
   SHA1_UPDATE_VALUE(ctx, layout->flags);
   SHA1_UPDATE_VALUE(ctx, layout->binding_count);
   SHA1_UPDATE_VALUE(ctx, layout->shader_stages);
   SHA1_UPDATE_VALUE(ctx, layout->descriptor_count);
   SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_count);

   for (uint16_t i = 0; i < layout->binding_count; i++)
      sha1_update_descriptor_set_binding_layout(ctx, &layout->binding[i], layout);
}


/*
 * As the anv and tu drivers already point out:
 *
 * "Pipeline layouts. These have nothing to do with the pipeline. They are
 * just multiple descriptor set layouts pasted together."
 */

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_CreatePipelineLayout(VkDevice _device,
                          const VkPipelineLayoutCreateInfo *pCreateInfo,
                          const VkAllocationCallbacks *pAllocator,
                          VkPipelineLayout *pPipelineLayout)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   struct v3dv_pipeline_layout *layout;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_object_zalloc(&device->vk, pAllocator, sizeof(*layout),
                             VK_OBJECT_TYPE_PIPELINE_LAYOUT);
   if (layout == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;
   layout->ref_cnt = 1;

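   /* Record, for each set, where its dynamic offsets start within the
    * flattened array of dynamic offsets supplied at descriptor set bind
    * time.
    */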
   uint32_t dynamic_offset_count = 0;
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      V3DV_FROM_HANDLE(v3dv_descriptor_set_layout, set_layout,
                       pCreateInfo->pSetLayouts[set]);
      v3dv_descriptor_set_layout_ref(set_layout);
      layout->set[set].layout = set_layout;
      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         dynamic_offset_count += set_layout->binding[b].array_size *
            set_layout->binding[b].dynamic_offset_count;
      }

      layout->shader_stages |= set_layout->shader_stages;
   }

   layout->push_constant_size = 0;
   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size =
         MAX2(layout->push_constant_size, range->offset + range->size);
   }

   layout->push_constant_size = align(layout->push_constant_size, 4096);

   layout->dynamic_offset_count = dynamic_offset_count;

   struct mesa_sha1 ctx;
   _mesa_sha1_init(&ctx);
   for (unsigned s = 0; s < layout->num_sets; s++) {
      sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
      _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
                        sizeof(layout->set[s].dynamic_offset_start));
   }
   _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
   _mesa_sha1_final(&ctx, layout->sha1);

   *pPipelineLayout = v3dv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void
v3dv_pipeline_layout_destroy(struct v3dv_device *device,
                             struct v3dv_pipeline_layout *layout,
                             const VkAllocationCallbacks *alloc)
{
   assert(layout);

   for (uint32_t i = 0; i < layout->num_sets; i++)
      v3dv_descriptor_set_layout_unref(device, layout->set[i].layout);

   vk_object_free(&device->vk, alloc, layout);
}

VKAPI_ATTR void VKAPI_CALL
v3dv_DestroyPipelineLayout(VkDevice _device,
                           VkPipelineLayout _pipelineLayout,
                           const VkAllocationCallbacks *pAllocator)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;

   v3dv_pipeline_layout_unref(device, pipeline_layout, pAllocator);
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_CreateDescriptorPool(VkDevice _device,
                          const VkDescriptorPoolCreateInfo *pCreateInfo,
                          const VkAllocationCallbacks *pAllocator,
                          VkDescriptorPool *pDescriptorPool)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   struct v3dv_descriptor_pool *pool;
   /* size is for the Vulkan descriptor pool object itself. The final size
    * depends on whether the FREE_DESCRIPTOR_SET flag is used.
    */
   uint64_t size = sizeof(struct v3dv_descriptor_pool);
   /* bo_size is for the descriptor-related info that we need to have at a
    * GPU address (so in memory allocated with v3dv_bo_alloc), like for
    * example the texture sampler state. Note that not all descriptor types
    * use it.
    */
   uint32_t bo_size = 0;
   uint32_t descriptor_count = 0;

   const VkDescriptorPoolInlineUniformBlockCreateInfo *inline_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO);

   for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
      /* Verify supported descriptor type */
      switch (pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         break;
      default:
         unreachable("Unimplemented descriptor type");
         break;
      }

      assert(pCreateInfo->pPoolSizes[i].descriptorCount > 0);
      if (pCreateInfo->pPoolSizes[i].type ==
          VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         /* Inline uniform blocks are specified to use the descriptor array
          * size as the size in bytes of the block.
          */
         assert(inline_info);
         descriptor_count += inline_info->maxInlineUniformBlockBindings;
         bo_size += pCreateInfo->pPoolSizes[i].descriptorCount;
      } else {
         descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         bo_size += v3dv_X(device, descriptor_bo_size)(pCreateInfo->pPoolSizes[i].type) *
                    pCreateInfo->pPoolSizes[i].descriptorCount;
      }
   }

   /* We align all our buffers to V3D_NON_COHERENT_ATOM_SIZE, so make sure we
    * allocate enough memory to honor that requirement for all our inline
    * buffers too.
    */
   if (inline_info) {
      bo_size += V3D_NON_COHERENT_ATOM_SIZE *
                 inline_info->maxInlineUniformBlockBindings;
   }

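   /* Without FREE_DESCRIPTOR_SET, sets cannot be freed individually, so we
    * can suballocate all set and descriptor host memory from this single
    * allocation; otherwise we only need room to track pool entries.
    */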
   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      uint64_t host_size =
         pCreateInfo->maxSets * sizeof(struct v3dv_descriptor_set);
      host_size += sizeof(struct v3dv_descriptor) * descriptor_count;
      size += host_size;
   } else {
      size += sizeof(struct v3dv_descriptor_pool_entry) * pCreateInfo->maxSets;
   }

   pool = vk_object_zalloc(&device->vk, pAllocator, size,
                           VK_OBJECT_TYPE_DESCRIPTOR_POOL);

   if (!pool)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      pool->host_memory_base = (uint8_t*)pool + sizeof(struct v3dv_descriptor_pool);
      pool->host_memory_ptr = pool->host_memory_base;
      pool->host_memory_end = (uint8_t*)pool + size;
   }

   pool->max_entry_count = pCreateInfo->maxSets;

   if (bo_size > 0) {
      pool->bo = v3dv_bo_alloc(device, bo_size, "descriptor pool bo", true);
      if (!pool->bo)
         goto out_of_device_memory;

      bool ok = v3dv_bo_map(device, pool->bo, pool->bo->size);
      if (!ok)
         goto out_of_device_memory;

      pool->current_offset = 0;
   } else {
      pool->bo = NULL;
   }

   list_inithead(&pool->set_list);

   *pDescriptorPool = v3dv_descriptor_pool_to_handle(pool);

   return VK_SUCCESS;

out_of_device_memory:
   vk_object_free(&device->vk, pAllocator, pool);
   return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
}

static void
descriptor_set_destroy(struct v3dv_device *device,
                       struct v3dv_descriptor_pool *pool,
                       struct v3dv_descriptor_set *set,
                       bool free_bo)
{
   assert(!pool->host_memory_base);

   if (free_bo && !pool->host_memory_base) {
      for (uint32_t i = 0; i < pool->entry_count; i++) {
         if (pool->entries[i].set == set) {
            memmove(&pool->entries[i], &pool->entries[i+1],
                    sizeof(pool->entries[i]) * (pool->entry_count - i - 1));
            --pool->entry_count;
            break;
         }
      }
   }
   vk_object_free(&device->vk, NULL, set);
}

VKAPI_ATTR void VKAPI_CALL
v3dv_DestroyDescriptorPool(VkDevice _device,
                           VkDescriptorPool _pool,
                           const VkAllocationCallbacks *pAllocator)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   list_for_each_entry_safe(struct v3dv_descriptor_set, set,
                            &pool->set_list, pool_link) {
      v3dv_descriptor_set_layout_unref(device, set->layout);
   }

   if (!pool->host_memory_base) {
      for (int i = 0; i < pool->entry_count; ++i) {
         descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
   }

   if (pool->bo) {
      v3dv_bo_free(device, pool->bo);
      pool->bo = NULL;
   }

   vk_object_free(&device->vk, pAllocator, pool);
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_ResetDescriptorPool(VkDevice _device,
                         VkDescriptorPool descriptorPool,
                         VkDescriptorPoolResetFlags flags)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, descriptorPool);

   list_for_each_entry_safe(struct v3dv_descriptor_set, set,
                            &pool->set_list, pool_link) {
      v3dv_descriptor_set_layout_unref(device, set->layout);
   }
   list_inithead(&pool->set_list);

   if (!pool->host_memory_base) {
      for (int i = 0; i < pool->entry_count; ++i) {
         descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
   } else {
      /* Clean up the host memory so that sets allocated from the pool later
       * start out zeroed.
       */
      uint32_t host_size = pool->host_memory_end - pool->host_memory_base;
      memset(pool->host_memory_base, 0, host_size);
   }

   pool->entry_count = 0;
   pool->host_memory_ptr = pool->host_memory_base;
   pool->current_offset = 0;

   return VK_SUCCESS;
}

void
v3dv_descriptor_set_layout_destroy(struct v3dv_device *device,
                                   struct v3dv_descriptor_set_layout *set_layout)
{
   assert(set_layout->ref_cnt == 0);
   vk_object_base_finish(&set_layout->base);
   vk_free2(&device->vk.alloc, NULL, set_layout);
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_CreateDescriptorSetLayout(VkDevice _device,
                               const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
                               const VkAllocationCallbacks *pAllocator,
                               VkDescriptorSetLayout *pSetLayout)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   struct v3dv_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;

   /* For immutable descriptors, the plane stride is the largest plane
    * count of all combined image samplers. For mutable descriptors
    * this is always 1 since multiplanar images are restricted to
    * immutable combined image samplers.
    */
   uint8_t plane_stride = 1;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);

      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...] If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored."
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
      if ((desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           desc_type == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          pCreateInfo->pBindings[j].pImmutableSamplers) {
         uint32_t descriptor_count = pCreateInfo->pBindings[j].descriptorCount;
         immutable_sampler_count += descriptor_count;

         for (uint32_t i = 0; i < descriptor_count; i++) {
            const VkSampler vk_sampler =
               pCreateInfo->pBindings[j].pImmutableSamplers[i];
            VK_FROM_HANDLE(v3dv_sampler, sampler, vk_sampler);
            plane_stride = MAX2(plane_stride, sampler->plane_count);
         }
      }
   }

   /* We place immutable samplers after the binding data. We want to use
    * offsetof instead of sizeof(struct v3dv_descriptor_set_layout)
    * because the latter may include padding at the end of the struct.
    */
   uint32_t samplers_offset =
      offsetof(struct v3dv_descriptor_set_layout, binding[num_bindings]);

   uint32_t size = samplers_offset +
      immutable_sampler_count * sizeof(struct v3dv_sampler);

   /* Descriptor set layouts are reference counted and therefore can survive
    * vkDestroyDescriptorSetLayout, so they need to be allocated with a
    * device scope.
    */
   set_layout =
      vk_zalloc(&device->vk.alloc, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
   if (!set_layout)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &set_layout->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT);

   struct v3dv_sampler *samplers = (void*) &set_layout->binding[num_bindings];

   assert(pCreateInfo->bindingCount == 0 || num_bindings > 0);

   VkDescriptorSetLayoutBinding *bindings = NULL;
   VkResult result = vk_create_sorted_bindings(pCreateInfo->pBindings,
                                               pCreateInfo->bindingCount, &bindings);
   if (result != VK_SUCCESS) {
      v3dv_descriptor_set_layout_destroy(device, set_layout);
      return vk_error(device, result);
   }

   set_layout->binding_count = num_bindings;
   set_layout->flags = pCreateInfo->flags;
   set_layout->shader_stages = 0;
   set_layout->bo_size = 0;
   set_layout->ref_cnt = 1;

   uint32_t descriptor_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + i;
      uint32_t binding_number = binding->binding;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[binding_number].dynamic_offset_count = 1;
         break;
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         /* Nothing to do here: these types just pass the supported-type
          * check.
          */
         break;
      default:
         unreachable("Unknown descriptor type\n");
         break;
      }

      set_layout->binding[binding_number].type = binding->descriptorType;
      set_layout->binding[binding_number].array_size = binding->descriptorCount;
      set_layout->binding[binding_number].descriptor_index = descriptor_count;
      set_layout->binding[binding_number].dynamic_offset_index = dynamic_offset_count;
      set_layout->binding[binding_number].plane_stride = plane_stride;

      if ((binding->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          binding->pImmutableSamplers) {

         set_layout->binding[binding_number].immutable_samplers_offset = samplers_offset;

         for (uint32_t j = 0; j < binding->descriptorCount; j++)
            samplers[j] = *v3dv_sampler_from_handle(binding->pImmutableSamplers[j]);

         samplers += binding->descriptorCount;
         samplers_offset += sizeof(struct v3dv_sampler) * binding->descriptorCount;
      }

      set_layout->shader_stages |= binding->stageFlags;

      if (binding->descriptorType != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         dynamic_offset_count += binding->descriptorCount *
            set_layout->binding[binding_number].dynamic_offset_count;

         descriptor_count += binding->descriptorCount;

         set_layout->binding[binding_number].descriptor_offset =
            set_layout->bo_size;
         set_layout->bo_size +=
            v3dv_X(device, descriptor_bo_size)(set_layout->binding[binding_number].type) *
            binding->descriptorCount * set_layout->binding[binding_number].plane_stride;
      } else {
         /* We align all our buffers, inline buffers too. We made sure to
          * take this into account when calculating total BO size
          * requirements at pool creation time.
          */
         set_layout->bo_size = align(set_layout->bo_size,
                                     V3D_NON_COHERENT_ATOM_SIZE);

         set_layout->binding[binding_number].descriptor_offset =
            set_layout->bo_size;

         /* Inline uniform blocks are not arrayed; instead, descriptorCount
          * specifies the size of the buffer in bytes.
          */
         set_layout->bo_size += binding->descriptorCount;
         descriptor_count++;
      }
   }

   free(bindings);

   set_layout->descriptor_count = descriptor_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = v3dv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
v3dv_DestroyDescriptorSetLayout(VkDevice _device,
                                VkDescriptorSetLayout _set_layout,
                                const VkAllocationCallbacks *pAllocator)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   v3dv_descriptor_set_layout_unref(device, set_layout);
}

static inline VkResult
out_of_pool_memory(const struct v3dv_device *device,
                   const struct v3dv_descriptor_pool *pool)
{
   /* Don't log OUT_OF_POOL_MEMORY errors for internal driver pools: we
    * handle these properly by allocating a new pool, so they don't point
    * to real issues.
    */
   if (!pool->is_driver_internal)
      return vk_error(device, VK_ERROR_OUT_OF_POOL_MEMORY);
   else
      return VK_ERROR_OUT_OF_POOL_MEMORY;
}

static VkResult
descriptor_set_create(struct v3dv_device *device,
                      struct v3dv_descriptor_pool *pool,
                      struct v3dv_descriptor_set_layout *layout,
                      struct v3dv_descriptor_set **out_set)
{
   struct v3dv_descriptor_set *set;
   uint32_t descriptor_count = layout->descriptor_count;
   unsigned mem_size = sizeof(struct v3dv_descriptor_set) +
      sizeof(struct v3dv_descriptor) * descriptor_count;

   if (pool->host_memory_base) {
      if (pool->host_memory_end - pool->host_memory_ptr < mem_size)
         return out_of_pool_memory(device, pool);

      set = (struct v3dv_descriptor_set *) pool->host_memory_ptr;
      pool->host_memory_ptr += mem_size;

      vk_object_base_init(&device->vk, &set->base, VK_OBJECT_TYPE_DESCRIPTOR_SET);
   } else {
      set = vk_object_zalloc(&device->vk, NULL, mem_size,
                             VK_OBJECT_TYPE_DESCRIPTOR_SET);

      if (!set)
         return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   set->pool = pool;

   set->layout = layout;

   /* FIXME: VK_EXT_descriptor_indexing introduces
    * VARIABLE_DESCRIPTOR_LAYOUT_COUNT. That would affect the layout_size
    * used below for bo allocation.
    */

   uint32_t offset = 0;
   uint32_t index = pool->entry_count;

   if (layout->bo_size) {
      if (!pool->host_memory_base && pool->entry_count == pool->max_entry_count) {
         vk_object_free(&device->vk, NULL, set);
         return out_of_pool_memory(device, pool);
      }

      /* We try to allocate linearly first, so that we don't spend time
       * looking for gaps if the app only allocates & resets via the pool.
       *
       * If that fails, we try to find a gap from previously freed subregions
       * iterating through the descriptor pool entries. Note that we are not
       * doing that if we have a pool->host_memory_base. We only have that if
       * VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT is not set, so in
       * that case the user can't free subregions, so it doesn't make sense
       * to even try (or track those subregions).
       */
      if (pool->current_offset + layout->bo_size <= pool->bo->size) {
         offset = pool->current_offset;
         pool->current_offset += layout->bo_size;
      } else if (!pool->host_memory_base) {
         for (index = 0; index < pool->entry_count; index++) {
            if (pool->entries[index].offset - offset >= layout->bo_size)
               break;
            offset = pool->entries[index].offset + pool->entries[index].size;
         }
         if (pool->bo->size - offset < layout->bo_size) {
            vk_object_free(&device->vk, NULL, set);
            return out_of_pool_memory(device, pool);
         }
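         /* Keep the entries array sorted by BO offset: shift the entries
          * after the gap up to make room for the new set.
          */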
         memmove(&pool->entries[index + 1], &pool->entries[index],
                 sizeof(pool->entries[0]) * (pool->entry_count - index));
      } else {
         assert(pool->host_memory_base);
         return out_of_pool_memory(device, pool);
      }

      set->base_offset = offset;
   }

   if (!pool->host_memory_base) {
      pool->entries[index].set = set;
      pool->entries[index].offset = offset;
      pool->entries[index].size = layout->bo_size;
      pool->entry_count++;
   }

   /* Go through and fill out immutable samplers if we have any */
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers_offset == 0)
         continue;

      const struct v3dv_sampler *samplers =
         (const struct v3dv_sampler *)((const char *) layout +
                                       layout->binding[b].immutable_samplers_offset);

      for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
         assert(samplers[i].plane_count <= V3DV_MAX_PLANE_COUNT);
         for (uint8_t plane = 0; plane < samplers[i].plane_count; plane++) {
            uint32_t combined_offset =
               layout->binding[b].type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ?
               v3dv_X(device, combined_image_sampler_sampler_state_offset)(plane) : 0;
            void *desc_map =
               descriptor_bo_map(device, set, &layout->binding[b], i);
            desc_map += combined_offset;

            memcpy(desc_map, samplers[i].sampler_state,
                   sizeof(samplers[i].sampler_state));
         }
      }
   }

   v3dv_descriptor_set_layout_ref(layout);
   list_addtail(&set->pool_link, &pool->set_list);

   *out_set = set;

   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_AllocateDescriptorSets(VkDevice _device,
                            const VkDescriptorSetAllocateInfo *pAllocateInfo,
                            VkDescriptorSet *pDescriptorSets)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   struct v3dv_descriptor_set *set = NULL;
   uint32_t i = 0;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      V3DV_FROM_HANDLE(v3dv_descriptor_set_layout, layout,
                       pAllocateInfo->pSetLayouts[i]);

      result = descriptor_set_create(device, pool, layout, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = v3dv_descriptor_set_to_handle(set);
   }

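   /* On failure, free any sets we did manage to allocate and return all
    * output handles as VK_NULL_HANDLE, as the spec requires.
    */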
   if (result != VK_SUCCESS) {
      v3dv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                              i, pDescriptorSets);
      for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
         pDescriptorSets[i] = VK_NULL_HANDLE;
      }
   }

   return result;
}

VKAPI_ATTR VkResult VKAPI_CALL
v3dv_FreeDescriptorSets(VkDevice _device,
                        VkDescriptorPool descriptorPool,
                        uint32_t count,
                        const VkDescriptorSet *pDescriptorSets)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      V3DV_FROM_HANDLE(v3dv_descriptor_set, set, pDescriptorSets[i]);

      if (set) {
         v3dv_descriptor_set_layout_unref(device, set->layout);
         list_del(&set->pool_link);
         if (!pool->host_memory_base)
            descriptor_set_destroy(device, pool, set, true);
      }
   }

   return VK_SUCCESS;
}

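/* Copies the BO-backed shader state of a descriptor between two sets; used
 * to implement the copy side of vkUpdateDescriptorSets().
 */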
static void
descriptor_bo_copy(struct v3dv_device *device,
                   struct v3dv_descriptor_set *dst_set,
                   const struct v3dv_descriptor_set_binding_layout *dst_binding_layout,
                   uint32_t dst_array_index,
                   struct v3dv_descriptor_set *src_set,
                   const struct v3dv_descriptor_set_binding_layout *src_binding_layout,
                   uint32_t src_array_index)
{
   assert(dst_binding_layout->type == src_binding_layout->type);
   assert(src_binding_layout->plane_stride == dst_binding_layout->plane_stride);

   void *dst_map = descriptor_bo_map(device, dst_set, dst_binding_layout,
                                     dst_array_index);
   void *src_map = descriptor_bo_map(device, src_set, src_binding_layout,
                                     src_array_index);

   memcpy(dst_map, src_map,
          v3dv_X(device, descriptor_bo_size)(src_binding_layout->type) *
          src_binding_layout->plane_stride);
}

static void
write_buffer_descriptor(struct v3dv_descriptor *descriptor,
                        VkDescriptorType desc_type,
                        const VkDescriptorBufferInfo *buffer_info)
{
   V3DV_FROM_HANDLE(v3dv_buffer, buffer, buffer_info->buffer);

   descriptor->type = desc_type;
   descriptor->buffer = buffer;
   descriptor->offset = buffer_info->offset;
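   /* VK_WHOLE_SIZE means "from offset to the end of the buffer". */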
   if (buffer_info->range == VK_WHOLE_SIZE) {
      descriptor->range = buffer->size - buffer_info->offset;
   } else {
      assert(buffer_info->range <= UINT32_MAX);
      descriptor->range = buffer_info->range;
   }
}

static void
write_image_descriptor(struct v3dv_device *device,
                       struct v3dv_descriptor *descriptor,
                       VkDescriptorType desc_type,
                       struct v3dv_descriptor_set *set,
                       const struct v3dv_descriptor_set_binding_layout *binding_layout,
                       struct v3dv_image_view *iview,
                       struct v3dv_sampler *sampler,
                       uint32_t array_index)
{
   descriptor->type = desc_type;
   descriptor->sampler = sampler;
   descriptor->image_view = iview;

   assert(iview || sampler);
   uint8_t plane_count = iview ? iview->plane_count : sampler->plane_count;

   void *desc_map = descriptor_bo_map(device, set,
                                      binding_layout, array_index);

   for (uint8_t plane = 0; plane < plane_count; plane++) {
      if (iview) {
         uint32_t offset = desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ?
            v3dv_X(device, combined_image_sampler_texture_state_offset)(plane) : 0;

         void *plane_desc_map = desc_map + offset;

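         /* Image views keep an alternate texture shader state (index 1)
          * that is used only for cube map arrays bound as storage images.
          */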
         const uint32_t tex_state_index =
            iview->vk.view_type != VK_IMAGE_VIEW_TYPE_CUBE_ARRAY ||
            desc_type != VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ? 0 : 1;
         memcpy(plane_desc_map,
                iview->planes[plane].texture_shader_state[tex_state_index],
                sizeof(iview->planes[plane].texture_shader_state[0]));
      }

      if (sampler && !binding_layout->immutable_samplers_offset) {
         uint32_t offset = desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ?
            v3dv_X(device, combined_image_sampler_sampler_state_offset)(plane) : 0;

         void *plane_desc_map = desc_map + offset;
         /* For immutable samplers this was already done as part of
          * descriptor set creation, as that info can't change later.
          */
         memcpy(plane_desc_map,
                sampler->sampler_state,
                sizeof(sampler->sampler_state));
      }
   }
}


static void
write_buffer_view_descriptor(struct v3dv_device *device,
                             struct v3dv_descriptor *descriptor,
                             VkDescriptorType desc_type,
                             struct v3dv_descriptor_set *set,
                             const struct v3dv_descriptor_set_binding_layout *binding_layout,
                             struct v3dv_buffer_view *bview,
                             uint32_t array_index)
{
   assert(bview);
   descriptor->type = desc_type;
   descriptor->buffer_view = bview;

   void *desc_map = descriptor_bo_map(device, set, binding_layout, array_index);

   memcpy(desc_map,
          bview->texture_shader_state,
          sizeof(bview->texture_shader_state));
}

static void
write_inline_uniform_descriptor(struct v3dv_device *device,
                                struct v3dv_descriptor *descriptor,
                                struct v3dv_descriptor_set *set,
                                const struct v3dv_descriptor_set_binding_layout *binding_layout,
                                const void *data,
                                size_t offset,
                                size_t size)
{
   assert(binding_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK);
   descriptor->type = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK;
   descriptor->buffer = NULL;

   void *desc_map = descriptor_bo_map(device, set, binding_layout, 0);
   memcpy(desc_map + offset, data, size);

   /* The pool allocates BO space up front for all the inline buffers it may
    * need, and that space is assigned to individual descriptors as they are
    * written, so we define the range of an inline buffer as the largest
    * range of data that the client has written to it.
    */
   descriptor->offset = 0;
   descriptor->range = MAX2(descriptor->range, offset + size);
}

VKAPI_ATTR void VKAPI_CALL
v3dv_UpdateDescriptorSets(VkDevice _device,
                          uint32_t descriptorWriteCount,
                          const VkWriteDescriptorSet *pDescriptorWrites,
                          uint32_t descriptorCopyCount,
                          const VkCopyDescriptorSet *pDescriptorCopies)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *writeset = &pDescriptorWrites[i];
      V3DV_FROM_HANDLE(v3dv_descriptor_set, set, writeset->dstSet);

      const struct v3dv_descriptor_set_binding_layout *binding_layout =
         set->layout->binding + writeset->dstBinding;

      struct v3dv_descriptor *descriptor = set->descriptors;

      descriptor += binding_layout->descriptor_index;

      /* Inline uniform blocks are not arrayed; instead they use
       * dstArrayElement to specify the byte offset of the uniform update
       * and descriptorCount to specify the size (in bytes) of the update.
       */
      uint32_t descriptor_count;
      if (writeset->descriptorType != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         descriptor += writeset->dstArrayElement;
         descriptor_count = writeset->descriptorCount;
      } else {
         descriptor_count = 1;
      }

      for (uint32_t j = 0; j < descriptor_count; ++j) {
         switch (writeset->descriptorType) {

         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: {
            const VkDescriptorBufferInfo *buffer_info = writeset->pBufferInfo + j;
            write_buffer_descriptor(descriptor, writeset->descriptorType,
                                    buffer_info);
            break;
         }
         case VK_DESCRIPTOR_TYPE_SAMPLER: {
            /* If we are here we shouldn't be modifying an immutable sampler */
            assert(!binding_layout->immutable_samplers_offset);
            const VkDescriptorImageInfo *image_info = writeset->pImageInfo + j;
            V3DV_FROM_HANDLE(v3dv_sampler, sampler, image_info->sampler);

            write_image_descriptor(device, descriptor, writeset->descriptorType,
                                   set, binding_layout, NULL, sampler,
                                   writeset->dstArrayElement + j);

            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: {
            const VkDescriptorImageInfo *image_info = writeset->pImageInfo + j;
            V3DV_FROM_HANDLE(v3dv_image_view, iview, image_info->imageView);

            write_image_descriptor(device, descriptor, writeset->descriptorType,
                                   set, binding_layout, iview, NULL,
                                   writeset->dstArrayElement + j);

            break;
         }
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
            const VkDescriptorImageInfo *image_info = writeset->pImageInfo + j;
            V3DV_FROM_HANDLE(v3dv_image_view, iview, image_info->imageView);
            struct v3dv_sampler *sampler = NULL;
            if (!binding_layout->immutable_samplers_offset) {
               /* In general we ignore the sampler when updating a combined
                * image sampler, but for YCbCr we know that we must use
                * immutable combined image samplers.
                */
               assert(iview->plane_count == 1);
               V3DV_FROM_HANDLE(v3dv_sampler, _sampler, image_info->sampler);
               sampler = _sampler;
            }

            write_image_descriptor(device, descriptor, writeset->descriptorType,
                                   set, binding_layout, iview, sampler,
                                   writeset->dstArrayElement + j);

            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
            V3DV_FROM_HANDLE(v3dv_buffer_view, buffer_view,
                             writeset->pTexelBufferView[j]);
            write_buffer_view_descriptor(device, descriptor, writeset->descriptorType,
                                         set, binding_layout, buffer_view,
                                         writeset->dstArrayElement + j);
            break;
         }
         case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: {
            const VkWriteDescriptorSetInlineUniformBlock *inline_write =
               vk_find_struct_const(writeset->pNext,
                                    WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK);
            assert(inline_write->dataSize == writeset->descriptorCount);
            write_inline_uniform_descriptor(device, descriptor, set,
                                            binding_layout,
                                            inline_write->pData,
                                            writeset->dstArrayElement, /* offset */
                                            inline_write->dataSize);
            break;
         }
         default:
            unreachable("unimplemented descriptor type");
            break;
         }
         descriptor++;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copyset = &pDescriptorCopies[i];
      V3DV_FROM_HANDLE(v3dv_descriptor_set, src_set,
                       copyset->srcSet);
      V3DV_FROM_HANDLE(v3dv_descriptor_set, dst_set,
                       copyset->dstSet);

      const struct v3dv_descriptor_set_binding_layout *src_binding_layout =
         src_set->layout->binding + copyset->srcBinding;
      const struct v3dv_descriptor_set_binding_layout *dst_binding_layout =
         dst_set->layout->binding + copyset->dstBinding;

      assert(src_binding_layout->type == dst_binding_layout->type);

      struct v3dv_descriptor *src_descriptor = src_set->descriptors;
      struct v3dv_descriptor *dst_descriptor = dst_set->descriptors;

      src_descriptor += src_binding_layout->descriptor_index;
      dst_descriptor += dst_binding_layout->descriptor_index;

      if (src_binding_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         /* {src,dst}ArrayElement specifies the src/dst start offset and
          * descriptorCount specifies the size (in bytes) to copy.
          */
         const void *src_data = src_set->pool->bo->map +
                                src_set->base_offset +
                                src_binding_layout->descriptor_offset +
                                copyset->srcArrayElement;
         write_inline_uniform_descriptor(device, dst_descriptor, dst_set,
                                         dst_binding_layout,
                                         src_data,
                                         copyset->dstArrayElement,
                                         copyset->descriptorCount);
         continue;
      }

      src_descriptor += copyset->srcArrayElement;
      dst_descriptor += copyset->dstArrayElement;

      for (uint32_t j = 0; j < copyset->descriptorCount; j++) {
         *dst_descriptor = *src_descriptor;
         dst_descriptor++;
         src_descriptor++;

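         /* If this descriptor type keeps shader state in the pool BO, copy
          * that region as well, not just the CPU-side descriptor.
          */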
         if (v3dv_X(device, descriptor_bo_size)(src_binding_layout->type) > 0) {
            descriptor_bo_copy(device,
                               dst_set, dst_binding_layout,
                               j + copyset->dstArrayElement,
                               src_set, src_binding_layout,
                               j + copyset->srcArrayElement);
         }
      }
   }
}

VKAPI_ATTR void VKAPI_CALL
v3dv_GetDescriptorSetLayoutSupport(
   VkDevice _device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   VkDescriptorSetLayoutSupport *pSupport)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   VkDescriptorSetLayoutBinding *bindings = NULL;
   VkResult result = vk_create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount, &bindings);
   if (result != VK_SUCCESS) {
      pSupport->supported = false;
      return;
   }

   bool supported = true;

   uint32_t desc_host_size = sizeof(struct v3dv_descriptor);
   uint32_t host_size = sizeof(struct v3dv_descriptor_set);
   uint32_t bo_size = 0;
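
   /* A layout is unsupported if its total host-memory or BO size
    * requirements would overflow a uint32_t; the division checks below
    * detect the overflow without computing the product.
    */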
   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + i;

      if ((UINT32_MAX - host_size) / desc_host_size < binding->descriptorCount) {
         supported = false;
         break;
      }

      uint32_t desc_bo_size = v3dv_X(device, descriptor_bo_size)(binding->descriptorType);
      if (desc_bo_size > 0 &&
          (UINT32_MAX - bo_size) / desc_bo_size < binding->descriptorCount) {
         supported = false;
         break;
      }

      host_size += binding->descriptorCount * desc_host_size;
      bo_size += binding->descriptorCount * desc_bo_size;
   }

   free(bindings);

   pSupport->supported = supported;
}

void
v3dv_UpdateDescriptorSetWithTemplate(
   VkDevice _device,
   VkDescriptorSet descriptorSet,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_set, set, descriptorSet);
   V3DV_FROM_HANDLE(vk_descriptor_update_template, template,
                    descriptorUpdateTemplate);

   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct vk_descriptor_template_entry *entry =
         &template->entries[i];

      const struct v3dv_descriptor_set_binding_layout *binding_layout =
         set->layout->binding + entry->binding;

      struct v3dv_descriptor *descriptor =
         set->descriptors +
         binding_layout->descriptor_index;

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               pData + entry->offset + j * entry->stride;
            write_buffer_descriptor(descriptor + entry->array_element + j,
                                    entry->type, info);
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               pData + entry->offset + j * entry->stride;
            V3DV_FROM_HANDLE(v3dv_image_view, iview, info->imageView);
            V3DV_FROM_HANDLE(v3dv_sampler, sampler, info->sampler);
            write_image_descriptor(device, descriptor + entry->array_element + j,
                                   entry->type, set, binding_layout, iview,
                                   sampler, entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               pData + entry->offset + j * entry->stride;
            V3DV_FROM_HANDLE(v3dv_buffer_view, bview, *_bview);
            write_buffer_view_descriptor(device,
                                         descriptor + entry->array_element + j,
                                         entry->type, set, binding_layout, bview,
                                         entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: {
         write_inline_uniform_descriptor(device, descriptor, set,
                                         binding_layout,
                                         pData + entry->offset,
                                         entry->array_element, /* offset */
                                         entry->array_count); /* size */
         break;
      }

      default:
         unreachable("Unsupported descriptor type");
      }
   }
}