/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#include "vn_image.h"

#include "venus-protocol/vn_protocol_driver_image.h"
#include "venus-protocol/vn_protocol_driver_image_view.h"
#include "venus-protocol/vn_protocol_driver_sampler.h"
#include "venus-protocol/vn_protocol_driver_sampler_ycbcr_conversion.h"
#include "vk_format.h"

#include "vn_android.h"
#include "vn_device.h"
#include "vn_device_memory.h"
#include "vn_physical_device.h"
#include "vn_wsi.h"

#define IMAGE_REQS_CACHE_MAX_ENTRIES 500

/* image commands */

static inline uint32_t
vn_image_get_plane_count(const VkImageCreateInfo *create_info)
{
   if (!(create_info->flags & VK_IMAGE_CREATE_DISJOINT_BIT))
      return 1;

   /* TODO VkDrmFormatModifierPropertiesEXT::drmFormatModifierPlaneCount */
   assert(create_info->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT);
   return vk_format_get_plane_count(create_info->format);
}

static inline uint32_t
vn_image_get_plane(const VkImageAspectFlagBits plane_aspect)
{
   switch (plane_aspect) {
   case VK_IMAGE_ASPECT_PLANE_1_BIT:
      return 1;
   case VK_IMAGE_ASPECT_PLANE_2_BIT:
      return 2;
   default:
      return 0;
   }
}

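/* Fill the structs we recognize in the caller's VkMemoryRequirements2
 * pNext chain from the requirements we have cached or queried.
 */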
static void
vn_image_fill_reqs(const struct vn_image_memory_requirements *req,
                   VkMemoryRequirements2 *out_reqs)
{
   union {
      VkBaseOutStructure *pnext;
      VkMemoryRequirements2 *two;
      VkMemoryDedicatedRequirements *dedicated;
   } u = { .two = out_reqs };

   while (u.pnext) {
      switch (u.pnext->sType) {
      case VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2:
         u.two->memoryRequirements = req->memory.memoryRequirements;
         break;
      case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
         u.dedicated->prefersDedicatedAllocation =
            req->dedicated.prefersDedicatedAllocation;
         u.dedicated->requiresDedicatedAllocation =
            req->dedicated.requiresDedicatedAllocation;
         break;
      default:
         break;
      }
      u.pnext = u.pnext->pNext;
   }
}

static void
vn_image_cache_debug_dump(struct vn_image_reqs_cache *cache)
{
   vn_log(NULL, "dumping image reqs cache statistics");
   vn_log(NULL, "  hit %u\n", cache->debug.cache_hit_count);
   vn_log(NULL, "  miss %u\n", cache->debug.cache_miss_count);
   vn_log(NULL, "  skip %u\n", cache->debug.cache_skip_count);
}

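/* Derive a SHA-1 cache key from the image create info. Returns false when
 * the cache is disabled or when the pNext chain carries a struct we do not
 * know how to hash, in which case the create info is treated as
 * uncacheable.
 */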
static bool
vn_image_get_image_reqs_key(struct vn_device *dev,
                            const VkImageCreateInfo *create_info,
                            uint8_t *key)
{
   struct mesa_sha1 sha1_ctx;

   if (!dev->image_reqs_cache.ht)
      return false;

   _mesa_sha1_init(&sha1_ctx);

   /* Hash relevant fields in the pNext chain */
   vk_foreach_struct_const(src, create_info->pNext) {
      switch (src->sType) {
      case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO: {
         struct VkExternalMemoryImageCreateInfo *ext_mem =
            (struct VkExternalMemoryImageCreateInfo *)src;
         _mesa_sha1_update(&sha1_ctx, &ext_mem->handleTypes,
                           sizeof(VkExternalMemoryHandleTypeFlags));
         break;
      }
      case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO: {
         struct VkImageFormatListCreateInfo *format_list =
            (struct VkImageFormatListCreateInfo *)src;
         _mesa_sha1_update(&sha1_ctx, format_list->pViewFormats,
                           sizeof(VkFormat) * format_list->viewFormatCount);
         break;
      }
      case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT: {
         struct VkImageDrmFormatModifierListCreateInfoEXT *format_mod_list =
            (struct VkImageDrmFormatModifierListCreateInfoEXT *)src;
         _mesa_sha1_update(
            &sha1_ctx, format_mod_list->pDrmFormatModifiers,
            sizeof(uint64_t) * format_mod_list->drmFormatModifierCount);
         break;
      }
      case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT: {
         struct VkImageDrmFormatModifierExplicitCreateInfoEXT
            *format_mod_explicit =
               (struct VkImageDrmFormatModifierExplicitCreateInfoEXT *)src;
         _mesa_sha1_update(&sha1_ctx, &format_mod_explicit->drmFormatModifier,
                           sizeof(uint64_t));
         _mesa_sha1_update(
            &sha1_ctx, format_mod_explicit->pPlaneLayouts,
            sizeof(VkSubresourceLayout) *
               format_mod_explicit->drmFormatModifierPlaneCount);
         break;
      }
      case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO: {
         struct VkImageStencilUsageCreateInfo *stencil_usage =
            (struct VkImageStencilUsageCreateInfo *)src;
         _mesa_sha1_update(&sha1_ctx, &stencil_usage->stencilUsage,
                           sizeof(VkImageUsageFlags));
         break;
      }
      default:
         /* Skip cache for unsupported pNext */
         dev->image_reqs_cache.debug.cache_skip_count++;
         return false;
      }
   }

   /* Hash contiguous block of VkImageCreateInfo starting with
    * VkImageCreateInfo->flags and ending with VkImageCreateInfo->sharingMode
    *
    * There's no padding involved in this hash block so no concern for C
    * enum sizes or alignment.
    */
   static const size_t create_image_hash_block_size =
      offsetof(VkImageCreateInfo, queueFamilyIndexCount) -
      offsetof(VkImageCreateInfo, flags);

   _mesa_sha1_update(&sha1_ctx, &create_info->flags,
                     create_image_hash_block_size);

   /* Follow pointer and hash pQueueFamilyIndices separately.
    * pQueueFamilyIndices is ignored if sharingMode is not
    * VK_SHARING_MODE_CONCURRENT
    */
   if (create_info->sharingMode == VK_SHARING_MODE_CONCURRENT) {
      _mesa_sha1_update(
         &sha1_ctx, create_info->pQueueFamilyIndices,
         sizeof(uint32_t) * create_info->queueFamilyIndexCount);
   }

   _mesa_sha1_update(&sha1_ctx, &create_info->initialLayout,
                     sizeof(create_info->initialLayout));
   _mesa_sha1_final(&sha1_ctx, key);

   return true;
}

void
vn_image_reqs_cache_init(struct vn_device *dev)
{
   struct vn_image_reqs_cache *cache = &dev->image_reqs_cache;

   if (VN_PERF(NO_ASYNC_IMAGE_CREATE))
      return;

   cache->ht = _mesa_hash_table_create(NULL, vn_cache_key_hash_function,
                                       vn_cache_key_equal_function);
   if (!cache->ht)
      return;

   simple_mtx_init(&cache->mutex, mtx_plain);
   list_inithead(&dev->image_reqs_cache.lru);
}

void
vn_image_reqs_cache_fini(struct vn_device *dev)
{
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;
   struct vn_image_reqs_cache *cache = &dev->image_reqs_cache;

   if (!cache->ht)
      return;

   hash_table_foreach(cache->ht, hash_entry) {
      struct vn_image_reqs_cache_entry *cache_entry = hash_entry->data;
      list_del(&cache_entry->head);
      vk_free(alloc, cache_entry);
   }
   assert(list_is_empty(&dev->image_reqs_cache.lru));

   _mesa_hash_table_destroy(cache->ht, NULL);

   simple_mtx_destroy(&cache->mutex);

   if (VN_DEBUG(CACHE))
      vn_image_cache_debug_dump(cache);
}

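/* Look up cached memory requirements by key. On a hit, copy all cached
 * planes into img->requirements and refresh the entry's LRU position.
 */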
static bool
vn_image_init_reqs_from_cache(struct vn_device *dev,
                              struct vn_image *img,
                              uint8_t *key)
{
   struct vn_image_reqs_cache *cache = &dev->image_reqs_cache;

   assert(cache->ht);

   simple_mtx_lock(&cache->mutex);
   struct hash_entry *hash_entry = _mesa_hash_table_search(cache->ht, key);
   if (hash_entry) {
      struct vn_image_reqs_cache_entry *cache_entry = hash_entry->data;
      for (uint32_t i = 0; i < cache_entry->plane_count; i++)
         img->requirements[i] = cache_entry->requirements[i];
      list_move_to(&cache_entry->head, &dev->image_reqs_cache.lru);
      p_atomic_inc(&cache->debug.cache_hit_count);
   } else {
      p_atomic_inc(&cache->debug.cache_miss_count);
   }
   simple_mtx_unlock(&cache->mutex);

   return !!hash_entry;
}

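/* Same lookup as vn_image_init_reqs_from_cache, but return a pointer to
 * the cached requirements of a single plane instead of filling a vn_image.
 */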
static struct vn_image_memory_requirements *
vn_image_get_reqs_from_cache(struct vn_device *dev,
                             uint8_t *key,
                             uint32_t plane)
{
   struct vn_image_memory_requirements *requirements = NULL;
   struct vn_image_reqs_cache *cache = &dev->image_reqs_cache;

   assert(cache->ht);

   simple_mtx_lock(&cache->mutex);
   struct hash_entry *hash_entry = _mesa_hash_table_search(cache->ht, key);
   if (hash_entry) {
      struct vn_image_reqs_cache_entry *cache_entry = hash_entry->data;
      requirements = &cache_entry->requirements[plane];
      list_move_to(&cache_entry->head, &dev->image_reqs_cache.lru);
      p_atomic_inc(&cache->debug.cache_hit_count);
   } else {
      p_atomic_inc(&cache->debug.cache_miss_count);
   }
   simple_mtx_unlock(&cache->mutex);

   return requirements;
}

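/* Insert the per-plane requirements under the given key. When the cache is
 * at capacity, the least recently used entry is evicted and its allocation
 * is reused for the new entry.
 */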
static void
vn_image_store_reqs_in_cache(struct vn_device *dev,
                             uint8_t *key,
                             uint32_t plane_count,
                             struct vn_image_memory_requirements *requirements)
{
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;
   struct vn_image_reqs_cache *cache = &dev->image_reqs_cache;
   struct vn_image_reqs_cache_entry *cache_entry;

   assert(cache->ht);

   simple_mtx_lock(&cache->mutex);

   /* Check if entry was added before lock */
   if (_mesa_hash_table_search(cache->ht, key)) {
      simple_mtx_unlock(&cache->mutex);
      return;
   }

   if (_mesa_hash_table_num_entries(cache->ht) ==
       IMAGE_REQS_CACHE_MAX_ENTRIES) {
      /* Evict/use the last entry in the lru list for this new entry */
      cache_entry =
         list_last_entry(&cache->lru, struct vn_image_reqs_cache_entry, head);

      _mesa_hash_table_remove_key(cache->ht, cache_entry->key);
      list_del(&cache_entry->head);
   } else {
      cache_entry = vk_zalloc(alloc, sizeof(*cache_entry), VN_DEFAULT_ALIGN,
                              VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (!cache_entry) {
         simple_mtx_unlock(&cache->mutex);
         return;
      }
   }

   for (uint32_t i = 0; i < plane_count; i++)
      cache_entry->requirements[i] = requirements[i];

   memcpy(cache_entry->key, key, SHA1_DIGEST_LENGTH);
   cache_entry->plane_count = plane_count;

   _mesa_hash_table_insert(dev->image_reqs_cache.ht, cache_entry->key,
                           cache_entry);
   list_add(&cache_entry->head, &cache->lru);

   simple_mtx_unlock(&cache->mutex);
}

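/* Query the renderer for the memory requirements of each plane and store
 * them in img->requirements. Non-disjoint images have a single plane.
 */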
static void
vn_image_init_memory_requirements(struct vn_image *img,
                                  struct vn_device *dev,
                                  uint32_t plane_count)
{
   assert(plane_count <= ARRAY_SIZE(img->requirements));

   for (uint32_t i = 0; i < plane_count; i++) {
      img->requirements[i].memory.sType =
         VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
      img->requirements[i].memory.pNext = &img->requirements[i].dedicated;
      img->requirements[i].dedicated.sType =
         VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
      img->requirements[i].dedicated.pNext = NULL;
   }

   VkDevice dev_handle = vn_device_to_handle(dev);
   VkImage img_handle = vn_image_to_handle(img);
   if (plane_count == 1) {
      vn_call_vkGetImageMemoryRequirements2(
         dev->primary_ring, dev_handle,
         &(VkImageMemoryRequirementsInfo2){
            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
            .image = img_handle,
         },
         &img->requirements[0].memory);

      /* AHB backed image requires dedicated allocation */
      if (img->deferred_info) {
         img->requirements[0].dedicated.prefersDedicatedAllocation = VK_TRUE;
         img->requirements[0].dedicated.requiresDedicatedAllocation = VK_TRUE;
      }
   } else {
      for (uint32_t i = 0; i < plane_count; i++) {
         vn_call_vkGetImageMemoryRequirements2(
            dev->primary_ring, dev_handle,
            &(VkImageMemoryRequirementsInfo2){
               .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
               .pNext =
                  &(VkImagePlaneMemoryRequirementsInfo){
                     .sType =
                        VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
                     .planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT << i,
                  },
               .image = img_handle,
            },
            &img->requirements[i].memory);
      }
   }
}

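/* Deep-copy the create info so that actual image creation can be deferred,
 * e.g. for AHB images where the final format is only known later.
 */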
static VkResult
vn_image_deferred_info_init(struct vn_image *img,
                            const VkImageCreateInfo *create_info,
                            const VkAllocationCallbacks *alloc)
{
   struct vn_image_create_deferred_info *info = NULL;
   VkBaseOutStructure *dst = NULL;

   info = vk_zalloc(alloc, sizeof(*info), VN_DEFAULT_ALIGN,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!info)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   info->create = *create_info;
   dst = (void *)&info->create;

   vk_foreach_struct_const(src, create_info->pNext) {
      void *pnext = NULL;
      switch (src->sType) {
      case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO: {
         /* 12.3. Images
          *
          * If viewFormatCount is zero, pViewFormats is ignored and the image
          * is created as if the VkImageFormatListCreateInfo structure were
          * not included in the pNext chain of VkImageCreateInfo.
          */
         if (!((const VkImageFormatListCreateInfo *)src)->viewFormatCount)
            break;

         memcpy(&info->list, src, sizeof(info->list));
         pnext = &info->list;

         /* need a deep copy for view formats array */
         const size_t size = sizeof(VkFormat) * info->list.viewFormatCount;
         VkFormat *view_formats = vk_zalloc(
            alloc, size, VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
         if (!view_formats) {
            vk_free(alloc, info);
            return VK_ERROR_OUT_OF_HOST_MEMORY;
         }

         memcpy(view_formats,
                ((const VkImageFormatListCreateInfo *)src)->pViewFormats,
                size);
         info->list.pViewFormats = view_formats;
      } break;
      case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
         memcpy(&info->stencil, src, sizeof(info->stencil));
         pnext = &info->stencil;
         break;
      case VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID: {
         const uint32_t drm_format =
            (uint32_t)((const VkExternalFormatANDROID *)src)->externalFormat;
         if (drm_format) {
            info->create.format =
               vn_android_drm_format_to_vk_format(drm_format);
            info->from_external_format = true;
         }
      } break;
      case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
         img->wsi.is_wsi = true;
         break;
      default:
         break;
      }

      if (pnext) {
         dst->pNext = pnext;
         dst = pnext;
      }
   }
   dst->pNext = NULL;

   img->deferred_info = info;

   return VK_SUCCESS;
}

static void
vn_image_deferred_info_fini(struct vn_image *img,
                            const VkAllocationCallbacks *alloc)
{
   if (!img->deferred_info)
      return;

   if (img->deferred_info->list.pViewFormats)
      vk_free(alloc, (void *)img->deferred_info->list.pViewFormats);

   vk_free(alloc, img->deferred_info);
}

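/* On a requirements-cache hit, the image can be created asynchronously
 * since nothing needs to be read back from the renderer; otherwise the
 * creation is synchronous and the queried requirements populate the cache.
 */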
static VkResult
vn_image_init(struct vn_device *dev,
              const VkImageCreateInfo *create_info,
              struct vn_image *img)
{
   VkDevice device = vn_device_to_handle(dev);
   VkImage image = vn_image_to_handle(img);
   VkResult result = VK_SUCCESS;

   img->sharing_mode = create_info->sharingMode;

   /* Check if mem reqs in cache. If found, make async call */
   uint8_t key[SHA1_DIGEST_LENGTH] = { 0 };
   const bool cacheable = vn_image_get_image_reqs_key(dev, create_info, key);

   if (cacheable && vn_image_init_reqs_from_cache(dev, img, key)) {
      vn_async_vkCreateImage(dev->primary_ring, device, create_info, NULL,
                             &image);
      return VK_SUCCESS;
   }

   result = vn_call_vkCreateImage(dev->primary_ring, device, create_info,
                                  NULL, &image);
   if (result != VK_SUCCESS)
      return result;

   const uint32_t plane_count = vn_image_get_plane_count(create_info);
   vn_image_init_memory_requirements(img, dev, plane_count);

   if (cacheable)
      vn_image_store_reqs_in_cache(dev, key, plane_count, img->requirements);

   return VK_SUCCESS;
}

VkResult
vn_image_create(struct vn_device *dev,
                const VkImageCreateInfo *create_info,
                const VkAllocationCallbacks *alloc,
                struct vn_image **out_img)
{
   struct vn_image *img =
      vk_image_create(&dev->base.base, create_info, alloc, sizeof(*img));
   if (!img)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   vn_object_set_id(img, vn_get_next_obj_id(), VK_OBJECT_TYPE_IMAGE);

   VkResult result = vn_image_init(dev, create_info, img);
   if (result != VK_SUCCESS) {
      vk_image_destroy(&dev->base.base, alloc, &img->base.base);
      return result;
   }

   *out_img = img;

   return VK_SUCCESS;
}

VkResult
vn_image_init_deferred(struct vn_device *dev,
                       const VkImageCreateInfo *create_info,
                       struct vn_image *img)
{
   VkResult result = vn_image_init(dev, create_info, img);
   img->deferred_info->initialized = result == VK_SUCCESS;
   return result;
}

static VkResult
vn_image_create_deferred(struct vn_device *dev,
                         const VkImageCreateInfo *create_info,
                         const VkAllocationCallbacks *alloc,
                         struct vn_image **out_img)
{
   struct vn_image *img =
      vk_image_create(&dev->base.base, create_info, alloc, sizeof(*img));
   if (!img)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   vn_object_set_id(img, vn_get_next_obj_id(), VK_OBJECT_TYPE_IMAGE);

   VkResult result = vn_image_deferred_info_init(img, create_info, alloc);
   if (result != VK_SUCCESS) {
      vk_image_destroy(&dev->base.base, alloc, &img->base.base);
      return result;
   }

   *out_img = img;

   return VK_SUCCESS;
}

struct vn_image_create_info {
   VkImageCreateInfo create;
   VkExternalMemoryImageCreateInfo external;
   VkImageFormatListCreateInfo format_list;
   VkImageStencilUsageCreateInfo stencil;
   VkImageDrmFormatModifierListCreateInfoEXT modifier_list;
   VkImageDrmFormatModifierExplicitCreateInfoEXT modifier_explicit;
};

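/* Shallow-copy the create info chain into local_info, overriding the
 * external memory handle types with the renderer's handle type.
 */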
static const VkImageCreateInfo *
vn_image_fix_create_info(
   const VkImageCreateInfo *create_info,
   const VkExternalMemoryHandleTypeFlagBits renderer_handle_type,
   struct vn_image_create_info *local_info)
{
   local_info->create = *create_info;
   VkBaseOutStructure *cur = (void *)&local_info->create;

   vk_foreach_struct_const(src, create_info->pNext) {
      void *next = NULL;
      switch (src->sType) {
      case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
         memcpy(&local_info->external, src, sizeof(local_info->external));
         local_info->external.handleTypes = renderer_handle_type;
         next = &local_info->external;
         break;
      case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
         memcpy(&local_info->format_list, src,
                sizeof(local_info->format_list));
         next = &local_info->format_list;
         break;
      case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
         memcpy(&local_info->stencil, src, sizeof(local_info->stencil));
         next = &local_info->stencil;
         break;
      case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT:
         memcpy(&local_info->modifier_list, src,
                sizeof(local_info->modifier_list));
         next = &local_info->modifier_list;
         break;
      case VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT:
         memcpy(&local_info->modifier_explicit, src,
                sizeof(local_info->modifier_explicit));
         next = &local_info->modifier_explicit;
         break;
      default:
         break;
      }

      if (next) {
         cur->pNext = next;
         cur = next;
      }
   }

   cur->pNext = NULL;

   return &local_info->create;
}

VkResult
vn_CreateImage(VkDevice device,
               const VkImageCreateInfo *pCreateInfo,
               const VkAllocationCallbacks *pAllocator,
               VkImage *pImage)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;
   const VkExternalMemoryHandleTypeFlagBits renderer_handle_type =
      dev->physical_device->external_memory.renderer_handle_type;
   struct vn_image *img;
   VkResult result;

   const struct wsi_image_create_info *wsi_info = NULL;
   const VkNativeBufferANDROID *anb_info = NULL;
   const VkImageSwapchainCreateInfoKHR *swapchain_info = NULL;
   const VkExternalMemoryImageCreateInfo *external_info = NULL;
   bool ahb_info = false;

   vk_foreach_struct_const(pnext, pCreateInfo->pNext) {
      switch ((uint32_t)pnext->sType) {
      case VK_STRUCTURE_TYPE_WSI_IMAGE_CREATE_INFO_MESA:
         wsi_info = (void *)pnext;
         break;
      case VK_STRUCTURE_TYPE_NATIVE_BUFFER_ANDROID:
         anb_info = (void *)pnext;
         break;
      case VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR:
         swapchain_info = (void *)pnext;
         if (!swapchain_info->swapchain)
            swapchain_info = NULL;
         break;
      case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO:
         external_info = (void *)pnext;
         if (!external_info->handleTypes)
            external_info = NULL;
         else if (
            external_info->handleTypes ==
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID)
            ahb_info = true;
         break;
      default:
         break;
      }
   }

   /* No need to fix external handle type for:
    * - common wsi image: dma_buf is hard-coded in wsi_configure_native_image
    * - common wsi image alias: it aligns with wsi_info on external handle
    * - Android wsi image: VK_ANDROID_native_buffer involves no external info
    * - AHB external image: deferred creation reconstructs external info
    *
    * Must fix the external handle type for:
    * - non-AHB external image requesting handle types different from renderer
    *
    * Will have to fix more when renderer handle type is no longer dma_buf.
    */
   if (wsi_info) {
      assert(external_info->handleTypes == renderer_handle_type);
      result = vn_wsi_create_image(dev, pCreateInfo, wsi_info, alloc, &img);
   } else if (anb_info) {
      result =
         vn_android_image_from_anb(dev, pCreateInfo, anb_info, alloc, &img);
   } else if (ahb_info) {
      result = vn_image_create_deferred(dev, pCreateInfo, alloc, &img);
   } else if (swapchain_info) {
#if DETECT_OS_ANDROID
      result = vn_image_create_deferred(dev, pCreateInfo, alloc, &img);
#else
      result = vn_wsi_create_image_from_swapchain(
         dev, pCreateInfo, swapchain_info, alloc, &img);
#endif
   } else {
      struct vn_image_create_info local_info;
      if (external_info &&
          external_info->handleTypes != renderer_handle_type) {
         pCreateInfo = vn_image_fix_create_info(
            pCreateInfo, renderer_handle_type, &local_info);
      }

      result = vn_image_create(dev, pCreateInfo, alloc, &img);
   }

   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   *pImage = vn_image_to_handle(img);
   return VK_SUCCESS;
}

void
vn_DestroyImage(VkDevice device,
                VkImage image,
                const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_image *img = vn_image_from_handle(image);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!img)
      return;

   if (img->wsi.memory && img->wsi.memory_owned) {
      VkDeviceMemory mem_handle = vn_device_memory_to_handle(img->wsi.memory);
      vn_FreeMemory(device, mem_handle, pAllocator);
   }

   /* must not ask renderer to destroy uninitialized deferred image */
   if (!img->deferred_info || img->deferred_info->initialized)
      vn_async_vkDestroyImage(dev->primary_ring, device, image, NULL);

   vn_image_deferred_info_fini(img, alloc);

   vk_image_destroy(&dev->base.base, alloc, &img->base.base);
}

void
vn_GetImageMemoryRequirements2(VkDevice device,
                               const VkImageMemoryRequirementsInfo2 *pInfo,
                               VkMemoryRequirements2 *pMemoryRequirements)
{
   const struct vn_image *img = vn_image_from_handle(pInfo->image);

   uint32_t plane = 0;
   const VkImagePlaneMemoryRequirementsInfo *plane_info =
      vk_find_struct_const(pInfo->pNext,
                           IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO);
   if (plane_info)
      plane = vn_image_get_plane(plane_info->planeAspect);

   vn_image_fill_reqs(&img->requirements[plane], pMemoryRequirements);
}

void
vn_GetImageSparseMemoryRequirements2(
   VkDevice device,
   const VkImageSparseMemoryRequirementsInfo2 *pInfo,
   uint32_t *pSparseMemoryRequirementCount,
   VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
{
   struct vn_device *dev = vn_device_from_handle(device);

   /* see vn_GetPhysicalDeviceSparseImageFormatProperties2 */
   if (dev->physical_device->sparse_binding_disabled) {
      *pSparseMemoryRequirementCount = 0;
      return;
   }

   /* TODO local or per-device cache */
   vn_call_vkGetImageSparseMemoryRequirements2(
      dev->primary_ring, device, pInfo, pSparseMemoryRequirementCount,
      pSparseMemoryRequirements);
}

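/* Resolve missing memory handles for wsi images, on Android from the bind
 * info and elsewhere from the swapchain image, before forwarding the binds
 * to the renderer.
 */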
static VkResult
vn_image_bind_wsi_memory(struct vn_device *dev,
                         uint32_t count,
                         const VkBindImageMemoryInfo *infos)
{
   STACK_ARRAY(VkBindImageMemoryInfo, local_infos, count);
   typed_memcpy(local_infos, infos, count);

   for (uint32_t i = 0; i < count; i++) {
      VkBindImageMemoryInfo *info = &local_infos[i];
      struct vn_image *img = vn_image_from_handle(info->image);
      struct vn_device_memory *mem =
         vn_device_memory_from_handle(info->memory);

      if (!mem) {
#if DETECT_OS_ANDROID
         mem = vn_android_get_wsi_memory_from_bind_info(dev, info);
         if (!mem) {
            STACK_ARRAY_FINISH(local_infos);
            return VK_ERROR_OUT_OF_HOST_MEMORY;
         }
#else
         const VkBindImageMemorySwapchainInfoKHR *swapchain_info =
            vk_find_struct_const(info->pNext,
                                 BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR);
         assert(img->wsi.is_wsi && swapchain_info);

         struct vn_image *swapchain_img =
            vn_image_from_handle(wsi_common_get_image(
               swapchain_info->swapchain, swapchain_info->imageIndex));
         mem = swapchain_img->wsi.memory;
#endif
         info->memory = vn_device_memory_to_handle(mem);
      }
      assert(mem && info->memory != VK_NULL_HANDLE);

#if DETECT_OS_ANDROID
      assert(img->wsi.memory);
#else
      assert(!img->wsi.memory);
      img->wsi.memory = mem;
#endif
   }

   vn_async_vkBindImageMemory2(dev->primary_ring, vn_device_to_handle(dev),
                               count, local_infos);

   STACK_ARRAY_FINISH(local_infos);

   return VK_SUCCESS;
}

VkResult
vn_BindImageMemory2(VkDevice device,
                    uint32_t bindInfoCount,
                    const VkBindImageMemoryInfo *pBindInfos)
{
   struct vn_device *dev = vn_device_from_handle(device);

   for (uint32_t i = 0; i < bindInfoCount; i++) {
      struct vn_image *img = vn_image_from_handle(pBindInfos[i].image);
      if (img->wsi.is_wsi)
         return vn_image_bind_wsi_memory(dev, bindInfoCount, pBindInfos);
   }

   vn_async_vkBindImageMemory2(dev->primary_ring, device, bindInfoCount,
                               pBindInfos);
   return VK_SUCCESS;
}

VkResult
vn_GetImageDrmFormatModifierPropertiesEXT(
   VkDevice device,
   VkImage image,
   VkImageDrmFormatModifierPropertiesEXT *pProperties)
{
   struct vn_device *dev = vn_device_from_handle(device);

   /* TODO local cache */
   return vn_call_vkGetImageDrmFormatModifierPropertiesEXT(
      dev->primary_ring, device, image, pProperties);
}

void
vn_GetImageSubresourceLayout(VkDevice device,
                             VkImage image,
                             const VkImageSubresource *pSubresource,
                             VkSubresourceLayout *pLayout)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_image *img = vn_image_from_handle(image);

   /* override aspect mask for wsi/ahb images with tiling modifier */
   VkImageSubresource local_subresource;
   if ((img->wsi.is_wsi && img->wsi.tiling_override ==
                              VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) ||
       img->deferred_info) {
      VkImageAspectFlags aspect = pSubresource->aspectMask;
      switch (aspect) {
      case VK_IMAGE_ASPECT_COLOR_BIT:
      case VK_IMAGE_ASPECT_DEPTH_BIT:
      case VK_IMAGE_ASPECT_STENCIL_BIT:
      case VK_IMAGE_ASPECT_PLANE_0_BIT:
         aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT;
         break;
      case VK_IMAGE_ASPECT_PLANE_1_BIT:
         aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT;
         break;
      case VK_IMAGE_ASPECT_PLANE_2_BIT:
         aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;
         break;
      default:
         break;
      }

      /* only handle supported aspect override */
      if (aspect != pSubresource->aspectMask) {
         local_subresource = *pSubresource;
         local_subresource.aspectMask = aspect;
         pSubresource = &local_subresource;
      }
   }

   /* TODO local cache */
   vn_call_vkGetImageSubresourceLayout(dev->primary_ring, device, image,
                                       pSubresource, pLayout);
}

/* image view commands */

VkResult
vn_CreateImageView(VkDevice device,
                   const VkImageViewCreateInfo *pCreateInfo,
                   const VkAllocationCallbacks *pAllocator,
                   VkImageView *pView)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_image *img = vn_image_from_handle(pCreateInfo->image);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   VkImageViewCreateInfo local_info;
   if (img->deferred_info && img->deferred_info->from_external_format) {
      assert(pCreateInfo->format == VK_FORMAT_UNDEFINED);

      local_info = *pCreateInfo;
      local_info.format = img->deferred_info->create.format;
      pCreateInfo = &local_info;

      assert(pCreateInfo->format != VK_FORMAT_UNDEFINED);
   }

   struct vn_image_view *view =
      vk_zalloc(alloc, sizeof(*view), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!view)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&view->base, VK_OBJECT_TYPE_IMAGE_VIEW, &dev->base);
   view->image = img;

   VkImageView view_handle = vn_image_view_to_handle(view);
   vn_async_vkCreateImageView(dev->primary_ring, device, pCreateInfo, NULL,
                              &view_handle);

   *pView = view_handle;

   return VK_SUCCESS;
}

void
vn_DestroyImageView(VkDevice device,
                    VkImageView imageView,
                    const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_image_view *view = vn_image_view_from_handle(imageView);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!view)
      return;

   vn_async_vkDestroyImageView(dev->primary_ring, device, imageView, NULL);

   vn_object_base_fini(&view->base);
   vk_free(alloc, view);
}

/* sampler commands */

VkResult
vn_CreateSampler(VkDevice device,
                 const VkSamplerCreateInfo *pCreateInfo,
                 const VkAllocationCallbacks *pAllocator,
                 VkSampler *pSampler)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_sampler *sampler =
      vk_zalloc(alloc, sizeof(*sampler), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!sampler)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&sampler->base, VK_OBJECT_TYPE_SAMPLER, &dev->base);

   VkSampler sampler_handle = vn_sampler_to_handle(sampler);
   vn_async_vkCreateSampler(dev->primary_ring, device, pCreateInfo, NULL,
                            &sampler_handle);

   *pSampler = sampler_handle;

   return VK_SUCCESS;
}

void
vn_DestroySampler(VkDevice device,
                  VkSampler _sampler,
                  const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_sampler *sampler = vn_sampler_from_handle(_sampler);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!sampler)
      return;

   vn_async_vkDestroySampler(dev->primary_ring, device, _sampler, NULL);

   vn_object_base_fini(&sampler->base);
   vk_free(alloc, sampler);
}

/* sampler YCbCr conversion commands */

VkResult
vn_CreateSamplerYcbcrConversion(
   VkDevice device,
   const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkSamplerYcbcrConversion *pYcbcrConversion)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;
   const VkExternalFormatANDROID *ext_info =
      vk_find_struct_const(pCreateInfo->pNext, EXTERNAL_FORMAT_ANDROID);

   VkSamplerYcbcrConversionCreateInfo local_info;
   if (ext_info && ext_info->externalFormat) {
      assert(pCreateInfo->format == VK_FORMAT_UNDEFINED);

      local_info = *pCreateInfo;
      local_info.format =
         vn_android_drm_format_to_vk_format(ext_info->externalFormat);
      local_info.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
      local_info.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
      local_info.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
      local_info.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
      pCreateInfo = &local_info;

      assert(pCreateInfo->format != VK_FORMAT_UNDEFINED);
   }

   struct vn_sampler_ycbcr_conversion *conv =
      vk_zalloc(alloc, sizeof(*conv), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!conv)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&conv->base, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
                       &dev->base);

   VkSamplerYcbcrConversion conv_handle =
      vn_sampler_ycbcr_conversion_to_handle(conv);
   vn_async_vkCreateSamplerYcbcrConversion(dev->primary_ring, device,
                                           pCreateInfo, NULL, &conv_handle);

   *pYcbcrConversion = conv_handle;

   return VK_SUCCESS;
}

void
vn_DestroySamplerYcbcrConversion(VkDevice device,
                                 VkSamplerYcbcrConversion ycbcrConversion,
                                 const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_sampler_ycbcr_conversion *conv =
      vn_sampler_ycbcr_conversion_from_handle(ycbcrConversion);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!conv)
      return;

   vn_async_vkDestroySamplerYcbcrConversion(dev->primary_ring, device,
                                            ycbcrConversion, NULL);

   vn_object_base_fini(&conv->base);
   vk_free(alloc, conv);
}

void
vn_GetDeviceImageMemoryRequirements(
   VkDevice device,
   const VkDeviceImageMemoryRequirements *pInfo,
   VkMemoryRequirements2 *pMemoryRequirements)
{
   struct vn_device *dev = vn_device_from_handle(device);

   uint8_t key[SHA1_DIGEST_LENGTH] = { 0 };
   const bool cacheable =
      vn_image_get_image_reqs_key(dev, pInfo->pCreateInfo, key);

   if (cacheable) {
      uint32_t plane = 0;
      if (pInfo->pCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT)
         plane = vn_image_get_plane(pInfo->planeAspect);

      const struct vn_image_memory_requirements *cached_reqs =
         vn_image_get_reqs_from_cache(dev, key, plane);
      if (cached_reqs) {
         vn_image_fill_reqs(cached_reqs, pMemoryRequirements);
         return;
      }

      const uint32_t plane_count =
         vn_image_get_plane_count(pInfo->pCreateInfo);
      STACK_ARRAY(VkDeviceImageMemoryRequirements, req_info, plane_count);
      STACK_ARRAY(struct vn_image_memory_requirements, reqs, plane_count);

      /* Retrieve reqs for all planes so the cache entry is complete */
      for (uint32_t i = 0; i < plane_count; i++) {
         req_info[i].sType =
            VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS;
         req_info[i].pNext = NULL;
         req_info[i].pCreateInfo = pInfo->pCreateInfo;
         req_info[i].planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT << i;

         reqs[i].memory.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
         reqs[i].memory.pNext = &reqs[i].dedicated;
         reqs[i].dedicated.sType =
            VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
         reqs[i].dedicated.pNext = NULL;

         vn_call_vkGetDeviceImageMemoryRequirements(
            dev->primary_ring, device, &req_info[i], &reqs[i].memory);
      }
      vn_image_fill_reqs(&reqs[plane], pMemoryRequirements);
      vn_image_store_reqs_in_cache(dev, key, plane_count, reqs);

      STACK_ARRAY_FINISH(req_info);
      STACK_ARRAY_FINISH(reqs);
   } else {
      vn_call_vkGetDeviceImageMemoryRequirements(dev->primary_ring, device,
                                                 pInfo, pMemoryRequirements);
   }
}

void
vn_GetDeviceImageSparseMemoryRequirements(
   VkDevice device,
   const VkDeviceImageMemoryRequirements *pInfo,
   uint32_t *pSparseMemoryRequirementCount,
   VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
{
   struct vn_device *dev = vn_device_from_handle(device);

   /* see vn_GetPhysicalDeviceSparseImageFormatProperties2 */
   if (dev->physical_device->sparse_binding_disabled) {
      *pSparseMemoryRequirementCount = 0;
      return;
   }

   /* TODO per-device cache */
   vn_call_vkGetDeviceImageSparseMemoryRequirements(
      dev->primary_ring, device, pInfo, pSparseMemoryRequirementCount,
      pSparseMemoryRequirements);
}

void
vn_GetDeviceImageSubresourceLayoutKHR(
   VkDevice device,
   const VkDeviceImageSubresourceInfoKHR *pInfo,
   VkSubresourceLayout2KHR *pLayout)
{
   struct vn_device *dev = vn_device_from_handle(device);

   /* TODO per-device cache */
   vn_call_vkGetDeviceImageSubresourceLayoutKHR(
      dev->primary_ring, device, pInfo, pLayout);
}

void
vn_GetImageSubresourceLayout2KHR(VkDevice device,
                                 VkImage image,
                                 const VkImageSubresource2KHR *pSubresource,
                                 VkSubresourceLayout2KHR *pLayout)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_image *img = vn_image_from_handle(image);

   /* override aspect mask for wsi/ahb images with tiling modifier */
   VkImageSubresource2KHR local_subresource;
   if ((img->wsi.is_wsi && img->wsi.tiling_override ==
                              VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT) ||
       img->deferred_info) {
      VkImageAspectFlags aspect = pSubresource->imageSubresource.aspectMask;
      switch (aspect) {
      case VK_IMAGE_ASPECT_COLOR_BIT:
      case VK_IMAGE_ASPECT_DEPTH_BIT:
      case VK_IMAGE_ASPECT_STENCIL_BIT:
      case VK_IMAGE_ASPECT_PLANE_0_BIT:
         aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT;
         break;
      case VK_IMAGE_ASPECT_PLANE_1_BIT:
         aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT;
         break;
      case VK_IMAGE_ASPECT_PLANE_2_BIT:
         aspect = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;
         break;
      default:
         break;
      }

      /* only handle supported aspect override */
      if (aspect != pSubresource->imageSubresource.aspectMask) {
         local_subresource = *pSubresource;
         local_subresource.imageSubresource.aspectMask = aspect;
         pSubresource = &local_subresource;
      }
   }

   vn_call_vkGetImageSubresourceLayout2KHR(
      dev->primary_ring, device, image, pSubresource, pLayout);
}