1 /*
2 * Copyright 2024 Autodesk, Inc.
3 *
4 * SPDX-License-Identifier: MIT
5 */
6
7 #include "vk_instance.h"
8 #include "vk_physical_device.h"
9 #include "vk_util.h"
10
11 #include "util/timespec.h"
12 #include "util/u_vector.h"
13
14 #include "wsi_common_entrypoints.h"
15 #include "wsi_common_private.h"
16 #include "wsi_common_metal_layer.h"
17
18 #include "vulkan/vulkan_core.h"
19
20 #include <assert.h>
21
/* Per-physical-device state for the Metal WSI platform.  One instance is
 * allocated by wsi_metal_init_wsi() and installed into
 * wsi_device->wsi[VK_ICD_WSI_PLATFORM_METAL]. */
struct wsi_metal {
   struct wsi_interface base;

   /* Back-pointer to the owning wsi_device. */
   struct wsi_device *wsi;

   /* Allocator used to create (and later free) this struct. */
   const VkAllocationCallbacks *alloc;
   VkPhysicalDevice physical_device;
};
30
31 static VkResult
wsi_metal_surface_get_support(VkIcdSurfaceBase * surface,struct wsi_device * wsi_device,uint32_t queueFamilyIndex,VkBool32 * pSupported)32 wsi_metal_surface_get_support(VkIcdSurfaceBase *surface,
33 struct wsi_device *wsi_device,
34 uint32_t queueFamilyIndex,
35 VkBool32* pSupported)
36 {
37 *pSupported = true;
38 return VK_SUCCESS;
39 }
40
/* Present modes exposed by this backend; order is the order reported to the
 * application by wsi_metal_surface_get_present_modes(). */
static const VkPresentModeKHR present_modes[] = {
   VK_PRESENT_MODE_IMMEDIATE_KHR,
   VK_PRESENT_MODE_FIFO_KHR,
};
45
46 static VkResult
wsi_metal_surface_get_capabilities(VkIcdSurfaceBase * surface,struct wsi_device * wsi_device,VkSurfaceCapabilitiesKHR * caps)47 wsi_metal_surface_get_capabilities(VkIcdSurfaceBase *surface,
48 struct wsi_device *wsi_device,
49 VkSurfaceCapabilitiesKHR* caps)
50 {
51 VkIcdSurfaceMetal *metal_surface = (VkIcdSurfaceMetal *)surface;
52 assert(metal_surface->pLayer);
53
54 wsi_metal_layer_size(metal_surface->pLayer,
55 &caps->currentExtent.width,
56 &caps->currentExtent.height);
57
58 caps->minImageCount = 2;
59 caps->maxImageCount = 3;
60
61 caps->minImageExtent = (VkExtent2D) { 1, 1 };
62 caps->maxImageExtent = (VkExtent2D) {
63 wsi_device->maxImageDimension2D,
64 wsi_device->maxImageDimension2D,
65 };
66
67 caps->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
68 caps->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
69 caps->maxImageArrayLayers = 1;
70
71 caps->supportedCompositeAlpha =
72 VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
73 VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;
74
75 caps->supportedUsageFlags = wsi_caps_get_image_usage();
76
77 VK_FROM_HANDLE(vk_physical_device, pdevice, wsi_device->pdevice);
78 if (pdevice->supported_extensions.EXT_attachment_feedback_loop_layout)
79 caps->supportedUsageFlags |= VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT;
80
81 return VK_SUCCESS;
82 }
83
/* Extended capability query: fills the base VkSurfaceCapabilitiesKHR via
 * wsi_metal_surface_get_capabilities(), then walks the output pNext chain
 * and fills every extension struct this backend knows about.  Unknown
 * structs are left untouched. */
static VkResult
wsi_metal_surface_get_capabilities2(VkIcdSurfaceBase *surface,
                                    struct wsi_device *wsi_device,
                                    const void *info_next,
                                    VkSurfaceCapabilities2KHR* caps)
{
   assert(caps->sType == VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR);

   /* Optional input struct naming the present mode the query is about
    * (VK_EXT_surface_maintenance1); may be NULL. */
   const VkSurfacePresentModeEXT *present_mode =
      (const VkSurfacePresentModeEXT *)vk_find_struct_const(info_next, SURFACE_PRESENT_MODE_EXT);

   VkResult result =
      wsi_metal_surface_get_capabilities(surface, wsi_device,
                                         &caps->surfaceCapabilities);

   vk_foreach_struct(ext, caps->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR: {
         /* Protected content is not supported by this backend. */
         VkSurfaceProtectedCapabilitiesKHR *protected = (void *)ext;
         protected->supportsProtected = VK_FALSE;
         break;
      }

      case VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT: {
         /* TODO: support scaling */
         VkSurfacePresentScalingCapabilitiesEXT *scaling =
            (VkSurfacePresentScalingCapabilitiesEXT *)ext;
         scaling->supportedPresentScaling = 0;
         scaling->supportedPresentGravityX = 0;
         scaling->supportedPresentGravityY = 0;
         scaling->minScaledImageExtent = caps->surfaceCapabilities.minImageExtent;
         scaling->maxScaledImageExtent = caps->surfaceCapabilities.maxImageExtent;
         break;
      }

      case VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT: {
         /* Unsupported, just report the input present mode. */
         VkSurfacePresentModeCompatibilityEXT *compat =
            (VkSurfacePresentModeCompatibilityEXT *)ext;
         if (compat->pPresentModes) {
            if (compat->presentModeCount) {
               assert(present_mode);
               compat->pPresentModes[0] = present_mode->presentMode;
               compat->presentModeCount = 1;
            }
         } else {
            /* Count-only query: still exactly one compatible mode. */
            if (!present_mode)
               wsi_common_vk_warn_once("Use of VkSurfacePresentModeCompatibilityEXT "
                                       "without a VkSurfacePresentModeEXT set. This is an "
                                       "application bug.\n");
            compat->presentModeCount = 1;
         }
         break;
      }

      default:
         /* Ignored */
         break;
      }
   }

   return result;
}
147
/* Surface formats this backend can expose; each maps to a MTLPixelFormat in
 * wsi_metal_surface_create_swapchain().  Order is the default preference
 * order reported to applications (see get_sorted_vk_formats()). */
static const VkFormat available_surface_formats[] = {
   VK_FORMAT_B8G8R8A8_SRGB,
   VK_FORMAT_B8G8R8A8_UNORM,
   VK_FORMAT_R16G16B16A16_SFLOAT,
   VK_FORMAT_A2R10G10B10_UNORM_PACK32,
   VK_FORMAT_A2B10G10R10_UNORM_PACK32,
};
155
156 static void
get_sorted_vk_formats(bool force_bgra8_unorm_first,VkFormat * sorted_formats)157 get_sorted_vk_formats(bool force_bgra8_unorm_first, VkFormat *sorted_formats)
158 {
159 for (unsigned i = 0; i < ARRAY_SIZE(available_surface_formats); i++)
160 sorted_formats[i] = available_surface_formats[i];
161
162 if (force_bgra8_unorm_first) {
163 for (unsigned i = 0; i < ARRAY_SIZE(available_surface_formats); i++) {
164 if (sorted_formats[i] == VK_FORMAT_B8G8R8A8_UNORM) {
165 sorted_formats[i] = sorted_formats[0];
166 sorted_formats[0] = VK_FORMAT_B8G8R8A8_UNORM;
167 break;
168 }
169 }
170 }
171 }
172
173 static VkResult
wsi_metal_surface_get_formats(VkIcdSurfaceBase * icd_surface,struct wsi_device * wsi_device,uint32_t * pSurfaceFormatCount,VkSurfaceFormatKHR * pSurfaceFormats)174 wsi_metal_surface_get_formats(VkIcdSurfaceBase *icd_surface,
175 struct wsi_device *wsi_device,
176 uint32_t* pSurfaceFormatCount,
177 VkSurfaceFormatKHR* pSurfaceFormats)
178 {
179 VK_OUTARRAY_MAKE_TYPED(VkSurfaceFormatKHR, out, pSurfaceFormats, pSurfaceFormatCount);
180
181 VkFormat sorted_formats[ARRAY_SIZE(available_surface_formats)];
182 get_sorted_vk_formats(wsi_device->force_bgra8_unorm_first, sorted_formats);
183
184 for (unsigned i = 0; i < ARRAY_SIZE(sorted_formats); i++) {
185 vk_outarray_append_typed(VkSurfaceFormatKHR, &out, f) {
186 f->format = sorted_formats[i];
187 f->colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
188 }
189 }
190
191 return vk_outarray_status(&out);
192 }
193
194 static VkResult
wsi_metal_surface_get_formats2(VkIcdSurfaceBase * icd_surface,struct wsi_device * wsi_device,const void * info_next,uint32_t * pSurfaceFormatCount,VkSurfaceFormat2KHR * pSurfaceFormats)195 wsi_metal_surface_get_formats2(VkIcdSurfaceBase *icd_surface,
196 struct wsi_device *wsi_device,
197 const void *info_next,
198 uint32_t* pSurfaceFormatCount,
199 VkSurfaceFormat2KHR* pSurfaceFormats)
200 {
201 VK_OUTARRAY_MAKE_TYPED(VkSurfaceFormat2KHR, out, pSurfaceFormats, pSurfaceFormatCount);
202
203 VkFormat sorted_formats[ARRAY_SIZE(available_surface_formats)];
204 get_sorted_vk_formats(wsi_device->force_bgra8_unorm_first, sorted_formats);
205
206 for (unsigned i = 0; i < ARRAY_SIZE(sorted_formats); i++) {
207 vk_outarray_append_typed(VkSurfaceFormat2KHR, &out, f) {
208 assert(f->sType == VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR);
209 f->surfaceFormat.format = sorted_formats[i];
210 f->surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
211 }
212 }
213
214 return vk_outarray_status(&out);
215 }
216
217 static VkResult
wsi_metal_surface_get_present_modes(VkIcdSurfaceBase * surface,struct wsi_device * wsi_device,uint32_t * pPresentModeCount,VkPresentModeKHR * pPresentModes)218 wsi_metal_surface_get_present_modes(VkIcdSurfaceBase *surface,
219 struct wsi_device *wsi_device,
220 uint32_t* pPresentModeCount,
221 VkPresentModeKHR* pPresentModes)
222 {
223 if (pPresentModes == NULL) {
224 *pPresentModeCount = ARRAY_SIZE(present_modes);
225 return VK_SUCCESS;
226 }
227
228 *pPresentModeCount = MIN2(*pPresentModeCount, ARRAY_SIZE(present_modes));
229 typed_memcpy(pPresentModes, present_modes, *pPresentModeCount);
230
231 return *pPresentModeCount < ARRAY_SIZE(present_modes) ? VK_INCOMPLETE : VK_SUCCESS;
232 }
233
234 static VkResult
wsi_metal_surface_get_present_rectangles(VkIcdSurfaceBase * surface,struct wsi_device * wsi_device,uint32_t * pRectCount,VkRect2D * pRects)235 wsi_metal_surface_get_present_rectangles(VkIcdSurfaceBase *surface,
236 struct wsi_device *wsi_device,
237 uint32_t* pRectCount,
238 VkRect2D* pRects)
239 {
240 VK_OUTARRAY_MAKE_TYPED(VkRect2D, out, pRects, pRectCount);
241
242 vk_outarray_append_typed(VkRect2D, &out, rect) {
243 /* We don't know a size so just return the usual "I don't know." */
244 *rect = (VkRect2D) {
245 .offset = { 0, 0 },
246 .extent = { UINT32_MAX, UINT32_MAX },
247 };
248 }
249
250 return vk_outarray_status(&out);
251 }
252
/* One swapchain image plus the CAMetalDrawable it was acquired with.
 * drawable is set by acquire_next_image and consumed (or cancelled) by
 * queue_present / swapchain destroy. */
struct wsi_metal_image {
   struct wsi_image base;
   CAMetalDrawableBridged *drawable;
};
257
/* Metal swapchain: CPU-mapped wsi images that are blitted into CAMetalLayer
 * drawables at present time. */
struct wsi_metal_swapchain {
   struct wsi_swapchain base;

   VkExtent2D extent;
   VkFormat vk_format;

   struct u_vector modifiers;

   VkPresentModeKHR present_mode;
   bool fifo_ready;

   /* The surface this swapchain was created against (owns the CAMetalLayer). */
   VkIcdSurfaceMetal *surface;

   /* Context used to copy image contents into drawables. */
   struct wsi_metal_layer_blit_context *blit_context;

   /* Round-robin cursor for acquire_next_image. */
   uint32_t current_image_index;
   /* Trailing array of base.image_count images, allocated with the chain. */
   struct wsi_metal_image images[0];
};
VK_DEFINE_NONDISP_HANDLE_CASTS(wsi_metal_swapchain, base.base, VkSwapchainKHR,
                               VK_OBJECT_TYPE_SWAPCHAIN_KHR)
278
279 static struct wsi_image *
wsi_metal_swapchain_get_wsi_image(struct wsi_swapchain * wsi_chain,uint32_t image_index)280 wsi_metal_swapchain_get_wsi_image(struct wsi_swapchain *wsi_chain,
281 uint32_t image_index)
282 {
283 struct wsi_metal_swapchain *chain =
284 (struct wsi_metal_swapchain *)wsi_chain;
285 return &chain->images[image_index].base;
286 }
287
288 static VkResult
wsi_metal_swapchain_acquire_next_image(struct wsi_swapchain * wsi_chain,const VkAcquireNextImageInfoKHR * info,uint32_t * image_index)289 wsi_metal_swapchain_acquire_next_image(struct wsi_swapchain *wsi_chain,
290 const VkAcquireNextImageInfoKHR *info,
291 uint32_t *image_index)
292 {
293 struct wsi_metal_swapchain *chain =
294 (struct wsi_metal_swapchain *)wsi_chain;
295 struct timespec start_time, end_time;
296 struct timespec rel_timeout;
297
298 timespec_from_nsec(&rel_timeout, info->timeout);
299
300 clock_gettime(CLOCK_MONOTONIC, &start_time);
301 timespec_add(&end_time, &rel_timeout, &start_time);
302
303 while (1) {
304 /* Try to acquire an drawable. Unfortunately we might block for up to 1 second. */
305 CAMetalDrawableBridged *drawable = wsi_metal_layer_acquire_drawable(chain->surface->pLayer);
306 if (drawable) {
307 uint32_t i = (chain->current_image_index++) % chain->base.image_count;
308 *image_index = i;
309 chain->images[i].drawable = drawable;
310 return VK_SUCCESS;
311 }
312
313 /* Check for timeout. */
314 struct timespec current_time;
315 clock_gettime(CLOCK_MONOTONIC, ¤t_time);
316 if (timespec_after(¤t_time, &end_time))
317 return VK_NOT_READY;
318 }
319 }
320
321 static VkResult
wsi_metal_swapchain_queue_present(struct wsi_swapchain * wsi_chain,uint32_t image_index,uint64_t present_id,const VkPresentRegionKHR * damage)322 wsi_metal_swapchain_queue_present(struct wsi_swapchain *wsi_chain,
323 uint32_t image_index,
324 uint64_t present_id,
325 const VkPresentRegionKHR *damage)
326 {
327 struct wsi_metal_swapchain *chain =
328 (struct wsi_metal_swapchain *)wsi_chain;
329
330 assert(image_index < chain->base.image_count);
331
332 struct wsi_metal_image *image = &chain->images[image_index];
333
334 wsi_metal_layer_blit_and_present(chain->blit_context,
335 &image->drawable,
336 image->base.cpu_map,
337 chain->extent.width, chain->extent.height,
338 image->base.row_pitches[0]);
339
340 return VK_SUCCESS;
341 }
342
343 static VkResult
wsi_metal_swapchain_destroy(struct wsi_swapchain * wsi_chain,const VkAllocationCallbacks * pAllocator)344 wsi_metal_swapchain_destroy(struct wsi_swapchain *wsi_chain,
345 const VkAllocationCallbacks *pAllocator)
346 {
347 struct wsi_metal_swapchain *chain =
348 (struct wsi_metal_swapchain *)wsi_chain;
349
350 for (uint32_t i = 0; i < chain->base.image_count; i++) {
351 wsi_metal_layer_cancel_present(chain->blit_context, &chain->images[i].drawable);
352 if (chain->images[i].base.image != VK_NULL_HANDLE)
353 wsi_destroy_image(&chain->base, &chain->images[i].base);
354 }
355
356 u_vector_finish(&chain->modifiers);
357
358 wsi_destroy_metal_layer_blit_context(chain->blit_context);
359
360 wsi_swapchain_finish(&chain->base);
361
362 vk_free(pAllocator, chain);
363
364 return VK_SUCCESS;
365 }
366
367 static VkResult
wsi_metal_surface_create_swapchain(VkIcdSurfaceBase * icd_surface,VkDevice device,struct wsi_device * wsi_device,const VkSwapchainCreateInfoKHR * pCreateInfo,const VkAllocationCallbacks * pAllocator,struct wsi_swapchain ** swapchain_out)368 wsi_metal_surface_create_swapchain(VkIcdSurfaceBase *icd_surface,
369 VkDevice device,
370 struct wsi_device *wsi_device,
371 const VkSwapchainCreateInfoKHR* pCreateInfo,
372 const VkAllocationCallbacks* pAllocator,
373 struct wsi_swapchain **swapchain_out)
374 {
375 VkResult result;
376
377 VkIcdSurfaceMetal *metal_surface = (VkIcdSurfaceMetal *)icd_surface;
378 assert(metal_surface->pLayer);
379
380 assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR);
381
382 MTLPixelFormat metal_format;
383 switch (pCreateInfo->imageFormat)
384 {
385 case VK_FORMAT_B8G8R8A8_SRGB:
386 metal_format = MTLPixelFormatBGRA8Unorm_sRGB;
387 break;
388 case VK_FORMAT_B8G8R8A8_UNORM:
389 metal_format = MTLPixelFormatBGRA8Unorm;
390 break;
391 case VK_FORMAT_R16G16B16A16_SFLOAT:
392 metal_format = MTLPixelFormatRGBA16Float;
393 break;
394 case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
395 metal_format = MTLPixelFormatRGB10A2Unorm;
396 break;
397 case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
398 metal_format = MTLPixelFormatBGR10A2Unorm;
399 break;
400 default:
401 return VK_ERROR_FORMAT_NOT_SUPPORTED;
402 }
403
404 int num_images = pCreateInfo->minImageCount;
405
406 struct wsi_metal_swapchain *chain;
407 size_t size = sizeof(*chain) + num_images * sizeof(chain->images[0]);
408 chain = vk_zalloc(pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
409 if (chain == NULL)
410 return VK_ERROR_OUT_OF_HOST_MEMORY;
411
412 struct wsi_cpu_image_params cpu_params = {
413 .base.image_type = WSI_IMAGE_TYPE_CPU,
414 };
415
416 result = wsi_swapchain_init(wsi_device, &chain->base, device,
417 pCreateInfo, &cpu_params.base, pAllocator);
418 if (result != VK_SUCCESS) {
419 vk_free(pAllocator, chain);
420 return result;
421 }
422
423 chain->base.destroy = wsi_metal_swapchain_destroy;
424 chain->base.get_wsi_image = wsi_metal_swapchain_get_wsi_image;
425 chain->base.acquire_next_image = wsi_metal_swapchain_acquire_next_image;
426 chain->base.queue_present = wsi_metal_swapchain_queue_present;
427 chain->base.present_mode = wsi_swapchain_get_present_mode(wsi_device, pCreateInfo);
428 chain->base.image_count = num_images;
429 chain->extent = pCreateInfo->imageExtent;
430 chain->vk_format = pCreateInfo->imageFormat;
431 chain->surface = metal_surface;
432
433 wsi_metal_layer_configure(metal_surface->pLayer,
434 pCreateInfo->imageExtent.width, pCreateInfo->imageExtent.height,
435 num_images, metal_format,
436 pCreateInfo->compositeAlpha & VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
437 pCreateInfo->presentMode == VK_PRESENT_MODE_IMMEDIATE_KHR);
438
439 chain->current_image_index = 0;
440 for (uint32_t i = 0; i < chain->base.image_count; i++) {
441 result = wsi_create_image(&chain->base, &chain->base.image_info,
442 &chain->images[i].base);
443 if (result != VK_SUCCESS)
444 return result;
445
446 chain->images[i].drawable = NULL;
447 }
448
449 chain->blit_context = wsi_create_metal_layer_blit_context();
450
451 *swapchain_out = &chain->base;
452
453 return VK_SUCCESS;
454 }
455
456 VkResult
wsi_metal_init_wsi(struct wsi_device * wsi_device,const VkAllocationCallbacks * alloc,VkPhysicalDevice physical_device)457 wsi_metal_init_wsi(struct wsi_device *wsi_device,
458 const VkAllocationCallbacks *alloc,
459 VkPhysicalDevice physical_device)
460 {
461 struct wsi_metal *wsi;
462 VkResult result;
463
464 wsi = vk_alloc(alloc, sizeof(*wsi), 8,
465 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
466 if (!wsi) {
467 result = VK_ERROR_OUT_OF_HOST_MEMORY;
468 goto fail;
469 }
470
471 wsi->physical_device = physical_device;
472 wsi->alloc = alloc;
473 wsi->wsi = wsi_device;
474
475 wsi->base.get_support = wsi_metal_surface_get_support;
476 wsi->base.get_capabilities2 = wsi_metal_surface_get_capabilities2;
477 wsi->base.get_formats = wsi_metal_surface_get_formats;
478 wsi->base.get_formats2 = wsi_metal_surface_get_formats2;
479 wsi->base.get_present_modes = wsi_metal_surface_get_present_modes;
480 wsi->base.get_present_rectangles = wsi_metal_surface_get_present_rectangles;
481 wsi->base.create_swapchain = wsi_metal_surface_create_swapchain;
482
483 wsi_device->wsi[VK_ICD_WSI_PLATFORM_METAL] = &wsi->base;
484
485 return VK_SUCCESS;
486
487 fail:
488 wsi_device->wsi[VK_ICD_WSI_PLATFORM_METAL] = NULL;
489
490 return result;
491 }
492
493 void
wsi_metal_finish_wsi(struct wsi_device * wsi_device,const VkAllocationCallbacks * alloc)494 wsi_metal_finish_wsi(struct wsi_device *wsi_device,
495 const VkAllocationCallbacks *alloc)
496 {
497 struct wsi_metal *wsi =
498 (struct wsi_metal *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_METAL];
499 if (!wsi)
500 return;
501
502 vk_free(alloc, wsi);
503 }
504
505 VKAPI_ATTR VkResult VKAPI_CALL
vkCreateMetalSurfaceEXT(VkInstance _instance,const VkMetalSurfaceCreateInfoEXT * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSurfaceKHR * pSurface)506 vkCreateMetalSurfaceEXT(
507 VkInstance _instance,
508 const VkMetalSurfaceCreateInfoEXT* pCreateInfo,
509 const VkAllocationCallbacks* pAllocator,
510 VkSurfaceKHR* pSurface)
511 {
512 VK_FROM_HANDLE(vk_instance, instance, _instance);
513 VkIcdSurfaceMetal *surface;
514
515 surface = vk_alloc2(&instance->alloc, pAllocator, sizeof *surface, 8,
516 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
517 if (surface == NULL)
518 return VK_ERROR_OUT_OF_HOST_MEMORY;
519
520 surface->base.platform = VK_ICD_WSI_PLATFORM_METAL;
521 surface->pLayer = pCreateInfo->pLayer;
522 assert(surface->pLayer);
523
524 *pSurface = VkIcdSurfaceBase_to_handle(&surface->base);
525 return VK_SUCCESS;
526 }
527