1 /*
2 * Copyright 2019 Google LLC
3 * SPDX-License-Identifier: MIT
4 *
5 * based in part on anv and radv which are:
6 * Copyright © 2015 Intel Corporation
7 * Copyright © 2016 Red Hat.
8 * Copyright © 2016 Bas Nieuwenhuizen
9 */
10
11 #include "vn_render_pass.h"
12
13 #include "venus-protocol/vn_protocol_driver_framebuffer.h"
14 #include "venus-protocol/vn_protocol_driver_render_pass.h"
15 #include "vk_format.h"
16
17 #include "vn_device.h"
18 #include "vn_image.h"
19
/* Counts attachments whose initialLayout (*initial_count) or finalLayout
 * (*final_count) is VK_IMAGE_LAYOUT_PRESENT_SRC_KHR.  Type-generic over
 * VkAttachmentDescription and VkAttachmentDescription2 arrays.
 *
 * All parameters are parenthesized in the expansion so that non-trivial
 * argument expressions cannot change precedence inside the macro body.
 */
#define COUNT_PRESENT_SRC(atts, att_count, initial_count, final_count)       \
   do {                                                                      \
      *(initial_count) = 0;                                                  \
      *(final_count) = 0;                                                    \
      for (uint32_t i = 0; i < (att_count); i++) {                           \
         if ((atts)[i].initialLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)     \
            (*(initial_count))++;                                            \
         if ((atts)[i].finalLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)       \
            (*(final_count))++;                                              \
      }                                                                      \
   } while (false)
31
/* Copies att_count attachment descriptions from atts into out_atts while
 * rewriting VK_IMAGE_LAYOUT_PRESENT_SRC_KHR initial/final layouts to
 * VN_PRESENT_SRC_INTERNAL_LAYOUT.  The index of each rewritten attachment is
 * recorded in pass->present_acquire_attachments (initialLayout hits) and
 * pass->present_release_attachments (finalLayout hits); those arrays must
 * have been sized from a prior COUNT_PRESENT_SRC of the same attachments.
 *
 * All parameters are parenthesized in the expansion so that non-trivial
 * argument expressions cannot change precedence inside the macro body.
 */
#define REPLACE_PRESENT_SRC(pass, atts, att_count, out_atts)                 \
   do {                                                                      \
      struct vn_present_src_attachment *_acquire_atts =                      \
         (pass)->present_acquire_attachments;                                \
      struct vn_present_src_attachment *_release_atts =                      \
         (pass)->present_release_attachments;                                \
                                                                             \
      memcpy((out_atts), (atts), sizeof(*(atts)) * (att_count));             \
      for (uint32_t i = 0; i < (att_count); i++) {                           \
         if ((out_atts)[i].initialLayout ==                                  \
             VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {                              \
            (out_atts)[i].initialLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;    \
            _acquire_atts->index = i;                                        \
            _acquire_atts++;                                                 \
         }                                                                   \
         if ((out_atts)[i].finalLayout ==                                    \
             VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {                              \
            (out_atts)[i].finalLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;      \
            _release_atts->index = i;                                        \
            _release_atts++;                                                 \
         }                                                                   \
      }                                                                      \
   } while (false)
53
/* Initializes _pass->subpasses[] aspect masks from _pCreateInfo->pSubpasses.
 * For each subpass: ORs in VK_IMAGE_ASPECT_COLOR_BIT if any color attachment
 * is used (not VK_ATTACHMENT_UNUSED), and ORs in the format aspects of the
 * depth/stencil attachment if one is present and used.  Type-generic over
 * VkRenderPassCreateInfo and VkRenderPassCreateInfo2 (hence the __auto_type
 * on the subpass description pointer).
 */
#define INIT_SUBPASSES(_pass, _pCreateInfo)                                  \
   do {                                                                      \
      for (uint32_t i = 0; i < _pCreateInfo->subpassCount; i++) {            \
         __auto_type subpass_desc = &_pCreateInfo->pSubpasses[i];            \
         struct vn_subpass *subpass = &_pass->subpasses[i];                  \
                                                                             \
         for (uint32_t j = 0; j < subpass_desc->colorAttachmentCount; j++) { \
            if (subpass_desc->pColorAttachments[j].attachment !=             \
                VK_ATTACHMENT_UNUSED) {                                      \
               subpass->attachment_aspects |= VK_IMAGE_ASPECT_COLOR_BIT;     \
               break;                                                        \
            }                                                                \
         }                                                                   \
                                                                             \
         if (subpass_desc->pDepthStencilAttachment &&                        \
             subpass_desc->pDepthStencilAttachment->attachment !=            \
                VK_ATTACHMENT_UNUSED) {                                      \
            uint32_t att =                                                   \
               subpass_desc->pDepthStencilAttachment->attachment;            \
            subpass->attachment_aspects |=                                   \
               vk_format_aspects(_pCreateInfo->pAttachments[att].format);    \
         }                                                                   \
      }                                                                      \
   } while (false)
78
79 static inline void
vn_render_pass_count_present_src(const VkRenderPassCreateInfo * create_info,uint32_t * initial_count,uint32_t * final_count)80 vn_render_pass_count_present_src(const VkRenderPassCreateInfo *create_info,
81 uint32_t *initial_count,
82 uint32_t *final_count)
83 {
84 if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
85 *initial_count = *final_count = 0;
86 return;
87 }
88 COUNT_PRESENT_SRC(create_info->pAttachments, create_info->attachmentCount,
89 initial_count, final_count);
90 }
91
92 static inline void
vn_render_pass_count_present_src2(const VkRenderPassCreateInfo2 * create_info,uint32_t * initial_count,uint32_t * final_count)93 vn_render_pass_count_present_src2(const VkRenderPassCreateInfo2 *create_info,
94 uint32_t *initial_count,
95 uint32_t *final_count)
96 {
97 if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
98 *initial_count = *final_count = 0;
99 return;
100 }
101 COUNT_PRESENT_SRC(create_info->pAttachments, create_info->attachmentCount,
102 initial_count, final_count);
103 }
104
105 static inline void
vn_render_pass_replace_present_src(struct vn_render_pass * pass,const VkRenderPassCreateInfo * create_info,VkAttachmentDescription * out_atts)106 vn_render_pass_replace_present_src(struct vn_render_pass *pass,
107 const VkRenderPassCreateInfo *create_info,
108 VkAttachmentDescription *out_atts)
109 {
110 REPLACE_PRESENT_SRC(pass, create_info->pAttachments,
111 create_info->attachmentCount, out_atts);
112 }
113
114 static inline void
vn_render_pass_replace_present_src2(struct vn_render_pass * pass,const VkRenderPassCreateInfo2 * create_info,VkAttachmentDescription2 * out_atts)115 vn_render_pass_replace_present_src2(struct vn_render_pass *pass,
116 const VkRenderPassCreateInfo2 *create_info,
117 VkAttachmentDescription2 *out_atts)
118 {
119 REPLACE_PRESENT_SRC(pass, create_info->pAttachments,
120 create_info->attachmentCount, out_atts);
121 }
122
123 static void
vn_render_pass_setup_present_src_barriers(struct vn_render_pass * pass)124 vn_render_pass_setup_present_src_barriers(struct vn_render_pass *pass)
125 {
126 /* TODO parse VkSubpassDependency for more accurate barriers */
127
128 for (uint32_t i = 0; i < pass->present_acquire_count; i++) {
129 struct vn_present_src_attachment *att =
130 &pass->present_acquire_attachments[i];
131
132 att->src_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
133 att->src_access_mask = 0;
134 att->dst_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
135 att->dst_access_mask =
136 VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
137 }
138
139 for (uint32_t i = 0; i < pass->present_release_count; i++) {
140 struct vn_present_src_attachment *att =
141 &pass->present_release_attachments[i];
142
143 att->src_stage_mask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
144 att->src_access_mask = VK_ACCESS_MEMORY_WRITE_BIT;
145 att->dst_stage_mask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
146 att->dst_access_mask = 0;
147 }
148 }
149
150 static struct vn_render_pass *
vn_render_pass_create(struct vn_device * dev,uint32_t present_acquire_count,uint32_t present_release_count,uint32_t subpass_count,const VkAllocationCallbacks * alloc)151 vn_render_pass_create(struct vn_device *dev,
152 uint32_t present_acquire_count,
153 uint32_t present_release_count,
154 uint32_t subpass_count,
155 const VkAllocationCallbacks *alloc)
156 {
157 uint32_t present_count = present_acquire_count + present_release_count;
158 struct vn_render_pass *pass;
159 struct vn_present_src_attachment *present_atts;
160 struct vn_subpass *subpasses;
161
162 VK_MULTIALLOC(ma);
163 vk_multialloc_add(&ma, &pass, __typeof__(*pass), 1);
164 vk_multialloc_add(&ma, &present_atts, __typeof__(*present_atts),
165 present_count);
166 vk_multialloc_add(&ma, &subpasses, __typeof__(*subpasses), subpass_count);
167
168 if (!vk_multialloc_zalloc(&ma, alloc, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT))
169 return NULL;
170
171 vn_object_base_init(&pass->base, VK_OBJECT_TYPE_RENDER_PASS, &dev->base);
172
173 pass->present_count = present_count;
174 pass->present_acquire_count = present_acquire_count;
175 pass->present_release_count = present_release_count;
176 pass->subpass_count = subpass_count;
177
178 /* For each array pointer, set it only if its count != 0. This allows code
179 * elsewhere to intuitively use either condition, `foo_atts == NULL` or
180 * `foo_count != 0`.
181 */
182 if (present_count)
183 pass->present_attachments = present_atts;
184 if (present_acquire_count)
185 pass->present_acquire_attachments = present_atts;
186 if (present_release_count)
187 pass->present_release_attachments =
188 present_atts + present_acquire_count;
189 if (subpass_count)
190 pass->subpasses = subpasses;
191
192 return pass;
193 }
194
195 /* render pass commands */
196
/* Creates the driver-side render pass object, then asynchronously creates
 * the renderer-side one from a create info whose PRESENT_SRC_KHR layouts
 * have been rewritten to VN_PRESENT_SRC_INTERNAL_LAYOUT when needed.
 */
VkResult
vn_CreateRenderPass(VkDevice device,
                    const VkRenderPassCreateInfo *pCreateInfo,
                    const VkAllocationCallbacks *pAllocator,
                    VkRenderPass *pRenderPass)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   /* Count attachments whose initial/final layout is PRESENT_SRC_KHR to
    * size the acquire/release arrays in the pass object.
    */
   uint32_t acquire_count;
   uint32_t release_count;
   vn_render_pass_count_present_src(pCreateInfo, &acquire_count,
                                    &release_count);

   struct vn_render_pass *pass = vn_render_pass_create(
      dev, acquire_count, release_count, pCreateInfo->subpassCount, alloc);
   if (!pass)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   INIT_SUBPASSES(pass, pCreateInfo);

   /* Scratch copy of the attachments; only used when layouts are rewritten
    * below.  Must stay alive until the vn_async call has consumed it.
    */
   STACK_ARRAY(VkAttachmentDescription, attachments,
               pCreateInfo->attachmentCount);

   /* When any PRESENT_SRC_KHR layout is present, repoint pCreateInfo at a
    * local copy that references the rewritten attachments.
    */
   VkRenderPassCreateInfo local_pass_info;
   if (pass->present_count) {
      vn_render_pass_replace_present_src(pass, pCreateInfo, attachments);
      vn_render_pass_setup_present_src_barriers(pass);

      local_pass_info = *pCreateInfo;
      local_pass_info.pAttachments = attachments;
      pCreateInfo = &local_pass_info;
   }

   /* Store the viewMask of each subpass for query feedback */
   const struct VkRenderPassMultiviewCreateInfo *multiview_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           RENDER_PASS_MULTIVIEW_CREATE_INFO);
   if (multiview_info) {
      for (uint32_t i = 0; i < multiview_info->subpassCount; i++)
         pass->subpasses[i].view_mask = multiview_info->pViewMasks[i];
   }

   VkRenderPass pass_handle = vn_render_pass_to_handle(pass);
   vn_async_vkCreateRenderPass(dev->primary_ring, device, pCreateInfo, NULL,
                               &pass_handle);

   STACK_ARRAY_FINISH(attachments);

   *pRenderPass = pass_handle;

   return VK_SUCCESS;
}
251
/* VkRenderPassCreateInfo2 variant of vn_CreateRenderPass: creates the
 * driver-side object, rewrites PRESENT_SRC_KHR layouts when needed, then
 * asynchronously creates the renderer-side render pass.
 */
VkResult
vn_CreateRenderPass2(VkDevice device,
                     const VkRenderPassCreateInfo2 *pCreateInfo,
                     const VkAllocationCallbacks *pAllocator,
                     VkRenderPass *pRenderPass)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   /* Count attachments whose initial/final layout is PRESENT_SRC_KHR to
    * size the acquire/release arrays in the pass object.
    */
   uint32_t acquire_count;
   uint32_t release_count;
   vn_render_pass_count_present_src2(pCreateInfo, &acquire_count,
                                     &release_count);

   struct vn_render_pass *pass = vn_render_pass_create(
      dev, acquire_count, release_count, pCreateInfo->subpassCount, alloc);
   if (!pass)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   INIT_SUBPASSES(pass, pCreateInfo);

   /* Scratch copy of the attachments; only used when layouts are rewritten
    * below.  Must stay alive until the vn_async call has consumed it.
    */
   STACK_ARRAY(VkAttachmentDescription2, attachments,
               pCreateInfo->attachmentCount);

   /* When any PRESENT_SRC_KHR layout is present, repoint pCreateInfo at a
    * local copy that references the rewritten attachments.
    */
   VkRenderPassCreateInfo2 local_pass_info;
   if (pass->present_count) {
      vn_render_pass_replace_present_src2(pass, pCreateInfo, attachments);
      vn_render_pass_setup_present_src_barriers(pass);

      local_pass_info = *pCreateInfo;
      local_pass_info.pAttachments = attachments;
      pCreateInfo = &local_pass_info;
   }

   /* Store the viewMask of each subpass for query feedback */
   for (uint32_t i = 0; i < pCreateInfo->subpassCount; i++)
      pass->subpasses[i].view_mask = pCreateInfo->pSubpasses[i].viewMask;

   VkRenderPass pass_handle = vn_render_pass_to_handle(pass);
   vn_async_vkCreateRenderPass2(dev->primary_ring, device, pCreateInfo, NULL,
                                &pass_handle);

   STACK_ARRAY_FINISH(attachments);

   *pRenderPass = pass_handle;

   return VK_SUCCESS;
}
301
302 void
vn_DestroyRenderPass(VkDevice device,VkRenderPass renderPass,const VkAllocationCallbacks * pAllocator)303 vn_DestroyRenderPass(VkDevice device,
304 VkRenderPass renderPass,
305 const VkAllocationCallbacks *pAllocator)
306 {
307 struct vn_device *dev = vn_device_from_handle(device);
308 struct vn_render_pass *pass = vn_render_pass_from_handle(renderPass);
309 const VkAllocationCallbacks *alloc =
310 pAllocator ? pAllocator : &dev->base.base.alloc;
311
312 if (!pass)
313 return;
314
315 vn_async_vkDestroyRenderPass(dev->primary_ring, device, renderPass, NULL);
316
317 vn_object_base_fini(&pass->base);
318 vk_free(alloc, pass);
319 }
320
/* Returns the render area granularity of the render pass, querying the
 * renderer only on first use and caching the result in the pass object.
 */
void
vn_GetRenderAreaGranularity(VkDevice device,
                            VkRenderPass renderPass,
                            VkExtent2D *pGranularity)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_render_pass *pass = vn_render_pass_from_handle(renderPass);

   /* width == 0 marks the not-yet-queried state; NOTE(review): this assumes
    * a valid granularity always has nonzero width -- confirm against spec.
    */
   if (!pass->granularity.width) {
      vn_call_vkGetRenderAreaGranularity(dev->primary_ring, device,
                                         renderPass, &pass->granularity);
   }

   *pGranularity = pass->granularity;
}
336
/* Queries the rendering area granularity from the renderer synchronously;
 * unlike vn_GetRenderAreaGranularity there is no per-pass object to cache
 * the result in.
 */
void
vn_GetRenderingAreaGranularityKHR(VkDevice device,
                                  const VkRenderingAreaInfoKHR *pRenderingAreaInfo,
                                  VkExtent2D *pGranularity)
{
   struct vn_device *dev = vn_device_from_handle(device);

   /* TODO per-device cache */
   vn_call_vkGetRenderingAreaGranularityKHR(dev->primary_ring, device,
                                            pRenderingAreaInfo, pGranularity);
}
348
349 /* framebuffer commands */
350
351 VkResult
vn_CreateFramebuffer(VkDevice device,const VkFramebufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFramebuffer * pFramebuffer)352 vn_CreateFramebuffer(VkDevice device,
353 const VkFramebufferCreateInfo *pCreateInfo,
354 const VkAllocationCallbacks *pAllocator,
355 VkFramebuffer *pFramebuffer)
356 {
357 struct vn_device *dev = vn_device_from_handle(device);
358 const VkAllocationCallbacks *alloc =
359 pAllocator ? pAllocator : &dev->base.base.alloc;
360
361 /* Two render passes differ only in attachment image layouts are considered
362 * compatible. We must not use pCreateInfo->renderPass here.
363 */
364 const bool imageless =
365 pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT;
366 const uint32_t view_count = imageless ? 0 : pCreateInfo->attachmentCount;
367
368 struct vn_framebuffer *fb =
369 vk_zalloc(alloc, sizeof(*fb) + sizeof(*fb->image_views) * view_count,
370 VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
371 if (!fb)
372 return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
373
374 vn_object_base_init(&fb->base, VK_OBJECT_TYPE_FRAMEBUFFER, &dev->base);
375
376 fb->image_view_count = view_count;
377 memcpy(fb->image_views, pCreateInfo->pAttachments,
378 sizeof(*pCreateInfo->pAttachments) * view_count);
379
380 VkFramebuffer fb_handle = vn_framebuffer_to_handle(fb);
381 vn_async_vkCreateFramebuffer(dev->primary_ring, device, pCreateInfo, NULL,
382 &fb_handle);
383
384 *pFramebuffer = fb_handle;
385
386 return VK_SUCCESS;
387 }
388
389 void
vn_DestroyFramebuffer(VkDevice device,VkFramebuffer framebuffer,const VkAllocationCallbacks * pAllocator)390 vn_DestroyFramebuffer(VkDevice device,
391 VkFramebuffer framebuffer,
392 const VkAllocationCallbacks *pAllocator)
393 {
394 struct vn_device *dev = vn_device_from_handle(device);
395 struct vn_framebuffer *fb = vn_framebuffer_from_handle(framebuffer);
396 const VkAllocationCallbacks *alloc =
397 pAllocator ? pAllocator : &dev->base.base.alloc;
398
399 if (!fb)
400 return;
401
402 vn_async_vkDestroyFramebuffer(dev->primary_ring, device, framebuffer,
403 NULL);
404
405 vn_object_base_fini(&fb->base);
406 vk_free(alloc, fb);
407 }
408