/*
 * Copyright © 2022 Collabora Ltd. and Red Hat Inc.
 * SPDX-License-Identifier: MIT
 */
#include "nvk_image_view.h"

#include "nvk_device.h"
#include "nvk_entrypoints.h"
#include "nvk_format.h"
#include "nvk_image.h"
#include "nvk_physical_device.h"

#include "vk_format.h"

static enum nil_view_type
vk_image_view_type_to_nil_view_type(VkImageViewType view_type)
{
   switch (view_type) {
   case VK_IMAGE_VIEW_TYPE_1D: return NIL_VIEW_TYPE_1D;
   case VK_IMAGE_VIEW_TYPE_2D: return NIL_VIEW_TYPE_2D;
   case VK_IMAGE_VIEW_TYPE_3D: return NIL_VIEW_TYPE_3D;
   case VK_IMAGE_VIEW_TYPE_CUBE: return NIL_VIEW_TYPE_CUBE;
   case VK_IMAGE_VIEW_TYPE_1D_ARRAY: return NIL_VIEW_TYPE_1D_ARRAY;
   case VK_IMAGE_VIEW_TYPE_2D_ARRAY: return NIL_VIEW_TYPE_2D_ARRAY;
   case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY: return NIL_VIEW_TYPE_CUBE_ARRAY;
   default:
      unreachable("Invalid image view type");
   }
}

static enum pipe_swizzle
vk_swizzle_to_pipe(VkComponentSwizzle swizzle)
{
   switch (swizzle) {
   case VK_COMPONENT_SWIZZLE_R: return PIPE_SWIZZLE_X;
   case VK_COMPONENT_SWIZZLE_G: return PIPE_SWIZZLE_Y;
   case VK_COMPONENT_SWIZZLE_B: return PIPE_SWIZZLE_Z;
   case VK_COMPONENT_SWIZZLE_A: return PIPE_SWIZZLE_W;
   case VK_COMPONENT_SWIZZLE_ONE: return PIPE_SWIZZLE_1;
   case VK_COMPONENT_SWIZZLE_ZERO: return PIPE_SWIZZLE_0;
   default:
      unreachable("Invalid component swizzle");
   }
}

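/* Restricts the image to a single miplevel: the per-level image replaces the
 * full image, the level's byte offset is folded into the base address, and
 * the view is rebased so the selected level becomes level 0.
 */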
static void
image_single_level_view(struct nil_image *image,
                        struct nil_view *view,
                        uint64_t *base_addr)
{
   assert(view->num_levels == 1);

   uint64_t offset_B;
   *image = nil_image_for_level(image, view->base_level, &offset_B);
   *base_addr += offset_B;
   view->base_level = 0;
}

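/* Like image_single_level_view(), but the selected level is re-described as
 * an uncompressed image so it can be viewed with a non-compressed format.
 */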
static void
image_uncompressed_view(struct nil_image *image,
                        struct nil_view *view,
                        uint64_t *base_addr)
{
   assert(view->num_levels == 1);

   uint64_t offset_B;
   *image = nil_image_level_as_uncompressed(image, view->base_level, &offset_B);
   *base_addr += offset_B;
   view->base_level = 0;
}

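/* Re-describes one level of a 3D image as a 2D array image so it can back a
 * 2D or 2D array view.
 */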
static void
image_3d_view_as_2d_array(struct nil_image *image,
                          struct nil_view *view,
                          uint64_t *base_addr)
{
   assert(view->view_type == NIL_VIEW_TYPE_2D ||
          view->view_type == NIL_VIEW_TYPE_2D_ARRAY);
   assert(view->num_levels == 1);

   uint64_t offset_B;
   *image = nil_image_3d_level_as_2d_array(image, view->base_level, &offset_B);
   *base_addr += offset_B;
   view->base_level = 0;
}

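/* Returns the stencil-only variant of a depth/stencil format. */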
static enum pipe_format
get_stencil_format(enum pipe_format format)
{
   switch (format) {
   case PIPE_FORMAT_S8_UINT: return PIPE_FORMAT_S8_UINT;
   case PIPE_FORMAT_Z24_UNORM_S8_UINT: return PIPE_FORMAT_X24S8_UINT;
   case PIPE_FORMAT_S8_UINT_Z24_UNORM: return PIPE_FORMAT_S8X24_UINT;
   case PIPE_FORMAT_Z32_FLOAT_S8X24_UINT: return PIPE_FORMAT_X32_S8X24_UINT;
   default: unreachable("Unsupported depth/stencil format");
   }
}

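/* Initializes an image view: maps Vulkan aspects to image planes, then builds
 * and uploads a sampled-image and/or storage-image descriptor for each view
 * plane.  When opaque capture data is provided, descriptors are re-inserted
 * at their previously captured indices.
 */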
VkResult
nvk_image_view_init(struct nvk_device *dev,
                    struct nvk_image_view *view,
                    bool driver_internal,
                    const VkImageViewCreateInfo *pCreateInfo)
{
   struct nvk_physical_device *pdev = nvk_device_physical(dev);
   VK_FROM_HANDLE(nvk_image, image, pCreateInfo->image);
   VkResult result;

   const VkOpaqueCaptureDescriptorDataCreateInfoEXT *cap_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT);
   struct nvk_image_view_capture cap = {};
   if (cap_info != NULL)
      memcpy(&cap, cap_info->opaqueCaptureDescriptorData, sizeof(cap));

   memset(view, 0, sizeof(*view));

   vk_image_view_init(&dev->vk, &view->vk, driver_internal, pCreateInfo);

   /* First, figure out which image planes we need.
    * For depth/stencil, we only have one plane, so simply assert
    * and then map directly between the image and view plane.
    */
   if (image->vk.aspects & (VK_IMAGE_ASPECT_DEPTH_BIT |
                            VK_IMAGE_ASPECT_STENCIL_BIT)) {
      assert(image->plane_count == 1);
      assert(nvk_image_aspects_to_plane(image, view->vk.aspects) == 0);
      view->plane_count = 1;
      view->planes[0].image_plane = 0;
   } else {
      /* For other formats, retrieve the plane count from the aspect mask
       * and then walk through the aspect mask to map each image plane
       * to its corresponding view plane
       */
      assert(util_bitcount(view->vk.aspects) ==
             vk_format_get_plane_count(view->vk.format));
      view->plane_count = 0;
      u_foreach_bit(aspect_bit, view->vk.aspects) {
         uint8_t image_plane = nvk_image_aspects_to_plane(image, 1u << aspect_bit);
         view->planes[view->plane_count++].image_plane = image_plane;
      }
   }

   /* Finally, fill in each view plane separately */
   for (unsigned view_plane = 0; view_plane < view->plane_count; view_plane++) {
      const uint8_t image_plane = view->planes[view_plane].image_plane;
      struct nil_image nil_image = image->planes[image_plane].nil;
      uint64_t base_addr = nvk_image_base_address(image, image_plane);

      const struct vk_format_ycbcr_info *ycbcr_info =
         vk_format_get_ycbcr_info(view->vk.format);
      assert(ycbcr_info || view_plane == 0);
      VkFormat plane_format = ycbcr_info ?
         ycbcr_info->planes[view_plane].format : view->vk.format;
      enum pipe_format p_format = vk_format_to_pipe_format(plane_format);
      if (view->vk.aspects == VK_IMAGE_ASPECT_STENCIL_BIT)
         p_format = get_stencil_format(p_format);

      struct nil_view nil_view = {
         .view_type = vk_image_view_type_to_nil_view_type(view->vk.view_type),
         .format = nil_format(p_format),
         .base_level = view->vk.base_mip_level,
         .num_levels = view->vk.level_count,
         .base_array_layer = view->vk.base_array_layer,
         .array_len = view->vk.layer_count,
         .swizzle = {
            vk_swizzle_to_pipe(view->vk.swizzle.r),
            vk_swizzle_to_pipe(view->vk.swizzle.g),
            vk_swizzle_to_pipe(view->vk.swizzle.b),
            vk_swizzle_to_pipe(view->vk.swizzle.a),
         },
         .min_lod_clamp = view->vk.min_lod,
      };

      if (util_format_is_compressed(nil_image.format.p_format) &&
          !util_format_is_compressed(nil_view.format.p_format))
         image_uncompressed_view(&nil_image, &nil_view, &base_addr);

      if (nil_image.dim == NIL_IMAGE_DIM_3D &&
          nil_view.view_type != NIL_VIEW_TYPE_3D)
         image_3d_view_as_2d_array(&nil_image, &nil_view, &base_addr);

      view->planes[view_plane].sample_layout = nil_image.sample_layout;

      if (view->vk.usage & (VK_IMAGE_USAGE_SAMPLED_BIT |
                            VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) {
         uint32_t tic[8];
         nil_image_fill_tic(&nil_image, &pdev->info,
                            &nil_view, base_addr, &tic);

         uint32_t desc_index = 0;
         if (cap_info != NULL) {
            desc_index = cap.planes[view_plane].sampled_desc_index;
            result = nvk_descriptor_table_insert(dev, &dev->images,
                                                 desc_index, tic, sizeof(tic));
         } else {
            result = nvk_descriptor_table_add(dev, &dev->images,
                                              tic, sizeof(tic), &desc_index);
         }
         if (result != VK_SUCCESS) {
            nvk_image_view_finish(dev, view);
            return result;
         }

         view->planes[view_plane].sampled_desc_index = desc_index;
      }

      if (view->vk.usage & VK_IMAGE_USAGE_STORAGE_BIT) {
         /* For storage images, we can't have any cubes */
         if (view->vk.view_type == VK_IMAGE_VIEW_TYPE_CUBE ||
             view->vk.view_type == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)
            nil_view.view_type = NIL_VIEW_TYPE_2D_ARRAY;

         if (view->vk.view_type == VK_IMAGE_VIEW_TYPE_3D) {
            /* Without VK_AMD_shader_image_load_store_lod, the client can only
             * get at the first LOD from the shader anyway.
             */
            assert(view->vk.base_array_layer == 0);
            assert(view->vk.layer_count == 1);
            nil_view.num_levels = 1;
            image_single_level_view(&nil_image, &nil_view, &base_addr);

            if (view->vk.storage.z_slice_offset > 0 ||
                view->vk.storage.z_slice_count < nil_image.extent_px.depth) {
               nil_view.view_type = NIL_VIEW_TYPE_3D_SLICED;
               nil_view.base_array_layer = view->vk.storage.z_slice_offset;
               nil_view.array_len = view->vk.storage.z_slice_count;
            }
         }

         if (image->vk.samples != VK_SAMPLE_COUNT_1_BIT)
            nil_image = nil_msaa_image_as_sa(&nil_image);

         uint32_t tic[8];
         nil_image_fill_tic(&nil_image, &pdev->info, &nil_view,
                            base_addr, &tic);

         uint32_t desc_index = 0;
         if (cap_info != NULL) {
            desc_index = cap.planes[view_plane].storage_desc_index;
            result = nvk_descriptor_table_insert(dev, &dev->images,
                                                 desc_index, tic, sizeof(tic));
         } else {
            result = nvk_descriptor_table_add(dev, &dev->images,
                                              tic, sizeof(tic), &desc_index);
         }
         if (result != VK_SUCCESS) {
            nvk_image_view_finish(dev, view);
            return result;
         }

         view->planes[view_plane].storage_desc_index = desc_index;
      }
   }

   return VK_SUCCESS;
}

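/* Releases the descriptor table entries held by each view plane, then tears
 * down the common view state.
 */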
void
nvk_image_view_finish(struct nvk_device *dev,
                      struct nvk_image_view *view)
{
   for (uint8_t plane = 0; plane < view->plane_count; plane++) {
      if (view->planes[plane].sampled_desc_index) {
         nvk_descriptor_table_remove(dev, &dev->images,
                                     view->planes[plane].sampled_desc_index);
      }

      if (view->planes[plane].storage_desc_index) {
         nvk_descriptor_table_remove(dev, &dev->images,
                                     view->planes[plane].storage_desc_index);
      }
   }

   vk_image_view_finish(&view->vk);
}

VKAPI_ATTR VkResult VKAPI_CALL
nvk_CreateImageView(VkDevice _device,
                    const VkImageViewCreateInfo *pCreateInfo,
                    const VkAllocationCallbacks *pAllocator,
                    VkImageView *pView)
{
   VK_FROM_HANDLE(nvk_device, dev, _device);
   struct nvk_image_view *view;
   VkResult result;

   view = vk_alloc2(&dev->vk.alloc, pAllocator, sizeof(*view), 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!view)
      return vk_error(dev, VK_ERROR_OUT_OF_HOST_MEMORY);

   result = nvk_image_view_init(dev, view, false, pCreateInfo);
   if (result != VK_SUCCESS) {
      vk_free2(&dev->vk.alloc, pAllocator, view);
      return result;
   }

   *pView = nvk_image_view_to_handle(view);

   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
nvk_DestroyImageView(VkDevice _device,
                     VkImageView imageView,
                     const VkAllocationCallbacks *pAllocator)
{
   VK_FROM_HANDLE(nvk_device, dev, _device);
   VK_FROM_HANDLE(nvk_image_view, view, imageView);

   if (!view)
      return;

   nvk_image_view_finish(dev, view);
   vk_free2(&dev->vk.alloc, pAllocator, view);
}

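/* Returns the per-plane descriptor indices as opaque capture data so a
 * replayed view can be re-created at the same indices.
 */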
VKAPI_ATTR VkResult VKAPI_CALL
nvk_GetImageViewOpaqueCaptureDescriptorDataEXT(
    VkDevice _device,
    const VkImageViewCaptureDescriptorDataInfoEXT *pInfo,
    void *pData)
{
   VK_FROM_HANDLE(nvk_image_view, view, pInfo->imageView);

   struct nvk_image_view_capture cap = {};
   for (uint8_t p = 0; p < view->plane_count; p++) {
      cap.planes[p].sampled_desc_index = view->planes[p].sampled_desc_index;
      cap.planes[p].storage_desc_index = view->planes[p].storage_desc_index;
   }

   memcpy(pData, &cap, sizeof(cap));

   return VK_SUCCESS;
}