xref: /aosp_15_r20/external/skia/tools/gpu/vk/VkYcbcrSamplerHelper.cpp (revision c8dee2aa9b3f27cf6c858bd81872bdeb2c07ed17)
1 /*
2  * Copyright 2020 Google Inc.
3  *
4  * Use of this source code is governed by a BSD-style license that can be
5  * found in the LICENSE file.
6  */
7 
8 #include "tools/gpu/vk/VkYcbcrSamplerHelper.h"
9 
10 #if defined(SK_VULKAN)
11 
12 #include "include/gpu/ganesh/GrDirectContext.h"
13 #include "include/gpu/ganesh/vk/GrVkBackendSurface.h"
14 #include "include/gpu/vk/VulkanTypes.h"
15 #include "src/gpu/ganesh/GrDirectContextPriv.h"
16 #include "src/gpu/ganesh/vk/GrVkGpu.h"
17 #include "src/gpu/ganesh/vk/GrVkUtil.h"
18 #include "src/gpu/vk/VulkanInterface.h"
19 
20 #if defined(SK_GRAPHITE)
21 #include "include/gpu/GpuTypes.h"
22 #include "include/gpu/graphite/BackendTexture.h"
23 #include "include/gpu/graphite/Recorder.h"
24 #include "include/gpu/graphite/vk/VulkanGraphiteTypes.h"
25 #include "src/gpu/graphite/vk/VulkanGraphiteUtilsPriv.h"
26 #include "src/gpu/graphite/vk/VulkanSharedContext.h"
27 #endif
28 
GetExpectedY(int x,int y,int width,int height)29 int VkYcbcrSamplerHelper::GetExpectedY(int x, int y, int width, int height) {
30     return 16 + (x + y) * 219 / (width + height - 2);
31 }
32 
GetExpectedUV(int x,int y,int width,int height)33 std::pair<int, int> VkYcbcrSamplerHelper::GetExpectedUV(int x, int y, int width, int height) {
34     return { 16 + x * 224 / (width - 1), 16 + y * 224 / (height - 1) };
35 }
36 
37 namespace {
38 
populate_ycbcr_image_info(VkImageCreateInfo * outImageInfo,uint32_t width,uint32_t height)39 void populate_ycbcr_image_info(VkImageCreateInfo* outImageInfo, uint32_t width, uint32_t height) {
40     outImageInfo->sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
41     outImageInfo->pNext = nullptr;
42     outImageInfo->flags = 0;
43     outImageInfo->imageType = VK_IMAGE_TYPE_2D;
44     outImageInfo->format = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
45     outImageInfo->extent = VkExtent3D{width, height, 1};
46     outImageInfo->mipLevels = 1;
47     outImageInfo->arrayLayers = 1;
48     outImageInfo->samples = VK_SAMPLE_COUNT_1_BIT;
49     outImageInfo->tiling = VK_IMAGE_TILING_LINEAR;
50     outImageInfo->usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT |
51                           VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
52     outImageInfo->sharingMode = VK_SHARING_MODE_EXCLUSIVE;
53     outImageInfo->queueFamilyIndexCount = 0;
54     outImageInfo->pQueueFamilyIndices = nullptr;
55     outImageInfo->initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
56 }
57 
find_memory_type_index(const VkPhysicalDeviceMemoryProperties & phyDevMemProps,const VkMemoryRequirements & memoryRequirements,uint32_t * memoryTypeIndex)58 bool find_memory_type_index(const VkPhysicalDeviceMemoryProperties& phyDevMemProps,
59                             const VkMemoryRequirements& memoryRequirements,
60                             uint32_t* memoryTypeIndex) {
61     for (uint32_t i = 0; i < phyDevMemProps.memoryTypeCount; ++i) {
62         if (memoryRequirements.memoryTypeBits & (1 << i)) {
63             // Map host-visible memory.
64             if (phyDevMemProps.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
65                 *memoryTypeIndex = i;
66                 return true;
67             }
68         }
69     }
70     return false;
71 }
72 
73 }
74 
75 #ifdef SK_GRAPHITE
76 // TODO(b/339211930): When graphite and ganesh can share a macro for certain Vulkan driver calls,
77 // much more code can be shared between this method and createGrBackendTexture.
// Graphite path: creates a VK_FORMAT_G8_B8R8_2PLANE_420_UNORM image through
// fSharedCtxt, fills both planes with the GetExpectedY/GetExpectedUV ramp
// patterns via a host-visible mapping, binds the memory, and wraps the image
// in fTexture as a graphite BackendTexture. Returns false on any Vulkan
// failure; partially-created fImage/fImageMemory are released by ~VkYcbcrSamplerHelper.
bool VkYcbcrSamplerHelper::createBackendTexture(uint32_t width, uint32_t height) {
    // Create YCbCr image.
    VkImageCreateInfo vkImageInfo;
    populate_ycbcr_image_info(&vkImageInfo, width, height);
    SkASSERT(fImage == VK_NULL_HANDLE);

    VkResult result;
    VULKAN_CALL_RESULT(fSharedCtxt, result, CreateImage(fSharedCtxt->device(),
                                                        &vkImageInfo,
                                                        /*pAllocator=*/nullptr,
                                                        &fImage));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Pick a host-visible memory type large enough for the whole image so the
    // planes can be written directly from the CPU.
    VkMemoryRequirements requirements;
    VULKAN_CALL(fSharedCtxt->interface(), GetImageMemoryRequirements(fSharedCtxt->device(),
                                                                     fImage,
                                                                     &requirements));
    uint32_t memoryTypeIndex = 0;
    const VkPhysicalDeviceMemoryProperties& phyDevMemProps =
            fSharedCtxt->vulkanCaps().physicalDeviceMemoryProperties2().memoryProperties;
    if (!find_memory_type_index(phyDevMemProps, requirements, &memoryTypeIndex)) {
        return false;
    }

    VkMemoryAllocateInfo allocInfo;
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.pNext = nullptr;
    allocInfo.allocationSize = requirements.size;
    allocInfo.memoryTypeIndex = memoryTypeIndex;

    SkASSERT(fImageMemory == VK_NULL_HANDLE);
    VULKAN_CALL_RESULT(fSharedCtxt, result, AllocateMemory(fSharedCtxt->device(),
                                                           &allocInfo,
                                                           nullptr,
                                                           &fImageMemory));
    if (result != VK_SUCCESS) {
        return false;
    }

    void* mappedBuffer;
    VULKAN_CALL_RESULT(fSharedCtxt, result, MapMemory(fSharedCtxt->device(),
                                                      fImageMemory,
                                                      /*offset=*/0u,
                                                      requirements.size,
                                                      /*flags=*/0u,
                                                      &mappedBuffer));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Write Y channel. The per-plane subresource layout supplies the offset
    // and rowPitch needed to address plane 0 inside the linear mapping.
    VkImageSubresource subresource;
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT;
    subresource.mipLevel = 0;
    subresource.arrayLayer = 0;

    VkSubresourceLayout yLayout;
    VULKAN_CALL(fSharedCtxt->interface(),
                GetImageSubresourceLayout(fSharedCtxt->device(), fImage, &subresource, &yLayout));
    uint8_t* bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + yLayout.offset;
    for (size_t y = 0; y < height; ++y) {
        for (size_t x = 0; x < width; ++x) {
            bufferData[y * yLayout.rowPitch + x] = GetExpectedY(x, y, width, height);
        }
    }

    // Write UV channels. Plane 1 of a 2PLANE_420 format is half-resolution
    // with interleaved U/V bytes, hence the half-size loops and the
    // sampling of the ramp at even coordinates (2*x, 2*y).
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT;
    VkSubresourceLayout uvLayout;
    VULKAN_CALL(fSharedCtxt->interface(), GetImageSubresourceLayout(fSharedCtxt->device(),
                                                                    fImage,
                                                                    &subresource,
                                                                    &uvLayout));
    bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + uvLayout.offset;
    for (size_t y = 0; y < height / 2; ++y) {
        for (size_t x = 0; x < width / 2; ++x) {
            auto [u, v] = GetExpectedUV(2*x, 2*y, width, height);
            bufferData[y * uvLayout.rowPitch + x * 2] = u;
            bufferData[y * uvLayout.rowPitch + x * 2 + 1] = v;
        }
    }

    // Flush the writes in case the chosen memory type is not host-coherent.
    VkMappedMemoryRange flushRange;
    flushRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    flushRange.pNext = nullptr;
    flushRange.memory = fImageMemory;
    flushRange.offset = 0;
    flushRange.size = VK_WHOLE_SIZE;
    VULKAN_CALL_RESULT(fSharedCtxt, result,  FlushMappedMemoryRanges(fSharedCtxt->device(),
                                                                     /*memoryRangeCount=*/1,
                                                                     &flushRange));
    if (result != VK_SUCCESS) {
        return false;
    }
    VULKAN_CALL(fSharedCtxt->interface(), UnmapMemory(fSharedCtxt->device(), fImageMemory));

    // Bind image memory.
    VULKAN_CALL_RESULT(fSharedCtxt, result, BindImageMemory(fSharedCtxt->device(),
                                                            fImage,
                                                            fImageMemory,
                                                            /*memoryOffset=*/0u));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Wrap the image into SkImage.
    // Debug-only sanity check that the device advertises the linear-tiling
    // features this image relies on (isYCbCrSupported() checks them for real).
    VkFormatProperties formatProperties;
    SkASSERT(fSharedCtxt->physDevice() != VK_NULL_HANDLE);
    VULKAN_CALL(fSharedCtxt->interface(),
                GetPhysicalDeviceFormatProperties(fSharedCtxt->physDevice(),
                                                  VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
                                                  &formatProperties));
    SkDEBUGCODE(auto linFlags = formatProperties.linearTilingFeatures;)
    SkASSERT((linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT));

    // BT.709 narrow-range conversion with cosited-even chroma, matching the
    // expectations encoded in GetExpectedY/GetExpectedUV.
    skgpu::VulkanYcbcrConversionInfo ycbcrInfo = {vkImageInfo.format,
                                                  /*externalFormat=*/0,
                                                  VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
                                                  VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
                                                  VK_CHROMA_LOCATION_COSITED_EVEN,
                                                  VK_CHROMA_LOCATION_COSITED_EVEN,
                                                  VK_FILTER_LINEAR,
                                                  false,
                                                  formatProperties.linearTilingFeatures};
    skgpu::VulkanAlloc alloc;
    alloc.fMemory = fImageMemory;
    alloc.fOffset = 0;
    alloc.fSize = requirements.size;

    // NOTE(review): VK_IMAGE_CREATE_PROTECTED_BIT is passed here even though
    // the image was created with vkImageInfo.flags == 0 — confirm whether
    // vkImageInfo.flags was intended instead.
    skgpu::graphite::VulkanTextureInfo imageInfo = {
            static_cast<uint32_t>(vkImageInfo.samples),
            skgpu::Mipmapped::kNo,
            VK_IMAGE_CREATE_PROTECTED_BIT,
            vkImageInfo.format,
            vkImageInfo.tiling,
            vkImageInfo.usage,
            vkImageInfo.sharingMode,
            VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT,
            ycbcrInfo};

    fTexture = skgpu::graphite::BackendTextures::MakeVulkan({(int32_t)width, (int32_t)height},
                                                            imageInfo,
                                                            VK_IMAGE_LAYOUT_UNDEFINED,
                                                            /*queueFamilyIndex=*/0,
                                                            fImage,
                                                            alloc);
    return true;
}
231 #endif // SK_GRAPHITE
232 
// Ganesh path: creates a VK_FORMAT_G8_B8R8_2PLANE_420_UNORM image via the
// GrVkGpu's Vulkan interface, fills both planes with the
// GetExpectedY/GetExpectedUV ramp patterns through a host-visible mapping,
// binds the memory, and wraps the image in fGrTexture. Returns false on any
// Vulkan failure; partially-created handles are cleaned up by the destructor.
bool VkYcbcrSamplerHelper::createGrBackendTexture(uint32_t width, uint32_t height) {
    GrVkGpu* vkGpu = this->vkGpu();
    VkResult result;

    // Create YCbCr image.
    VkImageCreateInfo vkImageInfo;
    populate_ycbcr_image_info(&vkImageInfo, width, height);
    SkASSERT(fImage == VK_NULL_HANDLE);

    GR_VK_CALL_RESULT(vkGpu, result, CreateImage(vkGpu->device(), &vkImageInfo, nullptr, &fImage));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Pick a host-visible memory type so the planes can be written from the CPU.
    VkMemoryRequirements requirements;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageMemoryRequirements(vkGpu->device(),
                                                                fImage,
                                                                &requirements));

    uint32_t memoryTypeIndex = 0;
    VkPhysicalDeviceMemoryProperties phyDevMemProps;
    GR_VK_CALL(vkGpu->vkInterface(), GetPhysicalDeviceMemoryProperties(vkGpu->physicalDevice(),
                                                                       &phyDevMemProps));
    if (!find_memory_type_index(phyDevMemProps, requirements, &memoryTypeIndex)) {
        return false;
    }

    VkMemoryAllocateInfo allocInfo = {};
    allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    allocInfo.allocationSize = requirements.size;
    allocInfo.memoryTypeIndex = memoryTypeIndex;

    SkASSERT(fImageMemory == VK_NULL_HANDLE);
    GR_VK_CALL_RESULT(vkGpu, result, AllocateMemory(vkGpu->device(), &allocInfo,
                                                    nullptr, &fImageMemory));
    if (result != VK_SUCCESS) {
        return false;
    }

    void* mappedBuffer;
    GR_VK_CALL_RESULT(vkGpu, result, MapMemory(vkGpu->device(), fImageMemory, 0u,
                                               requirements.size, 0u, &mappedBuffer));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Write Y channel. The per-plane subresource layout supplies the offset
    // and rowPitch needed to address plane 0 inside the linear mapping.
    VkImageSubresource subresource;
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_0_BIT;
    subresource.mipLevel = 0;
    subresource.arrayLayer = 0;

    VkSubresourceLayout yLayout;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageSubresourceLayout(vkGpu->device(), fImage,
                                                               &subresource, &yLayout));
    uint8_t* bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + yLayout.offset;
    for (size_t y = 0; y < height; ++y) {
        for (size_t x = 0; x < width; ++x) {
            bufferData[y * yLayout.rowPitch + x] = GetExpectedY(x, y, width, height);
        }
    }

    // Write UV channels. Plane 1 is half-resolution with interleaved U/V
    // bytes, hence the half-size loops and the even (2*x, 2*y) sampling.
    subresource.aspectMask = VK_IMAGE_ASPECT_PLANE_1_BIT;
    VkSubresourceLayout uvLayout;
    GR_VK_CALL(vkGpu->vkInterface(), GetImageSubresourceLayout(vkGpu->device(), fImage,
                                                               &subresource, &uvLayout));
    bufferData = reinterpret_cast<uint8_t*>(mappedBuffer) + uvLayout.offset;
    for (size_t y = 0; y < height / 2; ++y) {
        for (size_t x = 0; x < width / 2; ++x) {
            auto [u, v] = GetExpectedUV(2*x, 2*y, width, height);
            bufferData[y * uvLayout.rowPitch + x * 2] = u;
            bufferData[y * uvLayout.rowPitch + x * 2 + 1] = v;
        }
    }

    // Flush the writes in case the chosen memory type is not host-coherent.
    VkMappedMemoryRange flushRange;
    flushRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE;
    flushRange.pNext = nullptr;
    flushRange.memory = fImageMemory;
    flushRange.offset = 0;
    flushRange.size = VK_WHOLE_SIZE;
    GR_VK_CALL_RESULT(vkGpu, result, FlushMappedMemoryRanges(vkGpu->device(), 1, &flushRange));
    if (result != VK_SUCCESS) {
        return false;
    }
    GR_VK_CALL(vkGpu->vkInterface(), UnmapMemory(vkGpu->device(), fImageMemory));

    // Bind image memory.
    GR_VK_CALL_RESULT(vkGpu, result, BindImageMemory(vkGpu->device(), fImage, fImageMemory, 0u));
    if (result != VK_SUCCESS) {
        return false;
    }

    // Wrap the image into SkImage.
    // Debug-only sanity check of the linear-tiling features this image needs
    // (isYCbCrSupported() performs the release-mode check).
    VkFormatProperties formatProperties;
    GR_VK_CALL(vkGpu->vkInterface(),
               GetPhysicalDeviceFormatProperties(vkGpu->physicalDevice(),
                                                 VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
                                                 &formatProperties));
    SkDEBUGCODE(auto linFlags = formatProperties.linearTilingFeatures;)
    SkASSERT((linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT) &&
             (linFlags & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT));

    // BT.709 narrow-range conversion with cosited-even chroma, matching the
    // expectations encoded in GetExpectedY/GetExpectedUV.
    skgpu::VulkanYcbcrConversionInfo ycbcrInfo = {vkImageInfo.format,
                                                  /*externalFormat=*/0,
                                                  VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
                                                  VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
                                                  VK_CHROMA_LOCATION_COSITED_EVEN,
                                                  VK_CHROMA_LOCATION_COSITED_EVEN,
                                                  VK_FILTER_LINEAR,
                                                  false,
                                                  formatProperties.linearTilingFeatures,
                                                  /*fComponents=*/{}};
    skgpu::VulkanAlloc alloc;
    alloc.fMemory = fImageMemory;
    alloc.fOffset = 0;
    alloc.fSize = requirements.size;

    GrVkImageInfo imageInfo = {fImage,
                               alloc,
                               VK_IMAGE_TILING_LINEAR,
                               VK_IMAGE_LAYOUT_UNDEFINED,
                               vkImageInfo.format,
                               vkImageInfo.usage,
                               1 /* sample count */,
                               1 /* levelCount */,
                               VK_QUEUE_FAMILY_IGNORED,
                               GrProtected::kNo,
                               ycbcrInfo};

    fGrTexture = GrBackendTextures::MakeVk(width, height, imageInfo);
    return true;
}
369 
vkGpu()370 GrVkGpu* VkYcbcrSamplerHelper::vkGpu() {
371     return (GrVkGpu*) fDContext->priv().getGpu();
372 }
373 
// Ganesh-mode constructor: stores the direct context and, when Graphite
// support is compiled in, nulls fSharedCtxt so the destructor and
// isYCbCrSupported() take the Ganesh code paths.
VkYcbcrSamplerHelper::VkYcbcrSamplerHelper(GrDirectContext* dContext) : fDContext(dContext) {
    // This helper only works with a Vulkan-backed context.
    SkASSERT_RELEASE(dContext->backend() == GrBackendApi::kVulkan);
#if defined(SK_GRAPHITE)
    // A null fSharedCtxt is the discriminator between Ganesh and Graphite mode.
    fSharedCtxt = nullptr;
#endif
}
380 
// Destroys the VkImage and frees its backing memory through whichever API
// created them: Graphite's VulkanSharedContext when fSharedCtxt is set,
// otherwise Ganesh's GrVkGpu. Handles are reset to VK_NULL_HANDLE after
// release so partial-creation failures are cleaned up exactly once.
VkYcbcrSamplerHelper::~VkYcbcrSamplerHelper() {
#ifdef SK_GRAPHITE
    if (fSharedCtxt) {
        if (fImage != VK_NULL_HANDLE) {
            VULKAN_CALL(fSharedCtxt->interface(),
                        DestroyImage(fSharedCtxt->device(), fImage, nullptr));
            fImage = VK_NULL_HANDLE;
        }
        if (fImageMemory != VK_NULL_HANDLE) {
            VULKAN_CALL(fSharedCtxt->interface(),
                        FreeMemory(fSharedCtxt->device(), fImageMemory, nullptr));
            fImageMemory = VK_NULL_HANDLE;
        }
    } else
#endif // SK_GRAPHITE
    {
        GrVkGpu* vkGpu = this->vkGpu();

        if (fImage != VK_NULL_HANDLE) {
            GR_VK_CALL(vkGpu->vkInterface(), DestroyImage(vkGpu->device(), fImage, nullptr));
            fImage = VK_NULL_HANDLE;
        }
        if (fImageMemory != VK_NULL_HANDLE) {
            GR_VK_CALL(vkGpu->vkInterface(), FreeMemory(vkGpu->device(), fImageMemory, nullptr));
            fImageMemory = VK_NULL_HANDLE;
        }
    }
}
409 
// Reports whether the device can support this helper's test image: the
// backend must support sampler YCbCr conversion, and
// VK_FORMAT_G8_B8R8_2PLANE_420_UNORM must offer linear-tiling sampling with
// linear filtering, YCbCr linear-filter conversion, and cosited chroma.
// Queries through Graphite's shared context when fSharedCtxt is set,
// otherwise through the Ganesh GrVkGpu.
bool VkYcbcrSamplerHelper::isYCbCrSupported() {
    VkFormatProperties formatProperties;
#ifdef SK_GRAPHITE
    if (fSharedCtxt) {
        if (!fSharedCtxt->vulkanCaps().supportsYcbcrConversion()) {
            return false;
        }

        SkASSERT(fSharedCtxt->physDevice() != VK_NULL_HANDLE);
        VULKAN_CALL(fSharedCtxt->interface(),
                    GetPhysicalDeviceFormatProperties(fSharedCtxt->physDevice(),
                                                      VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
                                                      &formatProperties));
    } else
#endif
    {
        GrVkGpu* vkGpu = this->vkGpu();
        if (!vkGpu->vkCaps().supportsYcbcrConversion()) {
            return false;
        }

        GR_VK_CALL(vkGpu->vkInterface(),
                GetPhysicalDeviceFormatProperties(vkGpu->physicalDevice(),
                                                    VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
                                                    &formatProperties));
    }

    // The createBackendTexture call (which is the point of this helper class) requires linear
    // support for VK_FORMAT_G8_B8R8_2PLANE_420_UNORM including sampling and cosited chroma.
    // Verify that the image format is supported.
    auto linFlags = formatProperties.linearTilingFeatures;
    if (!(linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) ||
        !(linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) ||
        !(linFlags & VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT) ||
        !(linFlags & VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT)) {
        // VK_FORMAT_G8_B8R8_2PLANE_420_UNORM is not supported
        return false;
    }
    return true;
}
450 #endif // SK_VULKAN
451