/*
 * Copyright 2021 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/ganesh/vk/GrVkBuffer.h"

#include "include/gpu/GpuTypes.h"
#include "include/gpu/ganesh/GrDirectContext.h"
#include "include/gpu/vk/VulkanMemoryAllocator.h"
#include "include/private/base/SkAlign.h"
#include "include/private/base/SkAssert.h"
#include "include/private/base/SkDebug.h"
#include "include/private/base/SkTemplates.h"
#include "src/gpu/ganesh/GrDirectContextPriv.h"
#include "src/gpu/ganesh/GrResourceProvider.h"
#include "src/gpu/ganesh/vk/GrVkCaps.h"
#include "src/gpu/ganesh/vk/GrVkDescriptorSet.h"
#include "src/gpu/ganesh/vk/GrVkGpu.h"
#include "src/gpu/ganesh/vk/GrVkResourceProvider.h"
#include "src/gpu/ganesh/vk/GrVkUniformHandler.h"
#include "src/gpu/ganesh/vk/GrVkUtil.h"
#include "src/gpu/vk/VulkanMemory.h"

#include <cstring>
#include <functional>
#include <utility>

#define VK_CALL(GPU, X) GR_VK_CALL(GPU->vkInterface(), X)

GrVkBuffer::GrVkBuffer(GrVkGpu* gpu,
                       size_t sizeInBytes,
                       GrGpuBufferType bufferType,
                       GrAccessPattern accessPattern,
                       VkBuffer buffer,
                       const skgpu::VulkanAlloc& alloc,
                       const GrVkDescriptorSet* uniformDescriptorSet,
                       std::string_view label)
        : GrGpuBuffer(gpu, sizeInBytes, bufferType, accessPattern, label)
        , fBuffer(buffer)
        , fAlloc(alloc)
        , fUniformDescriptorSet(uniformDescriptorSet) {
    // We always require dynamic buffers to be mappable
    SkASSERT(accessPattern != kDynamic_GrAccessPattern || this->isVkMappable());
    SkASSERT(bufferType != GrGpuBufferType::kUniform || uniformDescriptorSet);
    this->registerWithCache(skgpu::Budgeted::kYes);
}

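// Allocates a uniform descriptor set from the GrVkResourceProvider and writes the given buffer
// into its uniform binding. Returns nullptr if a descriptor set could not be obtained.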
static const GrVkDescriptorSet* make_uniform_desc_set(GrVkGpu* gpu, VkBuffer buffer, size_t size) {
    const GrVkDescriptorSet* descriptorSet = gpu->resourceProvider().getUniformDescriptorSet();
    if (!descriptorSet) {
        return nullptr;
    }

    VkDescriptorBufferInfo bufferInfo;
    memset(&bufferInfo, 0, sizeof(VkDescriptorBufferInfo));
    bufferInfo.buffer = buffer;
    bufferInfo.offset = 0;
    bufferInfo.range = size;

    VkWriteDescriptorSet descriptorWrite;
    memset(&descriptorWrite, 0, sizeof(VkWriteDescriptorSet));
    descriptorWrite.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptorWrite.pNext = nullptr;
    descriptorWrite.dstSet = *descriptorSet->descriptorSet();
    descriptorWrite.dstBinding = GrVkUniformHandler::kUniformBinding;
    descriptorWrite.dstArrayElement = 0;
    descriptorWrite.descriptorCount = 1;
    descriptorWrite.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    descriptorWrite.pImageInfo = nullptr;
    descriptorWrite.pBufferInfo = &bufferInfo;
    descriptorWrite.pTexelBufferView = nullptr;

    GR_VK_CALL(gpu->vkInterface(),
               UpdateDescriptorSets(gpu->device(), 1, &descriptorWrite, 0, nullptr));
    return descriptorSet;
}

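// Creates the VkBuffer with usage flags derived from bufferType, allocates and binds memory for
// it, and, for uniform buffers, sets up the descriptor set. Returns nullptr if any step fails.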
sk_sp<GrVkBuffer> GrVkBuffer::Make(GrVkGpu* gpu,
                                   size_t size,
                                   GrGpuBufferType bufferType,
                                   GrAccessPattern accessPattern) {
    VkBuffer buffer;
    skgpu::VulkanAlloc alloc;

    bool isProtected = gpu->protectedContext() &&
                       accessPattern == kStatic_GrAccessPattern;

    // Protected memory _never_ uses mappable buffers.
    // Otherwise, the only time we don't require mappable buffers is when we have a static
    // access pattern and we're on a device where gpu only memory has faster reads on the gpu than
    // memory that is also mappable on the cpu.
    bool requiresMappable = !isProtected &&
                            (accessPattern == kDynamic_GrAccessPattern ||
                             accessPattern == kStream_GrAccessPattern ||
                             !gpu->vkCaps().gpuOnlyBuffersMorePerformant());

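    // Pick the allocator usage hint: transfer buffers use the dedicated transfer usages, and
    // everything else is either CPU-writes/GPU-reads (when mappable) or GPU-only memory.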
    using BufferUsage = skgpu::VulkanMemoryAllocator::BufferUsage;
    BufferUsage allocUsage;

    if (bufferType == GrGpuBufferType::kXferCpuToGpu) {
        allocUsage = BufferUsage::kTransfersFromCpuToGpu;
    } else if (bufferType == GrGpuBufferType::kXferGpuToCpu) {
        allocUsage = BufferUsage::kTransfersFromGpuToCpu;
    } else {
        allocUsage = requiresMappable ? BufferUsage::kCpuWritesGpuReads : BufferUsage::kGpuOnly;
    }

    // create the buffer object
    VkBufferCreateInfo bufInfo;
    memset(&bufInfo, 0, sizeof(VkBufferCreateInfo));
    bufInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    bufInfo.flags = isProtected ? VK_BUFFER_CREATE_PROTECTED_BIT : 0;
    bufInfo.size = size;
    // To support SkMesh buffer updates we make Vertex and Index buffers capable of being transfer
    // dsts.
    switch (bufferType) {
        case GrGpuBufferType::kVertex:
            bufInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
            break;
        case GrGpuBufferType::kIndex:
            bufInfo.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
            break;
        case GrGpuBufferType::kDrawIndirect:
            bufInfo.usage = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
            break;
        case GrGpuBufferType::kUniform:
            bufInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            break;
        case GrGpuBufferType::kXferCpuToGpu:
            bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            break;
        case GrGpuBufferType::kXferGpuToCpu:
            bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
            break;
    }
    // We may not always get a mappable buffer for non-dynamic access buffers. Thus we set the
    // transfer dst usage bit in case we need to do a copy to write data.
    // TODO: Setting this extra usage flag doesn't really hurt, but maybe we can narrow the set of
    // buffers we apply it to beyond just the non-dynamic ones.
    if (!requiresMappable) {
        bufInfo.usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    }

    bufInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    bufInfo.queueFamilyIndexCount = 0;
    bufInfo.pQueueFamilyIndices = nullptr;

    VkResult err;
    err = VK_CALL(gpu, CreateBuffer(gpu->device(), &bufInfo, nullptr, &buffer));
    if (err) {
        return nullptr;
    }

    bool shouldPersistentlyMapCpuToGpu = gpu->vkCaps().shouldPersistentlyMapCpuToGpuBuffers();
    auto checkResult = [gpu, allocUsage, shouldPersistentlyMapCpuToGpu](VkResult result) {
        GR_VK_LOG_IF_NOT_SUCCESS(gpu, result, "skgpu::VulkanMemory::AllocBufferMemory "
                                 "(allocUsage:%d, shouldPersistentlyMapCpuToGpu:%d)",
                                 (int)allocUsage, (int)shouldPersistentlyMapCpuToGpu);
        return gpu->checkVkResult(result);
    };
    auto allocator = gpu->memoryAllocator();
    if (!skgpu::VulkanMemory::AllocBufferMemory(allocator,
                                                buffer,
                                                skgpu::Protected(isProtected),
                                                allocUsage,
                                                shouldPersistentlyMapCpuToGpu,
                                                checkResult,
                                                &alloc)) {
        VK_CALL(gpu, DestroyBuffer(gpu->device(), buffer, nullptr));
        return nullptr;
    }

    // Bind buffer
    GR_VK_CALL_RESULT(gpu, err, BindBufferMemory(gpu->device(),
                                                 buffer,
                                                 alloc.fMemory,
                                                 alloc.fOffset));
    if (err) {
        skgpu::VulkanMemory::FreeBufferMemory(allocator, alloc);
        VK_CALL(gpu, DestroyBuffer(gpu->device(), buffer, nullptr));
        return nullptr;
    }

    // If this is a uniform buffer we must set up a descriptor set
    const GrVkDescriptorSet* uniformDescSet = nullptr;
    if (bufferType == GrGpuBufferType::kUniform) {
        uniformDescSet = make_uniform_desc_set(gpu, buffer, size);
        if (!uniformDescSet) {
            VK_CALL(gpu, DestroyBuffer(gpu->device(), buffer, nullptr));
            skgpu::VulkanMemory::FreeBufferMemory(allocator, alloc);
            return nullptr;
        }
    }

    return sk_sp<GrVkBuffer>(new GrVkBuffer(
            gpu, size, bufferType, accessPattern, buffer, alloc, uniformDescSet,
            /*label=*/"MakeVkBuffer"));
}

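// Maps the buffer's memory if it is host-visible. When a non-empty read range is given, that
// range is also invalidated so any device writes become visible to the host.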
void GrVkBuffer::vkMap(size_t readOffset, size_t readSize) {
    SkASSERT(!fMapPtr);
    if (this->isVkMappable()) {
        // Not every buffer will use command buffer usage refs and instead the command buffer just
        // holds normal refs. Systems higher up in Ganesh should be making sure not to reuse a
        // buffer that currently has a ref held by something else. However, we do need to make sure
        // there isn't a buffer with just a command buffer usage that is trying to be mapped.
        SkASSERT(this->internalHasNoCommandBufferUsages());
        SkASSERT(fAlloc.fSize > 0);
        SkASSERT(fAlloc.fSize >= readOffset + readSize);

        GrVkGpu* gpu = this->getVkGpu();
        auto checkResult_mapAlloc = [gpu](VkResult result) {
            GR_VK_LOG_IF_NOT_SUCCESS(gpu, result, "skgpu::VulkanMemory::MapAlloc");
            return gpu->checkVkResult(result);
        };
        auto allocator = gpu->memoryAllocator();
        fMapPtr = skgpu::VulkanMemory::MapAlloc(allocator, fAlloc, checkResult_mapAlloc);
        if (fMapPtr && readSize != 0) {
            auto checkResult_invalidateMapAlloc = [gpu, readOffset, readSize](VkResult result) {
                GR_VK_LOG_IF_NOT_SUCCESS(gpu, result, "skgpu::VulkanMemory::InvalidateMappedAlloc "
                                         "(readOffset:%zu, readSize:%zu)",
                                         readOffset, readSize);
                return gpu->checkVkResult(result);
            };
            // "Invalidate" here means make device writes visible to the host. That is, it makes
            // sure any GPU writes are finished in the range we might read from.
            skgpu::VulkanMemory::InvalidateMappedAlloc(allocator,
                                                       fAlloc,
                                                       readOffset,
                                                       readSize,
                                                       checkResult_invalidateMapAlloc);
        }
    }
}

void GrVkBuffer::vkUnmap(size_t flushOffset, size_t flushSize) {
    SkASSERT(fMapPtr && this->isVkMappable());

    SkASSERT(fAlloc.fSize > 0);
    SkASSERT(fAlloc.fSize >= flushOffset + flushSize);

    GrVkGpu* gpu = this->getVkGpu();
    auto checkResult = [gpu, flushOffset, flushSize](VkResult result) {
        GR_VK_LOG_IF_NOT_SUCCESS(gpu, result, "skgpu::VulkanMemory::FlushMappedAlloc "
                                 "(flushOffset:%zu, flushSize:%zu)",
                                 flushOffset, flushSize);
        return gpu->checkVkResult(result);
    };
    auto allocator = this->getVkGpu()->memoryAllocator();
    skgpu::VulkanMemory::FlushMappedAlloc(allocator, fAlloc, flushOffset, flushSize, checkResult);
    skgpu::VulkanMemory::UnmapAlloc(allocator, fAlloc);
}

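// Writes src into this buffer without mapping it: small, 4-byte-aligned updates use
// vkCmdUpdateBuffer (unless the caps say to avoid it); everything else is staged through a
// CPU-to-GPU transfer buffer and copied on the GPU.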
void GrVkBuffer::copyCpuDataToGpuBuffer(const void* src, size_t offset, size_t size) {
    SkASSERT(src);

    GrVkGpu* gpu = this->getVkGpu();

    // The vulkan api restricts the use of vkCmdUpdateBuffer to updates that are less than or equal
    // to 65536 bytes and a size and offset that are both 4 byte aligned.
    if ((size <= 65536) && SkIsAlign4(size) && SkIsAlign4(offset) &&
        !gpu->vkCaps().avoidUpdateBuffers()) {
        gpu->updateBuffer(sk_ref_sp(this), src, offset, size);
    } else {
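        // Fall back to a staging transfer: upload src into a CPU-to-GPU transfer buffer and then
        // copy it into this buffer on the GPU.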
        GrResourceProvider* resourceProvider = gpu->getContext()->priv().resourceProvider();
        sk_sp<GrGpuBuffer> transferBuffer = resourceProvider->createBuffer(
                src,
                size,
                GrGpuBufferType::kXferCpuToGpu,
                kDynamic_GrAccessPattern);
        if (!transferBuffer) {
            return;
        }

        gpu->transferFromBufferToBuffer(std::move(transferBuffer),
                                        /*srcOffset=*/0,
                                        sk_ref_sp(this),
                                        offset,
                                        size);
    }
}

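// Adds a VkBufferMemoryBarrier covering the entire buffer between the given source and
// destination access/stage masks.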
void GrVkBuffer::addMemoryBarrier(VkAccessFlags srcAccessMask,
                                  VkAccessFlags dstAccessMask,
                                  VkPipelineStageFlags srcStageMask,
                                  VkPipelineStageFlags dstStageMask,
                                  bool byRegion) const {
    VkBufferMemoryBarrier bufferMemoryBarrier = {
            VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,  // sType
            nullptr,                                  // pNext
            srcAccessMask,                            // srcAccessMask
            dstAccessMask,                            // dstAccessMask
            VK_QUEUE_FAMILY_IGNORED,                  // srcQueueFamilyIndex
            VK_QUEUE_FAMILY_IGNORED,                  // dstQueueFamilyIndex
            fBuffer,                                  // buffer
            0,                                        // offset
            this->size(),                             // size
    };

    // TODO: restrict to area of buffer we're interested in
    this->getVkGpu()->addBufferMemoryBarrier(srcStageMask, dstStageMask, byRegion,
                                             &bufferMemoryBarrier);
}

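// Releases the Vulkan resources owned by this buffer: unmaps it if still mapped, recycles the
// uniform descriptor set, destroys the VkBuffer, and frees its memory allocation.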
void GrVkBuffer::vkRelease() {
    if (this->wasDestroyed()) {
        return;
    }

    if (fMapPtr) {
        this->vkUnmap(0, this->size());
        fMapPtr = nullptr;
    }

    if (fUniformDescriptorSet) {
        fUniformDescriptorSet->recycle();
        fUniformDescriptorSet = nullptr;
    }

    SkASSERT(fBuffer);
    SkASSERT(fAlloc.fMemory && fAlloc.fBackendMemory);
    VK_CALL(this->getVkGpu(), DestroyBuffer(this->getVkGpu()->device(), fBuffer, nullptr));
    fBuffer = VK_NULL_HANDLE;

    skgpu::VulkanMemory::FreeBufferMemory(this->getVkGpu()->memoryAllocator(), fAlloc);
    fAlloc.fMemory = VK_NULL_HANDLE;
    fAlloc.fBackendMemory = 0;
}

void GrVkBuffer::onRelease() {
    this->vkRelease();
    this->GrGpuBuffer::onRelease();
}

void GrVkBuffer::onAbandon() {
    this->vkRelease();
    this->GrGpuBuffer::onAbandon();
}

void GrVkBuffer::onMap(MapType type) {
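    // Only a read map needs the mapped range invalidated so device writes are visible to the
    // host; for kWriteDiscard we pass an empty read range.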
    this->vkMap(0, type == MapType::kRead ? this->size() : 0);
}

void GrVkBuffer::onUnmap(MapType type) {
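    // A kWriteDiscard map may have written the whole buffer through the mapped pointer, so flush
    // the full size; a kRead map has nothing to flush.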
    this->vkUnmap(0, type == MapType::kWriteDiscard ? this->size() : 0);
}

bool GrVkBuffer::onClearToZero() { return this->getVkGpu()->zeroBuffer(sk_ref_sp(this)); }

bool GrVkBuffer::onUpdateData(const void* src, size_t offset, size_t size, bool /*preserve*/) {
    if (this->isVkMappable()) {
        // We won't be reading the mapped memory so pass an empty range.
        this->vkMap(0, 0);
        if (!fMapPtr) {
            return false;
        }
        memcpy(SkTAddOffset<void>(fMapPtr, offset), src, size);
        // We only need to flush the updated portion so pass the true range here.
        this->vkUnmap(offset, size);
        fMapPtr = nullptr;
    } else {
        this->copyCpuDataToGpuBuffer(src, offset, size);
    }
    return true;
}

GrVkGpu* GrVkBuffer::getVkGpu() const {
    SkASSERT(!this->wasDestroyed());
    return static_cast<GrVkGpu*>(this->getGpu());
}

const VkDescriptorSet* GrVkBuffer::uniformDescriptorSet() const {
    SkASSERT(fUniformDescriptorSet);
    return fUniformDescriptorSet->descriptorSet();
}