1*c8dee2aaSAndroid Build Coastguard Worker /*
2*c8dee2aaSAndroid Build Coastguard Worker * Copyright 2010 Google Inc.
3*c8dee2aaSAndroid Build Coastguard Worker *
4*c8dee2aaSAndroid Build Coastguard Worker * Use of this source code is governed by a BSD-style license that can be
5*c8dee2aaSAndroid Build Coastguard Worker * found in the LICENSE file.
6*c8dee2aaSAndroid Build Coastguard Worker */
7*c8dee2aaSAndroid Build Coastguard Worker #include "src/gpu/ganesh/GrBufferAllocPool.h"
8*c8dee2aaSAndroid Build Coastguard Worker
9*c8dee2aaSAndroid Build Coastguard Worker #include "include/gpu/ganesh/GrDirectContext.h"
10*c8dee2aaSAndroid Build Coastguard Worker #include "include/private/base/SkMacros.h"
11*c8dee2aaSAndroid Build Coastguard Worker #include "src/base/SkSafeMath.h"
12*c8dee2aaSAndroid Build Coastguard Worker #include "src/core/SkTraceEvent.h"
13*c8dee2aaSAndroid Build Coastguard Worker #include "src/gpu/ganesh/GrCaps.h"
14*c8dee2aaSAndroid Build Coastguard Worker #include "src/gpu/ganesh/GrCpuBuffer.h"
15*c8dee2aaSAndroid Build Coastguard Worker #include "src/gpu/ganesh/GrDirectContextPriv.h"
16*c8dee2aaSAndroid Build Coastguard Worker #include "src/gpu/ganesh/GrGpu.h"
17*c8dee2aaSAndroid Build Coastguard Worker #include "src/gpu/ganesh/GrGpuBuffer.h"
18*c8dee2aaSAndroid Build Coastguard Worker #include "src/gpu/ganesh/GrResourceProvider.h"
19*c8dee2aaSAndroid Build Coastguard Worker
20*c8dee2aaSAndroid Build Coastguard Worker #include <algorithm>
21*c8dee2aaSAndroid Build Coastguard Worker #include <cstdint>
22*c8dee2aaSAndroid Build Coastguard Worker #include <cstring>
23*c8dee2aaSAndroid Build Coastguard Worker #include <memory>
24*c8dee2aaSAndroid Build Coastguard Worker
Make(int maxBuffersToCache)25*c8dee2aaSAndroid Build Coastguard Worker sk_sp<GrBufferAllocPool::CpuBufferCache> GrBufferAllocPool::CpuBufferCache::Make(
26*c8dee2aaSAndroid Build Coastguard Worker int maxBuffersToCache) {
27*c8dee2aaSAndroid Build Coastguard Worker return sk_sp<CpuBufferCache>(new CpuBufferCache(maxBuffersToCache));
28*c8dee2aaSAndroid Build Coastguard Worker }
29*c8dee2aaSAndroid Build Coastguard Worker
CpuBufferCache(int maxBuffersToCache)30*c8dee2aaSAndroid Build Coastguard Worker GrBufferAllocPool::CpuBufferCache::CpuBufferCache(int maxBuffersToCache)
31*c8dee2aaSAndroid Build Coastguard Worker : fMaxBuffersToCache(maxBuffersToCache) {
32*c8dee2aaSAndroid Build Coastguard Worker if (fMaxBuffersToCache) {
33*c8dee2aaSAndroid Build Coastguard Worker fBuffers = std::make_unique<Buffer[]>(fMaxBuffersToCache);
34*c8dee2aaSAndroid Build Coastguard Worker }
35*c8dee2aaSAndroid Build Coastguard Worker }
36*c8dee2aaSAndroid Build Coastguard Worker
// Returns a CPU buffer of at least `size` bytes, reusing a cached buffer when
// possible. Only buffers of exactly kDefaultBufferSize are cached; any other
// size is allocated fresh and not retained. If mustBeInitialized is true the
// buffer contents are zeroed (once per cached buffer, tracked by fCleared).
sk_sp<GrCpuBuffer> GrBufferAllocPool::CpuBufferCache::makeBuffer(size_t size,
                                                                 bool mustBeInitialized) {
    SkASSERT(size > 0);
    Buffer* result = nullptr;
    if (size == kDefaultBufferSize) {
        int i = 0;
        // Scan the occupied prefix of the slot array for a buffer with no
        // outside refs. Note: no early break, so `result` is the *last*
        // reusable buffer before the first empty slot, and `i` ends at that
        // first empty slot (or fMaxBuffersToCache).
        for (; i < fMaxBuffersToCache && fBuffers[i].fBuffer; ++i) {
            SkASSERT(fBuffers[i].fBuffer->size() == kDefaultBufferSize);
            if (fBuffers[i].fBuffer->unique()) {
                result = &fBuffers[i];
            }
        }
        // Nothing reusable but an empty slot remains: allocate and cache.
        if (!result && i < fMaxBuffersToCache) {
            fBuffers[i].fBuffer = GrCpuBuffer::Make(size);
            result = &fBuffers[i];
        }
    }
    // Cache miss (cache full, or non-default size): make an uncached buffer
    // via a temporary entry so the clearing logic below is shared.
    Buffer tempResult;
    if (!result) {
        tempResult.fBuffer = GrCpuBuffer::Make(size);
        result = &tempResult;
    }
    if (mustBeInitialized && !result->fCleared) {
        // Zero the entire buffer once; fCleared persists for cached entries
        // so they are not re-cleared on every reuse.
        result->fCleared = true;
        memset(result->fBuffer->data(), 0, result->fBuffer->size());
    }
    return result->fBuffer;
}
65*c8dee2aaSAndroid Build Coastguard Worker
releaseAll()66*c8dee2aaSAndroid Build Coastguard Worker void GrBufferAllocPool::CpuBufferCache::releaseAll() {
67*c8dee2aaSAndroid Build Coastguard Worker for (int i = 0; i < fMaxBuffersToCache && fBuffers[i].fBuffer; ++i) {
68*c8dee2aaSAndroid Build Coastguard Worker fBuffers[i].fBuffer.reset();
69*c8dee2aaSAndroid Build Coastguard Worker fBuffers[i].fCleared = false;
70*c8dee2aaSAndroid Build Coastguard Worker }
71*c8dee2aaSAndroid Build Coastguard Worker }
72*c8dee2aaSAndroid Build Coastguard Worker
73*c8dee2aaSAndroid Build Coastguard Worker //////////////////////////////////////////////////////////////////////////////
74*c8dee2aaSAndroid Build Coastguard Worker
75*c8dee2aaSAndroid Build Coastguard Worker #ifdef SK_DEBUG
76*c8dee2aaSAndroid Build Coastguard Worker #define VALIDATE validate
77*c8dee2aaSAndroid Build Coastguard Worker #else
VALIDATE(bool=false)78*c8dee2aaSAndroid Build Coastguard Worker static void VALIDATE(bool = false) {}
79*c8dee2aaSAndroid Build Coastguard Worker #endif
80*c8dee2aaSAndroid Build Coastguard Worker
81*c8dee2aaSAndroid Build Coastguard Worker #define UNMAP_BUFFER(block) \
82*c8dee2aaSAndroid Build Coastguard Worker do { \
83*c8dee2aaSAndroid Build Coastguard Worker TRACE_EVENT_INSTANT1("skia.gpu", "GrBufferAllocPool Unmapping Buffer", \
84*c8dee2aaSAndroid Build Coastguard Worker TRACE_EVENT_SCOPE_THREAD, "percent_unwritten", \
85*c8dee2aaSAndroid Build Coastguard Worker (float)((block).fBytesFree) / (block).fBuffer->size()); \
86*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(!block.fBuffer->isCpuBuffer()); \
87*c8dee2aaSAndroid Build Coastguard Worker static_cast<GrGpuBuffer*>(block.fBuffer.get())->unmap(); \
88*c8dee2aaSAndroid Build Coastguard Worker } while (false)
89*c8dee2aaSAndroid Build Coastguard Worker
// Constructs a pool that sub-allocates from buffers of `bufferType` created
// on `gpu`. The optional cpuBufferCache is shared across pools so CPU-side
// staging buffers can be recycled between flushes.
GrBufferAllocPool::GrBufferAllocPool(GrGpu* gpu, GrGpuBufferType bufferType,
                                     sk_sp<CpuBufferCache> cpuBufferCache)
        : fBlocks(8)  // pre-reserve space for 8 block records
        , fCpuBufferCache(std::move(cpuBufferCache))
        , fGpu(gpu)
        , fBufferType(bufferType) {}
96*c8dee2aaSAndroid Build Coastguard Worker
deleteBlocks()97*c8dee2aaSAndroid Build Coastguard Worker void GrBufferAllocPool::deleteBlocks() {
98*c8dee2aaSAndroid Build Coastguard Worker if (!fBlocks.empty()) {
99*c8dee2aaSAndroid Build Coastguard Worker GrBuffer* buffer = fBlocks.back().fBuffer.get();
100*c8dee2aaSAndroid Build Coastguard Worker if (!buffer->isCpuBuffer() && static_cast<GrGpuBuffer*>(buffer)->isMapped()) {
101*c8dee2aaSAndroid Build Coastguard Worker UNMAP_BUFFER(fBlocks.back());
102*c8dee2aaSAndroid Build Coastguard Worker }
103*c8dee2aaSAndroid Build Coastguard Worker }
104*c8dee2aaSAndroid Build Coastguard Worker while (!fBlocks.empty()) {
105*c8dee2aaSAndroid Build Coastguard Worker this->destroyBlock();
106*c8dee2aaSAndroid Build Coastguard Worker }
107*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(!fBufferPtr);
108*c8dee2aaSAndroid Build Coastguard Worker }
109*c8dee2aaSAndroid Build Coastguard Worker
~GrBufferAllocPool()110*c8dee2aaSAndroid Build Coastguard Worker GrBufferAllocPool::~GrBufferAllocPool() {
111*c8dee2aaSAndroid Build Coastguard Worker VALIDATE();
112*c8dee2aaSAndroid Build Coastguard Worker this->deleteBlocks();
113*c8dee2aaSAndroid Build Coastguard Worker }
114*c8dee2aaSAndroid Build Coastguard Worker
// Returns the pool to its freshly-constructed state: zero the usage counter,
// drop every block, and release the CPU staging buffer (resetCpuData(0)
// resets fCpuStagingBuffer).
void GrBufferAllocPool::reset() {
    VALIDATE();
    fBytesInUse = 0;
    this->deleteBlocks();
    this->resetCpuData(0);
    VALIDATE();
}
122*c8dee2aaSAndroid Build Coastguard Worker
// Ends the current write session: makes pending data visible to the GPU and
// clears fBufferPtr. CPU-backed blocks need no work since fBufferPtr aliases
// their storage directly.
void GrBufferAllocPool::unmap() {
    VALIDATE();

    if (fBufferPtr) {
        BufferBlock& block = fBlocks.back();
        GrBuffer* buffer = block.fBuffer.get();
        if (!buffer->isCpuBuffer()) {
            if (static_cast<GrGpuBuffer*>(buffer)->isMapped()) {
                // Writes went straight into the mapped GPU buffer; just unmap.
                UNMAP_BUFFER(block);
            } else {
                // Writes went through the CPU staging buffer; upload only the
                // used prefix of the block.
                size_t flushSize = block.fBuffer->size() - block.fBytesFree;
                this->flushCpuData(fBlocks.back(), flushSize);
            }
        }
        fBufferPtr = nullptr;
    }
    VALIDATE();
}
141*c8dee2aaSAndroid Build Coastguard Worker
142*c8dee2aaSAndroid Build Coastguard Worker #ifdef SK_DEBUG
// Debug-only invariant checker (compiled under SK_DEBUG via the VALIDATE
// macro). `unusedBlockAllowed` relaxes the "every block holds used bytes"
// rule, which is legitimately violated right after createBlock().
void GrBufferAllocPool::validate(bool unusedBlockAllowed) const {
    bool wasDestroyed = false;
    if (fBufferPtr) {
        SkASSERT(!fBlocks.empty());
        const GrBuffer* buffer = fBlocks.back().fBuffer.get();
        // If the tail GPU buffer is not mapped, writes must be going through
        // the CPU staging buffer, and fBufferPtr must alias its storage.
        if (!buffer->isCpuBuffer() && !static_cast<const GrGpuBuffer*>(buffer)->isMapped()) {
            SkASSERT(fCpuStagingBuffer && fCpuStagingBuffer->data() == fBufferPtr);
        }
    } else if (!fBlocks.empty()) {
        // No active write pointer: nothing may remain mapped.
        const GrBuffer* buffer = fBlocks.back().fBuffer.get();
        SkASSERT(buffer->isCpuBuffer() || !static_cast<const GrGpuBuffer*>(buffer)->isMapped());
    }
    size_t bytesInUse = 0;
    // Only the tail block is ever allowed to be mapped.
    for (int i = 0; i < fBlocks.size() - 1; ++i) {
        const GrBuffer* buffer = fBlocks[i].fBuffer.get();
        SkASSERT(buffer->isCpuBuffer() || !static_cast<const GrGpuBuffer*>(buffer)->isMapped());
    }
    // Recompute the total used-byte count; bail out of the accounting checks
    // if any GPU buffer was destroyed behind our back (context abandoned).
    for (int i = 0; !wasDestroyed && i < fBlocks.size(); ++i) {
        GrBuffer* buffer = fBlocks[i].fBuffer.get();
        if (!buffer->isCpuBuffer() && static_cast<GrGpuBuffer*>(buffer)->wasDestroyed()) {
            wasDestroyed = true;
        } else {
            size_t bytes = fBlocks[i].fBuffer->size() - fBlocks[i].fBytesFree;
            bytesInUse += bytes;
            SkASSERT(bytes || unusedBlockAllowed);
        }
    }

    if (!wasDestroyed) {
        SkASSERT(bytesInUse == fBytesInUse);
        if (unusedBlockAllowed) {
            // At most one (the newest) block may be completely unused.
            SkASSERT((fBytesInUse && !fBlocks.empty()) ||
                     (!fBytesInUse && (fBlocks.size() < 2)));
        } else {
            SkASSERT((0 == fBytesInUse) == fBlocks.empty());
        }
    }
}
181*c8dee2aaSAndroid Build Coastguard Worker #endif
182*c8dee2aaSAndroid Build Coastguard Worker
// Number of padding bytes needed to round `x` up to the next multiple of
// `alignment` (0 when x is already aligned).
static inline size_t align_up_pad(size_t x, size_t alignment) {
    size_t remainder = x % alignment;
    return remainder ? alignment - remainder : 0;
}
186*c8dee2aaSAndroid Build Coastguard Worker
// Rounds `x` down to the nearest multiple of `alignment`.
static inline size_t align_down(size_t x, uint32_t alignment) {
    return x - (x % alignment);
}
190*c8dee2aaSAndroid Build Coastguard Worker
// Sub-allocates `size` bytes at `alignment` from the pool. On success returns
// a CPU-writable pointer for the caller to fill, and sets *buffer/*offset to
// the backing GrBuffer and the byte offset of the allocation within it.
// Returns nullptr on overflow or if a new block cannot be created.
void* GrBufferAllocPool::makeSpace(size_t size,
                                   size_t alignment,
                                   sk_sp<const GrBuffer>* buffer,
                                   size_t* offset) {
    VALIDATE();

    SkASSERT(buffer);
    SkASSERT(offset);

    // Fast path: try to carve the allocation out of the current tail block.
    if (fBufferPtr) {
        BufferBlock& back = fBlocks.back();
        size_t usedBytes = back.fBuffer->size() - back.fBytesFree;
        size_t pad = align_up_pad(usedBytes, alignment);
        SkSafeMath safeMath;
        size_t alignedSize = safeMath.add(pad, size);
        if (!safeMath.ok()) {
            // pad + size overflowed size_t.
            return nullptr;
        }
        if (alignedSize <= back.fBytesFree) {
            // Zero the alignment padding so no stale bytes are uploaded.
            memset((void*)(reinterpret_cast<intptr_t>(fBufferPtr) + usedBytes), 0, pad);
            usedBytes += pad;
            *offset = usedBytes;
            *buffer = back.fBuffer;
            back.fBytesFree -= alignedSize;
            fBytesInUse += alignedSize;
            VALIDATE();
            return (void*)(reinterpret_cast<intptr_t>(fBufferPtr) + usedBytes);
        }
    }

    // We could honor the space request using by a partial update of the current
    // VB (if there is room). But we don't currently use draw calls to GL that
    // allow the driver to know that previously issued draws won't read from
    // the part of the buffer we update. Also, when this was written the GL
    // buffer implementation was cheating on the actual buffer size by shrinking
    // the buffer in updateData() if the amount of data passed was less than
    // the full buffer size. This is old code and both concerns may be obsolete.

    // Slow path: start a fresh block sized for at least `size` bytes.
    if (!this->createBlock(size)) {
        return nullptr;
    }
    SkASSERT(fBufferPtr);

    // The allocation starts at offset 0 of the new block, so it is trivially
    // aligned and needs no padding.
    *offset = 0;
    BufferBlock& back = fBlocks.back();
    *buffer = back.fBuffer;
    back.fBytesFree -= size;
    fBytesInUse += size;
    VALIDATE();
    return fBufferPtr;
}
242*c8dee2aaSAndroid Build Coastguard Worker
// Like makeSpace(), but greedy: guarantees at least `minSize` bytes and then
// hands the caller ALL remaining space in the block (rounded down to
// `alignment`), reporting the granted amount via *actualSize. A new block of
// `fallbackSize` is created when the current one cannot satisfy minSize.
void* GrBufferAllocPool::makeSpaceAtLeast(size_t minSize,
                                          size_t fallbackSize,
                                          size_t alignment,
                                          sk_sp<const GrBuffer>* buffer,
                                          size_t* offset,
                                          size_t* actualSize) {
    VALIDATE();

    SkASSERT(buffer);
    SkASSERT(offset);
    SkASSERT(actualSize);

    size_t usedBytes = (fBlocks.empty()) ? 0 : fBlocks.back().fBuffer->size() -
                                               fBlocks.back().fBytesFree;
    size_t pad = align_up_pad(usedBytes, alignment);
    if (!fBufferPtr || fBlocks.empty() || (minSize + pad) > fBlocks.back().fBytesFree) {
        // We either don't have a block yet or the current block doesn't have enough free space.
        // Create a new one.
        if (!this->createBlock(fallbackSize)) {
            return nullptr;
        }
        // A fresh block starts empty, so no padding is needed.
        usedBytes = 0;
        pad = 0;
    }
    SkASSERT(fBufferPtr);

    // Consume padding first, to make subsequent alignment math easier
    memset(static_cast<char*>(fBufferPtr) + usedBytes, 0, pad);
    usedBytes += pad;
    fBlocks.back().fBytesFree -= pad;
    fBytesInUse += pad;

    // Give caller all remaining space in this block (but aligned correctly)
    size_t size = align_down(fBlocks.back().fBytesFree, alignment);
    *offset = usedBytes;
    *buffer = fBlocks.back().fBuffer;
    *actualSize = size;
    fBlocks.back().fBytesFree -= size;
    fBytesInUse += size;
    VALIDATE();
    return static_cast<char*>(fBufferPtr) + usedBytes;
}
285*c8dee2aaSAndroid Build Coastguard Worker
// Returns `bytes` of the most recent allocation to the pool (used after a
// greedy makeSpaceAtLeast() when the caller wrote less than it was granted).
void GrBufferAllocPool::putBack(size_t bytes) {
    VALIDATE();
    if (!bytes) {
        return;
    }
    SkASSERT(!fBlocks.empty());
    BufferBlock& block = fBlocks.back();
    // Caller shouldn't try to put back more than they've taken and all those bytes should fit into
    // one block. All the uses of this call are sequential with a single makeSpaceAtLeast call. So
    // we should not have a case where someone is putting back bytes that are greater than the
    // current block.
    // It is possible the caller returns all their allocated bytes thus the <= and not just <.
    SkASSERT(bytes <= (block.fBuffer->size() - block.fBytesFree));
    block.fBytesFree += bytes;
    fBytesInUse -= bytes;

    // We don't allow blocks without any used bytes. So if we end up in that case after putting
    // back the bytes then destroy the block. This scenario shouldn't occur often, but even if we
    // end up allocating a new block immediately after destroying this one, the GPU and CPU buffers
    // will usually be cached so the new block shouldn't be too expensive to make.
    // TODO: This was true in older versions and uses of this class but is it still needed to
    // have this restriction?
    if (block.fBytesFree == block.fBuffer->size()) {
        GrBuffer* buffer = block.fBuffer.get();
        // Must unmap before destroyBlock(), which asserts nothing is mapped.
        if (!buffer->isCpuBuffer() && static_cast<GrGpuBuffer*>(buffer)->isMapped()) {
            UNMAP_BUFFER(block);
        }
        this->destroyBlock();
    }

    VALIDATE();
}
318*c8dee2aaSAndroid Build Coastguard Worker
// Appends a new block of at least kDefaultBufferSize bytes, flushes/unmaps
// the previous tail block, and points fBufferPtr at writable storage for the
// new block (mapped GPU memory, CPU buffer data, or the staging buffer).
// Returns false if the backing buffer could not be created.
bool GrBufferAllocPool::createBlock(size_t requestSize) {
    size_t size = std::max(requestSize, kDefaultBufferSize);

    VALIDATE();

    BufferBlock& block = fBlocks.push_back();

    block.fBuffer = this->getBuffer(size);
    if (!block.fBuffer) {
        // Roll back the speculative push_back on failure.
        fBlocks.pop_back();
        return false;
    }

    block.fBytesFree = block.fBuffer->size();
    if (fBufferPtr) {
        // Finish the previous tail block: unmap it if mapped, otherwise
        // upload whatever was written through the staging buffer.
        SkASSERT(fBlocks.size() > 1);
        BufferBlock& prev = fBlocks.fromBack(1);
        GrBuffer* buffer = prev.fBuffer.get();
        if (!buffer->isCpuBuffer()) {
            if (static_cast<GrGpuBuffer*>(buffer)->isMapped()) {
                UNMAP_BUFFER(prev);
            } else {
                this->flushCpuData(prev, prev.fBuffer->size() - prev.fBytesFree);
            }
        }
        fBufferPtr = nullptr;
    }

    SkASSERT(!fBufferPtr);

    // If the buffer is CPU-backed we "map" it because it is free to do so and saves a copy.
    // Otherwise when buffer mapping is supported we map if the buffer size is greater than the
    // threshold.
    if (block.fBuffer->isCpuBuffer()) {
        fBufferPtr = static_cast<GrCpuBuffer*>(block.fBuffer.get())->data();
        SkASSERT(fBufferPtr);
    } else {
        if (GrCaps::kNone_MapFlags != fGpu->caps()->mapBufferFlags() &&
            size > fGpu->caps()->bufferMapThreshold()) {
            fBufferPtr = static_cast<GrGpuBuffer*>(block.fBuffer.get())->map();
        }
    }
    if (!fBufferPtr) {
        // Mapping unsupported, below threshold, or failed: fall back to
        // writing through the CPU staging buffer.
        this->resetCpuData(block.fBytesFree);
        fBufferPtr = fCpuStagingBuffer->data();
    }

    // The brand-new block is legitimately unused at this point.
    VALIDATE(true);

    return true;
}
370*c8dee2aaSAndroid Build Coastguard Worker
destroyBlock()371*c8dee2aaSAndroid Build Coastguard Worker void GrBufferAllocPool::destroyBlock() {
372*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(!fBlocks.empty());
373*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(fBlocks.back().fBuffer->isCpuBuffer() ||
374*c8dee2aaSAndroid Build Coastguard Worker !static_cast<GrGpuBuffer*>(fBlocks.back().fBuffer.get())->isMapped());
375*c8dee2aaSAndroid Build Coastguard Worker fBlocks.pop_back();
376*c8dee2aaSAndroid Build Coastguard Worker fBufferPtr = nullptr;
377*c8dee2aaSAndroid Build Coastguard Worker }
378*c8dee2aaSAndroid Build Coastguard Worker
resetCpuData(size_t newSize)379*c8dee2aaSAndroid Build Coastguard Worker void GrBufferAllocPool::resetCpuData(size_t newSize) {
380*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(newSize >= kDefaultBufferSize || !newSize);
381*c8dee2aaSAndroid Build Coastguard Worker if (!newSize) {
382*c8dee2aaSAndroid Build Coastguard Worker fCpuStagingBuffer.reset();
383*c8dee2aaSAndroid Build Coastguard Worker return;
384*c8dee2aaSAndroid Build Coastguard Worker }
385*c8dee2aaSAndroid Build Coastguard Worker if (fCpuStagingBuffer && newSize <= fCpuStagingBuffer->size()) {
386*c8dee2aaSAndroid Build Coastguard Worker return;
387*c8dee2aaSAndroid Build Coastguard Worker }
388*c8dee2aaSAndroid Build Coastguard Worker bool mustInitialize = fGpu->caps()->mustClearUploadedBufferData();
389*c8dee2aaSAndroid Build Coastguard Worker fCpuStagingBuffer = fCpuBufferCache ? fCpuBufferCache->makeBuffer(newSize, mustInitialize)
390*c8dee2aaSAndroid Build Coastguard Worker : GrCpuBuffer::Make(newSize);
391*c8dee2aaSAndroid Build Coastguard Worker }
392*c8dee2aaSAndroid Build Coastguard Worker
// Uploads the first `flushSize` bytes of the CPU staging buffer (which
// fBufferPtr aliases) into the block's GPU buffer, via map/memcpy when the
// caps allow it and the size is above the map threshold, otherwise via
// updateData().
void GrBufferAllocPool::flushCpuData(const BufferBlock& block, size_t flushSize) {
    SkASSERT(block.fBuffer.get());
    SkASSERT(!block.fBuffer.get()->isCpuBuffer());
    GrGpuBuffer* buffer = static_cast<GrGpuBuffer*>(block.fBuffer.get());
    SkASSERT(!buffer->isMapped());
    SkASSERT(fCpuStagingBuffer && fCpuStagingBuffer->data() == fBufferPtr);
    SkASSERT(flushSize <= buffer->size());
    VALIDATE(true);

    if (GrCaps::kNone_MapFlags != fGpu->caps()->mapBufferFlags() &&
        flushSize > fGpu->caps()->bufferMapThreshold()) {
        void* data = buffer->map();
        if (data) {
            memcpy(data, fBufferPtr, flushSize);
            UNMAP_BUFFER(block);
            return;
        }
        // map() can fail; fall through to the updateData() path.
    }
    buffer->updateData(fBufferPtr, /*offset=*/0, flushSize, /*preserve=*/false);
    VALIDATE(true);
}
414*c8dee2aaSAndroid Build Coastguard Worker
getBuffer(size_t size)415*c8dee2aaSAndroid Build Coastguard Worker sk_sp<GrBuffer> GrBufferAllocPool::getBuffer(size_t size) {
416*c8dee2aaSAndroid Build Coastguard Worker const GrCaps& caps = *fGpu->caps();
417*c8dee2aaSAndroid Build Coastguard Worker auto resourceProvider = fGpu->getContext()->priv().resourceProvider();
418*c8dee2aaSAndroid Build Coastguard Worker if (caps.preferClientSideDynamicBuffers() ||
419*c8dee2aaSAndroid Build Coastguard Worker (fBufferType == GrGpuBufferType::kDrawIndirect && caps.useClientSideIndirectBuffers())) {
420*c8dee2aaSAndroid Build Coastguard Worker // Create a CPU buffer.
421*c8dee2aaSAndroid Build Coastguard Worker bool mustInitialize = caps.mustClearUploadedBufferData();
422*c8dee2aaSAndroid Build Coastguard Worker return fCpuBufferCache ? fCpuBufferCache->makeBuffer(size, mustInitialize)
423*c8dee2aaSAndroid Build Coastguard Worker : GrCpuBuffer::Make(size);
424*c8dee2aaSAndroid Build Coastguard Worker }
425*c8dee2aaSAndroid Build Coastguard Worker return resourceProvider->createBuffer(size,
426*c8dee2aaSAndroid Build Coastguard Worker fBufferType,
427*c8dee2aaSAndroid Build Coastguard Worker kDynamic_GrAccessPattern,
428*c8dee2aaSAndroid Build Coastguard Worker GrResourceProvider::ZeroInit::kNo);
429*c8dee2aaSAndroid Build Coastguard Worker }
430*c8dee2aaSAndroid Build Coastguard Worker
431*c8dee2aaSAndroid Build Coastguard Worker ////////////////////////////////////////////////////////////////////////////////
432*c8dee2aaSAndroid Build Coastguard Worker
// Pool specialization for vertex data: forwards to the base pool with
// GrGpuBufferType::kVertex.
GrVertexBufferAllocPool::GrVertexBufferAllocPool(GrGpu* gpu, sk_sp<CpuBufferCache> cpuBufferCache)
        : GrBufferAllocPool(gpu, GrGpuBufferType::kVertex, std::move(cpuBufferCache)) {}
435*c8dee2aaSAndroid Build Coastguard Worker
makeSpace(size_t vertexSize,int vertexCount,sk_sp<const GrBuffer> * buffer,int * startVertex)436*c8dee2aaSAndroid Build Coastguard Worker void* GrVertexBufferAllocPool::makeSpace(size_t vertexSize,
437*c8dee2aaSAndroid Build Coastguard Worker int vertexCount,
438*c8dee2aaSAndroid Build Coastguard Worker sk_sp<const GrBuffer>* buffer,
439*c8dee2aaSAndroid Build Coastguard Worker int* startVertex) {
440*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(vertexCount >= 0);
441*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(buffer);
442*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(startVertex);
443*c8dee2aaSAndroid Build Coastguard Worker
444*c8dee2aaSAndroid Build Coastguard Worker size_t offset SK_INIT_TO_AVOID_WARNING;
445*c8dee2aaSAndroid Build Coastguard Worker void* ptr = INHERITED::makeSpace(SkSafeMath::Mul(vertexSize, vertexCount),
446*c8dee2aaSAndroid Build Coastguard Worker vertexSize,
447*c8dee2aaSAndroid Build Coastguard Worker buffer,
448*c8dee2aaSAndroid Build Coastguard Worker &offset);
449*c8dee2aaSAndroid Build Coastguard Worker
450*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(0 == offset % vertexSize);
451*c8dee2aaSAndroid Build Coastguard Worker *startVertex = static_cast<int>(offset / vertexSize);
452*c8dee2aaSAndroid Build Coastguard Worker return ptr;
453*c8dee2aaSAndroid Build Coastguard Worker }
454*c8dee2aaSAndroid Build Coastguard Worker
makeSpaceAtLeast(size_t vertexSize,int minVertexCount,int fallbackVertexCount,sk_sp<const GrBuffer> * buffer,int * startVertex,int * actualVertexCount)455*c8dee2aaSAndroid Build Coastguard Worker void* GrVertexBufferAllocPool::makeSpaceAtLeast(size_t vertexSize, int minVertexCount,
456*c8dee2aaSAndroid Build Coastguard Worker int fallbackVertexCount,
457*c8dee2aaSAndroid Build Coastguard Worker sk_sp<const GrBuffer>* buffer, int* startVertex,
458*c8dee2aaSAndroid Build Coastguard Worker int* actualVertexCount) {
459*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(minVertexCount >= 0);
460*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(fallbackVertexCount >= minVertexCount);
461*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(buffer);
462*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(startVertex);
463*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(actualVertexCount);
464*c8dee2aaSAndroid Build Coastguard Worker
465*c8dee2aaSAndroid Build Coastguard Worker size_t offset SK_INIT_TO_AVOID_WARNING;
466*c8dee2aaSAndroid Build Coastguard Worker size_t actualSize SK_INIT_TO_AVOID_WARNING;
467*c8dee2aaSAndroid Build Coastguard Worker void* ptr = INHERITED::makeSpaceAtLeast(SkSafeMath::Mul(vertexSize, minVertexCount),
468*c8dee2aaSAndroid Build Coastguard Worker SkSafeMath::Mul(vertexSize, fallbackVertexCount),
469*c8dee2aaSAndroid Build Coastguard Worker vertexSize,
470*c8dee2aaSAndroid Build Coastguard Worker buffer,
471*c8dee2aaSAndroid Build Coastguard Worker &offset,
472*c8dee2aaSAndroid Build Coastguard Worker &actualSize);
473*c8dee2aaSAndroid Build Coastguard Worker
474*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(0 == offset % vertexSize);
475*c8dee2aaSAndroid Build Coastguard Worker *startVertex = static_cast<int>(offset / vertexSize);
476*c8dee2aaSAndroid Build Coastguard Worker
477*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(0 == actualSize % vertexSize);
478*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(actualSize >= vertexSize * minVertexCount);
479*c8dee2aaSAndroid Build Coastguard Worker *actualVertexCount = static_cast<int>(actualSize / vertexSize);
480*c8dee2aaSAndroid Build Coastguard Worker
481*c8dee2aaSAndroid Build Coastguard Worker return ptr;
482*c8dee2aaSAndroid Build Coastguard Worker }
483*c8dee2aaSAndroid Build Coastguard Worker
484*c8dee2aaSAndroid Build Coastguard Worker ////////////////////////////////////////////////////////////////////////////////
485*c8dee2aaSAndroid Build Coastguard Worker
// Pool specialization for 16-bit index data: forwards to the base pool with
// GrGpuBufferType::kIndex.
GrIndexBufferAllocPool::GrIndexBufferAllocPool(GrGpu* gpu, sk_sp<CpuBufferCache> cpuBufferCache)
        : GrBufferAllocPool(gpu, GrGpuBufferType::kIndex, std::move(cpuBufferCache)) {}
488*c8dee2aaSAndroid Build Coastguard Worker
makeSpace(int indexCount,sk_sp<const GrBuffer> * buffer,int * startIndex)489*c8dee2aaSAndroid Build Coastguard Worker void* GrIndexBufferAllocPool::makeSpace(int indexCount, sk_sp<const GrBuffer>* buffer,
490*c8dee2aaSAndroid Build Coastguard Worker int* startIndex) {
491*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(indexCount >= 0);
492*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(buffer);
493*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(startIndex);
494*c8dee2aaSAndroid Build Coastguard Worker
495*c8dee2aaSAndroid Build Coastguard Worker size_t offset SK_INIT_TO_AVOID_WARNING;
496*c8dee2aaSAndroid Build Coastguard Worker void* ptr = INHERITED::makeSpace(SkSafeMath::Mul(indexCount, sizeof(uint16_t)),
497*c8dee2aaSAndroid Build Coastguard Worker sizeof(uint16_t),
498*c8dee2aaSAndroid Build Coastguard Worker buffer,
499*c8dee2aaSAndroid Build Coastguard Worker &offset);
500*c8dee2aaSAndroid Build Coastguard Worker
501*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(0 == offset % sizeof(uint16_t));
502*c8dee2aaSAndroid Build Coastguard Worker *startIndex = static_cast<int>(offset / sizeof(uint16_t));
503*c8dee2aaSAndroid Build Coastguard Worker return ptr;
504*c8dee2aaSAndroid Build Coastguard Worker }
505*c8dee2aaSAndroid Build Coastguard Worker
makeSpaceAtLeast(int minIndexCount,int fallbackIndexCount,sk_sp<const GrBuffer> * buffer,int * startIndex,int * actualIndexCount)506*c8dee2aaSAndroid Build Coastguard Worker void* GrIndexBufferAllocPool::makeSpaceAtLeast(int minIndexCount, int fallbackIndexCount,
507*c8dee2aaSAndroid Build Coastguard Worker sk_sp<const GrBuffer>* buffer, int* startIndex,
508*c8dee2aaSAndroid Build Coastguard Worker int* actualIndexCount) {
509*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(minIndexCount >= 0);
510*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(fallbackIndexCount >= minIndexCount);
511*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(buffer);
512*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(startIndex);
513*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(actualIndexCount);
514*c8dee2aaSAndroid Build Coastguard Worker
515*c8dee2aaSAndroid Build Coastguard Worker size_t offset SK_INIT_TO_AVOID_WARNING;
516*c8dee2aaSAndroid Build Coastguard Worker size_t actualSize SK_INIT_TO_AVOID_WARNING;
517*c8dee2aaSAndroid Build Coastguard Worker void* ptr = INHERITED::makeSpaceAtLeast(SkSafeMath::Mul(minIndexCount, sizeof(uint16_t)),
518*c8dee2aaSAndroid Build Coastguard Worker SkSafeMath::Mul(fallbackIndexCount, sizeof(uint16_t)),
519*c8dee2aaSAndroid Build Coastguard Worker sizeof(uint16_t),
520*c8dee2aaSAndroid Build Coastguard Worker buffer,
521*c8dee2aaSAndroid Build Coastguard Worker &offset,
522*c8dee2aaSAndroid Build Coastguard Worker &actualSize);
523*c8dee2aaSAndroid Build Coastguard Worker
524*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(0 == offset % sizeof(uint16_t));
525*c8dee2aaSAndroid Build Coastguard Worker *startIndex = static_cast<int>(offset / sizeof(uint16_t));
526*c8dee2aaSAndroid Build Coastguard Worker
527*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(0 == actualSize % sizeof(uint16_t));
528*c8dee2aaSAndroid Build Coastguard Worker SkASSERT(actualSize >= minIndexCount * sizeof(uint16_t));
529*c8dee2aaSAndroid Build Coastguard Worker *actualIndexCount = static_cast<int>(actualSize / sizeof(uint16_t));
530*c8dee2aaSAndroid Build Coastguard Worker return ptr;
531*c8dee2aaSAndroid Build Coastguard Worker }
532