1 /*
2 * Copyright 2019 Google LLC.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7 #include "src/gpu/ganesh/ops/AtlasPathRenderer.h"
8
9 #include "include/core/SkMatrix.h"
10 #include "include/core/SkPath.h"
11 #include "include/core/SkRect.h"
12 #include "include/core/SkSize.h"
13 #include "include/gpu/GpuTypes.h"
14 #include "include/gpu/ganesh/GrBackendSurface.h"
15 #include "include/gpu/ganesh/GrContextOptions.h"
16 #include "include/gpu/ganesh/GrDirectContext.h"
17 #include "include/gpu/ganesh/GrRecordingContext.h"
18 #include "include/gpu/ganesh/GrTypes.h"
19 #include "include/private/base/SkAssert.h"
20 #include "include/private/base/SkDebug.h"
21 #include "include/private/base/SkSpan_impl.h"
22 #include "include/private/gpu/ganesh/GrTypesPriv.h"
23 #include "src/base/SkMathPriv.h"
24 #include "src/base/SkVx.h"
25 #include "src/core/SkIPoint16.h"
26 #include "src/gpu/ganesh/GrCaps.h"
27 #include "src/gpu/ganesh/GrClip.h"
28 #include "src/gpu/ganesh/GrDirectContextPriv.h"
29 #include "src/gpu/ganesh/GrDrawingManager.h"
30 #include "src/gpu/ganesh/GrDynamicAtlas.h"
31 #include "src/gpu/ganesh/GrPaint.h"
32 #include "src/gpu/ganesh/GrRecordingContextPriv.h"
33 #include "src/gpu/ganesh/GrRenderTargetProxy.h"
34 #include "src/gpu/ganesh/GrRenderTask.h"
35 #include "src/gpu/ganesh/GrStyle.h"
36 #include "src/gpu/ganesh/GrSurfaceProxy.h"
37 #include "src/gpu/ganesh/GrSurfaceProxyView.h"
38 #include "src/gpu/ganesh/GrTexture.h"
39 #include "src/gpu/ganesh/GrTextureProxy.h"
40 #include "src/gpu/ganesh/SurfaceDrawContext.h"
41 #include "src/gpu/ganesh/effects/GrModulateAtlasCoverageEffect.h"
42 #include "src/gpu/ganesh/geometry/GrStyledShape.h"
43 #include "src/gpu/ganesh/ops/AtlasRenderTask.h"
44 #include "src/gpu/ganesh/ops/DrawAtlasPathOp.h"
45 #include "src/gpu/ganesh/ops/GrOp.h"
46 #include "src/gpu/ganesh/ops/TessellationPathRenderer.h"
47
48 #include <algorithm>
49 #include <utility>
50
51 using namespace skia_private;
52
53 namespace {
54
55 // Returns the rect [topLeftFloor, botRightCeil], which is the rect [r] rounded out to integer
56 // boundaries.
round_out(const SkRect & r)57 std::pair<skvx::float2, skvx::float2> round_out(const SkRect& r) {
58 return {floor(skvx::float2::Load(&r.fLeft)),
59 ceil(skvx::float2::Load(&r.fRight))};
60 }
61
62 // Returns whether the given proxyOwner uses the atlasProxy.
refs_atlas(const T * proxyOwner,const GrSurfaceProxy * atlasProxy)63 template<typename T> bool refs_atlas(const T* proxyOwner, const GrSurfaceProxy* atlasProxy) {
64 bool refsAtlas = false;
65 auto checkForAtlasRef = [atlasProxy, &refsAtlas](GrSurfaceProxy* proxy, skgpu::Mipmapped) {
66 if (proxy == atlasProxy) {
67 refsAtlas = true;
68 }
69 };
70 if (proxyOwner) {
71 proxyOwner->visitProxies(checkForAtlasRef);
72 }
73 return refsAtlas;
74 }
75
is_visible(const SkRect & pathDevBounds,const SkIRect & clipBounds)76 bool is_visible(const SkRect& pathDevBounds, const SkIRect& clipBounds) {
77 auto pathTopLeft = skvx::float2::Load(&pathDevBounds.fLeft);
78 auto pathBotRight = skvx::float2::Load(&pathDevBounds.fRight);
79 // Empty paths are never visible. Phrase this as a NOT of positive logic so we also return false
80 // in the case of NaN.
81 if (!all(pathTopLeft < pathBotRight)) {
82 return false;
83 }
84 auto clipTopLeft = skvx::cast<float>(skvx::int2::Load(&clipBounds.fLeft));
85 auto clipBotRight = skvx::cast<float>(skvx::int2::Load(&clipBounds.fRight));
86 static_assert(sizeof(clipBounds) == sizeof(clipTopLeft) + sizeof(clipBotRight));
87 return all(pathTopLeft < clipBotRight) && all(pathBotRight > clipTopLeft);
88 }
89
90 #ifdef SK_DEBUG
91 // Ensures the atlas dependencies are set up such that each atlas will be totally out of service
92 // before we render the next one in line. This means there will only ever be one atlas active at a
93 // time and that they can all share the same texture.
validate_atlas_dependencies(const TArray<sk_sp<skgpu::ganesh::AtlasRenderTask>> & atlasTasks)94 void validate_atlas_dependencies(
95 const TArray<sk_sp<skgpu::ganesh::AtlasRenderTask>>& atlasTasks) {
96 for (int i = atlasTasks.size() - 1; i >= 1; --i) {
97 auto atlasTask = atlasTasks[i].get();
98 auto previousAtlasTask = atlasTasks[i - 1].get();
99 // Double check that atlasTask depends on every dependent of its previous atlas. If this
100 // fires it might mean previousAtlasTask gained a new dependent after atlasTask came into
101 // service (maybe by an op that hadn't yet been added to an opsTask when we registered the
102 // new atlas with the drawingManager).
103 for (GrRenderTask* previousAtlasUser : previousAtlasTask->dependents()) {
104 SkASSERT(atlasTask->dependsOn(previousAtlasUser));
105 }
106 }
107 }
108 #endif
109
110 } // anonymous namespace
111
112 namespace skgpu::ganesh {
113
// The atlas stores coverage only, so a single-channel alpha format is sufficient.
constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;

// Initial backing-store dimension handed to GrDynamicAtlas (it may grow up to fAtlasMaxSize).
constexpr static int kAtlasInitialSize = 512;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 256px high rectanizer band.
constexpr static int kAtlasMaxPathHeight = 256;

// If we have MSAA to fall back on, paths are already fast enough that we really only benefit from
// atlasing when they are very small.
constexpr static int kAtlasMaxPathHeightWithMSAAFallback = 128;

// http://skbug.com/12291 -- The way GrDynamicAtlas works, a single 2048x1 path is given an entire
// 2048x2048 atlas with draw bounds of 2048x1025. Limit the max width to 1024 to avoid this landmine
// until it's resolved.
constexpr static int kAtlasMaxPathWidth = 1024;
133
IsSupported(GrRecordingContext * rContext)134 bool AtlasPathRenderer::IsSupported(GrRecordingContext* rContext) {
135 #ifdef SK_BUILD_FOR_IOS
136 // b/195095846: There is a bug with the atlas path renderer on OpenGL iOS. Disable until we can
137 // investigate.
138 if (rContext->backend() == GrBackendApi::kOpenGL) {
139 return false;
140 }
141 #endif
142 #ifdef SK_BUILD_FOR_WIN
143 // http://skbug.com/13519 There is a bug with the atlas path renderer on Direct3D, running on
144 // Radeon hardware and possibly others. Disable until we can investigate.
145 if (rContext->backend() == GrBackendApi::kDirect3D) {
146 return false;
147 }
148 #endif
149 const GrCaps& caps = *rContext->priv().caps();
150 auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
151 return rContext->asDirectContext() && // The atlas doesn't support DDL yet.
152 caps.internalMultisampleCount(atlasFormat) > 1 &&
153 // GrAtlasRenderTask currently requires tessellation. In the future it could use the
154 // default path renderer when tessellation isn't available.
155 TessellationPathRenderer::IsSupported(caps);
156 }
157
Make(GrRecordingContext * rContext)158 sk_sp<AtlasPathRenderer> AtlasPathRenderer::Make(GrRecordingContext* rContext) {
159 return IsSupported(rContext)
160 ? sk_sp<AtlasPathRenderer>(new AtlasPathRenderer(rContext->asDirectContext()))
161 : nullptr;
162 }
163
AtlasPathRenderer(GrDirectContext * dContext)164 AtlasPathRenderer::AtlasPathRenderer(GrDirectContext* dContext) {
165 SkASSERT(IsSupported(dContext));
166 const GrCaps& caps = *dContext->priv().caps();
167 #if defined(GPU_TEST_UTILS)
168 fAtlasMaxSize = dContext->priv().options().fMaxTextureAtlasSize;
169 #else
170 fAtlasMaxSize = 2048;
171 #endif
172 fAtlasMaxSize = SkPrevPow2(std::min(fAtlasMaxSize, (float)caps.maxPreferredRenderTargetSize()));
173 fAtlasMaxPathWidth = std::min((float)kAtlasMaxPathWidth, fAtlasMaxSize);
174 fAtlasInitialSize = SkNextPow2(std::min(kAtlasInitialSize, (int)fAtlasMaxSize));
175 }
176
pathFitsInAtlas(const SkRect & pathDevBounds,GrAAType fallbackAAType) const177 bool AtlasPathRenderer::pathFitsInAtlas(const SkRect& pathDevBounds,
178 GrAAType fallbackAAType) const {
179 SkASSERT(fallbackAAType != GrAAType::kNone); // The atlas doesn't support non-AA.
180 float atlasMaxPathHeight_p2 = (fallbackAAType == GrAAType::kMSAA)
181 ? kAtlasMaxPathHeightWithMSAAFallback * kAtlasMaxPathHeightWithMSAAFallback
182 : kAtlasMaxPathHeight * kAtlasMaxPathHeight;
183 auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds);
184 auto size = botRightCeil - topLeftFloor;
185 return // Ensure the path's largest dimension fits in the atlas.
186 all(size <= fAtlasMaxPathWidth) &&
187 // Since we will transpose tall skinny paths, limiting to atlasMaxPathHeight^2 pixels
188 // guarantees heightInAtlas <= atlasMaxPathHeight, while also allowing paths that are
189 // very wide and short.
190 size[0] * size[1] <= atlasMaxPathHeight_p2;
191 }
192
set(const SkMatrix & m,const SkPath & path)193 void AtlasPathRenderer::AtlasPathKey::set(const SkMatrix& m, const SkPath& path) {
194 fPathGenID = path.getGenerationID();
195 fAffineMatrix[0] = m.getScaleX();
196 fAffineMatrix[1] = m.getSkewX();
197 fAffineMatrix[2] = m.getTranslateX();
198 fAffineMatrix[3] = m.getSkewY();
199 fAffineMatrix[4] = m.getScaleY();
200 fAffineMatrix[5] = m.getTranslateY();
201 fFillRule = (uint32_t)GrFillRuleForSkPath(path); // Fill rule doesn't affect the path's genID.
202 }
203
addPathToAtlas(GrRecordingContext * rContext,const SkMatrix & viewMatrix,const SkPath & path,const SkRect & pathDevBounds,SkIRect * devIBounds,SkIPoint16 * locationInAtlas,bool * transposedInAtlas,const DrawRefsAtlasCallback & drawRefsAtlasCallback)204 bool AtlasPathRenderer::addPathToAtlas(GrRecordingContext* rContext,
205 const SkMatrix& viewMatrix,
206 const SkPath& path,
207 const SkRect& pathDevBounds,
208 SkIRect* devIBounds,
209 SkIPoint16* locationInAtlas,
210 bool* transposedInAtlas,
211 const DrawRefsAtlasCallback& drawRefsAtlasCallback) {
212 SkASSERT(!viewMatrix.hasPerspective()); // See onCanDrawPath().
213
214 pathDevBounds.roundOut(devIBounds);
215 #ifdef SK_DEBUG
216 // is_visible() should have guaranteed the path's bounds were representable as ints, since clip
217 // bounds within the max render target size are nowhere near INT_MAX.
218 auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds);
219 SkASSERT(all(skvx::cast<float>(skvx::int2::Load(&devIBounds->fLeft)) == topLeftFloor));
220 SkASSERT(all(skvx::cast<float>(skvx::int2::Load(&devIBounds->fRight)) == botRightCeil));
221 #endif
222
223 int widthInAtlas = devIBounds->width();
224 int heightInAtlas = devIBounds->height();
225 // is_visible() should have guaranteed the path's bounds were non-empty.
226 SkASSERT(widthInAtlas > 0 && heightInAtlas > 0);
227
228 if (SkNextPow2(widthInAtlas) == SkNextPow2(heightInAtlas)) {
229 // Both dimensions go to the same pow2 band in the atlas. Use the larger dimension as height
230 // for more efficient packing.
231 *transposedInAtlas = widthInAtlas > heightInAtlas;
232 } else {
233 // Both dimensions go to different pow2 bands in the atlas. Use the smaller pow2 band for
234 // most efficient packing.
235 *transposedInAtlas = heightInAtlas > widthInAtlas;
236 }
237 if (*transposedInAtlas) {
238 std::swap(heightInAtlas, widthInAtlas);
239 }
240 // pathFitsInAtlas() should have guaranteed these constraints on the path size.
241 SkASSERT(widthInAtlas <= (int)fAtlasMaxPathWidth);
242 SkASSERT(heightInAtlas <= kAtlasMaxPathHeight);
243
244 // Check if this path is already in the atlas. This is mainly for clip paths.
245 AtlasPathKey atlasPathKey;
246 if (!path.isVolatile()) {
247 atlasPathKey.set(viewMatrix, path);
248 if (const SkIPoint16* existingLocation = fAtlasPathCache.find(atlasPathKey)) {
249 *locationInAtlas = *existingLocation;
250 return true;
251 }
252 }
253
254 if (fAtlasRenderTasks.empty() ||
255 !fAtlasRenderTasks.back()->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
256 heightInAtlas, *transposedInAtlas, locationInAtlas)) {
257 // We either don't have an atlas yet or the current one is full. Try to replace it.
258 auto currentAtlasTask = (!fAtlasRenderTasks.empty()) ? fAtlasRenderTasks.back().get()
259 : nullptr;
260 if (currentAtlasTask &&
261 drawRefsAtlasCallback &&
262 drawRefsAtlasCallback(currentAtlasTask->atlasProxy())) {
263 // The draw already refs the current atlas. Give up. Otherwise the draw would ref two
264 // different atlases and they couldn't share a texture.
265 return false;
266 }
267 // Replace the atlas with a new one.
268 auto dynamicAtlas = std::make_unique<GrDynamicAtlas>(
269 kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes,
270 SkISize{fAtlasInitialSize, fAtlasInitialSize}, fAtlasMaxSize,
271 *rContext->priv().caps(), kAtlasAlgorithm);
272 auto newAtlasTask = sk_make_sp<AtlasRenderTask>(rContext,
273 sk_make_sp<GrArenas>(),
274 std::move(dynamicAtlas));
275 rContext->priv().drawingManager()->addAtlasTask(newAtlasTask, currentAtlasTask);
276 SkAssertResult(newAtlasTask->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
277 heightInAtlas, *transposedInAtlas, locationInAtlas));
278 fAtlasRenderTasks.push_back(std::move(newAtlasTask));
279 fAtlasPathCache.reset();
280 }
281
282 // Remember this path's location in the atlas, in case it gets drawn again.
283 if (!path.isVolatile()) {
284 fAtlasPathCache.set(atlasPathKey, *locationInAtlas);
285 }
286 return true;
287 }
288
onCanDrawPath(const CanDrawPathArgs & args) const289 PathRenderer::CanDrawPath AtlasPathRenderer::onCanDrawPath(const CanDrawPathArgs& args) const {
290 #ifdef SK_DEBUG
291 if (!fAtlasRenderTasks.empty()) {
292 // args.fPaint should NEVER reference our current atlas. If it does, it means somebody
293 // intercepted a clip FP meant for a different op and will cause rendering artifacts.
294 const GrSurfaceProxy* atlasProxy = fAtlasRenderTasks.back()->atlasProxy();
295 SkASSERT(!refs_atlas(args.fPaint->getColorFragmentProcessor(), atlasProxy));
296 SkASSERT(!refs_atlas(args.fPaint->getCoverageFragmentProcessor(), atlasProxy));
297 }
298 SkASSERT(!args.fHasUserStencilSettings); // See onGetStencilSupport().
299 #endif
300 bool canDrawPath = args.fShape->style().isSimpleFill() &&
301 #ifdef SK_DISABLE_ATLAS_PATH_RENDERER_WITH_COVERAGE_AA
302 // The MSAA requirement is a temporary limitation in order to preserve
303 // functionality for refactoring. TODO: Allow kCoverage AA types.
304 args.fAAType == GrAAType::kMSAA &&
305 #else
306 args.fAAType != GrAAType::kNone &&
307 #endif
308 // Non-DMSAA convex paths should be handled by the convex tessellator.
309 // (With DMSAA we continue to use the atlas for these paths in order to avoid
310 // triggering MSAA.)
311 (args.fProxy->numSamples() == 1 || !args.fShape->knownToBeConvex()) &&
312 !args.fShape->style().hasPathEffect() &&
313 !args.fViewMatrix->hasPerspective() &&
314 this->pathFitsInAtlas(args.fViewMatrix->mapRect(args.fShape->bounds()),
315 args.fAAType);
316 return canDrawPath ? CanDrawPath::kYes : CanDrawPath::kNo;
317 }
318
onDrawPath(const DrawPathArgs & args)319 bool AtlasPathRenderer::onDrawPath(const DrawPathArgs& args) {
320 SkPath path;
321 args.fShape->asPath(&path);
322
323 const SkRect pathDevBounds = args.fViewMatrix->mapRect(args.fShape->bounds());
324 SkASSERT(this->pathFitsInAtlas(pathDevBounds, args.fAAType));
325
326 if (!is_visible(pathDevBounds, args.fClip->getConservativeBounds())) {
327 // The path is empty or outside the clip. No mask is needed.
328 if (path.isInverseFillType()) {
329 args.fSurfaceDrawContext->drawPaint(args.fClip, std::move(args.fPaint),
330 *args.fViewMatrix);
331 }
332 return true;
333 }
334
335 SkIRect devIBounds;
336 SkIPoint16 locationInAtlas;
337 bool transposedInAtlas;
338 SkAssertResult(this->addPathToAtlas(args.fContext, *args.fViewMatrix, path, pathDevBounds,
339 &devIBounds, &locationInAtlas, &transposedInAtlas,
340 nullptr/*DrawRefsAtlasCallback -- see onCanDrawPath()*/));
341
342 const SkIRect& fillBounds = args.fShape->inverseFilled()
343 ? (args.fClip
344 ? args.fClip->getConservativeBounds()
345 : args.fSurfaceDrawContext->asSurfaceProxy()->backingStoreBoundsIRect())
346 : devIBounds;
347 const GrCaps& caps = *args.fSurfaceDrawContext->caps();
348 auto op = GrOp::Make<DrawAtlasPathOp>(args.fContext,
349 args.fSurfaceDrawContext->arenaAlloc(),
350 fillBounds, *args.fViewMatrix,
351 std::move(args.fPaint), locationInAtlas,
352 devIBounds, transposedInAtlas,
353 fAtlasRenderTasks.back()->readView(caps),
354 args.fShape->inverseFilled());
355 args.fSurfaceDrawContext->addDrawOp(args.fClip, std::move(op));
356 return true;
357 }
358
makeAtlasClipEffect(const SurfaceDrawContext * sdc,const GrOp * opBeingClipped,std::unique_ptr<GrFragmentProcessor> inputFP,const SkIRect & drawBounds,const SkMatrix & viewMatrix,const SkPath & path)359 GrFPResult AtlasPathRenderer::makeAtlasClipEffect(const SurfaceDrawContext* sdc,
360 const GrOp* opBeingClipped,
361 std::unique_ptr<GrFragmentProcessor> inputFP,
362 const SkIRect& drawBounds,
363 const SkMatrix& viewMatrix,
364 const SkPath& path) {
365 if (viewMatrix.hasPerspective()) {
366 return GrFPFailure(std::move(inputFP));
367 }
368
369 const SkRect pathDevBounds = viewMatrix.mapRect(path.getBounds());
370 if (!is_visible(pathDevBounds, drawBounds)) {
371 // The path is empty or outside the drawBounds. No mask is needed. We explicitly allow the
372 // returned successful "fp" to be null in case this bypassed atlas clip effect was the first
373 // clip to be processed by the clip stack (at which point inputFP is null).
374 return path.isInverseFillType() ? GrFPNullableSuccess(std::move(inputFP))
375 : GrFPFailure(std::move(inputFP));
376 }
377
378 auto fallbackAAType = (sdc->numSamples() > 1 || sdc->canUseDynamicMSAA()) ? GrAAType::kMSAA
379 : GrAAType::kCoverage;
380 if (!this->pathFitsInAtlas(pathDevBounds, fallbackAAType)) {
381 // The path is too big.
382 return GrFPFailure(std::move(inputFP));
383 }
384
385 SkIRect devIBounds;
386 SkIPoint16 locationInAtlas;
387 bool transposedInAtlas;
388 // Called if the atlas runs out of room, to determine if it's safe to create a new one. (Draws
389 // can never access more than one atlas.)
390 auto drawRefsAtlasCallback = [opBeingClipped, &inputFP](const GrSurfaceProxy* atlasProxy) {
391 return refs_atlas(opBeingClipped, atlasProxy) ||
392 refs_atlas(inputFP.get(), atlasProxy);
393 };
394 // addPathToAtlas() ignores inverseness of the fill. See GrAtlasRenderTask::getAtlasUberPath().
395 if (!this->addPathToAtlas(sdc->recordingContext(), viewMatrix, path, pathDevBounds, &devIBounds,
396 &locationInAtlas, &transposedInAtlas, drawRefsAtlasCallback)) {
397 // The atlas ran out of room and we were unable to start a new one.
398 return GrFPFailure(std::move(inputFP));
399 }
400
401 SkMatrix atlasMatrix;
402 auto [atlasX, atlasY] = locationInAtlas;
403 if (!transposedInAtlas) {
404 atlasMatrix = SkMatrix::Translate(atlasX - devIBounds.left(), atlasY - devIBounds.top());
405 } else {
406 atlasMatrix.setAll(0, 1, atlasX - devIBounds.top(),
407 1, 0, atlasY - devIBounds.left(),
408 0, 0, 1);
409 }
410 auto flags = GrModulateAtlasCoverageEffect::Flags::kNone;
411 if (path.isInverseFillType()) {
412 flags |= GrModulateAtlasCoverageEffect::Flags::kInvertCoverage;
413 }
414 if (!devIBounds.contains(drawBounds)) {
415 flags |= GrModulateAtlasCoverageEffect::Flags::kCheckBounds;
416 // At this point in time we expect callers to tighten the scissor for "kIntersect" clips, as
417 // opposed to us having to check the path bounds. Feel free to remove this assert if that
418 // ever changes.
419 SkASSERT(path.isInverseFillType());
420 }
421 GrSurfaceProxyView atlasView = fAtlasRenderTasks.back()->readView(*sdc->caps());
422 return GrFPSuccess(std::make_unique<GrModulateAtlasCoverageEffect>(flags, std::move(inputFP),
423 std::move(atlasView),
424 atlasMatrix, devIBounds));
425 }
426
preFlush(GrOnFlushResourceProvider * onFlushRP)427 bool AtlasPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP) {
428 if (fAtlasRenderTasks.empty()) {
429 SkASSERT(fAtlasPathCache.count() == 0);
430 return true;
431 }
432
433 // Verify the atlases can all share the same texture.
434 SkDEBUGCODE(validate_atlas_dependencies(fAtlasRenderTasks);)
435
436 bool successful;
437
438 #if defined(GPU_TEST_UTILS)
439 if (onFlushRP->failFlushTimeCallbacks()) {
440 successful = false;
441 } else
442 #endif
443 {
444 // TODO: it seems like this path renderer's backing-texture reuse could be greatly
445 // improved. Please see skbug.com/13298.
446
447 // Instantiate the first atlas.
448 successful = fAtlasRenderTasks[0]->instantiate(onFlushRP);
449
450 // Instantiate the remaining atlases.
451 GrTexture* firstAtlas = fAtlasRenderTasks[0]->atlasProxy()->peekTexture();
452 SkASSERT(firstAtlas);
453 for (int i = 1; successful && i < fAtlasRenderTasks.size(); ++i) {
454 auto atlasTask = fAtlasRenderTasks[i].get();
455 if (atlasTask->atlasProxy()->backingStoreDimensions() == firstAtlas->dimensions()) {
456 successful &= atlasTask->instantiate(onFlushRP, sk_ref_sp(firstAtlas));
457 } else {
458 // The atlases are expected to all be full size except possibly the final one.
459 SkASSERT(i == fAtlasRenderTasks.size() - 1);
460 SkASSERT(atlasTask->atlasProxy()->backingStoreDimensions().area() <
461 firstAtlas->dimensions().area());
462 // TODO: Recycle the larger atlas texture anyway?
463 successful &= atlasTask->instantiate(onFlushRP);
464 }
465 }
466 }
467
468 // Reset all atlas data.
469 fAtlasRenderTasks.clear();
470 fAtlasPathCache.reset();
471 return successful;
472 }
473
474 } // namespace skgpu::ganesh
475