/*
 * Copyright 2023 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef skgpu_graphite_AtlasProvider_DEFINED
#define skgpu_graphite_AtlasProvider_DEFINED

#include "include/core/SkColorType.h"
#include "include/core/SkRefCnt.h"
#include "include/private/base/SkTo.h"
#include "src/base/SkEnumBitMask.h"

#include <memory>
#include <unordered_map>

namespace skgpu::graphite {

class Caps;
class ComputePathAtlas;
class DrawContext;
class PathAtlas;
class RasterPathAtlas;
class Recorder;
class TextAtlasManager;
class TextureProxy;

/**
 * AtlasProvider groups various texture atlas management algorithms together.
 */
class AtlasProvider final {
public:
    enum class PathAtlasFlags : unsigned {
        kNone    = 0b000,
        // ComputePathAtlas is supported
        kCompute = 0b001,
        // RasterPathAtlas is supported
        kRaster  = 0b010,
    };
    SK_DECL_BITMASK_OPS_FRIENDS(PathAtlasFlags)
    using PathAtlasFlagsBitMask = SkEnumBitMask<PathAtlasFlags>;

    // Query the supported path atlas algorithms based on device capabilities.
    static PathAtlasFlagsBitMask QueryPathAtlasSupport(const Caps*);

    explicit AtlasProvider(Recorder*);
    ~AtlasProvider() = default;

    // Returns the TextAtlasManager that provides access to persistent DrawAtlas instances used in
    // glyph rendering. This TextAtlasManager is always available.
    TextAtlasManager* textAtlasManager() const { return fTextAtlasManager.get(); }

    // Returns whether a particular atlas type is available. Currently PathAtlasFlags::kRaster is
    // always supported.
    bool isAvailable(PathAtlasFlags atlasType) const {
        return SkToBool(fPathAtlasFlags & atlasType);
    }

    // Creates a new transient atlas handler that uses compute shaders to rasterize coverage masks
    // for path rendering. This method returns nullptr if compute shaders are not supported by the
    // owning Recorder's context.
    std::unique_ptr<ComputePathAtlas> createComputePathAtlas(Recorder* recorder) const;

    // Gets the atlas handler that uses the CPU raster pipeline to create coverage masks
    // for path rendering.
    RasterPathAtlas* getRasterPathAtlas() const;

    // Returns a TextureProxy with the given dimensions and color type.
    sk_sp<TextureProxy> getAtlasTexture(
            Recorder*, uint16_t width, uint16_t height, SkColorType, uint16_t identifier,
            bool requireStorageUsage);

    // Frees textures held in the atlas pool and compacts the pages within the other
    // atlas managers. Does not free resources that are in use or clear cached masks.
    void freeGpuResources();

    // Push any pending uploads to atlases onto the draw context.
    void recordUploads(DrawContext*);

    // Handle any post-flush work (garbage collection).
    void compact(bool forceCompact);

    // Invalidate any cached state about what may or may not already be uploaded in the atlas.
    void invalidateAtlases();

private:
    std::unique_ptr<TextAtlasManager> fTextAtlasManager;

    // Accumulates atlas coverage masks generated by software rendering that are required by one or
    // more entries in `fPendingDraws`. During the snapUploadTask step, prior to pending draws
    // being snapped into a new DrawPass, any necessary uploads into an atlas texture are recorded
    // for the accumulated masks. The accumulated masks are then cleared, which frees up the atlas
    // for future draws.
    //
    // TODO: We should not clear all accumulated masks but cache masks over more than one frame.
    //
    // TODO: We may need a method to generate raster-generated masks in separate threads prior to
    // upload.
    std::unique_ptr<RasterPathAtlas> fRasterPathAtlas;

    // Allocated and cached texture proxies shared by all PathAtlas instances. It is possible for
    // the same texture to be bound to multiple DispatchGroups and DrawPasses across flushes. The
    // owning Recorder must guarantee that any uploads or compute dispatches are scheduled to
    // remain coherent across flushes.
    // TODO: This requirement might change with a more sophisticated reuse scheme for texture
    // allocations. For now our model is simple: all PathAtlases target the same texture and only
    // one of them will render to the texture during a given command submission.
    std::unordered_map<uint64_t, sk_sp<TextureProxy>> fTexturePool;

    PathAtlasFlagsBitMask fPathAtlasFlags = PathAtlasFlags::kNone;
};

SK_MAKE_BITMASK_OPS(AtlasProvider::PathAtlasFlags)
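// A minimal usage sketch (hypothetical caller code, not part of this header): query path atlas
// support up front, prefer the transient compute-based atlas when available, and fall back to the
// always-available raster atlas otherwise. `recorder` is assumed to be a valid Recorder* and
// `drawContext` a valid DrawContext* obtained elsewhere.
//
//     AtlasProvider provider(recorder);
//     if (provider.isAvailable(AtlasProvider::PathAtlasFlags::kCompute)) {
//         std::unique_ptr<ComputePathAtlas> atlas = provider.createComputePathAtlas(recorder);
//         // ... record compute dispatches that rasterize coverage masks into the atlas ...
//     } else {
//         RasterPathAtlas* atlas = provider.getRasterPathAtlas();
//         // ... accumulate CPU-rasterized coverage masks in the persistent atlas ...
//     }
//     provider.recordUploads(drawContext);  // push any pending mask uploads at flush time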
}  // namespace skgpu::graphite

#endif  // skgpu_graphite_AtlasProvider_DEFINED