xref: /aosp_15_r20/external/skia/include/gpu/ganesh/GrRecordingContext.h (revision c8dee2aa9b3f27cf6c858bd81872bdeb2c07ed17)
/*
 * Copyright 2019 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7 
8 #ifndef GrRecordingContext_DEFINED
9 #define GrRecordingContext_DEFINED
10 
11 #include "include/core/SkColorType.h"
12 #include "include/core/SkRefCnt.h"
13 #include "include/core/SkString.h" // IWYU pragma: keep
14 #include "include/core/SkTypes.h"
15 #include "include/private/base/SkTArray.h"
16 #include "include/private/gpu/ganesh/GrContext_Base.h"
17 #include "include/private/gpu/ganesh/GrImageContext.h"
18 
19 #include <map>
20 #include <memory>
21 #include <string>
22 
23 class GrAuditTrail;
24 class GrContextThreadSafeProxy;
25 class GrDirectContext;
26 class GrDrawingManager;
27 class GrOnFlushCallbackObject;
28 class GrProgramDesc;
29 class GrProgramInfo;
30 class GrProxyProvider;
31 class GrRecordingContextPriv;
32 class GrThreadSafeCache;
33 class SkArenaAlloc;
34 class SkCapabilities;
35 class SkJSONWriter;
36 
37 namespace sktext::gpu {
38 class SubRunAllocator;
39 class TextBlobRedrawCoordinator;
40 }
41 
/**
 * GrRecordingContext records GPU work (e.g. for deferred display lists) without requiring a
 * live 3D API device. It extends GrImageContext with the machinery needed to record draws:
 * a drawing manager, a proxy provider, record-time memory arenas, and stats.
 */
class GrRecordingContext : public GrImageContext {
public:
    ~GrRecordingContext() override;

    /**
     * Reports whether the GrDirectContext associated with this GrRecordingContext is abandoned.
     * When called on a GrDirectContext it may actively check whether the underlying 3D API
     * device/context has been disconnected before reporting the status. If so, calling this
     * method will transition the GrDirectContext to the abandoned state.
     */
    bool abandoned() override { return GrImageContext::abandoned(); }

    /*
     * Can a SkSurface be created with the given color type. To check whether MSAA is supported
     * use maxSurfaceSampleCountForColorType().
     */
    SK_API bool colorTypeSupportedAsSurface(SkColorType colorType) const {
        // These color types are explicitly not renderable as surfaces, regardless of what the
        // backend reports for sample counts.
        if (kR16G16_unorm_SkColorType == colorType ||
            kA16_unorm_SkColorType == colorType ||
            kA16_float_SkColorType == colorType ||
            kR16G16_float_SkColorType == colorType ||
            kR16G16B16A16_unorm_SkColorType == colorType ||
            kGray_8_SkColorType == colorType) {
            return false;
        }

        // 0 means "not renderable at all"; any positive count means a surface can be made.
        return this->maxSurfaceSampleCountForColorType(colorType) > 0;
    }

    /**
     * Gets the maximum supported texture size.
     */
    SK_API int maxTextureSize() const;

    /**
     * Gets the maximum supported render target size.
     */
    SK_API int maxRenderTargetSize() const;

    /**
     * Can a SkImage be created with the given color type.
     */
    SK_API bool colorTypeSupportedAsImage(SkColorType) const;

    /**
     * Does this context support protected content?
     */
    SK_API bool supportsProtectedContent() const;

    /**
     * Gets the maximum supported sample count for a color type. 1 is returned if only non-MSAA
     * rendering is supported for the color type. 0 is returned if rendering to this color type
     * is not supported at all.
     */
    SK_API int maxSurfaceSampleCountForColorType(SkColorType colorType) const {
        return GrImageContext::maxSurfaceSampleCountForColorType(colorType);
    }

    /**
     * Returns the SkCapabilities object describing this context's capabilities.
     */
    SK_API sk_sp<const SkCapabilities> skCapabilities() const;

    // Provides access to functions that aren't part of the public API.
    GrRecordingContextPriv priv();
    const GrRecordingContextPriv priv() const;  // NOLINT(readability-const-return-type)

    // The collection of specialized memory arenas for different types of data recorded by a
    // GrRecordingContext. Arenas does not maintain ownership of the pools it groups together.
    class Arenas {
    public:
        Arenas(SkArenaAlloc*, sktext::gpu::SubRunAllocator*);

        // For storing pipelines and other complex data as-needed by ops
        SkArenaAlloc* recordTimeAllocator() { return fRecordTimeAllocator; }

        // For storing GrTextBlob SubRuns
        sktext::gpu::SubRunAllocator* recordTimeSubRunAllocator() {
            return fRecordTimeSubRunAllocator;
        }

    private:
        // Raw, non-owning pointers; the owning pools live in OwnedArenas (below).
        SkArenaAlloc* fRecordTimeAllocator;
        sktext::gpu::SubRunAllocator* fRecordTimeSubRunAllocator;
    };

protected:
    friend class GrRecordingContextPriv;    // for hidden functions
    friend class GrDeferredDisplayList;     // for OwnedArenas
    friend class GrDeferredDisplayListPriv; // for ProgramData

    // Like Arenas, but preserves ownership of the underlying pools.
    class OwnedArenas {
    public:
        OwnedArenas(bool ddlRecording);
        ~OwnedArenas();

        // Returns a non-owning Arenas view of the pools owned here.
        Arenas get();

        OwnedArenas& operator=(OwnedArenas&&);

    private:
        bool fDDLRecording;
        std::unique_ptr<SkArenaAlloc> fRecordTimeAllocator;
        std::unique_ptr<sktext::gpu::SubRunAllocator> fRecordTimeSubRunAllocator;
    };

    GrRecordingContext(sk_sp<GrContextThreadSafeProxy>, bool ddlRecording);

    // GrImageContext override; defined in the .cpp. Presumably returns false on failure —
    // confirm against the implementation.
    bool init() override;

    // GrImageContext override; transitions this context to the abandoned state.
    void abandonContext() override;

    // Accessor for the drawing manager (fDrawingManager, below).
    GrDrawingManager* drawingManager();

    // There is no going back from this method. It should only be called to control the timing
    // during abandon or destruction of the context.
    void destroyDrawingManager();

    // Non-owning view of this context's record-time arenas.
    Arenas arenas() { return fArenas.get(); }
    // This entry point should only be used for DDL creation where we want the ops' lifetime to
    // match that of the DDL.
    OwnedArenas&& detachArenas();

    // Accessors for the proxy provider (fProxyProvider, below).
    GrProxyProvider* proxyProvider() { return fProxyProvider.get(); }
    const GrProxyProvider* proxyProvider() const { return fProxyProvider.get(); }

    // A (desc, info) pair recorded for a program; the desc is owned, the info is not.
    struct ProgramData {
        ProgramData(std::unique_ptr<const GrProgramDesc>, const GrProgramInfo*);
        ProgramData(ProgramData&&);                     // for SkTArray
        ProgramData(const ProgramData&) = delete;
        ~ProgramData();

        const GrProgramDesc& desc() const { return *fDesc; }
        const GrProgramInfo& info() const { return *fInfo; }

    private:
        // TODO: store the GrProgramDescs in the 'fRecordTimeData' arena
        std::unique_ptr<const GrProgramDesc> fDesc;
        // The program infos should be stored in 'fRecordTimeData' so do not need to be ref
        // counted or deleted in the destructor.
        const GrProgramInfo* fInfo = nullptr;
    };

    // This entry point gives the recording context a chance to cache the provided
    // programInfo. The DDL context takes this opportunity to store programInfos as a sidecar
    // to the DDL. The base implementation is a no-op.
    virtual void recordProgramInfo(const GrProgramInfo*) {}
    // This asks the recording context to return any programInfos it may have collected
    // via the 'recordProgramInfo' call. It is up to the caller to ensure that the lifetime
    // of the programInfos matches the intended use. For example, in DDL-record mode it
    // is known that all the programInfos will have been allocated in an arena with the
    // same lifetime at the DDL itself. The base implementation is a no-op.
    virtual void detachProgramData(skia_private::TArray<ProgramData>*) {}

    // Accessors for the coordinator used to redraw cached text blobs.
    sktext::gpu::TextBlobRedrawCoordinator* getTextBlobRedrawCoordinator();
    const sktext::gpu::TextBlobRedrawCoordinator* getTextBlobRedrawCoordinator() const;

    // Accessors for the thread-safe cache shared with the direct context.
    GrThreadSafeCache* threadSafeCache();
    const GrThreadSafeCache* threadSafeCache() const;

    /**
     * Registers an object for flush-related callbacks. (See GrOnFlushCallbackObject.)
     *
     * NOTE: the drawing manager tracks this object as a raw pointer; it is up to the caller to
     * ensure its lifetime is tied to that of the context.
     */
    void addOnFlushCallbackObject(GrOnFlushCallbackObject*);

    // GrContext_Base override: this object is a recording context.
    GrRecordingContext* asRecordingContext() override { return this; }

    // Path-mask statistics. Real counters only when GR_GPU_STATS is enabled; otherwise the
    // mutators compile to no-ops so call sites need no #if guards.
    class Stats {
    public:
        Stats() = default;

#if GR_GPU_STATS
        void reset() { *this = {}; }

        int numPathMasksGenerated() const { return fNumPathMasksGenerated; }
        void incNumPathMasksGenerated() { fNumPathMasksGenerated++; }

        int numPathMaskCacheHits() const { return fNumPathMaskCacheHits; }
        void incNumPathMasksCacheHits() { fNumPathMaskCacheHits++; }

#if defined(GPU_TEST_UTILS)
        void dump(SkString* out) const;
        void dumpKeyValuePairs(skia_private::TArray<SkString>* keys,
                               skia_private::TArray<double>* values) const;
#endif

    private:
        int fNumPathMasksGenerated{0};
        int fNumPathMaskCacheHits{0};

#else // GR_GPU_STATS
        // Stats disabled: keep the mutators/dumpers as empty inline stubs.
        void incNumPathMasksGenerated() {}
        void incNumPathMasksCacheHits() {}

#if defined(GPU_TEST_UTILS)
        void dump(SkString*) const {}
        void dumpKeyValuePairs(skia_private::TArray<SkString>* keys,
                               skia_private::TArray<double>* values) const {}
#endif
#endif // GR_GPU_STATS
    } fStats;

#if GR_GPU_STATS && defined(GPU_TEST_UTILS)
    // Per-context stats about dynamic MSAA usage, keyed by trigger name; test-only.
    struct DMSAAStats {
        void dumpKeyValuePairs(skia_private::TArray<SkString>* keys,
                               skia_private::TArray<double>* values) const;
        void dump() const;
        void merge(const DMSAAStats&);
        int fNumRenderPasses = 0;
        int fNumMultisampleRenderPasses = 0;
        std::map<std::string, int> fTriggerCounts;
    };

    DMSAAStats fDMSAAStats;
#endif

    // Accessors for fStats (above).
    Stats* stats() { return &fStats; }
    const Stats* stats() const { return &fStats; }
    void dumpJSON(SkJSONWriter*) const;

protected:
    // Delete last in case other objects call it during destruction.
    std::unique_ptr<GrAuditTrail>     fAuditTrail;

private:
    OwnedArenas                       fArenas;

    std::unique_ptr<GrDrawingManager> fDrawingManager;
    std::unique_ptr<GrProxyProvider>  fProxyProvider;

#if defined(GPU_TEST_UTILS)
    int fSuppressWarningMessages = 0;
#endif
};
277 
278 /**
279  * Safely cast a possibly-null base context to direct context.
280  */
GrAsDirectContext(GrContext_Base * base)281 static inline GrDirectContext* GrAsDirectContext(GrContext_Base* base) {
282     return base ? base->asDirectContext() : nullptr;
283 }
284 
285 #endif
286