Flutter Engine
GrRecordingContext.cpp
/*
 * Copyright 2019 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "include/gpu/GrTypes.h"

#include <utility>

using namespace skia_private;

GrRecordingContext::ProgramData::ProgramData(std::unique_ptr<const GrProgramDesc> desc,
                                             const GrProgramInfo* info)
        : fDesc(std::move(desc))
        , fInfo(info) {
}

GrRecordingContext::ProgramData::ProgramData(ProgramData&& other)
        : fDesc(std::move(other.fDesc))
        , fInfo(other.fInfo) {
}

GrRecordingContext::ProgramData::~ProgramData() = default;

GrRecordingContext::GrRecordingContext(sk_sp<GrContextThreadSafeProxy> proxy, bool ddlRecording)
        : GrImageContext(std::move(proxy))
        , fArenas(ddlRecording) {
    fProxyProvider = std::make_unique<GrProxyProvider>(this);
}

bool GrRecordingContext::init() {
    if (!GrImageContext::init()) {
        return false;
    }

#if defined(GR_TEST_UTILS)
    prcOptions.fGpuPathRenderers = this->options().fGpuPathRenderers;
#endif
    // FIXME: Once this is removed from Chrome and Android, rename to fEnable"".
    if (this->options().fDisableDistanceFieldPaths) {
    }

    bool reduceOpsTaskSplitting = true;
    if (this->caps()->avoidReorderingRenderTasks()) {
        reduceOpsTaskSplitting = false;
    } else if (GrContextOptions::Enable::kYes == this->options().fReduceOpsTaskSplitting) {
        reduceOpsTaskSplitting = true;
    } else if (GrContextOptions::Enable::kNo == this->options().fReduceOpsTaskSplitting) {
        reduceOpsTaskSplitting = false;
    }

    fDrawingManager.reset(new GrDrawingManager(this,
                                               prcOptions,
                                               reduceOpsTaskSplitting));
    return true;
}
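
// Note on the block above: reduceOpsTaskSplitting defaults to on, is force-disabled when the
// caps workaround avoidReorderingRenderTasks() is set, and can otherwise be overridden
// explicitly through GrContextOptions::fReduceOpsTaskSplitting. The chosen value is baked into
// the GrDrawingManager created here.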

GrDrawingManager* GrRecordingContext::drawingManager() {
    return fDrawingManager.get();
}

void GrRecordingContext::destroyDrawingManager() {
    fDrawingManager.reset();
}

GrRecordingContext::Arenas::Arenas(SkArenaAlloc* recordTimeAllocator,
                                   sktext::gpu::SubRunAllocator* subRunAllocator)
        : fRecordTimeAllocator(recordTimeAllocator)
        , fRecordTimeSubRunAllocator(subRunAllocator) {
    // OwnedArenas should instantiate these before passing the bare pointer off to this struct.
    SkASSERT(subRunAllocator);
}

// Must be defined here so that std::unique_ptr can see the sizes of the various pools, otherwise
// it can't generate a default destructor for them.
GrRecordingContext::OwnedArenas::OwnedArenas(bool ddlRecording) : fDDLRecording(ddlRecording) {}
GrRecordingContext::OwnedArenas::~OwnedArenas() {}

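// For context on the comment above: std::unique_ptr<T> may be declared with an incomplete T,
// but its destructor (and therefore any implicitly generated destructor of the owning class)
// must be instantiated where T is complete. A minimal sketch of the idiom, with hypothetical
// names that are not part of this file:
//
//     // widget.h
//     #include <memory>
//     struct WidgetImpl;                         // forward declaration only
//     class Widget {
//     public:
//         Widget();
//         ~Widget();                             // declared here, defined in widget.cpp
//     private:
//         std::unique_ptr<WidgetImpl> fImpl;
//     };
//
//     // widget.cpp
//     struct WidgetImpl { int fValue = 0; };
//     Widget::Widget() : fImpl(std::make_unique<WidgetImpl>()) {}
//     Widget::~Widget() = default;               // WidgetImpl is complete here
//
// OwnedArenas follows the same pattern for the arena types it owns through std::unique_ptr.
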
GrRecordingContext::OwnedArenas& GrRecordingContext::OwnedArenas::operator=(OwnedArenas&& a) {
    fDDLRecording = a.fDDLRecording;
    fRecordTimeAllocator = std::move(a.fRecordTimeAllocator);
    fRecordTimeSubRunAllocator = std::move(a.fRecordTimeSubRunAllocator);
    return *this;
}

GrRecordingContext::Arenas GrRecordingContext::OwnedArenas::get() {
    if (!fRecordTimeAllocator && fDDLRecording) {
        // TODO: empirically determine a better number for SkArenaAlloc's firstHeapAllocation param
        fRecordTimeAllocator = std::make_unique<SkArenaAlloc>(1024);
    }

    if (!fRecordTimeSubRunAllocator) {
        fRecordTimeSubRunAllocator = std::make_unique<sktext::gpu::SubRunAllocator>();
    }

    return {fRecordTimeAllocator.get(), fRecordTimeSubRunAllocator.get()};
}

////////////////////////////////////////////////////////////////////////////////

///////////////////////////////////////////////////////////////////////////////////////////////////

#ifdef SK_ENABLE_DUMP_GPU

void GrRecordingContext::dumpJSON(SkJSONWriter* writer) const {
    writer->beginObject();

#if GR_GPU_STATS
    writer->appendS32("path_masks_generated", this->stats()->numPathMasksGenerated());
    writer->appendS32("path_mask_cache_hits", this->stats()->numPathMaskCacheHits());
#endif

    writer->endObject();
}
#else
void GrRecordingContext::dumpJSON(SkJSONWriter*) const { }
#endif

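// With GR_GPU_STATS enabled, dumpJSON() above emits a small object along the lines of
// (values are illustrative only):
//
//     { "path_masks_generated": 3, "path_mask_cache_hits": 12 }
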
#if defined(GR_TEST_UTILS)

#if GR_GPU_STATS

void GrRecordingContext::Stats::dump(SkString* out) const {
    out->appendf("Num Path Masks Generated: %d\n", fNumPathMasksGenerated);
    out->appendf("Num Path Mask Cache Hits: %d\n", fNumPathMaskCacheHits);
}

void GrRecordingContext::Stats::dumpKeyValuePairs(TArray<SkString>* keys,
                                                  TArray<double>* values) const {
    keys->push_back(SkString("path_masks_generated"));
    values->push_back(fNumPathMasksGenerated);

    keys->push_back(SkString("path_mask_cache_hits"));
    values->push_back(fNumPathMaskCacheHits);
}

void GrRecordingContext::DMSAAStats::dumpKeyValuePairs(TArray<SkString>* keys,
                                                       TArray<double>* values) const {
    keys->push_back(SkString("dmsaa_render_passes"));
    values->push_back(fNumRenderPasses);

    keys->push_back(SkString("dmsaa_multisample_render_passes"));
    values->push_back(fNumMultisampleRenderPasses);

    for (const auto& [name, count] : fTriggerCounts) {
        keys->push_back(SkStringPrintf("dmsaa_trigger_%s", name.c_str()));
        values->push_back(count);
    }
}

void GrRecordingContext::DMSAAStats::dump() const {
    SkDebugf("DMSAA Render Passes: %d\n", fNumRenderPasses);
    SkDebugf("DMSAA Multisample Render Passes: %d\n", fNumMultisampleRenderPasses);
    if (!fTriggerCounts.empty()) {
        SkDebugf("DMSAA Triggers:\n");
        for (const auto& [name, count] : fTriggerCounts) {
            SkDebugf(" %s: %d\n", name.c_str(), count);
        }
    }
}

void GrRecordingContext::DMSAAStats::merge(const DMSAAStats& stats) {
    fNumRenderPasses += stats.fNumRenderPasses;
    fNumMultisampleRenderPasses += stats.fNumMultisampleRenderPasses;
    for (const auto& [name, count] : stats.fTriggerCounts) {
        fTriggerCounts[name] += count;
    }
}

#endif // GR_GPU_STATS
#endif // defined(GR_TEST_UTILS)