Flutter Engine
The Flutter Engine
All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Properties Friends Macros Modules Pages
Recorder.cpp
Go to the documentation of this file.
1/*
2 * Copyright 2021 Google LLC
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
9
19
23#include "src/gpu/AtlasTypes.h"
24#include "src/gpu/DataUtils.h"
54
55namespace skgpu::graphite {
56
57#define ASSERT_SINGLE_OWNER SKGPU_ASSERT_SINGLE_OWNER(this->singleOwner())
58#define ASSERT_SINGLE_OWNER_PRIV SKGPU_ASSERT_SINGLE_OWNER(fRecorder->singleOwner())
59
60/*
61 * The default image provider doesn't perform any conversion so, by default, Graphite won't
62 * draw any non-Graphite-backed images.
63 */
// Fallback ImageProvider installed when RecorderOptions supplies none. It
// performs no conversion: findOrCreate() always returns nullptr, so
// non-Graphite-backed images simply fail to draw (see comment above).
64class DefaultImageProvider final : public ImageProvider {
65public:
// NOTE(review): the Make() factory declaration (original lines 66-67) was
// dropped by the doc extractor; only its closing brace survives below.
68 }
69
// findOrCreate() override — its signature lines (original 70 and 72) are
// missing from this listing. The surviving body asserts the incoming image is
// not already Graphite-backed, then returns nullptr (no conversion attempted).
71 const SkImage* image,
73 SkASSERT(!as_IB(image)->isGraphiteBacked());
74
75 return nullptr;
76 }
77
78private:
80};
81
82/**************************************************************************************************/
// Defaulted copy constructor for RecorderOptions. The sibling special members
// (original lines 83 and 85) are missing from this extracted listing.
84RecorderOptions::RecorderOptions(const RecorderOptions&) = default;
86
87/**************************************************************************************************/
88static uint32_t next_id() {
89 static std::atomic<uint32_t> nextID{1};
90 uint32_t id;
91 do {
92 id = nextID.fetch_add(1, std::memory_order_relaxed);
93 } while (id == SK_InvalidGenID);
94 return id;
95}
96
// Recorder constructor. NOTE(review): the first signature line (original line
// 97, presumably "Recorder::Recorder(sk_sp<SharedContext> sharedContext," —
// confirm against Recorder.h) is missing from this extracted listing.
98 const RecorderOptions& options,
99 const Context* context)
100 : fSharedContext(std::move(sharedContext))
101 , fRuntimeEffectDict(std::make_unique<RuntimeEffectDictionary>())
102 , fRootTaskList(new TaskList)
103 , fUniformDataCache(new UniformDataCache)
104 , fTextureDataCache(new TextureDataCache)
105 , fProxyReadCounts(new ProxyReadCountMap)
106 , fUniqueID(next_id())
107 , fAtlasProvider(std::make_unique<AtlasProvider>(this))
108 , fTokenTracker(std::make_unique<TokenTracker>())
109 , fStrikeCache(std::make_unique<sktext::gpu::StrikeCache>())
110 , fTextBlobCache(std::make_unique<sktext::gpu::TextBlobRedrawCoordinator>(fUniqueID)) {
// Fall back to the no-op DefaultImageProvider when the client supplied none.
111 fClientImageProvider = options.fImageProvider;
112 if (!fClientImageProvider) {
113 fClientImageProvider = DefaultImageProvider::Make();
114 }
115
// With a Context we borrow its ResourceProvider (fOwnedResourceProvider stays
// null); otherwise this Recorder owns a provider created against the shared
// context. fResourceProvider is a non-owning view in both cases.
116 if (context) {
117 fOwnedResourceProvider = nullptr;
118 fResourceProvider = context->priv().resourceProvider();
119 } else {
120 fOwnedResourceProvider = fSharedContext->makeResourceProvider(this->singleOwner(),
121 fUniqueID,
122 options.fGpuBudgetInBytes);
123 fResourceProvider = fOwnedResourceProvider.get();
124 }
// DrawBufferManager depends on UploadBufferManager, so construct in this order.
125 fUploadBufferManager = std::make_unique<UploadBufferManager>(fResourceProvider,
126 fSharedContext->caps());
127 fDrawBufferManager = std::make_unique<DrawBufferManager>(fResourceProvider,
128 fSharedContext->caps(),
129 fUploadBufferManager.get());
130
131 SkASSERT(fResourceProvider);
132}
133
// Recorder destructor. NOTE(review): the signature lines (original 134-135)
// are missing from this extracted listing.
// Fails any finish callbacks that were never handed off to a Recording, and
// detaches all still-tracked Devices so they do not dereference a dead
// Recorder.
136 // Any finished procs that haven't been passed to a Recording fail
137 for (int i = 0; i < fFinishedProcs.size(); ++i) {
138 fFinishedProcs[i]->setFailureResult();
139 }
140
141 for (auto& device : fTrackedDevices) {
142 // deregisterDevice() may have left an entry as null previously.
143 if (device) {
144 device->abandonRecorder();
145 }
146 }
147#if defined(GRAPHITE_TEST_UTILS)
148 if (fContext) {
149 fContext->priv().deregisterRecorder(this);
150 }
151#endif
152
153 // TODO: needed?
154 fStrikeCache->freeAll();
155}
156
// Reports which GPU backend API this Recorder targets; simply forwards to the
// SharedContext, which owns that decision.
157BackendApi Recorder::backend() const { return fSharedContext->backend(); }
158
// Captures all recorded work into a Recording and resets the Recorder's
// per-recording state. Returns nullptr on failure (buffer-mapping failure or
// task preparation failure). NOTE(review): original line 161 (between the
// trace macro and the flush — likely ASSERT_SINGLE_OWNER) is missing from
// this extracted listing.
159std::unique_ptr<Recording> Recorder::snap() {
160 TRACE_EVENT0("skia.gpu", TRACE_FUNC);
162 this->priv().flushTrackedDevices();
163
// Partition every lazy TextureProxy referenced by recorded draws into
// volatile vs non-volatile sets; these travel with the Recording so it can
// instantiate them at insertion time.
164 std::unordered_set<sk_sp<TextureProxy>, Recording::ProxyHash> nonVolatileLazyProxies;
165 std::unordered_set<sk_sp<TextureProxy>, Recording::ProxyHash> volatileLazyProxies;
166 fTextureDataCache->foreach([&](const TextureDataBlock* block) {
167 for (int j = 0; j < block->numTextures(); ++j) {
168 const TextureDataBlock::SampledTexture& tex = block->texture(j);
169
170 if (tex.first->isLazy()) {
171 if (tex.first->isVolatile()) {
172 volatileLazyProxies.insert(tex.first);
173 } else {
174 nonVolatileLazyProxies.insert(tex.first);
175 }
176 }
177 }
178 });
179
// If this snap captures a deferred canvas, hand its late-bound target over to
// the Recording and release the device/canvas that wrapped it.
180 std::unique_ptr<Recording::LazyProxyData> targetProxyData;
181 if (fTargetProxyData) {
182 targetProxyData = std::move(fTargetProxyData);
183 fTargetProxyDevice.reset();
184 fTargetProxyCanvas.reset();
185 }
186
187 // The scratch resources only need to be tracked until prepareResources() is finished, so
188 // Recorder doesn't hold a persistent manager and it can be deleted when snap() returns.
189 ScratchResourceManager scratchManager{fResourceProvider, std::move(fProxyReadCounts)};
190 fProxyReadCounts = std::make_unique<ProxyReadCountMap>();
191
192 // In both the "task failed" case and the "everything is discarded" case, there's no work that
193 // needs to be done in insertRecording(). However, we use nullptr as a failure signal, so
194 // kDiscard will return a non-null Recording that has no tasks in it.
195 if (fDrawBufferManager->hasMappingFailed() ||
196 fRootTaskList->prepareResources(fResourceProvider,
197 &scratchManager,
198 fRuntimeEffectDict.get()) == Task::Status::kFail) {
199 // Leaving 'fTrackedDevices' alone since they were flushed earlier and could still be
200 // attached to extant SkSurfaces.
201 fDrawBufferManager = std::make_unique<DrawBufferManager>(fResourceProvider,
202 fSharedContext->caps(),
203 fUploadBufferManager.get());
204 fTextureDataCache = std::make_unique<TextureDataCache>();
205 fUniformDataCache = std::make_unique<UniformDataCache>();
206 fRootTaskList->reset();
207 fRuntimeEffectDict->reset();
208 return nullptr;
209 }
210
211 std::unique_ptr<Recording> recording(new Recording(fNextRecordingID++,
212 fUniqueID,
213 std::move(nonVolatileLazyProxies),
214 std::move(volatileLazyProxies),
215 std::move(targetProxyData),
216 std::move(fFinishedProcs)));
217
218 // Allow the buffer managers to add any collected tasks for data transfer or initialization
219 // before moving the root task list to the Recording.
220 fDrawBufferManager->transferToRecording(recording.get());
221 fUploadBufferManager->transferToRecording(recording.get());
222 recording->priv().addTasks(std::move(*fRootTaskList));
223
// Reset per-recording caches so the next recording starts clean. Atlases are
// only evicted when out-of-order recording playback is allowed.
224 SkASSERT(!fRootTaskList->hasTasks());
225 fRuntimeEffectDict->reset();
226 fTextureDataCache = std::make_unique<TextureDataCache>();
227 fUniformDataCache = std::make_unique<UniformDataCache>();
228 if (!this->priv().caps()->requireOrderedRecordings()) {
229 fAtlasProvider->textAtlasManager()->evictAtlases();
230 }
231
232 return recording;
233}
234
// Creates a canvas whose render target texture is bound later, at Recording
// insertion time. Returns nullptr for unsupported (mipmapped) targets or if a
// previous deferred canvas has not been snapped yet. NOTE(review): the first
// signature line (original 235-236, "SkCanvas* Recorder::makeDeferredCanvas(
// const SkImageInfo& imageInfo," per the index) is missing from this listing.
237 const TextureInfo& textureInfo) {
238 // Mipmaps can't reasonably be kept valid on a deferred surface with no actual texture.
239 if (textureInfo.mipmapped() == Mipmapped::kYes) {
240 SKGPU_LOG_W("Requested a deferred canvas with mipmapping; this is not supported");
241 return nullptr;
242 }
243
244 if (fTargetProxyCanvas) {
245 // Require snapping before requesting another canvas.
246 SKGPU_LOG_W("Requested a new deferred canvas before snapping the previous one");
247 return nullptr;
248 }
249
250 fTargetProxyData = std::make_unique<Recording::LazyProxyData>(textureInfo);
251 // Use kLoad for the initial load op since the purpose of a deferred canvas is to draw on top
252 // of an existing, late-bound texture.
253 fTargetProxyDevice = Device::Make(this,
254 fTargetProxyData->refLazyProxy(),
255 imageInfo.dimensions(),
256 imageInfo.colorInfo(),
257 {},
// NOTE(review): the final Device::Make() argument (the LoadOp, per the comment
// above — likely "LoadOp::kLoad);") was dropped by the doc extractor here.
258 fTargetProxyCanvas = std::make_unique<SkCanvas>(fTargetProxyDevice);
259 return fTargetProxyCanvas.get();
260}
261
// Starts tracking a Device so its pending work is flushed with this Recorder.
// NOTE(review): original lines 263 and 265 (likely ASSERT_SINGLE_OWNER and an
// SkASSERT on 'device') are missing from this extracted listing.
262void Recorder::registerDevice(sk_sp<Device> device) {
264
266
267 // By taking a ref on tracked devices, the Recorder prevents the Device from being deleted on
268 // another thread unless the Recorder has been destroyed or the device has abandoned its
269 // recorder (e.g. was marked immutable).
270 fTrackedDevices.emplace_back(std::move(device));
271}
272
// Stops tracking 'device'. The slot is nulled rather than erased so that any
// in-progress iteration over fTrackedDevices (see flushTrackedDevices) stays
// valid; null entries are compacted later. NOTE(review): original line 274
// (likely ASSERT_SINGLE_OWNER) is missing from this extracted listing.
273void Recorder::deregisterDevice(const Device* device) {
275 for (int i = 0; i < fTrackedDevices.size(); ++i) {
276 if (fTrackedDevices[i].get() == device) {
277 // Don't modify the list structure of fTrackedDevices within this loop
278 fTrackedDevices[i] = nullptr;
279 break;
280 }
281 }
282}
283
// Creates a backend texture via the resource provider. Returns a
// default-constructed (invalid) BackendTexture if 'info' is invalid or built
// for a different backend API. NOTE(review): the signature lines (original
// 284-285, "BackendTexture Recorder::createBackendTexture(SkISize dimensions,
// const TextureInfo& info)" per the index) are missing from this listing.
286
287 if (!info.isValid() || info.backend() != this->backend()) {
288 return {};
289 }
290 return fResourceProvider->createBackendTexture(dimensions, info);
291}
292
#ifdef SK_BUILD_FOR_ANDROID

// Wraps an AHardwareBuffer in a BackendTexture. Only the Vulkan backend
// supports AHardwareBuffer-backed textures; any other backend logs a warning
// and returns a default-constructed (invalid) BackendTexture.
//
// NOTE(review): the original signature line was dropped by the doc extractor;
// it is reconstructed here from the surviving parameter lines and the call
// below — confirm against Recorder.h.
BackendTexture Recorder::createBackendTexture(AHardwareBuffer* hardwareBuffer,
                                              bool isRenderable,
                                              bool isProtectedContent,
                                              SkISize dimensions,
                                              bool fromAndroidWindow) const {
    if (fSharedContext->backend() != BackendApi::kVulkan) {
        // Bug fix: the adjacent string literals previously concatenated to
        // "...supported with theVulkan backend." — a space was missing.
        SKGPU_LOG_W("Creating an AHardwareBuffer-backed BackendTexture is only supported with the "
                    "Vulkan backend.");
        return {};
    }
    return fResourceProvider->createBackendTexture(hardwareBuffer,
                                                   isRenderable,
                                                   isProtectedContent,
                                                   dimensions,
                                                   fromAndroidWindow);
}

#endif // SK_BUILD_FOR_ANDROID
313
// Uploads pixel data into an existing backend texture by recording an
// UploadTask. Returns false on any validation or setup failure. For mipmapped
// textures the caller must supply the complete mip chain. NOTE(review): the
// first signature line (original 313-314, "bool Recorder::updateBackendTexture(
// const BackendTexture& backendTex," per the index) is missing from this
// listing, as are several interior lines noted below.
315 const SkPixmap srcData[],
316 int numLevels) {
318
// Reject textures that are invalid or belong to a different backend API.
319 if (!backendTex.isValid() || backendTex.backend() != this->backend()) {
320 return false;
321 }
322
323 if (!srcData || numLevels <= 0) {
324 return false;
325 }
326
327 // If the texture has MIP levels then we require that the full set is overwritten.
328 int numExpectedLevels = 1;
329 if (backendTex.info().mipmapped() == Mipmapped::kYes) {
330 numExpectedLevels = SkMipmap::ComputeLevelCount(backendTex.dimensions().width(),
331 backendTex.dimensions().height()) + 1;
332 }
333 if (numLevels != numExpectedLevels) {
334 return false;
335 }
336
// All levels must share the base level's color info (asserted in the loop
// below), so level 0's color type drives the compatibility check.
337 SkColorType ct = srcData[0].colorType();
338
339 if (!this->priv().caps()->areColorTypeAndTextureInfoCompatible(ct, backendTex.info())) {
340 return false;
341 }
342
// NOTE(review): original line 343 (wrapping backendTex into 'texture' —
// likely via fResourceProvider->createWrappedTexture) is missing here.
344 if (!texture) {
345 return false;
346 }
347
// NOTE(review): original line 348 (creating 'proxy' — likely
// TextureProxy::Wrap(std::move(texture))) is missing here.
349
350 std::vector<MipLevel> mipLevels;
351 mipLevels.resize(numLevels);
352
353 for (int i = 0; i < numLevels; ++i) {
354 SkASSERT(srcData[i].addr());
355 SkASSERT(srcData[i].info().colorInfo() == srcData[0].info().colorInfo());
356
357 mipLevels[i].fPixels = srcData[i].addr();
358 mipLevels[i].fRowBytes = srcData[i].rowBytes();
359 }
360
361 // Src and dst colorInfo are the same
362 const SkColorInfo& colorInfo = srcData[0].info().colorInfo();
363 // Add UploadTask to Recorder
// NOTE(review): original line 364 (the "UploadInstance upload =
// UploadInstance::Make(this," opener) is missing here.
365 std::move(proxy),
366 colorInfo, colorInfo,
367 mipLevels,
368 SkIRect::MakeSize(backendTex.dimensions()),
369 std::make_unique<ImageUploadContext>());
370 if (!upload.isValid()) {
371 SKGPU_LOG_E("Recorder::updateBackendTexture: Could not create UploadInstance");
372 return false;
373 }
374 sk_sp<Task> uploadTask = UploadTask::Make(std::move(upload));
375
376 // Need to flush any pending work in case it depends on this texture
377 this->priv().flushTrackedDevices();
378
379 this->priv().add(std::move(uploadTask));
380
381 return true;
382}
383
// Uploads compressed texture data into an existing backend texture via an
// UploadTask; returns false on validation or setup failure. NOTE(review): the
// first signature line (original 383-384, "bool
// Recorder::updateCompressedBackendTexture(const BackendTexture& backendTex,"
// per the index) is missing from this listing, as are interior lines noted
// below.
385 const void* data,
386 size_t dataSize) {
388
389 if (!backendTex.isValid() || backendTex.backend() != this->backend()) {
390 return false;
391 }
392
393 if (!data) {
394 return false;
395 }
396
// NOTE(review): original line 397 (wrapping backendTex into 'texture') is
// missing here.
398 if (!texture) {
399 return false;
400 }
401
// NOTE(review): original lines 402 and 405 (creating 'proxy' and the
// "UploadInstance upload = UploadInstance::MakeCompressed(this," opener) are
// missing here.
403
404 // Add UploadTask to Recorder
406 std::move(proxy),
407 data,
408 dataSize);
409 if (!upload.isValid()) {
// NOTE(review): message names updateBackendTexture, not
// updateCompressedBackendTexture — looks like a copy-paste; confirm upstream.
410 SKGPU_LOG_E("Recorder::updateBackendTexture: Could not create UploadInstance");
411 return false;
412 }
413 sk_sp<Task> uploadTask = UploadTask::Make(std::move(upload));
414
415 // Need to flush any pending work in case it depends on this texture
416 this->priv().flushTrackedDevices();
417
418 this->priv().add(std::move(uploadTask));
419
420 return true;
421}
422
// Deletes a backend texture through the resource provider; silently ignores
// textures that are invalid or belong to a different backend API.
// NOTE(review): the signature lines (original 423-424, "void
// Recorder::deleteBackendTexture(const BackendTexture& texture)" per the
// index) are missing from this extracted listing.
425
426 if (!texture.isValid() || texture.backend() != this->backend()) {
427 return;
428 }
429 fResourceProvider->deleteBackendTexture(texture);
430}
431
// Queues a client finished-proc; it is handed to the next snapped Recording,
// or failed in ~Recorder if never snapped. NOTE(review): the signature line
// (original 432) and the "sk_sp<RefCntedCallback> callback =" line (original
// 434) are missing from this extracted listing.
433 if (info.fFinishedProc) {
435 RefCntedCallback::Make(info.fFinishedProc, info.fFinishedContext);
436 fFinishedProcs.push_back(std::move(callback));
437 }
438}
439
// Releases GPU resources that are safe to free mid-recording: the atlas
// texture pool and the resource cache's free-able resources. NOTE(review):
// the signature lines (original 440-441, "void Recorder::freeGpuResources()")
// are missing from this extracted listing.
442
443 // We don't want to free the Uniform/TextureDataCaches or the Draw/UploadBufferManagers since
444 // all their resources need to be held on to until a Recording is snapped. And once snapped, all
445 // their held resources are released. The StrikeCache and TextBlobCache don't hold onto any Gpu
446 // resources.
447
448 // The AtlasProvider gives out refs to TextureProxies so it should be safe to clear its pool
449 // in the middle of Recording since those using the previous TextureProxies will have refs on
450 // them.
451 fAtlasProvider->clearTexturePool();
452
453 fResourceProvider->freeGpuResources();
454}
455
// Purges cached resources that have not been used within the last 'msNotUsed'
// milliseconds, measured against the steady clock. NOTE(review): original
// line 457 (likely ASSERT_SINGLE_OWNER) is missing from this listing.
456void Recorder::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
458
459 auto purgeTime = skgpu::StdSteadyClock::now() - msNotUsed;
460 fResourceProvider->purgeResourcesNotUsedSince(purgeTime);
461}
462
// Three Recorder accessors whose signature lines were dropped by the doc
// extractor; per the index they are currentBudgetedBytes() (original 463-464),
// maxBudgetedBytes() (468-469), and dumpMemoryStatistics(SkTraceMemoryDump*)
// (473-474). Each forwards to the resource provider.
465 return fResourceProvider->getResourceCacheCurrentBudgetedBytes();
466}
467
470 return fResourceProvider->getResourceCacheLimit();
471}
472
475 fResourceProvider->dumpMemoryStatistics(traceMemoryDump);
476 // TODO: What is the graphite equivalent for the text blob cache and how do we print out its
477 // used bytes here (see Ganesh implementation).
478}
479
// Two RecorderPriv helpers whose signature lines were dropped by the doc
// extractor; per the index they are addPendingRead(const TextureProxy*)
// (original 480-481), which bumps the proxy's read count, and
// add(sk_sp<Task>) (485-486), which appends a task to the root task list.
482 fRecorder->fProxyReadCounts->increment(proxy);
483}
484
487 fRecorder->fRootTaskList->add(std::move(task));
488}
489
// Flushes pending work from every tracked Device, tolerating re-entrant
// flushes and device registration/deregistration mid-flush; the outermost
// call also compacts null/abandoned device entries. NOTE(review): the
// signature lines (original 490-491, "void
// RecorderPriv::flushTrackedDevices()") are missing from this listing.
492
493 // If this is the initial flushTrackedDevices() call, fFlushingDevicesIndex will be -1
494 // so we start iterating at 0. We remember the starting device index to perform clean up only
495 // when it was 0 to prevent modifying the underlying data structure while iterating over it.
496 // However, when flushing one device it may register new devices as well as recursively call
497 // flushTrackedDevices(). In that case, it picks up the next device after the current one that
498 // triggered the recursive flush since all prior devices have been flushed or are in progress
499 // (and they should not be flushed while in an unfinished flush). When the control flow returns
500 // to the outer flushTrackedDevices(), it will pick up with wherever the inner flush had ended.
501 // TODO(b/330864257): Once paint data is extracted at draw time (so picture shaders are rendered
502 // to images before a flush instead of inside a flush), we can simplify this and assert that
503 // flushTrackedDevices() is not recursively called and that devices are not added or removed
504 // while flushing.
505 const int startingIndex = fRecorder->fFlushingDevicesIndex;
506 while (fRecorder->fFlushingDevicesIndex < fRecorder->fTrackedDevices.size() - 1) {
507 // Advance before calling flushPendingWorkToRecorder() so that any re-entrant call to
508 // flushTrackedDevices() will skip the current device.
509 fRecorder->fFlushingDevicesIndex++;
510 // Entries may be set to null from a call to deregisterDevice(), which will be cleaned up
511 // along with any immutable or uniquely held Devices once everything is flushed.
512 Device* device = fRecorder->fTrackedDevices[fRecorder->fFlushingDevicesIndex].get();
513 if (device) {
514 device->flushPendingWorkToRecorder();
515 }
516 }
517
518 // Issue next upload flush token. This is only used by the atlasing code which
519 // always uses this method. Calling in Device::flushPendingWorkToRecorder may
520 // miss parent device flushes, increment too often, and lead to atlas corruption.
521 this->tokenTracker()->issueFlushToken();
522
523 if (startingIndex < 0) {
524 // Initial call to flushTrackedDevices() so cleanup null/immutable devices and reset the
525 // loop index.
526 int i = 0;
527 while (i < fRecorder->fTrackedDevices.size()) {
528 Device* device = fRecorder->fTrackedDevices[i].get();
529 if (!device || !device->recorder() || device->unique()) {
530 if (device) {
531 device->abandonRecorder(); // Keep ~Device() happy
532 }
533 fRecorder->fTrackedDevices.removeShuffle(i);
534 // Keep i as-is to process what was just shuffled to the ith index.
535 } else {
536 i++;
537 }
538 }
539
540 fRecorder->fFlushingDevicesIndex = -1;
541 }
542}
543
// Static helper: finds or creates a cached TextureProxy for 'bitmap' via the
// recorder's ProxyCache; returns nullptr when no recorder is supplied.
// NOTE(review): the first signature line (original 544,
// "sk_sp<TextureProxy> RecorderPriv::CreateCachedProxy(Recorder* recorder,"
// per the index) is missing from this extracted listing.
545 const SkBitmap& bitmap,
546 std::string_view label) {
547 SkASSERT(!bitmap.isNull());
548
549 if (!recorder) {
550 return nullptr;
551 }
552 return recorder->priv().proxyCache()->findOrCreateCachedProxy(recorder,
553 bitmap,
554 std::move(label));
555}
556
// RecorderPriv::getResourceCacheLimit() — its signature lines (original
// 556-557) are missing from this extracted listing; per the index it returns
// size_t, forwarding to the resource provider's cache limit.
558 return fRecorder->fResourceProvider->getResourceCacheLimit();
559}
560
561#if defined(GRAPHITE_TEST_UTILS)
// Test-only: linear scan of tracked devices for an exact pointer match.
// NOTE(review): original line 563 (likely ASSERT_SINGLE_OWNER_PRIV) is
// missing from this extracted listing.
562bool RecorderPriv::deviceIsRegistered(Device* device) const {
564 for (const sk_sp<Device>& currentDevice : fRecorder->fTrackedDevices) {
565 if (device == currentDevice.get()) {
566 return true;
567 }
568 }
569 return false;
570}
571
572// used by the Context that created this Recorder to set a back pointer
// (test-only: compiled under GRAPHITE_TEST_UTILS; fContext is read by
// ~Recorder to deregister from the Context).
573void RecorderPriv::setContext(Context* context) {
574 fRecorder->fContext = context;
575}
576#endif
577
578
579} // namespace skgpu::graphite
const char * options
static void info(const char *fmt,...) SK_PRINTF_LIKE(1
Definition: DM.cpp:213
struct AHardwareBuffer AHardwareBuffer
#define SKGPU_LOG_E(fmt,...)
Definition: Log.h:38
#define SKGPU_LOG_W(fmt,...)
Definition: Log.h:40
#define ASSERT_SINGLE_OWNER
Definition: Recorder.cpp:57
#define ASSERT_SINGLE_OWNER_PRIV
Definition: Recorder.cpp:58
#define SkASSERT(cond)
Definition: SkAssert.h:116
SkColorType
Definition: SkColorType.h:19
static SkImage_Base * as_IB(SkImage *image)
Definition: SkImage_Base.h:201
sk_sp< T > sk_ref_sp(T *obj)
Definition: SkRefCnt.h:381
const Context & fContext
#define TRACE_FUNC
Definition: SkTraceEvent.h:30
static constexpr uint32_t SK_InvalidGenID
Definition: SkTypes.h:192
static int ComputeLevelCount(int baseWidth, int baseHeight)
Definition: SkMipmap.cpp:134
size_t rowBytes() const
Definition: SkPixmap.h:145
SkColorType colorType() const
Definition: SkPixmap.h:173
const SkImageInfo & info() const
Definition: SkPixmap.h:135
const void * addr() const
Definition: SkPixmap.h:153
T * get() const
Definition: SkRefCnt.h:303
static sk_sp< RefCntedCallback > Make(Callback proc, Context ctx)
const TextureInfo & info() const
sk_sp< SkImage > findOrCreate(Recorder *recorder, const SkImage *image, SkImage::RequiredProperties) override
Definition: Recorder.cpp:70
static sk_sp< DefaultImageProvider > Make()
Definition: Recorder.cpp:66
static sk_sp< Device > Make(Recorder *recorder, sk_sp< TextureProxy >, SkISize deviceSize, const SkColorInfo &, const SkSurfaceProps &, LoadOp initialLoadOp, bool registerWithRecorder=true)
Definition: Device.cpp:276
sk_sp< TextureProxy > findOrCreateCachedProxy(Recorder *, const SkBitmap &, std::string_view label)
Definition: ProxyCache.cpp:74
void addPendingRead(const TextureProxy *)
Definition: Recorder.cpp:480
TokenTracker * tokenTracker()
Definition: RecorderPriv.h:62
size_t getResourceCacheLimit() const
Definition: Recorder.cpp:557
ResourceProvider * resourceProvider()
Definition: RecorderPriv.h:33
static sk_sp< TextureProxy > CreateCachedProxy(Recorder *, const SkBitmap &, std::string_view label)
Definition: Recorder.cpp:544
void add(sk_sp< Task >)
Definition: Recorder.cpp:485
SkCanvas * makeDeferredCanvas(const SkImageInfo &, const TextureInfo &)
Definition: Recorder.cpp:235
size_t currentBudgetedBytes() const
Definition: Recorder.cpp:463
BackendApi backend() const
Definition: Recorder.cpp:157
void dumpMemoryStatistics(SkTraceMemoryDump *traceMemoryDump) const
Definition: Recorder.cpp:473
void performDeferredCleanup(std::chrono::milliseconds msNotUsed)
Definition: Recorder.cpp:456
void deleteBackendTexture(const BackendTexture &)
Definition: Recorder.cpp:423
bool updateCompressedBackendTexture(const BackendTexture &, const void *data, size_t dataSize)
Definition: Recorder.cpp:384
size_t maxBudgetedBytes() const
Definition: Recorder.cpp:468
BackendTexture createBackendTexture(SkISize dimensions, const TextureInfo &)
Definition: Recorder.cpp:284
void addFinishInfo(const InsertFinishInfo &)
Definition: Recorder.cpp:432
std::unique_ptr< Recording > snap()
Definition: Recorder.cpp:159
bool updateBackendTexture(const BackendTexture &, const SkPixmap srcData[], int numLevels)
Definition: Recorder.cpp:314
Recorder(const Recorder &)=delete
void purgeResourcesNotUsedSince(StdSteadyClock::time_point purgeTime)
sk_sp< Texture > createWrappedTexture(const BackendTexture &, std::string_view label)
BackendTexture createBackendTexture(SkISize dimensions, const TextureInfo &)
void deleteBackendTexture(const BackendTexture &)
size_t getResourceCacheCurrentBudgetedBytes() const
void dumpMemoryStatistics(SkTraceMemoryDump *traceMemoryDump) const
std::pair< sk_sp< TextureProxy >, SamplerDesc > SampledTexture
Definition: PipelineData.h:57
const SampledTexture & texture(int index) const
Definition: PipelineData.h:64
Mipmapped mipmapped() const
Definition: TextureInfo.h:79
static sk_sp< TextureProxy > Wrap(sk_sp< Texture >)
static UploadInstance MakeCompressed(Recorder *, sk_sp< TextureProxy > targetProxy, const void *data, size_t dataSize)
Definition: UploadTask.cpp:235
static UploadInstance Make(Recorder *, sk_sp< TextureProxy > targetProxy, const SkColorInfo &srcColorInfo, const SkColorInfo &dstColorInfo, SkSpan< const MipLevel > levels, const SkIRect &dstRect, std::unique_ptr< ConditionalUploadContext >)
Definition: UploadTask.cpp:91
static sk_sp< UploadTask > Make(UploadList *)
Definition: UploadTask.cpp:425
VkDevice device
Definition: main.cc:53
FlKeyEvent uint64_t FlKeyResponderAsyncCallback callback
FlPixelBufferTexturePrivate * priv
FlTexture * texture
sk_sp< const SkImage > image
Definition: SkRecords.h:269
Definition: bitmap.py:1
const myers::Point & get(const myers::Segment &)
PipelineDataCache< UniformDataBlock > UniformDataCache
Definition: Recorder.h:60
static uint32_t next_id()
Definition: DrawAtlas.cpp:70
PipelineDataCache< TextureDataBlock > TextureDataCache
Definition: Recorder.h:61
BackendApi
Definition: GpuTypes.h:22
Definition: ref_ptr.h:256
Definition: upload.py:1
static constexpr SkIRect MakeSize(const SkISize &size)
Definition: SkRect.h:66
Definition: SkSize.h:16
constexpr int32_t width() const
Definition: SkSize.h:36
constexpr int32_t height() const
Definition: SkSize.h:37
const SkColorInfo & colorInfo() const
Definition: SkImageInfo.h:404
SkISize dimensions() const
Definition: SkImageInfo.h:421
std::shared_ptr< const fml::Mapping > data
Definition: texture_gles.cc:63
const uintptr_t id
#define TRACE_EVENT0(category_group, name)
Definition: trace_event.h:131