Flutter Engine
Recorder.cpp
/*
 * Copyright 2021 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/gpu/AtlasTypes.h"
#include "src/gpu/DataUtils.h"

namespace skgpu::graphite {

#define ASSERT_SINGLE_OWNER SKGPU_ASSERT_SINGLE_OWNER(this->singleOwner())
#define ASSERT_SINGLE_OWNER_PRIV SKGPU_ASSERT_SINGLE_OWNER(fRecorder->singleOwner())

/*
 * The default image provider doesn't perform any conversion so, by default, Graphite won't
 * draw any non-Graphite-backed images.
 */
class DefaultImageProvider final : public ImageProvider {
public:
    static sk_sp<DefaultImageProvider> Make() {
        return sk_sp<DefaultImageProvider>(new DefaultImageProvider);
    }

    sk_sp<SkImage> findOrCreate(Recorder* recorder,
                                const SkImage* image,
                                SkImage::RequiredProperties) override {
        SkASSERT(!as_IB(image)->isGraphiteBacked());

        return nullptr;
    }

private:
    DefaultImageProvider() {}
};
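Because the default provider simply rejects every non-Graphite-backed image, clients that want to draw CPU-backed SkImages have to install their own provider through RecorderOptions::fImageProvider (consumed in the Recorder constructor below). The following is a minimal illustrative sketch, not part of Recorder.cpp: it assumes the public skgpu::graphite::ImageProvider interface and SkImages::TextureFromImage() from include/gpu/graphite/Image.h, the class name UploadingImageProvider is invented, and it performs no caching (a real provider would typically cache by image unique ID).

// Illustrative sketch (not part of this file): an ImageProvider that uploads
// CPU-backed images on demand instead of rejecting them.
class UploadingImageProvider final : public skgpu::graphite::ImageProvider {
public:
    sk_sp<SkImage> findOrCreate(skgpu::graphite::Recorder* recorder,
                                const SkImage* image,
                                SkImage::RequiredProperties requiredProps) override {
        // TextureFromImage copies a non-Graphite image into a Graphite-backed one.
        return SkImages::TextureFromImage(recorder, image, requiredProps);
    }
};

// Installed when the Recorder is created, e.g.:
//   skgpu::graphite::RecorderOptions options;
//   options.fImageProvider = sk_make_sp<UploadingImageProvider>();
//   std::unique_ptr<skgpu::graphite::Recorder> recorder = context->makeRecorder(options);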

/**************************************************************************************************/
RecorderOptions::RecorderOptions() = default;
RecorderOptions::RecorderOptions(const RecorderOptions&) = default;
RecorderOptions::~RecorderOptions() = default;

/**************************************************************************************************/
static uint32_t next_id() {
    static std::atomic<uint32_t> nextID{1};
    uint32_t id;
    do {
        id = nextID.fetch_add(1, std::memory_order_relaxed);
    } while (id == SK_InvalidGenID);
    return id;
}

Recorder::Recorder(sk_sp<SharedContext> sharedContext, const RecorderOptions& options)
        : fSharedContext(std::move(sharedContext))
        , fRuntimeEffectDict(std::make_unique<RuntimeEffectDictionary>())
        , fRootTaskList(new TaskList)
        , fUniformDataCache(new UniformDataCache)
        , fTextureDataCache(new TextureDataCache)
        , fUniqueID(next_id())
        , fAtlasProvider(std::make_unique<AtlasProvider>(this))
        , fTokenTracker(std::make_unique<TokenTracker>())
        , fStrikeCache(std::make_unique<sktext::gpu::StrikeCache>())
        , fTextBlobCache(std::make_unique<sktext::gpu::TextBlobRedrawCoordinator>(fUniqueID)) {
    fClientImageProvider = options.fImageProvider;
    if (!fClientImageProvider) {
        fClientImageProvider = DefaultImageProvider::Make();
    }

    fResourceProvider = fSharedContext->makeResourceProvider(this->singleOwner(),
                                                             fUniqueID,
                                                             options.fGpuBudgetInBytes);
    fUploadBufferManager = std::make_unique<UploadBufferManager>(fResourceProvider.get(),
                                                                 fSharedContext->caps());
    fDrawBufferManager = std::make_unique<DrawBufferManager>(fResourceProvider.get(),
                                                             fSharedContext->caps(),
                                                             fUploadBufferManager.get());

    SkASSERT(fResourceProvider);
}

Recorder::~Recorder() {
    ASSERT_SINGLE_OWNER
    // Any finished procs that haven't been passed to a Recording fail
    for (int i = 0; i < fFinishedProcs.size(); ++i) {
        fFinishedProcs[i]->setFailureResult();
    }

    for (auto& device : fTrackedDevices) {
        // deregisterDevice() may have left an entry as null previously.
        if (device) {
            device->abandonRecorder();
        }
    }
#if defined(GRAPHITE_TEST_UTILS)
    if (fContext) {
        fContext->priv().deregisterRecorder(this);
    }
#endif

    // TODO: needed?
    fStrikeCache->freeAll();
}

BackendApi Recorder::backend() const { return fSharedContext->backend(); }

std::unique_ptr<Recording> Recorder::snap() {
    TRACE_EVENT0("skia.gpu", TRACE_FUNC);
    ASSERT_SINGLE_OWNER
    this->priv().flushTrackedDevices();

    std::unordered_set<sk_sp<TextureProxy>, Recording::ProxyHash> nonVolatileLazyProxies;
    std::unordered_set<sk_sp<TextureProxy>, Recording::ProxyHash> volatileLazyProxies;
    fTextureDataCache->foreach([&](const TextureDataBlock* block) {
        for (int j = 0; j < block->numTextures(); ++j) {
            const TextureDataBlock::SampledTexture& tex = block->texture(j);

            if (tex.first->isLazy()) {
                if (tex.first->isVolatile()) {
                    volatileLazyProxies.insert(tex.first);
                } else {
                    nonVolatileLazyProxies.insert(tex.first);
                }
            }
        }
    });

    std::unique_ptr<Recording::LazyProxyData> targetProxyData;
    if (fTargetProxyData) {
        targetProxyData = std::move(fTargetProxyData);
        fTargetProxyDevice.reset();
        fTargetProxyCanvas.reset();
    }

    // In both the "task failed" case and the "everything is discarded" case, there's no work that
    // needs to be done in insertRecording(). However, we use nullptr as a failure signal, so
    // kDiscard will return a non-null Recording that has no tasks in it.
    if (fDrawBufferManager->hasMappingFailed() ||
        fRootTaskList->prepareResources(fResourceProvider.get(),
                                        fRuntimeEffectDict.get()) == Task::Status::kFail) {
        // Leaving 'fTrackedDevices' alone since they were flushed earlier and could still be
        // attached to extant SkSurfaces.
        fDrawBufferManager = std::make_unique<DrawBufferManager>(fResourceProvider.get(),
                                                                 fSharedContext->caps(),
                                                                 fUploadBufferManager.get());
        fTextureDataCache = std::make_unique<TextureDataCache>();
        fUniformDataCache = std::make_unique<UniformDataCache>();
        fRootTaskList->reset();
        fRuntimeEffectDict->reset();
        return nullptr;
    }

    std::unique_ptr<Recording> recording(new Recording(fNextRecordingID++,
                                                       fUniqueID,
                                                       std::move(nonVolatileLazyProxies),
                                                       std::move(volatileLazyProxies),
                                                       std::move(targetProxyData),
                                                       std::move(fFinishedProcs)));

    // Allow the buffer managers to add any collected tasks for data transfer or initialization
    // before moving the root task list to the Recording.
    fDrawBufferManager->transferToRecording(recording.get());
    fUploadBufferManager->transferToRecording(recording.get());
    recording->priv().addTasks(std::move(*fRootTaskList));

    SkASSERT(!fRootTaskList->hasTasks());
    fRuntimeEffectDict->reset();
    fTextureDataCache = std::make_unique<TextureDataCache>();
    fUniformDataCache = std::make_unique<UniformDataCache>();
    if (!this->priv().caps()->requireOrderedRecordings()) {
        fAtlasProvider->textAtlasManager()->evictAtlases();
    }

    return recording;
}

SkCanvas* Recorder::makeDeferredCanvas(const SkImageInfo& imageInfo,
                                       const TextureInfo& textureInfo) {
    // Mipmaps can't reasonably be kept valid on a deferred surface with no actual texture.
    if (textureInfo.mipmapped() == Mipmapped::kYes) {
        SKGPU_LOG_W("Requested a deferred canvas with mipmapping; this is not supported");
        return nullptr;
    }

    if (fTargetProxyCanvas) {
        // Require snapping before requesting another canvas.
        SKGPU_LOG_W("Requested a new deferred canvas before snapping the previous one");
        return nullptr;
    }

    fTargetProxyData = std::make_unique<Recording::LazyProxyData>(textureInfo);
    // Use kLoad for the initial load op since the purpose of a deferred canvas is to draw on top
    // of an existing, late-bound texture.
    fTargetProxyDevice = Device::Make(this,
                                      fTargetProxyData->refLazyProxy(),
                                      imageInfo.dimensions(),
                                      imageInfo.colorInfo(),
                                      {},
                                      LoadOp::kLoad);
    fTargetProxyCanvas = std::make_unique<SkCanvas>(fTargetProxyDevice);
    return fTargetProxyCanvas.get();
}
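To show how this deferred-canvas path is meant to be driven, here is a hedged sketch written against the public Graphite API rather than anything in this file: drawing is recorded against the lazily instantiated target, and the concrete surface is supplied only when the snapped Recording is inserted. The function name deferredDrawSketch is a placeholder, headers are omitted, and InsertRecordingInfo::fTargetSurface is assumed to be the late-binding field in the current public headers.

// Sketch (not part of this file): record onto a deferred canvas now, bind the real target later.
void deferredDrawSketch(skgpu::graphite::Context* context,
                        skgpu::graphite::Recorder* recorder,
                        const SkImageInfo& imageInfo,
                        const skgpu::graphite::TextureInfo& textureInfo,
                        sk_sp<SkSurface> lateBoundTarget) {
    SkCanvas* canvas = recorder->makeDeferredCanvas(imageInfo, textureInfo);
    if (!canvas) {
        return;  // mipmapped TextureInfo, or a previous deferred canvas was never snapped
    }
    canvas->drawColor(SK_ColorBLUE);  // replayed on top of the late-bound target (LoadOp::kLoad)

    std::unique_ptr<skgpu::graphite::Recording> recording = recorder->snap();
    if (!recording) {
        return;
    }

    skgpu::graphite::InsertRecordingInfo info;
    info.fRecording = recording.get();
    info.fTargetSurface = lateBoundTarget.get();  // resolves the Recording's lazy target proxy
    context->insertRecording(info);
}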

void Recorder::registerDevice(sk_sp<Device> device) {
    ASSERT_SINGLE_OWNER

    SkASSERT(device);
#if defined(SK_DEBUG)
    // TODO(b/333073673): Confirm the device isn't already in the tracked list
    for (const sk_sp<Device>& tracked : fTrackedDevices) {
        SkASSERT(tracked.get() != device.get());
    }
#endif

    // By taking a ref on tracked devices, the Recorder prevents the Device from being deleted on
    // another thread unless the Recorder has been destroyed or the device has abandoned its
    // recorder (e.g. was marked immutable).
    fTrackedDevices.emplace_back(std::move(device));
}

void Recorder::deregisterDevice(const Device* device) {
    ASSERT_SINGLE_OWNER
    for (int i = 0; i < fTrackedDevices.size(); ++i) {
        if (fTrackedDevices[i].get() == device) {
            // Don't modify the list structure of fTrackedDevices within this loop
            fTrackedDevices[i] = nullptr;
            break;
        }
    }

#if defined(SK_DEBUG)
    // TODO(b/333073673): Confirm that the device is not in the tracked list anymore.
    for (const sk_sp<Device>& tracked : fTrackedDevices) {
        SkASSERT(tracked.get() != device);
    }
#endif
}

BackendTexture Recorder::createBackendTexture(SkISize dimensions, const TextureInfo& info) {
    ASSERT_SINGLE_OWNER

    if (!info.isValid() || info.backend() != this->backend()) {
        return {};
    }
    return fResourceProvider->createBackendTexture(dimensions, info);
}

#ifdef SK_BUILD_FOR_ANDROID

BackendTexture Recorder::createBackendTexture(AHardwareBuffer* hardwareBuffer,
                                              bool isRenderable,
                                              bool isProtectedContent,
                                              SkISize dimensions,
                                              bool fromAndroidWindow) const {
    if (fSharedContext->backend() != BackendApi::kVulkan) {
        SKGPU_LOG_W("Creating an AHardwareBuffer-backed BackendTexture is only supported with the "
                    "Vulkan backend.");
        return {};
    }
    return fResourceProvider->createBackendTexture(hardwareBuffer,
                                                   isRenderable,
                                                   isProtectedContent,
                                                   dimensions,
                                                   fromAndroidWindow);
}

#endif // SK_BUILD_FOR_ANDROID

bool Recorder::updateBackendTexture(const BackendTexture& backendTex,
                                    const SkPixmap srcData[],
                                    int numLevels) {
    ASSERT_SINGLE_OWNER

    if (!backendTex.isValid() || backendTex.backend() != this->backend()) {
        return false;
    }

    if (!srcData || numLevels <= 0) {
        return false;
    }

    // If the texture has MIP levels then we require that the full set is overwritten.
    int numExpectedLevels = 1;
    if (backendTex.info().mipmapped() == Mipmapped::kYes) {
        numExpectedLevels = SkMipmap::ComputeLevelCount(backendTex.dimensions().width(),
                                                        backendTex.dimensions().height()) + 1;
    }
    if (numLevels != numExpectedLevels) {
        return false;
    }

    SkColorType ct = srcData[0].colorType();

    if (!this->priv().caps()->areColorTypeAndTextureInfoCompatible(ct, backendTex.info())) {
        return false;
    }

    sk_sp<Texture> texture = this->priv().resourceProvider()->createWrappedTexture(backendTex);
    if (!texture) {
        return false;
    }

    sk_sp<TextureProxy> proxy = TextureProxy::Wrap(std::move(texture));

    std::vector<MipLevel> mipLevels;
    mipLevels.resize(numLevels);

    for (int i = 0; i < numLevels; ++i) {
        SkASSERT(srcData[i].addr());
        SkASSERT(srcData[i].info().colorInfo() == srcData[0].info().colorInfo());

        mipLevels[i].fPixels = srcData[i].addr();
        mipLevels[i].fRowBytes = srcData[i].rowBytes();
    }

    // Src and dst colorInfo are the same
    const SkColorInfo& colorInfo = srcData[0].info().colorInfo();
    // Add UploadTask to Recorder
    UploadInstance upload = UploadInstance::Make(this,
                                                 std::move(proxy),
                                                 colorInfo, colorInfo,
                                                 mipLevels,
                                                 SkIRect::MakeSize(backendTex.dimensions()),
                                                 std::make_unique<ImageUploadContext>());
    if (!upload.isValid()) {
        SKGPU_LOG_E("Recorder::updateBackendTexture: Could not create UploadInstance");
        return false;
    }
    sk_sp<Task> uploadTask = UploadTask::Make(std::move(upload));

    // Need to flush any pending work in case it depends on this texture
    this->priv().flushTrackedDevices();

    this->priv().add(std::move(uploadTask));

    return true;
}

bool Recorder::updateCompressedBackendTexture(const BackendTexture& backendTex,
                                              const void* data,
                                              size_t dataSize) {
    ASSERT_SINGLE_OWNER

    if (!backendTex.isValid() || backendTex.backend() != this->backend()) {
        return false;
    }

    if (!data) {
        return false;
    }

    sk_sp<Texture> texture = this->priv().resourceProvider()->createWrappedTexture(backendTex);
    if (!texture) {
        return false;
    }

    sk_sp<TextureProxy> proxy = TextureProxy::Wrap(std::move(texture));

    // Add UploadTask to Recorder
    UploadInstance upload = UploadInstance::MakeCompressed(this,
                                                           std::move(proxy),
                                                           data,
                                                           dataSize);
    if (!upload.isValid()) {
        SKGPU_LOG_E("Recorder::updateCompressedBackendTexture: Could not create UploadInstance");
        return false;
    }
    sk_sp<Task> uploadTask = UploadTask::Make(std::move(upload));

    // Need to flush any pending work in case it depends on this texture
    this->priv().flushTrackedDevices();

    this->priv().add(std::move(uploadTask));

    return true;
}

void Recorder::deleteBackendTexture(const BackendTexture& texture) {
    ASSERT_SINGLE_OWNER

    if (!texture.isValid() || texture.backend() != this->backend()) {
        return;
    }
    fResourceProvider->deleteBackendTexture(texture);
}

void Recorder::addFinishInfo(const InsertFinishInfo& info) {
    if (info.fFinishedProc) {
        sk_sp<RefCntedCallback> callback =
                RefCntedCallback::Make(info.fFinishedProc, info.fFinishedContext);
        fFinishedProcs.push_back(std::move(callback));
    }
}

void Recorder::freeGpuResources() {
    ASSERT_SINGLE_OWNER

    // We don't want to free the Uniform/TextureDataCaches or the Draw/UploadBufferManagers since
    // all their resources need to be held on to until a Recording is snapped. And once snapped,
    // all their held resources are released. The StrikeCache and TextBlobCache don't hold onto
    // any GPU resources.

    // The AtlasProvider gives out refs to TextureProxies, so it should be safe to clear its pool
    // in the middle of a Recording since anything still using the previous TextureProxies will
    // hold refs on them.
    fAtlasProvider->clearTexturePool();

    fResourceProvider->freeGpuResources();
}

void Recorder::performDeferredCleanup(std::chrono::milliseconds msNotUsed) {
    ASSERT_SINGLE_OWNER

    auto purgeTime = skgpu::StdSteadyClock::now() - msNotUsed;
    fResourceProvider->purgeResourcesNotUsedSince(purgeTime);
}

size_t Recorder::currentBudgetedBytes() const {
    ASSERT_SINGLE_OWNER
    return fResourceProvider->getResourceCacheCurrentBudgetedBytes();
}

size_t Recorder::maxBudgetedBytes() const {
    ASSERT_SINGLE_OWNER
    return fResourceProvider->getResourceCacheLimit();
}

void Recorder::dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const {
    ASSERT_SINGLE_OWNER
    fResourceProvider->dumpMemoryStatistics(traceMemoryDump);
    // TODO: What is the Graphite equivalent for the text blob cache, and how do we print out its
    // used bytes here (see the Ganesh implementation)?
}

void RecorderPriv::add(sk_sp<Task> task) {
    ASSERT_SINGLE_OWNER_PRIV
    fRecorder->fRootTaskList->add(std::move(task));
}

void RecorderPriv::flushTrackedDevices() {
    ASSERT_SINGLE_OWNER_PRIV

    // If this is the initial flushTrackedDevices() call, fFlushingDevicesIndex will be -1, so we
    // start iterating at 0. We remember the starting index so that cleanup is only performed by
    // the initial (outermost) call, which prevents modifying the underlying data structure while
    // an outer iteration is still in progress. However, flushing one device may register new
    // devices as well as recursively call flushTrackedDevices(). In that case, the recursive call
    // picks up at the next device after the one that triggered it, since all prior devices have
    // been flushed or are in progress (and they should not be flushed while in an unfinished
    // flush). When control flow returns to the outer flushTrackedDevices(), it picks up wherever
    // the inner flush ended.
    // TODO(b/330864257): Once paint data is extracted at draw time (so picture shaders are
    // rendered to images before a flush instead of inside a flush), we can simplify this and
    // assert that flushTrackedDevices() is not recursively called and that devices are not added
    // or removed while flushing.
    const int startingIndex = fRecorder->fFlushingDevicesIndex;
    while (fRecorder->fFlushingDevicesIndex < fRecorder->fTrackedDevices.size() - 1) {
        // Advance before calling flushPendingWorkToRecorder() so that any re-entrant call to
        // flushTrackedDevices() will skip the current device.
        fRecorder->fFlushingDevicesIndex++;
        // Entries may be set to null from a call to deregisterDevice(), which will be cleaned up
        // along with any immutable or uniquely held Devices once everything is flushed.
        Device* device = fRecorder->fTrackedDevices[fRecorder->fFlushingDevicesIndex].get();
        if (device) {
            device->flushPendingWorkToRecorder(fRecorder);
        }
    }

    // Issue the next upload flush token. This is only used by the atlasing code, which always
    // uses this method. Calling it in Device::flushPendingWorkToRecorder may miss parent device
    // flushes, increment too often, and lead to atlas corruption.
    this->tokenTracker()->issueFlushToken();

    if (startingIndex < 0) {
        // Initial call to flushTrackedDevices(), so clean up null/immutable devices and reset the
        // loop index.
        int i = 0;
        while (i < fRecorder->fTrackedDevices.size()) {
            Device* device = fRecorder->fTrackedDevices[i].get();
            if (!device || !device->recorder() || device->unique()) {
                if (device) {
                    device->abandonRecorder(); // Keep ~Device() happy
                }
                fRecorder->fTrackedDevices.removeShuffle(i);
                // Keep i as-is to process what was just shuffled to the ith index.
            } else {
                i++;
            }
        }

        fRecorder->fFlushingDevicesIndex = -1;
    }
}

sk_sp<TextureProxy> RecorderPriv::CreateCachedProxy(Recorder* recorder,
                                                    const SkBitmap& bitmap,
                                                    Mipmapped mipmapped) {
    SkASSERT(!bitmap.isNull());

    if (!recorder) {
        return nullptr;
    }
    return recorder->priv().proxyCache()->findOrCreateCachedProxy(recorder, bitmap, mipmapped);
}

size_t RecorderPriv::getResourceCacheLimit() const {
    return fRecorder->fResourceProvider->getResourceCacheLimit();
}

#if defined(GRAPHITE_TEST_UTILS)
bool RecorderPriv::deviceIsRegistered(Device* device) const {
    ASSERT_SINGLE_OWNER_PRIV
    for (const sk_sp<Device>& currentDevice : fRecorder->fTrackedDevices) {
        if (device == currentDevice.get()) {
            return true;
        }
    }
    return false;
}

// used by the Context that created this Recorder to set a back pointer
void RecorderPriv::setContext(Context* context) {
    fRecorder->fContext = context;
}
#endif

} // namespace skgpu::graphite
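For orientation, here is a minimal sketch of the Recorder lifecycle that snap() serves, written against the public Graphite API under the assumption that a working skgpu::graphite::Context already exists; the helper name recordAndSubmit is invented and exact header paths or defaults may differ between Skia revisions.

#include "include/core/SkCanvas.h"
#include "include/core/SkColor.h"
#include "include/core/SkImageInfo.h"
#include "include/core/SkSurface.h"
#include "include/gpu/graphite/Context.h"
#include "include/gpu/graphite/GraphiteTypes.h"
#include "include/gpu/graphite/Recorder.h"
#include "include/gpu/graphite/Recording.h"
#include "include/gpu/graphite/Surface.h"

// Record on a Recorder, snap a Recording, and hand it to the Context for submission.
bool recordAndSubmit(skgpu::graphite::Context* context) {
    std::unique_ptr<skgpu::graphite::Recorder> recorder = context->makeRecorder();

    SkImageInfo info = SkImageInfo::MakeN32Premul(256, 256);
    sk_sp<SkSurface> surface = SkSurfaces::RenderTarget(recorder.get(), info);
    surface->getCanvas()->clear(SK_ColorWHITE);  // draws accumulate on the Recorder

    // snap() packages all pending work; nullptr signals a failed Recording (see above).
    std::unique_ptr<skgpu::graphite::Recording> recording = recorder->snap();
    if (!recording) {
        return false;
    }

    skgpu::graphite::InsertRecordingInfo insertInfo;
    insertInfo.fRecording = recording.get();
    if (!context->insertRecording(insertInfo)) {
        return false;
    }
    return context->submit();
}

Per the failure branch in snap(), a null Recording leaves the Recorder reusable: its buffer managers, caches, and root task list are reset before returning.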