// NOTE(review): this chunk is a hard-wrapped paste; the leading integers on
// each line (31, 35, ...) are original-file line numbers, not code tokens.
// Gaps in that numbering mean statements are missing from this view.
//
// Fragment: enqueue a ref-counted object for deferred unref.
// Fast path: when drain_immediate_ is set this branch takes over; its body
// falls in a numbering gap (32-34) -- presumably an inline unref + early
// return, TODO confirm.
31 if (drain_immediate_) {
// Deferred path: enqueue under the queue mutex.
35 std::scoped_lock lock(mutex_);
36 objects_.push_back(
object);
// Schedule at most one pending drain: the flag is read and set under the same
// lock, so concurrent callers cannot double-post.
37 if (!drain_pending_) {
38 drain_pending_ =
true;
// The posted task captures a strong reference (fml::Ref(this)) so the queue
// outlives the delay. The call that receives this lambda sits in a numbering
// gap (39) -- presumably a delayed task post using drain_delay_, TODO confirm.
40 [strong =
fml::Ref(
this)]() { strong->Drain(); }, drain_delay_);
// Fragment: second drain-scheduling site (original lines 48-53). The line that
// actually enqueues the work item (49) is missing from this view -- given the
// textures_ member below, presumably a texture push_back; TODO confirm.
48 std::scoped_lock lock(mutex_);
// Same single-pending-drain pattern as the object path above: check-and-set
// drain_pending_ under the lock, then post a drain task holding a strong ref.
50 if (!drain_pending_) {
51 drain_pending_ =
true;
// The receiving call (line 52) is in a numbering gap -- presumably the same
// delayed task post with drain_delay_; TODO confirm.
53 [strong =
fml::Ref(
this)]() { strong->Drain(); }, drain_delay_);
// Fragment: drain entry (original lines 65-73). Takes ownership of all queued
// objects by swapping the member deque with a local one while holding the
// mutex -- the actual unref work can then happen outside the critical section.
65 std::deque<SkRefCnt*> skia_objects;
70 std::scoped_lock lock(mutex_);
71 objects_.swap(skia_objects);
// Clear the flag under the same lock so the next enqueue schedules a fresh
// drain.
73 drain_pending_ =
false;
83 context_ = std::move(context);
// Fragment: data members (original lines 90-96; lines 91 and 94-95 are in
// numbering gaps -- given the textures_/mutex_/task_runner_ uses elsewhere in
// this chunk, presumably a texture deque, the mutex, and the task runner).
// Raw pointers queued for deferred unref; ownership is released in DoDrain.
90 std::deque<SkRefCnt*> objects_;
// True while a drain task is posted but has not yet run; guarded by the mutex.
92 bool drain_pending_ =
false;
// Context used to delete backend textures and flush GPU work during drains.
93 sk_sp<ResourceContext> context_;
// When set, the enqueue path releases objects inline instead of queueing.
96 bool drain_immediate_;
// Fragment: constructor tail (original lines 103-108). The constructor name
// and its leading parameters (100-102) are in a numbering gap -- the init
// list shows at least a task_runner parameter; TODO confirm the full
// signature. Both trailing parameters are defaulted, so callers may omit the
// context and the immediate-drain flag.
103 sk_sp<ResourceContext> context =
nullptr,
104 bool drain_immediate =
false)
105 : task_runner_(
std::move(task_runner)),
107 context_(
std::move(context)),
// Empty body: all state is established in the initializer list.
108 drain_immediate_(drain_immediate) {}
// Fragment (original lines 116-121): posts a task on task_runner_ that moves
// the remaining queued objects and textures into the closure -- the shape of
// a final drain, presumably from the destructor; TODO confirm. The enclosing
// call (115) and the origin of raw_context (a gap around 119) are not
// visible; raw_context is likely the released context_ pointer passed raw
// because the lambda must be copy/move-captured across threads.
116 task_runner_, [objects = std::move(objects_),
118 textures = std::move(textures_),
120 raw_context]()
mutable {
// Re-adopt the raw pointer into an sk_sp inside the task so the context is
// released when the task finishes.
121 sk_sp<ResourceContext> context(raw_context);
// Fragment: static drain worker (original lines 132-151). Several interior
// lines (139-142, 145-146, 149-150) and the closing braces fall in numbering
// gaps, so the exact brace structure -- including whether a null-context
// guard wraps the texture/cleanup/flush section -- is not visible here; the
// visible code dereferences context unconditionally, TODO confirm a guard
// exists upstream.
132 static void DoDrain(
const std::deque<SkRefCnt*>& skia_objects,
134 const std::deque<GrBackendTexture>& textures,
136 const sk_sp<ResourceContext>& context) {
// Release every queued ref-counted object.
137 for (SkRefCnt* skia_object : skia_objects) {
138 skia_object->unref();
// Destroy each queued backend texture through the context.
143 for (
const GrBackendTexture&
texture : textures) {
144 context->deleteBackendTexture(
texture);
// If any objects were released, ask the context to reclaim unused GPU
// resources immediately (0 ms means "everything not used right now").
147 if (!skia_objects.empty()) {
148 context->performDeferredCleanup(std::chrono::milliseconds(0));
// Flush pending GPU work and block until the CPU-side submission completes.
151 context->flushAndSubmit(GrSyncCpu::kYes);