// NOTE(review): this chunk is a garbled extraction — the original file's
// line numbers ("30", "34", ...) are fused into the code text and many
// source lines are missing. Code kept byte-identical; comments only.
//
// Fragment of an enqueue method (apparently a Skia unref queue's Unref):
// when drain_immediate_ is set there is presumably a synchronous release
// path (body missing here); otherwise the object is queued and a single
// deferred Drain() is scheduled.
30 if (drain_immediate_) {
// Queue path: append under the lock, then schedule exactly one drain.
34 std::scoped_lock lock(mutex_);
35 objects_.push_back(
object);
// drain_pending_ ensures only one Drain() task is outstanding at a time.
36 if (!drain_pending_) {
37 drain_pending_ =
true;
// Presumably a task_runner_->PostDelayedTask(...) call — the call itself
// is missing from this extraction; only the lambda and the delay survive.
// The lambda captures a strong ref so `this` outlives the posted task.
39 [strong =
fml::Ref(
this)]() { strong->Drain(); }, drain_delay_);
// Fragment of a second enqueue method (likely the GrBackendTexture
// overload — the push_back into textures_ is missing from this
// extraction). Same drain-scheduling pattern as the method above:
// take the lock, and if no drain is pending, mark one pending and
// post a delayed Drain() holding a strong ref to `this`.
46 std::scoped_lock lock(mutex_);
48 if (!drain_pending_) {
49 drain_pending_ =
true;
51 [strong =
fml::Ref(
this)]() { strong->Drain(); }, drain_delay_);
// Fragment of Drain(): swap the pending queues out under the lock so the
// actual releases run WITHOUT holding mutex_, clear the pending flag so a
// new drain can be scheduled, then hand everything to static DoDrain().
62 std::deque<SkRefCnt*> skia_objects;
63 std::deque<GrBackendTexture>
textures;
65 std::scoped_lock lock(mutex_);
66 objects_.swap(skia_objects);
// NOTE(review): the matching textures_.swap(textures) line appears to be
// missing from this extraction — confirm against the original file.
68 drain_pending_ =
false;
// Releases happen outside the lock (lock scope presumably closed above).
70 DoDrain(skia_objects,
textures, context_);
// Data members (fragment):
//   objects_         — SkRefCnt* pointers queued for unref().
//   textures_        — backend textures queued for deletion via the
//                      GPU context.
//   drain_pending_   — true while a Drain() task is already scheduled;
//                      presumably guarded by mutex_ (declared on a
//                      missing line) — TODO confirm.
//   drain_immediate_ — when true, skip queueing and drain synchronously
//                      (see the drain_immediate_ branch above). Not
//                      default-initialized here; set by the constructor.
81 std::deque<SkRefCnt*> objects_;
82 std::deque<GrBackendTexture> textures_;
83 bool drain_pending_ =
false;
87 bool drain_immediate_;
// Constructor tail (fragment): parameter list and init-list are partly
// missing. Takes ownership of the task runner (moved, not copied) and
// records the drain mode; drain_immediate defaults to false, i.e. the
// deferred, batched draining path.
95 bool drain_immediate =
false)
96 : task_runner_(
std::move(task_runner)),
99 drain_immediate_(drain_immediate) {}
// Destructor-time drain (fragment): any still-queued objects/textures are
// moved into a task targeted at task_runner_ so the final release runs on
// the correct thread rather than wherever the queue happens to be
// destroyed. `mutable` lets the lambda hand its moved-in deques (and
// raw_context) to DoDrain. The enclosing call (presumably
// fml::TaskRunner::RunNowOrPostTask) is missing from this extraction.
107 task_runner_, [objects = std::move(objects_),
108 textures = std::move(textures_), raw_context]()
mutable {
110 DoDrain(objects,
textures, context);
// static DoDrain (fragment): the actual release work, static so it can
// run from the destructor's posted task without the queue instance.
//  1. unref() every queued SkRefCnt object (balancing the ref the caller
//     transferred when enqueueing — TODO confirm against enqueue site);
//  2. delete each backend texture through the GPU context (the enclosing
//     `if (context)` / loop header lines are missing here);
//  3. if any objects were released, ask the context to perform deferred
//     cleanup with a 0 ms budget so GPU resources are reclaimed promptly.
116 static void DoDrain(
const std::deque<SkRefCnt*>& skia_objects,
117 const std::deque<GrBackendTexture>&
textures,
119 for (
SkRefCnt* skia_object : skia_objects) {
120 skia_object->unref();
125 context->deleteBackendTexture(
texture);
128 if (!skia_objects.empty()) {
129 context->performDeferredCleanup(std::chrono::milliseconds(0));
// NOTE(review): stray fragment — a truncated declaration of a vector of
// FakeTexture shared_ptrs, apparently from unrelated (test?) code fused
// into this extraction. It does not belong to the class above; verify
// against the original file before editing.
std::vector< std::shared_ptr< FakeTexture > > textures