skia_gpu_object.h
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef FLUTTER_FLOW_SKIA_GPU_OBJECT_H_
#define FLUTTER_FLOW_SKIA_GPU_OBJECT_H_

#include <mutex>
#include <queue>

#include "flutter/common/macros.h"
#include "flutter/fml/memory/ref_counted.h"
#include "flutter/fml/memory/weak_ptr.h"
#include "flutter/fml/task_runner.h"
#include "flutter/fml/trace_event.h"
#include "third_party/skia/include/core/SkRefCnt.h"
#include "third_party/skia/include/gpu/ganesh/GrBackendSurface.h"
#include "third_party/skia/include/gpu/ganesh/GrDirectContext.h"
#include "third_party/skia/include/gpu/ganesh/GrTypes.h"

namespace flutter {

// A queue that holds Skia objects that must be destructed on the given task
// runner.
template <class T>
class UnrefQueue : public fml::RefCountedThreadSafe<UnrefQueue<T>> {
 public:
  using ResourceContext = T;

  void Unref(SkRefCnt* object) {
    if (drain_immediate_) {
      object->unref();
      return;
    }
    std::scoped_lock lock(mutex_);
    objects_.push_back(object);
    if (!drain_pending_) {
      drain_pending_ = true;
      task_runner_->PostDelayedTask(
          [strong = fml::Ref(this)]() { strong->Drain(); }, drain_delay_);
    }
  }

#if !SLIMPELLER
  void DeleteTexture(const GrBackendTexture& texture) {
    // drain_immediate_ should only be used on Impeller.
    FML_DCHECK(!drain_immediate_);
    std::scoped_lock lock(mutex_);
    textures_.push_back(texture);
    if (!drain_pending_) {
      drain_pending_ = true;
      task_runner_->PostDelayedTask(
          [strong = fml::Ref(this)]() { strong->Drain(); }, drain_delay_);
    }
  }
#endif  // !SLIMPELLER

  // Usually, the drain is called automatically. However, during IO manager
  // shutdown (when the platform side reference to the OpenGL context is about
  // to go away), we may need to pre-emptively drain the unref queue. It is the
  // responsibility of the caller to ensure that no further unrefs are queued
  // after this call.
  void Drain() {
    TRACE_EVENT0("flutter", "SkiaUnrefQueue::Drain");
    std::deque<SkRefCnt*> skia_objects;

    NOT_SLIMPELLER(std::deque<GrBackendTexture> textures);

    {
      std::scoped_lock lock(mutex_);
      objects_.swap(skia_objects);
      NOT_SLIMPELLER(textures_.swap(textures));
      drain_pending_ = false;
    }
    DoDrain(skia_objects,
#if !SLIMPELLER
            textures,
#endif  // !SLIMPELLER
            context_);
  }

  void UpdateResourceContext(sk_sp<ResourceContext> context) {
    context_ = context;
  }

 private:
  const fml::RefPtr<fml::TaskRunner> task_runner_;
  const fml::TimeDelta drain_delay_;
  std::mutex mutex_;
  std::deque<SkRefCnt*> objects_;
  NOT_SLIMPELLER(std::deque<GrBackendTexture> textures_);
  bool drain_pending_ = false;
  sk_sp<ResourceContext> context_;
  // Enabled when there is an Impeller context, which removes the usage of
  // the queue altogether.
  bool drain_immediate_;

  // The `GrDirectContext* context` is only used for signaling Skia to
  // performDeferredCleanup. It can be nullptr when such signaling is not
  // needed (e.g., in unit tests).
  UnrefQueue(fml::RefPtr<fml::TaskRunner> task_runner,
             fml::TimeDelta delay,
             sk_sp<ResourceContext> context = nullptr,
             bool drain_immediate = false)
      : task_runner_(std::move(task_runner)),
        drain_delay_(delay),
        context_(context),
        drain_immediate_(drain_immediate) {}

  ~UnrefQueue() {
    // The ResourceContext must be deleted on the task runner thread.
    // Transfer ownership of the UnrefQueue's ResourceContext reference
    // into a task queued to that thread.
    ResourceContext* raw_context = context_.release();
    fml::TaskRunner::RunNowOrPostTask(
        task_runner_, [objects = std::move(objects_),
#if !SLIMPELLER
                       textures = std::move(textures_),
#endif  // !SLIMPELLER
                       raw_context]() mutable {
          sk_sp<ResourceContext> context(raw_context);
          DoDrain(objects,
#if !SLIMPELLER
                  textures,
#endif  // !SLIMPELLER
                  context);
          context.reset();
        });
  }

  // static
  static void DoDrain(const std::deque<SkRefCnt*>& skia_objects,
#if !SLIMPELLER
                      const std::deque<GrBackendTexture>& textures,
#endif  // !SLIMPELLER
                      sk_sp<ResourceContext> context) {
    for (SkRefCnt* skia_object : skia_objects) {
      skia_object->unref();
    }

#if !SLIMPELLER
    if (context) {
      for (const GrBackendTexture& texture : textures) {
        context->deleteBackendTexture(texture);
      }

      if (!skia_objects.empty()) {
        context->performDeferredCleanup(std::chrono::milliseconds(0));
      }

      context->flushAndSubmit(GrSyncCpu::kYes);
    }
#endif  // !SLIMPELLER
  }

  FML_FRIEND_REF_COUNTED_THREAD_SAFE(UnrefQueue);
  FML_FRIEND_MAKE_REF_COUNTED(UnrefQueue);
  FML_DISALLOW_COPY_AND_ASSIGN(UnrefQueue);
};

using SkiaUnrefQueue = UnrefQueue<GrDirectContext>;

/// An object whose deallocation needs to be performed on a specific unref
/// queue.
template <class T>
class SkiaGPUObject {
 public:
  using SkiaObjectType = T;

  SkiaGPUObject() = default;
  SkiaGPUObject(sk_sp<SkiaObjectType> object, fml::RefPtr<SkiaUnrefQueue> queue)
      : object_(std::move(object)), queue_(std::move(queue)) {
    FML_DCHECK(object_);
  }
  SkiaGPUObject(SkiaGPUObject&&) = default;
  ~SkiaGPUObject() { reset(); }

  SkiaGPUObject& operator=(SkiaGPUObject&&) = default;

  sk_sp<SkiaObjectType> skia_object() const { return object_; }

  void reset() {
    if (object_ && queue_) {
      queue_->Unref(object_.release());
    }
    queue_ = nullptr;
    FML_DCHECK(object_ == nullptr);
  }

 private:
  sk_sp<SkiaObjectType> object_;
  fml::RefPtr<SkiaUnrefQueue> queue_;

  FML_DISALLOW_COPY_AND_ASSIGN(SkiaGPUObject);
};

}  // namespace flutter

#endif  // FLUTTER_FLOW_SKIA_GPU_OBJECT_H_
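
The listing above shows only the implementation, so here is a minimal usage sketch for UnrefQueue, assuming it is compiled inside the engine tree. The function name UnrefQueueExample, the parameters io_task_runner, gr_context, and image, and the 8 ms drain delay are all hypothetical illustrations, not engine API; queue creation goes through fml::MakeRefCounted because the constructor is private and befriended via FML_FRIEND_MAKE_REF_COUNTED.

// Hypothetical example (not part of this header).
#include "flutter/flow/skia_gpu_object.h"

#include "third_party/skia/include/core/SkImage.h"

namespace flutter {

void UnrefQueueExample(const fml::RefPtr<fml::TaskRunner>& io_task_runner,
                       sk_sp<GrDirectContext> gr_context,
                       sk_sp<SkImage> image) {
  // Batch unrefs and drain them on the IO task runner after an 8ms delay.
  // (The delay value is arbitrary, for illustration only.)
  auto queue = fml::MakeRefCounted<SkiaUnrefQueue>(
      io_task_runner, fml::TimeDelta::FromMilliseconds(8), gr_context);

  // Hand the object's last reference to the queue. The SkImage is not
  // destroyed here; instead, a drain task is posted to the IO task runner.
  queue->Unref(image.release());

  // During teardown, a caller on the IO task runner may drain eagerly
  // rather than waiting for the posted task. No further Unref calls may
  // be queued after this point.
  queue->Drain();
}

}  // namespace flutter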
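A similar sketch for SkiaGPUObject, showing how the wrapper forwards the final unref to the queue instead of running it on the current thread; again, the function and parameter names are illustrative only.

// Hypothetical example (not part of this header).
#include "flutter/flow/skia_gpu_object.h"

#include "third_party/skia/include/core/SkImage.h"

namespace flutter {

void SkiaGPUObjectExample(sk_sp<SkImage> image,
                          fml::RefPtr<SkiaUnrefQueue> queue) {
  // The wrapper takes one strong reference (which must be non-null) and
  // remembers which queue must perform the final unref.
  SkiaGPUObject<SkImage> gpu_image(std::move(image), std::move(queue));

  // skia_object() hands out an additional strong reference; this copy can
  // be used and dropped on any thread.
  sk_sp<SkImage> strong = gpu_image.skia_object();

  // reset() (also invoked by the destructor) releases the wrapper's
  // reference into the unref queue, so the final unref runs on the queue's
  // task runner.
  gpu_image.reset();
}

}  // namespace flutter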