Flutter Engine
The Flutter Engine
Loading...
Searching...
No Matches
skia_gpu_object.h
Go to the documentation of this file.
1// Copyright 2013 The Flutter Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef FLUTTER_FLOW_SKIA_GPU_OBJECT_H_
6#define FLUTTER_FLOW_SKIA_GPU_OBJECT_H_
7
#include <chrono>
#include <deque>
#include <mutex>
#include <queue>

#include "flutter/fml/memory/ref_counted.h"
#include "flutter/fml/memory/weak_ptr.h"
#include "flutter/fml/task_runner.h"
#include "flutter/fml/trace_event.h"
// NOTE(review): the Skia includes were dropped by the doc extraction; they are
// reconstructed from the types this header uses (SkRefCnt, sk_sp,
// GrBackendTexture, GrDirectContext, GrSyncCpu). Confirm the exact paths
// against the tree (older Skia checkouts use include/gpu/ without ganesh/).
#include "third_party/skia/include/core/SkRefCnt.h"
#include "third_party/skia/include/gpu/ganesh/GrBackendSurface.h"
#include "third_party/skia/include/gpu/ganesh/GrDirectContext.h"
#include "third_party/skia/include/gpu/ganesh/GrTypes.h"
19
20namespace flutter {
21
22// A queue that holds Skia objects that must be destructed on the given task
23// runner.
24template <class T>
25class UnrefQueue : public fml::RefCountedThreadSafe<UnrefQueue<T>> {
26 public:
28
29 void Unref(SkRefCnt* object) {
30 if (drain_immediate_) {
31 object->unref();
32 return;
33 }
34 std::scoped_lock lock(mutex_);
35 objects_.push_back(object);
36 if (!drain_pending_) {
37 drain_pending_ = true;
38 task_runner_->PostDelayedTask(
39 [strong = fml::Ref(this)]() { strong->Drain(); }, drain_delay_);
40 }
41 }
42
44 // drain_immediate_ should only be used on Impeller.
45 FML_DCHECK(!drain_immediate_);
46 std::scoped_lock lock(mutex_);
47 textures_.push_back(texture);
48 if (!drain_pending_) {
49 drain_pending_ = true;
50 task_runner_->PostDelayedTask(
51 [strong = fml::Ref(this)]() { strong->Drain(); }, drain_delay_);
52 }
53 }
54
55 // Usually, the drain is called automatically. However, during IO manager
56 // shutdown (when the platform side reference to the OpenGL context is about
57 // to go away), we may need to pre-emptively drain the unref queue. It is the
58 // responsibility of the caller to ensure that no further unrefs are queued
59 // after this call.
60 void Drain() {
61 TRACE_EVENT0("flutter", "SkiaUnrefQueue::Drain");
62 std::deque<SkRefCnt*> skia_objects;
63 std::deque<GrBackendTexture> textures;
64 {
65 std::scoped_lock lock(mutex_);
66 objects_.swap(skia_objects);
67 textures_.swap(textures);
68 drain_pending_ = false;
69 }
70 DoDrain(skia_objects, textures, context_);
71 }
72
74 context_ = context;
75 }
76
77 private:
78 const fml::RefPtr<fml::TaskRunner> task_runner_;
79 const fml::TimeDelta drain_delay_;
80 std::mutex mutex_;
81 std::deque<SkRefCnt*> objects_;
82 std::deque<GrBackendTexture> textures_;
83 bool drain_pending_ = false;
85 // Enabled when there is an impeller context, which removes the usage of
86 // the queue altogether.
87 bool drain_immediate_;
88
89 // The `GrDirectContext* context` is only used for signaling Skia to
90 // performDeferredCleanup. It can be nullptr when such signaling is not needed
91 // (e.g., in unit tests).
93 fml::TimeDelta delay,
94 sk_sp<ResourceContext> context = nullptr,
95 bool drain_immediate = false)
96 : task_runner_(std::move(task_runner)),
97 drain_delay_(delay),
98 context_(context),
99 drain_immediate_(drain_immediate) {}
100
101 ~UnrefQueue() {
102 // The ResourceContext must be deleted on the task runner thread.
103 // Transfer ownership of the UnrefQueue's ResourceContext reference
104 // into a task queued to that thread.
105 ResourceContext* raw_context = context_.release();
107 task_runner_, [objects = std::move(objects_),
108 textures = std::move(textures_), raw_context]() mutable {
109 sk_sp<ResourceContext> context(raw_context);
110 DoDrain(objects, textures, context);
111 context.reset();
112 });
113 }
114
115 // static
116 static void DoDrain(const std::deque<SkRefCnt*>& skia_objects,
117 const std::deque<GrBackendTexture>& textures,
118 sk_sp<ResourceContext> context) {
119 for (SkRefCnt* skia_object : skia_objects) {
120 skia_object->unref();
121 }
122
123 if (context) {
124 for (const GrBackendTexture& texture : textures) {
125 context->deleteBackendTexture(texture);
126 }
127
128 if (!skia_objects.empty()) {
129 context->performDeferredCleanup(std::chrono::milliseconds(0));
130 }
131
132 context->flushAndSubmit(GrSyncCpu::kYes);
133 }
134 }
135
137 FML_FRIEND_MAKE_REF_COUNTED(UnrefQueue);
139};
140
142
143/// An object whose deallocation needs to be performed on an specific unref
144/// queue. The template argument U need to have a call operator that returns
145/// that unref queue.
146template <class T>
148 public:
150
151 SkiaGPUObject() = default;
153 : object_(std::move(object)), queue_(std::move(queue)) {
154 FML_DCHECK(object_);
155 }
158
160
161 sk_sp<SkiaObjectType> skia_object() const { return object_; }
162
163 void reset() {
164 if (object_ && queue_) {
165 queue_->Unref(object_.release());
166 }
167 queue_ = nullptr;
168 FML_DCHECK(object_ == nullptr);
169 }
170
171 private:
172 sk_sp<SkiaObjectType> object_;
174
176};
177
178} // namespace flutter
179
180#endif // FLUTTER_FLOW_SKIA_GPU_OBJECT_H_
sk_sp< SkiaObjectType > skia_object() const
SkiaGPUObject(SkiaGPUObject &&)=default
SkiaGPUObject(sk_sp< SkiaObjectType > object, fml::RefPtr< SkiaUnrefQueue > queue)
SkiaGPUObject & operator=(SkiaGPUObject &&)=default
void DeleteTexture(const GrBackendTexture &texture)
void UpdateResourceContext(sk_sp< ResourceContext > context)
void Unref(SkRefCnt *object)
static void RunNowOrPostTask(const fml::RefPtr< fml::TaskRunner > &runner, const fml::closure &task)
virtual void PostDelayedTask(const fml::closure &task, fml::TimeDelta delay)
T * release()
Definition SkRefCnt.h:324
std::vector< std::shared_ptr< FakeTexture > > textures
VkQueue queue
Definition main.cc:55
#define FML_DCHECK(condition)
Definition logging.h:103
#define FML_DISALLOW_COPY_AND_ASSIGN(TypeName)
Definition macros.h:27
FlTexture * texture
RefPtr< T > Ref(T *ptr)
Definition ref_ptr.h:237
Definition ref_ptr.h:256
#define T
#define FML_FRIEND_REF_COUNTED_THREAD_SAFE(T)
#define FML_FRIEND_MAKE_REF_COUNTED(T)
#define TRACE_EVENT0(category_group, name)