37 {
38 public:
39 void Enqueue(PtrType ptr) {
40 ASSERT(ptr->untag()->next_seen_by_gc().IsRawNull());
41 ptr->untag()->next_seen_by_gc_ = head_;
42 if (head_ == Type::null()) {
43 tail_ = ptr;
44 }
45 head_ = ptr;
46 }
47
48 void FlushInto(GCLinkedList<Type, PtrType>* to) {
49 if (to->head_ == Type::null()) {
50 ASSERT(to->tail_ == Type::null());
51 to->head_ = head_;
52 to->tail_ = tail_;
53 } else {
54 ASSERT(to->tail_ != Type::null());
55 ASSERT(to->tail_->untag()->next_seen_by_gc() == Type::null());
56 if (head_ != Type::null()) {
57 to->tail_->untag()->next_seen_by_gc_ = head_;
58 to->tail_ = tail_;
59 }
60 }
61 Release();
62 }
63
64 PtrType Release() {
65 PtrType return_value = head_;
66 head_ = Type::null();
67 tail_ = Type::null();
68 return return_value;
69 }
70
71 bool IsEmpty() { return head_ == Type::null() && tail_ == Type::null(); }
72
73 private:
74 PtrType head_ = Type::null();
75 PtrType tail_ = Type::null();
76};
77
78struct GCLinkedLists {
79 void Release();
80 bool IsEmpty();
81 void FlushInto(GCLinkedLists* to);
82
83#define FOREACH(type, var) GCLinkedList<type, type##Ptr> var;
85#undef FOREACH
86};
87
#ifdef DEBUG
// Prints one trace line when --trace_finalizers is on. Relies on the
// enclosing scope to provide GCVisitorType and |visitor|. Wrapped in
// do { } while (false) so a call followed by `;` is a single statement and
// cannot mis-bind a subsequent `else` (dangling-else fix for the bare-if
// macro body).
#define TRACE_FINALIZER(format, ...)                                           \
  do {                                                                         \
    if (FLAG_trace_finalizers) {                                               \
      THR_Print("%s %p " format "\n", GCVisitorType::kName, visitor,           \
                __VA_ARGS__);                                                  \
    }                                                                          \
  } while (false)
#else
// Release builds: tracing compiles away entirely.
#define TRACE_FINALIZER(format, ...)
#endif  // DEBUG
97
98
100
101
102
103
104
105template <typename GCVisitorType>
107 FinalizerEntryPtr raw_entry,
108 Heap::Space before_gc_space,
109 GCVisitorType* visitor) {
110 PointerPtr callback_pointer = raw_finalizer->untag()->callback();
111 const auto callback =
reinterpret_cast<NativeFinalizer::Callback
>(
112 callback_pointer->untag()->data());
113 ObjectPtr token_object = raw_entry->untag()->token();
114 const bool is_detached = token_object == raw_entry;
115 const intptr_t external_size = raw_entry->untag()->external_size();
116 if (is_detached) {
117
118 ASSERT(token_object == raw_entry);
119 ASSERT(external_size == 0);
120 if (FLAG_trace_finalizers) {
121 TRACE_FINALIZER(
"Not running native finalizer %p callback %p, detached",
123 }
124 } else {
125
126 ASSERT(token_object.IsPointer());
127 PointerPtr token = static_cast<PointerPtr>(token_object);
128 void* peer = reinterpret_cast<void*>(token->untag()->data());
129 if (FLAG_trace_finalizers) {
130 TRACE_FINALIZER(
"Running native finalizer %p callback %p with token %p",
131 raw_finalizer->untag(),
callback, peer);
132 }
133 raw_entry.untag()->set_token(raw_entry);
134 (*callback)(peer);
135 if (external_size > 0) {
136 if (FLAG_trace_finalizers) {
138 external_size, before_gc_space == 0 ? "new" : "old");
139 }
140 visitor->isolate_group()->heap()->FreedExternal(external_size,
141 before_gc_space);
142 raw_entry->untag()->set_external_size(0);
143 }
144 }
145}
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160template <typename GCVisitorType>
162 FinalizerEntryPtr current_entry) {
164
166 const bool value_collected_this_gc =
167 GCVisitorType::ForwardOrSetNullIfCollected(
168 current_entry, ¤t_entry->untag()->value_);
169 if (!value_collected_this_gc && before_gc_space == Heap::kNew) {
171 if (after_gc_space == Heap::kOld) {
172 const intptr_t external_size = current_entry->untag()->external_size_;
174 " bytes from new to old space",
175 external_size);
176 visitor->isolate_group()->heap()->PromotedExternal(external_size);
177 }
178 }
179 GCVisitorType::ForwardOrSetNullIfCollected(current_entry,
180 ¤t_entry->untag()->detach_);
181 GCVisitorType::ForwardOrSetNullIfCollected(
182 current_entry, ¤t_entry->untag()->finalizer_);
183
184 ObjectPtr token_object = current_entry->untag()->token();
185
186 const bool is_detached = token_object == current_entry;
187
188 if (!value_collected_this_gc) return;
189 if (is_detached) return;
190
191 FinalizerBasePtr finalizer = current_entry->untag()->finalizer();
192
193 if (finalizer.IsRawNull()) {
195 current_entry->untag());
196
197
198 return;
199 }
200
202 current_entry->untag(), finalizer->untag());
203
204 FinalizerPtr finalizer_dart = static_cast<FinalizerPtr>(finalizer);
205
206
207
208
209
210
211
212
213
214
215
216 ASSERT(Thread::Current()->OwnsGCSafepoint() ||
217 Thread::Current()->BypassSafepoints());
218
219 if (finalizer.IsNativeFinalizer()) {
220 NativeFinalizerPtr native_finalizer =
221 static_cast<NativeFinalizerPtr>(finalizer);
222
223
225 visitor);
226
227
228
229 }
230
231 FinalizerEntryPtr previous_head =
232 finalizer_dart->untag()->exchange_entries_collected(current_entry);
233 current_entry->untag()->set_next(previous_head);
234 const bool first_entry = previous_head.IsRawNull();
235
236
237
238
239
240 if (!first_entry && previous_head->IsNewObject() &&
241 current_entry->IsOldObject()) {
242 TRACE_FINALIZER(
"Entry %p (old) next is %p (new)", current_entry->untag(),
243 previous_head->untag());
244
245 }
246
247
248 if (first_entry) {
249 Isolate* isolate = finalizer->untag()->isolate_;
250 if (isolate == nullptr) {
251 TRACE_FINALIZER(
"Not scheduling finalizer %p callback on isolate null",
252 finalizer->untag());
253 } else {
255 finalizer->untag(), isolate);
256
257 PersistentHandle* handle =
258 isolate->group()->api_state()->AllocatePersistentHandle();
259 handle->set_ptr(finalizer);
261 message_handler->PostMessage(
262 Message::New(handle, Message::kNormalPriority),
263 false);
264 }
265 }
266}
267
268#undef TRACE_FINALIZER
269
270}
271
272#endif
// NOTE(review): The lines below are extraction residue (a symbol index that
// is unrelated to this translation unit — the first line is from the Flutter
// embedder). They are preserved here as comments so the file stays
// syntactically valid:
//   FlKeyEvent uint64_t FlKeyResponderAsyncCallback callback
//   #define FOREACH(type, var)
//   #define TRACE_FINALIZER(format,...)
//   #define GC_LINKED_LIST(V)
//   void MournFinalizerEntry(GCVisitorType *visitor, FinalizerEntryPtr current_entry)
//   Heap::Space SpaceForExternal(FinalizerEntryPtr raw_entry)
//   void RunNativeFinalizerCallback(NativeFinalizerPtr raw_finalizer, FinalizerEntryPtr raw_entry, Heap::Space before_gc_space, GCVisitorType *visitor)
//   std::function< void(const T &message, const MessageReply< T > &reply)> MessageHandler