object_graph_copy.cc
1// Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/object_graph_copy.h"
6
7#include <memory>
8
9#include "vm/dart_api_state.h"
10#include "vm/flags.h"
11#include "vm/heap/weak_table.h"
12#include "vm/longjump.h"
13#include "vm/object.h"
14#include "vm/object_store.h"
15#include "vm/snapshot.h"
16#include "vm/symbols.h"
17#include "vm/timeline.h"
18
19#define Z zone_
20
21// The list here contains two kinds of classes:
22// * classes of objects that will be shared and therefore never need to be copied
23// * classes of objects that user object graphs should never reference
24#define FOR_UNSUPPORTED_CLASSES(V) \
25 V(AbstractType) \
26 V(ApiError) \
27 V(Bool) \
28 V(CallSiteData) \
29 V(Capability) \
30 V(Class) \
31 V(ClosureData) \
32 V(Code) \
33 V(CodeSourceMap) \
34 V(CompressedStackMaps) \
35 V(ContextScope) \
36 V(DynamicLibrary) \
37 V(Error) \
38 V(ExceptionHandlers) \
39 V(FfiTrampolineData) \
40 V(Field) \
41 V(Finalizer) \
42 V(FinalizerBase) \
43 V(FinalizerEntry) \
44 V(NativeFinalizer) \
45 V(Function) \
46 V(FunctionType) \
47 V(FutureOr) \
48 V(ICData) \
49 V(Instance) \
50 V(Instructions) \
51 V(InstructionsSection) \
52 V(InstructionsTable) \
53 V(Int32x4) \
54 V(Integer) \
55 V(KernelProgramInfo) \
56 V(LanguageError) \
57 V(Library) \
58 V(LibraryPrefix) \
59 V(LoadingUnit) \
60 V(LocalVarDescriptors) \
61 V(MegamorphicCache) \
62 V(Mint) \
63 V(MirrorReference) \
64 V(MonomorphicSmiableCall) \
65 V(Namespace) \
66 V(Number) \
67 V(ObjectPool) \
68 V(PatchClass) \
69 V(PcDescriptors) \
70 V(Pointer) \
71 V(ReceivePort) \
72 V(RecordType) \
73 V(RegExp) \
74 V(Script) \
75 V(Sentinel) \
76 V(SendPort) \
77 V(SingleTargetCache) \
78 V(Smi) \
79 V(StackTrace) \
80 V(SubtypeTestCache) \
81 V(SuspendState) \
82 V(Type) \
83 V(TypeArguments) \
84 V(TypeParameter) \
85 V(TypeParameters) \
86 V(TypedDataBase) \
87 V(UnhandledException) \
88 V(UnlinkedCall) \
89 V(UnwindError) \
90 V(UserTag) \
91 V(WeakArray) \
92 V(WeakSerializationReference)
93
94namespace dart {
95
96DEFINE_FLAG(bool,
97            enable_fast_object_copy,
98            true,
99            "Enable fast path for fast object copy.");
100DEFINE_FLAG(bool,
101            gc_on_foc_slow_path,
102            false,
103            "Cause a GC when falling off the fast path for fast object copy.");
104
105const char* kFastAllocationFailed = "fast allocation failed";
106
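// The copy logic below is instantiated twice via two type adapters: PtrTypes
// works on raw ObjectPtr values (used by the fast, safepoint-free pass), while
// the second, handle-based adapter works on Object handles (used by the slow,
// GC-safe pass).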
107struct PtrTypes {
108  using Object = ObjectPtr;
109  static dart::UntaggedObject* UntagObject(Object arg) {
110    return arg.untag();
111  }
112  static const dart::ObjectPtr GetObjectPtr(Object arg) { return arg; }
113  static const dart::Object& HandlifyObject(Object arg) {
114    return dart::Object::Handle(arg);
115  }
116
117#define DO(V) \
118 using V = V##Ptr; \
119 static Untagged##V* Untag##V(V##Ptr arg) { return arg.untag(); } \
120 static V##Ptr Get##V##Ptr(V##Ptr arg) { return arg; } \
121 static V##Ptr Cast##V(ObjectPtr arg) { return dart::V::RawCast(arg); }
123#undef DO
124};
125
126struct HandleTypes {
127  using Object = const dart::Object&;
128  static dart::UntaggedObject* UntagObject(Object arg) {
129    return arg.ptr().untag();
130  }
131 static dart::ObjectPtr GetObjectPtr(Object arg) { return arg.ptr(); }
132 static Object HandlifyObject(Object arg) { return arg; }
133
134#define DO(V) \
135 using V = const dart::V&; \
136 static Untagged##V* Untag##V(V arg) { return arg.ptr().untag(); } \
137 static V##Ptr Get##V##Ptr(V arg) { return arg.ptr(); } \
138 static V Cast##V(const dart::Object& arg) { return dart::V::Cast(arg); }
140#undef DO
141};
142
143DART_FORCE_INLINE
144ObjectPtr Marker() {
145  return Object::unknown_constant().ptr();
146}
147
148DART_FORCE_INLINE
149static bool CanShareObject(ObjectPtr obj, uword tags) {
151 return true;
152 }
153 const auto cid = UntaggedObject::ClassIdTag::decode(tags);
156 // Unmodifiable typed data views may have mutable backing stores.
157 return TypedDataView::RawCast(obj)
158 ->untag()
159 ->typed_data()
160 ->untag()
161 ->IsImmutable();
162 }
163
164  // All other objects that have the immutability bit set are deeply immutable.
165 return true;
166 }
167
168  // TODO(https://dartbug.com/55136): Mark Closures as shallowly immutable.
169  // And move this into the if above.
170 if (cid == kClosureCid) {
171 // We can share a closure iff it doesn't close over any state.
172 return Closure::RawCast(obj)->untag()->context() == Object::null();
173 }
174
175 return false;
176}
177
178bool CanShareObjectAcrossIsolates(ObjectPtr obj) {
179  if (!obj->IsHeapObject()) return true;
180 const uword tags = TagsFromUntaggedObject(obj.untag());
181 return CanShareObject(obj, tags);
182}
183
184// Whether executing `get:hashCode` (possibly in a different isolate) on an
185// object with the given [tags] might return a different answer than executing
186// it on the source object (if copying is needed) or on the same object (if the
187// object is shared).
188DART_FORCE_INLINE
189static bool MightNeedReHashing(ObjectPtr object) {
190 const uword tags = TagsFromUntaggedObject(object.untag());
191 const auto cid = UntaggedObject::ClassIdTag::decode(tags);
192 // These use structural hash codes and will therefore always result in the
193 // same hash codes.
194 if (cid == kOneByteStringCid) return false;
195 if (cid == kTwoByteStringCid) return false;
196 if (cid == kMintCid) return false;
197 if (cid == kDoubleCid) return false;
198 if (cid == kBoolCid) return false;
199 if (cid == kSendPortCid) return false;
200 if (cid == kCapabilityCid) return false;
201 if (cid == kNullCid) return false;
202
203 // These are shared and use identity hash codes. If they are used as a key in
204 // a map or a value in a set, they will already have the identity hash code
205 // set.
206 if (cid == kRegExpCid) return false;
207 if (cid == kInt32x4Cid) return false;
208
209 // If the [tags] indicates this is a canonical object we'll share it instead
210 // of copying it. That would suggest we don't have to re-hash maps/sets
211 // containing this object on the receiver side.
212 //
213 // Though the object can be a constant of a user-defined class with a
214  // custom hash code that is misbehaving (e.g. one that depends on global field
215 // state, ...). To be on the safe side we'll force re-hashing if such objects
216 // are encountered in maps/sets.
217 //
218 // => We might want to consider changing the implementation to avoid rehashing
219 // in such cases in the future and disambiguate the documentation.
220 return true;
221}
222
223DART_FORCE_INLINE
224uword TagsFromUntaggedObject(UntaggedObject* obj) {
225  return obj->tags_;
226}
227
228DART_FORCE_INLINE
229void SetNewSpaceTaggingWord(ObjectPtr to, classid_t cid, uint32_t size) {
230  uword tags = 0;
231
232  tags = UntaggedObject::SizeTag::update(size, tags);
233  tags = UntaggedObject::ClassIdTag::update(cid, tags);
234  tags = UntaggedObject::AlwaysSetBit::update(true, tags);
235 tags = UntaggedObject::NotMarkedBit::update(true, tags);
237 tags = UntaggedObject::CanonicalBit::update(false, tags);
238 tags = UntaggedObject::NewBit::update(true, tags);
241#if defined(HASH_IN_OBJECT_HEADER)
242 tags = UntaggedObject::HashTag::update(0, tags);
243#endif
244 to.untag()->tags_ = tags;
245}
246
247DART_FORCE_INLINE
248ObjectPtr AllocateObject(intptr_t cid,
249                         intptr_t size,
250                         intptr_t allocated_bytes) {
251#if defined(DART_COMPRESSED_POINTERS)
252 const bool compressed = true;
253#else
254 const bool compressed = false;
255#endif
256 const intptr_t kLargeMessageThreshold = 16 * MB;
257 const Heap::Space space =
258 allocated_bytes > kLargeMessageThreshold ? Heap::kOld : Heap::kNew;
259 // Mimic the old initialization behavior of Object::InitializeObject where
260 // the contents are initialized to Object::null(), except for TypedDataBase
261 // subclasses which are initialized to 0, as the contents of the original
262  // are translated and copied over prior to returning the object graph root.
263  if (IsTypedDataBaseClassId(cid)) {
264    return Object::Allocate(cid, size, space, compressed,
265                            Object::from_offset<TypedDataBase>(),
266                            Object::to_offset<TypedDataBase>());
267
268  } else {
269 // Remember that ptr_field_end_offset is the offset to the last Ptr
270 // field, not the offset just past it.
271 const uword ptr_field_end_offset =
272 size - (compressed ? kCompressedWordSize : kWordSize);
273 return Object::Allocate(cid, size, space, compressed,
274 Object::from_offset<Object>(),
275 ptr_field_end_offset);
276 }
277}
278
279DART_FORCE_INLINE
280void UpdateLengthField(intptr_t cid, ObjectPtr from, ObjectPtr to) {
281  // We share these objects - never copy them.
282  ASSERT(!IsStringClassId(cid));
283
284 // We update any in-heap variable sized object with the length to keep the
285 // length and the size in the object header in-sync for the GC.
286 if (cid == kArrayCid || cid == kImmutableArrayCid) {
287 static_cast<UntaggedArray*>(to.untag())->length_ =
288 static_cast<UntaggedArray*>(from.untag())->length_;
289 } else if (cid == kContextCid) {
290 static_cast<UntaggedContext*>(to.untag())->num_variables_ =
291 static_cast<UntaggedContext*>(from.untag())->num_variables_;
292 } else if (IsTypedDataClassId(cid)) {
293 static_cast<UntaggedTypedDataBase*>(to.untag())->length_ =
294 static_cast<UntaggedTypedDataBase*>(from.untag())->length_;
295 } else if (cid == kRecordCid) {
296 static_cast<UntaggedRecord*>(to.untag())->shape_ =
297 static_cast<UntaggedRecord*>(from.untag())->shape_;
298 }
299}
300
301void InitializeExternalTypedData(intptr_t cid,
302                                 ExternalTypedDataPtr from,
303                                 ExternalTypedDataPtr to) {
304 auto raw_from = from.untag();
305 auto raw_to = to.untag();
306 const intptr_t length =
307 TypedData::ElementSizeInBytes(cid) * Smi::Value(raw_from->length_);
308
309 auto buffer = static_cast<uint8_t*>(malloc(length));
310 memmove(buffer, raw_from->data_, length);
311 raw_to->length_ = raw_from->length_;
312 raw_to->data_ = buffer;
313}
314
315template <typename T>
316void CopyTypedDataBaseWithSafepointChecks(Thread* thread,
317                                          const T& from,
318                                          const T& to,
319                                          intptr_t length) {
320 constexpr intptr_t kChunkSize = 100 * 1024;
321
322 const intptr_t chunks = length / kChunkSize;
323 const intptr_t remainder = length % kChunkSize;
324
325  // Notice we re-load the data pointer, since T may be TypedData, in which
326  // case the interior pointer may change after safepoint checks.
327 for (intptr_t i = 0; i < chunks; ++i) {
328 memmove(to.ptr().untag()->data_ + i * kChunkSize,
329 from.ptr().untag()->data_ + i * kChunkSize, kChunkSize);
330
331 thread->CheckForSafepoint();
332 }
333 if (remainder > 0) {
334 memmove(to.ptr().untag()->data_ + chunks * kChunkSize,
335 from.ptr().untag()->data_ + chunks * kChunkSize, remainder);
336 }
337}
338
339void InitializeExternalTypedDataWithSafepointChecks(
340    Thread* thread,
341 intptr_t cid,
342 const ExternalTypedData& from,
343 const ExternalTypedData& to) {
344 const intptr_t length_in_elements = from.Length();
345 const intptr_t length_in_bytes =
346 TypedData::ElementSizeInBytes(cid) * length_in_elements;
347
348 uint8_t* to_data = static_cast<uint8_t*>(malloc(length_in_bytes));
349 to.ptr().untag()->data_ = to_data;
350 to.ptr().untag()->length_ = Smi::New(length_in_elements);
351
352 CopyTypedDataBaseWithSafepointChecks(thread, from, to, length_in_bytes);
353}
354
355void InitializeTypedDataView(TypedDataViewPtr obj) {
356 obj.untag()->typed_data_ = TypedDataBase::null();
357 obj.untag()->offset_in_bytes_ = Smi::New(0);
358 obj.untag()->length_ = Smi::New(0);
359}
360
361void FreeExternalTypedData(void* isolate_callback_data, void* buffer) {
362 free(buffer);
363}
364
365void FreeTransferablePeer(void* isolate_callback_data, void* peer) {
366 delete static_cast<TransferableTypedDataPeer*>(peer);
367}
368
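// Adapters that expose the recorded (from, to) object pairs as a flat list:
// SlowFromTo wraps a GrowableObjectArray of handles, while FastFromTo (below)
// wraps a raw GrowableArray<ObjectPtr>.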
369class SlowFromTo {
370 public:
371 explicit SlowFromTo(const GrowableObjectArray& storage) : storage_(storage) {}
372
373 ObjectPtr At(intptr_t index) { return storage_.At(index); }
374 void Add(const Object& key, const Object& value) {
375 storage_.Add(key);
376 storage_.Add(value);
377 }
378 intptr_t Length() { return storage_.Length(); }
379
380 private:
381 const GrowableObjectArray& storage_;
382};
383
384class FastFromTo {
385 public:
386 explicit FastFromTo(GrowableArray<ObjectPtr>& storage) : storage_(storage) {}
387
388 ObjectPtr At(intptr_t index) { return storage_.At(index); }
389 void Add(ObjectPtr key, ObjectPtr value) {
390 intptr_t i = storage_.length();
391 storage_.Resize(i + 2);
392 storage_[i + 0] = key;
393 storage_[i + 1] = value;
394 }
395 intptr_t Length() { return storage_.length(); }
396
397 private:
398 GrowableArray<ObjectPtr>& storage_;
399};
400
401static ObjectPtr Ptr(ObjectPtr obj) {
402  return obj;
403}
404static ObjectPtr Ptr(const Object& obj) {
405 return obj.ptr();
406}
407
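// Maps a source object to the index of its (from, to) pair in the forward
// list. With HASH_IN_OBJECT_HEADER this is an open-addressing hash table keyed
// by the object's identity hash; otherwise the isolate's weak forwarding
// tables are used.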
408#if defined(HASH_IN_OBJECT_HEADER)
409class IdentityMap {
410 public:
411 explicit IdentityMap(Thread* thread) : thread_(thread) {
412 hash_table_used_ = 0;
413 hash_table_capacity_ = 32;
414 hash_table_ = reinterpret_cast<uint32_t*>(
415 malloc(hash_table_capacity_ * sizeof(uint32_t)));
416 memset(hash_table_, 0, hash_table_capacity_ * sizeof(uint32_t));
417 }
418 ~IdentityMap() { free(hash_table_); }
419
420 template <typename S, typename T>
421 DART_FORCE_INLINE ObjectPtr ForwardedObject(const S& object, T from_to) {
422 intptr_t mask = hash_table_capacity_ - 1;
423 intptr_t probe = GetHeaderHash(Ptr(object)) & mask;
424 for (;;) {
425 intptr_t index = hash_table_[probe];
426 if (index == 0) {
427 return Marker();
428 }
429 if (from_to.At(index) == Ptr(object)) {
430 return from_to.At(index + 1);
431 }
432 probe = (probe + 1) & mask;
433 }
434 }
435
436 template <typename S, typename T>
437 DART_FORCE_INLINE void Insert(const S& from,
438 const S& to,
439 T from_to,
440 bool check_for_safepoint) {
441 ASSERT(ForwardedObject(from, from_to) == Marker());
442 const auto id = from_to.Length();
443 from_to.Add(from, to); // Must occur before rehashing.
444 intptr_t mask = hash_table_capacity_ - 1;
445 intptr_t probe = GetHeaderHash(Ptr(from)) & mask;
446 for (;;) {
447 intptr_t index = hash_table_[probe];
448 if (index == 0) {
449 hash_table_[probe] = id;
450 break;
451 }
452 probe = (probe + 1) & mask;
453 }
454 hash_table_used_++;
455 if (hash_table_used_ * 2 > hash_table_capacity_) {
456 Rehash(hash_table_capacity_ * 2, from_to, check_for_safepoint);
457 }
458 }
459
460 private:
461 DART_FORCE_INLINE
462 uint32_t GetHeaderHash(ObjectPtr object) {
463 uint32_t hash = Object::GetCachedHash(object);
464 if (hash == 0) {
465 switch (object->GetClassId()) {
466 case kMintCid:
467 hash = Mint::Value(static_cast<MintPtr>(object));
468 // Don't write back: doesn't agree with dart:core's identityHash.
469 break;
470 case kDoubleCid:
471 hash =
472 bit_cast<uint64_t>(Double::Value(static_cast<DoublePtr>(object)));
473 // Don't write back: doesn't agree with dart:core's identityHash.
474 break;
475 case kOneByteStringCid:
476 case kTwoByteStringCid:
477 hash = String::Hash(static_cast<StringPtr>(object));
478 hash = Object::SetCachedHashIfNotSet(object, hash);
479 break;
480 default:
481 do {
482 hash = thread_->random()->NextUInt32();
483 } while (hash == 0 || !Smi::IsValid(hash));
484 hash = Object::SetCachedHashIfNotSet(object, hash);
485 break;
486 }
487 }
488 return hash;
489 }
490
491 template <typename T>
492 void Rehash(intptr_t new_capacity, T from_to, bool check_for_safepoint) {
493 hash_table_capacity_ = new_capacity;
494 hash_table_used_ = 0;
495 free(hash_table_);
496 hash_table_ = reinterpret_cast<uint32_t*>(
497 malloc(hash_table_capacity_ * sizeof(uint32_t)));
498 for (intptr_t i = 0; i < hash_table_capacity_; i++) {
499 hash_table_[i] = 0;
500 if (check_for_safepoint && (((i + 1) % KB) == 0)) {
501 thread_->CheckForSafepoint();
502 }
503 }
504 for (intptr_t id = 2; id < from_to.Length(); id += 2) {
505 ObjectPtr obj = from_to.At(id);
506 intptr_t mask = hash_table_capacity_ - 1;
507 intptr_t probe = GetHeaderHash(obj) & mask;
508 for (;;) {
509 if (hash_table_[probe] == 0) {
510 hash_table_[probe] = id;
511 hash_table_used_++;
512 break;
513 }
514 probe = (probe + 1) & mask;
515 }
516 if (check_for_safepoint && (((id + 2) % KB) == 0)) {
517 thread_->CheckForSafepoint();
518 }
519 }
520 }
521
522 Thread* thread_;
523 uint32_t* hash_table_;
524 uint32_t hash_table_capacity_;
525 uint32_t hash_table_used_;
526};
527#else // defined(HASH_IN_OBJECT_HEADER)
528class IdentityMap {
529 public:
530 explicit IdentityMap(Thread* thread) : isolate_(thread->isolate()) {
531 isolate_->set_forward_table_new(new WeakTable());
532 isolate_->set_forward_table_old(new WeakTable());
533 }
534  ~IdentityMap() {
535    isolate_->set_forward_table_new(nullptr);
536 isolate_->set_forward_table_old(nullptr);
537 }
538
539 template <typename S, typename T>
540 DART_FORCE_INLINE ObjectPtr ForwardedObject(const S& object, T from_to) {
541 const intptr_t id = GetObjectId(Ptr(object));
542 if (id == 0) return Marker();
543 return from_to.At(id + 1);
544 }
545
546 template <typename S, typename T>
547 DART_FORCE_INLINE void Insert(const S& from,
548 const S& to,
549 T from_to,
550 bool check_for_safepoint) {
551 ASSERT(ForwardedObject(from, from_to) == Marker());
552 const auto id = from_to.Length();
553 // May take >100ms and cannot yield to safepoints.
554 SetObjectId(Ptr(from), id);
555 from_to.Add(from, to);
556 }
557
558 private:
559 DART_FORCE_INLINE
560 intptr_t GetObjectId(ObjectPtr object) {
561 if (object->IsNewObject()) {
562 return isolate_->forward_table_new()->GetValueExclusive(object);
563 } else {
564 return isolate_->forward_table_old()->GetValueExclusive(object);
565 }
566 }
567
568 DART_FORCE_INLINE
569 void SetObjectId(ObjectPtr object, intptr_t id) {
570 if (object->IsNewObject()) {
571 isolate_->forward_table_new()->SetValueExclusive(object, id);
572 } else {
573 isolate_->forward_table_old()->SetValueExclusive(object, id);
574 }
575 }
576
577 Isolate* isolate_;
578};
579#endif // defined(HASH_IN_OBJECT_HEADER)
580
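// Common state for the fast and slow forward maps, including the logic that
// re-attaches a TransferableTypedData's peer (and its finalizable handle) to
// the copied object once the transitive copy has succeeded.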
581class ForwardMapBase {
582 public:
583 explicit ForwardMapBase(Thread* thread)
584 : thread_(thread), zone_(thread->zone()) {}
585
586 protected:
587 friend class ObjectGraphCopier;
588
590 const TransferableTypedData& to) {
591 // Get the old peer.
592 auto fpeer = static_cast<TransferableTypedDataPeer*>(
593 thread_->heap()->GetPeer(from.ptr()));
594 ASSERT(fpeer != nullptr && fpeer->data() != nullptr);
595 const intptr_t length = fpeer->length();
596
597 // Allocate new peer object with (data, length).
598 auto tpeer = new TransferableTypedDataPeer(fpeer->data(), length);
599 thread_->heap()->SetPeer(to.ptr(), tpeer);
600
601 // Move the handle itself to the new object.
602 fpeer->handle()->EnsureFreedExternal(thread_->isolate_group());
603 FinalizablePersistentHandle* finalizable_ref =
606 /*auto_delete=*/true);
607 ASSERT(finalizable_ref != nullptr);
608 tpeer->set_handle(finalizable_ref);
609 fpeer->ClearData();
610 }
611
615
618
619 private:
621};
622
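// Forward map for the fast pass: stores raw pointers and never checks into
// safepoints.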
623class FastForwardMap : public ForwardMapBase {
624 public:
625 explicit FastForwardMap(Thread* thread, IdentityMap* map)
626 : ForwardMapBase(thread),
627 map_(map),
628 raw_from_to_(thread->zone(), 20),
629 raw_transferables_from_to_(thread->zone(), 0),
630 raw_objects_to_rehash_(thread->zone(), 0),
631 raw_expandos_to_rehash_(thread->zone(), 0) {
632 raw_from_to_.Resize(2);
633 raw_from_to_[0] = Object::null();
634 raw_from_to_[1] = Object::null();
635 fill_cursor_ = 2;
636 }
637
638  ObjectPtr ForwardedObject(ObjectPtr object) {
639    return map_->ForwardedObject(object, FastFromTo(raw_from_to_));
640 }
641
642 void Insert(ObjectPtr from, ObjectPtr to, intptr_t size) {
643 map_->Insert(from, to, FastFromTo(raw_from_to_),
644 /*check_for_safepoint*/ false);
645 allocated_bytes += size;
646 }
647
648 void AddTransferable(TransferableTypedDataPtr from,
649 TransferableTypedDataPtr to) {
650 raw_transferables_from_to_.Add(from);
651 raw_transferables_from_to_.Add(to);
652 }
653 void AddWeakProperty(WeakPropertyPtr from) { raw_weak_properties_.Add(from); }
654 void AddWeakReference(WeakReferencePtr from) {
655 raw_weak_references_.Add(from);
656 }
657 void AddExternalTypedData(ExternalTypedDataPtr to) {
658 raw_external_typed_data_to_.Add(to);
659 }
660
661 void AddObjectToRehash(ObjectPtr to) { raw_objects_to_rehash_.Add(to); }
662 void AddExpandoToRehash(ObjectPtr to) { raw_expandos_to_rehash_.Add(to); }
663
664 private:
665 friend class FastObjectCopy;
666 friend class ObjectGraphCopier;
667
668 IdentityMap* map_;
669 GrowableArray<ObjectPtr> raw_from_to_;
670 GrowableArray<TransferableTypedDataPtr> raw_transferables_from_to_;
671 GrowableArray<ExternalTypedDataPtr> raw_external_typed_data_to_;
672 GrowableArray<ObjectPtr> raw_objects_to_rehash_;
673 GrowableArray<ObjectPtr> raw_expandos_to_rehash_;
674 GrowableArray<WeakPropertyPtr> raw_weak_properties_;
675 GrowableArray<WeakReferencePtr> raw_weak_references_;
676 intptr_t fill_cursor_ = 0;
677 intptr_t allocated_bytes = 0;
678
680};
681
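// Forward map for the slow pass: stores handles so copying can safely cross
// safepoints and trigger GC.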
682class SlowForwardMap : public ForwardMapBase {
683 public:
684 explicit SlowForwardMap(Thread* thread, IdentityMap* map)
685 : ForwardMapBase(thread),
686 map_(map),
687 from_to_transition_(thread->zone(), 2),
688 from_to_(GrowableObjectArray::Handle(thread->zone(),
689 GrowableObjectArray::New(2))),
690 transferables_from_to_(thread->zone(), 0) {
691 from_to_transition_.Resize(2);
692 from_to_transition_[0] = &PassiveObject::Handle();
693 from_to_transition_[1] = &PassiveObject::Handle();
694 from_to_.Add(Object::null_object());
695 from_to_.Add(Object::null_object());
696 fill_cursor_ = 2;
697 }
698
699  ObjectPtr ForwardedObject(const Object& object) {
700    return map_->ForwardedObject(object, SlowFromTo(from_to_));
701 }
702 void Insert(const Object& from, const Object& to, intptr_t size) {
703 map_->Insert(from, to, SlowFromTo(from_to_),
704 /* check_for_safepoint */ true);
705 allocated_bytes += size;
706 }
707
708  void AddTransferable(const TransferableTypedData& from,
709                       const TransferableTypedData& to) {
710 transferables_from_to_.Add(&TransferableTypedData::Handle(from.ptr()));
711 transferables_from_to_.Add(&TransferableTypedData::Handle(to.ptr()));
712 }
713 void AddWeakProperty(const WeakProperty& from) {
714 weak_properties_.Add(&WeakProperty::Handle(from.ptr()));
715 }
716 void AddWeakReference(const WeakReference& from) {
717 weak_references_.Add(&WeakReference::Handle(from.ptr()));
718 }
719 const ExternalTypedData& AddExternalTypedData(ExternalTypedDataPtr to) {
720 auto to_handle = &ExternalTypedData::Handle(to);
721 external_typed_data_.Add(to_handle);
722 return *to_handle;
723 }
724 void AddObjectToRehash(const Object& to) {
725 objects_to_rehash_.Add(&Object::Handle(to.ptr()));
726 }
727 void AddExpandoToRehash(const Object& to) {
728 expandos_to_rehash_.Add(&Object::Handle(to.ptr()));
729 }
730
731  void FinalizeTransferables() {
732    for (intptr_t i = 0; i < transferables_from_to_.length(); i += 2) {
733 auto from = transferables_from_to_[i];
734 auto to = transferables_from_to_[i + 1];
735 FinalizeTransferable(*from, *to);
736 }
737 }
738
739  void FinalizeExternalTypedData() {
740    for (intptr_t i = 0; i < external_typed_data_.length(); i++) {
741 auto to = external_typed_data_[i];
743 }
744 }
745
746 private:
747 friend class SlowObjectCopy;
748 friend class SlowObjectCopyBase;
749 friend class ObjectGraphCopier;
750
751 IdentityMap* map_;
752 GrowableArray<const PassiveObject*> from_to_transition_;
753 GrowableObjectArray& from_to_;
754 GrowableArray<const TransferableTypedData*> transferables_from_to_;
755 GrowableArray<const ExternalTypedData*> external_typed_data_;
756 GrowableArray<const Object*> objects_to_rehash_;
757 GrowableArray<const Object*> expandos_to_rehash_;
758 GrowableArray<const WeakProperty*> weak_properties_;
759  GrowableArray<const WeakReference*> weak_references_;
760  intptr_t fill_cursor_ = 0;
761 intptr_t allocated_bytes = 0;
762
764};
765
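// Functionality shared by both copy passes: cached thread/heap/class-table
// state, raw (compressed) field load/store helpers, and CanCopyObject(), which
// rejects objects that are illegal to send in an isolate message.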
766class ObjectCopyBase {
767 public:
768 explicit ObjectCopyBase(Thread* thread)
769 : thread_(thread),
770 heap_base_(thread->heap_base()),
771 zone_(thread->zone()),
772 heap_(thread->isolate_group()->heap()),
773 class_table_(thread->isolate_group()->class_table()),
774 new_space_(heap_->new_space()),
775 tmp_(Object::Handle(thread->zone())),
776 to_(Object::Handle(thread->zone())),
777 expando_cid_(Class::GetClassId(
778 thread->isolate_group()->object_store()->expando_class())),
779 exception_unexpected_object_(Object::Handle(thread->zone())) {}
781
782 protected:
783 static ObjectPtr LoadPointer(ObjectPtr src, intptr_t offset) {
784 return src.untag()->LoadPointer(reinterpret_cast<ObjectPtr*>(
785 reinterpret_cast<uint8_t*>(src.untag()) + offset));
786 }
787  static CompressedObjectPtr LoadCompressedPointer(ObjectPtr src,
788                                                   intptr_t offset) {
789 return src.untag()->LoadPointer(reinterpret_cast<CompressedObjectPtr*>(
790 reinterpret_cast<uint8_t*>(src.untag()) + offset));
791 }
792  static compressed_uword LoadCompressedNonPointerWord(ObjectPtr src,
793                                                       intptr_t offset) {
794 return *reinterpret_cast<compressed_uword*>(
795 reinterpret_cast<uint8_t*>(src.untag()) + offset);
796 }
798 intptr_t offset,
799 ObjectPtr value) {
800 obj.untag()->StorePointer(
801 reinterpret_cast<ObjectPtr*>(reinterpret_cast<uint8_t*>(obj.untag()) +
802 offset),
803 value);
804 }
806 intptr_t offset,
807 ObjectPtr value) {
809 reinterpret_cast<CompressedObjectPtr*>(
810 reinterpret_cast<uint8_t*>(obj.untag()) + offset),
811 value);
812 }
814 intptr_t offset,
815 ObjectPtr value) {
817 reinterpret_cast<CompressedObjectPtr*>(
818 reinterpret_cast<uint8_t*>(obj.untag()) + offset),
819 value, thread_);
820 }
822 intptr_t offset,
823 ObjectPtr value) {
824 *reinterpret_cast<ObjectPtr*>(reinterpret_cast<uint8_t*>(obj.untag()) +
825 offset) = value;
826 }
827 template <typename T = ObjectPtr>
829 intptr_t offset,
830 T value) {
831 *reinterpret_cast<CompressedObjectPtr*>(
832 reinterpret_cast<uint8_t*>(obj.untag()) + offset) = value;
833 }
835 intptr_t offset,
836 compressed_uword value) {
837 *reinterpret_cast<compressed_uword*>(
838 reinterpret_cast<uint8_t*>(obj.untag()) + offset) = value;
839 }
840
841 DART_FORCE_INLINE
842 bool CanCopyObject(uword tags, ObjectPtr object) {
843 const auto cid = UntaggedObject::ClassIdTag::decode(tags);
846 zone_,
847 "Illegal argument in isolate message: object is unsendable - %s ("
848 "see restrictions listed at `SendPort.send()` documentation "
849 "for more information)",
852 return false;
853 }
854 if (cid > kNumPredefinedCids) {
855 return true;
856 }
857#define HANDLE_ILLEGAL_CASE(Type) \
858 case k##Type##Cid: { \
859 exception_msg_ = \
860 "Illegal argument in isolate message: " \
861 "(object is a " #Type ")"; \
862 exception_unexpected_object_ = object; \
863 return false; \
864 }
865
866 switch (cid) {
867 // From "dart:ffi" we handle only Pointer/DynamicLibrary specially, since
868 // those are the only non-abstract classes (so we avoid checking more cids
869 // here that cannot happen in reality)
878 default:
879 return true;
880 }
881 }
882
891 intptr_t expando_cid_;
892
893 const char* exception_msg_ = nullptr;
895};
896
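// Walks the object graph rooted at `from` looking for `to` and, if found,
// builds a human-readable description of the retaining path, respecting the
// given traversal rules.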
897class RetainingPath {
898  class Visitor : public ObjectPointerVisitor {
899   public:
900    Visitor(IsolateGroup* isolate_group,
901            RetainingPath* retaining_path,
902            MallocGrowableArray<ObjectPtr>* const working_list,
903            TraversalRules traversal_rules)
904        : ObjectPointerVisitor(isolate_group),
905          retaining_path_(retaining_path),
906 working_list_(working_list),
907 traversal_rules_(traversal_rules) {}
908
909 void VisitObject(ObjectPtr obj) {
910 if (!obj->IsHeapObject()) {
911 return;
912 }
913 // Skip canonical objects when rules are for messages internal to
914 // an isolate group. Otherwise, need to inspect canonical objects
915 // as well.
916 if (traversal_rules_ == TraversalRules::kInternalToIsolateGroup &&
917 obj->untag()->IsCanonical()) {
918 return;
919 }
920 if (retaining_path_->WasVisited(obj)) {
921 return;
922 }
923 retaining_path_->MarkVisited(obj);
924 working_list_->Add(obj);
925 }
926
927 void VisitPointers(ObjectPtr* from, ObjectPtr* to) override {
928 for (ObjectPtr* ptr = from; ptr <= to; ptr++) {
929 VisitObject(*ptr);
930 }
931 }
932
933#if defined(DART_COMPRESSED_POINTERS)
934 void VisitCompressedPointers(uword heap_base,
935                                 CompressedObjectPtr* from,
936                                 CompressedObjectPtr* to) override {
937 for (CompressedObjectPtr* ptr = from; ptr <= to; ptr++) {
938 VisitObject(ptr->Decompress(heap_base));
939 }
940 }
941#endif
942
943 RetainingPath* retaining_path_;
944 MallocGrowableArray<ObjectPtr>* const working_list_;
945 TraversalRules traversal_rules_;
946 };
947
948 public:
949  RetainingPath(Zone* zone,
950                Isolate* isolate,
951 const Object& from,
952 const Object& to,
953 TraversalRules traversal_rules)
954 : zone_(zone),
955 isolate_(isolate),
956 from_(from),
957 to_(to),
958 traversal_rules_(traversal_rules) {
959 isolate_->set_forward_table_new(new WeakTable());
960 isolate_->set_forward_table_old(new WeakTable());
961 }
962
963  ~RetainingPath() {
964    isolate_->set_forward_table_new(nullptr);
965 isolate_->set_forward_table_old(nullptr);
966 }
967
968 bool WasVisited(ObjectPtr object) {
969 if (object->IsNewObject()) {
970 return isolate_->forward_table_new()->GetValueExclusive(object) != 0;
971 } else {
972 return isolate_->forward_table_old()->GetValueExclusive(object) != 0;
973 }
974 }
975
976 void MarkVisited(ObjectPtr object) {
977 if (object->IsNewObject()) {
978 isolate_->forward_table_new()->SetValueExclusive(object, 1);
979 } else {
980 isolate_->forward_table_old()->SetValueExclusive(object, 1);
981 }
982 }
983
984 const char* FindPath() {
985 MallocGrowableArray<ObjectPtr>* const working_list =
986 isolate_->pointers_to_verify_at_exit();
987 ASSERT(working_list->length() == 0);
988
989 Visitor visitor(isolate_->group(), this, working_list, traversal_rules_);
990
991 MarkVisited(from_.ptr());
992 working_list->Add(from_.ptr());
993
994 Thread* thread = Thread::Current();
995 ClassTable* class_table = isolate_->group()->class_table();
996 Closure& closure = Closure::Handle(zone_);
997 Array& array = Array::Handle(zone_);
998 Class& klass = Class::Handle(zone_);
999
1000 while (!working_list->is_empty()) {
1001 thread->CheckForSafepoint();
1002
1003      // Keep the node in the list, separated by a null value, so that if we
1004      // add children, they can find their parent in case they are on the
1005      // retaining path.
1006 ObjectPtr raw = working_list->Last();
1007 if (raw == Object::null()) {
1008 // If all children of a node were processed, then skip the separator,
1009 working_list->RemoveLast();
1010 // then skip the parent since it has already been processed too.
1011 working_list->RemoveLast();
1012 continue;
1013 }
1014
1015 if (raw == to_.ptr()) {
1016 return CollectPath(working_list);
1017 }
1018
1019      // The null separator object indicates children go next in the working_list.
1020 working_list->Add(Object::null());
1021 int length = working_list->length();
1022
1023 do { // This loop is here so that we can skip children processing
1024 const intptr_t cid = raw->GetClassId();
1025
1026 if (traversal_rules_ == TraversalRules::kInternalToIsolateGroup) {
1028 break;
1029 }
1030 if (cid == kClosureCid) {
1031 closure ^= raw;
1032 // Only context has to be checked.
1033 working_list->Add(closure.RawContext());
1034 break;
1035 }
1036          // These we are not expected to drill into, as they can't be on the
1037          // retaining path; they are illegal to send.
1038 klass = class_table->At(cid);
1039 if (klass.is_isolate_unsendable()) {
1040 break;
1041 }
1042 } else {
1043 ASSERT(traversal_rules_ ==
1044                 TraversalRules::kExternalBetweenIsolateGroups);
1045          // Skip classes that are illegal to send across isolate groups.
1046 // (keep the list in sync with message_snapshot.cc)
1047 bool skip = false;
1048 switch (cid) {
1049 case kClosureCid:
1050 case kFinalizerCid:
1051 case kFinalizerEntryCid:
1052 case kFunctionTypeCid:
1053 case kMirrorReferenceCid:
1054 case kNativeFinalizerCid:
1055 case kReceivePortCid:
1056 case kRecordCid:
1057 case kRecordTypeCid:
1058 case kRegExpCid:
1059 case kStackTraceCid:
1060 case kSuspendStateCid:
1061 case kUserTagCid:
1062 case kWeakPropertyCid:
1063 case kWeakReferenceCid:
1064 case kWeakArrayCid:
1065 case kDynamicLibraryCid:
1066 case kPointerCid:
1067 case kInstanceCid:
1068 skip = true;
1069 break;
1070 default:
1071 if (cid >= kNumPredefinedCids) {
1072 skip = true;
1073 }
1074 }
1075 if (skip) {
1076 break;
1077 }
1078 }
1079 if (cid == kArrayCid) {
1080 array ^= Array::RawCast(raw);
1081 visitor.VisitObject(array.GetTypeArguments());
1082 const intptr_t batch_size = (2 << 14) - 1;
1083 for (intptr_t i = 0; i < array.Length(); ++i) {
1084 ObjectPtr ptr = array.At(i);
1085 visitor.VisitObject(ptr);
1086 if ((i & batch_size) == batch_size) {
1087 thread->CheckForSafepoint();
1088 }
1089 }
1090 break;
1091 } else {
1092 raw->untag()->VisitPointers(&visitor);
1093 }
1094 } while (false);
1095
1096      // If no children were added, remove the null separator and the node.
1097      // If children were added, the node will be removed once the last child
1098      // is processed; only the null separator remains.
1099 if (working_list->length() == length) {
1100 RELEASE_ASSERT(working_list->RemoveLast() == Object::null());
1101 RELEASE_ASSERT(working_list->RemoveLast() == raw);
1102 }
1103 }
1104    // `to` was not found in the graph rooted in `from`; return an empty retaining path.
1105 return "";
1106 }
1107
1108 private:
1109 Zone* zone_;
1110 Isolate* isolate_;
1111 const Object& from_;
1112 const Object& to_;
1113 TraversalRules traversal_rules_;
1114
1115 class FindObjectVisitor : public ObjectPointerVisitor {
1116 public:
1117 FindObjectVisitor(IsolateGroup* isolate_group, ObjectPtr target)
1118 : ObjectPointerVisitor(isolate_group), target_(target), index_(0) {}
1119
1120 void VisitPointers(ObjectPtr* from, ObjectPtr* to) override {
1121 for (ObjectPtr* ptr = from; ptr <= to; ptr++, index_++) {
1122 if (*ptr == target_) {
1123 break;
1124 }
1125 }
1126 }
1127
1128#if defined(DART_COMPRESSED_POINTERS)
1129 void VisitCompressedPointers(uword heap_base,
1130 CompressedObjectPtr* from,
1131 CompressedObjectPtr* to) override {
1132 for (CompressedObjectPtr* ptr = from; ptr <= to; ptr++, index_++) {
1133 if (ptr->Decompress(heap_base) == target_) {
1134 break;
1135 }
1136 }
1137 }
1138#endif
1139
1140 intptr_t index() { return index_; }
1141
1142 private:
1143 ObjectPtr target_;
1144 intptr_t index_;
1145 };
1146
1147 const char* CollectPath(MallocGrowableArray<ObjectPtr>* const working_list) {
1148 Object& previous_object = Object::Handle(zone_);
1149 Object& object = Object::Handle(zone_);
1150 Field& field = Field::Handle(zone_);
1151 Class& klass = Class::Handle(zone_);
1152 Library& library = Library::Handle(zone_);
1153 String& library_url = String::Handle(zone_);
1154 Context& context = Context::Handle(zone_);
1155 Closure& closure = Closure::Handle(zone_);
1156 Function& function = Function::Handle(zone_);
1157#if !defined(DART_PRECOMPILED_RUNTIME)
1158 Code& code = Code::Handle(zone_);
1159 LocalVarDescriptors& var_descriptors = LocalVarDescriptors::Handle(zone_);
1160 String& name = String::Handle(zone_);
1161#endif
1162
1163 const char* saved_context_location = nullptr;
1164 intptr_t saved_context_object_index = -1;
1165 intptr_t saved_context_depth = 0;
1166 const char* retaining_path = "";
1167
1168 ObjectPtr raw = to_.ptr();
1169 do {
1170 previous_object = raw;
1171 // Skip all remaining children until null-separator, so we get the parent
1172 do {
1173 raw = working_list->RemoveLast();
1174 } while (raw != Object::null() && raw != from_.ptr());
1175 if (raw == Object::null()) {
1176 raw = working_list->RemoveLast();
1177 object = raw;
1178 klass = object.clazz();
1179
1180 const char* location = object.ToCString();
1181
1182 if (object.IsContext()) {
1183 context ^= raw;
1184 if (saved_context_object_index == -1) {
1185 // If this is the first context, remember index of the
1186 // [previous_object] in the Context.
1187            // We will need it later if we get to see the Closure next.
1188 saved_context_depth = 0;
1189 for (intptr_t i = 0; i < context.num_variables(); i++) {
1190 if (context.At(i) == previous_object.ptr()) {
1191 saved_context_object_index = i;
1192 break;
1193 }
1194 }
1195 } else {
1196 // Keep track of context depths in case of nested contexts;
1197 saved_context_depth++;
1198 }
1199 } else {
1200 if (object.IsInstance()) {
1201 if (object.IsClosure()) {
1202 closure ^= raw;
1203 function ^= closure.function();
1204 // Use function's class when looking for a library information.
1205 klass ^= function.Owner();
1206#if defined(DART_PRECOMPILED_RUNTIME)
1207 // Use function's name instead of closure's.
1208 location = function.QualifiedUserVisibleNameCString();
1209#else // defined(DART_PRECOMPILED_RUNTIME) \
1210 // Attempt to convert "instance <- Context+ <- Closure" into \
1211 // "instance <- local var name in Closure".
1212 if (!function.ForceOptimize()) {
1213 function.EnsureHasCompiledUnoptimizedCode();
1214 }
1215 code ^= function.unoptimized_code();
1216 ASSERT(!code.IsNull());
1217 var_descriptors ^= code.GetLocalVarDescriptors();
1218 for (intptr_t i = 0; i < var_descriptors.Length(); i++) {
1219 UntaggedLocalVarDescriptors::VarInfo info;
1220 var_descriptors.GetInfo(i, &info);
1221 if (info.scope_id == -saved_context_depth &&
1222 info.kind() ==
1223                         UntaggedLocalVarDescriptors::kContextVar &&
1224                     info.index() == saved_context_object_index) {
1225 name ^= var_descriptors.GetName(i);
1226 location =
1227 OS::SCreate(zone_, "field %s in %s", name.ToCString(),
1228 function.QualifiedUserVisibleNameCString());
1229 // Won't need saved context location after all.
1230 saved_context_location = nullptr;
1231 break;
1232 }
1233 }
1234#endif // defined(DART_PRECOMPILED_RUNTIME)
1235 } else {
1236 // Attempt to find field name for the field that holds the
1237 // [previous_object] instance.
1238 FindObjectVisitor visitor(isolate_->group(),
1239 previous_object.ptr());
1240 raw->untag()->VisitPointers(&visitor);
1241 field ^= klass.FieldFromIndex(visitor.index());
1242 if (!field.IsNull()) {
1243 location =
1244 OS::SCreate(zone_, "%s in %s",
1245 field.UserVisibleNameCString(), location);
1246 }
1247 }
1248 }
1249          // The saved context object index stays valid for only one cycle - just
1250          // to accommodate short chains Closure -> Context -> instance.
1251 saved_context_object_index = -1;
1252 saved_context_depth = -1;
1253 }
1254 // Add library url to the location if library is available.
1255 library = klass.library();
1256 if (!library.IsNull()) {
1257 library_url = library.url();
1258 location = OS::SCreate(zone_, "%s (from %s)", location,
1259 library_url.ToCString());
1260 }
1261
1262 if (object.IsContext()) {
1263 // Save context string placeholder in case we don't find closure next
1264 if (saved_context_location == nullptr) {
1265 saved_context_location = location;
1266 } else {
1267 // Append saved contexts
1268 saved_context_location = OS::SCreate(
1269 zone_, "%s <- %s\n", saved_context_location, location);
1270 }
1271 } else {
1272 if (saved_context_location != nullptr) {
1273 // Could not use saved context, insert it into retaining path now.
1274 retaining_path = OS::SCreate(zone_, "%s <- %s", retaining_path,
1275 saved_context_location);
1276 saved_context_location = nullptr;
1277 }
1278 retaining_path =
1279 OS::SCreate(zone_, "%s <- %s\n", retaining_path, location);
1280 }
1281 }
1282 } while (raw != from_.ptr());
1283 ASSERT(working_list->is_empty());
1284 return retaining_path;
1285 }
1286};
1287
1288const char* FindRetainingPath(Zone* zone_,
1289 Isolate* isolate,
1290 const Object& from,
1291 const Object& to,
1292 TraversalRules traversal_rules) {
1293 RetainingPath rr(zone_, isolate, from, to, traversal_rules);
1294 return rr.FindPath();
1295}
1296
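// Base for the fast copy pass: works on raw pointers, allocates straight from
// new space, and performs no safepoint checks; when allocation fails or an
// unsupported object is hit it records an error (see kFastAllocationFailed)
// so the copy can be retried on the slow path.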
1297class FastObjectCopyBase : public ObjectCopyBase {
1298 public:
1299  using Types = PtrTypes;
1300
1301  explicit FastObjectCopyBase(Thread* thread, IdentityMap* map)
1302      : ObjectCopyBase(thread), fast_forward_map_(thread, map) {}
1303
1304 protected:
1305 DART_FORCE_INLINE
1306  void ForwardCompressedPointers(ObjectPtr src,
1307                                 ObjectPtr dst,
1308                                 intptr_t offset,
1309                                 intptr_t end_offset) {
1310    for (; offset < end_offset; offset += kCompressedWordSize) {
1311      ForwardCompressedPointer(src, dst, offset);
1312    }
1313 }
1314
1315 DART_FORCE_INLINE
1317 ObjectPtr dst,
1318 intptr_t offset,
1319 intptr_t end_offset,
1321 if (bitmap.IsEmpty()) {
1322 ForwardCompressedPointers(src, dst, offset, end_offset);
1323 return;
1324 }
1325 intptr_t bit = offset >> kCompressedWordSizeLog2;
1326 for (; offset < end_offset; offset += kCompressedWordSize) {
1327 if (bitmap.Get(bit++)) {
1330 } else {
1332 }
1333 }
1334 }
1335
1336 void ForwardCompressedArrayPointers(intptr_t array_length,
1337 ObjectPtr src,
1338 ObjectPtr dst,
1339 intptr_t offset,
1340 intptr_t end_offset) {
1341 for (; offset < end_offset; offset += kCompressedWordSize) {
1343 }
1344 }
1345
1346 void ForwardCompressedContextPointers(intptr_t context_length,
1347 ObjectPtr src,
1348 ObjectPtr dst,
1349 intptr_t offset,
1350 intptr_t end_offset) {
1351 for (; offset < end_offset; offset += kCompressedWordSize) {
1353 }
1354 }
1355
1356 DART_FORCE_INLINE
1358 auto value = LoadCompressedPointer(src, offset);
1359 if (!value.IsHeapObject()) {
1361 return;
1362 }
1363 auto value_decompressed = value.Decompress(heap_base_);
1364 const uword tags = TagsFromUntaggedObject(value_decompressed.untag());
1365 if (CanShareObject(value_decompressed, tags)) {
1367 return;
1368 }
1369
1370 ObjectPtr existing_to =
1371 fast_forward_map_.ForwardedObject(value_decompressed);
1372 if (existing_to != Marker()) {
1373 StoreCompressedPointerNoBarrier(dst, offset, existing_to);
1374 return;
1375 }
1376
1377 if (UNLIKELY(!CanCopyObject(tags, value_decompressed))) {
1378 ASSERT(exception_msg_ != nullptr);
1380 return;
1381 }
1382
1383 auto to = Forward(tags, value_decompressed);
1385 }
1386
1388 const intptr_t header_size = UntaggedObject::SizeTag::decode(tags);
1389 const auto cid = UntaggedObject::ClassIdTag::decode(tags);
1390 const uword size =
1391 header_size != 0 ? header_size : from.untag()->HeapSize();
1392 if (IsAllocatableInNewSpace(size)) {
1393 const uword alloc = new_space_->TryAllocateNoSafepoint(thread_, size);
1394 if (alloc != 0) {
1395 ObjectPtr to(reinterpret_cast<UntaggedObject*>(alloc));
1396 fast_forward_map_.Insert(from, to, size);
1397
1399 SetNewSpaceTaggingWord(to, cid, header_size);
1404 } else if (IsTypedDataViewClassId(cid) ||
1406 // We set the views backing store to `null` to satisfy an assertion in
1407 // GCCompactor::VisitTypedDataViewPointers().
1408 SetNewSpaceTaggingWord(to, cid, header_size);
1410 }
1411 return to;
1412 }
1413 }
1415 return Marker();
1416 }
1417
1418 void EnqueueTransferable(TransferableTypedDataPtr from,
1419 TransferableTypedDataPtr to) {
1421 }
1422 void EnqueueWeakProperty(WeakPropertyPtr from) {
1424 }
1425 void EnqueueWeakReference(WeakReferencePtr from) {
1427 }
1434
1435 static void StoreCompressedArrayPointers(intptr_t array_length,
1436 ObjectPtr src,
1437 ObjectPtr dst,
1438 intptr_t offset,
1439 intptr_t end_offset) {
1440 StoreCompressedPointers(src, dst, offset, end_offset);
1441 }
1443 ObjectPtr dst,
1444 intptr_t offset,
1445 intptr_t end_offset) {
1446 StoreCompressedPointersNoBarrier(src, dst, offset, end_offset);
1447 }
1449 ObjectPtr dst,
1450 intptr_t offset,
1451 intptr_t end_offset) {
1452 for (; offset <= end_offset; offset += kCompressedWordSize) {
1455 }
1456 }
1457
1458 protected:
1459 friend class ObjectGraphCopier;
1460
1462};
1463
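// Base for the slow copy pass: works on handles, may allocate in old space
// for large messages, and checks into safepoints while copying.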
1464class SlowObjectCopyBase : public ObjectCopyBase {
1465 public:
1466  using Types = HandleTypes;
1467
1468 explicit SlowObjectCopyBase(Thread* thread, IdentityMap* map)
1469 : ObjectCopyBase(thread), slow_forward_map_(thread, map) {}
1470
1471 protected:
1472 DART_FORCE_INLINE
1474 const Object& dst,
1475 intptr_t offset,
1476 intptr_t end_offset) {
1477 for (; offset < end_offset; offset += kCompressedWordSize) {
1479 }
1480 }
1481
1482 DART_FORCE_INLINE
1484 const Object& dst,
1485 intptr_t offset,
1486 intptr_t end_offset,
1488 intptr_t bit = offset >> kCompressedWordSizeLog2;
1489 for (; offset < end_offset; offset += kCompressedWordSize) {
1490 if (bitmap.Get(bit++)) {
1492 dst.ptr(), offset, LoadCompressedNonPointerWord(src.ptr(), offset));
1493 } else {
1495 }
1496 }
1497 }
1498
1499 void ForwardCompressedArrayPointers(intptr_t array_length,
1500 const Object& src,
1501 const Object& dst,
1502 intptr_t offset,
1503 intptr_t end_offset) {
1504 if (Array::UseCardMarkingForAllocation(array_length)) {
1505 for (; offset < end_offset; offset += kCompressedWordSize) {
1508 }
1509 } else {
1510 for (; offset < end_offset; offset += kCompressedWordSize) {
1512 }
1513 }
1514 }
1515
1516 void ForwardCompressedContextPointers(intptr_t context_length,
1517 const Object& src,
1518 const Object& dst,
1519 intptr_t offset,
1520 intptr_t end_offset) {
1521 for (; offset < end_offset; offset += kCompressedWordSize) {
1523 }
1524 }
1525
1526 DART_FORCE_INLINE
1528 const Object& dst,
1529 intptr_t offset) {
1530 auto value = LoadCompressedPointer(src.ptr(), offset);
1531 if (!value.IsHeapObject()) {
1533 return;
1534 }
1535
1536 auto value_decompressed = value.Decompress(heap_base_);
1537 const uword tags = TagsFromUntaggedObject(value_decompressed.untag());
1538 if (CanShareObject(value_decompressed, tags)) {
1540 value_decompressed);
1541 return;
1542 }
1543
1544 ObjectPtr existing_to =
1545 slow_forward_map_.ForwardedObject(value_decompressed);
1546 if (existing_to != Marker()) {
1547 StoreCompressedLargeArrayPointerBarrier(dst.ptr(), offset, existing_to);
1548 return;
1549 }
1550
1551 if (UNLIKELY(!CanCopyObject(tags, value_decompressed))) {
1552 ASSERT(exception_msg_ != nullptr);
1554 Object::null());
1555 return;
1556 }
1557
1558 tmp_ = value_decompressed;
1559 tmp_ = Forward(tags, tmp_); // Only this can cause allocation.
1561 }
1562 DART_FORCE_INLINE
1564 const Object& dst,
1565 intptr_t offset) {
1566 auto value = LoadCompressedPointer(src.ptr(), offset);
1567 if (!value.IsHeapObject()) {
1569 return;
1570 }
1571 auto value_decompressed = value.Decompress(heap_base_);
1572 const uword tags = TagsFromUntaggedObject(value_decompressed.untag());
1573 if (CanShareObject(value_decompressed, tags)) {
1574 StoreCompressedPointerBarrier(dst.ptr(), offset, value_decompressed);
1575 return;
1576 }
1577
1578 ObjectPtr existing_to =
1579 slow_forward_map_.ForwardedObject(value_decompressed);
1580 if (existing_to != Marker()) {
1581 StoreCompressedPointerBarrier(dst.ptr(), offset, existing_to);
1582 return;
1583 }
1584
1585 if (UNLIKELY(!CanCopyObject(tags, value_decompressed))) {
1586 ASSERT(exception_msg_ != nullptr);
1588 return;
1589 }
1590
1591 tmp_ = value_decompressed;
1592 tmp_ = Forward(tags, tmp_); // Only this can cause allocation.
1594 }
1595
1596 ObjectPtr Forward(uword tags, const Object& from) {
1597 const intptr_t cid = UntaggedObject::ClassIdTag::decode(tags);
1598 intptr_t size = UntaggedObject::SizeTag::decode(tags);
1599 if (size == 0) {
1600 size = from.ptr().untag()->HeapSize();
1601 }
1602 to_ = AllocateObject(cid, size, slow_forward_map_.allocated_bytes);
1603 UpdateLengthField(cid, from.ptr(), to_.ptr());
1604 slow_forward_map_.Insert(from, to_, size);
1605 ObjectPtr to = to_.ptr();
1606 if ((cid == kArrayCid || cid == kImmutableArrayCid) &&
1607 !IsAllocatableInNewSpace(size)) {
1609 }
1611 const auto& external_to = slow_forward_map_.AddExternalTypedData(
1614 thread_, cid, ExternalTypedData::Cast(from), external_to);
1615 return external_to.ptr();
1616 } else if (IsTypedDataViewClassId(cid) ||
1618 // We set the views backing store to `null` to satisfy an assertion in
1619 // GCCompactor::VisitTypedDataViewPointers().
1621 }
1622 return to;
1623 }
1640
1641 void StoreCompressedArrayPointers(intptr_t array_length,
1642 const Object& src,
1643 const Object& dst,
1644 intptr_t offset,
1645 intptr_t end_offset) {
1646 auto src_ptr = src.ptr();
1647 auto dst_ptr = dst.ptr();
1648 if (Array::UseCardMarkingForAllocation(array_length)) {
1649 for (; offset <= end_offset; offset += kCompressedWordSize) {
1651 dst_ptr, offset,
1652 LoadCompressedPointer(src_ptr, offset).Decompress(heap_base_));
1653 }
1654 } else {
1655 for (; offset <= end_offset; offset += kCompressedWordSize) {
1657 dst_ptr, offset,
1658 LoadCompressedPointer(src_ptr, offset).Decompress(heap_base_));
1659 }
1660 }
1661 }
1663 const Object& dst,
1664 intptr_t offset,
1665 intptr_t end_offset) {
1666 auto src_ptr = src.ptr();
1667 auto dst_ptr = dst.ptr();
1668 for (; offset <= end_offset; offset += kCompressedWordSize) {
1670 dst_ptr, offset,
1671 LoadCompressedPointer(src_ptr, offset).Decompress(heap_base_));
1672 }
1673 }
1675 const Object& dst,
1676 intptr_t offset,
1677 intptr_t end_offset) {
1678 auto src_ptr = src.ptr();
1679 auto dst_ptr = dst.ptr();
1680 for (; offset <= end_offset; offset += kCompressedWordSize) {
1682 LoadCompressedPointer(src_ptr, offset));
1683 }
1684 }
1685
1686 protected:
1687 friend class ObjectGraphCopier;
1688
1690};
1691
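// Per-class copy routines shared by both passes; Base is one of
// FastObjectCopyBase or SlowObjectCopyBase.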
1692template <typename Base>
1693class ObjectCopy : public Base {
1694 public:
1695 using Types = typename Base::Types;
1696
1697 ObjectCopy(Thread* thread, IdentityMap* map) : Base(thread, map) {}
1698
1699 void CopyPredefinedInstance(typename Types::Object from,
1700 typename Types::Object to,
1701 intptr_t cid) {
1704 return;
1705 }
1706 switch (cid) {
1707#define COPY_TO(clazz) \
1708 case clazz::kClassId: { \
1709 typename Types::clazz casted_from = Types::Cast##clazz(from); \
1710 typename Types::clazz casted_to = Types::Cast##clazz(to); \
1711 Copy##clazz(casted_from, casted_to); \
1712 return; \
1713 }
1714
1716 COPY_TO(Array)
1718 COPY_TO(Map)
1719 COPY_TO(Set)
1720#undef COPY_TO
1721
1722      case kImmutableArrayCid: {
1723        typename Types::Array casted_from = Types::CastArray(from);
1724 typename Types::Array casted_to = Types::CastArray(to);
1725 CopyArray(casted_from, casted_to);
1726 return;
1727 }
1728
1729#define COPY_TO(clazz) case kTypedData##clazz##Cid:
1730
1732 typename Types::TypedData casted_from = Types::CastTypedData(from);
1733 typename Types::TypedData casted_to = Types::CastTypedData(to);
1734 CopyTypedData(casted_from, casted_to);
1735 return;
1736 }
1737#undef COPY_TO
1738
1739 case kByteDataViewCid:
1741#define COPY_TO(clazz) \
1742 case kTypedData##clazz##ViewCid: \
1743 case kUnmodifiableTypedData##clazz##ViewCid:
1745 typename Types::TypedDataView casted_from =
1746 Types::CastTypedDataView(from);
1747 typename Types::TypedDataView casted_to =
1748 Types::CastTypedDataView(to);
1749 CopyTypedDataView(casted_from, casted_to);
1750 return;
1751 }
1752#undef COPY_TO
1753
1754#define COPY_TO(clazz) case kExternalTypedData##clazz##Cid:
1755
1757 typename Types::ExternalTypedData casted_from =
1758 Types::CastExternalTypedData(from);
1759 typename Types::ExternalTypedData casted_to =
1760 Types::CastExternalTypedData(to);
1761 CopyExternalTypedData(casted_from, casted_to);
1762 return;
1763 }
1764#undef COPY_TO
1765 default:
1766 break;
1767 }
1768
1769 const Object& obj = Types::HandlifyObject(from);
1770 FATAL("Unexpected object: %s\n", obj.ToCString());
1771 }
1772
1773 void CopyUserdefinedInstance(typename Types::Object from,
1774 typename Types::Object to,
1775                               UnboxedFieldBitmap bitmap) {
1776    const intptr_t instance_size = UntagObject(from)->HeapSize();
1777 Base::ForwardCompressedPointers(from, to, kWordSize, instance_size, bitmap);
1778 }
1779
1780 void CopyUserdefinedInstanceWithoutUnboxedFields(typename Types::Object from,
1781 typename Types::Object to) {
1782 const intptr_t instance_size = UntagObject(from)->HeapSize();
1783 Base::ForwardCompressedPointers(from, to, kWordSize, instance_size);
1784 }
1785 void CopyClosure(typename Types::Closure from, typename Types::Closure to) {
1786 Base::StoreCompressedPointers(
1787 from, to, OFFSET_OF(UntaggedClosure, instantiator_type_arguments_),
1788 OFFSET_OF(UntaggedClosure, function_));
1789 Base::ForwardCompressedPointer(from, to,
1790 OFFSET_OF(UntaggedClosure, context_));
1791 Base::StoreCompressedPointersNoBarrier(from, to,
1792 OFFSET_OF(UntaggedClosure, hash_),
1793 OFFSET_OF(UntaggedClosure, hash_));
1794 ONLY_IN_PRECOMPILED(UntagClosure(to)->entry_point_ =
1795 UntagClosure(from)->entry_point_);
1796 }
1797
1798 void CopyContext(typename Types::Context from, typename Types::Context to) {
1799 const intptr_t length = Context::NumVariables(Types::GetContextPtr(from));
1800
1801 UntagContext(to)->num_variables_ = UntagContext(from)->num_variables_;
1802
1803 Base::ForwardCompressedPointer(from, to,
1804 OFFSET_OF(UntaggedContext, parent_));
1805 Base::ForwardCompressedContextPointers(
1806 length, from, to, Context::variable_offset(0),
1808 }
1809
1810 void CopyArray(typename Types::Array from, typename Types::Array to) {
1811 const intptr_t length = Smi::Value(UntagArray(from)->length());
1812 Base::StoreCompressedArrayPointers(
1813 length, from, to, OFFSET_OF(UntaggedArray, type_arguments_),
1814 OFFSET_OF(UntaggedArray, type_arguments_));
1815 Base::StoreCompressedPointersNoBarrier(from, to,
1816 OFFSET_OF(UntaggedArray, length_),
1817 OFFSET_OF(UntaggedArray, length_));
1818 Base::ForwardCompressedArrayPointers(
1819 length, from, to, Array::data_offset(),
1821 }
1822
1823 void CopyGrowableObjectArray(typename Types::GrowableObjectArray from,
1824 typename Types::GrowableObjectArray to) {
1825 Base::StoreCompressedPointers(
1826 from, to, OFFSET_OF(UntaggedGrowableObjectArray, type_arguments_),
1827 OFFSET_OF(UntaggedGrowableObjectArray, type_arguments_));
1828 Base::StoreCompressedPointersNoBarrier(
1829 from, to, OFFSET_OF(UntaggedGrowableObjectArray, length_),
1831 Base::ForwardCompressedPointer(
1832 from, to, OFFSET_OF(UntaggedGrowableObjectArray, data_));
1833 }
1834
1835 void CopyRecord(typename Types::Record from, typename Types::Record to) {
1836 const intptr_t num_fields = Record::NumFields(Types::GetRecordPtr(from));
1837 Base::StoreCompressedPointersNoBarrier(from, to,
1838 OFFSET_OF(UntaggedRecord, shape_),
1839 OFFSET_OF(UntaggedRecord, shape_));
1840 Base::ForwardCompressedPointers(
1841 from, to, Record::field_offset(0),
1843 }
1844
1845 template <intptr_t one_for_set_two_for_map, typename T>
1847 T to,
1848 UntaggedLinkedHashBase* from_untagged,
1849 UntaggedLinkedHashBase* to_untagged) {
1850 // We have to find out whether the map needs re-hashing on the receiver side
1851 // due to keys being copied and the keys therefore possibly having different
1852 // hash codes (e.g. due to user-defined hashCode implementation or due to
1853 // new identity hash codes of the copied objects).
1854 bool needs_rehashing = false;
1855 ArrayPtr data = from_untagged->data_.Decompress(Base::heap_base_);
1856 if (data != Array::null()) {
1857 UntaggedArray* untagged_data = data.untag();
1858 const intptr_t length = Smi::Value(untagged_data->length_);
1859 auto key_value_pairs = untagged_data->data();
1860 for (intptr_t i = 0; i < length; i += one_for_set_two_for_map) {
1861 ObjectPtr key = key_value_pairs[i].Decompress(Base::heap_base_);
1862 const bool is_deleted_entry = key == data;
1863 if (key->IsHeapObject()) {
1864 if (!is_deleted_entry && MightNeedReHashing(key)) {
1865 needs_rehashing = true;
1866 break;
1867 }
1868 }
1869 }
1870 }
1871
1872 Base::StoreCompressedPointers(
1873 from, to, OFFSET_OF(UntaggedLinkedHashBase, type_arguments_),
1874 OFFSET_OF(UntaggedLinkedHashBase, type_arguments_));
1875
1876 // Compared with the snapshot-based (de)serializer we do preserve the same
1877 // backing store (i.e. used_data/deleted_keys/data) and therefore do not
1878 // magically shrink backing store based on usage.
1879 //
1880 // We do this to avoid making assumptions about the object graph and the
1881    // linked hash map (e.g. assuming there are no other references to the data,
1882 // assuming the linked hashmap is in a consistent state)
1883 if (needs_rehashing) {
1884 to_untagged->hash_mask_ = Smi::New(0);
1885 to_untagged->index_ = TypedData::RawCast(Object::null());
1886 to_untagged->deleted_keys_ = Smi::New(0);
1887 }
1888
1889 // From this point on we shouldn't use the raw pointers, since GC might
1890 // happen when forwarding objects.
1891 from_untagged = nullptr;
1892 to_untagged = nullptr;
1893
1894 if (!needs_rehashing) {
1895      Base::ForwardCompressedPointer(from, to,
1896                                     OFFSET_OF(UntaggedLinkedHashBase, index_));
1897      Base::StoreCompressedPointersNoBarrier(
1898 from, to, OFFSET_OF(UntaggedLinkedHashBase, hash_mask_),
1899 OFFSET_OF(UntaggedLinkedHashBase, hash_mask_));
1900 Base::StoreCompressedPointersNoBarrier(
1901 from, to, OFFSET_OF(UntaggedMap, deleted_keys_),
1902 OFFSET_OF(UntaggedMap, deleted_keys_));
1903 }
1904    Base::ForwardCompressedPointer(from, to,
1905                                   OFFSET_OF(UntaggedLinkedHashBase, data_));
1906    Base::StoreCompressedPointersNoBarrier(
1907 from, to, OFFSET_OF(UntaggedLinkedHashBase, used_data_),
1908 OFFSET_OF(UntaggedLinkedHashBase, used_data_));
1909
1910 if (Base::exception_msg_ == nullptr && needs_rehashing) {
1911 Base::EnqueueObjectToRehash(to);
1912 }
1913 }
1914
1915 void CopyMap(typename Types::Map from, typename Types::Map to) {
1916 CopyLinkedHashBase<2, typename Types::Map>(from, to, UntagMap(from),
1917 UntagMap(to));
1918 }
1919
1920 void CopySet(typename Types::Set from, typename Types::Set to) {
1921 CopyLinkedHashBase<1, typename Types::Set>(from, to, UntagSet(from),
1922 UntagSet(to));
1923 }
1924
1925 void CopyDouble(typename Types::Double from, typename Types::Double to) {
1926#if !defined(DART_PRECOMPILED_RUNTIME)
1927 auto raw_from = UntagDouble(from);
1928 auto raw_to = UntagDouble(to);
1929 raw_to->value_ = raw_from->value_;
1930#else
1931 // Will be shared and not copied.
1932 UNREACHABLE();
1933#endif
1934 }
1935
1936 void CopyFloat32x4(typename Types::Float32x4 from,
1937 typename Types::Float32x4 to) {
1938#if !defined(DART_PRECOMPILED_RUNTIME)
1939 auto raw_from = UntagFloat32x4(from);
1940 auto raw_to = UntagFloat32x4(to);
1941 raw_to->value_[0] = raw_from->value_[0];
1942 raw_to->value_[1] = raw_from->value_[1];
1943 raw_to->value_[2] = raw_from->value_[2];
1944 raw_to->value_[3] = raw_from->value_[3];
1945#else
1946 // Will be shared and not copied.
1947 UNREACHABLE();
1948#endif
1949 }
1950
1951 void CopyFloat64x2(typename Types::Float64x2 from,
1952 typename Types::Float64x2 to) {
1953#if !defined(DART_PRECOMPILED_RUNTIME)
1954 auto raw_from = UntagFloat64x2(from);
1955 auto raw_to = UntagFloat64x2(to);
1956 raw_to->value_[0] = raw_from->value_[0];
1957 raw_to->value_[1] = raw_from->value_[1];
1958#else
1959 // Will be shared and not copied.
1960 UNREACHABLE();
1961#endif
1962 }
1963
1964 void CopyTypedData(TypedDataPtr from, TypedDataPtr to) {
1965 auto raw_from = from.untag();
1966 auto raw_to = to.untag();
1967 const intptr_t cid = Types::GetTypedDataPtr(from)->GetClassId();
1968 raw_to->length_ = raw_from->length_;
1969 raw_to->RecomputeDataField();
1970 const intptr_t length =
1971 TypedData::ElementSizeInBytes(cid) * Smi::Value(raw_from->length_);
1972 memmove(raw_to->data_, raw_from->data_, length);
1973 }
1974
1975 void CopyTypedData(const TypedData& from, const TypedData& to) {
1976 auto raw_from = from.ptr().untag();
1977 auto raw_to = to.ptr().untag();
1978 const intptr_t cid = Types::GetTypedDataPtr(from)->GetClassId();
1979 ASSERT(raw_to->length_ == raw_from->length_);
1980 raw_to->RecomputeDataField();
1981 const intptr_t length =
1982 TypedData::ElementSizeInBytes(cid) * Smi::Value(raw_from->length_);
1983 CopyTypedDataBaseWithSafepointChecks(Base::thread_, from, to, length);
1984 }
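// Editor's note: a standalone sketch (hypothetical names) of the payload copy
// in the two CopyTypedData variants above. The byte count is the per-element
// size for the class id times the element count, moved with a single memmove.
#include <cstddef>
#include <cstring>

void CopyTypedDataPayload(void* to_data, const void* from_data,
                          size_t element_size_in_bytes, size_t length) {
  std::memmove(to_data, from_data, element_size_in_bytes * length);
}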
1985
1986 void CopyTypedDataView(typename Types::TypedDataView from,
1987 typename Types::TypedDataView to) {
1988 // This will forward & initialize the typed data.
1989 Base::ForwardCompressedPointer(
1990 from, to, OFFSET_OF(UntaggedTypedDataView, typed_data_));
1991
1992 auto raw_from = UntagTypedDataView(from);
1993 auto raw_to = UntagTypedDataView(to);
1994 raw_to->length_ = raw_from->length_;
1995 raw_to->offset_in_bytes_ = raw_from->offset_in_bytes_;
1996 raw_to->data_ = nullptr;
1997
1998 auto forwarded_backing_store =
1999 raw_to->typed_data_.Decompress(Base::heap_base_);
2000 if (forwarded_backing_store == Marker() ||
2001 forwarded_backing_store == Object::null()) {
2002 // Ensure the backing store is never "sentinel" - the scavenger doesn't
2003 // like it.
2004 Base::StoreCompressedPointerNoBarrier(
2005 Types::GetTypedDataViewPtr(to),
2006 OFFSET_OF(UntaggedTypedDataView, typed_data_), Object::null());
2007 raw_to->length_ = Smi::New(0);
2008 raw_to->offset_in_bytes_ = Smi::New(0);
2009 ASSERT(Base::exception_msg_ != nullptr);
2010 return;
2011 }
2012
2013 const bool is_external =
2014 raw_from->data_ != raw_from->DataFieldForInternalTypedData();
2015 if (is_external) {
2016 // The raw_to is fully initialized at this point (see handling of external
2017 // typed data in [ForwardCompressedPointer])
2018 raw_to->RecomputeDataField();
2019 } else {
2020 // The raw_to isn't initialized yet, but its address is valid, so we can
2021 // compute the data field it would use.
2022 raw_to->RecomputeDataFieldForInternalTypedData();
2023 }
2024 const bool is_external2 =
2025 raw_to->data_ != raw_to->DataFieldForInternalTypedData();
2026 ASSERT(is_external == is_external2);
2027 }
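// Editor's note: a sketch (hypothetical types, not VM code) of the pointer
// fix-up done in CopyTypedDataView above. A view caches an absolute data
// pointer, which after copying must be recomputed against the *copied*
// backing store: view.data = backing_store_data + view.offset_in_bytes.
#include <cstddef>
#include <cstdint>

struct FakeBackingStore { uint8_t* data; };
struct FakeView {
  FakeBackingStore* backing;
  size_t offset_in_bytes;
  uint8_t* data;  // cached interior pointer into `backing`
};

void RecomputeFakeViewData(FakeView* view) {
  view->data = view->backing->data + view->offset_in_bytes;
}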
2028
2029 void CopyExternalTypedData(typename Types::ExternalTypedData from,
2030 typename Types::ExternalTypedData to) {
2031 // The external typed data is initialized on the forwarding pass (where
2032 // normally only allocation, not initialization, happens), so views on it
2033 // can be initialized immediately.
2034#if defined(DEBUG)
2035 auto raw_from = UntagExternalTypedData(from);
2036 auto raw_to = UntagExternalTypedData(to);
2037 ASSERT(raw_to->data_ != nullptr);
2038 ASSERT(raw_to->length_ == raw_from->length_);
2039#endif
2040 }
2041
2042 void CopyTransferableTypedData(typename Types::TransferableTypedData from,
2043 typename Types::TransferableTypedData to) {
2044 // The [TransferableTypedData] is an empty object with an associated heap
2045 // peer object.
2046 // -> We'll validate that there's a peer and enqueue the transferable to be
2047 // transferred if the transitive copy is successful.
2048 auto fpeer = static_cast<TransferableTypedDataPeer*>(
2049 Base::heap_->GetPeer(Types::GetTransferableTypedDataPtr(from)));
2050 ASSERT(fpeer != nullptr);
2051 if (fpeer->data() == nullptr) {
2052 Base::exception_msg_ =
2053 "Illegal argument in isolate message"
2054 " : (TransferableTypedData has been transferred already)";
2055 Base::exception_unexpected_object_ =
2056 Types::GetTransferableTypedDataPtr(from);
2057 return;
2058 }
2059 Base::EnqueueTransferable(from, to);
2060 }
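// Editor's note: a sketch (hypothetical types) of the validation above. A
// transferable's bytes live in a heap "peer" object; once transferred, the
// peer's data pointer is cleared, so sending the same transferable twice must
// be reported as an error rather than copied.
#include <cstdint>

struct FakeTransferablePeer { uint8_t* data; };

const char* ValidateTransferable(const FakeTransferablePeer& peer) {
  if (peer.data == nullptr) {
    return "TransferableTypedData has been transferred already";
  }
  return nullptr;  // ok to enqueue for transfer
}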
2061
2062 void CopyWeakProperty(typename Types::WeakProperty from,
2063 typename Types::WeakProperty to) {
2064 // We store `null`s as keys/values and let the main algorithm know that
2065 // we should check reachability of the key again after the fixpoint (if it
2066 // became reachable, forward the key/value).
2067 Base::StoreCompressedPointerNoBarrier(Types::GetWeakPropertyPtr(to),
2068 OFFSET_OF(UntaggedWeakProperty, key_),
2069 Object::null());
2070 Base::StoreCompressedPointerNoBarrier(
2071 Types::GetWeakPropertyPtr(to), OFFSET_OF(UntaggedWeakProperty, value_),
2072 Object::null());
2073 // To satisfy some ASSERT()s in GC we'll use Object::null() explicitly here.
2074 Base::StoreCompressedPointerNoBarrier(
2075 Types::GetWeakPropertyPtr(to),
2076 OFFSET_OF(UntaggedWeakProperty, next_seen_by_gc_), Object::null());
2077 Base::EnqueueWeakProperty(from);
2078 }
2079
2080 void CopyWeakReference(typename Types::WeakReference from,
2081 typename Types::WeakReference to) {
2082 // We store `null` as target and let the main algorithm know that
2083 // we should check reachability of the target again after the fixpoint (if
2084 // it became reachable, forward the target).
2085 Base::StoreCompressedPointerNoBarrier(
2086 Types::GetWeakReferencePtr(to),
2087 OFFSET_OF(UntaggedWeakReference, target_), Object::null());
2088 // Type argument should always be copied.
2089 Base::ForwardCompressedPointer(
2090 from, to, OFFSET_OF(UntaggedWeakReference, type_arguments_));
2091 // To satisfy some ASSERT()s in GC we'll use Object::null() explicitly here.
2092 Base::StoreCompressedPointerNoBarrier(
2093 Types::GetWeakReferencePtr(to),
2094 OFFSET_OF(UntaggedWeakReference, next_seen_by_gc_), Object::null());
2095 Base::EnqueueWeakReference(from);
2096 }
2097
2098 // clang-format off
2099#define DEFINE_UNSUPPORTED(clazz) \
2100 void Copy##clazz(typename Types::clazz from, typename Types::clazz to) { \
2101 FATAL("Objects of type " #clazz " should not occur in object graphs"); \
2102 }
2103
2104 FOR_UNSUPPORTED_CLASSES(DEFINE_UNSUPPORTED)
2105
2106#undef DEFINE_UNSUPPORTED
2107 // clang-format on
2108
2109 UntaggedObject* UntagObject(typename Types::Object obj) {
2110 return Types::GetObjectPtr(obj).Decompress(Base::heap_base_).untag();
2111 }
2112
2113#define DO(V) \
2114 DART_FORCE_INLINE \
2115 Untagged##V* Untag##V(typename Types::V obj) { \
2116 return Types::Get##V##Ptr(obj).Decompress(Base::heap_base_).untag(); \
2117 }
2118 FOR_UNSUPPORTED_CLASSES(DO)
2119#undef DO
2120};
2121
2122class FastObjectCopy : public ObjectCopy<FastObjectCopyBase> {
2123 public:
2124 FastObjectCopy(Thread* thread, IdentityMap* map) : ObjectCopy(thread, map) {}
2125 ~FastObjectCopy() {}
2126
2127 ObjectPtr TryCopyGraphFast(ObjectPtr root) {
2128 NoSafepointScope no_safepoint_scope;
2129
2130 ObjectPtr root_copy = Forward(TagsFromUntaggedObject(root.untag()), root);
2131 if (root_copy == Marker()) {
2132 return root_copy;
2133 }
2134 auto& from_weak_property = WeakProperty::Handle(zone_);
2135 auto& to_weak_property = WeakProperty::Handle(zone_);
2136 auto& weak_property_key = Object::Handle(zone_);
2137 while (true) {
2138 if (fast_forward_map_.fill_cursor_ ==
2139 fast_forward_map_.raw_from_to_.length()) {
2140 break;
2141 }
2142
2143 // Run fixpoint to copy all objects.
2144 while (fast_forward_map_.fill_cursor_ <
2145 fast_forward_map_.raw_from_to_.length()) {
2146 const intptr_t index = fast_forward_map_.fill_cursor_;
2147 ObjectPtr from = fast_forward_map_.raw_from_to_[index];
2148 ObjectPtr to = fast_forward_map_.raw_from_to_[index + 1];
2149 FastCopyObject(from, to);
2150 if (exception_msg_ != nullptr) {
2151 return root_copy;
2152 }
2153 fast_forward_map_.fill_cursor_ += 2;
2154
2155 // To maintain responsiveness we regularly check whether safepoints are
2156 // requested - if so, we bail to the slow path, which will then check in.
2157 if (UNLIKELY(thread_->IsSafepointRequested())) {
2158 exception_msg_ = kFastAllocationFailed;
2159 return root_copy;
2160 }
2161 }
2162
2163 // Possibly forward values of [WeakProperty]s if keys became reachable.
2164 intptr_t i = 0;
2165 auto& weak_properties = fast_forward_map_.raw_weak_properties_;
2166 while (i < weak_properties.length()) {
2167 from_weak_property = weak_properties[i];
2168 weak_property_key =
2169 fast_forward_map_.ForwardedObject(from_weak_property.key());
2170 if (weak_property_key.ptr() != Marker()) {
2171 to_weak_property ^=
2172 fast_forward_map_.ForwardedObject(from_weak_property.ptr());
2173
2174 // The key became reachable so we'll change the forwarded
2175 // [WeakProperty]'s key to the new key (it is `null` at this point).
2176 to_weak_property.set_key(weak_property_key);
2177
2178 // Since the key has become strongly reachable in the copied graph,
2179 // we'll also need to forward the value.
2180 ForwardCompressedPointer(from_weak_property.ptr(),
2181 to_weak_property.ptr(),
2182 OFFSET_OF(UntaggedWeakProperty, value_));
2183
2184 // We don't need to process this [WeakProperty] again.
2185 const intptr_t last = weak_properties.length() - 1;
2186 if (i < last) {
2187 weak_properties[i] = weak_properties[last];
2188 weak_properties.SetLength(last);
2189 continue;
2190 }
2191 }
2192 i++;
2193 }
2194 }
2195 // After the fix point with [WeakProperty]s do [WeakReference]s.
2196 auto& from_weak_reference = WeakReference::Handle(zone_);
2197 auto& to_weak_reference = WeakReference::Handle(zone_);
2198 auto& weak_reference_target = Object::Handle(zone_);
2199 auto& weak_references = fast_forward_map_.raw_weak_references_;
2200 for (intptr_t i = 0; i < weak_references.length(); i++) {
2201 from_weak_reference = weak_references[i];
2202 weak_reference_target =
2203 fast_forward_map_.ForwardedObject(from_weak_reference.target());
2204 if (weak_reference_target.ptr() != Marker()) {
2205 to_weak_reference ^=
2206 fast_forward_map_.ForwardedObject(from_weak_reference.ptr());
2207
2208 // The target became reachable so we'll change the forwarded
2209 // [WeakReference]'s target to the new target (it is `null` at this
2210 // point).
2211 to_weak_reference.set_target(weak_reference_target);
2212 }
2213 }
2214 if (root_copy != Marker()) {
2215 ObjectPtr array;
2216 array = TryBuildArrayOfObjectsToRehash(
2217 fast_forward_map_.raw_objects_to_rehash_);
2218 if (array == Marker()) return root_copy;
2219 raw_objects_to_rehash_ = Array::RawCast(array);
2220
2221 array = TryBuildArrayOfObjectsToRehash(
2222 fast_forward_map_.raw_expandos_to_rehash_);
2223 if (array == Marker()) return root_copy;
2224 raw_expandos_to_rehash_ = Array::RawCast(array);
2225 }
2226 return root_copy;
2227 }
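// Editor's note: a sketch (STL containers, hypothetical names, not VM code)
// of the weak-property handling in the fixpoint above. Weak entries are set
// aside; after each round of copying, entries whose key is now present in the
// forwarding map get their value forwarded too, which can make further
// objects reachable, hence the enclosing while (true) loop in
// TryCopyGraphFast.
#include <unordered_map>
#include <vector>

struct FakeWeakEntry { int key; int value; };

// Returns the entries whose keys became reachable; the caller would then
// forward their values and run another round.
std::vector<FakeWeakEntry> TakeResolvableWeakEntries(
    std::vector<FakeWeakEntry>* pending,
    const std::unordered_map<int, int>& forwarded /* old key -> new key */) {
  std::vector<FakeWeakEntry> resolvable;
  for (size_t i = 0; i < pending->size();) {
    const FakeWeakEntry entry = (*pending)[i];
    if (forwarded.count(entry.key) != 0) {
      resolvable.push_back(entry);
      // Swap-remove, mirroring the "move last element into slot i" trick.
      (*pending)[i] = pending->back();
      pending->pop_back();
    } else {
      ++i;
    }
  }
  return resolvable;
}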
2228
2229 ObjectPtr TryBuildArrayOfObjectsToRehash(
2230 const GrowableArray<ObjectPtr>& objects_to_rehash) {
2231 const intptr_t length = objects_to_rehash.length();
2232 if (length == 0) return Object::null();
2233
2234 const intptr_t size = Array::InstanceSize(length);
2235 const uword array_addr = new_space_->TryAllocateNoSafepoint(thread_, size);
2236 if (array_addr == 0) {
2237 exception_msg_ = kFastAllocationFailed;
2238 return Marker();
2239 }
2240
2241 const uword header_size =
2242 UntaggedObject::SizeTag::SizeFits(size) ? size : 0;
2243 ArrayPtr array(reinterpret_cast<UntaggedArray*>(array_addr));
2244 SetNewSpaceTaggingWord(array, kArrayCid, header_size);
2245 StoreCompressedPointerNoBarrier(array, OFFSET_OF(UntaggedArray, length_),
2246 Smi::New(length));
2247 StoreCompressedPointerNoBarrier(array,
2248 OFFSET_OF(UntaggedArray, type_arguments_),
2249 TypeArguments::null());
2250 auto array_data = array.untag()->data();
2251 for (intptr_t i = 0; i < length; ++i) {
2252 array_data[i] = objects_to_rehash[i];
2253 }
2254 return array;
2255 }
2256
2257 private:
2258 friend class ObjectGraphCopier;
2259
2260 void FastCopyObject(ObjectPtr from, ObjectPtr to) {
2261 const uword tags = TagsFromUntaggedObject(from.untag());
2262 const intptr_t cid = UntaggedObject::ClassIdTag::decode(tags);
2263 const intptr_t size = UntaggedObject::SizeTag::decode(tags);
2264
2265 // Ensure the last word is GC-safe (our heap objects are 2-word aligned, the
2266 // object header stores the size in multiples of kObjectAlignment, the GC
2267 // uses the information from the header and therefore might visit one slot
2268 // more than the actual size of the instance).
2269 *reinterpret_cast<ObjectPtr*>(UntaggedObject::ToAddr(to) +
2270 from.untag()->HeapSize() - kWordSize) =
2271 nullptr;
2272 SetNewSpaceTaggingWord(to, cid, size);
2273
2274 // Fall back to virtual variant for predefined classes
2275 if (cid < kNumPredefinedCids && cid != kInstanceCid) {
2276 CopyPredefinedInstance(from, to, cid);
2277 return;
2278 }
2279 const auto bitmap = class_table_->GetUnboxedFieldsMapAt(cid);
2280 CopyUserdefinedInstance(from, to,
2281 bitmap);
2282 if (cid == expando_cid_) {
2283 EnqueueExpandoToRehash(to);
2284 }
2285 }
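// Editor's note: a sketch (hypothetical constants, not VM code) of why the
// trailing word is pre-nulled in FastCopyObject above. Heap sizes are rounded
// up to the object alignment, so a GC visitor may scan one slot beyond the
// instance's fields; writing zero into the final word of the rounded size
// keeps that slot well-formed.
#include <cstddef>
#include <cstdint>
#include <cstring>

constexpr size_t kFakeObjectAlignment = 2 * sizeof(void*);

size_t RoundUpToAlignment(size_t instance_size) {
  return (instance_size + kFakeObjectAlignment - 1) &
         ~(kFakeObjectAlignment - 1);
}

void MakeTrailingWordSafe(uint8_t* object_start, size_t instance_size) {
  const size_t heap_size = RoundUpToAlignment(instance_size);
  std::memset(object_start + heap_size - sizeof(void*), 0, sizeof(void*));
}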
2286
2287 ArrayPtr raw_objects_to_rehash_ = Array::null();
2288 ArrayPtr raw_expandos_to_rehash_ = Array::null();
2289};
2290
2291class SlowObjectCopy : public ObjectCopy<SlowObjectCopyBase> {
2292 public:
2293 SlowObjectCopy(Thread* thread, IdentityMap* map)
2294 : ObjectCopy(thread, map),
2295 objects_to_rehash_(Array::Handle(thread->zone())),
2296 expandos_to_rehash_(Array::Handle(thread->zone())) {}
2297 ~SlowObjectCopy() {}
2298
2299 ObjectPtr ContinueCopyGraphSlow(const Object& root,
2300 const Object& fast_root_copy) {
2301 auto& root_copy = Object::Handle(Z, fast_root_copy.ptr());
2302 if (root_copy.ptr() == Marker()) {
2303 root_copy = Forward(TagsFromUntaggedObject(root.ptr().untag()), root);
2304 }
2305
2306 WeakProperty& weak_property = WeakProperty::Handle(Z);
2307 Object& from = Object::Handle(Z);
2308 Object& to = Object::Handle(Z);
2309 while (true) {
2310 if (slow_forward_map_.fill_cursor_ ==
2311 slow_forward_map_.from_to_.Length()) {
2312 break;
2313 }
2314
2315 // Run fixpoint to copy all objects.
2316 while (slow_forward_map_.fill_cursor_ <
2317 slow_forward_map_.from_to_.Length()) {
2318 const intptr_t index = slow_forward_map_.fill_cursor_;
2319 from = slow_forward_map_.from_to_.At(index);
2320 to = slow_forward_map_.from_to_.At(index + 1);
2321 CopyObject(from, to);
2322 slow_forward_map_.fill_cursor_ += 2;
2323 if (exception_msg_ != nullptr) {
2324 return Marker();
2325 }
2326 // To maintain responsiveness we regularly check whether safepoints are
2327 // requested.
2328 thread_->CheckForSafepoint();
2329 }
2330
2331 // Possibly forward values of [WeakProperty]s if keys became reachable.
2332 intptr_t i = 0;
2333 auto& weak_properties = slow_forward_map_.weak_properties_;
2334 while (i < weak_properties.length()) {
2335 const auto& from_weak_property = *weak_properties[i];
2336 to = slow_forward_map_.ForwardedObject(from_weak_property.key());
2337 if (to.ptr() != Marker()) {
2338 weak_property ^=
2339 slow_forward_map_.ForwardedObject(from_weak_property.ptr());
2340
2341 // The key became reachable so we'll change the forwarded
2342 // [WeakProperty]'s key to the new key (it is `null` at this point).
2343 weak_property.set_key(to);
2344
2345 // Since the key has become strongly reachable in the copied graph,
2346 // we'll also need to forward the value.
2347 ForwardCompressedPointer(from_weak_property, weak_property,
2348 OFFSET_OF(UntaggedWeakProperty, value_));
2349
2350 // We don't need to process this [WeakProperty] again.
2351 const intptr_t last = weak_properties.length() - 1;
2352 if (i < last) {
2353 weak_properties[i] = weak_properties[last];
2354 weak_properties.SetLength(last);
2355 continue;
2356 }
2357 }
2358 i++;
2359 }
2360 }
2361
2362 // After the fix point with [WeakProperty]s do [WeakReference]s.
2363 WeakReference& weak_reference = WeakReference::Handle(Z);
2364 auto& weak_references = slow_forward_map_.weak_references_;
2365 for (intptr_t i = 0; i < weak_references.length(); i++) {
2366 const auto& from_weak_reference = *weak_references[i];
2367 to = slow_forward_map_.ForwardedObject(from_weak_reference.target());
2368 if (to.ptr() != Marker()) {
2369 weak_reference ^=
2370 slow_forward_map_.ForwardedObject(from_weak_reference.ptr());
2371
2372 // The target became reachable so we'll change the forwarded
2373 // [WeakReference]'s target to the new target (it is `null` at this
2374 // point).
2375 weak_reference.set_target(to);
2376 }
2377 }
2378
2379 objects_to_rehash_ =
2380 BuildArrayOfObjectsToRehash(slow_forward_map_.objects_to_rehash_);
2381 expandos_to_rehash_ =
2382 BuildArrayOfObjectsToRehash(slow_forward_map_.expandos_to_rehash_);
2383 return root_copy.ptr();
2384 }
2385
2386 ArrayPtr BuildArrayOfObjectsToRehash(
2387 const GrowableArray<const Object*>& objects_to_rehash) {
2388 const intptr_t length = objects_to_rehash.length();
2389 if (length == 0) return Array::null();
2390
2391 const auto& array = Array::Handle(zone_, Array::New(length));
2392 for (intptr_t i = 0; i < length; ++i) {
2393 array.SetAt(i, *objects_to_rehash[i]);
2394 }
2395 return array.ptr();
2396 }
2397
2398 private:
2399 friend class ObjectGraphCopier;
2400
2401 void CopyObject(const Object& from, const Object& to) {
2402 const auto cid = from.GetClassId();
2403
2404 // Fall back to virtual variant for predefined classes
2405 if (cid < kNumPredefinedCids && cid != kInstanceCid) {
2406 CopyPredefinedInstance(from, to, cid);
2407 return;
2408 }
2409 const auto bitmap = class_table_->GetUnboxedFieldsMapAt(cid);
2410 CopyUserdefinedInstance(from, to, bitmap);
2411 if (cid == expando_cid_) {
2412 EnqueueExpandoToRehash(to);
2413 }
2414 }
2415
2416 Array& objects_to_rehash_;
2417 Array& expandos_to_rehash_;
2418};
2419
2420 class ObjectGraphCopier : public StackResource {
2421 public:
2422 explicit ObjectGraphCopier(Thread* thread)
2423 : StackResource(thread),
2424 thread_(thread),
2425 zone_(thread->zone()),
2426 map_(thread),
2427 fast_object_copy_(thread_, &map_),
2428 slow_object_copy_(thread_, &map_) {}
2429
2430 // Result will be
2431 // [
2432 // <message>,
2433 // <collection-lib-objects-to-rehash>,
2434 // <core-lib-objects-to-rehash>,
2435 // ]
2436 ObjectPtr CopyObjectGraph(const Object& root) {
2437 const char* volatile exception_msg = nullptr;
2438 auto& result = Object::Handle(zone_);
2439
2440 {
2441 LongJumpScope jump; // e.g. for OOMs.
2442 if (setjmp(*jump.Set()) == 0) {
2443 result = CopyObjectGraphInternal(root, &exception_msg);
2444 // Any allocated external typed data must have finalizers attached so
2445 // memory will get free()ed.
2446 slow_object_copy_.slow_forward_map_.FinalizeExternalTypedData();
2447 } else {
2448 // Any allocated external typed data must have finalizers attached so
2449 // memory will get free()ed.
2450 slow_object_copy_.slow_forward_map_.FinalizeExternalTypedData();
2451
2452 // The copy failed due to a non-application error (e.g. an OOM error);
2453 // propagate that error.
2454 result = thread_->StealStickyError();
2455 RELEASE_ASSERT(result.IsError());
2456 }
2457 }
2458
2459 if (result.IsError()) {
2460 Exceptions::PropagateError(Error::Cast(result));
2461 UNREACHABLE();
2462 }
2463 ASSERT(result.IsArray());
2464 auto& result_array = Array::Cast(result);
2465 if (result_array.At(0) == Marker()) {
2466 ASSERT(exception_msg != nullptr);
2467 auto& unexpected_object_ = Object::Handle(zone_, result_array.At(1));
2468 if (!unexpected_object_.IsNull()) {
2469 exception_msg =
2470 OS::SCreate(zone_, "%s\n%s", exception_msg,
2471 FindRetainingPath(
2472 zone_, thread_->isolate(), root, unexpected_object_,
2473 TraversalRules::kInternalToIsolateGroup));
2474 }
2475 ThrowException(exception_msg);
2476 UNREACHABLE();
2477 }
2478
2479 // The copy was successful, so detach the transferable data from the
2480 // sender and attach it to the copied graph.
2481 slow_object_copy_.slow_forward_map_.FinalizeTransferables();
2482 return result.ptr();
2483 }
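// Editor's note: a sketch (hypothetical names) of how a caller could view the
// 3-element result array documented above: slot 0 carries the copied message
// (or a failure marker), slot 1 the collection-library objects (maps/sets)
// that still need rehashing, slot 2 the core-library expandos to rehash.
struct FakeCopyResult {
  void* message;            // result_array[0]
  void* collection_rehash;  // result_array[1]
  void* core_rehash;        // result_array[2]
};

FakeCopyResult UnpackCopyResult(void* const result_array[3]) {
  return FakeCopyResult{result_array[0], result_array[1], result_array[2]};
}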
2484
2485 intptr_t allocated_bytes() { return allocated_bytes_; }
2486
2487 intptr_t copied_objects() { return copied_objects_; }
2488
2489 private:
2490 ObjectPtr CopyObjectGraphInternal(const Object& root,
2491 const char* volatile* exception_msg) {
2492 const auto& result_array = Array::Handle(zone_, Array::New(3));
2493 if (!root.ptr()->IsHeapObject()) {
2494 result_array.SetAt(0, root);
2495 return result_array.ptr();
2496 }
2497 const uword tags = TagsFromUntaggedObject(root.ptr().untag());
2498 if (CanShareObject(root.ptr(), tags)) {
2499 result_array.SetAt(0, root);
2500 return result_array.ptr();
2501 }
2502 if (!fast_object_copy_.CanCopyObject(tags, root.ptr())) {
2503 ASSERT(fast_object_copy_.exception_msg_ != nullptr);
2504 *exception_msg = fast_object_copy_.exception_msg_;
2505 result_array.SetAt(0, Object::Handle(zone_, Marker()));
2506 result_array.SetAt(1, fast_object_copy_.exception_unexpected_object_);
2507 return result_array.ptr();
2508 }
2509
2510 // We try a fast new-space only copy first that will not use any barriers.
2511 auto& result = Object::Handle(Z, Marker());
2512
2513 // All allocated but non-initialized heap objects have to be made GC-visible
2514 // at this point.
2515 if (FLAG_enable_fast_object_copy) {
2516 {
2517 NoSafepointScope no_safepoint_scope;
2518
2519 result = fast_object_copy_.TryCopyGraphFast(root.ptr());
2520 if (result.ptr() != Marker()) {
2521 if (fast_object_copy_.exception_msg_ == nullptr) {
2522 result_array.SetAt(0, result);
2523 fast_object_copy_.tmp_ = fast_object_copy_.raw_objects_to_rehash_;
2524 result_array.SetAt(1, fast_object_copy_.tmp_);
2525 fast_object_copy_.tmp_ = fast_object_copy_.raw_expandos_to_rehash_;
2526 result_array.SetAt(2, fast_object_copy_.tmp_);
2527 HandlifyExternalTypedData();
2528 HandlifyTransferables();
2529 allocated_bytes_ =
2530 fast_object_copy_.fast_forward_map_.allocated_bytes;
2531 copied_objects_ =
2532 fast_object_copy_.fast_forward_map_.fill_cursor_ / 2 -
2533 /*null_entry=*/1;
2534 return result_array.ptr();
2535 }
2536
2537 // There are left-over uninitialized objects we'll have to make GC
2538 // visible.
2539 SwitchToSlowForwardingList();
2540 }
2541 }
2542
2543 if (FLAG_gc_on_foc_slow_path) {
2544 // We force the GC to compact, which is more likely to discover
2545 // untracked pointers (and other issues, like incorrect class table).
2546 thread_->heap()->CollectAllGarbage(GCReason::kDebugging,
2547 /*compact=*/true);
2548 }
2549
2550 ObjectifyFromToObjects();
2551
2552 // Fast copy failed due to
2553 // - either failure to allocate into new space
2554 // - or encountering an object which we cannot copy
2555 ASSERT(fast_object_copy_.exception_msg_ != nullptr);
2556 if (fast_object_copy_.exception_msg_ != kFastAllocationFailed) {
2557 *exception_msg = fast_object_copy_.exception_msg_;
2558 result_array.SetAt(0, Object::Handle(zone_, Marker()));
2559 result_array.SetAt(1, fast_object_copy_.exception_unexpected_object_);
2560 return result_array.ptr();
2561 }
2562 ASSERT(fast_object_copy_.exception_msg_ == kFastAllocationFailed);
2563 }
2564
2565 // Use the slow copy approach.
2566 result = slow_object_copy_.ContinueCopyGraphSlow(root, result);
2567 ASSERT((result.ptr() == Marker()) ==
2568 (slow_object_copy_.exception_msg_ != nullptr));
2569 if (result.ptr() == Marker()) {
2570 *exception_msg = slow_object_copy_.exception_msg_;
2571 result_array.SetAt(0, Object::Handle(zone_, Marker()));
2572 result_array.SetAt(1, slow_object_copy_.exception_unexpected_object_);
2573 return result_array.ptr();
2574 }
2575
2576 result_array.SetAt(0, result);
2577 result_array.SetAt(1, slow_object_copy_.objects_to_rehash_);
2578 result_array.SetAt(2, slow_object_copy_.expandos_to_rehash_);
2579 allocated_bytes_ = slow_object_copy_.slow_forward_map_.allocated_bytes;
2580 copied_objects_ =
2581 slow_object_copy_.slow_forward_map_.fill_cursor_ / 2 - /*null_entry=*/1;
2582 return result_array.ptr();
2583 }
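// Editor's note: a condensed sketch (hypothetical helpers, not VM code) of
// the control flow in CopyObjectGraphInternal above: a barrier-free new-space
// copy is attempted first, and only an allocation failure falls through to
// the handle-based slow copy; any other fast-path failure is reported
// directly.
enum class FakeCopyStatus { kOk, kAllocationFailed, kUnsupportedObject };

FakeCopyStatus CopyGraphSketch(FakeCopyStatus (*run_fast)(),
                               FakeCopyStatus (*run_slow)()) {
  const FakeCopyStatus fast = run_fast();
  if (fast != FakeCopyStatus::kAllocationFailed) {
    return fast;  // success, or a hard error such as an unsendable object
  }
  return run_slow();  // retry with GC-safe handles and old-space allocation
}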
2584
2585 void SwitchToSlowForwardingList() {
2586 auto& fast_forward_map = fast_object_copy_.fast_forward_map_;
2587 auto& slow_forward_map = slow_object_copy_.slow_forward_map_;
2588
2589 MakeUninitializedNewSpaceObjectsGCSafe();
2590 HandlifyTransferables();
2591 HandlifyWeakProperties();
2592 HandlifyWeakReferences();
2593 HandlifyExternalTypedData();
2594 HandlifyObjectsToReHash();
2595 HandlifyExpandosToReHash();
2596 HandlifyFromToObjects();
2597 slow_forward_map.fill_cursor_ = fast_forward_map.fill_cursor_;
2598 slow_forward_map.allocated_bytes = fast_forward_map.allocated_bytes;
2599 }
2600
2601 void MakeUninitializedNewSpaceObjectsGCSafe() {
2602 auto& fast_forward_map = fast_object_copy_.fast_forward_map_;
2603 const auto length = fast_forward_map.raw_from_to_.length();
2604 const auto cursor = fast_forward_map.fill_cursor_;
2605 for (intptr_t i = cursor; i < length; i += 2) {
2606 auto from = fast_forward_map.raw_from_to_[i];
2607 auto to = fast_forward_map.raw_from_to_[i + 1];
2608 const uword tags = TagsFromUntaggedObject(from.untag());
2609 const intptr_t cid = UntaggedObject::ClassIdTag::decode(tags);
2610 // External typed data is already initialized.
2611 if (!IsExternalTypedDataClassId(cid) && !IsTypedDataViewClassId(cid) &&
2612 !IsUnmodifiableTypedDataViewClassId(cid)) {
2613#if defined(DART_COMPRESSED_POINTERS)
2614 const bool compressed = true;
2615#else
2616 const bool compressed = false;
2617#endif
2618 // Mimic the old initialization behavior of Object::InitializeObject
2619 // where the contents are initialized to Object::null(), except for
2620 // TypedDataBase subclasses which are initialized to 0, as the contents
2621 // of the original are translated and copied over prior to returning
2622 // the object graph root.
2623 if (IsTypedDataBaseClassId(cid)) {
2624 Object::InitializeObject(reinterpret_cast<uword>(to.untag()), cid,
2625 from.untag()->HeapSize(), compressed,
2626 Object::from_offset<TypedDataBase>(),
2627 Object::to_offset<TypedDataBase>());
2628
2629 } else {
2630 // Remember that ptr_field_end_offset is the offset to the last Ptr
2631 // field, not the offset just past it.
2632 const uword ptr_field_end_offset =
2633 from.untag()->HeapSize() -
2634 (compressed ? kCompressedWordSize : kWordSize);
2635 Object::InitializeObject(reinterpret_cast<uword>(to.untag()), cid,
2636 from.untag()->HeapSize(), compressed,
2637 Object::from_offset<Object>(),
2638 ptr_field_end_offset);
2639 }
2640 UpdateLengthField(cid, from, to);
2641 }
2642 }
2643 }
2644 void HandlifyTransferables() {
2645 Handlify(&fast_object_copy_.fast_forward_map_.raw_transferables_from_to_,
2646 &slow_object_copy_.slow_forward_map_.transferables_from_to_);
2647 }
2648 void HandlifyWeakProperties() {
2649 Handlify(&fast_object_copy_.fast_forward_map_.raw_weak_properties_,
2650 &slow_object_copy_.slow_forward_map_.weak_properties_);
2651 }
2652 void HandlifyWeakReferences() {
2653 Handlify(&fast_object_copy_.fast_forward_map_.raw_weak_references_,
2654 &slow_object_copy_.slow_forward_map_.weak_references_);
2655 }
2656 void HandlifyExternalTypedData() {
2657 Handlify(&fast_object_copy_.fast_forward_map_.raw_external_typed_data_to_,
2658 &slow_object_copy_.slow_forward_map_.external_typed_data_);
2659 }
2660 void HandlifyObjectsToReHash() {
2661 Handlify(&fast_object_copy_.fast_forward_map_.raw_objects_to_rehash_,
2662 &slow_object_copy_.slow_forward_map_.objects_to_rehash_);
2663 }
2664 void HandlifyExpandosToReHash() {
2665 Handlify(&fast_object_copy_.fast_forward_map_.raw_expandos_to_rehash_,
2666 &slow_object_copy_.slow_forward_map_.expandos_to_rehash_);
2667 }
2668 template <typename PtrType, typename HandleType>
2669 void Handlify(GrowableArray<PtrType>* from,
2670 GrowableArray<const HandleType*>* to) {
2671 const auto length = from->length();
2672 if (length > 0) {
2673 to->Resize(length);
2674 for (intptr_t i = 0; i < length; i++) {
2675 (*to)[i] = &HandleType::Handle(Z, (*from)[i]);
2676 }
2677 from->Clear();
2678 }
2679 }
2680 void HandlifyFromToObjects() {
2681 auto& fast_forward_map = fast_object_copy_.fast_forward_map_;
2682 auto& slow_forward_map = slow_object_copy_.slow_forward_map_;
2683 const intptr_t length = fast_forward_map.raw_from_to_.length();
2684 slow_forward_map.from_to_transition_.Resize(length);
2685 for (intptr_t i = 0; i < length; i++) {
2686 slow_forward_map.from_to_transition_[i] =
2687 &PassiveObject::Handle(Z, fast_forward_map.raw_from_to_[i]);
2688 }
2689 ASSERT(slow_forward_map.from_to_transition_.length() == length);
2690 fast_forward_map.raw_from_to_.Clear();
2691 }
2692 void ObjectifyFromToObjects() {
2693 auto& from_to_transition =
2694 slow_object_copy_.slow_forward_map_.from_to_transition_;
2695 auto& from_to = slow_object_copy_.slow_forward_map_.from_to_;
2696 intptr_t length = from_to_transition.length();
2698 for (intptr_t i = 0; i < length; i++) {
2699 from_to.Add(*from_to_transition[i]);
2700 }
2701 ASSERT(from_to.Length() == length);
2702 from_to_transition.Clear();
2703 }
2704
2705 void ThrowException(const char* exception_msg) {
2706 const auto& msg_obj = String::Handle(Z, String::New(exception_msg));
2707 const auto& args = Array::Handle(Z, Array::New(1));
2708 args.SetAt(0, msg_obj);
2709 Exceptions::ThrowByType(Exceptions::kArgument, args);
2710 UNREACHABLE();
2711 }
2712
2713 Thread* thread_;
2714 Zone* zone_;
2715 IdentityMap map_;
2716 FastObjectCopy fast_object_copy_;
2717 SlowObjectCopy slow_object_copy_;
2718 intptr_t copied_objects_ = 0;
2719 intptr_t allocated_bytes_ = 0;
2720};
2721
2722 ObjectPtr CopyMutableObjectGraph(const Object& object) {
2723 auto thread = Thread::Current();
2724 TIMELINE_DURATION(thread, Isolate, "CopyMutableObjectGraph");
2725 ObjectGraphCopier copier(thread);
2726 ObjectPtr result = copier.CopyObjectGraph(object);
2727#if defined(SUPPORT_TIMELINE)
2728 if (tbes.enabled()) {
2729 tbes.SetNumArguments(2);
2730 tbes.FormatArgument(0, "CopiedObjects", "%" Pd, copier.copied_objects());
2731 tbes.FormatArgument(1, "AllocatedBytes", "%" Pd, copier.allocated_bytes());
2732 }
2733#endif
2734 return result;
2735}
2736
2737} // namespace dart