#define FOR_UNSUPPORTED_CLASSES(V)                                             \
  V(CompressedStackMaps)                                                       \
  V(ExceptionHandlers)                                                         \
  V(FfiTrampolineData)                                                         \
  V(InstructionsSection)                                                       \
  V(InstructionsTable)                                                         \
  V(KernelProgramInfo)                                                         \
  V(LocalVarDescriptors)                                                       \
  V(MonomorphicSmiableCall)                                                    \
  V(SingleTargetCache)                                                         \
  V(UnhandledException)                                                        \
  V(WeakSerializationReference)
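
// Instances of the classes listed here can never appear in an isolate
// message; ObjectCopy expands this list with DEFINE_UNSUPPORTED (see below)
// into Copy##clazz methods that FATAL if ever reached.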
DEFINE_FLAG(bool,
            enable_fast_object_copy,
            true,
            "Enable fast path for fast object copy.");
DEFINE_FLAG(bool,
            gc_on_foc_slow_path,
            false,
            "Cause a GC when falling off the fast path for fast object copy.");
  static Untagged##V* Untag##V(V##Ptr arg) { return arg.untag(); }             \
  static V##Ptr Get##V##Ptr(V##Ptr arg) { return arg; }                        \
  static V##Ptr Cast##V(ObjectPtr arg) { return dart::V::RawCast(arg); }
  using V = const dart::V&;                                                    \
  static Untagged##V* Untag##V(V arg) { return arg.ptr().untag(); }            \
  static V##Ptr Get##V##Ptr(V arg) { return arg.ptr(); }                       \
  static V Cast##V(const dart::Object& arg) { return dart::V::Cast(arg); }
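
// Two bindings of the same "Types" interface: the fast path traffics in raw
// (untagged) pointers inside a NoSafepointScope, while the slow path works on
// handles. Both satisfy the same Untag/Get/Cast surface so the shared
// ObjectCopy<Base> template below can be instantiated for either.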
static DART_FORCE_INLINE ObjectPtr Marker() {
  return Object::unknown_constant().ptr();
}
  if (cid == kClosureCid) {
  if (!obj->IsHeapObject()) return true;
  if (cid == kOneByteStringCid) return false;
  if (cid == kTwoByteStringCid) return false;
  if (cid == kMintCid) return false;
  if (cid == kDoubleCid) return false;
  if (cid == kBoolCid) return false;
  if (cid == kSendPortCid) return false;
  if (cid == kCapabilityCid) return false;
  if (cid == kRegExpCid) return false;
  if (cid == kInt32x4Cid) return false;
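
  // Strings, numbers and the other cids above hash by value, so a copy can
  // never invalidate a hash table containing them. Everything else may hash
  // by identity and is conservatively treated as needing a rehash.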
#if defined(HASH_IN_OBJECT_HEADER)
  tags = UntaggedObject::HashTag::update(0, tags);
#endif
  to.untag()->tags_ = tags;
}
static ObjectPtr AllocateObject(intptr_t cid,
                                intptr_t size,
                                intptr_t allocated_bytes) {
#if defined(DART_COMPRESSED_POINTERS)
  const bool compressed = true;
#else
  const bool compressed = false;
#endif
  const intptr_t kLargeMessageThreshold = 16 * MB;
  const Heap::Space space =
      allocated_bytes > kLargeMessageThreshold ? Heap::kOld : Heap::kNew;
  if (IsTypedDataBaseClassId(cid)) {
    return Object::Allocate(cid, size, space, compressed,
                            Object::from_offset<TypedDataBase>(),
                            Object::to_offset<TypedDataBase>());
  }
  // ptr_field_end_offset is the offset of the last pointer field, not the
  // offset just past it.
  const uword ptr_field_end_offset =
      size - (compressed ? kCompressedWordSize : kWordSize);
  return Object::Allocate(cid, size, space, compressed,
                          Object::from_offset<Object>(),
                          ptr_field_end_offset);
}
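
// Once a message has copied more than the 16 MB threshold, further
// allocations go straight to old space: a graph that large would otherwise
// churn through new space and trigger scavenges mid-copy.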
DART_FORCE_INLINE
void UpdateLengthField(intptr_t cid, ObjectPtr from, ObjectPtr to) {
  // Keep the length field of a variable-length copy in sync with its heap
  // size before the GC can inspect it.
  if (cid == kArrayCid || cid == kImmutableArrayCid) {
    static_cast<UntaggedArray*>(to.untag())->length_ =
        static_cast<UntaggedArray*>(from.untag())->length_;
  } else if (cid == kContextCid) {
    static_cast<UntaggedContext*>(to.untag())->num_variables_ =
        static_cast<UntaggedContext*>(from.untag())->num_variables_;
  } else if (cid == kRecordCid) {
    static_cast<UntaggedRecord*>(to.untag())->shape_ =
        static_cast<UntaggedRecord*>(from.untag())->shape_;
  }
}
void InitializeExternalTypedData(intptr_t cid,
                                 ExternalTypedDataPtr from,
                                 ExternalTypedDataPtr to) {
  auto raw_from = from.untag();
  auto raw_to = to.untag();
  const intptr_t length =
      TypedData::ElementSizeInBytes(cid) * Smi::Value(raw_from->length_);
  auto data = reinterpret_cast<uint8_t*>(malloc(length));
  memmove(data, raw_from->data_, length);
  raw_to->length_ = raw_from->length_;
  raw_to->data_ = data;
}
template <typename T>
void CopyTypedDataBaseWithSafepointChecks(Thread* thread,
                                          const T& from,
                                          const T& to,
                                          intptr_t length) {
  constexpr intptr_t kChunkSize = 100 * 1024;

  const intptr_t chunks = length / kChunkSize;
  const intptr_t remainder = length % kChunkSize;
  for (intptr_t i = 0; i < chunks; ++i) {
    memmove(to.ptr().untag()->data_ + i * kChunkSize,
            from.ptr().untag()->data_ + i * kChunkSize, kChunkSize);
    thread->CheckForSafepoint();
  }
  memmove(to.ptr().untag()->data_ + chunks * kChunkSize,
          from.ptr().untag()->data_ + chunks * kChunkSize, remainder);
}
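
// Copying in 100 KB chunks keeps the time between safepoint checks bounded
// even for very large typed data payloads.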
  const intptr_t length_in_elements = from.Length();
  const intptr_t length_in_bytes = from.LengthInBytes();

  uint8_t* to_data = static_cast<uint8_t*>(malloc(length_in_bytes));
  obj.untag()->offset_in_bytes_ = Smi::New(0);
  void Add(ObjectPtr key, ObjectPtr value) {
    intptr_t i = storage_.length();
    storage_.Resize(i + 2);
    storage_[i + 0] = key;
    storage_[i + 1] = value;
  }
#if defined(HASH_IN_OBJECT_HEADER)
  explicit IdentityMap(Thread* thread) : thread_(thread) {
    hash_table_used_ = 0;
    hash_table_capacity_ = 32;
    hash_table_ = reinterpret_cast<uint32_t*>(
        malloc(hash_table_capacity_ * sizeof(uint32_t)));
    memset(hash_table_, 0, hash_table_capacity_ * sizeof(uint32_t));
  }
  ~IdentityMap() { free(hash_table_); }
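
  // Open-addressing identity map keyed on the header hash: hash_table_
  // stores indices into the from_to list (0 means an empty slot), and the
  // capacity stays a power of two so `& mask` replaces `%`.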
  template <typename S, typename T>
  DART_FORCE_INLINE ObjectPtr ForwardedObject(const S& object, T from_to) {
    intptr_t mask = hash_table_capacity_ - 1;
    intptr_t probe = GetHeaderHash(Ptr(object)) & mask;
    while (true) {
      intptr_t index = hash_table_[probe];
      if (index == 0) return Marker();
      if (from_to.At(index) == Ptr(object)) {
        return from_to.At(index + 1);
      }
      probe = (probe + 1) & mask;
    }
  }
  template <typename S, typename T>
  DART_FORCE_INLINE void Insert(const S& from,
                                const S& to,
                                T from_to,
                                bool check_for_safepoint) {
    const auto id = from_to.Length();
    from_to.Add(from, to);
    intptr_t mask = hash_table_capacity_ - 1;
    intptr_t probe = GetHeaderHash(Ptr(from)) & mask;
    while (true) {
      intptr_t index = hash_table_[probe];
      if (index == 0) {
        hash_table_[probe] = id;
        break;
      }
      probe = (probe + 1) & mask;
    }
    hash_table_used_++;
    if (hash_table_used_ * 2 > hash_table_capacity_) {
      Rehash(hash_table_capacity_ * 2, from_to, check_for_safepoint);
    }
  }
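
  // The table grows once it is half full, keeping linear probe chains short;
  // doubling preserves the power-of-two capacity that the masking relies on.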
  uint32_t GetHeaderHash(ObjectPtr object) {
    uint32_t hash = Object::GetCachedHash(object);
    if (hash != 0) return hash;
    switch (object->GetClassId()) {
      case kMintCid:
        hash = static_cast<uint32_t>(Mint::Value(static_cast<MintPtr>(object)));
        break;
      case kDoubleCid:
        hash = static_cast<uint32_t>(
            bit_cast<uint64_t>(Double::Value(static_cast<DoublePtr>(object))));
        break;
      case kOneByteStringCid:
      case kTwoByteStringCid:
        hash = String::Hash(static_cast<StringPtr>(object));
        hash = Object::SetCachedHashIfNotSet(object, hash);
        break;
      default:
        // No usable value hash: draw a random one and cache it.
        do {
          hash = thread_->random()->NextUInt32();
        } while (hash == 0 || !Smi::IsValid(hash));
        hash = Object::SetCachedHashIfNotSet(object, hash);
        break;
    }
    return hash;
  }
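
  // Only strings and the random fallback write the hash back into the
  // header; Mint/Double hashes are recomputed from the value so the cached
  // identity hash is left untouched.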
  template <typename T>
  void Rehash(intptr_t new_capacity, T from_to, bool check_for_safepoint) {
    hash_table_capacity_ = new_capacity;
    hash_table_used_ = 0;
    free(hash_table_);
    hash_table_ = reinterpret_cast<uint32_t*>(
        malloc(hash_table_capacity_ * sizeof(uint32_t)));
    for (intptr_t i = 0; i < hash_table_capacity_; i++) {
      hash_table_[i] = 0;
      if (check_for_safepoint) {
        thread_->CheckForSafepoint();
      }
    }
    for (intptr_t id = 2; id < from_to.Length(); id += 2) {
      ObjectPtr obj = from_to.At(id);
      intptr_t mask = hash_table_capacity_ - 1;
      intptr_t probe = GetHeaderHash(obj) & mask;
      while (true) {
        if (hash_table_[probe] == 0) {
          hash_table_[probe] = id;
          hash_table_used_++;
          break;
        }
        probe = (probe + 1) & mask;
      }
      if (check_for_safepoint) {
        thread_->CheckForSafepoint();
      }
    }
  }
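
  // Rehashing replays the from_to list (pair slots start at id 2; slots 0/1
  // hold the null sentinel pair), checking for safepoints so a large rehash
  // cannot block a requested GC indefinitely.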
  uint32_t* hash_table_;
  uint32_t hash_table_capacity_;
  uint32_t hash_table_used_;
#else   // defined(HASH_IN_OBJECT_HEADER)
  template <typename S, typename T>
  DART_FORCE_INLINE ObjectPtr ForwardedObject(const S& object, T from_to) {
    const intptr_t id = GetObjectId(Ptr(object));
    if (id == 0) return Marker();
    return from_to.At(id + 1);
  }
  template <typename S, typename T>
  DART_FORCE_INLINE void Insert(const S& from,
                                const S& to,
                                T from_to,
                                bool check_for_safepoint) {
    const auto id = from_to.Length();
    SetObjectId(Ptr(from), id);
    from_to.Add(from, to);
  }
  intptr_t GetObjectId(ObjectPtr object) {
    if (object->IsNewObject()) {
      return thread_->isolate()->forward_table_new()->GetValueExclusive(object);
    } else {
      return thread_->isolate()->forward_table_old()->GetValueExclusive(object);
    }
  }
  void SetObjectId(ObjectPtr object, intptr_t id) {
    if (object->IsNewObject()) {
      thread_->isolate()->forward_table_new()->SetValueExclusive(object, id);
    } else {
      thread_->isolate()->forward_table_old()->SetValueExclusive(object, id);
    }
  }
#endif  // defined(HASH_IN_OBJECT_HEADER)
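
// Without HASH_IN_OBJECT_HEADER there are no spare header bits to hash on,
// so object ids are parked in the isolate's GC weak tables instead, split by
// new/old space.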
  ASSERT(fpeer != nullptr && fpeer->data() != nullptr);
  const intptr_t length = fpeer->length();

  ASSERT(finalizable_ref != nullptr);
  tpeer->set_handle(finalizable_ref);
  FastForwardMap(Thread* thread, IdentityMap* map)
      : ForwardMapBase(thread),
        map_(map),
        raw_from_to_(thread->zone(), 20),
        raw_transferables_from_to_(thread->zone(), 0),
        raw_objects_to_rehash_(thread->zone(), 0),
        raw_expandos_to_rehash_(thread->zone(), 0) {}
    allocated_bytes += size;
  void AddTransferable(TransferableTypedDataPtr from,
                       TransferableTypedDataPtr to) {
    raw_transferables_from_to_.Add(from);
    raw_transferables_from_to_.Add(to);
  }
  void AddWeakReference(WeakReferencePtr from) {
    raw_weak_references_.Add(from);
  }
  void AddExternalTypedData(ExternalTypedDataPtr to) {
    raw_external_typed_data_to_.Add(to);
  }
  intptr_t fill_cursor_ = 0;
  intptr_t allocated_bytes = 0;
  SlowForwardMap(Thread* thread, IdentityMap* map)
      : ForwardMapBase(thread),
        map_(map),
        from_to_transition_(thread->zone(), 2),
        // ...
        transferables_from_to_(thread->zone(), 0) {
    from_to_transition_.Resize(2);
    from_to_.Add(Object::null_object());
    from_to_.Add(Object::null_object());
  }
    allocated_bytes += size;
    external_typed_data_.Add(to_handle);
  void FinalizeTransferables() {
    for (intptr_t i = 0; i < transferables_from_to_.length(); i += 2) {
      auto from = transferables_from_to_[i];
      auto to = transferables_from_to_[i + 1];
      FinalizeTransferable(*from, *to);
    }
  }

  void FinalizeExternalTypedData() {
    for (intptr_t i = 0; i < external_typed_data_.length(); i++) {
      auto to = external_typed_data_[i];
      FinalizeExternalTypedData(*to);
    }
  }
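
  // Peer finalizers for transferables and external typed data are attached
  // only after the whole graph has been copied, so a copy that fails midway
  // does not leave half-registered finalizers behind.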
  intptr_t fill_cursor_ = 0;
  intptr_t allocated_bytes = 0;
  ObjectCopyBase(Thread* thread)
      : thread_(thread),
        zone_(thread->zone()),
        heap_(thread->isolate_group()->heap()),
        class_table_(thread->isolate_group()->class_table()),
        expando_cid_(Class::GetClassId(
            thread->isolate_group()->object_store()->expando_class())) {}
  static ObjectPtr LoadPointer(ObjectPtr src, intptr_t offset) {
    return src.untag()->LoadPointer(reinterpret_cast<ObjectPtr*>(
        reinterpret_cast<uint8_t*>(src.untag()) + offset));
  }
  static CompressedObjectPtr LoadCompressedPointer(ObjectPtr src,
                                                   intptr_t offset) {
    return src.untag()->LoadCompressedPointer(
        reinterpret_cast<CompressedObjectPtr*>(
            reinterpret_cast<uint8_t*>(src.untag()) + offset));
  }
  static compressed_uword LoadCompressedNonPointerWord(ObjectPtr src,
                                                       intptr_t offset) {
    return *reinterpret_cast<compressed_uword*>(
        reinterpret_cast<uint8_t*>(src.untag()) + offset);
  }
  static void StorePointerBarrier(ObjectPtr obj,
                                  intptr_t offset,
                                  ObjectPtr value) {
    obj.untag()->StorePointer(
        reinterpret_cast<ObjectPtr*>(reinterpret_cast<uint8_t*>(obj.untag()) +
                                     offset),
        value);
  }

  static void StorePointerNoBarrier(ObjectPtr obj,
                                    intptr_t offset,
                                    ObjectPtr value) {
    *reinterpret_cast<ObjectPtr*>(reinterpret_cast<uint8_t*>(obj.untag()) +
                                  offset) = value;
  }
  template <typename T = ObjectPtr>
847 "Illegal argument in isolate message: object is unsendable - %s ("
848 "see restrictions listed at `SendPort.send()` documentation "
849 "for more information)",
#define HANDLE_ILLEGAL_CASE(Type)                                              \
  case k##Type##Cid: {                                                         \
    exception_msg_ =                                                           \
        "Illegal argument in isolate message: "                                \
        "(object is a " #Type ")";                                             \
    exception_unexpected_object_ = object;                                     \
    return false;                                                              \
  }
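
// CanCopyObject() uses these cases to reject objects that can never be sent
// in a message; the offending object is stashed in
// exception_unexpected_object_ so the error can include a retaining path
// from the message root.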
        retaining_path_(retaining_path),
        working_list_(working_list),
        traversal_rules_(traversal_rules) {}
    void VisitObject(ObjectPtr obj) {
      if (!obj->IsHeapObject()) {
        return;
      }
      if (retaining_path_->WasVisited(obj)) {
        return;
      }
      retaining_path_->MarkVisited(obj);
      working_list_->Add(obj);
    }
    void VisitPointers(ObjectPtr* from, ObjectPtr* to) override {
      for (ObjectPtr* ptr = from; ptr <= to; ptr++) {
        VisitObject(*ptr);
      }
    }

#if defined(DART_COMPRESSED_POINTERS)
    void VisitCompressedPointers(uword heap_base,
                                 CompressedObjectPtr* from,
                                 CompressedObjectPtr* to) override {
      for (CompressedObjectPtr* ptr = from; ptr <= to; ptr++) {
        VisitObject(ptr->Decompress(heap_base));
      }
    }
#endif
        traversal_rules_(traversal_rules) {
  bool WasVisited(ObjectPtr object) {
    if (object->IsNewObject()) {
      return isolate_->forward_table_new()->GetValueExclusive(object) != 0;
    }
    return isolate_->forward_table_old()->GetValueExclusive(object) != 0;
  }

  void MarkVisited(ObjectPtr object) {
    if (object->IsNewObject()) {
      isolate_->forward_table_new()->SetValueExclusive(object, 1);
    } else {
      isolate_->forward_table_old()->SetValueExclusive(object, 1);
    }
  }
    Visitor visitor(isolate_->group(), this, working_list, traversal_rules_);

    MarkVisited(from_.ptr());
    working_list->Add(from_.ptr());

    while (!working_list->is_empty()) {
      ObjectPtr raw = working_list->RemoveLast();
      // ...
      if (raw == to_.ptr()) {
        return CollectPath(working_list);
      }
      if (cid == kClosureCid) {
        // ...
      }
      klass = class_table->At(cid);
      ASSERT(traversal_rules_ == TraversalRules::kInternalToIsolateGroup);
      switch (cid) {
        case kFinalizerEntryCid:
        case kFunctionTypeCid:
        case kMirrorReferenceCid:
        case kNativeFinalizerCid:
        case kReceivePortCid:
        case kRecordTypeCid:
        case kStackTraceCid:
        case kSuspendStateCid:
        case kWeakPropertyCid:
        case kWeakReferenceCid:
        case kDynamicLibraryCid:
      if (cid == kArrayCid) {
        array ^= raw;
        // Visit large arrays in batches, yielding for safepoints in between.
        const intptr_t batch_size = (2 << 14) - 1;
        for (intptr_t i = 0; i < array.Length(); ++i) {
          ObjectPtr ptr = array.At(i);
          visitor.VisitObject(ptr);
          if ((i & batch_size) == batch_size) {
            Thread::Current()->CheckForSafepoint();
          }
        }
      }
  void VisitPointers(ObjectPtr* from, ObjectPtr* to) override {
    for (ObjectPtr* ptr = from; ptr <= to; ptr++, index_++) {
      if (*ptr == target_) {
        break;
      }
    }
  }

#if defined(DART_COMPRESSED_POINTERS)
  void VisitCompressedPointers(uword heap_base,
                               CompressedObjectPtr* from,
                               CompressedObjectPtr* to) override {
    for (CompressedObjectPtr* ptr = from; ptr <= to; ptr++, index_++) {
      if (ptr->Decompress(heap_base) == target_) {
        break;
      }
    }
  }
#endif

  intptr_t index() { return index_; }
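
  // After a visit, index_ identifies which pointer field of the visited
  // object held target_; CollectPath() turns that index into a field name
  // via Class::FieldFromIndex().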
  const char* CollectPath(MallocGrowableArray<ObjectPtr>* const working_list) {
#if !defined(DART_PRECOMPILED_RUNTIME)
    // ... (handles used to decode local variable descriptors)
#endif
    const char* saved_context_location = nullptr;
    intptr_t saved_context_object_index = -1;
    intptr_t saved_context_depth = 0;
    const char* retaining_path = "";
    ObjectPtr raw = to_.ptr();
    do {
      previous_object = raw;
      // ...
      raw = working_list->RemoveLast();
      // ...
      raw = working_list->RemoveLast();
      klass = object.clazz();
      const char* location = object.ToCString();
      if (object.IsContext()) {
        if (saved_context_object_index == -1) {
          context ^= object.ptr();
          saved_context_depth = 0;
          for (intptr_t i = 0; i < context.num_variables(); i++) {
            if (context.At(i) == previous_object.ptr()) {
              saved_context_object_index = i;
              break;
            }
          }
        } else {
          saved_context_depth++;
        }
      }
      if (object.IsInstance()) {
        if (object.IsClosure()) {
          closure ^= object.ptr();
          function ^= closure.function();
#if defined(DART_PRECOMPILED_RUNTIME)
          location = function.QualifiedUserVisibleNameCString();
#else
          // Use the unoptimized code's local variable descriptors to recover
          // the name of the captured variable.
          function.EnsureHasCompiledUnoptimizedCode();
          code ^= function.unoptimized_code();
          var_descriptors ^= code.GetLocalVarDescriptors();
          for (intptr_t i = 0; i < var_descriptors.Length(); i++) {
            UntaggedLocalVarDescriptors::VarInfo info;
            var_descriptors.GetInfo(i, &info);
            if (info.scope_id == -saved_context_depth &&
                info.kind() ==
                    UntaggedLocalVarDescriptors::VarInfoKind::kContextVar &&
                info.index() == saved_context_object_index) {
              name ^= var_descriptors.GetName(i);
              location = OS::SCreate(zone_, "field %s in %s", name.ToCString(),
                                     function.QualifiedUserVisibleNameCString());
              break;
            }
          }
#endif
          saved_context_location = nullptr;
        } else {
          // Find which pointer field of `object` holds previous_object and
          // map its index back to a field name.
          FindObjectVisitor visitor(isolate_->group(), previous_object.ptr());
          raw->untag()->VisitPointers(&visitor);
          field ^= klass.FieldFromIndex(visitor.index());
          if (!field.IsNull()) {
            location = OS::SCreate(zone_, "%s in %s",
                                   field.UserVisibleNameCString(), location);
          }
        }
        saved_context_object_index = -1;
        saved_context_depth = -1;
      }
      library = klass.library();
      if (!library.IsNull()) {
        library_url = library.url();
        location = OS::SCreate(zone_, "%s (from %s)", location,
                               library_url.ToCString());
      }
      if (object.IsContext()) {
        // Runs of contexts are accumulated separately and flushed into the
        // path once a non-context object is reached.
        if (saved_context_location == nullptr) {
          saved_context_location = location;
        } else {
          saved_context_location = OS::SCreate(
              zone_, "%s <- %s\n", saved_context_location, location);
        }
      } else {
        if (saved_context_location != nullptr) {
          retaining_path = OS::SCreate(zone_, "%s <- %s", retaining_path,
                                       saved_context_location);
          saved_context_location = nullptr;
        }
        retaining_path =
            OS::SCreate(zone_, "%s <- %s\n", retaining_path, location);
      }
    } while (raw != from_.ptr());
    ASSERT(working_list->is_empty());
    return retaining_path;
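
    // The collected path prints one "X <- Y" edge per line, with runs of
    // Context objects folded into a single segment annotated with the
    // captured variable's name where the local var descriptors allow it.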
  RetainingPath rr(zone_, isolate, from, to, traversal_rules);
  DART_FORCE_INLINE
  void ForwardCompressedPointers(ObjectPtr src,
                                 ObjectPtr dst,
                                 intptr_t offset,
                                 intptr_t end_offset) {
    // ...
  }

  DART_FORCE_INLINE
  void ForwardCompressedPointers(ObjectPtr src,
                                 ObjectPtr dst,
                                 intptr_t offset,
                                 intptr_t end_offset,
                                 UnboxedFieldBitmap bitmap) {
    // ...
  }

  void ForwardCompressedArrayPointers(intptr_t array_length,
                                      ObjectPtr src,
                                      ObjectPtr dst,
                                      intptr_t offset,
                                      intptr_t end_offset) {
    // ...
  }

  void ForwardCompressedContextPointers(intptr_t context_length,
                                        ObjectPtr src,
                                        ObjectPtr dst,
                                        intptr_t offset,
                                        intptr_t end_offset) {
    // ...
  }
  DART_FORCE_INLINE
  void ForwardCompressedPointer(ObjectPtr src, ObjectPtr dst, intptr_t offset) {
    auto value = LoadCompressedPointer(src, offset);
    if (!value.IsHeapObject()) {
      StoreCompressedPointerNoBarrier(dst, offset, value);
      return;
    }
    auto value_decompressed = value.Decompress(heap_base_);
    const uword tags = TagsFromUntaggedObject(value_decompressed.untag());
    // ...
    ObjectPtr existing_to =
        fast_forward_map_.ForwardedObject(value_decompressed);
    if (existing_to != Marker()) {
      StoreCompressedPointerNoBarrier(dst, offset, existing_to);
      return;
    }
    // ...
    auto to = Forward(tags, value_decompressed);
    StoreCompressedPointerNoBarrier(dst, offset, to);
  }
  void EnqueueTransferable(TransferableTypedDataPtr from,
                           TransferableTypedDataPtr to) {
    fast_forward_map_.AddTransferable(from, to);
  }
  void StoreCompressedPointers(const Object& src,
                               const Object& dst,
                               intptr_t offset,
                               intptr_t end_offset) {
    // ...
  }
  void StoreCompressedArrayPointers(intptr_t array_length,
                                    const Object& src,
                                    const Object& dst,
                                    intptr_t offset,
                                    intptr_t end_offset) {
    // ...
  }
  static void StoreCompressedPointersNoBarrier(const Object& src,
                                               const Object& dst,
                                               intptr_t offset,
                                               intptr_t end_offset) {
    // ...
  }
  DART_FORCE_INLINE
  void ForwardCompressedPointers(const Object& src,
                                 const Object& dst,
                                 intptr_t offset,
                                 intptr_t end_offset) {
    // ...
  }

  DART_FORCE_INLINE
  void ForwardCompressedPointers(const Object& src,
                                 const Object& dst,
                                 intptr_t offset,
                                 intptr_t end_offset,
                                 UnboxedFieldBitmap bitmap) {
    // ...
  }

  void ForwardCompressedArrayPointers(intptr_t array_length,
                                      const Object& src,
                                      const Object& dst,
                                      intptr_t offset,
                                      intptr_t end_offset) {
    // ...
  }

  void ForwardCompressedContextPointers(intptr_t context_length,
                                        const Object& src,
                                        const Object& dst,
                                        intptr_t offset,
                                        intptr_t end_offset) {
    // ...
  }
  DART_FORCE_INLINE
  void ForwardCompressedLargeArrayPointer(const Object& src,
                                          const Object& dst,
                                          intptr_t offset) {
    auto value = LoadCompressedPointer(src.ptr(), offset);
    if (!value.IsHeapObject()) {
      // ...
      return;
    }
    auto value_decompressed = value.Decompress(heap_base_);
    ObjectPtr existing_to =
        slow_forward_map_.ForwardedObject(value_decompressed);
    if (existing_to != Marker()) {
      // ...
      return;
    }
    // ...
    tmp_ = value_decompressed;
    // ...
  }
  DART_FORCE_INLINE
  void ForwardCompressedPointer(const Object& src,
                                const Object& dst,
                                intptr_t offset) {
    auto value = LoadCompressedPointer(src.ptr(), offset);
    if (!value.IsHeapObject()) {
      // ...
      return;
    }
    auto value_decompressed = value.Decompress(heap_base_);
    ObjectPtr existing_to =
        slow_forward_map_.ForwardedObject(value_decompressed);
    if (existing_to != Marker()) {
      // ...
      return;
    }
    // ...
    tmp_ = value_decompressed;
    // ...
  }
    if ((cid == kArrayCid || cid == kImmutableArrayCid) &&
        Array::UseCardMarkingForAllocation(Array::Cast(from).Length())) {
      // ...
    }
    if (IsExternalTypedDataClassId(cid)) {
      // ...
      InitializeExternalTypedDataWithSafepointChecks(
          thread_, cid, ExternalTypedData::Cast(from), external_to);
      return external_to.ptr();
    }
                                            intptr_t end_offset) {
    auto src_ptr = src.ptr();
    auto dst_ptr = dst.ptr();
    // ...
  }

                                            intptr_t end_offset) {
    auto src_ptr = src.ptr();
    auto dst_ptr = dst.ptr();
    // ...
  }

                                            intptr_t end_offset) {
    auto src_ptr = src.ptr();
    auto dst_ptr = dst.ptr();
    // ...
  }
template <typename Base>
class ObjectCopy : public Base {
 public:
  using Types = typename Base::Types;
  // ...

  void CopyPredefinedInstance(typename Types::Object from,
                              typename Types::Object to,
                              intptr_t cid) {
#define COPY_TO(clazz)                                                         \
  case clazz::kClassId: {                                                      \
    typename Types::clazz casted_from = Types::Cast##clazz(from);              \
    typename Types::clazz casted_to = Types::Cast##clazz(to);                  \
    Copy##clazz(casted_from, casted_to);                                       \
    return;                                                                    \
  }
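
    // The COPY_TO stamp is expanded for every ordinary copyable class; the
    // class lists below (arrays, typed data, views, external typed data) get
    // hand-written cases because they need length/backing-store handling.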
      case kArrayCid:
      case kImmutableArrayCid: {
        typename Types::Array casted_from = Types::CastArray(from);
        typename Types::Array casted_to = Types::CastArray(to);
        CopyArray(casted_from, casted_to);
        return;
      }
#define COPY_TO(clazz) case kTypedData##clazz##Cid:
      CLASS_LIST_TYPED_DATA(COPY_TO) {
        typename Types::TypedData casted_from = Types::CastTypedData(from);
        typename Types::TypedData casted_to = Types::CastTypedData(to);
        CopyTypedData(casted_from, casted_to);
        return;
      }
#undef COPY_TO
#define COPY_TO(clazz)                                                         \
  case kTypedData##clazz##ViewCid:                                             \
  case kUnmodifiableTypedData##clazz##ViewCid:
      CLASS_LIST_TYPED_DATA(COPY_TO) {
        typename Types::TypedDataView casted_from =
            Types::CastTypedDataView(from);
        typename Types::TypedDataView casted_to =
            Types::CastTypedDataView(to);
        CopyTypedDataView(casted_from, casted_to);
        return;
      }
#undef COPY_TO
#define COPY_TO(clazz) case kExternalTypedData##clazz##Cid:
      CLASS_LIST_TYPED_DATA(COPY_TO) {
        typename Types::ExternalTypedData casted_from =
            Types::CastExternalTypedData(from);
        typename Types::ExternalTypedData casted_to =
            Types::CastExternalTypedData(to);
        CopyExternalTypedData(casted_from, casted_to);
        return;
      }
#undef COPY_TO
    const Object& obj = Types::HandlifyObject(from);
  void CopyUserdefinedInstance(typename Types::Object from,
                               typename Types::Object to,
                               UnboxedFieldBitmap bitmap) {
    const intptr_t instance_size = UntagObject(from)->HeapSize();
    Base::ForwardCompressedPointers(from, to, kWordSize, instance_size,
                                    bitmap);
  }
  void CopyUserdefinedInstanceWithoutUnboxedFields(
      typename Types::Object from,
      typename Types::Object to) {
    const intptr_t instance_size = UntagObject(from)->HeapSize();
    Base::ForwardCompressedPointers(from, to, kWordSize, instance_size);
  }
  void CopyClosure(typename Types::Closure from, typename Types::Closure to) {
    Base::StoreCompressedPointers(
        from, to, OFFSET_OF(UntaggedClosure, instantiator_type_arguments_),
        OFFSET_OF(UntaggedClosure, function_));
    Base::ForwardCompressedPointer(from, to,
                                   OFFSET_OF(UntaggedClosure, context_));
    Base::StoreCompressedPointersNoBarrier(from, to,
                                           OFFSET_OF(UntaggedClosure, hash_),
                                           OFFSET_OF(UntaggedClosure, hash_));
    ONLY_IN_PRECOMPILED(UntagClosure(to)->entry_point_ =
                            UntagClosure(from)->entry_point_);
  }
  void CopyContext(typename Types::Context from, typename Types::Context to) {
    const intptr_t length = Context::NumVariables(Types::GetContextPtr(from));
    UntagContext(to)->num_variables_ = UntagContext(from)->num_variables_;
    Base::ForwardCompressedPointer(from, to,
                                   OFFSET_OF(UntaggedContext, parent_));
    Base::ForwardCompressedContextPointers(
        length, from, to, Context::variable_offset(0),
        Context::variable_offset(0) + kCompressedWordSize * length);
  }
  void CopyArray(typename Types::Array from, typename Types::Array to) {
    const intptr_t length = Smi::Value(UntagArray(from)->length());
    Base::StoreCompressedArrayPointers(
        length, from, to, OFFSET_OF(UntaggedArray, type_arguments_),
        OFFSET_OF(UntaggedArray, type_arguments_));
    Base::StoreCompressedPointersNoBarrier(from, to,
                                           OFFSET_OF(UntaggedArray, length_),
                                           OFFSET_OF(UntaggedArray, length_));
    Base::ForwardCompressedArrayPointers(
        length, from, to, Array::data_offset(),
        Array::data_offset() + kCompressedWordSize * length);
  }
  void CopyGrowableObjectArray(typename Types::GrowableObjectArray from,
                               typename Types::GrowableObjectArray to) {
    Base::StoreCompressedPointers(
        from, to, OFFSET_OF(UntaggedGrowableObjectArray, type_arguments_),
        OFFSET_OF(UntaggedGrowableObjectArray, type_arguments_));
    Base::StoreCompressedPointersNoBarrier(
        from, to, OFFSET_OF(UntaggedGrowableObjectArray, length_),
        OFFSET_OF(UntaggedGrowableObjectArray, length_));
    Base::ForwardCompressedPointer(
        from, to, OFFSET_OF(UntaggedGrowableObjectArray, data_));
  }
  void CopyRecord(typename Types::Record from, typename Types::Record to) {
    const intptr_t num_fields = Record::NumFields(Types::GetRecordPtr(from));
    Base::StoreCompressedPointersNoBarrier(from, to,
                                           OFFSET_OF(UntaggedRecord, shape_),
                                           OFFSET_OF(UntaggedRecord, shape_));
    Base::ForwardCompressedPointers(
        from, to, Record::field_offset(0),
        Record::field_offset(0) + kCompressedWordSize * num_fields);
  }
  template <intptr_t one_for_set_two_for_map, typename T>
  void CopyLinkedHashBase(T from,
                          T to,
                          UntaggedLinkedHashBase* from_untagged,
                          UntaggedLinkedHashBase* to_untagged) {
    // A map/set that contains heap objects as keys may hash by identity; the
    // copy then gets different identity hashes, so it must be rehashed on
    // the receiving side.
    bool needs_rehashing = false;
    ArrayPtr data = from_untagged->data_.Decompress(Base::heap_base_);
    auto untagged_data = data.untag();
    const intptr_t length = Smi::Value(untagged_data->length());
    auto key_value_pairs = untagged_data->data();
    for (intptr_t i = 0; i < length; i += one_for_set_two_for_map) {
      ObjectPtr key = key_value_pairs[i].Decompress(Base::heap_base_);
      const bool is_deleted_entry = key == data;
      if (key->IsHeapObject()) {
        if (!is_deleted_entry && MightNeedReHashing(key)) {
          needs_rehashing = true;
          break;
        }
      }
    }

    Base::StoreCompressedPointers(
        from, to, OFFSET_OF(UntaggedLinkedHashBase, type_arguments_),
        OFFSET_OF(UntaggedLinkedHashBase, type_arguments_));

    if (needs_rehashing) {
      to_untagged->hash_mask_ = Smi::New(0);
      // ...
      to_untagged->deleted_keys_ = Smi::New(0);
    }

    // From this point on we shouldn't use the raw pointers, since a GC may
    // happen while forwarding objects on the slow path.
    from_untagged = nullptr;
    to_untagged = nullptr;

    if (!needs_rehashing) {
      Base::ForwardCompressedPointer(
          from, to, OFFSET_OF(UntaggedLinkedHashBase, index_));
      Base::StoreCompressedPointersNoBarrier(
          from, to, OFFSET_OF(UntaggedLinkedHashBase, hash_mask_),
          OFFSET_OF(UntaggedLinkedHashBase, hash_mask_));
      Base::StoreCompressedPointersNoBarrier(
          from, to, OFFSET_OF(UntaggedMap, deleted_keys_),
          OFFSET_OF(UntaggedMap, deleted_keys_));
    }
    Base::ForwardCompressedPointer(from, to,
                                   OFFSET_OF(UntaggedLinkedHashBase, data_));
    Base::StoreCompressedPointersNoBarrier(
        from, to, OFFSET_OF(UntaggedLinkedHashBase, used_data_),
        OFFSET_OF(UntaggedLinkedHashBase, used_data_));
    if (Base::exception_msg_ == nullptr && needs_rehashing) {
      Base::EnqueueObjectToRehash(to);
    }
  }
  void CopyMap(typename Types::Map from, typename Types::Map to) {
    CopyLinkedHashBase<2, typename Types::Map>(from, to, UntagMap(from),
                                               UntagMap(to));
  }
  void CopySet(typename Types::Set from, typename Types::Set to) {
    CopyLinkedHashBase<1, typename Types::Set>(from, to, UntagSet(from),
                                               UntagSet(to));
  }
  void CopyDouble(typename Types::Double from, typename Types::Double to) {
#if !defined(DART_PRECOMPILED_RUNTIME)
    auto raw_from = UntagDouble(from);
    auto raw_to = UntagDouble(to);
    raw_to->value_ = raw_from->value_;
#else
    // Will be shared, not copied, in AOT.
    UNREACHABLE();
#endif
  }
  void CopyFloat32x4(typename Types::Float32x4 from,
                     typename Types::Float32x4 to) {
#if !defined(DART_PRECOMPILED_RUNTIME)
    auto raw_from = UntagFloat32x4(from);
    auto raw_to = UntagFloat32x4(to);
    raw_to->value_[0] = raw_from->value_[0];
    raw_to->value_[1] = raw_from->value_[1];
    raw_to->value_[2] = raw_from->value_[2];
    raw_to->value_[3] = raw_from->value_[3];
#else
    UNREACHABLE();
#endif
  }
  void CopyFloat64x2(typename Types::Float64x2 from,
                     typename Types::Float64x2 to) {
#if !defined(DART_PRECOMPILED_RUNTIME)
    auto raw_from = UntagFloat64x2(from);
    auto raw_to = UntagFloat64x2(to);
    raw_to->value_[0] = raw_from->value_[0];
    raw_to->value_[1] = raw_from->value_[1];
#else
    UNREACHABLE();
#endif
  }
  void CopyTypedData(TypedDataPtr from, TypedDataPtr to) {
    auto raw_from = from.untag();
    auto raw_to = to.untag();
    const intptr_t cid = Types::GetTypedDataPtr(from)->GetClassId();
    raw_to->length_ = raw_from->length_;
    raw_to->RecomputeDataField();
    const intptr_t length =
        TypedData::ElementSizeInBytes(cid) * Smi::Value(raw_from->length_);
    memmove(raw_to->data_, raw_from->data_, length);
  }
  void CopyTypedData(const TypedData& from, const TypedData& to) {
    auto raw_from = from.ptr().untag();
    auto raw_to = to.ptr().untag();
    const intptr_t cid = Types::GetTypedDataPtr(from)->GetClassId();
    ASSERT(raw_to->length_ == raw_from->length_);
    raw_to->RecomputeDataField();
    CopyTypedDataBaseWithSafepointChecks(Base::thread_, from, to,
                                         from.LengthInBytes());
  }
  void CopyTypedDataView(typename Types::TypedDataView from,
                         typename Types::TypedDataView to) {
    // This will forward and initialize the backing typed data.
    Base::ForwardCompressedPointer(
        from, to, OFFSET_OF(UntaggedTypedDataView, typed_data_));

    auto raw_from = UntagTypedDataView(from);
    auto raw_to = UntagTypedDataView(to);
    raw_to->length_ = raw_from->length_;
    raw_to->offset_in_bytes_ = raw_from->offset_in_bytes_;
    raw_to->data_ = nullptr;

    auto forwarded_backing_store =
        raw_to->typed_data_.Decompress(Base::heap_base_);
    if (forwarded_backing_store == Marker() ||
        forwarded_backing_store == Object::null()) {
      // The backing store wasn't copyable, so the view is turned into an
      // empty, null-backed view and the pending exception will be thrown.
      Base::StoreCompressedPointerNoBarrier(
          Types::GetTypedDataViewPtr(to),
          OFFSET_OF(UntaggedTypedDataView, typed_data_), Object::null());
      raw_to->length_ = Smi::New(0);
      raw_to->offset_in_bytes_ = Smi::New(0);
      ASSERT(Base::exception_msg_ != nullptr);
      return;
    }

    const bool is_external =
        raw_from->data_ != raw_from->DataFieldForInternalTypedData();
    if (is_external) {
      // The `to` object is fully initialized at this point, so the data
      // field can be recomputed from the backing store directly.
      raw_to->RecomputeDataField();
    } else {
      // The backing store isn't initialized yet, but its address is valid,
      // so compute the data field the internal typed data would use.
      raw_to->RecomputeDataFieldForInternalTypedData();
    }
    const bool is_external2 =
        raw_to->data_ != raw_to->DataFieldForInternalTypedData();
    ASSERT(is_external == is_external2);
  }
  void CopyExternalTypedData(typename Types::ExternalTypedData from,
                             typename Types::ExternalTypedData to) {
    // The external typed data's payload is copied as part of forwarding, so
    // views of it can be initialized correctly; nothing is left to do here.
    auto raw_from = UntagExternalTypedData(from);
    auto raw_to = UntagExternalTypedData(to);
    ASSERT(raw_to->data_ != nullptr);
    ASSERT(raw_to->length_ == raw_from->length_);
  }
  void CopyTransferableTypedData(typename Types::TransferableTypedData from,
                                 typename Types::TransferableTypedData to) {
    // A TransferableTypedData is an empty object whose malloc'ed payload is
    // owned by a heap peer; ownership moves when the message is received.
    auto fpeer = static_cast<TransferableTypedDataPeer*>(
        Base::heap_->GetPeer(Types::GetTransferableTypedDataPtr(from)));
    ASSERT(fpeer != nullptr);
    if (fpeer->data() == nullptr) {
      Base::exception_msg_ =
          "Illegal argument in isolate message"
          " : (TransferableTypedData has been transferred already)";
      Base::exception_unexpected_object_ =
          Types::GetTransferableTypedDataPtr(from);
      return;
    }
    Base::EnqueueTransferable(from, to);
  }
  void CopyWeakProperty(typename Types::WeakProperty from,
                        typename Types::WeakProperty to) {
    // Store nulls for now; after the copy fixpoint the key's reachability is
    // checked again and, if it became reachable, key/value are forwarded.
    Base::StoreCompressedPointerNoBarrier(Types::GetWeakPropertyPtr(to),
                                          OFFSET_OF(UntaggedWeakProperty, key_),
                                          Object::null());
    Base::StoreCompressedPointerNoBarrier(
        Types::GetWeakPropertyPtr(to), OFFSET_OF(UntaggedWeakProperty, value_),
        Object::null());
    Base::StoreCompressedPointerNoBarrier(
        Types::GetWeakPropertyPtr(to),
        OFFSET_OF(UntaggedWeakProperty, next_seen_by_gc_), Object::null());
    Base::EnqueueWeakProperty(from);
  }
  void CopyWeakReference(typename Types::WeakReference from,
                         typename Types::WeakReference to) {
    // Store null as the target for now; after the fixpoint the target's
    // reachability is checked again and forwarded if it became reachable.
    Base::StoreCompressedPointerNoBarrier(
        Types::GetWeakReferencePtr(to),
        OFFSET_OF(UntaggedWeakReference, target_), Object::null());
    // The type arguments are always copied.
    Base::ForwardCompressedPointer(
        from, to, OFFSET_OF(UntaggedWeakReference, type_arguments_));
    Base::StoreCompressedPointerNoBarrier(
        Types::GetWeakReferencePtr(to),
        OFFSET_OF(UntaggedWeakReference, next_seen_by_gc_), Object::null());
    Base::EnqueueWeakReference(from);
  }
#define DEFINE_UNSUPPORTED(clazz)                                              \
  void Copy##clazz(typename Types::clazz from, typename Types::clazz to) {     \
    FATAL("Objects of type " #clazz " should not occur in object graphs");     \
  }

  FOR_UNSUPPORTED_CLASSES(DEFINE_UNSUPPORTED)

#undef DEFINE_UNSUPPORTED
  UntaggedObject* UntagObject(typename Types::Object obj) {
    return Types::GetObjectPtr(obj).Decompress(Base::heap_base_).untag();
  }
  Untagged##V* Untag##V(typename Types::V obj) {                               \
    return Types::Get##V##Ptr(obj).Decompress(Base::heap_base_).untag();       \
  }
    if (root_copy == Marker()) {
      return root_copy;
    }
    // ...
    FastCopyObject(from, to);
    intptr_t i = 0;
    while (i < weak_properties.length()) {
      from_weak_property = weak_properties[i];
      weak_property_key =
          fast_forward_map_.ForwardedObject(from_weak_property.key());
      if (weak_property_key.ptr() != Marker()) {
        // The key is reachable in the copied graph, so forward the value too.
        to_weak_property ^=
            fast_forward_map_.ForwardedObject(from_weak_property.ptr());
        to_weak_property.set_key(weak_property_key);
        // ... (forward the value stored behind to_weak_property.ptr())
        // Remove the processed entry by swapping it with the last one.
        const intptr_t last = weak_properties.length() - 1;
        weak_properties[i] = weak_properties[last];
        weak_properties.SetLength(last);
      } else {
        i++;
      }
    }
    for (intptr_t i = 0; i < weak_references.length(); i++) {
      from_weak_reference = weak_references[i];
      weak_reference_target =
          fast_forward_map_.ForwardedObject(from_weak_reference.target());
      if (weak_reference_target.ptr() != Marker()) {
        to_weak_reference ^=
            fast_forward_map_.ForwardedObject(from_weak_reference.ptr());
        to_weak_reference.set_target(weak_reference_target);
      }
    }
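
    // Weak references need no fixpoint: a target that is only weakly
    // reachable stays uncopied, and the copy keeps a cleared (null) target.
    // Weak properties, by contrast, can make values reachable, which is why
    // they loop until no key changes state.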
    if (root_copy != Marker()) {
      ObjectPtr array = TryBuildArrayOfObjectsToRehash(
          fast_forward_map_.raw_objects_to_rehash_);
      if (array == Marker()) return root_copy;
      raw_objects_to_rehash_ = Array::RawCast(array);

      array = TryBuildArrayOfObjectsToRehash(
          fast_forward_map_.raw_expandos_to_rehash_);
      if (array == Marker()) return root_copy;
      raw_expandos_to_rehash_ = Array::RawCast(array);
    }
    return root_copy;
    if (array_addr == 0) {
      exception_msg_ = kFastAllocationFailed;
      return Marker();
    }

    const uword header_size =
        UntaggedObject::SizeTag::SizeFits(size) ? size : 0;
    ArrayPtr array(reinterpret_cast<UntaggedArray*>(array_addr));
    SetNewSpaceTaggingWord(array, kArrayCid, header_size);
    array.untag()->length_ = Smi::New(length);
    // ...
    auto array_data = array.untag()->data();
    for (intptr_t i = 0; i < length; ++i) {
      array_data[i] = objects_to_rehash[i];
    }
    return array;
  SlowObjectCopy(Thread* thread, IdentityMap* map)
      : SlowObjectCopyBase(thread, map),
        objects_to_rehash_(Array::Handle(thread->zone())),
        expandos_to_rehash_(Array::Handle(thread->zone())) {}
  ObjectPtr ContinueCopyGraphSlow(const Object& root,
                                  const Object& fast_root_copy) {
    auto& root_copy = Object::Handle(Z, fast_root_copy.ptr());
    if (root_copy.ptr() == Marker()) {
      root_copy = Forward(TagsFromUntaggedObject(root.ptr().untag()), root);
    }
    // ...
    CopyObject(from, to);
    intptr_t i = 0;
    while (i < weak_properties.length()) {
      const auto& from_weak_property = *weak_properties[i];
      weak_property_key =
          slow_forward_map_.ForwardedObject(from_weak_property.key());
      if (weak_property_key.ptr() != Marker()) {
        // ...
        // Remove the processed entry by swapping it with the last one.
        const intptr_t last = weak_properties.length() - 1;
        weak_properties[i] = weak_properties[last];
        weak_properties.SetLength(last);
      } else {
        i++;
      }
    }
    for (intptr_t i = 0; i < weak_references.length(); i++) {
      const auto& from_weak_reference = *weak_references[i];
      weak_reference_target =
          slow_forward_map_.ForwardedObject(from_weak_reference.target());
      if (weak_reference_target.ptr() != Marker()) {
        to_weak_reference ^=
            slow_forward_map_.ForwardedObject(from_weak_reference.ptr());
        to_weak_reference.set_target(weak_reference_target);
      }
    }
    objects_to_rehash_ =
        BuildArrayOfObjectsToRehash(slow_forward_map_.objects_to_rehash_);
    expandos_to_rehash_ =
        BuildArrayOfObjectsToRehash(slow_forward_map_.expandos_to_rehash_);
    return root_copy.ptr();
    const intptr_t length = objects_to_rehash.length();
    const auto& array = Array::Handle(Z, Array::New(length));
    for (intptr_t i = 0; i < length; ++i) {
      array.SetAt(i, *objects_to_rehash[i]);
    }
    return array.ptr();
  void CopyObject(const Object& from, const Object& to) {
    // ...
  }
  Array& objects_to_rehash_;
  Array& expandos_to_rehash_;
  explicit ObjectGraphCopier(Thread* thread)
      : thread_(thread),
        zone_(thread->zone()),
        map_(thread),
        fast_object_copy_(thread_, &map_),
        slow_object_copy_(thread_, &map_) {}
    const char* volatile exception_msg = nullptr;
    auto& result = Object::Handle(zone_);

    LongJumpScope jump;
    if (setjmp(*jump.Set()) == 0) {
      result = CopyObjectGraphInternal(root, &exception_msg);
    auto& result_array = Array::Cast(result);
    if (result_array.At(0) == Marker()) {
      ASSERT(exception_msg != nullptr);
      auto& unexpected_object_ = Object::Handle(zone_, result_array.At(1));
      if (!unexpected_object_.IsNull()) {
        exception_msg = OS::SCreate(
            zone_, "%s\n%s", exception_msg,
            FindRetainingPath(zone_, thread_->isolate(), root,
                              unexpected_object_,
                              TraversalRules::kInternalToIsolateGroup));
      }
      ThrowException(exception_msg);
    }
  ObjectPtr CopyObjectGraphInternal(const Object& root,
                                    const char* volatile* exception_msg) {
    const auto& result_array = Array::Handle(zone_, Array::New(3));
    if (!root.ptr()->IsHeapObject()) {
      result_array.SetAt(0, root);
      return result_array.ptr();
    }
    const uword tags = TagsFromUntaggedObject(root.ptr().untag());
    if (CanShareObject(root.ptr(), tags)) {
      result_array.SetAt(0, root);
      return result_array.ptr();
    }
    // Reject roots that can never be sent before attempting the copy.
    if (!fast_object_copy_.CanCopyObject(tags, root.ptr())) {
      *exception_msg = fast_object_copy_.exception_msg_;
      result_array.SetAt(0, Object::Handle(zone_, Marker()));
      return result_array.ptr();
    }
    if (FLAG_enable_fast_object_copy) {
      {
        NoSafepointScope no_safepoint_scope;

        result = fast_object_copy_.TryCopyGraphFast(root.ptr());
        if (result.ptr() != Marker() &&
            fast_object_copy_.exception_msg_ == nullptr) {
          result_array.SetAt(0, result);
          fast_object_copy_.tmp_ = fast_object_copy_.raw_objects_to_rehash_;
          result_array.SetAt(1, fast_object_copy_.tmp_);
          fast_object_copy_.tmp_ = fast_object_copy_.raw_expandos_to_rehash_;
          result_array.SetAt(2, fast_object_copy_.tmp_);
          HandlifyExternalTypedData();
          HandlifyTransferables();
          // ...
          return result_array.ptr();
        }
      }
      // The fast path fell off (allocation failure or an object it cannot
      // handle); fall back to the slow path.
      SwitchToSlowForwardingList();

      if (FLAG_gc_on_foc_slow_path) {
        // ...
      }

      ObjectifyFromToObjects();
      result = slow_object_copy_.ContinueCopyGraphSlow(root, result);
      // ...
        return result_array.ptr();
    }

    // Slow path succeeded: package up the result.
    result_array.SetAt(0, result);
    result_array.SetAt(1, slow_object_copy_.objects_to_rehash_);
    result_array.SetAt(2, slow_object_copy_.expandos_to_rehash_);
    // ...
    return result_array.ptr();
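
  // The 3-element result array is the contract between the copier and
  // CopyObjectGraph(): slot 0 carries the root copy, or Marker() on failure
  // (slot 1 then carries the offending object); on success slots 1 and 2
  // carry the arrays of objects and expandos to rehash on arrival.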
  void SwitchToSlowForwardingList() {
    auto& fast_forward_map = fast_object_copy_.fast_forward_map_;
    auto& slow_forward_map = slow_object_copy_.slow_forward_map_;

    MakeUninitializedNewSpaceObjectsGCSafe();
    HandlifyTransferables();
    HandlifyWeakProperties();
    HandlifyWeakReferences();
    HandlifyExternalTypedData();
    HandlifyObjectsToReHash();
    HandlifyExpandosToReHash();
    HandlifyFromToObjects();
    slow_forward_map.fill_cursor_ = fast_forward_map.fill_cursor_;
    slow_forward_map.allocated_bytes = fast_forward_map.allocated_bytes;
  }
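
  // The switch must handlify every raw pointer the fast path accumulated:
  // the slow path allocates with safepoints enabled, so a GC may move any
  // object that is only referenced from raw C++ pointers.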
  void MakeUninitializedNewSpaceObjectsGCSafe() {
    auto& fast_forward_map = fast_object_copy_.fast_forward_map_;
    const auto length = fast_forward_map.raw_from_to_.length();
    const auto cursor = fast_forward_map.fill_cursor_;
    for (intptr_t i = cursor; i < length; i += 2) {
      auto from = fast_forward_map.raw_from_to_[i];
      auto to = fast_forward_map.raw_from_to_[i + 1];
      const auto cid = from.GetClassId();
#if defined(DART_COMPRESSED_POINTERS)
      const bool compressed = true;
#else
      const bool compressed = false;
#endif
      // Only `to` objects past the fill cursor are not fully initialized yet;
      // reinitialize them to a GC-safe, null-filled state.
      if (IsTypedDataBaseClassId(cid)) {
        Object::InitializeObject(reinterpret_cast<uword>(to.untag()), cid,
                                 from.untag()->HeapSize(), compressed,
                                 Object::from_offset<TypedDataBase>(),
                                 Object::to_offset<TypedDataBase>());
      } else {
        const uword ptr_field_end_offset =
            from.untag()->HeapSize() -
            (compressed ? kCompressedWordSize : kWordSize);
        Object::InitializeObject(reinterpret_cast<uword>(to.untag()), cid,
                                 from.untag()->HeapSize(), compressed,
                                 Object::from_offset<Object>(),
                                 ptr_field_end_offset);
      }
      UpdateLengthField(cid, from, to);
    }
  }
  void HandlifyTransferables() {
    // ...
  }
  void HandlifyWeakProperties() {
    // ...
  }
  void HandlifyWeakReferences() {
    // ...
  }
  void HandlifyExternalTypedData() {
    // ...
  }
  void HandlifyObjectsToReHash() {
    // ...
  }
  void HandlifyExpandosToReHash() {
    // ...
  }
  template <typename PtrType, typename HandleType>
  void Handlify(GrowableArray<PtrType>* from,
                GrowableArray<const HandleType*>* to) {
    const auto length = from->length();
    if (length > 0) {
      to->Resize(length);
      for (intptr_t i = 0; i < length; i++) {
        (*to)[i] = &HandleType::Handle(Z, (*from)[i]);
      }
      from->Clear();
    }
  }
  void HandlifyFromToObjects() {
    auto& fast_forward_map = fast_object_copy_.fast_forward_map_;
    auto& slow_forward_map = slow_object_copy_.slow_forward_map_;
    const intptr_t length = fast_forward_map.raw_from_to_.length();
    slow_forward_map.from_to_transition_.Resize(length);
    for (intptr_t i = 0; i < length; i++) {
      slow_forward_map.from_to_transition_[i] =
          &PassiveObject::Handle(Z, fast_forward_map.raw_from_to_[i]);
    }
    ASSERT(slow_forward_map.from_to_transition_.length() == length);
    fast_forward_map.raw_from_to_.Clear();
  }
  void ObjectifyFromToObjects() {
    auto& from_to_transition =
        slow_object_copy_.slow_forward_map_.from_to_transition_;
    auto& from_to = slow_object_copy_.slow_forward_map_.from_to_;
    intptr_t length = from_to_transition.length();
    for (intptr_t i = 0; i < length; i++) {
      from_to.Add(*from_to_transition[i]);
    }
    from_to_transition.Clear();
  }
  void ThrowException(const char* exception_msg) {
    const auto& msg_obj = String::Handle(Z, String::New(exception_msg));
    const auto& args = Array::Handle(Z, Array::New(1));
    args.SetAt(0, msg_obj);
    Exceptions::ThrowByType(Exceptions::kArgument, args);
    UNREACHABLE();
  }
  FastObjectCopy fast_object_copy_;
  SlowObjectCopy slow_object_copy_;
  intptr_t copied_objects_ = 0;
  intptr_t allocated_bytes_ = 0;
#if defined(SUPPORT_TIMELINE)
  if (tbes.enabled()) {
    tbes.SetNumArguments(2);
    // ...
  }
#endif  // defined(SUPPORT_TIMELINE)
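
// A sketch of the entry point that ties the pieces together (assuming the
// names above; the real call site lives in the message-send machinery):
//
//   ObjectPtr CopyMutableObjectGraph(const Object& object) {
//     auto thread = Thread::Current();
//     TIMELINE_DURATION(thread, Isolate, "CopyMutableObjectGraph");
//     ObjectGraphCopier copier(thread);
//     return copier.CopyObjectGraph(object);
//   }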