37#if !defined(DART_PRECOMPILED_RUNTIME)
45#if !defined(DART_PRECOMPILED_RUNTIME)
47 print_cluster_information,
49 "Print information about clusters written to snapshot");
52#if defined(DART_PRECOMPILER)
54 write_v8_snapshot_profile_to,
56 "Write a snapshot profile in V8 format to a file.");
58 print_array_optimization_candidates,
60 "Print information about how many array are candidates for Smi and "
61 "ROData optimizations.");
72static constexpr intptr_t kDeltaEncodedTypedDataCid =
kNativePointer;
76struct GrowableArrayStorageTraits {
82 intptr_t Length()
const {
return length_; }
83 void SetAt(intptr_t index,
const Object& value)
const {
84 array_[index] =
value.ptr();
86 ObjectPtr At(intptr_t index)
const {
return array_[index]; }
90 ObjectPtr* array_ =
nullptr;
94 using ArrayPtr = Array*;
95 class ArrayHandle :
public ZoneAllocated {
97 explicit ArrayHandle(ArrayPtr ptr) : ptr_(ptr) {}
100 void SetFrom(
const ArrayHandle& other) { ptr_ = other.ptr_; }
101 void Clear() { ptr_ =
nullptr; }
102 bool IsNull()
const {
return ptr_ ==
nullptr; }
103 ArrayPtr ptr() {
return ptr_; }
105 intptr_t Length()
const {
return ptr_->Length(); }
106 void SetAt(intptr_t index,
const Object& value)
const {
107 ptr_->SetAt(index, value);
109 ObjectPtr At(intptr_t index)
const {
return ptr_->At(index); }
112 ArrayPtr ptr_ =
nullptr;
116 static ArrayHandle& PtrToHandle(ArrayPtr ptr) {
117 return *
new ArrayHandle(ptr);
120 static void SetHandle(ArrayHandle& dst,
const ArrayHandle& src) {
124 static void ClearHandle(ArrayHandle& dst) {
128 static ArrayPtr New(Zone* zone, intptr_t
length, Heap::Space space) {
129 return new (zone) Array(zone,
length);
132 static bool IsImmutable(
const ArrayHandle& handle) {
return false; }
134 static ObjectPtr At(ArrayHandle* array, intptr_t index) {
135 return array->At(index);
138 static void SetAt(ArrayHandle* array, intptr_t index,
const Object& value) {
139 array->SetAt(index, value);
144#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
146static void RelocateCodeObjects(
148 GrowableArray<CodePtr>* code_objects,
149 GrowableArray<ImageWriterCommand>* image_writer_commands) {
154 WritableCodePages writable_code_pages(thread, isolate_group);
155 CodeRelocator::Relocate(thread, code_objects, image_writer_commands, is_vm);
221 bool is_immutable =
false)
242#if defined(DART_PRECOMPILED_RUNTIME)
244 FATAL(
"%s needs canonicalization but doesn't define PostLoad",
name());
304class CodeSerializationCluster;
317 const char*
type =
nullptr,
318 const char*
name =
nullptr);
333#if defined(SNAPSHOT_BACKTRACE)
369 ReserveId(serializer,
380 ReserveId(serializer,
type, object,
name),
409 template <
typename T>
432 intptr_t
id =
RefId(
object);
434 if (profile_writer_ !=
nullptr) {
465 intptr_t
id =
RefId(
object);
467 if (profile_writer_ !=
nullptr) {
469 object_currently_writing_.cid_,
offset)) {
477 template <
typename T,
typename... P>
479 auto* from = obj->untag()->from();
480 auto* to = obj->untag()->to_snapshot(
kind(),
args...);
484 template <
typename T>
486 for (
auto* p = from; p <= to; p++) {
493 template <
typename T,
typename... P>
495 auto* from = obj->untag()->from();
496 auto* to = obj->untag()->to_snapshot(
kind(),
args...);
500 template <
typename T>
502 for (
auto* p = from; p <= to; p++) {
519 uint32_t unchecked_offset,
546 return loading_units_;
549 loading_units_ = units;
553 current_loading_unit_id_ =
id;
580 ASSERT(profile_writer_ !=
nullptr);
584 return heap_->
GetObjectId(
object) > num_base_objects_;
588 const char* ReadOnlyObjectType(intptr_t
cid);
602 intptr_t cid_override;
604 GrowableArray<StackEntry> stack_;
607 intptr_t num_tlc_cids_;
608 intptr_t num_base_objects_;
609 intptr_t num_written_objects_;
610 intptr_t next_ref_index_;
612 intptr_t dispatch_table_size_ = 0;
613 intptr_t bytes_heap_allocated_ = 0;
614 intptr_t instructions_table_len_ = 0;
615 intptr_t instructions_table_rodata_offset_ = 0;
620 V8SnapshotProfileWriter* profile_writer_ =
nullptr;
621 struct ProfilingObject {
622 ObjectPtr object_ =
nullptr;
625 V8SnapshotProfileWriter::ObjectId id_ =
627 intptr_t last_stream_position_ = 0;
629 } object_currently_writing_;
630 OffsetsTable* offsets_table_ =
nullptr;
632#if defined(SNAPSHOT_BACKTRACE)
633 ObjectPtr current_parent_;
634 GrowableArray<Object*> parent_pairs_;
637#if defined(DART_PRECOMPILER)
638 IntMap<intptr_t> deduped_instructions_sources_;
639 IntMap<intptr_t> code_index_;
642 intptr_t current_loading_unit_id_ = 0;
643 GrowableArray<LoadingUnitSerializationData*>* loading_units_ =
nullptr;
644 ZoneGrowableArray<Object*>* objects_ =
new ZoneGrowableArray<Object*>();
649#define AutoTraceObject(obj) \
650 Serializer::WritingObjectScope scope_##__COUNTER__(s, name(), obj, nullptr)
652#define AutoTraceObjectName(obj, str) \
653 Serializer::WritingObjectScope scope_##__COUNTER__(s, name(), obj, str)
655#define WriteFieldValue(field, value) s->WritePropertyRef(value, #field);
657#define WriteFromTo(obj, ...) s->WriteFromTo(obj, ##__VA_ARGS__);
659#define PushFromTo(obj, ...) s->PushFromTo(obj, ##__VA_ARGS__);
661#define WriteField(obj, field) s->WritePropertyRef(obj->untag()->field, #field)
662#define WriteCompressedField(obj, name) \
663 s->WritePropertyRef(obj->untag()->name(), #name "_")
671 const uint8_t* data_buffer,
672 const uint8_t* instructions_buffer,
687 bool is_canonical =
false) {
699 template <
typename T>
724 ASSERT(next_ref_index_ <= num_objects_);
725 refs_->untag()->data()[next_ref_index_] = object;
731 ASSERT(index <= num_objects_);
732 return refs_->untag()->element(index);
743 intptr_t code_index);
753 return Read<int32_t>();
771 intptr_t deferred_code_start_index,
772 intptr_t deferred_code_end_index);
778#if defined(DART_PRECOMPILED_RUNTIME)
790 return instructions_table_;
800 :
ReadStream(
d->stream_.buffer_,
d->stream_.current_,
d->stream_.end_),
806 d->stream_.current_ =
nullptr;
809 ~Local() { d_->stream_.current_ = current_; }
813 ASSERT(index <= d_->num_objects_);
814 return refs_->untag()->element(index);
817 template <
typename T>
830 return Read<int32_t>();
833 template <
typename T,
typename... P>
835 auto* from = obj->untag()->from();
836 auto* to_snapshot = obj->untag()->to_snapshot(d_->
kind(),
params...);
837 auto* to = obj->untag()->to(
params...);
838 for (
auto* p = from; p <= to_snapshot; p++) {
845 for (
auto* p = to_snapshot + 1; p <= to; p++) {
852 const ArrayPtr refs_;
864 intptr_t num_base_objects_;
865 intptr_t num_objects_;
866 intptr_t num_clusters_;
868 intptr_t next_ref_index_;
869 intptr_t code_start_index_ = 0;
870 intptr_t code_stop_index_ = 0;
871 intptr_t instructions_index_ = 0;
873 const bool is_non_root_unit_;
898 raw->
untag()->tags_ = tags;
901#if !defined(DART_PRECOMPILED_RUNTIME)
909 serializer->
Write<uint32_t>(tags);
914 if (FLAG_print_cluster_information) {
916 stop_size - start_size);
918 stop_data - start_data);
921 size_ += (stop_size - start_size) + (stop_data - start_data);
932 if (FLAG_print_cluster_information) {
942 intptr_t instance_size) {
944 intptr_t
count =
d->ReadUnsigned();
945 for (intptr_t i = 0; i <
count; i++) {
946 d->AssignRef(
d->Allocate(instance_size));
951#if !defined(DART_PRECOMPILED_RUNTIME)
955 const auto unboxed_fields_bitmap_host =
956 s->isolate_group()->class_table()->GetUnboxedFieldsMapAt(class_id);
959 if (unboxed_fields_bitmap_host.IsEmpty() ||
960 kWordSize == compiler::target::kWordSize) {
961 unboxed_fields_bitmap = unboxed_fields_bitmap_host;
966 unboxed_fields_bitmap.
Reset();
967 intptr_t target_i = 0, host_i = 0;
972 if (unboxed_fields_bitmap_host.Get(host_i++)) {
973 unboxed_fields_bitmap.
Set(target_i++);
974 unboxed_fields_bitmap.
Set(target_i++);
982 return unboxed_fields_bitmap;
992 objects_(num_cids) {}
997 intptr_t class_id = cls->untag()->id_;
1001 s->UnexpectedObject(cls,
"Class with illegal cid");
1007 predefined_.
Add(cls);
1018 for (intptr_t i = 0; i <
count; i++) {
1019 ClassPtr cls = predefined_[i];
1022 intptr_t class_id = cls->untag()->id_;
1023 s->WriteCid(class_id);
1027 for (intptr_t i = 0; i <
count; i++) {
1028 ClassPtr cls = objects_[i];
1035 for (intptr_t i = 0; i <
count; i++) {
1036 WriteClass(
s, predefined_[i]);
1039 for (intptr_t i = 0; i <
count; i++) {
1040 WriteClass(
s, objects_[i]);
1048 intptr_t class_id = cls->untag()->id_;
1050 s->UnexpectedObject(cls,
"Class with illegal cid");
1052 s->WriteCid(class_id);
1054 s->Write<uint32_t>(cls->untag()->kernel_offset_);
1059 s->Write<int16_t>(cls->untag()->num_type_arguments_);
1060 s->Write<uint16_t>(cls->untag()->num_native_fields_);
1062 s->WriteTokenPosition(cls->untag()->token_pos_);
1063 s->WriteTokenPosition(cls->untag()->end_token_pos_);
1064 s->WriteCid(cls->untag()->implementor_cid_);
1066 s->Write<uint32_t>(cls->untag()->state_bits_);
1069 const auto unboxed_fields_map =
1071 s->WriteUnsigned64(unboxed_fields_map.Value());
1075 GrowableArray<ClassPtr> predefined_;
1076 GrowableArray<ClassPtr> objects_;
1086 predefined_start_index_ =
d->next_index();
1087 intptr_t
count =
d->ReadUnsigned();
1089 for (intptr_t i = 0; i <
count; i++) {
1090 intptr_t class_id =
d->ReadCid();
1092 ClassPtr cls =
table->At(class_id);
1096 predefined_stop_index_ =
d->next_index();
1099 count =
d->ReadUnsigned();
1100 for (intptr_t i = 0; i <
count; i++) {
1109 for (intptr_t
id = predefined_start_index_;
id < predefined_stop_index_;
1111 ClassPtr cls =
static_cast<ClassPtr
>(
d.Ref(
id));
1113 intptr_t class_id =
d.ReadCid();
1114 cls->untag()->id_ = class_id;
1115#if !defined(DART_PRECOMPILED_RUNTIME)
1117 cls->untag()->kernel_offset_ =
d.Read<uint32_t>();
1120 cls->untag()->host_instance_size_in_words_ =
d.Read<int32_t>();
1121 cls->untag()->host_next_field_offset_in_words_ =
d.Read<int32_t>();
1122#if defined(DART_PRECOMPILER)
1125 cls->untag()->target_instance_size_in_words_ =
1126 cls->untag()->host_instance_size_in_words_;
1127 cls->untag()->target_next_field_offset_in_words_ =
1128 cls->untag()->host_next_field_offset_in_words_;
1134 cls->untag()->host_type_arguments_field_offset_in_words_ =
1136#if defined(DART_PRECOMPILER)
1137 cls->untag()->target_type_arguments_field_offset_in_words_ =
1138 cls->untag()->host_type_arguments_field_offset_in_words_;
1140 cls->untag()->num_type_arguments_ =
d.Read<int16_t>();
1141 cls->untag()->num_native_fields_ =
d.Read<uint16_t>();
1142#if !defined(DART_PRECOMPILED_RUNTIME)
1144 cls->untag()->token_pos_ =
d.ReadTokenPosition();
1145 cls->untag()->end_token_pos_ =
d.ReadTokenPosition();
1146 cls->untag()->implementor_cid_ =
d.ReadCid();
1148 cls->untag()->state_bits_ =
d.Read<uint32_t>();
1154 ClassPtr cls =
static_cast<ClassPtr
>(
d.Ref(
id));
1158 intptr_t class_id =
d.ReadCid();
1160 cls->untag()->id_ = class_id;
1162#if !defined(DART_PRECOMPILED_RUNTIME)
1164 cls->untag()->kernel_offset_ =
d.Read<uint32_t>();
1166 cls->untag()->host_instance_size_in_words_ =
d.Read<int32_t>();
1167 cls->untag()->host_next_field_offset_in_words_ =
d.Read<int32_t>();
1168 cls->untag()->host_type_arguments_field_offset_in_words_ =
1170#if defined(DART_PRECOMPILER)
1171 cls->untag()->target_instance_size_in_words_ =
1172 cls->untag()->host_instance_size_in_words_;
1173 cls->untag()->target_next_field_offset_in_words_ =
1174 cls->untag()->host_next_field_offset_in_words_;
1175 cls->untag()->target_type_arguments_field_offset_in_words_ =
1176 cls->untag()->host_type_arguments_field_offset_in_words_;
1178 cls->untag()->num_type_arguments_ =
d.Read<int16_t>();
1179 cls->untag()->num_native_fields_ =
d.Read<uint16_t>();
1180#if !defined(DART_PRECOMPILED_RUNTIME)
1182 cls->untag()->token_pos_ =
d.ReadTokenPosition();
1183 cls->untag()->end_token_pos_ =
d.ReadTokenPosition();
1184 cls->untag()->implementor_cid_ =
d.ReadCid();
1186 cls->untag()->state_bits_ =
d.Read<uint32_t>();
1188 table->AllocateIndex(class_id);
1189 table->SetAt(class_id, cls);
1193 table->SetUnboxedFieldsMapAt(class_id, unboxed_fields_map);
1199 intptr_t predefined_start_index_;
1200 intptr_t predefined_stop_index_;
1218#if !defined(DART_PRECOMPILED_RUNTIME)
1219template <
typename SetType,
1220 typename HandleType,
1221 typename PointerType,
1222 bool kAllCanonicalObjectsAreIncludedIntoSet =
true>
1227 bool represents_canonical_set,
1229 intptr_t target_instance_size = 0)
1231 represents_canonical_set_(represents_canonical_set) {}
1236 ASSERT(kAllCanonicalObjectsAreIncludedIntoSet);
1241 if (!represents_canonical_set_) {
1247 using ZoneCanonicalSet =
1251 intptr_t required_capacity = 0;
1254 required_capacity++;
1259 const intptr_t kSpareCapacity = 32;
1260 required_capacity =
static_cast<intptr_t
>(
1261 static_cast<double>(required_capacity + kSpareCapacity) /
1264 intptr_t num_occupied = 0;
1268 ZoneCanonicalSet
table(
1269 s->zone(), HashTables::New<ZoneCanonicalSet>(required_capacity));
1270 HandleType& element = HandleType::Handle(
s->zone());
1274 intptr_t entry = -1;
1275 const bool present =
table.FindKeyOrDeletedOrUnused(element, &entry);
1277 table.InsertKey(entry, element);
1283 const auto prefix_length = num_occupied;
1286 auto& arr =
table.Release();
1287 intptr_t last_occupied = ZoneCanonicalSet::kFirstKeyIndex - 1;
1288 for (intptr_t i = ZoneCanonicalSet::kFirstKeyIndex,
length = arr.Length();
1291 ASSERT(v != ZoneCanonicalSet::DeletedMarker().ptr());
1292 if (v != ZoneCanonicalSet::UnusedMarker().ptr()) {
1293 const intptr_t unused_run_length = (i - 1) - last_occupied;
1294 gaps_.
Add(unused_run_length);
1295 objects_[num_occupied++] =
static_cast<PointerType
>(v);
1301 table_length_ = arr.Length();
1305 if (represents_canonical_set_) {
1306 s->WriteUnsigned(table_length_);
1308 for (
auto gap : gaps_) {
1309 s->WriteUnsigned(gap);
1312 compiler::target::Array::InstanceSize(table_length_);
1319 const bool represents_canonical_set_;
1321 intptr_t table_length_ = 0;
1325template <
typename SetType,
bool kAllCanonicalObjectsAreIncludedIntoSet = true>
1333 table_(SetType::ArrayHandle::Handle()) {}
1340 const auto table_length =
d->ReadUnsigned();
1343 auto table = StartDeserialization(
d, table_length,
count);
1345 table.FillGap(
d->ReadUnsigned());
1346 table.WriteElement(
d,
d->Ref(i));
1358 const typename SetType::ArrayHandle& current_table) {
1361 if (!current_table.IsNull()) {
1362 SetType current_set(
d->zone(), current_table.ptr());
1363 ASSERT(current_set.NumOccupied() == 0);
1364 current_set.Release();
1369 SetType canonical_set(
d->zone(),
table_.ptr());
1375 canonical_set.Release();
1380 struct DeserializationFinger {
1381 typename SetType::ArrayPtr table;
1382 intptr_t current_index;
1385 void FillGap(
int length) {
1386 for (intptr_t j = 0; j <
length; j++) {
1387 table->untag()->data()[current_index + j] = gap_element;
1392 void WriteElement(Deserializer*
d, ObjectPtr
object) {
1393 table->untag()->data()[current_index++] = object;
1396 typename SetType::ArrayPtr Finish() {
1397 if (table != SetType::ArrayHandle::null()) {
1401 table = SetType::ArrayHandle::null();
1406 static DeserializationFinger StartDeserialization(Deserializer*
d,
1409 const intptr_t instance_size = SetType::ArrayHandle::InstanceSize(
length);
1410 typename SetType::ArrayPtr
table =
1411 static_cast<typename SetType::ArrayPtr
>(
d->Allocate(instance_size));
1414 if ((SetType::Storage::ArrayCid == kArrayCid) &&
1416 table->untag()->SetCardRememberedBitUnsynchronized();
1418 InitTypeArgsOrNext(
table);
1420 for (intptr_t i = 0; i < SetType::kFirstKeyIndex; i++) {
1424 return {
table, SetType::kFirstKeyIndex, SetType::UnusedMarker().ptr()};
1427 static void InitTypeArgsOrNext(ArrayPtr
table) {
1430 static void InitTypeArgsOrNext(WeakArrayPtr
table) {
1435#if !defined(DART_PRECOMPILED_RUNTIME)
1447 objects_.
Add(type_params);
1454 for (intptr_t i = 0; i <
count; i++) {
1455 TypeParametersPtr type_params = objects_[i];
1456 s->AssignRef(type_params);
1462 for (intptr_t i = 0; i <
count; i++) {
1463 TypeParametersPtr type_params = objects_[i];
1489 TypeParametersPtr type_params =
static_cast<TypeParametersPtr
>(
d.Ref(
id));
1492 d.ReadFromTo(type_params);
1497#if !defined(DART_PRECOMPILED_RUNTIME)
1504 bool represents_canonical_set)
1507 represents_canonical_set,
1515 s->Push(type_args->untag()->instantiations());
1517 for (intptr_t i = 0; i <
length; i++) {
1518 s->Push(type_args->untag()->element(i));
1526 for (intptr_t i = 0; i <
count; i++) {
1527 TypeArgumentsPtr type_args =
objects_[i];
1528 s->AssignRef(type_args);
1533 compiler::target::TypeArguments::InstanceSize(
length);
1540 for (intptr_t i = 0; i <
count; i++) {
1541 TypeArgumentsPtr type_args =
objects_[i];
1546 s->Write<int32_t>(
hash);
1547 const intptr_t nullability =
1548 Smi::Value(type_args->untag()->nullability());
1549 s->WriteUnsigned(nullability);
1551 for (intptr_t j = 0; j <
length; j++) {
1552 s->WriteElementRef(type_args->untag()->element(j), j);
1571 const intptr_t
count =
d->ReadUnsigned();
1572 for (intptr_t i = 0; i <
count; i++) {
1573 const intptr_t
length =
d->ReadUnsigned();
1585 TypeArgumentsPtr type_args =
static_cast<TypeArgumentsPtr
>(
d.Ref(
id));
1586 const intptr_t
length =
d.ReadUnsigned();
1591 type_args->untag()->hash_ =
Smi::New(
d.Read<int32_t>());
1592 type_args->untag()->nullability_ =
Smi::New(
d.ReadUnsigned());
1593 type_args->untag()->instantiations_ =
static_cast<ArrayPtr
>(
d.ReadRef());
1594 for (intptr_t j = 0; j <
length; j++) {
1595 type_args->untag()->types()[j] =
1596 static_cast<AbstractTypePtr
>(
d.ReadRef());
1603 auto object_store =
d->isolate_group()->object_store();
1605 d, refs,
Array::Handle(object_store->canonical_type_arguments()));
1606 object_store->set_canonical_type_arguments(
table_);
1610 type_arg ^= refs.
At(i);
1612 refs.
SetAt(i, type_arg);
1618#if !defined(DART_PRECOMPILED_RUNTIME)
1636 for (intptr_t i = 0; i <
count; i++) {
1637 PatchClassPtr cls = objects_[i];
1644 for (intptr_t i = 0; i <
count; i++) {
1645 PatchClassPtr cls = objects_[i];
1649 s->Write<int32_t>(cls->untag()->kernel_library_index_);
1673 PatchClassPtr cls =
static_cast<PatchClassPtr
>(
d.Ref(
id));
1677#if !defined(DART_PRECOMPILED_RUNTIME)
1679 cls->untag()->kernel_library_index_ =
d.Read<int32_t>();
1685#if !defined(DART_PRECOMPILED_RUNTIME)
1701 s->Push(func->untag()->code());
1704 s->Push(func->untag()->code());
1705 s->Push(func->untag()->ic_data_array());
1715 for (intptr_t i = 0; i <
count; i++) {
1716 FunctionPtr func = objects_[i];
1724 for (intptr_t i = 0; i <
count; i++) {
1725 FunctionPtr func = objects_[i];
1729#if defined(DART_PRECOMPILER)
1730 CodePtr code = func->untag()->code();
1731 const auto code_index =
s->GetCodeIndex(code);
1732 s->WriteUnsigned(code_index);
1733 s->AttributePropertyRef(code,
"code_");
1748#if defined(DART_PRECOMPILER) && !defined(PRODUCT)
1756 if (!script.IsNull()) {
1757 script.GetTokenLocation(token_pos, &line,
nullptr);
1759 token_pos = line == -1 ? TokenPosition::kNoSource
1762 s->WriteTokenPosition(token_pos);
1765 s->WriteTokenPosition(func->untag()->token_pos_);
1769 s->WriteTokenPosition(func->untag()->end_token_pos_);
1770 s->Write<uint32_t>(func->untag()->kernel_offset_);
1771 s->Write<uint32_t>(func->untag()->packed_fields_);
1773 s->Write<uint32_t>(func->untag()->kind_tag_);
1779 if (
s->profile_writer() ==
nullptr) {
1798template <
bool need_entry_po
int_for_non_discarded>
1801 intptr_t code_index,
1802 uword* entry_point) {
1809 const intptr_t
base =
d->is_non_root_unit() ?
d->num_base_objects() : 0;
1810 if (code_index <
base) {
1811 CodePtr code =
static_cast<CodePtr
>(
d->Ref(code_index));
1812 if (need_entry_point_for_non_discarded) {
1826 const intptr_t first_entry_with_code =
1827 d->instructions_table().rodata()->first_entry_with_code;
1828 if (code_index < first_entry_with_code) {
1829 *entry_point =
d->instructions_table().EntryPointAt(code_index);
1830 return StubCode::UnknownDartCode().ptr();
1832 const intptr_t cluster_index = code_index - first_entry_with_code;
1834 static_cast<CodePtr
>(
d->Ref(
d->code_start_index() + cluster_index));
1835 if (need_entry_point_for_non_discarded) {
1843 uword* entry_point)
const {
1845 if (code_index == 0) {
1846 return StubCode::LazyCompile().ptr();
1847 }
else if (FLAG_precompiled_mode) {
1849 false>(
this, code_index,
1853 const intptr_t ref = code_start_index_ + code_index - 1;
1854 ASSERT(code_start_index_ <= ref && ref < code_stop_index_);
1855 return static_cast<CodePtr
>(
Ref(ref));
1860 intptr_t code_index) {
1865 ASSERT(FLAG_precompiled_mode);
1866 const intptr_t first_entry_with_code =
table.rodata()->first_entry_with_code;
1867 return code_index - 1 - first_entry_with_code;
1872 ASSERT(FLAG_precompiled_mode);
1873 uword entry_point = 0;
1875 this, code_index, &entry_point);
1895 FunctionPtr func =
static_cast<FunctionPtr
>(
d.Ref(
id));
1901 func->untag()->entry_point_ = 0;
1902 func->untag()->unchecked_entry_point_ = 0;
1905#if defined(DART_PRECOMPILED_RUNTIME)
1907 const intptr_t code_index =
d.ReadUnsigned();
1908 uword entry_point = 0;
1910 func->untag()->code_ = code;
1911 if (entry_point != 0) {
1912 func->untag()->entry_point_ = entry_point;
1913 func->untag()->unchecked_entry_point_ = entry_point;
1918 func->untag()->unoptimized_code_ =
static_cast<CodePtr
>(
d.ReadRef());
1919 func->untag()->code_ =
static_cast<CodePtr
>(
d.ReadRef());
1920 func->untag()->ic_data_array_ =
static_cast<ArrayPtr
>(
d.ReadRef());
1924#if !defined(DART_PRECOMPILED_RUNTIME)
1926 func->untag()->positional_parameter_names_ =
1927 static_cast<ArrayPtr
>(
d.ReadRef());
1929#if !defined(DART_PRECOMPILED_RUNTIME) || \
1930 (defined(DART_PRECOMPILED_RUNTIME) && !defined(PRODUCT))
1931 func->untag()->token_pos_ =
d.ReadTokenPosition();
1933#if !defined(DART_PRECOMPILED_RUNTIME)
1934 func->untag()->end_token_pos_ =
d.ReadTokenPosition();
1935 func->untag()->kernel_offset_ =
d.Read<uint32_t>();
1936 func->untag()->unboxed_parameters_info_.Reset();
1937 func->untag()->packed_fields_ =
d.Read<uint32_t>();
1940 func->untag()->kind_tag_ =
d.Read<uint32_t>();
1941#if !defined(DART_PRECOMPILED_RUNTIME)
1942 func->untag()->usage_counter_ = 0;
1943 func->untag()->optimized_instruction_count_ = 0;
1944 func->untag()->optimized_call_site_count_ = 0;
1945 func->untag()->deoptimization_counter_ = 0;
1946 func->untag()->state_bits_ = 0;
1947 func->untag()->inlining_depth_ = 0;
1957 auto const code = func.
ptr()->
untag()->code();
1960 uword entry_point = code->untag()->entry_point_;
1961 ASSERT(entry_point != 0);
1962 func.
ptr()->
untag()->entry_point_ = entry_point;
1963 uword unchecked_entry_point = code->untag()->unchecked_entry_point_;
1964 ASSERT(unchecked_entry_point != 0);
1965 func.
ptr()->
untag()->unchecked_entry_point_ = unchecked_entry_point;
1974 if (func.
HasCode() && !code.IsDisabled()) {
1976 func.SetWasCompiled(
true);
1991#if !defined(DART_PRECOMPILED_RUNTIME)
2005 s->Push(
data->untag()->context_scope());
2007 s->Push(
data->untag()->parent_function());
2008 s->Push(
data->untag()->closure());
2014 for (intptr_t i = 0; i <
count; i++) {
2015 ClosureDataPtr
data = objects_[i];
2022 for (intptr_t i = 0; i <
count; i++) {
2023 ClosureDataPtr
data = objects_[i];
2030 s->WriteUnsigned(
static_cast<uint32_t
>(
data->untag()->packed_fields_));
2053 ClosureDataPtr
data =
static_cast<ClosureDataPtr
>(
d.Ref(
id));
2059 data->untag()->context_scope_ =
2060 static_cast<ContextScopePtr
>(
d.ReadRef());
2062 data->untag()->parent_function_ =
static_cast<FunctionPtr
>(
d.ReadRef());
2063 data->untag()->closure_ =
static_cast<ClosurePtr
>(
d.ReadRef());
2064 data->untag()->packed_fields_ =
d.ReadUnsigned<uint32_t>();
2069#if !defined(DART_PRECOMPILED_RUNTIME)
2074 "FfiTrampolineData",
2075 kFfiTrampolineDataCid,
2088 for (intptr_t i = 0; i <
count; i++) {
2089 s->AssignRef(objects_[i]);
2095 for (intptr_t i = 0; i <
count; i++) {
2096 FfiTrampolineDataPtr
const data = objects_[i];
2099 s->Write<int32_t>(
data->untag()->callback_id_);
2100 s->Write<uint8_t>(
data->untag()->ffi_function_kind_);
2124 FfiTrampolineDataPtr
data =
static_cast<FfiTrampolineDataPtr
>(
d.Ref(
id));
2128 data->untag()->callback_id_ =
d.Read<int32_t>();
2129 data->untag()->ffi_function_kind_ =
d.Read<uint8_t>();
2134#if !defined(DART_PRECOMPILED_RUNTIME)
2145 objects_.
Add(field);
2149 s->Push(field->untag()->name());
2150 s->Push(field->untag()->owner());
2151 s->Push(field->untag()->type());
2153 s->Push(field->untag()->initializer_function());
2156 s->Push(field->untag()->guarded_list_length());
2159 s->Push(field->untag()->dependent_code());
2163 s->Push(field->untag()->host_offset_or_field_id());
2172 for (intptr_t i = 0; i <
count; i++) {
2173 FieldPtr field = objects_[i];
2174 s->AssignRef(field);
2181 for (intptr_t i = 0; i <
count; i++) {
2182 FieldPtr field = objects_[i];
2198 s->WriteTokenPosition(field->untag()->token_pos_);
2199 s->WriteTokenPosition(field->untag()->end_token_pos_);
2200 s->WriteCid(field->untag()->guarded_cid_);
2201 s->WriteCid(field->untag()->is_nullable_);
2202 s->Write<int8_t>(field->untag()->static_type_exactness_state_);
2203 s->Write<uint32_t>(field->untag()->kernel_offset_);
2205 s->Write<uint16_t>(field->untag()->kind_bits_);
2234#if !defined(DART_PRECOMPILED_RUNTIME)
2238 FieldPtr field =
static_cast<FieldPtr
>(
d.Ref(
id));
2240 d.ReadFromTo(field);
2241#if !defined(DART_PRECOMPILED_RUNTIME)
2243 field->untag()->guarded_list_length_ =
static_cast<SmiPtr
>(
d.ReadRef());
2245 field->untag()->dependent_code_ =
2246 static_cast<WeakArrayPtr
>(
d.ReadRef());
2248 field->untag()->token_pos_ =
d.ReadTokenPosition();
2249 field->untag()->end_token_pos_ =
d.ReadTokenPosition();
2250 field->untag()->guarded_cid_ =
d.ReadCid();
2251 field->untag()->is_nullable_ =
d.ReadCid();
2252 const int8_t static_type_exactness_state =
d.Read<int8_t>();
2253#if defined(TARGET_ARCH_X64)
2254 field->untag()->static_type_exactness_state_ =
2255 static_type_exactness_state;
2262 USE(static_type_exactness_state);
2263 field->untag()->static_type_exactness_state_ =
2266 field->untag()->kernel_offset_ =
d.Read<uint32_t>();
2268 field->untag()->kind_bits_ =
d.Read<uint16_t>();
2270 field->untag()->host_offset_or_field_id_ =
2271 static_cast<SmiPtr
>(
d.ReadRef());
2272#if !defined(DART_PRECOMPILED_RUNTIME)
2273 field->untag()->target_offset_ =
2274 Smi::Value(field->untag()->host_offset_or_field_id());
2283 field ^= refs.
At(i);
2294 field ^= refs.
At(i);
2301#if !defined(DART_PRECOMPILED_RUNTIME)
2312 objects_.
Add(script);
2313 auto* from = script->untag()->from();
2314 auto* to = script->untag()->to_snapshot(
s->kind());
2315 for (
auto* p = from; p <= to; p++) {
2317 reinterpret_cast<uword>(p) -
reinterpret_cast<uword>(script->untag());
2318 const ObjectPtr obj = p->Decompress(script->heap_base());
2321 s->Push(obj, kDeltaEncodedTypedDataCid);
2331 for (intptr_t i = 0; i <
count; i++) {
2332 ScriptPtr script = objects_[i];
2333 s->AssignRef(script);
2339 for (intptr_t i = 0; i <
count; i++) {
2340 ScriptPtr script = objects_[i];
2346 int32_t written_flags =
2348 0, script->untag()->flags_and_max_position_);
2350 false, written_flags);
2351 s->Write<int32_t>(written_flags);
2353 s->Write<int32_t>(script->untag()->kernel_script_index_);
2376 ScriptPtr script =
static_cast<ScriptPtr
>(
d.Ref(
id));
2379 d.ReadFromTo(script);
2380#if !defined(DART_PRECOMPILED_RUNTIME)
2381 script->untag()->flags_and_max_position_ =
d.Read<int32_t>();
2383 script->untag()->kernel_script_index_ =
d.Read<int32_t>();
2384 script->untag()->load_timestamp_ = 0;
2389#if !defined(DART_PRECOMPILED_RUNTIME)
2407 for (intptr_t i = 0; i <
count; i++) {
2408 LibraryPtr lib = objects_[i];
2415 for (intptr_t i = 0; i <
count; i++) {
2416 LibraryPtr lib = objects_[i];
2419 s->Write<int32_t>(lib->untag()->index_);
2420 s->Write<uint16_t>(lib->untag()->num_imports_);
2421 s->Write<int8_t>(lib->untag()->load_state_);
2422 s->Write<uint8_t>(lib->untag()->flags_);
2424 s->Write<uint32_t>(lib->untag()->kernel_library_index_);
2448 LibraryPtr lib =
static_cast<LibraryPtr
>(
d.Ref(
id));
2451 lib->untag()->native_entry_resolver_ =
nullptr;
2452 lib->untag()->native_entry_symbol_resolver_ =
nullptr;
2453 lib->untag()->ffi_native_resolver_ =
nullptr;
2454 lib->untag()->index_ =
d.Read<int32_t>();
2455 lib->untag()->num_imports_ =
d.Read<uint16_t>();
2456 lib->untag()->load_state_ =
d.Read<int8_t>();
2457 lib->untag()->flags_ =
2459#if !defined(DART_PRECOMPILED_RUNTIME)
2461 lib->untag()->kernel_library_index_ =
d.Read<uint32_t>();
2467#if !defined(DART_PRECOMPILED_RUNTIME)
2485 for (intptr_t i = 0; i <
count; i++) {
2486 NamespacePtr ns = objects_[i];
2493 for (intptr_t i = 0; i <
count; i++) {
2494 NamespacePtr ns = objects_[i];
2519 NamespacePtr ns =
static_cast<NamespacePtr
>(
d.Ref(
id));
2527#if !defined(DART_PRECOMPILED_RUNTIME)
2533 "KernelProgramInfo",
2534 kKernelProgramInfoCid,
2547 for (intptr_t i = 0; i <
count; i++) {
2548 KernelProgramInfoPtr
info = objects_[i];
2555 for (intptr_t i = 0; i <
count; i++) {
2556 KernelProgramInfoPtr
info = objects_[i];
2583 KernelProgramInfoPtr
info =
static_cast<KernelProgramInfoPtr
>(
d.Ref(
id));
2595 array = HashTables::New<UnorderedHashMap<SmiTraits>>(16,
Heap::kOld);
2596 info.set_libraries_cache(array);
2597 array = HashTables::New<UnorderedHashMap<SmiTraits>>(16,
Heap::kOld);
2598 info.set_classes_cache(array);
2612 const bool is_deferred = !
s->InCurrentLoadingUnitOrRoot(code);
2614 s->RecordDeferredCode(code);
2622 ObjectPoolPtr
pool = code->untag()->object_pool_;
2626 if (
s->InCurrentLoadingUnitOrRoot(
pool)) {
2634 s->Push(code->untag()->deopt_info_array_);
2635 s->Push(code->untag()->static_calls_target_table_);
2636 s->Push(code->untag()->compressed_stackmaps_);
2641#if defined(DART_PRECOMPILER)
2642 auto const calls_array = code->untag()->static_calls_target_table_;
2646 array_ = calls_array;
2661 ASSERT(destination->IsHeapObject() && destination->IsCode());
2662 s->Push(destination);
2673 !FLAG_retain_code_objects);
2679 s->Push(code->untag()->owner_);
2680 s->Push(code->untag()->exception_handlers_);
2681 s->Push(code->untag()->pc_descriptors_);
2682 s->Push(code->untag()->catch_entry_);
2683 if (!FLAG_precompiled_mode || !FLAG_dwarf_stack_traces_mode) {
2684 s->Push(code->untag()->inlined_id_to_function_);
2685 if (
s->InCurrentLoadingUnitOrRoot(code->untag()->code_source_map_)) {
2686 s->Push(code->untag()->code_source_map_);
2689#if !defined(PRODUCT)
2690 s->Push(code->untag()->return_address_metadata_);
2691 if (FLAG_code_comments) {
2692 s->Push(code->untag()->comments_);
2702 const intptr_t
length =
pool->untag()->length_;
2703 uint8_t* entry_bits =
pool->untag()->entry_bits();
2704 for (intptr_t i = 0; i <
length; i++) {
2706 if (entry_type == ObjectPool::EntryType::kTaggedObject) {
2712 intptr_t
cid =
target->GetClassIdMayBeSmi();
2713 if (!only_call_targets || (
cid == kCodeCid) || (
cid == kFunctionCid) ||
2714 (
cid == kFieldCid) || (
cid == kClosureCid)) {
2717 s->Push(
s->isolate_group()->class_table()->At(
cid));
2743 if (
a->not_discarded <
b->not_discarded)
return -1;
2744 if (
a->not_discarded >
b->not_discarded)
return 1;
2745 if (
a->instructions_id <
b->instructions_id)
return -1;
2746 if (
a->instructions_id >
b->instructions_id)
return 1;
2754 InstructionsPtr instr = code->untag()->instructions_;
2755 intptr_t
key =
static_cast<intptr_t
>(instr);
2756 intptr_t instructions_id = 0;
2762 instructions_id = order_map->
Lookup(
key);
2764 instructions_id = order_map->
Length() + 1;
2765 order_map->
Insert(
key, instructions_id);
2769 info.instructions_id = instructions_id;
2777 for (intptr_t i = 0; i < codes->
length(); i++) {
2778 Insert(
s, &order_list, &order_map, (*codes)[i]);
2782 for (intptr_t i = 0; i < order_list.
length(); i++) {
2783 (*codes)[i] = order_list[i].code;
2790 for (intptr_t i = 0; i < codes->
length(); i++) {
2791 Insert(
s, &order_list, &order_map, (*codes)[i]->ptr());
2795 for (intptr_t i = 0; i < order_list.
length(); i++) {
2796 *(*codes)[i] = order_list[i].code;
2802 for (
auto code : objects_) {
2815 first_ref_ =
s->next_ref_index();
2816 s->WriteUnsigned(non_discarded_count);
2817 for (
auto code : objects_) {
2827 s->WriteUnsigned(deferred_objects_.
length());
2828 first_deferred_ref_ =
s->next_ref_index();
2829 for (
auto code : deferred_objects_) {
2833 last_ref_ =
s->next_ref_index() - 1;
2840 const int32_t state_bits = code->untag()->state_bits_;
2841 s->Write<int32_t>(state_bits);
2848 for (intptr_t i = 0; i <
count; i++) {
2849 CodePtr code = objects_[i];
2850#if defined(DART_PRECOMPILER)
2851 if (FLAG_write_v8_snapshot_profile_to !=
nullptr &&
2853 s->CreateArtificialNodeIfNeeded(code);
2860 const intptr_t deferred_count = deferred_objects_.
length();
2861 for (intptr_t i = 0; i < deferred_count; i++) {
2862 CodePtr code = deferred_objects_[i];
2871 const intptr_t bytes_written =
s->bytes_written();
2874 intptr_t pointer_offsets_length =
2876 if (pointer_offsets_length != 0) {
2877 FATAL(
"Cannot serialize code with embedded pointers");
2881 s->UnexpectedObject(code,
"Disabled code");
2884 s->WriteInstructions(code->untag()->instructions_,
2885 code->untag()->unchecked_offset_, code, deferred);
2890 const uint32_t active_unchecked_offset =
2891 code->untag()->unchecked_entry_point_ - code->untag()->entry_point_;
2892 s->WriteInstructions(code->untag()->active_instructions_,
2893 active_unchecked_offset, code, deferred);
2896#if defined(DART_PRECOMPILER)
2897 if (FLAG_write_v8_snapshot_profile_to !=
nullptr) {
2902 ObjectPoolPtr
pool = code->untag()->object_pool_;
2904 ASSERT(!
s->HasRef(
pool) ||
pool == Object::empty_object_pool().ptr());
2905 s->CreateArtificialNodeIfNeeded(
pool);
2906 s->AttributePropertyRef(
pool,
"object_pool_");
2909 code->untag()->static_calls_target_table_ !=
Array::null()) {
2910 auto const table = code->untag()->static_calls_target_table_;
2914 s->CreateArtificialNodeIfNeeded(
table);
2915 s->AttributePropertyRef(
table,
"static_calls_target_table_");
2922 ASSERT(
s->bytes_written() == bytes_written);
2926 !FLAG_retain_code_objects);
2927#if defined(DART_PRECOMPILER)
2928 if (FLAG_write_v8_snapshot_profile_to !=
nullptr) {
2930 const auto& owner = code->untag()->owner_;
2931 s->CreateArtificialNodeIfNeeded(owner);
2932 s->AttributePropertyRef(owner,
"owner_");
2941 if (
s->InCurrentLoadingUnitOrRoot(code->untag()->object_pool_)) {
2954 if (FLAG_precompiled_mode && FLAG_dwarf_stack_traces_mode) {
2959 if (
s->InCurrentLoadingUnitOrRoot(code->untag()->code_source_map_)) {
2967 WriteField(code, static_calls_target_table_);
2970#if !defined(PRODUCT)
2972 if (FLAG_code_comments) {
2982 if (
s->profile_writer() ==
nullptr) {
2987 Code& code = reused_code_handle.Handle();
2989 return code.QualifiedName(
2999 intptr_t first_ref_;
3000 intptr_t first_deferred_ref_;
3016 const intptr_t
count =
d->ReadUnsigned();
3017 for (intptr_t i = 0; i <
count; i++) {
3022 deferred_start_index_ =
d->next_index();
3023 const intptr_t deferred_count =
d->ReadUnsigned();
3024 for (intptr_t i = 0; i < deferred_count; i++) {
3027 deferred_stop_index_ =
d->next_index();
3031 const int32_t state_bits =
d->Read<int32_t>();
3035 code->untag()->state_bits_ = state_bits;
3041#if defined(DART_PRECOMPILED_RUNTIME)
3042 ReadFill(
d, deferred_start_index_, deferred_stop_index_,
true);
3044 ASSERT(deferred_start_index_ == deferred_stop_index_);
3049 intptr_t start_index,
3050 intptr_t stop_index,
3052 for (intptr_t
id = start_index, n = stop_index;
id < n;
id++) {
3053 auto const code =
static_cast<CodePtr
>(
d->Ref(
id));
3060 d->ReadInstructions(code, deferred);
3062#if !defined(DART_PRECOMPILED_RUNTIME)
3064 code->untag()->object_pool_ =
static_cast<ObjectPoolPtr
>(
d->ReadRef());
3070 code->untag()->owner_ =
d->ReadRef();
3071 code->untag()->exception_handlers_ =
3072 static_cast<ExceptionHandlersPtr
>(
d->ReadRef());
3073 code->untag()->pc_descriptors_ =
3074 static_cast<PcDescriptorsPtr
>(
d->ReadRef());
3075 code->untag()->catch_entry_ =
d->ReadRef();
3076#if !defined(DART_PRECOMPILED_RUNTIME)
3078 code->untag()->compressed_stackmaps_ =
3079 static_cast<CompressedStackMapsPtr
>(
d->ReadRef());
3084 code->untag()->inlined_id_to_function_ =
3085 static_cast<ArrayPtr
>(
d->ReadRef());
3086 code->untag()->code_source_map_ =
3087 static_cast<CodeSourceMapPtr
>(
d->ReadRef());
3089#if !defined(DART_PRECOMPILED_RUNTIME)
3091 code->untag()->deopt_info_array_ =
static_cast<ArrayPtr
>(
d->ReadRef());
3092 code->untag()->static_calls_target_table_ =
3093 static_cast<ArrayPtr
>(
d->ReadRef());
3096#if !defined(PRODUCT)
3097 code->untag()->return_address_metadata_ =
d->ReadRef();
3099 code->untag()->comments_ = FLAG_code_comments
3100 ?
static_cast<ArrayPtr
>(
d->ReadRef())
3102 code->untag()->compile_timestamp_ = 0;
3108 d->EndInstructions();
3110#if !defined(PRODUCT)
3114#if !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
3118 code ^= refs.
At(
id);
3119#if !defined(DART_PRECOMPILED_RUNTIME) && !defined(PRODUCT)
3124#if !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
3125 owner = code.owner();
3126 if (owner.IsFunction()) {
3127 if ((FLAG_disassemble ||
3128 (code.is_optimized() && FLAG_disassemble_optimized)) &&
3129 compiler::PrintFilter::ShouldPrint(Function::Cast(owner))) {
3131 code.is_optimized());
3133 }
else if (FLAG_disassemble_stubs) {
3141 intptr_t deferred_start_index_;
3142 intptr_t deferred_stop_index_;
3145#if !defined(DART_PRECOMPILED_RUNTIME)
3157 const intptr_t
length =
pool->untag()->length_;
3158 uint8_t* entry_bits =
pool->untag()->entry_bits();
3159 for (intptr_t i = 0; i <
length; i++) {
3161 if (entry_type == ObjectPool::EntryType::kTaggedObject) {
3162 s->Push(
pool->untag()->data()[i].raw_obj_);
3171 for (intptr_t i = 0; i <
count; i++) {
3172 ObjectPoolPtr
pool = objects_[i];
3175 const intptr_t
length =
pool->untag()->length_;
3185 for (intptr_t i = 0; i <
count; i++) {
3186 ObjectPoolPtr
pool = objects_[i];
3188 const intptr_t
length =
pool->untag()->length_;
3190 uint8_t* entry_bits =
pool->untag()->entry_bits();
3191 for (intptr_t j = 0; j <
length; j++) {
3192 UntaggedObjectPool::Entry& entry =
pool->untag()->data()[j];
3193 uint8_t bits = entry_bits[j];
3196 ASSERT(snapshot_behavior !=
3197 ObjectPool::SnapshotBehavior::kNotSnapshotable);
3198 s->Write<uint8_t>(bits);
3199 if (snapshot_behavior != ObjectPool::SnapshotBehavior::kSnapshotable) {
3205 case ObjectPool::EntryType::kTaggedObject: {
3206 if (weak && !
s->HasRef(entry.raw_obj_)) {
3210 s->WriteElementRef(entry.raw_obj_, j);
3214 case ObjectPool::EntryType::kImmediate: {
3215 s->Write<intptr_t>(entry.raw_value_);
3218 case ObjectPool::EntryType::kNativeFunction: {
3241 const intptr_t
count =
d->ReadUnsigned();
3242 for (intptr_t i = 0; i <
count; i++) {
3243 const intptr_t
length =
d->ReadUnsigned();
3253 fill_position_ =
d.Position();
3254#if defined(DART_PRECOMPILED_RUNTIME)
3256 ObjectPool::EntryType::kImmediate, ObjectPool::Patchability::kPatchable,
3257 ObjectPool::SnapshotBehavior::kSnapshotable);
3258 uword switchable_call_miss_entry_point =
3259 StubCode::SwitchableCallMiss().MonomorphicEntryPoint();
3263 const intptr_t
length =
d.ReadUnsigned();
3264 ObjectPoolPtr
pool =
static_cast<ObjectPoolPtr
>(
d.Ref(
id));
3268 for (intptr_t j = 0; j <
length; j++) {
3269 const uint8_t entry_bits =
d.Read<uint8_t>();
3270 pool->untag()->entry_bits()[j] = entry_bits;
3271 UntaggedObjectPool::Entry& entry =
pool->untag()->data()[j];
3272 const auto snapshot_behavior =
3274 ASSERT(snapshot_behavior !=
3275 ObjectPool::SnapshotBehavior::kNotSnapshotable);
3276 switch (snapshot_behavior) {
3277 case ObjectPool::SnapshotBehavior::kSnapshotable:
3280 case ObjectPool::SnapshotBehavior::kResetToBootstrapNative:
3281 entry.raw_obj_ = StubCode::CallBootstrapNative().ptr();
3283#if defined(DART_PRECOMPILED_RUNTIME)
3284 case ObjectPool::SnapshotBehavior::
3285 kResetToSwitchableCallMissEntryPoint:
3286 pool->untag()->entry_bits()[j] = immediate_bits;
3288 static_cast<intptr_t
>(switchable_call_miss_entry_point);
3291 case ObjectPool::SnapshotBehavior::kSetToZero:
3292 entry.raw_value_ = 0;
3295 FATAL(
"Unexpected snapshot behavior: %d\n", snapshot_behavior);
3298 case ObjectPool::EntryType::kTaggedObject:
3299 entry.raw_obj_ =
d.ReadRef();
3301 case ObjectPool::EntryType::kImmediate:
3302 entry.raw_value_ =
d.Read<intptr_t>();
3304 case ObjectPool::EntryType::kNativeFunction: {
3307 entry.raw_value_ =
static_cast<intptr_t
>(new_entry);
3318#if defined(DART_PRECOMPILED_RUNTIME) && \
3319 (!defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER))
3320 if (FLAG_disassemble) {
3322 d->isolate_group()->object_store()->global_object_pool());
3330 intptr_t fill_position_ = 0;
3333#if defined(DART_PRECOMPILER)
3334class WeakSerializationReferenceSerializationCluster
3335 :
public SerializationCluster {
3337 WeakSerializationReferenceSerializationCluster()
3338 : SerializationCluster(
3339 "WeakSerializationReference",
3341 ~WeakSerializationReferenceSerializationCluster() {}
3343 void Trace(Serializer*
s, ObjectPtr
object) {
3344 ASSERT(
s->kind() == Snapshot::kFullAOT);
3345 objects_.Add(WeakSerializationReference::RawCast(
object));
3348 void RetraceEphemerons(Serializer*
s) {
3349 for (intptr_t i = 0; i < objects_.length(); i++) {
3350 WeakSerializationReferencePtr weak = objects_[i];
3351 if (!
s->IsReachable(weak->untag()->target())) {
3352 s->Push(weak->untag()->replacement());
3357 intptr_t Count(Serializer*
s) {
return objects_.length(); }
3359 void CreateArtificialTargetNodesIfNeeded(Serializer*
s) {
3360 for (intptr_t i = 0; i < objects_.length(); i++) {
3361 WeakSerializationReferencePtr weak = objects_[i];
3362 s->CreateArtificialNodeIfNeeded(weak->untag()->target());
3366 void WriteAlloc(Serializer*
s) {
3370 void WriteFill(Serializer*
s) {
3375 GrowableArray<WeakSerializationReferencePtr> objects_;
3379#if !defined(DART_PRECOMPILED_RUNTIME)
3394 for (intptr_t i = 0; i <
count; i++) {
3395 PcDescriptorsPtr desc = objects_[i];
3398 const intptr_t
length = desc->untag()->length_;
3401 compiler::target::PcDescriptors::InstanceSize(
length);
3407 for (intptr_t i = 0; i <
count; i++) {
3408 PcDescriptorsPtr desc = objects_[i];
3410 const intptr_t
length = desc->untag()->length_;
3412 uint8_t* cdata =
reinterpret_cast<uint8_t*
>(desc->untag()->data());
3430 const intptr_t
count =
d->ReadUnsigned();
3431 for (intptr_t i = 0; i <
count; i++) {
3432 const intptr_t
length =
d->ReadUnsigned();
3443 const intptr_t
length =
d.ReadUnsigned();
3444 PcDescriptorsPtr desc =
static_cast<PcDescriptorsPtr
>(
d.Ref(
id));
3447 desc->untag()->length_ =
length;
3448 uint8_t* cdata =
reinterpret_cast<uint8_t*
>(desc->untag()->data());
3454#if !defined(DART_PRECOMPILED_RUNTIME)
3469 for (intptr_t i = 0; i <
count; i++) {
3470 CodeSourceMapPtr map = objects_[i];
3473 const intptr_t
length = map->untag()->length_;
3476 compiler::target::PcDescriptors::InstanceSize(
length);
3482 for (intptr_t i = 0; i <
count; i++) {
3483 CodeSourceMapPtr map = objects_[i];
3485 const intptr_t
length = map->untag()->length_;
3487 uint8_t* cdata =
reinterpret_cast<uint8_t*
>(map->untag()->data());
3505 const intptr_t
count =
d->ReadUnsigned();
3506 for (intptr_t i = 0; i <
count; i++) {
3507 const intptr_t
length =
d->ReadUnsigned();
3517 const intptr_t
length =
d.ReadUnsigned();
3518 CodeSourceMapPtr map =
static_cast<CodeSourceMapPtr
>(
d.Ref(
id));
3521 map->untag()->length_ =
length;
3522 uint8_t* cdata =
reinterpret_cast<uint8_t*
>(map->untag()->data());
3528#if !defined(DART_PRECOMPILED_RUNTIME)
3543 for (intptr_t i = 0; i <
count; i++) {
3544 CompressedStackMapsPtr map = objects_[i];
3548 map->untag()->payload()->flags_and_size());
3551 compiler::target::CompressedStackMaps::InstanceSize(
length);
3557 for (intptr_t i = 0; i <
count; i++) {
3558 CompressedStackMapsPtr map = objects_[i];
3560 s->WriteUnsigned(map->untag()->payload()->flags_and_size());
3562 map->untag()->payload()->flags_and_size());
3564 reinterpret_cast<uint8_t*
>(map->untag()->payload()->data());
3583 const intptr_t
count =
d->ReadUnsigned();
3584 for (intptr_t i = 0; i <
count; i++) {
3585 const intptr_t
length =
d->ReadUnsigned();
3595 const intptr_t flags_and_size =
d.ReadUnsigned();
3598 CompressedStackMapsPtr map =
3599 static_cast<CompressedStackMapsPtr
>(
d.Ref(
id));
3602 map->untag()->payload()->set_flags_and_size(flags_and_size);
3604 reinterpret_cast<uint8_t*
>(map->untag()->payload()->data());
3610#if !defined(DART_PRECOMPILED_RUNTIME) && !defined(DART_COMPRESSED_POINTERS)
3637 s->heap()->old_space()->IsObjectFromImagePages(
object)) {
3653 uint32_t running_offset = 0;
3654 for (intptr_t i = 0; i <
count; i++) {
3656 s->AssignRef(
object);
3657 const StringPtr
name =
3660 uint32_t
offset =
s->GetDataOffset(
object);
3665 s->WriteUnsigned((
offset - running_offset) >>
3678 const intptr_t cid_;
3679 const char*
const type_;
3683#if !defined(DART_COMPRESSED_POINTERS)
3698 intptr_t
count =
d->ReadUnsigned();
3699 uint32_t running_offset = 0;
3700 for (intptr_t i = 0; i <
count; i++) {
3702 ObjectPtr object =
d->GetObjectAt(running_offset);
3703 d->AssignRef(
object);
3706 if (cid_ == kStringCid) {
3719 auto object_store =
d->isolate_group()->object_store();
3722 object_store->set_symbol_table(
table_);
3727 FATAL(
"Cannot recanonicalize RO objects.");
3732 const intptr_t cid_;
3736#if !defined(DART_PRECOMPILED_RUNTIME)
3745 objects_.
Add(handlers);
3747 s->Push(handlers->untag()->handled_types_data());
3753 for (intptr_t i = 0; i <
count; i++) {
3754 ExceptionHandlersPtr handlers = objects_[i];
3755 s->AssignRef(handlers);
3757 const intptr_t
length = handlers->untag()->num_entries();
3760 compiler::target::ExceptionHandlers::InstanceSize(
length);
3766 for (intptr_t i = 0; i <
count; i++) {
3767 ExceptionHandlersPtr handlers = objects_[i];
3769 const intptr_t packed_fields = handlers->untag()->packed_fields_;
3772 s->WriteUnsigned(packed_fields);
3774 for (intptr_t j = 0; j <
length; j++) {
3776 s->Write<uint32_t>(
info.handler_pc_offset);
3777 s->Write<int16_t>(
info.outer_try_index);
3778 s->Write<int8_t>(
info.needs_stacktrace);
3779 s->Write<int8_t>(
info.has_catch_all);
3780 s->Write<int8_t>(
info.is_generated);
3798 const intptr_t
count =
d->ReadUnsigned();
3799 for (intptr_t i = 0; i <
count; i++) {
3800 const intptr_t
length =
d->ReadUnsigned();
3811 ExceptionHandlersPtr handlers =
3812 static_cast<ExceptionHandlersPtr
>(
d.Ref(
id));
3813 const intptr_t packed_fields =
d.ReadUnsigned();
3818 handlers->untag()->packed_fields_ = packed_fields;
3819 handlers->untag()->handled_types_data_ =
3820 static_cast<ArrayPtr
>(
d.ReadRef());
3821 for (intptr_t j = 0; j <
length; j++) {
3824 info.outer_try_index =
d.Read<int16_t>();
3825 info.needs_stacktrace =
d.Read<int8_t>();
3826 info.has_catch_all =
d.Read<int8_t>();
3827 info.is_generated =
d.Read<int8_t>();
3833#if !defined(DART_PRECOMPILED_RUNTIME)
3842 objects_.
Add(context);
3844 s->Push(context->untag()->parent());
3845 const intptr_t
length = context->untag()->num_variables_;
3846 for (intptr_t i = 0; i <
length; i++) {
3847 s->Push(context->untag()->element(i));
3854 for (intptr_t i = 0; i <
count; i++) {
3855 ContextPtr context = objects_[i];
3856 s->AssignRef(context);
3858 const intptr_t
length = context->untag()->num_variables_;
3866 for (intptr_t i = 0; i <
count; i++) {
3867 ContextPtr context = objects_[i];
3869 const intptr_t
length = context->untag()->num_variables_;
3872 for (intptr_t j = 0; j <
length; j++) {
3873 s->WriteElementRef(context->untag()->element(j), j);
3890 const intptr_t
count =
d->ReadUnsigned();
3891 for (intptr_t i = 0; i <
count; i++) {
3892 const intptr_t
length =
d->ReadUnsigned();
3903 ContextPtr context =
static_cast<ContextPtr
>(
d.Ref(
id));
3904 const intptr_t
length =
d.ReadUnsigned();
3907 context->untag()->num_variables_ =
length;
3908 context->untag()->parent_ =
static_cast<ContextPtr
>(
d.ReadRef());
3909 for (intptr_t j = 0; j <
length; j++) {
3910 context->untag()->data()[j] =
d.ReadRef();
3916#if !defined(DART_PRECOMPILED_RUNTIME)
3925 objects_.
Add(scope);
3927 const intptr_t
length = scope->untag()->num_variables_;
3934 for (intptr_t i = 0; i <
count; i++) {
3935 ContextScopePtr scope = objects_[i];
3936 s->AssignRef(scope);
3938 const intptr_t
length = scope->untag()->num_variables_;
3941 compiler::target::ContextScope::InstanceSize(
length);
3947 for (intptr_t i = 0; i <
count; i++) {
3948 ContextScopePtr scope = objects_[i];
3950 const intptr_t
length = scope->untag()->num_variables_;
3952 s->Write<
bool>(scope->untag()->is_implicit_);
3970 const intptr_t
count =
d->ReadUnsigned();
3971 for (intptr_t i = 0; i <
count; i++) {
3972 const intptr_t
length =
d->ReadUnsigned();
3983 ContextScopePtr scope =
static_cast<ContextScopePtr
>(
d.Ref(
id));
3984 const intptr_t
length =
d.ReadUnsigned();
3987 scope->untag()->num_variables_ =
length;
3988 scope->untag()->is_implicit_ =
d.Read<
bool>();
3994#if !defined(DART_PRECOMPILED_RUNTIME)
4005 objects_.
Add(unlinked);
4012 for (intptr_t i = 0; i <
count; i++) {
4013 UnlinkedCallPtr unlinked = objects_[i];
4014 s->AssignRef(unlinked);
4020 for (intptr_t i = 0; i <
count; i++) {
4021 UnlinkedCallPtr unlinked = objects_[i];
4024 s->Write<
bool>(unlinked->untag()->can_patch_to_monomorphic_);
4048 UnlinkedCallPtr unlinked =
static_cast<UnlinkedCallPtr
>(
d.Ref(
id));
4051 d.ReadFromTo(unlinked);
4052 unlinked->untag()->can_patch_to_monomorphic_ =
d.Read<
bool>();
4057#if !defined(DART_PRECOMPILED_RUNTIME)
4075 for (intptr_t i = 0; i <
count; i++) {
4076 ICDataPtr ic = objects_[i];
4084 for (intptr_t i = 0; i <
count; i++) {
4085 ICDataPtr ic = objects_[i];
4091 s->Write<uint32_t>(ic->untag()->state_bits_);
4114 ICDataPtr ic =
static_cast<ICDataPtr
>(
d.Ref(
id));
4118 ic->untag()->state_bits_ =
d.Read<int32_t>();
4123#if !defined(DART_PRECOMPILED_RUNTIME)
4129 kMegamorphicCacheCid,
4135 objects_.
Add(cache);
4142 for (intptr_t i = 0; i <
count; i++) {
4143 MegamorphicCachePtr cache = objects_[i];
4144 s->AssignRef(cache);
4150 for (intptr_t i = 0; i <
count; i++) {
4151 MegamorphicCachePtr cache = objects_[i];
4154 s->Write<int32_t>(cache->untag()->filled_entry_count_);
4178 MegamorphicCachePtr cache =
static_cast<MegamorphicCachePtr
>(
d.Ref(
id));
4181 d.ReadFromTo(cache);
4182 cache->untag()->filled_entry_count_ =
d.Read<int32_t>();
4187#if !defined(DART_PRECOMPILED_RUNTIME)
4193 kSubtypeTestCacheCid,
4199 objects_.
Add(cache);
4200 s->Push(cache->untag()->cache_);
4206 for (intptr_t i = 0; i <
count; i++) {
4207 SubtypeTestCachePtr cache = objects_[i];
4208 s->AssignRef(cache);
4214 for (intptr_t i = 0; i <
count; i++) {
4215 SubtypeTestCachePtr cache = objects_[i];
4218 s->Write<uint32_t>(cache->untag()->num_inputs_);
4219 s->Write<uint32_t>(cache->untag()->num_occupied_);
4243 SubtypeTestCachePtr cache =
static_cast<SubtypeTestCachePtr
>(
d.Ref(
id));
4246 cache->untag()->cache_ =
static_cast<ArrayPtr
>(
d.ReadRef());
4247 cache->untag()->num_inputs_ =
d.Read<uint32_t>();
4248 cache->untag()->num_occupied_ =
d.Read<uint32_t>();
4253#if !defined(DART_PRECOMPILED_RUNTIME)
4265 s->Push(unit->untag()->parent());
4271 for (intptr_t i = 0; i <
count; i++) {
4272 LoadingUnitPtr unit = objects_[i];
4279 for (intptr_t i = 0; i <
count; i++) {
4280 LoadingUnitPtr unit = objects_[i];
4307 LoadingUnitPtr unit =
static_cast<LoadingUnitPtr
>(
d.Ref(
id));
4310 unit->untag()->parent_ =
static_cast<LoadingUnitPtr
>(
d.ReadRef());
4312 unit->untag()->instructions_image_ =
nullptr;
4313 unit->untag()->packed_fields_ =
4315 UntaggedLoadingUnit::kNotLoaded) |
4321#if !defined(DART_PRECOMPILED_RUNTIME)
4339 for (intptr_t i = 0; i <
count; i++) {
4340 LanguageErrorPtr
error = objects_[i];
4347 for (intptr_t i = 0; i <
count; i++) {
4348 LanguageErrorPtr
error = objects_[i];
4351 s->WriteTokenPosition(
error->untag()->token_pos_);
4352 s->Write<
bool>(
error->untag()->report_after_token_);
4353 s->Write<int8_t>(
error->untag()->kind_);
4377 LanguageErrorPtr
error =
static_cast<LanguageErrorPtr
>(
d.Ref(
id));
4381 error->untag()->token_pos_ =
d.ReadTokenPosition();
4382 error->untag()->report_after_token_ =
d.Read<
bool>();
4383 error->untag()->kind_ =
d.Read<int8_t>();
4388#if !defined(DART_PRECOMPILED_RUNTIME)
4393 "UnhandledException",
4394 kUnhandledExceptionCid,
4400 objects_.
Add(exception);
4407 for (intptr_t i = 0; i <
count; i++) {
4408 UnhandledExceptionPtr exception = objects_[i];
4409 s->AssignRef(exception);
4415 for (intptr_t i = 0; i <
count; i++) {
4416 UnhandledExceptionPtr exception = objects_[i];
4442 UnhandledExceptionPtr exception =
4443 static_cast<UnhandledExceptionPtr
>(
d.Ref(
id));
4446 d.ReadFromTo(exception);
4451#if !defined(DART_PRECOMPILED_RUNTIME)
4457 host_next_field_offset_in_words_ =
4458 cls->untag()->host_next_field_offset_in_words_;
4459 ASSERT(host_next_field_offset_in_words_ > 0);
4460#if defined(DART_PRECOMPILER)
4461 target_next_field_offset_in_words_ =
4462 cls->untag()->target_next_field_offset_in_words_;
4463 target_instance_size_in_words_ =
4464 cls->untag()->target_instance_size_in_words_;
4466 target_next_field_offset_in_words_ =
4467 cls->untag()->host_next_field_offset_in_words_;
4468 target_instance_size_in_words_ = cls->untag()->host_instance_size_in_words_;
4470 ASSERT(target_next_field_offset_in_words_ > 0);
4471 ASSERT(target_instance_size_in_words_ > 0);
4478 const intptr_t next_field_offset = host_next_field_offset_in_words_
4480 const auto unboxed_fields_bitmap =
4481 s->isolate_group()->class_table()->GetUnboxedFieldsMapAt(
cid_);
4483 while (
offset < next_field_offset) {
4500 s->Write<int32_t>(target_next_field_offset_in_words_);
4501 s->Write<int32_t>(target_instance_size_in_words_);
4503 for (intptr_t i = 0; i <
count; i++) {
4504 InstancePtr
instance = objects_[i];
4508 const intptr_t instance_size = compiler::target::RoundedAllocationSize(
4509 target_instance_size_in_words_ * compiler::target::kCompressedWordSize);
4514 intptr_t next_field_offset = host_next_field_offset_in_words_
4518 const auto unboxed_fields_bitmap =
4519 s->isolate_group()->class_table()->GetUnboxedFieldsMapAt(
cid_);
4521 for (intptr_t i = 0; i <
count; i++) {
4522 InstancePtr
instance = objects_[i];
4524#if defined(DART_PRECOMPILER)
4525 if (FLAG_write_v8_snapshot_profile_to !=
nullptr) {
4526 ClassPtr cls =
s->isolate_group()->class_table()->At(
cid_);
4527 s->AttributePropertyRef(cls,
"<class>");
4531 while (
offset < next_field_offset) {
4536 s->WriteWordWith32BitWrites(
value);
4542 s->WriteElementRef(raw_obj,
offset);
4550 intptr_t host_next_field_offset_in_words_;
4551 intptr_t target_next_field_offset_in_words_;
4552 intptr_t target_instance_size_in_words_;
4568#if defined(DART_PRECOMPILED_RUNTIME)
4572 d->isolate_group()->constant_canonicalization_mutex());
4595 is_immutable_(is_immutable) {}
4600 const intptr_t
count =
d->ReadUnsigned();
4601 next_field_offset_in_words_ =
d->Read<int32_t>();
4602 instance_size_in_words_ =
d->Read<int32_t>();
4605 for (intptr_t i = 0; i <
count; i++) {
4606 d->AssignRef(
d->Allocate(instance_size));
4614 const intptr_t
cid = cid_;
4616 const bool is_immutable = is_immutable_;
4617 intptr_t next_field_offset = next_field_offset_in_words_
4624 InstancePtr
instance =
static_cast<InstancePtr
>(
d.Ref(
id));
4626 mark_canonical, is_immutable);
4628 while (
offset < next_field_offset) {
4633 *p =
d.ReadWordWith32BitReads();
4641 while (
offset < instance_size) {
4652 const intptr_t cid_;
4653 const bool is_immutable_;
4654 intptr_t next_field_offset_in_words_;
4655 intptr_t instance_size_in_words_;
4658#if !defined(DART_PRECOMPILED_RUNTIME)
4669 objects_.
Add(prefix);
4676 for (intptr_t i = 0; i <
count; i++) {
4677 LibraryPrefixPtr prefix = objects_[i];
4678 s->AssignRef(prefix);
4684 for (intptr_t i = 0; i <
count; i++) {
4685 LibraryPrefixPtr prefix = objects_[i];
4688 s->Write<uint16_t>(prefix->untag()->num_imports_);
4689 s->Write<
bool>(prefix->untag()->is_deferred_load_);
4713 LibraryPrefixPtr prefix =
static_cast<LibraryPrefixPtr
>(
d.Ref(
id));
4716 d.ReadFromTo(prefix);
4717 prefix->untag()->num_imports_ =
d.Read<uint16_t>();
4718 prefix->untag()->is_deferred_load_ =
d.Read<
bool>();
4723#if !defined(DART_PRECOMPILED_RUNTIME)
4735 represents_canonical_set,
4747 ClassPtr type_class =
4748 s->isolate_group()->class_table()->At(
type->untag()->type_class_id());
4749 s->Push(type_class);
4756 for (intptr_t i = 0; i <
count; i++) {
4765 for (intptr_t i = 0; i <
count; i++) {
4780 ClassPtr type_class =
4781 s->isolate_group()->class_table()->At(
type->untag()->type_class_id());
4782 if (type_class->untag()->declaration_type() !=
type) {
4793#if defined(DART_PRECOMPILER)
4794 if (FLAG_write_v8_snapshot_profile_to !=
nullptr) {
4795 ClassPtr type_class =
4796 s->isolate_group()->class_table()->At(
type->untag()->type_class_id());
4797 s->AttributePropertyRef(type_class,
"<type_class>");
4801 s->WriteUnsigned(
type->untag()->flags());
4826 TypePtr
type =
static_cast<TypePtr
>(
d.Ref(
id));
4830 type->untag()->set_flags(
d.ReadUnsigned());
4836 auto object_store =
d->isolate_group()->object_store();
4839 object_store->set_canonical_types(
table_);
4855 type.UpdateTypeTestingStubEntryPoint();
4861 type.InitializeTypeTestingStubNonAtomic(stub);
4867#if !defined(DART_PRECOMPILED_RUNTIME)
4874 bool represents_canonical_set)
4878 represents_canonical_set,
4894 for (intptr_t i = 0; i <
count; i++) {
4903 for (intptr_t i = 0; i <
count; i++) {
4913 s->Write<uint8_t>(
type->untag()->flags());
4914 s->Write<uint32_t>(
type->untag()->packed_parameter_counts_);
4915 s->Write<uint16_t>(
type->untag()->packed_type_parameter_counts_);
4940 FunctionTypePtr
type =
static_cast<FunctionTypePtr
>(
d.Ref(
id));
4944 type->untag()->set_flags(
d.Read<uint8_t>());
4945 type->untag()->packed_parameter_counts_ =
d.Read<uint32_t>();
4946 type->untag()->packed_type_parameter_counts_ =
d.Read<uint16_t>();
4952 auto object_store =
d->isolate_group()->object_store();
4954 d, refs,
Array::Handle(object_store->canonical_function_types()));
4955 object_store->set_canonical_function_types(
table_);
4971 type.UpdateTypeTestingStubEntryPoint();
4977 type.InitializeTypeTestingStubNonAtomic(stub);
4983#if !defined(DART_PRECOMPILED_RUNTIME)
4990 bool represents_canonical_set)
4994 represents_canonical_set,
5010 for (intptr_t i = 0; i <
count; i++) {
5019 for (intptr_t i = 0; i <
count; i++) {
5029 s->Write<uint8_t>(
type->untag()->flags());
5053 RecordTypePtr
type =
static_cast<RecordTypePtr
>(
d.Ref(
id));
5057 type->untag()->set_flags(
d.Read<uint8_t>());
5063 auto object_store =
d->isolate_group()->object_store();
5066 object_store->set_canonical_record_types(
table_);
5082 type.UpdateTypeTestingStubEntryPoint();
5088 type.InitializeTypeTestingStubNonAtomic(stub);
5094#if !defined(DART_PRECOMPILED_RUNTIME)
5101 bool cluster_represents_canonical_set)
5105 cluster_represents_canonical_set,
5121 for (intptr_t i = 0; i <
count; i++) {
5130 for (intptr_t i = 0; i <
count; i++) {
5139 s->Write<uint16_t>(
type->untag()->base_);
5140 s->Write<uint16_t>(
type->untag()->index_);
5142 s->Write<uint8_t>(
type->untag()->flags());
5167 TypeParameterPtr
type =
static_cast<TypeParameterPtr
>(
d.Ref(
id));
5172 type->untag()->base_ =
d.Read<uint16_t>();
5173 type->untag()->index_ =
d.Read<uint16_t>();
5174 type->untag()->set_flags(
d.Read<uint8_t>());
5180 auto object_store =
d->isolate_group()->object_store();
5182 d, refs,
Array::Handle(object_store->canonical_type_parameters()));
5183 object_store->set_canonical_type_parameters(
table_);
5187 type_param ^= refs.
At(i);
5189 refs.
SetAt(i, type_param);
5198 type_param ^= refs.
At(
id);
5203 type_param ^= refs.
At(
id);
5211#if !defined(DART_PRECOMPILED_RUNTIME)
5223 objects_.
Add(closure);
5230 for (intptr_t i = 0; i <
count; i++) {
5231 ClosurePtr closure = objects_[i];
5232 s->AssignRef(closure);
5238 for (intptr_t i = 0; i <
count; i++) {
5239 ClosurePtr closure = objects_[i];
5268 ClosurePtr closure =
static_cast<ClosurePtr
>(
d.Ref(
id));
5271 d.ReadFromTo(closure);
5272#if defined(DART_PRECOMPILED_RUNTIME)
5273 closure->untag()->entry_point_ = 0;
5278#if defined(DART_PRECOMPILED_RUNTIME)
5286 closure ^= refs.
At(i);
5287 func = closure.function();
5288 uword entry_point = func.entry_point();
5289 ASSERT(entry_point != 0);
5290 closure.ptr()->untag()->entry_point_ = entry_point;
5296#if !defined(DART_PRECOMPILED_RUNTIME)
5304 if (!object->IsHeapObject()) {
5315 for (intptr_t i = 0; i < smis_.
length(); i++) {
5316 SmiPtr smi = smis_[i];
5320 s->Write<int64_t>(
value);
5326 for (intptr_t i = 0; i < mints_.
length(); i++) {
5327 MintPtr mint = mints_[i];
5330 s->Write<int64_t>(mint->untag()->value_);
5356 const intptr_t
count =
d->ReadUnsigned();
5358 for (intptr_t i = 0; i <
count; i++) {
5359 int64_t
value =
d->Read<int64_t>();
5366 mint->untag()->value_ =
value;
5376#if !defined(DART_PRECOMPILED_RUNTIME)
5394 for (intptr_t i = 0; i <
count; i++) {
5395 DoublePtr dbl = objects_[i];
5402 for (intptr_t i = 0; i <
count; i++) {
5403 DoublePtr dbl = objects_[i];
5405 s->Write<
double>(dbl->untag()->value_);
5431 DoublePtr dbl =
static_cast<DoublePtr
>(
d.Ref(
id));
5434 dbl->untag()->value_ =
d.Read<
double>();
5439#if !defined(DART_PRECOMPILED_RUNTIME)
5447 ASSERT_EQUAL(compiler::target::Int32x4::InstanceSize(),
5448 compiler::target::Float32x4::InstanceSize());
5449 ASSERT_EQUAL(compiler::target::Int32x4::InstanceSize(),
5450 compiler::target::Float64x2::InstanceSize());
5459 for (intptr_t i = 0; i <
count; i++) {
5461 s->AssignRef(vector);
5467 for (intptr_t i = 0; i <
count; i++) {
5472 s->WriteBytes(&(
static_cast<Int32x4Ptr
>(vector)->
untag()->value_),
5502 const intptr_t
cid = cid_;
5508 d.ReadBytes(&(
static_cast<Int32x4Ptr
>(vector)->
untag()->value_),
5517#if !defined(DART_PRECOMPILED_RUNTIME)
5522 "GrowableObjectArray",
5523 kGrowableObjectArrayCid,
5529 objects_.
Add(array);
5536 for (intptr_t i = 0; i <
count; i++) {
5537 GrowableObjectArrayPtr array = objects_[i];
5538 s->AssignRef(array);
5544 for (intptr_t i = 0; i <
count; i++) {
5545 GrowableObjectArrayPtr array = objects_[i];
5571 GrowableObjectArrayPtr list =
5572 static_cast<GrowableObjectArrayPtr
>(
d.Ref(
id));
5580#if !defined(DART_PRECOMPILED_RUNTIME)
5589 objects_.
Add(record);
5592 for (intptr_t i = 0; i < num_fields; ++i) {
5593 s->Push(record->untag()->field(i));
5600 for (intptr_t i = 0; i <
count; ++i) {
5601 RecordPtr record = objects_[i];
5602 s->AssignRef(record);
5605 s->WriteUnsigned(num_fields);
5612 for (intptr_t i = 0; i <
count; ++i) {
5613 RecordPtr record = objects_[i];
5615 const RecordShape shape(record->untag()->shape());
5616 s->WriteUnsigned(shape.
AsInt());
5617 const intptr_t num_fields = shape.
num_fields();
5618 for (intptr_t j = 0; j < num_fields; ++j) {
5619 s->WriteElementRef(record->untag()->field(j), j);
5640 const intptr_t
count =
d->ReadUnsigned();
5641 for (intptr_t i = 0; i <
count; i++) {
5642 const intptr_t num_fields =
d->ReadUnsigned();
5653 RecordPtr record =
static_cast<RecordPtr
>(
d.Ref(
id));
5654 const intptr_t shape =
d.ReadUnsigned();
5659 record->untag()->shape_ =
Smi::New(shape);
5660 for (intptr_t j = 0; j < num_fields; ++j) {
5661 record->untag()->data()[j] =
d.ReadRef();
5667#if !defined(DART_PRECOMPILED_RUNTIME)
5683 for (intptr_t i = 0; i <
count; i++) {
5684 TypedDataPtr
data = objects_[i];
5697 for (intptr_t i = 0; i <
count; i++) {
5698 TypedDataPtr
data = objects_[i];
5702 uint8_t* cdata =
reinterpret_cast<uint8_t*
>(
data->untag()->data());
5720 const intptr_t
count =
d->ReadUnsigned();
5722 for (intptr_t i = 0; i <
count; i++) {
5723 const intptr_t
length =
d->ReadUnsigned();
5735 const intptr_t
cid = cid_;
5737 TypedDataPtr
data =
static_cast<TypedDataPtr
>(
d.Ref(
id));
5738 const intptr_t
length =
d.ReadUnsigned();
5743 data->untag()->RecomputeDataField();
5744 uint8_t* cdata =
reinterpret_cast<uint8_t*
>(
data->untag()->data());
5745 d.ReadBytes(cdata, length_in_bytes);
5750 const intptr_t cid_;
5753#if !defined(DART_PRECOMPILED_RUNTIME)
5772 for (intptr_t i = 0; i <
count; i++) {
5773 TypedDataViewPtr view = objects_[i];
5780 for (intptr_t i = 0; i <
count; i++) {
5781 TypedDataViewPtr view = objects_[i];
5805 const intptr_t
cid = cid_;
5808 TypedDataViewPtr view =
static_cast<TypedDataViewPtr
>(
d.Ref(
id));
5817 view ^= refs.
At(
id);
5818 view.RecomputeDataField();
5823 const intptr_t cid_;
5826#if !defined(DART_PRECOMPILED_RUNTIME)
5831 "ExternalTypedData",
5844 for (intptr_t i = 0; i <
count; i++) {
5845 ExternalTypedDataPtr
data = objects_[i];
5853 for (intptr_t i = 0; i <
count; i++) {
5854 ExternalTypedDataPtr
data = objects_[i];
5858 uint8_t* cdata =
reinterpret_cast<uint8_t*
>(
data->untag()->data_);
5883 const intptr_t
cid = cid_;
5886 ExternalTypedDataPtr
data =
static_cast<ExternalTypedDataPtr
>(
d.Ref(
id));
5887 const intptr_t
length =
d.ReadUnsigned();
5892 data->untag()->data_ =
const_cast<uint8_t*
>(
d.AddressOfCurrentPosition());
5899 const intptr_t cid_;
5902#if !defined(DART_PRECOMPILED_RUNTIME)
5907 kDeltaEncodedTypedDataCid) {}
5918 for (intptr_t i = 0; i <
count; i++) {
5919 const TypedDataPtr
data = objects_[i];
5924 const intptr_t length_in_bytes =
5926 s->WriteUnsigned(length_in_bytes);
5928 compiler::target::TypedData::InstanceSize(length_in_bytes);
5935 for (intptr_t i = 0; i <
count; i++) {
5936 const TypedDataPtr
data = objects_[i];
5938 const intptr_t
cid =
data->GetClassId();
5941 ASSERT(
cid == kTypedDataUint16ArrayCid ||
5942 cid == kTypedDataUint32ArrayCid);
5943 const intptr_t cid_flag =
cid == kTypedDataUint16ArrayCid ? 0 : 1;
5945 const intptr_t encoded_length = (
length << 1) | cid_flag;
5946 s->WriteUnsigned(encoded_length);
5949 for (intptr_t j = 0; j <
length; ++j) {
5950 const intptr_t
value = (
cid == kTypedDataUint16ArrayCid)
5951 ? typed_data.GetUint16(j << 1)
5952 : typed_data.GetUint32(j << 2);
5974 const intptr_t
count =
d->ReadUnsigned();
5975 for (intptr_t i = 0; i <
count; i++) {
5976 const intptr_t length_in_bytes =
d->ReadUnsigned();
5989 TypedDataPtr
data =
static_cast<TypedDataPtr
>(
d.Ref(
id));
5990 const intptr_t encoded_length =
d.ReadUnsigned();
5991 const intptr_t
length = encoded_length >> 1;
5992 const intptr_t
cid = (encoded_length & 0x1) == 0
5993 ? kTypedDataUint16ArrayCid
5994 : kTypedDataUint32ArrayCid;
6000 data->untag()->RecomputeDataField();
6003 for (intptr_t j = 0; j <
length; ++j) {
6004 value +=
d.ReadUnsigned();
6005 if (
cid == kTypedDataUint16ArrayCid) {
6006 typed_data.SetUint16(j << 1,
static_cast<uint16_t
>(
value));
6008 typed_data.SetUint32(j << 2,
value);
6015#if !defined(DART_PRECOMPILED_RUNTIME)
6026 objects_.
Add(trace);
6033 for (intptr_t i = 0; i <
count; i++) {
6034 StackTracePtr trace = objects_[i];
6035 s->AssignRef(trace);
6041 for (intptr_t i = 0; i <
count; i++) {
6042 StackTracePtr trace = objects_[i];
6067 StackTracePtr trace =
static_cast<StackTracePtr
>(
d.Ref(
id));
6070 d.ReadFromTo(trace);
6075#if !defined(DART_PRECOMPILED_RUNTIME)
6086 objects_.
Add(regexp);
6093 for (intptr_t i = 0; i <
count; i++) {
6094 RegExpPtr regexp = objects_[i];
6095 s->AssignRef(regexp);
6101 for (intptr_t i = 0; i <
count; i++) {
6102 RegExpPtr regexp = objects_[i];
6105 s->Write<int32_t>(regexp->untag()->num_one_byte_registers_);
6106 s->Write<int32_t>(regexp->untag()->num_two_byte_registers_);
6107 s->Write<int8_t>(regexp->untag()->type_flags_);
6130 RegExpPtr regexp =
static_cast<RegExpPtr
>(
d.Ref(
id));
6133 d.ReadFromTo(regexp);
6134 regexp->untag()->num_one_byte_registers_ =
d.Read<int32_t>();
6135 regexp->untag()->num_two_byte_registers_ =
d.Read<int32_t>();
6136 regexp->untag()->type_flags_ =
d.Read<int8_t>();
6141#if !defined(DART_PRECOMPILED_RUNTIME)
6152 objects_.
Add(property);
6154 s->PushWeak(property->untag()->key());
6158 for (intptr_t i = 0; i < objects_.
length(); i++) {
6159 WeakPropertyPtr
property = objects_[i];
6160 if (
s->IsReachable(property->untag()->key())) {
6161 s->Push(property->untag()->value());
6169 for (intptr_t i = 0; i <
count; i++) {
6170 WeakPropertyPtr
property = objects_[i];
6171 s->AssignRef(property);
6177 for (intptr_t i = 0; i <
count; i++) {
6178 WeakPropertyPtr
property = objects_[i];
6180 if (
s->HasRef(property->untag()->key())) {
6182 s->WriteOffsetRef(property->untag()->value(),
6211 WeakPropertyPtr
property =
static_cast<WeakPropertyPtr
>(
d.Ref(
id));
6214 d.ReadFromTo(property);
6220#if !defined(DART_PRECOMPILED_RUNTIME)
6233 ASSERT(map->untag()->IsCanonical());
6242 for (intptr_t i = 0; i <
count; i++) {
6243 MapPtr map = objects_[i];
6250 for (intptr_t i = 0; i <
count; i++) {
6251 MapPtr map = objects_[i];
6281 const intptr_t
cid = cid_;
6284 MapPtr map =
static_cast<MapPtr
>(
d.Ref(
id));
6292 const intptr_t cid_;
6295#if !defined(DART_PRECOMPILED_RUNTIME)
6308 ASSERT(set->untag()->IsCanonical());
6317 for (intptr_t i = 0; i <
count; i++) {
6318 SetPtr set = objects_[i];
6325 for (intptr_t i = 0; i <
count; i++) {
6326 SetPtr set = objects_[i];
6356 const intptr_t
cid = cid_;
6359 SetPtr set =
static_cast<SetPtr
>(
d.Ref(
id));
6367 const intptr_t cid_;
6370#if !defined(DART_PRECOMPILED_RUNTIME)
6379 objects_.
Add(array);
6381 s->Push(array->untag()->type_arguments());
6383 for (intptr_t i = 0; i <
length; i++) {
6384 s->Push(array->untag()->element(i));
6388#if defined(DART_PRECOMPILER)
6389 static bool IsReadOnlyCid(intptr_t
cid) {
6391 case kPcDescriptorsCid:
6392 case kCodeSourceMapCid:
6393 case kCompressedStackMapsCid:
6394 case kOneByteStringCid:
6395 case kTwoByteStringCid:
6404#if defined(DART_PRECOMPILER)
6405 if (FLAG_print_array_optimization_candidates) {
6406 intptr_t array_count = objects_.
length();
6407 intptr_t array_count_allsmi = 0;
6408 intptr_t array_count_allro = 0;
6409 intptr_t array_count_empty = 0;
6410 intptr_t element_count = 0;
6411 intptr_t element_count_allsmi = 0;
6412 intptr_t element_count_allro = 0;
6413 for (intptr_t i = 0; i < array_count; i++) {
6414 ArrayPtr array = objects_[i];
6418 for (intptr_t i = 0; i <
length; i++) {
6421 if (!IsReadOnlyCid(
cid)) allro =
false;
6422 if (
cid != kSmiCid) allsmi =
false;
6426 array_count_empty++;
6427 }
else if (allsmi) {
6428 array_count_allsmi++;
6429 element_count_allsmi +=
length;
6431 array_count_allro++;
6432 element_count_allro +=
length;
6439 array_count_allsmi, element_count_allsmi);
6441 element_count_allro);
6448 for (intptr_t i = 0; i <
count; i++) {
6449 ArrayPtr array = objects_[i];
6450 s->AssignRef(array);
6460 for (intptr_t i = 0; i <
count; i++) {
6461 ArrayPtr array = objects_[i];
6466 for (intptr_t j = 0; j <
length; j++) {
6467 s->WriteElementRef(array->untag()->element(j), j);
6491 const intptr_t
count =
d->ReadUnsigned();
6492 for (intptr_t i = 0; i <
count; i++) {
6493 const intptr_t
length =
d->ReadUnsigned();
6502 const intptr_t
cid = cid_;
6505 ArrayPtr array =
static_cast<ArrayPtr
>(
d.Ref(
id));
6506 const intptr_t
length =
d.ReadUnsigned();
6510 array->untag()->SetCardRememberedBitUnsynchronized();
6512 array->untag()->type_arguments_ =
6513 static_cast<TypeArgumentsPtr
>(
d.ReadRef());
6515 for (intptr_t j = 0; j <
length; j++) {
6516 array->untag()->data()[j] =
d.ReadRef();
6522 const intptr_t cid_;
6525#if !defined(DART_PRECOMPILED_RUNTIME)
6534 objects_.
Add(array);
6537 for (intptr_t i = 0; i <
length; i++) {
6538 s->PushWeak(array->untag()->element(i));
6545 for (intptr_t i = 0; i <
count; i++) {
6546 WeakArrayPtr array = objects_[i];
6547 s->AssignRef(array);
6557 for (intptr_t i = 0; i <
count; i++) {
6558 WeakArrayPtr array = objects_[i];
6562 for (intptr_t j = 0; j <
length; j++) {
6563 if (
s->HasRef(array->untag()->element(j))) {
6564 s->WriteElementRef(array->untag()->element(j), j);
6584 const intptr_t
count =
d->ReadUnsigned();
6585 for (intptr_t i = 0; i <
count; i++) {
6586 const intptr_t
length =
d->ReadUnsigned();
6596 WeakArrayPtr array =
static_cast<WeakArrayPtr
>(
d.Ref(
id));
6597 const intptr_t
length =
d.ReadUnsigned();
6602 for (intptr_t j = 0; j <
length; j++) {
6603 array->untag()->data()[j] =
d.ReadRef();
6609#if !defined(DART_PRECOMPILED_RUNTIME)
6619 ASSERT(
cid == kOneByteStringCid ||
cid == kTwoByteStringCid);
6621 return (
length << 1) | (
cid == kTwoByteStringCid ? 0x1 : 0x0);
6625 bool represents_canonical_set)
6628 represents_canonical_set,
6634 StringPtr str =
static_cast<StringPtr
>(object);
6642 for (intptr_t i = 0; i <
count; i++) {
6646 const intptr_t
cid = str->GetClassId();
6649 s->WriteUnsigned(encoded);
6651 cid == kOneByteStringCid
6652 ? compiler::target::OneByteString::InstanceSize(
length)
6653 : compiler::target::TwoByteString::InstanceSize(
length);
6660 for (intptr_t i = 0; i <
count; i++) {
6663 const intptr_t
cid = str->GetClassId();
6666 s->WriteUnsigned(encoded);
6667 if (
cid == kOneByteStringCid) {
6668 s->WriteBytes(
static_cast<OneByteStringPtr
>(str)->
untag()->
data(),
6671 s->WriteBytes(
reinterpret_cast<uint8_t*
>(
6672 static_cast<TwoByteStringPtr
>(str)->
untag()->
data()),
6684 *out_cid = (encoded & 0x1) != 0 ? kTwoByteStringCid : kOneByteStringCid;
6685 return encoded >> 1;
6701 const intptr_t
count =
d->ReadUnsigned();
6702 for (intptr_t i = 0; i <
count; i++) {
6703 const intptr_t encoded =
d->ReadUnsigned();
6716 StringPtr str =
static_cast<StringPtr
>(
d.Ref(
id));
6717 const intptr_t encoded =
d.ReadUnsigned();
6724 *
reinterpret_cast<word*
>(
reinterpret_cast<uint8_t*
>(str->untag()) +
6726 *
reinterpret_cast<word*
>(
reinterpret_cast<uint8_t*
>(str->untag()) +
6729#if DART_COMPRESSED_POINTERS
6731 const intptr_t length_offset =
6732 reinterpret_cast<intptr_t
>(&str->untag()->length_);
6733 const intptr_t data_offset =
6734 cid == kOneByteStringCid
6735 ?
reinterpret_cast<intptr_t
>(
6736 static_cast<OneByteStringPtr
>(str)->
untag()->data())
6737 :
reinterpret_cast<intptr_t
>(
6738 static_cast<TwoByteStringPtr
>(str)->
untag()->data());
6739 const intptr_t length_with_gap = data_offset - length_offset;
6742 memset(
reinterpret_cast<void*
>(length_offset), 0, length_with_gap);
6747 if (
cid == kOneByteStringCid) {
6748 for (intptr_t j = 0; j <
length; j++) {
6749 uint8_t code_unit =
d.Read<uint8_t>();
6750 static_cast<OneByteStringPtr
>(str)->
untag()->data()[j] = code_unit;
6751 hasher.
Add(code_unit);
6755 for (intptr_t j = 0; j <
length; j++) {
6756 uint16_t code_unit =
d.Read<uint8_t>();
6757 code_unit = code_unit | (
d.Read<uint8_t>() << 8);
6758 static_cast<TwoByteStringPtr
>(str)->
untag()->data()[j] = code_unit;
6759 hasher.
Add(code_unit);
6768 auto object_store =
d->isolate_group()->object_store();
6771 object_store->set_symbol_table(
table_);
6783#if !defined(DART_PRECOMPILED_RUNTIME)
6803#if !defined(DART_PRECOMPILED_RUNTIME)
6807 bool should_write_symbols)
6808 : symbols_(symbols),
6809 should_write_symbols_(should_write_symbols),
6810 zone_(
Thread::Current()->zone()) {}
6817 s->AddBaseObject(Object::sentinel().ptr(),
"Null",
"sentinel");
6818 s->AddBaseObject(Object::transition_sentinel().ptr(),
"Null",
6819 "transition_sentinel");
6820 s->AddBaseObject(Object::optimized_out().ptr(),
"Null",
"<optimized out>");
6821 s->AddBaseObject(Object::empty_array().ptr(),
"Array",
"<empty_array>");
6822 s->AddBaseObject(Object::empty_instantiations_cache_array().ptr(),
"Array",
6823 "<empty_instantiations_cache_array>");
6824 s->AddBaseObject(Object::empty_subtype_test_cache_array().ptr(),
"Array",
6825 "<empty_subtype_test_cache_array>");
6826 s->AddBaseObject(Object::dynamic_type().ptr(),
"Type",
"<dynamic type>");
6827 s->AddBaseObject(Object::void_type().ptr(),
"Type",
"<void type>");
6828 s->AddBaseObject(Object::empty_type_arguments().ptr(),
"TypeArguments",
6830 s->AddBaseObject(
Bool::True().ptr(),
"bool",
"true");
6831 s->AddBaseObject(
Bool::False().ptr(),
"bool",
"false");
6833 s->AddBaseObject(Object::synthetic_getter_parameter_types().ptr(),
"Array",
6834 "<synthetic getter parameter types>");
6836 s->AddBaseObject(Object::synthetic_getter_parameter_names().ptr(),
"Array",
6837 "<synthetic getter parameter names>");
6838 s->AddBaseObject(Object::empty_context_scope().ptr(),
"ContextScope",
6840 s->AddBaseObject(Object::empty_object_pool().ptr(),
"ObjectPool",
6842 s->AddBaseObject(Object::empty_compressed_stackmaps().ptr(),
6843 "CompressedStackMaps",
"<empty>");
6844 s->AddBaseObject(Object::empty_descriptors().ptr(),
"PcDescriptors",
6846 s->AddBaseObject(Object::empty_var_descriptors().ptr(),
6847 "LocalVarDescriptors",
"<empty>");
6848 s->AddBaseObject(Object::empty_exception_handlers().ptr(),
6849 "ExceptionHandlers",
"<empty>");
6850 s->AddBaseObject(Object::empty_async_exception_handlers().ptr(),
6851 "ExceptionHandlers",
"<empty async>");
6854 s->AddBaseObject(ArgumentsDescriptor::cached_args_descriptors_[i],
6855 "ArgumentsDescriptor",
"<cached arguments descriptor>");
6858 s->AddBaseObject(ICData::cached_icdata_arrays_[i],
"Array",
6859 "<empty icdata entries>");
6866 if (
cid != kErrorCid &&
cid != kCallSiteDataCid) {
6885 if (should_write_symbols_) {
6886 s->Push(symbols_.
ptr());
6888 for (intptr_t i = 0; i < symbols_.
Length(); i++) {
6889 s->Push(symbols_.
At(i));
6909 if (!should_write_symbols_ &&
s->profile_writer() !=
nullptr) {
6913 s->AssignArtificialRef(symbols_.
ptr());
6914 const auto& symbols_snapshot_id =
s->GetProfileId(symbols_.
ptr());
6915 s->profile_writer()->SetObjectTypeAndName(symbols_snapshot_id,
"Symbols",
6917 s->profile_writer()->AddRoot(symbols_snapshot_id);
6918 for (intptr_t i = 0; i < symbols_.
Length(); i++) {
6919 s->profile_writer()->AttributeReferenceTo(
6921 s->GetProfileId(symbols_.
At(i)));
6928 const bool should_write_symbols_;
6942 d->AddBaseObject(Object::sentinel().ptr());
6943 d->AddBaseObject(Object::transition_sentinel().ptr());
6944 d->AddBaseObject(Object::optimized_out().ptr());
6945 d->AddBaseObject(Object::empty_array().ptr());
6946 d->AddBaseObject(Object::empty_instantiations_cache_array().ptr());
6947 d->AddBaseObject(Object::empty_subtype_test_cache_array().ptr());
6948 d->AddBaseObject(Object::dynamic_type().ptr());
6949 d->AddBaseObject(Object::void_type().ptr());
6950 d->AddBaseObject(Object::empty_type_arguments().ptr());
6954 d->AddBaseObject(Object::synthetic_getter_parameter_types().ptr());
6956 d->AddBaseObject(Object::synthetic_getter_parameter_names().ptr());
6957 d->AddBaseObject(Object::empty_context_scope().ptr());
6958 d->AddBaseObject(Object::empty_object_pool().ptr());
6959 d->AddBaseObject(Object::empty_compressed_stackmaps().ptr());
6960 d->AddBaseObject(Object::empty_descriptors().ptr());
6961 d->AddBaseObject(Object::empty_var_descriptors().ptr());
6962 d->AddBaseObject(Object::empty_exception_handlers().ptr());
6963 d->AddBaseObject(Object::empty_async_exception_handlers().ptr());
6966 d->AddBaseObject(ArgumentsDescriptor::cached_args_descriptors_[i]);
6969 d->AddBaseObject(ICData::cached_icdata_arrays_[i]);
6976 if (
cid != kErrorCid &&
cid != kCallSiteDataCid) {
6992 symbol_table_ ^=
d->ReadRef();
6993 if (!symbol_table_.
IsNull()) {
6994 d->isolate_group()->object_store()->set_symbol_table(symbol_table_);
6999 *code ^=
d->ReadRef();
7009 d->heap()->old_space()->ReleaseBumpAllocation();
7011 if (!symbol_table_.
IsNull()) {
7022#if !defined(DART_PRECOMPILED_RUNTIME)
7024#define DECLARE_OBJECT_STORE_FIELD(Type, Name) #Name,
7034#undef DECLARE_OBJECT_STORE_FIELD
7039#define RESET_ROOT_LIST(V) \
7040 V(symbol_table, WeakArray, HashTables::New<CanonicalStringSet>(4)) \
7041 V(canonical_types, Array, HashTables::New<CanonicalTypeSet>(4)) \
7042 V(canonical_function_types, Array, \
7043 HashTables::New<CanonicalFunctionTypeSet>(4)) \
7044 V(canonical_record_types, Array, HashTables::New<CanonicalRecordTypeSet>(4)) \
7045 V(canonical_type_arguments, Array, \
7046 HashTables::New<CanonicalTypeArgumentsSet>(4)) \
7047 V(canonical_type_parameters, Array, \
7048 HashTables::New<CanonicalTypeParameterSet>(4)) \
7049 ONLY_IN_PRODUCT(ONLY_IN_AOT( \
7050 V(closure_functions, GrowableObjectArray, GrowableObjectArray::null()))) \
7051 ONLY_IN_AOT(V(closure_functions_table, Array, Array::null())) \
7052 ONLY_IN_AOT(V(canonicalized_stack_map_entries, CompressedStackMaps, \
7053 CompressedStackMaps::null()))
7058 : base_objects_(base_objects),
7059 object_store_(object_store),
7060 snapshot_kind_(snapshot_kind) {
7061#define ONLY_IN_AOT(code) \
7062 if (snapshot_kind_ == Snapshot::kFullAOT) { \
7065#define SAVE_AND_RESET_ROOT(name, Type, init) \
7067 saved_##name##_ = object_store->name(); \
7068 object_store->set_##name(Type::Handle(init)); \
7072#undef SAVE_AND_RESET_ROOT
7076#define ONLY_IN_AOT(code) \
7077 if (snapshot_kind_ == Snapshot::kFullAOT) { \
7080#define RESTORE_ROOT(name, Type, init) \
7081 object_store_->set_##name(saved_##name##_);
7088 if (base_objects_ ==
nullptr) {
7090 const Array& base_objects = Object::vm_isolate_snapshot_object_table();
7092 s->AddBaseObject(base_objects.
At(i));
7096 for (intptr_t i = 0; i < base_objects_->length(); i++) {
7097 s->AddBaseObject((*base_objects_)[i]->ptr());
7103 ObjectPtr* from = object_store_->from();
7104 ObjectPtr* to = object_store_->to_snapshot(
s->kind());
7105 for (
ObjectPtr* p = from; p <= to; p++) {
7110 s->thread()->isolate_group()->initial_field_table();
7111 for (intptr_t i = 0, n = initial_field_table->
NumFieldIds(); i < n; i++) {
7112 s->Push(initial_field_table->
At(i));
7115 dispatch_table_entries_ = object_store_->dispatch_table_code_entries();
7119#if defined(DART_PRECOMPILER)
7124 if (!dispatch_table_entries_.
IsNull()) {
7125 for (intptr_t i = 0; i < dispatch_table_entries_.
Length(); i++) {
7126 s->Push(dispatch_table_entries_.
At(i));
7133 ObjectPtr* from = object_store_->from();
7134 ObjectPtr* to = object_store_->to_snapshot(
s->kind());
7135 for (
ObjectPtr* p = from; p <= to; p++) {
7140 s->thread()->isolate_group()->initial_field_table();
7142 s->WriteUnsigned(n);
7143 for (intptr_t i = 0; i < n; i++) {
7144 s->WriteRootRef(initial_field_table->
At(i),
"some-static-field");
7148 s->WriteDispatchTable(dispatch_table_entries_);
7152 return saved_canonicalized_stack_map_entries_;
7161#define ONLY_IN_AOT(code) code
7162#define DECLARE_FIELD(name, Type, init) Type& saved_##name##_ = Type::Handle();
7172 : object_store_(object_store) {}
7176 const Array& base_objects = Object::vm_isolate_snapshot_object_table();
7178 d->AddBaseObject(base_objects.
At(i));
7184 ObjectPtr* from = object_store_->from();
7185 ObjectPtr* to = object_store_->to_snapshot(
d->kind());
7186 for (
ObjectPtr* p = from; p <= to; p++) {
7191 d->thread()->isolate_group()->initial_field_table();
7192 intptr_t n =
d->ReadUnsigned();
7194 for (intptr_t i = 0; i < n; i++) {
7195 initial_field_table->
SetAt(i,
d->ReadRef());
7199 d->ReadDispatchTable();
7203 auto isolate_group =
d->isolate_group();
7204 { isolate_group->class_table()->CopySizesFromClassObjects(); }
7205 d->heap()->old_space()->EvaluateAfterLoading();
7207 auto object_store = isolate_group->object_store();
7223#if !defined(DART_PRECOMPILED_RUNTIME)
7231 for (intptr_t i = 0; i < objects->
length(); i++) {
7232 s->AddBaseObject(objects->
At(i)->ptr());
7238 ASSERT(deferred_object->IsCode());
7239 CodePtr code =
static_cast<CodePtr
>(deferred_object->ptr());
7240 ObjectPoolPtr
pool = code->untag()->object_pool_;
7242 const intptr_t
length =
pool->untag()->length_;
7243 uint8_t* entry_bits =
pool->untag()->entry_bits();
7244 for (intptr_t i = 0; i <
length; i++) {
7246 if (entry_type == ObjectPool::EntryType::kTaggedObject) {
7247 s->Push(
pool->untag()->data()[i].raw_obj_);
7251 s->Push(code->untag()->code_source_map_);
7256#if defined(DART_PRECOMPILER)
7257 intptr_t start_index = 0;
7259 if (num_deferred_objects != 0) {
7263 s->WriteUnsigned(start_index);
7264 s->WriteUnsigned(num_deferred_objects);
7265 for (intptr_t i = 0; i < num_deferred_objects; i++) {
7267 ASSERT(deferred_object->IsCode());
7268 CodePtr code =
static_cast<CodePtr
>(deferred_object->
ptr());
7269 ASSERT(
s->RefId(code) == (start_index + i));
7271 s->WriteInstructions(code->untag()->instructions_,
7272 code->untag()->unchecked_offset_, code,
false);
7273 s->WriteRootRef(code->untag()->code_source_map_,
"deferred-code");
7276 ObjectPoolPtr
pool =
7277 s->isolate_group()->object_store()->global_object_pool();
7278 const intptr_t
length =
pool->untag()->length_;
7279 uint8_t* entry_bits =
pool->untag()->entry_bits();
7280 intptr_t last_write = 0;
7281 for (intptr_t i = 0; i <
length; i++) {
7283 if (entry_type == ObjectPool::EntryType::kTaggedObject) {
7284 if (
s->IsWritten(
pool->untag()->data()[i].raw_obj_)) {
7285 intptr_t
skip = i - last_write;
7286 s->WriteUnsigned(
skip);
7287 s->WriteRootRef(
pool->untag()->data()[i].raw_obj_,
7288 "deferred-literal");
7293 s->WriteUnsigned(
length - last_write);
7307 const Array& base_objects =
7310 d->AddBaseObject(base_objects.
At(i));
7315 deferred_start_index_ =
d->ReadUnsigned();
7316 deferred_stop_index_ = deferred_start_index_ +
d->ReadUnsigned();
7317 for (intptr_t
id = deferred_start_index_;
id < deferred_stop_index_;
id++) {
7318 CodePtr code =
static_cast<CodePtr
>(
d->Ref(
id));
7320 d->ReadInstructions(code,
false);
7321 if (code->untag()->owner_->IsHeapObject() &&
7322 code->untag()->owner_->IsFunction()) {
7323 FunctionPtr func =
static_cast<FunctionPtr
>(code->untag()->owner_);
7324 uword entry_point = code->untag()->entry_point_;
7325 ASSERT(entry_point != 0);
7326 func->untag()->entry_point_ = entry_point;
7327 uword unchecked_entry_point = code->untag()->unchecked_entry_point_;
7328 ASSERT(unchecked_entry_point != 0);
7329 func->untag()->unchecked_entry_point_ = unchecked_entry_point;
7330#if defined(DART_PRECOMPILED_RUNTIME)
7331 if (func->untag()->data()->IsHeapObject() &&
7332 func->untag()->data()->IsClosureData()) {
7335 auto data =
static_cast<ClosureDataPtr
>(func->untag()->data());
7339 data->untag()->closure()->untag()->entry_point_ = entry_point;
7344 code->untag()->code_source_map_ =
7345 static_cast<CodeSourceMapPtr
>(
d->ReadRef());
7348 ObjectPoolPtr
pool =
7349 d->isolate_group()->object_store()->global_object_pool();
7350 const intptr_t
length =
pool->untag()->length_;
7351 uint8_t* entry_bits =
pool->untag()->entry_bits();
7352 for (intptr_t i =
d->ReadUnsigned(); i <
length; i +=
d->ReadUnsigned()) {
7354 ASSERT(entry_type == ObjectPool::EntryType::kTaggedObject);
7357 pool->untag()->data()[i].raw_obj_ =
d->ReadRef();
7362 auto isolate_group =
d->isolate_group();
7363 if (isolate_group->dispatch_table_snapshot() !=
nullptr) {
7364 ReadStream stream(isolate_group->dispatch_table_snapshot(),
7365 isolate_group->dispatch_table_snapshot_size());
7367 isolate_group->object_store()->instructions_tables());
7369 root_table ^= tables.
At(0);
7370 d->ReadDispatchTable(&stream,
true, root_table,
7371 deferred_start_index_, deferred_stop_index_);
7376 d->EndInstructions();
7382 intptr_t deferred_start_index_;
7383 intptr_t deferred_stop_index_;
7387static constexpr int32_t kSectionMarker = 0xABAB;
7397 heap_(thread->isolate_group()->heap()),
7398 zone_(thread->zone()),
7401 image_writer_(image_writer),
7402 canonical_clusters_by_cid_(nullptr),
7403 clusters_by_cid_(nullptr),
7407 num_base_objects_(0),
7408 num_written_objects_(0),
7411 profile_writer_(profile_writer)
7412#
if defined(SNAPSHOT_BACKTRACE)
7414 current_parent_(
Object::null()),
7417#
if defined(DART_PRECOMPILER)
7419 deduped_instructions_sources_(zone_)
7425 for (intptr_t i = 0; i < num_cids_; i++) {
7426 canonical_clusters_by_cid_[i] =
nullptr;
7429 for (intptr_t i = 0; i < num_cids_; i++) {
7430 clusters_by_cid_[i] =
nullptr;
7432 if (profile_writer_ !=
nullptr) {
7438 delete[] canonical_clusters_by_cid_;
7439 delete[] clusters_by_cid_;
7446 const bool is_discarded_code = base_object->IsHeapObject() &&
7447 base_object->IsCode() &&
7449 if (!is_discarded_code) {
7452 num_base_objects_++;
7454 if ((profile_writer_ !=
nullptr) && (
type !=
nullptr)) {
7457 profile_writer_->
AddRoot(profile_id);
7466 ASSERT(!object->IsHeapObject() || !object->IsInstructions());
7472 return next_ref_index_++;
7476 const intptr_t ref = -(next_ref_index_++);
7478 if (
object !=
nullptr) {
7479 ASSERT(!
object.IsHeapObject() || !
object.IsInstructions());
7487void Serializer::FlushProfile() {
7488 if (profile_writer_ ==
nullptr)
return;
7489 const intptr_t bytes =
7490 stream_->
Position() - object_currently_writing_.last_stream_position_;
7492 object_currently_writing_.last_stream_position_ = stream_->
Position();
7498 ASSERT(!object->IsHeapObject() || !object->IsInstructions());
7503 intptr_t heap_id)
const {
7514 if (profile_writer_ ==
nullptr)
return;
7516#if defined(DART_PRECOMPILER)
7517 if (object->IsHeapObject() && object->IsWeakSerializationReference()) {
7519 auto const target = wsr->untag()->target();
7521 if (object_id != target_id) {
7522 const auto& replacement_id =
GetProfileId(wsr->untag()->replacement());
7523 ASSERT(object_id == replacement_id);
7526 profile_writer_->AttributeDroppedReferenceTo(
7527 object_currently_writing_.id_, reference, target_id, replacement_id);
7537 reference, object_id);
7544 : serializer_(serializer),
7545 old_object_(serializer->object_currently_writing_.object_),
7546 old_id_(serializer->object_currently_writing_.id_),
7547 old_cid_(serializer->object_currently_writing_.cid_) {
7548 if (serializer_->profile_writer_ ==
nullptr)
return;
7552 serializer_->FlushProfile();
7553 serializer_->object_currently_writing_.object_ = object;
7554 serializer_->object_currently_writing_.id_ =
id;
7555 serializer_->object_currently_writing_.cid_ =
7556 object ==
nullptr ? -1 :
object->GetClassIdMayBeSmi();
7560 if (serializer_->profile_writer_ ==
nullptr)
return;
7561 serializer_->FlushProfile();
7562 serializer_->object_currently_writing_.object_ = old_object_;
7563 serializer_->object_currently_writing_.id_ = old_id_;
7564 serializer_->object_currently_writing_.cid_ = old_cid_;
7572 if (
s->profile_writer_ ==
nullptr) {
7575 if (
name ==
nullptr) {
7587 case kOneByteStringCid:
7588 case kTwoByteStringCid: {
7594 const auto& obj_id =
s->GetProfileId(obj);
7595 s->profile_writer_->SetObjectTypeAndName(obj_id,
type,
name);
7599#if !defined(DART_PRECOMPILED_RUNTIME)
7609 if (obj->IsHeapObject() && obj->IsWeakSerializationReference()) {
7620 const char*
type =
nullptr;
7621 const char*
name =
nullptr;
7631 case kObjectPoolCid: {
7632 type =
"ObjectPool";
7634 for (intptr_t i = 0; i <
pool->untag()->length_; i++) {
7635 uint8_t bits =
pool->untag()->entry_bits()[i];
7637 ObjectPool::EntryType::kTaggedObject) {
7638 auto const elem =
pool->untag()->data()[i].raw_obj_;
7647 case kImmutableArrayCid:
7651 for (intptr_t i = 0, n =
Smi::Value(array->untag()->length()); i < n;
7663 links.
Add({code->untag()->owner(),
7667 case kFunctionCid: {
7668 FunctionPtr func =
static_cast<FunctionPtr
>(obj);
7672 links.
Add({func->untag()->owner(),
7675 if (
data->GetClassId() == kClosureDataCid) {
7681 case kClosureDataCid: {
7682 auto data =
static_cast<ClosureDataPtr
>(obj);
7683 type =
"ClosureData";
7685 {
data->untag()->parent_function(),
7690 ClassPtr cls =
static_cast<ClassPtr
>(obj);
7693 links.
Add({cls->untag()->library(),
7697 case kPatchClassCid: {
7698 PatchClassPtr patch_cls =
static_cast<PatchClassPtr
>(obj);
7699 type =
"PatchClass";
7701 {patch_cls->untag()->wrapped_class(),
7706 LibraryPtr lib =
static_cast<LibraryPtr
>(obj);
7711 case kFunctionTypeCid: {
7712 type =
"FunctionType";
7715 case kRecordTypeCid: {
7716 type =
"RecordType";
7720 FATAL(
"Request to create artificial node for object with cid %d",
cid);
7725 for (
const auto& link : links) {
7740 auto& handle =
thread()->ObjectHandle();
7742 FATAL(
"Reference to unreachable object %s", handle.ToCString());
7748 ASSERT(!object->IsHeapObject() || !object->IsInstructions());
7759 if (object->IsWeakSerializationReference()) {
7762 auto const wsr =
static_cast<WeakSerializationReferencePtr
>(object);
7765 id =
HasRef(wsr->untag()->target()) ?
RefId(wsr->untag()->target())
7766 :
RefId(wsr->untag()->replacement());
7771 auto& handle =
thread()->ObjectHandle();
7773 FATAL(
"Reference for object %s is unallocated", handle.ToCString());
7776const char* Serializer::ReadOnlyObjectType(intptr_t
cid) {
7778 case kPcDescriptorsCid:
7779 return "PcDescriptors";
7780 case kCodeSourceMapCid:
7781 return "CodeSourceMap";
7782 case kCompressedStackMapsCid:
7783 return "CompressedStackMaps";
7788 case kOneByteStringCid:
7790 ?
"OneByteStringCid"
7792 case kTwoByteStringCid:
7794 ?
"TwoByteStringCid"
7802 bool is_canonical) {
7803#if defined(DART_PRECOMPILED_RUNTIME)
7822#if !defined(DART_COMPRESSED_POINTERS)
7832 if (
auto const type = ReadOnlyObjectType(
cid)) {
7838 const bool cluster_represents_canonical_set =
7844 case kTypeParametersCid:
7846 case kTypeArgumentsCid:
7848 is_canonical, cluster_represents_canonical_set);
7849 case kPatchClassCid:
7853 case kClosureDataCid:
7855 case kFfiTrampolineDataCid:
7865 case kKernelProgramInfoCid:
7869 case kObjectPoolCid:
7871 case kPcDescriptorsCid:
7873 case kCodeSourceMapCid:
7875 case kCompressedStackMapsCid:
7877 case kExceptionHandlersCid:
7881 case kContextScopeCid:
7883 case kUnlinkedCallCid:
7887 case kMegamorphicCacheCid:
7889 case kSubtypeTestCacheCid:
7891 case kLoadingUnitCid:
7893 case kLanguageErrorCid:
7895 case kUnhandledExceptionCid:
7897 case kLibraryPrefixCid:
7901 cluster_represents_canonical_set);
7902 case kFunctionTypeCid:
7904 is_canonical, cluster_represents_canonical_set);
7905 case kRecordTypeCid:
7907 is_canonical, cluster_represents_canonical_set);
7908 case kTypeParameterCid:
7910 is_canonical, cluster_represents_canonical_set);
7921 case kGrowableObjectArrayCid:
7925 case kStackTraceCid:
7929 case kWeakPropertyCid:
7943 case kImmutableArrayCid:
7950 is_canonical, cluster_represents_canonical_set && !vm_);
7951#define CASE_FFI_CID(name) case kFfi##name##Cid:
7955 case kDeltaEncodedTypedDataCid:
7957 case kWeakSerializationReferenceCid:
7958#if defined(DART_PRECOMPILER)
7960 return new (
Z) WeakSerializationReferenceSerializationCluster();
7973 if (loading_units_ ==
nullptr)
return true;
7977 FATAL(
"Missing loading unit assignment: %s\n",
7986 (*loading_units_)[unit_id]->AddDeferredObject(code);
7989#if !defined(DART_PRECOMPILED_RUNTIME)
7990#if defined(DART_PRECOMPILER)
8032 if (code == StubCode::LazyCompile().ptr() && !vm_) {
8034 }
else if (FLAG_precompiled_mode) {
8038 const intptr_t
base =
8041 : num_base_objects_;
8054 ref <= code_cluster_->last_ref()));
8058 if (ref < code_cluster_->first_deferred_ref()) {
8059 const intptr_t
key =
static_cast<intptr_t
>(
code->untag()->instructions_);
8061 const intptr_t
result = code_index_.Lookup(
key);
8069 return 1 +
base + code_index_.Length() + cluster_index;
8075 ref <= code_cluster_->last_ref());
8076 return 1 + (ref - code_cluster_->
first_ref());
8091 if (code_cluster_ !=
nullptr) {
8094 if ((loading_units_ !=
nullptr) &&
8098 auto unit_objects = loading_units_->At(i)->deferred_objects();
8100 ASSERT(unit_objects->length() == 0 || code_cluster_ !=
nullptr);
8101 for (intptr_t j = 0; j < unit_objects->length(); j++) {
8107#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
8116 if (code_cluster_ !=
nullptr) {
8117 auto in = code_cluster_->
objects();
8118 for (intptr_t i = 0; i < in->length(); i++) {
8119 code_objects.
Add(in->At(i));
8122 if (loading_units_ !=
nullptr) {
8124 loading_units_->At(current_loading_unit_id_)->deferred_objects();
8125 for (intptr_t i = 0; i < in->length(); i++) {
8126 code_objects.
Add(in->At(i)->ptr());
8131 RelocateCodeObjects(vm_, &code_objects, &writer_commands);
8134 if (code_objects.
length() == 0) {
8147 CompressedStackMapsPtr map;
8162 intptr_t not_discarded_count = 0;
8163 uint32_t first_entry_with_code = 0;
8164 for (
auto& cmd : writer_commands) {
8167 cmd.insert_instruction_of_code.code);
8169 (not_discarded_count == 0));
8171 if (not_discarded_count == 0) {
8172 first_entry_with_code = total;
8174 not_discarded_count++;
8180 const intptr_t instr =
static_cast<intptr_t
>(
8181 cmd.insert_instruction_of_code.code->untag()->instructions_);
8182 ASSERT(!code_index_.HasKey(instr));
8183 code_index_.Insert(instr, total);
8187 CompressedStackMapsPtr stack_map =
8188 cmd.insert_instruction_of_code.code->untag()->compressed_stackmaps_;
8189 const intptr_t
key =
static_cast<intptr_t
>(stack_map);
8192 stack_maps_info.
Lookup(
key)->use_count++;
8194 auto info =
new StackMapInfo();
8195 info->map = stack_map;
8196 info->use_count = 1;
8202 ASSERT(
static_cast<intptr_t
>(total) == code_index_.Length());
8203 instructions_table_len_ = not_discarded_count;
8207 stack_maps.
Sort([](StackMapInfo*
const*
a, StackMapInfo*
const*
b) {
8208 if ((*a)->use_count < (*b)->use_count)
return 1;
8209 if ((*a)->use_count > (*b)->use_count)
return -1;
8218 UntaggedInstructionsTable::Data
header;
8221 header.first_entry_with_code = first_entry_with_code;
8226 for (
auto& cmd : writer_commands) {
8228 pc_mapping.
WriteFixed<UntaggedInstructionsTable::DataEntry>({0, 0});
8233 auto write_stack_map = [&](CompressedStackMapsPtr smap) {
8234 const auto flags_and_size = smap->untag()->payload()->flags_and_size();
8235 const auto payload_size =
8237 pc_mapping.
WriteFixed<uint32_t>(flags_and_size);
8238 pc_mapping.
WriteBytes(smap->untag()->payload()->data(), payload_size);
8241 for (
auto sm : stack_maps) {
8243 write_stack_map(sm->map);
8247 if (!canonical_stack_map_entries.
IsNull()) {
8248 auto header =
reinterpret_cast<UntaggedInstructionsTable::Data*
>(
8251 write_stack_map(canonical_stack_map_entries.
ptr());
8258 sizeof(UntaggedInstructionsTable::Data));
8259 for (
auto& cmd : writer_commands) {
8261 CompressedStackMapsPtr smap =
8262 cmd.insert_instruction_of_code.code->untag()->compressed_stackmaps_;
8264 stack_maps_info.
Lookup(
static_cast<intptr_t
>(smap))->
offset;
8267 cmd.insert_instruction_of_code.code);
8269 pc_mapping.
WriteFixed<UntaggedInstructionsTable::DataEntry>(
8270 {
static_cast<uint32_t
>(entry),
offset});
8279 instructions_table_rodata_offset_ =
8282 if (profile_writer_ !=
nullptr) {
8287 "<instructions-table-rodata>"),
8288 {offset_space, instructions_table_rodata_offset_});
8295 uint32_t unchecked_offset,
8306#if defined(DART_PRECOMPILER)
8307 if (profile_writer_ !=
nullptr) {
8308 ASSERT(object_currently_writing_.id_ !=
8312 object_currently_writing_.id_,
8314 {offset_space, offset});
8323 if (FLAG_precompiled_mode) {
8324 const uint32_t payload_info =
8335 if (profile_writer_ ==
nullptr)
return;
8337 ASSERT(object_currently_writing_.id_ !=
8343 object_currently_writing_.id_,
8348#if defined(SNAPSHOT_BACKTRACE)
8356 if (image_writer_ ==
nullptr) {
8364 const bool is_code =
object->IsHeapObject() &&
object->IsCode();
8375 if (object->IsHeapObject() && object->IsInstructions()) {
8377 "Instructions should only be reachable from Code");
8382 stack_.
Add({object, cid_override});
8384 num_written_objects_++;
8386#if defined(SNAPSHOT_BACKTRACE)
8400 if (!object->IsHeapObject() || vm_) {
8408 if (!object->IsHeapObject()) {
8412 is_canonical =
true;
8414 cid =
object->GetClassId();
8415 is_canonical =
object->untag()->IsCanonical();
8424 is_canonical ? &canonical_clusters_by_cid_[
cid] : &clusters_by_cid_[
cid];
8425 if (*cluster_ref ==
nullptr) {
8427 if (*cluster_ref ==
nullptr) {
8432 ASSERT(cluster !=
nullptr);
8434 FATAL(
"cluster for %s (cid %" Pd ") %s as canonical, but %s",
8437 is_canonical ?
"should be" :
"should not be");
8440#if defined(SNAPSHOT_BACKTRACE)
8441 current_parent_ = object;
8444 cluster->
Trace(
this,
object);
8446#if defined(SNAPSHOT_BACKTRACE)
8453 while (
thread()->no_safepoint_scope_depth() > 0) {
8459 object.ToCString());
8460#if defined(SNAPSHOT_BACKTRACE)
8461 while (!
object.
IsNull()) {
8462 object = ParentOf(
object);
8464 static_cast<uword>(
object.ptr()),
object.ToCString());
8470#if defined(SNAPSHOT_BACKTRACE)
8472 for (intptr_t i = 0; i < parent_pairs_.length(); i += 2) {
8473 if (parent_pairs_[i]->ptr() ==
object) {
8474 return parent_pairs_[i + 1]->ptr();
8480ObjectPtr Serializer::ParentOf(
const Object&
object)
const {
8481 for (intptr_t i = 0; i < parent_pairs_.length(); i += 2) {
8482 if (parent_pairs_[i]->ptr() ==
object.ptr()) {
8483 return parent_pairs_[i + 1]->ptr();
8492 ASSERT(expected_version !=
nullptr);
8493 const intptr_t version_len = strlen(expected_version);
8494 WriteBytes(
reinterpret_cast<const uint8_t*
>(expected_version), version_len);
8496 char* expected_features =
8498 ASSERT(expected_features !=
nullptr);
8499 const intptr_t features_len = strlen(expected_features);
8500 WriteBytes(
reinterpret_cast<const uint8_t*
>(expected_features),
8502 free(expected_features);
8505#if !defined(DART_PRECOMPILED_RUNTIME)
8508 if ((*a)->size() > (*b)->size()) {
8510 }
else if ((*a)->size() < (*b)->size()) {
8517#define CID_CLUSTER(Type) \
8518 reinterpret_cast<Type##SerializationCluster*>(clusters_by_cid_[k##Type##Cid])
8530 roots->AddBaseObjects(
this);
8534 roots->PushRoots(
this);
8553 while (stack_.
length() > 0) {
8555 while (stack_.
length() > 0) {
8557 Trace(entry.obj, entry.cid_override);
8561#if defined(DART_PRECOMPILER)
8563 cluster->RetraceEphemerons(
this);
8567 cluster->RetraceEphemerons(
this);
8571#if defined(DART_PRECOMPILER)
8573 if (wsr_cluster !=
nullptr) {
8576 num_written_objects_ -= wsr_cluster->Count(
this);
8578 clusters_by_cid_[kWeakSerializationReferenceCid] =
nullptr;
8580 ASSERT(clusters_by_cid_[kWeakSerializationReferenceCid] ==
nullptr);
8590#define ADD_CANONICAL_NEXT(cid) \
8591 if (auto const cluster = canonical_clusters_by_cid_[cid]) { \
8592 clusters.Add(cluster); \
8593 canonical_clusters_by_cid_[cid] = nullptr; \
8595#define ADD_NON_CANONICAL_NEXT(cid) \
8596 if (auto const cluster = clusters_by_cid_[cid]) { \
8597 clusters.Add(cluster); \
8598 clusters_by_cid_[cid] = nullptr; \
8618#undef ADD_CANONICAL_NEXT
8619#undef ADD_NON_CANONICAL_NEXT
8620 const intptr_t out_of_order_clusters = clusters.
length();
8621 for (intptr_t
cid = 0;
cid < num_cids_;
cid++) {
8622 if (
auto const cluster = canonical_clusters_by_cid_[
cid]) {
8623 clusters.
Add(cluster);
8626 for (intptr_t
cid = 0;
cid < num_cids_;
cid++) {
8627 if (
auto const cluster = clusters_by_cid_[
cid]) {
8628 clusters.
Add(clusters_by_cid_[
cid]);
8632 for (intptr_t i = 0; i < out_of_order_clusters; i++) {
8633 const auto& cluster = clusters.
At(i);
8634 const intptr_t
cid = cluster->cid();
8635 auto const cid_clusters =
8636 cluster->is_canonical() ? canonical_clusters_by_cid_ : clusters_by_cid_;
8638 cid_clusters[
cid] = cluster;
8643 intptr_t num_objects = num_base_objects_ + num_written_objects_;
8644#if defined(ARCH_IS_64_BIT)
8646 FATAL(
"Ref overflow");
8653 ASSERT((instructions_table_len_ == 0) || FLAG_precompiled_mode);
8658 cluster->WriteAndMeasureAlloc(
this);
8659 bytes_heap_allocated_ += cluster->target_memory_size();
8661 Write<int32_t>(next_ref_index_);
8666 ASSERT((next_ref_index_ - 1) == num_objects);
8668 ASSERT(objects_->length() == num_objects);
8670#if defined(DART_PRECOMPILER)
8671 if (profile_writer_ !=
nullptr && wsr_cluster !=
nullptr) {
8679 wsr_cluster->CreateArtificialTargetNodesIfNeeded(
this);
8684 cluster->WriteAndMeasureFill(
this);
8686 Write<int32_t>(kSectionMarker);
8690 roots->WriteRoots(
this);
8693 Write<int32_t>(kSectionMarker);
8704#if defined(DART_PRECOMPILER) || defined(DART_PRECOMPILED_RUNTIME)
8717static constexpr intptr_t kDispatchTableSpecialEncodingBits = 6;
8718static constexpr intptr_t kDispatchTableRecentCount =
8719 1 << kDispatchTableSpecialEncodingBits;
8720static constexpr intptr_t kDispatchTableRecentMask =
8721 (1 << kDispatchTableSpecialEncodingBits) - 1;
8722static constexpr intptr_t kDispatchTableMaxRepeat =
8723 (1 << kDispatchTableSpecialEncodingBits) - 1;
8724static constexpr intptr_t kDispatchTableIndexBase = kDispatchTableMaxRepeat + 1;
8728#if defined(DART_PRECOMPILER)
8736 const auto& dispatch_table_profile_id =
GetProfileId(profile_ref);
8737 if (profile_writer_ !=
nullptr) {
8739 "DispatchTable",
"dispatch_table");
8740 profile_writer_->
AddRoot(dispatch_table_profile_id);
8743 if (profile_writer_ !=
nullptr) {
8752 const intptr_t table_length = entries.
IsNull() ? 0 : entries.
Length();
8754 ASSERT(table_length <= compiler::target::kWordMax);
8756 if (table_length == 0) {
8761 ASSERT(code_cluster_ !=
nullptr);
8778 CodePtr previous_code =
nullptr;
8779 CodePtr recent[kDispatchTableRecentCount] = {
nullptr};
8780 intptr_t recent_index = 0;
8781 intptr_t repeat_count = 0;
8782 for (intptr_t i = 0; i < table_length; i++) {
8786 if (code == previous_code) {
8787 if (++repeat_count == kDispatchTableMaxRepeat) {
8788 Write(kDispatchTableMaxRepeat);
8794 if (repeat_count > 0) {
8795 Write(repeat_count);
8798 previous_code = code;
8807 intptr_t found_index = 0;
8808 for (; found_index < kDispatchTableRecentCount; found_index++) {
8809 if (recent[found_index] == code)
break;
8811 if (found_index < kDispatchTableRecentCount) {
8812 Write(~found_index);
8819 auto const encoded = kDispatchTableIndexBase + code_index;
8820 ASSERT(encoded <= compiler::target::kWordMax);
8822 recent[recent_index] = code;
8823 recent_index = (recent_index + 1) & kDispatchTableRecentMask;
8825 if (repeat_count > 0) {
8826 Write(repeat_count);
8833#if !defined(DART_PRECOMPILED_RUNTIME)
8834 if (FLAG_print_snapshot_sizes_verbose) {
8837 buffer.Printf(
"%25s",
"Cluster");
8838 buffer.Printf(
" %6s",
"Objs");
8839 buffer.Printf(
" %8s",
"Size");
8840 buffer.Printf(
" %8s",
"Fraction");
8841 buffer.Printf(
" %10s",
"Cumulative");
8842 buffer.Printf(
" %8s",
"HeapSize");
8843 buffer.Printf(
" %5s",
"Cid");
8844 buffer.Printf(
" %9s",
"Canonical");
8847 for (intptr_t
cid = 1;
cid < num_cids_;
cid++) {
8848 if (
auto const cluster = canonical_clusters_by_cid_[
cid]) {
8849 clusters_by_size.
Add(cluster);
8851 if (
auto const cluster = clusters_by_cid_[
cid]) {
8852 clusters_by_size.
Add(cluster);
8855 intptr_t text_size = 0;
8856 if (image_writer_ !=
nullptr) {
8859 intptr_t trampoline_count, trampoline_size;
8861 auto const instructions_count = text_object_count - trampoline_count;
8862 auto const instructions_size = text_size - trampoline_size;
8865 instructions_count, instructions_size));
8866 if (trampoline_size > 0) {
8869 trampoline_count, trampoline_size));
8875 if (dispatch_table_size_ > 0) {
8878 isolate_group()->object_store()->dispatch_table_code_entries());
8879 auto const entry_count =
8880 dispatch_table_entries.IsNull() ? 0 : dispatch_table_entries.Length();
8882 "DispatchTable", entry_count, dispatch_table_size_));
8884 if (instructions_table_len_ > 0) {
8885 const intptr_t memory_size =
8886 compiler::target::InstructionsTable::InstanceSize() +
8887 compiler::target::Array::InstanceSize(instructions_table_len_);
8889 "InstructionsTable", instructions_table_len_, 0, memory_size));
8894 double cumulative_fraction = 0.0;
8895 for (intptr_t i = 0; i < clusters_by_size.
length(); i++) {
8897 double fraction =
static_cast<double>(cluster->
size()) /
total_size;
8898 cumulative_fraction += fraction;
8902 buffer.Printf(
" %1.6lf", fraction);
8903 buffer.Printf(
" %1.8lf", cumulative_fraction);
8905 if (cluster->
cid() != -1) {
8908 buffer.Printf(
" %5s",
"");
8911 buffer.Printf(
" %9s",
"canonical");
8913 buffer.Printf(
" %9s",
"");
8926 const uint8_t* data_buffer,
8927 const uint8_t* instructions_buffer,
8928 bool is_non_root_unit,
8932 old_space_(heap_->old_space()),
8933 freelist_(old_space_->DataFreeList()),
8937 image_reader_(nullptr),
8941 is_non_root_unit_(is_non_root_unit),
8944 ASSERT(instructions_buffer !=
nullptr);
8945 ASSERT(data_buffer !=
nullptr);
8946 image_reader_ =
new (zone_)
ImageReader(data_buffer, instructions_buffer);
8956 const uint32_t tags = Read<uint32_t>();
8963 cid, is_canonical, is_immutable, !is_non_root_unit_);
8978#if !defined(DART_COMPRESSED_POINTERS)
8981 case kPcDescriptorsCid:
8982 case kCodeSourceMapCid:
8983 case kCompressedStackMapsCid:
8986 case kOneByteStringCid:
8987 case kTwoByteStringCid:
8989 if (!is_non_root_unit_) {
8991 !is_non_root_unit_);
9002 case kTypeParametersCid:
9004 case kTypeArgumentsCid:
9007 case kPatchClassCid:
9013 case kClosureDataCid:
9016 case kFfiTrampolineDataCid:
9031#if !defined(DART_PRECOMPILED_RUNTIME)
9032 case kKernelProgramInfoCid:
9039 case kObjectPoolCid:
9042 case kPcDescriptorsCid:
9045 case kCodeSourceMapCid:
9048 case kCompressedStackMapsCid:
9051 case kExceptionHandlersCid:
9057 case kContextScopeCid:
9060 case kUnlinkedCallCid:
9066 case kMegamorphicCacheCid:
9069 case kSubtypeTestCacheCid:
9072 case kLoadingUnitCid:
9075 case kLanguageErrorCid:
9078 case kUnhandledExceptionCid:
9081 case kLibraryPrefixCid:
9087 case kFunctionTypeCid:
9090 case kRecordTypeCid:
9093 case kTypeParameterCid:
9110 case kGrowableObjectArrayCid:
9116 case kStackTraceCid:
9122 case kWeakPropertyCid:
9130 !is_non_root_unit_);
9136 !is_non_root_unit_);
9139 !is_non_root_unit_);
9140 case kImmutableArrayCid:
9142 kImmutableArrayCid, is_canonical, !is_non_root_unit_);
9149#define CASE_FFI_CID(name) case kFfi##name##Cid:
9153 cid, is_canonical, is_immutable, !is_non_root_unit_);
9154 case kDeltaEncodedTypedDataCid:
9159 FATAL(
"No cluster defined for cid %" Pd,
cid);
9167 intptr_t deferred_code_start_index,
9168 intptr_t deferred_code_end_index) {
9169#if defined(DART_PRECOMPILED_RUNTIME)
9170 const uint8_t* table_snapshot_start = stream->AddressOfCurrentPosition();
9171 const intptr_t
length = stream->ReadUnsigned();
9174 const intptr_t first_code_id = stream->ReadUnsigned();
9175 deferred_code_start_index -= first_code_id;
9176 deferred_code_end_index -= first_code_id;
9179 auto code =
IG->object_store()->dispatch_table_null_error_stub();
9188 ASSERT(
IG->dispatch_table() ==
nullptr);
9191 auto const array =
table->array();
9193 uword recent[kDispatchTableRecentCount] = {0};
9194 intptr_t recent_index = 0;
9195 intptr_t repeat_count = 0;
9196 for (intptr_t i = 0; i <
length; i++) {
9197 if (repeat_count > 0) {
9202 auto const encoded = stream->Read<intptr_t>();
9205 }
else if (encoded < 0) {
9206 intptr_t r = ~encoded;
9207 ASSERT(r < kDispatchTableRecentCount);
9209 }
else if (encoded <= kDispatchTableMaxRepeat) {
9210 repeat_count = encoded - 1;
9212 const intptr_t code_index = encoded - kDispatchTableIndexBase;
9214 const intptr_t code_id =
9216 if ((deferred_code_start_index <= code_id) &&
9217 (code_id < deferred_code_end_index)) {
9218 auto code =
static_cast<CodePtr
>(
Ref(first_code_id + code_id));
9227 recent[recent_index] =
value;
9228 recent_index = (recent_index + 1) & kDispatchTableRecentMask;
9232 ASSERT(repeat_count == 0);
9235 IG->set_dispatch_table(
table);
9236 intptr_t table_snapshot_size =
9237 stream->AddressOfCurrentPosition() - table_snapshot_start;
9238 IG->set_dispatch_table_snapshot(table_snapshot_start);
9239 IG->set_dispatch_table_snapshot_size(table_snapshot_size);
9245 if (image_reader_ !=
nullptr) {
9254 char*
error = VerifyVersion();
9255 if (
error ==
nullptr) {
9256 error = VerifyFeatures(isolate_group);
9258 if (
error ==
nullptr) {
9264char* SnapshotHeaderReader::VerifyVersion() {
9269 ASSERT(expected_version !=
nullptr);
9270 const intptr_t version_len = strlen(expected_version);
9272 const intptr_t kMessageBufferSize = 128;
9273 char message_buffer[kMessageBufferSize];
9275 "No full snapshot version found, expected '%s'",
9277 return BuildError(message_buffer);
9280 const char* version =
9282 ASSERT(version !=
nullptr);
9283 if (strncmp(version, expected_version, version_len) != 0) {
9284 const intptr_t kMessageBufferSize = 256;
9285 char message_buffer[kMessageBufferSize];
9288 "Wrong %s snapshot version, expected '%s' found '%s'",
9290 expected_version, actual_version);
9291 free(actual_version);
9292 return BuildError(message_buffer);
9299char* SnapshotHeaderReader::VerifyFeatures(IsolateGroup* isolate_group) {
9300 const char* expected_features =
9302 ASSERT(expected_features !=
nullptr);
9303 const intptr_t expected_len = strlen(expected_features);
9305 const char* features =
nullptr;
9306 intptr_t features_length = 0;
9308 auto error = ReadFeatures(&features, &features_length);
9309 if (
error !=
nullptr) {
9313 if (features_length != expected_len ||
9314 (strncmp(features, expected_features, expected_len) != 0)) {
9315 const intptr_t kMessageBufferSize = 1024;
9316 char message_buffer[kMessageBufferSize];
9318 features, features_length < 1024 ? features_length : 1024);
9320 "Snapshot not compatible with the current VM configuration: "
9321 "the snapshot requires '%s' but the VM has '%s'",
9322 actual_features, expected_features);
9323 free(
const_cast<char*
>(expected_features));
9324 free(actual_features);
9325 return BuildError(message_buffer);
9327 free(
const_cast<char*
>(expected_features));
9331char* SnapshotHeaderReader::ReadFeatures(
const char** features,
9332 intptr_t* features_length) {
9333 const char* cursor =
9338 "The features string in the snapshot was not '\\0'-terminated.");
9341 *features_length =
length;
9346char* SnapshotHeaderReader::BuildError(
const char*
message) {
9350ApiErrorPtr FullSnapshotReader::ConvertToApiError(
char*
message) {
9362#if defined(DART_PRECOMPILED_RUNTIME)
9364 uword entry_point = StubCode::NotLoaded().EntryPoint();
9365 code->untag()->entry_point_ = entry_point;
9366 code->untag()->unchecked_entry_point_ = entry_point;
9367 code->untag()->monomorphic_entry_point_ = entry_point;
9368 code->untag()->monomorphic_unchecked_entry_point_ = entry_point;
9369 code->untag()->instructions_length_ = 0;
9374 instructions_table_.
rodata()->first_entry_with_code +
9375 instructions_index_);
9377 const uint32_t unchecked_offset = payload_info >> 1;
9378 const bool has_monomorphic_entrypoint = (payload_info & 0x1) == 0x1;
9380 const uword entry_offset =
9381 has_monomorphic_entrypoint ? Instructions::kPolymorphicEntryOffsetAOT : 0;
9382 const uword monomorphic_entry_offset =
9383 has_monomorphic_entrypoint ? Instructions::kMonomorphicEntryOffsetAOT : 0;
9385 const uword entry_point = payload_start + entry_offset;
9386 const uword monomorphic_entry_point =
9387 payload_start + monomorphic_entry_offset;
9389 instructions_table_.
SetCodeAt(instructions_index_++, code);
9393 code->untag()->entry_point_ = entry_point;
9394 code->untag()->unchecked_entry_point_ = entry_point + unchecked_offset;
9395 code->untag()->monomorphic_entry_point_ = monomorphic_entry_point;
9396 code->untag()->monomorphic_unchecked_entry_point_ =
9397 monomorphic_entry_point + unchecked_offset;
9402 code->untag()->instructions_ = instr;
9403 code->untag()->unchecked_offset_ = unchecked_offset;
9405 const uint32_t active_offset = Read<uint32_t>();
9408 code->untag()->active_instructions_ = instr;
9409 Code::InitializeCachedEntryPointsFrom(code, instr, unchecked_offset);
9414#if defined(DART_PRECOMPILED_RUNTIME)
9415 if (instructions_table_.
IsNull()) {
9416 ASSERT(instructions_index_ == 0);
9420 const auto& code_objects =
9422 ASSERT(code_objects.Length() == instructions_index_);
9424 uword previous_end = image_reader_->GetBareInstructionsEnd();
9425 for (intptr_t i = instructions_index_ - 1; i >= 0; --i) {
9429 code->untag()->instructions_length_ = previous_end -
start;
9430 previous_end =
start;
9438 object_store->set_instructions_tables(tables);
9440 if ((tables.
Length() == 0) ||
9441 (tables.
At(tables.
Length() - 1) != instructions_table_.
ptr())) {
9443 (is_non_root_unit_ && tables.
Length() > 0));
9457 page_space_(page_space),
9458 freelist_(page_space->DataFreeList()) {
9475 const intptr_t instructions_table_len =
ReadUnsigned();
9476 const uint32_t instruction_table_data_offset =
ReadUnsigned();
9477 USE(instruction_table_data_offset);
9482#if defined(DART_PRECOMPILED_RUNTIME)
9483 if (instructions_table_len > 0) {
9484 ASSERT(FLAG_precompiled_mode);
9485 const uword start_pc = image_reader_->GetBareInstructionsAt(0);
9486 const uword end_pc = image_reader_->GetBareInstructionsEnd();
9487 uword instruction_table_data = 0;
9488 if (instruction_table_data_offset != 0) {
9493 instruction_table_data =
reinterpret_cast<uword>(
9495 image_reader_->
GetObjectAt(instruction_table_data_offset)))));
9498 instructions_table_len, start_pc, end_pc, instruction_table_data);
9501 ASSERT(instructions_table_len == 0);
9521 roots->AddBaseObjects(
this);
9525 " base objects, but deserializer provided %" Pd,
9531 for (intptr_t i = 0; i < num_clusters_; i++) {
9535 intptr_t serializers_next_ref_index_ = Read<int32_t>();
9536 ASSERT_EQUAL(serializers_next_ref_index_, next_ref_index_);
9546 for (intptr_t i = 0; i < num_clusters_; i++) {
9549 int32_t section_marker = Read<int32_t>();
9550 ASSERT(section_marker == kSectionMarker);
9555 roots->ReadRoots(
this);
9558 int32_t section_marker = Read<int32_t>();
9559 ASSERT(section_marker == kSectionMarker);
9565 roots->PostLoad(
this, refs);
9577 for (intptr_t i = 0; i < num_clusters_; i++) {
9578 clusters_[i]->
PostLoad(
this, refs);
9583 size_t clustered_length =
9585 reinterpret_cast<uword>(clustered_start);
9591#if !defined(DART_PRECOMPILED_RUNTIME)
9598 : thread_(
Thread::Current()),
9600 vm_snapshot_data_(vm_snapshot_data),
9601 isolate_snapshot_data_(isolate_snapshot_data),
9602 vm_isolate_snapshot_size_(0),
9603 isolate_snapshot_size_(0),
9604 vm_image_writer_(vm_image_writer),
9605 isolate_image_writer_(isolate_image_writer) {
9606 ASSERT(isolate_group() !=
nullptr);
9607 ASSERT(heap() !=
nullptr);
9608 ObjectStore* object_store = isolate_group()->object_store();
9609 ASSERT(object_store !=
nullptr);
9612 isolate_group()->ValidateClassTable();
9615#if defined(DART_PRECOMPILER)
9616 if (FLAG_write_v8_snapshot_profile_to !=
nullptr) {
9627 ASSERT(vm_snapshot_data_ !=
nullptr);
9628 Serializer serializer(thread(), kind_, vm_snapshot_data_, vm_image_writer_,
9629 true, profile_writer_);
9631 serializer.ReserveHeader();
9632 serializer.WriteVersionAndFeatures(
true);
9638 serializer.FillHeader(serializer.kind());
9639 clustered_vm_size_ = serializer.bytes_written();
9640 heap_vm_size_ = serializer.bytes_heap_allocated();
9644 vm_image_writer_->
Write(serializer.stream(),
true);
9645 mapped_data_size_ += vm_image_writer_->
data_size();
9646 mapped_text_size_ += vm_image_writer_->
text_size();
9652 vm_isolate_snapshot_size_ = serializer.bytes_written();
9656void FullSnapshotWriter::WriteProgramSnapshot(
9657 ZoneGrowableArray<Object*>* objects,
9658 GrowableArray<LoadingUnitSerializationData*>* units) {
9661 ASSERT(isolate_snapshot_data_ !=
nullptr);
9662 Serializer serializer(thread(), kind_, isolate_snapshot_data_,
9663 isolate_image_writer_,
false, profile_writer_);
9664 serializer.set_loading_units(units);
9666 ObjectStore* object_store = isolate_group()->object_store();
9667 ASSERT(object_store !=
nullptr);
9670 ASSERT(object_store->type_argument_int()->untag()->IsCanonical());
9671 ASSERT(object_store->type_argument_double()->untag()->IsCanonical());
9672 ASSERT(object_store->type_argument_string()->untag()->IsCanonical());
9673 ASSERT(object_store->type_argument_string_dynamic()->untag()->IsCanonical());
9674 ASSERT(object_store->type_argument_string_string()->untag()->IsCanonical());
9676 serializer.ReserveHeader();
9677 serializer.WriteVersionAndFeatures(
false);
9678 ProgramSerializationRoots
roots(objects, object_store, kind_);
9679 objects = serializer.Serialize(&roots);
9680 if (units !=
nullptr) {
9683 serializer.FillHeader(serializer.kind());
9684 clustered_isolate_size_ = serializer.bytes_written();
9685 heap_isolate_size_ = serializer.bytes_heap_allocated();
9689 isolate_image_writer_->
Write(serializer.stream(),
false);
9690#if defined(DART_PRECOMPILER)
9694 mapped_data_size_ += isolate_image_writer_->
data_size();
9695 mapped_text_size_ += isolate_image_writer_->
text_size();
9701 isolate_snapshot_size_ = serializer.bytes_written();
9707 uint32_t program_hash) {
9710 Serializer serializer(thread(), kind_, isolate_snapshot_data_,
9711 isolate_image_writer_,
false, profile_writer_);
9717 serializer.
Write(program_hash);
9727 isolate_image_writer_->
Write(serializer.
stream(),
false);
9728#if defined(DART_PRECOMPILER)
9732 mapped_data_size_ += isolate_image_writer_->
data_size();
9733 mapped_text_size_ += isolate_image_writer_->
text_size();
9745 if (vm_snapshot_data_ !=
nullptr) {
9746 objects = WriteVMSnapshot();
9751 if (isolate_snapshot_data_ !=
nullptr) {
9752 WriteProgramSnapshot(objects,
data);
9755 if (FLAG_print_snapshot_sizes) {
9756 OS::Print(
"VMIsolate(CodeSize): %" Pd "\n", clustered_vm_size_);
9757 OS::Print(
"Isolate(CodeSize): %" Pd "\n", clustered_isolate_size_);
9758 OS::Print(
"ReadOnlyData(CodeSize): %" Pd "\n", mapped_data_size_);
9759 OS::Print(
"Instructions(CodeSize): %" Pd "\n", mapped_text_size_);
9761 clustered_vm_size_ + clustered_isolate_size_ + mapped_data_size_ +
9763 OS::Print(
"VMIsolate(HeapSize): %" Pd "\n", heap_vm_size_);
9764 OS::Print(
"Isolate(HeapSize): %" Pd "\n", heap_isolate_size_);
9765 OS::Print(
"Total(HeapSize): %" Pd "\n", heap_vm_size_ + heap_isolate_size_);
9768#if defined(DART_PRECOMPILER)
9769 if (FLAG_write_v8_snapshot_profile_to !=
nullptr) {
9770 profile_writer_->Write(FLAG_write_v8_snapshot_profile_to);
9777 const uint8_t* instructions_buffer,
9779 : kind_(snapshot->kind()),
9781 buffer_(snapshot->Addr()),
9782 size_(snapshot->
length()),
9783 data_image_(snapshot->DataImage()),
9784 instructions_image_(instructions_buffer) {}
9790 char*
error = header_reader.VerifyVersion();
9791 if (
error !=
nullptr) {
9795 const char* features =
nullptr;
9796 intptr_t features_length = 0;
9797 error = header_reader.ReadFeatures(&features, &features_length);
9798 if (
error !=
nullptr) {
9802 ASSERT(features[features_length] ==
'\0');
9803 const char* cursor = features;
9804 while (*cursor !=
'\0') {
9805 while (*cursor ==
' ') {
9809 const char*
end = strstr(cursor,
" ");
9810 if (
end ==
nullptr) {
9811 end = features + features_length;
9814#define SET_FLAG(name) \
9815 if (strncmp(cursor, #name, end - cursor) == 0) { \
9816 FLAG_##name = true; \
9820 if (strncmp(cursor, "no-" #name, end - cursor) == 0) { \
9821 FLAG_##name = false; \
9826#define CHECK_FLAG(name, mode) \
9827 if (strncmp(cursor, #name, end - cursor) == 0) { \
9828 if (!FLAG_##name) { \
9829 return header_reader.BuildError("Flag " #name \
9830 " is true in snapshot, " \
9832 " is always false in " mode); \
9837 if (strncmp(cursor, "no-" #name, end - cursor) == 0) { \
9838 if (FLAG_##name) { \
9839 return header_reader.BuildError("Flag " #name \
9840 " is false in snapshot, " \
9842 " is always true in " mode); \
9848#define SET_P(name, T, DV, C) SET_FLAG(name)
9851#define SET_OR_CHECK_R(name, PV, T, DV, C) CHECK_FLAG(name, "product mode")
9853#define SET_OR_CHECK_R(name, PV, T, DV, C) SET_FLAG(name)
9857#define SET_OR_CHECK_C(name, PCV, PV, T, DV, C) CHECK_FLAG(name, "product mode")
9858#elif defined(DART_PRECOMPILED_RUNTIME)
9859#define SET_OR_CHECK_C(name, PCV, PV, T, DV, C) \
9860 CHECK_FLAG(name, "the precompiled runtime")
9862#define SET_OR_CHECK_C(name, PV, T, DV, C) SET_FLAG(name)
9866#define SET_OR_CHECK_D(name, T, DV, C) CHECK_FLAG(name, "non-debug mode")
9868#define SET_OR_CHECK_D(name, T, DV, C) SET_FLAG(name)
9873#undef SET_OR_CHECK_D
9874#undef SET_OR_CHECK_C
9875#undef SET_OR_CHECK_R
9892 if (
error !=
nullptr) {
9893 return ConvertToApiError(
error);
9901 Deserializer deserializer(thread_, kind_, buffer_, size_, data_image_,
9902 instructions_image_,
false,
9910 ASSERT(data_image_ !=
nullptr);
9913 ASSERT(instructions_image_ !=
nullptr);
9921#if defined(DART_PRECOMPILED_RUNTIME)
9925 if (
auto const bss =
image.bss()) {
9938 if (
error !=
nullptr) {
9939 return ConvertToApiError(
error);
9947 Deserializer deserializer(thread_, kind_, buffer_, size_, data_image_,
9948 instructions_image_,
false,
9956 ASSERT(data_image_ !=
nullptr);
9959 ASSERT(instructions_image_ !=
nullptr);
9970 if (!units.IsNull()) {
9975 unit.set_load_outstanding();
9976 unit.set_instructions_image(instructions_image_);
9977 unit.set_loaded(
true);
9991 if (
error !=
nullptr) {
9992 return ConvertToApiError(
error);
9996 thread_, kind_, buffer_, size_, data_image_, instructions_image_,
10004 Array::Handle(isolate_group()->object_store()->loading_units());
10006 uint32_t unit_program_hash = deserializer.
Read<uint32_t>();
10007 if (main_program_hash != unit_program_hash) {
10009 String::New(
"Deferred loading unit is from a different "
10010 "program than the main loading unit")));
10015 ASSERT(data_image_ !=
nullptr);
10018 ASSERT(instructions_image_ !=
nullptr);
10032void FullSnapshotReader::InitializeBSS() {
10033#if defined(DART_PRECOMPILED_RUNTIME)
10037 if (
auto const bss =
image.bss()) {
static void info(const char *fmt,...) SK_PRINTF_LIKE(1
static float prev(float f)
static size_t total_size(SkSBlockAllocator< N > &pool)
static bool skip(SkStream *stream, size_t amount)
static uint32_t hash(const SkShaderBase::GradientInfo &v)
#define SET_OR_CHECK_R(name, PV, T, DV, C)
#define SAVE_AND_RESET_ROOT(name, Type, init)
#define AutoTraceObject(obj)
#define PushFromTo(obj,...)
#define RESET_ROOT_LIST(V)
#define DECLARE_OBJECT_STORE_FIELD(Type, Name)
#define CID_CLUSTER(Type)
#define SET_P(name, T, DV, C)
#define ADD_CANONICAL_NEXT(cid)
#define CASE_FFI_CID(name)
#define WriteFromTo(obj,...)
#define SET_OR_CHECK_C(name, PV, T, DV, C)
#define AutoTraceObjectName(obj, str)
#define RESTORE_ROOT(name, Type, init)
#define SET_OR_CHECK_D(name, T, DV, C)
#define WriteCompressedField(obj, name)
#define ADD_NON_CANONICAL_NEXT(cid)
#define DECLARE_FIELD(name, Type, init)
#define WriteFieldValue(field, value)
#define WriteField(obj, field)
#define ASSERT_EQUAL(expected, actual)
#define RELEASE_ASSERT(cond)
#define COMPILE_ASSERT(expr)
#define CLASS_LIST_FFI_TYPE_MARKER(V)
AbstractInstanceDeserializationCluster(const char *name, bool is_canonical, bool is_root_unit)
void UpdateTypeTestingStubEntryPoint() const
void InitializeTypeTestingStubNonAtomic(const Code &stub) const
void ReadFill(Deserializer *d_) override
ArrayDeserializationCluster(intptr_t cid, bool is_canonical, bool is_root_unit)
~ArrayDeserializationCluster()
void ReadAlloc(Deserializer *d) override
~ArraySerializationCluster()
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
ArraySerializationCluster(bool is_canonical, intptr_t cid)
static intptr_t InstanceSize()
static ArrayPtr New(intptr_t len, Heap::Space space=Heap::kNew)
static constexpr bool UseCardMarkingForAllocation(const intptr_t array_length)
ObjectPtr At(intptr_t index) const
void SetAt(intptr_t index, const Object &value) const
static void Initialize(Thread *current, uword *bss, bool vm)
bool HasKey(typename KeyValueTrait::Key key) const
const T & At(intptr_t index) const
void Sort(int compare(const T *, const T *))
void WriteBytes(const void *addr, intptr_t len)
void WriteWordWith32BitWrites(uword value)
intptr_t Align(intptr_t alignment, intptr_t offset=0)
void WriteUnsigned(T value)
DART_FORCE_INLINE intptr_t bytes_written() const
virtual intptr_t Position() const
void WriteRefId(intptr_t value)
static constexpr bool decode(uint16_t value)
static constexpr uword update(ClassIdTagType value, uword original)
static constexpr uword encode(ClassIdTagType value)
static const Bool & False()
static const Bool & True()
static void SetupNativeResolver()
void BuildCanonicalSetFromLayout(Deserializer *d)
CanonicalSetDeserializationCluster(bool is_canonical, bool is_root_unit, const char *name)
SetType::ArrayHandle & table_
void VerifyCanonicalSet(Deserializer *d, const Array &refs, const typename SetType::ArrayHandle ¤t_table)
CanonicalSetSerializationCluster(intptr_t cid, bool is_canonical, bool represents_canonical_set, const char *name, intptr_t target_instance_size=0)
void ReorderObjects(Serializer *s)
void WriteCanonicalSetLayout(Serializer *s)
GrowableArray< PointerType > objects_
virtual bool IsInCanonicalSet(Serializer *s, PointerType ptr)
~ClassDeserializationCluster()
void ReadAlloc(Deserializer *d) override
ClassDeserializationCluster()
void ReadFill(Deserializer *d_) override
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
~ClassSerializationCluster()
ClassSerializationCluster(intptr_t num_cids)
ClassPtr At(intptr_t cid) const
intptr_t NumTopLevelCids() const
static bool IsTopLevelCid(intptr_t cid)
static int32_t target_next_field_offset_in_words(const ClassPtr cls)
static intptr_t InstanceSize()
static int32_t target_type_arguments_field_offset_in_words(const ClassPtr cls)
static int32_t target_instance_size_in_words(const ClassPtr cls)
ClosureDataDeserializationCluster()
void ReadAlloc(Deserializer *d) override
void ReadFill(Deserializer *d_) override
~ClosureDataDeserializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
~ClosureDataSerializationCluster()
void WriteFill(Serializer *s)
ClosureDataSerializationCluster()
static intptr_t InstanceSize()
void ReadFill(Deserializer *d_) override
ClosureDeserializationCluster(bool is_canonical, bool is_root_unit)
~ClosureDeserializationCluster()
void ReadAlloc(Deserializer *d) override
void WriteAlloc(Serializer *s)
ClosureSerializationCluster(bool is_canonical)
~ClosureSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
void ReadFill(Deserializer *d, intptr_t start_index, intptr_t stop_index, bool deferred)
void PostLoad(Deserializer *d, const Array &refs) override
void ReadAllocOneCode(Deserializer *d)
~CodeDeserializationCluster()
void ReadAlloc(Deserializer *d) override
void ReadFill(Deserializer *d) override
CodeDeserializationCluster()
intptr_t first_ref() const
~CodeSerializationCluster()
static void Sort(Serializer *s, GrowableArray< CodePtr > *codes)
static void Sort(Serializer *s, GrowableArray< Code * > *codes)
void WriteFill(Serializer *s, Snapshot::Kind kind, CodePtr code, bool deferred)
void WriteAlloc(Serializer *s, CodePtr code)
static const char * MakeDisambiguatedCodeName(Serializer *s, CodePtr c)
static void Insert(Serializer *s, GrowableArray< CodeOrderInfo > *order_list, IntMap< intptr_t > *order_map, CodePtr code)
void Trace(Serializer *s, ObjectPtr object)
GrowableArray< CodePtr > * objects()
intptr_t last_ref() const
void TracePool(Serializer *s, ObjectPoolPtr pool, bool only_call_targets)
void WriteAlloc(Serializer *s)
static int CompareCodeOrderInfo(CodeOrderInfo const *a, CodeOrderInfo const *b)
intptr_t NonDiscardedCodeCount()
void WriteFill(Serializer *s)
CodeSerializationCluster(Heap *heap)
GrowableArray< CodePtr > * deferred_objects()
intptr_t first_deferred_ref() const
~CodeSourceMapDeserializationCluster()
CodeSourceMapDeserializationCluster()
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
~CodeSourceMapSerializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
CodeSourceMapSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
static intptr_t InstanceSize()
static uword EntryPointOf(const CodePtr code)
static intptr_t InstanceSize()
static InstructionsPtr InstructionsOf(const CodePtr code)
static uword PayloadStartOf(const CodePtr code)
bool HasMonomorphicEntry() const
static bool IsDiscarded(const CodePtr code)
static void NotifyCodeObservers(const Code &code, bool optimized)
@ kSCallTableCodeOrTypeTarget
@ kSCallTableKindAndOffset
bool IsUnknownDartCode() const
void ReadAlloc(Deserializer *d) override
CompressedStackMapsDeserializationCluster()
~CompressedStackMapsDeserializationCluster()
void ReadFill(Deserializer *d_) override
void WriteFill(Serializer *s)
~CompressedStackMapsSerializationCluster()
void WriteAlloc(Serializer *s)
CompressedStackMapsSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
static intptr_t InstanceSize()
void ReadAlloc(Deserializer *d) override
ContextDeserializationCluster()
~ContextDeserializationCluster()
void ReadFill(Deserializer *d_) override
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
ContextScopeDeserializationCluster()
~ContextScopeDeserializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
ContextScopeSerializationCluster()
void WriteFill(Serializer *s)
~ContextScopeSerializationCluster()
static intptr_t InstanceSize()
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
ContextSerializationCluster()
~ContextSerializationCluster()
static intptr_t InstanceSize()
static IsolateGroup * vm_isolate_group()
static Isolate * vm_isolate()
static char * FeaturesString(IsolateGroup *isolate_group, bool is_vm_snapshot, Snapshot::Kind kind)
~DeltaEncodedTypedDataDeserializationCluster()
void ReadAlloc(Deserializer *d) override
DeltaEncodedTypedDataDeserializationCluster()
void ReadFill(Deserializer *d_) override
~DeltaEncodedTypedDataSerializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
DeltaEncodedTypedDataSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
virtual void ReadFill(Deserializer *deserializer)=0
virtual ~DeserializationCluster()
DeserializationCluster(const char *name, bool is_canonical=false, bool is_immutable=false)
bool is_canonical() const
virtual void PostLoad(Deserializer *deserializer, const Array &refs)
void ReadAllocFixedSize(Deserializer *deserializer, intptr_t instance_size)
virtual void ReadAlloc(Deserializer *deserializer)=0
const char * name() const
virtual void ReadRoots(Deserializer *deserializer)=0
virtual void AddBaseObjects(Deserializer *deserializer)=0
virtual void PostLoad(Deserializer *deserializer, const Array &refs)=0
virtual ~DeserializationRoots()
void ReadFromTo(T obj, P &&... params)
ObjectPtr Ref(intptr_t index) const
uint64_t ReadUnsigned64()
TokenPosition ReadTokenPosition()
uint64_t ReadUnsigned64()
ObjectPtr Allocate(intptr_t size)
void ReadInstructions(CodePtr code, bool deferred)
void set_code_start_index(intptr_t value)
void AssignRef(ObjectPtr object)
ApiErrorPtr VerifyImageAlignment()
bool is_non_root_unit() const
const InstructionsTable & instructions_table() const
intptr_t next_index() const
const uint8_t * AddressOfCurrentPosition() const
static void InitializeHeader(ObjectPtr raw, intptr_t cid, intptr_t size, bool is_canonical=false)
void AddBaseObject(ObjectPtr base_object)
intptr_t num_base_objects() const
ObjectPtr Ref(intptr_t index) const
void Advance(intptr_t value)
CodePtr GetCodeByIndex(intptr_t code_index, uword *entry_point) const
uword GetEntryPointByCodeIndex(intptr_t code_index) const
intptr_t position() const
TokenPosition ReadTokenPosition()
ObjectPtr GetObjectAt(uint32_t offset) const
intptr_t code_start_index() const
static intptr_t CodeIndexToClusterIndex(const InstructionsTable &table, intptr_t code_index)
Snapshot::Kind kind() const
DeserializationCluster * ReadCluster()
uword ReadWordWith32BitReads()
void Align(intptr_t alignment, intptr_t offset=0)
void ReadBytes(uint8_t *addr, intptr_t len)
Deserializer(Thread *thread, Snapshot::Kind kind, const uint8_t *buffer, intptr_t size, const uint8_t *data_buffer, const uint8_t *instructions_buffer, bool is_non_root_unit, intptr_t offset=0)
intptr_t code_stop_index() const
void set_position(intptr_t p)
void Deserialize(DeserializationRoots *roots)
void set_code_stop_index(intptr_t value)
static void DisassembleStub(const char *name, const Code &code)
static void DisassembleCode(const Function &function, const Code &code, bool optimized)
void ReadFill(Deserializer *d_) override
~DoubleDeserializationCluster()
DoubleDeserializationCluster(bool is_canonical, bool is_root_unit)
void ReadAlloc(Deserializer *d) override
~DoubleSerializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
DoubleSerializationCluster(bool is_canonical)
void Trace(Serializer *s, ObjectPtr object)
static intptr_t InstanceSize()
void ReadFill(Deserializer *d_) override
~ExceptionHandlersDeserializationCluster()
ExceptionHandlersDeserializationCluster()
void ReadAlloc(Deserializer *d) override
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
~ExceptionHandlersSerializationCluster()
ExceptionHandlersSerializationCluster()
static intptr_t InstanceSize()
~ExternalTypedDataDeserializationCluster()
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
ExternalTypedDataDeserializationCluster(intptr_t cid)
~ExternalTypedDataSerializationCluster()
void WriteAlloc(Serializer *s)
ExternalTypedDataSerializationCluster(intptr_t cid)
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
static constexpr int kDataSerializationAlignment
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
~FakeSerializationCluster()
FakeSerializationCluster(const char *name, intptr_t num_objects, intptr_t size, intptr_t target_memory_size=0)
void Trace(Serializer *s, ObjectPtr object)
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
FfiTrampolineDataDeserializationCluster()
~FfiTrampolineDataDeserializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
~FfiTrampolineDataSerializationCluster()
void WriteFill(Serializer *s)
FfiTrampolineDataSerializationCluster()
static intptr_t InstanceSize()
FieldDeserializationCluster()
~FieldDeserializationCluster()
void ReadFill(Deserializer *d_) override
void PostLoad(Deserializer *d, const Array &refs) override
void ReadAlloc(Deserializer *d) override
void WriteFill(Serializer *s)
FieldSerializationCluster()
void WriteAlloc(Serializer *s)
~FieldSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void SetAt(intptr_t index, ObjectPtr raw_instance, bool concurrent_use=false)
ObjectPtr At(intptr_t index, bool concurrent_use=false) const
void AllocateIndex(intptr_t index)
intptr_t NumFieldIds() const
void set_is_nullable_unsafe(bool val) const
void InitializeGuardedListLengthInObjectOffset(bool unsafe=false) const
static intptr_t InstanceSize()
void set_guarded_list_length_in_object_offset_unsafe(intptr_t offset) const
void set_guarded_cid_unsafe(intptr_t cid) const
static intptr_t TargetOffsetOf(FieldPtr field)
void set_guarded_list_length_unsafe(intptr_t list_length) const
void set_static_type_exactness_state_unsafe(StaticTypeExactnessState state) const
static intptr_t value_offset()
static intptr_t InstanceSize()
static intptr_t InstanceSize()
static intptr_t value_offset()
ApiErrorPtr ReadUnitSnapshot(const LoadingUnit &unit)
ApiErrorPtr ReadProgramSnapshot()
FullSnapshotReader(const Snapshot *snapshot, const uint8_t *instructions_buffer, Thread *thread)
ApiErrorPtr ReadVMSnapshot()
FullSnapshotWriter(Snapshot::Kind kind, NonStreamingWriteStream *vm_snapshot_data, NonStreamingWriteStream *isolate_snapshot_data, ImageWriter *vm_image_writer, ImageWriter *iso_image_writer)
void WriteFullSnapshot(GrowableArray< LoadingUnitSerializationData * > *data=nullptr)
void WriteUnitSnapshot(GrowableArray< LoadingUnitSerializationData * > *units, LoadingUnitSerializationData *unit, uint32_t program_hash)
void ReadFill(Deserializer *d_) override
FunctionDeserializationCluster()
~FunctionDeserializationCluster()
void PostLoad(Deserializer *d, const Array &refs) override
void ReadAlloc(Deserializer *d) override
void WriteAlloc(Serializer *s)
~FunctionSerializationCluster()
FunctionSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
static const char * MakeDisambiguatedFunctionName(Serializer *s, FunctionPtr f)
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
FunctionTypeDeserializationCluster(bool is_canonical, bool is_root_unit)
~FunctionTypeDeserializationCluster()
void PostLoad(Deserializer *d, const Array &refs) override
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
~FunctionTypeSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
FunctionTypeSerializationCluster(bool is_canonical, bool represents_canonical_set)
static intptr_t InstanceSize()
CodePtr CurrentCode() const
static intptr_t InstanceSize()
void ClearCodeSafe() const
void PrintName(const NameFormattingParams &params, BaseTextBuffer *printer) const
void SetInstructionsSafe(const Code &value) const
void ReadFill(Deserializer *d_) override
GrowableObjectArrayDeserializationCluster()
~GrowableObjectArrayDeserializationCluster()
void ReadAlloc(Deserializer *d) override
void WriteFill(Serializer *s)
~GrowableObjectArraySerializationCluster()
GrowableObjectArraySerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
void Add(const Object &value, Heap::Space space=Heap::kNew) const
static GrowableObjectArrayPtr New(Heap::Space space=Heap::kNew)
static intptr_t InstanceSize()
ObjectPtr At(intptr_t index) const
static constexpr double kMaxLoadFactor
HeapLocker(Thread *thread, PageSpace *page_space)
intptr_t GetLoadingUnit(ObjectPtr raw_obj) const
void ResetObjectIdTable()
bool Verify(const char *msg, MarkExpectation mark_expectation=kForbidMarked)
intptr_t GetObjectId(ObjectPtr raw_obj) const
void SetObjectId(ObjectPtr raw_obj, intptr_t object_id)
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
~ICDataDeserializationCluster()
ICDataDeserializationCluster()
ICDataSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
~ICDataSerializationCluster()
void WriteAlloc(Serializer *s)
@ kCachedICDataArrayCount
static intptr_t InstanceSize()
ObjectPtr GetObjectAt(uint32_t offset) const
InstructionsPtr GetInstructionsAt(uint32_t offset) const
ApiErrorPtr VerifyAlignment() const
intptr_t GetTextObjectCount() const
void Write(NonStreamingWriteStream *clustered_stream, bool vm)
void SetProfileWriter(V8SnapshotProfileWriter *profile_writer)
static const char * TagObjectTypeAsReadOnly(Zone *zone, const char *type)
void ClearProfileWriter()
intptr_t text_size() const
void GetTrampolineInfo(intptr_t *count, intptr_t *size) const
int32_t GetTextOffsetFor(InstructionsPtr instructions, CodePtr code)
uint32_t GetDataOffsetFor(ObjectPtr raw_object)
intptr_t data_size() const
void PrepareForSerialization(GrowableArray< ImageWriterCommand > *commands)
uint32_t AddBytesToData(uint8_t *bytes, intptr_t length)
~InstanceDeserializationCluster()
void ReadAlloc(Deserializer *d) override
InstanceDeserializationCluster(intptr_t cid, bool is_canonical, bool is_immutable, bool is_root_unit)
void ReadFill(Deserializer *d_) override
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
~InstanceSerializationCluster()
InstanceSerializationCluster(bool is_canonical, intptr_t cid)
static intptr_t NextFieldOffset()
static InstructionsTablePtr New(intptr_t length, uword start_pc, uword end_pc, uword rodata)
void SetCodeAt(intptr_t index, CodePtr code) const
const UntaggedInstructionsTable::Data * rodata() const
uword EntryPointAt(intptr_t index) const
static intptr_t InstanceSize()
static intptr_t value_offset()
V Lookup(const Key &key) const
void Insert(const Key &key, const Value &value)
ObjectStore * object_store() const
static IsolateGroup * Current()
ClassTable * class_table() const
void SetupImagePage(const uint8_t *snapshot_buffer, bool is_executable)
IsolateGroup * group() const
void ReadAlloc(Deserializer *d) override
void PostLoad(Deserializer *d, const Array &refs) override
KernelProgramInfoDeserializationCluster()
~KernelProgramInfoDeserializationCluster()
void ReadFill(Deserializer *d_) override
void Trace(Serializer *s, ObjectPtr object)
~KernelProgramInfoSerializationCluster()
void WriteFill(Serializer *s)
KernelProgramInfoSerializationCluster()
void WriteAlloc(Serializer *s)
static intptr_t InstanceSize()
LanguageErrorDeserializationCluster()
~LanguageErrorDeserializationCluster()
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
LanguageErrorSerializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
~LanguageErrorSerializationCluster()
static intptr_t InstanceSize()
void ReadAlloc(Deserializer *d) override
~LibraryDeserializationCluster()
LibraryDeserializationCluster()
void ReadFill(Deserializer *d_) override
LibraryPrefixDeserializationCluster()
void ReadAlloc(Deserializer *d) override
~LibraryPrefixDeserializationCluster()
void ReadFill(Deserializer *d_) override
void Trace(Serializer *s, ObjectPtr object)
LibraryPrefixSerializationCluster()
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
~LibraryPrefixSerializationCluster()
static intptr_t InstanceSize()
~LibrarySerializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
LibrarySerializationCluster()
static intptr_t InstanceSize()
void ReadFill(Deserializer *d_) override
LoadingUnitDeserializationCluster()
void ReadAlloc(Deserializer *d) override
~LoadingUnitDeserializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
~LoadingUnitSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
LoadingUnitSerializationCluster()
void set_objects(ZoneGrowableArray< Object * > *objects)
ZoneGrowableArray< Object * > * objects()
LoadingUnitSerializationData * parent() const
GrowableArray< Code * > * deferred_objects()
LoadingUnitPtr parent() const
static intptr_t InstanceSize()
static constexpr intptr_t kRootId
void set_base_objects(const Array &value) const
void set_instructions_image(const uint8_t *value) const
uint8_t * Steal(intptr_t *length)
void ReadAlloc(Deserializer *d) override
~MapDeserializationCluster()
MapDeserializationCluster(intptr_t cid, bool is_canonical, bool is_root_unit)
void ReadFill(Deserializer *d_) override
MapSerializationCluster(bool is_canonical, intptr_t cid)
void Trace(Serializer *s, ObjectPtr object)
~MapSerializationCluster()
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
void ReadAlloc(Deserializer *d) override
~MegamorphicCacheDeserializationCluster()
MegamorphicCacheDeserializationCluster()
void ReadFill(Deserializer *d_) override
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
~MegamorphicCacheSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
MegamorphicCacheSerializationCluster()
static intptr_t InstanceSize()
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
~MintDeserializationCluster()
MintDeserializationCluster(bool is_canonical, bool is_root_unit)
void WriteFill(Serializer *s)
MintSerializationCluster(bool is_canonical)
void WriteAlloc(Serializer *s)
~MintSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
static intptr_t InstanceSize()
NamespaceDeserializationCluster()
void ReadAlloc(Deserializer *d) override
~NamespaceDeserializationCluster()
void ReadFill(Deserializer *d_) override
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
~NamespaceSerializationCluster()
NamespaceSerializationCluster()
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
static uword LinkNativeCallEntry()
DART_FORCE_INLINE void SetPosition(intptr_t value)
static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
static void Print(const char *format,...) PRINTF_ATTRIBUTE(1
static DART_NORETURN void Abort()
static char * SCreate(Zone *zone, const char *format,...) PRINTF_ATTRIBUTE(2
ObjectPoolDeserializationCluster()
void ReadAlloc(Deserializer *d) override
void PostLoad(Deserializer *d, const Array &refs) override
void ReadFill(Deserializer *d_) override
~ObjectPoolDeserializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
~ObjectPoolSerializationCluster()
ObjectPoolSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
static uint8_t EncodeBits(EntryType type, Patchability patchable, SnapshotBehavior snapshot_behavior)
static intptr_t InstanceSize()
ObjectPtr Decompress(uword heap_base) const
UntaggedObject * untag() const
intptr_t GetClassIdMayBeSmi() const
static Object * ReadOnlyHandle()
static void set_vm_isolate_snapshot_object_table(const Array &table)
static void FinalizeReadOnlyObject(ObjectPtr object)
virtual const char * ToCString() const
static constexpr intptr_t RoundedAllocationSize(intptr_t size)
static ObjectPtr RawCast(ObjectPtr obj)
static Object & ZoneHandle()
const char * FieldNameForOffset(intptr_t cid, intptr_t offset)
static intptr_t InstanceSize()
void AcquireLock(FreeList *freelist)
void ReleaseLock(FreeList *freelist)
DART_FORCE_INLINE uword AllocateSnapshotLocked(FreeList *freelist, intptr_t size)
~PatchClassDeserializationCluster()
void ReadAlloc(Deserializer *d) override
PatchClassDeserializationCluster()
void ReadFill(Deserializer *d_) override
~PatchClassSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
PatchClassSerializationCluster()
static intptr_t InstanceSize()
void ReadFill(Deserializer *d_) override
~PcDescriptorsDeserializationCluster()
PcDescriptorsDeserializationCluster()
void ReadAlloc(Deserializer *d) override
static intptr_t InstanceSize()
ProgramDeserializationRoots(ObjectStore *object_store)
void PostLoad(Deserializer *d, const Array &refs) override
void ReadRoots(Deserializer *d) override
void AddBaseObjects(Deserializer *d) override
void PushRoots(Serializer *s)
~ProgramSerializationRoots()
void AddBaseObjects(Serializer *s)
ProgramSerializationRoots(ZoneGrowableArray< Object * > *base_objects, ObjectStore *object_store, Snapshot::Kind snapshot_kind)
virtual const CompressedStackMaps & canonicalized_stack_map_entries() const
void WriteRoots(Serializer *s)
void ReadAlloc(Deserializer *d) override
void ReadFill(Deserializer *d_) override
RODataDeserializationCluster(intptr_t cid, bool is_canonical, bool is_root_unit)
~RODataDeserializationCluster()
void PostLoad(Deserializer *d, const Array &refs) override
RODataSerializationCluster(Zone *zone, const char *type, intptr_t cid, bool is_canonical)
void WriteFill(Serializer *s)
~RODataSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
uword ReadWordWith32BitReads()
void Align(intptr_t alignment, intptr_t offset=0)
intptr_t Position() const
intptr_t PendingBytes() const
const uint8_t * AddressOfCurrentPosition() const
void Advance(intptr_t value)
void SetPosition(intptr_t value)
void ReadBytes(void *addr, intptr_t len)
void ReadAlloc(Deserializer *d) override
RecordDeserializationCluster(bool is_canonical, bool is_root_unit)
~RecordDeserializationCluster()
void ReadFill(Deserializer *d_) override
~RecordSerializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
RecordSerializationCluster(bool is_canonical)
void Trace(Serializer *s, ObjectPtr object)
intptr_t num_fields() const
void ReadFill(Deserializer *d_) override
void PostLoad(Deserializer *d, const Array &refs) override
void ReadAlloc(Deserializer *d) override
~RecordTypeDeserializationCluster()
RecordTypeDeserializationCluster(bool is_canonical, bool is_root_unit)
void Trace(Serializer *s, ObjectPtr object)
RecordTypeSerializationCluster(bool is_canonical, bool represents_canonical_set)
void WriteFill(Serializer *s)
~RecordTypeSerializationCluster()
void WriteAlloc(Serializer *s)
static intptr_t InstanceSize()
static intptr_t InstanceSize()
static intptr_t NumFields(RecordPtr ptr)
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
RegExpDeserializationCluster()
~RegExpDeserializationCluster()
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
RegExpSerializationCluster()
~RegExpSerializationCluster()
static intptr_t InstanceSize()
void ReadAlloc(Deserializer *d) override
void ReadFill(Deserializer *d_) override
~ScriptDeserializationCluster()
ScriptDeserializationCluster()
void WriteFill(Serializer *s)
~ScriptSerializationCluster()
void WriteAlloc(Serializer *s)
ScriptSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
static intptr_t line_starts_offset()
static intptr_t InstanceSize()
const char * name() const
bool is_canonical() const
virtual ~SerializationCluster()
void WriteAndMeasureAlloc(Serializer *serializer)
void WriteAndMeasureFill(Serializer *serializer)
static constexpr intptr_t kSizeVaries
intptr_t target_memory_size() const
const intptr_t target_instance_size_
intptr_t target_memory_size_
intptr_t num_objects() const
virtual void Trace(Serializer *serializer, ObjectPtr object)=0
SerializationCluster(const char *name, intptr_t cid, intptr_t target_instance_size=kSizeVaries, bool is_canonical=false)
virtual void WriteAlloc(Serializer *serializer)=0
virtual void WriteFill(Serializer *serializer)=0
bool is_immutable() const
virtual void AddBaseObjects(Serializer *serializer)=0
virtual const CompressedStackMaps & canonicalized_stack_map_entries() const
virtual void WriteRoots(Serializer *serializer)=0
virtual void PushRoots(Serializer *serializer)=0
virtual ~SerializationRoots()
WritingObjectScope(Serializer *serializer, const char *type, ObjectPtr object, const char *name)
WritingObjectScope(Serializer *serializer, const char *type, ObjectPtr object, StringPtr name)
WritingObjectScope(Serializer *serializer, ObjectPtr object)
intptr_t current_loading_unit_id() const
void WriteCid(intptr_t cid)
void WritePropertyRef(ObjectPtr object, const char *property)
void WriteWordWith32BitWrites(uword value)
NonStreamingWriteStream * stream()
void DumpCombinedCodeStatistics()
DART_NOINLINE void WriteRange(ObjectPtr obj, T from, T to)
void TraceDataOffset(uint32_t offset)
void AddBaseObject(ObjectPtr base_object, const char *type=nullptr, const char *name=nullptr)
void WriteVersionAndFeatures(bool is_vm_snapshot)
bool InCurrentLoadingUnitOrRoot(ObjectPtr obj)
void WriteRootRef(ObjectPtr object, const char *name=nullptr)
Serializer(Thread *thread, Snapshot::Kind kind, NonStreamingWriteStream *stream, ImageWriter *image_writer_, bool vm_, V8SnapshotProfileWriter *profile_writer=nullptr)
GrowableArray< LoadingUnitSerializationData * > * loading_units() const
bool HasArtificialRef(ObjectPtr object) const
void set_loading_units(GrowableArray< LoadingUnitSerializationData * > *units)
void PrintSnapshotSizes()
void set_current_loading_unit_id(intptr_t id)
bool HasProfileNode(ObjectPtr object) const
void WriteFromTo(T obj, P &&... args)
void WriteElementRef(ObjectPtr object, intptr_t index)
void FillHeader(Snapshot::Kind kind)
uint32_t GetDataOffset(ObjectPtr object) const
void AttributeReference(ObjectPtr object, const V8SnapshotProfileWriter::Reference &reference)
bool HasRef(ObjectPtr object) const
bool IsWritten(ObjectPtr object) const
intptr_t AssignArtificialRef(ObjectPtr object=nullptr)
DART_NOINLINE void PushRange(ObjectPtr obj, T from, T to)
void PushWeak(ObjectPtr object)
Snapshot::Kind kind() const
intptr_t RefId(ObjectPtr object) const
intptr_t GetCodeIndex(CodePtr code)
void PushFromTo(T obj, P &&... args)
SerializationCluster * NewClusterForClass(intptr_t cid, bool is_canonical)
bool IsReachable(ObjectPtr object) const
void Trace(ObjectPtr object, intptr_t cid_override)
ZoneGrowableArray< Object * > * Serialize(SerializationRoots *roots)
void RecordDeferredCode(CodePtr ptr)
void WriteBytes(const void *addr, intptr_t len)
void WriteOffsetRef(ObjectPtr object, intptr_t offset)
intptr_t UnsafeRefId(ObjectPtr object) const
void AttributePropertyRef(ObjectPtr object, const char *property)
void WriteUnsigned(intptr_t value)
intptr_t AssignRef(ObjectPtr object)
void WriteTokenPosition(TokenPosition pos)
void Align(intptr_t alignment, intptr_t offset=0)
void PrepareInstructions(const CompressedStackMaps &canonical_smap)
V8SnapshotProfileWriter::ObjectId GetProfileId(ObjectPtr object) const
void WriteDispatchTable(const Array &entries)
V8SnapshotProfileWriter * profile_writer() const
intptr_t GetDataSize() const
void UnexpectedObject(ObjectPtr object, const char *message)
bool CreateArtificialNodeIfNeeded(ObjectPtr obj)
intptr_t bytes_heap_allocated()
void Push(ObjectPtr object, intptr_t cid_override=kIllegalCid)
void WriteRefId(intptr_t value)
void WriteInstructions(InstructionsPtr instr, uint32_t unchecked_offset, CodePtr code, bool deferred)
intptr_t next_ref_index() const
void AttributeElementRef(ObjectPtr object, intptr_t index)
void WriteUnsigned64(uint64_t value)
void ReadAlloc(Deserializer *d) override
SetDeserializationCluster(intptr_t cid, bool is_canonical, bool is_root_unit)
~SetDeserializationCluster()
void ReadFill(Deserializer *d_) override
SetSerializationCluster(bool is_canonical, intptr_t cid)
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
~SetSerializationCluster()
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
void ReadFill(Deserializer *d_) override
~Simd128DeserializationCluster()
Simd128DeserializationCluster(intptr_t cid, bool is_canonical, bool is_root_unit)
void ReadAlloc(Deserializer *d) override
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
Simd128SerializationCluster(intptr_t cid, bool is_canonical)
void WriteFill(Serializer *s)
~Simd128SerializationCluster()
static SmiPtr New(intptr_t value)
static bool IsValid(int64_t value)
static bool IsFull(Kind kind)
static const char * KindToCString(Kind kind)
static bool IncludesStringsInROData(Kind kind)
static bool IncludesCode(Kind kind)
static constexpr intptr_t kHeaderSize
ThreadState * thread() const
StackTraceDeserializationCluster()
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
~StackTraceDeserializationCluster()
void Trace(Serializer *s, ObjectPtr object)
~StackTraceSerializationCluster()
void WriteAlloc(Serializer *s)
StackTraceSerializationCluster()
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
static StaticTypeExactnessState NotTracking()
static intptr_t DecodeLengthAndCid(intptr_t encoded, intptr_t *out_cid)
StringDeserializationCluster(bool is_canonical, bool is_root_unit)
~StringDeserializationCluster()
void ReadAlloc(Deserializer *d) override
static intptr_t InstanceSize(intptr_t length, intptr_t cid)
void PostLoad(Deserializer *d, const Array &refs) override
void ReadFill(Deserializer *d_) override
void Add(uint16_t code_unit)
void WriteFill(Serializer *s)
~StringSerializationCluster()
StringSerializationCluster(bool is_canonical, bool represents_canonical_set)
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
static intptr_t EncodeLengthAndCid(intptr_t length, intptr_t cid)
static StringPtr New(const char *cstr, Heap::Space space=Heap::kNew)
static uint32_t SetCachedHash(StringPtr obj, uint32_t hash)
static const Code & EntryAt(intptr_t index)
static const char * NameAt(intptr_t index)
static void InitializationDone()
static intptr_t NumEntries()
static void EntryAtPut(intptr_t index, Code *entry)
void ReadAlloc(Deserializer *d) override
~SubtypeTestCacheDeserializationCluster()
void ReadFill(Deserializer *d_) override
SubtypeTestCacheDeserializationCluster()
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
~SubtypeTestCacheSerializationCluster()
SubtypeTestCacheSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
static intptr_t InstanceSize()
static void InitFromSnapshot(IsolateGroup *isolate_group)
static StringPtr New(Thread *thread, const char *cstr)
IsolateGroup * isolate_group() const
void DecrementNoSafepointScopeDepth()
static Thread * Current()
IsolateGroup * isolate_group() const
static TokenPosition Deserialize(int32_t value)
static intptr_t InstanceSize()
~TypeArgumentsDeserializationCluster()
void PostLoad(Deserializer *d, const Array &refs) override
void ReadFill(Deserializer *d_) override
TypeArgumentsDeserializationCluster(bool is_canonical, bool is_root_unit)
void ReadAlloc(Deserializer *d) override
~TypeArgumentsSerializationCluster()
TypeArgumentsSerializationCluster(bool is_canonical, bool represents_canonical_set)
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
TypeArgumentsPtr Canonicalize(Thread *thread) const
void ReadAlloc(Deserializer *d) override
void PostLoad(Deserializer *d, const Array &refs) override
~TypeDeserializationCluster()
void ReadFill(Deserializer *d_) override
TypeDeserializationCluster(bool is_canonical, bool is_root_unit)
void ReadFill(Deserializer *d_) override
void PostLoad(Deserializer *d, const Array &refs) override
void ReadAlloc(Deserializer *d) override
~TypeParameterDeserializationCluster()
TypeParameterDeserializationCluster(bool is_canonical, bool is_root_unit)
TypeParameterSerializationCluster(bool is_canonical, bool cluster_represents_canonical_set)
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
~TypeParameterSerializationCluster()
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
virtual AbstractTypePtr Canonicalize(Thread *thread) const
TypeParametersDeserializationCluster()
void ReadFill(Deserializer *d_) override
~TypeParametersDeserializationCluster()
void ReadAlloc(Deserializer *d) override
static intptr_t InstanceSize()
virtual bool IsInCanonicalSet(Serializer *s, TypePtr type)
TypeSerializationCluster(bool is_canonical, bool represents_canonical_set)
~TypeSerializationCluster()
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
static CodePtr DefaultCodeForType(const AbstractType &type, bool lazy_specialize=true)
bool IsDeclarationTypeOf(const Class &cls) const
static intptr_t InstanceSize()
intptr_t ElementSizeInBytes() const
~TypedDataDeserializationCluster()
TypedDataDeserializationCluster(intptr_t cid)
void ReadAlloc(Deserializer *d) override
void ReadFill(Deserializer *d_) override
void Trace(Serializer *s, ObjectPtr object)
~TypedDataSerializationCluster()
void WriteAlloc(Serializer *s)
TypedDataSerializationCluster(intptr_t cid)
void WriteFill(Serializer *s)
void ReadAlloc(Deserializer *d) override
~TypedDataViewDeserializationCluster()
TypedDataViewDeserializationCluster(intptr_t cid)
void ReadFill(Deserializer *d_) override
void PostLoad(Deserializer *d, const Array &refs) override
TypedDataViewSerializationCluster(intptr_t cid)
void WriteFill(Serializer *s)
~TypedDataViewSerializationCluster()
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
static intptr_t InstanceSize()
static intptr_t InstanceSize()
static DART_FORCE_INLINE constexpr intptr_t Length()
DART_FORCE_INLINE bool Get(intptr_t position) const
DART_FORCE_INLINE void Reset()
DART_FORCE_INLINE void Set(intptr_t position)
void ReadAlloc(Deserializer *d) override
UnhandledExceptionDeserializationCluster()
~UnhandledExceptionDeserializationCluster()
void ReadFill(Deserializer *d_) override
void Trace(Serializer *s, ObjectPtr object)
UnhandledExceptionSerializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
~UnhandledExceptionSerializationCluster()
static intptr_t InstanceSize()
void ReadRoots(Deserializer *d) override
void AddBaseObjects(Deserializer *d) override
void PostLoad(Deserializer *d, const Array &refs) override
UnitDeserializationRoots(const LoadingUnit &unit)
void PushRoots(Serializer *s)
UnitSerializationRoots(LoadingUnitSerializationData *unit)
void WriteRoots(Serializer *s)
void AddBaseObjects(Serializer *s)
void ReadAlloc(Deserializer *d) override
void ReadFill(Deserializer *d_) override
UnlinkedCallDeserializationCluster()
~UnlinkedCallDeserializationCluster()
void Trace(Serializer *s, ObjectPtr object)
UnlinkedCallSerializationCluster()
~UnlinkedCallSerializationCluster()
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
static constexpr uword update(intptr_t size, uword tag)
static ObjectPtr FromAddr(uword addr)
bool InVMIsolateHeap() const
static bool IsInt(intptr_t N, T value)
static int SNPrint(char *str, size_t size, const char *format,...) PRINTF_ATTRIBUTE(3
static char * StrDup(const char *s)
static intptr_t StrNLen(const char *s, intptr_t n)
static bool IsUint(intptr_t N, T value)
static constexpr bool IsAligned(T x, uintptr_t alignment, uintptr_t offset=0)
static char * StrNDup(const char *s, intptr_t n)
static const ObjectId kArtificialRootId
void AttributeReferenceTo(const ObjectId &from_object_id, const Reference &reference, const ObjectId &to_object_id)
void SetObjectTypeAndName(const ObjectId &object_id, const char *type, const char *name)
void AddRoot(const ObjectId &object_id, const char *name=nullptr)
void AttributeBytesTo(const ObjectId &object_id, size_t num_bytes)
bool HasId(const ObjectId &object_id)
void ReadRoots(Deserializer *d) override
void PostLoad(Deserializer *d, const Array &refs) override
void AddBaseObjects(Deserializer *d) override
void WriteRoots(Serializer *s)
void PushRoots(Serializer *s)
void AddBaseObjects(Serializer *s)
VMSerializationRoots(const WeakArray &symbols, bool should_write_symbols)
static const char * SnapshotString()
static void DontNeed(void *address, intptr_t size)
WeakArrayDeserializationCluster()
void ReadFill(Deserializer *d_) override
~WeakArrayDeserializationCluster()
void ReadAlloc(Deserializer *d) override
WeakArraySerializationCluster()
~WeakArraySerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
static intptr_t InstanceSize()
ObjectPtr At(intptr_t index) const
void ReadAlloc(Deserializer *d) override
WeakPropertyDeserializationCluster()
void ReadFill(Deserializer *d_) override
~WeakPropertyDeserializationCluster()
void RetraceEphemerons(Serializer *s)
~WeakPropertySerializationCluster()
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
WeakPropertySerializationCluster()
static intptr_t key_offset()
static intptr_t value_offset()
static intptr_t InstanceSize()
static constexpr intptr_t kNoValue
char * PrintToString(const char *format,...) PRINTF_ATTRIBUTE(2
#define THR_Print(format,...)
const EmbeddedViewParams * params
VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE auto & d
G_BEGIN_DECLS G_MODULE_EXPORT FlValue * args
static const uint8_t buffer[]
const uint8_t uint32_t uint32_t GError ** error
#define VM_GLOBAL_FLAG_LIST(P, R, C, D)
#define DEFINE_FLAG(type, name, default_value, comment)
Dart_NativeFunction function
bool IsTypedDataViewClassId(intptr_t index)
bool IsTypedDataClassId(intptr_t index)
static const char *const kObjectStoreFieldNames[]
static constexpr bool IsReachableReference(intptr_t ref)
static constexpr intptr_t kCompressedWordSizeLog2
static constexpr intptr_t kUnreachableReference
DART_EXPORT bool IsNull(Dart_Handle object)
static constexpr intptr_t kUnallocatedReference
static constexpr bool IsArtificialReference(intptr_t ref)
static constexpr bool IsAllocatedReference(intptr_t ref)
uintptr_t compressed_uword
static UnboxedFieldBitmap CalculateTargetUnboxedFieldsBitmap(Serializer *s, intptr_t class_id)
bool ShouldHaveImmutabilityBitSetCid(intptr_t predefined_cid)
constexpr intptr_t kFirstInternalOnlyCid
bool IsInternalVMdefinedClassId(intptr_t index)
static constexpr intptr_t kCompressedWordSize
raw_obj untag() -> num_entries()) VARIABLE_COMPRESSED_VISITOR(Array, Smi::Value(raw_obj->untag() ->length())) VARIABLE_COMPRESSED_VISITOR(TypedData, TypedData::ElementSizeInBytes(raw_obj->GetClassId()) *Smi::Value(raw_obj->untag() ->length())) VARIABLE_COMPRESSED_VISITOR(Record, RecordShape(raw_obj->untag() ->shape()).num_fields()) VARIABLE_NULL_VISITOR(CompressedStackMaps, CompressedStackMaps::PayloadSizeOf(raw_obj)) VARIABLE_NULL_VISITOR(OneByteString, Smi::Value(raw_obj->untag() ->length())) VARIABLE_NULL_VISITOR(TwoByteString, Smi::Value(raw_obj->untag() ->length())) intptr_t UntaggedField::VisitFieldPointers(FieldPtr raw_obj, ObjectPointerVisitor *visitor)
static constexpr intptr_t kFirstReference
static DART_FORCE_INLINE CodePtr GetCodeAndEntryPointByIndex(const Deserializer *d, intptr_t code_index, uword *entry_point)
constexpr intptr_t kWordSize
static constexpr intptr_t kObjectAlignment
ArrayOfTuplesView< Code::SCallTableEntry, std::tuple< Smi, Object, Function > > StaticCallsTable
static int CompareClusters(SerializationCluster *const *a, SerializationCluster *const *b)
static int8_t data[kExtLength]
static constexpr intptr_t kObjectAlignmentLog2
bool IsExternalTypedDataClassId(intptr_t index)
constexpr intptr_t kLastInternalOnlyCid
bool IsStringClassId(intptr_t index)
#define OBJECT_STORE_FIELD_LIST(R_, RW, ARW_RELAXED, ARW_AR, LAZY_CORE, LAZY_ASYNC, LAZY_ISOLATE, LAZY_INTERNAL, LAZY_FFI)
#define REUSABLE_FUNCTION_HANDLESCOPE(thread)
#define REUSABLE_OBJECT_HANDLESCOPE(thread)
#define REUSABLE_CODE_HANDLESCOPE(thread)
static const char header[]
uint32_t handler_pc_offset
@ InsertInstructionOfCode
static constexpr intptr_t kObjectAlignmentLog2
static constexpr intptr_t kObjectAlignment
static Reference Element(intptr_t offset)
static Reference Property(const char *name)
#define TIMELINE_DURATION(thread, stream, name)
#define NOT_IN_PRECOMPILED(code)