37#if !defined(DART_PRECOMPILED_RUNTIME)
45#if !defined(DART_PRECOMPILED_RUNTIME)
47 print_cluster_information,
49 "Print information about clusters written to snapshot");
52#if defined(DART_PRECOMPILER)
54 write_v8_snapshot_profile_to,
56 "Write a snapshot profile in V8 format to a file.");
58 print_array_optimization_candidates,
60 "Print information about how many array are candidates for Smi and "
61 "ROData optimizations.");
72static constexpr intptr_t kDeltaEncodedTypedDataCid =
kNativePointer;
76struct GrowableArrayStorageTraits {
82 intptr_t Length()
const {
return length_; }
83 void SetAt(intptr_t index,
const Object&
value)
const {
84 array_[index] =
value.ptr();
86 ObjectPtr At(intptr_t index)
const {
return array_[index]; }
90 ObjectPtr* array_ =
nullptr;
94 using ArrayPtr = Array*;
95 class ArrayHandle :
public ZoneAllocated {
97 explicit ArrayHandle(ArrayPtr ptr) : ptr_(ptr) {}
100 void SetFrom(
const ArrayHandle& other) { ptr_ = other.ptr_; }
101 void Clear() { ptr_ =
nullptr; }
102 bool IsNull()
const {
return ptr_ ==
nullptr; }
103 ArrayPtr ptr() {
return ptr_; }
105 intptr_t Length()
const {
return ptr_->Length(); }
106 void SetAt(intptr_t index,
const Object&
value)
const {
107 ptr_->SetAt(index,
value);
109 ObjectPtr At(intptr_t index)
const {
return ptr_->At(index); }
112 ArrayPtr ptr_ =
nullptr;
116 static ArrayHandle& PtrToHandle(ArrayPtr ptr) {
117 return *
new ArrayHandle(ptr);
120 static void SetHandle(ArrayHandle&
dst,
const ArrayHandle&
src) {
124 static void ClearHandle(ArrayHandle&
dst) {
132 static bool IsImmutable(
const ArrayHandle& handle) {
return false; }
134 static ObjectPtr At(ArrayHandle* array, intptr_t index) {
135 return array->At(index);
138 static void SetAt(ArrayHandle* array, intptr_t index,
const Object&
value) {
139 array->SetAt(index,
value);
144#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
146static void RelocateCodeObjects(
148 GrowableArray<CodePtr>* code_objects,
149 GrowableArray<ImageWriterCommand>* image_writer_commands) {
154 WritableCodePages writable_code_pages(thread, isolate_group);
155 CodeRelocator::Relocate(thread, code_objects, image_writer_commands, is_vm);
221 bool is_immutable =
false)
242#if defined(DART_PRECOMPILED_RUNTIME)
244 FATAL(
"%s needs canonicalization but doesn't define PostLoad",
name());
304class CodeSerializationCluster;
317 const char*
type =
nullptr,
318 const char*
name =
nullptr);
333#if defined(SNAPSHOT_BACKTRACE)
369 ReserveId(serializer,
380 ReserveId(serializer,
type, object,
name),
409 template <
typename T>
432 intptr_t
id =
RefId(
object);
434 if (profile_writer_ !=
nullptr) {
465 intptr_t
id =
RefId(
object);
467 if (profile_writer_ !=
nullptr) {
469 object_currently_writing_.cid_,
offset)) {
477 template <
typename T,
typename...
P>
479 auto* from = obj->untag()->from();
480 auto* to = obj->untag()->to_snapshot(
kind(),
args...);
484 template <
typename T>
486 for (
auto*
p = from;
p <= to;
p++) {
493 template <
typename T,
typename...
P>
495 auto* from = obj->untag()->from();
496 auto* to = obj->untag()->to_snapshot(
kind(),
args...);
500 template <
typename T>
502 for (
auto*
p = from;
p <= to;
p++) {
519 uint32_t unchecked_offset,
546 return loading_units_;
549 loading_units_ = units;
553 current_loading_unit_id_ =
id;
580 ASSERT(profile_writer_ !=
nullptr);
584 return heap_->
GetObjectId(
object) > num_base_objects_;
588 const char* ReadOnlyObjectType(intptr_t
cid);
602 intptr_t cid_override;
604 GrowableArray<StackEntry> stack_;
607 intptr_t num_tlc_cids_;
608 intptr_t num_base_objects_;
609 intptr_t num_written_objects_;
610 intptr_t next_ref_index_;
612 intptr_t dispatch_table_size_ = 0;
613 intptr_t bytes_heap_allocated_ = 0;
614 intptr_t instructions_table_len_ = 0;
615 intptr_t instructions_table_rodata_offset_ = 0;
620 V8SnapshotProfileWriter* profile_writer_ =
nullptr;
621 struct ProfilingObject {
622 ObjectPtr object_ =
nullptr;
625 V8SnapshotProfileWriter::ObjectId id_ =
627 intptr_t last_stream_position_ = 0;
629 } object_currently_writing_;
630 OffsetsTable* offsets_table_ =
nullptr;
632#if defined(SNAPSHOT_BACKTRACE)
633 ObjectPtr current_parent_;
634 GrowableArray<Object*> parent_pairs_;
637#if defined(DART_PRECOMPILER)
638 IntMap<intptr_t> deduped_instructions_sources_;
639 IntMap<intptr_t> code_index_;
642 intptr_t current_loading_unit_id_ = 0;
643 GrowableArray<LoadingUnitSerializationData*>* loading_units_ =
nullptr;
644 ZoneGrowableArray<Object*>* objects_ =
new ZoneGrowableArray<Object*>();
// Convenience macros for serialization clusters. They assume a local
// `Serializer* s` and (for the Auto* variants) a `name()` member in scope.
// Extraction residue (original file line numbers fused to the `#define`
// tokens) has been removed; the macro bodies are otherwise unchanged.

// Opens a WritingObjectScope for `obj` so writes are attributed to it in
// the snapshot profile.
// NOTE(review): `##` suppresses expansion of __COUNTER__, so the variable
// is literally named scope___COUNTER__ — fine as long as each macro is
// used at most once per scope; confirm intent upstream.
#define AutoTraceObject(obj)                                                   \
  Serializer::WritingObjectScope scope_##__COUNTER__(s, name(), obj, nullptr)

// Same as AutoTraceObject, but also records `str` as the object's name.
#define AutoTraceObjectName(obj, str)                                          \
  Serializer::WritingObjectScope scope_##__COUNTER__(s, name(), obj, str)

// Writes `value` as a reference, attributed to property `field`.
#define WriteFieldValue(field, value) s->WritePropertyRef(value, #field);

// Writes all pointer fields of `obj` between from() and to_snapshot().
#define WriteFromTo(obj, ...) s->WriteFromTo(obj, ##__VA_ARGS__);

// Pushes all pointer fields of `obj` between from() and to_snapshot().
#define PushFromTo(obj, ...) s->PushFromTo(obj, ##__VA_ARGS__);

// Writes the untagged field `field` of `obj`, attributed by field name.
#define WriteField(obj, field) s->WritePropertyRef(obj->untag()->field, #field)
// Writes the compressed field accessor `name()` of `obj`, attributed as
// "name_" in the profile.
#define WriteCompressedField(obj, name)                                        \
  s->WritePropertyRef(obj->untag()->name(), #name "_")
671 const uint8_t* data_buffer,
672 const uint8_t* instructions_buffer,
687 bool is_canonical =
false) {
699 template <
typename T>
724 ASSERT(next_ref_index_ <= num_objects_);
725 refs_->untag()->data()[next_ref_index_] = object;
731 ASSERT(index <= num_objects_);
732 return refs_->untag()->element(index);
743 intptr_t code_index);
753 return Read<int32_t>();
771 intptr_t deferred_code_start_index,
772 intptr_t deferred_code_end_index);
778#if defined(DART_PRECOMPILED_RUNTIME)
790 return instructions_table_;
800 :
ReadStream(
d->stream_.buffer_,
d->stream_.current_,
d->stream_.end_),
806 d->stream_.current_ =
nullptr;
809 ~Local() { d_->stream_.current_ = current_; }
813 ASSERT(index <= d_->num_objects_);
814 return refs_->untag()->element(index);
817 template <
typename T>
830 return Read<int32_t>();
833 template <
typename T,
typename...
P>
835 auto* from = obj->untag()->from();
836 auto* to_snapshot = obj->untag()->to_snapshot(d_->
kind(),
params...);
837 auto* to = obj->untag()->to(
params...);
838 for (
auto*
p = from;
p <= to_snapshot;
p++) {
845 for (
auto*
p = to_snapshot + 1;
p <= to;
p++) {
852 const ArrayPtr refs_;
864 intptr_t num_base_objects_;
865 intptr_t num_objects_;
866 intptr_t num_clusters_;
868 intptr_t next_ref_index_;
869 intptr_t code_start_index_ = 0;
870 intptr_t code_stop_index_ = 0;
871 intptr_t instructions_index_ = 0;
873 const bool is_non_root_unit_;
898 raw->
untag()->tags_ = tags;
901#if !defined(DART_PRECOMPILED_RUNTIME)
909 serializer->
Write<uint32_t>(tags);
914 if (FLAG_print_cluster_information) {
916 stop_size - start_size);
918 stop_data - start_data);
921 size_ += (stop_size - start_size) + (stop_data - start_data);
932 if (FLAG_print_cluster_information) {
942 intptr_t instance_size) {
944 intptr_t
count =
d->ReadUnsigned();
945 for (intptr_t
i = 0;
i <
count;
i++) {
946 d->AssignRef(
d->Allocate(instance_size));
951#if !defined(DART_PRECOMPILED_RUNTIME)
955 const auto unboxed_fields_bitmap_host =
956 s->isolate_group()->class_table()->GetUnboxedFieldsMapAt(class_id);
959 if (unboxed_fields_bitmap_host.IsEmpty() ||
961 unboxed_fields_bitmap = unboxed_fields_bitmap_host;
966 unboxed_fields_bitmap.
Reset();
967 intptr_t target_i = 0, host_i = 0;
972 if (unboxed_fields_bitmap_host.Get(host_i++)) {
973 unboxed_fields_bitmap.
Set(target_i++);
974 unboxed_fields_bitmap.
Set(target_i++);
982 return unboxed_fields_bitmap;
992 objects_(num_cids) {}
997 intptr_t class_id = cls->untag()->id_;
1001 s->UnexpectedObject(cls,
"Class with illegal cid");
1007 predefined_.
Add(cls);
1018 for (intptr_t
i = 0;
i <
count;
i++) {
1019 ClassPtr cls = predefined_[
i];
1022 intptr_t class_id = cls->untag()->id_;
1023 s->WriteCid(class_id);
1027 for (intptr_t
i = 0;
i <
count;
i++) {
1028 ClassPtr cls = objects_[
i];
1035 for (intptr_t
i = 0;
i <
count;
i++) {
1036 WriteClass(
s, predefined_[
i]);
1039 for (intptr_t
i = 0;
i <
count;
i++) {
1040 WriteClass(
s, objects_[
i]);
1048 intptr_t class_id = cls->untag()->id_;
1050 s->UnexpectedObject(cls,
"Class with illegal cid");
1052 s->WriteCid(class_id);
1054 s->Write<uint32_t>(cls->untag()->kernel_offset_);
1059 s->Write<int16_t>(cls->untag()->num_type_arguments_);
1060 s->Write<uint16_t>(cls->untag()->num_native_fields_);
1062 s->WriteTokenPosition(cls->untag()->token_pos_);
1063 s->WriteTokenPosition(cls->untag()->end_token_pos_);
1064 s->WriteCid(cls->untag()->implementor_cid_);
1066 s->Write<uint32_t>(cls->untag()->state_bits_);
1069 const auto unboxed_fields_map =
1071 s->WriteUnsigned64(unboxed_fields_map.Value());
1075 GrowableArray<ClassPtr> predefined_;
1076 GrowableArray<ClassPtr> objects_;
1086 predefined_start_index_ =
d->next_index();
1087 intptr_t
count =
d->ReadUnsigned();
1089 for (intptr_t
i = 0;
i <
count;
i++) {
1090 intptr_t class_id =
d->ReadCid();
1092 ClassPtr cls =
table->At(class_id);
1096 predefined_stop_index_ =
d->next_index();
1099 count =
d->ReadUnsigned();
1100 for (intptr_t
i = 0;
i <
count;
i++) {
1109 for (intptr_t
id = predefined_start_index_;
id < predefined_stop_index_;
1111 ClassPtr cls =
static_cast<ClassPtr
>(
d.Ref(
id));
1113 intptr_t class_id =
d.ReadCid();
1114 cls->untag()->id_ = class_id;
1115#if !defined(DART_PRECOMPILED_RUNTIME)
1117 cls->untag()->kernel_offset_ =
d.Read<uint32_t>();
1120 cls->untag()->host_instance_size_in_words_ =
d.Read<int32_t>();
1121 cls->untag()->host_next_field_offset_in_words_ =
d.Read<int32_t>();
1122#if defined(DART_PRECOMPILER)
1125 cls->untag()->target_instance_size_in_words_ =
1126 cls->untag()->host_instance_size_in_words_;
1127 cls->untag()->target_next_field_offset_in_words_ =
1128 cls->untag()->host_next_field_offset_in_words_;
1134 cls->untag()->host_type_arguments_field_offset_in_words_ =
1136#if defined(DART_PRECOMPILER)
1137 cls->untag()->target_type_arguments_field_offset_in_words_ =
1138 cls->untag()->host_type_arguments_field_offset_in_words_;
1140 cls->untag()->num_type_arguments_ =
d.Read<int16_t>();
1141 cls->untag()->num_native_fields_ =
d.Read<uint16_t>();
1142#if !defined(DART_PRECOMPILED_RUNTIME)
1144 cls->untag()->token_pos_ =
d.ReadTokenPosition();
1145 cls->untag()->end_token_pos_ =
d.ReadTokenPosition();
1146 cls->untag()->implementor_cid_ =
d.ReadCid();
1148 cls->untag()->state_bits_ =
d.Read<uint32_t>();
1154 ClassPtr cls =
static_cast<ClassPtr
>(
d.Ref(
id));
1158 intptr_t class_id =
d.ReadCid();
1160 cls->untag()->id_ = class_id;
1162#if !defined(DART_PRECOMPILED_RUNTIME)
1164 cls->untag()->kernel_offset_ =
d.Read<uint32_t>();
1166 cls->untag()->host_instance_size_in_words_ =
d.Read<int32_t>();
1167 cls->untag()->host_next_field_offset_in_words_ =
d.Read<int32_t>();
1168 cls->untag()->host_type_arguments_field_offset_in_words_ =
1170#if defined(DART_PRECOMPILER)
1171 cls->untag()->target_instance_size_in_words_ =
1172 cls->untag()->host_instance_size_in_words_;
1173 cls->untag()->target_next_field_offset_in_words_ =
1174 cls->untag()->host_next_field_offset_in_words_;
1175 cls->untag()->target_type_arguments_field_offset_in_words_ =
1176 cls->untag()->host_type_arguments_field_offset_in_words_;
1178 cls->untag()->num_type_arguments_ =
d.Read<int16_t>();
1179 cls->untag()->num_native_fields_ =
d.Read<uint16_t>();
1180#if !defined(DART_PRECOMPILED_RUNTIME)
1182 cls->untag()->token_pos_ =
d.ReadTokenPosition();
1183 cls->untag()->end_token_pos_ =
d.ReadTokenPosition();
1184 cls->untag()->implementor_cid_ =
d.ReadCid();
1186 cls->untag()->state_bits_ =
d.Read<uint32_t>();
1188 table->AllocateIndex(class_id);
1189 table->SetAt(class_id, cls);
1193 table->SetUnboxedFieldsMapAt(class_id, unboxed_fields_map);
1199 intptr_t predefined_start_index_;
1200 intptr_t predefined_stop_index_;
1218#if !defined(DART_PRECOMPILED_RUNTIME)
1219template <
typename SetType,
1221 typename PointerType,
1222 bool kAllCanonicalObjectsAreIncludedIntoSet =
true>
1227 bool represents_canonical_set,
1229 intptr_t target_instance_size = 0)
1231 represents_canonical_set_(represents_canonical_set) {}
1236 ASSERT(kAllCanonicalObjectsAreIncludedIntoSet);
1241 if (!represents_canonical_set_) {
1247 using ZoneCanonicalSet =
1251 intptr_t required_capacity = 0;
1254 required_capacity++;
1259 const intptr_t kSpareCapacity = 32;
1260 required_capacity =
static_cast<intptr_t
>(
1261 static_cast<double>(required_capacity + kSpareCapacity) /
1264 intptr_t num_occupied = 0;
1268 ZoneCanonicalSet
table(
1269 s->zone(), HashTables::New<ZoneCanonicalSet>(required_capacity));
1270 HandleType& element = HandleType::Handle(
s->zone());
1274 intptr_t entry = -1;
1275 const bool present =
table.FindKeyOrDeletedOrUnused(element, &entry);
1277 table.InsertKey(entry, element);
1283 const auto prefix_length = num_occupied;
1286 auto& arr =
table.Release();
1287 intptr_t last_occupied = ZoneCanonicalSet::kFirstKeyIndex - 1;
1288 for (intptr_t
i = ZoneCanonicalSet::kFirstKeyIndex,
length = arr.Length();
1291 ASSERT(v != ZoneCanonicalSet::DeletedMarker().ptr());
1292 if (v != ZoneCanonicalSet::UnusedMarker().ptr()) {
1293 const intptr_t unused_run_length = (
i - 1) - last_occupied;
1294 gaps_.
Add(unused_run_length);
1295 objects_[num_occupied++] =
static_cast<PointerType
>(v);
1301 table_length_ = arr.Length();
1305 if (represents_canonical_set_) {
1306 s->WriteUnsigned(table_length_);
1308 for (
auto gap : gaps_) {
1309 s->WriteUnsigned(gap);
1319 const bool represents_canonical_set_;
1321 intptr_t table_length_ = 0;
1325template <
typename SetType,
bool kAllCanonicalObjectsAreIncludedIntoSet = true>
1333 table_(SetType::ArrayHandle::Handle()) {}
1340 const auto table_length =
d->ReadUnsigned();
1343 auto table = StartDeserialization(
d, table_length,
count);
1345 table.FillGap(
d->ReadUnsigned());
1358 const typename SetType::ArrayHandle& current_table) {
1361 if (!current_table.IsNull()) {
1362 SetType current_set(
d->zone(), current_table.ptr());
1363 ASSERT(current_set.NumOccupied() == 0);
1364 current_set.Release();
1369 SetType canonical_set(
d->zone(),
table_.ptr());
1375 canonical_set.Release();
1380 struct DeserializationFinger {
1381 typename SetType::ArrayPtr
table;
1382 intptr_t current_index;
1385 void FillGap(
int length) {
1386 for (intptr_t j = 0; j <
length; j++) {
1387 table->untag()->data()[current_index + j] = gap_element;
1392 void WriteElement(Deserializer*
d, ObjectPtr
object) {
1393 table->untag()->data()[current_index++] = object;
1396 typename SetType::ArrayPtr
Finish() {
1397 if (
table != SetType::ArrayHandle::null()) {
1401 table = SetType::ArrayHandle::null();
1406 static DeserializationFinger StartDeserialization(Deserializer*
d,
1409 const intptr_t instance_size = SetType::ArrayHandle::InstanceSize(
length);
1410 typename SetType::ArrayPtr
table =
1411 static_cast<typename SetType::ArrayPtr
>(
d->Allocate(instance_size));
1414 if ((SetType::Storage::ArrayCid == kArrayCid) &&
1416 table->untag()->SetCardRememberedBitUnsynchronized();
1418 InitTypeArgsOrNext(
table);
1420 for (intptr_t
i = 0;
i < SetType::kFirstKeyIndex;
i++) {
1424 return {
table, SetType::kFirstKeyIndex, SetType::UnusedMarker().ptr()};
1427 static void InitTypeArgsOrNext(ArrayPtr
table) {
1430 static void InitTypeArgsOrNext(WeakArrayPtr
table) {
1435#if !defined(DART_PRECOMPILED_RUNTIME)
1447 objects_.
Add(type_params);
1454 for (intptr_t
i = 0;
i <
count;
i++) {
1455 TypeParametersPtr type_params = objects_[
i];
1456 s->AssignRef(type_params);
1462 for (intptr_t
i = 0;
i <
count;
i++) {
1463 TypeParametersPtr type_params = objects_[
i];
1489 TypeParametersPtr type_params =
static_cast<TypeParametersPtr
>(
d.Ref(
id));
1492 d.ReadFromTo(type_params);
1497#if !defined(DART_PRECOMPILED_RUNTIME)
1504 bool represents_canonical_set)
1507 represents_canonical_set,
1515 s->Push(type_args->untag()->instantiations());
1518 s->Push(type_args->untag()->element(
i));
1526 for (intptr_t
i = 0;
i <
count;
i++) {
1527 TypeArgumentsPtr type_args =
objects_[
i];
1528 s->AssignRef(type_args);
1540 for (intptr_t
i = 0;
i <
count;
i++) {
1541 TypeArgumentsPtr type_args =
objects_[
i];
1546 s->Write<int32_t>(
hash);
1547 const intptr_t nullability =
1548 Smi::Value(type_args->untag()->nullability());
1549 s->WriteUnsigned(nullability);
1551 for (intptr_t j = 0; j <
length; j++) {
1552 s->WriteElementRef(type_args->untag()->element(j), j);
1571 const intptr_t
count =
d->ReadUnsigned();
1572 for (intptr_t
i = 0;
i <
count;
i++) {
1573 const intptr_t
length =
d->ReadUnsigned();
1585 TypeArgumentsPtr type_args =
static_cast<TypeArgumentsPtr
>(
d.Ref(
id));
1586 const intptr_t
length =
d.ReadUnsigned();
1591 type_args->untag()->hash_ =
Smi::New(
d.Read<int32_t>());
1592 type_args->untag()->nullability_ =
Smi::New(
d.ReadUnsigned());
1593 type_args->untag()->instantiations_ =
static_cast<ArrayPtr
>(
d.ReadRef());
1594 for (intptr_t j = 0; j <
length; j++) {
1595 type_args->untag()->types()[j] =
1596 static_cast<AbstractTypePtr
>(
d.ReadRef());
1603 auto object_store =
d->isolate_group()->object_store();
1605 d, refs,
Array::Handle(object_store->canonical_type_arguments()));
1606 object_store->set_canonical_type_arguments(
table_);
1610 type_arg ^= refs.
At(
i);
1618#if !defined(DART_PRECOMPILED_RUNTIME)
1636 for (intptr_t
i = 0;
i <
count;
i++) {
1637 PatchClassPtr cls = objects_[
i];
1644 for (intptr_t
i = 0;
i <
count;
i++) {
1645 PatchClassPtr cls = objects_[
i];
1649 s->Write<int32_t>(cls->untag()->kernel_library_index_);
1673 PatchClassPtr cls =
static_cast<PatchClassPtr
>(
d.Ref(
id));
1677#if !defined(DART_PRECOMPILED_RUNTIME)
1679 cls->untag()->kernel_library_index_ =
d.Read<int32_t>();
1685#if !defined(DART_PRECOMPILED_RUNTIME)
1701 s->Push(func->untag()->code());
1704 s->Push(func->untag()->code());
1705 s->Push(func->untag()->ic_data_array());
1715 for (intptr_t
i = 0;
i <
count;
i++) {
1716 FunctionPtr func = objects_[
i];
1724 for (intptr_t
i = 0;
i <
count;
i++) {
1725 FunctionPtr func = objects_[
i];
1729#if defined(DART_PRECOMPILER)
1730 CodePtr
code = func->untag()->code();
1731 const auto code_index =
s->GetCodeIndex(
code);
1732 s->WriteUnsigned(code_index);
1733 s->AttributePropertyRef(
code,
"code_");
1748#if defined(DART_PRECOMPILER) && !defined(PRODUCT)
1757 script.GetTokenLocation(token_pos, &
line,
nullptr);
1759 token_pos =
line == -1 ? TokenPosition::kNoSource
1762 s->WriteTokenPosition(token_pos);
1765 s->WriteTokenPosition(func->untag()->token_pos_);
1769 s->WriteTokenPosition(func->untag()->end_token_pos_);
1770 s->Write<uint32_t>(func->untag()->kernel_offset_);
1771 s->Write<uint32_t>(func->untag()->packed_fields_);
1773 s->Write<uint32_t>(func->untag()->kind_tag_);
1779 if (
s->profile_writer() ==
nullptr) {
1788 Object::NameVisibility::kInternalName),
1798template <
bool need_entry_po
int_for_non_discarded>
1801 intptr_t code_index,
1802 uword* entry_point) {
1809 const intptr_t
base =
d->is_non_root_unit() ?
d->num_base_objects() : 0;
1810 if (code_index <
base) {
1811 CodePtr
code =
static_cast<CodePtr
>(
d->Ref(code_index));
1812 if (need_entry_point_for_non_discarded) {
1826 const intptr_t first_entry_with_code =
1827 d->instructions_table().rodata()->first_entry_with_code;
1828 if (code_index < first_entry_with_code) {
1829 *entry_point =
d->instructions_table().EntryPointAt(code_index);
1830 return StubCode::UnknownDartCode().ptr();
1832 const intptr_t cluster_index = code_index - first_entry_with_code;
1834 static_cast<CodePtr
>(
d->Ref(
d->code_start_index() + cluster_index));
1835 if (need_entry_point_for_non_discarded) {
1843 uword* entry_point)
const {
1845 if (code_index == 0) {
1846 return StubCode::LazyCompile().ptr();
1847 }
else if (FLAG_precompiled_mode) {
1849 false>(
this, code_index,
1853 const intptr_t ref = code_start_index_ + code_index - 1;
1854 ASSERT(code_start_index_ <= ref && ref < code_stop_index_);
1855 return static_cast<CodePtr
>(
Ref(ref));
1860 intptr_t code_index) {
1865 ASSERT(FLAG_precompiled_mode);
1866 const intptr_t first_entry_with_code =
table.rodata()->first_entry_with_code;
1867 return code_index - 1 - first_entry_with_code;
1872 ASSERT(FLAG_precompiled_mode);
1873 uword entry_point = 0;
1875 this, code_index, &entry_point);
1895 FunctionPtr func =
static_cast<FunctionPtr
>(
d.Ref(
id));
1901 func->untag()->entry_point_ = 0;
1902 func->untag()->unchecked_entry_point_ = 0;
1905#if defined(DART_PRECOMPILED_RUNTIME)
1907 const intptr_t code_index =
d.ReadUnsigned();
1908 uword entry_point = 0;
1910 func->untag()->code_ =
code;
1911 if (entry_point != 0) {
1912 func->untag()->entry_point_ = entry_point;
1913 func->untag()->unchecked_entry_point_ = entry_point;
1918 func->untag()->unoptimized_code_ =
static_cast<CodePtr
>(
d.ReadRef());
1919 func->untag()->code_ =
static_cast<CodePtr
>(
d.ReadRef());
1920 func->untag()->ic_data_array_ =
static_cast<ArrayPtr
>(
d.ReadRef());
1924#if !defined(DART_PRECOMPILED_RUNTIME)
1926 func->untag()->positional_parameter_names_ =
1927 static_cast<ArrayPtr
>(
d.ReadRef());
1929#if !defined(DART_PRECOMPILED_RUNTIME) || \
1930 (defined(DART_PRECOMPILED_RUNTIME) && !defined(PRODUCT))
1931 func->untag()->token_pos_ =
d.ReadTokenPosition();
1933#if !defined(DART_PRECOMPILED_RUNTIME)
1934 func->untag()->end_token_pos_ =
d.ReadTokenPosition();
1935 func->untag()->kernel_offset_ =
d.Read<uint32_t>();
1936 func->untag()->unboxed_parameters_info_.Reset();
1937 func->untag()->packed_fields_ =
d.Read<uint32_t>();
1940 func->untag()->kind_tag_ =
d.Read<uint32_t>();
1941#if !defined(DART_PRECOMPILED_RUNTIME)
1942 func->untag()->usage_counter_ = 0;
1943 func->untag()->optimized_instruction_count_ = 0;
1944 func->untag()->optimized_call_site_count_ = 0;
1945 func->untag()->deoptimization_counter_ = 0;
1946 func->untag()->state_bits_ = 0;
1947 func->untag()->inlining_depth_ = 0;
1960 uword entry_point =
code->untag()->entry_point_;
1961 ASSERT(entry_point != 0);
1962 func.
ptr()->
untag()->entry_point_ = entry_point;
1963 uword unchecked_entry_point =
code->untag()->unchecked_entry_point_;
1964 ASSERT(unchecked_entry_point != 0);
1965 func.
ptr()->
untag()->unchecked_entry_point_ = unchecked_entry_point;
1976 func.SetWasCompiled(
true);
1991#if !defined(DART_PRECOMPILED_RUNTIME)
2005 s->Push(
data->untag()->context_scope());
2007 s->Push(
data->untag()->parent_function());
2008 s->Push(
data->untag()->closure());
2014 for (intptr_t
i = 0;
i <
count;
i++) {
2015 ClosureDataPtr
data = objects_[
i];
2022 for (intptr_t
i = 0;
i <
count;
i++) {
2023 ClosureDataPtr
data = objects_[
i];
2030 s->WriteUnsigned(
static_cast<uint32_t
>(
data->untag()->packed_fields_));
2053 ClosureDataPtr
data =
static_cast<ClosureDataPtr
>(
d.Ref(
id));
2059 data->untag()->context_scope_ =
2060 static_cast<ContextScopePtr
>(
d.ReadRef());
2062 data->untag()->parent_function_ =
static_cast<FunctionPtr
>(
d.ReadRef());
2063 data->untag()->closure_ =
static_cast<ClosurePtr
>(
d.ReadRef());
2064 data->untag()->packed_fields_ =
d.ReadUnsigned<uint32_t>();
2069#if !defined(DART_PRECOMPILED_RUNTIME)
2074 "FfiTrampolineData",
2075 kFfiTrampolineDataCid,
2088 for (intptr_t
i = 0;
i <
count;
i++) {
2089 s->AssignRef(objects_[
i]);
2095 for (intptr_t
i = 0;
i <
count;
i++) {
2096 FfiTrampolineDataPtr
const data = objects_[
i];
2099 s->Write<int32_t>(
data->untag()->callback_id_);
2100 s->Write<uint8_t>(
data->untag()->ffi_function_kind_);
2124 FfiTrampolineDataPtr
data =
static_cast<FfiTrampolineDataPtr
>(
d.Ref(
id));
2128 data->untag()->callback_id_ =
d.Read<int32_t>();
2129 data->untag()->ffi_function_kind_ =
d.Read<uint8_t>();
2134#if !defined(DART_PRECOMPILED_RUNTIME)
2145 objects_.
Add(field);
2149 s->Push(field->untag()->name());
2150 s->Push(field->untag()->owner());
2151 s->Push(field->untag()->type());
2153 s->Push(field->untag()->initializer_function());
2156 s->Push(field->untag()->guarded_list_length());
2159 s->Push(field->untag()->dependent_code());
2163 s->Push(field->untag()->host_offset_or_field_id());
2172 for (intptr_t
i = 0;
i <
count;
i++) {
2173 FieldPtr field = objects_[
i];
2174 s->AssignRef(field);
2181 for (intptr_t
i = 0;
i <
count;
i++) {
2182 FieldPtr field = objects_[
i];
2198 s->WriteTokenPosition(field->untag()->token_pos_);
2199 s->WriteTokenPosition(field->untag()->end_token_pos_);
2200 s->WriteCid(field->untag()->guarded_cid_);
2201 s->WriteCid(field->untag()->is_nullable_);
2202 s->Write<int8_t>(field->untag()->static_type_exactness_state_);
2203 s->Write<uint32_t>(field->untag()->kernel_offset_);
2205 s->Write<uint16_t>(field->untag()->kind_bits_);
2234#if !defined(DART_PRECOMPILED_RUNTIME)
2238 FieldPtr field =
static_cast<FieldPtr
>(
d.Ref(
id));
2240 d.ReadFromTo(field);
2241#if !defined(DART_PRECOMPILED_RUNTIME)
2243 field->untag()->guarded_list_length_ =
static_cast<SmiPtr
>(
d.ReadRef());
2245 field->untag()->dependent_code_ =
2246 static_cast<WeakArrayPtr
>(
d.ReadRef());
2248 field->untag()->token_pos_ =
d.ReadTokenPosition();
2249 field->untag()->end_token_pos_ =
d.ReadTokenPosition();
2250 field->untag()->guarded_cid_ =
d.ReadCid();
2251 field->untag()->is_nullable_ =
d.ReadCid();
2252 const int8_t static_type_exactness_state =
d.Read<int8_t>();
2253#if defined(TARGET_ARCH_X64)
2254 field->untag()->static_type_exactness_state_ =
2255 static_type_exactness_state;
2262 USE(static_type_exactness_state);
2263 field->untag()->static_type_exactness_state_ =
2266 field->untag()->kernel_offset_ =
d.Read<uint32_t>();
2268 field->untag()->kind_bits_ =
d.Read<uint16_t>();
2270 field->untag()->host_offset_or_field_id_ =
2271 static_cast<SmiPtr
>(
d.ReadRef());
2272#if !defined(DART_PRECOMPILED_RUNTIME)
2273 field->untag()->target_offset_ =
2274 Smi::Value(field->untag()->host_offset_or_field_id());
2283 field ^= refs.
At(
i);
2294 field ^= refs.
At(
i);
2301#if !defined(DART_PRECOMPILED_RUNTIME)
2313 auto* from =
script->untag()->from();
2314 auto* to =
script->untag()->to_snapshot(
s->kind());
2315 for (
auto*
p = from;
p <= to;
p++) {
2321 s->Push(obj, kDeltaEncodedTypedDataCid);
2331 for (intptr_t
i = 0;
i <
count;
i++) {
2332 ScriptPtr
script = objects_[
i];
2339 for (intptr_t
i = 0;
i <
count;
i++) {
2340 ScriptPtr
script = objects_[
i];
2346 int32_t written_flags =
2348 0,
script->untag()->flags_and_max_position_);
2350 false, written_flags);
2351 s->Write<int32_t>(written_flags);
2353 s->Write<int32_t>(
script->untag()->kernel_script_index_);
2376 ScriptPtr
script =
static_cast<ScriptPtr
>(
d.Ref(
id));
2380#if !defined(DART_PRECOMPILED_RUNTIME)
2381 script->untag()->flags_and_max_position_ =
d.Read<int32_t>();
2383 script->untag()->kernel_script_index_ =
d.Read<int32_t>();
2384 script->untag()->load_timestamp_ = 0;
2389#if !defined(DART_PRECOMPILED_RUNTIME)
2407 for (intptr_t
i = 0;
i <
count;
i++) {
2408 LibraryPtr lib = objects_[
i];
2415 for (intptr_t
i = 0;
i <
count;
i++) {
2416 LibraryPtr lib = objects_[
i];
2419 s->Write<int32_t>(lib->untag()->index_);
2420 s->Write<uint16_t>(lib->untag()->num_imports_);
2421 s->Write<int8_t>(lib->untag()->load_state_);
2422 s->Write<uint8_t>(lib->untag()->flags_);
2424 s->Write<uint32_t>(lib->untag()->kernel_library_index_);
2448 LibraryPtr lib =
static_cast<LibraryPtr
>(
d.Ref(
id));
2451 lib->untag()->native_entry_resolver_ =
nullptr;
2452 lib->untag()->native_entry_symbol_resolver_ =
nullptr;
2453 lib->untag()->ffi_native_resolver_ =
nullptr;
2454 lib->untag()->index_ =
d.Read<int32_t>();
2455 lib->untag()->num_imports_ =
d.Read<uint16_t>();
2456 lib->untag()->load_state_ =
d.Read<int8_t>();
2457 lib->untag()->flags_ =
2458 UntaggedLibrary::InFullSnapshotBit::update(
true,
d.Read<uint8_t>());
2459#if !defined(DART_PRECOMPILED_RUNTIME)
2461 lib->untag()->kernel_library_index_ =
d.Read<uint32_t>();
2467#if !defined(DART_PRECOMPILED_RUNTIME)
2485 for (intptr_t
i = 0;
i <
count;
i++) {
2486 NamespacePtr ns = objects_[
i];
2493 for (intptr_t
i = 0;
i <
count;
i++) {
2494 NamespacePtr ns = objects_[
i];
2519 NamespacePtr ns =
static_cast<NamespacePtr
>(
d.Ref(
id));
2527#if !defined(DART_PRECOMPILED_RUNTIME)
2533 "KernelProgramInfo",
2534 kKernelProgramInfoCid,
2547 for (intptr_t
i = 0;
i <
count;
i++) {
2548 KernelProgramInfoPtr
info = objects_[
i];
2555 for (intptr_t
i = 0;
i <
count;
i++) {
2556 KernelProgramInfoPtr
info = objects_[
i];
2583 KernelProgramInfoPtr
info =
static_cast<KernelProgramInfoPtr
>(
d.Ref(
id));
2595 array = HashTables::New<UnorderedHashMap<SmiTraits>>(16,
Heap::kOld);
2596 info.set_libraries_cache(array);
2597 array = HashTables::New<UnorderedHashMap<SmiTraits>>(16,
Heap::kOld);
2598 info.set_classes_cache(array);
2612 const bool is_deferred = !
s->InCurrentLoadingUnitOrRoot(
code);
2614 s->RecordDeferredCode(
code);
2622 ObjectPoolPtr
pool =
code->untag()->object_pool_;
2626 if (
s->InCurrentLoadingUnitOrRoot(
pool)) {
2634 s->Push(
code->untag()->deopt_info_array_);
2635 s->Push(
code->untag()->static_calls_target_table_);
2636 s->Push(
code->untag()->compressed_stackmaps_);
2641#if defined(DART_PRECOMPILER)
2642 auto const calls_array =
code->untag()->static_calls_target_table_;
2646 array_ = calls_array;
2661 ASSERT(destination->IsHeapObject() && destination->IsCode());
2662 s->Push(destination);
2673 !FLAG_retain_code_objects);
2679 s->Push(
code->untag()->owner_);
2680 s->Push(
code->untag()->exception_handlers_);
2681 s->Push(
code->untag()->pc_descriptors_);
2682 s->Push(
code->untag()->catch_entry_);
2683 if (!FLAG_precompiled_mode || !FLAG_dwarf_stack_traces_mode) {
2684 s->Push(
code->untag()->inlined_id_to_function_);
2685 if (
s->InCurrentLoadingUnitOrRoot(
code->untag()->code_source_map_)) {
2686 s->Push(
code->untag()->code_source_map_);
2689#if !defined(PRODUCT)
2690 s->Push(
code->untag()->return_address_metadata_);
2691 if (FLAG_code_comments) {
2692 s->Push(
code->untag()->comments_);
2702 const intptr_t
length =
pool->untag()->length_;
2703 uint8_t* entry_bits =
pool->untag()->entry_bits();
2706 if (entry_type == ObjectPool::EntryType::kTaggedObject) {
2712 intptr_t
cid =
target->GetClassIdMayBeSmi();
2713 if (!only_call_targets || (
cid == kCodeCid) || (
cid == kFunctionCid) ||
2714 (
cid == kFieldCid) || (
cid == kClosureCid)) {
2717 s->Push(
s->isolate_group()->class_table()->At(
cid));
2743 if (
a->not_discarded <
b->not_discarded)
return -1;
2744 if (
a->not_discarded >
b->not_discarded)
return 1;
2745 if (
a->instructions_id <
b->instructions_id)
return -1;
2746 if (
a->instructions_id >
b->instructions_id)
return 1;
2754 InstructionsPtr instr =
code->untag()->instructions_;
2755 intptr_t
key =
static_cast<intptr_t
>(instr);
2756 intptr_t instructions_id = 0;
2762 instructions_id = order_map->
Lookup(
key);
2764 instructions_id = order_map->
Length() + 1;
2765 order_map->
Insert(
key, instructions_id);
2769 info.instructions_id = instructions_id;
2777 for (intptr_t
i = 0;
i < codes->
length();
i++) {
2778 Insert(
s, &order_list, &order_map, (*codes)[
i]);
2782 for (intptr_t
i = 0;
i < order_list.
length();
i++) {
2783 (*codes)[
i] = order_list[
i].code;
2790 for (intptr_t
i = 0;
i < codes->
length();
i++) {
2791 Insert(
s, &order_list, &order_map, (*codes)[
i]->ptr());
2795 for (intptr_t
i = 0;
i < order_list.
length();
i++) {
2796 *(*codes)[
i] = order_list[
i].code;
2802 for (
auto code : objects_) {
2815 first_ref_ =
s->next_ref_index();
2816 s->WriteUnsigned(non_discarded_count);
2817 for (
auto code : objects_) {
2827 s->WriteUnsigned(deferred_objects_.
length());
2828 first_deferred_ref_ =
s->next_ref_index();
2829 for (
auto code : deferred_objects_) {
2833 last_ref_ =
s->next_ref_index() - 1;
2840 const int32_t state_bits =
code->untag()->state_bits_;
2841 s->Write<int32_t>(state_bits);
2848 for (intptr_t
i = 0;
i <
count;
i++) {
2849 CodePtr
code = objects_[
i];
2850#if defined(DART_PRECOMPILER)
2851 if (FLAG_write_v8_snapshot_profile_to !=
nullptr &&
2853 s->CreateArtificialNodeIfNeeded(
code);
2860 const intptr_t deferred_count = deferred_objects_.
length();
2861 for (intptr_t
i = 0;
i < deferred_count;
i++) {
2862 CodePtr
code = deferred_objects_[
i];
2871 const intptr_t bytes_written =
s->bytes_written();
2874 intptr_t pointer_offsets_length =
2876 if (pointer_offsets_length != 0) {
2877 FATAL(
"Cannot serialize code with embedded pointers");
2881 s->UnexpectedObject(
code,
"Disabled code");
2884 s->WriteInstructions(
code->untag()->instructions_,
2885 code->untag()->unchecked_offset_,
code, deferred);
2890 const uint32_t active_unchecked_offset =
2891 code->untag()->unchecked_entry_point_ -
code->untag()->entry_point_;
2892 s->WriteInstructions(
code->untag()->active_instructions_,
2893 active_unchecked_offset,
code, deferred);
2896#if defined(DART_PRECOMPILER)
2897 if (FLAG_write_v8_snapshot_profile_to !=
nullptr) {
2902 ObjectPoolPtr
pool =
code->untag()->object_pool_;
2904 ASSERT(!
s->HasRef(
pool) ||
pool == Object::empty_object_pool().ptr());
2905 s->CreateArtificialNodeIfNeeded(
pool);
2906 s->AttributePropertyRef(
pool,
"object_pool_");
2910 auto const table =
code->untag()->static_calls_target_table_;
2914 s->CreateArtificialNodeIfNeeded(
table);
2915 s->AttributePropertyRef(
table,
"static_calls_target_table_");
2922 ASSERT(
s->bytes_written() == bytes_written);
2926 !FLAG_retain_code_objects);
2927#if defined(DART_PRECOMPILER)
2928 if (FLAG_write_v8_snapshot_profile_to !=
nullptr) {
2930 const auto& owner =
code->untag()->owner_;
2931 s->CreateArtificialNodeIfNeeded(owner);
2932 s->AttributePropertyRef(owner,
"owner_");
2941 if (
s->InCurrentLoadingUnitOrRoot(
code->untag()->object_pool_)) {
2954 if (FLAG_precompiled_mode && FLAG_dwarf_stack_traces_mode) {
2959 if (
s->InCurrentLoadingUnitOrRoot(
code->untag()->code_source_map_)) {
2970#if !defined(PRODUCT)
2972 if (FLAG_code_comments) {
2982 if (
s->profile_writer() ==
nullptr) {
2987 Code&
code = reused_code_handle.Handle();
2989 return code.QualifiedName(
2991 Object::NameVisibility::kInternalName));
2999 intptr_t first_ref_;
3000 intptr_t first_deferred_ref_;
3016 const intptr_t
count =
d->ReadUnsigned();
3017 for (intptr_t
i = 0;
i <
count;
i++) {
3022 deferred_start_index_ =
d->next_index();
3023 const intptr_t deferred_count =
d->ReadUnsigned();
3024 for (intptr_t
i = 0;
i < deferred_count;
i++) {
3027 deferred_stop_index_ =
d->next_index();
3031 const int32_t state_bits =
d->Read<int32_t>();
3035 code->untag()->state_bits_ = state_bits;
3041#if defined(DART_PRECOMPILED_RUNTIME)
3042 ReadFill(
d, deferred_start_index_, deferred_stop_index_,
true);
3044 ASSERT(deferred_start_index_ == deferred_stop_index_);
3049 intptr_t start_index,
3050 intptr_t stop_index,
3052 for (intptr_t
id = start_index, n = stop_index;
id < n;
id++) {
3053 auto const code =
static_cast<CodePtr
>(
d->Ref(
id));
3060 d->ReadInstructions(
code, deferred);
3062#if !defined(DART_PRECOMPILED_RUNTIME)
3064 code->untag()->object_pool_ =
static_cast<ObjectPoolPtr
>(
d->ReadRef());
3070 code->untag()->owner_ =
d->ReadRef();
3071 code->untag()->exception_handlers_ =
3072 static_cast<ExceptionHandlersPtr
>(
d->ReadRef());
3073 code->untag()->pc_descriptors_ =
3074 static_cast<PcDescriptorsPtr
>(
d->ReadRef());
3075 code->untag()->catch_entry_ =
d->ReadRef();
3076#if !defined(DART_PRECOMPILED_RUNTIME)
3078 code->untag()->compressed_stackmaps_ =
3079 static_cast<CompressedStackMapsPtr
>(
d->ReadRef());
3084 code->untag()->inlined_id_to_function_ =
3085 static_cast<ArrayPtr
>(
d->ReadRef());
3086 code->untag()->code_source_map_ =
3087 static_cast<CodeSourceMapPtr
>(
d->ReadRef());
3089#if !defined(DART_PRECOMPILED_RUNTIME)
3091 code->untag()->deopt_info_array_ =
static_cast<ArrayPtr
>(
d->ReadRef());
3092 code->untag()->static_calls_target_table_ =
3093 static_cast<ArrayPtr
>(
d->ReadRef());
3096#if !defined(PRODUCT)
3097 code->untag()->return_address_metadata_ =
d->ReadRef();
3099 code->untag()->comments_ = FLAG_code_comments
3100 ?
static_cast<ArrayPtr
>(
d->ReadRef())
3102 code->untag()->compile_timestamp_ = 0;
3108 d->EndInstructions();
3110#if !defined(PRODUCT)
3114#if !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
3119#if !defined(DART_PRECOMPILED_RUNTIME) && !defined(PRODUCT)
3124#if !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
3125 owner =
code.owner();
3126 if (owner.IsFunction()) {
3127 if ((FLAG_disassemble ||
3128 (
code.is_optimized() && FLAG_disassemble_optimized)) &&
3129 compiler::PrintFilter::ShouldPrint(Function::Cast(owner))) {
3131 code.is_optimized());
3133 }
else if (FLAG_disassemble_stubs) {
3141 intptr_t deferred_start_index_;
3142 intptr_t deferred_stop_index_;
3145#if !defined(DART_PRECOMPILED_RUNTIME)
3157 const intptr_t
length =
pool->untag()->length_;
3158 uint8_t* entry_bits =
pool->untag()->entry_bits();
3161 if (entry_type == ObjectPool::EntryType::kTaggedObject) {
3162 s->Push(
pool->untag()->data()[
i].raw_obj_);
3171 for (intptr_t
i = 0;
i <
count;
i++) {
3172 ObjectPoolPtr
pool = objects_[
i];
3175 const intptr_t
length =
pool->untag()->length_;
3185 for (intptr_t
i = 0;
i <
count;
i++) {
3186 ObjectPoolPtr
pool = objects_[
i];
3188 const intptr_t
length =
pool->untag()->length_;
3190 uint8_t* entry_bits =
pool->untag()->entry_bits();
3191 for (intptr_t j = 0; j <
length; j++) {
3193 uint8_t
bits = entry_bits[j];
3196 ASSERT(snapshot_behavior !=
3197 ObjectPool::SnapshotBehavior::kNotSnapshotable);
3198 s->Write<uint8_t>(
bits);
3199 if (snapshot_behavior != ObjectPool::SnapshotBehavior::kSnapshotable) {
3205 case ObjectPool::EntryType::kTaggedObject: {
3206 if (weak && !
s->HasRef(entry.raw_obj_)) {
3210 s->WriteElementRef(entry.raw_obj_, j);
3214 case ObjectPool::EntryType::kImmediate: {
3215 s->Write<intptr_t>(entry.raw_value_);
3218 case ObjectPool::EntryType::kNativeFunction: {
3241 const intptr_t
count =
d->ReadUnsigned();
3242 for (intptr_t
i = 0;
i <
count;
i++) {
3243 const intptr_t
length =
d->ReadUnsigned();
3253 fill_position_ =
d.Position();
3254#if defined(DART_PRECOMPILED_RUNTIME)
3256 ObjectPool::EntryType::kImmediate, ObjectPool::Patchability::kPatchable,
3257 ObjectPool::SnapshotBehavior::kSnapshotable);
3258 uword switchable_call_miss_entry_point =
3259 StubCode::SwitchableCallMiss().MonomorphicEntryPoint();
3263 const intptr_t
length =
d.ReadUnsigned();
3264 ObjectPoolPtr
pool =
static_cast<ObjectPoolPtr
>(
d.Ref(
id));
3268 for (intptr_t j = 0; j <
length; j++) {
3269 const uint8_t entry_bits =
d.Read<uint8_t>();
3270 pool->untag()->entry_bits()[j] = entry_bits;
3272 const auto snapshot_behavior =
3274 ASSERT(snapshot_behavior !=
3275 ObjectPool::SnapshotBehavior::kNotSnapshotable);
3276 switch (snapshot_behavior) {
3277 case ObjectPool::SnapshotBehavior::kSnapshotable:
3280 case ObjectPool::SnapshotBehavior::kResetToBootstrapNative:
3281 entry.raw_obj_ = StubCode::CallBootstrapNative().ptr();
3283#if defined(DART_PRECOMPILED_RUNTIME)
3284 case ObjectPool::SnapshotBehavior::
3285 kResetToSwitchableCallMissEntryPoint:
3286 pool->untag()->entry_bits()[j] = immediate_bits;
3288 static_cast<intptr_t
>(switchable_call_miss_entry_point);
3291 case ObjectPool::SnapshotBehavior::kSetToZero:
3292 entry.raw_value_ = 0;
3295 FATAL(
"Unexpected snapshot behavior: %d\n", snapshot_behavior);
3298 case ObjectPool::EntryType::kTaggedObject:
3299 entry.raw_obj_ =
d.ReadRef();
3301 case ObjectPool::EntryType::kImmediate:
3302 entry.raw_value_ =
d.Read<intptr_t>();
3304 case ObjectPool::EntryType::kNativeFunction: {
3307 entry.raw_value_ =
static_cast<intptr_t
>(new_entry);
3318#if defined(DART_PRECOMPILED_RUNTIME) && \
3319 (!defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER))
3320 if (FLAG_disassemble) {
3322 d->isolate_group()->object_store()->global_object_pool());
3330 intptr_t fill_position_ = 0;
3333#if defined(DART_PRECOMPILER)
3334class WeakSerializationReferenceSerializationCluster
3335 :
public SerializationCluster {
3337 WeakSerializationReferenceSerializationCluster()
3338 : SerializationCluster(
3339 "WeakSerializationReference",
3341 ~WeakSerializationReferenceSerializationCluster() {}
3343 void Trace(Serializer*
s, ObjectPtr
object) {
3348 void RetraceEphemerons(Serializer*
s) {
3349 for (intptr_t
i = 0;
i < objects_.length();
i++) {
3350 WeakSerializationReferencePtr weak = objects_[
i];
3351 if (!
s->IsReachable(weak->untag()->target())) {
3352 s->Push(weak->untag()->replacement());
3357 intptr_t Count(Serializer*
s) {
return objects_.length(); }
3359 void CreateArtificialTargetNodesIfNeeded(Serializer*
s) {
3360 for (intptr_t
i = 0;
i < objects_.length();
i++) {
3361 WeakSerializationReferencePtr weak = objects_[
i];
3362 s->CreateArtificialNodeIfNeeded(weak->untag()->target());
3366 void WriteAlloc(Serializer*
s) {
3370 void WriteFill(Serializer*
s) {
3375 GrowableArray<WeakSerializationReferencePtr> objects_;
3379#if !defined(DART_PRECOMPILED_RUNTIME)
3394 for (intptr_t
i = 0;
i <
count;
i++) {
3395 PcDescriptorsPtr
desc = objects_[
i];
3398 const intptr_t
length =
desc->untag()->length_;
3407 for (intptr_t
i = 0;
i <
count;
i++) {
3408 PcDescriptorsPtr
desc = objects_[
i];
3410 const intptr_t
length =
desc->untag()->length_;
3412 uint8_t* cdata =
reinterpret_cast<uint8_t*
>(
desc->untag()->data());
3430 const intptr_t
count =
d->ReadUnsigned();
3431 for (intptr_t
i = 0;
i <
count;
i++) {
3432 const intptr_t
length =
d->ReadUnsigned();
3443 const intptr_t
length =
d.ReadUnsigned();
3444 PcDescriptorsPtr
desc =
static_cast<PcDescriptorsPtr
>(
d.Ref(
id));
3448 uint8_t* cdata =
reinterpret_cast<uint8_t*
>(
desc->untag()->data());
3454#if !defined(DART_PRECOMPILED_RUNTIME)
3469 for (intptr_t
i = 0;
i <
count;
i++) {
3470 CodeSourceMapPtr
map = objects_[
i];
3473 const intptr_t
length =
map->untag()->length_;
3482 for (intptr_t
i = 0;
i <
count;
i++) {
3483 CodeSourceMapPtr
map = objects_[
i];
3485 const intptr_t
length =
map->untag()->length_;
3487 uint8_t* cdata =
reinterpret_cast<uint8_t*
>(
map->untag()->data());
3505 const intptr_t
count =
d->ReadUnsigned();
3506 for (intptr_t
i = 0;
i <
count;
i++) {
3507 const intptr_t
length =
d->ReadUnsigned();
3517 const intptr_t
length =
d.ReadUnsigned();
3518 CodeSourceMapPtr
map =
static_cast<CodeSourceMapPtr
>(
d.Ref(
id));
3522 uint8_t* cdata =
reinterpret_cast<uint8_t*
>(
map->untag()->data());
3528#if !defined(DART_PRECOMPILED_RUNTIME)
3543 for (intptr_t
i = 0;
i <
count;
i++) {
3544 CompressedStackMapsPtr
map = objects_[
i];
3548 map->untag()->payload()->flags_and_size());
3557 for (intptr_t
i = 0;
i <
count;
i++) {
3558 CompressedStackMapsPtr
map = objects_[
i];
3560 s->WriteUnsigned(
map->untag()->payload()->flags_and_size());
3562 map->untag()->payload()->flags_and_size());
3564 reinterpret_cast<uint8_t*
>(
map->untag()->payload()->data());
3583 const intptr_t
count =
d->ReadUnsigned();
3584 for (intptr_t
i = 0;
i <
count;
i++) {
3585 const intptr_t
length =
d->ReadUnsigned();
3595 const intptr_t flags_and_size =
d.ReadUnsigned();
3598 CompressedStackMapsPtr
map =
3599 static_cast<CompressedStackMapsPtr
>(
d.Ref(
id));
3602 map->untag()->payload()->set_flags_and_size(flags_and_size);
3604 reinterpret_cast<uint8_t*
>(
map->untag()->payload()->data());
3610#if !defined(DART_PRECOMPILED_RUNTIME) && !defined(DART_COMPRESSED_POINTERS)
3637 s->heap()->old_space()->IsObjectFromImagePages(
object)) {
3653 uint32_t running_offset = 0;
3654 for (intptr_t
i = 0;
i <
count;
i++) {
3656 s->AssignRef(
object);
3657 const StringPtr
name =
3660 uint32_t
offset =
s->GetDataOffset(
object);
3665 s->WriteUnsigned((
offset - running_offset) >>
3678 const intptr_t cid_;
3679 const char*
const type_;
3683#if !defined(DART_COMPRESSED_POINTERS)
3698 intptr_t
count =
d->ReadUnsigned();
3699 uint32_t running_offset = 0;
3700 for (intptr_t
i = 0;
i <
count;
i++) {
3702 ObjectPtr object =
d->GetObjectAt(running_offset);
3703 d->AssignRef(
object);
3706 if (cid_ == kStringCid) {
3719 auto object_store =
d->isolate_group()->object_store();
3722 object_store->set_symbol_table(
table_);
3727 FATAL(
"Cannot recanonicalize RO objects.");
3732 const intptr_t cid_;
3736#if !defined(DART_PRECOMPILED_RUNTIME)
3745 objects_.
Add(handlers);
3747 s->Push(handlers->untag()->handled_types_data());
3753 for (intptr_t
i = 0;
i <
count;
i++) {
3754 ExceptionHandlersPtr handlers = objects_[
i];
3755 s->AssignRef(handlers);
3757 const intptr_t
length = handlers->untag()->num_entries();
3766 for (intptr_t
i = 0;
i <
count;
i++) {
3767 ExceptionHandlersPtr handlers = objects_[
i];
3769 const intptr_t packed_fields = handlers->untag()->packed_fields_;
3772 s->WriteUnsigned(packed_fields);
3774 for (intptr_t j = 0; j <
length; j++) {
3776 s->Write<uint32_t>(
info.handler_pc_offset);
3777 s->Write<int16_t>(
info.outer_try_index);
3778 s->Write<int8_t>(
info.needs_stacktrace);
3779 s->Write<int8_t>(
info.has_catch_all);
3780 s->Write<int8_t>(
info.is_generated);
3798 const intptr_t
count =
d->ReadUnsigned();
3799 for (intptr_t
i = 0;
i <
count;
i++) {
3800 const intptr_t
length =
d->ReadUnsigned();
3811 ExceptionHandlersPtr handlers =
3812 static_cast<ExceptionHandlersPtr
>(
d.Ref(
id));
3813 const intptr_t packed_fields =
d.ReadUnsigned();
3818 handlers->untag()->packed_fields_ = packed_fields;
3819 handlers->untag()->handled_types_data_ =
3820 static_cast<ArrayPtr
>(
d.ReadRef());
3821 for (intptr_t j = 0; j <
length; j++) {
3823 info.handler_pc_offset =
d.Read<uint32_t>();
3824 info.outer_try_index =
d.Read<int16_t>();
3825 info.needs_stacktrace =
d.Read<int8_t>();
3826 info.has_catch_all =
d.Read<int8_t>();
3827 info.is_generated =
d.Read<int8_t>();
3833#if !defined(DART_PRECOMPILED_RUNTIME)
3842 objects_.
Add(context);
3844 s->Push(context->untag()->parent());
3845 const intptr_t
length = context->untag()->num_variables_;
3847 s->Push(context->untag()->element(
i));
3854 for (intptr_t
i = 0;
i <
count;
i++) {
3855 ContextPtr context = objects_[
i];
3856 s->AssignRef(context);
3858 const intptr_t
length = context->untag()->num_variables_;
3866 for (intptr_t
i = 0;
i <
count;
i++) {
3867 ContextPtr context = objects_[
i];
3869 const intptr_t
length = context->untag()->num_variables_;
3872 for (intptr_t j = 0; j <
length; j++) {
3873 s->WriteElementRef(context->untag()->element(j), j);
3890 const intptr_t
count =
d->ReadUnsigned();
3891 for (intptr_t
i = 0;
i <
count;
i++) {
3892 const intptr_t
length =
d->ReadUnsigned();
3903 ContextPtr context =
static_cast<ContextPtr
>(
d.Ref(
id));
3904 const intptr_t
length =
d.ReadUnsigned();
3907 context->untag()->num_variables_ =
length;
3908 context->untag()->parent_ =
static_cast<ContextPtr
>(
d.ReadRef());
3909 for (intptr_t j = 0; j <
length; j++) {
3910 context->untag()->data()[j] =
d.ReadRef();
3916#if !defined(DART_PRECOMPILED_RUNTIME)
3925 objects_.
Add(scope);
3927 const intptr_t
length = scope->untag()->num_variables_;
3934 for (intptr_t
i = 0;
i <
count;
i++) {
3935 ContextScopePtr scope = objects_[
i];
3936 s->AssignRef(scope);
3938 const intptr_t
length = scope->untag()->num_variables_;
3947 for (intptr_t
i = 0;
i <
count;
i++) {
3948 ContextScopePtr scope = objects_[
i];
3950 const intptr_t
length = scope->untag()->num_variables_;
3952 s->Write<
bool>(scope->untag()->is_implicit_);
3970 const intptr_t
count =
d->ReadUnsigned();
3971 for (intptr_t
i = 0;
i <
count;
i++) {
3972 const intptr_t
length =
d->ReadUnsigned();
3983 ContextScopePtr scope =
static_cast<ContextScopePtr
>(
d.Ref(
id));
3984 const intptr_t
length =
d.ReadUnsigned();
3987 scope->untag()->num_variables_ =
length;
3988 scope->untag()->is_implicit_ =
d.Read<
bool>();
3994#if !defined(DART_PRECOMPILED_RUNTIME)
4005 objects_.
Add(unlinked);
4012 for (intptr_t
i = 0;
i <
count;
i++) {
4013 UnlinkedCallPtr unlinked = objects_[
i];
4014 s->AssignRef(unlinked);
4020 for (intptr_t
i = 0;
i <
count;
i++) {
4021 UnlinkedCallPtr unlinked = objects_[
i];
4024 s->Write<
bool>(unlinked->untag()->can_patch_to_monomorphic_);
4048 UnlinkedCallPtr unlinked =
static_cast<UnlinkedCallPtr
>(
d.Ref(
id));
4051 d.ReadFromTo(unlinked);
4052 unlinked->untag()->can_patch_to_monomorphic_ =
d.Read<
bool>();
4057#if !defined(DART_PRECOMPILED_RUNTIME)
4075 for (intptr_t
i = 0;
i <
count;
i++) {
4076 ICDataPtr ic = objects_[
i];
4084 for (intptr_t
i = 0;
i <
count;
i++) {
4085 ICDataPtr ic = objects_[
i];
4091 s->Write<uint32_t>(ic->untag()->state_bits_);
4114 ICDataPtr ic =
static_cast<ICDataPtr
>(
d.Ref(
id));
4118 ic->untag()->state_bits_ =
d.Read<int32_t>();
4123#if !defined(DART_PRECOMPILED_RUNTIME)
4129 kMegamorphicCacheCid,
4142 for (intptr_t
i = 0;
i <
count;
i++) {
4143 MegamorphicCachePtr
cache = objects_[
i];
4150 for (intptr_t
i = 0;
i <
count;
i++) {
4151 MegamorphicCachePtr
cache = objects_[
i];
4154 s->Write<int32_t>(
cache->untag()->filled_entry_count_);
4178 MegamorphicCachePtr
cache =
static_cast<MegamorphicCachePtr
>(
d.Ref(
id));
4182 cache->untag()->filled_entry_count_ =
d.Read<int32_t>();
4187#if !defined(DART_PRECOMPILED_RUNTIME)
4193 kSubtypeTestCacheCid,
4200 s->Push(
cache->untag()->cache_);
4206 for (intptr_t
i = 0;
i <
count;
i++) {
4207 SubtypeTestCachePtr
cache = objects_[
i];
4214 for (intptr_t
i = 0;
i <
count;
i++) {
4215 SubtypeTestCachePtr
cache = objects_[
i];
4218 s->Write<uint32_t>(
cache->untag()->num_inputs_);
4219 s->Write<uint32_t>(
cache->untag()->num_occupied_);
4243 SubtypeTestCachePtr
cache =
static_cast<SubtypeTestCachePtr
>(
d.Ref(
id));
4246 cache->untag()->cache_ =
static_cast<ArrayPtr
>(
d.ReadRef());
4247 cache->untag()->num_inputs_ =
d.Read<uint32_t>();
4248 cache->untag()->num_occupied_ =
d.Read<uint32_t>();
4253#if !defined(DART_PRECOMPILED_RUNTIME)
4265 s->Push(unit->untag()->parent());
4271 for (intptr_t
i = 0;
i <
count;
i++) {
4272 LoadingUnitPtr unit = objects_[
i];
4279 for (intptr_t
i = 0;
i <
count;
i++) {
4280 LoadingUnitPtr unit = objects_[
i];
4307 LoadingUnitPtr unit =
static_cast<LoadingUnitPtr
>(
d.Ref(
id));
4310 unit->untag()->parent_ =
static_cast<LoadingUnitPtr
>(
d.ReadRef());
4312 unit->untag()->instructions_image_ =
nullptr;
4313 unit->untag()->packed_fields_ =
4315 UntaggedLoadingUnit::kNotLoaded) |
4321#if !defined(DART_PRECOMPILED_RUNTIME)
4339 for (intptr_t
i = 0;
i <
count;
i++) {
4340 LanguageErrorPtr
error = objects_[
i];
4347 for (intptr_t
i = 0;
i <
count;
i++) {
4348 LanguageErrorPtr
error = objects_[
i];
4351 s->WriteTokenPosition(
error->untag()->token_pos_);
4352 s->Write<
bool>(
error->untag()->report_after_token_);
4353 s->Write<int8_t>(
error->untag()->kind_);
4377 LanguageErrorPtr
error =
static_cast<LanguageErrorPtr
>(
d.Ref(
id));
4381 error->untag()->token_pos_ =
d.ReadTokenPosition();
4382 error->untag()->report_after_token_ =
d.Read<
bool>();
4383 error->untag()->kind_ =
d.Read<int8_t>();
4388#if !defined(DART_PRECOMPILED_RUNTIME)
4393 "UnhandledException",
4394 kUnhandledExceptionCid,
4400 objects_.
Add(exception);
4407 for (intptr_t
i = 0;
i <
count;
i++) {
4408 UnhandledExceptionPtr exception = objects_[
i];
4409 s->AssignRef(exception);
4415 for (intptr_t
i = 0;
i <
count;
i++) {
4416 UnhandledExceptionPtr exception = objects_[
i];
4442 UnhandledExceptionPtr exception =
4443 static_cast<UnhandledExceptionPtr
>(
d.Ref(
id));
4446 d.ReadFromTo(exception);
4451#if !defined(DART_PRECOMPILED_RUNTIME)
4457 host_next_field_offset_in_words_ =
4458 cls->untag()->host_next_field_offset_in_words_;
4459 ASSERT(host_next_field_offset_in_words_ > 0);
4460#if defined(DART_PRECOMPILER)
4461 target_next_field_offset_in_words_ =
4462 cls->untag()->target_next_field_offset_in_words_;
4463 target_instance_size_in_words_ =
4464 cls->untag()->target_instance_size_in_words_;
4466 target_next_field_offset_in_words_ =
4467 cls->untag()->host_next_field_offset_in_words_;
4468 target_instance_size_in_words_ = cls->untag()->host_instance_size_in_words_;
4470 ASSERT(target_next_field_offset_in_words_ > 0);
4471 ASSERT(target_instance_size_in_words_ > 0);
4478 const intptr_t next_field_offset = host_next_field_offset_in_words_
4480 const auto unboxed_fields_bitmap =
4481 s->isolate_group()->class_table()->GetUnboxedFieldsMapAt(
cid_);
4483 while (
offset < next_field_offset) {
4500 s->Write<int32_t>(target_next_field_offset_in_words_);
4501 s->Write<int32_t>(target_instance_size_in_words_);
4503 for (intptr_t
i = 0;
i <
count;
i++) {
4514 intptr_t next_field_offset = host_next_field_offset_in_words_
4518 const auto unboxed_fields_bitmap =
4519 s->isolate_group()->class_table()->GetUnboxedFieldsMapAt(
cid_);
4521 for (intptr_t
i = 0;
i <
count;
i++) {
4524#if defined(DART_PRECOMPILER)
4525 if (FLAG_write_v8_snapshot_profile_to !=
nullptr) {
4526 ClassPtr cls =
s->isolate_group()->class_table()->At(
cid_);
4527 s->AttributePropertyRef(cls,
"<class>");
4531 while (
offset < next_field_offset) {
4536 s->WriteWordWith32BitWrites(
value);
4542 s->WriteElementRef(raw_obj,
offset);
4550 intptr_t host_next_field_offset_in_words_;
4551 intptr_t target_next_field_offset_in_words_;
4552 intptr_t target_instance_size_in_words_;
4568#if defined(DART_PRECOMPILED_RUNTIME)
4572 d->isolate_group()->constant_canonicalization_mutex());
4595 is_immutable_(is_immutable) {}
4600 const intptr_t
count =
d->ReadUnsigned();
4601 next_field_offset_in_words_ =
d->Read<int32_t>();
4602 instance_size_in_words_ =
d->Read<int32_t>();
4605 for (intptr_t
i = 0;
i <
count;
i++) {
4606 d->AssignRef(
d->Allocate(instance_size));
4614 const intptr_t
cid = cid_;
4616 const bool is_immutable = is_immutable_;
4617 intptr_t next_field_offset = next_field_offset_in_words_
4624 InstancePtr
instance =
static_cast<InstancePtr
>(
d.Ref(
id));
4626 mark_canonical, is_immutable);
4628 while (
offset < next_field_offset) {
4633 *
p =
d.ReadWordWith32BitReads();
4641 while (
offset < instance_size) {
4652 const intptr_t cid_;
4653 const bool is_immutable_;
4654 intptr_t next_field_offset_in_words_;
4655 intptr_t instance_size_in_words_;
4658#if !defined(DART_PRECOMPILED_RUNTIME)
4676 for (intptr_t
i = 0;
i <
count;
i++) {
4677 LibraryPrefixPtr
prefix = objects_[
i];
4684 for (intptr_t
i = 0;
i <
count;
i++) {
4685 LibraryPrefixPtr
prefix = objects_[
i];
4688 s->Write<uint16_t>(
prefix->untag()->num_imports_);
4689 s->Write<
bool>(
prefix->untag()->is_deferred_load_);
4713 LibraryPrefixPtr
prefix =
static_cast<LibraryPrefixPtr
>(
d.Ref(
id));
4717 prefix->untag()->num_imports_ =
d.Read<uint16_t>();
4718 prefix->untag()->is_deferred_load_ =
d.Read<
bool>();
4723#if !defined(DART_PRECOMPILED_RUNTIME)
4735 represents_canonical_set,
4747 ClassPtr type_class =
4748 s->isolate_group()->class_table()->At(
type->untag()->type_class_id());
4749 s->Push(type_class);
4756 for (intptr_t
i = 0;
i <
count;
i++) {
4765 for (intptr_t
i = 0;
i <
count;
i++) {
4780 ClassPtr type_class =
4781 s->isolate_group()->class_table()->At(
type->untag()->type_class_id());
4782 if (type_class->untag()->declaration_type() !=
type) {
4793#if defined(DART_PRECOMPILER)
4794 if (FLAG_write_v8_snapshot_profile_to !=
nullptr) {
4795 ClassPtr type_class =
4796 s->isolate_group()->class_table()->At(
type->untag()->type_class_id());
4797 s->AttributePropertyRef(type_class,
"<type_class>");
4801 s->WriteUnsigned(
type->untag()->flags());
4826 TypePtr
type =
static_cast<TypePtr
>(
d.Ref(
id));
4830 type->untag()->set_flags(
d.ReadUnsigned());
4836 auto object_store =
d->isolate_group()->object_store();
4839 object_store->set_canonical_types(
table_);
4855 type.UpdateTypeTestingStubEntryPoint();
4861 type.InitializeTypeTestingStubNonAtomic(stub);
4867#if !defined(DART_PRECOMPILED_RUNTIME)
4874 bool represents_canonical_set)
4878 represents_canonical_set,
4894 for (intptr_t
i = 0;
i <
count;
i++) {
4903 for (intptr_t
i = 0;
i <
count;
i++) {
4913 s->Write<uint8_t>(
type->untag()->flags());
4914 s->Write<uint32_t>(
type->untag()->packed_parameter_counts_);
4915 s->Write<uint16_t>(
type->untag()->packed_type_parameter_counts_);
4940 FunctionTypePtr
type =
static_cast<FunctionTypePtr
>(
d.Ref(
id));
4944 type->untag()->set_flags(
d.Read<uint8_t>());
4945 type->untag()->packed_parameter_counts_ =
d.Read<uint32_t>();
4946 type->untag()->packed_type_parameter_counts_ =
d.Read<uint16_t>();
4952 auto object_store =
d->isolate_group()->object_store();
4954 d, refs,
Array::Handle(object_store->canonical_function_types()));
4955 object_store->set_canonical_function_types(
table_);
4971 type.UpdateTypeTestingStubEntryPoint();
4977 type.InitializeTypeTestingStubNonAtomic(stub);
4983#if !defined(DART_PRECOMPILED_RUNTIME)
4990 bool represents_canonical_set)
4994 represents_canonical_set,
5010 for (intptr_t
i = 0;
i <
count;
i++) {
5019 for (intptr_t
i = 0;
i <
count;
i++) {
5029 s->Write<uint8_t>(
type->untag()->flags());
5053 RecordTypePtr
type =
static_cast<RecordTypePtr
>(
d.Ref(
id));
5057 type->untag()->set_flags(
d.Read<uint8_t>());
5063 auto object_store =
d->isolate_group()->object_store();
5066 object_store->set_canonical_record_types(
table_);
5082 type.UpdateTypeTestingStubEntryPoint();
5088 type.InitializeTypeTestingStubNonAtomic(stub);
5094#if !defined(DART_PRECOMPILED_RUNTIME)
5101 bool cluster_represents_canonical_set)
5105 cluster_represents_canonical_set,
5121 for (intptr_t
i = 0;
i <
count;
i++) {
5130 for (intptr_t
i = 0;
i <
count;
i++) {
5139 s->Write<uint16_t>(
type->untag()->base_);
5140 s->Write<uint16_t>(
type->untag()->index_);
5142 s->Write<uint8_t>(
type->untag()->flags());
5167 TypeParameterPtr
type =
static_cast<TypeParameterPtr
>(
d.Ref(
id));
5172 type->untag()->base_ =
d.Read<uint16_t>();
5173 type->untag()->index_ =
d.Read<uint16_t>();
5174 type->untag()->set_flags(
d.Read<uint8_t>());
5180 auto object_store =
d->isolate_group()->object_store();
5182 d, refs,
Array::Handle(object_store->canonical_type_parameters()));
5183 object_store->set_canonical_type_parameters(
table_);
5187 type_param ^= refs.
At(
i);
5189 refs.
SetAt(
i, type_param);
5198 type_param ^= refs.
At(
id);
5203 type_param ^= refs.
At(
id);
5211#if !defined(DART_PRECOMPILED_RUNTIME)
5230 for (intptr_t
i = 0;
i <
count;
i++) {
5238 for (intptr_t
i = 0;
i <
count;
i++) {
5268 ClosurePtr
closure =
static_cast<ClosurePtr
>(
d.Ref(
id));
5272#if defined(DART_PRECOMPILED_RUNTIME)
5273 closure->untag()->entry_point_ = 0;
5278#if defined(DART_PRECOMPILED_RUNTIME)
5288 uword entry_point = func.entry_point();
5289 ASSERT(entry_point != 0);
5290 closure.ptr()->untag()->entry_point_ = entry_point;
5296#if !defined(DART_PRECOMPILED_RUNTIME)
5304 if (!object->IsHeapObject()) {
5315 for (intptr_t
i = 0;
i < smis_.
length();
i++) {
5316 SmiPtr smi = smis_[
i];
5320 s->Write<int64_t>(
value);
5326 for (intptr_t
i = 0;
i < mints_.
length();
i++) {
5327 MintPtr mint = mints_[
i];
5330 s->Write<int64_t>(mint->untag()->value_);
5356 const intptr_t
count =
d->ReadUnsigned();
5358 for (intptr_t
i = 0;
i <
count;
i++) {
5359 int64_t
value =
d->Read<int64_t>();
5366 mint->untag()->value_ =
value;
5376#if !defined(DART_PRECOMPILED_RUNTIME)
5394 for (intptr_t
i = 0;
i <
count;
i++) {
5395 DoublePtr dbl = objects_[
i];
5402 for (intptr_t
i = 0;
i <
count;
i++) {
5403 DoublePtr dbl = objects_[
i];
5405 s->Write<
double>(dbl->untag()->value_);
5431 DoublePtr dbl =
static_cast<DoublePtr
>(
d.Ref(
id));
5434 dbl->untag()->value_ =
d.Read<
double>();
5439#if !defined(DART_PRECOMPILED_RUNTIME)
5459 for (intptr_t
i = 0;
i <
count;
i++) {
5461 s->AssignRef(vector);
5467 for (intptr_t
i = 0;
i <
count;
i++) {
5472 s->WriteBytes(&(
static_cast<Int32x4Ptr
>(vector)->
untag()->value_),
5502 const intptr_t
cid = cid_;
5508 d.ReadBytes(&(
static_cast<Int32x4Ptr
>(vector)->
untag()->value_),
5517#if !defined(DART_PRECOMPILED_RUNTIME)
5522 "GrowableObjectArray",
5523 kGrowableObjectArrayCid,
5529 objects_.
Add(array);
5536 for (intptr_t
i = 0;
i <
count;
i++) {
5537 GrowableObjectArrayPtr array = objects_[
i];
5538 s->AssignRef(array);
5544 for (intptr_t
i = 0;
i <
count;
i++) {
5545 GrowableObjectArrayPtr array = objects_[
i];
5571 GrowableObjectArrayPtr list =
5572 static_cast<GrowableObjectArrayPtr
>(
d.Ref(
id));
5580#if !defined(DART_PRECOMPILED_RUNTIME)
5589 objects_.
Add(record);
5592 for (intptr_t
i = 0;
i < num_fields; ++
i) {
5593 s->Push(record->untag()->field(
i));
5600 for (intptr_t
i = 0;
i <
count; ++
i) {
5601 RecordPtr record = objects_[
i];
5602 s->AssignRef(record);
5605 s->WriteUnsigned(num_fields);
5612 for (intptr_t
i = 0;
i <
count; ++
i) {
5613 RecordPtr record = objects_[
i];
5615 const RecordShape shape(record->untag()->shape());
5616 s->WriteUnsigned(shape.
AsInt());
5617 const intptr_t num_fields = shape.
num_fields();
5618 for (intptr_t j = 0; j < num_fields; ++j) {
5619 s->WriteElementRef(record->untag()->field(j), j);
5640 const intptr_t
count =
d->ReadUnsigned();
5641 for (intptr_t
i = 0;
i <
count;
i++) {
5642 const intptr_t num_fields =
d->ReadUnsigned();
5653 RecordPtr record =
static_cast<RecordPtr
>(
d.Ref(
id));
5654 const intptr_t shape =
d.ReadUnsigned();
5659 record->untag()->shape_ =
Smi::New(shape);
5660 for (intptr_t j = 0; j < num_fields; ++j) {
5661 record->untag()->data()[j] =
d.ReadRef();
5667#if !defined(DART_PRECOMPILED_RUNTIME)
5683 for (intptr_t
i = 0;
i <
count;
i++) {
5684 TypedDataPtr
data = objects_[
i];
5697 for (intptr_t
i = 0;
i <
count;
i++) {
5698 TypedDataPtr
data = objects_[
i];
5702 uint8_t* cdata =
reinterpret_cast<uint8_t*
>(
data->untag()->data());
5720 const intptr_t
count =
d->ReadUnsigned();
5722 for (intptr_t
i = 0;
i <
count;
i++) {
5723 const intptr_t
length =
d->ReadUnsigned();
5735 const intptr_t
cid = cid_;
5737 TypedDataPtr
data =
static_cast<TypedDataPtr
>(
d.Ref(
id));
5738 const intptr_t
length =
d.ReadUnsigned();
5743 data->untag()->RecomputeDataField();
5744 uint8_t* cdata =
reinterpret_cast<uint8_t*
>(
data->untag()->data());
5745 d.ReadBytes(cdata, length_in_bytes);
5750 const intptr_t cid_;
5753#if !defined(DART_PRECOMPILED_RUNTIME)
5772 for (intptr_t
i = 0;
i <
count;
i++) {
5773 TypedDataViewPtr view = objects_[
i];
5780 for (intptr_t
i = 0;
i <
count;
i++) {
5781 TypedDataViewPtr view = objects_[
i];
5805 const intptr_t
cid = cid_;
5808 TypedDataViewPtr view =
static_cast<TypedDataViewPtr
>(
d.Ref(
id));
5817 view ^= refs.
At(
id);
5818 view.RecomputeDataField();
5823 const intptr_t cid_;
5826#if !defined(DART_PRECOMPILED_RUNTIME)
5831 "ExternalTypedData",
5844 for (intptr_t
i = 0;
i <
count;
i++) {
5845 ExternalTypedDataPtr
data = objects_[
i];
5853 for (intptr_t
i = 0;
i <
count;
i++) {
5854 ExternalTypedDataPtr
data = objects_[
i];
5858 uint8_t* cdata =
reinterpret_cast<uint8_t*
>(
data->untag()->data_);
5883 const intptr_t
cid = cid_;
5886 ExternalTypedDataPtr
data =
static_cast<ExternalTypedDataPtr
>(
d.Ref(
id));
5887 const intptr_t
length =
d.ReadUnsigned();
5892 data->untag()->data_ =
const_cast<uint8_t*
>(
d.AddressOfCurrentPosition());
5899 const intptr_t cid_;
5902#if !defined(DART_PRECOMPILED_RUNTIME)
5907 kDeltaEncodedTypedDataCid) {}
5918 for (intptr_t
i = 0;
i <
count;
i++) {
5919 const TypedDataPtr
data = objects_[
i];
5924 const intptr_t length_in_bytes =
5926 s->WriteUnsigned(length_in_bytes);
5935 for (intptr_t
i = 0;
i <
count;
i++) {
5936 const TypedDataPtr
data = objects_[
i];
5938 const intptr_t
cid =
data->GetClassId();
5941 ASSERT(
cid == kTypedDataUint16ArrayCid ||
5942 cid == kTypedDataUint32ArrayCid);
5943 const intptr_t cid_flag =
cid == kTypedDataUint16ArrayCid ? 0 : 1;
5945 const intptr_t encoded_length = (
length << 1) | cid_flag;
5946 s->WriteUnsigned(encoded_length);
5949 for (intptr_t j = 0; j <
length; ++j) {
5950 const intptr_t
value = (
cid == kTypedDataUint16ArrayCid)
5951 ? typed_data.GetUint16(j << 1)
5952 : typed_data.GetUint32(j << 2);
5974 const intptr_t
count =
d->ReadUnsigned();
5975 for (intptr_t
i = 0;
i <
count;
i++) {
5976 const intptr_t length_in_bytes =
d->ReadUnsigned();
5989 TypedDataPtr
data =
static_cast<TypedDataPtr
>(
d.Ref(
id));
5990 const intptr_t encoded_length =
d.ReadUnsigned();
5991 const intptr_t
length = encoded_length >> 1;
5992 const intptr_t
cid = (encoded_length & 0x1) == 0
5993 ? kTypedDataUint16ArrayCid
5994 : kTypedDataUint32ArrayCid;
6000 data->untag()->RecomputeDataField();
6003 for (intptr_t j = 0; j <
length; ++j) {
6004 value +=
d.ReadUnsigned();
6005 if (
cid == kTypedDataUint16ArrayCid) {
6006 typed_data.SetUint16(j << 1,
static_cast<uint16_t
>(
value));
6008 typed_data.SetUint32(j << 2,
value);
6015#if !defined(DART_PRECOMPILED_RUNTIME)
6026 objects_.
Add(trace);
6033 for (intptr_t
i = 0;
i <
count;
i++) {
6034 StackTracePtr trace = objects_[
i];
6035 s->AssignRef(trace);
6041 for (intptr_t
i = 0;
i <
count;
i++) {
6042 StackTracePtr trace = objects_[
i];
6067 StackTracePtr trace =
static_cast<StackTracePtr
>(
d.Ref(
id));
6070 d.ReadFromTo(trace);
6075#if !defined(DART_PRECOMPILED_RUNTIME)
6086 objects_.
Add(regexp);
6093 for (intptr_t
i = 0;
i <
count;
i++) {
6094 RegExpPtr regexp = objects_[
i];
6095 s->AssignRef(regexp);
6101 for (intptr_t
i = 0;
i <
count;
i++) {
6102 RegExpPtr regexp = objects_[
i];
6105 s->Write<int32_t>(regexp->untag()->num_one_byte_registers_);
6106 s->Write<int32_t>(regexp->untag()->num_two_byte_registers_);
6107 s->Write<int8_t>(regexp->untag()->type_flags_);
6130 RegExpPtr regexp =
static_cast<RegExpPtr
>(
d.Ref(
id));
6133 d.ReadFromTo(regexp);
6134 regexp->untag()->num_one_byte_registers_ =
d.Read<int32_t>();
6135 regexp->untag()->num_two_byte_registers_ =
d.Read<int32_t>();
6136 regexp->untag()->type_flags_ =
d.Read<int8_t>();
6141#if !defined(DART_PRECOMPILED_RUNTIME)
6152 objects_.
Add(property);
6154 s->PushWeak(property->untag()->key());
6158 for (intptr_t
i = 0;
i < objects_.
length();
i++) {
6159 WeakPropertyPtr
property = objects_[
i];
6160 if (
s->IsReachable(property->untag()->key())) {
6161 s->Push(property->untag()->value());
6169 for (intptr_t
i = 0;
i <
count;
i++) {
6170 WeakPropertyPtr
property = objects_[
i];
6171 s->AssignRef(property);
6177 for (intptr_t
i = 0;
i <
count;
i++) {
6178 WeakPropertyPtr
property = objects_[
i];
6180 if (
s->HasRef(property->untag()->key())) {
6182 s->WriteOffsetRef(property->untag()->value(),
6211 WeakPropertyPtr
property =
static_cast<WeakPropertyPtr
>(
d.Ref(
id));
6214 d.ReadFromTo(property);
6220#if !defined(DART_PRECOMPILED_RUNTIME)
6242 for (intptr_t
i = 0;
i <
count;
i++) {
6243 MapPtr
map = objects_[
i];
6250 for (intptr_t
i = 0;
i <
count;
i++) {
6251 MapPtr
map = objects_[
i];
6281 const intptr_t
cid = cid_;
6284 MapPtr
map =
static_cast<MapPtr
>(
d.Ref(
id));
6292 const intptr_t cid_;
6295#if !defined(DART_PRECOMPILED_RUNTIME)
6317 for (intptr_t
i = 0;
i <
count;
i++) {
6318 SetPtr
set = objects_[
i];
6325 for (intptr_t
i = 0;
i <
count;
i++) {
6326 SetPtr
set = objects_[
i];
6356 const intptr_t
cid = cid_;
6359 SetPtr
set =
static_cast<SetPtr
>(
d.Ref(
id));
6367 const intptr_t cid_;
6370#if !defined(DART_PRECOMPILED_RUNTIME)
6379 objects_.
Add(array);
6381 s->Push(array->untag()->type_arguments());
6384 s->Push(array->untag()->element(
i));
6388#if defined(DART_PRECOMPILER)
6389 static bool IsReadOnlyCid(intptr_t
cid) {
6391 case kPcDescriptorsCid:
6392 case kCodeSourceMapCid:
6393 case kCompressedStackMapsCid:
6394 case kOneByteStringCid:
6395 case kTwoByteStringCid:
6404#if defined(DART_PRECOMPILER)
6405 if (FLAG_print_array_optimization_candidates) {
6406 intptr_t array_count = objects_.
length();
6407 intptr_t array_count_allsmi = 0;
6408 intptr_t array_count_allro = 0;
6409 intptr_t array_count_empty = 0;
6410 intptr_t element_count = 0;
6411 intptr_t element_count_allsmi = 0;
6412 intptr_t element_count_allro = 0;
6413 for (intptr_t
i = 0;
i < array_count;
i++) {
6414 ArrayPtr array = objects_[
i];
6421 if (!IsReadOnlyCid(
cid)) allro =
false;
6422 if (
cid != kSmiCid) allsmi =
false;
6426 array_count_empty++;
6427 }
else if (allsmi) {
6428 array_count_allsmi++;
6429 element_count_allsmi +=
length;
6431 array_count_allro++;
6432 element_count_allro +=
length;
6439 array_count_allsmi, element_count_allsmi);
6441 element_count_allro);
6448 for (intptr_t
i = 0;
i <
count;
i++) {
6449 ArrayPtr array = objects_[
i];
6450 s->AssignRef(array);
6460 for (intptr_t
i = 0;
i <
count;
i++) {
6461 ArrayPtr array = objects_[
i];
6466 for (intptr_t j = 0; j <
length; j++) {
6467 s->WriteElementRef(array->untag()->element(j), j);
6491 const intptr_t
count =
d->ReadUnsigned();
6492 for (intptr_t
i = 0;
i <
count;
i++) {
6493 const intptr_t
length =
d->ReadUnsigned();
6502 const intptr_t
cid = cid_;
6505 ArrayPtr array =
static_cast<ArrayPtr
>(
d.Ref(
id));
6506 const intptr_t
length =
d.ReadUnsigned();
6510 array->untag()->SetCardRememberedBitUnsynchronized();
6512 array->untag()->type_arguments_ =
6513 static_cast<TypeArgumentsPtr
>(
d.ReadRef());
6515 for (intptr_t j = 0; j <
length; j++) {
6516 array->untag()->data()[j] =
d.ReadRef();
6522 const intptr_t cid_;
6525#if !defined(DART_PRECOMPILED_RUNTIME)
6534 objects_.
Add(array);
6538 s->PushWeak(array->untag()->element(
i));
6545 for (intptr_t
i = 0;
i <
count;
i++) {
6546 WeakArrayPtr array = objects_[
i];
6547 s->AssignRef(array);
6557 for (intptr_t
i = 0;
i <
count;
i++) {
6558 WeakArrayPtr array = objects_[
i];
6562 for (intptr_t j = 0; j <
length; j++) {
6563 if (
s->HasRef(array->untag()->element(j))) {
6564 s->WriteElementRef(array->untag()->element(j), j);
6584 const intptr_t
count =
d->ReadUnsigned();
6585 for (intptr_t
i = 0;
i <
count;
i++) {
6586 const intptr_t
length =
d->ReadUnsigned();
6596 WeakArrayPtr array =
static_cast<WeakArrayPtr
>(
d.Ref(
id));
6597 const intptr_t
length =
d.ReadUnsigned();
6602 for (intptr_t j = 0; j <
length; j++) {
6603 array->untag()->data()[j] =
d.ReadRef();
6609#if !defined(DART_PRECOMPILED_RUNTIME)
6619 ASSERT(
cid == kOneByteStringCid ||
cid == kTwoByteStringCid);
6621 return (
length << 1) | (
cid == kTwoByteStringCid ? 0x1 : 0x0);
6625 bool represents_canonical_set)
6628 represents_canonical_set,
6634 StringPtr str =
static_cast<StringPtr
>(object);
6642 for (intptr_t
i = 0;
i <
count;
i++) {
6646 const intptr_t
cid = str->GetClassId();
6649 s->WriteUnsigned(encoded);
6651 cid == kOneByteStringCid
6660 for (intptr_t
i = 0;
i <
count;
i++) {
6663 const intptr_t
cid = str->GetClassId();
6666 s->WriteUnsigned(encoded);
6667 if (
cid == kOneByteStringCid) {
6668 s->WriteBytes(
static_cast<OneByteStringPtr
>(str)->
untag()->
data(),
6671 s->WriteBytes(
reinterpret_cast<uint8_t*
>(
6672 static_cast<TwoByteStringPtr
>(str)->
untag()->
data()),
6684 *out_cid = (encoded & 0x1) != 0 ? kTwoByteStringCid : kOneByteStringCid;
6685 return encoded >> 1;
6701 const intptr_t
count =
d->ReadUnsigned();
6702 for (intptr_t
i = 0;
i <
count;
i++) {
6703 const intptr_t encoded =
d->ReadUnsigned();
6716 StringPtr str =
static_cast<StringPtr
>(
d.Ref(
id));
6717 const intptr_t encoded =
d.ReadUnsigned();
6724 *
reinterpret_cast<word*
>(
reinterpret_cast<uint8_t*
>(str->untag()) +
6726 *
reinterpret_cast<word*
>(
reinterpret_cast<uint8_t*
>(str->untag()) +
6729#if DART_COMPRESSED_POINTERS
6731 const intptr_t length_offset =
6732 reinterpret_cast<intptr_t
>(&str->untag()->length_);
6733 const intptr_t data_offset =
6734 cid == kOneByteStringCid
6735 ?
reinterpret_cast<intptr_t
>(
6736 static_cast<OneByteStringPtr
>(str)->
untag()->data())
6737 :
reinterpret_cast<intptr_t
>(
6738 static_cast<TwoByteStringPtr
>(str)->untag()->data());
6739 const intptr_t length_with_gap = data_offset - length_offset;
6742 memset(
reinterpret_cast<void*
>(length_offset), 0, length_with_gap);
6747 if (
cid == kOneByteStringCid) {
6748 for (intptr_t j = 0; j <
length; j++) {
6749 uint8_t code_unit =
d.Read<uint8_t>();
6750 static_cast<OneByteStringPtr
>(str)->
untag()->data()[j] = code_unit;
6751 hasher.
Add(code_unit);
6755 for (intptr_t j = 0; j <
length; j++) {
6756 uint16_t code_unit =
d.Read<uint8_t>();
6757 code_unit = code_unit | (
d.Read<uint8_t>() << 8);
6758 static_cast<TwoByteStringPtr
>(str)->
untag()->data()[j] = code_unit;
6759 hasher.
Add(code_unit);
6768 auto object_store =
d->isolate_group()->object_store();
6771 object_store->set_symbol_table(
table_);
6783#if !defined(DART_PRECOMPILED_RUNTIME)
6803#if !defined(DART_PRECOMPILED_RUNTIME)
6807 bool should_write_symbols)
6809 should_write_symbols_(should_write_symbols),
6810 zone_(
Thread::Current()->zone()) {}
6817 s->AddBaseObject(Object::sentinel().ptr(),
"Null",
"sentinel");
6818 s->AddBaseObject(Object::transition_sentinel().ptr(),
"Null",
6819 "transition_sentinel");
6820 s->AddBaseObject(Object::optimized_out().ptr(),
"Null",
"<optimized out>");
6821 s->AddBaseObject(Object::empty_array().ptr(),
"Array",
"<empty_array>");
6822 s->AddBaseObject(Object::empty_instantiations_cache_array().ptr(),
"Array",
6823 "<empty_instantiations_cache_array>");
6824 s->AddBaseObject(Object::empty_subtype_test_cache_array().ptr(),
"Array",
6825 "<empty_subtype_test_cache_array>");
6826 s->AddBaseObject(Object::dynamic_type().ptr(),
"Type",
"<dynamic type>");
6827 s->AddBaseObject(Object::void_type().ptr(),
"Type",
"<void type>");
6828 s->AddBaseObject(Object::empty_type_arguments().ptr(),
"TypeArguments",
6830 s->AddBaseObject(
Bool::True().ptr(),
"bool",
"true");
6831 s->AddBaseObject(
Bool::False().ptr(),
"bool",
"false");
6833 s->AddBaseObject(Object::synthetic_getter_parameter_types().ptr(),
"Array",
6834 "<synthetic getter parameter types>");
6836 s->AddBaseObject(Object::synthetic_getter_parameter_names().ptr(),
"Array",
6837 "<synthetic getter parameter names>");
6838 s->AddBaseObject(Object::empty_context_scope().ptr(),
"ContextScope",
6840 s->AddBaseObject(Object::empty_object_pool().ptr(),
"ObjectPool",
6842 s->AddBaseObject(Object::empty_compressed_stackmaps().ptr(),
6843 "CompressedStackMaps",
"<empty>");
6844 s->AddBaseObject(Object::empty_descriptors().ptr(),
"PcDescriptors",
6846 s->AddBaseObject(Object::empty_var_descriptors().ptr(),
6847 "LocalVarDescriptors",
"<empty>");
6848 s->AddBaseObject(Object::empty_exception_handlers().ptr(),
6849 "ExceptionHandlers",
"<empty>");
6850 s->AddBaseObject(Object::empty_async_exception_handlers().ptr(),
6851 "ExceptionHandlers",
"<empty async>");
6854 s->AddBaseObject(ArgumentsDescriptor::cached_args_descriptors_[
i],
6855 "ArgumentsDescriptor",
"<cached arguments descriptor>");
6858 s->AddBaseObject(ICData::cached_icdata_arrays_[
i],
"Array",
6859 "<empty icdata entries>");
6866 if (
cid != kErrorCid &&
cid != kCallSiteDataCid) {
6871 .NameCString(Object::NameVisibility::kInternalName));
6885 if (should_write_symbols_) {
6886 s->Push(symbols_.
ptr());
6888 for (intptr_t
i = 0;
i < symbols_.
Length();
i++) {
6889 s->Push(symbols_.
At(
i));
6909 if (!should_write_symbols_ &&
s->profile_writer() !=
nullptr) {
6913 s->AssignArtificialRef(symbols_.
ptr());
6914 const auto& symbols_snapshot_id =
s->GetProfileId(symbols_.
ptr());
6915 s->profile_writer()->SetObjectTypeAndName(symbols_snapshot_id,
"Symbols",
6917 s->profile_writer()->AddRoot(symbols_snapshot_id);
6918 for (intptr_t
i = 0;
i < symbols_.
Length();
i++) {
6919 s->profile_writer()->AttributeReferenceTo(
6921 s->GetProfileId(symbols_.
At(
i)));
6928 const bool should_write_symbols_;
6942 d->AddBaseObject(Object::sentinel().ptr());
6943 d->AddBaseObject(Object::transition_sentinel().ptr());
6944 d->AddBaseObject(Object::optimized_out().ptr());
6945 d->AddBaseObject(Object::empty_array().ptr());
6946 d->AddBaseObject(Object::empty_instantiations_cache_array().ptr());
6947 d->AddBaseObject(Object::empty_subtype_test_cache_array().ptr());
6948 d->AddBaseObject(Object::dynamic_type().ptr());
6949 d->AddBaseObject(Object::void_type().ptr());
6950 d->AddBaseObject(Object::empty_type_arguments().ptr());
6954 d->AddBaseObject(Object::synthetic_getter_parameter_types().ptr());
6956 d->AddBaseObject(Object::synthetic_getter_parameter_names().ptr());
6957 d->AddBaseObject(Object::empty_context_scope().ptr());
6958 d->AddBaseObject(Object::empty_object_pool().ptr());
6959 d->AddBaseObject(Object::empty_compressed_stackmaps().ptr());
6960 d->AddBaseObject(Object::empty_descriptors().ptr());
6961 d->AddBaseObject(Object::empty_var_descriptors().ptr());
6962 d->AddBaseObject(Object::empty_exception_handlers().ptr());
6963 d->AddBaseObject(Object::empty_async_exception_handlers().ptr());
6966 d->AddBaseObject(ArgumentsDescriptor::cached_args_descriptors_[
i]);
6969 d->AddBaseObject(ICData::cached_icdata_arrays_[
i]);
6976 if (
cid != kErrorCid &&
cid != kCallSiteDataCid) {
6992 symbol_table_ ^=
d->ReadRef();
6993 if (!symbol_table_.
IsNull()) {
6994 d->isolate_group()->object_store()->set_symbol_table(symbol_table_);
6999 *
code ^=
d->ReadRef();
7009 d->heap()->old_space()->ReleaseBumpAllocation();
7011 if (!symbol_table_.
IsNull()) {
7022#if !defined(DART_PRECOMPILED_RUNTIME)
7024#define DECLARE_OBJECT_STORE_FIELD(Type, Name) #Name,
7034#undef DECLARE_OBJECT_STORE_FIELD
7039#define RESET_ROOT_LIST(V) \
7040 V(symbol_table, WeakArray, HashTables::New<CanonicalStringSet>(4)) \
7041 V(canonical_types, Array, HashTables::New<CanonicalTypeSet>(4)) \
7042 V(canonical_function_types, Array, \
7043 HashTables::New<CanonicalFunctionTypeSet>(4)) \
7044 V(canonical_record_types, Array, HashTables::New<CanonicalRecordTypeSet>(4)) \
7045 V(canonical_type_arguments, Array, \
7046 HashTables::New<CanonicalTypeArgumentsSet>(4)) \
7047 V(canonical_type_parameters, Array, \
7048 HashTables::New<CanonicalTypeParameterSet>(4)) \
7049 ONLY_IN_PRODUCT(ONLY_IN_AOT( \
7050 V(closure_functions, GrowableObjectArray, GrowableObjectArray::null()))) \
7051 ONLY_IN_AOT(V(closure_functions_table, Array, Array::null())) \
7052 ONLY_IN_AOT(V(canonicalized_stack_map_entries, CompressedStackMaps, \
7053 CompressedStackMaps::null()))
7058 : base_objects_(base_objects),
7059 object_store_(object_store),
7061#define ONLY_IN_AOT(code) \
7062 if (snapshot_kind_ == Snapshot::kFullAOT) { \
7065#define SAVE_AND_RESET_ROOT(name, Type, init) \
7067 saved_##name##_ = object_store->name(); \
7068 object_store->set_##name(Type::Handle(init)); \
7072#undef SAVE_AND_RESET_ROOT
7076#define ONLY_IN_AOT(code) \
7077 if (snapshot_kind_ == Snapshot::kFullAOT) { \
7080#define RESTORE_ROOT(name, Type, init) \
7081 object_store_->set_##name(saved_##name##_);
7088 if (base_objects_ ==
nullptr) {
7090 const Array& base_objects = Object::vm_isolate_snapshot_object_table();
7092 s->AddBaseObject(base_objects.
At(
i));
7096 for (intptr_t
i = 0;
i < base_objects_->length();
i++) {
7097 s->AddBaseObject((*base_objects_)[
i]->ptr());
7103 ObjectPtr* from = object_store_->from();
7104 ObjectPtr* to = object_store_->to_snapshot(
s->kind());
7110 s->thread()->isolate_group()->initial_field_table();
7111 for (intptr_t
i = 0, n = initial_field_table->
NumFieldIds();
i < n;
i++) {
7112 s->Push(initial_field_table->
At(
i));
7116 s->thread()->isolate_group()->shared_initial_field_table();
7117 for (intptr_t
i = 0, n = shared_initial_field_table->
NumFieldIds();
i < n;
7119 s->Push(shared_initial_field_table->
At(
i));
7122 dispatch_table_entries_ = object_store_->dispatch_table_code_entries();
7126#if defined(DART_PRECOMPILER)
7131 if (!dispatch_table_entries_.
IsNull()) {
7132 for (intptr_t
i = 0;
i < dispatch_table_entries_.
Length();
i++) {
7133 s->Push(dispatch_table_entries_.
At(
i));
7140 ObjectPtr* from = object_store_->from();
7141 ObjectPtr* to = object_store_->to_snapshot(
s->kind());
7147 s->thread()->isolate_group()->initial_field_table();
7149 s->WriteUnsigned(n);
7150 for (intptr_t
i = 0;
i < n;
i++) {
7151 s->WriteRootRef(initial_field_table->
At(
i),
"some-static-field");
7155 s->thread()->isolate_group()->shared_initial_field_table();
7156 intptr_t n_shared = shared_initial_field_table->
NumFieldIds();
7157 s->WriteUnsigned(n_shared);
7158 for (intptr_t
i = 0;
i < n_shared;
i++) {
7159 s->WriteRootRef(shared_initial_field_table->
At(
i),
7160 "some-shared-static-field");
7164 s->WriteDispatchTable(dispatch_table_entries_);
7168 return saved_canonicalized_stack_map_entries_;
7177#define ONLY_IN_AOT(code) code
7178#define DECLARE_FIELD(name, Type, init) Type& saved_##name##_ = Type::Handle();
7188 : object_store_(object_store) {}
7192 const Array& base_objects = Object::vm_isolate_snapshot_object_table();
7194 d->AddBaseObject(base_objects.
At(
i));
7200 ObjectPtr* from = object_store_->from();
7201 ObjectPtr* to = object_store_->to_snapshot(
d->kind());
7208 d->thread()->isolate_group()->initial_field_table();
7209 intptr_t n =
d->ReadUnsigned();
7211 for (intptr_t
i = 0;
i < n;
i++) {
7212 initial_field_table->
SetAt(
i,
d->ReadRef());
7218 d->thread()->isolate_group()->shared_initial_field_table();
7219 intptr_t n_shared =
d->ReadUnsigned();
7222 for (intptr_t
i = 0;
i < n_shared;
i++) {
7223 shared_initial_field_table->
SetAt(
i,
d->ReadRef());
7229 d->ReadDispatchTable();
7233 auto isolate_group =
d->isolate_group();
7235 isolate_group->class_table()->CopySizesFromClassObjects();
7237 d->heap()->old_space()->EvaluateAfterLoading();
7239 auto object_store = isolate_group->object_store();
7255#if !defined(DART_PRECOMPILED_RUNTIME)
7263 for (intptr_t
i = 0;
i < objects->
length();
i++) {
7264 s->AddBaseObject(objects->
At(
i)->ptr());
7270 ASSERT(deferred_object->IsCode());
7271 CodePtr
code =
static_cast<CodePtr
>(deferred_object->ptr());
7272 ObjectPoolPtr
pool =
code->untag()->object_pool_;
7274 const intptr_t
length =
pool->untag()->length_;
7275 uint8_t* entry_bits =
pool->untag()->entry_bits();
7278 if (entry_type == ObjectPool::EntryType::kTaggedObject) {
7279 s->Push(
pool->untag()->data()[
i].raw_obj_);
7283 s->Push(
code->untag()->code_source_map_);
7288#if defined(DART_PRECOMPILER)
7289 intptr_t start_index = 0;
7291 if (num_deferred_objects != 0) {
7295 s->WriteUnsigned(start_index);
7296 s->WriteUnsigned(num_deferred_objects);
7297 for (intptr_t
i = 0;
i < num_deferred_objects;
i++) {
7299 ASSERT(deferred_object->IsCode());
7300 CodePtr
code =
static_cast<CodePtr
>(deferred_object->
ptr());
7303 s->WriteInstructions(
code->untag()->instructions_,
7304 code->untag()->unchecked_offset_,
code,
false);
7305 s->WriteRootRef(
code->untag()->code_source_map_,
"deferred-code");
7308 ObjectPoolPtr
pool =
7309 s->isolate_group()->object_store()->global_object_pool();
7310 const intptr_t
length =
pool->untag()->length_;
7311 uint8_t* entry_bits =
pool->untag()->entry_bits();
7312 intptr_t last_write = 0;
7315 if (entry_type == ObjectPool::EntryType::kTaggedObject) {
7316 if (
s->IsWritten(
pool->untag()->data()[
i].raw_obj_)) {
7317 intptr_t
skip =
i - last_write;
7318 s->WriteUnsigned(
skip);
7319 s->WriteRootRef(
pool->untag()->data()[
i].raw_obj_,
7320 "deferred-literal");
7325 s->WriteUnsigned(
length - last_write);
7339 const Array& base_objects =
7342 d->AddBaseObject(base_objects.
At(
i));
7347 deferred_start_index_ =
d->ReadUnsigned();
7348 deferred_stop_index_ = deferred_start_index_ +
d->ReadUnsigned();
7349 for (intptr_t
id = deferred_start_index_;
id < deferred_stop_index_;
id++) {
7350 CodePtr
code =
static_cast<CodePtr
>(
d->Ref(
id));
7352 d->ReadInstructions(
code,
false);
7353 if (
code->untag()->owner_->IsHeapObject() &&
7354 code->untag()->owner_->IsFunction()) {
7355 FunctionPtr func =
static_cast<FunctionPtr
>(
code->untag()->owner_);
7356 uword entry_point =
code->untag()->entry_point_;
7357 ASSERT(entry_point != 0);
7358 func->untag()->entry_point_ = entry_point;
7359 uword unchecked_entry_point =
code->untag()->unchecked_entry_point_;
7360 ASSERT(unchecked_entry_point != 0);
7361 func->untag()->unchecked_entry_point_ = unchecked_entry_point;
7362#if defined(DART_PRECOMPILED_RUNTIME)
7363 if (func->untag()->data()->IsHeapObject() &&
7364 func->untag()->data()->IsClosureData()) {
7367 auto data =
static_cast<ClosureDataPtr
>(func->untag()->data());
7371 data->untag()->closure()->untag()->entry_point_ = entry_point;
7376 code->untag()->code_source_map_ =
7377 static_cast<CodeSourceMapPtr
>(
d->ReadRef());
7380 ObjectPoolPtr
pool =
7381 d->isolate_group()->object_store()->global_object_pool();
7382 const intptr_t
length =
pool->untag()->length_;
7383 uint8_t* entry_bits =
pool->untag()->entry_bits();
7384 for (intptr_t
i =
d->ReadUnsigned();
i <
length;
i +=
d->ReadUnsigned()) {
7386 ASSERT(entry_type == ObjectPool::EntryType::kTaggedObject);
7389 pool->untag()->data()[
i].raw_obj_ =
d->ReadRef();
7394 auto isolate_group =
d->isolate_group();
7395 if (isolate_group->dispatch_table_snapshot() !=
nullptr) {
7397 isolate_group->dispatch_table_snapshot_size());
7399 isolate_group->object_store()->instructions_tables());
7401 root_table ^= tables.
At(0);
7402 d->ReadDispatchTable(&
stream,
true, root_table,
7403 deferred_start_index_, deferred_stop_index_);
7408 d->EndInstructions();
7414 intptr_t deferred_start_index_;
7415 intptr_t deferred_stop_index_;
7419static constexpr int32_t kSectionMarker = 0xABAB;
7429 heap_(thread->isolate_group()->heap()),
7430 zone_(thread->zone()),
7433 image_writer_(image_writer),
7434 canonical_clusters_by_cid_(nullptr),
7435 clusters_by_cid_(nullptr),
7439 num_base_objects_(0),
7440 num_written_objects_(0),
7443 profile_writer_(profile_writer)
7444#
if defined(SNAPSHOT_BACKTRACE)
7446 current_parent_(
Object::null()),
7449#
if defined(DART_PRECOMPILER)
7451 deduped_instructions_sources_(zone_)
7457 for (intptr_t
i = 0;
i < num_cids_;
i++) {
7458 canonical_clusters_by_cid_[
i] =
nullptr;
7461 for (intptr_t
i = 0;
i < num_cids_;
i++) {
7462 clusters_by_cid_[
i] =
nullptr;
7464 if (profile_writer_ !=
nullptr) {
7470 delete[] canonical_clusters_by_cid_;
7471 delete[] clusters_by_cid_;
7478 const bool is_discarded_code = base_object->IsHeapObject() &&
7479 base_object->IsCode() &&
7481 if (!is_discarded_code) {
7484 num_base_objects_++;
7486 if ((profile_writer_ !=
nullptr) && (
type !=
nullptr)) {
7489 profile_writer_->
AddRoot(profile_id);
7498 ASSERT(!object->IsHeapObject() || !object->IsInstructions());
7504 return next_ref_index_++;
7508 const intptr_t ref = -(next_ref_index_++);
7510 if (
object !=
nullptr) {
7511 ASSERT(!
object.IsHeapObject() || !
object.IsInstructions());
7519void Serializer::FlushProfile() {
7520 if (profile_writer_ ==
nullptr)
return;
7521 const intptr_t bytes =
7522 stream_->
Position() - object_currently_writing_.last_stream_position_;
7524 object_currently_writing_.last_stream_position_ = stream_->
Position();
7530 ASSERT(!object->IsHeapObject() || !object->IsInstructions());
7535 intptr_t heap_id)
const {
7546 if (profile_writer_ ==
nullptr)
return;
7548#if defined(DART_PRECOMPILER)
7549 if (object->IsHeapObject() && object->IsWeakSerializationReference()) {
7551 auto const target = wsr->untag()->target();
7553 if (object_id != target_id) {
7554 const auto& replacement_id =
GetProfileId(wsr->untag()->replacement());
7555 ASSERT(object_id == replacement_id);
7558 profile_writer_->AttributeDroppedReferenceTo(
7559 object_currently_writing_.id_, reference, target_id, replacement_id);
7569 reference, object_id);
7576 : serializer_(serializer),
7577 old_object_(serializer->object_currently_writing_.object_),
7578 old_id_(serializer->object_currently_writing_.id_),
7579 old_cid_(serializer->object_currently_writing_.cid_) {
7580 if (serializer_->profile_writer_ ==
nullptr)
return;
7584 serializer_->FlushProfile();
7585 serializer_->object_currently_writing_.object_ = object;
7586 serializer_->object_currently_writing_.id_ =
id;
7587 serializer_->object_currently_writing_.cid_ =
7588 object ==
nullptr ? -1 :
object->GetClassIdMayBeSmi();
7592 if (serializer_->profile_writer_ ==
nullptr)
return;
7593 serializer_->FlushProfile();
7594 serializer_->object_currently_writing_.object_ = old_object_;
7595 serializer_->object_currently_writing_.id_ = old_id_;
7596 serializer_->object_currently_writing_.cid_ = old_cid_;
7604 if (
s->profile_writer_ ==
nullptr) {
7607 if (
name ==
nullptr) {
7619 case kOneByteStringCid:
7620 case kTwoByteStringCid: {
7626 const auto& obj_id =
s->GetProfileId(obj);
7627 s->profile_writer_->SetObjectTypeAndName(obj_id,
type,
name);
7631#if !defined(DART_PRECOMPILED_RUNTIME)
7641 if (obj->IsHeapObject() && obj->IsWeakSerializationReference()) {
7652 const char*
type =
nullptr;
7653 const char*
name =
nullptr;
7663 case kObjectPoolCid: {
7664 type =
"ObjectPool";
7666 for (intptr_t
i = 0;
i <
pool->untag()->length_;
i++) {
7667 uint8_t
bits =
pool->untag()->entry_bits()[
i];
7669 ObjectPool::EntryType::kTaggedObject) {
7670 auto const elem =
pool->untag()->data()[
i].raw_obj_;
7679 case kImmutableArrayCid:
7683 for (intptr_t
i = 0, n =
Smi::Value(array->untag()->length());
i < n;
7695 links.
Add({
code->untag()->owner(),
7699 case kFunctionCid: {
7700 FunctionPtr func =
static_cast<FunctionPtr
>(obj);
7704 links.
Add({func->untag()->owner(),
7707 if (
data->GetClassId() == kClosureDataCid) {
7713 case kClosureDataCid: {
7714 auto data =
static_cast<ClosureDataPtr
>(obj);
7715 type =
"ClosureData";
7717 {
data->untag()->parent_function(),
7722 ClassPtr cls =
static_cast<ClassPtr
>(obj);
7725 links.
Add({cls->untag()->library(),
7729 case kPatchClassCid: {
7730 PatchClassPtr patch_cls =
static_cast<PatchClassPtr
>(obj);
7731 type =
"PatchClass";
7733 {patch_cls->untag()->wrapped_class(),
7738 LibraryPtr lib =
static_cast<LibraryPtr
>(obj);
7743 case kFunctionTypeCid: {
7744 type =
"FunctionType";
7747 case kRecordTypeCid: {
7748 type =
"RecordType";
7752 FATAL(
"Request to create artificial node for object with cid %d",
cid);
7757 for (
const auto&
link : links) {
7772 auto& handle =
thread()->ObjectHandle();
7774 FATAL(
"Reference to unreachable object %s", handle.ToCString());
7780 ASSERT(!object->IsHeapObject() || !object->IsInstructions());
7791 if (object->IsWeakSerializationReference()) {
7794 auto const wsr =
static_cast<WeakSerializationReferencePtr
>(object);
7797 id =
HasRef(wsr->untag()->target()) ?
RefId(wsr->untag()->target())
7798 :
RefId(wsr->untag()->replacement());
7803 auto& handle =
thread()->ObjectHandle();
7805 FATAL(
"Reference for object %s is unallocated", handle.ToCString());
7808const char* Serializer::ReadOnlyObjectType(intptr_t
cid) {
7810 case kPcDescriptorsCid:
7811 return "PcDescriptors";
7812 case kCodeSourceMapCid:
7813 return "CodeSourceMap";
7814 case kCompressedStackMapsCid:
7815 return "CompressedStackMaps";
7820 case kOneByteStringCid:
7822 ?
"OneByteStringCid"
7824 case kTwoByteStringCid:
7826 ?
"TwoByteStringCid"
7834 bool is_canonical) {
7835#if defined(DART_PRECOMPILED_RUNTIME)
7854#if !defined(DART_COMPRESSED_POINTERS)
7864 if (
auto const type = ReadOnlyObjectType(
cid)) {
7870 const bool cluster_represents_canonical_set =
7876 case kTypeParametersCid:
7878 case kTypeArgumentsCid:
7880 is_canonical, cluster_represents_canonical_set);
7881 case kPatchClassCid:
7885 case kClosureDataCid:
7887 case kFfiTrampolineDataCid:
7897 case kKernelProgramInfoCid:
7901 case kObjectPoolCid:
7903 case kPcDescriptorsCid:
7905 case kCodeSourceMapCid:
7907 case kCompressedStackMapsCid:
7909 case kExceptionHandlersCid:
7913 case kContextScopeCid:
7915 case kUnlinkedCallCid:
7919 case kMegamorphicCacheCid:
7921 case kSubtypeTestCacheCid:
7923 case kLoadingUnitCid:
7925 case kLanguageErrorCid:
7927 case kUnhandledExceptionCid:
7929 case kLibraryPrefixCid:
7933 cluster_represents_canonical_set);
7934 case kFunctionTypeCid:
7936 is_canonical, cluster_represents_canonical_set);
7937 case kRecordTypeCid:
7939 is_canonical, cluster_represents_canonical_set);
7940 case kTypeParameterCid:
7942 is_canonical, cluster_represents_canonical_set);
7953 case kGrowableObjectArrayCid:
7957 case kStackTraceCid:
7961 case kWeakPropertyCid:
7975 case kImmutableArrayCid:
7982 is_canonical, cluster_represents_canonical_set && !vm_);
7983#define CASE_FFI_CID(name) case kFfi##name##Cid:
7987 case kDeltaEncodedTypedDataCid:
7989 case kWeakSerializationReferenceCid:
7990#if defined(DART_PRECOMPILER)
7992 return new (
Z) WeakSerializationReferenceSerializationCluster();
8005 if (loading_units_ ==
nullptr)
return true;
8009 FATAL(
"Missing loading unit assignment: %s\n",
8018 (*loading_units_)[unit_id]->AddDeferredObject(
code);
8021#if !defined(DART_PRECOMPILED_RUNTIME)
8022#if defined(DART_PRECOMPILER)
8064 if (
code == StubCode::LazyCompile().ptr() && !vm_) {
8066 }
else if (FLAG_precompiled_mode) {
8070 const intptr_t
base =
8073 : num_base_objects_;
8086 ref <= code_cluster_->last_ref()));
8090 if (ref < code_cluster_->first_deferred_ref()) {
8091 const intptr_t
key =
static_cast<intptr_t
>(
code->untag()->instructions_);
8093 const intptr_t
result = code_index_.Lookup(
key);
8101 return 1 +
base + code_index_.Length() + cluster_index;
8107 ref <= code_cluster_->last_ref());
8108 return 1 + (ref - code_cluster_->
first_ref());
8123 if (code_cluster_ !=
nullptr) {
8126 if ((loading_units_ !=
nullptr) &&
8130 auto unit_objects = loading_units_->At(
i)->deferred_objects();
8132 ASSERT(unit_objects->length() == 0 || code_cluster_ !=
nullptr);
8133 for (intptr_t j = 0; j < unit_objects->length(); j++) {
8139#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
8148 if (code_cluster_ !=
nullptr) {
8149 auto in = code_cluster_->
objects();
8150 for (intptr_t
i = 0;
i < in->length();
i++) {
8151 code_objects.
Add(in->At(
i));
8154 if (loading_units_ !=
nullptr) {
8156 loading_units_->At(current_loading_unit_id_)->deferred_objects();
8157 for (intptr_t
i = 0;
i < in->length();
i++) {
8158 code_objects.
Add(in->At(
i)->ptr());
8163 RelocateCodeObjects(vm_, &code_objects, &writer_commands);
8166 if (code_objects.
length() == 0) {
8179 CompressedStackMapsPtr
map;
8194 intptr_t not_discarded_count = 0;
8195 uint32_t first_entry_with_code = 0;
8196 for (
auto&
cmd : writer_commands) {
8199 cmd.insert_instruction_of_code.code);
8201 (not_discarded_count == 0));
8203 if (not_discarded_count == 0) {
8204 first_entry_with_code = total;
8206 not_discarded_count++;
8212 const intptr_t instr =
static_cast<intptr_t
>(
8213 cmd.insert_instruction_of_code.code->untag()->instructions_);
8214 ASSERT(!code_index_.HasKey(instr));
8215 code_index_.Insert(instr, total);
8219 CompressedStackMapsPtr stack_map =
8220 cmd.insert_instruction_of_code.code->untag()->compressed_stackmaps_;
8221 const intptr_t
key =
static_cast<intptr_t
>(stack_map);
8224 stack_maps_info.
Lookup(
key)->use_count++;
8226 auto info =
new StackMapInfo();
8227 info->map = stack_map;
8228 info->use_count = 1;
8234 ASSERT(
static_cast<intptr_t
>(total) == code_index_.Length());
8235 instructions_table_len_ = not_discarded_count;
8239 stack_maps.
Sort([](StackMapInfo*
const*
a, StackMapInfo*
const*
b) {
8240 if ((*a)->use_count < (*b)->use_count)
return 1;
8241 if ((*a)->use_count > (*b)->use_count)
return -1;
8253 header.first_entry_with_code = first_entry_with_code;
8258 for (
auto&
cmd : writer_commands) {
8260 pc_mapping.
WriteFixed<UntaggedInstructionsTable::DataEntry>({0, 0});
8265 auto write_stack_map = [&](CompressedStackMapsPtr smap) {
8266 const auto flags_and_size = smap->untag()->payload()->flags_and_size();
8267 const auto payload_size =
8269 pc_mapping.
WriteFixed<uint32_t>(flags_and_size);
8270 pc_mapping.
WriteBytes(smap->untag()->payload()->data(), payload_size);
8273 for (
auto sm : stack_maps) {
8275 write_stack_map(sm->map);
8279 if (!canonical_stack_map_entries.
IsNull()) {
8283 write_stack_map(canonical_stack_map_entries.
ptr());
8291 for (
auto&
cmd : writer_commands) {
8293 CompressedStackMapsPtr smap =
8294 cmd.insert_instruction_of_code.code->untag()->compressed_stackmaps_;
8296 stack_maps_info.
Lookup(
static_cast<intptr_t
>(smap))->offset;
8299 cmd.insert_instruction_of_code.code);
8301 pc_mapping.
WriteFixed<UntaggedInstructionsTable::DataEntry>(
8302 {
static_cast<uint32_t
>(entry),
offset});
8311 instructions_table_rodata_offset_ =
8314 if (profile_writer_ !=
nullptr) {
8319 "<instructions-table-rodata>"),
8320 {offset_space, instructions_table_rodata_offset_});
8327 uint32_t unchecked_offset,
8338#if defined(DART_PRECOMPILER)
8339 if (profile_writer_ !=
nullptr) {
8340 ASSERT(object_currently_writing_.id_ !=
8344 object_currently_writing_.id_,
8346 {offset_space, offset});
8355 if (FLAG_precompiled_mode) {
8356 const uint32_t payload_info =
8367 if (profile_writer_ ==
nullptr)
return;
8369 ASSERT(object_currently_writing_.id_ !=
8375 object_currently_writing_.id_,
8380#if defined(SNAPSHOT_BACKTRACE)
8388 if (image_writer_ ==
nullptr) {
8396 const bool is_code =
object->IsHeapObject() &&
object->IsCode();
8407 if (object->IsHeapObject() && object->IsInstructions()) {
8409 "Instructions should only be reachable from Code");
8414 stack_.
Add({object, cid_override});
8416 num_written_objects_++;
8418#if defined(SNAPSHOT_BACKTRACE)
8432 if (!object->IsHeapObject() || vm_) {
8440 if (!object->IsHeapObject()) {
8444 is_canonical =
true;
8446 cid =
object->GetClassId();
8447 is_canonical =
object->untag()->IsCanonical();
8456 is_canonical ? &canonical_clusters_by_cid_[
cid] : &clusters_by_cid_[
cid];
8457 if (*cluster_ref ==
nullptr) {
8459 if (*cluster_ref ==
nullptr) {
8464 ASSERT(cluster !=
nullptr);
8466 FATAL(
"cluster for %s (cid %" Pd ") %s as canonical, but %s",
8469 is_canonical ?
"should be" :
"should not be");
8472#if defined(SNAPSHOT_BACKTRACE)
8473 current_parent_ = object;
8476 cluster->
Trace(
this,
object);
8478#if defined(SNAPSHOT_BACKTRACE)
8485 while (
thread()->no_safepoint_scope_depth() > 0) {
8491 object.ToCString());
8492#if defined(SNAPSHOT_BACKTRACE)
8493 while (!
object.
IsNull()) {
8494 object = ParentOf(
object);
8496 static_cast<uword>(
object.ptr()),
object.ToCString());
8502#if defined(SNAPSHOT_BACKTRACE)
8504 for (intptr_t
i = 0;
i < parent_pairs_.length();
i += 2) {
8505 if (parent_pairs_[
i]->ptr() ==
object) {
8506 return parent_pairs_[
i + 1]->ptr();
8512ObjectPtr Serializer::ParentOf(
const Object&
object)
const {
8513 for (intptr_t
i = 0;
i < parent_pairs_.length();
i += 2) {
8514 if (parent_pairs_[
i]->ptr() ==
object.ptr()) {
8515 return parent_pairs_[
i + 1]->ptr();
8524 ASSERT(expected_version !=
nullptr);
8525 const intptr_t version_len = strlen(expected_version);
8526 WriteBytes(
reinterpret_cast<const uint8_t*
>(expected_version), version_len);
8528 char* expected_features =
8530 ASSERT(expected_features !=
nullptr);
8531 const intptr_t features_len = strlen(expected_features);
8532 WriteBytes(
reinterpret_cast<const uint8_t*
>(expected_features),
8534 free(expected_features);
8537#if !defined(DART_PRECOMPILED_RUNTIME)
8540 if ((*a)->size() > (*b)->size()) {
8542 }
else if ((*a)->size() < (*b)->size()) {
8549#define CID_CLUSTER(Type) \
8550 reinterpret_cast<Type##SerializationCluster*>(clusters_by_cid_[k##Type##Cid])
8562 roots->AddBaseObjects(
this);
8566 roots->PushRoots(
this);
8585 while (stack_.
length() > 0) {
8587 while (stack_.
length() > 0) {
8589 Trace(entry.obj, entry.cid_override);
8593#if defined(DART_PRECOMPILER)
8595 cluster->RetraceEphemerons(
this);
8599 cluster->RetraceEphemerons(
this);
8603#if defined(DART_PRECOMPILER)
8605 if (wsr_cluster !=
nullptr) {
8608 num_written_objects_ -= wsr_cluster->Count(
this);
8610 clusters_by_cid_[kWeakSerializationReferenceCid] =
nullptr;
8612 ASSERT(clusters_by_cid_[kWeakSerializationReferenceCid] ==
nullptr);
8622#define ADD_CANONICAL_NEXT(cid) \
8623 if (auto const cluster = canonical_clusters_by_cid_[cid]) { \
8624 clusters.Add(cluster); \
8625 canonical_clusters_by_cid_[cid] = nullptr; \
8627#define ADD_NON_CANONICAL_NEXT(cid) \
8628 if (auto const cluster = clusters_by_cid_[cid]) { \
8629 clusters.Add(cluster); \
8630 clusters_by_cid_[cid] = nullptr; \
8650#undef ADD_CANONICAL_NEXT
8651#undef ADD_NON_CANONICAL_NEXT
8652 const intptr_t out_of_order_clusters = clusters.
length();
8653 for (intptr_t
cid = 0;
cid < num_cids_;
cid++) {
8654 if (
auto const cluster = canonical_clusters_by_cid_[
cid]) {
8655 clusters.
Add(cluster);
8658 for (intptr_t
cid = 0;
cid < num_cids_;
cid++) {
8659 if (
auto const cluster = clusters_by_cid_[
cid]) {
8660 clusters.
Add(clusters_by_cid_[
cid]);
8664 for (intptr_t
i = 0;
i < out_of_order_clusters;
i++) {
8665 const auto& cluster = clusters.
At(
i);
8666 const intptr_t
cid = cluster->cid();
8667 auto const cid_clusters =
8668 cluster->is_canonical() ? canonical_clusters_by_cid_ : clusters_by_cid_;
8670 cid_clusters[
cid] = cluster;
8675 intptr_t num_objects = num_base_objects_ + num_written_objects_;
8676#if defined(ARCH_IS_64_BIT)
8678 FATAL(
"Ref overflow");
8685 ASSERT((instructions_table_len_ == 0) || FLAG_precompiled_mode);
8690 cluster->WriteAndMeasureAlloc(
this);
8691 bytes_heap_allocated_ += cluster->target_memory_size();
8693 Write<int32_t>(next_ref_index_);
8698 ASSERT((next_ref_index_ - 1) == num_objects);
8700 ASSERT(objects_->length() == num_objects);
8702#if defined(DART_PRECOMPILER)
8703 if (profile_writer_ !=
nullptr && wsr_cluster !=
nullptr) {
8711 wsr_cluster->CreateArtificialTargetNodesIfNeeded(
this);
8716 cluster->WriteAndMeasureFill(
this);
8718 Write<int32_t>(kSectionMarker);
8722 roots->WriteRoots(
this);
8725 Write<int32_t>(kSectionMarker);
8736#if defined(DART_PRECOMPILER) || defined(DART_PRECOMPILED_RUNTIME)
8749static constexpr intptr_t kDispatchTableSpecialEncodingBits = 6;
8750static constexpr intptr_t kDispatchTableRecentCount =
8751 1 << kDispatchTableSpecialEncodingBits;
8752static constexpr intptr_t kDispatchTableRecentMask =
8753 (1 << kDispatchTableSpecialEncodingBits) - 1;
8754static constexpr intptr_t kDispatchTableMaxRepeat =
8755 (1 << kDispatchTableSpecialEncodingBits) - 1;
8756static constexpr intptr_t kDispatchTableIndexBase = kDispatchTableMaxRepeat + 1;
8760#if defined(DART_PRECOMPILER)
8768 const auto& dispatch_table_profile_id =
GetProfileId(profile_ref);
8769 if (profile_writer_ !=
nullptr) {
8771 "DispatchTable",
"dispatch_table");
8772 profile_writer_->
AddRoot(dispatch_table_profile_id);
8775 if (profile_writer_ !=
nullptr) {
8784 const intptr_t table_length = entries.
IsNull() ? 0 : entries.
Length();
8788 if (table_length == 0) {
8793 ASSERT(code_cluster_ !=
nullptr);
8810 CodePtr previous_code =
nullptr;
8811 CodePtr recent[kDispatchTableRecentCount] = {
nullptr};
8812 intptr_t recent_index = 0;
8813 intptr_t repeat_count = 0;
8814 for (intptr_t
i = 0;
i < table_length;
i++) {
8818 if (
code == previous_code) {
8819 if (++repeat_count == kDispatchTableMaxRepeat) {
8820 Write(kDispatchTableMaxRepeat);
8826 if (repeat_count > 0) {
8827 Write(repeat_count);
8830 previous_code =
code;
8839 intptr_t found_index = 0;
8840 for (; found_index < kDispatchTableRecentCount; found_index++) {
8841 if (recent[found_index] ==
code)
break;
8843 if (found_index < kDispatchTableRecentCount) {
8844 Write(~found_index);
8851 auto const encoded = kDispatchTableIndexBase + code_index;
8854 recent[recent_index] =
code;
8855 recent_index = (recent_index + 1) & kDispatchTableRecentMask;
8857 if (repeat_count > 0) {
8858 Write(repeat_count);
8865#if !defined(DART_PRECOMPILED_RUNTIME)
8866 if (FLAG_print_snapshot_sizes_verbose) {
8869 buffer.Printf(
"%25s",
"Cluster");
8870 buffer.Printf(
" %6s",
"Objs");
8871 buffer.Printf(
" %8s",
"Size");
8872 buffer.Printf(
" %8s",
"Fraction");
8873 buffer.Printf(
" %10s",
"Cumulative");
8874 buffer.Printf(
" %8s",
"HeapSize");
8875 buffer.Printf(
" %5s",
"Cid");
8876 buffer.Printf(
" %9s",
"Canonical");
8879 for (intptr_t
cid = 1;
cid < num_cids_;
cid++) {
8880 if (
auto const cluster = canonical_clusters_by_cid_[
cid]) {
8881 clusters_by_size.
Add(cluster);
8883 if (
auto const cluster = clusters_by_cid_[
cid]) {
8884 clusters_by_size.
Add(cluster);
8887 intptr_t text_size = 0;
8888 if (image_writer_ !=
nullptr) {
8891 intptr_t trampoline_count, trampoline_size;
8893 auto const instructions_count = text_object_count - trampoline_count;
8894 auto const instructions_size = text_size - trampoline_size;
8897 instructions_count, instructions_size));
8898 if (trampoline_size > 0) {
8901 trampoline_count, trampoline_size));
8907 if (dispatch_table_size_ > 0) {
8910 isolate_group()->object_store()->dispatch_table_code_entries());
8911 auto const entry_count =
8912 dispatch_table_entries.IsNull() ? 0 : dispatch_table_entries.Length();
8914 "DispatchTable", entry_count, dispatch_table_size_));
8916 if (instructions_table_len_ > 0) {
8917 const intptr_t memory_size =
8921 "InstructionsTable", instructions_table_len_, 0, memory_size));
8926 double cumulative_fraction = 0.0;
8927 for (intptr_t
i = 0;
i < clusters_by_size.
length();
i++) {
8929 double fraction =
static_cast<double>(cluster->
size()) /
total_size;
8930 cumulative_fraction += fraction;
8934 buffer.Printf(
" %1.6lf", fraction);
8935 buffer.Printf(
" %1.8lf", cumulative_fraction);
8937 if (cluster->
cid() != -1) {
8940 buffer.Printf(
" %5s",
"");
8943 buffer.Printf(
" %9s",
"canonical");
8945 buffer.Printf(
" %9s",
"");
8958 const uint8_t* data_buffer,
8959 const uint8_t* instructions_buffer,
8960 bool is_non_root_unit,
8964 old_space_(heap_->old_space()),
8965 freelist_(old_space_->DataFreeList()),
8969 image_reader_(nullptr),
8973 is_non_root_unit_(is_non_root_unit),
8976 ASSERT(instructions_buffer !=
nullptr);
8977 ASSERT(data_buffer !=
nullptr);
8978 image_reader_ =
new (zone_)
ImageReader(data_buffer, instructions_buffer);
8988 const uint32_t tags = Read<uint32_t>();
8995 cid, is_canonical, is_immutable, !is_non_root_unit_);
9010#if !defined(DART_COMPRESSED_POINTERS)
9013 case kPcDescriptorsCid:
9014 case kCodeSourceMapCid:
9015 case kCompressedStackMapsCid:
9018 case kOneByteStringCid:
9019 case kTwoByteStringCid:
9021 if (!is_non_root_unit_) {
9023 !is_non_root_unit_);
9034 case kTypeParametersCid:
9036 case kTypeArgumentsCid:
9039 case kPatchClassCid:
9045 case kClosureDataCid:
9048 case kFfiTrampolineDataCid:
9063#if !defined(DART_PRECOMPILED_RUNTIME)
9064 case kKernelProgramInfoCid:
9071 case kObjectPoolCid:
9074 case kPcDescriptorsCid:
9077 case kCodeSourceMapCid:
9080 case kCompressedStackMapsCid:
9083 case kExceptionHandlersCid:
9089 case kContextScopeCid:
9092 case kUnlinkedCallCid:
9098 case kMegamorphicCacheCid:
9101 case kSubtypeTestCacheCid:
9104 case kLoadingUnitCid:
9107 case kLanguageErrorCid:
9110 case kUnhandledExceptionCid:
9113 case kLibraryPrefixCid:
9119 case kFunctionTypeCid:
9122 case kRecordTypeCid:
9125 case kTypeParameterCid:
9142 case kGrowableObjectArrayCid:
9148 case kStackTraceCid:
9154 case kWeakPropertyCid:
9162 !is_non_root_unit_);
9168 !is_non_root_unit_);
9171 !is_non_root_unit_);
9172 case kImmutableArrayCid:
9174 kImmutableArrayCid, is_canonical, !is_non_root_unit_);
9181#define CASE_FFI_CID(name) case kFfi##name##Cid:
9185 cid, is_canonical, is_immutable, !is_non_root_unit_);
9186 case kDeltaEncodedTypedDataCid:
9191 FATAL(
"No cluster defined for cid %" Pd,
cid);
9199 intptr_t deferred_code_start_index,
9200 intptr_t deferred_code_end_index) {
9201#if defined(DART_PRECOMPILED_RUNTIME)
9202 const uint8_t* table_snapshot_start =
stream->AddressOfCurrentPosition();
9206 const intptr_t first_code_id =
stream->ReadUnsigned();
9207 deferred_code_start_index -= first_code_id;
9208 deferred_code_end_index -= first_code_id;
9211 auto code =
IG->object_store()->dispatch_table_null_error_stub();
9220 ASSERT(
IG->dispatch_table() ==
nullptr);
9223 auto const array =
table->array();
9225 uword recent[kDispatchTableRecentCount] = {0};
9226 intptr_t recent_index = 0;
9227 intptr_t repeat_count = 0;
9229 if (repeat_count > 0) {
9234 auto const encoded =
stream->Read<intptr_t>();
9237 }
else if (encoded < 0) {
9238 intptr_t r = ~encoded;
9239 ASSERT(r < kDispatchTableRecentCount);
9241 }
else if (encoded <= kDispatchTableMaxRepeat) {
9242 repeat_count = encoded - 1;
9244 const intptr_t code_index = encoded - kDispatchTableIndexBase;
9246 const intptr_t code_id =
9248 if ((deferred_code_start_index <= code_id) &&
9249 (code_id < deferred_code_end_index)) {
9250 auto code =
static_cast<CodePtr
>(
Ref(first_code_id + code_id));
9259 recent[recent_index] =
value;
9260 recent_index = (recent_index + 1) & kDispatchTableRecentMask;
9264 ASSERT(repeat_count == 0);
9267 IG->set_dispatch_table(
table);
9268 intptr_t table_snapshot_size =
9269 stream->AddressOfCurrentPosition() - table_snapshot_start;
9270 IG->set_dispatch_table_snapshot(table_snapshot_start);
9271 IG->set_dispatch_table_snapshot_size(table_snapshot_size);
9277 if (image_reader_ !=
nullptr) {
9285 auto prev_position = stream_.
Position();
9286 char*
error = VerifyVersion();
9287 if (
error ==
nullptr) {
9288 const char* features =
nullptr;
9289 intptr_t features_length = 0;
9290 char*
error = ReadFeatures(&features, &features_length);
9291 if (
error ==
nullptr) {
9292 if (strstr(features,
" no-coverage") !=
nullptr) {
9294 }
else if (strstr(features,
" coverage") !=
nullptr) {
9306 char*
error = VerifyVersion();
9307 if (
error ==
nullptr) {
9308 error = VerifyFeatures(isolate_group);
9310 if (
error ==
nullptr) {
9316char* SnapshotHeaderReader::VerifyVersion() {
9321 ASSERT(expected_version !=
nullptr);
9322 const intptr_t version_len = strlen(expected_version);
9324 const intptr_t kMessageBufferSize = 128;
9325 char message_buffer[kMessageBufferSize];
9327 "No full snapshot version found, expected '%s'",
9329 return BuildError(message_buffer);
9335 if (strncmp(
version, expected_version, version_len) != 0) {
9336 const intptr_t kMessageBufferSize = 256;
9337 char message_buffer[kMessageBufferSize];
9340 "Wrong %s snapshot version, expected '%s' found '%s'",
9342 expected_version, actual_version);
9343 free(actual_version);
9344 return BuildError(message_buffer);
9351char* SnapshotHeaderReader::VerifyFeatures(IsolateGroup* isolate_group) {
9352 const char* expected_features =
9354 ASSERT(expected_features !=
nullptr);
9355 const intptr_t expected_len = strlen(expected_features);
9357 const char* features =
nullptr;
9358 intptr_t features_length = 0;
9360 auto error = ReadFeatures(&features, &features_length);
9361 if (
error !=
nullptr) {
9365 if (features_length != expected_len ||
9366 (strncmp(features, expected_features, expected_len) != 0)) {
9367 const intptr_t kMessageBufferSize = 1024;
9368 char message_buffer[kMessageBufferSize];
9370 features, features_length < 1024 ? features_length : 1024);
9372 "Snapshot not compatible with the current VM configuration: "
9373 "the snapshot requires '%s' but the VM has '%s'",
9374 actual_features, expected_features);
9375 free(
const_cast<char*
>(expected_features));
9376 free(actual_features);
9377 return BuildError(message_buffer);
9379 free(
const_cast<char*
>(expected_features));
9383char* SnapshotHeaderReader::ReadFeatures(
const char** features,
9384 intptr_t* features_length) {
9385 const char* cursor =
9390 "The features string in the snapshot was not '\\0'-terminated.");
9393 *features_length =
length;
9398char* SnapshotHeaderReader::BuildError(
const char*
message) {
9402ApiErrorPtr FullSnapshotReader::ConvertToApiError(
char*
message) {
9414#if defined(DART_PRECOMPILED_RUNTIME)
9416 uword entry_point = StubCode::NotLoaded().EntryPoint();
9417 code->untag()->entry_point_ = entry_point;
9418 code->untag()->unchecked_entry_point_ = entry_point;
9419 code->untag()->monomorphic_entry_point_ = entry_point;
9420 code->untag()->monomorphic_unchecked_entry_point_ = entry_point;
9421 code->untag()->instructions_length_ = 0;
9426 instructions_table_.
rodata()->first_entry_with_code +
9427 instructions_index_);
9429 const uint32_t unchecked_offset = payload_info >> 1;
9430 const bool has_monomorphic_entrypoint = (payload_info & 0x1) == 0x1;
9432 const uword entry_offset =
9433 has_monomorphic_entrypoint ? Instructions::kPolymorphicEntryOffsetAOT : 0;
9434 const uword monomorphic_entry_offset =
9435 has_monomorphic_entrypoint ? Instructions::kMonomorphicEntryOffsetAOT : 0;
9437 const uword entry_point = payload_start + entry_offset;
9438 const uword monomorphic_entry_point =
9439 payload_start + monomorphic_entry_offset;
9445 code->untag()->entry_point_ = entry_point;
9446 code->untag()->unchecked_entry_point_ = entry_point + unchecked_offset;
9447 code->untag()->monomorphic_entry_point_ = monomorphic_entry_point;
9448 code->untag()->monomorphic_unchecked_entry_point_ =
9449 monomorphic_entry_point + unchecked_offset;
9454 code->untag()->instructions_ = instr;
9455 code->untag()->unchecked_offset_ = unchecked_offset;
9457 const uint32_t active_offset = Read<uint32_t>();
9460 code->untag()->active_instructions_ = instr;
9461 Code::InitializeCachedEntryPointsFrom(
code, instr, unchecked_offset);
9466#if defined(DART_PRECOMPILED_RUNTIME)
9467 if (instructions_table_.
IsNull()) {
9468 ASSERT(instructions_index_ == 0);
9472 const auto& code_objects =
9474 ASSERT(code_objects.Length() == instructions_index_);
9476 uword previous_end = image_reader_->GetBareInstructionsEnd();
9477 for (intptr_t
i = instructions_index_ - 1;
i >= 0; --
i) {
9481 code->untag()->instructions_length_ = previous_end -
start;
9482 previous_end =
start;
9490 object_store->set_instructions_tables(tables);
9492 if ((tables.
Length() == 0) ||
9493 (tables.
At(tables.
Length() - 1) != instructions_table_.
ptr())) {
9495 (is_non_root_unit_ && tables.
Length() > 0));
9509 page_space_(page_space),
9510 freelist_(page_space->DataFreeList()) {
9527 const intptr_t instructions_table_len =
ReadUnsigned();
9528 const uint32_t instruction_table_data_offset =
ReadUnsigned();
9529 USE(instruction_table_data_offset);
9534#if defined(DART_PRECOMPILED_RUNTIME)
9535 if (instructions_table_len > 0) {
9536 ASSERT(FLAG_precompiled_mode);
9537 const uword start_pc = image_reader_->GetBareInstructionsAt(0);
9538 const uword end_pc = image_reader_->GetBareInstructionsEnd();
9539 uword instruction_table_data = 0;
9540 if (instruction_table_data_offset != 0) {
9545 instruction_table_data =
reinterpret_cast<uword>(
9547 image_reader_->
GetObjectAt(instruction_table_data_offset)))));
9550 instructions_table_len, start_pc, end_pc, instruction_table_data);
9553 ASSERT(instructions_table_len == 0);
9573 roots->AddBaseObjects(
this);
9577 " base objects, but deserializer provided %" Pd,
9583 for (intptr_t
i = 0;
i < num_clusters_;
i++) {
9587 intptr_t serializers_next_ref_index_ = Read<int32_t>();
9588 ASSERT_EQUAL(serializers_next_ref_index_, next_ref_index_);
9598 for (intptr_t
i = 0;
i < num_clusters_;
i++) {
9601 int32_t section_marker = Read<int32_t>();
9602 ASSERT(section_marker == kSectionMarker);
9607 roots->ReadRoots(
this);
9610 int32_t section_marker = Read<int32_t>();
9611 ASSERT(section_marker == kSectionMarker);
9617 roots->PostLoad(
this, refs);
9629 for (intptr_t
i = 0;
i < num_clusters_;
i++) {
9635 size_t clustered_length =
9637 reinterpret_cast<uword>(clustered_start);
9643#if !defined(DART_PRECOMPILED_RUNTIME)
9650 : thread_(
Thread::Current()),
9654 vm_isolate_snapshot_size_(0),
9655 isolate_snapshot_size_(0),
9656 vm_image_writer_(vm_image_writer),
9657 isolate_image_writer_(isolate_image_writer) {
9658 ASSERT(isolate_group() !=
nullptr);
9659 ASSERT(heap() !=
nullptr);
9660 ObjectStore* object_store = isolate_group()->object_store();
9661 ASSERT(object_store !=
nullptr);
9664 isolate_group()->ValidateClassTable();
9667#if defined(DART_PRECOMPILER)
9668 if (FLAG_write_v8_snapshot_profile_to !=
nullptr) {
9679 ASSERT(vm_snapshot_data_ !=
nullptr);
9680 Serializer serializer(thread(), kind_, vm_snapshot_data_, vm_image_writer_,
9681 true, profile_writer_);
9683 serializer.ReserveHeader();
9684 serializer.WriteVersionAndFeatures(
true);
9690 serializer.FillHeader(serializer.kind());
9691 clustered_vm_size_ = serializer.bytes_written();
9692 heap_vm_size_ = serializer.bytes_heap_allocated();
9696 vm_image_writer_->
Write(serializer.stream(),
true);
9697 mapped_data_size_ += vm_image_writer_->
data_size();
9698 mapped_text_size_ += vm_image_writer_->
text_size();
9704 vm_isolate_snapshot_size_ = serializer.bytes_written();
9708void FullSnapshotWriter::WriteProgramSnapshot(
9709 ZoneGrowableArray<Object*>* objects,
9710 GrowableArray<LoadingUnitSerializationData*>* units) {
9713 ASSERT(isolate_snapshot_data_ !=
nullptr);
9714 Serializer serializer(thread(), kind_, isolate_snapshot_data_,
9715 isolate_image_writer_,
false, profile_writer_);
9716 serializer.set_loading_units(units);
9718 ObjectStore* object_store = isolate_group()->object_store();
9719 ASSERT(object_store !=
nullptr);
9722 ASSERT(object_store->type_argument_int()->untag()->IsCanonical());
9723 ASSERT(object_store->type_argument_double()->untag()->IsCanonical());
9724 ASSERT(object_store->type_argument_string()->untag()->IsCanonical());
9725 ASSERT(object_store->type_argument_string_dynamic()->untag()->IsCanonical());
9726 ASSERT(object_store->type_argument_string_string()->untag()->IsCanonical());
9728 serializer.ReserveHeader();
9729 serializer.WriteVersionAndFeatures(
false);
9730 ProgramSerializationRoots
roots(objects, object_store, kind_);
9731 objects = serializer.Serialize(&
roots);
9732 if (units !=
nullptr) {
9735 serializer.FillHeader(serializer.kind());
9736 clustered_isolate_size_ = serializer.bytes_written();
9737 heap_isolate_size_ = serializer.bytes_heap_allocated();
9741 isolate_image_writer_->
Write(serializer.stream(),
false);
9742#if defined(DART_PRECOMPILER)
9746 mapped_data_size_ += isolate_image_writer_->
data_size();
9747 mapped_text_size_ += isolate_image_writer_->
text_size();
9753 isolate_snapshot_size_ = serializer.bytes_written();
9759 uint32_t program_hash) {
9762 Serializer serializer(thread(), kind_, isolate_snapshot_data_,
9763 isolate_image_writer_,
false, profile_writer_);
9769 serializer.
Write(program_hash);
9779 isolate_image_writer_->
Write(serializer.
stream(),
false);
9780#if defined(DART_PRECOMPILER)
9784 mapped_data_size_ += isolate_image_writer_->
data_size();
9785 mapped_text_size_ += isolate_image_writer_->
text_size();
9797 if (vm_snapshot_data_ !=
nullptr) {
9798 objects = WriteVMSnapshot();
9803 if (isolate_snapshot_data_ !=
nullptr) {
9804 WriteProgramSnapshot(objects,
data);
9807 if (FLAG_print_snapshot_sizes) {
9808 OS::Print(
"VMIsolate(CodeSize): %" Pd "\n", clustered_vm_size_);
9809 OS::Print(
"Isolate(CodeSize): %" Pd "\n", clustered_isolate_size_);
9810 OS::Print(
"ReadOnlyData(CodeSize): %" Pd "\n", mapped_data_size_);
9811 OS::Print(
"Instructions(CodeSize): %" Pd "\n", mapped_text_size_);
9813 clustered_vm_size_ + clustered_isolate_size_ + mapped_data_size_ +
9815 OS::Print(
"VMIsolate(HeapSize): %" Pd "\n", heap_vm_size_);
9816 OS::Print(
"Isolate(HeapSize): %" Pd "\n", heap_isolate_size_);
9817 OS::Print(
"Total(HeapSize): %" Pd "\n", heap_vm_size_ + heap_isolate_size_);
9820#if defined(DART_PRECOMPILER)
9821 if (FLAG_write_v8_snapshot_profile_to !=
nullptr) {
9822 profile_writer_->Write(FLAG_write_v8_snapshot_profile_to);
9829 const uint8_t* instructions_buffer,
9831 : kind_(snapshot->kind()),
9833 buffer_(snapshot->Addr()),
9834 size_(snapshot->
length()),
9835 data_image_(snapshot->DataImage()),
9836 instructions_image_(instructions_buffer) {}
9842 char*
error = header_reader.VerifyVersion();
9843 if (
error !=
nullptr) {
9847 const char* features =
nullptr;
9848 intptr_t features_length = 0;
9849 error = header_reader.ReadFeatures(&features, &features_length);
9850 if (
error !=
nullptr) {
9854 ASSERT(features[features_length] ==
'\0');
9855 const char* cursor = features;
9856 while (*cursor !=
'\0') {
9857 while (*cursor ==
' ') {
9861 const char*
end = strstr(cursor,
" ");
9862 if (
end ==
nullptr) {
9863 end = features + features_length;
9866#define SET_FLAG(name) \
9867 if (strncmp(cursor, #name, end - cursor) == 0) { \
9868 FLAG_##name = true; \
9872 if (strncmp(cursor, "no-" #name, end - cursor) == 0) { \
9873 FLAG_##name = false; \
9878#define CHECK_FLAG(name, mode) \
9879 if (strncmp(cursor, #name, end - cursor) == 0) { \
9880 if (!FLAG_##name) { \
9881 return header_reader.BuildError("Flag " #name \
9882 " is true in snapshot, " \
9884 " is always false in " mode); \
9889 if (strncmp(cursor, "no-" #name, end - cursor) == 0) { \
9890 if (FLAG_##name) { \
9891 return header_reader.BuildError("Flag " #name \
9892 " is false in snapshot, " \
9894 " is always true in " mode); \
9900#define SET_P(name, T, DV, C) SET_FLAG(name)
9903#define SET_OR_CHECK_R(name, PV, T, DV, C) CHECK_FLAG(name, "product mode")
9905#define SET_OR_CHECK_R(name, PV, T, DV, C) SET_FLAG(name)
9909#define SET_OR_CHECK_C(name, PCV, PV, T, DV, C) CHECK_FLAG(name, "product mode")
9910#elif defined(DART_PRECOMPILED_RUNTIME)
9911#define SET_OR_CHECK_C(name, PCV, PV, T, DV, C) \
9912 CHECK_FLAG(name, "the precompiled runtime")
9914#define SET_OR_CHECK_C(name, PV, T, DV, C) SET_FLAG(name)
9918#define SET_OR_CHECK_D(name, T, DV, C) CHECK_FLAG(name, "non-debug mode")
9920#define SET_OR_CHECK_D(name, T, DV, C) SET_FLAG(name)
9925#undef SET_OR_CHECK_D
9926#undef SET_OR_CHECK_C
9927#undef SET_OR_CHECK_R
9944 if (
error !=
nullptr) {
9945 return ConvertToApiError(
error);
9953 Deserializer deserializer(thread_, kind_, buffer_, size_, data_image_,
9954 instructions_image_,
false,
9962 ASSERT(data_image_ !=
nullptr);
9965 ASSERT(instructions_image_ !=
nullptr);
9973#if defined(DART_PRECOMPILED_RUNTIME)
9977 if (
auto const bss =
image.bss()) {
9991 if (
error !=
nullptr) {
9992 return ConvertToApiError(
error);
10000 Deserializer deserializer(thread_, kind_, buffer_, size_, data_image_,
10001 instructions_image_,
false,
10009 ASSERT(data_image_ !=
nullptr);
10012 ASSERT(instructions_image_ !=
nullptr);
10023 if (!units.IsNull()) {
10028 unit.set_load_outstanding();
10029 unit.set_instructions_image(instructions_image_);
10030 unit.set_loaded(
true);
10044 if (
error !=
nullptr) {
10045 return ConvertToApiError(
error);
10049 thread_, kind_, buffer_, size_, data_image_, instructions_image_,
10057 Array::Handle(isolate_group()->object_store()->loading_units());
10059 uint32_t unit_program_hash = deserializer.
Read<uint32_t>();
10060 if (main_program_hash != unit_program_hash) {
10062 String::New(
"Deferred loading unit is from a different "
10063 "program than the main loading unit")));
10068 ASSERT(data_image_ !=
nullptr);
10071 ASSERT(instructions_image_ !=
nullptr);
10085void FullSnapshotReader::InitializeBSS() {
10086#if defined(DART_PRECOMPILED_RUNTIME)
10090 if (
auto const bss =
image.bss()) {
static void info(const char *fmt,...) SK_PRINTF_LIKE(1
static float prev(float f)
static size_t total_size(SkSBlockAllocator< N > &pool)
static bool skip(SkStream *stream, size_t amount)
static uint32_t hash(const SkShaderBase::GradientInfo &v)
#define SET_OR_CHECK_R(name, PV, T, DV, C)
#define SAVE_AND_RESET_ROOT(name, Type, init)
#define AutoTraceObject(obj)
#define PushFromTo(obj,...)
#define RESET_ROOT_LIST(V)
#define DECLARE_OBJECT_STORE_FIELD(Type, Name)
#define CID_CLUSTER(Type)
#define SET_P(name, T, DV, C)
#define ADD_CANONICAL_NEXT(cid)
#define CASE_FFI_CID(name)
#define WriteFromTo(obj,...)
#define SET_OR_CHECK_C(name, PV, T, DV, C)
#define AutoTraceObjectName(obj, str)
#define RESTORE_ROOT(name, Type, init)
#define SET_OR_CHECK_D(name, T, DV, C)
#define WriteCompressedField(obj, name)
#define ADD_NON_CANONICAL_NEXT(cid)
#define DECLARE_FIELD(name, Type, init)
#define WriteFieldValue(field, value)
#define WriteField(obj, field)
#define ASSERT_EQUAL(expected, actual)
#define RELEASE_ASSERT(cond)
#define CLASS_LIST_FFI_TYPE_MARKER(V)
AbstractInstanceDeserializationCluster(const char *name, bool is_canonical, bool is_root_unit)
void UpdateTypeTestingStubEntryPoint() const
void InitializeTypeTestingStubNonAtomic(const Code &stub) const
void ReadFill(Deserializer *d_) override
ArrayDeserializationCluster(intptr_t cid, bool is_canonical, bool is_root_unit)
~ArrayDeserializationCluster()
void ReadAlloc(Deserializer *d) override
~ArraySerializationCluster()
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
ArraySerializationCluster(bool is_canonical, intptr_t cid)
static intptr_t InstanceSize()
static ArrayPtr New(intptr_t len, Heap::Space space=Heap::kNew)
static constexpr bool UseCardMarkingForAllocation(const intptr_t array_length)
ObjectPtr At(intptr_t index) const
void SetAt(intptr_t index, const Object &value) const
static void Initialize(Thread *current, uword *bss, bool vm)
bool HasKey(typename KeyValueTrait::Key key) const
const T & At(intptr_t index) const
void Sort(int compare(const T *, const T *))
void WriteBytes(const void *addr, intptr_t len)
void WriteWordWith32BitWrites(uword value)
intptr_t Align(intptr_t alignment, intptr_t offset=0)
void WriteUnsigned(T value)
DART_FORCE_INLINE intptr_t bytes_written() const
virtual intptr_t Position() const
void WriteRefId(intptr_t value)
static constexpr CallKind decode(intptr_t value)
static constexpr uword update(ClassIdTagType value, uword original)
static constexpr uword encode(ClassIdTagType value)
static const Bool & False()
static const Bool & True()
static void SetupNativeResolver()
void BuildCanonicalSetFromLayout(Deserializer *d)
CanonicalSetDeserializationCluster(bool is_canonical, bool is_root_unit, const char *name)
SetType::ArrayHandle & table_
void VerifyCanonicalSet(Deserializer *d, const Array &refs, const typename SetType::ArrayHandle ¤t_table)
CanonicalSetSerializationCluster(intptr_t cid, bool is_canonical, bool represents_canonical_set, const char *name, intptr_t target_instance_size=0)
void ReorderObjects(Serializer *s)
void WriteCanonicalSetLayout(Serializer *s)
GrowableArray< PointerType > objects_
virtual bool IsInCanonicalSet(Serializer *s, PointerType ptr)
~ClassDeserializationCluster()
void ReadAlloc(Deserializer *d) override
ClassDeserializationCluster()
void ReadFill(Deserializer *d_) override
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
~ClassSerializationCluster()
ClassSerializationCluster(intptr_t num_cids)
ClassPtr At(intptr_t cid) const
intptr_t NumTopLevelCids() const
static bool IsTopLevelCid(intptr_t cid)
static int32_t target_next_field_offset_in_words(const ClassPtr cls)
static intptr_t InstanceSize()
static int32_t target_type_arguments_field_offset_in_words(const ClassPtr cls)
static int32_t target_instance_size_in_words(const ClassPtr cls)
ClosureDataDeserializationCluster()
void ReadAlloc(Deserializer *d) override
void ReadFill(Deserializer *d_) override
~ClosureDataDeserializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
~ClosureDataSerializationCluster()
void WriteFill(Serializer *s)
ClosureDataSerializationCluster()
static intptr_t InstanceSize()
void ReadFill(Deserializer *d_) override
ClosureDeserializationCluster(bool is_canonical, bool is_root_unit)
~ClosureDeserializationCluster()
void ReadAlloc(Deserializer *d) override
void WriteAlloc(Serializer *s)
ClosureSerializationCluster(bool is_canonical)
~ClosureSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
void ReadFill(Deserializer *d, intptr_t start_index, intptr_t stop_index, bool deferred)
void PostLoad(Deserializer *d, const Array &refs) override
void ReadAllocOneCode(Deserializer *d)
~CodeDeserializationCluster()
void ReadAlloc(Deserializer *d) override
void ReadFill(Deserializer *d) override
CodeDeserializationCluster()
intptr_t first_ref() const
~CodeSerializationCluster()
static void Sort(Serializer *s, GrowableArray< CodePtr > *codes)
static void Sort(Serializer *s, GrowableArray< Code * > *codes)
void WriteFill(Serializer *s, Snapshot::Kind kind, CodePtr code, bool deferred)
void WriteAlloc(Serializer *s, CodePtr code)
static const char * MakeDisambiguatedCodeName(Serializer *s, CodePtr c)
static void Insert(Serializer *s, GrowableArray< CodeOrderInfo > *order_list, IntMap< intptr_t > *order_map, CodePtr code)
void Trace(Serializer *s, ObjectPtr object)
GrowableArray< CodePtr > * objects()
intptr_t last_ref() const
void TracePool(Serializer *s, ObjectPoolPtr pool, bool only_call_targets)
void WriteAlloc(Serializer *s)
static int CompareCodeOrderInfo(CodeOrderInfo const *a, CodeOrderInfo const *b)
intptr_t NonDiscardedCodeCount()
void WriteFill(Serializer *s)
CodeSerializationCluster(Heap *heap)
GrowableArray< CodePtr > * deferred_objects()
intptr_t first_deferred_ref() const
~CodeSourceMapDeserializationCluster()
CodeSourceMapDeserializationCluster()
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
~CodeSourceMapSerializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
CodeSourceMapSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
static intptr_t InstanceSize()
static uword EntryPointOf(const CodePtr code)
static intptr_t InstanceSize()
static InstructionsPtr InstructionsOf(const CodePtr code)
static uword PayloadStartOf(const CodePtr code)
bool HasMonomorphicEntry() const
static bool IsDiscarded(const CodePtr code)
static void NotifyCodeObservers(const Code &code, bool optimized)
@ kSCallTableCodeOrTypeTarget
@ kSCallTableKindAndOffset
bool IsUnknownDartCode() const
void ReadAlloc(Deserializer *d) override
CompressedStackMapsDeserializationCluster()
~CompressedStackMapsDeserializationCluster()
void ReadFill(Deserializer *d_) override
void WriteFill(Serializer *s)
~CompressedStackMapsSerializationCluster()
void WriteAlloc(Serializer *s)
CompressedStackMapsSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
static intptr_t InstanceSize()
void ReadAlloc(Deserializer *d) override
ContextDeserializationCluster()
~ContextDeserializationCluster()
void ReadFill(Deserializer *d_) override
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
ContextScopeDeserializationCluster()
~ContextScopeDeserializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
ContextScopeSerializationCluster()
void WriteFill(Serializer *s)
~ContextScopeSerializationCluster()
static intptr_t InstanceSize()
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
ContextSerializationCluster()
~ContextSerializationCluster()
static intptr_t InstanceSize()
static IsolateGroup * vm_isolate_group()
static Isolate * vm_isolate()
static char * FeaturesString(IsolateGroup *isolate_group, bool is_vm_snapshot, Snapshot::Kind kind)
~DeltaEncodedTypedDataDeserializationCluster()
void ReadAlloc(Deserializer *d) override
DeltaEncodedTypedDataDeserializationCluster()
void ReadFill(Deserializer *d_) override
~DeltaEncodedTypedDataSerializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
DeltaEncodedTypedDataSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
virtual void ReadFill(Deserializer *deserializer)=0
virtual ~DeserializationCluster()
DeserializationCluster(const char *name, bool is_canonical=false, bool is_immutable=false)
bool is_canonical() const
virtual void PostLoad(Deserializer *deserializer, const Array &refs)
void ReadAllocFixedSize(Deserializer *deserializer, intptr_t instance_size)
virtual void ReadAlloc(Deserializer *deserializer)=0
const char * name() const
virtual void ReadRoots(Deserializer *deserializer)=0
virtual void AddBaseObjects(Deserializer *deserializer)=0
virtual void PostLoad(Deserializer *deserializer, const Array &refs)=0
virtual ~DeserializationRoots()
void ReadFromTo(T obj, P &&... params)
ObjectPtr Ref(intptr_t index) const
uint64_t ReadUnsigned64()
TokenPosition ReadTokenPosition()
uint64_t ReadUnsigned64()
ObjectPtr Allocate(intptr_t size)
void ReadInstructions(CodePtr code, bool deferred)
void set_code_start_index(intptr_t value)
void AssignRef(ObjectPtr object)
ApiErrorPtr VerifyImageAlignment()
bool is_non_root_unit() const
const InstructionsTable & instructions_table() const
intptr_t next_index() const
const uint8_t * AddressOfCurrentPosition() const
static void InitializeHeader(ObjectPtr raw, intptr_t cid, intptr_t size, bool is_canonical=false)
void AddBaseObject(ObjectPtr base_object)
intptr_t num_base_objects() const
ObjectPtr Ref(intptr_t index) const
void Advance(intptr_t value)
CodePtr GetCodeByIndex(intptr_t code_index, uword *entry_point) const
uword GetEntryPointByCodeIndex(intptr_t code_index) const
intptr_t position() const
TokenPosition ReadTokenPosition()
ObjectPtr GetObjectAt(uint32_t offset) const
intptr_t code_start_index() const
static intptr_t CodeIndexToClusterIndex(const InstructionsTable &table, intptr_t code_index)
Snapshot::Kind kind() const
DeserializationCluster * ReadCluster()
uword ReadWordWith32BitReads()
void Align(intptr_t alignment, intptr_t offset=0)
void ReadBytes(uint8_t *addr, intptr_t len)
Deserializer(Thread *thread, Snapshot::Kind kind, const uint8_t *buffer, intptr_t size, const uint8_t *data_buffer, const uint8_t *instructions_buffer, bool is_non_root_unit, intptr_t offset=0)
intptr_t code_stop_index() const
void set_position(intptr_t p)
void Deserialize(DeserializationRoots *roots)
void set_code_stop_index(intptr_t value)
static void DisassembleStub(const char *name, const Code &code)
static void DisassembleCode(const Function &function, const Code &code, bool optimized)
void ReadFill(Deserializer *d_) override
~DoubleDeserializationCluster()
DoubleDeserializationCluster(bool is_canonical, bool is_root_unit)
void ReadAlloc(Deserializer *d) override
~DoubleSerializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
DoubleSerializationCluster(bool is_canonical)
void Trace(Serializer *s, ObjectPtr object)
static intptr_t InstanceSize()
void ReadFill(Deserializer *d_) override
~ExceptionHandlersDeserializationCluster()
ExceptionHandlersDeserializationCluster()
void ReadAlloc(Deserializer *d) override
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
~ExceptionHandlersSerializationCluster()
ExceptionHandlersSerializationCluster()
static intptr_t InstanceSize()
~ExternalTypedDataDeserializationCluster()
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
ExternalTypedDataDeserializationCluster(intptr_t cid)
~ExternalTypedDataSerializationCluster()
void WriteAlloc(Serializer *s)
ExternalTypedDataSerializationCluster(intptr_t cid)
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
static constexpr int kDataSerializationAlignment
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
~FakeSerializationCluster()
FakeSerializationCluster(const char *name, intptr_t num_objects, intptr_t size, intptr_t target_memory_size=0)
void Trace(Serializer *s, ObjectPtr object)
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
FfiTrampolineDataDeserializationCluster()
~FfiTrampolineDataDeserializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
~FfiTrampolineDataSerializationCluster()
void WriteFill(Serializer *s)
FfiTrampolineDataSerializationCluster()
static intptr_t InstanceSize()
FieldDeserializationCluster()
~FieldDeserializationCluster()
void ReadFill(Deserializer *d_) override
void PostLoad(Deserializer *d, const Array &refs) override
void ReadAlloc(Deserializer *d) override
void WriteFill(Serializer *s)
FieldSerializationCluster()
void WriteAlloc(Serializer *s)
~FieldSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void SetAt(intptr_t index, ObjectPtr raw_instance, bool concurrent_use=false)
ObjectPtr At(intptr_t index, bool concurrent_use=false) const
void AllocateIndex(intptr_t index)
intptr_t NumFieldIds() const
void set_is_nullable_unsafe(bool val) const
void InitializeGuardedListLengthInObjectOffset(bool unsafe=false) const
static intptr_t InstanceSize()
void set_guarded_list_length_in_object_offset_unsafe(intptr_t offset) const
void set_guarded_cid_unsafe(intptr_t cid) const
static intptr_t TargetOffsetOf(FieldPtr field)
void set_guarded_list_length_unsafe(intptr_t list_length) const
void set_static_type_exactness_state_unsafe(StaticTypeExactnessState state) const
static intptr_t value_offset()
static intptr_t InstanceSize()
static intptr_t InstanceSize()
static intptr_t value_offset()
ApiErrorPtr ReadUnitSnapshot(const LoadingUnit &unit)
ApiErrorPtr ReadProgramSnapshot()
FullSnapshotReader(const Snapshot *snapshot, const uint8_t *instructions_buffer, Thread *thread)
ApiErrorPtr ReadVMSnapshot()
FullSnapshotWriter(Snapshot::Kind kind, NonStreamingWriteStream *vm_snapshot_data, NonStreamingWriteStream *isolate_snapshot_data, ImageWriter *vm_image_writer, ImageWriter *iso_image_writer)
void WriteFullSnapshot(GrowableArray< LoadingUnitSerializationData * > *data=nullptr)
void WriteUnitSnapshot(GrowableArray< LoadingUnitSerializationData * > *units, LoadingUnitSerializationData *unit, uint32_t program_hash)
void ReadFill(Deserializer *d_) override
FunctionDeserializationCluster()
~FunctionDeserializationCluster()
void PostLoad(Deserializer *d, const Array &refs) override
void ReadAlloc(Deserializer *d) override
void WriteAlloc(Serializer *s)
~FunctionSerializationCluster()
FunctionSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
static const char * MakeDisambiguatedFunctionName(Serializer *s, FunctionPtr f)
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
FunctionTypeDeserializationCluster(bool is_canonical, bool is_root_unit)
~FunctionTypeDeserializationCluster()
void PostLoad(Deserializer *d, const Array &refs) override
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
~FunctionTypeSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
FunctionTypeSerializationCluster(bool is_canonical, bool represents_canonical_set)
static intptr_t InstanceSize()
CodePtr CurrentCode() const
static intptr_t InstanceSize()
void ClearCodeSafe() const
void PrintName(const NameFormattingParams ¶ms, BaseTextBuffer *printer) const
void SetInstructionsSafe(const Code &value) const
void ReadFill(Deserializer *d_) override
GrowableObjectArrayDeserializationCluster()
~GrowableObjectArrayDeserializationCluster()
void ReadAlloc(Deserializer *d) override
void WriteFill(Serializer *s)
~GrowableObjectArraySerializationCluster()
GrowableObjectArraySerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
void Add(const Object &value, Heap::Space space=Heap::kNew) const
static GrowableObjectArrayPtr New(Heap::Space space=Heap::kNew)
static intptr_t InstanceSize()
ObjectPtr At(intptr_t index) const
static constexpr double kMaxLoadFactor
HeapLocker(Thread *thread, PageSpace *page_space)
intptr_t GetLoadingUnit(ObjectPtr raw_obj) const
void ResetObjectIdTable()
bool Verify(const char *msg, MarkExpectation mark_expectation=kForbidMarked)
intptr_t GetObjectId(ObjectPtr raw_obj) const
void SetObjectId(ObjectPtr raw_obj, intptr_t object_id)
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
~ICDataDeserializationCluster()
ICDataDeserializationCluster()
ICDataSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
~ICDataSerializationCluster()
void WriteAlloc(Serializer *s)
@ kCachedICDataArrayCount
static intptr_t InstanceSize()
ObjectPtr GetObjectAt(uint32_t offset) const
InstructionsPtr GetInstructionsAt(uint32_t offset) const
ApiErrorPtr VerifyAlignment() const
intptr_t GetTextObjectCount() const
void Write(NonStreamingWriteStream *clustered_stream, bool vm)
void SetProfileWriter(V8SnapshotProfileWriter *profile_writer)
static const char * TagObjectTypeAsReadOnly(Zone *zone, const char *type)
void ClearProfileWriter()
intptr_t text_size() const
void GetTrampolineInfo(intptr_t *count, intptr_t *size) const
int32_t GetTextOffsetFor(InstructionsPtr instructions, CodePtr code)
uint32_t GetDataOffsetFor(ObjectPtr raw_object)
intptr_t data_size() const
void PrepareForSerialization(GrowableArray< ImageWriterCommand > *commands)
uint32_t AddBytesToData(uint8_t *bytes, intptr_t length)
~InstanceDeserializationCluster()
void ReadAlloc(Deserializer *d) override
InstanceDeserializationCluster(intptr_t cid, bool is_canonical, bool is_immutable, bool is_root_unit)
void ReadFill(Deserializer *d_) override
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
~InstanceSerializationCluster()
InstanceSerializationCluster(bool is_canonical, intptr_t cid)
static intptr_t NextFieldOffset()
static InstructionsTablePtr New(intptr_t length, uword start_pc, uword end_pc, uword rodata)
void SetCodeAt(intptr_t index, CodePtr code) const
const UntaggedInstructionsTable::Data * rodata() const
uword EntryPointAt(intptr_t index) const
static intptr_t InstanceSize()
static intptr_t value_offset()
V Lookup(const Key &key) const
void Insert(const Key &key, const Value &value)
ObjectStore * object_store() const
static IsolateGroup * Current()
ClassTable * class_table() const
void SetupImagePage(const uint8_t *snapshot_buffer, bool is_executable)
void set_coverage(bool value)
IsolateGroup * group() const
void ReadAlloc(Deserializer *d) override
void PostLoad(Deserializer *d, const Array &refs) override
KernelProgramInfoDeserializationCluster()
~KernelProgramInfoDeserializationCluster()
void ReadFill(Deserializer *d_) override
void Trace(Serializer *s, ObjectPtr object)
~KernelProgramInfoSerializationCluster()
void WriteFill(Serializer *s)
KernelProgramInfoSerializationCluster()
void WriteAlloc(Serializer *s)
static intptr_t InstanceSize()
LanguageErrorDeserializationCluster()
~LanguageErrorDeserializationCluster()
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
LanguageErrorSerializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
~LanguageErrorSerializationCluster()
static intptr_t InstanceSize()
void ReadAlloc(Deserializer *d) override
~LibraryDeserializationCluster()
LibraryDeserializationCluster()
void ReadFill(Deserializer *d_) override
LibraryPrefixDeserializationCluster()
void ReadAlloc(Deserializer *d) override
~LibraryPrefixDeserializationCluster()
void ReadFill(Deserializer *d_) override
void Trace(Serializer *s, ObjectPtr object)
LibraryPrefixSerializationCluster()
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
~LibraryPrefixSerializationCluster()
static intptr_t InstanceSize()
~LibrarySerializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
LibrarySerializationCluster()
static intptr_t InstanceSize()
void ReadFill(Deserializer *d_) override
LoadingUnitDeserializationCluster()
void ReadAlloc(Deserializer *d) override
~LoadingUnitDeserializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
~LoadingUnitSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
LoadingUnitSerializationCluster()
void set_objects(ZoneGrowableArray< Object * > *objects)
ZoneGrowableArray< Object * > * objects()
LoadingUnitSerializationData * parent() const
GrowableArray< Code * > * deferred_objects()
LoadingUnitPtr parent() const
static intptr_t InstanceSize()
static constexpr intptr_t kRootId
void set_base_objects(const Array &value) const
void set_instructions_image(const uint8_t *value) const
uint8_t * Steal(intptr_t *length)
void ReadAlloc(Deserializer *d) override
~MapDeserializationCluster()
MapDeserializationCluster(intptr_t cid, bool is_canonical, bool is_root_unit)
void ReadFill(Deserializer *d_) override
MapSerializationCluster(bool is_canonical, intptr_t cid)
void Trace(Serializer *s, ObjectPtr object)
~MapSerializationCluster()
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
void ReadAlloc(Deserializer *d) override
~MegamorphicCacheDeserializationCluster()
MegamorphicCacheDeserializationCluster()
void ReadFill(Deserializer *d_) override
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
~MegamorphicCacheSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
MegamorphicCacheSerializationCluster()
static intptr_t InstanceSize()
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
~MintDeserializationCluster()
MintDeserializationCluster(bool is_canonical, bool is_root_unit)
void WriteFill(Serializer *s)
MintSerializationCluster(bool is_canonical)
void WriteAlloc(Serializer *s)
~MintSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
static intptr_t InstanceSize()
NamespaceDeserializationCluster()
void ReadAlloc(Deserializer *d) override
~NamespaceDeserializationCluster()
void ReadFill(Deserializer *d_) override
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
~NamespaceSerializationCluster()
NamespaceSerializationCluster()
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
static uword LinkNativeCallEntry()
DART_FORCE_INLINE void SetPosition(intptr_t value)
static void static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
static void Print(const char *format,...) PRINTF_ATTRIBUTE(1
static DART_NORETURN void Abort()
static char * SCreate(Zone *zone, const char *format,...) PRINTF_ATTRIBUTE(2
ObjectPoolDeserializationCluster()
void ReadAlloc(Deserializer *d) override
void PostLoad(Deserializer *d, const Array &refs) override
void ReadFill(Deserializer *d_) override
~ObjectPoolDeserializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
~ObjectPoolSerializationCluster()
ObjectPoolSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
static uint8_t EncodeBits(EntryType type, Patchability patchable, SnapshotBehavior snapshot_behavior)
static intptr_t InstanceSize()
ObjectPtr Decompress(uword heap_base) const
UntaggedObject * untag() const
intptr_t GetClassIdMayBeSmi() const
static Object * ReadOnlyHandle()
static void set_vm_isolate_snapshot_object_table(const Array &table)
static void FinalizeReadOnlyObject(ObjectPtr object)
virtual const char * ToCString() const
static constexpr intptr_t RoundedAllocationSize(intptr_t size)
static ObjectPtr RawCast(ObjectPtr obj)
static Object & ZoneHandle()
const char * FieldNameForOffset(intptr_t cid, intptr_t offset)
static intptr_t InstanceSize()
void AcquireLock(FreeList *freelist)
void ReleaseLock(FreeList *freelist)
DART_FORCE_INLINE uword AllocateSnapshotLocked(FreeList *freelist, intptr_t size)
~PatchClassDeserializationCluster()
void ReadAlloc(Deserializer *d) override
PatchClassDeserializationCluster()
void ReadFill(Deserializer *d_) override
~PatchClassSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
PatchClassSerializationCluster()
static intptr_t InstanceSize()
void ReadFill(Deserializer *d_) override
~PcDescriptorsDeserializationCluster()
PcDescriptorsDeserializationCluster()
void ReadAlloc(Deserializer *d) override
static intptr_t InstanceSize()
ProgramDeserializationRoots(ObjectStore *object_store)
void PostLoad(Deserializer *d, const Array &refs) override
void ReadRoots(Deserializer *d) override
void AddBaseObjects(Deserializer *d) override
void PushRoots(Serializer *s)
~ProgramSerializationRoots()
void AddBaseObjects(Serializer *s)
ProgramSerializationRoots(ZoneGrowableArray< Object * > *base_objects, ObjectStore *object_store, Snapshot::Kind snapshot_kind)
virtual const CompressedStackMaps & canonicalized_stack_map_entries() const
void WriteRoots(Serializer *s)
void ReadAlloc(Deserializer *d) override
void ReadFill(Deserializer *d_) override
RODataDeserializationCluster(intptr_t cid, bool is_canonical, bool is_root_unit)
~RODataDeserializationCluster()
void PostLoad(Deserializer *d, const Array &refs) override
RODataSerializationCluster(Zone *zone, const char *type, intptr_t cid, bool is_canonical)
void WriteFill(Serializer *s)
~RODataSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
uword ReadWordWith32BitReads()
void Align(intptr_t alignment, intptr_t offset=0)
intptr_t Position() const
intptr_t PendingBytes() const
const uint8_t * AddressOfCurrentPosition() const
void Advance(intptr_t value)
void SetPosition(intptr_t value)
void ReadBytes(void *addr, intptr_t len)
void ReadAlloc(Deserializer *d) override
RecordDeserializationCluster(bool is_canonical, bool is_root_unit)
~RecordDeserializationCluster()
void ReadFill(Deserializer *d_) override
~RecordSerializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
RecordSerializationCluster(bool is_canonical)
void Trace(Serializer *s, ObjectPtr object)
intptr_t num_fields() const
void ReadFill(Deserializer *d_) override
void PostLoad(Deserializer *d, const Array &refs) override
void ReadAlloc(Deserializer *d) override
~RecordTypeDeserializationCluster()
RecordTypeDeserializationCluster(bool is_canonical, bool is_root_unit)
void Trace(Serializer *s, ObjectPtr object)
RecordTypeSerializationCluster(bool is_canonical, bool represents_canonical_set)
void WriteFill(Serializer *s)
~RecordTypeSerializationCluster()
void WriteAlloc(Serializer *s)
static intptr_t InstanceSize()
static intptr_t InstanceSize()
static intptr_t NumFields(RecordPtr ptr)
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
RegExpDeserializationCluster()
~RegExpDeserializationCluster()
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
RegExpSerializationCluster()
~RegExpSerializationCluster()
static intptr_t InstanceSize()
void ReadAlloc(Deserializer *d) override
void ReadFill(Deserializer *d_) override
~ScriptDeserializationCluster()
ScriptDeserializationCluster()
void WriteFill(Serializer *s)
~ScriptSerializationCluster()
void WriteAlloc(Serializer *s)
ScriptSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
static intptr_t line_starts_offset()
static intptr_t InstanceSize()
const char * name() const
bool is_canonical() const
virtual ~SerializationCluster()
void WriteAndMeasureAlloc(Serializer *serializer)
void WriteAndMeasureFill(Serializer *serializer)
static constexpr intptr_t kSizeVaries
intptr_t target_memory_size() const
const intptr_t target_instance_size_
intptr_t target_memory_size_
intptr_t num_objects() const
virtual void Trace(Serializer *serializer, ObjectPtr object)=0
SerializationCluster(const char *name, intptr_t cid, intptr_t target_instance_size=kSizeVaries, bool is_canonical=false)
virtual void WriteAlloc(Serializer *serializer)=0
virtual void WriteFill(Serializer *serializer)=0
bool is_immutable() const
virtual void AddBaseObjects(Serializer *serializer)=0
virtual const CompressedStackMaps & canonicalized_stack_map_entries() const
virtual void WriteRoots(Serializer *serializer)=0
virtual void PushRoots(Serializer *serializer)=0
virtual ~SerializationRoots()
WritingObjectScope(Serializer *serializer, const char *type, ObjectPtr object, const char *name)
WritingObjectScope(Serializer *serializer, const char *type, ObjectPtr object, StringPtr name)
WritingObjectScope(Serializer *serializer, ObjectPtr object)
intptr_t current_loading_unit_id() const
void WriteCid(intptr_t cid)
void WritePropertyRef(ObjectPtr object, const char *property)
void WriteWordWith32BitWrites(uword value)
NonStreamingWriteStream * stream()
void DumpCombinedCodeStatistics()
DART_NOINLINE void WriteRange(ObjectPtr obj, T from, T to)
void TraceDataOffset(uint32_t offset)
void AddBaseObject(ObjectPtr base_object, const char *type=nullptr, const char *name=nullptr)
void WriteVersionAndFeatures(bool is_vm_snapshot)
bool InCurrentLoadingUnitOrRoot(ObjectPtr obj)
void WriteRootRef(ObjectPtr object, const char *name=nullptr)
Serializer(Thread *thread, Snapshot::Kind kind, NonStreamingWriteStream *stream, ImageWriter *image_writer_, bool vm_, V8SnapshotProfileWriter *profile_writer=nullptr)
GrowableArray< LoadingUnitSerializationData * > * loading_units() const
bool HasArtificialRef(ObjectPtr object) const
void set_loading_units(GrowableArray< LoadingUnitSerializationData * > *units)
void PrintSnapshotSizes()
void set_current_loading_unit_id(intptr_t id)
bool HasProfileNode(ObjectPtr object) const
void WriteFromTo(T obj, P &&... args)
void WriteElementRef(ObjectPtr object, intptr_t index)
void FillHeader(Snapshot::Kind kind)
uint32_t GetDataOffset(ObjectPtr object) const
void AttributeReference(ObjectPtr object, const V8SnapshotProfileWriter::Reference &reference)
bool HasRef(ObjectPtr object) const
bool IsWritten(ObjectPtr object) const
intptr_t AssignArtificialRef(ObjectPtr object=nullptr)
DART_NOINLINE void PushRange(ObjectPtr obj, T from, T to)
void PushWeak(ObjectPtr object)
Snapshot::Kind kind() const
intptr_t RefId(ObjectPtr object) const
intptr_t GetCodeIndex(CodePtr code)
void PushFromTo(T obj, P &&... args)
SerializationCluster * NewClusterForClass(intptr_t cid, bool is_canonical)
bool IsReachable(ObjectPtr object) const
void Trace(ObjectPtr object, intptr_t cid_override)
ZoneGrowableArray< Object * > * Serialize(SerializationRoots *roots)
void RecordDeferredCode(CodePtr ptr)
void WriteBytes(const void *addr, intptr_t len)
void WriteOffsetRef(ObjectPtr object, intptr_t offset)
intptr_t UnsafeRefId(ObjectPtr object) const
void AttributePropertyRef(ObjectPtr object, const char *property)
void WriteUnsigned(intptr_t value)
intptr_t AssignRef(ObjectPtr object)
void WriteTokenPosition(TokenPosition pos)
void Align(intptr_t alignment, intptr_t offset=0)
void PrepareInstructions(const CompressedStackMaps &canonical_smap)
V8SnapshotProfileWriter::ObjectId GetProfileId(ObjectPtr object) const
void WriteDispatchTable(const Array &entries)
V8SnapshotProfileWriter * profile_writer() const
intptr_t GetDataSize() const
void UnexpectedObject(ObjectPtr object, const char *message)
bool CreateArtificialNodeIfNeeded(ObjectPtr obj)
intptr_t bytes_heap_allocated()
void Push(ObjectPtr object, intptr_t cid_override=kIllegalCid)
void WriteRefId(intptr_t value)
void WriteInstructions(InstructionsPtr instr, uint32_t unchecked_offset, CodePtr code, bool deferred)
intptr_t next_ref_index() const
void AttributeElementRef(ObjectPtr object, intptr_t index)
void WriteUnsigned64(uint64_t value)
void ReadAlloc(Deserializer *d) override
SetDeserializationCluster(intptr_t cid, bool is_canonical, bool is_root_unit)
~SetDeserializationCluster()
void ReadFill(Deserializer *d_) override
SetSerializationCluster(bool is_canonical, intptr_t cid)
void Trace(Serializer *s, ObjectPtr object)
void WriteAlloc(Serializer *s)
~SetSerializationCluster()
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
void ReadFill(Deserializer *d_) override
~Simd128DeserializationCluster()
Simd128DeserializationCluster(intptr_t cid, bool is_canonical, bool is_root_unit)
void ReadAlloc(Deserializer *d) override
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
Simd128SerializationCluster(intptr_t cid, bool is_canonical)
void WriteFill(Serializer *s)
~Simd128SerializationCluster()
static SmiPtr New(intptr_t value)
static bool IsValid(int64_t value)
static bool IsFull(Kind kind)
static const char * KindToCString(Kind kind)
static bool IncludesStringsInROData(Kind kind)
static bool IncludesCode(Kind kind)
static constexpr intptr_t kHeaderSize
ThreadState * thread() const
StackTraceDeserializationCluster()
void ReadFill(Deserializer *d_) override
void ReadAlloc(Deserializer *d) override
~StackTraceDeserializationCluster()
void Trace(Serializer *s, ObjectPtr object)
~StackTraceSerializationCluster()
void WriteAlloc(Serializer *s)
StackTraceSerializationCluster()
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
static StaticTypeExactnessState NotTracking()
static intptr_t DecodeLengthAndCid(intptr_t encoded, intptr_t *out_cid)
StringDeserializationCluster(bool is_canonical, bool is_root_unit)
~StringDeserializationCluster()
void ReadAlloc(Deserializer *d) override
static intptr_t InstanceSize(intptr_t length, intptr_t cid)
void PostLoad(Deserializer *d, const Array &refs) override
void ReadFill(Deserializer *d_) override
void Add(uint16_t code_unit)
void WriteFill(Serializer *s)
~StringSerializationCluster()
StringSerializationCluster(bool is_canonical, bool represents_canonical_set)
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
static intptr_t EncodeLengthAndCid(intptr_t length, intptr_t cid)
static StringPtr New(const char *cstr, Heap::Space space=Heap::kNew)
static uint32_t SetCachedHash(StringPtr obj, uint32_t hash)
static const Code & EntryAt(intptr_t index)
static const char * NameAt(intptr_t index)
static void InitializationDone()
static intptr_t NumEntries()
static void EntryAtPut(intptr_t index, Code *entry)
void ReadAlloc(Deserializer *d) override
~SubtypeTestCacheDeserializationCluster()
void ReadFill(Deserializer *d_) override
SubtypeTestCacheDeserializationCluster()
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
~SubtypeTestCacheSerializationCluster()
SubtypeTestCacheSerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
static intptr_t InstanceSize()
static void InitFromSnapshot(IsolateGroup *isolate_group)
static StringPtr New(Thread *thread, const char *cstr)
IsolateGroup * isolate_group() const
void DecrementNoSafepointScopeDepth()
static Thread * Current()
IsolateGroup * isolate_group() const
static TokenPosition Deserialize(int32_t value)
static intptr_t InstanceSize()
~TypeArgumentsDeserializationCluster()
void PostLoad(Deserializer *d, const Array &refs) override
void ReadFill(Deserializer *d_) override
TypeArgumentsDeserializationCluster(bool is_canonical, bool is_root_unit)
void ReadAlloc(Deserializer *d) override
~TypeArgumentsSerializationCluster()
TypeArgumentsSerializationCluster(bool is_canonical, bool represents_canonical_set)
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
TypeArgumentsPtr Canonicalize(Thread *thread) const
void ReadAlloc(Deserializer *d) override
void PostLoad(Deserializer *d, const Array &refs) override
~TypeDeserializationCluster()
void ReadFill(Deserializer *d_) override
TypeDeserializationCluster(bool is_canonical, bool is_root_unit)
void ReadFill(Deserializer *d_) override
void PostLoad(Deserializer *d, const Array &refs) override
void ReadAlloc(Deserializer *d) override
~TypeParameterDeserializationCluster()
TypeParameterDeserializationCluster(bool is_canonical, bool is_root_unit)
TypeParameterSerializationCluster(bool is_canonical, bool cluster_represents_canonical_set)
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
~TypeParameterSerializationCluster()
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
virtual AbstractTypePtr Canonicalize(Thread *thread) const
TypeParametersDeserializationCluster()
void ReadFill(Deserializer *d_) override
~TypeParametersDeserializationCluster()
void ReadAlloc(Deserializer *d) override
static intptr_t InstanceSize()
virtual bool IsInCanonicalSet(Serializer *s, TypePtr type)
TypeSerializationCluster(bool is_canonical, bool represents_canonical_set)
~TypeSerializationCluster()
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
static CodePtr DefaultCodeForType(const AbstractType &type, bool lazy_specialize=true)
bool IsDeclarationTypeOf(const Class &cls) const
static intptr_t InstanceSize()
intptr_t ElementSizeInBytes() const
~TypedDataDeserializationCluster()
TypedDataDeserializationCluster(intptr_t cid)
void ReadAlloc(Deserializer *d) override
void ReadFill(Deserializer *d_) override
void Trace(Serializer *s, ObjectPtr object)
~TypedDataSerializationCluster()
void WriteAlloc(Serializer *s)
TypedDataSerializationCluster(intptr_t cid)
void WriteFill(Serializer *s)
void ReadAlloc(Deserializer *d) override
~TypedDataViewDeserializationCluster()
TypedDataViewDeserializationCluster(intptr_t cid)
void ReadFill(Deserializer *d_) override
void PostLoad(Deserializer *d, const Array &refs) override
TypedDataViewSerializationCluster(intptr_t cid)
void WriteFill(Serializer *s)
~TypedDataViewSerializationCluster()
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
static intptr_t InstanceSize()
static intptr_t InstanceSize()
static DART_FORCE_INLINE constexpr intptr_t Length()
DART_FORCE_INLINE bool Get(intptr_t position) const
DART_FORCE_INLINE void Reset()
DART_FORCE_INLINE void Set(intptr_t position)
void ReadAlloc(Deserializer *d) override
UnhandledExceptionDeserializationCluster()
~UnhandledExceptionDeserializationCluster()
void ReadFill(Deserializer *d_) override
void Trace(Serializer *s, ObjectPtr object)
UnhandledExceptionSerializationCluster()
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
~UnhandledExceptionSerializationCluster()
static intptr_t InstanceSize()
void ReadRoots(Deserializer *d) override
void AddBaseObjects(Deserializer *d) override
void PostLoad(Deserializer *d, const Array &refs) override
UnitDeserializationRoots(const LoadingUnit &unit)
void PushRoots(Serializer *s)
UnitSerializationRoots(LoadingUnitSerializationData *unit)
void WriteRoots(Serializer *s)
void AddBaseObjects(Serializer *s)
void ReadAlloc(Deserializer *d) override
void ReadFill(Deserializer *d_) override
UnlinkedCallDeserializationCluster()
~UnlinkedCallDeserializationCluster()
void Trace(Serializer *s, ObjectPtr object)
UnlinkedCallSerializationCluster()
~UnlinkedCallSerializationCluster()
void WriteAlloc(Serializer *s)
void WriteFill(Serializer *s)
static intptr_t InstanceSize()
static constexpr uword update(intptr_t size, uword tag)
static ObjectPtr FromAddr(uword addr)
bool InVMIsolateHeap() const
static bool IsInt(intptr_t N, T value)
static int SNPrint(char *str, size_t size, const char *format,...) PRINTF_ATTRIBUTE(3
static char * StrDup(const char *s)
static intptr_t StrNLen(const char *s, intptr_t n)
static bool IsUint(intptr_t N, T value)
static constexpr bool IsAligned(T x, uintptr_t alignment, uintptr_t offset=0)
static char * StrNDup(const char *s, intptr_t n)
static const ObjectId kArtificialRootId
void AttributeReferenceTo(const ObjectId &from_object_id, const Reference &reference, const ObjectId &to_object_id)
void SetObjectTypeAndName(const ObjectId &object_id, const char *type, const char *name)
void AddRoot(const ObjectId &object_id, const char *name=nullptr)
void AttributeBytesTo(const ObjectId &object_id, size_t num_bytes)
bool HasId(const ObjectId &object_id)
void ReadRoots(Deserializer *d) override
void PostLoad(Deserializer *d, const Array &refs) override
void AddBaseObjects(Deserializer *d) override
void WriteRoots(Serializer *s)
void PushRoots(Serializer *s)
void AddBaseObjects(Serializer *s)
VMSerializationRoots(const WeakArray &symbols, bool should_write_symbols)
static const char * SnapshotString()
static void DontNeed(void *address, intptr_t size)
WeakArrayDeserializationCluster()
void ReadFill(Deserializer *d_) override
~WeakArrayDeserializationCluster()
void ReadAlloc(Deserializer *d) override
WeakArraySerializationCluster()
~WeakArraySerializationCluster()
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
void WriteAlloc(Serializer *s)
static intptr_t InstanceSize()
ObjectPtr At(intptr_t index) const
void ReadAlloc(Deserializer *d) override
WeakPropertyDeserializationCluster()
void ReadFill(Deserializer *d_) override
~WeakPropertyDeserializationCluster()
void RetraceEphemerons(Serializer *s)
~WeakPropertySerializationCluster()
void WriteAlloc(Serializer *s)
void Trace(Serializer *s, ObjectPtr object)
void WriteFill(Serializer *s)
WeakPropertySerializationCluster()
static intptr_t key_offset()
static intptr_t value_offset()
static intptr_t InstanceSize()
static constexpr intptr_t kNoValue
char * PrintToString(const char *format,...) PRINTF_ATTRIBUTE(2
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
static word InstanceSize()
#define THR_Print(format,...)
const EmbeddedViewParams * params
VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE auto & d
G_BEGIN_DECLS G_MODULE_EXPORT FlValue * args
const uint8_t uint32_t uint32_t GError ** error
#define VM_GLOBAL_FLAG_LIST(P, R, C, D)
Dart_NativeFunction function
sk_sp< const SkImage > image
const uint8_t * isolate_snapshot_data
const uint8_t * vm_snapshot_data
static SnapshotKind snapshot_kind
static constexpr intptr_t kWordSize
static constexpr intptr_t kCompressedWordSize
intptr_t RoundedAllocationSize(intptr_t size)
def link(from_root, to_root)
bool IsTypedDataViewClassId(intptr_t index)
bool IsTypedDataClassId(intptr_t index)
static const char *const kObjectStoreFieldNames[]
static constexpr bool IsReachableReference(intptr_t ref)
static void Finish(Thread *thread)
static constexpr intptr_t kCompressedWordSizeLog2
static constexpr intptr_t kUnreachableReference
DART_EXPORT bool IsNull(Dart_Handle object)
static constexpr intptr_t kUnallocatedReference
static constexpr bool IsArtificialReference(intptr_t ref)
static constexpr bool IsAllocatedReference(intptr_t ref)
uintptr_t compressed_uword
static UnboxedFieldBitmap CalculateTargetUnboxedFieldsBitmap(Serializer *s, intptr_t class_id)
bool ShouldHaveImmutabilityBitSetCid(intptr_t predefined_cid)
constexpr intptr_t kFirstInternalOnlyCid
DEFINE_FLAG(bool, print_cluster_information, false, "Print information about clusters written to snapshot")
bool IsInternalVMdefinedClassId(intptr_t index)
static constexpr intptr_t kCompressedWordSize
raw_obj untag() -> num_entries()) VARIABLE_COMPRESSED_VISITOR(Array, Smi::Value(raw_obj->untag() ->length())) VARIABLE_COMPRESSED_VISITOR(TypedData, TypedData::ElementSizeInBytes(raw_obj->GetClassId()) *Smi::Value(raw_obj->untag() ->length())) VARIABLE_COMPRESSED_VISITOR(Record, RecordShape(raw_obj->untag() ->shape()).num_fields()) VARIABLE_NULL_VISITOR(CompressedStackMaps, CompressedStackMaps::PayloadSizeOf(raw_obj)) VARIABLE_NULL_VISITOR(OneByteString, Smi::Value(raw_obj->untag() ->length())) VARIABLE_NULL_VISITOR(TwoByteString, Smi::Value(raw_obj->untag() ->length())) intptr_t UntaggedField::VisitFieldPointers(FieldPtr raw_obj, ObjectPointerVisitor *visitor)
static constexpr intptr_t kFirstReference
static DART_FORCE_INLINE CodePtr GetCodeAndEntryPointByIndex(const Deserializer *d, intptr_t code_index, uword *entry_point)
constexpr intptr_t kWordSize
static constexpr intptr_t kObjectAlignment
ArrayOfTuplesView< Code::SCallTableEntry, std::tuple< Smi, Object, Function > > StaticCallsTable
static int CompareClusters(SerializationCluster *const *a, SerializationCluster *const *b)
static int8_t data[kExtLength]
static constexpr intptr_t kObjectAlignmentLog2
bool IsExternalTypedDataClassId(intptr_t index)
COMPILE_ASSERT(kUnreachableReference==WeakTable::kNoValue)
constexpr intptr_t kLastInternalOnlyCid
bool IsStringClassId(intptr_t index)
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified vm service A custom Dart VM Service port The default is to pick a randomly available open port disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode disable vm service Disable mDNS Dart VM Service publication Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set endless trace Enable an endless trace buffer The default is a ring buffer This is useful when very old events need to viewed For during application launch Memory usage will continue to grow indefinitely however Start app with an specific route defined on the framework flutter assets Path to the Flutter assets directory enable service port Allow the VM service to fallback to automatic port selection if binding to a specified port fails trace Trace early application lifecycle Automatically switches to an endless trace buffer trace skia Filters out all Skia trace event categories except those that are specified in this comma separated list dump skp on shader Automatically dump the skp that triggers new shader compilations This is useful for writing custom ShaderWarmUp to reduce jank By this is not enabled to reduce the overhead purge persistent cache
struct PathData * Data(SkPath *path)
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified vm service A custom Dart VM Service port The default is to pick a randomly available open port disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode disable vm service Disable mDNS Dart VM Service publication Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set endless trace buffer
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive keep the shell running after the Dart script has completed enable serial On low power devices with low core running concurrent GC tasks on threads can cause them to contend with the UI thread which could potentially lead to jank This option turns off all concurrent GC activities domain network JSON encoded network policy per domain This overrides the DisallowInsecureConnections switch Embedder can specify whether to allow or disallow insecure connections at a domain level old gen heap size
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not set
std::function< void()> closure
SI auto map(std::index_sequence< I... >, Fn &&fn, const Args &... args) -> skvx::Vec< sizeof...(I), decltype(fn(args[0]...))>
#define OBJECT_STORE_FIELD_LIST(R_, RW, ARW_RELAXED, ARW_AR, LAZY_CORE, LAZY_ASYNC, LAZY_ISOLATE, LAZY_INTERNAL, LAZY_FFI)
static DecodeResult decode(std::string path)
#define REUSABLE_FUNCTION_HANDLESCOPE(thread)
#define REUSABLE_OBJECT_HANDLESCOPE(thread)
#define REUSABLE_CODE_HANDLESCOPE(thread)
static const char header[]
@ InsertInstructionOfCode
static constexpr intptr_t kObjectAlignmentLog2
static constexpr intptr_t kObjectAlignment
static Reference Element(intptr_t offset)
static Reference Property(const char *name)
#define TIMELINE_DURATION(thread, stream, name)
#define NOT_IN_PRECOMPILED(code)