#if !defined(DART_PRECOMPILED_RUNTIME)
      : object_(Object::Handle(zone, object.ptr())), next_(nullptr) {}

      : zone_(zone), first_(nullptr), last_(nullptr) {}

  bool IsEmpty() const { return first_ == nullptr; }
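  // Entries form a singly-linked list: Add appends at last_ and Remove pops
  // the element at first_.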
    if (first_ == nullptr) {

    ASSERT(first_ != nullptr && last_ != nullptr);

    first_ = first_->next();
    if (first_ == nullptr) {
        class_object_(Object::Handle(zone)),
        class_fields_(Array::Handle(zone)),
        class_field_(Field::Handle(zone)),
        class_functions_(Array::Handle(zone)),
        class_function_(Function::Handle(zone)),
        class_code_(Code::Handle(zone)),
        function_code_(Code::Handle(zone)),
        static_calls_array_(Array::Handle(zone)),
        static_calls_table_entry_(Object::Handle(zone)),
        worklist_entry_(Object::Handle(zone)) {}
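  // Queues an object for visiting, skipping null values, Smis, and anything
  // in the VM isolate heap; only classes, functions, and code objects end up
  // on the worklist.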
    if (object.IsNull() || object.IsSmi() || object.InVMIsolateHeap()) return;

    if (object.IsClass() ||
      worklist_.Add(object);

      worklist_entry_ = worklist_.Remove();
      if (worklist_entry_.IsClass()) {
        VisitClass(Class::Cast(worklist_entry_));
      } else if (worklist_entry_.IsFunction()) {
        VisitFunction(Function::Cast(worklist_entry_));
      } else if (worklist_entry_.IsCode()) {
        VisitCode(Code::Cast(worklist_entry_));
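  // Walks a class: its functions, its invocation dispatcher cache, and its
  // fields.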
  void VisitClass(const Class& cls) {
    for (intptr_t j = 0; j < class_functions_.Length(); j++) {
      class_function_ ^= class_functions_.At(j);

    class_functions_ = cls.invocation_dispatcher_cache();
    for (intptr_t j = 0; j < class_functions_.Length(); j++) {
      class_object_ = class_functions_.At(j);
      if (class_object_.IsFunction()) {
        class_function_ ^= class_functions_.At(j);

    class_fields_ = cls.fields();
    for (intptr_t j = 0; j < class_fields_.Length(); j++) {
      class_field_ ^= class_fields_.At(j);

  void VisitFunction(const Function& function) {
    function_code_ = function.CurrentCode();
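  // Walks a code object; in precompiled mode this also covers the code
  // targets recorded in its static calls table.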
  void VisitCode(const Code& code) {
    static_calls_array_ = code.static_calls_target_table();
    if (FLAG_precompiled_mode && !static_calls_array_.IsNull()) {
      for (auto& view : static_calls) {
        static_calls_table_entry_ =
        if (static_calls_table_entry_.IsCode()) {

  ClassVisitor* const visitor_;
  Object& class_object_;
  Array& class_fields_;
  Array& class_functions_;
  Function& class_function_;
  Code& function_code_;
  Array& static_calls_array_;
  Object& static_calls_table_entry_;
  Object& worklist_entry_;
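// WalkProgram seeds the walker with the isolate group's libraries, the
// tagged entries of the global object pool, the FFI callback functions, and
// the dispatch table's code entries, then drains the worklist through the
// given visitor.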
  auto const object_store = isolate_group->object_store();
  auto const heap = isolate_group->heap();

  const auto& libraries =
  for (intptr_t i = 0; i < libraries.Length(); i++) {
    lib ^= libraries.At(i);

  const auto& global_object_pool =
  if (!global_object_pool.IsNull()) {
    for (intptr_t i = 0; i < global_object_pool.Length(); i++) {
      auto const type = global_object_pool.TypeAt(i);
      if (type != ObjectPool::EntryType::kTaggedObject) continue;
      object = global_object_pool.ObjectAt(i);

  if (object_store->ffi_callback_functions() != Array::null()) {
    FfiCallbackFunctionSet::Iterator it(&set);
    while (it.MoveNext()) {
      const intptr_t entry = it.Current();

  const auto& dispatch_table_entries =
      Array::Handle(zone, object_store->dispatch_table_code_entries());
  if (!dispatch_table_entries.IsNull()) {
    for (intptr_t i = 0; i < dispatch_table_entries.Length(); i++) {
      code ^= dispatch_table_entries.At(i);
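// Deduper<T, S> maps structurally equal objects of type T onto a single
// canonical instance using the hash map trait S; Dedup() returns the
// canonical representative of its argument.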
template <typename T, typename S>

    const auto& object_table = Object::vm_isolate_snapshot_object_table();
    for (intptr_t i = 0; i < object_table.Length(); i++) {
      obj = object_table.At(i);

  typename T::ObjectPtrType Dedup(const T& obj) {
      return canonical->ptr();
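// Binds the entries of each Code's static calls table to concrete Code
// targets. When every call site can be dispatched via a Code target (always
// the case in precompiled mode), the table itself is cleared afterwards.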
void ProgramVisitor::BindStaticCalls(Thread* thread) {
  class BindStaticCallsVisitor : public CodeVisitor {
    explicit BindStaticCallsVisitor(Zone* zone)

      table_ = code.static_calls_target_table();
      if (table_.IsNull()) return;

      bool only_call_via_code = FLAG_precompiled_mode;
      for (const auto& view : static_calls) {
          only_call_via_code = false;

        if (target_.IsNull()) {
          ASSERT(!target_.IsNull());

          auto const pc_offset =
          const uword pc = pc_offset + code.PayloadStart();

          const auto& fun = Function::Cast(target_);
          ASSERT(!FLAG_precompiled_mode || fun.HasCode());
          target_code_ = fun.HasCode() ? fun.CurrentCode()
                                       : StubCode::CallStaticFunction().ptr();

      if (only_call_via_code) {
        ASSERT(FLAG_precompiled_mode);
        code.set_static_calls_target_table(Object::empty_array());

    Smi& kind_and_offset_;

  StackZone stack_zone(thread);
  BindStaticCallsVisitor visitor(thread->zone());
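// Points every megamorphic cache at a single shared, empty bucket array of
// capacity 1 instead of a per-cache allocation.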
void ProgramVisitor::ShareMegamorphicBuckets(Thread* thread) {
  if (table.IsNull()) return;

  const intptr_t capacity = 1;
  MegamorphicCache::SetEntry(buckets, 0, Object::smi_illegal_cid(), handler);

  for (intptr_t i = 0; i < table.Length(); i++) {
    cache.set_buckets(buckets);
    cache.set_mask(capacity - 1);
    cache.set_filled_entry_count(0);
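// StackMapEntry wraps a single stack map (its spill-slot and non-spill-slot
// bits) read through a CompressedStackMaps iterator and provides the
// hashing, equality, and encoding used to build a canonical global table.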
        spill_slot_bit_count_(it.SpillSlotBitCount()),
        non_spill_slot_bit_count_(it.Length() - it.SpillSlotBitCount()),
        bits_offset_(it.current_bits_offset_) {
    ASSERT(it.current_spill_slot_bit_count_ >= 0);

    if (hash_ != 0) return hash_;

    auto const start = PayloadData();
    auto const end = start + PayloadLength();
    for (auto cursor = start; cursor < end; cursor++) {

    if (spill_slot_bit_count_ != other.spill_slot_bit_count_ ||
        non_spill_slot_bit_count_ != other.non_spill_slot_bit_count_) {
    return memcmp(PayloadData(), other.PayloadData(), PayloadLength()) == 0;

    auto const current_offset = stream->Position();
    stream->WriteLEB128(spill_slot_bit_count_);
    stream->WriteLEB128(non_spill_slot_bit_count_);
    stream->WriteBytes(PayloadData(), PayloadLength());
    return current_offset;

  intptr_t Length() const {
    return spill_slot_bit_count_ + non_spill_slot_bit_count_;
  intptr_t PayloadLength() const {
  const uint8_t* PayloadData() const {
    return bits_container_.ptr()->untag()->payload()->data() + bits_offset_;

  const CompressedStackMaps& maps_;
  const CompressedStackMaps& bits_container_;
  const intptr_t spill_slot_bit_count_;
  const intptr_t non_spill_slot_bit_count_;
  const intptr_t bits_offset_;
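// Collects every stack map entry in the program, emits the entries (most
// frequently used first) into one global table, and rewrites each Code's
// CompressedStackMaps to refer to that shared table.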
void ProgramVisitor::NormalizeAndDedupCompressedStackMaps(Thread* thread) {
  class CollectStackMapEntriesVisitor : public CodeVisitor {
    CollectStackMapEntriesVisitor(Zone* zone,
          old_global_table_(global_table),
          collected_entries_(zone, 2),
          entry_indices_(zone),
          entry_offset_(zone) {
      ASSERT(old_global_table_.IsNull() || old_global_table_.IsGlobalTable());

      compressed_stackmaps_ = code.compressed_stackmaps();
          compressed_stackmaps_, old_global_table_);

      auto const index = entry_indices_.LookupValue(entry);
        auto new_index = collected_entries_.length();
        collected_entries_.Add(entry);
        entry_indices_.Insert({entry, new_index});

    CompressedStackMapsPtr CreateGlobalTable(
      if (collected_entries_.length() == 0) {

      collected_entries_.Sort(
            return static_cast<int>((*e2)->UsageCount() - (*e1)->UsageCount());

      for (intptr_t i = 0, n = collected_entries_.length(); i < n; i++) {
        auto const entry = collected_entries_.At(i);
        entry_indices_.Update({entry, i});
        entry_offsets->Insert({entry, entry->EncodeTo(&stream)});
  class NormalizeAndDedupCompressedStackMapsVisitor
      : public CodeVisitor,
        public Deduper<CompressedStackMaps,
                       PointerSetKeyValueTrait<const CompressedStackMaps>> {
    NormalizeAndDedupCompressedStackMapsVisitor(Zone* zone,
                  ->canonicalized_stack_map_entries())),
          entry_offsets_(zone),
      ASSERT(old_global_table_.IsNull() || old_global_table_.IsGlobalTable());

      CollectStackMapEntriesVisitor collect_visitor(zone, old_global_table_);
      WalkProgram(zone, isolate_group, &collect_visitor);

          zone, collect_visitor.CreateGlobalTable(&entry_offsets_));
      isolate_group->object_store()->set_canonicalized_stack_map_entries(

      maps_ = code.compressed_stackmaps();
      if (maps_.IsNull()) return;
      if (auto const canonical = canonical_objects_.LookupValue(&maps_)) {
        maps_ = canonical->ptr();
      } else {
        maps_ = NormalizeEntries(maps_);
        maps_ = Dedup(maps_);
      }
      code.set_compressed_stackmaps(maps_);
        return Object::empty_compressed_stackmaps().ptr();

      intptr_t last_offset = 0;
        const intptr_t entry_offset = entry_offsets_.LookupValue(&entry);
        const intptr_t pc_delta = it.pc_offset() - last_offset;

  NormalizeAndDedupCompressedStackMapsVisitor visitor(thread->zone(),
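// Canonicalizes identical PcDescriptors objects across all code.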
void ProgramVisitor::DedupPcDescriptors(Thread* thread) {
  class DedupPcDescriptorsVisitor
      : public CodeVisitor,
        public Deduper<PcDescriptors, PcDescriptorsKeyValueTrait> {
    explicit DedupPcDescriptorsVisitor(Zone* zone)
        : Deduper(zone), pc_descriptor_(PcDescriptors::Handle(zone)) {

    void VisitCode(const Code& code) {
      pc_descriptor_ = code.pc_descriptors();
      pc_descriptor_ = Dedup(pc_descriptor_);
      code.set_pc_descriptors(pc_descriptor_);

    PcDescriptors& pc_descriptor_;

  StackZone stack_zone(thread);
  DedupPcDescriptorsVisitor visitor(thread->zone());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);

  bool IsCorrectType(const Object& obj) const { return obj.IsTypedData(); }
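// Canonicalizes the TypedData entries of each Code's deopt info table.
// Skipped entirely in precompiled mode.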
void ProgramVisitor::DedupDeoptEntries(Thread* thread) {
  class DedupDeoptEntriesVisitor : public CodeVisitor, public TypedDataDeduper {
    explicit DedupDeoptEntriesVisitor(Zone* zone)
        : TypedDataDeduper(zone),
          deopt_table_(Array::Handle(zone)),
          deopt_entry_(TypedData::Handle(zone)),
          offset_(Smi::Handle(zone)),
          reason_and_flags_(Smi::Handle(zone)) {}

    void VisitCode(const Code& code) {
      deopt_table_ = code.deopt_info_array();
      if (deopt_table_.IsNull()) return;

        ASSERT(!deopt_entry_.IsNull());
        deopt_entry_ = Dedup(deopt_entry_);
        ASSERT(!deopt_entry_.IsNull());

    TypedData& deopt_entry_;
    Smi& reason_and_flags_;

  if (FLAG_precompiled_mode) return;

  StackZone stack_zone(thread);
  DedupDeoptEntriesVisitor visitor(thread->zone());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
#if defined(DART_PRECOMPILER)
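// Canonicalizes the catch-entry-moves maps attached to Code objects.
// Precompiled mode only.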
void ProgramVisitor::DedupCatchEntryMovesMaps(Thread* thread) {
  class DedupCatchEntryMovesMapsVisitor : public CodeVisitor,
                                          public TypedDataDeduper {
    explicit DedupCatchEntryMovesMapsVisitor(Zone* zone)
        : TypedDataDeduper(zone),
          catch_entry_moves_maps_(TypedData::Handle(zone)) {}

    void VisitCode(const Code& code) {
      catch_entry_moves_maps_ = code.catch_entry_moves_maps();
      catch_entry_moves_maps_ = Dedup(catch_entry_moves_maps_);
      code.set_catch_entry_moves_maps(catch_entry_moves_maps_);

    TypedData& catch_entry_moves_maps_;

  if (!FLAG_precompiled_mode) return;

  StackZone stack_zone(thread);
  DedupCatchEntryMovesMapsVisitor visitor(thread->zone());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);

class UnlinkedCallKeyValueTrait {
  typedef const UnlinkedCall* Key;
  typedef const UnlinkedCall* Value;
  typedef const UnlinkedCall* Pair;

  static Key KeyOf(Pair kv) { return kv; }
  static Value ValueOf(Pair kv) { return kv; }

  static inline bool IsKeyEqual(Pair pair, Key key) {
    return pair->Equals(*key);
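// Canonicalizes UnlinkedCall objects found in object pools, including the
// global object pool. Precompiled mode only.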
void ProgramVisitor::DedupUnlinkedCalls(Thread* thread) {
  class DedupUnlinkedCallsVisitor
      : public CodeVisitor,
        public Deduper<UnlinkedCall, UnlinkedCallKeyValueTrait> {
    explicit DedupUnlinkedCallsVisitor(Zone* zone, IsolateGroup* isolate_group)
          entry_(Object::Handle(zone)),
          pool_(ObjectPool::Handle(zone)) {
          zone, isolate_group->object_store()->global_object_pool());

    void DedupPool(const ObjectPool& pool) {
      if (pool.IsNull()) return;
      for (intptr_t i = 0; i < pool.Length(); i++) {
        if (pool.TypeAt(i) != ObjectPool::EntryType::kTaggedObject) {
          continue;
        }
        entry_ = pool.ObjectAt(i);
        if (!entry_.IsUnlinkedCall()) continue;
        entry_ = Dedup(UnlinkedCall::Cast(entry_));
        pool.SetObjectAt(i, entry_);
    void VisitCode(const Code& code) {
      pool_ = code.object_pool();

  if (!FLAG_precompiled_mode) return;

  StackZone stack_zone(thread);
  DedupUnlinkedCallsVisitor visitor(thread->zone(), thread->isolate_group());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
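// Rebuilds each class's direct-implementors and direct-subclasses lists,
// keeping only the entries that survive pruning and storing a null list when
// nothing remains.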
void ProgramVisitor::PruneSubclasses(Thread* thread) {
  class PruneSubclassesVisitor : public ClassVisitor {
    explicit PruneSubclassesVisitor(Zone* zone)
          old_implementors_(GrowableObjectArray::Handle(zone)),
          new_implementors_(GrowableObjectArray::Handle(zone)),
          implementor_(Class::Handle(zone)),
          old_subclasses_(GrowableObjectArray::Handle(zone)),
          new_subclasses_(GrowableObjectArray::Handle(zone)),
          subclass_(Class::Handle(zone)),
          null_list_(GrowableObjectArray::Handle(zone)) {}
    void VisitClass(const Class& klass) {
      old_implementors_ = klass.direct_implementors_unsafe();
      if (!old_implementors_.IsNull()) {
        for (intptr_t i = 0; i < old_implementors_.Length(); i++) {
          implementor_ ^= old_implementors_.At(i);
            new_implementors_.Add(implementor_);

        if (new_implementors_.Length() == 0) {
          klass.set_direct_implementors(null_list_);
        } else {
          klass.set_direct_implementors(new_implementors_);
      old_subclasses_ = klass.direct_subclasses_unsafe();
      if (!old_subclasses_.IsNull()) {
        for (intptr_t i = 0; i < old_subclasses_.Length(); i++) {
          subclass_ ^= old_subclasses_.At(i);
            new_subclasses_.Add(subclass_);

        if (new_subclasses_.Length() == 0) {
          klass.set_direct_subclasses(null_list_);
        } else {
          klass.set_direct_subclasses(new_subclasses_);
    GrowableObjectArray& old_implementors_;
    GrowableObjectArray& new_implementors_;
    GrowableObjectArray& old_subclasses_;
    GrowableObjectArray& new_subclasses_;
    GrowableObjectArray& null_list_;

  StackZone stack_zone(thread);
  PruneSubclassesVisitor visitor(thread->zone());
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
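// Canonicalizes identical CodeSourceMap objects across all code.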
void ProgramVisitor::DedupCodeSourceMaps(Thread* thread) {
  class DedupCodeSourceMapsVisitor
      : public CodeVisitor,
        public Deduper<CodeSourceMap, CodeSourceMapKeyValueTrait> {
    explicit DedupCodeSourceMapsVisitor(Zone* zone)
        : Deduper(zone), code_source_map_(CodeSourceMap::Handle(zone)) {

    void VisitCode(const Code& code) {
      code_source_map_ = code.code_source_map();
      code_source_map_ = Dedup(code_source_map_);
      code.set_code_source_map(code_source_map_);

    CodeSourceMap& code_source_map_;

  StackZone stack_zone(thread);
  DedupCodeSourceMapsVisitor visitor(thread->zone());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
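// Array trait: arrays hash and compare element by element, so identical
// lists can be shared.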
    const intptr_t len = key->Length();
    for (intptr_t i = 0; i < len; ++i) {

    if (pair->Length() != key->Length()) return false;
    for (intptr_t i = 0; i < pair->Length(); i++) {
      if (pair->At(i) != key->At(i)) return false;
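// Canonicalizes the arrays hanging off Code objects (inlined-id maps, deopt
// info arrays, static calls tables) and, outside precompiled mode, functions'
// positional parameter name lists.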
void ProgramVisitor::DedupLists(Thread* thread) {
  class DedupListsVisitor : public CodeVisitor,
                            public Deduper<Array, ArrayKeyValueTrait> {
    explicit DedupListsVisitor(Zone* zone)
        : Deduper(zone),
          list_(Array::Handle(zone)),
          field_(Field::Handle(zone)) {}
    void VisitCode(const Code& code) {
      if (!code.IsFunctionCode()) return;

      list_ = code.inlined_id_to_function();
      list_ = Dedup(list_);
      code.set_inlined_id_to_function(list_);

      list_ = code.deopt_info_array();
      list_ = Dedup(list_);
      code.set_deopt_info_array(list_);

      list_ = code.static_calls_target_table();
      list_ = Dedup(list_);
      code.set_static_calls_target_table(list_);

    void VisitFunction(const Function& function) {
      if (!FLAG_precompiled_mode) {
        list_ = function.positional_parameter_names();
        if (!list_.IsNull()) {
          list_ = Dedup(list_);
          function.set_positional_parameter_names(list_);

    bool IsCorrectType(const Object& obj) const { return obj.IsArray(); }

  StackZone stack_zone(thread);
  NoSafepointScope no_safepoint;
  DedupListsVisitor visitor(thread->zone());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
#if defined(DART_PRECOMPILER)
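// Treats two Code objects as equal when all of their metadata (static calls
// table, pc descriptors, stack maps, catch-entry moves, exception handlers,
// unchecked entry point offset) matches, so that their instructions can be
// merged in precompiled mode.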
class CodeKeyValueTrait {
  typedef const Code* Key;
  typedef const Code* Value;
  typedef const Code* Pair;

  static Key KeyOf(Pair kv) { return kv; }
  static Value ValueOf(Pair kv) { return kv; }

        static_cast<uword>(key->static_calls_target_table())));

  static inline bool IsKeyEqual(Pair pair, Key key) {
    ASSERT(!pair->IsDisabled() && !key->IsDisabled());
    if (pair->ptr() == key->ptr()) return true;

    if (pair->static_calls_target_table() != key->static_calls_target_table()) {
      return false;
    }
    if (pair->pc_descriptors() != key->pc_descriptors()) {
      return false;
    }
    if (pair->compressed_stackmaps() != key->compressed_stackmaps()) {
      return false;
    }
    if (pair->catch_entry_moves_maps() != key->catch_entry_moves_maps()) {
      return false;
    }
    if (pair->exception_handlers() != key->exception_handlers()) {
      return false;
    }
    if (pair->UncheckedEntryPointOffset() != key->UncheckedEntryPointOffset()) {
      return false;
    }
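// Canonicalizes instructions. In JIT mode identical Instructions objects are
// shared directly; in precompiled mode whole Code objects with identical
// metadata are merged and references to them in object pools, static calls
// tables, and the dispatch table are redirected to the canonical Code.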
void ProgramVisitor::DedupInstructions(Thread* thread) {
  class DedupInstructionsVisitor
      : public CodeVisitor,
        public Deduper<Instructions, InstructionsKeyValueTrait>,
        public ObjectVisitor {
    explicit DedupInstructionsVisitor(Zone* zone)
        : Deduper(zone),
          code_(Code::Handle(zone)),
          instructions_(Instructions::Handle(zone)) {
    void VisitObject(ObjectPtr obj) override {
      if (!obj->IsInstructions()) return;
      instructions_ = Instructions::RawCast(obj);
      AddCanonical(instructions_);
    void VisitFunction(const Function& function) override {
      function.SetInstructionsSafe(code_);

    void VisitCode(const Code& code) override {
      instructions_ = code.instructions();
      instructions_ = Dedup(instructions_);
      code.set_instructions(instructions_);
      if (code.IsDisabled()) {
        instructions_ = code.active_instructions();
        instructions_ = Dedup(instructions_);
      }
      code.SetActiveInstructionsSafe(instructions_,
                                     code.UncheckedEntryPointOffset());
    Instructions& instructions_;

#if defined(DART_PRECOMPILER)
  class DedupInstructionsWithSameMetadataVisitor
      : public CodeVisitor,
        public Deduper<Code, CodeKeyValueTrait> {
    explicit DedupInstructionsWithSameMetadataVisitor(Zone* zone)
        : Deduper(zone),
          canonical_(Code::Handle(zone)),
          code_(Code::Handle(zone)),
          instructions_(Instructions::Handle(zone)) {}
    void PostProcess(IsolateGroup* isolate_group) {
      const intptr_t canonical_count = canonical_objects_.Length();
      auto should_canonicalize = [&](const Object& obj) {
        return CanCanonicalize(Code::Cast(obj)) && !obj.InVMIsolateHeap();
      };
      auto process_pool = [&](const ObjectPool& pool) {
        if (pool.IsNull()) {

        for (intptr_t i = 0; i < pool.Length(); i++) {
          auto const type = pool.TypeAt(i);
          if (type != ObjectPool::EntryType::kTaggedObject) continue;
          object = pool.ObjectAt(i);
          if (object.IsCode() && should_canonicalize(object)) {
            object = Dedup(Code::Cast(object));
            pool.SetObjectAt(i, object);
      auto it = canonical_objects_.GetIterator();
      while (auto canonical_code = it.Next()) {
        static_calls_array = (*canonical_code)->static_calls_target_table();
        if (!static_calls_array.IsNull()) {
          for (auto& view : static_calls) {
            static_calls_table_entry =
            if (static_calls_table_entry.IsCode() &&
                should_canonicalize(static_calls_table_entry)) {
              static_calls_table_entry =
                  static_calls_table_entry);

        pool = (*canonical_code)->object_pool();
      auto object_store = isolate_group->object_store();

      const auto& dispatch_table_entries =
          Array::Handle(zone_, object_store->dispatch_table_code_entries());
      if (!dispatch_table_entries.IsNull()) {
        for (intptr_t i = 0; i < dispatch_table_entries.Length(); i++) {
          code ^= dispatch_table_entries.At(i);
          if (should_canonicalize(code)) {
            code ^= Dedup(code);
            dispatch_table_entries.SetAt(i, code);
      pool = object_store->global_object_pool();

    void VisitFunction(const Function& function) {
      function.SetInstructionsSafe(canonical_);
    void VisitCode(const Code& code) {
      canonical_ = code.ptr();
      if (code.IsDisabled()) return;
      canonical_ = Dedup(code);
      instructions_ = canonical_.instructions();
      code.SetActiveInstructionsSafe(instructions_,
                                     code.UncheckedEntryPointOffset());
      code.set_instructions(instructions_);
    bool CanCanonicalize(const Code& code) const { return !code.IsDisabled(); }
      if (!code.is_discarded() && canonical_.is_discarded()) {
        canonical_.set_is_discarded(false);
      }
      return canonical_.ptr();
    Instructions& instructions_;

  if (FLAG_precompiled_mode) {
    StackZone stack_zone(thread);
    NoSafepointScope no_safepoint;
    DedupInstructionsWithSameMetadataVisitor visitor(thread->zone());
    WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
    visitor.PostProcess(thread->isolate_group());

  StackZone stack_zone(thread);
  DedupInstructionsVisitor visitor(thread->zone());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
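  // Run the individual sharing and deduplication passes in order.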
  BindStaticCalls(thread);
  ShareMegamorphicBuckets(thread);
  NormalizeAndDedupCompressedStackMaps(thread);
  DedupPcDescriptors(thread);
  DedupDeoptEntries(thread);
#if defined(DART_PRECOMPILER)
  DedupCatchEntryMovesMaps(thread);
  DedupUnlinkedCalls(thread);
  PruneSubclasses(thread);
#endif  // defined(DART_PRECOMPILER)
  DedupCodeSourceMaps(thread);

  if (FLAG_dedup_instructions) {
    DedupInstructions(thread);
#if defined(DART_PRECOMPILER)
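// Records, for each Code object and the metadata it owns (its code source
// map and compressed stack maps), which deferred loading unit it belongs to,
// derived from the owning function's library.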
  explicit AssignLoadingUnitsCodeVisitor(Zone* zone)
      : heap_(Thread::Current()->heap()),
        code_(Code::Handle(zone)),
        cls_(Class::Handle(zone)),
        obj_(Object::Handle(zone)) {}

  void VisitObject(ObjectPtr obj) override {
    if (obj->IsCode()) {

  void VisitCode(const Code& code) {
    if (code.IsFunctionCode()) {
      func_ ^= code.function();
      obj_ = func_.Owner();
      lib_ = cls_.library();
      if (lib_.IsNull()) {
      unit_ = lib_.loading_unit();
      if (unit_.IsNull()) {
    } else if (code.IsTypeTestStubCode() || code.IsStubCode() ||
               code.IsAllocationStubCode()) {

    heap_->SetLoadingUnit(code.ptr(), id);

    obj_ = code.code_source_map();
    MergeAssignment(obj_, id);
    obj_ = code.compressed_stackmaps();
    MergeAssignment(obj_, id);

  void MergeAssignment(const Object& obj, intptr_t id) {
    if (obj.IsNull()) return;

    intptr_t old_id = heap_->GetLoadingUnit(obj_.ptr());
      heap_->SetLoadingUnit(obj_.ptr(), id);
    } else if (old_id == id) {

void ProgramVisitor::AssignUnits(Thread* thread) {
  StackZone stack_zone(thread);
  Heap* heap = thread->heap();

  AssignLoadingUnitsCodeVisitor visitor(thread->zone());
  HeapIterationScope iter(thread);
  iter.IterateVMIsolateObjects(&visitor);
  iter.IterateObjects(&visitor);
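// Produces a fingerprint of the program by hashing data derived from each
// class and function, the tagged instances in object pools, and instruction
// payloads.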
class ProgramHashVisitor : public CodeVisitor {
  explicit ProgramHashVisitor(Zone* zone)
      : str_(String::Handle(zone)),
        pool_(ObjectPool::Handle(zone)),
        obj_(Object::Handle(zone)),
        instr_(Instructions::Handle(zone)),

  void VisitClass(const Class& cls) {
    VisitInstance(str_);

  void VisitFunction(const Function& function) {
    VisitInstance(str_);

  void VisitCode(const Code& code) {
    pool_ = code.object_pool();
    instr_ = code.instructions();

  void VisitPool(const ObjectPool& pool) {
    if (pool.IsNull()) return;

    for (intptr_t i = 0; i < pool.Length(); i++) {
      if (pool.TypeAt(i) == ObjectPool::EntryType::kTaggedObject) {
        obj_ = pool.ObjectAt(i);
        if (obj_.IsInstance()) {
          VisitInstance(Instance::Cast(obj_));

  void VisitInstance(const Instance& instance) {

  Instructions& instr_;

  StackZone stack_zone(thread);
  Zone* zone = thread->zone();

  ProgramHashVisitor visitor(zone);
  WalkProgram(zone, thread->isolate_group(), &visitor);
      zone, thread->isolate_group()->object_store()->global_object_pool()));
  return visitor.hash();