// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#if !defined(DART_PRECOMPILED_RUNTIME)

#include "vm/program_visitor.h"

#include "vm/closure_functions_cache.h"
#include "vm/code_patcher.h"
#include "vm/deopt_instructions.h"
#include "vm/hash_map.h"
#include "vm/object.h"
#include "vm/object_store.h"
#include "vm/symbols.h"

namespace dart {

class WorklistElement : public ZoneAllocated {
 public:
  WorklistElement(Zone* zone, const Object& object)
      : object_(Object::Handle(zone, object.ptr())), next_(nullptr) {}

  ObjectPtr value() const { return object_.ptr(); }

  void set_next(WorklistElement* elem) { next_ = elem; }
  WorklistElement* next() const { return next_; }

 private:
  const Object& object_;
  WorklistElement* next_;

  DISALLOW_COPY_AND_ASSIGN(WorklistElement);
};

// Implements a FIFO queue, using IsEmpty, Add, Remove operations.
class Worklist : public ValueObject {
 public:
  explicit Worklist(Zone* zone)
      : zone_(zone), first_(nullptr), last_(nullptr) {}

  bool IsEmpty() const { return first_ == nullptr; }

  void Add(const Object& value) {
    auto element = new (zone_) WorklistElement(zone_, value);
    if (first_ == nullptr) {
      first_ = element;
      ASSERT(last_ == nullptr);
    } else {
      ASSERT(last_ != nullptr);
      last_->set_next(element);
    }
    last_ = element;
    ASSERT(first_ != nullptr && last_ != nullptr);
  }

  ObjectPtr Remove() {
    ASSERT(first_ != nullptr);
    WorklistElement* result = first_;
    first_ = first_->next();
    if (first_ == nullptr) {
      last_ = nullptr;
    }
    return result->value();
  }

 private:
  Zone* const zone_;
  WorklistElement* first_;
  WorklistElement* last_;

  DISALLOW_COPY_AND_ASSIGN(Worklist);
};

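// Illustrative sketch (not part of the original file; `zone` and the objects
// are assumed to be in scope): the worklist yields objects in insertion
// order, so the program walk below proceeds breadth-first from its roots.
//
//   Worklist worklist(zone);
//   worklist.Add(first_object);
//   worklist.Add(second_object);
//   ASSERT(worklist.Remove() == first_object.ptr());  // FIFO order.
//   ASSERT(worklist.Remove() == second_object.ptr());
//   ASSERT(worklist.IsEmpty());
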
// Walks through the classes, functions, and code for the current program.
//
// Uses the heap object ID table to determine whether or not a given object
// has been visited already.
class ProgramWalker : public ValueObject {
 public:
  ProgramWalker(Zone* zone, Heap* heap, ClassVisitor* visitor)
      : heap_(heap),
        visitor_(visitor),
        worklist_(zone),
        class_object_(Object::Handle(zone)),
        class_fields_(Array::Handle(zone)),
        class_field_(Field::Handle(zone)),
        class_functions_(Array::Handle(zone)),
        class_function_(Function::Handle(zone)),
        class_code_(Code::Handle(zone)),
        function_code_(Code::Handle(zone)),
        static_calls_array_(Array::Handle(zone)),
        static_calls_table_entry_(Object::Handle(zone)),
        worklist_entry_(Object::Handle(zone)) {}

  ~ProgramWalker() { heap_->ResetObjectIdTable(); }

  // Adds the given object to the worklist if it's an object type that the
  // visitor can visit.
  void AddToWorklist(const Object& object) {
    // We don't visit null, non-heap objects, or objects in the VM heap.
    if (object.IsNull() || object.IsSmi() || object.InVMIsolateHeap()) return;
    // Check and set visited, even if we don't end up adding this to the list.
    if (heap_->GetObjectId(object.ptr()) != 0) return;
    heap_->SetObjectId(object.ptr(), 1);
    if (object.IsClass() ||
        (object.IsFunction() && visitor_->IsFunctionVisitor()) ||
        (object.IsCode() && visitor_->IsCodeVisitor())) {
      worklist_.Add(object);
    }
  }

  void VisitWorklist() {
    while (!worklist_.IsEmpty()) {
      worklist_entry_ = worklist_.Remove();
      if (worklist_entry_.IsClass()) {
        VisitClass(Class::Cast(worklist_entry_));
      } else if (worklist_entry_.IsFunction()) {
        VisitFunction(Function::Cast(worklist_entry_));
      } else if (worklist_entry_.IsCode()) {
        VisitCode(Code::Cast(worklist_entry_));
      } else {
        FATAL("Got unexpected object %s", worklist_entry_.ToCString());
      }
    }
  }

 private:
  void VisitClass(const Class& cls) {
    visitor_->VisitClass(cls);

    if (!visitor_->IsFunctionVisitor()) return;

    class_functions_ = cls.current_functions();
    for (intptr_t j = 0; j < class_functions_.Length(); j++) {
      class_function_ ^= class_functions_.At(j);
      AddToWorklist(class_function_);
      if (class_function_.HasImplicitClosureFunction()) {
        class_function_ = class_function_.ImplicitClosureFunction();
        AddToWorklist(class_function_);
      }
    }

    class_functions_ = cls.invocation_dispatcher_cache();
    for (intptr_t j = 0; j < class_functions_.Length(); j++) {
      class_object_ = class_functions_.At(j);
      if (class_object_.IsFunction()) {
        class_function_ ^= class_functions_.At(j);
        AddToWorklist(class_function_);
      }
    }

    class_fields_ = cls.fields();
    for (intptr_t j = 0; j < class_fields_.Length(); j++) {
      class_field_ ^= class_fields_.At(j);
      if (class_field_.HasInitializerFunction()) {
        class_function_ = class_field_.InitializerFunction();
        AddToWorklist(class_function_);
      }
    }

    if (!visitor_->IsCodeVisitor()) return;

    class_code_ = cls.allocation_stub();
    if (!class_code_.IsNull()) AddToWorklist(class_code_);
  }

  void VisitFunction(const Function& function) {
    ASSERT(visitor_->IsFunctionVisitor());
    visitor_->AsFunctionVisitor()->VisitFunction(function);
    if (!visitor_->IsCodeVisitor() || !function.HasCode()) return;
    function_code_ = function.CurrentCode();
    AddToWorklist(function_code_);
  }

  void VisitCode(const Code& code) {
    ASSERT(visitor_->IsCodeVisitor());
    visitor_->AsCodeVisitor()->VisitCode(code);

    // In the precompiler, some entries in the static calls table may need
    // to be visited as they may not be reachable from other sources.
    //
    // TODO(dartbug.com/41636): Figure out why walking the static calls table
    // in JIT mode with the DedupInstructions visitor fails, so we can remove
    // the check for AOT mode.
    static_calls_array_ = code.static_calls_target_table();
    if (FLAG_precompiled_mode && !static_calls_array_.IsNull()) {
      StaticCallsTable static_calls(static_calls_array_);
      for (auto& view : static_calls) {
        static_calls_table_entry_ =
            view.Get<Code::kSCallTableCodeOrTypeTarget>();
        if (static_calls_table_entry_.IsCode()) {
          AddToWorklist(Code::Cast(static_calls_table_entry_));
        }
      }
    }
  }

  Heap* const heap_;
  ClassVisitor* const visitor_;
  Worklist worklist_;
  Object& class_object_;
  Array& class_fields_;
  Field& class_field_;
  Array& class_functions_;
  Function& class_function_;
  Code& class_code_;
  Code& function_code_;
  Array& static_calls_array_;
  Object& static_calls_table_entry_;
  Object& worklist_entry_;
};

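// Illustrative sketch (not part of the original file; `zone`, `heap`, `cls`,
// and `visitor` are assumed to be in scope): AddToWorklist doubles as the
// visited check, using the heap's object ID table as a mark bit.
//
//   ProgramWalker walker(zone, heap, visitor);
//   walker.AddToWorklist(cls);  // Marked via Heap::SetObjectId(..., 1).
//   walker.AddToWorklist(cls);  // Already marked, so not enqueued again.
//   walker.VisitWorklist();
//   // ~ProgramWalker() calls Heap::ResetObjectIdTable() to clear the marks.
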
void ProgramVisitor::WalkProgram(Zone* zone,
                                 IsolateGroup* isolate_group,
                                 ClassVisitor* visitor) {
  auto const object_store = isolate_group->object_store();
  auto const heap = isolate_group->heap();
  ProgramWalker walker(zone, heap, visitor);

  // Walk through the libraries looking for visitable objects.
  const auto& libraries =
      GrowableObjectArray::Handle(zone, object_store->libraries());
  auto& lib = Library::Handle(zone);
  auto& cls = Class::Handle(zone);

  for (intptr_t i = 0; i < libraries.Length(); i++) {
    lib ^= libraries.At(i);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();
      walker.AddToWorklist(cls);
    }
  }

  // If there's a global object pool, add any visitable objects.
  const auto& global_object_pool =
      ObjectPool::Handle(zone, object_store->global_object_pool());
  if (!global_object_pool.IsNull()) {
    auto& object = Object::Handle(zone);
    for (intptr_t i = 0; i < global_object_pool.Length(); i++) {
      auto const type = global_object_pool.TypeAt(i);
      if (type != ObjectPool::EntryType::kTaggedObject) continue;
      object = global_object_pool.ObjectAt(i);
      walker.AddToWorklist(object);
    }
  }

  if (visitor->IsFunctionVisitor()) {
    // Function objects not necessarily reachable from classes.
    ClosureFunctionsCache::ForAllClosureFunctions([&](const Function& fun) {
      walker.AddToWorklist(fun);
      return true;  // Continue iteration.
    });

    // TODO(dartbug.com/43049): Use a more general solution and remove manual
    // tracking through object_store->ffi_callback_functions.
    if (object_store->ffi_callback_functions() != Array::null()) {
      auto& function = Function::Handle(zone);
      FfiCallbackFunctionSet set(object_store->ffi_callback_functions());
      FfiCallbackFunctionSet::Iterator it(&set);
      while (it.MoveNext()) {
        const intptr_t entry = it.Current();
        function ^= set.GetKey(entry);
        walker.AddToWorklist(function);
      }
      set.Release();
    }
  }

  if (visitor->IsCodeVisitor()) {
    // Code objects not necessarily reachable from functions.
    auto& code = Code::Handle(zone);
    const auto& dispatch_table_entries =
        Array::Handle(zone, object_store->dispatch_table_code_entries());
    if (!dispatch_table_entries.IsNull()) {
      for (intptr_t i = 0; i < dispatch_table_entries.Length(); i++) {
        code ^= dispatch_table_entries.At(i);
        walker.AddToWorklist(code);
      }
    }
  }

  // Walk the program starting from any roots we added to the worklist.
  walker.VisitWorklist();
}

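// Illustrative sketch (not part of the original file; CountCodeVisitor is a
// hypothetical name): a minimal CodeVisitor that counts the Code objects
// reachable through WalkProgram.
//
//   class CountCodeVisitor : public CodeVisitor {
//    public:
//     void VisitCode(const Code& code) { count_++; }
//     intptr_t count() const { return count_; }
//
//    private:
//     intptr_t count_ = 0;
//   };
//
//   CountCodeVisitor counter;
//   ProgramVisitor::WalkProgram(zone, isolate_group, &counter);
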
// A base class for deduplication of objects. T is the type of canonical
// objects being stored, whereas S is a trait appropriate for a
// DirectChainedHashMap based set containing those canonical objects.
template <typename T, typename S>
class Deduper : public ValueObject {
 public:
  explicit Deduper(Zone* zone) : zone_(zone), canonical_objects_(zone) {}
  virtual ~Deduper() {}

 protected:
  // Predicate for objects of type T. Must be overridden for class hierarchies
  // like Instance and AbstractType, as it defaults to class ID comparison.
  virtual bool IsCorrectType(const Object& obj) const {
    return obj.GetClassId() == T::kClassId;
  }

  // Predicate for choosing Ts to canonicalize.
  virtual bool CanCanonicalize(const T& t) const { return true; }

  // Predicate for objects that are okay to add to the canonical hash set.
  // Override IsCorrectType and/or CanCanonicalize to change the behavior.
  bool ShouldAdd(const Object& obj) const {
    return !obj.IsNull() && IsCorrectType(obj) && CanCanonicalize(T::Cast(obj));
  }

  void AddCanonical(const T& obj) {
    if (!ShouldAdd(obj)) return;
    ASSERT(!canonical_objects_.HasKey(&obj));
    canonical_objects_.Insert(&T::ZoneHandle(zone_, obj.ptr()));
  }

  void AddVMBaseObjects() {
    const auto& object_table = Object::vm_isolate_snapshot_object_table();
    auto& obj = Object::Handle(zone_);
    for (intptr_t i = 0; i < object_table.Length(); i++) {
      obj = object_table.At(i);
      if (!ShouldAdd(obj)) continue;
      AddCanonical(T::Cast(obj));
    }
  }

  typename T::ObjectPtrType Dedup(const T& obj) {
    if (ShouldAdd(obj)) {
      if (auto const canonical = canonical_objects_.LookupValue(&obj)) {
        return canonical->ptr();
      }
      AddCanonical(obj);
    }
    return obj.ptr();
  }

  Zone* const zone_;
  DirectChainedHashMap<S> canonical_objects_;
};

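// Illustrative sketch (not part of the original file; the MyObject names are
// hypothetical): a concrete Deduper pairs a handle type T with a hash-map
// trait S that supplies Key/Value/Pair typedefs plus KeyOf, ValueOf, Hash,
// and IsKeyEqual, mirroring the traits defined below.
//
//   class MyObjectKeyValueTrait {
//    public:
//     typedef const MyObject* Key;
//     typedef const MyObject* Value;
//     typedef const MyObject* Pair;
//
//     static Key KeyOf(Pair kv) { return kv; }
//     static Value ValueOf(Pair kv) { return kv; }
//     static uword Hash(Key key) { return key->Hash(); }
//     static bool IsKeyEqual(Pair pair, Key key) { return pair->Equals(*key); }
//   };
//
//   class MyObjectDeduper
//       : public Deduper<MyObject, MyObjectKeyValueTrait> {
//     // Dedup(obj) then returns the canonical representative for obj.
//   };
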
void ProgramVisitor::BindStaticCalls(Thread* thread) {
  class BindStaticCallsVisitor : public CodeVisitor {
   public:
    explicit BindStaticCallsVisitor(Zone* zone)
        : table_(Array::Handle(zone)),
          kind_and_offset_(Smi::Handle(zone)),
          target_(Object::Handle(zone)),
          target_code_(Code::Handle(zone)) {}

    void VisitCode(const Code& code) {
      table_ = code.static_calls_target_table();
      if (table_.IsNull()) return;

      StaticCallsTable static_calls(table_);
      // We can only remove the target table in precompiled mode, since more
      // calls may be added later otherwise.
      bool only_call_via_code = FLAG_precompiled_mode;
      for (const auto& view : static_calls) {
        kind_and_offset_ = view.Get<Code::kSCallTableKindAndOffset>();
        auto const kind = Code::KindField::decode(kind_and_offset_.Value());
        if (kind != Code::kCallViaCode) {
          ASSERT(kind == Code::kPcRelativeCall ||
                 kind == Code::kPcRelativeTailCall ||
                 kind == Code::kPcRelativeTTSCall);
          only_call_via_code = false;
          continue;
        }

        target_ = view.Get<Code::kSCallTableFunctionTarget>();
        if (target_.IsNull()) {
          target_ = view.Get<Code::kSCallTableCodeOrTypeTarget>();
          ASSERT(!target_.IsNull());  // Already bound.
          continue;
        }

        auto const pc_offset =
            Code::OffsetField::decode(kind_and_offset_.Value());
        const uword pc = pc_offset + code.PayloadStart();

        // In JIT mode, static calls initially call the CallStaticFunction stub
        // because their target might not be compiled yet. If the target has
        // been compiled by this point, we patch the call to call the target
        // directly.
        //
        // In precompiled mode, the binder runs after tree shaking, during which
        // all targets have been compiled, and so the binder replaces all static
        // calls with direct calls to the target.
        //
        // Cf. runtime entry PatchStaticCall called from CallStaticFunction
        // stub.
        const auto& fun = Function::Cast(target_);
        ASSERT(!FLAG_precompiled_mode || fun.HasCode());
        target_code_ = fun.HasCode() ? fun.CurrentCode()
                                     : StubCode::CallStaticFunction().ptr();
        CodePatcher::PatchStaticCallAt(pc, code, target_code_);
      }

      if (only_call_via_code) {
        ASSERT(FLAG_precompiled_mode);
        // In precompiled mode, the Dart runtime won't patch static calls
        // anymore, so drop the static call table to save space.
        // Note: it is okay to drop the table fully even when generating a
        // V8 snapshot profile because code objects are linked through the
        // pool.
        code.set_static_calls_target_table(Object::empty_array());
      }
    }

   private:
    Array& table_;
    Smi& kind_and_offset_;
    Object& target_;
    Code& target_code_;
  };

  StackZone stack_zone(thread);
  BindStaticCallsVisitor visitor(thread->zone());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
}

DECLARE_FLAG(charp, trace_precompiler_to);
DECLARE_FLAG(charp, write_v8_snapshot_profile_to);

void ProgramVisitor::ShareMegamorphicBuckets(Thread* thread) {
  StackZone stack_zone(thread);
  Zone* zone = thread->zone();
  const GrowableObjectArray& table = GrowableObjectArray::Handle(
      zone, thread->isolate_group()->object_store()->megamorphic_cache_table());
  if (table.IsNull()) return;
  MegamorphicCache& cache = MegamorphicCache::Handle(zone);

  const intptr_t capacity = 1;
  const Array& buckets = Array::Handle(
      zone, Array::New(MegamorphicCache::kEntryLength * capacity, Heap::kOld));
  const Function& handler = Function::Handle(zone);
  MegamorphicCache::SetEntry(buckets, 0, Object::smi_illegal_cid(), handler);

  for (intptr_t i = 0; i < table.Length(); i++) {
    cache ^= table.At(i);
    cache.set_buckets(buckets);
    cache.set_mask(capacity - 1);
    cache.set_filled_entry_count(0);
  }
}

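// Illustrative note (not part of the original file): a megamorphic cache
// probe masks the hash with `mask`, so with the shared one-entry buckets
// every lookup lands on the sentinel entry and takes the miss path:
//
//   capacity = 1  =>  mask = capacity - 1 = 0
//   index = hash & mask = 0  // Always the Object::smi_illegal_cid() entry.
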
class StackMapEntry : public ZoneAllocated {
 public:
  StackMapEntry(Zone* zone,
                const CompressedStackMaps::Iterator<CompressedStackMaps>& it)
      : maps_(CompressedStackMaps::Handle(zone, it.maps_.ptr())),
        bits_container_(
            CompressedStackMaps::Handle(zone, it.bits_container_.ptr())),
        // If the map uses the global table, this accessor call ensures the
        // entry is fully loaded before we retrieve [it.current_bits_offset_].
        spill_slot_bit_count_(it.SpillSlotBitCount()),
        non_spill_slot_bit_count_(it.Length() - it.SpillSlotBitCount()),
        bits_offset_(it.current_bits_offset_) {
    ASSERT(!maps_.IsNull() && !maps_.IsGlobalTable());
    ASSERT(!bits_container_.IsNull());
    ASSERT(!maps_.UsesGlobalTable() || bits_container_.IsGlobalTable());
    ASSERT(it.current_spill_slot_bit_count_ >= 0);
  }

  static constexpr intptr_t kHashBits = Object::kHashBits;

  uintptr_t Hash() {
    if (hash_ != 0) return hash_;
    uint32_t hash = 0;
    hash = CombineHashes(hash, spill_slot_bit_count_);
    hash = CombineHashes(hash, non_spill_slot_bit_count_);
    {
      NoSafepointScope scope;
      auto const start = PayloadData();
      auto const end = start + PayloadLength();
      for (auto cursor = start; cursor < end; cursor++) {
        hash = CombineHashes(hash, *cursor);
      }
    }
    hash_ = FinalizeHash(hash, kHashBits);
    return hash_;
  }

  bool Equals(const StackMapEntry& other) const {
    if (spill_slot_bit_count_ != other.spill_slot_bit_count_ ||
        non_spill_slot_bit_count_ != other.non_spill_slot_bit_count_) {
      return false;
    }
    // Since we ensure that bits in the payload that are not part of the
    // actual stackmap data are cleared, we can just compare payloads by byte
    // instead of calling IsObject for each bit.
    NoSafepointScope scope;
    return memcmp(PayloadData(), other.PayloadData(), PayloadLength()) == 0;
  }

  // Encodes this StackMapEntry to the given stream and returns the
  // initial offset of the entry in the stream.
  intptr_t EncodeTo(NonStreamingWriteStream* stream) {
    auto const current_offset = stream->Position();
    stream->WriteLEB128(spill_slot_bit_count_);
    stream->WriteLEB128(non_spill_slot_bit_count_);
    {
      NoSafepointScope scope;
      stream->WriteBytes(PayloadData(), PayloadLength());
    }
    return current_offset;
  }

  intptr_t UsageCount() const { return uses_; }
  void IncrementUsageCount() { uses_ += 1; }

 private:
  intptr_t Length() const {
    return spill_slot_bit_count_ + non_spill_slot_bit_count_;
  }
  intptr_t PayloadLength() const {
    return Utils::RoundUp(Length(), kBitsPerByte) >> kBitsPerByteLog2;
  }
  const uint8_t* PayloadData() const {
    return bits_container_.ptr()->untag()->payload()->data() + bits_offset_;
  }

  const CompressedStackMaps& maps_;
  const CompressedStackMaps& bits_container_;
  const intptr_t spill_slot_bit_count_;
  const intptr_t non_spill_slot_bit_count_;
  const intptr_t bits_offset_;

  intptr_t uses_ = 1;
  intptr_t hash_ = 0;
};

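// Illustrative note (not part of the original file): EncodeTo uses LEB128,
// which stores 7 payload bits per byte, so an entry's offset into the global
// table costs one byte when it is below 128 and two bytes up to 16383:
//
//   offset 0..127     -> 1 byte when LEB128-encoded
//   offset 128..16383 -> 2 bytes
//
// This is why CreateGlobalTable below sorts entries by descending usage
// count before encoding: the most-referenced entries get the cheapest
// offsets.
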
// Used for maps of indices and offsets. These are non-negative, and so the
// value for entries may be 0. Since 0 is kNoValue for
// RawPointerKeyValueTrait<const StackMapEntry, intptr_t>, we can't just use it.
class StackMapEntryKeyIntValueTrait {
 public:
  typedef StackMapEntry* Key;
  typedef intptr_t Value;

  struct Pair {
    Key key;
    Value value;
    Pair() : key(nullptr), value(-1) {}
    Pair(const Key key, const Value& value) : key(key), value(value) {}
    Pair(const Pair& other) : key(other.key), value(other.value) {}
    Pair& operator=(const Pair&) = default;
  };

  static Key KeyOf(Pair kv) { return kv.key; }
  static Value ValueOf(Pair kv) { return kv.value; }
  static uword Hash(Key key) { return key->Hash(); }
  static bool IsKeyEqual(Pair kv, Key key) { return key->Equals(*kv.key); }
};

typedef DirectChainedHashMap<StackMapEntryKeyIntValueTrait> StackMapEntryIntMap;

void ProgramVisitor::NormalizeAndDedupCompressedStackMaps(Thread* thread) {
  // Walks all the CSMs in Code objects and collects their entry information
  // for consolidation.
  class CollectStackMapEntriesVisitor : public CodeVisitor {
   public:
    CollectStackMapEntriesVisitor(Zone* zone,
                                  const CompressedStackMaps& global_table)
        : zone_(zone),
          old_global_table_(global_table),
          compressed_stackmaps_(CompressedStackMaps::Handle(zone)),
          collected_entries_(zone, 2),
          entry_indices_(zone),
          entry_offset_(zone) {
      ASSERT(old_global_table_.IsNull() || old_global_table_.IsGlobalTable());
    }

    void VisitCode(const Code& code) {
      compressed_stackmaps_ = code.compressed_stackmaps();
      CompressedStackMaps::Iterator<CompressedStackMaps> it(
          compressed_stackmaps_, old_global_table_);
      while (it.MoveNext()) {
        auto const entry = new (zone_) StackMapEntry(zone_, it);
        auto const index = entry_indices_.LookupValue(entry);
        if (index < 0) {
          auto new_index = collected_entries_.length();
          collected_entries_.Add(entry);
          entry_indices_.Insert({entry, new_index});
        } else {
          collected_entries_.At(index)->IncrementUsageCount();
        }
      }
    }

    // Creates a new global table of stack map information. Also adds the
    // offsets of encoded StackMapEntry objects to entry_offsets for use
    // when normalizing CompressedStackMaps.
    CompressedStackMapsPtr CreateGlobalTable(
        StackMapEntryIntMap* entry_offsets) {
      ASSERT(entry_offsets->IsEmpty());
      if (collected_entries_.length() == 0) {
        return CompressedStackMaps::null();
      }
      // First, sort the entries from most used to least used. This way,
      // the most often used CSMs will have the lowest offsets, which means
      // they will be smaller when LEB128 encoded.
      collected_entries_.Sort(
          [](StackMapEntry* const* e1, StackMapEntry* const* e2) {
            return static_cast<int>((*e2)->UsageCount() - (*e1)->UsageCount());
          });
      MallocWriteStream stream(128);
      // Encode the entries and record their offset in the payload. Sorting the
      // entries may have changed their indices, so update those as well.
      for (intptr_t i = 0, n = collected_entries_.length(); i < n; i++) {
        auto const entry = collected_entries_.At(i);
        entry_indices_.Update({entry, i});
        entry_offsets->Insert({entry, entry->EncodeTo(&stream)});
      }
      const auto& data = CompressedStackMaps::Handle(
          zone_, CompressedStackMaps::NewGlobalTable(stream.buffer(),
                                                     stream.bytes_written()));
      return data.ptr();
    }

   private:
    Zone* const zone_;
    const CompressedStackMaps& old_global_table_;

    CompressedStackMaps& compressed_stackmaps_;
    GrowableArray<StackMapEntry*> collected_entries_;
    StackMapEntryIntMap entry_indices_;
    StackMapEntryIntMap entry_offset_;
  };

  // Walks all the CSMs in Code objects, normalizes them, and then dedups them.
  //
  // We use normalized to refer to CSMs whose entries are references to the
  // new global table created during stack map collection, and non-normalized
  // for CSMs that either have inlined entry information or whose entries are
  // references to the _old_ global table in the object store, if any.
  class NormalizeAndDedupCompressedStackMapsVisitor
      : public CodeVisitor,
        public Deduper<CompressedStackMaps,
                       PointerSetKeyValueTrait<const CompressedStackMaps>> {
   public:
    NormalizeAndDedupCompressedStackMapsVisitor(Zone* zone,
                                                IsolateGroup* isolate_group)
        : Deduper(zone),
          old_global_table_(CompressedStackMaps::Handle(
              zone,
              isolate_group->object_store()
                  ->canonicalized_stack_map_entries())),
          entry_offsets_(zone),
          maps_(CompressedStackMaps::Handle(zone)) {
      ASSERT(old_global_table_.IsNull() || old_global_table_.IsGlobalTable());
      // The stack map normalization and deduplication happens in two phases:
      //
      // 1) Visit all CompressedStackMaps (CSM) objects and collect individual
      // entry info as canonicalized StackMapEntries (SMEs). Also record the
      // frequency the same entry info was seen across all CSMs in each SME.

      CollectStackMapEntriesVisitor collect_visitor(zone, old_global_table_);
      WalkProgram(zone, isolate_group, &collect_visitor);

      // The results of phase 1 are used to create a new global table with
      // entries sorted by decreasing frequency, so that entries that appear
      // more often in CSMs have smaller payload offsets (fewer bytes used in
      // the LEB128 encoding). The new global table is put into place
      // immediately, as we already have a handle on the old table.

      const auto& new_global_table = CompressedStackMaps::Handle(
          zone, collect_visitor.CreateGlobalTable(&entry_offsets_));
      isolate_group->object_store()->set_canonicalized_stack_map_entries(
          new_global_table);

      // 2) Visit all CSMs and replace each with a canonicalized normalized
      // version that uses the new global table for non-PC offset entry
      // information. This part is done in VisitCode.
    }

    void VisitCode(const Code& code) {
      maps_ = code.compressed_stackmaps();
      if (maps_.IsNull()) return;
      // First check is to make sure [maps] hasn't already been normalized,
      // since any normalized map already has a canonical entry in the set.
      if (auto const canonical = canonical_objects_.LookupValue(&maps_)) {
        maps_ = canonical->ptr();
      } else {
        maps_ = NormalizeEntries(maps_);
        maps_ = Dedup(maps_);
      }
      code.set_compressed_stackmaps(maps_);
    }

   private:
    // Creates a normalized CSM from the given non-normalized CSM.
    CompressedStackMapsPtr NormalizeEntries(const CompressedStackMaps& maps) {
      if (maps.payload_size() == 0) {
        // No entries, so use the canonical empty map.
        return Object::empty_compressed_stackmaps().ptr();
      }
      MallocWriteStream new_payload(maps.payload_size());
      CompressedStackMaps::Iterator<CompressedStackMaps> it(maps,
                                                            old_global_table_);
      intptr_t last_offset = 0;
      while (it.MoveNext()) {
        StackMapEntry entry(zone_, it);
        const intptr_t entry_offset = entry_offsets_.LookupValue(&entry);
        const intptr_t pc_delta = it.pc_offset() - last_offset;
        new_payload.WriteLEB128(pc_delta);
        new_payload.WriteLEB128(entry_offset);
        last_offset = it.pc_offset();
      }
      return CompressedStackMaps::NewUsingTable(new_payload.buffer(),
                                                new_payload.bytes_written());
    }

    const CompressedStackMaps& old_global_table_;
    StackMapEntryIntMap entry_offsets_;
    CompressedStackMaps& maps_;
  };

  StackZone stack_zone(thread);
  NormalizeAndDedupCompressedStackMapsVisitor visitor(thread->zone(),
                                                      thread->isolate_group());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
}

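// Illustrative note (not part of the original file): after normalization, a
// CompressedStackMaps payload is a sequence of LEB128-encoded pairs
//
//   [pc_delta 0][entry_offset 0][pc_delta 1][entry_offset 1]...
//
// where each pc_delta is the PC offset difference from the previous entry
// and each entry_offset points at an encoded StackMapEntry in the global
// table, exactly as written by NormalizeEntries above.
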
class PcDescriptorsKeyValueTrait {
 public:
  // Typedefs needed for the DirectChainedHashMap template.
  typedef const PcDescriptors* Key;
  typedef const PcDescriptors* Value;
  typedef const PcDescriptors* Pair;

  static Key KeyOf(Pair kv) { return kv; }

  static Value ValueOf(Pair kv) { return kv; }

  static inline uword Hash(Key key) { return Utils::WordHash(key->Length()); }

  static inline bool IsKeyEqual(Pair pair, Key key) {
    return pair->Equals(*key);
  }
};

void ProgramVisitor::DedupPcDescriptors(Thread* thread) {
  class DedupPcDescriptorsVisitor
      : public CodeVisitor,
        public Deduper<PcDescriptors, PcDescriptorsKeyValueTrait> {
   public:
    explicit DedupPcDescriptorsVisitor(Zone* zone)
        : Deduper(zone), pc_descriptor_(PcDescriptors::Handle(zone)) {
      if (Snapshot::IncludesCode(Dart::vm_snapshot_kind())) {
        // Prefer existing objects in the VM isolate.
        AddVMBaseObjects();
      }
    }

    void VisitCode(const Code& code) {
      pc_descriptor_ = code.pc_descriptors();
      pc_descriptor_ = Dedup(pc_descriptor_);
      code.set_pc_descriptors(pc_descriptor_);
    }

   private:
    PcDescriptors& pc_descriptor_;
  };

  StackZone stack_zone(thread);
  DedupPcDescriptorsVisitor visitor(thread->zone());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
}

class TypedDataKeyValueTrait {
 public:
  // Typedefs needed for the DirectChainedHashMap template.
  typedef const TypedData* Key;
  typedef const TypedData* Value;
  typedef const TypedData* Pair;

  static Key KeyOf(Pair kv) { return kv; }

  static Value ValueOf(Pair kv) { return kv; }

  static inline uword Hash(Key key) { return key->CanonicalizeHash(); }

  static inline bool IsKeyEqual(Pair pair, Key key) {
    return pair->CanonicalizeEquals(*key);
  }
};

class TypedDataDeduper : public Deduper<TypedData, TypedDataKeyValueTrait> {
 public:
  explicit TypedDataDeduper(Zone* zone) : Deduper(zone) {}

 private:
  bool IsCorrectType(const Object& obj) const { return obj.IsTypedData(); }
};

void ProgramVisitor::DedupDeoptEntries(Thread* thread) {
  class DedupDeoptEntriesVisitor : public CodeVisitor, public TypedDataDeduper {
   public:
    explicit DedupDeoptEntriesVisitor(Zone* zone)
        : TypedDataDeduper(zone),
          deopt_table_(Array::Handle(zone)),
          deopt_entry_(TypedData::Handle(zone)),
          offset_(Smi::Handle(zone)),
          reason_and_flags_(Smi::Handle(zone)) {}

    void VisitCode(const Code& code) {
      deopt_table_ = code.deopt_info_array();
      if (deopt_table_.IsNull()) return;
      intptr_t length = DeoptTable::GetLength(deopt_table_);
      for (intptr_t i = 0; i < length; i++) {
        DeoptTable::GetEntry(deopt_table_, i, &offset_, &deopt_entry_,
                             &reason_and_flags_);
        ASSERT(!deopt_entry_.IsNull());
        deopt_entry_ = Dedup(deopt_entry_);
        ASSERT(!deopt_entry_.IsNull());
        DeoptTable::SetEntry(deopt_table_, i, offset_, deopt_entry_,
                             reason_and_flags_);
      }
    }

   private:
    Array& deopt_table_;
    TypedData& deopt_entry_;
    Smi& offset_;
    Smi& reason_and_flags_;
  };

  if (FLAG_precompiled_mode) return;

  StackZone stack_zone(thread);
  DedupDeoptEntriesVisitor visitor(thread->zone());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
}

#if defined(DART_PRECOMPILER)
void ProgramVisitor::DedupCatchEntryMovesMaps(Thread* thread) {
  class DedupCatchEntryMovesMapsVisitor : public CodeVisitor,
                                          public TypedDataDeduper {
   public:
    explicit DedupCatchEntryMovesMapsVisitor(Zone* zone)
        : TypedDataDeduper(zone),
          catch_entry_moves_maps_(TypedData::Handle(zone)) {}

    void VisitCode(const Code& code) {
      catch_entry_moves_maps_ = code.catch_entry_moves_maps();
      catch_entry_moves_maps_ = Dedup(catch_entry_moves_maps_);
      code.set_catch_entry_moves_maps(catch_entry_moves_maps_);
    }

   private:
    TypedData& catch_entry_moves_maps_;
  };

  if (!FLAG_precompiled_mode) return;

  StackZone stack_zone(thread);
  DedupCatchEntryMovesMapsVisitor visitor(thread->zone());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
}

class UnlinkedCallKeyValueTrait {
 public:
  // Typedefs needed for the DirectChainedHashMap template.
  typedef const UnlinkedCall* Key;
  typedef const UnlinkedCall* Value;
  typedef const UnlinkedCall* Pair;

  static Key KeyOf(Pair kv) { return kv; }

  static Value ValueOf(Pair kv) { return kv; }

  static inline uword Hash(Key key) { return key->Hash(); }

  static inline bool IsKeyEqual(Pair pair, Key key) {
    return pair->Equals(*key);
  }
};

void ProgramVisitor::DedupUnlinkedCalls(Thread* thread) {
  class DedupUnlinkedCallsVisitor
      : public CodeVisitor,
        public Deduper<UnlinkedCall, UnlinkedCallKeyValueTrait> {
   public:
    explicit DedupUnlinkedCallsVisitor(Zone* zone, IsolateGroup* isolate_group)
        : Deduper(zone),
          entry_(Object::Handle(zone)),
          pool_(ObjectPool::Handle(zone)) {
      auto& gop = ObjectPool::Handle(
          zone, isolate_group->object_store()->global_object_pool());
      ASSERT(!gop.IsNull());
      DedupPool(gop);
    }

    void DedupPool(const ObjectPool& pool) {
      if (pool.IsNull()) return;
      for (intptr_t i = 0; i < pool.Length(); i++) {
        if (pool.TypeAt(i) != ObjectPool::EntryType::kTaggedObject) {
          continue;
        }
        entry_ = pool.ObjectAt(i);
        if (!entry_.IsUnlinkedCall()) continue;
        entry_ = Dedup(UnlinkedCall::Cast(entry_));
        pool.SetObjectAt(i, entry_);
      }
    }

    void VisitCode(const Code& code) {
      pool_ = code.object_pool();
      DedupPool(pool_);
    }

   private:
    Object& entry_;
    ObjectPool& pool_;
  };

  if (!FLAG_precompiled_mode) return;

  StackZone stack_zone(thread);
  DedupUnlinkedCallsVisitor visitor(thread->zone(), thread->isolate_group());

  // Deduplicate local object pools as they are used to trace
  // objects when writing snapshots.
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
}

void ProgramVisitor::PruneSubclasses(Thread* thread) {
  class PruneSubclassesVisitor : public ClassVisitor {
   public:
    explicit PruneSubclassesVisitor(Zone* zone)
        : ClassVisitor(),
          old_implementors_(GrowableObjectArray::Handle(zone)),
          new_implementors_(GrowableObjectArray::Handle(zone)),
          implementor_(Class::Handle(zone)),
          old_subclasses_(GrowableObjectArray::Handle(zone)),
          new_subclasses_(GrowableObjectArray::Handle(zone)),
          subclass_(Class::Handle(zone)),
          null_list_(GrowableObjectArray::Handle(zone)) {}

    void VisitClass(const Class& klass) {
      old_implementors_ = klass.direct_implementors_unsafe();
      if (!old_implementors_.IsNull()) {
        new_implementors_ = GrowableObjectArray::New();
        for (intptr_t i = 0; i < old_implementors_.Length(); i++) {
          implementor_ ^= old_implementors_.At(i);
          if (implementor_.id() != kIllegalCid) {
            new_implementors_.Add(implementor_);
          }
        }
        if (new_implementors_.Length() == 0) {
          klass.set_direct_implementors(null_list_);
        } else {
          klass.set_direct_implementors(new_implementors_);
        }
      }

      old_subclasses_ = klass.direct_subclasses_unsafe();
      if (!old_subclasses_.IsNull()) {
        new_subclasses_ = GrowableObjectArray::New();
        for (intptr_t i = 0; i < old_subclasses_.Length(); i++) {
          subclass_ ^= old_subclasses_.At(i);
          if (subclass_.id() != kIllegalCid) {
            new_subclasses_.Add(subclass_);
          }
        }
        if (new_subclasses_.Length() == 0) {
          klass.set_direct_subclasses(null_list_);
        } else {
          klass.set_direct_subclasses(new_subclasses_);
        }
      }
    }

   private:
    GrowableObjectArray& old_implementors_;
    GrowableObjectArray& new_implementors_;
    Class& implementor_;
    GrowableObjectArray& old_subclasses_;
    GrowableObjectArray& new_subclasses_;
    Class& subclass_;
    GrowableObjectArray& null_list_;
  };

  StackZone stack_zone(thread);
  PruneSubclassesVisitor visitor(thread->zone());
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
}
#endif  // defined(DART_PRECOMPILER)

class CodeSourceMapKeyValueTrait {
 public:
  // Typedefs needed for the DirectChainedHashMap template.
  typedef const CodeSourceMap* Key;
  typedef const CodeSourceMap* Value;
  typedef const CodeSourceMap* Pair;

  static Key KeyOf(Pair kv) { return kv; }

  static Value ValueOf(Pair kv) { return kv; }

  static inline uword Hash(Key key) {
    ASSERT(!key->IsNull());
    return Utils::WordHash(key->Hash());
  }

  static inline bool IsKeyEqual(Pair pair, Key key) {
    ASSERT(!pair->IsNull() && !key->IsNull());
    return pair->Equals(*key);
  }
};

void ProgramVisitor::DedupCodeSourceMaps(Thread* thread) {
  class DedupCodeSourceMapsVisitor
      : public CodeVisitor,
        public Deduper<CodeSourceMap, CodeSourceMapKeyValueTrait> {
   public:
    explicit DedupCodeSourceMapsVisitor(Zone* zone)
        : Deduper(zone), code_source_map_(CodeSourceMap::Handle(zone)) {
      if (Snapshot::IncludesCode(Dart::vm_snapshot_kind())) {
        // Prefer existing objects in the VM isolate.
        AddVMBaseObjects();
      }
    }

    void VisitCode(const Code& code) {
      code_source_map_ = code.code_source_map();
      code_source_map_ = Dedup(code_source_map_);
      code.set_code_source_map(code_source_map_);
    }

   private:
    CodeSourceMap& code_source_map_;
  };

  StackZone stack_zone(thread);
  DedupCodeSourceMapsVisitor visitor(thread->zone());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
}

class ArrayKeyValueTrait {
 public:
  // Typedefs needed for the DirectChainedHashMap template.
  typedef const Array* Key;
  typedef const Array* Value;
  typedef const Array* Pair;

  static Key KeyOf(Pair kv) { return kv; }

  static Value ValueOf(Pair kv) { return kv; }

  static inline uword Hash(Key key) {
    ASSERT(!key->IsNull());
    ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
    const intptr_t len = key->Length();
    uint32_t hash = Utils::WordHash(len);
    for (intptr_t i = 0; i < len; ++i) {
      hash =
          CombineHashes(hash, Utils::WordHash(static_cast<uword>(key->At(i))));
    }
    return hash;
  }

  static inline bool IsKeyEqual(Pair pair, Key key) {
    ASSERT(!pair->IsNull() && !key->IsNull());
    if (pair->Length() != key->Length()) return false;
    for (intptr_t i = 0; i < pair->Length(); i++) {
      if (pair->At(i) != key->At(i)) return false;
    }
    return true;
  }
};

void ProgramVisitor::DedupLists(Thread* thread) {
  class DedupListsVisitor : public CodeVisitor,
                            public Deduper<Array, ArrayKeyValueTrait> {
   public:
    explicit DedupListsVisitor(Zone* zone)
        : Deduper(zone),
          list_(Array::Handle(zone)),
          field_(Field::Handle(zone)) {}

    void VisitCode(const Code& code) {
      if (!code.IsFunctionCode()) return;

      list_ = code.inlined_id_to_function();
      list_ = Dedup(list_);
      code.set_inlined_id_to_function(list_);

      list_ = code.deopt_info_array();
      list_ = Dedup(list_);
      code.set_deopt_info_array(list_);

      list_ = code.static_calls_target_table();
      list_ = Dedup(list_);
      code.set_static_calls_target_table(list_);
    }

    void VisitFunction(const Function& function) {
      // Don't bother deduping the positional names in precompiled mode, as
      // they'll be dropped anyway.
      if (!FLAG_precompiled_mode) {
        list_ = function.positional_parameter_names();
        if (!list_.IsNull()) {
          list_ = Dedup(list_);
          function.set_positional_parameter_names(list_);
        }
      }
    }

   private:
    bool IsCorrectType(const Object& obj) const { return obj.IsArray(); }

    Array& list_;
    Field& field_;
  };

  StackZone stack_zone(thread);
  // ArrayKeyValueTrait::Hash is based on object addresses, so make
  // sure GC doesn't happen and doesn't move objects.
  NoSafepointScope no_safepoint;
  DedupListsVisitor visitor(thread->zone());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
}

// Traits for comparing two [Instructions] objects for equality, which is
// implemented as bit-wise equality.
//
// This considers two instruction objects to be equal even if they have
// different static call targets. Since the static call targets are called via
// the object pool this is ok.
class InstructionsKeyValueTrait {
 public:
  // Typedefs needed for the DirectChainedHashMap template.
  typedef const Instructions* Key;
  typedef const Instructions* Value;
  typedef const Instructions* Pair;

  static Key KeyOf(Pair kv) { return kv; }

  static Value ValueOf(Pair kv) { return kv; }

  static inline uword Hash(Key key) { return key->Hash(); }

  static inline bool IsKeyEqual(Pair pair, Key key) {
    return pair->Equals(*key);
  }
};

// Traits for comparing two [Code] objects for equality.
//
// The instruction deduplication naturally causes us to have a one-to-many
// relationship between Instructions and Code objects.
//
// In AOT, frames only have PCs. However, the runtime needs e.g. stack maps
// from the [Code] to scan such a frame. So we ensure that instructions of code
// objects are only deduplicated if the metadata in the code is the same.
// The runtime can then pick any code object corresponding to the PC in the
// frame and use the metadata.
#if defined(DART_PRECOMPILER)
class CodeKeyValueTrait {
 public:
  // Typedefs needed for the DirectChainedHashMap template.
  typedef const Code* Key;
  typedef const Code* Value;
  typedef const Code* Pair;

  static Key KeyOf(Pair kv) { return kv; }

  static Value ValueOf(Pair kv) { return kv; }

  static inline uword Hash(Key key) {
    ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
    return Utils::WordHash(
        CombineHashes(Instructions::Hash(key->instructions()),
                      static_cast<uword>(key->static_calls_target_table())));
  }

  static inline bool IsKeyEqual(Pair pair, Key key) {
    // In AOT, disabled code objects should not be considered for
    // deduplication.
    ASSERT(!pair->IsDisabled() && !key->IsDisabled());

    if (pair->ptr() == key->ptr()) return true;

    // Notice we assume that these entries have already been de-duped, so we
    // can use pointer equality.
    if (pair->static_calls_target_table() != key->static_calls_target_table()) {
      return false;
    }
    if (pair->pc_descriptors() != key->pc_descriptors()) {
      return false;
    }
    if (pair->compressed_stackmaps() != key->compressed_stackmaps()) {
      return false;
    }
    if (pair->catch_entry_moves_maps() != key->catch_entry_moves_maps()) {
      return false;
    }
    if (pair->exception_handlers() != key->exception_handlers()) {
      return false;
    }
    if (pair->UncheckedEntryPointOffset() != key->UncheckedEntryPointOffset()) {
      return false;
    }
    if (!Instructions::Equals(pair->instructions(), key->instructions())) {
      return false;
    }
    return LoadingUnit::LoadingUnitOf(*pair) ==
           LoadingUnit::LoadingUnitOf(*key);
  }
};
#endif

void ProgramVisitor::DedupInstructions(Thread* thread) {
  class DedupInstructionsVisitor
      : public CodeVisitor,
        public Deduper<Instructions, InstructionsKeyValueTrait>,
        public ObjectVisitor {
   public:
    explicit DedupInstructionsVisitor(Zone* zone)
        : Deduper(zone),
          code_(Code::Handle(zone)),
          instructions_(Instructions::Handle(zone)) {
      if (Snapshot::IncludesCode(Dart::vm_snapshot_kind())) {
        // Prefer existing objects in the VM isolate.
        Dart::vm_isolate_group()->heap()->VisitObjectsImagePages(this);
      }
    }

    void VisitObject(ObjectPtr obj) override {
      if (!obj->IsInstructions()) return;
      instructions_ = Instructions::RawCast(obj);
      AddCanonical(instructions_);
    }

    void VisitFunction(const Function& function) override {
      if (!function.HasCode()) return;
      code_ = function.CurrentCode();
      // This causes the code to be visited once here and once directly in the
      // ProgramWalker, but as long as the deduplication process is idempotent,
      // the cached entry points won't change during the second visit.
      VisitCode(code_);
      function.SetInstructionsSafe(code_);  // Update cached entry point.
    }

    void VisitCode(const Code& code) override {
      instructions_ = code.instructions();
      instructions_ = Dedup(instructions_);
      code.set_instructions(instructions_);
      if (code.IsDisabled()) {
        instructions_ = code.active_instructions();
        instructions_ = Dedup(instructions_);
      }
      code.SetActiveInstructionsSafe(instructions_,
                                     code.UncheckedEntryPointOffset());
    }

   private:
    Code& code_;
    Instructions& instructions_;
  };

#if defined(DART_PRECOMPILER)
  class DedupInstructionsWithSameMetadataVisitor
      : public CodeVisitor,
        public Deduper<Code, CodeKeyValueTrait> {
   public:
    explicit DedupInstructionsWithSameMetadataVisitor(Zone* zone)
        : Deduper(zone),
          canonical_(Code::Handle(zone)),
          code_(Code::Handle(zone)),
          instructions_(Instructions::Handle(zone)) {}

    // Relink the program graph to eliminate references to the non-canonical
    // Code objects. We want to arrive at a graph where Code objects
    // and Instructions objects are in a one-to-one relationship.
    void PostProcess(IsolateGroup* isolate_group) {
      const intptr_t canonical_count = canonical_objects_.Length();

      auto& static_calls_array = Array::Handle(zone_);
      auto& static_calls_table_entry = Object::Handle(zone_);

      auto should_canonicalize = [&](const Object& obj) {
        return CanCanonicalize(Code::Cast(obj)) && !obj.InVMIsolateHeap();
      };

      auto process_pool = [&](const ObjectPool& pool) {
        if (pool.IsNull()) {
          return;
        }

        auto& object = Object::Handle(zone_);
        for (intptr_t i = 0; i < pool.Length(); i++) {
          auto const type = pool.TypeAt(i);
          if (type != ObjectPool::EntryType::kTaggedObject) continue;
          object = pool.ObjectAt(i);
          if (object.IsCode() && should_canonicalize(object)) {
            object = Canonicalize(Code::Cast(object));
            pool.SetObjectAt(i, object);
          }
        }
      };

      auto& pool = ObjectPool::Handle(zone_);

      auto it = canonical_objects_.GetIterator();
      while (auto canonical_code = it.Next()) {
        static_calls_array = (*canonical_code)->static_calls_target_table();
        if (!static_calls_array.IsNull()) {
          StaticCallsTable static_calls(static_calls_array);
          for (auto& view : static_calls) {
            static_calls_table_entry =
                view.Get<Code::kSCallTableCodeOrTypeTarget>();
            if (static_calls_table_entry.IsCode() &&
                should_canonicalize(static_calls_table_entry)) {
              static_calls_table_entry =
                  Canonicalize(Code::Cast(static_calls_table_entry));
              view.Set<Code::kSCallTableCodeOrTypeTarget>(
                  static_calls_table_entry);
            }
          }
        }

        pool = (*canonical_code)->object_pool();
        process_pool(pool);
      }

      auto object_store = isolate_group->object_store();

      const auto& dispatch_table_entries =
          Array::Handle(zone_, object_store->dispatch_table_code_entries());
      if (!dispatch_table_entries.IsNull()) {
        auto& code = Code::Handle(zone_);
        for (intptr_t i = 0; i < dispatch_table_entries.Length(); i++) {
          code ^= dispatch_table_entries.At(i);
          if (should_canonicalize(code)) {
            code ^= Canonicalize(code);
            dispatch_table_entries.SetAt(i, code);
          }
        }
      }

      // If there's a global object pool, canonicalize any Code entries in it.
      pool = object_store->global_object_pool();
      process_pool(pool);

      RELEASE_ASSERT(canonical_count == canonical_objects_.Length());
    }

    void VisitFunction(const Function& function) {
      if (!function.HasCode()) return;
      code_ = function.CurrentCode();
      // This causes the code to be visited once here and once directly in the
      // ProgramWalker, but as long as the deduplication process is idempotent,
      // the cached entry points won't change during the second visit.
      VisitCode(code_);
      function.SetInstructionsSafe(canonical_);  // Update cached entry point.
    }

    void VisitCode(const Code& code) {
      canonical_ = code.ptr();
      if (code.IsDisabled()) return;
      canonical_ = Canonicalize(code);
      instructions_ = canonical_.instructions();
      code.SetActiveInstructionsSafe(instructions_,
                                     code.UncheckedEntryPointOffset());
      code.set_instructions(instructions_);
    }

   private:
    bool CanCanonicalize(const Code& code) const { return !code.IsDisabled(); }

    CodePtr Canonicalize(const Code& code) {
      canonical_ = Dedup(code);
      if (!code.is_discarded() && canonical_.is_discarded()) {
        canonical_.set_is_discarded(false);
      }
      return canonical_.ptr();
    }

    Code& canonical_;
    Code& code_;
    Instructions& instructions_;
  };

  if (FLAG_precompiled_mode) {
    StackZone stack_zone(thread);
    // CodeKeyValueTrait::Hash is based on object addresses, so make
    // sure GC doesn't happen and doesn't move objects.
    NoSafepointScope no_safepoint;
    DedupInstructionsWithSameMetadataVisitor visitor(thread->zone());
    WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
    visitor.PostProcess(thread->isolate_group());
    return;
  }
#endif  // defined(DART_PRECOMPILER)

  StackZone stack_zone(thread);
  DedupInstructionsVisitor visitor(thread->zone());
  WalkProgram(thread->zone(), thread->isolate_group(), &visitor);
}

void ProgramVisitor::Dedup(Thread* thread) {
  BindStaticCalls(thread);
  ShareMegamorphicBuckets(thread);
  NormalizeAndDedupCompressedStackMaps(thread);
  DedupPcDescriptors(thread);
  DedupDeoptEntries(thread);
#if defined(DART_PRECOMPILER)
  DedupCatchEntryMovesMaps(thread);
  DedupUnlinkedCalls(thread);
  PruneSubclasses(thread);
#endif
  DedupCodeSourceMaps(thread);
  DedupLists(thread);

  // Reduces binary size but obfuscates profiler results.
  if (FLAG_dedup_instructions) {
    DedupInstructions(thread);
  }
}

#if defined(DART_PRECOMPILER)
class AssignLoadingUnitsCodeVisitor : public ObjectVisitor {
 public:
  explicit AssignLoadingUnitsCodeVisitor(Zone* zone)
      : heap_(Thread::Current()->heap()),
        code_(Code::Handle(zone)),
        func_(Function::Handle(zone)),
        cls_(Class::Handle(zone)),
        lib_(Library::Handle(zone)),
        unit_(LoadingUnit::Handle(zone)),
        obj_(Object::Handle(zone)) {}

  void VisitObject(ObjectPtr obj) override {
    if (obj->IsCode()) {
      code_ ^= obj;
      VisitCode(code_);
    }
  }

  void VisitCode(const Code& code) {
    intptr_t id;
    if (code.IsFunctionCode()) {
      func_ ^= code.function();
      obj_ = func_.Owner();
      cls_ ^= obj_.ptr();
      lib_ = cls_.library();
      if (lib_.IsNull()) {
        // E.g., dynamic.
        id = LoadingUnit::kRootId;
      } else {
        unit_ = lib_.loading_unit();
        if (unit_.IsNull()) {
          return;  // Assignment remains LoadingUnit::kIllegalId.
        }
        id = unit_.id();
      }
    } else if (code.IsTypeTestStubCode() || code.IsStubCode() ||
               code.IsAllocationStubCode()) {
      id = LoadingUnit::kRootId;
    } else {
      UNREACHABLE();
    }

    ASSERT(heap_->GetLoadingUnit(code.ptr()) == WeakTable::kNoValue);
    heap_->SetLoadingUnit(code.ptr(), id);

    obj_ = code.code_source_map();
    MergeAssignment(obj_, id);
    obj_ = code.compressed_stackmaps();
    MergeAssignment(obj_, id);
  }

  void MergeAssignment(const Object& obj, intptr_t id) {
    if (obj.IsNull()) return;

    intptr_t old_id = heap_->GetLoadingUnit(obj_.ptr());
    if (old_id == WeakTable::kNoValue) {
      heap_->SetLoadingUnit(obj_.ptr(), id);
    } else if (old_id == id) {
      // Shared with another code in the same loading unit.
    } else {
      // Shared with another code in a different loading unit.
      // Could assign to dominating loading unit.
      heap_->SetLoadingUnit(obj_.ptr(), LoadingUnit::kRootId);
    }
  }

 private:
  Heap* heap_;
  Code& code_;
  Function& func_;
  Class& cls_;
  Library& lib_;
  LoadingUnit& unit_;
  Object& obj_;
};

void ProgramVisitor::AssignUnits(Thread* thread) {
  StackZone stack_zone(thread);
  Heap* heap = thread->heap();

  // Oddballs.
  heap->SetLoadingUnit(Object::null(), LoadingUnit::kRootId);
  heap->SetLoadingUnit(Object::empty_object_pool().ptr(), LoadingUnit::kRootId);

  AssignLoadingUnitsCodeVisitor visitor(thread->zone());
  HeapIterationScope iter(thread);
  iter.IterateVMIsolateObjects(&visitor);
  iter.IterateObjects(&visitor);
}

class ProgramHashVisitor : public CodeVisitor {
 public:
  explicit ProgramHashVisitor(Zone* zone)
      : str_(String::Handle(zone)),
        pool_(ObjectPool::Handle(zone)),
        obj_(Object::Handle(zone)),
        instr_(Instructions::Handle(zone)),
        hash_(0) {}

  void VisitClass(const Class& cls) {
    str_ = cls.Name();
    VisitInstance(str_);
  }

  void VisitFunction(const Function& function) {
    str_ = function.name();
    VisitInstance(str_);
  }

  void VisitCode(const Code& code) {
    pool_ = code.object_pool();
    VisitPool(pool_);

    instr_ = code.instructions();
    hash_ = CombineHashes(hash_, instr_.Hash());
  }

  void VisitPool(const ObjectPool& pool) {
    if (pool.IsNull()) return;

    for (intptr_t i = 0; i < pool.Length(); i++) {
      if (pool.TypeAt(i) == ObjectPool::EntryType::kTaggedObject) {
        obj_ = pool.ObjectAt(i);
        if (obj_.IsInstance()) {
          VisitInstance(Instance::Cast(obj_));
        }
      }
    }
  }

  void VisitInstance(const Instance& instance) {
    hash_ = CombineHashes(hash_, instance.CanonicalizeHash());
  }

  uint32_t hash() const { return FinalizeHash(hash_, String::kHashBits); }

 private:
  String& str_;
  ObjectPool& pool_;
  Object& obj_;
  Instructions& instr_;
  uint32_t hash_;
};

uint32_t ProgramVisitor::Hash(Thread* thread) {
  StackZone stack_zone(thread);
  Zone* zone = thread->zone();

  ProgramHashVisitor visitor(zone);
  WalkProgram(zone, thread->isolate_group(), &visitor);
  visitor.VisitPool(ObjectPool::Handle(
      zone, thread->isolate_group()->object_store()->global_object_pool()));
  return visitor.hash();
}

#endif  // defined(DART_PRECOMPILER)

}  // namespace dart

#endif  // defined(DART_PRECOMPILED_RUNTIME)