#if defined(DART_ENABLE_HEAP_SNAPSHOT_WRITER)

static bool IsUserClass(intptr_t cid) {
  if (cid == kContextCid) return true;
  if (cid == kTypeArgumentsCid) return false;
  // ...
}
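// Describes a single pointer field within instances of a class: its byte
// offset, whether it is stored as a compressed pointer, and the name under
// which the field is reported in the heap snapshot.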
struct ObjectSlot {
  uint16_t offset;
  bool is_compressed_pointer;
  const char* name;

  ObjectSlot(uint16_t offset, bool is_compressed_pointer, const char* name)
      : offset(offset),
        is_compressed_pointer(is_compressed_pointer),
        name(name) {}
};
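// Per-class-id tables of pointer slots, computed once per snapshot. For each
// finalized class this records the offset and name of every slot, plus
// whether the object consists only of tagged words (in which case the
// generic precise pointer visitor can be used instead of the slot list).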
class ObjectSlots {
 public:
  using ObjectSlotsType = ZoneGrowableArray<ObjectSlot>;

  explicit ObjectSlots(Thread* thread) {
    auto class_table = thread->isolate_group()->class_table();
    const intptr_t class_count = class_table->NumCids();
    // ...
    cid2object_slots_.FillWith(nullptr, 0, class_count);
    contains_only_tagged_words_.FillWith(false, 0, class_count);

    for (intptr_t cid = 1; cid < class_count; cid++) {
      if (!class_table->HasValidClassAt(cid)) continue;
      // ...
      cls = class_table->At(cid);
      if (!cls.is_finalized()) continue;

      auto slots = cid2object_slots_[cid] = new ObjectSlotsType();

      // Slots known from the VM's offsets table for this class.
      for (const auto& entry : OffsetsTable::offsets_table()) {
        if (entry.class_id == cid) {
          slots->Add(ObjectSlot(entry.offset, entry.is_compressed_pointer,
                                /* ... */));
        }
      }
      if (slots->is_empty()) {
        // ...
        if (cls.num_native_fields() > 0) {
          slots->Add(ObjectSlot(kWordSize, true, "native_fields"));
        }
        // ...
        const auto tav_offset = cls.host_type_arguments_field_offset();
        // ...
        slots->Add(ObjectSlot(tav_offset, true, "type_arguments"));
        // ...
        while (!cls.IsNull()) {
          fields = cls.fields();
          if (!fields.IsNull()) {
            for (intptr_t i = 0; i < fields.Length(); ++i) {
              field ^= fields.At(i);
              if (!field.is_instance()) continue;
              // ...
              slots->Add(ObjectSlot(field.HostOffset(), !field.is_unboxed(),
                                    /* ... */));
            }
          }
          cls = cls.SuperClass();
        }
      }

      slots->Sort([](const ObjectSlot* a, const ObjectSlot* b) {
        return a->offset - b->offset;
      });
      // ...
      bool contains_only_tagged_words = true;
      for (auto& slot : *slots) {
        if (!slot.is_compressed_pointer) {
          contains_only_tagged_words = false;
          // ...
        }
      }
      // ...
      if (contains_only_tagged_words) {
        // ...
        for (auto& slot : *slots) {
          // ...
        }
      }
      ASSERT(contains_only_tagged_words ||
             (cid != kArrayCid && cid != kImmutableArrayCid));
      // ...
      contains_only_tagged_words_[cid] = contains_only_tagged_words;
    }
  }

  const ObjectSlotsType* ObjectSlotsFor(intptr_t cid) const {
    return cid2object_slots_[cid];
  }
  // ...
  bool ContainsOnlyTaggedPointers(intptr_t cid) {
    return contains_only_tagged_words_[cid];
  }

 private:
  GrowableArray<ObjectSlotsType*> cid2object_slots_;
  GrowableArray<bool> contains_only_tagged_words_;
};
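// Work stack for a depth-first traversal of the object graph. Every visited
// pointer pushes a Node; a side WeakTable marks objects already seen, so each
// reachable object is visited exactly once, and the stack itself doubles as
// the parent chain used to reconstruct retaining paths.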
class ObjectGraph::Stack : public ObjectPointerVisitor {
 public:
  explicit Stack(IsolateGroup* isolate_group)
      : ObjectPointerVisitor(isolate_group),
        include_vm_objects_(true),
        data_(kInitialCapacity) {
    object_ids_ = new WeakTable();
  }

  ~Stack() {
    // ...
    object_ids_ = nullptr;
  }

  bool trace_values_through_fields() const override { return true; }

  void VisitPointers(ObjectPtr* first, ObjectPtr* last) override {
    for (ObjectPtr* current = first; current <= last; ++current) {
      Visit(current, *current);
    }
  }

#if defined(DART_COMPRESSED_POINTERS)
  void VisitCompressedPointers(uword heap_base,
                               CompressedObjectPtr* first,
                               CompressedObjectPtr* last) override {
    for (CompressedObjectPtr* current = first; current <= last; ++current) {
      Visit(current, current->Decompress(heap_base));
    }
  }
#endif

  void Visit(void* ptr, ObjectPtr obj) {
    if (obj->IsHeapObject() && !obj->untag()->InVMIsolateHeap() &&
        object_ids_->GetValueExclusive(obj) == 0) {
      if (!include_vm_objects_ && !IsUserClass(obj->GetClassId())) {
        return;
      }
      object_ids_->SetValueExclusive(obj, 1);
      // ...
      node.gc_root_type = gc_root_type();
      // ...
    }
  }

  void TraverseGraph(ObjectGraph::Visitor* visitor) {
    while (!data_.is_empty()) {
      Node node = data_.Last();
      // ...
      ObjectPtr obj = node.obj;
      ASSERT(obj->IsHeapObject());
      // ...
      StackIterator it(this, data_.length() - 2);
      visitor->gc_root_type = node.gc_root_type;
      Visitor::Direction direction = visitor->VisitObject(&it);
      if (direction == ObjectGraph::Visitor::kAbort) {
        // ...
      }
      if (direction == ObjectGraph::Visitor::kProceed) {
        set_gc_root_type(node.gc_root_type);
        ASSERT(obj->IsHeapObject());
        switch (obj->GetClassId()) {
          // Weakly-referring objects get special handling so their weak
          // targets are not traversed.
          case kWeakArrayCid:
            VisitWeakArray(static_cast<WeakArrayPtr>(obj));
            break;
          case kWeakReferenceCid:
            VisitWeakReference(static_cast<WeakReferencePtr>(obj));
            break;
          case kFinalizerEntryCid:
            VisitFinalizerEntry(static_cast<FinalizerEntryPtr>(obj));
            break;
          default:
            obj->untag()->VisitPointers(this);
            break;
        }
        clear_gc_root_type();
      }
    }
  }

  void VisitWeakArray(WeakArrayPtr array) {}

  void VisitWeakReference(WeakReferencePtr ref) {
#if !defined(DART_COMPRESSED_POINTERS)
    VisitPointers(&ref->untag()->type_arguments_,
                  &ref->untag()->type_arguments_);
#else
    VisitCompressedPointers(ref->heap_base(), &ref->untag()->type_arguments_,
                            &ref->untag()->type_arguments_);
#endif
  }

  void VisitFinalizerEntry(FinalizerEntryPtr entry) {
#if !defined(DART_COMPRESSED_POINTERS)
    VisitPointers(&entry->untag()->token_, &entry->untag()->token_);
    VisitPointers(&entry->untag()->next_, &entry->untag()->next_);
#else
    VisitCompressedPointers(entry->heap_base(), &entry->untag()->token_,
                            &entry->untag()->token_);
    VisitCompressedPointers(entry->heap_base(), &entry->untag()->next_,
                            &entry->untag()->next_);
#endif
  }

  bool visit_weak_persistent_handles() const override {
    return visit_weak_persistent_handles_;
  }

  void set_visit_weak_persistent_handles(bool value) {
    visit_weak_persistent_handles_ = value;
  }

  bool include_vm_objects_;

 private:
  struct Node {
    // ...
    const char* gc_root_type;
  };

  bool visit_weak_persistent_handles_ = false;

  static constexpr intptr_t kInitialCapacity = 1024;
  static constexpr intptr_t kNoParent = -1;

  intptr_t Parent(intptr_t index) const {
    // ...
    for (intptr_t i = index; i >= 1; --i) {
      // ...
    }
    // ...
  }

  WeakTable* object_ids_ = nullptr;
  GrowableArray<Node> data_;

  friend class StackIterator;
};
ObjectPtr ObjectGraph::StackIterator::Get() const {
  return stack_->data_[index_].obj;
}

bool ObjectGraph::StackIterator::MoveToParent() {
  intptr_t parent = stack_->Parent(index_);
  if (parent == Stack::kNoParent) {
    return false;
  }
  // ...
}

intptr_t ObjectGraph::StackIterator::OffsetFromParent() const {
  intptr_t parent_index = stack_->Parent(index_);
  if (parent_index == Stack::kNoParent) {
    // ...
  }
  Stack::Node parent = stack_->data_[parent_index];
  uword parent_start = UntaggedObject::ToAddr(parent.obj);
  Stack::Node child = stack_->data_[index_];
  uword child_ptr_addr = reinterpret_cast<uword>(child.ptr);
  intptr_t offset = child_ptr_addr - parent_start;
  if (offset > 0 && offset < parent.obj->untag()->HeapSize()) {
    // ...
  }
  // ...
  ASSERT(!parent.obj->IsDartInstance());
  // ...
}
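// Visits the static fields of all loaded libraries, tagging them as
// "user global" roots for the traversal.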
static void IterateUserFields(ObjectPointerVisitor* visitor) {
  visitor->set_gc_root_type("user global");
  // ...
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const GrowableObjectArray& libraries = GrowableObjectArray::Handle(
      zone, thread->isolate_group()->object_store()->libraries());
  // ...
  for (intptr_t i = 0; i < libraries.Length(); i++) {
    library ^= libraries.At(i);
    DictionaryIterator entries(library);
    while (entries.HasNext()) {
      entry = entries.GetNext();
      if (entry.IsClass()) {
        // ...
        fields = cls.fields();
        for (intptr_t j = 0; j < fields.Length(); j++) {
          field ^= fields.At(j);
          ObjectPtr ptr = field.ptr();
          visitor->VisitPointer(&ptr);
        }
      } else if (entry.IsField()) {
        field ^= entry.ptr();
        ObjectPtr ptr = field.ptr();
        visitor->VisitPointer(&ptr);
      }
    }
  }
  visitor->clear_gc_root_type();
}
ObjectGraph::ObjectGraph(Thread* thread) : ThreadStackResource(thread) {
  // ...
  ASSERT(thread->isolate() != Dart::vm_isolate());
}

ObjectGraph::~ObjectGraph() {}

void ObjectGraph::IterateObjects(ObjectGraph::Visitor* visitor) {
  Stack stack(isolate_group());
  stack.set_visit_weak_persistent_handles(
      visitor->visit_weak_persistent_handles());
  isolate_group()->VisitObjectPointers(&stack,
                                       ValidationPolicy::kDontValidateFrames);
  stack.TraverseGraph(visitor);
}

void ObjectGraph::IterateUserObjects(ObjectGraph::Visitor* visitor) {
  Stack stack(isolate_group());
  stack.set_visit_weak_persistent_handles(
      visitor->visit_weak_persistent_handles());
  IterateUserFields(&stack);
  stack.include_vm_objects_ = false;
  stack.TraverseGraph(visitor);
}

void ObjectGraph::IterateObjectsFrom(const Object& root,
                                     ObjectGraph::Visitor* visitor) {
  Stack stack(isolate_group());
  stack.set_visit_weak_persistent_handles(
      visitor->visit_weak_persistent_handles());
  ObjectPtr root_raw = root.ptr();
  stack.VisitPointer(&root_raw);
  stack.TraverseGraph(visitor);
}
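// Pushes every instance of a given class id onto the traversal stack, so a
// traversal can start from "all instances of class X".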
class InstanceAccumulator : public ObjectVisitor {
 public:
  InstanceAccumulator(ObjectGraph::Stack* stack, intptr_t class_id)
      : stack_(stack), class_id_(class_id) {}

  void VisitObject(ObjectPtr obj) override {
    if (obj->GetClassId() == class_id_) {
      ObjectPtr rawobj = obj;
      stack_->VisitPointer(&rawobj);
    }
  }

 private:
  ObjectGraph::Stack* stack_;
  const intptr_t class_id_;
};

void ObjectGraph::IterateObjectsFrom(intptr_t class_id,
                                     HeapIterationScope* iteration,
                                     ObjectGraph::Visitor* visitor) {
  Stack stack(isolate_group());
  // ...
  InstanceAccumulator accumulator(&stack, class_id);
  iteration->IterateObjectsNoImagePages(&accumulator);
  // ...
  stack.TraverseGraph(visitor);
}
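// Visitors that sum the heap size of every object they see. The "excluding"
// variants skip one object or one class, which lets the SizeRetainedBy*
// queries be computed as the difference of two traversals.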
class SizeVisitor : public ObjectGraph::Visitor {
 public:
  SizeVisitor() : size_(0) {}
  intptr_t size() const { return size_; }
  virtual bool ShouldSkip(ObjectPtr obj) const { return false; }
  virtual Direction VisitObject(ObjectGraph::StackIterator* it) {
    ObjectPtr obj = it->Get();
    if (ShouldSkip(obj)) {
      // ...
    }
    size_ += obj->untag()->HeapSize();
    // ...
  }

 private:
  intptr_t size_;
};

class SizeExcludingObjectVisitor : public SizeVisitor {
 public:
  explicit SizeExcludingObjectVisitor(const Object& skip) : skip_(skip) {}
  virtual bool ShouldSkip(ObjectPtr obj) const { return obj == skip_.ptr(); }

 private:
  const Object& skip_;
};

class SizeExcludingClassVisitor : public SizeVisitor {
 public:
  explicit SizeExcludingClassVisitor(intptr_t skip) : skip_(skip) {}
  virtual bool ShouldSkip(ObjectPtr obj) const {
    return obj->GetClassId() == skip_;
  }

 private:
  const intptr_t skip_;
};

intptr_t ObjectGraph::SizeRetainedByInstance(const Object& obj) {
  HeapIterationScope iteration_scope(Thread::Current(), true);
  SizeVisitor total;
  IterateObjects(&total);
  intptr_t size_total = total.size();
  SizeExcludingObjectVisitor excluding_obj(obj);
  IterateObjects(&excluding_obj);
  intptr_t size_excluding_obj = excluding_obj.size();
  return size_total - size_excluding_obj;
}

intptr_t ObjectGraph::SizeReachableByInstance(const Object& obj) {
  HeapIterationScope iteration_scope(Thread::Current(), true);
  SizeVisitor total;
  IterateObjectsFrom(obj, &total);
  return total.size();
}

intptr_t ObjectGraph::SizeRetainedByClass(intptr_t class_id) {
  HeapIterationScope iteration_scope(Thread::Current(), true);
  SizeVisitor total;
  IterateObjects(&total);
  intptr_t size_total = total.size();
  SizeExcludingClassVisitor excluding_class(class_id);
  IterateObjects(&excluding_class);
  intptr_t size_excluding_class = excluding_class.size();
  return size_total - size_excluding_class;
}

intptr_t ObjectGraph::SizeReachableByClass(intptr_t class_id) {
  HeapIterationScope iteration_scope(Thread::Current(), true);
  SizeVisitor total;
  IterateObjectsFrom(class_id, &iteration_scope, &total);
  return total.size();
}
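// Records the chain of objects from a target object back toward a GC root.
// The result is written into 'path' as (object, offset-from-parent) pairs.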
class RetainingPathVisitor : public ObjectGraph::Visitor {
 public:
  RetainingPathVisitor(ObjectPtr obj, const Array& path)
      : thread_(Thread::Current()), obj_(obj), path_(path), length_(0) {}

  intptr_t length() const { return length_; }
  virtual bool visit_weak_persistent_handles() const { return true; }

  bool ShouldSkip(ObjectPtr obj) {
    // ...
    intptr_t cid = obj->GetClassId();
    // ...
  }

  bool ShouldStop(ObjectPtr obj) {
    // ...
    if (obj->IsField()) {
      const Field& field = Field::Handle(static_cast<FieldPtr>(obj));
      return field.is_static();
    }
    // ...
  }

  void StartList() { was_last_array_ = false; }

  intptr_t HideNDescendant(ObjectPtr obj) {
    // ...
    if (was_last_array_ && obj->IsGrowableObjectArray()) {
      was_last_array_ = false;
      // ...
    }
    // ...
    if (was_last_array_ && obj->IsMap()) {
      was_last_array_ = false;
      // ...
    }
    // ...
    was_last_array_ = obj->IsArray();
    // ...
  }

  virtual Direction VisitObject(ObjectGraph::StackIterator* it) {
    if (it->Get() != obj_) {
      if (ShouldSkip(it->Get())) {
        // ...
      }
      // ...
    }
    // ...
    Object& current = Object::Handle();
    Smi& offset_from_parent = Smi::Handle();
    do {
      // ...
      length_ -= HideNDescendant(it->Get());
      intptr_t obj_index = length_ * 2;
      intptr_t offset_index = obj_index + 1;
      if (!path_.IsNull() && offset_index < path_.Length()) {
        current = it->Get();
        path_.SetAt(obj_index, current);
        offset_from_parent = Smi::New(it->OffsetFromParent());
        path_.SetAt(offset_index, offset_from_parent);
      }
      // ...
    } while (!ShouldStop(it->Get()) && it->MoveToParent());
    // ...
  }

 private:
  // ...
  bool was_last_array_;
};

ObjectGraph::RetainingPathResult ObjectGraph::RetainingPath(Object* obj,
                                                            const Array& path) {
  HeapIterationScope iteration_scope(Thread::Current(), true);
  // ...
  ObjectPtr raw = obj->ptr();
  *obj = Object::null();
  RetainingPathVisitor visitor(raw, path);
  IterateUserObjects(&visitor);
  if (visitor.length() == 0) {
    IterateObjects(&visitor);
  }
  // ...
  return {visitor.length(), visitor.gc_root_type};
}
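// For a given target object, finds every object that points to it and records
// (referrer, offset-of-the-referring-slot) pairs into 'references'.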
class InboundReferencesVisitor : public ObjectVisitor,
                                 public ObjectPointerVisitor {
 public:
  InboundReferencesVisitor(Isolate* isolate,
                           ObjectPtr target,
                           const Array& references,
                           Object* scratch)
      : ObjectPointerVisitor(isolate->group()),
        // ...
        references_(references) {
    // ...
    ASSERT(Thread::Current()->no_safepoint_scope_depth() != 0);
  }

  bool trace_values_through_fields() const override { return true; }

  intptr_t length() const { return length_; }

  void VisitObject(ObjectPtr raw_obj) override {
    source_ = raw_obj;
    raw_obj->untag()->VisitPointers(this);
  }

  void VisitPointers(ObjectPtr* first, ObjectPtr* last) override {
    for (ObjectPtr* current_ptr = first; current_ptr <= last; current_ptr++) {
      ObjectPtr current_obj = *current_ptr;
      if (current_obj == target_) {
        intptr_t obj_index = length_ * 2;
        intptr_t offset_index = obj_index + 1;
        if (!references_.IsNull() && offset_index < references_.Length()) {
          *scratch_ = source_;
          references_.SetAt(obj_index, *scratch_);

          *scratch_ = Smi::New(0);
          uword source_start = UntaggedObject::ToAddr(source_);
          uword current_ptr_addr = reinterpret_cast<uword>(current_ptr);
          intptr_t offset = current_ptr_addr - source_start;
          if (offset > 0 && offset < source_->untag()->HeapSize()) {
            *scratch_ = Smi::New(offset);
          } else {
            // ...
            ASSERT(!source_->IsDartInstance());
            *scratch_ = Smi::New(-1);
          }
          references_.SetAt(offset_index, *scratch_);
        }
        // ...
        ++length_;
      }
    }
  }

#if defined(DART_COMPRESSED_POINTERS)
  void VisitCompressedPointers(uword heap_base,
                               CompressedObjectPtr* first,
                               CompressedObjectPtr* last) override {
    for (CompressedObjectPtr* current_ptr = first; current_ptr <= last;
         current_ptr++) {
      ObjectPtr current_obj = current_ptr->Decompress(heap_base);
      if (current_obj == target_) {
        intptr_t obj_index = length_ * 2;
        intptr_t offset_index = obj_index + 1;
        if (!references_.IsNull() && offset_index < references_.Length()) {
          *scratch_ = source_;
          references_.SetAt(obj_index, *scratch_);

          *scratch_ = Smi::New(0);
          uword source_start = UntaggedObject::ToAddr(source_);
          uword current_ptr_addr = reinterpret_cast<uword>(current_ptr);
          intptr_t offset = current_ptr_addr - source_start;
          if (offset > 0 && offset < source_->untag()->HeapSize()) {
            *scratch_ = Smi::New(offset);
          } else {
            // ...
            ASSERT(!source_->IsDartInstance());
            *scratch_ = Smi::New(-1);
          }
          references_.SetAt(offset_index, *scratch_);
        }
        // ...
        ++length_;
      }
    }
  }
#endif

 private:
  // ...
  const Array& references_;
  // ...
};

intptr_t ObjectGraph::InboundReferences(Object* obj, const Array& references) {
  Object& scratch = Object::Handle();
  HeapIterationScope iteration(Thread::Current());
  NoSafepointScope no_safepoint;
  InboundReferencesVisitor visitor(isolate(), obj->ptr(), references, &scratch);
  iteration.IterateObjects(&visitor);
  return visitor.length();
}
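// During snapshot writing, each old-space page gets a parallel "counting
// page" (stashed in the page's forwarding-page slot) that maps object
// addresses to the object ids assigned in pass 1.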
class CountingBlock {
 public:
  void Clear() {
    // ...
    count_bitvector_ = 0;
  }

  intptr_t Lookup(uword addr) const {
    // ...
    uword preceding_bitmask = (static_cast<uword>(1) << bitvector_shift) - 1;
    // ...
        Utils::CountOneBitsWord(count_bitvector_ & preceding_bitmask);
    // ...
  }

  void Record(uword old_addr, intptr_t id) {
    if (base_count_ == 0) {
      ASSERT(count_bitvector_ == 0);
      // ...
    }
    // ...
    uword block_offset = old_addr & ~kBlockMask;
    // ...
    count_bitvector_ |= static_cast<uword>(1) << bitvector_shift;
  }

 private:
  intptr_t base_count_;
  uword count_bitvector_;
  // ...
};

class CountingPage {
 public:
  // ...
  void Record(uword addr, intptr_t id) {
    return BlockFor(addr)->Record(addr, id);
  }

  CountingBlock* BlockFor(uword addr) {
    // ...
    intptr_t page_offset = addr & ~kPageMask;
    intptr_t block_number = page_offset / kBlockSize;
    ASSERT(block_number >= 0);
    // ...
    return &blocks_[block_number];
  }
  // ...
};
void HeapSnapshotWriter::EnsureAvailable(intptr_t needed) {
  intptr_t available = capacity_ - size_;
  if (available >= needed) {
    return;
  }
  // ...
  if (buffer_ != nullptr) {
    // ...
  }
  ASSERT(buffer_ == nullptr);
  // ...
  const intptr_t reserved_prefix = writer_->ReserveChunkPrefixSize();
  if (chunk_size < (reserved_prefix + needed)) {
    // ...
  }
  // ...
  size_ = reserved_prefix;
  // ...
}

void HeapSnapshotWriter::Flush(bool last) {
  if (size_ == 0 && !last) {
    return;
  }
  // ...
  writer_->WriteChunk(buffer_, size_, last);
  // ...
}
void HeapSnapshotWriter::SetupImagePageBoundaries() {
  MallocGrowableArray<ImagePageRange> ranges(4);
  // ...
  Page* image_page =
      Dart::vm_isolate_group()->heap()->old_space()->image_pages_;
  while (image_page != nullptr) {
    ImagePageRange range = {image_page->object_start(),
                            image_page->object_end()};
    ranges.Add(range);
    image_page = image_page->next();
  }
  image_page = isolate_group()->heap()->old_space()->image_pages_;
  while (image_page != nullptr) {
    ImagePageRange range = {image_page->object_start(),
                            image_page->object_end()};
    ranges.Add(range);
    image_page = image_page->next();
  }
  // ...
  ranges.Sort(CompareImagePageRanges);
  intptr_t image_page_count;
  ranges.StealBuffer(&image_page_ranges_, &image_page_count);
  image_page_hi_ = image_page_count - 1;
}

void HeapSnapshotWriter::SetupCountingPages() {
  Page* page = isolate_group()->heap()->old_space()->pages_;
  while (page != nullptr) {
    CountingPage* counting_page =
        reinterpret_cast<CountingPage*>(page->forwarding_page());
    ASSERT(counting_page != nullptr);
    counting_page->Clear();
    page = page->next();
  }
}
bool HeapSnapshotWriter::OnImagePage(ObjectPtr obj) const {
  const uword addr = UntaggedObject::ToAddr(obj);
  // Binary search over the sorted image-page ranges.
  intptr_t lo = 0;
  intptr_t hi = image_page_hi_;
  while (lo <= hi) {
    intptr_t mid = (hi - lo + 1) / 2 + lo;
    // ...
    if (addr < image_page_ranges_[mid].start) {
      hi = mid - 1;
    } else if (addr >= image_page_ranges_[mid].end) {
      lo = mid + 1;
    } else {
      return true;
    }
  }
  return false;
}

CountingPage* HeapSnapshotWriter::FindCountingPage(ObjectPtr obj) const {
  if (obj->IsOldObject() && !OnImagePage(obj)) {
    // ...
    Page* page = Page::Of(obj);
    return reinterpret_cast<CountingPage*>(page->forwarding_page());
  }
  // ...
  return nullptr;
}

void HeapSnapshotWriter::AssignObjectId(ObjectPtr obj) {
  if (!obj->IsHeapObject()) {
    thread()->heap()->SetObjectId(obj, ++object_count_);
    return;
  }
  // ...
  CountingPage* counting_page = FindCountingPage(obj);
  if (counting_page != nullptr) {
    // ...
    counting_page->Record(UntaggedObject::ToAddr(obj), ++object_count_);
  } else {
    // ...
    thread()->heap()->SetObjectId(obj, ++object_count_);
  }
}

intptr_t HeapSnapshotWriter::GetObjectId(ObjectPtr obj) const {
  if (!obj->IsHeapObject()) {
    intptr_t id = thread()->heap()->GetObjectId(obj);
    // ...
    return id;
  }
  // ...
  CountingPage* counting_page = FindCountingPage(obj);
  intptr_t id;
  if (counting_page != nullptr) {
    // ...
    id = counting_page->Lookup(UntaggedObject::ToAddr(obj));
  } else {
    // ...
    id = thread()->heap()->GetObjectId(obj);
  }
  // ...
  return id;
}

void HeapSnapshotWriter::ClearObjectIds() {
  thread()->heap()->ResetObjectIdTable();
}

void HeapSnapshotWriter::CountReferences(intptr_t count) {
  reference_count_ += count;
}

void HeapSnapshotWriter::CountExternalProperty() {
  external_property_count_ += 1;
}

void HeapSnapshotWriter::AddSmi(SmiPtr smi) {
  if (thread()->heap()->GetObjectId(smi) == WeakTable::kNoValue) {
    thread()->heap()->SetObjectId(smi, -1);
    smis_.Add(smi);
  }
}
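// Pass 1: walks every live object to assign object ids and to count objects,
// references, and external properties, so the totals can be written ahead of
// the object data in pass 2.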
class Pass1Visitor : public ObjectVisitor,
                     public ObjectPointerVisitor,
                     public HandleVisitor {
 public:
  explicit Pass1Visitor(HeapSnapshotWriter* writer, ObjectSlots* object_slots)
      : ObjectVisitor(),
        ObjectPointerVisitor(IsolateGroup::Current()),
        HandleVisitor(Thread::Current()),
        writer_(writer),
        object_slots_(object_slots) {}

  void VisitObject(ObjectPtr obj) override {
    if (obj->IsPseudoObject()) return;
    // ...
    writer_->AssignObjectId(obj);
    const auto cid = obj->GetClassId();
    // ...
    if (object_slots_->ContainsOnlyTaggedPointers(cid)) {
      obj->untag()->VisitPointersPrecise(this);
    } else {
      for (auto& slot : *object_slots_->ObjectSlotsFor(cid)) {
        if (slot.is_compressed_pointer) {
          auto target = reinterpret_cast<CompressedObjectPtr*>(
              UntaggedObject::ToAddr(obj->untag()) + slot.offset);
          VisitCompressedPointers(obj->heap_base(), target, target);
        } else {
          writer_->CountReferences(1);
        }
      }
    }
  }

  void VisitPointers(ObjectPtr* from, ObjectPtr* to) override {
    for (ObjectPtr* ptr = from; ptr <= to; ptr++) {
      ObjectPtr obj = *ptr;
      if (!obj->IsHeapObject()) {
        writer_->AddSmi(static_cast<SmiPtr>(obj));
      }
      writer_->CountReferences(1);
    }
  }

#if defined(DART_COMPRESSED_POINTERS)
  void VisitCompressedPointers(uword heap_base,
                               CompressedObjectPtr* from,
                               CompressedObjectPtr* to) override {
    for (CompressedObjectPtr* ptr = from; ptr <= to; ptr++) {
      ObjectPtr obj = ptr->Decompress(heap_base);
      if (!obj->IsHeapObject()) {
        writer_->AddSmi(static_cast<SmiPtr>(obj));
      }
      writer_->CountReferences(1);
    }
  }
#endif

  void VisitHandle(uword addr) override {
    FinalizablePersistentHandle* weak_persistent_handle =
        reinterpret_cast<FinalizablePersistentHandle*>(addr);
    if (!weak_persistent_handle->ptr()->IsHeapObject()) {
      return;
    }
    // ...
    writer_->CountExternalProperty();
  }

 private:
  HeapSnapshotWriter* const writer_;
  ObjectSlots* object_slots_;
  // ...
};
class CountImagePageRefs : public ObjectVisitor {
 public:
  CountImagePageRefs() : ObjectVisitor() {}

  void VisitObject(ObjectPtr obj) override {
    if (obj->IsPseudoObject()) return;
    count_++;
  }
  intptr_t count() const { return count_; }

 private:
  intptr_t count_ = 0;
  // ...
};

class WriteImagePageRefs : public ObjectVisitor {
 public:
  explicit WriteImagePageRefs(HeapSnapshotWriter* writer)
      : ObjectVisitor(), writer_(writer) {}

  void VisitObject(ObjectPtr obj) override {
    if (obj->IsPseudoObject()) return;
    // ...
    writer_->WriteUnsigned(writer_->GetObjectId(obj));
    // ...
  }
  intptr_t count() const { return count_; }

 private:
  HeapSnapshotWriter* const writer_;
  // ...
  intptr_t count_ = 0;
  // ...
};

enum NonReferenceDataTags {
  // ... (tags used below include kNoData, kNullData, kBoolData, kNameData,
  //      kLengthData, kIntData, kDoubleData, kLatin1Data, kUTF16Data)
};

static constexpr intptr_t kMaxStringElements = 128;

enum ExtraCids {
  kRootExtraCid = 1,
  kImagePageExtraCid = 2,
  kIsolateExtraCid = 3,

  kNumExtraCids = 3,
};
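// Pass 2: writes the actual snapshot contents. For each object it emits the
// class id, size, optional non-reference data (names, lengths, string
// contents, numeric values), and then the object ids of its references.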
class Pass2Visitor : public ObjectVisitor,
                     public ObjectPointerVisitor,
                     public HandleVisitor {
 public:
  explicit Pass2Visitor(HeapSnapshotWriter* writer, ObjectSlots* object_slots)
      : ObjectVisitor(),
        ObjectPointerVisitor(IsolateGroup::Current()),
        HandleVisitor(Thread::Current()),
        writer_(writer),
        object_slots_(object_slots) {}

  void VisitObject(ObjectPtr obj) override {
    if (obj->IsPseudoObject()) return;
    // ...
    intptr_t cid = obj->GetClassId();
    writer_->WriteUnsigned(cid + kNumExtraCids);
    writer_->WriteUnsigned(discount_sizes_ ? 0 : obj->untag()->HeapSize());

    if (cid == kNullCid) {
      writer_->WriteUnsigned(kNullData);
    } else if (cid == kBoolCid) {
      writer_->WriteUnsigned(kBoolData);
      writer_->WriteUnsigned(
          static_cast<uintptr_t>(static_cast<BoolPtr>(obj)->untag()->value_));
    } else if (cid == kSentinelCid) {
      if (obj == Object::sentinel().ptr()) {
        writer_->WriteUnsigned(kNameData);
        writer_->WriteUtf8("uninitialized");
      } else if (obj == Object::transition_sentinel().ptr()) {
        writer_->WriteUnsigned(kNameData);
        writer_->WriteUtf8("initializing");
      } else {
        writer_->WriteUnsigned(kNoData);
      }
    } else if (cid == kSmiCid) {
      // ...
    } else if (cid == kMintCid) {
      writer_->WriteUnsigned(kIntData);
      writer_->WriteSigned(static_cast<MintPtr>(obj)->untag()->value_);
    } else if (cid == kDoubleCid) {
      writer_->WriteUnsigned(kDoubleData);
      writer_->WriteBytes(&(static_cast<DoublePtr>(obj)->untag()->value_),
                          sizeof(double));
    } else if (cid == kOneByteStringCid) {
      OneByteStringPtr str = static_cast<OneByteStringPtr>(obj);
      intptr_t len = Smi::Value(str->untag()->length());
      intptr_t trunc_len = Utils::Minimum(len, kMaxStringElements);
      writer_->WriteUnsigned(kLatin1Data);
      writer_->WriteUnsigned(len);
      writer_->WriteUnsigned(trunc_len);
      writer_->WriteBytes(&str->untag()->data()[0], trunc_len);
    } else if (cid == kTwoByteStringCid) {
      TwoByteStringPtr str = static_cast<TwoByteStringPtr>(obj);
      intptr_t len = Smi::Value(str->untag()->length());
      intptr_t trunc_len = Utils::Minimum(len, kMaxStringElements);
      writer_->WriteUnsigned(kUTF16Data);
      writer_->WriteUnsigned(len);
      writer_->WriteUnsigned(trunc_len);
      writer_->WriteBytes(&str->untag()->data()[0], trunc_len * 2);
    } else if (cid == kArrayCid || cid == kImmutableArrayCid) {
      writer_->WriteUnsigned(kLengthData);
      writer_->WriteUnsigned(
          Smi::Value(static_cast<ArrayPtr>(obj)->untag()->length()));
    } else if (cid == kGrowableObjectArrayCid) {
      writer_->WriteUnsigned(kLengthData);
      writer_->WriteUnsigned(Smi::Value(
          static_cast<GrowableObjectArrayPtr>(obj)->untag()->length()));
    } else if (cid == kMapCid || cid == kConstMapCid) {
      writer_->WriteUnsigned(kLengthData);
      writer_->WriteUnsigned(
          Smi::Value(static_cast<MapPtr>(obj)->untag()->used_data()));
    } else if (cid == kSetCid || cid == kConstSetCid) {
      writer_->WriteUnsigned(kLengthData);
      writer_->WriteUnsigned(
          Smi::Value(static_cast<SetPtr>(obj)->untag()->used_data()));
    } else if (cid == kObjectPoolCid) {
      writer_->WriteUnsigned(kLengthData);
      writer_->WriteUnsigned(static_cast<ObjectPoolPtr>(obj)->untag()->length_);
    } else if (IsTypedDataClassId(cid)) {
      writer_->WriteUnsigned(kLengthData);
      writer_->WriteUnsigned(
          Smi::Value(static_cast<TypedDataPtr>(obj)->untag()->length()));
    } else if (IsExternalTypedDataClassId(cid)) {
      writer_->WriteUnsigned(kLengthData);
      writer_->WriteUnsigned(Smi::Value(
          static_cast<ExternalTypedDataPtr>(obj)->untag()->length()));
    } else if (cid == kFunctionCid) {
      writer_->WriteUnsigned(kNameData);
      ScrubAndWriteUtf8(static_cast<FunctionPtr>(obj)->untag()->name());
    } else if (cid == kCodeCid) {
      ObjectPtr owner = static_cast<CodePtr>(obj)->untag()->owner_;
      if (!owner->IsHeapObject()) {
        // ...
        writer_->WriteUnsigned(kNoData);
      } else if (owner->IsFunction()) {
        writer_->WriteUnsigned(kNameData);
        ScrubAndWriteUtf8(static_cast<FunctionPtr>(owner)->untag()->name());
      } else if (owner->IsClass()) {
        writer_->WriteUnsigned(kNameData);
        ScrubAndWriteUtf8(static_cast<ClassPtr>(owner)->untag()->name());
      } else {
        writer_->WriteUnsigned(kNoData);
      }
    } else if (cid == kFieldCid) {
      writer_->WriteUnsigned(kNameData);
      ScrubAndWriteUtf8(static_cast<FieldPtr>(obj)->untag()->name());
    } else if (cid == kClassCid) {
      writer_->WriteUnsigned(kNameData);
      ScrubAndWriteUtf8(static_cast<ClassPtr>(obj)->untag()->name());
    } else if (cid == kLibraryCid) {
      writer_->WriteUnsigned(kNameData);
      ScrubAndWriteUtf8(static_cast<LibraryPtr>(obj)->untag()->url());
    } else if (cid == kScriptCid) {
      writer_->WriteUnsigned(kNameData);
      ScrubAndWriteUtf8(static_cast<ScriptPtr>(obj)->untag()->url());
    } else if (cid == kTypeArgumentsCid) {
      // ...
      const TypeArguments& args =
          TypeArguments::Handle(static_cast<TypeArgumentsPtr>(obj));
      // ...
      args.PrintSubvectorName(0, args.Length(), TypeArguments::kScrubbedName,
                              &buffer);
      writer_->WriteUnsigned(kNameData);
      writer_->WriteUtf8(buffer.buffer());
    } else {
      writer_->WriteUnsigned(kNoData);
    }

    if (object_slots_->ContainsOnlyTaggedPointers(cid)) {
      // ...
      obj->untag()->VisitPointersPrecise(this);
      // ...
      obj->untag()->VisitPointersPrecise(this);
    } else {
      auto slots = object_slots_->ObjectSlotsFor(cid);
      // ...
      counted_ += slots->length();
      // ...
      for (auto& slot : *slots) {
        if (slot.is_compressed_pointer) {
          auto target = reinterpret_cast<CompressedObjectPtr*>(
              UntaggedObject::ToAddr(obj->untag()) + slot.offset);
          VisitCompressedPointers(obj->heap_base(), target, target);
        } else {
          writer_->WriteUnsigned(0);
        }
      }
    }
  }

  void ScrubAndWriteUtf8(StringPtr str) {
    if (str == String::null()) {
      writer_->WriteUtf8("null");
    } else {
      // ...
      char* value = handle.ToMallocCString();
      writer_->ScrubAndWriteUtf8(value);
      // ...
    }
  }

  void set_discount_sizes(bool value) { discount_sizes_ = value; }

  // ...
    writer_->WriteUnsigned(counted_);
  // ...

  void VisitPointers(ObjectPtr* from, ObjectPtr* to) override {
    // ...
    for (ObjectPtr* ptr = from; ptr <= to; ptr++) {
      // ...
      writer_->WriteUnsigned(writer_->GetObjectId(target));
      // ...
    }
    // ...
    intptr_t count = to - from + 1;
    // ...
  }

#if defined(DART_COMPRESSED_POINTERS)
  void VisitCompressedPointers(uword heap_base,
                               CompressedObjectPtr* from,
                               CompressedObjectPtr* to) override {
    // ...
    for (CompressedObjectPtr* ptr = from; ptr <= to; ptr++) {
      ObjectPtr target = ptr->Decompress(heap_base);
      // ...
      writer_->WriteUnsigned(writer_->GetObjectId(target));
      // ...
    }
    // ...
    intptr_t count = to - from + 1;
    // ...
  }
#endif

  void VisitHandle(uword addr) override {
    FinalizablePersistentHandle* weak_persistent_handle =
        reinterpret_cast<FinalizablePersistentHandle*>(addr);
    if (!weak_persistent_handle->ptr()->IsHeapObject()) {
      return;
    }
    // ...
    writer_->WriteUnsigned(writer_->GetObjectId(weak_persistent_handle->ptr()));
    writer_->WriteUnsigned(weak_persistent_handle->external_size());
    // ...
    auto const name = NativeSymbolResolver::LookupSymbolName(
        reinterpret_cast<uword>(weak_persistent_handle->callback()), nullptr);
    writer_->WriteUtf8((name == nullptr) ? "Unknown native function" : name);
    if (name != nullptr) {
      NativeSymbolResolver::FreeSymbolName(name);
    }
  }

  void CountExtraRefs(intptr_t count) {
    // ...
  }
  void WriteExtraRef(intptr_t oid) {
    // ...
    writer_->WriteUnsigned(oid);
  }

 private:
  IsolateGroup* isolate_group_;
  HeapSnapshotWriter* const writer_;
  ObjectSlots* object_slots_;
  bool writing_ = false;
  intptr_t counted_ = 0;
  intptr_t written_ = 0;
  intptr_t total_ = 0;
  bool discount_sizes_ = false;
  // ...
};
class Pass3Visitor : public ObjectVisitor {
 public:
  explicit Pass3Visitor(HeapSnapshotWriter* writer)
      : ObjectVisitor(), thread_(Thread::Current()), writer_(writer) {}

  void VisitObject(ObjectPtr obj) override {
    if (obj->IsPseudoObject()) {
      return;
    }
    writer_->WriteUnsigned(
        HeapSnapshotWriter::GetHeapSnapshotIdentityHash(thread_, obj));
  }

 private:
  Thread* thread_;
  HeapSnapshotWriter* const writer_;
  // ...
};
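// Builds a field-id to name table for static fields, used to label the
// snapshot's field-table section.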
class CollectStaticFieldNames : public ObjectVisitor {
 public:
  CollectStaticFieldNames(intptr_t field_table_size,
                          const char** field_table_names)
      : ObjectVisitor(),
        field_table_size_(field_table_size),
        field_table_names_(field_table_names),
        field_(Field::Handle()) {}

  void VisitObject(ObjectPtr obj) override {
    if (obj->IsField()) {
      field_ ^= obj;
      if (field_.is_static()) {
        intptr_t id = field_.field_id();
        // ...
        ASSERT(id < field_table_size_);
        field_table_names_[id] = field_.UserVisibleNameCString();
      }
    }
  }

 private:
  intptr_t field_table_size_;
  const char** field_table_names_;
  Field& field_;
  // ...
};
void VmServiceHeapSnapshotChunkedWriter::WriteChunk(uint8_t* buffer,
                                                    intptr_t size,
                                                    bool last) {
  JSONStream js;
  {
    JSONObject jsobj(&js);
    jsobj.AddProperty("jsonrpc", "2.0");
    jsobj.AddProperty("method", "streamNotify");
    {
      JSONObject params(&jsobj, "params");
      params.AddProperty("streamId", Service::heapsnapshot_stream.id());
      {
        JSONObject event(&params, "event");
        event.AddProperty("type", "Event");
        event.AddProperty("kind", "HeapSnapshot");
        event.AddProperty("isolate", thread()->isolate());
        event.AddPropertyTimeMillis("timestamp", OS::GetCurrentTimeMillis());
        event.AddProperty("last", last);
      }
    }
  }
  // ...
  Service::SendEventWithData(Service::heapsnapshot_stream.id(), "HeapSnapshot",
                             kMetadataReservation, js.buffer()->buffer(),
                             /* ... */);
}
FileHeapSnapshotWriter::FileHeapSnapshotWriter(Thread* thread,
                                               const char* filename,
                                               bool* success)
    : ChunkedWriter(thread), success_(success) {
  auto open = Dart::file_open_callback();
  auto write = Dart::file_write_callback();
  auto close = Dart::file_close_callback();
  if (open != nullptr && write != nullptr && close != nullptr) {
    file_ = open(filename, true);
  }
  // ...
  if (success_ != nullptr) *success_ = file_ != nullptr;
}

FileHeapSnapshotWriter::~FileHeapSnapshotWriter() {
  if (file_ != nullptr) {
    Dart::file_close_callback()(file_);
  }
}

void FileHeapSnapshotWriter::WriteChunk(uint8_t* buffer,
                                        intptr_t size,
                                        bool last) {
  if (file_ != nullptr) {
    Dart::file_write_callback()(buffer, size, file_);
  }
  // ...
}

CallbackHeapSnapshotWriter::CallbackHeapSnapshotWriter(
    // ...
    : ChunkedWriter(thread), callback_(callback), context_(context) {}

CallbackHeapSnapshotWriter::~CallbackHeapSnapshotWriter() {}

void CallbackHeapSnapshotWriter::WriteChunk(uint8_t* buffer,
                                            intptr_t size,
                                            bool last) {
  // ...
}
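// Produces the binary heap snapshot ("dartheap" format): header and class
// table first, then three passes over the heap (id and count assignment,
// object and reference data, identity hashes), streamed out in chunks.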
void HeapSnapshotWriter::Write() {
  HeapIterationScope iteration(thread());

  WriteBytes("dartheap", 8);
  // ...
  WriteUtf8(isolate()->name());
  Heap* H = thread()->heap();
  // ... (heap size totals computed here)
  WriteUnsigned(used + image);
  WriteUnsigned(capacity + image);
  WriteUnsigned(external);
  // ...
  ObjectSlots object_slots(thread());

  {
    ClassTable* class_table = isolate_group()->class_table();
    class_count_ = class_table->NumCids() - 1;

    Class& cls = Class::Handle();
    Library& lib = Library::Handle();
    String& str = String::Handle();

    intptr_t field_table_size = isolate()->field_table()->NumFieldIds();
    const char** field_table_names =
        thread()->zone()->Alloc<const char*>(field_table_size);
    for (intptr_t i = 0; i < field_table_size; i++) {
      field_table_names[i] = nullptr;
    }
    {
      CollectStaticFieldNames visitor(field_table_size, field_table_names);
      iteration.IterateObjects(&visitor);
    }

    WriteUnsigned(class_count_ + kNumExtraCids);
    {
      ASSERT(kRootExtraCid == 1);
      // ...
    }
    {
      ASSERT(kImagePageExtraCid == 2);
      // ...
      WriteUtf8("Read-Only Pages");
      // ...
    }
    {
      ASSERT(kIsolateExtraCid == 3);
      // ...
      WriteUtf8("Isolate");
      // ...
      WriteUnsigned(field_table_size);
      for (intptr_t i = 0; i < field_table_size; i++) {
        // ...
        WriteUnsigned(flags);
        // ...
        const char* name = field_table_names[i];
        WriteUtf8(name == nullptr ? "" : name);
        // ...
      }
    }
    ASSERT(kNumExtraCids == 3);
    for (intptr_t cid = 1; cid <= class_count_; cid++) {
      if (!class_table->HasValidClassAt(cid)) {
        // ...
      } else {
        cls = class_table->At(cid);
        // ...
        ScrubAndWriteUtf8(const_cast<char*>(str.ToCString()));
        lib = cls.library();
        // ...
        ScrubAndWriteUtf8(const_cast<char*>(str.ToCString()));
        // ...
        ScrubAndWriteUtf8(const_cast<char*>(str.ToCString()));
        // ...
        if (auto slots = object_slots.ObjectSlotsFor(cid)) {
          WriteUnsigned(slots->length());
          for (intptr_t index = 0; index < slots->length(); ++index) {
            const auto& slot = (*slots)[index];
            const intptr_t kStrongFlag = 1;
            WriteUnsigned(kStrongFlag);
            WriteUnsigned(index);
            ScrubAndWriteUtf8(const_cast<char*>(slot.name));
            // ...
          }
        } else {
          // ...
          ASSERT(!cls.is_finalized());
          // ...
        }
      }
    }
  }

  SetupImagePageBoundaries();
  SetupCountingPages();

  intptr_t num_isolates = 0;
  intptr_t num_image_objects = 0;
  {
    Pass1Visitor visitor(this, &object_slots);
    // ...
    isolate_group()->VisitSharedPointers(&visitor);
    // ...
    {
      CountImagePageRefs visitor;
      H->old_space()->VisitObjectsImagePages(&visitor);
      num_image_objects = visitor.count();
      CountReferences(num_image_objects);
    }
    // ...
    isolate_group()->ForEachIsolate(
        [&](Isolate* isolate) {
          // ...
          isolate->VisitObjectPointers(&visitor,
                                       ValidationPolicy::kDontValidateFrames);
          isolate->VisitStackPointers(&visitor,
                                      ValidationPolicy::kDontValidateFrames);
          // ...
          num_isolates++;
        });
    CountReferences(num_isolates);
    // ...
    iteration.IterateVMIsolateObjects(&visitor);
    iteration.IterateObjects(&visitor);
    // ...
    isolate()->group()->VisitWeakPersistentHandles(&visitor);
    // ...
    for (SmiPtr smi : smis_) {
      AssignObjectId(smi);
    }
  }

  {
    Pass2Visitor visitor(this, &object_slots);
    // ...
    WriteUnsigned(reference_count_);
    WriteUnsigned(object_count_);
    // ...
    WriteUnsigned(kRootExtraCid);
    // ...
    WriteUnsigned(kNoData);
    // ...
    isolate_group()->VisitSharedPointers(&visitor);
    visitor.CountExtraRefs(num_isolates + 1);
    // ...
    isolate_group()->VisitSharedPointers(&visitor);
    visitor.WriteExtraRef(2);
    for (intptr_t i = 0; i < num_isolates; i++) {
      // ...
      visitor.WriteExtraRef(i + 3);
    }
    // ...
    WriteUnsigned(kImagePageExtraCid);
    // ...
    WriteUnsigned(kNoData);
    WriteUnsigned(num_image_objects);
    {
      WriteImagePageRefs visitor(this);
      H->old_space()->VisitObjectsImagePages(&visitor);
    }
    // ...
    isolate_group()->ForEachIsolate(
        [&](Isolate* isolate) {
          WriteUnsigned(kIsolateExtraCid);
          // ...
          WriteUnsigned(kNameData);
          WriteUtf8(
              OS::SCreate(thread()->zone(), "%" Pd64, isolate->main_port()));
          // ...
          isolate->VisitObjectPointers(&visitor,
                                       ValidationPolicy::kDontValidateFrames);
          isolate->VisitStackPointers(&visitor,
                                      ValidationPolicy::kDontValidateFrames);
          // ...
          isolate->VisitObjectPointers(&visitor,
                                       ValidationPolicy::kDontValidateFrames);
          isolate->VisitStackPointers(&visitor,
                                      ValidationPolicy::kDontValidateFrames);
          // ...
        });
    // ...
    visitor.set_discount_sizes(true);
    iteration.IterateVMIsolateObjects(&visitor);
    visitor.set_discount_sizes(false);
    iteration.IterateObjects(&visitor);
    // ...
    for (SmiPtr smi : smis_) {
      WriteUnsigned(kSmiCid + kNumExtraCids);
      // ...
      WriteUnsigned(kIntData);
      WriteUnsigned(Smi::Value(smi));
      // ...
    }
    // ...
    WriteUnsigned(external_property_count_);
    isolate()->group()->VisitWeakPersistentHandles(&visitor);
  }

  {
    Pass3Visitor visitor(this);
    // ...
    isolate_group()->ForEachIsolate(
        [&](Isolate* isolate) {
          // ...
        });
    // ...
    iteration.IterateVMIsolateObjects(&visitor);
    iteration.IterateObjects(&visitor);
    for (SmiPtr smi : smis_) {
      // ...
    }
  }
  // ...
}
uint32_t HeapSnapshotWriter::GetHeapSnapshotIdentityHash(Thread* thread,
                                                         ObjectPtr obj) {
  if (!obj->IsHeapObject()) return 0;
  intptr_t cid = obj->GetClassId();
  uint32_t hash;
  switch (cid) {
    // ...
    case kCodeSourceMapCid:
    case kCompressedStackMapsCid:
    // ...
    case kGrowableObjectArrayCid:
    case kImmutableArrayCid:
    // ...
    case kInstructionsCid:
    case kInstructionsSectionCid:
    case kInstructionsTableCid:
    // ...
    case kObjectPoolCid:
    case kOneByteStringCid:
    case kPcDescriptorsCid:
    case kTwoByteStringCid:
      // ...
    default:
      hash = GetHashHelper(thread, obj);
      // ...
  }
  return hash;
}

static uint32_t GenerateHash(Random* random) {
  // ...
  hash = random->NextUInt32();
  // ...
}

uint32_t HeapSnapshotWriter::GetHashHelper(Thread* thread, ObjectPtr obj) {
  uint32_t hash;
#if defined(HASH_IN_OBJECT_HEADER)
  hash = Object::GetCachedHash(obj);
  if (hash == 0) {
    ASSERT(!thread->heap()->old_space()->IsObjectFromImagePages(obj));
    hash = GenerateHash(thread->random());
    Object::SetCachedHashIfNotSet(obj, hash);
  }
#else
  Heap* heap = thread->heap();
  hash = heap->GetHash(obj);
  if (hash == 0) {
    ASSERT(!heap->old_space()->IsObjectFromImagePages(obj));
    hash = GenerateHash(thread->random());
    heap->SetHashIfNotSet(obj, hash);
  }
#endif
  return hash;
}
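// Tallies per-class object counts, heap sizes, and external sizes,
// separately for new-space and old-space objects.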
CountObjectsVisitor::CountObjectsVisitor(Thread* thread, intptr_t class_count)
    : ObjectVisitor(),
      HandleVisitor(thread),
      new_count_(new intptr_t[class_count]),
      new_size_(new intptr_t[class_count]),
      new_external_size_(new intptr_t[class_count]),
      old_count_(new intptr_t[class_count]),
      old_size_(new intptr_t[class_count]),
      old_external_size_(new intptr_t[class_count]) {
  memset(new_count_.get(), 0, class_count * sizeof(intptr_t));
  memset(new_size_.get(), 0, class_count * sizeof(intptr_t));
  memset(new_external_size_.get(), 0, class_count * sizeof(intptr_t));
  memset(old_count_.get(), 0, class_count * sizeof(intptr_t));
  memset(old_size_.get(), 0, class_count * sizeof(intptr_t));
  memset(old_external_size_.get(), 0, class_count * sizeof(intptr_t));
}

void CountObjectsVisitor::VisitObject(ObjectPtr obj) {
  intptr_t cid = obj->GetClassId();
  intptr_t size = obj->untag()->HeapSize();
  if (obj->IsNewObject()) {
    new_count_[cid] += 1;
    new_size_[cid] += size;
  } else {
    old_count_[cid] += 1;
    old_size_[cid] += size;
  }
}

void CountObjectsVisitor::VisitHandle(uword addr) {
  FinalizablePersistentHandle* handle =
      reinterpret_cast<FinalizablePersistentHandle*>(addr);
  ObjectPtr obj = handle->ptr();
  if (!obj->IsHeapObject()) {
    return;
  }
  intptr_t cid = obj->GetClassId();
  intptr_t size = handle->external_size();
  if (obj->IsNewObject()) {
    new_external_size_[cid] += size;
  } else {
    old_external_size_[cid] += size;
  }
}

#endif  // defined(DART_ENABLE_HEAP_SNAPSHOT_WRITER)