            early_tenuring_threshold,
            "When more than this percentage of promotion candidates survive, "
            "promote all survivors of next scavenge.");
            new_gen_garbage_threshold,
            "Grow new gen when less than this percentage is garbage.");
DEFINE_FLAG(int, new_gen_growth_factor, 2, "Grow new gen by this factor.");
  uword* __restrict dst_cursor = reinterpret_cast<uword*>(dst);
  const uword* __restrict src_cursor = reinterpret_cast<const uword*>(src);
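// Illustrative sketch (not from the original source): a minimal standalone
// word-pair copy in the spirit of objcpy, assuming the same preconditions the
// real helper asserts (non-overlapping, word-aligned buffers and a size that
// is a positive multiple of two words). Names here are hypothetical.
#include <cstddef>
#include <cstdint>
static void CopyObjectWords(void* dst, const void* src, size_t size) {
  uintptr_t* __restrict to = reinterpret_cast<uintptr_t*>(dst);
  const uintptr_t* __restrict from = reinterpret_cast<const uintptr_t*>(src);
  do {
    // Copy two words per iteration; __restrict lets the compiler schedule
    // both loads ahead of both stores.
    uintptr_t a = *from++;
    uintptr_t b = *from++;
    *to++ = a;
    *to++ = b;
  } while (size -= 2 * sizeof(uintptr_t));
}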
120 ->
load(std::memory_order_relaxed);
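// Illustrative sketch (not from the original source): the relaxed-atomic
// header load that the fragment above tails off of, written as a standalone
// helper over a raw address. The ObjectPtr/UntaggedObject plumbing is
// omitted; the name and parameter are hypothetical.
#include <atomic>
#include <cstdint>
static uintptr_t ReadHeaderWordRelaxed(uintptr_t object_start) {
  // A relaxed load is enough here; concurrent copies of the same object are
  // arbitrated by the CAS in InstallForwardingPointer further below.
  return reinterpret_cast<std::atomic<uintptr_t>*>(object_start)
      ->load(std::memory_order_relaxed);
}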
template <bool parallel>
        scavenger_(scavenger),
        page_space_(scavenger->heap_->old_space()),
        visiting_old_object_(nullptr),
        promoted_list_(promotion_stack) {}

  constexpr static const char* const kName = "Scavenger";
    const bool is_external =
        view->untag()->data_ != view->untag()->DataFieldForInternalTypedData();
    if (view->untag()->data_ == nullptr) {
    TypedDataBasePtr td = view->untag()->typed_data();
    ASSERT(td->IsHeapObject());
    view->untag()->RecomputeDataFieldForInternalTypedData();
#if !defined(TARGET_ARCH_IA32)
    for (ObjectPtr* current = first; current <= last; current++) {
      ScavengePointer(current);
    bool has_new_target = false;
    for (ObjectPtr* current = first; current <= last; current++) {
      has_new_target |= PredicateScavengePointer(current);
    return has_new_target;
#if defined(DART_COMPRESSED_POINTERS)
    bool has_new_target = false;
      has_new_target |= PredicateScavengeCompressedPointer(heap_base, current);
    return has_new_target;
      ObjectPtr visiting_object = visiting_old_object_;
      if (visiting_object != nullptr &&
          visiting_object->untag()->TryAcquireRememberedBit()) {
    ASSERT((obj == nullptr) || obj->IsOldObject());
    visiting_old_object_ = obj;
    if (obj != nullptr) {
    if (setjmp(*jump.Set()) == 0) {
      scavenger_->IterateRoots(this);
      ASSERT(scavenger_->abort_);
    if (setjmp(*jump.Set()) == 0) {
      ProcessPromotedList();
      ASSERT(scavenger_->abort_);
    if (setjmp(*jump.Set()) == 0) {
      ProcessPromotedList();
      ProcessWeakPropertiesScoped();
      ASSERT(scavenger_->abort_);
    if (setjmp(*jump.Set()) == 0) {
      ProcessWeakPropertiesScoped();
      ASSERT(scavenger_->abort_);

    if (scavenger_->abort_) return false;
    return (scan_ != tail_) || (scan_ != nullptr && !scan_->IsResolved()) ||
    return promoted_list_.WaitForWork(num_busy, scavenger_->abort_);

    if (!scavenger_->abort_) {
      page->RecordSurvivors();
      MournWeakProperties();
      MournWeakReferences();
      MournFinalizerEntries();
      scavenger_->IterateWeak();
    if (!scavenger_->abort_) {
    ASSERT(pending_ == nullptr);
    if (pending_ != nullptr) {
    if (obj->IsImmediateObject()) {
    if (obj->IsOldObject()) {
    ObjectPtr new_obj = ScavengeObject(obj);
    return new_obj->IsNewObject();

  void ScavengePointer(ObjectPtr* p) {
    if (PredicateScavengePointer(p)) {
      ObjectPtr visiting_object = visiting_old_object_;
      if (visiting_object != nullptr &&
          visiting_object->untag()->TryAcquireRememberedBit()) {

  bool PredicateScavengeCompressedPointer(uword heap_base,
    ObjectPtr obj = p->Decompress(heap_base);
    if (obj->IsImmediateObject()) {
    if (obj->IsOldObject()) {
      return obj->untag()->IsEvacuationCandidate();
    ObjectPtr new_obj = ScavengeObject(obj);
    return new_obj->IsNewObject();

    if (PredicateScavengeCompressedPointer(heap_base, p)) {
      ObjectPtr visiting_object = visiting_old_object_;
      if (visiting_object != nullptr &&
          visiting_object->untag()->TryAcquireRememberedBit()) {
  ObjectPtr ScavengeObject(ObjectPtr obj) {
    intptr_t size = obj->untag()->HeapSize(header);
    if (!Page::Of(obj)->IsSurvivor(raw_addr)) {
      new_addr = TryAllocateCopy(size);
        scavenger_->failed_to_promote_ = true;
        new_addr = TryAllocateCopy(size);
    objcpy(reinterpret_cast<void*>(new_addr), reinterpret_cast<void*>(raw_addr),
           size);
    if (new_obj->IsOldObject()) {
      new_obj->untag()->tags_.store(tags, std::memory_order_relaxed);
      static_cast<TypedDataPtr>(new_obj)->untag()->RecomputeDataField();
    if (InstallForwardingPointer(raw_addr, &header, forwarding_header)) {
      if (new_obj->IsOldObject()) {
        promoted_list_.Push(new_obj);
        bytes_promoted_ += size;
      if (new_obj->IsOldObject()) {
        bytes_promoted_ -= size;
  bool InstallForwardingPointer(uword addr,
      return reinterpret_cast<std::atomic<uword>*>(addr)
          ->compare_exchange_strong(*old_header, new_header,
                                    std::memory_order_relaxed);
      *reinterpret_cast<uword*>(addr) = new_header;
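// Illustrative sketch (not from the original source): the essence of the
// copy-and-forward race the two fragments above take part in. Each competing
// GC worker copies the object, then tries to CAS the forwarding header over
// the old header it read earlier; exactly one CAS wins, and losers discard
// their copy and follow the winner's forwarding pointer instead. Types and
// names here are hypothetical.
#include <atomic>
#include <cstdint>
static bool TryInstallForwardingHeader(uintptr_t object_start,
                                       uintptr_t* expected_old_header,
                                       uintptr_t forwarding_header) {
  // On failure, compare_exchange_strong writes the winning header back into
  // *expected_old_header, which the caller decodes to find the real copy.
  return reinterpret_cast<std::atomic<uintptr_t>*>(object_start)
      ->compare_exchange_strong(*expected_old_header, forwarding_header,
                                std::memory_order_relaxed);
}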
    if (tail_ != nullptr) {
      if (LIKELY(new_top <= tail_->end_)) {
        tail_->top_ = new_top;
    return TryAllocateCopySlow(size);
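// Illustrative sketch (not from the original source): the fast path above is
// plain bump allocation in the tail to-space page; only when the bump would
// pass the page's end does it fall back to the slow path that links in a new
// page. A minimal standalone version with hypothetical names:
#include <cstdint>
struct BumpRegion {
  uintptr_t top;  // next free address
  uintptr_t end;  // first address past the region
};
static uintptr_t TryBumpAllocate(BumpRegion* region, intptr_t size) {
  uintptr_t result = region->top;
  uintptr_t new_top = result + size;
  if (new_top <= region->end) {
    region->top = new_top;
    return result;  // success: the object is copied to [result, new_top)
  }
  return 0;  // caller must take the slow path (new page, or abort the GC)
}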
  DART_NOINLINE uword TryAllocateCopySlow(intptr_t size);

  DART_NOINLINE DART_NORETURN void AbortScavenge() {
    if (FLAG_verbose_gc) {
    scavenger_->abort_ = true;
  void ProcessToSpace();
  void ProcessPromotedList();
  void ProcessWeakPropertiesScoped();

  void MournWeakProperties() {
    weak_property_list_.Process([](WeakPropertyPtr weak_property) {
  void MournWeakReferences() {
    weak_reference_list_.Process([](WeakReferencePtr weak_reference) {
          &weak_reference->untag()->target_);
  void MournWeakArrays() {
    weak_array_list_.Process([](WeakArrayPtr weak_array) {
            &(weak_array->untag()->data()[i]));
  void MournFinalizerEntries() {
    finalizer_entry_list_.Process([&](FinalizerEntryPtr finalizer_entry) {

  Scavenger* scavenger_;
  PageSpace* page_space_;
  intptr_t bytes_promoted_;
  ObjectPtr visiting_old_object_;
  LocalBlockWorkList<64, WeakArrayPtr> weak_array_list_;
  LocalBlockWorkList<64, WeakPropertyPtr> weak_property_list_;
  LocalBlockWorkList<64, WeakReferencePtr> weak_reference_list_;
  LocalBlockWorkList<64, FinalizerEntryPtr> finalizer_entry_list_;

  Page* head_ = nullptr;
  Page* tail_ = nullptr;
  Page* scan_ = nullptr;
  if (obj->IsImmediateOrOldObject()) {

      : isolate_group_(isolate_group),
        num_busy_(num_busy) {}

    bool more_to_scavenge = false;
      more_to_scavenge = visitor_->HasWork();
      if (more_to_scavenge) {
      if (!more_to_scavenge && (num_busy_->load() > 0)) {
        more_to_scavenge = true;
    } while (more_to_scavenge);
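// Illustrative sketch (not from the original source): the round-based
// termination check implied by the loop above. A worker keeps iterating while
// it has local work, and also while any other worker is still busy, since a
// busy worker can still publish more work (e.g. onto the shared promotion
// stack). Names are hypothetical; the barrier and work distribution are
// omitted.
#include <atomic>
#include <cstdint>
static bool KeepScavenging(bool has_local_work,
                           const std::atomic<uintptr_t>& num_busy) {
  // Another round is needed if this worker has work of its own, or if any
  // worker is still marked busy and may hand out more work before finishing.
  return has_local_work || num_busy.load(std::memory_order_relaxed) > 0;
}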
    : gc_threshold_in_words_(gc_threshold_in_words) {}

  while (page != nullptr) {
  if (capacity_in_words_ >= gc_threshold_in_words_) {
  if (page == nullptr) {
  if (head_ == nullptr) {
    head_ = tail_ = page;
    if (page->Contains(addr)) return true;
    page->WriteProtect(read_only);
  if (head == nullptr) {
  if (head_ == nullptr) {
      max_semi_capacity_in_words_(max_semi_capacity_in_words),
      max_semi_capacity_in_words, FLAG_new_gen_semi_initial_size * MBInWords);
  to_ = new SemiSpace(initial_semi_capacity_in_words);
  idle_scavenge_threshold_in_words_ = initial_semi_capacity_in_words;
  UpdateMaxHeapCapacity();
  UpdateMaxHeapUsage();
  ASSERT(blocks_ == nullptr);

intptr_t Scavenger::NewSizeInWords(intptr_t old_size_in_words,
  if (stats_history_.Size() != 0) {
        stats_history_.Get(0).ExpectedGarbageFraction(old_size_in_words);
    if (garbage < (FLAG_new_gen_garbage_threshold / 100.0)) {
          old_size_in_words * FLAG_new_gen_growth_factor);
  return old_size_in_words;
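// Illustrative sketch (not from the original source): the growth policy the
// fragment above implements together with the new_gen_garbage_threshold and
// new_gen_growth_factor flags defined at the top of the file. If the most
// recent scavenge is expected to reclaim less garbage than the threshold
// percentage, the semispace grows by the growth factor, subject to the
// configured maximum. Parameter names here are hypothetical.
#include <algorithm>
#include <cstdint>
static intptr_t NextSemiSpaceSizeInWords(intptr_t old_size_in_words,
                                         intptr_t max_size_in_words,
                                         double expected_garbage_fraction,
                                         int garbage_threshold_percent,
                                         int growth_factor) {
  if (expected_garbage_fraction < garbage_threshold_percent / 100.0) {
    // Too little garbage: survivors nearly fill the space, so grow it.
    return std::min<intptr_t>(max_size_in_words,
                              old_size_in_words * growth_factor);
  }
  return old_size_in_words;  // enough garbage; keep the current size
}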
      in_store_buffer_(in_store_buffer),
    for (ObjectPtr* ptr = from; ptr <= to; ptr++) {
      in_store_buffer_->Add(obj);
#if defined(DART_COMPRESSED_POINTERS)
  ObjectSet* const in_store_buffer_;

      in_store_buffer_(in_store_buffer),
    if (is_card_remembered_) {
    for (ObjectPtr* ptr = from; ptr <= to; ptr++) {
      if (obj->IsHeapObject() && obj->IsNewObject()) {
        if (is_card_remembered_) {
          if (!Page::Of(visiting_)->IsCardRemembered(ptr)) {
            FATAL("%s: Old object %#" Px " references new object %#" Px
                  "slot's card is not remembered. Consider using rr to watch the "
                  "slot %p and reverse-continue to find the store with a missing "
                  msg_, static_cast<uword>(visiting_), static_cast<uword>(obj),
        } else if (!is_remembered_) {
          FATAL("%s: Old object %#" Px " references new object %#" Px
                "not in any store buffer. Consider using rr to watch the "
                "slot %p and reverse-continue to find the store with a missing "
                msg_, static_cast<uword>(visiting_), static_cast<uword>(obj),
#if defined(DART_COMPRESSED_POINTERS)
      if (obj->IsHeapObject() && obj->IsNewObject()) {
        if (is_card_remembered_) {
          if (!Page::Of(visiting_)->IsCardRemembered(ptr)) {
            FATAL("%s: Old object %#" Px " references new object %#" Px
                  "slot's card is not remembered. Consider using rr to watch the "
                  "slot %p and reverse-continue to find the store with a missing "
                  msg_, static_cast<uword>(visiting_), static_cast<uword>(obj),
        } else if (!is_remembered_) {
          FATAL("%s: Old object %#" Px " references new object %#" Px
                "not in any store buffer. Consider using rr to watch the "
                "slot %p and reverse-continue to find the store with a missing "
                msg_, static_cast<uword>(visiting_), static_cast<uword>(obj),

  const ObjectSet* const in_store_buffer_;
  const SemiSpace* const to_;
  ObjectPtr visiting_;
  bool is_remembered_;
  bool is_card_remembered_;
void Scavenger::VerifyStoreBuffers(const char* msg) {
  StackZone stack_zone(thread);
  Zone* zone = stack_zone.GetZone();

  ObjectSet* in_store_buffer = new (zone) ObjectSet(zone);
  heap_->AddRegionsToObjectSet(in_store_buffer);
    CollectStoreBufferScavengeVisitor visitor(in_store_buffer, msg);
    CheckStoreBufferScavengeVisitor visitor(in_store_buffer, to_, msg);
SemiSpace* Scavenger::Prologue(GCReason reason) {
  if (FLAG_verify_store_buffer) {
    VerifyStoreBuffers("Verifying remembered set before Scavenge");
    marker->new_marking_stack_.PushAll(
        marker->tlab_deferred_marking_stack_.PopAll());
    new_blocks_ = marker->new_marking_stack_.PopAll();
    deferred_blocks_ = marker->deferred_marking_stack_.PopAll();
  UpdateMaxHeapCapacity();
    MutexLocker ml(&space_lock_);
    to_ = new SemiSpace(NewSizeInWords(from->gc_threshold_in_words(), reason));
void Scavenger::Epilogue(SemiSpace* from) {
      [&](Isolate* isolate) {
        Thread* mutator_thread = isolate->mutator_thread();
        ASSERT(mutator_thread == nullptr || mutator_thread->top() == 0);
  double avg_frac = stats_history_.Get(0).PromoCandidatesSuccessFraction();
  if (stats_history_.Size() >= 2) {
    avg_frac += 0.5 * stats_history_.Get(1).PromoCandidatesSuccessFraction();
    avg_frac /= 1.0 + 0.5;
  early_tenure_ = avg_frac >= (FLAG_early_tenuring_threshold / 100.0);
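// Illustrative sketch (not from the original source): the early-tenuring
// decision above as a standalone function. The most recent scavenge's
// promotion-candidate success fraction is blended with the previous one at a
// 2:1 weight, and early tenuring turns on once the blend reaches the
// early_tenuring_threshold percentage. Names here are hypothetical.
static bool ShouldTenureEarly(double latest_success_fraction,
                              double previous_success_fraction,
                              bool have_previous,
                              int early_tenuring_threshold_percent) {
  double avg = latest_success_fraction;
  if (have_previous) {
    // Simple smoothing: the latest scavenge counts twice as much as the
    // previous one, normalized by the total weight (1 + 0.5).
    avg = (latest_success_fraction + 0.5 * previous_success_fraction) / 1.5;
  }
  return avg >= early_tenuring_threshold_percent / 100.0;
}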
  intptr_t history_used = 0;
  intptr_t history_micros = 0;
  ASSERT(stats_history_.Size() > 0);
  for (intptr_t i = 0; i < stats_history_.Size(); i++) {
    history_used += stats_history_.Get(i).UsedBeforeInWords();
    history_micros += stats_history_.Get(i).DurationMicros();
  if (history_micros == 0) {
    scavenge_words_per_micro_ = history_used / history_micros;
    if (scavenge_words_per_micro_ == 0) {
      scavenge_words_per_micro_ = 1;

  intptr_t average_idle_task_micros = 6000;
  idle_scavenge_threshold_in_words_ =
      scavenge_words_per_micro_ * average_idle_task_micros;
  if (idle_scavenge_threshold_in_words_ < lower_bound) {
    idle_scavenge_threshold_in_words_ = lower_bound;
  if (idle_scavenge_threshold_in_words_ > upper_bound) {
    idle_scavenge_threshold_in_words_ = upper_bound;

  if (FLAG_verify_store_buffer) {
    VerifyStoreBuffers("Verifying remembered set after Scavenge");
  UpdateMaxHeapUsage();
  if (heap_ != nullptr) {

  intptr_t used_in_words = UsedInWords() + freed_in_words_;
  bool for_new_space = (used_in_words >= idle_scavenge_threshold_in_words_) ||
                       (external_in_words >= idle_scavenge_threshold_in_words_);
  if (!for_new_space) {
  int64_t estimated_scavenge_completion =
      used_in_words / scavenge_words_per_micro_;
  return estimated_scavenge_completion <= deadline;
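// Illustrative sketch (not from the original source): the idle-time decision
// above. The scavenger tracks how many words it processes per microsecond,
// estimates when a scavenge started now would finish, and only runs during an
// idle period if that estimate fits inside the embedder-provided deadline.
// The "now" parameter and the names are hypothetical.
#include <cstdint>
static bool ScavengeFitsInIdleTime(int64_t now_micros,
                                   int64_t deadline_micros,
                                   intptr_t used_in_words,
                                   intptr_t words_per_micro) {
  if (words_per_micro == 0) words_per_micro = 1;  // avoid division by zero
  int64_t estimated_completion = now_micros + used_in_words / words_per_micro;
  return estimated_completion <= deadline_micros;
}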
template <bool parallel>
void Scavenger::IterateStoreBuffers(ScavengerVisitorBase<parallel>* visitor) {
      MutexLocker ml(&space_lock_);
      if (pending == nullptr) break;
      blocks_ = pending->next();
    visitor->set_pending(pending);
    while (!pending->IsEmpty()) {
      ObjectPtr obj = pending->Pop();
      ASSERT(!obj->IsForwardingCorpse());
      ASSERT(obj->untag()->IsRemembered());
      obj->untag()->ClearRememberedBit();
      visitor->VisitingOldObject(obj);
      visitor->ProcessObject(obj);
    visitor->set_pending(nullptr);
template <bool parallel>
void Scavenger::IterateRememberedCards(
    ScavengerVisitorBase<parallel>* visitor) {

void Scavenger::IterateObjectIdTable(ObjectPointerVisitor* visitor) {

template <bool parallel>
void Scavenger::IterateRoots(ScavengerVisitorBase<parallel>* visitor) {
    intptr_t slice = root_slices_started_.fetch_add(1);
        IterateIsolateRoots(visitor);
        IterateObjectIdTable(visitor);
        IterateStoreBuffers(visitor);
        IterateRememberedCards(visitor);
void Scavenger::IterateWeak() {
    intptr_t slice = weak_slices_started_.fetch_add(1);

void Scavenger::MournWeakHandles() {
  ScavengerWeakVisitor weak_visitor(thread);

template <bool parallel>
void ScavengerVisitorBase<parallel>::ProcessToSpace() {
  VisitingOldObject(nullptr);
  while (scan_ != nullptr) {
    uword resolved_top = scan_->resolved_top_;
    while (resolved_top < scan_->top_) {
      resolved_top += ProcessObject(obj);
    scan_->resolved_top_ = resolved_top;
    Page* next = scan_->next();
    if (next == nullptr) {
template <bool parallel>
void ScavengerVisitorBase<parallel>::ProcessPromotedList() {
  while (promoted_list_.Pop(&obj)) {
    VisitingOldObject(obj);
    if (thread_->is_marking() && obj->untag()->TryAcquireMarkBit()) {
      thread_->MarkingStackAddObject(obj);

template <bool parallel>
void ScavengerVisitorBase<parallel>::ProcessWeakPropertiesScoped() {
  if (scavenger_->abort_) return;
  weak_property_list_.Process([&](WeakPropertyPtr weak_property) {
    ObjectPtr key = weak_property->untag()->key();
      VisitingOldObject(weak_property->IsOldObject() ? weak_property : nullptr);
      weak_property->untag()->VisitPointersNonvirtual(this);
      weak_property_list_.Push(weak_property);
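// Illustrative sketch (not from the original source): the ephemeron-style
// policy in the loop above. A weak property's value is treated as live only
// once its key is known to have survived; otherwise the property is pushed
// back on the worklist and retried after more objects have been scavenged.
// Types and helpers here are hypothetical stand-ins.
template <typename WeakProperty, typename WorkList, typename Visitor>
static void ProcessWeakProperty(WeakProperty* property,
                                WorkList* retry_list,
                                Visitor* visitor,
                                bool key_survived) {
  if (key_survived) {
    // Key is live: the value must now be kept alive too, so scavenge the
    // property's fields like any ordinary object.
    visitor->VisitObjectFields(property);
  } else {
    // Key not (yet) proven live: defer; a later pass or the mourning phase
    // will either revisit the property or clear it.
    retry_list->Push(property);
  }
}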
void Scavenger::UpdateMaxHeapCapacity() {
  ASSERT(heap_ != nullptr);
  ASSERT(isolate_group != nullptr);
  isolate_group->GetHeapNewCapacityMaxMetric()->SetValue(

void Scavenger::UpdateMaxHeapUsage() {
  ASSERT(heap_ != nullptr);
  ASSERT(isolate_group != nullptr);

  if (obj->IsImmediateOrOldObject()) return true;
template <bool parallel>
  if (obj->IsNewObject()) {
    ASSERT(visiting_old_object_ == nullptr);
    ASSERT(visiting_old_object_ == obj);

    WeakPropertyPtr weak_property = static_cast<WeakPropertyPtr>(obj);
      weak_property_list_.Push(weak_property);
    WeakReferencePtr weak_reference = static_cast<WeakReferencePtr>(obj);
#if !defined(DART_COMPRESSED_POINTERS)
      ScavengePointer(&weak_reference->untag()->type_arguments_);
      ScavengeCompressedPointer(weak_reference->heap_base(),
                                &weak_reference->untag()->type_arguments_);
      weak_reference_list_.Push(weak_reference);
    WeakArrayPtr weak_array = static_cast<WeakArrayPtr>(obj);
    weak_array_list_.Push(weak_array);
    FinalizerEntryPtr finalizer_entry = static_cast<FinalizerEntryPtr>(obj);
#if !defined(DART_COMPRESSED_POINTERS)
    ScavengePointer(&finalizer_entry->untag()->token_);
    ScavengePointer(&finalizer_entry->untag()->next_);
    ScavengeCompressedPointer(finalizer_entry->heap_base(),
                              &finalizer_entry->untag()->token_);
    ScavengeCompressedPointer(finalizer_entry->heap_base(),
                              &finalizer_entry->untag()->next_);
    finalizer_entry_list_.Push(finalizer_entry);
void Scavenger::MournWeakTables() {
    for (intptr_t i = 0; i < size; i++) {
      if (table->IsValidEntryAtExclusive(i)) {
        ASSERT(obj->IsHeapObject());
            obj->IsNewObject() ? replacement_new : replacement_old;
        replacement->SetValueExclusive(obj, table->ValueAtExclusive(i));
      if (cleanup != nullptr) {
        cleanup(reinterpret_cast<void*>(table->ValueAtExclusive(i)));

#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
    rehash_weak_table(table, table_new, table_old, cleanup);
      [&](Isolate* isolate) {
        auto table = isolate->forward_table_new();
        if (table != nullptr) {
          rehash_weak_table(table, replacement, isolate->forward_table_old(),
          isolate->set_forward_table_new(replacement);
  while (reading != nullptr) {
      ASSERT(obj->IsHeapObject());
      if (obj->IsNewObject()) {
      if (obj->IsNewObject()) {
        new_writing->Push(obj);
        if (new_writing->IsFull()) {
          new_marking_stack->PushBlock(new_writing);
        old_writing->Push(obj);
        if (old_writing->IsFull()) {
          old_marking_stack->PushBlock(old_writing);
  old_marking_stack->PushBlock(old_writing);
  new_marking_stack->PushBlock(new_writing);

  if (obj->IsNewObject()) {

#if defined(DART_COMPRESSED_POINTERS)
  void VisitCompressedPointers(uword heap_base,

    reading = new_blocks_;
    if (reading == nullptr) break;
    new_blocks_ = reading->next();
      ASSERT(obj->IsHeapObject());
      if (obj->IsNewObject()) {
      if (obj->IsNewObject()) {
        new_writing->Push(obj);
        if (new_writing->IsFull()) {
          new_marking_stack->PushBlock(new_writing);
        old_writing->Push(obj);
        if (old_writing->IsFull()) {
          old_marking_stack->PushBlock(old_writing);
  old_marking_stack->PushBlock(old_writing);
  new_marking_stack->PushBlock(new_writing);

    reading = deferred_blocks_;
    if (reading == nullptr) break;
    deferred_blocks_ = reading->next();
      ASSERT(obj->IsHeapObject());
      if (obj->IsNewObject()) {
  PruneWeak(&deferred->finalizer_entries);

template <typename Type, typename PtrType>
  PtrType weak = list->Release();
    if (weak->IsOldObject()) {
        next = weak->untag()->next_seen_by_gc_.Decompress(weak->heap_base());
        weak->untag()->next_seen_by_gc_ = Type::null();
        next = weak->untag()->next_seen_by_gc_.Decompress(weak->heap_base());
        weak->untag()->next_seen_by_gc_ = Type::null();
      next = weak->untag()->next_seen_by_gc_.Decompress(weak->heap_base());
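// Illustrative sketch (not from the original source): the shape of the
// pruning loop above. The weak objects form an intrusive singly-linked list
// through a next_seen_by_gc_ field; pruning walks the chain, unlinks each
// node (resetting its link to null), and re-enqueues only the nodes that the
// GC should keep tracking. Generic standalone version with hypothetical
// types:
template <typename Node, typename Queue, typename Predicate>
static void PruneIntrusiveList(Node* head,
                               Queue* keep,
                               Predicate still_tracked) {
  Node* current = head;
  while (current != nullptr) {
    Node* next = current->next_seen_by_gc;  // read the link before clearing it
    current->next_seen_by_gc = nullptr;
    if (still_tracked(current)) {
      keep->Enqueue(current);  // survivors stay on the GC's worklist
    }
    current = next;
  }
}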
template <bool parallel>
  if (target->IsImmediateObject()) {
  if (target->IsOldObject()) {
    if (parent->IsOldObject() && target->untag()->IsEvacuationCandidate()) {
  if (target->IsNewObject() && parent->IsOldObject() &&
    page->VisitObjectPointers(visitor);
    page->VisitObjects(visitor);
void Scavenger::TryAllocateNewTLAB(Thread* thread,
                                   bool can_safepoint) {
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
  intptr_t remaining = thread->true_end() - thread->top();
  const bool heap_sampling_enabled = thread->end() != thread->true_end();
  const bool is_first_tlab = thread->true_end() == 0;
  if (heap_sampling_enabled && remaining > min_size) {

  MutexLocker ml(&space_lock_);
    if (page->owner() != nullptr) continue;
    intptr_t available =
    if (available >= min_size) {
      page->Acquire(thread);
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
  if (page == nullptr) {
  page->Acquire(thread);
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)

  intptr_t size = thread->end() - thread->top();

  if (thread->top() == 0) return 0;
  allocated = page->Release(thread);
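// Illustrative sketch (not from the original source): the TLAB bookkeeping
// visible above. A mutator thread owns a [top, true_end) slice of a to-space
// page; "remaining" space is what is left between the bump pointer and the
// true end (the visible end may be lowered for heap sampling), and abandoning
// a TLAB hands the unused tail back to the owning page. Names here are
// hypothetical.
#include <cstdint>
struct Tlab {
  uintptr_t top = 0;       // current bump pointer (0 means no TLAB)
  uintptr_t end = 0;       // allocation limit, possibly lowered for sampling
  uintptr_t true_end = 0;  // real end of the thread's slice of the page
};
static intptr_t TlabRemainingBytes(const Tlab& tlab) {
  if (tlab.top == 0) return 0;  // the thread has no TLAB at the moment
  return static_cast<intptr_t>(tlab.true_end - tlab.top);
}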
template <bool parallel>
  page = scavenger_->to_->TryAllocatePageLocked(false);
  if (page == nullptr) {
  if (head_ == nullptr) {
    head_ = scan_ = page;
    ASSERT(scan_ != nullptr);
  tail_->set_next(page);
  return tail_->TryAllocateGC(size);
    early_tenure_ = true;
  if (FLAG_verify_before_gc) {
    heap_->VerifyGC("Verifying before Scavenge",
  failed_to_promote_ = false;
  root_slices_started_ = 0;
  weak_slices_started_ = 0;
  freed_in_words_ = 0;
  intptr_t abandoned_bytes = 0;
  intptr_t promo_candidate_words = 0;
    if (early_tenure_) {
      page->EarlyTenure();
    promo_candidate_words += page->promo_candidate_words();
  intptr_t bytes_promoted;
  if (FLAG_scavenger_tasks == 0) {
    bytes_promoted = SerialScavenge(from);
    bytes_promoted = ParallelScavenge(from);
    ReverseScavenge(&from);
    heap_->assume_scavenge_will_fail_ = true;
  if (FLAG_verify_after_gc) {
    heap_->VerifyGC("Verifying after Scavenge...",
  scavenging_ = false;
               failed_to_promote_);

intptr_t Scavenger::SerialScavenge(SemiSpace* from) {
  visitor.ProcessRoots();
  visitor.ProcessAll();
  visitor.ProcessWeak();
  to_->AddList(visitor.head(), visitor.tail());
  return visitor.bytes_promoted();
intptr_t Scavenger::ParallelScavenge(SemiSpace* from) {
  intptr_t bytes_promoted = 0;
  const intptr_t num_tasks = FLAG_scavenger_tasks;

  ThreadBarrier* barrier = new ThreadBarrier(num_tasks, 1);
  RelaxedAtomic<uintptr_t> num_busy = 0;
  for (intptr_t i = 0; i < num_tasks; i++) {
        heap_->isolate_group(), this, from, freelist, &promotion_stack_);
    if (i < (num_tasks - 1)) {
      ParallelScavengerTask task(heap_->isolate_group(), barrier, visitors[i],
      task.RunEnteredIsolateGroup();

  for (intptr_t i = 0; i < num_tasks; i++) {
    visitor->Finalize(store_buffer);
    to_->AddList(visitor->head(), visitor->tail());
    bytes_promoted += visitor->bytes_promoted();

  return bytes_promoted;
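// Illustrative sketch (not from the original source): the fan-out/fan-in
// shape of ParallelScavenge, reduced to plain std::thread. The real code uses
// the VM thread pool, a ThreadBarrier, and ScavengerVisitorBase instances;
// the Worker type and its Run method below are hypothetical stand-ins, and at
// least one worker is assumed.
#include <cstdint>
#include <thread>
#include <vector>
struct Worker {
  intptr_t bytes_promoted = 0;
  void Run() { /* drain roots, to-space, and the promoted-object worklist */ }
};
static intptr_t RunWorkersInParallel(std::vector<Worker>& workers) {
  std::vector<std::thread> helpers;
  // Spawn all but the last worker; the caller's thread runs the last one so
  // it participates in the scavenge instead of just waiting.
  for (size_t i = 0; i + 1 < workers.size(); i++) {
    helpers.emplace_back([&workers, i] { workers[i].Run(); });
  }
  workers.back().Run();
  for (auto& t : helpers) t.join();
  intptr_t total = 0;
  for (const Worker& w : workers) total += w.bytes_promoted;  // fan-in
  return total;
}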
void Scavenger::ReverseScavenge(SemiSpace** from) {
  class ReverseFromForwardingVisitor : public ObjectVisitor {
    void VisitObject(ObjectPtr from_obj) override {
      intptr_t size = to_obj->untag()->HeapSize();
      uword from_header = static_cast<uword>(to_header);

  ReverseFromForwardingVisitor visitor;
    page->VisitObjects(&visitor);

  MutexLocker ml(&space_lock_);
  SemiSpace* temp = to_;
  promotion_stack_.Reset();
  while (pending != nullptr) {
    marker->new_marking_stack_.PushAll(new_blocks_);
    new_blocks_ = nullptr;
    marker->deferred_marking_stack_.PushAll(deferred_blocks_);
    deferred_blocks_ = nullptr;
  heap_->assume_scavenge_will_fail_ = true;

  ASSERT(isolate_group != nullptr);
  int64_t run_time = isolate_group->UptimeMicros();
    double avg_time_between_collections =
        run_time_millis / static_cast<double>(collections());
                     avg_time_between_collections);
    space.AddProperty("avgCollectionPeriodMillis", 0.0);