DEFINE_FLAG(int,
            early_tenuring_threshold,
            66,
            "When more than this percentage of promotion candidates survive, "
            "promote all survivors of next scavenge.");
DEFINE_FLAG(int,
            new_gen_garbage_threshold,
            90,
            "Grow new gen when less than this percentage is garbage.");
DEFINE_FLAG(int, new_gen_growth_factor, 2, "Grow new gen by this factor.");
static void objcpy(void* dst, const void* src, size_t size) {
  ASSERT(
      (reinterpret_cast<uword>(dst) + size <= reinterpret_cast<uword>(src)) ||
      (reinterpret_cast<uword>(src) + size <= reinterpret_cast<uword>(dst)));
  // ...
  uword* __restrict dst_cursor = reinterpret_cast<uword*>(dst);
  const uword* __restrict src_cursor = reinterpret_cast<const uword*>(src);
  do {
    uword a = *src_cursor++;
    uword b = *src_cursor++;
    *dst_cursor++ = a;
    *dst_cursor++ = b;
    size -= (2 * sizeof(uword));
  } while (size > 0);
}

static DART_FORCE_INLINE uword ReadHeaderRelaxed(ObjectPtr obj) {
  return reinterpret_cast<std::atomic<uword>*>(UntaggedObject::ToAddr(obj))
      ->load(std::memory_order_relaxed);
}
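
// Visits roots and copies reachable new-space objects: survivors of a prior
// scavenge are promoted into old space, everything else is copied into
// to-space. Instantiated with parallel == false for the serial scavenger and
// parallel == true for the parallel one.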
template <bool parallel>
class ScavengerVisitorBase : public ObjectPointerVisitor {
 public:
  ScavengerVisitorBase(IsolateGroup* isolate_group,
                       Scavenger* scavenger,
                       SemiSpace* from,
                       FreeList* freelist,
                       PromotionStack* promotion_stack)
      : ObjectPointerVisitor(isolate_group),
        scavenger_(scavenger),
        // ...
        page_space_(scavenger->heap_->old_space()),
        // ...
        visiting_old_object_(nullptr),
        promoted_list_(promotion_stack) {}

  constexpr static const char* const kName = "Scavenger";
  void VisitTypedDataViewPointers(TypedDataViewPtr view,
                                  CompressedObjectPtr* first,
                                  CompressedObjectPtr* last) override {
    // The backing typed data cannot be dereferenced safely here (another
    // worker may have forwarded it), so compare the data pointer against
    // what it would be for an internal view.
    const bool is_external =
        view->untag()->data_ != view->untag()->DataFieldForInternalTypedData();
    // ... forward the view's references ...
    if (view->untag()->data_ == nullptr) {
      // ...
    }
    // ...
    TypedDataBasePtr td = view->untag()->typed_data();
    ASSERT(td->IsHeapObject());
    // ...
    view->untag()->RecomputeDataFieldForInternalTypedData();
  }
  void VisitPointers(ObjectPtr* first, ObjectPtr* last) override {
    for (ObjectPtr* current = first; current <= last; current++) {
      ScavengePointer(current);
    }
  }

#if defined(DART_COMPRESSED_POINTERS)
  // The compressed-pointer variant loops the same way, calling
  //   ScavengeCompressedPointer(heap_base, current);
#endif
  void VisitingOldObject(ObjectPtr obj) {
    ASSERT((obj == nullptr) || obj->IsOldObject());
    visiting_old_object_ = obj;
    if (obj != nullptr) {
      // ...
    }
  }
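
  // Each processing phase runs under a setjmp so that AbortScavenge() can
  // longjmp out when the scavenge must be abandoned partway (e.g. when
  // neither promotion nor copying can find space); callers then only need to
  // check scavenger_->abort_.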
  void ProcessRoots() {
    // ...
    LongJumpScope jump(thread_);
    if (setjmp(*jump.Set()) == 0) {
      scavenger_->IterateRoots(this);
    } else {
      ASSERT(scavenger_->abort_);
    }
  }

  void ProcessSurvivors() {
    LongJumpScope jump(thread_);
    if (setjmp(*jump.Set()) == 0) {
      // ... drain to-space and the promoted list ...
      ProcessPromotedList();
      // ...
    } else {
      ASSERT(scavenger_->abort_);
    }
  }

  void ProcessAll() {
    LongJumpScope jump(thread_);
    if (setjmp(*jump.Set()) == 0) {
      do {
        // ...
        ProcessPromotedList();
        // ...
        ProcessWeakPropertiesScoped();
      } while (HasWork());
    } else {
      ASSERT(scavenger_->abort_);
    }
  }

  void ProcessWeak() {
    LongJumpScope jump(thread_);
    if (setjmp(*jump.Set()) == 0) {
      ProcessWeakPropertiesScoped();
    } else {
      ASSERT(scavenger_->abort_);
    }
  }

  bool HasWork() {
    if (scavenger_->abort_) return false;
    return (scan_ != tail_) || (scan_ != nullptr && !scan_->IsResolved()) ||
           !promoted_list_.IsEmpty();  // Last operand is an assumption.
  }

  bool WaitForWork(RelaxedAtomic<uintptr_t>* num_busy) {
    return promoted_list_.WaitForWork(num_busy, scavenger_->abort_);
  }
  void Finalize() {
    if (!scavenger_->abort_) {
      // ...
      for (Page* page = head_; page != nullptr; page = page->next()) {
        ASSERT(page->IsResolved());
        page->RecordSurvivors();
      }

      MournWeakProperties();
      MournWeakReferences();
      MournWeakArrays();
      MournFinalizerEntries();
      scavenger_->IterateWeak();
    }
    // ...
    if (!scavenger_->abort_) {
      // ...
    }
  }
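
  // ScavengePointer updates one slot: immediates and old objects are left
  // alone; new-space objects are copied (or their existing forwardee is
  // fetched) and the slot is rewritten. If the slot lives in an old object
  // that now points into new space, that object goes back in the store
  // buffer.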
  void ScavengePointer(ObjectPtr* p) {
    ObjectPtr obj = *p;
    if (obj->IsImmediateOrOldObject()) {
      return;
    }
    ObjectPtr new_obj = ScavengeObject(obj);
    *p = new_obj;
    if (new_obj->IsNewObject()) {
      ObjectPtr visiting_object = visiting_old_object_;
      if (visiting_object != nullptr &&
          visiting_object->untag()->TryAcquireRememberedBit()) {
        thread_->StoreBufferAddObjectGC(visiting_object);
      }
    }
  }

  void ScavengeCompressedPointer(uword heap_base, CompressedObjectPtr* p) {
    ObjectPtr obj = p->Decompress(heap_base);
    if (obj->IsImmediateOrOldObject()) {
      return;
    }
    ObjectPtr new_obj = ScavengeObject(obj);
    *p = new_obj;
    if (new_obj->IsNewObject()) {
      ObjectPtr visiting_object = visiting_old_object_;
      if (visiting_object != nullptr &&
          visiting_object->untag()->TryAcquireRememberedBit()) {
        thread_->StoreBufferAddObjectGC(visiting_object);
      }
    }
  }
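
  // Copies obj into to-space, or promotes it into old space if it already
  // survived a prior scavenge, then installs a forwarding header at the old
  // address so other references are redirected to the copy. If another
  // worker forwarded the object first, the racing copy is unwound.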
  ObjectPtr ScavengeObject(ObjectPtr obj) {
    uword raw_addr = UntaggedObject::ToAddr(obj);
    uword header = ReadHeaderRelaxed(obj);
    // ... if the header is already a forwarding header, return the forwardee ...

    intptr_t size = obj->untag()->HeapSize(header);
    uword new_addr = 0;
    if (!Page::Of(obj)->IsSurvivor(raw_addr)) {
      // Not a survivor of a previous scavenge: copy into to-space.
      new_addr = TryAllocateCopy(size);
    }
    if (new_addr == 0) {
      // Survivor of a previous scavenge: attempt to promote into old space.
      // ...
      if (UNLIKELY(new_addr == 0)) {
        // Promotion failed: fall back to to-space.
        scavenger_->failed_to_promote_ = true;
        new_addr = TryAllocateCopy(size);
        // ... abort the scavenge if this also fails ...
      }
    }

    objcpy(reinterpret_cast<void*>(new_addr),
           reinterpret_cast<void*>(raw_addr), size);

    ObjectPtr new_obj = UntaggedObject::FromAddr(new_addr);
    if (new_obj->IsOldObject()) {
      // ... update the header tags for an old-space object ...
      new_obj->untag()->tags_.store(tags, std::memory_order_relaxed);
    }

    // ... for internal typed data:
    //   static_cast<TypedDataPtr>(new_obj)->untag()->RecomputeDataField();

    uword forwarding_header = ForwardingHeader(new_obj);
    if (InstallForwardingPointer(raw_addr, &header, forwarding_header)) {
      if (new_obj->IsOldObject()) {
        // Promotion succeeded: remember the object so it gets traversed.
        promoted_list_.Push(new_obj);
        bytes_promoted_ += size;
      }
    } else {
      // Another worker installed a forwarding header first: undo our copy.
      if (new_obj->IsOldObject()) {
        // ...
        bytes_promoted_ -= size;
      }
      // ... use the winner's copy instead ...
    }
    return new_obj;
  }
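
  // In the parallel scavenger the forwarding header must be installed with a
  // compare-and-swap so exactly one worker wins the race to copy an object;
  // the serial scavenger can use a plain store.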
  bool InstallForwardingPointer(uword addr,
                                uword* old_header,
                                uword new_header) {
    if (parallel) {
      return reinterpret_cast<std::atomic<uword>*>(addr)
          ->compare_exchange_strong(*old_header, new_header,
                                    std::memory_order_relaxed);
    } else {
      *reinterpret_cast<uword*>(addr) = new_header;
      return true;
    }
  }
  uword TryAllocateCopy(intptr_t size) {
    // Bump-allocate in the current to-space tail page.
    if (tail_ != nullptr) {
      uword result = tail_->top_;
      uword new_top = result + size;
      if (LIKELY(new_top <= tail_->end_)) {
        tail_->top_ = new_top;
        return result;
      }
    }
    return TryAllocateCopySlow(size);
  }

  DART_NOINLINE uword TryAllocateCopySlow(intptr_t size);

  DART_NOINLINE DART_NORETURN void AbortScavenge() {
    if (FLAG_verbose_gc) {
      // ... log the abort ...
    }
    scavenger_->abort_ = true;
    // ... longjmp back to the setjmp in the active Process* phase ...
  }
  void ProcessToSpace();
  void ProcessPromotedList();
  void ProcessWeakPropertiesScoped();

  void MournWeakProperties() {
    weak_property_list_.Process([](WeakPropertyPtr weak_property) {
      // The key died: clear the property's key and value.
      weak_property->untag()->key_ = Object::null();
      weak_property->untag()->value_ = Object::null();
    });
  }

  void MournWeakReferences() {
    weak_reference_list_.Process([](WeakReferencePtr weak_reference) {
      ForwardOrSetNullIfCollected(weak_reference,
                                  &weak_reference->untag()->target_);
    });
  }

  void MournWeakArrays() {
    weak_array_list_.Process([](WeakArrayPtr weak_array) {
      intptr_t length = Smi::Value(weak_array->untag()->length());
      for (intptr_t i = 0; i < length; i++) {
        ForwardOrSetNullIfCollected(weak_array,
                                    &(weak_array->untag()->data()[i]));
      }
    });
  }

  void MournFinalizerEntries() {
    finalizer_entry_list_.Process([&](FinalizerEntryPtr finalizer_entry) {
      MournFinalizerEntry(this, finalizer_entry);
    });
  }

  // ...
  Scavenger* scavenger_;
  PageSpace* page_space_;
  intptr_t bytes_promoted_;
  ObjectPtr visiting_old_object_;

  LocalBlockWorkList<64, WeakArrayPtr> weak_array_list_;
  LocalBlockWorkList<64, WeakPropertyPtr> weak_property_list_;
  LocalBlockWorkList<64, WeakReferencePtr> weak_reference_list_;
  LocalBlockWorkList<64, FinalizerEntryPtr> finalizer_entry_list_;

  Page* head_ = nullptr;
  Page* tail_ = nullptr;  // Allocating from here.
  Page* scan_ = nullptr;  // Resolving from here.
static bool IsUnreachable(const ObjectPtr obj) {
  if (obj->IsImmediateOrOldObject()) {
    return false;
  }
  return !IsForwarding(ReadHeaderRelaxed(obj));
}
class ParallelScavengerTask : public ThreadPool::Task {
 public:
  ParallelScavengerTask(IsolateGroup* isolate_group,
                        ThreadBarrier* barrier,
                        ParallelScavengerVisitor* visitor,
                        RelaxedAtomic<uintptr_t>* num_busy)
      : isolate_group_(isolate_group),
        barrier_(barrier),
        visitor_(visitor),
        num_busy_(num_busy) {}

  void RunEnteredIsolateGroup() {
    // ...
    bool more_to_scavenge = false;
    do {
      // ... drain local work, then rendezvous at the barrier ...
      more_to_scavenge = visitor_->HasWork();
      if (more_to_scavenge) {
        num_busy_->fetch_add(1u);
      }
      // ...
      if (!more_to_scavenge && (num_busy_->load() > 0)) {
        // Keep going as long as any worker still has work to do.
        more_to_scavenge = true;
      }
      // ...
    } while (more_to_scavenge);
    // ...
  }
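
// A SemiSpace is the linked list of pages making up to-space (or, after the
// flip, from-space); pages are added lazily until the capacity reaches
// gc_threshold_in_words_.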
SemiSpace::SemiSpace(intptr_t gc_threshold_in_words)
    : gc_threshold_in_words_(gc_threshold_in_words) {}

SemiSpace::~SemiSpace() {
  Page* page = head_;
  while (page != nullptr) {
    Page* next = page->next();
    // ... deallocate the page ...
    page = next;
  }
}

Page* SemiSpace::TryAllocatePageLocked(bool link) {
  if (capacity_in_words_ >= gc_threshold_in_words_) {
    return nullptr;  // Full.
  }
  // ... allocate a fresh page; bail out on OOM ...
  if (page == nullptr) {
    return nullptr;
  }
  if (link) {
    if (head_ == nullptr) {
      head_ = tail_ = page;
    } else {
      tail_->set_next(page);
      tail_ = page;
    }
  }
  return page;
}

bool SemiSpace::Contains(uword addr) const {
  for (Page* page = head_; page != nullptr; page = page->next()) {
    if (page->Contains(addr)) return true;
  }
  return false;
}

void SemiSpace::WriteProtect(bool read_only) {
  for (Page* page = head_; page != nullptr; page = page->next()) {
    page->WriteProtect(read_only);
  }
}

void SemiSpace::AddList(Page* head, Page* tail) {
  if (head == nullptr) return;
  if (head_ == nullptr) {
    head_ = head;
    tail_ = tail;
    return;
  }
  // ... append to the existing list ...
}
Scavenger::Scavenger(Heap* heap, intptr_t max_semi_capacity_in_words)
    : heap_(heap),
      max_semi_capacity_in_words_(max_semi_capacity_in_words) {
  // ...
  const intptr_t initial_semi_capacity_in_words = Utils::Minimum(
      max_semi_capacity_in_words, FLAG_new_gen_semi_initial_size * MBInWords);
  to_ = new SemiSpace(initial_semi_capacity_in_words);
  idle_scavenge_threshold_in_words_ = initial_semi_capacity_in_words;

  UpdateMaxHeapCapacity();
  UpdateMaxHeapUsage();
}

Scavenger::~Scavenger() {
  // ...
  ASSERT(blocks_ == nullptr);
}
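
// Chooses the size of the next semispace: keep the current size if recent
// scavenges freed enough garbage, otherwise grow by
// FLAG_new_gen_growth_factor, capped by the configured maximum.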
intptr_t Scavenger::NewSizeInWords(intptr_t old_size_in_words,
                                   GCReason reason) {
  // ...
  if (stats_history_.Size() != 0) {
    double garbage =
        stats_history_.Get(0).ExpectedGarbageFraction(old_size_in_words);
    if (garbage < (FLAG_new_gen_garbage_threshold / 100.0)) {
      return Utils::Minimum(max_semi_capacity_in_words_,
                            old_size_in_words * FLAG_new_gen_growth_factor);
    }
  }
  return old_size_in_words;
}
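
// FLAG_verify_store_buffer support: first collect every object in the store
// buffer into a set, then walk old space and check that every old object
// pointing into new space is either in that set or covered by a remembered
// card.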
class CollectStoreBufferVisitor : public ObjectPointerVisitor {
 public:
  CollectStoreBufferVisitor(ObjectSet* in_store_buffer, const char* msg)
      : ObjectPointerVisitor(IsolateGroup::Current()),
        in_store_buffer_(in_store_buffer),
        msg_(msg) {}

  void VisitPointers(ObjectPtr* from, ObjectPtr* to) override {
    for (ObjectPtr* ptr = from; ptr <= to; ptr++) {
      ObjectPtr obj = *ptr;
      // ... check the entry is a valid remembered old object ...
      in_store_buffer_->Add(obj);
    }
  }

#if defined(DART_COMPRESSED_POINTERS)
  // ... (store buffer slots are never compressed) ...
#endif

 private:
  ObjectSet* const in_store_buffer_;
  const char* const msg_;
};
class CheckStoreBufferVisitor : public ObjectVisitor,
                                public ObjectPointerVisitor {
 public:
  CheckStoreBufferVisitor(ObjectSet* in_store_buffer,
                          const SemiSpace* to,
                          const char* msg)
      : ObjectPointerVisitor(IsolateGroup::Current()),
        in_store_buffer_(in_store_buffer),
        to_(to),
        msg_(msg) {}

  void VisitObject(ObjectPtr obj) override {
    // ...
    is_remembered_ = obj->untag()->IsRemembered();
    is_card_remembered_ = obj->untag()->IsCardRemembered();
    if (is_card_remembered_) {
      // ...
    }
    visiting_ = obj;
    obj->untag()->VisitPointers(this);
  }

  void VisitPointers(ObjectPtr* from, ObjectPtr* to) override {
    for (ObjectPtr* ptr = from; ptr <= to; ptr++) {
      ObjectPtr obj = *ptr;
      if (obj->IsHeapObject() && obj->IsNewObject()) {
        if (is_card_remembered_) {
          if (!Page::Of(visiting_)->IsCardRemembered(ptr)) {
            FATAL("%s: Old object %#" Px " references new object %#" Px
                  ", but the "
                  "slot's card is not remembered. Consider using rr to watch the "
                  "slot %p and reverse-continue to find the store with a missing "
                  "barrier.",
                  msg_, static_cast<uword>(visiting_), static_cast<uword>(obj),
                  ptr);
          }
        } else if (!is_remembered_) {
          FATAL("%s: Old object %#" Px " references new object %#" Px
                ", but it is "
                "not in any store buffer. Consider using rr to watch the "
                "slot %p and reverse-continue to find the store with a missing "
                "barrier.",
                msg_, static_cast<uword>(visiting_), static_cast<uword>(obj),
                ptr);
        }
        // ...
      }
    }
  }

#if defined(DART_COMPRESSED_POINTERS)
  void VisitCompressedPointers(uword heap_base,
                               CompressedObjectPtr* from,
                               CompressedObjectPtr* to) override {
    for (CompressedObjectPtr* ptr = from; ptr <= to; ptr++) {
      ObjectPtr obj = ptr->Decompress(heap_base);
      if (obj->IsHeapObject() && obj->IsNewObject()) {
        if (is_card_remembered_) {
          if (!Page::Of(visiting_)->IsCardRemembered(ptr)) {
            FATAL("%s: Old object %#" Px " references new object %#" Px
                  ", but the "
                  "slot's card is not remembered. Consider using rr to watch the "
                  "slot %p and reverse-continue to find the store with a missing "
                  "barrier.",
                  msg_, static_cast<uword>(visiting_), static_cast<uword>(obj),
                  ptr);
          }
        } else if (!is_remembered_) {
          FATAL("%s: Old object %#" Px " references new object %#" Px
                ", but it is "
                "not in any store buffer. Consider using rr to watch the "
                "slot %p and reverse-continue to find the store with a missing "
                "barrier.",
                msg_, static_cast<uword>(visiting_), static_cast<uword>(obj),
                ptr);
        }
        // ...
      }
    }
  }
#endif

 private:
  const ObjectSet* const in_store_buffer_;
  const SemiSpace* const to_;
  const char* const msg_;
  ObjectPtr visiting_;
  bool is_remembered_;
  bool is_card_remembered_;
};
void Scavenger::VerifyStoreBuffers(const char* msg) {
  Thread* thread = Thread::Current();
  StackZone stack_zone(thread);
  Zone* zone = stack_zone.GetZone();

  ObjectSet* in_store_buffer = new (zone) ObjectSet(zone);
  heap_->AddRegionsToObjectSet(in_store_buffer);
  {
    CollectStoreBufferVisitor visitor(in_store_buffer, msg);
    // ... walk the store buffer blocks ...
  }
  {
    CheckStoreBufferVisitor visitor(in_store_buffer, to_, msg);
    // ... walk all old-space objects ...
  }
}
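
// Flips the semispaces: the current to-space becomes from-space and a fresh
// to-space is allocated, sized by NewSizeInWords().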
SemiSpace* Scavenger::Prologue(GCReason reason) {
  // ...
  if (FLAG_verify_store_buffer) {
    // ...
    VerifyStoreBuffers("Verifying remembered set before Scavenge");
  }

  // Stash the old remembered set and marking stacks before any worker begins
  // adding to the new ones.
  // ...
  GCMarker* marker = heap_->old_space()->marker();
  mark_blocks_ = marker->marking_stack_.PopAll();
  new_blocks_ = marker->new_marking_stack_.PopAll();
  deferred_blocks_ = marker->deferred_marking_stack_.PopAll();

  UpdateMaxHeapCapacity();

  SemiSpace* from;
  {
    MutexLocker ml(&space_lock_);
    from = to_;
    to_ = new SemiSpace(NewSizeInWords(from->gc_threshold_in_words(), reason));
  }
  return from;
}
void Scavenger::Epilogue(SemiSpace* from) {
  // ...
  // Every mutator TLAB must have been released before the scavenge.
  heap_->isolate_group()->ForEachIsolate(
      [&](Isolate* isolate) {
        Thread* mutator_thread = isolate->mutator_thread();
        ASSERT(mutator_thread == nullptr || mutator_thread->top() == 0);
      },
      /*at_safepoint=*/true);

  // Decide whether to tenure survivors early next scavenge, based on a
  // decaying average of promotion-candidate survival.
  double avg_frac = stats_history_.Get(0).PromoCandidatesSuccessFraction();
  if (stats_history_.Size() >= 2) {
    // The previous scavenge is only given half as much weight.
    avg_frac += 0.5 * stats_history_.Get(1).PromoCandidatesSuccessFraction();
    avg_frac /= 1.0 + 0.5;  // Normalize.
  }
  early_tenure_ = avg_frac >= (FLAG_early_tenuring_threshold / 100.0);

  // Update the estimate of scavenger speed, used to decide whether an idle
  // scavenge fits in an idle period.
  intptr_t history_used = 0;
  intptr_t history_micros = 0;
  ASSERT(stats_history_.Size() > 0);
  for (intptr_t i = 0; i < stats_history_.Size(); i++) {
    history_used += stats_history_.Get(i).UsedBeforeInWords();
    history_micros += stats_history_.Get(i).DurationMicros();
  }
  if (history_micros == 0) {
    history_micros = 1;
  }
  scavenge_words_per_micro_ = history_used / history_micros;
  if (scavenge_words_per_micro_ == 0) {
    scavenge_words_per_micro_ = 1;
  }

  // New-space growth we must see before the next idle scavenge is worthwhile.
  intptr_t average_idle_task_micros = 6000;
  idle_scavenge_threshold_in_words_ =
      scavenge_words_per_micro_ * average_idle_task_micros;
  // Clamp between a lower bound (don't scavenge too frequently) and an upper
  // bound (start idle scavenges before new space is full).
  // ...
  if (idle_scavenge_threshold_in_words_ < lower_bound) {
    idle_scavenge_threshold_in_words_ = lower_bound;
  }
  // ...
  if (idle_scavenge_threshold_in_words_ > upper_bound) {
    idle_scavenge_threshold_in_words_ = upper_bound;
  }

  if (FLAG_verify_store_buffer) {
    // ...
    VerifyStoreBuffers("Verifying remembered set after Scavenge");
  }

  UpdateMaxHeapUsage();
  if (heap_ != nullptr) {
    heap_->UpdateGlobalMaxUsed();
  }
}
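
// Decides whether an idle period is long enough to hide a scavenge: compare
// the deadline against the estimated completion time derived from
// scavenge_words_per_micro_.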
bool Scavenger::ShouldPerformIdleScavenge(int64_t deadline) {
  // ...
  intptr_t used_in_words = UsedInWords() + freed_in_words_;
  intptr_t external_in_words = ExternalInWords();
  // ...
  bool for_new_space = (used_in_words >= idle_scavenge_threshold_in_words_) ||
                       (external_in_words >= idle_scavenge_threshold_in_words_);
  if (!for_new_space) {
    return false;
  }

  int64_t estimated_scavenge_completion =
      OS::GetCurrentMonotonicMicros() +
      used_in_words / scavenge_words_per_micro_;
  return estimated_scavenge_completion <= deadline;
}
template <bool parallel>
void Scavenger::IterateStoreBuffers(ScavengerVisitorBase<parallel>* visitor) {
  // Drain the old remembered set: each object gets its new-space pointers
  // updated, and is re-remembered only if it still points into new space.
  StoreBufferBlock* pending = blocks_;
  while (pending != nullptr) {
    StoreBufferBlock* next = pending->next();
    while (!pending->IsEmpty()) {
      ObjectPtr obj = pending->Pop();
      ASSERT(!obj->IsForwardingCorpse());
      ASSERT(obj->untag()->IsRemembered());
      obj->untag()->ClearRememberedBit();
      visitor->VisitingOldObject(obj);
      visitor->ProcessObject(obj);
    }
    // ... recycle the emptied block ...
    blocks_ = pending = next;
  }
  // ...
}
template <bool parallel>
void Scavenger::IterateRememberedCards(
    ScavengerVisitorBase<parallel>* visitor) {
  // ...
  heap_->old_space()->VisitRememberedCards(visitor);
}

void Scavenger::IterateObjectIdTable(ObjectPointerVisitor* visitor) {
#ifndef PRODUCT
  heap_->isolate_group()->VisitObjectIdRingPointers(visitor);
#endif
}

template <bool parallel>
void Scavenger::IterateRoots(ScavengerVisitorBase<parallel>* visitor) {
  for (;;) {
    // Slices of the root set are claimed atomically, splitting the work
    // between parallel workers.
    intptr_t slice = root_slices_started_.fetch_add(1);
    // ... stop once every slice has been claimed ...
    switch (slice) {
      // ...
        IterateIsolateRoots(visitor);
        break;
      // ...
        IterateObjectIdTable(visitor);
        break;
      // ...
        IterateStoreBuffers(visitor);
        break;
      // ...
        IterateRememberedCards(visitor);
        break;
    }
  }
}
void Scavenger::IterateWeak() {
  for (;;) {
    intptr_t slice = weak_slices_started_.fetch_add(1);
    // ... dispatch the claimed weak slice (weak handles, weak tables, ...) ...
  }
}

// ...
  Prune(&deferred_blocks_, &marker->deferred_marking_stack_);
void Scavenger::MournWeakHandles() {
  Thread* thread = Thread::Current();
  // ...
  ScavengerWeakVisitor weak_visitor(thread);
  heap_->isolate_group()->VisitWeakPersistentHandles(&weak_visitor);
}
template <bool parallel>
void ScavengerVisitorBase<parallel>::ProcessToSpace() {
  VisitingOldObject(nullptr);
  while (scan_ != nullptr) {
    uword resolved_top = scan_->resolved_top_;
    while (resolved_top < scan_->top_) {
      ObjectPtr obj = UntaggedObject::FromAddr(resolved_top);
      resolved_top += ProcessObject(obj);
    }
    scan_->resolved_top_ = resolved_top;

    Page* next = scan_->next();
    if (next == nullptr) {
      // Leave scan_ at the tail; more objects may still be copied here.
      break;
    }
    scan_ = next;
  }
}
template <bool parallel>
void ScavengerVisitorBase<parallel>::ProcessPromotedList() {
  ObjectPtr obj;
  while (promoted_list_.Pop(&obj)) {
    VisitingOldObject(obj);
    ProcessObject(obj);
    // While marking is in progress, promoted objects are also handed to the
    // marker.
    if (thread_->is_marking() && obj->untag()->TryAcquireMarkBit()) {
      thread_->MarkingStackAddObject(obj);
    }
  }
}
template <bool parallel>
void ScavengerVisitorBase<parallel>::ProcessWeakPropertiesScoped() {
  if (scavenger_->abort_) return;
  // ...
  weak_property_list_.Process([&](WeakPropertyPtr weak_property) {
    ObjectPtr key = weak_property->untag()->key();
    if (/* key survived the scavenge */) {
      // The key is alive: the property's fields are now strong.
      VisitingOldObject(weak_property->IsOldObject() ? weak_property : nullptr);
      weak_property->untag()->VisitPointersNonvirtual(this);
    } else {
      // The key is not (yet) known to be alive: revisit this property later.
      weak_property_list_.Push(weak_property);
    }
  });
}
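
// Weak properties behave like ephemerons: a property's value is only traced
// once its key is known to be live, so ProcessAll alternates between
// draining the work lists and re-examining queued properties until a fixed
// point is reached.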
void Scavenger::UpdateMaxHeapCapacity() {
  ASSERT(to_ != nullptr);
  ASSERT(heap_ != nullptr);
  auto isolate_group = heap_->isolate_group();
  ASSERT(isolate_group != nullptr);
  isolate_group->GetHeapNewCapacityMaxMetric()->SetValue(
      to_->capacity_in_words() * kWordSize);
}

void Scavenger::UpdateMaxHeapUsage() {
  ASSERT(heap_ != nullptr);
  auto isolate_group = heap_->isolate_group();
  ASSERT(isolate_group != nullptr);
  // ... update the new-space max-usage metric ...
}
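
// IsScavengeSurvivor reports whether an object is already known to be live
// this cycle. ProcessObject visits one object's fields; the weak classes
// (WeakProperty, WeakReference, WeakArray, FinalizerEntry) are deferred onto
// side lists when their targets are not yet proven live, and everything else
// is visited strongly.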
static bool IsScavengeSurvivor(ObjectPtr obj) {
  if (obj->IsImmediateOrOldObject()) return true;
  return IsForwarding(ReadHeaderRelaxed(obj));
}

template <bool parallel>
intptr_t ScavengerVisitorBase<parallel>::ProcessObject(ObjectPtr obj) {
#if defined(DEBUG)
  if (obj->IsNewObject()) {
    ASSERT(visiting_old_object_ == nullptr);
  } else {
    ASSERT(visiting_old_object_ == obj);
  }
#endif

  intptr_t cid = obj->GetClassId();
  if (UNLIKELY(cid == kWeakPropertyCid)) {
    WeakPropertyPtr weak_property = static_cast<WeakPropertyPtr>(obj);
    if (!IsScavengeSurvivor(weak_property->untag()->key())) {
      weak_property_list_.Push(weak_property);
      return WeakProperty::InstanceSize();
    }
  } else if (UNLIKELY(cid == kWeakReferenceCid)) {
    WeakReferencePtr weak_reference = static_cast<WeakReferencePtr>(obj);
    if (!IsScavengeSurvivor(weak_reference->untag()->target())) {
      // The type_arguments_ field is always strong.
#if !defined(DART_COMPRESSED_POINTERS)
      ScavengePointer(&weak_reference->untag()->type_arguments_);
#else
      ScavengeCompressedPointer(weak_reference->heap_base(),
                                &weak_reference->untag()->type_arguments_);
#endif
      weak_reference_list_.Push(weak_reference);
      return WeakReference::InstanceSize();
    }
  } else if (UNLIKELY(cid == kWeakArrayCid)) {
    WeakArrayPtr weak_array = static_cast<WeakArrayPtr>(obj);
    weak_array_list_.Push(weak_array);
    // ...
  } else if (UNLIKELY(cid == kFinalizerEntryCid)) {
    FinalizerEntryPtr finalizer_entry = static_cast<FinalizerEntryPtr>(obj);
    // The token_ and next_ fields are always strong.
#if !defined(DART_COMPRESSED_POINTERS)
    ScavengePointer(&finalizer_entry->untag()->token_);
    ScavengePointer(&finalizer_entry->untag()->next_);
#else
    ScavengeCompressedPointer(finalizer_entry->heap_base(),
                              &finalizer_entry->untag()->token_);
    ScavengeCompressedPointer(finalizer_entry->heap_base(),
                              &finalizer_entry->untag()->next_);
#endif
    finalizer_entry_list_.Push(finalizer_entry);
    // ...
  }
  return obj->untag()->VisitPointersNonvirtual(this);
}
void Scavenger::MournWeakTables() {
  // ...
  auto rehash_weak_table = [&](WeakTable* table, WeakTable* replacement_new,
                               WeakTable* replacement_old,
                               Dart_HeapSamplingDeleteCallback cleanup) {
    intptr_t size = table->size();
    for (intptr_t i = 0; i < size; i++) {
      if (table->IsValidEntryAtExclusive(i)) {
        ObjectPtr obj = table->ObjectAtExclusive(i);
        ASSERT(obj->IsHeapObject());
        // ... if the object survived, preserve its entry:
        WeakTable* replacement =
            obj->IsNewObject() ? replacement_new : replacement_old;
        replacement->SetValueExclusive(obj, table->ValueAtExclusive(i));
        // ... otherwise, run the cleanup callback if one was supplied:
        if (cleanup != nullptr) {
          cleanup(reinterpret_cast<void*>(table->ValueAtExclusive(i)));
        }
        // ...
      }
    }
  };

  // ...
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
  // ... rehash the heap-sampling weak table ...
  rehash_weak_table(table, table_new, table_old, cleanup);
#endif
  // ...

  // Each isolate might have a weak table used for fast snapshot writing
  // (isolate communication); rehash those too.
  heap_->isolate_group()->ForEachIsolate(
      [&](Isolate* isolate) {
        auto table = isolate->forward_table_new();
        if (table != nullptr) {
          auto replacement = WeakTable::NewFrom(table);
          rehash_weak_table(table, replacement, isolate->forward_table_old(),
                            nullptr);
          isolate->set_forward_table_new(replacement);
        }
      },
      /*at_safepoint=*/true);
}
  void VisitPointers(ObjectPtr* first, ObjectPtr* last) override {
    for (ObjectPtr* p = first; p <= last; p++) {
      ObjectPtr obj = *p;
      // ...
      if (obj->IsNewObject()) {
        // ...
      }
    }
  }

#if defined(DART_COMPRESSED_POINTERS)
  void VisitCompressedPointers(uword heap_base,
                               CompressedObjectPtr* first,
                               CompressedObjectPtr* last) override {
    // ...
  }
#endif

// ...
  while (true) {
    // ... take the next block of marking-stack entries ...
    if (reading == nullptr) break;
    // ...
    ASSERT(obj->IsHeapObject());
    if (obj->IsNewObject()) {
      // ...
    }
    // ...
  }
// ...

void Scavenger::PruneWeak(GCLinkedLists* deferred) {
  // ...
  PruneWeak(&deferred->finalizer_entries);
}
template <typename Type, typename PtrType>
void Scavenger::PruneWeak(GCLinkedList<Type, PtrType>* list) {
  PtrType weak = list->Release();
  while (weak != Object::null()) {
    PtrType next;
    if (weak->IsOldObject()) {
      next = weak->untag()->next_seen_by_gc_.Decompress(weak->heap_base());
      weak->untag()->next_seen_by_gc_ = Type::null();
      list->Enqueue(weak);
    } else {
      uword header = ReadHeaderRelaxed(weak);
      if (IsForwarding(header)) {
        weak = static_cast<PtrType>(ForwardedObj(header));
        next = weak->untag()->next_seen_by_gc_.Decompress(weak->heap_base());
        weak->untag()->next_seen_by_gc_ = Type::null();
        list->Enqueue(weak);
      } else {
        // The object died in this scavenge: drop it from the list.
        next = weak->untag()->next_seen_by_gc_.Decompress(weak->heap_base());
      }
    }
    weak = next;
  }
}
// Returns whether the object referred to by *ptr_address was collected in
// this scavenge; a surviving target is forwarded in place.
template <bool parallel>
bool ScavengerVisitorBase<parallel>::ForwardOrSetNullIfCollected(
    ObjectPtr parent,
    CompressedObjectPtr* ptr_address) {
  ObjectPtr target = ptr_address->Decompress(parent->heap_base());
  if (target->IsImmediateOrOldObject()) {
    // Not touched during this scavenge.
    return false;
  }
  // ...
  if (target->IsNewObject() && parent->IsOldObject() &&
      parent->untag()->TryAcquireRememberedBit()) {
    Thread::Current()->StoreBufferAddObjectGC(parent);
  }
  // ...
}
void Scavenger::VisitObjectPointers(ObjectPointerVisitor* visitor) const {
  for (Page* page = to_->head(); page != nullptr; page = page->next()) {
    page->VisitObjectPointers(visitor);
  }
}

void Scavenger::VisitObjects(ObjectVisitor* visitor) const {
  for (Page* page = to_->head(); page != nullptr; page = page->next()) {
    page->VisitObjects(visitor);
  }
}

void Scavenger::AddRegionsToObjectSet(ObjectSet* set) const {
  for (Page* page = to_->head(); page != nullptr; page = page->next()) {
    set->AddRegion(page->start(), page->end());
  }
}
void Scavenger::TryAllocateNewTLAB(Thread* thread,
                                   intptr_t min_size,
                                   bool can_safepoint) {
  // ...
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
  // Find the remaining space in the TLAB relative to the sampling boundary.
  intptr_t remaining = thread->true_end() - thread->top();
  const bool heap_sampling_enabled = thread->end() != thread->true_end();
  const bool is_first_tlab = thread->true_end() == 0;
  if (heap_sampling_enabled && remaining > min_size) {
    // This is a sampling point, not an actually full TLAB.
    thread->heap_sampler().SampleNewSpaceAllocation(min_size);
    return;
  }
#endif
  // ...
  MutexLocker ml(&space_lock_);
  for (Page* page = to_->head(); page != nullptr; page = page->next()) {
    if (page->owner() != nullptr) continue;
    intptr_t available =
        (page->end() - kAllocationRedZoneSize) - page->top();
    if (available >= min_size) {
      page->Acquire(thread);
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
      thread->heap_sampler().HandleNewTLAB(remaining, is_first_tlab);
#endif
      return;
    }
  }

  Page* page = to_->TryAllocatePageLocked(true);
  if (page == nullptr) {
    return;
  }
  page->Acquire(thread);
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
  thread->heap_sampler().HandleNewTLAB(remaining, is_first_tlab);
#endif
}

void Scavenger::AbandonRemainingTLABForDebugging(Thread* thread) {
  // ...
  intptr_t size = thread->end() - thread->top();
  // ...
}

intptr_t Scavenger::AbandonRemainingTLAB(Thread* thread) {
  if (thread->top() == 0) return 0;
  // ...
  allocated = page->Release(thread);
  // ...
  return allocated;
}
template <bool parallel>
uword ScavengerVisitorBase<parallel>::TryAllocateCopySlow(intptr_t size) {
  Page* page;
  {
    MutexLocker ml(&scavenger_->space_lock_);
    page = scavenger_->to_->TryAllocatePageLocked(false);
  }
  if (page == nullptr) {
    return 0;
  }

  if (head_ == nullptr) {
    head_ = scan_ = page;
  } else {
    ASSERT(scan_ != nullptr);
    tail_->set_next(page);
  }
  tail_ = page;

  return tail_->TryAllocateGC(size);
}
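
// Entry point for one scavenge cycle: flip the semispaces, copy the live
// objects (serially or in parallel), and either commit the result or reverse
// it if the scavenge had to abort.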
void Scavenger::Scavenge(Thread* thread, GCType type, GCReason reason) {
  // ...
  if (type == GCType::kEvacuate) {
    // Forces this scavenge to promote all the objects in the new space.
    early_tenure_ = true;
  }

  if (FLAG_verify_before_gc) {
    // ...
    heap_->VerifyGC("Verifying before Scavenge" /* ... */);
  }

  // ...
  failed_to_promote_ = false;
  root_slices_started_ = 0;
  weak_slices_started_ = 0;
  freed_in_words_ = 0;
  intptr_t abandoned_bytes = 0;
  // ...
  intptr_t promo_candidate_words = 0;
  for (Page* page = to_->head(); page != nullptr; page = page->next()) {
    // ...
    if (early_tenure_) {
      page->EarlyTenure();
    }
    promo_candidate_words += page->promo_candidate_words();
  }
  // ...
  SemiSpace* from = Prologue(reason);

  intptr_t bytes_promoted;
  if (FLAG_scavenger_tasks == 0) {
    bytes_promoted = SerialScavenge(from);
  } else {
    bytes_promoted = ParallelScavenge(from);
  }
  if (abort_) {
    ReverseScavenge(&from);
    bytes_promoted = 0;
  } else {
    if (/* new space is nearly full */) {
      // Don't scavenge again until the next old-space GC: avoids one
      // scavenge per allocation as the heap limit is approached.
      heap_->assume_scavenge_will_fail_ = true;
    }
  }
  // ... record scavenge stats (including failed_to_promote_) ...
  Epilogue(from);

  if (FLAG_verify_after_gc) {
    // ...
    heap_->VerifyGC("Verifying after Scavenge..." /* ... */);
  }

  // ...
  scavenging_ = false;
}
intptr_t Scavenger::SerialScavenge(SemiSpace* from) {
  FreeList* freelist = heap_->old_space()->DataFreeList();
  SerialScavengerVisitor visitor(heap_->isolate_group(), this, from, freelist,
                                 &promotion_stack_);
  visitor.ProcessRoots();
  visitor.ProcessAll();
  visitor.ProcessWeak();
  visitor.Finalize();
  to_->AddList(visitor.head(), visitor.tail());
  return visitor.bytes_promoted();
}

intptr_t Scavenger::ParallelScavenge(SemiSpace* from) {
  intptr_t bytes_promoted = 0;
  const intptr_t num_tasks = FLAG_scavenger_tasks;
  ASSERT(num_tasks > 0);

  ThreadBarrier* barrier = new ThreadBarrier(num_tasks, 1);
  RelaxedAtomic<uintptr_t> num_busy = 0;

  ParallelScavengerVisitor** visitors =
      new ParallelScavengerVisitor*[num_tasks];
  for (intptr_t i = 0; i < num_tasks; i++) {
    FreeList* freelist = heap_->old_space()->DataFreeList(i);
    visitors[i] = new ParallelScavengerVisitor(
        heap_->isolate_group(), this, from, freelist, &promotion_stack_);
    if (i < (num_tasks - 1)) {
      // Begin scavenging on a helper thread.
      bool result = Dart::thread_pool()->Run<ParallelScavengerTask>(
          heap_->isolate_group(), barrier, visitors[i], &num_busy);
      ASSERT(result);
    } else {
      // The last worker runs on the main thread.
      ParallelScavengerTask task(heap_->isolate_group(), barrier, visitors[i],
                                 &num_busy);
      task.RunEnteredIsolateGroup();
      barrier->Sync();
      barrier->Release();
    }
  }

  for (intptr_t i = 0; i < num_tasks; i++) {
    ParallelScavengerVisitor* visitor = visitors[i];
    visitor->Finalize();
    to_->AddList(visitor->head(), visitor->tail());
    bytes_promoted += visitor->bytes_promoted();
    delete visitor;
  }
  delete[] visitors;

  return bytes_promoted;
}
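
// An aborted scavenge leaves some objects copied and some not. Restore the
// from-space originals as canonical: swap the semispaces back and turn every
// to-space copy into a forwarding corpse pointing at its original.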
void Scavenger::ReverseScavenge(SemiSpace** from) {
  // ...
  class ReverseFromForwardingVisitor : public ObjectVisitor {
    void VisitObject(ObjectPtr from_obj) override {
      uword header = ReadHeaderRelaxed(from_obj);
      if (IsForwarding(header)) {
        ObjectPtr to_obj = ForwardedObj(header);
        uword to_header = ReadHeaderRelaxed(to_obj);
        intptr_t size = to_obj->untag()->HeapSize();

        // Reset the age bits in case this was a promotion.
        uword from_header = static_cast<uword>(to_header);
        from_header =
            UntaggedObject::OldAndNotRememberedBit::update(false, from_header);
        from_header = UntaggedObject::NewBit::update(true, from_header);

        WriteHeaderRelaxed(from_obj, from_header);

        // Turn the abandoned to-space copy into a forwarding corpse pointing
        // back at the original.
        ForwardingCorpse::AsForwarder(UntaggedObject::ToAddr(to_obj), size)
            ->set_target(from_obj);
      }
    }
  };

  ReverseFromForwardingVisitor visitor;
  for (Page* page = (*from)->head(); page != nullptr; page = page->next()) {
    page->VisitObjects(&visitor);
  }

  // Swap from-space and to-space, rolling back the flip.
  {
    MutexLocker ml(&space_lock_);
    SemiSpace* temp = to_;
    to_ = *from;
    *from = temp;
  }

  // ...
  promotion_stack_.Reset();

  StoreBufferBlock* pending = blocks_;
  while (pending != nullptr) {
    // ... restore the remembered bits of the stashed objects ...
  }

  // Return the marking stacks that were stashed in the prologue.
  GCMarker* marker = heap_->old_space()->marker();
  marker->marking_stack_.PushAll(mark_blocks_);
  mark_blocks_ = nullptr;
  marker->marking_stack_.PushAll(new_blocks_);
  new_blocks_ = nullptr;
  marker->deferred_marking_stack_.PushAll(deferred_blocks_);
  deferred_blocks_ = nullptr;

  // ...
  heap_->assume_scavenge_will_fail_ = true;
}
void Scavenger::PrintToJSONObject(JSONObject* object) const {
  auto isolate_group = IsolateGroup::Current();
  ASSERT(isolate_group != nullptr);
  JSONObject space(object, "new");
  // ...
  if (collections() > 0) {
    int64_t run_time = isolate_group->UptimeMicros();
    double run_time_millis = MicrosecondsToMilliseconds(run_time);
    double avg_time_between_collections =
        run_time_millis / static_cast<double>(collections());
    space.AddProperty("avgCollectionPeriodMillis",
                      avg_time_between_collections);
  } else {
    space.AddProperty("avgCollectionPeriodMillis", 0.0);
  }
  // ...
}