      page_space_(page_space),
      old_work_list_(old_marking_stack),
      new_work_list_(new_marking_stack),
      tlab_deferred_work_list_(tlab_deferred_marking_stack),
      deferred_work_list_(deferred_marking_stack),
      has_evacuation_candidate_(false) {}
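  // A marking visitor distributes its work over four lists: ordinary
  // old-space and new-space work, new-space objects deferred because they sit
  // in an active TLAB, and objects (e.g. Instructions, SuspendState) whose
  // visiting must wait for a safepoint. has_evacuation_candidate_ latches
  // when a visited slot points into an evacuation-candidate page, so the
  // holding object can be re-added to the remembered set or have its card
  // re-remembered.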
  void AddMicros(int64_t micros) { marked_micros_ += micros; }
  constexpr static const char* const kName = "Marker";
    ASSERT(raw->IsHeapObject());
    has_evacuation_candidate_ = false;
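  // Weak properties keep their value alive only while the key is alive. Each
  // pass below visits properties whose key has been marked since the last
  // pass (possibly discovering more work) and requeues the rest for a later
  // pass.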
    bool more_to_mark = false;
    WeakPropertyPtr cur_weak = delayed_.weak_properties.Release();
      WeakPropertyPtr next_weak =
          cur_weak->untag()->next_seen_by_gc_.Decompress(cur_weak->heap_base());
      if (raw_key->IsImmediateObject() || raw_key->untag()->IsMarked()) {
        if (!raw_val->IsImmediateObject() && !raw_val->untag()->IsMarked()) {
        if (has_evacuation_candidate_) {
          has_evacuation_candidate_ = false;
          if (!cur_weak->untag()->IsCardRemembered()) {
            if (cur_weak->untag()->TryAcquireRememberedBit()) {
        delayed_.weak_properties.Enqueue(cur_weak);
      cur_weak = next_weak;
    old_work_list_.Flush();
    new_work_list_.Flush();
    tlab_deferred_work_list_.Flush();
    deferred_work_list_.Flush();
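    // Flush hands the visitor's partially filled local work blocks back to
    // the shared marking stacks, so the work survives a pause and can later
    // be picked up by any marker.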
      ASSERT(!has_evacuation_candidate_);

      if (obj->IsNewObject()) {
        // New-space objects still inside a thread's active TLAB are deferred;
        // GCMarker::Prologue later moves them onto the regular new-space
        // marking stack once no mutator is allocating into the TLAB.
        tlab_deferred_work_list_.Push(obj);
      intptr_t size;
      if (class_id == kWeakPropertyCid) {
        size = ProcessWeakProperty(static_cast<WeakPropertyPtr>(obj));
      } else if (class_id == kWeakReferenceCid) {
        size = ProcessWeakReference(static_cast<WeakReferencePtr>(obj));
      } else if (class_id == kWeakArrayCid) {
        size = ProcessWeakArray(static_cast<WeakArrayPtr>(obj));
      } else if (class_id == kFinalizerEntryCid) {
        size = ProcessFinalizerEntry(static_cast<FinalizerEntryPtr>(obj));
      } else if (class_id == kSuspendStateCid) {
        // Shape changing is not compatible with concurrent marking.
        deferred_work_list_.Push(obj);
        ASSERT((class_id == kArrayCid) || (class_id == kImmutableArrayCid));
      if (has_evacuation_candidate_) {
        has_evacuation_candidate_ = false;
      if (!obj->IsNewObject()) {
        marked_bytes_ += size;
    new_work_list_.Flush();
    tlab_deferred_work_list_.Flush();
    deferred_work_list_.Flush();
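  // Large card-remembered arrays are not visited as a single object; they
  // are scanned card by card below so the work can be split into bounded
  // chunks and interrupted.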
    ASSERT(obj->IsArray() || obj->IsImmutableArray());
    ASSERT(obj->untag()->IsCardRemembered());
    for (intptr_t i = 0, n = page->card_table_size(); i < n; i++) {
      if (card_from < obj_from) {
        // First card overlaps with the object's header.
        card_from = obj_from;
      }
      if (card_to > obj_to) {
      if (has_evacuation_candidate_) {
        has_evacuation_candidate_ = false;
        page->RememberCard(card_from);
    return obj->untag()->HeapSize();
      ASSERT(!has_evacuation_candidate_);
      intptr_t size;
      if (class_id == kWeakPropertyCid) {
        size = ProcessWeakProperty(static_cast<WeakPropertyPtr>(obj));
      } else if (class_id == kWeakReferenceCid) {
        size = ProcessWeakReference(static_cast<WeakReferencePtr>(obj));
      } else if (class_id == kWeakArrayCid) {
        size = ProcessWeakArray(static_cast<WeakArrayPtr>(obj));
      } else if (class_id == kFinalizerEntryCid) {
        size = ProcessFinalizerEntry(static_cast<FinalizerEntryPtr>(obj));
        ASSERT((class_id == kArrayCid) || (class_id == kImmutableArrayCid));
      if (has_evacuation_candidate_) {
        has_evacuation_candidate_ = false;
      if (!obj->IsNewObject()) {
        marked_bytes_ += size;
    constexpr intptr_t kBudget = 512 * KB;
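    // Checking the deadline after every object would be too expensive, so
    // the drain below works in kBudget-sized chunks of marked bytes between
    // deadline checks.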
    while (old_work_list_.Pop(&obj)) {
      ASSERT(!has_evacuation_candidate_);
      intptr_t size;
      if (class_id == kWeakPropertyCid) {
        size = ProcessWeakProperty(static_cast<WeakPropertyPtr>(obj));
      } else if (class_id == kWeakReferenceCid) {
        size = ProcessWeakReference(static_cast<WeakReferencePtr>(obj));
      } else if (class_id == kWeakArrayCid) {
        size = ProcessWeakArray(static_cast<WeakArrayPtr>(obj));
      } else if (class_id == kFinalizerEntryCid) {
        size = ProcessFinalizerEntry(static_cast<FinalizerEntryPtr>(obj));
      } else if (sync && concurrent_ && class_id == kSuspendStateCid) {
        // Shape changing is not compatible with concurrent marking.
        deferred_work_list_.Push(obj);
        if ((class_id == kArrayCid) || (class_id == kImmutableArrayCid)) {
          size = obj->untag()->HeapSize();
          if (size > remaining_budget) {
            // Too large for what remains of the budget; put it back and
            // resume in the next marking increment.
            old_work_list_.Push(obj);
          ASSERT((class_id == kArrayCid) || (class_id == kImmutableArrayCid));
      if (has_evacuation_candidate_) {
        has_evacuation_candidate_ = false;
      marked_bytes_ += size;
      remaining_budget -= size;
      if (remaining_budget < 0) {
        return true;  // More to mark.
    bool has_evacuation_candidate = false;
    for (ObjectPtr* current = first; current <= last; current++) {
      has_evacuation_candidate |= MarkObject(LoadPointerIgnoreRace(current));
    }
    has_evacuation_candidate_ |= has_evacuation_candidate;
#if defined(DART_COMPRESSED_POINTERS)
    bool has_evacuation_candidate = false;
    for (CompressedObjectPtr* current = first; current <= last; current++) {
      has_evacuation_candidate |= MarkObject(
          LoadCompressedPointerIgnoreRace(current).Decompress(heap_base));
    }
    has_evacuation_candidate_ |= has_evacuation_candidate;
    if (raw_key->IsHeapObject() && !raw_key->untag()->IsMarked()) {
      delayed_.weak_properties.Enqueue(raw_weak);
      return raw_weak->untag()->HeapSize();
    return raw_weak->untag()->VisitPointersNonvirtual(this);
    if (raw_target->IsHeapObject()) {
      if (!raw_target->untag()->IsMarked()) {
        // Target is not (yet) marked, so the reference is potentially dead;
        // defer the decision.
        delayed_.weak_references.Enqueue(raw_weak);
          has_evacuation_candidate_ = true;
    if (MarkObject(raw_type_arguments)) {
      has_evacuation_candidate_ = true;
    }
    return raw_weak->untag()->HeapSize();
    delayed_.weak_arrays.Enqueue(raw_weak);
    return raw_weak->untag()->HeapSize();
    delayed_.finalizer_entries.Enqueue(raw_entry);
    // Only the entry's token and next fields are visited eagerly; raw_token
    // and raw_next stand in for those loaded values.
    if (MarkObject(raw_token)) {
      has_evacuation_candidate_ = true;
    }
    if (MarkObject(raw_next)) {
      has_evacuation_candidate_ = true;
    }
    return raw_entry->untag()->HeapSize();
    while (deferred_work_list_.Pop(&obj)) {
      ASSERT(!has_evacuation_candidate_);
      ASSERT(obj->IsHeapObject());
      if (TryAcquireMarkBit(obj)) {
        if (!obj->IsNewObject()) {
          marked_bytes_ += size;
      if (has_evacuation_candidate_) {
        has_evacuation_candidate_ = false;
    tlab_deferred_work_list_.Finalize();
    WeakPropertyPtr current = delayed_.weak_properties.Release();
      WeakPropertyPtr next = current->untag()->next_seen_by_gc();
    WeakReferencePtr current = delayed_.weak_references.Release();
      WeakReferencePtr next = current->untag()->next_seen_by_gc();
    WeakArrayPtr current = delayed_.weak_arrays.Release();
      WeakArrayPtr next = current->untag()->next_seen_by_gc();
    FinalizerEntryPtr current = delayed_.finalizer_entries.Release();
      FinalizerEntryPtr next = current->untag()->next_seen_by_gc();
    if (target->IsImmediateObject()) {
      return false;  // Immediates are never collected.
    }
    if (target->untag()->IsMarked()) {
      if (target->untag()->IsEvacuationCandidate()) {
    old_work_list_.Flush();
    new_work_list_.Flush();
    tlab_deferred_work_list_.Flush();
    deferred_work_list_.Flush();
    old_work_list_.Flush();
    new_work_list_.Flush();
    tlab_deferred_work_list_.Flush();
    tlab_deferred_work_list_.Finalize();
    deferred_work_list_.Flush();
  static bool TryAcquireMarkBit(ObjectPtr obj) {
    if constexpr (!sync) {
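  // MarkObject is the hot path of marking: immediates are ignored, unmarked
  // new-space objects go onto the new-space work list, and unmarked old-space
  // objects onto the old-space work list. Its return value reports whether
  // the target is an evacuation candidate; callers accumulate it into
  // has_evacuation_candidate_ so the referencing object can be re-remembered.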
  bool MarkObject(ObjectPtr obj) {
    if (obj->IsImmediateObject()) {
      return false;
    }

    if (obj->IsNewObject()) {
      if (TryAcquireMarkBit(obj)) {
        new_work_list_.Push(obj);
    uword tags = obj->untag()->tags_ignore_race();
    if (sync && UNLIKELY(class_id == kInstructionsCid)) {
      // Instructions pages may be non-writable (W^X) while the concurrent
      // marker runs, so the object's header cannot be updated here; defer it
      // to the safepoint instead.
      deferred_work_list_.Push(obj);
    if (TryAcquireMarkBit(obj)) {
      old_work_list_.Push(obj);
  PageSpace* page_space_;
  GCLinkedLists delayed_;
  uintptr_t marked_bytes_;
  int64_t marked_micros_;
  bool has_evacuation_candidate_;
  if (obj->IsImmediateObject()) {
void GCMarker::Prologue() {
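  // At this point the VM is at a safepoint, so no thread is allocating into
  // a TLAB anymore; work that was deferred because it sat in an active TLAB
  // can now be treated as ordinary new-space marking work.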
  new_marking_stack_.PushAll(tlab_deferred_marking_stack_.PopAll());
}

void GCMarker::Epilogue() {}
void GCMarker::ResetSlices() {
  root_slices_started_ = 0;
  root_slices_finished_ = 0;
  weak_slices_started_ = 0;
void GCMarker::IterateRoots(ObjectPointerVisitor* visitor) {
  intptr_t slice = root_slices_started_.fetch_add(1);
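  // Each fetch_add atomically claims the next root slice, letting any number
  // of marker threads pull slices until all have been handed out.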
  if (slice >= root_slices_count_) {
759 "ProcessIsolateGroupRoots");
766 "ProcessObjectIdTable");
  MonitorLocker ml(&root_slices_monitor_);
  root_slices_finished_++;
  if (root_slices_finished_ == root_slices_count_) {
void GCMarker::IterateWeakRoots(Thread* thread) {
  intptr_t slice = weak_slices_started_.fetch_add(1);
      ProcessWeakHandles(thread);
      ProcessWeakTables(thread);
      ProcessRememberedSet(thread);
void GCMarker::ProcessWeakHandles(Thread* thread) {
  MarkingWeakVisitor visitor(thread);
void GCMarker::ProcessWeakTables(Thread* thread) {
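  // Both the new-space and old-space weak tables are swept below: entries
  // whose object did not survive marking are invalidated, and the
  // heap-sampling profiler's delete callback, when one is installed, is run
  // for the dropped value.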
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
    for (intptr_t i = 0; i < size; i++) {
      if (table->IsValidEntryAtExclusive(i)) {
        ObjectPtr obj = table->ObjectAtExclusive(i);
        if (obj->IsHeapObject() && !obj->untag()->IsMarked()) {
          if (cleanup != nullptr) {
            cleanup(reinterpret_cast<void*>(table->ValueAtExclusive(i)));
          }
          table->InvalidateAtExclusive(i);
    for (intptr_t i = 0; i < size; i++) {
      if (table->IsValidEntryAtExclusive(i)) {
        ObjectPtr obj = table->ObjectAtExclusive(i);
        if (obj->IsHeapObject() && !obj->untag()->IsMarked()) {
          if (cleanup != nullptr) {
            cleanup(reinterpret_cast<void*>(table->ValueAtExclusive(i)));
          }
          table->InvalidateAtExclusive(i);
void GCMarker::ProcessRememberedSet(Thread* thread) {
  StoreBuffer* store_buffer = isolate_group_->store_buffer();
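  // Objects are popped off the old store-buffer blocks and pushed onto fresh
  // ones only if they are still marked, filtering objects that died during
  // this GC out of the remembered set.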
  while (reading != nullptr) {
    while (!reading->IsEmpty()) {
      ObjectPtr obj = reading->Pop();
      ASSERT(!obj->IsForwardingCorpse());
      ASSERT(obj->untag()->IsRemembered());
      if (obj->untag()->IsMarked()) {
        if (writing->IsFull()) {
          writing = store_buffer->PopNonFullBlock();
        isolate_group_(isolate_group),
        marking_stack_(marking_stack),
        num_busy_(num_busy) {}
      marker_->IterateRoots(visitor_);
      bool more_to_mark = false;
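      // Termination protocol: markers rendezvous at the barrier between
      // rounds, and as long as any marker still reports work (num_busy_ > 0)
      // every marker goes another round, since draining can push fresh work
      // onto the shared stack.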
        if (!more_to_mark && (num_busy_->load() > 0)) {
      } while (more_to_mark);
      marker_->IterateWeakRoots(thread);
    if (FLAG_log_marker_tasks) {
        isolate_group_(isolate_group),
        page_space_(page_space),
      marker_->IterateRoots(visitor_);
    if (FLAG_log_marker_tasks) {
  intptr_t marked_words_per_job_micro;
  if (marked_micros_ == 0) {
    marked_words_per_job_micro = marked_words();  // Prevent division by zero.
  } else {
    marked_words_per_job_micro = marked_words() / marked_micros_;
  }
  if (marked_words_per_job_micro == 0) {
    marked_words_per_job_micro = 1;
  intptr_t jobs = FLAG_marker_tasks;
  return marked_words_per_job_micro * jobs;
    : isolate_group_(isolate_group),
      old_marking_stack_(),
      new_marking_stack_(),
      tlab_deferred_marking_stack_(),
      deferred_marking_stack_(),
  for (intptr_t i = 0; i < FLAG_marker_tasks; i++) {
    visitors_[i] = nullptr;
  }
  for (intptr_t i = 0; i < FLAG_marker_tasks; i++) {
    delete visitors_[i];
  }
  isolate_group_->EnableIncrementalBarrier(
      &old_marking_stack_, &new_marking_stack_, &deferred_marking_stack_);
  const intptr_t num_tasks = FLAG_marker_tasks;
  for (intptr_t i = 0; i < num_tasks; i++) {
    ASSERT(visitors_[i] == nullptr);
    SyncMarkingVisitor* visitor = new SyncMarkingVisitor(
        isolate_group_, page_space, &old_marking_stack_, &new_marking_stack_,
        &tlab_deferred_marking_stack_, &deferred_marking_stack_);
    visitors_[i] = visitor;
    if (i < (num_tasks - 1)) {
      // Begin marking on a helper thread.
      bool result = Dart::thread_pool()->Run<ConcurrentMarkTask>(
          this, isolate_group_, page_space, visitor);
      IterateRoots(visitor);
      if (FLAG_log_marker_tasks) {
      bool result = Dart::thread_pool()->Run<ConcurrentMarkTask>(
          this, isolate_group_, page_space, visitor);
  while (root_slices_finished_ != root_slices_count_) {
1181 "IncrementalMarkWithUnlimitedBudget");
  SyncMarkingVisitor visitor(isolate_group_, page_space, &old_marking_stack_,
                             &new_marking_stack_, &tlab_deferred_marking_stack_,
                             &deferred_marking_stack_);
  // Skip tiny steps; the setup overhead would outweigh the marking done.
  const intptr_t kMinimumMarkingStep = KB;
  if (size < kMinimumMarkingStep) return;
1206 "IncrementalMarkWithSizeBudget");
1209 &new_marking_stack_, &tlab_deferred_marking_stack_,
1210 &deferred_marking_stack_);
1226 "IncrementalMarkWithTimeBudget");
1229 &new_marking_stack_, &tlab_deferred_marking_stack_,
1230 &deferred_marking_stack_);
    for (ObjectPtr* ptr = from; ptr <= to; ptr++) {
                     static_cast<uword>(current_), reinterpret_cast<uword>(ptr),
                     static_cast<uword>(obj));
#if defined(DART_COMPRESSED_POINTERS)
                     static_cast<uword>(current_), reinterpret_cast<uword>(ptr),
                     static_cast<uword>(obj));
  bool failed_ = false;
  const int num_tasks = FLAG_marker_tasks;
  if (num_tasks == 0) {
    // Mark everything on the main thread.
    UnsyncMarkingVisitor visitor(
        isolate_group_, page_space, &old_marking_stack_, &new_marking_stack_,
        &tlab_deferred_marking_stack_, &deferred_marking_stack_);
    IterateRoots(&visitor);
    IterateWeakRoots(thread);
    for (intptr_t i = 0; i < num_tasks; ++i) {
      if (visitor == nullptr) {
        visitor = new SyncMarkingVisitor(
            isolate_group_, page_space, &old_marking_stack_,
            &new_marking_stack_, &tlab_deferred_marking_stack_,
            &deferred_marking_stack_);
        visitors_[i] = visitor;
      // Move this visitor's local work onto the global list, in case its
      // helper task never gets a chance to run.
      visitor->Flush(&global_list_);
      if (i < (num_tasks - 1)) {
        // Begin marking on a helper thread.
        bool result = Dart::thread_pool()->Run<ParallelMarkTask>(
            this, isolate_group_, &old_marking_stack_, barrier, visitor,
            &num_busy);
        // The last worker runs on the current thread and adopts whatever work
        // remains on the global list.
        visitor->Adopt(&global_list_);
        ParallelMarkTask task(this, isolate_group_, &old_marking_stack_,
                              barrier, visitor, &num_busy);
    for (intptr_t i = 0; i < num_tasks; i++) {
      visitors_[i] = nullptr;
  if (FLAG_verify_after_marking) {
    heap_->VisitObjects(&visitor);
      FATAL("verify after marking");
  for (intptr_t i = 0, n = FLAG_marker_tasks; i < n; i++) {
    scavenger->PruneWeak(visitors_[i]->delayed());