37DEFINE_FLAG(
bool, write_protect_vm_isolate,
true,
"Write protect vm_isolate.");
39 disable_heap_verification,
41 "Explicitly disable heap verification.");
45 intptr_t max_new_gen_semi_words,
46 intptr_t max_old_gen_words)
47 : isolate_group_(isolate_group),
48 is_vm_isolate_(is_vm_isolate),
49 new_space_(this, max_new_gen_semi_words),
50 old_space_(this, max_old_gen_words),
52 assume_scavenge_will_fail_(
false),
53 gc_on_nth_allocation_(kNoForcedGarbageCollection) {
54 UpdateGlobalMaxUsed();
55 for (
int sel = 0; sel < kNumWeakSelectors; sel++) {
63#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
65 HeapProfileSampler::delete_callback();
66 if (cleanup !=
nullptr) {
67 new_weak_tables_[kHeapSamplingData]->CleanupValues(cleanup);
68 old_weak_tables_[kHeapSamplingData]->CleanupValues(cleanup);
72 for (
int sel = 0; sel < kNumWeakSelectors; sel++) {
73 delete new_weak_tables_[sel];
74 delete old_weak_tables_[sel];
// Allocates |size| bytes in the new (scavenger) space, retrying after a
// scavenge, and finally falling back to old-space allocation.
// NOTE(review): this extract elides several source lines; comments below
// describe only the visible code.
78uword Heap::AllocateNew(
Thread* thread, intptr_t size) {
// Debugging hook: may force a GC every Nth allocation.
80  CollectForDebugging(thread);
// Fast path: bump allocation in the active new-space area.
81  uword addr = new_space_.TryAllocate(thread, size);
// Only attempt a GC-based retry when a scavenge may succeed and the thread
// permits growth/collection work here.
85  if (!assume_scavenge_will_fail_ && !thread->
force_growth()) {
// Park all mutators before collecting.
86  GcSafepointOperationScope safepoint_operation(thread);
// Retry: space may have been freed while reaching the safepoint.
91  addr = new_space_.TryAllocate(thread, size);
96  CollectGarbage(thread, GCType::kScavenge, GCReason::kNewSpace);
// Retry once more after the scavenge.
98  addr = new_space_.TryAllocate(thread, size);
// Last resort: allocate directly in old space (non-executable).
106  return AllocateOld(thread, size,
false);
// Allocates |size| bytes in old (page) space, escalating through sweeps,
// mark-sweep, mark-compact, and forced growth before reporting OOM.
// NOTE(review): this extract elides many source lines (retry checks,
// returns); comments describe only the visible code.
109uword Heap::AllocateOld(Thread* thread, intptr_t size,
bool is_exec) {
110  ASSERT(thread->no_safepoint_scope_depth() == 0);
// Heap-profiler sampling is compiled in outside PRODUCT (or when forced).
112#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
113  if (HeapProfileSampler::enabled()) {
114    thread->heap_sampler().SampleOldSpaceAllocation(size);
// Threads that forbid growth skip the GC/retry ladder entirely.
118  if (!thread->force_growth()) {
119    CollectForDebugging(thread);
// Attempt 1: plain allocation.
120    uword addr = old_space_.TryAllocate(size, is_exec);
// Attempt 2: after concurrent sweepers finish returning free pages.
125    WaitForSweeperTasks(thread);
126    addr = old_space_.TryAllocate(size, is_exec);
// Remaining attempts happen with all mutators parked.
130    GcSafepointOperationScope safepoint_operation(thread);
134    addr = old_space_.TryAllocate(size, is_exec);
// Attempt after a full mark-sweep collection.
140    CollectOldSpaceGarbage(thread, GCType::kMarkSweep, GCReason::kOldSpace);
141    addr = old_space_.TryAllocate(size, is_exec);
146    WaitForSweeperTasksAtSafepoint(thread);
147    addr = old_space_.TryAllocate(size, is_exec);
// Attempt allowing the heap to grow beyond its normal limits.
152    addr = old_space_.TryAllocate(size, is_exec, PageSpace::kForceGrowth);
// Final collection: compacting GC to defragment before giving up.
157    CollectOldSpaceGarbage(thread, GCType::kMarkCompact, GCReason::kOldSpace);
158    WaitForSweeperTasksAtSafepoint(thread);
// Force-growth path (and post-compaction retry) for growth-exempt threads.
160  uword addr = old_space_.TryAllocate(size, is_exec, PageSpace::kForceGrowth);
165  if (!thread->force_growth()) {
166    WaitForSweeperTasks(thread);
// Give back the OOM reservation so the error path itself can allocate.
167    old_space_.TryReleaseReservation();
// All strategies exhausted: report out-of-memory.
174  OS::PrintErr(
"Exhausted heap space, trying to allocate %" Pd " bytes.\n",
// Records |size| bytes of external (non-heap) allocation attributed to
// |space|, then checks whether external pressure warrants a GC.
// NOTE(review): the branch structure and return statements are elided in
// this extract; comments cover visible code only.
179bool Heap::AllocatedExternal(intptr_t size,
Space space) {
// Presumably the kNew branch; fails when new-space external accounting
// rejects the size — TODO confirm against full source.
181  if (!new_space_.AllocatedExternal(size)) {
// Presumably the kOld branch with the analogous failure check.
186  if (!old_space_.AllocatedExternal(size)) {
191  Thread* thread = Thread::Current();
// May trigger a collection if external memory crossed a threshold.
193  CheckExternalGC(thread);
// Removes |size| bytes of external allocation accounting from |space|.
// NOTE(review): the if/else selecting between spaces is elided in this
// extract; only the two per-space updates are visible.
200void Heap::FreedExternal(intptr_t size,
Space space) {
202  new_space_.FreedExternal(size);
205  old_space_.FreedExternal(size);
209void Heap::PromotedExternal(intptr_t size) {
210 new_space_.FreedExternal(size);
211 old_space_.AllocatedExternal(size);
// Checks external-memory pressure and triggers a collection when it is
// excessive relative to heap capacity or thresholds.
// NOTE(review): interior lines are elided in this extract.
214void Heap::CheckExternalGC(
Thread* thread) {
// Scavenge when external memory reaches 4x new-space capacity.
223  if (new_space_.ExternalInWords() >= (4 * new_space_.CapacityInWords())) {
226    CollectGarbage(thread, GCType::kScavenge, GCReason::kExternal);
// Old space: full mark-sweep once past the hard threshold ...
231  if (old_space_.ReachedHardThreshold()) {
232    CollectGarbage(thread, GCType::kMarkSweep, GCReason::kExternal);
// ... otherwise (condition elided here) start/advance concurrent marking.
234    CheckConcurrentMarking(thread, GCReason::kExternal, 0);
238bool Heap::Contains(
uword addr)
const {
239 return new_space_.Contains(addr) || old_space_.Contains(addr);
242bool Heap::NewContains(
uword addr)
const {
243 return new_space_.Contains(addr);
246bool Heap::OldContains(
uword addr)
const {
247 return old_space_.Contains(addr);
250bool Heap::CodeContains(
uword addr)
const {
251 return old_space_.CodeContains(addr);
254bool Heap::DataContains(
uword addr)
const {
255 return old_space_.DataContains(addr);
259 new_space_.VisitObjects(visitor);
260 old_space_.VisitObjects(visitor);
263void Heap::VisitObjectsNoImagePages(ObjectVisitor* visitor) {
264 new_space_.VisitObjects(visitor);
265 old_space_.VisitObjectsNoImagePages(visitor);
268void Heap::VisitObjectsImagePages(ObjectVisitor* visitor)
const {
269 old_space_.VisitObjectsImagePages(visitor);
272HeapIterationScope::HeapIterationScope(
Thread* thread,
bool writable)
274 heap_(isolate_group()->heap()),
275 old_space_(heap_->old_space()),
276 writable_(writable) {
289 while ((old_space_->
tasks() > 0) ||
298 while (old_space_->
tasks() > 0) {
303 ASSERT(old_space_->iterating_thread_ ==
nullptr);
304 old_space_->iterating_thread_ =
thread;
323 old_space_->iterating_thread_ =
nullptr;
335 heap_->VisitObjects(visitor);
416 phase = old_space_.
phase();
426 if (FLAG_mark_when_idle) {
450void Heap::CollectNewSpaceGarbage(
Thread* thread,
464 GcSafepointOperationScope safepoint_operation(thread);
465 RecordBeforeGC(
type, reason);
468 ? VMTag::kGCIdleTagId
469 : VMTag::kGCNewSpaceTagId);
474#if defined(SUPPORT_TIMELINE)
475 PrintStatsToTimeline(&tbes, reason);
489void Heap::CollectOldSpaceGarbage(Thread* thread,
492 NoActiveIsolateScope no_active_isolate_scope(thread);
497 if (FLAG_use_compactor) {
508 GcSafepointOperationScope safepoint_operation(thread);
516 thread->isolate_group()->ForEachIsolate(
517 [&](Isolate* isolate) {
519 isolate->CacheRegexpBacktrackStack(
nullptr);
523 RecordBeforeGC(
type, reason);
525 ? VMTag::kGCIdleTagId
526 : VMTag::kGCOldSpaceTagId);
532#if defined(SUPPORT_TIMELINE)
533 PrintStatsToTimeline(&tbes, reason);
537 thread->isolate_group()->ForEachIsolate(
538 [&](Isolate* isolate) {
539 isolate->handler_info_cache()->Clear();
540 isolate->catch_entry_moves_cache()->Clear();
543 assume_scavenge_will_fail_ =
false;
551 CollectNewSpaceGarbage(thread,
type, reason);
555 CollectOldSpaceGarbage(thread,
type, reason);
570 CollectOldSpaceGarbage(
576 if (
old_space()->ReachedHardThreshold()) {
591 phase = old_space_.
phase();
622 phase = old_space_.
phase();
634 ? VMTag::kGCIdleTagId
635 : VMTag::kGCOldSpaceTagId);
640#if defined(SUPPORT_TIMELINE)
641 PrintStatsToTimeline(&tbes, reason);
679 ASSERT(isolate_group_ !=
nullptr);
682 isolate_group_->GetHeapGlobalUsedMaxMetric()->SetValue(
688 read_only_ = read_only;
695 intptr_t max_new_gen_words,
696 intptr_t max_old_gen_words) {
699 max_new_gen_words, max_old_gen_words));
703void Heap::AddRegionsToObjectSet(
ObjectSet* set)
const {
712 gc_on_nth_allocation_ = num_allocations;
// Forces a collection every Nth allocation when armed via
// CollectOnNthAllocation (testing/debugging hook); no-op otherwise.
// NOTE(review): the GC call itself is elided in this extract.
715void Heap::CollectForDebugging(
Thread* thread) {
// Fast exit when the hook is not armed.
716  if (gc_on_nth_allocation_ == kNoForcedGarbageCollection)
return;
// Count down toward the forced collection.
722  gc_on_nth_allocation_--;
723  if (gc_on_nth_allocation_ == 0) {
// Disarm after firing once.
725    gc_on_nth_allocation_ = kNoForcedGarbageCollection;
736 this->AddRegionsToObjectSet(allocated_set);
738 vm_isolate->
group()->
heap()->AddRegionsToObjectSet(allocated_set);
743 this->VisitObjectsNoImagePages(&object_visitor);
748 this->VisitObjectsImagePages(&object_visitor);
754 vm_isolate->
group()->
heap()->VisitObjects(&vm_object_visitor);
757 return allocated_set;
761 if (FLAG_disable_heap_verification) {
765 return VerifyGC(msg, mark_expectation);
768bool Heap::VerifyGC(
const char* msg,
MarkExpectation mark_expectation) {
776 VisitObjectPointers(&visitor);
784 "New space (%" Pd "k of %" Pd
786 "Old space (%" Pd "k of %" Pd "k)\n",
842 return "MarkCompact";
854 return "store buffer";
894 if (raw_obj->IsImmediateOrOldObject()) {
895 return old_weak_tables_[sel]->
GetValue(raw_obj);
897 return new_weak_tables_[sel]->
GetValue(raw_obj);
902 if (raw_obj->IsImmediateOrOldObject()) {
903 old_weak_tables_[sel]->
SetValue(raw_obj, val);
905 new_weak_tables_[sel]->
SetValue(raw_obj, val);
912 if (raw_obj->IsImmediateOrOldObject()) {
920 const auto before_space =
922 const auto after_space =
927 auto before_table =
GetWeakTable(before_space, selector);
928 intptr_t entry = before_table->RemoveValueExclusive(before_object);
931 after_table->SetValueExclusive(after_object, entry);
937 auto before_table = before_object->IsImmediateOrOldObject()
940 if (before_table !=
nullptr) {
943 auto after_table = after_object->IsImmediateOrOldObject()
946 ASSERT(after_table !=
nullptr);
947 after_table->SetValueExclusive(after_object, entry);
967 if (table_old !=
nullptr) table_old->
Forward(visitor);
997 stats_.reason_ = reason;
1006 int64_t delta = stats_.after_.micros_ - stats_.before_.micros_;
1021 [&](Isolate* isolate) {
1024 event.set_gc_stats(&stats_);
1033void Heap::PrintStats() {
1034 if (!FLAG_verbose_gc)
return;
1036 if ((FLAG_verbose_gc_hdr != 0) &&
1037 (((stats_.num_ - 1) % FLAG_verbose_gc_hdr) == 0)) {
1040 "gen | new gen | new gen | old gen | old gen | old "
1041 "gen | store | delta used ]\n"
1042 "[ GC isolate | space (reason) | GC# | start | time | used "
1043 "(MB) | capacity MB | external| used (MB) | capacity (MB) | "
1044 "external MB | buffer | new | old ]\n"
1045 "[ | | | (s) | (ms) "
1046 "|before| after|before| after| b4 |aftr| before| after | before| after "
1047 "|before| after| b4 |aftr| (MB) | (MB) ]\n");
1052 "[ %-13.13s, %11s(%12s), "
1062 "%3" Pd ", %3" Pd ", "
1071 stats_.before_.micros_),
1072 WordsToMB(stats_.before_.new_.used_in_words),
1073 WordsToMB(stats_.after_.new_.used_in_words),
1074 WordsToMB(stats_.before_.new_.capacity_in_words),
1075 WordsToMB(stats_.after_.new_.capacity_in_words),
1076 WordsToMB(stats_.before_.new_.external_in_words),
1077 WordsToMB(stats_.after_.new_.external_in_words),
1078 WordsToMB(stats_.before_.old_.used_in_words),
1079 WordsToMB(stats_.after_.old_.used_in_words),
1080 WordsToMB(stats_.before_.old_.capacity_in_words),
1081 WordsToMB(stats_.after_.old_.capacity_in_words),
1082 WordsToMB(stats_.before_.old_.external_in_words),
1083 WordsToMB(stats_.after_.old_.external_in_words),
1084 stats_.before_.store_buffer_,
1085 stats_.after_.store_buffer_,
1086 WordsToMB(stats_.after_.new_.used_in_words -
1087 stats_.before_.new_.used_in_words),
1088 WordsToMB(stats_.after_.old_.used_in_words -
1089 stats_.before_.old_.used_in_words));
1093void Heap::PrintStatsToTimeline(TimelineEventScope*
event,
GCReason reason) {
1094#if defined(SUPPORT_TIMELINE)
1095 if ((
event ==
nullptr) || !
event->enabled()) {
1098 intptr_t arguments =
event->GetNumArguments();
1099 event->SetNumArguments(arguments + 13);
1101 event->FormatArgument(arguments + 1,
"Before.New.Used (kB)",
"%" Pd "",
1103 event->FormatArgument(arguments + 2,
"After.New.Used (kB)",
"%" Pd "",
1105 event->FormatArgument(arguments + 3,
"Before.Old.Used (kB)",
"%" Pd "",
1107 event->FormatArgument(arguments + 4,
"After.Old.Used (kB)",
"%" Pd "",
1110 event->FormatArgument(arguments + 5,
"Before.New.Capacity (kB)",
"%" Pd "",
1112 event->FormatArgument(arguments + 6,
"After.New.Capacity (kB)",
"%" Pd "",
1114 event->FormatArgument(arguments + 7,
"Before.Old.Capacity (kB)",
"%" Pd "",
1116 event->FormatArgument(arguments + 8,
"After.Old.Capacity (kB)",
"%" Pd "",
1119 event->FormatArgument(arguments + 9,
"Before.New.External (kB)",
"%" Pd "",
1121 event->FormatArgument(arguments + 10,
"After.New.External (kB)",
"%" Pd "",
1123 event->FormatArgument(arguments + 11,
"Before.Old.External (kB)",
"%" Pd "",
1125 event->FormatArgument(arguments + 12,
"After.Old.External (kB)",
"%" Pd "",
1132 const int kExtNewRatio = 16;
1151 if (FLAG_write_protect_code && FLAG_write_protect_vm_isolate) {
1158 if (FLAG_write_protect_code && FLAG_write_protect_vm_isolate) {
static IsolateGroup * vm_isolate_group()
static Isolate * vm_isolate()
ForceGrowthScope(Thread *thread)
void IterateObjectPointers(ObjectPointerVisitor *visitor, ValidationPolicy validate_frames)
void IterateStackPointers(ObjectPointerVisitor *visitor, ValidationPolicy validate_frames)
void IterateVMIsolateObjects(ObjectVisitor *visitor) const
void IterateObjects(ObjectVisitor *visitor) const
void IterateOldObjectsNoImagePages(ObjectVisitor *visitor) const
void IterateObjectsNoImagePages(ObjectVisitor *visitor) const
void IterateOldObjects(ObjectVisitor *visitor) const
static const char * GCReasonToString(GCReason reason)
intptr_t GetWeakEntry(ObjectPtr raw_obj, WeakSelector sel) const
void PrintMemoryUsageJSON(JSONStream *stream) const
IsolateGroup * isolate_group() const
void CheckConcurrentMarking(Thread *thread, GCReason reason, intptr_t size)
Dart_PerformanceMode SetMode(Dart_PerformanceMode mode)
void WriteProtect(bool read_only)
void WaitForMarkerTasks(Thread *thread)
void WaitForSweeperTasksAtSafepoint(Thread *thread)
int64_t PeerCount() const
void SetWeakEntry(ObjectPtr raw_obj, WeakSelector sel, intptr_t val)
void ResetObjectIdTable()
void PrintToJSONObject(Space space, JSONObject *object) const
void CheckCatchUp(Thread *thread)
bool Verify(const char *msg, MarkExpectation mark_expectation=kForbidMarked)
void CollectOnNthAllocation(intptr_t num_allocations)
bool is_vm_isolate() const
void WriteProtectCode(bool read_only)
intptr_t SetWeakEntryIfNonExistent(ObjectPtr raw_obj, WeakSelector sel, intptr_t val)
intptr_t TotalExternalInWords() const
void CollectAllGarbage(GCReason reason=GCReason::kFull, bool compact=false)
int64_t GCTimeInMicros(Space space) const
friend class ServiceEvent
void NotifyIdle(int64_t deadline)
void WaitForSweeperTasks(Thread *thread)
void CheckFinalizeMarking(Thread *thread)
intptr_t Collections(Space space) const
WeakTable * GetWeakTable(Space space, WeakSelector selector) const
void ForwardWeakEntries(ObjectPtr before_object, ObjectPtr after_object)
void ResetCanonicalHashTable()
intptr_t ExternalInWords(Space space) const
void StartConcurrentMarking(Thread *thread, GCReason reason)
intptr_t TotalCapacityInWords() const
intptr_t UsedInWords(Space space) const
static void Init(IsolateGroup *isolate_group, bool is_vm_isolate, intptr_t max_new_gen_words, intptr_t max_old_gen_words)
intptr_t TotalUsedInWords() const
void CollectGarbage(Thread *thread, GCType type, GCReason reason)
void ForwardWeakTables(ObjectPointerVisitor *visitor)
void UpdateGlobalMaxUsed()
Space SpaceForExternal(intptr_t size) const
intptr_t CapacityInWords(Space space) const
ObjectSet * CreateAllocatedObjectSet(Zone *zone, MarkExpectation mark_expectation)
static const char * GCTypeToString(GCType type)
int64_t UptimeMicros() const
StoreBuffer * store_buffer() const
void ForEachIsolate(std::function< void(Isolate *isolate)> function, bool at_safepoint=false)
void VisitStackPointers(ObjectPointerVisitor *visitor, ValidationPolicy validate_frames)
void VisitObjectPointers(ObjectPointerVisitor *visitor, ValidationPolicy validate_frames)
SafepointHandler * safepoint_handler()
static bool IsSystemIsolate(const Isolate *isolate)
IsolateGroup * group() const
WeakTable * forward_table_old()
WeakTable * forward_table_new()
void AddProperty64(const char *name, int64_t i) const
void AddProperty(const char *name, bool b) const
Monitor::WaitResult WaitWithSafepointCheck(Thread *thread, int64_t millis=Monitor::kNoTimeout)
Monitor::WaitResult Wait(int64_t millis=Monitor::kNoTimeout)
static int64_t GetCurrentMonotonicMicros()
static void static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
intptr_t UsedInWords() const
bool ReachedSoftThreshold() const
bool ShouldStartIdleMarkSweep(int64_t deadline)
void IncrementalMarkWithSizeBudget(intptr_t size)
void AddGCTime(int64_t micros)
int64_t gc_time_micros() const
void WriteProtect(bool read_only)
void IncrementCollections()
bool ShouldPerformIdleMarkCompact(int64_t deadline)
void set_tasks(intptr_t val)
void IncrementalMarkWithTimeBudget(int64_t deadline)
intptr_t collections() const
void VisitObjects(ObjectVisitor *visitor) const
void CollectGarbage(Thread *thread, bool compact, bool finalize)
void VisitObjectsNoImagePages(ObjectVisitor *visitor) const
SpaceUsage GetCurrentUsage() const
bool ReachedHardThreshold() const
intptr_t CapacityInWords() const
void AddRegionsToObjectSet(ObjectSet *set) const
void PrintToJSONObject(JSONObject *object) const
void AssistTasks(MonitorLocker *ml)
Monitor * tasks_lock() const
void VisitObjectPointers(ObjectPointerVisitor *visitor) const
intptr_t ExternalInWords() const
T exchange(T arg, std::memory_order order=std::memory_order_relaxed)
void Scavenge(Thread *thread, GCType type, GCReason reason)
intptr_t ExternalInWords() const
void VisitObjects(ObjectVisitor *visitor) const
void VisitObjectPointers(ObjectPointerVisitor *visitor) const
void AddGCTime(int64_t micros)
void WriteProtect(bool read_only)
void AbandonRemainingTLABForDebugging(Thread *thread)
bool ShouldPerformIdleScavenge(int64_t deadline)
void AddRegionsToObjectSet(ObjectSet *set) const
intptr_t CapacityInWords() const
intptr_t UsedInWords() const
SpaceUsage GetCurrentUsage() const
int64_t gc_time_micros() const
void IncrementCollections()
intptr_t ThresholdInWords() const
intptr_t collections() const
void PrintToJSONObject(JSONObject *object) const
static void HandleEvent(ServiceEvent *event, bool enter_safepoint=true)
static StreamInfo gc_stream
IsolateGroup * isolate_group() const
bool force_growth() const
int32_t no_callback_scope_depth() const
static Thread * Current()
bool OwnsGCSafepoint() const
int32_t no_safepoint_scope_depth() const
void DecrementForceGrowthScopeDepth()
void IncrementForceGrowthScopeDepth()
IsolateGroup * isolate_group() const
intptr_t RemoveValueExclusive(ObjectPtr key)
intptr_t GetValue(ObjectPtr key)
intptr_t SetValueIfNonExistent(ObjectPtr key, intptr_t val)
void Forward(ObjectPointerVisitor *visitor)
void SetValue(ObjectPtr key, intptr_t val)
WritableCodePages(Thread *thread, IsolateGroup *isolate_group)
WritableVMIsolateScope(Thread *thread)
~WritableVMIsolateScope()
@ Dart_PerformanceMode_Default
@ Dart_PerformanceMode_Latency
void(* Dart_HeapSamplingDeleteCallback)(void *data)
#define DEFINE_FLAG(type, name, default_value, comment)
constexpr double MicrosecondsToSeconds(int64_t micros)
constexpr double WordsToMB(intptr_t size_in_words)
constexpr intptr_t kWordSize
constexpr double MicrosecondsToMilliseconds(int64_t micros)
constexpr intptr_t KBInWords
constexpr intptr_t RoundWordsToKB(intptr_t size_in_words)
#define TIMELINE_FUNCTION_GC_DURATION(thread, name)