Flutter Engine
The Flutter Engine
Loading...
Searching...
No Matches
page.cc
Go to the documentation of this file.
1// Copyright (c) 2022, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/heap/page.h"
6
7#include "platform/assert.h"
9#include "vm/dart.h"
10#include "vm/heap/become.h"
11#include "vm/heap/compactor.h"
12#include "vm/heap/marker.h"
13#include "vm/heap/safepoint.h"
14#include "vm/heap/sweeper.h"
15#include "vm/lockers.h"
16#include "vm/log.h"
17#include "vm/object.h"
18#include "vm/object_set.h"
19#include "vm/os_thread.h"
20#include "vm/virtual_memory.h"
21
22namespace dart {
23
24// This cache needs to be at least as big as FLAG_new_gen_semi_max_size or
25// munmap will noticeably impact performance.
26static constexpr intptr_t kPageCacheCapacity = 8 * kWordSize;
// NOTE(review): the declaration of the cache array itself (original line 28,
// `static VirtualMemory* page_cache[kPageCacheCapacity]` per the symbol
// index at the bottom of this file) is missing from this extract.
// page_cache_mutex guards the cache: Allocate()/Deallocate() take a
// MutexLocker on it before touching page_cache / page_cache_size.
27static Mutex* page_cache_mutex = nullptr;
29static intptr_t page_cache_size = 0;
30
31void Page::Init() {
32 ASSERT(page_cache_mutex == nullptr);
33 page_cache_mutex = new Mutex(NOT_IN_PRODUCT("page_cache_mutex"));
34}
35
44
// NOTE(review): this is the tail of Page::Cleanup(); its signature line
// (original line 45 per the symbol index, which places Cleanup() at
// page.cc:45) is missing from this extract.
// Releases all cached page mappings, then destroys the cache mutex and
// resets it to nullptr so a later Init() can re-create it.
46 ClearCache();
47 delete page_cache_mutex;
48 page_cache_mutex = nullptr;
49}
50
55
// Only ordinary heap pages may be recycled through the page cache; image,
// large, and VM-isolate pages are excluded.
56static bool CanUseCache(uword flags) {
// NOTE(review): the head of the return expression (original line 57,
// presumably `return (flags & (Page::kImage | Page::kLarge |` given the
// flag enumerators in the symbol index) is missing from this extract; only
// its tail survives below.
58 Page::kVMIsolate)) == 0;
59}
60
// Allocates one page of |size| bytes with the given |flags|, preferring a
// recycled mapping from the page cache when the flags allow it, and
// constructs the Page header in-place at the start of the mapping.
// Returns nullptr on out-of-memory.
61Page* Page::Allocate(intptr_t size, uword flags) {
62 const bool executable = (flags & Page::kExecutable) != 0;
// Compressed-pointer mappings are used for everything except code pages.
63 const bool compressed = !executable;
64 const char* name = executable ? "dart-code" : "dart-heap";
65
66 VirtualMemory* memory = nullptr;
67 if (CanUseCache(flags)) {
68 // We don't automatically use the cache based on size and type because a
69 // large page that happens to be the same size as a regular page can't
70 // use the cache. Large pages are expected to be zeroed on allocation but
71 // cached pages are dirty.
72 ASSERT(size == kPageSize);
73 MutexLocker ml(page_cache_mutex);
// NOTE(review): original lines 74-75 are missing from this extract
// (presumably bounds assertions on page_cache_size).
76 if (page_cache_size > 0) {
// Pop the most recently cached mapping.
77 memory = page_cache[--page_cache_size];
78 }
79 }
80 if (memory == nullptr) {
// Cache miss (or uncacheable page): map fresh, page-aligned memory.
81 memory = VirtualMemory::AllocateAligned(size, kPageSize, executable,
82 compressed, name);
83 }
84 if (memory == nullptr) {
85 return nullptr; // Out of memory.
86 }
87
88 if ((flags & kNew) != 0) {
89 // Initialized by generated code.
90 MSAN_UNPOISON(memory->address(), size);
91
92#if defined(DEBUG)
// Fill new-space pages with the allocation canary so reads of memory that
// generated code has not yet initialized are detectable in debug builds.
93 uword* cursor = reinterpret_cast<uword*>(memory->address());
94 uword* end = reinterpret_cast<uword*>(memory->end());
95 while (cursor < end) {
96 *cursor++ = kAllocationCanary;
97 }
98#endif
99 } else {
100 // We don't zap old-gen because we rely on implicit zero-initialization
101 // of large typed data arrays.
102 }
103
// The Page header lives at the start of its own mapping (image pages are
// the exception; see Deallocate()).
104 Page* result = reinterpret_cast<Page*>(memory->address());
105 ASSERT(result != nullptr);
106 result->flags_ = flags;
107 result->memory_ = memory;
108 result->next_ = nullptr;
109 result->forwarding_page_ = nullptr;
110 result->card_table_ = nullptr;
111 result->progress_bar_ = 0;
112 result->owner_ = nullptr;
113 result->top_ = 0;
114 result->end_ = 0;
115 result->survivor_end_ = 0;
116 result->resolved_top_ = 0;
117
// New-space pages get usable bump-allocation bounds immediately.
118 if ((flags & kNew) != 0) {
119 uword top = result->object_start();
120 uword end =
// NOTE(review): original line 121 (the right-hand side of `end`, which per
// the symbol index presumably involves kAllocationRedZoneSize) is missing
// from this extract.
122 result->top_ = top;
123 result->end_ = end;
124 result->survivor_end_ = top;
125 result->resolved_top_ = top;
126 }
127
// NOTE(review): original line 128 is missing from this extract; the symbol
// index suggests it registers the header with LSAN
// (LSAN_REGISTER_ROOT_REGION), mirroring the unregister in Deallocate().
129
130 return result;
131}
132
// Frees this page, returning its mapping to the page cache when eligible,
// otherwise unmapping it. After this call, `this` is no longer valid.
133void Page::Deallocate() {
134 if (is_image()) {
135 delete memory_;
136 // For a heap page from a snapshot, the Page object lives in the malloc
137 // heap rather than the page itself.
138 free(this);
139 return;
140 }
141
// card_table_ is malloc'ed separately from the page mapping (it may be
// nullptr; free(nullptr) is a no-op).
142 free(card_table_);
143
144 // Load before unregistering with LSAN, or LSAN will temporarily think it has
145 // been leaked.
146 VirtualMemory* memory = memory_;
147
148 LSAN_UNREGISTER_ROOT_REGION(this, sizeof(*this));
149
150 if (CanUseCache(flags_)) {
151 ASSERT(memory->size() == kPageSize);
152 MutexLocker ml(page_cache_mutex);
// NOTE(review): original lines 153-155 are missing from this extract;
// they presumably include the `if (page_cache_size < kPageCacheCapacity) {`
// guard that the extra closing brace below (line 169) matches.
156 intptr_t size = memory->size();
157#if defined(DEBUG)
// Zap recycled new-space pages so stale data is recognizable in debug
// builds if read before reinitialization.
158 if ((flags_ & kNew) != 0) {
159 memset(memory->address(), Heap::kZapByte, size);
160 } else {
161 // We don't zap old-gen because we rely on implicit zero-initialization
162 // of large typed data arrays.
163 }
164#endif
165 MSAN_POISON(memory->address(), size);
166 page_cache[page_cache_size++] = memory;
// Ownership transferred to the cache; nullptr suppresses the delete below.
167 memory = nullptr;
168 }
169 }
// Unmap only if the mapping was not handed to the cache.
170 delete memory;
171}
172
173void Page::VisitObjects(ObjectVisitor* visitor) const {
174 ASSERT(Thread::Current()->OwnsGCSafepoint());
175 NoSafepointScope no_safepoint;
176 uword obj_addr = object_start();
177 uword end_addr = object_end();
178 while (obj_addr < end_addr) {
179 ObjectPtr raw_obj = UntaggedObject::FromAddr(obj_addr);
180 visitor->VisitObject(raw_obj);
181 obj_addr += raw_obj->untag()->HeapSize();
182 }
183 ASSERT(obj_addr == end_addr);
184}
185
// NOTE(review): the signature line (original line 186,
// `void Page::VisitObjectsUnsafe(ObjectVisitor* visitor) const` per the
// symbol index) is missing from this extract; this is the function body.
// Same walk as VisitObjects(), but without the safepoint assertion /
// NoSafepointScope and without the final exact-end assertion — hence
// "Unsafe".
187 uword obj_addr = object_start();
188 uword end_addr = object_end();
189 while (obj_addr < end_addr) {
190 ObjectPtr raw_obj = UntaggedObject::FromAddr(obj_addr);
191 visitor->VisitObject(raw_obj);
// Advance by the visited object's size to reach the next header.
192 obj_addr += raw_obj->untag()->HeapSize();
193 }
194}
195
// NOTE(review): the signature line (original line 196,
// `void Page::VisitObjectPointers(ObjectPointerVisitor* visitor) const`
// per the symbol index) is missing from this extract.
// Visits every object-pointer slot on the page. Legal at a GC safepoint or
// from the compactor/marker helper tasks.
197 ASSERT(Thread::Current()->OwnsGCSafepoint() ||
198 (Thread::Current()->task_kind() == Thread::kCompactorTask) ||
199 (Thread::Current()->task_kind() == Thread::kMarkerTask));
200 NoSafepointScope no_safepoint;
201 uword obj_addr = object_start();
202 uword end_addr = object_end();
203 while (obj_addr < end_addr) {
204 ObjectPtr raw_obj = UntaggedObject::FromAddr(obj_addr);
// VisitPointers() returns the object's heap size, advancing the cursor.
205 obj_addr += raw_obj->untag()->VisitPointers(visitor);
206 }
// Objects must tile the page exactly.
207 ASSERT(obj_addr == end_addr);
208}
209
// NOTE(review): the signature line (original line 210,
// `void Page::VisitRememberedCards(ObjectPointerVisitor* visitor)` per the
// symbol index) is missing from this extract.
// Visits the slots of the dirty cards of this (large, card-remembered
// array) page. Multiple scavenger workers may run this concurrently: each
// claims card-table words via the atomic progress_bar_ cursor.
211 ASSERT(Thread::Current()->OwnsGCSafepoint() ||
212 (Thread::Current()->task_kind() == Thread::kScavengerTask));
213 NoSafepointScope no_safepoint;
214
// No card table means nothing has been remembered on this page.
215 if (card_table_ == nullptr) {
216 return;
217 }
218
// A card-remembered page holds exactly one array starting at object_start().
219 ArrayPtr obj =
220 static_cast<ArrayPtr>(UntaggedObject::FromAddr(object_start()));
221 ASSERT(obj->IsArray() || obj->IsImmutableArray());
222 ASSERT(obj->untag()->IsCardRemembered());
223 CompressedObjectPtr* obj_from = obj->untag()->from();
224 CompressedObjectPtr* obj_to =
225 obj->untag()->to(Smi::Value(obj->untag()->length()));
226 uword heap_base = obj.heap_base();
227
228 const size_t size_in_bits = card_table_size();
229 const size_t size_in_words =
// NOTE(review): original line 230 (the right-hand side of size_in_words,
// presumably rounding size_in_bits up to whole words — Utils::RoundUp is
// referenced in the symbol index) is missing from this extract.
// Work-stealing loop: each fetch_add claims one card-table word.
231 for (;;) {
232 const size_t word_offset = progress_bar_.fetch_add(1);
233 if (word_offset >= size_in_words) break;
234
235 uword cell = card_table_[word_offset];
236 if (cell == 0) continue;
237
// Each set bit marks one dirty card within this word.
238 for (intptr_t bit_offset = 0; bit_offset < kBitsPerWord; bit_offset++) {
239 const uword bit_mask = static_cast<uword>(1) << bit_offset;
240 if ((cell & bit_mask) == 0) continue;
241 const intptr_t i = (word_offset << kBitsPerWordLog2) + bit_offset;
242
// Translate card index i into the slot range it covers on this page.
243 CompressedObjectPtr* card_from =
244 reinterpret_cast<CompressedObjectPtr*>(this) +
245 (i << kSlotsPerCardLog2);
246 CompressedObjectPtr* card_to =
247 reinterpret_cast<CompressedObjectPtr*>(card_from) +
248 (1 << kSlotsPerCardLog2) - 1;
249 // Minus 1 because to is inclusive.
250
251 if (card_from < obj_from) {
252 // First card overlaps with header.
253 card_from = obj_from;
254 }
255 if (card_to > obj_to) {
256 // Last card(s) may extend past the object. Array truncation can make
257 // this happen for more than one card.
258 card_to = obj_to;
259 }
260
261 visitor->VisitCompressedPointers(heap_base, card_from, card_to);
262
// If the card no longer holds any new-space pointer (or Smi that looks
// like one), clear its dirty bit so future scavenges skip it.
263 bool has_new_target = false;
264 for (CompressedObjectPtr* slot = card_from; slot <= card_to; slot++) {
265 if ((*slot)->IsNewObjectMayBeSmi()) {
266 has_new_target = true;
267 break;
268 }
269 }
270 if (!has_new_target) {
271 cell ^= bit_mask;
272 }
273 }
// Write back the possibly-cleaned word; only this worker owns it.
274 card_table_[word_offset] = cell;
275 }
276}
277
// NOTE(review): the signature line (original line 278,
// `void Page::ResetProgressBar()` per the symbol index) is missing from
// this extract.
// Rewinds the card-table scan cursor so the next VisitRememberedCards()
// pass starts again from the first card-table word.
279 progress_bar_ = 0;
280}
281
// Toggles the hardware protection of this page's mapping between read-only
// (or read+execute for code pages) and writable. Image pages are excluded.
282void Page::WriteProtect(bool read_only) {
283 ASSERT(!is_image());
284
// NOTE(review): original lines 285, 288, 290, and 293 are missing from
// this extract; they presumably declare `prot` and select the
// VirtualMemory::Protection value in each branch (read+execute for
// executable pages, read-only otherwise, read+write when unprotecting) —
// confirm against the full source.
286 if (read_only) {
287 if (is_executable()) {
289 } else {
291 }
292 } else {
294 }
295 memory_->Protect(prot);
296}
297
298} // namespace dart
static constexpr uint8_t kZapByte
Definition heap.h:58
void VisitCompressedPointers(uword heap_base, CompressedObjectPtr *first, CompressedObjectPtr *last)
Definition visitor.h:43
UntaggedObject * untag() const
virtual void VisitObject(ObjectPtr obj)=0
uword end() const
Definition page.h:90
static intptr_t CachedSize()
Definition page.cc:51
uword object_start() const
Definition page.h:93
uword object_end() const
Definition page.h:102
intptr_t card_table_size() const
Definition page.h:156
bool is_image() const
Definition page.h:78
static void Init()
Definition page.cc:31
void VisitObjects(ObjectVisitor *visitor) const
Definition page.cc:173
void VisitObjectsUnsafe(ObjectVisitor *visitor) const
Definition page.cc:186
@ kNew
Definition page.h:73
@ kLarge
Definition page.h:70
@ kVMIsolate
Definition page.h:72
@ kExecutable
Definition page.h:69
@ kImage
Definition page.h:71
void WriteProtect(bool read_only)
Definition page.cc:282
void ResetProgressBar()
Definition page.cc:278
void VisitObjectPointers(ObjectPointerVisitor *visitor) const
Definition page.cc:196
static void ClearCache()
Definition page.cc:36
bool is_executable() const
Definition page.h:76
void VisitRememberedCards(ObjectPointerVisitor *visitor)
Definition page.cc:210
static constexpr intptr_t kSlotsPerCardLog2
Definition page.h:152
static void Cleanup()
Definition page.cc:45
T fetch_add(T arg, std::memory_order order=std::memory_order_relaxed)
Definition atomic.h:35
intptr_t Value() const
Definition object.h:9969
@ kScavengerTask
Definition thread.h:352
@ kCompactorTask
Definition thread.h:351
static Thread * Current()
Definition thread.h:361
static ObjectPtr FromAddr(uword addr)
Definition raw_object.h:495
intptr_t HeapSize() const
Definition raw_object.h:380
uword heap_base() const
Definition raw_object.h:569
intptr_t VisitPointers(ObjectPointerVisitor *visitor)
Definition raw_object.h:426
static constexpr T RoundUp(T x, uintptr_t alignment, uintptr_t offset=0)
Definition utils.h:105
static VirtualMemory * AllocateAligned(intptr_t size, intptr_t alignment, bool is_executable, bool is_compressed, const char *name)
static void Protect(void *address, intptr_t size, Protection mode)
#define ASSERT(E)
FlutterSemanticsFlag flags
GAsyncResult * result
#define LSAN_UNREGISTER_ROOT_REGION(ptr, len)
#define LSAN_REGISTER_ROOT_REGION(ptr, len)
#define MSAN_POISON(ptr, len)
#define MSAN_UNPOISON(ptr, len)
constexpr intptr_t kBitsPerWordLog2
Definition globals.h:513
static VirtualMemory * page_cache[kPageCacheCapacity]
Definition page.cc:28
static constexpr intptr_t kNewObjectAlignmentOffset
const char *const name
static Mutex * page_cache_mutex
Definition page.cc:27
constexpr intptr_t kBitsPerWord
Definition globals.h:514
static constexpr intptr_t kAllocationCanary
Definition globals.h:181
static constexpr intptr_t kPageSize
Definition page.h:27
uintptr_t uword
Definition globals.h:501
static bool CanUseCache(uword flags)
Definition page.cc:56
static intptr_t page_cache_size
Definition page.cc:29
static constexpr intptr_t kAllocationRedZoneSize
Definition page.h:41
constexpr intptr_t kWordSize
Definition globals.h:509
static constexpr intptr_t kPageCacheCapacity
Definition page.cc:26
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive keep the shell running after the Dart script has completed enable serial On low power devices with low core running concurrent GC tasks on threads can cause them to contend with the UI thread which could potentially lead to jank This option turns off all concurrent GC activities domain network JSON encoded network policy per domain This overrides the DisallowInsecureConnections switch Embedder can specify whether to allow or disallow insecure connections at a domain level old gen heap size
Definition switches.h:259
#define NOT_IN_PRODUCT(code)
Definition globals.h:84