heap.h
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#ifndef RUNTIME_VM_HEAP_HEAP_H_
#define RUNTIME_VM_HEAP_HEAP_H_

#if defined(SHOULD_NOT_INCLUDE_RUNTIME)
#error "Should not include runtime"
#endif

#include "include/dart_api.h"

#include "platform/assert.h"
#include "vm/allocation.h"
#include "vm/flags.h"
#include "vm/globals.h"
#include "vm/heap/pages.h"
#include "vm/heap/scavenger.h"
#include "vm/heap/spaces.h"
#include "vm/heap/weak_table.h"
#include "vm/isolate.h"

namespace dart {

// Forward declarations.
class Isolate;
class IsolateGroup;
class ObjectPointerVisitor;
class ObjectSet;
class ServiceEvent;
class TimelineEventScope;
class VirtualMemory;

class Heap {
 public:
  enum Space {
    kNew,
    kOld,
    kCode,
  };

  enum WeakSelector {
    kPeers = 0,
#if !defined(HASH_IN_OBJECT_HEADER)
    kIdentityHashes,
#endif
    kCanonicalHashes,
    kObjectIds,
    kLoadingUnits,
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
    kHeapSamplingData,
#endif
    kNumWeakSelectors,
  };

  // Pattern for unused new space and swept old space.
  static constexpr uint8_t kZapByte = 0xf3;

  ~Heap();

  Scavenger* new_space() { return &new_space_; }
  PageSpace* old_space() { return &old_space_; }

  uword Allocate(Thread* thread, intptr_t size, Space space) {
    ASSERT(!read_only_);
    switch (space) {
      case kNew:
        // Do not attempt to allocate very large objects in new space.
        if (!IsAllocatableInNewSpace(size)) {
          return AllocateOld(thread, size, /*executable*/ false);
        }
        return AllocateNew(thread, size);
      case kOld:
        return AllocateOld(thread, size, /*executable*/ false);
      case kCode:
        return AllocateOld(thread, size, /*executable*/ true);
      default:
        UNREACHABLE();
    }
    return 0;
  }
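
  // Illustrative sketch (not part of this header; the variables and sizes are
  // hypothetical): how the routing above plays out for a caller. A small kNew
  // request is allocated in the scavenged new space, a kNew request that
  // IsAllocatableInNewSpace() rejects falls back to old space, and kCode
  // requests receive executable old-space memory.
  //
  //   uword a = heap->Allocate(thread, 64, Heap::kNew);          // new space
  //   uword b = heap->Allocate(thread, 512 * 1024, Heap::kNew);  // old space
  //                                                              // if rejected
  //   uword c = heap->Allocate(thread, 128, Heap::kCode);        // executable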

  // Tracks an external allocation. Returns false without tracking the
  // allocation if it will make the total external size exceed
  // kMaxAddrSpaceInWords.
  bool AllocatedExternal(intptr_t size, Space space);
  void FreedExternal(intptr_t size, Space space);
  // Move external size from new to old space. Does not by itself trigger GC.
  void PromotedExternal(intptr_t size);
  void CheckExternalGC(Thread* thread);
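
  // Illustrative sketch (hypothetical embedder-side bookkeeping, not from this
  // header): external memory attributed to a Dart object is registered so GC
  // heuristics see the extra pressure, and CheckExternalGC() may then start a
  // collection in response.
  //
  //   if (heap->AllocatedExternal(bytes, Heap::kOld)) {
  //     heap->CheckExternalGC(thread);
  //   } else {
  //     // Tracking refused: the total external size would exceed
  //     // kMaxAddrSpaceInWords, so the caller should back off.
  //   }
  //   ...
  //   heap->FreedExternal(bytes, Heap::kOld);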

  // Heap contains the specified address.
  bool Contains(uword addr) const;
  bool NewContains(uword addr) const;
  bool OldContains(uword addr) const;
  bool CodeContains(uword addr) const;
  bool DataContains(uword addr) const;

  void NotifyIdle(int64_t deadline);
  void NotifyDestroyed();

  Dart_PerformanceMode mode() const { return mode_; }
  Dart_PerformanceMode SetMode(Dart_PerformanceMode mode);

  // Collect a single generation.
  void CollectGarbage(Thread* thread, GCType type, GCReason reason);

  // Collect both generations by performing a mark-sweep. If incremental
  // marking was in progress, perform another mark-sweep. This function will
  // collect all unreachable objects, including those in inter-generational
  // cycles or stored during incremental marking.
  void CollectAllGarbage(GCReason reason = GCReason::kFull,
                         bool compact = false);

  void CheckCatchUp(Thread* thread);
  void CheckConcurrentMarking(Thread* thread, GCReason reason, intptr_t size);
  void CheckFinalizeMarking(Thread* thread);
  void StartConcurrentMarking(Thread* thread, GCReason reason);
  void WaitForMarkerTasks(Thread* thread);
  void WaitForSweeperTasks(Thread* thread);
  void WaitForSweeperTasksAtSafepoint(Thread* thread);

  // Protect access to the heap. Note: Code pages are made
  // executable/non-executable when 'read_only' is true/false, respectively.
  void WriteProtect(bool read_only);
  void WriteProtectCode(bool read_only) {
    old_space_.WriteProtectCode(read_only);
  }

  // Initialize the heap and register it with the isolate.
  static void Init(IsolateGroup* isolate_group,
                   bool is_vm_isolate,
                   intptr_t max_new_gen_words,
                   intptr_t max_old_gen_words);

  // Verify that all pointers in the heap point to the heap.
  bool Verify(const char* msg,
              MarkExpectation mark_expectation = kForbidMarked);

  // Print heap sizes.
  void PrintSizes() const;

  // Return amount of memory used and capacity in a space, excluding external.
  intptr_t UsedInWords(Space space) const;
  intptr_t CapacityInWords(Space space) const;
  intptr_t ExternalInWords(Space space) const;

  intptr_t TotalUsedInWords() const;
  intptr_t TotalCapacityInWords() const;
  intptr_t TotalExternalInWords() const;
  // Return the amount of GCing in microseconds.
  int64_t GCTimeInMicros(Space space) const;

  intptr_t Collections(Space space) const;

  ObjectSet* CreateAllocatedObjectSet(Zone* zone,
                                      MarkExpectation mark_expectation);

  static const char* GCTypeToString(GCType type);
  static const char* GCReasonToString(GCReason reason);

  // Associate a peer with an object. A nonexistent peer is equal to nullptr.
  void SetPeer(ObjectPtr raw_obj, void* peer) {
    SetWeakEntry(raw_obj, kPeers, reinterpret_cast<intptr_t>(peer));
  }
  void* GetPeer(ObjectPtr raw_obj) const {
    return reinterpret_cast<void*>(GetWeakEntry(raw_obj, kPeers));
  }
  int64_t PeerCount() const;
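
  // Illustrative sketch (hypothetical caller; my_data is not from this
  // header): peers live in a weak side table keyed by the object, so a peer
  // does not keep its object alive, and an object without an entry reads back
  // as nullptr.
  //
  //   heap->SetPeer(raw_obj, my_data);
  //   void* p = heap->GetPeer(raw_obj);   // == my_data
  //   heap->SetPeer(raw_obj, nullptr);    // observably the same as no peer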

#if !defined(HASH_IN_OBJECT_HEADER)
  // Associate an identity hashCode with an object. A nonexistent hashCode
  // is equal to 0.
  intptr_t SetHashIfNotSet(ObjectPtr raw_obj, intptr_t hash) {
    return SetWeakEntryIfNonExistent(raw_obj, kIdentityHashes, hash);
  }
  intptr_t GetHash(ObjectPtr raw_obj) const {
    return GetWeakEntry(raw_obj, kIdentityHashes);
  }
#endif

  void SetCanonicalHash(ObjectPtr raw_obj, intptr_t hash) {
    SetWeakEntry(raw_obj, kCanonicalHashes, hash);
  }
  intptr_t GetCanonicalHash(ObjectPtr raw_obj) const {
    return GetWeakEntry(raw_obj, kCanonicalHashes);
  }
  void ResetCanonicalHashTable();

  // Associate an id with an object (used when serializing an object).
  // A nonexistent id is equal to 0.
  void SetObjectId(ObjectPtr raw_obj, intptr_t object_id) {
    ASSERT(Thread::Current()->IsDartMutatorThread());
    SetWeakEntry(raw_obj, kObjectIds, object_id);
  }
  intptr_t GetObjectId(ObjectPtr raw_obj) const {
    ASSERT(Thread::Current()->IsDartMutatorThread());
    return GetWeakEntry(raw_obj, kObjectIds);
  }
  void ResetObjectIdTable();
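
  // Illustrative sketch (hypothetical serializer-style use; next_id is not
  // part of this header): because a missing id reads as 0, a writer can
  // assign ids lazily while emitting a snapshot and clear the table when it
  // is done.
  //
  //   if (heap->GetObjectId(raw_obj) == 0) {
  //     heap->SetObjectId(raw_obj, next_id++);
  //   }
  //   ...
  //   heap->ResetObjectIdTable();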

  void SetLoadingUnit(ObjectPtr raw_obj, intptr_t unit_id) {
    ASSERT(Thread::Current()->IsDartMutatorThread());
    SetWeakEntry(raw_obj, kLoadingUnits, unit_id);
  }
  intptr_t GetLoadingUnit(ObjectPtr raw_obj) const {
    ASSERT(Thread::Current()->IsDartMutatorThread());
    return GetWeakEntry(raw_obj, kLoadingUnits);
  }

#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
  void SetHeapSamplingData(ObjectPtr obj, void* data) {
    SetWeakEntry(obj, kHeapSamplingData, reinterpret_cast<intptr_t>(data));
  }
#endif

  // Used by the GC algorithms to propagate weak entries.
  intptr_t GetWeakEntry(ObjectPtr raw_obj, WeakSelector sel) const;
  void SetWeakEntry(ObjectPtr raw_obj, WeakSelector sel, intptr_t val);
  intptr_t SetWeakEntryIfNonExistent(ObjectPtr raw_obj,
                                     WeakSelector sel,
                                     intptr_t val);

  WeakTable* GetWeakTable(Space space, WeakSelector selector) const {
    if (space == kNew) {
      return new_weak_tables_[selector];
    }
    ASSERT(space == kOld);
    return old_weak_tables_[selector];
  }
  void SetWeakTable(Space space, WeakSelector selector, WeakTable* value) {
    if (space == kNew) {
      new_weak_tables_[selector] = value;
    } else {
      ASSERT(space == kOld);
      old_weak_tables_[selector] = value;
    }
  }

  void ForwardWeakEntries(ObjectPtr before_object, ObjectPtr after_object);
  void ForwardWeakTables(ObjectPointerVisitor* visitor);

#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
  void ReportSurvivingAllocations(Dart_HeapSamplingReportCallback callback,
                                  void* context) {
    new_space_.ReportSurvivingAllocations(callback, context);
    old_space_.ReportSurvivingAllocations(callback, context);
  }
#endif

  void UpdateGlobalMaxUsed();

#ifndef PRODUCT
  void PrintToJSONObject(Space space, JSONObject* object) const;

  // Returns a JSON object with total memory usage statistics for both new and
  // old space combined.
  void PrintMemoryUsageJSON(JSONStream* stream) const;
  void PrintMemoryUsageJSON(JSONObject* jsobj) const;

  // The heap map contains the sizes and class ids for the objects in each page.
  void PrintHeapMapToJSONStream(IsolateGroup* isolate_group,
                                JSONStream* stream) {
    old_space_.PrintHeapMapToJSONStream(isolate_group, stream);
  }
#endif  // PRODUCT

  intptr_t ReachabilityBarrier() { return old_space_.collections(); }

  IsolateGroup* isolate_group() const { return isolate_group_; }
  bool is_vm_isolate() const { return is_vm_isolate_; }

  void SetupImagePage(void* pointer, uword size, bool is_executable) {
    old_space_.SetupImagePage(pointer, size, is_executable);
  }

  Space SpaceForExternal(intptr_t size) const;

  void CollectOnNthAllocation(intptr_t num_allocations);

 private:
  class GCStats : public ValueObject {
   public:
    GCStats() {}
    intptr_t num_;
    GCType type_;
    GCReason reason_;

    class Data : public ValueObject {
     public:
      Data() {}
      int64_t micros_;
      SpaceUsage new_;
      SpaceUsage old_;
      intptr_t store_buffer_;

     private:
      DISALLOW_COPY_AND_ASSIGN(Data);
    };

    Data before_;
    Data after_;

   private:
    DISALLOW_COPY_AND_ASSIGN(GCStats);
  };

  Heap(IsolateGroup* isolate_group,
       bool is_vm_isolate,
       intptr_t max_new_gen_semi_words,  // Max capacity of new semi-space.
       intptr_t max_old_gen_words);

  uword AllocateNew(Thread* thread, intptr_t size);
  uword AllocateOld(Thread* thread, intptr_t size, bool executable);

  // Visit all pointers. Caller must ensure concurrent sweeper is not running,
  // and the visitor must not allocate.
  void VisitObjectPointers(ObjectPointerVisitor* visitor);

  // Visit all objects, including FreeListElement "objects". Caller must ensure
  // concurrent sweeper is not running, and the visitor must not allocate.
  void VisitObjects(ObjectVisitor* visitor);
  void VisitObjectsNoImagePages(ObjectVisitor* visitor);
  void VisitObjectsImagePages(ObjectVisitor* visitor) const;

  // Like Verify, but does not wait for concurrent sweeper, so caller must
  // ensure thread-safety.
  bool VerifyGC(const char* msg,
                MarkExpectation mark_expectation = kForbidMarked);

  // Helper functions for garbage collection.
  void CollectNewSpaceGarbage(Thread* thread, GCType type, GCReason reason);
  void CollectOldSpaceGarbage(Thread* thread, GCType type, GCReason reason);

  // GC stats collection.
  void RecordBeforeGC(GCType type, GCReason reason);
  void RecordAfterGC(GCType type);
  void PrintStats();
  void PrintStatsToTimeline(TimelineEventScope* event, GCReason reason);

  void AddRegionsToObjectSet(ObjectSet* set) const;

  // Trigger major GC if 'gc_on_nth_allocation_' is set.
  void CollectForDebugging(Thread* thread);

  IsolateGroup* isolate_group_;
  bool is_vm_isolate_;

  // The different spaces used for allocation.
  Scavenger new_space_;
  PageSpace old_space_;

  WeakTable* new_weak_tables_[kNumWeakSelectors];
  WeakTable* old_weak_tables_[kNumWeakSelectors];

  // GC stats collection.
  GCStats stats_;

  Dart_PerformanceMode mode_ = Dart_PerformanceMode_Default;

  // This heap is in read-only mode: No allocation is allowed.
  bool read_only_;

  bool assume_scavenge_will_fail_;

  static constexpr intptr_t kNoForcedGarbageCollection = -1;

  // Whether the next heap allocation (new or old) should trigger
  // CollectAllGarbage. Used within unit tests for testing GC on certain
  // sensitive codepaths.
  intptr_t gc_on_nth_allocation_;

  friend class Become;                // VisitObjectPointers
  friend class GCCompactor;           // VisitObjectPointers
  friend class Precompiler;           // VisitObjects
  friend class ServiceEvent;
  friend class Scavenger;             // VerifyGC
  friend class PageSpace;             // VerifyGC
  friend class ProgramReloadContext;  // VisitObjects
  friend class ClassFinalizer;        // VisitObjects
  friend class HeapIterationScope;    // VisitObjects
  friend class GCMarker;              // VisitObjects
  friend class ProgramVisitor;        // VisitObjectsImagePages
  friend class Serializer;            // VisitObjectsImagePages
  friend class HeapTestHelper;
  friend class GCTestHelper;

  DISALLOW_COPY_AND_ASSIGN(Heap);
};

class HeapIterationScope : public ThreadStackResource {
 public:
  explicit HeapIterationScope(Thread* thread, bool writable = false);
  ~HeapIterationScope();

  void IterateObjects(ObjectVisitor* visitor) const;
  void IterateObjectsNoImagePages(ObjectVisitor* visitor) const;
  void IterateOldObjects(ObjectVisitor* visitor) const;
  void IterateOldObjectsNoImagePages(ObjectVisitor* visitor) const;

  void IterateVMIsolateObjects(ObjectVisitor* visitor) const;

  void IterateObjectPointers(ObjectPointerVisitor* visitor,
                             ValidationPolicy validate_frames);
  void IterateStackPointers(ObjectPointerVisitor* visitor,
                            ValidationPolicy validate_frames);

 private:
  Heap* heap_;
  PageSpace* old_space_;
  bool writable_;

  DISALLOW_COPY_AND_ASSIGN(HeapIterationScope);
};

class ForceGrowthScope : public ThreadStackResource {
 public:
  explicit ForceGrowthScope(Thread* thread);
  ~ForceGrowthScope();

 private:
  DISALLOW_COPY_AND_ASSIGN(ForceGrowthScope);
};

// Note: During this scope all pages are writable and the code pages are
// non-executable.
class WritableVMIsolateScope : public StackResource {
 public:
  explicit WritableVMIsolateScope(Thread* thread);
  ~WritableVMIsolateScope();
};

class WritableCodePages : public StackResource {
 public:
  explicit WritableCodePages(Thread* thread, IsolateGroup* isolate_group);
  ~WritableCodePages();

 private:
  IsolateGroup* isolate_group_;
};

#if defined(TESTING)
class GCTestHelper : public AllStatic {
 public:
  // Collect new gen without triggering any side effects. The normal call to
  // CollectGarbage(Heap::kNew) could potentially trigger an old gen collection
  // if there is enough promotion, and this can perturb some tests.
  static void CollectNewSpace() {
    Thread* thread = Thread::Current();
    ASSERT(thread->execution_state() == Thread::kThreadInVM);
    thread->heap()->CollectGarbage(thread, GCType::kScavenge,
                                   GCReason::kDebugging);
  }

  // Fully collect old gen and wait for the sweeper to finish. The normal call
  // to CollectGarbage(Heap::kOld) may leave so-called "floating garbage",
  // objects that were seen by the incremental barrier but later made
  // unreachable, and this can perturb some tests.
  static void CollectOldSpace() {
    Thread* thread = Thread::Current();
    ASSERT(thread->execution_state() == Thread::kThreadInVM);
    if (thread->is_marking()) {
      thread->heap()->CollectGarbage(thread, GCType::kMarkSweep,
                                     GCReason::kDebugging);
    }
    thread->heap()->CollectGarbage(thread, GCType::kMarkSweep,
                                   GCReason::kDebugging);
    WaitForGCTasks();
  }

  static void CollectAllGarbage(bool compact = false) {
    Thread* thread = Thread::Current();
    ASSERT(thread->execution_state() == Thread::kThreadInVM);
    thread->heap()->CollectAllGarbage(GCReason::kDebugging, compact);
  }

  static void WaitForGCTasks() {
    Thread* thread = Thread::Current();
    ASSERT(thread->execution_state() == Thread::kThreadInVM);
    thread->heap()->WaitForMarkerTasks(thread);
    thread->heap()->WaitForSweeperTasks(thread);
  }
};
#endif  // TESTING
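
// Illustrative sketch (test-only, hypothetical; the assertion macro is
// assumed from the VM's unit-test framework): a common pattern is to force a
// full collection and drain background GC tasks before asserting on heap
// state, so marker and sweeper work cannot race with the expectations.
//
//   GCTestHelper::CollectAllGarbage();
//   GCTestHelper::WaitForGCTasks();
//   EXPECT_EQ(0, Thread::Current()->heap()->PeerCount());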

}  // namespace dart

#endif  // RUNTIME_VM_HEAP_HEAP_H_