raw_object.h
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#ifndef RUNTIME_VM_RAW_OBJECT_H_
#define RUNTIME_VM_RAW_OBJECT_H_

#if defined(SHOULD_NOT_INCLUDE_RUNTIME)
#error "Should not include runtime"
#endif

#include "platform/assert.h"
#include "vm/class_id.h"
#include "vm/exceptions.h"
#include "vm/globals.h"
#include "vm/pointer_tagging.h"
#include "vm/snapshot.h"
#include "vm/tagged_pointer.h"
#include "vm/thread.h"
#include "vm/token.h"
#include "vm/token_position.h"
#include "vm/visitor.h"

// Currently we have two different axes for offset generation:
//
// * Target architecture
// * DART_PRECOMPILED_RUNTIME (i.e., AOT vs. JIT)
//
// That is, fields in UntaggedObject and its subclasses should only be included
// or excluded conditionally based on these factors. Otherwise, the generated
// offsets can be wrong (which should be caught by offset checking in dart.cc).
//
// TODO(dartbug.com/43646): Add DART_PRECOMPILER as another axis.

namespace dart {

// Forward declarations.
class Isolate;
class IsolateGroup;
#define DEFINE_FORWARD_DECLARATION(clazz) class Untagged##clazz;
CLASS_LIST(DEFINE_FORWARD_DECLARATION)
#undef DEFINE_FORWARD_DECLARATION
class CodeStatistics;
class StackFrame;

#define DEFINE_CONTAINS_COMPRESSED(type) \
  static constexpr bool kContainsCompressedPointers = \
      is_compressed_ptr<type>::value;

#define CHECK_CONTAIN_COMPRESSED(type) \
  static_assert( \
      kContainsCompressedPointers || is_uncompressed_ptr<type>::value, \
      "From declaration uses ObjectPtr"); \
  static_assert( \
      !kContainsCompressedPointers || is_compressed_ptr<type>::value, \
      "From declaration uses CompressedObjectPtr");

#define VISIT_FROM(first) \
  DEFINE_CONTAINS_COMPRESSED(decltype(first##_)) \
  static constexpr bool kContainsPointerFields = true; \
  base_ptr_type<decltype(first##_)>::type* from() { \
    return reinterpret_cast<base_ptr_type<decltype(first##_)>::type*>( \
        &first##_); \
  }

#define VISIT_FROM_PAYLOAD_START(elem_type) \
  static_assert(is_uncompressed_ptr<elem_type>::value || \
                    is_compressed_ptr<elem_type>::value, \
                "Payload elements must be object pointers"); \
  DEFINE_CONTAINS_COMPRESSED(elem_type) \
  static constexpr bool kContainsPointerFields = true; \
  base_ptr_type<elem_type>::type* from() { \
    const uword payload_start = reinterpret_cast<uword>(this) + sizeof(*this); \
    ASSERT(Utils::IsAligned(payload_start, sizeof(elem_type))); \
    return reinterpret_cast<base_ptr_type<elem_type>::type*>(payload_start); \
  }

#define VISIT_TO(last) \
  CHECK_CONTAIN_COMPRESSED(decltype(last##_)); \
  static_assert(kContainsPointerFields, \
                "Must have a corresponding VISIT_FROM"); \
  base_ptr_type<decltype(last##_)>::type* to(intptr_t length = 0) { \
    return reinterpret_cast<base_ptr_type<decltype(last##_)>::type*>( \
        &last##_); \
  }

#define VISIT_TO_PAYLOAD_END(elem_type) \
  static_assert(is_uncompressed_ptr<elem_type>::value || \
                    is_compressed_ptr<elem_type>::value, \
                "Payload elements must be object pointers"); \
  static_assert(kContainsPointerFields, \
                "Must have a corresponding VISIT_FROM"); \
  CHECK_CONTAIN_COMPRESSED(elem_type); \
  base_ptr_type<elem_type>::type* to(intptr_t length) { \
    const uword payload_start = reinterpret_cast<uword>(this) + sizeof(*this); \
    ASSERT(Utils::IsAligned(payload_start, sizeof(elem_type))); \
    const uword payload_last = \
        payload_start + sizeof(elem_type) * (length - 1); \
    return reinterpret_cast<base_ptr_type<elem_type>::type*>(payload_last); \
  }

#define VISIT_NOTHING() int NothingToVisit();

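// Illustrative sketch, not part of the runtime: UntaggedExample and its
// fields below are hypothetical, showing how a subclass is expected to use
// the macros above. VISIT_FROM/VISIT_TO bound the range of pointer fields
// that the GC visits via from() and to():
//
//   class UntaggedExample : public UntaggedObject {
//     COMPRESSED_POINTER_FIELD(StringPtr, first)
//     VISIT_FROM(first)
//     COMPRESSED_POINTER_FIELD(ArrayPtr, second)
//     VISIT_TO(second)  // from() == &first_, to() == &second_
//   };
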
#if defined(DART_COMPRESSED_POINTERS)
#define ASSERT_UNCOMPRESSED(Type) \
  static_assert(!Untagged##Type::kContainsCompressedPointers, \
                "Should not contain compressed pointers");

#define ASSERT_COMPRESSED(Type) \
  static_assert(Untagged##Type::kContainsCompressedPointers, \
                "Should contain compressed pointers");
#else
// Do no checks if there are no compressed pointers.
#define ASSERT_UNCOMPRESSED(Type)
#define ASSERT_COMPRESSED(Type)
#endif

#define ASSERT_NOTHING_TO_VISIT(Type) \
  ASSERT(SIZE_OF_RETURNED_VALUE(Untagged##Type, NothingToVisit) == sizeof(int))

enum TypedDataElementType {
#define V(name) k##name##Element,
  CLASS_LIST_TYPED_DATA(V)
#undef V
};

#define VISITOR_SUPPORT(object) \
  static intptr_t Visit##object##Pointers(object##Ptr raw_obj, \
                                          ObjectPointerVisitor* visitor);

#define RAW_OBJECT_IMPLEMENTATION(object) \
 private: /* NOLINT */ \
  VISITOR_SUPPORT(object) \
  friend class object; \
  friend class UntaggedObject; \
  friend class OffsetsTable; \
  DISALLOW_ALLOCATION(); \
  DISALLOW_IMPLICIT_CONSTRUCTORS(Untagged##object)

#define RAW_HEAP_OBJECT_IMPLEMENTATION(object) \
 private: \
  RAW_OBJECT_IMPLEMENTATION(object); \
  friend class object##SerializationCluster; \
  friend class object##DeserializationCluster; \
  friend class object##MessageSerializationCluster; \
  friend class object##MessageDeserializationCluster; \
  friend class Serializer; \
  friend class Deserializer; \
  template <typename Base> \
  friend class ObjectCopy; \
  friend class Pass2Visitor;

// UntaggedObject is the base class of all raw objects; even though it carries
// the tags_ field, not all raw objects are allocated in the heap, so not all
// of them can be dereferenced (e.g. UntaggedSmi).
class UntaggedObject {
 public:
  // The tags field which is a part of the object header uses the following
  // bit fields for storing tags.
  enum TagBits {
    kCardRememberedBit = 0,
    kCanonicalBit = 1,
    kNotMarkedBit = 2,                 // Incremental barrier target.
    kNewOrEvacuationCandidateBit = 3,  // Generational barrier target.
    kAlwaysSetBit = 4,                 // Incremental barrier source.
    kOldAndNotRememberedBit = 5,       // Generational barrier source.
    kImmutableBit = 6,
    kReservedBit = 7,

    kSizeTagPos = kReservedBit + 1,  // = 8
    kSizeTagSize = 4,
    kClassIdTagPos = kSizeTagPos + kSizeTagSize,  // = 12
    kClassIdTagSize = 20,
    kHashTagPos = kClassIdTagPos + kClassIdTagSize,  // = 32
    kHashTagSize = 32,
  };

  static constexpr intptr_t kGenerationalBarrierMask =
      1 << kNewOrEvacuationCandidateBit;
  static constexpr intptr_t kIncrementalBarrierMask = 1 << kNotMarkedBit;
  static constexpr intptr_t kBarrierOverlapShift = 2;
  COMPILE_ASSERT(kNotMarkedBit + kBarrierOverlapShift == kAlwaysSetBit);
  COMPILE_ASSERT(kNewOrEvacuationCandidateBit + kBarrierOverlapShift ==
                 kOldAndNotRememberedBit);

  // The bit in the Smi tag position must be something that can be set to 0
  // for a dead filler object of either generation.
  // See Object::MakeUnusedSpaceTraversable.
  COMPILE_ASSERT(kCardRememberedBit == 0);

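  // Worked example (illustrative comment): consider a store into an old,
  // not-yet-remembered object whose target is a new object. The source tags
  // have kOldAndNotRememberedBit (bit 5) set; shifting right by
  // kBarrierOverlapShift aligns it with the target's
  // kNewOrEvacuationCandidateBit (bit 3). The barrier test used below in
  // CheckHeapPointerStore,
  //
  //   (source_tags >> kBarrierOverlapShift) & target_tags & barrier_mask
  //
  // therefore yields kGenerationalBarrierMask exactly when such a store needs
  // a remembered-set entry; kAlwaysSetBit (bit 4) plays the same role for the
  // incremental barrier against the target's kNotMarkedBit (bit 2).
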
  // Encodes the object size in the tag in units of object alignment.
  class SizeTag {
   public:
    typedef intptr_t Type;

    static constexpr intptr_t kMaxSizeTagInUnitsOfAlignment =
        ((1 << UntaggedObject::kSizeTagSize) - 1);
    static constexpr intptr_t kMaxSizeTag =
        kMaxSizeTagInUnitsOfAlignment * kObjectAlignment;

    static constexpr uword encode(intptr_t size) {
      return SizeBits::encode(SizeToTagValue(size));
    }

    static constexpr uword decode(uword tag) {
      return TagValueToSize(SizeBits::decode(tag));
    }

    static constexpr uword update(intptr_t size, uword tag) {
      return SizeBits::update(SizeToTagValue(size), tag);
    }

    static constexpr bool SizeFits(intptr_t size) {
      DEBUG_ASSERT(Utils::IsAligned(size, kObjectAlignment));
      return (size <= kMaxSizeTag);
    }

   private:
    // The actual unscaled bit field used within the tag field.
    class SizeBits
        : public BitField<uword, intptr_t, kSizeTagPos, kSizeTagSize> {};

    static constexpr intptr_t SizeToTagValue(intptr_t size) {
      DEBUG_ASSERT(Utils::IsAligned(size, kObjectAlignment));
      return !SizeFits(size) ? 0 : (size >> kObjectAlignmentLog2);
    }
    static constexpr intptr_t TagValueToSize(intptr_t value) {
      return value << kObjectAlignmentLog2;
    }
  };

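  // Worked example (illustrative comment; kObjectAlignment is
  // architecture-dependent, 16 bytes is assumed here): a 128-byte object
  // stores 128 >> 4 = 8 in the size bits, and decode() recovers 8 << 4 = 128.
  // An object larger than kMaxSizeTag encodes as 0, and HeapSize() below
  // falls back to HeapSizeFromClass() to compute the real size.
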
  class ClassIdTag : public BitField<uword,
                                     ClassIdTagType,
                                     kClassIdTagPos,
                                     kClassIdTagSize> {};

#if defined(HASH_IN_OBJECT_HEADER)
  class HashTag : public BitField<uword, uint32_t, kHashTagPos, kHashTagSize> {
  };
#endif

  class CardRememberedBit
      : public BitField<uword, bool, kCardRememberedBit, 1> {};

  class NotMarkedBit : public BitField<uword, bool, kNotMarkedBit, 1> {};

  class NewOrEvacuationCandidateBit
      : public BitField<uword, bool, kNewOrEvacuationCandidateBit, 1> {};

  class CanonicalBit : public BitField<uword, bool, kCanonicalBit, 1> {};

  class AlwaysSetBit : public BitField<uword, bool, kAlwaysSetBit, 1> {};

  class OldAndNotRememberedBit
      : public BitField<uword, bool, kOldAndNotRememberedBit, 1> {};

  // Will be set to 1 for the following instances:
  //
  // 1. Deeply immutable instances.
  //    `Class::is_deeply_immutable`.
  //    a. Statically guaranteed deeply immutable instances.
  //       `@pragma('vm:deeply-immutable')`.
  //    b. VM recognized deeply immutable instances.
  //       `IsDeeplyImmutableCid(intptr_t predefined_cid)`.
  // 2. Shallowly unmodifiable instances.
  //    `IsShallowlyImmutableCid(intptr_t predefined_cid)`
  //    a. Unmodifiable typed data view (backing store may be mutable).
  //    b. Closures (the context may be modifiable).
  //
  // The bit is used in `CanShareObject` in object_graph_copy, where special
  // care is taken to look at the shallow immutable instances. Shallow immutable
  // instances always need special care in the VM because the VM needs to know
  // what their fields are.
  //
  // The bit is also used to make typed data stores efficient (case 2.a).
  //
  // See also Class::kIsDeeplyImmutableBit.
  class ImmutableBit : public BitField<uword, bool, kImmutableBit, 1> {};

  class ReservedBit : public BitField<uword, intptr_t, kReservedBit, 1> {};

  // Assumes this is a heap object.
  bool IsNewObject() const {
    uword addr = reinterpret_cast<uword>(this);
    return (addr & kObjectAlignmentMask) == kNewObjectAlignmentOffset;
  }
  // Assumes this is a heap object.
  bool IsOldObject() const {
    uword addr = reinterpret_cast<uword>(this);
    return (addr & kObjectAlignmentMask) == kOldObjectAlignmentOffset;
  }

  uword tags() const { return tags_; }
  uword tags_ignore_race() const { return tags_.load_ignore_race(); }

  // Support for GC marking bit. Marked objects are either grey (not yet
  // visited) or black (already visited).
  static bool IsMarked(uword tags) { return !NotMarkedBit::decode(tags); }
  bool IsMarked() const { return !tags_.Read<NotMarkedBit>(); }
  void SetMarkBit() {
    ASSERT(!IsMarked());
    tags_.UpdateBool<NotMarkedBit>(false);
  }
  void SetMarkBitUnsynchronized() {
    ASSERT(!IsMarked());
    tags_.UpdateUnsynchronized<NotMarkedBit>(false);
  }
  void SetMarkBitRelease() {
    ASSERT(!IsMarked());
    tags_.UpdateBool<NotMarkedBit, std::memory_order_release>(false);
  }
  void ClearMarkBit() {
    ASSERT(IsMarked());
    tags_.UpdateBool<NotMarkedBit>(true);
  }
  void ClearMarkBitUnsynchronized() {
    ASSERT(IsMarked());
    tags_.UpdateUnsynchronized<NotMarkedBit>(true);
  }
  // Returns false if the bit was already set.
  bool TryAcquireMarkBit() { return tags_.TryClear<NotMarkedBit>(); }

  static bool IsEvacuationCandidate(uword tags) {
    return NewOrEvacuationCandidateBit::decode(tags);
  }
  bool IsEvacuationCandidate() {
    return tags_.Read<NewOrEvacuationCandidateBit>();
  }
  void SetIsEvacuationCandidate() {
    ASSERT(IsOldObject());
    tags_.UpdateBool<NewOrEvacuationCandidateBit>(true);
  }
  void SetIsEvacuationCandidateUnsynchronized() {
    ASSERT(IsOldObject());
    tags_.UpdateUnsynchronized<NewOrEvacuationCandidateBit>(true);
  }
  void ClearIsEvacuationCandidateUnsynchronized() {
    ASSERT(IsEvacuationCandidate());
    tags_.UpdateUnsynchronized<NewOrEvacuationCandidateBit>(false);
  }

  // Canonical objects have the property that two canonical objects are
  // logically equal iff they are the same object (pointer equal).
  bool IsCanonical() const { return tags_.Read<CanonicalBit>(); }
  void SetCanonical() { tags_.UpdateBool<CanonicalBit>(true); }
  void ClearCanonical() { tags_.UpdateBool<CanonicalBit>(false); }

  bool IsImmutable() const { return tags_.Read<ImmutableBit>(); }
  void SetImmutable() { tags_.UpdateBool<ImmutableBit>(true); }
  void ClearImmutable() { tags_.UpdateBool<ImmutableBit>(false); }

  bool InVMIsolateHeap() const;

  // Support for GC remembered bit.
  bool IsRemembered() const {
    ASSERT(IsOldObject());
    return !tags_.Read<OldAndNotRememberedBit>();
  }
  bool TryAcquireRememberedBit() {
    ASSERT(!IsCardRemembered());
    return tags_.TryClear<OldAndNotRememberedBit>();
  }
  void ClearRememberedBit() {
    ASSERT(IsOldObject());
    tags_.UpdateBool<OldAndNotRememberedBit>(true);
  }
  void ClearRememberedBitUnsynchronized() {
    ASSERT(IsOldObject());
    tags_.UpdateUnsynchronized<OldAndNotRememberedBit>(true);
  }

  DART_FORCE_INLINE
  void EnsureInRememberedSet(Thread* thread) {
    if (TryAcquireRememberedBit()) {
      thread->StoreBufferAddObject(ObjectPtr(this));
    }
  }

  bool IsCardRemembered() const { return tags_.Read<CardRememberedBit>(); }
  void SetCardRememberedBitUnsynchronized() {
    ASSERT(!IsRemembered());
    ASSERT(!IsCardRemembered());
    tags_.UpdateUnsynchronized<CardRememberedBit>(true);
  }

  intptr_t GetClassId() const { return tags_.Read<ClassIdTag>(); }

#if defined(HASH_IN_OBJECT_HEADER)
  uint32_t GetHeaderHash() const { return tags_.Read<HashTag>(); }
  uint32_t SetHeaderHashIfNotSet(uint32_t h) {
    return tags_.UpdateConditional<HashTag>(h, /*conditional_old_value=*/0);
  }
#endif

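  // Usage sketch (illustrative comment; ComputeHash is a hypothetical
  // stand-in for the runtime's actual hash source): racing threads that both
  // try to install a lazily computed identity hash will agree on one winner,
  // because UpdateConditional only installs h while the field is still 0 and
  // returns the value that actually ended up in the header:
  //
  //   uint32_t hash = obj->untag()->SetHeaderHashIfNotSet(ComputeHash(obj));
  //   // 'hash' is the same on every thread, whichever store won.
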
  intptr_t HeapSize() const {
    uword tags = tags_;
    intptr_t result = SizeTag::decode(tags);
    if (result != 0) {
#if defined(DEBUG)
      // TODO(22501) Array::MakeFixedLength has a race with this code: we might
      // have loaded tags field and then MakeFixedLength could have updated it
      // leading to inconsistency between HeapSizeFromClass() and
      // SizeTag::decode(tags). We are working around it by reloading tags_ and
      // recomputing size from tags.
      const intptr_t size_from_class = HeapSizeFromClass(tags);
      if ((result > size_from_class) && (GetClassId() == kArrayCid) &&
          (tags_ != tags)) {
        result = SizeTag::decode(tags_);
      }
      ASSERT(result == size_from_class);
#endif
      return result;
    }
    result = HeapSizeFromClass(tags);
    ASSERT(result > SizeTag::kMaxSizeTag);
    return result;
  }

  // This variant must not dereference this->tags_.
  intptr_t HeapSize(uword tags) const {
    intptr_t result = SizeTag::decode(tags);
    if (result != 0) {
      return result;
    }
    result = HeapSizeFromClass(tags);
    ASSERT(result > SizeTag::kMaxSizeTag);
    return result;
  }

  bool Contains(uword addr) const {
    intptr_t this_size = HeapSize();
    uword this_addr = UntaggedObject::ToAddr(this);
    return (addr >= this_addr) && (addr < (this_addr + this_size));
  }

  void Validate(IsolateGroup* isolate_group) const;

  // This function may access the class-ID in the header, but it cannot access
  // the actual class object, because the sliding compactor uses this function
  // while the class objects are being moved.
  intptr_t VisitPointers(ObjectPointerVisitor* visitor) {
    // Fall back to virtual variant for predefined classes
    intptr_t class_id = GetClassId();
    if (class_id < kNumPredefinedCids) {
      return VisitPointersPredefined(visitor, class_id);
    }

    // Calculate the first and last raw object pointer fields.
    intptr_t instance_size = HeapSize();
    uword obj_addr = ToAddr(this);
    uword from = obj_addr + sizeof(UntaggedObject);
    uword to = obj_addr + instance_size - kCompressedWordSize;
    const auto first = reinterpret_cast<CompressedObjectPtr*>(from);
    const auto last = reinterpret_cast<CompressedObjectPtr*>(to);

    const auto unboxed_fields_bitmap =
        visitor->class_table()->GetUnboxedFieldsMapAt(class_id);

    if (!unboxed_fields_bitmap.IsEmpty()) {
      intptr_t bit = sizeof(UntaggedObject) / kCompressedWordSize;
      for (CompressedObjectPtr* current = first; current <= last; current++) {
        if (!unboxed_fields_bitmap.Get(bit++)) {
          visitor->VisitCompressedPointers(heap_base(), current, current);
        }
      }
    } else {
      visitor->VisitCompressedPointers(heap_base(), first, last);
    }

    return instance_size;
  }

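  // Worked example (illustrative comment): suppose a user-defined class has
  // three word-sized slots after the header, and the middle one holds an
  // unboxed value stored by the compiler. Its entry in the unboxed-fields
  // bitmap is set, so the loop above visits slots 1 and 3 as (compressed)
  // object pointers and skips slot 2 entirely; otherwise the GC would try to
  // interpret the raw unboxed bits as a pointer.
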
  template <class V>
  DART_FORCE_INLINE intptr_t VisitPointersNonvirtual(V* visitor) {
    // Fall back to virtual variant for predefined classes
    intptr_t class_id = GetClassId();
    if (class_id < kNumPredefinedCids) {
      return VisitPointersPredefined(visitor, class_id);
    }

    // Calculate the first and last raw object pointer fields.
    intptr_t instance_size = HeapSize();
    uword obj_addr = ToAddr(this);
    uword from = obj_addr + sizeof(UntaggedObject);
    uword to = obj_addr + instance_size - kCompressedWordSize;
    const auto first = reinterpret_cast<CompressedObjectPtr*>(from);
    const auto last = reinterpret_cast<CompressedObjectPtr*>(to);

    const auto unboxed_fields_bitmap =
        visitor->class_table()->GetUnboxedFieldsMapAt(class_id);

    if (!unboxed_fields_bitmap.IsEmpty()) {
      intptr_t bit = sizeof(UntaggedObject) / kCompressedWordSize;
      for (CompressedObjectPtr* current = first; current <= last; current++) {
        if (!unboxed_fields_bitmap.Get(bit++)) {
          visitor->V::VisitCompressedPointers(heap_base(), current, current);
        }
      }
    } else {
      visitor->V::VisitCompressedPointers(heap_base(), first, last);
    }

    return instance_size;
  }

  // This variant ensures that we do not visit the extra slot created from
  // rounding instance sizes up to the allocation unit.
  void VisitPointersPrecise(ObjectPointerVisitor* visitor);

  static ObjectPtr FromAddr(uword addr) {
    // We expect the untagged address here.
    ASSERT((addr & kSmiTagMask) != kHeapObjectTag);
    return static_cast<ObjectPtr>(addr + kHeapObjectTag);
  }

  static uword ToAddr(const UntaggedObject* raw_obj) {
    return reinterpret_cast<uword>(raw_obj);
  }
  static uword ToAddr(const ObjectPtr raw_obj) {
    return static_cast<uword>(raw_obj) - kHeapObjectTag;
  }

  static bool IsCanonical(intptr_t value) {
    return CanonicalBit::decode(value);
  }

 private:
  AtomicBitFieldContainer<uword> tags_;  // Various object tags (bits).

  intptr_t VisitPointersPredefined(ObjectPointerVisitor* visitor,
                                   intptr_t class_id);

  intptr_t HeapSizeFromClass(uword tags) const;

  void SetClassId(intptr_t new_cid) { tags_.Update<ClassIdTag>(new_cid); }
  void SetClassIdUnsynchronized(intptr_t new_cid) {
    tags_.UpdateUnsynchronized<ClassIdTag>(new_cid);
  }

 protected:
  // Automatically inherited by subclasses unless overridden.
  static constexpr bool kContainsCompressedPointers = false;
  // Automatically inherited by subclasses unless overridden.
  static constexpr bool kContainsPointerFields = false;

  // The first offset in an allocated object of the given type that contains a
  // (possibly compressed) object pointer. Used to initialize object pointer
  // fields to Object::null() instead of 0.
  //
  // Always returns an offset after the object header tags.
  template <typename T>
  DART_FORCE_INLINE static uword from_offset();

  // The last offset in an allocated object of the given untagged type that
  // contains a (possibly compressed) object pointer. Used to initialize object
  // pointer fields to Object::null() instead of 0.
  //
  // Takes an optional argument that is the number of elements in the payload,
  // which is ignored if the object never contains a payload.
  //
  // If there are no pointer fields in the object, then
  // to_offset<T>() < from_offset<T>().
  template <typename T>
  DART_FORCE_INLINE static uword to_offset(intptr_t length = 0);

  // All writes to heap objects should ultimately pass through one of the
  // methods below or their counterparts in Object, to ensure that the
  // write barrier is correctly applied.
  template <typename type, std::memory_order order = std::memory_order_relaxed>
  type LoadPointer(type const* addr) const {
    return reinterpret_cast<std::atomic<type>*>(const_cast<type*>(addr))
        ->load(order);
  }
  template <typename type,
            typename compressed_type,
            std::memory_order order = std::memory_order_relaxed>
  type LoadCompressedPointer(compressed_type const* addr) const {
    compressed_type v = reinterpret_cast<std::atomic<compressed_type>*>(
                            const_cast<compressed_type*>(addr))
                            ->load(order);
    return static_cast<type>(v.Decompress(heap_base()));
  }

  uword heap_base() const {
    return reinterpret_cast<uword>(this) & kHeapBaseMask;
  }

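  // Illustrative comment on the compressed-pointer scheme (the exact field
  // widths are an assumption of this sketch): with DART_COMPRESSED_POINTERS,
  // a field stores only the low 32 bits of a pointer, and Decompress() adds
  // the heap base back in. Since every heap object lives inside its isolate
  // group's aligned heap region, masking this object's own address with
  // kHeapBaseMask recovers that base:
  //
  //   uword base = reinterpret_cast<uword>(this) & kHeapBaseMask;
  //   ObjectPtr full = compressed_field.Decompress(base);
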
  template <typename type, std::memory_order order = std::memory_order_relaxed>
  void StorePointer(type const* addr, type value) {
    reinterpret_cast<std::atomic<type>*>(const_cast<type*>(addr))
        ->store(value, order);
    if (value.IsHeapObject()) {
      CheckHeapPointerStore(value, Thread::Current());
    }
  }

  template <typename type,
            typename compressed_type,
            std::memory_order order = std::memory_order_relaxed>
  void StoreCompressedPointer(compressed_type const* addr, type value) {
    reinterpret_cast<std::atomic<compressed_type>*>(
        const_cast<compressed_type*>(addr))
        ->store(static_cast<compressed_type>(value), order);
    if (value.IsHeapObject()) {
      CheckHeapPointerStore(value, Thread::Current());
    }
  }

  template <typename type>
  void StorePointer(type const* addr, type value, Thread* thread) {
    *const_cast<type*>(addr) = value;
    if (value.IsHeapObject()) {
      CheckHeapPointerStore(value, thread);
    }
  }

  template <typename type, typename compressed_type>
  void StoreCompressedPointer(compressed_type const* addr,
                              type value,
                              Thread* thread) {
    *const_cast<compressed_type*>(addr) = value;
    if (value.IsHeapObject()) {
      CheckHeapPointerStore(value, thread);
    }
  }

  template <typename type>
  void StorePointerUnaligned(type const* addr, type value, Thread* thread) {
    StoreUnaligned(const_cast<type*>(addr), value);
    if (value->IsHeapObject()) {
      CheckHeapPointerStore(value, thread);
    }
  }

  // Note: StoreArrayPointer won't work if value_type is a compressed pointer.
  template <typename type,
            std::memory_order order = std::memory_order_relaxed,
            typename value_type = type>
  void StoreArrayPointer(type const* addr, value_type value) {
    reinterpret_cast<std::atomic<type>*>(const_cast<type*>(addr))
        ->store(type(value), order);
    if (value->IsHeapObject()) {
      CheckArrayPointerStore(addr, value, Thread::Current());
    }
  }

  template <typename type, typename value_type = type>
  void StoreArrayPointer(type const* addr, value_type value, Thread* thread) {
    *const_cast<type*>(addr) = value;
    if (value->IsHeapObject()) {
      CheckArrayPointerStore(addr, value, thread);
    }
  }

  template <typename type, typename compressed_type, std::memory_order order>
  void StoreCompressedArrayPointer(compressed_type const* addr, type value) {
    reinterpret_cast<std::atomic<compressed_type>*>(
        const_cast<compressed_type*>(addr))
        ->store(static_cast<compressed_type>(value), order);
    if (value->IsHeapObject()) {
      CheckArrayPointerStore(addr, value, Thread::Current());
    }
  }

  template <typename type, typename compressed_type, std::memory_order order>
  void StoreCompressedArrayPointer(compressed_type const* addr,
                                   type value,
                                   Thread* thread) {
    reinterpret_cast<std::atomic<compressed_type>*>(
        const_cast<compressed_type*>(addr))
        ->store(static_cast<compressed_type>(value), order);
    if (value->IsHeapObject()) {
      CheckArrayPointerStore(addr, value, thread);
    }
  }

  template <typename type, typename compressed_type>
  void StoreCompressedArrayPointer(compressed_type const* addr,
                                   type value,
                                   Thread* thread) {
    *const_cast<compressed_type*>(addr) = value;
    if (value->IsHeapObject()) {
      CheckArrayPointerStore(addr, value, thread);
    }
  }

  template <typename type,
            typename compressed_type,
            std::memory_order order = std::memory_order_relaxed>
  type ExchangeCompressedPointer(compressed_type const* addr, type value) {
    compressed_type previous_value =
        reinterpret_cast<std::atomic<compressed_type>*>(
            const_cast<compressed_type*>(addr))
            ->exchange(static_cast<compressed_type>(value), order);
    if (value.IsHeapObject()) {
      CheckHeapPointerStore(value, Thread::Current());
    }
    return static_cast<type>(previous_value.Decompress(heap_base()));
  }

  template <std::memory_order order = std::memory_order_relaxed>
  SmiPtr LoadSmi(SmiPtr const* addr) const {
    return reinterpret_cast<std::atomic<SmiPtr>*>(const_cast<SmiPtr*>(addr))
        ->load(order);
  }
  template <std::memory_order order = std::memory_order_relaxed>
  SmiPtr LoadCompressedSmi(CompressedSmiPtr const* addr) const {
    return static_cast<SmiPtr>(reinterpret_cast<std::atomic<CompressedSmiPtr>*>(
                                   const_cast<CompressedSmiPtr*>(addr))
                                   ->load(order)
                                   .DecompressSmi());
  }

  // Use for storing into an explicitly Smi-typed field of an object
  // (i.e., both the previous and new value are Smis).
  template <typename type, std::memory_order order = std::memory_order_relaxed>
  void StoreSmi(type const* addr, type value) {
    // Can't use Contains, as array length is initialized through this method.
    ASSERT(reinterpret_cast<uword>(addr) >= UntaggedObject::ToAddr(this));
    reinterpret_cast<std::atomic<type>*>(const_cast<type*>(addr))
        ->store(value, order);
  }
  template <std::memory_order order = std::memory_order_relaxed>
  void StoreCompressedSmi(CompressedSmiPtr const* addr, SmiPtr value) {
    // Can't use Contains, as array length is initialized through this method.
    ASSERT(reinterpret_cast<uword>(addr) >= UntaggedObject::ToAddr(this));
    reinterpret_cast<std::atomic<CompressedSmiPtr>*>(
        const_cast<CompressedSmiPtr*>(addr))
        ->store(static_cast<CompressedSmiPtr>(value), order);
  }

 private:
  DART_FORCE_INLINE
  void CheckHeapPointerStore(ObjectPtr value, Thread* thread) {
    uword source_tags = this->tags_;
    uword target_tags = value->untag()->tags_;
    uword overlap = (source_tags >> kBarrierOverlapShift) & target_tags &
                    thread->write_barrier_mask();
    if (overlap != 0) {
      if ((overlap & kGenerationalBarrierMask) != 0) {
        // Generational barrier: record when a store creates an
        // old-and-not-remembered -> new reference.
        EnsureInRememberedSet(thread);
      }
      if ((overlap & kIncrementalBarrierMask) != 0) {
        // Incremental barrier: record when a store creates an
        // any -> not-marked reference.
        if (ClassIdTag::decode(target_tags) == kInstructionsCid) {
          // Instruction pages may be non-writable. Defer marking.
          thread->DeferredMarkingStackAddObject(value);
          return;
        }
        if (value->untag()->TryAcquireMarkBit()) {
          thread->MarkingStackAddObject(value);
        }
      }
    }
  }

  template <typename type, typename value_type>
  DART_FORCE_INLINE void CheckArrayPointerStore(type const* addr,
                                                value_type value,
                                                Thread* thread) {
    uword source_tags = this->tags_;
    uword target_tags = value->untag()->tags_;
    uword overlap = (source_tags >> kBarrierOverlapShift) & target_tags &
                    thread->write_barrier_mask();
    if (overlap != 0) {
      if ((overlap & kGenerationalBarrierMask) != 0) {
        // Generational barrier: record when a store creates an
        // old-and-not-remembered -> new reference.
        if (this->IsCardRemembered()) {
          RememberCard(addr);
        } else if (this->TryAcquireRememberedBit()) {
          thread->StoreBufferAddObject(static_cast<ObjectPtr>(this));
        }
      }
      if ((overlap & kIncrementalBarrierMask) != 0) {
        // Incremental barrier: record when a store creates an
        // old -> old-and-not-marked reference.
        if (ClassIdTag::decode(target_tags) == kInstructionsCid) {
          // Instruction pages may be non-writable. Defer marking.
          thread->DeferredMarkingStackAddObject(value);
          return;
        }
        if (value->untag()->TryAcquireMarkBit()) {
          thread->MarkingStackAddObject(value);
        }
      }
    }
  }

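  // Illustrative comment: the array variant differs from
  // CheckHeapPointerStore only in the generational case. Very large arrays
  // live on card-remembered pages, where remembering the whole object would
  // force the scavenger to rescan every element; RememberCard instead marks
  // just the card containing the updated slot, so only that slice of the
  // array is revisited.
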
  friend class StoreBufferUpdateVisitor;  // RememberCard
  void RememberCard(ObjectPtr const* slot);
#if defined(DART_COMPRESSED_POINTERS)
  void RememberCard(CompressedObjectPtr const* slot);
#endif

  friend class Array;
  friend class ByteBuffer;
  friend class CidRewriteVisitor;
  friend class Closure;
  friend class Code;
  friend class Pointer;
  friend class Double;
  friend class DynamicLibrary;
  friend class ForwardPointersVisitor;  // StorePointer
  friend class FreeListElement;
  friend class Function;
  friend class GCMarker;
  friend class GCSweeper;
  friend class ExternalTypedData;
  friend class GrowableObjectArray;  // StorePointer
  template <bool>
  friend class MarkingVisitorBase;
  friend class Mint;
  friend class Object;
  friend class OneByteString;  // StoreSmi
  friend class UntaggedInstance;
  friend class Scavenger;
  template <bool>
  friend class ScavengerVisitorBase;
  friend class ImageReader;  // tags_ check
  friend class ImageWriter;
  friend class AssemblyImageWriter;
  friend class BlobImageWriter;
  friend class Deserializer;
  friend class String;
  friend class WeakProperty;            // StorePointer
  friend class Instance;                // StorePointer
  friend class StackFrame;              // GetCodeObject assertion.
  friend class CodeLookupTableBuilder;  // profiler
  friend class ObjectLocator;
  friend class WriteBarrierUpdateVisitor;  // CheckHeapPointerStore
  friend class OffsetsTable;
  friend class Object;
  friend void SetNewSpaceTaggingWord(ObjectPtr, classid_t, uint32_t);  // tags_
  friend class ObjectCopyBase;  // LoadPointer/StorePointer
  friend void ReportImpossibleNullError(intptr_t cid,
                                        StackFrame* caller_frame,
                                        Thread* thread);

  DISALLOW_ALLOCATION();
  DISALLOW_IMPLICIT_CONSTRUCTORS(UntaggedObject);
};

// Note that the below templates for from_offset and to_offset for objects
// with pointer fields assume that the range from from() and to() cover all
// pointer fields. If this is not the case (e.g., the next_seen_by_gc_ field
// in WeakArray/WeakProperty/WeakReference), then specialize the definitions.

template <typename T>
DART_FORCE_INLINE uword UntaggedObject::from_offset() {
  if constexpr (T::kContainsPointerFields) {
    return reinterpret_cast<uword>(reinterpret_cast<T*>(kOffsetOfPtr)->from()) -
           kOffsetOfPtr;
  } else {
    // Non-zero to ensure to_offset() < from_offset() in this case, as
    // to_offset() is the offset to the last pointer field, not past it.
    return sizeof(UntaggedObject);
  }
}

template <typename T>
DART_FORCE_INLINE uword UntaggedObject::to_offset(intptr_t length) {
  if constexpr (T::kContainsPointerFields) {
    return reinterpret_cast<uword>(
               reinterpret_cast<T*>(kOffsetOfPtr)->to(length)) -
           kOffsetOfPtr;
  } else {
    USE(length);
    // Zero to ensure to_offset() < from_offset() in this case, as
    // from_offset() is guaranteed to return an offset after the header tags.
    return 0;
  }
}

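// Illustrative comment: the kOffsetOfPtr trick evaluates from()/to() on a
// fake object placed at a known address, turning the returned field address
// back into a plain byte offset without ever dereferencing memory. For a
// hypothetical UntaggedExample declaring VISIT_FROM(first) and
// VISIT_TO(second), from_offset<UntaggedExample>() is the byte offset of
// first_ and to_offset<UntaggedExample>() that of second_, so allocation can
// null-initialize exactly the pointer slots between the two.
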
inline intptr_t ObjectPtr::GetClassId() const {
  return untag()->GetClassId();
}

#define POINTER_FIELD(type, name) \
 public: \
  template <std::memory_order order = std::memory_order_relaxed> \
  type name() const { \
    return LoadPointer<type, order>(&name##_); \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##name(type value) { \
    StorePointer<type, order>(&name##_, value); \
  } \
 \
 protected: \
  type name##_;

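// Illustrative comment: POINTER_FIELD(StringPtr, name) expands to roughly
// the following members, so every read and write of the field goes through
// the barrier-aware accessors defined on UntaggedObject:
//
//   public:
//    StringPtr name() const;          // LoadPointer(&name_)
//    void set_name(StringPtr value);  // StorePointer(&name_, value)
//   protected:
//    StringPtr name_;
//
// The COMPRESSED_* variants below do the same with Compressed pointers plus
// Decompress()/heap_base() on the load path.
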
#define COMPRESSED_POINTER_FIELD(type, name) \
 public: \
  template <std::memory_order order = std::memory_order_relaxed> \
  type name() const { \
    return LoadCompressedPointer<type, Compressed##type, order>(&name##_); \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##name(type value) { \
    StoreCompressedPointer<type, Compressed##type, order>(&name##_, value); \
  } \
 \
 protected: \
  Compressed##type name##_;

#define ARRAY_POINTER_FIELD(type, name) \
 public: \
  template <std::memory_order order = std::memory_order_relaxed> \
  type name() const { \
    return LoadPointer<type, order>(&name##_); \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##name(type value) { \
    StoreArrayPointer<type, order>(&name##_, value); \
  } \
 \
 protected: \
  type name##_;

#define COMPRESSED_ARRAY_POINTER_FIELD(type, name) \
 public: \
  template <std::memory_order order = std::memory_order_relaxed> \
  type name() const { \
    return LoadPointer<Compressed##type, order>(&name##_).Decompress( \
        heap_base()); \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##name(type value) { \
    StoreCompressedArrayPointer<type, Compressed##type, order>(&name##_, \
                                                               value); \
  } \
 \
 protected: \
  Compressed##type name##_;

#define VARIABLE_POINTER_FIELDS(type, accessor_name, array_name) \
 public: \
  template <std::memory_order order = std::memory_order_relaxed> \
  type accessor_name(intptr_t index) const { \
    return LoadPointer<type, order>(&array_name()[index]); \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##accessor_name(intptr_t index, type value) { \
    StoreArrayPointer<type, order>(&array_name()[index], value); \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##accessor_name(intptr_t index, type value, Thread* thread) { \
    StoreArrayPointer<type, order>(&array_name()[index], value, thread); \
  } \
 \
 protected: \
  type* array_name() { \
    OPEN_ARRAY_START(type, type); \
  } \
  type const* array_name() const { \
    OPEN_ARRAY_START(type, type); \
  } \
  VISIT_TO_PAYLOAD_END(type)

#define COMPRESSED_VARIABLE_POINTER_FIELDS(type, accessor_name, array_name) \
 public: \
  template <std::memory_order order = std::memory_order_relaxed> \
  type accessor_name(intptr_t index) const { \
    return LoadCompressedPointer<type, Compressed##type, order>( \
        &array_name()[index]); \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##accessor_name(intptr_t index, type value) { \
    StoreCompressedArrayPointer<type, Compressed##type, order>( \
        &array_name()[index], value); \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##accessor_name(intptr_t index, type value, Thread* thread) { \
    StoreCompressedArrayPointer<type, Compressed##type, order>( \
        &array_name()[index], value, thread); \
  } \
 \
 protected: \
  Compressed##type* array_name() { \
    OPEN_ARRAY_START(Compressed##type, Compressed##type); \
  } \
  Compressed##type const* array_name() const { \
    OPEN_ARRAY_START(Compressed##type, Compressed##type); \
  } \
  VISIT_TO_PAYLOAD_END(Compressed##type)

#define SMI_FIELD(type, name) \
 public: \
  template <std::memory_order order = std::memory_order_relaxed> \
  type name() const { \
    type result = LoadSmi<order>(&name##_); \
    ASSERT(!result.IsHeapObject()); \
    return result; \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##name(type value) { \
    ASSERT(!value.IsHeapObject()); \
    StoreSmi<type, order>(&name##_, value); \
  } \
 \
 protected: \
  type name##_;

#define COMPRESSED_SMI_FIELD(type, name) \
 public: \
  template <std::memory_order order = std::memory_order_relaxed> \
  type name() const { \
    type result = LoadCompressedSmi<order>(&name##_); \
    ASSERT(!result.IsHeapObject()); \
    return result; \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##name(type value) { \
    ASSERT(!value.IsHeapObject()); \
    StoreCompressedSmi(&name##_, value); \
  } \
 \
 protected: \
  Compressed##type name##_;

// Used to define untagged object fields that can have values wrapped in
// WeakSerializationReferences. Since WeakSerializationReferences are only used
// during precompilation, these fields have type CompressedObjectPtr in the
// precompiler and the normally expected type otherwise.
//
// Fields that are defined with WSR_COMPRESSED_POINTER_FIELD should have
// getters and setters that are declared in object.h with
// PRECOMPILER_WSR_FIELD_DECLARATION and defined in object.cc with
// PRECOMPILER_WSR_FIELD_DEFINITION.
#if defined(DART_PRECOMPILER)
#define WSR_COMPRESSED_POINTER_FIELD(Type, Name) \
  COMPRESSED_POINTER_FIELD(ObjectPtr, Name)
#else
#define WSR_COMPRESSED_POINTER_FIELD(Type, Name) \
  COMPRESSED_POINTER_FIELD(Type, Name)
#endif

class UntaggedClass : public UntaggedObject {
 public:
  enum ClassFinalizedState {
    kAllocated = 0,  // Initial state.
    kPreFinalized,   // VM classes: size precomputed, but no checks done.
    kFinalized,      // Class parsed, code compiled, not ready for allocation.
    kAllocateFinalized,  // CHA invalidated, class is ready for allocation.
  };
  enum ClassLoadingState {
    // Class object is created, but it is not filled up.
    // At this state class can only be used as a forward reference during
    // class loading.
    kNameOnly = 0,
    // Class declaration information such as type parameters, supertype and
    // implemented interfaces are loaded. However, types in the class are
    // not finalized yet.
    kDeclarationLoaded,
    // Types in the class are finalized. At this point, members can be loaded
    // and class can be finalized.
    kTypeFinalized,
  };

  classid_t id() const { return id_; }

 private:
  RAW_HEAP_OBJECT_IMPLEMENTATION(Class);

  COMPRESSED_POINTER_FIELD(StringPtr, name)
  VISIT_FROM(name)
  NOT_IN_PRODUCT(COMPRESSED_POINTER_FIELD(StringPtr, user_name))
  COMPRESSED_POINTER_FIELD(ArrayPtr, functions)
  COMPRESSED_POINTER_FIELD(ArrayPtr, functions_hash_table)
  COMPRESSED_POINTER_FIELD(ArrayPtr, fields)
  COMPRESSED_POINTER_FIELD(ArrayPtr, offset_in_words_to_field)
  COMPRESSED_POINTER_FIELD(ArrayPtr, interfaces)  // Array of AbstractType.
  COMPRESSED_POINTER_FIELD(ScriptPtr, script)
  COMPRESSED_POINTER_FIELD(LibraryPtr, library)
  COMPRESSED_POINTER_FIELD(TypeParametersPtr, type_parameters)
  COMPRESSED_POINTER_FIELD(TypePtr, super_type)
  // Canonicalized const instances of this class.
  COMPRESSED_POINTER_FIELD(ArrayPtr, constants)
  // Declaration type for this class.
  COMPRESSED_POINTER_FIELD(TypePtr, declaration_type)
  // Cache for dispatcher functions.
  COMPRESSED_POINTER_FIELD(ArrayPtr, invocation_dispatcher_cache)

#if !defined(PRODUCT) || !defined(DART_PRECOMPILED_RUNTIME)
  // Array of Class.
  COMPRESSED_POINTER_FIELD(GrowableObjectArrayPtr, direct_implementors)
  // Array of Class.
  COMPRESSED_POINTER_FIELD(GrowableObjectArrayPtr, direct_subclasses)
#endif  // !defined(PRODUCT) || !defined(DART_PRECOMPILED_RUNTIME)

  // Cached declaration instance type arguments for this class.
  // Not preserved in AOT snapshots.
  COMPRESSED_POINTER_FIELD(TypeArgumentsPtr,
                           declaration_instance_type_arguments)
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Stub code for allocation of instances.
  COMPRESSED_POINTER_FIELD(CodePtr, allocation_stub)
  // CHA optimized codes.
  COMPRESSED_POINTER_FIELD(WeakArrayPtr, dependent_code)
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

#if defined(DART_PRECOMPILED_RUNTIME)
  VISIT_TO(declaration_instance_type_arguments)
#else
  VISIT_TO(dependent_code)
#endif  // defined(DART_PRECOMPILED_RUNTIME)

  CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
    switch (kind) {
      case Snapshot::kFullAOT:
#if defined(PRODUCT)
        return reinterpret_cast<CompressedObjectPtr*>(
            &invocation_dispatcher_cache_);
#else
        return reinterpret_cast<CompressedObjectPtr*>(&direct_subclasses_);
#endif  // defined(PRODUCT)
      case Snapshot::kFull:
      case Snapshot::kFullCore:
#if !defined(DART_PRECOMPILED_RUNTIME)
        return reinterpret_cast<CompressedObjectPtr*>(&allocation_stub_);
#endif
      case Snapshot::kFullJIT:
#if !defined(DART_PRECOMPILED_RUNTIME)
        return reinterpret_cast<CompressedObjectPtr*>(&dependent_code_);
#endif
      case Snapshot::kNone:
      case Snapshot::kInvalid:
        break;
    }
    UNREACHABLE();
    return nullptr;
  }

  NOT_IN_PRECOMPILED(TokenPosition token_pos_);
  NOT_IN_PRECOMPILED(TokenPosition end_token_pos_);
  NOT_IN_PRECOMPILED(classid_t implementor_cid_);

  classid_t id_;                // Class Id, also index in the class table.
  int16_t num_type_arguments_;  // Number of type arguments in flattened vector.
  uint16_t num_native_fields_;
  uint32_t state_bits_;

  // Size if fixed len or 0 if variable len.
  int32_t host_instance_size_in_words_;

  // Offset of type args fld.
  int32_t host_type_arguments_field_offset_in_words_;

  // Offset of the next instance field.
  int32_t host_next_field_offset_in_words_;

#if defined(DART_PRECOMPILER)
  // Size if fixed len or 0 if variable len (target).
  int32_t target_instance_size_in_words_;

  // Offset of type args fld.
  int32_t target_type_arguments_field_offset_in_words_;

  // Offset of the next instance field (target).
  int32_t target_next_field_offset_in_words_;
#endif  // defined(DART_PRECOMPILER)

#if !defined(DART_PRECOMPILED_RUNTIME)
  uint32_t kernel_offset_;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  friend class Instance;
  friend class IsolateGroup;
  friend class Object;
  friend class UntaggedInstance;
  friend class MessageSerializer;
  friend class CidRewriteVisitor;
  friend class Api;
};

class UntaggedPatchClass : public UntaggedObject {
 private:
  RAW_HEAP_OBJECT_IMPLEMENTATION(PatchClass);

  COMPRESSED_POINTER_FIELD(ClassPtr, wrapped_class)
  VISIT_FROM(wrapped_class)
  COMPRESSED_POINTER_FIELD(ScriptPtr, script)
#if !defined(DART_PRECOMPILED_RUNTIME)
  COMPRESSED_POINTER_FIELD(KernelProgramInfoPtr, kernel_program_info)
  VISIT_TO(kernel_program_info)
#else
  VISIT_TO(script)
#endif

  CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
    switch (kind) {
      case Snapshot::kFullAOT:
        return reinterpret_cast<CompressedObjectPtr*>(&script_);
      case Snapshot::kFull:
      case Snapshot::kFullCore:
      case Snapshot::kFullJIT:
#if !defined(DART_PRECOMPILED_RUNTIME)
        return reinterpret_cast<CompressedObjectPtr*>(&kernel_program_info_);
#else
        UNREACHABLE();
        return nullptr;
#endif
      case Snapshot::kNone:
      case Snapshot::kInvalid:
        break;
    }
    UNREACHABLE();
    return nullptr;
  }

  NOT_IN_PRECOMPILED(intptr_t kernel_library_index_);

  friend class Function;
};

class UntaggedFunction : public UntaggedObject {
 public:
  // When you add a new kind, please also update the observatory to account
  // for the new string returned by KindToCString().
  // - runtime/observatory/lib/src/models/objects/function.dart (FunctionKind)
  // - runtime/observatory/lib/src/elements/function_view.dart
  //   (_functionKindToString)
  // - runtime/observatory/lib/src/service/object.dart (stringToFunctionKind)
#define FOR_EACH_RAW_FUNCTION_KIND(V) \
  /* an ordinary or operator method */ \
  V(RegularFunction) \
  /* a user-declared closure function */ \
  V(ClosureFunction) \
  /* an implicit closure (i.e., tear-off) */ \
  V(ImplicitClosureFunction) \
  /* a signature only without actual code */ \
  V(GetterFunction) \
  /* setter functions e.g: set foo(..) { .. } */ \
  V(SetterFunction) \
  /* a generative (is_static=false) or factory (is_static=true) constructor */ \
  V(Constructor) \
  /* an implicit getter for instance fields */ \
  V(ImplicitGetter) \
  /* an implicit setter for instance fields */ \
  V(ImplicitSetter) \
  /* represents an implicit getter for static fields with initializers */ \
  V(ImplicitStaticGetter) \
  /* the initialization expression for a static or instance field */ \
  V(FieldInitializer) \
  /* return a closure on the receiver for tear-offs */ \
  V(MethodExtractor) \
  /* builds an Invocation and invokes noSuchMethod */ \
  V(NoSuchMethodDispatcher) \
  /* invokes a field as a closure (i.e., call-through-getter) */ \
  V(InvokeFieldDispatcher) \
  /* a generated irregexp matcher function. */ \
  V(IrregexpFunction) \
  /* a forwarder which performs type checks for arguments of a dynamic call */ \
  /* (i.e., those checks omitted by the caller for interface calls). */ \
  V(DynamicInvocationForwarder) \
  /* A `dart:ffi` call or callback trampoline. */ \
  V(FfiTrampoline) \
  /* getter for a record field */ \
  V(RecordFieldGetter)

  enum Kind {
#define KIND_DEFN(Name) k##Name,
    FOR_EACH_RAW_FUNCTION_KIND(KIND_DEFN)
#undef KIND_DEFN
  };

  static const char* KindToCString(Kind k) {
    switch (k) {
#define KIND_CASE(Name) \
  case Kind::k##Name: \
    return #Name;
      FOR_EACH_RAW_FUNCTION_KIND(KIND_CASE)
#undef KIND_CASE
      default:
        UNREACHABLE();
        return nullptr;
    }
  }

  static bool ParseKind(const char* str, Kind* out) {
#define KIND_CASE(Name) \
  if (strcmp(str, #Name) == 0) { \
    *out = Kind::k##Name; \
    return true; \
  }
    FOR_EACH_RAW_FUNCTION_KIND(KIND_CASE)
#undef KIND_CASE
    return false;
  }

  enum AsyncModifier {
    kNoModifier = 0x0,
    kAsyncBit = 0x1,
    kGeneratorBit = 0x2,
    kAsync = kAsyncBit,
    kSyncGen = kGeneratorBit,
    kAsyncGen = kAsyncBit | kGeneratorBit,
  };

  // Wraps a 64-bit integer to represent the bitmap for unboxed parameters and
  // return value. Two bits are used for each of them to denote if it is boxed,
  // unboxed integer, unboxed double or unboxed record.
  // It includes the two bits for the receiver, even though currently we
  // do not have information from TFA that allows the receiver to be unboxed.
  class alignas(8) UnboxedParameterBitmap {
   public:
    enum UnboxedState {
      kBoxed,
      kUnboxedInt,
      kUnboxedDouble,
      kUnboxedRecord,
    };
    static constexpr intptr_t kBitsPerElement = 2;
    static constexpr uint64_t kElementBitmask = (1 << kBitsPerElement) - 1;
    static constexpr intptr_t kCapacity =
        (kBitsPerByte * sizeof(uint64_t)) / kBitsPerElement;

    UnboxedParameterBitmap() : bitmap_(0) {}
    explicit UnboxedParameterBitmap(uint64_t bitmap) : bitmap_(bitmap) {}
    UnboxedParameterBitmap(const UnboxedParameterBitmap&) = default;
    UnboxedParameterBitmap& operator=(const UnboxedParameterBitmap&) = default;

    DART_FORCE_INLINE bool IsUnboxed(intptr_t position) const {
      return At(position) != kBoxed;
    }
    DART_FORCE_INLINE bool IsUnboxedInteger(intptr_t position) const {
      return At(position) == kUnboxedInt;
    }
    DART_FORCE_INLINE bool IsUnboxedDouble(intptr_t position) const {
      return At(position) == kUnboxedDouble;
    }
    DART_FORCE_INLINE bool IsUnboxedRecord(intptr_t position) const {
      return At(position) == kUnboxedRecord;
    }
    DART_FORCE_INLINE void SetUnboxedInteger(intptr_t position) {
      SetAt(position, kUnboxedInt);
    }
    DART_FORCE_INLINE void SetUnboxedDouble(intptr_t position) {
      SetAt(position, kUnboxedDouble);
    }
    DART_FORCE_INLINE void SetUnboxedRecord(intptr_t position) {
      SetAt(position, kUnboxedRecord);
    }
    DART_FORCE_INLINE uint64_t Value() const { return bitmap_; }
    DART_FORCE_INLINE bool IsEmpty() const { return bitmap_ == 0; }
    DART_FORCE_INLINE void Reset() { bitmap_ = 0; }
    DART_FORCE_INLINE bool HasUnboxedParameters() const {
      return (bitmap_ >> kBitsPerElement) != 0;
    }

   private:
    DART_FORCE_INLINE UnboxedState At(intptr_t position) const {
      if (position >= kCapacity) {
        return kBoxed;
      }
      return static_cast<UnboxedState>(
          (bitmap_ >> (kBitsPerElement * position)) & kElementBitmask);
    }
    DART_FORCE_INLINE void SetAt(intptr_t position, UnboxedState state) {
      ASSERT(position < kCapacity);
      const intptr_t shift = kBitsPerElement * position;
      bitmap_ = (bitmap_ & ~(kElementBitmask << shift)) |
                (static_cast<decltype(bitmap_)>(state) << shift);
    }

    uint64_t bitmap_;
  };

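  // Worked example (illustrative comment): position 0 is the receiver and
  // position 1 the first parameter. Marking that parameter as an unboxed
  // double stores kUnboxedDouble (2) in bits 2..3:
  //
  //   UnboxedParameterBitmap bitmap;
  //   bitmap.SetUnboxedDouble(1);     // bitmap_ == 0b1000
  //   bitmap.IsUnboxedDouble(1);      // true
  //   bitmap.HasUnboxedParameters();  // true: bits above the receiver's
  //                                   // two bits are non-zero
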
 private:
  friend class Class;

  RAW_HEAP_OBJECT_IMPLEMENTATION(Function);

  uword entry_point_;            // Accessed from generated code.
  uword unchecked_entry_point_;  // Accessed from generated code.

  COMPRESSED_POINTER_FIELD(StringPtr, name)
  VISIT_FROM(name)
  // Class or patch class or mixin class where this function is defined.
  COMPRESSED_POINTER_FIELD(ObjectPtr, owner)
  WSR_COMPRESSED_POINTER_FIELD(FunctionTypePtr, signature)
  // Additional data specific to the function kind. See Function::set_data()
  // for details.
  COMPRESSED_POINTER_FIELD(ObjectPtr, data)
  CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
    switch (kind) {
      case Snapshot::kFullAOT:
      case Snapshot::kFull:
      case Snapshot::kFullCore:
      case Snapshot::kFullJIT:
        return reinterpret_cast<CompressedObjectPtr*>(&data_);
      case Snapshot::kNone:
      case Snapshot::kInvalid:
        break;
    }
    UNREACHABLE();
    return nullptr;
  }
  // ICData of unoptimized code.
  COMPRESSED_POINTER_FIELD(ArrayPtr, ic_data_array);
  // Currently active code. Accessed from generated code.
  COMPRESSED_POINTER_FIELD(CodePtr, code);
#if defined(DART_PRECOMPILED_RUNTIME)
  VISIT_TO(code);
#else
  // Positional parameter names are not needed in the AOT runtime.
  COMPRESSED_POINTER_FIELD(ArrayPtr, positional_parameter_names);
  // Unoptimized code, keep it after optimization.
  COMPRESSED_POINTER_FIELD(CodePtr, unoptimized_code);
  VISIT_TO(unoptimized_code);

  UnboxedParameterBitmap unboxed_parameters_info_;
#endif

#if !defined(DART_PRECOMPILED_RUNTIME) || \
    (defined(DART_PRECOMPILED_RUNTIME) && !defined(PRODUCT))
  TokenPosition token_pos_;
#endif

#if !defined(DART_PRECOMPILED_RUNTIME)
  TokenPosition end_token_pos_;
#endif

  AtomicBitFieldContainer<uint32_t> kind_tag_;  // See Function::KindTagBits.

#define JIT_FUNCTION_COUNTERS(F) \
  F(intptr_t, int32_t, usage_counter) \
  F(intptr_t, uint16_t, optimized_instruction_count) \
  F(intptr_t, uint16_t, optimized_call_site_count) \
  F(int8_t, int8_t, deoptimization_counter) \
  F(intptr_t, int8_t, state_bits) \
  F(int, int8_t, inlining_depth)

#if !defined(DART_PRECOMPILED_RUNTIME)
  uint32_t kernel_offset_;

#define DECLARE(return_type, type, name) type name##_;
  JIT_FUNCTION_COUNTERS(DECLARE)
#undef DECLARE

  AtomicBitFieldContainer<uint8_t> packed_fields_;

  static constexpr intptr_t kMaxOptimizableBits = 1;

  using PackedOptimizable =
      BitField<decltype(packed_fields_), bool, 0, kMaxOptimizableBits>;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
};

enum class InstantiationMode : uint8_t {
  // Must instantiate the type arguments normally.
  kNeedsInstantiation,
  // The type arguments are already instantiated.
  kIsInstantiated,
  // Use the instantiator type arguments that would be used to instantiate
  // the default type arguments, as instantiating produces the same result.
  kSharesInstantiatorTypeArguments,
  // Use the function type arguments that would be used to instantiate
  // the default type arguments, as instantiating produces the same result.
  kSharesFunctionTypeArguments,
};

class UntaggedClosureData : public UntaggedObject {
 private:
  RAW_HEAP_OBJECT_IMPLEMENTATION(ClosureData);

  COMPRESSED_POINTER_FIELD(ContextScopePtr, context_scope)
  VISIT_FROM(context_scope)
  // Enclosing function of this local function.
  WSR_COMPRESSED_POINTER_FIELD(FunctionPtr, parent_function)
  // Closure object for static implicit closures.
  COMPRESSED_POINTER_FIELD(ClosurePtr, closure)
  VISIT_TO(closure)

  // kernel_to_il.cc assumes we can load the untagged value and box it in a Smi.
  static_assert(sizeof(InstantiationMode) * kBitsPerByte <=
                    compiler::target::kSmiBits,
                "Instantiation mode must fit in a Smi");

  static constexpr uint8_t kNoAwaiterLinkDepth = 0xFF;

  AtomicBitFieldContainer<uint32_t> packed_fields_;

  using PackedInstantiationMode =
      BitField<decltype(packed_fields_), InstantiationMode, 0, 8>;
  using PackedAwaiterLinkDepth = BitField<decltype(packed_fields_),
                                          uint8_t,
                                          PackedInstantiationMode::kNextBit,
                                          8>;
  using PackedAwaiterLinkIndex = BitField<decltype(packed_fields_),
                                          uint8_t,
                                          PackedAwaiterLinkDepth::kNextBit,
                                          8>;

  friend class Function;
};

class UntaggedFfiTrampolineData : public UntaggedObject {
 private:
  RAW_HEAP_OBJECT_IMPLEMENTATION(FfiTrampolineData);

  COMPRESSED_POINTER_FIELD(TypePtr, signature_type)
  VISIT_FROM(signature_type)

  COMPRESSED_POINTER_FIELD(FunctionTypePtr, c_signature)

  // Target Dart method for callbacks, otherwise null.
  COMPRESSED_POINTER_FIELD(FunctionPtr, callback_target)

  // For callbacks, value to return if Dart target throws an exception.
  COMPRESSED_POINTER_FIELD(InstancePtr, callback_exceptional_return)
  VISIT_TO(callback_exceptional_return)
  CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }

  // Callback id for callbacks.
  //
  // The callback ids are used so that native callbacks can look up their own
  // code objects, since native code doesn't pass code objects into function
  // calls. The callback id is also used for verifying that callbacks are
  // called on the correct isolate. See DLRT_VerifyCallbackIsolate for details.
  //
  // Callback id is -1 for non-callbacks or when id is not allocated yet.
  // Check 'callback_target_' to determine if this is a callback or not.
  int32_t callback_id_;

  // The kind of trampoline this is. See FfiCallbackKind.
  uint8_t ffi_function_kind_;
};

class UntaggedField : public UntaggedObject {
  RAW_HEAP_OBJECT_IMPLEMENTATION(Field);

  COMPRESSED_POINTER_FIELD(StringPtr, name)
  VISIT_FROM(name)
  // Class or patch class or mixin class where this field is defined or original
  // field.
  COMPRESSED_POINTER_FIELD(ObjectPtr, owner)
  COMPRESSED_POINTER_FIELD(AbstractTypePtr, type)
  // Static initializer function.
  COMPRESSED_POINTER_FIELD(FunctionPtr, initializer_function)
  // - for instance fields: offset in words to the value in the class instance.
  // - for static fields: index into field_table.
  COMPRESSED_POINTER_FIELD(SmiPtr, host_offset_or_field_id)
  COMPRESSED_POINTER_FIELD(SmiPtr, guarded_list_length)
  COMPRESSED_POINTER_FIELD(WeakArrayPtr, dependent_code)
  VISIT_TO(dependent_code);
  CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
    switch (kind) {
      case Snapshot::kFull:
      case Snapshot::kFullCore:
      case Snapshot::kFullJIT:
      case Snapshot::kFullAOT:
        return reinterpret_cast<CompressedObjectPtr*>(&initializer_function_);
      case Snapshot::kNone:
      case Snapshot::kInvalid:
        break;
    }
    UNREACHABLE();
    return nullptr;
  }
  TokenPosition token_pos_;
  TokenPosition end_token_pos_;
  ClassIdTagType guarded_cid_;
  ClassIdTagType is_nullable_;  // kNullCid if field can contain null value and
                                // kIllegalCid otherwise.

#if !defined(DART_PRECOMPILED_RUNTIME)
  uint32_t kernel_offset_;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  // Offset to the guarded length field inside an instance of class matching
  // guarded_cid_. Stored corrected by -kHeapObjectTag to simplify code
  // generated on platforms with weak addressing modes (ARM).
  int8_t guarded_list_length_in_object_offset_;

  // Runtime tracking state of exactness of type annotation of this field.
  // See StaticTypeExactnessState for the meaning and possible values in this
  // field.
  int8_t static_type_exactness_state_;

  uint16_t kind_bits_;  // static, final, const, has initializer....

#if !defined(DART_PRECOMPILED_RUNTIME)
  // for instance fields, the offset in words in the target architecture
  int32_t target_offset_;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  friend class CidRewriteVisitor;
  friend class GuardFieldClassInstr;  // For sizeof(guarded_cid_/...)
  friend class LoadFieldInstr;        // For sizeof(guarded_cid_/...)
  friend class StoreFieldInstr;       // For sizeof(guarded_cid_/...)
};

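// Worked example (illustrative comment; the numbers are assumptions of this
// sketch, not actual layout): if guarded_cid_ is a list class whose length
// Smi lives at byte offset 8 from the untagged object start, then, with
// kHeapObjectTag == 1, guarded_list_length_in_object_offset_ stores
// 8 - 1 = 7. Generated code can then load the length directly as
// [tagged_ptr + 7] in a single addressing step, with no untagging
// instruction.
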
class alignas(8) UntaggedScript : public UntaggedObject {
  RAW_HEAP_OBJECT_IMPLEMENTATION(Script);

  COMPRESSED_POINTER_FIELD(StringPtr, url)
  VISIT_FROM(url)
  COMPRESSED_POINTER_FIELD(StringPtr, resolved_url)
  COMPRESSED_POINTER_FIELD(TypedDataPtr, line_starts)
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
  COMPRESSED_POINTER_FIELD(TypedDataViewPtr, constant_coverage)
#endif  // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
  COMPRESSED_POINTER_FIELD(ArrayPtr, debug_positions)
  COMPRESSED_POINTER_FIELD(KernelProgramInfoPtr, kernel_program_info)
  VISIT_TO(kernel_program_info)

  CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
    switch (kind) {
      case Snapshot::kFullAOT:
#if defined(PRODUCT)
        return reinterpret_cast<CompressedObjectPtr*>(&url_);
#else
        return reinterpret_cast<CompressedObjectPtr*>(&resolved_url_);
#endif
      case Snapshot::kFull:
      case Snapshot::kFullCore:
      case Snapshot::kFullJIT:
        return reinterpret_cast<CompressedObjectPtr*>(&kernel_program_info_);
      case Snapshot::kNone:
      case Snapshot::kInvalid:
        break;
    }
    UNREACHABLE();
    return nullptr;
  }

#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
  int64_t load_timestamp_;
  int32_t kernel_script_index_;
#else
  int32_t kernel_script_index_;
  int64_t load_timestamp_;
#endif

#if !defined(DART_PRECOMPILED_RUNTIME)
  int32_t flags_and_max_position_;

 public:
  using LazyLookupSourceAndLineStartsBit =
      BitField<decltype(flags_and_max_position_), bool, 0, 1>;
  using HasCachedMaxPositionBit =
      BitField<decltype(flags_and_max_position_),
               bool,
               LazyLookupSourceAndLineStartsBit::kNextBit,
               1>;
  using CachedMaxPositionBitField = BitField<decltype(flags_and_max_position_),
                                             intptr_t,
                                             HasCachedMaxPositionBit::kNextBit>;

 private:
#endif
};

class UntaggedLibrary : public UntaggedObject {
  enum LibraryState {
    kAllocated,       // Initial state.
    kLoadRequested,   // Compiler or script requested load of library.
    kLoadInProgress,  // Library is in the process of being loaded.
    kLoaded,          // Library is loaded.
  };

  enum LibraryFlags {
    kDartSchemeBit = 0,
    kDebuggableBit,      // True if debugger can stop in library.
    kInFullSnapshotBit,  // True if library is in a full snapshot.
    kNumFlagBits,
  };
  COMPILE_ASSERT(kNumFlagBits <= (sizeof(uint8_t) * kBitsPerByte));
  class DartSchemeBit : public BitField<uint8_t, bool, kDartSchemeBit, 1> {};
  class DebuggableBit : public BitField<uint8_t, bool, kDebuggableBit, 1> {};
  class InFullSnapshotBit
      : public BitField<uint8_t, bool, kInFullSnapshotBit, 1> {};

  RAW_HEAP_OBJECT_IMPLEMENTATION(Library);

  COMPRESSED_POINTER_FIELD(StringPtr, name)
  VISIT_FROM(name)
  COMPRESSED_POINTER_FIELD(StringPtr, url)
  COMPRESSED_POINTER_FIELD(StringPtr, private_key)
  // Top-level names in this library.
  COMPRESSED_POINTER_FIELD(ArrayPtr, dictionary)
  // Metadata on classes, methods etc.
  COMPRESSED_POINTER_FIELD(ArrayPtr, metadata)
  // Class containing top-level elements.
  COMPRESSED_POINTER_FIELD(ClassPtr, toplevel_class)
  COMPRESSED_POINTER_FIELD(GrowableObjectArrayPtr, used_scripts)
  COMPRESSED_POINTER_FIELD(LoadingUnitPtr, loading_unit)
  // List of Namespaces imported without prefix.
  COMPRESSED_POINTER_FIELD(ArrayPtr, imports)
  // List of re-exported Namespaces.
  COMPRESSED_POINTER_FIELD(ArrayPtr, exports)
  COMPRESSED_POINTER_FIELD(ArrayPtr, dependencies)
#if !defined(DART_PRECOMPILED_RUNTIME)
  COMPRESSED_POINTER_FIELD(KernelProgramInfoPtr, kernel_program_info)
#endif
1715 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
1716 switch (kind) {
1717 case Snapshot::kFullAOT:
1718 return reinterpret_cast<CompressedObjectPtr*>(&exports_);
1719 case Snapshot::kFull:
1720 case Snapshot::kFullCore:
1721 case Snapshot::kFullJIT:
1722#if !defined(DART_PRECOMPILED_RUNTIME)
1723 return reinterpret_cast<CompressedObjectPtr*>(&kernel_program_info_);
1724#else
1725 UNREACHABLE();
1726 return nullptr;
1727#endif
1728 case Snapshot::kNone:
1729 case Snapshot::kInvalid:
1730 break;
1731 }
1732 UNREACHABLE();
1733 return nullptr;
1734 }
1735 // Array of scripts loaded in this library.
1736 COMPRESSED_POINTER_FIELD(ArrayPtr, loaded_scripts);
1737 VISIT_TO(loaded_scripts);
1738
1739 Dart_NativeEntryResolver native_entry_resolver_; // Resolves natives.
1740 Dart_NativeEntrySymbol native_entry_symbol_resolver_;
1741 Dart_FfiNativeResolver ffi_native_resolver_;
1742
1743 classid_t index_; // Library id number.
1744 uint16_t num_imports_; // Number of entries in imports_.
1745 int8_t load_state_; // Of type LibraryState.
1746 uint8_t flags_; // BitField for LibraryFlags.
1747
1748#if !defined(DART_PRECOMPILED_RUNTIME)
1749 uint32_t kernel_library_index_;
1750#endif // !defined(DART_PRECOMPILED_RUNTIME)
1751
1752 friend class Class;
1753 friend class Isolate;
1754};
1755
1756class UntaggedNamespace : public UntaggedObject {
1757 RAW_HEAP_OBJECT_IMPLEMENTATION(Namespace);
1758
1759 // library with name dictionary.
1760 COMPRESSED_POINTER_FIELD(LibraryPtr, target)
1761 VISIT_FROM(target)
1762 // list of names that are exported.
1763 COMPRESSED_POINTER_FIELD(ArrayPtr, show_names)
1764 // list of names that are hidden.
1765 COMPRESSED_POINTER_FIELD(ArrayPtr, hide_names)
1766 COMPRESSED_POINTER_FIELD(LibraryPtr, owner)
1767 VISIT_TO(owner)
1768 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
1769 switch (kind) {
1770 case Snapshot::kFullAOT:
1771 return reinterpret_cast<CompressedObjectPtr*>(&target_);
1772 case Snapshot::kFull:
1773 case Snapshot::kFullCore:
1774 case Snapshot::kFullJIT:
1775 return reinterpret_cast<CompressedObjectPtr*>(&owner_);
1776 case Snapshot::kNone:
1777 case Snapshot::kInvalid:
1778 break;
1779 }
1780 UNREACHABLE();
1781 return nullptr;
1782 }
1783};
1784
1785// Contains information about a kernel [Component].
1786//
1787// Used to access string tables, canonical name tables, constants, metadata, ...
1788class UntaggedKernelProgramInfo : public UntaggedObject {
1789 RAW_HEAP_OBJECT_IMPLEMENTATION(KernelProgramInfo);
1790
1791 COMPRESSED_POINTER_FIELD(TypedDataBasePtr, kernel_component)
1792 VISIT_FROM(kernel_component)
1793 COMPRESSED_POINTER_FIELD(TypedDataPtr, string_offsets)
1794 COMPRESSED_POINTER_FIELD(TypedDataViewPtr, string_data)
1795 COMPRESSED_POINTER_FIELD(TypedDataPtr, canonical_names)
1796 COMPRESSED_POINTER_FIELD(TypedDataViewPtr, metadata_payloads)
1797 COMPRESSED_POINTER_FIELD(TypedDataViewPtr, metadata_mappings)
1798 COMPRESSED_POINTER_FIELD(ArrayPtr, scripts)
1799 COMPRESSED_POINTER_FIELD(ArrayPtr, constants)
1800 COMPRESSED_POINTER_FIELD(TypedDataViewPtr, constants_table)
1801 COMPRESSED_POINTER_FIELD(ArrayPtr, libraries_cache)
1802 COMPRESSED_POINTER_FIELD(ArrayPtr, classes_cache)
1803 VISIT_TO(classes_cache)
1804
1805 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
1806 return reinterpret_cast<CompressedObjectPtr*>(&constants_table_);
1807 }
1808};
1809
1810class UntaggedWeakSerializationReference : public UntaggedObject {
1811 RAW_HEAP_OBJECT_IMPLEMENTATION(WeakSerializationReference);
1812
1813 COMPRESSED_POINTER_FIELD(ObjectPtr, target)
1814 VISIT_FROM(target)
1815 COMPRESSED_POINTER_FIELD(ObjectPtr, replacement)
1816 VISIT_TO(replacement)
1817};
1818
1819class UntaggedWeakArray : public UntaggedObject {
1820 RAW_HEAP_OBJECT_IMPLEMENTATION(WeakArray);
1821
1822 COMPRESSED_POINTER_FIELD(WeakArrayPtr, next_seen_by_gc)
1823
1824 COMPRESSED_SMI_FIELD(SmiPtr, length)
1825 VISIT_FROM(length)
1826 // Variable length data follows here.
1827 COMPRESSED_VARIABLE_POINTER_FIELDS(ObjectPtr, element, data)
1828
1829 template <typename Table, bool kAllCanonicalObjectsAreIncludedIntoSet>
1830 friend class CanonicalSetDeserializationCluster;
1831 template <typename Type, typename PtrType>
1832 friend class GCLinkedList;
1833 template <bool>
1834 friend class MarkingVisitorBase;
1835 template <bool>
1836 friend class ScavengerVisitorBase;
1837 friend class Scavenger;
1838};
1839
1840// WeakArray is special in that it has a pointer field which is not
1841// traversed by pointer visitors, and thus not in the range [from(),to()]:
1842// next_seen_by_gc, which is before the other fields.
1843template <>
1844DART_FORCE_INLINE uword UntaggedObject::from_offset<UntaggedWeakArray>() {
1845 return OFFSET_OF(UntaggedWeakArray, next_seen_by_gc_);
1846}
1847
1848class UntaggedCode : public UntaggedObject {
1849 RAW_HEAP_OBJECT_IMPLEMENTATION(Code);
1850
1851 // When in the precompiled runtime, there is no disabling of Code objects
1852 // and thus no active_instructions_ field. Thus, the entry point caches are
1853 // only set once during deserialization. If not using bare instructions,
1854 // the caches should match the entry points for instructions_.
1855 //
1856 // Otherwise, they should contain entry points for active_instructions_.
1857
1858 uword entry_point_; // Accessed from generated code.
1859
1860 // In AOT this entry-point supports switchable calls. It checks the type of
1861 // the receiver on entry to the function and calls a stub to patch up the
1862 // caller if they mismatch.
1863 uword monomorphic_entry_point_; // Accessed from generated code (AOT only).
1864
1865 // Entry-point used from call-sites with some additional static information.
1866 // The exact behavior of this entry-point depends on the kind of function:
1867 //
1868 // kRegularFunction/kSetter/kGetter:
1869 //
1870 // Call-site is assumed to know that the (type) arguments are invariantly
1871 // type-correct against the actual runtime-type of the receiver. For
1872 // instance, this entry-point is used for invocations against "this" and
1873 // invocations from IC stubs that test the class type arguments.
1874 //
1875 // kClosureFunction:
1876 //
1877 // Call-site is assumed to pass the correct number of positional and type
1878 // arguments (except in the case of partial instantiation, when the type
1879 // arguments are omitted). All (type) arguments are assumed to match the
1880 // corresponding (type) parameter types (bounds).
1881 //
1882 // kImplicitClosureFunction:
1883 //
1884 // Similar to kClosureFunction, except that the types (bounds) of the (type)
1885 // arguments are expected to match the *runtime signature* of the closure,
1886 // which (unlike with kClosureFunction) may have more general (type)
1887 // parameter types (bounds) than the declared type of the forwarded method.
1888 //
1889 // In many cases a distinct static entry-point will not be created for a
1890 // function if it would not be able to skip a lot of work (e.g., no argument
1891 // type checks are necessary or this Code belongs to a stub). In this case
1892 // 'unchecked_entry_point_' will refer to the same position as 'entry_point_'.
1893 //
1894 uword unchecked_entry_point_; // Accessed from generated code.
1895 uword monomorphic_unchecked_entry_point_; // Accessed from generated code.
1896
1897 POINTER_FIELD(ObjectPoolPtr, object_pool) // Accessed from generated code.
1898 VISIT_FROM(object_pool)
1899 POINTER_FIELD(InstructionsPtr,
1900 instructions) // Accessed from generated code.
1901 // If owner_ is Function::null() the owner is a regular stub.
1902 // If owner_ is a Class the owner is the allocation stub for that class.
1903 // Else, owner_ is a regular Dart Function.
1904 POINTER_FIELD(ObjectPtr, owner) // Function, Null, or a Class.
1905 POINTER_FIELD(ExceptionHandlersPtr, exception_handlers)
1906 POINTER_FIELD(PcDescriptorsPtr, pc_descriptors)
1907 // If FLAG_precompiled_mode, then this field contains
1908 // TypedDataPtr catch_entry_moves_maps
1909 // Otherwise, it is
1910 // SmiPtr num_variables
1911 POINTER_FIELD(ObjectPtr, catch_entry)
1912 POINTER_FIELD(CompressedStackMapsPtr, compressed_stackmaps)
1913 POINTER_FIELD(ArrayPtr, inlined_id_to_function)
1914 POINTER_FIELD(CodeSourceMapPtr, code_source_map)
1915 NOT_IN_PRECOMPILED(POINTER_FIELD(InstructionsPtr, active_instructions))
1916 NOT_IN_PRECOMPILED(POINTER_FIELD(ArrayPtr, deopt_info_array))
1917 // (code-offset, function, code) triples.
1918 NOT_IN_PRECOMPILED(POINTER_FIELD(ArrayPtr, static_calls_target_table))
1919 // If return_address_metadata_ is a Smi, it is the offset to the prologue.
1920 // Else, return_address_metadata_ is null.
1921 NOT_IN_PRODUCT(POINTER_FIELD(ObjectPtr, return_address_metadata))
1922 NOT_IN_PRODUCT(POINTER_FIELD(LocalVarDescriptorsPtr, var_descriptors))
1923 NOT_IN_PRODUCT(POINTER_FIELD(ArrayPtr, comments))
1924
1925#if !defined(PRODUCT)
1926 VISIT_TO(comments);
1927#elif defined(DART_PRECOMPILED_RUNTIME)
1928 VISIT_TO(code_source_map);
1929#else
1930 VISIT_TO(static_calls_target_table);
1931#endif
1932
1933 // Compilation timestamp.
1934 NOT_IN_PRODUCT(alignas(8) int64_t compile_timestamp_);
1935
1936 // state_bits_ is a bitfield with three fields:
1937 // The optimized bit, the alive bit, and a count of the number of pointer
1938 // offsets.
1939 // Alive: If true, the embedded object pointers will be visited during GC.
1940 int32_t state_bits_;
1941 // Caches the unchecked entry point offset for instructions_, in case we need
1942 // to reset the active_instructions_ to instructions_.
1943 NOT_IN_PRECOMPILED(uint32_t unchecked_offset_);
1944 // Stores the instructions length when not using RawInstructions objects.
1945 ONLY_IN_PRECOMPILED(uint32_t instructions_length_);
1946
1947 // Variable length data follows here.
1948 int32_t* data() { OPEN_ARRAY_START(int32_t, int32_t); }
1949 const int32_t* data() const { OPEN_ARRAY_START(int32_t, int32_t); }
1950
1951 static bool ContainsPC(const ObjectPtr raw_obj, uword pc);
1952
1953 friend class Function;
1954 template <bool>
1955 friend class MarkingVisitorBase;
1956 friend class StackFrame;
1957 friend class Profiler;
1958 friend class FunctionDeserializationCluster;
1959 friend class UnitSerializationRoots;
1960 friend class UnitDeserializationRoots;
1961 friend class CallSiteResetter;
1962};
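// --- Editorial sketch, not part of raw_object.h: the four entry points above
// form a 2x2 matrix over (single receiver class expected?, argument types
// already proven?). A call site that has established either property can,
// conceptually, pick the cheapest entry as below; the function and parameter
// names are illustrative only, not the VM's actual dispatch API.
static inline uword SelectCodeEntryPointSketch(
    bool single_receiver_class,   // Monomorphic entries check/assume one cid.
    bool argument_types_proven,   // Unchecked entries skip argument checks.
    uword entry,
    uword unchecked_entry,
    uword monomorphic_entry,
    uword monomorphic_unchecked_entry) {
  if (single_receiver_class) {
    return argument_types_proven ? monomorphic_unchecked_entry
                                 : monomorphic_entry;
  }
  return argument_types_proven ? unchecked_entry : entry;
}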
1963
1964class UntaggedObjectPool : public UntaggedObject {
1965 RAW_HEAP_OBJECT_IMPLEMENTATION(ObjectPool);
1966
1967 intptr_t length_;
1968
1969 struct Entry {
1970 union {
1971 ObjectPtr raw_obj_;
1972 uword raw_value_;
1973 };
1974 };
1975 Entry* data() { OPEN_ARRAY_START(Entry, Entry); }
1976 Entry const* data() const { OPEN_ARRAY_START(Entry, Entry); }
1977 DEFINE_CONTAINS_COMPRESSED(decltype(Entry::raw_obj_));
1978
1979 // The entry bits are located after the last entry. They are encoded versions
1980 // of `ObjectPool::TypeBits() | ObjectPool::PatchabilityBit()`.
1981 uint8_t* entry_bits() { return reinterpret_cast<uint8_t*>(&data()[length_]); }
1982 uint8_t const* entry_bits() const {
1983 return reinterpret_cast<uint8_t const*>(&data()[length_]);
1984 }
1985
1986 friend class Object;
1987 friend class CodeSerializationCluster;
1988 friend class UnitSerializationRoots;
1989 friend class UnitDeserializationRoots;
1990};
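// --- Editorial sketch, not part of raw_object.h: per the comment above, an
// ObjectPool body is laid out as [length_][Entry x N][uint8_t bits x N], so
// the metadata byte for entry i sits N entries past the start of the entry
// array. Entry is a pointer-sized union, which this standalone model assumes.
static inline size_t ObjectPoolEntryBitsOffsetSketch(size_t num_entries,
                                                     size_t index) {
  return num_entries * sizeof(void*) + index;  // Bytes past data().
}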
1991
1992class UntaggedInstructions : public UntaggedObject {
1993 RAW_HEAP_OBJECT_IMPLEMENTATION(Instructions);
1994 VISIT_NOTHING();
1995
1996 // Instructions size in bytes and flags.
1997 uint32_t size_and_flags_;
1998
1999 // Variable length data follows here.
2000 uint8_t* data() { OPEN_ARRAY_START(uint8_t, uint8_t); }
2001
2002 // Private helper function used while visiting stack frames. The
2003 // code which iterates over dart frames is also called during GC and
2004 // is not allowed to create handles.
2005 static bool ContainsPC(const InstructionsPtr raw_instr, uword pc);
2006
2007 friend class UntaggedCode;
2008 friend class UntaggedFunction;
2009 friend class Code;
2010 friend class StackFrame;
2011 template <bool>
2012 friend class MarkingVisitorBase;
2013 friend class Function;
2014 friend class ImageReader;
2015 friend class ImageWriter;
2016 friend class AssemblyImageWriter;
2017 friend class BlobImageWriter;
2018};
2019
2020// Used to carry extra information to the VM without changing the embedder
2021// interface, to provide memory accounting for the bare instruction payloads
2022// we serialize, since they are no longer part of RawInstructions objects,
2023// and to avoid special casing bare instructions payload Images in the GC.
2024class UntaggedInstructionsSection : public UntaggedObject {
2025 RAW_HEAP_OBJECT_IMPLEMENTATION(InstructionsSection);
2026 VISIT_NOTHING();
2027
2028 // Instructions section payload length in bytes.
2029 uword payload_length_;
2030 // The offset of the corresponding BSS section from this text section.
2031 word bss_offset_;
2032 // The relocated address of this text section in the shared object. Properly
2033 // filled for ELF snapshots, always 0 in assembly snapshots. (For the latter,
2034 // we instead get the value during BSS initialization and store it there.)
2035 uword instructions_relocated_address_;
2036 // The offset of the GNU build ID note section from this text section.
2037 word build_id_offset_;
2038
2039 // Variable length data follows here.
2040 uint8_t* data() { OPEN_ARRAY_START(uint8_t, uint8_t); }
2041
2042 friend class Image;
2043};
2044
2045class UntaggedPcDescriptors : public UntaggedObject {
2046 public:
2047// The macro argument V is passed two arguments, the raw name of the enum value
2048// and the initialization expression used within the enum definition. The uses
2049// of enum values inside the initialization expression are hardcoded currently,
2050// so the second argument is useless outside the enum definition and should be
2051// dropped by other users of this macro.
2052#define FOR_EACH_RAW_PC_DESCRIPTOR(V) \
2053 /* Deoptimization continuation point. */ \
2054 V(Deopt, 1) \
2055 /* IC call. */ \
2056 V(IcCall, kDeopt << 1) \
2057 /* Call to a known target via stub. */ \
2058 V(UnoptStaticCall, kIcCall << 1) \
2059 /* Runtime call. */ \
2060 V(RuntimeCall, kUnoptStaticCall << 1) \
2061 /* OSR entry point in unopt. code. */ \
2062 V(OsrEntry, kRuntimeCall << 1) \
2063 /* Call rewind target address. */ \
2064 V(Rewind, kOsrEntry << 1) \
2065 /* Target-word-size relocation. */ \
2066 V(BSSRelocation, kRewind << 1) \
2067 V(Other, kBSSRelocation << 1) \
2068 V(AnyKind, -1)
2069
2070 enum Kind {
2071#define ENUM_DEF(name, init) k##name = init,
2072 FOR_EACH_RAW_PC_DESCRIPTOR(ENUM_DEF)
2073#undef ENUM_DEF
2074 kLastKind = kOther,
2075 };
2076
2077 static const char* KindToCString(Kind k);
2078 static bool ParseKind(const char* cstr, Kind* out);
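 // --- Editorial sketch: as the comment above FOR_EACH_RAW_PC_DESCRIPTOR
 // notes, users other than the Kind enum ignore the initializer argument.
 // KindToCString could plausibly be implemented with the X-macro like so
 // (kept in a comment because this is an illustration, not the VM's actual
 // definition):
 //
 //   const char* UntaggedPcDescriptors::KindToCString(Kind k) {
 //   #define RETURN_NAME(name, unused_init) \
 //     if (k == k##name) return #name;
 //     FOR_EACH_RAW_PC_DESCRIPTOR(RETURN_NAME)
 //   #undef RETURN_NAME
 //     return nullptr;
 //   }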
2079
2080 // Used to represent the absence of a yield index in PcDescriptors.
2081 static constexpr intptr_t kInvalidYieldIndex = -1;
2082
2083 class KindAndMetadata {
2084 public:
2085 // Most of the time try_index will be small and merged field will fit into
2086 // one byte.
2087 static uint32_t Encode(intptr_t kind,
2088 intptr_t try_index,
2089 intptr_t yield_index) {
2090 return KindShiftBits::encode(Utils::ShiftForPowerOfTwo(kind)) |
2091 TryIndexBits::encode(try_index + 1) |
2092 YieldIndexBits::encode(yield_index + 1);
2093 }
2094
2095 static intptr_t DecodeKind(uint32_t kind_and_metadata) {
2096 return 1 << KindShiftBits::decode(kind_and_metadata);
2097 }
2098
2099 static intptr_t DecodeTryIndex(uint32_t kind_and_metadata) {
2100 return TryIndexBits::decode(kind_and_metadata) - 1;
2101 }
2102
2103 static intptr_t DecodeYieldIndex(uint32_t kind_and_metadata) {
2104 return YieldIndexBits::decode(kind_and_metadata) - 1;
2105 }
2106
2107 private:
2108 static constexpr intptr_t kKindShiftSize = 3;
2109 static constexpr intptr_t kTryIndexSize = 10;
2110 static constexpr intptr_t kYieldIndexSize =
2111 32 - kKindShiftSize - kTryIndexSize;
2112
2113 class KindShiftBits
2114 : public BitField<uint32_t, intptr_t, 0, kKindShiftSize> {};
2115 class TryIndexBits : public BitField<uint32_t,
2116 intptr_t,
2117 KindShiftBits::kNextBit,
2118 kTryIndexSize> {};
2119 class YieldIndexBits : public BitField<uint32_t,
2120 intptr_t,
2121 TryIndexBits::kNextBit,
2122 kYieldIndexSize> {};
2123 };
2124
2125 private:
2126 RAW_HEAP_OBJECT_IMPLEMENTATION(PcDescriptors);
2127 VISIT_NOTHING();
2128
2129 // Number of descriptors. This only needs to be an int32_t, but we make it a
2130 // uword so that the variable length data is 64 bit aligned on 64 bit
2131 // platforms.
2132 uword length_;
2133
2134 // Variable length data follows here.
2135 uint8_t* data() { OPEN_ARRAY_START(uint8_t, intptr_t); }
2136 const uint8_t* data() const { OPEN_ARRAY_START(uint8_t, intptr_t); }
2137
2138 friend class Object;
2139 friend class ImageWriter;
2140};
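// --- Editorial sketch, not part of raw_object.h: a round trip through the
// KindAndMetadata packing above. Kind values are single bits, so Encode()
// stores log2(kind) in the shift field and DecodeKind() rebuilds the kind
// with 1 << shift; try/yield indices carry a +1 bias so that -1 ("absent")
// encodes as 0.
static inline void PcDescriptorsPackingSketch() {
  using KM = UntaggedPcDescriptors::KindAndMetadata;
  const uint32_t packed =
      KM::Encode(UntaggedPcDescriptors::kIcCall, /*try_index=*/3,
                 UntaggedPcDescriptors::kInvalidYieldIndex);
  ASSERT(KM::DecodeKind(packed) == UntaggedPcDescriptors::kIcCall);
  ASSERT(KM::DecodeTryIndex(packed) == 3);
  ASSERT(KM::DecodeYieldIndex(packed) ==
         UntaggedPcDescriptors::kInvalidYieldIndex);
}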
2141
2142// CodeSourceMap encodes a mapping from code PC ranges to source token
2143// positions and the stack of inlined functions.
2144class UntaggedCodeSourceMap : public UntaggedObject {
2145 private:
2146 RAW_HEAP_OBJECT_IMPLEMENTATION(CodeSourceMap);
2147 VISIT_NOTHING();
2148
2149 // Length in bytes. This only needs to be an int32_t, but we make it a uword
2150 // so that the variable length data is 64 bit aligned on 64 bit platforms.
2151 uword length_;
2152
2153 // Variable length data follows here.
2154 uint8_t* data() { OPEN_ARRAY_START(uint8_t, intptr_t); }
2155 const uint8_t* data() const { OPEN_ARRAY_START(uint8_t, intptr_t); }
2156
2157 friend class Object;
2158 friend class ImageWriter;
2159};
2160
2161// RawCompressedStackMaps is a compressed representation of the stack maps
2162// for certain PC offsets into a set of instructions, where a stack map is a bit
2163// map that marks each live object index starting from the base of the frame.
2164class UntaggedCompressedStackMaps : public UntaggedObject {
2165 RAW_HEAP_OBJECT_IMPLEMENTATION(CompressedStackMaps);
2166 VISIT_NOTHING();
2167
2168 public:
2169 // Note: AOT snapshots pack these structures without any padding in between
2170 // so payload structure should not have any alignment requirements.
2171 // alignas(1) is here to trigger a compiler error if we violate this.
2172 struct alignas(1) Payload {
2173 using FlagsAndSizeHeader = uint32_t;
2174
2175 // The most significant bits are the length of the encoded payload, in
2176 // bytes (excluding the header itself). The low bits determine the
2177 // expected payload contents, as described below.
2178 DART_FORCE_INLINE FlagsAndSizeHeader flags_and_size() const {
2179 // Note: |this| does not necessarily satisfy alignment requirements
2180 // of uint32_t, hence the byte-wise bit_copy below.
2181 return bit_copy<FlagsAndSizeHeader, Payload>(*this);
2182 }
2183
2184 DART_FORCE_INLINE void set_flags_and_size(FlagsAndSizeHeader value) {
2185 // Note: |this| does not necessarily satisfy alignment requirements
2186 // of uint32_t hence the byte copy below.
2187 memcpy(reinterpret_cast<void*>(this), &value, sizeof(value)); // NOLINT
2188 }
2189
2190 // Variable length data follows here. The contents of the payload depend on
2191 // the type of CompressedStackMaps (CSM) being represented. There are three
2192 // major types of CSM:
2193 //
2194 // 1) GlobalTableBit = false, UsesTableBit = false: CSMs that include all
2195 // information about the stack maps. The payload for these contain
2196 // tightly packed entries with the following information:
2197 //
2198 // * A header containing the following three pieces of information:
2199 // * An unsigned integer representing the PC offset as a delta from the
2200 // PC offset of the previous entry (from 0 for the first entry).
2201 // * An unsigned integer representing the number of bits used for
2202 // spill slot entries.
2203 // * An unsigned integer representing the number of bits used for other
2204 // entries.
2205 // * The body containing the bits for the stack map. The length of
2206 // the body in bits is the sum of the spill slot and non-spill slot
2207 // bit counts.
2208 //
2209 // 2) GlobalTableBit = false, UsesTableBit = true: CSMs where the majority
2210 // of the stack map information has been offloaded and canonicalized into
2211 // a global table. The payload contains tightly packed entries with the
2212 // following information:
2213 //
2214 // * A header containing just an unsigned integer representing the PC
2215 // offset delta as described above.
2216 // * The body is just an unsigned integer containing the offset into the
2217 // payload for the global table.
2218 //
2219 // 3) GlobalTableBit = true, UsesTableBit = false: A CSM implementing the
2220 // global table. Here, the payload contains tightly packed entries with
2221 // the following information:
2222 //
2223 // * A header containing the following two pieces of information:
2224 // * An unsigned integer representing the number of bits used for
2225 // spill slot entries.
2226 // * An unsigned integer representing the number of bits used for other
2227 // entries.
2228 // * The body containing the bits for the stack map. The length of the
2229 // body in bits is the sum of the spill slot and non-spill slot bit
2230 // counts.
2231 //
2232 // In all types of CSM, each unsigned integer is LEB128 encoded, as
2233 // generally they tend to fit in a single byte or two. Thus, entry headers
2234 // are not a fixed length, and currently there is no random access of
2235 // entries. In addition, PC offsets are currently encoded as deltas, which
2236 // also inhibits random access without accessing previous entries. That
2237 // means to find an entry for a given PC offset, a linear search must be
2238 // done where the payload is decoded up to the entry whose PC offset
2239 // is greater or equal to the given PC.
2240
2241 uint8_t* data() {
2242 return reinterpret_cast<uint8_t*>(this) + sizeof(FlagsAndSizeHeader);
2243 }
2244
2245 const uint8_t* data() const {
2246 return reinterpret_cast<const uint8_t*>(this) +
2247 sizeof(FlagsAndSizeHeader);
2248 }
2249 };
2250
2251 private:
2252 // We are using OPEN_ARRAY_START rather than embedding Payload directly into
2253 // the UntaggedCompressedStackMaps as a field because that would introduce a
2254 // padding at the end of UntaggedCompressedStackMaps - so we would not be
2255 // able to use sizeof(UntaggedCompressedStackMaps) as the size of the header
2256 // anyway.
2257 Payload* payload() { OPEN_ARRAY_START(Payload, uint8_t); }
2258 const Payload* payload() const { OPEN_ARRAY_START(Payload, uint8_t); }
2259
2260 class GlobalTableBit
2261 : public BitField<Payload::FlagsAndSizeHeader, bool, 0, 1> {};
2262 class UsesTableBit : public BitField<Payload::FlagsAndSizeHeader,
2263 bool,
2264 GlobalTableBit::kNextBit,
2265 1> {};
2266 class SizeField
2267 : public BitField<Payload::FlagsAndSizeHeader,
2268 Payload::FlagsAndSizeHeader,
2269 UsesTableBit::kNextBit,
2270 sizeof(Payload::FlagsAndSizeHeader) * kBitsPerByte -
2271 UsesTableBit::kNextBit> {};
2272
2273 friend class Object;
2274 friend class ImageWriter;
2275 friend class StackMapEntry;
2276};
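// --- Editorial sketch, not part of raw_object.h: what a lookup over the
// payload documented above has to do for type 1 CSMs (GlobalTableBit and
// UsesTableBit both false). The decoder below is a textbook unsigned LEB128
// reader; the VM's exact byte-level variant may differ, and the assumption
// that each entry's stack-map bits occupy a whole number of bytes is this
// sketch's, not the VM's.
static inline const uint8_t* DecodeLEB128Sketch(const uint8_t* cursor,
                                                uintptr_t* out) {
  uintptr_t value = 0;
  int shift = 0;
  uint8_t byte;
  do {
    byte = *cursor++;
    value |= static_cast<uintptr_t>(byte & 0x7f) << shift;
    shift += 7;
  } while ((byte & 0x80) != 0);
  *out = value;
  return cursor;
}

// Linear scan: PC offsets are stored as deltas, so there is no random access;
// decode entries until one reaches or passes the target PC offset.
static inline bool FindStackMapEntrySketch(const uint8_t* payload,
                                           const uint8_t* payload_end,
                                           uintptr_t target_pc_offset) {
  uintptr_t pc_offset = 0;
  while (payload < payload_end) {
    uintptr_t delta, spill_bits, other_bits;
    payload = DecodeLEB128Sketch(payload, &delta);
    payload = DecodeLEB128Sketch(payload, &spill_bits);
    payload = DecodeLEB128Sketch(payload, &other_bits);
    pc_offset += delta;
    if (pc_offset >= target_pc_offset) {
      return true;  // The stack map bits follow at 'payload'.
    }
    payload += (spill_bits + other_bits + 7) / 8;  // Skip this entry's body.
  }
  return false;
}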
2277
2278class UntaggedInstructionsTable : public UntaggedObject {
2279 RAW_HEAP_OBJECT_IMPLEMENTATION(InstructionsTable);
2280
2281 POINTER_FIELD(ArrayPtr, code_objects)
2282 VISIT_FROM(code_objects)
2283 VISIT_TO(code_objects)
2284
2285 struct DataEntry {
2286 uint32_t pc_offset;
2287 uint32_t stack_map_offset;
2288 };
2289 static_assert(sizeof(DataEntry) == sizeof(uint32_t) * 2);
2290
2291 struct Data {
2292 uint32_t canonical_stack_map_entries_offset;
2293 uint32_t length;
2294 uint32_t first_entry_with_code;
2295 uint32_t padding;
2296
2297 const DataEntry* entries() const { OPEN_ARRAY_START(DataEntry, uint32_t); }
2298
2299 const UntaggedCompressedStackMaps::Payload* StackMapAt(
2300 intptr_t offset) const {
2301 return reinterpret_cast<UntaggedCompressedStackMaps::Payload*>(
2302 reinterpret_cast<uword>(this) + offset);
2303 }
2304 };
2305 static_assert(sizeof(Data) == sizeof(uint32_t) * 4);
2306
2307 intptr_t length_;
2308 const Data* rodata_;
2309 uword start_pc_;
2310 uword end_pc_;
2311
2312 friend class Deserializer;
2313};
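// --- Editorial note: given the layout above, rodata_ points at a Data header
// followed by DataEntry records, each mapping a pc_offset to a
// stack_map_offset that Data::StackMapAt() resolves into an
// UntaggedCompressedStackMaps::Payload. A PC lookup therefore searches
// entries() for the matching pc_offset and then reads the payload at
// entry.stack_map_offset (search strategy inferred from the layout, not
// quoted from the VM sources).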
2314
2315class UntaggedLocalVarDescriptors : public UntaggedObject {
2316 public:
2317 enum VarInfoKind {
2318 kStackVar = 1,
2319 kContextVar,
2320 kContextLevel,
2321 kSavedCurrentContext,
2322 };
2323
2324 enum {
2325 kKindPos = 0,
2326 kKindSize = 8,
2327 kIndexPos = kKindPos + kKindSize,
2328 // Since there are 24 bits for the stack slot index, Functions can have
2329 // only ~16.7 million stack slots.
2330 kPayloadSize = sizeof(int32_t) * kBitsPerByte,
2331 kIndexSize = kPayloadSize - kIndexPos,
2332 kIndexBias = 1 << (kIndexSize - 2),
2333 kMaxIndex = (1 << (kIndexSize - 1)) - 1,
2334 };
2335
2336 class IndexBits : public BitField<int32_t, int32_t, kIndexPos, kIndexSize> {};
2337 class KindBits : public BitField<int32_t, int8_t, kKindPos, kKindSize> {};
2338
2339 struct VarInfo {
2340 int32_t index_kind = 0; // Bitfield for slot index on stack or in context,
2341 // and Entry kind of type VarInfoKind.
2342 TokenPosition declaration_pos =
2343 TokenPosition::kNoSource; // Token position of declaration.
2344 TokenPosition begin_pos =
2345 TokenPosition::kNoSource; // Token position of scope start.
2346 TokenPosition end_pos =
2347 TokenPosition::kNoSource; // Token position of scope end.
2348 int16_t scope_id; // Scope to which the variable belongs.
2349
2350 VarInfoKind kind() const {
2351 return static_cast<VarInfoKind>(KindBits::decode(index_kind));
2352 }
2353 void set_kind(VarInfoKind kind) {
2354 index_kind = KindBits::update(kind, index_kind);
2355 }
2356 int32_t index() const { return IndexBits::decode(index_kind) - kIndexBias; }
2357 void set_index(int32_t index) {
2358 index_kind = IndexBits::update(index + kIndexBias, index_kind);
2359 }
2360 };
2361
2362 private:
2363 RAW_HEAP_OBJECT_IMPLEMENTATION(LocalVarDescriptors);
2364 // Number of descriptors. This only needs to be an int32_t, but we make it a
2365 // uword so that the variable length data is 64 bit aligned on 64 bit
2366 // platforms.
2367 uword num_entries_;
2368
2369 VISIT_FROM_PAYLOAD_START(CompressedStringPtr)
2370 COMPRESSED_VARIABLE_POINTER_FIELDS(StringPtr, name, names)
2371
2372 CompressedStringPtr* nameAddrAt(intptr_t i) { return &(names()[i]); }
2373 void set_name(intptr_t i, StringPtr value) {
2374 StoreCompressedPointer(nameAddrAt(i), value);
2375 }
2376
2377 // Variable info with [num_entries_] entries.
2378 VarInfo* data() {
2379 return reinterpret_cast<VarInfo*>(nameAddrAt(num_entries_));
2380 }
2381
2382 friend class Object;
2383};
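// --- Editorial sketch, not part of raw_object.h: the kIndexBias arithmetic
// above keeps signed slot indices non-negative inside the unsigned IndexBits
// field. With kKindSize = 8 the index field spans 24 bits and the bias is
// 1 << 22, so negative frame indices (e.g. parameter slots) survive the
// round trip:
static inline void VarInfoIndexRoundTripSketch() {
  UntaggedLocalVarDescriptors::VarInfo info;
  info.set_index(-5);
  ASSERT(info.index() == -5);
  info.set_index(UntaggedLocalVarDescriptors::kMaxIndex);
  ASSERT(info.index() == UntaggedLocalVarDescriptors::kMaxIndex);
}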
2384
2385class UntaggedExceptionHandlers : public UntaggedObject {
2386 private:
2387 RAW_HEAP_OBJECT_IMPLEMENTATION(ExceptionHandlers);
2388
2389 // Number of exception handler entries and
2390 // async handler.
2391 uint32_t packed_fields_;
2392
2393 // Async handler is used in the async/async* functions.
2394 // It's an implicit exception handler (stub) which runs when an
2395 // exception is not handled within the function.
2396 using AsyncHandlerBit = BitField<decltype(packed_fields_), bool, 0, 1>;
2397 using NumEntriesBits = BitField<decltype(packed_fields_),
2398 uint32_t,
2399 AsyncHandlerBit::kNextBit,
2400 31>;
2401
2402 intptr_t num_entries() const {
2403 return NumEntriesBits::decode(packed_fields_);
2404 }
2405
2406 // Array with [num_entries] entries. Each entry is an array of all handled
2407 // exception types.
2408 COMPRESSED_POINTER_FIELD(ArrayPtr, handled_types_data)
2409 VISIT_FROM(handled_types_data)
2410 VISIT_TO(handled_types_data)
2411
2412 // Exception handler info of length [num_entries].
2413 const ExceptionHandlerInfo* data() const {
2414 OPEN_ARRAY_START(ExceptionHandlerInfo, intptr_t);
2415 }
2416 ExceptionHandlerInfo* data() {
2417 OPEN_ARRAY_START(ExceptionHandlerInfo, intptr_t);
2418 }
2419
2420 friend class Object;
2421};
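// --- Editorial sketch, not part of raw_object.h: packed_fields_ above keeps
// the async-handler flag in bit 0 and the entry count in the remaining 31
// bits, i.e. the packing is equivalent to this standalone model:
static inline uint32_t PackExceptionHandlerFieldsSketch(bool has_async_handler,
                                                        uint32_t num_entries) {
  return (has_async_handler ? 1u : 0u) | (num_entries << 1);
}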
2422
2423class UntaggedContext : public UntaggedObject {
2424 RAW_HEAP_OBJECT_IMPLEMENTATION(Context);
2425
2426 int32_t num_variables_;
2427
2428 COMPRESSED_POINTER_FIELD(ContextPtr, parent)
2429 VISIT_FROM(parent)
2430 // Variable length data follows here.
2431 COMPRESSED_VARIABLE_POINTER_FIELDS(ObjectPtr, element, data)
2432
2433 friend class Object;
2434 friend void UpdateLengthField(intptr_t,
2435 ObjectPtr,
2436 ObjectPtr); // num_variables_
2437};
2438
2439#define CONTEXT_SCOPE_VARIABLE_DESC_FLAG_LIST(V) \
2440 V(Final) \
2441 V(Late) \
2442 V(Nullable) \
2443 V(Invisible) \
2444 V(AwaiterLink)
2445
2446class UntaggedContextScope : public UntaggedObject {
2447 RAW_HEAP_OBJECT_IMPLEMENTATION(ContextScope);
2448
2449 // TODO(iposva): Switch to conventional enum offset based structure to avoid
2450 // alignment mishaps.
2451 struct VariableDesc {
2452 CompressedSmiPtr declaration_token_pos;
2453 CompressedSmiPtr token_pos;
2454 CompressedStringPtr name;
2455 CompressedSmiPtr flags;
2456 enum FlagBits {
2457#define DECLARE_BIT(Name) kIs##Name,
2458 CONTEXT_SCOPE_VARIABLE_DESC_FLAG_LIST(DECLARE_BIT)
2459#undef DECLARE_BIT
2460 };
2461 CompressedSmiPtr late_init_offset;
2462 CompressedAbstractTypePtr type;
2463 CompressedSmiPtr cid;
2464 CompressedSmiPtr context_index;
2465 CompressedSmiPtr context_level;
2466 CompressedSmiPtr kernel_offset;
2467 };
2468
2469 int32_t num_variables_;
2470 bool is_implicit_; // true, if this context scope is for an implicit closure.
2471
2472 // Just choose one of the fields in VariableDesc, since they should all be
2473 // compressed or not compressed.
2474 DEFINE_CONTAINS_COMPRESSED(decltype(VariableDesc::name));
2475
2476 CompressedObjectPtr* from() {
2477 VariableDesc* begin = const_cast<VariableDesc*>(VariableDescAddr(0));
2478 return reinterpret_cast<CompressedObjectPtr*>(begin);
2479 }
2480 // Variable length data follows here.
2481 CompressedObjectPtr const* data() const {
2482 OPEN_ARRAY_START(CompressedObjectPtr, CompressedObjectPtr);
2483 }
2484 const VariableDesc* VariableDescAddr(intptr_t index) const {
2485 // data() points to the first component of the first descriptor.
2486 return reinterpret_cast<const VariableDesc*>(data()) + index;
2487 }
2488
2489#define DEFINE_ACCESSOR(type, name) \
2490 type name##_at(intptr_t index) { \
2491 return LoadCompressedPointer<type>(&VariableDescAddr(index)->name); \
2492 } \
2493 void set_##name##_at(intptr_t index, type value) { \
2494 StoreCompressedPointer(&VariableDescAddr(index)->name, value); \
2495 }
2496 DEFINE_ACCESSOR(SmiPtr, declaration_token_pos)
2497 DEFINE_ACCESSOR(SmiPtr, token_pos)
2498 DEFINE_ACCESSOR(StringPtr, name)
2499 DEFINE_ACCESSOR(SmiPtr, flags)
2500 DEFINE_ACCESSOR(SmiPtr, late_init_offset)
2501 DEFINE_ACCESSOR(AbstractTypePtr, type)
2502 DEFINE_ACCESSOR(SmiPtr, cid)
2503 DEFINE_ACCESSOR(SmiPtr, context_index)
2504 DEFINE_ACCESSOR(SmiPtr, context_level)
2505 DEFINE_ACCESSOR(SmiPtr, kernel_offset)
2506#undef DEFINE_ACCESSOR
2507
2508 CompressedObjectPtr* to(intptr_t num_vars) {
2509 uword end = reinterpret_cast<uword>(VariableDescAddr(num_vars));
2510 // 'end' is the address just beyond the last descriptor, so step back.
2511 return reinterpret_cast<CompressedObjectPtr*>(end -
2512 sizeof(CompressedObjectPtr));
2513 }
2514 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind, intptr_t num_vars) {
2515 return to(num_vars);
2516 }
2517
2518 friend class Object;
2519 friend class UntaggedClosureData;
2520};
2521
2522class UntaggedSentinel : public UntaggedObject {
2523 RAW_HEAP_OBJECT_IMPLEMENTATION(Sentinel);
2524 VISIT_NOTHING();
2525};
2526
2527class UntaggedSingleTargetCache : public UntaggedObject {
2528 RAW_HEAP_OBJECT_IMPLEMENTATION(SingleTargetCache);
2529 POINTER_FIELD(CodePtr, target)
2530 VISIT_FROM(target)
2531 VISIT_TO(target)
2532 uword entry_point_;
2533 ClassIdTagType lower_limit_;
2534 ClassIdTagType upper_limit_;
2535};
2536
2537class UntaggedMonomorphicSmiableCall : public UntaggedObject {
2538 RAW_HEAP_OBJECT_IMPLEMENTATION(MonomorphicSmiableCall);
2539 VISIT_NOTHING();
2540
2541 uword expected_cid_;
2542 uword entrypoint_;
2543};
2544
2545// Abstract base class for RawICData/RawMegamorphicCache
2546class UntaggedCallSiteData : public UntaggedObject {
2547 protected:
2548 POINTER_FIELD(StringPtr, target_name); // Name of target function.
2549 VISIT_FROM(target_name)
2550 // arg_descriptor in RawICData and in RawMegamorphicCache should be
2551 // in the same position so that NoSuchMethod can access it.
2552 POINTER_FIELD(ArrayPtr, args_descriptor); // Arguments descriptor.
2553 VISIT_TO(args_descriptor)
2554 ObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2555
2556 private:
2557 RAW_HEAP_OBJECT_IMPLEMENTATION(CallSiteData)
2558};
2559
2560class UntaggedUnlinkedCall : public UntaggedCallSiteData {
2561 RAW_HEAP_OBJECT_IMPLEMENTATION(UnlinkedCall);
2562
2563 bool can_patch_to_monomorphic_;
2564};
2565
2566class UntaggedICData : public UntaggedCallSiteData {
2567 RAW_HEAP_OBJECT_IMPLEMENTATION(ICData);
2568 POINTER_FIELD(ArrayPtr, entries) // Contains class-ids, target and count.
2569 // Static type of the receiver, if instance call and available.
2570 NOT_IN_PRECOMPILED(POINTER_FIELD(AbstractTypePtr, receivers_static_type))
2571 POINTER_FIELD(ObjectPtr,
2572 owner) // Parent/calling function or original IC of cloned IC.
2573 VISIT_TO(owner)
2574 ObjectPtr* to_snapshot(Snapshot::Kind kind) {
2575 switch (kind) {
2576 case Snapshot::kFullAOT:
2577 return reinterpret_cast<ObjectPtr*>(&entries_);
2578 case Snapshot::kFull:
2579 case Snapshot::kFullCore:
2580 case Snapshot::kFullJIT:
2581 return to();
2582 case Snapshot::kNone:
2583 case Snapshot::kInvalid:
2584 break;
2585 }
2586 UNREACHABLE();
2587 return nullptr;
2588 }
2589 NOT_IN_PRECOMPILED(int32_t deopt_id_);
2590 // Number of arguments tested in IC, deopt reasons.
2591 uint32_t state_bits_;
2592};
2593
2594class UntaggedMegamorphicCache : public UntaggedCallSiteData {
2595 RAW_HEAP_OBJECT_IMPLEMENTATION(MegamorphicCache);
2596
2597 POINTER_FIELD(ArrayPtr, buckets)
2598 SMI_FIELD(SmiPtr, mask)
2599 VISIT_TO(mask)
2600 ObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2601
2602 int32_t filled_entry_count_;
2603};
2604
2605class UntaggedSubtypeTestCache : public UntaggedObject {
2606 RAW_HEAP_OBJECT_IMPLEMENTATION(SubtypeTestCache);
2607
2608 POINTER_FIELD(ArrayPtr, cache)
2609 VISIT_FROM(cache)
2610 VISIT_TO(cache)
2611 uint32_t num_inputs_;
2612 uint32_t num_occupied_;
2613};
2614
2615class UntaggedLoadingUnit : public UntaggedObject {
2616 RAW_HEAP_OBJECT_IMPLEMENTATION(LoadingUnit);
2617
2618 COMPRESSED_POINTER_FIELD(LoadingUnitPtr, parent)
2619 VISIT_FROM(parent)
2620 COMPRESSED_POINTER_FIELD(ArrayPtr, base_objects)
2621 VISIT_TO(base_objects)
2622 const uint8_t* instructions_image_;
2623 AtomicBitFieldContainer<intptr_t> packed_fields_;
2624
2625 enum LoadState : int8_t {
2626 kNotLoaded = 0, // Ensure this is the default state when zero-initialized.
2627 kLoadOutstanding,
2628 kLoaded,
2629 };
2630
2631 using LoadStateBits = BitField<decltype(packed_fields_), LoadState, 0, 2>;
2632 using IdBits =
2633 BitField<decltype(packed_fields_), intptr_t, LoadStateBits::kNextBit>;
2634};
2635
2636class UntaggedError : public UntaggedObject {
2637 RAW_HEAP_OBJECT_IMPLEMENTATION(Error); // Needed to allow subclasses.
2638};
2639
2640class UntaggedApiError : public UntaggedError {
2641 RAW_HEAP_OBJECT_IMPLEMENTATION(ApiError);
2642
2643 COMPRESSED_POINTER_FIELD(StringPtr, message)
2644 VISIT_FROM(message)
2645 VISIT_TO(message)
2646};
2647
2648class UntaggedLanguageError : public UntaggedError {
2649 RAW_HEAP_OBJECT_IMPLEMENTATION(LanguageError);
2650
2651 COMPRESSED_POINTER_FIELD(ErrorPtr, previous_error) // May be null.
2652 VISIT_FROM(previous_error)
2653 COMPRESSED_POINTER_FIELD(ScriptPtr, script)
2654 COMPRESSED_POINTER_FIELD(StringPtr, message)
2655 // Incl. previous error's formatted message.
2656 COMPRESSED_POINTER_FIELD(StringPtr, formatted_message)
2657 VISIT_TO(formatted_message)
2658 TokenPosition token_pos_; // Source position in script_.
2659 bool report_after_token_; // Report message at or after the token.
2660 int8_t kind_; // Of type Report::Kind.
2661
2662 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2663};
2664
2665class UntaggedUnhandledException : public UntaggedError {
2666 RAW_HEAP_OBJECT_IMPLEMENTATION(UnhandledException);
2667
2668 COMPRESSED_POINTER_FIELD(InstancePtr, exception)
2669 VISIT_FROM(exception)
2670 COMPRESSED_POINTER_FIELD(InstancePtr, stacktrace)
2671 VISIT_TO(stacktrace)
2672 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2673};
2674
2675class UntaggedUnwindError : public UntaggedError {
2676 RAW_HEAP_OBJECT_IMPLEMENTATION(UnwindError);
2677
2678 COMPRESSED_POINTER_FIELD(StringPtr, message)
2679 VISIT_FROM(message)
2680 VISIT_TO(message)
2681 bool is_user_initiated_;
2682};
2683
2684class UntaggedInstance : public UntaggedObject {
2685 RAW_HEAP_OBJECT_IMPLEMENTATION(Instance);
2686 friend class Object;
2687
2688 public:
2689#if defined(DART_COMPRESSED_POINTERS)
2690 static constexpr bool kContainsCompressedPointers = true;
2691#else
2692 static constexpr bool kContainsCompressedPointers = false;
2693#endif
2694};
2695
2696class UntaggedLibraryPrefix : public UntaggedInstance {
2697 RAW_HEAP_OBJECT_IMPLEMENTATION(LibraryPrefix);
2698
2699 // Library prefix name.
2700 COMPRESSED_POINTER_FIELD(StringPtr, name)
2701 VISIT_FROM(name)
2702 // Libraries imported with this prefix.
2703 COMPRESSED_POINTER_FIELD(ArrayPtr, imports)
2704 // Library which declares this prefix.
2705 COMPRESSED_POINTER_FIELD(LibraryPtr, importer)
2706 VISIT_TO(importer)
2707 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
2708 switch (kind) {
2709 case Snapshot::kFullAOT:
2710 return reinterpret_cast<CompressedObjectPtr*>(&imports_);
2711 case Snapshot::kFull:
2712 case Snapshot::kFullCore:
2713 case Snapshot::kFullJIT:
2714 return reinterpret_cast<CompressedObjectPtr*>(&importer_);
2715 case Snapshot::kNone:
2716 case Snapshot::kInvalid:
2717 break;
2718 }
2719 UNREACHABLE();
2720 return nullptr;
2721 }
2722 uint16_t num_imports_; // Number of library entries in libraries_.
2723 bool is_deferred_load_;
2724};
2725
2726class UntaggedTypeArguments : public UntaggedInstance {
2727 private:
2728 RAW_HEAP_OBJECT_IMPLEMENTATION(TypeArguments);
2729
2730 // The instantiations_ array remains empty for instantiated type arguments.
2731 // Of 3-tuple: 2 instantiators, result.
2732 COMPRESSED_POINTER_FIELD(ArrayPtr, instantiations)
2733 VISIT_FROM(instantiations)
2734 COMPRESSED_SMI_FIELD(SmiPtr, length)
2735 COMPRESSED_SMI_FIELD(SmiPtr, hash)
2736 COMPRESSED_SMI_FIELD(SmiPtr, nullability)
2737 // Variable length data follows here.
2738 COMPRESSED_VARIABLE_POINTER_FIELDS(AbstractTypePtr, element, types)
2739
2740 friend class Object;
2741};
2742
2743class UntaggedTypeParameters : public UntaggedObject {
2744 private:
2745 RAW_HEAP_OBJECT_IMPLEMENTATION(TypeParameters);
2746
2747 // Length of names reflects the number of type parameters.
2748 COMPRESSED_POINTER_FIELD(ArrayPtr, names)
2749 VISIT_FROM(names)
2750 // flags: isGenericCovariantImpl and (todo) variance.
2751 COMPRESSED_POINTER_FIELD(ArrayPtr, flags)
2752 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, bounds)
2753 // defaults is the instantiation to bounds (calculated by CFE).
2754 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, defaults)
2755 VISIT_TO(defaults)
2756 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2757
2758 friend class Object;
2759};
2760
2761class UntaggedAbstractType : public UntaggedInstance {
2762 protected:
2763 // Accessed from generated code.
2764 std::atomic<uword> type_test_stub_entry_point_;
2765 // Accessed from generated code.
2766 std::atomic<uint32_t> flags_;
2767#if defined(DART_COMPRESSED_POINTERS)
2768 uint32_t padding_; // Makes Windows and Posix agree on layout.
2769#endif
2770 COMPRESSED_POINTER_FIELD(CodePtr, type_test_stub)
2772 VISIT_FROM(type_test_stub)
2773
2774 uint32_t flags() const { return flags_.load(std::memory_order_relaxed); }
2775 void set_flags(uint32_t value) {
2776 flags_.store(value, std::memory_order_relaxed);
2777 }
2778
2779 public:
2780 enum TypeState {
2781 kAllocated, // Initial state.
2782 kFinalizedInstantiated, // Instantiated type ready for use.
2783 kFinalizedUninstantiated, // Uninstantiated type ready for use.
2784 };
2785
2786 using NullabilityBit = BitField<decltype(flags_), bool, 0, 1>;
2787 static constexpr intptr_t kNullabilityMask = NullabilityBit::mask();
2788
2789 static constexpr intptr_t kTypeStateShift = NullabilityBit::kNextBit;
2790 static constexpr intptr_t kTypeStateBits = 2;
2791 using TypeStateBits =
2792 BitField<decltype(flags_), uint8_t, kTypeStateShift, kTypeStateBits>;
2793
2794 private:
2795 RAW_HEAP_OBJECT_IMPLEMENTATION(AbstractType);
2796
2797 friend class ObjectStore;
2798 friend class StubCode;
2799};
2800
2801class UntaggedType : public UntaggedAbstractType {
2802 public:
2803 static constexpr intptr_t kTypeClassIdShift = TypeStateBits::kNextBit;
2806
2807 private:
2808 RAW_HEAP_OBJECT_IMPLEMENTATION(Type);
2809
2810 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, arguments)
2811 VISIT_TO(arguments)
2812
2813 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2814
2815 ClassIdTagType type_class_id() const {
2816 return TypeClassIdBits::decode(flags());
2817 }
2818 void set_type_class_id(ClassIdTagType value) {
2819 set_flags(TypeClassIdBits::update(value, flags()));
2820 }
2821
2823 friend class CidRewriteVisitor;
2825};
2826
2827class UntaggedFunctionType : public UntaggedAbstractType {
2828 private:
2829 RAW_HEAP_OBJECT_IMPLEMENTATION(FunctionType);
2830 COMPRESSED_POINTER_FIELD(TypeParametersPtr, type_parameters)
2831 COMPRESSED_POINTER_FIELD(TypeParametersPtr, type_parameters)
2832 COMPRESSED_POINTER_FIELD(AbstractTypePtr, result_type)
2833 COMPRESSED_POINTER_FIELD(ArrayPtr, parameter_types)
2834 COMPRESSED_POINTER_FIELD(ArrayPtr, named_parameter_names);
2835 VISIT_TO(named_parameter_names)
2836 AtomicBitFieldContainer<uint32_t> packed_parameter_counts_;
2837 AtomicBitFieldContainer<uint16_t> packed_type_parameter_counts_;
2838
2839 // The bit fields are public for use in kernel_to_il.cc.
2840 public:
2841 // For packed_type_parameter_counts_.
2842 using PackedNumParentTypeArguments =
2843 BitField<decltype(packed_type_parameter_counts_), uint8_t, 0, 8>;
2844 using PackedNumTypeParameters =
2845 BitField<decltype(packed_type_parameter_counts_),
2846 uint8_t,
2847 PackedNumParentTypeArguments::kNextBit,
2848 8>;
2849
2850 // For packed_parameter_counts_.
2851 using PackedNumImplicitParameters =
2852 BitField<decltype(packed_parameter_counts_), uint8_t, 0, 1>;
2853 using PackedHasNamedOptionalParameters =
2854 BitField<decltype(packed_parameter_counts_),
2855 bool,
2856 PackedNumImplicitParameters::kNextBit,
2857 1>;
2858 using PackedNumFixedParameters =
2859 BitField<decltype(packed_parameter_counts_),
2860 uint16_t,
2861 PackedHasNamedOptionalParameters::kNextBit,
2862 14>;
2863 using PackedNumOptionalParameters =
2864 BitField<decltype(packed_parameter_counts_),
2865 uint16_t,
2866 PackedNumFixedParameters::kNextBit,
2867 14>;
2868 static_assert(PackedNumOptionalParameters::kNextBit <=
2869 compiler::target::kSmiBits,
2870 "In-place mask for number of optional parameters cannot fit in "
2871 "a Smi on the target architecture");
2872
2873 private:
2874 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2875
2876 friend class Function;
2877};
2878
2879class UntaggedRecordType : public UntaggedAbstractType {
2880 private:
2881 RAW_HEAP_OBJECT_IMPLEMENTATION(RecordType);
2882
2883 COMPRESSED_SMI_FIELD(SmiPtr, shape)
2884 COMPRESSED_POINTER_FIELD(ArrayPtr, field_types)
2885 VISIT_TO(field_types)
2886
2887 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2888};
2889
2890class UntaggedTypeParameter : public UntaggedAbstractType {
2891 public:
2892 static constexpr intptr_t kIsFunctionTypeParameterBit =
2893 TypeStateBits::kNextBit;
2894 using IsFunctionTypeParameter =
2895 BitField<decltype(flags_), bool, kIsFunctionTypeParameterBit, 1>;
2896
2897 private:
2898 RAW_HEAP_OBJECT_IMPLEMENTATION(TypeParameter);
2899
2900 // FunctionType or Smi (class id).
2901 COMPRESSED_POINTER_FIELD(ObjectPtr, owner)
2902 VISIT_TO(owner)
2903 uint16_t base_; // Number of enclosing function type parameters.
2904 uint16_t index_; // Keep size in sync with BuildTypeParameterTypeTestStub.
2905
2906 private:
2907 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2908
2909 friend class CidRewriteVisitor;
2910};
2911
2912class UntaggedClosure : public UntaggedInstance {
2913 private:
2914 RAW_HEAP_OBJECT_IMPLEMENTATION(Closure);
2915
2916 // The following fields are also declared in the Dart source of class
2917 // _Closure, and so must be the first fields in the object and must appear
2918 // in the same order, so the offsets are identical in Dart and C++.
2919 //
2920 // Note that the type of a closure is defined by instantiating the
2921 // signature of the closure function with the instantiator, function, and
2922 // delayed (if non-empty) type arguments stored in the closure value.
2923
2924 // Stores the instantiator type arguments provided when the closure was
2925 // created.
2926 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, instantiator_type_arguments)
2927 VISIT_FROM(instantiator_type_arguments)
2928 // Stores the function type arguments provided for any generic parent
2929 // functions when the closure was created.
2930 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, function_type_arguments)
2931 // If this field contains the empty type argument vector, then the closure
2932 // value is generic.
2933 //
2934 // To create a new closure that is a specific type instantiation of a generic
2935 // closure, a copy of the closure is created where the empty type argument
2936 // vector in this field is replaced with the vector of local type arguments.
2937 // The resulting closure value is not generic, and so an attempt to provide
2938 // type arguments when invoking the new closure value is treated the same as
2939 // calling any other non-generic function with unneeded type arguments.
2940 //
2941 // If the signature for the closure function has no local type parameters,
2942 // the only guarantee about this field is that it never contains the empty
2943 // type arguments vector. Thus, only this field need be inspected to
2944 // determine whether a given closure value is generic.
2945 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, delayed_type_arguments)
2946 COMPRESSED_POINTER_FIELD(FunctionPtr, function)
2947 // For tear-offs - captured receiver.
2948 // For ordinary closures - Context object with captured variables.
2949 COMPRESSED_POINTER_FIELD(ObjectPtr, context)
2950 COMPRESSED_SMI_FIELD(SmiPtr, hash)
2951 VISIT_TO(hash)
2952
2953 // We have an extra word in the object due to alignment rounding, so use it in
2954 // bare instructions mode to cache the entry point from the closure function
2955 // to avoid an extra redirection on call. Closure functions only have
2956 // one entry point, as dynamic calls use dynamic closure call dispatchers.
2957 ONLY_IN_PRECOMPILED(uword entry_point_);
2958
2959 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2960
2962};
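// --- Editorial note: concretely, for the Dart declaration `T id<T>(T x) =>
// x;` the tear-off `id` is generic, so its delayed_type_arguments_ holds the
// reserved empty vector described above; after `int Function(int) f = id;`
// the VM copies the closure and stores <int> there instead, so f is no
// longer generic and inspecting only that field distinguishes the two cases.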
2963
2964class UntaggedNumber : public UntaggedInstance {
2965 RAW_OBJECT_IMPLEMENTATION(Number);
2966};
2967
2968class UntaggedInteger : public UntaggedNumber {
2969 RAW_OBJECT_IMPLEMENTATION(Integer);
2970};
2971
2972class UntaggedSmi : public UntaggedInteger {
2973 RAW_OBJECT_IMPLEMENTATION(Smi);
2974};
2975
2976class UntaggedMint : public UntaggedInteger {
2977 RAW_HEAP_OBJECT_IMPLEMENTATION(Mint);
2978 VISIT_NOTHING();
2979
2980 ALIGN8 int64_t value_;
2981
2982 friend class Api;
2983 friend class Class;
2984 friend class Integer;
2985};
2986COMPILE_ASSERT(sizeof(UntaggedMint) == 16);
2987
2988class UntaggedDouble : public UntaggedNumber {
2989 RAW_HEAP_OBJECT_IMPLEMENTATION(Double);
2990 VISIT_NOTHING();
2991
2992 ALIGN8 double value_;
2993
2994 friend class Api;
2995 friend class Class;
2996};
2997COMPILE_ASSERT(sizeof(UntaggedDouble) == 16);
2998
2999class UntaggedString : public UntaggedInstance {
3000 RAW_HEAP_OBJECT_IMPLEMENTATION(String);
3001
3002 protected:
3003#if !defined(HASH_IN_OBJECT_HEADER)
3004 COMPRESSED_SMI_FIELD(SmiPtr, hash)
3005 VISIT_FROM(hash)
3006#endif
3007 COMPRESSED_SMI_FIELD(SmiPtr, length)
3008#if defined(HASH_IN_OBJECT_HEADER)
3009 VISIT_FROM(length)
3010#endif
3011 VISIT_TO(length)
3012
3013 private:
3014 friend class Library;
3016 friend class ImageWriter;
3017};
3018
3019class UntaggedOneByteString : public UntaggedString {
3020 RAW_HEAP_OBJECT_IMPLEMENTATION(OneByteString);
3021 VISIT_NOTHING();
3022
3023 // Variable length data follows here.
3024 uint8_t* data() { OPEN_ARRAY_START(uint8_t, uint8_t); }
3025 const uint8_t* data() const { OPEN_ARRAY_START(uint8_t, uint8_t); }
3026
3028 friend class String;
3031};
3032
3033class UntaggedTwoByteString : public UntaggedString {
3034 RAW_HEAP_OBJECT_IMPLEMENTATION(TwoByteString);
3035 VISIT_NOTHING();
3036
3037 // Variable length data follows here.
3038 uint16_t* data() { OPEN_ARRAY_START(uint16_t, uint16_t); }
3039 const uint16_t* data() const { OPEN_ARRAY_START(uint16_t, uint16_t); }
3040
3042 friend class String;
3045};
3046
3047// Abstract base class for UntaggedTypedData/UntaggedExternalTypedData/
3048// UntaggedTypedDataView/Pointer.
3049//
3050// TypedData extends this with a length field, while Pointer extends this with
3051// TypeArguments field.
3052class UntaggedPointerBase : public UntaggedInstance {
3053 public:
3054 uint8_t* data() { return data_; }
3055
3056 protected:
3057 // The contents of [data_] depend on which concrete subclass is used:
3058 //
3059 // - UntaggedTypedData: Start of the payload.
3060 // - UntaggedExternalTypedData: Start of the C-heap payload.
3061 // - UntaggedTypedDataView: The [data_] field of the backing store for the
3062 // view plus the [offset_in_bytes_] the view has.
3063 // - UntaggedPointer: Pointer into C memory (no length specified).
3064 //
3065 // During allocation or snapshot reading the [data_] can be temporarily
3066 // nullptr (which is the case for views which just got created but haven't
3067 // gotten the backing store set).
3068 uint8_t* data_;
3069
3070 private:
3071 template <typename T>
3072 friend void CopyTypedDataBaseWithSafepointChecks(
3073 Thread*,
3074 const T&,
3075 const T&,
3076 intptr_t); // Access _data for memmove with safepoint checkins.
3077
3078 RAW_HEAP_OBJECT_IMPLEMENTATION(PointerBase);
3079};
3080
3081// Abstract base class for UntaggedTypedData/UntaggedExternalTypedData/
3082// UntaggedTypedDataView.
3083class UntaggedTypedDataBase : public UntaggedPointerBase {
3084 protected:
3085#if defined(DART_COMPRESSED_POINTERS)
3086 uint32_t padding_; // Makes Windows and Posix agree on layout.
3087#endif
3088 // The length of the view in element sizes (obtainable via
3089 // [TypedDataBase::ElementSizeInBytes]).
3090 COMPRESSED_SMI_FIELD(SmiPtr, length);
3091 VISIT_FROM(length)
3092 VISIT_TO(length)
3093
3094 private:
3095 friend class UntaggedTypedDataView;
3096 friend void UpdateLengthField(intptr_t, ObjectPtr, ObjectPtr); // length_
3097 friend void InitializeExternalTypedData(
3098 intptr_t,
3099 ExternalTypedDataPtr,
3100 ExternalTypedDataPtr); // initialize fields.
3101 friend void InitializeExternalTypedDataWithSafepointChecks(
3102 Thread*,
3103 intptr_t,
3104 const ExternalTypedData&,
3105 const ExternalTypedData&); // initialize fields.
3106
3107 RAW_HEAP_OBJECT_IMPLEMENTATION(TypedDataBase);
3108};
3109
3110class UntaggedTypedData : public UntaggedTypedDataBase {
3111 RAW_HEAP_OBJECT_IMPLEMENTATION(TypedData);
3112
3113 public:
3114 static intptr_t payload_offset() {
3115 return OFFSET_OF_RETURNED_VALUE(UntaggedTypedData, internal_data);
3116 }
3117
3118 // Recompute [data_] pointer to internal data.
3119 void RecomputeDataField() { data_ = internal_data(); }
3120
3121 protected:
3122 // Variable length data follows here.
3123 uint8_t* internal_data() { OPEN_ARRAY_START(uint8_t, uint8_t); }
3124 const uint8_t* internal_data() const { OPEN_ARRAY_START(uint8_t, uint8_t); }
3125
3126 uint8_t* data() {
3127 ASSERT(data_ == internal_data());
3128 return data_;
3129 }
3130 const uint8_t* data() const {
3131 ASSERT(data_ == internal_data());
3132 return data_;
3133 }
3134
3135 friend class Api;
3136 friend class Instance;
3138 friend class NativeEntryData;
3139 friend class Object;
3140 friend class ObjectPool;
3144};
3145
3146// All _*ArrayView/_ByteDataView classes share the same layout.
3147class UntaggedTypedDataView : public UntaggedTypedDataBase {
3148 RAW_HEAP_OBJECT_IMPLEMENTATION(TypedDataView);
3149
3150 public:
3151 // Recompute [data_] based on internal/external [typed_data_].
3152 void RecomputeDataField() {
3153 const intptr_t offset_in_bytes = RawSmiValue(this->offset_in_bytes());
3154 uint8_t* payload = typed_data()->untag()->data_;
3155 data_ = payload + offset_in_bytes;
3156 }
3157
3158 // Recompute [data_] based on internal [typed_data_] - needs to be called by
3159 // GC whenever the backing store moved.
3160 //
3161 // NOTICE: This method assumes [this] is the forwarded object and the
3162 // [typed_data_] pointer points to the new backing store. The backing store's
3163 // fields don't need to be valid - only its address.
3164 void RecomputeDataFieldForInternalTypedData() {
3165 data_ = DataFieldForInternalTypedData();
3166 }
3167
3168 uint8_t* DataFieldForInternalTypedData() const {
3169 const intptr_t offset_in_bytes = RawSmiValue(this->offset_in_bytes());
3170 uint8_t* payload =
3171 reinterpret_cast<uint8_t*>(UntaggedObject::ToAddr(typed_data()) +
3172 UntaggedTypedData::payload_offset());
3173 return payload + offset_in_bytes;
3174 }
3175
3176 void ValidateInnerPointer() {
3177 if (typed_data()->untag()->GetClassId() == kNullCid) {
3178 // The view object must have gotten just initialized.
3179 if (data_ != nullptr || RawSmiValue(offset_in_bytes()) != 0 ||
3180 RawSmiValue(length()) != 0) {
3181 FATAL("TypedDataView has invalid inner pointer.");
3182 }
3183 } else {
3184 const intptr_t offset_in_bytes = RawSmiValue(this->offset_in_bytes());
3185 uint8_t* payload = typed_data()->untag()->data_;
3186 if ((payload + offset_in_bytes) != data_) {
3187 FATAL("TypedDataView has invalid inner pointer.");
3188 }
3189 }
3190 }
3191
3192 protected:
3193 COMPRESSED_POINTER_FIELD(TypedDataBasePtr, typed_data)
3194 COMPRESSED_SMI_FIELD(SmiPtr, offset_in_bytes)
3195 VISIT_TO(offset_in_bytes)
3196 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
3197
3198 friend void InitializeTypedDataView(TypedDataViewPtr);
3199 friend class Api;
3200 friend class Object;
3204 friend class GCCompactor;
3205 template <bool>
3206 friend class ScavengerVisitorBase;
3207};
3208
3209class UntaggedBool : public UntaggedInstance {
3210 RAW_HEAP_OBJECT_IMPLEMENTATION(Bool);
3211 VISIT_NOTHING();
3212
3213 bool value_;
3214
3215 friend class Object;
3216};
3217
3218class UntaggedArray : public UntaggedInstance {
3219 RAW_HEAP_OBJECT_IMPLEMENTATION(Array);
3220
3221 COMPRESSED_ARRAY_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
3222 VISIT_FROM(type_arguments)
3223 COMPRESSED_SMI_FIELD(SmiPtr, length)
3224 // Variable length data follows here.
3225 COMPRESSED_VARIABLE_POINTER_FIELDS(ObjectPtr, element, data)
3226
3233 friend class Deserializer;
3234 friend class UntaggedCode;
3237 friend class Map;
3238 friend class UntaggedMap;
3239 friend class UntaggedConstMap;
3240 friend class Object;
3241 friend class ICData; // For high performance access.
3242 friend class SubtypeTestCache; // For high performance access.
3243 friend class ReversePc;
3244 template <typename Table, bool kAllCanonicalObjectsAreIncludedIntoSet>
3246 friend class Page;
3247 template <bool>
3249 friend class FastObjectCopy; // For initializing fields.
3250 friend void UpdateLengthField(intptr_t, ObjectPtr, ObjectPtr); // length_
3251};
3252
3253class UntaggedImmutableArray : public UntaggedArray {
3254 RAW_HEAP_OBJECT_IMPLEMENTATION(ImmutableArray);
3255};
3256
3257class UntaggedGrowableObjectArray : public UntaggedInstance {
3258 RAW_HEAP_OBJECT_IMPLEMENTATION(GrowableObjectArray);
3259
3260 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
3261 VISIT_FROM(type_arguments)
3262 COMPRESSED_SMI_FIELD(SmiPtr, length)
3263 COMPRESSED_POINTER_FIELD(ArrayPtr, data)
3264 VISIT_TO(data)
3265 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
3266
3267 friend class ReversePc;
3268};
3269
3270class UntaggedLinkedHashBase : public UntaggedInstance {
3271 RAW_HEAP_OBJECT_IMPLEMENTATION(LinkedHashBase);
3272
3273 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
3274 VISIT_FROM(type_arguments)
3275 COMPRESSED_POINTER_FIELD(SmiPtr, hash_mask)
3276 COMPRESSED_POINTER_FIELD(ArrayPtr, data)
3277 COMPRESSED_POINTER_FIELD(SmiPtr, used_data)
3278 COMPRESSED_POINTER_FIELD(SmiPtr, deleted_keys)
3279 COMPRESSED_POINTER_FIELD(TypedDataPtr, index)
3280 VISIT_TO(index)
3281
3282 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
3283 // Do not serialize index.
3284 return reinterpret_cast<CompressedObjectPtr*>(&deleted_keys_);
3285 }
3286};
3287
3288class UntaggedMap : public UntaggedLinkedHashBase {
3289 RAW_HEAP_OBJECT_IMPLEMENTATION(Map);
3290
3291 friend class UntaggedConstMap;
3292};
3293
3294class UntaggedConstMap : public UntaggedMap {
3295 RAW_HEAP_OBJECT_IMPLEMENTATION(ConstMap);
3296};
3297
3298class UntaggedSet : public UntaggedLinkedHashBase {
3299 RAW_HEAP_OBJECT_IMPLEMENTATION(Set);
3300
3301 friend class UntaggedConstSet;
3302};
3303
3304class UntaggedConstSet : public UntaggedSet {
3305 RAW_HEAP_OBJECT_IMPLEMENTATION(ConstSet);
3306};
3307
3308class UntaggedFloat32x4 : public UntaggedInstance {
3309 RAW_HEAP_OBJECT_IMPLEMENTATION(Float32x4);
3310 VISIT_NOTHING();
3311
3312 ALIGN8 float value_[4];
3313
3314 friend class Class;
3315
3316 public:
3317 float x() const { return value_[0]; }
3318 float y() const { return value_[1]; }
3319 float z() const { return value_[2]; }
3320 float w() const { return value_[3]; }
3321};
3322COMPILE_ASSERT(sizeof(UntaggedFloat32x4) == 24);
3323
3324class UntaggedInt32x4 : public UntaggedInstance {
3325 RAW_HEAP_OBJECT_IMPLEMENTATION(Int32x4);
3326 VISIT_NOTHING();
3327
3328 ALIGN8 int32_t value_[4];
3329
3334
3335 public:
3336 int32_t x() const { return value_[0]; }
3337 int32_t y() const { return value_[1]; }
3338 int32_t z() const { return value_[2]; }
3339 int32_t w() const { return value_[3]; }
3340};
3341COMPILE_ASSERT(sizeof(UntaggedInt32x4) == 24);
3342
3343class UntaggedFloat64x2 : public UntaggedInstance {
3344 RAW_HEAP_OBJECT_IMPLEMENTATION(Float64x2);
3345 VISIT_NOTHING();
3346
3347 ALIGN8 double value_[2];
3348
3349 friend class Class;
3350
3351 public:
3352 double x() const { return value_[0]; }
3353 double y() const { return value_[1]; }
3354};
3355COMPILE_ASSERT(sizeof(UntaggedFloat64x2) == 24);
3356
3357class UntaggedRecord : public UntaggedInstance {
3358 RAW_HEAP_OBJECT_IMPLEMENTATION(Record);
3359
3360#if defined(DART_COMPRESSED_POINTERS)
3361 // This explicit padding avoids implicit padding between [shape] and [data].
3362 // Record allocation doesn't initialize the implicit padding but GC scans
3363 // everything between 'from' (shape) and 'to' (end of data),
3364 // so it would see garbage if implicit padding is inserted.
3365 uint32_t padding_;
3366#endif
3367 COMPRESSED_SMI_FIELD(SmiPtr, shape)
3368 VISIT_FROM(shape)
3369 // Variable length data follows here.
3370 COMPRESSED_VARIABLE_POINTER_FIELDS(ObjectPtr, field, field_values)
3371
3372 friend void UpdateLengthField(intptr_t, ObjectPtr,
3373 ObjectPtr); // shape_
3374};
3375
3376// Define aliases for intptr_t.
3377#if defined(ARCH_IS_32_BIT)
3378#define kIntPtrCid kTypedDataInt32ArrayCid
3379#define GetIntPtr GetInt32
3380#define SetIntPtr SetInt32
3381#define kUintPtrCid kTypedDataUint32ArrayCid
3382#define GetUintPtr GetUint32
3383#define SetUintPtr SetUint32
3384#elif defined(ARCH_IS_64_BIT)
3385#define kIntPtrCid kTypedDataInt64ArrayCid
3386#define GetIntPtr GetInt64
3387#define SetIntPtr SetInt64
3388#define kUintPtrCid kTypedDataUint64ArrayCid
3389#define GetUintPtr GetUint64
3390#define SetUintPtr SetUint64
3391#else
3392#error Architecture is not 32-bit or 64-bit.
3393#endif // ARCH_IS_32_BIT
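// --- Editorial sketch, not part of raw_object.h: with the aliases above,
// word-size-independent code can read a native-word element from typed data
// without spelling out the width; GetIntPtr expands to GetInt32 on 32-bit
// and GetInt64 on 64-bit targets. Kept in a comment because the TypedData
// handle class lives in object.h, and the usage is illustrative only:
//
//   intptr_t ReadNativeWordSketch(const TypedData& td) {
//     ASSERT(td.GetClassId() == kIntPtrCid);
//     return td.GetIntPtr(/*byte_offset=*/0);
//   }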
3394
3395class UntaggedExternalTypedData : public UntaggedTypedDataBase {
3396 RAW_HEAP_OBJECT_IMPLEMENTATION(ExternalTypedData);
3397};
3398
3399class UntaggedPointer : public UntaggedPointerBase {
3400 RAW_HEAP_OBJECT_IMPLEMENTATION(Pointer);
3401
3402 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
3403 VISIT_FROM(type_arguments)
3404 VISIT_TO(type_arguments)
3405
3406 friend class Pointer;
3407};
3408
3409class UntaggedDynamicLibrary : public UntaggedInstance {
3410 RAW_HEAP_OBJECT_IMPLEMENTATION(DynamicLibrary);
3411 VISIT_NOTHING();
3412 void* handle_;
3413 bool isClosed_;
3414 bool canBeClosed_;
3415
3416 friend class DynamicLibrary;
3417};
3418
3419// VM implementations of the basic types in the isolate.
3420class alignas(8) UntaggedCapability : public UntaggedInstance {
3421 RAW_HEAP_OBJECT_IMPLEMENTATION(Capability);
3422 VISIT_NOTHING();
3423 uint64_t id_;
3424};
3425
3426class alignas(8) UntaggedSendPort : public UntaggedInstance {
3427 RAW_HEAP_OBJECT_IMPLEMENTATION(SendPort);
3428 VISIT_NOTHING();
3429 Dart_Port id_;
3430 Dart_Port origin_id_;
3431
3432 friend class ReceivePort;
3433};
3434
3435class UntaggedReceivePort : public UntaggedInstance {
3436 RAW_HEAP_OBJECT_IMPLEMENTATION(ReceivePort);
3437
3438 COMPRESSED_POINTER_FIELD(SendPortPtr, send_port)
3439 VISIT_FROM(send_port)
3440 COMPRESSED_POINTER_FIELD(SmiPtr, bitfield)
3441 COMPRESSED_POINTER_FIELD(InstancePtr, handler)
3442#if defined(PRODUCT)
3443 VISIT_TO(handler)
3444#else
3445 COMPRESSED_POINTER_FIELD(StringPtr, debug_name)
3446 COMPRESSED_POINTER_FIELD(StackTracePtr, allocation_location)
3447 VISIT_TO(allocation_location)
3448#endif // !defined(PRODUCT)
3449};
3450
3451class UntaggedTransferableTypedData : public UntaggedInstance {
3452 RAW_HEAP_OBJECT_IMPLEMENTATION(TransferableTypedData);
3453 VISIT_NOTHING();
3454};
3455
3456// VM type for capturing stack traces when exceptions are thrown.
3457// Currently we don't have any interface that this object is supposed
3458// to implement so we just support the 'toString' method which
3459// converts the stack trace into a string.
3460class UntaggedStackTrace : public UntaggedInstance {
3461 RAW_HEAP_OBJECT_IMPLEMENTATION(StackTrace);
3462
3463 // Link to parent async stack trace.
3464 COMPRESSED_POINTER_FIELD(StackTracePtr, async_link);
3465 VISIT_FROM(async_link)
3466 // Code object for each frame in the stack trace.
3467 COMPRESSED_POINTER_FIELD(ArrayPtr, code_array);
3468 // Offset of PC for each frame.
3469 COMPRESSED_POINTER_FIELD(TypedDataPtr, pc_offset_array);
3470
3471 VISIT_TO(pc_offset_array)
3472 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
3473
3474 // False for pre-allocated stack trace (used in OOM and Stack overflow).
3475 bool expand_inlined_;
3476 // Whether the link between the stack and the async-link represents a
3477 // synchronous start to an asynchronous function. In this case, we omit the
3478 // <asynchronous suspension> marker when concatenating the stacks.
3479 bool skip_sync_start_in_parent_stack;
3480};
3481
3482class UntaggedSuspendState : public UntaggedInstance {
3483 RAW_HEAP_OBJECT_IMPLEMENTATION(SuspendState);
3484
3485 NOT_IN_PRECOMPILED(intptr_t frame_capacity_);
3486 intptr_t frame_size_;
3487 uword pc_;
3488
3489 // Holds function-specific object which is returned from
3490 // SuspendState.init* method.
3491 // For async functions: _Future instance.
3492 // For async* functions: _AsyncStarStreamController instance.
3493 COMPRESSED_POINTER_FIELD(InstancePtr, function_data)
3494
3495 COMPRESSED_POINTER_FIELD(ClosurePtr, then_callback)
3496 COMPRESSED_POINTER_FIELD(ClosurePtr, error_callback)
3497 VISIT_FROM(function_data)
3498 VISIT_TO(error_callback)
3499
3500 public:
3501 uword pc() const { return pc_; }
3502
3503 intptr_t frame_capacity() const {
3504#if defined(DART_PRECOMPILED_RUNTIME)
3505 return frame_size_;
3506#else
3507 return frame_capacity_;
3508#endif
3509 }
3510
3511 static intptr_t payload_offset() {
3512 return OFFSET_OF_RETURNED_VALUE(UntaggedSuspendState, payload);
3513 }
3514
3515 // Variable length payload follows here.
3516 uint8_t* payload() { OPEN_ARRAY_START(uint8_t, uint8_t); }
3517 const uint8_t* payload() const { OPEN_ARRAY_START(uint8_t, uint8_t); }
3518};
3519
3520// VM type for capturing JS regular expressions.
3521class UntaggedRegExp : public UntaggedInstance {
3522 RAW_HEAP_OBJECT_IMPLEMENTATION(RegExp);
3523
3524 COMPRESSED_POINTER_FIELD(ArrayPtr, capture_name_map)
3525 VISIT_FROM(capture_name_map)
3526 // Pattern to be used for matching.
3527 COMPRESSED_POINTER_FIELD(StringPtr, pattern)
3528 COMPRESSED_POINTER_FIELD(ObjectPtr, one_byte) // FunctionPtr or TypedDataPtr
3529 COMPRESSED_POINTER_FIELD(ObjectPtr, two_byte)
3530 COMPRESSED_POINTER_FIELD(ObjectPtr, one_byte_sticky)
3531 COMPRESSED_POINTER_FIELD(ObjectPtr, two_byte_sticky)
3532 VISIT_TO(two_byte_sticky)
3533 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
3534
3535 std::atomic<intptr_t> num_bracket_expressions_;
3536 intptr_t num_bracket_expressions() {
3537 return num_bracket_expressions_.load(std::memory_order_relaxed);
3538 }
3539 void set_num_bracket_expressions(intptr_t value) {
3540 num_bracket_expressions_.store(value, std::memory_order_relaxed);
3541 }
3542
3543 // The same pattern may use different amount of registers if compiled
3544 // for a one-byte target than a two-byte target. For example, we do not
3545 // need to allocate registers to check whether the current position is within
3546 // a surrogate pair when matching a Unicode pattern against a one-byte string.
3547 intptr_t num_one_byte_registers_;
3548 intptr_t num_two_byte_registers_;
3549
3550 // A bitfield with two fields:
3551 // type: Uninitialized, simple or complex.
3552 // flags: Represents global/local, case insensitive, multiline, unicode,
3553 // dotAll.
3554 // It is possible multiple compilers race to update the flags concurrently.
3555 // That should be safe since all updates store the same values.
3556 AtomicBitFieldContainer<int8_t> type_flags_;
3557};
3558
3559class UntaggedWeakProperty : public UntaggedInstance {
3560 RAW_HEAP_OBJECT_IMPLEMENTATION(WeakProperty);
3561
3562 COMPRESSED_POINTER_FIELD(ObjectPtr, key) // Weak reference.
3563 VISIT_FROM(key)
3564 COMPRESSED_POINTER_FIELD(ObjectPtr, value)
3565 VISIT_TO(value)
3566 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
3567
3568 // Linked list is chaining all pending weak properties. Not visited by
3569 // pointer visitors.
3570 COMPRESSED_POINTER_FIELD(WeakPropertyPtr, next_seen_by_gc)
3571
3572 template <typename Type, typename PtrType>
3573 friend class GCLinkedList;
3574 template <bool>
3575 friend class MarkingVisitorBase;
3576 template <bool>
3577 friend class ScavengerVisitorBase;
3578 friend class Scavenger;
3579 friend class FastObjectCopy; // For OFFSET_OF
3580 friend class SlowObjectCopy; // For OFFSET_OF
3581};
3582
3583// WeakProperty is special in that it has a pointer field which is not
3584// traversed by pointer visitors, and thus not in the range [from(),to()]:
3585// next_seen_by_gc, which is after the other fields.
3586template <>
3587DART_FORCE_INLINE uword
3588UntaggedObject::to_offset<UntaggedWeakProperty>(intptr_t length) {
3589 return OFFSET_OF(UntaggedWeakProperty, next_seen_by_gc_);
3590}
3591
3592class UntaggedWeakReference : public UntaggedInstance {
3593 RAW_HEAP_OBJECT_IMPLEMENTATION(WeakReference);
3594
3595 COMPRESSED_POINTER_FIELD(ObjectPtr, target) // Weak reference.
3596 VISIT_FROM(target)
3597 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
3598 VISIT_TO(type_arguments)
3599 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
3600
3601 // Linked list is chaining all pending weak properties. Not visited by
3602 // pointer visitors.
3603 COMPRESSED_POINTER_FIELD(WeakReferencePtr, next_seen_by_gc)
3604
3605 template <typename Type, typename PtrType>
3606 friend class GCLinkedList;
3607 template <bool>
3608 friend class MarkingVisitorBase;
3609 template <bool>
3610 friend class ScavengerVisitorBase;
3611 friend class Scavenger;
3612 friend class ObjectGraph;
3613 friend class FastObjectCopy; // For OFFSET_OF
3614 friend class SlowObjectCopy; // For OFFSET_OF
3615};
3616
3617// WeakReference is special in that it has a pointer field which is not
3618// traversed by pointer visitors, and thus not in the range [from(),to()]:
3619// next_seen_by_gc, which is after the other fields.
3620template <>
3621DART_FORCE_INLINE uword
3622UntaggedObject::to_offset<UntaggedWeakReference>(intptr_t length) {
3623 return OFFSET_OF(UntaggedWeakReference, next_seen_by_gc_);
3624}
3625
3626class UntaggedFinalizerBase : public UntaggedInstance {
3627 RAW_HEAP_OBJECT_IMPLEMENTATION(FinalizerBase);
3628
3629 // The isolate this finalizer belongs to. Updated on send and exit, and set
3630 // to null on isolate shutdown. See Isolate::finalizers_.
3631 Isolate* isolate_;
3632
3633// With compressed pointers, the first field in a subclass is at offset 28.
3634// If the fields were public, the first field in a subclass would be at offset 32.
3635// On Windows it is always at offset 32, public or private.
3636// This makes it 32 for all OSes.
3637// We can't use ALIGN8 on the first fields of the subclasses because they use
3638// the COMPRESSED_POINTER_FIELD macro to define them.
3639// Placed before the first fields so it is not included between from() and to().
3640#ifdef DART_COMPRESSED_POINTERS
3641 uint32_t align_first_field_in_subclass;
3642#endif
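// One plausible worked layout behind the numbers above (64-bit build with
// DART_COMPRESSED_POINTERS; exact packing is ultimately up to the compiler):
//
//   tags_ (UntaggedObject):           offset  0, size 8
//   Isolate* isolate_:                offset  8, size 8
//   align_first_field_in_subclass:    offset 16, size 4
//   detachments_ (compressed):        offset 20, size 4
//   all_entries_ (compressed):        offset 24, size 4
//   entries_collected_ (compressed):  offset 28, size 4
//   first field of a subclass:        offset 32
//
// Without the uint32_t pad, the three compressed fields would sit at
// offsets 16, 20, and 24, leaving the first subclass field at 28.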
3643
3644 COMPRESSED_POINTER_FIELD(ObjectPtr, detachments)
3645 VISIT_FROM(detachments)
3646 COMPRESSED_POINTER_FIELD(SetPtr, all_entries)
3647 COMPRESSED_POINTER_FIELD(FinalizerEntryPtr, entries_collected)
3648
3649 template <typename GCVisitorType>
3650 friend void MournFinalizerEntry(GCVisitorType*, FinalizerEntryPtr);
3651 template <bool>
3652 friend class MarkingVisitorBase;
3653 template <bool>
3654 friend class ScavengerVisitorBase;
3655 friend class ObjectGraph;
3656};
3657
3658class UntaggedFinalizer : public UntaggedFinalizerBase {
3659 RAW_HEAP_OBJECT_IMPLEMENTATION(Finalizer);
3660
3661 COMPRESSED_POINTER_FIELD(ClosurePtr, callback)
3662 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
3663 VISIT_TO(type_arguments)
3664
3665 template <std::memory_order order = std::memory_order_relaxed>
3666 FinalizerEntryPtr exchange_entries_collected(FinalizerEntryPtr value) {
3667 return ExchangeCompressedPointer<FinalizerEntryPtr,
3668 CompressedFinalizerEntryPtr, order>(
3669 &entries_collected_, value);
3670 }
3671
3672 template <typename GCVisitorType>
3673 friend void MournFinalizerEntry(GCVisitorType*, FinalizerEntryPtr);
3674 template <bool>
3675 friend class MarkingVisitorBase;
3676 template <bool>
3677 friend class ScavengerVisitorBase;
3678};
3679
3680class UntaggedNativeFinalizer : public UntaggedFinalizerBase {
3681 RAW_HEAP_OBJECT_IMPLEMENTATION(NativeFinalizer);
3682
3683 COMPRESSED_POINTER_FIELD(PointerPtr, callback)
3684 VISIT_TO(callback)
3685
3686 template <bool>
3687 friend class MarkingVisitorBase;
3688 template <bool>
3689 friend class ScavengerVisitorBase;
3690};
3691
3692class UntaggedFinalizerEntry : public UntaggedInstance {
3693 public:
3694 intptr_t external_size() { return external_size_; }
3695 void set_external_size(intptr_t value) { external_size_ = value; }
3696
3697 private:
3698 RAW_HEAP_OBJECT_IMPLEMENTATION(FinalizerEntry);
3699
3700 COMPRESSED_POINTER_FIELD(ObjectPtr, value) // Weak reference.
3701 VISIT_FROM(value)
3702 COMPRESSED_POINTER_FIELD(ObjectPtr, detach) // Weak reference.
3703 COMPRESSED_POINTER_FIELD(ObjectPtr, token)
3704 COMPRESSED_POINTER_FIELD(FinalizerBasePtr, finalizer) // Weak reference.
3705 // Used for the linked list in Finalizer::entries_collected_. That cannot
3706 // be an ordinary list because we need to add elements during a GC, when
3707 // we cannot modify the heap.
3708 COMPRESSED_POINTER_FIELD(FinalizerEntryPtr, next)
3709 VISIT_TO(next)
3710
3711 // Linked list chaining all pending finalizer entries. Not visited by
3712 // pointer visitors. Only populated during a GC; otherwise null.
3713 COMPRESSED_POINTER_FIELD(FinalizerEntryPtr, next_seen_by_gc)
3714
3715 intptr_t external_size_;
3716
3717 template <typename Type, typename PtrType>
3718 friend class GCLinkedList;
3719 template <typename GCVisitorType>
3720 friend void MournFinalizerEntry(GCVisitorType*, FinalizerEntryPtr);
3721 template <bool>
3722 friend class MarkingVisitorBase;
3723 template <bool>
3724 friend class ScavengerVisitorBase;
3725 friend class Scavenger;
3726 friend class ObjectGraph;
3727};
3728
3729// FinalizerEntry is special in that it has a pointer field which is not
3730// traversed by pointer visitors, and thus not in the range [from(),to()]:
3731// next_seen_by_gc, which is after the other fields.
3732template <>
3733DART_FORCE_INLINE uword
3734UntaggedObject::to_offset<UntaggedFinalizerEntry>(intptr_t length) {
3735 return OFFSET_OF(UntaggedFinalizerEntry, next_seen_by_gc_);
3736}
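// Sketch of how the GC uses these fields (simplified; the real logic lives
// in MournFinalizerEntry in gc_shared.h, and the accessor names below are
// the ones the COMPRESSED_POINTER_FIELD macro generates): when an entry's
// value dies, the entry is prepended to its finalizer's entries_collected_
// list. Prepending only rewrites pointers in objects that already exist, so
// it is safe to do while the heap cannot be modified:
//
//   FinalizerEntryPtr old_head =
//       finalizer->untag()->exchange_entries_collected(entry);
//   entry->untag()->set_next(old_head);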
3737
3738// MirrorReferences are used by mirrors to hold reflectees that are VM
3739// internal objects, such as libraries, classes, functions or types.
3740class UntaggedMirrorReference : public UntaggedInstance {
3741 RAW_HEAP_OBJECT_IMPLEMENTATION(MirrorReference);
3742
3743 COMPRESSED_POINTER_FIELD(ObjectPtr, referent)
3744 VISIT_FROM(referent)
3745 VISIT_TO(referent)
3746};
3747
3748// UserTags are used by the profiler to track Dart script state.
3749class UntaggedUserTag : public UntaggedInstance {
3750 RAW_HEAP_OBJECT_IMPLEMENTATION(UserTag);
3751
3752 COMPRESSED_POINTER_FIELD(StringPtr, label)
3753 VISIT_FROM(label)
3754 VISIT_TO(label)
3755
3756 // Isolate unique tag.
3757 uword tag_;
3758
3759 // Should CPU samples with this tag be streamed?
3760 bool streamable_;
3761
3762 friend class Object;
3763
3764 public:
3765 uword tag() const { return tag_; }
3766 bool streamable() const { return streamable_; }
3767};
3768
3769class UntaggedFutureOr : public UntaggedInstance {
3770 RAW_HEAP_OBJECT_IMPLEMENTATION(FutureOr);
3771
3772 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
3773 VISIT_FROM(type_arguments)
3774 VISIT_TO(type_arguments)
3775};
3776
3777#undef WSR_COMPRESSED_POINTER_FIELD
3778
3779} // namespace dart
3780
3781#endif // RUNTIME_VM_RAW_OBJECT_H_