Flutter Engine
raw_object.h
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#ifndef RUNTIME_VM_RAW_OBJECT_H_
#define RUNTIME_VM_RAW_OBJECT_H_

#if defined(SHOULD_NOT_INCLUDE_RUNTIME)
#error "Should not include runtime"
#endif

#include "platform/assert.h"
#include "vm/class_id.h"
#include "vm/exceptions.h"
#include "vm/globals.h"
#include "vm/pointer_tagging.h"
#include "vm/snapshot.h"
#include "vm/tagged_pointer.h"
#include "vm/thread.h"
#include "vm/token.h"
#include "vm/token_position.h"
#include "vm/visitor.h"
// Currently we have two different axes for offset generation:
//
// * Target architecture
// * DART_PRECOMPILED_RUNTIME (i.e., AOT vs. JIT)
//
// That is, fields in UntaggedObject and its subclasses should only be included
// or excluded conditionally based on these factors. Otherwise, the generated
// offsets can be wrong (which should be caught by offset checking in dart.cc).
//
// TODO(dartbug.com/43646): Add DART_PRECOMPILER as another axis.

namespace dart {

// Forward declarations.
class Isolate;
class IsolateGroup;
#define DEFINE_FORWARD_DECLARATION(clazz) class Untagged##clazz;
CLASS_LIST(DEFINE_FORWARD_DECLARATION)
#undef DEFINE_FORWARD_DECLARATION
class CodeStatistics;
class StackFrame;
#define DEFINE_CONTAINS_COMPRESSED(type) \
  static constexpr bool kContainsCompressedPointers = \
      is_compressed_ptr<type>::value;

#define CHECK_CONTAIN_COMPRESSED(type) \
  static_assert( \
      kContainsCompressedPointers || is_uncompressed_ptr<type>::value, \
      "From declaration uses ObjectPtr"); \
  static_assert( \
      !kContainsCompressedPointers || is_compressed_ptr<type>::value, \
      "From declaration uses CompressedObjectPtr");

#define VISIT_FROM(first) \
  DEFINE_CONTAINS_COMPRESSED(decltype(first##_)) \
  static constexpr bool kContainsPointerFields = true; \
  base_ptr_type<decltype(first##_)>::type* from() { \
    return reinterpret_cast<base_ptr_type<decltype(first##_)>::type*>( \
        &first##_); \
  }

#define VISIT_FROM_PAYLOAD_START(elem_type) \
  static_assert(is_uncompressed_ptr<elem_type>::value || \
                    is_compressed_ptr<elem_type>::value, \
                "Payload elements must be object pointers"); \
  DEFINE_CONTAINS_COMPRESSED(elem_type) \
  static constexpr bool kContainsPointerFields = true; \
  base_ptr_type<elem_type>::type* from() { \
    const uword payload_start = reinterpret_cast<uword>(this) + sizeof(*this); \
    ASSERT(Utils::IsAligned(payload_start, sizeof(elem_type))); \
    return reinterpret_cast<base_ptr_type<elem_type>::type*>(payload_start); \
  }

#define VISIT_TO(last) \
  CHECK_CONTAIN_COMPRESSED(decltype(last##_)); \
  static_assert(kContainsPointerFields, \
                "Must have a corresponding VISIT_FROM"); \
  base_ptr_type<decltype(last##_)>::type* to(intptr_t length = 0) { \
    return reinterpret_cast<base_ptr_type<decltype(last##_)>::type*>( \
        &last##_); \
  }

#define VISIT_TO_PAYLOAD_END(elem_type) \
  static_assert(is_uncompressed_ptr<elem_type>::value || \
                    is_compressed_ptr<elem_type>::value, \
                "Payload elements must be object pointers"); \
  static_assert(kContainsPointerFields, \
                "Must have a corresponding VISIT_FROM"); \
  CHECK_CONTAIN_COMPRESSED(elem_type); \
  base_ptr_type<elem_type>::type* to(intptr_t length) { \
    const uword payload_start = reinterpret_cast<uword>(this) + sizeof(*this); \
    ASSERT(Utils::IsAligned(payload_start, sizeof(elem_type))); \
    const uword payload_last = \
        payload_start + sizeof(elem_type) * (length - 1); \
    return reinterpret_cast<base_ptr_type<elem_type>::type*>(payload_last); \
  }

#define VISIT_NOTHING() int NothingToVisit();
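// Illustrative sketch (not part of the original header): how a hypothetical
// subclass would use these macros. VISIT_FROM/VISIT_TO bracket the pointer
// fields so the GC can walk them via from()/to(); UntaggedExample and its
// fields are invented purely for illustration.
//
//   class UntaggedExample : public UntaggedObject {
//     RAW_HEAP_OBJECT_IMPLEMENTATION(Example);
//     COMPRESSED_POINTER_FIELD(StringPtr, name)   // first pointer field
//     VISIT_FROM(name)
//     COMPRESSED_POINTER_FIELD(ArrayPtr, fields)  // last pointer field
//     VISIT_TO(fields)
//     int64_t non_pointer_state_;  // not visited by the GC
//   };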
#if defined(DART_COMPRESSED_POINTERS)
#define ASSERT_UNCOMPRESSED(Type) \
  static_assert(!Untagged##Type::kContainsCompressedPointers, \
                "Should not contain compressed pointers");

#define ASSERT_COMPRESSED(Type) \
  static_assert(Untagged##Type::kContainsCompressedPointers, \
                "Should contain compressed pointers");
#else
// Do no checks if there are no compressed pointers.
#define ASSERT_UNCOMPRESSED(Type)
#define ASSERT_COMPRESSED(Type)
#endif

#define ASSERT_NOTHING_TO_VISIT(Type) \
  ASSERT(SIZE_OF_RETURNED_VALUE(Untagged##Type, NothingToVisit) == sizeof(int))

enum TypedDataElementType {
#define V(name) k##name##Element,
  CLASS_LIST_TYPED_DATA(V)
#undef V
};
#define VISITOR_SUPPORT(object) \
  static intptr_t Visit##object##Pointers(object##Ptr raw_obj, \
                                          ObjectPointerVisitor* visitor);

#define RAW_OBJECT_IMPLEMENTATION(object) \
 private: /* NOLINT */ \
  VISITOR_SUPPORT(object) \
  friend class object; \
  friend class UntaggedObject; \
  friend class OffsetsTable; \
  DISALLOW_ALLOCATION(); \
  DISALLOW_IMPLICIT_CONSTRUCTORS(Untagged##object)

#define RAW_HEAP_OBJECT_IMPLEMENTATION(object) \
 private: \
  RAW_OBJECT_IMPLEMENTATION(object); \
  friend class object##SerializationCluster; \
  friend class object##DeserializationCluster; \
  friend class object##MessageSerializationCluster; \
  friend class object##MessageDeserializationCluster; \
  friend class Serializer; \
  friend class Deserializer; \
  template <typename Base> \
  friend class ObjectCopy; \
  friend class Pass2Visitor;
// UntaggedObject is the base class of all raw objects; even though it carries
// the tags_ field, not all raw objects are allocated in the heap, and those
// that are not (e.g. UntaggedSmi) cannot be dereferenced.
class UntaggedObject {
 public:
  // The tags field, which is part of the object header, uses the following
  // bit fields for storing tags.
  enum TagBits {
    kCardRememberedBit = 0,
    kCanonicalBit = 1,
    kNotMarkedBit = 2,            // Incremental barrier target.
    kNewBit = 3,                  // Generational barrier target.
    kAlwaysSetBit = 4,            // Incremental barrier source.
    kOldAndNotRememberedBit = 5,  // Generational barrier source.
    kImmutableBit = 6,
    kReservedBit = 7,

    kSizeTagPos = kReservedBit + 1,
    kSizeTagSize = 4,
    kClassIdTagPos = kSizeTagPos + kSizeTagSize,
    kClassIdTagSize = 20,
    kHashTagPos = kClassIdTagPos + kClassIdTagSize,
    kHashTagSize = 32,
  };
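  // Sketch of the resulting tag word layout (explanatory note, not part of
  // the original header; exact field widths are configuration-dependent and
  // shown here for illustration on a 64-bit target with the hash in the
  // header):
  //
  //   bits 0-7    per-object flag bits (card-remembered, canonical, ...)
  //   bits 8-11   SizeTag (heap size in allocation units, 0 if too large)
  //   bits 12-31  ClassIdTag (index into the class table)
  //   bits 32-63  HashTag (identity hash)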
  static constexpr intptr_t kGenerationalBarrierMask = 1 << kNewBit;
  static constexpr intptr_t kIncrementalBarrierMask = 1 << kNotMarkedBit;
  static constexpr intptr_t kBarrierOverlapShift = 2;
  COMPILE_ASSERT(kNotMarkedBit + kBarrierOverlapShift == kAlwaysSetBit);
  COMPILE_ASSERT(kNewBit + kBarrierOverlapShift == kOldAndNotRememberedBit);
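  // Worked example (explanatory note, not part of the original header): the
  // write barrier below computes
  //
  //   overlap = (source_tags >> kBarrierOverlapShift) & target_tags & mask;
  //
  // Shifting the source tags right by 2 lines kAlwaysSetBit (bit 4) up with
  // kNotMarkedBit (bit 2), and kOldAndNotRememberedBit (bit 5) up with
  // kNewBit (bit 3). So the generational barrier fires exactly for an
  // old-and-not-remembered source storing a pointer to a new target, and the
  // incremental barrier fires for any source (kAlwaysSetBit is always 1)
  // storing a pointer to a not-yet-marked target, all with a shift and two
  // ANDs.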
  // The bit in the Smi tag position must be something that can be set to 0
  // for a dead filler object of either generation.
  // See Object::MakeUnusedSpaceTraversable.
  COMPILE_ASSERT(kCardRememberedBit == 0);

  // Encodes the object size in the tag in units of object alignment.
  class SizeTag {
   public:
    typedef intptr_t Type;

    static constexpr intptr_t kMaxSizeTagInUnitsOfAlignment =
        ((1 << UntaggedObject::kSizeTagSize) - 1);
    static constexpr intptr_t kMaxSizeTag =
        kMaxSizeTagInUnitsOfAlignment * kObjectAlignment;

    static constexpr uword encode(intptr_t size) {
      return SizeBits::encode(SizeToTagValue(size));
    }

    static constexpr uword decode(uword tag) {
      return TagValueToSize(SizeBits::decode(tag));
    }

    static constexpr uword update(intptr_t size, uword tag) {
      return SizeBits::update(SizeToTagValue(size), tag);
    }

    static constexpr bool SizeFits(intptr_t size) {
      assert(Utils::IsAligned(size, kObjectAlignment));
      return (size <= kMaxSizeTag);
    }

   private:
    // The actual unscaled bit field used within the tag field.
    class SizeBits
        : public BitField<uword, intptr_t, kSizeTagPos, kSizeTagSize> {};

    static constexpr intptr_t SizeToTagValue(intptr_t size) {
      assert(Utils::IsAligned(size, kObjectAlignment));
      return !SizeFits(size) ? 0 : (size >> kObjectAlignmentLog2);
    }
    static constexpr intptr_t TagValueToSize(intptr_t value) {
      return value << kObjectAlignmentLog2;
    }
  };
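  // Example (explanatory note, not part of the original header): assuming
  // 8-byte object alignment (as on 32-bit targets), SizeTag::encode(48)
  // stores 48 >> 3 = 6 in the size bits, and SizeTag::decode() maps it back
  // to 6 << 3 = 48. A size too large for the field encodes as 0, in which
  // case HeapSize() falls back to HeapSizeFromClass().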
  class ClassIdTag : public BitField<uword,
                                     ClassIdTagType,
                                     kClassIdTagPos,
                                     kClassIdTagSize> {};
  COMPILE_ASSERT(kBitsPerByte * sizeof(ClassIdTagType) >= kClassIdTagSize);
  COMPILE_ASSERT(kClassIdTagMax == (1 << kClassIdTagSize) - 1);

#if defined(HASH_IN_OBJECT_HEADER)
  class HashTag : public BitField<uword, uint32_t, kHashTagPos, kHashTagSize> {
  };
#endif

  class CardRememberedBit
      : public BitField<uword, bool, kCardRememberedBit, 1> {};

  class NotMarkedBit : public BitField<uword, bool, kNotMarkedBit, 1> {};

  class NewBit : public BitField<uword, bool, kNewBit, 1> {};

  class CanonicalBit : public BitField<uword, bool, kCanonicalBit, 1> {};

  class AlwaysSetBit : public BitField<uword, bool, kAlwaysSetBit, 1> {};

  class OldAndNotRememberedBit
      : public BitField<uword, bool, kOldAndNotRememberedBit, 1> {};

  // Will be set to 1 for the following instances:
  //
  // 1. Deeply immutable instances.
  //    `Class::is_deeply_immutable`.
  //    a. Statically guaranteed deeply immutable instances.
  //       `@pragma('vm:deeply-immutable')`.
  //    b. VM recognized deeply immutable instances.
  //       `IsDeeplyImmutableCid(intptr_t predefined_cid)`.
  // 2. Shallowly unmodifiable instances.
  //    `IsShallowlyImmutableCid(intptr_t predefined_cid)`
  //    a. Unmodifiable typed data views (backing store may be mutable).
  //    b. Closures (the context may be modifiable).
  //
  // The bit is used in `CanShareObject` in object_graph_copy, where special
  // care is taken to look at the shallowly immutable instances. Shallowly
  // immutable instances always need special care in the VM because the VM
  // needs to know what their fields are.
  //
  // The bit is also used to make typed data stores efficient (case 2.a).
  //
  // See also Class::kIsDeeplyImmutableBit.
  class ImmutableBit : public BitField<uword, bool, kImmutableBit, 1> {};

  class ReservedBit : public BitField<uword, intptr_t, kReservedBit, 1> {};

  // Assumes this is a heap object.
  bool IsNewObject() const {
    uword addr = reinterpret_cast<uword>(this);
    return (addr & kObjectAlignmentMask) == kNewObjectAlignmentOffset;
  }
  // Assumes this is a heap object.
  bool IsOldObject() const {
    uword addr = reinterpret_cast<uword>(this);
    return (addr & kObjectAlignmentMask) == kOldObjectAlignmentOffset;
  }

  uword tags() const { return tags_; }

  // Support for GC marking bit. Marked objects are either grey (not yet
  // visited) or black (already visited).
  static bool IsMarked(uword tags) { return !NotMarkedBit::decode(tags); }
  bool IsMarked() const { return !tags_.Read<NotMarkedBit>(); }
  bool IsMarkedIgnoreRace() const {
    return !tags_.ReadIgnoreRace<NotMarkedBit>();
  }
  void SetMarkBit() {
    ASSERT(!IsMarked());
    tags_.UpdateBool<NotMarkedBit>(false);
  }
  void SetMarkBitUnsynchronized() {
    ASSERT(!IsMarked());
    tags_.UpdateUnsynchronized<NotMarkedBit>(false);
  }
  void SetMarkBitRelease() {
    ASSERT(!IsMarked());
    tags_.UpdateBool<NotMarkedBit, std::memory_order_release>(false);
  }
  void ClearMarkBit() {
    ASSERT(IsMarked());
    tags_.UpdateBool<NotMarkedBit>(true);
  }
  // Returns false if the bit was already set.
  DART_WARN_UNUSED_RESULT
  bool TryAcquireMarkBit() { return tags_.TryClear<NotMarkedBit>(); }

  // Canonical objects have the property that two canonical objects are
  // logically equal iff they are the same object (pointer equal).
  bool IsCanonical() const { return tags_.Read<CanonicalBit>(); }
  void SetCanonical() { tags_.UpdateBool<CanonicalBit>(true); }
  void ClearCanonical() { tags_.UpdateBool<CanonicalBit>(false); }

  bool IsImmutable() const { return tags_.Read<ImmutableBit>(); }
  void SetImmutable() { tags_.UpdateBool<ImmutableBit>(true); }
  void ClearImmutable() { tags_.UpdateBool<ImmutableBit>(false); }

  bool InVMIsolateHeap() const;

  // Support for GC remembered bit.
  bool IsRemembered() const {
    ASSERT(IsOldObject());
    return !tags_.Read<OldAndNotRememberedBit>();
  }
  bool TryAcquireRememberedBit() {
    ASSERT(!IsCardRemembered());
    return tags_.TryClear<OldAndNotRememberedBit>();
  }
  void ClearRememberedBit() {
    ASSERT(IsOldObject());
    tags_.UpdateBool<OldAndNotRememberedBit>(true);
  }

  DART_FORCE_INLINE
  void EnsureInRememberedSet(Thread* thread) {
    if (TryAcquireRememberedBit()) {
      thread->StoreBufferAddObject(ObjectPtr(this));
    }
  }

  bool IsCardRemembered() const { return tags_.Read<CardRememberedBit>(); }
  void SetCardRememberedBitUnsynchronized() {
    ASSERT(!IsRemembered());
    ASSERT(!IsCardRemembered());
    tags_.UpdateUnsynchronized<CardRememberedBit>(true);
  }

  intptr_t GetClassId() const { return tags_.Read<ClassIdTag>(); }

#if defined(HASH_IN_OBJECT_HEADER)
  uint32_t GetHeaderHash() const { return tags_.Read<HashTag>(); }
  uint32_t SetHeaderHashIfNotSet(uint32_t h) {
    return tags_.UpdateConditional<HashTag>(h, /*conditional_old_value=*/0);
  }
#endif
  intptr_t HeapSize() const {
    uword tags = tags_;
    intptr_t result = SizeTag::decode(tags);
    if (result != 0) {
#if defined(DEBUG)
      // TODO(22501): Array::MakeFixedLength has a race with this code: we
      // might have loaded the tags field and then MakeFixedLength could have
      // updated it, leading to an inconsistency between HeapSizeFromClass()
      // and SizeTag::decode(tags). We work around it by reloading tags_ and
      // recomputing the size from the tags.
      const intptr_t size_from_class = HeapSizeFromClass(tags);
      if ((result > size_from_class) && (GetClassId() == kArrayCid) &&
          (tags_ != tags)) {
        result = SizeTag::decode(tags_);
      }
      ASSERT(result == size_from_class);
#endif
      return result;
    }
    result = HeapSizeFromClass(tags);
    ASSERT(result > SizeTag::kMaxSizeTag);
    return result;
  }

  // This variant must not dereference this->tags_.
  intptr_t HeapSize(uword tags) const {
    intptr_t result = SizeTag::decode(tags);
    if (result != 0) {
      return result;
    }
    result = HeapSizeFromClass(tags);
    ASSERT(result > SizeTag::kMaxSizeTag);
    return result;
  }

  bool Contains(uword addr) const {
    intptr_t this_size = HeapSize();
    uword this_addr = UntaggedObject::ToAddr(this);
    return (addr >= this_addr) && (addr < (this_addr + this_size));
  }

  void Validate(IsolateGroup* isolate_group) const;
  // This function may access the class-ID in the header, but it cannot access
  // the actual class object, because the sliding compactor uses this function
  // while the class objects are being moved.
  intptr_t VisitPointers(ObjectPointerVisitor* visitor) {
    // Fall back to virtual variant for predefined classes.
    intptr_t class_id = GetClassId();
    if (class_id < kNumPredefinedCids) {
      return VisitPointersPredefined(visitor, class_id);
    }

    // Calculate the first and last raw object pointer fields.
    intptr_t instance_size = HeapSize();
    uword obj_addr = ToAddr(this);
    uword from = obj_addr + sizeof(UntaggedObject);
    uword to = obj_addr + instance_size - kCompressedWordSize;
    const auto first = reinterpret_cast<CompressedObjectPtr*>(from);
    const auto last = reinterpret_cast<CompressedObjectPtr*>(to);

    const auto unboxed_fields_bitmap =
        visitor->class_table()->GetUnboxedFieldsMapAt(class_id);

    if (!unboxed_fields_bitmap.IsEmpty()) {
      intptr_t bit = sizeof(UntaggedObject) / kCompressedWordSize;
      for (CompressedObjectPtr* current = first; current <= last; current++) {
        if (!unboxed_fields_bitmap.Get(bit++)) {
          visitor->VisitCompressedPointers(heap_base(), current, current);
        }
      }
    } else {
      visitor->VisitCompressedPointers(heap_base(), first, last);
    }

    return instance_size;
  }
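  // Explanatory note (not part of the original header): the unboxed-fields
  // bitmap carries one bit per word of the instance, and a set bit means the
  // word at that offset holds raw unboxed data (e.g. an unboxed double field)
  // rather than a (compressed) object pointer. The loop above skips set bits
  // so the GC never misinterprets raw bits as a pointer.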
  template <class V>
  DART_FORCE_INLINE intptr_t VisitPointersNonvirtual(V* visitor) {
    // Fall back to virtual variant for predefined classes.
    intptr_t class_id = GetClassId();
    if (class_id < kNumPredefinedCids) {
      return VisitPointersPredefined(visitor, class_id);
    }

    // Calculate the first and last raw object pointer fields.
    intptr_t instance_size = HeapSize();
    uword obj_addr = ToAddr(this);
    uword from = obj_addr + sizeof(UntaggedObject);
    uword to = obj_addr + instance_size - kCompressedWordSize;
    const auto first = reinterpret_cast<CompressedObjectPtr*>(from);
    const auto last = reinterpret_cast<CompressedObjectPtr*>(to);

    const auto unboxed_fields_bitmap =
        visitor->class_table()->GetUnboxedFieldsMapAt(class_id);

    if (!unboxed_fields_bitmap.IsEmpty()) {
      intptr_t bit = sizeof(UntaggedObject) / kCompressedWordSize;
      for (CompressedObjectPtr* current = first; current <= last; current++) {
        if (!unboxed_fields_bitmap.Get(bit++)) {
          visitor->V::VisitCompressedPointers(heap_base(), current, current);
        }
      }
    } else {
      visitor->V::VisitCompressedPointers(heap_base(), first, last);
    }

    return instance_size;
  }

  // This variant ensures that we do not visit the extra slot created from
  // rounding up instance sizes up to the allocation unit.
  void VisitPointersPrecise(ObjectPointerVisitor* visitor);

  static ObjectPtr FromAddr(uword addr) {
    // We expect the untagged address here.
    ASSERT((addr & kSmiTagMask) != kHeapObjectTag);
    return static_cast<ObjectPtr>(addr + kHeapObjectTag);
  }

  static uword ToAddr(const UntaggedObject* raw_obj) {
    return reinterpret_cast<uword>(raw_obj);
  }
  static uword ToAddr(const ObjectPtr raw_obj) {
    return static_cast<uword>(raw_obj) - kHeapObjectTag;
  }

  static bool IsCanonical(intptr_t value) {
    return CanonicalBit::decode(value);
  }
 private:
  AtomicBitFieldContainer<uword> tags_;  // Various object tags (bits).

  intptr_t VisitPointersPredefined(ObjectPointerVisitor* visitor,
                                   intptr_t class_id);

  intptr_t HeapSizeFromClass(uword tags) const;

  void SetClassId(intptr_t new_cid) { tags_.Update<ClassIdTag>(new_cid); }
  void SetClassIdUnsynchronized(intptr_t new_cid) {
    tags_.UpdateUnsynchronized<ClassIdTag>(new_cid);
  }

 protected:
  // Automatically inherited by subclasses unless overridden.
  static constexpr bool kContainsCompressedPointers = false;
  // Automatically inherited by subclasses unless overridden.
  static constexpr bool kContainsPointerFields = false;

  // The first offset in an allocated object of the given type that contains a
  // (possibly compressed) object pointer. Used to initialize object pointer
  // fields to Object::null() instead of 0.
  //
  // Always returns an offset after the object header tags.
  template <typename T>
  DART_FORCE_INLINE static uword from_offset();

  // The last offset in an allocated object of the given untagged type that
  // contains a (possibly compressed) object pointer. Used to initialize object
  // pointer fields to Object::null() instead of 0.
  //
  // Takes an optional argument that is the number of elements in the payload,
  // which is ignored if the object never contains a payload.
  //
  // If there are no pointer fields in the object, then
  // to_offset<T>() < from_offset<T>().
  template <typename T>
  DART_FORCE_INLINE static uword to_offset(intptr_t length = 0);
  // All writes to heap objects should ultimately pass through one of the
  // methods below or their counterparts in Object, to ensure that the
  // write barrier is correctly applied.
  template <typename type, std::memory_order order = std::memory_order_relaxed>
  type LoadPointer(type const* addr) const {
    return reinterpret_cast<std::atomic<type>*>(const_cast<type*>(addr))
        ->load(order);
  }
  template <typename type,
            typename compressed_type,
            std::memory_order order = std::memory_order_relaxed>
  type LoadCompressedPointer(compressed_type const* addr) const {
    compressed_type v = reinterpret_cast<std::atomic<compressed_type>*>(
                            const_cast<compressed_type*>(addr))
                            ->load(order);
    return static_cast<type>(v.Decompress(heap_base()));
  }

  uword heap_base() const {
    return reinterpret_cast<uword>(this) & kHeapBaseMask;
  }

  template <typename type, std::memory_order order = std::memory_order_relaxed>
  void StorePointer(type const* addr, type value) {
    reinterpret_cast<std::atomic<type>*>(const_cast<type*>(addr))
        ->store(value, order);
    if (value.IsHeapObject()) {
      CheckHeapPointerStore(value, Thread::Current());
    }
  }

  template <typename type,
            typename compressed_type,
            std::memory_order order = std::memory_order_relaxed>
  void StoreCompressedPointer(compressed_type const* addr, type value) {
    reinterpret_cast<std::atomic<compressed_type>*>(
        const_cast<compressed_type*>(addr))
        ->store(static_cast<compressed_type>(value), order);
    if (value.IsHeapObject()) {
      CheckHeapPointerStore(value, Thread::Current());
    }
  }

  template <typename type>
  void StorePointer(type const* addr, type value, Thread* thread) {
    *const_cast<type*>(addr) = value;
    if (value.IsHeapObject()) {
      CheckHeapPointerStore(value, thread);
    }
  }

  template <typename type, typename compressed_type>
  void StoreCompressedPointer(compressed_type const* addr,
                              type value,
                              Thread* thread) {
    *const_cast<compressed_type*>(addr) = value;
    if (value.IsHeapObject()) {
      CheckHeapPointerStore(value, thread);
    }
  }

  template <typename type>
  void StorePointerUnaligned(type const* addr, type value, Thread* thread) {
    StoreUnaligned(const_cast<type*>(addr), value);
    if (value->IsHeapObject()) {
      CheckHeapPointerStore(value, thread);
    }
  }

  // Note: StoreArrayPointer won't work if value_type is a compressed pointer.
  template <typename type,
            std::memory_order order = std::memory_order_relaxed,
            typename value_type = type>
  void StoreArrayPointer(type const* addr, value_type value) {
    reinterpret_cast<std::atomic<type>*>(const_cast<type*>(addr))
        ->store(type(value), order);
    if (value->IsHeapObject()) {
      CheckArrayPointerStore(addr, value, Thread::Current());
    }
  }

  template <typename type, typename value_type = type>
  void StoreArrayPointer(type const* addr, value_type value, Thread* thread) {
    *const_cast<type*>(addr) = value;
    if (value->IsHeapObject()) {
      CheckArrayPointerStore(addr, value, thread);
    }
  }

  template <typename type, typename compressed_type, std::memory_order order>
  void StoreCompressedArrayPointer(compressed_type const* addr, type value) {
    reinterpret_cast<std::atomic<compressed_type>*>(
        const_cast<compressed_type*>(addr))
        ->store(static_cast<compressed_type>(value), order);
    if (value->IsHeapObject()) {
      CheckArrayPointerStore(addr, value, Thread::Current());
    }
  }

  template <typename type, typename compressed_type, std::memory_order order>
  void StoreCompressedArrayPointer(compressed_type const* addr,
                                   type value,
                                   Thread* thread) {
    reinterpret_cast<std::atomic<compressed_type>*>(
        const_cast<compressed_type*>(addr))
        ->store(static_cast<compressed_type>(value), order);
    if (value->IsHeapObject()) {
      CheckArrayPointerStore(addr, value, thread);
    }
  }

  template <typename type, typename compressed_type>
  void StoreCompressedArrayPointer(compressed_type const* addr,
                                   type value,
                                   Thread* thread) {
    *const_cast<compressed_type*>(addr) = value;
    if (value->IsHeapObject()) {
      CheckArrayPointerStore(addr, value, thread);
    }
  }

  template <typename type,
            typename compressed_type,
            std::memory_order order = std::memory_order_relaxed>
  type ExchangeCompressedPointer(compressed_type const* addr, type value) {
    compressed_type previous_value =
        reinterpret_cast<std::atomic<compressed_type>*>(
            const_cast<compressed_type*>(addr))
            ->exchange(static_cast<compressed_type>(value), order);
    if (value.IsHeapObject()) {
      CheckHeapPointerStore(value, Thread::Current());
    }
    return static_cast<type>(previous_value.Decompress(heap_base()));
  }

  template <std::memory_order order = std::memory_order_relaxed>
  SmiPtr LoadSmi(SmiPtr const* addr) const {
    return reinterpret_cast<std::atomic<SmiPtr>*>(const_cast<SmiPtr*>(addr))
        ->load(order);
  }
  template <std::memory_order order = std::memory_order_relaxed>
  SmiPtr LoadCompressedSmi(CompressedSmiPtr const* addr) const {
    return static_cast<SmiPtr>(reinterpret_cast<std::atomic<CompressedSmiPtr>*>(
                                   const_cast<CompressedSmiPtr*>(addr))
                                   ->load(order)
                                   .DecompressSmi());
  }

  // Use for storing into an explicitly Smi-typed field of an object
  // (i.e., both the previous and new value are Smis).
  template <typename type, std::memory_order order = std::memory_order_relaxed>
  void StoreSmi(type const* addr, type value) {
    // Can't use Contains, as array length is initialized through this method.
    ASSERT(reinterpret_cast<uword>(addr) >= UntaggedObject::ToAddr(this));
    reinterpret_cast<std::atomic<type>*>(const_cast<type*>(addr))
        ->store(value, order);
  }
  template <std::memory_order order = std::memory_order_relaxed>
  void StoreCompressedSmi(CompressedSmiPtr const* addr, SmiPtr value) {
    // Can't use Contains, as array length is initialized through this method.
    ASSERT(reinterpret_cast<uword>(addr) >= UntaggedObject::ToAddr(this));
    reinterpret_cast<std::atomic<CompressedSmiPtr>*>(
        const_cast<CompressedSmiPtr*>(addr))
        ->store(static_cast<CompressedSmiPtr>(value), order);
  }
 private:
  DART_FORCE_INLINE
  void CheckHeapPointerStore(ObjectPtr value, Thread* thread) {
    uword source_tags = this->tags_;
    uword target_tags = value->untag()->tags_;
    uword overlap = (source_tags >> kBarrierOverlapShift) & target_tags &
                    thread->write_barrier_mask();
    if (overlap != 0) {
      if ((overlap & kGenerationalBarrierMask) != 0) {
        // Generational barrier: record when a store creates an
        // old-and-not-remembered -> new reference.
        EnsureInRememberedSet(thread);
      }
      if ((overlap & kIncrementalBarrierMask) != 0) {
        // Incremental barrier: record when a store creates an
        // any -> not-marked reference.
        if (ClassIdTag::decode(target_tags) == kInstructionsCid) {
          // Instruction pages may be non-writable. Defer marking.
          thread->DeferredMarkingStackAddObject(value);
          return;
        }
        if (value->untag()->TryAcquireMarkBit()) {
          thread->MarkingStackAddObject(value);
        }
      }
    }
  }

  template <typename type, typename value_type>
  DART_FORCE_INLINE void CheckArrayPointerStore(type const* addr,
                                                value_type value,
                                                Thread* thread) {
    uword source_tags = this->tags_;
    uword target_tags = value->untag()->tags_;
    uword overlap = (source_tags >> kBarrierOverlapShift) & target_tags &
                    thread->write_barrier_mask();
    if (overlap != 0) {
      if ((overlap & kGenerationalBarrierMask) != 0) {
        // Generational barrier: record when a store creates an
        // old-and-not-remembered -> new reference.
        if (this->IsCardRemembered()) {
          RememberCard(addr);
        } else if (this->TryAcquireRememberedBit()) {
          thread->StoreBufferAddObject(static_cast<ObjectPtr>(this));
        }
      }
      if ((overlap & kIncrementalBarrierMask) != 0) {
        // Incremental barrier: record when a store creates an
        // old -> old-and-not-marked reference.
        if (ClassIdTag::decode(target_tags) == kInstructionsCid) {
          // Instruction pages may be non-writable. Defer marking.
          thread->DeferredMarkingStackAddObject(value);
          return;
        }
        if (value->untag()->TryAcquireMarkBit()) {
          thread->MarkingStackAddObject(value);
        }
      }
    }
  }
  friend class StoreBufferUpdateVisitor;  // RememberCard
  void RememberCard(ObjectPtr const* slot);
#if defined(DART_COMPRESSED_POINTERS)
  void RememberCard(CompressedObjectPtr const* slot);
#endif

  friend class Array;
  friend class ByteBuffer;
  friend class CidRewriteVisitor;
  friend class Closure;
  friend class Code;
  friend class Pointer;
  friend class Double;
  friend class DynamicLibrary;
  friend class ForwardPointersVisitor;  // StorePointer
  friend class FreeListElement;
  friend class Function;
  friend class GCMarker;
  friend class GCSweeper;
  friend class ExternalTypedData;
  friend class GrowableObjectArray;  // StorePointer
  template <bool>
  friend class MarkingVisitorBase;
  friend class Mint;
  friend class Object;
  friend class OneByteString;  // StoreSmi
  friend class UntaggedInstance;
  friend class Scavenger;
  template <bool>
  friend class ScavengerVisitorBase;
  friend class ImageReader;  // tags_ check
  friend class ImageWriter;
  friend class AssemblyImageWriter;
  friend class BlobImageWriter;
  friend class Deserializer;
  friend class String;
  friend class WeakProperty;            // StorePointer
  friend class Instance;                // StorePointer
  friend class StackFrame;              // GetCodeObject assertion.
  friend class CodeLookupTableBuilder;  // profiler
  friend class ObjectLocator;
  friend class WriteBarrierUpdateVisitor;  // CheckHeapPointerStore
  friend class OffsetsTable;
  friend class Object;
  friend void SetNewSpaceTaggingWord(ObjectPtr, classid_t, uint32_t);  // tags_
  friend class ObjectCopyBase;  // LoadPointer/StorePointer
  friend void ReportImpossibleNullError(intptr_t cid,
                                        StackFrame* caller_frame,
                                        Thread* thread);

  DISALLOW_ALLOCATION();
  DISALLOW_IMPLICIT_CONSTRUCTORS(UntaggedObject);
};
// Note that the below templates for from_offset and to_offset for objects
// with pointer fields assume that the range from from() and to() cover all
// pointer fields. If this is not the case (e.g., the next_seen_by_gc_ field
// in WeakArray/WeakProperty/WeakReference), then specialize the definitions.

template <typename T>
DART_FORCE_INLINE uword UntaggedObject::from_offset() {
  if constexpr (T::kContainsPointerFields) {
    return reinterpret_cast<uword>(reinterpret_cast<T*>(kOffsetOfPtr)->from()) -
           kOffsetOfPtr;
  } else {
    // Non-zero to ensure to_offset() < from_offset() in this case, as
    // to_offset() is the offset to the last pointer field, not past it.
    return sizeof(UntaggedObject);
  }
}

template <typename T>
DART_FORCE_INLINE uword UntaggedObject::to_offset(intptr_t length) {
  if constexpr (T::kContainsPointerFields) {
    return reinterpret_cast<uword>(
               reinterpret_cast<T*>(kOffsetOfPtr)->to(length)) -
           kOffsetOfPtr;
  } else {
    USE(length);
    // Zero to ensure to_offset() < from_offset() in this case, as
    // from_offset() is guaranteed to return an offset after the header tags.
    return 0;
  }
}
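// Explanatory note (not part of the original header): kOffsetOfPtr is a fake
// non-null base address used purely so the member accesses above fold into
// constant offsets. For a type whose pointer fields span name_ through
// fields_, from_offset<T>() evaluates to the offset of name_ and
// to_offset<T>() to the offset of fields_, so object initialization can walk
// [from_offset, to_offset] one (compressed) word at a time and store
// Object::null() into each slot instead of 0.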
inline intptr_t ObjectPtr::GetClassId() const {
  return untag()->GetClassId();
}

#define POINTER_FIELD(type, name) \
 public: \
  template <std::memory_order order = std::memory_order_relaxed> \
  type name() const { \
    return LoadPointer<type, order>(&name##_); \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##name(type value) { \
    StorePointer<type, order>(&name##_, value); \
  } \
 \
 protected: \
  type name##_;
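// Illustrative expansion (not part of the original header): for instance,
// POINTER_FIELD(StringPtr, name) generates a relaxed-atomic getter
// `StringPtr name() const`, a setter `void set_name(StringPtr value)` that
// routes the store through StorePointer and therefore through the write
// barrier check, and the backing member `StringPtr name_`.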
#define COMPRESSED_POINTER_FIELD(type, name) \
 public: \
  template <std::memory_order order = std::memory_order_relaxed> \
  type name() const { \
    return LoadCompressedPointer<type, Compressed##type, order>(&name##_); \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##name(type value) { \
    StoreCompressedPointer<type, Compressed##type, order>(&name##_, value); \
  } \
 \
 protected: \
  Compressed##type name##_;

#define ARRAY_POINTER_FIELD(type, name) \
 public: \
  template <std::memory_order order = std::memory_order_relaxed> \
  type name() const { \
    return LoadPointer<type, order>(&name##_); \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##name(type value) { \
    StoreArrayPointer<type, order>(&name##_, value); \
  } \
 \
 protected: \
  type name##_;

#define COMPRESSED_ARRAY_POINTER_FIELD(type, name) \
 public: \
  template <std::memory_order order = std::memory_order_relaxed> \
  type name() const { \
    return LoadPointer<Compressed##type, order>(&name##_).Decompress( \
        heap_base()); \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##name(type value) { \
    StoreCompressedArrayPointer<type, Compressed##type, order>(&name##_, \
                                                               value); \
  } \
 \
 protected: \
  Compressed##type name##_;

#define VARIABLE_POINTER_FIELDS(type, accessor_name, array_name) \
 public: \
  template <std::memory_order order = std::memory_order_relaxed> \
  type accessor_name(intptr_t index) const { \
    return LoadPointer<type, order>(&array_name()[index]); \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##accessor_name(intptr_t index, type value) { \
    StoreArrayPointer<type, order>(&array_name()[index], value); \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##accessor_name(intptr_t index, type value, Thread* thread) { \
    StoreArrayPointer<type, order>(&array_name()[index], value, thread); \
  } \
 \
 protected: \
  type* array_name() { \
    OPEN_ARRAY_START(type, type); \
  } \
  type const* array_name() const { \
    OPEN_ARRAY_START(type, type); \
  } \
  VISIT_TO_PAYLOAD_END(type)

#define COMPRESSED_VARIABLE_POINTER_FIELDS(type, accessor_name, array_name) \
 public: \
  template <std::memory_order order = std::memory_order_relaxed> \
  type accessor_name(intptr_t index) const { \
    return LoadCompressedPointer<type, Compressed##type, order>( \
        &array_name()[index]); \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##accessor_name(intptr_t index, type value) { \
    StoreCompressedArrayPointer<type, Compressed##type, order>( \
        &array_name()[index], value); \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##accessor_name(intptr_t index, type value, Thread* thread) { \
    StoreCompressedArrayPointer<type, Compressed##type, order>( \
        &array_name()[index], value, thread); \
  } \
 \
 protected: \
  Compressed##type* array_name() { \
    OPEN_ARRAY_START(Compressed##type, Compressed##type); \
  } \
  Compressed##type const* array_name() const { \
    OPEN_ARRAY_START(Compressed##type, Compressed##type); \
  } \
  VISIT_TO_PAYLOAD_END(Compressed##type)

#define SMI_FIELD(type, name) \
 public: \
  template <std::memory_order order = std::memory_order_relaxed> \
  type name() const { \
    type result = LoadSmi<order>(&name##_); \
    ASSERT(!result.IsHeapObject()); \
    return result; \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##name(type value) { \
    ASSERT(!value.IsHeapObject()); \
    StoreSmi<type, order>(&name##_, value); \
  } \
 \
 protected: \
  type name##_;

#define COMPRESSED_SMI_FIELD(type, name) \
 public: \
  template <std::memory_order order = std::memory_order_relaxed> \
  type name() const { \
    type result = LoadCompressedSmi<order>(&name##_); \
    ASSERT(!result.IsHeapObject()); \
    return result; \
  } \
  template <std::memory_order order = std::memory_order_relaxed> \
  void set_##name(type value) { \
    ASSERT(!value.IsHeapObject()); \
    StoreCompressedSmi(&name##_, value); \
  } \
 \
 protected: \
  Compressed##type name##_;

// Used to define untagged object fields that can have values wrapped in
// WeakSerializationReferences. Since WeakSerializationReferences are only used
// during precompilation, these fields have type CompressedObjectPtr in the
// precompiler and the normally expected type otherwise.
//
// Fields that are defined with WSR_COMPRESSED_POINTER_FIELD should have
// getters and setters that are declared in object.h with
// PRECOMPILER_WSR_FIELD_DECLARATION and defined in object.cc with
// PRECOMPILER_WSR_FIELD_DEFINITION.
#if defined(DART_PRECOMPILER)
#define WSR_COMPRESSED_POINTER_FIELD(Type, Name) \
  COMPRESSED_POINTER_FIELD(ObjectPtr, Name)
#else
#define WSR_COMPRESSED_POINTER_FIELD(Type, Name) \
  COMPRESSED_POINTER_FIELD(Type, Name)
#endif
class UntaggedClass : public UntaggedObject {
 public:
  enum ClassFinalizedState {
    kAllocated = 0,      // Initial state.
    kPreFinalized,       // VM classes: size precomputed, but no checks done.
    kFinalized,          // Class parsed, code compiled, not ready for allocation.
    kAllocateFinalized,  // CHA invalidated, class is ready for allocation.
  };
  enum ClassLoadingState {
    // Class object is created, but it is not filled up.
    // At this state class can only be used as a forward reference during
    // class loading.
    kNameOnly = 0,
    // Class declaration information such as type parameters, supertype and
    // implemented interfaces are loaded. However, types in the class are
    // not finalized yet.
    kDeclarationLoaded,
    // Types in the class are finalized. At this point, members can be loaded
    // and the class can be finalized.
    kTypeFinalized,
  };

  classid_t id() const { return id_; }

 private:
  RAW_HEAP_OBJECT_IMPLEMENTATION(Class);

  COMPRESSED_POINTER_FIELD(StringPtr, name)
  VISIT_FROM(name)
  NOT_IN_PRODUCT(COMPRESSED_POINTER_FIELD(StringPtr, user_name))
  COMPRESSED_POINTER_FIELD(ArrayPtr, functions)
  COMPRESSED_POINTER_FIELD(ArrayPtr, functions_hash_table)
  COMPRESSED_POINTER_FIELD(ArrayPtr, fields)
  COMPRESSED_POINTER_FIELD(ArrayPtr, offset_in_words_to_field)
  COMPRESSED_POINTER_FIELD(ArrayPtr, interfaces)  // Array of AbstractType.
  COMPRESSED_POINTER_FIELD(ScriptPtr, script)
  COMPRESSED_POINTER_FIELD(LibraryPtr, library)
  COMPRESSED_POINTER_FIELD(TypeParametersPtr, type_parameters)
  COMPRESSED_POINTER_FIELD(TypePtr, super_type)
  // Canonicalized const instances of this class.
  COMPRESSED_POINTER_FIELD(ArrayPtr, constants)
  // Declaration type for this class.
  COMPRESSED_POINTER_FIELD(TypePtr, declaration_type)
  // Cache for dispatcher functions.
  COMPRESSED_POINTER_FIELD(ArrayPtr, invocation_dispatcher_cache)

#if !defined(PRODUCT) || !defined(DART_PRECOMPILED_RUNTIME)
  // Array of Class.
  COMPRESSED_POINTER_FIELD(GrowableObjectArrayPtr, direct_implementors)
  // Array of Class.
  COMPRESSED_POINTER_FIELD(GrowableObjectArrayPtr, direct_subclasses)
#endif  // !defined(PRODUCT) || !defined(DART_PRECOMPILED_RUNTIME)

  // Cached declaration instance type arguments for this class.
  // Not preserved in AOT snapshots.
  COMPRESSED_POINTER_FIELD(TypeArgumentsPtr,
                           declaration_instance_type_arguments)
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Stub code for allocation of instances.
  COMPRESSED_POINTER_FIELD(CodePtr, allocation_stub)
  // CHA optimized codes.
  COMPRESSED_POINTER_FIELD(WeakArrayPtr, dependent_code)
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

#if defined(DART_PRECOMPILED_RUNTIME)
  VISIT_TO(declaration_instance_type_arguments)
#else
  VISIT_TO(dependent_code)
#endif  // defined(DART_PRECOMPILED_RUNTIME)

  CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
    switch (kind) {
      case Snapshot::kFullAOT:
#if defined(PRODUCT)
        return reinterpret_cast<CompressedObjectPtr*>(
            &invocation_dispatcher_cache_);
#else
        return reinterpret_cast<CompressedObjectPtr*>(&direct_subclasses_);
#endif  // defined(PRODUCT)
      case Snapshot::kFull:
      case Snapshot::kFullCore:
#if !defined(DART_PRECOMPILED_RUNTIME)
        return reinterpret_cast<CompressedObjectPtr*>(&allocation_stub_);
#endif
      case Snapshot::kFullJIT:
#if !defined(DART_PRECOMPILED_RUNTIME)
        return reinterpret_cast<CompressedObjectPtr*>(&dependent_code_);
#endif
      case Snapshot::kNone:
      case Snapshot::kInvalid:
        break;
    }
    UNREACHABLE();
    return nullptr;
  }

  NOT_IN_PRECOMPILED(TokenPosition token_pos_);
  NOT_IN_PRECOMPILED(TokenPosition end_token_pos_);
  NOT_IN_PRECOMPILED(classid_t implementor_cid_);

  classid_t id_;                // Class Id, also index in the class table.
  int16_t num_type_arguments_;  // Number of type arguments in flattened vector.
  uint16_t num_native_fields_;
  uint32_t state_bits_;

  // Size if fixed length, or 0 if variable length.
  int32_t host_instance_size_in_words_;

  // Offset of the type arguments field.
  int32_t host_type_arguments_field_offset_in_words_;

  // Offset of the next instance field.
  int32_t host_next_field_offset_in_words_;

#if defined(DART_PRECOMPILER)
  // Size if fixed length, or 0 if variable length (target).
  int32_t target_instance_size_in_words_;

  // Offset of the type arguments field (target).
  int32_t target_type_arguments_field_offset_in_words_;

  // Offset of the next instance field (target).
  int32_t target_next_field_offset_in_words_;
#endif  // defined(DART_PRECOMPILER)

#if !defined(DART_PRECOMPILED_RUNTIME)
  uint32_t kernel_offset_;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  friend class Instance;
  friend class IsolateGroup;
  friend class Object;
  friend class UntaggedInstance;
  friend class MessageSerializer;
  friend class CidRewriteVisitor;
  friend class Api;
};
class UntaggedPatchClass : public UntaggedObject {
 private:
  RAW_HEAP_OBJECT_IMPLEMENTATION(PatchClass);

  COMPRESSED_POINTER_FIELD(ClassPtr, wrapped_class)
  VISIT_FROM(wrapped_class)
  COMPRESSED_POINTER_FIELD(ScriptPtr, script)
#if !defined(DART_PRECOMPILED_RUNTIME)
  COMPRESSED_POINTER_FIELD(KernelProgramInfoPtr, kernel_program_info)
  VISIT_TO(kernel_program_info)
#else
  VISIT_TO(script)
#endif

  CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
    switch (kind) {
      case Snapshot::kFullAOT:
        return reinterpret_cast<CompressedObjectPtr*>(&script_);
      case Snapshot::kFull:
      case Snapshot::kFullCore:
      case Snapshot::kFullJIT:
#if !defined(DART_PRECOMPILED_RUNTIME)
        return reinterpret_cast<CompressedObjectPtr*>(&kernel_program_info_);
#else
        UNREACHABLE();
        return nullptr;
#endif
      case Snapshot::kNone:
      case Snapshot::kInvalid:
        break;
    }
    UNREACHABLE();
    return nullptr;
  }

  NOT_IN_PRECOMPILED(intptr_t kernel_library_index_);

  friend class Function;
};
class UntaggedFunction : public UntaggedObject {
 public:
  // When you add a new kind, please also update the observatory to account
  // for the new string returned by KindToCString().
  // - runtime/observatory/lib/src/models/objects/function.dart (FunctionKind)
  // - runtime/observatory/lib/src/elements/function_view.dart
  //   (_functionKindToString)
  // - runtime/observatory/lib/src/service/object.dart (stringToFunctionKind)
#define FOR_EACH_RAW_FUNCTION_KIND(V) \
  /* an ordinary or operator method */ \
  V(RegularFunction) \
  /* a user-declared closure function */ \
  V(ClosureFunction) \
  /* an implicit closure (i.e., tear-off) */ \
  V(ImplicitClosureFunction) \
  /* a signature only without actual code */ \
  V(GetterFunction) \
  /* setter functions e.g: set foo(..) { .. } */ \
  V(SetterFunction) \
  /* a generative (is_static=false) or factory (is_static=true) constructor */ \
  V(Constructor) \
  /* an implicit getter for instance fields */ \
  V(ImplicitGetter) \
  /* an implicit setter for instance fields */ \
  V(ImplicitSetter) \
  /* represents an implicit getter for static fields with initializers */ \
  V(ImplicitStaticGetter) \
  /* the initialization expression for a static or instance field */ \
  V(FieldInitializer) \
  /* return a closure on the receiver for tear-offs */ \
  V(MethodExtractor) \
  /* builds an Invocation and invokes noSuchMethod */ \
  V(NoSuchMethodDispatcher) \
  /* invokes a field as a closure (i.e., call-through-getter) */ \
  V(InvokeFieldDispatcher) \
  /* a generated irregexp matcher function. */ \
  V(IrregexpFunction) \
  /* a forwarder which performs type checks for arguments of a dynamic call */ \
  /* (i.e., those checks omitted by the caller for interface calls). */ \
  V(DynamicInvocationForwarder) \
  /* A `dart:ffi` call or callback trampoline. */ \
  V(FfiTrampoline) \
  /* getter for a record field */ \
  V(RecordFieldGetter)

  enum Kind {
#define KIND_DEFN(Name) k##Name,
    FOR_EACH_RAW_FUNCTION_KIND(KIND_DEFN)
#undef KIND_DEFN
  };

  static const char* KindToCString(Kind k) {
    switch (k) {
#define KIND_CASE(Name) \
  case Kind::k##Name: \
    return #Name;
      FOR_EACH_RAW_FUNCTION_KIND(KIND_CASE)
#undef KIND_CASE
      default:
        UNREACHABLE();
        return nullptr;
    }
  }

  static bool ParseKind(const char* str, Kind* out) {
#define KIND_CASE(Name) \
  if (strcmp(str, #Name) == 0) { \
    *out = Kind::k##Name; \
    return true; \
  }
    FOR_EACH_RAW_FUNCTION_KIND(KIND_CASE)
#undef KIND_CASE
    return false;
  }
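  // Example (explanatory note, not part of the original header): the two
  // helpers round-trip, e.g. KindToCString(Kind::kClosureFunction) yields
  // "ClosureFunction", and ParseKind("ClosureFunction", &k) stores
  // Kind::kClosureFunction in k and returns true; an unrecognized string
  // leaves *out untouched and returns false.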
  // Wraps a 64-bit integer to represent the bitmap for unboxed parameters and
  // the return value. Two bits are used for each of them to denote if it is
  // boxed, an unboxed integer, an unboxed double, or an unboxed record.
  // It includes the two bits for the receiver, even though currently we
  // do not have information from TFA that allows the receiver to be unboxed.
  class alignas(8) UnboxedParameterBitmap {
   public:
    enum UnboxedState {
      kBoxed,
      kUnboxedInt,
      kUnboxedDouble,
      kUnboxedRecord,
    };
    static constexpr intptr_t kBitsPerElement = 2;
    static constexpr uint64_t kElementBitmask = (1 << kBitsPerElement) - 1;
    static constexpr intptr_t kCapacity =
        (kBitsPerByte * sizeof(uint64_t)) / kBitsPerElement;

    UnboxedParameterBitmap() : bitmap_(0) {}
    explicit UnboxedParameterBitmap(uint64_t bitmap) : bitmap_(bitmap) {}
    UnboxedParameterBitmap(const UnboxedParameterBitmap&) = default;
    UnboxedParameterBitmap& operator=(const UnboxedParameterBitmap&) = default;

    DART_FORCE_INLINE bool IsUnboxed(intptr_t position) const {
      return At(position) != kBoxed;
    }
    DART_FORCE_INLINE bool IsUnboxedInteger(intptr_t position) const {
      return At(position) == kUnboxedInt;
    }
    DART_FORCE_INLINE bool IsUnboxedDouble(intptr_t position) const {
      return At(position) == kUnboxedDouble;
    }
    DART_FORCE_INLINE bool IsUnboxedRecord(intptr_t position) const {
      return At(position) == kUnboxedRecord;
    }
    DART_FORCE_INLINE void SetUnboxedInteger(intptr_t position) {
      SetAt(position, kUnboxedInt);
    }
    DART_FORCE_INLINE void SetUnboxedDouble(intptr_t position) {
      SetAt(position, kUnboxedDouble);
    }
    DART_FORCE_INLINE void SetUnboxedRecord(intptr_t position) {
      SetAt(position, kUnboxedRecord);
    }
    DART_FORCE_INLINE uint64_t Value() const { return bitmap_; }
    DART_FORCE_INLINE bool IsEmpty() const { return bitmap_ == 0; }
    DART_FORCE_INLINE void Reset() { bitmap_ = 0; }
    DART_FORCE_INLINE bool HasUnboxedParameters() const {
      return (bitmap_ >> kBitsPerElement) != 0;
    }

   private:
    DART_FORCE_INLINE UnboxedState At(intptr_t position) const {
      if (position >= kCapacity) {
        return kBoxed;
      }
      return static_cast<UnboxedState>(
          (bitmap_ >> (kBitsPerElement * position)) & kElementBitmask);
    }
    DART_FORCE_INLINE void SetAt(intptr_t position, UnboxedState state) {
      ASSERT(position < kCapacity);
      const intptr_t shift = kBitsPerElement * position;
      bitmap_ = (bitmap_ & ~(kElementBitmask << shift)) |
                (static_cast<decltype(bitmap_)>(state) << shift);
    }

    uint64_t bitmap_;
  };
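  // Example (explanatory note, not part of the original header): a function
  // whose first two declared parameters are an unboxed int and an unboxed
  // double would be described as below; position 0 belongs to the receiver,
  // which HasUnboxedParameters() deliberately skips.
  //
  //   UnboxedParameterBitmap bitmap;
  //   bitmap.SetUnboxedInteger(1);
  //   bitmap.SetUnboxedDouble(2);
  //   ASSERT(bitmap.IsUnboxed(1));
  //   ASSERT(!bitmap.IsUnboxed(0));           // receiver stays boxed
  //   ASSERT(bitmap.HasUnboxedParameters());  // true: bits beyond position 0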
 private:
  friend class Class;

  RAW_HEAP_OBJECT_IMPLEMENTATION(Function);

  uword entry_point_;            // Accessed from generated code.
  uword unchecked_entry_point_;  // Accessed from generated code.

  COMPRESSED_POINTER_FIELD(StringPtr, name)
  VISIT_FROM(name)
  // Class or patch class or mixin class where this function is defined.
  COMPRESSED_POINTER_FIELD(ObjectPtr, owner)
  WSR_COMPRESSED_POINTER_FIELD(FunctionTypePtr, signature)
  // Additional data specific to the function kind. See Function::set_data()
  // for details.
  WSR_COMPRESSED_POINTER_FIELD(ObjectPtr, data)
  CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
    switch (kind) {
      case Snapshot::kFullAOT:
      case Snapshot::kFull:
      case Snapshot::kFullCore:
      case Snapshot::kFullJIT:
        return reinterpret_cast<CompressedObjectPtr*>(&data_);
      case Snapshot::kNone:
      case Snapshot::kInvalid:
        break;
    }
    UNREACHABLE();
    return nullptr;
  }
  // ICData of unoptimized code.
  COMPRESSED_POINTER_FIELD(ArrayPtr, ic_data_array);
  // Currently active code. Accessed from generated code.
  COMPRESSED_POINTER_FIELD(CodePtr, code);
#if defined(DART_PRECOMPILED_RUNTIME)
  VISIT_TO(code);
#else
  // Positional parameter names are not needed in the AOT runtime.
  COMPRESSED_POINTER_FIELD(ArrayPtr, positional_parameter_names);
  // Unoptimized code, keep it after optimization.
  COMPRESSED_POINTER_FIELD(CodePtr, unoptimized_code);
  VISIT_TO(unoptimized_code);

  UnboxedParameterBitmap unboxed_parameters_info_;
#endif

#if !defined(DART_PRECOMPILED_RUNTIME) || \
    (defined(DART_PRECOMPILED_RUNTIME) && !defined(PRODUCT))
  TokenPosition token_pos_;
#endif

#if !defined(DART_PRECOMPILED_RUNTIME)
  TokenPosition end_token_pos_;
#endif

  AtomicBitFieldContainer<uint32_t> kind_tag_;  // See Function::KindTagBits.

#define JIT_FUNCTION_COUNTERS(F) \
  F(intptr_t, int32_t, usage_counter) \
  F(intptr_t, uint16_t, optimized_instruction_count) \
  F(intptr_t, uint16_t, optimized_call_site_count) \
  F(int8_t, int8_t, deoptimization_counter) \
  F(intptr_t, int8_t, state_bits) \
  F(int, int8_t, inlining_depth)

#if !defined(DART_PRECOMPILED_RUNTIME)
  uint32_t kernel_offset_;

#define DECLARE(return_type, type, name) type name##_;
  JIT_FUNCTION_COUNTERS(DECLARE)
#undef DECLARE

  AtomicBitFieldContainer<uint8_t> packed_fields_;

  static constexpr intptr_t kMaxOptimizableBits = 1;

  using PackedOptimizable =
      BitField<decltype(packed_fields_), bool, 0, kMaxOptimizableBits>;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
};
enum class InstantiationMode : uint8_t {
  // Must instantiate the type arguments normally.
  kNeedsInstantiation,
  // The type arguments are already instantiated.
  kIsInstantiated,
  // Use the instantiator type arguments that would be used to instantiate
  // the default type arguments, as instantiating produces the same result.
  kSharesInstantiatorTypeArguments,
  // Use the function type arguments that would be used to instantiate
  // the default type arguments, as instantiating produces the same result.
  kSharesFunctionTypeArguments,
};
class UntaggedClosureData : public UntaggedObject {
 private:
  RAW_HEAP_OBJECT_IMPLEMENTATION(ClosureData);

  COMPRESSED_POINTER_FIELD(ContextScopePtr, context_scope)
  VISIT_FROM(context_scope)
  // Enclosing function of this local function.
  WSR_COMPRESSED_POINTER_FIELD(FunctionPtr, parent_function)
  // Closure object for static implicit closures.
  COMPRESSED_POINTER_FIELD(ClosurePtr, closure)
  VISIT_TO(closure)

  // kernel_to_il.cc assumes we can load the untagged value and box it in a Smi.
  static_assert(sizeof(InstantiationMode) * kBitsPerByte <=
                    compiler::target::kSmiBits,
                "Instantiation mode must fit in a Smi");

  static constexpr uint8_t kNoAwaiterLinkDepth = 0xFF;

  AtomicBitFieldContainer<uint32_t> packed_fields_;

  using PackedInstantiationMode =
      BitField<decltype(packed_fields_), InstantiationMode, 0, 8>;
  using PackedAwaiterLinkDepth = BitField<decltype(packed_fields_),
                                          uint8_t,
                                          PackedInstantiationMode::kNextBit,
                                          8>;
  using PackedAwaiterLinkIndex = BitField<decltype(packed_fields_),
                                          uint8_t,
                                          PackedAwaiterLinkDepth::kNextBit,
                                          8>;

  friend class Function;
};
class UntaggedFfiTrampolineData : public UntaggedObject {
 private:
  RAW_HEAP_OBJECT_IMPLEMENTATION(FfiTrampolineData);

  COMPRESSED_POINTER_FIELD(TypePtr, signature_type)
  VISIT_FROM(signature_type)

  COMPRESSED_POINTER_FIELD(FunctionTypePtr, c_signature)

  // Target Dart method for callbacks, otherwise null.
  COMPRESSED_POINTER_FIELD(FunctionPtr, callback_target)

  // For callbacks, value to return if Dart target throws an exception.
  COMPRESSED_POINTER_FIELD(InstancePtr, callback_exceptional_return)
  VISIT_TO(callback_exceptional_return)
  CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }

  // Callback id for callbacks.
  //
  // The callback ids are used so that native callbacks can look up their own
  // code objects, since native code doesn't pass code objects into function
  // calls. The callback id is also used for verifying that callbacks are
  // called on the correct isolate. See DLRT_VerifyCallbackIsolate for details.
  //
  // The callback id is -1 for non-callbacks or when the id is not allocated
  // yet. Check 'callback_target_' to determine if this is a callback or not.
  int32_t callback_id_;

  // The kind of trampoline this is. See FfiCallbackKind.
  uint8_t ffi_function_kind_;
};
class UntaggedField : public UntaggedObject {
  RAW_HEAP_OBJECT_IMPLEMENTATION(Field);

  COMPRESSED_POINTER_FIELD(StringPtr, name)
  VISIT_FROM(name)
  // Class or patch class or mixin class where this field is defined, or the
  // original field.
  COMPRESSED_POINTER_FIELD(ObjectPtr, owner)
  COMPRESSED_POINTER_FIELD(AbstractTypePtr, type)
  // Static initializer function.
  COMPRESSED_POINTER_FIELD(FunctionPtr, initializer_function)
  // - for instance fields: offset in words to the value in the class instance.
  // - for static fields: index into field_table.
  COMPRESSED_POINTER_FIELD(SmiPtr, host_offset_or_field_id)
  COMPRESSED_POINTER_FIELD(SmiPtr, guarded_list_length)
  COMPRESSED_POINTER_FIELD(WeakArrayPtr, dependent_code)
  VISIT_TO(dependent_code);
  CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
    switch (kind) {
      case Snapshot::kFull:
      case Snapshot::kFullCore:
      case Snapshot::kFullJIT:
      case Snapshot::kFullAOT:
        return reinterpret_cast<CompressedObjectPtr*>(&initializer_function_);
      case Snapshot::kNone:
      case Snapshot::kInvalid:
        break;
    }
    UNREACHABLE();
    return nullptr;
  }
  TokenPosition token_pos_;
  TokenPosition end_token_pos_;
  ClassIdTagType guarded_cid_;
  ClassIdTagType is_nullable_;  // kNullCid if field can contain null value and
                                // kIllegalCid otherwise.

#if !defined(DART_PRECOMPILED_RUNTIME)
  uint32_t kernel_offset_;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  // Offset to the guarded length field inside an instance of class matching
  // guarded_cid_. Stored corrected by -kHeapObjectTag to simplify code
  // generated on platforms with weak addressing modes (ARM).
  int8_t guarded_list_length_in_object_offset_;

  // Runtime tracking state of exactness of type annotation of this field.
  // See StaticTypeExactnessState for the meaning and possible values in this
  // field.
  int8_t static_type_exactness_state_;

  uint16_t kind_bits_;  // static, final, const, has initializer....

#if !defined(DART_PRECOMPILED_RUNTIME)
  // For instance fields, the offset in words in the target architecture.
  int32_t target_offset_;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  friend class CidRewriteVisitor;
  friend class GuardFieldClassInstr;  // For sizeof(guarded_cid_/...)
  friend class LoadFieldInstr;        // For sizeof(guarded_cid_/...)
  friend class StoreFieldInstr;       // For sizeof(guarded_cid_/...)
};
class alignas(8) UntaggedScript : public UntaggedObject {
  RAW_HEAP_OBJECT_IMPLEMENTATION(Script);

  COMPRESSED_POINTER_FIELD(StringPtr, url)
  VISIT_FROM(url)
  COMPRESSED_POINTER_FIELD(StringPtr, resolved_url)
  COMPRESSED_POINTER_FIELD(TypedDataPtr, line_starts)
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
  COMPRESSED_POINTER_FIELD(TypedDataViewPtr, constant_coverage)
#endif  // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
  COMPRESSED_POINTER_FIELD(ArrayPtr, debug_positions)
  COMPRESSED_POINTER_FIELD(KernelProgramInfoPtr, kernel_program_info)
  VISIT_TO(kernel_program_info)

  CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
    switch (kind) {
      case Snapshot::kFullAOT:
#if defined(PRODUCT)
        return reinterpret_cast<CompressedObjectPtr*>(&url_);
#else
        return reinterpret_cast<CompressedObjectPtr*>(&resolved_url_);
#endif
      case Snapshot::kFull:
      case Snapshot::kFullCore:
      case Snapshot::kFullJIT:
        return reinterpret_cast<CompressedObjectPtr*>(&kernel_program_info_);
      case Snapshot::kNone:
      case Snapshot::kInvalid:
        break;
    }
    UNREACHABLE();
    return nullptr;
  }

#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
  int64_t load_timestamp_;
  int32_t kernel_script_index_;
#else
  int32_t kernel_script_index_;
  int64_t load_timestamp_;
#endif

#if !defined(DART_PRECOMPILED_RUNTIME)
  int32_t flags_and_max_position_;

 public:
  using LazyLookupSourceAndLineStartsBit =
      BitField<decltype(flags_and_max_position_), bool, 0, 1>;
  using HasCachedMaxPositionBit =
      BitField<decltype(flags_and_max_position_),
               bool,
               LazyLookupSourceAndLineStartsBit::kNextBit,
               1>;
  using CachedMaxPositionBitField =
      BitField<decltype(flags_and_max_position_),
               intptr_t,
               HasCachedMaxPositionBit::kNextBit>;

 private:
#endif
};
1651
1653 enum LibraryState {
1654 kAllocated, // Initial state.
1655 kLoadRequested, // Compiler or script requested load of library.
1656 kLoadInProgress, // Library is in the process of being loaded.
1657 kLoaded, // Library is loaded.
1658 };
1659
1660 enum LibraryFlags {
1661 kDartSchemeBit = 0,
1662 kDebuggableBit, // True if debugger can stop in library.
1663 kInFullSnapshotBit, // True if library is in a full snapshot.
1664 kNumFlagBits,
1665 };
1666 COMPILE_ASSERT(kNumFlagBits <= (sizeof(uint8_t) * kBitsPerByte));
1667 class DartSchemeBit : public BitField<uint8_t, bool, kDartSchemeBit, 1> {};
1668 class DebuggableBit : public BitField<uint8_t, bool, kDebuggableBit, 1> {};
1669 class InFullSnapshotBit
1670 : public BitField<uint8_t, bool, kInFullSnapshotBit, 1> {};
1671
1673
1674 COMPRESSED_POINTER_FIELD(StringPtr, name)
1676 COMPRESSED_POINTER_FIELD(StringPtr, url)
1677 COMPRESSED_POINTER_FIELD(StringPtr, private_key)
1678 // Top-level names in this library.
1679 COMPRESSED_POINTER_FIELD(ArrayPtr, dictionary)
1680 // Metadata on classes, methods etc.
1681 COMPRESSED_POINTER_FIELD(ArrayPtr, metadata)
1682 // Class containing top-level elements.
1683 COMPRESSED_POINTER_FIELD(ClassPtr, toplevel_class)
1684 COMPRESSED_POINTER_FIELD(GrowableObjectArrayPtr, used_scripts)
1685 COMPRESSED_POINTER_FIELD(LoadingUnitPtr, loading_unit)
1686 // List of Namespaces imported without prefix.
1687 COMPRESSED_POINTER_FIELD(ArrayPtr, imports)
1688 // List of re-exported Namespaces.
1689 COMPRESSED_POINTER_FIELD(ArrayPtr, exports)
1690 COMPRESSED_POINTER_FIELD(ArrayPtr, dependencies)
1691#if !defined(DART_PRECOMPILED_RUNTIME)
1692 COMPRESSED_POINTER_FIELD(KernelProgramInfoPtr, kernel_program_info)
1693#endif
1694 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
1695 switch (kind) {
1696 case Snapshot::kFullAOT:
1697 return reinterpret_cast<CompressedObjectPtr*>(&exports_);
1698 case Snapshot::kFull:
1699 case Snapshot::kFullCore:
1700 case Snapshot::kFullJIT:
1701#if !defined(DART_PRECOMPILED_RUNTIME)
1702 return reinterpret_cast<CompressedObjectPtr*>(&kernel_program_info_);
1703#else
1704 UNREACHABLE();
1705 return nullptr;
1706#endif
1707 case Snapshot::kNone:
1708 case Snapshot::kInvalid:
1709 break;
1710 }
1711 UNREACHABLE();
1712 return nullptr;
1713 }
1714 // Array of scripts loaded in this library.
1715 COMPRESSED_POINTER_FIELD(ArrayPtr, loaded_scripts);
1716 VISIT_TO(loaded_scripts);
1717
1718 Dart_NativeEntryResolver native_entry_resolver_; // Resolves natives.
1719 Dart_NativeEntrySymbol native_entry_symbol_resolver_;
1720 Dart_FfiNativeResolver ffi_native_resolver_;
1721
1722 classid_t index_; // Library id number.
1723 uint16_t num_imports_; // Number of entries in imports_.
1724 int8_t load_state_; // Of type LibraryState.
1725 uint8_t flags_; // BitField for LibraryFlags.
1726
1727#if !defined(DART_PRECOMPILED_RUNTIME)
1728 uint32_t kernel_library_index_;
1729#endif // !defined(DART_PRECOMPILED_RUNTIME)
1730
1731 friend class Class;
1732 friend class Isolate;
1733};
1734
1735class UntaggedNamespace : public UntaggedObject {
1736 RAW_HEAP_OBJECT_IMPLEMENTATION(Namespace);
1737
1738 // library with name dictionary.
1739 COMPRESSED_POINTER_FIELD(LibraryPtr, target)
1740 VISIT_FROM(target)
1742 // list of names that are exported.
1742 COMPRESSED_POINTER_FIELD(ArrayPtr, show_names)
1743 // list of names that are hidden.
1744 COMPRESSED_POINTER_FIELD(ArrayPtr, hide_names)
1745 COMPRESSED_POINTER_FIELD(LibraryPtr, owner)
1746 VISIT_TO(owner)
1747 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
1748 switch (kind) {
1749 case Snapshot::kFullAOT:
1750 return reinterpret_cast<CompressedObjectPtr*>(&target_);
1751 case Snapshot::kFull:
1753 case Snapshot::kFullJIT:
1754 return reinterpret_cast<CompressedObjectPtr*>(&owner_);
1755 case Snapshot::kNone:
1756 case Snapshot::kInvalid:
1757 break;
1758 }
1759 UNREACHABLE();
1760 return nullptr;
1761 }
1762};
1763
1764// Contains information about a kernel [Component].
1765//
1766// Used to access string tables, canonical name tables, constants, metadata, ...
1767class UntaggedKernelProgramInfo : public UntaggedObject {
1768 RAW_HEAP_OBJECT_IMPLEMENTATION(KernelProgramInfo);
1769
1770 COMPRESSED_POINTER_FIELD(TypedDataBasePtr, kernel_component)
1771 VISIT_FROM(kernel_component)
1772 COMPRESSED_POINTER_FIELD(TypedDataPtr, string_offsets)
1773 COMPRESSED_POINTER_FIELD(TypedDataViewPtr, string_data)
1774 COMPRESSED_POINTER_FIELD(TypedDataPtr, canonical_names)
1775 COMPRESSED_POINTER_FIELD(TypedDataViewPtr, metadata_payloads)
1776 COMPRESSED_POINTER_FIELD(TypedDataViewPtr, metadata_mappings)
1777 COMPRESSED_POINTER_FIELD(ArrayPtr, scripts)
1778 COMPRESSED_POINTER_FIELD(ArrayPtr, constants)
1779 COMPRESSED_POINTER_FIELD(TypedDataViewPtr, constants_table)
1780 COMPRESSED_POINTER_FIELD(ArrayPtr, libraries_cache)
1781 COMPRESSED_POINTER_FIELD(ArrayPtr, classes_cache)
1782 VISIT_TO(classes_cache)
1783
1784 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
1785 return reinterpret_cast<CompressedObjectPtr*>(&constants_table_);
1786 }
1787};
1788
1795class UntaggedWeakArray : public UntaggedObject {
1796 RAW_HEAP_OBJECT_IMPLEMENTATION(WeakArray);
1797
1798 COMPRESSED_SMI_FIELD(SmiPtr, length)
1799 VISIT_FROM(length)
1800
1801 COMPRESSED_POINTER_FIELD(WeakArrayPtr, next_seen_by_gc)
1802
1805 // Variable length data follows here.
1806 COMPRESSED_VARIABLE_POINTER_FIELDS(ObjectPtr, element, data)
1807
1808 template <typename Table, bool kAllCanonicalObjectsAreIncludedIntoSet>
1809 friend class CanonicalSetDeserializationCluster;
1810 template <typename Type, typename PtrType>
1811 friend class GCLinkedList;
1812 template <bool>
1813 friend class MarkingVisitorBase;
1814 template <bool>
1815 friend class ScavengerVisitorBase;
1816 friend class Scavenger;
1817};
1818
1819// WeakArray is special in that it has a pointer field which is not
1820// traversed by pointer visitors, and thus not in the range [from(),to()]:
1821// next_seen_by_gc, which is before the other fields.
1822template <>
1823DART_FORCE_INLINE uword UntaggedObject::from_offset<UntaggedWeakArray>() {
1824 return OFFSET_OF(UntaggedWeakArray, next_seen_by_gc_);
1825}
1826
1827class UntaggedCode : public UntaggedObject {
1828 RAW_HEAP_OBJECT_IMPLEMENTATION(Code);
1829
1830 // When in the precompiled runtime, there is no disabling of Code objects
1831 // and thus no active_instructions_ field. Thus, the entry point caches are
1832 // only set once during deserialization. If not using bare instructions,
1833 // the caches should match the entry points for instructions_.
1834 //
1835 // Otherwise, they should contain entry points for active_instructions_.
1836
1837 uword entry_point_; // Accessed from generated code.
1838
1839 // In AOT this entry-point supports switchable calls. It checks the type of
1840 // the receiver on entry to the function and calls a stub to patch up the
1841 // caller if they mismatch.
1842 uword monomorphic_entry_point_; // Accessed from generated code (AOT only).
1843
1844 // Entry-point used from call-sites with some additional static information.
1845 // The exact behavior of this entry-point depends on the kind of function:
1846 //
1847 // kRegularFunction/kSetter/kGetter:
1848 //
1849 // Call-site is assumed to know that the (type) arguments are invariantly
1850 // type-correct against the actual runtime-type of the receiver. For
1851 // instance, this entry-point is used for invocations against "this" and
1852 // invocations from IC stubs that test the class type arguments.
1853 //
1854 // kClosureFunction:
1855 //
1856 // Call-site is assumed to pass the correct number of positional and type
1857 // arguments (except in the case of partial instantiation, when the type
1858 // arguments are omitted). All (type) arguments are assumed to match the
1859 // corresponding (type) parameter types (bounds).
1860 //
1861 // kImplicitClosureFunction:
1862 //
1863 // Similar to kClosureFunction, except that the types (bounds) of the (type)
1864 // arguments are expected to match the *runtime signature* of the closure,
1865 // which (unlike with kClosureFunction) may have more general (type)
1866 // parameter types (bounds) than the declared type of the forwarded method.
1867 //
1868 // In many cases a distinct static entry-point will not be created for a
1869 // function if it would not be able to skip a lot of work (e.g., no argument
1870 // type checks are necessary or this Code belongs to a stub). In this case
1871 // 'unchecked_entry_point_' will refer to the same position as 'entry_point_'.
1872 //
1873 uword unchecked_entry_point_; // Accessed from generated code.
1874 uword monomorphic_unchecked_entry_point_; // Accessed from generated code.
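// Illustrative example (editorial sketch, not part of the original header):
// for a Dart method `int scale(int x) => x * factor_;` on a class C, a call
// `this.scale(3)` inside another C method is already known to be
// type-correct, so generated code can enter through unchecked_entry_point_,
// while a dynamic call such as `(c as dynamic).scale(3)` enters through
// entry_point_ and runs the argument checks that the unchecked entry skips.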
1875
1876 POINTER_FIELD(ObjectPoolPtr, object_pool) // Accessed from generated code.
1877 VISIT_FROM(object_pool)
1878 POINTER_FIELD(InstructionsPtr,
1879 instructions) // Accessed from generated code.
1880 // If owner_ is Function::null() the owner is a regular stub.
1881 // If owner_ is a Class the owner is the allocation stub for that class.
1882 // Else, owner_ is a regular Dart Function.
1883 POINTER_FIELD(ObjectPtr, owner) // Function, Null, or a Class.
1884 POINTER_FIELD(ExceptionHandlersPtr, exception_handlers)
1885 POINTER_FIELD(PcDescriptorsPtr, pc_descriptors)
1886 // If FLAG_precompiled_mode, then this field contains
1887 // TypedDataPtr catch_entry_moves_maps
1888 // Otherwise, it is
1889 // SmiPtr num_variables
1890 POINTER_FIELD(ObjectPtr, catch_entry)
1891 POINTER_FIELD(CompressedStackMapsPtr, compressed_stackmaps)
1892 POINTER_FIELD(ArrayPtr, inlined_id_to_function)
1893 POINTER_FIELD(CodeSourceMapPtr, code_source_map)
1894 NOT_IN_PRECOMPILED(POINTER_FIELD(InstructionsPtr, active_instructions))
1895 NOT_IN_PRECOMPILED(POINTER_FIELD(ArrayPtr, deopt_info_array))
1896 // (code-offset, function, code) triples.
1897 NOT_IN_PRECOMPILED(POINTER_FIELD(ArrayPtr, static_calls_target_table))
1898 // If return_address_metadata_ is a Smi, it is the offset to the prologue.
1899 // Else, return_address_metadata_ is null.
1900 NOT_IN_PRODUCT(POINTER_FIELD(ObjectPtr, return_address_metadata))
1901 NOT_IN_PRODUCT(POINTER_FIELD(LocalVarDescriptorsPtr, var_descriptors))
1902 NOT_IN_PRODUCT(POINTER_FIELD(ArrayPtr, comments))
1903
1904#if !defined(PRODUCT)
1905 VISIT_TO(comments);
1906#elif defined(DART_PRECOMPILED_RUNTIME)
1907 VISIT_TO(code_source_map);
1908#else
1909 VISIT_TO(static_calls_target_table);
1910#endif
1911
1912 // Compilation timestamp.
1913 NOT_IN_PRODUCT(alignas(8) int64_t compile_timestamp_);
1914
1915 // state_bits_ is a bitfield with three fields:
1916 // The optimized bit, the alive bit, and a count of the number of pointer
1917 // offsets.
1918 // Alive: If true, the embedded object pointers will be visited during GC.
1919 int32_t state_bits_;
1920 // Caches the unchecked entry point offset for instructions_, in case we need
1921 // to reset the active_instructions_ to instructions_.
1922 NOT_IN_PRECOMPILED(uint32_t unchecked_offset_);
1923 // Stores the instructions length when not using RawInstructions objects.
1924 ONLY_IN_PRECOMPILED(uint32_t instructions_length_);
1925
1926 // Variable length data follows here.
1927 int32_t* data() { OPEN_ARRAY_START(int32_t, int32_t); }
1928 const int32_t* data() const { OPEN_ARRAY_START(int32_t, int32_t); }
1929
1930 static bool ContainsPC(const ObjectPtr raw_obj, uword pc);
1931
1932 friend class Function;
1933 template <bool>
1934 friend class MarkingVisitorBase;
1935 friend class StackFrame;
1936 friend class Profiler;
1940 friend class CallSiteResetter;
1941};
1942
1943class UntaggedObjectPool : public UntaggedObject {
1944 RAW_HEAP_OBJECT_IMPLEMENTATION(ObjectPool);
1945
1946 intptr_t length_;
1947
1948 struct Entry {
1949 union {
1950 ObjectPtr raw_obj_;
1951 uword raw_value_;
1952 };
1953 };
1954 Entry* data() { OPEN_ARRAY_START(Entry, Entry); }
1955 Entry const* data() const { OPEN_ARRAY_START(Entry, Entry); }
1956 DEFINE_CONTAINS_COMPRESSED(decltype(Entry::raw_obj_));
1957
1958 // The entry bits are located after the last entry. They are encoded versions
1959 // of `ObjectPool::TypeBits() | ObjectPool::PatchabilityBit()`.
1960 uint8_t* entry_bits() { return reinterpret_cast<uint8_t*>(&data()[length_]); }
1961 uint8_t const* entry_bits() const {
1962 return reinterpret_cast<uint8_t const*>(&data()[length_]);
1963 }
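// Layout sketch (editorial note, not part of the original header): for
// length_ == N an object pool is laid out as
//
//   [ header | length_ | Entry[0] ... Entry[N-1] | entry_bits[0..N) ]
//
// so entry_bits() points one byte past the last Entry, and the encoded
// type/patchability byte for entry i lives at entry_bits()[i].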
1964
1965 friend class Object;
1969};
1970
1971class UntaggedInstructions : public UntaggedObject {
1972 RAW_HEAP_OBJECT_IMPLEMENTATION(Instructions);
1973 VISIT_NOTHING();
1974
1975 // Instructions size in bytes and flags.
1976 uint32_t size_and_flags_;
1977
1978 // Variable length data follows here.
1979 uint8_t* data() { OPEN_ARRAY_START(uint8_t, uint8_t); }
1980
1981 // Private helper function used while visiting stack frames. The
1982 // code which iterates over dart frames is also called during GC and
1983 // is not allowed to create handles.
1984 static bool ContainsPC(const InstructionsPtr raw_instr, uword pc);
1985
1986 friend class UntaggedCode;
1987 friend class UntaggedFunction;
1988 friend class Code;
1989 friend class StackFrame;
1990 template <bool>
1991 friend class MarkingVisitorBase;
1992 friend class Function;
1993 friend class ImageReader;
1994 friend class ImageWriter;
1995 friend class AssemblyImageWriter;
1996 friend class BlobImageWriter;
1997};
1998
1999// Used to carry extra information to the VM without changing the embedder
2000// interface, to provide memory accounting for the bare instruction payloads
2001// we serialize, since they are no longer part of RawInstructions objects,
2002// and to avoid special casing bare instructions payload Images in the GC.
2003class UntaggedInstructionsSection : public UntaggedObject {
2004 RAW_HEAP_OBJECT_IMPLEMENTATION(InstructionsSection);
2005 VISIT_NOTHING();
2006
2007 // Instructions section payload length in bytes.
2008 uword payload_length_;
2009 // The offset of the corresponding BSS section from this text section.
2010 word bss_offset_;
2011 // The relocated address of this text section in the shared object. Properly
2012 // filled for ELF snapshots, always 0 in assembly snapshots. (For the latter,
2013 // we instead get the value during BSS initialization and store it there.)
2014 uword instructions_relocated_address_;
2015 // The offset of the GNU build ID note section from this text section.
2016 word build_id_offset_;
2017
2018 // Variable length data follows here.
2019 uint8_t* data() { OPEN_ARRAY_START(uint8_t, uint8_t); }
2020
2021 friend class Image;
2022};
2023
2024class UntaggedPcDescriptors : public UntaggedObject {
2025 public:
2026// The macro argument V is passed two arguments, the raw name of the enum value
2027// and the initialization expression used within the enum definition. The uses
2028// of enum values inside the initialization expression are hardcoded currently,
2029// so the second argument is useless outside the enum definition and should be
2030// dropped by other users of this macro.
2031#define FOR_EACH_RAW_PC_DESCRIPTOR(V) \
2032 /* Deoptimization continuation point. */ \
2033 V(Deopt, 1) \
2034 /* IC call. */ \
2035 V(IcCall, kDeopt << 1) \
2036 /* Call to a known target via stub. */ \
2037 V(UnoptStaticCall, kIcCall << 1) \
2038 /* Runtime call. */ \
2039 V(RuntimeCall, kUnoptStaticCall << 1) \
2040 /* OSR entry point in unopt. code. */ \
2041 V(OsrEntry, kRuntimeCall << 1) \
2042 /* Call rewind target address. */ \
2043 V(Rewind, kOsrEntry << 1) \
2044 /* Target-word-size relocation. */ \
2045 V(BSSRelocation, kRewind << 1) \
2046 V(Other, kBSSRelocation << 1) \
2047 V(AnyKind, -1)
2048
2049 enum Kind {
2050#define ENUM_DEF(name, init) k##name = init,
2051 FOR_EACH_RAW_PC_DESCRIPTOR(ENUM_DEF)
2052#undef ENUM_DEF
2053 kLastKind = kOther,
2054 };
2055
2056 static const char* KindToCString(Kind k);
2057 static bool ParseKind(const char* cstr, Kind* out);
2058
2059 // Used to represent the absence of a yield index in PcDescriptors.
2060 static constexpr intptr_t kInvalidYieldIndex = -1;
2061
2062 class KindAndMetadata : AllStatic {
2063 public:
2064 // Most of the time try_index will be small and merged field will fit into
2065 // one byte.
2066 static uint32_t Encode(intptr_t kind,
2067 intptr_t try_index,
2068 intptr_t yield_index) {
2069 return KindShiftBits::encode(Utils::ShiftForPowerOfTwo(kind)) |
2070 TryIndexBits::encode(try_index + 1) |
2071 YieldIndexBits::encode(yield_index + 1);
2072 }
2073
2074 static intptr_t DecodeKind(uint32_t kind_and_metadata) {
2075 return 1 << KindShiftBits::decode(kind_and_metadata);
2076 }
2077
2078 static intptr_t DecodeTryIndex(uint32_t kind_and_metadata) {
2079 return TryIndexBits::decode(kind_and_metadata) - 1;
2080 }
2081
2082 static intptr_t DecodeYieldIndex(uint32_t kind_and_metadata) {
2083 return YieldIndexBits::decode(kind_and_metadata) - 1;
2084 }
2085
2086 private:
2087 static constexpr intptr_t kKindShiftSize = 3;
2088 static constexpr intptr_t kTryIndexSize = 10;
2089 static constexpr intptr_t kYieldIndexSize =
2090 32 - kKindShiftSize - kTryIndexSize;
2091
2092 class KindShiftBits
2093 : public BitField<uint32_t, intptr_t, 0, kKindShiftSize> {};
2094 class TryIndexBits : public BitField<uint32_t,
2095 intptr_t,
2096 KindShiftBits::kNextBit,
2097 kTryIndexSize> {};
2098 class YieldIndexBits : public BitField<uint32_t,
2099 intptr_t,
2100 TryIndexBits::kNextBit,
2101 kYieldIndexSize> {};
2102 };
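// Worked example (editorial note, not part of the original header):
// kIcCall == kDeopt << 1 == 2, so Encode(kIcCall, /*try_index=*/3,
// /*yield_index=*/kInvalidYieldIndex) stores a kind shift of 1 (2 == 1 << 1),
// TryIndexBits == 4, and YieldIndexBits == 0. Decoding round-trips:
// DecodeKind returns 1 << 1 == kIcCall, DecodeTryIndex returns 4 - 1 == 3,
// and DecodeYieldIndex returns 0 - 1 == kInvalidYieldIndex.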
2103
2104 private:
2105 RAW_HEAP_OBJECT_IMPLEMENTATION(PcDescriptors);
2106 VISIT_NOTHING();
2107
2108 // Number of descriptors. This only needs to be an int32_t, but we make it a
2109 // uword so that the variable length data is 64 bit aligned on 64 bit
2110 // platforms.
2111 uword length_;
2112
2113 // Variable length data follows here.
2114 uint8_t* data() { OPEN_ARRAY_START(uint8_t, intptr_t); }
2115 const uint8_t* data() const { OPEN_ARRAY_START(uint8_t, intptr_t); }
2116
2117 friend class Object;
2118 friend class ImageWriter;
2119};
2120
2121// CodeSourceMap encodes a mapping from code PC ranges to source token
2122// positions and the stack of inlined functions.
2123class UntaggedCodeSourceMap : public UntaggedObject {
2124 private:
2125 RAW_HEAP_OBJECT_IMPLEMENTATION(CodeSourceMap);
2126 VISIT_NOTHING();
2127
2128 // Length in bytes. This only needs to be an int32_t, but we make it a uword
2129 // so that the variable length data is 64 bit aligned on 64 bit platforms.
2130 uword length_;
2131
2132 // Variable length data follows here.
2133 uint8_t* data() { OPEN_ARRAY_START(uint8_t, intptr_t); }
2134 const uint8_t* data() const { OPEN_ARRAY_START(uint8_t, intptr_t); }
2135
2136 friend class Object;
2137 friend class ImageWriter;
2138};
2139
2140// RawCompressedStackMaps is a compressed representation of the stack maps
2141// for certain PC offsets into a set of instructions, where a stack map is a bit
2142// map that marks each live object index starting from the base of the frame.
2143class UntaggedCompressedStackMaps : public UntaggedObject {
2144 RAW_HEAP_OBJECT_IMPLEMENTATION(CompressedStackMaps);
2145 VISIT_NOTHING();
2146
2147 public:
2148 // Note: AOT snapshots pack these structures without any padding in between
2149 // so payload structure should not have any alignment requirements.
2150 // alignas(1) is here to trigger a compiler error if we violate this.
2151 struct alignas(1) Payload {
2152 using FlagsAndSizeHeader = uint32_t;
2153
2154 // The most significant bits are the length of the encoded payload, in
2155 // bytes (excluding the header itself). The low bits determine the
2156 // expected payload contents, as described below.
2157 DART_FORCE_INLINE FlagsAndSizeHeader flags_and_size() const {
2158 // Note: |this| does not necessarily satisfy alignment requirements
2159 // of uint32_t so we should use bit_cast.
2160 return bit_copy<FlagsAndSizeHeader, Payload>(*this);
2161 }
2162
2163 DART_FORCE_INLINE void set_flags_and_size(FlagsAndSizeHeader value) {
2164 // Note: |this| does not necessarily satisfy alignment requirements
2165 // of uint32_t hence the byte copy below.
2166 memcpy(reinterpret_cast<void*>(this), &value, sizeof(value)); // NOLINT
2167 }
2168
2169 // Variable length data follows here. The contents of the payload depend on
2170 // the type of CompressedStackMaps (CSM) being represented. There are three
2171 // major types of CSM:
2172 //
2173 // 1) GlobalTableBit = false, UsesTableBit = false: CSMs that include all
2174 // information about the stack maps. The payload for these contain
2175 // tightly packed entries with the following information:
2176 //
2177 // * A header containing the following three pieces of information:
2178 // * An unsigned integer representing the PC offset as a delta from the
2179 // PC offset of the previous entry (from 0 for the first entry).
2180 // * An unsigned integer representing the number of bits used for
2181 // spill slot entries.
2182 // * An unsigned integer representing the number of bits used for other
2183 // entries.
2184 // * The body containing the bits for the stack map. The length of
2185 // the body in bits is the sum of the spill slot and non-spill slot
2186 // bit counts.
2187 //
2188 // 2) GlobalTableBit = false, UsesTableBit = true: CSMs where the majority
2189 // of the stack map information has been offloaded and canonicalized into
2190 // a global table. The payload contains tightly packed entries with the
2191 // following information:
2192 //
2193 // * A header containing just an unsigned integer representing the PC
2194 // offset delta as described above.
2195 // * The body is just an unsigned integer containing the offset into the
2196 // payload for the global table.
2197 //
2198 // 3) GlobalTableBit = true, UsesTableBit = false: A CSM implementing the
2199 // global table. Here, the payload contains tightly packed entries with
2200 // the following information:
2201 //
2202 // * A header containing the following two pieces of information:
2203 // * An unsigned integer representing the number of bits used for
2204 // spill slot entries.
2205 // * An unsigned integer representing the number of bits used for other
2206 // entries.
2207 // * The body containing the bits for the stack map. The length of the
2208 // body in bits is the sum of the spill slot and non-spill slot bit
2209 // counts.
2210 //
2211 // In all types of CSM, each unsigned integer is LEB128 encoded, as
2212 // generally they tend to fit in a single byte or two. Thus, entry headers
2213 // are not a fixed length, and currently there is no random access of
2214 // entries. In addition, PC offsets are currently encoded as deltas, which
2215 // also inhibits random access without accessing previous entries. That
2216 // means to find an entry for a given PC offset, a linear search must be
2217 // done where the payload is decoded up to the entry whose PC offset
2218 // is greater than or equal to the given PC.
2219
2220 uint8_t* data() {
2221 return reinterpret_cast<uint8_t*>(this) + sizeof(FlagsAndSizeHeader);
2222 }
2223
2224 const uint8_t* data() const {
2225 return reinterpret_cast<const uint8_t*>(this) +
2226 sizeof(FlagsAndSizeHeader);
2227 }
2228 };
2229
2230 private:
2231 // We are using OPEN_ARRAY_START rather than embedding Payload directly into
2232 // the UntaggedCompressedStackMaps as a field because that would introduce a
2233 // padding at the end of UntaggedCompressedStackMaps - so we would not be
2234 // able to use sizeof(UntaggedCompressedStackMaps) as the size of the header
2235 // anyway.
2236 Payload* payload() { OPEN_ARRAY_START(Payload, uint8_t); }
2237 const Payload* payload() const { OPEN_ARRAY_START(Payload, uint8_t); }
2238
2239 class GlobalTableBit
2240 : public BitField<Payload::FlagsAndSizeHeader, bool, 0, 1> {};
2241 class UsesTableBit : public BitField<Payload::FlagsAndSizeHeader,
2242 bool,
2243 GlobalTableBit::kNextBit,
2244 1> {};
2245 class SizeField
2246 : public BitField<Payload::FlagsAndSizeHeader,
2247 Payload::FlagsAndSizeHeader,
2248 UsesTableBit::kNextBit,
2249 sizeof(Payload::FlagsAndSizeHeader) * kBitsPerByte -
2250 UsesTableBit::kNextBit> {};
2251
2252 friend class Object;
2253 friend class ImageWriter;
2254 friend class StackMapEntry;
2255};
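// Decoding sketch (editorial, assuming the standard unsigned LEB128 format
// described above; DecodeULEB128 is a hypothetical helper, not the VM's API):
//
//   uint32_t DecodeULEB128(const uint8_t* bytes, intptr_t* cursor) {
//     uint32_t result = 0;
//     int shift = 0;
//     uint8_t part;
//     do {
//       part = bytes[(*cursor)++];
//       result |= static_cast<uint32_t>(part & 0x7f) << shift;
//       shift += 7;
//     } while ((part & 0x80) != 0);
//     return result;
//   }
//
// Searching a type-1 CSM for a PC offset then walks the payload linearly:
// decode the PC delta, the spill-slot bit count, and the non-spill bit
// count, skip the stack map body, and stop once the accumulated PC offset
// is greater than or equal to the PC being searched for.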
2256
2257class UntaggedInstructionsTable : public UntaggedObject {
2258 RAW_HEAP_OBJECT_IMPLEMENTATION(InstructionsTable);
2259
2260 POINTER_FIELD(ArrayPtr, code_objects)
2261 VISIT_FROM(code_objects)
2262 VISIT_TO(code_objects)
2263
2264 struct DataEntry {
2265 uint32_t pc_offset;
2266 uint32_t stack_map_offset;
2267 };
2268 static_assert(sizeof(DataEntry) == sizeof(uint32_t) * 2);
2269
2270 struct Data {
2271 uint32_t canonical_stack_map_entries_offset;
2272 uint32_t length;
2273 uint32_t first_entry_with_code;
2274 uint32_t padding;
2275
2276 const DataEntry* entries() const { OPEN_ARRAY_START(DataEntry, uint32_t); }
2277
2278 const UntaggedCompressedStackMaps::Payload* StackMapAt(
2279 intptr_t offset) const {
2280 return reinterpret_cast<UntaggedCompressedStackMaps::Payload*>(
2281 reinterpret_cast<uword>(this) + offset);
2282 }
2283 };
2284 static_assert(sizeof(Data) == sizeof(uint32_t) * 4);
2285
2286 intptr_t length_;
2287 const Data* rodata_;
2288 uword start_pc_;
2289 uword end_pc_;
2290
2291 friend class Deserializer;
2292};
2293
2294class UntaggedLocalVarDescriptors : public UntaggedObject {
2295 public:
2296 enum VarInfoKind {
2297 kStackVar = 1,
2298 kContextVar,
2299 kContextLevel,
2300 kSavedCurrentContext,
2301 };
2302
2303 enum {
2304 kKindPos = 0,
2305 kKindSize = 8,
2306 kIndexPos = kKindPos + kKindSize,
2307 // Since there are 24 bits for the stack slot index, Functions can have
2308 // only ~16.7 million stack slots.
2309 kPayloadSize = sizeof(int32_t) * kBitsPerByte,
2310 kIndexSize = kPayloadSize - kIndexPos,
2311 kIndexBias = 1 << (kIndexSize - 1),
2312 kMaxIndex = (1 << (kIndexSize - 1)) - 1,
2313 };
2314
2315 class IndexBits : public BitField<int32_t, int32_t, kIndexPos, kIndexSize> {};
2316 class KindBits : public BitField<int32_t, int8_t, kKindPos, kKindSize> {};
2317
2318 struct VarInfo {
2319 int32_t index_kind = 0; // Bitfield for slot index on stack or in context,
2320 // and Entry kind of type VarInfoKind.
2321 TokenPosition declaration_pos =
2322 TokenPosition::kNoSource; // Token position of declaration.
2323 TokenPosition begin_pos =
2324 TokenPosition::kNoSource; // Token position of scope start.
2325 TokenPosition end_pos =
2326 TokenPosition::kNoSource; // Token position of scope end.
2327 int16_t scope_id; // Scope to which the variable belongs.
2328
2329 VarInfoKind kind() const {
2330 return static_cast<VarInfoKind>(KindBits::decode(index_kind));
2331 }
2332 void set_kind(VarInfoKind kind) {
2333 index_kind = KindBits::update(kind, index_kind);
2334 }
2335 int32_t index() const { return IndexBits::decode(index_kind) - kIndexBias; }
2336 void set_index(int32_t index) {
2337 index_kind = IndexBits::update(index + kIndexBias, index_kind);
2338 }
2339 };
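// Worked example (editorial note, assuming the layout above with
// kKindSize == 8 and kIndexSize == 24): kIndexBias == 1 << 23, so
// set_index(-1) stores 0x7fffff in IndexBits and index() returns
// 0x7fffff - kIndexBias == -1; the bias lets signed slot indices live in
// an unsigned bit field.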
2340
2341 private:
2342 RAW_HEAP_OBJECT_IMPLEMENTATION(LocalVarDescriptors);
2343 // Number of descriptors. This only needs to be an int32_t, but we make it a
2344 // uword so that the variable length data is 64 bit aligned on 64 bit
2345 // platforms.
2346 uword num_entries_;
2347
2348 VISIT_FROM_PAYLOAD_START(CompressedStringPtr)
2349 COMPRESSED_VARIABLE_POINTER_FIELDS(StringPtr, name, names)
2350
2351 CompressedStringPtr* nameAddrAt(intptr_t i) { return &(names()[i]); }
2352 void set_name(intptr_t i, StringPtr value) {
2353 StoreCompressedPointer(nameAddrAt(i), value);
2354 }
2355
2356 // Variable info with [num_entries_] entries.
2357 VarInfo* data() {
2358 return reinterpret_cast<VarInfo*>(nameAddrAt(num_entries_));
2359 }
2360
2361 friend class Object;
2362};
2363
2364class UntaggedExceptionHandlers : public UntaggedObject {
2365 private:
2366 RAW_HEAP_OBJECT_IMPLEMENTATION(ExceptionHandlers);
2367
2368 // Number of exception handler entries and
2369 // async handler.
2370 uint32_t packed_fields_;
2371
2372 // Async handler is used in the async/async* functions.
2373 // It's an implicit exception handler (stub) which runs when
2374 // exception is not handled within the function.
2375 using AsyncHandlerBit = BitField<decltype(packed_fields_), bool, 0, 1>;
2376 using NumEntriesBits = BitField<decltype(packed_fields_),
2377 uint32_t,
2378 AsyncHandlerBit::kNextBit,
2379 31>;
2380
2381 intptr_t num_entries() const {
2382 return NumEntriesBits::decode(packed_fields_);
2383 }
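// Worked example (editorial note, not part of the original header):
// packed_fields_ == 0x7 decodes as AsyncHandlerBit == true (bit 0) and
// NumEntriesBits == 3 (bits 1..31): three regular handler entries plus
// the implicit async handler.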
2384
2385 // Array with [num_entries] entries. Each entry is an array of all handled
2386 // exception types.
2387 COMPRESSED_POINTER_FIELD(ArrayPtr, handled_types_data)
2388 VISIT_FROM(handled_types_data)
2389 VISIT_TO(handled_types_data)
2390
2391 // Exception handler info of length [num_entries].
2392 const ExceptionHandlerInfo* data() const {
2393 OPEN_ARRAY_START(ExceptionHandlerInfo, intptr_t);
2394 }
2395 ExceptionHandlerInfo* data() {
2396 OPEN_ARRAY_START(ExceptionHandlerInfo, intptr_t);
2397 }
2398
2399 friend class Object;
2400};
2401
2402class UntaggedContext : public UntaggedObject {
2403 RAW_HEAP_OBJECT_IMPLEMENTATION(Context);
2404
2405 int32_t num_variables_;
2406
2407 COMPRESSED_POINTER_FIELD(ContextPtr, parent)
2408 VISIT_FROM(parent)
2409 // Variable length data follows here.
2410 COMPRESSED_VARIABLE_POINTER_FIELDS(ObjectPtr, element, data)
2411
2412 friend class Object;
2413 friend void UpdateLengthField(intptr_t,
2414 ObjectPtr,
2415 ObjectPtr); // num_variables_
2416};
2417
2418#define CONTEXT_SCOPE_VARIABLE_DESC_FLAG_LIST(V) \
2419 V(Final) \
2420 V(Late) \
2421 V(Nullable) \
2422 V(Invisible) \
2423 V(AwaiterLink)
2424
2425class UntaggedContextScope : public UntaggedObject {
2426 RAW_HEAP_OBJECT_IMPLEMENTATION(ContextScope);
2427
2428 // TODO(iposva): Switch to conventional enum offset based structure to avoid
2429 // alignment mishaps.
2430 struct VariableDesc {
2431 CompressedSmiPtr declaration_token_pos;
2432 CompressedSmiPtr token_pos;
2433 CompressedStringPtr name;
2434 CompressedSmiPtr flags;
2435 enum FlagBits {
2436#define DECLARE_BIT(Name) kIs##Name,
2437 CONTEXT_SCOPE_VARIABLE_DESC_FLAG_LIST(DECLARE_BIT)
2438#undef DECLARE_BIT
2439 };
2440 CompressedSmiPtr late_init_offset;
2441 CompressedAbstractTypePtr type;
2442 CompressedSmiPtr cid;
2443 CompressedSmiPtr context_index;
2444 CompressedSmiPtr context_level;
2445 CompressedSmiPtr kernel_offset;
2446 };
2447
2448 int32_t num_variables_;
2449 bool is_implicit_; // true, if this context scope is for an implicit closure.
2450
2451 // Just choose one of the fields in VariableDesc, since they should all be
2452 // compressed or not compressed.
2453 DEFINE_CONTAINS_COMPRESSED(decltype(VariableDesc::name));
2454
2455 CompressedObjectPtr* from() {
2456 VariableDesc* begin = const_cast<VariableDesc*>(VariableDescAddr(0));
2457 return reinterpret_cast<CompressedObjectPtr*>(begin);
2458 }
2459 // Variable length data follows here.
2460 CompressedObjectPtr const* data() const {
2461 OPEN_ARRAY_START(CompressedObjectPtr, CompressedObjectPtr);
2462 }
2463 const VariableDesc* VariableDescAddr(intptr_t index) const {
2464 // data() points to the first component of the first descriptor.
2465 return reinterpret_cast<const VariableDesc*>(data()) + index;
2466 }
2467
2468#define DEFINE_ACCESSOR(type, name) \
2469 type name##_at(intptr_t index) { \
2470 return LoadCompressedPointer<type>(&VariableDescAddr(index)->name); \
2471 } \
2472 void set_##name##_at(intptr_t index, type value) { \
2473 StoreCompressedPointer(&VariableDescAddr(index)->name, value); \
2474 }
2475 DEFINE_ACCESSOR(SmiPtr, declaration_token_pos)
2476 DEFINE_ACCESSOR(SmiPtr, token_pos)
2477 DEFINE_ACCESSOR(StringPtr, name)
2478 DEFINE_ACCESSOR(SmiPtr, flags)
2479 DEFINE_ACCESSOR(SmiPtr, late_init_offset)
2480 DEFINE_ACCESSOR(AbstractTypePtr, type)
2481 DEFINE_ACCESSOR(SmiPtr, cid)
2482 DEFINE_ACCESSOR(SmiPtr, context_index)
2483 DEFINE_ACCESSOR(SmiPtr, context_level)
2484 DEFINE_ACCESSOR(SmiPtr, kernel_offset)
2485#undef DEFINE_ACCESSOR
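// For instance, DEFINE_ACCESSOR(StringPtr, name) above expands to
// (editorial sketch of the preprocessor output):
//
//   StringPtr name_at(intptr_t index) {
//     return LoadCompressedPointer<StringPtr>(&VariableDescAddr(index)->name);
//   }
//   void set_name_at(intptr_t index, StringPtr value) {
//     StoreCompressedPointer(&VariableDescAddr(index)->name, value);
//   }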
2486
2487 CompressedObjectPtr* to(intptr_t num_vars) {
2488 uword end = reinterpret_cast<uword>(VariableDescAddr(num_vars));
2489 // 'end' is the address just beyond the last descriptor, so step back.
2490 return reinterpret_cast<CompressedObjectPtr*>(end -
2491 sizeof(CompressedObjectPtr));
2492 }
2493 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind, intptr_t num_vars) {
2494 return to(num_vars);
2495 }
2496
2497 friend class Object;
2499};
2500
2505
2506class UntaggedSingleTargetCache : public UntaggedObject {
2507 RAW_HEAP_OBJECT_IMPLEMENTATION(SingleTargetCache);
2508 POINTER_FIELD(CodePtr, target)
2509 VISIT_FROM(target)
2510 VISIT_TO(target)
2511 uword entry_point_;
2512 ClassIdTagType lower_limit_;
2513 ClassIdTagType upper_limit_;
2514};
2515
2516class UntaggedMonomorphicSmiableCall : public UntaggedObject {
2517 RAW_HEAP_OBJECT_IMPLEMENTATION(MonomorphicSmiableCall);
2518 VISIT_NOTHING();
2519
2520 uword expected_cid_;
2521 uword entrypoint_;
2522};
2523
2524// Abstract base class for RawICData/RawMegamorphicCache
2525class UntaggedCallSiteData : public UntaggedObject {
2526 protected:
2527 POINTER_FIELD(StringPtr, target_name); // Name of target function.
2528 VISIT_FROM(target_name)
2529 // arg_descriptor in RawICData and in RawMegamorphicCache should be
2530 // in the same position so that NoSuchMethod can access it.
2531 POINTER_FIELD(ArrayPtr, args_descriptor); // Arguments descriptor.
2532 VISIT_TO(args_descriptor)
2533 ObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2534
2535 private:
2536 RAW_HEAP_OBJECT_IMPLEMENTATION(CallSiteData)
2537};
2538
2539class UntaggedUnlinkedCall : public UntaggedCallSiteData {
2540 RAW_HEAP_OBJECT_IMPLEMENTATION(UnlinkedCall);
2541
2542 bool can_patch_to_monomorphic_;
2543};
2544
2545class UntaggedICData : public UntaggedCallSiteData {
2546 RAW_HEAP_OBJECT_IMPLEMENTATION(ICData);
2547 POINTER_FIELD(ArrayPtr, entries) // Contains class-ids, target and count.
2548 // Static type of the receiver, if instance call and available.
2549 NOT_IN_PRECOMPILED(POINTER_FIELD(AbstractTypePtr, receivers_static_type))
2550 POINTER_FIELD(ObjectPtr,
2551 owner) // Parent/calling function or original IC of cloned IC.
2552 VISIT_TO(owner)
2553 ObjectPtr* to_snapshot(Snapshot::Kind kind) {
2554 switch (kind) {
2555 case Snapshot::kFullAOT:
2556 return reinterpret_cast<ObjectPtr*>(&entries_);
2557 case Snapshot::kFull:
2558 case Snapshot::kFullCore:
2559 case Snapshot::kFullJIT:
2560 return to();
2561 case Snapshot::kNone:
2562 case Snapshot::kInvalid:
2563 break;
2564 }
2565 UNREACHABLE();
2566 return nullptr;
2567 }
2568 NOT_IN_PRECOMPILED(int32_t deopt_id_);
2569 // Number of arguments tested in IC, deopt reasons.
2570 uint32_t state_bits_;
2571};
2572
2573class UntaggedMegamorphicCache : public UntaggedCallSiteData {
2574 RAW_HEAP_OBJECT_IMPLEMENTATION(MegamorphicCache);
2575
2576 POINTER_FIELD(ArrayPtr, buckets)
2577 SMI_FIELD(SmiPtr, mask)
2578 VISIT_TO(mask)
2579 ObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2580
2581 int32_t filled_entry_count_;
2582};
2583
2584class UntaggedSubtypeTestCache : public UntaggedObject {
2585 RAW_HEAP_OBJECT_IMPLEMENTATION(SubtypeTestCache);
2586
2587 POINTER_FIELD(ArrayPtr, cache)
2588 VISIT_FROM(cache)
2589 VISIT_TO(cache)
2590 uint32_t num_inputs_;
2591 uint32_t num_occupied_;
2592};
2593
2594class UntaggedLoadingUnit : public UntaggedObject {
2595 RAW_HEAP_OBJECT_IMPLEMENTATION(LoadingUnit);
2596
2597 COMPRESSED_POINTER_FIELD(LoadingUnitPtr, parent)
2598 VISIT_FROM(parent)
2599 COMPRESSED_POINTER_FIELD(ArrayPtr, base_objects)
2600 VISIT_TO(base_objects)
2601 const uint8_t* instructions_image_;
2602 AtomicBitFieldContainer<intptr_t> packed_fields_;
2603
2604 enum LoadState : int8_t {
2605 kNotLoaded = 0, // Ensure this is the default state when zero-initialized.
2606 kLoadOutstanding,
2607 kLoaded,
2608 };
2609
2610 using LoadStateBits = BitField<decltype(packed_fields_), LoadState, 0, 2>;
2611 using IdBits =
2612 BitField<decltype(packed_fields_), intptr_t, LoadStateBits::kNextBit>;
2613};
2614
2615class UntaggedError : public UntaggedObject {
2616 RAW_HEAP_OBJECT_IMPLEMENTATION(Error);
2617};
2618
2619class UntaggedApiError : public UntaggedError {
2620 RAW_HEAP_OBJECT_IMPLEMENTATION(ApiError);
2621
2622 COMPRESSED_POINTER_FIELD(StringPtr, message)
2623 VISIT_FROM(message)
2624 VISIT_TO(message)
2625};
2626
2627class UntaggedLanguageError : public UntaggedError {
2628 RAW_HEAP_OBJECT_IMPLEMENTATION(LanguageError);
2629
2630 COMPRESSED_POINTER_FIELD(ErrorPtr, previous_error) // May be null.
2631 VISIT_FROM(previous_error)
2632 COMPRESSED_POINTER_FIELD(ScriptPtr, script)
2633 COMPRESSED_POINTER_FIELD(StringPtr, message)
2634 // Incl. previous error's formatted message.
2635 COMPRESSED_POINTER_FIELD(StringPtr, formatted_message)
2636 VISIT_TO(formatted_message)
2637 TokenPosition token_pos_; // Source position in script_.
2638 bool report_after_token_; // Report message at or after the token.
2639 int8_t kind_; // Of type Report::Kind.
2640
2641 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2642};
2643
2644class UntaggedUnhandledException : public UntaggedError {
2645 RAW_HEAP_OBJECT_IMPLEMENTATION(UnhandledException);
2646
2647 COMPRESSED_POINTER_FIELD(InstancePtr, exception)
2648 VISIT_FROM(exception)
2649 COMPRESSED_POINTER_FIELD(InstancePtr, stacktrace)
2650 VISIT_TO(stacktrace)
2651 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2652};
2653
2654class UntaggedUnwindError : public UntaggedError {
2655 RAW_HEAP_OBJECT_IMPLEMENTATION(UnwindError);
2656
2657 COMPRESSED_POINTER_FIELD(StringPtr, message)
2658 VISIT_FROM(message)
2659 VISIT_TO(message)
2660 bool is_user_initiated_;
2661};
2662
2663class UntaggedInstance : public UntaggedObject {
2664 RAW_HEAP_OBJECT_IMPLEMENTATION(Instance);
2665 friend class Object;
2666
2667 public:
2668#if defined(DART_COMPRESSED_POINTERS)
2669 static constexpr bool kContainsCompressedPointers = true;
2670#else
2671 static constexpr bool kContainsCompressedPointers = false;
2672#endif
2673};
2674
2675class UntaggedLibraryPrefix : public UntaggedInstance {
2676 RAW_HEAP_OBJECT_IMPLEMENTATION(LibraryPrefix);
2677
2678 // Library prefix name.
2679 COMPRESSED_POINTER_FIELD(StringPtr, name)
2680 VISIT_FROM(name)
2681 // Libraries imported with this prefix.
2682 COMPRESSED_POINTER_FIELD(ArrayPtr, imports)
2683 // Library which declares this prefix.
2684 COMPRESSED_POINTER_FIELD(LibraryPtr, importer)
2685 VISIT_TO(importer)
2686 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
2687 switch (kind) {
2688 case Snapshot::kFullAOT:
2689 return reinterpret_cast<CompressedObjectPtr*>(&imports_);
2690 case Snapshot::kFull:
2691 case Snapshot::kFullCore:
2692 case Snapshot::kFullJIT:
2693 return reinterpret_cast<CompressedObjectPtr*>(&importer_);
2694 case Snapshot::kNone:
2695 case Snapshot::kInvalid:
2696 break;
2697 }
2698 UNREACHABLE();
2699 return nullptr;
2700 }
2701 uint16_t num_imports_; // Number of library entries in libraries_.
2702 bool is_deferred_load_;
2703};
2704
2705class UntaggedTypeArguments : public UntaggedInstance {
2706 private:
2707 RAW_HEAP_OBJECT_IMPLEMENTATION(TypeArguments);
2708
2709 // The instantiations_ array remains empty for instantiated type arguments.
2710 // Of 3-tuple: 2 instantiators, result.
2711 COMPRESSED_POINTER_FIELD(ArrayPtr, instantiations)
2712 VISIT_FROM(instantiations)
2713 COMPRESSED_SMI_FIELD(SmiPtr, length)
2714 COMPRESSED_SMI_FIELD(SmiPtr, hash)
2715 COMPRESSED_SMI_FIELD(SmiPtr, nullability)
2716 // Variable length data follows here.
2717 COMPRESSED_VARIABLE_POINTER_FIELDS(AbstractTypePtr, element, types)
2718
2719 friend class Object;
2720};
2721
2722class UntaggedTypeParameters : public UntaggedObject {
2723 private:
2724 RAW_HEAP_OBJECT_IMPLEMENTATION(TypeParameters);
2725
2726 // Length of names reflects the number of type parameters.
2727 COMPRESSED_POINTER_FIELD(ArrayPtr, names)
2728 VISIT_FROM(names)
2729 // flags: isGenericCovariantImpl and (todo) variance.
2730 COMPRESSED_POINTER_FIELD(ArrayPtr, flags)
2731 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, bounds)
2732 // defaults is the instantiation to bounds (calculated by CFE).
2733 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, defaults)
2734 VISIT_TO(defaults)
2735 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2736
2737 friend class Object;
2738};
2739
2740class UntaggedAbstractType : public UntaggedInstance {
2741 protected:
2742 // Accessed from generated code.
2743 uword type_test_stub_entry_point_;
2744 // Accessed from generated code.
2745 std::atomic<uint32_t> flags_;
2746#if defined(DART_COMPRESSED_POINTERS)
2747 uint32_t padding_; // Makes Windows and Posix agree on layout.
2748#endif
2749 COMPRESSED_POINTER_FIELD(CodePtr, type_test_stub)
2751 VISIT_FROM(type_test_stub)
2752
2753 uint32_t flags() const { return flags_.load(std::memory_order_relaxed); }
2754 void set_flags(uint32_t value) {
2755 flags_.store(value, std::memory_order_relaxed);
2756 }
2757
2758 public:
2759 enum TypeState {
2760 kAllocated, // Initial state.
2761 kFinalizedInstantiated, // Instantiated type ready for use.
2762 kFinalizedUninstantiated, // Uninstantiated type ready for use.
2763 };
2764
2765 using NullabilityBits = BitField<uint32_t, uint8_t, 0, kNullabilityBitSize>;
2766 static constexpr intptr_t kNullabilityMask = NullabilityBits::mask();
2767
2768 static constexpr intptr_t kTypeStateShift = NullabilityBits::kNextBit;
2769 static constexpr intptr_t kTypeStateBits = 2;
2770 using TypeStateBits =
2771 BitField<uint32_t, uint8_t, kTypeStateShift, kTypeStateBits>;
2772
2773 private:
2774 RAW_HEAP_OBJECT_IMPLEMENTATION(AbstractType);
2775
2776 friend class ObjectStore;
2777 friend class StubCode;
2778};
2779
2780class UntaggedType : public UntaggedAbstractType {
2781 public:
2782 static constexpr intptr_t kTypeClassIdShift = TypeStateBits::kNextBit;
2783 using TypeClassIdBits =
2784 BitField<uint32_t, ClassIdTagType, kTypeClassIdShift>;
2785
2786 private:
2787 RAW_HEAP_OBJECT_IMPLEMENTATION(Type);
2788
2789 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, arguments)
2790 VISIT_TO(arguments)
2791
2792 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2793
2794 ClassIdTagType type_class_id() const {
2795 return TypeClassIdBits::decode(flags());
2796 }
2797 void set_type_class_id(ClassIdTagType value) {
2798 set_flags(TypeClassIdBits::update(value, flags()));
2799 }
2800
2801 friend class compiler::target::UntaggedType;
2802 friend class CidRewriteVisitor;
2804};
2805
2806class UntaggedFunctionType : public UntaggedAbstractType {
2807 private:
2808 RAW_HEAP_OBJECT_IMPLEMENTATION(FunctionType);
2809
2810 COMPRESSED_POINTER_FIELD(TypeParametersPtr, type_parameters)
2811 COMPRESSED_POINTER_FIELD(AbstractTypePtr, result_type)
2812 COMPRESSED_POINTER_FIELD(ArrayPtr, parameter_types)
2813 COMPRESSED_POINTER_FIELD(ArrayPtr, named_parameter_names);
2814 VISIT_TO(named_parameter_names)
2815 AtomicBitFieldContainer<uint32_t> packed_parameter_counts_;
2816 AtomicBitFieldContainer<uint16_t> packed_type_parameter_counts_;
2817
2818 // The bit fields are public for use in kernel_to_il.cc.
2819 public:
2820 // For packed_type_parameter_counts_.
2822 BitField<decltype(packed_type_parameter_counts_), uint8_t, 0, 8>;
2824 BitField<decltype(packed_type_parameter_counts_),
2825 uint8_t,
2826 PackedNumParentTypeArguments::kNextBit,
2827 8>;
2828
2829 // For packed_parameter_counts_.
2831 BitField<decltype(packed_parameter_counts_), uint8_t, 0, 1>;
2833 BitField<decltype(packed_parameter_counts_),
2834 bool,
2835 PackedNumImplicitParameters::kNextBit,
2836 1>;
2838 BitField<decltype(packed_parameter_counts_),
2839 uint16_t,
2840 PackedHasNamedOptionalParameters::kNextBit,
2841 14>;
2843 BitField<decltype(packed_parameter_counts_),
2844 uint16_t,
2845 PackedNumFixedParameters::kNextBit,
2846 14>;
2847 static_assert(PackedNumOptionalParameters::kNextBit <=
2848 compiler::target::kSmiBits,
2849 "In-place mask for number of optional parameters cannot fit in "
2850 "a Smi on the target architecture");
2851
2852 private:
2853 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2854
2855 friend class Function;
2856};
2857
2858class UntaggedRecordType : public UntaggedAbstractType {
2859 private:
2860 RAW_HEAP_OBJECT_IMPLEMENTATION(RecordType);
2861
2862 COMPRESSED_SMI_FIELD(SmiPtr, shape)
2863 COMPRESSED_POINTER_FIELD(ArrayPtr, field_types)
2864 VISIT_TO(field_types)
2865
2866 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2867};
2868
2869class UntaggedTypeParameter : public UntaggedAbstractType {
2870 public:
2871 static constexpr intptr_t kIsFunctionTypeParameterBit =
2872 TypeStateBits::kNextBit;
2873 using IsFunctionTypeParameter =
2874 BitField<uint32_t, bool, kIsFunctionTypeParameterBit, 1>;
2875
2876 private:
2877 RAW_HEAP_OBJECT_IMPLEMENTATION(TypeParameter);
2878
2879 // FunctionType or Smi (class id).
2880 COMPRESSED_POINTER_FIELD(ObjectPtr, owner)
2881 VISIT_TO(owner)
2882 uint16_t base_; // Number of enclosing function type parameters.
2883 uint16_t index_; // Keep size in sync with BuildTypeParameterTypeTestStub.
2884
2885 private:
2886 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2887
2888 friend class CidRewriteVisitor;
2889};
2890
2891class UntaggedClosure : public UntaggedInstance {
2892 private:
2893 RAW_HEAP_OBJECT_IMPLEMENTATION(Closure);
2894
2895 // The following fields are also declared in the Dart source of class
2896 // _Closure, and so must be the first fields in the object and must appear
2897 // in the same order, so the offsets are identical in Dart and C++.
2898 //
2899 // Note that the type of a closure is defined by instantiating the
2900 // signature of the closure function with the instantiator, function, and
2901 // delayed (if non-empty) type arguments stored in the closure value.
2902
2903 // Stores the instantiator type arguments provided when the closure was
2904 // created.
2905 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, instantiator_type_arguments)
2906 VISIT_FROM(instantiator_type_arguments)
2907 // Stores the function type arguments provided for any generic parent
2908 // functions when the closure was created.
2909 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, function_type_arguments)
2910 // If this field contains the empty type argument vector, then the closure
2911 // value is generic.
2912 //
2913 // To create a new closure that is a specific type instantiation of a generic
2914 // closure, a copy of the closure is created where the empty type argument
2915 // vector in this field is replaced with the vector of local type arguments.
2916 // The resulting closure value is not generic, and so an attempt to provide
2917 // type arguments when invoking the new closure value is treated the same as
2918 // calling any other non-generic function with unneeded type arguments.
2919 //
2920 // If the signature for the closure function has no local type parameters,
2921 // the only guarantee about this field is that it never contains the empty
2922 // type arguments vector. Thus, only this field need be inspected to
2923 // determine whether a given closure value is generic.
2924 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, delayed_type_arguments)
2925 COMPRESSED_POINTER_FIELD(FunctionPtr, function)
2926 // For tear-offs - captured receiver.
2927 // For ordinary closures - Context object with captured variables.
2928 COMPRESSED_POINTER_FIELD(ObjectPtr, context)
2929 COMPRESSED_SMI_FIELD(SmiPtr, hash)
2930 VISIT_TO(hash)
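// Dart-level example (editorial note, not part of the original header):
// given `T id<T>(T x) => x;`, the tear-off `var f = id;` is generic, so
// f's delayed_type_arguments is the empty vector. `var g = id<int>;`
// copies the closure with delayed_type_arguments == <int>, so calling
// `g(3)` behaves like a non-generic call and needs no type arguments.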
2931
2932 // We have an extra word in the object due to alignment rounding, so use it in
2933 // bare instructions mode to cache the entry point from the closure function
2934 // to avoid an extra redirection on call. Closure functions only have
2935 // one entry point, as dynamic calls use dynamic closure call dispatchers.
2936 ONLY_IN_PRECOMPILED(uword entry_point_);
2937
2938 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
2939
2941};
2942
2943class UntaggedNumber : public UntaggedInstance {
2944 RAW_OBJECT_IMPLEMENTATION(Number);
2945};
2946
2947class UntaggedInteger : public UntaggedNumber {
2948 RAW_OBJECT_IMPLEMENTATION(Integer);
2949};
2950
2951class UntaggedSmi : public UntaggedInteger {
2952 RAW_OBJECT_IMPLEMENTATION(Smi);
2953};
2954
2955class UntaggedMint : public UntaggedInteger {
2956 RAW_HEAP_OBJECT_IMPLEMENTATION(Mint);
2957 VISIT_NOTHING();
2958
2959 ALIGN8 int64_t value_;
2960
2961 friend class Api;
2962 friend class Class;
2963 friend class Integer;
2964};
2966
2967class UntaggedDouble : public UntaggedNumber {
2968 RAW_HEAP_OBJECT_IMPLEMENTATION(Double);
2969 VISIT_NOTHING();
2970
2971 ALIGN8 double value_;
2972
2973 friend class Api;
2974 friend class Class;
2975};
2977
2978class UntaggedString : public UntaggedInstance {
2979 RAW_HEAP_OBJECT_IMPLEMENTATION(String);
2980
2981 protected:
2982#if !defined(HASH_IN_OBJECT_HEADER)
2983 COMPRESSED_SMI_FIELD(SmiPtr, hash)
2984 VISIT_FROM(hash)
2985#endif
2986 COMPRESSED_SMI_FIELD(SmiPtr, length)
2987#if defined(HASH_IN_OBJECT_HEADER)
2988 VISIT_FROM(length)
2989#endif
2990 VISIT_TO(length)
2991
2992 private:
2993 friend class Library;
2995 friend class ImageWriter;
2996};
2997
2998class UntaggedOneByteString : public UntaggedString {
2999 RAW_HEAP_OBJECT_IMPLEMENTATION(OneByteString);
3000 VISIT_NOTHING();
3001
3002 // Variable length data follows here.
3003 uint8_t* data() { OPEN_ARRAY_START(uint8_t, uint8_t); }
3004 const uint8_t* data() const { OPEN_ARRAY_START(uint8_t, uint8_t); }
3005
3007 friend class String;
3010};
3011
3012class UntaggedTwoByteString : public UntaggedString {
3013 RAW_HEAP_OBJECT_IMPLEMENTATION(TwoByteString);
3014 VISIT_NOTHING();
3015
3016 // Variable length data follows here.
3017 uint16_t* data() { OPEN_ARRAY_START(uint16_t, uint16_t); }
3018 const uint16_t* data() const { OPEN_ARRAY_START(uint16_t, uint16_t); }
3019
3021 friend class String;
3024};
3025
3026// Abstract base class for UntaggedTypedData/UntaggedExternalTypedData/
3027// UntaggedTypedDataView/Pointer.
3028//
3029// TypedData extends this with a length field, while Pointer extends this with
3030// TypeArguments field.
3031class UntaggedPointerBase : public UntaggedInstance {
3032 public:
3033 uint8_t* data() { return data_; }
3034
3035 protected:
3036 // The contents of [data_] depends on what concrete subclass is used:
3037 //
3038 // - UntaggedTypedData: Start of the payload.
3039 // - UntaggedExternalTypedData: Start of the C-heap payload.
3040 // - UntaggedTypedDataView: The [data_] field of the backing store for the
3041 // view plus the [offset_in_bytes_] the view has.
3042 // - UntaggedPointer: Pointer into C memory (no length specified).
3043 //
3044 // During allocation or snapshot reading the [data_] can be temporarily
3045 // nullptr (which is the case for views which just got created but haven't
3046 // gotten the backing store set).
3047 uint8_t* data_;
3048
3049 private:
3050 template <typename T>
3051 friend void CopyTypedDataBaseWithSafepointChecks(
3052 Thread*,
3053 const T&,
3054 const T&,
3055 intptr_t); // Access _data for memmove with safepoint checkins.
3056
3058};
3059
3060// Abstract base class for UntaggedTypedData/UntaggedExternalTypedData/
3061// UntaggedTypedDataView.
3062class UntaggedTypedDataBase : public UntaggedPointerBase {
3063 protected:
3064#if defined(DART_COMPRESSED_POINTERS)
3065 uint32_t padding_; // Makes Windows and Posix agree on layout.
3066#endif
3067 // The length of the view in element sizes (obtainable via
3068 // [TypedDataBase::ElementSizeInBytes]).
3069 COMPRESSED_SMI_FIELD(SmiPtr, length)
3070 VISIT_FROM(length)
3072
3073 private:
3075 friend void UpdateLengthField(intptr_t, ObjectPtr, ObjectPtr); // length_
3076 friend void InitializeExternalTypedData(
3077 intptr_t,
3078 ExternalTypedDataPtr,
3079 ExternalTypedDataPtr); // initialize fields.
3080 friend void InitializeExternalTypedDataWithSafepointChecks(
3081 Thread*,
3082 intptr_t,
3083 const ExternalTypedData&,
3084 const ExternalTypedData&); // initialize fields.
3085
3087};
3088
3089class UntaggedTypedData : public UntaggedTypedDataBase {
3090 RAW_HEAP_OBJECT_IMPLEMENTATION(TypedData);
3091
3092 public:
3093 static intptr_t payload_offset() {
3094 return OFFSET_OF_RETURNED_VALUE(UntaggedTypedData, internal_data);
3095 }
3096
3097 // Recompute [data_] pointer to internal data.
3098 void RecomputeDataField() { data_ = internal_data(); }
3099
3100 protected:
3101 // Variable length data follows here.
3102 uint8_t* internal_data() { OPEN_ARRAY_START(uint8_t, uint8_t); }
3103 const uint8_t* internal_data() const { OPEN_ARRAY_START(uint8_t, uint8_t); }
3104
3105 uint8_t* data() {
3106 ASSERT(data_ == internal_data());
3107 return data_;
3108 }
3109 const uint8_t* data() const {
3110 ASSERT(data_ == internal_data());
3111 return data_;
3112 }
3113
3114 friend class Api;
3115 friend class Instance;
3117 friend class NativeEntryData;
3118 friend class Object;
3119 friend class ObjectPool;
3123};
3124
3125// All _*ArrayView/_ByteDataView classes share the same layout.
3126class UntaggedTypedDataView : public UntaggedTypedDataBase {
3127 RAW_HEAP_OBJECT_IMPLEMENTATION(TypedDataView);
3128
3129 public:
3130 // Recompute [data_] based on internal/external [typed_data_].
3131 void RecomputeDataField() {
3132 const intptr_t offset_in_bytes = RawSmiValue(this->offset_in_bytes());
3133 uint8_t* payload = typed_data()->untag()->data_;
3134 data_ = payload + offset_in_bytes;
3135 }
3136
3137 // Recompute [data_] based on internal [typed_data_] - needs to be called by
3138 // GC whenever the backing store moved.
3139 //
3140 // NOTICE: This method assumes [this] is the forwarded object and the
3141 // [typed_data_] pointer points to the new backing store. The backing store's
3142 // fields don't need to be valid - only its address.
3143 void RecomputeDataFieldForInternalTypedData() {
3144 data_ = DataFieldForInternalTypedData();
3145 }
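// Worked example (editorial note, not part of the original header): a view
// with offset_in_bytes == 4 over a backing store whose payload starts at
// address P has data_ == P + 4. If the GC moves the backing store to P',
// RecomputeDataField*() must run so that data_ becomes P' + 4 again;
// ValidateInnerPointer() checks exactly this invariant.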
3146
3147 uint8_t* DataFieldForInternalTypedData() const {
3148 const intptr_t offset_in_bytes = RawSmiValue(this->offset_in_bytes());
3149 uint8_t* payload =
3150 reinterpret_cast<uint8_t*>(UntaggedObject::ToAddr(typed_data()) +
3151 UntaggedTypedData::payload_offset());
3152 return payload + offset_in_bytes;
3153 }
3154
3155 void ValidateInnerPointer() {
3156 if (typed_data()->untag()->GetClassId() == kNullCid) {
3157 // The view object must have gotten just initialized.
3158 if (data_ != nullptr || RawSmiValue(offset_in_bytes()) != 0 ||
3159 RawSmiValue(length()) != 0) {
3160 FATAL("TypedDataView has invalid inner pointer.");
3161 }
3162 } else {
3163 const intptr_t offset_in_bytes = RawSmiValue(this->offset_in_bytes());
3164 uint8_t* payload = typed_data()->untag()->data_;
3165 if ((payload + offset_in_bytes) != data_) {
3166 FATAL("TypedDataView has invalid inner pointer.");
3167 }
3168 }
3169 }
3170
3171 protected:
3172 COMPRESSED_POINTER_FIELD(TypedDataBasePtr, typed_data)
3173 COMPRESSED_SMI_FIELD(SmiPtr, offset_in_bytes)
3174 VISIT_TO(offset_in_bytes)
3175 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
3176
3177 friend void InitializeTypedDataView(TypedDataViewPtr);
3178 friend class Api;
3179 friend class Object;
3183 friend class GCCompactor;
3184 template <bool>
3185 friend class ScavengerVisitorBase;
3186};
3187
3188class UntaggedBool : public UntaggedInstance {
3189 RAW_HEAP_OBJECT_IMPLEMENTATION(Bool);
3190 VISIT_NOTHING();
3191
3192 bool value_;
3193
3194 friend class Object;
3195};
3196
3197class UntaggedArray : public UntaggedInstance {
3198 RAW_HEAP_OBJECT_IMPLEMENTATION(Array);
3199
3200 COMPRESSED_ARRAY_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
3201 VISIT_FROM(type_arguments)
3202 COMPRESSED_SMI_FIELD(SmiPtr, length)
3203 // Variable length data follows here.
3204 COMPRESSED_VARIABLE_POINTER_FIELDS(ObjectPtr, element, data)
3205
3212 friend class Deserializer;
3213 friend class UntaggedCode;
3216 friend class Map;
3217 friend class UntaggedMap;
3218 friend class UntaggedConstMap;
3219 friend class Object;
3220 friend class ICData; // For high performance access.
3221 friend class SubtypeTestCache; // For high performance access.
3222 friend class ReversePc;
3223 template <typename Table, bool kAllCanonicalObjectsAreIncludedIntoSet>
3225 friend class Page;
3226 friend class FastObjectCopy; // For initializing fields.
3227 friend void UpdateLengthField(intptr_t, ObjectPtr, ObjectPtr); // length_
3228};
3229
3230class UntaggedImmutableArray : public UntaggedArray {
3231 RAW_HEAP_OBJECT_IMPLEMENTATION(ImmutableArray);
3232};
3233
3234class UntaggedGrowableObjectArray : public UntaggedInstance {
3235 RAW_HEAP_OBJECT_IMPLEMENTATION(GrowableObjectArray);
3236
3237 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
3238 VISIT_FROM(type_arguments)
3239 COMPRESSED_SMI_FIELD(SmiPtr, length)
3240 COMPRESSED_POINTER_FIELD(ArrayPtr, data)
3241 VISIT_TO(data)
3242 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
3243
3244 friend class ReversePc;
3245};
3246
3247class UntaggedLinkedHashBase : public UntaggedInstance {
3248 RAW_HEAP_OBJECT_IMPLEMENTATION(LinkedHashBase);
3249
3250 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
3251 VISIT_FROM(type_arguments)
3252 COMPRESSED_POINTER_FIELD(SmiPtr, hash_mask)
3253 COMPRESSED_POINTER_FIELD(ArrayPtr, data)
3254 COMPRESSED_POINTER_FIELD(SmiPtr, used_data)
3255 COMPRESSED_POINTER_FIELD(SmiPtr, deleted_keys)
3256 COMPRESSED_POINTER_FIELD(TypedDataPtr, index)
3257 VISIT_TO(index)
3258
3259 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) {
3260 // Do not serialize index.
3261 return reinterpret_cast<CompressedObjectPtr*>(&deleted_keys_);
3262 }
3263};
3264
3265class UntaggedMap : public UntaggedLinkedHashBase {
3266 RAW_HEAP_OBJECT_IMPLEMENTATION(Map);
3267
3268 friend class UntaggedConstMap;
3269};
3270
3271class UntaggedConstMap : public UntaggedMap {
3272 RAW_HEAP_OBJECT_IMPLEMENTATION(ConstMap);
3273};
3274
3275class UntaggedSet : public UntaggedLinkedHashBase {
3276 RAW_HEAP_OBJECT_IMPLEMENTATION(Set);
3277
3278 friend class UntaggedConstSet;
3279};
3280
3281class UntaggedConstSet : public UntaggedSet {
3282 RAW_HEAP_OBJECT_IMPLEMENTATION(ConstSet);
3283};
3284
3285class UntaggedFloat32x4 : public UntaggedInstance {
3286 RAW_HEAP_OBJECT_IMPLEMENTATION(Float32x4);
3287 VISIT_NOTHING();
3288
3289 ALIGN8 float value_[4];
3290
3291 friend class Class;
3292
3293 public:
3294 float x() const { return value_[0]; }
3295 float y() const { return value_[1]; }
3296 float z() const { return value_[2]; }
3297 float w() const { return value_[3]; }
3298};
3300
3301class UntaggedInt32x4 : public UntaggedInstance {
3302 RAW_HEAP_OBJECT_IMPLEMENTATION(Int32x4);
3303 VISIT_NOTHING();
3304
3305 ALIGN8 int32_t value_[4];
3306
3311
3312 public:
3313 int32_t x() const { return value_[0]; }
3314 int32_t y() const { return value_[1]; }
3315 int32_t z() const { return value_[2]; }
3316 int32_t w() const { return value_[3]; }
3317};
3319
3320class UntaggedFloat64x2 : public UntaggedInstance {
3321 RAW_HEAP_OBJECT_IMPLEMENTATION(Float64x2);
3322 VISIT_NOTHING();
3323
3324 ALIGN8 double value_[2];
3325
3326 friend class Class;
3327
3328 public:
3329 double x() const { return value_[0]; }
3330 double y() const { return value_[1]; }
3331};
3333
3334class UntaggedRecord : public UntaggedInstance {
3335 RAW_HEAP_OBJECT_IMPLEMENTATION(Record);
3336
3337#if defined(DART_COMPRESSED_POINTERS)
3338 // This explicit padding avoids implicit padding between [shape] and [data].
3339 // Record allocation doesn't initialize the implicit padding but GC scans
3340 // everything between 'from' (shape) and 'to' (end of data),
3341 // so it would see garbage if implicit padding is inserted.
3342 uint32_t padding_;
3343#endif
3344 COMPRESSED_SMI_FIELD(SmiPtr, shape)
3345 VISIT_FROM(shape)
3346 // Variable length data follows here.
3347 COMPRESSED_VARIABLE_POINTER_FIELDS(ObjectPtr, field, data)
3348
3349 friend void UpdateLengthField(intptr_t, ObjectPtr,
3350 ObjectPtr); // shape_
3351};
3352
3353// Define aliases for intptr_t.
3354#if defined(ARCH_IS_32_BIT)
3355#define kIntPtrCid kTypedDataInt32ArrayCid
3356#define GetIntPtr GetInt32
3357#define SetIntPtr SetInt32
3358#define kUintPtrCid kTypedDataUint32ArrayCid
3359#define GetUintPtr GetUint32
3360#define SetUintPtr SetUint32
3361#elif defined(ARCH_IS_64_BIT)
3362#define kIntPtrCid kTypedDataInt64ArrayCid
3363#define GetIntPtr GetInt64
3364#define SetIntPtr SetInt64
3365#define kUintPtrCid kTypedDataUint64ArrayCid
3366#define GetUintPtr GetUint64
3367#define SetUintPtr SetUint64
3368#else
3369#error Architecture is not 32-bit or 64-bit.
3370#endif // ARCH_IS_32_BIT
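// Usage note (editorial, illustrative): these aliases make word-sized
// typed-data access architecture-independent; e.g. on a 64-bit target
// GetIntPtr(0) expands to GetInt64(0) and kIntPtrCid names
// kTypedDataInt64ArrayCid, while 32-bit targets get the Int32 variants.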
3371
3375
3376class UntaggedPointer : public UntaggedPointerBase {
3377 RAW_HEAP_OBJECT_IMPLEMENTATION(Pointer);
3378
3379 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
3380 VISIT_FROM(type_arguments)
3381 VISIT_TO(type_arguments)
3382
3383 friend class Pointer;
3384};
3385
3386class UntaggedDynamicLibrary : public UntaggedInstance {
3387 RAW_HEAP_OBJECT_IMPLEMENTATION(DynamicLibrary);
3388 VISIT_NOTHING();
3389 void* handle_;
3390 bool isClosed_;
3391 bool canBeClosed_;
3392
3393 friend class DynamicLibrary;
3394};
3395
3396// VM implementations of the basic types in the isolate.
3397class alignas(8) UntaggedCapability : public UntaggedInstance {
3398 RAW_HEAP_OBJECT_IMPLEMENTATION(Capability);
3399 VISIT_NOTHING();
3400 uint64_t id_;
3401};
3402
3403class alignas(8) UntaggedSendPort : public UntaggedInstance {
3404 RAW_HEAP_OBJECT_IMPLEMENTATION(SendPort);
3405 VISIT_NOTHING();
3406 Dart_Port id_;
3407 Dart_Port origin_id_;
3408
3409 friend class ReceivePort;
3410};
3411
3412class UntaggedReceivePort : public UntaggedInstance {
3413 RAW_HEAP_OBJECT_IMPLEMENTATION(ReceivePort);
3414
3415 COMPRESSED_POINTER_FIELD(SendPortPtr, send_port)
3416 VISIT_FROM(send_port)
3417 COMPRESSED_POINTER_FIELD(SmiPtr, bitfield)
3418 COMPRESSED_POINTER_FIELD(InstancePtr, handler)
3419#if defined(PRODUCT)
3420 VISIT_TO(handler)
3421#else
3422 COMPRESSED_POINTER_FIELD(StringPtr, debug_name)
3423 COMPRESSED_POINTER_FIELD(StackTracePtr, allocation_location)
3424 VISIT_TO(allocation_location)
3425#endif // !defined(PRODUCT)
3426};
3427
3428class UntaggedTransferableTypedData : public UntaggedInstance {
3429 RAW_HEAP_OBJECT_IMPLEMENTATION(TransferableTypedData);
3430 VISIT_NOTHING();
3431};
3432
3433// VM type for capturing stacktraces when exceptions are thrown.
3434// Currently we don't have any interface that this object is supposed
3435// to implement, so we just support the 'toString' method, which
3436// converts the stack trace into a string.
3437class UntaggedStackTrace : public UntaggedInstance {
3438 RAW_HEAP_OBJECT_IMPLEMENTATION(StackTrace);
3439
3440 // Link to parent async stack trace.
3441 COMPRESSED_POINTER_FIELD(StackTracePtr, async_link);
3442 VISIT_FROM(async_link)
3443 // Code object for each frame in the stack trace.
3444 COMPRESSED_POINTER_FIELD(ArrayPtr, code_array);
3445 // Offset of PC for each frame.
3446 COMPRESSED_POINTER_FIELD(TypedDataPtr, pc_offset_array);
3447
3448 VISIT_TO(pc_offset_array)
3449 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
3450
3451 // False for pre-allocated stack trace (used in OOM and Stack overflow).
3452 bool expand_inlined_;
3453 // Whether the link between the stack and the async-link represents a
3454 // synchronous start to an asynchronous function. In this case, we omit the
3455 // <asynchronous suspension> marker when concatenating the stacks.
3456 bool skip_sync_start_in_parent_stack;
3457};
3458
3459class UntaggedSuspendState : public UntaggedInstance {
3460 RAW_HEAP_OBJECT_IMPLEMENTATION(SuspendState);
3461
3462 NOT_IN_PRECOMPILED(intptr_t frame_capacity_);
3463 intptr_t frame_size_;
3464 uword pc_;
3465
3466 // Holds function-specific object which is returned from
3467 // SuspendState.init* method.
3468 // For async functions: _Future instance.
3469 // For async* functions: _AsyncStarStreamController instance.
3470 COMPRESSED_POINTER_FIELD(InstancePtr, function_data)
3471
3472 COMPRESSED_POINTER_FIELD(ClosurePtr, then_callback)
3473 COMPRESSED_POINTER_FIELD(ClosurePtr, error_callback)
3474 VISIT_FROM(function_data)
3475 VISIT_TO(error_callback)
3476
3477 public:
3478 uword pc() const { return pc_; }
3479
3480 intptr_t frame_capacity() const {
3481#if defined(DART_PRECOMPILED_RUNTIME)
3482 return frame_size_;
3483#else
3484 return frame_capacity_;
3485#endif
3486 }
3487
3488 static intptr_t payload_offset() {
3489 return OFFSET_OF_RETURNED_VALUE(UntaggedSuspendState, payload);
3490 }
3491
3492 // Variable length payload follows here.
3493 uint8_t* payload() { OPEN_ARRAY_START(uint8_t, uint8_t); }
3494 const uint8_t* payload() const { OPEN_ARRAY_START(uint8_t, uint8_t); }
3495};
3496
3497// VM type for capturing JS regular expressions.
3498class UntaggedRegExp : public UntaggedInstance {
3499 RAW_HEAP_OBJECT_IMPLEMENTATION(RegExp);
3500
3501 COMPRESSED_POINTER_FIELD(ArrayPtr, capture_name_map)
3502 VISIT_FROM(capture_name_map)
3503 // Pattern to be used for matching.
3504 COMPRESSED_POINTER_FIELD(StringPtr, pattern)
3505 COMPRESSED_POINTER_FIELD(ObjectPtr, one_byte) // FunctionPtr or TypedDataPtr
3506 COMPRESSED_POINTER_FIELD(ObjectPtr, two_byte) // FunctionPtr or TypedDataPtr
3507 COMPRESSED_POINTER_FIELD(ObjectPtr, one_byte_sticky)
3508 COMPRESSED_POINTER_FIELD(ObjectPtr, two_byte_sticky)
3509 VISIT_TO(two_byte_sticky)
3510 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
3511
3512 std::atomic<intptr_t> num_bracket_expressions_;
3513 intptr_t num_bracket_expressions() {
3514 return num_bracket_expressions_.load(std::memory_order_relaxed);
3515 }
3516 void set_num_bracket_expressions(intptr_t value) {
3517 num_bracket_expressions_.store(value, std::memory_order_relaxed);
3518 }
3519
3520 // The same pattern may use different amount of registers if compiled
3521 // for a one-byte target than a two-byte target. For example, we do not
3522 // need to allocate registers to check whether the current position is within
3523 // a surrogate pair when matching a Unicode pattern against a one-byte string.
3524 intptr_t num_one_byte_registers_;
3525 intptr_t num_two_byte_registers_;
3526
3527 // A bitfield with two fields:
3528 // type: Uninitialized, simple or complex.
3529 // flags: Represents global/local, case insensitive, multiline, unicode,
3530 // dotAll.
3531 // It is possible multiple compilers race to update the flags concurrently.
3532 // That should be safe since all racing updates store the same values.
3533 AtomicBitFieldContainer<int8_t> type_flags_;
3534};
3535
3536class UntaggedWeakProperty : public UntaggedInstance {
3537 RAW_HEAP_OBJECT_IMPLEMENTATION(WeakProperty);
3538
3539 COMPRESSED_POINTER_FIELD(ObjectPtr, key) // Weak reference.
3540 VISIT_FROM(key)
3541 COMPRESSED_POINTER_FIELD(ObjectPtr, value)
3542 VISIT_TO(value)
3543 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
3544
3545 // Linked list is chaining all pending weak properties. Not visited by
3546 // pointer visitors.
3547 COMPRESSED_POINTER_FIELD(WeakPropertyPtr, next_seen_by_gc)
3548
3549 template <typename Type, typename PtrType>
3550 friend class GCLinkedList;
3551 template <bool>
3552 friend class MarkingVisitorBase;
3553 template <bool>
3554 friend class ScavengerVisitorBase;
3555 friend class Scavenger;
3556 friend class FastObjectCopy; // For OFFSET_OF
3557 friend class SlowObjectCopy; // For OFFSET_OF
3558};
3559
3560// WeakProperty is special in that it has a pointer field which is not
3561// traversed by pointer visitors, and thus not in the range [from(),to()]:
3562// next_seen_by_gc, which is after the other fields.
3563template <>
3564DART_FORCE_INLINE uword
3565UntaggedObject::to_offset<UntaggedWeakProperty>(intptr_t length) {
3566 return OFFSET_OF(UntaggedWeakProperty, next_seen_by_gc_);
3567}
3568
3569class UntaggedWeakReference : public UntaggedInstance {
3570 RAW_HEAP_OBJECT_IMPLEMENTATION(WeakReference);
3571
3572 COMPRESSED_POINTER_FIELD(ObjectPtr, target) // Weak reference.
3573 VISIT_FROM(target)
3574 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
3575 VISIT_TO(type_arguments)
3576 CompressedObjectPtr* to_snapshot(Snapshot::Kind kind) { return to(); }
3577
3578 // Linked list is chaining all pending weak properties. Not visited by
3579 // pointer visitors.
3580 COMPRESSED_POINTER_FIELD(WeakReferencePtr, next_seen_by_gc)
3581
3582 template <typename Type, typename PtrType>
3583 friend class GCLinkedList;
3584 template <bool>
3585 friend class MarkingVisitorBase;
3586 template <bool>
3587 friend class ScavengerVisitorBase;
3588 friend class Scavenger;
3589 friend class ObjectGraph;
3590 friend class FastObjectCopy; // For OFFSET_OF
3591 friend class SlowObjectCopy; // For OFFSET_OF
3592};
3593
3594// WeakReference is special in that it has a pointer field which is not
3595// traversed by pointer visitors, and thus not in the range [from(),to()]:
3596// next_seen_by_gc, which is after the other fields.
3597template <>
3598DART_FORCE_INLINE uword
3599UntaggedObject::to_offset<UntaggedWeakReference>(intptr_t length) {
3600 return OFFSET_OF(UntaggedWeakReference, next_seen_by_gc_);
3601}
3602
3603class UntaggedFinalizerBase : public UntaggedInstance {
3604 RAW_HEAP_OBJECT_IMPLEMENTATION(FinalizerBase);
3605
3606 // The isolate this finalizer belongs to. Updated on sent and exit and set
3607 // to null on isolate shutdown. See Isolate::finalizers_.
3608 Isolate* isolate_;
3609
3610// With compressed pointers, the first field in a subclass is at offset 28.
3611// If the fields were public, the first field in a subclass would be at
3612// offset 32. On Windows it is always at offset 32, public or private.
3613// This padding makes it 32 on all OSes.
3614// We can't use ALIGN8 on the first fields of the subclasses because they are
3615// defined via the COMPRESSED_POINTER_FIELD macro.
3616// Placed before the first fields so it is not included between from() and to().
3617#ifdef DART_COMPRESSED_POINTERS
3618 uint32_t align_first_field_in_subclass;
3619#endif
3620
3621 COMPRESSED_POINTER_FIELD(ObjectPtr, detachments)
3622 VISIT_FROM(detachments)
3623 COMPRESSED_POINTER_FIELD(SetPtr, all_entries)
3624 COMPRESSED_POINTER_FIELD(FinalizerEntryPtr, entries_collected)
3625
3626 template <typename GCVisitorType>
3627 friend void MournFinalizerEntry(GCVisitorType*, FinalizerEntryPtr);
3628 template <bool>
3629 friend class MarkingVisitorBase;
3630 template <bool>
3631 friend class ScavengerVisitorBase;
3632 friend class ObjectGraph;
3633};
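// Editor's note: a sketch (not part of raw_object.h) of the padding trick in
// the alignment comment above, using a hypothetical stand-in layout: an
// explicit 4-byte pad rounds the base class up to a multiple of 8 bytes, so
// the first 8-byte field of any subclass starts at the same, aligned offset
// on every OS and compiler.
#include <cstdint>

struct PadSketch {
  uint32_t tags_;                          // stand-in for the object header
  uint32_t isolate_lo_, isolate_hi_;       // stand-in for the Isolate* field
  uint32_t align_first_field_in_subclass;  // explicit pad, as in the comment
};
static_assert(sizeof(PadSketch) % 8 == 0,
              "first 8-byte field of a subclass starts aligned");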
3634
3635class UntaggedFinalizer : public UntaggedFinalizerBase {
3636 RAW_HEAP_OBJECT_IMPLEMENTATION(Finalizer)
3637
3638 COMPRESSED_POINTER_FIELD(ClosurePtr, callback)
3639 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
3640 VISIT_TO(type_arguments)
3641
3642 template <std::memory_order order = std::memory_order_relaxed>
3643 FinalizerEntryPtr exchange_entries_collected(FinalizerEntryPtr value) {
3644 return ExchangeCompressedPointer<FinalizerEntryPtr,
3645 CompressedFinalizerEntryPtr, order>(
3646 &entries_collected_, value);
3647 }
3648
3649 template <typename GCVisitorType>
3650 friend void MournFinalizerEntry(GCVisitorType*, FinalizerEntryPtr);
3651 template <bool>
3652 friend class MarkingVisitorBase;
3653 template <bool>
3654 friend class ScavengerVisitorBase;
3655};
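// Editor's note: a sketch (not part of raw_object.h) of the shape of
// exchange_entries_collected() above: an atomic exchange parameterized on
// memory order, with uintptr_t standing in for compressed pointers.
#include <atomic>
#include <cstdint>

template <std::memory_order order = std::memory_order_relaxed>
uintptr_t ExchangePointerSketch(std::atomic<uintptr_t>* slot, uintptr_t value) {
  // Atomically stores |value| and returns the previous contents of |slot|.
  return slot->exchange(value, order);
}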
3656
3657class UntaggedNativeFinalizer : public UntaggedFinalizerBase {
3658 RAW_HEAP_OBJECT_IMPLEMENTATION(NativeFinalizer)
3659
3660 COMPRESSED_POINTER_FIELD(PointerPtr, callback)
3661 VISIT_TO(callback)
3662
3663 template <bool>
3664 friend class MarkingVisitorBase;
3665 template <bool>
3666 friend class ScavengerVisitorBase;
3667};
3668
3669class UntaggedFinalizerEntry : public UntaggedInstance {
3670 public:
3671 intptr_t external_size() { return external_size_; }
3672 void set_external_size(intptr_t value) { external_size_ = value; }
3673
3674 private:
3675 RAW_HEAP_OBJECT_IMPLEMENTATION(FinalizerEntry)
3676
3677 COMPRESSED_POINTER_FIELD(ObjectPtr, value) // Weak reference.
3678 VISIT_FROM(value)
3679 COMPRESSED_POINTER_FIELD(ObjectPtr, detach) // Weak reference.
3680 COMPRESSED_POINTER_FIELD(ObjectPtr, token)
3681 COMPRESSED_POINTER_FIELD(FinalizerBasePtr, finalizer) // Weak reference.
3682 // Used for the linked list in Finalizer::entries_collected_. That cannot be
3683 // an ordinary list because we need to add elements during a GC, when we
3684 // cannot modify the heap.
3685 COMPRESSED_POINTER_FIELD(FinalizerEntryPtr, next)
3686 VISIT_TO(next)
3687
3688 // Linked list chaining all pending entries. Not visited by pointer
3689 // visitors. Only populated during a GC; otherwise null.
3690 COMPRESSED_POINTER_FIELD(FinalizerEntryPtr, next_seen_by_gc)
3691
3692 intptr_t external_size_;
3693
3694 template <typename Type, typename PtrType>
3695 friend class GCLinkedList;
3696 template <typename GCVisitorType>
3697 friend void MournFinalizerEntry(GCVisitorType*, FinalizerEntryPtr);
3698 template <bool>
3699 friend class MarkingVisitorBase;
3700 template <bool>
3701 friend class ScavengerVisitorBase;
3702 friend class Scavenger;
3703 friend class ObjectGraph;
3704};
3705
3706// FinalizerEntry is special in that it has a pointer field which is not
3707// traversed by pointer visitors, and thus not in the range [from(),to()]:
3708// next_seen_by_gc, which is after the other fields.
3709template <>
3710DART_FORCE_INLINE uword
3711UntaggedObject::to_offset<UntaggedFinalizerEntry>(intptr_t length) {
3712 return OFFSET_OF(UntaggedFinalizerEntry, next_seen_by_gc_);
3713}
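// Editor's note: a sketch (not part of raw_object.h) of why
// entries_collected_ is chained through the entries' own next fields: during
// a GC the heap cannot be modified, but prepending to an intrusive list only
// writes fields that already exist. Types are hypothetical stand-ins.
struct EntrySketch {
  EntrySketch* next = nullptr;  // plays the role of the next field above
};

struct GCListSketch {
  EntrySketch* head = nullptr;
  // Safe to call inside a GC: no allocation, only stores to existing fields.
  void Prepend(EntrySketch* entry) {
    entry->next = head;
    head = entry;
  }
};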
3714
3715// MirrorReferences are used by mirrors to hold reflectees that are VM
3716// internal objects, such as libraries, classes, functions, or types.
3717class UntaggedMirrorReference : public UntaggedInstance {
3718 RAW_HEAP_OBJECT_IMPLEMENTATION(MirrorReference)
3719
3720 COMPRESSED_POINTER_FIELD(ObjectPtr, referent)
3721 VISIT_FROM(referent)
3722 VISIT_TO(referent)
3723};
3724
3725// UserTags are used by the profiler to track Dart script state.
3726class UntaggedUserTag : public UntaggedInstance {
3727 RAW_HEAP_OBJECT_IMPLEMENTATION(UserTag)
3728
3729 COMPRESSED_POINTER_FIELD(StringPtr, label)
3730 VISIT_FROM(label)
3731 VISIT_TO(label)
3732
3733 // Isolate unique tag.
3734 uword tag_;
3735
3736 // Should CPU samples with this tag be streamed?
3737 bool streamable_;
3738
3739 friend class Object;
3740
3741 public:
3742 uword tag() const { return tag_; }
3743 bool streamable() const { return streamable_; }
3744};
3745
3746class UntaggedFutureOr : public UntaggedInstance {
3747 RAW_HEAP_OBJECT_IMPLEMENTATION(FutureOr)
3748
3749 COMPRESSED_POINTER_FIELD(TypeArgumentsPtr, type_arguments)
3750 VISIT_FROM(type_arguments)
3751 VISIT_TO(type_arguments)
3752};
3753
3754#undef WSR_COMPRESSED_POINTER_FIELD
3755
3756} // namespace dart
3757
3758#endif // RUNTIME_VM_RAW_OBJECT_H_