5#ifndef RUNTIME_VM_RAW_OBJECT_H_
6#define RUNTIME_VM_RAW_OBJECT_H_
8#if defined(SHOULD_NOT_INCLUDE_RUNTIME)
9#error "Should not include runtime"
42#define DEFINE_FORWARD_DECLARATION(clazz) class Untagged##clazz;
44#undef DEFINE_FORWARD_DECLARATION
// Declares kContainsCompressedPointers for the enclosing Untagged class:
// true iff the given field type is a compressed object pointer
// (is_compressed_ptr<type>). Emitted by VISIT_FROM /
// VISIT_FROM_PAYLOAD_START so that the matching VISIT_TO* macro can
// cross-check field compression via CHECK_CONTAIN_COMPRESSED.
#define DEFINE_CONTAINS_COMPRESSED(type) \
  static constexpr bool kContainsCompressedPointers = \
      is_compressed_ptr<type>::value;
52#define CHECK_CONTAIN_COMPRESSED(type) \
54 kContainsCompressedPointers || is_uncompressed_ptr<type>::value, \
55 "From declaration uses ObjectPtr"); \
57 !kContainsCompressedPointers || is_compressed_ptr<type>::value, \
58 "From declaration uses CompressedObjectPtr");
60#define VISIT_FROM(first) \
61 DEFINE_CONTAINS_COMPRESSED(decltype(first##_)) \
62 static constexpr bool kContainsPointerFields = true; \
63 base_ptr_type<decltype(first##_)>::type* from() { \
64 return reinterpret_cast<base_ptr_type<decltype(first##_)>::type*>( \
68#define VISIT_FROM_PAYLOAD_START(elem_type) \
69 static_assert(is_uncompressed_ptr<elem_type>::value || \
70 is_compressed_ptr<elem_type>::value, \
71 "Payload elements must be object pointers"); \
72 DEFINE_CONTAINS_COMPRESSED(elem_type) \
73 static constexpr bool kContainsPointerFields = true; \
74 base_ptr_type<elem_type>::type* from() { \
75 const uword payload_start = reinterpret_cast<uword>(this) + sizeof(*this); \
76 ASSERT(Utils::IsAligned(payload_start, sizeof(elem_type))); \
77 return reinterpret_cast<base_ptr_type<elem_type>::type*>(payload_start); \
80#define VISIT_TO(last) \
81 CHECK_CONTAIN_COMPRESSED(decltype(last##_)); \
82 static_assert(kContainsPointerFields, \
83 "Must have a corresponding VISIT_FROM"); \
84 base_ptr_type<decltype(last##_)>::type* to(intptr_t length = 0) { \
85 return reinterpret_cast<base_ptr_type<decltype(last##_)>::type*>( \
89#define VISIT_TO_PAYLOAD_END(elem_type) \
90 static_assert(is_uncompressed_ptr<elem_type>::value || \
91 is_compressed_ptr<elem_type>::value, \
92 "Payload elements must be object pointers"); \
93 static_assert(kContainsPointerFields, \
94 "Must have a corresponding VISIT_FROM"); \
95 CHECK_CONTAIN_COMPRESSED(elem_type); \
96 base_ptr_type<elem_type>::type* to(intptr_t length) { \
97 const uword payload_start = reinterpret_cast<uword>(this) + sizeof(*this); \
98 ASSERT(Utils::IsAligned(payload_start, sizeof(elem_type))); \
99 const uword payload_last = \
100 payload_start + sizeof(elem_type) * (length - 1); \
101 return reinterpret_cast<base_ptr_type<elem_type>::type*>(payload_last); \
// Marks a class as having no object-pointer fields for the GC to visit.
// The dummy member declaration exists so ASSERT_NOTHING_TO_VISIT can check
// for it via SIZE_OF_RETURNED_VALUE.
#define VISIT_NOTHING() int NothingToVisit();
106#if defined(DART_COMPRESSED_POINTERS)
// Compile-time checks (compressed-pointer builds only; the #else branch
// defines them as no-ops) that an Untagged class declares its pointer
// fields with the expected compression.
//
// NOTE(review): the two static_assert failure messages were swapped — each
// described the opposite of its asserted condition. A static_assert message
// is shown when the condition is FALSE, so it must state the violated
// requirement.

// Requires that Untagged##Type does NOT use compressed pointer fields.
#define ASSERT_UNCOMPRESSED(Type) \
  static_assert(!Untagged##Type::kContainsCompressedPointers, \
                "Should not contain compressed pointers");

// Requires that Untagged##Type DOES use compressed pointer fields.
#define ASSERT_COMPRESSED(Type) \
  static_assert(Untagged##Type::kContainsCompressedPointers, \
                "Should contain compressed pointers");
116#define ASSERT_UNCOMPRESSED(Type)
117#define ASSERT_COMPRESSED(Type)
// Debug-mode companion to VISIT_NOTHING(): asserts that Untagged##Type
// declares the `int NothingToVisit()` marker, i.e. that the class really
// claims to have no pointer fields to visit.
#define ASSERT_NOTHING_TO_VISIT(Type) \
  ASSERT(SIZE_OF_RETURNED_VALUE(Untagged##Type, NothingToVisit) == sizeof(int))
// Expands each list entry into a k<Name>Element enumerator for the enum
// defined immediately below (enum body not visible in this chunk).
#define V(name) k##name##Element,
// Declares the per-class static pointer-visitor hook invoked during GC:
// it walks every object pointer stored in raw_obj with the given visitor
// and returns an intptr_t (the object's heap size, judging by the
// VisitPointersPredefined implementations nearby — confirm in full source).
#define VISITOR_SUPPORT(object) \
  static intptr_t Visit##object##Pointers(object##Ptr raw_obj, \
                                          ObjectPointerVisitor* visitor);
133#define RAW_OBJECT_IMPLEMENTATION(object) \
135 VISITOR_SUPPORT(object) \
136 friend class object; \
137 friend class UntaggedObject; \
138 friend class OffsetsTable; \
139 DISALLOW_ALLOCATION(); \
140 DISALLOW_IMPLICIT_CONSTRUCTORS(Untagged##object)
142#define RAW_HEAP_OBJECT_IMPLEMENTATION(object) \
144 RAW_OBJECT_IMPLEMENTATION(object); \
145 friend class object##SerializationCluster; \
146 friend class object##DeserializationCluster; \
147 friend class object##MessageSerializationCluster; \
148 friend class object##MessageDeserializationCluster; \
149 friend class Serializer; \
150 friend class Deserializer; \
151 template <typename Base> \
152 friend class ObjectCopy; \
153 friend class Pass2Visitor;
221 :
public BitField<uword, intptr_t, kSizeTagPos, kSizeTagSize> {};
223 static constexpr intptr_t SizeToTagValue(intptr_t size) {
227 static constexpr intptr_t TagValueToSize(intptr_t value) {
239#if defined(HASH_IN_OBJECT_HEADER)
240 class HashTag :
public BitField<uword, uint32_t, kHashTagPos, kHashTagSize> {
245 :
public BitField<uword, bool, kCardRememberedBit, 1> {};
256 :
public BitField<uword, bool, kOldAndNotRememberedBit, 1> {};
373#if defined(HASH_IN_OBJECT_HEADER)
374 uint32_t GetHeaderHash()
const {
return tags_.
Read<HashTag>(); }
375 uint32_t SetHeaderHashIfNotSet(uint32_t
h) {
390 const intptr_t size_from_class = HeapSizeFromClass(
tags);
418 return (addr >= this_addr) && (addr < (this_addr + this_size));
430 return VisitPointersPredefined(visitor, class_id);
434 intptr_t instance_size =
HeapSize();
441 const auto unboxed_fields_bitmap =
444 if (!unboxed_fields_bitmap.IsEmpty()) {
447 if (!unboxed_fields_bitmap.Get(bit++)) {
455 return instance_size;
463 return VisitPointersPredefined(visitor, class_id);
467 intptr_t instance_size =
HeapSize();
474 const auto unboxed_fields_bitmap =
475 visitor->class_table()->GetUnboxedFieldsMapAt(class_id);
477 if (!unboxed_fields_bitmap.IsEmpty()) {
480 if (!unboxed_fields_bitmap.Get(bit++)) {
481 visitor->V::VisitCompressedPointers(
heap_base(), current, current);
485 visitor->V::VisitCompressedPointers(
heap_base(), first, last);
488 return instance_size;
502 return reinterpret_cast<uword>(raw_obj);
518 intptr_t HeapSizeFromClass(
uword tags)
const;
520 void SetClassId(intptr_t new_cid) { tags_.
Update<ClassIdTag>(new_cid); }
521 void SetClassIdUnsynchronized(intptr_t new_cid) {
536 template <
typename T>
548 template <
typename T>
554 template <
typename type, std::memory_order order = std::memory_order_relaxed>
556 return reinterpret_cast<std::atomic<type>*
>(
const_cast<type*
>(addr))
559 template <
typename type,
560 typename compressed_type,
561 std::memory_order order = std::memory_order_relaxed>
563 compressed_type v =
reinterpret_cast<std::atomic<compressed_type>*
>(
564 const_cast<compressed_type*
>(addr))
573 template <
typename type, std::memory_order order = std::memory_order_relaxed>
575 reinterpret_cast<std::atomic<type>*
>(
const_cast<type*
>(addr))
576 ->store(
value, order);
577 if (
value.IsHeapObject()) {
582 template <
typename type,
583 typename compressed_type,
584 std::memory_order order = std::memory_order_relaxed>
586 reinterpret_cast<std::atomic<compressed_type>*
>(
587 const_cast<compressed_type*
>(addr))
588 ->store(
static_cast<compressed_type
>(
value), order);
589 if (
value.IsHeapObject()) {
594 template <
typename type>
597 if (
value.IsHeapObject()) {
598 CheckHeapPointerStore(
value, thread);
602 template <
typename type,
typename compressed_type>
606 *
const_cast<compressed_type*
>(addr) =
value;
607 if (
value.IsHeapObject()) {
608 CheckHeapPointerStore(
value, thread);
612 template <
typename type>
615 if (
value->IsHeapObject()) {
616 CheckHeapPointerStore(
value, thread);
621 template <
typename type,
622 std::memory_order order = std::memory_order_relaxed,
623 typename value_type =
type>
625 reinterpret_cast<std::atomic<type>*
>(
const_cast<type*
>(addr))
627 if (
value->IsHeapObject()) {
632 template <
typename type,
typename value_type = type>
635 if (
value->IsHeapObject()) {
636 CheckArrayPointerStore(addr,
value, thread);
640 template <
typename type,
typename compressed_type, std::memory_order order>
642 reinterpret_cast<std::atomic<compressed_type>*
>(
643 const_cast<compressed_type*
>(addr))
644 ->store(
static_cast<compressed_type
>(
value), order);
645 if (
value->IsHeapObject()) {
650 template <
typename type,
typename compressed_type, std::memory_order order>
654 reinterpret_cast<std::atomic<compressed_type>*
>(
655 const_cast<compressed_type*
>(addr))
656 ->store(
static_cast<compressed_type
>(
value), order);
657 if (
value->IsHeapObject()) {
658 CheckArrayPointerStore(addr,
value, thread);
662 template <
typename type,
typename compressed_type>
666 *
const_cast<compressed_type*
>(addr) =
value;
667 if (
value->IsHeapObject()) {
668 CheckArrayPointerStore(addr,
value, thread);
672 template <
typename type,
673 typename compressed_type,
674 std::memory_order order = std::memory_order_relaxed>
676 compressed_type previous_value =
677 reinterpret_cast<std::atomic<compressed_type>*
>(
678 const_cast<compressed_type*
>(addr))
679 ->exchange(
static_cast<compressed_type
>(
value), order);
680 if (
value.IsHeapObject()) {
683 return static_cast<type>(previous_value.Decompress(
heap_base()));
686 template <std::memory_order order = std::memory_order_relaxed>
688 return reinterpret_cast<std::atomic<SmiPtr>*
>(
const_cast<SmiPtr*
>(addr))
691 template <std::memory_order order = std::memory_order_relaxed>
693 return static_cast<SmiPtr
>(
reinterpret_cast<std::atomic<CompressedSmiPtr>*
>(
694 const_cast<CompressedSmiPtr*
>(addr))
701 template <
typename type, std::memory_order order = std::memory_order_relaxed>
705 reinterpret_cast<std::atomic<type>*
>(
const_cast<type*
>(addr))
706 ->store(
value, order);
708 template <std::memory_order order = std::memory_order_relaxed>
712 reinterpret_cast<std::atomic<CompressedSmiPtr>*
>(
713 const_cast<CompressedSmiPtr*
>(addr))
714 ->store(
static_cast<CompressedSmiPtr
>(
value), order);
720 uword source_tags = this->tags_;
738 if (
value->untag()->TryAcquireMarkBit()) {
745 template <
typename type,
typename value_type>
746 DART_FORCE_INLINE
void CheckArrayPointerStore(
type const* addr,
749 uword source_tags = this->tags_;
752 thread->write_barrier_mask();
760 thread->StoreBufferAddObject(
static_cast<ObjectPtr
>(
this));
768 thread->DeferredMarkingStackAddObject(value);
771 if (
value->untag()->TryAcquireMarkBit()) {
772 thread->MarkingStackAddObject(value);
779 void RememberCard(
ObjectPtr const* slot);
780#if defined(DART_COMPRESSED_POINTERS)
840 if constexpr (T::kContainsPointerFields) {
852 if constexpr (T::kContainsPointerFields) {
853 return reinterpret_cast<uword>(
868#define POINTER_FIELD(type, name) \
870 template <std::memory_order order = std::memory_order_relaxed> \
871 type name() const { \
872 return LoadPointer<type, order>(&name##_); \
874 template <std::memory_order order = std::memory_order_relaxed> \
875 void set_##name(type value) { \
876 StorePointer<type, order>(&name##_, value); \
882#define COMPRESSED_POINTER_FIELD(type, name) \
884 template <std::memory_order order = std::memory_order_relaxed> \
885 type name() const { \
886 return LoadCompressedPointer<type, Compressed##type, order>(&name##_); \
888 template <std::memory_order order = std::memory_order_relaxed> \
889 void set_##name(type value) { \
890 StoreCompressedPointer<type, Compressed##type, order>(&name##_, value); \
894 Compressed##type name##_;
896#define ARRAY_POINTER_FIELD(type, name) \
898 template <std::memory_order order = std::memory_order_relaxed> \
899 type name() const { \
900 return LoadPointer<type, order>(&name##_); \
902 template <std::memory_order order = std::memory_order_relaxed> \
903 void set_##name(type value) { \
904 StoreArrayPointer<type, order>(&name##_, value); \
910#define COMPRESSED_ARRAY_POINTER_FIELD(type, name) \
912 template <std::memory_order order = std::memory_order_relaxed> \
913 type name() const { \
914 return LoadPointer<Compressed##type, order>(&name##_).Decompress( \
917 template <std::memory_order order = std::memory_order_relaxed> \
918 void set_##name(type value) { \
919 StoreCompressedArrayPointer<type, Compressed##type, order>(&name##_, \
924 Compressed##type name##_;
926#define VARIABLE_POINTER_FIELDS(type, accessor_name, array_name) \
928 template <std::memory_order order = std::memory_order_relaxed> \
929 type accessor_name(intptr_t index) const { \
930 return LoadPointer<type, order>(&array_name()[index]); \
932 template <std::memory_order order = std::memory_order_relaxed> \
933 void set_##accessor_name(intptr_t index, type value) { \
934 StoreArrayPointer<type, order>(&array_name()[index], value); \
936 template <std::memory_order order = std::memory_order_relaxed> \
937 void set_##accessor_name(intptr_t index, type value, Thread* thread) { \
938 StoreArrayPointer<type, order>(&array_name()[index], value, thread); \
942 type* array_name() { \
943 OPEN_ARRAY_START(type, type); \
945 type const* array_name() const { \
946 OPEN_ARRAY_START(type, type); \
948 VISIT_TO_PAYLOAD_END(type)
950#define COMPRESSED_VARIABLE_POINTER_FIELDS(type, accessor_name, array_name) \
952 template <std::memory_order order = std::memory_order_relaxed> \
953 type accessor_name(intptr_t index) const { \
954 return LoadCompressedPointer<type, Compressed##type, order>( \
955 &array_name()[index]); \
957 template <std::memory_order order = std::memory_order_relaxed> \
958 void set_##accessor_name(intptr_t index, type value) { \
959 StoreCompressedArrayPointer<type, Compressed##type, order>( \
960 &array_name()[index], value); \
962 template <std::memory_order order = std::memory_order_relaxed> \
963 void set_##accessor_name(intptr_t index, type value, Thread* thread) { \
964 StoreCompressedArrayPointer<type, Compressed##type, order>( \
965 &array_name()[index], value, thread); \
969 Compressed##type* array_name() { \
970 OPEN_ARRAY_START(Compressed##type, Compressed##type); \
972 Compressed##type const* array_name() const { \
973 OPEN_ARRAY_START(Compressed##type, Compressed##type); \
975 VISIT_TO_PAYLOAD_END(Compressed##type)
977#define SMI_FIELD(type, name) \
979 template <std::memory_order order = std::memory_order_relaxed> \
980 type name() const { \
981 type result = LoadSmi<order>(&name##_); \
982 ASSERT(!result.IsHeapObject()); \
985 template <std::memory_order order = std::memory_order_relaxed> \
986 void set_##name(type value) { \
987 ASSERT(!value.IsHeapObject()); \
988 StoreSmi<type, order>(&name##_, value); \
994#define COMPRESSED_SMI_FIELD(type, name) \
996 template <std::memory_order order = std::memory_order_relaxed> \
997 type name() const { \
998 type result = LoadCompressedSmi<order>(&name##_); \
999 ASSERT(!result.IsHeapObject()); \
1002 template <std::memory_order order = std::memory_order_relaxed> \
1003 void set_##name(type value) { \
1004 ASSERT(!value.IsHeapObject()); \
1005 StoreCompressedSmi(&name##_, value); \
1009 Compressed##type name##_;
1020#if defined(DART_PRECOMPILER)
1021#define WSR_COMPRESSED_POINTER_FIELD(Type, Name) \
1022 COMPRESSED_POINTER_FIELD(ObjectPtr, Name)
1024#define WSR_COMPRESSED_POINTER_FIELD(Type, Name) \
1025 COMPRESSED_POINTER_FIELD(Type, Name)
1074#if !defined(PRODUCT) || !defined(DART_PRECOMPILED_RUNTIME)
1084 declaration_instance_type_arguments)
1085#if !defined(DART_PRECOMPILED_RUNTIME)
1092#if defined(DART_PRECOMPILED_RUNTIME)
1093 VISIT_TO(declaration_instance_type_arguments)
1103 &invocation_dispatcher_cache_);
1109#if !defined(DART_PRECOMPILED_RUNTIME)
1113#if !defined(DART_PRECOMPILED_RUNTIME)
1129 int16_t num_type_arguments_;
1130 uint16_t num_native_fields_;
1131 uint32_t state_bits_;
1134 int32_t host_instance_size_in_words_;
1137 int32_t host_type_arguments_field_offset_in_words_;
1140 int32_t host_next_field_offset_in_words_;
1142#if defined(DART_PRECOMPILER)
1144 int32_t target_instance_size_in_words_;
1147 int32_t target_type_arguments_field_offset_in_words_;
1150 int32_t target_next_field_offset_in_words_;
1153#if !defined(DART_PRECOMPILED_RUNTIME)
1154 uint32_t kernel_offset_;
1178#if !defined(DART_PRECOMPILED_RUNTIME)
1192#if !defined(DART_PRECOMPILED_RUNTIME)
1219#define FOR_EACH_RAW_FUNCTION_KIND(V) \
1221 V(RegularFunction) \
1223 V(ClosureFunction) \
1225 V(ImplicitClosureFunction) \
1237 V(ImplicitStaticGetter) \
1239 V(FieldInitializer) \
1241 V(MethodExtractor) \
1243 V(NoSuchMethodDispatcher) \
1245 V(InvokeFieldDispatcher) \
1247 V(IrregexpFunction) \
1250 V(DynamicInvocationForwarder) \
1254 V(RecordFieldGetter)
// Expands each FOR_EACH_RAW_FUNCTION_KIND entry into a Kind enumerator
// (kRegularFunction, kClosureFunction, ...).
#define KIND_DEFN(Name) k##Name,
1264#define KIND_CASE(Name) \
1265 case Kind::k##Name: \
1276#define KIND_CASE(Name) \
1277 if (strcmp(str, #Name) == 0) { \
1278 *out = Kind::k##Name; \
1319 return At(position) !=
kBoxed;
1339 DART_FORCE_INLINE uint64_t
Value()
const {
return bitmap_; }
1340 DART_FORCE_INLINE
bool IsEmpty()
const {
return bitmap_ == 0; }
1341 DART_FORCE_INLINE
void Reset() { bitmap_ = 0; }
1347 DART_FORCE_INLINE
UnboxedState At(intptr_t position)
const {
1358 (
static_cast<decltype(bitmap_)
>(
state) << shift);
1371 uword unchecked_entry_point_;
1399#if defined(DART_PRECOMPILED_RUNTIME)
1408 UnboxedParameterBitmap unboxed_parameters_info_;
1411#if !defined(DART_PRECOMPILED_RUNTIME) || \
1412 (defined(DART_PRECOMPILED_RUNTIME) && !defined(PRODUCT))
1413 TokenPosition token_pos_;
1416#if !defined(DART_PRECOMPILED_RUNTIME)
1417 TokenPosition end_token_pos_;
1420 AtomicBitFieldContainer<uint32_t> kind_tag_;
// Per-function JIT profiling/state counters. Each entry is
// F(accessor_type, storage_type, field_name): accessor_type is the type the
// getter/setter traffics in, storage_type is the narrower in-object field
// type (see the DECLARE expansion later in this file, which emits
// `storage_type field_name_;`).
#define JIT_FUNCTION_COUNTERS(F) \
  F(intptr_t, int32_t, usage_counter) \
  F(intptr_t, uint16_t, optimized_instruction_count) \
  F(intptr_t, uint16_t, optimized_call_site_count) \
  F(int8_t, int8_t, deoptimization_counter) \
  F(intptr_t, int8_t, state_bits) \
  F(int, int8_t, inlining_depth)
1430#if !defined(DART_PRECOMPILED_RUNTIME)
1431 uint32_t kernel_offset_;
// Declares the backing field (`type name_;`) for each JIT_FUNCTION_COUNTERS
// entry; the first parameter (the accessor's return type) is unused here.
#define DECLARE(return_type, type, name) type name##_;
1437 AtomicBitFieldContainer<uint8_t> packed_fields_;
1439 static constexpr intptr_t kMaxOptimizableBits = 1;
1441 using PackedOptimizable =
1442 BitField<
decltype(packed_fields_),
bool, 0, kMaxOptimizableBits>;
1473 compiler::target::kSmiBits,
1474 "Instantiation mode must fit in a Smi");
1476 static constexpr uint8_t kNoAwaiterLinkDepth = 0xFF;
1509 VISIT_TO(callback_exceptional_return)
1521 int32_t callback_id_;
1524 uint8_t ffi_function_kind_;
1564#if !defined(DART_PRECOMPILED_RUNTIME)
1565 uint32_t kernel_offset_;
1571 int8_t guarded_list_length_in_object_offset_;
1576 int8_t static_type_exactness_state_;
1578 uint16_t kind_bits_;
1580#if !defined(DART_PRECOMPILED_RUNTIME)
1582 int32_t target_offset_;
1598#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
1625#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
1626 int64_t load_timestamp_;
1627 int32_t kernel_script_index_;
1629 int32_t kernel_script_index_;
1630 int64_t load_timestamp_;
1633#if !defined(DART_PRECOMPILED_RUNTIME)
1634 int32_t flags_and_max_position_;
1638 BitField<
decltype(flags_and_max_position_),
bool, 0, 1>;
1640 BitField<
decltype(flags_and_max_position_),
1667 class DartSchemeBit :
public BitField<uint8_t, bool, kDartSchemeBit, 1> {};
1668 class DebuggableBit :
public BitField<uint8_t, bool, kDebuggableBit, 1> {};
1669 class InFullSnapshotBit
1670 :
public BitField<uint8_t, bool, kInFullSnapshotBit, 1> {};
1691#if !defined(DART_PRECOMPILED_RUNTIME)
1701#if !defined(DART_PRECOMPILED_RUNTIME)
1723 uint16_t num_imports_;
1727#if !defined(DART_PRECOMPILED_RUNTIME)
1728 uint32_t kernel_library_index_;
1808 template <
typename Table,
bool kAllCanonicalObjectsAreIncludedIntoSet>
1810 template <
typename Type,
typename PtrType>
1823DART_FORCE_INLINE
uword UntaggedObject::from_offset<UntaggedWeakArray>() {
1842 uword monomorphic_entry_point_;
1873 uword unchecked_entry_point_;
1874 uword monomorphic_unchecked_entry_point_;
1904#if !defined(PRODUCT)
1906#elif defined(DART_PRECOMPILED_RUNTIME)
1909 VISIT_TO(static_calls_target_table);
1919 int32_t state_bits_;
1960 uint8_t* entry_bits() {
return reinterpret_cast<uint8_t*
>(&data()[length_]); }
1961 uint8_t
const* entry_bits()
const {
1962 return reinterpret_cast<uint8_t const*
>(&data()[length_]);
1976 uint32_t size_and_flags_;
1984 static bool ContainsPC(
const InstructionsPtr raw_instr,
uword pc);
2008 uword payload_length_;
2014 uword instructions_relocated_address_;
2016 word build_id_offset_;
2031#define FOR_EACH_RAW_PC_DESCRIPTOR(V) \
2035 V(IcCall, kDeopt << 1) \
2037 V(UnoptStaticCall, kIcCall << 1) \
2039 V(RuntimeCall, kUnoptStaticCall << 1) \
2041 V(OsrEntry, kRuntimeCall << 1) \
2043 V(Rewind, kOsrEntry << 1) \
2045 V(BSSRelocation, kRewind << 1) \
2046 V(Other, kBSSRelocation << 1) \
// Expands each FOR_EACH_RAW_PC_DESCRIPTOR entry into an enumerator with an
// explicit value; the kinds form a one-bit-each mask (each entry's init is
// the previous kind shifted left by one — see the list above).
#define ENUM_DEF(name, init) k##name = init,
2068 intptr_t yield_index) {
2087 static constexpr intptr_t kKindShiftSize = 3;
2088 static constexpr intptr_t kTryIndexSize = 10;
2089 static constexpr intptr_t kYieldIndexSize =
2090 32 - kKindShiftSize - kTryIndexSize;
2093 :
public BitField<uint32_t, intptr_t, 0, kKindShiftSize> {};
2094 class TryIndexBits :
public BitField<uint32_t,
2096 KindShiftBits::kNextBit,
2098 class YieldIndexBits :
public BitField<uint32_t,
2100 TryIndexBits::kNextBit,
2101 kYieldIndexSize> {};
2160 return bit_copy<FlagsAndSizeHeader, Payload>(*
this);
2166 memcpy(
reinterpret_cast<void*
>(
this), &
value,
sizeof(
value));
2225 return reinterpret_cast<const uint8_t*
>(
this) +
2239 class GlobalTableBit
2240 :
public BitField<Payload::FlagsAndSizeHeader, bool, 0, 1> {};
2241 class UsesTableBit :
public BitField<Payload::FlagsAndSizeHeader,
2243 GlobalTableBit::kNextBit,
2246 :
public BitField<Payload::FlagsAndSizeHeader,
2247 Payload::FlagsAndSizeHeader,
2248 UsesTableBit::kNextBit,
2249 sizeof(Payload::FlagsAndSizeHeader) * kBitsPerByte -
2250 UsesTableBit::kNextBit> {};
2266 uint32_t stack_map_offset;
2268 static_assert(
sizeof(DataEntry) ==
sizeof(uint32_t) * 2);
2271 uint32_t canonical_stack_map_entries_offset;
2273 uint32_t first_entry_with_code;
2276 const DataEntry* entries()
const {
OPEN_ARRAY_START(DataEntry, uint32_t); }
2284 static_assert(
sizeof(Data) ==
sizeof(uint32_t) * 4);
2287 const Data* rodata_;
2322 TokenPosition::kNoSource;
2324 TokenPosition::kNoSource;
2326 TokenPosition::kNoSource;
2351 CompressedStringPtr* nameAddrAt(intptr_t i) {
return &(
names()[i]); }
2352 void set_name(intptr_t i, StringPtr value) {
2358 return reinterpret_cast<VarInfo*
>(nameAddrAt(num_entries_));
2370 uint32_t packed_fields_;
2381 intptr_t num_entries()
const {
2405 int32_t num_variables_;
2418#define CONTEXT_SCOPE_VARIABLE_DESC_FLAG_LIST(V) \
2430 struct VariableDesc {
2431 CompressedSmiPtr declaration_token_pos;
2432 CompressedSmiPtr token_pos;
2433 CompressedStringPtr
name;
2434 CompressedSmiPtr
flags;
2436#define DECLARE_BIT(Name) kIs##Name,
2440 CompressedSmiPtr late_init_offset;
2441 CompressedAbstractTypePtr
type;
2442 CompressedSmiPtr
cid;
2443 CompressedSmiPtr context_index;
2444 CompressedSmiPtr context_level;
2445 CompressedSmiPtr kernel_offset;
2448 int32_t num_variables_;
2456 VariableDesc*
begin =
const_cast<VariableDesc*
>(VariableDescAddr(0));
2463 const VariableDesc* VariableDescAddr(intptr_t index)
const {
2465 return reinterpret_cast<const VariableDesc*
>(
data()) + index;
2468#define DEFINE_ACCESSOR(type, name) \
2469 type name##_at(intptr_t index) { \
2470 return LoadCompressedPointer<type>(&VariableDescAddr(index)->name); \
2472 void set_##name##_at(intptr_t index, type value) { \
2473 StoreCompressedPointer(&VariableDescAddr(index)->name, value); \
2485#undef DEFINE_ACCESSOR
2487 CompressedObjectPtr* to(intptr_t num_vars) {
2488 uword
end =
reinterpret_cast<uword
>(VariableDescAddr(num_vars));
2490 return reinterpret_cast<CompressedObjectPtr*
>(
end -
2491 sizeof(CompressedObjectPtr));
2494 return to(num_vars);
2520 uword expected_cid_;
2542 bool can_patch_to_monomorphic_;
2555 case Snapshot::kFullAOT:
2556 return reinterpret_cast<ObjectPtr*
>(&entries_);
2557 case Snapshot::kFull:
2558 case Snapshot::kFullCore:
2559 case Snapshot::kFullJIT:
2561 case Snapshot::kNone:
2562 case Snapshot::kInvalid:
2581 int32_t filled_entry_count_;
2590 uint32_t num_inputs_;
2591 uint32_t num_occupied_;
2601 const uint8_t* instructions_image_;
2604 enum LoadState : int8_t {
2612 BitField<
decltype(packed_fields_), intptr_t, LoadStateBits::kNextBit>;
2638 bool report_after_token_;
2660 bool is_user_initiated_;
2668#if defined(DART_COMPRESSED_POINTERS)
2688 case Snapshot::kFullAOT:
2690 case Snapshot::kFull:
2691 case Snapshot::kFullCore:
2692 case Snapshot::kFullJIT:
2694 case Snapshot::kNone:
2695 case Snapshot::kInvalid:
2701 uint16_t num_imports_;
2702 bool is_deferred_load_;
2746#if defined(DART_COMPRESSED_POINTERS)
2753 uint32_t
flags()
const {
return flags_.load(std::memory_order_relaxed); }
2755 flags_.store(
value, std::memory_order_relaxed);
2766 static constexpr intptr_t kNullabilityMask = NullabilityBits::mask();
2768 static constexpr intptr_t kTypeStateShift = NullabilityBits::kNextBit;
2769 static constexpr intptr_t kTypeStateBits = 2;
2782 static constexpr intptr_t kTypeClassIdShift = TypeStateBits::kNextBit;
2795 return TypeClassIdBits::decode(
flags());
2797 void set_type_class_id(ClassIdTagType value) {
2798 set_flags(TypeClassIdBits::update(value,
flags()));
2801 friend class compiler::target::UntaggedType;
2822 BitField<
decltype(packed_type_parameter_counts_), uint8_t, 0, 8>;
2824 BitField<
decltype(packed_type_parameter_counts_),
2826 PackedNumParentTypeArguments::kNextBit,
2831 BitField<
decltype(packed_parameter_counts_), uint8_t, 0, 1>;
2833 BitField<
decltype(packed_parameter_counts_),
2835 PackedNumImplicitParameters::kNextBit,
2838 BitField<
decltype(packed_parameter_counts_),
2840 PackedHasNamedOptionalParameters::kNextBit,
2843 BitField<
decltype(packed_parameter_counts_),
2845 PackedNumFixedParameters::kNextBit,
2847 static_assert(PackedNumOptionalParameters::kNextBit <=
2848 compiler::target::kSmiBits,
2849 "In-place mask for number of optional parameters cannot fit in "
2850 "a Smi on the target architecture");
2871 static constexpr intptr_t kIsFunctionTypeParameterBit =
2872 TypeStateBits::kNextBit;
2982#if !defined(HASH_IN_OBJECT_HEADER)
2987#if defined(HASH_IN_OBJECT_HEADER)
3050 template <
typename T>
3064#if defined(DART_COMPRESSED_POINTERS)
3078 ExternalTypedDataPtr,
3079 ExternalTypedDataPtr);
3106 ASSERT(data_ == internal_data());
3110 ASSERT(data_ == internal_data());
3132 const intptr_t offset_in_bytes =
RawSmiValue(this->offset_in_bytes());
3133 uint8_t* payload = typed_data()->untag()->data_;
3134 data_ = payload + offset_in_bytes;
3144 data_ = DataFieldForInternalTypedData();
3148 const intptr_t offset_in_bytes =
RawSmiValue(this->offset_in_bytes());
3150 reinterpret_cast<uint8_t*
>(UntaggedObject::ToAddr(typed_data()) +
3151 UntaggedTypedData::payload_offset());
3152 return payload + offset_in_bytes;
3158 if (data_ !=
nullptr ||
RawSmiValue(offset_in_bytes()) != 0 ||
3160 FATAL(
"TypedDataView has invalid inner pointer.");
3163 const intptr_t offset_in_bytes =
RawSmiValue(this->offset_in_bytes());
3164 uint8_t* payload = typed_data()->untag()->data_;
3165 if ((payload + offset_in_bytes) != data_) {
3166 FATAL(
"TypedDataView has invalid inner pointer.");
3223 template <typename Table,
bool kAllCanonicalObjectsAreIncludedIntoSet>
3294 float x()
const {
return value_[0]; }
3295 float y()
const {
return value_[1]; }
3296 float z()
const {
return value_[2]; }
3297 float w()
const {
return value_[3]; }
3305 ALIGN8 int32_t value_[4];
3313 int32_t
x()
const {
return value_[0]; }
3314 int32_t
y()
const {
return value_[1]; }
3315 int32_t
z()
const {
return value_[2]; }
3316 int32_t
w()
const {
return value_[3]; }
3329 double x()
const {
return value_[0]; }
3330 double y()
const {
return value_[1]; }
3337#if defined(DART_COMPRESSED_POINTERS)
3354#if defined(ARCH_IS_32_BIT)
3355#define kIntPtrCid kTypedDataInt32ArrayCid
3356#define GetIntPtr GetInt32
3357#define SetIntPtr SetInt32
3358#define kUintPtrCid kTypedDataUint32ArrayCid
3359#define GetUintPtr GetUint32
3360#define SetUintPtr SetUint32
3361#elif defined(ARCH_IS_64_BIT)
3362#define kIntPtrCid kTypedDataInt64ArrayCid
3363#define GetIntPtr GetInt64
3364#define SetIntPtr SetInt64
3365#define kUintPtrCid kTypedDataUint64ArrayCid
3366#define GetUintPtr GetUint64
3367#define SetUintPtr SetUint64
3369#error Architecture is not 32-bit or 64-bit.
3452 bool expand_inlined_;
3456 bool skip_sync_start_in_parent_stack;
3463 intptr_t frame_size_;
3481#if defined(DART_PRECOMPILED_RUNTIME)
3484 return frame_capacity_;
3512 std::atomic<intptr_t> num_bracket_expressions_;
3513 intptr_t num_bracket_expressions() {
3514 return num_bracket_expressions_.load(std::memory_order_relaxed);
3516 void set_num_bracket_expressions(intptr_t
value) {
3517 num_bracket_expressions_.store(
value, std::memory_order_relaxed);
3524 intptr_t num_one_byte_registers_;
3525 intptr_t num_two_byte_registers_;
3549 template <
typename Type,
typename PtrType>
3565UntaggedObject::to_offset<UntaggedWeakProperty>(intptr_t
length) {
3582 template <
typename Type,
typename PtrType>
3589 friend class ObjectGraph;
3599UntaggedObject::to_offset<UntaggedWeakReference>(intptr_t
length) {
3617#ifdef DART_COMPRESSED_POINTERS
3618 uint32_t align_first_field_in_subclass;
3626 template <
typename GCVisitorType>
3632 friend class ObjectGraph;
3642 template <std::memory_order order = std::memory_order_relaxed>
3643 FinalizerEntryPtr exchange_entries_collected(FinalizerEntryPtr
value) {
3644 return ExchangeCompressedPointer<FinalizerEntryPtr,
3645 CompressedFinalizerEntryPtr, order>(
3646 &entries_collected_,
value);
3649 template <
typename GCVisitorType>
3692 intptr_t external_size_;
3694 template <
typename Type,
typename PtrType>
3696 template <
typename GCVisitorType>
3703 friend class ObjectGraph;
3711UntaggedObject::to_offset<UntaggedFinalizerEntry>(intptr_t
length) {
3754#undef WSR_COMPRESSED_POINTER_FIELD
static float next(float f)
static uint32_t hash(const SkShaderBase::GradientInfo &v)
#define COMPILE_ASSERT(expr)
#define CLASS_LIST_TYPED_DATA(V)
void UpdateBool(bool value)
TargetBitField::Type UpdateConditional(typename TargetBitField::Type value_to_be_set, typename TargetBitField::Type conditional_old_value)
void UpdateUnsynchronized(typename TargetBitField::Type value)
void Update(typename TargetBitField::Type value)
NO_SANITIZE_THREAD TargetBitField::Type ReadIgnoreRace() const
TargetBitField::Type Read() const
static constexpr intptr_t kNextBit
static constexpr intptr_t decode(uword value)
static constexpr uword update(intptr_t value, uword original)
static constexpr uword encode(intptr_t value)
UnboxedFieldBitmap GetUnboxedFieldsMapAt(intptr_t cid) const
const ClassTable * class_table() const
void VisitCompressedPointers(uword heap_base, CompressedObjectPtr *first, CompressedObjectPtr *last)
UntaggedObject * untag() const
intptr_t GetClassId() const
void MarkingStackAddObject(ObjectPtr obj)
void DeferredMarkingStackAddObject(ObjectPtr obj)
static Thread * Current()
void StoreBufferAddObject(ObjectPtr obj)
uword write_barrier_mask() const
@ kFinalizedUninstantiated
std::atomic< uint32_t > flags_
void set_flags(uint32_t value)
std::atomic< uword > type_test_stub_entry_point_
POINTER_FIELD(StringPtr, target_name)
friend void UpdateLengthField(intptr_t, ObjectPtr, ObjectPtr)
void set_external_size(intptr_t value)
DART_FORCE_INLINE bool IsUnboxedDouble(intptr_t position) const
static constexpr intptr_t kCapacity
UnboxedParameterBitmap(uint64_t bitmap)
DART_FORCE_INLINE bool IsEmpty() const
static constexpr intptr_t kBitsPerElement
UnboxedParameterBitmap(const UnboxedParameterBitmap &)=default
static constexpr uint64_t kElementBitmask
DART_FORCE_INLINE bool IsUnboxedInteger(intptr_t position) const
DART_FORCE_INLINE void SetUnboxedInteger(intptr_t position)
DART_FORCE_INLINE bool HasUnboxedParameters() const
DART_FORCE_INLINE bool IsUnboxed(intptr_t position) const
DART_FORCE_INLINE bool IsUnboxedRecord(intptr_t position) const
UnboxedParameterBitmap & operator=(const UnboxedParameterBitmap &)=default
DART_FORCE_INLINE void Reset()
DART_FORCE_INLINE void SetUnboxedDouble(intptr_t position)
DART_FORCE_INLINE uint64_t Value() const
DART_FORCE_INLINE void SetUnboxedRecord(intptr_t position)
static bool ParseKind(const char *str, Kind *out)
static const char * KindToCString(Kind k)
friend class AssemblyImageWriter
static constexpr uword update(intptr_t size, uword tag)
static constexpr intptr_t kMaxSizeTagInUnitsOfAlignment
static constexpr bool SizeFits(intptr_t size)
static constexpr intptr_t kMaxSizeTag
static constexpr uword encode(intptr_t size)
static constexpr uword decode(uword tag)
void StoreSmi(type const *addr, type value)
static uword ToAddr(const ObjectPtr raw_obj)
static bool IsCanonical(intptr_t value)
void StorePointer(type const *addr, type value)
bool IsMarkedIgnoreRace() const
void StoreCompressedArrayPointer(compressed_type const *addr, type value)
COMPILE_ASSERT(kBitsPerByte *sizeof(ClassIdTagType) >=kClassIdTagSize)
void Validate(IsolateGroup *isolate_group) const
friend void ReportImpossibleNullError(intptr_t cid, StackFrame *caller_frame, Thread *thread)
type ExchangeCompressedPointer(compressed_type const *addr, type value)
void StoreCompressedSmi(CompressedSmiPtr const *addr, SmiPtr value)
COMPILE_ASSERT(kCardRememberedBit==0)
static constexpr intptr_t kIncrementalBarrierMask
void StoreCompressedArrayPointer(compressed_type const *addr, type value, Thread *thread)
void StoreArrayPointer(type const *addr, value_type value)
friend void SetNewSpaceTaggingWord(ObjectPtr, classid_t, uint32_t)
COMPILE_ASSERT(kNotMarkedBit+kBarrierOverlapShift==kAlwaysSetBit)
static ObjectPtr FromAddr(uword addr)
bool IsCardRemembered() const
DART_FORCE_INLINE intptr_t VisitPointersNonvirtual(V *visitor)
static bool IsMarked(uword tags)
type LoadCompressedPointer(compressed_type const *addr) const
static constexpr intptr_t kGenerationalBarrierMask
static uword ToAddr(const UntaggedObject *raw_obj)
intptr_t HeapSize(uword tags) const
intptr_t HeapSize() const
DART_FORCE_INLINE void EnsureInRememberedSet(Thread *thread)
COMPILE_ASSERT(kClassIdTagMax==(1<< kClassIdTagSize) - 1)
static DART_FORCE_INLINE uword to_offset(intptr_t length=0)
friend uword TagsFromUntaggedObject(UntaggedObject *)
bool InVMIsolateHeap() const
void VisitPointersPrecise(ObjectPointerVisitor *visitor)
void StoreCompressedArrayPointer(compressed_type const *addr, type value, Thread *thread)
static constexpr intptr_t kBarrierOverlapShift
type LoadPointer(type const *addr) const
void StoreCompressedPointer(compressed_type const *addr, type value, Thread *thread)
void ClearRememberedBit()
static constexpr bool kContainsPointerFields
void StoreArrayPointer(type const *addr, value_type value, Thread *thread)
friend class AssemblyImageWriter
bool TryAcquireRememberedBit()
bool Contains(uword addr) const
void ClearRememberedBitUnsynchronized()
SmiPtr LoadSmi(SmiPtr const *addr) const
void SetCardRememberedBitUnsynchronized()
SmiPtr LoadCompressedSmi(CompressedSmiPtr const *addr) const
void ClearMarkBitUnsynchronized()
COMPILE_ASSERT(kNewBit+kBarrierOverlapShift==kOldAndNotRememberedBit)
friend class StoreBufferUpdateVisitor
intptr_t VisitPointers(ObjectPointerVisitor *visitor)
bool IsRemembered() const
void StoreCompressedPointer(compressed_type const *addr, type value)
@ kOldAndNotRememberedBit
DART_WARN_UNUSED_RESULT bool TryAcquireMarkBit()
intptr_t GetClassId() const
static DART_FORCE_INLINE uword from_offset()
void StorePointerUnaligned(type const *addr, type value, Thread *thread)
void SetMarkBitUnsynchronized()
static constexpr bool kContainsCompressedPointers
void StorePointer(type const *addr, type value, Thread *thread)
static const char * KindToCString(Kind k)
static constexpr intptr_t kInvalidYieldIndex
static bool ParseKind(const char *cstr, Kind *out)
intptr_t frame_capacity() const
const uint8_t * payload() const
static intptr_t payload_offset()
COMPRESSED_SMI_FIELD(SmiPtr, length)
void RecomputeDataField()
void ValidateInnerPointer()
void RecomputeDataFieldForInternalTypedData()
uint8_t * DataFieldForInternalTypedData() const
void RecomputeDataField()
uint8_t * internal_data()
static intptr_t payload_offset()
const uint8_t * internal_data() const
const uint8_t * data() const
static constexpr int ShiftForPowerOfTwo(T x)
static constexpr bool IsAligned(T x, uintptr_t alignment, uintptr_t offset=0)
#define DART_WARN_UNUSED_RESULT
Dart_NativeFunction(* Dart_NativeEntryResolver)(Dart_Handle name, int num_of_arguments, bool *auto_setup_scope)
const uint8_t *(* Dart_NativeEntrySymbol)(Dart_NativeFunction nf)
void *(* Dart_FfiNativeResolver)(const char *name, uintptr_t args_n)
static const char * begin(const StringSlice &s)
FlutterSemanticsFlag flags
FlKeyEvent uint64_t FlKeyResponderAsyncCallback callback
Dart_NativeFunction function
#define DECLARE_BIT(Name)
#define DECLARE(Name, value)
T __attribute__((ext_vector_type(N))) V
const intptr_t kOffsetOfPtr
static constexpr uintptr_t kHeapBaseMask
void CopyTypedDataBaseWithSafepointChecks(Thread *thread, const T &from, const T &to, intptr_t length)
void InitializeExternalTypedDataWithSafepointChecks(Thread *thread, intptr_t cid, const ExternalTypedData &from, const ExternalTypedData &to)
static constexpr intptr_t kOldObjectAlignmentOffset
static const char *const names[]
@ kSharesInstantiatorTypeArguments
@ kSharesFunctionTypeArguments
static constexpr intptr_t kNewObjectAlignmentOffset
void InitializeTypedDataView(TypedDataViewPtr obj)
intptr_t RawSmiValue(const SmiPtr raw_value)
constexpr intptr_t kBitsPerByte
DART_FORCE_INLINE void UpdateLengthField(intptr_t cid, ObjectPtr from, ObjectPtr to)
static constexpr intptr_t kCompressedWordSize
static constexpr intptr_t kObjectAlignmentMask
raw_obj untag() -> num_entries()) VARIABLE_COMPRESSED_VISITOR(Array, Smi::Value(raw_obj->untag() ->length())) VARIABLE_COMPRESSED_VISITOR(TypedData, TypedData::ElementSizeInBytes(raw_obj->GetClassId()) *Smi::Value(raw_obj->untag() ->length())) VARIABLE_COMPRESSED_VISITOR(Record, RecordShape(raw_obj->untag() ->shape()).num_fields()) VARIABLE_NULL_VISITOR(CompressedStackMaps, CompressedStackMaps::PayloadSizeOf(raw_obj)) VARIABLE_NULL_VISITOR(OneByteString, Smi::Value(raw_obj->untag() ->length())) VARIABLE_NULL_VISITOR(TwoByteString, Smi::Value(raw_obj->untag() ->length())) intptr_t UntaggedField::VisitFieldPointers(FieldPtr raw_obj, ObjectPointerVisitor *visitor)
static void StoreUnaligned(T *ptr, T value)
void MournFinalizerEntry(GCVisitorType *visitor, FinalizerEntryPtr current_entry)
static constexpr intptr_t kObjectAlignment
void InitializeExternalTypedData(intptr_t cid, ExternalTypedDataPtr from, ExternalTypedDataPtr to)
static int8_t data[kExtLength]
static constexpr intptr_t kObjectAlignmentLog2
static constexpr intptr_t kClassIdTagMax
ObjectPtr CompressedObjectPtr
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot data
#define DEFINE_FORWARD_DECLARATION(clazz)
#define DEFINE_CONTAINS_COMPRESSED(type)
#define FOR_EACH_RAW_FUNCTION_KIND(V)
#define COMPRESSED_POINTER_FIELD(type, name)
#define ENUM_DEF(name, init)
#define VISIT_FROM_PAYLOAD_START(elem_type)
#define WSR_COMPRESSED_POINTER_FIELD(Type, Name)
#define COMPRESSED_SMI_FIELD(type, name)
#define COMPRESSED_ARRAY_POINTER_FIELD(type, name)
#define FOR_EACH_RAW_PC_DESCRIPTOR(V)
#define SMI_FIELD(type, name)
#define RAW_HEAP_OBJECT_IMPLEMENTATION(object)
#define VISIT_FROM(first)
#define COMPRESSED_VARIABLE_POINTER_FIELDS(type, accessor_name, array_name)
#define RAW_OBJECT_IMPLEMENTATION(object)
#define POINTER_FIELD(type, name)
#define JIT_FUNCTION_COUNTERS(F)
#define DEFINE_ACCESSOR(type, name)
#define CONTEXT_SCOPE_VARIABLE_DESC_FLAG_LIST(V)
DART_FORCE_INLINE void set_flags_and_size(FlagsAndSizeHeader value)
DART_FORCE_INLINE FlagsAndSizeHeader flags_and_size() const
const uint8_t * data() const
uint32_t FlagsAndSizeHeader
void set_index(int32_t index)
void set_kind(VarInfoKind kind)
TokenPosition declaration_pos
#define OPEN_ARRAY_START(type, align)
#define NOT_IN_PRECOMPILED(code)
#define NOT_IN_PRODUCT(code)
#define OFFSET_OF(type, field)
#define ONLY_IN_PRECOMPILED(code)
#define OFFSET_OF_RETURNED_VALUE(type, accessor)