#ifndef RUNTIME_VM_RAW_OBJECT_H_
#define RUNTIME_VM_RAW_OBJECT_H_

#if defined(SHOULD_NOT_INCLUDE_RUNTIME)
#error "Should not include runtime"
#endif
#define DEFINE_FORWARD_DECLARATION(clazz) class Untagged##clazz;
CLASS_LIST(DEFINE_FORWARD_DECLARATION)
#undef DEFINE_FORWARD_DECLARATION
#define DEFINE_CONTAINS_COMPRESSED(type)                                       \
  static constexpr bool kContainsCompressedPointers =                          \
      is_compressed_ptr<type>::value;

#define CHECK_CONTAIN_COMPRESSED(type)                                         \
  static_assert(                                                               \
      kContainsCompressedPointers || is_uncompressed_ptr<type>::value,         \
      "From declaration uses ObjectPtr");                                      \
  static_assert(                                                               \
      !kContainsCompressedPointers || is_compressed_ptr<type>::value,          \
      "From declaration uses CompressedObjectPtr");
#define VISIT_FROM(first)                                                      \
  DEFINE_CONTAINS_COMPRESSED(decltype(first##_))                               \
  static constexpr bool kContainsPointerFields = true;                         \
  base_ptr_type<decltype(first##_)>::type* from() {                            \
    return reinterpret_cast<base_ptr_type<decltype(first##_)>::type*>(         \
        &first##_);                                                            \
  }
#define VISIT_FROM_PAYLOAD_START(elem_type)                                    \
  static_assert(is_uncompressed_ptr<elem_type>::value ||                       \
                    is_compressed_ptr<elem_type>::value,                       \
                "Payload elements must be object pointers");                   \
  DEFINE_CONTAINS_COMPRESSED(elem_type)                                        \
  static constexpr bool kContainsPointerFields = true;                         \
  base_ptr_type<elem_type>::type* from() {                                     \
    const uword payload_start = reinterpret_cast<uword>(this) + sizeof(*this); \
    ASSERT(Utils::IsAligned(payload_start, sizeof(elem_type)));                \
    return reinterpret_cast<base_ptr_type<elem_type>::type*>(payload_start);   \
  }
#define VISIT_TO(last)                                                         \
  CHECK_CONTAIN_COMPRESSED(decltype(last##_));                                 \
  static_assert(kContainsPointerFields,                                        \
                "Must have a corresponding VISIT_FROM");                       \
  base_ptr_type<decltype(last##_)>::type* to(intptr_t length = 0) {            \
    return reinterpret_cast<base_ptr_type<decltype(last##_)>::type*>(          \
        &last##_);                                                             \
  }
#define VISIT_TO_PAYLOAD_END(elem_type)                                        \
  static_assert(is_uncompressed_ptr<elem_type>::value ||                       \
                    is_compressed_ptr<elem_type>::value,                       \
                "Payload elements must be object pointers");                   \
  static_assert(kContainsPointerFields,                                        \
                "Must have a corresponding VISIT_FROM");                       \
  CHECK_CONTAIN_COMPRESSED(elem_type);                                         \
  base_ptr_type<elem_type>::type* to(intptr_t length) {                        \
    const uword payload_start = reinterpret_cast<uword>(this) + sizeof(*this); \
    ASSERT(Utils::IsAligned(payload_start, sizeof(elem_type)));                \
    const uword payload_last =                                                 \
        payload_start + sizeof(elem_type) * (length - 1);                      \
    return reinterpret_cast<base_ptr_type<elem_type>::type*>(payload_last);    \
  }
#define VISIT_NOTHING() int NothingToVisit();
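// Usage sketch (UntaggedExample is hypothetical; real uses follow below,
// e.g. UntaggedClass): a raw layout names its first and last pointer fields
// so the GC can walk the inclusive [from(), to()] slot range.
//
//   class UntaggedExample : public UntaggedObject {
//     RAW_HEAP_OBJECT_IMPLEMENTATION(Example);
//     COMPRESSED_POINTER_FIELD(StringPtr, name)  // first pointer field
//     VISIT_FROM(name)
//     COMPRESSED_POINTER_FIELD(ArrayPtr, data)   // last pointer field
//     VISIT_TO(data)
//   };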
#if defined(DART_COMPRESSED_POINTERS)
#define ASSERT_UNCOMPRESSED(Type)                                              \
  static_assert(!Untagged##Type::kContainsCompressedPointers,                  \
                "Should not contain compressed pointers");

#define ASSERT_COMPRESSED(Type)                                                \
  static_assert(Untagged##Type::kContainsCompressedPointers,                   \
                "Should contain compressed pointers");
#else
#define ASSERT_UNCOMPRESSED(Type)
#define ASSERT_COMPRESSED(Type)
#endif  // defined(DART_COMPRESSED_POINTERS)
#define ASSERT_NOTHING_TO_VISIT(Type)                                          \
  ASSERT(SIZE_OF_RETURNED_VALUE(Untagged##Type, NothingToVisit) == sizeof(int))
enum TypedDataElementType {
#define V(name) k##name##Element,
  CLASS_LIST_TYPED_DATA(V)
#undef V
};
#define VISITOR_SUPPORT(object)                                                \
  static intptr_t Visit##object##Pointers(object##Ptr raw_obj,                 \
                                          ObjectPointerVisitor* visitor);

#define RAW_OBJECT_IMPLEMENTATION(object)                                      \
 private: /* NOLINT */                                                         \
  VISITOR_SUPPORT(object)                                                      \
  friend class object;                                                         \
  friend class UntaggedObject;                                                 \
  friend class OffsetsTable;                                                   \
  DISALLOW_ALLOCATION();                                                       \
  DISALLOW_IMPLICIT_CONSTRUCTORS(Untagged##object)
#define RAW_HEAP_OBJECT_IMPLEMENTATION(object)                                 \
 private: /* NOLINT */                                                         \
  RAW_OBJECT_IMPLEMENTATION(object);                                           \
  friend class object##SerializationCluster;                                   \
  friend class object##DeserializationCluster;                                 \
  friend class object##MessageSerializationCluster;                            \
  friend class object##MessageDeserializationCluster;                          \
  friend class Serializer;                                                     \
  friend class Deserializer;                                                   \
  template <typename Base>                                                     \
  friend class ObjectCopy;                                                     \
  friend class Pass2Visitor;
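// Expansion sketch for a hypothetical UntaggedFoo: the macro declares the
// visitor hook and befriends the serialization machinery, roughly:
//
//   static intptr_t VisitFooPointers(FooPtr raw_obj,
//                                    ObjectPointerVisitor* visitor);
//   friend class FooSerializationCluster;  // ...and the other clusters
//   DISALLOW_IMPLICIT_CONSTRUCTORS(UntaggedFoo)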
    static constexpr uword update(intptr_t size, uword tag) {
      return SizeBits::update(SizeToTagValue(size), tag);
    }

   private:
    // The actual unscaled bit field used within the tag field.
    class SizeBits
        : public BitField<uword, intptr_t, kSizeTagPos, kSizeTagSize> {};

    static constexpr intptr_t SizeToTagValue(intptr_t size) {
      ASSERT(Utils::IsAligned(size, kObjectAlignment));
      return !SizeFits(size) ? 0 : (size >> kObjectAlignmentLog2);
    }
    static constexpr intptr_t TagValueToSize(intptr_t value) {
      return value << kObjectAlignmentLog2;
    }
  };
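  // Worked example (a sketch; the concrete numbers assume kObjectAlignment ==
  // 16, i.e. kObjectAlignmentLog2 == 4): a 64-byte object encodes 64 >> 4 == 4
  // in its size bits, and TagValueToSize(4) == 4 << 4 == 64 recovers the heap
  // size. Sizes above kMaxSizeTag encode as 0 and are recomputed from the
  // class (see HeapSizeFromClass below).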
#if defined(HASH_IN_OBJECT_HEADER)
  class HashTag
      : public BitField<uword, uint32_t, kHashTagPos, kHashTagSize> {};
#endif

  class CardRememberedBit
      : public BitField<uword, bool, kCardRememberedBit, 1> {};

  class NewOrEvacuationCandidateBit
      : public BitField<uword, bool, kNewOrEvacuationCandidateBit, 1> {};

  class OldAndNotRememberedBit
      : public BitField<uword, bool, kOldAndNotRememberedBit, 1> {};
#if defined(HASH_IN_OBJECT_HEADER)
  uint32_t GetHeaderHash() const { return tags_.Read<HashTag>(); }
  uint32_t SetHeaderHashIfNotSet(uint32_t h) {
    return tags_.UpdateConditional<HashTag>(h, /*conditional_old_value=*/0);
  }
#endif
  intptr_t HeapSize(uword tags) const {
    // Prefer the size encoded in the tags; sizes that do not fit in the size
    // tag are encoded as 0 and recomputed from the class.
    intptr_t result = SizeTag::decode(tags);
    if (result != 0) {
      return result;
    }
    const intptr_t size_from_class = HeapSizeFromClass(tags);
    return size_from_class;
  }

  bool Contains(uword addr) const {
    intptr_t this_size = HeapSize();
    uword this_addr = UntaggedObject::ToAddr(this);
    return (addr >= this_addr) && (addr < (this_addr + this_size));
  }
  intptr_t VisitPointers(ObjectPointerVisitor* visitor) {
    // Fall back to the virtual variant for predefined classes.
    intptr_t class_id = GetClassId();
    if (class_id < kNumPredefinedCids) {
      return VisitPointersPredefined(visitor, class_id);
    }

    // Calculate the first and last raw object pointer fields.
    intptr_t instance_size = HeapSize();
    uword obj_addr = ToAddr(this);
    const auto first = reinterpret_cast<CompressedObjectPtr*>(
        obj_addr + sizeof(UntaggedObject));
    const auto last = reinterpret_cast<CompressedObjectPtr*>(
        obj_addr + instance_size - kCompressedWordSize);

    const auto unboxed_fields_bitmap =
        visitor->class_table()->GetUnboxedFieldsMapAt(class_id);

    if (!unboxed_fields_bitmap.IsEmpty()) {
      intptr_t bit = sizeof(UntaggedObject) / kCompressedWordSize;
      for (CompressedObjectPtr* current = first; current <= last; current++) {
        if (!unboxed_fields_bitmap.Get(bit++)) {
          visitor->VisitCompressedPointers(heap_base(), current, current);
        }
      }
    } else {
      visitor->VisitCompressedPointers(heap_base(), first, last);
    }

    return instance_size;
  }

  template <typename V>
  DART_FORCE_INLINE intptr_t VisitPointersNonvirtual(V* visitor) {
    // Fall back to the virtual variant for predefined classes.
    intptr_t class_id = GetClassId();
    if (class_id < kNumPredefinedCids) {
      return VisitPointersPredefined(visitor, class_id);
    }

    // Same as above, but the visit calls are devirtualized through V.
    intptr_t instance_size = HeapSize();
    uword obj_addr = ToAddr(this);
    const auto first = reinterpret_cast<CompressedObjectPtr*>(
        obj_addr + sizeof(UntaggedObject));
    const auto last = reinterpret_cast<CompressedObjectPtr*>(
        obj_addr + instance_size - kCompressedWordSize);

    const auto unboxed_fields_bitmap =
        visitor->class_table()->GetUnboxedFieldsMapAt(class_id);

    if (!unboxed_fields_bitmap.IsEmpty()) {
      intptr_t bit = sizeof(UntaggedObject) / kCompressedWordSize;
      for (CompressedObjectPtr* current = first; current <= last; current++) {
        if (!unboxed_fields_bitmap.Get(bit++)) {
          visitor->V::VisitCompressedPointers(heap_base(), current, current);
        }
      }
    } else {
      visitor->V::VisitCompressedPointers(heap_base(), first, last);
    }

    return instance_size;
  }
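  // Why the bitmap matters (sketch with a hypothetical layout): for
  //   { StringPtr a_; int64_t unboxed_b_; ArrayPtr c_; }
  // the unboxed-fields bitmap marks the word(s) of unboxed_b_, so the loops
  // above visit a_ and c_ but skip unboxed_b_, whose raw bits must not be
  // misread as a compressed object pointer.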
  static uword ToAddr(const UntaggedObject* raw_obj) {
    return reinterpret_cast<uword>(raw_obj);
  }
  intptr_t HeapSizeFromClass(uword tags) const;

  void SetClassId(intptr_t new_cid) { tags_.Update<ClassIdTag>(new_cid); }
  void SetClassIdUnsynchronized(intptr_t new_cid) {
    tags_.UpdateUnsynchronized<ClassIdTag>(new_cid);
  }
  template <typename T>
  static DART_FORCE_INLINE uword from_offset();
  template <typename T>
  static DART_FORCE_INLINE uword to_offset(intptr_t length = 0);

  template <typename type, std::memory_order order = std::memory_order_relaxed>
  type LoadPointer(type const* addr) const {
    return reinterpret_cast<std::atomic<type>*>(const_cast<type*>(addr))
        ->load(order);
  }

  template <typename type,
            typename compressed_type,
            std::memory_order order = std::memory_order_relaxed>
  type LoadCompressedPointer(compressed_type const* addr) const {
    compressed_type v = reinterpret_cast<std::atomic<compressed_type>*>(
                            const_cast<compressed_type*>(addr))
                            ->load(order);
    return static_cast<type>(v.Decompress(heap_base()));
  }
  template <typename type, std::memory_order order = std::memory_order_relaxed>
  void StorePointer(type const* addr, type value) {
    reinterpret_cast<std::atomic<type>*>(const_cast<type*>(addr))
        ->store(value, order);
    if (value.IsHeapObject()) {
      CheckHeapPointerStore(value, Thread::Current());
    }
  }

  template <typename type,
            typename compressed_type,
            std::memory_order order = std::memory_order_relaxed>
  void StoreCompressedPointer(compressed_type const* addr, type value) {
    reinterpret_cast<std::atomic<compressed_type>*>(
        const_cast<compressed_type*>(addr))
        ->store(static_cast<compressed_type>(value), order);
    if (value.IsHeapObject()) {
      CheckHeapPointerStore(value, Thread::Current());
    }
  }
  template <typename type>
  void StorePointer(type const* addr, type value, Thread* thread) {
    *const_cast<type*>(addr) = value;
    if (value.IsHeapObject()) {
      CheckHeapPointerStore(value, thread);
    }
  }

  template <typename type, typename compressed_type>
  void StoreCompressedPointer(compressed_type const* addr,
                              type value,
                              Thread* thread) {
    *const_cast<compressed_type*>(addr) = value;
    if (value.IsHeapObject()) {
      CheckHeapPointerStore(value, thread);
    }
  }

  template <typename type>
  void StorePointerUnaligned(type const* addr, type value, Thread* thread) {
    StoreUnaligned(const_cast<type*>(addr), value);
    if (value->IsHeapObject()) {
      CheckHeapPointerStore(value, thread);
    }
  }
  template <typename type,
            std::memory_order order = std::memory_order_relaxed,
            typename value_type = type>
  void StoreArrayPointer(type const* addr, value_type value) {
    reinterpret_cast<std::atomic<type>*>(const_cast<type*>(addr))
        ->store(type(value), order);
    if (value->IsHeapObject()) {
      CheckArrayPointerStore(addr, value, Thread::Current());
    }
  }

  template <typename type, typename value_type = type>
  void StoreArrayPointer(type const* addr, value_type value, Thread* thread) {
    *const_cast<type*>(addr) = value;
    if (value->IsHeapObject()) {
      CheckArrayPointerStore(addr, value, thread);
    }
  }

  template <typename type, typename compressed_type, std::memory_order order>
  void StoreCompressedArrayPointer(compressed_type const* addr, type value) {
    reinterpret_cast<std::atomic<compressed_type>*>(
        const_cast<compressed_type*>(addr))
        ->store(static_cast<compressed_type>(value), order);
    if (value->IsHeapObject()) {
      CheckArrayPointerStore(addr, value, Thread::Current());
    }
  }

  template <typename type, typename compressed_type, std::memory_order order>
  void StoreCompressedArrayPointer(compressed_type const* addr,
                                   type value,
                                   Thread* thread) {
    reinterpret_cast<std::atomic<compressed_type>*>(
        const_cast<compressed_type*>(addr))
        ->store(static_cast<compressed_type>(value), order);
    if (value->IsHeapObject()) {
      CheckArrayPointerStore(addr, value, thread);
    }
  }

  template <typename type, typename compressed_type>
  void StoreCompressedArrayPointer(compressed_type const* addr,
                                   type value,
                                   Thread* thread) {
    *const_cast<compressed_type*>(addr) = value;
    if (value->IsHeapObject()) {
      CheckArrayPointerStore(addr, value, thread);
    }
  }
  template <typename type,
            typename compressed_type,
            std::memory_order order = std::memory_order_relaxed>
  type ExchangeCompressedPointer(compressed_type const* addr, type value) {
    compressed_type previous_value =
        reinterpret_cast<std::atomic<compressed_type>*>(
            const_cast<compressed_type*>(addr))
            ->exchange(static_cast<compressed_type>(value), order);
    if (value.IsHeapObject()) {
      CheckHeapPointerStore(value, Thread::Current());
    }
    return static_cast<type>(previous_value.Decompress(heap_base()));
  }
  template <std::memory_order order = std::memory_order_relaxed>
  SmiPtr LoadSmi(SmiPtr const* addr) const {
    return reinterpret_cast<std::atomic<SmiPtr>*>(const_cast<SmiPtr*>(addr))
        ->load(order);
  }
  template <std::memory_order order = std::memory_order_relaxed>
  SmiPtr LoadCompressedSmi(CompressedSmiPtr const* addr) const {
    return static_cast<SmiPtr>(
        reinterpret_cast<std::atomic<CompressedSmiPtr>*>(
            const_cast<CompressedSmiPtr*>(addr))
            ->load(order)
            .DecompressSmi());
  }
  template <typename type, std::memory_order order = std::memory_order_relaxed>
  void StoreSmi(type const* addr, type value) {
    // Can't use Contains, as array length is initialized through this method.
    ASSERT(reinterpret_cast<uword>(addr) >= UntaggedObject::ToAddr(this));
    reinterpret_cast<std::atomic<type>*>(const_cast<type*>(addr))
        ->store(value, order);
  }
  template <std::memory_order order = std::memory_order_relaxed>
  void StoreCompressedSmi(CompressedSmiPtr const* addr, SmiPtr value) {
    // Can't use Contains, as array length is initialized through this method.
    ASSERT(reinterpret_cast<uword>(addr) >= UntaggedObject::ToAddr(this));
    reinterpret_cast<std::atomic<CompressedSmiPtr>*>(
        const_cast<CompressedSmiPtr*>(addr))
        ->store(static_cast<CompressedSmiPtr>(value), order);
  }
 private:
  DART_FORCE_INLINE
  void CheckHeapPointerStore(ObjectPtr value, Thread* thread) {
    uword source_tags = this->tags_;
    uword target_tags = value->untag()->tags_;
    uword overlap = (source_tags >> kBarrierOverlapShift) & target_tags &
                    thread->write_barrier_mask();
    if (overlap != 0) {
      if (value->IsNewObject()) {
        // Generational barrier: record this object in the remembered set.
        EnsureInRememberedSet(thread);
      } else {
        // Incremental barrier: record the target in the marking stack.
        if (value->untag()->TryAcquireMarkBit()) {
          thread->MarkingStackAddObject(value);
        }
      }
    }
  }

  template <typename type, typename value_type>
  DART_FORCE_INLINE void CheckArrayPointerStore(type const* addr,
                                                value_type value,
                                                Thread* thread) {
    uword source_tags = this->tags_;
    uword target_tags = value->untag()->tags_;
    uword overlap = (source_tags >> kBarrierOverlapShift) & target_tags &
                    thread->write_barrier_mask();
    if (overlap != 0) {
      if (value->IsNewObject()) {
        // Generational barrier: remember the slot (or its card, for large
        // card-remembered arrays).
        if (this->IsCardRemembered()) {
          RememberCard(reinterpret_cast<ObjectPtr const*>(addr));
        } else if (this->TryAcquireRememberedBit()) {
          thread->StoreBufferAddObject(static_cast<ObjectPtr>(this));
        }
      } else {
        // Incremental barrier: instructions may live on non-writable pages,
        // so defer marking them; otherwise mark the target directly.
        if (ClassIdTag::decode(target_tags) == kInstructionsCid) {
          thread->DeferredMarkingStackAddObject(value);
          return;
        }
        if (value->untag()->TryAcquireMarkBit()) {
          thread->MarkingStackAddObject(value);
        }
      }
    }
  }
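  // Barrier filter sketch (bit positions per the COMPILE_ASSERTs on this
  // class): shifting source_tags right by kBarrierOverlapShift lines the
  // source's kOldAndNotRememberedBit up with the target's
  // kNewOrEvacuationCandidateBit, and the source's kAlwaysSetBit up with the
  // target's kNotMarkedBit. Hence
  //   overlap = (source_tags >> kBarrierOverlapShift) & target_tags & mask
  // is non-zero exactly when an old, not-yet-remembered object stores a
  // pointer to a new object (generational case), or when marking is active
  // and the target is not yet marked (incremental case); all other stores
  // pay only this single AND-and-test.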
  void RememberCard(ObjectPtr const* slot);
#if defined(DART_COMPRESSED_POINTERS)
  void RememberCard(CompressedObjectPtr const* slot);
#endif
template <typename T>
DART_FORCE_INLINE uword UntaggedObject::from_offset() {
  if constexpr (T::kContainsPointerFields) {
    return reinterpret_cast<uword>(reinterpret_cast<T*>(kOffsetOfPtr)->from()) -
           kOffsetOfPtr;
  } else {
    // Non-zero, so that to_offset() for a zero-length payload still compares
    // less than from_offset().
    return sizeof(UntaggedObject);
  }
}

template <typename T>
DART_FORCE_INLINE uword UntaggedObject::to_offset(intptr_t length) {
  if constexpr (T::kContainsPointerFields) {
    return reinterpret_cast<uword>(
               reinterpret_cast<T*>(kOffsetOfPtr)->to(length)) -
           kOffsetOfPtr;
  } else {
    return 0;
  }
}
#define POINTER_FIELD(type, name)                                              \
 public:                                                                       \
  template <std::memory_order order = std::memory_order_relaxed>               \
  type name() const {                                                          \
    return LoadPointer<type, order>(&name##_);                                 \
  }                                                                            \
  template <std::memory_order order = std::memory_order_relaxed>               \
  void set_##name(type value) {                                                \
    StorePointer<type, order>(&name##_, value);                                \
  }                                                                            \
                                                                               \
 protected:                                                                    \
  type name##_;
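// Expansion sketch: POINTER_FIELD(ObjectPoolPtr, object_pool) (as used by
// UntaggedCode below) generates, roughly:
//
//   ObjectPoolPtr object_pool() const {
//     return LoadPointer<ObjectPoolPtr>(&object_pool_);
//   }
//   void set_object_pool(ObjectPoolPtr value) {
//     StorePointer<ObjectPoolPtr>(&object_pool_, value);  // barrier-checked
//   }
//   ObjectPoolPtr object_pool_;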
#define COMPRESSED_POINTER_FIELD(type, name)                                   \
 public:                                                                       \
  template <std::memory_order order = std::memory_order_relaxed>               \
  type name() const {                                                          \
    return LoadCompressedPointer<type, Compressed##type, order>(&name##_);     \
  }                                                                            \
  template <std::memory_order order = std::memory_order_relaxed>               \
  void set_##name(type value) {                                                \
    StoreCompressedPointer<type, Compressed##type, order>(&name##_, value);    \
  }                                                                            \
                                                                               \
 protected:                                                                    \
  Compressed##type name##_;
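// The compressed variant only changes the slot type: for
// COMPRESSED_POINTER_FIELD(StringPtr, name) the backing field is a
// CompressedStringPtr (one compressed word), while the accessor still
// traffics in full pointers:
//
//   StringPtr name() const {  // decompressed against heap_base()
//     return LoadCompressedPointer<StringPtr, CompressedStringPtr>(&name_);
//   }
//   CompressedStringPtr name_;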
#define ARRAY_POINTER_FIELD(type, name)                                        \
 public:                                                                       \
  template <std::memory_order order = std::memory_order_relaxed>               \
  type name() const {                                                          \
    return LoadPointer<type, order>(&name##_);                                 \
  }                                                                            \
  template <std::memory_order order = std::memory_order_relaxed>               \
  void set_##name(type value) {                                                \
    StoreArrayPointer<type, order>(&name##_, value);                           \
  }                                                                            \
                                                                               \
 protected:                                                                    \
  type name##_;
#define COMPRESSED_ARRAY_POINTER_FIELD(type, name)                             \
 public:                                                                       \
  template <std::memory_order order = std::memory_order_relaxed>               \
  type name() const {                                                          \
    return LoadPointer<Compressed##type, order>(&name##_).Decompress(          \
        heap_base());                                                          \
  }                                                                            \
  template <std::memory_order order = std::memory_order_relaxed>               \
  void set_##name(type value) {                                                \
    StoreCompressedArrayPointer<type, Compressed##type, order>(&name##_,       \
                                                               value);         \
  }                                                                            \
                                                                               \
 protected:                                                                    \
  Compressed##type name##_;
#define VARIABLE_POINTER_FIELDS(type, accessor_name, array_name)               \
 public:                                                                       \
  template <std::memory_order order = std::memory_order_relaxed>               \
  type accessor_name(intptr_t index) const {                                   \
    return LoadPointer<type, order>(&array_name()[index]);                     \
  }                                                                            \
  template <std::memory_order order = std::memory_order_relaxed>               \
  void set_##accessor_name(intptr_t index, type value) {                       \
    StoreArrayPointer<type, order>(&array_name()[index], value);               \
  }                                                                            \
  template <std::memory_order order = std::memory_order_relaxed>               \
  void set_##accessor_name(intptr_t index, type value, Thread* thread) {       \
    StoreArrayPointer<type, order>(&array_name()[index], value, thread);       \
  }                                                                            \
                                                                               \
 protected:                                                                    \
  type* array_name() { OPEN_ARRAY_START(type, type); }                         \
  type const* array_name() const { OPEN_ARRAY_START(type, type); }             \
  VISIT_TO_PAYLOAD_END(type)
#define COMPRESSED_VARIABLE_POINTER_FIELDS(type, accessor_name, array_name)    \
 public:                                                                       \
  template <std::memory_order order = std::memory_order_relaxed>               \
  type accessor_name(intptr_t index) const {                                   \
    return LoadCompressedPointer<type, Compressed##type, order>(               \
        &array_name()[index]);                                                 \
  }                                                                            \
  template <std::memory_order order = std::memory_order_relaxed>               \
  void set_##accessor_name(intptr_t index, type value) {                       \
    StoreCompressedArrayPointer<type, Compressed##type, order>(                \
        &array_name()[index], value);                                          \
  }                                                                            \
  template <std::memory_order order = std::memory_order_relaxed>               \
  void set_##accessor_name(intptr_t index, type value, Thread* thread) {       \
    StoreCompressedArrayPointer<type, Compressed##type, order>(                \
        &array_name()[index], value, thread);                                  \
  }                                                                            \
                                                                               \
 protected:                                                                    \
  Compressed##type* array_name() {                                             \
    OPEN_ARRAY_START(Compressed##type, Compressed##type);                      \
  }                                                                            \
  Compressed##type const* array_name() const {                                 \
    OPEN_ARRAY_START(Compressed##type, Compressed##type);                      \
  }                                                                            \
  VISIT_TO_PAYLOAD_END(Compressed##type)
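// Layout sketch (hypothetical use, mirroring how array-like classes apply
// this macro): COMPRESSED_VARIABLE_POINTER_FIELDS(ObjectPtr, element, data)
// places the payload immediately after the fixed header fields:
//
//   [ header | fixed fields | data()[0] data()[1] ... data()[length-1] ]
//
// element(i)/set_element(i, v) index into that payload, and
// VISIT_TO_PAYLOAD_END computes to(length) as
// payload_start + sizeof(elem_type) * (length - 1), i.e. the last slot.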
#define SMI_FIELD(type, name)                                                  \
 public:                                                                       \
  template <std::memory_order order = std::memory_order_relaxed>               \
  type name() const {                                                          \
    type result = LoadSmi<order>(&name##_);                                    \
    ASSERT(!result.IsHeapObject());                                            \
    return result;                                                             \
  }                                                                            \
  template <std::memory_order order = std::memory_order_relaxed>               \
  void set_##name(type value) {                                                \
    ASSERT(!value.IsHeapObject());                                             \
    StoreSmi<type, order>(&name##_, value);                                    \
  }                                                                            \
                                                                               \
 protected:                                                                    \
  type name##_;
#define COMPRESSED_SMI_FIELD(type, name)                                       \
 public:                                                                       \
  template <std::memory_order order = std::memory_order_relaxed>               \
  type name() const {                                                          \
    type result = LoadCompressedSmi<order>(&name##_);                          \
    ASSERT(!result.IsHeapObject());                                            \
    return result;                                                             \
  }                                                                            \
  template <std::memory_order order = std::memory_order_relaxed>               \
  void set_##name(type value) {                                                \
    ASSERT(!value.IsHeapObject());                                             \
    StoreCompressedSmi(&name##_, value);                                       \
  }                                                                            \
                                                                               \
 protected:                                                                    \
  Compressed##type name##_;
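// Sketch: COMPRESSED_SMI_FIELD(SmiPtr, length), the pattern used for length
// fields, yields length()/set_length() accessors that ASSERT the value is a
// Smi on both paths. Smi stores go through StoreSmi/StoreCompressedSmi and
// therefore skip the write barrier entirely: immediates are never heap
// objects.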
#if defined(DART_PRECOMPILER)
#define WSR_COMPRESSED_POINTER_FIELD(Type, Name)                               \
  COMPRESSED_POINTER_FIELD(ObjectPtr, Name)
#else
#define WSR_COMPRESSED_POINTER_FIELD(Type, Name)                               \
  COMPRESSED_POINTER_FIELD(Type, Name)
#endif  // defined(DART_PRECOMPILER)
class UntaggedClass : public UntaggedObject {
  RAW_HEAP_OBJECT_IMPLEMENTATION(Class);

  COMPRESSED_POINTER_FIELD(StringPtr, name)
  VISIT_FROM(name)
  COMPRESSED_POINTER_FIELD(ArrayPtr, functions)
  COMPRESSED_POINTER_FIELD(ArrayPtr, functions_hash_table)
  COMPRESSED_POINTER_FIELD(ArrayPtr, fields)
  COMPRESSED_POINTER_FIELD(ArrayPtr, offset_in_words_to_field)
  COMPRESSED_POINTER_FIELD(ArrayPtr, interfaces)
  COMPRESSED_POINTER_FIELD(ScriptPtr, script)
  COMPRESSED_POINTER_FIELD(LibraryPtr, library)
  COMPRESSED_POINTER_FIELD(TypeParametersPtr, type_parameters)
  COMPRESSED_POINTER_FIELD(TypePtr, super_type)
  COMPRESSED_POINTER_FIELD(ArrayPtr, constants)
  COMPRESSED_POINTER_FIELD(TypePtr, declaration_type)
  COMPRESSED_POINTER_FIELD(ArrayPtr, invocation_dispatcher_cache)
#if !defined(PRODUCT) || !defined(DART_PRECOMPILED_RUNTIME)
  COMPRESSED_POINTER_FIELD(GrowableObjectArrayPtr, direct_implementors)
  COMPRESSED_POINTER_FIELD(GrowableObjectArrayPtr, direct_subclasses)
#endif  // !defined(PRODUCT) || !defined(DART_PRECOMPILED_RUNTIME)
  COMPRESSED_POINTER_FIELD(TypeArgumentsPtr,
                           declaration_instance_type_arguments)
#if !defined(DART_PRECOMPILED_RUNTIME)
  COMPRESSED_POINTER_FIELD(CodePtr, allocation_stub)
  COMPRESSED_POINTER_FIELD(WeakArrayPtr, dependent_code)
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

#if defined(DART_PRECOMPILED_RUNTIME)
  VISIT_TO(declaration_instance_type_arguments)
#else
  VISIT_TO(dependent_code)
#endif  // defined(DART_PRECOMPILED_RUNTIME)

  // From to_snapshot(): in snapshots the stored pointer fields end at
  // invocation_dispatcher_cache_.
    return reinterpret_cast<CompressedObjectPtr*>(
        &invocation_dispatcher_cache_);
  NOT_IN_PRECOMPILED(TokenPosition token_pos_);
  NOT_IN_PRECOMPILED(TokenPosition end_token_pos_);
  NOT_IN_PRECOMPILED(classid_t implementor_cid_);

  int16_t num_type_arguments_;
  uint16_t num_native_fields_;
  uint32_t state_bits_;

  int32_t host_instance_size_in_words_;
  int32_t host_type_arguments_field_offset_in_words_;
  int32_t host_next_field_offset_in_words_;

#if defined(DART_PRECOMPILER)
  int32_t target_instance_size_in_words_;
  int32_t target_type_arguments_field_offset_in_words_;
  int32_t target_next_field_offset_in_words_;
#endif  // defined(DART_PRECOMPILER)

#if !defined(DART_PRECOMPILED_RUNTIME)
  uint32_t kernel_offset_;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  NOT_IN_PRECOMPILED(intptr_t kernel_library_index_);
#define FOR_EACH_RAW_FUNCTION_KIND(V)                                          \
  V(RegularFunction)                                                           \
  V(ClosureFunction)                                                           \
  V(ImplicitClosureFunction)                                                   \
  V(ImplicitStaticGetter)                                                      \
  V(FieldInitializer)                                                          \
  V(MethodExtractor)                                                           \
  V(NoSuchMethodDispatcher)                                                    \
  V(InvokeFieldDispatcher)                                                     \
  V(IrregexpFunction)                                                          \
  V(DynamicInvocationForwarder)                                                \
  V(RecordFieldGetter)

  enum Kind {
#define KIND_DEFN(Name) k##Name,
    FOR_EACH_RAW_FUNCTION_KIND(KIND_DEFN)
#undef KIND_DEFN
  };

  static const char* KindToCString(Kind k) {
    switch (k) {
#define KIND_CASE(Name)                                                        \
  case Kind::k##Name:                                                          \
    return #Name;
      FOR_EACH_RAW_FUNCTION_KIND(KIND_CASE)
#undef KIND_CASE
      default:
        UNREACHABLE();
        return nullptr;
    }
  }

  static bool ParseKind(const char* str, Kind* out) {
#define KIND_CASE(Name)                                                        \
  if (strcmp(str, #Name) == 0) {                                               \
    *out = Kind::k##Name;                                                      \
    return true;                                                               \
  }
    FOR_EACH_RAW_FUNCTION_KIND(KIND_CASE)
#undef KIND_CASE
    return false;
  }
    DART_FORCE_INLINE bool IsUnboxed(intptr_t position) const {
      return At(position) != kBoxed;
    }
    DART_FORCE_INLINE uint64_t Value() const { return bitmap_; }
    DART_FORCE_INLINE bool IsEmpty() const { return bitmap_ == 0; }
    DART_FORCE_INLINE void Reset() { bitmap_ = 0; }

    DART_FORCE_INLINE UnboxedState At(intptr_t position) const {
      if (position >= kCapacity) {
        return kBoxed;
      }
      return static_cast<UnboxedState>(
          (bitmap_ >> (kBitsPerElement * position)) & kElementBitmask);
    }

    DART_FORCE_INLINE void SetAt(intptr_t position, UnboxedState state) {
      ASSERT(position < kCapacity);
      const intptr_t shift = kBitsPerElement * position;
      bitmap_ = (bitmap_ & ~(kElementBitmask << shift)) |
                (static_cast<decltype(bitmap_)>(state) << shift);
    }
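    // Worked example (sketch; follows the per-position encoding above):
    // position p occupies the kBitsPerElement-wide lane starting at bit
    // kBitsPerElement * p of bitmap_. After SetAt(1, state) the lane at
    // shift kBitsPerElement holds `state`, so At(1) == state and
    // IsUnboxed(1) is true, while At(0) still reads kBoxed (an empty
    // bitmap means every parameter is boxed).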
  uword unchecked_entry_point_;

  UnboxedParameterBitmap unboxed_parameters_info_;

#if !defined(DART_PRECOMPILED_RUNTIME) ||                                      \
    (defined(DART_PRECOMPILED_RUNTIME) && !defined(PRODUCT))
  TokenPosition token_pos_;
#endif
#if !defined(DART_PRECOMPILED_RUNTIME)
  TokenPosition end_token_pos_;
#endif

  AtomicBitFieldContainer<uint32_t> kind_tag_;

#define JIT_FUNCTION_COUNTERS(F)                                               \
  F(intptr_t, int32_t, usage_counter)                                          \
  F(intptr_t, uint16_t, optimized_instruction_count)                           \
  F(intptr_t, uint16_t, optimized_call_site_count)                             \
  F(int8_t, int8_t, deoptimization_counter)                                    \
  F(intptr_t, int8_t, state_bits)                                              \
  F(int, int8_t, inlining_depth)

#if !defined(DART_PRECOMPILED_RUNTIME)
  uint32_t kernel_offset_;

#define DECLARE(return_type, type, name) type name##_;
  JIT_FUNCTION_COUNTERS(DECLARE)
#undef DECLARE

  AtomicBitFieldContainer<uint8_t> packed_fields_;

  static constexpr intptr_t kMaxOptimizableBits = 1;

  using PackedOptimizable =
      BitField<decltype(packed_fields_), bool, 0, kMaxOptimizableBits>;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
1495 "Instantiation mode must fit in a Smi");
1497 static constexpr uint8_t kNoAwaiterLinkDepth = 0xFF;
  VISIT_TO(callback_exceptional_return)

  int32_t callback_id_;
  uint8_t ffi_function_kind_;

class UntaggedField : public UntaggedObject {
  RAW_HEAP_OBJECT_IMPLEMENTATION(Field);
  VISIT_TO(dependent_code);

#if !defined(DART_PRECOMPILED_RUNTIME)
  uint32_t kernel_offset_;
#endif
  int8_t guarded_list_length_in_object_offset_;
  int8_t static_type_exactness_state_;
  uint16_t kind_bits_;

#if !defined(DART_PRECOMPILED_RUNTIME)
  int32_t target_offset_;
#endif
class UntaggedScript : public UntaggedObject {
  RAW_HEAP_OBJECT_IMPLEMENTATION(Script);

#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
  int64_t load_timestamp_;
  int32_t kernel_script_index_;
#else
  int32_t kernel_script_index_;
  int64_t load_timestamp_;
#endif
#if !defined(DART_PRECOMPILED_RUNTIME)
  int32_t flags_and_max_position_;

 public:
  using LazyLookupSourceAndLineStartsBit =
      BitField<decltype(flags_and_max_position_), bool, 0, 1>;
  using HasCachedMaxPositionBit =
      BitField<decltype(flags_and_max_position_),
               bool,
               LazyLookupSourceAndLineStartsBit::kNextBit,
               1>;
  COMPILE_ASSERT(kNumFlagBits <= (sizeof(uint8_t) * kBitsPerByte));
  class DartSchemeBit : public BitField<uint8_t, bool, kDartSchemeBit, 1> {};
  class DebuggableBit : public BitField<uint8_t, bool, kDebuggableBit, 1> {};
  class InFullSnapshotBit
      : public BitField<uint8_t, bool, kInFullSnapshotBit, 1> {};
class UntaggedLibrary : public UntaggedObject {
  RAW_HEAP_OBJECT_IMPLEMENTATION(Library);

  COMPRESSED_POINTER_FIELD(StringPtr, name)
  VISIT_FROM(name)
  COMPRESSED_POINTER_FIELD(StringPtr, url)
  COMPRESSED_POINTER_FIELD(StringPtr, private_key)
  COMPRESSED_POINTER_FIELD(ArrayPtr, dictionary)
  COMPRESSED_POINTER_FIELD(ArrayPtr, metadata)
  COMPRESSED_POINTER_FIELD(ClassPtr, toplevel_class)
  COMPRESSED_POINTER_FIELD(GrowableObjectArrayPtr, used_scripts)
  COMPRESSED_POINTER_FIELD(LoadingUnitPtr, loading_unit)
  COMPRESSED_POINTER_FIELD(ArrayPtr, imports)
  COMPRESSED_POINTER_FIELD(ArrayPtr, exports)
#if !defined(DART_PRECOMPILED_RUNTIME)
  COMPRESSED_POINTER_FIELD(KernelProgramInfoPtr, kernel_program_info)
#endif

#if !defined(DART_PRECOMPILED_RUNTIME)
  COMPRESSED_POINTER_FIELD(ArrayPtr, loaded_scripts);
  VISIT_TO(loaded_scripts);
#endif
  uint16_t num_imports_;

#if !defined(DART_PRECOMPILED_RUNTIME)
  uint32_t kernel_library_index_;
#endif

class UntaggedNamespace : public UntaggedObject {
  RAW_HEAP_OBJECT_IMPLEMENTATION(Namespace);

class UntaggedWeakArray : public UntaggedObject {
  RAW_HEAP_OBJECT_IMPLEMENTATION(WeakArray);
  template <typename Table, bool kAllCanonicalObjectsAreIncludedIntoSet>
  friend class CanonicalSetDeserializationCluster;
  template <typename Type, typename PtrType>
  friend class GCLinkedLists;

template <>
DART_FORCE_INLINE uword UntaggedObject::from_offset<UntaggedWeakArray>() {

class UntaggedCode : public UntaggedObject {
  RAW_HEAP_OBJECT_IMPLEMENTATION(Code);
  uword monomorphic_entry_point_;
  uword unchecked_entry_point_;
  uword monomorphic_unchecked_entry_point_;

  POINTER_FIELD(ObjectPoolPtr, object_pool)
  POINTER_FIELD(InstructionsPtr, instructions)
  POINTER_FIELD(ExceptionHandlersPtr, exception_handlers)
  POINTER_FIELD(PcDescriptorsPtr, pc_descriptors)
  POINTER_FIELD(CompressedStackMapsPtr, compressed_stackmaps)
  POINTER_FIELD(ArrayPtr, inlined_id_to_function)
  POINTER_FIELD(CodeSourceMapPtr, code_source_map)
  NOT_IN_PRECOMPILED(POINTER_FIELD(InstructionsPtr, active_instructions))
  NOT_IN_PRECOMPILED(POINTER_FIELD(ArrayPtr, deopt_info_array))
  NOT_IN_PRECOMPILED(POINTER_FIELD(ArrayPtr, static_calls_target_table))
  NOT_IN_PRODUCT(POINTER_FIELD(ObjectPtr, return_address_metadata))
  NOT_IN_PRODUCT(POINTER_FIELD(LocalVarDescriptorsPtr, var_descriptors))
  NOT_IN_PRODUCT(POINTER_FIELD(ArrayPtr, comments))

#if !defined(PRODUCT)
  VISIT_TO(comments);
#elif defined(DART_PRECOMPILED_RUNTIME)
  VISIT_TO(code_source_map);
#else
  VISIT_TO(static_calls_target_table);
#endif

  NOT_IN_PRODUCT(alignas(8) int64_t compile_timestamp_);

  int32_t state_bits_;
  NOT_IN_PRECOMPILED(uint32_t unchecked_offset_);
  ONLY_IN_PRECOMPILED(uint32_t instructions_length_);
  DEFINE_CONTAINS_COMPRESSED(decltype(Entry::raw_obj_));

  uint8_t* entry_bits() { return reinterpret_cast<uint8_t*>(&data()[length_]); }
  uint8_t const* entry_bits() const {
    return reinterpret_cast<uint8_t const*>(&data()[length_]);
  }

  uint32_t size_and_flags_;

  static bool ContainsPC(const InstructionsPtr raw_instr, uword pc);

  uword payload_length_;
  uword instructions_relocated_address_;
  word build_id_offset_;
#define FOR_EACH_RAW_PC_DESCRIPTOR(V)                                          \
  V(Deopt, 1)                                                                  \
  V(IcCall, kDeopt << 1)                                                       \
  V(UnoptStaticCall, kIcCall << 1)                                             \
  V(RuntimeCall, kUnoptStaticCall << 1)                                        \
  V(OsrEntry, kRuntimeCall << 1)                                               \
  V(Rewind, kOsrEntry << 1)                                                    \
  V(BSSRelocation, kRewind << 1)                                               \
  V(Other, kBSSRelocation << 1)                                                \
  V(AnyKind, -1)

  enum Kind {
#define ENUM_DEF(name, init) k##name = init,
    FOR_EACH_RAW_PC_DESCRIPTOR(ENUM_DEF)
#undef ENUM_DEF
  };

  static uint32_t EncodeKindAndMetadata(Kind kind,
                                        intptr_t try_index,
                                        intptr_t yield_index) {
    return KindShiftBits::encode(
               Utils::ShiftForPowerOfTwo(static_cast<intptr_t>(kind))) |
           TryIndexBits::encode(try_index) |
           YieldIndexBits::encode(yield_index);
  }

  static constexpr intptr_t kKindShiftSize = 3;
  static constexpr intptr_t kTryIndexSize = 10;
  static constexpr intptr_t kYieldIndexSize =
      32 - kKindShiftSize - kTryIndexSize;

  class KindShiftBits
      : public BitField<uint32_t, intptr_t, 0, kKindShiftSize> {};
  class TryIndexBits : public BitField<uint32_t,
                                       intptr_t,
                                       KindShiftBits::kNextBit,
                                       kTryIndexSize> {};
  class YieldIndexBits : public BitField<uint32_t,
                                         intptr_t,
                                         TryIndexBits::kNextBit,
                                         kYieldIndexSize> {};
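  // Packing check: the three fields tile a 32-bit word exactly, since
  // kKindShiftSize + kTryIndexSize + kYieldIndexSize == 3 + 10 + (32 - 3 -
  // 10) == 32, with KindShiftBits in the low bits and each later field
  // starting at the previous field's kNextBit.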
  RAW_HEAP_OBJECT_IMPLEMENTATION(PcDescriptors);
    DART_FORCE_INLINE FlagsAndSizeHeader flags_and_size() const {
      return bit_copy<FlagsAndSizeHeader, Payload>(*this);
    }
    DART_FORCE_INLINE void set_flags_and_size(FlagsAndSizeHeader value) {
      memcpy(reinterpret_cast<void*>(this), &value, sizeof(value));  // NOLINT
    }

    const uint8_t* data() const {
      return reinterpret_cast<const uint8_t*>(this) +
             sizeof(FlagsAndSizeHeader);
    }

  class GlobalTableBit
      : public BitField<Payload::FlagsAndSizeHeader, bool, 0, 1> {};
  class UsesTableBit : public BitField<Payload::FlagsAndSizeHeader,
                                       bool,
                                       GlobalTableBit::kNextBit,
                                       1> {};
  class SizeField
      : public BitField<Payload::FlagsAndSizeHeader,
                        Payload::FlagsAndSizeHeader,
                        UsesTableBit::kNextBit,
                        sizeof(Payload::FlagsAndSizeHeader) * kBitsPerByte -
                            UsesTableBit::kNextBit> {};
  struct DataEntry {
    uint32_t pc_offset;
    uint32_t stack_map_offset;
  };
  static_assert(sizeof(DataEntry) == sizeof(uint32_t) * 2);

  struct Data {
    uint32_t canonical_stack_map_entries_offset;
    uint32_t length;
    uint32_t first_entry_with_code;
    uint32_t padding;

    const DataEntry* entries() const { OPEN_ARRAY_START(DataEntry, uint32_t); }
  };
  static_assert(sizeof(Data) == sizeof(uint32_t) * 4);

  const Data* rodata_;
  TokenPosition declaration_pos = TokenPosition::kNoSource;
  TokenPosition begin_pos = TokenPosition::kNoSource;
  TokenPosition end_pos = TokenPosition::kNoSource;
  CompressedStringPtr* nameAddrAt(intptr_t i) { return &(names()[i]); }
  void set_name(intptr_t i, StringPtr value) {
    StoreCompressedPointer(nameAddrAt(i), value);
  }

  VarInfo* data() {
    return reinterpret_cast<VarInfo*>(nameAddrAt(num_entries_));
  }

  uint32_t packed_fields_;

  intptr_t num_entries() const {
class UntaggedContext : public UntaggedObject {
  RAW_HEAP_OBJECT_IMPLEMENTATION(Context);

  int32_t num_variables_;
#define CONTEXT_SCOPE_VARIABLE_DESC_FLAG_LIST(V)                               \
  V(Final)                                                                     \
  V(Late)                                                                      \
  V(Nullable)                                                                  \
  V(Invisible)                                                                 \
  V(AwaiterLink)

  struct VariableDesc {
    CompressedSmiPtr declaration_token_pos;
    CompressedSmiPtr token_pos;
    CompressedStringPtr name;
    CompressedSmiPtr flags;
    enum FlagBits {
#define DECLARE_BIT(Name) kIs##Name,
      CONTEXT_SCOPE_VARIABLE_DESC_FLAG_LIST(DECLARE_BIT)
#undef DECLARE_BIT
    };
    CompressedSmiPtr late_init_offset;
    CompressedAbstractTypePtr type;
    CompressedSmiPtr cid;
    CompressedSmiPtr context_index;
    CompressedSmiPtr context_level;
    CompressedSmiPtr kernel_offset;
  };

  int32_t num_variables_;

  VariableDesc* begin = const_cast<VariableDesc*>(VariableDescAddr(0));

  const VariableDesc* VariableDescAddr(intptr_t index) const {
    return reinterpret_cast<const VariableDesc*>(data()) + index;
  }

#define DEFINE_ACCESSOR(type, name)                                            \
  type name##_at(intptr_t index) {                                             \
    return LoadCompressedPointer<type>(&VariableDescAddr(index)->name);        \
  }                                                                            \
  void set_##name##_at(intptr_t index, type value) {                           \
    StoreCompressedPointer(&VariableDescAddr(index)->name, value);             \
  }
  DEFINE_ACCESSOR(SmiPtr, declaration_token_pos)
  DEFINE_ACCESSOR(SmiPtr, token_pos)
  DEFINE_ACCESSOR(StringPtr, name)
  DEFINE_ACCESSOR(SmiPtr, flags)
  DEFINE_ACCESSOR(SmiPtr, late_init_offset)
  DEFINE_ACCESSOR(AbstractTypePtr, type)
  DEFINE_ACCESSOR(SmiPtr, cid)
  DEFINE_ACCESSOR(SmiPtr, context_index)
  DEFINE_ACCESSOR(SmiPtr, context_level)
  DEFINE_ACCESSOR(SmiPtr, kernel_offset)
#undef DEFINE_ACCESSOR

  uword end = reinterpret_cast<uword>(VariableDescAddr(num_vars));

  return to(num_vars);
  uword expected_cid_;

  bool can_patch_to_monomorphic_;

    return reinterpret_cast<ObjectPtr*>(&entries_);

  int32_t filled_entry_count_;

  uint32_t num_inputs_;
  uint32_t num_occupied_;

  const uint8_t* instructions_image_;
  enum LoadState : int8_t {

      BitField<decltype(packed_fields_), intptr_t, LoadStateBits::kNextBit>;
  bool report_after_token_;

  bool is_user_initiated_;

  uint16_t num_imports_;
  bool is_deferred_load_;
  uint32_t flags() const { return flags_.load(std::memory_order_relaxed); }
  void set_flags(uint32_t value) {
    flags_.store(value, std::memory_order_relaxed);
  }

  static constexpr intptr_t kNullabilityMask = NullabilityBit::mask();

  static constexpr intptr_t kTypeStateShift = NullabilityBit::kNextBit;
  static constexpr intptr_t kTypeStateBits = 2;

  static constexpr intptr_t kTypeClassIdShift = TypeStateBits::kNextBit;

    set_flags(TypeClassIdBits::update(value, flags()));
  using PackedNumParentTypeArguments =
      BitField<decltype(packed_type_parameter_counts_), uint8_t, 0, 8>;
  using PackedNumTypeParameters =
      BitField<decltype(packed_type_parameter_counts_),
               uint8_t,
               PackedNumParentTypeArguments::kNextBit,
               8>;

  using PackedNumImplicitParameters =
      BitField<decltype(packed_parameter_counts_), uint8_t, 0, 1>;
  using PackedHasNamedOptionalParameters =
      BitField<decltype(packed_parameter_counts_),
               bool,
               PackedNumImplicitParameters::kNextBit,
               1>;
  using PackedNumFixedParameters =
      BitField<decltype(packed_parameter_counts_),
               uint16_t,
               PackedHasNamedOptionalParameters::kNextBit,
               14>;
  using PackedNumOptionalParameters =
      BitField<decltype(packed_parameter_counts_),
               uint16_t,
               PackedNumFixedParameters::kNextBit,
               14>;
  static_assert(PackedNumOptionalParameters::kNextBit <= kSmiBits,
                "In-place mask for number of optional parameters cannot fit in "
                "a Smi on the target architecture");

  static constexpr intptr_t kIsFunctionTypeParameterBit =
      TypeStateBits::kNextBit;
  friend void InitializeExternalTypedData(intptr_t cid,
                                          ExternalTypedDataPtr from,
                                          ExternalTypedDataPtr to);
  void RecomputeDataField() {
    const intptr_t offset_in_bytes = RawSmiValue(this->offset_in_bytes());
    uint8_t* payload = typed_data()->untag()->data_;
    data_ = payload + offset_in_bytes;
  }

  void RecomputeDataFieldForInternalTypedData() {
    data_ = DataFieldForInternalTypedData();
    ASSERT(data_ == internal_data());
  }

  uint8_t* DataFieldForInternalTypedData() const {
    const intptr_t offset_in_bytes = RawSmiValue(this->offset_in_bytes());
    uint8_t* payload = reinterpret_cast<uint8_t*>(
        UntaggedObject::ToAddr(typed_data()) +
        UntaggedTypedData::payload_offset());
    return payload + offset_in_bytes;
  }

  void ValidateInnerPointer() {
    if (typed_data()->untag()->GetClassId() == kNullCid) {
      // The view is just being initialized: all fields must still be null.
      if (data_ != nullptr || RawSmiValue(offset_in_bytes()) != 0 ||
          RawSmiValue(length()) != 0) {
        FATAL("TypedDataView has invalid inner pointer.");
      }
    } else {
      const intptr_t offset_in_bytes = RawSmiValue(this->offset_in_bytes());
      uint8_t* payload = typed_data()->untag()->data_;
      if ((payload + offset_in_bytes) != data_) {
        FATAL("TypedDataView has invalid inner pointer.");
      }
    }
  }
  template <typename Table, bool kAllCanonicalObjectsAreIncludedIntoSet>
  friend class CanonicalSetDeserializationCluster;
  float x() const { return value_[0]; }
  float y() const { return value_[1]; }
  float z() const { return value_[2]; }
  float w() const { return value_[3]; }

  ALIGN8 int32_t value_[4];
  int32_t x() const { return value_[0]; }
  int32_t y() const { return value_[1]; }
  int32_t z() const { return value_[2]; }
  int32_t w() const { return value_[3]; }

  double x() const { return value_[0]; }
  double y() const { return value_[1]; }
#if defined(ARCH_IS_32_BIT)
#define kIntPtrCid kTypedDataInt32ArrayCid
#define GetIntPtr GetInt32
#define SetIntPtr SetInt32
#define kUintPtrCid kTypedDataUint32ArrayCid
#define GetUintPtr GetUint32
#define SetUintPtr SetUint32
#elif defined(ARCH_IS_64_BIT)
#define kIntPtrCid kTypedDataInt64ArrayCid
#define GetIntPtr GetInt64
#define SetIntPtr SetInt64
#define kUintPtrCid kTypedDataUint64ArrayCid
#define GetUintPtr GetUint64
#define SetUintPtr SetUint64
#else
#error Architecture is not 32-bit or 64-bit.
#endif
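// Example: code written against the word-sized aliases, e.g. a hypothetical
//   data.SetIntPtr(offset, value);
// resolves to SetInt64 (and kIntPtrCid to kTypedDataInt64ArrayCid) on 64-bit
// targets and to the Int32 versions on 32-bit targets, so one call site
// serves both architectures.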
  bool expand_inlined_;
  bool skip_sync_start_in_parent_stack;

  intptr_t frame_size_;

#if defined(DART_PRECOMPILED_RUNTIME)
  intptr_t frame_capacity() const { return frame_capacity_; }
  std::atomic<intptr_t> num_bracket_expressions_;
  intptr_t num_bracket_expressions() {
    return num_bracket_expressions_.load(std::memory_order_relaxed);
  }
  void set_num_bracket_expressions(intptr_t value) {
    num_bracket_expressions_.store(value, std::memory_order_relaxed);
  }

  intptr_t num_one_byte_registers_;
  intptr_t num_two_byte_registers_;
  template <typename Type, typename PtrType>
  friend class GCLinkedLists;

template <>
DART_FORCE_INLINE uword
UntaggedObject::to_offset<UntaggedWeakProperty>(intptr_t length) {

  template <typename Type, typename PtrType>
  friend class GCLinkedLists;
  friend class ObjectGraph;

template <>
DART_FORCE_INLINE uword
UntaggedObject::to_offset<UntaggedWeakReference>(intptr_t length) {
#ifdef DART_COMPRESSED_POINTERS
  uint32_t align_first_field_in_subclass;
#endif

  template <typename GCVisitorType>
  friend void MournFinalizerEntry(GCVisitorType* visitor,
                                  FinalizerEntryPtr current_entry);
  friend class ObjectGraph;

  template <std::memory_order order = std::memory_order_relaxed>
  FinalizerEntryPtr exchange_entries_collected(FinalizerEntryPtr value) {
    return ExchangeCompressedPointer<FinalizerEntryPtr,
                                     CompressedFinalizerEntryPtr, order>(
        &entries_collected_, value);
  }

  template <typename GCVisitorType>
  friend void MournFinalizerEntry(GCVisitorType* visitor,
                                  FinalizerEntryPtr current_entry);

  intptr_t external_size_;

  template <typename Type, typename PtrType>
  friend class GCLinkedLists;
  template <typename GCVisitorType>
  friend void MournFinalizerEntry(GCVisitorType* visitor,
                                  FinalizerEntryPtr current_entry);
  friend class ObjectGraph;

template <>
DART_FORCE_INLINE uword
UntaggedObject::to_offset<UntaggedFinalizerEntry>(intptr_t length) {
#undef WSR_COMPRESSED_POINTER_FIELD

#endif  // RUNTIME_VM_RAW_OBJECT_H_