#ifndef RUNTIME_VM_THREAD_H_
#define RUNTIME_VM_THREAD_H_

#if defined(SHOULD_NOT_INCLUDE_RUNTIME)
#error "Should not include runtime"
#endif

class ExceptionHandlers;
class GrowableObjectArray;
class NoActiveIsolateScope;

#define REUSABLE_HANDLE_LIST(V)                                                \
  V(ExceptionHandlers)                                                         \
  V(GrowableObjectArray)
#define CACHED_VM_STUBS_LIST(V)                                                \
  V(CodePtr, fix_callers_target_code_, StubCode::FixCallersTarget().ptr(),    \
    nullptr)                                                                   \
  V(CodePtr, fix_allocation_stub_code_,                                       \
    StubCode::FixAllocationStubTarget().ptr(), nullptr)                       \
  V(CodePtr, invoke_dart_code_stub_, StubCode::InvokeDartCode().ptr(),        \
    nullptr)                                                                   \
  V(CodePtr, call_to_runtime_stub_, StubCode::CallToRuntime().ptr(), nullptr) \
  V(CodePtr, late_initialization_error_shared_without_fpu_regs_stub_,         \
    StubCode::LateInitializationErrorSharedWithoutFPURegs().ptr(), nullptr)   \
  V(CodePtr, late_initialization_error_shared_with_fpu_regs_stub_,            \
    StubCode::LateInitializationErrorSharedWithFPURegs().ptr(), nullptr)      \
  V(CodePtr, null_error_shared_without_fpu_regs_stub_,                        \
    StubCode::NullErrorSharedWithoutFPURegs().ptr(), nullptr)                 \
  V(CodePtr, null_error_shared_with_fpu_regs_stub_,                           \
    StubCode::NullErrorSharedWithFPURegs().ptr(), nullptr)                    \
  V(CodePtr, null_arg_error_shared_without_fpu_regs_stub_,                    \
    StubCode::NullArgErrorSharedWithoutFPURegs().ptr(), nullptr)              \
  V(CodePtr, null_arg_error_shared_with_fpu_regs_stub_,                       \
    StubCode::NullArgErrorSharedWithFPURegs().ptr(), nullptr)                 \
  V(CodePtr, null_cast_error_shared_without_fpu_regs_stub_,                   \
    StubCode::NullCastErrorSharedWithoutFPURegs().ptr(), nullptr)             \
  V(CodePtr, null_cast_error_shared_with_fpu_regs_stub_,                      \
    StubCode::NullCastErrorSharedWithFPURegs().ptr(), nullptr)                \
  V(CodePtr, range_error_shared_without_fpu_regs_stub_,                       \
    StubCode::RangeErrorSharedWithoutFPURegs().ptr(), nullptr)                \
  V(CodePtr, range_error_shared_with_fpu_regs_stub_,                          \
    StubCode::RangeErrorSharedWithFPURegs().ptr(), nullptr)                   \
  V(CodePtr, write_error_shared_without_fpu_regs_stub_,                       \
    StubCode::WriteErrorSharedWithoutFPURegs().ptr(), nullptr)                \
  V(CodePtr, write_error_shared_with_fpu_regs_stub_,                          \
    StubCode::WriteErrorSharedWithFPURegs().ptr(), nullptr)                   \
  V(CodePtr, allocate_mint_with_fpu_regs_stub_,                               \
    StubCode::AllocateMintSharedWithFPURegs().ptr(), nullptr)                 \
  V(CodePtr, allocate_mint_without_fpu_regs_stub_,                            \
    StubCode::AllocateMintSharedWithoutFPURegs().ptr(), nullptr)              \
  V(CodePtr, allocate_object_stub_, StubCode::AllocateObject().ptr(), nullptr) \
  V(CodePtr, allocate_object_parameterized_stub_,                             \
    StubCode::AllocateObjectParameterized().ptr(), nullptr)                   \
  V(CodePtr, allocate_object_slow_stub_, StubCode::AllocateObjectSlow().ptr(), \
    nullptr)                                                                   \
  V(CodePtr, async_exception_handler_stub_,                                   \
    StubCode::AsyncExceptionHandler().ptr(), nullptr)                         \
  V(CodePtr, resume_stub_, StubCode::Resume().ptr(), nullptr)                 \
  V(CodePtr, return_async_stub_, StubCode::ReturnAsync().ptr(), nullptr)      \
  V(CodePtr, return_async_not_future_stub_,                                   \
    StubCode::ReturnAsyncNotFuture().ptr(), nullptr)                          \
  V(CodePtr, return_async_star_stub_, StubCode::ReturnAsyncStar().ptr(),      \
    nullptr)                                                                   \
  V(CodePtr, stack_overflow_shared_without_fpu_regs_stub_,                    \
    StubCode::StackOverflowSharedWithoutFPURegs().ptr(), nullptr)             \
  V(CodePtr, stack_overflow_shared_with_fpu_regs_stub_,                       \
    StubCode::StackOverflowSharedWithFPURegs().ptr(), nullptr)                \
  V(CodePtr, switchable_call_miss_stub_, StubCode::SwitchableCallMiss().ptr(), \
    nullptr)                                                                   \
  V(CodePtr, throw_stub_, StubCode::Throw().ptr(), nullptr)                   \
  V(CodePtr, re_throw_stub_, StubCode::ReThrow().ptr(), nullptr)              \
  V(CodePtr, assert_boolean_stub_, StubCode::AssertBoolean().ptr(), nullptr)  \
  V(CodePtr, optimize_stub_, StubCode::OptimizeFunction().ptr(), nullptr)     \
  V(CodePtr, deoptimize_stub_, StubCode::Deoptimize().ptr(), nullptr)         \
  V(CodePtr, lazy_deopt_from_return_stub_,                                    \
    StubCode::DeoptimizeLazyFromReturn().ptr(), nullptr)                      \
  V(CodePtr, lazy_deopt_from_throw_stub_,                                     \
    StubCode::DeoptimizeLazyFromThrow().ptr(), nullptr)                       \
  V(CodePtr, slow_type_test_stub_, StubCode::SlowTypeTest().ptr(), nullptr)   \
  V(CodePtr, lazy_specialize_type_test_stub_,                                 \
    StubCode::LazySpecializeTypeTest().ptr(), nullptr)                        \
  V(CodePtr, enter_safepoint_stub_, StubCode::EnterSafepoint().ptr(), nullptr) \
  V(CodePtr, exit_safepoint_stub_, StubCode::ExitSafepoint().ptr(), nullptr)  \
  V(CodePtr, exit_safepoint_ignore_unwind_in_progress_stub_,                  \
    StubCode::ExitSafepointIgnoreUnwindInProgress().ptr(), nullptr)           \
  V(CodePtr, call_native_through_safepoint_stub_,                             \
    StubCode::CallNativeThroughSafepoint().ptr(), nullptr)
#define CACHED_NON_VM_STUB_LIST(V)                                             \
  V(ObjectPtr, object_null_, Object::null(), nullptr)                         \
  V(BoolPtr, bool_true_, Object::bool_true().ptr(), nullptr)                  \
  V(BoolPtr, bool_false_, Object::bool_false().ptr(), nullptr)                \
  V(ArrayPtr, empty_array_, Object::empty_array().ptr(), nullptr)             \
  V(TypeArgumentsPtr, empty_type_arguments_,                                  \
    Object::empty_type_arguments().ptr(), nullptr)                            \
  V(TypePtr, dynamic_type_, Type::dynamic_type().ptr(), nullptr)
#define CACHED_VM_OBJECTS_LIST(V)                                              \
  CACHED_NON_VM_STUB_LIST(V)                                                   \
  CACHED_VM_STUBS_LIST(V)
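// Illustration (editor's note, not part of the original header): each
// V(type_name, member_name, init_expr, default_init_value) entry above is
// expanded by the DECLARE_MEMBERS and DEFINE_OFFSET_METHOD macros later in
// this file into a cached field plus a compiler-visible offset accessor,
// roughly:
//
//   CodePtr throw_stub_;  // cached stub pointer stored on the Thread
//   static intptr_t throw_stub_offset() {
//     return OFFSET_OF(Thread, throw_stub_);
//   }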
#define CACHED_FUNCTION_ENTRY_POINTS_LIST(V)                                   \
  V(suspend_state_init_async)                                                  \
  V(suspend_state_await)                                                       \
  V(suspend_state_await_with_type_check)                                       \
  V(suspend_state_return_async)                                                \
  V(suspend_state_return_async_not_future)                                     \
  V(suspend_state_init_async_star)                                             \
  V(suspend_state_yield_async_star)                                            \
  V(suspend_state_return_async_star)                                           \
  V(suspend_state_init_sync_star)                                              \
  V(suspend_state_suspend_sync_star_at_start)                                  \
  V(suspend_state_handle_exception)
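// Illustration (editor's note): under the DECLARE_MEMBERS and
// DEFINE_OFFSET_METHOD expansions later in this file, V(suspend_state_await)
// produces
//
//   uword suspend_state_await_entry_point_ = 0;
//   static intptr_t suspend_state_await_entry_point_offset() {
//     return OFFSET_OF(Thread, suspend_state_await_entry_point_);
//   }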
#define ASSERT_BOOL_FALSE_FOLLOWS_BOOL_TRUE()                                  \
  ASSERT((Thread::bool_true_offset() + kWordSize) ==                           \
         Thread::bool_false_offset());
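// Editor's sketch of why this layout assertion matters (not authoritative):
// with bool_false_ exactly one word after bool_true_, generated code can
// materialize either boolean from a condition with a single indexed load off
// the thread register:
//
//   // offset = Thread::bool_true_offset() + (is_false ? kWordSize : 0)
//   // result = *(thread_base + offset)   // thread_base is hypothetical here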
#define CACHED_VM_STUBS_ADDRESSES_LIST(V)                                      \
  V(uword, write_barrier_entry_point_, StubCode::WriteBarrier().EntryPoint(), \
    0)                                                                         \
  V(uword, array_write_barrier_entry_point_,                                  \
    StubCode::ArrayWriteBarrier().EntryPoint(), 0)                            \
  V(uword, call_to_runtime_entry_point_,                                      \
    StubCode::CallToRuntime().EntryPoint(), 0)                                \
  V(uword, allocate_mint_with_fpu_regs_entry_point_,                          \
    StubCode::AllocateMintSharedWithFPURegs().EntryPoint(), 0)                \
  V(uword, allocate_mint_without_fpu_regs_entry_point_,                       \
    StubCode::AllocateMintSharedWithoutFPURegs().EntryPoint(), 0)             \
  V(uword, allocate_object_entry_point_,                                      \
    StubCode::AllocateObject().EntryPoint(), 0)                               \
  V(uword, allocate_object_parameterized_entry_point_,                        \
    StubCode::AllocateObjectParameterized().EntryPoint(), 0)                  \
  V(uword, allocate_object_slow_entry_point_,                                 \
    StubCode::AllocateObjectSlow().EntryPoint(), 0)                           \
  V(uword, stack_overflow_shared_without_fpu_regs_entry_point_,               \
    StubCode::StackOverflowSharedWithoutFPURegs().EntryPoint(), 0)            \
  V(uword, stack_overflow_shared_with_fpu_regs_entry_point_,                  \
    StubCode::StackOverflowSharedWithFPURegs().EntryPoint(), 0)               \
  V(uword, megamorphic_call_checked_entry_,                                   \
    StubCode::MegamorphicCall().EntryPoint(), 0)                              \
  V(uword, switchable_call_miss_entry_,                                       \
    StubCode::SwitchableCallMiss().EntryPoint(), 0)                           \
  V(uword, optimize_entry_, StubCode::OptimizeFunction().EntryPoint(), 0)     \
  V(uword, deoptimize_entry_, StubCode::Deoptimize().EntryPoint(), 0)         \
  V(uword, call_native_through_safepoint_entry_point_,                        \
    StubCode::CallNativeThroughSafepoint().EntryPoint(), 0)                   \
  V(uword, jump_to_frame_entry_point_, StubCode::JumpToFrame().EntryPoint(),  \
    0)                                                                         \
  V(uword, slow_type_test_entry_point_, StubCode::SlowTypeTest().EntryPoint(), \
    0)
#define CACHED_ADDRESSES_LIST(V)                                               \
  CACHED_VM_STUBS_ADDRESSES_LIST(V)                                            \
  V(uword, bootstrap_native_wrapper_entry_point_,                             \
    NativeEntry::BootstrapNativeCallWrapperEntry(), 0)                        \
  V(uword, no_scope_native_wrapper_entry_point_,                              \
    NativeEntry::NoScopeNativeCallWrapperEntry(), 0)                          \
  V(uword, auto_scope_native_wrapper_entry_point_,                            \
    NativeEntry::AutoScopeNativeCallWrapperEntry(), 0)                        \
  V(StringPtr*, predefined_symbols_address_, Symbols::PredefinedAddress(),    \
    nullptr)                                                                   \
  V(uword, double_nan_address_, reinterpret_cast<uword>(&double_nan_constant), \
    0)                                                                         \
  V(uword, double_negate_address_,                                            \
    reinterpret_cast<uword>(&double_negate_constant), 0)                      \
  V(uword, double_abs_address_, reinterpret_cast<uword>(&double_abs_constant), \
    0)                                                                         \
  V(uword, float_not_address_, reinterpret_cast<uword>(&float_not_constant),  \
    0)                                                                         \
  V(uword, float_negate_address_,                                             \
    reinterpret_cast<uword>(&float_negate_constant), 0)                       \
  V(uword, float_absolute_address_,                                           \
    reinterpret_cast<uword>(&float_absolute_constant), 0)                     \
  V(uword, float_zerow_address_,                                              \
    reinterpret_cast<uword>(&float_zerow_constant), 0)
#define CACHED_CONSTANTS_LIST(V)                                               \
  CACHED_VM_OBJECTS_LIST(V)                                                    \
  CACHED_ADDRESSES_LIST(V)
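// Hypothetical consumer (editor's illustration only): any macro of matching
// arity can be mapped over the combined list, e.g. to count cached slots:
//
//   #define COUNT_ENTRY(type_name, member_name, expr, default_init_value) +1
//   static constexpr intptr_t kCachedConstantsCount =
//       0 CACHED_CONSTANTS_LIST(COUNT_ENTRY);
//   #undef COUNT_ENTRY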
#if defined(USING_THREAD_SANITIZER)
  static void ExitIsolate(bool isolate_shutdown = false);

  static bool EnterIsolateGroupAsHelper(IsolateGroup* isolate_group,
                                        TaskKind kind,
                                        bool bypass_safepoint);

  uword stack_limit_address() const {
    return reinterpret_cast<uword>(&stack_limit_);
  }

#if defined(USING_SAFE_STACK)
  uword saved_safestack_limit() const { return saved_safestack_limit_; }
  void set_saved_safestack_limit(uword limit) {
    saved_safestack_limit_ = limit;
  }
#endif
#if defined(DART_COMPRESSED_POINTERS)
  static intptr_t heap_base_offset() { return OFFSET_OF(Thread, heap_base_); }
#endif

  int32_t IncrementAndGetStackOverflowCount() {
    return ++stack_overflow_count_;
  }

  static uword stack_overflow_shared_stub_entry_point_offset(bool fpu_regs) {
    return fpu_regs
               ? stack_overflow_shared_with_fpu_regs_entry_point_offset()
               : stack_overflow_shared_without_fpu_regs_entry_point_offset();
  }
  void set_api_reusable_scope(ApiLocalScope* value) {
    ASSERT(value == nullptr || api_reusable_scope_ == nullptr);
    api_reusable_scope_ = value;
  }
#if defined(USING_THREAD_SANITIZER)
  uword exit_through_ffi() const { return exit_through_ffi_; }
  TsanUtils* tsan_utils() const { return tsan_utils_; }
#endif

  bool IsDartMutatorThread() const {
    return scheduled_dart_mutator_isolate_ != nullptr;
  }

  Isolate* scheduled_dart_mutator_isolate() const {
    return scheduled_dart_mutator_isolate_;
  }

  bool IsInsideCompiler() const { return inside_compiler_; }

  CompilerState& compiler_state() {
    ASSERT(compiler_state_ != nullptr);
    return *compiler_state_;
  }
  HierarchyInfo* hierarchy_info() const {
    ASSERT(isolate_group_ != nullptr);
    return hierarchy_info_;
  }

  void set_hierarchy_info(HierarchyInfo* value) {
    ASSERT(isolate_group_ != nullptr);
    ASSERT((hierarchy_info_ == nullptr && value != nullptr) ||
           (hierarchy_info_ != nullptr && value == nullptr));
    hierarchy_info_ = value;
  }

  TypeUsageInfo* type_usage_info() const {
    ASSERT(isolate_group_ != nullptr);
    return type_usage_info_;
  }

  void set_type_usage_info(TypeUsageInfo* value) {
    ASSERT(isolate_group_ != nullptr);
    ASSERT((type_usage_info_ == nullptr && value != nullptr) ||
           (type_usage_info_ != nullptr && value == nullptr));
    type_usage_info_ = value;
  }

  void set_compiler_timings(CompilerTimings* stats) {
    compiler_timings_ = stats;
  }
  void IncrementNoCallbackScopeDepth() {
    ASSERT(no_callback_scope_depth_ < INT_MAX);
    no_callback_scope_depth_ += 1;
  }
  void DecrementNoCallbackScopeDepth() {
    ASSERT(no_callback_scope_depth_ > 0);
    no_callback_scope_depth_ -= 1;
  }

  void IncrementForceGrowthScopeDepth() {
    ASSERT(force_growth_scope_depth_ < INT_MAX);
    force_growth_scope_depth_ += 1;
  }
  void DecrementForceGrowthScopeDepth() {
    ASSERT(force_growth_scope_depth_ > 0);
    force_growth_scope_depth_ -= 1;
  }

    is_unwind_in_progress_ = true;
  void EnterCompiler() {
    ASSERT(!IsInsideCompiler());
    inside_compiler_ = true;
  }

  void LeaveCompiler() {
    ASSERT(IsInsideCompiler());
    inside_compiler_ = false;
  }

  bool StoreBufferContains(ObjectPtr obj) const {
    return store_buffer_block_->Contains(obj);
  }

  bool is_marking() const { return old_marking_stack_block_ != nullptr; }

  int32_t no_safepoint_scope_depth() const {
    return no_safepoint_scope_depth_;
  }

  void IncrementNoSafepointScopeDepth() {
    ASSERT(no_safepoint_scope_depth_ < INT_MAX);
    no_safepoint_scope_depth_ += 1;
  }
  void DecrementNoSafepointScopeDepth() {
    ASSERT(no_safepoint_scope_depth_ > 0);
    no_safepoint_scope_depth_ -= 1;
  }

  bool IsInStoppedMutatorsScope() const {
    return stopped_mutators_scope_depth_ > 0;
  }
#define DEFINE_OFFSET_METHOD(type_name, member_name, expr, default_init_value) \
  static intptr_t member_name##offset() {                                      \
    return OFFSET_OF(Thread, member_name);                                     \
  }
  CACHED_CONSTANTS_LIST(DEFINE_OFFSET_METHOD)
#undef DEFINE_OFFSET_METHOD
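// Editor's sketch of a typical consumer (the assembler macro and THR register
// name are assumptions, not taken from this header): compiled code addresses
// cached values relative to the reserved thread register, e.g.
//
//   __ LoadFromOffset(dst, THR, Thread::object_null_offset());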
           index * sizeof(uword);
#define DEFINE_OFFSET_METHOD(name)                                             \
  static intptr_t name##_entry_point_offset() {                                \
    return OFFSET_OF(Thread, name##_entry_point_);                             \
  }
  RUNTIME_ENTRY_LIST(DEFINE_OFFSET_METHOD)
#undef DEFINE_OFFSET_METHOD

#define DEFINE_OFFSET_METHOD(returntype, name, ...)                            \
  static intptr_t name##_entry_point_offset() {                                \
    return OFFSET_OF(Thread, name##_entry_point_);                             \
  }
  LEAF_RUNTIME_ENTRY_LIST(DEFINE_OFFSET_METHOD)
#undef DEFINE_OFFSET_METHOD

  void set_global_object_pool(ObjectPoolPtr raw_value) {
    global_object_pool_ = raw_value;
  }

  void set_dispatch_table_array(const uword* array) {
    dispatch_table_array_ = array;
  }

#define DEFINE_OFFSET_METHOD(name)                                             \
  static intptr_t name##_entry_point_offset() {                                \
    return OFFSET_OF(Thread, name##_entry_point_);                             \
  }
  CACHED_FUNCTION_ENTRY_POINTS_LIST(DEFINE_OFFSET_METHOD)
#undef DEFINE_OFFSET_METHOD
  bool TopErrorHandlerIsSetJump() const;
  bool TopErrorHandlerIsExitFrame() const;

  simd128_value_t unboxed_simd128_runtime_arg() const {
    return unboxed_runtime_arg_;
  }
  void set_unboxed_simd128_runtime_arg(simd128_value_t value) {
    unboxed_runtime_arg_ = value;
  }
#define REUSABLE_HANDLE_SCOPE_ACCESSORS(object)                                \
  void set_reusable_##object##_handle_scope_active(bool value) {               \
    reusable_##object##_handle_scope_active_ = value;                          \
  }                                                                            \
  bool reusable_##object##_handle_scope_active() const {                       \
    return reusable_##object##_handle_scope_active_;                           \
  }
  REUSABLE_HANDLE_LIST(REUSABLE_HANDLE_SCOPE_ACCESSORS)
#undef REUSABLE_HANDLE_SCOPE_ACCESSORS

  bool IsAnyReusableHandleScopeActive() const {
#define IS_REUSABLE_HANDLE_SCOPE_ACTIVE(object)                                \
  if (reusable_##object##_handle_scope_active_) {                              \
    return true;                                                               \
  }
    REUSABLE_HANDLE_LIST(IS_REUSABLE_HANDLE_SCOPE_ACTIVE)
    return false;
#undef IS_REUSABLE_HANDLE_SCOPE_ACTIVE
  }
#define REUSABLE_HANDLE(object)                                                \
  object& object##Handle() const { return *object##_handle_; }
  REUSABLE_HANDLE_LIST(REUSABLE_HANDLE)
#undef REUSABLE_HANDLE
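// Illustration (editor's note): with REUSABLE_HANDLE_LIST containing
// V(GrowableObjectArray), the macro above yields
//
//   GrowableObjectArray& GrowableObjectArrayHandle() const {
//     return *GrowableObjectArray_handle_;
//   }
//
// so VM code can reuse one preallocated handle per thread instead of
// allocating a fresh handle for every use.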
  static bool IsAtSafepoint(SafepointLevel level, uword state) {
    const uword mask = AtSafepointBits(level);
    return (state & mask) == mask;
  }

  void SetAtSafepoint(bool value, SafepointLevel level) {
    if (value) {
      safepoint_state_ |= AtSafepointBits(level);
    } else {
      safepoint_state_ &= ~AtSafepointBits(level);
    }
  }

    for (intptr_t i = level; i >= 0; --i) {
  static bool IsSafepointLevelRequested(uword state, SafepointLevel level) {
    switch (level) {
      case SafepointLevel::kGC:
        return (state & SafepointRequestedField::mask_in_place()) != 0;
      case SafepointLevel::kGCAndDeopt:
        return (state & DeoptSafepointRequestedField::mask_in_place()) != 0;
      case SafepointLevel::kGCAndDeoptAndReload:
        return (state & ReloadSafepointRequestedField::mask_in_place()) != 0;
      default:
        UNREACHABLE();
    }
  }
  uword SetSafepointRequested(SafepointLevel level, bool value) {
    uword mask = 0;
    switch (level) {
      case SafepointLevel::kGC:
        mask = SafepointRequestedField::mask_in_place();
        break;
      case SafepointLevel::kGCAndDeopt:
        mask = DeoptSafepointRequestedField::mask_in_place();
        break;
      case SafepointLevel::kGCAndDeoptAndReload:
        mask = ReloadSafepointRequestedField::mask_in_place();
        break;
    }
    if (value) {
      return safepoint_state_.fetch_or(mask, std::memory_order_acquire);
    } else {
      return safepoint_state_.fetch_and(~mask, std::memory_order_release);
    }
  }
  void SetBlockedForSafepoint(bool value) {
    safepoint_state_ =
        BlockedForSafepointField::update(value, safepoint_state_);
  }

  static uword SetBypassSafepoints(bool value, uword state) {
    return BypassSafepointsField::update(value, state);
  }

  void SetUnwindErrorInProgress(bool value) {
    const uword mask = UnwindErrorInProgressField::mask_in_place();
    if (value) {
      safepoint_state_.fetch_or(mask);
    } else {
      safepoint_state_.fetch_and(~mask);
    }
  }

  void set_execution_state(ExecutionState state) {
    execution_state_ = static_cast<uword>(state);
  }

  static uword full_safepoint_state_unacquired() {
    return (0 << AtSafepointField::shift()) |
           (0 << AtDeoptSafepointField::shift());
  }
  static uword full_safepoint_state_acquired() {
    return (1 << AtSafepointField::shift()) |
           (1 << AtDeoptSafepointField::shift());
  }
  bool TryEnterSafepoint() {
    uword old_state = 0;
    uword new_state = AtSafepointBits(current_safepoint_level());
    return safepoint_state_.compare_exchange_strong(old_state, new_state,
                                                    std::memory_order_release);
  }

  void EnterSafepoint() {
    // Fast path: CAS the state; fall back to the lock on contention.
    if (!TryEnterSafepoint()) {
      EnterSafepointUsingLock();
    }
  }

  bool TryExitSafepoint() {
    uword old_state = AtSafepointBits(current_safepoint_level());
    uword new_state = 0;
    return safepoint_state_.compare_exchange_strong(old_state, new_state,
                                                    std::memory_order_acquire);
  }

  void ExitSafepoint() {
    if (!TryExitSafepoint()) {
      ExitSafepointUsingLock();
    }
  }
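// Editor's sketch of the protocol above (not authoritative): entering a
// safepoint publishes the "at safepoint" bits with a single release CAS from
// the unacquired state, and leaving reverses it with an acquire CAS. Any
// contention (e.g. a pending safepoint operation already modified
// safepoint_state_) falls back to the thread_lock_-protected slow path:
//
//   thread->EnterSafepoint();  // CAS 0 -> AtSafepointBits(level), release
//   // ... thread parks here while a GC/deopt/reload operation runs ...
//   thread->ExitSafepoint();   // CAS AtSafepointBits(level) -> 0, acquire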
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)

    if (runtime_call_deopt_ability_ ==
        RuntimeCallDeoptAbility::kCannotLazyDeopt) {

    if (no_reload_scope_depth_ > 0 || allow_reload_scope_depth_ <= 0) {
  template <class T>
  T* AllocateReusableHandle();

  enum class RestoreWriteBarrierInvariantOp {
    kAddToRememberedSet,
    kAddToDeferredMarkingStack
  };
  void RestoreWriteBarrierInvariant(RestoreWriteBarrierInvariantOp op);

    compiler_state_ = state;
  uword write_barrier_mask_;
#if defined(DART_COMPRESSED_POINTERS)
  uword heap_base_ = 0;
#endif
  const uword* dispatch_table_array_ = nullptr;
  ObjectPtr* field_table_values_ = nullptr;
  ObjectPtr* shared_field_table_values_ = nullptr;
#define DECLARE_MEMBERS(type_name, member_name, expr, default_init_value)      \
  type_name member_name;
  CACHED_CONSTANTS_LIST(DECLARE_MEMBERS)
#undef DECLARE_MEMBERS

#define DECLARE_MEMBERS(name) uword name##_entry_point_;
  RUNTIME_ENTRY_LIST(DECLARE_MEMBERS)
#undef DECLARE_MEMBERS

#define DECLARE_MEMBERS(returntype, name, ...) uword name##_entry_point_;
  LEAF_RUNTIME_ENTRY_LIST(DECLARE_MEMBERS)
#undef DECLARE_MEMBERS

#define DECLARE_MEMBERS(name) uword name##_entry_point_ = 0;
  CACHED_FUNCTION_ENTRY_POINTS_LIST(DECLARE_MEMBERS)
#undef DECLARE_MEMBERS
  uword stack_overflow_flags_ = 0;
  uword volatile top_exit_frame_info_ = 0;
  uword volatile vm_tag_ = 0;

  ALIGN8 simd128_value_t unboxed_runtime_arg_;

  ObjectPtr active_exception_;
  ObjectPtr active_stacktrace_;

  ObjectPoolPtr global_object_pool_;
  uword saved_shadow_call_stack_ = 0;
  uword execution_state_;
  std::atomic<uword> safepoint_state_;
  uword exit_through_ffi_ = 0;
  ApiLocalScope* api_top_scope_;
  uint8_t double_truncate_round_supported_;
  ALIGN8 int64_t next_task_id_;
  ALIGN8 Random thread_random_;

  TsanUtils* tsan_utils_ = nullptr;

  uword true_end_ = 0;

  TimelineStream* const dart_stream_;
  StreamInfo* const service_extension_stream_;
  mutable Monitor thread_lock_;
  ApiLocalScope* api_reusable_scope_;
  int32_t no_callback_scope_depth_;
  int32_t force_growth_scope_depth_ = 0;
  intptr_t no_reload_scope_depth_ = 0;
  intptr_t allow_reload_scope_depth_ = 0;
  intptr_t stopped_mutators_scope_depth_ = 0;
  int32_t no_safepoint_scope_depth_;
  VMHandles reusable_handles_;
  int32_t stack_overflow_count_;
  uint32_t runtime_call_count_ = 0;

  PendingDeopts pending_deopts_;

  HierarchyInfo* hierarchy_info_;
  TypeUsageInfo* type_usage_info_;

  CompilerTimings* compiler_timings_ = nullptr;

  ErrorPtr sticky_error_;
  ObjectPtr* field_table_values() const { return field_table_values_; }
  ObjectPtr* shared_field_table_values() const {
    return shared_field_table_values_;
  }

#define REUSABLE_HANDLE_FIELDS(object) object* object##_handle_;
  REUSABLE_HANDLE_LIST(REUSABLE_HANDLE_FIELDS)
#undef REUSABLE_HANDLE_FIELDS

#define REUSABLE_HANDLE_SCOPE_VARIABLE(object)                                 \
  bool reusable_##object##_handle_scope_active_;
  REUSABLE_HANDLE_LIST(REUSABLE_HANDLE_SCOPE_VARIABLE)
#undef REUSABLE_HANDLE_SCOPE_VARIABLE
  class AtSafepointField : public BitField<uword, bool, 0, 1> {};
  class SafepointRequestedField
      : public BitField<uword, bool, AtSafepointField::kNextBit, 1> {};

  class AtDeoptSafepointField
      : public BitField<uword, bool, SafepointRequestedField::kNextBit, 1> {};
  class DeoptSafepointRequestedField
      : public BitField<uword, bool, AtDeoptSafepointField::kNextBit, 1> {};

  class AtReloadSafepointField
      : public BitField<uword,
                        bool,
                        DeoptSafepointRequestedField::kNextBit,
                        1> {};
  class ReloadSafepointRequestedField
      : public BitField<uword, bool, AtReloadSafepointField::kNextBit, 1> {};

  class BlockedForSafepointField
      : public BitField<uword,
                        bool,
                        ReloadSafepointRequestedField::kNextBit,
                        1> {};
  class BypassSafepointsField
      : public BitField<uword, bool, BlockedForSafepointField::kNextBit, 1> {};
  class UnwindErrorInProgressField
      : public BitField<uword, bool, BypassSafepointsField::kNextBit, 1> {};
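// Editor's summary of the resulting safepoint_state_ layout (derived from the
// kNextBit chaining above; all fields are single bits, low to high):
//
//   bit 0: AtSafepoint            bit 1: SafepointRequested
//   bit 2: AtDeoptSafepoint       bit 3: DeoptSafepointRequested
//   bit 4: AtReloadSafepoint      bit 5: ReloadSafepointRequested
//   bit 6: BlockedForSafepoint    bit 7: BypassSafepoints
//   bit 8: UnwindErrorInProgress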
  static uword AtSafepointBits(SafepointLevel level) {
    switch (level) {
      case SafepointLevel::kGC:
        return AtSafepointField::mask_in_place();
      case SafepointLevel::kGCAndDeopt:
        return AtSafepointField::mask_in_place() |
               AtDeoptSafepointField::mask_in_place();
      case SafepointLevel::kGCAndDeoptAndReload:
        return AtSafepointField::mask_in_place() |
               AtDeoptSafepointField::mask_in_place() |
               AtReloadSafepointField::mask_in_place();
      default:
        UNREACHABLE();
    }
  }
#if defined(USING_SAFE_STACK)
  uword saved_safestack_limit_;
#endif

  Isolate* scheduled_dart_mutator_isolate_ = nullptr;

  bool is_unwind_in_progress_ = false;

  bool inside_compiler_ = false;

#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
  HeapProfileSampler heap_sampler_;
#endif

  explicit Thread(bool is_vm_isolate);

  void StoreBufferRelease(
      StoreBuffer::ThresholdPolicy policy = StoreBuffer::kCheckThreshold);
  void StoreBufferAcquire();
  void OldMarkingStackRelease();
  void OldMarkingStackAcquire();
  void NewMarkingStackRelease();
  void NewMarkingStackAcquire();
  void DeferredMarkingStackRelease();
  void DeferredMarkingStackAcquire();

  void AcquireMarkingStacks();
  void ReleaseMarkingStacks();
  void FlushMarkingStacks();

  void set_safepoint_state(uint32_t value) { safepoint_state_ = value; }
  void EnterSafepointUsingLock();
  void ExitSafepointUsingLock();

  void SetupMutatorState(TaskKind kind);
  void ResetMutatorState();

  static void SuspendDartMutatorThreadInternal(Thread* thread,
                                               VMTag::VMTagId tag);
  static void ResumeDartMutatorThreadInternal(Thread* thread);

  static void SuspendThreadInternal(Thread* thread, VMTag::VMTagId tag);
  static void ResumeThreadInternal(Thread* thread);
  static Thread* AddActiveThread(IsolateGroup* group,
                                 Isolate* isolate,
                                 bool is_dart_mutator,
                                 bool bypass_safepoint);
  static void FreeActiveThread(Thread* thread, bool bypass_safepoint);
#define REUSABLE_FRIEND_DECLARATION(name)                                      \
  friend class Reusable##name##HandleScope;
  REUSABLE_HANDLE_LIST(REUSABLE_FRIEND_DECLARATION)
#undef REUSABLE_FRIEND_DECLARATION
    ASSERT(thread->runtime_call_deopt_ability_ ==
           RuntimeCallDeoptAbility::kCanLazyDeopt);
    thread->runtime_call_deopt_ability_ = kind;

    thread()->runtime_call_deopt_ability_ =
        RuntimeCallDeoptAbility::kCanLazyDeopt;
#if defined(DART_HOST_OS_WINDOWS)
void WindowsThreadCleanUp();
#endif

#if !defined(PRODUCT)
class NoSafepointScope : public ThreadStackResource {
 public:
  explicit NoSafepointScope(Thread* thread = nullptr)
      : ThreadStackResource(thread != nullptr ? thread : Thread::Current()) {
    this->thread()->IncrementNoSafepointScopeDepth();
  }
  ~NoSafepointScope() { thread()->DecrementNoSafepointScopeDepth(); }
};
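// Typical usage (editor's sketch): hold the scope across a region that must
// not yield to GC or other safepoint operations, e.g. while raw pointers into
// the heap are live in locals:
//
//   {
//     NoSafepointScope no_safepoint;  // depth is asserted by safepoint code
//     ObjectPtr raw = something.ptr();
//     // ... raw stays valid here: no safepoint can be entered ...
//   }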
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
    if (thread->allow_reload_scope_depth_ == 0) {
    thread->allow_reload_scope_depth_++;
    ASSERT(thread->allow_reload_scope_depth_ >= 0);

#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
    thread_->allow_reload_scope_depth_ -= 1;
    ASSERT(thread_->allow_reload_scope_depth_ >= 0);
    if (thread_->allow_reload_scope_depth_ == 0) {

#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
    thread->stopped_mutators_scope_depth_++;

#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
    thread()->stopped_mutators_scope_depth_ -= 1;
class EnterCompilerScope : public ThreadStackResource {
 public:
  explicit EnterCompilerScope(Thread* thread = nullptr)
      : ThreadStackResource(thread != nullptr ? thread : Thread::Current()) {
    previously_is_inside_compiler_ = this->thread()->IsInsideCompiler();
    if (!previously_is_inside_compiler_) {
      this->thread()->EnterCompiler();
    }
  }
  ~EnterCompilerScope() {
    if (!previously_is_inside_compiler_) {
      thread()->LeaveCompiler();
    }
  }

 private:
  bool previously_is_inside_compiler_;
};
class LeaveCompilerScope : public ThreadStackResource {
 public:
  explicit LeaveCompilerScope(Thread* thread = nullptr)
      : ThreadStackResource(thread != nullptr ? thread : Thread::Current()) {
    previously_is_inside_compiler_ = this->thread()->IsInsideCompiler();
    if (previously_is_inside_compiler_) {
      this->thread()->LeaveCompiler();
    }
  }
  ~LeaveCompilerScope() {
    if (previously_is_inside_compiler_) {
      thread()->EnterCompiler();
    }
  }

 private:
  bool previously_is_inside_compiler_;
};
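// Editor's sketch of the intended pairing (an assumption based on the two
// scopes above): EnterCompilerScope marks a region where IsInsideCompiler()
// must hold, LeaveCompilerScope temporarily suspends that mark, and each is a
// no-op when the enclosing state already matches:
//
//   EnterCompilerScope cs(thread);       // IsInsideCompiler() == true
//   {
//     LeaveCompilerScope ls(thread);     // e.g. to run non-compiler VM code
//     ASSERT(!thread->IsInsideCompiler());
//   }
//   ASSERT(thread->IsInsideCompiler());  // restored on scope exit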