5#ifndef RUNTIME_VM_THREAD_H_
6#define RUNTIME_VM_THREAD_H_
8#if defined(SHOULD_NOT_INCLUDE_RUNTIME)
9#error "Should not include runtime"
43class ExceptionHandlers;
47class GrowableObjectArray;
58class NoActiveIsolateScope;
78#define REUSABLE_HANDLE_LIST(V) \
84 V(ExceptionHandlers) \
87 V(GrowableObjectArray) \
100#define CACHED_VM_STUBS_LIST(V) \
101 V(CodePtr, fix_callers_target_code_, StubCode::FixCallersTarget().ptr(), \
103 V(CodePtr, fix_allocation_stub_code_, \
104 StubCode::FixAllocationStubTarget().ptr(), nullptr) \
105 V(CodePtr, invoke_dart_code_stub_, StubCode::InvokeDartCode().ptr(), \
107 V(CodePtr, call_to_runtime_stub_, StubCode::CallToRuntime().ptr(), nullptr) \
108 V(CodePtr, late_initialization_error_shared_without_fpu_regs_stub_, \
109 StubCode::LateInitializationErrorSharedWithoutFPURegs().ptr(), nullptr) \
110 V(CodePtr, late_initialization_error_shared_with_fpu_regs_stub_, \
111 StubCode::LateInitializationErrorSharedWithFPURegs().ptr(), nullptr) \
112 V(CodePtr, null_error_shared_without_fpu_regs_stub_, \
113 StubCode::NullErrorSharedWithoutFPURegs().ptr(), nullptr) \
114 V(CodePtr, null_error_shared_with_fpu_regs_stub_, \
115 StubCode::NullErrorSharedWithFPURegs().ptr(), nullptr) \
116 V(CodePtr, null_arg_error_shared_without_fpu_regs_stub_, \
117 StubCode::NullArgErrorSharedWithoutFPURegs().ptr(), nullptr) \
118 V(CodePtr, null_arg_error_shared_with_fpu_regs_stub_, \
119 StubCode::NullArgErrorSharedWithFPURegs().ptr(), nullptr) \
120 V(CodePtr, null_cast_error_shared_without_fpu_regs_stub_, \
121 StubCode::NullCastErrorSharedWithoutFPURegs().ptr(), nullptr) \
122 V(CodePtr, null_cast_error_shared_with_fpu_regs_stub_, \
123 StubCode::NullCastErrorSharedWithFPURegs().ptr(), nullptr) \
124 V(CodePtr, range_error_shared_without_fpu_regs_stub_, \
125 StubCode::RangeErrorSharedWithoutFPURegs().ptr(), nullptr) \
126 V(CodePtr, range_error_shared_with_fpu_regs_stub_, \
127 StubCode::RangeErrorSharedWithFPURegs().ptr(), nullptr) \
128 V(CodePtr, write_error_shared_without_fpu_regs_stub_, \
129 StubCode::WriteErrorSharedWithoutFPURegs().ptr(), nullptr) \
130 V(CodePtr, write_error_shared_with_fpu_regs_stub_, \
131 StubCode::WriteErrorSharedWithFPURegs().ptr(), nullptr) \
132 V(CodePtr, allocate_mint_with_fpu_regs_stub_, \
133 StubCode::AllocateMintSharedWithFPURegs().ptr(), nullptr) \
134 V(CodePtr, allocate_mint_without_fpu_regs_stub_, \
135 StubCode::AllocateMintSharedWithoutFPURegs().ptr(), nullptr) \
136 V(CodePtr, allocate_object_stub_, StubCode::AllocateObject().ptr(), nullptr) \
137 V(CodePtr, allocate_object_parameterized_stub_, \
138 StubCode::AllocateObjectParameterized().ptr(), nullptr) \
139 V(CodePtr, allocate_object_slow_stub_, StubCode::AllocateObjectSlow().ptr(), \
141 V(CodePtr, async_exception_handler_stub_, \
142 StubCode::AsyncExceptionHandler().ptr(), nullptr) \
143 V(CodePtr, resume_stub_, StubCode::Resume().ptr(), nullptr) \
144 V(CodePtr, return_async_stub_, StubCode::ReturnAsync().ptr(), nullptr) \
145 V(CodePtr, return_async_not_future_stub_, \
146 StubCode::ReturnAsyncNotFuture().ptr(), nullptr) \
147 V(CodePtr, return_async_star_stub_, StubCode::ReturnAsyncStar().ptr(), \
149 V(CodePtr, stack_overflow_shared_without_fpu_regs_stub_, \
150 StubCode::StackOverflowSharedWithoutFPURegs().ptr(), nullptr) \
151 V(CodePtr, stack_overflow_shared_with_fpu_regs_stub_, \
152 StubCode::StackOverflowSharedWithFPURegs().ptr(), nullptr) \
153 V(CodePtr, switchable_call_miss_stub_, StubCode::SwitchableCallMiss().ptr(), \
155 V(CodePtr, throw_stub_, StubCode::Throw().ptr(), nullptr) \
156 V(CodePtr, re_throw_stub_, StubCode::ReThrow().ptr(), nullptr) \
157 V(CodePtr, assert_boolean_stub_, StubCode::AssertBoolean().ptr(), nullptr) \
158 V(CodePtr, optimize_stub_, StubCode::OptimizeFunction().ptr(), nullptr) \
159 V(CodePtr, deoptimize_stub_, StubCode::Deoptimize().ptr(), nullptr) \
160 V(CodePtr, lazy_deopt_from_return_stub_, \
161 StubCode::DeoptimizeLazyFromReturn().ptr(), nullptr) \
162 V(CodePtr, lazy_deopt_from_throw_stub_, \
163 StubCode::DeoptimizeLazyFromThrow().ptr(), nullptr) \
164 V(CodePtr, slow_type_test_stub_, StubCode::SlowTypeTest().ptr(), nullptr) \
165 V(CodePtr, lazy_specialize_type_test_stub_, \
166 StubCode::LazySpecializeTypeTest().ptr(), nullptr) \
167 V(CodePtr, enter_safepoint_stub_, StubCode::EnterSafepoint().ptr(), nullptr) \
168 V(CodePtr, exit_safepoint_stub_, StubCode::ExitSafepoint().ptr(), nullptr) \
169 V(CodePtr, exit_safepoint_ignore_unwind_in_progress_stub_, \
170 StubCode::ExitSafepointIgnoreUnwindInProgress().ptr(), nullptr) \
171 V(CodePtr, call_native_through_safepoint_stub_, \
172 StubCode::CallNativeThroughSafepoint().ptr(), nullptr)
174#define CACHED_NON_VM_STUB_LIST(V) \
175 V(ObjectPtr, object_null_, Object::null(), nullptr) \
176 V(BoolPtr, bool_true_, Object::bool_true().ptr(), nullptr) \
177 V(BoolPtr, bool_false_, Object::bool_false().ptr(), nullptr) \
178 V(ArrayPtr, empty_array_, Object::empty_array().ptr(), nullptr) \
179 V(TypeArgumentsPtr, empty_type_arguments_, \
180 Object::empty_type_arguments().ptr(), nullptr) \
181 V(TypePtr, dynamic_type_, Type::dynamic_type().ptr(), nullptr)
185#define CACHED_VM_OBJECTS_LIST(V) \
186 CACHED_NON_VM_STUB_LIST(V) \
187 CACHED_VM_STUBS_LIST(V)
189#define CACHED_FUNCTION_ENTRY_POINTS_LIST(V) \
190 V(suspend_state_init_async) \
191 V(suspend_state_await) \
192 V(suspend_state_await_with_type_check) \
193 V(suspend_state_return_async) \
194 V(suspend_state_return_async_not_future) \
195 V(suspend_state_init_async_star) \
196 V(suspend_state_yield_async_star) \
197 V(suspend_state_return_async_star) \
198 V(suspend_state_init_sync_star) \
199 V(suspend_state_suspend_sync_star_at_start) \
200 V(suspend_state_handle_exception)
// Asserts the layout invariant that the cached bool_false_ slot sits exactly
// one word after bool_true_ in Thread: generated code relies on computing
// bool_false_offset() as bool_true_offset() + kWordSize.
204#define ASSERT_BOOL_FALSE_FOLLOWS_BOOL_TRUE() \
205 ASSERT((Thread::bool_true_offset() + kWordSize) == \
206 Thread::bool_false_offset());
208#define CACHED_VM_STUBS_ADDRESSES_LIST(V) \
209 V(uword, write_barrier_entry_point_, StubCode::WriteBarrier().EntryPoint(), \
211 V(uword, array_write_barrier_entry_point_, \
212 StubCode::ArrayWriteBarrier().EntryPoint(), 0) \
213 V(uword, call_to_runtime_entry_point_, \
214 StubCode::CallToRuntime().EntryPoint(), 0) \
215 V(uword, allocate_mint_with_fpu_regs_entry_point_, \
216 StubCode::AllocateMintSharedWithFPURegs().EntryPoint(), 0) \
217 V(uword, allocate_mint_without_fpu_regs_entry_point_, \
218 StubCode::AllocateMintSharedWithoutFPURegs().EntryPoint(), 0) \
219 V(uword, allocate_object_entry_point_, \
220 StubCode::AllocateObject().EntryPoint(), 0) \
221 V(uword, allocate_object_parameterized_entry_point_, \
222 StubCode::AllocateObjectParameterized().EntryPoint(), 0) \
223 V(uword, allocate_object_slow_entry_point_, \
224 StubCode::AllocateObjectSlow().EntryPoint(), 0) \
225 V(uword, stack_overflow_shared_without_fpu_regs_entry_point_, \
226 StubCode::StackOverflowSharedWithoutFPURegs().EntryPoint(), 0) \
227 V(uword, stack_overflow_shared_with_fpu_regs_entry_point_, \
228 StubCode::StackOverflowSharedWithFPURegs().EntryPoint(), 0) \
229 V(uword, megamorphic_call_checked_entry_, \
230 StubCode::MegamorphicCall().EntryPoint(), 0) \
231 V(uword, switchable_call_miss_entry_, \
232 StubCode::SwitchableCallMiss().EntryPoint(), 0) \
233 V(uword, optimize_entry_, StubCode::OptimizeFunction().EntryPoint(), 0) \
234 V(uword, deoptimize_entry_, StubCode::Deoptimize().EntryPoint(), 0) \
235 V(uword, call_native_through_safepoint_entry_point_, \
236 StubCode::CallNativeThroughSafepoint().EntryPoint(), 0) \
237 V(uword, jump_to_frame_entry_point_, StubCode::JumpToFrame().EntryPoint(), \
239 V(uword, slow_type_test_entry_point_, StubCode::SlowTypeTest().EntryPoint(), \
242#define CACHED_ADDRESSES_LIST(V) \
243 CACHED_VM_STUBS_ADDRESSES_LIST(V) \
244 V(uword, bootstrap_native_wrapper_entry_point_, \
245 NativeEntry::BootstrapNativeCallWrapperEntry(), 0) \
246 V(uword, no_scope_native_wrapper_entry_point_, \
247 NativeEntry::NoScopeNativeCallWrapperEntry(), 0) \
248 V(uword, auto_scope_native_wrapper_entry_point_, \
249 NativeEntry::AutoScopeNativeCallWrapperEntry(), 0) \
250 V(StringPtr*, predefined_symbols_address_, Symbols::PredefinedAddress(), \
252 V(uword, double_nan_address_, reinterpret_cast<uword>(&double_nan_constant), \
254 V(uword, double_negate_address_, \
255 reinterpret_cast<uword>(&double_negate_constant), 0) \
256 V(uword, double_abs_address_, reinterpret_cast<uword>(&double_abs_constant), \
258 V(uword, float_not_address_, reinterpret_cast<uword>(&float_not_constant), \
260 V(uword, float_negate_address_, \
261 reinterpret_cast<uword>(&float_negate_constant), 0) \
262 V(uword, float_absolute_address_, \
263 reinterpret_cast<uword>(&float_absolute_constant), 0) \
264 V(uword, float_zerow_address_, \
265 reinterpret_cast<uword>(&float_zerow_constant), 0)
267#define CACHED_CONSTANTS_LIST(V) \
268 CACHED_VM_OBJECTS_LIST(V) \
269 CACHED_ADDRESSES_LIST(V)
309#if defined(USING_THREAD_SANITIZER)
376 static void ExitIsolate(
bool isolate_shutdown =
false);
380 bool bypass_safepoint);
399 return reinterpret_cast<uword>(&stack_limit_);
411#if defined(USING_SAFE_STACK)
// Returns the saved SafeStack limit for this thread (set via
// set_saved_safestack_limit). Only compiled in under USING_SAFE_STACK.
412 uword saved_safestack_limit()
const {
return saved_safestack_limit_; }
413 void set_saved_safestack_limit(
uword limit) {
414 saved_safestack_limit_ = limit;
429#if defined(DART_COMPRESSED_POINTERS)
439#if defined(DART_COMPRESSED_POINTERS)
440 static intptr_t heap_base_offset() {
return OFFSET_OF(
Thread, heap_base_); }
447 return ++stack_overflow_count_;
454 ? stack_overflow_shared_with_fpu_regs_entry_point_offset()
455 : stack_overflow_shared_without_fpu_regs_entry_point_offset();
506 ASSERT(
value ==
nullptr || api_reusable_scope_ ==
nullptr);
507 api_reusable_scope_ =
value;
527#if defined(USING_THREAD_SANITIZER)
// Raw accessor for the exit_through_ffi_ word (default-initialized to 0;
// see member declaration). NOTE(review): semantics of nonzero values are set
// elsewhere — not visible in this chunk; confirm against the setters.
528 uword exit_through_ffi()
const {
return exit_through_ffi_; }
// Per-thread ThreadSanitizer helper state. May be nullptr: tsan_utils_ is
// declared with a nullptr default initializer.
529 TsanUtils* tsan_utils()
const {
return tsan_utils_; }
547 return scheduled_dart_mutator_isolate_ !=
nullptr;
558 return scheduled_dart_mutator_isolate_;
// True while this thread is between EnterCompiler() and LeaveCompiler()
// (those toggle inside_compiler_; see their definitions below).
562 bool IsInsideCompiler()
const {
return inside_compiler_; }
585 return *compiler_state_;
589 ASSERT(isolate_group_ !=
nullptr);
590 return hierarchy_info_;
594 ASSERT(isolate_group_ !=
nullptr);
595 ASSERT((hierarchy_info_ ==
nullptr &&
value !=
nullptr) ||
596 (hierarchy_info_ !=
nullptr &&
value ==
nullptr));
597 hierarchy_info_ =
value;
601 ASSERT(isolate_group_ !=
nullptr);
602 return type_usage_info_;
606 ASSERT(isolate_group_ !=
nullptr);
607 ASSERT((type_usage_info_ ==
nullptr &&
value !=
nullptr) ||
608 (type_usage_info_ !=
nullptr &&
value ==
nullptr));
609 type_usage_info_ =
value;
615 compiler_timings_ = stats;
620 ASSERT(no_callback_scope_depth_ < INT_MAX);
621 no_callback_scope_depth_ += 1;
624 ASSERT(no_callback_scope_depth_ > 0);
625 no_callback_scope_depth_ -= 1;
630 ASSERT(force_growth_scope_depth_ < INT_MAX);
631 force_growth_scope_depth_ += 1;
634 ASSERT(force_growth_scope_depth_ > 0);
635 force_growth_scope_depth_ -= 1;
641 is_unwind_in_progress_ =
true;
646 void EnterCompiler() {
647 ASSERT(!IsInsideCompiler());
648 inside_compiler_ =
true;
651 void LeaveCompiler() {
652 ASSERT(IsInsideCompiler());
653 inside_compiler_ =
false;
660 bool StoreBufferContains(ObjectPtr obj)
const {
661 return store_buffer_block_->Contains(obj);
// The thread is participating in incremental marking iff it currently holds
// a marking-stack block (marking_stack_block_ is non-null).
669 bool is_marking()
const {
return marking_stack_block_ !=
nullptr; }
707 return no_safepoint_scope_depth_;
715 ASSERT(no_safepoint_scope_depth_ < INT_MAX);
716 no_safepoint_scope_depth_ += 1;
722 ASSERT(no_safepoint_scope_depth_ > 0);
723 no_safepoint_scope_depth_ -= 1;
730 return stopped_mutators_scope_depth_ > 0;
733#define DEFINE_OFFSET_METHOD(type_name, member_name, expr, default_init_value) \
734 static intptr_t member_name##offset() { \
735 return OFFSET_OF(Thread, member_name); \
738#undef DEFINE_OFFSET_METHOD
749 index *
sizeof(
uword);
765#define DEFINE_OFFSET_METHOD(name) \
766 static intptr_t name##_entry_point_offset() { \
767 return OFFSET_OF(Thread, name##_entry_point_); \
770#undef DEFINE_OFFSET_METHOD
772#define DEFINE_OFFSET_METHOD(returntype, name, ...) \
773 static intptr_t name##_entry_point_offset() { \
774 return OFFSET_OF(Thread, name##_entry_point_); \
777#undef DEFINE_OFFSET_METHOD
781 global_object_pool_ = raw_value;
786 dispatch_table_array_ = array;
794#define DEFINE_OFFSET_METHOD(name) \
795 static intptr_t name##_entry_point_offset() { \
796 return OFFSET_OF(Thread, name##_entry_point_); \
799#undef DEFINE_OFFSET_METHOD
804 bool TopErrorHandlerIsSetJump()
const;
805 bool TopErrorHandlerIsExitFrame()
const;
831 return unboxed_runtime_arg_;
834 unboxed_runtime_arg_ =
value;
870#define REUSABLE_HANDLE_SCOPE_ACCESSORS(object) \
871 void set_reusable_##object##_handle_scope_active(bool value) { \
872 reusable_##object##_handle_scope_active_ = value; \
874 bool reusable_##object##_handle_scope_active() const { \
875 return reusable_##object##_handle_scope_active_; \
878#undef REUSABLE_HANDLE_SCOPE_ACCESSORS
880 bool IsAnyReusableHandleScopeActive()
const {
881#define IS_REUSABLE_HANDLE_SCOPE_ACTIVE(object) \
882 if (reusable_##object##_handle_scope_active_) { \
887#undef IS_REUSABLE_HANDLE_SCOPE_ACTIVE
// For each reusable handle type T (see REUSABLE_HANDLE_LIST), defines an
// accessor `T& THandle() const` returning the thread-cached handle object
// stored in the corresponding T_handle_ member.
893#define REUSABLE_HANDLE(object) \
894 object& object##Handle() const { return *object##_handle_; }
896#undef REUSABLE_HANDLE
899 const uword mask = AtSafepointBits(level);
900 return (
state & mask) == mask;
915 safepoint_state_ |= AtSafepointBits(level);
917 safepoint_state_ &= ~AtSafepointBits(level);
929 for (intptr_t i = level; i >= 0; --i) {
977 return safepoint_state_.fetch_or(mask, std::memory_order_acquire);
980 return safepoint_state_.fetch_and(~mask, std::memory_order_release);
1006 safepoint_state_.fetch_or(mask);
1008 safepoint_state_.fetch_and(~mask);
1036 execution_state_ =
static_cast<uword>(
state);
1057 uword old_state = 0;
1059 return safepoint_state_.compare_exchange_strong(old_state, new_state,
1060 std::memory_order_release);
1070 EnterSafepointUsingLock();
1076 uword new_state = 0;
1077 return safepoint_state_.compare_exchange_strong(old_state, new_state,
1078 std::memory_order_acquire);
1087 ExitSafepointUsingLock();
1127#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
1134 if (runtime_call_deopt_ability_ ==
1138 if (no_reload_scope_depth_ > 0 || allow_reload_scope_depth_ <= 0) {
1146 T* AllocateReusableHandle();
1148 enum class RestoreWriteBarrierInvariantOp {
1149 kAddToRememberedSet,
1150 kAddToDeferredMarkingStack
1153 void RestoreWriteBarrierInvariant(RestoreWriteBarrierInvariantOp op);
1158 compiler_state_ =
state;
1169 uword write_barrier_mask_;
1170#if defined(DART_COMPRESSED_POINTERS)
1171 uword heap_base_ = 0;
1175 const uword* dispatch_table_array_ =
nullptr;
1176 ObjectPtr* field_table_values_ =
nullptr;
1183#define DECLARE_MEMBERS(type_name, member_name, expr, default_init_value) \
1184 type_name member_name;
1186#undef DECLARE_MEMBERS
1188#define DECLARE_MEMBERS(name) uword name##_entry_point_;
1190#undef DECLARE_MEMBERS
1192#define DECLARE_MEMBERS(returntype, name, ...) uword name##_entry_point_;
1194#undef DECLARE_MEMBERS
1198#define DECLARE_MEMBERS(name) uword name##_entry_point_ = 0;
1200#undef DECLARE_MEMBERS
1209 uword stack_overflow_flags_ = 0;
1210 uword volatile top_exit_frame_info_ = 0;
1214 uword volatile vm_tag_ = 0;
1219 ALIGN8 simd128_value_t unboxed_runtime_arg_;
1222 ObjectPtr active_exception_;
1223 ObjectPtr active_stacktrace_;
1225 ObjectPoolPtr global_object_pool_;
1227 uword saved_shadow_call_stack_ = 0;
1242 uword execution_state_;
1269 std::atomic<uword> safepoint_state_;
1270 uword exit_through_ffi_ = 0;
1271 ApiLocalScope* api_top_scope_;
1272 uint8_t double_truncate_round_supported_;
1273 ALIGN8 int64_t next_task_id_;
1274 ALIGN8 Random thread_random_;
1276 TsanUtils* tsan_utils_ =
nullptr;
1285 uword true_end_ = 0;
1287 TimelineStream*
const dart_stream_;
1288 StreamInfo*
const service_extension_stream_;
1289 mutable Monitor thread_lock_;
1290 ApiLocalScope* api_reusable_scope_;
1291 int32_t no_callback_scope_depth_;
1292 int32_t force_growth_scope_depth_ = 0;
1293 intptr_t no_reload_scope_depth_ = 0;
1294 intptr_t allow_reload_scope_depth_ = 0;
1295 intptr_t stopped_mutators_scope_depth_ = 0;
1297 int32_t no_safepoint_scope_depth_;
1299 VMHandles reusable_handles_;
1300 int32_t stack_overflow_count_;
1301 uint32_t runtime_call_count_ = 0;
1306 PendingDeopts pending_deopts_;
1310 HierarchyInfo* hierarchy_info_;
1311 TypeUsageInfo* type_usage_info_;
1314 CompilerTimings* compiler_timings_ =
nullptr;
1316 ErrorPtr sticky_error_;
1318 ObjectPtr* field_table_values()
const {
return field_table_values_; }
1321#define REUSABLE_HANDLE_FIELDS(object) object* object##_handle_;
1323#undef REUSABLE_HANDLE_FIELDS
1326#define REUSABLE_HANDLE_SCOPE_VARIABLE(object) \
1327 bool reusable_##object##_handle_scope_active_;
1329#undef REUSABLE_HANDLE_SCOPE_VARIABLE
// 1-bit boolean at bit 0 of the packed safepoint state word: thread is
// checked in at a (GC-level) safepoint. First field of the bit chain below;
// each subsequent field starts at the previous field's kNextBit.
1332 class AtSafepointField :
public BitField<uword, bool, 0, 1> {};
1333 class SafepointRequestedField
1334 :
public BitField<uword, bool, AtSafepointField::kNextBit, 1> {};
1336 class AtDeoptSafepointField
1337 :
public BitField<uword, bool, SafepointRequestedField::kNextBit, 1> {};
1338 class DeoptSafepointRequestedField
1339 :
public BitField<uword, bool, AtDeoptSafepointField::kNextBit, 1> {};
1341 class AtReloadSafepointField
1342 :
public BitField<uword,
1344 DeoptSafepointRequestedField::kNextBit,
// 1-bit boolean immediately after AtReloadSafepointField in the packed
// state word: a reload-level safepoint has been requested for this thread.
1346 class ReloadSafepointRequestedField
1347 :
public BitField<uword, bool, AtReloadSafepointField::kNextBit, 1> {};
1349 class BlockedForSafepointField
1350 :
public BitField<uword,
1352 ReloadSafepointRequestedField::kNextBit,
// 1-bit boolean immediately after BlockedForSafepointField: the thread is
// running with safepoints bypassed (queried via BypassSafepoints()).
1354 class BypassSafepointsField
1355 :
public BitField<uword, bool, BlockedForSafepointField::kNextBit, 1> {};
// 1-bit boolean immediately after BypassSafepointsField: an unwind error is
// in progress on this thread (see SetUnwindErrorInProgress /
// UnwindErrorInProgress accessors).
1356 class UnwindErrorInProgressField
1357 :
public BitField<uword, bool, BypassSafepointsField::kNextBit, 1> {};
1375#if defined(USING_SAFE_STACK)
1376 uword saved_safestack_limit_;
1380 Isolate* scheduled_dart_mutator_isolate_ =
nullptr;
1382 bool is_unwind_in_progress_ =
false;
1385 bool inside_compiler_ =
false;
1388#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
1389 HeapProfileSampler heap_sampler_;
1392 explicit Thread(
bool is_vm_isolate);
1394 void StoreBufferRelease(
1396 void StoreBufferAcquire();
1398 void MarkingStackRelease();
1399 void MarkingStackAcquire();
1400 void MarkingStackFlush();
1401 void DeferredMarkingStackRelease();
1402 void DeferredMarkingStackAcquire();
1403 void DeferredMarkingStackFlush();
1405 void set_safepoint_state(uint32_t value) { safepoint_state_ =
value; }
1406 void EnterSafepointUsingLock();
1407 void ExitSafepointUsingLock();
1412 void SetupMutatorState(
TaskKind kind);
1413 void ResetMutatorState();
1416 void SetupDartMutatorStateDependingOnSnapshot(
IsolateGroup* group);
1419 static void SuspendDartMutatorThreadInternal(
Thread* thread,
1421 static void ResumeDartMutatorThreadInternal(
Thread* thread);
1424 static void ResumeThreadInternal(
Thread* thread);
1436 bool is_dart_mutator,
1437 bool bypass_safepoint);
1442 static void FreeActiveThread(
Thread* thread,
bool bypass_safepoint);
// Befriends each Reusable<T>HandleScope class so the scopes can access
// Thread's private reusable-handle members (presumably the per-type
// *_handle_ and *_handle_scope_active_ fields — confirm against the
// scope class definitions, which are outside this chunk).
1446#define REUSABLE_FRIEND_DECLARATION(name) \
1447 friend class Reusable##name##HandleScope;
1449#undef REUSABLE_FRIEND_DECLARATION
1484 ASSERT(thread->runtime_call_deopt_ability_ ==
1486 thread->runtime_call_deopt_ability_ = kind;
1489 thread()->runtime_call_deopt_ability_ =
1499#if defined(DART_HOST_OS_WINDOWS)
1501void WindowsThreadCleanUp();
1504#if !defined(PRODUCT)
1523class NoSafepointScope :
public ThreadStackResource {
1526 : ThreadStackResource(thread != nullptr ? thread : Thread::Current()) {
1527 this->thread()->IncrementNoSafepointScopeDepth();
1529 ~NoSafepointScope() { thread()->DecrementNoSafepointScopeDepth(); }
1586#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
1587 if (thread->allow_reload_scope_depth_ == 0) {
1590 thread->allow_reload_scope_depth_++;
1591 ASSERT(thread->allow_reload_scope_depth_ >= 0);
1596#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
1597 thread_->allow_reload_scope_depth_ -= 1;
1598 ASSERT(thread_->allow_reload_scope_depth_ >= 0);
1599 if (thread_->allow_reload_scope_depth_ == 0) {
1617#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
1618 thread->stopped_mutators_scope_depth_++;
1624#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
1625 thread()->stopped_mutators_scope_depth_ -= 1;
1636class EnterCompilerScope :
public ThreadStackResource {
1638 explicit EnterCompilerScope(Thread* thread =
nullptr)
1639 : ThreadStackResource(thread != nullptr ? thread : Thread::Current()) {
1640 previously_is_inside_compiler_ = this->thread()->IsInsideCompiler();
1641 if (!previously_is_inside_compiler_) {
1642 this->thread()->EnterCompiler();
1645 ~EnterCompilerScope() {
1646 if (!previously_is_inside_compiler_) {
1647 thread()->LeaveCompiler();
1652 bool previously_is_inside_compiler_;
1667class LeaveCompilerScope :
public ThreadStackResource {
1669 explicit LeaveCompilerScope(Thread* thread =
nullptr)
1670 : ThreadStackResource(thread != nullptr ? thread : Thread::Current()) {
1671 previously_is_inside_compiler_ = this->thread()->IsInsideCompiler();
1672 if (previously_is_inside_compiler_) {
1673 this->thread()->LeaveCompiler();
1676 ~LeaveCompilerScope() {
1677 if (previously_is_inside_compiler_) {
1678 thread()->EnterCompiler();
1683 bool previously_is_inside_compiler_;
static constexpr bool decode(uword value)
static constexpr uword update(bool value, uword original)
static constexpr uword mask_in_place()
static constexpr int shift()
~DisableThreadInterruptsScope()
EnterCompilerScope(Thread *thread=nullptr)
LeaveCompilerScope(Thread *thread=nullptr)
NoSafepointScope(Thread *thread=nullptr)
static const uword kInvalidStackLimit
static void SetCurrentTLS(BaseThread *value)
static ThreadState * CurrentVMThread()
~RawReloadParticipationScope()
RawReloadParticipationScope(Thread *thread)
T load(std::memory_order order=std::memory_order_relaxed) const
RuntimeCallDeoptScope(Thread *thread, RuntimeCallDeoptAbility kind)
virtual ~RuntimeCallDeoptScope()
ThreadState * thread() const
void UnwindScopes(uword stack_marker)
bool IsBlockedForSafepoint() const
bool HasCompilerState() const
HierarchyInfo * hierarchy_info() const
void set_execution_state(ExecutionState state)
void set_compiler_timings(CompilerTimings *stats)
void ScheduleInterrupts(uword interrupt_bits)
bool CanAcquireSafepointLocks() const
static uword full_safepoint_state_unacquired()
void set_type_usage_info(TypeUsageInfo *value)
void set_hierarchy_info(HierarchyInfo *value)
static intptr_t execution_state_offset()
void set_vm_tag(uword tag)
static intptr_t stack_limit_offset()
void set_unboxed_simd128_runtime_arg(simd128_value_t value)
friend Isolate * CreateWithinExistingIsolateGroup(IsolateGroup *, const char *, char **)
const uword * dispatch_table_array() const
void MarkingStackAddObject(ObjectPtr obj)
static intptr_t write_barrier_mask_offset()
bool IsSafepointRequested(SafepointLevel level) const
NO_SANITIZE_THREAD ExecutionState execution_state_cross_thread_for_testing() const
ApiLocalScope * api_top_scope() const
void set_active_stacktrace(const Object &value)
void DecrementNoSafepointScopeDepth()
void RememberLiveTemporaries()
bool OwnsSafepoint() const
bool force_growth() const
uword GetAndClearInterrupts()
void DeferredMarkingStackAddObject(ObjectPtr obj)
void set_top_exit_frame_info(uword top_exit_frame_info)
static bool IsBlockedForSafepoint(uword state)
ObjectPoolPtr global_object_pool() const
bool HasScheduledInterrupts() const
int32_t no_callback_scope_depth() const
void PrintJSON(JSONStream *stream) const
uword saved_shadow_call_stack() const
static intptr_t OffsetFromThread(const Object &object)
static intptr_t safepoint_state_offset()
static intptr_t write_barrier_wrappers_thread_offset(Register reg)
void MarkingStackBlockProcess()
static intptr_t vm_tag_offset()
static Thread * Current()
PendingDeopts & pending_deopts()
bool OwnsGCSafepoint() const
void set_unboxed_int64_runtime_arg(int64_t value)
static bool IsAtSafepoint(SafepointLevel level, uword state)
bool IsValidLocalHandle(Dart_Handle object) const
bool IsValidHandle(Dart_Handle object) const
static intptr_t store_buffer_block_offset()
void AssertEmptyThreadInvariants()
bool IsSafepointRequested() const
ObjectPtr active_stacktrace() const
static intptr_t saved_stack_limit_offset()
void set_resume_pc(uword value)
int32_t no_safepoint_scope_depth() const
static intptr_t top_offset()
static bool ObjectAtOffset(intptr_t offset, Object *object)
void AssertNonMutatorInvariants()
DART_WARN_UNUSED_RESULT ErrorPtr StealStickyError()
static intptr_t service_extension_stream_offset()
void SetUnwindErrorInProgress(bool value)
friend class CompilerState
ErrorPtr HandleInterrupts()
void SetStackLimit(uword value)
@ kExitThroughRuntimeCall
void VisitObjectPointers(ObjectPointerVisitor *visitor, ValidationPolicy validate_frames)
TypeUsageInfo * type_usage_info() const
void IncrementNoCallbackScopeDepth()
static intptr_t random_offset()
static uword stack_overflow_shared_stub_entry_point_offset(bool fpu_regs)
static intptr_t isolate_offset()
Monitor * thread_lock() const
static intptr_t active_exception_offset()
static void ExitIsolateGroupAsNonMutator()
ApiLocalScope * api_reusable_scope() const
static intptr_t WriteBarrierWrappersOffsetForRegister(Register reg)
static const char * TaskKindToCString(TaskKind kind)
void set_api_reusable_scope(ApiLocalScope *value)
CompilerState & compiler_state()
static intptr_t top_exit_frame_info_offset()
int ZoneSizeInBytes() const
static bool IsSafepointLevelRequested(uword state, SafepointLevel level)
void ReleaseStoreBuffer()
friend class compiler::target::Thread
double unboxed_double_runtime_arg() const
intptr_t CountLocalHandles() const
void AssertNonDartMutatorInvariants()
void StoreBufferBlockProcess(StoreBuffer::ThresholdPolicy policy)
void DeferredMarkingStackBlockProcess()
void AcquireMarkingStack()
bool HasExitedDartCode() const
uword GetAndClearStackOverflowFlags()
void set_sticky_error(const Error &value)
void StoreBufferAddObject(ObjectPtr obj)
simd128_value_t unboxed_simd128_runtime_arg() const
void set_global_object_pool(ObjectPoolPtr raw_value)
bool IsExecutingDartCode() const
bool IsSafepointRequestedLocked(SafepointLevel level) const
static intptr_t end_offset()
void DecrementForceGrowthScopeDepth()
static intptr_t marking_stack_block_offset()
static uword saved_shadow_call_stack_offset()
static intptr_t global_object_pool_offset()
friend class TransitionVMToGenerated
static intptr_t field_table_values_offset()
bool UnwindErrorInProgress() const
static bool CanLoadFromThread(const Object &object)
void ReleaseMarkingStack()
SafepointLevel current_safepoint_level() const
int64_t unboxed_int64_runtime_arg() const
static void ExitIsolateGroupAsHelper(bool bypass_safepoint)
bool is_unwind_in_progress() const
bool IsSafepointLevelRequestedLocked(SafepointLevel level) const
static intptr_t isolate_group_offset()
uword top_exit_frame_info() const
bool OwnsReloadSafepoint() const
uword stack_limit_address() const
bool OwnsDeoptSafepoint() const
int64_t unboxed_int64_runtime_second_arg() const
static intptr_t next_task_id_offset()
void ClearReusableHandles()
bool IsDartMutatorThread() const
static intptr_t exit_through_ffi_offset()
void IncrementForceGrowthScopeDepth()
static bool EnterIsolateGroupAsNonMutator(IsolateGroup *isolate_group, TaskKind kind)
void SetAtSafepoint(bool value, SafepointLevel level)
ExecutionState execution_state() const
Isolate * isolate() const
int32_t IncrementAndGetStackOverflowCount()
static uword resume_pc_offset()
bool BypassSafepoints() const
uword saved_stack_limit() const
friend class TransitionGeneratedToVM
void set_true_end(uword true_end)
CompilerTimings * compiler_timings() const
uint32_t IncrementAndGetRuntimeCallCount()
TaskKind task_kind() const
bool IsAtSafepoint() const
bool IsInNoReloadScope() const
static uword full_safepoint_state_acquired()
void set_unboxed_double_runtime_arg(double value)
IsolateGroup * isolate_group() const
ObjectPtr active_exception() const
static intptr_t dart_stream_offset()
void StoreBufferAddObjectGC(ObjectPtr obj)
static intptr_t api_top_scope_offset()
void SetBlockedForSafepoint(bool value)
void DecrementNoCallbackScopeDepth()
void set_api_top_scope(ApiLocalScope *value)
void AssertEmptyStackInvariants()
static intptr_t double_truncate_round_supported_offset()
uword write_barrier_mask() const
static intptr_t stack_overflow_flags_offset()
bool IsAtSafepoint(SafepointLevel level) const
friend class IsolateGroup
static uword SetBypassSafepoints(bool value, uword state)
static void EnterIsolate(Isolate *isolate)
static intptr_t active_stacktrace_offset()
bool IsInStoppedMutatorsScope() const
void IncrementNoSafepointScopeDepth()
static intptr_t tsan_utils_offset()
static void ExitIsolate(bool isolate_shutdown=false)
virtual bool MayAllocateHandles()
ErrorPtr sticky_error() const
void set_dispatch_table_array(const uword *array)
void set_active_exception(const Object &value)
HeapProfileSampler & heap_sampler()
uword SetSafepointRequested(SafepointLevel level, bool value)
void set_unboxed_int64_runtime_second_arg(int64_t value)
static bool EnterIsolateGroupAsHelper(IsolateGroup *isolate_group, TaskKind kind, bool bypass_safepoint)
static intptr_t unboxed_runtime_arg_offset()
Isolate * scheduled_dart_mutator_isolate() const
friend class NoActiveIsolateScope
static intptr_t dispatch_table_array_offset()
void DeferredMarkLiveTemporaries()
struct _Dart_Handle * Dart_Handle
#define DART_WARN_UNUSED_RESULT
#define REUSABLE_FRIEND_DECLARATION(name)
StoreBuffer::Block StoreBufferBlock
MarkingStack::Block MarkingStackBlock
constexpr RegList kDartAvailableCpuRegs
const intptr_t kStoreBufferWrapperSize
constexpr int kNumberOfDartAvailableCpuRegs
#define RUNTIME_ENTRY_LIST(V)
#define LEAF_RUNTIME_ENTRY_LIST(V)
static intptr_t setjmp_buffer_offset()
static intptr_t exception_fp_offset()
static intptr_t exception_sp_offset()
static intptr_t exception_pc_offset()
static intptr_t setjmp_function_offset()
#define CACHED_FUNCTION_ENTRY_POINTS_LIST(V)
#define REUSABLE_HANDLE_LIST(V)
#define REUSABLE_HANDLE_FIELDS(object)
#define CACHED_CONSTANTS_LIST(V)
#define DEFINE_OFFSET_METHOD(type_name, member_name, expr, default_init_value)
#define DECLARE_MEMBERS(type_name, member_name, expr, default_init_value)
#define REUSABLE_HANDLE(object)
#define NO_SANITIZE_THREAD
#define OFFSET_OF(type, field)