Thread::~Thread() {
  // A thread must have detached from its isolate and released its store
  // buffer and marking stack blocks before it is destroyed.
  ASSERT(isolate_ == nullptr);
  ASSERT(store_buffer_block_ == nullptr);
  ASSERT(old_marking_stack_block_ == nullptr);
  ASSERT(new_marking_stack_block_ == nullptr);
  ASSERT(deferred_marking_stack_block_ == nullptr);
  // ...
  // Delete the reusable API scope if there is one.
  if (api_reusable_scope_ != nullptr) {
    delete api_reusable_scope_;
    api_reusable_scope_ = nullptr;
  }
}
#if defined(DEBUG)
#define REUSABLE_HANDLE_SCOPE_INIT(object)                                     \
  reusable_##object##_handle_scope_active_(false),
#else
#define REUSABLE_HANDLE_SCOPE_INIT(object)
#endif  // defined(DEBUG)

#define REUSABLE_HANDLE_INITIALIZERS(object) object##_handle_(nullptr),
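// The REUSABLE_HANDLE_* macros are X-macros: REUSABLE_HANDLE_LIST(V) invokes
// V once per reusable handle type, so each single-line definition above
// expands into one constructor initializer per handle. A minimal sketch of
// the pattern (hypothetical two-entry list, not the real REUSABLE_HANDLE_LIST):
//
//   #define MY_LIST(V) V(Object) V(String)
//   #define INIT(object) object##_handle_(nullptr),
//   // "MY_LIST(INIT)" in an initializer list expands to:
//   //   Object_handle_(nullptr), String_handle_(nullptr),
//   #undef INIT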
Thread::Thread(bool is_vm_isolate)
    // (base-class initializer and several data members are elided here)
    : write_barrier_mask_(UntaggedObject::kGenerationalBarrierMask),
      active_exception_(Object::null()),
      active_stacktrace_(Object::null()),
      global_object_pool_(ObjectPool::null()),
      execution_state_(kThreadInNative),
      api_top_scope_(nullptr),
      double_truncate_round_supported_(
          TargetCPUFeatures::double_truncate_round_supported() ? 1 : 0),
      task_kind_(kUnknownTask),
      dart_stream_(nullptr),
#if !defined(PRODUCT)  // (guard reconstructed; the exact condition is elided)
      service_extension_stream_(ASSERT_NOTNULL(&Service::extension_stream)),
#else
      service_extension_stream_(nullptr),
#endif
      api_reusable_scope_(nullptr),
      no_callback_scope_depth_(0),
      no_safepoint_scope_depth_(0),
      stack_overflow_count_(0),
      hierarchy_info_(nullptr),
      type_usage_info_(nullptr),
      sticky_error_(Error::null()),
#if defined(USING_SAFE_STACK)
      saved_safestack_limit_(0),
#endif
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
      heap_sampler_(this)
#endif
{
  // Initialize each cached member to its default value.
#define DEFAULT_INIT(type_name, member_name, init_expr, default_init_value)   \
  member_name = default_init_value;
  CACHED_CONSTANTS_LIST(DEFAULT_INIT)
#undef DEFAULT_INIT

  for (intptr_t i = 0; i < kNumberOfDartAvailableCpuRegs; i++) {
    write_barrier_wrappers_entry_points_[i] = 0;
  }

#define DEFAULT_INIT(name) name##_entry_point_ = 0;
  RUNTIME_ENTRY_LIST(DEFAULT_INIT)
#undef DEFAULT_INIT

#define DEFAULT_INIT(returntype, name, ...) name##_entry_point_ = 0;
  LEAF_RUNTIME_ENTRY_LIST(DEFAULT_INIT)
#undef DEFAULT_INIT

  // ...
  if (!is_vm_isolate) {
    // ...
  }

#if defined(DART_HOST_OS_FUCHSIA)
  next_task_id_ = trace_generate_nonce();
#else
  next_task_id_ = Random::GlobalNextUInt64();
#endif

  memset(&unboxed_runtime_arg_, 0, sizeof(simd128_value_t));
  // ...
}
void Thread::InitVMConstants() {
#if defined(DART_COMPRESSED_POINTERS)
  heap_base_ = Object::null()->heap_base();
#endif

#define ASSERT_VM_HEAP(type_name, member_name, init_expr, default_init_value) \
  ASSERT((init_expr)->IsOldObject());
  CACHED_VM_OBJECTS_LIST(ASSERT_VM_HEAP)
#undef ASSERT_VM_HEAP

#define INIT_VALUE(type_name, member_name, init_expr, default_init_value)     \
  ASSERT(member_name == default_init_value);                                  \
  member_name = (init_expr);
  CACHED_CONSTANTS_LIST(INIT_VALUE)
#undef INIT_VALUE

  for (intptr_t i = 0; i < kNumberOfDartAvailableCpuRegs; i++) {
    write_barrier_wrappers_entry_points_[i] =
        StubCode::WriteBarrierWrappers().EntryPoint() +
        i * kStoreBufferWrapperSize;
  }

#define INIT_VALUE(name)                                                      \
  ASSERT(name##_entry_point_ == 0);                                           \
  name##_entry_point_ = k##name##RuntimeEntry.GetEntryPoint();
  RUNTIME_ENTRY_LIST(INIT_VALUE)
#undef INIT_VALUE

#define INIT_VALUE(returntype, name, ...)                                     \
  ASSERT(name##_entry_point_ == 0);                                           \
  name##_entry_point_ = k##name##RuntimeEntry.GetEntryPoint();
  LEAF_RUNTIME_ENTRY_LIST(INIT_VALUE)
#undef INIT_VALUE
  // Allocate the reusable handles in the VM's handle area.
#define REUSABLE_HANDLE_ALLOCATION(object)                                    \
  this->object##_handle_ = this->AllocateReusableHandle<object>();
  REUSABLE_HANDLE_LIST(REUSABLE_HANDLE_ALLOCATION)
#undef REUSABLE_HANDLE_ALLOCATION
}
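// Thread state is initialized in two phases: the constructor stores only
// default values (DEFAULT_INIT), and InitVMConstants() later asserts each
// member still holds its default before caching the real value -- a VM-heap
// object, stub entry point, or runtime entry point -- at a fixed offset in
// the Thread, where compiled code can load it cheaply.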
void Thread::set_active_exception(const Object& value) {
  active_exception_ = value.ptr();
}

void Thread::set_active_stacktrace(const Object& value) {
  active_stacktrace_ = value.ptr();
}

ErrorPtr Thread::sticky_error() const {
  return sticky_error_;
}

void Thread::set_sticky_error(const Error& value) {
  sticky_error_ = value.ptr();
}

void Thread::ClearStickyError() {
  sticky_error_ = Error::null();
}

ErrorPtr Thread::StealStickyError() {
  // ...
  ErrorPtr return_value = sticky_error_;
  sticky_error_ = Error::null();
  return return_value;
}
const char* Thread::TaskKindToCString(TaskKind kind) {
  switch (kind) {
    case kUnknownTask:  return "kUnknownTask";
    case kMutatorTask:  return "kMutatorTask";
    case kCompilerTask: return "kCompilerTask";
    case kSweeperTask:  return "kSweeperTask";
    case kMarkerTask:   return "kMarkerTask";
    // ...
  }
}
void Thread::AssertNonMutatorInvariants() {
  ASSERT(BypassSafepoints());
  ASSERT(store_buffer_block_ == nullptr);
  ASSERT(old_marking_stack_block_ == nullptr);
  ASSERT(new_marking_stack_block_ == nullptr);
  ASSERT(deferred_marking_stack_block_ == nullptr);
  AssertNonDartMutatorInvariants();
}
void Thread::AssertNonDartMutatorInvariants() {
  ASSERT(!IsDartMutatorThread());
  ASSERT(isolate() == nullptr);
  ASSERT(isolate_group() != nullptr);
  ASSERT(task_kind_ != kMutatorTask);
  // ...
}
void Thread::AssertEmptyStackInvariants() {
  ASSERT(zone() == nullptr);
  ASSERT(top_handle_scope() == nullptr);
  ASSERT(long_jump_base() == nullptr);
  ASSERT(top_resource() == nullptr);
  ASSERT(top_exit_frame_info_ == 0);
  ASSERT(api_top_scope_ == nullptr);
  ASSERT(!pending_deopts_.HasPendingDeopts());
  ASSERT(compiler_state_ == nullptr);
  ASSERT(hierarchy_info_ == nullptr);
  ASSERT(type_usage_info_ == nullptr);
  ASSERT(no_active_isolate_scope_ == nullptr);
  ASSERT(compiler_timings_ == nullptr);
  ASSERT(!exit_through_ffi_);
  ASSERT(runtime_call_deopt_ability_ == RuntimeCallDeoptAbility::kCanLazyDeopt);
  ASSERT(no_callback_scope_depth_ == 0);
  ASSERT(force_growth_scope_depth_ == 0);
  ASSERT(no_reload_scope_depth_ == 0);
  ASSERT(stopped_mutators_scope_depth_ == 0);
  ASSERT(stack_overflow_flags_ == 0);
  // ...
  if (active_stacktrace_.untag() != 0) {
    ASSERT(sticky_error() == Error::null());
    ASSERT(active_exception_ == Object::null());
    ASSERT(active_stacktrace_ == Object::null());
  }
}
void Thread::AssertEmptyThreadInvariants() {
  AssertEmptyStackInvariants();
  // ...
  ASSERT(isolate_ == nullptr);
  ASSERT(isolate_group_ == nullptr);
  ASSERT(os_thread() == nullptr);
  ASSERT(vm_tag_ == VMTag::kInvalidTagId);
  ASSERT(task_kind_ == kUnknownTask);
  ASSERT(execution_state_ == Thread::kThreadInNative);
  ASSERT(scheduled_dart_mutator_isolate_ == nullptr);

  ASSERT(write_barrier_mask_ == UntaggedObject::kGenerationalBarrierMask);
  ASSERT(store_buffer_block_ == nullptr);
  ASSERT(old_marking_stack_block_ == nullptr);
  ASSERT(new_marking_stack_block_ == nullptr);
  ASSERT(deferred_marking_stack_block_ == nullptr);
  ASSERT(!is_unwind_in_progress_);

  ASSERT(saved_stack_limit_ == OSThread::kInvalidStackLimit);
  ASSERT(stack_limit_.load() == 0);
  ASSERT(safepoint_state_ == 0);

  // ...
  if (active_stacktrace_.untag() != 0) {
    ASSERT(field_table_values_ == nullptr);
    ASSERT(shared_field_table_values_ == nullptr);
    ASSERT(global_object_pool_ == Object::null());
#define CHECK_REUSABLE_HANDLE(object) ASSERT(object##_handle_->IsNull());
    REUSABLE_HANDLE_LIST(CHECK_REUSABLE_HANDLE)
#undef CHECK_REUSABLE_HANDLE
  }
}
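// These "empty" invariants are the contract for pooled Thread structures:
// AddActiveThread() asserts them right after GetFreeThreadLocked(), and
// FreeActiveThread() re-establishes them before ReturnThreadLocked(), so a
// recycled Thread cannot leak isolate, handle, or GC state between users.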
bool Thread::HasActiveState() {
  // Do we have active Dart frames?
  if (top_exit_frame_info() != 0) {
    return true;
  }
  // Do we have active embedder scopes?
  if (api_top_scope() != nullptr) {
    return true;
  }
  // Do we have active VM zones?
  if (zone() != nullptr) {
    return true;
  }
  AssertEmptyStackInvariants();
  return false;
}
void Thread::EnterIsolate(Isolate* isolate) {
  auto* group = isolate->group();
  // ...
  // Let the VM's thread pool know this thread is occupying a mutator.
  const bool is_nested_reenter = /* ... */;
  group->IncreaseMutatorCount(isolate, is_nested_reenter);
  // ...
    ASSERT(thread->scheduled_dart_mutator_isolate_ == isolate);
  // ...
    thread = AddActiveThread(group, isolate, /*is_dart_mutator=*/true,
                             /*bypass_safepoint=*/false);
    thread->SetupState(kMutatorTask);
    thread->SetupMutatorState(kMutatorTask);
    thread->SetupDartMutatorState(isolate);
  // ...
  ResumeDartMutatorThreadInternal(thread);
}
static bool ShouldSuspend(bool isolate_shutdown, Thread* thread) {
  // From the embedder's perspective, a shutdown isolate is gone for good, so
  // there is no point in caching its thread for later resumption.
  if (isolate_shutdown) return false;
  // ...
  // Cap the number of suspended threads we keep around.
  const intptr_t kMaxSuspendedThreads = 20;
  auto* group = thread->isolate_group();
  return group->thread_registry()->active_isolates_count() <
         kMaxSuspendedThreads;
}
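// ShouldSuspend() is the policy for the suspend-instead-of-free path in
// ExitIsolate() below: up to kMaxSuspendedThreads exited (but not shut down)
// mutators keep their Thread parked, so re-entering the isolate can take the
// cheap resume path instead of going through AddActiveThread()/SetupState().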
void Thread::ExitIsolate(bool isolate_shutdown) {
  Thread* thread = Thread::Current();
  ASSERT(thread != nullptr);
  // ...
  DEBUG_ASSERT(!thread->IsAnyReusableHandleScopeActive());
  // ...
  auto isolate = thread->isolate();
  // ...
  thread->set_vm_tag(isolate->is_runnable() ? VMTag::kIdleTagId
                                            : VMTag::kLoadWaitTagId);
  // ...
  ASSERT(isolate->sticky_error_ == Error::null());
  // ...
  isolate->scheduled_mutator_thread_ = nullptr;
  // ... (suspend path: keep the thread parked for later resumption)
    const auto tag =
        isolate->is_runnable() ? VMTag::kIdleTagId : VMTag::kLoadWaitTagId;
    SuspendDartMutatorThreadInternal(thread, tag);
  // ... (free path: fully tear down the mutator state)
    thread->ResetDartMutatorState(isolate);
    thread->ResetMutatorState();
    thread->ResetState();
    SuspendDartMutatorThreadInternal(thread, VMTag::kInvalidTagId);
    FreeActiveThread(thread, /*bypass_safepoint=*/false);
  // ...
  ASSERT(!(isolate_shutdown && is_nested_exit));
  group->DecreaseMutatorCount(isolate, is_nested_exit);
}
bool Thread::EnterIsolateGroupAsHelper(IsolateGroup* isolate_group,
                                       TaskKind kind,
                                       bool bypass_safepoint) {
  Thread* thread = AddActiveThread(isolate_group, /*isolate=*/nullptr,
                                   /*is_dart_mutator=*/false, bypass_safepoint);
  if (thread != nullptr) {
    thread->SetupState(kind);
    // ...
    thread->SetupMutatorState(kind);
    ResumeThreadInternal(thread);
    return true;
  }
  return false;
}
void Thread::ExitIsolateGroupAsHelper(bool bypass_safepoint) {
  Thread* thread = Thread::Current();
  // ...
  thread->ResetMutatorState();
  thread->ResetState();
  SuspendThreadInternal(thread, VMTag::kInvalidTagId);
  FreeActiveThread(thread, bypass_safepoint);
}
bool Thread::EnterIsolateGroupAsNonMutator(IsolateGroup* isolate_group,
                                           TaskKind kind) {
  Thread* thread =
      AddActiveThread(isolate_group, /*isolate=*/nullptr,
                      /*is_dart_mutator=*/false, /*bypass_safepoint=*/true);
  if (thread != nullptr) {
    thread->SetupState(kind);
    ResumeThreadInternal(thread);
    return true;
  }
  return false;
}
void Thread::ExitIsolateGroupAsNonMutator() {
  Thread* thread = Thread::Current();
  ASSERT(thread != nullptr);
  // ...
  thread->ResetState();
  SuspendThreadInternal(thread, VMTag::kInvalidTagId);
  FreeActiveThread(thread, /*bypass_safepoint=*/true);
}
void Thread::ResumeDartMutatorThreadInternal(Thread* thread) {
  ResumeThreadInternal(thread);
  if (Dart::vm_isolate() != nullptr &&
      thread->isolate() != Dart::vm_isolate()) {
#if defined(USING_SIMULATOR)
    thread->SetStackLimit(Simulator::Current()->overflow_stack_limit());
#else
    thread->SetStackLimit(OSThread::Current()->overflow_stack_limit());
#endif
  }
}
void Thread::SuspendDartMutatorThreadInternal(Thread* thread,
                                              VMTag::VMTagId tag) {
  thread->ClearStackLimit();
  SuspendThreadInternal(thread, tag);
}
void Thread::ResumeThreadInternal(Thread* thread) {
  ASSERT(!thread->IsAtSafepoint());
  ASSERT(thread->isolate_group() != nullptr);
  ASSERT(thread->execution_state() == Thread::kThreadInNative);
  ASSERT(thread->vm_tag() == VMTag::kInvalidTagId ||
         thread->vm_tag() == VMTag::kIdleTagId ||
         thread->vm_tag() == VMTag::kLoadWaitTagId);

  thread->set_vm_tag(VMTag::kVMTagId);
  thread->set_execution_state(Thread::kThreadInVM);

  OSThread* os_thread = OSThread::Current();
  thread->set_os_thread(os_thread);
  os_thread->set_thread(thread);
  Thread::SetCurrent(thread);
  // ...
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
  thread->heap_sampler().Initialize();
#endif
}
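// Resume/Suspend are the two halves of handing a Thread to and from an OS
// thread: ResumeThreadInternal() wires the Thread into the OSThread's TLS
// slot (set_thread()/SetCurrent()) and flips execution_state to kThreadInVM;
// SuspendThreadInternal() below undoes both, so a parked Thread has no OS
// thread and no TLS entry pointing at it.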
void Thread::SuspendThreadInternal(Thread* thread, VMTag::VMTagId tag) {
  thread->heap()->new_space()->AbandonRemainingTLAB(thread);

#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
  thread->heap_sampler().Cleanup();
#endif

  OSThread* os_thread = thread->os_thread();
  ASSERT(os_thread != nullptr);
  // ...
  os_thread->set_thread(nullptr);
  OSThread::SetCurrent(os_thread);
  thread->set_os_thread(nullptr);

  thread->set_vm_tag(tag);
}
Thread* Thread::AddActiveThread(IsolateGroup* group,
                                Isolate* isolate,
                                bool is_dart_mutator,
                                bool bypass_safepoint) {
  // ...
  const bool is_vm_isolate =
      Dart::vm_isolate() == nullptr || Dart::vm_isolate() == isolate;

  auto thread_registry = group->thread_registry();
  auto safepoint_handler = group->safepoint_handler();
  MonitorLocker ml(thread_registry->threads_lock());

  if (!bypass_safepoint) {
    while (safepoint_handler->AnySafepointInProgressLocked()) {
      ml.Wait();
    }
  }

  Thread* thread = thread_registry->GetFreeThreadLocked(is_vm_isolate);
  thread->AssertEmptyThreadInvariants();
  thread->isolate_ = isolate;
  thread->isolate_group_ = group;
  thread->scheduled_dart_mutator_isolate_ = isolate;
  // ...
  thread->set_safepoint_state(Thread::SetBypassSafepoints(bypass_safepoint, 0));
  thread->runtime_call_deopt_ability_ = RuntimeCallDeoptAbility::kCanLazyDeopt;
  ASSERT(!thread->IsAtSafepoint());
  // ...
  ASSERT(thread->saved_stack_limit_ == OSThread::kInvalidStackLimit);
  return thread;
}
void Thread::FreeActiveThread(Thread* thread, bool bypass_safepoint) {
  ASSERT(!thread->HasActiveState());
  ASSERT(!thread->IsAtSafepoint());

  if (!bypass_safepoint) {
    // ...
  }
  thread->ClearReusableHandles();
  // ...
  auto group = thread->isolate_group_;
  auto thread_registry = group->thread_registry();

  MonitorLocker ml(thread_registry->threads_lock());

  if (!bypass_safepoint) {
    // ...
    RawReloadParticipationScope enable_reload(thread);
    thread->EnterSafepoint();
  }

  thread->isolate_ = nullptr;
  thread->isolate_group_ = nullptr;
  thread->scheduled_dart_mutator_isolate_ = nullptr;
  thread->set_execution_state(Thread::kThreadInNative);
  thread->stack_limit_.store(0);
  thread->safepoint_state_ = 0;

  thread->AssertEmptyThreadInvariants();
  thread_registry->ReturnThreadLocked(thread);
}
void Thread::ReleaseStoreBuffer() {
  ASSERT(IsAtSafepoint() || OwnsSafepoint() || task_kind_ == kMarkerTask);
  if (store_buffer_block_ == nullptr || store_buffer_block_->IsEmpty()) {
    return;  // Nothing to release.
  }
  // ...
  StoreBufferRelease(StoreBuffer::kIgnoreThreshold);
  // ...
  store_buffer_block_ = isolate_group()->store_buffer()->PopEmptyBlock();
}
void Thread::SetStackLimit(uword limit) {
  // ...
  if (!HasScheduledInterrupts()) {
    // No interrupt pending: the interrupt-aware limit can be updated too.
    stack_limit_.store(limit);
  }
  saved_stack_limit_ = limit;
}

void Thread::ClearStackLimit() {
  SetStackLimit(OSThread::kInvalidStackLimit);
}
static bool IsInterruptLimit(uword limit) {
  return (limit & ~Thread::kInterruptsMask) ==
         (Thread::kInterruptStackLimit & ~Thread::kInterruptsMask);
}
void Thread::ScheduleInterrupts(uword interrupt_bits) {
  ASSERT((interrupt_bits & ~kInterruptsMask) == 0);  // Must fit in the mask.

  uword old_limit = stack_limit_.load();
  uword new_limit;
  do {
    if (IsInterruptLimit(old_limit)) {
      // Some interrupts are already pending: just add the new bits.
      new_limit = old_limit | interrupt_bits;
    } else {
      // ...
    }
  } while (!stack_limit_.compare_exchange_weak(old_limit, new_limit));
}
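// A note on the loop above: stack_limit_ does double duty. It is the limit
// generated code compares the stack pointer against, and while interrupts
// are pending its low bits (kInterruptsMask) carry the interrupt flags,
// which in effect makes the next stack-overflow check fail and enter the
// runtime. compare_exchange_weak() re-reads the current value into old_limit
// whenever the CAS loses a race, so every retry recomputes new_limit from
// fresh state -- the standard C++11 atomic read-modify-write retry pattern.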
uword Thread::GetAndClearInterrupts() {
  uword interrupt_bits = 0;
  uword old_limit = stack_limit_.load();
  uword new_limit = saved_stack_limit_;
  do {
    if (IsInterruptLimit(old_limit)) {
      interrupt_bits = interrupt_bits | (old_limit & kInterruptsMask);
    } else {
      return interrupt_bits;  // No interrupt was scheduled.
    }
  } while (!stack_limit_.compare_exchange_weak(old_limit, new_limit));

  return interrupt_bits;
}
ErrorPtr Thread::HandleInterrupts() {
  uword interrupt_bits = GetAndClearInterrupts();
  if ((interrupt_bits & kVMInterrupt) != 0) {
    // ...
    if (isolate_group()->store_buffer()->Overflowed()) {
      // ...
      heap()->CollectGarbage(this, GCType::kEvacuate, GCReason::kStoreBuffer);
    }
    heap()->CheckFinalizeMarking(this);
    // ...
    if (isolate()->TakeHasCompletedBlocks()) {
      Profiler::ProcessCompletedBlocks(isolate());
    }
    // ...
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
    // ...
#endif
  }

  if ((interrupt_bits & kMessageInterrupt) != 0) {
    MessageHandler::MessageStatus status =
        isolate()->message_handler()->HandleOOBMessages();
    if (status != MessageHandler::kOK) {
      // ...
      if (FLAG_trace_isolates) {
        OS::PrintErr(
            "[!] Terminating isolate due to OOB message:\n" /* ... */);
      }
      // ...
      return StealStickyError();
    }
  }
  return Error::null();
}
uword Thread::GetAndClearStackOverflowFlags() {
  uword stack_overflow_flags = stack_overflow_flags_;
  stack_overflow_flags_ = 0;
  return stack_overflow_flags;
}
void Thread::StoreBufferBlockProcess(StoreBuffer::ThresholdPolicy policy) {
  StoreBufferRelease(policy);
  StoreBufferAcquire();
}
void Thread::StoreBufferAddObject(ObjectPtr obj) {
  ASSERT(this == Thread::Current());
  store_buffer_block_->Push(obj);
  if (store_buffer_block_->IsFull()) {
    StoreBufferBlockProcess(StoreBuffer::kCheckThreshold);
  }
}

void Thread::StoreBufferAddObjectGC(ObjectPtr obj) {
  store_buffer_block_->Push(obj);
  if (store_buffer_block_->IsFull()) {
    StoreBufferBlockProcess(StoreBuffer::kIgnoreThreshold);
  }
}
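// The two variants differ only in threshold policy: the mutator-facing
// StoreBufferAddObject() uses kCheckThreshold, so filling the store buffer
// can schedule the kVMInterrupt that HandleInterrupts() above answers with
// an evacuating GC, while the GC-internal variant uses kIgnoreThreshold so
// that barrier work performed during a collection never re-triggers one.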
void Thread::StoreBufferRelease(StoreBuffer::ThresholdPolicy policy) {
  StoreBufferBlock* block = store_buffer_block_;
  store_buffer_block_ = nullptr;
  isolate_group()->store_buffer()->PushBlock(block, policy);
}

void Thread::StoreBufferAcquire() {
  store_buffer_block_ = isolate_group()->store_buffer()->PopNonFullBlock();
}

void Thread::StoreBufferReleaseGC() {
  StoreBufferBlock* block = store_buffer_block_;
  store_buffer_block_ = nullptr;
  isolate_group()->store_buffer()->PushBlock(block,
                                             StoreBuffer::kIgnoreThreshold);
}

void Thread::StoreBufferAcquireGC() {
  store_buffer_block_ = isolate_group()->store_buffer()->PopNonFullBlock();
}
void Thread::OldMarkingStackBlockProcess() {
  OldMarkingStackRelease();
  OldMarkingStackAcquire();
}

void Thread::NewMarkingStackBlockProcess() {
  NewMarkingStackRelease();
  NewMarkingStackAcquire();
}

void Thread::DeferredMarkingStackBlockProcess() {
  DeferredMarkingStackRelease();
  DeferredMarkingStackAcquire();
}
void Thread::MarkingStackAddObject(ObjectPtr obj) {
  if (obj->IsNewObject()) {
    NewMarkingStackAddObject(obj);
  } else {
    OldMarkingStackAddObject(obj);
  }
}

void Thread::OldMarkingStackAddObject(ObjectPtr obj) {
  ASSERT(obj->IsOldObject());
  old_marking_stack_block_->Push(obj);
  if (old_marking_stack_block_->IsFull()) {
    OldMarkingStackBlockProcess();
  }
}

void Thread::NewMarkingStackAddObject(ObjectPtr obj) {
  ASSERT(obj->IsNewObject());
  new_marking_stack_block_->Push(obj);
  if (new_marking_stack_block_->IsFull()) {
    NewMarkingStackBlockProcess();
  }
}

void Thread::DeferredMarkingStackAddObject(ObjectPtr obj) {
  deferred_marking_stack_block_->Push(obj);
  if (deferred_marking_stack_block_->IsFull()) {
    DeferredMarkingStackBlockProcess();
  }
}
void Thread::OldMarkingStackRelease() {
  MarkingStackBlock* old_block = old_marking_stack_block_;
  old_marking_stack_block_ = nullptr;
  isolate_group()->old_marking_stack()->PushBlock(old_block);

  write_barrier_mask_ = UntaggedObject::kGenerationalBarrierMask;
}

void Thread::NewMarkingStackRelease() {
  MarkingStackBlock* new_block = new_marking_stack_block_;
  new_marking_stack_block_ = nullptr;
  isolate_group()->new_marking_stack()->PushBlock(new_block);
}

void Thread::OldMarkingStackAcquire() {
  old_marking_stack_block_ =
      isolate_group()->old_marking_stack()->PopEmptyBlock();

  write_barrier_mask_ = UntaggedObject::kGenerationalBarrierMask |
                        UntaggedObject::kIncrementalBarrierMask;
}
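// Note the coupling between marking-stack ownership and write_barrier_mask_:
// acquiring the old marking stack switches this thread's barrier mask to
// generational+incremental, and releasing it drops back to generational
// only, so compiled write barriers start and stop feeding the concurrent
// marker exactly for the lifetime of the thread's marking stack blocks.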
void Thread::NewMarkingStackAcquire() {
  new_marking_stack_block_ =
      isolate_group()->new_marking_stack()->PopEmptyBlock();
}

void Thread::DeferredMarkingStackRelease() {
  MarkingStackBlock* block = deferred_marking_stack_block_;
  deferred_marking_stack_block_ = nullptr;
  isolate_group()->deferred_marking_stack()->PushBlock(block);
}

void Thread::DeferredMarkingStackAcquire() {
  deferred_marking_stack_block_ =
      isolate_group()->deferred_marking_stack()->PopEmptyBlock();
}
void Thread::AcquireMarkingStacks() {
  OldMarkingStackAcquire();
  NewMarkingStackAcquire();
  DeferredMarkingStackAcquire();
}

void Thread::ReleaseMarkingStacks() {
  OldMarkingStackRelease();
  NewMarkingStackRelease();
  DeferredMarkingStackRelease();
}
void Thread::FlushMarkingStacks() {
  isolate_group()->old_marking_stack()->PushBlock(old_marking_stack_block_);
  old_marking_stack_block_ =
      isolate_group()->old_marking_stack()->PopEmptyBlock();

  isolate_group()->new_marking_stack()->PushBlock(new_marking_stack_block_);
  new_marking_stack_block_ =
      isolate_group()->new_marking_stack()->PopEmptyBlock();

  isolate_group()->deferred_marking_stack()->PushBlock(
      deferred_marking_stack_block_);
  deferred_marking_stack_block_ =
      isolate_group()->deferred_marking_stack()->PopEmptyBlock();
}
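// Unlike Release/Acquire, FlushMarkingStacks() keeps the thread in marking
// mode: it pushes the current, possibly only partially filled, blocks to the
// shared stacks where the concurrent marker can drain them, immediately
// takes fresh empty blocks, and leaves write_barrier_mask_ untouched.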
Heap* Thread::heap() const {
  return isolate_group_->heap();
}

bool Thread::IsExecutingDartCode() const {
  return (top_exit_frame_info() == 0) && VMTag::IsDartTag(vm_tag());
}

bool Thread::HasExitedDartCode() const {
  return (top_exit_frame_info() != 0) && !VMTag::IsDartTag(vm_tag());
}
template <class C>
C* Thread::AllocateReusableHandle() {
  C* handle = reinterpret_cast<C*>(reusable_handles_.AllocateScopedHandle());
  C::initializeHandle(handle, C::null());
  return handle;
}
void Thread::ClearReusableHandles() {
#define CLEAR_REUSABLE_HANDLE(object) *object##_handle_ = object::null();
  REUSABLE_HANDLE_LIST(CLEAR_REUSABLE_HANDLE)
#undef CLEAR_REUSABLE_HANDLE
}
void Thread::VisitObjectPointers(ObjectPointerVisitor* visitor,
                                 ValidationPolicy validation_policy) {
  ASSERT(visitor != nullptr);

  // Visit objects in thread-local zones.
  if (zone() != nullptr) {
    zone()->VisitObjectPointers(visitor);
  }
  // ...
  // Visit the thread's reusable handles.
  reusable_handles_.VisitObjectPointers(visitor);

  // Visit objects in all API local scopes.
  ApiLocalScope* scope = api_top_scope();
  while (scope != nullptr) {
    scope->local_handles()->VisitObjectPointers(visitor);
    scope = scope->previous();
  }

  if (IsDartMutatorThread()) {
    // ...
  }
  // ...
  const StackFrameIterator::CrossThreadPolicy cross_thread_policy =
      StackFrameIterator::kAllowCrossThreadIteration;
  // Iterate over all the stack frames and visit objects on the stack.
  StackFrameIterator frames_iterator(/* ... */
                                     this, cross_thread_policy);
  StackFrame* frame = frames_iterator.NextFrame();
  while (frame != nullptr) {
    frame->VisitObjectPointers(visitor);
    frame = frames_iterator.NextFrame();
  }
}
class RestoreWriteBarrierInvariantVisitor : public ObjectPointerVisitor {
 public:
  RestoreWriteBarrierInvariantVisitor(IsolateGroup* group,
                                      Thread* thread,
                                      Thread::RestoreWriteBarrierInvariantOp op)
      : ObjectPointerVisitor(group),
        thread_(thread),
        current_(Thread::Current()),
        op_(op) {}

  void VisitPointers(ObjectPtr* first, ObjectPtr* last) override {
    for (; first != last + 1; first++) {
      ObjectPtr obj = *first;
      // Immediate objects (e.g. Smis) are not heap-allocated and never need
      // a write barrier.
      if (obj->IsImmediateObject()) continue;

      // Skip arrays above the write-barrier-elimination length cap.
      // ...
      if (length > Array::kMaxLengthForWriteBarrierElimination) {
        // ...
      }
      // ...
      if (obj->IsDartInstance() && /* ... */
          !obj->IsUnhandledException()) {
        // ...
      }

      switch (op_) {
        case Thread::RestoreWriteBarrierInvariantOp::kAddToRememberedSet:
          if (obj->IsOldObject()) {
            obj->untag()->EnsureInRememberedSet(current_);
          }
          if (current_->is_marking()) {
            current_->DeferredMarkingStackAddObject(obj);
          }
          break;
        case Thread::RestoreWriteBarrierInvariantOp::kAddToDeferredMarkingStack:
          // Re-scan obj when finalizing marking.
          ASSERT(current_->is_marking());
          current_->DeferredMarkingStackAddObject(obj);
          break;
      }
    }
  }

#if defined(DART_COMPRESSED_POINTERS)
  void VisitCompressedPointers(uword heap_base,
                               CompressedObjectPtr* first,
                               CompressedObjectPtr* last) override {
    // ...
  }
#endif

  Thread* const thread_;
  Thread* const current_;
  Thread::RestoreWriteBarrierInvariantOp op_;
};
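// Background for the visitor above: the compiler may omit write barriers for
// stores into objects it has just allocated, which is only sound while those
// objects stay reachable from a stack the GC will rescan. When that
// guarantee is about to lapse, RestoreWriteBarrierInvariant() below walks
// the live frames and conservatively re-registers such temporaries, either
// in the remembered set (kAddToRememberedSet) or on the deferred marking
// stack (kAddToDeferredMarkingStack).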
void Thread::RestoreWriteBarrierInvariant(RestoreWriteBarrierInvariantOp op) {
  ASSERT(IsAtSafepoint() || OwnsGCSafepoint() || this == Thread::Current());

  const StackFrameIterator::CrossThreadPolicy cross_thread_policy =
      StackFrameIterator::kAllowCrossThreadIteration;
  StackFrameIterator frames_iterator(top_exit_frame_info(),
                                     ValidationPolicy::kDontValidateFrames,
                                     this, cross_thread_policy);
  RestoreWriteBarrierInvariantVisitor visitor(isolate_group(), this, op);
  ObjectStore* object_store = isolate_group()->object_store();
  bool scan_next_dart_frame = false;
  for (StackFrame* frame = frames_iterator.NextFrame(); frame != nullptr;
       frame = frames_iterator.NextFrame()) {
    if (frame->IsExitFrame()) {
      scan_next_dart_frame = true;
    } else if (frame->IsEntryFrame()) {
      // ...
    } else if (frame->IsStubFrame()) {
      const uword pc = frame->pc();
      if (Code::ContainsInstructionAt(
              object_store->init_late_static_field_stub(), pc) ||
          Code::ContainsInstructionAt(
              object_store->init_late_final_static_field_stub(), pc) ||
          Code::ContainsInstructionAt(
              object_store->init_late_instance_field_stub(), pc) ||
          Code::ContainsInstructionAt(
              object_store->init_late_final_instance_field_stub(), pc)) {
        scan_next_dart_frame = true;
      }
    } else {
      // ...
      if (scan_next_dart_frame) {
        frame->VisitObjectPointers(&visitor);
      }
      scan_next_dart_frame = false;
    }
  }
}
void Thread::DeferredMarkLiveTemporaries() {
  RestoreWriteBarrierInvariant(
      RestoreWriteBarrierInvariantOp::kAddToDeferredMarkingStack);
}

void Thread::RememberLiveTemporaries() {
  RestoreWriteBarrierInvariant(
      RestoreWriteBarrierInvariantOp::kAddToRememberedSet);
}
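// The two wrappers map onto the GC phases that can invalidate the
// barrier-elimination assumption: DeferredMarkLiveTemporaries() re-scans
// unbarriered temporaries for the concurrent marker, and
// RememberLiveTemporaries() puts old-space temporaries of a suspended
// mutator back into the remembered set so they survive a scavenge.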
bool Thread::CanLoadFromThread(const Object& object) {
  // ...
  if (object.IsCode()) {
#define CHECK_OBJECT(type_name, member_name, expr, default_init_value)        \
  if (object.ptr() == expr) {                                                 \
    return true;                                                              \
  }
    CACHED_VM_STUBS_LIST(CHECK_OBJECT)
#undef CHECK_OBJECT
  }
#define CHECK_OBJECT(type_name, member_name, expr, default_init_value)        \
  if (object.ptr() == expr) {                                                 \
    return true;                                                              \
  }
  CACHED_NON_VM_STUB_LIST(CHECK_OBJECT)
#undef CHECK_OBJECT
  return false;
}
intptr_t Thread::OffsetFromThread(const Object& object) {
  // ...
  if (object.IsCode()) {
#define COMPUTE_OFFSET(type_name, member_name, expr, default_init_value)      \
  ASSERT((expr)->untag()->InVMIsolateHeap());                                 \
  if (object.ptr() == expr) {                                                 \
    return Thread::member_name##offset();                                     \
  }
    CACHED_VM_STUBS_LIST(COMPUTE_OFFSET)
#undef COMPUTE_OFFSET
  }
#define COMPUTE_OFFSET(type_name, member_name, expr, default_init_value)      \
  if (object.ptr() == expr) {                                                 \
    return Thread::member_name##offset();                                     \
  }
  CACHED_NON_VM_STUB_LIST(COMPUTE_OFFSET)
#undef COMPUTE_OFFSET

  UNREACHABLE();
  return -1;
}
bool Thread::ObjectAtOffset(intptr_t offset, Object* object) {
  if (Isolate::Current() == Dart::vm_isolate()) {
    // ...
    return false;
  }
#define COMPUTE_OFFSET(type_name, member_name, expr, default_init_value)      \
  if (Thread::member_name##offset() == offset) {                              \
    *object = expr;                                                           \
    return true;                                                              \
  }
  CACHED_VM_OBJECTS_LIST(COMPUTE_OFFSET)
#undef COMPUTE_OFFSET
  return false;
}
intptr_t Thread::OffsetFromThread(const RuntimeEntry* runtime_entry) {
#define COMPUTE_OFFSET(name)                                                  \
  if (runtime_entry == &k##name##RuntimeEntry) {                              \
    return Thread::name##_entry_point_offset();                               \
  }
  RUNTIME_ENTRY_LIST(COMPUTE_OFFSET)
#undef COMPUTE_OFFSET

#define COMPUTE_OFFSET(returntype, name, ...)                                 \
  if (runtime_entry == &k##name##RuntimeEntry) {                              \
    return Thread::name##_entry_point_offset();                               \
  }
  LEAF_RUNTIME_ENTRY_LIST(COMPUTE_OFFSET)
#undef COMPUTE_OFFSET

  UNREACHABLE();
  return -1;
}
bool Thread::TopErrorHandlerIsSetJump() const {
  if (long_jump_base() == nullptr) return false;
  if (top_exit_frame_info_ == 0) return true;
#if defined(USING_SIMULATOR) || defined(USING_SAFE_STACK)
  // On the simulator or with safestack the native and Dart stacks can be
  // discontiguous, so the pointer comparison below would be meaningless;
  // conservatively answer true.
  return true;
#else
  return reinterpret_cast<uword>(long_jump_base()) < top_exit_frame_info_;
#endif
}
bool Thread::TopErrorHandlerIsExitFrame() const {
  if (top_exit_frame_info_ == 0) return false;
  if (long_jump_base() == nullptr) return true;
#if defined(USING_SIMULATOR) || defined(USING_SAFE_STACK)
  // See the comment in TopErrorHandlerIsSetJump().
  return true;
#else
  return top_exit_frame_info_ < reinterpret_cast<uword>(long_jump_base());
#endif
}
bool Thread::IsValidHandle(Dart_Handle object) const {
  return IsValidLocalHandle(object) || IsValidZoneHandle(object) ||
         IsValidScopedHandle(object);
}
bool Thread::IsValidLocalHandle(Dart_Handle object) const {
  ApiLocalScope* scope = api_top_scope_;
  while (scope != nullptr) {
    if (scope->local_handles()->IsValidHandle(object)) return true;
    scope = scope->previous();
  }
  return false;
}

intptr_t Thread::CountLocalHandles() const {
  intptr_t total = 0;
  ApiLocalScope* scope = api_top_scope_;
  while (scope != nullptr) {
    total += scope->local_handles()->CountHandles();
    scope = scope->previous();
  }
  return total;
}

int Thread::ZoneSizeInBytes() const {
  int total = 0;
  ApiLocalScope* scope = api_top_scope_;
  while (scope != nullptr) {
    total += scope->zone()->SizeInBytes();
    scope = scope->previous();
  }
  return total;
}
void Thread::EnterApiScope() {
  ASSERT(MayAllocateHandles());
  ApiLocalScope* new_scope = api_reusable_scope();
  if (new_scope == nullptr) {
    new_scope = new ApiLocalScope(api_top_scope(), top_exit_frame_info());
    ASSERT(new_scope != nullptr);
  } else {
    new_scope->Reinit(this, api_top_scope(), top_exit_frame_info());
    set_api_reusable_scope(nullptr);
  }
  set_api_top_scope(new_scope);  // New scope is now the top scope.
}
void Thread::ExitApiScope() {
  ASSERT(MayAllocateHandles());
  ApiLocalScope* scope = api_top_scope();
  ApiLocalScope* reusable_scope = api_reusable_scope();
  set_api_top_scope(scope->previous());  // Reset top scope to previous.
  if (reusable_scope == nullptr) {
    scope->Reset(this);  // Reset the old scope which we just exited.
    set_api_reusable_scope(scope);
  } else {
    ASSERT(reusable_scope != scope);
    delete scope;
  }
}
void Thread::UnwindScopes(uword stack_marker) {
  // Unwind all scopes at and above the given stack marker.
  ApiLocalScope* scope = api_top_scope_;
  while (scope != nullptr && scope->stack_marker() != 0 &&
         scope->stack_marker() <= stack_marker) {
    api_top_scope_ = scope->previous();
    delete scope;
    scope = api_top_scope_;
  }
}
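// API scopes form a singly linked stack (see previous() above); each scope
// records the exit-frame marker that was current when it was entered, so
// UnwindScopes() can pop exactly the scopes created at or above a given
// native frame, e.g. when an error propagates past unmatched
// Dart_EnterScope() calls.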
void Thread::EnterSafepointUsingLock() {
  isolate_group()->safepoint_handler()->EnterSafepointUsingLock(this);
}

void Thread::ExitSafepointUsingLock() {
  isolate_group()->safepoint_handler()->ExitSafepointUsingLock(this);
}

void Thread::BlockForSafepoint() {
  isolate_group()->safepoint_handler()->BlockForSafepoint(this);
}
bool Thread::OwnsGCSafepoint() const {
  return isolate_group()->safepoint_handler()->InnermostSafepointOperation(
             this) /* ... */;
}

bool Thread::OwnsDeoptSafepoint() const {
  return isolate_group()->safepoint_handler()->InnermostSafepointOperation(
             this) /* ... */;
}

bool Thread::OwnsReloadSafepoint() const {
  return isolate_group()->safepoint_handler()->InnermostSafepointOperation(
             this) /* ... */;
}

bool Thread::OwnsSafepoint() const {
  return isolate_group()->safepoint_handler()->InnermostSafepointOperation(
             this) /* ... */;
}

bool Thread::CanAcquireSafepointLocks() const {
  // ...
  return isolate_group()->safepoint_handler()->InnermostSafepointOperation(
             this) /* ... */;
}
void Thread::SetupState(TaskKind kind) {
  task_kind_ = kind;
}

void Thread::ResetState() {
  task_kind_ = kUnknownTask;
  vm_tag_ = VMTag::kInvalidTagId;
}
void Thread::SetupMutatorState(TaskKind kind) {
  ASSERT(store_buffer_block_ == nullptr);

  if (isolate_group()->old_marking_stack() != nullptr) {
    ASSERT(isolate_group()->new_marking_stack() != nullptr);
    ASSERT(isolate_group()->deferred_marking_stack() != nullptr);
    // Concurrent marking in progress: enable the barrier for this thread.
    OldMarkingStackAcquire();
    NewMarkingStackAcquire();
    DeferredMarkingStackAcquire();
  }

  // ...
  if (kind == kMutatorTask) {
    StoreBufferAcquire();
  } else {
    store_buffer_block_ = isolate_group()->store_buffer()->PopEmptyBlock();
  }
}
void Thread::ResetMutatorState() {
  ASSERT(execution_state() == Thread::kThreadInVM);
  ASSERT(store_buffer_block_ != nullptr);

  if (is_marking()) {
    OldMarkingStackRelease();
    NewMarkingStackRelease();
    DeferredMarkingStackRelease();
  }
  StoreBufferRelease();
}
void Thread::SetupDartMutatorState(Isolate* isolate) {
  field_table_values_ = isolate->field_table_->table();
  isolate->mutator_thread_ = this;

  SetupDartMutatorStateDependingOnSnapshot(isolate->group());
}
void Thread::SetupDartMutatorStateDependingOnSnapshot(IsolateGroup* group) {
  // ...
#if defined(DART_PRECOMPILED_RUNTIME)
  auto object_store = group->object_store();
  if (object_store != nullptr) {
    global_object_pool_ = object_store->global_object_pool();

    auto dispatch_table = group->dispatch_table();
    if (dispatch_table != nullptr) {
      dispatch_table_array_ = dispatch_table->ArrayOrigin();
    }

#define INIT_ENTRY_POINT(name)                                                \
  if (object_store->name() != Object::null()) {                              \
    name##_entry_point_ = Function::EntryPointOf(object_store->name());      \
  }
    CACHED_FUNCTION_ENTRY_POINTS_LIST(INIT_ENTRY_POINT)
#undef INIT_ENTRY_POINT
  }
#endif  // defined(DART_PRECOMPILED_RUNTIME)

  shared_field_table_values_ = group->shared_field_table()->table();
}
void Thread::ResetDartMutatorState(Isolate* isolate) {
  ASSERT(execution_state() == Thread::kThreadInVM);

  isolate->mutator_thread_ = nullptr;
  is_unwind_in_progress_ = false;
  // ...
  field_table_values_ = nullptr;
  shared_field_table_values_ = nullptr;
  // ...
}
#if !defined(PRODUCT)
DisableThreadInterruptsScope::DisableThreadInterruptsScope(Thread* thread)
    : StackResource(thread) {
  if (thread != nullptr) {
    OSThread* os_thread = thread->os_thread();
    ASSERT(os_thread != nullptr);
    os_thread->DisableThreadInterrupts();
  }
}

DisableThreadInterruptsScope::~DisableThreadInterruptsScope() {
  if (thread() != nullptr) {
    OSThread* os_thread = thread()->os_thread();
    ASSERT(os_thread != nullptr);
    os_thread->EnableThreadInterrupts();
  }
}
#endif  // !defined(PRODUCT)
NoReloadScope::NoReloadScope(Thread* thread) : ThreadStackResource(thread) {
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
  thread->no_reload_scope_depth_++;
  ASSERT(thread->no_reload_scope_depth_ >= 0);
#endif
}

NoReloadScope::~NoReloadScope() {
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
  thread()->no_reload_scope_depth_ -= 1;
  ASSERT(thread()->no_reload_scope_depth_ >= 0);
  // ...
  if (thread()->no_reload_scope_depth_ == 0) {
    // ...
  }
#endif
}