#if !defined(DART_PRECOMPILED_RUNTIME)
#include "vm/deopt_instructions.h"
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
// …
  if (FLAG_precompiled_mode) {  /* … */ }
// …
#if defined(DART_PRECOMPILED_RUNTIME)
  if (FLAG_precompiled_mode) {  /* … */ }
#endif  // defined(DART_PRECOMPILED_RUNTIME)
// …

bool StackFrame::IsBareInstructionsDartFrame() const {
  if (!FLAG_precompiled_mode) {
    return false;
  }
  // … look up the Code object covering pc() …
  auto const cid = code.OwnerClassId();
  return cid == kFunctionCid;  // Dart frames: the Code is owned by a Function.
}

bool StackFrame::IsBareInstructionsStubFrame() const {
  if (!FLAG_precompiled_mode) {
    return false;
  }
  // … look up the Code object covering pc() …
  auto const cid = code.OwnerClassId();
  // … a null or Class owner marks a stub frame …
}

bool StackFrame::IsStubFrame() const {
  // …
  if (FLAG_precompiled_mode) {
    return IsBareInstructionsStubFrame();
  }
#if !defined(DART_HOST_OS_WINDOWS) && !defined(DART_HOST_OS_FUCHSIA)
  // On Windows and Fuchsia the profiler calls this from a separate thread
  // where Thread::Current() is nullptr, so no NoSafepointScope can be created.
  NoSafepointScope no_safepoint;
#endif
  CodePtr code = GetCodeObject();
  // …
}

const char* StackFrame::ToCString() const {
  // … prints "Cannot find code object" when no Code covers pc() …
}
void ExitFrame::VisitObjectPointers(ObjectPointerVisitor* visitor) {
  ASSERT(visitor != nullptr);
  // … compute the frame's fixed-object range from the frame layout …
  if (first_fixed <= last_fixed) {
    visitor->VisitPointers(first_fixed, last_fixed);
  }
  // …
}

void EntryFrame::VisitObjectPointers(ObjectPointerVisitor* visitor) {
  ASSERT(visitor != nullptr);
  // …
}

void StackFrame::VisitObjectPointers(ObjectPointerVisitor* visitor) {
  ASSERT(visitor != nullptr);
  // … handle declarations (code, maps, maps_payload, code_start,
  // global_table) …
  if (FLAG_precompiled_mode) {
    // Bare instructions frames carry no Code pc marker; locate the stack map
    // through the reverse-pc lookup tables instead.
    const UntaggedCompressedStackMaps::Payload* global_table_payload;
    maps_payload = ReversePc::FindStackMap(isolate_group(), pc(),
                                           /*is_return_address=*/true,
                                           &code_start, &global_table_payload);
    global_table = global_table_payload;
  } else {
    ObjectPtr pc_marker = *(reinterpret_cast<ObjectPtr*>(
        fp() + runtime_frame_layout.code_from_fp * kWordSize));
    if (pc_marker->IsHeapObject() && (pc_marker->GetClassId() == kCodeCid)) {
      code ^= pc_marker;
      code_start = code.PayloadStart();
      // …
      maps = code.compressed_stackmaps();
      // …
    }
  }
  // … build a stack-map iterator "it" over maps / global_table …
  const uint32_t pc_offset = pc() - code_start;
  if (it.Find(pc_offset)) {
    // … "first" starts at sp(), "last" just below fp() …
    const intptr_t spill_slot_count = it.SpillSlotBitCount();
    // Visit the spill slots, walking up from SP, …
    for (intptr_t bit = 0; bit < spill_slot_count; ++bit) {
      if (it.IsObject(bit)) visitor->VisitPointer(first);
      ++first;
    }
    // … then the remaining tagged slots, walking down from below FP.
    for (intptr_t bit = it.Length() - 1; bit >= spill_slot_count; --bit) {
      if (it.IsObject(bit)) visitor->VisitPointer(last);
      --last;
    }
    ASSERT((last + 1) >= first);
    // …
  }
  // …
  if (FLAG_precompiled_mode) {
    // Without a stack map, a bare-instructions frame must be parked at the
    // code's entry point.
    ASSERT(/* … */ ||
           (pc_offset == code.EntryPoint() - code.PayloadStart()));
  }
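// A self-contained sketch of the two-pass walk above (an assumption,
// simplified; VisitTaggedSlots and its parameters are hypothetical): bits
// [0, spill_slot_count) describe slots walked upward from SP, the remaining
// bits describe slots walked downward from just below FP, so the two cursors
// converge and (last + 1) >= first holds once both loops finish.
#include <cassert>
#include <cstdint>
#include <functional>
#include <vector>

static void VisitTaggedSlots(
    const std::vector<bool>& stack_map,  // one bit per stack slot
    intptr_t spill_slot_count,           // bits owned by the lower region
    uintptr_t* first,                    // lowest slot address (at SP)
    uintptr_t* last,                     // highest slot address (below FP)
    const std::function<void(uintptr_t*)>& visit) {
  for (intptr_t bit = 0; bit < spill_slot_count; ++bit) {
    if (stack_map[bit]) visit(first);
    ++first;
  }
  for (intptr_t bit = static_cast<intptr_t>(stack_map.size()) - 1;
       bit >= spill_slot_count; --bit) {
    if (stack_map[bit]) visit(last);
    --last;
  }
  assert(last + 1 >= first);  // The two regions never overlap.
}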
FunctionPtr StackFrame::LookupDartFunction() const {
  const Code& code = Code::Handle(LookupDartCode());
  if (!code.IsNull()) {
    const Object& owner = Object::Handle(code.owner());
    if (owner.IsFunction()) {
      return Function::Cast(owner).ptr();
    }
  }
  return Function::null();
}
CodePtr StackFrame::LookupDartCode() const {
#if !defined(DART_HOST_OS_WINDOWS) && !defined(DART_HOST_OS_FUCHSIA)
  // See IsStubFrame: the profiler may call this where no NoSafepointScope can
  // be created.
  NoSafepointScope no_safepoint;
#endif
  CodePtr code = GetCodeObject();
  // … return code only when its owner is a Function; otherwise Code::null() …
}
CodePtr StackFrame::GetCodeObject() const {
#if defined(DART_PRECOMPILED_RUNTIME)
  if (FLAG_precompiled_mode) {
    NoSafepointScope no_safepoint;
    CodePtr code = ReversePc::Lookup(isolate_group(), pc(),
                                     /*is_return_address=*/true);
    ASSERT(code != Code::null());
    return code;
  }
#endif  // defined(DART_PRECOMPILED_RUNTIME)
  ObjectPtr pc_marker = *(reinterpret_cast<ObjectPtr*>(
      fp() + runtime_frame_layout.code_from_fp * kWordSize));
  ASSERT((pc_marker == Object::null()) ||
         (pc_marker->GetClassId() == kCodeCid));
  return static_cast<CodePtr>(pc_marker);
}
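// Descriptive note (not in the original): shape of a non-bare Dart frame that
// the slot arithmetic above assumes. Exact slot indices come from FrameLayout
// and differ between the default and bare-instructions layouts; in the bare
// layout the pc marker and saved pool pointer slots do not exist, which is
// why GetCodeObject() falls back to ReversePc::Lookup there.
//
//   higher addresses
//   |  caller's frame ...   |
//   |  saved caller pc      |
//   |  saved caller fp      |  <- fp
//   |  pc marker (CodePtr)  |  <- fp + code_from_fp * kWordSize
//   |  saved pool pointer   |
//   |  locals / spills ...  |
//   v  lower addresses         <- sp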
bool StackFrame::FindExceptionHandler(Thread* thread,
                                      uword* handler_pc,
                                      bool* needs_stacktrace,
                                      bool* has_catch_all,
                                      bool* is_optimized) const {
  REUSABLE_CODE_HANDLESCOPE(thread);
  Code& code = reused_code_handle.Handle();
  code = LookupDartCode();
  if (code.IsNull()) {
    return false;  // Stub frames have no exception handlers.
  }
  HandlerInfoCache* cache = thread->isolate()->handler_info_cache();
  ExceptionHandlerInfo* info = cache->Lookup(pc());
  uword start = code.PayloadStart();
  REUSABLE_EXCEPTION_HANDLERS_HANDLESCOPE(thread);
  REUSABLE_PC_DESCRIPTORS_HANDLESCOPE(thread);
  ExceptionHandlers& handlers = reused_exception_handlers_handle.Handle();
  PcDescriptors& descriptors = reused_pc_descriptors_handle.Handle();
  handlers = code.exception_handlers();
  descriptors = code.pc_descriptors();
  *is_optimized = code.is_optimized();
  if (info != nullptr) {
    // Handler info for this pc is already cached.
    *handler_pc = start + info->handler_pc_offset;
    *needs_stacktrace = (info->needs_stacktrace != 0);
    *has_catch_all = (info->has_catch_all != 0);
    return true;
  }
  // …
  const uword pc_offset = pc() - code.PayloadStart();
  intptr_t try_index = -1;
  // … iterate the PC descriptors, looking for an entry at this pc …
  while (iter.MoveNext()) {
    const intptr_t current_try_index = iter.TryIndex();
    if ((iter.PcOffset() == pc_offset) && (current_try_index != -1)) {
      try_index = current_try_index;
      break;
    }
  }
  if (try_index == -1) {
    if (handlers.has_async_handler()) {
      *handler_pc = StubCode::AsyncExceptionHandler().EntryPoint();
      *needs_stacktrace = true;
      *has_catch_all = true;
      return true;
    }
    return false;
  }
  ExceptionHandlerInfo handler_info;
  handlers.GetHandlerInfo(try_index, &handler_info);
  // … fill the out-parameters from handler_info …
  cache->Insert(pc(), handler_info);
  return true;
}
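// Usage sketch (an assumption about how an unwinder would drive this API;
// FindHandlerPc is hypothetical): probe each Dart frame on the way out until
// one reports a handler.
static bool FindHandlerPc(Thread* thread, uword* handler_pc) {
  StackFrameIterator frames(ValidationPolicy::kDontValidateFrames, thread,
                            StackFrameIterator::kNoCrossThreadIteration);
  for (StackFrame* frame = frames.NextFrame(); frame != nullptr;
       frame = frames.NextFrame()) {
    bool needs_stacktrace = false, has_catch_all = false, is_optimized = false;
    if (frame->IsDartFrame() &&
        frame->FindExceptionHandler(thread, handler_pc, &needs_stacktrace,
                                    &has_catch_all, &is_optimized)) {
      return true;
    }
  }
  return false;
}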
TokenPosition StackFrame::GetTokenPos() const {
  const Code& code = Code::Handle(LookupDartCode());
  if (code.IsNull()) {
    return TokenPosition::kNoSource;  // Stub frames have no token position.
  }
  // … scan the PC descriptors for pc(); if no entry matches:
  return TokenPosition::kNoSource;
}
void StackFrame::DumpCurrentTrace() {
  StackFrameIterator frames(ValidationPolicy::kDontValidateFrames,
                            Thread::Current(),
                            StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame = frames.NextFrame();
  while (frame != nullptr) {
    OS::PrintErr("%s\n", frame->ToCString());
    frame = frames.NextFrame();
  }
}
void StackFrameIterator::SetupLastExitFrameData() {
  ASSERT(thread_ != nullptr);
  uword exit_marker = thread_->top_exit_frame_info();
  frames_.fp_ = exit_marker;
  frames_.sp_ = 0;
  frames_.pc_ = 0;
  frames_.Unpoison();
}
void StackFrameIterator::SetupNextExitFrameData() {
  uword exit_address = entry_.fp() + (kExitLinkSlotFromEntryFp * kWordSize);
  uword exit_marker = *reinterpret_cast<uword*>(exit_address);
  frames_.fp_ = exit_marker;
  frames_.sp_ = 0;
  frames_.pc_ = 0;
  frames_.Unpoison();
}
StackFrameIterator::StackFrameIterator(ValidationPolicy validation_policy,
                                       Thread* thread,
                                       CrossThreadPolicy cross_thread_policy)
    : /* … */ current_frame_(nullptr), thread_(thread) {
  // …
  SetupLastExitFrameData();  // Seed the walk at the thread's last exit frame.
}

StackFrameIterator::StackFrameIterator(uword last_fp /* … */)
    : /* … */ current_frame_(nullptr), thread_(thread) {
  // …
  frames_.fp_ = last_fp;
  // …
}

// A third constructor starts from an explicit (fp, sp, pc) triple.
StackFrameIterator::StackFrameIterator(/* uword fp, uword sp, uword pc, … */)
    : /* … */ current_frame_(nullptr) /* … */ {
  // …
}

StackFrameIterator::StackFrameIterator(const StackFrameIterator& orig)
    : validate_(orig.validate_),
      entry_(orig.thread_),
      exit_(orig.thread_),
      frames_(orig.thread_),
      current_frame_(nullptr),
      thread_(orig.thread_) {
  frames_.fp_ = orig.frames_.fp_;
  frames_.sp_ = orig.frames_.sp_;
  frames_.pc_ = orig.frames_.pc_;
  // …
}
StackFrame* StackFrameIterator::NextFrame() {
  // Before the first frame is produced, current_frame_ is nullptr and the
  // (fp, sp, pc) triple in frames_ decides what kind of frame comes first.
  if (current_frame_ == nullptr) {
    if (!HasNextFrame()) {
      return nullptr;
    }
    if (frames_.pc_ == 0) {
      // Iteration starts from an exit marker.
      current_frame_ = NextExitFrame();
    } else if (*(reinterpret_cast<uword*>(
                   frames_.fp_ + (kSavedCallerFpSlotFromFp * kWordSize))) ==
               0) {
      // A zero saved-caller-fp marks an entry frame.
      current_frame_ = NextEntryFrame();
    } else {
      // Otherwise it is a Dart or stub frame.
      current_frame_ = frames_.NextFrame(validate_);
    }
    return current_frame_;
  }
  ASSERT(current_frame_ != nullptr);
  if (current_frame_->IsEntryFrame()) {
    if (HasNextFrame()) {
      // There is a next exit frame in the chain.
      current_frame_ = NextExitFrame();
      return current_frame_;
    }
    // No more frames to iterate.
    current_frame_ = nullptr;
    return current_frame_;
  }
  current_frame_ =
      (frames_.HasNext()) ? frames_.NextFrame(validate_) : NextEntryFrame();
  return current_frame_;
}
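// Descriptive note (not in the original): the resulting walk order is
//
//   exit frame -> Dart/stub frames … -> entry frame -> next exit frame -> …
//
// Each entry frame reseeds frames_ from its exit link slot
// (SetupNextExitFrameData), which is how the walk crosses from one
// C++-to-Dart invocation into the frames of the previous one.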
void StackFrameIterator::FrameSetIterator::Unpoison() {
  // Under MSAN the stack between SP and FP may be poisoned, so unpoison the
  // region this iterator is about to read.
#if !defined(USING_SIMULATOR)
  if (fp_ == 0) return;
  // Note: os_thread() is cleared while the thread is descheduled.
  ASSERT((thread_->os_thread() == nullptr) ||
         ((thread_->os_thread()->stack_limit() < fp_) &&
          (thread_->os_thread()->stack_base() > fp_)));
  // … MSAN_UNPOISON the slots between the stack pointer and fp_ …
#endif  // !defined(USING_SIMULATOR)
}
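// A sketch of what MSAN_UNPOISON amounts to (an assumption; the VM defines it
// in its own platform headers): a no-op unless the build runs under
// MemorySanitizer.
#ifndef __has_feature
#define __has_feature(x) 0
#endif
#if __has_feature(memory_sanitizer)
#include <sanitizer/msan_interface.h>
#define MSAN_UNPOISON(ptr, len) __msan_unpoison(ptr, len)
#else
#define MSAN_UNPOISON(ptr, len)
#endif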
StackFrame* StackFrameIterator::FrameSetIterator::NextFrame(bool validate) {
  StackFrame* frame;
  ASSERT(HasNext());
  frame = &stack_frame_;
  frame->sp_ = sp_;
  frame->fp_ = fp_;
  frame->pc_ = pc_;
  // Step to the caller's (sp, fp, pc) triple.
  sp_ = frame->GetCallerSp();
  fp_ = frame->GetCallerFp();
  pc_ = frame->GetCallerPc();
  Unpoison();
  ASSERT(!validate || frame->IsValid());
  return frame;
}
ExitFrame* StackFrameIterator::NextExitFrame() {
  exit_.sp_ = frames_.sp_;
  exit_.fp_ = frames_.fp_;
  exit_.pc_ = frames_.pc_;
  frames_.sp_ = exit_.GetCallerSp();
  frames_.fp_ = exit_.GetCallerFp();
  frames_.pc_ = exit_.GetCallerPc();
  frames_.Unpoison();
  return &exit_;
}
EntryFrame* StackFrameIterator::NextEntryFrame() {
  ASSERT(!frames_.HasNext());
  entry_.sp_ = frames_.sp_;
  entry_.fp_ = frames_.fp_;
  entry_.pc_ = frames_.pc_;
  SetupNextExitFrameData();  // Reseed frames_ from the entry frame's exit link.
  return &entry_;
}
InlinedFunctionsIterator::InlinedFunctionsIterator(const Code& code, uword pc)
    : index_(0),
      num_materializations_(0),
      /* … */
      deopt_instructions_(),
      /* … */ {
#if defined(DART_PRECOMPILED_RUNTIME)
  // Precompiled code keeps no deopt info; only the outer function is visited.
#else
  ICData::DeoptReasonId deopt_reason = ICData::kDeoptUnknown;
  uint32_t deopt_flags = 0;
  deopt_info_ = code_.GetDeoptInfoAtPc(pc, &deopt_reason, &deopt_flags);
  if (deopt_info_.IsNull()) {
    // Not a deopt point: there are no inlined frames at this pc. …
  } else {
    // … unpack the deopt instructions and Advance() to the first frame …
  }
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}

void InlinedFunctionsIterator::Advance() {
#if defined(DART_PRECOMPILED_RUNTIME)
  // …
#else
  if (deopt_info_.IsNull()) { /* … */ }
  ASSERT(deopt_instructions_.length() != 0);
  while (index_ < deopt_instructions_.length()) {
    DeoptInstr* deopt_instr = deopt_instructions_[index_++];
    // … stop at the kRetAddress instruction of the next inlined frame …
  }
#endif
}

#if !defined(DART_PRECOMPILED_RUNTIME)
intptr_t InlinedFunctionsIterator::GetDeoptFpOffset() const {
  ASSERT(deopt_instructions_.length() != 0);
  for (intptr_t index = index_; index < deopt_instructions_.length(); index++) {
    DeoptInstr* deopt_instr = deopt_instructions_[index];
    // … return the frame offset of the kCallerFp instruction …
  }
  // …
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
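// Usage sketch (an assumption; PrintInlinedFunctions is hypothetical and the
// iterator is assumed to expose Done() alongside Advance() and function()):
// expanding one optimized frame into the functions inlined at its pc.
static void PrintInlinedFunctions(const Code& code, uword pc) {
  InlinedFunctionsIterator it(code, pc);
  while (!it.Done()) {
    const Function& function = Function::Handle(it.function());
    OS::PrintErr("  inlined: %s\n", function.ToCString());
    it.Advance();
  }
}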
void ValidateFrames() {
  StackFrameIterator frames(ValidationPolicy::kValidateFrames,
                            Thread::Current(),
                            StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* frame = frames.NextFrame();
  while (frame != nullptr) {
    frame = frames.NextFrame();
  }
}