          FLAG_check_token_positions && (inline_id_to_function != nullptr)
              ? inline_id_to_function->At(0)->ptr()
      encoded_data_(zone, kInitialStreamSize),
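// DescriptorList::AddDescriptor: delta-encodes one PC descriptor entry
// (kind/try-index/yield-index metadata and PC offset; plus deopt id and
// token position outside of precompiled mode) into encoded_data_.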
                                   intptr_t yield_index) {
  ASSERT((kind == UntaggedPcDescriptors::kRuntimeCall) ||
         (kind == UntaggedPcDescriptors::kBSSRelocation) ||
         (kind == UntaggedPcDescriptors::kOther) ||

  if (!FLAG_precompiled_mode || try_index != -1 ||
      kind == UntaggedPcDescriptors::kBSSRelocation) {
    const int32_t kind_and_metadata =
    prev_pc_offset = pc_offset;

    if (!FLAG_precompiled_mode) {
      if (FLAG_check_token_positions && token_pos.IsReal()) {
          FATAL("Token position %s for PC descriptor %s at offset 0x%" Px
                " invalid for function %s (%s, %s)",
          FATAL("Token position %s for PC descriptor %s at offset 0x%" Px
                " invalid for script %s of function %s",
      const int32_t encoded_pos = token_pos.Serialize();
      prev_deopt_id = deopt_id;
      prev_token_pos = encoded_pos;
    return Object::empty_descriptors().ptr();
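// CompressedStackMapsBuilder::AddEntry: encodes one stack map entry as a PC
// delta, LEB128-encoded bit counts, and the raw bitmap bytes.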
                                          intptr_t spill_slot_bit_count) {
  ASSERT(pc_offset > last_pc_offset_);
  ASSERT(spill_slot_bit_count >= 0 &&
         spill_slot_bit_count <= bitmap->Length());
  const uword pc_delta = pc_offset - last_pc_offset_;
  const uword non_spill_slot_bit_count =
      bitmap->Length() - spill_slot_bit_count;
  encoded_bytes_.WriteLEB128(non_spill_slot_bit_count);
  bitmap->AppendAsBytesTo(&encoded_bytes_);
  last_pc_offset_ = pc_offset;
    return Object::empty_compressed_stackmaps().ptr();
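// ExceptionHandlerList::FinalizeExceptionHandlers: materializes the collected
// handler entries into an ExceptionHandlers object; with no handlers, the
// canonical empty (async or sync) table is returned instead.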
    uword entry_point) const {
  intptr_t num_handlers = Length();
  if (num_handlers == 0) {
    return has_async_handler_ ? Object::empty_async_exception_handlers().ptr()
                              : Object::empty_exception_handlers().ptr();
  for (intptr_t i = 0; i < num_handlers; i++) {
    if (list_[i].handler_types == nullptr) {
      const bool has_catch_all = false;
      ASSERT((list_[i].outer_try_index == -1) &&
                              list_[i].needs_stacktrace, has_catch_all,
                              list_[i].is_generated);
                              list_[i].needs_stacktrace, has_catch_all,
                              list_[i].is_generated);
  return handlers.ptr();
#if !defined(DART_PRECOMPILED_RUNTIME)
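// CatchEntryMovesMapBuilder::TrieNode: a trie over CatchEntryMove sequences,
// used to share common suffixes between catch-entry mappings.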
      : move_(move), entry_state_offset_(index) {}

  intptr_t Offset() { return entry_state_offset_; }

    for (intptr_t i = 0; i < children_.length(); i++) {
      if (children_[i]->move_ == next) return children_[i];

  const intptr_t entry_state_offset_;
    : zone_(Thread::Current()->zone()),
      current_pc_offset_(0),
      stream_(zone_, 64) {}

  current_pc_offset_ = pc_offset;
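// CatchEntryMovesMapBuilder::EndMapping: finds the longest suffix of the
// current move sequence already present in the trie, then writes the PC
// offset, lengths, shared-suffix offset, and the remaining moves.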
  intptr_t suffix_length = 0;
  for (intptr_t i = moves_.length() - 1; i >= 0; i--) {
    if (n == nullptr) break;
  intptr_t length = moves_.length() - suffix_length;

  Writer::Write(&stream_, current_pc_offset_);
  Writer::Write(&stream_, length);
  Writer::Write(&stream_, suffix_length);
  Writer::Write(&stream_, suffix->Offset());

  for (intptr_t i = length - 1; i >= 0; i--) {
    moves_[i].WriteTo(&stream_);
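// CatchEntryMovesMapBuilder::FinalizeCatchEntryMovesMap: copies the encoded
// byte stream into a TypedData payload.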
  uint8_t* dest = reinterpret_cast<uint8_t*>(td.DataAddr(0));
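// CodeSourceMapOps::Read/Write: each op is packed into an int32 together with
// a bounded signed argument; a second int32 argument is read and written in
// DART_PRECOMPILER builds.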
  const int32_t n = stream->Read<int32_t>();
  if (*arg1 > kMaxArgValue) {
#if defined(DART_PRECOMPILER)
    const int32_t m = stream->Read<int32_t>();
    if (arg2 != nullptr) {

  ASSERT(arg1 >= kMinArgValue && arg1 <= kMaxArgValue);
#if defined(DART_PRECOMPILER)

    TokenPosition::kDartCodePrologue;
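// CodeSourceMapBuilder keeps a "buffered" and a "written" view of the current
// inlining stack, token position, and PC offset; FlushBuffer emits only the
// deltas between the two into the code source map stream.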
    bool stack_traces_only,
      buffered_pc_offset_(0),
      buffered_inline_id_stack_(),
      buffered_token_pos_stack_(),
      written_pc_offset_(0),
      written_inline_id_stack_(),
      written_token_pos_stack_(),
      caller_inline_id_(caller_inline_id),
      inline_id_to_token_pos_(inline_id_to_token_pos),
      inline_id_to_function_(inline_id_to_function),
      stack_traces_only_(stack_traces_only) {
  buffered_inline_id_stack_.Add(0);
  written_inline_id_stack_.Add(0);
void CodeSourceMapBuilder::FlushBuffer() {

  intptr_t common_index;
  for (common_index = buffered_inline_id_stack_.length() - 1;
       common_index >= 0;
    intptr_t buffered_id = buffered_inline_id_stack_[common_index];
    if (common_index < written_inline_id_stack_.length()) {
      intptr_t written_id = written_inline_id_stack_[common_index];
      if (buffered_id == written_id) {
  if (common_index < 0) {
  while (written_inline_id_stack_.length() > common_index + 1) {
  for (intptr_t j = common_index + 1; j < buffered_inline_id_stack_.length();
    const auto& buffered_pos = buffered_token_pos_stack_[j - 1];
    const auto& written_pos = written_token_pos_stack_[j - 1];
    if (buffered_pos != written_pos) {
      WriteChangePosition(buffered_pos);
    WritePush(buffered_inline_id_stack_[j]);
         written_token_pos_stack_.length());

  intptr_t top = buffered_token_pos_stack_.length() - 1;
  const auto& buffered_pos = buffered_token_pos_stack_[top];
  const auto& written_pos = written_token_pos_stack_[top];
  if (buffered_pos != written_pos) {
    WriteChangePosition(buffered_pos);

  if (buffered_pc_offset_ != written_pc_offset_) {
    WriteAdvancePC(buffered_pc_offset_ - written_pc_offset_);
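// CodeSourceMapBuilder::StartInliningInterval: brings the buffered inlining
// stack in sync with source.inlining_id by popping to the nearest common
// ancestor and then pushing the intervening callee ids.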
void CodeSourceMapBuilder::StartInliningInterval(
    const InstructionSource& source) {
  if (!source.token_pos.IsReal() && !source.token_pos.IsSynthetic()) {
  if (buffered_inline_id_stack_.Last() == source.inlining_id) {
  if (source.inlining_id < 0) {
  if (!stack_traces_only_) {

  intptr_t common_parent = source.inlining_id;
  while (!IsOnBufferedStack(common_parent)) {
    common_parent = caller_inline_id_[common_parent];
  while (buffered_inline_id_stack_.Last() != common_parent) {
  GrowableArray<intptr_t> to_push;
  for (intptr_t id = source.inlining_id; id != common_parent;
       id = caller_inline_id_[id]) {
  for (intptr_t i = to_push.length() - 1; i >= 0; i--) {
    intptr_t callee_id = to_push[i];
    BufferChangePosition(inline_id_to_token_pos_[callee_id - 1]);
    BufferPush(callee_id);
  if (FLAG_check_token_positions) {
    script_ = inline_id_to_function_[source.inlining_id]->script();
  ASSERT(written_pc_offset_ == 0 && buffered_pc_offset_ == 0);
  WriteChangePosition(source.token_pos);

  StartInliningInterval(pc_offset, source);

  if (pc_offset == buffered_pc_offset_) {
  StartInliningInterval(pc_offset, source);
  if (source.token_pos != buffered_token_pos_stack_.Last()) {
    if (!stack_traces_only_) {
    BufferChangePosition(source.token_pos);
  BufferAdvancePC(pc_offset - buffered_pc_offset_);
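// CodeSourceMapBuilder::NoteDescriptor: for descriptor kinds that can throw,
// records the current token position and PC so stack traces resolve to the
// right source location.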
  const uint8_t kCanThrow =
      UntaggedPcDescriptors::kIcCall | UntaggedPcDescriptors::kUnoptStaticCall |
      UntaggedPcDescriptors::kRuntimeCall | UntaggedPcDescriptors::kOther;
  if ((kind & kCanThrow) != 0) {
    StartInliningInterval(pc_offset, source);
    BufferChangePosition(source.token_pos);
    BufferAdvancePC(pc_offset - buffered_pc_offset_);

                                         intptr_t name_index) {
  StartInliningInterval(pc_offset, source);
  BufferChangePosition(source.token_pos);
  BufferAdvancePC(pc_offset - buffered_pc_offset_);
  WriteNullCheck(name_index);
intptr_t CodeSourceMapBuilder::GetFunctionId(intptr_t inline_id) {
  for (intptr_t i = 0; i < inlined_functions_.Length(); i++) {
  return inlined_functions_.Length() - 1;

  intptr_t id = source.inlining_id;
  while (caller_inline_id_[id] != 0) {
    id = caller_inline_id_[id];
  return inline_id_to_token_pos_[id - 1];

  if (inlined_functions_.Length() == 0) {
    return Object::empty_array().ptr();
  if (!stack_traces_only_) {
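// CodeSourceMapBuilder::BufferChangePosition: when FLAG_check_token_positions
// is set, verifies that a real position lies within the current function and
// is valid for its script before recording it on the buffered stack.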
  if (FLAG_check_token_positions && pos.IsReal()) {
    const intptr_t inline_id = buffered_inline_id_stack_.Last();
    const auto& function = *inline_id_to_function_[inline_id];
    if (function.end_token_pos().IsReal() &&
      buffer.Printf("Token position %s is invalid for function %s (%s, %s)",
                    pos.ToCString(), function.ToFullyQualifiedCString(),
                    function.end_token_pos().ToCString());
        buffer.Printf(" while compiling function %s",
                      inline_id_to_function_[0]->ToFullyQualifiedCString());
      buffer.Printf("Token position %s is invalid for script %s of function %s",
                    function.ToFullyQualifiedCString());
      if (inline_id != 0) {
        buffer.Printf(" inlined into function %s",
                      inline_id_to_function_[0]->ToFullyQualifiedCString());
  buffered_token_pos_stack_.Last() = pos;
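// CodeSourceMapBuilder::WriteChangePosition: emits a position change as a
// delta from the last written position; in precompiled mode the delta is
// computed on line numbers resolved through the function's script.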
void CodeSourceMapBuilder::WriteChangePosition(const TokenPosition pos) {
  const TokenPosition& last_written = written_token_pos_stack_.Last();
  intptr_t position_or_line =
  intptr_t column = TokenPosition::kNoSource.Serialize();
#if defined(DART_PRECOMPILER)
  if (FLAG_precompiled_mode) {
    position_or_line = TokenPosition::kNoSource.Serialize();
    const intptr_t inline_id = written_inline_id_stack_.Last();
    ASSERT(inline_id < inline_id_to_function_.length());
    script_ = inline_id_to_function_[inline_id]->script();
    intptr_t old_line = TokenPosition::kNoSource.Serialize();
        Utils::SubWithWrapAround<int32_t>(position_or_line, old_line);
                          position_or_line, column);
  written_token_pos_stack_.Last() = pos;
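// CodeSourceMapReader::GetInlinedFunctionsAt: replays the code source map up
// to pc_offset, reconstructing the inlined-function stack and the token
// position active in each frame.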
  function_stack->Clear();
  token_positions->Clear();

  int32_t current_pc_offset = 0;
  function_stack->Add(&root_);
  token_positions->Add(InitialPosition());

  while (stream.PendingBytes() > 0) {
        current_pc_offset += arg;
        if (current_pc_offset > pc_offset) {
        token_positions->Add(InitialPosition());
void CodeSourceMapReader::PrintJSONInlineIntervals(JSONObject* jsobj) {
    JSONArray inlined_functions(jsobj, "_inlinedFunctions");
    for (intptr_t i = 0; i < functions_.Length(); i++) {
      inlined_functions.AddValue(function);

  GrowableArray<intptr_t> function_stack;
  JSONArray inline_intervals(jsobj, "_inlinedIntervals");
  NoSafepointScope no_safepoint;

  int32_t current_pc_offset = 0;
  function_stack.Add(0);

  while (stream.PendingBytes() > 0) {
        JSONArray inline_interval(&inline_intervals);
        inline_interval.AddValue(static_cast<intptr_t>(current_pc_offset));
        inline_interval.AddValue(
            static_cast<intptr_t>(current_pc_offset + arg - 1));
        for (intptr_t i = 0; i < function_stack.length(); i++) {
          inline_interval.AddValue(function_stack[i]);
        current_pc_offset += arg;
        function_stack.Add(arg);
        ASSERT(function_stack.length() > 1);
        function_stack.RemoveLast();
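// CodeSourceMapReader::DumpInlineIntervals / DumpSourcePositions: debugging
// helpers that replay the map and THR_Print each PC range together with the
// inlining stack (and token positions) active in that range.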
  int32_t current_pc_offset = 0;
  function_stack.Add(&root_);
  token_positions.Add(InitialPosition());

  THR_Print("Inline intervals for function '%s' {\n",
  while (stream.PendingBytes() > 0) {
                  start + current_pc_offset + arg - 1);
        for (intptr_t i = 0; i < function_stack.length(); i++) {
          THR_Print("%s", function_stack[i]->ToCString());
          if (token_positions[i].IsReal()) {
        current_pc_offset += arg;
        token_positions.Add(InitialPosition());
  int32_t current_pc_offset = 0;
  function_stack.Add(&root_);
  token_positions.Add(InitialPosition());

  THR_Print("Source positions for function '%s' {\n",
  while (stream.PendingBytes() > 0) {
            token_positions[token_positions.length() - 1];
        token_positions[token_positions.length() - 1] =
                  start + current_pc_offset + arg - 1);
        for (intptr_t i = 0; i < function_stack.length(); i++) {
          THR_Print("%s@%s", function_stack[i]->ToCString(),
                    token_positions[i].ToCString());
        current_pc_offset += arg;
        token_positions.Add(InitialPosition());
                  start + current_pc_offset, start + current_pc_offset, arg);
  int32_t current_pc_offset = 0;

  while (stream.PendingBytes() > 0) {
        current_pc_offset += arg;
        if (current_pc_offset == pc_offset) {