Flutter Engine
The Flutter Engine
Loading...
Searching...
No Matches
profiler_service.cc
Go to the documentation of this file.
1// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
6
7#include <memory>
8
10#include "vm/growable_array.h"
11#include "vm/hash_map.h"
12#include "vm/heap/safepoint.h"
13#include "vm/json_stream.h"
14#include "vm/log.h"
15#include "vm/native_symbol.h"
16#include "vm/object.h"
17#include "vm/os.h"
18#include "vm/profiler.h"
19#include "vm/reusable_handles.h"
20#include "vm/scope_timer.h"
21#include "vm/service.h"
22#include "vm/service_event.h"
23#include "vm/timeline.h"
24
25#if defined(SUPPORT_PERFETTO) && !defined(PRODUCT)
26#include "perfetto/ext/tracing/core/trace_packet.h"
27#include "perfetto/protozero/scattered_heap_buffer.h"
28#include "vm/perfetto_utils.h"
34#endif // defined(SUPPORT_PERFETTO) && !defined(PRODUCT)
35
36namespace dart {
37
38DECLARE_FLAG(int, max_profile_depth);
39DECLARE_FLAG(int, profile_period);
40DECLARE_FLAG(bool, profile_vm);
41
42#ifndef PRODUCT
43
45 TokenPosition token_pos)
46 : token_pos_(token_pos), exclusive_ticks_(0), inclusive_ticks_(0) {}
47
49 if (exclusive) {
50 exclusive_ticks_++;
51 } else {
52 inclusive_ticks_++;
53 }
54}
55
57 const char* name,
58 const Function& function,
59 const intptr_t table_index)
60 : kind_(kind),
61 name_(name),
62 function_(Function::ZoneHandle(function.ptr())),
63 table_index_(table_index),
64 profile_codes_(0),
65 source_position_ticks_(0),
66 exclusive_ticks_(0),
67 inclusive_ticks_(0),
68 inclusive_serial_(-1) {
69 ASSERT((kind_ != kDartFunction) || !function_.IsNull());
70 ASSERT((kind_ != kDartFunction) || (table_index_ >= 0));
71 ASSERT(profile_codes_.length() == 0);
72}
73
74const char* ProfileFunction::Name() const {
75 if (name_ != nullptr) {
76 return name_;
77 }
78 ASSERT(!function_.IsNull());
79 const String& func_name =
81 return func_name.ToCString();
82}
83
85 if (function_.IsNull()) {
86 return nullptr;
87 }
88 const Script& script = Script::Handle(function_.script());
89 if (script.IsNull()) {
90 return nullptr;
91 }
92 const String& uri = String::Handle(script.resolved_url());
93 if (uri.IsNull()) {
94 return nullptr;
95 }
96 return uri.ToCString();
97}
98
100 if (function_.IsNull()) {
101 // Some synthetic function.
102 return true;
103 }
104 return FLAG_show_invisible_frames || function_.is_visible();
105}
106
107void ProfileFunction::Tick(bool exclusive,
108 intptr_t inclusive_serial,
109 TokenPosition token_position) {
110 if (exclusive) {
111 exclusive_ticks_++;
112 TickSourcePosition(token_position, exclusive);
113 }
114 // Fall through and tick inclusive count too.
115 if (inclusive_serial_ == inclusive_serial) {
116 // Already ticked.
117 return;
118 }
119 inclusive_serial_ = inclusive_serial;
120 inclusive_ticks_++;
121 TickSourcePosition(token_position, false);
122}
123
125 bool exclusive) {
126 intptr_t i = 0;
127 for (; i < source_position_ticks_.length(); i++) {
128 ProfileFunctionSourcePosition& position = source_position_ticks_[i];
129 const intptr_t cmp =
130 TokenPosition::CompareForSorting(position.token_pos(), token_position);
131 if (cmp > 0) {
132 // Found insertion point.
133 break;
134 } else if (cmp == 0) {
135 if (FLAG_trace_profiler_verbose) {
136 OS::PrintErr("Ticking source position %s %s\n",
137 exclusive ? "exclusive" : "inclusive",
138 token_position.ToCString());
139 }
140 // Found existing position, tick it.
141 position.Tick(exclusive);
142 return;
143 }
144 }
145
146 // Add new one, sorted by token position value.
147 ProfileFunctionSourcePosition pfsp(token_position);
148 if (FLAG_trace_profiler_verbose) {
149 OS::PrintErr("Ticking source position %s %s\n",
150 exclusive ? "exclusive" : "inclusive",
151 token_position.ToCString());
152 }
153 pfsp.Tick(exclusive);
154
155 if (i < source_position_ticks_.length()) {
156 source_position_ticks_.InsertAt(i, pfsp);
157 } else {
158 source_position_ticks_.Add(pfsp);
159 }
160}
161
163 switch (kind) {
164 case kDartFunction:
165 return "Dart";
166 case kNativeFunction:
167 return "Native";
168 case kTagFunction:
169 return "Tag";
170 case kStubFunction:
171 return "Stub";
172 case kUnknownFunction:
173 return "Collected";
174 default:
176 return "";
177 }
178}
179
180void ProfileFunction::PrintToJSONObject(JSONObject* func) {
181 func->AddProperty("type", "NativeFunction");
182 func->AddProperty("name", name());
183 func->AddProperty("_kind", KindToCString(kind()));
184}
185
187 bool print_only_ids) {
188 if (print_only_ids) {
189 JSONObject obj(functions);
190 if (kind() == kDartFunction) {
191 ASSERT(!function_.IsNull());
192 obj.AddProperty("type", "@Object");
193 function_.AddFunctionServiceId(obj);
194 } else {
195 PrintToJSONObject(&obj);
196 }
197 return;
198 }
199 JSONObject obj(functions);
200 obj.AddProperty("type", "ProfileFunction");
201 obj.AddProperty("kind", KindToCString(kind()));
202 obj.AddProperty("inclusiveTicks", inclusive_ticks());
203 obj.AddProperty("exclusiveTicks", exclusive_ticks());
204 obj.AddProperty("resolvedUrl", ResolvedScriptUrl());
205 if (kind() == kDartFunction) {
206 ASSERT(!function_.IsNull());
207 obj.AddProperty("function", function_);
208 } else {
209 JSONObject func(&obj, "function");
210 PrintToJSONObject(&func);
211 }
212 {
213 JSONArray codes(&obj, "_codes");
214 for (intptr_t i = 0; i < profile_codes_.length(); i++) {
215 intptr_t code_index = profile_codes_[i];
216 codes.AddValue(code_index);
217 }
218 }
219}
220
221void ProfileFunction::AddProfileCode(intptr_t code_table_index) {
222 for (intptr_t i = 0; i < profile_codes_.length(); i++) {
223 if (profile_codes_[i] == code_table_index) {
224 return;
225 }
226 }
227 profile_codes_.Add(code_table_index);
228}
229
231 if (pfsp == nullptr) {
232 return false;
233 }
234 if (source_position_ticks_.length() != 1) {
235 return false;
236 }
237 *pfsp = source_position_ticks_[0];
238 return true;
239}
240
242 : pc_(pc), exclusive_ticks_(0), inclusive_ticks_(0) {}
243
244void ProfileCodeAddress::Tick(bool exclusive) {
245 if (exclusive) {
246 exclusive_ticks_++;
247 } else {
248 inclusive_ticks_++;
249 }
250}
251
253 uword start,
254 uword end,
255 int64_t timestamp,
256 const AbstractCode code)
257 : kind_(kind),
258 start_(start),
259 end_(end),
260 exclusive_ticks_(0),
261 inclusive_ticks_(0),
262 inclusive_serial_(-1),
263 code_(code),
264 name_(nullptr),
265 compile_timestamp_(0),
266 function_(nullptr),
267 code_table_index_(-1),
268 address_ticks_(0) {
269 ASSERT(start_ < end_);
270}
271
273 if (start > start_) {
274 start_ = start;
275 }
276 ASSERT(start_ < end_);
277}
278
280 if (end < end_) {
281 end_ = end;
282 }
283 ASSERT(start_ < end_);
284}
285
287 if (start < start_) {
288 start_ = start;
289 }
290 ASSERT(start_ < end_);
291}
292
294 if (end > end_) {
295 end_ = end;
296 }
297 ASSERT(start_ < end_);
298}
299
300bool ProfileCode::Overlaps(const ProfileCode* other) const {
301 ASSERT(other != nullptr);
302 return other->Contains(start_) || other->Contains(end_ - 1) ||
303 Contains(other->start()) || Contains(other->end() - 1);
304}
305
307 return !code_.IsNull() && code_.is_optimized();
308}
309
310void ProfileCode::SetName(const char* name) {
311 if (name == nullptr) {
312 name_ = nullptr;
313 }
314 intptr_t len = strlen(name) + 1;
315 name_ = Thread::Current()->zone()->Alloc<char>(len);
316 strncpy(name_, name, len);
317}
318
320 const intptr_t kBuffSize = 512;
321 char buff[kBuffSize];
322 Utils::SNPrint(&buff[0], kBuffSize - 1, "%s [%" Px ", %" Px ")", prefix,
323 start(), end());
324 SetName(buff);
325}
326
327void ProfileCode::Tick(uword pc, bool exclusive, intptr_t serial) {
328 // If exclusive is set, tick it.
329 if (exclusive) {
330 exclusive_ticks_++;
331 TickAddress(pc, true);
332 }
333 // Fall through and tick inclusive count too.
334 if (inclusive_serial_ == serial) {
335 // Already gave inclusive tick for this sample.
336 return;
337 }
338 inclusive_serial_ = serial;
339 inclusive_ticks_++;
340 TickAddress(pc, false);
341}
342
343void ProfileCode::TickAddress(uword pc, bool exclusive) {
344 const intptr_t length = address_ticks_.length();
345
346 intptr_t i = 0;
347 for (; i < length; i++) {
348 ProfileCodeAddress& entry = address_ticks_[i];
349 if (entry.pc() == pc) {
350 // Tick the address entry.
351 entry.Tick(exclusive);
352 return;
353 }
354 if (entry.pc() > pc) {
355 break;
356 }
357 }
358
359 // New address, add entry.
360 ProfileCodeAddress entry(pc);
361
362 entry.Tick(exclusive);
363
364 if (i < length) {
365 // Insert at i.
366 address_ticks_.InsertAt(i, entry);
367 } else {
368 // Add to end.
369 address_ticks_.Add(entry);
370 }
371}
372
373void ProfileCode::PrintNativeCode(JSONObject* profile_code_obj) {
374 ASSERT(kind() == kNativeCode);
375 JSONObject obj(profile_code_obj, "code");
376 obj.AddProperty("type", "@Code");
377 obj.AddProperty("kind", "Native");
378 obj.AddProperty("name", name());
379 obj.AddProperty("_optimized", false);
380 obj.AddPropertyF("start", "%" Px "", start());
381 obj.AddPropertyF("end", "%" Px "", end());
382 {
383 // Generate a fake function entry.
384 JSONObject func(&obj, "function");
385 ASSERT(function_ != nullptr);
386 function_->PrintToJSONObject(&func);
387 }
388}
389
390void ProfileCode::PrintCollectedCode(JSONObject* profile_code_obj) {
392 JSONObject obj(profile_code_obj, "code");
393 obj.AddProperty("type", "@Code");
394 obj.AddProperty("kind", "Collected");
395 obj.AddProperty("name", name());
396 obj.AddProperty("_optimized", false);
397 obj.AddPropertyF("start", "%" Px "", start());
398 obj.AddPropertyF("end", "%" Px "", end());
399 {
400 // Generate a fake function entry.
401 JSONObject func(&obj, "function");
402 ASSERT(function_ != nullptr);
403 function_->PrintToJSONObject(&func);
404 }
405}
406
407void ProfileCode::PrintOverwrittenCode(JSONObject* profile_code_obj) {
408 ASSERT(kind() == kReusedCode);
409 JSONObject obj(profile_code_obj, "code");
410 obj.AddProperty("type", "@Code");
411 obj.AddProperty("kind", "Collected");
412 obj.AddProperty("name", name());
413 obj.AddProperty("_optimized", false);
414 obj.AddPropertyF("start", "%" Px "", start());
415 obj.AddPropertyF("end", "%" Px "", end());
416 {
417 // Generate a fake function entry.
418 JSONObject func(&obj, "function");
419 ASSERT(function_ != nullptr);
420 function_->PrintToJSONObject(&func);
421 }
422}
423
424void ProfileCode::PrintTagCode(JSONObject* profile_code_obj) {
425 ASSERT(kind() == kTagCode);
426 JSONObject obj(profile_code_obj, "code");
427 obj.AddProperty("type", "@Code");
428 obj.AddProperty("kind", "Tag");
429 obj.AddProperty("name", name());
430 obj.AddPropertyF("start", "%" Px "", start());
431 obj.AddPropertyF("end", "%" Px "", end());
432 obj.AddProperty("_optimized", false);
433 {
434 // Generate a fake function entry.
435 JSONObject func(&obj, "function");
436 ASSERT(function_ != nullptr);
437 function_->PrintToJSONObject(&func);
438 }
439}
440
442 switch (kind) {
443 case kDartCode:
444 return "Dart";
445 case kCollectedCode:
446 return "Collected";
447 case kNativeCode:
448 return "Native";
449 case kReusedCode:
450 return "Overwritten";
451 case kTagCode:
452 return "Tag";
453 }
454 UNREACHABLE();
455 return nullptr;
456}
457
459 JSONObject obj(codes);
461 obj.AddProperty("inclusiveTicks", inclusive_ticks());
462 obj.AddProperty("exclusiveTicks", exclusive_ticks());
463 if (kind() == kDartCode) {
464 ASSERT(!code_.IsNull());
465 obj.AddProperty("code", *code_.handle());
466 } else if (kind() == kCollectedCode) {
467 PrintCollectedCode(&obj);
468 } else if (kind() == kReusedCode) {
469 PrintOverwrittenCode(&obj);
470 } else if (kind() == kTagCode) {
471 PrintTagCode(&obj);
472 } else {
473 ASSERT(kind() == kNativeCode);
474 PrintNativeCode(&obj);
475 }
476 {
477 JSONArray ticks(&obj, "ticks");
478 for (intptr_t i = 0; i < address_ticks_.length(); i++) {
479 const ProfileCodeAddress& entry = address_ticks_[i];
480 ticks.AddValueF("%" Px "", entry.pc());
481 ticks.AddValue(entry.exclusive_ticks());
482 ticks.AddValue(entry.inclusive_ticks());
483 }
484 }
485}
486
488 public:
490 : null_function_(Function::ZoneHandle()),
491 unknown_function_(nullptr),
492 table_(8) {
493 unknown_function_ =
494 Add(ProfileFunction::kUnknownFunction, "<unknown Dart function>");
495 }
496
498 ASSERT(!function.IsNull());
499 ProfileFunction* profile_function = Lookup(function);
500 if (profile_function != nullptr) {
501 return profile_function;
502 }
503 return Add(function);
504 }
505
507 ASSERT(!function.IsNull());
508 return function_hash_.LookupValue(&function);
509 }
510
512 ASSERT(unknown_function_ != nullptr);
513 return unknown_function_;
514 }
515
516 // No protection against being called more than once for the same tag_id.
517 ProfileFunction* AddTag(uword tag_id, const char* name) {
518 // TODO(johnmccutchan): Canonicalize ProfileFunctions for tags.
520 }
521
522 // No protection against being called more than once for the same native
523 // address.
524 ProfileFunction* AddNative(uword start_address, const char* name) {
525 // TODO(johnmccutchan): Canonicalize ProfileFunctions for natives.
527 }
528
529 // No protection against being called more tha once for the same stub.
530 ProfileFunction* AddStub(uword start_address, const char* name) {
532 }
533
  // Number of ProfileFunctions currently stored in the table.
  intptr_t length() const { return table_.length(); }
535
  // Returns the ProfileFunction stored at table slot |i|.
  // |i| must satisfy 0 <= i < length().
  ProfileFunction* At(intptr_t i) const {
    ASSERT(i >= 0);
    ASSERT(i < length());
    return table_[i];
  }
541
542 private:
543 ProfileFunction* Add(ProfileFunction::Kind kind, const char* name) {
545 ASSERT(name != nullptr);
546 ProfileFunction* profile_function =
547 new ProfileFunction(kind, name, null_function_, table_.length());
548 table_.Add(profile_function);
549 return profile_function;
550 }
551
552 ProfileFunction* Add(const Function& function) {
553 ASSERT(Lookup(function) == nullptr);
554 ProfileFunction* profile_function = new ProfileFunction(
555 ProfileFunction::kDartFunction, nullptr, function, table_.length());
556 table_.Add(profile_function);
557 function_hash_.Insert(profile_function);
558 return profile_function;
559 }
560
561 // Needed for DirectChainedHashMap.
562 struct ProfileFunctionTableTrait {
563 typedef ProfileFunction* Value;
564 typedef const Function* Key;
565 typedef ProfileFunction* Pair;
566
567 static Key KeyOf(Pair kv) { return kv->function(); }
568
569 static Value ValueOf(Pair kv) { return kv; }
570
571 static inline uword Hash(Key key) { return key->Hash(); }
572
573 static inline bool IsKeyEqual(Pair kv, Key key) {
574 return kv->function()->ptr() == key->ptr();
575 }
576 };
577
578 const Function& null_function_;
579 ProfileFunction* unknown_function_;
580 ZoneGrowableArray<ProfileFunction*> table_;
581 DirectChainedHashMap<ProfileFunctionTableTrait> function_hash_;
582};
583
584ProfileFunction* ProfileCode::SetFunctionAndName(ProfileFunctionTable* table) {
585 ASSERT(function_ == nullptr);
586
587 ProfileFunction* function = nullptr;
588 if ((kind() == kReusedCode) || (kind() == kCollectedCode)) {
589 if (name() == nullptr) {
590 // Lazily set generated name.
591 GenerateAndSetSymbolName("[Collected]");
592 }
593 // Map these to a canonical unknown function.
594 function = table->GetUnknown();
595 } else if (kind() == kDartCode) {
596 ASSERT(!code_.IsNull());
597 const char* name = code_.QualifiedName();
598 const Object& obj = Object::Handle(code_.owner());
599 if (obj.IsFunction()) {
600 function = table->LookupOrAdd(Function::Cast(obj));
601 } else {
602 // A stub.
603 function = table->AddStub(start(), name);
604 }
605 SetName(name);
606 } else if (kind() == kNativeCode) {
607 if (name() == nullptr) {
608 // Lazily set generated name.
609 const intptr_t kBuffSize = 512;
610 char buff[kBuffSize];
611 uword dso_base;
612 char* dso_name;
614 &dso_name)) {
615 uword dso_offset = start() - dso_base;
616 Utils::SNPrint(&buff[0], kBuffSize - 1, "[Native] %s+0x%" Px, dso_name,
617 dso_offset);
619 } else {
620 Utils::SNPrint(&buff[0], kBuffSize - 1, "[Native] %" Px, start());
621 }
622 SetName(buff);
623 }
624 function = table->AddNative(start(), name());
625 } else if (kind() == kTagCode) {
626 if (name() == nullptr) {
627 if (UserTags::IsUserTag(start())) {
628 const char* tag_name = UserTags::TagName(start());
629 ASSERT(tag_name != nullptr);
630 SetName(tag_name);
633 const char* tag_name = VMTag::TagName(start());
634 ASSERT(tag_name != nullptr);
635 SetName(tag_name);
636 } else {
637 switch (start()) {
639 SetName("Root");
640 break;
642 SetName("[Truncated]");
643 break;
645 SetName("[No Code]");
646 break;
648 SetName("[Optimized Code]");
649 break;
651 SetName("[Unoptimized Code]");
652 break;
654 SetName("[Native Code]");
655 break;
657 SetName("[Inline Start]");
658 break;
660 SetName("[Inline End]");
661 break;
662 default:
664 break;
665 }
666 }
667 }
668 function = table->AddTag(start(), name());
669 } else {
670 UNREACHABLE();
671 }
672 ASSERT(function != nullptr);
673
674 function->AddProfileCode(code_table_index());
675
676 function_ = function;
677 return function_;
678}
679
681 intptr_t length = table_.length();
682 if (length == 0) {
683 return -1; // Not found.
684 }
685 intptr_t lo = 0;
686 intptr_t hi = length - 1;
687 while (lo <= hi) {
688 intptr_t mid = (hi - lo + 1) / 2 + lo;
689 ASSERT(mid >= lo);
690 ASSERT(mid <= hi);
691 ProfileCode* code = At(mid);
692 if (code->Contains(pc)) {
693 return mid;
694 } else if (pc < code->start()) {
695 hi = mid - 1;
696 } else {
697 lo = mid + 1;
698 }
699 }
700 return -1;
701}
702
704 const intptr_t length = table_.length();
705 if (length == 0) {
706 table_.Add(new_code);
707 return length;
708 }
709
710 // Determine the correct place to insert or merge |new_code| into table.
711 intptr_t lo = -1;
712 intptr_t hi = -1;
713 ProfileCode* lo_code = nullptr;
714 ProfileCode* hi_code = nullptr;
715 const uword pc = new_code->end() - 1;
716 FindNeighbors(pc, &lo, &hi, &lo_code, &hi_code);
717 ASSERT((lo_code != nullptr) || (hi_code != nullptr));
718
719 if (lo != -1) {
720 // Has left neighbor.
721 new_code->TruncateLower(lo_code->end());
722 ASSERT(!new_code->Overlaps(lo_code));
723 }
724 if (hi != -1) {
725 // Has right neighbor.
726 new_code->TruncateUpper(hi_code->start());
727 ASSERT(!new_code->Overlaps(hi_code));
728 }
729
730 if ((lo != -1) && (lo_code->kind() == ProfileCode::kNativeCode) &&
731 (new_code->kind() == ProfileCode::kNativeCode) &&
732 (lo_code->end() == new_code->start())) {
733 // Adjacent left neighbor of the same kind: merge.
734 // (dladdr doesn't give us symbol size so processing more samples may see
735 // more PCs we didn't previously know belonged to it.)
736 lo_code->ExpandUpper(new_code->end());
737 return lo;
738 }
739
740 if ((hi != -1) && (hi_code->kind() == ProfileCode::kNativeCode) &&
741 (new_code->kind() == ProfileCode::kNativeCode) &&
742 (new_code->end() == hi_code->start())) {
743 // Adjacent right neighbor of the same kind: merge.
744 // (dladdr doesn't give us symbol size so processing more samples may see
745 // more PCs we didn't previously know belonged to it.)
746 hi_code->ExpandLower(new_code->start());
747 return hi;
748 }
749
750 intptr_t insert;
751 if (lo == -1) {
752 insert = 0;
753 } else if (hi == -1) {
754 insert = length;
755 } else {
756 insert = lo + 1;
757 }
758 table_.InsertAt(insert, new_code);
759 return insert;
760}
761
// Locates the table entries bracketing |pc|: on return, |lo|/|lo_code| are
// the nearest entry at or below |pc| (-1/nullptr if none) and |hi|/|hi_code|
// the nearest entry above (-1/nullptr if none). Requires a non-empty table.
void ProfileCodeTable::FindNeighbors(uword pc,
                                     intptr_t* lo,
                                     intptr_t* hi,
                                     ProfileCode** lo_code,
                                     ProfileCode** hi_code) const {
  ASSERT(table_.length() >= 1);

  intptr_t length = table_.length();

  if (pc < At(0)->start()) {
    // Lower than any existing code: no left neighbor.
    *lo = -1;
    *lo_code = nullptr;
    *hi = 0;
    *hi_code = At(*hi);
    return;
  }

  if (pc >= At(length - 1)->end()) {
    // Higher than any existing code: no right neighbor.
    *lo = length - 1;
    *lo_code = At(*lo);
    *hi = -1;
    *hi_code = nullptr;
    return;
  }

  // Binary search: shrink the [*lo, *hi] window until the two indices are
  // adjacent. Invariant maintained: At(*lo)->end() <= pc < At(*hi)->end().
  *lo = 0;
  *lo_code = At(*lo);
  *hi = length - 1;
  *hi_code = At(*hi);

  while ((*hi - *lo) > 1) {
    // Midpoint rounded up, so mid is strictly inside (lo, hi].
    intptr_t mid = (*hi - *lo + 1) / 2 + *lo;
    ASSERT(*lo <= mid);
    ASSERT(*hi >= mid);
    ProfileCode* code = At(mid);
    if (code->end() <= pc) {
      // |pc| is at or beyond mid's end: raise the lower bound.
      *lo = mid;
      *lo_code = code;
    }
    if (pc < code->end()) {
      // |pc| is below mid's end: lower the upper bound.
      *hi = mid;
      *hi_code = code;
    }
  }
}
809
810void ProfileCodeTable::VerifyOrder() {
811 const intptr_t length = table_.length();
812 if (length == 0) {
813 return;
814 }
815 uword last = table_[0]->end();
816 for (intptr_t i = 1; i < length; i++) {
817 ProfileCode* a = table_[i];
818 ASSERT(last <= a->start());
819 last = a->end();
820 }
821}
822
823void ProfileCodeTable::VerifyOverlap() {
824 const intptr_t length = table_.length();
825 for (intptr_t i = 0; i < length; i++) {
826 ProfileCode* a = table_[i];
827 for (intptr_t j = i + 1; j < length; j++) {
828 ProfileCode* b = table_[j];
829 ASSERT(!a->Contains(b->start()) && !a->Contains(b->end() - 1) &&
830 !b->Contains(a->start()) && !b->Contains(a->end() - 1));
831 }
832 }
833}
834
836 uword pc,
837 const Code& code,
838 ProcessedSample* sample,
839 intptr_t frame_index,
840 // Outputs:
841 GrowableArray<const Function*>** inlined_functions,
842 GrowableArray<TokenPosition>** inlined_token_positions,
843 TokenPosition* token_position) {
844 const intptr_t offset = OffsetForPC(pc, code, sample, frame_index);
845 if (FindInCache(pc, offset, inlined_functions, inlined_token_positions,
846 token_position)) {
847 // Found in cache.
848 return;
849 }
850 Add(pc, code, sample, frame_index, inlined_functions, inlined_token_positions,
851 token_position);
852}
853
854bool ProfileCodeInlinedFunctionsCache::FindInCache(
855 uword pc,
856 intptr_t offset,
857 GrowableArray<const Function*>** inlined_functions,
858 GrowableArray<TokenPosition>** inlined_token_positions,
859 TokenPosition* token_position) {
860 // Simple linear scan.
861 for (intptr_t i = 0; i < kCacheSize; i++) {
862 intptr_t index = (last_hit_ + i) % kCacheSize;
863 if ((cache_[index].pc == pc) && (cache_[index].offset == offset)) {
864 // Hit.
865 if (cache_[index].inlined_functions.length() == 0) {
866 *inlined_functions = nullptr;
867 *inlined_token_positions = nullptr;
868 } else {
869 *inlined_functions = &cache_[index].inlined_functions;
870 *inlined_token_positions = &cache_[index].inlined_token_positions;
871 }
872 *token_position = cache_[index].token_position;
873 cache_hit_++;
874 last_hit_ = index;
875 return true;
876 }
877 }
878 cache_miss_++;
879 return false;
880}
881
882// Add to cache and fill in outputs.
883void ProfileCodeInlinedFunctionsCache::Add(
884 uword pc,
885 const Code& code,
886 ProcessedSample* sample,
887 intptr_t frame_index,
888 // Outputs:
889 GrowableArray<const Function*>** inlined_functions,
890 GrowableArray<TokenPosition>** inlined_token_positions,
891 TokenPosition* token_position) {
892 const intptr_t offset = OffsetForPC(pc, code, sample, frame_index);
893 CacheEntry* cache_entry = &cache_[NextFreeIndex()];
894 cache_entry->Reset();
895 cache_entry->pc = pc;
896 cache_entry->offset = offset;
897 code.GetInlinedFunctionsAtInstruction(
898 offset, &(cache_entry->inlined_functions),
899 &(cache_entry->inlined_token_positions));
900 if (cache_entry->inlined_functions.length() == 0) {
901 *inlined_functions = nullptr;
902 *inlined_token_positions = nullptr;
903 *token_position = cache_entry->token_position = TokenPosition::kNoSource;
904 return;
905 }
906
907 // Write outputs.
908 *inlined_functions = &(cache_entry->inlined_functions);
909 *inlined_token_positions = &(cache_entry->inlined_token_positions);
910 *token_position = cache_entry->token_position =
911 cache_entry->inlined_token_positions[0];
912}
913
914intptr_t ProfileCodeInlinedFunctionsCache::OffsetForPC(uword pc,
915 const Code& code,
916 ProcessedSample* sample,
917 intptr_t frame_index) {
918 intptr_t offset = pc - code.PayloadStart();
919 if (frame_index != 0) {
920 // The PC of frames below the top frame is a call's return address,
921 // which can belong to a different inlining interval than the call.
922 offset--;
923 } else if (sample->IsAllocationSample()) {
924 // Allocation samples skip the top frame, so the top frame's pc is
925 // also a call's return address.
926 offset--;
927 } else if (!sample->first_frame_executing()) {
928 // If the first frame wasn't executing code (i.e. we started to collect
929 // the stack trace at an exit frame), the top frame's pc is also a
930 // call's return address.
931 offset--;
932 }
933 return offset;
934}
935
937 public:
947
949 Isolate* isolate,
950 SampleFilter* filter,
951 SampleBlockBuffer* sample_buffer,
952 Profile* profile)
953 : thread_(thread),
954 isolate_(isolate),
955 vm_isolate_(Dart::vm_isolate()),
956 filter_(filter),
957 sample_buffer_(sample_buffer),
958 profile_(profile),
959 null_code_(Code::null()),
960 null_function_(Function::ZoneHandle()),
961 inclusive_tree_(false),
962 inlined_functions_cache_(new ProfileCodeInlinedFunctionsCache()),
963 samples_(nullptr),
964 info_kind_(kNone) {
965 ASSERT(profile_ != nullptr);
966 }
967
  // Runs the full profile-building pipeline: filter samples, then build the
  // code and function tables and attribute ticks to them.
  void Build() {
    ScopeTimer sw("ProfileBuilder::Build", FLAG_trace_profiler);
    if (!FilterSamples()) {
      // No sample buffer to process.
      return;
    }
    Setup();
    BuildCodeTable();
    FinalizeCodeIndexes();
    BuildFunctionTable();
    PopulateFunctionTicks();
    SanitizeMinMaxTimes();
  }
980
981 private:
982 // Returns true if |frame_index| in |sample| is using CPU.
983 static bool IsExecutingFrame(ProcessedSample* sample, intptr_t frame_index) {
984 return (frame_index == 0) &&
985 (sample->first_frame_executing() || sample->IsAllocationSample());
986 }
987
  // Allocates fresh code/function tables on the profile and registers the
  // synthetic tag code ranges every profile needs.
  void Setup() {
    profile_->live_code_ = new ProfileCodeTable();
    profile_->dead_code_ = new ProfileCodeTable();
    profile_->tag_code_ = new ProfileCodeTable();
    profile_->functions_ = new ProfileFunctionTable();
    // Register some synthetic tags.
    RegisterProfileCodeTag(VMTag::kRootTagId);
    RegisterProfileCodeTag(VMTag::kTruncatedTagId);
    RegisterProfileCodeTag(VMTag::kNoneCodeTagId);
    RegisterProfileCodeTag(VMTag::kOptimizedCodeTagId);
    RegisterProfileCodeTag(VMTag::kUnoptimizedCodeTagId);
    RegisterProfileCodeTag(VMTag::kNativeCodeTagId);
    RegisterProfileCodeTag(VMTag::kInlineStartCodeTagId);
    RegisterProfileCodeTag(VMTag::kInlineEndCodeTagId);
  }
1003
  // Processes the raw sample blocks into |samples_| using |filter_| and
  // records them on the profile. Returns false if there is no buffer.
  bool FilterSamples() {
    ScopeTimer sw("ProfileBuilder::FilterSamples", FLAG_trace_profiler);
    if (sample_buffer_ == nullptr) {
      return false;
    }
    samples_ = sample_buffer_->BuildProcessedSampleBuffer(isolate_, filter_);
    profile_->samples_ = samples_;
    profile_->sample_count_ = samples_->length();
    return true;
  }
1014
1015 void UpdateMinMaxTimes(int64_t timestamp) {
1016 profile_->min_time_ =
1017 timestamp < profile_->min_time_ ? timestamp : profile_->min_time_;
1018 profile_->max_time_ =
1019 timestamp > profile_->max_time_ ? timestamp : profile_->max_time_;
1020 }
1021
1022 void SanitizeMinMaxTimes() {
1023 if ((profile_->min_time_ == kMaxInt64) && (profile_->max_time_ == 0)) {
1024 profile_->min_time_ = 0;
1025 profile_->max_time_ = 0;
1026 }
1027 }
1028
  // First pass over the samples: registers a ProfileCode for every code
  // object and pc seen, ticks each, and records the profile's time range.
  void BuildCodeTable() {
    ScopeTimer sw("ProfileBuilder::BuildCodeTable", FLAG_trace_profiler);

    // Build the live code table eagerly by populating it with code objects
    // from the processed sample buffer.
    const CodeLookupTable& code_lookup_table = samples_->code_lookup_table();
    for (intptr_t i = 0; i < code_lookup_table.length(); i++) {
      const CodeDescriptor* descriptor = code_lookup_table.At(i);
      ASSERT(descriptor != nullptr);
      const AbstractCode code = descriptor->code();
      RegisterLiveProfileCode(new ProfileCode(
          ProfileCode::kDartCode, code.PayloadStart(),
          code.PayloadStart() + code.Size(), code.compile_timestamp(), code));
      // Stay responsive to safepoint requests in this potentially long loop.
      thread_->CheckForSafepoint();
    }

    // Iterate over samples.
    for (intptr_t sample_index = 0; sample_index < samples_->length();
         sample_index++) {
      ProcessedSample* sample = samples_->At(sample_index);
      const int64_t timestamp = sample->timestamp();

      // This is our first pass over the sample buffer, use this as an
      // opportunity to determine the min and max time ranges of this profile.
      UpdateMinMaxTimes(timestamp);

      // Make sure VM tag exists.
      if (VMTag::IsNativeEntryTag(sample->vm_tag())) {
        RegisterProfileCodeTag(VMTag::kNativeTagId);
      } else if (VMTag::IsRuntimeEntryTag(sample->vm_tag())) {
        RegisterProfileCodeTag(VMTag::kRuntimeTagId);
      }
      RegisterProfileCodeTag(sample->vm_tag());
      // Make sure user tag exists.
      RegisterProfileCodeTag(sample->user_tag());

      // Make sure that a ProfileCode objects exist for all pcs in the sample
      // and tick each one.
      for (intptr_t frame_index = 0; frame_index < sample->length();
           frame_index++) {
        const uword pc = sample->At(frame_index);
        ASSERT(pc != 0);
        ProfileCode* code = FindOrRegisterProfileCode(pc, timestamp);
        ASSERT(code != nullptr);
        // Only the top frame (when executing) gets an exclusive tick.
        code->Tick(pc, IsExecutingFrame(sample, frame_index), sample_index);
      }

      TickExitFrame(sample->vm_tag(), sample_index, sample);
      thread_->CheckForSafepoint();
    }
  }
1080
1081 void FinalizeCodeIndexes() {
1082 ScopeTimer sw("ProfileBuilder::FinalizeCodeIndexes", FLAG_trace_profiler);
1083 ProfileCodeTable* live_table = profile_->live_code_;
1084 ProfileCodeTable* dead_table = profile_->dead_code_;
1085 ProfileCodeTable* tag_table = profile_->tag_code_;
1086 const intptr_t dead_code_index_offset = live_table->length();
1087 const intptr_t tag_code_index_offset =
1088 dead_table->length() + dead_code_index_offset;
1089
1090 profile_->dead_code_index_offset_ = dead_code_index_offset;
1091 profile_->tag_code_index_offset_ = tag_code_index_offset;
1092
1093 for (intptr_t i = 0; i < live_table->length(); i++) {
1094 const intptr_t index = i;
1095 ProfileCode* code = live_table->At(i);
1096 ASSERT(code != nullptr);
1097 code->set_code_table_index(index);
1098 }
1099
1100 for (intptr_t i = 0; i < dead_table->length(); i++) {
1101 const intptr_t index = dead_code_index_offset + i;
1102 ProfileCode* code = dead_table->At(i);
1103 ASSERT(code != nullptr);
1104 code->set_code_table_index(index);
1105 }
1106
1107 for (intptr_t i = 0; i < tag_table->length(); i++) {
1108 const intptr_t index = tag_code_index_offset + i;
1109 ProfileCode* code = tag_table->At(i);
1110 ASSERT(code != nullptr);
1111 code->set_code_table_index(index);
1112 }
1113 }
1114
1115 void BuildFunctionTable() {
1116 ScopeTimer sw("ProfileBuilder::BuildFunctionTable", FLAG_trace_profiler);
1117 ProfileCodeTable* live_table = profile_->live_code_;
1118 ProfileCodeTable* dead_table = profile_->dead_code_;
1119 ProfileCodeTable* tag_table = profile_->tag_code_;
1120 ProfileFunctionTable* function_table = profile_->functions_;
1121 for (intptr_t i = 0; i < live_table->length(); i++) {
1122 ProfileCode* code = live_table->At(i);
1123 ASSERT(code != nullptr);
1124 code->SetFunctionAndName(function_table);
1125 thread_->CheckForSafepoint();
1126 }
1127
1128 for (intptr_t i = 0; i < dead_table->length(); i++) {
1129 ProfileCode* code = dead_table->At(i);
1130 ASSERT(code != nullptr);
1131 code->SetFunctionAndName(function_table);
1132 thread_->CheckForSafepoint();
1133 }
1134
1135 for (intptr_t i = 0; i < tag_table->length(); i++) {
1136 ProfileCode* code = tag_table->At(i);
1137 ASSERT(code != nullptr);
1138 code->SetFunctionAndName(function_table);
1139 thread_->CheckForSafepoint();
1140 }
1141 }
1142
  // Second pass over the samples: attributes ticks to functions (including
  // inlined frames) now that the code and function tables are built.
  void PopulateFunctionTicks() {
    ScopeTimer sw("ProfileBuilder::PopulateFunctionTicks", FLAG_trace_profiler);
    for (intptr_t sample_index = 0; sample_index < samples_->length();
         sample_index++) {
      ProcessedSample* sample = samples_->At(sample_index);

      // Walk the sampled PCs.
      for (intptr_t frame_index = 0; frame_index < sample->length();
           frame_index++) {
        ASSERT(sample->At(frame_index) != 0);
        ProcessFrame(sample_index, sample, frame_index);
      }
      if (sample->truncated()) {
        // Account for the frames lost to stack-walk truncation.
        InclusiveTickTruncatedTag(sample);
      }
    }
  }
1160
1161 void ProcessFrame(intptr_t sample_index,
1162 ProcessedSample* sample,
1163 intptr_t frame_index) {
1164 const uword pc = sample->At(frame_index);
1165 ProfileCode* profile_code = GetProfileCode(pc, sample->timestamp());
1166 ProfileFunction* function = profile_code->function();
1167 ASSERT(function != nullptr);
1168 const intptr_t code_index = profile_code->code_table_index();
1169 ASSERT(profile_code != nullptr);
1170
1171 GrowableArray<const Function*>* inlined_functions = nullptr;
1172 GrowableArray<TokenPosition>* inlined_token_positions = nullptr;
1173 TokenPosition token_position = TokenPosition::kNoSource;
1174 Code& code = Code::ZoneHandle();
1175 if (profile_code->code().IsCode()) {
1176 code ^= profile_code->code().ptr();
1177 inlined_functions_cache_->Get(pc, code, sample, frame_index,
1178 &inlined_functions,
1179 &inlined_token_positions, &token_position);
1180 if (FLAG_trace_profiler_verbose && (inlined_functions != nullptr)) {
1181 for (intptr_t i = 0; i < inlined_functions->length(); i++) {
1182 const String& name =
1183 String::Handle((*inlined_functions)[i]->QualifiedScrubbedName());
1184 THR_Print("InlinedFunction[%" Pd "] = {%s, %s}\n", i,
1185 name.ToCString(),
1186 (*inlined_token_positions)[i].ToCString());
1187 }
1188 }
1189 }
1190
1191 if (code.IsNull() || (inlined_functions == nullptr) ||
1192 (inlined_functions->length() <= 1)) {
1193 ProcessFunction(sample_index, sample, frame_index, function,
1194 token_position, code_index);
1195 return;
1196 }
1197
1198 if (!code.is_optimized()) {
1199 OS::PrintErr("Code that should be optimized is not. Please file a bug\n");
1200 OS::PrintErr("Code object: %s\n", code.ToCString());
1201 OS::PrintErr("Inlined functions length: %" Pd "\n",
1202 inlined_functions->length());
1203 for (intptr_t i = 0; i < inlined_functions->length(); i++) {
1204 OS::PrintErr("IF[%" Pd "] = %s\n", i,
1205 (*inlined_functions)[i]->ToFullyQualifiedCString());
1206 }
1207 }
1208
1209 ASSERT(code.is_optimized());
1210
1211 // Append the inlined children.
1212 for (intptr_t i = inlined_functions->length() - 1; i >= 0; i--) {
1213 const Function* inlined_function = (*inlined_functions)[i];
1214 ASSERT(inlined_function != nullptr);
1215 ASSERT(!inlined_function->IsNull());
1216 TokenPosition inlined_token_position = (*inlined_token_positions)[i];
1217 ProcessInlinedFunction(sample_index, sample, frame_index + i,
1218 inlined_function, inlined_token_position,
1219 code_index);
1220 }
1221 }
1222
1223 void ProcessInlinedFunction(intptr_t sample_index,
1224 ProcessedSample* sample,
1225 intptr_t frame_index,
1226 const Function* inlined_function,
1227 TokenPosition inlined_token_position,
1228 intptr_t code_index) {
1229 ProfileFunctionTable* function_table = profile_->functions_;
1230 ProfileFunction* function = function_table->LookupOrAdd(*inlined_function);
1231 ASSERT(function != nullptr);
1232 ProcessFunction(sample_index, sample, frame_index, function,
1233 inlined_token_position, code_index);
1234 }
1235
1236 bool ShouldTickNode(ProcessedSample* sample, intptr_t frame_index) {
1237 if (frame_index != 0) {
1238 return true;
1239 }
1240 // Only tick the first frame's node, if we are executing
1241 return IsExecutingFrame(sample, frame_index) || !FLAG_profile_vm;
1242 }
1243
1244 void ProcessFunction(intptr_t sample_index,
1245 ProcessedSample* sample,
1246 intptr_t frame_index,
1247 ProfileFunction* function,
1248 TokenPosition token_position,
1249 intptr_t code_index) {
1250 if (!function->is_visible()) {
1251 return;
1252 }
1253 if (FLAG_trace_profiler_verbose) {
1254 THR_Print("S[%" Pd "]F[%" Pd "] %s %s 0x%" Px "\n", sample_index,
1255 frame_index, function->Name(), token_position.ToCString(),
1256 sample->At(frame_index));
1257 }
1258 function->Tick(IsExecutingFrame(sample, frame_index), sample_index,
1259 token_position);
1260 function->AddProfileCode(code_index);
1261 }
1262
1263 // Tick the truncated tag's inclusive tick count.
1264 void InclusiveTickTruncatedTag(ProcessedSample* sample) {
1265 ProfileCodeTable* tag_table = profile_->tag_code_;
1266 intptr_t index = tag_table->FindCodeIndexForPC(VMTag::kTruncatedTagId);
1267 ASSERT(index >= 0);
1268 ProfileCode* code = tag_table->At(index);
1269 code->IncInclusiveTicks();
1270 ASSERT(code != nullptr);
1271 ProfileFunction* function = code->function();
1272 function->IncInclusiveTicks();
1273 }
1274
1275 uword ProfileInfoKindToVMTag(ProfileInfoKind kind) {
1276 switch (kind) {
1277 case kNone:
1278 return VMTag::kNoneCodeTagId;
1279 case kOptimized:
1281 case kUnoptimized:
1283 case kNative:
1285 case kInlineStart:
1287 case kInlineFinish:
1289 default:
1290 UNIMPLEMENTED();
1291 return VMTag::kInvalidTagId;
1292 }
1293 }
1294
1295 void TickExitFrame(uword vm_tag, intptr_t serial, ProcessedSample* sample) {
1296 if (FLAG_profile_vm) {
1297 return;
1298 }
1299 if (!VMTag::IsExitFrameTag(vm_tag)) {
1300 return;
1301 }
1302 ProfileCodeTable* tag_table = profile_->tag_code_;
1303 ProfileCode* code = tag_table->FindCodeForPC(vm_tag);
1304 ASSERT(code != nullptr);
1305 code->Tick(vm_tag, true, serial);
1306 }
1307
1308 void TickExitFrameFunction(uword vm_tag, intptr_t serial) {
1309 if (FLAG_profile_vm) {
1310 return;
1311 }
1312 if (!VMTag::IsExitFrameTag(vm_tag)) {
1313 return;
1314 }
1315 ProfileCodeTable* tag_table = profile_->tag_code_;
1316 ProfileCode* code = tag_table->FindCodeForPC(vm_tag);
1317 ASSERT(code != nullptr);
1318 ProfileFunction* function = code->function();
1319 ASSERT(function != nullptr);
1320 function->Tick(true, serial, TokenPosition::kNoSource);
1321 }
1322
1323 intptr_t GetProfileCodeTagIndex(uword tag) {
1324 ProfileCodeTable* tag_table = profile_->tag_code_;
1325 intptr_t index = tag_table->FindCodeIndexForPC(tag);
1326 ASSERT(index >= 0);
1327 ProfileCode* code = tag_table->At(index);
1328 ASSERT(code != nullptr);
1329 return code->code_table_index();
1330 }
1331
1332 intptr_t GetProfileFunctionTagIndex(uword tag) {
1333 ProfileCodeTable* tag_table = profile_->tag_code_;
1334 intptr_t index = tag_table->FindCodeIndexForPC(tag);
1335 ASSERT(index >= 0);
1336 ProfileCode* code = tag_table->At(index);
1337 ASSERT(code != nullptr);
1338 ProfileFunction* function = code->function();
1339 ASSERT(function != nullptr);
1340 return function->table_index();
1341 }
1342
1343 intptr_t GetProfileCodeIndex(uword pc, int64_t timestamp) {
1344 return GetProfileCode(pc, timestamp)->code_table_index();
1345 }
1346
1347 ProfileCode* GetProfileCode(uword pc, int64_t timestamp) {
1348 return profile_->GetCodeFromPC(pc, timestamp);
1349 }
1350
1351 void RegisterProfileCodeTag(uword tag) {
1352 if (tag == 0) {
1353 // No tag.
1354 return;
1355 }
1356 ProfileCodeTable* tag_table = profile_->tag_code_;
1357 intptr_t index = tag_table->FindCodeIndexForPC(tag);
1358 if (index >= 0) {
1359 // Already created.
1360 return;
1361 }
1362 ProfileCode* code =
1363 new ProfileCode(ProfileCode::kTagCode, tag, tag + 1, 0, null_code_);
1364 index = tag_table->InsertCode(code);
1365 ASSERT(index >= 0);
1366 }
1367
1368 ProfileCode* CreateProfileCodeReused(uword pc) {
1369 ProfileCode* code =
1370 new ProfileCode(ProfileCode::kReusedCode, pc, pc + 1, 0, null_code_);
1371 return code;
1372 }
1373
1374 bool IsPCInDartHeap(uword pc) {
1375 return vm_isolate_->group()->heap()->CodeContains(pc) ||
1376 thread_->isolate()->group()->heap()->CodeContains(pc);
1377 }
1378
1379 ProfileCode* FindOrRegisterNativeProfileCode(uword pc) {
1380 // Check if |pc| is already known in the live code table.
1381 ProfileCodeTable* live_table = profile_->live_code_;
1382 ProfileCode* profile_code = live_table->FindCodeForPC(pc);
1383 if (profile_code != nullptr) {
1384 return profile_code;
1385 }
1386
1387 // We haven't seen this pc yet.
1388
1389 // Check NativeSymbolResolver for pc.
1390 uword native_start = 0;
1391 char* native_name =
1392 NativeSymbolResolver::LookupSymbolName(pc, &native_start);
1393 if (native_name == nullptr) {
1394 // Failed to find a native symbol for pc.
1395 native_start = pc;
1396 }
1397
1398#if defined(HOST_ARCH_ARM)
1399 // The symbol for a Thumb function will be xxx1, but we may have samples
1400 // at function entry which will have pc xxx0.
1401 native_start &= ~1;
1402#endif
1403
1404 if (native_start > pc) {
1405 // Bogus lookup result.
1406 if (native_name != nullptr) {
1408 native_name = nullptr;
1409 }
1410 native_start = pc;
1411 }
1412 if ((pc - native_start) > (32 * KB)) {
1413 // Suspect lookup result. More likely dladdr going off the rails than a
1414 // jumbo function.
1415 if (native_name != nullptr) {
1417 native_name = nullptr;
1418 }
1419 native_start = pc;
1420 }
1421
1422 ASSERT(pc >= native_start);
1423 ASSERT(pc < (pc + 1)); // Should not overflow.
1424 profile_code = new ProfileCode(ProfileCode::kNativeCode, native_start,
1425 pc + 1, 0, null_code_);
1426 if (native_name != nullptr) {
1427 profile_code->SetName(native_name);
1429 }
1430
1431 RegisterLiveProfileCode(profile_code);
1432 return profile_code;
1433 }
1434
1435 void RegisterLiveProfileCode(ProfileCode* code) {
1436 ProfileCodeTable* live_table = profile_->live_code_;
1437 intptr_t index = live_table->InsertCode(code);
1438 ASSERT(index >= 0);
1439 }
1440
1441 ProfileCode* FindOrRegisterDeadProfileCode(uword pc) {
1442 ProfileCodeTable* dead_table = profile_->dead_code_;
1443
1444 ProfileCode* code = dead_table->FindCodeForPC(pc);
1445 if (code != nullptr) {
1446 return code;
1447 }
1448
1449 // Create a new dead code entry.
1450 intptr_t index = dead_table->InsertCode(CreateProfileCodeReused(pc));
1451 ASSERT(index >= 0);
1452 return dead_table->At(index);
1453 }
1454
1455 ProfileCode* FindOrRegisterProfileCode(uword pc, int64_t timestamp) {
1456 ProfileCodeTable* live_table = profile_->live_code_;
1457 ProfileCode* code = live_table->FindCodeForPC(pc);
1458 if ((code != nullptr) && (code->compile_timestamp() <= timestamp)) {
1459 // Code was compiled before sample was taken.
1460 return code;
1461 }
1462 if ((code == nullptr) && !IsPCInDartHeap(pc)) {
1463 // Not a PC from Dart code. Check with native code.
1464 return FindOrRegisterNativeProfileCode(pc);
1465 }
1466 // We either didn't find the code or it was compiled after the sample.
1467 return FindOrRegisterDeadProfileCode(pc);
1468 }
1469
1470 Thread* thread_;
1471 Isolate* isolate_;
1472 Isolate* vm_isolate_;
1473 SampleFilter* filter_;
1474 SampleBlockBuffer* sample_buffer_;
1475 Profile* profile_;
1476 const AbstractCode null_code_;
1477 const Function& null_function_;
1478 bool inclusive_tree_;
1479 ProfileCodeInlinedFunctionsCache* inlined_functions_cache_;
1480 ProcessedSampleBuffer* samples_;
1481 ProfileInfoKind info_kind_;
1482}; // ProfileBuilder.
1483
1485 : zone_(Thread::Current()->zone()),
1486 samples_(nullptr),
1487 live_code_(nullptr),
1488 dead_code_(nullptr),
1489 tag_code_(nullptr),
1490 functions_(nullptr),
1491 dead_code_index_offset_(-1),
1492 tag_code_index_offset_(-1),
1493 min_time_(kMaxInt64),
1494 max_time_(0),
1495 sample_count_(0) {}
1496
1498 Isolate* isolate,
1499 SampleFilter* filter,
1500 SampleBlockBuffer* sample_buffer) {
1501 ASSERT(isolate != nullptr);
1502
1503 // Disable thread interrupts while processing the buffer.
1504 DisableThreadInterruptsScope dtis(thread);
1505 ProfileBuilder builder(thread, isolate, filter, sample_buffer, this);
1506 builder.Build();
1507}
1508
1510 ASSERT(index >= 0);
1511 ASSERT(index < sample_count_);
1512 return samples_->At(index);
1513}
1514
1515intptr_t Profile::NumFunctions() const {
1516 return functions_->length();
1517}
1518
1520 ASSERT(functions_ != nullptr);
1521 return functions_->At(index);
1522}
1523
1525 ASSERT(live_code_ != nullptr);
1526 ASSERT(dead_code_ != nullptr);
1527 ASSERT(tag_code_ != nullptr);
1528 ASSERT(dead_code_index_offset_ >= 0);
1529 ASSERT(tag_code_index_offset_ >= 0);
1530
1531 // Code indexes span three arrays.
1532 // 0 ... |live_code|
1533 // |live_code| ... |dead_code|
1534 // |dead_code| ... |tag_code|
1535
1536 if (index < dead_code_index_offset_) {
1537 return live_code_->At(index);
1538 }
1539
1540 if (index < tag_code_index_offset_) {
1541 index -= dead_code_index_offset_;
1542 return dead_code_->At(index);
1543 }
1544
1545 index -= tag_code_index_offset_;
1546 return tag_code_->At(index);
1547}
1548
1550 intptr_t index = live_code_->FindCodeIndexForPC(pc);
1551 ProfileCode* code = nullptr;
1552 if (index < 0) {
1553 index = dead_code_->FindCodeIndexForPC(pc);
1554 ASSERT(index >= 0);
1555 code = dead_code_->At(index);
1556 } else {
1557 code = live_code_->At(index);
1558 ASSERT(code != nullptr);
1559 if (code->compile_timestamp() > timestamp) {
1560 // Code is newer than sample. Fall back to dead code table.
1561 index = dead_code_->FindCodeIndexForPC(pc);
1562 ASSERT(index >= 0);
1563 code = dead_code_->At(index);
1564 }
1565 }
1566
1567 ASSERT(code != nullptr);
1568 ASSERT(code->Contains(pc));
1569 ASSERT(code->compile_timestamp() <= timestamp);
1570 return code;
1571}
1572
1573void Profile::PrintHeaderJSON(JSONObject* obj) {
1574 intptr_t pid = OS::ProcessId();
1575
1576 obj->AddProperty("samplePeriod", static_cast<intptr_t>(FLAG_profile_period));
1577 obj->AddProperty("maxStackDepth",
1578 static_cast<intptr_t>(FLAG_max_profile_depth));
1579 obj->AddProperty("sampleCount", sample_count());
1580 obj->AddPropertyTimeMicros("timeOriginMicros", min_time());
1581 obj->AddPropertyTimeMicros("timeExtentMicros", GetTimeSpan());
1582 obj->AddProperty64("pid", pid);
1584 {
1585 JSONObject counts(obj, "_counters");
1586 counts.AddProperty64("bail_out_unknown_task",
1587 counters.bail_out_unknown_task);
1588 counts.AddProperty64("bail_out_jump_to_exception_handler",
1589 counters.bail_out_jump_to_exception_handler);
1590 counts.AddProperty64("bail_out_check_isolate",
1591 counters.bail_out_check_isolate);
1592 counts.AddProperty64("single_frame_sample_deoptimizing",
1593 counters.single_frame_sample_deoptimizing);
1594 counts.AddProperty64(
1595 "single_frame_sample_get_and_validate_stack_bounds",
1596 counters.single_frame_sample_get_and_validate_stack_bounds);
1597 counts.AddProperty64("stack_walker_native", counters.stack_walker_native);
1598 counts.AddProperty64("stack_walker_dart_exit",
1599 counters.stack_walker_dart_exit);
1600 counts.AddProperty64("stack_walker_dart", counters.stack_walker_dart);
1601 counts.AddProperty64("stack_walker_none", counters.stack_walker_none);
1602 }
1603}
1604
1605void Profile::ProcessSampleFrameJSON(JSONArray* stack,
1606 ProfileCodeInlinedFunctionsCache* cache,
1607 ProcessedSample* sample,
1608 intptr_t frame_index) {
1609 const uword pc = sample->At(frame_index);
1610 ProfileCode* profile_code = GetCodeFromPC(pc, sample->timestamp());
1611 ASSERT(profile_code != nullptr);
1612 ProfileFunction* function = profile_code->function();
1613 ASSERT(function != nullptr);
1614
1615 // Don't show stubs in stack traces.
1616 if (!function->is_visible() ||
1618 return;
1619 }
1620
1621 GrowableArray<const Function*>* inlined_functions = nullptr;
1622 GrowableArray<TokenPosition>* inlined_token_positions = nullptr;
1623 TokenPosition token_position = TokenPosition::kNoSource;
1624 Code& code = Code::ZoneHandle();
1625
1626 if (profile_code->code().IsCode()) {
1627 code ^= profile_code->code().ptr();
1628 cache->Get(pc, code, sample, frame_index, &inlined_functions,
1629 &inlined_token_positions, &token_position);
1630 if (FLAG_trace_profiler_verbose && (inlined_functions != nullptr)) {
1631 for (intptr_t i = 0; i < inlined_functions->length(); i++) {
1632 const String& name =
1633 String::Handle((*inlined_functions)[i]->QualifiedScrubbedName());
1634 THR_Print("InlinedFunction[%" Pd "] = {%s, %s}\n", i, name.ToCString(),
1635 (*inlined_token_positions)[i].ToCString());
1636 }
1637 }
1638 }
1639
1640 if (code.IsNull() || (inlined_functions == nullptr) ||
1641 (inlined_functions->length() <= 1)) {
1642 PrintFunctionFrameIndexJSON(stack, function);
1643 return;
1644 }
1645
1646 if (!code.is_optimized()) {
1647 OS::PrintErr("Code that should be optimized is not. Please file a bug\n");
1648 OS::PrintErr("Code object: %s\n", code.ToCString());
1649 OS::PrintErr("Inlined functions length: %" Pd "\n",
1650 inlined_functions->length());
1651 for (intptr_t i = 0; i < inlined_functions->length(); i++) {
1652 OS::PrintErr("IF[%" Pd "] = %s\n", i,
1653 (*inlined_functions)[i]->ToFullyQualifiedCString());
1654 }
1655 }
1656
1657 ASSERT(code.is_optimized());
1658
1659 for (intptr_t i = inlined_functions->length() - 1; i >= 0; i--) {
1660 const Function* inlined_function = (*inlined_functions)[i];
1661 ASSERT(inlined_function != nullptr);
1662 ASSERT(!inlined_function->IsNull());
1663 ProcessInlinedFunctionFrameJSON(stack, inlined_function);
1664 }
1665}
1666
1667#if defined(SUPPORT_PERFETTO) && !defined(PRODUCT)
1668void Profile::ProcessSampleFramePerfetto(
1670 ProfileCodeInlinedFunctionsCache* cache,
1671 ProcessedSample* sample,
1672 intptr_t frame_index) {
1673 const uword pc = sample->At(frame_index);
1674 ProfileCode* profile_code = GetCodeFromPC(pc, sample->timestamp());
1675 ASSERT(profile_code != nullptr);
1676 ProfileFunction* function = profile_code->function();
1677 ASSERT(function != nullptr);
1678
1679 // Don't show stubs in stack traces.
1680 if (!function->is_visible() ||
1682 return;
1683 }
1684
1685 GrowableArray<const Function*>* inlined_functions = nullptr;
1686 GrowableArray<TokenPosition>* inlined_token_positions = nullptr;
1687 TokenPosition token_position = TokenPosition::kNoSource;
1688 Code& code = Code::ZoneHandle();
1689
1690 if (profile_code->code().IsCode()) {
1691 code ^= profile_code->code().ptr();
1692 cache->Get(pc, code, sample, frame_index, &inlined_functions,
1693 &inlined_token_positions, &token_position);
1694 if (FLAG_trace_profiler_verbose && (inlined_functions != NULL)) {
1695 for (intptr_t i = 0; i < inlined_functions->length(); i++) {
1696 const String& name =
1697 String::Handle((*inlined_functions)[i]->QualifiedScrubbedName());
1698 THR_Print("InlinedFunction[%" Pd "] = {%s, %s}\n", i, name.ToCString(),
1699 (*inlined_token_positions)[i].ToCString());
1700 }
1701 }
1702 }
1703
1704 if (code.IsNull() || (inlined_functions == nullptr) ||
1705 (inlined_functions->length() <= 1)) {
1706 // This is the ID of a |Frame| that was added to the interned data table in
1707 // |ProfilerService::PrintProfilePerfetto|. See the comments in that method
1708 // for more details.
1709 callstack->add_frame_ids(function->table_index() + 1);
1710 return;
1711 }
1712
1713 if (!code.is_optimized()) {
1714 OS::PrintErr("Code that should be optimized is not. Please file a bug\n");
1715 OS::PrintErr("Code object: %s\n", code.ToCString());
1716 OS::PrintErr("Inlined functions length: %" Pd "\n",
1717 inlined_functions->length());
1718 for (intptr_t i = 0; i < inlined_functions->length(); i++) {
1719 OS::PrintErr("IF[%" Pd "] = %s\n", i,
1720 (*inlined_functions)[i]->ToFullyQualifiedCString());
1721 }
1722 }
1723
1724 ASSERT(code.is_optimized());
1725
1726 for (intptr_t i = 0; i < inlined_functions->length(); ++i) {
1727 const Function* inlined_function = (*inlined_functions)[i];
1728 ASSERT(inlined_function != NULL);
1729 ASSERT(!inlined_function->IsNull());
1730 ProfileFunction* profile_function =
1731 functions_->LookupOrAdd(*inlined_function);
1732 ASSERT(profile_function != NULL);
1733 // This is the ID of a |Frame| that was added to the interned data table in
1734 // |ProfilerService::PrintProfilePerfetto|. See the comments in that method
1735 // for more details.
1736 callstack->add_frame_ids(profile_function->table_index() + 1);
1737 }
1738}
1739#endif // defined(SUPPORT_PERFETTO) && !defined(PRODUCT)
1740
1741void Profile::ProcessInlinedFunctionFrameJSON(
1742 JSONArray* stack,
1743 const Function* inlined_function) {
1744 ProfileFunction* function = functions_->LookupOrAdd(*inlined_function);
1745 ASSERT(function != nullptr);
1746 PrintFunctionFrameIndexJSON(stack, function);
1747}
1748
1749void Profile::PrintFunctionFrameIndexJSON(JSONArray* stack,
1750 ProfileFunction* function) {
1751 stack->AddValue64(function->table_index());
1752}
1753
1754void Profile::PrintCodeFrameIndexJSON(JSONArray* stack,
1755 ProcessedSample* sample,
1756 intptr_t frame_index) {
1757 ProfileCode* code =
1758 GetCodeFromPC(sample->At(frame_index), sample->timestamp());
1759 const AbstractCode codeObj = code->code();
1760
1761 // Ignore stub code objects.
1762 if (codeObj.IsStubCode() || codeObj.IsAllocationStubCode() ||
1763 codeObj.IsTypeTestStubCode()) {
1764 return;
1765 }
1766 stack->AddValue64(code->code_table_index());
1767}
1768
1769void Profile::PrintSamplesJSON(JSONObject* obj, bool code_samples) {
1770 JSONArray samples(obj, "samples");
1771 // Note that |cache| is zone-allocated, so it does not need to be deallocated
1772 // manually.
1773 auto* cache = new ProfileCodeInlinedFunctionsCache();
1774 for (intptr_t sample_index = 0; sample_index < samples_->length();
1775 sample_index++) {
1776 JSONObject sample_obj(&samples);
1777 ProcessedSample* sample = samples_->At(sample_index);
1778 sample_obj.AddProperty64("tid", OSThread::ThreadIdToIntPtr(sample->tid()));
1779 sample_obj.AddPropertyTimeMicros("timestamp", sample->timestamp());
1780 sample_obj.AddProperty("vmTag", VMTag::TagName(sample->vm_tag()));
1781 if (VMTag::IsNativeEntryTag(sample->vm_tag())) {
1782 sample_obj.AddProperty("nativeEntryTag", true);
1783 }
1784 if (VMTag::IsRuntimeEntryTag(sample->vm_tag())) {
1785 sample_obj.AddProperty("runtimeEntryTag", true);
1786 }
1787 if (UserTags::IsUserTag(sample->user_tag())) {
1788 sample_obj.AddProperty("userTag", UserTags::TagName(sample->user_tag()));
1789 }
1790 if (sample->truncated()) {
1791 sample_obj.AddProperty("truncated", true);
1792 }
1793 {
1794 JSONArray stack(&sample_obj, "stack");
1795 // Walk the sampled PCs.
1796 for (intptr_t frame_index = 0; frame_index < sample->length();
1797 frame_index++) {
1798 ASSERT(sample->At(frame_index) != 0);
1799 ProcessSampleFrameJSON(&stack, cache, sample, frame_index);
1800 }
1801 }
1802 if (code_samples) {
1803 JSONArray stack(&sample_obj, "_codeStack");
1804 for (intptr_t frame_index = 0; frame_index < sample->length();
1805 frame_index++) {
1806 ASSERT(sample->At(frame_index) != 0);
1807 PrintCodeFrameIndexJSON(&stack, sample, frame_index);
1808 }
1809 }
1810 if (sample->IsAllocationSample()) {
1811 sample_obj.AddProperty64("classId", sample->allocation_cid());
1812 sample_obj.AddProperty64("identityHashCode",
1813 sample->allocation_identity_hash());
1814 }
1815 }
1816}
1817
1818#if defined(SUPPORT_PERFETTO) && !defined(PRODUCT)
1819void Profile::PrintSamplesPerfetto(
1820 JSONBase64String* jsonBase64String,
1821 protozero::HeapBuffered<perfetto::protos::pbzero::TracePacket>*
1822 packet_ptr) {
1823 ASSERT(jsonBase64String != nullptr);
1824 ASSERT(packet_ptr != nullptr);
1825 auto& packet = *packet_ptr;
1826
1827 // Note that |cache| is zone-allocated, so it does not need to be deallocated
1828 // manually.
1829 auto* cache = new ProfileCodeInlinedFunctionsCache();
1830 for (intptr_t sample_index = 0; sample_index < samples_->length();
1831 ++sample_index) {
1832 ProcessedSample* sample = samples_->At(sample_index);
1833
1834 perfetto_utils::SetTrustedPacketSequenceId(packet.get());
1835 // We set this flag to indicate that this packet reads from the interned
1836 // data table.
1837 packet->set_sequence_flags(
1840 perfetto_utils::SetTimestampAndMonotonicClockId(packet.get(),
1841 sample->timestamp());
1842
1843 const intptr_t callstack_iid = sample_index + 1;
1844 // Add a |Callstack| to the interned data table that represents the stack
1845 // trace stored in |sample|.
1847 packet->set_interned_data()->add_callstacks();
1848 callstack->set_iid(callstack_iid);
1849 // Walk the sampled PCs.
1850 for (intptr_t frame_index = sample->length() - 1; frame_index >= 0;
1851 --frame_index) {
1852 ASSERT(sample->At(frame_index) != 0);
1853 ProcessSampleFramePerfetto(callstack, cache, sample, frame_index);
1854 }
1855
1856 // Populate |packet| with a |PerfSample| that is linked to the |Callstack|
1857 // that we populated above.
1859 *packet->set_perf_sample();
1860 perf_sample.set_pid(OS::ProcessId());
1861 perf_sample.set_tid(OSThread::ThreadIdToIntPtr(sample->tid()));
1862 perf_sample.set_callstack_iid(callstack_iid);
1863
1864 perfetto_utils::AppendPacketToJSONBase64String(jsonBase64String, &packet);
1865 packet.Reset();
1866 }
1867}
1868#endif // defined(SUPPORT_PERFETTO) && !defined(PRODUCT)
1869
1871 return (functions_ != nullptr) ? functions_->Lookup(function) : nullptr;
1872}
1873
1874void Profile::PrintProfileJSON(JSONStream* stream, bool include_code_samples) {
1875 JSONObject obj(stream);
1876 PrintProfileJSON(&obj, include_code_samples);
1877}
1878
1880 bool include_code_samples,
1881 bool is_event) {
1882 ScopeTimer sw("Profile::PrintProfileJSON", FLAG_trace_profiler);
1883 Thread* thread = Thread::Current();
1884 if (is_event) {
1885 obj->AddProperty("type", "CpuSamplesEvent");
1886 } else {
1887 obj->AddProperty("type", "CpuSamples");
1888 }
1889 PrintHeaderJSON(obj);
1890 if (include_code_samples) {
1891 JSONArray codes(obj, "_codes");
1892 for (intptr_t i = 0; i < live_code_->length(); i++) {
1893 ProfileCode* code = live_code_->At(i);
1894 ASSERT(code != nullptr);
1895 code->PrintToJSONArray(&codes);
1896 thread->CheckForSafepoint();
1897 }
1898 for (intptr_t i = 0; i < dead_code_->length(); i++) {
1899 ProfileCode* code = dead_code_->At(i);
1900 ASSERT(code != nullptr);
1901 code->PrintToJSONArray(&codes);
1902 thread->CheckForSafepoint();
1903 }
1904 for (intptr_t i = 0; i < tag_code_->length(); i++) {
1905 ProfileCode* code = tag_code_->At(i);
1906 ASSERT(code != nullptr);
1907 code->PrintToJSONArray(&codes);
1908 thread->CheckForSafepoint();
1909 }
1910 }
1911
1912 {
1913 JSONArray functions(obj, "functions");
1914 for (intptr_t i = 0; i < functions_->length(); i++) {
1915 ProfileFunction* function = functions_->At(i);
1916 ASSERT(function != nullptr);
1917 function->PrintToJSONArray(&functions, is_event);
1918 thread->CheckForSafepoint();
1919 }
1920 }
1921 PrintSamplesJSON(obj, include_code_samples);
1922 thread->CheckForSafepoint();
1923}
1924
1925#if defined(SUPPORT_PERFETTO) && !defined(PRODUCT)
1926void Profile::PrintProfilePerfetto(JSONStream* js) {
1927 ScopeTimer sw("Profile::PrintProfilePerfetto", FLAG_trace_profiler);
1928 Thread* thread = Thread::Current();
1929
1930 JSONObject jsobj_topLevel(js);
1931 jsobj_topLevel.AddProperty("type", "PerfettoCpuSamples");
1932 PrintHeaderJSON(&jsobj_topLevel);
1933
1934 js->AppendSerializedObject("\"samples\":");
1935 JSONBase64String jsonBase64String(js);
1936
1937 // We allocate one heap-buffered packet and continuously follow a cycle of
1938 // resetting the buffer and writing its contents.
1939 protozero::HeapBuffered<perfetto::protos::pbzero::TracePacket> packet;
1940
1941 perfetto_utils::PopulateClockSnapshotPacket(packet.get());
1942 perfetto_utils::AppendPacketToJSONBase64String(&jsonBase64String, &packet);
1943 packet.Reset();
1944
1945 perfetto_utils::SetTrustedPacketSequenceId(packet.get());
1946 // We use |PerfSample|s to serialize our CPU sample information. Each
1947 // |PerfSample| must be linked to a |Callstack| in the interned data table.
1948 // When serializing a new profile, we set |SEQ_INCREMENTAL_STATE_CLEARED| on
1949 // the first packet to clear the interned data table and avoid conflicts with
1950 // any profiles that are combined with this one.
1951 // See "runtime/vm/protos/perfetto/trace/interned_data/interned_data.proto"
1952 // a detailed description of how the interned data table works.
1953 packet->set_sequence_flags(
1955 SEQ_INCREMENTAL_STATE_CLEARED);
1956
1958 *packet->set_interned_data();
1959
1960 // The Perfetto trace viewer will not be able to parse our trace if the
1961 // mapping with iid 0 is not declared.
1962 perfetto::protos::pbzero::Mapping& mapping = *interned_data.add_mappings();
1963 mapping.set_iid(0);
1964
1965 for (intptr_t i = 0; i < functions_->length(); ++i) {
1966 ProfileFunction* function = functions_->At(i);
1967 ASSERT(function != NULL);
1968 const intptr_t common_iid = function->table_index() + 1;
1969
1971 *interned_data.add_function_names();
1972 function_name.set_iid(common_iid);
1973 function_name.set_str(function->Name());
1974
1975 const char* resolved_script_url = function->ResolvedScriptUrl();
1976 if (resolved_script_url != nullptr) {
1978 *interned_data.add_mapping_paths();
1979 mapping_path.set_iid(common_iid);
1980 const Script& script_handle =
1981 Script::Handle(function->function()->script());
1982 TokenPosition token_pos = function->function()->token_pos();
1983 if (!script_handle.IsNull() && token_pos.IsReal()) {
1984 intptr_t line = -1;
1985 intptr_t column = -1;
1986 script_handle.GetTokenLocation(token_pos, &line, &column);
1987 intptr_t path_with_location_buffer_size =
1988 Utils::SNPrint(nullptr, 0, "%s:%" Pd ":%" Pd, resolved_script_url,
1989 line, column) +
1990 1;
1991 std::unique_ptr<char[]> path_with_location =
1992 std::make_unique<char[]>(path_with_location_buffer_size);
1993 Utils::SNPrint(path_with_location.get(), path_with_location_buffer_size,
1994 "%s:%" Pd ":%" Pd, resolved_script_url, line, column);
1995 mapping_path.set_str(path_with_location.get());
1996 } else {
1997 mapping_path.set_str(resolved_script_url);
1998 }
1999
2000 // TODO(derekx): Check if using profiled_frame_symbols instead of mapping
2001 // provides any benefit.
2003 *interned_data.add_mappings();
2004 mapping.set_iid(common_iid);
2005 mapping.add_path_string_ids(common_iid);
2006 }
2007
2008 // Add a |Frame| to the interned data table that is linked to |function|'s
2009 // name and source location (through the interned data table). A Perfetto
2010 // |Callstack| consists of a stack of |Frame|s, so the |Callstack|s
2011 // populated by |PrintSamplesPerfetto| will refer to these |Frame|s.
2013 frame.set_iid(common_iid);
2014 frame.set_function_name_id(common_iid);
2015 frame.set_mapping_id(resolved_script_url == nullptr ? 0 : common_iid);
2016
2017 thread->CheckForSafepoint();
2018 }
2019 perfetto_utils::AppendPacketToJSONBase64String(&jsonBase64String, &packet);
2020 packet.Reset();
2021
2022 PrintSamplesPerfetto(&jsonBase64String, &packet);
2023 thread->CheckForSafepoint();
2024}
2025#endif // defined(SUPPORT_PERFETTO) && !defined(PRODUCT)
2026
2027void ProfilerService::PrintCommonImpl(PrintFormat format,
2028 Thread* thread,
2029 JSONStream* js,
2030 SampleFilter* filter,
2031 SampleBlockBuffer* buffer,
2032 bool include_code_samples) {
2033 // We should bail out in service.cc if the profiler is disabled.
2034 ASSERT(buffer != nullptr);
2035
2036 StackZone zone(thread);
2037 Profile profile;
2038 profile.Build(thread, thread->isolate(), filter, buffer);
2039
2040 if (format == PrintFormat::JSON) {
2041 profile.PrintProfileJSON(js, include_code_samples);
2042 } else if (format == PrintFormat::Perfetto) {
2043#if defined(SUPPORT_PERFETTO) && !defined(PRODUCT)
2044 // This branch will never be reached when SUPPORT_PERFETTO is not defined or
2045 // when PRODUCT is defined, because |PrintPerfetto| is not defined when
2046 // SUPPORT_PERFETTO is not defined or when PRODUCT is defined.
2047 profile.PrintProfilePerfetto(js);
2048#else
2049 UNREACHABLE();
2050
2051#endif // defined(SUPPORT_PERFETTO) && !defined(PRODUCT)
2052 }
2053}
2054
2056 public:
2058 intptr_t thread_task_mask,
2059 int64_t time_origin_micros,
2060 int64_t time_extent_micros)
2062 thread_task_mask,
2063 time_origin_micros,
2064 time_extent_micros) {}
2065
2066 bool FilterSample(Sample* sample) { return !sample->is_allocation_sample(); }
2067};
2068
2069void ProfilerService::PrintCommon(PrintFormat format,
2070 JSONStream* js,
2071 int64_t time_origin_micros,
2072 int64_t time_extent_micros,
2073 bool include_code_samples) {
2074 Thread* thread = Thread::Current();
2075 const Isolate* isolate = thread->isolate();
2076 NoAllocationSampleFilter filter(isolate->main_port(), Thread::kMutatorTask,
2077 time_origin_micros, time_extent_micros);
2078
2079 PrintCommonImpl(format, thread, js, &filter, Profiler::sample_block_buffer(),
2080 include_code_samples);
2081}
2082
2084 int64_t time_origin_micros,
2085 int64_t time_extent_micros,
2086 bool include_code_samples) {
2087 PrintCommon(PrintFormat::JSON, js, time_origin_micros, time_extent_micros,
2088 include_code_samples);
2089}
2090
2091#if defined(SUPPORT_PERFETTO) && !defined(PRODUCT)
2092void ProfilerService::PrintPerfetto(JSONStream* js,
2093 int64_t time_origin_micros,
2094 int64_t time_extent_micros) {
2095 PrintCommon(PrintFormat::Perfetto, js, time_origin_micros,
2096 time_extent_micros);
2097}
2098#endif // defined(SUPPORT_PERFETTO) && !defined(PRODUCT)
2099
2101 public:
2103 intptr_t thread_task_mask,
2104 int64_t time_origin_micros,
2105 int64_t time_extent_micros)
2107 thread_task_mask,
2108 time_origin_micros,
2109 time_extent_micros) {}
2110
2111 bool FilterSample(Sample* sample) { return sample->is_allocation_sample(); }
2112};
2113
2115 int64_t time_origin_micros,
2116 int64_t time_extent_micros) {
2117 Thread* thread = Thread::Current();
2118 Isolate* isolate = thread->isolate();
2120 time_origin_micros, time_extent_micros);
2121 PrintCommonImpl(PrintFormat::JSON, thread, stream, &filter,
2123}
2124
2126 public:
2128 const Class& cls,
2129 intptr_t thread_task_mask,
2130 int64_t time_origin_micros,
2131 int64_t time_extent_micros)
2133 thread_task_mask,
2134 time_origin_micros,
2135 time_extent_micros),
2136 cls_(Class::Handle(cls.ptr())) {
2137 ASSERT(!cls_.IsNull());
2138 }
2139
2140 bool FilterSample(Sample* sample) {
2141 return sample->is_allocation_sample() &&
2142 (sample->allocation_cid() == cls_.id());
2143 }
2144
2145 private:
2146 const Class& cls_;
2147};
2148
2150 const Class& cls,
2151 int64_t time_origin_micros,
2152 int64_t time_extent_micros) {
2153 Thread* thread = Thread::Current();
2154 Isolate* isolate = thread->isolate();
2155 ClassAllocationSampleFilter filter(isolate->main_port(), cls,
2156 Thread::kMutatorTask, time_origin_micros,
2157 time_extent_micros);
2158 PrintCommonImpl(PrintFormat::JSON, thread, stream, &filter,
2160}
2161
2163 SampleBlockBuffer* sample_block_buffer = Profiler::sample_block_buffer();
2164 if (sample_block_buffer == nullptr) {
2165 return;
2166 }
2167
2168 Thread* thread = Thread::Current();
2169 Isolate* isolate = thread->isolate();
2170
2171 // Disable thread interrupts while processing the buffer.
2172 DisableThreadInterruptsScope dtis(thread);
2173
2174 ClearProfileVisitor clear_profile(isolate);
2175 sample_block_buffer->VisitSamples(&clear_profile);
2176}
2177
2178#endif // !PRODUCT
2179
2180} // namespace dart
TArray< uint32_t > Key
SI F table(const skcms_Curve *curve, F v)
#define UNREACHABLE()
Definition assert.h:248
bool IsNull() const
Definition profiler.h:489
const char * QualifiedName() const
Definition profiler.h:448
const Object * handle() const
Definition profiler.h:420
ObjectPtr owner() const
Definition profiler.h:481
bool is_optimized() const
Definition profiler.h:492
AllocationSampleFilter(Dart_Port port, intptr_t thread_task_mask, int64_t time_origin_micros, int64_t time_extent_micros)
bool FilterSample(Sample *sample)
KeyValueTrait::Value LookupValue(typename KeyValueTrait::Key key) const
Definition hash_map.h:159
void Insert(typename KeyValueTrait::Pair kv)
Definition hash_map.h:230
void Add(const T &value)
intptr_t length() const
ClassAllocationSampleFilter(Dart_Port port, const Class &cls, intptr_t thread_task_mask, int64_t time_origin_micros, int64_t time_extent_micros)
intptr_t id() const
Definition object.h:1235
const CodeDescriptor * At(intptr_t index) const
Definition profiler.h:553
static bool is_visible(FunctionPtr f)
Definition object.h:4163
void AddFunctionServiceId(const JSONObject &obj) const
ScriptPtr script() const
Definition object.cc:10939
StringPtr QualifiedUserVisibleName() const
Definition object.cc:11081
bool CodeContains(uword addr) const
Definition heap.cc:250
Heap * heap() const
Definition isolate.h:295
IsolateGroup * group() const
Definition isolate.h:990
Dart_Port main_port() const
Definition isolate.h:1001
void AddValue(bool b) const
void AddValueF(const char *format,...) const PRINTF_ATTRIBUTE(2
void AddProperty64(const char *name, int64_t i) const
void AddProperty(const char *name, bool b) const
void AddPropertyTimeMicros(const char *name, int64_t micros) const
static void FreeSymbolName(char *name)
static bool LookupSharedObject(uword pc, uword *dso_base=nullptr, char **dso_name=nullptr)
static char * LookupSymbolName(uword pc, uword *start)
NoAllocationSampleFilter(Dart_Port port, intptr_t thread_task_mask, int64_t time_origin_micros, int64_t time_extent_micros)
static intptr_t ThreadIdToIntPtr(ThreadId id)
static void static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
static intptr_t ProcessId()
bool IsNull() const
Definition object.h:363
static Object & Handle()
Definition object.h:407
static Object & ZoneHandle()
Definition object.h:419
ProcessedSample * At(intptr_t index)
Definition profiler.h:898
intptr_t length() const
Definition profiler.h:896
const CodeLookupTable & code_lookup_table() const
Definition profiler.h:900
bool first_frame_executing() const
Definition profiler.h:860
int64_t timestamp() const
Definition profiler.h:826
bool IsAllocationSample() const
Definition profiler.h:853
ProfileBuilder(Thread *thread, Isolate *isolate, SampleFilter *filter, SampleBlockBuffer *sample_buffer, Profile *profile)
void Tick(bool exclusive)
intptr_t exclusive_ticks() const
intptr_t inclusive_ticks() const
void Get(uword pc, const Code &code, ProcessedSample *sample, intptr_t frame_index, GrowableArray< const Function * > **inlined_functions, GrowableArray< TokenPosition > **inlined_token_positions, TokenPosition *token_position)
ProfileCode * At(intptr_t index) const
intptr_t InsertCode(ProfileCode *new_code)
ProfileCode * FindCodeForPC(uword pc) const
intptr_t FindCodeIndexForPC(uword pc) const
void ExpandUpper(uword end)
void TruncateLower(uword start)
intptr_t inclusive_ticks() const
static const char * KindToCString(Kind kind)
ProfileFunction * function() const
void TruncateUpper(uword end)
void SetName(const char *name)
intptr_t exclusive_ticks() const
bool IsOptimizedDart() const
bool Contains(uword pc) const
void ExpandLower(uword start)
void PrintToJSONArray(JSONArray *codes)
bool Overlaps(const ProfileCode *other) const
void GenerateAndSetSymbolName(const char *prefix)
ProfileCode(Kind kind, uword start, uword end, int64_t timestamp, const AbstractCode code)
const char * name() const
intptr_t code_table_index() const
ProfileFunctionSourcePosition(TokenPosition token_pos)
ProfileFunction * GetUnknown()
ProfileFunction * Lookup(const Function &function)
ProfileFunction * AddTag(uword tag_id, const char *name)
ProfileFunction * LookupOrAdd(const Function &function)
ProfileFunction * AddNative(uword start_address, const char *name)
ProfileFunction * At(intptr_t i) const
ProfileFunction * AddStub(uword start_address, const char *name)
ProfileFunction(Kind kind, const char *name, const Function &function, const intptr_t table_index)
void Tick(bool exclusive, intptr_t inclusive_serial, TokenPosition token_position)
const char * name() const
void TickSourcePosition(TokenPosition token_position, bool exclusive)
intptr_t inclusive_ticks() const
const char * ResolvedScriptUrl() const
void PrintToJSONArray(JSONArray *functions, bool print_only_ids=false)
bool GetSinglePosition(ProfileFunctionSourcePosition *pfsp)
intptr_t exclusive_ticks() const
const char * Name() const
static const char * KindToCString(Kind kind)
intptr_t sample_count() const
ProcessedSample * SampleAt(intptr_t index)
ProfileCode * GetCodeFromPC(uword pc, int64_t timestamp)
intptr_t NumFunctions() const
void Build(Thread *thread, Isolate *isolate, SampleFilter *filter, SampleBlockBuffer *sample_block_buffer)
ProfileCode * GetCode(intptr_t index)
int64_t min_time() const
void PrintProfileJSON(JSONStream *stream, bool include_code_samples)
ProfileFunction * FindFunction(const Function &function)
ProfileFunction * GetFunction(intptr_t index)
int64_t GetTimeSpan() const
static void PrintAllocationJSON(JSONStream *stream, const Class &cls, int64_t time_origin_micros, int64_t time_extent_micros)
static void PrintJSON(JSONStream *stream, int64_t time_origin_micros, int64_t time_extent_micros, bool include_code_samples)
static ProfilerCounters counters()
Definition profiler.h:91
static SampleBlockBuffer * sample_block_buffer()
Definition profiler.h:67
ProcessedSampleBuffer * BuildProcessedSampleBuffer(Isolate *isolate, SampleFilter *filter, ProcessedSampleBuffer *buffer=nullptr)
Definition profiler.cc:755
void VisitSamples(SampleVisitor *visitor)
Definition profiler.h:749
Dart_Port port() const
Definition profiler.h:167
bool is_allocation_sample() const
Definition profiler.h:306
intptr_t allocation_cid() const
Definition profiler.h:341
bool GetTokenLocation(const TokenPosition &token_pos, intptr_t *line, intptr_t *column=nullptr) const
Definition object.cc:13330
static const char * ToCString(Thread *thread, StringPtr ptr)
Definition object.cc:24205
Zone * zone() const
static Thread * Current()
Definition thread.h:361
void CheckForSafepoint()
Definition thread.h:1091
Isolate * isolate() const
Definition thread.h:533
static intptr_t CompareForSorting(const TokenPosition &a, const TokenPosition &b)
const char * ToCString() const
static const char * TagName(uword tag_id)
Definition tags.cc:139
static bool IsUserTag(uword tag_id)
Definition tags.h:114
static int SNPrint(char *str, size_t size, const char *format,...) PRINTF_ATTRIBUTE(3
static bool IsExitFrameTag(uword id)
Definition tags.cc:42
static bool IsVMTag(uword id)
Definition tags.h:59
static bool IsNativeEntryTag(uword id)
Definition tags.cc:38
@ kUnoptimizedCodeTagId
Definition tags.h:52
@ kNoneCodeTagId
Definition tags.h:50
@ kInlineStartCodeTagId
Definition tags.h:54
@ kInvalidTagId
Definition tags.h:38
@ kNativeCodeTagId
Definition tags.h:53
@ kInlineEndCodeTagId
Definition tags.h:55
@ kTruncatedTagId
Definition tags.h:48
@ kRootTagId
Definition tags.h:47
@ kOptimizedCodeTagId
Definition tags.h:51
static bool IsRuntimeEntryTag(uword id)
Definition tags.cc:47
static const char * TagName(uword id)
Definition tags.cc:19
ElementType * Alloc(intptr_t length)
void set_str(const uint8_t *data, size_t size)
#define THR_Print(format,...)
Definition log.h:20
int64_t Dart_Port
Definition dart_api.h:1524
#define UNIMPLEMENTED
#define ASSERT(E)
double frame
Definition examples.cpp:31
static bool b
struct MyStruct a[10]
glong glong end
static const uint8_t buffer[]
uint32_t uint32_t * format
#define DECLARE_FLAG(type, name)
Definition flags.h:14
Dart_NativeFunction function
Definition fuchsia.cc:51
size_t length
constexpr int64_t kMaxInt64
Definition globals.h:486
const char *const name
constexpr intptr_t KB
Definition globals.h:528
uintptr_t uword
Definition globals.h:501
const char *const function_name
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified vm service A custom Dart VM Service port The default is to pick a randomly available open port disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode disable vm service Disable mDNS Dart VM Service publication Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set endless trace Enable an endless trace buffer The default is a ring buffer This is useful when very old events need to viewed For during application launch Memory usage will continue to grow indefinitely however Start app with an specific route defined on the framework flutter assets Path to the Flutter assets directory enable service port Allow the VM service to fallback to automatic port selection if binding to a specified port fails trace Trace early application lifecycle Automatically switches to an endless trace buffer trace skia Filters out all Skia trace event categories except those that are specified in this comma separated list dump skp on shader Automatically dump the skp that triggers new shader compilations This is useful for writing custom ShaderWarmUp to reduce jank By this is not enabled to reduce the overhead purge persistent cache
Definition switches.h:191
#define Px
Definition globals.h:410
#define Pd
Definition globals.h:408
Point offset