Flutter Engine
The Flutter Engine
profiler.cc
Go to the documentation of this file.
1// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/profiler.h"
6
8#include "platform/atomic.h"
10#include "platform/utils.h"
11#include "vm/allocation.h"
12#include "vm/code_patcher.h"
13#if !defined(DART_PRECOMPILED_RUNTIME)
15#endif
16#include "vm/debugger.h"
17#include "vm/instructions.h"
18#include "vm/isolate.h"
19#include "vm/json_stream.h"
20#include "vm/lockers.h"
21#include "vm/message_handler.h"
22#include "vm/native_symbol.h"
23#include "vm/object.h"
24#include "vm/os.h"
25#include "vm/profiler_service.h"
26#include "vm/reusable_handles.h"
27#include "vm/signal_handler.h"
28#include "vm/simulator.h"
29#include "vm/stack_frame.h"
30#include "vm/timeline.h"
31#include "vm/version.h"
32
33namespace dart {
34
35static constexpr intptr_t kMaxSamplesPerTick = 4;
36
37DEFINE_FLAG(bool, trace_profiled_isolates, false, "Trace profiled isolates.");
38
40 profile_period,
41 1000,
42 "Time between profiler samples in microseconds. Minimum 50.");
44 max_profile_depth,
46 "Maximum number stack frames walked. Minimum 1. Maximum 255.");
47#if defined(USING_SIMULATOR)
48DEFINE_FLAG(bool, profile_vm, true, "Always collect native stack traces.");
49#else
50DEFINE_FLAG(bool, profile_vm, false, "Always collect native stack traces.");
51#endif
53 profile_vm_allocation,
54 false,
55 "Collect native stack traces when tracing Dart allocations.");
56
58 int,
59 sample_buffer_duration,
60 0,
61 "Defines the size of the profiler sample buffer to contain at least "
62 "N seconds of samples at a given sample rate. If not provided, the "
63 "default is ~4 seconds. Large values will greatly increase memory "
64 "consumption.");
65
66// Include native stack dumping helpers into AOT compiler even in PRODUCT
67// mode. This allows to report more informative errors when gen_snapshot
68// crashes.
69#if !defined(PRODUCT) || defined(DART_PRECOMPILER)
70ProfilerCounters Profiler::counters_ = {};
71
// Prints one symbolized frame line to stderr in the form
// "pc <hex> fp <hex> <name>+<hex offset>". Shared by all lookup paths below.
static void DumpStackFrame(uword pc, uword fp, const char* name, uword offset) {
  OS::PrintErr(" pc 0x%" Pp " fp 0x%" Pp " %s+0x%" Px "\n", pc, fp, name,
               offset);
}
76
// Symbolizes and prints one stack frame to stderr, trying in order:
// native symbol lookup, shared-object lookup, and (JIT only) an unsafe Dart
// Code-object lookup. Falls back to "Unknown symbol".
// NOTE(review): this extraction dropped several lines from this function
// (marked below); restore them from upstream before relying on this text.
void DumpStackFrame(intptr_t frame_index, uword pc, uword fp) {
  uword start = 0;
  // The pc for all frames except the top frame is a return address, which can
  // belong to a different inlining interval than the call. Subtract one to get
  // the symbolization for the call.
  uword lookup_pc = frame_index == 0 ? pc : pc - 1;
  if (auto const name =
          // NOTE(review): dropped line here — presumably the
          // NativeSymbolResolver::LookupSymbolName(lookup_pc, &start) call.
    DumpStackFrame(pc, fp, name, pc - start);
    // NOTE(review): dropped line here — likely frees the symbol name.
    return;
  }

  const char* dso_name;
  uword dso_base;
  if (NativeSymbolResolver::LookupSharedObject(pc, &dso_base, &dso_name)) {
    DumpStackFrame(pc, fp, dso_name, pc - dso_base);
    // NOTE(review): dropped line here — likely frees |dso_name|.
    return;
  }

#if !defined(DART_PRECOMPILED_RUNTIME)
  // This relies on heap iteration, which might fail if we're crashing because
  // of heap corruption. A nested crash symbolizing a JIT frame will prevent
  // seeing all caller frames, so only do this when we aren't able to use the
  // safer StackFrameIterator.
  Thread* thread = Thread::Current();
  bool symbolize_jit_code =
      (thread != nullptr) &&
      // NOTE(review): dropped condition lines here (thread-state checks).
  if (symbolize_jit_code) {
    Code result;
    result = Code::FindCodeUnsafe(lookup_pc);
    if (!result.IsNull()) {
      // NOTE(review): dropped lines here — the DumpStackFrame( call opener
      // and the frame-name argument.
        pc, fp,
        pc - result.PayloadStart());
      return;
    }
  }
#endif

  OS::PrintErr(" pc 0x%" Pp " fp 0x%" Pp " Unknown symbol\n", pc, fp);
}
123
// NOTE(review): the class header line (presumably
// `class ProfilerStackWalker : public ValueObject {`) was dropped by this
// extraction, along with several statement lines marked below.
 public:
  // Base class for stack walkers: appends (pc, fp) pairs into a chain of
  // Sample objects. When |head_sample| is null the walker records nothing
  // and only counts frames (dump mode — TODO confirm against upstream).
  // NOTE(review): the constructor name and its first parameter line
  // (the Dart_Port, bound to port_id_) were dropped here.
    Sample* head_sample,
    SampleBuffer* sample_buffer,
    intptr_t skip_count = 0)
      : port_id_(port_id),
        sample_(head_sample),
        sample_buffer_(sample_buffer),
        skip_count_(skip_count),
        // NOTE(review): dropped initializer line here (a skipped-frame
        // counter, presumably).
        frame_index_(0),
        total_frames_(0) {
    // A head sample and its buffer must be provided together or not at all.
    if (sample_ == nullptr) {
      ASSERT(sample_buffer_ == nullptr);
    } else {
      ASSERT(sample_buffer_ != nullptr);
    }
  }

  // Records one frame; returns false when the walk should stop (depth limit
  // reached or no continuation sample available).
  // NOTE(review): multiple lines dropped inside this method (skip-count
  // handling, SetAt/continuation-reservation calls, truncation marking).
  bool Append(uword pc, uword fp) {
      return true;
    }

    if (sample_ == nullptr) {
      frame_index_++;
      return true;
    }
    if (total_frames_ >= FLAG_max_profile_depth) {
      return false;
    }
    ASSERT(sample_ != nullptr);
    if (new_sample == nullptr) {
      // Could not reserve new sample- mark this as truncated.
      return false;
    }
    frame_index_ = 0;
    sample_ = new_sample;
  }
    frame_index_++;
    return true;
  }

 protected:
  // NOTE(review): member declarations for port_id_, sample_, and
  // sample_buffer_ were dropped by the extraction.
  intptr_t skip_count_;
  intptr_t frame_index_;
};
188
189// The layout of C stack frames.
190#if defined(HOST_ARCH_IA32) || defined(HOST_ARCH_X64) || \
191 defined(HOST_ARCH_ARM) || defined(HOST_ARCH_ARM64)
192// +-------------+
193// | saved IP/LR |
194// +-------------+
195// | saved FP | <- FP
196// +-------------+
197static constexpr intptr_t kHostSavedCallerPcSlotFromFp = 1;
198static constexpr intptr_t kHostSavedCallerFpSlotFromFp = 0;
199#elif defined(HOST_ARCH_RISCV32) || defined(HOST_ARCH_RISCV64)
200// +-------------+
201// | | <- FP
202// +-------------+
203// | saved RA |
204// +-------------+
205// | saved FP |
206// +-------------+
207static constexpr intptr_t kHostSavedCallerPcSlotFromFp = -1;
208static constexpr intptr_t kHostSavedCallerFpSlotFromFp = -2;
209#else
210#error What architecture?
211#endif
212
213// If the VM is compiled without frame pointers (which is the default on
214// recent GCC versions with optimizing enabled) the stack walking code may
215// fail.
216//
218 public:
220 Dart_Port port_id,
221 Sample* sample,
222 SampleBuffer* sample_buffer,
223 uword stack_lower,
224 uword stack_upper,
225 uword pc,
226 uword fp,
227 uword sp,
228 intptr_t skip_count = 0)
229 : ProfilerStackWalker(port_id, sample, sample_buffer, skip_count),
230 counters_(counters),
231 stack_upper_(stack_upper),
232 original_pc_(pc),
233 original_fp_(fp),
234 original_sp_(sp),
235 lower_bound_(stack_lower) {}
236
237 void walk() {
238 Append(original_pc_, original_fp_);
239
240 uword* pc = reinterpret_cast<uword*>(original_pc_);
241 uword* fp = reinterpret_cast<uword*>(original_fp_);
242 uword* previous_fp = fp;
243
244 if (!ValidFramePointer(fp)) {
245 counters_->incomplete_sample_fp_bounds.fetch_add(1);
246 return;
247 }
248
249 while (true) {
250 pc = CallerPC(fp);
251 previous_fp = fp;
252 fp = CallerFP(fp);
253
254 if (fp == nullptr) {
255 return;
256 }
257
258 if (fp <= previous_fp) {
259 // Frame pointer did not move to a higher address.
260 counters_->incomplete_sample_fp_step.fetch_add(1);
261 return;
262 }
263
264 if (!ValidFramePointer(fp)) {
265 // Frame pointer is outside of isolate stack boundary.
266 counters_->incomplete_sample_fp_bounds.fetch_add(1);
267 return;
268 }
269
270 const uword pc_value = reinterpret_cast<uword>(pc);
271 if ((pc_value + 1) < pc_value) {
272 // It is not uncommon to encounter an invalid pc as we
273 // traverse a stack frame. Most of these we can tolerate. If
274 // the pc is so large that adding one to it will cause an
275 // overflow it is invalid and it will cause headaches later
276 // while we are building the profile. Discard it.
277 counters_->incomplete_sample_bad_pc.fetch_add(1);
278 return;
279 }
280
281 // Move the lower bound up.
282 lower_bound_ = reinterpret_cast<uword>(fp);
283
284 if (!Append(pc_value, reinterpret_cast<uword>(fp))) {
285 return;
286 }
287 }
288 }
289
290 private:
291 uword* CallerPC(uword* fp) const {
292 ASSERT(fp != nullptr);
293 uword* caller_pc_ptr = fp + kHostSavedCallerPcSlotFromFp;
294 // This may actually be uninitialized, by design (see class comment above).
295 MSAN_UNPOISON(caller_pc_ptr, kWordSize);
296 ASAN_UNPOISON(caller_pc_ptr, kWordSize);
297 return reinterpret_cast<uword*>(*caller_pc_ptr);
298 }
299
300 uword* CallerFP(uword* fp) const {
301 ASSERT(fp != nullptr);
302 uword* caller_fp_ptr = fp + kHostSavedCallerFpSlotFromFp;
303 // This may actually be uninitialized, by design (see class comment above).
304 MSAN_UNPOISON(caller_fp_ptr, kWordSize);
305 ASAN_UNPOISON(caller_fp_ptr, kWordSize);
306 return reinterpret_cast<uword*>(*caller_fp_ptr);
307 }
308
309 bool ValidFramePointer(uword* fp) const {
310 if (fp == nullptr) {
311 return false;
312 }
313 uword cursor = reinterpret_cast<uword>(fp);
314 cursor += sizeof(fp);
315 bool r = (cursor >= lower_bound_) && (cursor < stack_upper_);
316 return r;
317 }
318
319 ProfilerCounters* const counters_;
320 const uword stack_upper_;
321 const uword original_pc_;
322 const uword original_fp_;
323 const uword original_sp_;
324 uword lower_bound_;
325};
326
327static bool ValidateThreadStackBounds(uintptr_t fp,
328 uintptr_t sp,
329 uword stack_lower,
330 uword stack_upper) {
331 if (stack_lower >= stack_upper) {
332 // Stack boundary is invalid.
333 return false;
334 }
335
336 if ((sp < stack_lower) || (sp >= stack_upper)) {
337 // Stack pointer is outside thread's stack boundary.
338 return false;
339 }
340
341 if ((fp < stack_lower) || (fp >= stack_upper)) {
342 // Frame pointer is outside threads's stack boundary.
343 return false;
344 }
345
346 return true;
347}
348
349#if !defined(PRODUCT)
350// Get |thread|'s stack boundary and verify that |sp| and |fp| are within
351// it. Return |false| if anything looks suspicious.
353 Thread* thread,
354 uintptr_t fp,
355 uintptr_t sp,
356 uword* stack_lower,
357 uword* stack_upper) {
358 ASSERT(os_thread != nullptr);
359 ASSERT(stack_lower != nullptr);
360 ASSERT(stack_upper != nullptr);
361
362#if defined(USING_SIMULATOR)
363 const bool use_simulator_stack_bounds =
364 thread != nullptr && thread->IsExecutingDartCode();
365 if (use_simulator_stack_bounds) {
366 Isolate* isolate = thread->isolate();
367 ASSERT(isolate != nullptr);
368 Simulator* simulator = isolate->simulator();
369 *stack_lower = simulator->stack_limit();
370 *stack_upper = simulator->stack_base();
371 }
372#else
373 const bool use_simulator_stack_bounds = false;
374#endif // defined(USING_SIMULATOR)
375
376 if (!use_simulator_stack_bounds) {
377 *stack_lower = os_thread->stack_limit();
378 *stack_upper = os_thread->stack_base();
379 }
380
381 if ((*stack_lower == 0) || (*stack_upper == 0)) {
382 return false;
383 }
384
385 if (!use_simulator_stack_bounds && (sp > *stack_lower)) {
386 // The stack pointer gives us a tighter lower bound.
387 *stack_lower = sp;
388 }
389
390 return ValidateThreadStackBounds(fp, sp, *stack_lower, *stack_upper);
391}
392#endif // !defined(PRODUCT)
393
395 uintptr_t sp,
396 uword* stack_lower,
397 uword* stack_upper) {
398 ASSERT(stack_lower != nullptr);
399 ASSERT(stack_upper != nullptr);
400
401 if (!OSThread::GetCurrentStackBounds(stack_lower, stack_upper)) {
402 return false;
403 }
404
405 if ((*stack_lower == 0) || (*stack_upper == 0)) {
406 return false;
407 }
408
409 if (sp > *stack_lower) {
410 // The stack pointer gives us a tighter lower bound.
411 *stack_lower = sp;
412 }
413
414 return ValidateThreadStackBounds(fp, sp, *stack_lower, *stack_upper);
415}
416
417void Profiler::DumpStackTrace(void* context) {
418 if (context == nullptr) {
419 DumpStackTrace(/*for_crash=*/true);
420 return;
421 }
422#if defined(DART_HOST_OS_LINUX) || defined(DART_HOST_OS_MACOS) || \
423 defined(DART_HOST_OS_ANDROID)
424 ucontext_t* ucontext = reinterpret_cast<ucontext_t*>(context);
425 mcontext_t mcontext = ucontext->uc_mcontext;
429 DumpStackTrace(sp, fp, pc, /*for_crash=*/true);
430#elif defined(DART_HOST_OS_WINDOWS)
431 CONTEXT* ctx = reinterpret_cast<CONTEXT*>(context);
432#if defined(HOST_ARCH_IA32)
433 uword pc = static_cast<uword>(ctx->Eip);
434 uword fp = static_cast<uword>(ctx->Ebp);
435 uword sp = static_cast<uword>(ctx->Esp);
436#elif defined(HOST_ARCH_X64)
437 uword pc = static_cast<uword>(ctx->Rip);
438 uword fp = static_cast<uword>(ctx->Rbp);
439 uword sp = static_cast<uword>(ctx->Rsp);
440#elif defined(HOST_ARCH_ARM)
441 uword pc = static_cast<uword>(ctx->Pc);
442 uword fp = static_cast<uword>(ctx->R11);
443 uword sp = static_cast<uword>(ctx->Sp);
444#elif defined(HOST_ARCH_ARM64)
445 uword pc = static_cast<uword>(ctx->Pc);
446 uword fp = static_cast<uword>(ctx->Fp);
447 uword sp = static_cast<uword>(ctx->Sp);
448#else
449#error Unsupported architecture.
450#endif
451 DumpStackTrace(sp, fp, pc, /*for_crash=*/true);
452#else
453// TODO(fschneider): Add support for more platforms.
454// Do nothing on unsupported platforms.
455#endif
456}
457
458void Profiler::DumpStackTrace(bool for_crash) {
459 uintptr_t sp = OSThread::GetCurrentStackPointer();
460 uintptr_t fp = 0;
461 uintptr_t pc = OS::GetProgramCounter();
462
464
465 DumpStackTrace(sp, fp, pc, for_crash);
466}
467
468static void DumpCompilerState(Thread* thread) {
469#if !defined(DART_PRECOMPILED_RUNTIME)
470 if (thread != nullptr && thread->execution_state() == Thread::kThreadInVM &&
471 thread->HasCompilerState()) {
472 thread->compiler_state().ReportCrash();
473 }
474#endif
475}
476
477void Profiler::DumpStackTrace(uword sp, uword fp, uword pc, bool for_crash) {
478 if (for_crash) {
479 // Allow only one stack trace to prevent recursively printing stack traces
480 // if we hit an assert while printing the stack.
481 static RelaxedAtomic<uintptr_t> started_dump = 0;
482 if (started_dump.fetch_add(1u) != 0) {
483 OS::PrintErr("Aborting reentrant request for stack trace.\n");
484 return;
485 }
486 }
487
488 auto thread = Thread::Current(); // nullptr if no current isolate.
489 auto isolate = thread == nullptr ? nullptr : thread->isolate();
490 auto isolate_group = thread == nullptr ? nullptr : thread->isolate_group();
491 auto source = isolate_group == nullptr ? nullptr : isolate_group->source();
492 auto vm_source =
493 Dart::vm_isolate() == nullptr ? nullptr : Dart::vm_isolate()->source();
494 const char* isolate_group_name =
495 isolate_group == nullptr ? "(nil)" : isolate_group->source()->name;
496 const char* isolate_name = isolate == nullptr ? "(nil)" : isolate->name();
497#ifdef SUPPORT_TIMELINE
498 const intptr_t thread_id =
499 OSThread::ThreadIdToIntPtr(OSThread::GetCurrentThreadTraceId());
500#else
501 const intptr_t thread_id = -1;
502#endif
503 OS::PrintErr("version=%s\n", Version::String());
504 OS::PrintErr("pid=%" Pd ", thread=%" Pd
505 ", isolate_group=%s(%p), isolate=%s(%p)\n",
506 static_cast<intptr_t>(OS::ProcessId()), thread_id,
507 isolate_group_name, isolate_group, isolate_name, isolate);
508#if defined(DART_COMPRESSED_POINTERS)
509 const char kCompressedPointers[] = "yes";
510#else
511 const char kCompressedPointers[] = "no";
512#endif
513#if defined(USING_SIMULATOR)
514 const char kUsingSimulator[] = "yes";
515#else
516 const char kUsingSimulator[] = "no";
517#endif
518 OS::PrintErr("os=%s, arch=%s, comp=%s, sim=%s\n", kHostOperatingSystemName,
519 kTargetArchitectureName, kCompressedPointers, kUsingSimulator);
520 OS::PrintErr("isolate_instructions=%" Px ", vm_instructions=%" Px "\n",
521 source == nullptr
522 ? 0
523 : reinterpret_cast<uword>(source->snapshot_instructions),
524 vm_source == nullptr
525 ? 0
526 : reinterpret_cast<uword>(vm_source->snapshot_instructions));
527 OS::PrintErr("fp=%" Px ", sp=%" Px ", pc=%" Px "\n", fp, sp, pc);
528
529 uword stack_lower = 0;
530 uword stack_upper = 0;
531 if (!GetAndValidateCurrentThreadStackBounds(fp, sp, &stack_lower,
532 &stack_upper)) {
534 "Stack dump aborted because GetAndValidateThreadStackBounds failed.\n");
535 if (pc != 0) { // At the very least dump the top frame.
536 DumpStackFrame(0, pc, fp);
537 }
538 DumpCompilerState(thread);
539 return;
540 }
541
542 ProfilerNativeStackWalker native_stack_walker(
543 &counters_, ILLEGAL_PORT, nullptr, nullptr, stack_lower, stack_upper, pc,
544 fp, sp, /*skip_count=*/0);
545 native_stack_walker.walk();
546 OS::PrintErr("-- End of DumpStackTrace\n");
547
548 if (thread != nullptr) {
549 if (thread->execution_state() == Thread::kThreadInNative) {
550 TransitionNativeToVM transition(thread);
552 } else if (thread->execution_state() == Thread::kThreadInVM) {
554 }
555 }
556
557 DumpCompilerState(thread);
558}
559#endif // !defined(PRODUCT) || defined(DART_PRECOMPILER)
560
561#ifndef PRODUCT
562
563RelaxedAtomic<bool> Profiler::initialized_ = false;
564SampleBlockBuffer* Profiler::sample_block_buffer_ = nullptr;
565
566bool SampleBlockProcessor::initialized_ = false;
567bool SampleBlockProcessor::shutdown_ = false;
568bool SampleBlockProcessor::thread_running_ = false;
569ThreadJoinId SampleBlockProcessor::processor_thread_id_ =
571Monitor* SampleBlockProcessor::monitor_ = nullptr;
572
574 // Place some sane restrictions on user controlled flags.
575 SetSampleDepth(FLAG_max_profile_depth);
576 if (!FLAG_profiler) {
577 return;
578 }
579 ASSERT(!initialized_);
580 SetSamplePeriod(FLAG_profile_period);
581 // The profiler may have been shutdown previously, in which case the sample
582 // buffer will have already been initialized.
583 if (sample_block_buffer_ == nullptr) {
584 intptr_t num_blocks = CalculateSampleBufferCapacity();
585 sample_block_buffer_ = new SampleBlockBuffer(num_blocks);
586 }
591 initialized_ = true;
592}
593
595 public:
597 virtual ~SampleBlockCleanupVisitor() = default;
598
599 void VisitIsolate(Isolate* isolate) {
600 isolate->set_current_allocation_sample_block(nullptr);
601 isolate->set_current_sample_block(nullptr);
602 }
603};
604
606 if (!FLAG_profiler) {
607 return;
608 }
609 ASSERT(initialized_);
613 Isolate::VisitIsolates(&visitor);
614 initialized_ = false;
615}
616
618 if (!FLAG_profiler && initialized_) {
619 Cleanup();
620 } else if (FLAG_profiler && !initialized_) {
621 Init();
622 }
623}
624
625void Profiler::SetSampleDepth(intptr_t depth) {
626 const int kMinimumDepth = 2;
627 const int kMaximumDepth = 255;
628 if (depth < kMinimumDepth) {
629 FLAG_max_profile_depth = kMinimumDepth;
630 } else if (depth > kMaximumDepth) {
631 FLAG_max_profile_depth = kMaximumDepth;
632 } else {
633 FLAG_max_profile_depth = depth;
634 }
635}
636
637static intptr_t SamplesPerSecond() {
638 const intptr_t kMicrosPerSec = 1000000;
639 return kMicrosPerSec / FLAG_profile_period;
640}
641
642intptr_t Profiler::CalculateSampleBufferCapacity() {
643 if (FLAG_sample_buffer_duration <= 0) {
645 }
646 // Deeper stacks require more than a single Sample object to be represented
647 // correctly. These samples are chained, so we need to determine the worst
648 // case sample chain length for a single stack.
649 const intptr_t max_sample_chain_length =
650 FLAG_max_profile_depth / kMaxSamplesPerTick;
651 const intptr_t sample_count = FLAG_sample_buffer_duration *
652 SamplesPerSecond() * max_sample_chain_length;
653 return (sample_count / SampleBlock::kSamplesPerBlock) + 1;
654}
655
656void Profiler::SetSamplePeriod(intptr_t period) {
657 const int kMinimumProfilePeriod = 50;
658 if (period < kMinimumProfilePeriod) {
659 FLAG_profile_period = kMinimumProfilePeriod;
660 } else {
661 FLAG_profile_period = period;
662 }
663 ThreadInterrupter::SetInterruptPeriod(FLAG_profile_period);
664}
665
667 SetSamplePeriod(FLAG_profile_period);
668}
669
671 intptr_t samples_per_block) {
672 const intptr_t size = Utils::RoundUp(
673 blocks * samples_per_block * sizeof(Sample), VirtualMemory::PageSize());
674 const bool executable = false;
675 const bool compressed = false;
676 memory_ =
677 VirtualMemory::Allocate(size, executable, compressed, "dart-profiler");
678 if (memory_ == nullptr) {
680 }
681 sample_buffer_ = reinterpret_cast<Sample*>(memory_->address());
682 blocks_ = new SampleBlock[blocks];
683 for (intptr_t i = 0; i < blocks; ++i) {
684 blocks_[i].Init(&sample_buffer_[i * samples_per_block], samples_per_block);
685 }
686 capacity_ = blocks;
687 cursor_ = 0;
688}
689
691 delete[] blocks_;
692 blocks_ = nullptr;
693 delete memory_;
694 memory_ = nullptr;
695 capacity_ = 0;
696 cursor_ = 0;
697}
698
699SampleBlock* SampleBlockBuffer::ReserveSampleBlock() {
700 intptr_t capacity = capacity_;
701 intptr_t start = cursor_.fetch_add(1) % capacity;
702 intptr_t i = start;
703 do {
704 SampleBlock* block = &blocks_[i];
705 if (block->TryAllocateFree()) {
706 return block;
707 }
708 i = (i + 1) % capacity;
709 } while (i != start);
710
711 // No free blocks: try for completed block instead.
712 i = start;
713 do {
714 SampleBlock* block = &blocks_[i];
715 if (block->TryAllocateCompleted()) {
716 return block;
717 }
718 i = (i + 1) % capacity;
719 } while (i != start);
720
721 return nullptr;
722}
723
725 for (intptr_t i = 0; i < capacity_; i++) {
726 blocks_[i].FreeCompleted();
727 }
728}
729
731 UserTag* tag) {
732 for (intptr_t i = 0; i < capacity_; ++i) {
733 Sample* sample = At(i);
734 uword sample_tag = sample->user_tag();
735 for (intptr_t j = 0; j < tag_table.Length(); ++j) {
736 *tag ^= tag_table.At(j);
737 if (tag->tag() == sample_tag && tag->streamable()) {
738 return true;
739 }
740 }
741 }
742 return false;
743}
744
745static void FlushSampleBlocks(Isolate* isolate) {
746 ASSERT(isolate != nullptr);
747
748 SampleBlock* block = isolate->current_sample_block();
749 if (block != nullptr) {
750 isolate->set_current_sample_block(nullptr);
751 block->MarkCompleted();
752 }
753
754 block = isolate->current_allocation_sample_block();
755 if (block != nullptr) {
756 isolate->set_current_allocation_sample_block(nullptr);
757 block->MarkCompleted();
758 }
759}
760
762 Isolate* isolate,
763 SampleFilter* filter,
765 ASSERT(isolate != nullptr);
766
767 Thread* thread = Thread::Current();
768 Zone* zone = thread->zone();
769
770 if (buffer == nullptr) {
771 buffer = new (zone) ProcessedSampleBuffer();
772 }
773
774 FlushSampleBlocks(isolate);
775
776 for (intptr_t i = 0; i < capacity_; ++i) {
777 SampleBlock* block = &blocks_[i];
778 if (block->TryAcquireStreaming(isolate)) {
779 block->BuildProcessedSampleBuffer(filter, buffer);
780 if (filter->take_samples()) {
781 block->StreamingToFree();
782 } else {
783 block->StreamingToCompleted();
784 }
785 }
786 }
787
788 return buffer;
789}
790
792 intptr_t slot = cursor_.fetch_add(1u);
793 if (slot < capacity_) {
794 return At(slot);
795 }
796 return nullptr;
797}
798
800 ASSERT(previous != nullptr);
802 Isolate* isolate = owner_;
803 ASSERT(isolate != nullptr);
804 Sample* next = previous->is_allocation_sample()
805 ? buffer->ReserveAllocationSample(isolate)
806 : buffer->ReserveCPUSample(isolate);
807 if (next == nullptr) {
808 return nullptr; // No blocks left, so drop sample.
809 }
810 next->Init(previous->port(), previous->timestamp(), previous->tid());
811 next->set_head_sample(false);
812 // Mark that previous continues at next.
813 previous->SetContinuation(next);
814 return next;
815}
816
818 return ReserveSampleImpl(isolate, false);
819}
820
822 return ReserveSampleImpl(isolate, true);
823}
824
825Sample* SampleBlockBuffer::ReserveSampleImpl(Isolate* isolate,
826 bool allocation_sample) {
827 SampleBlock* block = allocation_sample
829 : isolate->current_sample_block();
830 Sample* sample = nullptr;
831 if (block != nullptr) {
832 sample = block->ReserveSample();
833 }
834 if (sample != nullptr) {
835 return sample;
836 }
837
838 SampleBlock* next = ReserveSampleBlock();
839 if (next == nullptr) {
840 // We're out of blocks to reserve. Drop the sample.
841 return nullptr;
842 }
843
844 next->set_owner(isolate);
845 if (allocation_sample) {
847 } else {
849 }
850 if (block != nullptr) {
851 block->MarkCompleted();
852 if (!Isolate::IsSystemIsolate(isolate)) {
853 Thread* mutator = isolate->mutator_thread();
854 // The mutator thread might be NULL if we sample in the middle of
855 // Thread::Enter/ExitIsolate.
856 if ((mutator != nullptr) && isolate->TrySetHasCompletedBlocks()) {
858 }
859 }
860 }
861 return next->ReserveSample();
862}
863
864// Attempts to find the true return address when a Dart frame is being setup
865// or torn down.
866// NOTE: Architecture specific implementations below.
868 public:
870 : stack_buffer_(sample->GetStackBuffer()),
871 pc_(sample->pc()),
872 code_(Code::ZoneHandle(code.ptr())) {
873 ASSERT(!code_.IsNull());
875 }
876
877 ReturnAddressLocator(uword pc, uword* stack_buffer, const Code& code)
878 : stack_buffer_(stack_buffer),
879 pc_(pc),
880 code_(Code::ZoneHandle(code.ptr())) {
881 ASSERT(!code_.IsNull());
882 ASSERT(code_.ContainsInstructionAt(pc_));
883 }
884
885 uword pc() { return pc_; }
886
887 // Returns false on failure.
888 bool LocateReturnAddress(uword* return_address);
889
890 // Returns offset into code object.
891 intptr_t RelativePC() {
892 ASSERT(pc() >= code_.PayloadStart());
893 return static_cast<intptr_t>(pc() - code_.PayloadStart());
894 }
895
896 uint8_t* CodePointer(intptr_t offset) {
897 const intptr_t size = code_.Size();
898 ASSERT(offset < size);
899 uint8_t* code_pointer = reinterpret_cast<uint8_t*>(code_.PayloadStart());
900 code_pointer += offset;
901 return code_pointer;
902 }
903
904 uword StackAt(intptr_t i) {
905 ASSERT(i >= 0);
907 return stack_buffer_[i];
908 }
909
910 private:
911 uword* stack_buffer_;
912 uword pc_;
913 const Code& code_;
914};
915
916#if defined(TARGET_ARCH_IA32) || defined(TARGET_ARCH_X64)
918 ASSERT(return_address != nullptr);
919 const intptr_t offset = RelativePC();
920 ASSERT(offset >= 0);
921 const intptr_t size = code_.Size();
922 ASSERT(offset < size);
923 const intptr_t prologue_offset = code_.GetPrologueOffset();
924 if (offset < prologue_offset) {
925 // Before the prologue, return address is at the top of the stack.
926 // TODO(johnmccutchan): Some intrinsics and stubs do not conform to the
927 // expected stack layout. Use a more robust solution for those code objects.
928 *return_address = StackAt(0);
929 return true;
930 }
931 // Detect if we are:
932 // push ebp <--- here
933 // mov ebp, esp
934 // on X64 the register names are different but the sequence is the same.
935 ProloguePattern pp(pc());
936 if (pp.IsValid()) {
937 // Stack layout:
938 // 0 RETURN ADDRESS.
939 *return_address = StackAt(0);
940 return true;
941 }
942 // Detect if we are:
943 // push ebp
944 // mov ebp, esp <--- here
945 // on X64 the register names are different but the sequence is the same.
946 SetFramePointerPattern sfpp(pc());
947 if (sfpp.IsValid()) {
948 // Stack layout:
949 // 0 CALLER FRAME POINTER
950 // 1 RETURN ADDRESS
951 *return_address = StackAt(1);
952 return true;
953 }
954 // Detect if we are:
955 // ret <--- here
956 ReturnPattern rp(pc());
957 if (rp.IsValid()) {
958 // Stack layout:
959 // 0 RETURN ADDRESS.
960 *return_address = StackAt(0);
961 return true;
962 }
963 return false;
964}
965#elif defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64) || \
966 defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
968 ASSERT(return_address != nullptr);
969 return false;
970}
971#else
972#error ReturnAddressLocator implementation missing for this architecture.
973#endif
974
976 if ((time_origin_micros_ == -1) || (time_extent_micros_ == -1)) {
977 // No time filter passed in, always pass.
978 return true;
979 }
980 const int64_t timestamp = sample->timestamp();
981 int64_t delta = timestamp - time_origin_micros_;
982 return (delta >= 0) && (delta <= time_extent_micros_);
983}
984
986 const intptr_t task = static_cast<intptr_t>(sample->thread_task());
987 if (thread_task_mask_ == kNoTaskFilter) {
988 return true;
989 }
990 return (task & thread_task_mask_) != 0;
991}
992
994 : SampleVisitor(isolate->main_port()) {}
995
997 sample->Clear();
998}
999
1000// Executing Dart code, walk the stack.
1002 public:
1004 Sample* sample,
1005 SampleBuffer* sample_buffer,
1006 uword pc,
1007 uword fp,
1008 uword sp,
1009 uword lr,
1010 bool allocation_sample,
1011 intptr_t skip_count = 0)
1012 : ProfilerStackWalker((thread->isolate() != nullptr)
1013 ? thread->isolate()->main_port()
1014 : ILLEGAL_PORT,
1015 sample,
1016 sample_buffer,
1017 skip_count),
1018 thread_(thread),
1019 pc_(reinterpret_cast<uword*>(pc)),
1020 fp_(reinterpret_cast<uword*>(fp)),
1021 sp_(reinterpret_cast<uword*>(sp)),
1022 lr_(reinterpret_cast<uword*>(lr)) {}
1023
1024 void walk() {
1026 if (thread_->isolate()->IsDeoptimizing()) {
1028 return;
1029 }
1030
1031 uword* exit_fp = reinterpret_cast<uword*>(thread_->top_exit_frame_info());
1032 bool has_exit_frame = exit_fp != nullptr;
1033 if (has_exit_frame) {
1034 // Exited from compiled code.
1035 pc_ = nullptr;
1036 fp_ = exit_fp;
1037
1038 // Skip exit frame.
1039 pc_ = CallerPC();
1040 fp_ = CallerFP();
1041 } else {
1042 if (thread_->vm_tag() == VMTag::kDartTagId) {
1043 // Running compiled code.
1044 // Use the FP and PC from the thread interrupt or simulator; already set
1045 // in the constructor.
1046 } else {
1047 // No Dart on the stack; caller shouldn't use this walker.
1048 UNREACHABLE();
1049 }
1050
1051 const bool is_entry_frame =
1052#if defined(TARGET_ARCH_IA32) || defined(TARGET_ARCH_X64)
1053 StubCode::InInvocationStub(Stack(0)) ||
1055#else
1056 StubCode::InInvocationStub(reinterpret_cast<uword>(lr_));
1057#endif
1058 if (is_entry_frame) {
1059 // During the prologue of a function, CallerPC will return the caller's
1060 // caller. For most frames, the missing PC will be added during profile
1061 // processing. However, during this stack walk, it can cause us to fail
1062 // to identify the entry frame and lead the stack walk into the weeds.
1063 // Do not continue the stalk walk since this might be a false positive
1064 // from a Smi or unboxed value.
1066 return;
1067 }
1068 }
1069
1070 sample_->set_exit_frame_sample(has_exit_frame);
1071
1072 for (;;) {
1073 // Skip entry frame.
1074 if (StubCode::InInvocationStub(reinterpret_cast<uword>(pc_))) {
1075 pc_ = nullptr;
1076 fp_ = ExitLink();
1077 if (fp_ == nullptr) {
1078 break; // End of Dart stack.
1079 }
1080
1081 // Skip exit frame.
1082 pc_ = CallerPC();
1083 fp_ = CallerFP();
1084
1085 // At least one frame between exit and next entry frame.
1087 !StubCode::InInvocationStub(reinterpret_cast<uword>(pc_)));
1088 }
1089
1090 if (!Append(reinterpret_cast<uword>(pc_), reinterpret_cast<uword>(fp_))) {
1091 break; // Sample is full.
1092 }
1093
1094 pc_ = CallerPC();
1095 fp_ = CallerFP();
1096 }
1097 }
1098
1099 private:
1100 uword* CallerPC() const {
1101 ASSERT(fp_ != nullptr);
1102 uword* caller_pc_ptr = fp_ + kSavedCallerPcSlotFromFp;
1103 // MSan/ASan are unaware of frames initialized by generated code.
1104 MSAN_UNPOISON(caller_pc_ptr, kWordSize);
1105 ASAN_UNPOISON(caller_pc_ptr, kWordSize);
1106 return reinterpret_cast<uword*>(*caller_pc_ptr);
1107 }
1108
1109 uword* CallerFP() const {
1110 ASSERT(fp_ != nullptr);
1111 uword* caller_fp_ptr = fp_ + kSavedCallerFpSlotFromFp;
1112 // MSan/ASan are unaware of frames initialized by generated code.
1113 MSAN_UNPOISON(caller_fp_ptr, kWordSize);
1114 ASAN_UNPOISON(caller_fp_ptr, kWordSize);
1115 return reinterpret_cast<uword*>(*caller_fp_ptr);
1116 }
1117
1118 uword* ExitLink() const {
1119 ASSERT(fp_ != nullptr);
1120 uword* exit_link_ptr = fp_ + kExitLinkSlotFromEntryFp;
1121 // MSan/ASan are unaware of frames initialized by generated code.
1122 MSAN_UNPOISON(exit_link_ptr, kWordSize);
1123 ASAN_UNPOISON(exit_link_ptr, kWordSize);
1124 return reinterpret_cast<uword*>(*exit_link_ptr);
1125 }
1126
1127 uword Stack(intptr_t index) const {
1128 ASSERT(sp_ != nullptr);
1129 uword* stack_ptr = sp_ + index;
1130 // MSan/ASan are unaware of frames initialized by generated code.
1131 MSAN_UNPOISON(stack_ptr, kWordSize);
1132 ASAN_UNPOISON(stack_ptr, kWordSize);
1133 return *stack_ptr;
1134 }
1135
1136 Thread* const thread_;
1137 uword* pc_;
1138 uword* fp_;
1139 uword* sp_;
1140 uword* lr_;
1141};
1142
1143static void CopyStackBuffer(Sample* sample, uword sp_addr) {
1144 ASSERT(sample != nullptr);
1145 uword* sp = reinterpret_cast<uword*>(sp_addr);
1146 uword* buffer = sample->GetStackBuffer();
1147 if (sp != nullptr) {
1148 for (intptr_t i = 0; i < Sample::kStackBufferSizeInWords; i++) {
1151 buffer[i] = *sp;
1152 sp++;
1153 }
1154 }
1155}
1156
#if defined(DART_HOST_OS_WINDOWS)
// On Windows this code is synchronously executed from the thread interrupter
// thread. This means we can safely have a static fault_address.
static uword fault_address = 0;
// SEH filter used while reading a possibly stack-guarded page: records the
// faulting address and executes the handler for STATUS_GUARD_PAGE_VIOLATION;
// any other exception keeps searching for an outer handler.
static LONG GuardPageExceptionFilter(EXCEPTION_POINTERS* ep) {
  fault_address = 0;
  if (ep->ExceptionRecord->ExceptionCode != STATUS_GUARD_PAGE_VIOLATION) {
    return EXCEPTION_CONTINUE_SEARCH;
  }
  // https://goo.gl/p5Fe10
  fault_address = ep->ExceptionRecord->ExceptionInformation[1];
  // Read access.
  ASSERT(ep->ExceptionRecord->ExceptionInformation[0] == 0);
  return EXCEPTION_EXECUTE_HANDLER;
}
#endif
1173
1174// All memory access done to collect the sample is performed in CollectSample.
1175static void CollectSample(Isolate* isolate,
1176 bool exited_dart_code,
1177 bool in_dart_code,
1178 Sample* sample,
1179 ProfilerNativeStackWalker* native_stack_walker,
1180 ProfilerDartStackWalker* dart_stack_walker,
1181 uword pc,
1182 uword fp,
1183 uword sp,
1184 ProfilerCounters* counters) {
1185 ASSERT(counters != nullptr);
1186#if defined(DART_HOST_OS_WINDOWS)
1187 // Use structured exception handling to trap guard page access on Windows.
1188 __try {
1189#endif
1190
1191 if (in_dart_code) {
1192 // We can only trust the stack pointer if we are executing Dart code.
1193 // See http://dartbug.com/20421 for details.
1194 CopyStackBuffer(sample, sp);
1195 }
1196
1197 if (FLAG_profile_vm) {
1198 // Always walk the native stack collecting both native and Dart frames.
1199 counters->stack_walker_native.fetch_add(1);
1200 native_stack_walker->walk();
1201 } else if (StubCode::HasBeenInitialized() && exited_dart_code) {
1202 counters->stack_walker_dart_exit.fetch_add(1);
1203 // We have a valid exit frame info, use the Dart stack walker.
1204 dart_stack_walker->walk();
1205 } else if (StubCode::HasBeenInitialized() && in_dart_code) {
1206 counters->stack_walker_dart.fetch_add(1);
1207 // We are executing Dart code. We have frame pointers.
1208 dart_stack_walker->walk();
1209 } else {
1210 counters->stack_walker_none.fetch_add(1);
1211 sample->SetAt(0, pc);
1212 }
1213
1214#if defined(DART_HOST_OS_WINDOWS)
1215 // Use structured exception handling to trap guard page access.
1216 } __except (GuardPageExceptionFilter(GetExceptionInformation())) { // NOLINT
1217 // Sample collection triggered a guard page fault:
1218 // 1) discard entire sample.
1219 sample->set_ignore_sample(true);
1220
1221 // 2) Reenable guard bit on page that triggered the fault.
1222 // https://goo.gl/5mCsXW
1223 DWORD new_protect = PAGE_READWRITE | PAGE_GUARD;
1224 DWORD old_protect = 0;
1225 BOOL success =
1226 VirtualProtect(reinterpret_cast<void*>(fault_address),
1227 sizeof(fault_address), new_protect, &old_protect);
1228 USE(success);
1229 ASSERT(success);
1230 ASSERT(old_protect == PAGE_READWRITE);
1231 }
1232#endif
1233}
1234
1235static Sample* SetupSample(Thread* thread,
1236 bool allocation_sample,
1237 ThreadId tid) {
1238 ASSERT(thread != nullptr);
1239 Isolate* isolate = thread->isolate();
1241 Sample* sample = allocation_sample ? buffer->ReserveAllocationSample(isolate)
1242 : buffer->ReserveCPUSample(isolate);
1243 if (sample == nullptr) {
1244 return nullptr;
1245 }
1246 sample->Init(isolate->main_port(), OS::GetCurrentMonotonicMicros(), tid);
1247 uword vm_tag = thread->vm_tag();
1248#if defined(USING_SIMULATOR)
1249 // When running in the simulator, the runtime entry function address
1250 // (stored as the vm tag) is the address of a redirect function.
1251 // Attempt to find the real runtime entry function address and use that.
1252 uword redirect_vm_tag = Simulator::FunctionForRedirect(vm_tag);
1253 if (redirect_vm_tag != 0) {
1254 vm_tag = redirect_vm_tag;
1255 }
1256#endif
1257 sample->set_vm_tag(vm_tag);
1258 sample->set_user_tag(isolate->user_tag());
1259 sample->set_thread_task(thread->task_kind());
1260 return sample;
1261}
1262
1263static bool CheckIsolate(Isolate* isolate) {
1264 if ((isolate == nullptr) || (Dart::vm_isolate() == nullptr)) {
1265 // No isolate.
1266 return false;
1267 }
1268 return isolate != Dart::vm_isolate();
1269}
1270
1272 intptr_t cid,
1273 uint32_t identity_hash) {
1274 ASSERT(thread != nullptr);
1275 OSThread* os_thread = thread->os_thread();
1276 ASSERT(os_thread != nullptr);
1277 Isolate* isolate = thread->isolate();
1278 if (!CheckIsolate(isolate)) {
1279 return;
1280 }
1281 const bool exited_dart_code = thread->HasExitedDartCode();
1282
1284 if (buffer == nullptr) {
1285 // Profiler not initialized.
1286 return;
1287 }
1288
1289 uintptr_t sp = OSThread::GetCurrentStackPointer();
1290 uintptr_t fp = 0;
1291 uintptr_t pc = OS::GetProgramCounter();
1292 uintptr_t lr = 0;
1293
1295
1296 uword stack_lower = 0;
1297 uword stack_upper = 0;
1298
1299 if (!GetAndValidateThreadStackBounds(os_thread, thread, fp, sp, &stack_lower,
1300 &stack_upper)) {
1301 // Could not get stack boundary.
1302 return;
1303 }
1304
1305 Sample* sample =
1306 SetupSample(thread, /*allocation_block*/ true, os_thread->trace_id());
1307 if (sample == nullptr) {
1308 // We were unable to assign a sample for this allocation.
1309 counters_.sample_allocation_failure++;
1310 return;
1311 }
1312 sample->SetAllocationCid(cid);
1313 sample->set_allocation_identity_hash(identity_hash);
1314
1315 if (FLAG_profile_vm_allocation) {
1316 ProfilerNativeStackWalker native_stack_walker(
1317 &counters_, (isolate != nullptr) ? isolate->main_port() : ILLEGAL_PORT,
1318 sample, isolate->current_allocation_sample_block(), stack_lower,
1319 stack_upper, pc, fp, sp);
1320 native_stack_walker.walk();
1321 } else if (exited_dart_code) {
1322 ProfilerDartStackWalker dart_exit_stack_walker(
1323 thread, sample, isolate->current_allocation_sample_block(), pc, fp, sp,
1324 lr, /* allocation_sample*/ true);
1325 dart_exit_stack_walker.walk();
1326 } else {
1327 // Fall back.
1328 uintptr_t pc = OS::GetProgramCounter();
1329 sample->SetAt(0, pc);
1330 }
1331}
1332
1333void Profiler::SampleThreadSingleFrame(Thread* thread,
1334 Sample* sample,
1335 uintptr_t pc) {
1336 ASSERT(thread != nullptr);
1337 OSThread* os_thread = thread->os_thread();
1338 ASSERT(os_thread != nullptr);
1339 Isolate* isolate = thread->isolate();
1340
1342
1343 // Increment counter for vm tag.
1345 ASSERT(counters != nullptr);
1346 if (thread->IsDartMutatorThread()) {
1347 counters->Increment(sample->vm_tag());
1348 }
1349
1350 // Write the single pc value.
1351 sample->SetAt(0, pc);
1352}
1353
1356 ASSERT(thread != nullptr);
1357 OSThread* os_thread = thread->os_thread();
1358 ASSERT(os_thread != nullptr);
1359 Isolate* isolate = thread->isolate();
1360
1361 // Thread is not doing VM work.
1362 if (thread->task_kind() == Thread::kUnknownTask) {
1363 counters_.bail_out_unknown_task.fetch_add(1);
1364 return;
1365 }
1366
1368 // The JumpToFrame stub manually adjusts the stack pointer, frame
1369 // pointer, and some isolate state. It is not safe to walk the
1370 // stack when executing this stub.
1371 counters_.bail_out_jump_to_exception_handler.fetch_add(1);
1372 return;
1373 }
1374
1375 const bool in_dart_code = thread->IsExecutingDartCode();
1376
1377 uintptr_t sp = 0;
1378 uintptr_t fp = state.fp;
1379 uintptr_t pc = state.pc;
1380 uintptr_t lr = state.lr;
1381#if defined(USING_SIMULATOR)
1382 Simulator* simulator = nullptr;
1383#endif
1384
1385 if (in_dart_code) {
1386// If we're in Dart code, use the Dart stack pointer.
1387#if defined(USING_SIMULATOR)
1388 simulator = isolate->simulator();
1389 sp = simulator->get_register(SPREG);
1390 fp = simulator->get_register(FPREG);
1391 pc = simulator->get_pc();
1392 lr = simulator->get_lr();
1393#else
1394 sp = state.dsp;
1395#endif
1396 } else {
1397 // If we're in runtime code, use the C stack pointer.
1398 sp = state.csp;
1399 }
1400
1401 if (!CheckIsolate(isolate)) {
1402 counters_.bail_out_check_isolate.fetch_add(1);
1403 return;
1404 }
1405
1407 if (sample_block_buffer == nullptr) {
1408 // Profiler not initialized.
1409 return;
1410 }
1411
1412 // Setup sample.
1413 Sample* sample =
1414 SetupSample(thread, /*allocation_block*/ false, os_thread->trace_id());
1415 if (sample == nullptr) {
1416 // We were unable to assign a sample for this profiler tick.
1417 counters_.sample_allocation_failure++;
1418 return;
1419 }
1420
1421 if (thread->IsDartMutatorThread()) {
1422 if (isolate->IsDeoptimizing()) {
1423 counters_.single_frame_sample_deoptimizing.fetch_add(1);
1424 SampleThreadSingleFrame(thread, sample, pc);
1425 return;
1426 }
1427 }
1428
1429 uword stack_lower = 0;
1430 uword stack_upper = 0;
1431 if (!GetAndValidateThreadStackBounds(os_thread, thread, fp, sp, &stack_lower,
1432 &stack_upper)) {
1433 counters_.single_frame_sample_get_and_validate_stack_bounds.fetch_add(1);
1434 // Could not get stack boundary.
1435 SampleThreadSingleFrame(thread, sample, pc);
1436 return;
1437 }
1438
1439 // At this point we have a valid stack boundary for this isolate and
1440 // know that our initial stack and frame pointers are within the boundary.
1441
1442 // Increment counter for vm tag.
1444 ASSERT(counters != nullptr);
1445 if (thread->IsDartMutatorThread()) {
1446 counters->Increment(sample->vm_tag());
1447 }
1448
1449 ProfilerNativeStackWalker native_stack_walker(
1450 &counters_, (isolate != nullptr) ? isolate->main_port() : ILLEGAL_PORT,
1451 sample, isolate->current_sample_block(), stack_lower, stack_upper, pc, fp,
1452 sp);
1453 const bool exited_dart_code = thread->HasExitedDartCode();
1454 ProfilerDartStackWalker dart_stack_walker(
1455 thread, sample, isolate->current_sample_block(), pc, fp, sp, lr,
1456 /* allocation_sample*/ false);
1457
1458 // All memory access is done inside CollectSample.
1459 CollectSample(isolate, exited_dart_code, in_dart_code, sample,
1460 &native_stack_walker, &dart_stack_walker, pc, fp, sp,
1461 &counters_);
1462}
1463
1465
1467 return code_.PayloadStart();
1468}
1469
1471 return code_.Size();
1472}
1473
1475 return code_.compile_timestamp();
1476}
1477
1479 Build(thread);
1480}
1481
1483 public:
1485 ASSERT(table_ != nullptr);
1486 }
1487
1489
1490 void VisitObject(ObjectPtr raw_obj) override {
1491 if (raw_obj->IsCode() && !Code::IsUnknownDartCode(Code::RawCast(raw_obj))) {
1492 table_->Add(Code::Handle(Code::RawCast(raw_obj)));
1493 }
1494 }
1495
1496 private:
1497 CodeLookupTable* table_;
1498};
1499
1500void CodeLookupTable::Build(Thread* thread) {
1501 ASSERT(thread != nullptr);
1502 Isolate* vm_isolate = Dart::vm_isolate();
1503 ASSERT(vm_isolate != nullptr);
1504
1505 // Clear.
1506 code_objects_.Clear();
1507
1508 thread->CheckForSafepoint();
1509 // Add all found Code objects.
1510 {
1511 TimelineBeginEndScope tl(Timeline::GetIsolateStream(),
1512 "CodeLookupTable::Build HeapIterationScope");
1513 HeapIterationScope iteration(thread);
1514 CodeLookupTableBuilder cltb(this);
1515 iteration.IterateVMIsolateObjects(&cltb);
1516 iteration.IterateOldObjects(&cltb);
1517 }
1518 thread->CheckForSafepoint();
1519
1520 // Sort by entry.
1521 code_objects_.Sort(CodeDescriptor::Compare);
1522
1523#if defined(DEBUG)
1524 if (length() <= 1) {
1525 return;
1526 }
1527 ASSERT(FindCode(0) == nullptr);
1528 ASSERT(FindCode(~0) == nullptr);
1529 // Sanity check that we don't have duplicate entries and that the entries
1530 // are sorted.
1531 for (intptr_t i = 0; i < length() - 1; i++) {
1532 const CodeDescriptor* a = At(i);
1533 const CodeDescriptor* b = At(i + 1);
1534 ASSERT(a->Start() < b->Start());
1535 ASSERT(FindCode(a->Start()) == a);
1536 ASSERT(FindCode(b->Start()) == b);
1537 ASSERT(FindCode(a->Start() + a->Size() - 1) == a);
1538 ASSERT(FindCode(b->Start() + b->Size() - 1) == b);
1539 }
1540#endif
1541}
1542
1543void CodeLookupTable::Add(const Object& code) {
1544 ASSERT(!code.IsNull());
1545 ASSERT(code.IsCode());
1546 CodeDescriptor* cd = new CodeDescriptor(AbstractCode(code.ptr()));
1547 code_objects_.Add(cd);
1548}
1549
1551 intptr_t first = 0;
1552 intptr_t count = length();
1553 while (count > 0) {
1554 intptr_t current = first;
1555 intptr_t step = count / 2;
1556 current += step;
1557 const CodeDescriptor* cd = At(current);
1558 if (pc >= cd->Start()) {
1559 first = ++current;
1560 count -= step + 1;
1561 } else {
1562 count = step;
1563 }
1564 }
1565 // First points to the first code object whose entry is greater than PC.
1566 // That means the code object we need to check is first - 1.
1567 if (first == 0) {
1568 return nullptr;
1569 }
1570 first--;
1571 ASSERT(first >= 0);
1572 ASSERT(first < length());
1573 const CodeDescriptor* cd = At(first);
1574 if (cd->Contains(pc)) {
1575 return cd;
1576 }
1577 return nullptr;
1578}
1579
1581 SampleFilter* filter,
1583 Thread* thread = Thread::Current();
1584 Zone* zone = thread->zone();
1585
1586 if (buffer == nullptr) {
1587 buffer = new (zone) ProcessedSampleBuffer();
1588 }
1589
1590 const intptr_t length = capacity();
1591 for (intptr_t i = 0; i < length; i++) {
1592 thread->CheckForSafepoint();
1593 Sample* sample = At(i);
1594 if (sample->ignore_sample()) {
1595 // Bad sample.
1596 continue;
1597 }
1598 if (!sample->head_sample()) {
1599 // An inner sample in a chain of samples.
1600 continue;
1601 }
1602 if (sample->timestamp() == 0) {
1603 // Empty.
1604 continue;
1605 }
1606 if (sample->At(0) == 0) {
1607 // No frames.
1608 continue;
1609 }
1610 if (filter != nullptr) {
1611 // If we're requesting all the native allocation samples, we don't care
1612 // whether or not we're in the same isolate as the sample.
1613 if (sample->port() != filter->port()) {
1614 // Another isolate.
1615 continue;
1616 }
1617 if (!filter->TimeFilterSample(sample)) {
1618 // Did not pass time filter.
1619 continue;
1620 }
1621 if (!filter->TaskFilterSample(sample)) {
1622 // Did not pass task filter.
1623 continue;
1624 }
1625 if (!filter->FilterSample(sample)) {
1626 // Did not pass filter.
1627 continue;
1628 }
1629 }
1630 buffer->Add(BuildProcessedSample(sample, buffer->code_lookup_table()));
1631 }
1632 return buffer;
1633}
1634
1636 Sample* sample,
1637 const CodeLookupTable& clt) {
1638 Thread* thread = Thread::Current();
1639 Zone* zone = thread->zone();
1640
1641 ProcessedSample* processed_sample = new (zone) ProcessedSample();
1642
1643 // Copy state bits from sample.
1644 processed_sample->set_timestamp(sample->timestamp());
1645 processed_sample->set_tid(sample->tid());
1646 processed_sample->set_vm_tag(sample->vm_tag());
1647 processed_sample->set_user_tag(sample->user_tag());
1648 if (sample->is_allocation_sample()) {
1649 processed_sample->set_allocation_cid(sample->allocation_cid());
1650 processed_sample->set_allocation_identity_hash(
1651 sample->allocation_identity_hash());
1652 }
1653 processed_sample->set_first_frame_executing(!sample->exit_frame_sample());
1654
1655 // Copy stack trace from sample(s).
1656 bool truncated = false;
1657 Sample* current = sample;
1658 while (current != nullptr) {
1659 for (intptr_t i = 0; i < Sample::kPCArraySizeInWords; i++) {
1660 if (current->At(i) == 0) {
1661 break;
1662 }
1663 processed_sample->Add(current->At(i));
1664 }
1665
1666 truncated = truncated || current->truncated_trace();
1667 current = Next(current);
1668 }
1669
1670 if (!sample->exit_frame_sample()) {
1671 processed_sample->FixupCaller(clt, /* pc_marker */ 0,
1672 sample->GetStackBuffer());
1673 }
1674
1675 processed_sample->set_truncated(truncated);
1676 return processed_sample;
1677}
1678
1680 if (!sample->is_continuation_sample()) return nullptr;
1681 Sample* next_sample = sample->continuation_sample();
1682 // Sanity check.
1683 ASSERT(sample != next_sample);
1684 // Detect invalid chaining.
1685 if (sample->port() != next_sample->port()) {
1686 return nullptr;
1687 }
1688 if (sample->timestamp() != next_sample->timestamp()) {
1689 return nullptr;
1690 }
1691 if (sample->tid() != next_sample->tid()) {
1692 return nullptr;
1693 }
1694 return next_sample;
1695}
1696
1698 : pcs_(Sample::kPCArraySizeInWords),
1699 timestamp_(0),
1700 vm_tag_(0),
1701 user_tag_(0),
1702 allocation_cid_(-1),
1703 allocation_identity_hash_(0),
1704 truncated_(false) {}
1705
1706void ProcessedSample::FixupCaller(const CodeLookupTable& clt,
1707 uword pc_marker,
1708 uword* stack_buffer) {
1709 const CodeDescriptor* cd = clt.FindCode(At(0));
1710 if (cd == nullptr) {
1711 // No Dart code.
1712 return;
1713 }
1714 if (cd->CompileTimestamp() > timestamp()) {
1715 // Code compiled after sample. Ignore.
1716 return;
1717 }
1718 CheckForMissingDartFrame(clt, cd, pc_marker, stack_buffer);
1719}
1720
1721void ProcessedSample::CheckForMissingDartFrame(const CodeLookupTable& clt,
1722 const CodeDescriptor* cd,
1723 uword pc_marker,
1724 uword* stack_buffer) {
1725 ASSERT(cd != nullptr);
1726 const Code& code = Code::Handle(Code::RawCast(cd->code().ptr()));
1727 ASSERT(!code.IsNull());
1728 // Some stubs (and intrinsics) do not push a frame onto the stack leaving
1729 // the frame pointer in the caller.
1730 //
1731 // PC -> STUB
1732 // FP -> DART3 <-+
1733 // DART2 <-| <- TOP FRAME RETURN ADDRESS.
1734 // DART1 <-|
1735 // .....
1736 //
1737 // In this case, traversing the linked stack frames will not collect a PC
1738 // inside DART3. The stack will incorrectly be: STUB, DART2, DART1.
1739 // In Dart code, after pushing the FP onto the stack, an IP in the current
1740 // function is pushed onto the stack as well. This stack slot is called
1741 // the PC marker. We can use the PC marker to insert DART3 into the stack
1742 // so that it will correctly be: STUB, DART3, DART2, DART1. Note the
1743 // inserted PC may not accurately reflect the true return address into DART3.
1744
1745 // The pc marker is our current best guess of a return address.
1746 uword return_address = pc_marker;
1747
1748 // Attempt to find a better return address.
1749 ReturnAddressLocator ral(At(0), stack_buffer, code);
1750
1751 if (!ral.LocateReturnAddress(&return_address)) {
1752 ASSERT(return_address == pc_marker);
1753 if (code.GetPrologueOffset() == 0) {
1754 // Code has the prologue at offset 0. The frame is already setup and
1755 // can be trusted.
1756 return;
1757 }
1758 // Could not find a better return address than the pc_marker.
1759 if (code.ContainsInstructionAt(return_address)) {
1760 // PC marker is in the same code as pc, no missing frame.
1761 return;
1762 }
1763 }
1764
1765 if (clt.FindCode(return_address) == nullptr) {
1766 // Return address is not from a Dart code object. Do not insert.
1767 return;
1768 }
1769
1770 if (return_address != 0) {
1771 InsertAt(1, return_address);
1772 }
1773}
1774
1776 : code_lookup_table_(new CodeLookupTable(Thread::Current())) {
1777 ASSERT(code_lookup_table_ != nullptr);
1778}
1779
1781 ASSERT(!initialized_);
1782 if (monitor_ == nullptr) {
1783 monitor_ = new Monitor();
1784 }
1785 ASSERT(monitor_ != nullptr);
1786 initialized_ = true;
1787 shutdown_ = false;
1788}
1789
1791 ASSERT(initialized_);
1792 ASSERT(processor_thread_id_ == OSThread::kInvalidThreadJoinId);
1793 MonitorLocker startup_ml(monitor_);
1794 OSThread::Start("Dart Profiler SampleBlockProcessor", ThreadMain, 0);
1795 while (!thread_running_) {
1796 startup_ml.Wait();
1797 }
1798 ASSERT(processor_thread_id_ != OSThread::kInvalidThreadJoinId);
1799}
1800
1802 {
1803 MonitorLocker shutdown_ml(monitor_);
1804 if (shutdown_) {
1805 // Already shutdown.
1806 return;
1807 }
1808 shutdown_ = true;
1809 // Notify.
1810 shutdown_ml.Notify();
1811 ASSERT(initialized_);
1812 }
1813
1814 // Join the thread.
1815 ASSERT(processor_thread_id_ != OSThread::kInvalidThreadJoinId);
1816 OSThread::Join(processor_thread_id_);
1817 processor_thread_id_ = OSThread::kInvalidThreadJoinId;
1818 initialized_ = false;
1819 ASSERT(!thread_running_);
1820}
1821
1823 public:
1825 : SampleFilter(port, kNoTaskFilter, -1, -1, true), isolate_(isolate) {}
1826
1827 bool FilterSample(Sample* sample) override {
1828 const UserTag& tag =
1829 UserTag::Handle(UserTag::FindTagById(isolate_, sample->user_tag()));
1830 return tag.streamable();
1831 }
1832
1833 private:
1834 const Isolate* isolate_;
1835};
1836
1838 if (!Service::profiler_stream.enabled()) return;
1839 auto thread = Thread::Current();
1840 if (Isolate::IsSystemIsolate(isolate)) return;
1841
1842 TIMELINE_DURATION(thread, Isolate, "Profiler::ProcessCompletedBlocks")
1843 DisableThreadInterruptsScope dtis(thread);
1844 StackZone zone(thread);
1845 HandleScope handle_scope(thread);
1846 StreamableSampleFilter filter(isolate->main_port(), isolate);
1848 profile.Build(thread, isolate, &filter, Profiler::sample_block_buffer());
1850 event.set_cpu_profile(&profile);
1852}
1853
1855 FlushSampleBlocks(thread->isolate());
1857}
1858
1859void SampleBlockProcessor::ThreadMain(uword parameters) {
1860 ASSERT(initialized_);
1861 {
1862 // Signal to main thread we are ready.
1863 MonitorLocker startup_ml(monitor_);
1864 OSThread* os_thread = OSThread::Current();
1865 ASSERT(os_thread != nullptr);
1866 processor_thread_id_ = OSThread::GetCurrentThreadJoinId(os_thread);
1867 thread_running_ = true;
1868 startup_ml.Notify();
1869 }
1870
1871 MonitorLocker wait_ml(monitor_);
1872 // Wakeup every 100ms.
1873 const int64_t wakeup_interval = 1000 * 100;
1874 while (true) {
1875 wait_ml.WaitMicros(wakeup_interval);
1876 if (shutdown_) {
1877 break;
1878 }
1879
1880 IsolateGroup::ForEach([&](IsolateGroup* group) {
1881 if (group == Dart::vm_isolate_group()) return;
1882
1883 const bool kBypassSafepoint = false;
1885 kBypassSafepoint);
1886 group->ForEachIsolate([&](Isolate* isolate) {
1887 if (isolate->TakeHasCompletedBlocks()) {
1888 Profiler::ProcessCompletedBlocks(isolate);
1889 }
1890 });
1891 Thread::ExitIsolateGroupAsHelper(kBypassSafepoint);
1892 });
1893 }
1894 // Signal to main thread we are exiting.
1895 thread_running_ = false;
1896}
1897
1898#endif // !PRODUCT
1899
1900} // namespace dart
static int step(int x, SkScalar min, SkScalar max)
Definition: BlurTest.cpp:215
int count
Definition: FontMgrTest.cpp:50
static float next(float f)
SI F table(const skcms_Curve *curve, F v)
#define ASAN_UNPOISON(ptr, len)
#define UNREACHABLE()
Definition: assert.h:248
#define OUT_OF_MEMORY()
Definition: assert.h:250
#define RELEASE_ASSERT(cond)
Definition: assert.h:327
uword Size() const
Definition: profiler.h:420
uword PayloadStart() const
Definition: profiler.h:415
int64_t compile_timestamp() const
Definition: profiler.h:425
ClearProfileVisitor(Isolate *isolate)
Definition: profiler.cc:993
virtual void VisitSample(Sample *sample)
Definition: profiler.cc:996
int64_t CompileTimestamp() const
Definition: profiler.cc:1474
CodeDescriptor(const AbstractCode code)
Definition: profiler.cc:1464
uword Start() const
Definition: profiler.cc:1466
bool Contains(uword pc) const
Definition: profiler.h:512
static int Compare(CodeDescriptor *const *a, CodeDescriptor *const *b)
Definition: profiler.h:517
uword Size() const
Definition: profiler.cc:1470
CodeLookupTableBuilder(CodeLookupTable *table)
Definition: profiler.cc:1484
void VisitObject(ObjectPtr raw_obj) override
Definition: profiler.cc:1490
intptr_t length() const
Definition: profiler.h:544
CodeLookupTable(Thread *thread)
Definition: profiler.cc:1478
const CodeDescriptor * At(intptr_t index) const
Definition: profiler.h:546
friend class CodeLookupTableBuilder
Definition: profiler.h:560
const CodeDescriptor * FindCode(uword pc) const
Definition: profiler.cc:1550
uword Size() const
Definition: object.h:6903
intptr_t GetPrologueOffset() const
Definition: object.cc:17934
static CodePtr FindCodeUnsafe(uword pc)
Definition: object.cc:18220
bool ContainsInstructionAt(uword addr) const
Definition: object.h:6915
bool IsUnknownDartCode() const
Definition: object.h:7245
uword PayloadStart() const
Definition: object.h:6850
static IsolateGroup * vm_isolate_group()
Definition: dart.h:69
static Isolate * vm_isolate()
Definition: dart.h:68
intptr_t Length() const
Definition: object.h:11072
ObjectPtr At(intptr_t index) const
Definition: object.h:11085
static void ForEach(std::function< void(IsolateGroup *)> action)
Definition: isolate.cc:683
IsolateGroupSource * source() const
Definition: isolate.h:1036
bool TrySetHasCompletedBlocks()
Definition: isolate.h:1128
Simulator * simulator() const
Definition: isolate.h:1193
static bool IsSystemIsolate(const Isolate *isolate)
Definition: isolate.h:1445
uword user_tag() const
Definition: isolate.h:1327
VMTagCounters * vm_tag_counters()
Definition: isolate.h:1311
static void VisitIsolates(IsolateVisitor *visitor)
Definition: isolate.cc:3531
void set_current_sample_block(SampleBlock *block)
Definition: isolate.h:1112
SampleBlock * current_sample_block() const
Definition: isolate.h:1111
bool IsDeoptimizing() const
Definition: isolate.h:1254
void set_current_allocation_sample_block(SampleBlock *block)
Definition: isolate.h:1121
Thread * mutator_thread() const
Definition: isolate.cc:1920
SampleBlock * current_allocation_sample_block() const
Definition: isolate.h:1118
Dart_Port main_port() const
Definition: isolate.h:1048
Monitor::WaitResult Wait(int64_t millis=Monitor::kNoTimeout)
Definition: lockers.h:172
static bool LookupSharedObject(uword pc, uword *dso_base=nullptr, const char **dso_name=nullptr)
static const char * LookupSymbolName(uword pc, uword *start)
static void FreeSymbolName(const char *name)
uword stack_base() const
Definition: os_thread.h:128
static int Start(const char *name, ThreadStartFunction function, uword parameter)
static bool GetCurrentStackBounds(uword *lower, uword *upper)
static uword GetCurrentStackPointer()
Definition: os_thread.cc:132
static void Join(ThreadJoinId id)
static OSThread * Current()
Definition: os_thread.h:179
uword stack_limit() const
Definition: os_thread.h:129
static ThreadJoinId GetCurrentThreadJoinId(OSThread *thread)
static intptr_t ThreadIdToIntPtr(ThreadId id)
static const ThreadJoinId kInvalidThreadJoinId
Definition: os_thread.h:249
static int64_t GetCurrentMonotonicMicros()
static void static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
static uintptr_t GetProgramCounter()
static intptr_t ProcessId()
@ kInternalName
Definition: object.h:622
bool IsNull() const
Definition: object.h:363
static Object & Handle()
Definition: object.h:407
static ObjectPtr RawCast(ObjectPtr obj)
Definition: object.h:325
void set_tid(ThreadId tid)
Definition: profiler.h:823
void set_allocation_cid(intptr_t cid)
Definition: profiler.h:835
uword At(intptr_t index) const
Definition: profiler.h:812
void set_truncated(bool truncated)
Definition: profiler.h:850
void Add(uword pc)
Definition: profiler.h:803
void set_first_frame_executing(bool first_frame_executing)
Definition: profiler.h:854
void set_timestamp(int64_t timestamp)
Definition: profiler.h:820
int64_t timestamp() const
Definition: profiler.h:819
void set_vm_tag(uword tag)
Definition: profiler.h:827
void InsertAt(intptr_t index, uword pc)
Definition: profiler.h:806
void set_allocation_identity_hash(uint32_t hash)
Definition: profiler.h:842
void set_user_tag(uword tag)
Definition: profiler.h:831
ProfilerDartStackWalker(Thread *thread, Sample *sample, SampleBuffer *sample_buffer, uword pc, uword fp, uword sp, uword lr, bool allocation_sample, intptr_t skip_count=0)
Definition: profiler.cc:1003
ProfilerNativeStackWalker(ProfilerCounters *counters, Dart_Port port_id, Sample *sample, SampleBuffer *sample_buffer, uword stack_lower, uword stack_upper, uword pc, uword fp, uword sp, intptr_t skip_count=0)
Definition: profiler.cc:219
bool Append(uword pc, uword fp)
Definition: profiler.cc:145
ProfilerStackWalker(Dart_Port port_id, Sample *head_sample, SampleBuffer *sample_buffer, intptr_t skip_count=0)
Definition: profiler.cc:126
SampleBuffer * sample_buffer_
Definition: profiler.cc:182
static void DumpStackTrace(void *context)
Definition: profiler.cc:417
static void IsolateShutdown(Thread *thread)
Definition: profiler.cc:1854
static void SampleAllocation(Thread *thread, intptr_t cid, uint32_t identity_hash)
Definition: profiler.cc:1271
static void SetSampleDepth(intptr_t depth)
Definition: profiler.cc:625
static ProfilerCounters counters()
Definition: profiler.h:91
static void UpdateRunningState()
Definition: profiler.cc:617
static void Cleanup()
Definition: profiler.cc:605
static void SampleThread(Thread *thread, const InterruptedThreadState &state)
Definition: profiler.cc:1354
static void Init()
Definition: profiler.cc:573
static void ProcessCompletedBlocks(Isolate *isolate)
Definition: profiler.cc:1837
static SampleBlockBuffer * sample_block_buffer()
Definition: profiler.h:67
static void UpdateSamplePeriod()
Definition: profiler.cc:666
static void SetSamplePeriod(intptr_t period)
Definition: profiler.cc:656
T fetch_add(T arg, std::memory_order order=std::memory_order_relaxed)
Definition: atomic.h:35
ReturnAddressLocator(Sample *sample, const Code &code)
Definition: profiler.cc:869
uword StackAt(intptr_t i)
Definition: profiler.cc:904
ReturnAddressLocator(uword pc, uword *stack_buffer, const Code &code)
Definition: profiler.cc:877
uint8_t * CodePointer(intptr_t offset)
Definition: profiler.cc:896
bool LocateReturnAddress(uword *return_address)
virtual ~SampleBlockBuffer()
Definition: profiler.cc:690
SampleBlockBuffer(intptr_t blocks=kDefaultBlockCount, intptr_t samples_per_block=SampleBlock::kSamplesPerBlock)
Definition: profiler.cc:670
ProcessedSampleBuffer * BuildProcessedSampleBuffer(Isolate *isolate, SampleFilter *filter, ProcessedSampleBuffer *buffer=nullptr)
Definition: profiler.cc:761
static constexpr intptr_t kDefaultBlockCount
Definition: profiler.h:730
Sample * ReserveAllocationSample(Isolate *isolate)
Definition: profiler.cc:821
Sample * ReserveCPUSample(Isolate *isolate)
Definition: profiler.cc:817
void VisitIsolate(Isolate *isolate)
Definition: profiler.cc:599
virtual ~SampleBlockCleanupVisitor()=default
void StreamingToFree()
Definition: profiler.h:691
bool HasStreamableSamples(const GrowableObjectArray &tag_table, UserTag *tag)
Definition: profiler.cc:730
RelaxedAtomic< uint32_t > cursor_
Definition: profiler.h:718
void FreeCompleted()
Definition: profiler.h:697
void MarkCompleted()
Definition: profiler.h:672
static constexpr intptr_t kSamplesPerBlock
Definition: profiler.h:637
virtual Sample * ReserveSampleAndLink(Sample *previous)
Definition: profiler.cc:799
void StreamingToCompleted()
Definition: profiler.h:687
Isolate * owner_
Definition: profiler.h:719
virtual Sample * ReserveSample()
Definition: profiler.cc:791
bool TryAcquireStreaming(Isolate *isolate)
Definition: profiler.h:676
bool TryAllocateFree()
Definition: profiler.h:651
ProcessedSample * BuildProcessedSample(Sample *sample, const CodeLookupTable &clt)
Definition: profiler.cc:1635
Sample * Next(Sample *sample)
Definition: profiler.cc:1679
ProcessedSampleBuffer * BuildProcessedSampleBuffer(SampleFilter *filter, ProcessedSampleBuffer *buffer=nullptr)
Definition: profiler.cc:1580
intptr_t capacity_
Definition: profiler.h:629
intptr_t capacity() const
Definition: profiler.h:616
virtual void Init(Sample *samples, intptr_t capacity)
Definition: profiler.h:570
virtual Sample * ReserveSampleAndLink(Sample *previous)=0
Sample * At(intptr_t idx) const
Definition: profiler.h:610
static constexpr intptr_t kNoTaskFilter
Definition: profiler.h:177
virtual bool FilterSample(Sample *sample)
Definition: profiler.h:165
bool TimeFilterSample(Sample *sample)
Definition: profiler.cc:975
Dart_Port port() const
Definition: profiler.h:167
bool take_samples() const
Definition: profiler.h:175
bool TaskFilterSample(Sample *sample)
Definition: profiler.cc:985
void set_thread_task(Thread::TaskKind task)
Definition: profiler.h:324
uword * GetStackBuffer()
Definition: profiler.h:366
Thread::TaskKind thread_task() const
Definition: profiler.h:322
static constexpr int kPCArraySizeInWords
Definition: profiler.h:362
ThreadId tid() const
Definition: profiler.h:210
Sample * continuation_sample() const
Definition: profiler.h:339
uword At(intptr_t i) const
Definition: profiler.h:237
bool is_continuation_sample() const
Definition: profiler.h:328
void set_allocation_identity_hash(uint32_t hash)
Definition: profiler.h:318
void set_truncated_trace(bool truncated_trace)
Definition: profiler.h:302
void Init(Dart_Port port, int64_t timestamp, ThreadId tid)
Definition: profiler.h:199
void set_vm_tag(uword tag)
Definition: profiler.h:266
bool is_allocation_sample() const
Definition: profiler.h:306
intptr_t allocation_cid() const
Definition: profiler.h:341
void SetContinuation(Sample *next)
Definition: profiler.h:332
void SetAllocationCid(intptr_t cid)
Definition: profiler.h:357
int64_t timestamp() const
Definition: profiler.h:231
uword vm_tag() const
Definition: profiler.h:265
uword user_tag() const
Definition: profiler.h:271
static constexpr int kStackBufferSizeInWords
Definition: profiler.h:365
Dart_Port port() const
Definition: profiler.h:207
void set_user_tag(uword tag)
Definition: profiler.h:272
bool ignore_sample() const
Definition: profiler.h:280
bool exit_frame_sample() const
Definition: profiler.h:286
void set_ignore_sample(bool ignore_sample)
Definition: profiler.h:282
void set_exit_frame_sample(bool exit_frame_sample)
Definition: profiler.h:288
void SetAt(intptr_t i, uword pc)
Definition: profiler.h:244
uint32_t allocation_identity_hash() const
Definition: profiler.h:314
bool truncated_trace() const
Definition: profiler.h:300
void Clear()
Definition: profiler.h:212
bool head_sample() const
Definition: profiler.h:350
static void HandleEvent(ServiceEvent *event, bool enter_safepoint=true)
Definition: service.cc:1206
static StreamInfo profiler_stream
Definition: service.h:188
static uintptr_t GetCStackPointer(const mcontext_t &mcontext)
static uintptr_t GetFramePointer(const mcontext_t &mcontext)
static uintptr_t GetProgramCounter(const mcontext_t &mcontext)
DART_FORCE_INLINE int32_t get_pc() const
Definition: simulator_arm.h:66
int32_t get_lr() const
Definition: simulator_arm.h:62
uword stack_base() const
Definition: simulator_arm.h:85
uword stack_limit() const
Definition: simulator_arm.h:89
DART_FORCE_INLINE int32_t get_register(Register reg) const
Definition: simulator_arm.h:56
static uword FunctionForRedirect(uword redirect)
static void DumpCurrentTrace()
Definition: stack_frame.cc:454
StreamableSampleFilter(Dart_Port port, const Isolate *isolate)
Definition: profiler.cc:1824
bool FilterSample(Sample *sample) override
Definition: profiler.cc:1827
static bool HasBeenInitialized()
Definition: stub_code.h:41
static bool InInvocationStub(uword pc)
Definition: stub_code.cc:132
static bool InJumpToFrameStub(uword pc)
Definition: stub_code.cc:139
static void SetInterruptPeriod(intptr_t period)
Zone * zone() const
Definition: thread_state.h:37
OSThread * os_thread() const
Definition: thread_state.h:33
bool HasCompilerState() const
Definition: thread.h:586
void ScheduleInterrupts(uword interrupt_bits)
Definition: thread.cc:710
uword vm_tag() const
Definition: thread.h:821
@ kVMInterrupt
Definition: thread.h:488
@ kSampleBlockTask
Definition: thread.h:353
@ kUnknownTask
Definition: thread.h:346
static Thread * Current()
Definition: thread.h:362
CompilerState & compiler_state()
Definition: thread.h:588
bool HasExitedDartCode() const
Definition: thread.cc:951
void CheckForSafepoint()
Definition: thread.h:1104
bool IsExecutingDartCode() const
Definition: thread.cc:947
static void ExitIsolateGroupAsHelper(bool bypass_safepoint)
Definition: thread.cc:499
uword top_exit_frame_info() const
Definition: thread.h:691
bool IsDartMutatorThread() const
Definition: thread.h:551
ExecutionState execution_state() const
Definition: thread.h:1040
Isolate * isolate() const
Definition: thread.h:534
TaskKind task_kind() const
Definition: thread.h:479
@ kThreadInNative
Definition: thread.h:1036
static bool EnterIsolateGroupAsHelper(IsolateGroup *isolate_group, TaskKind kind, bool bypass_safepoint)
Definition: thread.cc:481
static UserTagPtr FindTagById(const Isolate *isolate, uword tag_id)
Definition: object.cc:27053
uword tag() const
Definition: object.h:13154
bool streamable() const
Definition: object.h:13161
static constexpr T RoundUp(T x, uintptr_t alignment, uintptr_t offset=0)
Definition: utils.h:120
static const char * String()
static intptr_t PageSize()
static VirtualMemory * Allocate(intptr_t size, bool is_executable, bool is_compressed, const char *name)
void * address() const
#define ILLEGAL_PORT
Definition: dart_api.h:1535
int64_t Dart_Port
Definition: dart_api.h:1525
#define ASSERT(E)
SkBitmap source
Definition: examples.cpp:28
static bool b
struct MyStruct a[10]
AtkStateType state
FlKeyEvent * event
GAsyncResult * result
size_t length
#define MSAN_UNPOISON(ptr, len)
Definition: dart_vm.cc:33
static constexpr int kExitLinkSlotFromEntryFp
static Sample * SetupSample(Thread *thread, bool allocation_sample, ThreadId tid)
Definition: profiler.cc:1235
static void DumpCompilerState(Thread *thread)
Definition: profiler.cc:468
static bool CheckIsolate(Isolate *isolate)
Definition: profiler.cc:1263
const char *const name
static constexpr int kSavedCallerPcSlotFromFp
pthread_t ThreadJoinId
static intptr_t SamplesPerSecond()
Definition: profiler.cc:637
static void CollectSample(Isolate *isolate, bool exited_dart_code, bool in_dart_code, Sample *sample, ProfilerNativeStackWalker *native_stack_walker, ProfilerDartStackWalker *dart_stack_walker, uword pc, uword fp, uword sp, ProfilerCounters *counters)
Definition: profiler.cc:1175
static void CopyStackBuffer(Sample *sample, uword sp_addr)
Definition: profiler.cc:1143
static constexpr int kSavedCallerFpSlotFromFp
uintptr_t uword
Definition: globals.h:501
static void FlushSampleBlocks(Isolate *isolate)
Definition: profiler.cc:745
static bool ValidateThreadStackBounds(uintptr_t fp, uintptr_t sp, uword stack_lower, uword stack_upper)
Definition: profiler.cc:327
const uint32_t fp
static bool GetAndValidateCurrentThreadStackBounds(uintptr_t fp, uintptr_t sp, uword *stack_lower, uword *stack_upper)
Definition: profiler.cc:394
static constexpr intptr_t kMaxSamplesPerTick
Definition: profiler.cc:35
static void USE(T &&)
Definition: globals.h:618
DEFINE_FLAG(bool, print_cluster_information, false, "Print information about clusters written to snapshot")
const Register FPREG
const intptr_t cid
static void DumpStackFrame(uword pc, uword fp, const char *name, uword offset)
Definition: profiler.cc:72
static bool GetAndValidateThreadStackBounds(OSThread *os_thread, Thread *thread, uintptr_t fp, uintptr_t sp, uword *stack_lower, uword *stack_upper)
Definition: profiler.cc:352
constexpr intptr_t kWordSize
Definition: globals.h:509
pthread_t ThreadId
const Register SPREG
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified vm service A custom Dart VM Service port The default is to pick a randomly available open port disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode disable vm service Disable mDNS Dart VM Service publication Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set endless trace buffer
Definition: switches.h:126
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive keep the shell running after the Dart script has completed enable serial On low power devices with low core running concurrent GC tasks on threads can cause them to contend with the UI thread which could potentially lead to jank This option turns off all concurrent GC activities domain network JSON encoded network policy per domain This overrides the DisallowInsecureConnections switch Embedder can specify whether to allow or disallow insecure connections at a domain level old gen heap size
Definition: switches.h:259
#define Pp
Definition: globals.h:425
#define Px
Definition: globals.h:410
#define Pd
Definition: globals.h:408
SeparatedVector2 offset
#define TIMELINE_DURATION(thread, stream, name)
Definition: timeline.h:39
#define COPY_FP_REGISTER(fp)
Definition: globals.h:200
int BOOL
Definition: windows_types.h:37
long LONG
Definition: windows_types.h:23
unsigned long DWORD
Definition: windows_types.h:22