Flutter Engine
The Flutter Engine
Loading...
Searching...
No Matches
profiler.cc
Go to the documentation of this file.
1// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/profiler.h"
6
8#include "platform/atomic.h"
10#include "platform/utils.h"
11#include "vm/allocation.h"
12#include "vm/code_patcher.h"
13#if !defined(DART_PRECOMPILED_RUNTIME)
15#endif
16#include "vm/debugger.h"
17#include "vm/instructions.h"
18#include "vm/isolate.h"
19#include "vm/json_stream.h"
20#include "vm/lockers.h"
21#include "vm/message_handler.h"
22#include "vm/native_symbol.h"
23#include "vm/object.h"
24#include "vm/os.h"
25#include "vm/profiler_service.h"
26#include "vm/reusable_handles.h"
27#include "vm/signal_handler.h"
28#include "vm/simulator.h"
29#include "vm/stack_frame.h"
30#include "vm/timeline.h"
31#include "vm/version.h"
32
33namespace dart {
34
// Maximum number of chained Sample objects used to represent one profiler
// tick; used when sizing the buffer in CalculateSampleBufferCapacity().
35static constexpr intptr_t kMaxSamplesPerTick = 4;
36
37DEFINE_FLAG(bool, trace_profiled_isolates, false, "Trace profiled isolates.");
38
// NOTE(review): the DEFINE_FLAG(int, ...) opener lines for profile_period and
// max_profile_depth are missing from this listing; only their argument lists
// survive below.
40 profile_period,
41 1000,
42 "Time between profiler samples in microseconds. Minimum 50.");
44 max_profile_depth,
46 "Maximum number stack frames walked. Minimum 1. Maximum 255.");
// Native stack collection defaults on for simulator builds, off otherwise.
47#if defined(USING_SIMULATOR)
48DEFINE_FLAG(bool, profile_vm, true, "Always collect native stack traces.");
49#else
50DEFINE_FLAG(bool, profile_vm, false, "Always collect native stack traces.");
51#endif
53 profile_vm_allocation,
54 false,
55 "Collect native stack traces when tracing Dart allocations.");
56
58 int,
59 sample_buffer_duration,
60 0,
61 "Defines the size of the profiler sample buffer to contain at least "
62 "N seconds of samples at a given sample rate. If not provided, the "
63 "default is ~4 seconds. Large values will greatly increase memory "
64 "consumption.");
65
66// Include native stack dumping helpers into AOT compiler even in PRODUCT
67// mode. This allows to report more informative errors when gen_snapshot
68// crashes.
69#if !defined(PRODUCT) || defined(DART_PRECOMPILER)
// Aggregated diagnostic counters (zero-initialized) incremented by the stack
// walkers below when a walk terminates early.
70ProfilerCounters Profiler::counters_ = {};
71
72static void DumpStackFrame(uword pc, uword fp, const char* name, uword offset) {
73 OS::PrintErr(" pc 0x%" Pp " fp 0x%" Pp " %s+0x%" Px "\n", pc, fp, name,
74 offset);
75}
76
// Symbolizes and prints one stack frame (pc/fp) to stderr, trying in order:
// VM native symbols, shared-object symbols, and (JIT only) heap-walked Code
// objects; falls back to "Unknown symbol".
// NOTE(review): several statements are absent from this listing (e.g. the
// NativeSymbolResolver lookup after "if (auto const name =").
77void DumpStackFrame(intptr_t frame_index, uword pc, uword fp) {
78 uword start = 0;
79 // The pc for all frames except the top frame is a return address, which can
80 // belong to a different inlining interval than the call. Subtract one to get
81 // the symbolization for the call.
82 uword lookup_pc = frame_index == 0 ? pc : pc - 1;
83 if (auto const name =
85 DumpStackFrame(pc, fp, name, pc - start);
87 return;
88 }
89
90 char* dso_name;
91 uword dso_base;
92 if (NativeSymbolResolver::LookupSharedObject(pc, &dso_base, &dso_name)) {
93 DumpStackFrame(pc, fp, dso_name, pc - dso_base);
95 return;
96 }
97
98#if !defined(DART_PRECOMPILED_RUNTIME)
99 // This relies on heap iteration, which might fail if we're crashing because
100 // of heap corruption. A nested crash symbolizing a JIT frame will prevent
101 // seeing all caller frames, so only do this when we aren't able to use the
102 // safer StackFrameIterator.
103 Thread* thread = Thread::Current();
104 bool symbolize_jit_code =
105 (thread != nullptr) &&
108 if (symbolize_jit_code) {
109 Code result;
110 result = Code::FindCodeUnsafe(lookup_pc);
111 if (!result.IsNull()) {
113 pc, fp,
115 pc - result.PayloadStart());
116 return;
117 }
118 }
119#endif
120
121 OS::PrintErr(" pc 0x%" Pp " fp 0x%" Pp " Unknown symbol\n", pc, fp);
122}
123
// Base stack walker shared by the native and Dart walkers: Append() records
// (pc, fp) pairs into a chain of Samples, stopping at FLAG_max_profile_depth
// frames and chaining a fresh Sample when the current one fills up.
// NOTE(review): the class declaration line and several statements are elided
// in this listing.
125 public:
127 Sample* head_sample,
128 SampleBuffer* sample_buffer,
129 intptr_t skip_count = 0)
130 : port_id_(port_id),
131 sample_(head_sample),
132 sample_buffer_(sample_buffer),
133 skip_count_(skip_count),
135 frame_index_(0),
136 total_frames_(0) {
// A null head sample must come with a null buffer, and vice versa.
137 if (sample_ == nullptr) {
138 ASSERT(sample_buffer_ == nullptr);
139 } else {
140 ASSERT(sample_buffer_ != nullptr);
142 }
143 }
144
145 bool Append(uword pc, uword fp) {
148 return true;
149 }
150
151 if (sample_ == nullptr) {
153 frame_index_++;
155 return true;
156 }
157 if (total_frames_ >= FLAG_max_profile_depth) {
159 return false;
160 }
161 ASSERT(sample_ != nullptr);
164 if (new_sample == nullptr) {
165 // Could not reserve new sample - mark this as truncated.
167 return false;
168 }
169 frame_index_ = 0;
170 sample_ = new_sample;
171 }
174 frame_index_++;
176 return true;
177 }
178
179 protected:
183 intptr_t skip_count_;
185 intptr_t frame_index_;
187};
188
// Word offsets from the frame pointer to the saved caller PC and saved caller
// FP for the host C ABI; consumed by ProfilerNativeStackWalker::CallerPC/FP.
189// The layout of C stack frames.
190#if defined(HOST_ARCH_IA32) || defined(HOST_ARCH_X64) || \
191 defined(HOST_ARCH_ARM) || defined(HOST_ARCH_ARM64)
192// +-------------+
193// | saved IP/LR |
194// +-------------+
195// | saved FP | <- FP
196// +-------------+
197static constexpr intptr_t kHostSavedCallerPcSlotFromFp = 1;
198static constexpr intptr_t kHostSavedCallerFpSlotFromFp = 0;
199#elif defined(HOST_ARCH_RISCV32) || defined(HOST_ARCH_RISCV64)
200// +-------------+
201// | | <- FP
202// +-------------+
203// | saved RA |
204// +-------------+
205// | saved FP |
206// +-------------+
207static constexpr intptr_t kHostSavedCallerPcSlotFromFp = -1;
208static constexpr intptr_t kHostSavedCallerFpSlotFromFp = -2;
209#else
210#error What architecture?
211#endif
212
213// If the VM is compiled without frame pointers (which is the default on
214// recent GCC versions with optimizing enabled) the stack walking code may
215// fail.
216//
// Walks native C frames by chasing saved-FP links, bounding each step to one
// page and validating every frame pointer against [lower_bound_, stack_upper_).
// NOTE(review): the class declaration and constructor opener lines are elided
// in this listing.
218 public:
220 Dart_Port port_id,
221 Sample* sample,
222 SampleBuffer* sample_buffer,
223 uword stack_lower,
224 uword stack_upper,
225 uword pc,
226 uword fp,
227 uword sp,
228 intptr_t skip_count = 0)
229 : ProfilerStackWalker(port_id, sample, sample_buffer, skip_count),
230 counters_(counters),
231 stack_upper_(stack_upper),
232 original_pc_(pc),
233 original_fp_(fp),
234 original_sp_(sp),
235 lower_bound_(stack_lower) {}
236
  // Records the interrupted frame first, then follows saved-FP links until an
  // invalid pointer, a too-large step, a bad pc, or a full sample ends the walk.
237 void walk() {
238 const uword kMaxStep = VirtualMemory::PageSize();
239
240 Append(original_pc_, original_fp_);
241
242 uword* pc = reinterpret_cast<uword*>(original_pc_);
243 uword* fp = reinterpret_cast<uword*>(original_fp_);
244 uword* previous_fp = fp;
245
246 uword gap = original_fp_ - original_sp_;
247 if (gap >= kMaxStep) {
248 // Gap between frame pointer and stack pointer is
249 // too large.
250 counters_->incomplete_sample_fp_step.fetch_add(1);
251 return;
252 }
253
254 if (!ValidFramePointer(fp)) {
255 counters_->incomplete_sample_fp_bounds.fetch_add(1);
256 return;
257 }
258
259 while (true) {
260 pc = CallerPC(fp);
261 previous_fp = fp;
262 fp = CallerFP(fp);
263
264 if (fp == nullptr) {
265 return;
266 }
267
268 if (fp <= previous_fp) {
269 // Frame pointer did not move to a higher address.
270 counters_->incomplete_sample_fp_step.fetch_add(1);
271 return;
272 }
273
274 gap = fp - previous_fp;
275 if (gap >= kMaxStep) {
276 // Frame pointer step is too large.
277 counters_->incomplete_sample_fp_step.fetch_add(1);
278 return;
279 }
280
281 if (!ValidFramePointer(fp)) {
282 // Frame pointer is outside of isolate stack boundary.
283 counters_->incomplete_sample_fp_bounds.fetch_add(1);
284 return;
285 }
286
287 const uword pc_value = reinterpret_cast<uword>(pc);
288 if ((pc_value + 1) < pc_value) {
289 // It is not uncommon to encounter an invalid pc as we
290 // traverse a stack frame. Most of these we can tolerate. If
291 // the pc is so large that adding one to it will cause an
292 // overflow it is invalid and it will cause headaches later
293 // while we are building the profile. Discard it.
294 counters_->incomplete_sample_bad_pc.fetch_add(1);
295 return;
296 }
297
298 // Move the lower bound up.
299 lower_bound_ = reinterpret_cast<uword>(fp);
300
301 if (!Append(pc_value, reinterpret_cast<uword>(fp))) {
302 return;
303 }
304 }
305 }
306
307 private:
  // Loads the saved caller PC from the frame at |fp|.
308 uword* CallerPC(uword* fp) const {
309 ASSERT(fp != nullptr);
310 uword* caller_pc_ptr = fp + kHostSavedCallerPcSlotFromFp;
311 // This may actually be uninitialized, by design (see class comment above).
312 MSAN_UNPOISON(caller_pc_ptr, kWordSize);
313 ASAN_UNPOISON(caller_pc_ptr, kWordSize);
314 return reinterpret_cast<uword*>(*caller_pc_ptr);
315 }
316
  // Loads the saved caller FP from the frame at |fp|.
317 uword* CallerFP(uword* fp) const {
318 ASSERT(fp != nullptr);
319 uword* caller_fp_ptr = fp + kHostSavedCallerFpSlotFromFp;
320 // This may actually be uninitialized, by design (see class comment above).
321 MSAN_UNPOISON(caller_fp_ptr, kWordSize);
322 ASAN_UNPOISON(caller_fp_ptr, kWordSize);
323 return reinterpret_cast<uword*>(*caller_fp_ptr);
324 }
325
  // True when |fp| (plus one slot) lies within [lower_bound_, stack_upper_).
326 bool ValidFramePointer(uword* fp) const {
327 if (fp == nullptr) {
328 return false;
329 }
330 uword cursor = reinterpret_cast<uword>(fp);
331 cursor += sizeof(fp);
332 bool r = (cursor >= lower_bound_) && (cursor < stack_upper_);
333 return r;
334 }
335
336 ProfilerCounters* const counters_;
337 const uword stack_upper_;
338 const uword original_pc_;
339 const uword original_fp_;
340 const uword original_sp_;
341 uword lower_bound_;
342};
343
344static bool ValidateThreadStackBounds(uintptr_t fp,
345 uintptr_t sp,
346 uword stack_lower,
347 uword stack_upper) {
348 if (stack_lower >= stack_upper) {
349 // Stack boundary is invalid.
350 return false;
351 }
352
353 if ((sp < stack_lower) || (sp >= stack_upper)) {
354 // Stack pointer is outside thread's stack boundary.
355 return false;
356 }
357
358 if ((fp < stack_lower) || (fp >= stack_upper)) {
359 // Frame pointer is outside threads's stack boundary.
360 return false;
361 }
362
363 return true;
364}
365
366// Get |thread|'s stack boundary and verify that |sp| and |fp| are within
367// it. Return |false| if anything looks suspicious.
// NOTE(review): the signature opener is elided in this listing; per the
// ASSERTs below the first parameter is an OSThread* named os_thread.
369 Thread* thread,
370 uintptr_t fp,
371 uintptr_t sp,
372 uword* stack_lower,
373 uword* stack_upper) {
374 ASSERT(os_thread != nullptr);
375 ASSERT(stack_lower != nullptr);
376 ASSERT(stack_upper != nullptr);
377
// On simulators, Dart code runs on the simulator's dedicated stack.
378#if defined(USING_SIMULATOR)
379 const bool use_simulator_stack_bounds =
380 thread != nullptr && thread->IsExecutingDartCode();
381 if (use_simulator_stack_bounds) {
382 Isolate* isolate = thread->isolate();
383 ASSERT(isolate != nullptr);
384 Simulator* simulator = isolate->simulator();
385 *stack_lower = simulator->stack_limit();
386 *stack_upper = simulator->stack_base();
387 }
388#else
389 const bool use_simulator_stack_bounds = false;
390#endif // defined(USING_SIMULATOR)
391
392 if (!use_simulator_stack_bounds) {
393 *stack_lower = os_thread->stack_limit();
394 *stack_upper = os_thread->stack_base();
395 }
396
// Zero bounds mean the OS thread never recorded its stack limits.
397 if ((*stack_lower == 0) || (*stack_upper == 0)) {
398 return false;
399 }
400
401 if (!use_simulator_stack_bounds && (sp > *stack_lower)) {
402 // The stack pointer gives us a tighter lower bound.
403 *stack_lower = sp;
404 }
405
406 return ValidateThreadStackBounds(fp, sp, *stack_lower, *stack_upper);
407}
408
// Dumps a crash-time stack trace using register state pulled from a
// signal/SEH |context|; with no context, falls back to the current thread's
// own registers.
409void Profiler::DumpStackTrace(void* context) {
410 if (context == nullptr) {
411 DumpStackTrace(/*for_crash=*/true);
412 return;
413 }
414#if defined(DART_HOST_OS_LINUX) || defined(DART_HOST_OS_MACOS) || \
415 defined(DART_HOST_OS_ANDROID)
416 ucontext_t* ucontext = reinterpret_cast<ucontext_t*>(context);
417 mcontext_t mcontext = ucontext->uc_mcontext;
// NOTE(review): the lines extracting pc/fp/sp from mcontext (original lines
// 418-420) are elided in this listing.
421 DumpStackTrace(sp, fp, pc, /*for_crash=*/true);
422#elif defined(DART_HOST_OS_WINDOWS)
423 CONTEXT* ctx = reinterpret_cast<CONTEXT*>(context);
424#if defined(HOST_ARCH_IA32)
425 uword pc = static_cast<uword>(ctx->Eip);
426 uword fp = static_cast<uword>(ctx->Ebp);
427 uword sp = static_cast<uword>(ctx->Esp);
428#elif defined(HOST_ARCH_X64)
429 uword pc = static_cast<uword>(ctx->Rip);
430 uword fp = static_cast<uword>(ctx->Rbp);
431 uword sp = static_cast<uword>(ctx->Rsp);
432#elif defined(HOST_ARCH_ARM)
433 uword pc = static_cast<uword>(ctx->Pc);
434 uword fp = static_cast<uword>(ctx->R11);
435 uword sp = static_cast<uword>(ctx->Sp);
436#elif defined(HOST_ARCH_ARM64)
437 uword pc = static_cast<uword>(ctx->Pc);
438 uword fp = static_cast<uword>(ctx->Fp);
439 uword sp = static_cast<uword>(ctx->Sp);
440#else
441#error Unsupported architecture.
442#endif
443 DumpStackTrace(sp, fp, pc, /*for_crash=*/true);
444#else
445// TODO(fschneider): Add support for more platforms.
446// Do nothing on unsupported platforms.
447#endif
448}
449
// Captures the current thread's own sp/pc and dumps a stack trace.
// NOTE(review): the statement initializing fp (original line 455) is elided
// in this listing.
450void Profiler::DumpStackTrace(bool for_crash) {
451 uintptr_t sp = OSThread::GetCurrentStackPointer();
452 uintptr_t fp = 0;
453 uintptr_t pc = OS::GetProgramCounter();
454
456
457 DumpStackTrace(sp, fp, pc, for_crash);
458}
459
460static void DumpCompilerState(Thread* thread) {
461#if !defined(DART_PRECOMPILED_RUNTIME)
462 if (thread != nullptr && thread->execution_state() == Thread::kThreadInVM &&
463 thread->HasCompilerState()) {
464 thread->compiler_state().ReportCrash();
465 }
466#endif
467}
468
// Prints a full crash report to stderr: VM version, process/thread/isolate
// identity, build configuration, snapshot instruction bases, registers, and
// a native stack walk. Guarded against reentrancy when |for_crash| is set.
// NOTE(review): a few statements (original lines 527, 545, 547) are elided
// in this listing.
469void Profiler::DumpStackTrace(uword sp, uword fp, uword pc, bool for_crash) {
470 if (for_crash) {
471 // Allow only one stack trace to prevent recursively printing stack traces
472 // if we hit an assert while printing the stack.
473 static RelaxedAtomic<uintptr_t> started_dump = 0;
474 if (started_dump.fetch_add(1u) != 0) {
475 OS::PrintErr("Aborting reentrant request for stack trace.\n");
476 return;
477 }
478 }
479
480 auto os_thread = OSThread::Current();
481 ASSERT(os_thread != nullptr);
482 auto thread = Thread::Current(); // nullptr if no current isolate.
483 auto isolate = thread == nullptr ? nullptr : thread->isolate();
484 auto isolate_group = thread == nullptr ? nullptr : thread->isolate_group();
485 auto source = isolate_group == nullptr ? nullptr : isolate_group->source();
486 auto vm_source =
487 Dart::vm_isolate() == nullptr ? nullptr : Dart::vm_isolate()->source();
488 const char* isolate_group_name =
489 isolate_group == nullptr ? "(nil)" : isolate_group->source()->name;
490 const char* isolate_name = isolate == nullptr ? "(nil)" : isolate->name();
491#if defined(PRODUCT)
492 const intptr_t thread_id = -1;
493#else
494 const intptr_t thread_id = OSThread::ThreadIdToIntPtr(os_thread->trace_id());
495#endif
496
497 OS::PrintErr("version=%s\n", Version::String());
498 OS::PrintErr("pid=%" Pd ", thread=%" Pd
499 ", isolate_group=%s(%p), isolate=%s(%p)\n",
500 static_cast<intptr_t>(OS::ProcessId()), thread_id,
501 isolate_group_name, isolate_group, isolate_name, isolate);
502#if defined(DART_COMPRESSED_POINTERS)
503 const char kCompressedPointers[] = "yes";
504#else
505 const char kCompressedPointers[] = "no";
506#endif
507#if defined(USING_SIMULATOR)
508 const char kUsingSimulator[] = "yes";
509#else
510 const char kUsingSimulator[] = "no";
511#endif
512 OS::PrintErr("os=%s, arch=%s, comp=%s, sim=%s\n", kHostOperatingSystemName,
513 kTargetArchitectureName, kCompressedPointers, kUsingSimulator);
514 OS::PrintErr("isolate_instructions=%" Px ", vm_instructions=%" Px "\n",
515 source == nullptr
516 ? 0
517 : reinterpret_cast<uword>(source->snapshot_instructions),
518 vm_source == nullptr
519 ? 0
520 : reinterpret_cast<uword>(vm_source->snapshot_instructions));
521 OS::PrintErr("fp=%" Px ", sp=%" Px ", pc=%" Px "\n", fp, sp, pc);
522
523 uword stack_lower = 0;
524 uword stack_upper = 0;
525 if (!GetAndValidateThreadStackBounds(os_thread, thread, fp, sp, &stack_lower,
526 &stack_upper)) {
528 "Stack dump aborted because GetAndValidateThreadStackBounds failed.\n");
529 if (pc != 0) { // At the very least dump the top frame.
530 DumpStackFrame(0, pc, fp);
531 }
532 DumpCompilerState(thread);
533 return;
534 }
535
// Walk the native stack without recording into a sample buffer (frames are
// printed as they are appended).
536 ProfilerNativeStackWalker native_stack_walker(
537 &counters_, ILLEGAL_PORT, nullptr, nullptr, stack_lower, stack_upper, pc,
538 fp, sp, /*skip_count=*/0);
539 native_stack_walker.walk();
540 OS::PrintErr("-- End of DumpStackTrace\n");
541
542 if (thread != nullptr) {
543 if (thread->execution_state() == Thread::kThreadInNative) {
544 TransitionNativeToVM transition(thread);
546 } else if (thread->execution_state() == Thread::kThreadInVM) {
548 }
549 }
550
551 DumpCompilerState(thread);
552}
553#endif // !defined(PRODUCT) || defined(DART_PRECOMPILER)
554
555#ifndef PRODUCT
556
// Static state for the profiler proper and the sample-block processing
// thread. NOTE(review): the initializer for processor_thread_id_ (original
// line 564) is elided in this listing.
557RelaxedAtomic<bool> Profiler::initialized_ = false;
558SampleBlockBuffer* Profiler::sample_block_buffer_ = nullptr;
559
560bool SampleBlockProcessor::initialized_ = false;
561bool SampleBlockProcessor::shutdown_ = false;
562bool SampleBlockProcessor::thread_running_ = false;
563ThreadJoinId SampleBlockProcessor::processor_thread_id_ =
565Monitor* SampleBlockProcessor::monitor_ = nullptr;
566
// Profiler initialization: clamps flag values, then (re)creates the shared
// sample-block buffer when profiling is enabled. NOTE(review): the function
// opener and the lines between 580 and 585 (presumably starting supporting
// threads — TODO confirm) are elided in this listing.
568 // Place some sane restrictions on user controlled flags.
569 SetSampleDepth(FLAG_max_profile_depth);
570 if (!FLAG_profiler) {
571 return;
572 }
573 ASSERT(!initialized_);
574 SetSamplePeriod(FLAG_profile_period);
575 // The profiler may have been shutdown previously, in which case the sample
576 // buffer will have already been initialized.
577 if (sample_block_buffer_ == nullptr) {
578 intptr_t num_blocks = CalculateSampleBufferCapacity();
579 sample_block_buffer_ = new SampleBlockBuffer(num_blocks);
580 }
585 initialized_ = true;
586}
587
// Isolate visitor used at profiler shutdown: detaches every isolate's
// current CPU and allocation sample blocks. NOTE(review): the class
// declaration and constructor lines are elided in this listing.
589 public:
591 virtual ~SampleBlockCleanupVisitor() = default;
592
593 void VisitIsolate(Isolate* isolate) {
594 isolate->set_current_allocation_sample_block(nullptr);
595 isolate->set_current_sample_block(nullptr);
596 }
597};
598
// Profiler teardown: visits all isolates to detach their sample blocks and
// clears initialized_. NOTE(review): the function opener and the lines
// constructing the visitor (original lines 604-606) are elided here.
600 if (!FLAG_profiler) {
601 return;
602 }
603 ASSERT(initialized_);
607 Isolate::VisitIsolates(&visitor);
608 initialized_ = false;
609}
610
// Reconciles profiler state with FLAG_profiler: tears down when the flag was
// turned off while running, initializes when turned on while stopped.
// NOTE(review): the function opener is elided in this listing.
612 if (!FLAG_profiler && initialized_) {
613 Cleanup();
614 } else if (FLAG_profiler && !initialized_) {
615 Init();
616 }
617}
618
619void Profiler::SetSampleDepth(intptr_t depth) {
620 const int kMinimumDepth = 2;
621 const int kMaximumDepth = 255;
622 if (depth < kMinimumDepth) {
623 FLAG_max_profile_depth = kMinimumDepth;
624 } else if (depth > kMaximumDepth) {
625 FLAG_max_profile_depth = kMaximumDepth;
626 } else {
627 FLAG_max_profile_depth = depth;
628 }
629}
630
631static intptr_t SamplesPerSecond() {
632 const intptr_t kMicrosPerSec = 1000000;
633 return kMicrosPerSec / FLAG_profile_period;
634}
635
// Computes how many SampleBlocks are needed so the buffer can hold
// FLAG_sample_buffer_duration seconds of samples, accounting for worst-case
// sample chaining on deep stacks. NOTE(review): the early-return value for a
// non-positive duration (original line 638) is elided in this listing.
636intptr_t Profiler::CalculateSampleBufferCapacity() {
637 if (FLAG_sample_buffer_duration <= 0) {
639 }
640 // Deeper stacks require more than a single Sample object to be represented
641 // correctly. These samples are chained, so we need to determine the worst
642 // case sample chain length for a single stack.
643 const intptr_t max_sample_chain_length =
644 FLAG_max_profile_depth / kMaxSamplesPerTick;
645 const intptr_t sample_count = FLAG_sample_buffer_duration *
646 SamplesPerSecond() * max_sample_chain_length;
647 return (sample_count / SampleBlock::kSamplesPerBlock) + 1;
648}
649
650void Profiler::SetSamplePeriod(intptr_t period) {
651 const int kMinimumProfilePeriod = 50;
652 if (period < kMinimumProfilePeriod) {
653 FLAG_profile_period = kMinimumProfilePeriod;
654 } else {
655 FLAG_profile_period = period;
656 }
657 ThreadInterrupter::SetInterruptPeriod(FLAG_profile_period);
658}
659
// Re-applies the current FLAG_profile_period (re-clamping it and notifying
// the interrupter). NOTE(review): the function opener is elided here.
661 SetSamplePeriod(FLAG_profile_period);
662}
663
// SampleBlockBuffer constructor body: allocates one page-rounded virtual
// memory region holding blocks * samples_per_block Samples and carves it
// into SampleBlocks. NOTE(review): the constructor opener and the
// out-of-memory handler (original lines 664, 673) are elided here.
665 intptr_t samples_per_block) {
666 const intptr_t size = Utils::RoundUp(
667 blocks * samples_per_block * sizeof(Sample), VirtualMemory::PageSize());
668 const bool executable = false;
669 const bool compressed = false;
670 memory_ =
671 VirtualMemory::Allocate(size, executable, compressed, "dart-profiler");
672 if (memory_ == nullptr) {
674 }
675 sample_buffer_ = reinterpret_cast<Sample*>(memory_->address());
676 blocks_ = new SampleBlock[blocks];
677 for (intptr_t i = 0; i < blocks; ++i) {
678 blocks_[i].Init(&sample_buffer_[i * samples_per_block], samples_per_block);
679 }
680 capacity_ = blocks;
681 cursor_ = 0;
682}
683
// Destructor body: releases the block array and backing memory, resetting
// bookkeeping. NOTE(review): the destructor opener (line 684) is elided.
685 delete[] blocks_;
686 blocks_ = nullptr;
687 delete memory_;
688 memory_ = nullptr;
689 capacity_ = 0;
690 cursor_ = 0;
692
693SampleBlock* SampleBlockBuffer::ReserveSampleBlock() {
694 intptr_t capacity = capacity_;
695 intptr_t start = cursor_.fetch_add(1) % capacity;
696 intptr_t i = start;
697 do {
698 SampleBlock* block = &blocks_[i];
699 if (block->TryAllocateFree()) {
700 return block;
701 }
702 i = (i + 1) % capacity;
703 } while (i != start);
704
705 // No free blocks: try for completed block instead.
706 i = start;
707 do {
708 SampleBlock* block = &blocks_[i];
709 if (block->TryAllocateCompleted()) {
710 return block;
711 }
712 i = (i + 1) % capacity;
713 } while (i != start);
714
715 return nullptr;
716}
717
// Releases every completed block back to the free state. NOTE(review): the
// function opener is elided in this listing.
719 for (intptr_t i = 0; i < capacity_; i++) {
720 blocks_[i].FreeCompleted();
721 }
722}
723
// Returns true when any sample in this block carries a user tag present in
// |tag_table| that is marked streamable. |tag| is a reusable handle updated
// (via ^=) as the table is scanned. NOTE(review): the signature opener is
// elided in this listing.
725 UserTag* tag) {
726 for (intptr_t i = 0; i < capacity_; ++i) {
727 Sample* sample = At(i);
728 uword sample_tag = sample->user_tag();
729 for (intptr_t j = 0; j < tag_table.Length(); ++j) {
730 *tag ^= tag_table.At(j);
731 if (tag->tag() == sample_tag && tag->streamable()) {
732 return true;
733 }
734 }
735 }
736 return false;
737}
738
739static void FlushSampleBlocks(Isolate* isolate) {
740 ASSERT(isolate != nullptr);
741
742 SampleBlock* block = isolate->current_sample_block();
743 if (block != nullptr) {
744 isolate->set_current_sample_block(nullptr);
745 block->MarkCompleted();
746 }
747
748 block = isolate->current_allocation_sample_block();
749 if (block != nullptr) {
750 isolate->set_current_allocation_sample_block(nullptr);
751 block->MarkCompleted();
752 }
753}
754
// Collects this isolate's samples into a ProcessedSampleBuffer: flushes the
// isolate's current blocks, then streams every block it can acquire through
// |filter|, freeing blocks whose samples were taken. NOTE(review): the
// signature opener lines (original 755, 758) are elided in this listing.
756 Isolate* isolate,
757 SampleFilter* filter,
759 ASSERT(isolate != nullptr);
760
761 Thread* thread = Thread::Current();
762 Zone* zone = thread->zone();
763
// Allocate the result in the current zone when the caller did not supply one.
764 if (buffer == nullptr) {
765 buffer = new (zone) ProcessedSampleBuffer();
766 }
767
768 FlushSampleBlocks(isolate);
769
770 for (intptr_t i = 0; i < capacity_; ++i) {
771 SampleBlock* block = &blocks_[i];
772 if (block->TryAcquireStreaming(isolate)) {
773 block->BuildProcessedSampleBuffer(filter, buffer);
774 if (filter->take_samples()) {
775 block->StreamingToFree();
776 } else {
777 block->StreamingToCompleted();
778 }
779 }
780 }
781
782 return buffer;
783}
784
// SampleBlock::ReserveSample body: atomically claims the next slot in this
// block, or returns nullptr when full. NOTE(review): the function opener
// (original line 785) is elided in this listing.
786 intptr_t slot = cursor_.fetch_add(1u);
787 if (slot < capacity_) {
788 return At(slot);
789 }
790 return nullptr;
791}
792
// ReserveSampleAndLink body: reserves a continuation Sample of the same kind
// (CPU vs allocation) from the owning isolate's buffer, initializes it from
// |previous|, and links it as previous's continuation. Returns nullptr when
// no block is available (sample dropped). NOTE(review): the opener lines
// (original 793, 795) are elided here.
794 ASSERT(previous != nullptr);
796 Isolate* isolate = owner_;
797 ASSERT(isolate != nullptr);
798 Sample* next = previous->is_allocation_sample()
799 ? buffer->ReserveAllocationSample(isolate)
800 : buffer->ReserveCPUSample(isolate);
801 if (next == nullptr) {
802 return nullptr; // No blocks left, so drop sample.
803 }
804 next->Init(previous->port(), previous->timestamp(), previous->tid());
805 next->set_head_sample(false);
806 // Mark that previous continues at next.
807 previous->SetContinuation(next);
808 return next;
809}
810
// Thin wrappers over ReserveSampleImpl: false selects a CPU sample, true an
// allocation sample. NOTE(review): both openers (original 811, 815) are
// elided in this listing.
812 return ReserveSampleImpl(isolate, false);
813}
814
816 return ReserveSampleImpl(isolate, true);
817}
818
// Reserves one Sample for |isolate|, first trying the isolate's current
// block of the requested kind, then allocating a fresh block from the
// buffer. A filled previous block is marked completed and, for non-system
// isolates with a live mutator, completed-block processing is signalled.
// NOTE(review): lines selecting/installing the current block (original 822,
// 840, 842) and the notification call (851) are elided in this listing.
819Sample* SampleBlockBuffer::ReserveSampleImpl(Isolate* isolate,
820 bool allocation_sample) {
821 SampleBlock* block = allocation_sample
823 : isolate->current_sample_block();
824 Sample* sample = nullptr;
825 if (block != nullptr) {
826 sample = block->ReserveSample();
827 }
828 if (sample != nullptr) {
829 return sample;
830 }
831
// Current block (if any) is full: rotate to a new block.
832 SampleBlock* next = ReserveSampleBlock();
833 if (next == nullptr) {
834 // We're out of blocks to reserve. Drop the sample.
835 return nullptr;
836 }
837
838 next->set_owner(isolate);
839 if (allocation_sample) {
841 } else {
843 }
844 if (block != nullptr) {
845 block->MarkCompleted();
846 if (!Isolate::IsSystemIsolate(isolate)) {
847 Thread* mutator = isolate->mutator_thread();
848 // The mutator thread might be NULL if we sample in the middle of
849 // Thread::Enter/ExitIsolate.
850 if ((mutator != nullptr) && isolate->TrySetHasCompletedBlocks()) {
852 }
853 }
854 }
855 return next->ReserveSample();
856}
857
858// Attempts to find the true return address when a Dart frame is being setup
859// or torn down.
860// NOTE: Architecture specific implementations below.
// NOTE(review): the class declaration line is elided in this listing.
862 public:
  // Builds a locator from a Sample's captured pc and copied stack words.
863 ReturnAddressLocator(Sample* sample, const Code& code)
864 : stack_buffer_(sample->GetStackBuffer()),
865 pc_(sample->pc()),
866 code_(Code::ZoneHandle(code.ptr())) {
867 ASSERT(!code_.IsNull());
869 }
870
  // Builds a locator from an explicit pc and caller-provided stack buffer.
871 ReturnAddressLocator(uword pc, uword* stack_buffer, const Code& code)
872 : stack_buffer_(stack_buffer),
873 pc_(pc),
874 code_(Code::ZoneHandle(code.ptr())) {
875 ASSERT(!code_.IsNull());
876 ASSERT(code_.ContainsInstructionAt(pc_));
877 }
878
879 uword pc() { return pc_; }
880
881 // Returns false on failure.
882 bool LocateReturnAddress(uword* return_address);
883
884 // Returns offset into code object.
885 intptr_t RelativePC() {
886 ASSERT(pc() >= code_.PayloadStart());
887 return static_cast<intptr_t>(pc() - code_.PayloadStart());
888 }
889
  // Returns a pointer to the instruction byte at |offset| within the code.
890 uint8_t* CodePointer(intptr_t offset) {
891 const intptr_t size = code_.Size();
892 ASSERT(offset < size);
893 uint8_t* code_pointer = reinterpret_cast<uint8_t*>(code_.PayloadStart());
894 code_pointer += offset;
895 return code_pointer;
896 }
897
  // Reads the i-th word of the captured stack snapshot.
898 uword StackAt(intptr_t i) {
899 ASSERT(i >= 0);
901 return stack_buffer_[i];
902 }
903
904 private:
905 uword* stack_buffer_;
906 uword pc_;
907 const Code& code_;
908};
909
// Architecture-specific LocateReturnAddress implementations. On IA32/X64 the
// prologue instruction sequence is pattern-matched to decide where the
// return address sits on the stack; other architectures report failure.
// NOTE(review): the function opener lines (original 911, 961) are elided in
// this listing.
910#if defined(TARGET_ARCH_IA32) || defined(TARGET_ARCH_X64)
912 ASSERT(return_address != nullptr);
913 const intptr_t offset = RelativePC();
914 ASSERT(offset >= 0);
915 const intptr_t size = code_.Size();
916 ASSERT(offset < size);
917 const intptr_t prologue_offset = code_.GetPrologueOffset();
918 if (offset < prologue_offset) {
919 // Before the prologue, return address is at the top of the stack.
920 // TODO(johnmccutchan): Some intrinsics and stubs do not conform to the
921 // expected stack layout. Use a more robust solution for those code objects.
922 *return_address = StackAt(0);
923 return true;
924 }
925 // Detect if we are:
926 // push ebp <--- here
927 // mov ebp, esp
928 // on X64 the register names are different but the sequence is the same.
929 ProloguePattern pp(pc());
930 if (pp.IsValid()) {
931 // Stack layout:
932 // 0 RETURN ADDRESS.
933 *return_address = StackAt(0);
934 return true;
935 }
936 // Detect if we are:
937 // push ebp
938 // mov ebp, esp <--- here
939 // on X64 the register names are different but the sequence is the same.
940 SetFramePointerPattern sfpp(pc());
941 if (sfpp.IsValid()) {
942 // Stack layout:
943 // 0 CALLER FRAME POINTER
944 // 1 RETURN ADDRESS
945 *return_address = StackAt(1);
946 return true;
947 }
948 // Detect if we are:
949 // ret <--- here
950 ReturnPattern rp(pc());
951 if (rp.IsValid()) {
952 // Stack layout:
953 // 0 RETURN ADDRESS.
954 *return_address = StackAt(0);
955 return true;
956 }
957 return false;
958}
959#elif defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64) || \
960 defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
962 ASSERT(return_address != nullptr);
963 return false;
964}
965#else
966#error ReturnAddressLocator implementation missing for this architecture.
967#endif
968
// Time-window filter body: passes every sample when no window was supplied
// (-1 sentinels), otherwise requires the sample timestamp to fall within
// [time_origin_micros_, time_origin_micros_ + time_extent_micros_].
// NOTE(review): the function opener (original line 969) is elided here.
970 if ((time_origin_micros_ == -1) || (time_extent_micros_ == -1)) {
971 // No time filter passed in, always pass.
972 return true;
973 }
974 const int64_t timestamp = sample->timestamp();
975 int64_t delta = timestamp - time_origin_micros_;
976 return (delta >= 0) && (delta <= time_extent_micros_);
977}
978
// Thread-task filter body: passes when no mask is set, otherwise requires
// the sample's task kind to intersect thread_task_mask_. NOTE(review): the
// function opener (original line 979) is elided here.
980 const intptr_t task = static_cast<intptr_t>(sample->thread_task());
981 if (thread_task_mask_ == kNoTaskFilter) {
982 return true;
983 }
984 return (task & thread_task_mask_) != 0;
985}
986
// ClearProfileVisitor: visits each sample of an isolate (keyed by its main
// port) and clears it. NOTE(review): the constructor/visit openers (original
// lines 987, 990) are elided in this listing.
988 : SampleVisitor(isolate->main_port()) {}
989
991 sample->Clear();
992}
993
994// Executing Dart code, walk the stack.
// Walks Dart frames using the generated-code frame layout: skips exit/entry
// stub frames and follows saved-FP links, appending (pc, fp) pairs until the
// Dart stack ends or the sample is full. NOTE(review): the class declaration,
// constructor opener, and several statements inside walk() are elided in this
// listing.
996 public:
998 Sample* sample,
999 SampleBuffer* sample_buffer,
1000 uword pc,
1001 uword fp,
1002 uword sp,
1003 uword lr,
1004 bool allocation_sample,
1005 intptr_t skip_count = 0)
1006 : ProfilerStackWalker((thread->isolate() != nullptr)
1007 ? thread->isolate()->main_port()
1008 : ILLEGAL_PORT,
1009 sample,
1010 sample_buffer,
1011 skip_count),
1012 thread_(thread),
1013 pc_(reinterpret_cast<uword*>(pc)),
1014 fp_(reinterpret_cast<uword*>(fp)),
1015 sp_(reinterpret_cast<uword*>(sp)),
1016 lr_(reinterpret_cast<uword*>(lr)) {}
1017
1018 void walk() {
// Frames are unreliable while deoptimizing; bail out early.
1020 if (thread_->isolate()->IsDeoptimizing()) {
1022 return;
1023 }
1024
1025 uword* exit_fp = reinterpret_cast<uword*>(thread_->top_exit_frame_info());
1026 bool has_exit_frame = exit_fp != nullptr;
1027 if (has_exit_frame) {
1028 // Exited from compiled code.
1029 pc_ = nullptr;
1030 fp_ = exit_fp;
1031
1032 // Skip exit frame.
1033 pc_ = CallerPC();
1034 fp_ = CallerFP();
1035 } else {
1036 if (thread_->vm_tag() == VMTag::kDartTagId) {
1037 // Running compiled code.
1038 // Use the FP and PC from the thread interrupt or simulator; already set
1039 // in the constructor.
1040 } else {
1041 // No Dart on the stack; caller shouldn't use this walker.
1042 UNREACHABLE();
1043 }
1044
1045 const bool is_entry_frame =
1046#if defined(TARGET_ARCH_IA32) || defined(TARGET_ARCH_X64)
1047 StubCode::InInvocationStub(Stack(0)) ||
1049#else
1050 StubCode::InInvocationStub(reinterpret_cast<uword>(lr_));
1051#endif
1052 if (is_entry_frame) {
1053 // During the prologue of a function, CallerPC will return the caller's
1054 // caller. For most frames, the missing PC will be added during profile
1055 // processing. However, during this stack walk, it can cause us to fail
1056 // to identify the entry frame and lead the stack walk into the weeds.
1057 // Do not continue the stack walk since this might be a false positive
1058 // from a Smi or unboxed value.
1060 return;
1061 }
1062 }
1063
1064 sample_->set_exit_frame_sample(has_exit_frame);
1065
1066 for (;;) {
1067 // Skip entry frame.
1068 if (StubCode::InInvocationStub(reinterpret_cast<uword>(pc_))) {
1069 pc_ = nullptr;
1070 fp_ = ExitLink();
1071 if (fp_ == nullptr) {
1072 break; // End of Dart stack.
1073 }
1074
1075 // Skip exit frame.
1076 pc_ = CallerPC();
1077 fp_ = CallerFP();
1078
1079 // At least one frame between exit and next entry frame.
1081 !StubCode::InInvocationStub(reinterpret_cast<uword>(pc_)));
1082 }
1083
1084 if (!Append(reinterpret_cast<uword>(pc_), reinterpret_cast<uword>(fp_))) {
1085 break; // Sample is full.
1086 }
1087
1088 pc_ = CallerPC();
1089 fp_ = CallerFP();
1090 }
1091 }
1092
1093 private:
  // Loads the saved caller PC using the generated-code frame layout.
1094 uword* CallerPC() const {
1095 ASSERT(fp_ != nullptr);
1096 uword* caller_pc_ptr = fp_ + kSavedCallerPcSlotFromFp;
1097 // MSan/ASan are unaware of frames initialized by generated code.
1098 MSAN_UNPOISON(caller_pc_ptr, kWordSize);
1099 ASAN_UNPOISON(caller_pc_ptr, kWordSize);
1100 return reinterpret_cast<uword*>(*caller_pc_ptr);
1101 }
1102
  // Loads the saved caller FP using the generated-code frame layout.
1103 uword* CallerFP() const {
1104 ASSERT(fp_ != nullptr);
1105 uword* caller_fp_ptr = fp_ + kSavedCallerFpSlotFromFp;
1106 // MSan/ASan are unaware of frames initialized by generated code.
1107 MSAN_UNPOISON(caller_fp_ptr, kWordSize);
1108 ASAN_UNPOISON(caller_fp_ptr, kWordSize);
1109 return reinterpret_cast<uword*>(*caller_fp_ptr);
1110 }
1111
  // Loads the exit-link slot of an entry frame (FP of the previous Dart
  // segment, or null at the bottom of the Dart stack).
1112 uword* ExitLink() const {
1113 ASSERT(fp_ != nullptr);
1114 uword* exit_link_ptr = fp_ + kExitLinkSlotFromEntryFp;
1115 // MSan/ASan are unaware of frames initialized by generated code.
1116 MSAN_UNPOISON(exit_link_ptr, kWordSize);
1117 ASAN_UNPOISON(exit_link_ptr, kWordSize);
1118 return reinterpret_cast<uword*>(*exit_link_ptr);
1119 }
1120
  // Reads the word at sp_[index].
1121 uword Stack(intptr_t index) const {
1122 ASSERT(sp_ != nullptr);
1123 uword* stack_ptr = sp_ + index;
1124 // MSan/ASan are unaware of frames initialized by generated code.
1125 MSAN_UNPOISON(stack_ptr, kWordSize);
1126 ASAN_UNPOISON(stack_ptr, kWordSize);
1127 return *stack_ptr;
1128 }
1129
1130 Thread* const thread_;
1131 uword* pc_;
1132 uword* fp_;
1133 uword* sp_;
1134 uword* lr_;
1135};
1136
// Copies the first Sample::kStackBufferSizeInWords words at |sp_addr| into
// the sample's stack buffer (no-op when sp is null). NOTE(review): two loop
// body lines (original 1143-1144, presumably a bounds/validity guard — TODO
// confirm) are elided in this listing.
1137static void CopyStackBuffer(Sample* sample, uword sp_addr) {
1138 ASSERT(sample != nullptr);
1139 uword* sp = reinterpret_cast<uword*>(sp_addr);
1140 uword* buffer = sample->GetStackBuffer();
1141 if (sp != nullptr) {
1142 for (intptr_t i = 0; i < Sample::kStackBufferSizeInWords; i++) {
1145 buffer[i] = *sp;
1146 sp++;
1147 }
1148 }
1149}
1150
1151#if defined(DART_HOST_OS_WINDOWS)
1152// On Windows this code is synchronously executed from the thread interrupter
1153// thread. This means we can safely have a static fault_address.
// SEH filter used while sampling: records the faulting address of a guard
// page violation (and asserts it was a read) so the handler can run; any
// other exception continues the normal search.
1154static uword fault_address = 0;
1155static LONG GuardPageExceptionFilter(EXCEPTION_POINTERS* ep) {
1156 fault_address = 0;
1157 if (ep->ExceptionRecord->ExceptionCode != STATUS_GUARD_PAGE_VIOLATION) {
1158 return EXCEPTION_CONTINUE_SEARCH;
1159 }
1160 // https://goo.gl/p5Fe10
1161 fault_address = ep->ExceptionRecord->ExceptionInformation[1];
1162 // Read access.
1163 ASSERT(ep->ExceptionRecord->ExceptionInformation[0] == 0);
1164 return EXCEPTION_EXECUTE_HANDLER;
1165}
1166#endif
1167
// All memory access done to collect the sample is performed in CollectSample
// so that, on Windows, a guard-page fault raised while reading the
// interrupted thread's stack can be trapped with SEH and the sample dropped.
static void CollectSample(Isolate* isolate,
                          bool exited_dart_code,
                          bool in_dart_code,
                          Sample* sample,
                          ProfilerNativeStackWalker* native_stack_walker,
                          ProfilerDartStackWalker* dart_stack_walker,
                          uword pc,
                          uword fp,
                          uword sp,
                          ProfilerCounters* counters) {
  ASSERT(counters != nullptr);
#if defined(DART_HOST_OS_WINDOWS)
  // Use structured exception handling to trap guard page access on Windows.
  __try {
#endif

  if (in_dart_code) {
    // We can only trust the stack pointer if we are executing Dart code.
    // See http://dartbug.com/20421 for details.
    CopyStackBuffer(sample, sp);
  }

  // Exactly one stack-walk strategy is chosen, preferring the native walker
  // (which sees both native and Dart frames) when FLAG_profile_vm is set.
  if (FLAG_profile_vm) {
    // Always walk the native stack collecting both native and Dart frames.
    counters->stack_walker_native.fetch_add(1);
    native_stack_walker->walk();
  } else if (StubCode::HasBeenInitialized() && exited_dart_code) {
    counters->stack_walker_dart_exit.fetch_add(1);
    // We have a valid exit frame info, use the Dart stack walker.
    dart_stack_walker->walk();
  } else if (StubCode::HasBeenInitialized() && in_dart_code) {
    counters->stack_walker_dart.fetch_add(1);
    // We are executing Dart code. We have frame pointers.
    dart_stack_walker->walk();
  } else {
    // No walker is safe here; record only the interrupted pc.
    counters->stack_walker_none.fetch_add(1);
    sample->SetAt(0, pc);
  }

#if defined(DART_HOST_OS_WINDOWS)
  // Use structured exception handling to trap guard page access.
  } __except (GuardPageExceptionFilter(GetExceptionInformation())) {  // NOLINT
    // Sample collection triggered a guard page fault:
    // 1) discard entire sample.
    sample->set_ignore_sample(true);

    // 2) Reenable guard bit on page that triggered the fault.
    // https://goo.gl/5mCsXW
    DWORD new_protect = PAGE_READWRITE | PAGE_GUARD;
    DWORD old_protect = 0;
    BOOL success =
        VirtualProtect(reinterpret_cast<void*>(fault_address),
                       sizeof(fault_address), new_protect, &old_protect);
    USE(success);
    ASSERT(success);
    ASSERT(old_protect == PAGE_READWRITE);
  }
#endif
}
1228
// Reserves a sample (allocation or CPU) for |thread|'s isolate and
// initializes it with the isolate port, a monotonic timestamp, the thread id,
// VM/user tags, and the thread's task kind. Returns nullptr when no sample
// could be reserved.
static Sample* SetupSample(Thread* thread,
                           bool allocation_sample,
                           ThreadId tid) {
  ASSERT(thread != nullptr);
  Isolate* isolate = thread->isolate();
  // NOTE(review): extraction gap — the declaration of |buffer| (presumably
  // Profiler::sample_block_buffer()) is missing from this view.
  Sample* sample = allocation_sample ? buffer->ReserveAllocationSample(isolate)
                                     : buffer->ReserveCPUSample(isolate);
  if (sample == nullptr) {
    return nullptr;
  }
  sample->Init(isolate->main_port(), OS::GetCurrentMonotonicMicros(), tid);
  uword vm_tag = thread->vm_tag();
#if defined(USING_SIMULATOR)
  // When running in the simulator, the runtime entry function address
  // (stored as the vm tag) is the address of a redirect function.
  // Attempt to find the real runtime entry function address and use that.
  uword redirect_vm_tag = Simulator::FunctionForRedirect(vm_tag);
  if (redirect_vm_tag != 0) {
    vm_tag = redirect_vm_tag;
  }
#endif
  sample->set_vm_tag(vm_tag);
  sample->set_user_tag(isolate->user_tag());
  sample->set_thread_task(thread->task_kind());
  return sample;
}
1256
1257static bool CheckIsolate(Isolate* isolate) {
1258 if ((isolate == nullptr) || (Dart::vm_isolate() == nullptr)) {
1259 // No isolate.
1260 return false;
1261 }
1262 return isolate != Dart::vm_isolate();
1263}
1264
                                intptr_t cid,
                                uint32_t identity_hash) {
  // Records an allocation sample for an object of class id |cid| with the
  // given |identity_hash|. NOTE(review): the opening signature line is
  // missing from this view; per the surrounding index it is
  // void Profiler::SampleAllocation(Thread* thread, ...).
  ASSERT(thread != nullptr);
  OSThread* os_thread = thread->os_thread();
  ASSERT(os_thread != nullptr);
  Isolate* isolate = thread->isolate();
  if (!CheckIsolate(isolate)) {
    return;
  }
  const bool exited_dart_code = thread->HasExitedDartCode();

  // NOTE(review): extraction gap — the declaration of |buffer| (presumably
  // Profiler::sample_block_buffer()) is missing here.
  if (buffer == nullptr) {
    // Profiler not initialized.
    return;
  }

  uintptr_t sp = OSThread::GetCurrentStackPointer();
  uintptr_t fp = 0;
  uintptr_t pc = OS::GetProgramCounter();
  uintptr_t lr = 0;

  // NOTE(review): extraction gap — a statement (likely one capturing the
  // current frame pointer into |fp|) is missing here.

  uword stack_lower = 0;
  uword stack_upper = 0;

  if (!GetAndValidateThreadStackBounds(os_thread, thread, fp, sp, &stack_lower,
                                       &stack_upper)) {
    // Could not get stack boundary.
    return;
  }

  // Reserve and initialize an allocation sample.
  Sample* sample =
      SetupSample(thread, /*allocation_sample=*/ true, os_thread->trace_id());
  if (sample == nullptr) {
    // We were unable to assign a sample for this allocation.
    counters_.sample_allocation_failure++;
    return;
  }
  sample->SetAllocationCid(cid);
  sample->set_allocation_identity_hash(identity_hash);

  if (FLAG_profile_vm_allocation) {
    // Native walk: captures both native and Dart frames.
    ProfilerNativeStackWalker native_stack_walker(
        &counters_, (isolate != nullptr) ? isolate->main_port() : ILLEGAL_PORT,
        sample, isolate->current_allocation_sample_block(), stack_lower,
        stack_upper, pc, fp, sp);
    native_stack_walker.walk();
  } else if (exited_dart_code) {
    // Valid exit frame info: walk only the Dart frames.
    ProfilerDartStackWalker dart_exit_stack_walker(
        thread, sample, isolate->current_allocation_sample_block(), pc, fp, sp,
        lr, /* allocation_sample*/ true);
    dart_exit_stack_walker.walk();
  } else {
    // Fall back.
    uintptr_t pc = OS::GetProgramCounter();
    sample->SetAt(0, pc);
  }
}
1326
// Records a degenerate one-frame sample containing only |pc|; used when a
// full stack walk is not safe (e.g. while deoptimizing, or when stack bounds
// could not be validated).
void Profiler::SampleThreadSingleFrame(Thread* thread,
                                       Sample* sample,
                                       uintptr_t pc) {
  ASSERT(thread != nullptr);
  OSThread* os_thread = thread->os_thread();
  ASSERT(os_thread != nullptr);
  Isolate* isolate = thread->isolate();

  // NOTE(review): extraction gap — a statement is missing here.

  // Increment counter for vm tag.
  // NOTE(review): extraction gap — the declaration of |counters| (presumably
  // isolate->vm_tag_counters()) is missing here.
  ASSERT(counters != nullptr);
  if (thread->IsDartMutatorThread()) {
    counters->Increment(sample->vm_tag());
  }

  // Write the single pc value.
  sample->SetAt(0, pc);
}
1347
  // Profiler::SampleThread: called from the interrupt path with the
  // interrupted thread's register |state|; reserves a sample and walks the
  // stack. NOTE(review): the opening signature lines are missing from this
  // view; per the surrounding index this is
  // void Profiler::SampleThread(Thread* thread, const InterruptedThreadState&).
  ASSERT(thread != nullptr);
  OSThread* os_thread = thread->os_thread();
  ASSERT(os_thread != nullptr);
  Isolate* isolate = thread->isolate();

  // Thread is not doing VM work.
  if (thread->task_kind() == Thread::kUnknownTask) {
    counters_.bail_out_unknown_task.fetch_add(1);
    return;
  }

  // NOTE(review): extraction gap — the guard opening this block (presumably a
  // StubCode::InJumpToFrameStub check on the interrupted pc) is missing here.
    // The JumpToFrame stub manually adjusts the stack pointer, frame
    // pointer, and some isolate state. It is not safe to walk the
    // stack when executing this stub.
    counters_.bail_out_jump_to_exception_handler.fetch_add(1);
    return;
  }

  const bool in_dart_code = thread->IsExecutingDartCode();

  uintptr_t sp = 0;
  uintptr_t fp = state.fp;
  uintptr_t pc = state.pc;
  uintptr_t lr = state.lr;
#if defined(USING_SIMULATOR)
  Simulator* simulator = nullptr;
#endif

  if (in_dart_code) {
// If we're in Dart code, use the Dart stack pointer.
#if defined(USING_SIMULATOR)
    // Under the simulator the real Dart registers live in simulator state,
    // not in the host's interrupted-thread state.
    simulator = isolate->simulator();
    sp = simulator->get_register(SPREG);
    fp = simulator->get_register(FPREG);
    pc = simulator->get_pc();
    lr = simulator->get_lr();
#else
    sp = state.dsp;
#endif
  } else {
    // If we're in runtime code, use the C stack pointer.
    sp = state.csp;
  }

  if (!CheckIsolate(isolate)) {
    counters_.bail_out_check_isolate.fetch_add(1);
    return;
  }

  // NOTE(review): extraction gap — the declaration of |sample_block_buffer|
  // (presumably Profiler::sample_block_buffer()) is missing here.
  if (sample_block_buffer == nullptr) {
    // Profiler not initialized.
    return;
  }

  // Setup sample.
  Sample* sample =
      SetupSample(thread, /*allocation_sample=*/ false, os_thread->trace_id());
  if (sample == nullptr) {
    // We were unable to assign a sample for this profiler tick.
    counters_.sample_allocation_failure++;
    return;
  }

  if (thread->IsDartMutatorThread()) {
    if (isolate->IsDeoptimizing()) {
      // Frames are being rewritten; only the pc can be trusted.
      counters_.single_frame_sample_deoptimizing.fetch_add(1);
      SampleThreadSingleFrame(thread, sample, pc);
      return;
    }
  }

  uword stack_lower = 0;
  uword stack_upper = 0;
  if (!GetAndValidateThreadStackBounds(os_thread, thread, fp, sp, &stack_lower,
                                       &stack_upper)) {
    counters_.single_frame_sample_get_and_validate_stack_bounds.fetch_add(1);
    // Could not get stack boundary.
    SampleThreadSingleFrame(thread, sample, pc);
    return;
  }

  // At this point we have a valid stack boundary for this isolate and
  // know that our initial stack and frame pointers are within the boundary.

  // Increment counter for vm tag.
  // NOTE(review): extraction gap — the declaration of |counters| (presumably
  // isolate->vm_tag_counters()) is missing here.
  ASSERT(counters != nullptr);
  if (thread->IsDartMutatorThread()) {
    counters->Increment(sample->vm_tag());
  }

  // Construct both walkers up front; CollectSample decides which one runs.
  ProfilerNativeStackWalker native_stack_walker(
      &counters_, (isolate != nullptr) ? isolate->main_port() : ILLEGAL_PORT,
      sample, isolate->current_sample_block(), stack_lower, stack_upper, pc, fp,
      sp);
  const bool exited_dart_code = thread->HasExitedDartCode();
  ProfilerDartStackWalker dart_stack_walker(
      thread, sample, isolate->current_sample_block(), pc, fp, sp, lr,
      /* allocation_sample*/ false);

  // All memory access is done inside CollectSample.
  CollectSample(isolate, exited_dart_code, in_dart_code, sample,
                &native_stack_walker, &dart_stack_walker, pc, fp, sp,
                &counters_);
}
1457
1459
  return code_.PayloadStart();
}
// NOTE(review): extraction gaps — per the surrounding index these fragments
// are, in order: CodeDescriptor::Start (above), CodeDescriptor::Size,
// CodeDescriptor::CompileTimestamp, and the CodeLookupTable constructor
// (below); their signature lines are missing from this view.

  return code_.Size();
}

  return code_.compile_timestamp();
}

  Build(thread);
}
1475
 public:
  // NOTE(review): extraction gap — the class header (CodeLookupTableBuilder,
  // an object visitor per the override below) and the constructor's signature
  // line are missing from this view.
  ASSERT(table_ != nullptr);
}

  // Visits every heap object and records each Code object — except the
  // "unknown Dart code" sentinel — into |table_|.
  void VisitObject(ObjectPtr raw_obj) override {
    if (raw_obj->IsCode() && !Code::IsUnknownDartCode(Code::RawCast(raw_obj))) {
      table_->Add(Code::Handle(Code::RawCast(raw_obj)));
    }
  }

 private:
  CodeLookupTable* table_;  // Not owned; the table being populated.
};
1493
1494void CodeLookupTable::Build(Thread* thread) {
1495 ASSERT(thread != nullptr);
1496 Isolate* vm_isolate = Dart::vm_isolate();
1497 ASSERT(vm_isolate != nullptr);
1498
1499 // Clear.
1500 code_objects_.Clear();
1501
1502 thread->CheckForSafepoint();
1503 // Add all found Code objects.
1504 {
1505 TimelineBeginEndScope tl(Timeline::GetIsolateStream(),
1506 "CodeLookupTable::Build HeapIterationScope");
1507 HeapIterationScope iteration(thread);
1508 CodeLookupTableBuilder cltb(this);
1509 iteration.IterateVMIsolateObjects(&cltb);
1510 iteration.IterateOldObjects(&cltb);
1511 }
1512 thread->CheckForSafepoint();
1513
1514 // Sort by entry.
1515 code_objects_.Sort(CodeDescriptor::Compare);
1516
1517#if defined(DEBUG)
1518 if (length() <= 1) {
1519 return;
1520 }
1521 ASSERT(FindCode(0) == nullptr);
1522 ASSERT(FindCode(~0) == nullptr);
1523 // Sanity check that we don't have duplicate entries and that the entries
1524 // are sorted.
1525 for (intptr_t i = 0; i < length() - 1; i++) {
1526 const CodeDescriptor* a = At(i);
1527 const CodeDescriptor* b = At(i + 1);
1528 ASSERT(a->Start() < b->Start());
1529 ASSERT(FindCode(a->Start()) == a);
1530 ASSERT(FindCode(b->Start()) == b);
1531 ASSERT(FindCode(a->Start() + a->Size() - 1) == a);
1532 ASSERT(FindCode(b->Start() + b->Size() - 1) == b);
1533 }
1534#endif
1535}
1536
1537void CodeLookupTable::Add(const Object& code) {
1538 ASSERT(!code.IsNull());
1539 ASSERT(code.IsCode());
1540 CodeDescriptor* cd = new CodeDescriptor(AbstractCode(code.ptr()));
1541 code_objects_.Add(cd);
1542}
1543
  // NOTE(review): extraction gap — the signature line is missing from this
  // view; per the surrounding index this is
  // const CodeDescriptor* CodeLookupTable::FindCode(uword pc) const.
  intptr_t first = 0;
  intptr_t count = length();
  // Upper-bound binary search over descriptors sorted by start address:
  // after the loop, |first| indexes the first entry whose start is > pc.
  while (count > 0) {
    intptr_t current = first;
    intptr_t step = count / 2;
    current += step;
    const CodeDescriptor* cd = At(current);
    if (pc >= cd->Start()) {
      first = ++current;
      count -= step + 1;
    } else {
      count = step;
    }
  }
  // First points to the first code object whose entry is greater than PC.
  // That means the code object we need to check is first - 1.
  if (first == 0) {
    // pc precedes the first entry.
    return nullptr;
  }
  first--;
  ASSERT(first >= 0);
  ASSERT(first < length());
  const CodeDescriptor* cd = At(first);
  if (cd->Contains(pc)) {
    return cd;
  }
  // pc falls in a gap between code objects.
  return nullptr;
}
1573
    SampleFilter* filter,
    // NOTE(review): extraction gap — the opening signature line (per the
    // surrounding index: SampleBuffer::BuildProcessedSampleBuffer) and the
    // trailing |ProcessedSampleBuffer* buffer| parameter line are missing
    // from this view.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  // Allocate a result buffer in the zone when the caller did not supply one.
  if (buffer == nullptr) {
    buffer = new (zone) ProcessedSampleBuffer();
  }

  // Scan every slot, skipping samples that are unusable or rejected by the
  // filter; each surviving head sample is expanded into a ProcessedSample.
  const intptr_t length = capacity();
  for (intptr_t i = 0; i < length; i++) {
    thread->CheckForSafepoint();
    Sample* sample = At(i);
    if (sample->ignore_sample()) {
      // Bad sample.
      continue;
    }
    if (!sample->head_sample()) {
      // An inner sample in a chain of samples.
      continue;
    }
    if (sample->timestamp() == 0) {
      // Empty.
      continue;
    }
    if (sample->At(0) == 0) {
      // No frames.
      continue;
    }
    if (filter != nullptr) {
      // If we're requesting all the native allocation samples, we don't care
      // whether or not we're in the same isolate as the sample.
      if (sample->port() != filter->port()) {
        // Another isolate.
        continue;
      }
      if (!filter->TimeFilterSample(sample)) {
        // Did not pass time filter.
        continue;
      }
      if (!filter->TaskFilterSample(sample)) {
        // Did not pass task filter.
        continue;
      }
      if (!filter->FilterSample(sample)) {
        // Did not pass filter.
        continue;
      }
    }
    buffer->Add(BuildProcessedSample(sample, buffer->code_lookup_table()));
  }
  return buffer;
}
1628
    Sample* sample,
    const CodeLookupTable& clt) {
  // NOTE(review): extraction gap — the opening signature line is missing from
  // this view; per the surrounding index this is
  // ProcessedSample* SampleBuffer::BuildProcessedSample(...).
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  ProcessedSample* processed_sample = new (zone) ProcessedSample();

  // Copy state bits from sample.
  processed_sample->set_timestamp(sample->timestamp());
  processed_sample->set_tid(sample->tid());
  processed_sample->set_vm_tag(sample->vm_tag());
  processed_sample->set_user_tag(sample->user_tag());
  if (sample->is_allocation_sample()) {
    processed_sample->set_allocation_cid(sample->allocation_cid());
    processed_sample->set_allocation_identity_hash(
        sample->allocation_identity_hash());
  }
  processed_sample->set_first_frame_executing(!sample->exit_frame_sample());

  // Copy stack trace from sample(s), following continuation links so chained
  // samples contribute their frames in order; a zero PC ends a sample's
  // frame list.
  bool truncated = false;
  Sample* current = sample;
  while (current != nullptr) {
    for (intptr_t i = 0; i < Sample::kPCArraySizeInWords; i++) {
      if (current->At(i) == 0) {
        break;
      }
      processed_sample->Add(current->At(i));
    }

    truncated = truncated || current->truncated_trace();
    current = Next(current);
  }

  if (!sample->exit_frame_sample()) {
    // The leaf frame may be a frameless stub; try to recover a missing
    // caller frame from the sample's saved stack words.
    processed_sample->FixupCaller(clt, /* pc_marker */ 0,
                                  sample->GetStackBuffer());
  }

  processed_sample->set_truncated(truncated);
  return processed_sample;
}
1672
  // NOTE(review): extraction gap — the signature line is missing from this
  // view; per the surrounding index this is
  // Sample* SampleBuffer::Next(Sample* sample). Returns the continuation of
  // |sample|, or nullptr when there is none or the link looks corrupt.
  if (!sample->is_continuation_sample()) return nullptr;
  Sample* next_sample = sample->continuation_sample();
  // Sanity check.
  ASSERT(sample != next_sample);
  // Detect invalid chaining: a continuation must agree with its head on
  // port, timestamp, and thread id.
  if (sample->port() != next_sample->port()) {
    return nullptr;
  }
  if (sample->timestamp() != next_sample->timestamp()) {
    return nullptr;
  }
  if (sample->tid() != next_sample->tid()) {
    return nullptr;
  }
  return next_sample;
}
1690
    : pcs_(Sample::kPCArraySizeInWords),  // Pre-size the PC list.
      timestamp_(0),
      vm_tag_(0),
      user_tag_(0),
      allocation_cid_(-1),  // -1 marks "not an allocation sample".
      allocation_identity_hash_(0),
      truncated_(false) {}
1699
1700void ProcessedSample::FixupCaller(const CodeLookupTable& clt,
1701 uword pc_marker,
1702 uword* stack_buffer) {
1703 const CodeDescriptor* cd = clt.FindCode(At(0));
1704 if (cd == nullptr) {
1705 // No Dart code.
1706 return;
1707 }
1708 if (cd->CompileTimestamp() > timestamp()) {
1709 // Code compiled after sample. Ignore.
1710 return;
1711 }
1712 CheckForMissingDartFrame(clt, cd, pc_marker, stack_buffer);
1713}
1714
// Heuristic repair for samples whose leaf frame is a frameless stub or
// intrinsic: tries to recover the caller's return address (from the sampled
// stack words, falling back to |pc_marker|) and, when it belongs to a Dart
// code object, inserts it as frame 1. See the diagram below.
void ProcessedSample::CheckForMissingDartFrame(const CodeLookupTable& clt,
                                               const CodeDescriptor* cd,
                                               uword pc_marker,
                                               uword* stack_buffer) {
  ASSERT(cd != nullptr);
  const Code& code = Code::Handle(Code::RawCast(cd->code().ptr()));
  ASSERT(!code.IsNull());
  // Some stubs (and intrinsics) do not push a frame onto the stack leaving
  // the frame pointer in the caller.
  //
  // PC -> STUB
  // FP -> DART3  <-+
  //       DART2  <-|  <- TOP FRAME RETURN ADDRESS.
  //       DART1  <-|
  //       .....
  //
  // In this case, traversing the linked stack frames will not collect a PC
  // inside DART3. The stack will incorrectly be: STUB, DART2, DART1.
  // In Dart code, after pushing the FP onto the stack, an IP in the current
  // function is pushed onto the stack as well. This stack slot is called
  // the PC marker. We can use the PC marker to insert DART3 into the stack
  // so that it will correctly be: STUB, DART3, DART2, DART1. Note the
  // inserted PC may not accurately reflect the true return address into DART3.

  // The pc marker is our current best guess of a return address.
  uword return_address = pc_marker;

  // Attempt to find a better return address.
  ReturnAddressLocator ral(At(0), stack_buffer, code);

  if (!ral.LocateReturnAddress(&return_address)) {
    // Locator failed; |return_address| still holds the pc marker.
    ASSERT(return_address == pc_marker);
    if (code.GetPrologueOffset() == 0) {
      // Code has the prologue at offset 0. The frame is already setup and
      // can be trusted.
      return;
    }
    // Could not find a better return address than the pc_marker.
    if (code.ContainsInstructionAt(return_address)) {
      // PC marker is in the same code as pc, no missing frame.
      return;
    }
  }

  if (clt.FindCode(return_address) == nullptr) {
    // Return address is not from a Dart code object. Do not insert.
    return;
  }

  if (return_address != 0) {
    // Insert the recovered caller just below the leaf frame.
    InsertAt(1, return_address);
  }
}
1768
    : code_lookup_table_(new CodeLookupTable(Thread::Current())) {
  // NOTE(review): the constructor's signature line (per the surrounding
  // index: ProcessedSampleBuffer::ProcessedSampleBuffer()) is missing from
  // this view. Eagerly builds the code lookup table used to symbolize
  // samples.
  ASSERT(code_lookup_table_ != nullptr);
}
1773
  // NOTE(review): extraction gap — the signature line (presumably
  // void SampleBlockProcessor::Init()) is missing from this view.
  ASSERT(!initialized_);
  // The monitor is created once and is not destroyed by Cleanup(), so it can
  // be reused across Init()/Cleanup() cycles.
  if (monitor_ == nullptr) {
    monitor_ = new Monitor();
  }
  ASSERT(monitor_ != nullptr);
  initialized_ = true;
  shutdown_ = false;
}
1783
  // NOTE(review): extraction gap — the signature line (presumably
  // void SampleBlockProcessor::Startup()) is missing from this view.
  ASSERT(initialized_);
  ASSERT(processor_thread_id_ == OSThread::kInvalidThreadJoinId);
  MonitorLocker startup_ml(monitor_);
  // Spawn the processor thread, then block until ThreadMain signals that it
  // is running (it sets thread_running_ and notifies the monitor).
  OSThread::Start("Dart Profiler SampleBlockProcessor", ThreadMain, 0);
  while (!thread_running_) {
    startup_ml.Wait();
  }
  ASSERT(processor_thread_id_ != OSThread::kInvalidThreadJoinId);
}
1794
  {
    // NOTE(review): extraction gap — the signature line (presumably
    // void SampleBlockProcessor::Cleanup()) is missing from this view.
    MonitorLocker shutdown_ml(monitor_);
    if (shutdown_) {
      // Already shutdown.
      return;
    }
    shutdown_ = true;
    // Notify the processor thread so it wakes up and observes shutdown_.
    shutdown_ml.Notify();
    ASSERT(initialized_);
  }

  // Join the thread.
  ASSERT(processor_thread_id_ != OSThread::kInvalidThreadJoinId);
  OSThread::Join(processor_thread_id_);
  processor_thread_id_ = OSThread::kInvalidThreadJoinId;
  initialized_ = false;
  ASSERT(!thread_running_);
}
1815
 public:
  // NOTE(review): extraction gap — the class header (StreamableSampleFilter,
  // a SampleFilter subclass per the override below) and the constructor's
  // signature line are missing from this view.
  : SampleFilter(port, kNoTaskFilter, -1, -1, true), isolate_(isolate) {}

  // Keeps only samples whose user tag has been marked streamable.
  bool FilterSample(Sample* sample) override {
    const UserTag& tag =
        UserTag::Handle(UserTag::FindTagById(isolate_, sample->user_tag()));
    return tag.streamable();
  }

 private:
  const Isolate* isolate_;  // Not owned.
};
1830
  // Profiler::ProcessCompletedBlocks: builds a CPU profile from the isolate's
  // completed sample blocks and publishes it on the profiler service stream.
  // NOTE(review): the opening signature line is missing from this view.
  if (!Service::profiler_stream.enabled()) return;
  auto thread = Thread::Current();
  if (Isolate::IsSystemIsolate(isolate)) return;

  TIMELINE_DURATION(thread, Isolate, "Profiler::ProcessCompletedBlocks")
  // Keep the profiler from interrupting us while streaming its own data.
  DisableThreadInterruptsScope dtis(thread);
  StackZone zone(thread);
  HandleScope handle_scope(thread);
  // Stream only samples whose user tag is registered as streamable.
  StreamableSampleFilter filter(isolate->main_port(), isolate);
  Profile profile;
  profile.Build(thread, isolate, &filter, Profiler::sample_block_buffer());
  // NOTE(review): extraction gap — the declaration of |event| (presumably a
  // ServiceEvent for the profiler stream) and the Service::HandleEvent call
  // that posts it are missing from this view.
  event.set_cpu_profile(&profile);
}
1847
  FlushSampleBlocks(thread->isolate());
  // NOTE(review): extraction gap — the signature line (per the surrounding
  // index: static void Profiler::IsolateShutdown(Thread* thread)) and one
  // trailing statement are missing from this view.
}
1852
// Body of the "Dart Profiler SampleBlockProcessor" thread: signals startup,
// then wakes periodically to stream completed sample blocks for each isolate
// until Cleanup() requests shutdown.
void SampleBlockProcessor::ThreadMain(uword parameters) {
  ASSERT(initialized_);
  {
    // Signal to main thread we are ready.
    MonitorLocker startup_ml(monitor_);
    OSThread* os_thread = OSThread::Current();
    ASSERT(os_thread != nullptr);
    processor_thread_id_ = OSThread::GetCurrentThreadJoinId(os_thread);
    thread_running_ = true;
    startup_ml.Notify();
  }

  MonitorLocker wait_ml(monitor_);
  // Wakeup every 100ms.
  const int64_t wakeup_interval = 1000 * 100;
  while (true) {
    wait_ml.WaitMicros(wakeup_interval);
    if (shutdown_) {
      break;
    }

    IsolateGroup::ForEach([&](IsolateGroup* group) {
      // Never process the VM isolate group.
      if (group == Dart::vm_isolate_group()) return;

      const bool kBypassSafepoint = false;
      // NOTE(review): extraction gap — the call that enters the isolate group
      // as a helper (matched by ExitIsolateGroupAsHelper below) is missing;
      // only its trailing argument line remains.
                                          kBypassSafepoint);
      group->ForEachIsolate([&](Isolate* isolate) {
        if (isolate->TakeHasCompletedBlocks()) {
          Profiler::ProcessCompletedBlocks(isolate);
        }
      });
      Thread::ExitIsolateGroupAsHelper(kBypassSafepoint);
    });
  }
  // Signal to main thread we are exiting.
  thread_running_ = false;
}
1891
1892#endif // !PRODUCT
1893
1894} // namespace dart
static int step(int x, SkScalar min, SkScalar max)
Definition BlurTest.cpp:215
int count
static float next(float f)
SI F table(const skcms_Curve *curve, F v)
#define ASAN_UNPOISON(ptr, len)
#define UNREACHABLE()
Definition assert.h:248
#define OUT_OF_MEMORY()
Definition assert.h:250
#define RELEASE_ASSERT(cond)
Definition assert.h:327
uword Size() const
Definition profiler.h:427
uword PayloadStart() const
Definition profiler.h:422
int64_t compile_timestamp() const
Definition profiler.h:432
ClearProfileVisitor(Isolate *isolate)
Definition profiler.cc:987
virtual void VisitSample(Sample *sample)
Definition profiler.cc:990
int64_t CompileTimestamp() const
Definition profiler.cc:1468
CodeDescriptor(const AbstractCode code)
Definition profiler.cc:1458
uword Start() const
Definition profiler.cc:1460
bool Contains(uword pc) const
Definition profiler.h:519
static int Compare(CodeDescriptor *const *a, CodeDescriptor *const *b)
Definition profiler.h:524
uword Size() const
Definition profiler.cc:1464
CodeLookupTableBuilder(CodeLookupTable *table)
Definition profiler.cc:1478
void VisitObject(ObjectPtr raw_obj) override
Definition profiler.cc:1484
intptr_t length() const
Definition profiler.h:551
CodeLookupTable(Thread *thread)
Definition profiler.cc:1472
const CodeDescriptor * At(intptr_t index) const
Definition profiler.h:553
friend class CodeLookupTableBuilder
Definition profiler.h:567
const CodeDescriptor * FindCode(uword pc) const
Definition profiler.cc:1544
uword Size() const
Definition object.h:6876
intptr_t GetPrologueOffset() const
Definition object.cc:17984
static CodePtr FindCodeUnsafe(uword pc)
Definition object.cc:18270
bool ContainsInstructionAt(uword addr) const
Definition object.h:6888
bool IsUnknownDartCode() const
Definition object.h:7216
uword PayloadStart() const
Definition object.h:6823
static IsolateGroup * vm_isolate_group()
Definition dart.h:69
static Isolate * vm_isolate()
Definition dart.h:68
intptr_t Length() const
Definition object.h:11046
ObjectPtr At(intptr_t index) const
Definition object.h:11059
static void ForEach(std::function< void(IsolateGroup *)> action)
Definition isolate.cc:677
IsolateGroupSource * source() const
Definition isolate.h:989
bool TrySetHasCompletedBlocks()
Definition isolate.h:1081
Simulator * simulator() const
Definition isolate.h:1146
static bool IsSystemIsolate(const Isolate *isolate)
Definition isolate.h:1398
uword user_tag() const
Definition isolate.h:1280
VMTagCounters * vm_tag_counters()
Definition isolate.h:1264
static void VisitIsolates(IsolateVisitor *visitor)
Definition isolate.cc:3485
void set_current_sample_block(SampleBlock *block)
Definition isolate.h:1065
SampleBlock * current_sample_block() const
Definition isolate.h:1064
bool IsDeoptimizing() const
Definition isolate.h:1207
void set_current_allocation_sample_block(SampleBlock *block)
Definition isolate.h:1074
Thread * mutator_thread() const
Definition isolate.cc:1884
SampleBlock * current_allocation_sample_block() const
Definition isolate.h:1071
Dart_Port main_port() const
Definition isolate.h:1001
Monitor::WaitResult Wait(int64_t millis=Monitor::kNoTimeout)
Definition lockers.h:172
static void FreeSymbolName(char *name)
static bool LookupSharedObject(uword pc, uword *dso_base=nullptr, char **dso_name=nullptr)
static char * LookupSymbolName(uword pc, uword *start)
uword stack_base() const
Definition os_thread.h:128
static int Start(const char *name, ThreadStartFunction function, uword parameter)
static uword GetCurrentStackPointer()
Definition os_thread.cc:132
static void Join(ThreadJoinId id)
static OSThread * Current()
Definition os_thread.h:175
uword stack_limit() const
Definition os_thread.h:129
static ThreadJoinId GetCurrentThreadJoinId(OSThread *thread)
static intptr_t ThreadIdToIntPtr(ThreadId id)
static const ThreadJoinId kInvalidThreadJoinId
Definition os_thread.h:245
static int64_t GetCurrentMonotonicMicros()
static void static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
static uintptr_t GetProgramCounter()
static intptr_t ProcessId()
@ kInternalName
Definition object.h:622
bool IsNull() const
Definition object.h:363
static Object & Handle()
Definition object.h:407
static ObjectPtr RawCast(ObjectPtr obj)
Definition object.h:325
void set_tid(ThreadId tid)
Definition profiler.h:830
void set_allocation_cid(intptr_t cid)
Definition profiler.h:842
uword At(intptr_t index) const
Definition profiler.h:819
void set_truncated(bool truncated)
Definition profiler.h:857
void Add(uword pc)
Definition profiler.h:810
void set_first_frame_executing(bool first_frame_executing)
Definition profiler.h:861
void set_timestamp(int64_t timestamp)
Definition profiler.h:827
int64_t timestamp() const
Definition profiler.h:826
void set_vm_tag(uword tag)
Definition profiler.h:834
void InsertAt(intptr_t index, uword pc)
Definition profiler.h:813
void set_allocation_identity_hash(uint32_t hash)
Definition profiler.h:849
void set_user_tag(uword tag)
Definition profiler.h:838
ProfilerDartStackWalker(Thread *thread, Sample *sample, SampleBuffer *sample_buffer, uword pc, uword fp, uword sp, uword lr, bool allocation_sample, intptr_t skip_count=0)
Definition profiler.cc:997
ProfilerNativeStackWalker(ProfilerCounters *counters, Dart_Port port_id, Sample *sample, SampleBuffer *sample_buffer, uword stack_lower, uword stack_upper, uword pc, uword fp, uword sp, intptr_t skip_count=0)
Definition profiler.cc:219
bool Append(uword pc, uword fp)
Definition profiler.cc:145
ProfilerStackWalker(Dart_Port port_id, Sample *head_sample, SampleBuffer *sample_buffer, intptr_t skip_count=0)
Definition profiler.cc:126
SampleBuffer * sample_buffer_
Definition profiler.cc:182
static void DumpStackTrace(void *context)
Definition profiler.cc:409
static void IsolateShutdown(Thread *thread)
Definition profiler.cc:1848
static void SampleAllocation(Thread *thread, intptr_t cid, uint32_t identity_hash)
Definition profiler.cc:1265
static void SetSampleDepth(intptr_t depth)
Definition profiler.cc:619
static ProfilerCounters counters()
Definition profiler.h:91
static void UpdateRunningState()
Definition profiler.cc:611
static void Cleanup()
Definition profiler.cc:599
static void SampleThread(Thread *thread, const InterruptedThreadState &state)
Definition profiler.cc:1348
static void Init()
Definition profiler.cc:567
static void ProcessCompletedBlocks(Isolate *isolate)
Definition profiler.cc:1831
static SampleBlockBuffer * sample_block_buffer()
Definition profiler.h:67
static void UpdateSamplePeriod()
Definition profiler.cc:660
static void SetSamplePeriod(intptr_t period)
Definition profiler.cc:650
T fetch_add(T arg, std::memory_order order=std::memory_order_relaxed)
Definition atomic.h:35
ReturnAddressLocator(Sample *sample, const Code &code)
Definition profiler.cc:863
uword StackAt(intptr_t i)
Definition profiler.cc:898
ReturnAddressLocator(uword pc, uword *stack_buffer, const Code &code)
Definition profiler.cc:871
uint8_t * CodePointer(intptr_t offset)
Definition profiler.cc:890
bool LocateReturnAddress(uword *return_address)
virtual ~SampleBlockBuffer()
Definition profiler.cc:684
SampleBlockBuffer(intptr_t blocks=kDefaultBlockCount, intptr_t samples_per_block=SampleBlock::kSamplesPerBlock)
Definition profiler.cc:664
ProcessedSampleBuffer * BuildProcessedSampleBuffer(Isolate *isolate, SampleFilter *filter, ProcessedSampleBuffer *buffer=nullptr)
Definition profiler.cc:755
static constexpr intptr_t kDefaultBlockCount
Definition profiler.h:737
Sample * ReserveAllocationSample(Isolate *isolate)
Definition profiler.cc:815
Sample * ReserveCPUSample(Isolate *isolate)
Definition profiler.cc:811
void VisitIsolate(Isolate *isolate)
Definition profiler.cc:593
virtual ~SampleBlockCleanupVisitor()=default
void StreamingToFree()
Definition profiler.h:698
bool HasStreamableSamples(const GrowableObjectArray &tag_table, UserTag *tag)
Definition profiler.cc:724
RelaxedAtomic< uint32_t > cursor_
Definition profiler.h:725
void FreeCompleted()
Definition profiler.h:704
void MarkCompleted()
Definition profiler.h:679
static constexpr intptr_t kSamplesPerBlock
Definition profiler.h:644
virtual Sample * ReserveSampleAndLink(Sample *previous)
Definition profiler.cc:793
void StreamingToCompleted()
Definition profiler.h:694
Isolate * owner_
Definition profiler.h:726
virtual Sample * ReserveSample()
Definition profiler.cc:785
bool TryAcquireStreaming(Isolate *isolate)
Definition profiler.h:683
bool TryAllocateFree()
Definition profiler.h:658
ProcessedSample * BuildProcessedSample(Sample *sample, const CodeLookupTable &clt)
Definition profiler.cc:1629
Sample * Next(Sample *sample)
Definition profiler.cc:1673
ProcessedSampleBuffer * BuildProcessedSampleBuffer(SampleFilter *filter, ProcessedSampleBuffer *buffer=nullptr)
Definition profiler.cc:1574
intptr_t capacity_
Definition profiler.h:636
intptr_t capacity() const
Definition profiler.h:623
virtual void Init(Sample *samples, intptr_t capacity)
Definition profiler.h:577
virtual Sample * ReserveSampleAndLink(Sample *previous)=0
Sample * At(intptr_t idx) const
Definition profiler.h:617
static constexpr intptr_t kNoTaskFilter
Definition profiler.h:177
virtual bool FilterSample(Sample *sample)
Definition profiler.h:165
bool TimeFilterSample(Sample *sample)
Definition profiler.cc:969
Dart_Port port() const
Definition profiler.h:167
bool take_samples() const
Definition profiler.h:175
bool TaskFilterSample(Sample *sample)
Definition profiler.cc:979
void set_thread_task(Thread::TaskKind task)
Definition profiler.h:324
uword * GetStackBuffer()
Definition profiler.h:366
Thread::TaskKind thread_task() const
Definition profiler.h:322
static constexpr int kPCArraySizeInWords
Definition profiler.h:362
ThreadId tid() const
Definition profiler.h:210
Sample * continuation_sample() const
Definition profiler.h:339
uword At(intptr_t i) const
Definition profiler.h:237
bool is_continuation_sample() const
Definition profiler.h:328
void set_allocation_identity_hash(uint32_t hash)
Definition profiler.h:318
void set_truncated_trace(bool truncated_trace)
Definition profiler.h:302
void Init(Dart_Port port, int64_t timestamp, ThreadId tid)
Definition profiler.h:199
void set_vm_tag(uword tag)
Definition profiler.h:266
bool is_allocation_sample() const
Definition profiler.h:306
intptr_t allocation_cid() const
Definition profiler.h:341
void SetContinuation(Sample *next)
Definition profiler.h:332
void SetAllocationCid(intptr_t cid)
Definition profiler.h:357
int64_t timestamp() const
Definition profiler.h:231
uword vm_tag() const
Definition profiler.h:265
uword user_tag() const
Definition profiler.h:271
static constexpr int kStackBufferSizeInWords
Definition profiler.h:365
Dart_Port port() const
Definition profiler.h:207
void set_user_tag(uword tag)
Definition profiler.h:272
bool ignore_sample() const
Definition profiler.h:280
bool exit_frame_sample() const
Definition profiler.h:286
void set_ignore_sample(bool ignore_sample)
Definition profiler.h:282
void set_exit_frame_sample(bool exit_frame_sample)
Definition profiler.h:288
void SetAt(intptr_t i, uword pc)
Definition profiler.h:244
uint32_t allocation_identity_hash() const
Definition profiler.h:314
bool truncated_trace() const
Definition profiler.h:300
void Clear()
Definition profiler.h:212
bool head_sample() const
Definition profiler.h:350
static void HandleEvent(ServiceEvent *event, bool enter_safepoint=true)
Definition service.cc:1206
static StreamInfo profiler_stream
Definition service.h:188
static uintptr_t GetCStackPointer(const mcontext_t &mcontext)
static uintptr_t GetFramePointer(const mcontext_t &mcontext)
static uintptr_t GetProgramCounter(const mcontext_t &mcontext)
DART_FORCE_INLINE int32_t get_pc() const
int32_t get_lr() const
uword stack_base() const
uword stack_limit() const
DART_FORCE_INLINE int32_t get_register(Register reg) const
static uword FunctionForRedirect(uword redirect)
static void DumpCurrentTrace()
StreamableSampleFilter(Dart_Port port, const Isolate *isolate)
Definition profiler.cc:1818
bool FilterSample(Sample *sample) override
Definition profiler.cc:1821
static bool HasBeenInitialized()
Definition stub_code.h:41
static bool InInvocationStub(uword pc)
Definition stub_code.cc:132
static bool InJumpToFrameStub(uword pc)
Definition stub_code.cc:139
static void SetInterruptPeriod(intptr_t period)
Zone * zone() const
OSThread * os_thread() const
bool HasCompilerState() const
Definition thread.h:581
void ScheduleInterrupts(uword interrupt_bits)
Definition thread.cc:705
uword vm_tag() const
Definition thread.h:808
@ kSampleBlockTask
Definition thread.h:353
static Thread * Current()
Definition thread.h:361
CompilerState & compiler_state()
Definition thread.h:583
bool HasExitedDartCode() const
Definition thread.cc:884
void CheckForSafepoint()
Definition thread.h:1091
bool IsExecutingDartCode() const
Definition thread.cc:880
static void ExitIsolateGroupAsHelper(bool bypass_safepoint)
Definition thread.cc:494
uword top_exit_frame_info() const
Definition thread.h:678
bool IsDartMutatorThread() const
Definition thread.h:546
ExecutionState execution_state() const
Definition thread.h:1027
Isolate * isolate() const
Definition thread.h:533
TaskKind task_kind() const
Definition thread.h:478
@ kThreadInNative
Definition thread.h:1023
static bool EnterIsolateGroupAsHelper(IsolateGroup *isolate_group, TaskKind kind, bool bypass_safepoint)
Definition thread.cc:476
static UserTagPtr FindTagById(const Isolate *isolate, uword tag_id)
Definition object.cc:27132
uword tag() const
Definition object.h:13128
bool streamable() const
Definition object.h:13135
static constexpr T RoundUp(T x, uintptr_t alignment, uintptr_t offset=0)
Definition utils.h:105
static const char * String()
Definition version_in.cc:11
static intptr_t PageSize()
static VirtualMemory * Allocate(intptr_t size, bool is_executable, bool is_compressed, const char *name)
void * address() const
#define ILLEGAL_PORT
Definition dart_api.h:1530
int64_t Dart_Port
Definition dart_api.h:1524
#define ASSERT(E)
SkBitmap source
Definition examples.cpp:28
static bool b
struct MyStruct a[10]
AtkStateType state
FlKeyEvent * event
static const uint8_t buffer[]
GAsyncResult * result
#define DEFINE_FLAG(type, name, default_value, comment)
Definition flags.h:16
size_t length
#define MSAN_UNPOISON(ptr, len)
static constexpr int kExitLinkSlotFromEntryFp
static Sample * SetupSample(Thread *thread, bool allocation_sample, ThreadId tid)
Definition profiler.cc:1229
static void DumpCompilerState(Thread *thread)
Definition profiler.cc:460
static bool CheckIsolate(Isolate *isolate)
Definition profiler.cc:1257
const char *const name
static constexpr int kSavedCallerPcSlotFromFp
pthread_t ThreadJoinId
static intptr_t SamplesPerSecond()
Definition profiler.cc:631
static void CollectSample(Isolate *isolate, bool exited_dart_code, bool in_dart_code, Sample *sample, ProfilerNativeStackWalker *native_stack_walker, ProfilerDartStackWalker *dart_stack_walker, uword pc, uword fp, uword sp, ProfilerCounters *counters)
Definition profiler.cc:1169
static void CopyStackBuffer(Sample *sample, uword sp_addr)
Definition profiler.cc:1137
static constexpr int kSavedCallerFpSlotFromFp
uintptr_t uword
Definition globals.h:501
static void FlushSampleBlocks(Isolate *isolate)
Definition profiler.cc:739
static bool ValidateThreadStackBounds(uintptr_t fp, uintptr_t sp, uword stack_lower, uword stack_upper)
Definition profiler.cc:344
const uint32_t fp
static constexpr intptr_t kMaxSamplesPerTick
Definition profiler.cc:35
static void USE(T &&)
Definition globals.h:618
const Register FPREG
const intptr_t cid
static void DumpStackFrame(uword pc, uword fp, const char *name, uword offset)
Definition profiler.cc:72
static bool GetAndValidateThreadStackBounds(OSThread *os_thread, Thread *thread, uintptr_t fp, uintptr_t sp, uword *stack_lower, uword *stack_upper)
Definition profiler.cc:368
constexpr intptr_t kWordSize
Definition globals.h:509
pthread_t ThreadId
const Register SPREG
#define Pp
Definition globals.h:425
#define Px
Definition globals.h:410
#define Pd
Definition globals.h:408
Point offset
#define TIMELINE_DURATION(thread, stream, name)
Definition timeline.h:39
#define COPY_FP_REGISTER(fp)
Definition globals.h:200
int BOOL
long LONG
unsigned long DWORD