Flutter Engine
The Flutter Engine
Loading...
Searching...
No Matches
Classes | Namespaces | Macros
profiler.h File Reference
#include "platform/atomic.h"
#include "vm/allocation.h"
#include "vm/bitfield.h"
#include "vm/code_observers.h"
#include "vm/globals.h"
#include "vm/growable_array.h"
#include "vm/native_symbol.h"
#include "vm/object.h"
#include "vm/tags.h"
#include "vm/thread_interrupter.h"

Go to the source code of this file.

Classes

struct  dart::ProfilerCounters
 
class  dart::Profiler
 
class  dart::SampleVisitor
 
class  dart::SampleFilter
 
class  dart::ClearProfileVisitor
 
class  dart::Sample
 
class  dart::AbstractCode
 
class  dart::CodeDescriptor
 
class  dart::CodeLookupTable
 
class  dart::SampleBuffer
 
class  dart::SampleBlock
 
class  dart::SampleBlockBuffer
 
class  dart::ProcessedSample
 
class  dart::ProcessedSampleBuffer
 
class  dart::SampleBlockProcessor
 

Namespaces

namespace  dart
 

Macros

#define PROFILER_COUNTERS(V)
 
#define DECLARE_PROFILER_COUNTER(name)   RelaxedAtomic<int64_t> name;
 

Macro Definition Documentation

◆ DECLARE_PROFILER_COUNTER

#define DECLARE_PROFILER_COUNTER(name) RelaxedAtomic<int64_t> name;

Definition at line 48 of file profiler.h.

◆ PROFILER_COUNTERS

#define PROFILER_COUNTERS(V)                                                   \
  V(bail_out_unknown_task)                                                     \
  V(bail_out_jump_to_exception_handler)                                        \
  V(bail_out_check_isolate)                                                    \
  V(single_frame_sample_deoptimizing)                                          \
  V(single_frame_sample_get_and_validate_stack_bounds)                         \
  V(stack_walker_native)                                                       \
  V(stack_walker_dart_exit)                                                    \
  V(stack_walker_dart)                                                         \
  V(stack_walker_none)                                                         \
  V(incomplete_sample_fp_bounds)                                               \
  V(incomplete_sample_fp_step)                                                 \
  V(incomplete_sample_bad_pc)                                                  \
  V(sample_allocation_failure)
#define V(name)
Definition raw_object.h:124

Definition at line 32 of file profiler.h.

46 {
47#define DECLARE_PROFILER_COUNTER(name) RelaxedAtomic<int64_t> name;
49#undef DECLARE_PROFILER_COUNTER
50};
51
52class Profiler : public AllStatic {
53 public:
54 static void Init();
55 static void Cleanup();
56
57 static void SetSampleDepth(intptr_t depth);
58 static void SetSamplePeriod(intptr_t period);
59 // Restarts sampling with a given profile period. This is called after the
60 // profile period is changed via the service protocol.
61 static void UpdateSamplePeriod();
62 // Starts or shuts down the profiler after --profiler is changed via the
63 // service protocol.
64 static void UpdateRunningState();
65
66 static SampleBlockBuffer* sample_block_buffer() {
67 return sample_block_buffer_;
68 }
69 static void set_sample_block_buffer(SampleBlockBuffer* buffer) {
70 sample_block_buffer_ = buffer;
71 }
72
73 static void DumpStackTrace(void* context);
74 static void DumpStackTrace(bool for_crash = true);
75
76 static void SampleAllocation(Thread* thread,
77 intptr_t cid,
78 uint32_t identity_hash);
79
80 // SampleThread is called from inside the signal handler and hence it is very
81 // critical that the implementation of SampleThread does not do any of the
82 // following:
83 // * Accessing TLS -- Because on Fuchsia, Mac and Windows the callback will
84 // be running in a different thread.
85 // * Allocating memory -- Because this takes locks which may already be
86 // held, resulting in a dead lock.
87 // * Taking a lock -- See above.
88 static void SampleThread(Thread* thread, const InterruptedThreadState& state);
89
90 static ProfilerCounters counters() {
91 // Copies the counter values.
92 return counters_;
93 }
94 inline static intptr_t Size();
95
96 static void ProcessCompletedBlocks(Isolate* isolate);
97 static void IsolateShutdown(Thread* thread);
98
99 private:
100 static void DumpStackTrace(uword sp, uword fp, uword pc, bool for_crash);
101
102 // Calculates the sample buffer capacity. Returns
103 // SampleBuffer::kDefaultBufferCapacity if --sample-buffer-duration is not
104 // provided. Otherwise, the capacity is based on the sample rate, maximum
105 // sample stack depth, and the number of seconds of samples the sample buffer
106 // should be able to accomodate.
107 static intptr_t CalculateSampleBufferCapacity();
108
109 // Does not walk the thread's stack.
110 static void SampleThreadSingleFrame(Thread* thread,
111 Sample* sample,
112 uintptr_t pc);
113 static RelaxedAtomic<bool> initialized_;
114
115 static SampleBlockBuffer* sample_block_buffer_;
116
117 static ProfilerCounters counters_;
118
119 friend class Thread;
120};
121
122class SampleVisitor : public ValueObject {
123 public:
124 explicit SampleVisitor(Dart_Port port) : port_(port), visited_(0) {}
125 virtual ~SampleVisitor() {}
126
127 virtual void VisitSample(Sample* sample) = 0;
128
129 virtual void Reset() { visited_ = 0; }
130
131 intptr_t visited() const { return visited_; }
132
133 void IncrementVisited() { visited_++; }
134
135 Dart_Port port() const { return port_; }
136
137 private:
138 Dart_Port port_;
139 intptr_t visited_;
140
141 DISALLOW_IMPLICIT_CONSTRUCTORS(SampleVisitor);
142};
143
144class SampleFilter : public ValueObject {
145 public:
146 SampleFilter(Dart_Port port,
147 intptr_t thread_task_mask,
148 int64_t time_origin_micros,
149 int64_t time_extent_micros,
150 bool take_samples = false)
151 : port_(port),
152 thread_task_mask_(thread_task_mask),
153 time_origin_micros_(time_origin_micros),
154 time_extent_micros_(time_extent_micros),
155 take_samples_(take_samples) {
156 ASSERT(thread_task_mask != 0);
157 ASSERT(time_origin_micros_ >= -1);
158 ASSERT(time_extent_micros_ >= -1);
159 }
160 virtual ~SampleFilter() {}
161
162 // Override this function.
163 // Return |true| if |sample| passes the filter.
164 virtual bool FilterSample(Sample* sample) { return true; }
165
166 Dart_Port port() const { return port_; }
167
168 // Returns |true| if |sample| passes the time filter.
169 bool TimeFilterSample(Sample* sample);
170
171 // Returns |true| if |sample| passes the thread task filter.
172 bool TaskFilterSample(Sample* sample);
173
174 bool take_samples() const { return take_samples_; }
175
176 static constexpr intptr_t kNoTaskFilter = -1;
177
178 private:
179 Dart_Port port_;
180 intptr_t thread_task_mask_;
181 int64_t time_origin_micros_;
182 int64_t time_extent_micros_;
183 bool take_samples_;
184};
185
186class ClearProfileVisitor : public SampleVisitor {
187 public:
188 explicit ClearProfileVisitor(Isolate* isolate);
189
190 virtual void VisitSample(Sample* sample);
191};
192
193// Each Sample holds a stack trace from an isolate.
194class Sample {
195 public:
196 Sample() = default;
197
198 void Init(Dart_Port port, int64_t timestamp, ThreadId tid) {
199 Clear();
200 timestamp_ = timestamp;
201 tid_ = tid;
202 port_ = port;
203 next_ = nullptr;
204 }
205
206 Dart_Port port() const { return port_; }
207
208 // Thread sample was taken on.
209 ThreadId tid() const { return tid_; }
210
211 void Clear() {
212 timestamp_ = 0;
213 port_ = ILLEGAL_PORT;
214 tid_ = OSThread::kInvalidThreadId;
215 for (intptr_t i = 0; i < kStackBufferSizeInWords; i++) {
216 stack_buffer_[i] = 0;
217 }
218 for (intptr_t i = 0; i < kPCArraySizeInWords; i++) {
219 pc_array_[i] = 0;
220 }
221 vm_tag_ = VMTag::kInvalidTagId;
222 user_tag_ = UserTags::kDefaultUserTag;
223 state_ = 0;
224 next_ = nullptr;
225 allocation_identity_hash_ = 0;
226 set_head_sample(true);
227 }
228
229 // Timestamp sample was taken at.
230 int64_t timestamp() const { return timestamp_; }
231
232 // Top most pc.
233 uword pc() const { return At(0); }
234
235 // Get stack trace entry.
236 uword At(intptr_t i) const {
237 ASSERT(i >= 0);
238 ASSERT(i < kPCArraySizeInWords);
239 return pc_array_[i];
240 }
241
242 // Set stack trace entry.
243 void SetAt(intptr_t i, uword pc) {
244 ASSERT(i >= 0);
245 ASSERT(i < kPCArraySizeInWords);
246 pc_array_[i] = pc;
247 }
248
249 void DumpStackTrace() {
250 for (intptr_t i = 0; i < kPCArraySizeInWords; ++i) {
251 uintptr_t start = 0;
252 uword pc = At(i);
253 char* native_symbol_name =
254 NativeSymbolResolver::LookupSymbolName(pc, &start);
255 if (native_symbol_name == nullptr) {
256 OS::PrintErr(" [0x%" Pp "] Unknown symbol\n", pc);
257 } else {
258 OS::PrintErr(" [0x%" Pp "] %s\n", pc, native_symbol_name);
259 NativeSymbolResolver::FreeSymbolName(native_symbol_name);
260 }
261 }
262 }
263
264 uword vm_tag() const { return vm_tag_; }
265 void set_vm_tag(uword tag) {
266 ASSERT(tag != VMTag::kInvalidTagId);
267 vm_tag_ = tag;
268 }
269
270 uword user_tag() const { return user_tag_; }
271 void set_user_tag(uword tag) { user_tag_ = tag; }
272
273 bool leaf_frame_is_dart() const { return LeafFrameIsDart::decode(state_); }
274
275 void set_leaf_frame_is_dart(bool leaf_frame_is_dart) {
276 state_ = LeafFrameIsDart::update(leaf_frame_is_dart, state_);
277 }
278
279 bool ignore_sample() const { return IgnoreBit::decode(state_); }
280
281 void set_ignore_sample(bool ignore_sample) {
282 state_ = IgnoreBit::update(ignore_sample, state_);
283 }
284
285 bool exit_frame_sample() const { return ExitFrameBit::decode(state_); }
286
287 void set_exit_frame_sample(bool exit_frame_sample) {
288 state_ = ExitFrameBit::update(exit_frame_sample, state_);
289 }
290
291 bool missing_frame_inserted() const {
292 return MissingFrameInsertedBit::decode(state_);
293 }
294
295 void set_missing_frame_inserted(bool missing_frame_inserted) {
296 state_ = MissingFrameInsertedBit::update(missing_frame_inserted, state_);
297 }
298
299 bool truncated_trace() const { return TruncatedTraceBit::decode(state_); }
300
301 void set_truncated_trace(bool truncated_trace) {
302 state_ = TruncatedTraceBit::update(truncated_trace, state_);
303 }
304
305 bool is_allocation_sample() const {
306 return ClassAllocationSampleBit::decode(state_);
307 }
308
309 void set_is_allocation_sample(bool allocation_sample) {
310 state_ = ClassAllocationSampleBit::update(allocation_sample, state_);
311 }
312
313 uint32_t allocation_identity_hash() const {
314 return allocation_identity_hash_;
315 }
316
317 void set_allocation_identity_hash(uint32_t hash) {
318 allocation_identity_hash_ = hash;
319 }
320
321 Thread::TaskKind thread_task() const { return ThreadTaskBit::decode(state_); }
322
323 void set_thread_task(Thread::TaskKind task) {
324 state_ = ThreadTaskBit::update(task, state_);
325 }
326
327 bool is_continuation_sample() const {
328 return ContinuationSampleBit::decode(state_);
329 }
330
331 void SetContinuation(Sample* next) {
332 ASSERT(!is_continuation_sample());
333 ASSERT(next_ == nullptr);
334 state_ = ContinuationSampleBit::update(true, state_);
335 next_ = next;
336 }
337
338 Sample* continuation_sample() const { return next_; }
339
340 intptr_t allocation_cid() const {
341 ASSERT(is_allocation_sample());
342 return metadata();
343 }
344
345 void set_head_sample(bool head_sample) {
346 state_ = HeadSampleBit::update(head_sample, state_);
347 }
348
349 bool head_sample() const { return HeadSampleBit::decode(state_); }
350
351 intptr_t metadata() const { return MetadataBits::decode(state_); }
352 void set_metadata(intptr_t metadata) {
353 state_ = MetadataBits::update(metadata, state_);
354 }
355
356 void SetAllocationCid(intptr_t cid) {
357 set_is_allocation_sample(true);
358 set_metadata(cid);
359 }
360
361 static constexpr int kPCArraySizeInWords = 32;
362 uword* GetPCArray() { return &pc_array_[0]; }
363
364 static constexpr int kStackBufferSizeInWords = 2;
365 uword* GetStackBuffer() { return &stack_buffer_[0]; }
366
367 private:
368 enum StateBits {
369 kHeadSampleBit = 0,
370 kLeafFrameIsDartBit = 1,
371 kIgnoreBit = 2,
372 kExitFrameBit = 3,
373 kMissingFrameInsertedBit = 4,
374 kTruncatedTraceBit = 5,
375 kClassAllocationSampleBit = 6,
376 kContinuationSampleBit = 7,
377 kThreadTaskBit = 8, // 7 bits.
378 kMetadataBit = 15, // 16 bits.
379 kNextFreeBit = 31,
380 };
381 class HeadSampleBit : public BitField<uint32_t, bool, kHeadSampleBit, 1> {};
382 class LeafFrameIsDart
383 : public BitField<uint32_t, bool, kLeafFrameIsDartBit, 1> {};
384 class IgnoreBit : public BitField<uint32_t, bool, kIgnoreBit, 1> {};
385 class ExitFrameBit : public BitField<uint32_t, bool, kExitFrameBit, 1> {};
386 class MissingFrameInsertedBit
387 : public BitField<uint32_t, bool, kMissingFrameInsertedBit, 1> {};
388 class TruncatedTraceBit
389 : public BitField<uint32_t, bool, kTruncatedTraceBit, 1> {};
390 class ClassAllocationSampleBit
391 : public BitField<uint32_t, bool, kClassAllocationSampleBit, 1> {};
392 class ContinuationSampleBit
393 : public BitField<uint32_t, bool, kContinuationSampleBit, 1> {};
394 class ThreadTaskBit
395 : public BitField<uint32_t, Thread::TaskKind, kThreadTaskBit, 7> {};
396 class MetadataBits : public BitField<uint32_t, intptr_t, kMetadataBit, 16> {};
397
398 int64_t timestamp_;
399 Dart_Port port_;
400 ThreadId tid_;
401 uword stack_buffer_[kStackBufferSizeInWords];
402 uword pc_array_[kPCArraySizeInWords];
403 uword vm_tag_;
404 uword user_tag_;
405 uint32_t state_;
406 Sample* next_;
407 uint32_t allocation_identity_hash_;
408
410};
411
412class AbstractCode {
413 public:
414 explicit AbstractCode(ObjectPtr code) : code_(Object::Handle(code)) {
415 ASSERT(code_.IsNull() || code_.IsCode());
416 }
417
418 ObjectPtr ptr() const { return code_.ptr(); }
419 const Object* handle() const { return &code_; }
420
421 uword PayloadStart() const {
422 ASSERT(code_.IsCode());
423 return Code::Cast(code_).PayloadStart();
424 }
425
426 uword Size() const {
427 ASSERT(code_.IsCode());
428 return Code::Cast(code_).Size();
429 }
430
431 int64_t compile_timestamp() const {
432 if (code_.IsCode()) {
433 return Code::Cast(code_).compile_timestamp();
434 } else {
435 return 0;
436 }
437 }
438
439 const char* Name() const {
440 if (code_.IsCode()) {
441 return Code::Cast(code_).Name();
442 } else {
443 return "";
444 }
445 }
446
447 const char* QualifiedName() const {
448 if (code_.IsCode()) {
449 return Code::Cast(code_).QualifiedName(
450 NameFormattingParams(Object::kUserVisibleName));
451 } else {
452 return "";
453 }
454 }
455
456 bool IsStubCode() const {
457 if (code_.IsCode()) {
458 return Code::Cast(code_).IsStubCode();
459 } else {
460 return false;
461 }
462 }
463
464 bool IsAllocationStubCode() const {
465 if (code_.IsCode()) {
466 return Code::Cast(code_).IsAllocationStubCode();
467 } else {
468 return false;
469 }
470 }
471
472 bool IsTypeTestStubCode() const {
473 if (code_.IsCode()) {
474 return Code::Cast(code_).IsTypeTestStubCode();
475 } else {
476 return false;
477 }
478 }
479
480 ObjectPtr owner() const {
481 if (code_.IsCode()) {
482 return Code::Cast(code_).owner();
483 } else {
484 return Object::null();
485 }
486 }
487
488 bool IsNull() const { return code_.IsNull(); }
489 bool IsCode() const { return code_.IsCode(); }
490
491 bool is_optimized() const {
492 if (code_.IsCode()) {
493 return Code::Cast(code_).is_optimized();
494 } else {
495 return false;
496 }
497 }
498
499 private:
500 const Object& code_;
501};
502
503// A Code object descriptor.
504class CodeDescriptor : public ZoneAllocated {
505 public:
506 explicit CodeDescriptor(const AbstractCode code);
507
508 uword Start() const;
509
510 uword Size() const;
511
512 int64_t CompileTimestamp() const;
513
514 const AbstractCode code() const { return code_; }
515
516 const char* Name() const { return code_.Name(); }
517
518 bool Contains(uword pc) const {
519 uword end = Start() + Size();
520 return (pc >= Start()) && (pc < end);
521 }
522
523 static int Compare(CodeDescriptor* const* a, CodeDescriptor* const* b) {
524 ASSERT(a != nullptr);
525 ASSERT(b != nullptr);
526
527 uword a_start = (*a)->Start();
528 uword b_start = (*b)->Start();
529
530 if (a_start < b_start) {
531 return -1;
532 } else if (a_start > b_start) {
533 return 1;
534 } else {
535 return 0;
536 }
537 }
538
539 private:
540 const AbstractCode code_;
541
542 DISALLOW_COPY_AND_ASSIGN(CodeDescriptor);
543};
544
545// Fast lookup of Dart code objects.
546class CodeLookupTable : public ZoneAllocated {
547 public:
548 explicit CodeLookupTable(Thread* thread);
549
550 intptr_t length() const { return code_objects_.length(); }
551
552 const CodeDescriptor* At(intptr_t index) const {
553 return code_objects_.At(index);
554 }
555
556 const CodeDescriptor* FindCode(uword pc) const;
557
558 private:
559 void Build(Thread* thread);
560
561 void Add(const Object& code);
562
563 // Code objects sorted by entry.
564 ZoneGrowableArray<CodeDescriptor*> code_objects_;
565
566 friend class CodeLookupTableBuilder;
567
568 DISALLOW_COPY_AND_ASSIGN(CodeLookupTable);
569};
570
571class SampleBuffer {
572 public:
573 SampleBuffer() = default;
574 virtual ~SampleBuffer() = default;
575
576 virtual void Init(Sample* samples, intptr_t capacity) {
577 ASSERT(samples != nullptr);
578 ASSERT(capacity > 0);
579 samples_ = samples;
580 capacity_ = capacity;
581 }
582
583 void VisitSamples(SampleVisitor* visitor) {
584 ASSERT(visitor != nullptr);
585 const intptr_t length = capacity();
586 for (intptr_t i = 0; i < length; i++) {
587 Sample* sample = At(i);
588 if (!sample->head_sample()) {
589 // An inner sample in a chain of samples.
590 continue;
591 }
592 if (sample->ignore_sample()) {
593 // Bad sample.
594 continue;
595 }
596 if (sample->port() != visitor->port()) {
597 // Another isolate.
598 continue;
599 }
600 if (sample->timestamp() == 0) {
601 // Empty.
602 continue;
603 }
604 if (sample->At(0) == 0) {
605 // No frames.
606 continue;
607 }
608 visitor->IncrementVisited();
609 visitor->VisitSample(sample);
610 }
611 }
612
613 virtual Sample* ReserveSample() = 0;
614 virtual Sample* ReserveSampleAndLink(Sample* previous) = 0;
615
616 Sample* At(intptr_t idx) const {
617 ASSERT(idx >= 0);
618 ASSERT(idx < capacity_);
619 return &samples_[idx];
620 }
621
622 intptr_t capacity() const { return capacity_; }
623
624 ProcessedSampleBuffer* BuildProcessedSampleBuffer(
625 SampleFilter* filter,
626 ProcessedSampleBuffer* buffer = nullptr);
627
628 protected:
629 Sample* Next(Sample* sample);
630
631 ProcessedSample* BuildProcessedSample(Sample* sample,
632 const CodeLookupTable& clt);
633
634 Sample* samples_;
635 intptr_t capacity_;
636
637 DISALLOW_COPY_AND_ASSIGN(SampleBuffer);
638};
639
640class SampleBlock : public SampleBuffer {
641 public:
642 // The default number of samples per block. Overridden by some tests.
643 static constexpr intptr_t kSamplesPerBlock = 100;
644
645 SampleBlock() = default;
646 virtual ~SampleBlock() = default;
647
648 // Returns the number of samples contained within this block.
649 intptr_t capacity() const { return capacity_; }
650
651 Isolate* owner() const { return owner_; }
652 void set_owner(Isolate* isolate) { owner_ = isolate; }
653
654 virtual Sample* ReserveSample();
655 virtual Sample* ReserveSampleAndLink(Sample* previous);
656
657 bool TryAllocateFree() {
658 State expected = kFree;
659 State desired = kSampling;
660 std::memory_order success_order = std::memory_order_acquire;
661 std::memory_order failure_order = std::memory_order_relaxed;
662 return state_.compare_exchange_strong(expected, desired, success_order,
663 failure_order);
664 }
665 bool TryAllocateCompleted() {
666 State expected = kCompleted;
667 State desired = kSampling;
668 std::memory_order success_order = std::memory_order_acquire;
669 std::memory_order failure_order = std::memory_order_relaxed;
670 if (state_.compare_exchange_strong(expected, desired, success_order,
671 failure_order)) {
672 owner_ = nullptr;
673 cursor_ = 0;
674 return true;
675 }
676 return false;
677 }
678 void MarkCompleted() {
679 ASSERT(state_.load(std::memory_order_relaxed) == kSampling);
680 state_.store(kCompleted, std::memory_order_release);
681 }
682 bool TryAcquireStreaming(Isolate* isolate) {
683 if (state_.load(std::memory_order_relaxed) != kCompleted) return false;
684 if (owner_ != isolate) return false;
685
686 State expected = kCompleted;
687 State desired = kStreaming;
688 std::memory_order success_order = std::memory_order_acquire;
689 std::memory_order failure_order = std::memory_order_relaxed;
690 return state_.compare_exchange_strong(expected, desired, success_order,
691 failure_order);
692 }
693 void StreamingToCompleted() {
694 ASSERT(state_.load(std::memory_order_relaxed) == kStreaming);
695 state_.store(kCompleted, std::memory_order_relaxed);
696 }
697 void StreamingToFree() {
698 ASSERT(state_.load(std::memory_order_relaxed) == kStreaming);
699 owner_ = nullptr;
700 cursor_ = 0;
701 state_.store(kFree, std::memory_order_release);
702 }
703 void FreeCompleted() {
704 State expected = kCompleted;
705 State desired = kStreaming;
706 std::memory_order success_order = std::memory_order_acquire;
707 std::memory_order failure_order = std::memory_order_relaxed;
708 if (state_.compare_exchange_strong(expected, desired, success_order,
709 failure_order)) {
710 StreamingToFree();
711 }
712 }
713
714 protected:
715 bool HasStreamableSamples(const GrowableObjectArray& tag_table, UserTag* tag);
716
717 enum State : uint32_t {
718 kFree,
719 kSampling, // I.e., writing.
720 kCompleted,
721 kStreaming, // I.e., reading.
722 };
723 std::atomic<State> state_ = kFree;
724 RelaxedAtomic<uint32_t> cursor_ = 0;
725 Isolate* owner_ = nullptr;
726
727 private:
728 friend class SampleBlockListProcessor;
729 friend class SampleBlockBuffer;
730
731 DISALLOW_COPY_AND_ASSIGN(SampleBlock);
732};
733
734class SampleBlockBuffer {
735 public:
736 static constexpr intptr_t kDefaultBlockCount = 600;
737
738 // Creates a SampleBlockBuffer with a predetermined number of blocks.
739 //
740 // Defaults to kDefaultBlockCount blocks. Block size is fixed to
741 // SampleBlock::kSamplesPerBlock samples per block, except for in tests.
742 explicit SampleBlockBuffer(
743 intptr_t blocks = kDefaultBlockCount,
744 intptr_t samples_per_block = SampleBlock::kSamplesPerBlock);
745
746 virtual ~SampleBlockBuffer();
747
748 void VisitSamples(SampleVisitor* visitor) {
749 ASSERT(visitor != nullptr);
750 for (intptr_t i = 0; i < capacity_; ++i) {
751 blocks_[i].VisitSamples(visitor);
752 }
753 }
754
755 void FreeCompletedBlocks();
756
757 // Reserves a sample for a CPU profile.
758 //
759 // Returns nullptr when a sample can't be reserved.
760 Sample* ReserveCPUSample(Isolate* isolate);
761
762 // Reserves a sample for a Dart object allocation profile.
763 //
764 // Returns nullptr when a sample can't be reserved.
765 Sample* ReserveAllocationSample(Isolate* isolate);
766
767 intptr_t Size() const { return memory_->size(); }
768
769 ProcessedSampleBuffer* BuildProcessedSampleBuffer(
770 Isolate* isolate,
771 SampleFilter* filter,
772 ProcessedSampleBuffer* buffer = nullptr);
773
774 private:
775 Sample* ReserveSampleImpl(Isolate* isolate, bool allocation_sample);
776
777 // Returns nullptr if there are no available blocks.
778 SampleBlock* ReserveSampleBlock();
779
780 // Sample block management.
781 RelaxedAtomic<int> cursor_;
782 SampleBlock* blocks_;
783 intptr_t capacity_;
784
785 // Sample buffer management.
786 VirtualMemory* memory_;
787 Sample* sample_buffer_;
788
789 friend class Isolate;
790 DISALLOW_COPY_AND_ASSIGN(SampleBlockBuffer);
791};
792
793intptr_t Profiler::Size() {
794 intptr_t size = 0;
795 if (sample_block_buffer_ != nullptr) {
796 size += sample_block_buffer_->Size();
797 }
798 return size;
799}
800
801// A |ProcessedSample| is a combination of 1 (or more) |Sample|(s) that have
802// been merged into a logical sample. The raw data may have been processed to
803// improve the quality of the stack trace.
804class ProcessedSample : public ZoneAllocated {
805 public:
806 ProcessedSample();
807
808 // Add |pc| to stack trace.
809 void Add(uword pc) { pcs_.Add(pc); }
810
811 // Insert |pc| at |index|.
812 void InsertAt(intptr_t index, uword pc) { pcs_.InsertAt(index, pc); }
813
814 // Number of pcs in stack trace.
815 intptr_t length() const { return pcs_.length(); }
816
817 // Get |pc| at |index|.
818 uword At(intptr_t index) const {
819 ASSERT(index >= 0);
820 ASSERT(index < length());
821 return pcs_[index];
822 }
823
824 // Timestamp sample was taken at.
825 int64_t timestamp() const { return timestamp_; }
826 void set_timestamp(int64_t timestamp) { timestamp_ = timestamp; }
827
828 ThreadId tid() const { return tid_; }
829 void set_tid(ThreadId tid) { tid_ = tid; }
830
831 // The VM tag.
832 uword vm_tag() const { return vm_tag_; }
833 void set_vm_tag(uword tag) { vm_tag_ = tag; }
834
835 // The user tag.
836 uword user_tag() const { return user_tag_; }
837 void set_user_tag(uword tag) { user_tag_ = tag; }
838
839 // The class id if this is an allocation profile sample. -1 otherwise.
840 intptr_t allocation_cid() const { return allocation_cid_; }
841 void set_allocation_cid(intptr_t cid) { allocation_cid_ = cid; }
842
843 // The identity hash code of the allocated object if this is an allocation
844 // profile sample. -1 otherwise.
845 uint32_t allocation_identity_hash() const {
846 return allocation_identity_hash_;
847 }
848 void set_allocation_identity_hash(uint32_t hash) {
849 allocation_identity_hash_ = hash;
850 }
851
852 bool IsAllocationSample() const { return allocation_cid_ > 0; }
853
854 // Was the stack trace truncated?
855 bool truncated() const { return truncated_; }
856 void set_truncated(bool truncated) { truncated_ = truncated; }
857
858 // Was the first frame in the stack trace executing?
859 bool first_frame_executing() const { return first_frame_executing_; }
860 void set_first_frame_executing(bool first_frame_executing) {
861 first_frame_executing_ = first_frame_executing;
862 }
863
864 private:
865 void FixupCaller(const CodeLookupTable& clt,
866 uword pc_marker,
867 uword* stack_buffer);
868
869 void CheckForMissingDartFrame(const CodeLookupTable& clt,
870 const CodeDescriptor* code,
871 uword pc_marker,
872 uword* stack_buffer);
873
874 ZoneGrowableArray<uword> pcs_;
875 int64_t timestamp_;
876 ThreadId tid_;
877 uword vm_tag_;
878 uword user_tag_;
879 intptr_t allocation_cid_;
880 uint32_t allocation_identity_hash_;
881 bool truncated_;
882 bool first_frame_executing_;
883
884 friend class SampleBuffer;
885 DISALLOW_COPY_AND_ASSIGN(ProcessedSample);
886};
887
888// A collection of |ProcessedSample|s.
889class ProcessedSampleBuffer : public ZoneAllocated {
890 public:
891 ProcessedSampleBuffer();
892
893 void Add(ProcessedSample* sample) { samples_.Add(sample); }
894
895 intptr_t length() const { return samples_.length(); }
896
897 ProcessedSample* At(intptr_t index) { return samples_.At(index); }
898
899 const CodeLookupTable& code_lookup_table() const {
900 return *code_lookup_table_;
901 }
902
903 private:
904 ZoneGrowableArray<ProcessedSample*> samples_;
905 CodeLookupTable* code_lookup_table_;
906
907 DISALLOW_COPY_AND_ASSIGN(ProcessedSampleBuffer);
908};
909
910class SampleBlockProcessor : public AllStatic {
911 public:
912 static void Init();
913
914 static void Startup();
915 static void Cleanup();
916
917 private:
918 static constexpr intptr_t kMaxThreads = 4096;
919 static bool initialized_;
920 static bool shutdown_;
921 static bool thread_running_;
922 static ThreadJoinId processor_thread_id_;
923 static Monitor* monitor_;
924
925 static void ThreadMain(uword parameters);
926};
927
928} // namespace dart
929
930#endif // RUNTIME_VM_PROFILER_H_
static float next(float f)
static uint32_t hash(const SkShaderBase::GradientInfo &v)
#define ILLEGAL_PORT
Definition dart_api.h:1530
int64_t Dart_Port
Definition dart_api.h:1524
#define ASSERT(E)
static bool b
struct MyStruct a[10]
AtkStateType state
glong glong end
static const uint8_t buffer[]
void Init()
size_t length
ImplicitString Name
Definition DMSrcSink.h:38
bool Contains(const Container &container, const Value &value)
Build(configs, env, options)
Definition build.py:232
pthread_t ThreadId
Definition thread_absl.h:21
pthread_t ThreadJoinId
DART_EXPORT bool IsNull(Dart_Handle object)
uintptr_t uword
Definition globals.h:501
const intptr_t cid
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified vm service port
Definition switches.h:87
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive keep the shell running after the Dart script has completed enable serial On low power devices with low core running concurrent GC tasks on threads can cause them to contend with the UI thread which could potentially lead to jank This option turns off all concurrent GC activities domain network JSON encoded network policy per domain This overrides the DisallowInsecureConnections switch Embedder can specify whether to allow or disallow insecure connections at a domain level old gen heap size
Definition switches.h:259
void Reset(SkPath *path)
Definition path_ops.cc:40
TSize< Scalar > Size
Definition size.h:137
#define Pp
Definition globals.h:425
#define DISALLOW_IMPLICIT_CONSTRUCTORS(TypeName)
Definition globals.h:593
#define DISALLOW_COPY_AND_ASSIGN(TypeName)
Definition globals.h:581
#define DECLARE_PROFILER_COUNTER(name)
Definition profiler.h:48
#define PROFILER_COUNTERS(V)
Definition profiler.h:32