Flutter Engine
sampler.cc
// Copyright (c) 2022, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)

#include <math.h>
#include <algorithm>

#include "vm/heap/safepoint.h"
#include "vm/heap/sampler.h"
#include "vm/isolate.h"
#include "vm/lockers.h"
#include "vm/os.h"
#include "vm/random.h"
#include "vm/thread.h"
#include "vm/thread_registry.h"

#define ASSERT_TLAB_BOUNDARIES_VALID(__thread)                                \
  do {                                                                        \
    ASSERT(__thread->top() <= __thread->end());                               \
    ASSERT(__thread->end() <= __thread->true_end());                          \
    if (next_tlab_offset_ != kUninitialized) {                                \
      ASSERT(__thread->end() == __thread->true_end());                        \
      ASSERT(next_tlab_offset_ > 0);                                          \
    }                                                                         \
  } while (0)

#define ASSERT_THREAD_STATE(__thread)                                         \
  do {                                                                        \
    Thread* __cur = Thread::Current();                                        \
    ASSERT(__cur == nullptr || __cur == __thread);                            \
  } while (0)

namespace dart {

bool HeapProfileSampler::enabled_ = false;
Dart_HeapSamplingCreateCallback HeapProfileSampler::create_callback_ = nullptr;
Dart_HeapSamplingDeleteCallback HeapProfileSampler::delete_callback_ = nullptr;
RwLock* HeapProfileSampler::lock_ = new RwLock();
intptr_t HeapProfileSampler::sampling_interval_ =
    HeapProfileSampler::kDefaultSamplingInterval;

HeapProfileSampler::HeapProfileSampler(Thread* thread)
    : interval_to_next_sample_(kUninitialized), thread_(thread) {}

void HeapProfileSampler::Enable(bool enabled) {
  // Don't try and change enabled state if sampler instances are currently
  // doing work.
  WriteRwLocker locker(Thread::Current(), lock_);
  enabled_ = enabled;

  IsolateGroup::ForEach([&](IsolateGroup* group) {
    group->thread_registry()->ForEachThread([&](Thread* thread) {
      thread->heap_sampler().ScheduleUpdateThreadEnable();
    });
  });
}

void HeapProfileSampler::SetSamplingInterval(intptr_t bytes_interval) {
  // Don't try and change sampling interval state if sampler instances are
  // currently doing work.
  WriteRwLocker locker(Thread::Current(), lock_);
  ASSERT(bytes_interval >= 0);
  sampling_interval_ = bytes_interval;

  // The sampling interval will be set in each thread once sampling is enabled.
  if (!enabled_) {
    return;
  }

  // If sampling is enabled, notify each thread that it should update its
  // sampling interval.
  IsolateGroup::ForEach([&](IsolateGroup* group) {
    group->thread_registry()->ForEachThread([&](Thread* thread) {
      thread->heap_sampler().ScheduleSetThreadSamplingInterval();
    });
  });
}

void HeapProfileSampler::SetSamplingCallback(
    Dart_HeapSamplingCreateCallback create_callback,
    Dart_HeapSamplingDeleteCallback delete_callback) {
  // Protect against the callback being changed in the middle of a sample.
  WriteRwLocker locker(Thread::Current(), lock_);
  if ((create_callback_ != nullptr && create_callback == nullptr) ||
      (delete_callback_ != nullptr && delete_callback == nullptr)) {
    FATAL("Clearing sampling callbacks is prohibited.");
  }
  create_callback_ = create_callback;
  delete_callback_ = delete_callback;
}

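The create and delete callbacks registered above must match the Dart_HeapSamplingCreateCallback and Dart_HeapSamplingDeleteCallback typedefs from dart_api.h, which is how InvokeCallbackForLastSample later invokes them. Below is a minimal illustrative sketch of such a pair; the AllocationSample struct and the callback names are hypothetical and are not part of this file.

// Illustrative sketch only (not part of sampler.cc): an embedder-style
// create/delete callback pair matching the typedefs accepted above.
#include <string>

struct AllocationSample {
  std::string cls_name;  // Copied; the char* passed in is not owned by us.
  intptr_t size;
};

void* SampleCreated(Dart_Isolate isolate,
                    Dart_IsolateGroup isolate_group,
                    const char* cls_name,
                    intptr_t allocation_size) {
  // The returned pointer is the per-sample user data handed back to the
  // delete callback once the VM is done with the sample.
  return new AllocationSample{cls_name != nullptr ? cls_name : "",
                              allocation_size};
}

void SampleDeleted(void* data) {
  delete static_cast<AllocationSample*>(data);
}

// Registration would then look like:
//   HeapProfileSampler::SetSamplingCallback(&SampleCreated, &SampleDeleted);
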
void HeapProfileSampler::ResetState() {
  thread_->set_end(thread_->true_end());
  next_tlab_offset_ = kUninitialized;
  ResetIntervalState();
  ASSERT_TLAB_BOUNDARIES_VALID(thread_);
}

void HeapProfileSampler::Initialize() {
  // Don't grab lock_ here as it can cause a deadlock if thread initialization
  // occurs when the profiler state is being changed. Instead, let the thread
  // perform initialization when it's no longer holding the thread registry's
  // thread_lock().
  ScheduleUpdateThreadEnable();
}

void HeapProfileSampler::ScheduleUpdateThreadEnable() {
  schedule_thread_enable_ = true;
  thread_->ScheduleInterrupts(Thread::kVMInterrupt);
}

void HeapProfileSampler::UpdateThreadEnable() {
  ASSERT_THREAD_STATE(thread_);
  ReadRwLocker locker(Thread::Current(), lock_);
  UpdateThreadEnableLocked();
}

void HeapProfileSampler::UpdateThreadEnableLocked() {
  thread_enabled_ = enabled_;
  if (thread_enabled_) {
    SetNextSamplingIntervalLocked(GetNextSamplingIntervalLocked());
  } else {
    // Reset the TLAB boundaries to the true end to avoid unnecessary slow
    // path invocations when sampling is disabled.
    ResetState();
  }
}

void HeapProfileSampler::ScheduleSetThreadSamplingInterval() {
  schedule_thread_set_sampling_interval_ = true;
  thread_->ScheduleInterrupts(Thread::kVMInterrupt);
}

void HeapProfileSampler::SetThreadSamplingInterval() {
  ASSERT_THREAD_STATE(thread_);
  ReadRwLocker locker(thread_, lock_);
  SetThreadSamplingIntervalLocked();
}

void HeapProfileSampler::SetThreadSamplingIntervalLocked() {
  // Don't try and update the sampling interval if the sampler isn't enabled
  // for this thread. Otherwise, we'll get into an inconsistent state.
  if (!thread_enabled_) {
    return;
  }
  // Force reset the next sampling point.
  ResetState();
  SetNextSamplingIntervalLocked(GetNextSamplingIntervalLocked());
}

void HeapProfileSampler::HandleReleasedTLAB(Thread* thread) {
  ReadRwLocker locker(thread, lock_);
  if (!enabled_) {
    return;
  }
  interval_to_next_sample_ = remaining_TLAB_interval();
  next_tlab_offset_ = kUninitialized;
}

void HeapProfileSampler::HandleNewTLAB(intptr_t old_tlab_remaining_space,
                                       bool is_first_tlab) {
  ASSERT_THREAD_STATE(thread_);
  ReadRwLocker locker(thread_, lock_);
  if (!enabled_ || (next_tlab_offset_ == kUninitialized && !is_first_tlab)) {
    return;
  } else if (is_first_tlab) {
    ASSERT(next_tlab_offset_ == kUninitialized);
    if (interval_to_next_sample_ != kUninitialized) {
      intptr_t top = thread_->top();
      intptr_t tlab_size = thread_->true_end() - top;
      if (tlab_size >= interval_to_next_sample_) {
        thread_->set_end(top + interval_to_next_sample_);
        ASSERT_TLAB_BOUNDARIES_VALID(thread_);
      } else {
        next_tlab_offset_ = interval_to_next_sample_ - tlab_size;
        ASSERT_TLAB_BOUNDARIES_VALID(thread_);
      }
    } else {
      SetThreadSamplingIntervalLocked();
    }
    return;
  }
  intptr_t updated_offset = next_tlab_offset_ + old_tlab_remaining_space;
  if (updated_offset + thread_->top() > thread_->true_end()) {
    // The next sampling point isn't in this TLAB.
    next_tlab_offset_ = updated_offset - (thread_->true_end() - thread_->top());
    thread_->set_end(thread_->true_end());
    ASSERT_TLAB_BOUNDARIES_VALID(thread_);
  } else {
    ASSERT(updated_offset <= static_cast<intptr_t>(thread_->true_end()) -
                                 static_cast<intptr_t>(thread_->top()));
    thread_->set_end(updated_offset + thread_->top());
    next_tlab_offset_ = kUninitialized;
    ASSERT_TLAB_BOUNDARIES_VALID(thread_);
  }
}

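For illustration (all numbers invented): suppose SetNextSamplingIntervalLocked picks a sampling point 10 KB of allocation away while the current TLAB has only 8 KB left before true_end. The end pointer is pinned at true_end and next_tlab_offset_ is left at 2 KB. If the thread later abandons that TLAB with 1 KB still unallocated, HandleNewTLAB receives old_tlab_remaining_space = 1 KB and computes updated_offset = 2 KB + 1 KB = 3 KB, since the unused bytes never counted toward the interval. The new TLAB's end is then set 3 KB past its top, or, if the new TLAB is smaller than that, the remainder is carried forward in next_tlab_offset_ again.
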
void* HeapProfileSampler::InvokeCallbackForLastSample(intptr_t cid) {
  ASSERT(enabled_);
  ASSERT(create_callback_ != nullptr);
  ReadRwLocker locker(thread_, lock_);
  ClassTable* table = IsolateGroup::Current()->class_table();
  void* result = create_callback_(
      reinterpret_cast<Dart_Isolate>(thread_->isolate()),
      reinterpret_cast<Dart_IsolateGroup>(thread_->isolate_group()),
      table->UserVisibleNameFor(cid), last_sample_size_);
  last_sample_size_ = kUninitialized;
  return result;
}

void HeapProfileSampler::SampleNewSpaceAllocation(intptr_t allocation_size) {
  ReadRwLocker locker(thread_, lock_);
  if (!enabled_) {
    return;
  }
  // We should never be sampling an allocation that won't fit in the
  // current TLAB.
  ASSERT(allocation_size <=
         static_cast<intptr_t>(thread_->true_end() - thread_->top()));
  ASSERT(sampling_interval_ >= 0);

  // Clean up interval state in preparation for a new interval.
  ResetIntervalState();

  if (UNLIKELY(allocation_size >= sampling_interval_)) {
    last_sample_size_ = allocation_size;
    // Reset the sampling interval, but only count the sample once.
    NumberOfSamplesLocked(allocation_size);
    return;
  }
  last_sample_size_ =
      sampling_interval_ * NumberOfSamplesLocked(allocation_size);
}

void HeapProfileSampler::SampleOldSpaceAllocation(intptr_t allocation_size) {
  ASSERT_THREAD_STATE(thread_);
  ReadRwLocker locker(thread_, lock_);
  if (!enabled_) {
    return;
  }
  ASSERT(sampling_interval_ >= 0);
  // Account for any new space allocations that have happened since we last
  // updated the sampling interval statistic.
  intptr_t tlab_interval = remaining_TLAB_interval();
  if (tlab_interval != kUninitialized) {
    interval_to_next_sample_ = tlab_interval;
  }

  // If we don't have a TLAB yet simply initialize interval_to_next_sample_
  // from the sampling interval.
  if (interval_to_next_sample_ == kUninitialized) {
    interval_to_next_sample_ = sampling_interval_;
  }

  // Check the allocation is large enough to trigger a sample. If not, tighten
  // the interval.
  if (allocation_size < interval_to_next_sample_) {
    intptr_t end = static_cast<intptr_t>(thread_->end());
    const intptr_t orig_end = end;
    const intptr_t true_end = static_cast<intptr_t>(thread_->true_end());
    const intptr_t orig_tlab_offset = next_tlab_offset_;
    USE(orig_tlab_offset);
    USE(orig_end);
    // We may not have a TLAB, don't pull one out of thin air.
    if (end != 0) {
      if (next_tlab_offset_ != kUninitialized) {
        end += next_tlab_offset_;
        next_tlab_offset_ = kUninitialized;
      }

      end += allocation_size;
      if (end > true_end) {
        thread_->set_end(true_end);
        next_tlab_offset_ = end - true_end;
        ASSERT_TLAB_BOUNDARIES_VALID(thread_);
      } else {
        thread_->set_end(end);
        ASSERT_TLAB_BOUNDARIES_VALID(thread_);
      }
    }
    interval_to_next_sample_ -= allocation_size;
    ASSERT(interval_to_next_sample_ > 0);
    return;
  }
  // Clean up interval state in preparation for a new interval.
  ResetIntervalState();

  // Find a new sampling point and reset TLAB boundaries.
  SetThreadSamplingIntervalLocked();
  last_sample_size_ = allocation_size;
}

// Determines the next sampling interval by sampling from a Poisson process.
intptr_t HeapProfileSampler::GetNextSamplingIntervalLocked() {
  ASSERT(thread_->isolate_group() != nullptr);
  double u = thread_->isolate_group()->random()->NextDouble();
  ASSERT(u >= 0.0 && u <= 1.0);
  // Approximate sampling from a Poisson distribution using an exponential
  // distribution. We take the sample by feeding in a random uniform value in
  // the range [0,1] to the inverse of the exponential CDF.
  double next = -log(1 - u) * sampling_interval_;
  ASSERT(next > 0);
  // + 1 since the sample implies the number of "failures" before the next
  // success, which should be included in our interval.
  return std::max(kObjectAlignment, static_cast<intptr_t>(next) + 1);
}

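The draw above is the standard inverse-CDF method: for u uniform in [0, 1), -log(1 - u) * sampling_interval_ is exponentially distributed with mean sampling_interval_ bytes, which is the gap between events of a Poisson process with rate 1 / sampling_interval_. A self-contained sketch of the same transform (hypothetical helper, not part of this file):

// Illustrative sketch only: the same inverse-CDF draw outside the VM.
#include <cmath>
#include <cstdint>
#include <random>

int64_t NextExponentialInterval(std::mt19937_64& rng, double mean_bytes) {
  std::uniform_real_distribution<double> uniform(0.0, 1.0);
  const double u = uniform(rng);                          // u in [0, 1)
  const double next = -std::log(1.0 - u) * mean_bytes;    // Exponential(mean_bytes)
  return static_cast<int64_t>(next) + 1;                  // +1: include the sampled byte
}
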
intptr_t HeapProfileSampler::NumberOfSamplesLocked(intptr_t allocation_size) {
  // There's always at least a single sample if we've reached this point.
  intptr_t sample_count = 1;

  intptr_t next_interval = GetNextSamplingIntervalLocked();
  intptr_t total_next_interval = next_interval;

  // The remaining portion of the allocation that hasn't been accounted for yet.
  intptr_t remaining_size =
      allocation_size - static_cast<intptr_t>(thread_->end() - thread_->top());
  while (remaining_size > 0) {
    if (remaining_size > next_interval) {
      // The allocation is large enough to be counted again.
      sample_count++;
    }
    remaining_size =
        std::max(remaining_size - next_interval, static_cast<intptr_t>(0));
    next_interval = GetNextSamplingIntervalLocked();
    total_next_interval += next_interval;
  }

  // Update the TLAB boundary to account for the potential multiple samples
  // the last allocation generated.
  SetNextSamplingIntervalLocked(total_next_interval);

  return sample_count;
}

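A worked example with invented numbers: a 1024 KB allocation arrives while the current sampling point is 100 KB away (thread_->end() - thread_->top() == 100 KB), so remaining_size starts at 924 KB with sample_count = 1 and a first fresh draw of, say, 400 KB. Since 924 KB > 400 KB, sample_count becomes 2 and remaining_size drops to 524 KB. A second draw of 600 KB is not exceeded, so remaining_size is clamped to 0 and one final draw (say 300 KB) merely positions the next sampling point beyond this allocation. The allocation is therefore attributed two samples, and SetNextSamplingIntervalLocked advances the boundary by the accumulated 1300 KB.
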
intptr_t HeapProfileSampler::remaining_TLAB_interval() const {
  if (thread_->end() == 0) {
    return kUninitialized;
  }
  intptr_t remaining = thread_->end() - thread_->top();
  if (next_tlab_offset_ != kUninitialized) {
    remaining += next_tlab_offset_;
  }
  return remaining;
}

void HeapProfileSampler::SetNextSamplingIntervalLocked(intptr_t next_interval) {
  ASSERT_THREAD_STATE(thread_);
  intptr_t new_end = thread_->end();
  const intptr_t top = static_cast<intptr_t>(thread_->top());
  const intptr_t true_end = static_cast<intptr_t>(thread_->true_end());
  // Don't create a TLAB out of thin air if one doesn't exist.
  if (true_end != 0) {
    if (new_end == true_end) {
      // Sampling was likely just enabled.
      new_end = top;
    }
    new_end += next_interval;

    if (new_end > true_end) {
      // The next sampling point is in the next TLAB.
      ASSERT(next_tlab_offset_ == kUninitialized);
      next_tlab_offset_ = new_end - true_end;
      new_end = true_end;
    }
    ASSERT(top <= new_end);
    thread_->set_end(new_end);
    ASSERT_TLAB_BOUNDARIES_VALID(thread_);
  }
  interval_to_next_sample_ = next_interval;
}

}  // namespace dart

#endif  // !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)