#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
#define ASSERT_TLAB_BOUNDARIES_VALID(__thread)                                 \
  do {                                                                         \
    ASSERT(__thread->top() <= __thread->end());                                \
    ASSERT(__thread->end() <= __thread->true_end());                           \
    if (next_tlab_offset_ != kUninitialized) {                                 \
      ASSERT(__thread->end() == __thread->true_end());                         \
      ASSERT(next_tlab_offset_ > 0);                                           \
    }                                                                          \
  } while (false)
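// Ensures sampler state is only mutated from the thread it belongs to; the
// assert tolerates a null Thread::Current(), e.g. while the thread is still
// being set up or torn down.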
#define ASSERT_THREAD_STATE(__thread)                                          \
  do {                                                                         \
    Thread* __cur = Thread::Current();                                         \
    ASSERT(__cur == nullptr || __cur == __thread);                             \
  } while (false)
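// Overview: sampling piggybacks on the bump-pointer allocation fast path. The
// sampler artificially lowers Thread::end() so that the allocation which
// crosses the chosen sampling point misses the fast path and reaches the
// runtime (SampleNewSpaceAllocation below), where the sample is recorded.
// When the sampling point does not fit in the current TLAB, the overflow is
// remembered in next_tlab_offset_ and re-applied to the next TLAB.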
bool HeapProfileSampler::enabled_ = false;
RwLock* HeapProfileSampler::lock_ = new RwLock();
intptr_t HeapProfileSampler::sampling_interval_ =
    HeapProfileSampler::kDefaultSamplingInterval;
HeapProfileSampler::HeapProfileSampler(Thread* thread)
    : interval_to_next_sample_(kUninitialized), thread_(thread) {}
    group->thread_registry()->ForEachThread([&](Thread* thread) {
  ASSERT(bytes_interval >= 0);
  sampling_interval_ = bytes_interval;
    group->thread_registry()->ForEachThread([&](Thread* thread) {
void HeapProfileSampler::SetSamplingCallback(
    Dart_HeapSamplingCreateCallback create_callback,
    Dart_HeapSamplingDeleteCallback delete_callback) {
  if ((create_callback_ != nullptr && create_callback == nullptr) ||
      (delete_callback_ != nullptr && delete_callback == nullptr)) {
    FATAL("Clearing sampling callbacks is prohibited.");
  }
  create_callback_ = create_callback;
  delete_callback_ = delete_callback;
}
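// Illustrative sketch (not part of the original file): a callback pair an
// embedder might register here. The create callback's return value is
// attached to the sample and later handed to the delete callback.
// SampleRecord is a hypothetical embedder-side type used only for this
// example.
//
//   void* OnSampleCreate(Dart_Isolate isolate, Dart_IsolateGroup group,
//                        const char* cls_name, intptr_t allocation_size) {
//     return new SampleRecord{cls_name, allocation_size};
//   }
//   void OnSampleDelete(void* data) {
//     delete static_cast<SampleRecord*>(data);
//   }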
void HeapProfileSampler::ResetState() {
  next_tlab_offset_ = kUninitialized;
  schedule_thread_enable_ = true;
  UpdateThreadEnableLocked();
void HeapProfileSampler::UpdateThreadEnableLocked() {
  thread_enabled_ = enabled_;
  if (thread_enabled_) {
    SetNextSamplingIntervalLocked(GetNextSamplingIntervalLocked());
  schedule_thread_set_sampling_interval_ = true;
  SetThreadSamplingIntervalLocked();
void HeapProfileSampler::SetThreadSamplingIntervalLocked() {
  if (!thread_enabled_) {
    return;
  }
  SetNextSamplingIntervalLocked(GetNextSamplingIntervalLocked());
}
  interval_to_next_sample_ = remaining_TLAB_interval();
  next_tlab_offset_ = kUninitialized;
}
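// Releasing a TLAB folds the unconsumed part of the interval back into
// interval_to_next_sample_ (above); HandleNewTLAB below re-applies that
// interval against the boundaries of the replacement TLAB.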
void HeapProfileSampler::HandleNewTLAB(intptr_t old_tlab_remaining_space,
                                       bool is_first_tlab) {
  if (!enabled_ || (next_tlab_offset_ == kUninitialized && !is_first_tlab)) {
    return;
  } else if (is_first_tlab) {
    ASSERT(next_tlab_offset_ == kUninitialized);
    if (interval_to_next_sample_ != kUninitialized) {
      intptr_t top = thread_->top();
      intptr_t tlab_size = thread_->true_end() - top;
      if (tlab_size >= interval_to_next_sample_) {
        thread_->set_end(top + interval_to_next_sample_);
      } else {
        next_tlab_offset_ = interval_to_next_sample_ - tlab_size;
      }
    } else {
      SetThreadSamplingIntervalLocked();
    }
    return;
  }
  intptr_t updated_offset = next_tlab_offset_ + old_tlab_remaining_space;
  if (updated_offset + thread_->top() > thread_->true_end()) {
    next_tlab_offset_ =
        updated_offset - (thread_->true_end() - thread_->top());
    thread_->set_end(thread_->true_end());
  } else {
    ASSERT(updated_offset <= static_cast<intptr_t>(thread_->true_end()) -
                                 static_cast<intptr_t>(thread_->top()));
    thread_->set_end(updated_offset + thread_->top());
    next_tlab_offset_ = kUninitialized;
  }
}
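// Worked example (illustrative numbers): if the previous TLAB was released
// with 8 KB unused and the sampling point lay 20 KB past its end
// (next_tlab_offset_ == 20 KB), the point now sits 28 KB into the new TLAB.
// A 64 KB TLAB can hold it, so end() moves to top() + 28 KB; a 16 KB TLAB
// cannot, so the remaining 12 KB is carried forward in next_tlab_offset_.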
void* HeapProfileSampler::InvokeCallbackForLastSample(intptr_t cid) {
  ASSERT(create_callback_ != nullptr);
  ClassTable* table = IsolateGroup::Current()->class_table();
  void* result = create_callback_(
      reinterpret_cast<Dart_Isolate>(thread_->isolate()),
      reinterpret_cast<Dart_IsolateGroup>(thread_->isolate_group()),
      table->UserVisibleNameFor(cid), last_sample_size_);
  last_sample_size_ = kUninitialized;
  return result;
}
  ASSERT(allocation_size <=
         static_cast<intptr_t>(thread_->true_end() - thread_->top()));
  ASSERT(sampling_interval_ >= 0);
  ResetIntervalState();
  if (UNLIKELY(allocation_size >= sampling_interval_)) {
    last_sample_size_ = allocation_size;
    NumberOfSamplesLocked(allocation_size);
    return;
  }
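  // For allocations smaller than the sampling interval, each sample
  // statistically stands in for roughly sampling_interval_ bytes, so the
  // reported size is weighted by the number of sampling points crossed.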
  last_sample_size_ =
      sampling_interval_ * NumberOfSamplesLocked(allocation_size);
}
  ASSERT(sampling_interval_ >= 0);
  intptr_t tlab_interval = remaining_TLAB_interval();
  if (tlab_interval != kUninitialized) {
    interval_to_next_sample_ = tlab_interval;
  }
  if (interval_to_next_sample_ == kUninitialized) {
    interval_to_next_sample_ = sampling_interval_;
  }
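  // A non-sampled old-space allocation still consumes part of the interval:
  // the block below charges its size against interval_to_next_sample_ and
  // adjusts the artificial TLAB end to match, spilling into next_tlab_offset_
  // whenever the adjusted end would pass true_end.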
  if (allocation_size < interval_to_next_sample_) {
    intptr_t end = static_cast<intptr_t>(thread_->end());
    const intptr_t orig_end = end;
    const intptr_t true_end = static_cast<intptr_t>(thread_->true_end());
    const intptr_t orig_tlab_offset = next_tlab_offset_;
    USE(orig_tlab_offset);
    if (next_tlab_offset_ != kUninitialized) {
      end += next_tlab_offset_;
      next_tlab_offset_ = kUninitialized;
    }
    end += allocation_size;
    if (end > true_end) {
      next_tlab_offset_ = end - true_end;
      end = true_end;
    }
    thread_->set_end(end);
    interval_to_next_sample_ -= allocation_size;
    ASSERT(interval_to_next_sample_ > 0);
    return;
  }
  ResetIntervalState();
  SetThreadSamplingIntervalLocked();
  last_sample_size_ = allocation_size;
}
intptr_t HeapProfileSampler::GetNextSamplingIntervalLocked() {
  ASSERT(u >= 0.0 && u <= 1.0);
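  // u is a uniform draw in [0, 1). Sampling roughly once per
  // sampling_interval_ bytes is modeled as a Poisson process, so the byte gap
  // to the next sampling point is exponentially distributed with mean
  // sampling_interval_; inverting the exponential CDF yields the expression
  // below.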
  double next = -log(1 - u) * sampling_interval_;
intptr_t HeapProfileSampler::NumberOfSamplesLocked(intptr_t allocation_size) {
  intptr_t sample_count = 1;
  intptr_t next_interval = GetNextSamplingIntervalLocked();
  intptr_t total_next_interval = next_interval;
  intptr_t remaining_size =
      allocation_size -
      static_cast<intptr_t>(thread_->end() - thread_->top());
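  // Walk through freshly drawn intervals: every interval this oversized
  // allocation crosses counts as one more sample, and the summed draws give
  // the distance from here to the sampling point that follows the allocation.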
  while (remaining_size > 0) {
    if (remaining_size > next_interval) {
      sample_count++;
    }
    remaining_size =
        std::max(remaining_size - next_interval, static_cast<intptr_t>(0));
    next_interval = GetNextSamplingIntervalLocked();
    total_next_interval += next_interval;
  }
  SetNextSamplingIntervalLocked(total_next_interval);
  return sample_count;
}
intptr_t HeapProfileSampler::remaining_TLAB_interval() const {
  if (thread_->end() == 0) {
    return kUninitialized;
  }
  intptr_t remaining = thread_->end() - thread_->top();
  if (next_tlab_offset_ != kUninitialized) {
    remaining += next_tlab_offset_;
  }
  return remaining;
}
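// Applies a freshly drawn interval to the thread: end() is pulled below
// true_end() so the allocation fast path trips exactly at the sampling point,
// and any part of the interval that does not fit in the current TLAB is
// parked in next_tlab_offset_ for HandleNewTLAB to pick up later.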
void HeapProfileSampler::SetNextSamplingIntervalLocked(
    intptr_t next_interval) {
  intptr_t new_end = thread_->end();
  const intptr_t top = static_cast<intptr_t>(thread_->top());
  const intptr_t true_end = static_cast<intptr_t>(thread_->true_end());
  if (new_end == true_end) {
    new_end = top;
  }
  new_end += next_interval;
  if (new_end > true_end) {
    ASSERT(next_tlab_offset_ == kUninitialized);
    next_tlab_offset_ = new_end - true_end;
    new_end = true_end;
  }
  thread_->set_end(new_end);
  interval_to_next_sample_ = next_interval;
}