#if defined(DART_PRECOMPILER)

#include "vm/compiler/aot/dispatch_table_generator.h"  // assumed main header

#include <memory>

#define Z zone_

namespace dart {
namespace compiler {

// A half-open interval [begin, end) of class ids.
class Interval {
 public:
  Interval(int32_t begin, int32_t end) : begin_(begin), end_(end) {}

  int32_t begin() const { return begin_; }
  void set_begin(int32_t value) { begin_ = value; }

  int32_t end() const { return end_; }

  int32_t length() const { return end_ - begin_; }

  bool IsSame(const Interval other) const {
    return end() == other.end() && begin() == other.begin();
  }

  bool IsBefore(const Interval other) const { return end() <= other.begin(); }

  bool IsAfter(const Interval other) const { return begin() >= other.end(); }

  bool Overlap(const Interval other) const {
    return !IsBefore(other) && !IsAfter(other);
  }

  bool ContainsBeginOf(const Interval other) const {
    return begin() <= other.begin() && other.begin() <= end();
  }

  bool ContainsEndOf(const Interval other) const {
    return begin() <= other.end() && other.end() <= end();
  }

  bool Contains(const Interval other) const {
    return ContainsBeginOf(other) && ContainsEndOf(other);
  }

  void ExtendToIncludeInterval(const Interval& other) {
    if (other.begin() < begin_) begin_ = other.begin();
    if (other.end() > end_) end_ = other.end();
  }

 private:
  int32_t begin_;
  int32_t end_;
};
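
// Pairs a contiguous class-id range with the Function that implements a
// selector for those classes, plus the depth of the defining class in the
// hierarchy (deeper definitions override shallower ones when rows are filled).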
class CidInterval {
 public:
  CidInterval(classid_t cid,
              int16_t depth,
              Interval range,
              const Function* function)
      : cid_(cid), depth_(depth), range_(range), function_(function) {}

  classid_t cid() const { return cid_; }
  int16_t depth() const { return depth_; }
  const Interval& range() const { return range_; }
  const Function* function() const { return function_; }

 private:
  classid_t cid_;
  int16_t depth_;
  Interval range_;
  const Function* function_;
};
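
// One row of the dispatch table: all class-id intervals registered for a
// single selector, plus the row's total footprint once contained and adjacent
// intervals have been merged by Finalize().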
class SelectorRow {
 public:
  SelectorRow(Zone* zone, TableSelector* selector)
      : selector_(selector),
        class_ranges_(zone, 0),
        ranges_(zone, 0),
        code_(Code::ZoneHandle(zone)) {}

  TableSelector* selector() const { return selector_; }

  int32_t total_size() const { return total_size_; }

  const GrowableArray<Interval>& ranges() const { return ranges_; }

  const GrowableArray<CidInterval>& class_ranges() const {
    return class_ranges_;
  }

  void DefineSelectorImplementationForInterval(classid_t cid,
                                               int16_t depth,
                                               const Interval& range,
                                               const Function* function);
  bool Finalize();

  int32_t CallCount() const { return selector_->call_count; }

  bool IsAllocated() const {
    return selector_->offset != SelectorMap::kInvalidSelectorOffset;
  }

  void AllocateAt(int32_t offset) {
    selector_->offset = offset;
  }

  void FillTable(ClassTable* class_table, const Array& entries);

 private:
  TableSelector* selector_;
  int32_t total_size_ = 0;

  GrowableArray<CidInterval> class_ranges_;
  GrowableArray<Interval> ranges_;
  Code& code_;
};
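
// Greedily packs selector rows into one global table. free_slots_ holds the
// disjoint, sorted intervals of table indices that are still unoccupied; a
// row fits at a given offset if every one of its ranges, shifted by that
// offset, lands inside a single free slot.
//
// Worked example (follows from FitInFreeSlot below): starting from
// free_slots_ = {[0, INT_MAX)}, fitting a row whose only range is [2, 5) at
// offset 10 occupies [12, 15) and leaves free_slots_ = {[0, 12), [15, INT_MAX)}.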
class RowFitter {
 public:
  RowFitter() : first_slot_index_(0) { free_slots_.Add(Interval(0, INT_MAX)); }

  // Attempts to fit the row at the given offset. On success the occupied
  // entries are removed from the free slots; on failure *next_offset is set
  // to the next offset worth trying.
  bool TryFit(SelectorRow* row, int32_t offset, int32_t* next_offset);

  // Fits the row at the first offset in [min_offset, max_offset] where all of
  // its ranges land in free slots, and allocates the row there.
  void FitAndAllocate(SelectorRow* row,
                      int32_t min_offset,
                      int32_t max_offset = INT32_MAX);

  int32_t TableSize() const { return free_slots_.Last().begin(); }

 private:
  intptr_t MoveForwardToCover(const Interval range, intptr_t slot_index);

  void UpdateFreeSlots(int32_t offset,
                       const GrowableArray<Interval>& ranges,
                       intptr_t slot_index);

  intptr_t FitInFreeSlot(const Interval range, intptr_t slot_index);

  GrowableArray<Interval> free_slots_;
  intptr_t first_slot_index_;
};

void SelectorRow::DefineSelectorImplementationForInterval(
    classid_t cid,
    int16_t depth,
    const Interval& range,
    const Function* function) {
  CidInterval cid_range(cid, depth, range, function);
  class_ranges_.Add(cid_range);
}

bool SelectorRow::Finalize() {
  if (class_ranges_.length() == 0) {
    return false;
  }

  // Compute a list of disjoint intervals that covers everything covered by
  // class_ranges_.
  for (intptr_t i = 0; i < class_ranges_.length(); i++) {
    ranges_.Add(class_ranges_[i].range());
  }

  struct IntervalSorter {
    static int Compare(const Interval* a, const Interval* b) {
      if (a->begin() != b->begin()) {
        return a->begin() - b->begin();
      }
      return b->length() - a->length();
    }
  };
  ranges_.Sort(IntervalSorter::Compare);

  // Merge adjacent intervals and drop contained ones, in place.
  intptr_t current_index = 0;
  intptr_t write_index = 1;
  intptr_t read_index = 1;
  for (; read_index < ranges_.length(); read_index++) {
    Interval& current_range = ranges_[current_index];
    Interval& next_range = ranges_[read_index];
    if (current_range.Contains(next_range)) {
      // Drop the contained interval.
    } else if (current_range.end() == next_range.begin()) {
      // Extend the current interval and drop the adjacent one.
      current_range.ExtendToIncludeInterval(next_range);
    } else {
      // Keep the interval.
      if (read_index != write_index) {
        ranges_[write_index] = ranges_[read_index];
      }
      current_index = write_index;
      write_index++;
    }
  }
  ranges_.TruncateTo(write_index);

  // The result should be sorted and disjoint.
  for (intptr_t i = 0; i < ranges_.length() - 1; i++) {
    ASSERT(ranges_[i].IsBefore(ranges_[i + 1]));
  }

  for (intptr_t i = 0; i < ranges_.length(); i++) {
    total_size_ += ranges_[i].length();
  }

  return true;
}

void SelectorRow::FillTable(ClassTable* class_table, const Array& entries) {
  // Fill in the table entries from shallowest to deepest defining class, so
  // that deeper (more specific) definitions override shallower ones.
  struct IntervalSorter {
    static int Compare(const CidInterval* a, const CidInterval* b) {
      ASSERT(a == b || a->depth() != b->depth() ||
             !a->range().Overlap(b->range()));
      return a->depth() - b->depth();
    }
  };
  class_ranges_.Sort(IntervalSorter::Compare);

  for (intptr_t i = 0; i < class_ranges_.length(); i++) {
    const CidInterval& cid_range = class_ranges_[i];
    const Interval& range = cid_range.range();
    const Function* function = cid_range.function();
    // Rows registered without a function (e.g. for null-called selectors)
    // leave their entries untouched.
    if (function != nullptr) {
      code_ = function->CurrentCode();
      for (classid_t cid = range.begin(); cid < range.end(); cid++) {
        entries.SetAt(selector()->offset + cid, code_);
      }
    }
  }
}

void RowFitter::FitAndAllocate(SelectorRow* row,
                               int32_t min_offset,
                               int32_t max_offset) {
  if (row->IsAllocated()) {
    return;
  }

  int32_t next_offset;
  int32_t offset = min_offset;
  while (offset <= max_offset && !TryFit(row, offset, &next_offset)) {
    offset = next_offset;
  }
  if (offset <= max_offset) {
    row->AllocateAt(offset);
  }
}

bool RowFitter::TryFit(SelectorRow* row, int32_t offset, int32_t* next_offset) {
  const GrowableArray<Interval>& ranges = row->ranges();

  Interval first_range(ranges[0].begin() + offset, ranges[0].end() + offset);
  if (first_slot_index_ > 0 &&
      free_slots_[first_slot_index_ - 1].end() >= first_range.end()) {
    // The cached slot index is too far ahead for this offset; start over.
    first_slot_index_ = 0;
  }
  first_slot_index_ = MoveForwardToCover(first_range, first_slot_index_);
  intptr_t slot_index = first_slot_index_;

  for (intptr_t index = 0; index < ranges.length(); index++) {
    const Interval range(ranges[index].begin() + offset,
                         ranges[index].end() + offset);
    slot_index = MoveForwardToCover(range, slot_index);
    ASSERT(slot_index < free_slots_.length());
    const Interval slot = free_slots_[slot_index];
    ASSERT(slot.end() >= range.end());
    if (slot.begin() > range.begin()) {
      *next_offset = offset + slot.begin() - range.begin();
      return false;
    }
  }

  UpdateFreeSlots(offset, ranges, first_slot_index_);
  return true;
}

intptr_t RowFitter::MoveForwardToCover(const Interval range,
                                       intptr_t slot_index) {
  while (free_slots_[slot_index].end() < range.end()) {
    slot_index++;
  }
  return slot_index;
}

void RowFitter::UpdateFreeSlots(int32_t offset,
                                const GrowableArray<Interval>& ranges,
                                intptr_t slot_index) {
  for (intptr_t i = 0; i < ranges.length(); i++) {
    ASSERT(slot_index < free_slots_.length());
    const Interval range(ranges[i].begin() + offset, ranges[i].end() + offset);

    ASSERT(!free_slots_[slot_index].IsAfter(range));
    slot_index = MoveForwardToCover(range, slot_index);

    // The row was known to fit, so the slot must fully contain the range.
    ASSERT(slot_index < free_slots_.length());
    ASSERT(free_slots_[slot_index].Contains(range));

    slot_index = FitInFreeSlot(range, slot_index);
  }

  // All remaining free slots must still be non-empty.
  for (intptr_t i = 0; i < free_slots_.length(); i++) {
    ASSERT(free_slots_[i].begin() < free_slots_[i].end());
  }
}

intptr_t RowFitter::FitInFreeSlot(const Interval range, intptr_t slot_index) {
  const Interval& slot = free_slots_[slot_index];
  ASSERT(slot.Contains(range));
  if (slot.begin() < range.begin()) {
    const Interval free_before(slot.begin(), range.begin());
    if (slot.end() > range.end()) {
      // The range splits the slot in two.
      const Interval free_after(range.end(), slot.end());
      free_slots_[slot_index] = free_before;
      free_slots_.InsertAt(slot_index + 1, free_after);
    } else {
      // The range covers the tail of the slot.
      free_slots_[slot_index] = free_before;
      slot_index++;
    }
  } else if (slot.end() <= range.end()) {
    // The range covers the slot exactly.
    ASSERT(slot.IsSame(range));
    free_slots_.EraseAt(slot_index);
  } else {
    // The range covers the head of the slot.
    const Interval free_after(range.end(), slot.end());
    free_slots_[slot_index] = free_after;
  }
  return slot_index;
}
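
// SelectorMap maps interface targets to table selector ids. Getters and
// method extractors share the getter selector id; methods and setters share
// the method-or-setter selector id (see SelectorId below).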
int32_t SelectorMap::SelectorId(const Function& interface_target) const {
  kernel::ProcedureAttributesMetadata metadata;
  metadata = kernel::ProcedureAttributesOf(interface_target, Z);
  return interface_target.IsGetterFunction() ||
                 interface_target.IsImplicitGetterFunction() ||
                 interface_target.IsMethodExtractor()
             ? metadata.getter_selector_id
             : metadata.method_or_setter_selector_id;
}

const TableSelector* SelectorMap::GetSelector(
    const Function& interface_target) const {
  const int32_t sid = SelectorId(interface_target);
  if (sid == kInvalidSelectorId) return nullptr;
  const TableSelector* selector = &selectors_[sid];
  if (!selector->IsUsed()) return nullptr;
  if (selector->offset == kInvalidSelectorOffset) return nullptr;
  return selector;
}

void SelectorMap::AddSelector(int32_t call_count,
                              bool called_on_null,
                              bool torn_off) {
  const int32_t added_sid = selectors_.length();
  selectors_.Add(TableSelector(added_sid, call_count, kInvalidSelectorOffset,
                               called_on_null, torn_off));
}

void SelectorMap::SetSelectorProperties(int32_t sid,
                                        bool on_null_interface,
                                        bool requires_args_descriptor) {
  ASSERT(sid < selectors_.length());
  selectors_[sid].on_null_interface |= on_null_interface;
  selectors_[sid].requires_args_descriptor |= requires_args_descriptor;
}
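
// DispatchTableGenerator pipeline: read the selector metadata emitted by
// gen_kernel (TFA), number the selectors, build one SelectorRow per selector,
// pack the rows into table offsets, and finally emit the code array.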
DispatchTableGenerator::DispatchTableGenerator(Zone* zone)
    : zone_(zone), selector_map_(zone) {}

void DispatchTableGenerator::Initialize(ClassTable* table) {
  classes_ = table;

  ReadTableSelectorInfo();
  NumberSelectors();
  SetupSelectorRows();
  ComputeSelectorOffsets();
}

void DispatchTableGenerator::ReadTableSelectorInfo() {
  const auto& object_class = Class::Handle(Z, classes_->At(kInstanceCid));
  const auto& program_info =
      KernelProgramInfo::Handle(Z, object_class.KernelProgramInfo());
  kernel::TableSelectorMetadata* metadata =
      kernel::TableSelectorMetadataForProgram(program_info, Z);
  // The metadata is only emitted by an AOT-mode gen_kernel run with TFA.
  if (metadata == nullptr) {
    FATAL(
        "Missing table selector metadata!\n"
        "Probably gen_kernel was run in non-AOT mode or without TFA.\n");
  }
  for (intptr_t i = 0; i < metadata->selectors.length(); i++) {
    const kernel::TableSelectorInfo* info = &metadata->selectors[i];
    selector_map_.AddSelector(info->call_count, info->called_on_null,
                              info->torn_off);
  }
}

void DispatchTableGenerator::NumberSelectors() {
  num_classes_ = classes_->NumCids();

  Object& obj = Object::Handle(Z);
  Class& klass = Class::Handle(Z);
  Array& functions = Array::Handle(Z);
  Function& function = Function::Handle(Z);

  for (classid_t cid = kInstanceCid; cid < num_classes_; cid++) {
    obj = classes_->At(cid);
    if (obj.IsClass()) {
      klass = Class::RawCast(obj.ptr());
      functions = klass.current_functions();
      if (!functions.IsNull()) {
        for (intptr_t j = 0; j < functions.Length(); j++) {
          function ^= functions.At(j);
          if (function.IsDynamicFunction(false)) {
            const bool on_null_interface = klass.IsObjectClass();
            const bool requires_args_descriptor =
                function.PrologueNeedsArgumentsDescriptor();
            const int32_t sid = selector_map_.SelectorId(function);
            if (sid == SelectorMap::kInvalidSelectorId) {
              // Probably gen_kernel was run in non-AOT mode or without TFA.
              FATAL("Function has no assigned selector ID.\n");
            }
            selector_map_.SetSelectorProperties(sid, on_null_interface,
                                                requires_args_descriptor);
          }
        }
      }
    }
  }

  num_selectors_ = selector_map_.NumIds();
}
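
// SetupSelectorRows walks the class hierarchy: it records each class's parent
// and concreteness, computes hierarchy depths, derives the contiguous ranges
// of concrete-subclass cids for every class, and then registers each dynamic
// function (and, for torn-off selectors, its method extractor) as an
// implementation interval on the corresponding selector row.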
void DispatchTableGenerator::SetupSelectorRows() {
  Object& obj = Object::Handle(Z);
  Class& klass = Class::Handle(Z);
  Array& functions = Array::Handle(Z);
  Function& function = Function::Handle(Z);

  // For each class, record its parent cid and whether it is concrete.
  std::unique_ptr<classid_t[]> parent_cids(new classid_t[num_classes_]);
  std::unique_ptr<bool[]> is_concrete_class(new bool[num_classes_]);
  for (classid_t cid = 0; cid < num_classes_; cid++) {
    classid_t parent_cid = kIllegalCid;
    bool concrete = false;
    if (cid > kIllegalCid) {
      obj = classes_->At(cid);
      if (obj.IsClass()) {
        klass = Class::RawCast(obj.ptr());
        concrete = !klass.is_abstract();
        klass = klass.SuperClass();
        if (!klass.IsNull()) {
          parent_cid = klass.id();
        }
      }
    }
    parent_cids[cid] = parent_cid;
    is_concrete_class[cid] = concrete;
  }

  // Compute each class's depth in the hierarchy.
  std::unique_ptr<int16_t[]> cid_depth(new int16_t[num_classes_]);
  for (classid_t cid = 0; cid < num_classes_; cid++) {
    int16_t depth = 0;
    classid_t pcid = cid;
    while (pcid != kIllegalCid) {
      pcid = parent_cids[pcid];
      depth++;
    }
    cid_depth[cid] = depth;
  }

  // Compute the contiguous ranges of concrete-subclass cids for every class.
  // (Assumed structure) Track the start of the current run of covered cids
  // and emit an Interval each time the run ends.
  std::unique_ptr<GrowableArray<Interval>[]> cid_subclass_ranges(
      new GrowableArray<Interval>[num_classes_]());
  for (classid_t cid = 0; cid < num_classes_; cid++) {
    classid_t start = kIllegalCid;
    for (classid_t sub_cid = 0; sub_cid < num_classes_; sub_cid++) {
      // Is sub_cid a subclass of cid?
      classid_t pcid = sub_cid;
      while (pcid != kIllegalCid && pcid != cid) {
        pcid = parent_cids[pcid];
      }
      const bool is_subclass = cid == pcid;
      const bool in_range = is_subclass && is_concrete_class[sub_cid];
      if (in_range && start == kIllegalCid) {
        start = sub_cid;
      } else if (!in_range && start != kIllegalCid) {
        Interval range(start, sub_cid);
        cid_subclass_ranges[cid].Add(range);
        start = kIllegalCid;
      }
    }
    if (start != kIllegalCid) {
      Interval range(start, num_classes_);
      cid_subclass_ranges[cid].Add(range);
    }
  }

  // Create and initialize the rows, one per selector.
  SelectorRow* selector_rows = Z->Alloc<SelectorRow>(num_selectors_);
  for (intptr_t i = 0; i < num_selectors_; i++) {
    TableSelector* selector = &selector_map_.selectors_[i];
    new (&selector_rows[i]) SelectorRow(Z, selector);
    if (selector->called_on_null && !selector->on_null_interface) {
      // The selector is invoked on null but not defined on Object; give the
      // row an interval for the Null cid (arguments assumed).
      selector_rows[i].DefineSelectorImplementationForInterval(
          kNullCid, 0, Interval(kNullCid, kNullCid + 1), nullptr);
    }
  }

  // Add implementation intervals for all dynamic functions, and for the
  // method extractors of torn-off selectors.
  for (classid_t cid = kInstanceCid; cid < num_classes_; cid++) {
    obj = classes_->At(cid);
    if (obj.IsClass()) {
      klass = Class::RawCast(obj.ptr());
      GrowableArray<Interval>& subclass_cid_ranges = cid_subclass_ranges[cid];

      functions = klass.current_functions();
      if (!functions.IsNull()) {
        const int16_t depth = cid_depth[cid];
        for (intptr_t j = 0; j < functions.Length(); j++) {
          function ^= functions.At(j);
          if (function.IsDynamicFunction(false)) {
            const int32_t sid = selector_map_.SelectorId(function);

            if (sid != SelectorMap::kInvalidSelectorId) {
              auto MakeIntervals = [&](const Function& function, int32_t sid) {
                // A function handle that survives until the table is built.
                auto& function_handle = Function::ZoneHandle(Z, function.ptr());

                for (intptr_t i = 0; i < subclass_cid_ranges.length(); i++) {
                  Interval& subclass_cid_range = subclass_cid_ranges[i];
                  selector_rows[sid].DefineSelectorImplementationForInterval(
                      cid, depth, subclass_cid_range, &function_handle);
                }
              };
              MakeIntervals(function, sid);

              if (selector_map_.selectors_[sid].torn_off) {
                // Also fill the row of the corresponding method extractor
                // (tear-off getter).
                const String& method_name = String::Handle(Z, function.name());
                const String& getter_name =
                    String::Handle(Z, Field::GetterName(method_name));
                const Function& tearoff = Function::Handle(
                    Z, function.GetMethodExtractor(getter_name));
                const int32_t tearoff_sid = selector_map_.SelectorId(tearoff);

                if (tearoff_sid != SelectorMap::kInvalidSelectorId) {
                  MakeIntervals(tearoff, tearoff_sid);
                }
              }
            }
          }
        }
      }
    }
  }

  // Keep only the rows of used selectors that ended up with at least one
  // implementation interval.
  for (intptr_t i = 0; i < num_selectors_; i++) {
    const TableSelector& selector = selector_map_.selectors_[i];
    if (selector.IsUsed() && selector_rows[i].Finalize()) {
      table_rows_.Add(&selector_rows[i]);
    }
  }
}
void DispatchTableGenerator::ComputeSelectorOffsets() {
  ASSERT(table_rows_.length() > 0);

  RowFitter fitter;

  // Sort the table rows by popularity, descending.
  struct PopularitySorter {
    static int Compare(SelectorRow* const* a, SelectorRow* const* b) {
      return (*b)->CallCount() - (*a)->CallCount();
    }
  };
  table_rows_.Sort(PopularitySorter::Compare);

  // Try to allocate at the optimal offset.
  const int32_t optimal_offset = DispatchTable::kOriginElement;
  for (intptr_t i = 0; i < table_rows_.length(); i++) {
    fitter.FitAndAllocate(table_rows_[i], optimal_offset, optimal_offset);
  }

  // Sort the table rows by popularity / size, descending.
  struct PopularitySizeRatioSorter {
    static int Compare(SelectorRow* const* a, SelectorRow* const* b) {
      return (*b)->CallCount() * (*a)->total_size() -
             (*a)->CallCount() * (*b)->total_size();
    }
  };
  table_rows_.Sort(PopularitySizeRatioSorter::Compare);

  // Try to allocate at small offsets.
  const int32_t max_offset = DispatchTable::kLargestSmallOffset;
  for (intptr_t i = 0; i < table_rows_.length(); i++) {
    fitter.FitAndAllocate(table_rows_[i], 0, max_offset);
  }

  // Sort the table rows by size, descending.
  struct SizeSorter {
    static int Compare(SelectorRow* const* a, SelectorRow* const* b) {
      return (*b)->total_size() - (*a)->total_size();
    }
  };
  table_rows_.Sort(SizeSorter::Compare);

  // Allocate the remaining rows at large offsets.
  const int32_t min_large_offset = DispatchTable::kLargestSmallOffset + 1;
  for (intptr_t i = 0; i < table_rows_.length(); i++) {
    fitter.FitAndAllocate(table_rows_[i], min_large_offset);
  }

  table_size_ = fitter.TableSize();
}
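
// Builds the actual table: an Array with one Code entry per (selector offset
// + class id) slot, filled row by row.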
ArrayPtr DispatchTableGenerator::BuildCodeArray() {
  auto& entries = Array::Handle(zone_, Array::New(table_size_, Heap::kOld));
  for (intptr_t i = 0; i < table_rows_.length(); i++) {
    table_rows_[i]->FillTable(classes_, entries);
  }
  entries.MakeImmutable();
  return entries.ptr();
}

}  // namespace compiler
}  // namespace dart

#endif  // defined(DART_PRECOMPILER)