DEFINE_FLAG(int,
            deoptimization_counter_inlining_threshold,
            12,
            "How many times we allow deoptimization before we stop inlining.");
DEFINE_FLAG(int,
            inline_getters_setters_smaller_than,
            10,
            "Always inline getters and setters that have fewer instructions");
DEFINE_FLAG(int,
            inlining_depth_threshold,
            6,
            "Inline function calls up to threshold nesting depth");
DEFINE_FLAG(int,
            inlining_size_threshold,
            25,
            "Always inline functions that have threshold or fewer instructions");
DEFINE_FLAG(int,
            inlining_callee_call_sites_threshold,
            1,
            "Always inline functions containing threshold or fewer calls.");
DEFINE_FLAG(int,
            inlining_callee_size_threshold,
            160,
            "Do not inline callees larger than threshold");
DEFINE_FLAG(int,
            inlining_small_leaf_size_threshold,
            50,
            "Do not inline leaf callees larger than threshold");
DEFINE_FLAG(int,
            inlining_caller_size_threshold,
            50000,
            "Stop inlining once caller reaches the threshold.");
DEFINE_FLAG(int,
            inlining_hotness,
            10,
            "Inline only hotter calls, in percents (0 .. 100); "
            "default 10%: calls above-equal 10% of max-count are inlined.");
DEFINE_FLAG(int,
            inlining_recursion_depth_threshold,
            1,
            "Inline recursive function calls up to threshold recursion depth.");
DEFINE_FLAG(int,
            max_inlined_per_depth,
            500,
            "Max. number of inlined calls per depth");
DEFINE_FLAG(bool, print_inlining_tree, false, "Print inlining tree");
#define TRACE_INLINING(statement)                                              \
  do {                                                                         \
    if (trace_inlining()) statement;                                           \
  } while (false)

#define PRINT_INLINING_TREE(comment, caller, target, instance_call)            \
  do {                                                                         \
    if (FLAG_print_inlining_tree) {                                            \
      inlined_info_.Add(InlinedInfo(caller, target, inlining_depth_,           \
                                    instance_call, comment));                  \
    }                                                                          \
  } while (false)
  while (env != nullptr) {
       !block_it.Done(); block_it.Advance()) {
    if (current->IsBranch()) {
      current = current->AsBranch()->comparison();
    }
    ASSERT(!current->IsTailCall());
    ASSERT(!current->IsLoadIndexedUnsafe());
    ASSERT(!current->IsStoreIndexedUnsafe());
    call_site_count_ = 0;
    instruction_count_ = 0;
         block_it.Advance()) {
      const intptr_t block_id = block_it.Current()->block_id();
        if (current->IsRedefinition()) {
        if (current->IsUnboxedConstant()) {
        ++instruction_count_;
        if (current->IsAllocateObject()) {
        if (current->IsInstanceCall() || current->IsStaticCall() ||
            current->IsClosureCall()) {
        if (current->IsPolymorphicInstanceCall()) {
          PolymorphicInstanceCallInstr* call =
              current->AsPolymorphicInstanceCall();
          if (!call->IsSureToCallSingleRecognizedTarget() &&
              (call->token_kind() != Token::kEQ)) {

  intptr_t call_site_count_;
  intptr_t instruction_count_;
              const intptr_t depth,
      : caller(caller_function),

  switch (nesting_depth) {
template <typename CallType>
struct CallInfo {

  CallSites(intptr_t threshold,
            GrowableArray<CallInfo<InstanceCallInstr>>* calls)
      : inlining_depth_threshold_(threshold),

    return instance_calls_;

    return static_calls_;

    return closure_calls_;

  bool HasCalls() const {
    return !(static_calls_.is_empty() && closure_calls_.is_empty() &&
             instance_calls_.is_empty());
  }

  intptr_t NumCalls() const {
    return instance_calls_.length() + static_calls_.length() +
           closure_calls_.length();
  }

  void Clear() {
    static_calls_.Clear();
    closure_calls_.Clear();
    instance_calls_.Clear();
  }
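  // CallSites buckets the inlining candidates found in a graph by call kind
  // (static, closure, polymorphic instance, and not-yet-devirtualized
  // instance calls). HasCalls() and NumCalls() above aggregate across the
  // buckets, and Clear() resets them between inlining rounds.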
  template <typename CallType>
  static intptr_t ComputeMaxCallCount(
      const GrowableArray<CallInfo<CallType>>& calls,
      intptr_t start_index) {
    intptr_t max_count = 0;
    for (intptr_t i = start_index; i < calls.length(); ++i) {
      const auto count = calls[i].call_count;
      if (count > max_count) {
  template <typename CallType>
  static void ComputeCallRatio(GrowableArray<CallInfo<CallType>>& calls,
                               intptr_t start_index,
                               intptr_t max_count) {
    for (intptr_t i = start_index; i < calls.length(); ++i) {
      calls[i].ratio = static_cast<double>(calls[i].call_count) / max_count;
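  // Illustrative example: if the hottest candidate in the range was observed
  // 200 times, a call observed 50 times gets ratio 50/200 = 0.25. The ratio
  // is later compared against FLAG_inlining_hotness (ratio * 100 below the
  // threshold means "too cold to inline"), so with the default of 10 this
  // call would still qualify.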
  void ComputeCallSiteRatio(intptr_t static_calls_start_ix,
                            intptr_t instance_calls_start_ix,
                            intptr_t calls_start_ix) {
    intptr_t max_count = 0;
    if (max_count == 0) {
         block_it.Advance()) {
      if (current->IsPolymorphicInstanceCall()) {
        PolymorphicInstanceCallInstr* instance_call =
            current->AsPolymorphicInstanceCall();
        call = instance_call;
      } else if (current->IsStaticCall()) {
      } else if (current->IsClosureCall()) {
      if (call != nullptr) {
  template <typename CallType>
  static void PruneRemovedCallsIn(GrowableArray<CallInfo<CallType>>* arr) {
    for (intptr_t i = 0; i < arr->length(); i++) {
      if ((*arr)[i].call->previous() != nullptr) {
        (*arr)[j] = (*arr)[i];
    auto add_to_worklist = [&](Definition* defn) {
      ASSERT(defn->HasSSATemp());
      const auto ssa_index = defn->ssa_temp_index();
      if (ssa_index < processed.length() && !processed.Contains(ssa_index)) {
        processed.Add(ssa_index);

    auto add_transitive_dependencies_to_worklist = [&](intptr_t from_index) {
      for (intptr_t i = from_index; i < worklist.length(); i++) {
        for (auto input : defn->inputs()) {
          add_to_worklist(input);
        }
        ASSERT(defn->ArgumentCount() == 0 || !defn->HasMoveArguments());

    for (auto& call_info : *calls_) {
      if (call_info.call->HasSSATemp()) {
        add_to_worklist(call_info.call);

    add_transitive_dependencies_to_worklist(0);
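    // The two lambdas above implement a simple worklist algorithm: seed the
    // worklist with every instance call that produces an SSA value, then walk
    // it, pushing each definition's inputs so that a call and the whole chain
    // of definitions feeding it are visited exactly once (the `processed`
    // bitvector keeps the membership check cheap).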
    bool changed = false;
    intptr_t last_unhandled_call_index = calls_->length() - 1;
      if (worklist.length() == last_unhandled_call_index) {
        call_info = &(*calls_)[last_unhandled_call_index];
        last_unhandled_call_index--;
      if (defn->HasUnmatchedInputRepresentations() &&
      auto replacement = defn->Canonicalize(graph);
      if (replacement != defn) {
        if (replacement != nullptr) {
          defn->ReplaceUsesWith(replacement);
          if (replacement->ssa_temp_index() == -1) {
          if (add_to_worklist(replacement)) {
            add_transitive_dependencies_to_worklist(worklist.length() - 1);
          const bool newly_inserted =
              replacement->ssa_temp_index() >= processed.length();
          if (call_info != nullptr && replacement->IsStaticCall() &&
              HandleDevirtualization(call_info,
        if (auto phi = defn->AsPhi()) {
          phi->UnuseAllInputs();
          phi->block()->RemovePhi(phi);
        defn->RemoveFromGraph();
  void FindCallSites(FlowGraph* graph,
                     intptr_t depth,
                     GrowableArray<InlinedInfo>* inlined_info) {
    if (depth > inlining_depth_threshold_) {
      if (FLAG_print_inlining_tree) {

    const bool inline_only_profitable_methods =
        (depth >= inlining_depth_threshold_);

    const intptr_t instance_calls_start_ix = instance_calls_.length();
    const intptr_t static_calls_start_ix = static_calls_.length();
    const intptr_t calls_start_ix = calls_->length();
         block_it.Advance()) {
        if (auto instance_call = current->AsPolymorphicInstanceCall()) {
          if (!inline_only_profitable_methods ||
              instance_call->IsSureToCallSingleRecognizedTarget() ||
              instance_call->HasOnlyDispatcherOrImplicitAccessorTargets()) {
            instance_calls_.Add({graph, instance_call, depth, nesting_depth});
            if (FLAG_print_inlining_tree) {
                                           instance_call, "Too deep"));
        } else if (auto call = current->AsInstanceCall()) {
          calls_->Add({graph, call, depth, nesting_depth});
        } else if (auto static_call = current->AsStaticCall()) {
          HandleStaticCall(static_call, inline_only_profitable_methods, graph,
                           depth, nesting_depth, inlined_info);
        } else if (auto closure_call = current->AsClosureCall()) {
          if (!inline_only_profitable_methods) {
            closure_calls_.Add({graph, closure_call, depth, nesting_depth});
                        bool inline_only_profitable_methods,
                        intptr_t nesting_depth,
    if (!inline_only_profitable_methods || function.IsRecognized() ||
        function.IsDispatcherOrImplicitAccessor() ||
      static_calls_.Add({graph, static_call, depth, nesting_depth});
    } else if (inlined_info != nullptr) {
      if (FLAG_print_inlining_tree) {
        const Function* caller = &graph->function();
            InlinedInfo(caller, target, depth + 1, static_call, "Too deep"));
  bool HandleDevirtualization(CallInfo<InstanceCallInstr>* call_info,
                              StaticCallInstr* static_call) {
    const bool inline_only_profitable_methods =
        (call_info->call_depth >= inlining_depth_threshold_);
    if (HandleStaticCall(static_call, inline_only_profitable_methods,
                         call_info->caller_graph, call_info->call_depth,
                         call_info->nesting_depth,
      static_calls_.Last().ratio = call_info->ratio;
  intptr_t inlining_depth_threshold_;
  GrowableArray<CallInfo<StaticCallInstr>> static_calls_;
  GrowableArray<CallInfo<ClosureCallInstr>> closure_calls_;
  GrowableArray<CallInfo<PolymorphicInstanceCallInstr>> instance_calls_;
  GrowableArray<CallInfo<InstanceCallInstr>>* calls_;
static bool IsSmallLeafOrReduction(int inlining_depth,
                                   intptr_t call_site_instructions,
                                   FlowGraph* graph) {
  intptr_t instruction_count = 0;
  intptr_t call_count = 0;
       block_it.Advance()) {
      if (current->IsDartReturn()) continue;
      ASSERT(!current->IsNativeReturn());
      if (current->IsInstanceCall() || current->IsPolymorphicInstanceCall() ||
          current->IsClosureCall()) {
      if (current->IsStaticCall()) {
        const intptr_t inl_size = function.optimized_instruction_count();
        const bool always_inline =
        if (always_inline || function.IsRecognized()) {
          if (!always_inline) {
            const intptr_t kAvgListedMethodSize = 20;
                (inl_size == 0 ? kAvgListedMethodSize : inl_size);
        instruction_count += current->AsStaticCall()->ArgumentCount();
        instruction_count += 1;
  if (call_count > 0) {
    return instruction_count <= call_site_instructions;
  }
  return instruction_count <= FLAG_inlining_small_leaf_size_threshold;
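// A hedged reading of the predicate above: a callee qualifies either as a
// "reduction" whose body (after discounting always-inlined callees) is no
// larger than the call site it replaces, or as a leaf whose size stays under
// FLAG_inlining_small_leaf_size_threshold when it makes no residual calls.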
  intptr_t AllocateBlockId() const;
  inline bool trace_inlining() const;

  const intptr_t num_variants_;
static bool IsAThisCallThroughAnUncheckedEntryPoint(Definition* call) {
  if (auto instance_call = call->AsInstanceCallBase()) {
    return (instance_call->entry_kind() == Code::EntryKind::kUnchecked) &&
           instance_call->is_call_on_this();
static bool CalleeParameterTypeMightBeMoreSpecific(
    BitVector* is_generic_covariant_impl,
    const FunctionType& interface_target_signature,
    const FunctionType& callee_signature,
    intptr_t first_arg_index,
    intptr_t arg_index) {
  if (arg_index > first_arg_index && is_generic_covariant_impl != nullptr &&
      is_generic_covariant_impl->Contains(arg_index - first_arg_index)) {
    const intptr_t param_index = arg_index - first_arg_index;
    const intptr_t num_named_params =
    const intptr_t num_params = callee_signature.NumParameters();
    if (num_named_params == 0 &&

    intptr_t interface_target_param_index = param_index;
    if (num_named_params > 0 &&
        (num_params - num_named_params) <= param_index) {
      interface_target_param_index = -1;
          interface_target_param_index = i;
    if (interface_target_param_index == -1) {
            interface_target_param_index));
    if (interface_target_parameter_type.ptr() != callee_parameter_type.ptr()) {
  const bool is_polymorphic = call_data->call->IsPolymorphicInstanceCall();
  const bool no_checks =
  ASSERT(is_polymorphic == (target_info != nullptr));

  BitVector* is_generic_covariant_impl = nullptr;
        call_data->call->AsInstanceCallBase()->interface_target();
    callee_signature = callee.signature();
    interface_target_signature = interface_target.signature();
    if (interface_target.signature() != callee.signature()) {
      BitVector is_covariant(zone, num_params);
      is_generic_covariant_impl = new (zone) BitVector(zone, num_params);
                              is_generic_covariant_impl);
  intptr_t first_arg_stub_index = 0;
    ASSERT(first_arg_index == 0);
    first_arg_stub_index = 1;
  for (intptr_t i = 0; i < arguments->length(); ++i) {
    Value* actual = (*arguments)[i];
    const bool is_polymorphic_receiver =
        (is_polymorphic && (i == first_arg_index));
    if (actual == nullptr) {
      ASSERT(!is_polymorphic_receiver);
    if (is_polymorphic_receiver ||
            is_generic_covariant_impl, interface_target_signature,
            callee_signature, first_arg_index, i)) {
      if (is_polymorphic_receiver && target_info->IsSingleCid()) {
          ->ReplaceUsesWith(defn);
  for (intptr_t i = 0; i < defns->length(); ++i) {
    if (constant != nullptr && constant->HasUses()) {

  for (intptr_t i = 0; i < defns->length(); ++i) {
    auto defn = (*defns)[i];
    if (!defn->HasUses()) continue;
    if (auto constant = defn->AsConstant()) {
      constant->ReplaceUsesWith(caller_graph->GetConstant(
          constant->value(), constant->representation()));
    if (auto param = defn->AsParameter()) {
      param->ReplaceUsesWith(
  CallSiteInliner(FlowGraphInliner* inliner, intptr_t threshold)
      : caller_graph_(inliner->flow_graph()),
        initial_size_(inliner->flow_graph()->InstructionCount()),
        inlined_recursive_call_(false),
        inlining_recursion_depth_(0),
        inlining_depth_threshold_(threshold),
        collected_call_sites_(nullptr),
        inlining_call_sites_(nullptr),
  InliningDecision ShouldWeInline(const Function& callee,
                                  intptr_t instr_count,
                                  intptr_t call_site_count) {
    } else if (inlined_size_ > FLAG_inlining_caller_size_threshold) {
    } else if (instr_count > FLAG_inlining_callee_size_threshold) {

    const int callee_inlining_depth = callee.inlining_depth();
    if (callee_inlining_depth > 0 &&
        ((callee_inlining_depth + inlining_depth_) >
         FLAG_inlining_depth_threshold)) {

    if (instr_count == 0) {
    } else if (instr_count <= FLAG_inlining_size_threshold) {
    } else if (call_site_count <= FLAG_inlining_callee_call_sites_threshold) {
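  // A hedged reading of the ladder above: the caller-size, callee-size and
  // combined-depth checks are hard rejections, while a small body
  // (<= FLAG_inlining_size_threshold) or few call sites
  // (<= FLAG_inlining_callee_call_sites_threshold) makes the callee an
  // "always inline" candidate; instr_count == 0 is handled separately
  // because it means the callee's size has not been computed yet.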
    if (inlining_depth_threshold_ < 1) return;
    if (caller_graph_->function().deoptimization_counter() >=
        FLAG_deoptimization_counter_inlining_threshold) {

    CallSites sites1(inlining_depth_threshold_, &calls);
    CallSites sites2(inlining_depth_threshold_, &calls);

    collected_call_sites_ = &sites1;
    inlining_call_sites_ = &sites2;

    collected_call_sites_->FindCallSites(caller_graph_, inlining_depth_,
    while (collected_call_sites_->HasCalls()) {
          THR_Print("  Depth %" Pd " ----------\n", inlining_depth_));
      if (FLAG_print_inlining_tree) {
            inlining_depth_, collected_call_sites_->NumCalls(),
            static_cast<intptr_t>(FLAG_max_inlined_per_depth));
      if (collected_call_sites_->NumCalls() > FLAG_max_inlined_per_depth) {

      call_sites_temp = collected_call_sites_;
      collected_call_sites_ = inlining_call_sites_;
      inlining_call_sites_ = call_sites_temp;
      collected_call_sites_->Clear();

      bool inlined_instance = InlineInstanceCalls();
      bool inlined_statics = InlineStaticCalls();
      bool inlined_closures = InlineClosureCalls();
      if (inlined_instance || inlined_statics || inlined_closures) {

      if (inlined_recursive_call_) {
        ++inlining_recursion_depth_;
        inlined_recursive_call_ = false;

    collected_call_sites_ = nullptr;
    inlining_call_sites_ = nullptr;
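    // Two CallSites instances are ping-ponged per depth: collected_call_sites_
    // accumulates candidates discovered while inlining at the current depth,
    // then the pointers are swapped so the freshly collected sites become
    // inlining_call_sites_ for the next loop iteration. Each iteration of the
    // while-loop therefore advances one inlining depth until no calls remain
    // or a threshold stops it.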
  double GrowthFactor() const {
    return static_cast<double>(inlined_size_) /
           static_cast<double>(initial_size_);
  }

    if (constant != nullptr) {
    param->UpdateType(*argument->Type());
  bool TryInlining(const Function& function,
                   const Array& argument_names,
                   InlinedCallData* call_data,
                   bool stricter_heuristic) {
    if (thread()->compiler_timings() != nullptr) {
                              stricter_heuristic);
    if (thread()->compiler_timings() != nullptr) {
  bool TryInliningImpl(const Function& function,
                       const Array& argument_names,
                       InlinedCallData* call_data,
                       bool stricter_heuristic) {
                   function.deoptimization_counter());
          " Bailout: not inlinable due to !function.CanBeInlined()\n"));

    if (function.deoptimization_counter() >=
        FLAG_max_deoptimization_counter_threshold) {

    const intptr_t constant_arg_count = CountConstants(*arguments);
    const intptr_t instruction_count =
        constant_arg_count == 0 ? function.optimized_instruction_count() : 0;
    const intptr_t call_site_count =
        constant_arg_count == 0 ? function.optimized_call_site_count() : 0;
    if (!decision.value) {
        THR_Print(" Bailout: early heuristics (%s) with "
                  "code size: %" Pd ", "
                  "call sites: %" Pd ", "
                  "inlining depth of callee: %d, "
                  "const args: %" Pd "\n",
                  decision.reason, instruction_count, call_site_count,
                  function.inlining_depth(), constant_arg_count));

    if ((function.HasOptionalPositionalParameters() ||
         function.HasOptionalNamedParameters()) &&
            arguments->length(), argument_names,

    if (is_recursive_call &&
        inlining_recursion_depth_ >= FLAG_inlining_recursion_depth_threshold) {

    if (setjmp(*jump.Set()) == 0) {
      function.RestoreICDataMap(ic_data_array, clone_ic_data);
          GetParsedFunction(function, &in_cache);

        entry_kind = instr->entry_kind();
      } else if (InstanceCallInstr* instr =
                     call_data->call->AsInstanceCall()) {
        entry_kind = instr->entry_kind();
      } else if (PolymorphicInstanceCallInstr* instr =
                     call_data->call->AsPolymorphicInstanceCall()) {
        entry_kind = instr->entry_kind();
      } else if (call_data->call->IsClosureCall()) {

            parsed_function, ic_data_array, nullptr,
            entry_kind == Code::EntryKind::kUnchecked);
        callee_graph = builder.BuildGraph();

        FlowGraphChecker(callee_graph, callee_inline_id_to_function)
            .Check("Builder (callee)");
#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)

      const intptr_t inlined_type_args_param = function.IsGeneric() ? 1 : 0;
      const intptr_t num_inlined_params =
          inlined_type_args_param + function.NumParameters();

      if (first_actual_param_index > 0) {
      } else if (inlined_type_args_param > 0) {

      for (intptr_t i = 0; i < function.num_fixed_parameters(); ++i) {
            i, (*arguments)[first_actual_param_index + i], callee_graph));

      if (function.HasOptionalParameters()) {
        if (!AdjustForOptionalParameters(
                *parsed_function, first_actual_param_index, argument_names,
                arguments, param_stubs, callee_graph)) {

            first_actual_param_index + function.NumParameters());

      intptr_t try_index = call_block->try_index();
           !it.Done(); it.Advance()) {

        FlowGraphChecker(callee_graph, callee_inline_id_to_function)
            .Check("SSA (callee)");
          (FLAG_print_flow_graph || FLAG_print_flow_graph_optimized)) {
        THR_Print("Callee graph for inlining %s (unoptimized)\n",
                  function.ToFullyQualifiedCString());

#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
                                          inliner_->speculative_policy_);
                                          inliner_->speculative_policy_);
        state.call_specializer = &call_specializer;
                                          inliner_->speculative_policy_);
                                          inliner_->speculative_policy_);
        state.call_specializer = &call_specializer;

          (FLAG_print_flow_graph || FLAG_print_flow_graph_optimized)) {
        THR_Print("Callee graph for inlining %s (optimized)\n",
                  function.ToFullyQualifiedCString());
      intptr_t constants_count = 0;
      for (intptr_t i = 0, n = param_stubs->length(); i < n; ++i) {
        if ((*param_stubs)[i]->IsConstant()) ++constants_count;
      }

      intptr_t instruction_count = 0;
      intptr_t call_site_count = 0;
                       false, &instruction_count,

      if (!decision.value) {
        if ((instruction_count > FLAG_inlining_size_threshold) &&
            (call_site_count > FLAG_inlining_callee_call_sites_threshold)) {
            THR_Print(" Bailout: heuristics (%s) with "
                      "code size: %" Pd ", "
                      "call sites: %" Pd ", "
                      "inlining depth of callee: %d, "
                      "const args: %" Pd "\n",
                      decision.reason, instruction_count, call_site_count,
                      function.inlining_depth(), constants_count));
      if (stricter_heuristic) {
        intptr_t call_site_instructions = 0;
        if (auto static_call = call->AsStaticCall()) {
          call_site_instructions = static_call->ArgumentCount() + 1 + 1;
              THR_Print(" Bailout: heuristics (no small leaf)\n"));

      const intptr_t depth =
          function.IsDispatcherOrImplicitAccessor() ? 0 : inlining_depth_;

        function_cache_.Add(parsed_function);

      inlined_size_ += instruction_count;
      if (is_recursive_call) {
        inlined_recursive_call_ = true;

      const FieldSet* callee_guarded_fields =
      FieldSet::Iterator it = callee_guarded_fields->GetIterator();
      while (const Field** field = it.Next()) {

          " with reason %s, code size %" Pd ", call sites: %" Pd "\n",
          decision.reason, instruction_count, call_site_count));
      if (error.IsLanguageError() &&
        if (error.ptr() == Object::background_compilation_error().ptr()) {
          (error.ptr() == Object::out_of_memory_error().ptr()) ||
    if (inlined_info_.length() > 0) {
      THR_Print("Inlining into: '%s'\n growth: %f (%" Pd " -> %" Pd ")\n",
      PrintInlinedInfoFor(top, 1);
  static bool Contains(const GrowableArray<intptr_t>& a, intptr_t deopt_id) {
    for (intptr_t i = 0; i < a.length(); i++) {
      if (a[i] == deopt_id) return true;
    }
    return false;
  }
  void PrintInlinedInfoFor(const Function& caller, intptr_t depth) {
    GrowableArray<intptr_t> call_instructions_printed;
    for (intptr_t i = 0; i < inlined_info_.length(); i++) {
      const InlinedInfo& info = inlined_info_[i];
      if (info.bailout_reason != nullptr) {
      if ((info.inlined_depth == depth) &&
          (info.caller->ptr() == caller.ptr()) &&
          !Contains(call_instructions_printed, info.call_instr->GetDeoptId())) {
        for (int t = 0; t < depth; t++) {
                  info.inlined->ToQualifiedCString());
        PrintInlinedInfoFor(*info.inlined, depth + 1);
        call_instructions_printed.Add(info.call_instr->GetDeoptId());

    call_instructions_printed.Clear();
    for (intptr_t i = 0; i < inlined_info_.length(); i++) {
      const InlinedInfo& info = inlined_info_[i];
      if (info.bailout_reason == nullptr) {
      if ((info.inlined_depth == depth) &&
          (info.caller->ptr() == caller.ptr()) &&
          !Contains(call_instructions_printed, info.call_instr->GetDeoptId())) {
        for (int t = 0; t < depth; t++) {
                  info.inlined->ToQualifiedCString(), info.bailout_reason);
        call_instructions_printed.Add(info.call_instr->GetDeoptId());
  void InlineCall(InlinedCallData* call_data) {
    FlowGraph* callee_graph = call_data->callee_graph;
    auto callee_function_entry = callee_graph->graph_entry()->normal_entry();

    InlineExitCollector* exit_collector = call_data->exit_collector;
    exit_collector->PrepareGraphs(callee_graph);

    if (callee_graph->function().ForceOptimize()) {

    for (BlockIterator block_it = callee_graph->postorder_iterator();
         !block_it.Done(); block_it.Advance()) {
      for (ForwardInstructionIterator it(block_it.Current()); !it.Done();
        Instruction* current = it.Current();
        if (current->env() != nullptr) {
          call_data->call->env()->DeepCopyTo(zone(), current);
          current->CopyDeoptIdFrom(*call_data->call);
          current->env()->MarkAsLazyDeoptToBeforeDeoptId();

    exit_collector->ReplaceCall(callee_function_entry);

    ASSERT(!call_data->call->HasMoveArguments());
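    // After inlining, instructions of the callee that can deoptimize still
    // need a deopt environment. The loop above deep-copies the call site's
    // environment into each such instruction and marks it as lazy-deopt to
    // the point before the call's deopt id, so deoptimizing anywhere inside
    // the inlined body resumes the caller as if the call had not started.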
  static intptr_t CountConstants(const GrowableArray<Value*>& arguments) {
    intptr_t count = 0;
    for (intptr_t i = 0; i < arguments.length(); i++) {
      if (arguments[i]->BindsToConstant()) count++;
    }
    return count;
  }
  ParsedFunction* GetParsedFunction(const Function& function, bool* in_cache) {
    for (intptr_t i = 0; i < function_cache_.length(); ++i) {
      ParsedFunction* parsed_function = function_cache_[i];
      if (parsed_function->function().ptr() == function.ptr()) {
        return parsed_function;
      }
    }
    ParsedFunction* parsed_function =
    return parsed_function;
  bool InlineStaticCalls() {
    const auto& call_info = inlining_call_sites_->static_calls();
    for (intptr_t call_idx = 0; call_idx < call_info.length(); ++call_idx) {
      StaticCallInstr* call = call_info[call_idx].call;
          (call_info[call_idx].ratio * 100) < FLAG_inlining_hotness) {
          THR_Print(" => %s (deopt count %d)\n Bailout: cold %f\n",
                    name.ToCString(), target.deoptimization_counter(),
                    call_info[call_idx].ratio);

      GrowableArray<Value*> arguments(call->ArgumentCount());
      for (int i = 0; i < call->ArgumentCount(); ++i) {
        arguments.Add(call->ArgumentValueAt(i));
      }
      InlinedCallData call_data(
          call->FirstArgIndex(), &arguments, call_info[call_idx].caller());

          FLAG_optimization_level <= 2 &&
          call_info[call_idx].nesting_depth == 0;
                       stricter_heuristic)) {
        InlineCall(&call_data);
  bool InlineClosureCalls() {
    if (FLAG_enable_testing_pragmas) return false;

    const auto& call_info = inlining_call_sites_->closure_calls();
        THR_Print(" Closure Calls (%" Pd ")\n", call_info.length()));
    for (intptr_t call_idx = 0; call_idx < call_info.length(); ++call_idx) {
      ClosureCallInstr* call = call_info[call_idx].call;

      Definition* receiver =
          call->Receiver()->definition()->OriginalDefinition();
      if (const auto* alloc = receiver->AsAllocateClosure()) {
        target = alloc->known_function().ptr();
      } else if (ConstantInstr* constant = receiver->AsConstant()) {
        if (constant->value().IsClosure()) {
          target = Closure::Cast(constant->value()).function();

      if (call->ArgumentCount() > target.NumParameters() ||
          call->ArgumentCount() < target.num_fixed_parameters()) {

      GrowableArray<Value*> arguments(call->ArgumentCount());
      for (int i = 0; i < call->ArgumentCount(); ++i) {
        arguments.Add(call->ArgumentValueAt(i));
      }
      const Array& arguments_descriptor =
      InlinedCallData call_data(call, arguments_descriptor,
                                call->FirstArgIndex(), &arguments,
                                call_info[call_idx].caller());
        InlineCall(&call_data);
  bool InlineInstanceCalls() {
              call_info.length()));
    for (intptr_t call_idx = 0; call_idx < call_info.length(); ++call_idx) {
      PolymorphicInstanceCallInstr* call = call_info[call_idx].call;
      if (!call->complete() && !FLAG_polymorphic_with_deopt) {
                call->function_name().ToCString()));
      const Function& cl = call_info[call_idx].caller();
      if (inliner.Inline()) inlined = true;
  bool AdjustForOptionalParameters(const ParsedFunction& parsed_function,
                                   intptr_t first_arg_index,
                                   const Array& argument_names,
                                   GrowableArray<Value*>* arguments,
                                   ZoneGrowableArray<Definition*>* param_stubs,
                                   FlowGraph* callee_graph) {
    const Function& function = parsed_function.function();
           !function.HasOptionalNamedParameters());

    intptr_t arg_count = arguments->length();
    intptr_t param_count = function.NumParameters();
    intptr_t fixed_param_count = function.num_fixed_parameters();
    intptr_t argument_names_count =
        (argument_names.IsNull()) ? 0 : argument_names.Length();
    ASSERT(fixed_param_count <= arg_count - first_arg_index);
    ASSERT(arg_count - first_arg_index <= param_count);

    if (function.HasOptionalPositionalParameters()) {
      ASSERT(argument_names_count == 0);
      for (intptr_t i = first_arg_index + fixed_param_count; i < arg_count;
               (param_count - fixed_param_count));
      for (intptr_t i = arg_count - first_arg_index; i < param_count; ++i) {
        const Instance& object =
            parsed_function.DefaultParameterValueAt(i - fixed_param_count);
        ConstantInstr* constant = callee_graph->GetConstant(object);
        arguments->Add(nullptr);
        param_stubs->Add(constant);

    const intptr_t positional_args =
        arg_count - first_arg_index - argument_names_count;
    ASSERT(positional_args == fixed_param_count);

    if (argument_names_count == 0) {
      for (intptr_t i = 0; i < param_count - fixed_param_count; ++i) {
        const Instance& object = parsed_function.DefaultParameterValueAt(i);
        ConstantInstr* constant = callee_graph->GetConstant(object);
        arguments->Add(nullptr);
        param_stubs->Add(constant);

    GrowableArray<NamedArgument> named_args(argument_names_count);
    for (intptr_t i = 0; i < argument_names.Length(); ++i) {
      arg_name ^= argument_names.At(i);
      named_args.Add(NamedArgument(
          &arg_name, (*arguments)[first_arg_index + fixed_param_count + i]));
    }

    arguments->TruncateTo(first_arg_index + fixed_param_count);

    intptr_t match_count = 0;
    for (intptr_t i = fixed_param_count; i < param_count; ++i) {
      Value* arg = nullptr;
      for (intptr_t j = 0; j < named_args.length(); ++j) {
        if (param_name.Equals(*named_args[j].name)) {
          arg = named_args[j].value;
      arguments->Add(arg);
      if (arg != nullptr) {
        const Instance& object =
            parsed_function.DefaultParameterValueAt(i - fixed_param_count);
        ConstantInstr* constant = callee_graph->GetConstant(object);
        param_stubs->Add(constant);

    return argument_names_count == match_count;
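  // Named arguments are matched by name against the callee's declared
  // optional parameters. Illustrative Dart example (hypothetical callee):
  // for `f(a, {b, c})` invoked as `f(x, c: y)`, parameter `b` receives its
  // default value as a constant stub, `c` binds to `y`, and match_count must
  // equal the number of supplied names or the adjustment (and the inlining
  // attempt) is abandoned.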
  FlowGraphInliner* inliner_;
  FlowGraph* caller_graph_;
  const intptr_t initial_size_;
  intptr_t inlined_size_;
  bool inlined_recursive_call_;
  intptr_t inlining_depth_;
  intptr_t inlining_recursion_depth_;
  intptr_t inlining_depth_threshold_;
  CallSites* collected_call_sites_;
  CallSites* inlining_call_sites_;
  GrowableArray<ParsedFunction*> function_cache_;
  GrowableArray<InlinedInfo> inlined_info_;
      num_variants_(call->NumberOfChecks()),
      variants_(call->targets_),
      inlined_variants_(zone()),
      non_inlined_variants_(new (zone()) CallTargets(zone())),
      inlined_entries_(num_variants_),
      caller_function_(caller_function) {}
IsolateGroup* PolymorphicInliner::isolate_group() const {

Zone* PolymorphicInliner::zone() const {

intptr_t PolymorphicInliner::AllocateBlockId() const {
bool PolymorphicInliner::CheckInlinedDuplicate(const Function& target) {
  for (intptr_t i = 0; i < inlined_variants_.length(); ++i) {
        !target.is_polymorphic_target()) {
      if (inlined_entries_[i]->IsGraphEntry()) {
        auto old_entry = inlined_entries_[i]->AsGraphEntry()->normal_entry();
        BlockEntryInstr* old_target = old_entry;

        inlined_entries_[i]->AsGraphEntry()->UnuseAllInputs();

        JoinEntryInstr* new_join =
        old_target->ReplaceAsPredecessorWith(new_join);
        for (intptr_t j = 0; j < old_target->dominated_blocks().length(); ++j) {
          BlockEntryInstr* block = old_target->dominated_blocks()[j];
          new_join->AddDominatedBlock(block);
        }
        ASSERT(new_join->next()->IsRedefinition());

        TargetEntryInstr* new_target = new TargetEntryInstr(
        new_target->InheritDeoptTarget(zone(), new_join);
        new_goto->InheritDeoptTarget(zone(), new_join);
        new_target->LinkTo(new_goto);
        new_target->set_last_instruction(new_goto);
        new_join->predecessors_.Add(new_target);

        inlined_entries_[i] = new_target;
      } else {
        ASSERT(inlined_entries_[i]->IsTargetEntry());
        BlockEntryInstr* join =
            inlined_entries_[i]->last_instruction()->SuccessorAt(0);
      inlined_entries_.Add(join);
bool PolymorphicInliner::CheckNonInlinedDuplicate(const Function& target) {
  for (intptr_t i = 0; i < non_inlined_variants_->length(); ++i) {
bool PolymorphicInliner::TryInliningPoly(const TargetInfo& target_info) {
  const Array& arguments_descriptor =
  InlinedCallData call_data(call_, arguments_descriptor, call_->FirstArgIndex(),
                            &arguments, caller_function_);

  FlowGraph* callee_graph = call_data.callee_graph;
  call_data.exit_collector->PrepareGraphs(callee_graph);
  inlined_entries_.Add(callee_graph->graph_entry());
  exit_collector_->Union(call_data.exit_collector);
TargetEntryInstr* PolymorphicInliner::BuildDecisionGraph() {
  TargetEntryInstr* entry = new (Z) TargetEntryInstr(
  entry->InheritDeoptTarget(zone(), call_);

  BlockEntryInstr* current_block = entry;
  Instruction* cursor = entry;

  LoadClassIdInstr* load_cid =
      new (Z) LoadClassIdInstr(new (Z) Value(receiver), cid_representation);
  for (intptr_t i = 0; i < inlined_variants_.length(); ++i) {
    const CidRange& variant = inlined_variants_[i];
    bool is_last_test = (i == inlined_variants_.length() - 1);
    if (is_last_test && non_inlined_variants_->is_empty()) {
      RedefinitionInstr* cid_redefinition =
          new RedefinitionInstr(new (Z) Value(load_cid));
      CheckClassIdInstr* check_class_id = new (Z) CheckClassIdInstr(
      check_class_id->InheritDeoptTarget(zone(), call_);

      BlockEntryInstr* callee_entry = inlined_entries_[i];
      if (callee_entry->IsGraphEntry()) {
        auto target = callee_entry->AsGraphEntry()->normal_entry();
        ASSERT(cursor != nullptr);
        cursor->LinkTo(target->next());
        target->ReplaceAsPredecessorWith(current_block);
        callee_entry->UnuseAllInputs();
        target->UnuseAllInputs();
        for (intptr_t j = 0; j < target->dominated_blocks().length(); ++j) {
          BlockEntryInstr* block = target->dominated_blocks()[j];
          current_block->AddDominatedBlock(block);
        }
      } else if (callee_entry->IsJoinEntry()) {
        JoinEntryInstr* join = callee_entry->AsJoinEntry();
        goto_join->InheritDeoptTarget(zone(), join);
        cursor->LinkTo(goto_join);
        current_block->set_last_instruction(goto_join);
      }
    } else {
      BlockEntryInstr* cid_test_entry_block = current_block;
      if (variant.cid_start == variant.cid_end) {
        compare = new EqualityCompareInstr(
            new Value(cid_constant),
            cid_representation == kTagged ? kSmiCid : kIntegerCid,
            variant.cid_start, variant.cid_end,
            cid_representation);
      branch->InheritDeoptTarget(zone(), call_);
      current_block->set_last_instruction(branch);

      BlockEntryInstr* callee_entry = inlined_entries_[i];
      TargetEntryInstr* true_target = nullptr;
      if (callee_entry->IsGraphEntry()) {
        auto graph_entry = callee_entry->AsGraphEntry();
        auto function_entry = graph_entry->normal_entry();
        function_entry->ReplaceAsPredecessorWith(true_target);
        for (intptr_t j = 0; j < function_entry->dominated_blocks().length();
             ++j) {
          BlockEntryInstr* block = function_entry->dominated_blocks()[j];
          true_target->AddDominatedBlock(block);
        }
        graph_entry->UnuseAllInputs();
      } else if (callee_entry->IsTargetEntry()) {
        ASSERT(!callee_entry->IsFunctionEntry());
        true_target = callee_entry->AsTargetEntry();
        BlockEntryInstr* join = true_target->last_instruction()->SuccessorAt(0);
        current_block->AddDominatedBlock(join);
      } else {
        JoinEntryInstr* join = callee_entry->AsJoinEntry();
            new TargetEntryInstr(AllocateBlockId(), try_idx, DeoptId::kNone);
        true_target->InheritDeoptTarget(zone(), join);
        goto_join->InheritDeoptTarget(zone(), join);
        true_target->LinkTo(goto_join);
        true_target->set_last_instruction(goto_join);
      }
      *branch->true_successor_address() = true_target;
      current_block->AddDominatedBlock(true_target);

      TargetEntryInstr* false_target =
          new TargetEntryInstr(AllocateBlockId(), try_idx, DeoptId::kNone);
      false_target->InheritDeoptTarget(zone(), call_);
      *branch->false_successor_address() = false_target;
      cid_test_entry_block->AddDominatedBlock(false_target);

      cursor = current_block = false_target;
    }
  }

  if (!non_inlined_variants_->is_empty()) {
    PolymorphicInstanceCallInstr* fallback_call =
    fallback_call->InheritDeoptTarget(zone(), call_);
    fallback_call->set_total_call_count(call_->CallCount());
    DartReturnInstr* fallback_return = new DartReturnInstr(
    fallback_return->InheritDeoptTargetAfter(owner_->caller_graph(), call_,
    exit_collector_->AddExit(fallback_return);
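  // Shape of the generated dispatch (illustrative): the receiver's class id
  // is loaded once, then each inlined variant is guarded by a cid test (an
  // equality compare for a single cid, a range test otherwise), with the
  // false edge chaining to the next test. The final test either becomes an
  // unconditional CheckClassId (when every variant was inlined) or falls
  // through to a residual PolymorphicInstanceCall covering the cids that
  // were not inlined.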
  int percent = total == 0 ? 0 : (100 * targets.TargetAt(idx)->count) / total;
            name.ToCString(), targets[idx].cid_start, targets[idx].cid_end,

bool PolymorphicInliner::trace_inlining() const {
  ASSERT(&variants_ == &call_->targets_);

  for (intptr_t var_idx = 0; var_idx < variants_.length(); ++var_idx) {
    if (variants_.length() > FLAG_max_polymorphic_checks) {
      non_inlined_variants_->Add(info);

    const bool try_harder = (var_idx >= variants_.length() - 2) &&
                            non_inlined_variants_->length() == 0;

    intptr_t size = target.optimized_instruction_count();
    bool small = (size != 0 && size < FLAG_inlining_size_threshold);

    if (!try_harder && count < (total >> 5)) {
      non_inlined_variants_->Add(info);

    if (CheckInlinedDuplicate(target)) {
                                   "duplicate already inlined"));

    if (!try_harder && count < (total >> (small ? 4 : 3))) {
      non_inlined_variants_->Add(&variants_[var_idx]);

    if (CheckNonInlinedDuplicate(target)) {
      non_inlined_variants_->Add(&variants_[var_idx]);

    if (TryInliningPoly(*info)) {
      inlined_variants_.Add(&variants_[var_idx]);
      non_inlined_variants_->Add(&variants_[var_idx]);

  if (inlined_variants_.is_empty()) return false;
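  // The shift-based cutoffs above are cheap percentage checks: a variant
  // covering fewer than total >> 5 (~3.1%) of the observed receivers is
  // skipped outright, and one below total >> 4 (6.25%) for small targets or
  // total >> 3 (12.5%) for larger ones is also left to the fallback call,
  // unless try_harder relaxes the policy for the last two variants.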
    : flow_graph_(flow_graph),
      inline_id_to_function_(inline_id_to_function),
      inline_id_to_token_pos_(inline_id_to_token_pos),
      caller_inline_id_(caller_inline_id),
      trace_inlining_(FLAG_trace_inlining && flow_graph->should_print()),
      speculative_policy_(speculative_policy),
      precompiler_(precompiler) {}
void FlowGraphInliner::CollectGraphInfo(FlowGraph* flow_graph,
                                        intptr_t constants_count,
                                        bool force,
                                        intptr_t* instruction_count,
                                        intptr_t* call_site_count) {
  *instruction_count = 0;
  *call_site_count = 0;

  if (constants_count > 0) {
    *instruction_count = info.instruction_count();
    *call_site_count = info.call_site_count();
  } else {
    ASSERT(constants_count == 0);
    if (force || (function.optimized_instruction_count() == 0)) {
      function.SetOptimizedInstructionCountClamped(info.instruction_count());
      function.SetOptimizedCallSiteCountClamped(info.call_site_count());
    }
    *instruction_count = function.optimized_instruction_count();
    *call_site_count = function.optimized_call_site_count();
                                     intptr_t inlining_id) {
       !block_it.Done(); block_it.Advance()) {

  return (function.name() == Symbols::IndexToken().ptr()) ||
         (function.name() == Symbols::AssignIndexToken().ptr()) ||

                           Symbols::vm_prefer_inline(),

                           Symbols::vm_never_inline(),

                           Symbols::vm_always_consider_inlining(),
  if (function.IsDispatcherOrImplicitAccessor() &&
      !(function.kind() == UntaggedFunction::kDynamicInvocationForwarder &&

  if (function.IsMethodExtractor()) {

      (function.kind() == UntaggedFunction::kConstructor)) {
    const intptr_t count = function.optimized_instruction_count();
    if ((count != 0) && (count < FLAG_inline_getters_setters_smaller_than)) {

  intptr_t instruction_count = 0;
  intptr_t call_site_count = 0;
                   false, &instruction_count,

  if ((FLAG_inlining_filter != nullptr) &&

      (FLAG_print_flow_graph || FLAG_print_flow_graph_optimized)) {

  intptr_t inlining_depth_threshold = FLAG_inlining_depth_threshold;

  if (FLAG_print_inlining_tree) {

      (FLAG_print_flow_graph || FLAG_print_flow_graph_optimized)) {
  const intptr_t id = inline_id_to_function_->length();
         source.token_pos.IsNoSource());
  inline_id_to_function_->Add(&function);
  inline_id_to_token_pos_->Add(source.token_pos);
  caller_inline_id_->Add(source.inlining_id);
  ASSERT(inline_id_to_token_pos_->length() ==
         (inline_id_to_function_->length() - 1));
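// The three parallel arrays written above (inline_id_to_function_,
// inline_id_to_token_pos_, caller_inline_id_) let later phases and the
// debugger unwind an inlined frame: each inline id maps back to the inlined
// function, the token position of its call site, and the id of the caller
// it was inlined into.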