DEFINE_FLAG(bool, prune_dead_locals, true, "optimize dead locals away");
FLAG_reorder_basic_blocks && !function.IsFfiCallbackTrampoline();

Symbols::vm_unsafe_no_bounds_checks(),
intptr_t max_block_id,
    : thread_(Thread::Current()),
      current_ssa_temp_index_(0),
      max_block_id_(max_block_id),
      parsed_function_(parsed_function),
      num_direct_parameters_(parsed_function.function().MakesCopyOfParameters()
                                 ? 0
                                 : parsed_function.function().NumParameters()),
      direct_parameter_locations_(
          parsed_function.function().num_fixed_parameters()),
      graph_entry_(graph_entry),
      optimized_block_order_(),
      constant_null_(nullptr),
      constant_dead_(nullptr),
      should_reorder_blocks_(
      prologue_info_(prologue_info),
      loop_hierarchy_(nullptr),
      loop_invariant_loads_(nullptr),
      captured_parameters_(new (zone()) BitVector(zone(), variable_count())),
      should_remove_all_bounds_checks_(
                          &compiler_pass_filters_);
                          &direct_parameter_locations_);
const intptr_t fixed_parameters_size_in_bytes =
return fixed_parameters_size_in_bytes +

return kUnboxedInt64;
return kUnboxedDouble;

return kUnboxedInt64;
return kUnboxedDouble;
return kPairOfTagged;
Definition* current_defn = current->AsDefinition();
if ((replacement != nullptr) && (current_defn != nullptr)) {
  Definition* replacement_defn = replacement->AsDefinition();
  ASSERT(replacement_defn != nullptr);
if (current_defn == nullptr) {

    : &reverse_postorder_;
    : &reverse_postorder_;

return constant_instr_pool_.LookupValue(
if (constant == nullptr) {
  if (representation == kTagged) {
  constant_instr_pool_.Insert(constant);
bool tagged_value_must_be_smi) {
switch (target_rep) {
  return !tagged_value_must_be_smi || value.IsSmi();
  if (value.IsInteger()) {
  if (value.IsInteger()) {
  return value.IsInteger();

value, representation,
if (((representation == kUnboxedFloat) ||
     (representation == kUnboxedDouble)) &&
Integer::Cast(value).AsDoubleValue())),
if (auto par = defn->AsParameter()) {
  par->set_block(entry);

ASSERT(instr->IsDefinition());
if (env != nullptr) {
  env->DeepCopyTo(zone(), instr);
if (instr->env() != nullptr) {

ASSERT(instr->IsDefinition());
if (env != nullptr) {
  env->DeepCopyTo(zone(), instr);
return prev->AppendInstruction(instr);
if (instr->env() != nullptr) {

      next_successor_ix_(block->last_instruction()->SuccessorCount() - 1) {}
intptr_t next_successor_ix_;
DISALLOW_ALLOCATION();
reverse_postorder_.Clear();
if (state.HasNextSuccessor()) {
postorder_.Add(block);
ASSERT(postorder_.length() == preorder_.length());
reverse_postorder_.Add(postorder_[block_count - i - 1]);
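// A minimal self-contained sketch of the traversal DiscoverBlocks performs,
// using toy types (Block, std::vector) instead of the VM's BlockEntryInstr
// and GrowableArray: an explicit-stack DFS assigns preorder numbers when a
// block is first discovered, appends to the postorder once all successors
// are exhausted, and reverse postorder is the postorder read back to front.
#include <cstdint>
#include <vector>

struct Block {
  std::vector<Block*> successors;
  int64_t preorder_number = -1;  // -1 means "not yet discovered"
  int64_t postorder_number = -1;
};

void DiscoverBlocks(Block* entry,
                    std::vector<Block*>* preorder,
                    std::vector<Block*>* postorder,
                    std::vector<Block*>* reverse_postorder) {
  struct TraversalState {
    Block* block;
    size_t next_successor;  // index of the next successor to visit
  };
  std::vector<TraversalState> stack;
  entry->preorder_number = 0;
  preorder->push_back(entry);
  stack.push_back({entry, 0});
  while (!stack.empty()) {
    TraversalState& state = stack.back();
    if (state.next_successor == state.block->successors.size()) {
      // All successors visited: the block gets the next postorder number.
      state.block->postorder_number = static_cast<int64_t>(postorder->size());
      postorder->push_back(state.block);
      stack.pop_back();
      continue;
    }
    Block* succ = state.block->successors[state.next_successor++];
    if (succ->preorder_number == -1) {
      succ->preorder_number = static_cast<int64_t>(preorder->size());
      preorder->push_back(succ);
      stack.push_back({succ, 0});  // `state` is not used after this push
    }
  }
  for (size_t i = postorder->size(); i-- > 0;) {
    reverse_postorder->push_back((*postorder)[i]);
  }
}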
bool changed = false;
block_it.Advance()) {
if (block->IsGraphEntry()) continue;
while (auto goto_instr = last->AsGoto()) {
last_merged_block = successor;
if (last_merged_block != nullptr) {
void FlowGraph::ComputeIsReceiverRecursive(
  if (def->IsParameter() && (def->AsParameter()->env_index() == 0)) continue;
  ComputeIsReceiverRecursive(def->AsPhi(), unmark);

void FlowGraph::ComputeIsReceiver(PhiInstr* phi) const {
  GrowableArray<PhiInstr*> unmark;
  ComputeIsReceiverRecursive(phi, &unmark);
  while (!unmark.is_empty()) {
    PhiInstr* phi = unmark.RemoveLast();
    for (Value::Iterator it(phi->input_use_list()); !it.Done(); it.Advance()) {
      PhiInstr* use = it.Current()->instruction()->AsPhi();

if (def->IsParameter()) return (def->AsParameter()->env_index() == 0);
if (!def->IsPhi() || graph_entry()->HasSingleEntryPoint()) {
ComputeIsReceiver(phi);
if (!FLAG_use_cha_deopt && !isolate_group()->all_classes_finalized()) {
bool receiver_maybe_null = false;
const intptr_t receiver_cid = type->ToNullableCid();
receiver_maybe_null = type->is_nullable();
if (type->is_nullable()) {
  receiver_maybe_null = true;
if (receiver_class.IsNull() ||
} else if (call->HasICData()) {
const intptr_t cid = receiver_class.id();
for (intptr_t i = 0, len = data->NumberOfChecks(); i < len; i++) {
  if (!data->IsUsedAt(i)) {
  fun = data->GetTargetAt(i);
  if (data->GetReceiverClassIdAt(i) == cid || cls.id() == cid) {
const String& method_name =
    (kind == UntaggedFunction::kMethodExtractor)
        : call->function_name();
if (receiver_maybe_null) {
  const Class& null_class =
intptr_t subclass_count = 0;
if (!cha.HasOverride(receiver_class, method_name, &subclass_count)) {
  if (FLAG_trace_cha) {
      " **(CHA) Instance call needs no class check since there "
      "are no overrides of method '%s' on '%s'\n",
  if (FLAG_use_cha_deopt) {

intptr_t receiver_cid) {
call->Receiver()->CopyWithType(),
false, call->deopt_id()),
block_it.Advance()) {
!instr_it.Done(); instr_it.Advance()) {
if (redefinition != nullptr) {
Value* original_use = it.Current();
intptr_t variable_count,
    : zone_(Thread::Current()->zone()),
      variable_count_(variable_count),
      postorder_(postorder),
      live_out_(postorder.length()),
      kill_(postorder.length()),
      live_in_(postorder.length()) {}

bool changed = false;
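// A minimal sketch of the backward dataflow fixpoint LivenessAnalysis
// implements; LiveBlock and the std::vector<bool> bitsets are toy stand-ins
// for the VM's BlockEntryInstr and BitVector, and gen/kill are assumed to be
// pre-sized to variable_count. The equations are the classic ones:
// live_out(B) = union of live_in over B's successors, and
// live_in(B) = gen(B) | (live_out(B) - kill(B)), iterated until stable.
#include <cstddef>
#include <vector>

struct LiveBlock {
  std::vector<size_t> successors;       // indices into the postorder array
  std::vector<bool> gen, kill;          // per-variable bits
  std::vector<bool> live_in, live_out;  // computed by the fixpoint
};

void ComputeLiveInAndLiveOutSets(std::vector<LiveBlock>& postorder,
                                 size_t variable_count) {
  for (LiveBlock& b : postorder) {
    b.live_in.assign(variable_count, false);
    b.live_out.assign(variable_count, false);
  }
  bool changed = true;
  while (changed) {  // iterate until neither set grows anywhere
    changed = false;
    for (LiveBlock& b : postorder) {
      // live_out(B) |= live_in(S) for every successor S.
      for (size_t s : b.successors) {
        for (size_t v = 0; v < variable_count; ++v) {
          if (postorder[s].live_in[v] && !b.live_out[v]) {
            b.live_out[v] = true;
            changed = true;
          }
        }
      }
      // live_in(B) = gen(B) | (live_out(B) & ~kill(B)).
      for (size_t v = 0; v < variable_count; ++v) {
        const bool in = b.gen[v] || (b.live_out[v] && !b.kill[v]);
        if (in && !b.live_in[v]) {
          b.live_in[v] = true;
          changed = true;
        }
      }
    }
  }
}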
flow_graph_(flow_graph),

assigned_vars_.Clear();
assigned_vars_.Add(kill);
return assigned_vars_;

if (store->is_dead()) {
if (store->is_last()) {
  const intptr_t index = flow_graph_->EnvIndex(&store->local());

const intptr_t index = flow_graph_->EnvIndex(&load->local());

if (load != nullptr) {
  const intptr_t index = flow_graph_->EnvIndex(&load->local());
  if (index >= live_in->length()) continue;
  last_loads->Add(index);

if (store != nullptr) {
  const intptr_t index = flow_graph_->EnvIndex(&store->local());
  if (index >= live_in->length()) continue;
const bool is_function_entry = block->IsFunctionEntry();
const bool is_osr_entry = block->IsOsrEntry();
const bool is_catch_block_entry = block->IsCatchBlockEntry();
if (is_function_entry || is_osr_entry || is_catch_block_entry) {
  const intptr_t parameter_count =
      (is_osr_entry || is_catch_block_entry)
  for (intptr_t i = 0; i < parameter_count; ++i) {
  if (is_function_entry) {

if (inlining_parameters != nullptr) {
  for (intptr_t i = 0, n = inlining_parameters->length(); i < n; ++i) {
    if (defn->IsConstant()) {

dominance_frontier, &live_phis);
Rename(&live_phis, &variable_liveness, inlining_parameters);
RemoveDeadPhis(&live_phis);
for (intptr_t i = 0; i < size; ++i) {
  idom.Add(parent_[i]);
preorder_[0]->ClearDominatedBlocks();
for (intptr_t block_index = size - 1; block_index >= 1; --block_index) {
  intptr_t best = pred_index;
  if (pred_index > block_index) {
    CompressPath(block_index, pred_index, &parent_, &label);
    best = label[pred_index];
  semi[block_index] = Utils::Minimum(semi[block_index], semi[best]);
  label[block_index] = semi[block_index];
for (intptr_t block_index = 1; block_index < size; ++block_index) {
  intptr_t dom_index = idom[block_index];
  while (dom_index > semi[block_index]) {
    dom_index = idom[dom_index];
  idom[block_index] = dom_index;
  preorder_[dom_index]->AddDominatedBlock(preorder_[block_index]);
for (intptr_t block_index = 0; block_index < size; ++block_index) {
  if (count <= 1) continue;
  for (intptr_t i = 0; i < count; ++i) {
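// The fragments above implement a semidominator algorithm with path
// compression (CompressPath, below). As an illustration of the same problem
// with less machinery, here is the iterative scheme of Cooper, Harvey &
// Kennedy in self-contained form; it yields the same immediate-dominator
// tree, only with a worse worst case. Blocks are assumed to be numbered in
// reverse postorder with 0 as the entry.
#include <cstdint>
#include <vector>

// preds[b] lists the predecessors of block b. Returns idom[b] per block.
std::vector<int64_t> ComputeDominatorsIteratively(
    const std::vector<std::vector<int64_t>>& preds) {
  const int64_t n = static_cast<int64_t>(preds.size());
  std::vector<int64_t> idom(n, -1);
  idom[0] = 0;  // the entry dominates itself
  auto intersect = [&](int64_t a, int64_t b) {
    while (a != b) {  // walk both fingers up the partial dominator tree
      while (a > b) a = idom[a];
      while (b > a) b = idom[b];
    }
    return a;
  };
  bool changed = true;
  while (changed) {
    changed = false;
    for (int64_t b = 1; b < n; ++b) {
      int64_t new_idom = -1;
      for (int64_t p : preds[b]) {
        if (idom[p] == -1) continue;  // predecessor not processed yet
        new_idom = (new_idom == -1) ? p : intersect(p, new_idom);
      }
      if (new_idom != idom[b]) {
        idom[b] = new_idom;
        changed = true;
      }
    }
  }
  return idom;
}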
void FlowGraph::CompressPath(intptr_t start_index,
                             intptr_t current_index,
  intptr_t next_index = (*parent)[current_index];
  if (next_index > start_index) {
    CompressPath(start_index, next_index, parent, label);
    (*label)[current_index] =
    (*parent)[current_index] = (*parent)[next_index];
void FlowGraph::InsertPhis(const GrowableArray<BlockEntryInstr*>& preorder,
                           const GrowableArray<BitVector*>& assigned_vars,
                           const GrowableArray<BitVector*>& dom_frontier,
                           GrowableArray<PhiInstr*>* live_phis) {
for (intptr_t block_index = 0; block_index < block_count; ++block_index) {
  has_already.Add(-1);
GrowableArray<BlockEntryInstr*> worklist;
for (intptr_t var_index = 0; var_index < variable_count(); ++var_index) {
  const bool always_live =
  for (intptr_t block_index = 0; block_index < block_count; ++block_index) {
    if (assigned_vars[block_index]->Contains(var_index)) {
      work[block_index] = var_index;
  BlockEntryInstr* current = worklist.RemoveLast();
  for (BitVector::Iterator it(dom_frontier[current->preorder_number()]);
       !it.Done(); it.Advance()) {
    int index = it.Current();
    if (has_already[index] < var_index) {
      PhiInstr* phi = join->InsertPhi(
      live_phis->Add(phi);
      has_already[index] = var_index;
      if (work[index] < var_index) {
        work[index] = var_index;
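// A minimal sketch of the worklist algorithm InsertPhis uses, with toy types
// in place of BitVector and BlockEntryInstr: for each variable, seed the
// worklist with every block that assigns it, then place a phi in each block
// of the iterated dominance frontier. The has_already/work arrays reproduce
// the trick visible above: stamping them with the current variable index
// avoids clearing per-variable sets between iterations. Blocks in
// assigned[v] are assumed to be listed once each.
#include <cstdint>
#include <vector>

// dom_frontier[b] lists the dominance-frontier blocks of b; assigned[v]
// lists the blocks assigning variable v. Returns phis[b] = the variables
// that need a phi at block b.
std::vector<std::vector<int64_t>> InsertPhis(
    const std::vector<std::vector<int64_t>>& dom_frontier,
    const std::vector<std::vector<int64_t>>& assigned,
    int64_t block_count) {
  std::vector<std::vector<int64_t>> phis(block_count);
  std::vector<int64_t> has_already(block_count, -1);
  std::vector<int64_t> work(block_count, -1);
  std::vector<int64_t> worklist;
  const int64_t variable_count = static_cast<int64_t>(assigned.size());
  for (int64_t var = 0; var < variable_count; ++var) {
    for (int64_t b : assigned[var]) {
      work[b] = var;
      worklist.push_back(b);
    }
    while (!worklist.empty()) {
      const int64_t current = worklist.back();
      worklist.pop_back();
      for (int64_t frontier : dom_frontier[current]) {
        if (has_already[frontier] < var) {
          phis[frontier].push_back(var);  // this join point needs a phi
          has_already[frontier] = var;
          if (work[frontier] < var) {
            work[frontier] = var;
            worklist.push_back(frontier);
          }
        }
      }
    }
  }
  return phis;
}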
constant_dead_ = GetConstant(Object::optimized_out());

if (auto join = block->AsJoinEntry()) {
  const intptr_t osr_phi_count =
  if (join->phis() == nullptr || (*join->phis())[i] == nullptr) {
    join->InsertPhi(i, local_phi_count)->mark_alive();
void FlowGraph::Rename(GrowableArray<PhiInstr*>* live_phis,
                       VariableLivenessAnalysis* variable_liveness,
                       ZoneGrowableArray<Definition*>* inlining_parameters) {
if (entry->catch_entries().is_empty()) {
  ASSERT(entry->unchecked_entry() != nullptr ? entry->SuccessorCount() == 2
                                             : entry->SuccessorCount() == 1);
RenameRecursive(entry, &env, live_phis, variable_liveness,
                inlining_parameters);
bool should_assign_stack_locations,
const intptr_t index = (function.IsFactory() ? (i - 1) : i);
return index >= 0 ? ParameterRepresentationAt(function, index)
should_assign_stack_locations, parameter_info);
void FlowGraph::PopulateEnvironmentFromFunctionEntry(
const intptr_t inlined_type_args_param =
const auto& [location, representation] = direct_parameter_locations_[i];
if (location.IsInvalid()) {
const intptr_t env_index =
auto param = new (zone())
    ParameterInstr(function_entry,
                   i, location, representation);
(*env)[env_index] = param;

if (inlining_parameters != nullptr) {
  Definition* defn = (*inlining_parameters)[inlined_type_args_param + i];
  if (defn->IsConstant()) {
    ASSERT(defn->previous() == graph_entry_);
    ASSERT(defn->HasSSATemp());
    ASSERT(defn->previous() == nullptr);
  (*env)[index] = defn;

if (reify_generic_argument) {
  if (inlining_parameters == nullptr) {
  defn = (*inlining_parameters)[0];
  if (defn->IsConstant()) {
    ASSERT(defn->previous() == graph_entry_);
    ASSERT(defn->HasSSATemp());
    ASSERT(defn->previous() == nullptr);

intptr_t env_index) {
num_direct_parameters - env_index),
void FlowGraph::PopulateEnvironmentFromOsrEntry(
    OsrEntryInstr* osr_entry,
    GrowableArray<Definition*>* env) {
  ASSERT(parameter_count == env->length());
  for (intptr_t i = 0; i < parameter_count; i++) {
    ParameterInstr* param = new (zone()) ParameterInstr(
        osr_entry, i, param_index,
void FlowGraph::PopulateEnvironmentFromCatchEntry(
    CatchBlockEntryInstr* catch_entry,
    GrowableArray<Definition*>* env) {
  const intptr_t raw_exception_var_envindex =
      catch_entry->raw_exception_var() != nullptr
          ? EnvIndex(catch_entry->raw_exception_var())
  const intptr_t raw_stacktrace_var_envindex =
      catch_entry->raw_stacktrace_var() != nullptr
          ? EnvIndex(catch_entry->raw_stacktrace_var())
  intptr_t additional_slots = 0;
  if (raw_exception_var_envindex == i) {
  } else if (raw_stacktrace_var_envindex == i) {
  if (param_rep == kTagged && param_loc.IsStackSlot()) {
  n + additional_slots);
  auto param = new (Z) ParameterInstr(
void FlowGraph::AttachEnvironment(Instruction* instr,
                                  GrowableArray<Definition*>* env) {
  instr->NumberOfInputsConsumedBeforeCall(),
  instr->SetEnvironment(deopt_env);
  for (Environment::DeepIterator it(deopt_env); !it.Done(); it.Advance()) {
    Value* use = it.CurrentValue();
    use->definition()->AddEnvUse(use);
void FlowGraph::RenameRecursive(
    BlockEntryInstr* block_entry,
    GrowableArray<Definition*>* env,
    GrowableArray<PhiInstr*>* live_phis,
    VariableLivenessAnalysis* variable_liveness,
    ZoneGrowableArray<Definition*>* inlining_parameters) {
  if (auto join = block_entry->AsJoinEntry()) {
    if (join->phis() != nullptr) {
      ASSERT(join->phis()->length() == local_phi_count);
      for (intptr_t i = 0; i < local_phi_count; ++i) {
        PhiInstr* phi = (*join->phis())[i];
        if (phi != nullptr) {
          if (block_entry->InsideTryBlock() && !phi->is_alive()) {
          live_phis->Add(phi);
  } else if (auto osr_entry = block_entry->AsOsrEntry()) {
    PopulateEnvironmentFromOsrEntry(osr_entry, env);
  } else if (auto function_entry = block_entry->AsFunctionEntry()) {
    PopulateEnvironmentFromFunctionEntry(
        function_entry, env, live_phis, variable_liveness,
        inlining_parameters);
  } else if (auto catch_entry = block_entry->AsCatchBlockEntry()) {
    PopulateEnvironmentFromCatchEntry(catch_entry, env);

  if (!block_entry->IsGraphEntry() &&
      !block_entry->IsBlockEntryWithInitialDefs()) {
    BitVector* live_in = variable_liveness->GetLiveInSet(block_entry);
    if (FLAG_prune_dead_locals && !live_in->Contains(i) &&

  AttachEnvironment(block_entry, env);

  for (ForwardInstructionIterator it(block_entry); !it.Done(); it.Advance()) {
    Instruction* current = it.Current();
    if (current->NeedsEnvironment()) {
      AttachEnvironment(current, env);
    for (intptr_t i = current->InputCount() - 1; i >= 0; --i) {
      Value* v = current->InputAt(i);
      Definition* reaching_defn = env->RemoveLast();
      Definition* input_defn = v->definition();
      if (input_defn != reaching_defn) {
        if (input_defn->IsConstant() && reaching_defn->IsPhi()) {
          auto constant = GetConstant(input_defn->AsConstant()->value());
          current->ReplaceInEnvironment(reaching_defn, constant);
          reaching_defn = constant;
        ASSERT(input_defn->IsLoadLocal() || input_defn->IsStoreLocal() ||
               input_defn->IsDropTemps() || input_defn->IsMakeTemp() ||
               (input_defn->IsConstant() && reaching_defn->IsConstant()));
        ASSERT(reaching_defn->ssa_temp_index() != -1);
        v->set_definition(reaching_defn);
        input_defn = reaching_defn;
      input_defn->AddInputUse(v);

    Definition* result = nullptr;
    switch (current->tag()) {
      case Instruction::kLoadLocal: {
        LoadLocalInstr* load = current->Cast<LoadLocalInstr>();
        PhiInstr* phi = result->AsPhi();
        if ((phi != nullptr) && !phi->is_alive()) {
          live_phis->Add(phi);
        if (FLAG_prune_dead_locals &&
            variable_liveness->IsLastLoad(block_entry, load)) {
        if (load->local().is_captured_parameter()) {
          captured_parameters_->Add(index);
        if (phi != nullptr) {
          if (!phi->HasType()) {
            auto* phis = phi->block()->phis();
            if ((index < phis->length()) && (*phis)[index] == phi) {
              phi->UpdateType(*load->local().inferred_type());
      case Instruction::kStoreLocal: {
        StoreLocalInstr* store = current->Cast<StoreLocalInstr>();
        if (!FLAG_prune_dead_locals ||
            variable_liveness->IsStoreAlive(block_entry, store)) {
      case Instruction::kDropTemps: {
        DropTempsInstr* drop = current->Cast<DropTempsInstr>();
        for (intptr_t j = 0; j < drop->num_temps(); j++) {
        if (drop->value() != nullptr) {
          result = drop->value()->definition();
        ASSERT((drop->value() != nullptr) || !drop->HasTemp());
      case Instruction::kConstant:
      case Instruction::kUnboxedConstant: {
        ConstantInstr* constant = current->Cast<ConstantInstr>();
        if (constant->HasTemp()) {
      case Instruction::kMakeTemp: {
      case Instruction::kMoveArgument:
      case Instruction::kCheckStackOverflow:
        current->AsCheckStackOverflow()->stack_depth()) ==
        if (Definition* definition = current->AsDefinition()) {
          if (definition->HasTemp()) {
            env->Add(definition);
        Definition* definition = current->Cast<Definition>();
        if (definition->HasTemp()) {
        it.RemoveCurrentFromGraph();

  for (intptr_t i = 0; i < block_entry->dominated_blocks().length(); ++i) {
    GrowableArray<Definition*> new_env(env->length());
    new_env.AddArray(*env);
    const intptr_t stack_depth = block->stack_depth();
    ASSERT(stack_depth >= 0);
    } else if (!block->last_instruction()->IsTailCall()) {
    RenameRecursive(block, &new_env, live_phis, variable_liveness,
                    inlining_parameters);

  if ((block_entry->last_instruction()->SuccessorCount() == 1) &&
      block_entry->last_instruction()->SuccessorAt(0)->IsJoinEntry()) {
    JoinEntryInstr* successor =
        block_entry->last_instruction()->SuccessorAt(0)->AsJoinEntry();
    intptr_t pred_index = successor->IndexOfPredecessor(block_entry);
    if (successor->phis() != nullptr) {
      for (intptr_t i = 0; i < successor->phis()->length(); ++i) {
        PhiInstr* phi = (*successor->phis())[i];
        if (phi != nullptr) {
          Definition* input = (*env)[i];
          ASSERT(input != nullptr);
          ASSERT(!input->IsMoveArgument());
          phi->SetInputAt(pred_index, use);
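// A minimal sketch of the renaming walk RenameRecursive performs, with toy
// types (Node, SimpleBlock) in place of the VM's IR: the environment maps
// each local to its reaching definition; stores update it, loads fold to the
// current entry, and every dominator-tree child is visited with its own copy
// so siblings cannot see each other's bindings. Wiring reaching values into
// successor phis is omitted here.
#include <map>
#include <string>
#include <vector>

struct Node {
  std::string name;  // a stand-in for the VM's Definition
};

struct SimpleBlock {
  // (local, value): value == nullptr means a load of `local`,
  // otherwise a store of `value` into `local`.
  std::vector<std::pair<std::string, Node*>> body;
  std::vector<SimpleBlock*> dominated;  // dominator-tree children
  std::map<std::string, Node*> phis;    // phi per local at the block head
  std::vector<Node*> resolved_loads;    // output: reaching def per load
};

using Env = std::map<std::string, Node*>;

void RenameRecursive(SimpleBlock* block, Env env) {
  for (auto& [local, phi] : block->phis) {
    env[local] = phi;  // phis become the new reaching definitions
  }
  for (auto& [local, value] : block->body) {
    if (value != nullptr) {
      env[local] = value;  // a store kills the old reaching definition
    } else {
      block->resolved_loads.push_back(env[local]);  // a load folds away
    }
  }
  for (SimpleBlock* child : block->dominated) {
    RenameRecursive(child, env);  // children get a copy: siblings isolated
  }
}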
void FlowGraph::ValidatePhis() {
  if (!FLAG_prune_dead_locals) {
  BlockEntryInstr* block_entry = preorder()[i];
  Instruction* last_instruction = block_entry->last_instruction();
  if ((last_instruction->SuccessorCount() == 1) &&
      last_instruction->SuccessorAt(0)->IsJoinEntry()) {
    JoinEntryInstr* successor =
        last_instruction->SuccessorAt(0)->AsJoinEntry();
    if (successor->phis() != nullptr) {
      for (intptr_t j = 0; j < successor->phis()->length(); ++j) {
        PhiInstr* phi = (*successor->phis())[j];
        ASSERT(last_instruction->env() != nullptr);
        Definition* current_definition =
            last_instruction->env()->ValueAt(j)->definition();
        ASSERT(successor->env() != nullptr);
        Definition* successor_definition =
            successor->env()->ValueAt(j)->definition();
        if (!current_definition->IsConstant() &&
            !successor_definition->IsConstant()) {
          ASSERT(current_definition == successor_definition);
void FlowGraph::RemoveDeadPhis(GrowableArray<PhiInstr*>* live_phis) {
  JoinEntryInstr* join = it.Current()->AsJoinEntry();
  if (join == nullptr) continue;
  for (PhiIterator phi_it(join); !phi_it.Done(); phi_it.Advance()) {
    PhiInstr* phi = phi_it.Current();
    if (phi == nullptr || phi->is_alive() ||
        (phi->input_use_list() != nullptr) ||
        (phi->env_use_list() == nullptr)) {
    for (Value::Iterator it(phi->env_use_list()); !it.Done();
      Value* use = it.Current();
      if (use->instruction()->MayThrow() &&
          use->instruction()->GetBlock()->InsideTryBlock()) {
        live_phis->Add(phi);

  while (!live_phis->is_empty()) {
    PhiInstr* phi = live_phis->RemoveLast();
    for (intptr_t i = 0; i < phi->InputCount(); i++) {
      Value* val = phi->InputAt(i);
      PhiInstr* used_phi = val->definition()->AsPhi();
      if ((used_phi != nullptr) && !used_phi->is_alive()) {
        used_phi->mark_alive();
        live_phis->Add(used_phi);

  JoinEntryInstr* join = it.Current()->AsJoinEntry();
if (!compile_type.IsNone()) {
if (redef->input_use_list() == nullptr) {

block_it.Advance()) {
!instr_it.Done(); instr_it.Advance()) {
if (auto redef = instruction->AsRedefinition()) {
  redef->ReplaceUsesWith(redef->value()->definition());
  instr_it.RemoveCurrentFromGraph();
} else if (keep_checks) {
} else if (auto def = instruction->AsDefinition()) {
  if (value != nullptr) {
loop_blocks->Add(m->preorder_number());
for (intptr_t i = 0; i < p->PredecessorCount(); ++i) {
  BlockEntryInstr* q = p->PredecessorAt(i);
  if (!loop_blocks->Contains(q->preorder_number())) {
    loop_blocks->Add(q->preorder_number());

LoopHierarchy* FlowGraph::ComputeLoops() const {
  ZoneGrowableArray<BlockEntryInstr*>* loop_headers =
      new (zone()) ZoneGrowableArray<BlockEntryInstr*>();
  BlockEntryInstr* block = it.Current();
  block->set_loop_info(nullptr);
  for (intptr_t i = 0; i < block->PredecessorCount(); ++i) {
    BlockEntryInstr* pred = block->PredecessorAt(i);
    if (block->Dominates(pred)) {
      BitVector* loop_blocks = FindLoopBlocks(pred, block);
      if (block->loop_info() == nullptr) {
        intptr_t id = loop_headers->length();
        block->set_loop_info(new (zone()) LoopInfo(id, block, loop_blocks));
        loop_headers->Add(block);
      } else {
        ASSERT(block->loop_info()->header() == block);
        block->loop_info()->AddBlocks(loop_blocks);
      block->loop_info()->AddBackEdge(pred);
  return new (zone()) LoopHierarchy(loop_headers, preorder_, should_print());

for (intptr_t i = 1; i < preorder_.length(); ++i) {
  const intptr_t block_id = block->block_id();
  if (prologue_info_.Contains(block_id)) {
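// A minimal sketch of FindLoopBlocks above, with toy types: given a back
// edge pred -> header (an edge whose target dominates its source, as the
// Dominates(pred) test above checks), the natural loop body is collected by
// walking predecessors backward from pred until the header is reached.
#include <cstdint>
#include <vector>

struct LoopBlock {
  std::vector<LoopBlock*> predecessors;
  int64_t preorder_number = 0;
};

// Returns one bit per preorder number: true if the block belongs to the
// natural loop of the back edge pred -> header.
std::vector<bool> FindLoopBlocks(LoopBlock* pred,
                                 LoopBlock* header,
                                 int64_t block_count) {
  std::vector<bool> loop_blocks(block_count, false);
  loop_blocks[header->preorder_number] = true;  // header stops the walk
  std::vector<LoopBlock*> worklist;
  if (!loop_blocks[pred->preorder_number]) {
    loop_blocks[pred->preorder_number] = true;
    worklist.push_back(pred);
  }
  while (!worklist.empty()) {
    LoopBlock* p = worklist.back();
    worklist.pop_back();
    for (LoopBlock* q : p->predecessors) {
      if (!loop_blocks[q->preorder_number]) {
        loop_blocks[q->preorder_number] = true;  // q is inside the loop
        worklist.push_back(q);
      }
    }
  }
  return loop_blocks;
}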
if (from_rep == to_rep || to_rep == kNoRepresentation) {
InsertConversion(from_rep, to_rep, use, /*is_environment_use=*/false);

return (rep == kUnboxedInt32) || (rep == kUnboxedUint32) ||
       (rep == kUnboxedInt64);

bool is_environment_use) {
Instruction* insert_before;
PhiInstr* phi = use->instruction()->AsPhi();
if (phi != nullptr) {
  auto predecessor = phi->block()->PredecessorAt(use->use_index());
  insert_before = predecessor->last_instruction();
  ASSERT(insert_before->GetBlock() == predecessor);
} else {
  insert_before = use->instruction();
}
use->instruction()->SpeculativeModeOfInput(use->use_index());
Instruction* deopt_target = nullptr;
deopt_target = insert_before;
Definition* converted = nullptr;
const intptr_t deopt_id = (to == kUnboxedInt32) && (deopt_target != nullptr)
                              ? deopt_target->DeoptimizationTarget()
converted =
    new (Z) IntConverterInstr(from, to, use->CopyWithType(), deopt_id);
} else if ((from == kUnboxedInt32) && (to == kUnboxedDouble)) {
  converted = new Int32ToDoubleInstr(use->CopyWithType());
} else if ((from == kUnboxedInt64) && (to == kUnboxedDouble) &&
  const intptr_t deopt_id = (deopt_target != nullptr)
                                ? deopt_target->DeoptimizationTarget()
  converted = new Int64ToDoubleInstr(use->CopyWithType(), deopt_id);
  const intptr_t deopt_id = (deopt_target != nullptr)
                                ? deopt_target->DeoptimizationTarget()
} else if ((to == kPairOfTagged) && (from == kTagged)) {
  ASSERT(use->instruction()->IsDartReturn());
  Definition* x = new (Z)
      LoadFieldInstr(use->CopyWithType(),
                     InstructionSource());
  Definition* y = new (Z)
      LoadFieldInstr(use->CopyWithType(),
                     InstructionSource());
  converted = new (Z) MakePairInstr(new (Z) Value(x), new (Z) Value(y));
} else if ((to == kTagged) && (from == kPairOfTagged)) {
  FATAL("Illegal conversion %s->%s for the use of %s at %s\n",
        use->definition()->ToCString(), use->instruction()->ToCString());
  FATAL("Illegal conversion %s->%s for a use of v%" Pd "\n",
        use->definition()->ssa_temp_index());
StopInstr* stop = new (Z) StopInstr("Incompatible conversion.");
const intptr_t deopt_id = (deopt_target != nullptr)
                              ? deopt_target->DeoptimizationTarget()
ASSERT(converted != nullptr);
(deopt_target != nullptr) ? deopt_target->env() : nullptr,
if (is_environment_use) {
  use->BindToEnvironment(converted);
} else {
  use->BindTo(converted);
}

Value* use = it.Current();
void FlowGraph::InsertRecordBoxing(Definition* def) {
  const Function* target = nullptr;
  if (auto* call = def->AsStaticCall()) {
  } else if (auto* call = def->AsInstanceCallBase()) {
  } else if (auto* call = def->AsDispatchTableCall()) {
  kernel::UnboxingInfoMetadata* unboxing_metadata =
  ASSERT(unboxing_metadata != nullptr);
  const RecordShape shape = unboxing_metadata->return_info.record_shape;
  ASSERT(shape.num_fields() == 2);
  auto* alloc = new (Z)
      AllocateSmallRecordInstr(InstructionSource(), shape, new (Z) Value(x),
                               new (Z) Value(y), nullptr, def->deopt_id());
  def->ReplaceUsesWith(alloc);
  ASSERT(x->value()->definition() == def);
  ASSERT(y->value()->definition() == def);
  Instruction* insert_before = def->next();
  ASSERT(insert_before != nullptr);

void FlowGraph::InsertConversionsFor(Definition* def) {
  if (from_rep == kPairOfTagged) {
    InsertRecordBoxing(def);
  for (Value::Iterator it(def->input_use_list()); !it.Done(); it.Advance()) {
    ConvertUse(it.Current(), from_rep);
class PhiUnboxingHeuristic : public ValueObject {
 public:
  explicit PhiUnboxingHeuristic(FlowGraph* flow_graph)
      : worklist_(flow_graph, 10) {}

  void Process(PhiInstr* phi) {
    auto new_representation = phi->representation();
    switch (phi->Type()->ToCid()) {
      new_representation = DetermineIfAnyIncomingUnboxedFloats(phi)
      if (new_representation == kUnboxedFloat) {
        for (auto input : phi->inputs()) {
          ASSERT(input->representation() != kUnboxedDouble);
      new_representation = kUnboxedFloat32x4;
      new_representation = kUnboxedInt32x4;
      new_representation = kUnboxedFloat64x2;
    if (new_representation == kTagged && phi->Type()->IsInt()) {
      for (auto input : phi->inputs()) {
        if (input == phi) continue;
        new_representation = kTagged;
        if (new_representation == kTagged) {
          new_representation = input->representation();
        } else if (new_representation != input->representation()) {
          new_representation = kNoRepresentation;
      if (new_representation == kNoRepresentation) {
        new_representation =
      if (new_representation == kTagged && !phi->Type()->can_be_sentinel()) {
#if defined(TARGET_ARCH_IS_64_BIT)
        if (is_aot_ || phi->Type()->ToCid() != kSmiCid) {
          new_representation = kUnboxedInt64;
        const bool has_unboxed_incoming_value = HasUnboxedIncomingValue(phi);
        const bool flows_into_unboxed_use = FlowsIntoUnboxedUse(phi);
        if (has_unboxed_incoming_value && flows_into_unboxed_use) {
          new_representation =
    for (auto input : phi->inputs()) {
      if (input != phi && input->representation() == kUntagged) {
        new_representation = kUntagged;
    phi->set_representation(new_representation);

 private:
  bool DetermineIfAnyIncomingUnboxedFloats(PhiInstr* phi) {
    for (intptr_t i = 0; i < worklist_.definitions().length(); i++) {
      const auto defn = worklist_.definitions()[i];
      for (auto input : defn->inputs()) {
        if (input->representation() == kUnboxedFloat) {
        if (input->IsPhi()) {
          worklist_.Add(input);

  bool HasUnboxedIncomingValue(PhiInstr* phi) {
    for (intptr_t i = 0; i < worklist_.definitions().length(); i++) {
      const auto defn = worklist_.definitions()[i];
      for (auto input : defn->inputs()) {
        } else if (input->IsPhi()) {
          worklist_.Add(input);

  bool FlowsIntoUnboxedUse(PhiInstr* phi) {
    for (intptr_t i = 0; i < worklist_.definitions().length(); i++) {
      const auto defn = worklist_.definitions()[i];
      for (auto use : defn->input_uses()) {
        use->use_index())) ||
            use->instruction()->IsUnbox()) {
        } else if (auto phi_use = use->instruction()->AsPhi()) {
          worklist_.Add(phi_use);

  DefinitionWorklist worklist_;
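// A minimal sketch of the transitive phi scan the heuristic's helpers
// (HasUnboxedIncomingValue, FlowsIntoUnboxedUse) perform, with toy types in
// place of DefinitionWorklist: starting from one phi, follow inputs through
// connected phis with a worklist and report whether any reachable non-phi
// input satisfies the property of interest.
#include <unordered_set>
#include <vector>

struct PhiNode {
  std::vector<PhiNode*> phi_inputs;  // inputs that are themselves phis
  std::vector<bool> unboxed_inputs;  // one flag per non-phi input
};

bool HasUnboxedIncomingValue(PhiNode* phi) {
  std::unordered_set<PhiNode*> visited{phi};
  std::vector<PhiNode*> worklist{phi};
  while (!worklist.empty()) {
    PhiNode* current = worklist.back();
    worklist.pop_back();
    for (bool unboxed : current->unboxed_inputs) {
      if (unboxed) return true;  // an unboxed value reaches this phi web
    }
    for (PhiNode* input : current->phi_inputs) {
      if (visited.insert(input).second) {
        worklist.push_back(input);  // visit each connected phi once
      }
    }
  }
  return false;  // only boxed values flow in
}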
PhiUnboxingHeuristic phi_unboxing_heuristic(this);
block_it.Advance()) {
if (join_entry != nullptr) {
  phi_unboxing_heuristic.Process(phi);

InsertConversionsFor((*graph_entry()->initial_definitions())[i]);
if (auto entry = successor->AsBlockEntryWithInitialDefs()) {
  auto& initial_definitions = *entry->initial_definitions();
  for (intptr_t j = 0; j < initial_definitions.length(); j++) {
    InsertConversionsFor(initial_definitions[j]);

block_it.Advance()) {
InsertConversionsFor(phi);
if (def != nullptr) {
  InsertConversionsFor(def);

block_it.Advance()) {
if (!block->IsCatchBlockEntry()) {
void FlowGraph::ExtractUntaggedPayload(Instruction* instr,
  auto* const untag_payload = new (Z)
  array->BindTo(untag_payload);

const Slot* slot = nullptr;
slot = &Slot::PointerBase_data();
ExtractUntaggedPayload(instr, array, *slot,

void FlowGraph::ExtractNonInternalTypedDataPayload(Instruction* instr,
  auto const type_cid = array->Type()->ToCid();
  ExtractUntaggedPayload(instr, array, Slot::PointerBase_data(),

block_it.Advance()) {
if (auto* const load_indexed = current->AsLoadIndexed()) {
  ExtractNonInternalTypedDataPayload(load_indexed, load_indexed->array(),
                                     load_indexed->class_id());
} else if (auto* const store_indexed = current->AsStoreIndexed()) {
  ExtractNonInternalTypedDataPayload(
      store_indexed, store_indexed->array(), store_indexed->class_id());
} else if (auto* const memory_copy = current->AsMemoryCopy()) {
  ExtractNonInternalTypedDataPayload(memory_copy, memory_copy->src(),
                                     memory_copy->src_cid());
  ExtractNonInternalTypedDataPayload(memory_copy, memory_copy->dest(),
                                     memory_copy->dest_cid());
bool changed = false;
block_it.Advance()) {
if (auto join = block->AsJoinEntry()) {
ASSERT(replacement != nullptr);
if (replacement != current) {
  it.RemoveCurrentFromGraph();
if (replacement != current) {
if (replacement != nullptr) {
  ASSERT(current->IsDefinition());
current->AsDefinition()->HasUses()) {
block_it.Advance()) {
if (instr->IsInstanceCall()) {
  if (!call->HasICData()) {
    const Array& arguments_descriptor =
        ICData::New(function, call->function_name(), arguments_descriptor,
                    call->deopt_id(), call->checked_argument_count(),
                    ICData::kInstance));
    call->set_ic_data(&ic_data);
} else if (instr->IsStaticCall()) {
  if (!call->HasICData()) {
    const Array& arguments_descriptor =
    int num_args_checked =
        call->deopt_id(), num_args_checked, ICData::kStatic));
    call->set_ic_data(&ic_data);
if (!FLAG_truncating_left_shift) return;
block_it.Advance()) {
div_mod_merge.Clear();
sin_cos_merge.Clear();
if (it.Current()->IsBinarySmiOp()) {
  if (binop->op_kind() == Token::kBIT_AND) {
    OptimizeLeftShiftBitAndSmiOp(&it, binop, binop->left()->definition(),
  } else if ((binop->op_kind() == Token::kTRUNCDIV) ||
             (binop->op_kind() == Token::kMOD)) {
    div_mod_merge.Add(binop);
} else if (it.Current()->IsBinaryInt64Op()) {
  if (mintop->op_kind() == Token::kBIT_AND) {
    OptimizeLeftShiftBitAndSmiOp(&it, mintop,
} else if (it.Current()->IsInvokeMathCFunction()) {
  it.Current()->AsInvokeMathCFunction();
  sin_cos_merge.Add(math_unary);
TryMergeTruncDivMod(&div_mod_merge);
if (phi != nullptr) {
if (use_block == dom_block) {
  if (dom_block == dom) return true;
curr = curr->next()) {
  if (curr == instr) return true;

Value* use = it.Current();

block_it.Advance()) {
!instr_it.Done(); instr_it.Advance()) {
if (definition != nullptr && !definition->IsCheckArrayBound()) {
if (redefined != nullptr) {
if ((const_instr != nullptr) && (const_instr->value().IsSmi())) {
  return Smi::Cast(const_instr->value()).Value() >= 0;

if ((instr != nullptr) && (instr->op_kind() == Token::kSHL)) {
void FlowGraph::OptimizeLeftShiftBitAndSmiOp(
    ForwardInstructionIterator* current_iterator,
    Definition* bit_and_instr,
    Definition* left_instr,
    Definition* right_instr) {
  ASSERT(bit_and_instr != nullptr);
  ASSERT((left_instr != nullptr) && (right_instr != nullptr));
  if (!is_positive_or_zero) {
  if (!is_positive_or_zero) return;
  BinarySmiOpInstr* smi_shift_left = nullptr;
  if (bit_and_instr->InputAt(0)->IsSingleUse()) {
  if ((smi_shift_left == nullptr) &&
      (bit_and_instr->InputAt(1)->IsSingleUse())) {
  if (smi_shift_left == nullptr) return;
  smi_shift_left->mark_truncating();
  ASSERT(bit_and_instr->IsBinarySmiOp() || bit_and_instr->IsBinaryInt64Op());
  if (bit_and_instr->IsBinaryInt64Op()) {
    BinarySmiOpInstr* smi_op = new (Z) BinarySmiOpInstr(
        Token::kBIT_AND, new (Z) Value(left_instr), new (Z) Value(right_instr),
    bit_and_instr->ReplaceWith(smi_op, current_iterator);
void FlowGraph::TryMergeTruncDivMod(
    GrowableArray<BinarySmiOpInstr*>* merge_candidates) {
  if (merge_candidates->length() < 2) {
  for (intptr_t i = 0; i < merge_candidates->length(); i++) {
    BinarySmiOpInstr* curr_instr = (*merge_candidates)[i];
    if (curr_instr == nullptr) {
    ASSERT((curr_instr->op_kind() == Token::kTRUNCDIV) ||
           (curr_instr->op_kind() == Token::kMOD));
    const Token::Kind other_kind = (curr_instr->op_kind() == Token::kTRUNCDIV)
    Definition* left_def = curr_instr->left()->definition();
    Definition* right_def = curr_instr->right()->definition();
    for (intptr_t k = i + 1; k < merge_candidates->length(); k++) {
      BinarySmiOpInstr* other_binop = (*merge_candidates)[k];
      if ((other_binop != nullptr) && (other_binop->op_kind() == other_kind) &&
          (other_binop->left()->definition() == left_def) &&
          (other_binop->right()->definition() == right_def)) {
        (*merge_candidates)[k] = nullptr;
        ASSERT(curr_instr->HasUses());
        AppendExtractNthOutputForMerged(
        ASSERT(other_binop->HasUses());
        AppendExtractNthOutputForMerged(
        TruncDivModInstr* div_mod = new (Z) TruncDivModInstr(
            curr_instr->left()->CopyWithType(),
            curr_instr->right()->CopyWithType(), curr_instr->deopt_id());
        curr_instr->ReplaceWith(div_mod, nullptr);
        other_binop->ReplaceUsesWith(div_mod);
        other_binop->RemoveFromGraph();
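// A minimal sketch of the pairing logic in TryMergeTruncDivMod, with toy
// types: scan the candidate list for a truncating-divide and a modulo over
// the same two operand definitions, fuse each such pair into one combined
// operation, and null out the consumed entry so it cannot be matched twice.
#include <cstdint>
#include <vector>

enum class OpKind { kTruncDiv, kMod };

struct BinOp {
  OpKind kind;
  int64_t left_id, right_id;  // identities of the operand definitions
  bool merged = false;        // stands in for replacement by TruncDivMod
};

int MergeTruncDivMod(std::vector<BinOp*>& candidates) {
  int merged_pairs = 0;
  for (size_t i = 0; i < candidates.size(); ++i) {
    BinOp* curr = candidates[i];
    if (curr == nullptr) continue;
    const OpKind other_kind =
        curr->kind == OpKind::kTruncDiv ? OpKind::kMod : OpKind::kTruncDiv;
    for (size_t k = i + 1; k < candidates.size(); ++k) {
      BinOp* other = candidates[k];
      if (other != nullptr && other->kind == other_kind &&
          other->left_id == curr->left_id &&
          other->right_id == curr->right_id) {
        candidates[k] = nullptr;  // consumed; don't match it again
        curr->merged = other->merged = true;
        ++merged_pairs;
        break;
      }
    }
  }
  return merged_pairs;
}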
void FlowGraph::AppendExtractNthOutputForMerged(Definition* instr,
  ExtractNthOutputInstr* extract =
      new (Z) ExtractNthOutputInstr(new (Z) Value(instr), index, rep, cid);
  instr->ReplaceUsesWith(extract);

target->InheritDeoptTarget(graph->zone(), inherit);
join->InheritDeoptTarget(graph->zone(), inherit);
return NewDiamond(mid_point, inherit, circuit, b_true, b_false);
join->InsertPhi(phi);
block_it.Advance()) {
!instr_it.Done(); instr_it.Advance()) {
if (arg_count == 0) {
if (auto static_call = instruction->AsStaticCall()) {
  target = static_call->function().ptr();
} else if (auto instance_call = instruction->AsInstanceCallBase()) {
  target = instance_call->interface_target().ptr();
} else if (auto dispatch_call = instruction->AsDispatchTableCall()) {
  target = dispatch_call->interface_target().ptr();
} else if (auto cachable_call = instruction->AsCachableIdempotentCall()) {
  target = cachable_call->function().ptr();
const intptr_t stack_arguments_size_in_words =
    true, &argument_locations);
for (intptr_t i = 0; i < arg_count; ++i) {
  const auto& [location, rep] = argument_locations[i];
  stack_arguments_size_in_words);
for (auto move_arg : *arguments) {
  if (!move_arg->is_register_move()) {
if (instruction->env() != nullptr) {
intptr_t num_ssa_vars,
    : block_num_(num_blocks),
      ssa_num_(num_ssa_vars),
      detached_defs_(detached_defs) {

block_num_[block->block_id()] = 1;
CollectDetachedMaterializations(block->env());
if (auto* block_with_idefs = block->AsBlockEntryWithInitialDefs()) {
  for (Definition* def : *block_with_idefs->initial_definitions()) {
    RenumberDefinition(def);
    CollectDetachedMaterializations(def->env());
if (auto* join = block->AsJoinEntry()) {
  RenumberDefinition(it.Current());
if (Definition* def = instr->AsDefinition()) {
  RenumberDefinition(def);
CollectDetachedMaterializations(instr->env());
for (auto* def : (*detached_defs_)) {
  RenumberDefinition(def);

intptr_t current_block_index = 0;
for (intptr_t i = 0, n = block_num_.length(); i < n; ++i) {
  if (block_num_[i] >= 0) {
    block_num_[i] = current_block_index++;
block->set_block_id(block_num_[block->block_id()]);

intptr_t new_index = ssa_num_[old_index];
if (new_index < 0) {
  ssa_num_[old_index] = new_index = current_ssa_index_++;

bool IsDetachedDefinition(Definition* def) {
  return def->IsMaterializeObject() && (def->next() == nullptr);

void AddDetachedDefinition(Definition* def) {
  for (intptr_t i = 0, n = detached_defs_->length(); i < n; ++i) {
    if ((*detached_defs_)[i] == def) {
  detached_defs_->Add(def);
  for (intptr_t i = 0, n = def->InputCount(); i < n; ++i) {
    Definition* input = def->InputAt(i)->definition();
    if (IsDetachedDefinition(input)) {
      AddDetachedDefinition(input);
  ASSERT(def->env() == nullptr);

void CollectDetachedMaterializations(Environment* env) {
  if (env == nullptr) {
  for (Environment::DeepIterator it(env); !it.Done(); it.Advance()) {
    Definition* def = it.CurrentValue()->definition();
    if (IsDetachedDefinition(def)) {
      AddDetachedDefinition(def);

GrowableArray<intptr_t> block_num_;
GrowableArray<intptr_t> ssa_num_;
intptr_t current_ssa_index_ = 0;
ZoneGrowableArray<Definition*>* detached_defs_;

if (detached_defs == nullptr) {