27#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_IA32)
// Declares FLAG_use_smi_widening (default: false). When enabled, the
// Smi->Int32 widening pass (WidenSmiToInt32, further down in this file)
// may replace BinarySmiOp instructions with unboxed Int32 operations on
// 32-bit targets. Guarded by the TARGET_ARCH_ARM/IA32 #if above.
29DEFINE_FLAG(
bool, use_smi_widening,
false,
"Enable Smi->Int32 widening pass.");
// Declares FLAG_trace_smi_widening (default: false). When enabled, the
// Smi->Int32 widening pass prints its candidate analysis and gain
// computations via THR_Print for debugging.
30DEFINE_FLAG(
bool, trace_smi_widening,
false,
"Trace Smi->Int32 widening pass.");
// Declares FLAG_prune_dead_locals (default: true). When enabled, the SSA
// renaming pass (RenameRecursive) drops locals that variable liveness
// analysis proves dead, and ValidatePhis is skipped when disabled.
32DEFINE_FLAG(
bool, prune_dead_locals,
true,
"optimize dead locals away");
40 FLAG_reorder_basic_blocks && !
function.IsFfiCallbackTrampoline();
47 Symbols::vm_unsafe_no_bounds_checks(),
53 intptr_t max_block_id,
56 : thread_(
Thread::Current()),
58 current_ssa_temp_index_(0),
59 max_block_id_(max_block_id),
60 parsed_function_(parsed_function),
61 num_direct_parameters_(parsed_function.
function().MakesCopyOfParameters()
63 : parsed_function.
function().NumParameters()),
64 direct_parameter_locations_(
65 parsed_function.
function().num_fixed_parameters()),
66 graph_entry_(graph_entry),
70 optimized_block_order_(),
71 constant_null_(nullptr),
72 constant_dead_(nullptr),
74 should_reorder_blocks_(
76 prologue_info_(prologue_info),
77 loop_hierarchy_(nullptr),
78 loop_invariant_loads_(nullptr),
79 captured_parameters_(new(zone())
BitVector(zone(), variable_count())),
82 should_remove_all_bounds_checks_(
86 &compiler_pass_filters_);
91 &direct_parameter_locations_);
106 const intptr_t fixed_parameters_size_in_bytes =
110 return fixed_parameters_size_in_bytes +
121 return kUnboxedInt64;
123 return kUnboxedDouble;
135 return kUnboxedInt64;
137 return kUnboxedDouble;
139 return kPairOfTagged;
149 Definition* current_defn = current->AsDefinition();
150 if ((replacement !=
nullptr) && (current_defn !=
nullptr)) {
151 Definition* replacement_defn = replacement->AsDefinition();
152 ASSERT(replacement_defn !=
nullptr);
162 if (current_defn ==
nullptr) {
177 : &reverse_postorder_;
182 : &reverse_postorder_;
188 return constant_instr_pool_.LookupValue(
195 if (constant ==
nullptr) {
198 if (representation == kTagged) {
205 constant_instr_pool_.Insert(constant);
212 bool tagged_value_must_be_smi) {
213 switch (target_rep) {
215 return !tagged_value_must_be_smi ||
value.IsSmi();
218 if (
value.IsInteger()) {
224 if (
value.IsInteger()) {
230 return value.IsInteger();
246 value, representation,
251 if (((representation == kUnboxedFloat) ||
252 (representation == kUnboxedDouble)) &&
256 Integer::Cast(
value).AsDoubleValue())),
272 if (
auto par = defn->AsParameter()) {
273 par->set_block(entry);
283 ASSERT(instr->IsDefinition());
288 if (
env !=
nullptr) {
289 env->DeepCopyTo(
zone(), instr);
298 if (instr->
env() !=
nullptr) {
308 ASSERT(instr->IsDefinition());
312 if (
env !=
nullptr) {
313 env->DeepCopyTo(
zone(), instr);
315 return prev->AppendInstruction(instr);
322 if (instr->
env() !=
nullptr) {
334 next_successor_ix_(
block->last_instruction()->SuccessorCount() - 1) {}
346 intptr_t next_successor_ix_;
348 DISALLOW_ALLOCATION();
359 reverse_postorder_.Clear();
368 if (
state.HasNextSuccessor()) {
379 postorder_.Add(block);
383 ASSERT(postorder_.length() == preorder_.length());
388 reverse_postorder_.Add(postorder_[
block_count - i - 1]);
395 bool changed =
false;
398 block_it.Advance()) {
400 if (block->IsGraphEntry())
continue;
405 while (
auto goto_instr = last->AsGoto()) {
419 successor->UnuseAllInputs();
425 last_merged_block = successor;
434 if (last_merged_block !=
nullptr) {
442void FlowGraph::ComputeIsReceiverRecursive(
447 for (intptr_t i = 0; i < phi->
InputCount(); ++i) {
449 if (def->IsParameter() && (def->AsParameter()->env_index() == 0))
continue;
454 ComputeIsReceiverRecursive(def->AsPhi(), unmark);
466void FlowGraph::ComputeIsReceiver(PhiInstr* phi)
const {
467 GrowableArray<PhiInstr*> unmark;
468 ComputeIsReceiverRecursive(phi, &unmark);
471 while (!unmark.is_empty()) {
472 PhiInstr* phi = unmark.RemoveLast();
473 for (Value::Iterator it(phi->input_use_list()); !it.Done(); it.Advance()) {
474 PhiInstr* use = it.Current()->instruction()->AsPhi();
485 if (def->IsParameter())
return (def->AsParameter()->env_index() == 0);
486 if (!def->IsPhi() ||
graph_entry()->HasSingleEntryPoint()) {
494 ComputeIsReceiver(phi);
501 if (!FLAG_use_cha_deopt && !
isolate_group()->all_classes_finalized()) {
508 Value* receiver = call->Receiver();
510 bool receiver_maybe_null =
false;
522 receiver_maybe_null =
type->is_nullable();
527 if (
type->is_nullable()) {
528 receiver_maybe_null =
true;
539 if (receiver_class.
IsNull()) {
541 }
else if (call->HasICData()) {
546 const intptr_t
cid = receiver_class.
id();
551 for (intptr_t i = 0, len =
data->NumberOfChecks(); i < len; i++) {
552 if (!
data->IsUsedAt(i)) {
555 fun =
data->GetTargetAt(i);
557 if (
data->GetReceiverClassIdAt(i) ==
cid || cls.
id() ==
cid) {
567 const String& method_name =
568 (kind == UntaggedFunction::kMethodExtractor)
570 : call->function_name();
574 if (receiver_maybe_null) {
575 const Class& null_class =
590 intptr_t subclass_count = 0;
592 if (!cha.
HasOverride(receiver_class, method_name, &subclass_count)) {
593 if (FLAG_trace_cha) {
595 " **(CHA) Instance call needs no class check since there "
596 "are no overrides of method '%s' on '%s'\n",
599 if (FLAG_use_cha_deopt) {
631 intptr_t receiver_cid) {
636 call->Receiver()->CopyWithType(),
649 false, call->deopt_id()),
657 block_it.Advance()) {
659 !instr_it.Done(); instr_it.Advance()) {
661 if (redefinition !=
nullptr) {
665 Value* original_use = it.Current();
671 THR_Print(
"%s\n", redefinition->ToCString());
683 intptr_t variable_count,
685 : zone_(
Thread::Current()->zone()),
686 variable_count_(variable_count),
687 postorder_(postorder),
688 live_out_(postorder.
length()),
689 kill_(postorder.
length()),
690 live_in_(postorder.
length()) {}
694 bool changed =
false;
778 flow_graph_(flow_graph),
787 assigned_vars_.Clear();
806 assigned_vars_.Add(kill);
809 return assigned_vars_;
819 if (
store->is_dead()) {
822 if (
store->is_last()) {
823 const intptr_t index = flow_graph_->
EnvIndex(&
store->local());
836 const intptr_t index = flow_graph_->
EnvIndex(&
load->local());
874 if (
load !=
nullptr) {
875 const intptr_t index = flow_graph_->
EnvIndex(&
load->local());
876 if (index >= live_in->
length())
continue;
879 last_loads->
Add(index);
886 if (
store !=
nullptr) {
887 const intptr_t index = flow_graph_->
EnvIndex(&
store->local());
888 if (index >= live_in->
length())
continue;
906 const bool is_function_entry = block->IsFunctionEntry();
907 const bool is_osr_entry = block->IsOsrEntry();
908 const bool is_catch_block_entry = block->IsCatchBlockEntry();
909 if (is_function_entry || is_osr_entry || is_catch_block_entry) {
910 const intptr_t parameter_count =
911 (is_osr_entry || is_catch_block_entry)
914 for (intptr_t i = 0; i < parameter_count; ++i) {
919 if (is_function_entry) {
935 if (inlining_parameters !=
nullptr) {
936 for (intptr_t i = 0, n = inlining_parameters->
length(); i < n; ++i) {
938 if (defn->IsConstant()) {
960 dominance_frontier, &live_phis);
964 Rename(&live_phis, &variable_liveness, inlining_parameters);
968 RemoveDeadPhis(&live_phis);
991 intptr_t size = parent_.
length();
1006 for (intptr_t i = 0; i < size; ++i) {
1007 idom.
Add(parent_[i]);
1016 preorder_[0]->ClearDominatedBlocks();
1017 for (intptr_t block_index = size - 1; block_index >= 1; --block_index) {
1030 intptr_t best = pred_index;
1031 if (pred_index > block_index) {
1032 CompressPath(block_index, pred_index, &parent_, &label);
1033 best = label[pred_index];
1037 semi[block_index] =
Utils::Minimum(semi[block_index], semi[best]);
1041 label[block_index] = semi[block_index];
1046 for (intptr_t block_index = 1; block_index < size; ++block_index) {
1047 intptr_t dom_index = idom[block_index];
1048 while (dom_index > semi[block_index]) {
1049 dom_index = idom[dom_index];
1051 idom[block_index] = dom_index;
1052 preorder_[dom_index]->AddDominatedBlock(preorder_[block_index]);
1060 for (intptr_t block_index = 0; block_index < size; ++block_index) {
1063 if (
count <= 1)
continue;
1064 for (intptr_t i = 0; i <
count; ++i) {
1074void FlowGraph::CompressPath(intptr_t start_index,
1075 intptr_t current_index,
1078 intptr_t next_index = (*parent)[current_index];
1079 if (next_index > start_index) {
1080 CompressPath(start_index, next_index, parent, label);
1081 (*label)[current_index] =
1083 (*parent)[current_index] = (*parent)[next_index];
1087void FlowGraph::InsertPhis(
const GrowableArray<BlockEntryInstr*>& preorder,
1088 const GrowableArray<BitVector*>& assigned_vars,
1089 const GrowableArray<BitVector*>& dom_frontier,
1090 GrowableArray<PhiInstr*>* live_phis) {
1102 for (intptr_t block_index = 0; block_index <
block_count; ++block_index) {
1103 has_already.Add(-1);
1108 GrowableArray<BlockEntryInstr*>
worklist;
1109 for (intptr_t var_index = 0; var_index <
variable_count(); ++var_index) {
1110 const bool always_live =
1113 for (intptr_t block_index = 0; block_index <
block_count; ++block_index) {
1114 if (assigned_vars[block_index]->
Contains(var_index)) {
1115 work[block_index] = var_index;
1121 BlockEntryInstr* current =
worklist.RemoveLast();
1123 for (BitVector::Iterator it(dom_frontier[current->preorder_number()]);
1124 !it.Done(); it.Advance()) {
1125 int index = it.Current();
1126 if (has_already[index] < var_index) {
1129 PhiInstr* phi =
join->InsertPhi(
1133 live_phis->Add(phi);
1135 has_already[index] = var_index;
1136 if (work[index] < var_index) {
1137 work[index] = var_index;
1148 constant_dead_ =
GetConstant(Object::optimized_out());
1153 if (
auto join = block->AsJoinEntry()) {
1154 const intptr_t local_phi_count =
variable_count() + join->stack_depth();
1156 const intptr_t osr_phi_count =
1159 if (join->phis() ==
nullptr || (*join->phis())[i] ==
nullptr) {
1160 join->InsertPhi(i, local_phi_count)->mark_alive();
1166void FlowGraph::Rename(GrowableArray<PhiInstr*>* live_phis,
1167 VariableLivenessAnalysis* variable_liveness,
1168 ZoneGrowableArray<Definition*>* inlining_parameters) {
1179 if (entry->catch_entries().is_empty()) {
1180 ASSERT(entry->unchecked_entry() !=
nullptr ? entry->SuccessorCount() == 2
1181 : entry->SuccessorCount() == 1);
1199 RenameRecursive(entry, &
env, live_phis, variable_liveness,
1200 inlining_parameters);
1210 bool should_assign_stack_locations ,
1215 const intptr_t index = (function.IsFactory() ? (i - 1) : i);
1216 return index >= 0 ? ParameterRepresentationAt(function, index)
1219 should_assign_stack_locations, parameter_info);
1222void FlowGraph::PopulateEnvironmentFromFunctionEntry(
1231 const intptr_t inlined_type_args_param =
1239 const auto& [location, representation] = direct_parameter_locations_[i];
1240 if (location.IsInvalid()) {
1245 const intptr_t env_index =
1249 auto param =
new (
zone())
1250 ParameterInstr(function_entry,
1252 i, location, representation);
1256 (*env)[env_index] = param;
1263 if (inlining_parameters !=
nullptr) {
1265 Definition* defn = (*inlining_parameters)[inlined_type_args_param + i];
1266 if (defn->IsConstant()) {
1267 ASSERT(defn->previous() == graph_entry_);
1268 ASSERT(defn->HasSSATemp());
1270 ASSERT(defn->previous() ==
nullptr);
1275 (*env)[index] = defn;
1281 if (reify_generic_argument) {
1284 if (inlining_parameters ==
nullptr) {
1291 defn = (*inlining_parameters)[0];
1293 if (defn->IsConstant()) {
1294 ASSERT(defn->previous() == graph_entry_);
1295 ASSERT(defn->HasSSATemp());
1297 ASSERT(defn->previous() ==
nullptr);
1318 intptr_t env_index) {
1320 compiler::target::frame_layout.FrameSlotForVariableIndex(
1321 num_direct_parameters - env_index),
1325void FlowGraph::PopulateEnvironmentFromOsrEntry(
1326 OsrEntryInstr* osr_entry,
1327 GrowableArray<Definition*>*
env) {
1333 ASSERT(parameter_count ==
env->length());
1334 for (intptr_t i = 0; i < parameter_count; i++) {
1337 : ParameterInstr::kNotFunctionParameter;
1338 ParameterInstr* param =
new (
zone()) ParameterInstr(
1339 osr_entry, i, param_index,
1347void FlowGraph::PopulateEnvironmentFromCatchEntry(
1348 CatchBlockEntryInstr* catch_entry,
1349 GrowableArray<Definition*>*
env) {
1350 const intptr_t raw_exception_var_envindex =
1351 catch_entry->raw_exception_var() !=
nullptr
1352 ?
EnvIndex(catch_entry->raw_exception_var())
1354 const intptr_t raw_stacktrace_var_envindex =
1355 catch_entry->raw_stacktrace_var() !=
nullptr
1356 ?
EnvIndex(catch_entry->raw_stacktrace_var())
1361 intptr_t additional_slots = 0;
1366 if (raw_exception_var_envindex == i) {
1368 }
else if (raw_stacktrace_var_envindex == i) {
1373 if (param_rep == kTagged && param_loc.IsStackSlot()) {
1381 n + additional_slots);
1388 auto param =
new (
Z) ParameterInstr(
1402void FlowGraph::AttachEnvironment(Instruction* instr,
1403 GrowableArray<Definition*>*
env) {
1405 instr->NumberOfInputsConsumedBeforeCall(),
1407 instr->SetEnvironment(deopt_env);
1408 for (Environment::DeepIterator it(deopt_env); !it.Done(); it.Advance()) {
1409 Value* use = it.CurrentValue();
1410 use->definition()->AddEnvUse(use);
1414void FlowGraph::RenameRecursive(
1415 BlockEntryInstr* block_entry,
1416 GrowableArray<Definition*>*
env,
1417 GrowableArray<PhiInstr*>* live_phis,
1418 VariableLivenessAnalysis* variable_liveness,
1419 ZoneGrowableArray<Definition*>* inlining_parameters) {
1421 if (
auto join = block_entry->AsJoinEntry()) {
1422 if (
join->phis() !=
nullptr) {
1424 ASSERT(
join->phis()->length() == local_phi_count);
1425 for (intptr_t i = 0; i < local_phi_count; ++i) {
1426 PhiInstr* phi = (*
join->phis())[i];
1427 if (phi !=
nullptr) {
1430 if (block_entry->InsideTryBlock() && !phi->is_alive()) {
1437 live_phis->Add(phi);
1442 }
else if (
auto osr_entry = block_entry->AsOsrEntry()) {
1443 PopulateEnvironmentFromOsrEntry(osr_entry,
env);
1444 }
else if (
auto function_entry = block_entry->AsFunctionEntry()) {
1446 PopulateEnvironmentFromFunctionEntry(
1447 function_entry,
env, live_phis, variable_liveness, inlining_parameters);
1448 }
else if (
auto catch_entry = block_entry->AsCatchBlockEntry()) {
1449 PopulateEnvironmentFromCatchEntry(catch_entry,
env);
1452 if (!block_entry->IsGraphEntry() &&
1453 !block_entry->IsBlockEntryWithInitialDefs()) {
1456 BitVector* live_in = variable_liveness->GetLiveInSet(block_entry);
1460 if (FLAG_prune_dead_locals && !live_in->Contains(i) &&
1468 AttachEnvironment(block_entry,
env);
1471 for (ForwardInstructionIterator it(block_entry); !it.Done(); it.Advance()) {
1472 Instruction* current = it.Current();
1475 if (current->NeedsEnvironment()) {
1476 AttachEnvironment(current,
env);
1484 for (intptr_t i = current->InputCount() - 1; i >= 0; --i) {
1485 Value* v = current->InputAt(i);
1488 Definition* reaching_defn =
env->RemoveLast();
1489 Definition* input_defn = v->definition();
1490 if (input_defn != reaching_defn) {
1495 if (input_defn->IsConstant() && reaching_defn->IsPhi()) {
1497 auto constant =
GetConstant(input_defn->AsConstant()->value());
1498 current->ReplaceInEnvironment(reaching_defn, constant);
1499 reaching_defn = constant;
1503 ASSERT(input_defn->IsLoadLocal() || input_defn->IsStoreLocal() ||
1504 input_defn->IsDropTemps() || input_defn->IsMakeTemp() ||
1505 (input_defn->IsConstant() && reaching_defn->IsConstant()));
1508 ASSERT(reaching_defn->ssa_temp_index() != -1);
1510 v->set_definition(reaching_defn);
1511 input_defn = reaching_defn;
1513 input_defn->AddInputUse(v);
1518 Definition*
result =
nullptr;
1519 switch (current->tag()) {
1520 case Instruction::kLoadLocal: {
1521 LoadLocalInstr*
load = current->Cast<LoadLocalInstr>();
1529 PhiInstr* phi =
result->AsPhi();
1530 if ((phi !=
nullptr) && !phi->is_alive()) {
1532 live_phis->Add(phi);
1535 if (FLAG_prune_dead_locals &&
1536 variable_liveness->IsLastLoad(block_entry,
load)) {
1542 if (
load->local().is_captured_parameter()) {
1543 captured_parameters_->
Add(index);
1546 if (phi !=
nullptr) {
1560 if (!phi->HasType()) {
1562 auto* phis = phi->block()->phis();
1563 if ((index < phis->
length()) && (*phis)[index] == phi) {
1564 phi->UpdateType(*
load->local().inferred_type());
1573 case Instruction::kStoreLocal: {
1574 StoreLocalInstr*
store = current->Cast<StoreLocalInstr>();
1578 if (!FLAG_prune_dead_locals ||
1579 variable_liveness->IsStoreAlive(block_entry,
store)) {
1587 case Instruction::kDropTemps: {
1589 DropTempsInstr* drop = current->Cast<DropTempsInstr>();
1590 for (intptr_t j = 0; j < drop->num_temps(); j++) {
1593 if (drop->value() !=
nullptr) {
1594 result = drop->value()->definition();
1596 ASSERT((drop->value() !=
nullptr) || !drop->HasTemp());
1600 case Instruction::kConstant: {
1601 ConstantInstr* constant = current->Cast<ConstantInstr>();
1602 if (constant->HasTemp()) {
1608 case Instruction::kMakeTemp: {
1614 case Instruction::kMoveArgument:
1618 case Instruction::kCheckStackOverflow:
1621 current->AsCheckStackOverflow()->stack_depth()) ==
1627 if (Definition* definition = current->AsDefinition()) {
1628 if (definition->HasTemp()) {
1631 env->Add(definition);
1638 Definition* definition = current->Cast<Definition>();
1639 if (definition->HasTemp()) {
1643 it.RemoveCurrentFromGraph();
1648 for (intptr_t i = 0; i < block_entry->dominated_blocks().length(); ++i) {
1650 GrowableArray<Definition*> new_env(
env->length());
1651 new_env.AddArray(*
env);
1656 const intptr_t stack_depth = block->stack_depth();
1657 ASSERT(stack_depth >= 0);
1661 }
else if (!block->last_instruction()->IsTailCall()) {
1665 RenameRecursive(block, &new_env, live_phis, variable_liveness,
1666 inlining_parameters);
1671 if ((block_entry->last_instruction()->SuccessorCount() == 1) &&
1672 block_entry->last_instruction()->SuccessorAt(0)->IsJoinEntry()) {
1673 JoinEntryInstr* successor =
1674 block_entry->last_instruction()->SuccessorAt(0)->AsJoinEntry();
1675 intptr_t pred_index = successor->IndexOfPredecessor(block_entry);
1677 if (successor->phis() !=
nullptr) {
1678 for (intptr_t i = 0; i < successor->phis()->
length(); ++i) {
1679 PhiInstr* phi = (*successor->phis())[i];
1680 if (phi !=
nullptr) {
1682 Definition* input = (*env)[i];
1683 ASSERT(input !=
nullptr);
1684 ASSERT(!input->IsMoveArgument());
1686 phi->SetInputAt(pred_index, use);
1694void FlowGraph::ValidatePhis() {
1695 if (!FLAG_prune_dead_locals) {
1701 BlockEntryInstr* block_entry =
preorder()[i];
1702 Instruction* last_instruction = block_entry->last_instruction();
1704 if ((last_instruction->SuccessorCount() == 1) &&
1705 last_instruction->SuccessorAt(0)->IsJoinEntry()) {
1706 JoinEntryInstr* successor =
1707 last_instruction->SuccessorAt(0)->AsJoinEntry();
1708 if (successor->phis() !=
nullptr) {
1709 for (intptr_t j = 0; j < successor->phis()->
length(); ++j) {
1710 PhiInstr* phi = (*successor->phis())[j];
1715 ASSERT(last_instruction->env() !=
nullptr);
1716 Definition* current_definition =
1717 last_instruction->env()->ValueAt(j)->definition();
1718 ASSERT(successor->env() !=
nullptr);
1719 Definition* successor_definition =
1720 successor->env()->ValueAt(j)->definition();
1721 if (!current_definition->IsConstant() &&
1722 !successor_definition->IsConstant()) {
1723 ASSERT(current_definition == successor_definition);
1733void FlowGraph::RemoveDeadPhis(GrowableArray<PhiInstr*>* live_phis) {
1738 JoinEntryInstr*
join = it.Current()->AsJoinEntry();
1739 if (join ==
nullptr)
continue;
1740 for (PhiIterator phi_it(join); !phi_it.Done(); phi_it.Advance()) {
1741 PhiInstr* phi = phi_it.Current();
1742 if (phi ==
nullptr || phi->is_alive() ||
1743 (phi->input_use_list() !=
nullptr) ||
1744 (phi->env_use_list() ==
nullptr)) {
1747 for (Value::Iterator it(phi->env_use_list()); !it.Done();
1749 Value* use = it.Current();
1750 if (use->instruction()->MayThrow() &&
1751 use->instruction()->GetBlock()->InsideTryBlock()) {
1752 live_phis->Add(phi);
1761 while (!live_phis->is_empty()) {
1762 PhiInstr* phi = live_phis->RemoveLast();
1763 for (intptr_t i = 0; i < phi->InputCount(); i++) {
1764 Value* val = phi->InputAt(i);
1765 PhiInstr* used_phi = val->definition()->AsPhi();
1766 if ((used_phi !=
nullptr) && !used_phi->is_alive()) {
1767 used_phi->mark_alive();
1768 live_phis->Add(used_phi);
1774 JoinEntryInstr*
join = it.Current()->AsJoinEntry();
1794 if (!compile_type.
IsNone()) {
1801 if (redef->input_use_list() ==
nullptr) {
1805 redef->RemoveFromGraph();
1817 block_it.Advance()) {
1820 !instr_it.Done(); instr_it.Advance()) {
1822 if (
auto redef = instruction->AsRedefinition()) {
1823 redef->ReplaceUsesWith(redef->value()->definition());
1824 instr_it.RemoveCurrentFromGraph();
1825 }
else if (keep_checks) {
1827 }
else if (
auto def = instruction->AsDefinition()) {
1829 if (
value !=
nullptr) {
1845 loop_blocks->
Add(m->preorder_number());
1851 for (intptr_t i = 0; i < p->PredecessorCount(); ++i) {
1852 BlockEntryInstr* q = p->PredecessorAt(i);
1853 if (!loop_blocks->
Contains(q->preorder_number())) {
1854 loop_blocks->
Add(q->preorder_number());
1862LoopHierarchy* FlowGraph::ComputeLoops()
const {
1865 ZoneGrowableArray<BlockEntryInstr*>* loop_headers =
1866 new (
zone()) ZoneGrowableArray<BlockEntryInstr*>();
1868 BlockEntryInstr* block = it.Current();
1871 block->set_loop_info(
nullptr);
1873 for (intptr_t i = 0; i < block->PredecessorCount(); ++i) {
1874 BlockEntryInstr* pred = block->PredecessorAt(i);
1875 if (block->Dominates(pred)) {
1879 BitVector* loop_blocks = FindLoopBlocks(pred, block);
1880 if (block->loop_info() ==
nullptr) {
1881 intptr_t
id = loop_headers->length();
1882 block->set_loop_info(
new (
zone()) LoopInfo(
id, block, loop_blocks));
1883 loop_headers->Add(block);
1885 ASSERT(block->loop_info()->header() == block);
1886 block->loop_info()->AddBlocks(loop_blocks);
1888 block->loop_info()->AddBackEdge(pred);
1895 return new (
zone()) LoopHierarchy(loop_headers, preorder_,
should_print());
1901 for (intptr_t i = 1; i < preorder_.length(); ++i) {
1906 const intptr_t block_id = block->
block_id();
1907 if (prologue_info_.
Contains(block_id)) {
1921 if (from_rep == to_rep || to_rep == kNoRepresentation) {
1924 InsertConversion(from_rep, to_rep, use,
false);
1928 return (rep == kUnboxedInt32) || (rep == kUnboxedUint32) ||
1929 (rep == kUnboxedInt64);
1947 bool is_environment_use) {
1949 Instruction* insert_before;
1950 PhiInstr* phi = use->instruction()->AsPhi();
1951 if (phi !=
nullptr) {
1954 auto predecessor = phi->block()->PredecessorAt(use->use_index());
1955 insert_before = predecessor->last_instruction();
1956 ASSERT(insert_before->GetBlock() == predecessor);
1958 insert_before = use->instruction();
1961 use->instruction()->SpeculativeModeOfInput(use->use_index());
1962 Instruction* deopt_target =
nullptr;
1964 deopt_target = insert_before;
1967 Definition* converted =
nullptr;
1969 const intptr_t deopt_id = (to == kUnboxedInt32) && (deopt_target !=
nullptr)
1970 ? deopt_target->DeoptimizationTarget()
1973 new (
Z) IntConverterInstr(from, to, use->CopyWithType(), deopt_id);
1974 }
else if ((from == kUnboxedInt32) && (to == kUnboxedDouble)) {
1975 converted =
new Int32ToDoubleInstr(use->CopyWithType());
1976 }
else if ((from == kUnboxedInt64) && (to == kUnboxedDouble) &&
1978 const intptr_t deopt_id = (deopt_target !=
nullptr)
1979 ? deopt_target->DeoptimizationTarget()
1982 converted =
new Int64ToDoubleInstr(use->CopyWithType(), deopt_id);
1984 const intptr_t deopt_id = (deopt_target !=
nullptr)
1985 ? deopt_target->DeoptimizationTarget()
1991 }
else if ((to == kPairOfTagged) && (from == kTagged)) {
1994 ASSERT(use->instruction()->IsDartReturn());
1995 Definition*
x =
new (
Z)
1996 LoadFieldInstr(use->CopyWithType(),
1998 thread(), compiler::target::Record::field_offset(0)),
1999 InstructionSource());
2001 Definition*
y =
new (
Z)
2002 LoadFieldInstr(use->CopyWithType(),
2004 thread(), compiler::target::Record::field_offset(1)),
2005 InstructionSource());
2007 converted =
new (
Z) MakePairInstr(
new (
Z)
Value(
x),
new (
Z)
Value(
y));
2008 }
else if ((to == kTagged) && (from == kPairOfTagged)) {
2016 FATAL(
"Illegal conversion %s->%s for the use of %s at %s\n",
2019 use->definition()->ToCString(), use->instruction()->ToCString());
2021 FATAL(
"Illegal conversion %s->%s for a use of v%" Pd "\n",
2024 use->definition()->ssa_temp_index());
2033 StopInstr* stop =
new (
Z) StopInstr(
"Incompatible conversion.");
2036 const intptr_t deopt_id = (deopt_target !=
nullptr)
2037 ? deopt_target->DeoptimizationTarget()
2045 ASSERT(converted !=
nullptr);
2047 (deopt_target !=
nullptr) ? deopt_target->env() : nullptr,
2049 if (is_environment_use) {
2050 use->BindToEnvironment(converted);
2052 use->BindTo(converted);
2059 Value* use = it.Current();
2068void FlowGraph::InsertRecordBoxing(Definition* def) {
2071 const Function*
target =
nullptr;
2072 if (
auto* call = def->AsStaticCall()) {
2073 target = &(call->function());
2074 }
else if (
auto* call = def->AsInstanceCallBase()) {
2076 }
else if (
auto* call = def->AsDispatchTableCall()) {
2083 kernel::UnboxingInfoMetadata* unboxing_metadata =
2084 kernel::UnboxingInfoMetadataOf(*
target,
Z);
2085 ASSERT(unboxing_metadata !=
nullptr);
2086 const RecordShape shape = unboxing_metadata->return_info.record_shape;
2087 ASSERT(shape.num_fields() == 2);
2093 auto* alloc =
new (
Z)
2094 AllocateSmallRecordInstr(InstructionSource(), shape,
new (
Z)
Value(
x),
2095 new (
Z)
Value(
y),
nullptr, def->deopt_id());
2096 def->ReplaceUsesWith(alloc);
2099 ASSERT(
x->value()->definition() == def);
2100 ASSERT(
y->value()->definition() == def);
2101 Instruction* insert_before = def->next();
2102 ASSERT(insert_before !=
nullptr);
2108void FlowGraph::InsertConversionsFor(Definition* def) {
2113 if (from_rep == kPairOfTagged) {
2115 InsertRecordBoxing(def);
2120 for (Value::Iterator it(def->input_use_list()); !it.Done(); it.Advance()) {
2121 ConvertUse(it.Current(), from_rep);
2126class PhiUnboxingHeuristic :
public ValueObject {
2128 explicit PhiUnboxingHeuristic(FlowGraph* flow_graph)
2129 : worklist_(flow_graph, 10) {}
2131 void Process(PhiInstr* phi) {
2132 auto new_representation = phi->representation();
2133 switch (phi->Type()->ToCid()) {
2136 new_representation = DetermineIfAnyIncomingUnboxedFloats(phi)
2140 if (new_representation == kUnboxedFloat) {
2141 for (
auto input : phi->
inputs()) {
2142 ASSERT(input->representation() != kUnboxedDouble);
2150 new_representation = kUnboxedFloat32x4;
2155 new_representation = kUnboxedInt32x4;
2160 new_representation = kUnboxedFloat64x2;
2165 if (new_representation == kTagged && phi->Type()->IsInt()) {
2169 for (
auto input : phi->
inputs()) {
2170 if (input == phi)
continue;
2173 new_representation = kTagged;
2177 if (new_representation == kTagged) {
2178 new_representation = input->representation();
2179 }
else if (new_representation != input->representation()) {
2184 new_representation = kNoRepresentation;
2188 if (new_representation == kNoRepresentation) {
2192 new_representation =
2199 if (new_representation == kTagged && !phi->Type()->can_be_sentinel()) {
2200#if defined(TARGET_ARCH_IS_64_BIT)
2204 if (is_aot_ || phi->Type()->ToCid() != kSmiCid) {
2205 new_representation = kUnboxedInt64;
2213 const bool has_unboxed_incoming_value = HasUnboxedIncomingValue(phi);
2214 const bool flows_into_unboxed_use = FlowsIntoUnboxedUse(phi);
2216 if (has_unboxed_incoming_value && flows_into_unboxed_use) {
2217 new_representation =
2232 for (
auto input : phi->
inputs()) {
2233 if (input != phi && input->representation() == kUntagged) {
2234 new_representation = kUntagged;
2239 phi->set_representation(new_representation);
2246 bool DetermineIfAnyIncomingUnboxedFloats(PhiInstr* phi) {
2249 for (intptr_t i = 0; i < worklist_.definitions().
length(); i++) {
2250 const auto defn = worklist_.definitions()[i];
2251 for (
auto input : defn->
inputs()) {
2252 if (input->representation() == kUnboxedFloat) {
2255 if (input->IsPhi()) {
2256 worklist_.Add(input);
2266 bool HasUnboxedIncomingValue(PhiInstr* phi) {
2269 for (intptr_t i = 0; i < worklist_.definitions().
length(); i++) {
2270 const auto defn = worklist_.definitions()[i];
2271 for (
auto input : defn->
inputs()) {
2274 }
else if (input->IsPhi()) {
2275 worklist_.Add(input);
2284 bool FlowsIntoUnboxedUse(PhiInstr* phi) {
2287 for (intptr_t i = 0; i < worklist_.definitions().
length(); i++) {
2288 const auto defn = worklist_.definitions()[i];
2289 for (
auto use : defn->input_uses()) {
2291 use->use_index())) ||
2292 use->instruction()->IsUnbox()) {
2294 }
else if (
auto phi_use = use->instruction()->AsPhi()) {
2295 worklist_.Add(phi_use);
2303 DefinitionWorklist worklist_;
2310 PhiUnboxingHeuristic phi_unboxing_heuristic(
this);
2312 block_it.Advance()) {
2314 if (join_entry !=
nullptr) {
2317 phi_unboxing_heuristic.Process(phi);
2326 InsertConversionsFor((*
graph_entry()->initial_definitions())[i]);
2330 if (
auto entry = successor->AsBlockEntryWithInitialDefs()) {
2331 auto& initial_definitions = *entry->initial_definitions();
2332 for (intptr_t j = 0; j < initial_definitions.length(); j++) {
2333 InsertConversionsFor(initial_definitions[j]);
2341 block_it.Advance()) {
2348 InsertConversionsFor(phi);
2353 if (def !=
nullptr) {
2354 InsertConversionsFor(def);
2360#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_IA32)
2368static bool BenefitsFromWidening(BinarySmiOpInstr* smi_op) {
2371 switch (smi_op->op_kind()) {
2385static intptr_t LoopId(BlockEntryInstr* block) {
2386 LoopInfo* loop = block->loop_info();
2387 if (loop !=
nullptr) {
2394 if (!FLAG_use_smi_widening) {
2398 GrowableArray<BinarySmiOpInstr*> candidates;
2403 block_it.Advance()) {
2404 for (ForwardInstructionIterator instr_it(block_it.Current());
2405 !instr_it.Done(); instr_it.Advance()) {
2406 BinarySmiOpInstr* smi_op = instr_it.Current()->AsBinarySmiOp();
2407 if ((smi_op !=
nullptr) && smi_op->HasSSATemp() &&
2408 BenefitsFromWidening(smi_op) && CanBeWidened(smi_op)) {
2409 candidates.Add(smi_op);
2414 if (candidates.is_empty()) {
2436 DefinitionWorklist
worklist(
this, candidates.length());
2437 for (intptr_t i = 0; i < candidates.length(); i++) {
2438 BinarySmiOpInstr* op = candidates[i];
2439 if (op->WasEliminated() || processed->Contains(op->ssa_temp_index())) {
2444 THR_Print(
"analysing candidate: %s\n", op->ToCString());
2453 Definition* defn =
worklist.definitions()[j];
2459 if (defn->IsBinarySmiOp() &&
2460 BenefitsFromWidening(defn->AsBinarySmiOp())) {
2463 THR_Print(
"^ [%" Pd "] (o) %s\n", gain, defn->ToCString());
2467 const intptr_t defn_loop = LoopId(defn->GetBlock());
2470 for (intptr_t k = 0; k < defn->InputCount(); k++) {
2471 Definition* input = defn->InputAt(k)->definition();
2472 if (input->IsBinarySmiOp() && CanBeWidened(input->AsBinarySmiOp())) {
2474 }
else if (input->IsPhi() && (input->Type()->ToCid() == kSmiCid)) {
2476 }
else if (input->IsBinaryInt64Op()) {
2480 THR_Print(
"^ [%" Pd "] (i) %s\n", gain, input->ToCString());
2482 }
else if (defn_loop == LoopId(input->GetBlock()) &&
2483 (input->Type()->ToCid() == kSmiCid)) {
2491 THR_Print(
"v [%" Pd "] (i) %s\n", gain, input->ToCString());
2497 for (
Value* use = defn->input_use_list(); use !=
nullptr;
2498 use = use->next_use()) {
2499 Instruction* instr = use->instruction();
2500 Definition* use_defn = instr->AsDefinition();
2501 if (use_defn ==
nullptr) {
2504 ASSERT(!instr->IsMoveArgument());
2505 if (!instr->IsReturnBase() &&
2506 (use->use_index() >= instr->ArgumentCount())) {
2510 use->instruction()->ToCString());
2514 }
else if (use_defn->IsBinarySmiOp() &&
2515 CanBeWidened(use_defn->AsBinarySmiOp())) {
2517 }
else if (use_defn->IsPhi() &&
2518 use_defn->AsPhi()->Type()->ToCid() == kSmiCid) {
2520 }
else if (use_defn->IsBinaryInt64Op()) {
2527 use->instruction()->ToCString());
2529 }
else if (defn_loop == LoopId(instr->GetBlock())) {
2533 use->instruction()->ToCString());
2539 processed->AddAll(
worklist.contains_vector());
2542 THR_Print(
"~ %s gain %" Pd "\n", op->ToCString(), gain);
2549 Definition* defn =
worklist.definitions()[j];
2550 ASSERT(defn->IsPhi() || defn->IsBinarySmiOp());
2554 for (Value::Iterator it(defn->input_use_list()); !it.Done();
2556 it.Current()->SetReachingType(
nullptr);
2559 if (defn->IsBinarySmiOp()) {
2560 BinarySmiOpInstr* smi_op = defn->AsBinarySmiOp();
2561 BinaryInt32OpInstr* int32_op =
new (
Z) BinaryInt32OpInstr(
2562 smi_op->op_kind(), smi_op->left()->CopyWithType(),
2563 smi_op->right()->CopyWithType(), smi_op->DeoptimizationTarget());
2565 smi_op->ReplaceWith(int32_op,
nullptr);
2566 }
else if (defn->IsPhi()) {
2567 defn->AsPhi()->set_representation(kUnboxedInt32);
2568 ASSERT(defn->Type()->IsInt());
2589 block_it.Advance()) {
2591 if (!block->IsCatchBlockEntry()) {
2592 block->RemoveEnvironment();
2614void FlowGraph::ExtractUntaggedPayload(
Instruction* instr,
2618 auto*
const untag_payload =
new (
Z)
2621 array->
BindTo(untag_payload);
2640 const Slot* slot =
nullptr;
2642 slot = &Slot::PointerBase_data();
2647 ExtractUntaggedPayload(instr, array, *slot,
2652void FlowGraph::ExtractNonInternalTypedDataPayload(
Instruction* instr,
2659 auto const type_cid = array->
Type()->
ToCid();
2671 ExtractUntaggedPayload(instr, array, Slot::PointerBase_data(),
2678 block_it.Advance()) {
2682 if (
auto*
const load_indexed = current->AsLoadIndexed()) {
2683 ExtractNonInternalTypedDataPayload(load_indexed, load_indexed->array(),
2684 load_indexed->class_id());
2685 }
else if (
auto*
const store_indexed = current->AsStoreIndexed()) {
2686 ExtractNonInternalTypedDataPayload(
2687 store_indexed, store_indexed->array(), store_indexed->class_id());
2688 }
else if (
auto*
const memory_copy = current->AsMemoryCopy()) {
2689 ExtractNonInternalTypedDataPayload(memory_copy, memory_copy->src(),
2690 memory_copy->src_cid());
2691 ExtractNonInternalTypedDataPayload(memory_copy, memory_copy->dest(),
2692 memory_copy->dest_cid());
2699 bool changed =
false;
2702 block_it.Advance()) {
2704 if (
auto join = block->AsJoinEntry()) {
2715 ASSERT(replacement !=
nullptr);
2718 if (replacement != current) {
2720 it.RemoveCurrentFromGraph();
2736 if (replacement != current) {
2739 if (replacement !=
nullptr) {
2740 ASSERT(current->IsDefinition());
2744 current->AsDefinition()->HasUses()) {
2764 block_it.Advance()) {
2768 if (instr->IsInstanceCall()) {
2770 if (!call->HasICData()) {
2771 const Array& arguments_descriptor =
2775 ICData::New(
function, call->function_name(), arguments_descriptor,
2776 call->deopt_id(), call->checked_argument_count(),
2777 ICData::kInstance));
2778 call->set_ic_data(&ic_data);
2780 }
else if (instr->IsStaticCall()) {
2782 if (!call->HasICData()) {
2783 const Array& arguments_descriptor =
2786 int num_args_checked =
2791 call->deopt_id(), num_args_checked, ICData::kStatic));
2792 call->set_ic_data(&ic_data);
2803 if (!FLAG_truncating_left_shift)
return;
2807 block_it.Advance()) {
2809 div_mod_merge.
Clear();
2810 sin_cos_merge.
Clear();
2813 if (it.
Current()->IsBinarySmiOp()) {
2815 if (binop->
op_kind() == Token::kBIT_AND) {
2816 OptimizeLeftShiftBitAndSmiOp(&it, binop, binop->
left()->
definition(),
2818 }
else if ((binop->
op_kind() == Token::kTRUNCDIV) ||
2819 (binop->
op_kind() == Token::kMOD)) {
2820 if (binop->HasUses()) {
2821 div_mod_merge.
Add(binop);
2824 }
else if (it.
Current()->IsBinaryInt64Op()) {
2826 if (mintop->
op_kind() == Token::kBIT_AND) {
2827 OptimizeLeftShiftBitAndSmiOp(&it, mintop,
2831 }
else if (it.
Current()->IsInvokeMathCFunction()) {
2833 it.
Current()->AsInvokeMathCFunction();
2837 sin_cos_merge.
Add(math_unary);
2842 TryMergeTruncDivMod(&div_mod_merge);
2854 if (phi !=
nullptr) {
2859 if (use_block == dom_block) {
2861 if (dom_block ==
dom)
return true;
2864 curr = curr->next()) {
2865 if (curr == instr)
return true;
2878 Value* use = it.Current();
2887 block_it.Advance()) {
2889 !instr_it.Done(); instr_it.Advance()) {
2893 if (definition !=
nullptr && !definition->IsCheckArrayBound()) {
2895 if (redefined !=
nullptr) {
2909 if ((const_instr !=
nullptr) && (const_instr->
value().IsSmi())) {
2910 return Smi::Cast(const_instr->
value()).Value() >= 0;
2917 if ((instr !=
nullptr) && (instr->
op_kind() == Token::kSHL)) {
2923void FlowGraph::OptimizeLeftShiftBitAndSmiOp(
2924 ForwardInstructionIterator* current_iterator,
2925 Definition* bit_and_instr,
2926 Definition* left_instr,
2927 Definition* right_instr) {
2928 ASSERT(bit_and_instr !=
nullptr);
2929 ASSERT((left_instr !=
nullptr) && (right_instr !=
nullptr));
2933 if (!is_positive_or_zero) {
2936 if (!is_positive_or_zero)
return;
2938 BinarySmiOpInstr* smi_shift_left =
nullptr;
2939 if (bit_and_instr->InputAt(0)->IsSingleUse()) {
2942 if ((smi_shift_left ==
nullptr) &&
2943 (bit_and_instr->InputAt(1)->IsSingleUse())) {
2946 if (smi_shift_left ==
nullptr)
return;
2949 smi_shift_left->mark_truncating();
2950 ASSERT(bit_and_instr->IsBinarySmiOp() || bit_and_instr->IsBinaryInt64Op());
2951 if (bit_and_instr->IsBinaryInt64Op()) {
2953 BinarySmiOpInstr* smi_op =
new (
Z) BinarySmiOpInstr(
2954 Token::kBIT_AND,
new (
Z)
Value(left_instr),
new (
Z)
Value(right_instr),
2956 bit_and_instr->ReplaceWith(smi_op, current_iterator);
2977void FlowGraph::TryMergeTruncDivMod(
2978 GrowableArray<BinarySmiOpInstr*>* merge_candidates) {
2979 if (merge_candidates->length() < 2) {
2983 for (intptr_t i = 0; i < merge_candidates->length(); i++) {
2984 BinarySmiOpInstr* curr_instr = (*merge_candidates)[i];
2985 if (curr_instr ==
nullptr) {
2989 ASSERT((curr_instr->op_kind() == Token::kTRUNCDIV) ||
2990 (curr_instr->op_kind() == Token::kMOD));
2992 const Token::Kind other_kind = (curr_instr->op_kind() == Token::kTRUNCDIV)
2995 Definition* left_def = curr_instr->left()->definition();
2996 Definition* right_def = curr_instr->right()->definition();
2997 for (intptr_t k = i + 1; k < merge_candidates->length(); k++) {
2998 BinarySmiOpInstr* other_binop = (*merge_candidates)[k];
3000 if ((other_binop !=
nullptr) && (other_binop->op_kind() == other_kind) &&
3001 (other_binop->left()->definition() == left_def) &&
3002 (other_binop->right()->definition() == right_def)) {
3003 (*merge_candidates)[k] =
nullptr;
3004 ASSERT(curr_instr->HasUses());
3005 AppendExtractNthOutputForMerged(
3008 ASSERT(other_binop->HasUses());
3009 AppendExtractNthOutputForMerged(
3015 TruncDivModInstr* div_mod =
new (
Z) TruncDivModInstr(
3016 curr_instr->left()->CopyWithType(),
3017 curr_instr->right()->CopyWithType(), curr_instr->deopt_id());
3018 curr_instr->ReplaceWith(div_mod,
nullptr);
3019 other_binop->ReplaceUsesWith(div_mod);
3020 other_binop->RemoveFromGraph();
3030void FlowGraph::AppendExtractNthOutputForMerged(Definition* instr,
3034 ExtractNthOutputInstr*
extract =
3035 new (
Z) ExtractNthOutputInstr(
new (
Z)
Value(instr), index, rep,
cid);
3036 instr->ReplaceUsesWith(
extract);
3048 target->InheritDeoptTarget(graph->
zone(), inherit);
3056 join->InheritDeoptTarget(graph->
zone(), inherit);
3064 got->InheritDeoptTarget(graph->
zone(), inherit);
3110 bt->AppendInstruction(gotot);
3113 bf->AppendInstruction(gotof);
3156 return NewDiamond(mid_point, inherit, circuit, b_true, b_false);
3173 join->InsertPhi(phi);
3185 block_it.Advance()) {
3188 !instr_it.Done(); instr_it.Advance()) {
3191 if (arg_count == 0) {
3196 if (
auto static_call = instruction->AsStaticCall()) {
3197 target = static_call->function().ptr();
3198 }
else if (
auto instance_call = instruction->AsInstanceCallBase()) {
3199 target = instance_call->interface_target().ptr();
3200 }
else if (
auto dispatch_call = instruction->AsDispatchTableCall()) {
3201 target = dispatch_call->interface_target().ptr();
3202 }
else if (
auto cachable_call = instruction->AsCachableIdempotentCall()) {
3203 target = cachable_call->function().ptr();
3210 const intptr_t stack_arguments_size_in_words =
3216 true, &argument_locations);
3218 for (intptr_t i = 0; i < arg_count; ++i) {
3219 const auto& [location, rep] = argument_locations[i];
3225 stack_arguments_size_in_words);
3227 for (
auto move_arg : *arguments) {
3231 if (!move_arg->is_register_move()) {
3236 if (instruction->
env() !=
nullptr) {
3251 intptr_t num_ssa_vars,
3253 : block_num_(num_blocks),
3254 ssa_num_(num_ssa_vars),
3255 detached_defs_(detached_defs) {
3262 block_num_[block->block_id()] = 1;
3263 CollectDetachedMaterializations(block->env());
3265 if (
auto* block_with_idefs = block->AsBlockEntryWithInitialDefs()) {
3266 for (
Definition* def : *block_with_idefs->initial_definitions()) {
3267 RenumberDefinition(def);
3268 CollectDetachedMaterializations(def->env());
3271 if (
auto* join = block->AsJoinEntry()) {
3273 RenumberDefinition(it.Current());
3278 if (
Definition* def = instr->AsDefinition()) {
3279 RenumberDefinition(def);
3281 CollectDetachedMaterializations(instr->
env());
3284 for (
auto* def : (*detached_defs_)) {
3285 RenumberDefinition(def);
3291 intptr_t current_block_index = 0;
3292 for (intptr_t i = 0, n = block_num_.
length(); i < n; ++i) {
3293 if (block_num_[i] >= 0) {
3294 block_num_[i] = current_block_index++;
3298 block->set_block_id(block_num_[block->block_id()]);
3307 intptr_t new_index = ssa_num_[old_index];
3308 if (new_index < 0) {
3309 ssa_num_[old_index] = new_index = current_ssa_index_++;
3315 bool IsDetachedDefinition(Definition* def) {
3316 return def->IsMaterializeObject() && (def->next() ==
nullptr);
3319 void AddDetachedDefinition(Definition* def) {
3320 for (intptr_t i = 0, n = detached_defs_->length(); i < n; ++i) {
3321 if ((*detached_defs_)[i] == def) {
3325 detached_defs_->Add(def);
3328 for (intptr_t i = 0, n = def->InputCount(); i < n; ++i) {
3329 Definition* input = def->InputAt(i)->definition();
3330 if (IsDetachedDefinition(input)) {
3331 AddDetachedDefinition(input);
3334 ASSERT(def->env() ==
nullptr);
3337 void CollectDetachedMaterializations(Environment*
env) {
3338 if (
env ==
nullptr) {
3341 for (Environment::DeepIterator it(
env); !it.Done(); it.Advance()) {
3342 Definition* def = it.CurrentValue()->definition();
3343 if (IsDetachedDefinition(def)) {
3344 AddDetachedDefinition(def);
3349 GrowableArray<intptr_t> block_num_;
3350 GrowableArray<intptr_t> ssa_num_;
3351 intptr_t current_ssa_index_ = 0;
3352 ZoneGrowableArray<Definition*>* detached_defs_;
3356 if (detached_defs ==
nullptr) {
static bool compare(const SkBitmap &ref, const SkIRect &iref, const SkBitmap &test, const SkIRect &itest)
static bool match(const char *needle, const char *haystack)
T extract(SkSpan< const uint8_t > &data)
static void is_empty(skiatest::Reporter *reporter, const SkPath &p)
static float prev(float f)
static int block_count(const SkSBlockAllocator< N > &pool)
#define ASSERT_EQUAL(expected, actual)
#define RELEASE_ASSERT(cond)
virtual bool HasTypeClass() const
virtual bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
virtual ClassPtr type_class() const
bool IsDynamicType() const
void EnsureLength(intptr_t new_length, const T &default_value)
static bool IsSupported(Token::Kind op_kind, Value *left, Value *right)
Token::Kind op_kind() const
bool KillAndAdd(BitVector *kill, BitVector *gen)
bool Contains(intptr_t i) const
bool AddAll(const BitVector *from)
void Intersect(const BitVector *other)
BlockEntryInstr * dominator() const
void ClearDominatedBlocks()
intptr_t try_index() const
intptr_t postorder_number() const
void AddDominatedBlock(BlockEntryInstr *block)
void set_block_id(intptr_t block_id)
intptr_t preorder_number() const
bool InsideTryBlock() const
intptr_t block_id() const
virtual intptr_t PredecessorCount() const =0
const GrowableArray< BlockEntryInstr * > & dominated_blocks()
bool Dominates(BlockEntryInstr *other) const
void set_postorder_number(intptr_t number)
virtual BlockEntryInstr * PredecessorAt(intptr_t index) const =0
void set_last_instruction(Instruction *instr)
bool DiscoverBlock(BlockEntryInstr *predecessor, GrowableArray< BlockEntryInstr * > *preorder, GrowableArray< intptr_t > *parent)
Instruction * last_instruction() const
GrowableArray< Definition * > * initial_definitions()
BlockTraversalState(BlockEntryInstr *block)
BlockEntryInstr * NextSuccessor()
BlockEntryInstr * block() const
bool HasNextSuccessor() const
static const Bool & False()
static const Bool & True()
static BoxInstr * Create(Representation from, Value *value)
TargetEntryInstr ** false_successor_address()
TargetEntryInstr ** true_successor_address()
bool HasOverride(const Class &cls, const String &function_name, intptr_t *subclass_count)
void AddToGuardedClassesForSubclassCount(const Class &cls, intptr_t subclass_count)
intptr_t MonomorphicReceiverCid() const
bool IsMonomorphic() const
ClassPtr At(intptr_t cid) const
ErrorPtr EnsureIsFinalized(Thread *thread) const
bool is_implemented() const
bool IsEqualTo(CompileType *other)
static CompilerState & Current()
const Object & value() const
Value * env_use_list() const
Value * input_use_list() const
virtual Value * RedefinedValue() const
void ReplaceUsesWith(Definition *other)
virtual Definition * AsDefinition()
void AddInputUse(Value *value)
Definition * OriginalDefinition()
void set_ssa_temp_index(intptr_t index)
intptr_t ssa_temp_index() const
static constexpr intptr_t kNone
static DoublePtr NewCanonical(double d)
void MarkAsLazyDeoptToBeforeDeoptId()
static Environment * From(Zone *zone, const GrowableArray< Definition * > &definitions, intptr_t fixed_parameter_count, intptr_t lazy_deopt_pruning_count, const ParsedFunction &parsed_function)
static StringPtr NameFromGetter(const String &getter_name)
static bool SupportsUnboxedDoubles()
static bool SupportsUnboxedSimd128()
static bool CanConvertInt64ToDouble()
static void PrintGraph(const char *phase, FlowGraph *flow_graph)
static bool ShouldPrint(const Function &function, uint8_t **compiler_pass_filter=nullptr)
const GrowableArray< BlockEntryInstr * > & reverse_postorder() const
GraphEntryInstr * graph_entry() const
ConstantInstr * GetConstant(const Object &object, Representation representation=kTagged)
IsolateGroup * isolate_group() const
bool should_print() const
bool VerifyRedefinitions()
void EnsureSSATempIndex(Definition *defn, Definition *replacement)
Instruction * AppendTo(Instruction *prev, Instruction *instr, Environment *env, UseKind use_kind)
void CompactSSA(ZoneGrowableArray< Definition * > *detached_defs=nullptr)
intptr_t max_argument_slot_count() const
bool IsCompiledForOsr() const
ConstantInstr * constant_dead() const
ConstantInstr * GetExistingConstant(const Object &object, Representation representation=kTagged) const
intptr_t current_ssa_temp_index() const
bool IsReceiver(Definition *def) const
intptr_t ArgumentDescriptorEnvIndex() const
static Representation ReturnRepresentationOf(const Function &function)
void ReplaceCurrentInstruction(ForwardInstructionIterator *iterator, Instruction *current, Instruction *replacement)
JoinEntryInstr * NewDiamond(Instruction *instruction, Instruction *inherit, ComparisonInstr *compare, TargetEntryInstr **block_true, TargetEntryInstr **block_false)
Instruction * AppendSpeculativeTo(Instruction *prev, Instruction *instr, Environment *env, UseKind use_kind)
intptr_t InstructionCount() const
ToCheck CheckForInstanceCall(InstanceCallInstr *call, UntaggedFunction::Kind kind) const
void RemoveRedefinitions(bool keep_checks=false)
const GrowableArray< BlockEntryInstr * > & preorder() const
void set_current_ssa_temp_index(intptr_t index)
void CreateCommonConstants()
void AddExactnessGuard(InstanceCallInstr *call, intptr_t receiver_cid)
const std::pair< Location, Representation > & GetDirectParameterInfoAt(intptr_t i)
void InsertSpeculativeAfter(Instruction *prev, Instruction *instr, Environment *env, UseKind use_kind)
void set_max_argument_slot_count(intptr_t count)
BlockIterator postorder_iterator() const
Definition * CreateCheckBound(Definition *length, Definition *index, intptr_t deopt_id)
intptr_t max_block_id() const
static intptr_t ComputeArgumentsSizeInWords(const Function &function, intptr_t arguments_count)
void InsertMoveArguments()
void Print(const char *phase="unknown")
const GrowableArray< BlockEntryInstr * > & postorder() const
intptr_t num_stack_locals() const
void ComputeDominators(GrowableArray< BitVector * > *dominance_frontier)
static Representation ParameterRepresentationAt(const Function &function, intptr_t index)
void EliminateEnvironments()
static intptr_t ComputeLocationsOfFixedParameters(Zone *zone, const Function &function, bool should_assign_stack_locations=false, compiler::ParameterInfoArray *parameter_info=nullptr)
void set_max_block_id(intptr_t id)
ConstantInstr * constant_null() const
Instruction * CreateCheckClass(Definition *to_check, const Cids &cids, intptr_t deopt_id, const InstructionSource &source)
const LoopHierarchy & GetLoopHierarchy()
const Function & function() const
const ParsedFunction & parsed_function() const
void ResetLoopHierarchy()
intptr_t num_direct_parameters() const
void RenameUsesDominatedByRedefinitions()
FlowGraph(const ParsedFunction &parsed_function, GraphEntryInstr *graph_entry, intptr_t max_block_id, PrologueInfo prologue_info, CompilationMode compilation_mode)
bool unmatched_representations_allowed() const
Definition * TryCreateConstantReplacementFor(Definition *op, const Object &value)
intptr_t osr_variable_count() const
void PopulateWithICData(const Function &function)
RedefinitionInstr * EnsureRedefinition(Instruction *prev, Definition *original, CompileType compile_type)
void TryOptimizePatterns()
PhiInstr * AddPhi(JoinEntryInstr *join, Definition *d1, Definition *d2)
void AddToGraphInitialDefinitions(Definition *defn)
bool ExtractExternalUntaggedPayload(Instruction *instr, Value *array, classid_t cid)
void AllocateSSAIndex(Definition *def)
static bool IsConstantRepresentable(const Object &value, Representation target_rep, bool tagged_value_must_be_smi)
BlockIterator reverse_postorder_iterator() const
void ComputeSSA(ZoneGrowableArray< Definition * > *inlining_parameters)
LocalVariable * CurrentContextVar() const
intptr_t RawTypeArgumentEnvIndex() const
void InsertBefore(Instruction *next, Instruction *instr, Environment *env, UseKind use_kind)
static void RenameDominatedUses(Definition *def, Instruction *dom, Definition *other)
void SelectRepresentations()
intptr_t variable_count() const
void AddToInitialDefinitions(BlockEntryWithInitialDefs *entry, Definition *defn)
void ExtractNonInternalTypedDataPayloads()
GrowableArray< BlockEntryInstr * > * CodegenBlockOrder()
intptr_t allocate_block_id()
void InsertAfter(Instruction *prev, Instruction *instr, Environment *env, UseKind use_kind)
bool should_reorder_blocks() const
bool IsImmortalVariable(intptr_t env_index) const
intptr_t EnvIndex(const LocalVariable *variable) const
Instruction * Current() const
void RemoveCurrentFromGraph()
bool is_unboxed_integer_parameter_at(intptr_t index) const
bool has_unboxed_return() const
bool has_unboxed_double_return() const
bool has_unboxed_integer_return() const
bool MakesCopyOfParameters() const
bool has_unboxed_record_return() const
bool is_unboxed_parameter_at(intptr_t index) const
bool is_unboxed_double_parameter_at(intptr_t index) const
intptr_t num_fixed_parameters() const
intptr_t NumParameters() const
intptr_t fixed_slot_count() const
virtual BlockEntryInstr * SuccessorAt(intptr_t index) const
void set_fixed_slot_count(intptr_t count)
virtual intptr_t SuccessorCount() const
static ICDataPtr NewForStaticCall(const Function &owner, const Function &target, const Array &arguments_descriptor, intptr_t deopt_id, intptr_t num_args_tested, RebindRule rebind_rule)
void set_previous(Instruction *instr)
void LinkTo(Instruction *next)
virtual bool MayThrow() const =0
void InheritDeoptTarget(Zone *zone, Instruction *other)
virtual bool ComputeCanDeoptimizeAfterCall() const
virtual BlockEntryInstr * SuccessorAt(intptr_t index) const
virtual BlockEntryInstr * GetBlock()
virtual bool ComputeCanDeoptimize() const =0
Environment * env() const
bool HasUnmatchedInputRepresentations() const
const char * ToCString() const
Instruction * AppendInstruction(Instruction *tail)
virtual Representation RequiredInputRepresentation(intptr_t idx) const
virtual void ReplaceInputsWithMoveArguments(MoveArgumentsArray *move_arguments)
virtual intptr_t ArgumentCount() const
bool IsDominatedBy(Instruction *dom)
virtual Instruction * Canonicalize(FlowGraph *flow_graph)
virtual Representation representation() const
void RepairArgumentUsesInEnvironment() const
void SetInputAt(intptr_t i, Value *value)
InstructionSource source() const
Value * ArgumentValueAt(intptr_t index) const
void InsertAfter(Instruction *prev)
virtual intptr_t SuccessorCount() const
bool HasMoveArguments() const
SpeculativeMode SpeculativeModeOfInputs() const
virtual const char * DebugName() const =0
Instruction * previous() const
MethodRecognizer::Kind recognized_kind() const
ClassTable * class_table() const
virtual BlockEntryInstr * PredecessorAt(intptr_t index) const
virtual intptr_t PredecessorCount() const
static bool FindPragma(Thread *T, bool only_core, const Object &object, const String &pragma_name, bool multiple=false, Object *options=nullptr)
const intptr_t variable_count_
virtual void ComputeInitialSets()=0
GrowableArray< BitVector * > kill_
void ComputeLiveInAndLiveOutSets()
bool UpdateLiveOut(const BlockEntryInstr &instr)
BitVector * GetKillSet(BlockEntryInstr *block) const
LivenessAnalysis(intptr_t variable_count, const GrowableArray< BlockEntryInstr * > &postorder)
GrowableArray< BitVector * > live_out_
BitVector * GetLiveOutSet(BlockEntryInstr *block) const
GrowableArray< BitVector * > live_in_
const GrowableArray< BlockEntryInstr * > & postorder_
bool UpdateLiveIn(const BlockEntryInstr &instr)
static Location StackSlot(intptr_t stack_index, Register base)
static Location RegisterLocation(Register reg)
static intptr_t NumArgsCheckedForStaticCall(const Function &function)
virtual const char * ToCString() const
static Object & ZoneHandle()
static constexpr intptr_t kNotFunctionParameter
const Function & function() const
bool has_arg_desc_var() const
LocalVariable * RawParameterVariable(intptr_t i) const
JoinEntryInstr * block() const
virtual Definition * Canonicalize(FlowGraph *flow_graph)
void set_is_receiver(ReceiverType is_receiver)
ReceiverType is_receiver() const
static bool Fits(Range *range, RangeBoundary::RangeSize size)
void set_constrained_type(CompileType *type)
CompileType * constrained_type() const
static FunctionPtr ResolveDynamicAnyArgs(Zone *zone, const Class &receiver_class, const String &function_name, bool allow_add=true)
SSACompactor(intptr_t num_blocks, intptr_t num_ssa_vars, ZoneGrowableArray< Definition * > *detached_defs)
void RenumberGraph(FlowGraph *graph)
static const Slot & GetRecordFieldSlot(Thread *thread, intptr_t offset_in_bytes)
static const Slot & GetTypeArgumentsSlotFor(Thread *thread, const Class &cls)
static const char * ToCString(Thread *thread, StringPtr ptr)
static Thread * Current()
CompilerState & compiler_state()
static intptr_t OutputIndexOf(Token::Kind token)
static UnboxInstr * Create(Representation to, Value *value, intptr_t deopt_id, SpeculativeMode speculative_mode=kGuardInputs)
static bool IsInt(intptr_t N, T value)
static constexpr T Maximum(T x, T y)
static T Minimum(T x, T y)
static bool IsUint(intptr_t N, T value)
intptr_t use_index() const
Instruction * instruction() const
Value * CopyWithType(Zone *zone)
Definition * definition() const
void BindTo(Definition *definition)
const GrowableArray< BitVector * > & ComputeAssignedVars()
bool IsStoreAlive(BlockEntryInstr *block, StoreLocalInstr *store)
virtual void ComputeInitialSets()
VariableLivenessAnalysis(FlowGraph *flow_graph)
bool IsLastLoad(BlockEntryInstr *block, LoadLocalInstr *load)
intptr_t InputCount() const
Value * InputAt(intptr_t i) const
#define COMPILER_TIMINGS_TIMER_SCOPE(thread, timer_id)
#define THR_Print(format,...)
VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE auto & d
G_BEGIN_DECLS G_MODULE_EXPORT FlValue * args
constexpr bool FLAG_support_il_printer
#define DEFINE_FLAG(type, name, default_value, comment)
Dart_NativeFunction function
bool Contains(const Container &container, const Value &value)
intptr_t ComputeCallingConvention(Zone *zone, const Function &target, intptr_t argc, std::function< Representation(intptr_t)> argument_rep, bool should_assign_stack_locations, ParameterInfoArray *parameter_info)
static JoinEntryInstr * NewJoin(FlowGraph *graph, Instruction *inherit)
static bool CanConvertInt64ToDouble()
bool IsTypedDataViewClassId(intptr_t index)
bool IsTypedDataClassId(intptr_t index)
static GotoInstr * NewGoto(FlowGraph *graph, JoinEntryInstr *target, Instruction *inherit)
static bool ShouldInlineSimd()
bool IsTypedDataBaseClassId(intptr_t index)
static Location EnvIndexToStackLocation(intptr_t num_direct_parameters, intptr_t env_index)
static BranchInstr * NewBranch(FlowGraph *graph, ComparisonInstr *cmp, Instruction *inherit)
static bool IsPositiveOrZeroSmiConst(Definition *d)
Location LocationExceptionLocation()
static bool ShouldReorderBlocks(const Function &function, FlowGraph::CompilationMode mode)
const Register ARGS_DESC_REG
static bool NeedsRecordBoxing(Definition *def)
static bool IsDominatedUse(Instruction *dom, Value *use)
static bool CanUnboxDouble()
static void PrintBitVector(const char *tag, BitVector *v)
bool IsExternalPayloadClassId(classid_t cid)
static TargetEntryInstr * NewTarget(FlowGraph *graph, Instruction *inherit)
ZoneGrowableArray< MoveArgumentInstr * > MoveArgumentsArray
Location LocationStackTraceLocation()
static int8_t data[kExtLength]
static BinarySmiOpInstr * AsSmiShiftLeftInstruction(Definition *d)
static bool IsMarkedWithNoBoundsChecks(const Function &function)
bool IsExternalTypedDataClassId(intptr_t index)
static bool IsUnboxedInteger(Representation rep)
SINT Vec< 2 *N, T > join(const Vec< N, T > &lo, const Vec< N, T > &hi)
static bool Supports(Representation rep)
bool Contains(intptr_t block_id) const
static const char * ToCString(Representation rep)