DEFINE_FLAG(bool,
            array_bounds_check_elimination,
            true,
            "Eliminate redundant bounds checks.");
DEFINE_FLAG(bool, trace_range_analysis, false, "Trace range analysis progress");
DEFINE_FLAG(bool,
            trace_integer_ir_selection,
            false,
            "Print integer IR selection optimization pass.");
#if defined(DEBUG)
static void CheckRangeForRepresentation(const Assert& assert,
                                        const Instruction* instr,
                                        const Range* range,
                                        Representation representation) {
  if (!RangeUtils::IsWithin(range,
                            RepresentationUtils::MinValue(representation),
                            RepresentationUtils::MaxValue(representation))) {
    assert.Fail(
        "During range analysis for:\n  %s\n"
        "expected range containing only %s-representable values, but got %s",
        instr->ToCString(), RepresentationUtils::ToCString(representation),
        Range::ToCString(range));
  }
}

#define ASSERT_VALID_RANGE_FOR_REPRESENTATION(instr, range, representation)   \
  do {                                                                        \
    CheckRangeForRepresentation(dart::Assert(__FILE__, __LINE__), instr,      \
                                range, representation);                       \
  } while (false)
#else
#define ASSERT_VALID_RANGE_FOR_REPRESENTATION(instr, range, representation)   \
  do {                                                                        \
    USE(instr);                                                               \
    USE(range);                                                               \
    USE(representation);                                                      \
  } while (false)
#endif
void RangeAnalysis::Analyze() {
  // ...
  EliminateRedundantBoundsChecks();
  MarkUnreachableBlocks();
  // ...
}
static Definition* UnwrapConstraint(Definition* defn) {
  while (defn->IsConstraint()) {
    defn = defn->AsConstraint()->value()->definition();
  }
  return defn;
}
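// Collect all integer-valued definitions (initial definitions, phis, and
// instructions in block bodies) into values_, and remember 64-bit binary and
// shift operations as well as bounds checks for the later narrowing and
// elimination passes.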
void RangeAnalysis::CollectValues() {
  auto graph_entry = flow_graph_->graph_entry();

  auto& initial = *graph_entry->initial_definitions();
  for (intptr_t i = 0; i < initial.length(); ++i) {
    Definition* current = initial[i];
    if (IsIntegerDefinition(current)) {
      values_.Add(current);
    }
  }

  for (intptr_t i = 0; i < graph_entry->SuccessorCount(); ++i) {
    auto successor = graph_entry->SuccessorAt(i);
    if (auto entry = successor->AsBlockEntryWithInitialDefs()) {
      const auto& initial = *entry->initial_definitions();
      for (intptr_t j = 0; j < initial.length(); ++j) {
        Definition* current = initial[j];
        if (IsIntegerDefinition(current)) {
          values_.Add(current);
        }
      }
    }
  }

  for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator();
       !block_it.Done(); block_it.Advance()) {
    BlockEntryInstr* block = block_it.Current();
    JoinEntryInstr* join = block->AsJoinEntry();
    if (join != nullptr) {
      for (PhiIterator phi_it(join); !phi_it.Done(); phi_it.Advance()) {
        PhiInstr* current = phi_it.Current();
        if (current->Type()->IsInt()) {
          values_.Add(current);
        }
      }
    }

    for (ForwardInstructionIterator instr_it(block); !instr_it.Done();
         instr_it.Advance()) {
      Instruction* current = instr_it.Current();
      Definition* defn = current->AsDefinition();
      if (defn != nullptr) {
        if (defn->HasSSATemp() && IsIntegerDefinition(defn)) {
          values_.Add(defn);
          if (defn->IsBinaryInt64Op()) {
            binary_int64_ops_.Add(defn->AsBinaryInt64Op());
          } else if (defn->IsShiftInt64Op() ||
                     defn->IsSpeculativeShiftInt64Op()) {
            shift_int64_ops_.Add(defn->AsShiftIntegerOp());
          }
        }
      }
      if (auto check = current->AsCheckBoundBase()) {
        bounds_checks_.Add(check);
      }
    }
  }
}
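// Given a comparison `defn <op> boundary`, build the Smi range implied on the
// edge where the comparison holds. For example, for Token::kLT the resulting
// constraint range is [Smi min, boundary - 1].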
Range* RangeAnalysis::ConstraintSmiRange(Token::Kind op, Definition* boundary) {
  // ...
}
ConstraintInstr* RangeAnalysis::InsertConstraintFor(Value* use,
                                                    Definition* defn,
                                                    Range* constraint_range,
                                                    Instruction* after) {
  // No need to constrain constants.
  if (defn->IsConstant()) return nullptr;

  // Check if the value is already constrained to avoid inserting duplicated
  // constraints.
  ConstraintInstr* constraint = after->next()->AsConstraint();
  while (constraint != nullptr) {
    if ((constraint->value()->definition() == defn) &&
        constraint->constraint()->Equals(constraint_range)) {
      return nullptr;
    }
    constraint = constraint->next()->AsConstraint();
  }

  constraint = new (Z) ConstraintInstr(use->CopyWithType(), constraint_range);

  flow_graph_->InsertAfter(after, constraint, nullptr, FlowGraph::kValue);
  FlowGraph::RenameDominatedUses(defn, constraint, constraint);
  constraints_.Add(constraint);
  return constraint;
}
bool RangeAnalysis::ConstrainValueAfterBranch(Value* use, Definition* defn) {
  BranchInstr* branch = use->instruction()->AsBranch();
  RelationalOpInstr* rel_op = branch->comparison()->AsRelationalOp();
  if ((rel_op != nullptr) && (rel_op->operation_cid() == kSmiCid)) {
    // Found a comparison of two smis. Constrain defn at the true and false
    // successors using the other operand as a boundary.
    Definition* boundary;
    Token::Kind op_kind;
    if (use->use_index() == 0) {  // Left operand.
      boundary = rel_op->InputAt(1)->definition();
      op_kind = rel_op->kind();
    } else {
      ASSERT(use->use_index() == 1);  // Right operand.
      boundary = rel_op->InputAt(0)->definition();
      // InsertConstraintFor assumes that defn is the left operand of the
      // comparison; if it is the right operand, flip the comparison.
      op_kind = Token::FlipComparison(rel_op->kind());
    }

    // Constrain the definition at the true successor.
    ConstraintInstr* true_constraint =
        InsertConstraintFor(use, defn, ConstraintSmiRange(op_kind, boundary),
                            branch->true_successor());
    if (true_constraint != nullptr) {
      true_constraint->set_target(branch->true_successor());
    }

    // Constrain the definition with a negated condition at the false
    // successor.
    ConstraintInstr* false_constraint = InsertConstraintFor(
        use, defn,
        ConstraintSmiRange(Token::NegateComparison(op_kind), boundary),
        branch->false_successor());
    if (false_constraint != nullptr) {
      false_constraint->set_target(branch->false_successor());
    }

    return true;
  }

  return false;
}
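// Insert constraints for the given definition at every branch and bounds
// check that uses it, so that its range can be refined per control-flow path.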
void RangeAnalysis::InsertConstraintsFor(Definition* defn) {
  for (Value* use = defn->input_use_list(); use != nullptr;
       use = use->next_use()) {
    if (auto branch = use->instruction()->AsBranch()) {
      if (ConstrainValueAfterBranch(use, defn)) {
        Value* other_value = branch->InputAt(1 - use->use_index());
        // ...
        ConstrainValueAfterBranch(other_value, other_value->definition());
      }
    } else if (auto check = use->instruction()->AsCheckBoundBase()) {
      ConstrainValueAfterCheckBound(use, check, defn);
    }
  }
}
void RangeAnalysis::ConstrainValueAfterCheckBound(Value* use,
                                                  CheckBoundBaseInstr* check,
                                                  Definition* defn) {
  const intptr_t use_index = use->use_index();

  Range* constraint_range = nullptr;
  if (use_index == CheckBoundBaseInstr::kIndexPos) {
    Definition* length = check->length()->definition();
    constraint_range = new (Z) Range(RangeBoundary::FromConstant(0),
                                     RangeBoundary::FromDefinition(length, -1));
  } else {
    ASSERT(use_index == CheckBoundBaseInstr::kLengthPos);
    Definition* index = check->index()->definition();
    constraint_range = new (Z)
        Range(RangeBoundary::FromDefinition(index, 1), RangeBoundary::MaxSmi());
  }
  InsertConstraintFor(use, defn, constraint_range, check);
}
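// Note that constraints_ grows while the second loop below runs: constraints
// inserted on behalf of other constraints are picked up and processed too.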
void RangeAnalysis::InsertConstraints() {
  for (intptr_t i = 0; i < values_.length(); i++) {
    InsertConstraintsFor(values_[i]);
  }

  for (intptr_t i = 0; i < constraints_.length(); i++) {
    InsertConstraintsFor(constraints_[i]);
  }
}
const Range* RangeAnalysis::GetSmiRange(Value* value) const {
  Definition* defn = value->definition();
  const Range* range = defn->range();
  if ((range == nullptr) && (defn->Type()->ToCid() != kSmiCid)) {
    // The type propagator determined that the reaching type for this use is
    // Smi, but the definition itself is not a smi-definition and will never
    // have a range assigned to it. Return the widest possible range.
    return &smi_range_;
  }
  return range;
}

const Range* RangeAnalysis::GetIntRange(Value* value) const {
  Definition* defn = value->definition();
  const Range* range = defn->range();
  if ((range == nullptr) && !defn->Type()->IsInt()) {
    return &int64_range_;
  }
  return range;
}

static bool AreEqualDefinitions(Definition* a, Definition* b) {
  a = UnwrapConstraint(a);
  b = UnwrapConstraint(b);
  return (a == b) || (a->AllowsCSE() && b->AllowsCSE() && a->Equals(*b));
}

static bool DependOnSameSymbol(const RangeBoundary& a, const RangeBoundary& b) {
  return a.IsSymbol() && b.IsSymbol() &&
         AreEqualDefinitions(a.symbol(), b.symbol());
}
static RangeBoundary WidenMin(const Range* range,
                              const Range* new_range,
                              RangeBoundary::RangeSize size) {
  RangeBoundary min = range->min();
  if (min.IsSymbol()) {
    if (min.LowerBound().Overflowed(size)) {
      return RangeBoundary::MinConstant(size);
    }
    // ...
  }
  // ...
}

static RangeBoundary WidenMax(const Range* range,
                              const Range* new_range,
                              RangeBoundary::RangeSize size) {
  RangeBoundary max = range->max();
  if (max.IsSymbol()) {
    if (max.UpperBound().Overflowed(size)) {
      return RangeBoundary::MaxConstant(size);
    }
    // ...
  }
  // ...
}

static RangeBoundary NarrowMin(const Range* range,
                               const Range* new_range,
                               RangeBoundary::RangeSize size) { /* ... */ }

static RangeBoundary NarrowMax(const Range* range,
                               const Range* new_range,
                               RangeBoundary::RangeSize size) { /* ... */ }
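// Widening maps a bound that keeps growing towards a range boundary directly
// to that boundary, so that the fix-point iteration converges quickly.
// Narrowing afterwards only improves bounds that were widened all the way to
// a range boundary, restoring precision where possible.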
char RangeAnalysis::OpPrefix(JoinOperator op) {
  // ...
}

bool RangeAnalysis::InferRange(JoinOperator op,
                               Definition* defn,
                               intptr_t iteration) {
  Range range;
  defn->InferRange(this, &range);

  if (!Range::IsUnknown(&range)) {
    // ...
    if (op == WIDEN) {
      // ...
    } else if (op == NARROW) {
      // ...
    }

    if (!range.Equals(defn->range())) {
      if (FLAG_support_il_printer && FLAG_trace_range_analysis &&
          flow_graph_->should_print()) {
        THR_Print("%c [%" Pd "] %s: %s => %s\n", OpPrefix(op), iteration,
                  defn->ToCString(), Range::ToCString(defn->range()),
                  Range::ToCString(&range));
      }
      defn->set_range(range);
      return true;
    }
  }

  return false;
}
void RangeAnalysis::CollectDefinitions(BitVector* set) {
  for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator();
       !block_it.Done(); block_it.Advance()) {
    BlockEntryInstr* block = block_it.Current();

    JoinEntryInstr* join = block->AsJoinEntry();
    if (join != nullptr) {
      for (PhiIterator it(join); !it.Done(); it.Advance()) {
        PhiInstr* phi = it.Current();
        if (set->Contains(phi->ssa_temp_index())) {
          definitions_.Add(phi);
        }
      }
    }

    for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
      Definition* defn = it.Current()->AsDefinition();
      if ((defn != nullptr) && defn->HasSSATemp() &&
          set->Contains(defn->ssa_temp_index())) {
        definitions_.Add(defn);
      }
    }
  }
}
void RangeAnalysis::Iterate(JoinOperator op, intptr_t max_iterations) {
  intptr_t iteration = 0;
  bool changed;
  do {
    changed = false;
    for (intptr_t i = 0; i < definitions_.length(); i++) {
      Definition* defn = definitions_[i];
      if (InferRange(op, defn, iteration)) {
        changed = true;
      }
    }
    iteration++;
  } while (changed && (iteration < max_iterations));
}
void RangeAnalysis::InferRanges() {
  Zone* zone = flow_graph_->zone();
  // Initialize a bitvector for quick filtering of integer values.
  BitVector* set =
      new (zone) BitVector(zone, flow_graph_->current_ssa_temp_index());
  for (intptr_t i = 0; i < values_.length(); i++) {
    set->Add(values_[i]->ssa_temp_index());
  }
  for (intptr_t i = 0; i < constraints_.length(); i++) {
    set->Add(constraints_[i]->ssa_temp_index());
  }

  // Collect integer definitions (including constraints) in reverse postorder.
  // This improves convergence speed compared to iterating the values_ and
  // constraints_ arrays separately.
  auto graph_entry = flow_graph_->graph_entry();
  const auto& initial = *graph_entry->initial_definitions();
  for (intptr_t i = 0; i < initial.length(); ++i) {
    Definition* definition = initial[i];
    if (set->Contains(definition->ssa_temp_index())) {
      definitions_.Add(definition);
    }
  }

  for (intptr_t i = 0; i < graph_entry->SuccessorCount(); ++i) {
    auto successor = graph_entry->SuccessorAt(i);
    if (auto function_entry = successor->AsFunctionEntry()) {
      const auto& initial = *function_entry->initial_definitions();
      for (intptr_t j = 0; j < initial.length(); ++j) {
        Definition* definition = initial[j];
        if (set->Contains(definition->ssa_temp_index())) {
          definitions_.Add(definition);
        }
      }
    }
  }

  CollectDefinitions(set);

  // Propagate ranges as-is first to improve the precision of initial bounds,
  // then iterate with widening to a fix-point, and finally narrow the
  // widened ranges back where possible.
  Iterate(NONE, 2);
  Iterate(WIDEN, kMaxInt32);
  Iterate(NARROW, kMaxInt32);
}
void RangeAnalysis::AssignRangesRecursively(Definition* defn) {
  // ...
  if (!input_defn->HasSSATemp() || input_defn->IsConstant()) {
    AssignRangesRecursively(input_defn);
  }
  // ...
}
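// Scheduler attempts to hoist instructions into the pre-headers of loops that
// dominate a given post-dominator instruction. It keeps a map of already
// emitted instructions for reuse (CSE) and a list of insertions so that a
// failed attempt can be rolled back.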
class Scheduler {
 public:
  explicit Scheduler(FlowGraph* flow_graph)
      : flow_graph_(flow_graph),
        loop_headers_(flow_graph->GetLoopHierarchy().headers()),
        pre_headers_(loop_headers_.length()) {
    for (intptr_t i = 0; i < loop_headers_.length(); i++) {
      pre_headers_.Add(loop_headers_[i]->ImmediateDominator());
    }
  }

  // Clear the list of emitted instructions.
  void Start() { emitted_.Clear(); }

  // Attempt to schedule the given instruction into one of the loop
  // pre-headers that dominates the given post_dominator instruction.
  template <typename T>
  T* Emit(T* instruction, Instruction* post_dominator) {
    return static_cast<T*>(EmitRecursively(instruction, post_dominator));
  }

  // Undo all insertions recorded in the list of emitted instructions.
  void Rollback() {
    for (intptr_t i = emitted_.length() - 1; i >= 0; i--) {
      emitted_[i]->RemoveFromGraph();
    }
    emitted_.Clear();
  }
 private:
  Instruction* EmitRecursively(Instruction* instruction, Instruction* sink) {
    // Schedule all unscheduled inputs and unwrap all constrained inputs.
    for (intptr_t i = 0; i < instruction->InputCount(); i++) {
      Definition* defn = instruction->InputAt(i)->definition();
      if (!defn->HasSSATemp()) {
        Definition* scheduled = Emit(defn, sink);
        if (scheduled == nullptr) {
          return nullptr;
        }
        instruction->InputAt(i)->set_definition(scheduled);
      } else if (defn->IsConstraint()) {
        instruction->InputAt(i)->set_definition(UnwrapConstraint(defn));
      }
    }

    // Attempt to find an equivalent instruction that was already scheduled.
    // If it is still in the graph (it could have been removed by a rollback)
    // and it dominates the sink, use it.
    Instruction* emitted = map_.LookupValue(instruction);
    if (emitted != nullptr && !emitted->WasEliminated() &&
        sink->IsDominatedBy(emitted)) {
      return emitted;
    }

    // Attempt to find a suitable pre-header. Iterate loop headers backwards
    // to try scheduling into the outermost loop first.
    for (intptr_t i = loop_headers_.length() - 1; i >= 0; i--) {
      BlockEntryInstr* header = loop_headers_[i];
      BlockEntryInstr* pre_header = pre_headers_[i];

      if (pre_header == nullptr) {
        continue;
      }

      if (!sink->IsDominatedBy(header)) {
        continue;
      }

      Instruction* last = pre_header->last_instruction();

      bool inputs_are_invariant = true;
      for (intptr_t j = 0; j < instruction->InputCount(); j++) {
        Definition* defn = instruction->InputAt(j)->definition();
        if (!last->IsDominatedBy(defn)) {
          inputs_are_invariant = false;
          break;
        }
      }

      if (inputs_are_invariant) {
        EmitTo(pre_header, instruction);
        return instruction;
      }
    }

    return nullptr;
  }

  void EmitTo(BlockEntryInstr* block, Instruction* instr) {
    GotoInstr* last = block->last_instruction()->AsGoto();
    flow_graph_->InsertBefore(
        last, instr, last->env(),
        instr->IsDefinition() ? FlowGraph::kValue : FlowGraph::kEffect);
    instr->CopyDeoptIdFrom(*last);

    map_.Insert(instr);
    emitted_.Add(instr);
  }

  FlowGraph* flow_graph_;
  PointerSet<Instruction> map_;
  const ZoneGrowableArray<BlockEntryInstr*>& loop_headers_;
  GrowableArray<BlockEntryInstr*> pre_headers_;
  GrowableArray<Instruction*> emitted_;
};
class BoundsCheckGeneralizer {
 public:
  BoundsCheckGeneralizer(RangeAnalysis* range_analysis, FlowGraph* flow_graph)
      : range_analysis_(range_analysis),
        flow_graph_(flow_graph),
        scheduler_(flow_graph) {}
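  // TryGeneralize attempts to replace a bounds check inside a loop with a
  // loop-invariant generalized check: construct symbolic lower and upper
  // bounds for the index, prove the lower bound non-negative (inserting
  // positivity preconditions where needed), and hoist a check against the
  // upper bound into the pre-header.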
  void TryGeneralize(CheckArrayBoundInstr* check) {
    Definition* upper_bound =
        ConstructUpperBound(check->index()->definition(), check);
    if (upper_bound == UnwrapConstraint(check->index()->definition())) {
      // Unable to construct an upper bound for the index.
      if (FLAG_trace_range_analysis) {
        THR_Print("Failed to construct upper bound for %s index\n",
                  check->ToCString());
      }
      return;
    }

    // Re-associate subexpressions inside upper_bound to collect all constant
    // factors together and move them to the right hand side.
    if (!Simplify(&upper_bound, nullptr)) {
      if (FLAG_trace_range_analysis) {
        THR_Print("Failed to simplify upper bound for %s index\n",
                  check->ToCString());
      }
      return;
    }
    upper_bound = ApplyConstraints(upper_bound, check);
    range_analysis_->AssignRangesRecursively(upper_bound);

    // Find all symbols that need to be non-negative for the upper bound as
    // a whole to be non-negative.
    GrowableArray<Definition*> non_positive_symbols;
    if (!FindNonPositiveSymbols(&non_positive_symbols, upper_bound)) {
      if (FLAG_trace_range_analysis) {
        THR_Print(
            "Failed to generalize %s index to %s"
            " (can't ensure positivity)\n",
            check->ToCString(), IndexBoundToCString(upper_bound));
      }
      return;
    }

    // Check that we can statically prove that the lower bound of the index
    // is non-negative under the assumption that all potentially non-positive
    // symbols are positive.
    GrowableArray<ConstraintInstr*> positive_constraints(
        non_positive_symbols.length());
    Range* positive_range =
        new Range(RangeBoundary::FromConstant(0), RangeBoundary::MaxSmi());
    for (intptr_t i = 0; i < non_positive_symbols.length(); i++) {
      positive_constraints.Add(new ConstraintInstr(
          new Value(non_positive_symbols[i]), positive_range));
    }

    Definition* lower_bound =
        ConstructLowerBound(check->index()->definition(), check);
    lower_bound = ApplyConstraints(lower_bound, check, &positive_constraints);
    range_analysis_->AssignRangesRecursively(lower_bound);

    if (!RangeUtils::IsPositive(lower_bound->range())) {
      // Can't prove that the lower bound is positive even with additional
      // checks against potentially non-positive symbols. Give up.
      if (FLAG_trace_range_analysis) {
        THR_Print(
            "Failed to generalize %s index to %s"
            " (lower bound is not positive)\n",
            check->ToCString(), IndexBoundToCString(upper_bound));
      }
      return;
    }

    if (FLAG_trace_range_analysis) {
      THR_Print("For %s computed index bounds [%s, %s]\n", check->ToCString(),
                IndexBoundToCString(lower_bound),
                IndexBoundToCString(upper_bound));
    }

    // At this point we know that 0 <= index < UpperBound(index) under
    // certain preconditions. Start by emitting these preconditions.
    scheduler_.Start();

    ConstantInstr* max_smi =
        flow_graph_->GetConstant(Smi::Handle(Smi::New(Smi::kMaxValue)));
    for (intptr_t i = 0; i < non_positive_symbols.length(); i++) {
      CheckArrayBoundInstr* precondition = new CheckArrayBoundInstr(
          new Value(max_smi), new Value(non_positive_symbols[i]),
          DeoptId::kNone);
      precondition->mark_generalized();
      precondition = scheduler_.Emit(precondition, check);
      if (precondition == nullptr) {
        if (FLAG_trace_range_analysis) {
          THR_Print("  => failed to insert positivity constraint\n");
        }
        scheduler_.Rollback();
        return;
      }
    }

    CheckArrayBoundInstr* new_check = new CheckArrayBoundInstr(
        new Value(UnwrapConstraint(check->length()->definition())),
        new Value(upper_bound), DeoptId::kNone);
    new_check->mark_generalized();
    if (new_check->IsRedundant()) {
      if (FLAG_trace_range_analysis) {
        THR_Print("  => generalized check is redundant\n");
      }
      RemoveGeneralizedCheck(check);
      return;
    }

    new_check = scheduler_.Emit(new_check, check);
    if (new_check != nullptr) {
      if (FLAG_trace_range_analysis) {
        THR_Print("  => generalized check was hoisted into B%" Pd "\n",
                  new_check->GetBlock()->block_id());
      }
      RemoveGeneralizedCheck(check);
    } else {
      if (FLAG_trace_range_analysis) {
        THR_Print("  => generalized check can't be hoisted\n");
      }
    }
  }

  static void RemoveGeneralizedCheck(CheckArrayBoundInstr* check) {
    BinarySmiOpInstr* binary_op =
        check->index()->definition()->AsBinarySmiOp();
    if (binary_op != nullptr) {
      binary_op->set_can_overflow(false);
    }
    check->ReplaceUsesWith(check->index()->definition());
    check->RemoveFromGraph();
  }
  BinarySmiOpInstr* MakeBinaryOp(Token::Kind op_kind,
                                 Definition* left,
                                 Definition* right) {
    return new BinarySmiOpInstr(op_kind, new Value(left), new Value(right),
                                DeoptId::kNone);
  }

  BinarySmiOpInstr* MakeBinaryOp(Token::Kind op_kind,
                                 Definition* left,
                                 intptr_t right) {
    ConstantInstr* constant_right =
        flow_graph_->GetConstant(Smi::Handle(Smi::New(right)));
    return MakeBinaryOp(op_kind, left, constant_right);
  }

  Definition* RangeBoundaryToDefinition(const RangeBoundary& bound) {
    Definition* symbol = UnwrapConstraint(bound.symbol());
    if (bound.offset() == 0) {
      return symbol;
    }
    return MakeBinaryOp(Token::kADD, symbol, bound.offset());
  }

  typedef Definition* (BoundsCheckGeneralizer::*PhiBoundFunc)(PhiInstr*,
                                                              LoopInfo*,
                                                              InductionVar*,
                                                              Instruction*);

  // Construct a symbolic lower bound for a value at the given point.
  Definition* ConstructLowerBound(Definition* value, Instruction* point) {
    return ConstructBound(&BoundsCheckGeneralizer::InductionVariableLowerBound,
                          value, point);
  }

  // Construct a symbolic upper bound for a value at the given point.
  Definition* ConstructUpperBound(Definition* value, Instruction* point) {
    return ConstructBound(&BoundsCheckGeneralizer::InductionVariableUpperBound,
                          value, point);
  }

  // Returns the loop if the given phi is a Smi induction variable bounded by
  // a symbolic limit that dominates its uses.
  LoopInfo* GetSmiBoundedLoop(PhiInstr* phi) {
    LoopInfo* loop = phi->GetBlock()->loop_info();
    if (loop == nullptr) {
      return nullptr;
    }
    ConstraintInstr* limit = loop->limit();
    if (limit == nullptr) {
      return nullptr;
    }
    Definition* def = UnwrapConstraint(limit->value()->definition());
    Range* constraining_range = limit->constraint();
    if (GetSmiInduction(loop, def) != nullptr &&
        constraining_range->max().IsSymbol() &&
        def->IsDominatedBy(constraining_range->max().symbol())) {
      return loop;
    }
    return nullptr;
  }

  // Returns true if x is invariant and is either based on a Smi definition
  // or is a Smi constant.
  static bool IsSmiInvariant(const InductionVar* x) {
    return InductionVar::IsInvariant(x) && Smi::IsValid(x->offset()) &&
           Smi::IsValid(x->mult()) &&
           (x->mult() == 0 || x->def()->Type()->ToCid() == kSmiCid);
  }

  // Returns the induction variable if the given definition is a Smi linear
  // induction with unit stride and a Smi invariant initial value.
  InductionVar* GetSmiInduction(LoopInfo* loop, Definition* def) {
    if (loop != nullptr && def->Type()->ToCid() == kSmiCid) {
      InductionVar* induc = loop->LookupInduction(def);
      int64_t stride;
      if (induc != nullptr && InductionVar::IsLinear(induc, &stride) &&
          stride == 1 && IsSmiInvariant(induc->initial())) {
        return induc;
      }
    }
    return nullptr;
  }

  // Generates the invariant as an instruction in the flow graph.
  Definition* GenerateInvariant(InductionVar* induc) {
    Definition* res = nullptr;
    if (induc->mult() == 0) {
      res =
          flow_graph_->GetConstant(Smi::ZoneHandle(Smi::New(induc->offset())));
    } else {
      res = induc->def();
      if (induc->mult() != 1) {
        res = MakeBinaryOp(Token::kMUL, res, induc->mult());
      }
      if (induc->offset() != 0) {
        res = MakeBinaryOp(Token::kADD, res, induc->offset());
      }
    }
    return res;
  }

  // Construct a symbolic bound for a value at the given point:
  //
  //   bound(phi, point) = phi.bound(point)
  //   bound(a + b, point) = bound(a, point) + bound(b, point)
  //   bound(a * b, point) = bound(a, point) * bound(b, point)
  //   bound(a - b, point) = bound(a, point) - b
  //
  Definition* ConstructBound(PhiBoundFunc phi_bound_func,
                             Definition* value,
                             Instruction* point) {
    value = UnwrapConstraint(value);
    if (value->IsPhi()) {
      PhiInstr* phi = value->AsPhi();
      LoopInfo* loop = GetSmiBoundedLoop(phi);
      InductionVar* induc = GetSmiInduction(loop, phi);
      if (induc != nullptr) {
        return (this->*phi_bound_func)(phi, loop, induc, point);
      }
    } else if (value->IsBinarySmiOp()) {
      BinarySmiOpInstr* bin_op = value->AsBinarySmiOp();
      if ((bin_op->op_kind() == Token::kADD) ||
          (bin_op->op_kind() == Token::kMUL) ||
          (bin_op->op_kind() == Token::kSUB)) {
        Definition* new_left = ConstructBound(
            phi_bound_func, bin_op->left()->definition(), point);
        Definition* new_right =
            (bin_op->op_kind() != Token::kSUB)
                ? ConstructBound(phi_bound_func, bin_op->right()->definition(),
                                 point)
                : UnwrapConstraint(bin_op->right()->definition());
        if ((new_left != UnwrapConstraint(bin_op->left()->definition())) ||
            (new_right != UnwrapConstraint(bin_op->right()->definition()))) {
          return MakeBinaryOp(bin_op->op_kind(), new_left, new_right);
        }
      }
    }
    return value;
  }

  Definition* InductionVariableUpperBound(PhiInstr* phi,
                                          LoopInfo* loop,
                                          InductionVar* induc,
                                          Instruction* point) {
    ConstraintInstr* limit = loop->limit();
    if (!point->IsDominatedBy(limit)) {
      // The given point is not dominated by the loop limit, so the bound
      // does not apply there.
      return phi;
    }

    Definition* bounded_def = UnwrapConstraint(limit->value()->definition());
    if (bounded_def == phi) {
      // The phi is the bounded induction variable itself: use the symbolic
      // upper bound from the constraint.
      return RangeBoundaryToDefinition(limit->constraint()->max());
    }

    // Both the phi and the bounded definition are unit-stride inductions in
    // the same loop, so their difference is invariant:
    //   bound(phi) = bound(M) - x0 + y0
    // where M is the limit, and x0 and y0 are the respective initial values.
    InductionVar* bounded_induc = GetSmiInduction(loop, bounded_def);
    Definition* x0 = GenerateInvariant(bounded_induc->initial());
    Definition* y0 = GenerateInvariant(induc->initial());
    Definition* m = RangeBoundaryToDefinition(limit->constraint()->max());
    BinarySmiOpInstr* loop_length =
        MakeBinaryOp(Token::kSUB, ConstructUpperBound(m, point),
                     ConstructLowerBound(x0, point));
    return MakeBinaryOp(Token::kADD, ConstructUpperBound(y0, point),
                        loop_length);
  }

  Definition* InductionVariableLowerBound(PhiInstr* phi,
                                          LoopInfo* loop,
                                          InductionVar* induc,
                                          Instruction* point) {
    // The lower bound of a Smi induction variable is its initial value.
    return ConstructLowerBound(GenerateInvariant(induc->initial()), point);
  }
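  // Simplify normalizes a bound expression by re-associating additions and
  // subtractions: constants are folded into an accumulated offset, e.g.
  // (v + 1) + (w + 2) becomes (v + w) with an accumulated constant of 3.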
  bool Simplify(Definition** defn, intptr_t* constant) {
    if ((*defn)->IsBinarySmiOp()) {
      BinarySmiOpInstr* binary_op = (*defn)->AsBinarySmiOp();
      Definition* left = binary_op->left()->definition();
      Definition* right = binary_op->right()->definition();

      intptr_t c = 0;
      if (binary_op->op_kind() == Token::kADD) {
        intptr_t left_const = 0;
        intptr_t right_const = 0;
        if (!Simplify(&left, &left_const) || !Simplify(&right, &right_const)) {
          return false;
        }

        c = left_const + right_const;
        if (Utils::WillAddOverflow(left_const, right_const) ||
            !Smi::IsValid(c)) {
          return false;  // Abort.
        }

        if (constant != nullptr) {
          *constant = c;
        }

        if ((left == nullptr) && (right == nullptr)) {
          if (constant != nullptr) {
            *defn = nullptr;
          } else {
            *defn = flow_graph_->GetConstant(Smi::Handle(Smi::New(c)));
          }
          return true;
        }

        if (left == nullptr) {
          if ((constant != nullptr) || (c == 0)) {
            *defn = right;
            return true;
          } else {
            left = right;
            right = nullptr;
          }
        }

        if (right == nullptr) {
          if ((constant != nullptr) || (c == 0)) {
            *defn = left;
            return true;
          } else {
            right = flow_graph_->GetConstant(Smi::Handle(Smi::New(c)));
            c = 0;
          }
        }
      } else if (binary_op->op_kind() == Token::kSUB) {
        intptr_t left_const = 0;
        intptr_t right_const = 0;
        if (!Simplify(&left, &left_const) || !Simplify(&right, &right_const)) {
          return false;
        }

        c = (left_const - right_const);
        if (Utils::WillSubOverflow(left_const, right_const) ||
            !Smi::IsValid(c)) {
          return false;  // Abort.
        }

        if (constant != nullptr) {
          *constant = c;
        }

        if ((left == nullptr) && (right == nullptr)) {
          if (constant != nullptr) {
            *defn = nullptr;
          } else {
            *defn = flow_graph_->GetConstant(Smi::Handle(Smi::New(c)));
          }
          return true;
        }

        if (left == nullptr) {
          // ...
        }

        if (right == nullptr) {
          if ((constant != nullptr) || (c == 0)) {
            *defn = left;
            return true;
          }
          // ...
        }
      } else if (binary_op->op_kind() == Token::kMUL) {
        if (!Simplify(&left, nullptr) || !Simplify(&right, nullptr)) {
          return false;
        }
      } else {
        // Don't attempt to simplify any other binary operations.
        return true;
      }

      ASSERT(left != nullptr);
      ASSERT(right != nullptr);

      const bool left_changed = (left != binary_op->left()->definition());
      const bool right_changed = (right != binary_op->right()->definition());
      if (left_changed || right_changed) {
        if (!(*defn)->HasSSATemp()) {
          if (left_changed) binary_op->left()->set_definition(left);
          if (right_changed) binary_op->right()->set_definition(right);
          *defn = binary_op;
        } else {
          *defn = MakeBinaryOp(binary_op->op_kind(), UnwrapConstraint(left),
                               UnwrapConstraint(right));
        }
      }

      if ((c != 0) && (constant == nullptr)) {
        *defn = MakeBinaryOp(Token::kADD, *defn, c);
      }
    } else if ((*defn)->IsConstant()) {
      ConstantInstr* constant_defn = (*defn)->AsConstant();
      if ((constant != nullptr) && constant_defn->IsSmi()) {
        *defn = nullptr;
        *constant = Smi::Cast(constant_defn->value()).Value();
      }
    }

    return true;
  }
  // Find all definitions in the given bound expression that must be
  // constrained to be positive for the bound as a whole to be non-negative.
  bool FindNonPositiveSymbols(GrowableArray<Definition*>* symbols,
                              Definition* defn) {
    if (defn->IsConstant()) {
      const Object& value = defn->AsConstant()->value();
      return value.IsSmi() && (Smi::Cast(value).Value() >= 0);
    } else if (defn->HasSSATemp()) {
      if (!RangeUtils::IsPositive(defn->range())) {
        symbols->Add(defn);
      }
      return true;
    } else if (defn->IsBinarySmiOp()) {
      BinarySmiOpInstr* binary_op = defn->AsBinarySmiOp();
      ASSERT((binary_op->op_kind() == Token::kADD) ||
             (binary_op->op_kind() == Token::kSUB) ||
             (binary_op->op_kind() == Token::kMUL));

      if (RangeUtils::IsPositive(defn->range())) {
        // The subexpression is statically known to be positive.
        return true;
      }

      if (binary_op->op_kind() == Token::kSUB) {
        // Can't rule out negative values for a subtraction.
        return false;
      }

      return FindNonPositiveSymbols(symbols,
                                    binary_op->left()->definition()) &&
             FindNonPositiveSymbols(symbols,
                                    binary_op->right()->definition());
    }
    UNREACHABLE();
    return false;
  }
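  // The two helpers below substitute the tightest dominating ConstraintInstr
  // for each symbol in a bound expression, so that the bound reflects the
  // checks performed on the path to the given post-dominator.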
  static Definition* FindInnermostConstraint(Definition* defn,
                                             Instruction* post_dominator) {
    for (Value* use = defn->input_use_list(); use != nullptr;
         use = use->next_use()) {
      ConstraintInstr* constraint = use->instruction()->AsConstraint();
      if ((constraint != nullptr) &&
          post_dominator->IsDominatedBy(constraint)) {
        return FindInnermostConstraint(constraint, post_dominator);
      }
    }
    return defn;
  }

  static Definition* ApplyConstraints(
      Definition* defn,
      Instruction* post_dominator,
      GrowableArray<ConstraintInstr*>* constraints = nullptr) {
    if (defn->HasSSATemp()) {
      defn = FindInnermostConstraint(defn, post_dominator);
      if (constraints != nullptr) {
        for (intptr_t i = 0; i < constraints->length(); i++) {
          ConstraintInstr* constraint = (*constraints)[i];
          if (constraint->value()->definition() == defn) {
            return constraint;
          }
        }
      }
      return defn;
    }

    for (intptr_t i = 0; i < defn->InputCount(); i++) {
      defn->InputAt(i)->set_definition(ApplyConstraints(
          defn->InputAt(i)->definition(), post_dominator, constraints));
    }
    return defn;
  }
  static void PrettyPrintIndexBoundRecursively(BaseTextBuffer* f,
                                               Definition* index_bound) {
    BinarySmiOpInstr* binary_op = index_bound->AsBinarySmiOp();
    if (binary_op != nullptr) {
      f->AddString("(");
      PrettyPrintIndexBoundRecursively(f, binary_op->left()->definition());
      f->Printf(" %s ", Token::Str(binary_op->op_kind()));
      PrettyPrintIndexBoundRecursively(f, binary_op->right()->definition());
      f->AddString(")");
    } else if (index_bound->IsConstant()) {
      f->Printf("%" Pd "",
                Smi::Cast(index_bound->AsConstant()->value()).Value());
    } else {
      f->Printf("v%" Pd "", index_bound->ssa_temp_index());
    }
  }

  static const char* IndexBoundToCString(Definition* index_bound) {
    char buffer[1024];
    BufferFormatter f(buffer, sizeof(buffer));
    PrettyPrintIndexBoundRecursively(&f, index_bound);
    return Thread::Current()->zone()->MakeCopyOfString(buffer);
  }
  RangeAnalysis* range_analysis_;
  FlowGraph* flow_graph_;
  Scheduler scheduler_;
};
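// Walk all collected bounds checks: drop those that range and induction
// analysis prove redundant, and try to generalize the remaining ones when
// generalization is permitted.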
void RangeAnalysis::EliminateRedundantBoundsChecks() {
  if (FLAG_array_bounds_check_elimination) {
    const Function& function = flow_graph_->function();
    // Generalization is only applicable when the generalized check can
    // deoptimize if it fails.
    const bool try_generalization =
        !CompilerState::Current().is_aot() &&
        !function.ProhibitsBoundsCheckGeneralization();
    BoundsCheckGeneralizer generalizer(this, flow_graph_);
    for (CheckBoundBaseInstr* check : bounds_checks_) {
      if (check->IsRedundant(/*use_loops=*/true)) {
        check->ReplaceUsesWith(check->index()->definition());
        check->RemoveFromGraph();
      } else if (try_generalization) {
        if (auto jit_check = check->AsCheckArrayBound()) {
          generalizer.TryGeneralize(jit_check);
        }
      }
    }
  }
}
void RangeAnalysis::MarkUnreachableBlocks() {
  for (intptr_t i = 0; i < constraints_.length(); i++) {
    if (Range::IsUnknown(constraints_[i]->range())) {
      TargetEntryInstr* target = constraints_[i]->target();
      if (target == nullptr) {
        continue;
      }

      BranchInstr* branch =
          target->PredecessorAt(0)->last_instruction()->AsBranch();
      if (target == branch->true_successor()) {
        // True unreachable.
        if (FLAG_trace_constant_propagation && flow_graph_->should_print()) {
          THR_Print("Range analysis: True unreachable (B%" Pd ")\n",
                    branch->true_successor()->block_id());
        }
        branch->set_constant_target(branch->false_successor());
      } else {
        ASSERT(target == branch->false_successor());
        // False unreachable.
        if (FLAG_trace_constant_propagation && flow_graph_->should_print()) {
          THR_Print("Range analysis: False unreachable (B%" Pd ")\n",
                    branch->false_successor()->block_id());
        }
        branch->set_constant_target(branch->true_successor());
      }
    }
  }
}
void RangeAnalysis::RemoveConstraints() {
  for (intptr_t i = 0; i < constraints_.length(); i++) {
    Definition* def = constraints_[i]->value()->definition();
    // Some constraints might be constraining other constraints. Unwind the
    // chain of constraints until we reach the actual definition.
    while (def->IsConstraint()) {
      def = def->AsConstraint()->value()->definition();
    }
    constraints_[i]->ReplaceUsesWith(def);
    constraints_[i]->RemoveFromGraph();
  }
}
static void NarrowBinaryInt64Op(BinaryInt64OpInstr* int64_op) {
  // ... (narrows the operation to 32 bits when the ranges of the result and
  // of both operands, int64_op->left() and int64_op->right(), fit)
}

static void NarrowShiftInt64Op(ShiftIntegerOpInstr* int64_op) {
  // ... (same idea for shift operations)
}
void RangeAnalysis::NarrowMintToInt32() {
  for (intptr_t i = 0; i < binary_int64_ops_.length(); i++) {
    NarrowBinaryInt64Op(binary_int64_ops_[i]);
  }

  for (intptr_t i = 0; i < shift_int64_ops_.length(); i++) {
    NarrowShiftInt64Op(shift_int64_ops_[i]);
  }
}
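// IntegerInstructionSelector narrows 64-bit integer operations to unboxed
// uint32 operations: it collects candidate definitions, seeds the selected
// set with definitions that provably narrow their result into the uint32
// range, propagates the property to a fixed point, and finally replaces the
// selected instructions.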
IntegerInstructionSelector::IntegerInstructionSelector(FlowGraph* flow_graph)
    : flow_graph_(flow_graph) {
  ASSERT(flow_graph_ != nullptr);
  zone_ = flow_graph_->zone();
  selected_uint32_defs_ =
      new (zone_) BitVector(zone_, flow_graph_->current_ssa_temp_index());
}

void IntegerInstructionSelector::Select() {
  if (FLAG_trace_integer_ir_selection) {
    THR_Print("---- starting integer ir selection -------\n");
  }
  FindPotentialUint32Definitions();
  FindUint32NarrowingDefinitions();
  Propagate();
  ReplaceInstructions();
  if (FLAG_trace_integer_ir_selection) {
    THR_Print("---- after integer ir selection -------\n");
  }
}
bool IntegerInstructionSelector::IsPotentialUint32Definition(Definition* def) {
  return def->IsBoxInt64() || def->IsUnboxInt64() || def->IsShiftInt64Op() ||
         def->IsSpeculativeShiftInt64Op() ||
         (def->IsBinaryInt64Op() && BinaryUint32OpInstr::IsSupported(
                                        def->AsBinaryInt64Op()->op_kind())) ||
         (def->IsUnaryInt64Op() &&
          UnaryUint32OpInstr::IsSupported(def->AsUnaryInt64Op()->op_kind()));
}
void IntegerInstructionSelector::FindPotentialUint32Definitions() {
  if (FLAG_trace_integer_ir_selection) {
    THR_Print("++++ Finding potential Uint32 definitions:\n");
  }

  for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator();
       !block_it.Done(); block_it.Advance()) {
    BlockEntryInstr* block = block_it.Current();

    for (ForwardInstructionIterator instr_it(block); !instr_it.Done();
         instr_it.Advance()) {
      Instruction* current = instr_it.Current();
      Definition* defn = current->AsDefinition();
      if ((defn != nullptr) && defn->HasSSATemp()) {
        if (IsPotentialUint32Definition(defn)) {
          if (FLAG_support_il_printer && FLAG_trace_integer_ir_selection) {
            THR_Print("Adding %s\n", current->ToCString());
          }
          potential_uint32_defs_.Add(defn);
        }
      }
    }
  }
}
bool IntegerInstructionSelector::IsUint32NarrowingDefinition(Definition* def) {
  if (def->IsBinaryInt64Op()) {
    BinaryInt64OpInstr* op = def->AsBinaryInt64Op();
    // Must be a mask operation.
    if (op->op_kind() != Token::kBIT_AND) {
      return false;
    }
    Range* range = op->range();
    if ((range == nullptr) ||
        !range->IsWithin(0, static_cast<int64_t>(kMaxUint32))) {
      return false;
    }
    return true;
  }
  return false;
}
void IntegerInstructionSelector::FindUint32NarrowingDefinitions() {
  ASSERT(selected_uint32_defs_ != nullptr);
  if (FLAG_trace_integer_ir_selection) {
    THR_Print("++++ Selecting Uint32 definitions:\n");
  }
  for (intptr_t i = 0; i < potential_uint32_defs_.length(); i++) {
    Definition* defn = potential_uint32_defs_[i];
    if (IsUint32NarrowingDefinition(defn)) {
      if (FLAG_support_il_printer && FLAG_trace_integer_ir_selection) {
        THR_Print("Adding %s\n", defn->ToCString());
      }
      selected_uint32_defs_->Add(defn->ssa_temp_index());
    }
  }
}
bool IntegerInstructionSelector::AllUsesAreUint32Narrowing(Value* list_head) {
  for (Value::Iterator it(list_head); !it.Done(); it.Advance()) {
    Value* use = it.Current();
    Definition* defn = use->instruction()->AsDefinition();
    if ((defn == nullptr) || !defn->HasSSATemp() ||
        !selected_uint32_defs_->Contains(defn->ssa_temp_index())) {
      return false;
    }
    // The right-hand operand of a shift is not narrowing: all of its bits
    // must be taken into account.
    if (ShiftIntegerOpInstr* shift = defn->AsShiftIntegerOp()) {
      if (use == shift->right()) {
        return false;
      }
    }
  }
  return true;
}
bool IntegerInstructionSelector::CanBecomeUint32(Definition* def) {
  ASSERT(IsPotentialUint32Definition(def));
  if (def->IsBoxInt64()) {
    // A BoxInt64 is a candidate whenever its input is already a candidate.
    Definition* box_input = def->AsBoxInt64()->value()->definition();
    return selected_uint32_defs_->Contains(box_input->ssa_temp_index());
  }
  // A right shift with an input that can be outside of the Uint32 range
  // cannot be converted because we need the high bits.
  if (def->IsShiftInt64Op() || def->IsSpeculativeShiftInt64Op()) {
    ShiftIntegerOpInstr* op = def->AsShiftIntegerOp();
    if ((op->op_kind() == Token::kSHR) || (op->op_kind() == Token::kUSHR)) {
      Definition* shift_input = op->left()->definition();
      ASSERT(shift_input != nullptr);
      Range* range = shift_input->range();
      if ((range == nullptr) ||
          !range->IsWithin(0, static_cast<int64_t>(kMaxUint32))) {
        return false;
      }
    }
  }
  if (!def->HasUses()) {
    // No uses, skip.
    return false;
  }
  return AllUsesAreUint32Narrowing(def->input_use_list()) &&
         AllUsesAreUint32Narrowing(def->env_use_list());
}
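// Propagate repeatedly re-examines the remaining candidates until no new
// definition can be added: a definition joins the selected set only when all
// of its uses are themselves selected uint32 definitions.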
void IntegerInstructionSelector::Propagate() {
  ASSERT(selected_uint32_defs_ != nullptr);
  bool changed = true;
  intptr_t iteration = 0;
  while (changed) {
    if (FLAG_trace_integer_ir_selection) {
      THR_Print("+++ Iteration: %" Pd "\n", iteration++);
    }
    changed = false;
    for (intptr_t i = 0; i < potential_uint32_defs_.length(); i++) {
      Definition* defn = potential_uint32_defs_[i];
      if (selected_uint32_defs_->Contains(defn->ssa_temp_index())) {
        // Already a candidate, skip.
        continue;
      }
      if (defn->IsConstant()) {
        // Skip constants.
        continue;
      }
      if (CanBecomeUint32(defn)) {
        if (FLAG_support_il_printer && FLAG_trace_integer_ir_selection) {
          THR_Print("Adding %s\n", defn->ToCString());
        }
        // Found a new candidate; the fixed point has not been reached yet.
        selected_uint32_defs_->Add(defn->ssa_temp_index());
        changed = true;
      }
    }
  }
  if (FLAG_trace_integer_ir_selection) {
    // ...
  }
}
Definition* IntegerInstructionSelector::ConstructReplacementFor(
    Definition* def) {
  // Should only see candidate definitions.
  ASSERT(IsPotentialUint32Definition(def));
  // Should not see constant instructions.
  ASSERT(!def->IsConstant());
  if (def->IsBinaryIntegerOp()) {
    BinaryIntegerOpInstr* op = def->AsBinaryIntegerOp();
    Token::Kind op_kind = op->op_kind();
    Value* left = op->left()->CopyWithType();
    Value* right = op->right()->CopyWithType();
    intptr_t deopt_id = op->DeoptimizationTarget();
    return BinaryIntegerOpInstr::Make(kUnboxedUint32, op_kind, left, right,
                                      deopt_id, op->SpeculativeModeOfInputs());
  } else if (def->IsBoxInt64()) {
    Value* value = def->AsBoxInt64()->value()->CopyWithType();
    return new (Z) BoxUint32Instr(value);
  } else if (def->IsUnboxInt64()) {
    UnboxInstr* unbox = def->AsUnboxInt64();
    Value* value = unbox->value()->CopyWithType();
    intptr_t deopt_id = unbox->DeoptimizationTarget();
    return new (Z)
        UnboxUint32Instr(value, deopt_id, def->SpeculativeModeOfInputs());
  } else if (def->IsUnaryInt64Op()) {
    UnaryInt64OpInstr* op = def->AsUnaryInt64Op();
    Token::Kind op_kind = op->op_kind();
    Value* value = op->value()->CopyWithType();
    intptr_t deopt_id = op->DeoptimizationTarget();
    return new (Z) UnaryUint32OpInstr(op_kind, value, deopt_id);
  }
  UNREACHABLE();
  return nullptr;
}
void IntegerInstructionSelector::ReplaceInstructions() {
  if (FLAG_trace_integer_ir_selection) {
    THR_Print("++++ Replacing instructions:\n");
  }
  for (intptr_t i = 0; i < potential_uint32_defs_.length(); i++) {
    Definition* defn = potential_uint32_defs_[i];
    if (!selected_uint32_defs_->Contains(defn->ssa_temp_index())) {
      // Not a candidate, skip.
      continue;
    }
    Definition* replacement = ConstructReplacementFor(defn);
    ASSERT(replacement != nullptr);
    if (!Range::IsUnknown(defn->range())) {
      if (defn->range()->IsPositive()) {
        replacement->set_range(*defn->range());
      } else {
        replacement->set_range(Range(RangeBoundary::FromConstant(0),
                                     RangeBoundary::FromConstant(kMaxUint32)));
      }
    }
    if (FLAG_support_il_printer && FLAG_trace_integer_ir_selection) {
      THR_Print("Replacing %s with %s\n", defn->ToCString(),
                replacement->ToCString());
    }
    defn->ReplaceWith(replacement, nullptr);
  }
}
RangeBoundary RangeBoundary::FromDefinition(Definition* defn, int64_t offs) {
  if (defn->IsConstant() && defn->AsConstant()->IsSmi()) {
    return FromConstant(Smi::Cast(defn->AsConstant()->value()).Value() + offs);
  }
  ASSERT(IsValidOffsetForSymbolicRangeBoundary(offs));
  return RangeBoundary(kSymbol, reinterpret_cast<intptr_t>(defn), offs);
}
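// A symbolic RangeBoundary is stored as a (definition, offset) pair, so the
// boundary arithmetic below mostly manipulates the offset while keeping the
// symbol, falling back to constants when symbols do not match.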
bool RangeBoundary::WillAddOverflow(const RangeBoundary& a,
                                    const RangeBoundary& b) {
  ASSERT(a.IsConstant() && b.IsConstant());
  return Utils::WillAddOverflow(a.ConstantValue(), b.ConstantValue());
}

RangeBoundary RangeBoundary::Add(const RangeBoundary& a,
                                 const RangeBoundary& b) {
  ASSERT(a.IsConstant() && b.IsConstant());
  ASSERT(!WillAddOverflow(a, b));
  const int64_t result = a.ConstantValue() + b.ConstantValue();
  return RangeBoundary::FromConstant(result);
}

bool RangeBoundary::WillSubOverflow(const RangeBoundary& a,
                                    const RangeBoundary& b) {
  ASSERT(a.IsConstant() && b.IsConstant());
  return Utils::WillSubOverflow(a.ConstantValue(), b.ConstantValue());
}

RangeBoundary RangeBoundary::Sub(const RangeBoundary& a,
                                 const RangeBoundary& b) {
  ASSERT(a.IsConstant() && b.IsConstant());
  ASSERT(!WillSubOverflow(a, b));
  const int64_t result = a.ConstantValue() - b.ConstantValue();
  return RangeBoundary::FromConstant(result);
}
bool RangeBoundary::SymbolicAdd(const RangeBoundary& a,
                                const RangeBoundary& b,
                                RangeBoundary* result) {
  if (a.IsSymbol() && b.IsConstant()) {
    if (Utils::WillAddOverflow(a.offset(), b.ConstantValue())) {
      return false;
    }
    const int64_t offset = a.offset() + b.ConstantValue();
    if (!IsValidOffsetForSymbolicRangeBoundary(offset)) {
      return false;
    }
    *result = RangeBoundary::FromDefinition(a.symbol(), offset);
    return true;
  } else if (b.IsSymbol() && a.IsConstant()) {
    return SymbolicAdd(b, a, result);
  }
  return false;
}

bool RangeBoundary::SymbolicSub(const RangeBoundary& a,
                                const RangeBoundary& b,
                                RangeBoundary* result) {
  if (a.IsSymbol() && b.IsConstant()) {
    if (Utils::WillSubOverflow(a.offset(), b.ConstantValue())) {
      return false;
    }
    const int64_t offset = a.offset() - b.ConstantValue();
    if (!IsValidOffsetForSymbolicRangeBoundary(offset)) {
      return false;
    }
    *result = RangeBoundary::FromDefinition(a.symbol(), offset);
    return true;
  }
  return false;
}
bool RangeBoundary::WillShlOverflow(const RangeBoundary& value_boundary,
                                    int64_t shift_count) {
  ASSERT(value_boundary.IsConstant());
  ASSERT(shift_count >= 0);
  // ...
}

RangeBoundary RangeBoundary::Shl(const RangeBoundary& value_boundary,
                                 int64_t shift_count) {
  ASSERT(value_boundary.IsConstant());
  ASSERT(!WillShlOverflow(value_boundary, shift_count));
  ASSERT(shift_count >= 0);
  const int64_t value = value_boundary.ConstantValue();
  const int64_t result = static_cast<uint64_t>(value) << shift_count;
  return RangeBoundary::FromConstant(result);
}

RangeBoundary RangeBoundary::Shr(const RangeBoundary& value_boundary,
                                 int64_t shift_count) {
  ASSERT(value_boundary.IsConstant());
  ASSERT(shift_count >= 0);
  const int64_t value = value_boundary.ConstantValue();
  const int64_t result = (shift_count <= 63)
                             ? (value >> shift_count)
                             : (value >= 0 ? 0 : -1);
  return RangeBoundary::FromConstant(result);
}
static RangeBoundary CanonicalizeBoundary(const RangeBoundary& a,
                                          const RangeBoundary& overflow) {
  if (a.IsConstant()) {
    return a;
  }

  int64_t offset = a.offset();
  Definition* symbol = a.symbol();

  bool changed;
  do {
    changed = false;
    if (symbol->IsConstraint()) {
      symbol = symbol->AsConstraint()->value()->definition();
      changed = true;
    } else if (symbol->IsBinarySmiOp()) {
      // Fold constant operands of kADD/kSUB into the offset, continuing
      // from the non-constant operand; abort to `overflow` on overflow.
      BinarySmiOpInstr* op = symbol->AsBinarySmiOp();
      Definition* left = op->left()->definition();
      Definition* right = op->right()->definition();
      if (op->op_kind() == Token::kADD && right->IsConstant()) {
        int64_t rhs = Smi::Cast(right->AsConstant()->value()).Value();
        if (Utils::WillAddOverflow(offset, rhs)) return overflow;
        offset += rhs;
        symbol = left;
        changed = true;
      } else if (op->op_kind() == Token::kADD && left->IsConstant()) {
        int64_t rhs = Smi::Cast(left->AsConstant()->value()).Value();
        if (Utils::WillAddOverflow(offset, rhs)) return overflow;
        offset += rhs;
        symbol = right;
        changed = true;
      } else if (op->op_kind() == Token::kSUB && right->IsConstant()) {
        int64_t rhs = Smi::Cast(right->AsConstant()->value()).Value();
        if (Utils::WillSubOverflow(offset, rhs)) return overflow;
        offset -= rhs;
        symbol = left;
        changed = true;
      }
    }
  } while (changed);

  return RangeBoundary::FromDefinition(symbol, offset);
}
// Rewrite the symbolic boundary in terms of the symbolic upper bound of its
// symbol. Returns true if the substitution succeeded.
static bool CanonicalizeMaxBoundary(RangeBoundary* a) {
  if (!a->IsSymbol()) return false;

  Range* range = a->symbol()->range();
  if ((range == nullptr) || !range->max().IsSymbol()) return false;

  if (Utils::WillAddOverflow(range->max().offset(), a->offset())) {
    *a = RangeBoundary::MaxSmi();
    return true;
  }

  const int64_t offset = range->max().offset() + a->offset();
  *a = CanonicalizeBoundary(
      RangeBoundary::FromDefinition(range->max().symbol(), offset),
      RangeBoundary::MaxSmi());
  return true;
}

static bool CanonicalizeMinBoundary(RangeBoundary* a) {
  // Mirror image of CanonicalizeMaxBoundary using range->min() and MinSmi().
  if (!a->IsSymbol()) return false;

  Range* range = a->symbol()->range();
  if ((range == nullptr) || !range->min().IsSymbol()) return false;
  // ...
  return true;
}

typedef bool (*BoundaryOp)(RangeBoundary*);

static bool CanonicalizeForComparison(RangeBoundary* a,
                                      RangeBoundary* b,
                                      BoundaryOp op,
                                      const RangeBoundary& overflow) {
  if (!a->IsSymbol() || !b->IsSymbol()) {
    return false;
  }

  RangeBoundary canonical_a = *a;
  RangeBoundary canonical_b = *b;

  do {
    if (DependOnSameSymbol(canonical_a, canonical_b)) {
      *a = canonical_a;
      *b = canonical_b;
      return true;
    }
  } while (op(&canonical_a) || op(&canonical_b));

  return false;
}
static RangeBoundary JoinMin(RangeBoundary a,
                             RangeBoundary b,
                             RangeBoundary::RangeSize size) {
  if (a.Equals(b)) {
    return b;
  }

  if (CanonicalizeForComparison(&a, &b, &CanonicalizeMinBoundary,
                                RangeBoundary::MinConstant(size))) {
    return (a.offset() <= b.offset()) ? a : b;
  }

  const int64_t inf_a = a.LowerBound(size);
  const int64_t inf_b = b.LowerBound(size);
  const int64_t sup_a = a.UpperBound(size);
  const int64_t sup_b = b.UpperBound(size);

  if ((sup_a <= inf_b) && !a.LowerBound().Overflowed(size)) {
    return a;
  } else if ((sup_b <= inf_a) && !b.LowerBound().Overflowed(size)) {
    return b;
  } else {
    return RangeBoundary::MinConstant(size);
  }
}

static RangeBoundary JoinMax(RangeBoundary a,
                             RangeBoundary b,
                             RangeBoundary::RangeSize size) {
  if (a.Equals(b)) {
    return b;
  }

  if (CanonicalizeForComparison(&a, &b, &CanonicalizeMaxBoundary,
                                RangeBoundary::MaxConstant(size))) {
    return (a.offset() >= b.offset()) ? a : b;
  }

  const int64_t inf_a = a.LowerBound(size);
  const int64_t inf_b = b.LowerBound(size);
  const int64_t sup_a = a.UpperBound(size);
  const int64_t sup_b = b.UpperBound(size);

  if ((sup_a <= inf_b) && !b.UpperBound().Overflowed(size)) {
    return b;
  } else if ((sup_b <= inf_a) && !a.UpperBound().Overflowed(size)) {
    return a;
  } else {
    return RangeBoundary::MaxConstant(size);
  }
}
RangeBoundary RangeBoundary::IntersectionMin(RangeBoundary a,
                                             RangeBoundary b) {
  ASSERT(!a.IsUnknown() && !b.IsUnknown());

  if (a.Equals(b)) {
    return a;
  }

  if (a.IsMinimumOrBelow(RangeBoundary::kRangeBoundarySmi)) {
    return b;
  } else if (b.IsMinimumOrBelow(RangeBoundary::kRangeBoundarySmi)) {
    return a;
  }

  if (a.IsConstant() && b.IsConstant()) {
    return RangeBoundary::FromConstant(
        Utils::Maximum(a.ConstantValue(), b.ConstantValue()));
  }

  if (DependOnSameSymbol(a, b)) {
    return (a.offset() >= b.offset()) ? a : b;
  }

  const int64_t inf_a = a.SmiLowerBound();
  const int64_t inf_b = b.SmiLowerBound();

  return (inf_a >= inf_b) ? a : b;
}

RangeBoundary RangeBoundary::IntersectionMax(RangeBoundary a,
                                             RangeBoundary b) {
  ASSERT(!a.IsUnknown() && !b.IsUnknown());

  if (a.Equals(b)) {
    return a;
  }

  if (a.IsMaximumOrAbove(RangeBoundary::kRangeBoundarySmi)) {
    return b;
  } else if (b.IsMaximumOrAbove(RangeBoundary::kRangeBoundarySmi)) {
    return a;
  }

  if (a.IsConstant() && b.IsConstant()) {
    return RangeBoundary::FromConstant(
        Utils::Minimum(a.ConstantValue(), b.ConstantValue()));
  }

  if (DependOnSameSymbol(a, b)) {
    return (a.offset() <= b.offset()) ? a : b;
  }

  const int64_t sup_a = a.SmiUpperBound();
  const int64_t sup_b = b.SmiUpperBound();

  return (sup_a <= sup_b) ? a : b;
}
    case kUnboxedUint16:
      // ...
    case kUnboxedUint32:
      // ...
bool Range::Overlaps(int64_t min_int, int64_t max_int) const {
  // ...
  const int64_t this_min = lower.ConstantValue();
  const int64_t this_max = upper.ConstantValue();

  if ((this_min <= min_int) && (min_int <= this_max)) return true;
  if ((this_min <= max_int) && (max_int <= this_max)) return true;
  if ((min_int < this_min) && (max_int > this_max)) return true;
  return false;
}
void Range::Shl(const Range* left,
                const Range* right,
                RangeBoundary* result_min,
                RangeBoundary* result_max) {
  ASSERT(left != nullptr);
  ASSERT(right != nullptr);
  ASSERT(result_min != nullptr);
  ASSERT(result_max != nullptr);
  RangeBoundary left_max = Range::ConstantMax(left);
  RangeBoundary left_min = Range::ConstantMin(left);
  // A negative shift count always deoptimizes (and throws), so the minimum
  // shift count is zero.
  int64_t right_max = Utils::Maximum(Range::ConstantMax(right).ConstantValue(),
                                     static_cast<int64_t>(0));
  int64_t right_min = Utils::Maximum(Range::ConstantMin(right).ConstantValue(),
                                     static_cast<int64_t>(0));

  bool overflow = false;
  {
    const auto shift_amount =
        left_min.ConstantValue() > 0 ? right_min : right_max;
    // ... (compute *result_min via RangeBoundary::Shl, noting overflow)
  }
  {
    const auto shift_amount =
        left_max.ConstantValue() > 0 ? right_max : right_min;
    // ... (compute *result_max via RangeBoundary::Shl, noting overflow)
  }
  // ... (on overflow fall back to the full int64 range)
}

void Range::Shr(const Range* left,
                const Range* right,
                RangeBoundary* result_min,
                RangeBoundary* result_max) {
  RangeBoundary left_max = Range::ConstantMax(left);
  RangeBoundary left_min = Range::ConstantMin(left);
  // A negative shift count always deoptimizes (and throws), so the minimum
  // shift count is zero.
  int64_t right_max = Utils::Maximum(Range::ConstantMax(right).ConstantValue(),
                                     static_cast<int64_t>(0));
  int64_t right_min = Utils::Maximum(Range::ConstantMin(right).ConstantValue(),
                                     static_cast<int64_t>(0));

  *result_min = RangeBoundary::Shr(
      left_min, left_min.ConstantValue() > 0 ? right_max : right_min);
  *result_max = RangeBoundary::Shr(
      left_max, left_max.ConstantValue() > 0 ? right_min : right_max);
}
static void ConvertRangeToUnsigned(int64_t a,
                                   int64_t b,
                                   uint64_t* ua,
                                   uint64_t* ub) {
  ASSERT(a <= b);
  if ((a < 0) && (b >= 0)) {
    // Both signs are represented: the unsigned range covers everything.
    *ua = 0;
    *ub = kMaxUint64;
  } else {
    // The range is fully within the negative or the non-negative part of
    // the number line and maps to unsigned directly.
    *ua = static_cast<uint64_t>(a);
    *ub = static_cast<uint64_t>(b);
  }
}

static void ConvertRangeToSigned(uint64_t a,
                                 uint64_t b,
                                 int64_t* sa,
                                 int64_t* sb) {
  ASSERT(a <= b);
  if ((a <= static_cast<uint64_t>(kMaxInt64)) &&
      (b >= static_cast<uint64_t>(kMinInt64))) {
    // The range straddles the signed overflow point: it covers everything.
    *sa = kMinInt64;
    *sb = kMaxInt64;
  } else {
    // The range maps to signed directly.
    *sa = static_cast<int64_t>(a);
    *sb = static_cast<int64_t>(b);
  }
}
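// Ushr computes the result range in the unsigned domain: convert the operand
// range to unsigned, shift both ends, and convert the result back to signed,
// widening to the full range whenever a conversion would wrap.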
void Range::Ushr(const Range* left,
                 const Range* right,
                 RangeBoundary* result_min,
                 RangeBoundary* result_max) {
  // ... (clamp shift counts to be non-negative, as in Shl/Shr above)
  uint64_t unsigned_left_min, unsigned_left_max;
  ConvertRangeToUnsigned(left_min, left_max, &unsigned_left_min,
                         &unsigned_left_max);

  const uint64_t unsigned_result_min =
      (right_max >= kBitsPerInt64)
          ? 0
          : unsigned_left_min >> static_cast<uint64_t>(right_max);
  const uint64_t unsigned_result_max =
      (right_min >= kBitsPerInt64)
          ? 0
          : unsigned_left_max >> static_cast<uint64_t>(right_min);

  int64_t signed_result_min, signed_result_max;
  ConvertRangeToSigned(unsigned_result_min, unsigned_result_max,
                       &signed_result_min, &signed_result_max);
  *result_min = RangeBoundary::FromConstant(signed_result_min);
  *result_max = RangeBoundary::FromConstant(signed_result_max);
}
void Range::And(const Range* left_range,
                const Range* right_range,
                RangeBoundary* result_min,
                RangeBoundary* result_max) {
  ASSERT(left_range != nullptr);
  ASSERT(right_range != nullptr);
  ASSERT(result_min != nullptr);
  ASSERT(result_max != nullptr);

  // ... (tighter bounds for non-negative operands elided)

  BitwiseOp(left_range, right_range, result_min, result_max);
}
void Range::BitwiseOp(const Range* left_range,
                      const Range* right_range,
                      RangeBoundary* result_min,
                      RangeBoundary* result_max) {
  // ... (conservative estimate based on the bit size of the operands)
}

void Range::Add(const Range* left_range,
                const Range* right_range,
                RangeBoundary* result_min,
                RangeBoundary* result_max,
                Definition* left_defn) {
  ASSERT(left_range != nullptr);
  ASSERT(right_range != nullptr);
  ASSERT(result_min != nullptr);
  ASSERT(result_max != nullptr);

  RangeBoundary left_min = Definition::IsArrayLength(left_defn)
                               ? RangeBoundary::FromDefinition(left_defn)
                               : left_range->min();
  RangeBoundary left_max = Definition::IsArrayLength(left_defn)
                               ? RangeBoundary::FromDefinition(left_defn)
                               : left_range->max();

  bool overflow = false;
  if (!RangeBoundary::SymbolicAdd(left_min, right_range->min(), result_min)) {
    const auto right_min_bound = right_range->min().LowerBound();
    // ... (add the constant lower bounds, noting overflow)
  }
  if (!RangeBoundary::SymbolicAdd(left_max, right_range->max(), result_max)) {
    const auto right_max_bound = right_range->max().UpperBound();
    // ... (add the constant upper bounds, noting overflow)
  }
  // ... (on overflow fall back to the full int64 range)
}

void Range::Sub(const Range* left_range,
                const Range* right_range,
                RangeBoundary* result_min,
                RangeBoundary* result_max,
                Definition* left_defn) {
  ASSERT(left_range != nullptr);
  ASSERT(right_range != nullptr);
  ASSERT(result_min != nullptr);
  ASSERT(result_max != nullptr);

  RangeBoundary left_min = Definition::IsArrayLength(left_defn)
                               ? RangeBoundary::FromDefinition(left_defn)
                               : left_range->min();
  RangeBoundary left_max = Definition::IsArrayLength(left_defn)
                               ? RangeBoundary::FromDefinition(left_defn)
                               : left_range->max();

  bool overflow = false;
  if (!RangeBoundary::SymbolicSub(left_min, right_range->max(), result_min)) {
    const auto right_max_bound = right_range->max().UpperBound();
    // ... (subtract the constant bounds, noting overflow)
  }
  if (!RangeBoundary::SymbolicSub(left_max, right_range->min(), result_max)) {
    const auto right_min_bound = right_range->min().LowerBound();
    // ... (subtract the constant bounds, noting overflow)
  }
  // ... (on overflow fall back to the full int64 range)
}
void Range::Mul(const Range* left_range,
                const Range* right_range,
                RangeBoundary* result_min,
                RangeBoundary* result_max) {
  ASSERT(left_range != nullptr);
  ASSERT(right_range != nullptr);
  ASSERT(result_min != nullptr);
  ASSERT(result_max != nullptr);

  const int64_t left_max = ConstantAbsMax(left_range);
  const int64_t right_max = ConstantAbsMax(right_range);
  if (// ... (both absolute maxima are small enough)
      ((left_max == 0) || (right_max <= kMaxInt64 / left_max))) {
    // The product of the absolute maxima stays in the 64-bit range.
    const int64_t mul_max = left_max * right_max;
    // ... (pick the signs of the bounds based on the signs of the operands)
    const int64_t mul_min =
        OnlyPositiveOrZero(*left_range, *right_range) ? 0 : -mul_max;
    // ...
  }
  // ...
}

void Range::TruncDiv(const Range* left_range,
                     const Range* right_range,
                     RangeBoundary* result_min,
                     RangeBoundary* result_max) {
  ASSERT(left_range != nullptr);
  ASSERT(right_range != nullptr);
  ASSERT(result_min != nullptr);
  ASSERT(result_max != nullptr);
  // ...
}

void Range::Mod(const Range* right_range,
                RangeBoundary* result_min,
                RangeBoundary* result_max) {
  ASSERT(right_range != nullptr);
  ASSERT(result_min != nullptr);
  ASSERT(result_max != nullptr);
  // ... (the result of a % b is always within [0, |b| - 1] here)
  const int64_t kModMin = 0;
  // ...
}
bool Range::OnlyPositiveOrZero(const Range& a, const Range& b) {
  return a.OnlyGreaterThanOrEqualTo(0) && b.OnlyGreaterThanOrEqualTo(0);
}

bool Range::OnlyNegativeOrZero(const Range& a, const Range& b) {
  return a.OnlyLessThanOrEqualTo(0) && b.OnlyLessThanOrEqualTo(0);
}

int64_t Range::ConstantAbsMax(const Range* range) {
  if (range == nullptr) {
    return RangeBoundary::kMax;
  }
  const int64_t abs_min =
      Utils::AbsWithSaturation(Range::ConstantMin(range).ConstantValue());
  const int64_t abs_max =
      Utils::AbsWithSaturation(Range::ConstantMax(range).ConstantValue());
  return Utils::Maximum(abs_min, abs_max);
}

int64_t Range::ConstantAbsMin(const Range* range) {
  if (range == nullptr) {
    return 0;
  }
  const int64_t abs_min =
      Utils::AbsWithSaturation(Range::ConstantMin(range).ConstantValue());
  const int64_t abs_max =
      Utils::AbsWithSaturation(Range::ConstantMax(range).ConstantValue());
  return Utils::Minimum(abs_min, abs_max);
}
void Range::BinaryOp(const Token::Kind op,
                     const Range* left_range,
                     const Range* right_range,
                     Definition* left_defn,
                     Range* result) {
  ASSERT(left_range != nullptr);
  ASSERT(right_range != nullptr);

  RangeBoundary min;
  RangeBoundary max;
  switch (op) {
    // ...
    case Token::kTRUNCDIV:
      Range::TruncDiv(left_range, right_range, &min, &max);
      break;
    // ...
    case Token::kBIT_AND:
      Range::And(left_range, right_range, &min, &max);
      break;
    case Token::kBIT_XOR:
    case Token::kBIT_OR:
      Range::BitwiseOp(left_range, right_range, &min, &max);
      break;
    // ...
  }
  // ...
  *result = Range(min, max);
}
void Definition::InferRange(RangeAnalysis* analysis, Range* range) {
  if (Type()->ToCid() == kSmiCid) {
    *range = Range::Full(RangeBoundary::kRangeBoundarySmi);
  } else if (Type()->IsInt()) {
    *range = Range::Full(RangeBoundary::kRangeBoundaryInt64);
  } else {
    UNREACHABLE();  // Only integer types are supported here.
  }
}
static void Join(Range* range,
                 Definition* defn,
                 const Range* defn_range,
                 RangeBoundary::RangeSize size) {
  if (Range::IsUnknown(defn_range)) {
    return;
  }

  if (Range::IsUnknown(range)) {
    *range = *defn_range;
    return;
  }

  Range other = *defn_range;
  // ...
  if (sup_range <= inf_other) {
    // ...
  } else if (sup_other <= inf_range) {
    // ...
  }
  // ... (otherwise join the two ranges via JoinMin/JoinMax)
}

static bool DominatesPhi(BlockEntryInstr* a, BlockEntryInstr* phi_block) {
  return a->Dominates(phi_block) && (a != phi_block);
}

// When assigning a range to a phi, make sure that any symbolic boundary is
// defined in a block that dominates the phi's block; otherwise fall back to
// the given limit.
static RangeBoundary EnsureAcyclicSymbol(BlockEntryInstr* phi_block,
                                         const RangeBoundary& a,
                                         const RangeBoundary& limit) {
  if (!a.IsSymbol() || DominatesPhi(a.symbol()->GetBlock(), phi_block)) {
    return a;
  }

  // The symbol does not dominate the phi. Try unwrapping the constraint and
  // check again.
  Definition* unwrapped = UnwrapConstraint(a.symbol());
  if ((unwrapped != a.symbol()) &&
      DominatesPhi(unwrapped->GetBlock(), phi_block)) {
    return RangeBoundary::FromDefinition(unwrapped, a.offset());
  }

  return limit;
}
void ConstantInstr::InferRange(RangeAnalysis* analysis, Range* range) {
  if (value_.IsSmi()) {
    int64_t value = Smi::Cast(value_).Value();
    *range = Range(RangeBoundary::FromConstant(value),
                   RangeBoundary::FromConstant(value));
  } else if (value_.IsMint()) {
    int64_t value = Mint::Cast(value_).value();
    *range = Range(RangeBoundary::FromConstant(value),
                   RangeBoundary::FromConstant(value));
  } else {
    // Only Smi and Mint constants are supported.
    FATAL("Unexpected constant: %s\n", value_.ToCString());
  }
}

void ConstraintInstr::InferRange(RangeAnalysis* analysis, Range* range) {
  const Range* value_range = analysis->GetSmiRange(value());
  Range result = Range::IsUnknown(value_range)
                     ? *constraint()
                     : value_range->Intersect(constraint());
  if (result.IsUnsatisfiable()) {
    // ... (the block guarded by this constraint is unreachable)
  }
  *range = result;
}
void LoadFieldInstr::InferRange(RangeAnalysis* analysis, Range* range) {
  switch (slot().kind()) {
    case Slot::Kind::kArray_length:
    case Slot::Kind::kGrowableObjectArray_length:
      *range = Range(RangeBoundary::FromConstant(0),
                     RangeBoundary::FromConstant(Array::kMaxElements));
      break;

    case Slot::Kind::kTypedDataBase_length:
    case Slot::Kind::kTypedDataView_offset_in_bytes:
      // ...
      break;

    case Slot::Kind::kAbstractType_hash:
    case Slot::Kind::kTypeArguments_hash:
      *range = Range(RangeBoundary::FromConstant(0), RangeBoundary::MaxSmi());
      break;

    case Slot::Kind::kTypeArguments_length:
      *range = Range(RangeBoundary::FromConstant(0),
                     RangeBoundary::FromConstant(TypeArguments::kMaxElements));
      break;

    case Slot::Kind::kRecord_shape:
      *range = Range(RangeBoundary::FromConstant(0), RangeBoundary::MaxSmi());
      break;

    case Slot::Kind::kString_length:
      *range = Range(RangeBoundary::FromConstant(0),
                     RangeBoundary::FromConstant(String::kMaxElements));
      break;

#define NATIVE_SLOT_CASE(ClassName, __, FieldName, ___, ____)                 \
  case Slot::Kind::k##ClassName##_##FieldName:
      NOT_INT_NATIVE_SLOTS_LIST(NATIVE_SLOT_CASE)
#undef NATIVE_SLOT_CASE
      // Not an integer-valued field.
      UNREACHABLE();
      break;

#define UNBOXED_NATIVE_SLOT_CASE(Class, __, Field, ___, ____)                 \
  case Slot::Kind::k##Class##_##Field:
      UNBOXED_NATIVE_SLOTS_LIST(UNBOXED_NATIVE_SLOT_CASE)
#undef UNBOXED_NATIVE_SLOT_CASE
      // ... (full range of the slot's unboxed representation)
      break;

    case Slot::Kind::kClosure_hash:
    case Slot::Kind::kLinkedHashBase_hash_mask:
    case Slot::Kind::kLinkedHashBase_used_data:
    case Slot::Kind::kLinkedHashBase_deleted_keys:
      *range = Range(RangeBoundary::FromConstant(0), RangeBoundary::MaxSmi());
      break;

    case Slot::Kind::kArgumentsDescriptor_type_args_len:
    case Slot::Kind::kArgumentsDescriptor_positional_count:
    case Slot::Kind::kArgumentsDescriptor_count:
    case Slot::Kind::kArgumentsDescriptor_size:
      *range = Range(RangeBoundary::FromConstant(0), RangeBoundary::MaxSmi());
      break;

    default:
      Definition::InferRange(analysis, range);
  }
}
void LoadClassIdInstr::InferRange(uword* lower, uword* upper) {
  // ...
  HierarchyInfo* hi = Thread::Current()->hierarchy_info();
  if (hi != nullptr) {
    const auto& type = *object()->Type()->ToAbstractType();
    if (type.IsType() && !type.IsFutureOrType() &&
        !CompileType::NullIsAssignableTo(type)) {
      const auto& type_class = Class::Handle(type.type_class());
      if (!type_class.has_dynamically_extendable_subtypes()) {
        const auto& ranges =
            hi->SubtypeRangesForClass(type_class, /*include_abstract=*/false,
                                      /*exclude_null=*/true);
        if (ranges.length() > 0) {
          *lower = ranges[0].cid_start;
          *upper = ranges[ranges.length() - 1].cid_end;
        }
      }
    }
  }
}
void LoadCodeUnitsInstr::InferRange(RangeAnalysis* analysis, Range* range) {
  ASSERT(element_count_ > 0);
  switch (class_id()) {
    case kOneByteStringCid:
      ASSERT(element_count_ <= 4);
      // ... (range is [0, 2^(8 * element_count_) - 1])
      break;
    case kTwoByteStringCid:
      ASSERT(element_count_ <= 2);
      // ... (range is [0, 2^(16 * element_count_) - 1])
      break;
    default:
      UNREACHABLE();
      break;
  }
}
void BinaryIntegerOpInstr::InferRangeHelper(const Range* left_range,
                                            const Range* right_range,
                                            Range* range) {
  // ... (combine the operand ranges via Range::BinaryOp, compute the
  // overflow status, and clamp the result to the representation)
}

static void CacheRange(Range** slot,
                       const Range* range,
                       RangeBoundary::RangeSize size) {
  if (range != nullptr) {
    if (*slot == nullptr) {
      *slot = new Range();
    }
    **slot = *range;

    // Eliminate any symbolic dependencies from the range information.
    (*slot)->ClampToConstant(size);
  } else if (*slot != nullptr) {
    **slot = Range();  // Clear the cached range information.
  }
}
// (The bodies of the remaining InferRange overloads survive only in
// fragments; the elided pieces are marked below.)

void BinaryIntegerOpInstr::InferRange(RangeAnalysis* analysis, Range* range) {
  auto const left_size = /* ... */;
  auto const right_size = /* ... */;
  // ...
  if (/* ... */
      op_kind() == Token::kTRUNCDIV) {
    // ...
  }
  // ...
  InferRangeHelper(left()->definition()->range(), right_range, range);
}

// The box/unbox conversions propagate the value range when it fits the
// target representation:
//
//   *range = *value_range;
//   ...
//   } else if (value_range->IsWithin(&to_range)) {
//     *range = *value_range;
//   }

void IntConverterInstr::InferRange(RangeAnalysis* analysis, Range* range) {
  if (from() == kUntagged) {
    ASSERT(value_range == nullptr);  // Not an integer-valued definition.
    // ...
  }
  // ...
  *range = *value_range;
  // ...
  if (value_range->IsWithin(&common_range)) {
    *range = *value_range;
  }
  // ...
}
static bool IsRedundantBasedOnRangeInformation(Value* index, Value* length) {
  if (index->BindsToSmiConstant() && length->BindsToSmiConstant()) {
    // Both the index and the length are constant.
    const auto index_val = index->BoundSmiConstant();
    const auto length_val = length->BoundSmiConstant();
    return (0 <= index_val && index_val < length_val);
  }

  Definition* index_defn = index->definition();
  Range* index_range = index_defn->range();
  if (index_range == nullptr) {
    // ...
    index_range = index_defn->range();
  }

  // ... (compare the symbolic upper bound of the index against the
  // canonicalized symbolic length; the check is redundant when the bound is
  // provably smaller)
  return max.offset() < canonical_length.offset();
}

bool CheckBoundBaseInstr::IsRedundant(bool use_loops) {
  // First try to prove redundancy with the results of range analysis.
  if (IsRedundantBasedOnRangeInformation(index(), length())) {
    return true;
  } else if (!use_loops) {
    return false;
  }
  // Next try to prove redundancy with the results of induction analysis.
  LoopInfo* loop = GetBlock()->loop_info();
  if (loop != nullptr) {
    return loop->IsInRange(this, index(), length());
  }
  return false;
}