// Excerpts from the Dart VM compiler's loop hierarchy and induction
// variable analysis (class InductionVarAnalysis and friends).

// Constructor (excerpt): the analysis caches the preorder block list and
// allocates from the current thread's zone.
  explicit InductionVarAnalysis(const GrowableArray<BlockEntryInstr*>& preorder)
      : preorder_(preorder),
        zone_(Thread::Current()->zone()) {}
  // Bookkeeping information needed while finding strongly connected
  // components (SCCs) in the SSA graph of a loop.
  struct SCCInfo {
    explicit SCCInfo(intptr_t d) : depth(d), done(false) {}
    intptr_t depth;
    bool done;
    bool operator!=(const SCCInfo& other) const {
      return depth != other.depth || done != other.done;
    }
    bool operator==(const SCCInfo& other) const {
      return depth == other.depth && done == other.done;
    }
  };
  typedef RawPointerKeyValueTrait<Definition, SCCInfo> VisitKV;
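// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): the SCCInfo pair
// (depth, done) is exactly the bookkeeping Tarjan's algorithm needs --
// "depth" is the discovery index, lowered to the smallest index reachable
// in the DFS, and "done" marks nodes already popped into a component. The
// standalone program below runs the same scheme on a tiny adjacency list;
// all names (Tarjan, succ, ...) are hypothetical stand-ins.
#include <algorithm>
#include <cstdio>
#include <vector>

struct Tarjan {
  const std::vector<std::vector<int>>& succ;  // adjacency list
  std::vector<int> depth;                     // -1 means unvisited
  std::vector<bool> done;                     // popped into some SCC
  std::vector<int> stack;
  int index = 0;

  explicit Tarjan(const std::vector<std::vector<int>>& s)
      : succ(s), depth(s.size(), -1), done(s.size(), false) {}

  int Visit(int u) {  // returns the (possibly lowered) depth of u
    if (depth[u] != -1) return depth[u];
    int d = depth[u] = index++;
    stack.push_back(u);
    int low = d;
    for (int v : succ[u]) {
      if (!done[v]) low = std::min(low, Visit(v));
    }
    if (low < d) {
      depth[u] = low;  // u belongs to a component rooted higher up
    } else {
      std::printf("SCC:");  // u roots a component: pop it off the stack
      for (;;) {
        int v = stack.back();
        stack.pop_back();
        done[v] = true;
        std::printf(" %d", v);
        if (v == u) break;
      }
      std::printf("\n");
    }
    return depth[u];
  }
};

int main() {
  // 0 -> 1 -> 2 -> 0 forms one SCC; 3 is a singleton SCC.
  std::vector<std::vector<int>> g = {{1}, {2}, {0, 3}, {}};
  Tarjan t(g);
  for (int u = 0; u < 4; u++) t.Visit(u);
}
// ---------------------------------------------------------------------------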
  // Traversal methods.
  bool Visit(LoopInfo* loop, Definition* def);
  intptr_t VisitDescendant(LoopInfo* loop, Definition* def);
  void Classify(LoopInfo* loop, Definition* def);
  void ClassifySCC(LoopInfo* loop);
  void ClassifyControl(LoopInfo* loop);
  // Transfer methods. Compute how induction of the operands, if any,
  // transfers over the operation performed by the given definition.
  InductionVar* TransferPhi(LoopInfo* loop, Definition* def, intptr_t idx = -1);
  InductionVar* TransferDef(LoopInfo* loop, Definition* def);
  InductionVar* TransferBinary(LoopInfo* loop, Definition* def);
  InductionVar* TransferUnary(LoopInfo* loop, Definition* def);
  // Solver methods. Compute how the temporary meaning assigned to the
  // definitions in a cycle transfers over the operation performed by
  // the given definition.
  InductionVar* SolvePhi(LoopInfo* loop, Definition* def, intptr_t idx = -1);
  InductionVar* SolveConstraint(LoopInfo* loop,
                                Definition* def,
                                InductionVar* init);
  InductionVar* SolveBinary(LoopInfo* loop,
                            Definition* def,
                            InductionVar* init);
  InductionVar* SolveUnary(LoopInfo* loop, Definition* def, InductionVar* init);

  // Lookup methods.
  InductionVar* Lookup(LoopInfo* loop, Definition* def);
  InductionVar* LookupCycle(Definition* def);
  // Arithmetic on induction expressions.
  InductionVar* Add(InductionVar* x, InductionVar* y);
  InductionVar* Sub(InductionVar* x, InductionVar* y);
  InductionVar* Mul(InductionVar* x, InductionVar* y);
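// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): an invariant is the
// symbolic value offset + mult * def, and a linear induction is the pair
// (initial, next), denoting initial + i * next in iteration i. Under those
// definitions Add/Sub/Mul reduce to component-wise rules, as the toy
// constant-only model below shows; the Lin struct is a hypothetical
// stand-in for InductionVar.
#include <cassert>

struct Lin {       // linear induction: initial + i * next (constants only)
  long initial;
  long next;
};

// (a + i*b) + (c + i*d) == (a + c) + i * (b + d)
static Lin Add(Lin x, Lin y) { return {x.initial + y.initial, x.next + y.next}; }
// (a + i*b) - (c + i*d) == (a - c) + i * (b - d)
static Lin Sub(Lin x, Lin y) { return {x.initial - y.initial, x.next - y.next}; }
// c * (a + i*b) == (c * a) + i * (c * b): constants distribute.
static Lin Mul(long c, Lin y) { return {c * y.initial, c * y.next}; }

int main() {
  Lin i = {0, 1};                  // i = 0, 1, 2, ...
  Lin j = Add(Mul(2, i), {3, 0});  // j = 2*i + 3
  assert(j.initial == 3 && j.next == 2);
  Lin k = Sub(j, i);               // k = i + 3
  assert(k.initial == 3 && k.next == 1);
}
// ---------------------------------------------------------------------------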
  // Bookkeeping data structures.
  const GrowableArray<BlockEntryInstr*>& preorder_;
  GrowableArray<Definition*> stack_;
  GrowableArray<Definition*> scc_;
  GrowableArray<BranchInstr*> branches_;
  DirectChainedHashMap<LoopInfo::InductionKV> cycle_;
  DirectChainedHashMap<VisitKV> map_;
  intptr_t current_index_;
  Zone* zone_;
// In InitIndex(loop): find the phi-index of the initial value, i.e. the
// index of the first header predecessor that lies outside the loop.
  for (intptr_t i = 0; i < header->PredecessorCount(); ++i) {
    if (!loop->Contains(header->PredecessorAt(i))) {
      return i;
    }
  }
// In IsConstant(def, val): determine if a definition is an int64 constant.
  if (def->IsConstant()) {
    const Object& value = def->AsConstant()->value();
    if (value.IsInteger()) {
      *val = Integer::Cast(value).AsInt64Value();
      return true;
    }
  }
  return false;
// In CanBeMadeExclusive(loop, x, branch, is_lower) (excerpt): a non-strict
// bound (i <= U or i >= L) may be made strict only when the adjusted bound
// provably cannot wrap around.
  if (x->CanComputeBounds(loop, branch, &min, &max)) {
    // ... constant bounds are safe when they avoid kMinInt64/kMaxInt64;
    // for an invariant array-length bound length + c, a negative c
    // guarantees that the exclusive form length + c + 1 cannot wrap:
    return max->offset() < 0;
  }
  return false;
// Helper to adjust the range [lower_bound, upper_bound] with the given
// constant offsets, guarding against arithmetic wrap-around.
static bool SafelyAdjust(Zone* zone,
                         InductionVar* lower_bound,
                         int64_t lower_bound_offset,
                         InductionVar* upper_bound,
                         int64_t upper_bound_offset,
                         InductionVar** min,
                         InductionVar** max) {
  bool success = false;
  int64_t lval = 0;
  int64_t uval = 0;
  if (InductionVar::IsConstant(lower_bound, &lval)) {
    const int64_t l = lval + lower_bound_offset;
    if (InductionVar::IsConstant(upper_bound, &uval)) {
      // Constant bounds: the adjusted value must move in the direction
      // of the offset, otherwise the addition wrapped.
      const int64_t u = uval + upper_bound_offset;
      success = ((lower_bound_offset >= 0 && lval <= l) ||
                 (lower_bound_offset < 0 && lval > l)) &&
                ((upper_bound_offset >= 0 && uval <= u) ||
                 (upper_bound_offset < 0 && uval > u));
      if (success) {
        *min = (lower_bound_offset == 0) ? lower_bound
                                         : new (zone) InductionVar(l);
        *max = (upper_bound_offset == 0) ? upper_bound
                                         : new (zone) InductionVar(u);
      }
    } else if (InductionVar::IsInvariant(upper_bound) &&
               upper_bound->mult() == 1 &&
               InductionVar::IsArrayLength(upper_bound->def())) {
      // Symbolic upper bound: an array length plus a constant offset.
      const int64_t c = upper_bound->offset() + upper_bound_offset;
      success = ((lower_bound_offset >= 0 && lval <= l) ||
                 (lower_bound_offset < 0 && lval > l)) &&
                ((upper_bound_offset >= 0 && upper_bound->offset() <= c) ||
                 (upper_bound_offset < 0 && upper_bound->offset() > c));
      if (success) {
        *min = (lower_bound_offset == 0) ? lower_bound
                                         : new (zone) InductionVar(l);
        *max = (upper_bound_offset == 0)
                   ? upper_bound
                   : new (zone) InductionVar(c, upper_bound->mult(),
                                             upper_bound->def());
      }
    }
  }
  return success;
}
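// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): the safety test in
// SafelyAdjust is the usual two's-complement overflow check -- v + off wraps
// iff the result moved in the "wrong" direction. The hypothetical helper
// SafeAdd below demonstrates the same sign-based test on its own.
#include <cassert>
#include <cstdint>

// Returns true and stores v + off when the addition does not wrap around
// (the comparison pattern mirrors the lval <= l / lval > l checks above).
static bool SafeAdd(int64_t v, int64_t off, int64_t* out) {
  const int64_t r = static_cast<int64_t>(
      static_cast<uint64_t>(v) + static_cast<uint64_t>(off));  // wrapping add
  const bool ok = (off >= 0) ? (v <= r) : (v > r);
  if (ok) *out = r;
  return ok;
}

int main() {
  int64_t r = 0;
  assert(SafeAdd(10, 5, &r) && r == 15);
  assert(!SafeAdd(INT64_MAX, 1, &r));   // would wrap above
  assert(!SafeAdd(INT64_MIN, -1, &r));  // would wrap below
}
// ---------------------------------------------------------------------------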
// In VisitHierarchy(loop): visit every loop on this level, then recurse
// into the inner loops.
  for (; loop != nullptr; loop = loop->next_) {
    VisitLoop(loop);
    VisitHierarchy(loop->inner_);
  }
// In VisitLoop(loop): find strongly connected components (SCCs) in the SSA
// graph of this loop using Tarjan's algorithm; classification happens
// "on demand" while components are popped.
  ASSERT(stack_.is_empty());
  ASSERT(branches_.is_empty());
  // ... for each block belonging to this loop (skipping inner loops):
  // visit phi-operations first,
  if (block->IsJoinEntry()) {
    for (PhiIterator it(block->AsJoinEntry()); !it.Done(); it.Advance()) {
      Visit(loop, it.Current());
    }
  }
  // then visit instructions, keeping track of all branches.
  for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
    Instruction* instruction = it.Current();
    Visit(loop, instruction->AsDefinition());
    if (instruction->IsBranch()) {
      branches_.Add(instruction->AsBranch());
    }
  }
  ASSERT(stack_.is_empty());
  // Classify the loop control.
  ClassifyControl(loop);
bool InductionVarAnalysis::Visit(LoopInfo* loop, Definition* def) {
  if (def == nullptr || map_.HasKey(def)) {
    return false;  // no definition, or already visited
  }
  // Assign a depth-first number and push onto Tarjan's stack.
  intptr_t d = ++current_index_;
  map_.Insert(VisitKV::Pair(def, SCCInfo(d)));
  stack_.Add(def);
  // Visit all descendants.
  intptr_t low = d;
  for (intptr_t i = 0, n = def->InputCount(); i < n; i++) {
    Value* input = def->InputAt(i);
    if (input != nullptr) {
      low = Utils::Minimum(low, VisitDescendant(loop, input->definition()));
    }
  }
  // Lower the depth, or pop a newly found SCC.
  if (low < d) {
    map_.Lookup(def)->value.depth = low;
  } else {
    while (!stack_.is_empty()) {
      Definition* top = stack_.RemoveLast();
      scc_.Add(top);
      map_.Lookup(top)->value.done = true;
      if (top == def) break;
    }
    // Classify: a single definition directly, a true cycle via the solver.
    if (scc_.length() == 1) {
      Classify(loop, scc_[0]);
    } else {
      ASSERT(scc_.length() > 1);
      ClassifySCC(loop);
      cycle_.Clear();
    }
    scc_.Clear();
  }
  return true;
}
intptr_t InductionVarAnalysis::VisitDescendant(LoopInfo* loop,
                                               Definition* def) {
  // The traversal stops at anything not defined in this loop (either a
  // loop-invariant entry value defined outside or an exit value defined
  // by an inner loop).
  if (def->GetBlock()->loop_info() != loop) {
    return current_index_;
  }
  // Inspect the descendant node.
  if (!Visit(loop, def) && map_.Lookup(def)->value.done) {
    return current_index_;
  }
  return map_.Lookup(def)->value.depth;
}
void InductionVarAnalysis::Classify(LoopInfo* loop, Definition* def) {
  // Classify the different kinds of definitions.
  InductionVar* induc = nullptr;
  if (loop->IsHeaderPhi(def)) {
    intptr_t idx = InitIndex(loop);
    induc = TransferPhi(loop, def, idx);
    if (induc != nullptr) {
      InductionVar* init = Lookup(loop, def->InputAt(idx)->definition());
      // A header phi whose merged value differs from its initial value
      // is a wrap-around induction.
      if (!init->IsEqual(induc)) {
        induc =
            new (zone_) InductionVar(InductionVar::kWrapAround, init, induc);
      }
    }
  } else if (def->IsPhi()) {
    induc = TransferPhi(loop, def);
  } else {
    induc = TransferDef(loop, def);
  }
  // Successfully classified?
  if (induc != nullptr) {
    loop->AddInduction(def, induc);
  }
}
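// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): a header phi whose
// merged value differs from its initial value is a wrap-around induction.
// In the loop below, w is WRAP(99, LIN(0, 1)): it carries 99 on the first
// iteration and then trails the linear induction i by one step.
#include <cassert>

int main() {
  int w = 99;                     // initial value entering the loop
  for (int i = 0; i < 10; i++) {  // i is LIN(initial = 0, next = +1)
    // w takes the values 99, 0, 1, 2, ...
    assert(w == (i == 0 ? 99 : i - 1));
    w = i;                        // the phi's in-loop input
  }
}
// ---------------------------------------------------------------------------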
void InductionVarAnalysis::ClassifySCC(LoopInfo* loop) {
  intptr_t size = scc_.length();
  // Find a header phi, usually near the end of the SCC.
  intptr_t p = -1;
  for (intptr_t i = size - 1; i >= 0; i--) {
    if (loop->IsHeaderPhi(scc_[i])) {
      p = i;
      break;
    }
  }
  // Rotate the cycle so that it starts at the header phi, assign the
  // temporary meaning "init" to the phi, and solve the rest of the cycle.
  if (p >= 0) {
    Definition* phi = scc_[p];
    intptr_t idx = InitIndex(loop);
    InductionVar* init = Lookup(loop, phi->InputAt(idx)->definition());
    cycle_.Insert(LoopInfo::InductionKV::Pair(phi, init));
    for (intptr_t i = 1, j = p; i < size; i++) {
      if (++j >= size) j = 0;
      Definition* def = scc_[j];
      InductionVar* update = nullptr;
      if (def->IsPhi()) {
        update = SolvePhi(loop, def);
      } else if (def->IsBinaryIntegerOp()) {
        update = SolveBinary(loop, def, init);
      } else if (def->IsUnaryIntegerOp()) {
        update = SolveUnary(loop, def, init);
      } else if (def->IsConstraint()) {
        update = SolveConstraint(loop, def, init);
      } else {
        Definition* orig = def->OriginalDefinitionIgnoreBoxingAndConstraints();
        if (orig != def) {
          update = LookupCycle(orig);  // pass-through
        }
      }
      if (update == nullptr) {
        return;  // the cycle escapes the solvable forms
      }
      cycle_.Insert(LoopInfo::InductionKV::Pair(def, update));
    }
    // Success if the value flowing back into the phi is consistent.
    InductionVar* induc = SolvePhi(loop, phi, idx);
    if (induc != nullptr) {
      // An invariant update makes the phi linear; otherwise it is periodic.
      if (InductionVar::IsInvariant(induc)) {
        induc = new (zone_) InductionVar(InductionVar::kLinear, init, induc);
      }
      loop->AddInduction(phi, induc);
      // Classify the remaining definitions in the cycle on demand.
      for (intptr_t i = 1, j = p; i < size; i++) {
        if (++j >= size) j = 0;
        Classify(loop, scc_[j]);
      }
    }
  }
}
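// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): ClassifySCC gives
// the header phi the temporary meaning "init", propagates it through the
// cycle, and succeeds when the value flowing back is init plus an invariant
// update -- the phi is then the linear induction LIN(init, update). The toy
// below solves the cycle i = phi(init, i + 1) symbolically, tracking values
// of the hypothetical form init + c.
#include <cassert>

struct Sym {   // symbolic value: init + c
  long c;
};

// "SolveBinary" for the cycle operation cur + k, where cur carries the
// temporary meaning assigned so far.
static Sym AddConst(Sym cur, long k) {
  return {cur.c + k};
}

int main() {
  Sym phi = {0};                // temporary meaning: init itself
  Sym back = AddConst(phi, 1);  // the cycle computes i + 1
  // The value flowing back into the phi is init + 1, so the update is +1
  // and the phi classifies as LIN(init, +1).
  assert(back.c == 1);
}
// ---------------------------------------------------------------------------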
void InductionVarAnalysis::ClassifyControl(LoopInfo* loop) {
  for (auto branch : branches_) {
    // Proper comparison?
    ComparisonInstr* compare = branch->comparison();
    if (compare->InputCount() != 2) {
      continue;
    }
    Token::Kind cmp = compare->kind();
    // Proper loop exit? Express the condition in "loop while true" form.
    TargetEntryInstr* ift = branch->true_successor();
    TargetEntryInstr* iff = branch->false_successor();
    if (loop->Contains(ift) && !loop->Contains(iff)) {
      // ok as is
    } else if (!loop->Contains(ift) && loop->Contains(iff)) {
      cmp = Token::NegateComparison(cmp);
    } else {
      continue;
    }
    // Comparison of a linear constant-stride induction against an
    // invariant? Normalize with the induction on the left.
    int64_t stride = 0;
    Definition* left = compare->InputAt(0)->definition()
                           ->OriginalDefinitionIgnoreBoxingAndConstraints();
    Definition* right = compare->InputAt(1)->definition()
                            ->OriginalDefinitionIgnoreBoxingAndConstraints();
    InductionVar* x = Lookup(loop, left);
    InductionVar* y = Lookup(loop, right);
    if (InductionVar::IsLinear(x, &stride) && InductionVar::IsInvariant(y)) {
      // ok as is
    } else if (InductionVar::IsInvariant(x) &&
               InductionVar::IsLinear(y, &stride)) {
      InductionVar* tmp = x;
      x = y;
      y = tmp;
      cmp = Token::FlipComparison(cmp);
    } else {
      continue;
    }
    // Safe, strict looping condition? Non-strict forms are accepted only
    // when making them strict cannot wrap around.
    switch (cmp) {
      case Token::kLT:
        if (stride == 1) break;  // i < U with i++
        continue;
      case Token::kGT:
        if (stride == -1) break;  // i > L with i--
        continue;
      case Token::kLTE:
        // Accept i <= U as i < U + 1 only when U + 1 cannot wrap.
        if (stride == 1 && CanBeMadeExclusive(loop, y, branch, false)) {
          y = Add(y, new (zone_) InductionVar(1));
          break;
        }
        continue;
      case Token::kGTE:
        // Accept i >= L as i > L - 1 only when L - 1 cannot wrap.
        if (stride == -1 && CanBeMadeExclusive(loop, y, branch, true)) {
          y = Sub(y, new (zone_) InductionVar(1));
          break;
        }
        continue;
      case Token::kNE: {
        // Accept i != E for unit strides with constant bounds that make
        // the loop always taken.
        int64_t start = 0;
        int64_t end = 0;
        if (InductionVar::IsConstant(x->initial(), &start) &&
            InductionVar::IsConstant(y, &end) &&
            ((stride == +1 && start < end) || (stride == -1 && start > end))) {
          break;
        }
        continue;
      }
      default:
        continue;
    }
    // Record the safe bound; clients must still verify dominance of the
    // branch and the initial value before using it.
    x->bounds_.Add(InductionVar::Bound(branch, y));
    // The branch ending the header controls the loop.
    if (branch == loop->header_->last_instruction()) {
      loop->control_ = x;
    }
  }
}
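// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): after normalization
// every accepted looping condition has the strict form i < U (stride +1) or
// i > L (stride -1), with i <= U first rewritten to i < U + 1 only when
// U + 1 cannot wrap. For the unit-stride upward case the trip count then
// follows directly, as this hypothetical helper shows.
#include <cassert>
#include <cstdint>

// Trip count of: for (i = start; i < end; i++), in the strict form above.
static int64_t TripCount(int64_t start, int64_t end) {
  return (start < end) ? (end - start) : 0;
}

int main() {
  assert(TripCount(0, 10) == 10);  // i < 10
  // i <= 9 is first rewritten to i < 10 (9 + 1 does not wrap), so a loop
  // written with a non-strict bound yields the same count:
  assert(TripCount(0, 9 + 1) == 10);
  assert(TripCount(5, 5) == 0);    // never taken
}
// ---------------------------------------------------------------------------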
InductionVar* InductionVarAnalysis::TransferPhi(LoopInfo* loop,
                                                Definition* def,
                                                intptr_t idx) {
  // All inputs (except the initial-value input idx, if any) must carry
  // the same induction.
  InductionVar* induc = nullptr;
  for (intptr_t i = 0, n = def->InputCount(); i < n; i++) {
    if (i != idx) {
      InductionVar* x = Lookup(loop, def->InputAt(i)->definition());
      if (x == nullptr) {
        return nullptr;
      } else if (induc == nullptr) {
        induc = x;
      } else if (!induc->IsEqual(x)) {
        return nullptr;
      }
    }
  }
  return induc;
}
InductionVar* InductionVarAnalysis::TransferDef(LoopInfo* loop,
                                                Definition* def) {
  if (def->IsBinaryIntegerOp()) {
    return TransferBinary(loop, def);
  } else if (def->IsUnaryIntegerOp()) {
    return TransferUnary(loop, def);
  } else {
    // Look up the (most likely loop-invariant) length of a bound check,
    // so that it is recorded as invariant for clients that expect this.
    if (auto check = def->AsCheckBoundBase()) {
      Definition* len = check->length()
                            ->definition()
                            ->OriginalDefinitionIgnoreBoxingAndConstraints();
      Lookup(loop, len);
    }
    // Pass through boxing and constraints to the original definition.
    Definition* orig = def->OriginalDefinitionIgnoreBoxingAndConstraints();
    if (orig != def) {
      return Lookup(loop, orig);
    }
  }
  return nullptr;
}
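// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): boxing and
// constraint instructions merely rename a value, so the analysis follows
// the redefinition chain back to the true definition before looking up
// induction information. A minimal model of that chase, with a
// hypothetical Def struct:
#include <cassert>

struct Def {
  Def* orig;  // the redefined value, or this for a true definition
};

static Def* Original(Def* d) {
  while (d->orig != d) d = d->orig;  // look through Box/Unbox/Constraint
  return d;
}

int main() {
  Def base{&base};          // a true definition points at itself
  Def box{&base};           // Box(base)
  Def constraint{&box};     // Constraint(Box(base))
  assert(Original(&constraint) == &base);
}
// ---------------------------------------------------------------------------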
InductionVar* InductionVarAnalysis::TransferBinary(LoopInfo* loop,
                                                   Definition* def) {
  InductionVar* x = Lookup(loop, def->InputAt(0)->definition());
  InductionVar* y = Lookup(loop, def->InputAt(1)->definition());
  switch (def->AsBinaryIntegerOp()->op_kind()) {
    case Token::kADD: return Add(x, y);
    case Token::kSUB: return Sub(x, y);
    case Token::kMUL: return Mul(x, y);
    default:          return nullptr;
  }
}
InductionVar* InductionVarAnalysis::TransferUnary(LoopInfo* loop,
                                                  Definition* def) {
  InductionVar* x = Lookup(loop, def->InputAt(0)->definition());
  switch (def->AsUnaryIntegerOp()->op_kind()) {
    case Token::kNEGATE: {
      InductionVar* zero = new (zone_) InductionVar(0);
      return Sub(zero, x);  // -x is 0 - x
    }
    default:
      return nullptr;
  }
}
InductionVar* InductionVarAnalysis::SolvePhi(LoopInfo* loop,
                                             Definition* def,
                                             intptr_t idx) {
  // All inputs (except the initial-value input idx, if any) must carry
  // the same temporary meaning within the cycle.
  InductionVar* induc = nullptr;
  for (intptr_t i = 0, n = def->InputCount(); i < n; i++) {
    if (i != idx) {
      InductionVar* c = LookupCycle(def->InputAt(i)->definition());
      if (c == nullptr) {
        return nullptr;
      } else if (induc == nullptr) {
        induc = c;
      } else if (!induc->IsEqual(c)) {
        return nullptr;
      }
    }
  }
  return induc;
}
InductionVar* InductionVarAnalysis::SolveConstraint(LoopInfo* loop,
                                                    Definition* def,
                                                    InductionVar* init) {
  InductionVar* c = LookupCycle(def->InputAt(0)->definition());
  if (c == init) {
    // Record a non-artificial bound constraint on the entry value as
    // the loop's limit.
    ConstraintInstr* constraint = def->AsConstraint();
    if (constraint->target() != nullptr) {
      loop->limit_ = constraint;
    }
  }
  return c;
}
InductionVar* InductionVarAnalysis::SolveBinary(LoopInfo* loop,
                                                Definition* def,
                                                InductionVar* init) {
  InductionVar* x = Lookup(loop, def->InputAt(0)->definition());
  InductionVar* y = Lookup(loop, def->InputAt(1)->definition());
  switch (def->AsBinaryIntegerOp()->op_kind()) {
    case Token::kADD:
      if (InductionVar::IsInvariant(x)) {
        InductionVar* c = LookupCycle(def->InputAt(1)->definition());
        if (c == init) return x;  // i = x + i: invariant x is the update
      }
      if (InductionVar::IsInvariant(y)) {
        InductionVar* c = LookupCycle(def->InputAt(0)->definition());
        if (c == init) return y;  // i = i + y
      }
      return nullptr;
    case Token::kSUB:
      if (InductionVar::IsInvariant(x)) {
        InductionVar* c = LookupCycle(def->InputAt(1)->definition());
        // i = x - i is periodic, alternating between init and x - init.
        if (c == init) {
          return new (zone_)
              InductionVar(InductionVar::kPeriodic, init, Sub(x, init));
        }
      }
      if (InductionVar::IsInvariant(y)) {
        InductionVar* c = LookupCycle(def->InputAt(0)->definition());
        if (c == init) {
          InductionVar* zero = new (zone_) InductionVar(0);
          return Sub(zero, y);  // i = i - y: the update is -y
        }
      }
      return nullptr;
    default:
      return nullptr;
  }
}
InductionVar* InductionVarAnalysis::SolveUnary(LoopInfo* loop,
                                               Definition* def,
                                               InductionVar* init) {
  InductionVar* c = LookupCycle(def->InputAt(0)->definition());
  switch (def->AsUnaryIntegerOp()->op_kind()) {
    case Token::kNEGATE:
      // i = - i is periodic, alternating between init and - init.
      if (c == init) {
        InductionVar* zero = new (zone_) InductionVar(0);
        InductionVar* next = Sub(zero, init);
        return new (zone_) InductionVar(InductionVar::kPeriodic, init, next);
      }
      return nullptr;
    default:
      return nullptr;
  }
}
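// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): i = -i inside a
// cycle is the periodic induction PERIODIC(init, -init), alternating
// between the two values; the tiny loop below checks that behavior.
#include <cassert>

int main() {
  const int init = 7;
  int i = init;
  for (int k = 0; k < 8; k++) {
    // Even iterations see init, odd iterations see -init.
    assert(i == ((k % 2 == 0) ? init : -init));
    i = -i;  // the cycle operation handled by SolveUnary
  }
}
// ---------------------------------------------------------------------------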
InductionVar* InductionVarAnalysis::Lookup(LoopInfo* loop, Definition* def) {
  InductionVar* induc = loop->LookupInduction(def);
  if (induc == nullptr) {
    // Loop-invariants are added lazily.
    int64_t val = 0;
    if (IsConstant(def, &val)) {
      induc = new (zone_) InductionVar(val);
      loop->AddInduction(def, induc);
    } else if (!loop->Contains(def->GetBlock())) {
      // Look "under the hood" of invariant definitions to expose more
      // details on common constructs such as "length - 1".
      induc = TransferDef(loop, def);
      if (induc == nullptr) {
        induc = new (zone_) InductionVar(0, 1, def);
      }
      loop->AddInduction(def, induc);
    }
  }
  return induc;
}
InductionVar* InductionVarAnalysis::LookupCycle(Definition* def) {
  LoopInfo::InductionKV::Pair* pair = cycle_.Lookup(def);
  if (pair != nullptr) {
    return pair->value;
  }
  return nullptr;
}
InductionVar* InductionVarAnalysis::Add(InductionVar* x, InductionVar* y) {
  if (InductionVar::IsInvariant(x)) {
    if (InductionVar::IsInvariant(y)) {
      // Invariant + invariant: combine the symbolic parts when possible.
      if (x->def_ == y->def_) {
        return new (zone_)
            InductionVar(Utils::AddWithWrapAround(x->offset_, y->offset_),
                         Utils::AddWithWrapAround(x->mult_, y->mult_),
                         x->def_);
      } else if (y->mult_ == 0) {
        return new (zone_)
            InductionVar(Utils::AddWithWrapAround(x->offset_, y->offset_),
                         x->mult_, x->def_);
      } else if (x->mult_ == 0) {
        return new (zone_)
            InductionVar(Utils::AddWithWrapAround(x->offset_, y->offset_),
                         y->mult_, y->def_);
      }
    } else if (y != nullptr) {
      // Invariant + induction: shift the initial value; a periodic
      // induction also shifts its alternate value.
      InductionVar* i = Add(x, y->initial_);
      InductionVar* n =
          (y->kind_ == InductionVar::kPeriodic) ? Add(x, y->next_) : y->next_;
      if (i != nullptr && n != nullptr) {
        return new (zone_) InductionVar(y->kind_, i, n);
      }
    }
  } else if (InductionVar::IsInvariant(y)) {
    if (x != nullptr) {
      // Induction + invariant.
      InductionVar* i = Add(x->initial_, y);
      InductionVar* n =
          (x->kind_ == InductionVar::kPeriodic) ? Add(x->next_, y) : x->next_;
      if (i != nullptr && n != nullptr) {
        return new (zone_) InductionVar(x->kind_, i, n);
      }
    }
  } else if (InductionVar::IsLinear(x) && InductionVar::IsLinear(y)) {
    // Linear + linear: component-wise.
    InductionVar* i = Add(x->initial_, y->initial_);
    InductionVar* n = Add(x->next_, y->next_);
    if (i != nullptr && n != nullptr) {
      return new (zone_) InductionVar(InductionVar::kLinear, i, n);
    }
  }
  return nullptr;
}
InductionVar* InductionVarAnalysis::Sub(InductionVar* x, InductionVar* y) {
  if (InductionVar::IsInvariant(x)) {
    if (InductionVar::IsInvariant(y)) {
      // Invariant - invariant: combine the symbolic parts when possible.
      if (x->def_ == y->def_) {
        return new (zone_)
            InductionVar(Utils::SubWithWrapAround(x->offset_, y->offset_),
                         Utils::SubWithWrapAround(x->mult_, y->mult_),
                         x->def_);
      } else if (y->mult_ == 0) {
        return new (zone_)
            InductionVar(Utils::SubWithWrapAround(x->offset_, y->offset_),
                         x->mult_, x->def_);
      } else if (x->mult_ == 0) {
        return new (zone_)
            InductionVar(Utils::SubWithWrapAround(x->offset_, y->offset_),
                         Utils::NegWithWrapAround(y->mult_), y->def_);
      }
    } else if (y != nullptr) {
      // Invariant - induction: negate the derivative of a linear
      // induction; a periodic induction subtracts both values.
      InductionVar* i = Sub(x, y->initial_);
      InductionVar* n = nullptr;
      if (y->kind_ == InductionVar::kLinear) {
        InductionVar* zero = new (zone_) InductionVar(0, 0, nullptr);
        n = Sub(zero, y->next_);
      } else {
        n = Sub(x, y->next_);
      }
      if (i != nullptr && n != nullptr) {
        return new (zone_) InductionVar(y->kind_, i, n);
      }
    }
  } else if (InductionVar::IsInvariant(y)) {
    if (x != nullptr) {
      // Induction - invariant.
      InductionVar* i = Sub(x->initial_, y);
      InductionVar* n =
          (x->kind_ == InductionVar::kPeriodic) ? Sub(x->next_, y) : x->next_;
      if (i != nullptr && n != nullptr) {
        return new (zone_) InductionVar(x->kind_, i, n);
      }
    }
  } else if (InductionVar::IsLinear(x) && InductionVar::IsLinear(y)) {
    // Linear - linear: component-wise.
    InductionVar* i = Sub(x->initial_, y->initial_);
    InductionVar* n = Sub(x->next_, y->next_);
    if (i != nullptr && n != nullptr) {
      return new (zone_) InductionVar(InductionVar::kLinear, i, n);
    }
  }
  return nullptr;
}
InductionVar* InductionVarAnalysis::Mul(InductionVar* x, InductionVar* y) {
  // Swap the operands so that the constant, if any, appears on the left.
  if (InductionVar::IsConstant(y)) {
    InductionVar* tmp = x;
    x = y;
    y = tmp;
  }
  // A constant multiplicand distributes over both components.
  if (InductionVar::IsConstant(x) && y != nullptr) {
    if (InductionVar::IsInvariant(y)) {
      return new (zone_)
          InductionVar(Utils::MulWithWrapAround(x->offset_, y->offset_),
                       Utils::MulWithWrapAround(x->offset_, y->mult_),
                       y->def_);
    }
    return new (zone_)
        InductionVar(y->kind_, Mul(x, y->initial_), Mul(x, y->next_));
  }
  return nullptr;
}
bool InductionVar::CanComputeDifferenceWith(const InductionVar* other,
                                            int64_t* diff) const {
  // ...
bool InductionVar::CanComputeBoundsImpl(LoopInfo* loop,
                                        Instruction* pos,
                                        InductionVar** min,
                                        InductionVar** max) {
  // ...
  // On failure, try a bit harder in the enveloping loops, where this
  // induction is invariant.
  for (loop = loop->outer(); loop != nullptr; loop = loop->outer()) {
    // ...
  }
  // ...
  // A recorded bound applies only if its branch dominates the position:
  if (pos->IsDominatedBy(bound.branch_)) {
    // ... and only if the limit's own range is computable there:
    if (bound.limit_->CanComputeBounds(loop, bound.branch_, &u_min, &u_max)) {
      // Widen the range by the stride, guarding against wrap-around.
      if (stride > 0) {
        return SafelyAdjust(z, l_min, 0, u_max, -stride - off, min, max);
      }
      // ... (the negative-stride case adjusts symmetrically)
    }
  }
// In InductionVar::CanComputeBounds(loop, pos, min, max): consult the
// two-level memoization cache (induction -> position -> range) before
// doing any real work.
  if (pair1 != nullptr) {
    // ... look up pos in this induction's per-position memo:
    if (pair2 != nullptr) {
      // ... on a hit, return the memoized (min, max) pair.
    }
  }
  // Compute the bounds and memoize the result.
  if (CanComputeBoundsImpl(loop, pos, min, max)) {
    LoopInfo::MemoVal* memo = nullptr;
    if (pair1 != nullptr) {
      // ... reuse this induction's existing memo entry,
    } else {
      memo = new LoopInfo::MemoVal();
      // ... or insert a fresh one into the loop's cache.
    }
    // ... record (pos -> (*min, *max)) in memo and return true.
  }
  return false;
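// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): the memo cache is a
// two-level map, induction -> (position -> (min, max)), so repeated range
// queries at the same position are answered without recomputation. A toy
// version with standard containers and a hypothetical stand-in computation:
#include <cassert>
#include <map>
#include <utility>

using Range = std::pair<int, int>;
using PosMemo = std::map<int, Range>;      // position -> cached range
static std::map<int, PosMemo> memo_cache;  // induction id -> PosMemo

static Range ComputeBounds(int induc, int pos) {  // stand-in for the real work
  return {0, induc + pos};
}

static Range CachedBounds(int induc, int pos) {
  auto& per_pos = memo_cache[induc];
  auto it = per_pos.find(pos);
  if (it != per_pos.end()) return it->second;  // memoized hit
  return per_pos[pos] = ComputeBounds(induc, pos);
}

int main() {
  assert(CachedBounds(1, 2) == Range(0, 3));
  assert(CachedBounds(1, 2) == Range(0, 3));  // second call is a cache hit
}
// ---------------------------------------------------------------------------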
// In LoopInfo::AddBackEdge(block):
  back_edges_.Add(block);

// In LoopInfo::IsBackEdge(block): a linear scan over the recorded edges.
  for (intptr_t i = 0, n = back_edges_.length(); i < n; i++) {
    if (back_edges_[i] == block) {
      return true;
    }
  }
  return false;
// In LoopInfo::IsAlwaysTaken(block) (excerpt): the header itself is
// executed on every iteration.
  if (block == header_) {
    return true;
  }
  // ...

// Excerpt from LoopInfo, deriving a loop limit from the bounds recorded
// on the control induction:
  if (control_ != nullptr) {
    for (auto bound : control_->bounds()) {
      // ...
      limit = bound.limit_;
    }
  }
  if (limit != nullptr) {
// In LoopInfo::IsHeaderPhi(def): a header phi is a non-redundant phi
// residing in the loop header.
  return def != nullptr && def->IsPhi() && def->GetBlock() == header_ &&
         !def->AsPhi()->IsRedundant();
// In LoopInfo::IsIn(loop): this loop is in the given loop when that loop
// contains this loop's header.
  if (loop != nullptr) {
    return loop->Contains(header_);
  }
  return false;

// In LoopInfo::NestingDepth(): count the chain of enveloping loops.
  intptr_t nesting_depth = 1;
  for (LoopInfo* loop = outer_; loop != nullptr; loop = loop->outer_) {
    nesting_depth++;
  }
  return nesting_depth;
// In LoopInfo::ResetInduction() (excerpt): also invalidate memoized ranges.
  memo_cache_.Clear();

// In LoopInfo::AddInduction(def, induc) (excerpt):
  ASSERT(induc != nullptr);

// In LoopInfo::LookupInduction(def) (excerpt):
  if (pair != nullptr) {
    return pair->value;
  }
  return nullptr;
// In LoopInfo::IsInRange(pos, index, length): the index is within
// [0, length) when induction information can prove both ends.
  InductionVar* induc = LookupInduction(
      index->definition()->OriginalDefinitionIgnoreBoxingAndConstraints());
  InductionVar* len = LookupInduction(
      length->definition()->OriginalDefinitionIgnoreBoxingAndConstraints());
  if (induc != nullptr && len != nullptr) {
    // Prefer a bound recorded on the induction whose branch dominates
    // the position and does not exceed the length.
    int64_t diff = 0;
    for (auto bound : induc->bounds()) {
      if (pos->IsDominatedBy(bound.branch_) &&
          len->CanComputeDifferenceWith(bound.limit_, &diff) && diff <= 0) {
        return true;
      }
    }
    // Otherwise, fall back on the computed range [min, max].
    InductionVar* min = nullptr;
    InductionVar* max = nullptr;
    if (induc->CanComputeBounds(this, pos, &min, &max)) {
      int64_t val = 0;
      return InductionVar::IsConstant(min, &val) && 0 <= val &&
             len->CanComputeDifferenceWith(max, &diff) && diff < 0;
    }
  }
  return false;
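// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): the bounds-check
// elimination test above boils down to showing 0 <= min and max - len < 0
// for the index's computed range [min, max]; then every value the index
// takes lies inside [0, len). A hypothetical constant-only version:
#include <cassert>
#include <cstdint>

static bool InRange(int64_t min, int64_t max, int64_t len) {
  return 0 <= min && max - len < 0;  // index range within [0, len)
}

int main() {
  // for (i = 0; i < n; i++) a[i]: index range is [0, n-1], length is n.
  const int64_t n = 100;
  assert(InRange(0, n - 1, n));
  assert(!InRange(-1, n - 1, n));  // possible underflow at i == -1
  assert(!InRange(0, n, n));       // possible overflow at i == n
}
// ---------------------------------------------------------------------------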
void LoopInfo::PrintTo(BaseTextBuffer* f) const {
  f->Printf("%*c", static_cast<int>(2 * NestingDepth()), ' ');
  f->Printf("loop%" Pd " B%" Pd " ", id_, header_->block_id());
  intptr_t num_blocks = 0;
  for (BitVector::Iterator it(blocks_); !it.Done(); it.Advance()) {
    num_blocks++;
  }
  f->Printf("#blocks=%" Pd, num_blocks);
  if (outer_ != nullptr) f->Printf(" outer=%" Pd, outer_->id_);
  if (inner_ != nullptr) f->Printf(" inner=%" Pd, inner_->id_);
  if (next_ != nullptr) f->Printf(" next=%" Pd, next_->id_);
  // ...
  for (intptr_t i = 0, n = back_edges_.length(); i < n; i++) {
    f->Printf(" B%" Pd, back_edges_[i]->block_id());
  }
  // ...
}
LoopHierarchy::LoopHierarchy(ZoneGrowableArray<BlockEntryInstr*>* headers,
                             const GrowableArray<BlockEntryInstr*>& preorder,
                             bool print_traces)
    : headers_(headers),
      preorder_(preorder),
      print_traces_(print_traces) {
  Build();
}
void LoopHierarchy::Build() {
  // Link every block to the closest enveloping loop.
  for (intptr_t i = 0, n = headers_->length(); i < n; ++i) {
    LoopInfo* loop = (*headers_)[i]->loop_info();
    for (BitVector::Iterator it(loop->blocks()); !it.Done(); it.Advance()) {
      BlockEntryInstr* block = preorder_[it.Current()];
      if (block->loop_info() == nullptr) {
        block->set_loop_info(loop);
      } else {
        ASSERT(block->loop_info()->IsIn(loop));
      }
    }
  }
  // Build the hierarchy from the headers: each loop hangs under the loop
  // of its header's dominator, or becomes a top-level loop.
  for (intptr_t i = 0, n = headers_->length(); i < n; ++i) {
    BlockEntryInstr* header = (*headers_)[i];
    LoopInfo* loop = header->loop_info();
    LoopInfo* dom_loop = header->dominator()->loop_info();
    ASSERT(loop->outer_ == nullptr);
    ASSERT(loop->next_ == nullptr);
    if (loop->IsIn(dom_loop)) {
      loop->outer_ = dom_loop;
      loop->next_ = dom_loop->inner_;
      dom_loop->inner_ = loop;
    } else {
      loop->next_ = top_;
      top_ = loop;
    }
  }
  // Print the hierarchy when tracing.
  if (FLAG_trace_optimization && print_traces_) {
    Print(top_);
  }
}
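// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original source): Build() threads
// each loop under the loop of its header's dominator, producing intrusive
// outer/inner/next lists. The toy below applies the same linking rule to a
// nest "outer { inner }", with a parent pointer standing in for dominator
// information; the Loop/Link names are hypothetical.
#include <cassert>

struct Loop {
  Loop* outer = nullptr;  // enveloping loop
  Loop* inner = nullptr;  // first nested loop
  Loop* next = nullptr;   // next sibling at the same level
};

// Link 'loop' below 'dom_loop' (the loop enclosing its header's dominator).
static void Link(Loop* loop, Loop* dom_loop, Loop** top) {
  if (dom_loop != nullptr) {  // nested: push onto the inner list
    loop->outer = dom_loop;
    loop->next = dom_loop->inner;
    dom_loop->inner = loop;
  } else {                    // outermost: push onto the top-level list
    loop->next = *top;
    *top = loop;
  }
}

int main() {
  Loop outer, inner;
  Loop* top = nullptr;
  Link(&outer, nullptr, &top);
  Link(&inner, &outer, &top);
  assert(top == &outer && outer.inner == &inner && inner.outer == &outer);
}
// ---------------------------------------------------------------------------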
void LoopHierarchy::Print(LoopInfo* loop) const {
  for (; loop != nullptr; loop = loop->next_) {
    // ... print the loop description, then its blocks:
    for (BitVector::Iterator it(loop->blocks_); !it.Done(); it.Advance()) {
      THR_Print(" B%" Pd, preorder_[it.Current()]->block_id());
    }
    // ...
    Print(loop->inner_);
  }
}