DEFINE_FLAG(int,
            verify_definitions_threshold,
            250,
            "Definition count threshold for extensive instruction checks");
// Custom asserts that also dump the offending IR context on failure.
#define ASSERT1(cond, ctxt1)                                                   \
  do {                                                                         \
    if (!(cond)) {                                                             \
      dart::Assert(__FILE__, __LINE__)                                         \
          .Fail("expected: %s (%s=%s)", #cond, #ctxt1, (ctxt1)->ToCString());  \
    }                                                                          \
  } while (false)

#define ASSERT2(cond, ctxt1, ctxt2)                                            \
  do {                                                                         \
    if (!(cond)) {                                                             \
      dart::Assert(__FILE__, __LINE__)                                         \
          .Fail("expected: %s (%s=%s, %s=%s)", #cond, #ctxt1,                  \
                (ctxt1)->ToCString(), #ctxt2, (ctxt2)->ToCString());           \
    }                                                                          \
  } while (false)
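// For example, a failing ASSERT1(!instruction->IsPhi(), instruction) would
// report roughly (illustrative output only):
//   expected: !instruction->IsPhi() (instruction=<ToCString of instruction>)
// so the offending IR node is dumped along with the violated condition.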
// Returns true for the common "optimized out" and "null" constants, which
// are shared across the graph and would be expensive to check repeatedly.
static bool IsCommonConstant(Definition* def) {
  if (auto c = def->AsConstant()) {
    return c->value().ptr() == Object::optimized_out().ptr() ||
           c->value().ptr() == Object::null();
  }
  return false;
}
// Returns true if block appears in the predecessor list of succ.
static bool IsPred(BlockEntryInstr* block, BlockEntryInstr* succ) {
  for (intptr_t i = 0, n = succ->PredecessorCount(); i < n; ++i) {
    if (succ->PredecessorAt(i) == block) {
      return true;
    }
  }
  return false;
}
// Returns true if block appears in the successor list of pred.
static bool IsSucc(BlockEntryInstr* block, BlockEntryInstr* pred) {
  Instruction* last = pred->last_instruction();
  for (intptr_t i = 0, n = last->SuccessorCount(); i < n; ++i) {
    if (last->SuccessorAt(i) == block) {
      return true;
    }
  }
  return false;
}
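// For example, in a diamond CFG B0 -> {B1, B2} -> B3, IsPred(B0, B1) holds
// because B0 appears in B1's predecessor list, and IsSucc(B1, B0) holds
// because B1 appears among the successors of B0's last instruction. The
// checker verifies both directions so the two edge lists cannot diverge.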
// Returns true if dom directly dominates block.
static bool IsDirectlyDominated(BlockEntryInstr* block, BlockEntryInstr* dom) {
  for (intptr_t i = 0, n = dom->dominated_blocks().length(); i < n; ++i) {
    if (dom->dominated_blocks()[i] == block) {
      return true;
    }
  }
  return false;
}
// Returns true if instruction appears in the given use list.
static bool IsInUseList(Value* use, Instruction* instruction) {
  for (; use != nullptr; use = use->next_use()) {
    if (use->instruction() == instruction) {
      return true;
    }
  }
  return false;
}
// Returns true if the definition dominates the given instruction, accounting
// for cases the plain IsDominatedBy() query does not cover.
static bool DefDominatesUse(Definition* def, Instruction* instruction) {
  if (instruction->IsPhi()) {
    // A phi use is not required to be dominated by the definition itself;
    // the per-edge requirement is checked separately in VisitPhi().
    return true;
  } else if (def->IsMaterializeObject() ||
             instruction->IsMaterializeObject()) {
    // Materializations live outside the ordinary instruction stream, so
    // dominance does not apply.
    return true;
  } else if (auto entry =
                 instruction->GetBlock()->AsBlockEntryWithInitialDefs()) {
    // An initial definition dominates every instruction in its own block.
    for (auto idef : *entry->initial_definitions()) {
      if (idef == def) {
        return true;
      }
    }
  }
  // Otherwise fall back to the standard dominance query on the IR.
  return instruction->IsDominatedBy(def);
}
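// Example of the phi exemption above: for B3: v3 <- phi(v1, v2) with
// predecessors B1 and B2, v1 need not dominate B3 itself, only the incoming
// edge from B1; that stronger per-edge condition is checked in VisitPhi().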
// Returns true if the instruction ends control flow in its block.
static bool IsControlFlow(Instruction* instruction) {
  return instruction->IsBranch() || instruction->IsGoto() ||
         instruction->IsIndirectGoto() || instruction->IsReturnBase() ||
         instruction->IsThrow() || instruction->IsReThrow() ||
         instruction->IsTailCall();
}
void FlowGraphChecker::VisitBlocks() {
  const GrowableArray<BlockEntryInstr*>& preorder = flow_graph_->preorder();
  const GrowableArray<BlockEntryInstr*>& postorder = flow_graph_->postorder();
  const GrowableArray<BlockEntryInstr*>& rev_postorder =
      flow_graph_->reverse_postorder();

  // All three orderings must cover the same number of blocks, and the
  // reverse postorder must be the true reverse of the postorder.
  const intptr_t block_count = preorder.length();
  ASSERT(block_count == postorder.length());
  ASSERT(block_count == rev_postorder.length());
  for (intptr_t i = 0; i < block_count; ++i) {
    ASSERT(postorder[i] == rev_postorder[block_count - i - 1]);
  }

  // Iterate over all basic blocks.
  const intptr_t max_block_id = flow_graph_->max_block_id();
  for (BlockIterator it = flow_graph_->reverse_postorder_iterator();
       !it.Done(); it.Advance()) {
    BlockEntryInstr* block = it.Current();
    ASSERT1(block->block_id() <= max_block_id, block);
    // Make sure the ordering numbers are consistent.
    ASSERT1(block->preorder_number() <= block_count, block);
    ASSERT1(block->postorder_number() <= block_count, block);
    ASSERT1(preorder[block->preorder_number()] == block, block);
    ASSERT1(postorder[block->postorder_number()] == block, block);
    // Make sure predecessors and successors agree.
    Instruction* last = block->last_instruction();
    for (intptr_t i = 0, n = last->SuccessorCount(); i < n; ++i) {
      ASSERT1(IsPred(block, last->SuccessorAt(i)), block);
    }
    for (intptr_t i = 0, n = block->PredecessorCount(); i < n; ++i) {
      ASSERT1(IsSucc(block, block->PredecessorAt(i)), block);
    }
    // Make sure the dominance relations agree.
    for (intptr_t i = 0, n = block->dominated_blocks().length(); i < n; ++i) {
      ASSERT1(block->dominated_blocks()[i]->dominator() == block, block);
    }
    if (block->dominator() != nullptr) {
      ASSERT1(IsDirectlyDominated(block, block->dominator()), block);
    }
    // Visit all instructions in this block.
    VisitInstructions(block);
  }
}
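// Past this point every block has consistent ordering numbers, symmetric
// predecessor/successor edges, and a dominator tree that agrees with the
// dominated_blocks() lists, so the per-instruction checks may rely on
// GetBlock() and dominance queries being well-defined.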
void FlowGraphChecker::VisitInstructions(BlockEntryInstr* block) {
  // To avoid excessive runtimes, skip the instruction checks if there are
  // very many definitions.
  if (flow_graph_->current_ssa_temp_index() >
      FLAG_verify_definitions_threshold) {
    return;
  }
  // Give all visitors quick access to the current block.
  current_block_ = block;
  // Visit initial definitions.
  if (auto entry = block->AsBlockEntryWithInitialDefs()) {
    for (auto def : *entry->initial_definitions()) {
      ASSERT1(def->IsConstant() || def->IsParameter(), def);
      // Make sure the block lookup agrees.
      ASSERT1(def->GetBlock() == entry, def);
      // Initial definitions are only partially linked into the graph.
      ASSERT1(def->next() == nullptr, def);
      ASSERT1(def->previous() == entry, def);
      // Initial definitions cannot create unsafe untagged pointers.
      ASSERT1(!def->MayCreateUnsafeUntaggedPointer(), def);
      // Skip common constants as checking them could be slow.
      if (IsCommonConstant(def)) continue;
      VisitInstruction(def);
    }
  }
  // Visit phis in join entries.
  if (auto entry = block->AsJoinEntry()) {
    for (PhiIterator it(entry); !it.Done(); it.Advance()) {
      PhiInstr* phi = it.Current();
      // Make sure the block lookup agrees.
      ASSERT1(phi->GetBlock() == entry, phi);
      // Phis are never linked into the instruction list.
      ASSERT1(phi->next() == nullptr, phi);
      ASSERT1(phi->previous() == nullptr, phi);
      VisitInstruction(phi);
    }
  }
  // Visit regular instructions.
  Instruction* last = block->last_instruction();
  ASSERT1((last == block) == block->IsGraphEntry(), block);
  Instruction* prev = block;
  for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
    Instruction* instruction = it.Current();
    // Make sure the block lookup agrees.
    ASSERT1(instruction->GetBlock() == block, instruction);
    // Make sure the linked list is consistent in both directions.
    ASSERT1(prev->next() == instruction, instruction);
    ASSERT1(instruction->previous() == prev, instruction);
    prev = instruction;
    // Exactly the last instruction is control flow, and phis never appear
    // in the ordinary instruction stream.
    ASSERT1(IsControlFlow(instruction) == (instruction == last), instruction);
    ASSERT1(!instruction->IsPhi(), instruction);
    VisitInstruction(instruction);
  }
  // Make sure loop information, when available, agrees.
  if (flow_graph_->loop_hierarchy_ != nullptr) {
    for (LoopInfo* loop = block->loop_info(); loop != nullptr;
         loop = loop->outer()) {
      ASSERT1(loop->Contains(block), block);
    }
  }
}
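// For a block with instruction stream i1, i2, i3 the loop above verifies
// the doubly linked chain block <-> i1 <-> i2 <-> i3, that exactly the
// final instruction i3 is control flow, and that no phi ever appears in
// the ordinary instruction stream (phis live only in join entries).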
void FlowGraphChecker::VisitInstruction(Instruction* instruction) {
  ASSERT1(!instruction->IsBlockEntry(), instruction);

#if !defined(DART_PRECOMPILER)
  // In JIT mode, an instruction that may throw inside a try block needs a
  // deopt id so the runtime can find the matching catch handler (tail calls
  // are exempt because they replace the stack frame).
  ASSERT1(!instruction->MayThrow() ||
              !instruction->GetBlock()->InsideTryBlock() ||
              instruction->IsTailCall() ||
              instruction->deopt_id() != DeoptId::kNone,
          instruction);
#endif  // !defined(DART_PRECOMPILER)

  // Instructions that can deoptimize may not occur in force-optimized
  // functions.
  if (instruction->ComputeCanDeoptimize()) {
    ASSERT2(!flow_graph_->function().ForceOptimize(), instruction,
            &flow_graph_->function());
  }
  // When requested, check that real and synthetic token positions fall
  // within the enclosing function and its script.
  if (FLAG_check_token_positions && flow_graph_->inlining_id() >= 0) {
    const TokenPosition& pos = instruction->token_pos();
    if (pos.IsReal() || pos.IsSynthetic()) {
      ASSERT1(instruction->has_inlining_id(), instruction);
      const intptr_t inlining_id = instruction->inlining_id();
      const auto& function = *inline_id_to_function_[inlining_id];
      if (function.end_token_pos().IsReal() &&
          !pos.IsWithin(function.token_pos(), function.end_token_pos())) {
        TextBuffer buffer(256);
        buffer.Printf("Token position %s is invalid for function %s (%s, %s)",
                      pos.ToCString(), function.ToFullyQualifiedCString(),
                      function.token_pos().ToCString(),
                      function.end_token_pos().ToCString());
        if (inlining_id > 0) {
          buffer.Printf(" while compiling function %s",
                        inline_id_to_function_[0]->ToFullyQualifiedCString());
        }
        FATAL("%s", buffer.buffer());
      }
      script_ = function.script();
      if (!script_.IsNull() && !script_.IsValidTokenPosition(pos)) {
        TextBuffer buffer(256);
        buffer.Printf(
            "Token position %s is invalid for script %s of function %s",
            pos.ToCString(), script_.ToCString(),
            function.ToFullyQualifiedCString());
        if (inlining_id > 0) {
          buffer.Printf(" while compiling function %s",
                        inline_id_to_function_[0]->ToFullyQualifiedCString());
        }
        FATAL("%s", buffer.buffer());
      }
    }
  }
  // Check for unexpected unmatched input representations.
  ASSERT1(flow_graph_->unmatched_representations_allowed() ||
              !instruction->HasUnmatchedInputRepresentations(),
          instruction);

  // Check all regular inputs.
  for (intptr_t i = 0, n = instruction->InputCount(); i < n; ++i) {
    VisitUseDef(instruction, instruction->InputAt(i), i, /*is_env=*/false);
  }
  // Check all environment inputs (deeply, including outer environments).
  intptr_t i = 0;
  for (Environment::DeepIterator it(instruction->env()); !it.Done();
       it.Advance()) {
    VisitUseDef(instruction, it.CurrentValue(), i++, /*is_env=*/true);
  }
  // Visit the instruction as a definition, if it is one, and then dispatch
  // to any instruction-specific visitor below.
  if (auto def = instruction->AsDefinition()) {
    VisitDefinition(def);
  }
  instruction->Accept(this);
}
void FlowGraphChecker::VisitDefinition(Definition* def) {
  // An SSA name must be in range; definitions without an SSA name cannot
  // have regular uses.
  if (def->HasSSATemp()) {
    ASSERT1(def->ssa_temp_index() < flow_graph_->current_ssa_temp_index(),
            def);
  } else {
    ASSERT1(def->input_use_list() == nullptr, def);
  }
  // Check all regular uses.
  Value* prev = nullptr;
  for (Value* use = def->input_use_list(); use != nullptr;
       use = use->next_use()) {
    VisitDefUse(def, use, prev, /*is_env=*/false);
    prev = use;
  }
  // Check all environment uses.
  prev = nullptr;
  for (Value* use = def->env_use_list(); use != nullptr;
       use = use->next_use()) {
    VisitDefUse(def, use, prev, /*is_env=*/true);
    prev = use;
  }
}
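// Use lists are singly linked through Value::next_use(); passing the
// previous element lets VisitDefUse() below verify the previous_use()
// backlink as well. Schematically: for def -> use0 -> use1 -> nullptr,
// use0 is visited with prev == nullptr and use1 with prev == use0.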
void FlowGraphChecker::VisitUseDef(Instruction* instruction,
                                   Value* use,
                                   intptr_t index,
                                   bool is_env) {
  // Make sure the use is properly wired into the instruction.
  ASSERT2(use->instruction() == instruction, use, instruction);
  ASSERT1(use->use_index() == index, use);
  // Make sure the definition is sane.
  Definition* def = use->definition();
  ASSERT(def != nullptr);
  ASSERT1(def != instruction || def->IsPhi() || def->IsMaterializeObject(),
          def);
  if (def->IsPhi()) {
    ASSERT1(def->GetBlock()->IsJoinEntry(), def);
    // Phis are never linked into the instruction list.
    ASSERT1(def->next() == nullptr, def);
    ASSERT1(def->previous() == nullptr, def);
  } else if (def->IsConstant() || def->IsParameter()) {
    // Initial definitions are partially linked into the graph.
    ASSERT1(def->previous() != nullptr, def);
    // Skip the heavy checks below for common constants, which have very
    // long use lists.
    if (IsCommonConstant(def)) return;
  } else if (def->next() == nullptr) {
    if (auto move_arg = def->AsMoveArgument()) {
      // A detached MoveArgument (after register allocation) must carry a
      // machine register, or a pair of machine registers.
      ASSERT1(move_arg->location().IsMachineRegister() ||
                  (move_arg->location().IsPairLocation() &&
                   move_arg->location()
                       .AsPairLocation()
                       ->At(0)
                       .IsMachineRegister() &&
                   move_arg->location()
                       .AsPairLocation()
                       ->At(1)
                       .IsMachineRegister()),
              move_arg);
    } else {
      // Materializations are not linked into the graph at all.
      ASSERT1(def->IsMaterializeObject(), def);
    }
    ASSERT1(def->previous() == nullptr, def);
  } else {
    // All other definitions are fully linked into the graph.
    ASSERT1(def->next() != nullptr, def);
    ASSERT1(def->previous() != nullptr, def);
  }
  // A properly named definition must dominate this use and must carry this
  // use on the matching use list.
  if (def->HasSSATemp()) {
    ASSERT2(DefDominatesUse(def, instruction), def, instruction);
    ASSERT2(IsInUseList(is_env ? def->env_use_list() : def->input_use_list(),
                        instruction),
            def, instruction);
  }
}
void FlowGraphChecker::VisitDefUse(Definition* def,
                                   Value* use,
                                   Value* prev,
                                   bool is_env) {
  // Make sure the use list is properly linked.
  ASSERT2(use->definition() == def, use, def);
  ASSERT1(use->previous_use() == prev, use);
  // Make sure the instruction at the use is sane.
  Instruction* instruction = use->instruction();
  ASSERT(instruction != nullptr);
  ASSERT1(def != instruction || def->IsPhi() || def->IsMaterializeObject(),
          def);
  if (is_env) {
    ASSERT2(instruction->env()->ValueAtUseIndex(use->use_index()) == use,
            instruction, use);
  } else {
    ASSERT2(instruction->InputAt(use->use_index()) == use, instruction, use);
  }
  // A reaching type, if any, must be owned by this definition (or nobody).
  if (auto const type = use->reaching_type()) {
    ASSERT1(type->owner() == nullptr || type->owner() == def, use);
  }
  // Make sure each use appears in the graph and is properly dominated by
  // the definition.
  if (instruction->IsPhi()) {
    ASSERT1(instruction->AsPhi()->is_alive(), instruction);
    ASSERT1(instruction->GetBlock()->IsJoinEntry(), instruction);
    // Phis are never linked into the instruction list.
    ASSERT1(instruction->next() == nullptr, instruction);
    ASSERT1(instruction->previous() == nullptr, instruction);
  } else if (instruction->IsBlockEntry()) {
    // Only environment uses can occur on block entries.
    ASSERT1(is_env, instruction);
    ASSERT1(instruction->IsGraphEntry() || instruction->next() != nullptr,
            instruction);
    ASSERT2(DefDominatesUse(def, instruction), def, instruction);
  } else if (instruction->IsMaterializeObject()) {
    // Materializations can be both linked into the graph and detached.
    if (instruction->next() != nullptr) {
      ASSERT1(instruction->previous() != nullptr, instruction);
      ASSERT2(DefDominatesUse(def, instruction), def, instruction);
    } else {
      ASSERT1(instruction->previous() == nullptr, instruction);
    }
  } else if (instruction->IsMoveArgument()) {
    // A MoveArgument can be both linked into the graph and detached.
    if (instruction->next() != nullptr) {
      ASSERT1(instruction->previous() != nullptr, instruction);
      ASSERT2(DefDominatesUse(def, instruction), def, instruction);
    } else {
      ASSERT1(instruction->previous() == nullptr, instruction);
    }
  } else {
    // All other instructions are fully linked into the graph.
    ASSERT1(IsControlFlow(instruction) || instruction->next() != nullptr,
            instruction);
    ASSERT1(instruction->previous() != nullptr, instruction);
    ASSERT2(!def->HasSSATemp() || DefDominatesUse(def, instruction), def,
            instruction);
  }
  if (def->MayCreateUnsafeUntaggedPointer()) {
    // An unsafe untagged pointer must not escape the block in which it was
    // created ...
    ASSERT2(def->GetBlock() == instruction->GetBlock(), def, instruction);
    // ... must not flow into phis ...
    ASSERT2(!instruction->IsPhi(), def, instruction);
    // ... must not be returned ...
    ASSERT2(!instruction->IsReturnBase(), def, instruction);
    // ... and nothing between the definition and the use may trigger GC.
    for (const auto* current = def->next(); current != instruction->next();
         current = current->next()) {
      ASSERT2(!current->CanTriggerGC(), def, current);
    }
  }
}
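// Rationale for the untagged-pointer checks above: a GC may move the object
// that an inner pointer refers into, so such a pointer must be consumed in
// its defining block, never flow through a phi or a return, and no
// instruction between definition and use may trigger GC.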
void FlowGraphChecker::VisitConstant(ConstantInstr* constant) {
  // Range check on smis.
  const Object& value = constant->value();
  if (value.IsSmi()) {
    const int64_t smi_value = Integer::Cast(value).AsInt64Value();
    ASSERT(compiler::target::kSmiMin <= smi_value);
    ASSERT(smi_value <= compiler::target::kSmiMax);
  }
}
void FlowGraphChecker::VisitPhi(PhiInstr* phi) {
  // Make sure the definition of each input value of a phi dominates the
  // corresponding incoming edge, as the stronger complement to the relaxed
  // phi handling in DefDominatesUse().
  ASSERT1(phi->InputCount() == current_block_->PredecessorCount(), phi);
  for (intptr_t i = 0, n = phi->InputCount(); i < n; ++i) {
    Definition* def = phi->InputAt(i)->definition();
    ASSERT1(def->HasSSATemp(), def);
    BlockEntryInstr* edge = current_block_->PredecessorAt(i);
    ASSERT1(DefDominatesUse(def, edge->last_instruction()), def);
  }
}
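// For example, for B3: v3 <- phi(v1, v2) with predecessors B1 (input 0)
// and B2 (input 1), v1 must dominate B1's last instruction and v2 must
// dominate B2's last instruction: each input has to be available on its
// own incoming edge rather than at the phi itself.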
void FlowGraphChecker::VisitGoto(GotoInstr* jmp) {
  ASSERT1(jmp->SuccessorCount() == 1, jmp);
}

void FlowGraphChecker::VisitIndirectGoto(IndirectGotoInstr* jmp) {
  ASSERT1(jmp->SuccessorCount() >= 1, jmp);
}

void FlowGraphChecker::VisitBranch(BranchInstr* branch) {
  ASSERT1(branch->SuccessorCount() == 2, branch);
}

void FlowGraphChecker::VisitRedefinition(RedefinitionInstr* def) {
  ASSERT1(def->value()->definition() != def, def);
}
void FlowGraphChecker::AssertArgumentsInEnv(Definition* call) {
  const auto& function = flow_graph_->function();
  Environment* env = call->env();
  if (env == nullptr) {
    // Environments can be removed by the EliminateEnvironments pass.
  } else if (function.IsIrregexpFunction()) {
    // The irregexp pipeline does not maintain this invariant.
  } else {
    // Otherwise, the trailing environment entries must correspond
    // one-to-one with the pushed arguments.
    const intptr_t env_count = env->Length();
    const intptr_t arg_count = call->ArgumentCount();
    // Some calls have more environment inputs than actual arguments; those
    // extra inputs are pruned during lazy deoptimization.
    const intptr_t after_args_input_count = call->env()->LazyDeoptPruneCount();
    ASSERT1((arg_count + after_args_input_count) <= env_count, call);
    const intptr_t env_base = env_count - arg_count - after_args_input_count;
    for (intptr_t i = 0; i < arg_count; i++) {
      if (call->HasMoveArguments()) {
        ASSERT1(call->ArgumentAt(i) == env->ValueAt(env_base + i)
                                           ->definition()
                                           ->AsMoveArgument()
                                           ->value()
                                           ->definition(),
                call);
      } else {
        if (env->LazyDeoptToBeforeDeoptId()) {
          // A lazy deoptimization that targets the state before the call
          // leaves the arguments out of the environment, so there is
          // nothing to match for this call.
          break;
        }
        // Suspendable functions (async, async*, sync*) may move arguments
        // into their suspend state, in which case the environment does not
        // mirror them.
        const auto& function = *inline_id_to_function_[call->inlining_id()];
        if (function.IsSuspendableFunction()) {
          continue;
        }
        // Redefinitions and boxing/unboxing conversions are inserted without
        // updating environment uses, so compare original definitions.
        Definition* arg_def =
            call->ArgumentAt(i)->OriginalDefinitionIgnoreBoxingAndConstraints();
        Definition* env_def =
            env->ValueAt(env_base + i)
                ->definition()
                ->OriginalDefinitionIgnoreBoxingAndConstraints();
        ASSERT2((arg_def == env_def) || arg_def->IsConstant(), arg_def,
                env_def);
      }
    }
  }
}
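// Assumed environment layout behind the check above, with env_base equal
// to env_count - arg_count - after_args_input_count:
//   [ ... caller state ... | arg0 ... arg(N-1) | lazy-deopt pruned inputs ]
// so env->ValueAt(env_base + i) should line up with ArgumentAt(i), modulo
// redefinitions, boxing/unboxing conversions, and constants.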
void FlowGraphChecker::VisitClosureCall(ClosureCallInstr* call) {
  AssertArgumentsInEnv(call);
}

void FlowGraphChecker::VisitStaticCall(StaticCallInstr* call) {
  AssertArgumentsInEnv(call);
}

void FlowGraphChecker::VisitInstanceCall(InstanceCallInstr* call) {
  AssertArgumentsInEnv(call);
  // Force-optimized functions may not contain instance calls, since the
  // IC data such calls depend on is never updated for them.
  ASSERT(!flow_graph_->function().ForceOptimize());
}

void FlowGraphChecker::VisitPolymorphicInstanceCall(
    PolymorphicInstanceCallInstr* call) {
  AssertArgumentsInEnv(call);
  ASSERT(!flow_graph_->function().ForceOptimize());
}
// Main entry point of the graph checker.
void FlowGraphChecker::Check(const char* pass_name) {
  if (FLAG_trace_compiler) {
    THR_Print("Running checker after %s\n", pass_name);
  }
  ASSERT(flow_graph_ != nullptr);
  VisitBlocks();
}