flow_graph_checker.cc

// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "platform/globals.h"

#if defined(DEBUG)

#include "vm/compiler/backend/flow_graph_checker.h"

#include "vm/compiler/backend/il.h"
#include "vm/compiler/backend/loops.h"

namespace dart {

DECLARE_FLAG(bool, trace_compiler);

DEFINE_FLAG(int,
            verify_definitions_threshold,
            250,
            "Definition count threshold for extensive instruction checks");

#define ASSERT1(cond, ctxt1)                                                   \
  do {                                                                         \
    if (!(cond))                                                               \
      dart::Assert(__FILE__, __LINE__)                                         \
          .Fail("expected: %s (%s=%s)", #cond, #ctxt1, (ctxt1)->ToCString());  \
  } while (false)

#define ASSERT2(cond, ctxt1, ctxt2)                                            \
  do {                                                                         \
    if (!(cond))                                                               \
      dart::Assert(__FILE__, __LINE__)                                         \
          .Fail("expected: %s (%s=%s, %s=%s)", #cond, #ctxt1,                  \
                (ctxt1)->ToCString(), #ctxt2, (ctxt2)->ToCString());           \
  } while (false)
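
// Example (illustrative): a failing ASSERT1(def->HasSSATemp(), def) reports
// both the condition text and def->ToCString(), so a checker failure
// identifies the offending IL node directly.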

// Returns true for the "optimized out" and "null" constants.
// Such constants may have a lot of uses and checking them could be too slow.
static bool IsCommonConstant(Definition* def) {
  if (auto c = def->AsConstant()) {
    return c->value().ptr() == Object::optimized_out().ptr() ||
           c->value().ptr() == Object::null();
  }
  return false;
}

// Returns true if block is a predecessor of succ.
static bool IsPred(BlockEntryInstr* block, BlockEntryInstr* succ) {
  for (intptr_t i = 0, n = succ->PredecessorCount(); i < n; ++i) {
    if (succ->PredecessorAt(i) == block) {
      return true;
    }
  }
  return false;
}

// Returns true if block is a successor of pred.
static bool IsSucc(BlockEntryInstr* block, BlockEntryInstr* pred) {
  Instruction* last = pred->last_instruction();
  for (intptr_t i = 0, n = last->SuccessorCount(); i < n; ++i) {
    if (last->SuccessorAt(i) == block) {
      return true;
    }
  }
  return false;
}

// Returns true if dom directly dominates block.
static bool IsDirectlyDominated(BlockEntryInstr* block, BlockEntryInstr* dom) {
  for (intptr_t i = 0, n = dom->dominated_blocks().length(); i < n; ++i) {
    if (dom->dominated_blocks()[i] == block) {
      return true;
    }
  }
  return false;
}

// Returns true if instruction appears in the use list.
static bool IsInUseList(Value* use, Instruction* instruction) {
  for (; use != nullptr; use = use->next_use()) {
    if (use->instruction() == instruction) {
      return true;
    }
  }
  return false;
}

// Returns true if definition dominates instruction. Note that this
// helper is required to account for some situations that are not
// accounted for in the IR methods that compute dominance.
static bool DefDominatesUse(Definition* def, Instruction* instruction) {
  if (instruction->IsPhi()) {
    // A phi use is not necessarily dominated by a definition.
    // Proper dominance relation on the input values of Phis is
    // checked by the Phi visitor below.
    return true;
  } else if (def->IsMaterializeObject() || instruction->IsMaterializeObject()) {
    // These instructions reside outside the IR.
    return true;
  } else if (auto entry =
                 instruction->GetBlock()->AsBlockEntryWithInitialDefs()) {
    // An initial definition in the same block.
    // TODO(ajcbik): use an initial def too?
    for (auto idef : *entry->initial_definitions()) {
      if (idef == def) {
        return true;
      }
    }
  }
  // Use the standard IR method for dominance.
  return instruction->IsDominatedBy(def);
}

// Returns true if instruction forces control flow.
static bool IsControlFlow(Instruction* instruction) {
  return instruction->IsBranch() || instruction->IsGoto() ||
         instruction->IsIndirectGoto() || instruction->IsReturnBase() ||
         instruction->IsThrow() || instruction->IsReThrow() ||
         instruction->IsTailCall();
}

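// Verifies the overall block structure of the flow graph: consistency of the
// preorder, postorder, and reverse-postorder block orderings, the
// predecessor/successor links, and the dominator tree, then visits the
// instructions of each block.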
void FlowGraphChecker::VisitBlocks() {
  const GrowableArray<BlockEntryInstr*>& preorder = flow_graph_->preorder();
  const GrowableArray<BlockEntryInstr*>& postorder = flow_graph_->postorder();
  const GrowableArray<BlockEntryInstr*>& rev_postorder =
      flow_graph_->reverse_postorder();

  // Make sure lengths match.
  const intptr_t block_count = preorder.length();
  ASSERT(block_count == postorder.length());
  ASSERT(block_count == rev_postorder.length());

  // Make sure postorder has true reverse.
  for (intptr_t i = 0; i < block_count; ++i) {
    ASSERT(postorder[i] == rev_postorder[block_count - i - 1]);
  }

  // Iterate over all basic blocks.
  const intptr_t max_block_id = flow_graph_->max_block_id();
  for (BlockIterator it = flow_graph_->reverse_postorder_iterator(); !it.Done();
       it.Advance()) {
    BlockEntryInstr* block = it.Current();
    ASSERT1(block->block_id() <= max_block_id, block);
    // Make sure ordering is consistent.
    ASSERT1(block->preorder_number() <= block_count, block);
    ASSERT1(block->postorder_number() <= block_count, block);
    ASSERT1(preorder[block->preorder_number()] == block, block);
    ASSERT1(postorder[block->postorder_number()] == block, block);
    // Make sure predecessors and successors agree.
    Instruction* last = block->last_instruction();
    for (intptr_t i = 0, n = last->SuccessorCount(); i < n; ++i) {
      ASSERT1(IsPred(block, last->SuccessorAt(i)), block);
    }
    for (intptr_t i = 0, n = block->PredecessorCount(); i < n; ++i) {
      ASSERT1(IsSucc(block, block->PredecessorAt(i)), block);
    }
    // Make sure dominance relations agree.
    for (intptr_t i = 0, n = block->dominated_blocks().length(); i < n; ++i) {
      ASSERT1(block->dominated_blocks()[i]->dominator() == block, block);
    }
    if (block->dominator() != nullptr) {
      ASSERT1(IsDirectlyDominated(block, block->dominator()), block);
    }
    // Visit all instructions in this block.
    VisitInstructions(block);
  }
}

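// Verifies and visits all instructions in the given block: initial
// definitions, phis in join entries, and the regular instruction list.
// Skipped when the definition count exceeds FLAG_verify_definitions_threshold.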
void FlowGraphChecker::VisitInstructions(BlockEntryInstr* block) {
  // To avoid excessive runtimes, skip the instructions check if there
  // are many definitions (as happens in e.g. an initialization block).
  if (flow_graph_->current_ssa_temp_index() >
      FLAG_verify_definitions_threshold) {
    return;
  }
  // Give all visitors quick access.
  current_block_ = block;
  // Visit initial definitions.
  if (auto entry = block->AsBlockEntryWithInitialDefs()) {
    for (auto def : *entry->initial_definitions()) {
      ASSERT(def != nullptr);
      ASSERT1(def->IsConstant() || def->IsParameter(), def);
      // Make sure block lookup agrees.
      ASSERT1(def->GetBlock() == entry, def);
      // Initial definitions are partially linked into graph.
      ASSERT1(def->next() == nullptr, def);
      ASSERT1(def->previous() == entry, def);
      // No initial definition should contain an unsafe untagged pointer.
      ASSERT1(!def->MayCreateUnsafeUntaggedPointer(), def);
      // Skip common constants as checking them could be slow.
      if (IsCommonConstant(def)) continue;
      // Visit the initial definition as instruction.
      VisitInstruction(def);
    }
  }
  // Visit phis in join.
  if (auto entry = block->AsJoinEntry()) {
    for (PhiIterator it(entry); !it.Done(); it.Advance()) {
      PhiInstr* phi = it.Current();
      // Make sure block lookup agrees.
      ASSERT1(phi->GetBlock() == entry, phi);
      // Phis are never linked into graph.
      ASSERT1(phi->next() == nullptr, phi);
      ASSERT1(phi->previous() == nullptr, phi);
      // Visit the phi as instruction.
      VisitInstruction(phi);
    }
  }
  // Visit regular instructions.
  Instruction* last = block->last_instruction();
  ASSERT1((last == block) == block->IsGraphEntry(), block);
  Instruction* prev = block;
  ASSERT(prev->previous() == nullptr);
  for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
    Instruction* instruction = it.Current();
    // Make sure block lookup agrees (scan in scan).
    ASSERT1(instruction->GetBlock() == block, instruction);
    // Make sure linked list agrees.
    ASSERT1(prev->next() == instruction, instruction);
    ASSERT1(instruction->previous() == prev, instruction);
    prev = instruction;
    // Make sure control flow makes sense.
    ASSERT1(IsControlFlow(instruction) == (instruction == last), instruction);
    ASSERT1(!instruction->IsPhi(), instruction);
    // Visit the instruction.
    VisitInstruction(instruction);
  }
  ASSERT(prev->next() == nullptr);
  ASSERT(prev == last);
  // Make sure loop information, when up-to-date, agrees.
  if (flow_graph_->loop_hierarchy_ != nullptr) {
    for (LoopInfo* loop = block->loop_info(); loop != nullptr;
         loop = loop->outer()) {
      ASSERT1(loop->Contains(block), block);
    }
  }
}

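// Verifies properties of a single (non-block-entry) instruction: deopt-id
// presence for throwing instructions inside try blocks (JIT only), token
// position and inlining id sanity, input representations, and all regular
// and environment inputs.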
void FlowGraphChecker::VisitInstruction(Instruction* instruction) {
  ASSERT1(!instruction->IsBlockEntry(), instruction);

#if !defined(DART_PRECOMPILER)
  // In JIT mode, any instruction which may throw must have a deopt-id, except
  // tail-call because it replaces the stack frame.
  ASSERT1(!instruction->MayThrow() ||
              !instruction->GetBlock()->InsideTryBlock() ||
              instruction->IsTailCall() ||
              instruction->deopt_id() != DeoptId::kNone,
          instruction);

  // Any instruction that can eagerly deopt cannot come from a force-optimized
  // function.
  if (instruction->ComputeCanDeoptimize()) {
    ASSERT2(!flow_graph_->function().ForceOptimize(), instruction,
            &flow_graph_->function());
  }

#endif  // !defined(DART_PRECOMPILER)

  // If checking token positions and the flow graph has an inlining ID,
  // check the inlining ID and token position for instructions with real or
  // synthetic token positions.
  if (FLAG_check_token_positions && flow_graph_->inlining_id() >= 0) {
    const TokenPosition& pos = instruction->token_pos();
    if (pos.IsReal() || pos.IsSynthetic()) {
      ASSERT1(instruction->has_inlining_id(), instruction);
      const intptr_t inlining_id = instruction->inlining_id();
      const auto& function = *inline_id_to_function_[inlining_id];
      if (function.end_token_pos().IsReal() &&
          !pos.IsWithin(function.token_pos(), function.end_token_pos())) {
        TextBuffer buffer(256);
        buffer.Printf("Token position %s is invalid for function %s (%s, %s)",
                      pos.ToCString(), function.ToFullyQualifiedCString(),
                      function.token_pos().ToCString(),
                      function.end_token_pos().ToCString());
        if (inlining_id > 0) {
          buffer.Printf(" while compiling function %s",
                        inline_id_to_function_[0]->ToFullyQualifiedCString());
        }
        FATAL("%s", buffer.buffer());
      }
      script_ = function.script();
      if (!script_.IsNull() && !script_.IsValidTokenPosition(pos)) {
        TextBuffer buffer(256);
        buffer.Printf(
            "Token position %s is invalid for script %s of function %s",
            pos.ToCString(), script_.ToCString(),
            function.ToFullyQualifiedCString());
        if (inlining_id > 0) {
          buffer.Printf(" while compiling function %s",
                        inline_id_to_function_[0]->ToFullyQualifiedCString());
        }
        FATAL("%s", buffer.buffer());
      }
    }
  }
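
  // Input representations must match what the instruction expects, except in
  // compilation phases where unmatched representations are still allowed.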
  ASSERT1(flow_graph_->unmatched_representations_allowed() ||
              !instruction->HasUnmatchedInputRepresentations(),
          instruction);

  // Check all regular inputs.
  for (intptr_t i = 0, n = instruction->InputCount(); i < n; ++i) {
    VisitUseDef(instruction, instruction->InputAt(i), i, /*is_env*/ false);
  }
  // Check all environment inputs (including outer ones).
  intptr_t i = 0;
  for (Environment::DeepIterator it(instruction->env()); !it.Done();
       it.Advance()) {
    VisitUseDef(instruction, it.CurrentValue(), i++, /*is_env*/ true);
  }
  // Visit specific instructions (definitions and anything with Visit()).
  if (auto def = instruction->AsDefinition()) {
    VisitDefinition(def);
  }
  instruction->Accept(this);
}

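// Verifies a definition's SSA name and walks both of its use lists, checking
// each use in the definition-to-use direction.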
void FlowGraphChecker::VisitDefinition(Definition* def) {
  // Used definitions must have an SSA name, and the SSA name must
  // be less than the current_ssa_temp_index.
  if (def->HasSSATemp()) {
    ASSERT1(def->ssa_temp_index() < flow_graph_->current_ssa_temp_index(), def);
  } else {
    ASSERT1(def->input_use_list() == nullptr, def);
  }
  // Check all regular uses.
  Value* prev = nullptr;
  for (Value* use = def->input_use_list(); use != nullptr;
       use = use->next_use()) {
    VisitDefUse(def, use, prev, /*is_env*/ false);
    prev = use;
  }
  // Check all environment uses.
  prev = nullptr;
  for (Value* use = def->env_use_list(); use != nullptr;
       use = use->next_use()) {
    VisitDefUse(def, use, prev, /*is_env*/ true);
    prev = use;
  }
}

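// Verifies one input value of an instruction in the use-to-definition
// direction: the back-link to the using instruction, the linkage state of
// the defining instruction, and that the definition dominates the use.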
void FlowGraphChecker::VisitUseDef(Instruction* instruction,
                                   Value* use,
                                   intptr_t index,
                                   bool is_env) {
  ASSERT2(use->instruction() == instruction, use, instruction);
  ASSERT1(use->use_index() == index, use);
  // Get definition.
  Definition* def = use->definition();
  ASSERT(def != nullptr);
  ASSERT1(def != instruction || def->IsPhi() || def->IsMaterializeObject(),
          def);
  // Make sure each input is properly defined in the graph by something
  // that dominates the input (note that the proper dominance relation
  // on the input values of Phis is checked by the Phi visitor below).
  if (def->IsPhi()) {
    ASSERT1(def->GetBlock()->IsJoinEntry(), def);
    // Phis are never linked into graph.
    ASSERT1(def->next() == nullptr, def);
    ASSERT1(def->previous() == nullptr, def);
  } else if (def->IsConstant() || def->IsParameter()) {
    // Initial definitions are partially linked into graph, but some
    // constants are fully linked into graph (so no next() assert).
    ASSERT1(def->previous() != nullptr, def);
    // Skip checks below for common constants as checking them could be slow.
    if (IsCommonConstant(def)) return;
  } else if (def->next() == nullptr) {
    // MaterializeObject and MoveArgument can be detached from the graph.
    if (auto move_arg = def->AsMoveArgument()) {
      ASSERT1(move_arg->location().IsMachineRegister() ||
                  (move_arg->location().IsPairLocation() &&
                   move_arg->location()
                       .AsPairLocation()
                       ->At(0)
                       .IsMachineRegister() &&
                   move_arg->location()
                       .AsPairLocation()
                       ->At(1)
                       .IsMachineRegister()),
              move_arg);
    } else {
      ASSERT1(def->IsMaterializeObject(), def);
    }
    ASSERT1(def->previous() == nullptr, def);
  } else {
    // Others are fully linked into graph.
    ASSERT1(def->next() != nullptr, def);
    ASSERT1(def->previous() != nullptr, def);
  }
  if (def->HasSSATemp()) {
    ASSERT2(DefDominatesUse(def, instruction), def, instruction);
    ASSERT2(IsInUseList(is_env ? def->env_use_list() : def->input_use_list(),
                        instruction),
            def, instruction);
  }
}

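// Verifies one use of a definition in the definition-to-use direction: the
// back-link to the definition, the use's position inside the using
// instruction, linkage of the using instruction, dominance, and the
// restrictions on unsafe untagged pointers.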
void FlowGraphChecker::VisitDefUse(Definition* def,
                                   Value* use,
                                   Value* prev,
                                   bool is_env) {
  ASSERT2(use->definition() == def, use, def);
  ASSERT1(use->previous_use() == prev, use);
  // Get using instruction.
  Instruction* instruction = use->instruction();
  ASSERT(instruction != nullptr);
  ASSERT1(def != instruction || def->IsPhi() || def->IsMaterializeObject(),
          def);
  if (is_env) {
    ASSERT2(instruction->env()->ValueAtUseIndex(use->use_index()) == use,
            instruction, use);
  } else {
    ASSERT2(instruction->InputAt(use->use_index()) == use, instruction, use);
  }
  // Make sure the reaching type, if any, has an owner consistent with this use.
  if (auto const type = use->reaching_type()) {
    ASSERT1(type->owner() == nullptr || type->owner() == def, use);
  }
  // Make sure each use appears in the graph and is properly dominated
  // by the definition (note that the proper dominance relation on the
  // input values of Phis is checked by the Phi visitor below).
  if (instruction->IsPhi()) {
    ASSERT1(instruction->AsPhi()->is_alive(), instruction);
    ASSERT1(instruction->GetBlock()->IsJoinEntry(), instruction);
    // Phis are never linked into graph.
    ASSERT1(instruction->next() == nullptr, instruction);
    ASSERT1(instruction->previous() == nullptr, instruction);
  } else if (instruction->IsBlockEntry()) {
    // BlockEntry instructions have environments attached to them but
    // have no reliable way to verify if they are still in the graph.
    ASSERT1(is_env, instruction);
    ASSERT1(instruction->IsGraphEntry() || instruction->next() != nullptr,
            instruction);
    ASSERT2(DefDominatesUse(def, instruction), def, instruction);
  } else if (instruction->IsMaterializeObject()) {
    // Materializations can be both linked into graph and detached.
    if (instruction->next() != nullptr) {
      ASSERT1(instruction->previous() != nullptr, instruction);
      ASSERT2(DefDominatesUse(def, instruction), def, instruction);
    } else {
      ASSERT1(instruction->previous() == nullptr, instruction);
    }
  } else if (instruction->IsMoveArgument()) {
    // MoveArgument can be both linked into graph and detached.
    if (instruction->next() != nullptr) {
      ASSERT1(instruction->previous() != nullptr, instruction);
      ASSERT2(DefDominatesUse(def, instruction), def, instruction);
    } else {
      ASSERT1(instruction->previous() == nullptr, instruction);
    }
  } else {
    // Others are fully linked into graph.
    ASSERT1(IsControlFlow(instruction) || instruction->next() != nullptr,
            instruction);
    ASSERT1(instruction->previous() != nullptr, instruction);
    ASSERT2(!def->HasSSATemp() || DefDominatesUse(def, instruction), def,
            instruction);
  }
  if (def->MayCreateUnsafeUntaggedPointer()) {
    // We assume that all uses of a GC-movable untagged pointer are within the
    // same basic block as the definition.
    ASSERT2(def->GetBlock() == instruction->GetBlock(), def, instruction);
    // Unsafe untagged pointers should not be used as inputs to Phi nodes in
    // the same basic block.
    ASSERT2(!instruction->IsPhi(), def, instruction);
    // Unsafe untagged pointers should not be returned.
    ASSERT2(!instruction->IsReturnBase(), def, instruction);
    // Make sure no instruction between the definition and the use (including
    // the use) can trigger GC.
    for (const auto* current = def->next(); current != instruction->next();
         current = current->next()) {
      ASSERT2(!current->CanTriggerGC(), def, current);
    }
  }
}

void FlowGraphChecker::VisitConstant(ConstantInstr* constant) {
  // Range check on smi.
  const Object& value = constant->value();
  if (value.IsSmi()) {
    const int64_t smi_value = Integer::Cast(value).AsInt64Value();
    ASSERT(compiler::target::kSmiMin <= smi_value);
    ASSERT(smi_value <= compiler::target::kSmiMax);
  }
  // Any constant involved in SSA should appear in the entry (making it more
  // likely it was inserted by the utility that avoids duplication).
  //
  // TODO(dartbug.com/36894)
  //
  // ASSERT(constant->GetBlock() == flow_graph_->graph_entry());
}

void FlowGraphChecker::VisitPhi(PhiInstr* phi) {
  // Make sure the definition of each input value of a Phi dominates
  // the corresponding incoming edge, as defined by order.
  ASSERT1(phi->InputCount() == current_block_->PredecessorCount(), phi);
  for (intptr_t i = 0, n = phi->InputCount(); i < n; ++i) {
    Definition* def = phi->InputAt(i)->definition();
    ASSERT1(def->HasSSATemp(), def);  // phis have SSA defs
    BlockEntryInstr* edge = current_block_->PredecessorAt(i);
    ASSERT1(DefDominatesUse(def, edge->last_instruction()), def);
  }
}

void FlowGraphChecker::VisitGoto(GotoInstr* jmp) {
  ASSERT1(jmp->SuccessorCount() == 1, jmp);
}

void FlowGraphChecker::VisitIndirectGoto(IndirectGotoInstr* jmp) {
  ASSERT1(jmp->SuccessorCount() >= 1, jmp);
}

void FlowGraphChecker::VisitBranch(BranchInstr* branch) {
  ASSERT1(branch->SuccessorCount() == 2, branch);
}

void FlowGraphChecker::VisitRedefinition(RedefinitionInstr* def) {
  ASSERT1(def->value()->definition() != def, def);
}

// Asserts that arguments appear in the environment at the right place.
void FlowGraphChecker::AssertArgumentsInEnv(Definition* call) {
  const auto& function = flow_graph_->function();
  Environment* env = call->env();
  if (env == nullptr) {
    // Environments can be removed by the EliminateEnvironments pass and
    // are not present before SSA.
  } else if (function.IsIrregexpFunction()) {
    // TODO(dartbug.com/38577): cleanup regexp pipeline too....
  } else {
    // Otherwise, the trailing environment entries must
    // correspond directly with the arguments.
    const intptr_t env_count = env->Length();
    const intptr_t arg_count = call->ArgumentCount();
    // Some calls (e.g. closure calls) have more inputs than actual arguments.
    // Those extra inputs will be consumed from the stack before the call.
    const intptr_t after_args_input_count = call->env()->LazyDeoptPruneCount();
    ASSERT1((arg_count + after_args_input_count) <= env_count, call);
    const intptr_t env_base = env_count - arg_count - after_args_input_count;
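    // Layout (illustrative): with arg_count == 2 and one pruned entry, the
    // environment ends [..., arg0, arg1, extra], so env_base indexes arg0
    // and ValueAt(env_base + i) mirrors ArgumentAt(i).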
    for (intptr_t i = 0; i < arg_count; i++) {
      if (call->HasMoveArguments()) {
        ASSERT1(call->ArgumentAt(i) == env->ValueAt(env_base + i)
                                           ->definition()
                                           ->AsMoveArgument()
                                           ->value()
                                           ->definition(),
                call);
      } else {
        if (env->LazyDeoptToBeforeDeoptId()) {
          // The deoptimization environment attached to this [call] instruction
          // may no longer target the same call in unoptimized code. It may
          // target anything.
          //
          // As a result, we cannot assume the arguments we pass to the call
          // will also be in the deopt environment.
          //
          // This currently can happen in inlined force-optimized instructions.
          ASSERT(call->inlining_id() > 0);
          const auto& function = *inline_id_to_function_[call->inlining_id()];
          ASSERT(function.ForceOptimize());
          return;
        }

        // Redefinition instructions and boxing/unboxing are inserted
        // without updating environment uses (FlowGraph::RenameDominatedUses,
        // FlowGraph::InsertConversionsFor).
        //
        // Conditional constant propagation doesn't update environments either
        // and may also replace redefinition instructions with constants
        // without updating environment uses of their original definitions
        // (ConstantPropagator::InsertRedefinitionsAfterEqualityComparisons).
        //
        // Also, constants may belong to different blocks (e.g. function entry
        // and graph entry).
        Definition* arg_def =
            call->ArgumentAt(i)->OriginalDefinitionIgnoreBoxingAndConstraints();
        Definition* env_def =
            env->ValueAt(env_base + i)
                ->definition()
                ->OriginalDefinitionIgnoreBoxingAndConstraints();
        ASSERT2((arg_def == env_def) || arg_def->IsConstant(), arg_def,
                env_def);
      }
    }
  }
}

void FlowGraphChecker::VisitClosureCall(ClosureCallInstr* call) {
  AssertArgumentsInEnv(call);
}

void FlowGraphChecker::VisitStaticCall(StaticCallInstr* call) {
  AssertArgumentsInEnv(call);
}

void FlowGraphChecker::VisitInstanceCall(InstanceCallInstr* call) {
  AssertArgumentsInEnv(call);
  // Force-optimized functions may not have instance calls inside them because
  // we do not reset ICData for these.
  ASSERT(!flow_graph_->function().ForceOptimize());
}

void FlowGraphChecker::VisitPolymorphicInstanceCall(
    PolymorphicInstanceCallInstr* call) {
  AssertArgumentsInEnv(call);
  // Force-optimized functions may not have instance calls inside them because
  // we do not reset ICData for these.
  ASSERT(!flow_graph_->function().ForceOptimize());
}

// Main entry point of graph checker.
void FlowGraphChecker::Check(const char* pass_name) {
  if (FLAG_trace_compiler) {
    THR_Print("Running checker after %s\n", pass_name);
  }
  ASSERT(flow_graph_ != nullptr);
  VisitBlocks();
}

}  // namespace dart

#endif  // defined(DEBUG)