base_flow_graph_builder.h
// Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#ifndef RUNTIME_VM_COMPILER_FRONTEND_BASE_FLOW_GRAPH_BUILDER_H_
#define RUNTIME_VM_COMPILER_FRONTEND_BASE_FLOW_GRAPH_BUILDER_H_

#if defined(DART_PRECOMPILED_RUNTIME)
#error "AOT runtime should not use compiler sources (including header files)"
#endif  // defined(DART_PRECOMPILED_RUNTIME)

#include <initializer_list>

#include "vm/compiler/backend/flow_graph.h"
#include "vm/compiler/backend/il.h"
#include "vm/object.h"

namespace dart {

class InlineExitCollector;

namespace kernel {

class BaseFlowGraphBuilder;
struct InferredTypeMetadata;
class TryCatchBlock;
class Fragment {
 public:
  Instruction* entry = nullptr;
  Instruction* current = nullptr;

  Fragment() {}

  explicit Fragment(Instruction* instruction)
      : entry(instruction), current(instruction) {}

  Fragment(Instruction* entry, Instruction* current)
      : entry(entry), current(current) {}

  bool is_open() const { return entry == nullptr || current != nullptr; }
  bool is_closed() const { return !is_open(); }

  bool is_empty() const { return entry == nullptr && current == nullptr; }

  // A non-empty fragment should have an entry.
  bool is_valid() const { return is_empty() || (entry != nullptr); }

  void Prepend(Instruction* start);

  Fragment& operator+=(const Fragment& other);
  Fragment& operator<<=(Instruction* next);

 private:
  DISALLOW_ALLOCATION();
};

Fragment operator+(const Fragment& first, const Fragment& second);
Fragment operator<<(const Fragment& fragment, Instruction* next);
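
// A minimal sketch of how fragments compose (a, b, c are hypothetical
// Instruction pointers):
//
//   Fragment f(a);       // entry == current == a
//   f += Fragment(b);    // appends: entry stays a, current becomes b
//   f <<= c;             // appends a single instruction: a -> b -> c
//
// A fragment with a non-null entry and a null current is closed (see
// is_closed() above), i.e. it ends in an instruction after which control
// does not fall through.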

// IL fragment that performs some sort of test (comparison) and
// has a single entry and multiple true and false exits.
class TestFragment {
 public:
  BlockEntryInstr* CreateTrueSuccessor(BaseFlowGraphBuilder* builder);
  BlockEntryInstr* CreateFalseSuccessor(BaseFlowGraphBuilder* builder);

  void IfTrueGoto(BaseFlowGraphBuilder* builder, JoinEntryInstr* join) {
    ConnectBranchesTo(builder, *true_successor_addresses, join);
  }

  // If negate is true, return a negated fragment by flipping the
  // true and false successors. Otherwise return this fragment
  // unchanged.
  TestFragment Negate(bool negate) {
    if (negate) {
      return TestFragment(entry, false_successor_addresses,
                          true_successor_addresses);
    } else {
      return *this;
    }
  }

  typedef ZoneGrowableArray<TargetEntryInstr**> SuccessorAddressArray;

  // Create an empty fragment.
  TestFragment() {}

  // Create a fragment with the given entry and true/false exits.
  TestFragment(Instruction* entry,
               SuccessorAddressArray* true_successor_addresses,
               SuccessorAddressArray* false_successor_addresses)
      : entry(entry),
        true_successor_addresses(true_successor_addresses),
        false_successor_addresses(false_successor_addresses) {}

  // Create a fragment with the given entry and a single branch as an exit.
  TestFragment(Instruction* entry, BranchInstr* branch);

  void ConnectBranchesTo(BaseFlowGraphBuilder* builder,
                         const SuccessorAddressArray& branches,
                         JoinEntryInstr* join);

  BlockEntryInstr* CreateSuccessorFor(BaseFlowGraphBuilder* builder,
                                      const SuccessorAddressArray& branches);

  Instruction* entry = nullptr;
  SuccessorAddressArray* true_successor_addresses = nullptr;
  SuccessorAddressArray* false_successor_addresses = nullptr;
};

// Indicates which form of the unchecked entrypoint we are compiling.
//
// kNone:
//
// There is no unchecked entrypoint: the unchecked entry is set to nullptr in
// the 'GraphEntryInstr'.
//
// kSeparate:
//
// The normal and unchecked entrypoints each point to their own version of
// the prologue, containing exactly those checks which need to be performed
// on either side. Both sides jump directly to the body after performing
// their prologue.
//
// kSharedWithVariable:
//
// A temporary variable is allocated and initialized to 0 on normal entry
// and 2 on unchecked entry. Code which should be omitted on the unchecked
// entrypoint is made conditional on this variable being equal to 0.
//
enum class UncheckedEntryPointStyle {
  kNone = 0,
  kSeparate = 1,
  kSharedWithVariable = 2,
};
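
// An illustrative sketch of the kSharedWithVariable shape (not literal
// builder output; ':entry_point_tmp' is a hypothetical variable name):
//
//   normal entry:     StoreLocal(:entry_point_tmp, 0); goto prologue
//   unchecked entry:  StoreLocal(:entry_point_tmp, 2); goto prologue
//   prologue:         if (:entry_point_tmp == 0) { <checks> }
//                     goto body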

class BaseFlowGraphBuilder {
 public:
  BaseFlowGraphBuilder(
      const ParsedFunction* parsed_function,
      intptr_t last_used_block_id,
      intptr_t osr_id = DeoptId::kNone,
      ZoneGrowableArray<intptr_t>* context_level_array = nullptr,
      InlineExitCollector* exit_collector = nullptr,
      bool inlining_unchecked_entry = false)
      : parsed_function_(parsed_function),
        function_(parsed_function_->function()),
        thread_(Thread::Current()),
        zone_(thread_->zone()),
        osr_id_(osr_id),
        context_level_array_(context_level_array),
        context_depth_(0),
        last_used_block_id_(last_used_block_id),
        current_try_index_(kInvalidTryIndex),
        next_used_try_index_(0),
        stack_(nullptr),
        exit_collector_(exit_collector),
        inlining_unchecked_entry_(inlining_unchecked_entry),
        saved_args_desc_array_(
            has_saved_args_desc_array()
                ? Array::ZoneHandle(zone_, function_.saved_args_desc())
                : Object::null_array()),
        coverage_array_(
            Array::ZoneHandle(parsed_function->function().GetCoverageArray())) {
  }

  const Array& coverage_array() const { return coverage_array_; }

  Fragment LoadField(const Field& field, bool calls_initializer);
  Fragment LoadNativeField(const Slot& native_field,
                           InnerPointerAccess loads_inner_pointer,
                           bool calls_initializer = false);
  Fragment LoadNativeField(const Slot& native_field,
                           bool calls_initializer = false);
  // Pass true for index_unboxed if indexing into external typed data.
  Fragment LoadIndexed(classid_t class_id,
                       intptr_t index_scale = compiler::target::kWordSize,
                       bool index_unboxed = false,
                       AlignmentType alignment = kAlignedAccess);

  Fragment LoadUntagged(intptr_t offset);
  Fragment CalculateElementAddress(intptr_t index_scale);

  void SetTempIndex(Definition* definition);

  Fragment LoadLocal(LocalVariable* variable);
  Fragment StoreLocal(LocalVariable* variable) {
    return StoreLocal(TokenPosition::kNoSource, variable);
  }
  Fragment StoreLocal(TokenPosition position, LocalVariable* variable);
  Fragment StoreLocalRaw(TokenPosition position, LocalVariable* variable);
  Fragment LoadContextAt(int depth);
  Fragment GuardFieldLength(const Field& field, intptr_t deopt_id);
  Fragment GuardFieldClass(const Field& field, intptr_t deopt_id);
  static const Field& MayCloneField(Zone* zone, const Field& field);
  Fragment StoreNativeField(
      TokenPosition position,
      const Slot& slot,
      InnerPointerAccess stores_inner_pointer,
      StoreFieldInstr::Kind kind = StoreFieldInstr::Kind::kOther,
      StoreBarrierType emit_store_barrier = kEmitStoreBarrier,
      compiler::Assembler::MemoryOrder memory_order =
          compiler::Assembler::kRelaxedNonAtomic);
  Fragment StoreNativeField(
      TokenPosition position,
      const Slot& slot,
      StoreFieldInstr::Kind kind = StoreFieldInstr::Kind::kOther,
      StoreBarrierType emit_store_barrier = kEmitStoreBarrier,
      compiler::Assembler::MemoryOrder memory_order =
          compiler::Assembler::kRelaxedNonAtomic) {
    return StoreNativeField(position, slot, InnerPointerAccess::kNotUntagged,
                            kind, emit_store_barrier, memory_order);
  }
  Fragment StoreNativeField(
      const Slot& slot,
      InnerPointerAccess stores_inner_pointer,
      StoreFieldInstr::Kind kind = StoreFieldInstr::Kind::kOther,
      StoreBarrierType emit_store_barrier = kEmitStoreBarrier,
      compiler::Assembler::MemoryOrder memory_order =
          compiler::Assembler::kRelaxedNonAtomic) {
    return StoreNativeField(TokenPosition::kNoSource, slot,
                            stores_inner_pointer, kind, emit_store_barrier,
                            memory_order);
  }
  Fragment StoreNativeField(
      const Slot& slot,
      StoreFieldInstr::Kind kind = StoreFieldInstr::Kind::kOther,
      StoreBarrierType emit_store_barrier = kEmitStoreBarrier,
      compiler::Assembler::MemoryOrder memory_order =
          compiler::Assembler::kRelaxedNonAtomic) {
    return StoreNativeField(TokenPosition::kNoSource, slot, kind,
                            emit_store_barrier, memory_order);
  }
  Fragment StoreField(
      const Field& field,
      StoreFieldInstr::Kind kind = StoreFieldInstr::Kind::kOther,
      StoreBarrierType emit_store_barrier = kEmitStoreBarrier);
  Fragment StoreFieldGuarded(
      const Field& field,
      StoreFieldInstr::Kind kind = StoreFieldInstr::Kind::kOther);
  Fragment LoadStaticField(const Field& field, bool calls_initializer);
  Fragment RedefinitionWithType(const AbstractType& type);
  Fragment ReachabilityFence();
  Fragment StoreStaticField(TokenPosition position, const Field& field);
  Fragment StoreIndexed(classid_t class_id);
  // Takes a [class_id] valid for StoreIndexed.
  Fragment StoreIndexedTypedData(classid_t class_id,
                                 intptr_t index_scale,
                                 bool index_unboxed,
                                 AlignmentType alignment = kAlignedAccess);

  // Sign-extends kUnboxedInt32 and zero-extends kUnboxedUint32.
  Fragment Box(Representation from);

  void Push(Definition* definition);
  Definition* Peek(intptr_t depth = 0);
  Value* Pop();
  Fragment Drop();
  // Drop the given number of temps from the stack but preserve the top of
  // the stack.
  Fragment DropTempsPreserveTop(intptr_t num_temps_to_drop);
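  // For instance (hypothetical stack contents): with an expression stack of
  //
  //   [..., t1, t2, top]
  //
  // DropTempsPreserveTop(2) leaves [..., top].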

  // Create a pseudo-local variable for a location on the expression stack.
  // Note: SSA construction currently does not support inserting Phi functions
  // for expression stack locations - only real local variables are supported.
  // This means that you can't use MakeTemporary in a way that would require
  // a Phi in SSA form. For example, the example below will be miscompiled or
  // will crash the debug VM with an assertion when building SSA for the
  // optimizing compiler:
  //
  //   t = MakeTemporary()
  //   Branch B1 or B2
  //   B1:
  //     StoreLocal(t, v0)
  //     goto B3
  //   B2:
  //     StoreLocal(t, v1)
  //     goto B3
  //   B3:
  //     LoadLocal(t)
  LocalVariable* MakeTemporary(const char* suffix = nullptr);
  Fragment DropTemporary(LocalVariable** temp);

  TargetEntryInstr* BuildTargetEntry();
  FunctionEntryInstr* BuildFunctionEntry(GraphEntryInstr* graph_entry);
  JoinEntryInstr* BuildJoinEntry();
  JoinEntryInstr* BuildJoinEntry(intptr_t try_index);
  IndirectEntryInstr* BuildIndirectEntry(intptr_t indirect_id,
                                         intptr_t try_index);

  Fragment StrictCompare(TokenPosition position,
                         Token::Kind kind,
                         bool number_check = false);
  Fragment StrictCompare(Token::Kind kind, bool number_check = false);
  Fragment Goto(JoinEntryInstr* destination);
  Fragment UnboxedIntConstant(int64_t value, Representation representation);
  Fragment IntConstant(int64_t value);
  Fragment Constant(const Object& value);
  Fragment NullConstant();
  Fragment SmiRelationalOp(Token::Kind kind);
  Fragment SmiBinaryOp(Token::Kind op, bool is_truncating = false);
  Fragment BinaryIntegerOp(Token::Kind op,
                           Representation representation,
                           bool is_truncating = false);
  Fragment LoadFpRelativeSlot(intptr_t offset,
                              CompileType result_type,
                              Representation representation = kTagged);
  Fragment BranchIfTrue(TargetEntryInstr** then_entry,
                        TargetEntryInstr** otherwise_entry,
                        bool negate = false);
  Fragment BranchIfNull(TargetEntryInstr** then_entry,
                        TargetEntryInstr** otherwise_entry,
                        bool negate = false);
  Fragment BranchIfEqual(TargetEntryInstr** then_entry,
                         TargetEntryInstr** otherwise_entry,
                         bool negate = false);
  Fragment BranchIfStrictEqual(TargetEntryInstr** then_entry,
                               TargetEntryInstr** otherwise_entry);
  Fragment Return(TokenPosition position);
  Fragment CheckStackOverflow(TokenPosition position,
                              intptr_t stack_depth,
                              intptr_t loop_depth);
  Fragment CheckStackOverflowInPrologue(TokenPosition position);
  Fragment MemoryCopy(classid_t src_cid,
                      classid_t dest_cid,
                      bool unboxed_inputs,
                      bool can_overlap = true);
  Fragment TailCall(const Code& code);

  intptr_t GetNextDeoptId() {
    intptr_t deopt_id = thread_->compiler_state().GetNextDeoptId();
    if (context_level_array_ != nullptr) {
      intptr_t level = context_depth_;
      context_level_array_->Add(deopt_id);
      context_level_array_->Add(level);
    }
    return deopt_id;
  }

  intptr_t AllocateTryIndex() { return next_used_try_index_++; }
  intptr_t CurrentTryIndex() const { return current_try_index_; }
  void SetCurrentTryIndex(intptr_t try_index) {
    current_try_index_ = try_index;
  }

  bool IsCompiledForOsr() const { return osr_id_ != DeoptId::kNone; }

  bool IsInlining() const { return exit_collector_ != nullptr; }

  void InlineBailout(const char* reason);

  Fragment LoadArgDescriptor();
  Fragment TestTypeArgsLen(Fragment eq_branch,
                           Fragment neq_branch,
                           intptr_t num_type_args);
  Fragment TestDelayedTypeArgs(LocalVariable* closure,
                               Fragment present,
                               Fragment absent);
  Fragment TestAnyTypeArgs(Fragment present, Fragment absent);

  JoinEntryInstr* BuildThrowNoSuchMethod();
  Fragment ThrowException(TokenPosition position);

  Fragment AssertBool(TokenPosition position);

  // Top of the stack should be the closure function.
  Fragment AllocateClosure(TokenPosition position,
                           bool has_instantiator_type_args,
                           bool is_generic,
                           bool is_tear_off);
  Fragment AllocateContext(const ZoneGrowableArray<const Slot*>& scope);
  Fragment AllocateRecord(TokenPosition position, RecordShape shape);
  Fragment AllocateSmallRecord(TokenPosition position, RecordShape shape);
  Fragment AllocateTypedData(TokenPosition position, classid_t class_id);
  Fragment InstantiateType(const AbstractType& type);
  Fragment InstantiateTypeArguments(const TypeArguments& type_arguments);

  // Returns true if we are building a graph for inlining of a call site that
  // enters the function through the unchecked entry.
  bool InliningUncheckedEntry() const { return inlining_unchecked_entry_; }

  // Returns the depth of the expression stack.
  intptr_t GetStackDepth() const {
    return stack_ == nullptr ? 0 : stack_->definition()->temp_index() + 1;
  }

  Fragment AllocateObject(TokenPosition position,
                          const Class& klass,
                          intptr_t argument_count);

  Fragment DebugStepCheck(TokenPosition position);

  // Loads 'receiver' and checks it for null. Throws NoSuchMethod if it is
  // null. 'function_name' is the selector being called (it is reported in
  // the NoSuchMethod message).
  // Note that this does _not_ use the result of the CheckNullInstr, so it
  // does not create a data dependency and might break with code motion.
  Fragment CheckNull(TokenPosition position,
                     LocalVariable* receiver,
                     const String& function_name);
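  // A hypothetical caller (sketch; 'receiver_var' and 'function_name' are
  // stand-ins for a real local variable and selector):
  //
  //   Fragment body;
  //   body += CheckNull(position, receiver_var, function_name);
  //   // Later loads of receiver_var have no data dependency on the check.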

  // Pops the top of the stack, checks it for null, and pushes the result on
  // the stack to create a data dependency.
  //
  // Note that the result can currently only be used in optimized code,
  // because optimized code uses FlowGraph::RemoveRedefinitions to remove the
  // redefinitions, while unoptimized code does not.
  Fragment CheckNullOptimized(
      const String& name,
      CheckNullInstr::ExceptionType exception_type,
      TokenPosition position = TokenPosition::kNoSource);
  Fragment CheckNullOptimized(
      const String& function_name,
      TokenPosition position = TokenPosition::kNoSource) {
    return CheckNullOptimized(function_name, CheckNullInstr::kNoSuchMethod,
                              position);
  }
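  // Sketch of the contrast with CheckNull above (hypothetical caller): the
  // checked value is re-pushed, so its consumers depend on the check.
  //
  //   body += LoadLocal(receiver_var);            // push value
  //   body += CheckNullOptimized(function_name);  // pop, check, push result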

  Fragment CheckNotDeeplyImmutable(CheckWritableInstr::Kind kind);

  // Records the extra unchecked entry point 'unchecked_entry' in
  // 'graph_entry'.
  void RecordUncheckedEntryPoint(GraphEntryInstr* graph_entry,
                                 FunctionEntryInstr* unchecked_entry);

  // Pop the index of the current entry-point off the stack. If there is any
  // entrypoint-tracing hook registered in a pragma for the function, it is
  // called with the name of the current function and the current entry-point
  // index.
  Fragment BuildEntryPointsIntrospection();

  // Builds a closure call with the given number of arguments. The target
  // closure (in bare instructions mode) or closure function (otherwise) is
  // taken from the top of the stack.
  // MoveArgument instructions should already have been added for the
  // arguments.
  Fragment ClosureCall(const Function& target_function,
                       TokenPosition position,
                       intptr_t type_args_len,
                       intptr_t argument_count,
                       const Array& argument_names,
                       const InferredTypeMetadata* result_type = nullptr);
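  // A hypothetical call-site sketch (argument setup abbreviated; assumes the
  // closure and the MoveArgument instructions were emitted beforehand):
  //
  //   body += LoadLocal(closure_var);  // closure (or closure function)
  //   /* ... MoveArgument instructions for the arguments ... */
  //   body += ClosureCall(target_function, position, /*type_args_len=*/0,
  //                       /*argument_count=*/2, Object::null_array());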

  // Pops function type arguments, instantiator type arguments, dst_type, and
  // value, and type-checks value against the type arguments.
  Fragment AssertAssignable(
      TokenPosition position,
      const String& dst_name,
      AssertAssignableInstr::Kind kind = AssertAssignableInstr::kUnknown);
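  // Expected expression-stack layout before this fragment, as implied by the
  // comment above (top of stack last; variable names hypothetical):
  //
  //   body += LoadLocal(value_var);
  //   body += Constant(dst_type);
  //   body += LoadLocal(instantiator_type_args_var);
  //   body += LoadLocal(function_type_args_var);
  //   body += AssertAssignable(position, dst_name);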

  // Returns true if we're currently recording deopt_id -> context level
  // mapping.
  bool is_recording_context_levels() const {
    return context_level_array_ != nullptr;
  }

  // Sets the current context level. It will be recorded for all subsequent
  // deopt ids (until it is adjusted again).
  void set_context_depth(intptr_t context_level) {
    context_depth_ = context_level;
  }

  // Reset the context level for the given deopt id (which was allocated
  // earlier).
  void reset_context_depth_for_deopt_id(intptr_t deopt_id);

  // Sets raw parameter variables to inferred constant values.
  Fragment InitConstantParameters();

  Fragment InvokeMathCFunction(MethodRecognizer::Kind recognized_kind,
                               intptr_t num_inputs);

  // Pops a double value and converts it to an int as specified by the
  // recognized method (kDoubleToInteger, kDoubleFloorToInt or
  // kDoubleCeilToInt).
  Fragment DoubleToInteger(MethodRecognizer::Kind recognized_kind);

  // Pops a double value and applies a unary math operation.
  Fragment UnaryDoubleOp(Token::Kind op);

  // Records coverage for this position, if the current VM mode supports it.
  Fragment RecordCoverage(TokenPosition position);
  Fragment RecordBranchCoverage(TokenPosition position);

  // Returns whether this function has a saved arguments descriptor array.
  bool has_saved_args_desc_array() {
    return function_.HasSavedArgumentsDescriptor();
  }

  // Returns the saved arguments descriptor array for functions that have
  // them.
  const Array& saved_args_desc_array() {
    ASSERT(has_saved_args_desc_array());
    return saved_args_desc_array_;
  }

 protected:
  intptr_t AllocateBlockId() { return ++last_used_block_id_; }
  Fragment RecordCoverageImpl(TokenPosition position, bool is_branch_coverage);
  intptr_t GetCoverageIndexFor(intptr_t encoded_position);

  const ParsedFunction* parsed_function_;
  const Function& function_;
  Thread* thread_;
  Zone* zone_;
  intptr_t osr_id_;
  // Contains (deopt_id, context_level) pairs.
  ZoneGrowableArray<intptr_t>* context_level_array_;
  intptr_t context_depth_;
  intptr_t last_used_block_id_;

  intptr_t current_try_index_;
  intptr_t next_used_try_index_;

  Value* stack_;
  InlineExitCollector* exit_collector_;

  const bool inlining_unchecked_entry_;
  const Array& saved_args_desc_array_;

  const Array& coverage_array_;
  GrowableArray<intptr_t> coverage_array_positions_;

 private:
  DISALLOW_COPY_AND_ASSIGN(BaseFlowGraphBuilder);
};

}  // namespace kernel
}  // namespace dart

#endif  // RUNTIME_VM_COMPILER_FRONTEND_BASE_FLOW_GRAPH_BUILDER_H_