#ifndef RUNTIME_VM_COMPILER_FRONTEND_BASE_FLOW_GRAPH_BUILDER_H_
#define RUNTIME_VM_COMPILER_FRONTEND_BASE_FLOW_GRAPH_BUILDER_H_

#if defined(DART_PRECOMPILED_RUNTIME)
#error "AOT runtime should not use compiler sources (including header files)"
#endif  // defined(DART_PRECOMPILED_RUNTIME)

#include <initializer_list>
class InlineExitCollector;

class BaseFlowGraphBuilder;
struct InferredTypeMetadata;
  DISALLOW_ALLOCATION();
};

Fragment operator+(const Fragment& first, const Fragment& second);
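
// Illustrative only -- a minimal sketch (not part of this header) of how
// Fragment values compose via the constructors and operators declared in this
// file.  `instr_a` through `instr_d` are assumed Instruction* values.
//
//   Fragment first(instr_a);         // single-instruction fragment
//   Fragment second(instr_b);
//   Fragment both = first + second;  // links `second` after `first`
//   both <<= instr_c;                // appends one more instruction in place
//   both += Fragment(instr_d);       // in-place append of another fragment
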
  BaseFlowGraphBuilder(
      const ParsedFunction* parsed_function,
      intptr_t last_used_block_id,
      intptr_t osr_id = DeoptId::kNone,
      ZoneGrowableArray<intptr_t>* context_level_array = nullptr,
      InlineExitCollector* exit_collector = nullptr,
      bool inlining_unchecked_entry = false)
      // Other member initializers elided in this excerpt.
      : coverage_array_(Array::ZoneHandle(
            parsed_function->function().GetCoverageArray())) {}

  Fragment LoadNativeField(const Slot& native_field,
                           InnerPointerAccess loads_inner_pointer,
                           bool calls_initializer = false);
  Fragment LoadNativeField(const Slot& native_field,
                           bool calls_initializer = false);
  Fragment LoadIndexed(classid_t class_id,
                       intptr_t index_scale = compiler::target::kWordSize,
                       bool index_unboxed = false,
                       AlignmentType alignment = kAlignedAccess);

  Fragment StoreLocal(LocalVariable* variable) {
    return StoreLocal(TokenPosition::kNoSource, variable);
  }

  // The StoreNativeField convenience overloads forward to the primary
  // overload, passing through stores_inner_pointer, kind, emit_store_barrier,
  // and memory_order.  Their full signatures, along with those of
  // StoreIndexedTypedData, StrictCompare, SmiBinaryOp, the BranchIf* helpers,
  // CheckStackOverflow, MemoryCopy, TestTypeArgsLen, AllocateClosure,
  // ClosureCall, and InvokeMathCFunction, appear in the member index below.
intptr_t GetNextDeoptId()
intptr_t temp_index() const
static constexpr intptr_t kNone
bool HasSavedArgumentsDescriptor() const
CompilerState & compiler_state()
Definition * definition() const
Fragment IntConstant(int64_t value)
IntMap< intptr_t > coverage_state_index_for_position_
Fragment SmiRelationalOp(Token::Kind kind)
Fragment TestDelayedTypeArgs(LocalVariable *closure, Fragment present, Fragment absent)
Fragment LoadLocal(LocalVariable *variable)
Fragment StoreNativeField(TokenPosition position, const Slot &slot, InnerPointerAccess stores_inner_pointer, StoreFieldInstr::Kind kind=StoreFieldInstr::Kind::kOther, StoreBarrierType emit_store_barrier=kEmitStoreBarrier, compiler::Assembler::MemoryOrder memory_order=compiler::Assembler::kRelaxedNonAtomic)
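
// Illustrative only -- a sketch (not part of this header) of the implicit
// operand-stack discipline behind StoreNativeField: the instance and then the
// value are pushed before the store pops both.  `builder`, `instance_var`,
// `value_var`, and `slot` are assumptions.
//
//   Fragment store;
//   store += builder.LoadLocal(instance_var);  // push the receiver
//   store += builder.LoadLocal(value_var);     // push the value to store
//   store += builder.StoreNativeField(slot);   // pops value and receiver
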
Fragment ThrowException(TokenPosition position)
Fragment RecordCoverageImpl(TokenPosition position, bool is_branch_coverage)
Fragment GenericCheckBound()
Definition * Peek(intptr_t depth=0)
void SetTempIndex(Definition *definition)
intptr_t current_try_index_
Fragment InitConstantParameters()
Fragment TestAnyTypeArgs(Fragment present, Fragment absent)
const Array & saved_args_desc_array_
Fragment ConvertUnboxedToUntagged()
Fragment LoadContextAt(int depth)
Fragment DebugStepCheck(TokenPosition position)
Fragment StoreNativeField(const Slot &slot, InnerPointerAccess stores_inner_pointer, StoreFieldInstr::Kind kind=StoreFieldInstr::Kind::kOther, StoreBarrierType emit_store_barrier=kEmitStoreBarrier, compiler::Assembler::MemoryOrder memory_order=compiler::Assembler::kRelaxedNonAtomic)
const Array & saved_args_desc_array()
ZoneGrowableArray< intptr_t > * context_level_array_
Fragment CalculateElementAddress(intptr_t index_scale)
intptr_t last_used_block_id_
JoinEntryInstr * BuildThrowNoSuchMethod()
InputsArray GetArguments(int count)
Fragment LoadFpRelativeSlot(intptr_t offset, CompileType result_type, Representation representation=kTagged)
Fragment InvokeMathCFunction(MethodRecognizer::Kind recognized_kind, intptr_t num_inputs)
Fragment LoadArgDescriptor()
void reset_context_depth_for_deopt_id(intptr_t deopt_id)
Fragment StoreField(const Field &field, StoreFieldInstr::Kind kind=StoreFieldInstr::Kind::kOther, StoreBarrierType emit_store_barrier=kEmitStoreBarrier)
JoinEntryInstr * BuildJoinEntry()
Fragment GuardFieldLength(const Field &field, intptr_t deopt_id)
Fragment CheckNotDeeplyImmutable(CheckWritableInstr::Kind kind)
Fragment StoreLocalRaw(TokenPosition position, LocalVariable *variable)
Fragment AllocateTypedData(TokenPosition position, classid_t class_id)
Fragment AllocateRecord(TokenPosition position, RecordShape shape)
intptr_t next_used_try_index_
Fragment StoreFpRelativeSlot(intptr_t offset)
Fragment MemoryCopy(classid_t src_cid, classid_t dest_cid, bool unboxed_inputs, bool can_overlap=true)
bool is_recording_context_levels() const
Fragment InstantiateTypeArguments(const TypeArguments &type_arguments)
Fragment CheckNull(TokenPosition position, LocalVariable *receiver, const String &function_name)
intptr_t AllocateBlockId()
Fragment StoreNativeField(TokenPosition position, const Slot &slot, StoreFieldInstr::Kind kind=StoreFieldInstr::Kind::kOther, StoreBarrierType emit_store_barrier=kEmitStoreBarrier, compiler::Assembler::MemoryOrder memory_order=compiler::Assembler::kRelaxedNonAtomic)
Fragment StoreStaticField(TokenPosition position, const Field &field)
void InlineBailout(const char *reason)
Fragment AllocateSmallRecord(TokenPosition position, RecordShape shape)
Fragment InstantiateType(const AbstractType &type)
Fragment StoreIndexedTypedData(classid_t class_id, intptr_t index_scale, bool index_unboxed, AlignmentType alignment=kAlignedAccess)
void FinalizeCoverageArray()
Fragment LoadUntagged(intptr_t offset)
Fragment RecordBranchCoverage(TokenPosition position)
Fragment TailCall(const Code &code)
Fragment AssertBool(TokenPosition position)
Fragment InstantiateDynamicTypeArguments()
Fragment BuildEntryPointsIntrospection()
Fragment AssertAssignable(TokenPosition position, const String &dst_name, AssertAssignableInstr::Kind kind=AssertAssignableInstr::kUnknown)
Fragment StoreNativeField(const Slot &slot, StoreFieldInstr::Kind kind=StoreFieldInstr::Kind::kOther, StoreBarrierType emit_store_barrier=kEmitStoreBarrier, compiler::Assembler::MemoryOrder memory_order=compiler::Assembler::kRelaxedNonAtomic)
Fragment StoreLocal(LocalVariable *variable)
Fragment LoadField(const Field &field, bool calls_initializer)
Fragment DropTempsPreserveTop(intptr_t num_temps_to_drop)
void SetCurrentTryIndex(intptr_t try_index)
Fragment ClosureCall(const Function &target_function, TokenPosition position, intptr_t type_args_len, intptr_t argument_count, const Array &argument_names, const InferredTypeMetadata *result_type=nullptr)
FunctionEntryInstr * BuildFunctionEntry(GraphEntryInstr *graph_entry)
intptr_t AllocateTryIndex()
Fragment GuardFieldClass(const Field &field, intptr_t deopt_id)
Fragment LoadNativeField(const Slot &native_field, InnerPointerAccess loads_inner_pointer, bool calls_initializer=false)
Fragment StoreFieldGuarded(const Field &field, StoreFieldInstr::Kind kind=StoreFieldInstr::Kind::kOther)
InlineExitCollector * exit_collector_
Fragment LoadStaticField(const Field &field, bool calls_initializer)
Fragment BranchIfTrue(TargetEntryInstr **then_entry, TargetEntryInstr **otherwise_entry, bool negate=false)
intptr_t GetCoverageIndexFor(intptr_t encoded_position)
Fragment BranchIfEqual(TargetEntryInstr **then_entry, TargetEntryInstr **otherwise_entry, bool negate=false)
Fragment UnboxedIntConstant(int64_t value, Representation representation)
Fragment RedefinitionWithType(const AbstractType &type)
Fragment LoadIndexed(classid_t class_id, intptr_t index_scale=compiler::target::kWordSize, bool index_unboxed=false, AlignmentType alignment=kAlignedAccess)
Fragment RecordCoverage(TokenPosition position)
Fragment Return(TokenPosition position)
bool InliningUncheckedEntry() const
TargetEntryInstr * BuildTargetEntry()
bool has_saved_args_desc_array()
const bool inlining_unchecked_entry_
Fragment Box(Representation from)
IndirectEntryInstr * BuildIndirectEntry(intptr_t indirect_id, intptr_t try_index)
intptr_t GetStackDepth() const
void RecordUncheckedEntryPoint(GraphEntryInstr *graph_entry, FunctionEntryInstr *unchecked_entry)
LocalVariable * MakeTemporary(const char *suffix=nullptr)
static const Field & MayCloneField(Zone *zone, const Field &field)
Fragment BinaryIntegerOp(Token::Kind op, Representation representation, bool is_truncating=false)
Fragment CheckStackOverflow(TokenPosition position, intptr_t stack_depth, intptr_t loop_depth)
Fragment TestTypeArgsLen(Fragment eq_branch, Fragment neq_branch, intptr_t num_type_args)
Fragment AllocateClosure(TokenPosition position, bool has_instantiator_type_args, bool is_generic, bool is_tear_off)
Fragment UnaryDoubleOp(Token::Kind op)
const Function & function_
const Array & coverage_array() const
Fragment StrictCompare(TokenPosition position, Token::Kind kind, bool number_check=false)
BaseFlowGraphBuilder(const ParsedFunction *parsed_function, intptr_t last_used_block_id, intptr_t osr_id=DeoptId::kNone, ZoneGrowableArray< intptr_t > *context_level_array=nullptr, InlineExitCollector *exit_collector=nullptr, bool inlining_unchecked_entry=false)
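
// Illustrative only -- a minimal construction sketch (not part of this
// header).  `parsed_function` and `last_block_id` are assumed to come from
// the surrounding compilation pass; the remaining parameters take the
// defaults declared above.
//
//   BaseFlowGraphBuilder builder(parsed_function, last_block_id);
//   Fragment body;
//   body += builder.CheckStackOverflowInPrologue(TokenPosition::kNoSource);
//   body += builder.Constant(Object::null_object());
//   body += builder.Return(TokenPosition::kNoSource);
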
Fragment AllocateObject(TokenPosition position, const Class &klass, intptr_t argument_count)
Fragment Constant(const Object &value)
Fragment StoreIndexed(classid_t class_id)
const ParsedFunction * parsed_function_
Fragment CheckNullOptimized(const String &name, CheckNullInstr::ExceptionType exception_type, TokenPosition position=TokenPosition::kNoSource)
void Push(Definition *definition)
Fragment SmiBinaryOp(Token::Kind op, bool is_truncating=false)
Fragment CheckNullOptimized(const String &function_name, TokenPosition position=TokenPosition::kNoSource)
intptr_t CurrentTryIndex() const
Fragment DoubleToInteger(MethodRecognizer::Kind recognized_kind)
void set_context_depth(intptr_t context_level)
Fragment BranchIfNull(TargetEntryInstr **then_entry, TargetEntryInstr **otherwise_entry, bool negate=false)
Fragment ConvertUntaggedToUnboxed()
Fragment DropTemporary(LocalVariable **temp)
Fragment CheckStackOverflowInPrologue(TokenPosition position)
Fragment ReachabilityFence()
Fragment Goto(JoinEntryInstr *destination)
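
// Illustrative only -- a sketch (not part of this header) of a two-way branch
// built from BranchIfTrue, BuildJoinEntry, and Goto.  `builder` and
// `condition` (a Fragment that leaves a boolean on the stack) are assumptions.
//
//   TargetEntryInstr* then_entry = nullptr;
//   TargetEntryInstr* otherwise_entry = nullptr;
//   Fragment test = condition;
//   test += builder.BranchIfTrue(&then_entry, &otherwise_entry);
//
//   JoinEntryInstr* join = builder.BuildJoinEntry();
//   Fragment then_body(then_entry);
//   then_body += builder.Goto(join);
//   Fragment otherwise_body(otherwise_entry);
//   otherwise_body += builder.Goto(join);
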
Fragment AllocateContext(const ZoneGrowableArray< const Slot * > &scope)
Fragment BranchIfStrictEqual(TargetEntryInstr **then_entry, TargetEntryInstr **otherwise_entry)
void Prepend(Instruction *start)
Fragment & operator<<=(Instruction *next)
Fragment(Instruction *instruction)
Fragment & operator+=(const Fragment &other)
Fragment(Instruction *entry, Instruction *current)
void IfTrueGoto(BaseFlowGraphBuilder *builder, JoinEntryInstr *join)
BlockEntryInstr * CreateSuccessorFor(BaseFlowGraphBuilder *builder, const TestFragment::SuccessorAddressArray &branches)
SuccessorAddressArray * true_successor_addresses
SuccessorAddressArray * false_successor_addresses
using SuccessorAddressArray = ZoneGrowableArray< TargetEntryInstr ** >
BlockEntryInstr * CreateTrueSuccessor(BaseFlowGraphBuilder *builder)
BlockEntryInstr * CreateFalseSuccessor(BaseFlowGraphBuilder *builder)
TestFragment Negate(bool negate)
void ConnectBranchesTo(BaseFlowGraphBuilder *builder, const TestFragment::SuccessorAddressArray &branches, JoinEntryInstr *join)
TestFragment(Instruction *entry, SuccessorAddressArray *true_successor_addresses, SuccessorAddressArray *false_successor_addresses)
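
// Illustrative only -- a sketch (not part of this header) of consuming a
// TestFragment.  `builder` and `test` (a TestFragment produced by a condition
// builder) are assumptions.
//
//   BlockEntryInstr* true_block = test.CreateTrueSuccessor(&builder);
//   BlockEntryInstr* false_block = test.CreateFalseSuccessor(&builder);
//
//   // Or wire every true edge straight to an existing join instead:
//   JoinEntryInstr* join = builder.BuildJoinEntry();
//   test.IfTrueGoto(&builder, join);
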
static constexpr intptr_t kWordSize
Fragment operator+(const Fragment &first, const Fragment &second)
Fragment operator<<(const Fragment &fragment, Instruction *next)
static constexpr intptr_t kInvalidTryIndex