dart::FlowGraphCompiler Class Reference

#include <flow_graph_compiler.h>

Inheritance diagram for dart::FlowGraphCompiler: dart::FlowGraphCompiler inherits dart::ValueObject.

Public Member Functions

 FlowGraphCompiler (compiler::Assembler *assembler, FlowGraph *flow_graph, const ParsedFunction &parsed_function, bool is_optimizing, SpeculativeInliningPolicy *speculative_policy, const GrowableArray< const Function * > &inline_id_to_function, const GrowableArray< TokenPosition > &inline_id_to_token_pos, const GrowableArray< intptr_t > &caller_inline_id, ZoneGrowableArray< const ICData * > *deopt_id_to_ic_data, CodeStatistics *stats=nullptr)
 
void ArchSpecificInitialization ()
 
 ~FlowGraphCompiler ()
 
compiler::Assembler * assembler () const
 
const ParsedFunction & parsed_function () const
 
const Function & function () const
 
const GrowableArray< BlockEntryInstr * > & block_order () const
 
const GrowableArray< const compiler::TableSelector * > & dispatch_table_call_targets () const
 
bool ForcedOptimization () const
 
const FlowGraph & flow_graph () const
 
BlockEntryInstr * current_block () const
 
void set_current_block (BlockEntryInstr *value)
 
Instruction * current_instruction () const
 
bool CanOptimize () const
 
bool CanOptimizeFunction () const
 
bool CanOSRFunction () const
 
bool is_optimizing () const
 
void InsertBSSRelocation (BSS::Relocation reloc)
 
void LoadBSSEntry (BSS::Relocation relocation, Register dst, Register tmp)
 
bool skip_body_compilation () const
 
void EnterIntrinsicMode ()
 
void ExitIntrinsicMode ()
 
bool intrinsic_mode () const
 
void set_intrinsic_flow_graph (const FlowGraph &flow_graph)
 
void set_intrinsic_slow_path_label (compiler::Label *label)
 
compiler::Label * intrinsic_slow_path_label () const
 
bool ForceSlowPathForStackOverflow () const
 
const GrowableArray< BlockInfo * > & block_info () const
 
void StatsBegin (Instruction *instr)
 
void StatsEnd (Instruction *instr)
 
void SpecialStatsBegin (intptr_t tag)
 
void SpecialStatsEnd (intptr_t tag)
 
GrowableArray< const Field * > & used_static_fields ()
 
void InitCompiler ()
 
void CompileGraph ()
 
void EmitPrologue ()
 
void VisitBlocks ()
 
void EmitFunctionEntrySourcePositionDescriptorIfNeeded ()
 
void Bailout (const char *reason)
 
bool TryIntrinsify ()
 
void EmitMove (Location dst, Location src, TemporaryRegisterAllocator *temp)
 
void EmitNativeMove (const compiler::ffi::NativeLocation &dst, const compiler::ffi::NativeLocation &src, TemporaryRegisterAllocator *temp)
 
void EmitMoveToNative (const compiler::ffi::NativeLocation &dst, Location src_loc, Representation src_type, TemporaryRegisterAllocator *temp)
 
void EmitMoveFromNative (Location dst_loc, Representation dst_type, const compiler::ffi::NativeLocation &src, TemporaryRegisterAllocator *temp)
 
void EmitMoveConst (const compiler::ffi::NativeLocation &dst, Location src, Representation src_type, TemporaryRegisterAllocator *temp)
 
bool CheckAssertAssignableTypeTestingABILocations (const LocationSummary &locs)
 
void GenerateAssertAssignable (CompileType *receiver_type, const InstructionSource &source, intptr_t deopt_id, Environment *env, const String &dst_name, LocationSummary *locs)
 
void GenerateCallerChecksForAssertAssignable (CompileType *receiver_type, const AbstractType &dst_type, compiler::Label *done)
 
void GenerateTTSCall (const InstructionSource &source, intptr_t deopt_id, Environment *env, Register reg_with_type, const AbstractType &dst_type, const String &dst_name, LocationSummary *locs)
 
void GenerateStubCall (const InstructionSource &source, const Code &stub, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, intptr_t deopt_id, Environment *env)
 
void GenerateNonLazyDeoptableStubCall (const InstructionSource &source, const Code &stub, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, ObjectPool::SnapshotBehavior snapshot_behavior=compiler::ObjectPoolBuilderEntry::kSnapshotable)
 
void GeneratePatchableCall (const InstructionSource &source, const Code &stub, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, ObjectPool::SnapshotBehavior snapshot_behavior=compiler::ObjectPoolBuilderEntry::kSnapshotable)
 
void GenerateDartCall (intptr_t deopt_id, const InstructionSource &source, const Code &stub, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
 
void GenerateStaticDartCall (intptr_t deopt_id, const InstructionSource &source, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, const Function &target, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
 
void GenerateInstanceOf (const InstructionSource &source, intptr_t deopt_id, Environment *env, const AbstractType &type, LocationSummary *locs)
 
void GenerateInstanceCall (intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, const ICData &ic_data, Code::EntryKind entry_kind, bool receiver_can_be_smi)
 
void GenerateStaticCall (intptr_t deopt_id, const InstructionSource &source, const Function &function, ArgumentsInfo args_info, LocationSummary *locs, const ICData &ic_data_in, ICData::RebindRule rebind_rule, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
 
void GenerateNumberTypeCheck (Register kClassIdReg, const AbstractType &type, compiler::Label *is_instance_lbl, compiler::Label *is_not_instance_lbl)
 
void GenerateStringTypeCheck (Register kClassIdReg, compiler::Label *is_instance_lbl, compiler::Label *is_not_instance_lbl)
 
void GenerateListTypeCheck (Register kClassIdReg, compiler::Label *is_instance_lbl)
 
bool GenerateSubtypeRangeCheck (Register class_id_reg, const Class &type_class, compiler::Label *is_subtype_lbl)
 
void EmitOptimizedInstanceCall (const Code &stub, const ICData &ic_data, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
 
void EmitInstanceCallJIT (const Code &stub, const ICData &ic_data, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, Code::EntryKind entry_kind)
 
void EmitPolymorphicInstanceCall (const PolymorphicInstanceCallInstr *call, const CallTargets &targets, ArgumentsInfo args_info, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, bool complete, intptr_t total_call_count, bool receiver_can_be_smi=true)
 
void EmitMegamorphicInstanceCall (const ICData &icdata, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs)
 
void EmitMegamorphicInstanceCall (const String &function_name, const Array &arguments_descriptor, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs)
 
void EmitInstanceCallAOT (const ICData &ic_data, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, Code::EntryKind entry_kind=Code::EntryKind::kNormal, bool receiver_can_be_smi=true)
 
void EmitTestAndCall (const CallTargets &targets, const String &function_name, ArgumentsInfo args_info, compiler::Label *failed, compiler::Label *match_found, intptr_t deopt_id, const InstructionSource &source_index, LocationSummary *locs, bool complete, intptr_t total_ic_calls, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
 
void EmitDispatchTableCall (int32_t selector_offset, const Array &arguments_descriptor)
 
Condition EmitEqualityRegConstCompare (Register reg, const Object &obj, bool needs_number_check, const InstructionSource &source, intptr_t deopt_id)
 
Condition EmitEqualityRegRegCompare (Register left, Register right, bool needs_number_check, const InstructionSource &source, intptr_t deopt_id)
 
Condition EmitBoolTest (Register value, BranchLabels labels, bool invert)
 
bool NeedsEdgeCounter (BlockEntryInstr *block)
 
void EmitEdgeCounter (intptr_t edge_id)
 
void RecordCatchEntryMoves (Environment *env)
 
void EmitCallToStub (const Code &stub, ObjectPool::SnapshotBehavior snapshot_behavior=compiler::ObjectPoolBuilderEntry::kSnapshotable)
 
void EmitJumpToStub (const Code &stub)
 
void EmitTailCallToStub (const Code &stub)
 
void EmitDropArguments (intptr_t count)
 
void EmitCallsiteMetadata (const InstructionSource &source, intptr_t deopt_id, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, Environment *env)
 
void EmitYieldPositionMetadata (const InstructionSource &source, intptr_t yield_index)
 
void EmitComment (Instruction *instr)
 
intptr_t StackSize () const
 
intptr_t ExtraStackSlotsOnOsrEntry () const
 
compiler::Label * GetJumpLabel (BlockEntryInstr *block_entry) const
 
bool WasCompacted (BlockEntryInstr *block_entry) const
 
compiler::Label * NextNonEmptyLabel () const
 
bool CanFallThroughTo (BlockEntryInstr *block_entry) const
 
BranchLabels CreateBranchLabels (BranchInstr *branch) const
 
void AddExceptionHandler (CatchBlockEntryInstr *entry)
 
void SetNeedsStackTrace (intptr_t try_index)
 
void AddCurrentDescriptor (UntaggedPcDescriptors::Kind kind, intptr_t deopt_id, const InstructionSource &source)
 
void AddDescriptor (UntaggedPcDescriptors::Kind kind, intptr_t pc_offset, intptr_t deopt_id, const InstructionSource &source, intptr_t try_index, intptr_t yield_index=UntaggedPcDescriptors::kInvalidYieldIndex)
 
void AddNullCheck (const InstructionSource &source, const String &name)
 
void RecordSafepoint (LocationSummary *locs, intptr_t slow_path_argument_count=0)
 
compiler::Label * AddDeoptStub (intptr_t deopt_id, ICData::DeoptReasonId reason, uint32_t flags=0)
 
CompilerDeoptInfo * AddDeoptIndexAtCall (intptr_t deopt_id, Environment *env)
 
CompilerDeoptInfo * AddSlowPathDeoptInfo (intptr_t deopt_id, Environment *env)
 
void AddSlowPathCode (SlowPathCode *slow_path)
 
void FinalizeExceptionHandlers (const Code &code)
 
void FinalizePcDescriptors (const Code &code)
 
ArrayPtr CreateDeoptInfo (compiler::Assembler *assembler)
 
void FinalizeStackMaps (const Code &code)
 
void FinalizeVarDescriptors (const Code &code)
 
void FinalizeCatchEntryMovesMap (const Code &code)
 
void FinalizeStaticCallTargetsTable (const Code &code)
 
void FinalizeCodeSourceMap (const Code &code)
 
const Class & double_class () const
 
const Class & mint_class () const
 
const Class & float32x4_class () const
 
const Class & float64x2_class () const
 
const Class & int32x4_class () const
 
const Class & BoxClassFor (Representation rep)
 
void SaveLiveRegisters (LocationSummary *locs)
 
void RestoreLiveRegisters (LocationSummary *locs)
 
Environment * SlowPathEnvironmentFor (Instruction *inst, intptr_t num_slow_path_args)
 
Environment * SlowPathEnvironmentFor (Environment *env, LocationSummary *locs, intptr_t num_slow_path_args)
 
intptr_t CurrentTryIndex () const
 
bool may_reoptimize () const
 
const ICData * GetOrAddInstanceCallICData (intptr_t deopt_id, const String &target_name, const Array &arguments_descriptor, intptr_t num_args_tested, const AbstractType &receiver_type, const Function &binary_smi_target)
 
const ICData * GetOrAddStaticCallICData (intptr_t deopt_id, const Function &target, const Array &arguments_descriptor, intptr_t num_args_tested, ICData::RebindRule rebind_rule)
 
const ZoneGrowableArray< const ICData * > & deopt_id_to_ic_data () const
 
Thread * thread () const
 
IsolateGroup * isolate_group () const
 
Zone * zone () const
 
void AddStubCallTarget (const Code &code)
 
void AddDispatchTableCallTarget (const compiler::TableSelector *selector)
 
ArrayPtr edge_counters_array () const
 
ArrayPtr InliningIdToFunction () const
 
void BeginCodeSourceRange (const InstructionSource &source)
 
void EndCodeSourceRange (const InstructionSource &source)
 
bool IsEmptyBlock (BlockEntryInstr *block) const
 
void EmitOptimizedStaticCall (const Function &function, const Array &arguments_descriptor, intptr_t size_with_type_args, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
 
- Public Member Functions inherited from dart::ValueObject
 ValueObject ()
 
 ~ValueObject ()
 

Static Public Member Functions

static bool SupportsUnboxedSimd128 ()
 
static bool CanConvertInt64ToDouble ()
 
static void GenerateIndirectTTSCall (compiler::Assembler *assembler, Register reg_with_type, intptr_t sub_type_cache_index)
 
static bool GenerateCidRangesCheck (compiler::Assembler *assembler, Register class_id_reg, const CidRangeVector &cid_ranges, compiler::Label *inside_range_lbl, compiler::Label *outside_range_lbl=nullptr, bool fall_through_if_inside=false)
 
static const CallTargets * ResolveCallTargetsForReceiverCid (intptr_t cid, const String &selector, const Array &args_desc_array)
 
static bool LookupMethodFor (int class_id, const String &name, const ArgumentsDescriptor &args_desc, Function *fn_return, bool *class_is_abstract_return=nullptr)
 
static int EmitTestAndCallCheckCid (compiler::Assembler *assembler, compiler::Label *label, Register class_id_reg, const CidRangeValue &range, int bias, bool jump_on_miss=true)
 

Static Public Attributes

static constexpr intptr_t kMaxNumberOfCidRangesToTest = 4
 

Friends

class BoxInt64Instr
 
class CheckNullInstr
 
class NullErrorSlowPath
 
class CheckStackOverflowInstr
 
class StoreIndexedInstr
 
class StoreFieldInstr
 
class CheckStackOverflowSlowPath
 
class GraphIntrinsicCodeGenScope
 

Detailed Description

Definition at line 338 of file flow_graph_compiler.h.
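FlowGraphCompiler drives the final code-generation phase: it walks the codegen block order of a FlowGraph, emits machine code through the compiler::Assembler, and accumulates the side tables (PC descriptors, stack maps, exception handlers, deoptimization info, code source maps) that are attached to the resulting Code object. The following is a minimal, hypothetical sketch of how a compilation pipeline would drive this class; graph construction, register allocation, and the Code::FinalizeCode arguments are assumed or elided, so this is an outline rather than the VM's exact sequence.

// Hypothetical driver, assuming flow_graph, parsed_function, etc. are already built.
compiler::Assembler assembler(&object_pool_builder);
FlowGraphCompiler compiler(&assembler, flow_graph, *parsed_function,
                           /*is_optimizing=*/true, &speculative_policy,
                           inline_id_to_function, inline_id_to_token_pos,
                           caller_inline_id, deopt_id_to_ic_data);
compiler.CompileGraph();  // InitCompiler() + VisitBlocks() + deferred slow paths

// Attach the collected metadata to the finalized Code object.
const Code& code = Code::Handle(/* Code::FinalizeCode(...), elided */);
compiler.FinalizePcDescriptors(code);
compiler.FinalizeExceptionHandlers(code);
compiler.FinalizeStackMaps(code);
compiler.FinalizeVarDescriptors(code);
compiler.FinalizeCatchEntryMovesMap(code);
compiler.FinalizeStaticCallTargetsTable(code);
compiler.FinalizeCodeSourceMap(code);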

Constructor & Destructor Documentation

◆ FlowGraphCompiler()

dart::FlowGraphCompiler::FlowGraphCompiler ( compiler::Assembler *  assembler,
FlowGraph *  flow_graph,
const ParsedFunction &  parsed_function,
bool  is_optimizing,
SpeculativeInliningPolicy *  speculative_policy,
const GrowableArray< const Function * > &  inline_id_to_function,
const GrowableArray< TokenPosition > &  inline_id_to_token_pos,
const GrowableArray< intptr_t > &  caller_inline_id,
ZoneGrowableArray< const ICData * > *  deopt_id_to_ic_data,
CodeStatistics *  stats = nullptr 
)

Definition at line 135 of file flow_graph_compiler.cc.

146 : thread_(Thread::Current()),
147 zone_(Thread::Current()->zone()),
148 assembler_(assembler),
149 parsed_function_(parsed_function),
150 flow_graph_(*flow_graph),
151 block_order_(*flow_graph->CodegenBlockOrder()),
152 current_block_(nullptr),
153 exception_handlers_list_(nullptr),
154 pc_descriptors_list_(nullptr),
155 compressed_stackmaps_builder_(nullptr),
156 code_source_map_builder_(nullptr),
157 catch_entry_moves_maps_builder_(nullptr),
158 block_info_(block_order_.length()),
159 deopt_infos_(),
160 static_calls_target_table_(),
161 indirect_gotos_(),
162 is_optimizing_(is_optimizing),
163 speculative_policy_(speculative_policy),
164 may_reoptimize_(false),
165 intrinsic_mode_(false),
166 stats_(stats),
167 double_class_(
168 Class::ZoneHandle(isolate_group()->object_store()->double_class())),
169 mint_class_(
170 Class::ZoneHandle(isolate_group()->object_store()->mint_class())),
171 float32x4_class_(Class::ZoneHandle(
172 isolate_group()->object_store()->float32x4_class())),
173 float64x2_class_(Class::ZoneHandle(
174 isolate_group()->object_store()->float64x2_class())),
175 int32x4_class_(
176 Class::ZoneHandle(isolate_group()->object_store()->int32x4_class())),
177 list_class_(Class::ZoneHandle(Library::Handle(Library::CoreLibrary())
178 .LookupClass(Symbols::List()))),
179 pending_deoptimization_env_(nullptr),
180 deopt_id_to_ic_data_(deopt_id_to_ic_data),
181 edge_counters_array_(Array::ZoneHandle()) {
182 ASSERT(flow_graph->parsed_function().function().ptr() ==
183 parsed_function.function().ptr());
184 if (is_optimizing) {
185 // No need to collect extra ICData objects created during compilation.
186 deopt_id_to_ic_data_ = nullptr;
187 } else {
188 const intptr_t len = thread()->compiler_state().deopt_id();
189 deopt_id_to_ic_data_->EnsureLength(len, nullptr);
190 }
191 ASSERT(assembler != nullptr);
192 ASSERT(!list_class_.IsNull());
193
194#if defined(PRODUCT)
195 const bool stack_traces_only = true;
196#else
197 const bool stack_traces_only = false;
198#endif
199 // Make sure that the function is at the position for inline_id 0.
200 ASSERT(inline_id_to_function.length() >= 1);
201 ASSERT(inline_id_to_function[0]->ptr() ==
202 flow_graph->parsed_function().function().ptr());
203 code_source_map_builder_ = new (zone_)
204 CodeSourceMapBuilder(zone_, stack_traces_only, caller_inline_id,
205 inline_id_to_token_pos, inline_id_to_function);
206
207 ArchSpecificInitialization();
208}

◆ ~FlowGraphCompiler()

dart::FlowGraphCompiler::~FlowGraphCompiler ( )

Member Function Documentation

◆ AddCurrentDescriptor()

void dart::FlowGraphCompiler::AddCurrentDescriptor ( UntaggedPcDescriptors::Kind  kind,
intptr_t  deopt_id,
const InstructionSource &  source 
)

Definition at line 934 of file flow_graph_compiler.cc.

936 {
937 AddDescriptor(kind, assembler()->CodeSize(), deopt_id, source,
938 CurrentTryIndex());
939}

◆ AddDeoptIndexAtCall()

CompilerDeoptInfo * dart::FlowGraphCompiler::AddDeoptIndexAtCall ( intptr_t  deopt_id,
Environment *  env 
)

Definition at line 1015 of file flow_graph_compiler.cc.

1016 {
1017 ASSERT(is_optimizing());
1018 ASSERT(!intrinsic_mode());
1019 ASSERT(!FLAG_precompiled_mode);
1020 if (env != nullptr) {
1021 env = env->GetLazyDeoptEnv(zone());
1022 }
1023 CompilerDeoptInfo* info =
1024 new (zone()) CompilerDeoptInfo(deopt_id, ICData::kDeoptAtCall,
1025 0, // No flags.
1026 env);
1027 info->set_pc_offset(assembler()->CodeSize());
1028 deopt_infos_.Add(info);
1029 return info;
1030}

◆ AddDeoptStub()

compiler::Label * dart::FlowGraphCompiler::AddDeoptStub ( intptr_t  deopt_id,
ICData::DeoptReasonId  reason,
uint32_t  flags = 0 
)

Definition at line 1248 of file flow_graph_compiler.cc.

1250 {
1251 if (intrinsic_mode()) {
1252 return intrinsic_slow_path_label_;
1253 }
1254
1255 // No deoptimization allowed when 'FLAG_precompiled_mode' is set.
1256 if (FLAG_precompiled_mode) {
1257 if (FLAG_trace_compiler) {
1258 THR_Print(
1259 "Retrying compilation %s, suppressing inlining of deopt_id:%" Pd "\n",
1260 parsed_function_.function().ToFullyQualifiedCString(), deopt_id);
1261 }
1262 ASSERT(speculative_policy_->AllowsSpeculativeInlining());
1263 ASSERT(deopt_id != 0); // longjmp must return non-zero value.
1264 thread()->long_jump_base()->Jump(
1265 deopt_id, Object::speculative_inlining_error());
1266 }
1267
1268 ASSERT(is_optimizing_);
1269 ASSERT(pending_deoptimization_env_ != nullptr);
1270 if (pending_deoptimization_env_->IsHoisted()) {
1271 flags |= ICData::kHoisted;
1272 }
1273 CompilerDeoptInfoWithStub* stub = new (zone()) CompilerDeoptInfoWithStub(
1274 deopt_id, reason, flags, pending_deoptimization_env_);
1275 deopt_infos_.Add(stub);
1276 return stub->entry_label();
1277}

◆ AddDescriptor()

void dart::FlowGraphCompiler::AddDescriptor ( UntaggedPcDescriptors::Kind  kind,
intptr_t  pc_offset,
intptr_t  deopt_id,
const InstructionSource &  source,
intptr_t  try_index,
intptr_t  yield_index = UntaggedPcDescriptors::kInvalidYieldIndex 
)

Definition at line 917 of file flow_graph_compiler.cc.

922 {
923 code_source_map_builder_->NoteDescriptor(kind, pc_offset, source);
924 // Don't emit deopt-descriptors in AOT mode.
925 if (FLAG_precompiled_mode && (kind == UntaggedPcDescriptors::kDeopt)) return;
926 // Use the token position of the original call in the root function if source
927 // has an inlining id.
928 const auto& root_pos = code_source_map_builder_->RootPosition(source);
929 pc_descriptors_list_->AddDescriptor(kind, pc_offset, deopt_id, root_pos,
930 try_index, yield_index);
931}

◆ AddDispatchTableCallTarget()

void dart::FlowGraphCompiler::AddDispatchTableCallTarget ( const compiler::TableSelector *  selector)

Definition at line 1010 of file flow_graph_compiler.cc.

1011 {
1012 dispatch_table_call_targets_.Add(selector);
1013}

◆ AddExceptionHandler()

void dart::FlowGraphCompiler::AddExceptionHandler ( CatchBlockEntryInstr *  entry)

Definition at line 903 of file flow_graph_compiler.cc.

903 {
904 exception_handlers_list_->AddHandler(
905 entry->catch_try_index(), entry->try_index(), assembler()->CodeSize(),
906 entry->is_generated(), entry->catch_handler_types(),
907 entry->needs_stacktrace());
908 if (is_optimizing()) {
909 RecordSafepoint(entry->locs());
910 }
911}

◆ AddNullCheck()

void dart::FlowGraphCompiler::AddNullCheck ( const InstructionSource &  source,
const String &  name 
)

Definition at line 941 of file flow_graph_compiler.cc.

942 {
943#if defined(DART_PRECOMPILER)
944 // If we are generating an AOT snapshot and have DWARF stack traces enabled,
945 // the AOT runtime is unable to obtain the pool index at runtime. Therefore,
946 // there is no reason to put the name into the pool in the first place.
947 // TODO(dartbug.com/40605): Move this info to the pc descriptors.
948 if (FLAG_precompiled_mode && FLAG_dwarf_stack_traces_mode) return;
949#endif
950 const intptr_t name_index =
951 assembler()->object_pool_builder().FindObject(name);
952 code_source_map_builder_->NoteNullCheck(assembler()->CodeSize(), source,
953 name_index);
954}

◆ AddSlowPathCode()

void dart::FlowGraphCompiler::AddSlowPathCode ( SlowPathCode *  slow_path)

Definition at line 860 of file flow_graph_compiler.cc.

860 {
861 slow_path_code_.Add(code);
862}

◆ AddSlowPathDeoptInfo()

CompilerDeoptInfo * dart::FlowGraphCompiler::AddSlowPathDeoptInfo ( intptr_t  deopt_id,
Environment *  env 
)

Definition at line 1032 of file flow_graph_compiler.cc.

1033 {
1034 ASSERT(deopt_id != DeoptId::kNone);
1035 deopt_id = DeoptId::ToDeoptAfter(deopt_id);
1036 CompilerDeoptInfo* info =
1037 new (zone()) CompilerDeoptInfo(deopt_id, ICData::kDeoptUnknown, 0, env);
1038 info->set_pc_offset(assembler()->CodeSize());
1039 deopt_infos_.Add(info);
1040 return info;
1041}

◆ AddStubCallTarget()

void dart::FlowGraphCompiler::AddStubCallTarget ( const Code &  code)

Definition at line 1003 of file flow_graph_compiler.cc.

1003 {
1004 DEBUG_ASSERT(code.IsNotTemporaryScopedHandle());
1005 static_calls_target_table_.Add(new (zone()) StaticCallsStruct(
1006 Code::kCallViaCode, Code::kDefaultEntry, assembler()->CodeSize(), nullptr,
1007 &code, nullptr));
1008}

◆ ArchSpecificInitialization()

void dart::FlowGraphCompiler::ArchSpecificInitialization ( )

◆ assembler()

compiler::Assembler * dart::FlowGraphCompiler::assembler ( ) const
inline

Definition at line 399 of file flow_graph_compiler.h.

399{ return assembler_; }

◆ Bailout()

void dart::FlowGraphCompiler::Bailout ( const char *  reason)

Definition at line 812 of file flow_graph_compiler.cc.

812 {
813 parsed_function_.Bailout("FlowGraphCompiler", reason);
814}

◆ BeginCodeSourceRange()

void dart::FlowGraphCompiler::BeginCodeSourceRange ( const InstructionSource &  source)

Definition at line 1994 of file flow_graph_compiler.cc.

1994 {
1995 code_source_map_builder_->BeginCodeSourceRange(assembler()->CodeSize(),
1996 source);
1997}

◆ block_info()

const GrowableArray< BlockInfo * > & dart::FlowGraphCompiler::block_info ( ) const
inline

Definition at line 460 of file flow_graph_compiler.h.

460{ return block_info_; }

◆ block_order()

const GrowableArray< BlockEntryInstr * > & dart::FlowGraphCompiler::block_order ( ) const
inline

Definition at line 402 of file flow_graph_compiler.h.

402 {
403 return block_order_;
404 }

◆ BoxClassFor()

const Class & dart::FlowGraphCompiler::BoxClassFor ( Representation  rep)

Definition at line 1975 of file flow_graph_compiler.cc.

1975 {
1976 switch (rep) {
1977 case kUnboxedFloat:
1978 case kUnboxedDouble:
1979 return double_class();
1980 case kUnboxedFloat32x4:
1981 return float32x4_class();
1982 case kUnboxedFloat64x2:
1983 return float64x2_class();
1984 case kUnboxedInt32x4:
1985 return int32x4_class();
1986 case kUnboxedInt64:
1987 return mint_class();
1988 default:
1989 UNREACHABLE();
1990 return Class::ZoneHandle();
1991 }
1992}

◆ CanConvertInt64ToDouble()

static bool dart::FlowGraphCompiler::CanConvertInt64ToDouble ( )
static

◆ CanFallThroughTo()

bool dart::FlowGraphCompiler::CanFallThroughTo ( BlockEntryInstr *  block_entry) const

Definition at line 848 of file flow_graph_compiler.cc.

848 {
849 return NextNonEmptyLabel() == GetJumpLabel(block_entry);
850}

◆ CanOptimize()

bool dart::FlowGraphCompiler::CanOptimize ( ) const

Definition at line 256 of file flow_graph_compiler.cc.

256 {
257 return isolate_group()->optimization_counter_threshold() >= 0;
258}

◆ CanOptimizeFunction()

bool dart::FlowGraphCompiler::CanOptimizeFunction ( ) const

Definition at line 260 of file flow_graph_compiler.cc.

260 {
261 return CanOptimize() && !parsed_function().function().HasBreakpoint();
262}

◆ CanOSRFunction()

bool dart::FlowGraphCompiler::CanOSRFunction ( ) const

Definition at line 264 of file flow_graph_compiler.cc.

264 {
265 return isolate_group()->use_osr() && CanOptimizeFunction() &&
266 !is_optimizing();
267}

◆ CheckAssertAssignableTypeTestingABILocations()

bool dart::FlowGraphCompiler::CheckAssertAssignableTypeTestingABILocations ( const LocationSummary &  locs)

Definition at line 2325 of file flow_graph_compiler.cc.

2326 {
2327 ASSERT(locs.in(AssertAssignableInstr::kInstancePos).IsRegister() &&
2328 locs.in(AssertAssignableInstr::kInstancePos).reg() ==
2329 TypeTestABI::kInstanceReg);
2330 ASSERT((locs.in(AssertAssignableInstr::kDstTypePos).IsConstant() &&
2331 locs.in(AssertAssignableInstr::kDstTypePos)
2332 .constant()
2333 .IsAbstractType()) ||
2334 (locs.in(AssertAssignableInstr::kDstTypePos).IsRegister() &&
2335 locs.in(AssertAssignableInstr::kDstTypePos).reg() ==
2336 TypeTestABI::kDstTypeReg));
2337 ASSERT(locs.in(AssertAssignableInstr::kInstantiatorTAVPos).IsRegister() &&
2338 locs.in(AssertAssignableInstr::kInstantiatorTAVPos).reg() ==
2339 TypeTestABI::kInstantiatorTypeArgumentsReg);
2340 ASSERT(locs.in(AssertAssignableInstr::kFunctionTAVPos).IsRegister() &&
2341 locs.in(AssertAssignableInstr::kFunctionTAVPos).reg() ==
2342 TypeTestABI::kFunctionTypeArgumentsReg);
2343 ASSERT(locs.out(0).IsRegister() &&
2344 locs.out(0).reg() == TypeTestABI::kInstanceReg);
2345 return true;
2346}

◆ CompileGraph()

void dart::FlowGraphCompiler::CompileGraph ( )

Definition at line 628 of file flow_graph_compiler.cc.

628 {
629 InitCompiler();
630
631#if !defined(TARGET_ARCH_IA32)
632 // For JIT we have multiple entrypoints functionality which moved the frame
633 // setup into the [TargetEntryInstr] (which will set the constant pool
634 // allowed bit to true). Despite this we still have to set the
635 // constant pool allowed bit to true here as well, because we can generate
636 // code for [CatchEntryInstr]s, which need the pool.
637 assembler()->set_constant_pool_allowed(true);
638#endif
639
640 EmitFunctionEntrySourcePositionDescriptorIfNeeded();
641 VisitBlocks();
642
643#if defined(DEBUG)
644 assembler()->Breakpoint();
645#endif
646
647 if (!skip_body_compilation()) {
648#if !defined(TARGET_ARCH_IA32)
649 ASSERT(assembler()->constant_pool_allowed());
650#endif
651 GenerateDeferredCode();
652 }
653
654 for (intptr_t i = 0; i < indirect_gotos_.length(); ++i) {
655 indirect_gotos_[i]->ComputeOffsetTable(this);
656 }
657}

◆ CreateBranchLabels()

BranchLabels dart::FlowGraphCompiler::CreateBranchLabels ( BranchInstr *  branch) const

Definition at line 852 of file flow_graph_compiler.cc.

852 {
853 compiler::Label* true_label = GetJumpLabel(branch->true_successor());
854 compiler::Label* false_label = GetJumpLabel(branch->false_successor());
855 compiler::Label* fall_through = NextNonEmptyLabel();
856 BranchLabels result = {true_label, false_label, fall_through};
857 return result;
858}
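A comparison's code generator typically asks for the three labels up front, emits the condition, and then branches only where it cannot fall through. A minimal, hypothetical sketch (EmitComparisonCode and BranchIf stand in for the per-architecture helpers):

// Sketch: emitting a branch using the labels bundle.
BranchLabels labels = compiler->CreateBranchLabels(branch);
Condition true_condition = EmitComparisonCode(compiler, labels);  // assumed helper
__ BranchIf(true_condition, labels.true_label);
if (labels.fall_through != labels.false_label) {
  // Only emit a jump when the false block is not the fall-through block.
  __ Jump(labels.false_label);
}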

◆ CreateDeoptInfo()

ArrayPtr dart::FlowGraphCompiler::CreateDeoptInfo ( compiler::Assembler *  assembler)

Definition at line 1294 of file flow_graph_compiler.cc.

1294 {
1295 // No deopt information if we precompile (no deoptimization allowed).
1296 if (FLAG_precompiled_mode) {
1297 return Array::empty_array().ptr();
1298 }
1299 // For functions with optional arguments, all incoming arguments are copied
1300 // to spill slots. The deoptimization environment does not track them.
1301 const Function& function = parsed_function().function();
1302 const intptr_t incoming_arg_count =
1303 function.MakesCopyOfParameters() ? 0 : function.num_fixed_parameters();
1304 DeoptInfoBuilder builder(zone(), incoming_arg_count, assembler);
1305
1306 intptr_t deopt_info_table_size = DeoptTable::SizeFor(deopt_infos_.length());
1307 if (deopt_info_table_size == 0) {
1308 return Object::empty_array().ptr();
1309 } else {
1310 const Array& array =
1311 Array::Handle(Array::New(deopt_info_table_size, Heap::kOld));
1312 Smi& offset = Smi::Handle();
1313 TypedData& info = TypedData::Handle();
1314 Smi& reason_and_flags = Smi::Handle();
1315 for (intptr_t i = 0; i < deopt_infos_.length(); i++) {
1316 offset = Smi::New(deopt_infos_[i]->pc_offset());
1317 info = deopt_infos_[i]->CreateDeoptInfo(this, &builder, array);
1318 reason_and_flags = DeoptTable::EncodeReasonAndFlags(
1319 deopt_infos_[i]->reason(), deopt_infos_[i]->flags());
1320 DeoptTable::SetEntry(array, i, offset, info, reason_and_flags);
1321 }
1322 return array.ptr();
1323 }
1324}

◆ current_block()

BlockEntryInstr * dart::FlowGraphCompiler::current_block ( ) const
inline

Definition at line 419 of file flow_graph_compiler.h.

419{ return current_block_; }

◆ current_instruction()

Instruction * dart::FlowGraphCompiler::current_instruction ( ) const
inline

Definition at line 422 of file flow_graph_compiler.h.

422{ return current_instruction_; }

◆ CurrentTryIndex()

intptr_t dart::FlowGraphCompiler::CurrentTryIndex ( ) const
inline

Definition at line 877 of file flow_graph_compiler.h.

877 {
878 if (current_block_ == nullptr) {
879 return kInvalidTryIndex;
880 }
881 return current_block_->try_index();
882 }

◆ deopt_id_to_ic_data()

const ZoneGrowableArray< const ICData * > & dart::FlowGraphCompiler::deopt_id_to_ic_data ( ) const
inline

Definition at line 908 of file flow_graph_compiler.h.

908 {
909 return *deopt_id_to_ic_data_;
910 }

◆ dispatch_table_call_targets()

const GrowableArray< const compiler::TableSelector * > & dart::FlowGraphCompiler::dispatch_table_call_targets ( ) const
inline

Definition at line 406 of file flow_graph_compiler.h.

406 {
407 return dispatch_table_call_targets_;
408 }

◆ double_class()

const Class & dart::FlowGraphCompiler::double_class ( ) const
inline

Definition at line 844 of file flow_graph_compiler.h.

844{ return double_class_; }

◆ edge_counters_array()

ArrayPtr dart::FlowGraphCompiler::edge_counters_array ( ) const
inline

Definition at line 919 of file flow_graph_compiler.h.

919{ return edge_counters_array_.ptr(); }

◆ EmitBoolTest()

Condition dart::FlowGraphCompiler::EmitBoolTest ( Register  value,
BranchLabels  labels,
bool  invert 
)

◆ EmitCallsiteMetadata()

void dart::FlowGraphCompiler::EmitCallsiteMetadata ( const InstructionSource &  source,
intptr_t  deopt_id,
UntaggedPcDescriptors::Kind  kind,
LocationSummary *  locs,
Environment *  env 
)

Definition at line 469 of file flow_graph_compiler.cc.

473 {
474 AddCurrentDescriptor(kind, deopt_id, source);
475 RecordSafepoint(locs);
476 RecordCatchEntryMoves(env);
477 if ((deopt_id != DeoptId::kNone) && !FLAG_precompiled_mode) {
478 // Marks either the continuation point in unoptimized code or the
479 // deoptimization point in optimized code, after call.
480 if (env != nullptr) {
481 // Note that we may lazy-deopt to the same IR instruction in unoptimized
482 // code or to another IR instruction (e.g. if LICM hoisted an instruction
483 // it will lazy-deopt to a Goto).
484 // If we happen to deopt to the beginning of an instruction in unoptimized
485 // code, we'll use the before deopt-id, otherwise the after deopt-id.
486 const intptr_t dest_deopt_id = env->LazyDeoptToBeforeDeoptId()
487 ? deopt_id
488 : DeoptId::ToDeoptAfter(deopt_id);
489 AddDeoptIndexAtCall(dest_deopt_id, env);
490 } else {
491 const intptr_t deopt_id_after = DeoptId::ToDeoptAfter(deopt_id);
492 // Add deoptimization continuation point after the call and before the
493 // arguments are removed.
494 AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, deopt_id_after,
495 source);
496 }
497 }
498}
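Call-emitting helpers such as GenerateStubCall invoke this immediately after the call instruction, so the recorded PC descriptor, safepoint, and lazy-deopt entry all describe the call's return address. A minimal, hypothetical sketch of that ordering (the stub call itself is illustrative):

// Sketch: emit a call, then record its metadata at the return address.
__ BranchLinkPatchable(stub);  // call; CodeSize() now points past the call
compiler->EmitCallsiteMetadata(source(), deopt_id(),
                               UntaggedPcDescriptors::kOther, locs(), env());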

◆ EmitCallToStub()

void dart::FlowGraphCompiler::EmitCallToStub ( const Code &  stub,
ObjectPool::SnapshotBehavior  snapshot_behavior = compiler::ObjectPoolBuilderEntry::kSnapshotable 
)

◆ EmitComment()

void dart::FlowGraphCompiler::EmitComment ( Instruction *  instr)

Definition at line 1621 of file flow_graph_compiler.cc.

1621 {
1622#if defined(INCLUDE_IL_PRINTER)
1623 char buffer[256];
1624 BufferFormatter f(buffer, sizeof(buffer));
1625 instr->PrintTo(&f);
1626 assembler()->Comment("%s", buffer);
1627#endif // defined(INCLUDE_IL_PRINTER)
1628}

◆ EmitDispatchTableCall()

void dart::FlowGraphCompiler::EmitDispatchTableCall ( int32_t  selector_offset,
const Array &  arguments_descriptor 
)

◆ EmitDropArguments()

void dart::FlowGraphCompiler::EmitDropArguments ( intptr_t  count)

Definition at line 2106 of file flow_graph_compiler.cc.

2106 {
2107 if (!is_optimizing()) {
2108 __ Drop(count);
2109 }
2110}

◆ EmitEdgeCounter()

void dart::FlowGraphCompiler::EmitEdgeCounter ( intptr_t  edge_id)

◆ EmitEqualityRegConstCompare()

Condition dart::FlowGraphCompiler::EmitEqualityRegConstCompare ( Register  reg,
const Object &  obj,
bool  needs_number_check,
const InstructionSource &  source,
intptr_t  deopt_id 
)

◆ EmitEqualityRegRegCompare()

Condition dart::FlowGraphCompiler::EmitEqualityRegRegCompare ( Register  left,
Register  right,
bool  needs_number_check,
const InstructionSource &  source,
intptr_t  deopt_id 
)

◆ EmitFunctionEntrySourcePositionDescriptorIfNeeded()

void dart::FlowGraphCompiler::EmitFunctionEntrySourcePositionDescriptorIfNeeded ( )

Definition at line 612 of file flow_graph_compiler.cc.

612 {
613 // When unwinding async stacks we might produce frames which correspond
614 // to future listeners which are going to be called when the future completes.
615 // These listeners are not yet called and thus their frame pc_offset is set
616 // to 0 - which does not actually correspond to any call- or yield- site
617 // inside the code object. Nevertheless we would like to be able to
618 // produce proper position information for it when symbolizing the stack.
619 // To achieve that in AOT mode (where we don't actually have
620 // |Function::token_pos| available) we instead emit an artificial descriptor
621 // at the very beginning of the function.
622 if (FLAG_precompiled_mode && flow_graph().function().IsClosureFunction()) {
623 code_source_map_builder_->WriteFunctionEntrySourcePosition(
624 InstructionSource(flow_graph().function().token_pos()));
625 }
626}

◆ EmitInstanceCallAOT()

void dart::FlowGraphCompiler::EmitInstanceCallAOT ( const ICData &  ic_data,
intptr_t  deopt_id,
const InstructionSource &  source,
LocationSummary *  locs,
Code::EntryKind  entry_kind = Code::EntryKind::kNormal,
bool  receiver_can_be_smi = true 
)

◆ EmitInstanceCallJIT()

void dart::FlowGraphCompiler::EmitInstanceCallJIT ( const Code &  stub,
const ICData &  ic_data,
intptr_t  deopt_id,
const InstructionSource &  source,
LocationSummary *  locs,
Code::EntryKind  entry_kind 
)

◆ EmitJumpToStub()

void dart::FlowGraphCompiler::EmitJumpToStub ( const Code &  stub)

◆ EmitMegamorphicInstanceCall() [1/2]

void dart::FlowGraphCompiler::EmitMegamorphicInstanceCall ( const ICData &  icdata,
intptr_t  deopt_id,
const InstructionSource &  source,
LocationSummary *  locs 
)
inline

Definition at line 690 of file flow_graph_compiler.h.

693 {
694 const String& name = String::Handle(icdata.target_name());
695 const Array& arguments_descriptor =
696 Array::Handle(icdata.arguments_descriptor());
697 EmitMegamorphicInstanceCall(name, arguments_descriptor, deopt_id, source,
698 locs);
699 }

◆ EmitMegamorphicInstanceCall() [2/2]

void dart::FlowGraphCompiler::EmitMegamorphicInstanceCall ( const String &  function_name,
const Array &  arguments_descriptor,
intptr_t  deopt_id,
const InstructionSource &  source,
LocationSummary *  locs 
)

◆ EmitMove()

void dart::FlowGraphCompiler::EmitMove ( Location  dst,
Location  src,
TemporaryRegisterAllocator *  temp 
)

◆ EmitMoveConst()

void dart::FlowGraphCompiler::EmitMoveConst ( const compiler::ffi::NativeLocation &  dst,
Location  src,
Representation  src_type,
TemporaryRegisterAllocator *  temp 
)

Definition at line 3483 of file flow_graph_compiler.cc.

3486 {
3487 ASSERT(src.IsConstant() || src.IsPairLocation());
3488 const auto& dst_type = dst.payload_type();
3489 Register scratch = kNoRegister;
3490 if (dst.IsExpressibleAsLocation() &&
3491 dst_type.IsExpressibleAsRepresentation() &&
3492 dst_type.AsRepresentationOverApprox(zone_) == src_type) {
3493 // We can directly emit the const in the right place and representation.
3494 const Location dst_loc = dst.AsLocation();
3495 assembler()->Comment("dst.IsExpressibleAsLocation() %s",
3496 dst_loc.ToCString());
3497 EmitMove(dst_loc, src, temp);
3498 } else {
3499 // We need an intermediate location.
3500 Location intermediate;
3501 if (dst_type.IsInt()) {
3502 if (TMP == kNoRegister) {
3503 scratch = temp->AllocateTemporary();
3504 intermediate = Location::RegisterLocation(scratch);
3505 } else {
3506 intermediate = Location::RegisterLocation(TMP);
3507 }
3508 } else {
3509 ASSERT(dst_type.IsFloat());
3510 intermediate = Location::FpuRegisterLocation(FpuTMP);
3511 }
3512 assembler()->Comment("constant using intermediate: %s",
3513 intermediate.ToCString());
3514
3515 if (src.IsPairLocation()) {
3516 for (intptr_t i : {0, 1}) {
3517 const Representation src_type_split =
3518 compiler::ffi::NativePrimitiveType::FromRepresentation(zone_, src_type)
3519 .Split(zone_, i)
3520 .AsRepresentation();
3521 const auto& intermediate_native =
3522 compiler::ffi::NativeLocation::FromLocation(zone_, intermediate,
3523 src_type_split);
3524 EmitMove(intermediate, src.AsPairLocation()->At(i), temp);
3525 EmitNativeMove(dst.Split(zone_, 2, i), intermediate_native, temp);
3526 }
3527 } else {
3528 const auto& intermediate_native =
3529 compiler::ffi::NativeLocation::FromLocation(zone_, intermediate,
3530 src_type);
3531 EmitMove(intermediate, src, temp);
3532 EmitNativeMove(dst, intermediate_native, temp);
3533 }
3534
3535 if (scratch != kNoRegister) {
3536 temp->ReleaseTemporary();
3537 }
3538 }
3539 return;
3540}

◆ EmitMoveFromNative()

void dart::FlowGraphCompiler::EmitMoveFromNative ( Location  dst_loc,
Representation  dst_type,
const compiler::ffi::NativeLocation &  src,
TemporaryRegisterAllocator *  temp 
)

Definition at line 3456 of file flow_graph_compiler.cc.

3460 {
3461 if (dst_loc.IsPairLocation()) {
3462 for (intptr_t i : {0, 1}) {
3463 const auto& dest_split = compiler::ffi::NativeLocation::FromPairLocation(
3464 zone_, dst_loc, dst_type, i);
3465 EmitNativeMove(dest_split, src.Split(zone_, 2, i), temp);
3466 }
3467 } else {
3468 const auto& dst =
3469 compiler::ffi::NativeLocation::FromLocation(zone_, dst_loc, dst_type);
3470 // Deal with sign mismatch caused by lack of kUnboxedUint64 representation.
3471 if (dst_type == kUnboxedInt64 &&
3472 src.container_type().AsPrimitive().representation() ==
3473 compiler::ffi::kUint64) {
3474 EmitNativeMove(dst.WithOtherNativeType(zone_, src.container_type(),
3475 src.container_type()),
3476 src, temp);
3477 } else {
3478 EmitNativeMove(dst, src, temp);
3479 }
3480 }
3481}

◆ EmitMoveToNative()

void dart::FlowGraphCompiler::EmitMoveToNative ( const compiler::ffi::NativeLocation &  dst,
Location  src_loc,
Representation  src_type,
TemporaryRegisterAllocator *  temp 
)

Definition at line 3428 of file flow_graph_compiler.cc.

3432 {
3433 if (src_loc.IsPairLocation()) {
3434 for (intptr_t i : {0, 1}) {
3435 const auto& src_split = compiler::ffi::NativeLocation::FromPairLocation(
3436 zone_, src_loc, src_type, i);
3437 EmitNativeMove(dst.Split(zone_, 2, i), src_split, temp);
3438 }
3439 } else {
3440 const auto& src =
3441 compiler::ffi::NativeLocation::FromLocation(zone_, src_loc, src_type);
3442 // Deal with sign mismatch caused by lack of kUnboxedUint64 representation.
3443 if (src_type == kUnboxedInt64 &&
3444 dst.container_type().AsPrimitive().representation() ==
3445 compiler::ffi::kUint64) {
3446 EmitNativeMove(dst,
3447 src.WithOtherNativeType(zone_, dst.container_type(),
3448 dst.container_type()),
3449 temp);
3450 } else {
3451 EmitNativeMove(dst, src, temp);
3452 }
3453 }
3454}

◆ EmitNativeMove()

void dart::FlowGraphCompiler::EmitNativeMove ( const compiler::ffi::NativeLocation &  dst,
const compiler::ffi::NativeLocation &  src,
TemporaryRegisterAllocator *  temp 
)

Definition at line 3298 of file flow_graph_compiler.cc.

3301 {
3302 if (destination.IsBoth()) {
3303 // Copy to both.
3304 const auto& both = destination.AsBoth();
3305 EmitNativeMove(both.location(0), source, temp);
3306 EmitNativeMove(both.location(1), source, temp);
3307 return;
3308 }
3309 if (source.IsBoth()) {
3310 // Copy from one of both.
3311 const auto& both = source.AsBoth();
3312 EmitNativeMove(destination, both.location(0), temp);
3313 return;
3314 }
3315
3316 const auto& src_payload_type = source.payload_type();
3317 const auto& dst_payload_type = destination.payload_type();
3318 const auto& src_container_type = source.container_type();
3319 const auto& dst_container_type = destination.container_type();
3320 const intptr_t src_payload_size = src_payload_type.SizeInBytes();
3321 const intptr_t dst_payload_size = dst_payload_type.SizeInBytes();
3322 const intptr_t src_container_size = src_container_type.SizeInBytes();
3323 const intptr_t dst_container_size = dst_container_type.SizeInBytes();
3324
3325 // This function does not know how to do larger mem copy moves yet.
3326 ASSERT(src_payload_type.IsPrimitive());
3327 ASSERT(dst_payload_type.IsPrimitive());
3328
3329 // This function does not deal with sign conversions yet.
3330 ASSERT(src_payload_type.IsSigned() == dst_payload_type.IsSigned());
3331
3332 // If the location, payload, and container are equal, we're done.
3333 if (source.Equals(destination) && src_payload_type.Equals(dst_payload_type) &&
3334 src_container_type.Equals(dst_container_type)) {
3335#if defined(TARGET_ARCH_RISCV64)
3336 // Except we might still need to adjust for the difference between C's
3337 // representation of uint32 (sign-extended to 64 bits) and Dart's
3338 // (zero-extended).
3339 EmitNativeMoveArchitecture(destination, source);
3340#endif
3341 return;
3342 }
3343
3344 // Solve discrepancies between container size and payload size.
3345 if (src_payload_type.IsInt() && dst_payload_type.IsInt() &&
3346 (src_payload_size != src_container_size ||
3347 dst_payload_size != dst_container_size)) {
3348 if (source.IsStack() && src_container_size > src_payload_size) {
3349 // Shrink loads since all loads are extending.
3350 return EmitNativeMove(
3351 destination,
3352 source.WithOtherNativeType(zone_, src_payload_type, src_payload_type),
3353 temp);
3354 }
3355 if (src_payload_size <= dst_payload_size &&
3356 src_container_size >= dst_container_size) {
3357 // The upper bits of the source are already properly sign or zero
3358 // extended, so just copy the required amount of bits.
3359 return EmitNativeMove(destination.WithOtherNativeType(
3360 zone_, dst_container_type, dst_container_type),
3361 source.WithOtherNativeType(
3362 zone_, dst_container_type, dst_container_type),
3363 temp);
3364 }
3365 if (src_payload_size >= dst_payload_size &&
3366 dst_container_size > dst_payload_size) {
3367 // The upper bits of the source are not properly sign or zero extended
3368 // to be copied to the target, so regard the source as smaller.
3369 return EmitNativeMove(
3370 destination.WithOtherNativeType(zone_, dst_container_type,
3371 dst_container_type),
3372 source.WithOtherNativeType(zone_, dst_payload_type, dst_payload_type),
3373 temp);
3374 }
3375 UNREACHABLE();
3376 }
3377 ASSERT(src_payload_size == src_container_size);
3378 ASSERT(dst_payload_size == dst_container_size);
3379
3380 // Split moves that are larger than kWordSize, these require separate
3381 // instructions on all architectures.
3382 if (compiler::target::kWordSize == 4 && src_container_size == 8 &&
3383 dst_container_size == 8 && !source.IsFpuRegisters() &&
3384 !destination.IsFpuRegisters()) {
3385 // TODO(40209): If this is stack to stack, we could use FpuTMP.
3386 // Test the impact on code size and speed.
3387 EmitNativeMove(destination.Split(zone_, 2, 0), source.Split(zone_, 2, 0),
3388 temp);
3389 EmitNativeMove(destination.Split(zone_, 2, 1), source.Split(zone_, 2, 1),
3390 temp);
3391 return;
3392 }
3393
3394 // Split moves from stack to stack, none of the architectures provides
3395 // memory to memory move instructions.
3396 if (source.IsStack() && destination.IsStack()) {
3397 Register scratch = temp->AllocateTemporary();
3398 ASSERT(scratch != kNoRegister);
3399#if defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
3400 ASSERT(scratch != TMP); // TMP is an argument register.
3401 ASSERT(scratch != TMP2); // TMP2 is an argument register.
3402#endif
3403 const auto& intermediate =
3404 *new (zone_) compiler::ffi::NativeRegistersLocation(
3405 zone_, dst_payload_type, dst_container_type, scratch);
3406 EmitNativeMove(intermediate, source, temp);
3407 EmitNativeMove(destination, intermediate, temp);
3408 temp->ReleaseTemporary();
3409 return;
3410 }
3411
3412 const bool sign_or_zero_extend = dst_container_size > src_container_size;
3413
3414 // No architecture supports sign extending with memory as destination.
3415 if (sign_or_zero_extend && destination.IsStack()) {
3416 ASSERT(source.IsRegisters());
3417 const auto& intermediate =
3418 source.WithOtherNativeType(zone_, dst_payload_type, dst_container_type);
3419 EmitNativeMove(intermediate, source, temp);
3420 EmitNativeMove(destination, intermediate, temp);
3421 return;
3422 }
3423
3424 // Do the simple architecture specific moves.
3425 EmitNativeMoveArchitecture(destination, source);
3426}
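These move helpers are the workhorses of FFI call lowering: each Dart argument is moved from its allocated Location into the location dictated by the native calling convention, and EmitNativeMove recursively splits any move the target cannot express directly (pair locations, stack-to-stack moves, size or sign adjustments). A hedged sketch of the marshalling loop; the marshaller accessors and helper are assumptions, not from this page:

// Sketch: marshalling arguments in a hypothetical FFI call's EmitNativeCode.
for (intptr_t i = 0; i < num_args; i++) {
  compiler->EmitMoveToNative(
      /*dst=*/marshaller.NativeLocationOfArg(i),  // assumed accessor
      /*src_loc=*/locs()->in(i),
      /*src_type=*/RequiredArgRepresentation(i),  // assumed helper
      &temp_alloc);
}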

◆ EmitOptimizedInstanceCall()

void dart::FlowGraphCompiler::EmitOptimizedInstanceCall ( const Code &  stub,
const ICData &  ic_data,
intptr_t  deopt_id,
const InstructionSource &  source,
LocationSummary *  locs,
Code::EntryKind  entry_kind = Code::EntryKind::kNormal 
)

◆ EmitOptimizedStaticCall()

void dart::FlowGraphCompiler::EmitOptimizedStaticCall ( const Function &  function,
const Array &  arguments_descriptor,
intptr_t  size_with_type_args,
intptr_t  deopt_id,
const InstructionSource &  source,
LocationSummary *  locs,
Code::EntryKind  entry_kind = Code::EntryKind::kNormal 
)

◆ EmitPolymorphicInstanceCall()

void dart::FlowGraphCompiler::EmitPolymorphicInstanceCall ( const PolymorphicInstanceCallInstr *  call,
const CallTargets &  targets,
ArgumentsInfo  args_info,
intptr_t  deopt_id,
const InstructionSource &  source,
LocationSummary *  locs,
bool  complete,
intptr_t  total_call_count,
bool  receiver_can_be_smi = true 
)

Definition at line 2050 of file flow_graph_compiler.cc.

2059 {
2060 ASSERT(call != nullptr);
2061 if (!FLAG_precompiled_mode) {
2062 if (FLAG_polymorphic_with_deopt) {
2063 compiler::Label* deopt =
2064 AddDeoptStub(deopt_id, ICData::kDeoptPolymorphicInstanceCallTestFail);
2065 compiler::Label ok;
2066 EmitTestAndCall(targets, call->function_name(), args_info,
2067 deopt, // No cid match.
2068 &ok, // Found cid.
2069 deopt_id, source, locs, complete, total_ic_calls,
2070 call->entry_kind());
2071 assembler()->Bind(&ok);
2072 } else {
2073 compiler::Label megamorphic, ok;
2074 EmitTestAndCall(targets, call->function_name(), args_info,
2075 &megamorphic, // No cid match.
2076 &ok, // Found cid.
2077 deopt_id, source, locs, complete, total_ic_calls,
2078 call->entry_kind());
2079 assembler()->Jump(&ok);
2080 assembler()->Bind(&megamorphic);
2081 // Instead of deoptimizing, do a megamorphic call when no matching
2082 // cid found.
2083 EmitMegamorphicInstanceCall(*call->ic_data(), deopt_id, source, locs);
2084 assembler()->Bind(&ok);
2085 }
2086 } else {
2087 if (complete) {
2088 compiler::Label ok;
2089 EmitTestAndCall(targets, call->function_name(), args_info,
2090 nullptr, // No cid match.
2091 &ok, // Found cid.
2092 deopt_id, source, locs, true, total_ic_calls,
2093 call->entry_kind());
2094 assembler()->Bind(&ok);
2095 } else {
2096 const ICData& unary_checks =
2097 ICData::ZoneHandle(zone(), call->ic_data()->AsUnaryClassChecks());
2098 EmitInstanceCallAOT(unary_checks, deopt_id, source, locs,
2099 call->entry_kind(), receiver_can_be_smi);
2100 }
2101 }
2102}

◆ EmitPrologue()

void dart::FlowGraphCompiler::EmitPrologue ( )

◆ EmitTailCallToStub()

void dart::FlowGraphCompiler::EmitTailCallToStub ( const Code &  stub)

◆ EmitTestAndCall()

void dart::FlowGraphCompiler::EmitTestAndCall ( const CallTargets &  targets,
const String &  function_name,
ArgumentsInfo  args_info,
compiler::Label *  failed,
compiler::Label *  match_found,
intptr_t  deopt_id,
const InstructionSource &  source_index,
LocationSummary *  locs,
bool  complete,
intptr_t  total_ic_calls,
Code::EntryKind  entry_kind = Code::EntryKind::kNormal 
)

Definition at line 2123 of file flow_graph_compiler.cc.

2133 {
2134 ASSERT(is_optimizing());
2135 ASSERT(complete || (failed != nullptr)); // Complete calls can't fail.
2136
2137 const Array& arguments_descriptor =
2138 Array::ZoneHandle(zone(), args_info.ToArgumentsDescriptor());
2139 EmitTestAndCallLoadReceiver(args_info.count_without_type_args,
2140 arguments_descriptor);
2141
2142 const int kNoCase = -1;
2143 int smi_case = kNoCase;
2144 int which_case_to_skip = kNoCase;
2145
2146 const int length = targets.length();
2147 ASSERT(length > 0);
2148 int non_smi_length = length;
2149
2150 // Find out if one of the classes in one of the cases is the Smi class. We
2151 // will be handling that specially.
2152 for (int i = 0; i < length; i++) {
2153 const intptr_t start = targets[i].cid_start;
2154 if (start > kSmiCid) continue;
2155 const intptr_t end = targets[i].cid_end;
2156 if (end >= kSmiCid) {
2157 smi_case = i;
2158 if (start == kSmiCid && end == kSmiCid) {
2159 // If this case has only the Smi class then we won't need to emit it at
2160 // all later.
2161 which_case_to_skip = i;
2162 non_smi_length--;
2163 }
2164 break;
2165 }
2166 }
2167
2168 if (smi_case != kNoCase) {
2169 compiler::Label after_smi_test;
2170 // If the call is complete and there are no other possible receiver
2171 // classes - then receiver can only be a smi value and we don't need
2172 // to check if it is a smi.
2173 if (!(complete && non_smi_length == 0)) {
2174 EmitTestAndCallSmiBranch(non_smi_length == 0 ? failed : &after_smi_test,
2175 /* jump_if_smi= */ false);
2176 }
2177
2178 // Do not use the code from the function, but let the code be patched so
2179 // that we can record the outgoing edges to other code.
2180 const Function& function = *targets.TargetAt(smi_case)->target;
2181 GenerateStaticDartCall(deopt_id, source_index,
2182 UntaggedPcDescriptors::kOther, locs, function,
2183 entry_kind);
2184 EmitDropArguments(args_info.size_with_type_args);
2185 if (match_found != nullptr) {
2186 __ Jump(match_found);
2187 }
2188 __ Bind(&after_smi_test);
2189 } else {
2190 if (!complete) {
2191 // Smi is not a valid class.
2192 EmitTestAndCallSmiBranch(failed, /* jump_if_smi = */ true);
2193 }
2194 }
2195
2196 if (non_smi_length == 0) {
2197 // If non_smi_length is 0 then only a Smi check was needed; the Smi check
2198 // above will fail if there was only one check and receiver is not Smi.
2199 return;
2200 }
2201
2202 bool add_megamorphic_call = false;
2203 int bias = 0;
2204
2205 // Value is not Smi.
2206 EmitTestAndCallLoadCid(EmitTestCidRegister());
2207
2208 int last_check = which_case_to_skip == length - 1 ? length - 2 : length - 1;
2209
2210 for (intptr_t i = 0; i < length; i++) {
2211 if (i == which_case_to_skip) continue;
2212 const bool is_last_check = (i == last_check);
2213 const int count = targets.TargetAt(i)->count;
2214 if (!is_last_check && !complete && count < (total_ic_calls >> 5)) {
2215 // This case is hit too rarely to be worth writing class-id checks inline
2216 // for. Note that we can't do this for calls with only one target because
2217 // the type propagator may have made use of that and expects a deopt if
2218 // a new class is seen at this call site. See IsMonomorphic.
2219 add_megamorphic_call = true;
2220 break;
2221 }
2222 compiler::Label next_test;
2223 if (!complete || !is_last_check) {
2224 bias = EmitTestAndCallCheckCid(assembler(),
2225 is_last_check ? failed : &next_test,
2226 EmitTestCidRegister(), targets[i], bias,
2227 /*jump_on_miss=*/true);
2228 }
2229 // Do not use the code from the function, but let the code be patched so
2230 // that we can record the outgoing edges to other code.
2231 const Function& function = *targets.TargetAt(i)->target;
2232 GenerateStaticDartCall(deopt_id, source_index,
2233 UntaggedPcDescriptors::kOther, locs, function,
2234 entry_kind);
2235 EmitDropArguments(args_info.size_with_type_args);
2236 if (!is_last_check || add_megamorphic_call) {
2237 __ Jump(match_found);
2238 }
2239 __ Bind(&next_test);
2240 }
2241 if (add_megamorphic_call) {
2242 EmitMegamorphicInstanceCall(function_name, arguments_descriptor, deopt_id,
2243 source_index, locs);
2244 }
2245}
void EmitDropArguments(intptr_t count)
void GenerateStaticDartCall(intptr_t deopt_id, const InstructionSource &source, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, const Function &target, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
static int EmitTestAndCallCheckCid(compiler::Assembler *assembler, compiler::Label *label, Register class_id_reg, const CidRangeValue &range, int bias, bool jump_on_miss=true)
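
Below is a minimal standalone sketch (ordinary C++, not VM code) of the dispatch strategy implemented above: targets are tried in order with inline class-id checks, and once a case's observed count falls below 1/32 of the total call count (the total_ic_calls >> 5 cutoff above), the remaining cold cases collapse into a single megamorphic lookup. All names in the sketch (Target, Dispatch, megamorphic_call) are illustrative.

#include <cstdint>
#include <functional>
#include <vector>

struct Target {
  intptr_t cid_start, cid_end;  // receiver class-id range served by 'code'
  intptr_t count;               // observed call frequency for this target
  std::function<void()> code;   // stands in for the compiled target
};

void Dispatch(const std::vector<Target>& targets, intptr_t receiver_cid,
              intptr_t total_ic_calls,
              const std::function<void()>& megamorphic_call) {
  for (size_t i = 0; i < targets.size(); ++i) {
    const bool is_last = (i == targets.size() - 1);
    // Too cold to test inline: abandon the cascade for a megamorphic call.
    if (!is_last && targets[i].count < (total_ic_calls >> 5)) {
      megamorphic_call();
      return;
    }
    if (receiver_cid >= targets[i].cid_start &&
        receiver_cid <= targets[i].cid_end) {
      targets[i].code();  // class id matched: invoke this target
      return;
    }
  }
  megamorphic_call();  // no inline case matched
}

The real emitter additionally special-cases Smi receivers with a tag test instead of a class-id load, and it never drops the sole target of a monomorphic call, since the type propagator relies on a deopt when a new class shows up at such a site.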

◆ EmitTestAndCallCheckCid()

int dart::FlowGraphCompiler::EmitTestAndCallCheckCid ( compiler::Assembler *  assembler,
compiler::Label *  label,
Register  class_id_reg,
const CidRangeValue &  range,
int  bias,
bool  jump_on_miss = true 
)
static

Definition at line 2305 of file flow_graph_compiler.cc.

2310 {
2311 const intptr_t cid_start = range.cid_start;
2312 if (range.IsSingleCid()) {
2313 assembler->CompareImmediate(class_id_reg, cid_start - bias);
2314 assembler->BranchIf(jump_on_miss ? NOT_EQUAL : EQUAL, label);
2315 } else {
2316 assembler->AddImmediate(class_id_reg, bias - cid_start);
2317 bias = cid_start;
2318 assembler->CompareImmediate(class_id_reg, range.Extent());
2319 assembler->BranchIf(jump_on_miss ? UNSIGNED_GREATER : UNSIGNED_LESS_EQUAL,
2320 label);
2321 }
2322 return bias;
2323}
void CompareImmediate(Register rn, int32_t value, Condition cond)
void BranchIf(Condition condition, Label *label, JumpDistance distance=kFarJump)
void AddImmediate(Register rd, int32_t value, Condition cond=AL)
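
A standalone model (plain C++, not VM code) of the bias trick above: rather than reloading the class id for every range, the register is adjusted once per multi-cid range so the test collapses into a single unsigned comparison against the range extent, and the accumulated adjustment is threaded back to the caller as the new bias.

#include <cassert>
#include <cstdint>

struct Range { intptr_t cid_start, cid_end; };

// 'reg' simulates the class-id register and holds cid - *bias on entry.
bool CheckCid(intptr_t* reg, intptr_t* bias, const Range& range) {
  if (range.cid_start == range.cid_end) {  // single cid: plain compare
    return *reg == range.cid_start - *bias;
  }
  *reg += *bias - range.cid_start;  // AddImmediate: re-bias the register
  *bias = range.cid_start;
  // One unsigned compare covers both "cid >= start" and "cid <= end".
  return static_cast<uintptr_t>(*reg) <=
         static_cast<uintptr_t>(range.cid_end - range.cid_start);
}

int main() {
  intptr_t reg = 57, bias = 0;  // receiver class id 57
  Range r1{10, 20}, r2{50, 60};
  assert(!CheckCid(&reg, &bias, r1));  // miss; reg now holds cid - 10
  assert(CheckCid(&reg, &bias, r2));   // hit inside [50, 60]
  return 0;
}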

◆ EmitYieldPositionMetadata()

void dart::FlowGraphCompiler::EmitYieldPositionMetadata ( const InstructionSource &  source,
intptr_t  yield_index 
)

Definition at line 500 of file flow_graph_compiler.cc.

502 {
503 AddDescriptor(UntaggedPcDescriptors::kOther, assembler()->CodeSize(),
504 DeoptId::kNone, source, CurrentTryIndex(), yield_index);
505}

◆ EndCodeSourceRange()

void dart::FlowGraphCompiler::EndCodeSourceRange ( const InstructionSource &  source)

Definition at line 1999 of file flow_graph_compiler.cc.

1999 {
2000 code_source_map_builder_->EndCodeSourceRange(assembler()->CodeSize(), source);
2001}
void EndCodeSourceRange(int32_t pc_offset, const InstructionSource &source)

◆ EnterIntrinsicMode()

void dart::FlowGraphCompiler::EnterIntrinsicMode ( )

◆ ExitIntrinsicMode()

void dart::FlowGraphCompiler::ExitIntrinsicMode ( )

◆ ExtraStackSlotsOnOsrEntry()

intptr_t dart::FlowGraphCompiler::ExtraStackSlotsOnOsrEntry ( ) const

Definition at line 824 of file flow_graph_compiler.cc.

824 {
825 ASSERT(flow_graph().IsCompiledForOsr());
826 const intptr_t stack_depth =
827 flow_graph().graph_entry()->osr_entry()->stack_depth();
828 const intptr_t num_stack_locals = flow_graph().num_stack_locals();
829 return StackSize() - stack_depth - num_stack_locals;
830}
intptr_t stack_depth() const
Definition: il.h:1756
GraphEntryInstr * graph_entry() const
Definition: flow_graph.h:268
intptr_t num_stack_locals() const
Definition: flow_graph.h:161
OsrEntryInstr * osr_entry() const
Definition: il.h:2007

◆ FinalizeCatchEntryMovesMap()

void dart::FlowGraphCompiler::FinalizeCatchEntryMovesMap ( const Code &  code)

Definition at line 1365 of file flow_graph_compiler.cc.

1365 {
1366#if defined(DART_PRECOMPILER)
1367 if (FLAG_precompiled_mode) {
1368 TypedData& maps = TypedData::Handle(
1369 catch_entry_moves_maps_builder_->FinalizeCatchEntryMovesMap());
1370 code.set_catch_entry_moves_maps(maps);
1371 return;
1372 }
1373#endif
1374 code.set_num_variables(flow_graph().variable_count());
1375}

◆ FinalizeCodeSourceMap()

void dart::FlowGraphCompiler::FinalizeCodeSourceMap ( const Code &  code)

Definition at line 1412 of file flow_graph_compiler.cc.

1412 {
1413 const Array& inlined_id_array =
1414 Array::Handle(zone(), code_source_map_builder_->InliningIdToFunction());
1415 code.set_inlined_id_to_function(inlined_id_array);
1416
1417 const CodeSourceMap& map =
1418 CodeSourceMap::Handle(code_source_map_builder_->Finalize());
1419 code.set_code_source_map(map);
1420
1421#if defined(DEBUG)
1422 // Force simulation through the last pc offset. This checks we can decode
1423 // the whole CodeSourceMap without hitting an unknown opcode, stack underflow,
1424 // etc.
1425 GrowableArray<const Function*> fs;
1426 GrowableArray<TokenPosition> tokens;
1427 code.GetInlinedFunctionsAtInstruction(code.Size() - 1, &fs, &tokens);
1428#endif
1429}

◆ FinalizeExceptionHandlers()

void dart::FlowGraphCompiler::FinalizeExceptionHandlers ( const Code &  code)

Definition at line 1279 of file flow_graph_compiler.cc.

1279 {
1280 ASSERT(exception_handlers_list_ != nullptr);
1281 const ExceptionHandlers& handlers = ExceptionHandlers::Handle(
1282 exception_handlers_list_->FinalizeExceptionHandlers(code.PayloadStart()));
1283 code.set_exception_handlers(handlers);
1284}
ExceptionHandlersPtr FinalizeExceptionHandlers(uword entry_point) const

◆ FinalizePcDescriptors()

void dart::FlowGraphCompiler::FinalizePcDescriptors ( const Code &  code)

Definition at line 1286 of file flow_graph_compiler.cc.

1286 {
1287 ASSERT(pc_descriptors_list_ != nullptr);
1288 const PcDescriptors& descriptors = PcDescriptors::Handle(
1289 pc_descriptors_list_->FinalizePcDescriptors(code.PayloadStart()));
1290 if (!is_optimizing_) descriptors.Verify(parsed_function_.function());
1291 code.set_pc_descriptors(descriptors);
1292}
PcDescriptorsPtr FinalizePcDescriptors(uword entry_point)

◆ FinalizeStackMaps()

void dart::FlowGraphCompiler::FinalizeStackMaps ( const Code &  code)

Definition at line 1326 of file flow_graph_compiler.cc.

1326 {
1327 ASSERT(compressed_stackmaps_builder_ != nullptr);
1328 // Finalize the compressed stack maps and add it to the code object.
1329 const auto& maps =
1330 CompressedStackMaps::Handle(compressed_stackmaps_builder_->Finalize());
1331 code.set_compressed_stackmaps(maps);
1332}
CompressedStackMapsPtr Finalize() const

◆ FinalizeStaticCallTargetsTable()

void dart::FlowGraphCompiler::FinalizeStaticCallTargetsTable ( const Code &  code)

Definition at line 1377 of file flow_graph_compiler.cc.

1377 {
1378 ASSERT(code.static_calls_target_table() == Array::null());
1379 const auto& calls = static_calls_target_table_;
1380 const intptr_t array_length = calls.length() * Code::kSCallTableEntryLength;
1381 const auto& targets =
1382 Array::Handle(zone(), Array::New(array_length, Heap::kOld));
1383
1384 StaticCallsTable entries(targets);
1385 auto& kind_type_and_offset = Smi::Handle(zone());
1386 for (intptr_t i = 0; i < calls.length(); i++) {
1387 auto entry = calls[i];
1388 kind_type_and_offset =
1389 Smi::New(Code::KindField::encode(entry->call_kind) |
1390 Code::EntryPointField::encode(entry->entry_point) |
1391 Code::OffsetField::encode(entry->offset));
1392 auto view = entries[i];
1393 view.Set<Code::kSCallTableKindAndOffset>(kind_type_and_offset);
1394 const Object* target = nullptr;
1395 if (entry->function != nullptr) {
1396 target = entry->function;
1397 view.Set<Code::kSCallTableFunctionTarget>(*entry->function);
1398 }
1399 if (entry->code != nullptr) {
1400 ASSERT(target == nullptr);
1401 target = entry->code;
1402 view.Set<Code::kSCallTableCodeOrTypeTarget>(*entry->code);
1403 }
1404 if (entry->dst_type != nullptr) {
1405 ASSERT(target == nullptr);
1406 view.Set<Code::kSCallTableCodeOrTypeTarget>(*entry->dst_type);
1407 }
1408 }
1409 code.set_static_calls_target_table(targets);
1410}
intptr_t length() const
static constexpr intptr_t encode(CallKind value)
Definition: bitfield.h:165
@ kSCallTableEntryLength
Definition: object.h:6984
@ kSCallTableFunctionTarget
Definition: object.h:6983
@ kSCallTableCodeOrTypeTarget
Definition: object.h:6982
@ kSCallTableKindAndOffset
Definition: object.h:6981
static ObjectPtr null()
Definition: object.h:433
ArrayOfTuplesView< Code::SCallTableEntry, std::tuple< Smi, Object, Function > > StaticCallsTable
Definition: object.h:13546
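
A hedged sketch of the entry encoding used above: the call kind, entry-point kind, and pc offset are packed into one small integer with bit fields, so each table entry costs a single Smi. The field widths below are illustrative, not the VM's actual layout.

#include <cassert>
#include <cstdint>

constexpr uint64_t kKindBits = 2, kEntryBits = 2, kOffsetBits = 28;

constexpr uint64_t Encode(uint64_t kind, uint64_t entry, uint64_t offset) {
  return kind | (entry << kKindBits) | (offset << (kKindBits + kEntryBits));
}

constexpr uint64_t DecodeOffset(uint64_t v) {
  return (v >> (kKindBits + kEntryBits)) & ((uint64_t{1} << kOffsetBits) - 1);
}

int main() {
  const uint64_t packed = Encode(/*kind=*/1, /*entry=*/0, /*offset=*/0x1234);
  assert(DecodeOffset(packed) == 0x1234);
  return 0;
}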

◆ FinalizeVarDescriptors()

void dart::FlowGraphCompiler::FinalizeVarDescriptors ( const Code &  code)

Definition at line 1334 of file flow_graph_compiler.cc.

1334 {
1335#if defined(PRODUCT)
1336// No debugger: no var descriptors.
1337#else
1338 if (code.is_optimized()) {
1339 // Optimized code does not need variable descriptors. They are
1340 // only stored in the unoptimized version.
1341 code.set_var_descriptors(Object::empty_var_descriptors());
1342 return;
1343 }
1344 LocalVarDescriptors& var_descs = LocalVarDescriptors::Handle();
1345 if (flow_graph().IsIrregexpFunction()) {
1346 // Eager local var descriptors computation for Irregexp function as it is
1347 // complicated to factor out.
1348 // TODO(srdjan): Consider canonicalizing and reusing the local var
1349 // descriptor for IrregexpFunction.
1350 ASSERT(parsed_function().scope() == nullptr);
1351 var_descs = LocalVarDescriptors::New(1);
1352 UntaggedLocalVarDescriptors::VarInfo info;
1353 info.set_kind(UntaggedLocalVarDescriptors::kContextVar);
1354 info.scope_id = 0;
1355 info.begin_pos = TokenPosition::kMinSource;
1356 info.end_pos = TokenPosition::kMinSource;
1357 info.set_index(compiler::target::frame_layout.FrameSlotForVariable(
1358 parsed_function().current_context_var()));
1359 var_descs.SetVar(0, Symbols::CurrentContextVar(), &info);
1360 }
1361 code.set_var_descriptors(var_descs);
1362#endif
1363}
static LocalVarDescriptorsPtr New(intptr_t num_variables)
Definition: object.cc:16134
static const TokenPosition kMinSource
FrameLayout frame_layout
Definition: stack_frame.cc:76

◆ float32x4_class()

const Class & dart::FlowGraphCompiler::float32x4_class ( ) const
inline

Definition at line 846 of file flow_graph_compiler.h.

846{ return float32x4_class_; }

◆ float64x2_class()

const Class & dart::FlowGraphCompiler::float64x2_class ( ) const
inline

Definition at line 847 of file flow_graph_compiler.h.

847{ return float64x2_class_; }

◆ flow_graph()

const FlowGraph & dart::FlowGraphCompiler::flow_graph ( ) const
inline

Definition at line 415 of file flow_graph_compiler.h.

415 {
416 return intrinsic_mode() ? *intrinsic_flow_graph_ : flow_graph_;
417 }

◆ ForcedOptimization()

bool dart::FlowGraphCompiler::ForcedOptimization ( ) const
inline

Definition at line 413 of file flow_graph_compiler.h.

413{ return function().ForceOptimize(); }
bool ForceOptimize() const
Definition: object.cc:9017

◆ ForceSlowPathForStackOverflow()

bool dart::FlowGraphCompiler::ForceSlowPathForStackOverflow ( ) const

Definition at line 276 of file flow_graph_compiler.cc.

276 {
277#if !defined(PRODUCT)
278 if (FLAG_stacktrace_every > 0 || FLAG_deoptimize_every > 0 ||
279 FLAG_gc_every > 0 ||
280 (isolate_group()->reload_every_n_stack_overflow_checks() > 0)) {
281 if (!IsolateGroup::IsSystemIsolateGroup(isolate_group())) {
282 return true;
283 }
284 }
285 if (FLAG_stacktrace_filter != nullptr &&
286 strstr(parsed_function().function().ToFullyQualifiedCString(),
287 FLAG_stacktrace_filter) != nullptr) {
288 return true;
289 }
290 if (is_optimizing() && FLAG_deoptimize_filter != nullptr &&
291 strstr(parsed_function().function().ToFullyQualifiedCString(),
292 FLAG_deoptimize_filter) != nullptr) {
293 return true;
294 }
295#endif // !defined(PRODUCT)
296 return false;
297}
static bool IsSystemIsolateGroup(const IsolateGroup *group)
Definition: isolate.cc:3605

◆ function()

const Function & dart::FlowGraphCompiler::function ( ) const
inline

Definition at line 401 of file flow_graph_compiler.h.

401{ return parsed_function_.function(); }

◆ GenerateAssertAssignable()

void dart::FlowGraphCompiler::GenerateAssertAssignable ( CompileType *  receiver_type,
const InstructionSource &  source,
intptr_t  deopt_id,
Environment *  env,
const String &  dst_name,
LocationSummary *  locs 
)

Definition at line 2811 of file flow_graph_compiler.cc.

2817 {
2818 ASSERT(!source.token_pos.IsClassifying());
2819 ASSERT(CheckAssertAssignableTypeTestingABILocations(*locs));
2820
2821 // Non-null if we have a constant destination type.
2822 const auto& dst_type =
2823 locs->in(AssertAssignableInstr::kDstTypePos).IsConstant()
2824 ? AbstractType::Cast(
2825 locs->in(AssertAssignableInstr::kDstTypePos).constant())
2826 : Object::null_abstract_type();
2827
2828 if (!dst_type.IsNull()) {
2829 ASSERT(dst_type.IsFinalized());
2830 if (dst_type.IsTopTypeForSubtyping()) return; // No code needed.
2831 }
2832
2833 compiler::Label done;
2834 Register type_reg = TypeTestABI::kDstTypeReg;
2835 // Generate caller-side checks to perform prior to calling the TTS.
2836 if (dst_type.IsNull()) {
2837 __ Comment("AssertAssignable for runtime type");
2838 // kDstTypeReg should already contain the destination type.
2839 } else {
2840 __ Comment("AssertAssignable for compile-time type");
2841 GenerateCallerChecksForAssertAssignable(receiver_type, dst_type, &done);
2842 if (dst_type.IsTypeParameter()) {
2843 // The resolved type parameter is in the scratch register.
2844 type_reg = TypeTestABI::kScratchReg;
2845 }
2846 }
2847
2848 GenerateTTSCall(source, deopt_id, env, type_reg, dst_type, dst_name, locs);
2849 __ Bind(&done);
2850}
bool CheckAssertAssignableTypeTestingABILocations(const LocationSummary &locs)
void GenerateTTSCall(const InstructionSource &source, intptr_t deopt_id, Environment *env, Register reg_with_type, const AbstractType &dst_type, const String &dst_name, LocationSummary *locs)
void GenerateCallerChecksForAssertAssignable(CompileType *receiver_type, const AbstractType &dst_type, compiler::Label *done)
static constexpr Register kScratchReg

◆ GenerateCallerChecksForAssertAssignable()

void dart::FlowGraphCompiler::GenerateCallerChecksForAssertAssignable ( CompileType *  receiver_type,
const AbstractType &  dst_type,
compiler::Label *  done 
)

Definition at line 2916 of file flow_graph_compiler.cc.

2919 {
2920 // Top types should be handled by the caller and cannot reach here.
2921 ASSERT(!dst_type.IsTopTypeForSubtyping());
2922
2923 // Set this to avoid marking the type testing stub for optimization.
2924 bool elide_info = false;
2925 // Call before any return points to set the destination type register and
2926 // mark the destination type TTS as needing optimization, unless it is
2927 // unlikely to be called.
2928 auto output_dst_type = [&]() -> void {
2929 // If we haven't handled the positive case of the type check on the call
2930 // site and we'll be using the TTS of the destination type, we want an
2931 // optimized type testing stub and thus record it in the [TypeUsageInfo].
2932 if (!elide_info) {
2933 if (auto const type_usage_info = thread()->type_usage_info()) {
2934 type_usage_info->UseTypeInAssertAssignable(dst_type);
2935 } else {
2936 ASSERT(!FLAG_precompiled_mode);
2937 }
2938 }
2939 __ LoadObject(TypeTestABI::kDstTypeReg, dst_type);
2940 };
2941
2942 // We can handle certain types and checks very efficiently on the call site,
2943 // meaning those need not be checked within the stubs (which may involve
2944 // a runtime call).
2945
2946 if (dst_type.IsObjectType()) {
2947 // Special case: non-nullable Object.
2948 ASSERT(dst_type.IsNonNullable());
2949 __ CompareObject(TypeTestABI::kInstanceReg, Object::null_object());
2950 __ BranchIf(NOT_EQUAL, done);
2951 // Fall back to type testing stub in caller to throw the exception.
2952 return output_dst_type();
2953 }
2954
2955 // If the int type is assignable to [dst_type] we special case it on the
2956 // caller side!
2957 const Type& int_type = Type::Handle(zone(), Type::IntType());
2958 bool is_non_smi = false;
2959 if (int_type.IsSubtypeOf(dst_type, Heap::kOld)) {
2960 __ BranchIfSmi(TypeTestABI::kInstanceReg, done);
2961 is_non_smi = true;
2962 } else if (!receiver_type->CanBeSmi()) {
2963 is_non_smi = true;
2964 }
2965
2966 if (dst_type.IsTypeParameter()) {
2967 // Special case: Instantiate the type parameter on the caller side, invoking
2968 // the TTS of the corresponding type parameter in the caller.
2969 const TypeParameter& type_param = TypeParameter::Cast(dst_type);
2970 if (!type_param.IsNonNullable()) {
2971 // If the type parameter is nullable when running in strong mode, we need
2972 // to handle null before calling the TTS because the type parameter may be
2973 // instantiated with a non-nullable type, where the TTS rejects null.
2974 __ CompareObject(TypeTestABI::kInstanceReg, Object::null_object());
2975 __ BranchIf(EQUAL, done);
2976 }
2977 const Register kTypeArgumentsReg =
2978 type_param.IsClassTypeParameter()
2979 ? TypeTestABI::kInstantiatorTypeArgumentsReg
2980 : TypeTestABI::kFunctionTypeArgumentsReg;
2981
2982 // Check if type arguments are null, i.e. equivalent to vector of dynamic.
2983 // If so, then the value is guaranteed assignable as dynamic is a top type.
2984 __ CompareObject(kTypeArgumentsReg, Object::null_object());
2985 __ BranchIf(EQUAL, done);
2986 // Put the instantiated type parameter into the scratch register, so its
2987 // TTS can be called by the caller.
2988 __ LoadCompressedFieldFromOffset(
2989 TypeTestABI::kScratchReg, kTypeArgumentsReg,
2990 compiler::target::TypeArguments::type_at_offset(type_param.index()));
2991 return output_dst_type();
2992 }
2993
2994 if (dst_type.IsFunctionType() || dst_type.IsRecordType()) {
2995 return output_dst_type();
2996 }
2997
2998 if (auto const hi = thread()->hierarchy_info()) {
2999 const Class& type_class = Class::Handle(zone(), dst_type.type_class());
3000
3001 if (hi->CanUseSubtypeRangeCheckFor(dst_type)) {
3002 const CidRangeVector& ranges = hi->SubtypeRangesForClass(
3003 type_class,
3004 /*include_abstract=*/false,
3005 /*exclude_null=*/!Instance::NullIsAssignableTo(dst_type));
3006 if (ranges.length() <= kMaxNumberOfCidRangesToTest) {
3007 if (is_non_smi) {
3008 __ LoadClassId(TypeTestABI::kScratchReg, TypeTestABI::kInstanceReg);
3009 } else {
3010 __ LoadClassIdMayBeSmi(TypeTestABI::kScratchReg,
3011 TypeTestABI::kInstanceReg);
3012 }
3013 GenerateCidRangesCheck(assembler(), TypeTestABI::kScratchReg, ranges,
3014 done);
3015 elide_info = true;
3016 } else if (IsListClass(type_class)) {
3017 __ LoadClassIdMayBeSmi(TypeTestABI::kScratchReg,
3018 TypeTestABI::kInstanceReg);
3019 GenerateListTypeCheck(TypeTestABI::kScratchReg, done);
3020 }
3021 }
3022 }
3023 output_dst_type();
3024}
static bool GenerateCidRangesCheck(compiler::Assembler *assembler, Register class_id_reg, const CidRangeVector &cid_ranges, compiler::Label *inside_range_lbl, compiler::Label *outside_range_lbl=nullptr, bool fall_through_if_inside=false)
static constexpr intptr_t kMaxNumberOfCidRangesToTest
void GenerateListTypeCheck(Register kClassIdReg, compiler::Label *is_instance_lbl)
static bool NullIsAssignableTo(const AbstractType &other)
Definition: object.cc:20674
TypeUsageInfo * type_usage_info() const
Definition: thread.h:605
static TypePtr IntType()
static word type_at_offset(intptr_t i)
MallocGrowableArray< CidRangeValue > CidRangeVector
Definition: il.h:253
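
One of the caller-side shortcuts above deserves a standalone illustration: when the int type is assignable to dst_type, any Smi receiver passes the check with a single tag-bit test and never reaches the TTS. The sketch below follows the common "low bit 0 means Smi" tagging convention; it is illustrative, not ABI-exact.

#include <cassert>
#include <cstdint>

inline bool IsSmi(uintptr_t tagged_value) {
  return (tagged_value & 1) == 0;  // clear low tag bit: small integer
}

int main() {
  const uintptr_t smi_42 = 42u << 1;      // Smi payload lives in the upper bits
  const uintptr_t heap_object = 0x1001;   // odd value: tagged heap pointer
  assert(IsSmi(smi_42));        // would branch straight to 'done'
  assert(!IsSmi(heap_object));  // falls through to the remaining checks
  return 0;
}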

◆ GenerateCidRangesCheck()

bool dart::FlowGraphCompiler::GenerateCidRangesCheck ( compiler::Assembler *  assembler,
Register  class_id_reg,
const CidRangeVector &  cid_ranges,
compiler::Label *  inside_range_lbl,
compiler::Label *  outside_range_lbl = nullptr,
bool  fall_through_if_inside = false 
)
static

Definition at line 2271 of file flow_graph_compiler.cc.

2277 {
2278 // If there are no valid class ranges, the check will fail. If we are
2279 // supposed to fall-through in the positive case, we'll explicitly jump to
2280 // the [outside_range_lbl].
2281 if (cid_ranges.is_empty()) {
2282 if (fall_through_if_inside) {
2283 assembler->Jump(outside_range_lbl);
2284 }
2285 return false;
2286 }
2287
2288 int bias = 0;
2289 for (intptr_t i = 0; i < cid_ranges.length(); ++i) {
2290 const CidRangeValue& range = cid_ranges[i];
2291 RELEASE_ASSERT(!range.IsIllegalRange());
2292 const bool last_round = i == (cid_ranges.length() - 1);
2293
2294 compiler::Label* jump_label = last_round && fall_through_if_inside
2295 ? outside_range_lbl
2296 : inside_range_lbl;
2297 const bool jump_on_miss = last_round && fall_through_if_inside;
2298
2299 bias = EmitTestAndCallCheckCid(assembler, jump_label, class_id_reg, range,
2300 bias, jump_on_miss);
2301 }
2302 return bias != 0;
2303}
#define RELEASE_ASSERT(cond)
Definition: assert.h:327
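
The loop above is easier to see in a scalar model. In this sketch (same assumptions as the CheckCid sketch earlier; not VM code) every range but the last branches to the inside label on a hit, while with fall_through_if_inside the final range branches to the outside label on a miss, so a hit simply falls through into the in-range code:

#include <cstdint>
#include <vector>

struct Range { intptr_t cid_start, cid_end; };

// Returns true when control should reach the "inside range" code.
bool CidRangesCheck(const std::vector<Range>& ranges, intptr_t cid,
                    bool fall_through_if_inside) {
  if (ranges.empty()) return false;  // no valid range: always a miss
  for (size_t i = 0; i < ranges.size(); ++i) {
    const bool last = (i == ranges.size() - 1);
    const bool inside = cid >= ranges[i].cid_start && cid <= ranges[i].cid_end;
    if (last && fall_through_if_inside) {
      return inside;           // jump-on-miss: a hit falls through
    }
    if (inside) return true;   // jump to inside_range_lbl
  }
  return false;                // every inline check missed
}

The boolean return value of the real function plays a different role: it reports whether the class-id register was left biased, so the caller knows the register no longer holds a plain class id.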

◆ GenerateDartCall()

void dart::FlowGraphCompiler::GenerateDartCall ( intptr_t  deopt_id,
const InstructionSource &  source,
const Code &  stub,
UntaggedPcDescriptors::Kind  kind,
LocationSummary *  locs,
Code::EntryKind  entry_kind = Code::EntryKind::kNormal 
)

◆ GenerateIndirectTTSCall()

static void dart::FlowGraphCompiler::GenerateIndirectTTSCall ( compiler::Assembler *  assembler,
Register  reg_with_type,
intptr_t  sub_type_cache_index 
)
static

◆ GenerateInstanceCall()

void dart::FlowGraphCompiler::GenerateInstanceCall ( intptr_t  deopt_id,
const InstructionSource &  source,
LocationSummary *  locs,
const ICData &  ic_data,
Code::EntryKind  entry_kind,
bool  receiver_can_be_smi 
)

Definition at line 1509 of file flow_graph_compiler.cc.

1514 {
1515 ICData& ic_data = ICData::ZoneHandle(ic_data_in.Original());
1516 if (FLAG_precompiled_mode) {
1517 ic_data = ic_data.AsUnaryClassChecks();
1518 EmitInstanceCallAOT(ic_data, deopt_id, source, locs, entry_kind,
1519 receiver_can_be_smi);
1520 return;
1521 }
1522 ASSERT(!ic_data.IsNull());
1523 if (is_optimizing() && (ic_data_in.NumberOfUsedChecks() == 0)) {
1524 // Emit IC call that will count and thus may need reoptimization at
1525 // function entry.
1526 ASSERT(may_reoptimize() || flow_graph().IsCompiledForOsr());
1527 EmitOptimizedInstanceCall(StubEntryFor(ic_data, /*optimized=*/true),
1528 ic_data, deopt_id, source, locs, entry_kind);
1529 return;
1530 }
1531
1532 if (is_optimizing()) {
1533 EmitMegamorphicInstanceCall(ic_data_in, deopt_id, source, locs);
1534 return;
1535 }
1536
1537 EmitInstanceCallJIT(StubEntryFor(ic_data, /*optimized=*/false), ic_data,
1538 deopt_id, source, locs, entry_kind);
1539}
void EmitInstanceCallJIT(const Code &stub, const ICData &ic_data, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, Code::EntryKind entry_kind)
void EmitOptimizedInstanceCall(const Code &stub, const ICData &ic_data, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
static const Code & StubEntryFor(const ICData &ic_data, bool optimized)

◆ GenerateInstanceOf()

void dart::FlowGraphCompiler::GenerateInstanceOf ( const InstructionSource &  source,
intptr_t  deopt_id,
Environment *  env,
const AbstractType &  type,
LocationSummary *  locs 
)

Definition at line 2721 of file flow_graph_compiler.cc.

2725 {
2726 ASSERT(type.IsFinalized());
2727 ASSERT(!type.IsTopTypeForInstanceOf()); // Already checked.
2728
2729 compiler::Label is_instance, is_not_instance;
2730 // 'null' is an instance of Null, Object*, Never*, void, and dynamic.
2731 // In addition, 'null' is an instance of any nullable type.
2732 // It is also an instance of FutureOr<T> if it is an instance of T.
2733 const AbstractType& unwrapped_type =
2734 AbstractType::Handle(type.UnwrapFutureOr());
2735 if (!unwrapped_type.IsTypeParameter() || unwrapped_type.IsNullable()) {
2736 // Only nullable type parameter remains nullable after instantiation.
2737 // See NullIsInstanceOf().
2738 __ CompareObject(TypeTestABI::kInstanceReg, Object::null_object());
2739 __ BranchIf(EQUAL,
2740 unwrapped_type.IsNullable() ? &is_instance : &is_not_instance);
2741 }
2742
2743 // Generate inline instanceof test.
2744 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone());
2745 // kInstanceReg, kInstantiatorTypeArgumentsReg, and kFunctionTypeArgumentsReg
2746 // are preserved across the call.
2747 test_cache =
2748 GenerateInlineInstanceof(source, type, &is_instance, &is_not_instance);
2749
2750 // test_cache is null if there is no fall-through.
2751 compiler::Label done;
2752 if (!test_cache.IsNull()) {
2753 // Generate Runtime call.
2754 __ LoadUniqueObject(TypeTestABI::kDstTypeReg, type);
2755 __ LoadUniqueObject(TypeTestABI::kSubtypeTestCacheReg, test_cache);
2756 GenerateStubCall(source, StubCode::InstanceOf(),
2757 /*kind=*/UntaggedPcDescriptors::kOther, locs, deopt_id,
2758 env);
2759 __ Jump(&done);
2760 }
2761 __ Bind(&is_not_instance);
2762 __ LoadObject(TypeTestABI::kInstanceOfResultReg, Bool::Get(false));
2763 __ Jump(&done);
2764
2765 __ Bind(&is_instance);
2766 __ LoadObject(TypeTestABI::kInstanceOfResultReg, Bool::Get(true));
2767 __ Bind(&done);
2768}
static const Bool & Get(bool value)
Definition: object.h:10801
void GenerateStubCall(const InstructionSource &source, const Code &stub, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, intptr_t deopt_id, Environment *env)
static constexpr Register kSubtypeTestCacheReg
static constexpr Register kInstanceOfResultReg
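
The null fast path above can be modeled compactly. In this simplified sketch (not VM code; the legacy top types such as Object*, void, and dynamic are omitted), null is an instance of a nullable type, and FutureOr<T> wrappers are unwrapped first because null is an instance of FutureOr<T> exactly when it is an instance of T:

#include <cassert>

struct Type {
  bool is_nullable;
  const Type* future_or_type_arg;  // non-null when this type is FutureOr<T>
};

bool NullIsInstanceOf(const Type& type) {
  const Type* t = &type;
  while (t->future_or_type_arg != nullptr) {
    t = t->future_or_type_arg;  // unwrap FutureOr<...> layers
  }
  return t->is_nullable;
}

int main() {
  Type int_type{false, nullptr};
  Type nullable_int{true, nullptr};
  Type future_or_nullable_int{false, &nullable_int};
  assert(!NullIsInstanceOf(int_type));
  assert(NullIsInstanceOf(nullable_int));
  assert(NullIsInstanceOf(future_or_nullable_int));
  return 0;
}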

◆ GenerateListTypeCheck()

void dart::FlowGraphCompiler::GenerateListTypeCheck ( Register  kClassIdReg,
compiler::Label *  is_instance_lbl 
)

Definition at line 1610 of file flow_graph_compiler.cc.

1612 {
1613 assembler()->Comment("ListTypeCheck");
1614 COMPILE_ASSERT((kImmutableArrayCid == kArrayCid + 1) &&
1615 (kGrowableObjectArrayCid == kArrayCid + 2));
1616 CidRangeVector ranges;
1617 ranges.Add({kArrayCid, kGrowableObjectArrayCid});
1618 GenerateCidRangesCheck(assembler(), class_id_reg, ranges, is_instance_lbl);
1619}

◆ GenerateNonLazyDeoptableStubCall()

void dart::FlowGraphCompiler::GenerateNonLazyDeoptableStubCall ( const InstructionSource &  source,
const Code &  stub,
UntaggedPcDescriptors::Kind  kind,
LocationSummary *  locs,
ObjectPool::SnapshotBehavior  snapshot_behavior = compiler::ObjectPoolBuilderEntry::kSnapshotable 
)

Definition at line 1476 of file flow_graph_compiler.cc.

1481 {
1482 EmitCallToStub(stub, snapshot_behavior);
1483 EmitCallsiteMetadata(source, DeoptId::kNone, kind, locs, /*env=*/nullptr);
1484}
void EmitCallToStub(const Code &stub, ObjectPool::SnapshotBehavior snapshot_behavior=compiler::ObjectPoolBuilderEntry::kSnapshotable)
void EmitCallsiteMetadata(const InstructionSource &source, intptr_t deopt_id, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, Environment *env)

◆ GenerateNumberTypeCheck()

void dart::FlowGraphCompiler::GenerateNumberTypeCheck ( Register  kClassIdReg,
const AbstractType &  type,
compiler::Label *  is_instance_lbl,
compiler::Label *  is_not_instance_lbl 
)

Definition at line 1581 of file flow_graph_compiler.cc.

1585 {
1586 assembler()->Comment("NumberTypeCheck");
1587 GrowableArray<intptr_t> args;
1588 if (type.IsNumberType()) {
1589 args.Add(kDoubleCid);
1590 args.Add(kMintCid);
1591 } else if (type.IsIntType()) {
1592 args.Add(kMintCid);
1593 } else if (type.IsDoubleType()) {
1594 args.Add(kDoubleCid);
1595 }
1596 CheckClassIds(class_id_reg, args, is_instance_lbl, is_not_instance_lbl);
1597}

◆ GeneratePatchableCall()

void dart::FlowGraphCompiler::GeneratePatchableCall ( const InstructionSource &  source,
const Code &  stub,
UntaggedPcDescriptors::Kind  kind,
LocationSummary *  locs,
ObjectPool::SnapshotBehavior  snapshot_behavior = compiler::ObjectPoolBuilderEntry::kSnapshotable 
)

◆ GenerateStaticCall()

void dart::FlowGraphCompiler::GenerateStaticCall ( intptr_t  deopt_id,
const InstructionSource &  source,
const Function &  function,
ArgumentsInfo  args_info,
LocationSummary *  locs,
const ICData &  ic_data_in,
ICData::RebindRule  rebind_rule,
Code::EntryKind  entry_kind = Code::EntryKind::kNormal 
)

Definition at line 1541 of file flow_graph_compiler.cc.

1548 {
1549 const ICData& ic_data = ICData::ZoneHandle(ic_data_in.Original());
1550 const Array& arguments_descriptor = Array::ZoneHandle(
1551 zone(), ic_data.IsNull() ? args_info.ToArgumentsDescriptor()
1552 : ic_data.arguments_descriptor());
1553 ASSERT(ArgumentsDescriptor(arguments_descriptor).TypeArgsLen() ==
1554 args_info.type_args_len);
1555 ASSERT(ArgumentsDescriptor(arguments_descriptor).Count() ==
1556 args_info.count_without_type_args);
1557 ASSERT(ArgumentsDescriptor(arguments_descriptor).Size() ==
1558 args_info.size_without_type_args);
1559 // Force-optimized functions lack the deopt info which allows patching of
1560 // optimized static calls.
1561 if (is_optimizing() && (!ForcedOptimization() || FLAG_precompiled_mode)) {
1562 EmitOptimizedStaticCall(function, arguments_descriptor,
1563 args_info.size_with_type_args, deopt_id, source,
1564 locs, entry_kind);
1565 } else {
1566 ICData& call_ic_data = ICData::ZoneHandle(zone(), ic_data.ptr());
1567 if (call_ic_data.IsNull()) {
1568 const intptr_t kNumArgsChecked = 0;
1569 call_ic_data =
1570 GetOrAddStaticCallICData(deopt_id, function, arguments_descriptor,
1571 kNumArgsChecked, rebind_rule)
1572 ->ptr();
1573 call_ic_data = call_ic_data.Original();
1574 }
1575 AddCurrentDescriptor(UntaggedPcDescriptors::kRewind, deopt_id, source);
1576 EmitUnoptimizedStaticCall(args_info.size_with_type_args, deopt_id, source,
1577 locs, call_ic_data, entry_kind);
1578 }
1579}
void EmitOptimizedStaticCall(const Function &function, const Array &arguments_descriptor, intptr_t size_with_type_args, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
const ICData * GetOrAddStaticCallICData(intptr_t deopt_id, const Function &target, const Array &arguments_descriptor, intptr_t num_args_tested, ICData::RebindRule rebind_rule)
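
The branch structure above amounts to a small decision table. A hedged sketch (booleans stand in for the compiler state queried above; not VM code):

#include <cstdio>

const char* StaticCallKind(bool is_optimizing, bool forced_optimization,
                           bool precompiled_mode) {
  // Force-optimized JIT code lacks deopt info, so its static calls cannot
  // be patched and must go through the unoptimized, IC-data-carrying path.
  if (is_optimizing && (!forced_optimization || precompiled_mode)) {
    return "optimized static call";
  }
  return "unoptimized static call";
}

int main() {
  std::printf("%s\n", StaticCallKind(true, true, false));  // unoptimized
  return 0;
}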

◆ GenerateStaticDartCall()

void dart::FlowGraphCompiler::GenerateStaticDartCall ( intptr_t  deopt_id,
const InstructionSource &  source,
UntaggedPcDescriptors::Kind  kind,
LocationSummary *  locs,
const Function &  target,
Code::EntryKind  entry_kind = Code::EntryKind::kNormal 
)

◆ GenerateStringTypeCheck()

void dart::FlowGraphCompiler::GenerateStringTypeCheck ( Register  kClassIdReg,
compiler::Label *  is_instance_lbl,
compiler::Label *  is_not_instance_lbl 
)

Definition at line 1599 of file flow_graph_compiler.cc.

1602 {
1603 assembler()->Comment("StringTypeCheck");
1604 GrowableArray<intptr_t> args;
1605 args.Add(kOneByteStringCid);
1606 args.Add(kTwoByteStringCid);
1607 CheckClassIds(class_id_reg, args, is_instance_lbl, is_not_instance_lbl);
1608}

◆ GenerateStubCall()

void dart::FlowGraphCompiler::GenerateStubCall ( const InstructionSource &  source,
const Code &  stub,
UntaggedPcDescriptors::Kind  kind,
LocationSummary *  locs,
intptr_t  deopt_id,
Environment *  env 
)

Definition at line 1464 of file flow_graph_compiler.cc.

1469 {
1470 ASSERT(FLAG_precompiled_mode ||
1471 (deopt_id != DeoptId::kNone && (!is_optimizing() || env != nullptr)));
1472 EmitCallToStub(stub);
1473 EmitCallsiteMetadata(source, deopt_id, kind, locs, env);
1474}

◆ GenerateSubtypeRangeCheck()

bool dart::FlowGraphCompiler::GenerateSubtypeRangeCheck ( Register  class_id_reg,
const Class &  type_class,
compiler::Label *  is_subtype_lbl 
)

Definition at line 2247 of file flow_graph_compiler.cc.

2249 {
2250 HierarchyInfo* hi = Thread::Current()->hierarchy_info();
2251 if (hi != nullptr) {
2252 const CidRangeVector& ranges =
2253 hi->SubtypeRangesForClass(type_class,
2254 /*include_abstract=*/false,
2255 /*exclude_null=*/false);
2256 if (ranges.length() <= kMaxNumberOfCidRangesToTest) {
2257 GenerateCidRangesCheck(assembler(), class_id_reg, ranges, is_subtype);
2258 return true;
2259 }
2260 }
2261
2262 // We don't have cid-ranges for subclasses, so we'll just test against the
2263 // class directly if it's non-abstract.
2264 if (!type_class.is_abstract()) {
2265 __ CompareImmediate(class_id_reg, type_class.id());
2266 __ BranchIf(EQUAL, is_subtype);
2267 }
2268 return false;
2269}
HierarchyInfo * hierarchy_info() const
Definition: thread.h:593

◆ GenerateTTSCall()

void dart::FlowGraphCompiler::GenerateTTSCall ( const InstructionSource &  source,
intptr_t  deopt_id,
Environment *  env,
Register  reg_with_type,
const AbstractType &  dst_type,
const String &  dst_name,
LocationSummary *  locs 
)

Definition at line 2855 of file flow_graph_compiler.cc.

2861 {
2862 ASSERT(!dst_name.IsNull());
2863 // We use 2 consecutive entries in the pool for the subtype cache and the
2864 // destination name. The second entry, namely [dst_name] seems to be unused,
2865 // but it will be used by the code throwing a TypeError if the type test fails
2866 // (see runtime/vm/runtime_entry.cc:TypeCheck). It will use pattern matching
2867 // on the call site to find out at which pool index the destination name is
2868 // located.
2869 const intptr_t sub_type_cache_index = __ object_pool_builder().AddObject(
2870 Object::null_object(), compiler::ObjectPoolBuilderEntry::kPatchable);
2871 const intptr_t dst_name_index = __ object_pool_builder().AddObject(
2872 dst_name, compiler::ObjectPoolBuilderEntry::kPatchable);
2873 ASSERT((sub_type_cache_index + 1) == dst_name_index);
2874 ASSERT(__ constant_pool_allowed());
2875
2876 __ Comment("TTSCall");
2877 // If the dst_type is known at compile time and instantiated, we know the
2878 // target TTS stub and so can use a PC-relative call when available.
2879 if (!dst_type.IsNull() && dst_type.IsInstantiated() &&
2880 CanPcRelativeCall(dst_type)) {
2881 __ LoadWordFromPoolIndex(TypeTestABI::kSubtypeTestCacheReg,
2882 sub_type_cache_index);
2883 __ GenerateUnRelocatedPcRelativeCall();
2884 AddPcRelativeTTSCallTypeTarget(dst_type);
2885 } else {
2886 GenerateIndirectTTSCall(assembler(), reg_with_type, sub_type_cache_index);
2887 }
2888
2889 EmitCallsiteMetadata(source, deopt_id, UntaggedPcDescriptors::kOther, locs,
2890 env);
2891}
static void GenerateIndirectTTSCall(compiler::Assembler *assembler, Register reg_with_type, intptr_t sub_type_cache_index)

◆ GetJumpLabel()

compiler::Label * dart::FlowGraphCompiler::GetJumpLabel ( BlockEntryInstr *  block_entry) const

Definition at line 832 of file flow_graph_compiler.cc.

833 {
834 const intptr_t block_index = block_entry->postorder_number();
835 return block_info_[block_index]->jump_label();
836}

◆ GetOrAddInstanceCallICData()

const ICData * dart::FlowGraphCompiler::GetOrAddInstanceCallICData ( intptr_t  deopt_id,
const String &  target_name,
const Array &  arguments_descriptor,
intptr_t  num_args_tested,
const AbstractType &  receiver_type,
const Function &  binary_smi_target 
)

Definition at line 1867 of file flow_graph_compiler.cc.

1873 {
1874 if ((deopt_id_to_ic_data_ != nullptr) &&
1875 ((*deopt_id_to_ic_data_)[deopt_id] != nullptr)) {
1876 const ICData* res = (*deopt_id_to_ic_data_)[deopt_id];
1877 ASSERT(res->deopt_id() == deopt_id);
1878 ASSERT(res->target_name() == target_name.ptr());
1879 ASSERT(res->NumArgsTested() == num_args_tested);
1880 ASSERT(res->TypeArgsLen() ==
1881 ArgumentsDescriptor(arguments_descriptor).TypeArgsLen());
1882 ASSERT(!res->is_static_call());
1883 ASSERT(res->receivers_static_type() == receiver_type.ptr());
1884 return res;
1885 }
1886
1887 auto& ic_data = ICData::ZoneHandle(zone());
1888 if (!binary_smi_target.IsNull()) {
1889 ASSERT(num_args_tested == 2);
1890 ASSERT(!binary_smi_target.IsNull());
1891 GrowableArray<intptr_t> cids(num_args_tested);
1892 cids.Add(kSmiCid);
1893 cids.Add(kSmiCid);
1894 ic_data = ICData::NewWithCheck(parsed_function().function(), target_name,
1895 arguments_descriptor, deopt_id,
1896 num_args_tested, ICData::kInstance, &cids,
1897 binary_smi_target, receiver_type);
1898 } else {
1899 ic_data = ICData::New(parsed_function().function(), target_name,
1900 arguments_descriptor, deopt_id, num_args_tested,
1901 ICData::kInstance, receiver_type);
1902 }
1903
1904 if (deopt_id_to_ic_data_ != nullptr) {
1905 (*deopt_id_to_ic_data_)[deopt_id] = &ic_data;
1906 }
1907 ASSERT(!ic_data.is_static_call());
1908 return &ic_data;
1909}
static ICDataPtr NewWithCheck(const Function &owner, const String &target_name, const Array &arguments_descriptor, intptr_t deopt_id, intptr_t num_args_tested, RebindRule rebind_rule, GrowableArray< intptr_t > *cids, const Function &target, const AbstractType &receiver_type=Object::null_abstract_type())
Definition: object.cc:17360
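
The cache discipline above is an ordinary memo table keyed by deopt id. A minimal model (not VM code; the VM stores zone-allocated handles in a growable array rather than a hash map):

#include <cassert>
#include <cstdint>
#include <string>
#include <unordered_map>

struct ICData {
  std::string target_name;
  int num_args_tested;
};

class ICDataCache {
 public:
  const ICData* GetOrAdd(intptr_t deopt_id, const std::string& name,
                         int num_args_tested) {
    auto it = cache_.find(deopt_id);
    if (it != cache_.end()) {
      // A cached entry must describe the same call site (cf. the ASSERTs).
      assert(it->second.target_name == name);
      return &it->second;
    }
    auto res = cache_.emplace(deopt_id, ICData{name, num_args_tested});
    return &res.first->second;
  }

 private:
  std::unordered_map<intptr_t, ICData> cache_;  // deopt id -> type feedback
};

Reusing one entry per deopt id is what lets unoptimized code, deoptimized code, and a later recompilation all feed the same type-feedback vector.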

◆ GetOrAddStaticCallICData()

const ICData * dart::FlowGraphCompiler::GetOrAddStaticCallICData ( intptr_t  deopt_id,
const Function &  target,
const Array &  arguments_descriptor,
intptr_t  num_args_tested,
ICData::RebindRule  rebind_rule 
)

Definition at line 1911 of file flow_graph_compiler.cc.

1916 {
1917 if ((deopt_id_to_ic_data_ != nullptr) &&
1918 ((*deopt_id_to_ic_data_)[deopt_id] != nullptr)) {
1919 const ICData* res = (*deopt_id_to_ic_data_)[deopt_id];
1920 ASSERT(res->deopt_id() == deopt_id);
1921 ASSERT(res->target_name() == target.name());
1922 ASSERT(res->NumArgsTested() == num_args_tested);
1923 ASSERT(res->TypeArgsLen() ==
1924 ArgumentsDescriptor(arguments_descriptor).TypeArgsLen());
1925 ASSERT(res->is_static_call());
1926 return res;
1927 }
1928
1929 const auto& ic_data = ICData::ZoneHandle(
1930 zone(), ICData::NewForStaticCall(parsed_function().function(), target,
1931 arguments_descriptor, deopt_id,
1932 num_args_tested, rebind_rule));
1933 if (deopt_id_to_ic_data_ != nullptr) {
1934 (*deopt_id_to_ic_data_)[deopt_id] = &ic_data;
1935 }
1936 return &ic_data;
1937}
static ICDataPtr NewForStaticCall(const Function &owner, const Function &target, const Array &arguments_descriptor, intptr_t deopt_id, intptr_t num_args_tested, RebindRule rebind_rule)
Definition: object.cc:17401

◆ InitCompiler()

void dart::FlowGraphCompiler::InitCompiler ( )

Definition at line 210 of file flow_graph_compiler.cc.

210 {
211 compressed_stackmaps_builder_ =
212 new (zone()) CompressedStackMapsBuilder(zone());
213 pc_descriptors_list_ = new (zone()) DescriptorList(
214 zone(), &code_source_map_builder_->inline_id_to_function());
215 exception_handlers_list_ =
216 new (zone()) ExceptionHandlerList(parsed_function().function());
217#if defined(DART_PRECOMPILER)
218 catch_entry_moves_maps_builder_ = new (zone()) CatchEntryMovesMapBuilder();
219#endif
220 block_info_.Clear();
221 // Initialize block info and search optimized (non-OSR) code for calls
222 // indicating a non-leaf routine and calls without IC data indicating
223 // possible reoptimization.
224
225 for (int i = 0; i < block_order_.length(); ++i) {
226 block_info_.Add(new (zone()) BlockInfo());
227 if (is_optimizing() && !flow_graph().IsCompiledForOsr()) {
228 BlockEntryInstr* entry = block_order_[i];
229 for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) {
230 Instruction* current = it.Current();
231 if (auto* branch = current->AsBranch()) {
232 current = branch->comparison();
233 }
234 if (auto* instance_call = current->AsInstanceCall()) {
235 const ICData* ic_data = instance_call->ic_data();
236 if ((ic_data == nullptr) || (ic_data->NumberOfUsedChecks() == 0)) {
237 may_reoptimize_ = true;
238 }
239 }
240 }
241 }
242 }
243
244 if (!is_optimizing() && FLAG_reorder_basic_blocks) {
245 // Initialize edge counter array.
246 const intptr_t num_counters = flow_graph_.preorder().length();
247 const Array& edge_counters =
248 Array::Handle(Array::New(num_counters, Heap::kOld));
249 for (intptr_t i = 0; i < num_counters; ++i) {
250 edge_counters.SetAt(i, Object::smi_zero());
251 }
252 edge_counters_array_ = edge_counters.ptr();
253 }
254}
const GrowableArray< const Function * > & inline_id_to_function() const
bool IsCompiledForOsr() const
Definition: flow_graph.h:460
const GrowableArray< BlockEntryInstr * > & preorder() const
Definition: flow_graph.h:203
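
The edge-counter array initialized above is conceptually one zeroed counter per basic block, indexed by preorder number. A minimal model (not VM code; the VM stores the counters as Smis in a heap Array so unoptimized code can bump them in place):

#include <cstdint>
#include <vector>

struct EdgeCounters {
  explicit EdgeCounters(size_t num_blocks) : counts(num_blocks, 0) {}
  void RecordEntry(size_t preorder_number) { ++counts[preorder_number]; }
  int64_t CountFor(size_t preorder_number) const {
    return counts[preorder_number];
  }
  std::vector<int64_t> counts;  // one counter per block, initially zero
};

The optimizer later reads these counts to put hot blocks first when FLAG_reorder_basic_blocks is enabled.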

◆ InliningIdToFunction()

ArrayPtr dart::FlowGraphCompiler::InliningIdToFunction ( ) const

◆ InsertBSSRelocation()

void dart::FlowGraphCompiler::InsertBSSRelocation ( BSS::Relocation  reloc)

Definition at line 269 of file flow_graph_compiler.cc.

269 {
270 const intptr_t offset = assembler()->InsertAlignedRelocation(reloc);
271 AddDescriptor(UntaggedPcDescriptors::kBSSRelocation, /*pc_offset=*/offset,
272 /*deopt_id=*/DeoptId::kNone, InstructionSource(),
273 /*try_index=*/-1);
274}
intptr_t InsertAlignedRelocation(BSS::Relocation reloc)

◆ int32x4_class()

const Class & dart::FlowGraphCompiler::int32x4_class ( ) const
inline

Definition at line 848 of file flow_graph_compiler.h.

848{ return int32x4_class_; }

◆ intrinsic_mode()

bool dart::FlowGraphCompiler::intrinsic_mode ( ) const
inline

Definition at line 443 of file flow_graph_compiler.h.

443{ return intrinsic_mode_; }

◆ intrinsic_slow_path_label()

compiler::Label * dart::FlowGraphCompiler::intrinsic_slow_path_label ( ) const
inline

Definition at line 453 of file flow_graph_compiler.h.

453 {
454 ASSERT(intrinsic_slow_path_label_ != nullptr);
455 return intrinsic_slow_path_label_;
456 }

◆ is_optimizing()

bool dart::FlowGraphCompiler::is_optimizing ( ) const
inline

Definition at line 427 of file flow_graph_compiler.h.

427{ return is_optimizing_; }

◆ IsEmptyBlock()

bool dart::FlowGraphCompiler::IsEmptyBlock ( BlockEntryInstr *  block) const

Definition at line 299 of file flow_graph_compiler.cc.

299 {
300 // Entry-points cannot be merged because they must have assembly
301 // prologue emitted which should not be included in any block they jump to.
302 return !block->IsGraphEntry() && !block->IsFunctionEntry() &&
303 !block->IsCatchBlockEntry() && !block->IsOsrEntry() &&
304 !block->IsIndirectEntry() && !block->HasNonRedundantParallelMove() &&
305 block->next()->IsGoto() &&
306 !block->next()->AsGoto()->HasNonRedundantParallelMove();
307}
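
Empty blocks matter because a branch that targets one can be threaded straight through to its successor. A toy version of that follow-the-gotos step (not VM code; assumes the goto chain is acyclic):

#include <cassert>
#include <vector>

struct Block {
  int goto_target = -1;  // -1: block has real work; else: empty forwarder
};

int SkipEmptyBlocks(const std::vector<Block>& blocks, int target) {
  while (blocks[target].goto_target != -1) {
    target = blocks[target].goto_target;  // thread through the empty block
  }
  return target;
}

int main() {
  // Blocks 1 and 2 only forward; block 3 contains real instructions.
  std::vector<Block> blocks{{-1}, {2}, {3}, {-1}};
  assert(SkipEmptyBlocks(blocks, 1) == 3);
  return 0;
}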

◆ isolate_group()

IsolateGroup * dart::FlowGraphCompiler::isolate_group ( ) const
inline

Definition at line 913 of file flow_graph_compiler.h.

913{ return thread_->isolate_group(); }

◆ LoadBSSEntry()

void dart::FlowGraphCompiler::LoadBSSEntry ( BSS::Relocation  relocation,
Register  dst,
Register  tmp 
)

◆ LookupMethodFor()

bool dart::FlowGraphCompiler::LookupMethodFor ( int  class_id,
const String &  name,
const ArgumentsDescriptor &  args_desc,
Function *  fn_return,
bool *  class_is_abstract_return = nullptr 
)
static

Definition at line 2021 of file flow_graph_compiler.cc.

2025 {
2026 auto thread = Thread::Current();
2027 auto zone = thread->zone();
2028 auto class_table = thread->isolate_group()->class_table();
2029 if (class_id < 0) return false;
2030 if (class_id >= class_table->NumCids()) return false;
2031
2032 ClassPtr raw_class = class_table->At(class_id);
2033 if (raw_class == nullptr) return false;
2034 Class& cls = Class::Handle(zone, raw_class);
2035 if (cls.IsNull()) return false;
2036 if (!cls.is_finalized()) return false;
2037 if (Array::Handle(cls.current_functions()).IsNull()) return false;
2038
2039 if (class_is_abstract_return != nullptr) {
2040 *class_is_abstract_return = cls.is_abstract();
2041 }
2042 Function& target_function =
2043 Function::Handle(zone, Resolver::ResolveDynamicForReceiverClass(
2044 cls, name, args_desc, /*allow_add=*/false));
2045 if (target_function.IsNull()) return false;
2046 *fn_return = target_function.ptr();
2047 return true;
2048}
ClassPtr At(intptr_t cid) const
Definition: class_table.h:362
ClassTable * class_table() const
Definition: isolate.h:496
static FunctionPtr ResolveDynamicForReceiverClass(const Class &receiver_class, const String &function_name, const ArgumentsDescriptor &args_desc, bool allow_add)
Definition: resolver.cc:148
Zone * zone() const
Definition: thread_state.h:37

◆ may_reoptimize()

bool dart::FlowGraphCompiler::may_reoptimize ( ) const
inline

Definition at line 884 of file flow_graph_compiler.h.

884{ return may_reoptimize_; }

◆ mint_class()

const Class & dart::FlowGraphCompiler::mint_class ( ) const
inline

Definition at line 845 of file flow_graph_compiler.h.

845{ return mint_class_; }

◆ NeedsEdgeCounter()

bool dart::FlowGraphCompiler::NeedsEdgeCounter ( BlockEntryInstr *  block)

Definition at line 1630 of file flow_graph_compiler.cc.

1630 {
1631 // Only emit an edge counter if there is no goto at the end of the block,
1632 // except for the entry block.
1633 return FLAG_reorder_basic_blocks &&
1634 (!block->last_instruction()->IsGoto() || block->IsFunctionEntry());
1635}

◆ NextNonEmptyLabel()

compiler::Label * dart::FlowGraphCompiler::NextNonEmptyLabel ( ) const

Definition at line 843 of file flow_graph_compiler.cc.

843 {
844 const intptr_t current_index = current_block()->postorder_number();
845 return block_info_[current_index]->next_nonempty_label();
846}
intptr_t postorder_number() const
Definition: il.h:1658
BlockEntryInstr * current_block() const

◆ parsed_function()

const ParsedFunction & dart::FlowGraphCompiler::parsed_function ( ) const
inline

Definition at line 400 of file flow_graph_compiler.h.

400{ return parsed_function_; }

◆ RecordCatchEntryMoves()

void dart::FlowGraphCompiler::RecordCatchEntryMoves ( Environment *  env)

Definition at line 427 of file flow_graph_compiler.cc.

427 {
428#if defined(DART_PRECOMPILER)
429 const intptr_t try_index = CurrentTryIndex();
430 if (is_optimizing() && env != nullptr && (try_index != kInvalidTryIndex)) {
431 env = env->Outermost();
432 CatchBlockEntryInstr* catch_block =
433 flow_graph().graph_entry()->GetCatchEntry(try_index);
434 const GrowableArray<Definition*>* idefs =
435 catch_block->initial_definitions();
436 catch_entry_moves_maps_builder_->NewMapping(assembler()->CodeSize());
437
438 for (intptr_t i = 0; i < flow_graph().variable_count(); ++i) {
439 // Don't sync captured parameters. They are not in the environment.
440 if (flow_graph().captured_parameters()->Contains(i)) continue;
441 auto param = (*idefs)[i]->AsParameter();
442
443 // Don't sync values that have been replaced with constants.
444 if (param == nullptr) continue;
445 RELEASE_ASSERT(param->env_index() == i);
446 Location dst = param->location();
447
448 // Don't sync exception or stack trace variables.
449 if (dst.IsRegister()) continue;
450
451 Location src = env->LocationAt(i);
452 // Can only occur if AllocationSinking is enabled - and it is disabled
453 // in functions with try.
454 ASSERT(!src.IsInvalid());
455 const Representation src_type =
456 env->ValueAt(i)->definition()->representation();
457 const auto move = CatchEntryMoveFor(assembler(), src_type, src,
458 LocationToStackIndex(dst));
459 if (!move.IsRedundant()) {
460 catch_entry_moves_maps_builder_->Append(move);
461 }
462 }
463
464 catch_entry_moves_maps_builder_->EndMapping();
465 }
466#endif // defined(DART_PRECOMPILER)
467}
bool Contains(intptr_t i) const
Definition: bit_vector.h:91
GrowableArray< Definition * > * initial_definitions()
Definition: il.h:1917
void NewMapping(intptr_t pc_offset)
void Append(const CatchEntryMove &move)
BitVector * captured_parameters() const
Definition: flow_graph.h:462
intptr_t variable_count() const
Definition: flow_graph.h:143
CatchBlockEntryInstr * GetCatchEntry(intptr_t index)
Definition: il.cc:1248
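
The filtering logic above reduces to: record a move only for variables that survived optimization in a stack slot. A schematic version (not VM code) under that reading:

#include <cstdint>
#include <vector>

struct VarState {
  bool is_captured;      // lives in a context, not in the environment
  bool is_constant;      // definition was replaced by a constant
  bool dst_is_register;  // exception/stack-trace values arrive in registers
  intptr_t src_slot, dst_slot;
};

struct Move { intptr_t src_slot, dst_slot; };

std::vector<Move> CollectCatchEntryMoves(const std::vector<VarState>& vars) {
  std::vector<Move> moves;
  for (const VarState& v : vars) {
    if (v.is_captured || v.is_constant || v.dst_is_register) continue;
    if (v.src_slot == v.dst_slot) continue;  // redundant move: skip
    moves.push_back({v.src_slot, v.dst_slot});
  }
  return moves;
}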

◆ RecordSafepoint()

void dart::FlowGraphCompiler::RecordSafepoint ( LocationSummary *  locs,
intptr_t  slow_path_argument_count = 0 
)

Definition at line 1047 of file flow_graph_compiler.cc.

1048 {
1049 if (is_optimizing() || locs->live_registers()->HasUntaggedValues()) {
1050 const intptr_t spill_area_size =
1051 is_optimizing() ? flow_graph_.graph_entry()->spill_slot_count() : 0;
1052
1053 RegisterSet* registers = locs->live_registers();
1054 ASSERT(registers != nullptr);
1055 const intptr_t kFpuRegisterSpillFactor =
1056 kFpuRegisterSize / compiler::target::kWordSize;
1057 const bool using_shared_stub = locs->call_on_shared_slow_path();
1058
1059 BitmapBuilder bitmap(locs->stack_bitmap());
1060
1061 // Expand the bitmap to cover the whole area reserved for spill slots.
1062 // (register allocator takes care of marking slots containing live tagged
1063 // values but it does not do the same for other slots so length might be
1064 // below spill_area_size at this point).
1065 RELEASE_ASSERT(bitmap.Length() <= spill_area_size);
1066 bitmap.SetLength(spill_area_size);
1067
1068 auto instr = current_instruction();
1069 const intptr_t args_count = instr->ArgumentCount();
1070 RELEASE_ASSERT(args_count == 0 || is_optimizing());
1071
1072 for (intptr_t i = 0; i < args_count; i++) {
1073 const auto move_arg =
1074 instr->ArgumentValueAt(i)->instruction()->AsMoveArgument();
1075 const auto rep = move_arg->representation();
1076 if (move_arg->is_register_move()) {
1077 continue;
1078 }
1079
1080 ASSERT(rep == kTagged || rep == kUnboxedInt64 || rep == kUnboxedDouble);
1081 static_assert(compiler::target::kIntSpillFactor ==
1082 compiler::target::kDoubleSpillFactor,
1083 "int and double are of the same size");
1084 const bool is_tagged = move_arg->representation() == kTagged;
1085 const intptr_t num_bits =
1086 is_tagged ? 1 : compiler::target::kIntSpillFactor;
1087
1088 // Note: bits are reversed so higher bit corresponds to lower word.
1089 const intptr_t last_arg_bit =
1090 (spill_area_size - 1) - move_arg->sp_relative_index();
1091 bitmap.SetRange(last_arg_bit - (num_bits - 1), last_arg_bit, is_tagged);
1092 }
1093 ASSERT(slow_path_argument_count == 0 || !using_shared_stub);
1094 RELEASE_ASSERT(bitmap.Length() == spill_area_size);
1095
1096 // Trim the fully tagged suffix. Stack walking assumes that everything
1097 // not included into the stack map is tagged.
1098 intptr_t spill_area_bits = bitmap.Length();
1099 while (spill_area_bits > 0) {
1100 if (!bitmap.Get(spill_area_bits - 1)) {
1101 break;
1102 }
1103 spill_area_bits--;
1104 }
1105 bitmap.SetLength(spill_area_bits);
1106
1107 // Mark the bits in the stack map in the same order we push registers in
1108 // slow path code (see FlowGraphCompiler::SaveLiveRegisters).
1109 //
1110 // Slow path code can have registers at the safepoint.
1111 if (!locs->always_calls() && !using_shared_stub) {
1112 RegisterSet* regs = locs->live_registers();
1113 if (regs->FpuRegisterCount() > 0) {
1114 // Denote FPU registers with 0 bits in the stackmap. Based on the
1115 // assumption that there are normally few live FPU registers, this
1116 // encoding is simpler and roughly as compact as storing a separate
1117 // count of FPU registers.
1118 //
1119 // FPU registers have the highest register number at the highest
1120 // address (i.e., first in the stackmap).
1121 for (intptr_t i = kNumberOfFpuRegisters - 1; i >= 0; --i) {
1122 FpuRegister reg = static_cast<FpuRegister>(i);
1123 if (regs->ContainsFpuRegister(reg)) {
1124 for (intptr_t j = 0; j < kFpuRegisterSpillFactor; ++j) {
1125 bitmap.Set(bitmap.Length(), false);
1126 }
1127 }
1128 }
1129 }
1130
1131 // General purpose registers have the highest register number at the
1132 // highest address (i.e., first in the stackmap).
1133 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; --i) {
1134 Register reg = static_cast<Register>(i);
1135 if (locs->live_registers()->ContainsRegister(reg)) {
1136 bitmap.Set(bitmap.Length(), locs->live_registers()->IsTagged(reg));
1137 }
1138 }
1139 }
1140
1141 if (using_shared_stub) {
1142 // To simplify the code in the shared stub, we create an untagged hole
1143 // in the stack frame where the shared stub can leave the return address
1144 // before saving registers.
1145 bitmap.Set(bitmap.Length(), false);
1146 if (registers->FpuRegisterCount() > 0) {
1147 bitmap.SetRange(bitmap.Length(),
1148 bitmap.Length() +
1149 kNumberOfFpuRegisters * kFpuRegisterSpillFactor - 1,
1150 false);
1151 }
1152 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; --i) {
1153 if ((kReservedCpuRegisters & (1 << i)) != 0) continue;
1154 const Register reg = static_cast<Register>(i);
1155 bitmap.Set(bitmap.Length(),
1156 locs->live_registers()->ContainsRegister(reg) &&
1157 locs->live_registers()->IsTagged(reg));
1158 }
1159 }
1160
1161 // Arguments pushed after live registers in the slow path are tagged.
1162 for (intptr_t i = 0; i < slow_path_argument_count; ++i) {
1163 bitmap.Set(bitmap.Length(), true);
1164 }
1165
1166 compressed_stackmaps_builder_->AddEntry(assembler()->CodeSize(), &bitmap,
1167 spill_area_bits);
1168 }
1169}
void AddEntry(intptr_t pc_offset, BitmapBuilder *bitmap, intptr_t spill_slot_bit_count)
Instruction * current_instruction() const
intptr_t spill_slot_count() const
Definition: il.h:1983
constexpr intptr_t kDoubleSpillFactor
Definition: runtime_api.h:340
constexpr intptr_t kIntSpillFactor
Definition: runtime_api.h:339
const RegList kReservedCpuRegisters
@ kNumberOfCpuRegisters
Definition: constants_arm.h:98
const int kNumberOfFpuRegisters
QRegister FpuRegister
const int kFpuRegisterSize
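
The stack map built above is a bitmap over spill slots with an economy rule at the end. A standalone sketch (not VM code): one bit per slot, 1 for a tagged pointer and 0 for a raw value, with the fully tagged suffix trimmed because the stack walker treats every slot beyond the encoded map as tagged.

#include <cassert>
#include <vector>

std::vector<bool> BuildStackMap(const std::vector<bool>& slot_is_tagged) {
  std::vector<bool> bitmap(slot_is_tagged);
  // Trim trailing 'tagged' bits; missing bits are implicitly tagged.
  while (!bitmap.empty() && bitmap.back()) {
    bitmap.pop_back();
  }
  return bitmap;
}

int main() {
  // Slots: [raw, tagged, raw, tagged, tagged]; the last two bits are implied.
  const auto map = BuildStackMap({false, true, false, true, true});
  assert(map.size() == 3);  // only [raw, tagged, raw] needs encoding
  return 0;
}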

◆ ResolveCallTargetsForReceiverCid()

const CallTargets * dart::FlowGraphCompiler::ResolveCallTargetsForReceiverCid ( intptr_t  cid,
const String &  selector,
const Array &  args_desc_array 
)
static

Definition at line 2003 of file flow_graph_compiler.cc.

2006 {
2007 Zone* zone = Thread::Current()->zone();
2008
2009 ArgumentsDescriptor args_desc(args_desc_array);
2010
2011 Function& fn = Function::ZoneHandle(zone);
2012 if (!LookupMethodFor(cid, selector, args_desc, &fn)) return nullptr;
2013
2014 CallTargets* targets = new (zone) CallTargets(zone);
2015 targets->Add(new (zone) TargetInfo(cid, cid, &fn, /* count = */ 1,
2016 StaticTypeExactnessState::NotTracking()));
2017
2018 return targets;
2019}
static bool LookupMethodFor(int class_id, const String &name, const ArgumentsDescriptor &args_desc, Function *fn_return, bool *class_is_abstract_return=nullptr)
static StaticTypeExactnessState NotTracking()
const intptr_t cid

◆ RestoreLiveRegisters()

void dart::FlowGraphCompiler::RestoreLiveRegisters ( LocationSummary *  locs)

◆ SaveLiveRegisters()

void dart::FlowGraphCompiler::SaveLiveRegisters ( LocationSummary *  locs)

◆ set_current_block()

void dart::FlowGraphCompiler::set_current_block ( BlockEntryInstr *  value)
inline

Definition at line 420 of file flow_graph_compiler.h.

420{ current_block_ = value; }

◆ set_intrinsic_flow_graph()

void dart::FlowGraphCompiler::set_intrinsic_flow_graph ( const FlowGraph &  flow_graph)
inline

Definition at line 445 of file flow_graph_compiler.h.

445 {
446 intrinsic_flow_graph_ = &flow_graph;
447 }

◆ set_intrinsic_slow_path_label()

void dart::FlowGraphCompiler::set_intrinsic_slow_path_label ( compiler::Label *  label)
inline

Definition at line 449 of file flow_graph_compiler.h.

449 {
450 ASSERT(intrinsic_slow_path_label_ == nullptr || label == nullptr);
451 intrinsic_slow_path_label_ = label;
452 }

◆ SetNeedsStackTrace()

void dart::FlowGraphCompiler::SetNeedsStackTrace ( intptr_t  try_index)

Definition at line 913 of file flow_graph_compiler.cc.

913 {
914 exception_handlers_list_->SetNeedsStackTrace(try_index);
915}
void SetNeedsStackTrace(intptr_t try_index)

◆ skip_body_compilation()

bool dart::FlowGraphCompiler::skip_body_compilation ( ) const
inline

Definition at line 437 of file flow_graph_compiler.h.

437 {
438 return fully_intrinsified_ && is_optimizing();
439 }

◆ SlowPathEnvironmentFor() [1/2]

Environment * dart::FlowGraphCompiler::SlowPathEnvironmentFor ( Environment *  env,
LocationSummary *  locs,
intptr_t  num_slow_path_args 
)

Definition at line 1177 of file flow_graph_compiler.cc.

1180 {
1181 const bool using_shared_stub = locs->call_on_shared_slow_path();
1182 const bool shared_stub_save_fpu_registers =
1183 using_shared_stub && locs->live_registers()->FpuRegisterCount() > 0;
1184 // TODO(sjindel): Modify logic below to account for slow-path args with shared
1185 // stubs.
1186 ASSERT(!using_shared_stub || num_slow_path_args == 0);
1187 if (env == nullptr) {
1188 // In AOT, environments can be removed by EliminateEnvironments pass
1189 // (if not in a try block).
1190 ASSERT(!is_optimizing() || FLAG_precompiled_mode);
1191 return nullptr;
1192 }
1193
1194 Environment* slow_path_env =
1195 env->DeepCopy(zone(), env->Length() - env->LazyDeoptPruneCount());
1196 // 1. Iterate the registers in the order they will be spilled to compute
1197 // the slots they will be spilled to.
1198 intptr_t next_slot = StackSize() + slow_path_env->CountArgsPushed();
1199 if (using_shared_stub) {
1200 // The PC from the call to the shared stub is pushed here.
1201 next_slot++;
1202 }
1203 RegisterSet* regs = locs->live_registers();
1204 intptr_t fpu_reg_slots[kNumberOfFpuRegisters];
1205 intptr_t cpu_reg_slots[kNumberOfCpuRegisters];
1206 const intptr_t kFpuRegisterSpillFactor =
1207 kFpuRegisterSize / compiler::target::kWordSize;
1208 // FPU registers are spilled first from highest to lowest register number.
1209 for (intptr_t i = kNumberOfFpuRegisters - 1; i >= 0; --i) {
1210 FpuRegister reg = static_cast<FpuRegister>(i);
1211 if (regs->ContainsFpuRegister(reg)) {
1212 // We use the lowest address (thus highest index) to identify a
1213 // multi-word spill slot.
1214 next_slot += kFpuRegisterSpillFactor;
1215 fpu_reg_slots[i] = (next_slot - 1);
1216 } else {
1217 if (using_shared_stub && shared_stub_save_fpu_registers) {
1218 next_slot += kFpuRegisterSpillFactor;
1219 }
1220 fpu_reg_slots[i] = -1;
1221 }
1222 }
1223 // General purpose registers are spilled from highest to lowest register
1224 // number.
1225 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; --i) {
1226 if ((kReservedCpuRegisters & (1 << i)) != 0) continue;
1227 Register reg = static_cast<Register>(i);
1228 if (regs->ContainsRegister(reg)) {
1229 cpu_reg_slots[i] = next_slot++;
1230 } else {
1231 if (using_shared_stub) next_slot++;
1232 cpu_reg_slots[i] = -1;
1233 }
1234 }
1235
1236 // 2. Iterate the environment and replace register locations with the
1237 // corresponding spill slot locations.
1238 for (Environment::DeepIterator it(slow_path_env); !it.Done(); it.Advance()) {
1239 Location loc = it.CurrentLocation();
1240 Value* value = it.CurrentValue();
1241 it.SetCurrentLocation(LocationRemapForSlowPath(
1242 loc, value->definition(), cpu_reg_slots, fpu_reg_slots));
1243 }
1244
1245 return slow_path_env;
1246}
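
The two numbered passes above first assign a spill slot to every live register and then rewrite the environment to point at those slots. Below is a self-contained sketch of the pass-1 numbering for the plain (non-shared-stub) case; the register counts, spill factor, and live sets are made up, and reserved registers plus shared-stub padding are omitted.

#include <cstdint>
#include <cstdio>

int main() {
  constexpr intptr_t kNumFpu = 4;
  constexpr intptr_t kNumCpu = 8;
  constexpr intptr_t kFpuSpillFactor = 2;  // words per FPU register
  const bool fpu_live[kNumFpu] = {false, true, false, true};
  const bool cpu_live[kNumCpu] = {true, false, false, true,
                                  false, false, false, false};
  intptr_t fpu_slot[kNumFpu];
  intptr_t cpu_slot[kNumCpu];
  intptr_t next_slot = 0;  // the engine starts at StackSize() + args pushed

  // FPU registers are spilled first, from highest to lowest register number.
  for (intptr_t i = kNumFpu - 1; i >= 0; --i) {
    if (fpu_live[i]) {
      next_slot += kFpuSpillFactor;
      // A multi-word slot is named by its highest index (lowest address).
      fpu_slot[i] = next_slot - 1;
    } else {
      fpu_slot[i] = -1;  // not spilled
    }
  }
  // CPU registers follow, one word each.
  for (intptr_t i = kNumCpu - 1; i >= 0; --i) {
    cpu_slot[i] = cpu_live[i] ? next_slot++ : -1;
  }

  for (intptr_t i = 0; i < kNumFpu; ++i) {
    printf("fpu reg %d -> slot %d\n", (int)i, (int)fpu_slot[i]);
  }
  for (intptr_t i = 0; i < kNumCpu; ++i) {
    printf("cpu reg %d -> slot %d\n", (int)i, (int)cpu_slot[i]);
  }
  return 0;
}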

◆ SlowPathEnvironmentFor() [2/2]

Environment * dart::FlowGraphCompiler::SlowPathEnvironmentFor ( Instruction * inst, intptr_t num_slow_path_args )
inline

Definition at line 860 of file flow_graph_compiler.h.

861 {
862 if (inst->env() == nullptr && is_optimizing()) {
863 if (pending_deoptimization_env_ == nullptr) {
864 return nullptr;
865 }
866 return SlowPathEnvironmentFor(pending_deoptimization_env_, inst->locs(),
867 num_slow_path_args);
868 }
869 return SlowPathEnvironmentFor(inst->env(), inst->locs(),
870 num_slow_path_args);
871 }
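
Slow paths typically pair this overload with the SaveLiveRegisters() and RestoreLiveRegisters() entries documented above. A hedged sketch of that pattern, where compiler, instruction(), and locs stand in for the members a slow-path helper would provide:

compiler->SaveLiveRegisters(locs);
Environment* env =
    compiler->SlowPathEnvironmentFor(instruction(), /*num_slow_path_args=*/0);
// ... emit the runtime call here, attaching env so that deoptimization and
// exception metadata see live values in their spill slots ...
compiler->RestoreLiveRegisters(locs);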

◆ SpecialStatsBegin()

void dart::FlowGraphCompiler::SpecialStatsBegin ( intptr_t  tag)
inline

Definition at line 470 of file flow_graph_compiler.h.

470 {
471 if (stats_ != nullptr) stats_->SpecialBegin(tag);
472 }

◆ SpecialStatsEnd()

void dart::FlowGraphCompiler::SpecialStatsEnd ( intptr_t  tag)
inline

Definition at line 474 of file flow_graph_compiler.h.

474 {
475 if (stats_ != nullptr) stats_->SpecialEnd(tag);
476 }

◆ StackSize()

intptr_t dart::FlowGraphCompiler::StackSize ( ) const

Definition at line 816 of file flow_graph_compiler.cc.

816 {
817 if (is_optimizing_) {
818 return flow_graph_.graph_entry()->spill_slot_count();
819 } else {
820 return parsed_function_.num_stack_locals();
821 }
822}

◆ StatsBegin()

void dart::FlowGraphCompiler::StatsBegin ( Instruction * instr)
inline

Definition at line 462 of file flow_graph_compiler.h.

462 {
463 if (stats_ != nullptr) stats_->Begin(instr);
464 }

◆ StatsEnd()

void dart::FlowGraphCompiler::StatsEnd ( Instruction * instr)
inline

Definition at line 466 of file flow_graph_compiler.h.

466 {
467 if (stats_ != nullptr) stats_->End(instr);
468 }

◆ SupportsUnboxedSimd128()

static bool dart::FlowGraphCompiler::SupportsUnboxedSimd128 ( )
static

◆ thread()

Thread * dart::FlowGraphCompiler::thread ( ) const
inline

Definition at line 912 of file flow_graph_compiler.h.

912{ return thread_; }

◆ TryIntrinsify()

bool dart::FlowGraphCompiler::TryIntrinsify ( )

Definition at line 1432 of file flow_graph_compiler.cc.

1432 {
1433 if (TryIntrinsifyHelper()) {
1434 fully_intrinsified_ = true;
1435 return true;
1436 }
1437 return false;
1438}

◆ used_static_fields()

GrowableArray< const Field * > & dart::FlowGraphCompiler::used_static_fields ( )
inline

Definition at line 478 of file flow_graph_compiler.h.

478 {
479 return used_static_fields_;
480 }

◆ VisitBlocks()

void dart::FlowGraphCompiler::VisitBlocks ( )

Definition at line 670 of file flow_graph_compiler.cc.

670 {
671 CompactBlocks();
672 if (compiler::Assembler::EmittingComments()) {
673 // The loop_info fields were cleared, recompute.
674 flow_graph().ComputeLoops();
675 }
676
677 // In precompiled mode, we require the function entry to come first (after the
678 // graph entry), since the polymorphic check is performed in the function
679 // entry (see Instructions::EntryPoint).
680 if (FLAG_precompiled_mode) {
681 ASSERT(block_order()[1] == flow_graph().graph_entry()->normal_entry());
682 }
683
684#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
685 const auto inner_lr_state = ComputeInnerLRState(flow_graph());
686#endif // defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
687
688#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_ARM64)
689 const bool should_align_loops =
690 FLAG_align_all_loops || IsMarkedWithAlignLoops(function());
691#endif // defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_ARM64)
692
693 for (intptr_t i = 0; i < block_order().length(); ++i) {
694 // Compile the block entry.
695 BlockEntryInstr* entry = block_order()[i];
696 assembler()->Comment("B%" Pd "", entry->block_id());
697 set_current_block(entry);
698
699 if (WasCompacted(entry)) {
700 continue;
701 }
702
703#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
704 // At the start of every non-entry block we expect return address either
705 // to be spilled into the frame or to be in the LR register.
706 if (entry->IsFunctionEntry() || entry->IsNativeEntry()) {
707 assembler()->set_lr_state(compiler::LRState::OnEntry());
708 } else {
709 assembler()->set_lr_state(inner_lr_state);
710 }
711#endif // defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
712
713#if defined(DEBUG)
714 if (!is_optimizing()) {
715 FrameStateClear();
716 }
717#endif
718
719 if (compiler::Assembler::EmittingComments()) {
720 for (LoopInfo* l = entry->loop_info(); l != nullptr; l = l->outer()) {
721 assembler()->Comment(" Loop %" Pd "", l->id());
722 }
723 if (entry->IsLoopHeader()) {
724 assembler()->Comment(" Loop Header");
725 }
726 }
727
728#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_ARM64)
729 if (should_align_loops && entry->IsLoopHeader() &&
733 }
734#else
735 static_assert(kPreferredLoopAlignment == 1);
736#endif // defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_ARM64)
737
738 BeginCodeSourceRange(entry->source());
739 ASSERT(pending_deoptimization_env_ == nullptr);
740 pending_deoptimization_env_ = entry->env();
741 set_current_instruction(entry);
742 StatsBegin(entry);
743 entry->EmitNativeCode(this);
744 StatsEnd(entry);
745 set_current_instruction(nullptr);
746 pending_deoptimization_env_ = nullptr;
747 EndCodeSourceRange(entry->source());
748
749 if (skip_body_compilation()) {
750 ASSERT(entry == flow_graph().graph_entry()->normal_entry());
751 break;
752 }
753
754 // Compile all successors until an exit, branch, or a block entry.
755 for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) {
756 Instruction* instr = it.Current();
757 set_current_instruction(instr);
758 StatsBegin(instr);
759 // Unoptimized code always stores boxed values on the expression stack.
760 // However, unboxed representation is allowed for instruction inputs and
761 // outputs of certain types (e.g. for doubles).
762 // Unboxed inputs/outputs are handled in the instruction prologue
763 // and epilogue, but flagged as a mismatch on the IL level.
764 ASSERT(!is_optimizing() ||
765 !instr->HasUnmatchedInputRepresentations());
766
767 if (FLAG_code_comments || FLAG_disassemble ||
768 FLAG_disassemble_optimized) {
769 if (FLAG_source_lines) {
770 EmitSourceLine(instr);
771 }
772 EmitComment(instr);
773 }
774
775 BeginCodeSourceRange(instr->source());
776 EmitInstructionPrologue(instr);
777 ASSERT(pending_deoptimization_env_ == nullptr);
778 pending_deoptimization_env_ = instr->env();
779 DEBUG_ONLY(current_instruction_ = instr);
780 instr->EmitNativeCode(this);
781 DEBUG_ONLY(current_instruction_ = nullptr);
782 pending_deoptimization_env_ = nullptr;
783 if (IsPeephole(instr)) {
784 ASSERT(top_of_stack_ == nullptr);
785 top_of_stack_ = instr->AsDefinition();
786 } else {
787 EmitInstructionEpilogue(instr);
788 }
789 EndCodeSourceRange(instr->source());
790
791#if defined(DEBUG)
792 if (!is_optimizing()) {
793 FrameStateUpdateWith(instr);
794 }
795#endif
796 StatsEnd(instr);
797 set_current_instruction(nullptr);
798
799 if (auto indirect_goto = instr->AsIndirectGoto()) {
800 indirect_gotos_.Add(indirect_goto);
801 }
802 }
803
804#if defined(DEBUG)
805 ASSERT(is_optimizing() || FrameStateIsSafeToCall());
806#endif
807 }
808
809 set_current_block(nullptr);
810}

◆ WasCompacted()

bool dart::FlowGraphCompiler::WasCompacted ( BlockEntryInstr * block_entry) const

Definition at line 838 of file flow_graph_compiler.cc.

838 {
839 const intptr_t block_index = block_entry->postorder_number();
840 return block_info_[block_index]->WasCompacted();
841}

◆ zone()

Zone * dart::FlowGraphCompiler::zone ( ) const
inline

Definition at line 914 of file flow_graph_compiler.h.

914{ return zone_; }

Friends And Related Function Documentation

◆ BoxInt64Instr

friend class BoxInt64Instr
friend

Definition at line 954 of file flow_graph_compiler.h.

◆ CheckNullInstr

friend class CheckNullInstr
friend

Definition at line 955 of file flow_graph_compiler.h.

◆ CheckStackOverflowInstr

friend class CheckStackOverflowInstr
friend

Definition at line 957 of file flow_graph_compiler.h.

◆ CheckStackOverflowSlowPath

friend class CheckStackOverflowSlowPath
friend

Definition at line 960 of file flow_graph_compiler.h.

◆ GraphIntrinsicCodeGenScope

friend class GraphIntrinsicCodeGenScope
friend

Definition at line 961 of file flow_graph_compiler.h.

◆ NullErrorSlowPath

friend class NullErrorSlowPath
friend

Definition at line 956 of file flow_graph_compiler.h.

◆ StoreFieldInstr

friend class StoreFieldInstr
friend

Definition at line 959 of file flow_graph_compiler.h.

◆ StoreIndexedInstr

friend class StoreIndexedInstr
friend

Definition at line 958 of file flow_graph_compiler.h.

Member Data Documentation

◆ kMaxNumberOfCidRangesToTest

constexpr intptr_t dart::FlowGraphCompiler::kMaxNumberOfCidRangesToTest = 4
static constexpr

Definition at line 650 of file flow_graph_compiler.h.
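
This constant bounds how many class-id ranges a type test is worth checking inline before the compiler should prefer a stub or runtime call. A self-contained sketch (illustrative types only, not the engine's actual use site) of bounded range checking with the usual unsigned-compare trick:

#include <cstdint>

struct CidRange {
  intptr_t cid_start;
  intptr_t cid_end;  // inclusive
};

constexpr intptr_t kMaxNumberOfCidRangesToTest = 4;

// Only small range sets are worth testing inline.
bool CanTestInline(intptr_t num_ranges) {
  return num_ranges <= kMaxNumberOfCidRangesToTest;
}

bool MatchesAnyRange(intptr_t cid, const CidRange* ranges, intptr_t n) {
  for (intptr_t i = 0; i < n; ++i) {
    // One unsigned comparison tests cid_start <= cid <= cid_end.
    if (static_cast<uintptr_t>(cid - ranges[i].cid_start) <=
        static_cast<uintptr_t>(ranges[i].cid_end - ranges[i].cid_start)) {
      return true;
    }
  }
  return false;
}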


The documentation for this class was generated from the following files:
flow_graph_compiler.h
flow_graph_compiler.cc