Flutter Engine
dart::FlowGraphCompiler Class Reference

#include <flow_graph_compiler.h>

Inheritance diagram for dart::FlowGraphCompiler:
dart::ValueObject

Public Member Functions

 FlowGraphCompiler (compiler::Assembler *assembler, FlowGraph *flow_graph, const ParsedFunction &parsed_function, bool is_optimizing, SpeculativeInliningPolicy *speculative_policy, const GrowableArray< const Function * > &inline_id_to_function, const GrowableArray< TokenPosition > &inline_id_to_token_pos, const GrowableArray< intptr_t > &caller_inline_id, ZoneGrowableArray< const ICData * > *deopt_id_to_ic_data, CodeStatistics *stats=nullptr)
 
void ArchSpecificInitialization ()
 
 ~FlowGraphCompiler ()
 
compiler::Assembler * assembler () const
 
const ParsedFunction & parsed_function () const
 
const Function & function () const
 
const GrowableArray< BlockEntryInstr * > & block_order () const
 
const GrowableArray< const compiler::TableSelector * > & dispatch_table_call_targets () const
 
bool ForcedOptimization () const
 
const FlowGraph & flow_graph () const
 
BlockEntryInstr * current_block () const
 
void set_current_block (BlockEntryInstr *value)
 
Instruction * current_instruction () const
 
bool CanOptimize () const
 
bool CanOptimizeFunction () const
 
bool CanOSRFunction () const
 
bool is_optimizing () const
 
void InsertBSSRelocation (BSS::Relocation reloc)
 
void LoadBSSEntry (BSS::Relocation relocation, Register dst, Register tmp)
 
bool skip_body_compilation () const
 
void EnterIntrinsicMode ()
 
void ExitIntrinsicMode ()
 
bool intrinsic_mode () const
 
void set_intrinsic_flow_graph (const FlowGraph &flow_graph)
 
void set_intrinsic_slow_path_label (compiler::Label *label)
 
compiler::Label * intrinsic_slow_path_label () const
 
bool ForceSlowPathForStackOverflow () const
 
const GrowableArray< BlockInfo * > & block_info () const
 
void StatsBegin (Instruction *instr)
 
void StatsEnd (Instruction *instr)
 
void SpecialStatsBegin (intptr_t tag)
 
void SpecialStatsEnd (intptr_t tag)
 
GrowableArray< const Field * > & used_static_fields ()
 
void InitCompiler ()
 
void CompileGraph ()
 
void EmitPrologue ()
 
void VisitBlocks ()
 
void EmitFunctionEntrySourcePositionDescriptorIfNeeded ()
 
void Bailout (const char *reason)
 
bool TryIntrinsify ()
 
void EmitMove (Location dst, Location src, TemporaryRegisterAllocator *temp)
 
void EmitNativeMove (const compiler::ffi::NativeLocation &dst, const compiler::ffi::NativeLocation &src, TemporaryRegisterAllocator *temp)
 
void EmitMoveToNative (const compiler::ffi::NativeLocation &dst, Location src_loc, Representation src_type, TemporaryRegisterAllocator *temp)
 
void EmitMoveFromNative (Location dst_loc, Representation dst_type, const compiler::ffi::NativeLocation &src, TemporaryRegisterAllocator *temp)
 
void EmitMoveConst (const compiler::ffi::NativeLocation &dst, Location src, Representation src_type, TemporaryRegisterAllocator *temp)
 
bool CheckAssertAssignableTypeTestingABILocations (const LocationSummary &locs)
 
void GenerateAssertAssignable (CompileType *receiver_type, const InstructionSource &source, intptr_t deopt_id, Environment *env, const String &dst_name, LocationSummary *locs)
 
void GenerateCallerChecksForAssertAssignable (CompileType *receiver_type, const AbstractType &dst_type, compiler::Label *done)
 
void GenerateTTSCall (const InstructionSource &source, intptr_t deopt_id, Environment *env, Register reg_with_type, const AbstractType &dst_type, const String &dst_name, LocationSummary *locs)
 
void GenerateStubCall (const InstructionSource &source, const Code &stub, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, intptr_t deopt_id, Environment *env)
 
void GenerateNonLazyDeoptableStubCall (const InstructionSource &source, const Code &stub, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, ObjectPool::SnapshotBehavior snapshot_behavior=compiler::ObjectPoolBuilderEntry::kSnapshotable)
 
void GeneratePatchableCall (const InstructionSource &source, const Code &stub, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, ObjectPool::SnapshotBehavior snapshot_behavior=compiler::ObjectPoolBuilderEntry::kSnapshotable)
 
void GenerateDartCall (intptr_t deopt_id, const InstructionSource &source, const Code &stub, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
 
void GenerateStaticDartCall (intptr_t deopt_id, const InstructionSource &source, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, const Function &target, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
 
void GenerateInstanceOf (const InstructionSource &source, intptr_t deopt_id, Environment *env, const AbstractType &type, LocationSummary *locs)
 
void GenerateInstanceCall (intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, const ICData &ic_data, Code::EntryKind entry_kind, bool receiver_can_be_smi)
 
void GenerateStaticCall (intptr_t deopt_id, const InstructionSource &source, const Function &function, ArgumentsInfo args_info, LocationSummary *locs, const ICData &ic_data_in, ICData::RebindRule rebind_rule, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
 
void GenerateNumberTypeCheck (Register kClassIdReg, const AbstractType &type, compiler::Label *is_instance_lbl, compiler::Label *is_not_instance_lbl)
 
void GenerateStringTypeCheck (Register kClassIdReg, compiler::Label *is_instance_lbl, compiler::Label *is_not_instance_lbl)
 
void GenerateListTypeCheck (Register kClassIdReg, compiler::Label *is_instance_lbl)
 
bool GenerateSubtypeRangeCheck (Register class_id_reg, const Class &type_class, compiler::Label *is_subtype_lbl)
 
void EmitOptimizedInstanceCall (const Code &stub, const ICData &ic_data, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
 
void EmitInstanceCallJIT (const Code &stub, const ICData &ic_data, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, Code::EntryKind entry_kind)
 
void EmitPolymorphicInstanceCall (const PolymorphicInstanceCallInstr *call, const CallTargets &targets, ArgumentsInfo args_info, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, bool complete, intptr_t total_call_count, bool receiver_can_be_smi=true)
 
void EmitMegamorphicInstanceCall (const ICData &icdata, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs)
 
void EmitMegamorphicInstanceCall (const String &function_name, const Array &arguments_descriptor, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs)
 
void EmitInstanceCallAOT (const ICData &ic_data, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, Code::EntryKind entry_kind=Code::EntryKind::kNormal, bool receiver_can_be_smi=true)
 
void EmitTestAndCall (const CallTargets &targets, const String &function_name, ArgumentsInfo args_info, compiler::Label *failed, compiler::Label *match_found, intptr_t deopt_id, const InstructionSource &source_index, LocationSummary *locs, bool complete, intptr_t total_ic_calls, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
 
void EmitDispatchTableCall (int32_t selector_offset, const Array &arguments_descriptor)
 
Condition EmitEqualityRegConstCompare (Register reg, const Object &obj, bool needs_number_check, const InstructionSource &source, intptr_t deopt_id)
 
Condition EmitEqualityRegRegCompare (Register left, Register right, bool needs_number_check, const InstructionSource &source, intptr_t deopt_id)
 
Condition EmitBoolTest (Register value, BranchLabels labels, bool invert)
 
bool NeedsEdgeCounter (BlockEntryInstr *block)
 
void EmitEdgeCounter (intptr_t edge_id)
 
void RecordCatchEntryMoves (Environment *env)
 
void EmitCallToStub (const Code &stub, ObjectPool::SnapshotBehavior snapshot_behavior=compiler::ObjectPoolBuilderEntry::kSnapshotable)
 
void EmitJumpToStub (const Code &stub)
 
void EmitTailCallToStub (const Code &stub)
 
void EmitDropArguments (intptr_t count)
 
void EmitCallsiteMetadata (const InstructionSource &source, intptr_t deopt_id, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, Environment *env)
 
void EmitYieldPositionMetadata (const InstructionSource &source, intptr_t yield_index)
 
void EmitComment (Instruction *instr)
 
intptr_t StackSize () const
 
intptr_t ExtraStackSlotsOnOsrEntry () const
 
compiler::Label * GetJumpLabel (BlockEntryInstr *block_entry) const
 
bool WasCompacted (BlockEntryInstr *block_entry) const
 
compiler::Label * NextNonEmptyLabel () const
 
bool CanFallThroughTo (BlockEntryInstr *block_entry) const
 
BranchLabels CreateBranchLabels (BranchInstr *branch) const
 
void AddExceptionHandler (CatchBlockEntryInstr *entry)
 
void SetNeedsStackTrace (intptr_t try_index)
 
void AddCurrentDescriptor (UntaggedPcDescriptors::Kind kind, intptr_t deopt_id, const InstructionSource &source)
 
void AddDescriptor (UntaggedPcDescriptors::Kind kind, intptr_t pc_offset, intptr_t deopt_id, const InstructionSource &source, intptr_t try_index, intptr_t yield_index=UntaggedPcDescriptors::kInvalidYieldIndex)
 
void AddNullCheck (const InstructionSource &source, const String &name)
 
void RecordSafepoint (LocationSummary *locs, intptr_t slow_path_argument_count=0)
 
compiler::Label * AddDeoptStub (intptr_t deopt_id, ICData::DeoptReasonId reason, uint32_t flags=0)
 
CompilerDeoptInfo * AddDeoptIndexAtCall (intptr_t deopt_id, Environment *env)
 
CompilerDeoptInfo * AddSlowPathDeoptInfo (intptr_t deopt_id, Environment *env)
 
void AddSlowPathCode (SlowPathCode *slow_path)
 
void FinalizeExceptionHandlers (const Code &code)
 
void FinalizePcDescriptors (const Code &code)
 
ArrayPtr CreateDeoptInfo (compiler::Assembler *assembler)
 
void FinalizeStackMaps (const Code &code)
 
void FinalizeVarDescriptors (const Code &code)
 
void FinalizeCatchEntryMovesMap (const Code &code)
 
void FinalizeStaticCallTargetsTable (const Code &code)
 
void FinalizeCodeSourceMap (const Code &code)
 
const Class & double_class () const
 
const Class & mint_class () const
 
const Class & float32x4_class () const
 
const Class & float64x2_class () const
 
const Class & int32x4_class () const
 
const Class & BoxClassFor (Representation rep)
 
void SaveLiveRegisters (LocationSummary *locs)
 
void RestoreLiveRegisters (LocationSummary *locs)
 
Environment * SlowPathEnvironmentFor (Instruction *inst, intptr_t num_slow_path_args)
 
Environment * SlowPathEnvironmentFor (Environment *env, LocationSummary *locs, intptr_t num_slow_path_args)
 
intptr_t CurrentTryIndex () const
 
bool may_reoptimize () const
 
const ICData * GetOrAddInstanceCallICData (intptr_t deopt_id, const String &target_name, const Array &arguments_descriptor, intptr_t num_args_tested, const AbstractType &receiver_type, const Function &binary_smi_target)
 
const ICData * GetOrAddStaticCallICData (intptr_t deopt_id, const Function &target, const Array &arguments_descriptor, intptr_t num_args_tested, ICData::RebindRule rebind_rule)
 
const ZoneGrowableArray< const ICData * > & deopt_id_to_ic_data () const
 
Thread * thread () const
 
IsolateGroup * isolate_group () const
 
Zone * zone () const
 
void AddStubCallTarget (const Code &code)
 
void AddDispatchTableCallTarget (const compiler::TableSelector *selector)
 
ArrayPtr edge_counters_array () const
 
ArrayPtr InliningIdToFunction () const
 
void BeginCodeSourceRange (const InstructionSource &source)
 
void EndCodeSourceRange (const InstructionSource &source)
 
bool IsEmptyBlock (BlockEntryInstr *block) const
 
void EmitOptimizedStaticCall (const Function &function, const Array &arguments_descriptor, intptr_t size_with_type_args, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
 
- Public Member Functions inherited from dart::ValueObject
 ValueObject ()
 
 ~ValueObject ()
 

Static Public Member Functions

static bool SupportsUnboxedDoubles ()
 
static bool SupportsUnboxedSimd128 ()
 
static bool CanConvertInt64ToDouble ()
 
static void GenerateIndirectTTSCall (compiler::Assembler *assembler, Register reg_with_type, intptr_t sub_type_cache_index)
 
static bool GenerateCidRangesCheck (compiler::Assembler *assembler, Register class_id_reg, const CidRangeVector &cid_ranges, compiler::Label *inside_range_lbl, compiler::Label *outside_range_lbl=nullptr, bool fall_through_if_inside=false)
 
static const CallTargets * ResolveCallTargetsForReceiverCid (intptr_t cid, const String &selector, const Array &args_desc_array)
 
static bool LookupMethodFor (int class_id, const String &name, const ArgumentsDescriptor &args_desc, Function *fn_return, bool *class_is_abstract_return=nullptr)
 
static int EmitTestAndCallCheckCid (compiler::Assembler *assembler, compiler::Label *label, Register class_id_reg, const CidRangeValue &range, int bias, bool jump_on_miss=true)
 

Static Public Attributes

static constexpr intptr_t kMaxNumberOfCidRangesToTest = 4
 

Friends

class BoxInt64Instr
 
class CheckNullInstr
 
class NullErrorSlowPath
 
class CheckStackOverflowInstr
 
class StoreIndexedInstr
 
class StoreFieldInstr
 
class CheckStackOverflowSlowPath
 
class GraphIntrinsicCodeGenScope
 

Detailed Description

Definition at line 338 of file flow_graph_compiler.h.

Constructor & Destructor Documentation

◆ FlowGraphCompiler()

dart::FlowGraphCompiler::FlowGraphCompiler ( compiler::Assembler *  assembler,
FlowGraph *  flow_graph,
const ParsedFunction &  parsed_function,
bool  is_optimizing,
SpeculativeInliningPolicy *  speculative_policy,
const GrowableArray< const Function * > &  inline_id_to_function,
const GrowableArray< TokenPosition > &  inline_id_to_token_pos,
const GrowableArray< intptr_t > &  caller_inline_id,
ZoneGrowableArray< const ICData * > *  deopt_id_to_ic_data,
CodeStatistics *  stats = nullptr 
)

Definition at line 135 of file flow_graph_compiler.cc.

    : thread_(Thread::Current()),
      zone_(Thread::Current()->zone()),
      assembler_(assembler),
      parsed_function_(parsed_function),
      flow_graph_(*flow_graph),
      block_order_(*flow_graph->CodegenBlockOrder()),
      current_block_(nullptr),
      exception_handlers_list_(nullptr),
      pc_descriptors_list_(nullptr),
      compressed_stackmaps_builder_(nullptr),
      code_source_map_builder_(nullptr),
      catch_entry_moves_maps_builder_(nullptr),
      block_info_(block_order_.length()),
      deopt_infos_(),
      static_calls_target_table_(),
      indirect_gotos_(),
      is_optimizing_(is_optimizing),
      speculative_policy_(speculative_policy),
      may_reoptimize_(false),
      intrinsic_mode_(false),
      stats_(stats),
      double_class_(
          Class::ZoneHandle(isolate_group()->object_store()->double_class())),
      mint_class_(
          Class::ZoneHandle(isolate_group()->object_store()->mint_class())),
      float32x4_class_(Class::ZoneHandle(
          isolate_group()->object_store()->float32x4_class())),
      float64x2_class_(Class::ZoneHandle(
          isolate_group()->object_store()->float64x2_class())),
      int32x4_class_(
          Class::ZoneHandle(isolate_group()->object_store()->int32x4_class())),
      list_class_(Class::ZoneHandle(Library::Handle(Library::CoreLibrary())
                                        .LookupClass(Symbols::List()))),
      pending_deoptimization_env_(nullptr),
      deopt_id_to_ic_data_(deopt_id_to_ic_data),
      edge_counters_array_(Array::ZoneHandle()) {
  ASSERT(flow_graph->parsed_function().function().ptr() ==
         parsed_function.function().ptr());
  if (is_optimizing) {
    // No need to collect extra ICData objects created during compilation.
    deopt_id_to_ic_data_ = nullptr;
  } else {
    const intptr_t len = thread()->compiler_state().deopt_id();
    deopt_id_to_ic_data_->EnsureLength(len, nullptr);
  }
  ASSERT(assembler != nullptr);
  ASSERT(!list_class_.IsNull());

#if defined(PRODUCT)
  const bool stack_traces_only = true;
#else
  const bool stack_traces_only = false;
#endif
  // Make sure that the function is at the position for inline_id 0.
  ASSERT(inline_id_to_function.length() >= 1);
  ASSERT(inline_id_to_function[0]->ptr() ==
         flow_graph->parsed_function().function().ptr());
  code_source_map_builder_ = new (zone_)
      CodeSourceMapBuilder(zone_, stack_traces_only, caller_inline_id,
                           inline_id_to_token_pos, inline_id_to_function);

  ArchSpecificInitialization();
}
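Note the `deopt_id_to_ic_data` handling above: an optimizing compile discards the table (no new ICData is collected), while an unoptimized compile pre-sizes it with one slot per deopt id. A minimal sketch of that decision, using `std::vector` as a hypothetical stand-in for `ZoneGrowableArray` (names here are illustrative, not VM API):

#include <cstddef>
#include <vector>

using ICDataTable = std::vector<const void*>;  // stand-in table

// Sketch of the constructor's branch: optimizing compiles collect no new
// ICData; unoptimized compiles grow the table to cover every deopt id.
ICDataTable* PrepareICDataTable(ICDataTable* table,
                                bool is_optimizing,
                                size_t num_deopt_ids) {
  if (is_optimizing) {
    return nullptr;  // No need to collect extra ICData objects.
  }
  if (table->size() < num_deopt_ids) {
    table->resize(num_deopt_ids, nullptr);  // like EnsureLength(len, nullptr)
  }
  return table;
}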

◆ ~FlowGraphCompiler()

dart::FlowGraphCompiler::~FlowGraphCompiler ( )

Member Function Documentation

◆ AddCurrentDescriptor()

void dart::FlowGraphCompiler::AddCurrentDescriptor ( UntaggedPcDescriptors::Kind  kind,
intptr_t  deopt_id,
const InstructionSource &  source 
)

Definition at line 934 of file flow_graph_compiler.cc.

{
  AddDescriptor(kind, assembler()->CodeSize(), deopt_id, source,
                CurrentTryIndex());
}

◆ AddDeoptIndexAtCall()

CompilerDeoptInfo * dart::FlowGraphCompiler::AddDeoptIndexAtCall ( intptr_t  deopt_id,
Environment *  env 
)

Definition at line 1015 of file flow_graph_compiler.cc.

{
  ASSERT(is_optimizing());
  ASSERT(!intrinsic_mode());
  ASSERT(!FLAG_precompiled_mode);
  if (env != nullptr) {
    env = env->GetLazyDeoptEnv(zone());
  }
  CompilerDeoptInfo* info =
      new (zone()) CompilerDeoptInfo(deopt_id, ICData::kDeoptAtCall,
                                     0,  // No flags.
                                     env);
  info->set_pc_offset(assembler()->CodeSize());
  deopt_infos_.Add(info);
  return info;
}

◆ AddDeoptStub()

compiler::Label * dart::FlowGraphCompiler::AddDeoptStub ( intptr_t  deopt_id,
ICData::DeoptReasonId  reason,
uint32_t  flags = 0 
)

Definition at line 1248 of file flow_graph_compiler.cc.

{
  if (intrinsic_mode()) {
    return intrinsic_slow_path_label_;
  }

  // No deoptimization allowed when 'FLAG_precompiled_mode' is set.
  if (FLAG_precompiled_mode) {
    if (FLAG_trace_compiler) {
      THR_Print(
          "Retrying compilation %s, suppressing inlining of deopt_id:%" Pd "\n",
          parsed_function_.function().ToFullyQualifiedCString(), deopt_id);
    }
    ASSERT(speculative_policy_->AllowsSpeculativeInlining());
    ASSERT(deopt_id != 0);  // longjmp must return non-zero value.
    thread()->long_jump_base()->Jump(
        deopt_id, Object::speculative_inlining_error());
  }

  ASSERT(is_optimizing_);
  ASSERT(pending_deoptimization_env_ != nullptr);
  if (pending_deoptimization_env_->IsHoisted()) {
    flags |= ICData::kHoisted;
  }
  CompilerDeoptInfoWithStub* stub = new (zone()) CompilerDeoptInfoWithStub(
      deopt_id, reason, flags, pending_deoptimization_env_);
  deopt_infos_.Add(stub);
  return stub->entry_label();
}
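Conceptually, each call to `AddDeoptStub` records one pending deoptimization exit: hot code branches to the returned label, and the out-of-line stub for it is emitted later from `deopt_infos_`. A simplified model of that bookkeeping (stand-in types, not the VM's):

#include <cstdint>
#include <deque>

struct Label {};  // stand-in for compiler::Label

struct DeoptStubRecord {
  intptr_t deopt_id;
  int reason;      // e.g. ICData::kDeoptPolymorphicInstanceCallTestFail
  uint32_t flags;  // e.g. the hoisted bit for LICM-hoisted instructions
  Label entry;     // branches in the hot path target this label
};

std::deque<DeoptStubRecord> deopt_stubs;  // deque: stable element addresses

Label* AddDeoptStubModel(intptr_t deopt_id, int reason, uint32_t flags) {
  deopt_stubs.push_back({deopt_id, reason, flags, Label{}});
  return &deopt_stubs.back().entry;  // caller branches here on failure
}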

◆ AddDescriptor()

void dart::FlowGraphCompiler::AddDescriptor ( UntaggedPcDescriptors::Kind  kind,
intptr_t  pc_offset,
intptr_t  deopt_id,
const InstructionSource &  source,
intptr_t  try_index,
intptr_t  yield_index = UntaggedPcDescriptors::kInvalidYieldIndex 
)

Definition at line 917 of file flow_graph_compiler.cc.

{
  code_source_map_builder_->NoteDescriptor(kind, pc_offset, source);
  // Don't emit deopt-descriptors in AOT mode.
  if (FLAG_precompiled_mode && (kind == UntaggedPcDescriptors::kDeopt)) return;
  // Use the token position of the original call in the root function if source
  // has an inlining id.
  const auto& root_pos = code_source_map_builder_->RootPosition(source);
  pc_descriptors_list_->AddDescriptor(kind, pc_offset, deopt_id, root_pos,
                                      try_index, yield_index);
}
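Each descriptor ties a PC offset in the generated code to a deopt id, a source position, a try index, and (for suspend points) a yield index; in AOT mode `kDeopt` entries are dropped because precompiled code never lazy-deoptimizes. A compact model (field layout illustrative only):

#include <cstdint>
#include <vector>

enum class PcKind { kDeopt, kIcCall, kOther };

struct PcDescriptorModel {
  PcKind kind;
  intptr_t pc_offset;    // offset of the instruction within the Code object
  intptr_t deopt_id;
  intptr_t try_index;    // enclosing exception handler, or an invalid index
  intptr_t yield_index;  // suspend-point index, or kInvalidYieldIndex
};

std::vector<PcDescriptorModel> descriptors;

void AddDescriptorModel(const PcDescriptorModel& d, bool precompiled_mode) {
  // Mirrors the AOT filter above: no deopt descriptors in precompiled code.
  if (precompiled_mode && d.kind == PcKind::kDeopt) return;
  descriptors.push_back(d);
}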

◆ AddDispatchTableCallTarget()

void dart::FlowGraphCompiler::AddDispatchTableCallTarget ( const compiler::TableSelector *  selector)

Definition at line 1010 of file flow_graph_compiler.cc.

{
  dispatch_table_call_targets_.Add(selector);
}

◆ AddExceptionHandler()

void dart::FlowGraphCompiler::AddExceptionHandler ( CatchBlockEntryInstr *  entry)

Definition at line 903 of file flow_graph_compiler.cc.

{
  exception_handlers_list_->AddHandler(
      entry->catch_try_index(), entry->try_index(), assembler()->CodeSize(),
      entry->is_generated(), entry->catch_handler_types(),
      entry->needs_stacktrace());
  if (is_optimizing()) {
    RecordSafepoint(entry->locs());
  }
}

◆ AddNullCheck()

void dart::FlowGraphCompiler::AddNullCheck ( const InstructionSource &  source,
const String &  name 
)

Definition at line 941 of file flow_graph_compiler.cc.

{
#if defined(DART_PRECOMPILER)
  // If we are generating an AOT snapshot and have DWARF stack traces enabled,
  // the AOT runtime is unable to obtain the pool index at runtime. Therefore,
  // there is no reason to put the name into the pool in the first place.
  // TODO(dartbug.com/40605): Move this info to the pc descriptors.
  if (FLAG_precompiled_mode && FLAG_dwarf_stack_traces_mode) return;
#endif
  const intptr_t name_index =
      assembler()->object_pool_builder().FindObject(name);
  code_source_map_builder_->NoteNullCheck(assembler()->CodeSize(), source,
                                          name_index);
}

◆ AddSlowPathCode()

void dart::FlowGraphCompiler::AddSlowPathCode ( SlowPathCode *  slow_path)

Definition at line 860 of file flow_graph_compiler.cc.

{
  slow_path_code_.Add(code);
}

◆ AddSlowPathDeoptInfo()

CompilerDeoptInfo * dart::FlowGraphCompiler::AddSlowPathDeoptInfo ( intptr_t  deopt_id,
Environment *  env 
)

Definition at line 1032 of file flow_graph_compiler.cc.

{
  ASSERT(deopt_id != DeoptId::kNone);
  deopt_id = DeoptId::ToDeoptAfter(deopt_id);
  CompilerDeoptInfo* info =
      new (zone()) CompilerDeoptInfo(deopt_id, ICData::kDeoptUnknown, 0, env);
  info->set_pc_offset(assembler()->CodeSize());
  deopt_infos_.Add(info);
  return info;
}

◆ AddStubCallTarget()

void dart::FlowGraphCompiler::AddStubCallTarget ( const Code &  code)

Definition at line 1003 of file flow_graph_compiler.cc.

{
  DEBUG_ASSERT(code.IsNotTemporaryScopedHandle());
  static_calls_target_table_.Add(new (zone()) StaticCallsStruct(
      Code::kCallViaCode, Code::kDefaultEntry, assembler()->CodeSize(), nullptr,
      &code, nullptr));
}

◆ ArchSpecificInitialization()

void dart::FlowGraphCompiler::ArchSpecificInitialization ( )

◆ assembler()

compiler::Assembler * dart::FlowGraphCompiler::assembler ( ) const
inline

Definition at line 400 of file flow_graph_compiler.h.

{ return assembler_; }

◆ Bailout()

void dart::FlowGraphCompiler::Bailout ( const char *  reason)

Definition at line 812 of file flow_graph_compiler.cc.

{
  parsed_function_.Bailout("FlowGraphCompiler", reason);
}

◆ BeginCodeSourceRange()

void dart::FlowGraphCompiler::BeginCodeSourceRange ( const InstructionSource &  source)

Definition at line 1994 of file flow_graph_compiler.cc.

{
  code_source_map_builder_->BeginCodeSourceRange(assembler()->CodeSize(),
                                                 source);
}

◆ block_info()

const GrowableArray< BlockInfo * > & dart::FlowGraphCompiler::block_info ( ) const
inline

Definition at line 461 of file flow_graph_compiler.h.

{ return block_info_; }

◆ block_order()

const GrowableArray< BlockEntryInstr * > & dart::FlowGraphCompiler::block_order ( ) const
inline

Definition at line 403 of file flow_graph_compiler.h.

{
  return block_order_;
}

◆ BoxClassFor()

const Class & dart::FlowGraphCompiler::BoxClassFor ( Representation  rep)

Definition at line 1975 of file flow_graph_compiler.cc.

{
  switch (rep) {
    case kUnboxedFloat:
    case kUnboxedDouble:
      return double_class();
    case kUnboxedFloat32x4:
      return float32x4_class();
    case kUnboxedFloat64x2:
      return float64x2_class();
    case kUnboxedInt32x4:
      return int32x4_class();
    case kUnboxedInt64:
      return mint_class();
    default:
      UNREACHABLE();
      return Class::ZoneHandle();
  }
}

◆ CanConvertInt64ToDouble()

static bool dart::FlowGraphCompiler::CanConvertInt64ToDouble ( )
static

◆ CanFallThroughTo()

bool dart::FlowGraphCompiler::CanFallThroughTo ( BlockEntryInstr *  block_entry) const

Definition at line 848 of file flow_graph_compiler.cc.

{
  return NextNonEmptyLabel() == GetJumpLabel(block_entry);
}

◆ CanOptimize()

bool dart::FlowGraphCompiler::CanOptimize ( ) const

Definition at line 256 of file flow_graph_compiler.cc.

{
  return thread()->isolate_group()->optimization_counter_threshold() >= 0;
}

◆ CanOptimizeFunction()

bool dart::FlowGraphCompiler::CanOptimizeFunction ( ) const

Definition at line 260 of file flow_graph_compiler.cc.

{
  return CanOptimize() && !parsed_function().function().HasBreakpoint();
}

◆ CanOSRFunction()

bool dart::FlowGraphCompiler::CanOSRFunction ( ) const

Definition at line 264 of file flow_graph_compiler.cc.

{
  return isolate_group()->use_osr() && CanOptimizeFunction() &&
         !is_optimizing();
}

◆ CheckAssertAssignableTypeTestingABILocations()

bool dart::FlowGraphCompiler::CheckAssertAssignableTypeTestingABILocations ( const LocationSummary &  locs)

Definition at line 2311 of file flow_graph_compiler.cc.

{
  ASSERT(locs.in(AssertAssignableInstr::kInstancePos).IsRegister() &&
         locs.in(AssertAssignableInstr::kInstancePos).reg() ==
             TypeTestABI::kInstanceReg);
  ASSERT((locs.in(AssertAssignableInstr::kDstTypePos).IsConstant() &&
          locs.in(AssertAssignableInstr::kDstTypePos)
              .constant()
              .IsAbstractType()) ||
         (locs.in(AssertAssignableInstr::kDstTypePos).IsRegister() &&
          locs.in(AssertAssignableInstr::kDstTypePos).reg() ==
              TypeTestABI::kDstTypeReg));
  ASSERT(locs.in(AssertAssignableInstr::kInstantiatorTAVPos).IsRegister() &&
         locs.in(AssertAssignableInstr::kInstantiatorTAVPos).reg() ==
             TypeTestABI::kInstantiatorTypeArgumentsReg);
  ASSERT(locs.in(AssertAssignableInstr::kFunctionTAVPos).IsRegister() &&
         locs.in(AssertAssignableInstr::kFunctionTAVPos).reg() ==
             TypeTestABI::kFunctionTypeArgumentsReg);
  ASSERT(locs.out(0).IsRegister() &&
         locs.out(0).reg() == TypeTestABI::kInstanceReg);
  return true;
}

◆ CompileGraph()

void dart::FlowGraphCompiler::CompileGraph ( )

Definition at line 628 of file flow_graph_compiler.cc.

{
  InitCompiler();

#if !defined(TARGET_ARCH_IA32)
  // For JIT we have multiple entrypoints functionality which moved the frame
  // setup into the [TargetEntryInstr] (which will set the constant pool
  // allowed bit to true). Despite this we still have to set the
  // constant pool allowed bit to true here as well, because we can generate
  // code for [CatchEntryInstr]s, which need the pool.
  assembler()->set_constant_pool_allowed(true);
#endif

  EmitFunctionEntrySourcePositionDescriptorIfNeeded();
  VisitBlocks();

#if defined(DEBUG)
#endif

  if (!skip_body_compilation()) {
#if !defined(TARGET_ARCH_IA32)
    ASSERT(assembler()->constant_pool_allowed());
#endif
    GenerateDeferredCode();
  }

  for (intptr_t i = 0; i < indirect_gotos_.length(); ++i) {
    indirect_gotos_[i]->ComputeOffsetTable(this);
  }
}

◆ CreateBranchLabels()

BranchLabels dart::FlowGraphCompiler::CreateBranchLabels ( BranchInstr *  branch) const

Definition at line 852 of file flow_graph_compiler.cc.

{
  compiler::Label* true_label = GetJumpLabel(branch->true_successor());
  compiler::Label* false_label = GetJumpLabel(branch->false_successor());
  compiler::Label* fall_through = NextNonEmptyLabel();
  BranchLabels result = {true_label, false_label, fall_through};
  return result;
}

◆ CreateDeoptInfo()

ArrayPtr dart::FlowGraphCompiler::CreateDeoptInfo ( compiler::Assembler *  assembler)

Definition at line 1294 of file flow_graph_compiler.cc.

{
  // No deopt information if we precompile (no deoptimization allowed).
  if (FLAG_precompiled_mode) {
    return Array::empty_array().ptr();
  }
  // For functions with optional arguments, all incoming arguments are copied
  // to spill slots. The deoptimization environment does not track them.
  const Function& function = parsed_function().function();
  const intptr_t incoming_arg_count =
      function.MakesCopyOfParameters() ? 0 : function.num_fixed_parameters();
  DeoptInfoBuilder builder(zone(), incoming_arg_count, assembler);

  intptr_t deopt_info_table_size = DeoptTable::SizeFor(deopt_infos_.length());
  if (deopt_info_table_size == 0) {
    return Object::empty_array().ptr();
  } else {
    const Array& array =
        Array::Handle(Array::New(deopt_info_table_size, Heap::kOld));
    Smi& offset = Smi::Handle();
    TypedData& info = TypedData::Handle();
    Smi& reason_and_flags = Smi::Handle();
    for (intptr_t i = 0; i < deopt_infos_.length(); i++) {
      offset = Smi::New(deopt_infos_[i]->pc_offset());
      info = deopt_infos_[i]->CreateDeoptInfo(this, &builder, array);
      reason_and_flags = DeoptTable::EncodeReasonAndFlags(
          deopt_infos_[i]->reason(), deopt_infos_[i]->flags());
      DeoptTable::SetEntry(array, i, offset, info, reason_and_flags);
    }
    return array.ptr();
  }
}
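The table built above is a flat array holding, per deopt point, a PC offset, the serialized deopt instructions, and a Smi packing the reason with the flags. A sketch of the layout implied by `SizeFor`/`SetEntry` (three logical slots per entry is an assumption here; the exact encoding is internal to the VM):

#include <cstdint>
#include <vector>

// One logical row of the deopt table, modeled as a struct instead of
// consecutive slots of a flat VM Array.
struct DeoptTableRow {
  intptr_t pc_offset;         // where in the code the deopt can happen
  std::vector<uint8_t> info;  // serialized deopt instructions
  uint32_t reason_and_flags;  // packed DeoptReasonId + flags
};

constexpr intptr_t kSlotsPerEntry = 3;  // assumed entry width

intptr_t SizeForModel(intptr_t num_entries) {
  return num_entries * kSlotsPerEntry;  // analogue of DeoptTable::SizeFor
}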

◆ current_block()

BlockEntryInstr * dart::FlowGraphCompiler::current_block ( ) const
inline

Definition at line 420 of file flow_graph_compiler.h.

{ return current_block_; }

◆ current_instruction()

Instruction * dart::FlowGraphCompiler::current_instruction ( ) const
inline

Definition at line 423 of file flow_graph_compiler.h.

{ return current_instruction_; }

◆ CurrentTryIndex()

intptr_t dart::FlowGraphCompiler::CurrentTryIndex ( ) const
inline

Definition at line 878 of file flow_graph_compiler.h.

{
  if (current_block_ == nullptr) {
    return kInvalidTryIndex;
  }
  return current_block_->try_index();
}

◆ deopt_id_to_ic_data()

const ZoneGrowableArray< const ICData * > & dart::FlowGraphCompiler::deopt_id_to_ic_data ( ) const
inline

Definition at line 909 of file flow_graph_compiler.h.

{
  return *deopt_id_to_ic_data_;
}

◆ dispatch_table_call_targets()

const GrowableArray< const compiler::TableSelector * > & dart::FlowGraphCompiler::dispatch_table_call_targets ( ) const
inline

Definition at line 407 of file flow_graph_compiler.h.

{
  return dispatch_table_call_targets_;
}

◆ double_class()

const Class & dart::FlowGraphCompiler::double_class ( ) const
inline

Definition at line 845 of file flow_graph_compiler.h.

{ return double_class_; }

◆ edge_counters_array()

ArrayPtr dart::FlowGraphCompiler::edge_counters_array ( ) const
inline

Definition at line 920 of file flow_graph_compiler.h.

{ return edge_counters_array_.ptr(); }

◆ EmitBoolTest()

Condition dart::FlowGraphCompiler::EmitBoolTest ( Register  value,
BranchLabels  labels,
bool  invert 
)

◆ EmitCallsiteMetadata()

void dart::FlowGraphCompiler::EmitCallsiteMetadata ( const InstructionSource &  source,
intptr_t  deopt_id,
UntaggedPcDescriptors::Kind  kind,
LocationSummary *  locs,
Environment *  env 
)

Definition at line 469 of file flow_graph_compiler.cc.

{
  AddCurrentDescriptor(kind, deopt_id, source);
  RecordSafepoint(locs);
  RecordCatchEntryMoves(env);
  if ((deopt_id != DeoptId::kNone) && !FLAG_precompiled_mode) {
    // Marks either the continuation point in unoptimized code or the
    // deoptimization point in optimized code, after call.
    if (env != nullptr) {
      // Note that we may lazy-deopt to the same IR instruction in unoptimized
      // code or to another IR instruction (e.g. if LICM hoisted an instruction
      // it will lazy-deopt to a Goto).
      // If we happen to deopt to the beginning of an instruction in unoptimized
      // code, we'll use the before deopt-id, otherwise the after deopt-id.
      const intptr_t dest_deopt_id = env->LazyDeoptToBeforeDeoptId()
                                         ? deopt_id
                                         : DeoptId::ToDeoptAfter(deopt_id);
      AddDeoptIndexAtCall(dest_deopt_id, env);
    } else {
      const intptr_t deopt_id_after = DeoptId::ToDeoptAfter(deopt_id);
      // Add deoptimization continuation point after the call and before the
      // arguments are removed.
      AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, deopt_id_after,
                           source);
    }
  }
}
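The before/after split is the crux: lazy deoptimization can land either at the call instruction itself or at its continuation. A worked model of the destination choice, assuming (per `deopt_id.h`) that the "after" id follows directly from the "before" id:

#include <cstdint>

constexpr intptr_t kNoneDeoptId = -1;  // assumed DeoptId::kNone value

// Assumed encoding of DeoptId::ToDeoptAfter: the continuation point gets
// the id immediately following the call's own deopt id.
intptr_t ToDeoptAfterModel(intptr_t deopt_id) { return deopt_id + 1; }

// Mirrors the branch above: deopt to the instruction (before id) or to
// its continuation (after id), depending on the environment's flag.
intptr_t DestinationDeoptId(intptr_t deopt_id, bool lazy_deopt_to_before) {
  return lazy_deopt_to_before ? deopt_id : ToDeoptAfterModel(deopt_id);
}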

◆ EmitCallToStub()

void dart::FlowGraphCompiler::EmitCallToStub ( const Code &  stub,
ObjectPool::SnapshotBehavior  snapshot_behavior = compiler::ObjectPoolBuilderEntry::kSnapshotable 
)

◆ EmitComment()

void dart::FlowGraphCompiler::EmitComment ( Instruction *  instr)

Definition at line 1621 of file flow_graph_compiler.cc.

{
#if defined(INCLUDE_IL_PRINTER)
  char buffer[256];
  BufferFormatter f(buffer, sizeof(buffer));
  instr->PrintTo(&f);
  assembler()->Comment("%s", buffer);
#endif  // defined(INCLUDE_IL_PRINTER)
}

◆ EmitDispatchTableCall()

void dart::FlowGraphCompiler::EmitDispatchTableCall ( int32_t  selector_offset,
const Array &  arguments_descriptor 
)

◆ EmitDropArguments()

void dart::FlowGraphCompiler::EmitDropArguments ( intptr_t  count)

Definition at line 2092 of file flow_graph_compiler.cc.

{
  if (!is_optimizing()) {
    __ Drop(count);
  }
}

◆ EmitEdgeCounter()

void dart::FlowGraphCompiler::EmitEdgeCounter ( intptr_t  edge_id)

◆ EmitEqualityRegConstCompare()

Condition dart::FlowGraphCompiler::EmitEqualityRegConstCompare ( Register  reg,
const Object &  obj,
bool  needs_number_check,
const InstructionSource &  source,
intptr_t  deopt_id 
)

◆ EmitEqualityRegRegCompare()

Condition dart::FlowGraphCompiler::EmitEqualityRegRegCompare ( Register  left,
Register  right,
bool  needs_number_check,
const InstructionSource &  source,
intptr_t  deopt_id 
)

◆ EmitFunctionEntrySourcePositionDescriptorIfNeeded()

void dart::FlowGraphCompiler::EmitFunctionEntrySourcePositionDescriptorIfNeeded ( )

Definition at line 612 of file flow_graph_compiler.cc.

{
  // When unwinding async stacks we might produce frames which correspond
  // to future listeners which are going to be called when the future completes.
  // These listeners are not yet called and thus their frame pc_offset is set
  // to 0 - which does not actually correspond to any call- or yield- site
  // inside the code object. Nevertheless we would like to be able to
  // produce proper position information for it when symbolizing the stack.
  // To achieve that in AOT mode (where we don't actually have
  // |Function::token_pos| available) we instead emit an artificial descriptor
  // at the very beginning of the function.
  if (FLAG_precompiled_mode && flow_graph().function().IsClosureFunction()) {
    code_source_map_builder_->WriteFunctionEntrySourcePosition(
        InstructionSource(flow_graph().function().token_pos()));
  }
}

◆ EmitInstanceCallAOT()

void dart::FlowGraphCompiler::EmitInstanceCallAOT ( const ICData &  ic_data,
intptr_t  deopt_id,
const InstructionSource &  source,
LocationSummary *  locs,
Code::EntryKind  entry_kind = Code::EntryKind::kNormal,
bool  receiver_can_be_smi = true 
)

◆ EmitInstanceCallJIT()

void dart::FlowGraphCompiler::EmitInstanceCallJIT ( const Code &  stub,
const ICData &  ic_data,
intptr_t  deopt_id,
const InstructionSource &  source,
LocationSummary *  locs,
Code::EntryKind  entry_kind 
)

◆ EmitJumpToStub()

void dart::FlowGraphCompiler::EmitJumpToStub ( const Code &  stub)

◆ EmitMegamorphicInstanceCall() [1/2]

void dart::FlowGraphCompiler::EmitMegamorphicInstanceCall ( const ICData &  icdata,
intptr_t  deopt_id,
const InstructionSource &  source,
LocationSummary *  locs 
)
inline

Definition at line 691 of file flow_graph_compiler.h.

{
  const String& name = String::Handle(icdata.target_name());
  const Array& arguments_descriptor =
      Array::Handle(icdata.arguments_descriptor());
  EmitMegamorphicInstanceCall(name, arguments_descriptor, deopt_id, source,
                              locs);
}

◆ EmitMegamorphicInstanceCall() [2/2]

void dart::FlowGraphCompiler::EmitMegamorphicInstanceCall ( const String &  function_name,
const Array &  arguments_descriptor,
intptr_t  deopt_id,
const InstructionSource &  source,
LocationSummary *  locs 
)

◆ EmitMove()

void dart::FlowGraphCompiler::EmitMove ( Location  dst,
Location  src,
TemporaryRegisterAllocator *  temp 
)

◆ EmitMoveConst()

void dart::FlowGraphCompiler::EmitMoveConst ( const compiler::ffi::NativeLocation &  dst,
Location  src,
Representation  src_type,
TemporaryRegisterAllocator *  temp 
)

Definition at line 3472 of file flow_graph_compiler.cc.

{
  ASSERT(src.IsConstant() || src.IsPairLocation());
  const auto& dst_type = dst.payload_type();
  Register scratch = kNoRegister;
  if (dst.IsExpressibleAsLocation() &&
      dst_type.IsExpressibleAsRepresentation() &&
      dst_type.AsRepresentationOverApprox(zone_) == src_type) {
    // We can directly emit the const in the right place and representation.
    const Location dst_loc = dst.AsLocation();
    assembler()->Comment("dst.IsExpressibleAsLocation() %s",
                         dst_loc.ToCString());
    EmitMove(dst_loc, src, temp);
  } else {
    // We need an intermediate location.
    Location intermediate;
    if (dst_type.IsInt()) {
      if (TMP == kNoRegister) {
        scratch = temp->AllocateTemporary();
        intermediate = Location::RegisterLocation(scratch);
      } else {
        intermediate = Location::RegisterLocation(TMP);
      }
    } else {
      ASSERT(dst_type.IsFloat());
      intermediate = Location::FpuRegisterLocation(FpuTMP);
    }
    assembler()->Comment("constant using intermediate: %s",
                         intermediate.ToCString());

    if (src.IsPairLocation()) {
      for (intptr_t i : {0, 1}) {
        const Representation src_type_split =
            compiler::ffi::NativePrimitiveType::FromRepresentation(zone_,
                                                                   src_type)
                .Split(zone_, i)
                .AsRepresentation();
        const auto& intermediate_native =
            compiler::ffi::NativeLocation::FromLocation(zone_, intermediate,
                                                        src_type_split);
        EmitMove(intermediate, src.AsPairLocation()->At(i), temp);
        EmitNativeMove(dst.Split(zone_, 2, i), intermediate_native, temp);
      }
    } else {
      const auto& intermediate_native =
          compiler::ffi::NativeLocation::FromLocation(zone_, intermediate,
                                                      src_type);
      EmitMove(intermediate, src, temp);
      EmitNativeMove(dst, intermediate_native, temp);
    }

    if (scratch != kNoRegister) {
      temp->ReleaseTemporary();
    }
  }
  return;
}

◆ EmitMoveFromNative()

void dart::FlowGraphCompiler::EmitMoveFromNative ( Location  dst_loc,
Representation  dst_type,
const compiler::ffi::NativeLocation &  src,
TemporaryRegisterAllocator *  temp 
)

Definition at line 3445 of file flow_graph_compiler.cc.

{
  if (dst_loc.IsPairLocation()) {
    for (intptr_t i : {0, 1}) {
      const auto& dest_split = compiler::ffi::NativeLocation::FromPairLocation(
          zone_, dst_loc, dst_type, i);
      EmitNativeMove(dest_split, src.Split(zone_, 2, i), temp);
    }
  } else {
    const auto& dst =
        compiler::ffi::NativeLocation::FromLocation(zone_, dst_loc, dst_type);
    // Deal with sign mismatch caused by lack of kUnboxedUint64 representation.
    if (dst_type == kUnboxedInt64 &&
        src.container_type().AsPrimitive().representation() ==
            compiler::ffi::kUint64) {
      EmitNativeMove(dst.WithOtherNativeType(zone_, src.container_type(),
                                             src.container_type()),
                     src, temp);
    } else {
      EmitNativeMove(dst, src, temp);
    }
  }
}

◆ EmitMoveToNative()

void dart::FlowGraphCompiler::EmitMoveToNative ( const compiler::ffi::NativeLocation &  dst,
Location  src_loc,
Representation  src_type,
TemporaryRegisterAllocator *  temp 
)

Definition at line 3417 of file flow_graph_compiler.cc.

{
  if (src_loc.IsPairLocation()) {
    for (intptr_t i : {0, 1}) {
      const auto& src_split = compiler::ffi::NativeLocation::FromPairLocation(
          zone_, src_loc, src_type, i);
      EmitNativeMove(dst.Split(zone_, 2, i), src_split, temp);
    }
  } else {
    const auto& src =
        compiler::ffi::NativeLocation::FromLocation(zone_, src_loc, src_type);
    // Deal with sign mismatch caused by lack of kUnboxedUint64 representation.
    if (src_type == kUnboxedInt64 &&
        dst.container_type().AsPrimitive().representation() ==
            compiler::ffi::kUint64) {
      EmitNativeMove(dst,
                     src.WithOtherNativeType(zone_, dst.container_type(),
                                             dst.container_type()),
                     temp);
    } else {
      EmitNativeMove(dst, src, temp);
    }
  }
}

◆ EmitNativeMove()

void dart::FlowGraphCompiler::EmitNativeMove ( const compiler::ffi::NativeLocation &  dst,
const compiler::ffi::NativeLocation &  src,
TemporaryRegisterAllocator *  temp 
)

Definition at line 3287 of file flow_graph_compiler.cc.

{
  if (destination.IsBoth()) {
    // Copy to both.
    const auto& both = destination.AsBoth();
    EmitNativeMove(both.location(0), source, temp);
    EmitNativeMove(both.location(1), source, temp);
    return;
  }
  if (source.IsBoth()) {
    // Copy from one of both.
    const auto& both = source.AsBoth();
    EmitNativeMove(destination, both.location(0), temp);
    return;
  }

  const auto& src_payload_type = source.payload_type();
  const auto& dst_payload_type = destination.payload_type();
  const auto& src_container_type = source.container_type();
  const auto& dst_container_type = destination.container_type();
  const intptr_t src_payload_size = src_payload_type.SizeInBytes();
  const intptr_t dst_payload_size = dst_payload_type.SizeInBytes();
  const intptr_t src_container_size = src_container_type.SizeInBytes();
  const intptr_t dst_container_size = dst_container_type.SizeInBytes();

  // This function does not know how to do larger mem copy moves yet.
  ASSERT(src_payload_type.IsPrimitive());
  ASSERT(dst_payload_type.IsPrimitive());

  // This function does not deal with sign conversions yet.
  ASSERT(src_payload_type.IsSigned() == dst_payload_type.IsSigned());

  // If the location, payload, and container are equal, we're done.
  if (source.Equals(destination) && src_payload_type.Equals(dst_payload_type) &&
      src_container_type.Equals(dst_container_type)) {
#if defined(TARGET_ARCH_RISCV64)
    // Except we might still need to adjust for the difference between C's
    // representation of uint32 (sign-extended to 64 bits) and Dart's
    // (zero-extended).
    EmitNativeMoveArchitecture(destination, source);
#endif
    return;
  }

  // Solve discrepancies between container size and payload size.
  if (src_payload_type.IsInt() && dst_payload_type.IsInt() &&
      (src_payload_size != src_container_size ||
       dst_payload_size != dst_container_size)) {
    if (source.IsStack() && src_container_size > src_payload_size) {
      // Shrink loads since all loads are extending.
      return EmitNativeMove(
          destination,
          source.WithOtherNativeType(zone_, src_payload_type, src_payload_type),
          temp);
    }
    if (src_payload_size <= dst_payload_size &&
        src_container_size >= dst_container_size) {
      // The upper bits of the source are already properly sign or zero
      // extended, so just copy the required amount of bits.
      return EmitNativeMove(destination.WithOtherNativeType(
                                zone_, dst_container_type, dst_container_type),
                            source.WithOtherNativeType(
                                zone_, dst_container_type, dst_container_type),
                            temp);
    }
    if (src_payload_size >= dst_payload_size &&
        dst_container_size > dst_payload_size) {
      // The upper bits of the source are not properly sign or zero extended
      // to be copied to the target, so regard the source as smaller.
      return EmitNativeMove(
          destination.WithOtherNativeType(zone_, dst_container_type,
                                          dst_container_type),
          source.WithOtherNativeType(zone_, dst_payload_type, dst_payload_type),
          temp);
    }
    UNREACHABLE();
  }
  ASSERT(src_payload_size == src_container_size);
  ASSERT(dst_payload_size == dst_container_size);

  // Split moves that are larger than kWordSize, these require separate
  // instructions on all architectures.
  if (compiler::target::kWordSize == 4 && src_container_size == 8 &&
      dst_container_size == 8 && !source.IsFpuRegisters() &&
      !destination.IsFpuRegisters()) {
    // TODO(40209): If this is stack to stack, we could use FpuTMP.
    // Test the impact on code size and speed.
    EmitNativeMove(destination.Split(zone_, 2, 0), source.Split(zone_, 2, 0),
                   temp);
    EmitNativeMove(destination.Split(zone_, 2, 1), source.Split(zone_, 2, 1),
                   temp);
    return;
  }

  // Split moves from stack to stack, none of the architectures provides
  // memory to memory move instructions.
  if (source.IsStack() && destination.IsStack()) {
    Register scratch = temp->AllocateTemporary();
    ASSERT(scratch != kNoRegister);
#if defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
    ASSERT(scratch != TMP);   // TMP is an argument register.
    ASSERT(scratch != TMP2);  // TMP2 is an argument register.
#endif
    const auto& intermediate =
        *new (zone_) compiler::ffi::NativeRegistersLocation(
            zone_, dst_payload_type, dst_container_type, scratch);
    EmitNativeMove(intermediate, source, temp);
    EmitNativeMove(destination, intermediate, temp);
    temp->ReleaseTemporary();
    return;
  }

  const bool sign_or_zero_extend = dst_container_size > src_container_size;

  // No architecture supports sign extending with memory as destination.
  if (sign_or_zero_extend && destination.IsStack()) {
    ASSERT(source.IsRegisters());
    const auto& intermediate =
        source.WithOtherNativeType(zone_, dst_payload_type, dst_container_type);
    EmitNativeMove(intermediate, source, temp);
    EmitNativeMove(destination, intermediate, temp);
    return;
  }

  // Do the simple architecture specific moves.
  EmitNativeMoveArchitecture(destination, source);
}
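The recurring pattern above is decomposition: moves wider than a machine word, or stack-to-stack moves, are split into legal primitive moves. A self-contained sketch of the word-size split for a hypothetical 32-bit target:

#include <cstdint>
#include <cstdio>

constexpr intptr_t kWordSizeModel = 4;  // assumed 32-bit target

void MoveWord(intptr_t dst, intptr_t src) {  // stand-in word-sized move
  std::printf("move word [%ld] <- [%ld]\n", (long)dst, (long)src);
}

// An 8-byte payload on a 4-byte-word machine becomes two word moves,
// echoing the Split(zone_, 2, 0) / Split(zone_, 2, 1) calls above.
void EmitMoveModel(intptr_t dst, intptr_t src, intptr_t size) {
  if (size > kWordSizeModel) {
    EmitMoveModel(dst, src, kWordSizeModel);  // low half
    EmitMoveModel(dst + kWordSizeModel, src + kWordSizeModel,
                  size - kWordSizeModel);     // high half
    return;
  }
  MoveWord(dst, src);
}

int main() {
  EmitMoveModel(0, 16, 8);  // prints two word-sized moves
}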

◆ EmitOptimizedInstanceCall()

void dart::FlowGraphCompiler::EmitOptimizedInstanceCall ( const Code &  stub,
const ICData &  ic_data,
intptr_t  deopt_id,
const InstructionSource &  source,
LocationSummary *  locs,
Code::EntryKind  entry_kind = Code::EntryKind::kNormal 
)

◆ EmitOptimizedStaticCall()

void dart::FlowGraphCompiler::EmitOptimizedStaticCall ( const Function &  function,
const Array &  arguments_descriptor,
intptr_t  size_with_type_args,
intptr_t  deopt_id,
const InstructionSource &  source,
LocationSummary *  locs,
Code::EntryKind  entry_kind = Code::EntryKind::kNormal 
)

◆ EmitPolymorphicInstanceCall()

void dart::FlowGraphCompiler::EmitPolymorphicInstanceCall ( const PolymorphicInstanceCallInstr *  call,
const CallTargets &  targets,
ArgumentsInfo  args_info,
intptr_t  deopt_id,
const InstructionSource &  source,
LocationSummary *  locs,
bool  complete,
intptr_t  total_call_count,
bool  receiver_can_be_smi = true 
)

Definition at line 2051 of file flow_graph_compiler.cc.

{
  ASSERT(call != nullptr);
  if (FLAG_polymorphic_with_deopt) {
    compiler::Label* deopt =
        AddDeoptStub(deopt_id, ICData::kDeoptPolymorphicInstanceCallTestFail);
    compiler::Label ok;
    EmitTestAndCall(targets, call->function_name(), args_info,
                    deopt,  // No cid match.
                    &ok,    // Found cid.
                    deopt_id, source, locs, complete, total_ic_calls,
                    call->entry_kind());
    assembler()->Bind(&ok);
  } else {
    if (complete) {
      compiler::Label ok;
      EmitTestAndCall(targets, call->function_name(), args_info,
                      nullptr,  // No cid match.
                      &ok,      // Found cid.
                      deopt_id, source, locs, true, total_ic_calls,
                      call->entry_kind());
      assembler()->Bind(&ok);
    } else {
      const ICData& unary_checks =
          ICData::ZoneHandle(zone(), call->ic_data()->AsUnaryClassChecks());
      EmitInstanceCallAOT(unary_checks, deopt_id, source, locs,
                          call->entry_kind(), receiver_can_be_smi);
    }
  }
}

◆ EmitPrologue()

void dart::FlowGraphCompiler::EmitPrologue ( )

◆ EmitTailCallToStub()

void dart::FlowGraphCompiler::EmitTailCallToStub ( const Code &  stub)

◆ EmitTestAndCall()

void dart::FlowGraphCompiler::EmitTestAndCall ( const CallTargets &  targets,
const String &  function_name,
ArgumentsInfo  args_info,
compiler::Label *  failed,
compiler::Label *  match_found,
intptr_t  deopt_id,
const InstructionSource &  source_index,
LocationSummary *  locs,
bool  complete,
intptr_t  total_ic_calls,
Code::EntryKind  entry_kind = Code::EntryKind::kNormal 
)

Definition at line 2109 of file flow_graph_compiler.cc.

{
  ASSERT(is_optimizing());
  ASSERT(complete || (failed != nullptr));  // Complete calls can't fail.

  const Array& arguments_descriptor =
      Array::ZoneHandle(zone(), args_info.ToArgumentsDescriptor());
  EmitTestAndCallLoadReceiver(args_info.count_without_type_args,
                              arguments_descriptor);

  const int kNoCase = -1;
  int smi_case = kNoCase;
  int which_case_to_skip = kNoCase;

  const int length = targets.length();
  ASSERT(length > 0);
  int non_smi_length = length;

  // Find out if one of the classes in one of the cases is the Smi class. We
  // will be handling that specially.
  for (int i = 0; i < length; i++) {
    const intptr_t start = targets[i].cid_start;
    if (start > kSmiCid) continue;
    const intptr_t end = targets[i].cid_end;
    if (end >= kSmiCid) {
      smi_case = i;
      if (start == kSmiCid && end == kSmiCid) {
        // If this case has only the Smi class then we won't need to emit it at
        // all later.
        which_case_to_skip = i;
        non_smi_length--;
      }
      break;
    }
  }

  if (smi_case != kNoCase) {
    compiler::Label after_smi_test;
    // If the call is complete and there are no other possible receiver
    // classes - then receiver can only be a smi value and we don't need
    // to check if it is a smi.
    if (!(complete && non_smi_length == 0)) {
      EmitTestAndCallSmiBranch(non_smi_length == 0 ? failed : &after_smi_test,
                               /* jump_if_smi= */ false);
    }

    // Do not use the code from the function, but let the code be patched so
    // that we can record the outgoing edges to other code.
    const Function& function = *targets.TargetAt(smi_case)->target;
    GenerateStaticDartCall(deopt_id, source_index,
                           UntaggedPcDescriptors::kOther, locs, function,
                           entry_kind);
    EmitDropArguments(args_info.size_with_type_args);
    if (match_found != nullptr) {
      __ Jump(match_found);
    }
    __ Bind(&after_smi_test);
  } else {
    if (!complete) {
      // Smi is not a valid class.
      EmitTestAndCallSmiBranch(failed, /* jump_if_smi = */ true);
    }
  }

  if (non_smi_length == 0) {
    // If non_smi_length is 0 then only a Smi check was needed; the Smi check
    // above will fail if there was only one check and receiver is not Smi.
    return;
  }

  bool add_megamorphic_call = false;
  int bias = 0;

  // Value is not Smi.
  EmitTestAndCallLoadCid(EmitTestCidRegister());

  int last_check = which_case_to_skip == length - 1 ? length - 2 : length - 1;

  for (intptr_t i = 0; i < length; i++) {
    if (i == which_case_to_skip) continue;
    const bool is_last_check = (i == last_check);
    const int count = targets.TargetAt(i)->count;
    if (!is_last_check && !complete && count < (total_ic_calls >> 5)) {
      // This case is hit too rarely to be worth writing class-id checks inline
      // for. Note that we can't do this for calls with only one target because
      // the type propagator may have made use of that and expects a deopt if
      // a new class is seen at this calls site. See IsMonomorphic.
      add_megamorphic_call = true;
      break;
    }
    compiler::Label next_test;
    if (!complete || !is_last_check) {
      bias = EmitTestAndCallCheckCid(assembler(),
                                     is_last_check ? failed : &next_test,
                                     EmitTestCidRegister(), targets[i], bias,
                                     /*jump_on_miss =*/true);
    }
    // Do not use the code from the function, but let the code be patched so
    // that we can record the outgoing edges to other code.
    const Function& function = *targets.TargetAt(i)->target;
    GenerateStaticDartCall(deopt_id, source_index,
                           UntaggedPcDescriptors::kOther, locs, function,
                           entry_kind);
    EmitDropArguments(args_info.size_with_type_args);
    if (!is_last_check || add_megamorphic_call) {
      __ Jump(match_found);
    }
    __ Bind(&next_test);
  }
  if (add_megamorphic_call) {
    EmitMegamorphicInstanceCall(function_name, arguments_descriptor, deopt_id,
                                source_index, locs);
  }
}
void EmitDropArguments(intptr_t count)
void GenerateStaticDartCall(intptr_t deopt_id, const InstructionSource &source, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, const Function &target, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
static int EmitTestAndCallCheckCid(compiler::Assembler *assembler, compiler::Label *label, Register class_id_reg, const CidRangeValue &range, int bias, bool jump_on_miss=true)
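The cutoff count < (total_ic_calls >> 5) above emits a case inline only when it covers at least 1/32 (roughly 3%) of the observed calls; anything rarer tips the whole remainder into a megamorphic call. A standalone model of that decision, with plain integers in place of the call-site data (an illustrative sketch, not VM code):

#include <cassert>

// True if the case with `count` observed hits gets inline cid checks,
// false if it (and everything after it) becomes a megamorphic call.
bool EmitsInlineCheck(int count, int total_ic_calls, bool is_last_check,
                      bool complete) {
  // Mirrors: !is_last_check && !complete && count < (total_ic_calls >> 5)
  return is_last_check || complete || count >= (total_ic_calls >> 5);
}

int main() {
  // With 1000 total calls the threshold is 1000 / 32 = 31 hits.
  assert(EmitsInlineCheck(31, 1000, false, false));
  assert(!EmitsInlineCheck(30, 1000, false, false));
  // The last check and complete calls always stay inline.
  assert(EmitsInlineCheck(0, 1000, true, false));
}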

◆ EmitTestAndCallCheckCid()

int dart::FlowGraphCompiler::EmitTestAndCallCheckCid ( compiler::Assembler *  assembler,
compiler::Label *  label,
Register  class_id_reg,
const CidRangeValue &  range,
int  bias,
bool  jump_on_miss = true 
)
static

Definition at line 2291 of file flow_graph_compiler.cc.

2296 {
2297 const intptr_t cid_start = range.cid_start;
2298 if (range.IsSingleCid()) {
2299 assembler->CompareImmediate(class_id_reg, cid_start - bias);
2300 assembler->BranchIf(jump_on_miss ? NOT_EQUAL : EQUAL, label);
2301 } else {
2302 assembler->AddImmediate(class_id_reg, bias - cid_start);
2303 bias = cid_start;
2304 assembler->CompareImmediate(class_id_reg, range.Extent());
2305 assembler->BranchIf(jump_on_miss ? UNSIGNED_GREATER : UNSIGNED_LESS_EQUAL,
2306 label);
2307 }
2308 return bias;
2309}
void CompareImmediate(Register rn, int32_t value, Condition cond)
void BranchIf(Condition condition, Label *label, JumpDistance distance=kFarJump)
void AddImmediate(Register rd, int32_t value, Condition cond=AL)
@ UNSIGNED_GREATER
@ UNSIGNED_LESS_EQUAL
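For a multi-cid range the check is a subtract-and-unsigned-compare: after AddImmediate(class_id_reg, bias - cid_start) the register holds cid - cid_start, one unsigned comparison against range.Extent() tests membership, and cid_start becomes the bias for the next range. A runnable model of that arithmetic, with a plain int standing in for the register (a sketch, not VM code):

#include <cassert>

struct Range {
  int cid_start, cid_end;
  bool IsSingleCid() const { return cid_start == cid_end; }
  int Extent() const { return cid_end - cid_start; }
};

// Mutates reg the way the emitted code mutates the class-id register,
// reports membership in hit, and returns the new bias.
int CheckCid(int& reg, const Range& range, int bias, bool& hit) {
  if (range.IsSingleCid()) {
    hit = (reg == range.cid_start - bias);  // CompareImmediate; reg untouched
    return bias;
  }
  reg += bias - range.cid_start;            // AddImmediate
  hit = static_cast<unsigned>(reg) <= static_cast<unsigned>(range.Extent());
  return range.cid_start;                   // new bias
}

int main() {
  bool hit = false;
  int reg = 7;                               // receiver cid
  int bias = CheckCid(reg, Range{5, 8}, 0, hit);
  assert(hit && reg == 2 && bias == 5);      // 7 lies inside [5, 8]
  // A later range reuses the biased register: checking [10, 12] costs just
  // one more AddImmediate of bias - 10 = -5.
  bias = CheckCid(reg, Range{10, 12}, bias, hit);
  assert(!hit && bias == 10);
}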

◆ EmitYieldPositionMetadata()

void dart::FlowGraphCompiler::EmitYieldPositionMetadata ( const InstructionSource &  source,
intptr_t  yield_index 
)

Definition at line 500 of file flow_graph_compiler.cc.

502 {
503 AddDescriptor(UntaggedPcDescriptors::kOther, assembler()->CodeSize(),
504 DeoptId::kNone, source, CurrentTryIndex(), yield_index);
505}

◆ EndCodeSourceRange()

void dart::FlowGraphCompiler::EndCodeSourceRange ( const InstructionSource &  source)

Definition at line 1999 of file flow_graph_compiler.cc.

1999 {
2000 code_source_map_builder_->EndCodeSourceRange(assembler()->CodeSize(), source);
2001}
void EndCodeSourceRange(int32_t pc_offset, const InstructionSource &source)

◆ EnterIntrinsicMode()

void dart::FlowGraphCompiler::EnterIntrinsicMode ( )

◆ ExitIntrinsicMode()

void dart::FlowGraphCompiler::ExitIntrinsicMode ( )

◆ ExtraStackSlotsOnOsrEntry()

intptr_t dart::FlowGraphCompiler::ExtraStackSlotsOnOsrEntry ( ) const

Definition at line 824 of file flow_graph_compiler.cc.

824 {
825 ASSERT(flow_graph().IsCompiledForOsr());
826 const intptr_t stack_depth =
827 flow_graph().graph_entry()->osr_entry()->stack_depth();
828 const intptr_t num_stack_locals = flow_graph().num_stack_locals();
829 return StackSize() - stack_depth - num_stack_locals;
830}
intptr_t stack_depth() const
Definition il.h:1750
GraphEntryInstr * graph_entry() const
Definition flow_graph.h:268
intptr_t num_stack_locals() const
Definition flow_graph.h:161
OsrEntryInstr * osr_entry() const
Definition il.h:1992
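In other words, the result is whatever the optimized frame needs beyond the slots the OSR entry state already accounts for. A worked example with hypothetical numbers:

// Hypothetical values, for illustration only:
constexpr int kStackSize = 10;      // spill slots of the optimized frame
constexpr int kOsrStackDepth = 3;   // expression stack depth at the OSR point
constexpr int kNumStackLocals = 4;  // the function's stack locals
// The OSR frame already provides depth + locals, so the prologue must grow
// the frame by the difference:
constexpr int kExtraSlots = kStackSize - kOsrStackDepth - kNumStackLocals;
static_assert(kExtraSlots == 3, "10 - 3 - 4");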

◆ FinalizeCatchEntryMovesMap()

void dart::FlowGraphCompiler::FinalizeCatchEntryMovesMap ( const Code &  code)

Definition at line 1365 of file flow_graph_compiler.cc.

1365 {
1366#if defined(DART_PRECOMPILER)
1367 if (FLAG_precompiled_mode) {
1368 TypedData& maps = TypedData::Handle(
1369 catch_entry_moves_maps_builder_->FinalizeCatchEntryMovesMap());
1370 code.set_catch_entry_moves_maps(maps);
1371 return;
1372 }
1373#endif
1374 code.set_num_variables(flow_graph().variable_count());
1375}

◆ FinalizeCodeSourceMap()

void dart::FlowGraphCompiler::FinalizeCodeSourceMap ( const Code &  code)

Definition at line 1412 of file flow_graph_compiler.cc.

1412 {
1413 const Array& inlined_id_array =
1414 Array::Handle(zone(), code_source_map_builder_->InliningIdToFunction());
1415 code.set_inlined_id_to_function(inlined_id_array);
1416
1417 const CodeSourceMap& map =
1418 CodeSourceMap::Handle(code_source_map_builder_->Finalize());
1419 code.set_code_source_map(map);
1420
1421#if defined(DEBUG)
1422 // Force simulation through the last pc offset. This checks we can decode
1423 // the whole CodeSourceMap without hitting an unknown opcode, stack underflow,
1424 // etc.
1425 GrowableArray<const Function*> fs;
1426 GrowableArray<TokenPosition> tokens;
1427 code.GetInlinedFunctionsAtInstruction(code.Size() - 1, &fs, &tokens);
1428#endif
1429}

◆ FinalizeExceptionHandlers()

void dart::FlowGraphCompiler::FinalizeExceptionHandlers ( const Code &  code)

Definition at line 1279 of file flow_graph_compiler.cc.

1279 {
1280 ASSERT(exception_handlers_list_ != nullptr);
1281 const ExceptionHandlers& handlers = ExceptionHandlers::Handle(
1282 exception_handlers_list_->FinalizeExceptionHandlers(code.PayloadStart()));
1283 code.set_exception_handlers(handlers);
1284}
ExceptionHandlersPtr FinalizeExceptionHandlers(uword entry_point) const

◆ FinalizePcDescriptors()

void dart::FlowGraphCompiler::FinalizePcDescriptors ( const Code &  code)

Definition at line 1286 of file flow_graph_compiler.cc.

1286 {
1287 ASSERT(pc_descriptors_list_ != nullptr);
1288 const PcDescriptors& descriptors = PcDescriptors::Handle(
1289 pc_descriptors_list_->FinalizePcDescriptors(code.PayloadStart()));
1290 if (!is_optimizing_) descriptors.Verify(parsed_function_.function());
1291 code.set_pc_descriptors(descriptors);
1292}
PcDescriptorsPtr FinalizePcDescriptors(uword entry_point)

◆ FinalizeStackMaps()

void dart::FlowGraphCompiler::FinalizeStackMaps ( const Code &  code)

Definition at line 1326 of file flow_graph_compiler.cc.

1326 {
1327 ASSERT(compressed_stackmaps_builder_ != nullptr);
1328 // Finalize the compressed stack maps and add it to the code object.
1329 const auto& maps =
1330 CompressedStackMaps::Handle(compressed_stackmaps_builder_->Finalize());
1331 code.set_compressed_stackmaps(maps);
1332}
CompressedStackMapsPtr Finalize() const

◆ FinalizeStaticCallTargetsTable()

void dart::FlowGraphCompiler::FinalizeStaticCallTargetsTable ( const Code &  code)

Definition at line 1377 of file flow_graph_compiler.cc.

1377 {
1378 ASSERT(code.static_calls_target_table() == Array::null());
1379 const auto& calls = static_calls_target_table_;
1380 const intptr_t array_length = calls.length() * Code::kSCallTableEntryLength;
1381 const auto& targets =
1382 Array::Handle(zone(), Array::New(array_length, Heap::kOld));
1383
1384 StaticCallsTable entries(targets);
1385 auto& kind_type_and_offset = Smi::Handle(zone());
1386 for (intptr_t i = 0; i < calls.length(); i++) {
1387 auto entry = calls[i];
1388 kind_type_and_offset =
1389 Smi::New(Code::KindField::encode(entry->call_kind) |
1390 Code::EntryPointField::encode(entry->entry_point) |
1391 Code::OffsetField::encode(entry->offset));
1392 auto view = entries[i];
1393 view.Set<Code::kSCallTableKindAndOffset>(kind_type_and_offset);
1394 const Object* target = nullptr;
1395 if (entry->function != nullptr) {
1396 target = entry->function;
1397 view.Set<Code::kSCallTableFunctionTarget>(*entry->function);
1398 }
1399 if (entry->code != nullptr) {
1400 ASSERT(target == nullptr);
1401 target = entry->code;
1402 view.Set<Code::kSCallTableCodeOrTypeTarget>(*entry->code);
1403 }
1404 if (entry->dst_type != nullptr) {
1405 ASSERT(target == nullptr);
1406 view.Set<Code::kSCallTableCodeOrTypeTarget>(*entry->dst_type);
1407 }
1408 }
1409 code.set_static_calls_target_table(targets);
1410}
intptr_t length() const
static constexpr intptr_t encode(CallKind value)
Definition bitfield.h:167
@ kSCallTableEntryLength
Definition object.h:6957
@ kSCallTableFunctionTarget
Definition object.h:6956
@ kSCallTableCodeOrTypeTarget
Definition object.h:6955
@ kSCallTableKindAndOffset
Definition object.h:6954
static ObjectPtr null()
Definition object.h:433
ArrayOfTuplesView< Code::SCallTableEntry, std::tuple< Smi, Object, Function > > StaticCallsTable
Definition object.h:13520
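Each entry packs the call kind, entry-point kind, and PC offset into a single Smi via bitfields before storing the Function or Code/type target alongside it. A standalone sketch of that pack/unpack pattern (the field widths below are invented for illustration; the real ones live in Code::KindField and its neighbors):

#include <cassert>
#include <cstdint>

// Hypothetical layout: 2 bits of kind, 2 bits of entry point, rest offset.
constexpr uint32_t kKindBits = 2, kEntryBits = 2;

uint32_t Encode(uint32_t kind, uint32_t entry, uint32_t offset) {
  return kind | (entry << kKindBits) | (offset << (kKindBits + kEntryBits));
}

uint32_t DecodeOffset(uint32_t packed) {
  return packed >> (kKindBits + kEntryBits);
}

int main() {
  const uint32_t packed = Encode(/*kind=*/1, /*entry=*/0, /*offset=*/0x40);
  assert(DecodeOffset(packed) == 0x40);  // fields survive the round trip
}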

◆ FinalizeVarDescriptors()

void dart::FlowGraphCompiler::FinalizeVarDescriptors ( const Code &  code)

Definition at line 1334 of file flow_graph_compiler.cc.

1334 {
1335#if defined(PRODUCT)
1336// No debugger: no var descriptors.
1337#else
1338 if (code.is_optimized()) {
1339 // Optimized code does not need variable descriptors. They are
1340 // only stored in the unoptimized version.
1341 code.set_var_descriptors(Object::empty_var_descriptors());
1342 return;
1343 }
1344 LocalVarDescriptors& var_descs = LocalVarDescriptors::Handle();
1345 if (flow_graph().IsIrregexpFunction()) {
1346 // Eager local var descriptors computation for Irregexp function as it is
1347 // complicated to factor out.
1348 // TODO(srdjan): Consider canonicalizing and reusing the local var
1349 // descriptor for IrregexpFunction.
1350 ASSERT(parsed_function().scope() == nullptr);
1351 var_descs = LocalVarDescriptors::New(1);
1352 UntaggedLocalVarDescriptors::VarInfo info;
1353 info.set_kind(UntaggedLocalVarDescriptors::kContextLevel);
1354 info.scope_id = 0;
1355 info.begin_pos = TokenPosition::kMinSource;
1356 info.end_pos = TokenPosition::kMinSource;
1357 info.set_index(compiler::target::frame_layout.FrameSlotForVariable(
1358 parsed_function().current_context_var()));
1359 var_descs.SetVar(0, Symbols::CurrentContextVar(), &info);
1360 }
1361 code.set_var_descriptors(var_descs);
1362#endif
1363}
static LocalVarDescriptorsPtr New(intptr_t num_variables)
Definition object.cc:16181
static const TokenPosition kMinSource

◆ float32x4_class()

const Class & dart::FlowGraphCompiler::float32x4_class ( ) const
inline

Definition at line 847 of file flow_graph_compiler.h.

847{ return float32x4_class_; }

◆ float64x2_class()

const Class & dart::FlowGraphCompiler::float64x2_class ( ) const
inline

Definition at line 848 of file flow_graph_compiler.h.

848{ return float64x2_class_; }

◆ flow_graph()

const FlowGraph & dart::FlowGraphCompiler::flow_graph ( ) const
inline

Definition at line 416 of file flow_graph_compiler.h.

416 {
417 return intrinsic_mode() ? *intrinsic_flow_graph_ : flow_graph_;
418 }

◆ ForcedOptimization()

bool dart::FlowGraphCompiler::ForcedOptimization ( ) const
inline

Definition at line 414 of file flow_graph_compiler.h.

414{ return function().ForceOptimize(); }
bool ForceOptimize() const
Definition object.cc:9075

◆ ForceSlowPathForStackOverflow()

bool dart::FlowGraphCompiler::ForceSlowPathForStackOverflow ( ) const

Definition at line 276 of file flow_graph_compiler.cc.

276 {
277#if !defined(PRODUCT)
278 if (FLAG_stacktrace_every > 0 || FLAG_deoptimize_every > 0 ||
279 FLAG_gc_every > 0 ||
280 (isolate_group()->reload_every_n_stack_overflow_checks() > 0)) {
281 if (!Isolate::IsSystemIsolateGroup(isolate_group())) {
282 return true;
283 }
284 }
285 if (FLAG_stacktrace_filter != nullptr &&
286 strstr(parsed_function().function().ToFullyQualifiedCString(),
287 FLAG_stacktrace_filter) != nullptr) {
288 return true;
289 }
290 if (is_optimizing() && FLAG_deoptimize_filter != nullptr &&
291 strstr(parsed_function().function().ToFullyQualifiedCString(),
292 FLAG_deoptimize_filter) != nullptr) {
293 return true;
294 }
295#endif // !defined(PRODUCT)
296 return false;
297}
static bool IsSystemIsolateGroup(const IsolateGroup *group)
Definition isolate.cc:3559

◆ function()

const Function & dart::FlowGraphCompiler::function ( ) const
inline

Definition at line 402 of file flow_graph_compiler.h.

402{ return parsed_function_.function(); }

◆ GenerateAssertAssignable()

void dart::FlowGraphCompiler::GenerateAssertAssignable ( CompileType *  receiver_type,
const InstructionSource &  source,
intptr_t  deopt_id,
Environment *  env,
const String &  dst_name,
LocationSummary *  locs 
)

Definition at line 2800 of file flow_graph_compiler.cc.

2806 {
2807 ASSERT(!source.token_pos.IsClassifying());
2808 ASSERT(CheckAssertAssignableTypeTestingABILocations(*locs));
2809
2810 // Non-null if we have a constant destination type.
2811 const auto& dst_type =
2812 locs->in(AssertAssignableInstr::kDstTypePos).IsConstant()
2813 ? AbstractType::Cast(
2814 locs->in(AssertAssignableInstr::kDstTypePos).constant())
2815 : Object::null_abstract_type();
2816
2817 if (!dst_type.IsNull()) {
2818 ASSERT(dst_type.IsFinalized());
2819 if (dst_type.IsTopTypeForSubtyping()) return; // No code needed.
2820 }
2821
2822 compiler::Label done;
2823 Register type_reg = TypeTestABI::kDstTypeReg;
2824 // Generate caller-side checks to perform prior to calling the TTS.
2825 if (dst_type.IsNull()) {
2826 __ Comment("AssertAssignable for runtime type");
2827 // kDstTypeReg should already contain the destination type.
2828 } else {
2829 __ Comment("AssertAssignable for compile-time type");
2830 GenerateCallerChecksForAssertAssignable(receiver_type, dst_type, &done);
2831 if (dst_type.IsTypeParameter()) {
2832 // The resolved type parameter is in the scratch register.
2833 type_reg = TypeTestABI::kScratchReg;
2834 }
2835 }
2836
2837 GenerateTTSCall(source, deopt_id, env, type_reg, dst_type, dst_name, locs);
2838 __ Bind(&done);
2839}
bool CheckAssertAssignableTypeTestingABILocations(const LocationSummary &locs)
void GenerateTTSCall(const InstructionSource &source, intptr_t deopt_id, Environment *env, Register reg_with_type, const AbstractType &dst_type, const String &dst_name, LocationSummary *locs)
void GenerateCallerChecksForAssertAssignable(CompileType *receiver_type, const AbstractType &dst_type, compiler::Label *done)
static constexpr Register kScratchReg

◆ GenerateCallerChecksForAssertAssignable()

void dart::FlowGraphCompiler::GenerateCallerChecksForAssertAssignable ( CompileType *  receiver_type,
const AbstractType &  dst_type,
compiler::Label *  done 
)

Definition at line 2905 of file flow_graph_compiler.cc.

2908 {
2909 // Top types should be handled by the caller and cannot reach here.
2910 ASSERT(!dst_type.IsTopTypeForSubtyping());
2911
2912 // Set this to avoid marking the type testing stub for optimization.
2913 bool elide_info = false;
2914 // Call before any return points to set the destination type register and
2915 // mark the destination type TTS as needing optimization, unless it is
2916 // unlikely to be called.
2917 auto output_dst_type = [&]() -> void {
2918 // If we haven't handled the positive case of the type check on the call
2919 // site and we'll be using the TTS of the destination type, we want an
2920 // optimized type testing stub and thus record it in the [TypeUsageInfo].
2921 if (!elide_info) {
2922 if (auto const type_usage_info = thread()->type_usage_info()) {
2923 type_usage_info->UseTypeInAssertAssignable(dst_type);
2924 } else {
2925 ASSERT(!FLAG_precompiled_mode);
2926 }
2927 }
2928 __ LoadObject(TypeTestABI::kDstTypeReg, dst_type);
2929 };
2930
2931 // We can handle certain types and checks very efficiently on the call site,
2932 // meaning those need not be checked within the stubs (which may involve
2933 // a runtime call).
2934
2935 if (dst_type.IsObjectType()) {
2936 // Special case: non-nullable Object.
2937 ASSERT(dst_type.IsNonNullable());
2938 __ CompareObject(TypeTestABI::kInstanceReg, Object::null_object());
2939 __ BranchIf(NOT_EQUAL, done);
2940 // Fall back to type testing stub in caller to throw the exception.
2941 return output_dst_type();
2942 }
2943
2944 // If the int type is assignable to [dst_type] we special case it on the
2945 // caller side!
2946 const Type& int_type = Type::Handle(zone(), Type::IntType());
2947 bool is_non_smi = false;
2948 if (int_type.IsSubtypeOf(dst_type, Heap::kOld)) {
2949 __ BranchIfSmi(TypeTestABI::kInstanceReg, done);
2950 is_non_smi = true;
2951 } else if (!receiver_type->CanBeSmi()) {
2952 is_non_smi = true;
2953 }
2954
2955 if (dst_type.IsTypeParameter()) {
2956 // Special case: Instantiate the type parameter on the caller side, invoking
2957 // the TTS of the corresponding type parameter in the caller.
2958 const TypeParameter& type_param = TypeParameter::Cast(dst_type);
2959 if (!type_param.IsNonNullable()) {
2960 // If the type parameter is nullable when running in strong mode, we need
2961 // to handle null before calling the TTS because the type parameter may be
2962 // instantiated with a non-nullable type, where the TTS rejects null.
2963 __ CompareObject(TypeTestABI::kInstanceReg, Object::null_object());
2964 __ BranchIf(EQUAL, done);
2965 }
2966 const Register kTypeArgumentsReg =
2967 type_param.IsClassTypeParameter()
2968 ? TypeTestABI::kInstantiatorTypeArgumentsReg
2969 : TypeTestABI::kFunctionTypeArgumentsReg;
2970
2971 // Check if type arguments are null, i.e. equivalent to vector of dynamic.
2972 // If so, then the value is guaranteed assignable as dynamic is a top type.
2973 __ CompareObject(kTypeArgumentsReg, Object::null_object());
2974 __ BranchIf(EQUAL, done);
2975 // Put the instantiated type parameter into the scratch register, so its
2976 // TTS can be called by the caller.
2977 __ LoadCompressedFieldFromOffset(
2978 TypeTestABI::kScratchReg, kTypeArgumentsReg,
2979 compiler::target::TypeArguments::type_at_offset(type_param.index()));
2980 return output_dst_type();
2981 }
2982
2983 if (dst_type.IsFunctionType() || dst_type.IsRecordType()) {
2984 return output_dst_type();
2985 }
2986
2987 if (auto const hi = thread()->hierarchy_info()) {
2988 const Class& type_class = Class::Handle(zone(), dst_type.type_class());
2989
2990 if (hi->CanUseSubtypeRangeCheckFor(dst_type)) {
2991 const CidRangeVector& ranges = hi->SubtypeRangesForClass(
2992 type_class,
2993 /*include_abstract=*/false,
2994 /*exclude_null=*/!Instance::NullIsAssignableTo(dst_type));
2995 if (ranges.length() <= kMaxNumberOfCidRangesToTest) {
2996 if (is_non_smi) {
2997 __ LoadClassId(TypeTestABI::kScratchReg, TypeTestABI::kInstanceReg);
2998 } else {
2999 __ LoadClassIdMayBeSmi(TypeTestABI::kScratchReg,
3000 TypeTestABI::kInstanceReg);
3001 }
3002 GenerateCidRangesCheck(assembler(), TypeTestABI::kScratchReg, ranges,
3003 done);
3004 elide_info = true;
3005 } else if (IsListClass(type_class)) {
3006 __ LoadClassIdMayBeSmi(TypeTestABI::kScratchReg,
3007 TypeTestABI::kInstanceReg);
3008 GenerateListTypeCheck(TypeTestABI::kScratchReg, done);
3009 }
3010 }
3011 }
3012 output_dst_type();
3013}
static bool GenerateCidRangesCheck(compiler::Assembler *assembler, Register class_id_reg, const CidRangeVector &cid_ranges, compiler::Label *inside_range_lbl, compiler::Label *outside_range_lbl=nullptr, bool fall_through_if_inside=false)
static constexpr intptr_t kMaxNumberOfCidRangesToTest
void GenerateListTypeCheck(Register kClassIdReg, compiler::Label *is_instance_lbl)
static bool NullIsAssignableTo(const AbstractType &other)
Definition object.cc:20715
TypeUsageInfo * type_usage_info() const
Definition thread.h:600
static TypePtr IntType()
Definition object.cc:21886
MallocGrowableArray< CidRangeValue > CidRangeVector
Definition il.h:253
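Summarizing the ladder above: a handful of destination-type shapes are decided entirely at the call site, and only the rest pay for a TTS invocation. A sketch of the classification, with plain booleans standing in for the AbstractType queries (the Smi fast path for int-assignable destinations and the null compares are omitted for brevity; illustrative only, not VM code):

enum class FastPath { kNullCompare, kTypeParamTTS, kPlainTTS, kCidRanges };

FastPath Classify(bool is_object_type, bool is_type_param,
                  bool is_function_or_record, bool has_small_cid_ranges) {
  if (is_object_type) return FastPath::kNullCompare;      // one null compare
  if (is_type_param) return FastPath::kTypeParamTTS;      // instantiate, then TTS
  if (is_function_or_record) return FastPath::kPlainTTS;  // straight to TTS
  if (has_small_cid_ranges) return FastPath::kCidRanges;  // inline cid ranges
  return FastPath::kPlainTTS;
}

int main() {
  // A check against non-nullable Object needs only the null compare:
  return Classify(true, false, false, false) == FastPath::kNullCompare ? 0 : 1;
}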

◆ GenerateCidRangesCheck()

bool dart::FlowGraphCompiler::GenerateCidRangesCheck ( compiler::Assembler *  assembler,
Register  class_id_reg,
const CidRangeVector &  cid_ranges,
compiler::Label *  inside_range_lbl,
compiler::Label *  outside_range_lbl = nullptr,
bool  fall_through_if_inside = false 
)
static

Definition at line 2257 of file flow_graph_compiler.cc.

2263 {
2264 // If there are no valid class ranges, the check will fail. If we are
2265 // supposed to fall-through in the positive case, we'll explicitly jump to
2266 // the [outside_range_lbl].
2267 if (cid_ranges.is_empty()) {
2268 if (fall_through_if_inside) {
2269 assembler->Jump(outside_range_lbl);
2270 }
2271 return false;
2272 }
2273
2274 int bias = 0;
2275 for (intptr_t i = 0; i < cid_ranges.length(); ++i) {
2276 const CidRangeValue& range = cid_ranges[i];
2277 RELEASE_ASSERT(!range.IsIllegalRange());
2278 const bool last_round = i == (cid_ranges.length() - 1);
2279
2280 compiler::Label* jump_label = last_round && fall_through_if_inside
2281 ? outside_range_lbl
2282 : inside_range_lbl;
2283 const bool jump_on_miss = last_round && fall_through_if_inside;
2284
2285 bias = EmitTestAndCallCheckCid(assembler, jump_label, class_id_reg, range,
2286 bias, jump_on_miss);
2287 }
2288 return bias != 0;
2289}
#define RELEASE_ASSERT(cond)
Definition assert.h:327
void Jump(Label *label, JumpDistance distance=kFarJump)
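The bias returned by each EmitTestAndCallCheckCid call feeds the next one, so every extra multi-cid range costs at most a single AddImmediate instead of re-deriving the raw cid; the boolean result tells the caller whether the register ended up biased (i.e. clobbered). A runnable model of the chaining (plain ints; a sketch, not VM code):

#include <cassert>
#include <vector>

struct Range { int cid_start, cid_end; };

// Applies the AddImmediate sequence the emitted checks would apply and
// returns the final bias.
int CheckRanges(int& reg, const std::vector<Range>& ranges) {
  int bias = 0;
  for (const Range& r : ranges) {
    if (r.cid_start != r.cid_end) {  // multi-cid ranges bias the register
      reg += bias - r.cid_start;
      bias = r.cid_start;
    }                                // single-cid ranges leave it untouched
  }
  return bias;
}

int main() {
  int reg = 20;  // receiver cid
  const int bias = CheckRanges(reg, {{5, 8}, {10, 12}, {20, 20}});
  // Two multi-cid ranges cost two AddImmediates; the final single-cid range
  // is a plain compare against 20 - bias = 10.
  assert(bias == 10 && reg == 10);
  // GenerateCidRangesCheck returns bias != 0 to flag the clobbered register.
}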

◆ GenerateDartCall()

void dart::FlowGraphCompiler::GenerateDartCall ( intptr_t  deopt_id,
const InstructionSource &  source,
const Code &  stub,
UntaggedPcDescriptors::Kind  kind,
LocationSummary *  locs,
Code::EntryKind  entry_kind = Code::EntryKind::kNormal 
)

◆ GenerateIndirectTTSCall()

static void dart::FlowGraphCompiler::GenerateIndirectTTSCall ( compiler::Assembler *  assembler,
Register  reg_with_type,
intptr_t  sub_type_cache_index 
)
static

◆ GenerateInstanceCall()

void dart::FlowGraphCompiler::GenerateInstanceCall ( intptr_t  deopt_id,
const InstructionSource &  source,
LocationSummary *  locs,
const ICData &  ic_data,
Code::EntryKind  entry_kind,
bool  receiver_can_be_smi 
)

Definition at line 1509 of file flow_graph_compiler.cc.

1514 {
1515 ICData& ic_data = ICData::ZoneHandle(ic_data_in.Original());
1516 if (FLAG_precompiled_mode) {
1517 ic_data = ic_data.AsUnaryClassChecks();
1518 EmitInstanceCallAOT(ic_data, deopt_id, source, locs, entry_kind,
1519 receiver_can_be_smi);
1520 return;
1521 }
1522 ASSERT(!ic_data.IsNull());
1523 if (is_optimizing() && (ic_data_in.NumberOfUsedChecks() == 0)) {
1524 // Emit IC call that will count and thus may need reoptimization at
1525 // function entry.
1526 ASSERT(may_reoptimize() || flow_graph().IsCompiledForOsr());
1527 EmitOptimizedInstanceCall(StubEntryFor(ic_data, /*optimized=*/true),
1528 ic_data, deopt_id, source, locs, entry_kind);
1529 return;
1530 }
1531
1532 if (is_optimizing()) {
1533 EmitMegamorphicInstanceCall(ic_data_in, deopt_id, source, locs);
1534 return;
1535 }
1536
1537 EmitInstanceCallJIT(StubEntryFor(ic_data, /*optimized=*/false), ic_data,
1538 deopt_id, source, locs, entry_kind);
1539}
void EmitInstanceCallJIT(const Code &stub, const ICData &ic_data, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, Code::EntryKind entry_kind)
void EmitOptimizedInstanceCall(const Code &stub, const ICData &ic_data, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
static const Code & StubEntryFor(const ICData &ic_data, bool optimized)

◆ GenerateInstanceOf()

void dart::FlowGraphCompiler::GenerateInstanceOf ( const InstructionSource &  source,
intptr_t  deopt_id,
Environment *  env,
const AbstractType &  type,
LocationSummary *  locs 
)

Definition at line 2707 of file flow_graph_compiler.cc.

2711 {
2712 ASSERT(type.IsFinalized());
2713 ASSERT(!type.IsTopTypeForInstanceOf()); // Already checked.
2714
2715 compiler::Label is_instance, is_not_instance;
2716 // 'null' is an instance of Null, Object*, Never*, void, and dynamic.
2717 // In addition, 'null' is an instance of any nullable type.
2718 // It is also an instance of FutureOr<T> if it is an instance of T.
2719 const AbstractType& unwrapped_type =
2720 AbstractType::Handle(type.UnwrapFutureOr());
2721 if (!unwrapped_type.IsTypeParameter() || unwrapped_type.IsNullable()) {
2722 // Only nullable type parameter remains nullable after instantiation.
2723 // See NullIsInstanceOf().
2724 __ CompareObject(TypeTestABI::kInstanceReg, Object::null_object());
2725 __ BranchIf(EQUAL,
2726 (unwrapped_type.IsNullable() ||
2727 (unwrapped_type.IsLegacy() && unwrapped_type.IsNeverType()))
2728 ? &is_instance
2729 : &is_not_instance);
2730 }
2731
2732 // Generate inline instanceof test.
2733 SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone());
2734 // kInstanceReg, kInstantiatorTypeArgumentsReg, and kFunctionTypeArgumentsReg
2735 // are preserved across the call.
2736 test_cache =
2737 GenerateInlineInstanceof(source, type, &is_instance, &is_not_instance);
2738
2739 // test_cache is null if there is no fall-through.
2740 compiler::Label done;
2741 if (!test_cache.IsNull()) {
2742 // Generate Runtime call.
2743 __ LoadUniqueObject(TypeTestABI::kDstTypeReg, type);
2744 __ LoadUniqueObject(TypeTestABI::kSubtypeTestCacheReg, test_cache);
2745 GenerateStubCall(source, StubCode::InstanceOf(),
2746 /*kind=*/UntaggedPcDescriptors::kOther, locs, deopt_id,
2747 env);
2748 __ Jump(&done);
2749 }
2750 __ Bind(&is_not_instance);
2751 __ LoadObject(TypeTestABI::kInstanceOfResultReg, Bool::Get(false));
2752 __ Jump(&done);
2753
2754 __ Bind(&is_instance);
2755 __ LoadObject(TypeTestABI::kInstanceOfResultReg, Bool::Get(true));
2756 __ Bind(&done);
2757}
static const Bool & Get(bool value)
Definition object.h:10780
void GenerateStubCall(const InstructionSource &source, const Code &stub, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, intptr_t deopt_id, Environment *env)
static constexpr Register kSubtypeTestCacheReg
static constexpr Register kInstanceOfResultReg

◆ GenerateListTypeCheck()

void dart::FlowGraphCompiler::GenerateListTypeCheck ( Register  kClassIdReg,
compiler::Label *  is_instance_lbl 
)

Definition at line 1610 of file flow_graph_compiler.cc.

1612 {
1613 assembler()->Comment("ListTypeCheck");
1614 COMPILE_ASSERT((kImmutableArrayCid == kArrayCid + 1) &&
1615 (kGrowableObjectArrayCid == kArrayCid + 2));
1616 CidRangeVector ranges;
1617 ranges.Add({kArrayCid, kGrowableObjectArrayCid});
1618 GenerateCidRangesCheck(assembler(), class_id_reg, ranges, is_instance_lbl);
1619}
#define COMPILE_ASSERT(expr)
Definition assert.h:339

◆ GenerateNonLazyDeoptableStubCall()

void dart::FlowGraphCompiler::GenerateNonLazyDeoptableStubCall ( const InstructionSource &  source,
const Code &  stub,
UntaggedPcDescriptors::Kind  kind,
LocationSummary *  locs,
ObjectPool::SnapshotBehavior  snapshot_behavior = compiler::ObjectPoolBuilderEntry::kSnapshotable 
)

Definition at line 1476 of file flow_graph_compiler.cc.

1481 {
1482 EmitCallToStub(stub, snapshot_behavior);
1483 EmitCallsiteMetadata(source, DeoptId::kNone, kind, locs, /*env=*/nullptr);
1484}
void EmitCallToStub(const Code &stub, ObjectPool::SnapshotBehavior snapshot_behavior=compiler::ObjectPoolBuilderEntry::kSnapshotable)
void EmitCallsiteMetadata(const InstructionSource &source, intptr_t deopt_id, UntaggedPcDescriptors::Kind kind, LocationSummary *locs, Environment *env)

◆ GenerateNumberTypeCheck()

void dart::FlowGraphCompiler::GenerateNumberTypeCheck ( Register  kClassIdReg,
const AbstractType &  type,
compiler::Label *  is_instance_lbl,
compiler::Label *  is_not_instance_lbl 
)

Definition at line 1581 of file flow_graph_compiler.cc.

1585 {
1586 assembler()->Comment("NumberTypeCheck");
1587 GrowableArray<intptr_t> args;
1588 if (type.IsNumberType()) {
1589 args.Add(kDoubleCid);
1590 args.Add(kMintCid);
1591 } else if (type.IsIntType()) {
1592 args.Add(kMintCid);
1593 } else if (type.IsDoubleType()) {
1594 args.Add(kDoubleCid);
1595 }
1596 CheckClassIds(class_id_reg, args, is_instance_lbl, is_not_instance_lbl);
1597}

◆ GeneratePatchableCall()

void dart::FlowGraphCompiler::GeneratePatchableCall ( const InstructionSource &  source,
const Code &  stub,
UntaggedPcDescriptors::Kind  kind,
LocationSummary *  locs,
ObjectPool::SnapshotBehavior  snapshot_behavior = compiler::ObjectPoolBuilderEntry::kSnapshotable 
)

◆ GenerateStaticCall()

void dart::FlowGraphCompiler::GenerateStaticCall ( intptr_t  deopt_id,
const InstructionSource &  source,
const Function &  function,
ArgumentsInfo  args_info,
LocationSummary *  locs,
const ICData &  ic_data_in,
ICData::RebindRule  rebind_rule,
Code::EntryKind  entry_kind = Code::EntryKind::kNormal 
)

Definition at line 1541 of file flow_graph_compiler.cc.

1548 {
1549 const ICData& ic_data = ICData::ZoneHandle(ic_data_in.Original());
1550 const Array& arguments_descriptor = Array::ZoneHandle(
1551 zone(), ic_data.IsNull() ? args_info.ToArgumentsDescriptor()
1552 : ic_data.arguments_descriptor());
1553 ASSERT(ArgumentsDescriptor(arguments_descriptor).TypeArgsLen() ==
1554 args_info.type_args_len);
1555 ASSERT(ArgumentsDescriptor(arguments_descriptor).Count() ==
1556 args_info.count_without_type_args);
1557 ASSERT(ArgumentsDescriptor(arguments_descriptor).Size() ==
1558 args_info.size_without_type_args);
1559 // Force-optimized functions lack the deopt info which allows patching of
1560 // optimized static calls.
1561 if (is_optimizing() && (!ForcedOptimization() || FLAG_precompiled_mode)) {
1562 EmitOptimizedStaticCall(function, arguments_descriptor,
1563 args_info.size_with_type_args, deopt_id, source,
1564 locs, entry_kind);
1565 } else {
1566 ICData& call_ic_data = ICData::ZoneHandle(zone(), ic_data.ptr());
1567 if (call_ic_data.IsNull()) {
1568 const intptr_t kNumArgsChecked = 0;
1569 call_ic_data =
1570 GetOrAddStaticCallICData(deopt_id, function, arguments_descriptor,
1571 kNumArgsChecked, rebind_rule)
1572 ->ptr();
1573 call_ic_data = call_ic_data.Original();
1574 }
1575 AddCurrentDescriptor(UntaggedPcDescriptors::kRewind, deopt_id, source);
1576 EmitUnoptimizedStaticCall(args_info.size_with_type_args, deopt_id, source,
1577 locs, call_ic_data, entry_kind);
1578 }
1579}
void EmitOptimizedStaticCall(const Function &function, const Array &arguments_descriptor, intptr_t size_with_type_args, intptr_t deopt_id, const InstructionSource &source, LocationSummary *locs, Code::EntryKind entry_kind=Code::EntryKind::kNormal)
const ICData * GetOrAddStaticCallICData(intptr_t deopt_id, const Function &target, const Array &arguments_descriptor, intptr_t num_args_tested, ICData::RebindRule rebind_rule)

◆ GenerateStaticDartCall()

void dart::FlowGraphCompiler::GenerateStaticDartCall ( intptr_t  deopt_id,
const InstructionSource &  source,
UntaggedPcDescriptors::Kind  kind,
LocationSummary *  locs,
const Function &  target,
Code::EntryKind  entry_kind = Code::EntryKind::kNormal 
)

◆ GenerateStringTypeCheck()

void dart::FlowGraphCompiler::GenerateStringTypeCheck ( Register  kClassIdReg,
compiler::Label *  is_instance_lbl,
compiler::Label *  is_not_instance_lbl 
)

Definition at line 1599 of file flow_graph_compiler.cc.

1602 {
1603 assembler()->Comment("StringTypeCheck");
1604 GrowableArray<intptr_t> args;
1605 args.Add(kOneByteStringCid);
1606 args.Add(kTwoByteStringCid);
1607 CheckClassIds(class_id_reg, args, is_instance_lbl, is_not_instance_lbl);
1608}

◆ GenerateStubCall()

void dart::FlowGraphCompiler::GenerateStubCall ( const InstructionSource &  source,
const Code &  stub,
UntaggedPcDescriptors::Kind  kind,
LocationSummary *  locs,
intptr_t  deopt_id,
Environment *  env 
)

Definition at line 1464 of file flow_graph_compiler.cc.

1469 {
1470 ASSERT(FLAG_precompiled_mode ||
1471 (deopt_id != DeoptId::kNone && (!is_optimizing() || env != nullptr)));
1472 EmitCallToStub(stub);
1473 EmitCallsiteMetadata(source, deopt_id, kind, locs, env);
1474}

◆ GenerateSubtypeRangeCheck()

bool dart::FlowGraphCompiler::GenerateSubtypeRangeCheck ( Register  class_id_reg,
const Class &  type_class,
compiler::Label *  is_subtype_lbl 
)

Definition at line 2233 of file flow_graph_compiler.cc.

2235 {
2236 HierarchyInfo* hi = Thread::Current()->hierarchy_info();
2237 if (hi != nullptr) {
2238 const CidRangeVector& ranges =
2239 hi->SubtypeRangesForClass(type_class,
2240 /*include_abstract=*/false,
2241 /*exclude_null=*/false);
2242 if (ranges.length() <= kMaxNumberOfCidRangesToTest) {
2243 GenerateCidRangesCheck(assembler(), class_id_reg, ranges, is_subtype);
2244 return true;
2245 }
2246 }
2247
2248 // We don't have cid-ranges for subclasses, so we'll just test against the
2249 // class directly if it's non-abstract.
2250 if (!type_class.is_abstract()) {
2251 __ CompareImmediate(class_id_reg, type_class.id());
2252 __ BranchIf(EQUAL, is_subtype);
2253 }
2254 return false;
2255}
const CidRangeVector & SubtypeRangesForClass(const Class &klass, bool include_abstract, bool exclude_null)
Definition il.cc:110
HierarchyInfo * hierarchy_info() const
Definition thread.h:588

◆ GenerateTTSCall()

void dart::FlowGraphCompiler::GenerateTTSCall ( const InstructionSource &  source,
intptr_t  deopt_id,
Environment *  env,
Register  reg_with_type,
const AbstractType &  dst_type,
const String &  dst_name,
LocationSummary *  locs 
)

Definition at line 2844 of file flow_graph_compiler.cc.

2850 {
2851 ASSERT(!dst_name.IsNull());
2852 // We use 2 consecutive entries in the pool for the subtype cache and the
2853 // destination name. The second entry, namely [dst_name] seems to be unused,
2854 // but it will be used by the code throwing a TypeError if the type test fails
2855 // (see runtime/vm/runtime_entry.cc:TypeCheck). It will use pattern matching
2856 // on the call site to find out at which pool index the destination name is
2857 // located.
2858 const intptr_t sub_type_cache_index = __ object_pool_builder().AddObject(
2859 Object::null_object(), compiler::ObjectPoolBuilderEntry::kPatchable);
2860 const intptr_t dst_name_index = __ object_pool_builder().AddObject(
2861 dst_name, compiler::ObjectPoolBuilderEntry::kPatchable);
2862 ASSERT((sub_type_cache_index + 1) == dst_name_index);
2863 ASSERT(__ constant_pool_allowed());
2864
2865 __ Comment("TTSCall");
2866 // If the dst_type is known at compile time and instantiated, we know the
2867 // target TTS stub and so can use a PC-relative call when available.
2868 if (!dst_type.IsNull() && dst_type.IsInstantiated() &&
2869 CanPcRelativeCall(dst_type)) {
2870 __ LoadWordFromPoolIndex(TypeTestABI::kSubtypeTestCacheReg,
2871 sub_type_cache_index);
2872 __ GenerateUnRelocatedPcRelativeCall();
2873 AddPcRelativeTTSCallTypeTarget(dst_type);
2874 } else {
2875 GenerateIndirectTTSCall(assembler(), reg_with_type, sub_type_cache_index);
2876 }
2877
2878 EmitCallsiteMetadata(source, deopt_id, UntaggedPcDescriptors::kOther, locs,
2879 env);
2880}
static void GenerateIndirectTTSCall(compiler::Assembler *assembler, Register reg_with_type, intptr_t sub_type_cache_index)
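The adjacency assertion matters because the TypeError path only learns the cache's pool index from the call site and reads the name from the slot after it. A standalone model of that invariant, with a std::vector standing in for the object pool (a sketch, not VM code):

#include <cassert>
#include <string>
#include <vector>

int main() {
  std::vector<std::string> pool;           // stands in for the object pool
  // Mirrors the two consecutive AddObject calls in GenerateTTSCall:
  pool.push_back("<subtype-test-cache>");
  const size_t sub_type_cache_index = pool.size() - 1;
  pool.push_back("dst_name");
  const size_t dst_name_index = pool.size() - 1;
  assert(sub_type_cache_index + 1 == dst_name_index);
  // The throwing path knows only sub_type_cache_index; adjacency lets it
  // recover the destination name without a separate pointer:
  assert(pool[sub_type_cache_index + 1] == "dst_name");
}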

◆ GetJumpLabel()

compiler::Label * dart::FlowGraphCompiler::GetJumpLabel ( BlockEntryInstr *  block_entry) const

Definition at line 832 of file flow_graph_compiler.cc.

833 {
834 const intptr_t block_index = block_entry->postorder_number();
835 return block_info_[block_index]->jump_label();
836}

◆ GetOrAddInstanceCallICData()

const ICData * dart::FlowGraphCompiler::GetOrAddInstanceCallICData ( intptr_t  deopt_id,
const String &  target_name,
const Array &  arguments_descriptor,
intptr_t  num_args_tested,
const AbstractType &  receiver_type,
const Function &  binary_smi_target 
)

Definition at line 1867 of file flow_graph_compiler.cc.

1873 {
1874 if ((deopt_id_to_ic_data_ != nullptr) &&
1875 ((*deopt_id_to_ic_data_)[deopt_id] != nullptr)) {
1876 const ICData* res = (*deopt_id_to_ic_data_)[deopt_id];
1877 ASSERT(res->deopt_id() == deopt_id);
1878 ASSERT(res->target_name() == target_name.ptr());
1879 ASSERT(res->NumArgsTested() == num_args_tested);
1880 ASSERT(res->TypeArgsLen() ==
1881 ArgumentsDescriptor(arguments_descriptor).TypeArgsLen());
1882 ASSERT(!res->is_static_call());
1883 ASSERT(res->receivers_static_type() == receiver_type.ptr());
1884 return res;
1885 }
1886
1887 auto& ic_data = ICData::ZoneHandle(zone());
1888 if (!binary_smi_target.IsNull()) {
1889 ASSERT(num_args_tested == 2);
1890 ASSERT(!binary_smi_target.IsNull());
1891 GrowableArray<intptr_t> cids(num_args_tested);
1892 cids.Add(kSmiCid);
1893 cids.Add(kSmiCid);
1894 ic_data = ICData::NewWithCheck(parsed_function().function(), target_name,
1895 arguments_descriptor, deopt_id,
1896 num_args_tested, ICData::kInstance, &cids,
1897 binary_smi_target, receiver_type);
1898 } else {
1899 ic_data = ICData::New(parsed_function().function(), target_name,
1900 arguments_descriptor, deopt_id, num_args_tested,
1901 ICData::kInstance, receiver_type);
1902 }
1903
1904 if (deopt_id_to_ic_data_ != nullptr) {
1905 (*deopt_id_to_ic_data_)[deopt_id] = &ic_data;
1906 }
1907 ASSERT(!ic_data.is_static_call());
1908 return &ic_data;
1909}
static ICDataPtr NewWithCheck(const Function &owner, const String &target_name, const Array &arguments_descriptor, intptr_t deopt_id, intptr_t num_args_tested, RebindRule rebind_rule, GrowableArray< intptr_t > *cids, const Function &target, const AbstractType &receiver_type=Object::null_abstract_type())
Definition object.cc:17407

◆ GetOrAddStaticCallICData()

const ICData * dart::FlowGraphCompiler::GetOrAddStaticCallICData ( intptr_t  deopt_id,
const Function &  target,
const Array &  arguments_descriptor,
intptr_t  num_args_tested,
ICData::RebindRule  rebind_rule 
)

Definition at line 1911 of file flow_graph_compiler.cc.

1916 {
1917 if ((deopt_id_to_ic_data_ != nullptr) &&
1918 ((*deopt_id_to_ic_data_)[deopt_id] != nullptr)) {
1919 const ICData* res = (*deopt_id_to_ic_data_)[deopt_id];
1920 ASSERT(res->deopt_id() == deopt_id);
1921 ASSERT(res->target_name() == target.name());
1922 ASSERT(res->NumArgsTested() == num_args_tested);
1923 ASSERT(res->TypeArgsLen() ==
1924 ArgumentsDescriptor(arguments_descriptor).TypeArgsLen());
1925 ASSERT(res->is_static_call());
1926 return res;
1927 }
1928
1929 const auto& ic_data = ICData::ZoneHandle(
1930 zone(), ICData::NewForStaticCall(parsed_function().function(), target,
1931 arguments_descriptor, deopt_id,
1932 num_args_tested, rebind_rule));
1933 if (deopt_id_to_ic_data_ != nullptr) {
1934 (*deopt_id_to_ic_data_)[deopt_id] = &ic_data;
1935 }
1936 return &ic_data;
1937}
static ICDataPtr NewForStaticCall(const Function &owner, const Function &target, const Array &arguments_descriptor, intptr_t deopt_id, intptr_t num_args_tested, RebindRule rebind_rule)
Definition object.cc:17448

◆ InitCompiler()

void dart::FlowGraphCompiler::InitCompiler ( )

Definition at line 210 of file flow_graph_compiler.cc.

210 {
211 compressed_stackmaps_builder_ =
212 new (zone()) CompressedStackMapsBuilder(zone());
213 pc_descriptors_list_ = new (zone()) DescriptorList(
214 zone(), &code_source_map_builder_->inline_id_to_function());
215 exception_handlers_list_ =
216 new (zone()) ExceptionHandlerList(parsed_function().function());
217#if defined(DART_PRECOMPILER)
218 catch_entry_moves_maps_builder_ = new (zone()) CatchEntryMovesMapBuilder();
219#endif
220 block_info_.Clear();
221 // Initialize block info and search optimized (non-OSR) code for calls
222 // indicating a non-leaf routine and calls without IC data indicating
223 // possible reoptimization.
224
225 for (int i = 0; i < block_order_.length(); ++i) {
226 block_info_.Add(new (zone()) BlockInfo());
227 if (is_optimizing() && !flow_graph().IsCompiledForOsr()) {
228 BlockEntryInstr* entry = block_order_[i];
229 for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) {
230 Instruction* current = it.Current();
231 if (auto* branch = current->AsBranch()) {
232 current = branch->comparison();
233 }
234 if (auto* instance_call = current->AsInstanceCall()) {
235 const ICData* ic_data = instance_call->ic_data();
236 if ((ic_data == nullptr) || (ic_data->NumberOfUsedChecks() == 0)) {
237 may_reoptimize_ = true;
238 }
239 }
240 }
241 }
242 }
243
244 if (!is_optimizing() && FLAG_reorder_basic_blocks) {
245 // Initialize edge counter array.
246 const intptr_t num_counters = flow_graph_.preorder().length();
247 const Array& edge_counters =
248 Array::Handle(Array::New(num_counters, Heap::kOld));
249 for (intptr_t i = 0; i < num_counters; ++i) {
250 edge_counters.SetAt(i, Object::smi_zero());
251 }
252 edge_counters_array_ = edge_counters.ptr();
253 }
254}
const GrowableArray< const Function * > & inline_id_to_function() const
bool IsCompiledForOsr() const
Definition flow_graph.h:460
const GrowableArray< BlockEntryInstr * > & preorder() const
Definition flow_graph.h:203

◆ InliningIdToFunction()

ArrayPtr dart::FlowGraphCompiler::InliningIdToFunction ( ) const

◆ InsertBSSRelocation()

void dart::FlowGraphCompiler::InsertBSSRelocation ( BSS::Relocation  reloc)

Definition at line 269 of file flow_graph_compiler.cc.

269 {
270 const intptr_t offset = assembler()->InsertAlignedRelocation(reloc);
271 AddDescriptor(UntaggedPcDescriptors::kBSSRelocation, /*pc_offset=*/offset,
272 /*deopt_id=*/DeoptId::kNone, InstructionSource(),
273 /*try_index=*/-1);
274}
intptr_t InsertAlignedRelocation(BSS::Relocation reloc)

◆ int32x4_class()

const Class & dart::FlowGraphCompiler::int32x4_class ( ) const
inline

Definition at line 849 of file flow_graph_compiler.h.

849{ return int32x4_class_; }

◆ intrinsic_mode()

bool dart::FlowGraphCompiler::intrinsic_mode ( ) const
inline

Definition at line 444 of file flow_graph_compiler.h.

444{ return intrinsic_mode_; }

◆ intrinsic_slow_path_label()

compiler::Label * dart::FlowGraphCompiler::intrinsic_slow_path_label ( ) const
inline

Definition at line 454 of file flow_graph_compiler.h.

454 {
455 ASSERT(intrinsic_slow_path_label_ != nullptr);
456 return intrinsic_slow_path_label_;
457 }

◆ is_optimizing()

bool dart::FlowGraphCompiler::is_optimizing ( ) const
inline

Definition at line 428 of file flow_graph_compiler.h.

428{ return is_optimizing_; }

◆ IsEmptyBlock()

bool dart::FlowGraphCompiler::IsEmptyBlock ( BlockEntryInstr *  block) const

Definition at line 299 of file flow_graph_compiler.cc.

299 {
300 // Entry-points cannot be merged because they must have assembly
301 // prologue emitted which should not be included in any block they jump to.
302 return !block->IsGraphEntry() && !block->IsFunctionEntry() &&
303 !block->IsCatchBlockEntry() && !block->IsOsrEntry() &&
304 !block->IsIndirectEntry() && !block->HasNonRedundantParallelMove() &&
305 block->next()->IsGoto() &&
306 !block->next()->AsGoto()->HasNonRedundantParallelMove();
307}

◆ isolate_group()

IsolateGroup * dart::FlowGraphCompiler::isolate_group ( ) const
inline

Definition at line 914 of file flow_graph_compiler.h.

914{ return thread_->isolate_group(); }

◆ LoadBSSEntry()

void dart::FlowGraphCompiler::LoadBSSEntry ( BSS::Relocation  relocation,
Register  dst,
Register  tmp 
)

◆ LookupMethodFor()

bool dart::FlowGraphCompiler::LookupMethodFor ( int  class_id,
const String &  name,
const ArgumentsDescriptor &  args_desc,
Function *  fn_return,
bool *  class_is_abstract_return = nullptr 
)
static

Definition at line 2021 of file flow_graph_compiler.cc.

2025 {
2026 auto thread = Thread::Current();
2027 auto zone = thread->zone();
2028 auto class_table = thread->isolate_group()->class_table();
2029 if (class_id < 0) return false;
2030 if (class_id >= class_table->NumCids()) return false;
2031
2032 ClassPtr raw_class = class_table->At(class_id);
2033 if (raw_class == nullptr) return false;
2034 Class& cls = Class::Handle(zone, raw_class);
2035 if (cls.IsNull()) return false;
2036 if (!cls.is_finalized()) return false;
2037 if (Array::Handle(cls.current_functions()).IsNull()) return false;
2038
2039 if (class_is_abstract_return != nullptr) {
2040 *class_is_abstract_return = cls.is_abstract();
2041 }
2042 const bool allow_add = false;
2043 Function& target_function =
2044 Function::Handle(zone, Resolver::ResolveDynamicForReceiverClass(
2045 cls, name, args_desc, allow_add));
2046 if (target_function.IsNull()) return false;
2047 *fn_return = target_function.ptr();
2048 return true;
2049}
ClassPtr At(intptr_t cid) const
ClassTable * class_table() const
Definition isolate.h:491
static FunctionPtr ResolveDynamicForReceiverClass(const Class &receiver_class, const String &function_name, const ArgumentsDescriptor &args_desc, bool allow_add=true)
Definition resolver.cc:160
Zone * zone() const
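A minimal usage sketch, assuming an entered VM thread with a zone at hand; kReceiverCid, selector, and args_desc are hypothetical stand-ins for values the caller already has:

Function& target = Function::Handle(zone);
bool is_abstract = false;
// Fails gracefully (returns false) for invalid cids and unfinalized classes
// instead of resolving against a half-built class.
if (FlowGraphCompiler::LookupMethodFor(kReceiverCid, selector, args_desc,
                                       &target, &is_abstract)) {
  // target holds the resolved method; is_abstract reports whether the
  // receiver class itself is abstract.
}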

◆ may_reoptimize()

bool dart::FlowGraphCompiler::may_reoptimize ( ) const
inline

Definition at line 885 of file flow_graph_compiler.h.

885{ return may_reoptimize_; }

◆ mint_class()

const Class & dart::FlowGraphCompiler::mint_class ( ) const
inline

Definition at line 846 of file flow_graph_compiler.h.

846{ return mint_class_; }

◆ NeedsEdgeCounter()

bool dart::FlowGraphCompiler::NeedsEdgeCounter ( BlockEntryInstr *  block)

Definition at line 1630 of file flow_graph_compiler.cc.

1630 {
1631 // Only emit an edge counter if there is not goto at the end of the block,
1632 // except for the entry block.
1633 return FLAG_reorder_basic_blocks &&
1634 (!block->last_instruction()->IsGoto() || block->IsFunctionEntry());
1635}

◆ NextNonEmptyLabel()

compiler::Label * dart::FlowGraphCompiler::NextNonEmptyLabel ( ) const

Definition at line 843 of file flow_graph_compiler.cc.

843 {
844 const intptr_t current_index = current_block()->postorder_number();
845 return block_info_[current_index]->next_nonempty_label();
846}
intptr_t postorder_number() const
Definition il.h:1652
BlockEntryInstr * current_block() const

◆ parsed_function()

const ParsedFunction & dart::FlowGraphCompiler::parsed_function ( ) const
inline

Definition at line 401 of file flow_graph_compiler.h.

401{ return parsed_function_; }

◆ RecordCatchEntryMoves()

void dart::FlowGraphCompiler::RecordCatchEntryMoves ( Environment *  env)

Definition at line 427 of file flow_graph_compiler.cc.

427 {
428#if defined(DART_PRECOMPILER)
429 const intptr_t try_index = CurrentTryIndex();
430 if (is_optimizing() && env != nullptr && (try_index != kInvalidTryIndex)) {
431 env = env->Outermost();
432 CatchBlockEntryInstr* catch_block =
433 flow_graph().graph_entry()->GetCatchEntry(try_index);
434 const GrowableArray<Definition*>* idefs =
435 catch_block->initial_definitions();
436 catch_entry_moves_maps_builder_->NewMapping(assembler()->CodeSize());
437
438 for (intptr_t i = 0; i < flow_graph().variable_count(); ++i) {
439 // Don't sync captured parameters. They are not in the environment.
440 if (flow_graph().captured_parameters()->Contains(i)) continue;
441 auto param = (*idefs)[i]->AsParameter();
442
443 // Don't sync values that have been replaced with constants.
444 if (param == nullptr) continue;
445 RELEASE_ASSERT(param->env_index() == i);
446 Location dst = param->location();
447
448 // Don't sync exception or stack trace variables.
449 if (dst.IsRegister()) continue;
450
451 Location src = env->LocationAt(i);
452 // Can only occur if AllocationSinking is enabled - and it is disabled
453 // in functions with try.
454 ASSERT(!src.IsInvalid());
455 const Representation src_type =
456 env->ValueAt(i)->definition()->representation();
457 const auto move = CatchEntryMoveFor(assembler(), src_type, src,
458 LocationToStackIndex(dst));
459 if (!move.IsRedundant()) {
460 catch_entry_moves_maps_builder_->Append(move);
461 }
462 }
463
464 catch_entry_moves_maps_builder_->EndMapping();
465 }
466#endif // defined(DART_PRECOMPILER)
467}
bool Contains(intptr_t i) const
Definition bit_vector.h:91
GrowableArray< Definition * > * initial_definitions()
Definition il.h:1911
void NewMapping(intptr_t pc_offset)
void Append(const CatchEntryMove &move)
BitVector * captured_parameters() const
Definition flow_graph.h:462
intptr_t variable_count() const
Definition flow_graph.h:143
CatchBlockEntryInstr * GetCatchEntry(intptr_t index)
Definition il.cc:1246

◆ RecordSafepoint()

void dart::FlowGraphCompiler::RecordSafepoint ( LocationSummary *  locs,
intptr_t  slow_path_argument_count = 0 
)

Definition at line 1047 of file flow_graph_compiler.cc.

1048 {
1049 if (is_optimizing() || locs->live_registers()->HasUntaggedValues()) {
1050 const intptr_t spill_area_size =
1051 is_optimizing() ? flow_graph_.graph_entry()->spill_slot_count() : 0;
1052
1053 RegisterSet* registers = locs->live_registers();
1054 ASSERT(registers != nullptr);
1055 const intptr_t kFpuRegisterSpillFactor =
1056 kFpuRegisterSize / compiler::target::kWordSize;
1057 const bool using_shared_stub = locs->call_on_shared_slow_path();
1058
1059 BitmapBuilder bitmap(locs->stack_bitmap());
1060
1061 // Expand the bitmap to cover the whole area reserved for spill slots.
1062 // (register allocator takes care of marking slots containing live tagged
1063 // values but it does not do the same for other slots so length might be
1064 // below spill_area_size at this point).
1065 RELEASE_ASSERT(bitmap.Length() <= spill_area_size);
1066 bitmap.SetLength(spill_area_size);
1067
1068 auto instr = current_instruction();
1069 const intptr_t args_count = instr->ArgumentCount();
1070 RELEASE_ASSERT(args_count == 0 || is_optimizing());
1071
1072 for (intptr_t i = 0; i < args_count; i++) {
1073 const auto move_arg =
1074 instr->ArgumentValueAt(i)->instruction()->AsMoveArgument();
1075 const auto rep = move_arg->representation();
1076 if (move_arg->is_register_move()) {
1077 continue;
1078 }
1079
1080 ASSERT(rep == kTagged || rep == kUnboxedInt64 || rep == kUnboxedDouble);
1081 static_assert(compiler::target::kIntSpillFactor ==
1082 compiler::target::kDoubleSpillFactor,
1083 "int and double are of the same size");
1084 const bool is_tagged = move_arg->representation() == kTagged;
1085 const intptr_t num_bits =
1086 is_tagged ? 1 : compiler::target::kIntSpillFactor;
1087
1088 // Note: bits are reversed so higher bit corresponds to lower word.
1089 const intptr_t last_arg_bit =
1090 (spill_area_size - 1) - move_arg->sp_relative_index();
1091 bitmap.SetRange(last_arg_bit - (num_bits - 1), last_arg_bit, is_tagged);
1092 }
1093 ASSERT(slow_path_argument_count == 0 || !using_shared_stub);
1094 RELEASE_ASSERT(bitmap.Length() == spill_area_size);
1095
1096 // Trim the fully tagged suffix. Stack walking assumes that everything
1097 // not included into the stack map is tagged.
1098 intptr_t spill_area_bits = bitmap.Length();
1099 while (spill_area_bits > 0) {
1100 if (!bitmap.Get(spill_area_bits - 1)) {
1101 break;
1102 }
1103 spill_area_bits--;
1104 }
1105 bitmap.SetLength(spill_area_bits);
1106
1107 // Mark the bits in the stack map in the same order we push registers in
1108 // slow path code (see FlowGraphCompiler::SaveLiveRegisters).
1109 //
1110 // Slow path code can have registers at the safepoint.
1111 if (!locs->always_calls() && !using_shared_stub) {
1112 RegisterSet* regs = locs->live_registers();
1113 if (regs->FpuRegisterCount() > 0) {
1114 // Denote FPU registers with 0 bits in the stackmap. Based on the
1115 // assumption that there are normally few live FPU registers, this
1116 // encoding is simpler and roughly as compact as storing a separate
1117 // count of FPU registers.
1118 //
1119 // FPU registers have the highest register number at the highest
1120 // address (i.e., first in the stackmap).
1121 for (intptr_t i = kNumberOfFpuRegisters - 1; i >= 0; --i) {
1122 FpuRegister reg = static_cast<FpuRegister>(i);
1123 if (regs->ContainsFpuRegister(reg)) {
1124 for (intptr_t j = 0; j < kFpuRegisterSpillFactor; ++j) {
1125 bitmap.Set(bitmap.Length(), false);
1126 }
1127 }
1128 }
1129 }
1130
1131 // General purpose registers have the highest register number at the
1132 // highest address (i.e., first in the stackmap).
1133 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; --i) {
1134 Register reg = static_cast<Register>(i);
1135 if (locs->live_registers()->ContainsRegister(reg)) {
1136 bitmap.Set(bitmap.Length(), locs->live_registers()->IsTagged(reg));
1137 }
1138 }
1139 }
1140
1141 if (using_shared_stub) {
1142 // To simplify the code in the shared stub, we create an untagged hole
1143 // in the stack frame where the shared stub can leave the return address
1144 // before saving registers.
1145 bitmap.Set(bitmap.Length(), false);
1146 if (registers->FpuRegisterCount() > 0) {
1147 bitmap.SetRange(bitmap.Length(),
1148 bitmap.Length() +
1149 kNumberOfFpuRegisters * kFpuRegisterSpillFactor - 1,
1150 false);
1151 }
1152 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; --i) {
1153 if ((kReservedCpuRegisters & (1 << i)) != 0) continue;
1154 const Register reg = static_cast<Register>(i);
1155 bitmap.Set(bitmap.Length(),
1156 locs->live_registers()->ContainsRegister(reg) &&
1157 locs->live_registers()->IsTagged(reg));
1158 }
1159 }
1160
1161 // Arguments pushed after live registers in the slow path are tagged.
1162 for (intptr_t i = 0; i < slow_path_argument_count; ++i) {
1163 bitmap.Set(bitmap.Length(), true);
1164 }
1165
1166 compressed_stackmaps_builder_->AddEntry(assembler()->CodeSize(), &bitmap,
1167 spill_area_bits);
1168 }
1169}
void AddEntry(intptr_t pc_offset, BitmapBuilder *bitmap, intptr_t spill_slot_bit_count)
Instruction * current_instruction() const
intptr_t spill_slot_count() const
Definition il.h:1968
const RegList kReservedCpuRegisters
@ kNumberOfCpuRegisters
const int kNumberOfFpuRegisters
QRegister FpuRegister
const int kFpuRegisterSize
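Trimming the fully tagged suffix is safe because the stack walker treats every slot beyond the recorded bitmap as tagged. A runnable model of that trim, with std::vector<bool> in place of BitmapBuilder (a sketch, not VM code):

#include <cassert>
#include <vector>

// Drops trailing true (tagged) bits, mirroring the loop in RecordSafepoint.
size_t TrimTaggedSuffix(const std::vector<bool>& bitmap) {
  size_t bits = bitmap.size();
  while (bits > 0 && bitmap[bits - 1]) bits--;
  return bits;
}

int main() {
  // true = tagged slot, false = untagged (e.g. an unboxed double spill).
  assert(TrimTaggedSuffix({true, false, true, true}) == 2);
  assert(TrimTaggedSuffix({true, true}) == 0);  // all tagged: empty map
}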

◆ ResolveCallTargetsForReceiverCid()

const CallTargets * dart::FlowGraphCompiler::ResolveCallTargetsForReceiverCid ( intptr_t  cid,
const String &  selector,
const Array &  args_desc_array 
)
static

Definition at line 2003 of file flow_graph_compiler.cc.

2006 {
2007 Zone* zone = Thread::Current()->zone();
2008
2009 ArgumentsDescriptor args_desc(args_desc_array);
2010
2011 Function& fn = Function::ZoneHandle(zone);
2012 if (!LookupMethodFor(cid, selector, args_desc, &fn)) return nullptr;
2013
2014 CallTargets* targets = new (zone) CallTargets(zone);
2015 targets->Add(new (zone) TargetInfo(cid, cid, &fn, /* count = */ 1,
2016 StaticTypeExactnessState::NotTracking()));
2017
2018 return targets;
2019}
static bool LookupMethodFor(int class_id, const String &name, const ArgumentsDescriptor &args_desc, Function *fn_return, bool *class_is_abstract_return=nullptr)
static StaticTypeExactnessState NotTracking()
const intptr_t cid

◆ RestoreLiveRegisters()

void dart::FlowGraphCompiler::RestoreLiveRegisters ( LocationSummary *  locs)

◆ SaveLiveRegisters()

void dart::FlowGraphCompiler::SaveLiveRegisters ( LocationSummary *  locs)

◆ set_current_block()

void dart::FlowGraphCompiler::set_current_block ( BlockEntryInstr *  value)
inline

Definition at line 421 of file flow_graph_compiler.h.

421{ current_block_ = value; }

◆ set_intrinsic_flow_graph()

void dart::FlowGraphCompiler::set_intrinsic_flow_graph ( const FlowGraph &  flow_graph)
inline

Definition at line 446 of file flow_graph_compiler.h.

446 {
447 intrinsic_flow_graph_ = &flow_graph;
448 }

◆ set_intrinsic_slow_path_label()

void dart::FlowGraphCompiler::set_intrinsic_slow_path_label ( compiler::Label *  label)
inline

Definition at line 450 of file flow_graph_compiler.h.

450 {
451 ASSERT(intrinsic_slow_path_label_ == nullptr || label == nullptr);
452 intrinsic_slow_path_label_ = label;
453 }

◆ SetNeedsStackTrace()

void dart::FlowGraphCompiler::SetNeedsStackTrace ( intptr_t  try_index)

Definition at line 913 of file flow_graph_compiler.cc.

913 {
914 exception_handlers_list_->SetNeedsStackTrace(try_index);
915}
void SetNeedsStackTrace(intptr_t try_index)

◆ skip_body_compilation()

bool dart::FlowGraphCompiler::skip_body_compilation ( ) const
inline

Definition at line 438 of file flow_graph_compiler.h.

438 {
439 return fully_intrinsified_ && is_optimizing();
440 }

◆ SlowPathEnvironmentFor() [1/2]

Environment * dart::FlowGraphCompiler::SlowPathEnvironmentFor ( Environment *  env,
LocationSummary *  locs,
intptr_t  num_slow_path_args 
)

Definition at line 1177 of file flow_graph_compiler.cc.

1180 {
1181 const bool using_shared_stub = locs->call_on_shared_slow_path();
1182 const bool shared_stub_save_fpu_registers =
1183 using_shared_stub && locs->live_registers()->FpuRegisterCount() > 0;
1184 // TODO(sjindel): Modify logic below to account for slow-path args with shared
1185 // stubs.
1186 ASSERT(!using_shared_stub || num_slow_path_args == 0);
1187 if (env == nullptr) {
1188 // In AOT, environments can be removed by EliminateEnvironments pass
1189 // (if not in a try block).
1190 ASSERT(!is_optimizing() || FLAG_precompiled_mode);
1191 return nullptr;
1192 }
1193
1194 Environment* slow_path_env =
1195 env->DeepCopy(zone(), env->Length() - env->LazyDeoptPruneCount());
1196 // 1. Iterate the registers in the order they will be spilled to compute
1197 // the slots they will be spilled to.
1198 intptr_t next_slot = StackSize() + slow_path_env->CountArgsPushed();
1199 if (using_shared_stub) {
1200 // The PC from the call to the shared stub is pushed here.
1201 next_slot++;
1202 }
1203 RegisterSet* regs = locs->live_registers();
1204 intptr_t fpu_reg_slots[kNumberOfFpuRegisters];
1205 intptr_t cpu_reg_slots[kNumberOfCpuRegisters];
1206 const intptr_t kFpuRegisterSpillFactor =
1207 kFpuRegisterSize / compiler::target::kWordSize;
1208 // FPU registers are spilled first from highest to lowest register number.
1209 for (intptr_t i = kNumberOfFpuRegisters - 1; i >= 0; --i) {
1210 FpuRegister reg = static_cast<FpuRegister>(i);
1211 if (regs->ContainsFpuRegister(reg)) {
1212 // We use the lowest address (thus highest index) to identify a
1213 // multi-word spill slot.
1214 next_slot += kFpuRegisterSpillFactor;
1215 fpu_reg_slots[i] = (next_slot - 1);
1216 } else {
1217 if (using_shared_stub && shared_stub_save_fpu_registers) {
1218 next_slot += kFpuRegisterSpillFactor;
1219 }
1220 fpu_reg_slots[i] = -1;
1221 }
1222 }
1223 // General purpose registers are spilled from highest to lowest register
1224 // number.
1225 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; --i) {
1226 if ((kReservedCpuRegisters & (1 << i)) != 0) continue;
1227 Register reg = static_cast<Register>(i);
1228 if (regs->ContainsRegister(reg)) {
1229 cpu_reg_slots[i] = next_slot++;
1230 } else {
1231 if (using_shared_stub) next_slot++;
1232 cpu_reg_slots[i] = -1;
1233 }
1234 }
1235
1236 // 2. Iterate the environment and replace register locations with the
1237 // corresponding spill slot locations.
1238 for (Environment::DeepIterator it(slow_path_env); !it.Done(); it.Advance()) {
1239 Location loc = it.CurrentLocation();
1240 Value* value = it.CurrentValue();
1241 it.SetCurrentLocation(LocationRemapForSlowPath(
1242 loc, value->definition(), cpu_reg_slots, fpu_reg_slots));
1243 }
1244
1245 return slow_path_env;
1246}
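
The subtle part above is the slot numbering itself. The standalone sketch below reproduces it under simplified, hypothetical assumptions (4 FPU and 8 CPU registers, two words per FPU register, live sets as plain bitmasks, no reserved registers, no shared stub); it is a reading aid rather than VM code:

#include <cstdint>

// Hypothetical register-file shape; the real constants are per-architecture.
constexpr int kNumFpu = 4, kNumCpu = 8, kFpuSpillFactor = 2;

void ComputeSlowPathSlots(int frame_slots,  // StackSize() + CountArgsPushed()
                          uint32_t live_fpu, uint32_t live_cpu,
                          int fpu_slots[kNumFpu], int cpu_slots[kNumCpu]) {
  int next_slot = frame_slots;
  // FPU registers spill first, highest register number first; a multi-word
  // value is identified by its highest slot index (its lowest address).
  for (int i = kNumFpu - 1; i >= 0; --i) {
    if ((live_fpu >> i) & 1u) {
      next_slot += kFpuSpillFactor;
      fpu_slots[i] = next_slot - 1;
    } else {
      fpu_slots[i] = -1;  // not live, not spilled
    }
  }
  // CPU registers follow, also highest number first, one slot each.
  for (int i = kNumCpu - 1; i >= 0; --i) {
    if ((live_cpu >> i) & 1u) {
      cpu_slots[i] = next_slot++;
    } else {
      cpu_slots[i] = -1;
    }
  }
}

The real method additionally reserves slots for non-live registers when a shared slow-path stub is in use (such a stub saves every register) and one extra slot for the PC pushed by the call to the stub, as the listing above shows.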

◆ SlowPathEnvironmentFor() [2/2]

Environment * dart::FlowGraphCompiler::SlowPathEnvironmentFor ( Instruction * inst,
intptr_t num_slow_path_args 
)
inline

Definition at line 861 of file flow_graph_compiler.h.

862 {
863 if (inst->env() == nullptr && is_optimizing()) {
864 if (pending_deoptimization_env_ == nullptr) {
865 return nullptr;
866 }
867 return SlowPathEnvironmentFor(pending_deoptimization_env_, inst->locs(),
868 num_slow_path_args);
869 }
870 return SlowPathEnvironmentFor(inst->env(), inst->locs(),
871 num_slow_path_args);
872 }
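
This overload is the one slow paths typically call; it falls back to pending_deoptimization_env_ when an optimized instruction carries no environment of its own. A hypothetical usage sketch follows (the function and its sequencing are illustrative, not the VM's SlowPathCode protocol; SaveLiveRegisters() and RestoreLiveRegisters() are real FlowGraphCompiler members):

#include <flow_graph_compiler.h>

// Hypothetical slow-path emission, for illustration only.
void MyEmitSlowPath(dart::FlowGraphCompiler* compiler,
                    dart::Instruction* instruction) {
  compiler->SaveLiveRegisters(instruction->locs());  // spill in the order assumed above
  // ... emit the runtime call here ...
  // Remap the environment onto spill slots so that a lazy deoptimization
  // triggered by the runtime call reads values from their saved locations.
  dart::Environment* env =
      compiler->SlowPathEnvironmentFor(instruction, /*num_slow_path_args=*/0);
  (void)env;  // ... attach env to the call's deopt/safepoint info ...
  compiler->RestoreLiveRegisters(instruction->locs());
}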

◆ SpecialStatsBegin()

void dart::FlowGraphCompiler::SpecialStatsBegin ( intptr_t  tag)
inline

Definition at line 471 of file flow_graph_compiler.h.

471 {
472 if (stats_ != nullptr) stats_->SpecialBegin(tag);
473 }

◆ SpecialStatsEnd()

void dart::FlowGraphCompiler::SpecialStatsEnd ( intptr_t  tag)
inline

Definition at line 475 of file flow_graph_compiler.h.

475 {
476 if (stats_ != nullptr) stats_->SpecialEnd(tag);
477 }

◆ StackSize()

intptr_t dart::FlowGraphCompiler::StackSize ( ) const

Definition at line 816 of file flow_graph_compiler.cc.

816 {
817 if (is_optimizing_) {
818 return flow_graph_.graph_entry()->spill_slot_count();
819 } else {
820 return parsed_function_.num_stack_locals();
821 }
822}
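
Note that StackSize() is also the base of the spill-slot numbering in SlowPathEnvironmentFor() above. A worked illustration with a hypothetical helper:

#include <flow_graph_compiler.h>

// Illustrative: with StackSize() == 5 and 2 arguments already pushed, the
// first slow-path spill slot is 7 (the starting value of next_slot above).
int FirstSlowPathSlot(const dart::FlowGraphCompiler& compiler, int args_pushed) {
  return compiler.StackSize() + args_pushed;
}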

◆ StatsBegin()

void dart::FlowGraphCompiler::StatsBegin ( Instruction * instr)
inline

Definition at line 463 of file flow_graph_compiler.h.

463 {
464 if (stats_ != nullptr) stats_->Begin(instr);
465 }

◆ StatsEnd()

void dart::FlowGraphCompiler::StatsEnd ( Instruction * instr)
inline

Definition at line 467 of file flow_graph_compiler.h.

467 {
468 if (stats_ != nullptr) stats_->End(instr);
469 }

◆ SupportsUnboxedDoubles()

static bool dart::FlowGraphCompiler::SupportsUnboxedDoubles ( )
static

◆ SupportsUnboxedSimd128()

static bool dart::FlowGraphCompiler::SupportsUnboxedSimd128 ( )
static
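
Both predicates are architecture-specific capability checks; their definitions live in the per-architecture compiler sources, which is why no body appears here. A hedged example of the kind of guard they enable (the helper is hypothetical; kUnboxedDouble and kTagged are the VM's Representation values):

#include <flow_graph_compiler.h>

// Hypothetical helper: prefer an unboxed representation for doubles only when
// the target architecture supports keeping doubles unboxed.
dart::Representation PreferredDoubleRepresentation() {
  return dart::FlowGraphCompiler::SupportsUnboxedDoubles()
             ? dart::kUnboxedDouble
             : dart::kTagged;
}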

◆ thread()

Thread * dart::FlowGraphCompiler::thread ( ) const
inline

Definition at line 913 of file flow_graph_compiler.h.

913{ return thread_; }

◆ TryIntrinsify()

bool dart::FlowGraphCompiler::TryIntrinsify ( )

Definition at line 1432 of file flow_graph_compiler.cc.

1432 {
1433 if (TryIntrinsifyHelper()) {
1434 fully_intrinsified_ = true;
1435 return true;
1436 }
1437 return false;
1438}

◆ used_static_fields()

GrowableArray< const Field * > & dart::FlowGraphCompiler::used_static_fields ( )
inline

Definition at line 479 of file flow_graph_compiler.h.

479 {
480 return used_static_fields_;
481 }

◆ VisitBlocks()

void dart::FlowGraphCompiler::VisitBlocks ( )

Definition at line 670 of file flow_graph_compiler.cc.

670 {
671 CompactBlocks();
672 if (compiler::Assembler::EmittingComments()) {
673 // The loop_info fields were cleared, recompute.
674 flow_graph().ComputeLoops();
675 }
676
677 // In precompiled mode, we require the function entry to come first (after the
678 // graph entry), since the polymorphic check is performed in the function
679 // entry (see Instructions::EntryPoint).
680 if (FLAG_precompiled_mode) {
681 ASSERT(block_order()[1] == flow_graph().graph_entry()->normal_entry());
682 }
683
684#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
685 const auto inner_lr_state = ComputeInnerLRState(flow_graph());
686#endif // defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
687
688#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_ARM64)
689 const bool should_align_loops =
690 FLAG_align_all_loops || IsMarkedWithAlignLoops(function());
691#endif // defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_ARM64)
692
693 for (intptr_t i = 0; i < block_order().length(); ++i) {
694 // Compile the block entry.
695 BlockEntryInstr* entry = block_order()[i];
696 assembler()->Comment("B%" Pd "", entry->block_id());
697 set_current_block(entry);
698
699 if (WasCompacted(entry)) {
700 continue;
701 }
702
703#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
704 // At the start of every non-entry block we expect return address either
705 // to be spilled into the frame or to be in the LR register.
706 if (entry->IsFunctionEntry() || entry->IsNativeEntry()) {
707 assembler()->set_lr_state(compiler::LRState::OnEntry());
708 } else {
709 assembler()->set_lr_state(inner_lr_state);
710 }
711#endif // defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
712
713#if defined(DEBUG)
714 if (!is_optimizing()) {
715 FrameStateClear();
716 }
717#endif
718
719 if (compiler::Assembler::EmittingComments()) {
720 for (LoopInfo* l = entry->loop_info(); l != nullptr; l = l->outer()) {
721 assembler()->Comment(" Loop %" Pd "", l->id());
722 }
723 if (entry->IsLoopHeader()) {
724 assembler()->Comment(" Loop Header");
725 }
726 }
727
728#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_ARM64)
729 if (should_align_loops && entry->IsLoopHeader() &&
730 /* condition continuation lost in extraction */) {
731 assembler()->Align(kPreferredLoopAlignment, 0);
732 /* additional line lost in extraction */
733 }
734#else
735 static_assert(kPreferredLoopAlignment == 1);
736#endif // defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_ARM64)
737
738 BeginCodeSourceRange(entry->source());
739 ASSERT(pending_deoptimization_env_ == nullptr);
740 pending_deoptimization_env_ = entry->env();
741 set_current_instruction(entry);
742 StatsBegin(entry);
743 entry->EmitNativeCode(this);
744 StatsEnd(entry);
745 set_current_instruction(nullptr);
746 pending_deoptimization_env_ = nullptr;
747 EndCodeSourceRange(entry->source());
748
749 if (skip_body_compilation()) {
750 ASSERT(entry == flow_graph().graph_entry()->normal_entry());
751 break;
752 }
753
754 // Compile all successors until an exit, branch, or a block entry.
755 for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) {
756 Instruction* instr = it.Current();
757 set_current_instruction(instr);
758 StatsBegin(instr);
759 // Unoptimized code always stores boxed values on the expression stack.
760 // However, unboxed representation is allowed for instruction inputs and
761 // outputs of certain types (e.g. for doubles).
762 // Unboxed inputs/outputs are handled in the instruction prologue
763 // and epilogue, but flagged as a mismatch on the IL level.
764 ASSERT(!is_optimizing() ||
765 !instr->HasUnmatchedInputRepresentations());
766
767 if (FLAG_code_comments || FLAG_disassemble ||
768 FLAG_disassemble_optimized) {
769 if (FLAG_source_lines) {
770 EmitSourceLine(instr);
771 }
772 EmitComment(instr);
773 }
774
775 BeginCodeSourceRange(instr->source());
776 EmitInstructionPrologue(instr);
777 ASSERT(pending_deoptimization_env_ == nullptr);
778 pending_deoptimization_env_ = instr->env();
779 DEBUG_ONLY(current_instruction_ = instr);
780 instr->EmitNativeCode(this);
781 DEBUG_ONLY(current_instruction_ = nullptr);
782 pending_deoptimization_env_ = nullptr;
783 if (IsPeephole(instr)) {
784 ASSERT(top_of_stack_ == nullptr);
785 top_of_stack_ = instr->AsDefinition();
786 } else {
787 EmitInstructionEpilogue(instr);
788 }
789 EndCodeSourceRange(instr->source());
790
791#if defined(DEBUG)
792 if (!is_optimizing()) {
793 FrameStateUpdateWith(instr);
794 }
795#endif
796 StatsEnd(instr);
797 set_current_instruction(nullptr);
798
799 if (auto indirect_goto = instr->AsIndirectGoto()) {
800 indirect_gotos_.Add(indirect_goto);
801 }
802 }
803
804#if defined(DEBUG)
805 ASSERT(is_optimizing() || FrameStateIsSafeToCall());
806#endif
807 }
808
809 set_current_block(nullptr);
810}
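
Condensed, the per-instruction protocol inside the loop above is: open the source range, run the prologue, publish the instruction's environment, emit, unpublish, run the epilogue. A reading-aid sketch (member names are taken from the listing; the environment assignments appear as comments because pending_deoptimization_env_ is a private field):

#include <flow_graph_compiler.h>

// Reading aid only: the emission steps VisitBlocks() performs for one
// instruction, minus stats, code comments, and the peephole special case.
void EmitOneInstructionSketch(dart::FlowGraphCompiler* compiler,
                              dart::Instruction* instr) {
  compiler->BeginCodeSourceRange(instr->source());
  compiler->EmitInstructionPrologue(instr);  // e.g. unbox mismatched inputs
  // pending_deoptimization_env_ = instr->env();  (published for deopt info)
  instr->EmitNativeCode(compiler);
  // pending_deoptimization_env_ = nullptr;
  compiler->EmitInstructionEpilogue(instr);  // e.g. push the produced value
  compiler->EndCodeSourceRange(instr->source());
}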

◆ WasCompacted()

bool dart::FlowGraphCompiler::WasCompacted ( BlockEntryInstr * block_entry) const

Definition at line 838 of file flow_graph_compiler.cc.

838 {
839 const intptr_t block_index = block_entry->postorder_number();
840 return block_info_[block_index]->WasCompacted();
841}

◆ zone()

Zone * dart::FlowGraphCompiler::zone ( ) const
inline

Definition at line 915 of file flow_graph_compiler.h.

915{ return zone_; }

Friends And Related Symbol Documentation

◆ BoxInt64Instr

friend class BoxInt64Instr
friend

Definition at line 955 of file flow_graph_compiler.h.

◆ CheckNullInstr

friend class CheckNullInstr
friend

Definition at line 956 of file flow_graph_compiler.h.

◆ CheckStackOverflowInstr

friend class CheckStackOverflowInstr
friend

Definition at line 958 of file flow_graph_compiler.h.

◆ CheckStackOverflowSlowPath

friend class CheckStackOverflowSlowPath
friend

Definition at line 961 of file flow_graph_compiler.h.

◆ GraphIntrinsicCodeGenScope

friend class GraphIntrinsicCodeGenScope
friend

Definition at line 962 of file flow_graph_compiler.h.

◆ NullErrorSlowPath

friend class NullErrorSlowPath
friend

Definition at line 957 of file flow_graph_compiler.h.

◆ StoreFieldInstr

friend class StoreFieldInstr
friend

Definition at line 960 of file flow_graph_compiler.h.

◆ StoreIndexedInstr

friend class StoreIndexedInstr
friend

Definition at line 959 of file flow_graph_compiler.h.

Member Data Documentation

◆ kMaxNumberOfCidRangesToTest

constexpr intptr_t dart::FlowGraphCompiler::kMaxNumberOfCidRangesToTest = 4
static constexpr

Definition at line 651 of file flow_graph_compiler.h.


The documentation for this class was generated from the following files:

flow_graph_compiler.h
flow_graph_compiler.cc