Flutter Engine
dart::CallSpecializer Class Reference (abstract)

#include <call_specializer.h>

Inheritance diagram for dart::CallSpecializer:
dart::ValueObject
    dart::InstructionVisitor
        dart::FlowGraphVisitor
            dart::CallSpecializer
                dart::AotCallSpecializer
                dart::JitCallSpecializer

Classes

struct  ExactnessInfo
 

Public Member Functions

 CallSpecializer (FlowGraph *flow_graph, SpeculativeInliningPolicy *speculative_policy, bool should_clone_fields)
 
virtual ~CallSpecializer ()
 
FlowGraph * flow_graph () const
 
void set_flow_graph (FlowGraph *flow_graph)
 
void ApplyICData ()
 
void ApplyClassIds ()
 
virtual void ReplaceInstanceCallsWithDispatchTableCalls ()
 
void InsertBefore (Instruction *next, Instruction *instr, Environment *env, FlowGraph::UseKind use_kind)
 
void InsertSpeculativeBefore (Instruction *next, Instruction *instr, Environment *env, FlowGraph::UseKind use_kind)
 
virtual void VisitStaticCall (StaticCallInstr *instr)
 
virtual void VisitLoadCodeUnits (LoadCodeUnitsInstr *instr)
 
- Public Member Functions inherited from dart::FlowGraphVisitor
 FlowGraphVisitor (const GrowableArray< BlockEntryInstr * > &block_order)
 
virtual ~FlowGraphVisitor ()
 
ForwardInstructionIterator * current_iterator () const
 
virtual void VisitBlocks ()
 
- Public Member Functions inherited from dart::InstructionVisitor
 InstructionVisitor ()
 
virtual ~InstructionVisitor ()
 
- Public Member Functions inherited from dart::ValueObject
 ValueObject ()
 
 ~ValueObject ()
 

Protected Member Functions

Thread * thread () const
 
IsolateGroup * isolate_group () const
 
Zone * zone () const
 
const Function & function () const
 
bool TryReplaceWithBinaryOp (InstanceCallInstr *call, Token::Kind op_kind)
 
bool TryReplaceWithUnaryOp (InstanceCallInstr *call, Token::Kind op_kind)
 
bool TryReplaceWithEqualityOp (InstanceCallInstr *call, Token::Kind op_kind)
 
bool TryReplaceWithRelationalOp (InstanceCallInstr *call, Token::Kind op_kind)
 
bool TryInlineInstanceGetter (InstanceCallInstr *call)
 
bool TryInlineInstanceSetter (InstanceCallInstr *call)
 
bool TryInlineInstanceMethod (InstanceCallInstr *call)
 
void ReplaceWithInstanceOf (InstanceCallInstr *instr)
 
void ReplaceCallWithResult (Definition *call, Instruction *replacement, Definition *result)
 
void ReplaceCall (Definition *call, Definition *replacement)
 
void AddReceiverCheck (InstanceCallInstr *call)
 
void AddCheckNull (Value *to_check, const String &function_name, intptr_t deopt_id, Environment *deopt_environment, Instruction *insert_before)
 
virtual bool TryCreateICData (InstanceCallInstr *call)
 
virtual bool TryReplaceInstanceOfWithRangeCheck (InstanceCallInstr *call, const AbstractType &type)
 
virtual bool TryOptimizeStaticCallUsingStaticTypes (StaticCallInstr *call)=0
 
void InlineImplicitInstanceGetter (Definition *call, const Field &field)
 
void AddCheckClass (Definition *to_check, const Cids &cids, intptr_t deopt_id, Environment *deopt_environment, Instruction *insert_before)
 
- Protected Member Functions inherited from dart::FlowGraphVisitor
void set_block_order (const GrowableArray< BlockEntryInstr * > &block_order)
 

Protected Attributes

SpeculativeInliningPolicy * speculative_policy_
 
const bool should_clone_fields_
 
- Protected Attributes inherited from dart::FlowGraphVisitor
ForwardInstructionIterator * current_iterator_
 

Detailed Description

Definition at line 34 of file call_specializer.h.
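
CallSpecializer is the shared base of the JIT and AOT call specialization passes (dart::JitCallSpecializer and dart::AotCallSpecializer). It visits a FlowGraph and, guided by collected ICData type feedback and static type information, replaces generic instance and static calls with cheaper specialized instructions such as unboxed arithmetic, inlined implicit getters and setters, and class-id tests.

The sketch below shows, in broad strokes, how a compilation pass could drive a specializer over a flow graph. The wrapper function OptimizeCalls and the exact pass ordering are illustrative assumptions, not the engine's actual pipeline code; only the member functions it calls are taken from this class.

// Illustrative sketch only: drive a CallSpecializer subclass over a flow
// graph. The wrapper and the ordering are assumptions made for this example.
void OptimizeCalls(CallSpecializer* specializer) {
  // Attach class-id information from collected ICData to instance calls.
  specializer->ApplyICData();
  // Revisit calls (including TFA-devirtualized static calls) and replace
  // them with specialized instructions where the feedback allows it.
  specializer->ApplyClassIds();
  // In AOT mode, remaining instance calls can be routed through dispatch
  // tables; the base implementation is a no-op (JIT).
  specializer->ReplaceInstanceCallsWithDispatchTableCalls();
}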

Constructor & Destructor Documentation

◆ CallSpecializer()

dart::CallSpecializer::CallSpecializer ( FlowGraph * flow_graph,
SpeculativeInliningPolicy * speculative_policy,
bool  should_clone_fields 
)
inline

Definition at line 36 of file call_specializer.h.

36 CallSpecializer(FlowGraph* flow_graph,
37                 SpeculativeInliningPolicy* speculative_policy,
38                 bool should_clone_fields)
39     : FlowGraphVisitor(flow_graph->reverse_postorder()),
40 speculative_policy_(speculative_policy),
41 should_clone_fields_(should_clone_fields),
42 flow_graph_(flow_graph) {}

◆ ~CallSpecializer()

virtual dart::CallSpecializer::~CallSpecializer ( )
inline virtual

Definition at line 44 of file call_specializer.h.

44{}

Member Function Documentation

◆ AddCheckClass()

void dart::CallSpecializer::AddCheckClass ( Definition * to_check,
const Cids & cids,
intptr_t  deopt_id,
Environment * deopt_environment,
Instruction * insert_before 
)
protected

Definition at line 239 of file call_specializer.cc.

243 {
244 // Type propagation has not run yet, we cannot eliminate the check.
245 Instruction* check = flow_graph_->CreateCheckClass(to_check, cids, deopt_id,
246 insert_before->source());
247 InsertBefore(insert_before, check, deopt_environment, FlowGraph::kEffect);
248}

◆ AddCheckNull()

void dart::CallSpecializer::AddCheckNull ( Value * to_check,
const String & function_name,
intptr_t  deopt_id,
Environment * deopt_environment,
Instruction * insert_before 
)
protected

Definition at line 258 of file call_specializer.cc.

262 {
263 if (to_check->Type()->is_nullable()) {
264 CheckNullInstr* check_null =
265 new (Z) CheckNullInstr(to_check->CopyWithType(Z), function_name,
266 deopt_id, insert_before->source());
267 if (FLAG_trace_strong_mode_types) {
268 THR_Print("[Strong mode] Inserted %s\n", check_null->ToCString());
269 }
270 InsertBefore(insert_before, check_null, deopt_environment,
271 FlowGraph::kEffect);
272 }
273}

◆ AddReceiverCheck()

void dart::CallSpecializer::AddReceiverCheck ( InstanceCallInstr * call )
inline protected

Definition at line 113 of file call_specializer.h.

113 {
114 AddCheckClass(call->Receiver()->definition(), call->Targets(),
115 call->deopt_id(), call->env(), call);
116 }

◆ ApplyClassIds()

void dart::CallSpecializer::ApplyClassIds ( )

Definition at line 71 of file call_specializer.cc.

71 {
72 ASSERT(current_iterator_ == nullptr);
73 for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator();
74 !block_it.Done(); block_it.Advance()) {
75 thread()->CheckForSafepoint();
76 ForwardInstructionIterator it(block_it.Current());
77 current_iterator_ = &it;
78 for (; !it.Done(); it.Advance()) {
79 Instruction* instr = it.Current();
80 if (instr->IsInstanceCall()) {
81 InstanceCallInstr* call = instr->AsInstanceCall();
82 if (call->HasICData()) {
83 if (TryCreateICData(call)) {
84 VisitInstanceCall(call);
85 }
86 }
87 } else if (auto static_call = instr->AsStaticCall()) {
88 // If TFA devirtualized instance calls to static calls we also want to
89 // process them here.
90 VisitStaticCall(static_call);
91 } else if (instr->IsPolymorphicInstanceCall()) {
92 SpecializePolymorphicInstanceCall(instr->AsPolymorphicInstanceCall());
93 }
94 }
95 current_iterator_ = nullptr;
96 }
97}

◆ ApplyICData()

void dart::CallSpecializer::ApplyICData ( )

Definition at line 61 of file call_specializer.cc.

61 {
62 VisitBlocks();
63}

◆ flow_graph()

FlowGraph * dart::CallSpecializer::flow_graph ( ) const
inline

Definition at line 46 of file call_specializer.h.

46{ return flow_graph_; }

◆ function()

const Function & dart::CallSpecializer::function ( ) const
inline protected

Definition at line 90 of file call_specializer.h.

90{ return flow_graph_->function(); }

◆ InlineImplicitInstanceGetter()

void dart::CallSpecializer::InlineImplicitInstanceGetter ( Definition * call,
const Field & field 
)
protected

Definition at line 752 of file call_specializer.cc.

753 {
754 ASSERT(field.is_instance());
755 Definition* receiver = call->ArgumentAt(0);
756
757 const bool calls_initializer = field.NeedsInitializationCheckOnLoad();
758 const Slot& slot = Slot::Get(field, &flow_graph()->parsed_function());
759 LoadFieldInstr* load = new (Z) LoadFieldInstr(
760 new (Z) Value(receiver), slot, call->source(), calls_initializer,
761 calls_initializer ? call->deopt_id() : DeoptId::kNone);
762
763 // Note that this is a case of LoadField -> InstanceCall lazy deopt.
764 // Which means that we don't need to remove arguments from the environment
765 // because normal getter call expects receiver pushed (unlike the case
766 // of LoadField -> LoadField deoptimization handled by
767 // FlowGraph::AttachEnvironment).
768 if (!calls_initializer) {
769 // If we don't call initializer then we don't need an environment.
770 call->RemoveEnvironment();
771 }
772 ReplaceCall(call, load);
773
774 if (load->slot().type().ToNullableCid() != kDynamicCid) {
775 // Reset value types if we know concrete cid.
776 for (Value::Iterator it(load->input_use_list()); !it.Done(); it.Advance()) {
777 it.Current()->SetReachingType(nullptr);
778 }
779 }
780}

◆ InsertBefore()

void dart::CallSpecializer::InsertBefore ( Instruction * next,
Instruction * instr,
Environment * env,
FlowGraph::UseKind  use_kind 
)
inline

Definition at line 61 of file call_specializer.h.

64 {
65 flow_graph_->InsertBefore(next, instr, env, use_kind);
66 }
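InsertBefore forwards to FlowGraph::InsertBefore and is the basic building block the specializer uses to place guard instructions ahead of a call it is about to replace. Below is a minimal sketch of the common pattern, mirroring the smi checks in TryReplaceWithBinaryOp and TryReplaceWithUnaryOp on this page; `call` and `input` stand for an InstanceCallInstr being specialized and one of its arguments, and Z is the usual zone-allocation macro.

// Sketch: insert a speculative smi guard before `call`, for effect only.
// The check produces no value; it deoptimizes if `input` is not a Smi.
// Attaching call->env() gives the deopt point an environment to resume from.
CheckSmiInstr* guard = new (Z)
    CheckSmiInstr(new (Z) Value(input), call->deopt_id(), call->source());
InsertBefore(call, guard, call->env(), FlowGraph::kEffect);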

◆ InsertSpeculativeBefore()

void dart::CallSpecializer::InsertSpeculativeBefore ( Instruction * next,
Instruction * instr,
Environment * env,
FlowGraph::UseKind  use_kind 
)
inline

Definition at line 67 of file call_specializer.h.

70 {
71 flow_graph_->InsertSpeculativeBefore(next, instr, env, use_kind);
72 }

◆ isolate_group()

IsolateGroup * dart::CallSpecializer::isolate_group ( ) const
inline protected

Definition at line 88 of file call_specializer.h.

88{ return flow_graph_->isolate_group(); }

◆ ReplaceCall()

void dart::CallSpecializer::ReplaceCall ( Definition * call,
Definition * replacement 
)
protected

Definition at line 222 of file call_specializer.cc.

222 {
223 ReplaceCallWithResult(call, replacement, nullptr);
224}

◆ ReplaceCallWithResult()

void dart::CallSpecializer::ReplaceCallWithResult ( Definition * call,
Instruction * replacement,
Definition * result 
)
protected

Definition at line 210 of file call_specializer.cc.

212 {
213 ASSERT(!call->HasMoveArguments());
214 if (result == nullptr) {
215 ASSERT(replacement->IsDefinition());
216 call->ReplaceWith(replacement->AsDefinition(), current_iterator());
217 } else {
218 call->ReplaceWithResult(replacement, result, current_iterator());
219 }
220}

◆ ReplaceInstanceCallsWithDispatchTableCalls()

void dart::CallSpecializer::ReplaceInstanceCallsWithDispatchTableCalls ( )
virtual

Reimplemented in dart::AotCallSpecializer.

Definition at line 1690 of file call_specializer.cc.

1690 {
1691 // Only implemented for AOT.
1692}

◆ ReplaceWithInstanceOf()

void dart::CallSpecializer::ReplaceWithInstanceOf ( InstanceCallInstr * instr )
protected

Definition at line 1197 of file call_specializer.cc.

1197 {
1198 ASSERT(Token::IsTypeTestOperator(call->token_kind()));
1199 Definition* left = call->ArgumentAt(0);
1200 Definition* instantiator_type_args = nullptr;
1201 Definition* function_type_args = nullptr;
1202 AbstractType& type = AbstractType::ZoneHandle(Z);
1203 ASSERT(call->type_args_len() == 0);
1204 if (call->ArgumentCount() == 2) {
1205 instantiator_type_args = flow_graph()->constant_null();
1206 function_type_args = flow_graph()->constant_null();
1207 ASSERT(call->MatchesCoreName(Symbols::_simpleInstanceOf()));
1208 type = AbstractType::Cast(call->ArgumentAt(1)->AsConstant()->value()).ptr();
1209 } else {
1210 ASSERT(call->ArgumentCount() == 4);
1211 instantiator_type_args = call->ArgumentAt(1);
1212 function_type_args = call->ArgumentAt(2);
1213 type = AbstractType::Cast(call->ArgumentAt(3)->AsConstant()->value()).ptr();
1214 }
1215
1216 if (TryOptimizeInstanceOfUsingStaticTypes(call, type)) {
1217 return;
1218 }
1219
1220 intptr_t type_cid;
1221 if (TypeCheckAsClassEquality(type, &type_cid)) {
1222 LoadClassIdInstr* load_cid =
1223 new (Z) LoadClassIdInstr(new (Z) Value(left), kUnboxedUword);
1224 InsertBefore(call, load_cid, nullptr, FlowGraph::kValue);
1225 ConstantInstr* constant_cid = flow_graph()->GetConstant(
1226 Smi::Handle(Z, Smi::New(type_cid)), kUnboxedUword);
1227 EqualityCompareInstr* check_cid = new (Z) EqualityCompareInstr(
1228 call->source(), Token::kEQ, new Value(load_cid),
1229 new Value(constant_cid), kIntegerCid, DeoptId::kNone, false,
1230 Instruction::kNotSpeculative);
1231 ReplaceCall(call, check_cid);
1232 return;
1233 }
1234
1235 if (TryReplaceInstanceOfWithRangeCheck(call, type)) {
1236 return;
1237 }
1238
1239 const ICData& unary_checks =
1240 ICData::ZoneHandle(Z, call->ic_data()->AsUnaryClassChecks());
1241 const intptr_t number_of_checks = unary_checks.NumberOfChecks();
1242 if (number_of_checks > 0 && number_of_checks <= FLAG_max_polymorphic_checks) {
1243 ZoneGrowableArray<intptr_t>* results =
1244 new (Z) ZoneGrowableArray<intptr_t>(number_of_checks * 2);
1245 const Bool& as_bool =
1246 Bool::ZoneHandle(Z, InstanceOfAsBool(unary_checks, type, results));
1247 if (as_bool.IsNull() || CompilerState::Current().is_aot()) {
1248 if (results->length() == number_of_checks * 2) {
1249 const bool can_deopt = SpecializeTestCidsForNumericTypes(results, type);
1250 if (can_deopt &&
1252 // Guard against repeated speculative inlining.
1253 return;
1254 }
1255 TestCidsInstr* test_cids = new (Z) TestCidsInstr(
1256 call->source(), Token::kIS, new (Z) Value(left), *results,
1257 can_deopt ? call->deopt_id() : DeoptId::kNone);
1258 // Remove type.
1259 ReplaceCall(call, test_cids);
1260 return;
1261 }
1262 } else {
1263 // One result only.
1264 AddReceiverCheck(call);
1265 ConstantInstr* bool_const = flow_graph()->GetConstant(as_bool);
1266 ASSERT(!call->HasMoveArguments());
1267 call->ReplaceUsesWith(bool_const);
1268 ASSERT(current_iterator()->Current() == call);
1270 return;
1271 }
1272 }
1273
1274 InstanceOfInstr* instance_of = new (Z) InstanceOfInstr(
1275 call->source(), new (Z) Value(left),
1276 new (Z) Value(instantiator_type_args), new (Z) Value(function_type_args),
1277 type, call->deopt_id());
1278 ReplaceCall(call, instance_of);
1279}

◆ set_flow_graph()

void dart::CallSpecializer::set_flow_graph ( FlowGraph * flow_graph )
inline

Definition at line 48 of file call_specializer.h.

48 {
49 flow_graph_ = flow_graph;
50 set_block_order(flow_graph->reverse_postorder());
51 }

◆ thread()

Thread * dart::CallSpecializer::thread ( ) const
inline protected

Definition at line 87 of file call_specializer.h.

87{ return flow_graph_->thread(); }

◆ TryCreateICData()

bool dart::CallSpecializer::TryCreateICData ( InstanceCallInstr * call )
protected virtual

Reimplemented in dart::AotCallSpecializer.

Definition at line 99 of file call_specializer.cc.

99 {
100 ASSERT(call->HasICData());
101
102 if (call->Targets().length() > 0) {
103 // This occurs when an instance call has too many checks, will be converted
104 // to megamorphic call.
105 return false;
106 }
107
108 const intptr_t receiver_index = call->FirstArgIndex();
109 GrowableArray<intptr_t> class_ids(call->ic_data()->NumArgsTested());
110 ASSERT(call->ic_data()->NumArgsTested() <=
111 call->ArgumentCountWithoutTypeArgs());
112 for (intptr_t i = 0; i < call->ic_data()->NumArgsTested(); i++) {
113 class_ids.Add(call->ArgumentValueAt(receiver_index + i)->Type()->ToCid());
114 }
115
116 const Token::Kind op_kind = call->token_kind();
117 if (FLAG_guess_icdata_cid && !CompilerState::Current().is_aot()) {
118 if (Token::IsRelationalOperator(op_kind) ||
119 Token::IsEqualityOperator(op_kind) ||
120 Token::IsBinaryOperator(op_kind)) {
121 // Guess cid: if one of the inputs is a number assume that the other
122 // is a number of same type, unless the interface target tells us this
123 // is impossible.
124 if (call->CanReceiverBeSmiBasedOnInterfaceTarget(zone())) {
125 const intptr_t cid_0 = class_ids[0];
126 const intptr_t cid_1 = class_ids[1];
127 if ((cid_0 == kDynamicCid) && (IsNumberCid(cid_1))) {
128 class_ids[0] = cid_1;
129 } else if (IsNumberCid(cid_0) && (cid_1 == kDynamicCid)) {
130 class_ids[1] = cid_0;
131 }
132 }
133 }
134 }
135
136 bool all_cids_known = true;
137 for (intptr_t i = 0; i < class_ids.length(); i++) {
138 if (class_ids[i] == kDynamicCid) {
139 // Not all cid-s known.
140 all_cids_known = false;
141 break;
142 }
143 }
144
145 if (all_cids_known) {
146 const intptr_t receiver_cid = class_ids[0];
147 if (receiver_cid == kSentinelCid) {
148 // Unreachable call.
149 return false;
150 }
151 const Class& receiver_class =
152 Class::Handle(Z, IG->class_table()->At(receiver_cid));
153 if (!receiver_class.is_finalized()) {
154 // Do not eagerly finalize classes. ResolveDynamicForReceiverClass can
155 // cause class finalization, since callee's receiver class may not be
156 // finalized yet.
157 return false;
158 }
159 const Function& function = Function::Handle(
160 Z, call->ResolveForReceiverClass(receiver_class, /*allow_add=*/false));
161 if (function.IsNull()) {
162 return false;
163 }
164 ASSERT(!function.IsInvokeFieldDispatcher());
165
166 // Update the CallTargets attached to the instruction with our speculative
167 // target. The next round of CallSpecializer::VisitInstanceCall will make
168 // use of this.
169 call->SetTargets(CallTargets::CreateMonomorphic(Z, class_ids[0], function));
170 if (class_ids.length() == 2) {
171 call->SetBinaryFeedback(
172 BinaryFeedback::CreateMonomorphic(Z, class_ids[0], class_ids[1]));
173 }
174 return true;
175 }
176
177 return false;
178}

◆ TryInlineInstanceGetter()

bool dart::CallSpecializer::TryInlineInstanceGetter ( InstanceCallInstr * call )
protected

Definition at line 948 of file call_specializer.cc.

948 {
949 const CallTargets& targets = call->Targets();
950 if (!targets.HasSingleTarget()) {
951 // Polymorphic sites are inlined like normal methods by conventional
952 // inlining in FlowGraphInliner.
953 return false;
954 }
955 const Function& target = targets.FirstTarget();
956 if (target.kind() != UntaggedFunction::kImplicitGetter) {
957 // Non-implicit getters are inlined like normal methods by conventional
958 // inlining in FlowGraphInliner.
959 return false;
960 }
961 if (!CompilerState::Current().is_aot() && !target.WasCompiled()) {
962 return false;
963 }
964 return TryInlineImplicitInstanceGetter(call);
965}

◆ TryInlineInstanceMethod()

bool dart::CallSpecializer::TryInlineInstanceMethod ( InstanceCallInstr * call )
protected

Definition at line 968 of file call_specializer.cc.

968 {
969 const CallTargets& targets = call->Targets();
970 if (!targets.IsMonomorphic()) {
971 // No type feedback collected or multiple receivers/targets found.
972 return false;
973 }
974
975 const Function& target = targets.FirstTarget();
976 intptr_t receiver_cid = targets.MonomorphicReceiverCid();
977 MethodRecognizer::Kind recognized_kind = target.recognized_kind();
978
979 if (CanUnboxDouble() &&
980 (recognized_kind == MethodRecognizer::kIntegerToDouble)) {
981 if (receiver_cid == kSmiCid) {
982 AddReceiverCheck(call);
983 ReplaceCall(call,
984 new (Z) SmiToDoubleInstr(new (Z) Value(call->ArgumentAt(0)),
985 call->source()));
986 return true;
987 } else if ((receiver_cid == kMintCid) && CanConvertInt64ToDouble()) {
988 AddReceiverCheck(call);
989 ReplaceCall(call,
990 new (Z) Int64ToDoubleInstr(new (Z) Value(call->ArgumentAt(0)),
991 call->deopt_id()));
992 return true;
993 }
994 }
995
996 if (receiver_cid == kDoubleCid) {
997 if (!CanUnboxDouble()) {
998 return false;
999 }
1000 switch (recognized_kind) {
1001 case MethodRecognizer::kDoubleToInteger: {
1002 AddReceiverCheck(call);
1003 ASSERT(call->HasICData());
1004 const ICData& ic_data = *call->ic_data();
1005 Definition* input = call->ArgumentAt(0);
1006 Definition* d2i_instr = nullptr;
1007 if (ic_data.HasDeoptReason(ICData::kDeoptDoubleToSmi)) {
1008 // Do not repeatedly deoptimize because result didn't fit into Smi.
1009 d2i_instr = new (Z) DoubleToIntegerInstr(
1010 new (Z) Value(input), recognized_kind, call->deopt_id());
1011 } else {
1012 // Optimistically assume result fits into Smi.
1013 d2i_instr =
1014 new (Z) DoubleToSmiInstr(new (Z) Value(input), call->deopt_id());
1015 }
1016 ReplaceCall(call, d2i_instr);
1017 return true;
1018 }
1019 default:
1020 break;
1021 }
1022 }
1023
1024 return TryReplaceInstanceCallWithInline(flow_graph_, current_iterator(), call,
1026}

◆ TryInlineInstanceSetter()

bool dart::CallSpecializer::TryInlineInstanceSetter ( InstanceCallInstr * call )
protected

Definition at line 782 of file call_specializer.cc.

782 {
783 const CallTargets& targets = instr->Targets();
784 if (!targets.HasSingleTarget()) {
785 // Polymorphic sites are inlined like normal method calls by conventional
786 // inlining.
787 return false;
788 }
789 const Function& target = targets.FirstTarget();
790 if (target.kind() != UntaggedFunction::kImplicitSetter) {
791 // Non-implicit setter are inlined like normal method calls.
792 return false;
793 }
794 if (!CompilerState::Current().is_aot() && !target.WasCompiled()) {
795 return false;
796 }
797 Field& field = Field::ZoneHandle(Z, target.accessor_field());
798 ASSERT(!field.IsNull());
799 if (should_clone_fields_) {
800 field = field.CloneFromOriginal();
801 }
802 if (field.is_late() && field.is_final()) {
803 return false;
804 }
805
806 switch (flow_graph()->CheckForInstanceCall(
807 instr, UntaggedFunction::kImplicitSetter)) {
808 case FlowGraph::ToCheck::kCheckNull:
809 AddCheckNull(instr->Receiver(), instr->function_name(), instr->deopt_id(),
810 instr->env(), instr);
811 break;
812 case FlowGraph::ToCheck::kCheckCid:
813 if (CompilerState::Current().is_aot()) {
814 return false; // AOT cannot class check
815 }
816 AddReceiverCheck(instr);
817 break;
818 case FlowGraph::ToCheck::kNoCheck:
819 break;
820 }
821
822 // True if we can use unchecked entry into the setter.
823 bool is_unchecked_call = false;
824 if (!CompilerState::Current().is_aot()) {
825 if (targets.IsMonomorphic() && targets.MonomorphicExactness().IsExact()) {
826 if (targets.MonomorphicExactness().IsTriviallyExact()) {
827 flow_graph()->AddExactnessGuard(instr,
828 targets.MonomorphicReceiverCid());
829 }
830 is_unchecked_call = true;
831 }
832 }
833
834 if (IG->use_field_guards()) {
835 if (field.guarded_cid() != kDynamicCid) {
836 InsertSpeculativeBefore(
837 instr,
838 new (Z) GuardFieldClassInstr(new (Z) Value(instr->ArgumentAt(1)),
839 field, instr->deopt_id()),
840 instr->env(), FlowGraph::kEffect);
841 }
842
843 if (field.needs_length_check()) {
844 InsertSpeculativeBefore(
845 instr,
846 new (Z) GuardFieldLengthInstr(new (Z) Value(instr->ArgumentAt(1)),
847 field, instr->deopt_id()),
848 instr->env(), FlowGraph::kEffect);
849 }
850
851 if (field.static_type_exactness_state().NeedsFieldGuard()) {
852 InsertSpeculativeBefore(
853 instr,
854 new (Z) GuardFieldTypeInstr(new (Z) Value(instr->ArgumentAt(1)),
855 field, instr->deopt_id()),
856 instr->env(), FlowGraph::kEffect);
857 }
858 }
859
860 // Build an AssertAssignable if necessary.
861 const AbstractType& dst_type = AbstractType::ZoneHandle(zone(), field.type());
862 if (!dst_type.IsTopTypeForSubtyping()) {
863 // Compute if we need to type check the value. Always type check if
864 // at a dynamic invocation.
865 bool needs_check = true;
866 if (!instr->interface_target().IsNull()) {
867 if (field.is_covariant()) {
868 // Always type check covariant fields.
869 needs_check = true;
870 } else if (field.is_generic_covariant_impl()) {
871 // If field is generic covariant then we don't need to check it
872 // if the invocation was marked as unchecked (e.g. receiver of
873 // the invocation is also the receiver of the surrounding method).
874 // Note: we can't use flow_graph()->IsReceiver() for this optimization
875 // because strong mode only gives static guarantees at the AST level
876 // not at the SSA level.
877 needs_check = !(is_unchecked_call ||
878 (instr->entry_kind() == Code::EntryKind::kUnchecked));
879 } else {
880 // The rest of the stores are checked statically (we are not at
881 // a dynamic invocation).
882 needs_check = false;
883 }
884 }
885
886 if (needs_check) {
887 Definition* instantiator_type_args = flow_graph_->constant_null();
888 Definition* function_type_args = flow_graph_->constant_null();
889 if (!dst_type.IsInstantiated()) {
890 const Class& owner = Class::Handle(Z, field.Owner());
891 if (owner.NumTypeArguments() > 0) {
892 instantiator_type_args = new (Z) LoadFieldInstr(
893 new (Z) Value(instr->ArgumentAt(0)),
894 Slot::GetTypeArgumentsSlotFor(thread(), owner), instr->source());
895 InsertSpeculativeBefore(instr, instantiator_type_args, instr->env(),
897 }
898 }
899
900 auto assert_assignable = new (Z) AssertAssignableInstr(
901 instr->source(), new (Z) Value(instr->ArgumentAt(1)),
902 new (Z) Value(flow_graph_->GetConstant(dst_type)),
903 new (Z) Value(instantiator_type_args),
904 new (Z) Value(function_type_args),
905 String::ZoneHandle(zone(), field.name()), instr->deopt_id());
906 InsertSpeculativeBefore(instr, assert_assignable, instr->env(),
908 }
909 }
910
911 // Field guard was detached.
912 ASSERT(instr->FirstArgIndex() == 0);
913 StoreFieldInstr* store = new (Z)
914 StoreFieldInstr(field, new (Z) Value(instr->ArgumentAt(0)),
915 new (Z) Value(instr->ArgumentAt(1)), kEmitStoreBarrier,
916 instr->source(), &flow_graph()->parsed_function());
917
918 // Discard the environment from the original instruction because the store
919 // can't deoptimize.
920 instr->RemoveEnvironment();
921 ReplaceCallWithResult(instr, store, flow_graph()->constant_null());
922 return true;
923}

◆ TryOptimizeStaticCallUsingStaticTypes()

virtual bool dart::CallSpecializer::TryOptimizeStaticCallUsingStaticTypes ( StaticCallInstr * call )
protected pure virtual
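
This member is pure virtual, so no definition is listed here; each backend supplies its own (see dart::AotCallSpecializer and dart::JitCallSpecializer). As a rough, hypothetical sketch of the override contract only: an implementation inspects the static call, performs a replacement via ReplaceCall or ReplaceCallWithResult when it can, and reports whether it did so. MyCallSpecializer is an invented name used purely for illustration.

// Hypothetical subclass, illustrating the override contract only.
class MyCallSpecializer : public CallSpecializer {
 public:
  using CallSpecializer::CallSpecializer;  // inherit the base constructor

 protected:
  bool TryOptimizeStaticCallUsingStaticTypes(StaticCallInstr* call) override {
    // Inspect the call's static argument types here; if a cheaper
    // replacement exists, build it, call ReplaceCall(call, replacement),
    // and return true. Returning false leaves the static call untouched.
    return false;
  }
};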

◆ TryReplaceInstanceOfWithRangeCheck()

bool dart::CallSpecializer::TryReplaceInstanceOfWithRangeCheck ( InstanceCallInstr * call,
const AbstractType & type 
)
protected virtual

Reimplemented in dart::AotCallSpecializer.

Definition at line 1137 of file call_specializer.cc.

1139 {
1140 // TODO(dartbug.com/30632) does this optimization make sense in JIT?
1141 return false;
1142}

◆ TryReplaceWithBinaryOp()

bool dart::CallSpecializer::TryReplaceWithBinaryOp ( InstanceCallInstr * call,
Token::Kind  op_kind 
)
protected

Definition at line 501 of file call_specializer.cc.

502 {
503 intptr_t operands_type = kIllegalCid;
504 ASSERT(call->HasICData());
505 const BinaryFeedback& binary_feedback = call->BinaryFeedback();
506 switch (op_kind) {
507 case Token::kADD:
508 case Token::kSUB:
509 case Token::kMUL:
510 if (binary_feedback.OperandsAre(kSmiCid)) {
511 // Don't generate smi code if the IC data is marked because
512 // of an overflow.
513 operands_type =
514 call->ic_data()->HasDeoptReason(ICData::kDeoptBinarySmiOp)
515 ? kMintCid
516 : kSmiCid;
517 } else if (binary_feedback.OperandsAreSmiOrMint()) {
518 // Don't generate mint code if the IC data is marked because of an
519 // overflow.
520 if (call->ic_data()->HasDeoptReason(ICData::kDeoptBinaryInt64Op))
521 return false;
522 operands_type = kMintCid;
523 } else if (ShouldSpecializeForDouble(binary_feedback)) {
524 operands_type = kDoubleCid;
525 } else if (binary_feedback.OperandsAre(kFloat32x4Cid)) {
526 operands_type = kFloat32x4Cid;
527 } else if (binary_feedback.OperandsAre(kInt32x4Cid)) {
528 ASSERT(op_kind != Token::kMUL); // Int32x4 doesn't have a multiply op.
529 operands_type = kInt32x4Cid;
530 } else if (binary_feedback.OperandsAre(kFloat64x2Cid)) {
531 operands_type = kFloat64x2Cid;
532 } else {
533 return false;
534 }
535 break;
536 case Token::kDIV:
537 if (ShouldSpecializeForDouble(binary_feedback) ||
538 binary_feedback.OperandsAre(kSmiCid)) {
539 operands_type = kDoubleCid;
540 } else if (binary_feedback.OperandsAre(kFloat32x4Cid)) {
541 operands_type = kFloat32x4Cid;
542 } else if (binary_feedback.OperandsAre(kFloat64x2Cid)) {
543 operands_type = kFloat64x2Cid;
544 } else {
545 return false;
546 }
547 break;
548 case Token::kBIT_AND:
549 case Token::kBIT_OR:
550 case Token::kBIT_XOR:
551 if (binary_feedback.OperandsAre(kSmiCid)) {
552 operands_type = kSmiCid;
553 } else if (binary_feedback.OperandsAreSmiOrMint()) {
554 operands_type = kMintCid;
555 } else if (binary_feedback.OperandsAre(kInt32x4Cid)) {
556 operands_type = kInt32x4Cid;
557 } else {
558 return false;
559 }
560 break;
561 case Token::kSHL:
562 case Token::kSHR:
563 case Token::kUSHR:
564 if (binary_feedback.OperandsAre(kSmiCid)) {
565 // Left shift may overflow from smi into mint.
566 // Don't generate smi code if the IC data is marked because
567 // of an overflow.
568 if (call->ic_data()->HasDeoptReason(ICData::kDeoptBinaryInt64Op)) {
569 return false;
570 }
571 operands_type =
572 call->ic_data()->HasDeoptReason(ICData::kDeoptBinarySmiOp)
573 ? kMintCid
574 : kSmiCid;
575 } else if (binary_feedback.OperandsAreSmiOrMint() &&
576 binary_feedback.ArgumentIs(kSmiCid)) {
577 // Don't generate mint code if the IC data is marked because of an
578 // overflow.
579 if (call->ic_data()->HasDeoptReason(ICData::kDeoptBinaryInt64Op)) {
580 return false;
581 }
582 // Check for smi/mint << smi or smi/mint >> smi.
583 operands_type = kMintCid;
584 } else {
585 return false;
586 }
587 break;
588 case Token::kMOD:
589 case Token::kTRUNCDIV:
590 if (binary_feedback.OperandsAre(kSmiCid)) {
591 if (call->ic_data()->HasDeoptReason(ICData::kDeoptBinarySmiOp)) {
592 return false;
593 }
594 operands_type = kSmiCid;
595 } else {
596 return false;
597 }
598 break;
599 default:
600 UNREACHABLE();
601 }
602
603 ASSERT(call->type_args_len() == 0);
604 ASSERT(call->ArgumentCount() == 2);
605 Definition* left = call->ArgumentAt(0);
606 Definition* right = call->ArgumentAt(1);
607 if (operands_type == kDoubleCid) {
608 if (!CanUnboxDouble()) {
609 return false;
610 }
611 // Check that either left or right are not a smi. Result of a
612 // binary operation with two smis is a smi not a double, except '/' which
613 // returns a double for two smis.
614 if (op_kind != Token::kDIV) {
615 InsertBefore(
616 call,
617 new (Z) CheckEitherNonSmiInstr(
618 new (Z) Value(left), new (Z) Value(right), call->deopt_id()),
619 call->env(), FlowGraph::kEffect);
620 }
621
622 BinaryDoubleOpInstr* double_bin_op = new (Z)
623 BinaryDoubleOpInstr(op_kind, new (Z) Value(left), new (Z) Value(right),
624 call->deopt_id(), call->source());
625 ReplaceCall(call, double_bin_op);
626 } else if (operands_type == kMintCid) {
627 if ((op_kind == Token::kSHL) || (op_kind == Token::kSHR) ||
628 (op_kind == Token::kUSHR)) {
629 SpeculativeShiftInt64OpInstr* shift_op = new (Z)
630 SpeculativeShiftInt64OpInstr(op_kind, new (Z) Value(left),
631 new (Z) Value(right), call->deopt_id());
632 ReplaceCall(call, shift_op);
633 } else {
634 BinaryInt64OpInstr* bin_op = new (Z) BinaryInt64OpInstr(
635 op_kind, new (Z) Value(left), new (Z) Value(right), call->deopt_id());
636 ReplaceCall(call, bin_op);
637 }
638 } else if ((operands_type == kFloat32x4Cid) ||
639 (operands_type == kInt32x4Cid) ||
640 (operands_type == kFloat64x2Cid)) {
641 return InlineSimdBinaryOp(call, operands_type, op_kind);
642 } else if (op_kind == Token::kMOD) {
643 ASSERT(operands_type == kSmiCid);
644 if (right->IsConstant()) {
645 const Object& obj = right->AsConstant()->value();
646 if (obj.IsSmi() && Utils::IsPowerOfTwo(Smi::Cast(obj).Value())) {
647 // Insert smi check and attach a copy of the original environment
648 // because the smi operation can still deoptimize.
649 InsertBefore(call,
650 new (Z) CheckSmiInstr(new (Z) Value(left),
651 call->deopt_id(), call->source()),
652 call->env(), FlowGraph::kEffect);
653 ConstantInstr* constant = flow_graph()->GetConstant(
654 Smi::Handle(Z, Smi::New(Smi::Cast(obj).Value() - 1)));
655 BinarySmiOpInstr* bin_op =
656 new (Z) BinarySmiOpInstr(Token::kBIT_AND, new (Z) Value(left),
657 new (Z) Value(constant), call->deopt_id());
658 ReplaceCall(call, bin_op);
659 return true;
660 }
661 }
662 // Insert two smi checks and attach a copy of the original
663 // environment because the smi operation can still deoptimize.
664 AddCheckSmi(left, call->deopt_id(), call->env(), call);
665 AddCheckSmi(right, call->deopt_id(), call->env(), call);
666 BinarySmiOpInstr* bin_op = new (Z) BinarySmiOpInstr(
667 op_kind, new (Z) Value(left), new (Z) Value(right), call->deopt_id());
668 ReplaceCall(call, bin_op);
669 } else {
670 ASSERT(operands_type == kSmiCid);
671 // Insert two smi checks and attach a copy of the original
672 // environment because the smi operation can still deoptimize.
673 AddCheckSmi(left, call->deopt_id(), call->env(), call);
674 AddCheckSmi(right, call->deopt_id(), call->env(), call);
675 if (left->IsConstant() &&
676 ((op_kind == Token::kADD) || (op_kind == Token::kMUL))) {
677 // Constant should be on the right side.
678 Definition* temp = left;
679 left = right;
680 right = temp;
681 }
682 BinarySmiOpInstr* bin_op = new (Z) BinarySmiOpInstr(
683 op_kind, new (Z) Value(left), new (Z) Value(right), call->deopt_id());
684 ReplaceCall(call, bin_op);
685 }
686 return true;
687}

◆ TryReplaceWithEqualityOp()

bool dart::CallSpecializer::TryReplaceWithEqualityOp ( InstanceCallInstr * call,
Token::Kind  op_kind 
)
protected

Definition at line 373 of file call_specializer.cc.

374 {
375 const BinaryFeedback& binary_feedback = call->BinaryFeedback();
376
377 ASSERT(call->type_args_len() == 0);
378 ASSERT(call->ArgumentCount() == 2);
379 Definition* const left = call->ArgumentAt(0);
380 Definition* const right = call->ArgumentAt(1);
381
382 intptr_t cid = kIllegalCid;
383 if (binary_feedback.OperandsAre(kOneByteStringCid)) {
384 return TryStringLengthOneEquality(call, op_kind);
385 } else if (binary_feedback.OperandsAre(kSmiCid)) {
386 InsertBefore(call,
387 new (Z) CheckSmiInstr(new (Z) Value(left), call->deopt_id(),
388 call->source()),
389 call->env(), FlowGraph::kEffect);
390 InsertBefore(call,
391 new (Z) CheckSmiInstr(new (Z) Value(right), call->deopt_id(),
392 call->source()),
393 call->env(), FlowGraph::kEffect);
394 cid = kSmiCid;
395 } else if (binary_feedback.OperandsAreSmiOrMint()) {
396 cid = kMintCid;
397 } else if (binary_feedback.OperandsAreSmiOrDouble() && CanUnboxDouble()) {
398 // Use double comparison.
399 if (SmiFitsInDouble()) {
400 cid = kDoubleCid;
401 } else {
402 if (binary_feedback.IncludesOperands(kSmiCid)) {
403 // We cannot use double comparison on two smis. Need polymorphic
404 // call.
405 return false;
406 } else {
407 InsertBefore(
408 call,
409 new (Z) CheckEitherNonSmiInstr(
410 new (Z) Value(left), new (Z) Value(right), call->deopt_id()),
411 call->env(), FlowGraph::kEffect);
412 cid = kDoubleCid;
413 }
414 }
415 } else {
416 // Check if ICDData contains checks with Smi/Null combinations. In that case
417 // we can still emit the optimized Smi equality operation but need to add
418 // checks for null or Smi.
419 if (binary_feedback.OperandsAreSmiOrNull()) {
420 AddChecksForArgNr(call, left, /* arg_number = */ 0);
421 AddChecksForArgNr(call, right, /* arg_number = */ 1);
422
423 cid = kSmiCid;
424 } else {
425 // Shortcut for equality with null.
426 // TODO(vegorov): this optimization is not speculative and should
427 // be hoisted out of this function.
428 ConstantInstr* right_const = right->AsConstant();
429 ConstantInstr* left_const = left->AsConstant();
430 if ((right_const != nullptr && right_const->value().IsNull()) ||
431 (left_const != nullptr && left_const->value().IsNull())) {
432 StrictCompareInstr* comp = new (Z)
433 StrictCompareInstr(call->source(), Token::kEQ_STRICT,
434 new (Z) Value(left), new (Z) Value(right),
435 /* number_check = */ false, DeoptId::kNone);
436 ReplaceCall(call, comp);
437 return true;
438 }
439 return false;
440 }
441 }
443 EqualityCompareInstr* comp =
444 new (Z) EqualityCompareInstr(call->source(), op_kind, new (Z) Value(left),
445 new (Z) Value(right), cid, call->deopt_id());
446 ReplaceCall(call, comp);
447 return true;
448}

◆ TryReplaceWithRelationalOp()

bool dart::CallSpecializer::TryReplaceWithRelationalOp ( InstanceCallInstr * call,
Token::Kind  op_kind 
)
protected

Definition at line 450 of file call_specializer.cc.

451 {
452 ASSERT(call->type_args_len() == 0);
453 ASSERT(call->ArgumentCount() == 2);
454
455 const BinaryFeedback& binary_feedback = call->BinaryFeedback();
456 Definition* left = call->ArgumentAt(0);
457 Definition* right = call->ArgumentAt(1);
458
459 intptr_t cid = kIllegalCid;
460 if (binary_feedback.OperandsAre(kSmiCid)) {
461 InsertBefore(call,
462 new (Z) CheckSmiInstr(new (Z) Value(left), call->deopt_id(),
463 call->source()),
464 call->env(), FlowGraph::kEffect);
465 InsertBefore(call,
466 new (Z) CheckSmiInstr(new (Z) Value(right), call->deopt_id(),
467 call->source()),
468 call->env(), FlowGraph::kEffect);
469 cid = kSmiCid;
470 } else if (binary_feedback.OperandsAreSmiOrMint()) {
471 cid = kMintCid;
472 } else if (binary_feedback.OperandsAreSmiOrDouble() && CanUnboxDouble()) {
473 // Use double comparison.
474 if (SmiFitsInDouble()) {
475 cid = kDoubleCid;
476 } else {
477 if (binary_feedback.IncludesOperands(kSmiCid)) {
478 // We cannot use double comparison on two smis. Need polymorphic
479 // call.
480 return false;
481 } else {
482 InsertBefore(
483 call,
484 new (Z) CheckEitherNonSmiInstr(
485 new (Z) Value(left), new (Z) Value(right), call->deopt_id()),
486 call->env(), FlowGraph::kEffect);
487 cid = kDoubleCid;
488 }
489 }
490 } else {
491 return false;
492 }
494 RelationalOpInstr* comp =
495 new (Z) RelationalOpInstr(call->source(), op_kind, new (Z) Value(left),
496 new (Z) Value(right), cid, call->deopt_id());
497 ReplaceCall(call, comp);
498 return true;
499}

◆ TryReplaceWithUnaryOp()

bool dart::CallSpecializer::TryReplaceWithUnaryOp ( InstanceCallInstr * call,
Token::Kind  op_kind 
)
protected

Definition at line 689 of file call_specializer.cc.

690 {
691 ASSERT(call->type_args_len() == 0);
692 ASSERT(call->ArgumentCount() == 1);
693 Definition* input = call->ArgumentAt(0);
694 Definition* unary_op = nullptr;
695 if (call->Targets().ReceiverIs(kSmiCid)) {
696 InsertBefore(call,
697 new (Z) CheckSmiInstr(new (Z) Value(input), call->deopt_id(),
698 call->source()),
699 call->env(), FlowGraph::kEffect);
700 unary_op = new (Z)
701 UnarySmiOpInstr(op_kind, new (Z) Value(input), call->deopt_id());
702 } else if ((op_kind == Token::kBIT_NOT) &&
703 call->Targets().ReceiverIsSmiOrMint()) {
704 unary_op = new (Z)
705 UnaryInt64OpInstr(op_kind, new (Z) Value(input), call->deopt_id());
706 } else if (call->Targets().ReceiverIs(kDoubleCid) &&
707 (op_kind == Token::kNEGATE) && CanUnboxDouble()) {
708 AddReceiverCheck(call);
709 unary_op = new (Z) UnaryDoubleOpInstr(Token::kNEGATE, new (Z) Value(input),
710 call->deopt_id());
711 } else {
712 return false;
713 }
714 ASSERT(unary_op != nullptr);
715 ReplaceCall(call, unary_op);
716 return true;
717}

◆ VisitLoadCodeUnits()

void dart::CallSpecializer::VisitLoadCodeUnits ( LoadCodeUnitsInstr * instr )
virtual

Definition at line 1355 of file call_specializer.cc.

1355 {
1356// TODO(zerny): Use kUnboxedUint32 once it is fully supported/optimized.
1357#if defined(TARGET_ARCH_IA32) || defined(TARGET_ARCH_ARM)
1358 if (!instr->can_pack_into_smi()) instr->set_representation(kUnboxedInt64);
1359#endif
1360}

◆ VisitStaticCall()

void dart::CallSpecializer::VisitStaticCall ( StaticCallInstr * instr )
virtual

Reimplemented in dart::AotCallSpecializer.

Definition at line 1281 of file call_specializer.cc.

1281 {
1282 if (TryReplaceStaticCallWithInline(flow_graph_, current_iterator(), call,
1284 return;
1285 }
1286
1287 if (speculative_policy_->IsAllowedForInlining(call->deopt_id())) {
1288 // Only if speculative inlining is enabled.
1289
1290 MethodRecognizer::Kind recognized_kind = call->function().recognized_kind();
1291 const CallTargets& targets = call->Targets();
1292 const BinaryFeedback& binary_feedback = call->BinaryFeedback();
1293
1294 switch (recognized_kind) {
1295 case MethodRecognizer::kMathMin:
1296 case MethodRecognizer::kMathMax: {
1297 // We can handle only monomorphic min/max call sites with both arguments
1298 // being either doubles or smis.
1299 if (CanUnboxDouble() && targets.IsMonomorphic() &&
1300 (call->FirstArgIndex() == 0)) {
1301 intptr_t result_cid = kIllegalCid;
1302 if (binary_feedback.IncludesOperands(kDoubleCid)) {
1303 result_cid = kDoubleCid;
1304 } else if (binary_feedback.IncludesOperands(kSmiCid)) {
1305 result_cid = kSmiCid;
1306 }
1307 if (result_cid != kIllegalCid) {
1308 MathMinMaxInstr* min_max = new (Z) MathMinMaxInstr(
1309 recognized_kind, new (Z) Value(call->ArgumentAt(0)),
1310 new (Z) Value(call->ArgumentAt(1)), call->deopt_id(),
1311 result_cid);
1312 const Cids* cids = Cids::CreateMonomorphic(Z, result_cid);
1313 AddCheckClass(min_max->left()->definition(), *cids,
1314 call->deopt_id(), call->env(), call);
1315 AddCheckClass(min_max->right()->definition(), *cids,
1316 call->deopt_id(), call->env(), call);
1317 ReplaceCall(call, min_max);
1318 return;
1319 }
1320 }
1321 break;
1322 }
1323 case MethodRecognizer::kDoubleFromInteger: {
1324 if (call->HasICData() && targets.IsMonomorphic() &&
1325 (call->FirstArgIndex() == 0)) {
1326 if (CanUnboxDouble()) {
1327 if (binary_feedback.ArgumentIs(kSmiCid)) {
1328 Definition* arg = call->ArgumentAt(1);
1329 AddCheckSmi(arg, call->deopt_id(), call->env(), call);
1330 ReplaceCall(call, new (Z) SmiToDoubleInstr(new (Z) Value(arg),
1331 call->source()));
1332 return;
1333 } else if (binary_feedback.ArgumentIs(kMintCid) &&
1334 CanConvertInt64ToDouble()) {
1335 Definition* arg = call->ArgumentAt(1);
1336 ReplaceCall(call, new (Z) Int64ToDoubleInstr(new (Z) Value(arg),
1337 call->deopt_id()));
1338 return;
1339 }
1340 }
1341 }
1342 break;
1343 }
1344
1345 default:
1346 break;
1347 }
1348 }
1349
1350 if (TryOptimizeStaticCallUsingStaticTypes(call)) {
1351 return;
1352 }
1353}

◆ zone()

Zone * dart::CallSpecializer::zone ( ) const
inline protected

Definition at line 89 of file call_specializer.h.

89{ return flow_graph_->zone(); }

Member Data Documentation

◆ should_clone_fields_

const bool dart::CallSpecializer::should_clone_fields_
protected

Definition at line 147 of file call_specializer.h.

◆ speculative_policy_

SpeculativeInliningPolicy* dart::CallSpecializer::speculative_policy_
protected

Definition at line 146 of file call_specializer.h.


The documentation for this class was generated from the following files:
call_specializer.h
call_specializer.cc