Flutter Engine
The Flutter Engine
dart::CallSpecializer Class Reference [abstract]

#include <call_specializer.h>

Inheritance diagram for dart::CallSpecializer:
Bases: dart::FlowGraphVisitor, dart::InstructionVisitor, dart::ValueObject
Derived: dart::AotCallSpecializer, dart::JitCallSpecializer

Classes

struct  ExactnessInfo
 

Public Member Functions

 CallSpecializer (FlowGraph *flow_graph, SpeculativeInliningPolicy *speculative_policy, bool should_clone_fields)
 
virtual ~CallSpecializer ()
 
FlowGraph * flow_graph () const
 
void set_flow_graph (FlowGraph *flow_graph)
 
void ApplyICData ()
 
void ApplyClassIds ()
 
virtual void ReplaceInstanceCallsWithDispatchTableCalls ()
 
void InsertBefore (Instruction *next, Instruction *instr, Environment *env, FlowGraph::UseKind use_kind)
 
void InsertSpeculativeBefore (Instruction *next, Instruction *instr, Environment *env, FlowGraph::UseKind use_kind)
 
virtual void VisitStaticCall (StaticCallInstr *instr)
 
virtual void VisitLoadCodeUnits (LoadCodeUnitsInstr *instr)
 
- Public Member Functions inherited from dart::FlowGraphVisitor
 FlowGraphVisitor (const GrowableArray< BlockEntryInstr * > &block_order)
 
virtual ~FlowGraphVisitor ()
 
ForwardInstructionIterator * current_iterator () const
 
virtual void VisitBlocks ()
 
- Public Member Functions inherited from dart::InstructionVisitor
 InstructionVisitor ()
 
virtual ~InstructionVisitor ()
 
- Public Member Functions inherited from dart::ValueObject
 ValueObject ()
 
 ~ValueObject ()
 

Protected Member Functions

Thread * thread () const
 
IsolateGroup * isolate_group () const
 
Zone * zone () const
 
const Function & function () const
 
bool TryReplaceWithBinaryOp (InstanceCallInstr *call, Token::Kind op_kind)
 
bool TryReplaceWithUnaryOp (InstanceCallInstr *call, Token::Kind op_kind)
 
bool TryReplaceWithEqualityOp (InstanceCallInstr *call, Token::Kind op_kind)
 
bool TryReplaceWithRelationalOp (InstanceCallInstr *call, Token::Kind op_kind)
 
bool TryInlineInstanceGetter (InstanceCallInstr *call)
 
bool TryInlineInstanceSetter (InstanceCallInstr *call)
 
bool TryInlineInstanceMethod (InstanceCallInstr *call)
 
void ReplaceWithInstanceOf (InstanceCallInstr *instr)
 
void ReplaceCallWithResult (Definition *call, Instruction *replacement, Definition *result)
 
void ReplaceCall (Definition *call, Definition *replacement)
 
void AddReceiverCheck (InstanceCallInstr *call)
 
void AddCheckNull (Value *to_check, const String &function_name, intptr_t deopt_id, Environment *deopt_environment, Instruction *insert_before)
 
virtual bool TryCreateICData (InstanceCallInstr *call)
 
virtual bool TryReplaceInstanceOfWithRangeCheck (InstanceCallInstr *call, const AbstractType &type)
 
virtual bool TryOptimizeStaticCallUsingStaticTypes (StaticCallInstr *call)=0
 
void InlineImplicitInstanceGetter (Definition *call, const Field &field)
 
void AddCheckClass (Definition *to_check, const Cids &cids, intptr_t deopt_id, Environment *deopt_environment, Instruction *insert_before)
 
- Protected Member Functions inherited from dart::FlowGraphVisitor
void set_block_order (const GrowableArray< BlockEntryInstr * > &block_order)
 

Protected Attributes

SpeculativeInliningPolicy * speculative_policy_
 
const bool should_clone_fields_
 
- Protected Attributes inherited from dart::FlowGraphVisitor
ForwardInstructionIterator * current_iterator_
 

Detailed Description

Definition at line 36 of file call_specializer.h.
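
CallSpecializer is the shared base class for the flow-graph call specialization passes; dart::JitCallSpecializer and dart::AotCallSpecializer supply the JIT- and AOT-specific behavior. A minimal usage sketch follows (hypothetical driver, not taken from the engine source; only members documented on this page are assumed):

// Hypothetical helper showing one order in which the documented entry
// points could be driven; a real pipeline constructs a concrete subclass,
// since TryOptimizeStaticCallUsingStaticTypes() is pure virtual.
void RunCallSpecialization(dart::CallSpecializer* specializer, bool is_aot) {
  // Specialize instance calls using collected IC data (visits all blocks).
  specializer->ApplyICData();
  // Specialize calls whose receiver class ids are known from types or ICs.
  specializer->ApplyClassIds();
  if (is_aot) {
    // No-op in the base class; overridden by dart::AotCallSpecializer.
    specializer->ReplaceInstanceCallsWithDispatchTableCalls();
  }
}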

Constructor & Destructor Documentation

◆ CallSpecializer()

dart::CallSpecializer::CallSpecializer ( FlowGraph * flow_graph,
SpeculativeInliningPolicy * speculative_policy,
bool  should_clone_fields 
)
inline

Definition at line 38 of file call_specializer.h.

38 CallSpecializer(FlowGraph* flow_graph,
39 SpeculativeInliningPolicy* speculative_policy,
40 bool should_clone_fields)
41 : FlowGraphVisitor(flow_graph->reverse_postorder()),
42 speculative_policy_(speculative_policy),
43 should_clone_fields_(should_clone_fields),
44 flow_graph_(flow_graph) {}

◆ ~CallSpecializer()

virtual dart::CallSpecializer::~CallSpecializer ( )
inline virtual

Definition at line 46 of file call_specializer.h.

46{}

Member Function Documentation

◆ AddCheckClass()

void dart::CallSpecializer::AddCheckClass ( Definition * to_check,
const Cids & cids,
intptr_t  deopt_id,
Environment * deopt_environment,
Instruction * insert_before 
)
protected

Definition at line 230 of file call_specializer.cc.

234 {
235 // Type propagation has not run yet, we cannot eliminate the check.
236 Instruction* check = flow_graph_->CreateCheckClass(to_check, cids, deopt_id,
237 insert_before->source());
238 InsertBefore(insert_before, check, deopt_environment, FlowGraph::kEffect);
239}

◆ AddCheckNull()

void dart::CallSpecializer::AddCheckNull ( Value * to_check,
const String & function_name,
intptr_t  deopt_id,
Environment * deopt_environment,
Instruction * insert_before 
)
protected

Definition at line 249 of file call_specializer.cc.

253 {
254 if (to_check->Type()->is_nullable()) {
255 CheckNullInstr* check_null =
256 new (Z) CheckNullInstr(to_check->CopyWithType(Z), function_name,
257 deopt_id, insert_before->source());
258 if (FLAG_trace_strong_mode_types) {
259 THR_Print("[Strong mode] Inserted %s\n", check_null->ToCString());
260 }
261 InsertBefore(insert_before, check_null, deopt_environment,
262 FlowGraph::kEffect);
263 }
264}

◆ AddReceiverCheck()

void dart::CallSpecializer::AddReceiverCheck ( InstanceCallInstr * call)
inline protected

Definition at line 115 of file call_specializer.h.

115 {
116 AddCheckClass(call->Receiver()->definition(), call->Targets(),
117 call->deopt_id(), call->env(), call);
118 }

◆ ApplyClassIds()

void dart::CallSpecializer::ApplyClassIds ( )

Definition at line 62 of file call_specializer.cc.

62 {
63 ASSERT(current_iterator_ == nullptr);
64 for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator();
65 !block_it.Done(); block_it.Advance()) {
66 thread()->CheckForSafepoint();
67 ForwardInstructionIterator it(block_it.Current());
68 current_iterator_ = &it;
69 for (; !it.Done(); it.Advance()) {
70 Instruction* instr = it.Current();
71 if (instr->IsInstanceCall()) {
72 InstanceCallInstr* call = instr->AsInstanceCall();
73 if (call->HasICData()) {
74 if (TryCreateICData(call)) {
75 VisitInstanceCall(call);
76 }
77 }
78 } else if (auto static_call = instr->AsStaticCall()) {
79 // If TFA devirtualized instance calls to static calls we also want to
80 // process them here.
81 VisitStaticCall(static_call);
82 } else if (instr->IsPolymorphicInstanceCall()) {
83 SpecializePolymorphicInstanceCall(instr->AsPolymorphicInstanceCall());
84 }
85 }
86 current_iterator_ = nullptr;
87 }
88}

◆ ApplyICData()

void dart::CallSpecializer::ApplyICData ( )

Definition at line 52 of file call_specializer.cc.

52 {
53 VisitBlocks();
54}

◆ flow_graph()

FlowGraph * dart::CallSpecializer::flow_graph ( ) const
inline

Definition at line 48 of file call_specializer.h.

48{ return flow_graph_; }

◆ function()

const Function & dart::CallSpecializer::function ( ) const
inline protected

Definition at line 92 of file call_specializer.h.

92{ return flow_graph_->function(); }

◆ InlineImplicitInstanceGetter()

void dart::CallSpecializer::InlineImplicitInstanceGetter ( Definition * call,
const Field & field 
)
protected

Definition at line 740 of file call_specializer.cc.

741 {
742 ASSERT(field.is_instance());
743 Definition* receiver = call->ArgumentAt(0);
744
745 const bool calls_initializer = field.NeedsInitializationCheckOnLoad();
746 const Slot& slot = Slot::Get(field, &flow_graph()->parsed_function());
747 LoadFieldInstr* load = new (Z) LoadFieldInstr(
748 new (Z) Value(receiver), slot, call->source(), calls_initializer,
749 calls_initializer ? call->deopt_id() : DeoptId::kNone);
750
751 // Note that this is a case of LoadField -> InstanceCall lazy deopt.
752 // Which means that we don't need to remove arguments from the environment
753 // because normal getter call expects receiver pushed (unlike the case
754 // of LoadField -> LoadField deoptimization handled by
755 // FlowGraph::AttachEnvironment).
756 if (!calls_initializer) {
757 // If we don't call initializer then we don't need an environment.
758 call->RemoveEnvironment();
759 }
760 ReplaceCall(call, load);
761
762 if (load->slot().type().ToNullableCid() != kDynamicCid) {
763 // Reset value types if we know concrete cid.
764 for (Value::Iterator it(load->input_use_list()); !it.Done(); it.Advance()) {
765 it.Current()->SetReachingType(nullptr);
766 }
767 }
768}

◆ InsertBefore()

void dart::CallSpecializer::InsertBefore ( Instruction * next,
Instruction * instr,
Environment * env,
FlowGraph::UseKind  use_kind 
)
inline

Definition at line 63 of file call_specializer.h.

63 void InsertBefore(Instruction* next,
64 Instruction* instr,
65 Environment* env,
66 FlowGraph::UseKind use_kind) {
67 flow_graph_->InsertBefore(next, instr, env, use_kind);
68 }
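
InsertBefore() simply forwards to the owning FlowGraph. For illustration, a sketch of the recurring pattern in this class (assumptions noted in the comments; it mirrors AddCheckClass above and the TryReplaceWith*Op methods below): a guard instruction is allocated, then inserted ahead of the call it protects, reusing the call's environment so a failed check deoptimizes back to the unspecialized call.

// Sketch only: assumes it runs inside a CallSpecializer member, with 'call'
// an InstanceCallInstr* that is about to be specialized; Z is the zone
// allocation macro used throughout this file.
Definition* left = call->ArgumentAt(0);
InsertBefore(call,
             new (Z) CheckSmiInstr(new (Z) Value(left), call->deopt_id(),
                                   call->source()),
             call->env(),          // environment used if the check deoptimizes
             FlowGraph::kEffect);  // the check produces no value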

◆ InsertSpeculativeBefore()

void dart::CallSpecializer::InsertSpeculativeBefore ( Instruction * next,
Instruction * instr,
Environment * env,
FlowGraph::UseKind  use_kind 
)
inline

Definition at line 69 of file call_specializer.h.

69 void InsertSpeculativeBefore(Instruction* next,
70 Instruction* instr,
71 Environment* env,
72 FlowGraph::UseKind use_kind) {
73 flow_graph_->InsertSpeculativeBefore(next, instr, env, use_kind);
74 }

◆ isolate_group()

IsolateGroup * dart::CallSpecializer::isolate_group ( ) const
inline protected

Definition at line 90 of file call_specializer.h.

90{ return flow_graph_->isolate_group(); }

◆ ReplaceCall()

void dart::CallSpecializer::ReplaceCall ( Definition * call,
Definition * replacement 
)
protected

Definition at line 213 of file call_specializer.cc.

213 {
214 ReplaceCallWithResult(call, replacement, nullptr);
215}

◆ ReplaceCallWithResult()

void dart::CallSpecializer::ReplaceCallWithResult ( Definition * call,
Instruction * replacement,
Definition * result 
)
protected

Definition at line 201 of file call_specializer.cc.

203 {
204 ASSERT(!call->HasMoveArguments());
205 if (result == nullptr) {
206 ASSERT(replacement->IsDefinition());
207 call->ReplaceWith(replacement->AsDefinition(), current_iterator());
208 } else {
209 call->ReplaceWithResult(replacement, result, current_iterator());
210 }
211}
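
In other words, when result is nullptr the replacement must itself be a Definition and takes over the call's uses; otherwise the call's uses are redirected to result while replacement still takes the call's place in the instruction stream. A hedged illustration of both forms (names assumed from the TryReplaceWithBinaryOp and TryInlineInstanceSetter code elsewhere on this page, not part of this API):

// Sketch only; 'call', 'bin_op', and 'store' are assumed to be built already.
ReplaceCall(call, bin_op);  // bin_op is a Definition and takes over call's uses
ReplaceCallWithResult(call, store, flow_graph()->constant_null());
// store replaces the call, but former uses of the call now read the null
// constant, since a field store produces no value.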

◆ ReplaceInstanceCallsWithDispatchTableCalls()

void dart::CallSpecializer::ReplaceInstanceCallsWithDispatchTableCalls ( )
virtual

Reimplemented in dart::AotCallSpecializer.

Definition at line 1682 of file call_specializer.cc.

1682 {
1683 // Only implemented for AOT.
1684}

◆ ReplaceWithInstanceOf()

void dart::CallSpecializer::ReplaceWithInstanceOf ( InstanceCallInstr * instr)
protected

Definition at line 1177 of file call_specializer.cc.

1177 {
1178 ASSERT(Token::IsTypeTestOperator(call->token_kind()));
1179 Definition* left = call->ArgumentAt(0);
1180 Definition* instantiator_type_args = nullptr;
1181 Definition* function_type_args = nullptr;
1182 AbstractType& type = AbstractType::ZoneHandle(Z);
1183 ASSERT(call->type_args_len() == 0);
1184 if (call->ArgumentCount() == 2) {
1185 instantiator_type_args = flow_graph()->constant_null();
1186 function_type_args = flow_graph()->constant_null();
1187 ASSERT(call->MatchesCoreName(Symbols::_simpleInstanceOf()));
1188 type = AbstractType::Cast(call->ArgumentAt(1)->AsConstant()->value()).ptr();
1189 } else {
1190 ASSERT(call->ArgumentCount() == 4);
1191 instantiator_type_args = call->ArgumentAt(1);
1192 function_type_args = call->ArgumentAt(2);
1193 type = AbstractType::Cast(call->ArgumentAt(3)->AsConstant()->value()).ptr();
1194 }
1195
1196 if (TryOptimizeInstanceOfUsingStaticTypes(call, type)) {
1197 return;
1198 }
1199
1200 intptr_t type_cid;
1201 if (TypeCheckAsClassEquality(type, &type_cid)) {
1202 LoadClassIdInstr* load_cid =
1203 new (Z) LoadClassIdInstr(new (Z) Value(left), kUnboxedUword);
1204 InsertBefore(call, load_cid, nullptr, FlowGraph::kValue);
1205 ConstantInstr* constant_cid = flow_graph()->GetConstant(
1206 Smi::Handle(Z, Smi::New(type_cid)), kUnboxedUword);
1207 EqualityCompareInstr* check_cid = new (Z) EqualityCompareInstr(
1208 call->source(), Token::kEQ, new Value(load_cid),
1209 new Value(constant_cid), kIntegerCid, DeoptId::kNone, false,
1210 Instruction::kNotSpeculative);
1211 ReplaceCall(call, check_cid);
1212 return;
1213 }
1214
1215 if (TryReplaceInstanceOfWithRangeCheck(call, type)) {
1216 return;
1217 }
1218
1219 const ICData& unary_checks =
1220 ICData::ZoneHandle(Z, call->ic_data()->AsUnaryClassChecks());
1221 const intptr_t number_of_checks = unary_checks.NumberOfChecks();
1222 if (number_of_checks > 0 && number_of_checks <= FLAG_max_polymorphic_checks) {
1223 ZoneGrowableArray<intptr_t>* results =
1224 new (Z) ZoneGrowableArray<intptr_t>(number_of_checks * 2);
1225 const Bool& as_bool =
1226 Bool::ZoneHandle(Z, InstanceOfAsBool(unary_checks, type, results));
1227 if (as_bool.IsNull() || CompilerState::Current().is_aot()) {
1228 if (results->length() == number_of_checks * 2) {
1229 const bool can_deopt = SpecializeTestCidsForNumericTypes(results, type);
1230 if (can_deopt &&
1231 !speculative_policy_->IsAllowedForInlining(call->deopt_id())) {
1232 // Guard against repeated speculative inlining.
1233 return;
1234 }
1235 TestCidsInstr* test_cids = new (Z) TestCidsInstr(
1236 call->source(), Token::kIS, new (Z) Value(left), *results,
1237 can_deopt ? call->deopt_id() : DeoptId::kNone);
1238 // Remove type.
1239 ReplaceCall(call, test_cids);
1240 return;
1241 }
1242 } else {
1243 // One result only.
1244 AddReceiverCheck(call);
1245 ConstantInstr* bool_const = flow_graph()->GetConstant(as_bool);
1246 ASSERT(!call->HasMoveArguments());
1247 call->ReplaceUsesWith(bool_const);
1248 ASSERT(current_iterator()->Current() == call);
1249 current_iterator()->RemoveCurrentFromGraph();
1250 return;
1251 }
1252 }
1253
1254 InstanceOfInstr* instance_of = new (Z) InstanceOfInstr(
1255 call->source(), new (Z) Value(left),
1256 new (Z) Value(instantiator_type_args), new (Z) Value(function_type_args),
1257 type, call->deopt_id());
1258 ReplaceCall(call, instance_of);
1259}

◆ set_flow_graph()

void dart::CallSpecializer::set_flow_graph ( FlowGraph * flow_graph)
inline

Definition at line 50 of file call_specializer.h.

50 {
51 flow_graph_ = flow_graph;
52 set_block_order(flow_graph->reverse_postorder());
53 }

◆ thread()

Thread * dart::CallSpecializer::thread ( ) const
inline protected

Definition at line 89 of file call_specializer.h.

89{ return flow_graph_->thread(); }

◆ TryCreateICData()

bool dart::CallSpecializer::TryCreateICData ( InstanceCallInstr * call)
protected virtual

Reimplemented in dart::AotCallSpecializer.

Definition at line 90 of file call_specializer.cc.

90 {
91 ASSERT(call->HasICData());
92
93 if (call->Targets().length() > 0) {
94 // This occurs when an instance call has too many checks, will be converted
95 // to megamorphic call.
96 return false;
97 }
98
99 const intptr_t receiver_index = call->FirstArgIndex();
100 GrowableArray<intptr_t> class_ids(call->ic_data()->NumArgsTested());
101 ASSERT(call->ic_data()->NumArgsTested() <=
102 call->ArgumentCountWithoutTypeArgs());
103 for (intptr_t i = 0; i < call->ic_data()->NumArgsTested(); i++) {
104 class_ids.Add(call->ArgumentValueAt(receiver_index + i)->Type()->ToCid());
105 }
106
107 const Token::Kind op_kind = call->token_kind();
108 if (FLAG_guess_icdata_cid && !CompilerState::Current().is_aot()) {
109 if (Token::IsRelationalOperator(op_kind) ||
110 Token::IsEqualityOperator(op_kind) ||
111 Token::IsBinaryOperator(op_kind)) {
112 // Guess cid: if one of the inputs is a number assume that the other
113 // is a number of same type, unless the interface target tells us this
114 // is impossible.
115 if (call->CanReceiverBeSmiBasedOnInterfaceTarget(zone())) {
116 const intptr_t cid_0 = class_ids[0];
117 const intptr_t cid_1 = class_ids[1];
118 if ((cid_0 == kDynamicCid) && (IsNumberCid(cid_1))) {
119 class_ids[0] = cid_1;
120 } else if (IsNumberCid(cid_0) && (cid_1 == kDynamicCid)) {
121 class_ids[1] = cid_0;
122 }
123 }
124 }
125 }
126
127 bool all_cids_known = true;
128 for (intptr_t i = 0; i < class_ids.length(); i++) {
129 if (class_ids[i] == kDynamicCid) {
130 // Not all cid-s known.
131 all_cids_known = false;
132 break;
133 }
134 }
135
136 if (all_cids_known) {
137 const intptr_t receiver_cid = class_ids[0];
138 if (receiver_cid == kSentinelCid) {
139 // Unreachable call.
140 return false;
141 }
142 const Class& receiver_class =
143 Class::Handle(Z, IG->class_table()->At(receiver_cid));
144 if (!receiver_class.is_finalized()) {
145 // Do not eagerly finalize classes. ResolveDynamicForReceiverClass can
146 // cause class finalization, since callee's receiver class may not be
147 // finalized yet.
148 return false;
149 }
150 const Function& function = Function::Handle(
151 Z, call->ResolveForReceiverClass(receiver_class, /*allow_add=*/false));
152 if (function.IsNull()) {
153 return false;
154 }
155 ASSERT(!function.IsInvokeFieldDispatcher());
156
157 // Update the CallTargets attached to the instruction with our speculative
158 // target. The next round of CallSpecializer::VisitInstanceCall will make
159 // use of this.
160 call->SetTargets(CallTargets::CreateMonomorphic(Z, class_ids[0], function));
161 if (class_ids.length() == 2) {
162 call->SetBinaryFeedback(
163 BinaryFeedback::CreateMonomorphic(Z, class_ids[0], class_ids[1]));
164 }
165 return true;
166 }
167
168 return false;
169}

◆ TryInlineInstanceGetter()

bool dart::CallSpecializer::TryInlineInstanceGetter ( InstanceCallInstr * call)
protected

Definition at line 936 of file call_specializer.cc.

936 {
937 const CallTargets& targets = call->Targets();
938 if (!targets.HasSingleTarget()) {
939 // Polymorphic sites are inlined like normal methods by conventional
940 // inlining in FlowGraphInliner.
941 return false;
942 }
943 const Function& target = targets.FirstTarget();
944 if (target.kind() != UntaggedFunction::kImplicitGetter) {
945 // Non-implicit getters are inlined like normal methods by conventional
946 // inlining in FlowGraphInliner.
947 return false;
948 }
949 if (!CompilerState::Current().is_aot() && !target.WasCompiled()) {
950 return false;
951 }
952 return TryInlineImplicitInstanceGetter(call);
953}

◆ TryInlineInstanceMethod()

bool dart::CallSpecializer::TryInlineInstanceMethod ( InstanceCallInstr * call)
protected

Definition at line 956 of file call_specializer.cc.

956 {
957 const CallTargets& targets = call->Targets();
958 if (!targets.IsMonomorphic()) {
959 // No type feedback collected or multiple receivers/targets found.
960 return false;
961 }
962
963 const Function& target = targets.FirstTarget();
964 intptr_t receiver_cid = targets.MonomorphicReceiverCid();
965 MethodRecognizer::Kind recognized_kind = target.recognized_kind();
966
967 if (recognized_kind == MethodRecognizer::kIntegerToDouble) {
968 if (receiver_cid == kSmiCid) {
969 AddReceiverCheck(call);
970 ReplaceCall(call,
971 new (Z) SmiToDoubleInstr(new (Z) Value(call->ArgumentAt(0)),
972 call->source()));
973 return true;
974 } else if ((receiver_cid == kMintCid) && CanConvertInt64ToDouble()) {
975 AddReceiverCheck(call);
976 ReplaceCall(call,
977 new (Z) Int64ToDoubleInstr(new (Z) Value(call->ArgumentAt(0)),
978 call->deopt_id()));
979 return true;
980 }
981 }
982
983 if (receiver_cid == kDoubleCid) {
984 switch (recognized_kind) {
985 case MethodRecognizer::kDoubleToInteger: {
986 AddReceiverCheck(call);
987 ASSERT(call->HasICData());
988 const ICData& ic_data = *call->ic_data();
989 Definition* input = call->ArgumentAt(0);
990 Definition* d2i_instr = nullptr;
991 if (ic_data.HasDeoptReason(ICData::kDeoptDoubleToSmi)) {
992 // Do not repeatedly deoptimize because result didn't fit into Smi.
993 d2i_instr = new (Z) DoubleToIntegerInstr(
994 new (Z) Value(input), recognized_kind, call->deopt_id());
995 } else {
996 // Optimistically assume result fits into Smi.
997 d2i_instr =
998 new (Z) DoubleToSmiInstr(new (Z) Value(input), call->deopt_id());
999 }
1000 ReplaceCall(call, d2i_instr);
1001 return true;
1002 }
1003 default:
1004 break;
1005 }
1006 }
1007
1008 return TryReplaceInstanceCallWithInline(flow_graph_, current_iterator(), call,
1009 speculative_policy_);
1010}

◆ TryInlineInstanceSetter()

bool dart::CallSpecializer::TryInlineInstanceSetter ( InstanceCallInstr * call)
protected

Definition at line 770 of file call_specializer.cc.

770 {
771 const CallTargets& targets = instr->Targets();
772 if (!targets.HasSingleTarget()) {
773 // Polymorphic sites are inlined like normal method calls by conventional
774 // inlining.
775 return false;
776 }
777 const Function& target = targets.FirstTarget();
778 if (target.kind() != UntaggedFunction::kImplicitSetter) {
779 // Non-implicit setter are inlined like normal method calls.
780 return false;
781 }
782 if (!CompilerState::Current().is_aot() && !target.WasCompiled()) {
783 return false;
784 }
785 Field& field = Field::ZoneHandle(Z, target.accessor_field());
786 ASSERT(!field.IsNull());
787 if (should_clone_fields_) {
788 field = field.CloneFromOriginal();
789 }
790 if (field.is_late() && field.is_final()) {
791 return false;
792 }
793
794 switch (flow_graph()->CheckForInstanceCall(
795 instr, UntaggedFunction::kImplicitSetter)) {
796 case FlowGraph::ToCheck::kCheckNull:
797 AddCheckNull(instr->Receiver(), instr->function_name(), instr->deopt_id(),
798 instr->env(), instr);
799 break;
800 case FlowGraph::ToCheck::kCheckCid:
801 if (CompilerState::Current().is_aot()) {
802 return false; // AOT cannot class check
803 }
804 AddReceiverCheck(instr);
805 break;
806 case FlowGraph::ToCheck::kNoCheck:
807 break;
808 }
809
810 // True if we can use unchecked entry into the setter.
811 bool is_unchecked_call = false;
812 if (!CompilerState::Current().is_aot()) {
813 if (targets.IsMonomorphic() && targets.MonomorphicExactness().IsExact()) {
814 if (targets.MonomorphicExactness().IsTriviallyExact()) {
815 flow_graph()->AddExactnessGuard(instr,
816 targets.MonomorphicReceiverCid());
817 }
818 is_unchecked_call = true;
819 }
820 }
821
822 if (IG->use_field_guards()) {
823 if (field.guarded_cid() != kDynamicCid) {
824 InsertSpeculativeBefore(
825 instr,
826 new (Z) GuardFieldClassInstr(new (Z) Value(instr->ArgumentAt(1)),
827 field, instr->deopt_id()),
828 instr->env(), FlowGraph::kEffect);
829 }
830
831 if (field.needs_length_check()) {
832 InsertSpeculativeBefore(
833 instr,
834 new (Z) GuardFieldLengthInstr(new (Z) Value(instr->ArgumentAt(1)),
835 field, instr->deopt_id()),
836 instr->env(), FlowGraph::kEffect);
837 }
838
839 if (field.static_type_exactness_state().NeedsFieldGuard()) {
840 InsertSpeculativeBefore(
841 instr,
842 new (Z) GuardFieldTypeInstr(new (Z) Value(instr->ArgumentAt(1)),
843 field, instr->deopt_id()),
844 instr->env(), FlowGraph::kEffect);
845 }
846 }
847
848 // Build an AssertAssignable if necessary.
849 const AbstractType& dst_type = AbstractType::ZoneHandle(zone(), field.type());
850 if (!dst_type.IsTopTypeForSubtyping()) {
851 // Compute if we need to type check the value. Always type check if
852 // at a dynamic invocation.
853 bool needs_check = true;
854 if (!instr->interface_target().IsNull()) {
855 if (field.is_covariant()) {
856 // Always type check covariant fields.
857 needs_check = true;
858 } else if (field.is_generic_covariant_impl()) {
859 // If field is generic covariant then we don't need to check it
860 // if the invocation was marked as unchecked (e.g. receiver of
861 // the invocation is also the receiver of the surrounding method).
862 // Note: we can't use flow_graph()->IsReceiver() for this optimization
863 // because strong mode only gives static guarantees at the AST level
864 // not at the SSA level.
865 needs_check = !(is_unchecked_call ||
866 (instr->entry_kind() == Code::EntryKind::kUnchecked));
867 } else {
868 // The rest of the stores are checked statically (we are not at
869 // a dynamic invocation).
870 needs_check = false;
871 }
872 }
873
874 if (needs_check) {
875 Definition* instantiator_type_args = flow_graph_->constant_null();
876 Definition* function_type_args = flow_graph_->constant_null();
877 if (!dst_type.IsInstantiated()) {
878 const Class& owner = Class::Handle(Z, field.Owner());
879 if (owner.NumTypeArguments() > 0) {
880 instantiator_type_args = new (Z) LoadFieldInstr(
881 new (Z) Value(instr->ArgumentAt(0)),
882 Slot::GetTypeArgumentsSlotFor(thread(), owner), instr->source());
883 InsertSpeculativeBefore(instr, instantiator_type_args, instr->env(),
884 FlowGraph::kValue);
885 }
886 }
887
888 auto assert_assignable = new (Z) AssertAssignableInstr(
889 instr->source(), new (Z) Value(instr->ArgumentAt(1)),
890 new (Z) Value(flow_graph_->GetConstant(dst_type)),
891 new (Z) Value(instantiator_type_args),
892 new (Z) Value(function_type_args),
893 String::ZoneHandle(zone(), field.name()), instr->deopt_id());
894 InsertSpeculativeBefore(instr, assert_assignable, instr->env(),
895 FlowGraph::kEffect);
896 }
897 }
898
899 // Field guard was detached.
900 ASSERT(instr->FirstArgIndex() == 0);
901 StoreFieldInstr* store = new (Z)
902 StoreFieldInstr(field, new (Z) Value(instr->ArgumentAt(0)),
903 new (Z) Value(instr->ArgumentAt(1)), kEmitStoreBarrier,
904 instr->source(), &flow_graph()->parsed_function());
905
906 // Discard the environment from the original instruction because the store
907 // can't deoptimize.
908 instr->RemoveEnvironment();
909 ReplaceCallWithResult(instr, store, flow_graph()->constant_null());
910 return true;
911}

◆ TryOptimizeStaticCallUsingStaticTypes()

virtual bool dart::CallSpecializer::TryOptimizeStaticCallUsingStaticTypes ( StaticCallInstr * call)
protected pure virtual

◆ TryReplaceInstanceOfWithRangeCheck()

bool dart::CallSpecializer::TryReplaceInstanceOfWithRangeCheck ( InstanceCallInstr * call,
const AbstractType & type 
)
protected virtual

Reimplemented in dart::AotCallSpecializer.

Definition at line 1120 of file call_specializer.cc.

1122 {
1123 // TODO(dartbug.com/30632) does this optimization make sense in JIT?
1124 return false;
1125}

◆ TryReplaceWithBinaryOp()

bool dart::CallSpecializer::TryReplaceWithBinaryOp ( InstanceCallInstr * call,
Token::Kind  op_kind 
)
protected

Definition at line 492 of file call_specializer.cc.

493 {
494 intptr_t operands_type = kIllegalCid;
495 ASSERT(call->HasICData());
496 const BinaryFeedback& binary_feedback = call->BinaryFeedback();
497 switch (op_kind) {
498 case Token::kADD:
499 case Token::kSUB:
500 case Token::kMUL:
501 if (binary_feedback.OperandsAre(kSmiCid)) {
502 // Don't generate smi code if the IC data is marked because
503 // of an overflow.
504 operands_type =
505 call->ic_data()->HasDeoptReason(ICData::kDeoptBinarySmiOp)
506 ? kMintCid
507 : kSmiCid;
508 } else if (binary_feedback.OperandsAreSmiOrMint()) {
509 // Don't generate mint code if the IC data is marked because of an
510 // overflow.
511 if (call->ic_data()->HasDeoptReason(ICData::kDeoptBinaryInt64Op))
512 return false;
513 operands_type = kMintCid;
514 } else if (ShouldSpecializeForDouble(binary_feedback)) {
515 operands_type = kDoubleCid;
516 } else if (binary_feedback.OperandsAre(kFloat32x4Cid)) {
517 operands_type = kFloat32x4Cid;
518 } else if (binary_feedback.OperandsAre(kInt32x4Cid)) {
519 ASSERT(op_kind != Token::kMUL); // Int32x4 doesn't have a multiply op.
520 operands_type = kInt32x4Cid;
521 } else if (binary_feedback.OperandsAre(kFloat64x2Cid)) {
522 operands_type = kFloat64x2Cid;
523 } else {
524 return false;
525 }
526 break;
527 case Token::kDIV:
528 if (ShouldSpecializeForDouble(binary_feedback) ||
529 binary_feedback.OperandsAre(kSmiCid)) {
530 operands_type = kDoubleCid;
531 } else if (binary_feedback.OperandsAre(kFloat32x4Cid)) {
532 operands_type = kFloat32x4Cid;
533 } else if (binary_feedback.OperandsAre(kFloat64x2Cid)) {
534 operands_type = kFloat64x2Cid;
535 } else {
536 return false;
537 }
538 break;
539 case Token::kBIT_AND:
540 case Token::kBIT_OR:
541 case Token::kBIT_XOR:
542 if (binary_feedback.OperandsAre(kSmiCid)) {
543 operands_type = kSmiCid;
544 } else if (binary_feedback.OperandsAreSmiOrMint()) {
545 operands_type = kMintCid;
546 } else if (binary_feedback.OperandsAre(kInt32x4Cid)) {
547 operands_type = kInt32x4Cid;
548 } else {
549 return false;
550 }
551 break;
552 case Token::kSHL:
553 case Token::kSHR:
554 case Token::kUSHR:
555 if (binary_feedback.OperandsAre(kSmiCid)) {
556 // Left shift may overflow from smi into mint.
557 // Don't generate smi code if the IC data is marked because
558 // of an overflow.
559 if (call->ic_data()->HasDeoptReason(ICData::kDeoptBinaryInt64Op)) {
560 return false;
561 }
562 operands_type =
563 call->ic_data()->HasDeoptReason(ICData::kDeoptBinarySmiOp)
564 ? kMintCid
565 : kSmiCid;
566 } else if (binary_feedback.OperandsAreSmiOrMint() &&
567 binary_feedback.ArgumentIs(kSmiCid)) {
568 // Don't generate mint code if the IC data is marked because of an
569 // overflow.
570 if (call->ic_data()->HasDeoptReason(ICData::kDeoptBinaryInt64Op)) {
571 return false;
572 }
573 // Check for smi/mint << smi or smi/mint >> smi.
574 operands_type = kMintCid;
575 } else {
576 return false;
577 }
578 break;
579 case Token::kMOD:
580 case Token::kTRUNCDIV:
581 if (binary_feedback.OperandsAre(kSmiCid)) {
582 if (call->ic_data()->HasDeoptReason(ICData::kDeoptBinarySmiOp)) {
583 return false;
584 }
585 operands_type = kSmiCid;
586 } else {
587 return false;
588 }
589 break;
590 default:
591 UNREACHABLE();
592 }
593
594 ASSERT(call->type_args_len() == 0);
595 ASSERT(call->ArgumentCount() == 2);
596 Definition* left = call->ArgumentAt(0);
597 Definition* right = call->ArgumentAt(1);
598 if (operands_type == kDoubleCid) {
599 // Check that either left or right are not a smi. Result of a
600 // binary operation with two smis is a smi not a double, except '/' which
601 // returns a double for two smis.
602 if (op_kind != Token::kDIV) {
603 InsertBefore(
604 call,
605 new (Z) CheckEitherNonSmiInstr(
606 new (Z) Value(left), new (Z) Value(right), call->deopt_id()),
607 call->env(), FlowGraph::kEffect);
608 }
609
610 BinaryDoubleOpInstr* double_bin_op = new (Z)
611 BinaryDoubleOpInstr(op_kind, new (Z) Value(left), new (Z) Value(right),
612 call->deopt_id(), call->source());
613 ReplaceCall(call, double_bin_op);
614 } else if (operands_type == kMintCid) {
615 if ((op_kind == Token::kSHL) || (op_kind == Token::kSHR) ||
616 (op_kind == Token::kUSHR)) {
617 SpeculativeShiftInt64OpInstr* shift_op = new (Z)
618 SpeculativeShiftInt64OpInstr(op_kind, new (Z) Value(left),
619 new (Z) Value(right), call->deopt_id());
620 ReplaceCall(call, shift_op);
621 } else {
622 BinaryInt64OpInstr* bin_op = new (Z) BinaryInt64OpInstr(
623 op_kind, new (Z) Value(left), new (Z) Value(right), call->deopt_id());
624 ReplaceCall(call, bin_op);
625 }
626 } else if ((operands_type == kFloat32x4Cid) ||
627 (operands_type == kInt32x4Cid) ||
628 (operands_type == kFloat64x2Cid)) {
629 return InlineSimdBinaryOp(call, operands_type, op_kind);
630 } else if (op_kind == Token::kMOD) {
631 ASSERT(operands_type == kSmiCid);
632 if (right->IsConstant()) {
633 const Object& obj = right->AsConstant()->value();
634 if (obj.IsSmi() && Utils::IsPowerOfTwo(Smi::Cast(obj).Value())) {
635 // Insert smi check and attach a copy of the original environment
636 // because the smi operation can still deoptimize.
637 InsertBefore(call,
638 new (Z) CheckSmiInstr(new (Z) Value(left),
639 call->deopt_id(), call->source()),
640 call->env(), FlowGraph::kEffect);
641 ConstantInstr* constant = flow_graph()->GetConstant(
642 Smi::Handle(Z, Smi::New(Smi::Cast(obj).Value() - 1)));
643 BinarySmiOpInstr* bin_op =
644 new (Z) BinarySmiOpInstr(Token::kBIT_AND, new (Z) Value(left),
645 new (Z) Value(constant), call->deopt_id());
646 ReplaceCall(call, bin_op);
647 return true;
648 }
649 }
650 // Insert two smi checks and attach a copy of the original
651 // environment because the smi operation can still deoptimize.
652 AddCheckSmi(left, call->deopt_id(), call->env(), call);
653 AddCheckSmi(right, call->deopt_id(), call->env(), call);
654 BinarySmiOpInstr* bin_op = new (Z) BinarySmiOpInstr(
655 op_kind, new (Z) Value(left), new (Z) Value(right), call->deopt_id());
656 ReplaceCall(call, bin_op);
657 } else {
658 ASSERT(operands_type == kSmiCid);
659 // Insert two smi checks and attach a copy of the original
660 // environment because the smi operation can still deoptimize.
661 AddCheckSmi(left, call->deopt_id(), call->env(), call);
662 AddCheckSmi(right, call->deopt_id(), call->env(), call);
663 if (left->IsConstant() &&
664 ((op_kind == Token::kADD) || (op_kind == Token::kMUL))) {
665 // Constant should be on the right side.
666 Definition* temp = left;
667 left = right;
668 right = temp;
669 }
670 BinarySmiOpInstr* bin_op = new (Z) BinarySmiOpInstr(
671 op_kind, new (Z) Value(left), new (Z) Value(right), call->deopt_id());
672 ReplaceCall(call, bin_op);
673 }
674 return true;
675}

◆ TryReplaceWithEqualityOp()

bool dart::CallSpecializer::TryReplaceWithEqualityOp ( InstanceCallInstr * call,
Token::Kind  op_kind 
)
protected

Definition at line 364 of file call_specializer.cc.

365 {
366 const BinaryFeedback& binary_feedback = call->BinaryFeedback();
367
368 ASSERT(call->type_args_len() == 0);
369 ASSERT(call->ArgumentCount() == 2);
370 Definition* const left = call->ArgumentAt(0);
371 Definition* const right = call->ArgumentAt(1);
372
373 intptr_t cid = kIllegalCid;
374 if (binary_feedback.OperandsAre(kOneByteStringCid)) {
375 return TryStringLengthOneEquality(call, op_kind);
376 } else if (binary_feedback.OperandsAre(kSmiCid)) {
377 InsertBefore(call,
378 new (Z) CheckSmiInstr(new (Z) Value(left), call->deopt_id(),
379 call->source()),
380 call->env(), FlowGraph::kEffect);
381 InsertBefore(call,
382 new (Z) CheckSmiInstr(new (Z) Value(right), call->deopt_id(),
383 call->source()),
384 call->env(), FlowGraph::kEffect);
385 cid = kSmiCid;
386 } else if (binary_feedback.OperandsAreSmiOrMint()) {
387 cid = kMintCid;
388 } else if (binary_feedback.OperandsAreSmiOrDouble()) {
389 // Use double comparison.
390 if (SmiFitsInDouble()) {
391 cid = kDoubleCid;
392 } else {
393 if (binary_feedback.IncludesOperands(kSmiCid)) {
394 // We cannot use double comparison on two smis. Need polymorphic
395 // call.
396 return false;
397 } else {
398 InsertBefore(
399 call,
400 new (Z) CheckEitherNonSmiInstr(
401 new (Z) Value(left), new (Z) Value(right), call->deopt_id()),
402 call->env(), FlowGraph::kEffect);
403 cid = kDoubleCid;
404 }
405 }
406 } else {
407 // Check if ICDData contains checks with Smi/Null combinations. In that case
408 // we can still emit the optimized Smi equality operation but need to add
409 // checks for null or Smi.
410 if (binary_feedback.OperandsAreSmiOrNull()) {
411 AddChecksForArgNr(call, left, /* arg_number = */ 0);
412 AddChecksForArgNr(call, right, /* arg_number = */ 1);
413
414 cid = kSmiCid;
415 } else {
416 // Shortcut for equality with null.
417 // TODO(vegorov): this optimization is not speculative and should
418 // be hoisted out of this function.
419 ConstantInstr* right_const = right->AsConstant();
420 ConstantInstr* left_const = left->AsConstant();
421 if ((right_const != nullptr && right_const->value().IsNull()) ||
422 (left_const != nullptr && left_const->value().IsNull())) {
423 StrictCompareInstr* comp = new (Z)
424 StrictCompareInstr(call->source(), Token::kEQ_STRICT,
425 new (Z) Value(left), new (Z) Value(right),
426 /* number_check = */ false, DeoptId::kNone);
427 ReplaceCall(call, comp);
428 return true;
429 }
430 return false;
431 }
432 }
433 ASSERT(cid != kIllegalCid);
434 EqualityCompareInstr* comp =
435 new (Z) EqualityCompareInstr(call->source(), op_kind, new (Z) Value(left),
436 new (Z) Value(right), cid, call->deopt_id());
437 ReplaceCall(call, comp);
438 return true;
439}

◆ TryReplaceWithRelationalOp()

bool dart::CallSpecializer::TryReplaceWithRelationalOp ( InstanceCallInstr * call,
Token::Kind  op_kind 
)
protected

Definition at line 441 of file call_specializer.cc.

442 {
443 ASSERT(call->type_args_len() == 0);
444 ASSERT(call->ArgumentCount() == 2);
445
446 const BinaryFeedback& binary_feedback = call->BinaryFeedback();
447 Definition* left = call->ArgumentAt(0);
448 Definition* right = call->ArgumentAt(1);
449
450 intptr_t cid = kIllegalCid;
451 if (binary_feedback.OperandsAre(kSmiCid)) {
452 InsertBefore(call,
453 new (Z) CheckSmiInstr(new (Z) Value(left), call->deopt_id(),
454 call->source()),
455 call->env(), FlowGraph::kEffect);
456 InsertBefore(call,
457 new (Z) CheckSmiInstr(new (Z) Value(right), call->deopt_id(),
458 call->source()),
459 call->env(), FlowGraph::kEffect);
460 cid = kSmiCid;
461 } else if (binary_feedback.OperandsAreSmiOrMint()) {
462 cid = kMintCid;
463 } else if (binary_feedback.OperandsAreSmiOrDouble()) {
464 // Use double comparison.
465 if (SmiFitsInDouble()) {
466 cid = kDoubleCid;
467 } else {
468 if (binary_feedback.IncludesOperands(kSmiCid)) {
469 // We cannot use double comparison on two smis. Need polymorphic
470 // call.
471 return false;
472 } else {
473 InsertBefore(
474 call,
475 new (Z) CheckEitherNonSmiInstr(
476 new (Z) Value(left), new (Z) Value(right), call->deopt_id()),
477 call->env(), FlowGraph::kEffect);
478 cid = kDoubleCid;
479 }
480 }
481 } else {
482 return false;
483 }
484 ASSERT(cid != kIllegalCid);
485 RelationalOpInstr* comp =
486 new (Z) RelationalOpInstr(call->source(), op_kind, new (Z) Value(left),
487 new (Z) Value(right), cid, call->deopt_id());
488 ReplaceCall(call, comp);
489 return true;
490}

◆ TryReplaceWithUnaryOp()

bool dart::CallSpecializer::TryReplaceWithUnaryOp ( InstanceCallInstr * call,
Token::Kind  op_kind 
)
protected

Definition at line 677 of file call_specializer.cc.

678 {
679 ASSERT(call->type_args_len() == 0);
680 ASSERT(call->ArgumentCount() == 1);
681 Definition* input = call->ArgumentAt(0);
682 Definition* unary_op = nullptr;
683 if (call->Targets().ReceiverIs(kSmiCid)) {
684 InsertBefore(call,
685 new (Z) CheckSmiInstr(new (Z) Value(input), call->deopt_id(),
686 call->source()),
687 call->env(), FlowGraph::kEffect);
688 unary_op = new (Z)
689 UnarySmiOpInstr(op_kind, new (Z) Value(input), call->deopt_id());
690 } else if ((op_kind == Token::kBIT_NOT) &&
691 call->Targets().ReceiverIsSmiOrMint()) {
692 unary_op = new (Z)
693 UnaryInt64OpInstr(op_kind, new (Z) Value(input), call->deopt_id());
694 } else if (call->Targets().ReceiverIs(kDoubleCid) &&
695 (op_kind == Token::kNEGATE)) {
696 AddReceiverCheck(call);
697 unary_op = new (Z) UnaryDoubleOpInstr(Token::kNEGATE, new (Z) Value(input),
698 call->deopt_id());
699 } else {
700 return false;
701 }
702 ASSERT(unary_op != nullptr);
703 ReplaceCall(call, unary_op);
704 return true;
705}

◆ VisitLoadCodeUnits()

void dart::CallSpecializer::VisitLoadCodeUnits ( LoadCodeUnitsInstr * instr)
virtual

Definition at line 1332 of file call_specializer.cc.

1332 {
1333// TODO(zerny): Use kUnboxedUint32 once it is fully supported/optimized.
1334#if defined(TARGET_ARCH_IA32) || defined(TARGET_ARCH_ARM)
1335 if (!instr->can_pack_into_smi()) instr->set_representation(kUnboxedInt64);
1336#endif
1337}

◆ VisitStaticCall()

void dart::CallSpecializer::VisitStaticCall ( StaticCallInstr * instr)
virtual

Reimplemented in dart::AotCallSpecializer.

Definition at line 1261 of file call_specializer.cc.

1261 {
1262 if (TryReplaceStaticCallWithInline(flow_graph_, current_iterator(), call,
1263 speculative_policy_)) {
1264 return;
1265 }
1266
1267 if (speculative_policy_->IsAllowedForInlining(call->deopt_id())) {
1268 // Only if speculative inlining is enabled.
1269
1270 MethodRecognizer::Kind recognized_kind = call->function().recognized_kind();
1271 const CallTargets& targets = call->Targets();
1272 const BinaryFeedback& binary_feedback = call->BinaryFeedback();
1273
1274 switch (recognized_kind) {
1275 case MethodRecognizer::kMathMin:
1276 case MethodRecognizer::kMathMax: {
1277 // We can handle only monomorphic min/max call sites with both arguments
1278 // being either doubles or smis.
1279 if (targets.IsMonomorphic() && (call->FirstArgIndex() == 0)) {
1280 intptr_t result_cid = kIllegalCid;
1281 if (binary_feedback.IncludesOperands(kDoubleCid)) {
1282 result_cid = kDoubleCid;
1283 } else if (binary_feedback.IncludesOperands(kSmiCid)) {
1284 result_cid = kSmiCid;
1285 }
1286 if (result_cid != kIllegalCid) {
1287 MathMinMaxInstr* min_max = new (Z) MathMinMaxInstr(
1288 recognized_kind, new (Z) Value(call->ArgumentAt(0)),
1289 new (Z) Value(call->ArgumentAt(1)), call->deopt_id(),
1290 result_cid);
1291 const Cids* cids = Cids::CreateMonomorphic(Z, result_cid);
1292 AddCheckClass(min_max->left()->definition(), *cids,
1293 call->deopt_id(), call->env(), call);
1294 AddCheckClass(min_max->right()->definition(), *cids,
1295 call->deopt_id(), call->env(), call);
1296 ReplaceCall(call, min_max);
1297 return;
1298 }
1299 }
1300 break;
1301 }
1302 case MethodRecognizer::kDoubleFromInteger: {
1303 if (call->HasICData() && targets.IsMonomorphic() &&
1304 (call->FirstArgIndex() == 0)) {
1305 if (binary_feedback.ArgumentIs(kSmiCid)) {
1306 Definition* arg = call->ArgumentAt(1);
1307 AddCheckSmi(arg, call->deopt_id(), call->env(), call);
1308 ReplaceCall(call, new (Z) SmiToDoubleInstr(new (Z) Value(arg),
1309 call->source()));
1310 return;
1311 } else if (binary_feedback.ArgumentIs(kMintCid) &&
1312 CanConvertInt64ToDouble()) {
1313 Definition* arg = call->ArgumentAt(1);
1314 ReplaceCall(call, new (Z) Int64ToDoubleInstr(new (Z) Value(arg),
1315 call->deopt_id()));
1316 return;
1317 }
1318 }
1319 break;
1320 }
1321
1322 default:
1323 break;
1324 }
1325 }
1326
1327 if (TryOptimizeStaticCallUsingStaticTypes(call)) {
1328 return;
1329 }
1330}

◆ zone()

Zone * dart::CallSpecializer::zone ( ) const
inline protected

Definition at line 91 of file call_specializer.h.

91{ return flow_graph_->zone(); }

Member Data Documentation

◆ should_clone_fields_

const bool dart::CallSpecializer::should_clone_fields_
protected

Definition at line 149 of file call_specializer.h.

◆ speculative_policy_

SpeculativeInliningPolicy* dart::CallSpecializer::speculative_policy_
protected

Definition at line 148 of file call_specializer.h.


The documentation for this class was generated from the following files:

call_specializer.h
call_specializer.cc