Flutter Engine
The Flutter Engine
Loading...
Searching...
No Matches
call_specializer.cc
Go to the documentation of this file.
1// Copyright (c) 2017, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
6
9#include "vm/compiler/cha.h"
12#include "vm/cpu.h"
13
14namespace dart {
15
16DECLARE_FLAG(bool, enable_simd_inline);
17
18// Quick access to the current isolate and zone.
19#define IG (isolate_group())
20#define Z (zone())
21
22static void RefineUseTypes(Definition* instr) {
23 CompileType* new_type = instr->Type();
24 for (Value::Iterator it(instr->input_use_list()); !it.Done(); it.Advance()) {
25 it.Current()->RefineReachingType(new_type);
26 }
27}
28
32
36
40
41static bool IsNumberCid(intptr_t cid) {
42 return (cid == kSmiCid) || (cid == kDoubleCid);
43}
44
45static bool ShouldSpecializeForDouble(const BinaryFeedback& binary_feedback) {
46 // Don't specialize for double if we can't unbox them.
47 if (!CanUnboxDouble()) {
48 return false;
49 }
50
51 // Unboxed double operation can't handle case of two smis.
52 if (binary_feedback.IncludesOperands(kSmiCid)) {
53 return false;
54 }
55
56 // Check that the call site has seen only smis and doubles.
57 return binary_feedback.OperandsAreSmiOrDouble();
58}
59
60// Optimize instance calls using ICData.
64
65// Optimize instance calls using cid. This is called after optimizer
66// converted instance calls to instructions. Any remaining
67// instance calls are either megamorphic calls, cannot be optimized or
68// have no runtime type feedback collected.
69// Attempts to convert an instance call (IC call) using propagated class-ids,
70// e.g., receiver class id, guarded-cid, or by guessing cid-s.
72 ASSERT(current_iterator_ == nullptr);
73 for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator();
74 !block_it.Done(); block_it.Advance()) {
76 ForwardInstructionIterator it(block_it.Current());
78 for (; !it.Done(); it.Advance()) {
79 Instruction* instr = it.Current();
80 if (instr->IsInstanceCall()) {
81 InstanceCallInstr* call = instr->AsInstanceCall();
82 if (call->HasICData()) {
83 if (TryCreateICData(call)) {
84 VisitInstanceCall(call);
85 }
86 }
87 } else if (auto static_call = instr->AsStaticCall()) {
88 // If TFA devirtualized instance calls to static calls we also want to
89 // process them here.
90 VisitStaticCall(static_call);
91 } else if (instr->IsPolymorphicInstanceCall()) {
92 SpecializePolymorphicInstanceCall(instr->AsPolymorphicInstanceCall());
93 }
94 }
95 current_iterator_ = nullptr;
96 }
97}
98
100 ASSERT(call->HasICData());
101
102 if (call->Targets().length() > 0) {
103 // This occurs when an instance call has too many checks, will be converted
104 // to megamorphic call.
105 return false;
106 }
107
108 const intptr_t receiver_index = call->FirstArgIndex();
109 GrowableArray<intptr_t> class_ids(call->ic_data()->NumArgsTested());
110 ASSERT(call->ic_data()->NumArgsTested() <=
111 call->ArgumentCountWithoutTypeArgs());
112 for (intptr_t i = 0; i < call->ic_data()->NumArgsTested(); i++) {
113 class_ids.Add(call->ArgumentValueAt(receiver_index + i)->Type()->ToCid());
114 }
115
116 const Token::Kind op_kind = call->token_kind();
117 if (FLAG_guess_icdata_cid && !CompilerState::Current().is_aot()) {
118 if (Token::IsRelationalOperator(op_kind) ||
119 Token::IsEqualityOperator(op_kind) ||
120 Token::IsBinaryOperator(op_kind)) {
121 // Guess cid: if one of the inputs is a number assume that the other
122 // is a number of same type, unless the interface target tells us this
123 // is impossible.
124 if (call->CanReceiverBeSmiBasedOnInterfaceTarget(zone())) {
125 const intptr_t cid_0 = class_ids[0];
126 const intptr_t cid_1 = class_ids[1];
127 if ((cid_0 == kDynamicCid) && (IsNumberCid(cid_1))) {
128 class_ids[0] = cid_1;
129 } else if (IsNumberCid(cid_0) && (cid_1 == kDynamicCid)) {
130 class_ids[1] = cid_0;
131 }
132 }
133 }
134 }
135
136 bool all_cids_known = true;
137 for (intptr_t i = 0; i < class_ids.length(); i++) {
138 if (class_ids[i] == kDynamicCid) {
139 // Not all cid-s known.
140 all_cids_known = false;
141 break;
142 }
143 }
144
145 if (all_cids_known) {
146 const intptr_t receiver_cid = class_ids[0];
147 if (receiver_cid == kSentinelCid) {
148 // Unreachable call.
149 return false;
150 }
151 const Class& receiver_class =
152 Class::Handle(Z, IG->class_table()->At(receiver_cid));
153 if (!receiver_class.is_finalized()) {
154 // Do not eagerly finalize classes. ResolveDynamicForReceiverClass can
155 // cause class finalization, since callee's receiver class may not be
156 // finalized yet.
157 return false;
158 }
160 Z, call->ResolveForReceiverClass(receiver_class, /*allow_add=*/false));
161 if (function.IsNull()) {
162 return false;
163 }
165
166 // Update the CallTargets attached to the instruction with our speculative
167 // target. The next round of CallSpecializer::VisitInstanceCall will make
168 // use of this.
169 call->SetTargets(CallTargets::CreateMonomorphic(Z, class_ids[0], function));
170 if (class_ids.length() == 2) {
171 call->SetBinaryFeedback(
172 BinaryFeedback::CreateMonomorphic(Z, class_ids[0], class_ids[1]));
173 }
174 return true;
175 }
176
177 return false;
178}
179
180void CallSpecializer::SpecializePolymorphicInstanceCall(
182 if (!FLAG_polymorphic_with_deopt) {
183 // Specialization adds receiver checks which can lead to deoptimization.
184 return;
185 }
186
187 const intptr_t receiver_cid = call->Receiver()->Type()->ToCid();
188 if (receiver_cid == kDynamicCid) {
189 return; // No information about receiver was inferred.
190 }
191
192 const ICData& ic_data = *call->ic_data();
193
194 const CallTargets* targets =
196 receiver_cid, String::Handle(zone(), ic_data.target_name()),
197 Array::Handle(zone(), ic_data.arguments_descriptor()));
198 if (targets == nullptr) {
199 // No specialization.
200 return;
201 }
202
203 ASSERT(targets->HasSingleTarget());
204 const Function& target = targets->FirstTarget();
205 StaticCallInstr* specialized =
206 StaticCallInstr::FromCall(Z, call, target, targets->AggregateCallCount());
207 call->ReplaceWith(specialized, current_iterator());
208}
209
211 Instruction* replacement,
213 ASSERT(!call->HasMoveArguments());
214 if (result == nullptr) {
215 ASSERT(replacement->IsDefinition());
216 call->ReplaceWith(replacement->AsDefinition(), current_iterator());
217 } else {
218 call->ReplaceWithResult(replacement, result, current_iterator());
219 }
220}
221
223 ReplaceCallWithResult(call, replacement, nullptr);
224}
225
226void CallSpecializer::AddCheckSmi(Definition* to_check,
227 intptr_t deopt_id,
228 Environment* deopt_environment,
229 Instruction* insert_before) {
230 // TODO(alexmarkov): check reaching type instead of definition type
231 if (to_check->Type()->ToCid() != kSmiCid) {
232 InsertBefore(insert_before,
233 new (Z) CheckSmiInstr(new (Z) Value(to_check), deopt_id,
234 insert_before->source()),
235 deopt_environment, FlowGraph::kEffect);
236 }
237}
238
240 const Cids& cids,
241 intptr_t deopt_id,
242 Environment* deopt_environment,
243 Instruction* insert_before) {
244 // Type propagation has not run yet, we cannot eliminate the check.
245 Instruction* check = flow_graph_->CreateCheckClass(to_check, cids, deopt_id,
246 insert_before->source());
247 InsertBefore(insert_before, check, deopt_environment, FlowGraph::kEffect);
248}
249
250void CallSpecializer::AddChecksForArgNr(InstanceCallInstr* call,
251 Definition* argument,
252 int argument_number) {
253 const Cids* cids =
254 Cids::CreateForArgument(zone(), call->BinaryFeedback(), argument_number);
255 AddCheckClass(argument, *cids, call->deopt_id(), call->env(), call);
256}
257
259 const String& function_name,
260 intptr_t deopt_id,
261 Environment* deopt_environment,
262 Instruction* insert_before) {
263 if (to_check->Type()->is_nullable()) {
264 CheckNullInstr* check_null =
265 new (Z) CheckNullInstr(to_check->CopyWithType(Z), function_name,
266 deopt_id, insert_before->source());
267 if (FLAG_trace_strong_mode_types) {
268 THR_Print("[Strong mode] Inserted %s\n", check_null->ToCString());
269 }
270 InsertBefore(insert_before, check_null, deopt_environment,
272 }
273}
274
275// Return true if d is a string of length one (a constant or result from
276// from string-from-char-code instruction.
278 if (d->IsConstant()) {
279 const Object& obj = d->AsConstant()->value();
280 if (obj.IsString()) {
281 return String::Cast(obj).Length() == 1;
282 } else {
283 return false;
284 }
285 } else {
286 return d->IsOneByteStringFromCharCode();
287 }
288}
289
290// Returns true if the string comparison was converted into char-code
291// comparison. Conversion is only possible for strings of length one.
292// E.g., detect str[x] == "x"; and use an integer comparison of char-codes.
293bool CallSpecializer::TryStringLengthOneEquality(InstanceCallInstr* call,
294 Token::Kind op_kind) {
295 ASSERT(call->BinaryFeedback().OperandsAre(kOneByteStringCid));
296 // Check that left and right are length one strings (either string constants
297 // or results of string-from-char-code.
298 Definition* left = call->ArgumentAt(0);
299 Definition* right = call->ArgumentAt(1);
300 Value* left_val = nullptr;
301 Definition* to_remove_left = nullptr;
303 // Swap, since we know that both arguments are strings
304 Definition* temp = left;
305 left = right;
306 right = temp;
307 }
308 if (IsLengthOneString(left)) {
309 // Optimize if left is a string with length one (either constant or
310 // result of string-from-char-code.
311 if (left->IsConstant()) {
312 ConstantInstr* left_const = left->AsConstant();
313 const String& str = String::Cast(left_const->value());
314 ASSERT(str.Length() == 1);
315 ConstantInstr* char_code_left = flow_graph()->GetConstant(
316 Smi::ZoneHandle(Z, Smi::New(static_cast<intptr_t>(str.CharAt(0)))));
317 left_val = new (Z) Value(char_code_left);
318 } else if (left->IsOneByteStringFromCharCode()) {
319 // Use input of string-from-charcode as left value.
320 OneByteStringFromCharCodeInstr* instr =
321 left->AsOneByteStringFromCharCode();
322 left_val = new (Z) Value(instr->char_code()->definition());
323 to_remove_left = instr;
324 } else {
325 // IsLengthOneString(left) should have been false.
326 UNREACHABLE();
327 }
328
329 Definition* to_remove_right = nullptr;
330 Value* right_val = nullptr;
331 if (right->IsOneByteStringFromCharCode()) {
332 // Skip string-from-char-code, and use its input as right value.
333 OneByteStringFromCharCodeInstr* right_instr =
334 right->AsOneByteStringFromCharCode();
335 right_val = new (Z) Value(right_instr->char_code()->definition());
336 to_remove_right = right_instr;
337 } else {
338 AddChecksForArgNr(call, right, /* arg_number = */ 1);
339 // String-to-char-code instructions returns -1 (illegal charcode) if
340 // string is not of length one.
341 StringToCharCodeInstr* char_code_right = new (Z)
342 StringToCharCodeInstr(new (Z) Value(right), kOneByteStringCid);
343 InsertBefore(call, char_code_right, call->env(), FlowGraph::kValue);
344 right_val = new (Z) Value(char_code_right);
345 }
346
347 // Comparing char-codes instead of strings.
348 EqualityCompareInstr* comp =
349 new (Z) EqualityCompareInstr(call->source(), op_kind, left_val,
350 right_val, kSmiCid, call->deopt_id());
351 ReplaceCall(call, comp);
352
353 // Remove dead instructions.
354 if ((to_remove_left != nullptr) &&
355 (to_remove_left->input_use_list() == nullptr)) {
356 to_remove_left->ReplaceUsesWith(flow_graph()->constant_null());
357 to_remove_left->RemoveFromGraph();
358 }
359 if ((to_remove_right != nullptr) &&
360 (to_remove_right->input_use_list() == nullptr)) {
361 to_remove_right->ReplaceUsesWith(flow_graph()->constant_null());
362 to_remove_right->RemoveFromGraph();
363 }
364 return true;
365 }
366 return false;
367}
368
369static bool SmiFitsInDouble() {
370 return compiler::target::kSmiBits < 53;
371}
372
374 Token::Kind op_kind) {
375 const BinaryFeedback& binary_feedback = call->BinaryFeedback();
376
377 ASSERT(call->type_args_len() == 0);
378 ASSERT(call->ArgumentCount() == 2);
379 Definition* const left = call->ArgumentAt(0);
380 Definition* const right = call->ArgumentAt(1);
381
382 intptr_t cid = kIllegalCid;
383 if (binary_feedback.OperandsAre(kOneByteStringCid)) {
384 return TryStringLengthOneEquality(call, op_kind);
385 } else if (binary_feedback.OperandsAre(kSmiCid)) {
386 InsertBefore(call,
387 new (Z) CheckSmiInstr(new (Z) Value(left), call->deopt_id(),
388 call->source()),
389 call->env(), FlowGraph::kEffect);
390 InsertBefore(call,
391 new (Z) CheckSmiInstr(new (Z) Value(right), call->deopt_id(),
392 call->source()),
393 call->env(), FlowGraph::kEffect);
394 cid = kSmiCid;
395 } else if (binary_feedback.OperandsAreSmiOrMint()) {
396 cid = kMintCid;
397 } else if (binary_feedback.OperandsAreSmiOrDouble() && CanUnboxDouble()) {
398 // Use double comparison.
399 if (SmiFitsInDouble()) {
400 cid = kDoubleCid;
401 } else {
402 if (binary_feedback.IncludesOperands(kSmiCid)) {
403 // We cannot use double comparison on two smis. Need polymorphic
404 // call.
405 return false;
406 } else {
408 call,
410 new (Z) Value(left), new (Z) Value(right), call->deopt_id()),
411 call->env(), FlowGraph::kEffect);
412 cid = kDoubleCid;
413 }
414 }
415 } else {
416 // Check if ICDData contains checks with Smi/Null combinations. In that case
417 // we can still emit the optimized Smi equality operation but need to add
418 // checks for null or Smi.
419 if (binary_feedback.OperandsAreSmiOrNull()) {
420 AddChecksForArgNr(call, left, /* arg_number = */ 0);
421 AddChecksForArgNr(call, right, /* arg_number = */ 1);
422
423 cid = kSmiCid;
424 } else {
425 // Shortcut for equality with null.
426 // TODO(vegorov): this optimization is not speculative and should
427 // be hoisted out of this function.
428 ConstantInstr* right_const = right->AsConstant();
429 ConstantInstr* left_const = left->AsConstant();
430 if ((right_const != nullptr && right_const->value().IsNull()) ||
431 (left_const != nullptr && left_const->value().IsNull())) {
432 StrictCompareInstr* comp = new (Z)
433 StrictCompareInstr(call->source(), Token::kEQ_STRICT,
434 new (Z) Value(left), new (Z) Value(right),
435 /* number_check = */ false, DeoptId::kNone);
436 ReplaceCall(call, comp);
437 return true;
438 }
439 return false;
440 }
441 }
444 new (Z) EqualityCompareInstr(call->source(), op_kind, new (Z) Value(left),
445 new (Z) Value(right), cid, call->deopt_id());
446 ReplaceCall(call, comp);
447 return true;
448}
449
451 Token::Kind op_kind) {
452 ASSERT(call->type_args_len() == 0);
453 ASSERT(call->ArgumentCount() == 2);
454
455 const BinaryFeedback& binary_feedback = call->BinaryFeedback();
456 Definition* left = call->ArgumentAt(0);
457 Definition* right = call->ArgumentAt(1);
458
459 intptr_t cid = kIllegalCid;
460 if (binary_feedback.OperandsAre(kSmiCid)) {
461 InsertBefore(call,
462 new (Z) CheckSmiInstr(new (Z) Value(left), call->deopt_id(),
463 call->source()),
464 call->env(), FlowGraph::kEffect);
465 InsertBefore(call,
466 new (Z) CheckSmiInstr(new (Z) Value(right), call->deopt_id(),
467 call->source()),
468 call->env(), FlowGraph::kEffect);
469 cid = kSmiCid;
470 } else if (binary_feedback.OperandsAreSmiOrMint()) {
471 cid = kMintCid;
472 } else if (binary_feedback.OperandsAreSmiOrDouble() && CanUnboxDouble()) {
473 // Use double comparison.
474 if (SmiFitsInDouble()) {
475 cid = kDoubleCid;
476 } else {
477 if (binary_feedback.IncludesOperands(kSmiCid)) {
478 // We cannot use double comparison on two smis. Need polymorphic
479 // call.
480 return false;
481 } else {
483 call,
485 new (Z) Value(left), new (Z) Value(right), call->deopt_id()),
486 call->env(), FlowGraph::kEffect);
487 cid = kDoubleCid;
488 }
489 }
490 } else {
491 return false;
492 }
494 RelationalOpInstr* comp =
495 new (Z) RelationalOpInstr(call->source(), op_kind, new (Z) Value(left),
496 new (Z) Value(right), cid, call->deopt_id());
497 ReplaceCall(call, comp);
498 return true;
499}
500
502 Token::Kind op_kind) {
503 intptr_t operands_type = kIllegalCid;
504 ASSERT(call->HasICData());
505 const BinaryFeedback& binary_feedback = call->BinaryFeedback();
506 switch (op_kind) {
507 case Token::kADD:
508 case Token::kSUB:
509 case Token::kMUL:
510 if (binary_feedback.OperandsAre(kSmiCid)) {
511 // Don't generate smi code if the IC data is marked because
512 // of an overflow.
513 operands_type =
514 call->ic_data()->HasDeoptReason(ICData::kDeoptBinarySmiOp)
515 ? kMintCid
516 : kSmiCid;
517 } else if (binary_feedback.OperandsAreSmiOrMint()) {
518 // Don't generate mint code if the IC data is marked because of an
519 // overflow.
520 if (call->ic_data()->HasDeoptReason(ICData::kDeoptBinaryInt64Op))
521 return false;
522 operands_type = kMintCid;
523 } else if (ShouldSpecializeForDouble(binary_feedback)) {
524 operands_type = kDoubleCid;
525 } else if (binary_feedback.OperandsAre(kFloat32x4Cid)) {
526 operands_type = kFloat32x4Cid;
527 } else if (binary_feedback.OperandsAre(kInt32x4Cid)) {
528 ASSERT(op_kind != Token::kMUL); // Int32x4 doesn't have a multiply op.
529 operands_type = kInt32x4Cid;
530 } else if (binary_feedback.OperandsAre(kFloat64x2Cid)) {
531 operands_type = kFloat64x2Cid;
532 } else {
533 return false;
534 }
535 break;
536 case Token::kDIV:
537 if (ShouldSpecializeForDouble(binary_feedback) ||
538 binary_feedback.OperandsAre(kSmiCid)) {
539 operands_type = kDoubleCid;
540 } else if (binary_feedback.OperandsAre(kFloat32x4Cid)) {
541 operands_type = kFloat32x4Cid;
542 } else if (binary_feedback.OperandsAre(kFloat64x2Cid)) {
543 operands_type = kFloat64x2Cid;
544 } else {
545 return false;
546 }
547 break;
548 case Token::kBIT_AND:
549 case Token::kBIT_OR:
550 case Token::kBIT_XOR:
551 if (binary_feedback.OperandsAre(kSmiCid)) {
552 operands_type = kSmiCid;
553 } else if (binary_feedback.OperandsAreSmiOrMint()) {
554 operands_type = kMintCid;
555 } else if (binary_feedback.OperandsAre(kInt32x4Cid)) {
556 operands_type = kInt32x4Cid;
557 } else {
558 return false;
559 }
560 break;
561 case Token::kSHL:
562 case Token::kSHR:
563 case Token::kUSHR:
564 if (binary_feedback.OperandsAre(kSmiCid)) {
565 // Left shift may overflow from smi into mint.
566 // Don't generate smi code if the IC data is marked because
567 // of an overflow.
568 if (call->ic_data()->HasDeoptReason(ICData::kDeoptBinaryInt64Op)) {
569 return false;
570 }
571 operands_type =
572 call->ic_data()->HasDeoptReason(ICData::kDeoptBinarySmiOp)
573 ? kMintCid
574 : kSmiCid;
575 } else if (binary_feedback.OperandsAreSmiOrMint() &&
576 binary_feedback.ArgumentIs(kSmiCid)) {
577 // Don't generate mint code if the IC data is marked because of an
578 // overflow.
579 if (call->ic_data()->HasDeoptReason(ICData::kDeoptBinaryInt64Op)) {
580 return false;
581 }
582 // Check for smi/mint << smi or smi/mint >> smi.
583 operands_type = kMintCid;
584 } else {
585 return false;
586 }
587 break;
588 case Token::kMOD:
589 case Token::kTRUNCDIV:
590 if (binary_feedback.OperandsAre(kSmiCid)) {
591 if (call->ic_data()->HasDeoptReason(ICData::kDeoptBinarySmiOp)) {
592 return false;
593 }
594 operands_type = kSmiCid;
595 } else {
596 return false;
597 }
598 break;
599 default:
600 UNREACHABLE();
601 }
602
603 ASSERT(call->type_args_len() == 0);
604 ASSERT(call->ArgumentCount() == 2);
605 Definition* left = call->ArgumentAt(0);
606 Definition* right = call->ArgumentAt(1);
607 if (operands_type == kDoubleCid) {
608 if (!CanUnboxDouble()) {
609 return false;
610 }
611 // Check that either left or right are not a smi. Result of a
612 // binary operation with two smis is a smi not a double, except '/' which
613 // returns a double for two smis.
614 if (op_kind != Token::kDIV) {
616 call,
618 new (Z) Value(left), new (Z) Value(right), call->deopt_id()),
619 call->env(), FlowGraph::kEffect);
620 }
621
622 BinaryDoubleOpInstr* double_bin_op = new (Z)
623 BinaryDoubleOpInstr(op_kind, new (Z) Value(left), new (Z) Value(right),
624 call->deopt_id(), call->source());
625 ReplaceCall(call, double_bin_op);
626 } else if (operands_type == kMintCid) {
627 if ((op_kind == Token::kSHL) || (op_kind == Token::kSHR) ||
628 (op_kind == Token::kUSHR)) {
629 SpeculativeShiftInt64OpInstr* shift_op = new (Z)
631 new (Z) Value(right), call->deopt_id());
632 ReplaceCall(call, shift_op);
633 } else {
635 op_kind, new (Z) Value(left), new (Z) Value(right), call->deopt_id());
636 ReplaceCall(call, bin_op);
637 }
638 } else if ((operands_type == kFloat32x4Cid) ||
639 (operands_type == kInt32x4Cid) ||
640 (operands_type == kFloat64x2Cid)) {
641 return InlineSimdBinaryOp(call, operands_type, op_kind);
642 } else if (op_kind == Token::kMOD) {
643 ASSERT(operands_type == kSmiCid);
644 if (right->IsConstant()) {
645 const Object& obj = right->AsConstant()->value();
646 if (obj.IsSmi() && Utils::IsPowerOfTwo(Smi::Cast(obj).Value())) {
647 // Insert smi check and attach a copy of the original environment
648 // because the smi operation can still deoptimize.
649 InsertBefore(call,
650 new (Z) CheckSmiInstr(new (Z) Value(left),
651 call->deopt_id(), call->source()),
652 call->env(), FlowGraph::kEffect);
653 ConstantInstr* constant = flow_graph()->GetConstant(
654 Smi::Handle(Z, Smi::New(Smi::Cast(obj).Value() - 1)));
655 BinarySmiOpInstr* bin_op =
656 new (Z) BinarySmiOpInstr(Token::kBIT_AND, new (Z) Value(left),
657 new (Z) Value(constant), call->deopt_id());
658 ReplaceCall(call, bin_op);
659 return true;
660 }
661 }
662 // Insert two smi checks and attach a copy of the original
663 // environment because the smi operation can still deoptimize.
664 AddCheckSmi(left, call->deopt_id(), call->env(), call);
665 AddCheckSmi(right, call->deopt_id(), call->env(), call);
666 BinarySmiOpInstr* bin_op = new (Z) BinarySmiOpInstr(
667 op_kind, new (Z) Value(left), new (Z) Value(right), call->deopt_id());
668 ReplaceCall(call, bin_op);
669 } else {
670 ASSERT(operands_type == kSmiCid);
671 // Insert two smi checks and attach a copy of the original
672 // environment because the smi operation can still deoptimize.
673 AddCheckSmi(left, call->deopt_id(), call->env(), call);
674 AddCheckSmi(right, call->deopt_id(), call->env(), call);
675 if (left->IsConstant() &&
676 ((op_kind == Token::kADD) || (op_kind == Token::kMUL))) {
677 // Constant should be on the right side.
678 Definition* temp = left;
679 left = right;
680 right = temp;
681 }
682 BinarySmiOpInstr* bin_op = new (Z) BinarySmiOpInstr(
683 op_kind, new (Z) Value(left), new (Z) Value(right), call->deopt_id());
684 ReplaceCall(call, bin_op);
685 }
686 return true;
687}
688
690 Token::Kind op_kind) {
691 ASSERT(call->type_args_len() == 0);
692 ASSERT(call->ArgumentCount() == 1);
693 Definition* input = call->ArgumentAt(0);
694 Definition* unary_op = nullptr;
695 if (call->Targets().ReceiverIs(kSmiCid)) {
696 InsertBefore(call,
697 new (Z) CheckSmiInstr(new (Z) Value(input), call->deopt_id(),
698 call->source()),
699 call->env(), FlowGraph::kEffect);
700 unary_op = new (Z)
701 UnarySmiOpInstr(op_kind, new (Z) Value(input), call->deopt_id());
702 } else if ((op_kind == Token::kBIT_NOT) &&
703 call->Targets().ReceiverIsSmiOrMint()) {
704 unary_op = new (Z)
705 UnaryInt64OpInstr(op_kind, new (Z) Value(input), call->deopt_id());
706 } else if (call->Targets().ReceiverIs(kDoubleCid) &&
707 (op_kind == Token::kNEGATE) && CanUnboxDouble()) {
708 AddReceiverCheck(call);
709 unary_op = new (Z) UnaryDoubleOpInstr(Token::kNEGATE, new (Z) Value(input),
710 call->deopt_id());
711 } else {
712 return false;
713 }
714 ASSERT(unary_op != nullptr);
715 ReplaceCall(call, unary_op);
716 return true;
717}
718
719bool CallSpecializer::TryInlineImplicitInstanceGetter(InstanceCallInstr* call) {
720 const CallTargets& targets = call->Targets();
721 ASSERT(targets.HasSingleTarget());
722
723 // Inline implicit instance getter.
724 Field& field = Field::ZoneHandle(Z, targets.FirstTarget().accessor_field());
725 ASSERT(!field.IsNull());
726 if (field.needs_load_guard()) {
727 return false;
728 }
730 field = field.CloneFromOriginal();
731 }
732
733 switch (flow_graph()->CheckForInstanceCall(
734 call, UntaggedFunction::kImplicitGetter)) {
736 AddCheckNull(call->Receiver(), call->function_name(), call->deopt_id(),
737 call->env(), call);
738 break;
740 if (CompilerState::Current().is_aot()) {
741 return false; // AOT cannot class check
742 }
743 AddReceiverCheck(call);
744 break;
746 break;
747 }
748 InlineImplicitInstanceGetter(call, field);
749 return true;
750}
751
753 const Field& field) {
754 ASSERT(field.is_instance());
755 Definition* receiver = call->ArgumentAt(0);
756
757 const bool calls_initializer = field.NeedsInitializationCheckOnLoad();
758 const Slot& slot = Slot::Get(field, &flow_graph()->parsed_function());
760 new (Z) Value(receiver), slot, call->source(), calls_initializer,
761 calls_initializer ? call->deopt_id() : DeoptId::kNone);
762
763 // Note that this is a case of LoadField -> InstanceCall lazy deopt.
764 // Which means that we don't need to remove arguments from the environment
765 // because normal getter call expects receiver pushed (unlike the case
766 // of LoadField -> LoadField deoptimization handled by
767 // FlowGraph::AttachEnvironment).
768 if (!calls_initializer) {
769 // If we don't call initializer then we don't need an environment.
770 call->RemoveEnvironment();
771 }
772 ReplaceCall(call, load);
773
774 if (load->slot().type().ToNullableCid() != kDynamicCid) {
775 // Reset value types if we know concrete cid.
776 for (Value::Iterator it(load->input_use_list()); !it.Done(); it.Advance()) {
777 it.Current()->SetReachingType(nullptr);
778 }
779 }
780}
781
783 const CallTargets& targets = instr->Targets();
784 if (!targets.HasSingleTarget()) {
785 // Polymorphic sites are inlined like normal method calls by conventional
786 // inlining.
787 return false;
788 }
789 const Function& target = targets.FirstTarget();
790 if (target.kind() != UntaggedFunction::kImplicitSetter) {
791 // Non-implicit setter are inlined like normal method calls.
792 return false;
793 }
794 if (!CompilerState::Current().is_aot() && !target.WasCompiled()) {
795 return false;
796 }
797 Field& field = Field::ZoneHandle(Z, target.accessor_field());
798 ASSERT(!field.IsNull());
800 field = field.CloneFromOriginal();
801 }
802 if (field.is_late() && field.is_final()) {
803 return false;
804 }
805
806 switch (flow_graph()->CheckForInstanceCall(
807 instr, UntaggedFunction::kImplicitSetter)) {
809 AddCheckNull(instr->Receiver(), instr->function_name(), instr->deopt_id(),
810 instr->env(), instr);
811 break;
813 if (CompilerState::Current().is_aot()) {
814 return false; // AOT cannot class check
815 }
816 AddReceiverCheck(instr);
817 break;
819 break;
820 }
821
822 // True if we can use unchecked entry into the setter.
823 bool is_unchecked_call = false;
824 if (!CompilerState::Current().is_aot()) {
825 if (targets.IsMonomorphic() && targets.MonomorphicExactness().IsExact()) {
826 if (targets.MonomorphicExactness().IsTriviallyExact()) {
828 targets.MonomorphicReceiverCid());
829 }
830 is_unchecked_call = true;
831 }
832 }
833
834 if (IG->use_field_guards()) {
835 if (field.guarded_cid() != kDynamicCid) {
837 instr,
838 new (Z) GuardFieldClassInstr(new (Z) Value(instr->ArgumentAt(1)),
839 field, instr->deopt_id()),
840 instr->env(), FlowGraph::kEffect);
841 }
842
843 if (field.needs_length_check()) {
845 instr,
846 new (Z) GuardFieldLengthInstr(new (Z) Value(instr->ArgumentAt(1)),
847 field, instr->deopt_id()),
848 instr->env(), FlowGraph::kEffect);
849 }
850
853 instr,
854 new (Z) GuardFieldTypeInstr(new (Z) Value(instr->ArgumentAt(1)),
855 field, instr->deopt_id()),
856 instr->env(), FlowGraph::kEffect);
857 }
858 }
859
860 // Build an AssertAssignable if necessary.
861 const AbstractType& dst_type = AbstractType::ZoneHandle(zone(), field.type());
862 if (!dst_type.IsTopTypeForSubtyping()) {
863 // Compute if we need to type check the value. Always type check if
864 // at a dynamic invocation.
865 bool needs_check = true;
866 if (!instr->interface_target().IsNull()) {
867 if (field.is_covariant()) {
868 // Always type check covariant fields.
869 needs_check = true;
870 } else if (field.is_generic_covariant_impl()) {
871 // If field is generic covariant then we don't need to check it
872 // if the invocation was marked as unchecked (e.g. receiver of
873 // the invocation is also the receiver of the surrounding method).
874 // Note: we can't use flow_graph()->IsReceiver() for this optimization
875 // because strong mode only gives static guarantees at the AST level
876 // not at the SSA level.
877 needs_check = !(is_unchecked_call ||
878 (instr->entry_kind() == Code::EntryKind::kUnchecked));
879 } else {
880 // The rest of the stores are checked statically (we are not at
881 // a dynamic invocation).
882 needs_check = false;
883 }
884 }
885
886 if (needs_check) {
887 Definition* instantiator_type_args = flow_graph_->constant_null();
888 Definition* function_type_args = flow_graph_->constant_null();
889 if (!dst_type.IsInstantiated()) {
890 const Class& owner = Class::Handle(Z, field.Owner());
891 if (owner.NumTypeArguments() > 0) {
892 instantiator_type_args = new (Z) LoadFieldInstr(
893 new (Z) Value(instr->ArgumentAt(0)),
894 Slot::GetTypeArgumentsSlotFor(thread(), owner), instr->source());
895 InsertSpeculativeBefore(instr, instantiator_type_args, instr->env(),
897 }
898 }
899
900 auto assert_assignable = new (Z) AssertAssignableInstr(
901 instr->source(), new (Z) Value(instr->ArgumentAt(1)),
902 new (Z) Value(flow_graph_->GetConstant(dst_type)),
903 new (Z) Value(instantiator_type_args),
904 new (Z) Value(function_type_args),
905 String::ZoneHandle(zone(), field.name()), instr->deopt_id());
906 InsertSpeculativeBefore(instr, assert_assignable, instr->env(),
908 }
909 }
910
911 // Field guard was detached.
912 ASSERT(instr->FirstArgIndex() == 0);
913 StoreFieldInstr* store = new (Z)
914 StoreFieldInstr(field, new (Z) Value(instr->ArgumentAt(0)),
915 new (Z) Value(instr->ArgumentAt(1)), kEmitStoreBarrier,
916 instr->source(), &flow_graph()->parsed_function());
917
918 // Discard the environment from the original instruction because the store
919 // can't deoptimize.
920 instr->RemoveEnvironment();
921 ReplaceCallWithResult(instr, store, flow_graph()->constant_null());
922 return true;
923}
924
925bool CallSpecializer::InlineSimdBinaryOp(InstanceCallInstr* call,
926 intptr_t cid,
927 Token::Kind op_kind) {
928 if (!ShouldInlineSimd()) {
929 return false;
930 }
931 ASSERT(call->type_args_len() == 0);
932 ASSERT(call->ArgumentCount() == 2);
933 Definition* const left = call->ArgumentAt(0);
934 Definition* const right = call->ArgumentAt(1);
935 // Type check left and right.
936 AddChecksForArgNr(call, left, /* arg_number = */ 0);
937 AddChecksForArgNr(call, right, /* arg_number = */ 1);
938 // Replace call.
939 SimdOpInstr* op = SimdOpInstr::Create(
940 SimdOpInstr::KindForOperator(cid, op_kind), new (Z) Value(left),
941 new (Z) Value(right), call->deopt_id());
942 ReplaceCall(call, op);
943
944 return true;
945}
946
947// Only unique implicit instance getters can be currently handled.
949 const CallTargets& targets = call->Targets();
950 if (!targets.HasSingleTarget()) {
951 // Polymorphic sites are inlined like normal methods by conventional
952 // inlining in FlowGraphInliner.
953 return false;
954 }
955 const Function& target = targets.FirstTarget();
956 if (target.kind() != UntaggedFunction::kImplicitGetter) {
957 // Non-implicit getters are inlined like normal methods by conventional
958 // inlining in FlowGraphInliner.
959 return false;
960 }
961 if (!CompilerState::Current().is_aot() && !target.WasCompiled()) {
962 return false;
963 }
964 return TryInlineImplicitInstanceGetter(call);
965}
966
 967// Inline only simple, frequently called core library methods.
// NOTE(review): original line 968 (the function signature) is missing from
// this listing; presumably a CallSpecializer member taking an
// InstanceCallInstr* `call` — confirm against upstream.
 969 const CallTargets& targets = call->Targets();
 970 if (!targets.IsMonomorphic()) {
 971 // No type feedback collected or multiple receivers/targets found.
 972 return false;
 973 }
 974
 975 const Function& target = targets.FirstTarget();
 976 intptr_t receiver_cid = targets.MonomorphicReceiverCid();
 977 MethodRecognizer::Kind recognized_kind = target.recognized_kind();
 978
// int.toDouble(): replace with a direct conversion instruction when the
// receiver is a Smi (or a Mint, if int64->double conversion is supported).
 979 if (CanUnboxDouble() &&
 980 (recognized_kind == MethodRecognizer::kIntegerToDouble)) {
 981 if (receiver_cid == kSmiCid) {
 982 AddReceiverCheck(call);
 983 ReplaceCall(call,
 984 new (Z) SmiToDoubleInstr(new (Z) Value(call->ArgumentAt(0)),
 985 call->source()));
 986 return true;
 987 } else if ((receiver_cid == kMintCid) && CanConvertInt64ToDouble()) {
 988 AddReceiverCheck(call);
 989 ReplaceCall(call,
 990 new (Z) Int64ToDoubleInstr(new (Z) Value(call->ArgumentAt(0)),
 991 call->deopt_id()));
 992 return true;
 993 }
 994 }
 995
 996 if (receiver_cid == kDoubleCid) {
 997 if (!CanUnboxDouble()) {
 998 return false;
 999 }
 1000 switch (recognized_kind) {
 1001 case MethodRecognizer::kDoubleToInteger: {
 1002 AddReceiverCheck(call);
 1003 ASSERT(call->HasICData());
 1004 const ICData& ic_data = *call->ic_data();
 1005 Definition* input = call->ArgumentAt(0);
 1006 Definition* d2i_instr = nullptr;
 1007 if (ic_data.HasDeoptReason(ICData::kDeoptDoubleToSmi)) {
 1008 // Do not repeatedly deoptimize because result didn't fit into Smi.
 1009 d2i_instr = new (Z) DoubleToIntegerInstr(
 1010 new (Z) Value(input), recognized_kind, call->deopt_id());
 1011 } else {
 1012 // Optimistically assume result fits into Smi.
 1013 d2i_instr =
 1014 new (Z) DoubleToSmiInstr(new (Z) Value(input), call->deopt_id());
 1015 }
 1016 ReplaceCall(call, d2i_instr);
 1017 return true;
 1018 }
 1019 default:
 1020 break;
 1021 }
 1022 }
 1023
 1024 return TryReplaceInstanceCallWithInline(flow_graph_, current_iterator(), call,
// NOTE(review): original line 1025 is missing — it carried the final
// argument(s) of TryReplaceInstanceCallWithInline (presumably the
// speculative inlining policy); confirm against upstream.
 1026}
1027
1028// If type tests specified by 'ic_data' do not depend on type arguments,
1029// return mapping cid->result in 'results' (i : cid; i + 1: result).
1030// If all tests yield the same result, return it otherwise return Bool::null.
1031// If no mapping is possible, 'results' has less than
1032// (ic_data.NumberOfChecks() * 2) entries
1033// An instance-of test returning all same results can be converted to a class
1034// check.
1035BoolPtr CallSpecializer::InstanceOfAsBool(
1036 const ICData& ic_data,
1037 const AbstractType& type,
1038 ZoneGrowableArray<intptr_t>* results) const {
1039 ASSERT(results->is_empty());
1040 ASSERT(ic_data.NumArgsTested() == 1); // Unary checks only.
// Function/record/uninstantiated types cannot be decided by cid alone.
1041 if (type.IsFunctionType() || type.IsDartFunctionType() ||
1042 type.IsRecordType() || !type.IsInstantiated()) {
1043 return Bool::null();
1044 }
1045 const Class& type_class = Class::Handle(Z, type.type_class());
1046 const intptr_t num_type_args = type_class.NumTypeArguments();
1047 if (num_type_args > 0) {
1048 // Only raw types can be directly compared, thus disregarding type
1049 // arguments.
1050 const TypeArguments& type_arguments =
1051 TypeArguments::Handle(Z, Type::Cast(type).arguments());
1052 const bool is_raw_type = type_arguments.IsNull() ||
1053 type_arguments.IsRaw(0, type_arguments.Length());
1054 if (!is_raw_type) {
1055 // Unknown result.
1056 return Bool::null();
1057 }
1058 }
1059
1060 const ClassTable& class_table = *IG->class_table();
1061 Bool& prev = Bool::Handle(Z);
1062 Class& cls = Class::Handle(Z);
1063
// Walk every receiver cid recorded at the call site and compute the subtype
// answer per cid, tracking whether all answers agree.
1064 bool results_differ = false;
1065 const intptr_t number_of_checks = ic_data.NumberOfChecks();
1066 for (int i = 0; i < number_of_checks; i++) {
1067 cls = class_table.At(ic_data.GetReceiverClassIdAt(i));
1068 if (cls.NumTypeArguments() > 0) {
1069 return Bool::null();
1070 }
1071 bool is_subtype = false;
1072 if (cls.IsNullClass()) {
1073 // 'null' is an instance of Null, Object*, Never*, void, and dynamic.
1074 // In addition, 'null' is an instance of any nullable type.
1075 // It is also an instance of FutureOr<T> if it is an instance of T.
1076 const AbstractType& unwrapped_type =
1077 AbstractType::Handle(type.UnwrapFutureOr());
1078 ASSERT(unwrapped_type.IsInstantiated());
1079 is_subtype = unwrapped_type.IsTopTypeForInstanceOf() ||
1080 unwrapped_type.IsNullable() ||
1081 (unwrapped_type.IsLegacy() && unwrapped_type.IsNeverType());
1082 } else {
1083 is_subtype =
1084 Class::IsSubtypeOf(cls, Object::null_type_arguments(),
// NOTE(review): original line 1085 is missing — it carried the remaining
// arguments of Class::IsSubtypeOf (presumably the tested `type` and
// nullability/heap parameters); confirm against upstream.
1086 }
1087 results->Add(cls.id());
1088 results->Add(static_cast<intptr_t>(is_subtype));
1089 if (prev.IsNull()) {
1090 prev = Bool::Get(is_subtype).ptr();
1091 } else {
1092 if (is_subtype != prev.value()) {
1093 results_differ = true;
1094 }
1095 }
1096 }
1097 return results_differ ? Bool::null() : prev.ptr();
1098}
1099
1100// Returns true if checking against this type is a direct class id comparison.
1101bool CallSpecializer::TypeCheckAsClassEquality(const AbstractType& type,
1102 intptr_t* type_cid) {
1103 *type_cid = kIllegalCid;
1104 ASSERT(type.IsFinalized());
1105 // Requires CHA.
1106 if (!type.IsInstantiated()) return false;
1107 // Function and record types have different type checking rules.
1108 if (type.IsFunctionType() || type.IsRecordType()) return false;
1109
1110 const Class& type_class = Class::Handle(type.type_class());
1111 if (!CHA::HasSingleConcreteImplementation(type_class, type_cid)) {
1112 return false;
1113 }
1114
1115 const intptr_t num_type_args = type_class.NumTypeArguments();
1116 if (num_type_args > 0) {
1117 // Only raw types can be directly compared, thus disregarding type
1118 // arguments.
1119 const TypeArguments& type_arguments =
1120 TypeArguments::Handle(Type::Cast(type).arguments());
1121 const bool is_raw_type = type_arguments.IsNull() ||
1122 type_arguments.IsRaw(0, type_arguments.Length());
1123 if (!is_raw_type) {
1124 return false;
1125 }
1126 }
1127 if (type.IsNullable() || type.IsTopTypeForInstanceOf() ||
1128 type.IsNeverType()) {
1129 // A class id check is not sufficient, since a null instance also satisfies
1130 // the test against a nullable type.
1131 // TODO(regis): Add a null check in addition to the class id check?
1132 return false;
1133 }
1134 return true;
1135}
1136
// NOTE(review): original line 1137 (the function name/signature line) is
// missing from this listing; only the parameter list below survives. In this
// (JIT) CallSpecializer the optimization is unconditionally disabled, per
// the TODO.
1138 InstanceCallInstr* call,
1139 const AbstractType& type) {
1140 // TODO(dartbug.com/30632) does this optimization make sense in JIT?
1141 return false;
1142}
1143
// Attempts to fold 'receiver is type' using only static type information:
// either to a constant true, or to a strict null comparison.
1144bool CallSpecializer::TryOptimizeInstanceOfUsingStaticTypes(
1145 InstanceCallInstr* call,
1146 const AbstractType& type) {
1147 ASSERT(Token::IsTypeTestOperator(call->token_kind()));
1148 if (!type.IsInstantiated()) {
1149 return false;
1150 }
1151
// If the receiver's static type already guarantees the test succeeds, the
// whole call folds to the constant true.
1152 Value* left_value = call->Receiver();
1153 if (left_value->Type()->IsInstanceOf(type)) {
1154 ConstantInstr* replacement = flow_graph()->GetConstant(Bool::True());
1155 call->ReplaceUsesWith(replacement);
1156 ASSERT(current_iterator()->Current() == call);
// NOTE(review): original line 1157 is missing — presumably the removal of
// the now-unused call from the graph via the iterator; confirm upstream.
1158 return true;
1159 }
1160
1161 // The goal is to emit code that will determine the result of 'x is type'
1162 // depending solely on the fact that x == null or not.
1163 // Checking whether the receiver is null can only help if the tested type is
1164 // non-nullable or legacy (including Never*) or the Null type.
1165 // Also, testing receiver for null cannot help with FutureOr.
1166 if ((type.IsNullable() && !type.IsNullType()) || type.IsFutureOrType()) {
1167 return false;
1168 }
1169
1170 // If type is Null or Never*, or the static type of the receiver is a
1171 // subtype of the tested type, replace 'receiver is type' with
1172 // - 'receiver == null' if type is Null or Never*,
1173 // - 'receiver != null' otherwise.
1174 if (type.IsNullType() || (type.IsNeverType() && type.IsLegacy()) ||
1175 left_value->Type()->IsSubtypeOf(type)) {
1176 Definition* replacement = new (Z) StrictCompareInstr(
1177 call->source(),
1178 (type.IsNullType() || (type.IsNeverType() && type.IsLegacy()))
1179 ? Token::kEQ_STRICT
1180 : Token::kNE_STRICT,
1181 left_value->CopyWithType(Z),
1182 new (Z) Value(flow_graph()->constant_null()),
1183 /* number_check = */ false, DeoptId::kNone);
1184 if (FLAG_trace_strong_mode_types) {
1185 THR_Print("[Strong mode] replacing %s with %s (%s < %s)\n",
1186 call->ToCString(), replacement->ToCString(),
1187 left_value->Type()->ToAbstractType()->ToCString(),
1188 type.ToCString());
1189 }
1190 ReplaceCall(call, replacement);
1191 return true;
1192 }
1193
1194 return false;
1195}
1196
// NOTE(review): this listing is missing original lines 1197 (the function
// signature — presumably CallSpecializer::ReplaceWithInstanceOf taking an
// InstanceCallInstr*), 1202 (presumably the declaration of the local handle
// `type`), 1230, 1235, 1243 and 1269; confirm each against upstream.
1198 ASSERT(Token::IsTypeTestOperator(call->token_kind()));
1199 Definition* left = call->ArgumentAt(0);
1200 Definition* instantiator_type_args = nullptr;
1201 Definition* function_type_args = nullptr;
1203 ASSERT(call->type_args_len() == 0);
// Two argument shapes: _simpleInstanceOf(receiver, type) with implicit null
// type arguments, or the full 4-argument instance-of call.
1204 if (call->ArgumentCount() == 2) {
1205 instantiator_type_args = flow_graph()->constant_null();
1206 function_type_args = flow_graph()->constant_null();
1207 ASSERT(call->MatchesCoreName(Symbols::_simpleInstanceOf()));
1208 type = AbstractType::Cast(call->ArgumentAt(1)->AsConstant()->value()).ptr();
1209 } else {
1210 ASSERT(call->ArgumentCount() == 4);
1211 instantiator_type_args = call->ArgumentAt(1);
1212 function_type_args = call->ArgumentAt(2);
1213 type = AbstractType::Cast(call->ArgumentAt(3)->AsConstant()->value()).ptr();
1214 }
1215
1216 if (TryOptimizeInstanceOfUsingStaticTypes(call, type)) {
1217 return;
1218 }
1219
// If the type test reduces to a single cid comparison, emit a class-id load
// plus an equality compare instead of a full instance-of.
1220 intptr_t type_cid;
1221 if (TypeCheckAsClassEquality(type, &type_cid)) {
1222 LoadClassIdInstr* load_cid =
1223 new (Z) LoadClassIdInstr(new (Z) Value(left), kUnboxedUword);
1224 InsertBefore(call, load_cid, nullptr, FlowGraph::kValue);
1225 ConstantInstr* constant_cid = flow_graph()->GetConstant(
1226 Smi::Handle(Z, Smi::New(type_cid)), kUnboxedUword);
1227 EqualityCompareInstr* check_cid = new (Z) EqualityCompareInstr(
1228 call->source(), Token::kEQ, new Value(load_cid),
1229 new Value(constant_cid), kIntegerCid, DeoptId::kNone, false,
1231 ReplaceCall(call, check_cid);
1232 return;
1233 }
1234
1236 return;
1237 }
1238
// With a small number of observed receiver classes, try to fold the test to
// per-cid results (TestCidsInstr) or even a single boolean constant.
1239 const ICData& unary_checks =
1240 ICData::ZoneHandle(Z, call->ic_data()->AsUnaryClassChecks());
1241 const intptr_t number_of_checks = unary_checks.NumberOfChecks();
1242 if (number_of_checks > 0 && number_of_checks <= FLAG_max_polymorphic_checks) {
1244 new (Z) ZoneGrowableArray<intptr_t>(number_of_checks * 2);
1245 const Bool& as_bool =
1246 Bool::ZoneHandle(Z, InstanceOfAsBool(unary_checks, type, results));
1247 if (as_bool.IsNull() || CompilerState::Current().is_aot()) {
1248 if (results->length() == number_of_checks * 2) {
1249 const bool can_deopt = SpecializeTestCidsForNumericTypes(results, type);
1250 if (can_deopt &&
1251 !speculative_policy_->IsAllowedForInlining(call->deopt_id())) {
1252 // Guard against repeated speculative inlining.
1253 return;
1254 }
1255 TestCidsInstr* test_cids = new (Z) TestCidsInstr(
1256 call->source(), Token::kIS, new (Z) Value(left), *results,
1257 can_deopt ? call->deopt_id() : DeoptId::kNone);
1258 // Remove type.
1259 ReplaceCall(call, test_cids);
1260 return;
1261 }
1262 } else {
1263 // One result only.
1264 AddReceiverCheck(call);
1265 ConstantInstr* bool_const = flow_graph()->GetConstant(as_bool);
1266 ASSERT(!call->HasMoveArguments());
1267 call->ReplaceUsesWith(bool_const);
1268 ASSERT(current_iterator()->Current() == call);
1270 return;
1271 }
1272 }
1273
// Fallback: emit the generic InstanceOf instruction.
1274 InstanceOfInstr* instance_of = new (Z) InstanceOfInstr(
1275 call->source(), new (Z) Value(left),
1276 new (Z) Value(instantiator_type_args), new (Z) Value(function_type_args),
1277 type, call->deopt_id());
1278 ReplaceCall(call, instance_of);
1279}
1280
// NOTE(review): this listing is missing original lines 1281 (the function
// signature — presumably CallSpecializer::VisitStaticCall taking a
// StaticCallInstr*), 1283 (continuation of the TryReplaceStaticCallWithInline
// argument list), 1334 (presumably a CanConvertInt64ToDouble() condition)
// and 1350 (the guard around the final return); confirm against upstream.
1282 if (TryReplaceStaticCallWithInline(flow_graph_, current_iterator(), call,
1284 return;
1285 }
1286
1287 if (speculative_policy_->IsAllowedForInlining(call->deopt_id())) {
1288 // Only if speculative inlining is enabled.
1289
1290 MethodRecognizer::Kind recognized_kind = call->function().recognized_kind();
1291 const CallTargets& targets = call->Targets();
1292 const BinaryFeedback& binary_feedback = call->BinaryFeedback();
1293
1294 switch (recognized_kind) {
1295 case MethodRecognizer::kMathMin:
1296 case MethodRecognizer::kMathMax: {
1297 // We can handle only monomorphic min/max call sites with both arguments
1298 // being either doubles or smis.
1299 if (CanUnboxDouble() && targets.IsMonomorphic() &&
1300 (call->FirstArgIndex() == 0)) {
1301 intptr_t result_cid = kIllegalCid;
1302 if (binary_feedback.IncludesOperands(kDoubleCid)) {
1303 result_cid = kDoubleCid;
1304 } else if (binary_feedback.IncludesOperands(kSmiCid)) {
1305 result_cid = kSmiCid;
1306 }
1307 if (result_cid != kIllegalCid) {
1308 MathMinMaxInstr* min_max = new (Z) MathMinMaxInstr(
1309 recognized_kind, new (Z) Value(call->ArgumentAt(0)),
1310 new (Z) Value(call->ArgumentAt(1)), call->deopt_id(),
1311 result_cid);
// Both operands must be guarded to the single cid the feedback reported.
1312 const Cids* cids = Cids::CreateMonomorphic(Z, result_cid);
1313 AddCheckClass(min_max->left()->definition(), *cids,
1314 call->deopt_id(), call->env(), call);
1315 AddCheckClass(min_max->right()->definition(), *cids,
1316 call->deopt_id(), call->env(), call);
1317 ReplaceCall(call, min_max);
1318 return;
1319 }
1320 }
1321 break;
1322 }
1323 case MethodRecognizer::kDoubleFromInteger: {
1324 if (call->HasICData() && targets.IsMonomorphic() &&
1325 (call->FirstArgIndex() == 0)) {
1326 if (CanUnboxDouble()) {
1327 if (binary_feedback.ArgumentIs(kSmiCid)) {
1328 Definition* arg = call->ArgumentAt(1);
1329 AddCheckSmi(arg, call->deopt_id(), call->env(), call);
1330 ReplaceCall(call, new (Z) SmiToDoubleInstr(new (Z) Value(arg),
1331 call->source()));
1332 return;
1333 } else if (binary_feedback.ArgumentIs(kMintCid) &&
1335 Definition* arg = call->ArgumentAt(1);
1336 ReplaceCall(call, new (Z) Int64ToDoubleInstr(new (Z) Value(arg),
1337 call->deopt_id()));
1338 return;
1339 }
1340 }
1341 }
1342 break;
1343 }
1344
1345 default:
1346 break;
1347 }
1348 }
1349
1351 return;
1352 }
1353}
1354
// NOTE(review): original line 1355 (the function signature) is missing from
// this listing — presumably a visitor for a load-code-units instruction
// (`instr`); confirm against upstream.
1356// TODO(zerny): Use kUnboxedUint32 once it is fully supported/optimized.
1357#if defined(TARGET_ARCH_IA32) || defined(TARGET_ARCH_ARM)
// On these 32-bit targets, fall back to an int64 representation whenever the
// loaded value cannot be packed into a Smi.
1358 if (!instr->can_pack_into_smi()) instr->set_representation(kUnboxedInt64);
1359#endif
1360}
1361
// NOTE(review): original line 1362 (the first signature line, presumably
// `static bool CidTestResultsContains(const ZoneGrowableArray<intptr_t>&
// results,`) is missing from this listing; confirm against upstream.
// Scans the (cid, result) pair array at even indices for `test_cid`.
1363 intptr_t test_cid) {
1364 for (intptr_t i = 0; i < results.length(); i += 2) {
1365 if (results[i] == test_cid) return true;
1366 }
1367 return false;
1368}
1369
// NOTE(review): original line 1370 (the first signature line, presumably
// `static void TryAddTest(ZoneGrowableArray<intptr_t>* results,`) is missing
// from this listing; confirm against upstream.
// Appends the (test_cid, result) pair unless test_cid is already present.
1371 intptr_t test_cid,
1372 bool result) {
1373 if (!CidTestResultsContains(*results, test_cid)) {
1374 results->Add(test_cid);
1375 results->Add(static_cast<intptr_t>(result));
1376 }
1377}
1378
1379// Used when we only need the positive result because we return false by
1380// default.
// NOTE(review): original line 1381 (the function signature, presumably
// taking ZoneGrowableArray<intptr_t>* `results`) is missing from this
// listing; confirm against upstream.
// Compacts the (cid, result) pair array in place, keeping only pairs whose
// result is true — except the leading Smi pair, which must stay.
1382 // We can't purge the Smi entry at the beginning since it is used in the
1383 // Smi check before the Cid is loaded.
1384 int dest = 2;
1385 for (intptr_t i = 2; i < results->length(); i += 2) {
1386 if (results->At(i + 1) != 0) {
1387 (*results)[dest++] = results->At(i);
1388 (*results)[dest++] = results->At(i + 1);
1389 }
1390 }
1391 results->SetLength(dest);
1392}
1393
// NOTE(review): this listing is missing original lines 1403 (continuation of
// the Class::IsSubtypeOf argument list) and 1417, 1423, 1429, 1434 —
// presumably calls purging negative entries after each fully-identified
// numeric type; confirm each against upstream.
1394bool CallSpecializer::SpecializeTestCidsForNumericTypes(
1395 ZoneGrowableArray<intptr_t>* results,
1396 const AbstractType& type) {
1397 ASSERT(results->length() >= 2); // At least on entry.
1398 const ClassTable& class_table = *IsolateGroup::Current()->class_table();
// Ensure the Smi pair sits at index 0/1: compute Smi's subtype answer, grow
// the array by one pair, shift everything right, and install Smi in front.
1399 if ((*results)[0] != kSmiCid) {
1400 const Class& smi_class = Class::Handle(class_table.At(kSmiCid));
1401 const bool smi_is_subtype =
1402 Class::IsSubtypeOf(smi_class, Object::null_type_arguments(),
1404 results->Add((*results)[results->length() - 2]);
1405 results->Add((*results)[results->length() - 2]);
1406 for (intptr_t i = results->length() - 3; i > 1; --i) {
1407 (*results)[i] = (*results)[i - 2];
1408 }
1409 (*results)[0] = kSmiCid;
1410 (*results)[1] = static_cast<intptr_t>(smi_is_subtype);
1411 }
1412
1413 ASSERT(type.IsInstantiated());
1414 ASSERT(results->length() >= 2);
// For int/num/double tests every true-returning cid is known, so the test
// cannot deoptimize (return false); anything else may deoptimize.
1415 if (type.IsSmiType()) {
1416 ASSERT((*results)[0] == kSmiCid);
1418 return false;
1419 } else if (type.IsIntType()) {
1420 ASSERT((*results)[0] == kSmiCid);
1421 TryAddTest(results, kMintCid, true);
1422 // Cannot deoptimize since all tests returning true have been added.
1424 return false;
1425 } else if (type.IsNumberType()) {
1426 ASSERT((*results)[0] == kSmiCid);
1427 TryAddTest(results, kMintCid, true);
1428 TryAddTest(results, kDoubleCid, true);
1430 return false;
1431 } else if (type.IsDoubleType()) {
1432 ASSERT((*results)[0] == kSmiCid);
1433 TryAddTest(results, kDoubleCid, true);
1435 return false;
1436 }
1437 return true; // May deoptimize since we have not identified all 'true' tests.
1438}
1439
// NOTE(review): original line 1440 (the function signature, presumably a
// static TypedDataSpecializer entry point taking FlowGraph* `flow_graph`)
// is missing from this listing; confirm against upstream.
// Runs the typed-data specializer over every block of the flow graph.
1441 TypedDataSpecializer optimizer(flow_graph);
1442 optimizer.VisitBlocks();
1443}
1444
// Lazily caches int/double types and the RareType of each dart:typed_data
// interface class (via the INIT_HANDLE macro) on first use.
1445void TypedDataSpecializer::EnsureIsInitialized() {
1446 if (initialized_) return;
1447
1448 initialized_ = true;
1449
1450 int_type_ = Type::IntType();
1451 double_type_ = Type::Double();
1452
1453 const auto& typed_data = Library::Handle(
1454 Z, Library::LookupLibrary(thread_, Symbols::DartTypedData()));
1455
1456 auto& td_class = Class::Handle(Z);
1457 auto& direct_implementors = GrowableObjectArray::Handle(Z);
// Reading class metadata requires holding the program lock.
1458 SafepointReadRwLocker ml(thread_, thread_->isolate_group()->program_lock());
1459
1460#define INIT_HANDLE(iface, member_name, type, cid) \
1461 td_class = typed_data.LookupClass(Symbols::iface()); \
1462 ASSERT(!td_class.IsNull()); \
1463 direct_implementors = td_class.direct_implementors(); \
1464 member_name = td_class.RareType();
1465
// NOTE(review): original line 1466 is missing — presumably the macro
// invocation expanding INIT_HANDLE over the typed-data class list; confirm
// against upstream.
1467#undef INIT_HANDLE
1468}
1469
// NOTE(review): original line 1470 (the function signature, presumably a
// TypedDataSpecializer visitor for instance calls taking `call`) is missing
// from this listing; confirm against upstream.
1471 TryInlineCall(call);
1472}
1473
// NOTE(review): original line 1474 (the function signature, presumably a
// TypedDataSpecializer visitor for static calls taking `call`) is missing
// from this listing; confirm against upstream.
// Only instance-style (non-static) targets are candidates, and they must
// carry a receiver argument.
1475 const Function& function = call->function();
1476 if (!function.is_static()) {
1477 ASSERT(call->ArgumentCount() > 0);
1478 TryInlineCall(call);
1479 }
1480}
1481
// Dispatches length/[]/[]= selectors on typed-data receivers to the
// specialized replacement helpers, via the TRY_INLINE macro per interface.
1482void TypedDataSpecializer::TryInlineCall(TemplateDartCall<0>* call) {
1483 const bool is_length_getter = call->Selector() == Symbols::GetLength().ptr();
1484 const bool is_index_get = call->Selector() == Symbols::IndexToken().ptr();
1485 const bool is_index_set =
1486 call->Selector() == Symbols::AssignIndexToken().ptr();
1487
1488 if (is_length_getter || is_index_get || is_index_set) {
1489 EnsureIsInitialized();
1490
1491 const intptr_t receiver_index = call->FirstArgIndex();
1492
1493 CompileType* receiver_type =
1494 call->ArgumentValueAt(receiver_index + 0)->Type();
1495
1496 CompileType* index_type = nullptr;
1497 if (is_index_get || is_index_set) {
1498 index_type = call->ArgumentValueAt(receiver_index + 1)->Type();
1499 }
1500
1501 CompileType* value_type = nullptr;
1502 if (is_index_set) {
1503 value_type = call->ArgumentValueAt(receiver_index + 2)->Type();
1504 }
1505
1506 auto& type_class = Class::Handle(zone_);
1507#define TRY_INLINE(iface, member_name, type, cid) \
1508 if (!member_name.IsNull()) { \
1509 auto const rep = RepresentationUtils::RepresentationOfArrayElement(cid); \
1510 const bool is_float_access = \
1511 rep == kUnboxedFloat || rep == kUnboxedDouble; \
1512 if (receiver_type->IsAssignableTo(member_name)) { \
1513 if (is_length_getter) { \
1514 type_class = member_name.type_class(); \
1515 ReplaceWithLengthGetter(call); \
1516 } else if (is_index_get) { \
1517 if (is_float_access && !FlowGraphCompiler::SupportsUnboxedDoubles()) { \
1518 return; \
1519 } \
1520 if (!index_type->IsNullableInt()) return; \
1521 type_class = member_name.type_class(); \
1522 ReplaceWithIndexGet(call, cid); \
1523 } else { \
1524 if (is_float_access && !FlowGraphCompiler::SupportsUnboxedDoubles()) { \
1525 return; \
1526 } \
1527 if (!index_type->IsNullableInt()) return; \
1528 if (!value_type->IsAssignableTo(type)) return; \
1529 type_class = member_name.type_class(); \
1530 ReplaceWithIndexSet(call, cid); \
1531 } \
1532 return; \
1533 } \
1534 }
// NOTE(review): original line 1535 is missing — presumably the macro
// invocation expanding TRY_INLINE over the typed-data class list; confirm
// against upstream. Also NOTE(review): the #undef below names INIT_HANDLE,
// but the macro defined above is TRY_INLINE, which is left defined past this
// point — confirm whether this is intentional.
1536#undef INIT_HANDLE
1537 }
1538}
1539
// Replaces a typed-data length-getter call with a direct load of the length
// field, null-checking the receiver first if its type admits null.
1540void TypedDataSpecializer::ReplaceWithLengthGetter(TemplateDartCall<0>* call) {
1541 const intptr_t receiver_idx = call->FirstArgIndex();
1542 auto array = call->ArgumentAt(receiver_idx + 0);
1543
1544 if (array->Type()->is_nullable()) {
1545 AppendNullCheck(call, &array);
1546 }
1547 Definition* length = AppendLoadLength(call, array);
1548 flow_graph_->ReplaceCurrentInstruction(current_iterator(), call, length);
// NOTE(review): original line 1549 is missing — presumably a
// RefineUseTypes(length) call, by analogy with ReplaceWithIndexGet below;
// confirm against upstream.
1550}
1551
1552void TypedDataSpecializer::ReplaceWithIndexGet(TemplateDartCall<0>* call,
1553 classid_t cid) {
1554 const intptr_t receiver_idx = call->FirstArgIndex();
1555 auto array = call->ArgumentAt(receiver_idx + 0);
1556 auto index = call->ArgumentAt(receiver_idx + 1);
1557
1558 if (array->Type()->is_nullable()) {
1559 AppendNullCheck(call, &array);
1560 }
1561 if (index->Type()->is_nullable()) {
1562 AppendNullCheck(call, &index);
1563 }
1564 AppendBoundsCheck(call, array, &index);
1565 Definition* value = AppendLoadIndexed(call, array, index, cid);
1566 flow_graph_->ReplaceCurrentInstruction(current_iterator(), call, value);
1567 RefineUseTypes(value);
1568}
1569
1570void TypedDataSpecializer::ReplaceWithIndexSet(TemplateDartCall<0>* call,
1571 classid_t cid) {
1572 const intptr_t receiver_idx = call->FirstArgIndex();
1573 auto array = call->ArgumentAt(receiver_idx + 0);
1574 auto index = call->ArgumentAt(receiver_idx + 1);
1575 auto value = call->ArgumentAt(receiver_idx + 2);
1576
1577 if (array->Type()->is_nullable()) {
1578 AppendNullCheck(call, &array);
1579 }
1580 if (index->Type()->is_nullable()) {
1581 AppendNullCheck(call, &index);
1582 }
1583 if (value->Type()->is_nullable()) {
1584 AppendNullCheck(call, &value);
1585 }
1586 AppendMutableCheck(call, &array);
1587 AppendBoundsCheck(call, array, &index);
1588 AppendStoreIndexed(call, array, index, value, cid);
1589
1590 RELEASE_ASSERT(!call->HasUses());
1591 flow_graph_->ReplaceCurrentInstruction(current_iterator(), call, nullptr);
1592}
1593
1594void TypedDataSpecializer::AppendNullCheck(TemplateDartCall<0>* call,
1595 Definition** value) {
1596 auto check =
1597 new (Z) CheckNullInstr(new (Z) Value(*value), Symbols::OptimizedOut(),
1598 call->deopt_id(), call->source());
1599 flow_graph_->InsertBefore(call, check, call->env(), FlowGraph::kValue);
1600
1601 // Use data dependency as control dependency.
1602 *value = check;
1603}
1604
1605void TypedDataSpecializer::AppendMutableCheck(TemplateDartCall<0>* call,
1606 Definition** value) {
1607 auto check = new (Z) CheckWritableInstr(new (Z) Value(*value),
1608 call->deopt_id(), call->source());
1609 flow_graph_->InsertBefore(call, check, call->env(), FlowGraph::kValue);
1610
1611 // Use data dependency as control dependency.
1612 *value = check;
1613}
1614
1615void TypedDataSpecializer::AppendBoundsCheck(TemplateDartCall<0>* call,
1616 Definition* array,
1617 Definition** index) {
1618 auto length = new (Z) LoadFieldInstr(
1619 new (Z) Value(array), Slot::TypedDataBase_length(), call->source());
1620 flow_graph_->InsertBefore(call, length, call->env(), FlowGraph::kValue);
1621
1622 auto check = new (Z) GenericCheckBoundInstr(
1623 new (Z) Value(length), new (Z) Value(*index), DeoptId::kNone);
1624 flow_graph_->InsertBefore(call, check, call->env(), FlowGraph::kValue);
1625
1626 // Use data dependency as control dependency.
1627 *index = check;
1628}
1629
1630Definition* TypedDataSpecializer::AppendLoadLength(TemplateDartCall<0>* call,
1631 Definition* array) {
1632 auto length = new (Z) LoadFieldInstr(
1633 new (Z) Value(array), Slot::TypedDataBase_length(), call->source());
1634 flow_graph_->InsertBefore(call, length, call->env(), FlowGraph::kValue);
1635 return length;
1636}
1637
// NOTE(review): this listing is missing original lines 1642 (presumably the
// definition of `element_size` for `cid`) and 1644 (presumably the
// definition of `rep`, the element representation used below); confirm
// against upstream.
// Inserts a LoadIndexed before [call]; float loads are widened to double.
1638Definition* TypedDataSpecializer::AppendLoadIndexed(TemplateDartCall<0>* call,
1639 Definition* array,
1640 Definition* index,
1641 classid_t cid) {
1643 const intptr_t index_scale = element_size;
1645
1646 Definition* load = new (Z) LoadIndexedInstr(
1647 new (Z) Value(array), new (Z) Value(index), /*index_unboxed=*/false,
1648 index_scale, cid, kAlignedAccess, call->deopt_id(), call->source());
1649 flow_graph_->InsertBefore(call, load, call->env(), FlowGraph::kValue);
1650
1651 if (rep == kUnboxedFloat) {
1652 load = new (Z) FloatToDoubleInstr(new (Z) Value(load), call->deopt_id());
1653 flow_graph_->InsertBefore(call, load, call->env(), FlowGraph::kValue);
1654 }
1655
1656 return load;
1657}
1658
// NOTE(review): this listing is missing original lines 1664 (presumably the
// `element_size` definition), 1666 (presumably the `rep` definition), 1670
// (presumably the `if` testing for an unboxed-integer representation), and
// 1674/1678/1685-1686 (argument-list continuations); confirm each against
// upstream.
// Unboxes/narrows the value as required by the element representation, then
// inserts a StoreIndexed (no store barrier needed for typed data) before
// [call].
1659void TypedDataSpecializer::AppendStoreIndexed(TemplateDartCall<0>* call,
1660 Definition* array,
1661 Definition* index,
1662 Definition* value,
1663 classid_t cid) {
1665 const intptr_t index_scale = element_size;
1667
1668 const auto deopt_id = call->deopt_id();
1669
1671 // Insert explicit unboxing instructions with truncation to avoid relying
1672 // on [SelectRepresentations] which doesn't mark them as truncating.
1673 value = UnboxInstr::Create(rep, new (Z) Value(value), deopt_id,
1675 flow_graph_->InsertBefore(call, value, call->env(), FlowGraph::kValue);
1676 } else if (rep == kUnboxedFloat) {
1677 value = new (Z) DoubleToFloatInstr(new (Z) Value(value), deopt_id,
1679 flow_graph_->InsertBefore(call, value, call->env(), FlowGraph::kValue);
1680 }
1681
1682 auto store = new (Z) StoreIndexedInstr(
1683 new (Z) Value(array), new (Z) Value(index), new (Z) Value(value),
1684 kNoStoreBarrier, /*index_unboxed=*/false, index_scale, cid,
1687 flow_graph_->InsertBefore(call, store, call->env(), FlowGraph::kEffect);
1688}
1689
// NOTE(review): original line 1690 (the function signature) is missing from
// this listing; the surviving body shows a deliberately empty JIT-side stub.
1691 // Only implemented for AOT.
1692}
1693
1694// Test and obtain Smi value.
1695static bool IsSmiValue(Value* val, intptr_t* int_val) {
1696 if (val->BindsToConstant() && val->BoundConstant().IsSmi()) {
1697 *int_val = Smi::Cast(val->BoundConstant()).Value();
1698 return true;
1699 }
1700 return false;
1701}
1702
1703// Helper to get result type from call (or nullptr otherwise).
// NOTE(review): original line 1704 (the function signature — presumably a
// static helper taking the call instruction and returning its inferred
// result type) is missing from this listing; confirm against upstream.
1705 if (auto static_call = call->AsStaticCall()) {
1706 return static_call->result_type();
1707 } else if (auto instance_call = call->AsInstanceCall()) {
1708 return instance_call->result_type();
1709 }
1710 return nullptr;
1711}
1712
1713// Quick access to the current one.
1714#undef Z
1715#define Z (flow_graph->zone())
1716
// NOTE(review): this listing is missing original lines 1723 (presumably the
// `result` out-parameter, which is assigned below), 1734 (presumably the
// `if` guarding the index null-check) and 1743 (continuation of the
// AppendTo argument list); confirm each against upstream.
// Builds an inline graph that null-checks (when needed) and bounds-checks
// the index argument of a typed-data index-check intrinsic.
1717static bool InlineTypedDataIndexCheck(FlowGraph* flow_graph,
1718 Instruction* call,
1719 Definition* receiver,
1720 GraphEntryInstr* graph_entry,
1721 FunctionEntryInstr** entry,
1722 Instruction** last,
1724 const String& symbol) {
1725 *entry =
1726 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
1727 call->GetBlock()->try_index(), DeoptId::kNone)
;
1728 (*entry)->InheritDeoptTarget(Z, call);
1729 Instruction* cursor = *entry;
1730
1731 Definition* index = call->ArgumentAt(1);
1732 Definition* length = call->ArgumentAt(2);
1733
1735 // Add a null-check in case the index argument is known to be compatible
1736 // but possibly nullable. We don't need to do the same for length
1737 // because all callers in typed_data_patch.dart retrieve the length
1738 // from the typed data object.
1739 auto* const null_check =
1740 new (Z) CheckNullInstr(new (Z) Value(index), symbol, call->deopt_id(),
1741 call->source(), CheckNullInstr::kArgumentError);
1742 cursor = flow_graph->AppendTo(cursor, null_check, call->env(),
1744 }
1745 index = flow_graph->CreateCheckBound(length, index, call->deopt_id());
1746 cursor = flow_graph->AppendTo(cursor, index, call->env(), FlowGraph::kValue);
1747
1748 *last = cursor;
1749 *result = index;
1750 return true;
1751}
1752
// NOTE(review): this listing is missing original lines 1760 (presumably the
// declaration of the `length` LoadFieldInstr consumed below) and 1781
// (continuation of the LoadFieldInstr argument list); confirm each against
// upstream.
// Emits length-load + bounds check for the indexed access, then normalizes
// the array: growable arrays are replaced by their backing store (cid
// becomes kArrayCid) and external typed data by its data pointer. Returns
// the possibly-updated array cid.
1753static intptr_t PrepareInlineIndexedOp(FlowGraph* flow_graph,
1754 Instruction* call,
1755 intptr_t array_cid,
1756 Definition** array,
1757 Definition** index,
1758 Instruction** cursor) {
1759 // Insert array length load and bounds check.
1761 new (Z) Value(*array), Slot::GetLengthFieldForArrayCid(array_cid),
1762 call->source());
1763 *cursor = flow_graph->AppendTo(*cursor, length, nullptr, FlowGraph::kValue);
1764 *index = flow_graph->CreateCheckBound(length, *index, call->deopt_id());
1765 *cursor =
1766 flow_graph->AppendTo(*cursor, *index, call->env(), FlowGraph::kValue);
1767
1768 if (array_cid == kGrowableObjectArrayCid) {
1769 // Insert data elements load.
1770 LoadFieldInstr* elements = new (Z)
1771 LoadFieldInstr(new (Z) Value(*array), Slot::GrowableObjectArray_data(),
1772 call->source());
1773 *cursor =
1774 flow_graph->AppendTo(*cursor, elements, nullptr, FlowGraph::kValue);
1775 // Load from the data from backing store which is a fixed-length array.
1776 *array = elements;
1777 array_cid = kArrayCid;
1778 } else if (IsExternalTypedDataClassId(array_cid)) {
1779 auto* const elements = new (Z) LoadFieldInstr(
1780 new (Z) Value(*array), Slot::PointerBase_data(),
1782 *cursor =
1783 flow_graph->AppendTo(*cursor, elements, nullptr, FlowGraph::kValue);
1784 *array = elements;
1785 }
1786 return array_cid;
1787}
1788
// NOTE(review): original line 1792 is missing from this listing — presumably
// the `kind` parameter (a MethodRecognizer kind, used below to derive the
// receiver cid); confirm against upstream.
// Builds the inline graph for an indexed getter: bounds-checked LoadIndexed,
// widening float results to double.
1789static bool InlineGetIndexed(FlowGraph* flow_graph,
1790 bool can_speculate,
1791 bool is_dynamic_call,
1793 Definition* call,
1794 Definition* receiver,
1795 GraphEntryInstr* graph_entry,
1796 FunctionEntryInstr** entry,
1797 Instruction** last,
1798 Definition** result) {
1799 intptr_t array_cid = MethodRecognizer::MethodKindToReceiverCid(kind);
1800
1801 Definition* array = receiver;
1802 Definition* index = call->ArgumentAt(1);
1803
// Without speculation, a dynamic call with a possibly-non-int index cannot
// be inlined safely.
1804 if (!can_speculate && is_dynamic_call && !index->Type()->IsInt()) {
1805 return false;
1806 }
1807
1808 *entry =
1809 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
1810 call->GetBlock()->try_index(), DeoptId::kNone)
;
1811 (*entry)->InheritDeoptTarget(Z, call);
1812 *last = *entry;
1813
1814 array_cid =
1815 PrepareInlineIndexedOp(flow_graph, call, array_cid, &array, &index, last);
1816
1817 // Array load and return.
1818 intptr_t index_scale = compiler::target::Instance::ElementSizeFor(array_cid);
1819 *result = new (Z) LoadIndexedInstr(
1820 new (Z) Value(array), new (Z) Value(index),
1821 /*index_unboxed=*/false, index_scale, array_cid, kAlignedAccess,
1822 call->deopt_id(), call->source(), ResultType(call));
1823 *last = flow_graph->AppendTo(*last, *result, call->env(), FlowGraph::kValue);
1824
1825 if (LoadIndexedInstr::ReturnRepresentation(array_cid) == kUnboxedFloat) {
1826 *result =
1827 new (Z) FloatToDoubleInstr(new (Z) Value(*result), call->deopt_id());
1828 *last =
1829 flow_graph->AppendTo(*last, *result, call->env(), FlowGraph::kValue);
1830 }
1831
1832 return true;
1833}
1834
1835static bool InlineSetIndexed(FlowGraph* flow_graph,
1837 const Function& target,
1838 Instruction* call,
1839 Definition* receiver,
1842 GraphEntryInstr* graph_entry,
1843 FunctionEntryInstr** entry,
1844 Instruction** last,
1845 Definition** result) {
1846 intptr_t array_cid = MethodRecognizer::MethodKindToReceiverCid(kind);
1847 auto const rep = StoreIndexedInstr::ValueRepresentation(array_cid);
1848
1849 Definition* array = receiver;
1850 Definition* index = call->ArgumentAt(1);
1851 Definition* stored_value = call->ArgumentAt(2);
1852
1853 *entry =
1854 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
1855 call->GetBlock()->try_index(), DeoptId::kNone);
1856 (*entry)->InheritDeoptTarget(Z, call);
1857 *last = *entry;
1858
1859 bool is_unchecked_call = false;
1860 if (StaticCallInstr* static_call = call->AsStaticCall()) {
1861 is_unchecked_call =
1862 static_call->entry_kind() == Code::EntryKind::kUnchecked;
1863 } else if (InstanceCallInstr* instance_call = call->AsInstanceCall()) {
1864 is_unchecked_call =
1865 instance_call->entry_kind() == Code::EntryKind::kUnchecked;
1866 } else if (PolymorphicInstanceCallInstr* instance_call =
1867 call->AsPolymorphicInstanceCall()) {
1868 is_unchecked_call =
1869 instance_call->entry_kind() == Code::EntryKind::kUnchecked;
1870 }
1871
1872 if (!is_unchecked_call &&
1873 (kind != MethodRecognizer::kObjectArraySetIndexedUnchecked &&
1874 kind != MethodRecognizer::kGrowableArraySetIndexedUnchecked)) {
1875 // Only type check for the value. A type check for the index is not
1876 // needed here because we insert a deoptimizing smi-check for the case
1877 // the index is not a smi.
1878 const AbstractType& value_type =
1879 AbstractType::ZoneHandle(Z, target.ParameterTypeAt(2));
1880 Definition* type_args = nullptr;
1881 if (rep == kTagged) {
1882 const Class& instantiator_class = Class::Handle(Z, target.Owner());
1883 LoadFieldInstr* load_type_args =
1884 new (Z) LoadFieldInstr(new (Z) Value(array),
1886 flow_graph->thread(), instantiator_class),
1887 call->source());
1888 *last = flow_graph->AppendTo(*last, load_type_args, call->env(),
1890 type_args = load_type_args;
1891 } else if (!RepresentationUtils::IsUnboxed(rep)) {
1892 UNREACHABLE();
1893 } else {
1894 type_args = flow_graph->constant_null();
1895 ASSERT(value_type.IsInstantiated());
1896#if defined(DEBUG)
1897 if (rep == kUnboxedFloat || rep == kUnboxedDouble) {
1898 ASSERT(value_type.IsDoubleType());
1899 } else if (rep == kUnboxedFloat32x4) {
1900 ASSERT(value_type.IsFloat32x4Type());
1901 } else if (rep == kUnboxedInt32x4) {
1902 ASSERT(value_type.IsInt32x4Type());
1903 } else if (rep == kUnboxedFloat64x2) {
1904 ASSERT(value_type.IsFloat64x2Type());
1905 } else {
1907 ASSERT(value_type.IsIntType());
1908 }
1909#endif
1910 }
1911
1912 if (exactness != nullptr && exactness->is_exact) {
1913 exactness->emit_exactness_guard = true;
1914 } else {
1915 auto const function_type_args = flow_graph->constant_null();
1916 auto const dst_type = flow_graph->GetConstant(value_type);
1917 AssertAssignableInstr* assert_value = new (Z) AssertAssignableInstr(
1918 source, new (Z) Value(stored_value), new (Z) Value(dst_type),
1919 new (Z) Value(type_args), new (Z) Value(function_type_args),
1920 Symbols::Value(), call->deopt_id());
1921 *last = flow_graph->AppendSpeculativeTo(*last, assert_value, call->env(),
1923 }
1924 }
1925
1926 array_cid =
1927 PrepareInlineIndexedOp(flow_graph, call, array_cid, &array, &index, last);
1928
1929 const bool is_typed_data_store = IsTypedDataBaseClassId(array_cid);
1930
1931 // Check if store barrier is needed. Byte arrays don't need a store barrier.
1932 StoreBarrierType needs_store_barrier =
1933 is_typed_data_store ? kNoStoreBarrier : kEmitStoreBarrier;
1934
1935 if (rep == kUnboxedFloat) {
1936 stored_value = new (Z)
1937 DoubleToFloatInstr(new (Z) Value(stored_value), call->deopt_id());
1938 *last = flow_graph->AppendTo(*last, stored_value, call->env(),
1940 } else if (RepresentationUtils::IsUnboxedInteger(rep)) {
1941 // Insert explicit unboxing instructions with truncation to avoid relying
1942 // on [SelectRepresentations] which doesn't mark them as truncating.
1943 stored_value =
1944 UnboxInstr::Create(rep, new (Z) Value(stored_value), call->deopt_id(),
1946 *last = flow_graph->AppendTo(*last, stored_value, call->env(),
1948 }
1949
1950 const intptr_t index_scale =
1951 compiler::target::Instance::ElementSizeFor(array_cid);
1952 auto* const store = new (Z) StoreIndexedInstr(
1953 new (Z) Value(array), new (Z) Value(index), new (Z) Value(stored_value),
1954 needs_store_barrier, /*index_unboxed=*/false, index_scale, array_cid,
1955 kAlignedAccess, call->deopt_id(), call->source());
1956 *last = flow_graph->AppendTo(*last, store, call->env(), FlowGraph::kEffect);
1957 // We need a return value to replace uses of the original definition. However,
1958 // the final instruction is a use of 'void operator[]=()', so we use null.
1959 *result = flow_graph->constant_null();
1960 return true;
1961}
1962
1963static bool InlineDoubleOp(FlowGraph* flow_graph,
1964 Token::Kind op_kind,
1965 Instruction* call,
1966 Definition* receiver,
1967 GraphEntryInstr* graph_entry,
1968 FunctionEntryInstr** entry,
1969 Instruction** last,
1970 Definition** result) {
1971 if (!CanUnboxDouble()) {
1972 return false;
1973 }
1974 Definition* left = receiver;
1975 Definition* right = call->ArgumentAt(1);
1976
1977 *entry =
1978 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
1979 call->GetBlock()->try_index(), DeoptId::kNone);
1980 (*entry)->InheritDeoptTarget(Z, call);
1981 // Arguments are checked. No need for class check.
1982 BinaryDoubleOpInstr* double_bin_op = new (Z)
1983 BinaryDoubleOpInstr(op_kind, new (Z) Value(left), new (Z) Value(right),
1984 call->deopt_id(), call->source());
1985 flow_graph->AppendTo(*entry, double_bin_op, call->env(), FlowGraph::kValue);
1986 *last = double_bin_op;
1987 *result = double_bin_op->AsDefinition();
1988
1989 return true;
1990}
1991
1992static bool InlineDoubleTestOp(FlowGraph* flow_graph,
1993 Instruction* call,
1994 Definition* receiver,
1996 GraphEntryInstr* graph_entry,
1997 FunctionEntryInstr** entry,
1998 Instruction** last,
1999 Definition** result) {
2000 if (!CanUnboxDouble()) {
2001 return false;
2002 }
2003
2004 *entry =
2005 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
2006 call->GetBlock()->try_index(), DeoptId::kNone);
2007 (*entry)->InheritDeoptTarget(Z, call);
2008 // Arguments are checked. No need for class check.
2009
2010 DoubleTestOpInstr* double_test_op = new (Z) DoubleTestOpInstr(
2011 kind, new (Z) Value(receiver), call->deopt_id(), call->source());
2012 flow_graph->AppendTo(*entry, double_test_op, call->env(), FlowGraph::kValue);
2013 *last = double_test_op;
2014 *result = double_test_op->AsDefinition();
2015
2016 return true;
2017}
2018
2019static bool InlineGrowableArraySetter(FlowGraph* flow_graph,
2020 const Slot& field,
2021 StoreBarrierType store_barrier_type,
2022 Instruction* call,
2023 Definition* receiver,
2024 GraphEntryInstr* graph_entry,
2025 FunctionEntryInstr** entry,
2026 Instruction** last,
2027 Definition** result) {
2028 Definition* array = receiver;
2029 Definition* value = call->ArgumentAt(1);
2030
2031 *entry =
2032 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
2033 call->GetBlock()->try_index(), DeoptId::kNone);
2034 (*entry)->InheritDeoptTarget(Z, call);
2035
2036 // This is an internal method, no need to check argument types.
2038 new (Z) StoreFieldInstr(field, new (Z) Value(array), new (Z) Value(value),
2039 store_barrier_type, call->source());
2040 flow_graph->AppendTo(*entry, store, call->env(), FlowGraph::kEffect);
2041 *last = store;
2042 // We need a return value to replace uses of the original definition. However,
2043 // the last instruction is a field setter, which returns void, so we use null.
2044 *result = flow_graph->constant_null();
2045
2046 return true;
2047}
2048
2049static bool InlineLoadClassId(FlowGraph* flow_graph,
2050 Instruction* call,
2051 GraphEntryInstr* graph_entry,
2052 FunctionEntryInstr** entry,
2053 Instruction** last,
2054 Definition** result) {
2055 *entry =
2056 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
2057 call->GetBlock()->try_index(), DeoptId::kNone);
2058 (*entry)->InheritDeoptTarget(Z, call);
2059 auto load_cid =
2060 new (Z) LoadClassIdInstr(call->ArgumentValueAt(0)->CopyWithType(Z));
2061 flow_graph->InsertBefore(call, load_cid, nullptr, FlowGraph::kValue);
2062 *last = load_cid;
2063 *result = load_cid->AsDefinition();
2064 return true;
2065}
2066
2067// Returns the LoadIndexedInstr.
2069 Instruction* call,
2070 intptr_t cid,
2071 Definition* str,
2072 Definition* index,
2073 Instruction* cursor) {
2075 new (Z) Value(str), Slot::GetLengthFieldForArrayCid(cid), str->source());
2076 cursor = flow_graph->AppendTo(cursor, length, nullptr, FlowGraph::kValue);
2077
2078 // Bounds check.
2080 // Add a null-check in case the index argument is known to be compatible
2081 // but possibly nullable. By inserting the null-check, we can allow the
2082 // unbox instruction later inserted to be non-speculative.
2083 auto* const null_check = new (Z)
2084 CheckNullInstr(new (Z) Value(index), Symbols::Index(), call->deopt_id(),
2085 call->source(), CheckNullInstr::kArgumentError);
2086 cursor = flow_graph->AppendTo(cursor, null_check, call->env(),
2088 }
2089 index = flow_graph->CreateCheckBound(length, index, call->deopt_id());
2090 cursor = flow_graph->AppendTo(cursor, index, call->env(), FlowGraph::kValue);
2091
2092 LoadIndexedInstr* load_indexed = new (Z) LoadIndexedInstr(
2093 new (Z) Value(str), new (Z) Value(index), /*index_unboxed=*/false,
2094 compiler::target::Instance::ElementSizeFor(cid), cid, kAlignedAccess,
2095 call->deopt_id(), call->source());
2096 cursor =
2097 flow_graph->AppendTo(cursor, load_indexed, nullptr, FlowGraph::kValue);
2098
2099 auto box = BoxInstr::Create(kUnboxedIntPtr, new Value(load_indexed));
2100 cursor = flow_graph->AppendTo(cursor, box, nullptr, FlowGraph::kValue);
2101
2102 ASSERT(box == cursor);
2103 return box;
2104}
2105
2106static bool InlineStringBaseCharAt(FlowGraph* flow_graph,
2107 Instruction* call,
2108 Definition* receiver,
2109 intptr_t cid,
2110 GraphEntryInstr* graph_entry,
2111 FunctionEntryInstr** entry,
2112 Instruction** last,
2113 Definition** result) {
2114 if (cid != kOneByteStringCid) {
2115 return false;
2116 }
2117 Definition* str = receiver;
2118 Definition* index = call->ArgumentAt(1);
2119
2120 *entry =
2121 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
2122 call->GetBlock()->try_index(), DeoptId::kNone);
2123 (*entry)->InheritDeoptTarget(Z, call);
2124
2125 *last = PrepareInlineStringIndexOp(flow_graph, call, cid, str, index, *entry);
2126
2127 OneByteStringFromCharCodeInstr* char_at = new (Z)
2128 OneByteStringFromCharCodeInstr(new (Z) Value((*last)->AsDefinition()));
2129
2130 flow_graph->AppendTo(*last, char_at, nullptr, FlowGraph::kValue);
2131 *last = char_at;
2132 *result = char_at->AsDefinition();
2133
2134 return true;
2135}
2136
2138 Instruction* call,
2139 Definition* receiver,
2140 intptr_t cid,
2141 GraphEntryInstr* graph_entry,
2142 FunctionEntryInstr** entry,
2143 Instruction** last,
2144 Definition** result) {
2145 if (cid == kDynamicCid) {
2146 ASSERT(call->IsStaticCall());
2147 return false;
2148 } else if ((cid != kOneByteStringCid) && (cid != kTwoByteStringCid)) {
2149 return false;
2150 }
2151 Definition* str = receiver;
2152 Definition* index = call->ArgumentAt(1);
2153
2154 *entry =
2155 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
2156 call->GetBlock()->try_index(), DeoptId::kNone);
2157 (*entry)->InheritDeoptTarget(Z, call);
2158
2159 *last = PrepareInlineStringIndexOp(flow_graph, call, cid, str, index, *entry);
2160 *result = (*last)->AsDefinition();
2161
2162 return true;
2163}
2164
2165// Only used for monomorphic calls.
2166bool CallSpecializer::TryReplaceInstanceCallWithInline(
2167 FlowGraph* flow_graph,
2168 ForwardInstructionIterator* iterator,
2169 InstanceCallInstr* call,
2170 SpeculativeInliningPolicy* policy) {
2171 const CallTargets& targets = call->Targets();
2172 ASSERT(targets.IsMonomorphic());
2173 const intptr_t receiver_cid = targets.MonomorphicReceiverCid();
2174 const Function& target = targets.FirstTarget();
2175 const auto exactness = targets.MonomorphicExactness();
2176 ExactnessInfo exactness_info{exactness.IsExact(), false};
2177
2178 FunctionEntryInstr* entry = nullptr;
2179 Instruction* last = nullptr;
2180 Definition* result = nullptr;
2181 if (CallSpecializer::TryInlineRecognizedMethod(
2182 flow_graph, receiver_cid, target, call,
2183 call->Receiver()->definition(), call->source(), call->ic_data(),
2184 /*graph_entry=*/nullptr, &entry, &last, &result, policy,
2185 &exactness_info)) {
2186 // The empty Object constructor is the only case where the inlined body is
2187 // empty and there is no result.
2188 ASSERT((last != nullptr && result != nullptr) ||
2189 (target.recognized_kind() == MethodRecognizer::kObjectConstructor));
2190 // Determine if inlining instance methods needs a check.
2191 // StringBase.codeUnitAt is monomorphic but its implementation is selected
2192 // based on the receiver cid.
2194 if (target.is_polymorphic_target() ||
2195 (target.recognized_kind() == MethodRecognizer::kStringBaseCodeUnitAt)) {
2197 } else {
2199 }
2200
2201 // Insert receiver class or null check if needed.
2202 switch (check) {
2204 Instruction* check_class = flow_graph->CreateCheckClass(
2205 call->Receiver()->definition(), targets, call->deopt_id(),
2206 call->source());
2207 flow_graph->InsertBefore(call, check_class, call->env(),
2209 break;
2210 }
2212 Instruction* check_null = new (Z) CheckNullInstr(
2213 call->Receiver()->CopyWithType(Z), call->function_name(),
2214 call->deopt_id(), call->source());
2215 flow_graph->InsertBefore(call, check_null, call->env(),
2217 break;
2218 }
2220 break;
2221 }
2222
2223 if (exactness_info.emit_exactness_guard && exactness.IsTriviallyExact()) {
2224 flow_graph->AddExactnessGuard(call, receiver_cid);
2225 }
2226
2227 ASSERT(!call->HasMoveArguments());
2228
2229 // Replace all uses of this definition with the result.
2230 if (call->HasUses()) {
2231 ASSERT(result != nullptr && result->HasSSATemp());
2232 call->ReplaceUsesWith(result);
2233 }
2234 // Finally insert the sequence other definition in place of this one in the
2235 // graph.
2236 if (entry->next() != nullptr) {
2237 call->previous()->LinkTo(entry->next());
2238 }
2239 entry->UnuseAllInputs(); // Entry block is not in the graph.
2240 if (last != nullptr) {
2241 ASSERT(call->GetBlock() == last->GetBlock());
2242 last->LinkTo(call);
2243 }
2244 // Remove through the iterator.
2245 ASSERT(iterator->Current() == call);
2246 iterator->RemoveCurrentFromGraph();
2247 call->set_previous(nullptr);
2248 call->set_next(nullptr);
2249 return true;
2250 }
2251 return false;
2252}
2253
2254bool CallSpecializer::TryReplaceStaticCallWithInline(
2255 FlowGraph* flow_graph,
2256 ForwardInstructionIterator* iterator,
2257 StaticCallInstr* call,
2258 SpeculativeInliningPolicy* policy) {
2259 FunctionEntryInstr* entry = nullptr;
2260 Instruction* last = nullptr;
2261 Definition* result = nullptr;
2262 Definition* receiver = nullptr;
2263 intptr_t receiver_cid = kIllegalCid;
2264 if (!call->function().is_static()) {
2265 receiver = call->Receiver()->definition();
2266 receiver_cid = call->Receiver()->Type()->ToCid();
2267 }
2268 if (CallSpecializer::TryInlineRecognizedMethod(
2269 flow_graph, receiver_cid, call->function(), call, receiver,
2270 call->source(), call->ic_data(), /*graph_entry=*/nullptr, &entry,
2271 &last, &result, policy)) {
2272 // The empty Object constructor is the only case where the inlined body is
2273 // empty and there is no result.
2274 ASSERT((last != nullptr && result != nullptr) ||
2275 (call->function().recognized_kind() ==
2276 MethodRecognizer::kObjectConstructor));
2277 ASSERT(!call->HasMoveArguments());
2278 // Replace all uses of this definition with the result.
2279 if (call->HasUses()) {
2280 ASSERT(result->HasSSATemp());
2281 call->ReplaceUsesWith(result);
2282 }
2283 // Finally insert the sequence other definition in place of this one in the
2284 // graph.
2285 if (entry != nullptr) {
2286 if (entry->next() != nullptr) {
2287 call->previous()->LinkTo(entry->next());
2288 }
2289 entry->UnuseAllInputs(); // Entry block is not in the graph.
2290 if (last != nullptr) {
2291 BlockEntryInstr* link = call->GetBlock();
2292 BlockEntryInstr* exit = last->GetBlock();
2293 if (link != exit) {
2294 // Dominance relation and SSA are updated incrementally when
2295 // conditionals are inserted. But succ/pred and ordering needs
2296 // to be redone. TODO(ajcbik): do this incrementally too.
2297 for (intptr_t i = 0, n = link->dominated_blocks().length(); i < n;
2298 ++i) {
2299 exit->AddDominatedBlock(link->dominated_blocks()[i]);
2300 }
2301 link->ClearDominatedBlocks();
2302 for (intptr_t i = 0, n = entry->dominated_blocks().length(); i < n;
2303 ++i) {
2304 link->AddDominatedBlock(entry->dominated_blocks()[i]);
2305 }
2306 Instruction* scan = exit;
2307 while (scan->next() != nullptr) {
2308 scan = scan->next();
2309 }
2310 scan->LinkTo(call);
2312 } else {
2313 last->LinkTo(call);
2314 }
2315 }
2316 }
2317 // Remove through the iterator.
2318 if (iterator != nullptr) {
2319 ASSERT(iterator->Current() == call);
2320 iterator->RemoveCurrentFromGraph();
2321 } else {
2322 call->RemoveFromGraph();
2323 }
2324 return true;
2325 }
2326 return false;
2327}
2328
2329static bool CheckMask(Definition* definition, intptr_t* mask_ptr) {
2330 if (!definition->IsConstant()) return false;
2331 ConstantInstr* constant_instruction = definition->AsConstant();
2332 const Object& constant_mask = constant_instruction->value();
2333 if (!constant_mask.IsSmi()) return false;
2334 const intptr_t mask = Smi::Cast(constant_mask).Value();
2335 if ((mask < 0) || (mask > 255)) {
2336 return false; // Not a valid mask.
2337 }
2338 *mask_ptr = mask;
2339 return true;
2340}
2341
2343 public:
2345 Instruction* call,
2346 GraphEntryInstr* graph_entry,
2347 FunctionEntryInstr** entry,
2348 Instruction** last,
2350 : flow_graph_(flow_graph),
2351 call_(call),
2352 graph_entry_(graph_entry),
2353 entry_(entry),
2354 last_(last),
2355 result_(result) {
2356 *entry_ = new (zone())
2357 FunctionEntryInstr(graph_entry_, flow_graph_->allocate_block_id(),
2358 call_->GetBlock()->try_index(), call_->deopt_id());
2359 *last = *entry_;
2360 }
2361
2363 switch (kind) {
2364 // ==== Int32x4 ====
2365 case MethodRecognizer::kInt32x4FromInts:
2366 UnboxScalar(0, kUnboxedInt32, 4);
2367 UnboxScalar(1, kUnboxedInt32, 4);
2368 UnboxScalar(2, kUnboxedInt32, 4);
2369 UnboxScalar(3, kUnboxedInt32, 4);
2370 Gather(4);
2371 BoxVector(kUnboxedInt32, 4);
2372 return true;
2373 case MethodRecognizer::kInt32x4FromBools:
2374 UnboxBool(0, 4);
2375 UnboxBool(1, 4);
2376 UnboxBool(2, 4);
2377 UnboxBool(3, 4);
2378 Gather(4);
2379 BoxVector(kUnboxedInt32, 4);
2380 return true;
2381 case MethodRecognizer::kInt32x4GetFlagX:
2382 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2383 IntToBool();
2384 Return(0);
2385 return true;
2386 case MethodRecognizer::kInt32x4GetFlagY:
2387 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2388 IntToBool();
2389 Return(1);
2390 return true;
2391 case MethodRecognizer::kInt32x4GetFlagZ:
2392 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2393 IntToBool();
2394 Return(2);
2395 return true;
2396 case MethodRecognizer::kInt32x4GetFlagW:
2397 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2398 IntToBool();
2399 Return(3);
2400 return true;
2401 case MethodRecognizer::kInt32x4WithFlagX:
2402 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2403 UnboxBool(1, 4);
2404 With(0);
2405 BoxVector(kUnboxedInt32, 4);
2406 return true;
2407 case MethodRecognizer::kInt32x4WithFlagY:
2408 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2409 UnboxBool(1, 4);
2410 With(1);
2411 BoxVector(kUnboxedInt32, 4);
2412 return true;
2413 case MethodRecognizer::kInt32x4WithFlagZ:
2414 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2415 UnboxBool(1, 4);
2416 With(2);
2417 BoxVector(kUnboxedInt32, 4);
2418 return true;
2419 case MethodRecognizer::kInt32x4WithFlagW:
2420 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2421 UnboxBool(1, 4);
2422 With(3);
2423 BoxVector(kUnboxedInt32, 4);
2424 return true;
2425 case MethodRecognizer::kInt32x4Shuffle: {
2426 Definition* mask_definition =
2427 call_->ArgumentAt(call_->ArgumentCount() - 1);
2428 intptr_t mask = 0;
2429 if (!CheckMask(mask_definition, &mask)) {
2430 return false;
2431 }
2432 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2433 Shuffle(mask);
2434 BoxVector(kUnboxedInt32, 4);
2435 return true;
2436 }
2437 case MethodRecognizer::kInt32x4ShuffleMix: {
2438 Definition* mask_definition =
2439 call_->ArgumentAt(call_->ArgumentCount() - 1);
2440 intptr_t mask = 0;
2441 if (!CheckMask(mask_definition, &mask)) {
2442 return false;
2443 }
2444 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2445 UnboxVector(1, kUnboxedInt32, kMintCid, 4);
2446 ShuffleMix(mask);
2447 BoxVector(kUnboxedInt32, 4);
2448 return true;
2449 }
2450 case MethodRecognizer::kInt32x4GetSignMask:
2451 case MethodRecognizer::kInt32x4Select:
2452 // TODO(riscv)
2453 return false;
2454
2455 // ==== Float32x4 ====
2456 case MethodRecognizer::kFloat32x4Abs:
2457 Float32x4Unary(Token::kABS);
2458 return true;
2459 case MethodRecognizer::kFloat32x4Negate:
2460 Float32x4Unary(Token::kNEGATE);
2461 return true;
2462 case MethodRecognizer::kFloat32x4Sqrt:
2463 Float32x4Unary(Token::kSQRT);
2464 return true;
2465 case MethodRecognizer::kFloat32x4Reciprocal:
2466 Float32x4Unary(Token::kRECIPROCAL);
2467 return true;
2468 case MethodRecognizer::kFloat32x4ReciprocalSqrt:
2469 Float32x4Unary(Token::kRECIPROCAL_SQRT);
2470 return true;
2471 case MethodRecognizer::kFloat32x4GetSignMask:
2472 // TODO(riscv)
2473 return false;
2474 case MethodRecognizer::kFloat32x4Equal:
2475 Float32x4Compare(Token::kEQ);
2476 return true;
2477 case MethodRecognizer::kFloat32x4GreaterThan:
2478 Float32x4Compare(Token::kGT);
2479 return true;
2480 case MethodRecognizer::kFloat32x4GreaterThanOrEqual:
2481 Float32x4Compare(Token::kGTE);
2482 return true;
2483 case MethodRecognizer::kFloat32x4LessThan:
2484 Float32x4Compare(Token::kLT);
2485 return true;
2486 case MethodRecognizer::kFloat32x4LessThanOrEqual:
2487 Float32x4Compare(Token::kLTE);
2488 return true;
2489 case MethodRecognizer::kFloat32x4Add:
2490 Float32x4Binary(Token::kADD);
2491 return true;
2492 case MethodRecognizer::kFloat32x4Sub:
2493 Float32x4Binary(Token::kSUB);
2494 return true;
2495 case MethodRecognizer::kFloat32x4Mul:
2496 Float32x4Binary(Token::kMUL);
2497 return true;
2498 case MethodRecognizer::kFloat32x4Div:
2499 Float32x4Binary(Token::kDIV);
2500 return true;
2501 case MethodRecognizer::kFloat32x4Min:
2502 Float32x4Binary(Token::kMIN);
2503 return true;
2504 case MethodRecognizer::kFloat32x4Max:
2505 Float32x4Binary(Token::kMAX);
2506 return true;
2507 case MethodRecognizer::kFloat32x4Scale:
2508 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2509 UnboxScalar(1, kUnboxedFloat, 4);
2510 BinaryDoubleOp(Token::kMUL, kUnboxedFloat, 4);
2511 BoxVector(kUnboxedFloat, 4);
2512 return true;
2513 case MethodRecognizer::kFloat32x4Splat:
2514 UnboxScalar(0, kUnboxedFloat, 4);
2515 Splat(4);
2516 BoxVector(kUnboxedFloat, 4);
2517 return true;
2518 case MethodRecognizer::kFloat32x4WithX:
2519 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2520 UnboxScalar(1, kUnboxedFloat, 4);
2521 With(0);
2522 BoxVector(kUnboxedFloat, 4);
2523 return true;
2524 case MethodRecognizer::kFloat32x4WithY:
2525 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2526 UnboxScalar(1, kUnboxedFloat, 4);
2527 With(1);
2528 BoxVector(kUnboxedFloat, 4);
2529 return true;
2530 case MethodRecognizer::kFloat32x4WithZ:
2531 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2532 UnboxScalar(1, kUnboxedFloat, 4);
2533 With(2);
2534 BoxVector(kUnboxedFloat, 4);
2535 return true;
2536 case MethodRecognizer::kFloat32x4WithW:
2537 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2538 UnboxScalar(1, kUnboxedFloat, 4);
2539 With(3);
2540 BoxVector(kUnboxedFloat, 4);
2541 return true;
2542 case MethodRecognizer::kFloat32x4Zero:
2543 UnboxDoubleZero(kUnboxedFloat, 4);
2544 BoxVector(kUnboxedFloat, 4);
2545 return true;
2546 case MethodRecognizer::kFloat32x4FromDoubles:
2547 UnboxScalar(0, kUnboxedFloat, 4);
2548 UnboxScalar(1, kUnboxedFloat, 4);
2549 UnboxScalar(2, kUnboxedFloat, 4);
2550 UnboxScalar(3, kUnboxedFloat, 4);
2551 Gather(4);
2552 BoxVector(kUnboxedFloat, 4);
2553 return true;
2554 case MethodRecognizer::kFloat32x4GetX:
2555 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2556 BoxScalar(0, kUnboxedFloat);
2557 return true;
2558 case MethodRecognizer::kFloat32x4GetY:
2559 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2560 BoxScalar(1, kUnboxedFloat);
2561 return true;
2562 case MethodRecognizer::kFloat32x4GetZ:
2563 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2564 BoxScalar(2, kUnboxedFloat);
2565 return true;
2566 case MethodRecognizer::kFloat32x4GetW:
2567 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2568 BoxScalar(3, kUnboxedFloat);
2569 return true;
2570 case MethodRecognizer::kFloat32x4Shuffle: {
2571 Definition* mask_definition =
2572 call_->ArgumentAt(call_->ArgumentCount() - 1);
2573 intptr_t mask = 0;
2574 if (!CheckMask(mask_definition, &mask)) {
2575 return false;
2576 }
2577 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2578 Shuffle(mask);
2579 BoxVector(kUnboxedFloat, 4);
2580 return true;
2581 }
2582 case MethodRecognizer::kFloat32x4ShuffleMix: {
2583 Definition* mask_definition =
2584 call_->ArgumentAt(call_->ArgumentCount() - 1);
2585 intptr_t mask = 0;
2586 if (!CheckMask(mask_definition, &mask)) {
2587 return false;
2588 }
2589 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2590 UnboxVector(1, kUnboxedFloat, kDoubleCid, 4);
2591 ShuffleMix(mask);
2592 BoxVector(kUnboxedFloat, 4);
2593 return true;
2594 }
2595
2596 // ==== Float64x2 ====
2597 case MethodRecognizer::kFloat64x2Abs:
2598 Float64x2Unary(Token::kABS);
2599 return true;
2600 case MethodRecognizer::kFloat64x2Negate:
2601 Float64x2Unary(Token::kNEGATE);
2602 return true;
2603 case MethodRecognizer::kFloat64x2Sqrt:
2604 Float64x2Unary(Token::kSQRT);
2605 return true;
2606 case MethodRecognizer::kFloat64x2Add:
2607 Float64x2Binary(Token::kADD);
2608 return true;
2609 case MethodRecognizer::kFloat64x2Sub:
2610 Float64x2Binary(Token::kSUB);
2611 return true;
2612 case MethodRecognizer::kFloat64x2Mul:
2613 Float64x2Binary(Token::kMUL);
2614 return true;
2615 case MethodRecognizer::kFloat64x2Div:
2616 Float64x2Binary(Token::kDIV);
2617 return true;
2618 case MethodRecognizer::kFloat64x2Min:
2619 Float64x2Binary(Token::kMIN);
2620 return true;
2621 case MethodRecognizer::kFloat64x2Max:
2622 Float64x2Binary(Token::kMAX);
2623 return true;
2624 case MethodRecognizer::kFloat64x2Scale:
2625 UnboxVector(0, kUnboxedDouble, kDoubleCid, 2);
2626 UnboxScalar(1, kUnboxedDouble, 2);
2627 BinaryDoubleOp(Token::kMUL, kUnboxedDouble, 2);
2628 BoxVector(kUnboxedDouble, 2);
2629 return true;
2630 case MethodRecognizer::kFloat64x2Splat:
2631 UnboxScalar(0, kUnboxedDouble, 2);
2632 Splat(2);
2633 BoxVector(kUnboxedDouble, 2);
2634 return true;
2635 case MethodRecognizer::kFloat64x2WithX:
2636 UnboxVector(0, kUnboxedDouble, kDoubleCid, 2);
2637 UnboxScalar(1, kUnboxedDouble, 2);
2638 With(0);
2639 BoxVector(kUnboxedDouble, 2);
2640 return true;
2641 case MethodRecognizer::kFloat64x2WithY:
2642 UnboxVector(0, kUnboxedDouble, kDoubleCid, 2);
2643 UnboxScalar(1, kUnboxedDouble, 2);
2644 With(1);
2645 BoxVector(kUnboxedDouble, 2);
2646 return true;
2647 case MethodRecognizer::kFloat64x2Zero:
2648 UnboxDoubleZero(kUnboxedDouble, 2);
2649 BoxVector(kUnboxedDouble, 2);
2650 return true;
2651 case MethodRecognizer::kFloat64x2FromDoubles:
2652 UnboxScalar(0, kUnboxedDouble, 2);
2653 UnboxScalar(1, kUnboxedDouble, 2);
2654 Gather(2);
2655 BoxVector(kUnboxedDouble, 2);
2656 return true;
2657 case MethodRecognizer::kFloat64x2GetX:
2658 UnboxVector(0, kUnboxedDouble, kDoubleCid, 2);
2659 BoxScalar(0, kUnboxedDouble);
2660 return true;
2661 case MethodRecognizer::kFloat64x2GetY:
2662 UnboxVector(0, kUnboxedDouble, kDoubleCid, 2);
2663 BoxScalar(1, kUnboxedDouble);
2664 return true;
2665
2666 // Mixed
2667 case MethodRecognizer::kFloat32x4ToFloat64x2: {
2668 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4, 1);
2669 Float32x4ToFloat64x2();
2670 BoxVector(kUnboxedDouble, 2);
2671 return true;
2672 }
2673 case MethodRecognizer::kFloat64x2ToFloat32x4: {
2674 UnboxVector(0, kUnboxedDouble, kDoubleCid, 2, 1);
2675 Float64x2ToFloat32x4();
2676 BoxVector(kUnboxedFloat, 4);
2677 return true;
2678 }
2679 case MethodRecognizer::kInt32x4ToFloat32x4:
2680 UnboxVector(0, kUnboxedInt32, kMintCid, 4, 1);
2681 Int32x4ToFloat32x4();
2682 BoxVector(kUnboxedFloat, 4);
2683 return true;
2684 case MethodRecognizer::kFloat32x4ToInt32x4:
2685 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4, 1);
2686 Float32x4ToInt32x4();
2687 BoxVector(kUnboxedInt32, 4);
2688 return true;
2689 default:
2690 return false;
2691 }
2692 }
2693
2694 private:
2695 void Float32x4Unary(Token::Kind op) {
2696 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2697 UnaryDoubleOp(op, kUnboxedFloat, 4);
2698 BoxVector(kUnboxedFloat, 4);
2699 }
2700 void Float32x4Binary(Token::Kind op) {
2701 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2702 UnboxVector(1, kUnboxedFloat, kDoubleCid, 4);
2703 BinaryDoubleOp(op, kUnboxedFloat, 4);
2704 BoxVector(kUnboxedFloat, 4);
2705 }
2706 void Float32x4Compare(Token::Kind op) {
2707 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2708 UnboxVector(1, kUnboxedFloat, kDoubleCid, 4);
2709 FloatCompare(op);
2710 BoxVector(kUnboxedInt32, 4);
2711 }
2712 void Float64x2Unary(Token::Kind op) {
2713 UnboxVector(0, kUnboxedDouble, kDoubleCid, 2);
2714 UnaryDoubleOp(op, kUnboxedDouble, 2);
2715 BoxVector(kUnboxedDouble, 2);
2716 }
2717 void Float64x2Binary(Token::Kind op) {
2718 UnboxVector(0, kUnboxedDouble, kDoubleCid, 2);
2719 UnboxVector(1, kUnboxedDouble, kDoubleCid, 2);
2720 BinaryDoubleOp(op, kUnboxedDouble, 2);
2721 BoxVector(kUnboxedDouble, 2);
2722 }
2723
2724 void UnboxVector(intptr_t i,
2725 Representation rep,
2726 intptr_t cid,
2727 intptr_t n,
2728 intptr_t type_args = 0) {
2729 Definition* arg = call_->ArgumentAt(i + type_args);
2730 if (CompilerState::Current().is_aot()) {
2731 // Add null-checks in case of the arguments are known to be compatible
2732 // but they are possibly nullable.
2733 // By inserting the null-check, we can allow the unbox instruction later
2734 // inserted to be non-speculative.
2735 arg = AddDefinition(new (zone()) CheckNullInstr(
2736 new (zone()) Value(arg), Symbols::SecondArg(), call_->deopt_id(),
2738 }
2739 for (intptr_t lane = 0; lane < n; lane++) {
2740 in_[i][lane] = AddDefinition(
2741 new (zone()) UnboxLaneInstr(new (zone()) Value(arg), lane, rep, cid));
2742 }
2743 }
2744
2745 void UnboxScalar(intptr_t i,
2746 Representation rep,
2747 intptr_t n,
2748 intptr_t type_args = 0) {
2749 Definition* arg = call_->ArgumentAt(i + type_args);
2750 if (CompilerState::Current().is_aot()) {
2751 // Add null-checks in case of the arguments are known to be compatible
2752 // but they are possibly nullable.
2753 // By inserting the null-check, we can allow the unbox instruction later
2754 // inserted to be non-speculative.
2755 arg = AddDefinition(new (zone()) CheckNullInstr(
2756 new (zone()) Value(arg), Symbols::SecondArg(), call_->deopt_id(),
2758 }
2759 Definition* unbox = AddDefinition(
2760 UnboxInstr::Create(rep, new (zone()) Value(arg), DeoptId::kNone,
2762 for (intptr_t lane = 0; lane < n; lane++) {
2763 in_[i][lane] = unbox;
2764 }
2765 }
2766
2767 void UnboxBool(intptr_t i, intptr_t n) {
2768 Definition* unbox = AddDefinition(new (zone()) BoolToIntInstr(
2769 call_->ArgumentValueAt(i)->CopyWithType(zone())));
2770 for (intptr_t lane = 0; lane < n; lane++) {
2771 in_[i][lane] = unbox;
2772 }
2773 }
2774
2775 void UnboxDoubleZero(Representation rep, intptr_t n) {
2776 Definition* zero = flow_graph_->GetConstant(
2778 for (intptr_t lane = 0; lane < n; lane++) {
2779 op_[lane] = zero;
2780 }
2781 }
2782
2783 void UnaryDoubleOp(Token::Kind op, Representation rep, intptr_t n) {
2784 for (intptr_t lane = 0; lane < n; lane++) {
2785 op_[lane] = AddDefinition(new (zone()) UnaryDoubleOpInstr(
2786 op, new (zone()) Value(in_[0][lane]), call_->deopt_id(),
2788 }
2789 }
2790
2791 void BinaryDoubleOp(Token::Kind op, Representation rep, intptr_t n) {
2792 for (intptr_t lane = 0; lane < n; lane++) {
2793 op_[lane] = AddDefinition(new (zone()) BinaryDoubleOpInstr(
2794 op, new (zone()) Value(in_[0][lane]),
2795 new (zone()) Value(in_[1][lane]), call_->deopt_id(), call_->source(),
2797 }
2798 }
2799
2800 void FloatCompare(Token::Kind op) {
2801 for (intptr_t lane = 0; lane < 4; lane++) {
2802 op_[lane] = AddDefinition(
2803 new (zone()) FloatCompareInstr(op, new (zone()) Value(in_[0][lane]),
2804 new (zone()) Value(in_[1][lane])));
2805 }
2806 }
2807
2808 void With(intptr_t i) {
2809 for (intptr_t lane = 0; lane < 4; lane++) {
2810 op_[lane] = in_[0][lane];
2811 }
2812 op_[i] = in_[1][0];
2813 }
2814 void Splat(intptr_t n) {
2815 for (intptr_t lane = 0; lane < n; lane++) {
2816 op_[lane] = in_[0][0];
2817 }
2818 }
2819 void Gather(intptr_t n) {
2820 for (intptr_t lane = 0; lane < n; lane++) {
2821 op_[lane] = in_[lane][0];
2822 }
2823 }
2824 void Shuffle(intptr_t mask) {
2825 op_[0] = in_[0][(mask >> 0) & 3];
2826 op_[1] = in_[0][(mask >> 2) & 3];
2827 op_[2] = in_[0][(mask >> 4) & 3];
2828 op_[3] = in_[0][(mask >> 6) & 3];
2829 }
2830 void ShuffleMix(intptr_t mask) {
2831 op_[0] = in_[0][(mask >> 0) & 3];
2832 op_[1] = in_[0][(mask >> 2) & 3];
2833 op_[2] = in_[1][(mask >> 4) & 3];
2834 op_[3] = in_[1][(mask >> 6) & 3];
2835 }
2836 void Float32x4ToFloat64x2() {
2837 for (intptr_t lane = 0; lane < 2; lane++) {
2838 op_[lane] = AddDefinition(new (zone()) FloatToDoubleInstr(
2839 new (zone()) Value(in_[0][lane]), DeoptId::kNone));
2840 }
2841 }
2842 void Float64x2ToFloat32x4() {
2843 for (intptr_t lane = 0; lane < 2; lane++) {
2844 op_[lane] = AddDefinition(new (zone()) DoubleToFloatInstr(
2845 new (zone()) Value(in_[0][lane]), DeoptId::kNone));
2846 }
2847 Definition* zero = flow_graph_->GetConstant(
2848 Double::ZoneHandle(Double::NewCanonical(0.0)), kUnboxedFloat);
2849 op_[2] = zero;
2850 op_[3] = zero;
2851 }
2852 void Int32x4ToFloat32x4() {
2853 for (intptr_t lane = 0; lane < 4; lane++) {
2854 op_[lane] = AddDefinition(new (zone()) BitCastInstr(
2855 kUnboxedInt32, kUnboxedFloat, new (zone()) Value(in_[0][lane])));
2856 }
2857 }
2858 void Float32x4ToInt32x4() {
2859 for (intptr_t lane = 0; lane < 4; lane++) {
2860 op_[lane] = AddDefinition(new (zone()) BitCastInstr(
2861 kUnboxedFloat, kUnboxedInt32, new (zone()) Value(in_[0][lane])));
2862 }
2863 }
2864 void IntToBool() {
2865 for (intptr_t lane = 0; lane < 4; lane++) {
2866 op_[lane] = AddDefinition(
2867 new (zone()) IntToBoolInstr(new (zone()) Value(in_[0][lane])));
2868 }
2869 }
2870
2871 void BoxVector(Representation rep, intptr_t n) {
2872 Definition* box;
2873 if (n == 2) {
2874 box = new (zone()) BoxLanesInstr(rep, new (zone()) Value(op_[0]),
2875 new (zone()) Value(op_[1]));
2876 } else {
2877 ASSERT(n == 4);
2878 box = new (zone()) BoxLanesInstr(
2879 rep, new (zone()) Value(op_[0]), new (zone()) Value(op_[1]),
2880 new (zone()) Value(op_[2]), new (zone()) Value(op_[3]));
2881 }
2882 Done(AddDefinition(box));
2883 }
2884
2885 void BoxScalar(intptr_t lane, Representation rep) {
2886 Definition* box = BoxInstr::Create(rep, new (zone()) Value(in_[0][lane]));
2887 Done(AddDefinition(box));
2888 }
2889
// Completes the lowering by returning an already-computed lane value.
 2890 void Return(intptr_t lane) { Done(op_[lane]); }
2891
// Marks the lowering as successful: the entry block inherits the call's
// deopt target and |result| becomes the definition replacing the call.
 2892 void Done(Definition* result) {
 2893 // InheritDeoptTarget also inherits environment (which may add 'entry' into
 2894 // env_use_list()), so InheritDeoptTarget should be done only after decided
 2895 // to inline.
 2896 (*entry_)->InheritDeoptTarget(zone(), call_);
 2897 *result_ = result;
 2898 }
2899
2900 Definition* AddDefinition(Definition* def) {
2901 *last_ = flow_graph_->AppendTo(
2902 *last_, def, call_->deopt_id() != DeoptId::kNone ? call_->env() : NULL,
2903 FlowGraph::kValue);
2904 return def;
2905 }
// Shorthand for the flow graph's zone, used for all IL allocations here.
 2906 Zone* zone() { return flow_graph_->zone(); }
2907
 2908 FlowGraph* flow_graph_;
// The SIMD call currently being lowered into per-lane operations.
 2909 Instruction* call_;
 2910 GraphEntryInstr* graph_entry_;
// Out-parameters of the inlining: entry block, last appended instruction,
// and the definition that replaces the call's result.
 2911 FunctionEntryInstr** entry_;
 2912 Instruction** last_;
 2913 Definition** result_;
 2914
 2915 // First index is the argument number, second index is the lane number.
 2916 Definition* in_[4][4];
 2917 // Index is the lane number.
 2918 Definition* op_[4];
 2919};
2920
// Attempts to replace a recognized SIMD method call with a direct SimdOp
// IL instruction (or, on RISC-V, a scalar SimdLowering expansion). On
// success fills in *entry/*last/*result and returns true; returns false
// to fall back to a normal call.
// NOTE(review): the extracted listing elides original line 2925 — given
// the uses of |kind| below, presumably the "MethodRecognizer::Kind kind,"
// parameter; confirm against the upstream file.
 2921 static bool InlineSimdOp(FlowGraph* flow_graph,
 2922 bool is_dynamic_call,
 2923 Instruction* call,
 2924 Definition* receiver,
 2926 GraphEntryInstr* graph_entry,
 2927 FunctionEntryInstr** entry,
 2928 Instruction** last,
 2929 Definition** result) {
 2930 if (is_dynamic_call && call->ArgumentCount() > 1) {
 2931 // Issue(dartbug.com/37737): Dynamic invocation forwarders have the
 2932 // same recognized kind as the method they are forwarding to.
 2933 // That causes us to inline the recognized method and not the
 2934 // dyn: forwarder itself.
 2935 // This is only safe if all arguments are checked in the flow graph we
 2936 // build.
 2937 // For double/int arguments speculative unboxing instructions should ensure
 2938 // to bailout in AOT (or deoptimize in JIT) if the incoming values are not
 2939 // correct. Though for user-implementable types, like
 2940 // operator+(Float32x4 other), this is not safe and we therefore bailout.
 2941 return false;
 2942 }
 2943
 2944 if (!FLAG_enable_simd_inline) {
 2945 return false;
 2946 }
 2947
// NOTE(review): original line 2948 is elided — the closing brace at line
// 2955 implies an opening "if (...) {" guard here, presumably a check for
// unboxed SIMD support; confirm against the upstream file.
 2949#if defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
 2950 SimdLowering lowering(flow_graph, call, graph_entry, entry, last, result);
 2951 return lowering.TryInline(kind);
 2952#else
 2953 return false;
 2954#endif
 2955 }
 2956
 2957 *entry =
 2958 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
 2959 call->GetBlock()->try_index(), DeoptId::kNone);
 2960 Instruction* cursor = *entry;
 2961 switch (kind) {
// Shuffle variants require a compile-time-constant mask; bail out if the
// mask argument is not a usable constant.
 2962 case MethodRecognizer::kInt32x4Shuffle:
 2963 case MethodRecognizer::kInt32x4ShuffleMix:
 2964 case MethodRecognizer::kFloat32x4Shuffle:
 2965 case MethodRecognizer::kFloat32x4ShuffleMix: {
 2966 Definition* mask_definition = call->ArgumentAt(call->ArgumentCount() - 1);
 2967 intptr_t mask = 0;
 2968 if (!CheckMask(mask_definition, &mask)) {
 2969 return false;
 2970 }
 2971 *last = SimdOpInstr::CreateFromCall(Z, kind, receiver, call, mask);
 2972 break;
 2973 }
 2974
 2975 case MethodRecognizer::kFloat32x4WithX:
 2976 case MethodRecognizer::kFloat32x4WithY:
 2977 case MethodRecognizer::kFloat32x4WithZ:
 2978 case MethodRecognizer::kFloat32x4WithW:
 2979 case MethodRecognizer::kFloat32x4Scale: {
 2980 Definition* left = receiver;
 2981 Definition* right = call->ArgumentAt(1);
 2982 // Note: left and right values are swapped when handed to the instruction,
 2983 // this is done so that the double value is loaded into the output
 2984 // register and can be destroyed.
 2985 // TODO(dartbug.com/31035) this swapping is only needed because register
 2986 // allocator has SameAsFirstInput policy and not SameAsNthInput(n).
 2987 *last = SimdOpInstr::Create(kind, new (Z) Value(right),
 2988 new (Z) Value(left), call->deopt_id());
 2989 break;
 2990 }
 2991
 2992 case MethodRecognizer::kFloat32x4Zero:
 2993 case MethodRecognizer::kFloat32x4ToFloat64x2:
 2994 case MethodRecognizer::kFloat64x2ToFloat32x4:
 2995 case MethodRecognizer::kFloat32x4ToInt32x4:
 2996 case MethodRecognizer::kInt32x4ToFloat32x4:
 2997 case MethodRecognizer::kFloat64x2Zero:
 2998 *last = SimdOpInstr::CreateFromFactoryCall(Z, kind, call);
 2999 break;
 3000 case MethodRecognizer::kFloat32x4Mul:
 3001 case MethodRecognizer::kFloat32x4Div:
 3002 case MethodRecognizer::kFloat32x4Add:
 3003 case MethodRecognizer::kFloat32x4Sub:
 3004 case MethodRecognizer::kFloat64x2Mul:
 3005 case MethodRecognizer::kFloat64x2Div:
 3006 case MethodRecognizer::kFloat64x2Add:
 3007 case MethodRecognizer::kFloat64x2Sub:
 3008 *last = SimdOpInstr::CreateFromCall(Z, kind, receiver, call);
 3009 if (CompilerState::Current().is_aot()) {
 3010 // Add null-checks in case of the arguments are known to be compatible
 3011 // but they are possibly nullable.
 3012 // By inserting the null-check, we can allow the unbox instruction later
 3013 // inserted to be non-speculative.
 3014 CheckNullInstr* check1 =
 3015 new (Z) CheckNullInstr(new (Z) Value(receiver), Symbols::FirstArg(),
 3016 call->deopt_id(), call->source());
 3017
 3018 CheckNullInstr* check2 = new (Z) CheckNullInstr(
 3019 new (Z) Value(call->ArgumentAt(1)), Symbols::SecondArg(),
 3020 call->deopt_id(), call->source(), CheckNullInstr::kArgumentError);
 3021
 3022 (*last)->SetInputAt(0, new (Z) Value(check1));
 3023 (*last)->SetInputAt(1, new (Z) Value(check2));
 3024
 3025 flow_graph->InsertBefore(call, check1, call->env(), FlowGraph::kValue);
 3026 flow_graph->InsertBefore(call, check2, call->env(), FlowGraph::kValue);
 3027 }
 3028 break;
 3029 default:
 3030 *last = SimdOpInstr::CreateFromCall(Z, kind, receiver, call);
 3031 break;
 3032 }
 3033 // InheritDeoptTarget also inherits environment (which may add 'entry' into
 3034 // env_use_list()), so InheritDeoptTarget should be done only after decided
 3035 // to inline.
 3036 (*entry)->InheritDeoptTarget(Z, call);
 3037 flow_graph->AppendTo(
 3038 cursor, *last, call->deopt_id() != DeoptId::kNone ? call->env() : nullptr,
// NOTE(review): original line 3039 is elided — presumably the trailing
// use-kind argument of AppendTo (e.g. FlowGraph::kValue); confirm upstream.
 3040 *result = (*last)->AsDefinition();
 3041 return true;
 3042}
3043
// Appends an int64 multiply (x * y) after |cursor| and returns the new
// instruction so callers can chain further multiplies.
// NOTE(review): the extracted listing elides original line 3050 (the tail
// of the BinaryInt64OpInstr constructor arguments) — presumably the deopt
// id argument; confirm against the upstream file.
 3044 static Instruction* InlineMul(FlowGraph* flow_graph,
 3045 Instruction* cursor,
 3046 Definition* x,
 3047 Definition* y) {
 3048 BinaryInt64OpInstr* mul = new (Z)
 3049 BinaryInt64OpInstr(Token::kMUL, new (Z) Value(x), new (Z) Value(y),
 3051 return flow_graph->AppendTo(cursor, mul, nullptr, FlowGraph::kValue);
 3052}
3053
// Inlines trivial cases of the internal _intPow(x, y) intrinsic:
// y == 0 -> 1, y == 1 -> x, 2 <= y <= 5 -> a short multiply chain, and
// x == 1 -> x. Returns false when no easy expansion applies.
// NOTE(review): the extracted listing elides original line 3086 — given
// the uses of |square| below, presumably "Definition* square ="; confirm
// against the upstream file.
 3054 static bool InlineMathIntPow(FlowGraph* flow_graph,
 3055 Instruction* call,
 3056 GraphEntryInstr* graph_entry,
 3057 FunctionEntryInstr** entry,
 3058 Instruction** last,
 3059 Definition** result) {
 3060 // Invoking the _intPow(x, y) implies that both:
 3061 // (1) x, y are int
 3062 // (2) y >= 0.
 3063 // Thus, try to inline some very obvious cases.
 3064 // TODO(ajcbik): useful to generalize?
 3065 intptr_t val = 0;
 3066 Value* x = call->ArgumentValueAt(0);
 3067 Value* y = call->ArgumentValueAt(1);
 3068 // Use x^0 == 1, x^1 == x, and x^c == x * .. * x for small c.
 3069 const intptr_t small_exponent = 5;
 3070 if (IsSmiValue(y, &val)) {
 3071 if (val == 0) {
 3072 *last = flow_graph->GetConstant(Smi::ZoneHandle(Smi::New(1)));
 3073 *result = (*last)->AsDefinition();
 3074 return true;
 3075 } else if (val == 1) {
 3076 *last = x->definition();
 3077 *result = (*last)->AsDefinition();
 3078 return true;
 3079 } else if (1 < val && val <= small_exponent) {
 3080 // Lazily construct entry only in this case.
 3081 *entry = new (Z)
 3082 FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
 3083 call->GetBlock()->try_index(), DeoptId::kNone);
 3084 (*entry)->InheritDeoptTarget(Z, call);
 3085 Definition* x_def = x->definition();
 3087 InlineMul(flow_graph, *entry, x_def, x_def)->AsDefinition();
 3088 *last = square;
 3089 *result = square;
 3090 switch (val) {
 3091 case 2:
 3092 return true;
 3093 case 3:
 3094 *last = InlineMul(flow_graph, *last, x_def, square);
 3095 *result = (*last)->AsDefinition();
 3096 return true;
 3097 case 4:
 3098 *last = InlineMul(flow_graph, *last, square, square);
 3099 *result = (*last)->AsDefinition();
 3100 return true;
 3101 case 5:
 3102 *last = InlineMul(flow_graph, *last, square, square);
 3103 *last = InlineMul(flow_graph, *last, x_def, (*last)->AsDefinition());
 3104 *result = (*last)->AsDefinition();
 3105 return true;
 3106 }
 3107 }
 3108 }
 3109 // Use 0^y == 0 (only for y != 0) and 1^y == 1.
 3110 if (IsSmiValue(x, &val)) {
 3111 if (val == 1) {
 3112 *last = x->definition();
 3113 *result = x->definition();
 3114 return true;
 3115 }
 3116 }
 3117 return false;
 3118}
3119
// Central dispatch for inlining compiler-recognized methods (typed-data
// indexed accessors, double arithmetic, SIMD operations, string accessors,
// Object/array constructors, etc.). On success fills *entry/*last/*result
// with the replacement subgraph and returns true; returns false to leave
// the call untouched. The first switch handles kinds that never need
// speculation; everything after the can_speculate check may deoptimize.
// NOTE(review): the extracted listing elides several original lines
// (3275, 3281, 3378, 3386, 3389, 3398, 3402, 3408, 3428, 3431, 3456,
// 3458, 3469); inline notes below mark each gap — confirm the elided
// text against the upstream file before relying on this listing.
 3120 bool CallSpecializer::TryInlineRecognizedMethod(
 3121 FlowGraph* flow_graph,
 3122 intptr_t receiver_cid,
 3123 const Function& target,
 3124 Definition* call,
 3125 Definition* receiver,
 3126 const InstructionSource& source,
 3127 const ICData* ic_data,
 3128 GraphEntryInstr* graph_entry,
 3129 FunctionEntryInstr** entry,
 3130 Instruction** last,
 3131 Definition** result,
 3132 SpeculativeInliningPolicy* policy,
 3133 CallSpecializer::ExactnessInfo* exactness) {
 3134 COMPILER_TIMINGS_TIMER_SCOPE(flow_graph->thread(), InlineRecognizedMethod);
 3135
 3136 if (receiver_cid == kSentinelCid) {
 3137 // Receiver was defined in dead code and was replaced by the sentinel.
 3138 // Original receiver cid is lost, so don't try to inline recognized
 3139 // methods.
 3140 return false;
 3141 }
 3142
 3143 const bool can_speculate = policy->IsAllowedForInlining(call->deopt_id());
 3144 const bool is_dynamic_call = Function::IsDynamicInvocationForwarderName(
 3145 String::Handle(flow_graph->zone(), target.name()));
 3146
 3147 const MethodRecognizer::Kind kind = target.recognized_kind();
 3148 switch (kind) {
 3149 case MethodRecognizer::kTypedDataIndexCheck:
 3150 return InlineTypedDataIndexCheck(flow_graph, call, receiver, graph_entry,
 3151 entry, last, result, Symbols::Index());
 3152 case MethodRecognizer::kByteDataByteOffsetCheck:
 3153 return InlineTypedDataIndexCheck(flow_graph, call, receiver, graph_entry,
 3154 entry, last, result,
 3155 Symbols::byteOffset());
 3156 // Recognized [] operators.
 3157 case MethodRecognizer::kObjectArrayGetIndexed:
 3158 case MethodRecognizer::kGrowableArrayGetIndexed:
 3159 case MethodRecognizer::kInt8ArrayGetIndexed:
 3160 case MethodRecognizer::kUint8ArrayGetIndexed:
 3161 case MethodRecognizer::kUint8ClampedArrayGetIndexed:
 3162 case MethodRecognizer::kExternalUint8ArrayGetIndexed:
 3163 case MethodRecognizer::kExternalUint8ClampedArrayGetIndexed:
 3164 case MethodRecognizer::kInt16ArrayGetIndexed:
 3165 case MethodRecognizer::kUint16ArrayGetIndexed:
 3166 return InlineGetIndexed(flow_graph, can_speculate, is_dynamic_call, kind,
 3167 call, receiver, graph_entry, entry, last, result);
 3168 case MethodRecognizer::kFloat32ArrayGetIndexed:
 3169 case MethodRecognizer::kFloat64ArrayGetIndexed:
 3170 if (!CanUnboxDouble()) {
 3171 return false;
 3172 }
 3173 return InlineGetIndexed(flow_graph, can_speculate, is_dynamic_call, kind,
 3174 call, receiver, graph_entry, entry, last, result);
 3175 case MethodRecognizer::kFloat32x4ArrayGetIndexed:
 3176 case MethodRecognizer::kFloat64x2ArrayGetIndexed:
 3177 if (!ShouldInlineSimd()) {
 3178 return false;
 3179 }
 3180 return InlineGetIndexed(flow_graph, can_speculate, is_dynamic_call, kind,
 3181 call, receiver, graph_entry, entry, last, result);
 3182 case MethodRecognizer::kInt32ArrayGetIndexed:
 3183 case MethodRecognizer::kUint32ArrayGetIndexed:
 3184 return InlineGetIndexed(flow_graph, can_speculate, is_dynamic_call, kind,
 3185 call, receiver, graph_entry, entry, last, result);
 3186 case MethodRecognizer::kInt64ArrayGetIndexed:
 3187 case MethodRecognizer::kUint64ArrayGetIndexed:
 3188 return InlineGetIndexed(flow_graph, can_speculate, is_dynamic_call, kind,
 3189 call, receiver, graph_entry, entry, last, result);
 3190 case MethodRecognizer::kClassIDgetID:
 3191 return InlineLoadClassId(flow_graph, call, graph_entry, entry, last,
 3192 result);
 3193 default:
 3194 break;
 3195 }
 3196
 3197 // The following ones need to speculate.
 3198 if (!can_speculate) {
 3199 return false;
 3200 }
 3201
 3202 switch (kind) {
 3203 case MethodRecognizer::kUint8ClampedArraySetIndexed:
 3204 case MethodRecognizer::kExternalUint8ClampedArraySetIndexed:
 3205 // These require clamping. Just inline normal body instead which
 3206 // contains necessary clamping code.
 3207 return false;
 3208
 3209 // Recognized []= operators.
 3210 case MethodRecognizer::kObjectArraySetIndexed:
 3211 case MethodRecognizer::kGrowableArraySetIndexed:
 3212 case MethodRecognizer::kObjectArraySetIndexedUnchecked:
 3213 case MethodRecognizer::kGrowableArraySetIndexedUnchecked:
 3214 case MethodRecognizer::kInt8ArraySetIndexed:
 3215 case MethodRecognizer::kUint8ArraySetIndexed:
 3216 case MethodRecognizer::kExternalUint8ArraySetIndexed:
 3217 case MethodRecognizer::kInt16ArraySetIndexed:
 3218 case MethodRecognizer::kUint16ArraySetIndexed:
 3219 case MethodRecognizer::kInt32ArraySetIndexed:
 3220 case MethodRecognizer::kUint32ArraySetIndexed:
 3221 case MethodRecognizer::kInt64ArraySetIndexed:
 3222 case MethodRecognizer::kUint64ArraySetIndexed:
 3223 return InlineSetIndexed(flow_graph, kind, target, call, receiver, source,
 3224 exactness, graph_entry, entry, last, result);
 3225
 3226 case MethodRecognizer::kFloat32ArraySetIndexed:
 3227 case MethodRecognizer::kFloat64ArraySetIndexed: {
 3228 if (!CanUnboxDouble()) {
 3229 return false;
 3230 }
 3231 return InlineSetIndexed(flow_graph, kind, target, call, receiver, source,
 3232 exactness, graph_entry, entry, last, result);
 3233 }
 3234 case MethodRecognizer::kFloat32x4ArraySetIndexed: {
 3235 if (!ShouldInlineSimd()) {
 3236 return false;
 3237 }
 3238 return InlineSetIndexed(flow_graph, kind, target, call, receiver, source,
 3239 exactness, graph_entry, entry, last, result);
 3240 }
 3241 case MethodRecognizer::kFloat64x2ArraySetIndexed: {
 3242 if (!ShouldInlineSimd()) {
 3243 return false;
 3244 }
 3245 return InlineSetIndexed(flow_graph, kind, target, call, receiver, source,
 3246 exactness, graph_entry, entry, last, result);
 3247 }
 3248 case MethodRecognizer::kStringBaseCodeUnitAt:
 3249 return InlineStringBaseCodeUnitAt(flow_graph, call, receiver,
 3250 receiver_cid, graph_entry, entry, last,
 3251 result);
 3252 case MethodRecognizer::kStringBaseCharAt:
 3253 return InlineStringBaseCharAt(flow_graph, call, receiver, receiver_cid,
 3254 graph_entry, entry, last, result);
 3255 case MethodRecognizer::kDoubleAdd:
 3256 return InlineDoubleOp(flow_graph, Token::kADD, call, receiver,
 3257 graph_entry, entry, last, result);
 3258 case MethodRecognizer::kDoubleSub:
 3259 return InlineDoubleOp(flow_graph, Token::kSUB, call, receiver,
 3260 graph_entry, entry, last, result);
 3261 case MethodRecognizer::kDoubleMul:
 3262 return InlineDoubleOp(flow_graph, Token::kMUL, call, receiver,
 3263 graph_entry, entry, last, result);
 3264 case MethodRecognizer::kDoubleDiv:
 3265 return InlineDoubleOp(flow_graph, Token::kDIV, call, receiver,
 3266 graph_entry, entry, last, result);
 3267 case MethodRecognizer::kDouble_getIsNaN:
 3268 case MethodRecognizer::kDouble_getIsInfinite:
 3269 case MethodRecognizer::kDouble_getIsNegative:
 3270 return InlineDoubleTestOp(flow_graph, call, receiver, kind, graph_entry,
 3271 entry, last, result);
 3272 case MethodRecognizer::kGrowableArraySetData:
 3273 ASSERT((receiver_cid == kGrowableObjectArrayCid) ||
 3274 ((receiver_cid == kDynamicCid) && call->IsStaticCall()));
// NOTE(review): original line 3275 elided — presumably the head of a
// "return InlineGrowableArraySetter(" call; confirm upstream.
 3276 flow_graph, Slot::GrowableObjectArray_data(), kEmitStoreBarrier, call,
 3277 receiver, graph_entry, entry, last, result);
 3278 case MethodRecognizer::kGrowableArraySetLength:
 3279 ASSERT((receiver_cid == kGrowableObjectArrayCid) ||
 3280 ((receiver_cid == kDynamicCid) && call->IsStaticCall()));
// NOTE(review): original line 3281 elided — presumably the head of a
// "return InlineGrowableArraySetter(" call; confirm upstream.
 3282 flow_graph, Slot::GrowableObjectArray_length(), kNoStoreBarrier, call,
 3283 receiver, graph_entry, entry, last, result);
 3284
 3285 case MethodRecognizer::kFloat32x4Abs:
 3286 case MethodRecognizer::kFloat32x4Clamp:
 3287 case MethodRecognizer::kFloat32x4FromDoubles:
 3288 case MethodRecognizer::kFloat32x4Equal:
 3289 case MethodRecognizer::kFloat32x4GetSignMask:
 3290 case MethodRecognizer::kFloat32x4GreaterThan:
 3291 case MethodRecognizer::kFloat32x4GreaterThanOrEqual:
 3292 case MethodRecognizer::kFloat32x4LessThan:
 3293 case MethodRecognizer::kFloat32x4LessThanOrEqual:
 3294 case MethodRecognizer::kFloat32x4Max:
 3295 case MethodRecognizer::kFloat32x4Min:
 3296 case MethodRecognizer::kFloat32x4Negate:
 3297 case MethodRecognizer::kFloat32x4NotEqual:
 3298 case MethodRecognizer::kFloat32x4Reciprocal:
 3299 case MethodRecognizer::kFloat32x4ReciprocalSqrt:
 3300 case MethodRecognizer::kFloat32x4Scale:
 3301 case MethodRecognizer::kFloat32x4GetW:
 3302 case MethodRecognizer::kFloat32x4GetX:
 3303 case MethodRecognizer::kFloat32x4GetY:
 3304 case MethodRecognizer::kFloat32x4GetZ:
 3305 case MethodRecognizer::kFloat32x4Splat:
 3306 case MethodRecognizer::kFloat32x4Sqrt:
 3307 case MethodRecognizer::kFloat32x4ToFloat64x2:
 3308 case MethodRecognizer::kFloat32x4ToInt32x4:
 3309 case MethodRecognizer::kFloat32x4WithW:
 3310 case MethodRecognizer::kFloat32x4WithX:
 3311 case MethodRecognizer::kFloat32x4WithY:
 3312 case MethodRecognizer::kFloat32x4WithZ:
 3313 case MethodRecognizer::kFloat32x4Zero:
 3314 case MethodRecognizer::kFloat64x2Abs:
 3315 case MethodRecognizer::kFloat64x2Clamp:
 3316 case MethodRecognizer::kFloat64x2FromDoubles:
 3317 case MethodRecognizer::kFloat64x2GetSignMask:
 3318 case MethodRecognizer::kFloat64x2GetX:
 3319 case MethodRecognizer::kFloat64x2GetY:
 3320 case MethodRecognizer::kFloat64x2Max:
 3321 case MethodRecognizer::kFloat64x2Min:
 3322 case MethodRecognizer::kFloat64x2Negate:
 3323 case MethodRecognizer::kFloat64x2Scale:
 3324 case MethodRecognizer::kFloat64x2Splat:
 3325 case MethodRecognizer::kFloat64x2Sqrt:
 3326 case MethodRecognizer::kFloat64x2ToFloat32x4:
 3327 case MethodRecognizer::kFloat64x2WithX:
 3328 case MethodRecognizer::kFloat64x2WithY:
 3329 case MethodRecognizer::kFloat64x2Zero:
 3330 case MethodRecognizer::kInt32x4FromBools:
 3331 case MethodRecognizer::kInt32x4FromInts:
 3332 case MethodRecognizer::kInt32x4GetFlagW:
 3333 case MethodRecognizer::kInt32x4GetFlagX:
 3334 case MethodRecognizer::kInt32x4GetFlagY:
 3335 case MethodRecognizer::kInt32x4GetFlagZ:
 3336 case MethodRecognizer::kInt32x4GetSignMask:
 3337 case MethodRecognizer::kInt32x4Select:
 3338 case MethodRecognizer::kInt32x4ToFloat32x4:
 3339 case MethodRecognizer::kInt32x4WithFlagW:
 3340 case MethodRecognizer::kInt32x4WithFlagX:
 3341 case MethodRecognizer::kInt32x4WithFlagY:
 3342 case MethodRecognizer::kInt32x4WithFlagZ:
 3343 case MethodRecognizer::kFloat32x4ShuffleMix:
 3344 case MethodRecognizer::kInt32x4ShuffleMix:
 3345 case MethodRecognizer::kFloat32x4Shuffle:
 3346 case MethodRecognizer::kInt32x4Shuffle:
 3347 case MethodRecognizer::kFloat32x4Mul:
 3348 case MethodRecognizer::kFloat32x4Div:
 3349 case MethodRecognizer::kFloat32x4Add:
 3350 case MethodRecognizer::kFloat32x4Sub:
 3351 case MethodRecognizer::kFloat64x2Mul:
 3352 case MethodRecognizer::kFloat64x2Div:
 3353 case MethodRecognizer::kFloat64x2Add:
 3354 case MethodRecognizer::kFloat64x2Sub:
 3355 return InlineSimdOp(flow_graph, is_dynamic_call, call, receiver, kind,
 3356 graph_entry, entry, last, result);
 3357
 3358 case MethodRecognizer::kMathIntPow:
 3359 return InlineMathIntPow(flow_graph, call, graph_entry, entry, last,
 3360 result);
 3361
 3362 case MethodRecognizer::kObjectConstructor: {
 3363 *entry = new (Z)
 3364 FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
 3365 call->GetBlock()->try_index(), DeoptId::kNone);
 3366 (*entry)->InheritDeoptTarget(Z, call);
 3367 ASSERT(!call->HasUses());
 3368 *last = nullptr; // Empty body.
 3369 *result =
 3370 nullptr; // Since no uses of original call, result will be unused.
 3371 return true;
 3372 }
 3373
 3374 case MethodRecognizer::kObjectArrayAllocate: {
 3375 Value* num_elements = new (Z) Value(call->ArgumentAt(1));
 3376 intptr_t length = 0;
 3377 if (IsSmiValue(num_elements, &length)) {
// NOTE(review): original line 3378 elided — the extra closing brace at
// line 3393 implies a nested guard here, presumably a length validity
// check (e.g. Array::IsValidLength(length)); confirm upstream.
 3379 Value* type = new (Z) Value(call->ArgumentAt(0));
 3380 *entry = new (Z)
 3381 FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
 3382 call->GetBlock()->try_index(), DeoptId::kNone);
 3383 (*entry)->InheritDeoptTarget(Z, call);
 3384 *last = new (Z) CreateArrayInstr(call->source(), type, num_elements,
 3385 call->deopt_id());
// NOTE(review): original line 3386 elided — presumably the head of a
// "flow_graph->AppendTo(" call; confirm upstream.
 3387 *entry, *last,
 3388 call->deopt_id() != DeoptId::kNone ? call->env() : nullptr,
// NOTE(review): original line 3389 elided — presumably the trailing
// use-kind argument (e.g. FlowGraph::kValue); confirm upstream.
 3390 *result = (*last)->AsDefinition();
 3391 return true;
 3392 }
 3393 }
 3394 return false;
 3395 }
 3396
 3397 case MethodRecognizer::kObjectRuntimeType: {
// NOTE(review): original line 3398 elided — given the uses of |type|
// below, presumably an AbstractType/Type handle declaration; confirm
// upstream.
 3399 if (receiver_cid == kDynamicCid) {
 3400 return false;
 3401 } else if (IsStringClassId(receiver_cid)) {
// NOTE(review): original line 3402 elided — presumably the String type
// assignment for this branch; confirm upstream.
 3403 } else if (receiver_cid == kDoubleCid) {
 3404 type = Type::Double();
 3405 } else if (IsIntegerClassId(receiver_cid)) {
 3406 type = Type::IntType();
 3407 } else if (IsTypeClassId(receiver_cid)) {
// NOTE(review): original line 3408 elided — presumably the Type-class
// type assignment for this branch; confirm upstream.
 3409 } else if ((receiver_cid != kClosureCid) &&
 3410 (receiver_cid != kRecordCid)) {
 3411 const Class& cls = Class::Handle(
 3412 Z, flow_graph->isolate_group()->class_table()->At(receiver_cid));
 3413 if (!cls.IsGeneric()) {
 3414 type = cls.DeclarationType();
 3415 }
 3416 }
 3417
 3418 if (!type.IsNull()) {
 3419 *entry = new (Z)
 3420 FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
 3421 call->GetBlock()->try_index(), DeoptId::kNone);
 3422 (*entry)->InheritDeoptTarget(Z, call);
 3423 ConstantInstr* ctype = flow_graph->GetConstant(type);
 3424 // Create a synthetic (re)definition for return to flag insertion.
 3425 // TODO(ajcbik): avoid this mechanism altogether
 3426 RedefinitionInstr* redef =
 3427 new (Z) RedefinitionInstr(new (Z) Value(ctype));
// NOTE(review): original line 3428 elided — presumably the head of a
// "flow_graph->AppendTo(" call; confirm upstream.
 3429 *entry, redef,
 3430 call->deopt_id() != DeoptId::kNone ? call->env() : nullptr,
// NOTE(review): original line 3431 elided — presumably the trailing
// use-kind argument (e.g. FlowGraph::kValue); confirm upstream.
 3432 *last = *result = redef;
 3433 return true;
 3434 }
 3435 return false;
 3436 }
 3437
 3438 case MethodRecognizer::kWriteIntoOneByteString:
 3439 case MethodRecognizer::kWriteIntoTwoByteString: {
 3440 // This is an internal method, no need to check argument types nor
 3441 // range.
 3442 *entry = new (Z)
 3443 FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
 3444 call->GetBlock()->try_index(), DeoptId::kNone);
 3445 (*entry)->InheritDeoptTarget(Z, call);
 3446 Definition* str = call->ArgumentAt(0);
 3447 Definition* index = call->ArgumentAt(1);
 3448 Definition* value = call->ArgumentAt(2);
 3449
 3450 const bool is_onebyte = kind == MethodRecognizer::kWriteIntoOneByteString;
 3451 const intptr_t index_scale = is_onebyte ? 1 : 2;
 3452 const intptr_t cid = is_onebyte ? kOneByteStringCid : kTwoByteStringCid;
 3453
 3454 // Insert explicit unboxing instructions with truncation to avoid relying
 3455 // on [SelectRepresentations] which doesn't mark them as truncating.
// NOTE(review): original lines 3456 and 3458 elided — presumably the
// head and tail of the truncating unbox instruction that reassigns
// |value|; confirm upstream.
 3457 new (Z) Value(value), call->deopt_id(),
 3459 flow_graph->AppendTo(*entry, value, call->env(), FlowGraph::kValue);
 3460
 3461 *last = new (Z) StoreIndexedInstr(
 3462 new (Z) Value(str), new (Z) Value(index), new (Z) Value(value),
 3463 kNoStoreBarrier, /*index_unboxed=*/false, index_scale, cid,
 3464 kAlignedAccess, call->deopt_id(), call->source());
 3465 flow_graph->AppendTo(value, *last, call->env(), FlowGraph::kEffect);
 3466
 3467 // We need a return value to replace uses of the original definition.
 3468 // The final instruction is a use of 'void operator[]=()', so we use null.
// NOTE(review): original line 3469 elided — presumably the assignment of
// the null constant to *result; confirm upstream.
 3470 return true;
 3471 }
 3472
 3473 default:
 3474 return false;
 3475 }
 3476}
3477
3478} // namespace dart
static float prev(float f)
#define check(reporter, ref, unref, make, kill)
static bool left(const SkPoint &p0, const SkPoint &p1)
static bool right(const SkPoint &p0, const SkPoint &p1)
SI void store(P *ptr, const T &val)
SI T load(const P *ptr)
static size_t element_size(Layout layout, SkSLType type)
static const char kValue[]
Definition Viewer.cpp:482
#define IG
#define UNREACHABLE()
Definition assert.h:248
#define RELEASE_ASSERT(cond)
Definition assert.h:327
#define Z
#define INIT_HANDLE(iface, member_name, type, cid)
#define TRY_INLINE(iface, member_name, type, cid)
#define PUBLIC_TYPED_DATA_CLASS_LIST(V)
bool IsTopTypeForSubtyping() const
Definition object.cc:21457
bool IsInt32x4Type() const
Definition object.cc:21501
bool IsFloat64x2Type() const
Definition object.cc:21495
virtual bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition object.cc:21200
bool IsDoubleType() const
Definition object.cc:21484
bool IsFloat32x4Type() const
Definition object.cc:21489
bool IsIntType() const
Definition object.cc:21472
static constexpr bool IsValidLength(intptr_t len)
Definition object.h:10906
void Add(const T &value)
const T & At(intptr_t index) const
void SetLength(intptr_t new_length)
intptr_t length() const
bool OperandsAre(intptr_t cid) const
Definition il.h:875
bool OperandsAreSmiOrMint() const
Definition il.h:868
bool IncludesOperands(intptr_t cid) const
Definition il.h:880
bool OperandsAreSmiOrNull() const
Definition il.h:865
bool ArgumentIs(intptr_t cid) const
Definition il.h:839
static const BinaryFeedback * CreateMonomorphic(Zone *zone, intptr_t receiver_cid, intptr_t argument_cid)
Definition il.cc:4113
bool OperandsAreSmiOrDouble() const
Definition il.h:871
intptr_t try_index() const
Definition il.h:1724
static const Bool & Get(bool value)
Definition object.h:10780
static const Bool & True()
Definition object.h:10776
static BoxInstr * Create(Representation from, Value *value)
Definition il.cc:4009
static bool HasSingleConcreteImplementation(const Class &interface, intptr_t *implementation_cid)
Definition cha.cc:127
virtual void VisitStaticCall(StaticCallInstr *instr)
void AddReceiverCheck(InstanceCallInstr *call)
bool TryReplaceWithEqualityOp(InstanceCallInstr *call, Token::Kind op_kind)
FlowGraph * flow_graph() const
void AddCheckNull(Value *to_check, const String &function_name, intptr_t deopt_id, Environment *deopt_environment, Instruction *insert_before)
void AddCheckClass(Definition *to_check, const Cids &cids, intptr_t deopt_id, Environment *deopt_environment, Instruction *insert_before)
void ReplaceCall(Definition *call, Definition *replacement)
bool TryInlineInstanceSetter(InstanceCallInstr *call)
void InsertBefore(Instruction *next, Instruction *instr, Environment *env, FlowGraph::UseKind use_kind)
void ReplaceCallWithResult(Definition *call, Instruction *replacement, Definition *result)
Thread * thread() const
virtual void VisitLoadCodeUnits(LoadCodeUnitsInstr *instr)
virtual bool TryCreateICData(InstanceCallInstr *call)
void InlineImplicitInstanceGetter(Definition *call, const Field &field)
bool TryInlineInstanceGetter(InstanceCallInstr *call)
bool TryReplaceWithRelationalOp(InstanceCallInstr *call, Token::Kind op_kind)
virtual bool TryOptimizeStaticCallUsingStaticTypes(StaticCallInstr *call)=0
bool TryReplaceWithUnaryOp(InstanceCallInstr *call, Token::Kind op_kind)
SpeculativeInliningPolicy * speculative_policy_
const Function & function() const
virtual void ReplaceInstanceCallsWithDispatchTableCalls()
void ReplaceWithInstanceOf(InstanceCallInstr *instr)
bool TryReplaceWithBinaryOp(InstanceCallInstr *call, Token::Kind op_kind)
bool TryInlineInstanceMethod(InstanceCallInstr *call)
virtual bool TryReplaceInstanceOfWithRangeCheck(InstanceCallInstr *call, const AbstractType &type)
void InsertSpeculativeBefore(Instruction *next, Instruction *instr, Environment *env, FlowGraph::UseKind use_kind)
static const CallTargets * CreateMonomorphic(Zone *zone, intptr_t receiver_cid, const Function &target)
Definition il.cc:4121
bool HasSingleTarget() const
Definition il.cc:5507
const Function & FirstTarget() const
Definition il.cc:5515
StaticTypeExactnessState MonomorphicExactness() const
Definition il.cc:809
static Cids * CreateMonomorphic(Zone *zone, intptr_t cid)
Definition il.cc:689
intptr_t MonomorphicReceiverCid() const
Definition il.cc:804
static Cids * CreateForArgument(Zone *zone, const BinaryFeedback &binary_feedback, int argument_number)
Definition il.cc:695
bool IsMonomorphic() const
Definition il.cc:799
ClassPtr At(intptr_t cid) const
intptr_t NumTypeArguments() const
Definition object.cc:3690
static bool IsSubtypeOf(const Class &cls, const TypeArguments &type_arguments, Nullability nullability, const AbstractType &other, Heap::Space space, FunctionTypeMapping *function_type_equivalence=nullptr)
Definition object.cc:5975
bool is_finalized() const
Definition object.h:1725
bool is_nullable() const
static CompilerState & Current()
const Object & value() const
Definition il.h:4212
Value * input_use_list() const
Definition il.h:2557
CompileType * Type()
Definition il.h:2503
virtual Definition * AsDefinition()
Definition il.h:2665
static constexpr intptr_t kNone
Definition deopt_id.h:27
static DoublePtr NewCanonical(double d)
Definition object.cc:23497
bool is_final() const
Definition object.h:4420
ClassPtr Owner() const
Definition object.cc:11911
bool NeedsInitializationCheckOnLoad() const
Definition object.h:4679
FieldPtr CloneFromOriginal() const
Definition object.cc:11786
bool needs_length_check() const
Definition object.h:4670
bool is_late() const
Definition object.h:4422
StaticTypeExactnessState static_type_exactness_state() const
Definition object.h:4606
StringPtr name() const
Definition object.h:4408
bool needs_load_guard() const
Definition object.h:4429
bool is_covariant() const
Definition object.h:4454
intptr_t guarded_cid() const
Definition object.cc:11800
bool is_generic_covariant_impl() const
Definition object.h:4460
AbstractTypePtr type() const
Definition object.h:4523
bool is_instance() const
Definition object.h:4419
static const CallTargets * ResolveCallTargetsForReceiverCid(intptr_t cid, const String &selector, const Array &args_desc_array)
static bool SupportsUnboxedDoubles()
static bool SupportsUnboxedSimd128()
static bool CanConvertInt64ToDouble()
ForwardInstructionIterator * current_iterator() const
Definition il.h:11792
ForwardInstructionIterator * current_iterator_
Definition il.h:11805
virtual void VisitBlocks()
Definition il.cc:1374
ConstantInstr * GetConstant(const Object &object, Representation representation=kTagged)
IsolateGroup * isolate_group() const
Definition flow_graph.h:262
Instruction * AppendTo(Instruction *prev, Instruction *instr, Environment *env, UseKind use_kind)
Zone * zone() const
Definition flow_graph.h:261
void ReplaceCurrentInstruction(ForwardInstructionIterator *iterator, Instruction *current, Instruction *replacement)
Instruction * AppendSpeculativeTo(Instruction *prev, Instruction *instr, Environment *env, UseKind use_kind)
Thread * thread() const
Definition flow_graph.h:260
ToCheck CheckForInstanceCall(InstanceCallInstr *call, UntaggedFunction::Kind kind) const
void AddExactnessGuard(InstanceCallInstr *call, intptr_t receiver_cid)
Definition * CreateCheckBound(Definition *length, Definition *index, intptr_t deopt_id)
void DiscoverBlocks()
ConstantInstr * constant_null() const
Definition flow_graph.h:270
Instruction * CreateCheckClass(Definition *to_check, const Cids &cids, intptr_t deopt_id, const InstructionSource &source)
const ParsedFunction & parsed_function() const
Definition flow_graph.h:129
BlockIterator reverse_postorder_iterator() const
Definition flow_graph.h:219
void InsertBefore(Instruction *next, Instruction *instr, Environment *env, UseKind use_kind)
Definition flow_graph.h:312
intptr_t allocate_block_id()
Definition flow_graph.h:266
Instruction * Current() const
Definition il.h:1847
static bool IsDynamicInvocationForwarderName(const String &name)
Definition object.cc:4240
bool IsInvokeFieldDispatcher() const
Definition object.h:3276
FieldPtr accessor_field() const
Definition object.cc:8207
@ kOld
Definition heap.h:39
intptr_t NumArgsTested() const
Definition object.cc:16518
bool HasDeoptReason(ICData::DeoptReasonId reason) const
Definition object.cc:16560
intptr_t GetReceiverClassIdAt(intptr_t index) const
Definition object.cc:17067
intptr_t NumberOfChecks() const
Definition object.cc:16624
Code::EntryKind entry_kind() const
Definition il.h:4741
const Function & interface_target() const
Definition il.h:4708
const String & function_name() const
Definition il.h:4706
const CallTargets & Targets()
Definition il.cc:5347
static intptr_t ElementSizeFor(intptr_t cid)
Definition object.cc:21008
void InheritDeoptTarget(Zone *zone, Instruction *other)
Definition il.cc:1560
virtual BlockEntryInstr * GetBlock()
Definition il.cc:1350
Environment * env() const
Definition il.h:1209
@ kNotSpeculative
Definition il.h:969
void RemoveEnvironment()
Definition il.cc:1280
virtual intptr_t ArgumentCount() const
Definition il.h:1035
Definition * ArgumentAt(intptr_t index) const
Definition il.h:3423
InstructionSource source() const
Definition il.h:1002
Value * ArgumentValueAt(intptr_t index) const
Definition il.h:3417
intptr_t deopt_id() const
Definition il.h:987
SafepointRwLock * program_lock()
Definition isolate.h:532
static IsolateGroup * Current()
Definition isolate.h:534
ClassTable * class_table() const
Definition isolate.h:491
static LibraryPtr LookupLibrary(Thread *thread, const String &url)
Definition object.cc:14646
bool can_pack_into_smi() const
Definition il.h:6858
void set_representation(Representation repr)
Definition il.h:6866
static Representation ReturnRepresentation(intptr_t array_cid)
Definition il.cc:6874
Value * right() const
Definition il.h:8922
Value * left() const
Definition il.h:8921
static intptr_t MethodKindToReceiverCid(Kind kind)
static ObjectPtr null()
Definition object.h:433
ObjectPtr ptr() const
Definition object.h:332
bool IsNull() const
Definition object.h:363
static Object & Handle()
Definition object.h:407
static Object & ZoneHandle()
Definition object.h:419
bool TryInline(MethodRecognizer::Kind kind)
SimdLowering(FlowGraph *flow_graph, Instruction *call, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
static SimdOpInstr * Create(Kind kind, Value *left, Value *right, intptr_t deopt_id)
Definition il.h:11268
static SimdOpInstr * CreateFromCall(Zone *zone, MethodRecognizer::Kind kind, Definition *receiver, Instruction *call, intptr_t mask=0)
Definition il.cc:8110
static SimdOpInstr * CreateFromFactoryCall(Zone *zone, MethodRecognizer::Kind kind, Instruction *call)
Definition il.cc:8164
static Kind KindForOperator(MethodRecognizer::Kind kind)
Definition il.cc:8085
static const Slot & Get(const Field &field, const ParsedFunction *parsed_function)
Definition slot.cc:351
static const Slot & GetLengthFieldForArrayCid(intptr_t array_cid)
Definition slot.cc:249
static const Slot & GetTypeArgumentsSlotFor(Thread *thread, const Class &cls)
Definition slot.cc:276
static SmiPtr New(intptr_t value)
Definition object.h:9985
bool IsAllowedForInlining(intptr_t call_deopt_id) const
Definition inliner.h:43
static StaticCallInstr * FromCall(Zone *zone, const C *call, const Function &target, intptr_t call_count)
Definition il.h:5535
static Representation ValueRepresentation(intptr_t array_cid)
Definition il.cc:6927
intptr_t FirstArgIndex() const
Definition il.h:4558
Value * Receiver() const
Definition il.h:4559
void CheckForSafepoint()
Definition thread.h:1091
IsolateGroup * isolate_group() const
Definition thread.h:540
static bool IsTypeTestOperator(Kind tok)
Definition token.h:244
static bool IsRelationalOperator(Kind tok)
Definition token.h:232
static bool IsBinaryOperator(Token::Kind token)
Definition token.cc:31
static bool IsEqualityOperator(Kind tok)
Definition token.h:236
static TypePtr DartTypeType()
Definition object.cc:21942
static TypePtr Double()
Definition object.cc:21902
static TypePtr StringType()
Definition object.cc:21930
static TypePtr IntType()
Definition object.cc:21886
virtual void VisitStaticCall(StaticCallInstr *instr)
virtual void VisitInstanceCall(InstanceCallInstr *instr)
static void Optimize(FlowGraph *flow_graph)
static UnboxInstr * Create(Representation to, Value *value, intptr_t deopt_id, SpeculativeMode speculative_mode=kGuardInputs)
Definition il.cc:4045
static constexpr bool IsPowerOfTwo(T x)
Definition utils.h:61
bool BindsToConstant() const
Definition il.cc:1181
const Object & BoundConstant() const
Definition il.cc:1199
Value * CopyWithType(Zone *zone)
Definition il.h:138
Definition * definition() const
Definition il.h:103
CompileType * Type()
#define COMPILER_TIMINGS_TIMER_SCOPE(thread, timer_id)
#define THR_Print(format,...)
Definition log.h:20
#define ASSERT(E)
static int square(int x)
Definition etc1.cpp:302
VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE auto & d
Definition main.cc:19
SkBitmap source
Definition examples.cpp:28
uint8_t value
GAsyncResult * result
uint32_t * target
#define DECLARE_FLAG(type, name)
Definition flags.h:14
Dart_NativeFunction function
Definition fuchsia.cc:51
size_t length
double y
double x
exit(kErrorExitCode)
link(from_root, to_root)
Definition dart_pkg.py:44
static bool CanConvertInt64ToDouble()
static bool CidTestResultsContains(const ZoneGrowableArray< intptr_t > &results, intptr_t test_cid)
static bool ShouldInlineSimd()
static bool InlineDoubleTestOp(FlowGraph *flow_graph, Instruction *call, Definition *receiver, MethodRecognizer::Kind kind, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
bool IsTypedDataBaseClassId(intptr_t index)
Definition class_id.h:429
static bool IsSmiValue(Value *val, intptr_t *int_val)
static constexpr Representation kUnboxedUword
Definition locations.h:171
static bool IsLengthOneString(Definition *d)
static bool InlineMathIntPow(FlowGraph *flow_graph, Instruction *call, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
bool IsTypeClassId(intptr_t index)
Definition class_id.h:370
static bool InlineSetIndexed(FlowGraph *flow_graph, MethodRecognizer::Kind kind, const Function &target, Instruction *call, Definition *receiver, const InstructionSource &source, CallSpecializer::ExactnessInfo *exactness, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
static bool CheckMask(Definition *definition, intptr_t *mask_ptr)
static void RefineUseTypes(Definition *instr)
int32_t classid_t
Definition globals.h:524
StoreBarrierType
Definition il.h:6252
@ kNoStoreBarrier
Definition il.h:6252
@ kEmitStoreBarrier
Definition il.h:6252
static bool InlineDoubleOp(FlowGraph *flow_graph, Token::Kind op_kind, Instruction *call, Definition *receiver, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
@ kIllegalCid
Definition class_id.h:214
@ kDynamicCid
Definition class_id.h:253
Representation
Definition locations.h:66
static bool ShouldSpecializeForDouble(const BinaryFeedback &binary_feedback)
static bool InlineGetIndexed(FlowGraph *flow_graph, bool can_speculate, bool is_dynamic_call, MethodRecognizer::Kind kind, Definition *call, Definition *receiver, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
static void TryAddTest(ZoneGrowableArray< intptr_t > *results, intptr_t test_cid, bool result)
static intptr_t PrepareInlineIndexedOp(FlowGraph *flow_graph, Instruction *call, intptr_t array_cid, Definition **array, Definition **index, Instruction **cursor)
static bool InlineStringBaseCodeUnitAt(FlowGraph *flow_graph, Instruction *call, Definition *receiver, intptr_t cid, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
static bool CanUnboxDouble()
static Instruction * InlineMul(FlowGraph *flow_graph, Instruction *cursor, Definition *x, Definition *y)
static bool InlineSimdOp(FlowGraph *flow_graph, bool is_dynamic_call, Instruction *call, Definition *receiver, MethodRecognizer::Kind kind, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
const intptr_t cid
static bool IsNumberCid(intptr_t cid)
static void PurgeNegativeTestCidsEntries(ZoneGrowableArray< intptr_t > *results)
static constexpr Representation kUnboxedIntPtr
Definition locations.h:176
static bool InlineGrowableArraySetter(FlowGraph *flow_graph, const Slot &field, StoreBarrierType store_barrier_type, Instruction *call, Definition *receiver, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
bool IsIntegerClassId(intptr_t index)
Definition class_id.h:340
static bool InlineStringBaseCharAt(FlowGraph *flow_graph, Instruction *call, Definition *receiver, intptr_t cid, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
static bool InlineLoadClassId(FlowGraph *flow_graph, Instruction *call, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
const char *const function_name
static CompileType * ResultType(Definition *call)
static Definition * PrepareInlineStringIndexOp(FlowGraph *flow_graph, Instruction *call, intptr_t cid, Definition *str, Definition *index, Instruction *cursor)
static bool InlineTypedDataIndexCheck(FlowGraph *flow_graph, Instruction *call, Definition *receiver, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result, const String &symbol)
static bool SmiFitsInDouble()
bool IsExternalTypedDataClassId(intptr_t index)
Definition class_id.h:447
@ kAlignedAccess
Definition il.h:6722
bool IsStringClassId(intptr_t index)
Definition class_id.h:350
call(args)
Definition dom.py:159
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive keep the shell running after the Dart script has completed enable serial On low power devices with low core running concurrent GC tasks on threads can cause them to contend with the UI thread which could potentially lead to jank This option turns off all concurrent GC activities domain network policy
Definition switches.h:248
static constexpr bool IsUnboxedInteger(Representation rep)
Definition locations.h:92
static constexpr bool IsUnboxed(Representation rep)
Definition locations.h:101