Flutter Engine
The Flutter Engine
call_specializer.cc
Go to the documentation of this file.
1// Copyright (c) 2017, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
6
9#include "vm/compiler/cha.h"
12#include "vm/cpu.h"
13
14namespace dart {
15
16DECLARE_FLAG(bool, enable_simd_inline);
17
18// Quick access to the current isolate and zone.
19#define IG (isolate_group())
20#define Z (zone())
21
22static void RefineUseTypes(Definition* instr) {
23 CompileType* new_type = instr->Type();
24 for (Value::Iterator it(instr->input_use_list()); !it.Done(); it.Advance()) {
25 it.Current()->RefineReachingType(new_type);
26 }
27}
28
29static bool ShouldInlineSimd() {
31}
32
35}
36
37static bool IsNumberCid(intptr_t cid) {
38 return (cid == kSmiCid) || (cid == kDoubleCid);
39}
40
41static bool ShouldSpecializeForDouble(const BinaryFeedback& binary_feedback) {
42 // Unboxed double operation can't handle case of two smis.
43 if (binary_feedback.IncludesOperands(kSmiCid)) {
44 return false;
45 }
46
47 // Check that the call site has seen only smis and doubles.
48 return binary_feedback.OperandsAreSmiOrDouble();
49}
50
51// Optimize instance calls using ICData.
54}
55
56// Optimize instance calls using cid. This is called after optimizer
57// converted instance calls to instructions. Any remaining
58// instance calls are either megamorphic calls, cannot be optimized or
59// have no runtime type feedback collected.
60// Attempts to convert an instance call (IC call) using propagated class-ids,
61// e.g., receiver class id, guarded-cid, or by guessing cid-s.
// NOTE(review): doc-dump excerpt — the enclosing function's signature line
// (original line 62) is missing here; code below kept byte-identical.
// Visits every block in reverse postorder and tries to specialize the calls
// found in each instruction stream.
63 ASSERT(current_iterator_ == nullptr);
64 for (BlockIterator block_it = flow_graph_->reverse_postorder_iterator();
65 !block_it.Done(); block_it.Advance()) {
67 ForwardInstructionIterator it(block_it.Current());
69 for (; !it.Done(); it.Advance()) {
70 Instruction* instr = it.Current();
71 if (instr->IsInstanceCall()) {
72 InstanceCallInstr* call = instr->AsInstanceCall();
73 if (call->HasICData()) {
74 if (TryCreateICData(call)) {
75 VisitInstanceCall(call);
76 }
77 }
78 } else if (auto static_call = instr->AsStaticCall()) {
79 // If TFA devirtualized instance calls to static calls we also want to
80 // process them here.
81 VisitStaticCall(static_call);
82 } else if (instr->IsPolymorphicInstanceCall()) {
83 SpecializePolymorphicInstanceCall(instr->AsPolymorphicInstanceCall());
84 }
85 }
86 current_iterator_ = nullptr;
87 }
88}
89
// NOTE(review): doc-dump excerpt — the signature line (original line 90) and
// lines 150/155 are missing from this view; code below kept byte-identical.
// Body of TryCreateICData: synthesizes monomorphic targets/feedback for a
// call from the propagated cids of its arguments.
91 ASSERT(call->HasICData());
92
93 if (call->Targets().length() > 0) {
94 // This occurs when an instance call has too many checks, will be converted
95 // to megamorphic call.
96 return false;
97 }
98
99 const intptr_t receiver_index = call->FirstArgIndex();
100 GrowableArray<intptr_t> class_ids(call->ic_data()->NumArgsTested());
101 ASSERT(call->ic_data()->NumArgsTested() <=
102 call->ArgumentCountWithoutTypeArgs());
103 for (intptr_t i = 0; i < call->ic_data()->NumArgsTested(); i++) {
104 class_ids.Add(call->ArgumentValueAt(receiver_index + i)->Type()->ToCid());
105 }
106
107 const Token::Kind op_kind = call->token_kind();
108 if (FLAG_guess_icdata_cid && !CompilerState::Current().is_aot()) {
109 if (Token::IsRelationalOperator(op_kind) ||
110 Token::IsEqualityOperator(op_kind) ||
111 Token::IsBinaryOperator(op_kind)) {
112 // Guess cid: if one of the inputs is a number assume that the other
113 // is a number of same type, unless the interface target tells us this
114 // is impossible.
115 if (call->CanReceiverBeSmiBasedOnInterfaceTarget(zone())) {
116 const intptr_t cid_0 = class_ids[0];
117 const intptr_t cid_1 = class_ids[1];
118 if ((cid_0 == kDynamicCid) && (IsNumberCid(cid_1))) {
119 class_ids[0] = cid_1;
120 } else if (IsNumberCid(cid_0) && (cid_1 == kDynamicCid)) {
121 class_ids[1] = cid_0;
122 }
123 }
124 }
125 }
126
127 bool all_cids_known = true;
128 for (intptr_t i = 0; i < class_ids.length(); i++) {
129 if (class_ids[i] == kDynamicCid) {
130 // Not all cid-s known.
131 all_cids_known = false;
132 break;
133 }
134 }
135
136 if (all_cids_known) {
137 const intptr_t receiver_cid = class_ids[0];
138 if (receiver_cid == kSentinelCid) {
139 // Unreachable call.
140 return false;
141 }
142 const Class& receiver_class =
143 Class::Handle(Z, IG->class_table()->At(receiver_cid));
144 if (!receiver_class.is_finalized()) {
145 // Do not eagerly finalize classes. ResolveDynamicForReceiverClass can
146 // cause class finalization, since callee's receiver class may not be
147 // finalized yet.
148 return false;
149 }
// NOTE(review): original line 150 (the declaration the next line continues,
// presumably a Function handle) is missing from this excerpt.
151 Z, call->ResolveForReceiverClass(receiver_class, /*allow_add=*/false));
152 if (function.IsNull()) {
153 return false;
154 }
156
157 // Update the CallTargets attached to the instruction with our speculative
158 // target. The next round of CallSpecializer::VisitInstanceCall will make
159 // use of this.
160 call->SetTargets(CallTargets::CreateMonomorphic(Z, class_ids[0], function));
161 if (class_ids.length() == 2) {
162 call->SetBinaryFeedback(
163 BinaryFeedback::CreateMonomorphic(Z, class_ids[0], class_ids[1]));
164 }
165 return true;
166 }
167
168 return false;
169}
170
// NOTE(review): doc-dump excerpt — the parameter line (original line 172) and
// line 186 are missing from this view; code below kept byte-identical.
// Replaces a polymorphic call with a static call when the receiver cid is
// known and resolves to a single target.
171void CallSpecializer::SpecializePolymorphicInstanceCall(
173 if (!FLAG_polymorphic_with_deopt) {
174 // Specialization adds receiver checks which can lead to deoptimization.
175 return;
176 }
177
178 const intptr_t receiver_cid = call->Receiver()->Type()->ToCid();
179 if (receiver_cid == kDynamicCid) {
180 return; // No information about receiver was inferred.
181 }
182
183 const ICData& ic_data = *call->ic_data();
184
185 const CallTargets* targets =
187 receiver_cid, String::Handle(zone(), ic_data.target_name()),
188 Array::Handle(zone(), ic_data.arguments_descriptor()));
189 if (targets == nullptr) {
190 // No specialization.
191 return;
192 }
193
194 ASSERT(targets->HasSingleTarget());
195 const Function& target = targets->FirstTarget();
196 StaticCallInstr* specialized =
197 StaticCallInstr::FromCall(Z, call, target, targets->AggregateCallCount());
198 call->ReplaceWith(specialized, current_iterator());
199}
200
// NOTE(review): doc-dump excerpt — the first signature line (original line
// 201) and line 203 are missing; code below kept byte-identical.
// Replaces |call| with |replacement|; when |result| is null the replacement
// itself must be a definition and takes over the call's uses.
202 Instruction* replacement,
204 ASSERT(!call->HasMoveArguments());
205 if (result == nullptr) {
206 ASSERT(replacement->IsDefinition());
207 call->ReplaceWith(replacement->AsDefinition(), current_iterator());
208 } else {
209 call->ReplaceWithResult(replacement, result, current_iterator());
210 }
211}
212
// NOTE(review): doc-dump excerpt — the signature line (original line 213) is
// missing; body kept byte-identical. Convenience wrapper: replace with a
// definition and no separate result.
214 ReplaceCallWithResult(call, replacement, nullptr);
215}
216
217void CallSpecializer::AddCheckSmi(Definition* to_check,
218 intptr_t deopt_id,
219 Environment* deopt_environment,
220 Instruction* insert_before) {
221 // TODO(alexmarkov): check reaching type instead of definition type
222 if (to_check->Type()->ToCid() != kSmiCid) {
223 InsertBefore(insert_before,
224 new (Z) CheckSmiInstr(new (Z) Value(to_check), deopt_id,
225 insert_before->source()),
226 deopt_environment, FlowGraph::kEffect);
227 }
228}
229
// NOTE(review): doc-dump excerpt — the first signature line (original line
// 230) is missing; code below kept byte-identical.
// Inserts a class check of |to_check| against |cids| before |insert_before|.
231 const Cids& cids,
232 intptr_t deopt_id,
233 Environment* deopt_environment,
234 Instruction* insert_before) {
235 // Type propagation has not run yet, we cannot eliminate the check.
236 Instruction* check = flow_graph_->CreateCheckClass(to_check, cids, deopt_id,
237 insert_before->source());
238 InsertBefore(insert_before, check, deopt_environment, FlowGraph::kEffect);
239}
240
241void CallSpecializer::AddChecksForArgNr(InstanceCallInstr* call,
242 Definition* argument,
243 int argument_number) {
244 const Cids* cids =
245 Cids::CreateForArgument(zone(), call->BinaryFeedback(), argument_number);
246 AddCheckClass(argument, *cids, call->deopt_id(), call->env(), call);
247}
248
// NOTE(review): doc-dump excerpt — the first signature line (original line
// 249) and line 262 (presumably the FlowGraph::kEffect argument) are missing;
// code below kept byte-identical.
// Inserts a null check for |to_check| before |insert_before| when its type
// is nullable.
250 const String& function_name,
251 intptr_t deopt_id,
252 Environment* deopt_environment,
253 Instruction* insert_before) {
254 if (to_check->Type()->is_nullable()) {
255 CheckNullInstr* check_null =
256 new (Z) CheckNullInstr(to_check->CopyWithType(Z), function_name,
257 deopt_id, insert_before->source());
258 if (FLAG_trace_strong_mode_types) {
259 THR_Print("[Strong mode] Inserted %s\n", check_null->ToCString());
260 }
261 InsertBefore(insert_before, check_null, deopt_environment,
263 }
264}
265
266// Return true if d is a string of length one (a constant, or the result
267// of a string-from-char-code instruction).
// NOTE(review): doc-dump excerpt — the signature line (original line 268) is
// missing; code below kept byte-identical.
269 if (d->IsConstant()) {
270 const Object& obj = d->AsConstant()->value();
271 if (obj.IsString()) {
272 return String::Cast(obj).Length() == 1;
273 } else {
274 return false;
275 }
276 } else {
277 return d->IsOneByteStringFromCharCode();
278 }
279}
280
281// Returns true if the string comparison was converted into char-code
282// comparison. Conversion is only possible for strings of length one.
283// E.g., detect str[x] == "x"; and use an integer comparison of char-codes.
// NOTE(review): intricate IR surgery (insertion order and dead-code removal
// matter), so the code is kept byte-identical; only comments added.
284bool CallSpecializer::TryStringLengthOneEquality(InstanceCallInstr* call,
285 Token::Kind op_kind) {
286 ASSERT(call->BinaryFeedback().OperandsAre(kOneByteStringCid));
287 // Check that left and right are length one strings (either string constants
288 // or results of string-from-char-code.
289 Definition* left = call->ArgumentAt(0);
290 Definition* right = call->ArgumentAt(1);
291 Value* left_val = nullptr;
292 Definition* to_remove_left = nullptr;
293 if (IsLengthOneString(right)) {
294 // Swap, since we know that both arguments are strings
295 Definition* temp = left;
296 left = right;
297 right = temp;
298 }
299 if (IsLengthOneString(left)) {
300 // Optimize if left is a string with length one (either constant or
301 // result of string-from-char-code.
302 if (left->IsConstant()) {
303 ConstantInstr* left_const = left->AsConstant();
304 const String& str = String::Cast(left_const->value());
305 ASSERT(str.Length() == 1);
306 ConstantInstr* char_code_left = flow_graph()->GetConstant(
307 Smi::ZoneHandle(Z, Smi::New(static_cast<intptr_t>(str.CharAt(0)))));
308 left_val = new (Z) Value(char_code_left);
309 } else if (left->IsOneByteStringFromCharCode()) {
310 // Use input of string-from-charcode as left value.
311 OneByteStringFromCharCodeInstr* instr =
312 left->AsOneByteStringFromCharCode();
313 left_val = new (Z) Value(instr->char_code()->definition());
314 to_remove_left = instr;
315 } else {
316 // IsLengthOneString(left) should have been false.
317 UNREACHABLE();
318 }
319
320 Definition* to_remove_right = nullptr;
321 Value* right_val = nullptr;
322 if (right->IsOneByteStringFromCharCode()) {
323 // Skip string-from-char-code, and use its input as right value.
324 OneByteStringFromCharCodeInstr* right_instr =
325 right->AsOneByteStringFromCharCode();
326 right_val = new (Z) Value(right_instr->char_code()->definition());
327 to_remove_right = right_instr;
328 } else {
329 AddChecksForArgNr(call, right, /* arg_number = */ 1);
330 // String-to-char-code instructions returns -1 (illegal charcode) if
331 // string is not of length one.
332 StringToCharCodeInstr* char_code_right = new (Z)
333 StringToCharCodeInstr(new (Z) Value(right), kOneByteStringCid);
334 InsertBefore(call, char_code_right, call->env(), FlowGraph::kValue);
335 right_val = new (Z) Value(char_code_right);
336 }
337
338 // Comparing char-codes instead of strings.
339 EqualityCompareInstr* comp =
340 new (Z) EqualityCompareInstr(call->source(), op_kind, left_val,
341 right_val, kSmiCid, call->deopt_id());
342 ReplaceCall(call, comp);
343
344 // Remove dead instructions.
345 if ((to_remove_left != nullptr) &&
346 (to_remove_left->input_use_list() == nullptr)) {
347 to_remove_left->ReplaceUsesWith(flow_graph()->constant_null());
348 to_remove_left->RemoveFromGraph();
349 }
350 if ((to_remove_right != nullptr) &&
351 (to_remove_right->input_use_list() == nullptr)) {
352 to_remove_right->ReplaceUsesWith(flow_graph()->constant_null());
353 to_remove_right->RemoveFromGraph();
354 }
355 return true;
356 }
357 return false;
358}
359
360static bool SmiFitsInDouble() {
361 return compiler::target::kSmiBits < 53;
362}
363
// NOTE(review): doc-dump excerpt — the first signature line (original line
// 364) and lines 377/381/398/400-401/433-434 are missing; code below kept
// byte-identical. Body of TryReplaceWithEqualityOp: specializes == / != on
// strings, smis, mints, doubles, or smi-or-null feedback.
365 Token::Kind op_kind) {
366 const BinaryFeedback& binary_feedback = call->BinaryFeedback();
367
368 ASSERT(call->type_args_len() == 0);
369 ASSERT(call->ArgumentCount() == 2);
370 Definition* const left = call->ArgumentAt(0);
371 Definition* const right = call->ArgumentAt(1);
372
373 intptr_t cid = kIllegalCid;
374 if (binary_feedback.OperandsAre(kOneByteStringCid)) {
375 return TryStringLengthOneEquality(call, op_kind);
376 } else if (binary_feedback.OperandsAre(kSmiCid)) {
378 new (Z) CheckSmiInstr(new (Z) Value(left), call->deopt_id(),
379 call->source()),
380 call->env(), FlowGraph::kEffect);
382 new (Z) CheckSmiInstr(new (Z) Value(right), call->deopt_id(),
383 call->source()),
384 call->env(), FlowGraph::kEffect);
385 cid = kSmiCid;
386 } else if (binary_feedback.OperandsAreSmiOrMint()) {
387 cid = kMintCid;
388 } else if (binary_feedback.OperandsAreSmiOrDouble()) {
389 // Use double comparison.
390 if (SmiFitsInDouble()) {
391 cid = kDoubleCid;
392 } else {
393 if (binary_feedback.IncludesOperands(kSmiCid)) {
394 // We cannot use double comparison on two smis. Need polymorphic
395 // call.
396 return false;
397 } else {
399 call,
401 new (Z) Value(left), new (Z) Value(right), call->deopt_id()),
402 call->env(), FlowGraph::kEffect);
403 cid = kDoubleCid;
404 }
405 }
406 } else {
407 // Check if ICDData contains checks with Smi/Null combinations. In that case
408 // we can still emit the optimized Smi equality operation but need to add
409 // checks for null or Smi.
410 if (binary_feedback.OperandsAreSmiOrNull()) {
411 AddChecksForArgNr(call, left, /* arg_number = */ 0);
412 AddChecksForArgNr(call, right, /* arg_number = */ 1);
413
414 cid = kSmiCid;
415 } else {
416 // Shortcut for equality with null.
417 // TODO(vegorov): this optimization is not speculative and should
418 // be hoisted out of this function.
419 ConstantInstr* right_const = right->AsConstant();
420 ConstantInstr* left_const = left->AsConstant();
421 if ((right_const != nullptr && right_const->value().IsNull()) ||
422 (left_const != nullptr && left_const->value().IsNull())) {
423 StrictCompareInstr* comp = new (Z)
424 StrictCompareInstr(call->source(), Token::kEQ_STRICT,
425 new (Z) Value(left), new (Z) Value(right),
426 /* number_check = */ false, DeoptId::kNone);
427 ReplaceCall(call, comp);
428 return true;
429 }
430 return false;
431 }
432 }
435 new (Z) EqualityCompareInstr(call->source(), op_kind, new (Z) Value(left),
436 new (Z) Value(right), cid, call->deopt_id());
437 ReplaceCall(call, comp);
438 return true;
439}
440
// NOTE(review): doc-dump excerpt — the first signature line (original line
// 441) and lines 452/456/473/475/484 are missing; code below kept
// byte-identical. Body of TryReplaceWithRelationalOp: specializes <, <=, >,
// >= on smi, mint, or smi-or-double feedback.
443 ASSERT(call->type_args_len() == 0);
444 ASSERT(call->ArgumentCount() == 2);
445
446 const BinaryFeedback& binary_feedback = call->BinaryFeedback();
447 Definition* left = call->ArgumentAt(0);
448 Definition* right = call->ArgumentAt(1);
449
450 intptr_t cid = kIllegalCid;
451 if (binary_feedback.OperandsAre(kSmiCid)) {
453 new (Z) CheckSmiInstr(new (Z) Value(left), call->deopt_id(),
454 call->source()),
455 call->env(), FlowGraph::kEffect);
457 new (Z) CheckSmiInstr(new (Z) Value(right), call->deopt_id(),
458 call->source()),
459 call->env(), FlowGraph::kEffect);
460 cid = kSmiCid;
461 } else if (binary_feedback.OperandsAreSmiOrMint()) {
462 cid = kMintCid;
463 } else if (binary_feedback.OperandsAreSmiOrDouble()) {
464 // Use double comparison.
465 if (SmiFitsInDouble()) {
466 cid = kDoubleCid;
467 } else {
468 if (binary_feedback.IncludesOperands(kSmiCid)) {
469 // We cannot use double comparison on two smis. Need polymorphic
470 // call.
471 return false;
472 } else {
474 call,
476 new (Z) Value(left), new (Z) Value(right), call->deopt_id()),
477 call->env(), FlowGraph::kEffect);
478 cid = kDoubleCid;
479 }
480 }
481 } else {
482 return false;
483 }
485 RelationalOpInstr* comp =
486 new (Z) RelationalOpInstr(call->source(), op_kind, new (Z) Value(left),
487 new (Z) Value(right), cid, call->deopt_id());
488 ReplaceCall(call, comp);
489 return true;
490}
491
493 Token::Kind op_kind) {
494 intptr_t operands_type = kIllegalCid;
495 ASSERT(call->HasICData());
496 const BinaryFeedback& binary_feedback = call->BinaryFeedback();
497 switch (op_kind) {
498 case Token::kADD:
499 case Token::kSUB:
500 case Token::kMUL:
501 if (binary_feedback.OperandsAre(kSmiCid)) {
502 // Don't generate smi code if the IC data is marked because
503 // of an overflow.
504 operands_type =
505 call->ic_data()->HasDeoptReason(ICData::kDeoptBinarySmiOp)
506 ? kMintCid
507 : kSmiCid;
508 } else if (binary_feedback.OperandsAreSmiOrMint()) {
509 // Don't generate mint code if the IC data is marked because of an
510 // overflow.
511 if (call->ic_data()->HasDeoptReason(ICData::kDeoptBinaryInt64Op))
512 return false;
513 operands_type = kMintCid;
514 } else if (ShouldSpecializeForDouble(binary_feedback)) {
515 operands_type = kDoubleCid;
516 } else if (binary_feedback.OperandsAre(kFloat32x4Cid)) {
517 operands_type = kFloat32x4Cid;
518 } else if (binary_feedback.OperandsAre(kInt32x4Cid)) {
519 ASSERT(op_kind != Token::kMUL); // Int32x4 doesn't have a multiply op.
520 operands_type = kInt32x4Cid;
521 } else if (binary_feedback.OperandsAre(kFloat64x2Cid)) {
522 operands_type = kFloat64x2Cid;
523 } else {
524 return false;
525 }
526 break;
527 case Token::kDIV:
528 if (ShouldSpecializeForDouble(binary_feedback) ||
529 binary_feedback.OperandsAre(kSmiCid)) {
530 operands_type = kDoubleCid;
531 } else if (binary_feedback.OperandsAre(kFloat32x4Cid)) {
532 operands_type = kFloat32x4Cid;
533 } else if (binary_feedback.OperandsAre(kFloat64x2Cid)) {
534 operands_type = kFloat64x2Cid;
535 } else {
536 return false;
537 }
538 break;
539 case Token::kBIT_AND:
540 case Token::kBIT_OR:
541 case Token::kBIT_XOR:
542 if (binary_feedback.OperandsAre(kSmiCid)) {
543 operands_type = kSmiCid;
544 } else if (binary_feedback.OperandsAreSmiOrMint()) {
545 operands_type = kMintCid;
546 } else if (binary_feedback.OperandsAre(kInt32x4Cid)) {
547 operands_type = kInt32x4Cid;
548 } else {
549 return false;
550 }
551 break;
552 case Token::kSHL:
553 case Token::kSHR:
554 case Token::kUSHR:
555 if (binary_feedback.OperandsAre(kSmiCid)) {
556 // Left shift may overflow from smi into mint.
557 // Don't generate smi code if the IC data is marked because
558 // of an overflow.
559 if (call->ic_data()->HasDeoptReason(ICData::kDeoptBinaryInt64Op)) {
560 return false;
561 }
562 operands_type =
563 call->ic_data()->HasDeoptReason(ICData::kDeoptBinarySmiOp)
564 ? kMintCid
565 : kSmiCid;
566 } else if (binary_feedback.OperandsAreSmiOrMint() &&
567 binary_feedback.ArgumentIs(kSmiCid)) {
568 // Don't generate mint code if the IC data is marked because of an
569 // overflow.
570 if (call->ic_data()->HasDeoptReason(ICData::kDeoptBinaryInt64Op)) {
571 return false;
572 }
573 // Check for smi/mint << smi or smi/mint >> smi.
574 operands_type = kMintCid;
575 } else {
576 return false;
577 }
578 break;
579 case Token::kMOD:
580 case Token::kTRUNCDIV:
581 if (binary_feedback.OperandsAre(kSmiCid)) {
582 if (call->ic_data()->HasDeoptReason(ICData::kDeoptBinarySmiOp)) {
583 return false;
584 }
585 operands_type = kSmiCid;
586 } else {
587 return false;
588 }
589 break;
590 default:
591 UNREACHABLE();
592 }
593
594 ASSERT(call->type_args_len() == 0);
595 ASSERT(call->ArgumentCount() == 2);
596 Definition* left = call->ArgumentAt(0);
597 Definition* right = call->ArgumentAt(1);
598 if (operands_type == kDoubleCid) {
599 // Check that either left or right are not a smi. Result of a
600 // binary operation with two smis is a smi not a double, except '/' which
601 // returns a double for two smis.
602 if (op_kind != Token::kDIV) {
604 call,
606 new (Z) Value(left), new (Z) Value(right), call->deopt_id()),
607 call->env(), FlowGraph::kEffect);
608 }
609
610 BinaryDoubleOpInstr* double_bin_op = new (Z)
611 BinaryDoubleOpInstr(op_kind, new (Z) Value(left), new (Z) Value(right),
612 call->deopt_id(), call->source());
613 ReplaceCall(call, double_bin_op);
614 } else if (operands_type == kMintCid) {
615 if ((op_kind == Token::kSHL) || (op_kind == Token::kSHR) ||
616 (op_kind == Token::kUSHR)) {
617 SpeculativeShiftInt64OpInstr* shift_op = new (Z)
618 SpeculativeShiftInt64OpInstr(op_kind, new (Z) Value(left),
619 new (Z) Value(right), call->deopt_id());
620 ReplaceCall(call, shift_op);
621 } else {
623 op_kind, new (Z) Value(left), new (Z) Value(right), call->deopt_id());
624 ReplaceCall(call, bin_op);
625 }
626 } else if ((operands_type == kFloat32x4Cid) ||
627 (operands_type == kInt32x4Cid) ||
628 (operands_type == kFloat64x2Cid)) {
629 return InlineSimdBinaryOp(call, operands_type, op_kind);
630 } else if (op_kind == Token::kMOD) {
631 ASSERT(operands_type == kSmiCid);
632 if (right->IsConstant()) {
633 const Object& obj = right->AsConstant()->value();
634 if (obj.IsSmi() && Utils::IsPowerOfTwo(Smi::Cast(obj).Value())) {
635 // Insert smi check and attach a copy of the original environment
636 // because the smi operation can still deoptimize.
638 new (Z) CheckSmiInstr(new (Z) Value(left),
639 call->deopt_id(), call->source()),
640 call->env(), FlowGraph::kEffect);
641 ConstantInstr* constant = flow_graph()->GetConstant(
642 Smi::Handle(Z, Smi::New(Smi::Cast(obj).Value() - 1)));
643 BinarySmiOpInstr* bin_op =
644 new (Z) BinarySmiOpInstr(Token::kBIT_AND, new (Z) Value(left),
645 new (Z) Value(constant), call->deopt_id());
646 ReplaceCall(call, bin_op);
647 return true;
648 }
649 }
650 // Insert two smi checks and attach a copy of the original
651 // environment because the smi operation can still deoptimize.
652 AddCheckSmi(left, call->deopt_id(), call->env(), call);
653 AddCheckSmi(right, call->deopt_id(), call->env(), call);
654 BinarySmiOpInstr* bin_op = new (Z) BinarySmiOpInstr(
655 op_kind, new (Z) Value(left), new (Z) Value(right), call->deopt_id());
656 ReplaceCall(call, bin_op);
657 } else {
658 ASSERT(operands_type == kSmiCid);
659 // Insert two smi checks and attach a copy of the original
660 // environment because the smi operation can still deoptimize.
661 AddCheckSmi(left, call->deopt_id(), call->env(), call);
662 AddCheckSmi(right, call->deopt_id(), call->env(), call);
663 if (left->IsConstant() &&
664 ((op_kind == Token::kADD) || (op_kind == Token::kMUL))) {
665 // Constant should be on the right side.
666 Definition* temp = left;
667 left = right;
668 right = temp;
669 }
670 BinarySmiOpInstr* bin_op = new (Z) BinarySmiOpInstr(
671 op_kind, new (Z) Value(left), new (Z) Value(right), call->deopt_id());
672 ReplaceCall(call, bin_op);
673 }
674 return true;
675}
676
// NOTE(review): doc-dump excerpt — the first signature line (original line
// 677) and lines 684/696 are missing; code below kept byte-identical.
// Body of TryReplaceWithUnaryOp: emits a unary smi/int64/double instruction
// based on the receiver cid feedback.
679 ASSERT(call->type_args_len() == 0);
680 ASSERT(call->ArgumentCount() == 1);
681 Definition* input = call->ArgumentAt(0);
682 Definition* unary_op = nullptr;
683 if (call->Targets().ReceiverIs(kSmiCid)) {
685 new (Z) CheckSmiInstr(new (Z) Value(input), call->deopt_id(),
686 call->source()),
687 call->env(), FlowGraph::kEffect);
688 unary_op = new (Z)
689 UnarySmiOpInstr(op_kind, new (Z) Value(input), call->deopt_id());
690 } else if ((op_kind == Token::kBIT_NOT) &&
691 call->Targets().ReceiverIsSmiOrMint()) {
692 unary_op = new (Z)
693 UnaryInt64OpInstr(op_kind, new (Z) Value(input), call->deopt_id());
694 } else if (call->Targets().ReceiverIs(kDoubleCid) &&
695 (op_kind == Token::kNEGATE)) {
697 unary_op = new (Z) UnaryDoubleOpInstr(Token::kNEGATE, new (Z) Value(input),
698 call->deopt_id());
699 } else {
700 return false;
701 }
702 ASSERT(unary_op != nullptr);
703 ReplaceCall(call, unary_op);
704 return true;
705}
706
// NOTE(review): doc-dump excerpt — interior lines (717, 723, 727, 731, 733,
// 736) are missing, including the switch case labels; code below kept
// byte-identical.
// Inlines an implicit instance getter whose field does not need a load
// guard, adding the receiver checks required by CheckForInstanceCall.
707bool CallSpecializer::TryInlineImplicitInstanceGetter(InstanceCallInstr* call) {
708 const CallTargets& targets = call->Targets();
709 ASSERT(targets.HasSingleTarget());
710
711 // Inline implicit instance getter.
712 Field& field = Field::ZoneHandle(Z, targets.FirstTarget().accessor_field());
713 ASSERT(!field.IsNull());
714 if (field.needs_load_guard()) {
715 return false;
716 }
718 field = field.CloneFromOriginal();
719 }
720
721 switch (flow_graph()->CheckForInstanceCall(
722 call, UntaggedFunction::kImplicitGetter)) {
724 AddCheckNull(call->Receiver(), call->function_name(), call->deopt_id(),
725 call->env(), call);
726 break;
728 if (CompilerState::Current().is_aot()) {
729 return false; // AOT cannot class check
730 }
732 break;
734 break;
735 }
737 return true;
738}
739
// NOTE(review): doc-dump excerpt — the first signature line (original line
// 740) and lines 747/760 are missing; code below kept byte-identical.
// Replaces the getter call with a LoadField of the field's slot, keeping the
// environment only when the load may invoke the field initializer.
741 const Field& field) {
742 ASSERT(field.is_instance());
743 Definition* receiver = call->ArgumentAt(0);
744
745 const bool calls_initializer = field.NeedsInitializationCheckOnLoad();
746 const Slot& slot = Slot::Get(field, &flow_graph()->parsed_function());
748 new (Z) Value(receiver), slot, call->source(), calls_initializer,
749 calls_initializer ? call->deopt_id() : DeoptId::kNone);
750
751 // Note that this is a case of LoadField -> InstanceCall lazy deopt.
752 // Which means that we don't need to remove arguments from the environment
753 // because normal getter call expects receiver pushed (unlike the case
754 // of LoadField -> LoadField deoptimization handled by
755 // FlowGraph::AttachEnvironment).
756 if (!calls_initializer) {
757 // If we don't call initializer then we don't need an environment.
758 call->RemoveEnvironment();
759 }
761
762 if (load->slot().type().ToNullableCid() != kDynamicCid) {
763 // Reset value types if we know concrete cid.
764 for (Value::Iterator it(load->input_use_list()); !it.Done(); it.Advance()) {
765 it.Current()->SetReachingType(nullptr);
766 }
767 }
768}
769
// NOTE(review): doc-dump excerpt — the signature line (original line 770) and
// several interior lines (787, 796, 800, 806, 815, 824, 832, 839-840, 884,
// 895) are missing; code below kept byte-identical.
// Inlines an implicit instance setter: adds receiver/field guards, an
// AssertAssignable when the static type requires it, then replaces the call
// with a StoreField returning null.
771 const CallTargets& targets = instr->Targets();
772 if (!targets.HasSingleTarget()) {
773 // Polymorphic sites are inlined like normal method calls by conventional
774 // inlining.
775 return false;
776 }
777 const Function& target = targets.FirstTarget();
778 if (target.kind() != UntaggedFunction::kImplicitSetter) {
779 // Non-implicit setter are inlined like normal method calls.
780 return false;
781 }
782 if (!CompilerState::Current().is_aot() && !target.WasCompiled()) {
783 return false;
784 }
785 Field& field = Field::ZoneHandle(Z, target.accessor_field());
786 ASSERT(!field.IsNull());
788 field = field.CloneFromOriginal();
789 }
790 if (field.is_late() && field.is_final()) {
791 return false;
792 }
793
794 switch (flow_graph()->CheckForInstanceCall(
795 instr, UntaggedFunction::kImplicitSetter)) {
797 AddCheckNull(instr->Receiver(), instr->function_name(), instr->deopt_id(),
798 instr->env(), instr);
799 break;
801 if (CompilerState::Current().is_aot()) {
802 return false; // AOT cannot class check
803 }
804 AddReceiverCheck(instr);
805 break;
807 break;
808 }
809
810 // True if we can use unchecked entry into the setter.
811 bool is_unchecked_call = false;
812 if (!CompilerState::Current().is_aot()) {
813 if (targets.IsMonomorphic() && targets.MonomorphicExactness().IsExact()) {
814 if (targets.MonomorphicExactness().IsTriviallyExact()) {
816 targets.MonomorphicReceiverCid());
817 }
818 is_unchecked_call = true;
819 }
820 }
821
822 if (IG->use_field_guards()) {
823 if (field.guarded_cid() != kDynamicCid) {
825 instr,
826 new (Z) GuardFieldClassInstr(new (Z) Value(instr->ArgumentAt(1)),
827 field, instr->deopt_id()),
828 instr->env(), FlowGraph::kEffect);
829 }
830
831 if (field.needs_length_check()) {
833 instr,
834 new (Z) GuardFieldLengthInstr(new (Z) Value(instr->ArgumentAt(1)),
835 field, instr->deopt_id()),
836 instr->env(), FlowGraph::kEffect);
837 }
838
841 instr,
842 new (Z) GuardFieldTypeInstr(new (Z) Value(instr->ArgumentAt(1)),
843 field, instr->deopt_id()),
844 instr->env(), FlowGraph::kEffect);
845 }
846 }
847
848 // Build an AssertAssignable if necessary.
849 const AbstractType& dst_type = AbstractType::ZoneHandle(zone(), field.type());
850 if (!dst_type.IsTopTypeForSubtyping()) {
851 // Compute if we need to type check the value. Always type check if
852 // at a dynamic invocation.
853 bool needs_check = true;
854 if (!instr->interface_target().IsNull()) {
855 if (field.is_covariant()) {
856 // Always type check covariant fields.
857 needs_check = true;
858 } else if (field.is_generic_covariant_impl()) {
859 // If field is generic covariant then we don't need to check it
860 // if the invocation was marked as unchecked (e.g. receiver of
861 // the invocation is also the receiver of the surrounding method).
862 // Note: we can't use flow_graph()->IsReceiver() for this optimization
863 // because strong mode only gives static guarantees at the AST level
864 // not at the SSA level.
865 needs_check = !(is_unchecked_call ||
866 (instr->entry_kind() == Code::EntryKind::kUnchecked));
867 } else {
868 // The rest of the stores are checked statically (we are not at
869 // a dynamic invocation).
870 needs_check = false;
871 }
872 }
873
874 if (needs_check) {
875 Definition* instantiator_type_args = flow_graph_->constant_null();
876 Definition* function_type_args = flow_graph_->constant_null();
877 if (!dst_type.IsInstantiated()) {
878 const Class& owner = Class::Handle(Z, field.Owner());
879 if (owner.NumTypeArguments() > 0) {
880 instantiator_type_args = new (Z) LoadFieldInstr(
881 new (Z) Value(instr->ArgumentAt(0)),
882 Slot::GetTypeArgumentsSlotFor(thread(), owner), instr->source());
883 InsertSpeculativeBefore(instr, instantiator_type_args, instr->env(),
885 }
886 }
887
888 auto assert_assignable = new (Z) AssertAssignableInstr(
889 instr->source(), new (Z) Value(instr->ArgumentAt(1)),
890 new (Z) Value(flow_graph_->GetConstant(dst_type)),
891 new (Z) Value(instantiator_type_args),
892 new (Z) Value(function_type_args),
893 String::ZoneHandle(zone(), field.name()), instr->deopt_id());
894 InsertSpeculativeBefore(instr, assert_assignable, instr->env(),
896 }
897 }
898
899 // Field guard was detached.
900 ASSERT(instr->FirstArgIndex() == 0);
901 StoreFieldInstr* store = new (Z)
902 StoreFieldInstr(field, new (Z) Value(instr->ArgumentAt(0)),
903 new (Z) Value(instr->ArgumentAt(1)), kEmitStoreBarrier,
904 instr->source(), &flow_graph()->parsed_function());
905
906 // Discard the environment from the original instruction because the store
907 // can't deoptimize.
908 instr->RemoveEnvironment();
909 ReplaceCallWithResult(instr, store, flow_graph()->constant_null());
910 return true;
911}
912
913bool CallSpecializer::InlineSimdBinaryOp(InstanceCallInstr* call,
914 intptr_t cid,
915 Token::Kind op_kind) {
916 if (!ShouldInlineSimd()) {
917 return false;
918 }
919 ASSERT(call->type_args_len() == 0);
920 ASSERT(call->ArgumentCount() == 2);
921 Definition* const left = call->ArgumentAt(0);
922 Definition* const right = call->ArgumentAt(1);
923 // Type check left and right.
924 AddChecksForArgNr(call, left, /* arg_number = */ 0);
925 AddChecksForArgNr(call, right, /* arg_number = */ 1);
926 // Replace call.
927 SimdOpInstr* op = SimdOpInstr::Create(
928 SimdOpInstr::KindForOperator(cid, op_kind), new (Z) Value(left),
929 new (Z) Value(right), call->deopt_id());
930 ReplaceCall(call, op);
931
932 return true;
933}
934
935// Only unique implicit instance getters can be currently handled.
// NOTE(review): doc-dump excerpt — the signature line (original line 936) is
// missing; code below kept byte-identical.
937 const CallTargets& targets = call->Targets();
938 if (!targets.HasSingleTarget()) {
939 // Polymorphic sites are inlined like normal methods by conventional
940 // inlining in FlowGraphInliner.
941 return false;
942 }
943 const Function& target = targets.FirstTarget();
944 if (target.kind() != UntaggedFunction::kImplicitGetter) {
945 // Non-implicit getters are inlined like normal methods by conventional
946 // inlining in FlowGraphInliner.
947 return false;
948 }
949 if (!CompilerState::Current().is_aot() && !target.WasCompiled()) {
950 return false;
951 }
952 return TryInlineImplicitInstanceGetter(call);
953}
954
955// Inline only simple, frequently called core library methods.
// NOTE(review): doc-dump excerpt — the signature line (original line 956) and
// lines 969-970, 975-976, 986, 1009 are missing; code below kept
// byte-identical.
957 const CallTargets& targets = call->Targets();
958 if (!targets.IsMonomorphic()) {
959 // No type feedback collected or multiple receivers/targets found.
960 return false;
961 }
962
963 const Function& target = targets.FirstTarget();
964 intptr_t receiver_cid = targets.MonomorphicReceiverCid();
965 MethodRecognizer::Kind recognized_kind = target.recognized_kind();
966
967 if (recognized_kind == MethodRecognizer::kIntegerToDouble) {
968 if (receiver_cid == kSmiCid) {
971 new (Z) SmiToDoubleInstr(new (Z) Value(call->ArgumentAt(0)),
972 call->source()));
973 return true;
974 } else if ((receiver_cid == kMintCid) && CanConvertInt64ToDouble()) {
977 new (Z) Int64ToDoubleInstr(new (Z) Value(call->ArgumentAt(0)),
978 call->deopt_id()));
979 return true;
980 }
981 }
982
983 if (receiver_cid == kDoubleCid) {
984 switch (recognized_kind) {
985 case MethodRecognizer::kDoubleToInteger: {
987 ASSERT(call->HasICData());
988 const ICData& ic_data = *call->ic_data();
989 Definition* input = call->ArgumentAt(0);
990 Definition* d2i_instr = nullptr;
991 if (ic_data.HasDeoptReason(ICData::kDeoptDoubleToSmi)) {
992 // Do not repeatedly deoptimize because result didn't fit into Smi.
993 d2i_instr = new (Z) DoubleToIntegerInstr(
994 new (Z) Value(input), recognized_kind, call->deopt_id());
995 } else {
996 // Optimistically assume result fits into Smi.
997 d2i_instr =
998 new (Z) DoubleToSmiInstr(new (Z) Value(input), call->deopt_id());
999 }
1000 ReplaceCall(call, d2i_instr);
1001 return true;
1002 }
1003 default:
1004 break;
1005 }
1006 }
1007
1008 return TryReplaceInstanceCallWithInline(flow_graph_, current_iterator(), call,
1010}
1011
1012// If type tests specified by 'ic_data' do not depend on type arguments,
1013// return mapping cid->result in 'results' (i : cid; i + 1: result).
1014// If all tests yield the same result, return it otherwise return Bool::null.
1015// If no mapping is possible, 'results' has less than
1016// (ic_data.NumberOfChecks() * 2) entries
1017// An instance-of test returning all same results can be converted to a class
1018// check.
1019BoolPtr CallSpecializer::InstanceOfAsBool(
1020    const ICData& ic_data,
1021    const AbstractType& type,
1022    ZoneGrowableArray<intptr_t>* results) const {
1023  ASSERT(results->is_empty());
1024  ASSERT(ic_data.NumArgsTested() == 1);  // Unary checks only.
// Function, record, and uninstantiated types cannot be answered per-cid.
1025  if (type.IsFunctionType() || type.IsDartFunctionType() ||
1026      type.IsRecordType() || !type.IsInstantiated()) {
1027    return Bool::null();
1028  }
1029  const Class& type_class = Class::Handle(Z, type.type_class());
1030  const intptr_t num_type_args = type_class.NumTypeArguments();
1031  if (num_type_args > 0) {
1032    // Only raw types can be directly compared, thus disregarding type
1033    // arguments.
1034    const TypeArguments& type_arguments =
1035        TypeArguments::Handle(Z, Type::Cast(type).arguments());
1036    const bool is_raw_type = type_arguments.IsNull() ||
1037                             type_arguments.IsRaw(0, type_arguments.Length());
1038    if (!is_raw_type) {
1039      // Unknown result.
1040      return Bool::null();
1041    }
1042  }
1043
1044  const ClassTable& class_table = *IG->class_table();
1045  Bool& prev = Bool::Handle(Z);
1046  Class& cls = Class::Handle(Z);
1047
1048  bool results_differ = false;
1049  const intptr_t number_of_checks = ic_data.NumberOfChecks();
1050  for (int i = 0; i < number_of_checks; i++) {
1051    cls = class_table.At(ic_data.GetReceiverClassIdAt(i));
1052    if (cls.NumTypeArguments() > 0) {
1053      return Bool::null();
1054    }
1055    bool is_subtype = false;
1056    if (cls.IsNullClass()) {
1057      // 'null' is an instance of Null, Object*, Never*, void, and dynamic.
1058      // In addition, 'null' is an instance of any nullable type.
1059      // It is also an instance of FutureOr<T> if it is an instance of T.
1060      const AbstractType& unwrapped_type =
1061          AbstractType::Handle(type.UnwrapFutureOr());
1062      ASSERT(unwrapped_type.IsInstantiated());
1063      is_subtype = unwrapped_type.IsTopTypeForInstanceOf() ||
1064                   unwrapped_type.IsNullable();
1065    } else {
1066      is_subtype =
1067          Class::IsSubtypeOf(cls, Object::null_type_arguments(),
// NOTE(review): original line 1068 (continuation of the IsSubtypeOf argument
// list) was dropped by the doc extraction.
1069    }
1070    results->Add(cls.id());
1071    results->Add(static_cast<intptr_t>(is_subtype));
1072    if (prev.IsNull()) {
1073      prev = Bool::Get(is_subtype).ptr();
1074    } else {
1075      if (is_subtype != prev.value()) {
1076        results_differ = true;
1077      }
1078    }
1079  }
1080  return results_differ ? Bool::null() : prev.ptr();
1081}
1082
1083// Returns true if checking against this type is a direct class id comparison.
1084bool CallSpecializer::TypeCheckAsClassEquality(const AbstractType& type,
1085 intptr_t* type_cid) {
1086 *type_cid = kIllegalCid;
1087 ASSERT(type.IsFinalized());
1088 // Requires CHA.
1089 if (!type.IsInstantiated()) return false;
1090 // Function and record types have different type checking rules.
1091 if (type.IsFunctionType() || type.IsRecordType()) return false;
1092
1093 const Class& type_class = Class::Handle(type.type_class());
1094 if (!CHA::HasSingleConcreteImplementation(type_class, type_cid)) {
1095 return false;
1096 }
1097
1098 const intptr_t num_type_args = type_class.NumTypeArguments();
1099 if (num_type_args > 0) {
1100 // Only raw types can be directly compared, thus disregarding type
1101 // arguments.
1102 const TypeArguments& type_arguments =
1103 TypeArguments::Handle(Type::Cast(type).arguments());
1104 const bool is_raw_type = type_arguments.IsNull() ||
1105 type_arguments.IsRaw(0, type_arguments.Length());
1106 if (!is_raw_type) {
1107 return false;
1108 }
1109 }
1110 if (type.IsNullable() || type.IsTopTypeForInstanceOf() ||
1111 type.IsNeverType()) {
1112 // A class id check is not sufficient, since a null instance also satisfies
1113 // the test against a nullable type.
1114 // TODO(regis): Add a null check in addition to the class id check?
1115 return false;
1116 }
1117 return true;
1118}
1119
// NOTE(review): original lines 1120-1121 (the function signature) were
// dropped by the doc extraction; only the trailing parameter line survives.
// The body is a stub that declines the optimization in JIT mode.
1122    const AbstractType& type) {
1123  // TODO(dartbug.com/30632) does this optimization make sense in JIT?
1124  return false;
1125}
1126
// Tries to fold 'receiver is type' using only static type information:
// either to a constant true, or to a strict null comparison.
1127bool CallSpecializer::TryOptimizeInstanceOfUsingStaticTypes(
// NOTE(review): original line 1128 (presumably the call parameter) was
// dropped by the doc extraction.
1129    const AbstractType& type) {
1130  ASSERT(Token::IsTypeTestOperator(call->token_kind()));
1131  if (!type.IsInstantiated()) {
1132    return false;
1133  }
1134
1135  Value* left_value = call->Receiver();
// If the receiver's static type already proves the test, fold to 'true'.
1136  if (left_value->Type()->IsInstanceOf(type)) {
1137    ConstantInstr* replacement = flow_graph()->GetConstant(Bool::True());
1138    call->ReplaceUsesWith(replacement);
1139    ASSERT(current_iterator()->Current() == call);
// NOTE(review): original line 1140 missing (presumably removes the now
// unused call from the graph).
1141    return true;
1142  }
1143
1144  // The goal is to emit code that will determine the result of 'x is type'
1145  // depending solely on the fact that x == null or not.
1146  // Checking whether the receiver is null can only help if the tested type is
1147  // non-nullable or legacy (including Never*) or the Null type.
1148  // Also, testing receiver for null cannot help with FutureOr.
1149  if ((type.IsNullable() && !type.IsNullType()) || type.IsFutureOrType()) {
1150    return false;
1151  }
1152
1153  // If type is Null or the static type of the receiver is a
1154  // subtype of the tested type, replace 'receiver is type' with
1155  //  - 'receiver == null' if type is Null,
1156  //  - 'receiver != null' otherwise.
1157  if (type.IsNullType() || left_value->Type()->IsSubtypeOf(type)) {
1158    Definition* replacement = new (Z) StrictCompareInstr(
1159        call->source(),
1160        type.IsNullType() ? Token::kEQ_STRICT : Token::kNE_STRICT,
1161        left_value->CopyWithType(Z),
1162        new (Z) Value(flow_graph()->constant_null()),
1163        /* number_check = */ false, DeoptId::kNone);
1164    if (FLAG_trace_strong_mode_types) {
1165      THR_Print("[Strong mode] replacing %s with %s (%s < %s)\n",
1166                call->ToCString(), replacement->ToCString(),
1167                left_value->Type()->ToAbstractType()->ToCString(),
1168                type.ToCString());
1169    }
1170    ReplaceCall(call, replacement);
1171    return true;
1172  }
1173
1174  return false;
1175}
1176
// NOTE(review): original lines 1176-1177 (the function signature) were
// dropped by the doc extraction. The body lowers an instance-of test call
// into progressively cheaper forms: static-type folding, a cid comparison,
// a TestCids instruction, or a generic InstanceOf instruction.
1178  ASSERT(Token::IsTypeTestOperator(call->token_kind()));
1179  Definition* left = call->ArgumentAt(0);
1180  Definition* instantiator_type_args = nullptr;
1181  Definition* function_type_args = nullptr;
// NOTE(review): original line 1182 missing -- presumably declares the local
// 'type' handle that is assigned below.
1183    ASSERT(call->type_args_len() == 0);
1184  if (call->ArgumentCount() == 2) {
1185    instantiator_type_args = flow_graph()->constant_null();
1186    function_type_args = flow_graph()->constant_null();
1187    ASSERT(call->MatchesCoreName(Symbols::_simpleInstanceOf()));
1188    type = AbstractType::Cast(call->ArgumentAt(1)->AsConstant()->value()).ptr();
1189  } else {
1190    ASSERT(call->ArgumentCount() == 4);
1191    instantiator_type_args = call->ArgumentAt(1);
1192    function_type_args = call->ArgumentAt(2);
1193    type = AbstractType::Cast(call->ArgumentAt(3)->AsConstant()->value()).ptr();
1194  }
1195
1196  if (TryOptimizeInstanceOfUsingStaticTypes(call, type)) {
1197    return;
1198  }
1199
// If the type test is equivalent to a cid comparison, load the receiver's
// cid and compare it against the constant cid.
1200  intptr_t type_cid;
1201  if (TypeCheckAsClassEquality(type, &type_cid)) {
1202    LoadClassIdInstr* load_cid =
1203        new (Z) LoadClassIdInstr(new (Z) Value(left), kUnboxedUword);
1204    InsertBefore(call, load_cid, nullptr, FlowGraph::kValue);
1205    ConstantInstr* constant_cid = flow_graph()->GetConstant(
1206        Smi::Handle(Z, Smi::New(type_cid)), kUnboxedUword);
1207    EqualityCompareInstr* check_cid = new (Z) EqualityCompareInstr(
1208        call->source(), Token::kEQ, new Value(load_cid),
1209        new Value(constant_cid), kIntegerCid, DeoptId::kNone, false,
// NOTE(review): original line 1210 (a continuation argument) is missing.
1211    ReplaceCall(call, check_cid);
1212    return;
1213  }
1214
// NOTE(review): original line 1215 missing -- presumably the condition of
// the braced block closed below.
1216    return;
1217  }
1218
1219  const ICData& unary_checks =
1220      ICData::ZoneHandle(Z, call->ic_data()->AsUnaryClassChecks());
1221  const intptr_t number_of_checks = unary_checks.NumberOfChecks();
1222  if (number_of_checks > 0 && number_of_checks <= FLAG_max_polymorphic_checks) {
// NOTE(review): original line 1223 missing -- presumably declares 'results'.
1224        new (Z) ZoneGrowableArray<intptr_t>(number_of_checks * 2);
1225    const Bool& as_bool =
1226        Bool::ZoneHandle(Z, InstanceOfAsBool(unary_checks, type, results));
1227    if (as_bool.IsNull() || CompilerState::Current().is_aot()) {
1228      if (results->length() == number_of_checks * 2) {
1229        const bool can_deopt = SpecializeTestCidsForNumericTypes(results, type);
1230        if (can_deopt &&
// NOTE(review): original line 1231 (continuation of the condition) missing.
1232          // Guard against repeated speculative inlining.
1233          return;
1234        }
1235        TestCidsInstr* test_cids = new (Z) TestCidsInstr(
1236            call->source(), Token::kIS, new (Z) Value(left), *results,
1237            can_deopt ? call->deopt_id() : DeoptId::kNone);
1238        // Remove type.
1239        ReplaceCall(call, test_cids);
1240        return;
1241      }
1242    } else {
1243      // One result only.
// NOTE(review): original line 1244 missing.
1245      ConstantInstr* bool_const = flow_graph()->GetConstant(as_bool);
1246      ASSERT(!call->HasMoveArguments());
1247      call->ReplaceUsesWith(bool_const);
1248      ASSERT(current_iterator()->Current() == call);
// NOTE(review): original line 1249 missing (presumably removes the call).
1250      return;
1251    }
1252  }
1253
// Fallback: keep a full runtime instance-of check.
1254  InstanceOfInstr* instance_of = new (Z) InstanceOfInstr(
1255      call->source(), new (Z) Value(left),
1256      new (Z) Value(instantiator_type_args), new (Z) Value(function_type_args),
1257      type, call->deopt_id());
1258  ReplaceCall(call, instance_of);
1259}
1260
// NOTE(review): original lines 1260-1261 (the function signature) were
// dropped by the doc extraction. The body specializes recognized static
// calls (min/max, int-to-double conversions) under speculative inlining.
1262  if (TryReplaceStaticCallWithInline(flow_graph_, current_iterator(), call,
// NOTE(review): original line 1263 (a continuation argument) is missing.
1264    return;
1265  }
1266
1267  if (speculative_policy_->IsAllowedForInlining(call->deopt_id())) {
1268    // Only if speculative inlining is enabled.
1269
1270    MethodRecognizer::Kind recognized_kind = call->function().recognized_kind();
1271    const CallTargets& targets = call->Targets();
1272    const BinaryFeedback& binary_feedback = call->BinaryFeedback();
1273
1274    switch (recognized_kind) {
1275      case MethodRecognizer::kMathMin:
1276      case MethodRecognizer::kMathMax: {
1277        // We can handle only monomorphic min/max call sites with both arguments
1278        // being either doubles or smis.
1279        if (targets.IsMonomorphic() && (call->FirstArgIndex() == 0)) {
1280          intptr_t result_cid = kIllegalCid;
1281          if (binary_feedback.IncludesOperands(kDoubleCid)) {
1282            result_cid = kDoubleCid;
1283          } else if (binary_feedback.IncludesOperands(kSmiCid)) {
1284            result_cid = kSmiCid;
1285          }
1286          if (result_cid != kIllegalCid) {
1287            MathMinMaxInstr* min_max = new (Z) MathMinMaxInstr(
1288                recognized_kind, new (Z) Value(call->ArgumentAt(0)),
1289                new (Z) Value(call->ArgumentAt(1)), call->deopt_id(),
1290                result_cid);
1291            const Cids* cids = Cids::CreateMonomorphic(Z, result_cid);
1292            AddCheckClass(min_max->left()->definition(), *cids,
1293                          call->deopt_id(), call->env(), call);
1294            AddCheckClass(min_max->right()->definition(), *cids,
1295                          call->deopt_id(), call->env(), call);
1296            ReplaceCall(call, min_max);
1297            return;
1298          }
1299        }
1300        break;
1301      }
1302      case MethodRecognizer::kDoubleFromInteger: {
1303        if (call->HasICData() && targets.IsMonomorphic() &&
1304            (call->FirstArgIndex() == 0)) {
1305          if (binary_feedback.ArgumentIs(kSmiCid)) {
1306            Definition* arg = call->ArgumentAt(1);
1307            AddCheckSmi(arg, call->deopt_id(), call->env(), call);
1308            ReplaceCall(call, new (Z) SmiToDoubleInstr(new (Z) Value(arg),
1309                                                       call->source()));
1310            return;
1311          } else if (binary_feedback.ArgumentIs(kMintCid) &&
// NOTE(review): original line 1312 (continuation of the condition) missing.
1313            Definition* arg = call->ArgumentAt(1);
1314            ReplaceCall(call, new (Z) Int64ToDoubleInstr(new (Z) Value(arg),
1315                                                         call->deopt_id()));
1316            return;
1317          }
1318        }
1319        break;
1320      }
1321
1322      default:
1323        break;
1324    }
1325  }
1326
// NOTE(review): original line 1327 missing -- presumably the condition of
// the braced block closed below.
1328    return;
1329  }
1330}
1331
// NOTE(review): original line 1332 (the function signature) was dropped by
// the doc extraction. The body narrows the representation of 'instr' on
// 32-bit targets when its value cannot be packed into a Smi.
1333// TODO(zerny): Use kUnboxedUint32 once it is fully supported/optimized.
1334#if defined(TARGET_ARCH_IA32) || defined(TARGET_ARCH_ARM)
1335  if (!instr->can_pack_into_smi()) instr->set_representation(kUnboxedInt64);
1336#endif
1337}
1338
// NOTE(review): original line 1339 (the first line of the signature,
// carrying the results-array parameter) was dropped by the doc extraction.
// Returns true if 'test_cid' already appears in the (cid, result) pairs.
1340                                   intptr_t test_cid) {
// Cids are stored at even indices; results at odd indices.
1341  for (intptr_t i = 0; i < results.length(); i += 2) {
1342    if (results[i] == test_cid) return true;
1343  }
1344  return false;
1345}
1346
// NOTE(review): original line 1347 (the first line of the signature) was
// dropped by the doc extraction.
// Appends a (cid, result) pair unless the cid is already present.
1348                       intptr_t test_cid,
1349                       bool result) {
1350  if (!CidTestResultsContains(*results, test_cid)) {
1351    results->Add(test_cid);
1352    results->Add(static_cast<intptr_t>(result));
1353  }
1354}
1355
1356// Used when we only need the positive result because we return false by
1357// default.
// NOTE(review): original line 1358 (the function signature) was dropped by
// the doc extraction.
1359  // We can't purge the Smi entry at the beginning since it is used in the
1360  // Smi check before the Cid is loaded.
1361  int dest = 2;
// Compact the (cid, result) pairs in place, keeping only 'true' entries.
1362  for (intptr_t i = 2; i < results->length(); i += 2) {
1363    if (results->At(i + 1) != 0) {
1364      (*results)[dest++] = results->At(i);
1365      (*results)[dest++] = results->At(i + 1);
1366    }
1367  }
1368  results->SetLength(dest);
1369}
1370
// Rewrites the (cid, result) pairs so numeric type tests start with a Smi
// entry, then adds the remaining 'true' cids for int/num/double tests.
// Returns true if the resulting test may still deoptimize.
1371bool CallSpecializer::SpecializeTestCidsForNumericTypes(
1372    ZoneGrowableArray<intptr_t>* results,
1373    const AbstractType& type) {
1374  ASSERT(results->length() >= 2);  // At least one entry.
1375  const ClassTable& class_table = *IsolateGroup::Current()->class_table();
// Ensure the first pair tests kSmiCid: shift existing pairs right by one
// pair and insert the Smi result at the front.
1376  if ((*results)[0] != kSmiCid) {
1377    const Class& smi_class = Class::Handle(class_table.At(kSmiCid));
1378    const bool smi_is_subtype =
1379        Class::IsSubtypeOf(smi_class, Object::null_type_arguments(),
// NOTE(review): original line 1380 (continuation of the IsSubtypeOf
// argument list) was dropped by the doc extraction.
1381    results->Add((*results)[results->length() - 2]);
1382    results->Add((*results)[results->length() - 2]);
1383    for (intptr_t i = results->length() - 3; i > 1; --i) {
1384      (*results)[i] = (*results)[i - 2];
1385    }
1386    (*results)[0] = kSmiCid;
1387    (*results)[1] = static_cast<intptr_t>(smi_is_subtype);
1388  }
1389
1390  ASSERT(type.IsInstantiated());
1391  ASSERT(results->length() >= 2);
1392  if (type.IsSmiType()) {
1393    ASSERT((*results)[0] == kSmiCid);
// NOTE(review): original line 1394 missing (numbering jumps 1393 -> 1395).
1395    return false;
1396  } else if (type.IsIntType()) {
1397    ASSERT((*results)[0] == kSmiCid);
1398    TryAddTest(results, kMintCid, true);
1399    // Cannot deoptimize since all tests returning true have been added.
// NOTE(review): original line 1400 missing.
1401    return false;
1402  } else if (type.IsNumberType()) {
1403    ASSERT((*results)[0] == kSmiCid);
1404    TryAddTest(results, kMintCid, true);
1405    TryAddTest(results, kDoubleCid, true);
// NOTE(review): original line 1406 missing.
1407    return false;
1408  } else if (type.IsDoubleType()) {
1409    ASSERT((*results)[0] == kSmiCid);
1410    TryAddTest(results, kDoubleCid, true);
// NOTE(review): original line 1411 missing.
1412    return false;
1413  }
1414  return true;  // May deoptimize since we have not identified all 'true' tests.
1415}
1416
// NOTE(review): original line 1417 (the function signature; presumably the
// TypedDataSpecializer entry point taking a FlowGraph*) was dropped by the
// doc extraction.
1418  TypedDataSpecializer optimizer(flow_graph);
1419  optimizer.VisitBlocks();
1420}
1421
// Lazily caches the dart:typed_data interface classes and their element
// types into typed_data_variants_.
1422void TypedDataSpecializer::EnsureIsInitialized() {
1423  if (initialized_) return;
1424
1425  initialized_ = true;
1426
1427  int_type_ = Type::IntType();
1428  double_type_ = Type::Double();
1429  float32x4_type_ = Type::Float32x4();
1430  int32x4_type_ = Type::Int32x4();
1431  float64x2_type_ = Type::Float64x2();
1432
1433  const auto& typed_data = Library::Handle(
1434      Z, Library::LookupLibrary(thread_, Symbols::DartTypedData()));
1435
1436  auto& td_class = Class::Handle(Z);
1437  auto& direct_implementors = GrowableObjectArray::Handle(Z);
// Reading class state requires the program lock.
1438  SafepointReadRwLocker ml(thread_, thread_->isolate_group()->program_lock());
1439
1440#define INIT_HANDLE(iface, type, cid)                                         \
1441  td_class = typed_data.LookupClass(Symbols::iface());                        \
1442  ASSERT(!td_class.IsNull());                                                 \
1443  direct_implementors = td_class.direct_implementors();                       \
1444  typed_data_variants_[k##iface##Index].array_type = td_class.RareType();     \
1445  typed_data_variants_[k##iface##Index].array_cid = cid;                      \
1446  typed_data_variants_[k##iface##Index].element_type = type.ptr();
1447
// NOTE(review): original line 1448 missing -- presumably the macro
// invocation applying INIT_HANDLE to the typed-data class list.
1449#undef INIT_HANDLE
1450}
1451
// NOTE(review): original line 1452 (the visitor's signature) was dropped by
// the doc extraction; the body simply forwards the call to TryInlineCall.
1453  TryInlineCall(call);
1454}
1455
// NOTE(review): original line 1456 (the visitor's signature) was dropped by
// the doc extraction. Only non-static calls (which carry a receiver
// argument) are candidates for typed-data inlining.
1457  const Function& function = call->function();
1458  if (!function.is_static()) {
1459    ASSERT(call->ArgumentCount() > 0);
1460    TryInlineCall(call);
1461  }
1462}
1463
// Attempts to replace a typed-data 'length', '[]' or '[]=' call with direct
// load/store instructions when the receiver's static type matches one of
// the cached typed-data variants.
1464void TypedDataSpecializer::TryInlineCall(TemplateDartCall<0>* call) {
1465  const bool is_length_getter = call->Selector() == Symbols::GetLength().ptr();
1466  const bool is_index_get = call->Selector() == Symbols::IndexToken().ptr();
1467  const bool is_index_set =
1468      call->Selector() == Symbols::AssignIndexToken().ptr();
1469
1470  if (!(is_length_getter || is_index_get || is_index_set)) {
1471    return;
1472  }
1473
1474  EnsureIsInitialized();
1475
1476  const intptr_t receiver_index = call->FirstArgIndex();
1477
1478  CompileType* receiver_type =
1479      call->ArgumentValueAt(receiver_index + 0)->Type();
1480
1481  CompileType* index_type = nullptr;
1482  if (is_index_get || is_index_set) {
1483    index_type = call->ArgumentValueAt(receiver_index + 1)->Type();
1484  }
1485
1486  CompileType* value_type = nullptr;
1487  if (is_index_set) {
1488    value_type = call->ArgumentValueAt(receiver_index + 2)->Type();
1489  }
1490
1491  auto& type_class = Class::Handle(zone_);
1492  for (auto& variant : typed_data_variants_) {
1493    if (!receiver_type->IsAssignableTo(variant.array_type)) {
1494      continue;
1495    }
1496
1497    if (is_length_getter) {
1498      type_class = variant.array_type.type_class();
1499      ReplaceWithLengthGetter(call);
1500      return;
1501    }
1502
1503    auto const rep =
// NOTE(review): original line 1504 missing -- presumably the expression
// computing the value representation for variant.array_cid.
1505    const bool is_simd_access = rep == kUnboxedInt32x4 ||
1506                                rep == kUnboxedFloat32x4 ||
1507                                rep == kUnboxedFloat64x2;
1508
1509    if (is_simd_access && !FlowGraphCompiler::SupportsUnboxedSimd128()) {
1510      return;
1511    }
1512
1513    if (!index_type->IsNullableInt()) {
1514      return;
1515    }
1516
1517    if (is_index_get) {
1518      type_class = variant.array_type.type_class();
1519      ReplaceWithIndexGet(call, variant.array_cid);
1520    } else {
1521      if (!value_type->IsAssignableTo(variant.element_type)) {
1522        return;
1523      }
1524      type_class = variant.array_type.type_class();
1525      ReplaceWithIndexSet(call, variant.array_cid);
1526    }
1527
1528    return;
1529  }
1530}
1531
// Lowers a typed-data '.length' call to a direct length-field load,
// null-checking the receiver when its static type admits null.
1532void TypedDataSpecializer::ReplaceWithLengthGetter(TemplateDartCall<0>* call) {
1533  const intptr_t receiver_idx = call->FirstArgIndex();
1534  auto array = call->ArgumentAt(receiver_idx + 0);
1535
1536  if (array->Type()->is_nullable()) {
1537    AppendNullCheck(call, &array);
1538  }
1539  Definition* length = AppendLoadLength(call, array);
// NOTE(review): original lines 1540-1541 missing -- presumably replace the
// call's uses with 'length' and remove the call from the graph.
1542}
1543
// Lowers a typed-data '[]' call to null checks, a bounds check, and a
// direct indexed load of element class 'cid'.
1544void TypedDataSpecializer::ReplaceWithIndexGet(TemplateDartCall<0>* call,
1545                                               classid_t cid) {
1546  const intptr_t receiver_idx = call->FirstArgIndex();
1547  auto array = call->ArgumentAt(receiver_idx + 0);
1548  auto index = call->ArgumentAt(receiver_idx + 1);
1549
1550  if (array->Type()->is_nullable()) {
1551    AppendNullCheck(call, &array);
1552  }
1553  if (index->Type()->is_nullable()) {
1554    AppendNullCheck(call, &index);
1555  }
1556  AppendBoundsCheck(call, array, &index);
1557  Definition* value = AppendLoadIndexed(call, array, index, cid);
// NOTE(review): original lines 1558-1559 missing -- presumably replace the
// call's uses with 'value' and remove the call from the graph.
1560}
1561
1562void TypedDataSpecializer::ReplaceWithIndexSet(TemplateDartCall<0>* call,
1563 classid_t cid) {
1564 const intptr_t receiver_idx = call->FirstArgIndex();
1565 auto array = call->ArgumentAt(receiver_idx + 0);
1566 auto index = call->ArgumentAt(receiver_idx + 1);
1567 auto value = call->ArgumentAt(receiver_idx + 2);
1568
1569 if (array->Type()->is_nullable()) {
1570 AppendNullCheck(call, &array);
1571 }
1572 if (index->Type()->is_nullable()) {
1573 AppendNullCheck(call, &index);
1574 }
1575 if (value->Type()->is_nullable()) {
1576 AppendNullCheck(call, &value);
1577 }
1578 AppendMutableCheck(call, &array);
1579 AppendBoundsCheck(call, array, &index);
1580 AppendStoreIndexed(call, array, index, value, cid);
1581
1582 RELEASE_ASSERT(!call->HasUses());
1583 flow_graph_->ReplaceCurrentInstruction(current_iterator(), call, nullptr);
1584}
1585
1586void TypedDataSpecializer::AppendNullCheck(TemplateDartCall<0>* call,
1587 Definition** value) {
1588 auto check =
1589 new (Z) CheckNullInstr(new (Z) Value(*value), Symbols::OptimizedOut(),
1590 call->deopt_id(), call->source());
1591 flow_graph_->InsertBefore(call, check, call->env(), FlowGraph::kValue);
1592
1593 // Use data dependency as control dependency.
1594 *value = check;
1595}
1596
1597void TypedDataSpecializer::AppendMutableCheck(TemplateDartCall<0>* call,
1598 Definition** value) {
1599 auto check = new (Z) CheckWritableInstr(new (Z) Value(*value),
1600 call->deopt_id(), call->source());
1601 flow_graph_->InsertBefore(call, check, call->env(), FlowGraph::kValue);
1602
1603 // Use data dependency as control dependency.
1604 *value = check;
1605}
1606
1607void TypedDataSpecializer::AppendBoundsCheck(TemplateDartCall<0>* call,
1608 Definition* array,
1609 Definition** index) {
1610 auto length = new (Z) LoadFieldInstr(
1611 new (Z) Value(array), Slot::TypedDataBase_length(), call->source());
1612 flow_graph_->InsertBefore(call, length, call->env(), FlowGraph::kValue);
1613
1614 auto check = new (Z) GenericCheckBoundInstr(
1615 new (Z) Value(length), new (Z) Value(*index), DeoptId::kNone);
1616 flow_graph_->InsertBefore(call, check, call->env(), FlowGraph::kValue);
1617
1618 // Use data dependency as control dependency.
1619 *index = check;
1620}
1621
1622Definition* TypedDataSpecializer::AppendLoadLength(TemplateDartCall<0>* call,
1623 Definition* array) {
1624 auto length = new (Z) LoadFieldInstr(
1625 new (Z) Value(array), Slot::TypedDataBase_length(), call->source());
1626 flow_graph_->InsertBefore(call, length, call->env(), FlowGraph::kValue);
1627 return length;
1628}
1629
// Emits a LoadIndexed of element class 'cid' before [call]; float loads are
// widened to double before being returned.
1630Definition* TypedDataSpecializer::AppendLoadIndexed(TemplateDartCall<0>* call,
1631                                                    Definition* array,
1632                                                    Definition* index,
1633                                                    classid_t cid) {
// NOTE(review): original line 1634 missing -- presumably computes
// 'element_size' for 'cid', used below.
1635  const intptr_t index_scale = element_size;
// NOTE(review): original line 1636 missing -- presumably defines 'rep',
// which is tested against kUnboxedFloat below.
1637
1638  Definition* load = new (Z) LoadIndexedInstr(
1639      new (Z) Value(array), new (Z) Value(index), /*index_unboxed=*/false,
1640      index_scale, cid, kAlignedAccess, call->deopt_id(), call->source());
1641  flow_graph_->InsertBefore(call, load, call->env(), FlowGraph::kValue);
1642
1643  if (rep == kUnboxedFloat) {
1644    load = new (Z) FloatToDoubleInstr(new (Z) Value(load), call->deopt_id());
1645    flow_graph_->InsertBefore(call, load, call->env(), FlowGraph::kValue);
1646  }
1647
1648  return load;
1649}
1650
// Emits a StoreIndexed of element class 'cid' before [call], first unboxing
// or narrowing the value as required by the element representation.
1651void TypedDataSpecializer::AppendStoreIndexed(TemplateDartCall<0>* call,
1652                                              Definition* array,
1653                                              Definition* index,
1654                                              Definition* value,
1655                                              classid_t cid) {
// NOTE(review): original line 1656 missing -- presumably computes
// 'element_size' for 'cid'.
1657  const intptr_t index_scale = element_size;
// NOTE(review): original line 1658 missing -- presumably defines 'rep'.
1659
1660  const auto deopt_id = call->deopt_id();
1661
// NOTE(review): original line 1662 missing -- presumably the condition of
// the branch closed by the '} else if' below (an unboxed-integer check).
1663    // Insert explicit unboxing instructions with truncation to avoid relying
1664    // on [SelectRepresentations] which doesn't mark them as truncating.
1665    value = UnboxInstr::Create(rep, new (Z) Value(value), deopt_id,
// NOTE(review): original line 1666 (a continuation argument) missing.
1667    flow_graph_->InsertBefore(call, value, call->env(), FlowGraph::kValue);
1668  } else if (rep == kUnboxedFloat) {
1669    value = new (Z) DoubleToFloatInstr(new (Z) Value(value), deopt_id,
// NOTE(review): original line 1670 (a continuation argument) missing.
1671    flow_graph_->InsertBefore(call, value, call->env(), FlowGraph::kValue);
1672  }
1673
// Typed-data backing stores never need a write barrier.
1674  auto store = new (Z) StoreIndexedInstr(
1675      new (Z) Value(array), new (Z) Value(index), new (Z) Value(value),
1676      kNoStoreBarrier, /*index_unboxed=*/false, index_scale, cid,
// NOTE(review): original lines 1677-1678 (trailing arguments) missing.
1679  flow_graph_->InsertBefore(call, store, call->env(), FlowGraph::kEffect);
1680}
1681
// NOTE(review): original line 1682 (the function signature) was dropped by
// the doc extraction; the surviving body is an intentionally empty JIT stub.
1683  // Only implemented for AOT.
1684}
1685
1686// Test and obtain Smi value.
1687static bool IsSmiValue(Value* val, intptr_t* int_val) {
1688 if (val->BindsToConstant() && val->BoundConstant().IsSmi()) {
1689 *int_val = Smi::Cast(val->BoundConstant()).Value();
1690 return true;
1691 }
1692 return false;
1693}
1694
1695// Helper to get result type from call (or nullptr otherwise).
// NOTE(review): original line 1696 (the static helper's signature) was
// dropped by the doc extraction -- numbering jumps 1695 -> 1697.
1697  if (auto static_call = call->AsStaticCall()) {
1698    return static_call->result_type();
1699  } else if (auto instance_call = call->AsInstanceCall()) {
1700    return instance_call->result_type();
1701  }
1702  return nullptr;
1703}
1704
1705// Quick access to the current zone (redefined below to come from the flow
1705// graph rather than from the pass object).
1706#undef Z
1707#define Z (flow_graph->zone())
1708
// Builds an inlined index-check body (null check + bounds check) for the
// typed-data _checkIndex-style helpers.
1709static bool InlineTypedDataIndexCheck(FlowGraph* flow_graph,
// NOTE(review): original line 1710 missing -- presumably the call
// parameter, referenced throughout the body.
1711                                      Definition* receiver,
1712                                      GraphEntryInstr* graph_entry,
1713                                      FunctionEntryInstr** entry,
1714                                      Instruction** last,
// NOTE(review): original line 1715 missing -- presumably the 'result'
// out-parameter assigned at the end.
1716                                      const String& symbol) {
1717  *entry =
1718      new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
1719                                 call->GetBlock()->try_index(), DeoptId::kNone);
1720  (*entry)->InheritDeoptTarget(Z, call);
1721  Instruction* cursor = *entry;
1722
1723  Definition* index = call->ArgumentAt(1);
1724  Definition* length = call->ArgumentAt(2);
1725
// NOTE(review): original line 1726 missing -- presumably the condition
// guarding the null check, closed by the '}' below.
1727    // Add a null-check in case the index argument is known to be compatible
1728    // but possibly nullable. We don't need to do the same for length
1729    // because all callers in typed_data_patch.dart retrieve the length
1730    // from the typed data object.
1731    auto* const null_check =
1732        new (Z) CheckNullInstr(new (Z) Value(index), symbol, call->deopt_id(),
// NOTE(review): original line 1733 (a continuation argument) missing.
1734    cursor = flow_graph->AppendTo(cursor, null_check, call->env(),
// NOTE(review): original line 1735 (a continuation argument) missing.
1736  }
1737  index = flow_graph->CreateCheckBound(length, index, call->deopt_id());
1738  cursor = flow_graph->AppendTo(cursor, index, call->env(), FlowGraph::kValue);
1739
1740  *last = cursor;
1741  *result = index;
1742  return true;
1743}
1744
// Emits a length load plus bounds check for an indexed access, then
// redirects *array to the backing store for growable arrays and external
// typed data. Returns the (possibly updated) array cid.
1745static intptr_t PrepareInlineIndexedOp(FlowGraph* flow_graph,
// NOTE(review): original line 1746 missing -- presumably the call
// parameter, referenced throughout the body.
1747                                       intptr_t array_cid,
1748                                       Definition** array,
1749                                       Definition** index,
1750                                       Instruction** cursor) {
1751  // Insert array length load and bounds check.
// NOTE(review): original line 1752 missing -- presumably the start of the
// 'length' LoadFieldInstr allocation whose arguments follow.
1753      new (Z) Value(*array), Slot::GetLengthFieldForArrayCid(array_cid),
1754      call->source());
1755  *cursor = flow_graph->AppendTo(*cursor, length, nullptr, FlowGraph::kValue);
1756  *index = flow_graph->CreateCheckBound(length, *index, call->deopt_id());
1757  *cursor =
1758      flow_graph->AppendTo(*cursor, *index, call->env(), FlowGraph::kValue);
1759
1760  if (array_cid == kGrowableObjectArrayCid) {
1761    // Insert data elements load.
1762    LoadFieldInstr* elements = new (Z)
1763        LoadFieldInstr(new (Z) Value(*array), Slot::GrowableObjectArray_data(),
1764                       call->source());
1765    *cursor =
1766        flow_graph->AppendTo(*cursor, elements, nullptr, FlowGraph::kValue);
1767    // Load from the data from backing store which is a fixed-length array.
1768    *array = elements;
1769    array_cid = kArrayCid;
1770  } else if (IsExternalTypedDataClassId(array_cid)) {
1771    auto* const elements = new (Z) LoadFieldInstr(
1772        new (Z) Value(*array), Slot::PointerBase_data(),
// NOTE(review): original line 1773 (a continuation argument) missing.
1774    *cursor =
1775        flow_graph->AppendTo(*cursor, elements, nullptr, FlowGraph::kValue);
1776    *array = elements;
1777  }
1778  return array_cid;
1779}
1780
// Builds an inlined body for a recognized indexed getter: bounds-checked
// LoadIndexed (with float widened to double) on the receiver's backing
// store. Returns true on success.
1781static bool InlineGetIndexed(FlowGraph* flow_graph,
1782                             bool can_speculate,
1783                             bool is_dynamic_call,
// NOTE(review): original lines 1784-1785 missing -- presumably the
// recognized-method 'kind' and the call, both referenced below.
1786                             Definition* receiver,
1787                             GraphEntryInstr* graph_entry,
1788                             FunctionEntryInstr** entry,
1789                             Instruction** last,
1790                             Definition** result) {
1791  intptr_t array_cid = MethodRecognizer::MethodKindToReceiverCid(kind);
1792
1793  Definition* array = receiver;
1794  Definition* index = call->ArgumentAt(1);
1795
// Without speculation, dynamic calls need a provably-int index.
1796  if (!can_speculate && is_dynamic_call && !index->Type()->IsInt()) {
1797    return false;
1798  }
1799
1800  *entry =
1801      new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
1802                                 call->GetBlock()->try_index(), DeoptId::kNone);
1803  (*entry)->InheritDeoptTarget(Z, call);
1804  *last = *entry;
1805
1806  array_cid =
1807      PrepareInlineIndexedOp(flow_graph, call, array_cid, &array, &index, last);
1808
1809  // Array load and return.
1810  intptr_t index_scale = compiler::target::Instance::ElementSizeFor(array_cid);
1811  *result = new (Z) LoadIndexedInstr(
1812      new (Z) Value(array), new (Z) Value(index),
1813      /*index_unboxed=*/false, index_scale, array_cid, kAlignedAccess,
1814      call->deopt_id(), call->source(), ResultType(call));
1815  *last = flow_graph->AppendTo(*last, *result, call->env(), FlowGraph::kValue);
1816
1817  if (LoadIndexedInstr::ReturnRepresentation(array_cid) == kUnboxedFloat) {
1818    *result =
1819        new (Z) FloatToDoubleInstr(new (Z) Value(*result), call->deopt_id());
1820    *last =
1821        flow_graph->AppendTo(*last, *result, call->env(), FlowGraph::kValue);
1822  }
1823
1824  return true;
1825}
1826
// Builds an inlined body for a recognized indexed setter: optional value
// type check, bounds check, value unbox/narrowing, and a StoreIndexed.
// The result is constant null, since '[]=' returns void.
1827static bool InlineSetIndexed(FlowGraph* flow_graph,
// NOTE(review): original line 1828 missing -- presumably the
// recognized-method 'kind' parameter used below.
1829                             const Function& target,
// NOTE(review): original line 1830 missing -- presumably the call
// parameter.
1831                             Definition* receiver,
// NOTE(review): original lines 1832-1833 missing -- presumably the
// 'source' and 'exactness' parameters referenced below.
1834                             GraphEntryInstr* graph_entry,
1835                             FunctionEntryInstr** entry,
1836                             Instruction** last,
1837                             Definition** result) {
1838  intptr_t array_cid = MethodRecognizer::MethodKindToReceiverCid(kind);
1839  auto const rep = StoreIndexedInstr::ValueRepresentation(array_cid);
1840
1841  Definition* array = receiver;
1842  Definition* index = call->ArgumentAt(1);
1843  Definition* stored_value = call->ArgumentAt(2);
1844
1845  *entry =
1846      new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
1847                                 call->GetBlock()->try_index(), DeoptId::kNone);
1848  (*entry)->InheritDeoptTarget(Z, call);
1849  *last = *entry;
1850
// Calls entering through the unchecked entry point have already had their
// arguments type-checked by the caller.
1851  bool is_unchecked_call = false;
1852  if (StaticCallInstr* static_call = call->AsStaticCall()) {
1853    is_unchecked_call =
1854        static_call->entry_kind() == Code::EntryKind::kUnchecked;
1855  } else if (InstanceCallInstr* instance_call = call->AsInstanceCall()) {
1856    is_unchecked_call =
1857        instance_call->entry_kind() == Code::EntryKind::kUnchecked;
1858  } else if (PolymorphicInstanceCallInstr* instance_call =
1859                 call->AsPolymorphicInstanceCall()) {
1860    is_unchecked_call =
1861        instance_call->entry_kind() == Code::EntryKind::kUnchecked;
1862  }
1863
1864  if (!is_unchecked_call &&
1865      (kind != MethodRecognizer::kObjectArraySetIndexedUnchecked &&
1866       kind != MethodRecognizer::kGrowableArraySetIndexedUnchecked)) {
1867    // Only type check for the value. A type check for the index is not
1868    // needed here because we insert a deoptimizing smi-check for the case
1869    // the index is not a smi.
1870    const AbstractType& value_type =
1871        AbstractType::ZoneHandle(Z, target.ParameterTypeAt(2));
1872    Definition* type_args = nullptr;
1873    if (rep == kTagged) {
1874      const Class& instantiator_class = Class::Handle(Z, target.Owner());
1875      LoadFieldInstr* load_type_args =
1876          new (Z) LoadFieldInstr(new (Z) Value(array),
// NOTE(review): original line 1877 (a continuation argument) missing.
1878                                     flow_graph->thread(), instantiator_class),
1879                                 call->source());
1880      *last = flow_graph->AppendTo(*last, load_type_args, call->env(),
// NOTE(review): original line 1881 (a continuation argument) missing.
1882      type_args = load_type_args;
1883    } else if (!RepresentationUtils::IsUnboxed(rep)) {
1884      UNREACHABLE();
1885    } else {
1886      type_args = flow_graph->constant_null();
1887      ASSERT(value_type.IsInstantiated());
1888#if defined(DEBUG)
1889      if (rep == kUnboxedFloat || rep == kUnboxedDouble) {
1890        ASSERT(value_type.IsDoubleType());
1891      } else if (rep == kUnboxedFloat32x4) {
1892        ASSERT(value_type.IsFloat32x4Type());
1893      } else if (rep == kUnboxedInt32x4) {
1894        ASSERT(value_type.IsInt32x4Type());
1895      } else if (rep == kUnboxedFloat64x2) {
1896        ASSERT(value_type.IsFloat64x2Type());
1897      } else {
// NOTE(review): original line 1898 missing (numbering jumps 1897 -> 1899).
1899        ASSERT(value_type.IsIntType());
1900      }
1901#endif
1902    }
1903
1904    if (exactness != nullptr && exactness->is_exact) {
1905      exactness->emit_exactness_guard = true;
1906    } else {
1907      auto const function_type_args = flow_graph->constant_null();
1908      auto const dst_type = flow_graph->GetConstant(value_type);
1909      AssertAssignableInstr* assert_value = new (Z) AssertAssignableInstr(
1910          source, new (Z) Value(stored_value), new (Z) Value(dst_type),
1911          new (Z) Value(type_args), new (Z) Value(function_type_args),
1912          Symbols::Value(), call->deopt_id());
1913      *last = flow_graph->AppendSpeculativeTo(*last, assert_value, call->env(),
// NOTE(review): original line 1914 (a continuation argument) missing.
1915    }
1916  }
1917
1918  array_cid =
1919      PrepareInlineIndexedOp(flow_graph, call, array_cid, &array, &index, last);
1920
1921  const bool is_typed_data_store = IsTypedDataBaseClassId(array_cid);
1922
1923  // Check if store barrier is needed. Byte arrays don't need a store barrier.
1924  StoreBarrierType needs_store_barrier =
1925      is_typed_data_store ? kNoStoreBarrier : kEmitStoreBarrier;
1926
1927  if (rep == kUnboxedFloat) {
1928    stored_value = new (Z)
1929        DoubleToFloatInstr(new (Z) Value(stored_value), call->deopt_id());
1930    *last = flow_graph->AppendTo(*last, stored_value, call->env(),
// NOTE(review): original line 1931 (a continuation argument) missing.
1932  } else if (RepresentationUtils::IsUnboxedInteger(rep)) {
1933    // Insert explicit unboxing instructions with truncation to avoid relying
1934    // on [SelectRepresentations] which doesn't mark them as truncating.
1935    stored_value =
1936        UnboxInstr::Create(rep, new (Z) Value(stored_value), call->deopt_id(),
// NOTE(review): original line 1937 (a continuation argument) missing.
1938    *last = flow_graph->AppendTo(*last, stored_value, call->env(),
// NOTE(review): original line 1939 (a continuation argument) missing.
1940  }
1941
1942  const intptr_t index_scale =
// NOTE(review): original line 1943 missing -- presumably the element-size
// expression for 'array_cid'.
1944  auto* const store = new (Z) StoreIndexedInstr(
1945      new (Z) Value(array), new (Z) Value(index), new (Z) Value(stored_value),
1946      needs_store_barrier, /*index_unboxed=*/false, index_scale, array_cid,
1947      kAlignedAccess, call->deopt_id(), call->source());
1948  *last = flow_graph->AppendTo(*last, store, call->env(), FlowGraph::kEffect);
1949  // We need a return value to replace uses of the original definition. However,
1950  // the final instruction is a use of 'void operator[]=()', so we use null.
1951  *result = flow_graph->constant_null();
1952  return true;
1953}
1954
// Inlines a binary double operation (op_kind, e.g. +, -, *, /) on [receiver]
// and the call's second argument as a single BinaryDoubleOpInstr inside a
// fresh FunctionEntryInstr fragment.  Always succeeds (returns true).
// NOTE(review): this rendering is missing line 1957 (presumably the `call`
// parameter), so the parameter list below is incomplete — confirm against the
// original file.
1955 static bool InlineDoubleOp(FlowGraph* flow_graph,
1956 Token::Kind op_kind,
1958 Definition* receiver,
1959 GraphEntryInstr* graph_entry,
1960 FunctionEntryInstr** entry,
1961 Instruction** last,
1962 Definition** result) {
1963 Definition* left = receiver;
1964 Definition* right = call->ArgumentAt(1);
1965
// Create the entry block of the inlined fragment and give it the call's
// deoptimization target.
1966 *entry =
1967 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
1968 call->GetBlock()->try_index(), DeoptId::kNone);
1969 (*entry)->InheritDeoptTarget(Z, call);
1970 // Arguments are checked. No need for class check.
1971 BinaryDoubleOpInstr* double_bin_op = new (Z)
1972 BinaryDoubleOpInstr(op_kind, new (Z) Value(left), new (Z) Value(right),
1973 call->deopt_id(), call->source());
1974 flow_graph->AppendTo(*entry, double_bin_op, call->env(), FlowGraph::kValue);
1975 *last = double_bin_op;
1976 *result = double_bin_op->AsDefinition();
1977
1978 return true;
1979 }
1980
// Inlines a unary double test (recognized [kind], used below) on [receiver]
// as a single DoubleTestOpInstr inside a fresh entry fragment.  Always
// succeeds (returns true).
// NOTE(review): this rendering is missing lines 1982/1984 (presumably the
// `call` and `kind` parameters), so the parameter list below is incomplete.
1981 static bool InlineDoubleTestOp(FlowGraph* flow_graph,
1983 Definition* receiver,
1985 GraphEntryInstr* graph_entry,
1986 FunctionEntryInstr** entry,
1987 Instruction** last,
1988 Definition** result) {
1989 *entry =
1990 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
1991 call->GetBlock()->try_index(), DeoptId::kNone);
1992 (*entry)->InheritDeoptTarget(Z, call);
1993 // Arguments are checked. No need for class check.
1994
1995 DoubleTestOpInstr* double_test_op = new (Z) DoubleTestOpInstr(
1996 kind, new (Z) Value(receiver), call->deopt_id(), call->source());
1997 flow_graph->AppendTo(*entry, double_test_op, call->env(), FlowGraph::kValue);
1998 *last = double_test_op;
1999 *result = double_test_op->AsDefinition();
2000
2001 return true;
2002 }
2003
// Inlines an internal GrowableArray field setter: stores the call's second
// argument into [field] of the receiver using [store_barrier_type].  The
// setter returns void, so uses of the call are replaced by constant null.
// NOTE(review): this rendering is missing lines 2007 (presumably the `call`
// parameter) and 2022 (the head of the `store` declaration).
2004 static bool InlineGrowableArraySetter(FlowGraph* flow_graph,
2005 const Slot& field,
2006 StoreBarrierType store_barrier_type,
2008 Definition* receiver,
2009 GraphEntryInstr* graph_entry,
2010 FunctionEntryInstr** entry,
2011 Instruction** last,
2012 Definition** result) {
2013 Definition* array = receiver;
2014 Definition* value = call->ArgumentAt(1);
2015
2016 *entry =
2017 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
2018 call->GetBlock()->try_index(), DeoptId::kNone);
2019 (*entry)->InheritDeoptTarget(Z, call);
2020
2021 // This is an internal method, no need to check argument types.
2023 new (Z) StoreFieldInstr(field, new (Z) Value(array), new (Z) Value(value),
2024 store_barrier_type, call->source())
2025 flow_graph->AppendTo(*entry, store, call->env(), FlowGraph::kEffect);
2026 *last = store;
2027 // We need a return value to replace uses of the original definition. However,
2028 // the last instruction is a field setter, which returns void, so we use null.
2029 *result = flow_graph->constant_null();
2030
2031 return true;
2032 }
2033
// Inlines a recognized "load class id" intrinsic: replaces the call with a
// LoadClassIdInstr reading the class id of the first argument.
// NOTE(review): this rendering is missing line 2035 (presumably the `call`
// parameter).
2034 static bool InlineLoadClassId(FlowGraph* flow_graph,
2036 GraphEntryInstr* graph_entry,
2037 FunctionEntryInstr** entry,
2038 Instruction** last,
2039 Definition** result) {
2040 *entry =
2041 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
2042 call->GetBlock()->try_index(), DeoptId::kNone);
2043 (*entry)->InheritDeoptTarget(Z, call);
2044 auto load_cid =
2045 new (Z) LoadClassIdInstr(call->ArgumentValueAt(0)->CopyWithType(Z));
// Note: inserted directly before the call rather than appended to *entry.
2046 flow_graph->InsertBefore(call, load_cid, nullptr, FlowGraph::kValue);
2047 *last = load_cid;
2048 *result = load_cid->AsDefinition();
2049 return true;
2050 }
2051
2052 // Returns the LoadIndexedInstr.
// Emits the instruction sequence shared by inlined string indexing: load the
// string's length, (conditionally) null-check the index, bounds-check it,
// load the element, and box the loaded value as a tagged integer.
// NOTE(review): this rendering is missing lines 2053-2054 (signature head),
// 2059, 2064 (the condition guarding the null check), 2070, 2072 and 2079 —
// some arguments and the guard are therefore not visible here; confirm
// against the original file.
2055 intptr_t cid,
2056 Definition* str,
2057 Definition* index,
2058 Instruction* cursor) {
2060 new (Z) Value(str), Slot::GetLengthFieldForArrayCid(cid), str->source());
2061 cursor = flow_graph->AppendTo(cursor, length, nullptr, FlowGraph::kValue);
2062
2063 // Bounds check.
2065 // Add a null-check in case the index argument is known to be compatible
2066 // but possibly nullable. By inserting the null-check, we can allow the
2067 // unbox instruction later inserted to be non-speculative.
2068 auto* const null_check = new (Z)
2069 CheckNullInstr(new (Z) Value(index), Symbols::Index(), call->deopt_id(),
2071 cursor = flow_graph->AppendTo(cursor, null_check, call->env(),
2073 }
2074 index = flow_graph->CreateCheckBound(length, index, call->deopt_id());
2075 cursor = flow_graph->AppendTo(cursor, index, call->env(), FlowGraph::kValue);
2076
2077 LoadIndexedInstr* load_indexed = new (Z) LoadIndexedInstr(
2078 new (Z) Value(str), new (Z) Value(index), /*index_unboxed=*/false,
2080 call->deopt_id(), call->source());
2081 cursor =
2082 flow_graph->AppendTo(cursor, load_indexed, nullptr, FlowGraph::kValue);
2083
// Box the unboxed element so callers can use it as a tagged value.
2084 auto box = BoxInstr::Create(kUnboxedIntPtr, new Value(load_indexed));
2085 cursor = flow_graph->AppendTo(cursor, box, nullptr, FlowGraph::kValue);
2086
2087 ASSERT(box == cursor);
2088 return box;
2089 }
2090
// Inlines String.[] (charAt) — only for one-byte strings: loads the code
// unit via PrepareInlineStringIndexOp and converts it into a string with
// OneByteStringFromCharCodeInstr.  Returns false for any other receiver cid.
// NOTE(review): this rendering is missing line 2092 (presumably the `call`
// parameter).
2091 static bool InlineStringBaseCharAt(FlowGraph* flow_graph,
2093 Definition* receiver,
2094 intptr_t cid,
2095 GraphEntryInstr* graph_entry,
2096 FunctionEntryInstr** entry,
2097 Instruction** last,
2098 Definition** result) {
2099 if (cid != kOneByteStringCid) {
// Only one-byte strings are supported by this inlining.
2100 return false;
2101 }
2102 Definition* str = receiver;
2103 Definition* index = call->ArgumentAt(1);
2104
2105 *entry =
2106 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
2107 call->GetBlock()->try_index(), DeoptId::kNone);
2108 (*entry)->InheritDeoptTarget(Z, call);
2109
2110 *last = PrepareInlineStringIndexOp(flow_graph, call, cid, str, index, *entry);
2111
2112 OneByteStringFromCharCodeInstr* char_at = new (Z)
2113 OneByteStringFromCharCodeInstr(new (Z) Value((*last)->AsDefinition()));
2114
2115 flow_graph->AppendTo(*last, char_at, nullptr, FlowGraph::kValue);
2116 *last = char_at;
2117 *result = char_at->AsDefinition();
2118
2119 return true;
2120 }
2121
// Inlines String.codeUnitAt for one- and two-byte strings; the boxed code
// unit produced by PrepareInlineStringIndexOp is the result.  Returns false
// for an unknown (dynamic) cid — only possible at a static call site — or
// any non-string cid.
// NOTE(review): this rendering is missing lines 2122-2123 (the function's
// signature head); the parameter list below is incomplete.
2124 Definition* receiver,
2125 intptr_t cid,
2126 GraphEntryInstr* graph_entry,
2127 FunctionEntryInstr** entry,
2128 Instruction** last,
2129 Definition** result) {
2130 if (cid == kDynamicCid) {
2131 ASSERT(call->IsStaticCall());
2132 return false;
2133 } else if ((cid != kOneByteStringCid) && (cid != kTwoByteStringCid)) {
2134 return false;
2135 }
2136 Definition* str = receiver;
2137 Definition* index = call->ArgumentAt(1);
2138
2139 *entry =
2140 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
2141 call->GetBlock()->try_index(), DeoptId::kNone);
2142 (*entry)->InheritDeoptTarget(Z, call);
2143
2144 *last = PrepareInlineStringIndexOp(flow_graph, call, cid, str, index, *entry);
2145 *result = (*last)->AsDefinition();
2146
2147 return true;
2148 }
2149
2150 // Only used for monomorphic calls.
// Attempts to replace a monomorphic instance call with the inlined body of a
// recognized method.  On success the inlined fragment (entry..last) is
// spliced in before the call, a receiver class/null check is inserted when
// required, uses of the call are redirected to `result`, and the call is
// removed from the graph.
// NOTE(review): this rendering is missing lines 2178, 2181, 2183, 2188,
// 2193, 2196, 2201 and 2204 — the declaration/assignments of the `check`
// variable and the `case` labels of the switch below; confirm against the
// original file.
2151 bool CallSpecializer::TryReplaceInstanceCallWithInline(
2152 FlowGraph* flow_graph,
2153 ForwardInstructionIterator* iterator,
2154 InstanceCallInstr* call,
2155 SpeculativeInliningPolicy* policy) {
2156 const CallTargets& targets = call->Targets();
2157 ASSERT(targets.IsMonomorphic());
2158 const intptr_t receiver_cid = targets.MonomorphicReceiverCid();
2159 const Function& target = targets.FirstTarget();
2160 const auto exactness = targets.MonomorphicExactness();
2161 ExactnessInfo exactness_info{exactness.IsExact(), false};
2162
2163 FunctionEntryInstr* entry = nullptr;
2164 Instruction* last = nullptr;
2165 Definition* result = nullptr;
2166 if (CallSpecializer::TryInlineRecognizedMethod(
2167 flow_graph, receiver_cid, target, call,
2168 call->Receiver()->definition(), call->source(), call->ic_data(),
2169 /*graph_entry=*/nullptr, &entry, &last, &result, policy,
2170 &exactness_info)) {
2171 // The empty Object constructor is the only case where the inlined body is
2172 // empty and there is no result.
2173 ASSERT((last != nullptr && result != nullptr) ||
2174 (target.recognized_kind() == MethodRecognizer::kObjectConstructor));
2175 // Determine if inlining instance methods needs a check.
2176 // StringBase.codeUnitAt is monomorphic but its implementation is selected
2177 // based on the receiver cid.
2179 if (target.is_polymorphic_target() ||
2180 (target.recognized_kind() == MethodRecognizer::kStringBaseCodeUnitAt)) {
2182 } else {
2184 }
2185
2186 // Insert receiver class or null check if needed.
2187 switch (check) {
2189 Instruction* check_class = flow_graph->CreateCheckClass(
2190 call->Receiver()->definition(), targets, call->deopt_id(),
2191 call->source());
2192 flow_graph->InsertBefore(call, check_class, call->env(),
2194 break;
2195 }
2197 Instruction* check_null = new (Z) CheckNullInstr(
2198 call->Receiver()->CopyWithType(Z), call->function_name(),
2199 call->deopt_id(), call->source());
2200 flow_graph->InsertBefore(call, check_null, call->env(),
2202 break;
2203 }
2205 break;
2206 }
2207
// Guard trivially-exact receivers when the inlined body relied on exactness.
2208 if (exactness_info.emit_exactness_guard && exactness.IsTriviallyExact()) {
2209 flow_graph->AddExactnessGuard(call, receiver_cid);
2210 }
2211
2212 ASSERT(!call->HasMoveArguments());
2213
2214 // Replace all uses of this definition with the result.
2215 if (call->HasUses()) {
2216 ASSERT(result != nullptr && result->HasSSATemp());
2217 call->ReplaceUsesWith(result);
2218 }
2219 // Finally insert the sequence other definition in place of this one in the
2220 // graph.
2221 if (entry->next() != nullptr) {
2222 call->previous()->LinkTo(entry->next());
2223 }
2224 entry->UnuseAllInputs(); // Entry block is not in the graph.
2225 if (last != nullptr) {
2226 ASSERT(call->GetBlock() == last->GetBlock());
2227 last->LinkTo(call);
2228 }
2229 // Remove through the iterator.
2230 ASSERT(iterator->Current() == call);
2231 iterator->RemoveCurrentFromGraph();
2232 call->set_previous(nullptr);
2233 call->set_next(nullptr);
2234 return true;
2235 }
2236 return false;
2237 }
2238
// Attempts to replace a static call with the inlined body of a recognized
// method.  Unlike the instance-call variant above, the inlined fragment may
// span multiple blocks; in that case dominator information is patched up and
// the call is re-linked after the exit block's last instruction.
// NOTE(review): this rendering is missing line 2296 (between the multi-block
// re-link and the single-block branch); confirm against the original file.
2239 bool CallSpecializer::TryReplaceStaticCallWithInline(
2240 FlowGraph* flow_graph,
2241 ForwardInstructionIterator* iterator,
2242 StaticCallInstr* call,
2243 SpeculativeInliningPolicy* policy) {
2244 FunctionEntryInstr* entry = nullptr;
2245 Instruction* last = nullptr;
2246 Definition* result = nullptr;
2247 Definition* receiver = nullptr;
2248 intptr_t receiver_cid = kIllegalCid;
// For instance methods called statically, recover the receiver and its
// propagated cid so recognized-method inlining can specialize on it.
2249 if (!call->function().is_static()) {
2250 receiver = call->Receiver()->definition();
2251 receiver_cid = call->Receiver()->Type()->ToCid();
2252 }
2253 if (CallSpecializer::TryInlineRecognizedMethod(
2254 flow_graph, receiver_cid, call->function(), call, receiver,
2255 call->source(), call->ic_data(), /*graph_entry=*/nullptr, &entry,
2256 &last, &result, policy)) {
2257 // The empty Object constructor is the only case where the inlined body is
2258 // empty and there is no result.
2259 ASSERT((last != nullptr && result != nullptr) ||
2260 (call->function().recognized_kind() ==
2261 MethodRecognizer::kObjectConstructor));
2262 ASSERT(!call->HasMoveArguments());
2263 // Replace all uses of this definition with the result.
2264 if (call->HasUses()) {
2265 ASSERT(result->HasSSATemp());
2266 call->ReplaceUsesWith(result);
2267 }
2268 // Finally insert the sequence other definition in place of this one in the
2269 // graph.
2270 if (entry != nullptr) {
2271 if (entry->next() != nullptr) {
2272 call->previous()->LinkTo(entry->next());
2273 }
2274 entry->UnuseAllInputs(); // Entry block is not in the graph.
2275 if (last != nullptr) {
2276 BlockEntryInstr* link = call->GetBlock();
2277 BlockEntryInstr* exit = last->GetBlock();
2278 if (link != exit) {
2279 // Dominance relation and SSA are updated incrementally when
2280 // conditionals are inserted. But succ/pred and ordering needs
2281 // to be redone. TODO(ajcbik): do this incrementally too.
2282 for (intptr_t i = 0, n = link->dominated_blocks().length(); i < n;
2283 ++i) {
2284 exit->AddDominatedBlock(link->dominated_blocks()[i]);
2285 }
2286 link->ClearDominatedBlocks();
2287 for (intptr_t i = 0, n = entry->dominated_blocks().length(); i < n;
2288 ++i) {
2289 link->AddDominatedBlock(entry->dominated_blocks()[i]);
2290 }
// Find the exit block's final instruction and link the call after it.
2291 Instruction* scan = exit;
2292 while (scan->next() != nullptr) {
2293 scan = scan->next();
2294 }
2295 scan->LinkTo(call);
2297 } else {
2298 last->LinkTo(call);
2299 }
2300 }
2301 }
2302 // Remove through the iterator.
2303 if (iterator != nullptr) {
2304 ASSERT(iterator->Current() == call);
2305 iterator->RemoveCurrentFromGraph();
2306 } else {
2307 call->RemoveFromGraph();
2308 }
2309 return true;
2310 }
2311 return false;
2312 }
2313
2314static bool CheckMask(Definition* definition, intptr_t* mask_ptr) {
2315 if (!definition->IsConstant()) return false;
2316 ConstantInstr* constant_instruction = definition->AsConstant();
2317 const Object& constant_mask = constant_instruction->value();
2318 if (!constant_mask.IsSmi()) return false;
2319 const intptr_t mask = Smi::Cast(constant_mask).Value();
2320 if ((mask < 0) || (mask > 255)) {
2321 return false; // Not a valid mask.
2322 }
2323 *mask_ptr = mask;
2324 return true;
2325}
2326
2328 public:
// Constructor: allocates the FunctionEntryInstr that will hold the lowered
// per-lane instruction sequence and makes it both the fragment's entry and
// the initial "last" instruction for subsequent appends.
// NOTE(review): this rendering is missing the class header (line 2327) and
// the first constructor parameters (lines 2329-2330, 2334); the parameter
// list below is incomplete.
2331 GraphEntryInstr* graph_entry,
2332 FunctionEntryInstr** entry,
2333 Instruction** last,
2335 : flow_graph_(flow_graph),
2336 call_(call),
2337 graph_entry_(graph_entry),
2338 entry_(entry),
2339 last_(last),
2340 result_(result) {
2341 *entry_ = new (zone())
2342 FunctionEntryInstr(graph_entry_, flow_graph_->allocate_block_id(),
2343 call_->GetBlock()->try_index(), call_->deopt_id());
2344 *last = *entry_;
2345 }
2346
// Lowers a recognized SIMD method [kind] into a per-lane scalar instruction
// sequence: unbox the input lanes, apply the per-lane operation, and re-box
// the result.  Returns false (leaving the call untouched) for kinds that are
// not supported here.
// NOTE(review): the method's signature line (2347 in the original file) is
// missing from this rendering.
2348 switch (kind) {
2349 // ==== Int32x4 ====
2350 case MethodRecognizer::kInt32x4FromInts:
2351 UnboxScalar(0, kUnboxedInt32, 4);
2352 UnboxScalar(1, kUnboxedInt32, 4);
2353 UnboxScalar(2, kUnboxedInt32, 4);
2354 UnboxScalar(3, kUnboxedInt32, 4);
2355 Gather(4);
2356 BoxVector(kUnboxedInt32, 4);
2357 return true;
2358 case MethodRecognizer::kInt32x4FromBools:
2359 UnboxBool(0, 4);
2360 UnboxBool(1, 4);
2361 UnboxBool(2, 4);
2362 UnboxBool(3, 4);
2363 Gather(4);
2364 BoxVector(kUnboxedInt32, 4);
2365 return true;
2366 case MethodRecognizer::kInt32x4GetFlagX:
2367 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2368 IntToBool();
2369 Return(0);
2370 return true;
2371 case MethodRecognizer::kInt32x4GetFlagY:
2372 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2373 IntToBool();
2374 Return(1);
2375 return true;
2376 case MethodRecognizer::kInt32x4GetFlagZ:
2377 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2378 IntToBool();
2379 Return(2);
2380 return true;
2381 case MethodRecognizer::kInt32x4GetFlagW:
2382 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2383 IntToBool();
2384 Return(3);
2385 return true;
2386 case MethodRecognizer::kInt32x4WithFlagX:
2387 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2388 UnboxBool(1, 4);
2389 With(0);
2390 BoxVector(kUnboxedInt32, 4);
2391 return true;
2392 case MethodRecognizer::kInt32x4WithFlagY:
2393 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2394 UnboxBool(1, 4);
2395 With(1);
2396 BoxVector(kUnboxedInt32, 4);
2397 return true;
2398 case MethodRecognizer::kInt32x4WithFlagZ:
2399 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2400 UnboxBool(1, 4);
2401 With(2);
2402 BoxVector(kUnboxedInt32, 4);
2403 return true;
2404 case MethodRecognizer::kInt32x4WithFlagW:
2405 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2406 UnboxBool(1, 4);
2407 With(3);
2408 BoxVector(kUnboxedInt32, 4);
2409 return true;
2410 case MethodRecognizer::kInt32x4Shuffle: {
// The shuffle mask must be a constant Smi in [0, 255] (see CheckMask);
// otherwise the call cannot be lowered.
2411 Definition* mask_definition =
2412 call_->ArgumentAt(call_->ArgumentCount() - 1);
2413 intptr_t mask = 0;
2414 if (!CheckMask(mask_definition, &mask)) {
2415 return false;
2416 }
2417 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2418 Shuffle(mask);
2419 BoxVector(kUnboxedInt32, 4);
2420 return true;
2421 }
2422 case MethodRecognizer::kInt32x4ShuffleMix: {
2423 Definition* mask_definition =
2424 call_->ArgumentAt(call_->ArgumentCount() - 1);
2425 intptr_t mask = 0;
2426 if (!CheckMask(mask_definition, &mask)) {
2427 return false;
2428 }
2429 UnboxVector(0, kUnboxedInt32, kMintCid, 4);
2430 UnboxVector(1, kUnboxedInt32, kMintCid, 4);
2431 ShuffleMix(mask);
2432 BoxVector(kUnboxedInt32, 4);
2433 return true;
2434 }
2435 case MethodRecognizer::kInt32x4GetSignMask:
2436 case MethodRecognizer::kInt32x4Select:
2437 // TODO(riscv)
2438 return false;
2439
2440 // ==== Float32x4 ====
2441 case MethodRecognizer::kFloat32x4Abs:
2442 Float32x4Unary(Token::kABS);
2443 return true;
2444 case MethodRecognizer::kFloat32x4Negate:
2445 Float32x4Unary(Token::kNEGATE);
2446 return true;
2447 case MethodRecognizer::kFloat32x4Sqrt:
2448 Float32x4Unary(Token::kSQRT);
2449 return true;
2450 case MethodRecognizer::kFloat32x4Reciprocal:
2451 Float32x4Unary(Token::kRECIPROCAL);
2452 return true;
2453 case MethodRecognizer::kFloat32x4ReciprocalSqrt:
2454 Float32x4Unary(Token::kRECIPROCAL_SQRT);
2455 return true;
2456 case MethodRecognizer::kFloat32x4GetSignMask:
2457 // TODO(riscv)
2458 return false;
2459 case MethodRecognizer::kFloat32x4Equal:
2460 Float32x4Compare(Token::kEQ);
2461 return true;
2462 case MethodRecognizer::kFloat32x4GreaterThan:
2463 Float32x4Compare(Token::kGT);
2464 return true;
2465 case MethodRecognizer::kFloat32x4GreaterThanOrEqual:
2466 Float32x4Compare(Token::kGTE);
2467 return true;
2468 case MethodRecognizer::kFloat32x4LessThan:
2469 Float32x4Compare(Token::kLT);
2470 return true;
2471 case MethodRecognizer::kFloat32x4LessThanOrEqual:
2472 Float32x4Compare(Token::kLTE);
2473 return true;
2474 case MethodRecognizer::kFloat32x4Add:
2475 Float32x4Binary(Token::kADD);
2476 return true;
2477 case MethodRecognizer::kFloat32x4Sub:
2478 Float32x4Binary(Token::kSUB);
2479 return true;
2480 case MethodRecognizer::kFloat32x4Mul:
2481 Float32x4Binary(Token::kMUL);
2482 return true;
2483 case MethodRecognizer::kFloat32x4Div:
2484 Float32x4Binary(Token::kDIV);
2485 return true;
2486 case MethodRecognizer::kFloat32x4Min:
2487 Float32x4Binary(Token::kMIN);
2488 return true;
2489 case MethodRecognizer::kFloat32x4Max:
2490 Float32x4Binary(Token::kMAX);
2491 return true;
2492 case MethodRecognizer::kFloat32x4Scale:
2493 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2494 UnboxScalar(1, kUnboxedFloat, 4);
2495 BinaryDoubleOp(Token::kMUL, kUnboxedFloat, 4);
2496 BoxVector(kUnboxedFloat, 4);
2497 return true;
2498 case MethodRecognizer::kFloat32x4Splat:
2499 UnboxScalar(0, kUnboxedFloat, 4);
2500 Splat(4);
2501 BoxVector(kUnboxedFloat, 4);
2502 return true;
2503 case MethodRecognizer::kFloat32x4WithX:
2504 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2505 UnboxScalar(1, kUnboxedFloat, 4);
2506 With(0);
2507 BoxVector(kUnboxedFloat, 4);
2508 return true;
2509 case MethodRecognizer::kFloat32x4WithY:
2510 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2511 UnboxScalar(1, kUnboxedFloat, 4);
2512 With(1);
2513 BoxVector(kUnboxedFloat, 4);
2514 return true;
2515 case MethodRecognizer::kFloat32x4WithZ:
2516 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2517 UnboxScalar(1, kUnboxedFloat, 4);
2518 With(2);
2519 BoxVector(kUnboxedFloat, 4);
2520 return true;
2521 case MethodRecognizer::kFloat32x4WithW:
2522 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2523 UnboxScalar(1, kUnboxedFloat, 4);
2524 With(3);
2525 BoxVector(kUnboxedFloat, 4);
2526 return true;
2527 case MethodRecognizer::kFloat32x4Zero:
2528 UnboxDoubleZero(kUnboxedFloat, 4);
2529 BoxVector(kUnboxedFloat, 4);
2530 return true;
2531 case MethodRecognizer::kFloat32x4FromDoubles:
2532 UnboxScalar(0, kUnboxedFloat, 4);
2533 UnboxScalar(1, kUnboxedFloat, 4);
2534 UnboxScalar(2, kUnboxedFloat, 4);
2535 UnboxScalar(3, kUnboxedFloat, 4);
2536 Gather(4);
2537 BoxVector(kUnboxedFloat, 4);
2538 return true;
2539 case MethodRecognizer::kFloat32x4GetX:
2540 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2541 BoxScalar(0, kUnboxedFloat);
2542 return true;
2543 case MethodRecognizer::kFloat32x4GetY:
2544 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2545 BoxScalar(1, kUnboxedFloat);
2546 return true;
2547 case MethodRecognizer::kFloat32x4GetZ:
2548 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2549 BoxScalar(2, kUnboxedFloat);
2550 return true;
2551 case MethodRecognizer::kFloat32x4GetW:
2552 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2553 BoxScalar(3, kUnboxedFloat);
2554 return true;
2555 case MethodRecognizer::kFloat32x4Shuffle: {
2556 Definition* mask_definition =
2557 call_->ArgumentAt(call_->ArgumentCount() - 1);
2558 intptr_t mask = 0;
2559 if (!CheckMask(mask_definition, &mask)) {
2560 return false;
2561 }
2562 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2563 Shuffle(mask);
2564 BoxVector(kUnboxedFloat, 4);
2565 return true;
2566 }
2567 case MethodRecognizer::kFloat32x4ShuffleMix: {
2568 Definition* mask_definition =
2569 call_->ArgumentAt(call_->ArgumentCount() - 1);
2570 intptr_t mask = 0;
2571 if (!CheckMask(mask_definition, &mask)) {
2572 return false;
2573 }
2574 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2575 UnboxVector(1, kUnboxedFloat, kDoubleCid, 4);
2576 ShuffleMix(mask);
2577 BoxVector(kUnboxedFloat, 4);
2578 return true;
2579 }
2580
2581 // ==== Float64x2 ====
2582 case MethodRecognizer::kFloat64x2Abs:
2583 Float64x2Unary(Token::kABS);
2584 return true;
2585 case MethodRecognizer::kFloat64x2Negate:
2586 Float64x2Unary(Token::kNEGATE);
2587 return true;
2588 case MethodRecognizer::kFloat64x2Sqrt:
2589 Float64x2Unary(Token::kSQRT);
2590 return true;
2591 case MethodRecognizer::kFloat64x2Add:
2592 Float64x2Binary(Token::kADD);
2593 return true;
2594 case MethodRecognizer::kFloat64x2Sub:
2595 Float64x2Binary(Token::kSUB);
2596 return true;
2597 case MethodRecognizer::kFloat64x2Mul:
2598 Float64x2Binary(Token::kMUL);
2599 return true;
2600 case MethodRecognizer::kFloat64x2Div:
2601 Float64x2Binary(Token::kDIV);
2602 return true;
2603 case MethodRecognizer::kFloat64x2Min:
2604 Float64x2Binary(Token::kMIN);
2605 return true;
2606 case MethodRecognizer::kFloat64x2Max:
2607 Float64x2Binary(Token::kMAX);
2608 return true;
2609 case MethodRecognizer::kFloat64x2Scale:
2610 UnboxVector(0, kUnboxedDouble, kDoubleCid, 2);
2611 UnboxScalar(1, kUnboxedDouble, 2);
2612 BinaryDoubleOp(Token::kMUL, kUnboxedDouble, 2);
2613 BoxVector(kUnboxedDouble, 2);
2614 return true;
2615 case MethodRecognizer::kFloat64x2Splat:
2616 UnboxScalar(0, kUnboxedDouble, 2);
2617 Splat(2);
2618 BoxVector(kUnboxedDouble, 2);
2619 return true;
2620 case MethodRecognizer::kFloat64x2WithX:
2621 UnboxVector(0, kUnboxedDouble, kDoubleCid, 2);
2622 UnboxScalar(1, kUnboxedDouble, 2);
2623 With(0);
2624 BoxVector(kUnboxedDouble, 2);
2625 return true;
2626 case MethodRecognizer::kFloat64x2WithY:
2627 UnboxVector(0, kUnboxedDouble, kDoubleCid, 2);
2628 UnboxScalar(1, kUnboxedDouble, 2);
2629 With(1);
2630 BoxVector(kUnboxedDouble, 2);
2631 return true;
2632 case MethodRecognizer::kFloat64x2Zero:
2633 UnboxDoubleZero(kUnboxedDouble, 2);
2634 BoxVector(kUnboxedDouble, 2);
2635 return true;
2636 case MethodRecognizer::kFloat64x2FromDoubles:
2637 UnboxScalar(0, kUnboxedDouble, 2);
2638 UnboxScalar(1, kUnboxedDouble, 2);
2639 Gather(2);
2640 BoxVector(kUnboxedDouble, 2);
2641 return true;
2642 case MethodRecognizer::kFloat64x2GetX:
2643 UnboxVector(0, kUnboxedDouble, kDoubleCid, 2);
2644 BoxScalar(0, kUnboxedDouble);
2645 return true;
2646 case MethodRecognizer::kFloat64x2GetY:
2647 UnboxVector(0, kUnboxedDouble, kDoubleCid, 2);
2648 BoxScalar(1, kUnboxedDouble);
2649 return true;
2650
2651 // Mixed
2652 case MethodRecognizer::kFloat32x4ToFloat64x2: {
2653 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4, 1);
2654 Float32x4ToFloat64x2();
2655 BoxVector(kUnboxedDouble, 2);
2656 return true;
2657 }
2658 case MethodRecognizer::kFloat64x2ToFloat32x4: {
2659 UnboxVector(0, kUnboxedDouble, kDoubleCid, 2, 1);
2660 Float64x2ToFloat32x4();
2661 BoxVector(kUnboxedFloat, 4);
2662 return true;
2663 }
2664 case MethodRecognizer::kInt32x4ToFloat32x4:
2665 UnboxVector(0, kUnboxedInt32, kMintCid, 4, 1);
2666 Int32x4ToFloat32x4();
2667 BoxVector(kUnboxedFloat, 4);
2668 return true;
2669 case MethodRecognizer::kFloat32x4ToInt32x4:
2670 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4, 1);
2671 Float32x4ToInt32x4();
2672 BoxVector(kUnboxedInt32, 4);
2673 return true;
2674 default:
2675 return false;
2676 }
2677 }
2678
2679 private:
// Lowers a unary Float32x4 op: unbox four float lanes, apply [op] per lane,
// box the result vector.
2680 void Float32x4Unary(Token::Kind op) {
2681 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2682 UnaryDoubleOp(op, kUnboxedFloat, 4);
2683 BoxVector(kUnboxedFloat, 4);
2684 }
// Lowers a binary Float32x4 op: unbox both operands' four lanes, apply [op]
// lane-wise, box the result vector.
2685 void Float32x4Binary(Token::Kind op) {
2686 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2687 UnboxVector(1, kUnboxedFloat, kDoubleCid, 4);
2688 BinaryDoubleOp(op, kUnboxedFloat, 4);
2689 BoxVector(kUnboxedFloat, 4);
2690 }
// Lowers a Float32x4 lane-wise comparison: compares the two operands per
// lane and boxes the resulting integer mask lanes as an Int32x4.
2691 void Float32x4Compare(Token::Kind op) {
2692 UnboxVector(0, kUnboxedFloat, kDoubleCid, 4);
2693 UnboxVector(1, kUnboxedFloat, kDoubleCid, 4);
2694 FloatCompare(op);
2695 BoxVector(kUnboxedInt32, 4);
2696 }
// Lowers a unary Float64x2 op: unbox two double lanes, apply [op] per lane,
// box the result vector.
2697 void Float64x2Unary(Token::Kind op) {
2698 UnboxVector(0, kUnboxedDouble, kDoubleCid, 2);
2699 UnaryDoubleOp(op, kUnboxedDouble, 2);
2700 BoxVector(kUnboxedDouble, 2);
2701 }
// Lowers a binary Float64x2 op: unbox both operands' two lanes, apply [op]
// lane-wise, box the result vector.
2702 void Float64x2Binary(Token::Kind op) {
2703 UnboxVector(0, kUnboxedDouble, kDoubleCid, 2);
2704 UnboxVector(1, kUnboxedDouble, kDoubleCid, 2);
2705 BinaryDoubleOp(op, kUnboxedDouble, 2);
2706 BoxVector(kUnboxedDouble, 2);
2707 }
2708
// Unboxes the [n] lanes of the SIMD argument at position [i] (offset by
// [type_args] to skip leading type arguments) into in_[i][0..n-1] using one
// UnboxLaneInstr per lane.
// NOTE(review): this rendering is missing line 2722 (the tail of the
// CheckNullInstr argument list).
2709 void UnboxVector(intptr_t i,
2710 Representation rep,
2711 intptr_t cid,
2712 intptr_t n,
2713 intptr_t type_args = 0) {
2714 Definition* arg = call_->ArgumentAt(i + type_args);
2715 if (CompilerState::Current().is_aot()) {
2716 // Add a null-check in case the arguments are known to be compatible
2717 // but possibly nullable.
2718 // By inserting the null-check, we can allow the unbox instruction later
2719 // inserted to be non-speculative.
2720 arg = AddDefinition(new (zone()) CheckNullInstr(
2721 new (zone()) Value(arg), Symbols::SecondArg(), call_->deopt_id(),
2723 }
2724 for (intptr_t lane = 0; lane < n; lane++) {
2725 in_[i][lane] = AddDefinition(
2726 new (zone()) UnboxLaneInstr(new (zone()) Value(arg), lane, rep, cid));
2727 }
2728 }
2729
// Unboxes a scalar argument at position [i] (offset by [type_args]) once and
// replicates the unboxed value across all [n] lanes of input slot [i].
// NOTE(review): this rendering is missing lines 2742 and 2746 (tails of the
// CheckNullInstr and UnboxInstr argument lists).
2730 void UnboxScalar(intptr_t i,
2731 Representation rep,
2732 intptr_t n,
2733 intptr_t type_args = 0) {
2734 Definition* arg = call_->ArgumentAt(i + type_args);
2735 if (CompilerState::Current().is_aot()) {
2736 // Add a null-check in case the arguments are known to be compatible
2737 // but possibly nullable.
2738 // By inserting the null-check, we can allow the unbox instruction later
2739 // inserted to be non-speculative.
2740 arg = AddDefinition(new (zone()) CheckNullInstr(
2741 new (zone()) Value(arg), Symbols::SecondArg(), call_->deopt_id(),
2743 }
2744 Definition* unbox = AddDefinition(
2745 UnboxInstr::Create(rep, new (zone()) Value(arg), DeoptId::kNone,
2747 for (intptr_t lane = 0; lane < n; lane++) {
2748 in_[i][lane] = unbox;
2749 }
2750 }
2751
2752 void UnboxBool(intptr_t i, intptr_t n) {
2753 Definition* unbox = AddDefinition(new (zone()) BoolToIntInstr(
2754 call_->ArgumentValueAt(i)->CopyWithType(zone())));
2755 for (intptr_t lane = 0; lane < n; lane++) {
2756 in_[i][lane] = unbox;
2757 }
2758 }
2759
// Fills all [n] result lanes with an unboxed zero constant (used for the
// Float32x4.zero / Float64x2.zero constructors).
// NOTE(review): this rendering is missing line 2762 (the tail of the
// GetConstant argument list holding the zero Double and representation).
2760 void UnboxDoubleZero(Representation rep, intptr_t n) {
2761 Definition* zero = flow_graph_->GetConstant(
2763 for (intptr_t lane = 0; lane < n; lane++) {
2764 op_[lane] = zero;
2765 }
2766 }
2767
// Applies unary [op] to each of the [n] lanes of input 0, writing the
// results into op_[0..n-1].
// NOTE(review): this rendering is missing line 2772 (the tail of the
// UnaryDoubleOpInstr argument list).
2768 void UnaryDoubleOp(Token::Kind op, Representation rep, intptr_t n) {
2769 for (intptr_t lane = 0; lane < n; lane++) {
2770 op_[lane] = AddDefinition(new (zone()) UnaryDoubleOpInstr(
2771 op, new (zone()) Value(in_[0][lane]), call_->deopt_id(),
2773 }
2774 }
2775
// Applies binary [op] lane-wise to inputs 0 and 1, writing the results into
// op_[0..n-1].
// NOTE(review): this rendering is missing line 2781 (the tail of the
// BinaryDoubleOpInstr argument list).
2776 void BinaryDoubleOp(Token::Kind op, Representation rep, intptr_t n) {
2777 for (intptr_t lane = 0; lane < n; lane++) {
2778 op_[lane] = AddDefinition(new (zone()) BinaryDoubleOpInstr(
2779 op, new (zone()) Value(in_[0][lane]),
2780 new (zone()) Value(in_[1][lane]), call_->deopt_id(), call_->source(),
2782 }
2783 }
2784
2785 void FloatCompare(Token::Kind op) {
2786 for (intptr_t lane = 0; lane < 4; lane++) {
2787 op_[lane] = AddDefinition(
2788 new (zone()) FloatCompareInstr(op, new (zone()) Value(in_[0][lane]),
2789 new (zone()) Value(in_[1][lane])));
2790 }
2791 }
2792
2793 void With(intptr_t i) {
2794 for (intptr_t lane = 0; lane < 4; lane++) {
2795 op_[lane] = in_[0][lane];
2796 }
2797 op_[i] = in_[1][0];
2798 }
2799 void Splat(intptr_t n) {
2800 for (intptr_t lane = 0; lane < n; lane++) {
2801 op_[lane] = in_[0][0];
2802 }
2803 }
2804 void Gather(intptr_t n) {
2805 for (intptr_t lane = 0; lane < n; lane++) {
2806 op_[lane] = in_[lane][0];
2807 }
2808 }
2809 void Shuffle(intptr_t mask) {
2810 op_[0] = in_[0][(mask >> 0) & 3];
2811 op_[1] = in_[0][(mask >> 2) & 3];
2812 op_[2] = in_[0][(mask >> 4) & 3];
2813 op_[3] = in_[0][(mask >> 6) & 3];
2814 }
2815 void ShuffleMix(intptr_t mask) {
2816 op_[0] = in_[0][(mask >> 0) & 3];
2817 op_[1] = in_[0][(mask >> 2) & 3];
2818 op_[2] = in_[1][(mask >> 4) & 3];
2819 op_[3] = in_[1][(mask >> 6) & 3];
2820 }
2821 void Float32x4ToFloat64x2() {
2822 for (intptr_t lane = 0; lane < 2; lane++) {
2823 op_[lane] = AddDefinition(new (zone()) FloatToDoubleInstr(
2824 new (zone()) Value(in_[0][lane]), DeoptId::kNone));
2825 }
2826 }
2827 void Float64x2ToFloat32x4() {
2828 for (intptr_t lane = 0; lane < 2; lane++) {
2829 op_[lane] = AddDefinition(new (zone()) DoubleToFloatInstr(
2830 new (zone()) Value(in_[0][lane]), DeoptId::kNone));
2831 }
2832 Definition* zero = flow_graph_->GetConstant(
2833 Double::ZoneHandle(Double::NewCanonical(0.0)), kUnboxedFloat);
2834 op_[2] = zero;
2835 op_[3] = zero;
2836 }
2837 void Int32x4ToFloat32x4() {
2838 for (intptr_t lane = 0; lane < 4; lane++) {
2839 op_[lane] = AddDefinition(new (zone()) BitCastInstr(
2840 kUnboxedInt32, kUnboxedFloat, new (zone()) Value(in_[0][lane])));
2841 }
2842 }
2843 void Float32x4ToInt32x4() {
2844 for (intptr_t lane = 0; lane < 4; lane++) {
2845 op_[lane] = AddDefinition(new (zone()) BitCastInstr(
2846 kUnboxedFloat, kUnboxedInt32, new (zone()) Value(in_[0][lane])));
2847 }
2848 }
2849 void IntToBool() {
2850 for (intptr_t lane = 0; lane < 4; lane++) {
2851 op_[lane] = AddDefinition(
2852 new (zone()) IntToBoolInstr(new (zone()) Value(in_[0][lane])));
2853 }
2854 }
2855
2856 void BoxVector(Representation rep, intptr_t n) {
2857 Definition* box;
2858 if (n == 2) {
2859 box = new (zone()) BoxLanesInstr(rep, new (zone()) Value(op_[0]),
2860 new (zone()) Value(op_[1]));
2861 } else {
2862 ASSERT(n == 4);
2863 box = new (zone()) BoxLanesInstr(
2864 rep, new (zone()) Value(op_[0]), new (zone()) Value(op_[1]),
2865 new (zone()) Value(op_[2]), new (zone()) Value(op_[3]));
2866 }
2867 Done(AddDefinition(box));
2868 }
2869
2870 void BoxScalar(intptr_t lane, Representation rep) {
2871 Definition* box = BoxInstr::Create(rep, new (zone()) Value(in_[0][lane]));
2872 Done(AddDefinition(box));
2873 }
2874
// Finishes the lowering with an already-computed result lane.
2875 void Return(intptr_t lane) { Done(op_[lane]); }
2876
2877 void Done(Definition* result) {
2878 // InheritDeoptTarget also inherits environment (which may add 'entry' into
2879 // env_use_list()), so InheritDeoptTarget should be done only after decided
2880 // to inline.
2881 (*entry_)->InheritDeoptTarget(zone(), call_);
2882 *result_ = result;
2883 }
2884
 // Appends |def| after |*last_| in the flow graph, attaching the call's
 // environment only when the call has a real deopt id, and advances |*last_|
 // to the appended instruction.
 // NOTE(review): original line 2888 (the trailing UseKind argument of
 // AppendTo) is missing from this extraction.
 2885 Definition* AddDefinition(Definition* def) {
 2886 *last_ = flow_graph_->AppendTo(
 2887 *last_, def, call_->deopt_id() != DeoptId::kNone ? call_->env() : NULL,
 2889 return def;
 2890 }
 // Shorthand for the zone owned by the flow graph being built.
 2891 Zone* zone() { return flow_graph_->zone(); }
2892
 // --- State shared by the lowering helpers above. ---
 2893 FlowGraph* flow_graph_;
 // The call instruction being replaced by the inlined SIMD fragment.
 2894 Instruction* call_;
 2895 GraphEntryInstr* graph_entry_;
 // Out-parameters handed back to the inliner: the entry block, the last
 // appended instruction, and the definition replacing the call's result.
 2896 FunctionEntryInstr** entry_;
 2897 Instruction** last_;
 2898 Definition** result_;
 2899
 // First index is the argument number, second index is the lane number.
 2901 Definition* in_[4][4];
 // Index is the lane number.
 2903 Definition* op_[4];
 2904};
2905
// Replaces a call to a recognized SIMD method |kind| with an inlined
// SimdOpInstr-based flow graph fragment. On success fills *entry, *last and
// *result and returns true; on failure (dynamic forwarder with user-checked
// arguments, SIMD inlining disabled, or an unencodable shuffle mask) returns
// false so the call is handled conventionally.
// NOTE(review): this extraction is missing several original lines (2908 and
// 2910 from the parameter list, 2933, 2983, 3024), so the signature and a few
// statements appear truncated here.
2906static bool InlineSimdOp(FlowGraph* flow_graph,
 2907 bool is_dynamic_call,
 2909 Definition* receiver,
 2911 GraphEntryInstr* graph_entry,
 2912 FunctionEntryInstr** entry,
 2913 Instruction** last,
 2914 Definition** result) {
 2915 if (is_dynamic_call && call->ArgumentCount() > 1) {
 2916 // Issue(dartbug.com/37737): Dynamic invocation forwarders have the
 2917 // same recognized kind as the method they are forwarding to.
 2918 // That causes us to inline the recognized method and not the
 2919 // dyn: forwarder itself.
 2920 // This is only safe if all arguments are checked in the flow graph we
 2921 // build.
 2922 // For double/int arguments speculative unboxing instructions should ensure
 2923 // to bailout in AOT (or deoptimize in JIT) if the incoming values are not
 2924 // correct. Though for user-implementable types, like
 2925 // operator+(Float32x4 other), this is not safe and we therefore bailout.
 2926 return false;
 2927 }
 2928
 2929 if (!FLAG_enable_simd_inline) {
 2930 return false;
 2931 }
 2932
 // RISC-V has no unboxed SIMD support, so fall back to scalar lowering.
 2934#if defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
 2935 SimdLowering lowering(flow_graph, call, graph_entry, entry, last, result);
 2936 return lowering.TryInline(kind);
 2937#else
 2938 return false;
 2939#endif
 2940 }
 2941
 2942 *entry =
 2943 new (Z) FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
 2944 call->GetBlock()->try_index(), DeoptId::kNone);
 2945 Instruction* cursor = *entry;
 // Dispatch on the recognized method to build the replacement instruction.
 2946 switch (kind) {
 2947 case MethodRecognizer::kInt32x4Shuffle:
 2948 case MethodRecognizer::kInt32x4ShuffleMix:
 2949 case MethodRecognizer::kFloat32x4Shuffle:
 2950 case MethodRecognizer::kFloat32x4ShuffleMix: {
 // Shuffles require a compile-time-constant mask; bail out otherwise.
 2951 Definition* mask_definition = call->ArgumentAt(call->ArgumentCount() - 1);
 2952 intptr_t mask = 0;
 2953 if (!CheckMask(mask_definition, &mask)) {
 2954 return false;
 2955 }
 2956 *last = SimdOpInstr::CreateFromCall(Z, kind, receiver, call, mask);
 2957 break;
 2958 }
 2959
 2960 case MethodRecognizer::kFloat32x4WithX:
 2961 case MethodRecognizer::kFloat32x4WithY:
 2962 case MethodRecognizer::kFloat32x4WithZ:
 2963 case MethodRecognizer::kFloat32x4WithW:
 2964 case MethodRecognizer::kFloat32x4Scale: {
 2965 Definition* left = receiver;
 2966 Definition* right = call->ArgumentAt(1);
 2967 // Note: left and right values are swapped when handed to the instruction,
 2968 // this is done so that the double value is loaded into the output
 2969 // register and can be destroyed.
 2970 // TODO(dartbug.com/31035) this swapping is only needed because register
 2971 // allocator has SameAsFirstInput policy and not SameAsNthInput(n).
 2972 *last = SimdOpInstr::Create(kind, new (Z) Value(right),
 2973 new (Z) Value(left), call->deopt_id());
 2974 break;
 2975 }
 2976
 2977 case MethodRecognizer::kFloat32x4Zero:
 2978 case MethodRecognizer::kFloat32x4ToFloat64x2:
 2979 case MethodRecognizer::kFloat64x2ToFloat32x4:
 2980 case MethodRecognizer::kFloat32x4ToInt32x4:
 2981 case MethodRecognizer::kInt32x4ToFloat32x4:
 2982 case MethodRecognizer::kFloat64x2Zero:
 2984 break;
 2985 case MethodRecognizer::kFloat32x4Mul:
 2986 case MethodRecognizer::kFloat32x4Div:
 2987 case MethodRecognizer::kFloat32x4Add:
 2988 case MethodRecognizer::kFloat32x4Sub:
 2989 case MethodRecognizer::kFloat64x2Mul:
 2990 case MethodRecognizer::kFloat64x2Div:
 2991 case MethodRecognizer::kFloat64x2Add:
 2992 case MethodRecognizer::kFloat64x2Sub:
 2993 *last = SimdOpInstr::CreateFromCall(Z, kind, receiver, call);
 2994 if (CompilerState::Current().is_aot()) {
 2995 // Add null-checks in case of the arguments are known to be compatible
 2996 // but they are possibly nullable.
 2997 // By inserting the null-check, we can allow the unbox instruction later
 2998 // inserted to be non-speculative.
 2999 CheckNullInstr* check1 =
 3000 new (Z) CheckNullInstr(new (Z) Value(receiver), Symbols::FirstArg(),
 3001 call->deopt_id(), call->source());
 3002
 3003 CheckNullInstr* check2 = new (Z) CheckNullInstr(
 3004 new (Z) Value(call->ArgumentAt(1)), Symbols::SecondArg(),
 3005 call->deopt_id(), call->source(), CheckNullInstr::kArgumentError);
 3006
 3007 (*last)->SetInputAt(0, new (Z) Value(check1));
 3008 (*last)->SetInputAt(1, new (Z) Value(check2));
 3009
 3010 flow_graph->InsertBefore(call, check1, call->env(), FlowGraph::kValue);
 3011 flow_graph->InsertBefore(call, check2, call->env(), FlowGraph::kValue);
 3012 }
 3013 break;
 3014 default:
 3015 *last = SimdOpInstr::CreateFromCall(Z, kind, receiver, call);
 3016 break;
 3017 }
 3018 // InheritDeoptTarget also inherits environment (which may add 'entry' into
 3019 // env_use_list()), so InheritDeoptTarget should be done only after decided
 3020 // to inline.
 3021 (*entry)->InheritDeoptTarget(Z, call);
 3022 flow_graph->AppendTo(
 3023 cursor, *last, call->deopt_id() != DeoptId::kNone ? call->env() : nullptr,
 3025 *result = (*last)->AsDefinition();
 3026 return true;
 3027}
3028
// Emits x * y as a BinaryInt64OpInstr appended after |cursor| (no
// environment attached) and returns the appended instruction.
// NOTE(review): original line 3035 (the tail of the constructor argument
// list) is missing from this extraction.
3029static Instruction* InlineMul(FlowGraph* flow_graph,
 3030 Instruction* cursor,
 3031 Definition* x,
 3032 Definition* y) {
 3033 BinaryInt64OpInstr* mul = new (Z)
 3034 BinaryInt64OpInstr(Token::kMUL, new (Z) Value(x), new (Z) Value(y),
 3036 return flow_graph->AppendTo(cursor, mul, nullptr, FlowGraph::kValue);
 3037}
3038
// Inlines _intPow(x, y) for trivially strength-reducible cases:
// y == 0 -> 1, y == 1 -> x, 2 <= y <= 5 -> repeated multiplication via
// InlineMul, and x == 1 -> 1 for any y. Returns false when no shortcut
// applies so the call is compiled normally.
// NOTE(review): original lines 3040 (the |call| parameter) and 3071 (the
// declaration of |square|) are missing from this extraction.
3039static bool InlineMathIntPow(FlowGraph* flow_graph,
 3041 GraphEntryInstr* graph_entry,
 3042 FunctionEntryInstr** entry,
 3043 Instruction** last,
 3044 Definition** result) {
 3045 // Invoking the _intPow(x, y) implies that both:
 3046 // (1) x, y are int
 3047 // (2) y >= 0.
 3048 // Thus, try to inline some very obvious cases.
 3049 // TODO(ajcbik): useful to generalize?
 3050 intptr_t val = 0;
 3051 Value* x = call->ArgumentValueAt(0);
 3052 Value* y = call->ArgumentValueAt(1);
 3053 // Use x^0 == 1, x^1 == x, and x^c == x * .. * x for small c.
 3054 const intptr_t small_exponent = 5;
 3055 if (IsSmiValue(y, &val)) {
 3056 if (val == 0) {
 3057 *last = flow_graph->GetConstant(Smi::ZoneHandle(Smi::New(1)));
 3058 *result = (*last)->AsDefinition();
 3059 return true;
 3060 } else if (val == 1) {
 3061 *last = x->definition();
 3062 *result = (*last)->AsDefinition();
 3063 return true;
 3064 } else if (1 < val && val <= small_exponent) {
 3065 // Lazily construct entry only in this case.
 3066 *entry = new (Z)
 3067 FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
 3068 call->GetBlock()->try_index(), DeoptId::kNone);
 3069 (*entry)->InheritDeoptTarget(Z, call);
 3070 Definition* x_def = x->definition();
 3072 InlineMul(flow_graph, *entry, x_def, x_def)->AsDefinition();
 3073 *last = square;
 3074 *result = square;
 // Build the remaining multiplications from x^2 (square) upward.
 3075 switch (val) {
 3076 case 2:
 3077 return true;
 3078 case 3:
 3079 *last = InlineMul(flow_graph, *last, x_def, square);
 3080 *result = (*last)->AsDefinition();
 3081 return true;
 3082 case 4:
 3083 *last = InlineMul(flow_graph, *last, square, square);
 3084 *result = (*last)->AsDefinition();
 3085 return true;
 3086 case 5:
 3087 *last = InlineMul(flow_graph, *last, square, square);
 3088 *last = InlineMul(flow_graph, *last, x_def, (*last)->AsDefinition());
 3089 *result = (*last)->AsDefinition();
 3090 return true;
 3091 }
 3092 }
 3093 }
 3094 // Use 0^y == 0 (only for y != 0) and 1^y == 1.
 3095 if (IsSmiValue(x, &val)) {
 3096 if (val == 1) {
 3097 *last = x->definition();
 3098 *result = x->definition();
 3099 return true;
 3100 }
 3101 }
 3102 return false;
 3103}
3104
// Attempts to replace a call to a compiler-recognized method (typed-data
// indexing, double arithmetic, SIMD operations, _intPow, string writes,
// Object construction, runtimeType, etc.) with an inlined flow graph
// fragment. On success, *entry/*last/*result describe the fragment and true
// is returned; otherwise the call is left for normal compilation. The first
// switch handles kinds that never deoptimize; kinds after the
// |can_speculate| guard may insert speculative (deoptimizing) instructions.
// NOTE(review): many original source lines are missing from this extraction
// (e.g. 3254, 3260, 3357, 3365, 3368, 3377, 3381, 3387, 3407, 3410, 3435,
// 3437, 3444, 3448), so several statements below appear truncated.
3105bool CallSpecializer::TryInlineRecognizedMethod(
 3106 FlowGraph* flow_graph,
 3107 intptr_t receiver_cid,
 3108 const Function& target,
 3109 Definition* call,
 3110 Definition* receiver,
 3111 const InstructionSource& source,
 3112 const ICData* ic_data,
 3113 GraphEntryInstr* graph_entry,
 3114 FunctionEntryInstr** entry,
 3115 Instruction** last,
 3116 Definition** result,
 3117 SpeculativeInliningPolicy* policy,
 3118 CallSpecializer::ExactnessInfo* exactness) {
 3119 COMPILER_TIMINGS_TIMER_SCOPE(flow_graph->thread(), InlineRecognizedMethod);
 3120
 3121 if (receiver_cid == kSentinelCid) {
 3122 // Receiver was defined in dead code and was replaced by the sentinel.
 3123 // Original receiver cid is lost, so don't try to inline recognized
 3124 // methods.
 3125 return false;
 3126 }
 3127
 3128 const bool can_speculate = policy->IsAllowedForInlining(call->deopt_id());
 3129 const bool is_dynamic_call = Function::IsDynamicInvocationForwarderName(
 3130 String::Handle(flow_graph->zone(), target.name()));
 3131
 3132 const MethodRecognizer::Kind kind = target.recognized_kind();
 // Kinds that can always be inlined (no speculation needed).
 3133 switch (kind) {
 3134 case MethodRecognizer::kTypedDataIndexCheck:
 3135 return InlineTypedDataIndexCheck(flow_graph, call, receiver, graph_entry,
 3136 entry, last, result, Symbols::Index());
 3137 case MethodRecognizer::kByteDataByteOffsetCheck:
 3138 return InlineTypedDataIndexCheck(flow_graph, call, receiver, graph_entry,
 3139 entry, last, result,
 3140 Symbols::byteOffset());
 3141 // Recognized [] operators.
 3142 case MethodRecognizer::kObjectArrayGetIndexed:
 3143 case MethodRecognizer::kGrowableArrayGetIndexed:
 3144 case MethodRecognizer::kInt8ArrayGetIndexed:
 3145 case MethodRecognizer::kUint8ArrayGetIndexed:
 3146 case MethodRecognizer::kUint8ClampedArrayGetIndexed:
 3147 case MethodRecognizer::kExternalUint8ArrayGetIndexed:
 3148 case MethodRecognizer::kExternalUint8ClampedArrayGetIndexed:
 3149 case MethodRecognizer::kInt16ArrayGetIndexed:
 3150 case MethodRecognizer::kUint16ArrayGetIndexed:
 3151 return InlineGetIndexed(flow_graph, can_speculate, is_dynamic_call, kind,
 3152 call, receiver, graph_entry, entry, last, result);
 3153 case MethodRecognizer::kFloat32ArrayGetIndexed:
 3154 case MethodRecognizer::kFloat64ArrayGetIndexed:
 3155 return InlineGetIndexed(flow_graph, can_speculate, is_dynamic_call, kind,
 3156 call, receiver, graph_entry, entry, last, result);
 3157 case MethodRecognizer::kFloat32x4ArrayGetIndexed:
 3158 case MethodRecognizer::kFloat64x2ArrayGetIndexed:
 3159 if (!ShouldInlineSimd()) {
 3160 return false;
 3161 }
 3162 return InlineGetIndexed(flow_graph, can_speculate, is_dynamic_call, kind,
 3163 call, receiver, graph_entry, entry, last, result);
 3164 case MethodRecognizer::kInt32ArrayGetIndexed:
 3165 case MethodRecognizer::kUint32ArrayGetIndexed:
 3166 return InlineGetIndexed(flow_graph, can_speculate, is_dynamic_call, kind,
 3167 call, receiver, graph_entry, entry, last, result);
 3168 case MethodRecognizer::kInt64ArrayGetIndexed:
 3169 case MethodRecognizer::kUint64ArrayGetIndexed:
 3170 return InlineGetIndexed(flow_graph, can_speculate, is_dynamic_call, kind,
 3171 call, receiver, graph_entry, entry, last, result);
 3172 case MethodRecognizer::kClassIDgetID:
 3173 return InlineLoadClassId(flow_graph, call, graph_entry, entry, last,
 3174 result);
 3175 default:
 3176 break;
 3177 }
 3178
 3179 // The following ones need to speculate.
 3180 if (!can_speculate) {
 3181 return false;
 3182 }
 3183
 3184 switch (kind) {
 3185 case MethodRecognizer::kUint8ClampedArraySetIndexed:
 3186 case MethodRecognizer::kExternalUint8ClampedArraySetIndexed:
 3187 // These require clamping. Just inline normal body instead which
 3188 // contains necessary clamping code.
 3189 return false;
 3190
 3191 // Recognized []= operators.
 3192 case MethodRecognizer::kObjectArraySetIndexed:
 3193 case MethodRecognizer::kGrowableArraySetIndexed:
 3194 case MethodRecognizer::kObjectArraySetIndexedUnchecked:
 3195 case MethodRecognizer::kGrowableArraySetIndexedUnchecked:
 3196 case MethodRecognizer::kInt8ArraySetIndexed:
 3197 case MethodRecognizer::kUint8ArraySetIndexed:
 3198 case MethodRecognizer::kExternalUint8ArraySetIndexed:
 3199 case MethodRecognizer::kInt16ArraySetIndexed:
 3200 case MethodRecognizer::kUint16ArraySetIndexed:
 3201 case MethodRecognizer::kInt32ArraySetIndexed:
 3202 case MethodRecognizer::kUint32ArraySetIndexed:
 3203 case MethodRecognizer::kInt64ArraySetIndexed:
 3204 case MethodRecognizer::kUint64ArraySetIndexed:
 3205 return InlineSetIndexed(flow_graph, kind, target, call, receiver, source,
 3206 exactness, graph_entry, entry, last, result);
 3207
 3208 case MethodRecognizer::kFloat32ArraySetIndexed:
 3209 case MethodRecognizer::kFloat64ArraySetIndexed: {
 3210 return InlineSetIndexed(flow_graph, kind, target, call, receiver, source,
 3211 exactness, graph_entry, entry, last, result);
 3212 }
 3213 case MethodRecognizer::kFloat32x4ArraySetIndexed: {
 3214 if (!ShouldInlineSimd()) {
 3215 return false;
 3216 }
 3217 return InlineSetIndexed(flow_graph, kind, target, call, receiver, source,
 3218 exactness, graph_entry, entry, last, result);
 3219 }
 3220 case MethodRecognizer::kFloat64x2ArraySetIndexed: {
 3221 if (!ShouldInlineSimd()) {
 3222 return false;
 3223 }
 3224 return InlineSetIndexed(flow_graph, kind, target, call, receiver, source,
 3225 exactness, graph_entry, entry, last, result);
 3226 }
 3227 case MethodRecognizer::kStringBaseCodeUnitAt:
 3228 return InlineStringBaseCodeUnitAt(flow_graph, call, receiver,
 3229 receiver_cid, graph_entry, entry, last,
 3230 result);
 3231 case MethodRecognizer::kStringBaseCharAt:
 3232 return InlineStringBaseCharAt(flow_graph, call, receiver, receiver_cid,
 3233 graph_entry, entry, last, result);
 3234 case MethodRecognizer::kDoubleAdd:
 3235 return InlineDoubleOp(flow_graph, Token::kADD, call, receiver,
 3236 graph_entry, entry, last, result);
 3237 case MethodRecognizer::kDoubleSub:
 3238 return InlineDoubleOp(flow_graph, Token::kSUB, call, receiver,
 3239 graph_entry, entry, last, result);
 3240 case MethodRecognizer::kDoubleMul:
 3241 return InlineDoubleOp(flow_graph, Token::kMUL, call, receiver,
 3242 graph_entry, entry, last, result);
 3243 case MethodRecognizer::kDoubleDiv:
 3244 return InlineDoubleOp(flow_graph, Token::kDIV, call, receiver,
 3245 graph_entry, entry, last, result);
 3246 case MethodRecognizer::kDouble_getIsNaN:
 3247 case MethodRecognizer::kDouble_getIsInfinite:
 3248 case MethodRecognizer::kDouble_getIsNegative:
 3249 return InlineDoubleTestOp(flow_graph, call, receiver, kind, graph_entry,
 3250 entry, last, result);
 3251 case MethodRecognizer::kGrowableArraySetData:
 3252 ASSERT((receiver_cid == kGrowableObjectArrayCid) ||
 3253 ((receiver_cid == kDynamicCid) && call->IsStaticCall()));
 3255 flow_graph, Slot::GrowableObjectArray_data(), kEmitStoreBarrier, call,
 3256 receiver, graph_entry, entry, last, result);
 3257 case MethodRecognizer::kGrowableArraySetLength:
 3258 ASSERT((receiver_cid == kGrowableObjectArrayCid) ||
 3259 ((receiver_cid == kDynamicCid) && call->IsStaticCall()));
 3261 flow_graph, Slot::GrowableObjectArray_length(), kNoStoreBarrier, call,
 3262 receiver, graph_entry, entry, last, result);
 3263
 // All recognized SIMD operations funnel into InlineSimdOp.
 3264 case MethodRecognizer::kFloat32x4Abs:
 3265 case MethodRecognizer::kFloat32x4Clamp:
 3266 case MethodRecognizer::kFloat32x4FromDoubles:
 3267 case MethodRecognizer::kFloat32x4Equal:
 3268 case MethodRecognizer::kFloat32x4GetSignMask:
 3269 case MethodRecognizer::kFloat32x4GreaterThan:
 3270 case MethodRecognizer::kFloat32x4GreaterThanOrEqual:
 3271 case MethodRecognizer::kFloat32x4LessThan:
 3272 case MethodRecognizer::kFloat32x4LessThanOrEqual:
 3273 case MethodRecognizer::kFloat32x4Max:
 3274 case MethodRecognizer::kFloat32x4Min:
 3275 case MethodRecognizer::kFloat32x4Negate:
 3276 case MethodRecognizer::kFloat32x4NotEqual:
 3277 case MethodRecognizer::kFloat32x4Reciprocal:
 3278 case MethodRecognizer::kFloat32x4ReciprocalSqrt:
 3279 case MethodRecognizer::kFloat32x4Scale:
 3280 case MethodRecognizer::kFloat32x4GetW:
 3281 case MethodRecognizer::kFloat32x4GetX:
 3282 case MethodRecognizer::kFloat32x4GetY:
 3283 case MethodRecognizer::kFloat32x4GetZ:
 3284 case MethodRecognizer::kFloat32x4Splat:
 3285 case MethodRecognizer::kFloat32x4Sqrt:
 3286 case MethodRecognizer::kFloat32x4ToFloat64x2:
 3287 case MethodRecognizer::kFloat32x4ToInt32x4:
 3288 case MethodRecognizer::kFloat32x4WithW:
 3289 case MethodRecognizer::kFloat32x4WithX:
 3290 case MethodRecognizer::kFloat32x4WithY:
 3291 case MethodRecognizer::kFloat32x4WithZ:
 3292 case MethodRecognizer::kFloat32x4Zero:
 3293 case MethodRecognizer::kFloat64x2Abs:
 3294 case MethodRecognizer::kFloat64x2Clamp:
 3295 case MethodRecognizer::kFloat64x2FromDoubles:
 3296 case MethodRecognizer::kFloat64x2GetSignMask:
 3297 case MethodRecognizer::kFloat64x2GetX:
 3298 case MethodRecognizer::kFloat64x2GetY:
 3299 case MethodRecognizer::kFloat64x2Max:
 3300 case MethodRecognizer::kFloat64x2Min:
 3301 case MethodRecognizer::kFloat64x2Negate:
 3302 case MethodRecognizer::kFloat64x2Scale:
 3303 case MethodRecognizer::kFloat64x2Splat:
 3304 case MethodRecognizer::kFloat64x2Sqrt:
 3305 case MethodRecognizer::kFloat64x2ToFloat32x4:
 3306 case MethodRecognizer::kFloat64x2WithX:
 3307 case MethodRecognizer::kFloat64x2WithY:
 3308 case MethodRecognizer::kFloat64x2Zero:
 3309 case MethodRecognizer::kInt32x4FromBools:
 3310 case MethodRecognizer::kInt32x4FromInts:
 3311 case MethodRecognizer::kInt32x4GetFlagW:
 3312 case MethodRecognizer::kInt32x4GetFlagX:
 3313 case MethodRecognizer::kInt32x4GetFlagY:
 3314 case MethodRecognizer::kInt32x4GetFlagZ:
 3315 case MethodRecognizer::kInt32x4GetSignMask:
 3316 case MethodRecognizer::kInt32x4Select:
 3317 case MethodRecognizer::kInt32x4ToFloat32x4:
 3318 case MethodRecognizer::kInt32x4WithFlagW:
 3319 case MethodRecognizer::kInt32x4WithFlagX:
 3320 case MethodRecognizer::kInt32x4WithFlagY:
 3321 case MethodRecognizer::kInt32x4WithFlagZ:
 3322 case MethodRecognizer::kFloat32x4ShuffleMix:
 3323 case MethodRecognizer::kInt32x4ShuffleMix:
 3324 case MethodRecognizer::kFloat32x4Shuffle:
 3325 case MethodRecognizer::kInt32x4Shuffle:
 3326 case MethodRecognizer::kFloat32x4Mul:
 3327 case MethodRecognizer::kFloat32x4Div:
 3328 case MethodRecognizer::kFloat32x4Add:
 3329 case MethodRecognizer::kFloat32x4Sub:
 3330 case MethodRecognizer::kFloat64x2Mul:
 3331 case MethodRecognizer::kFloat64x2Div:
 3332 case MethodRecognizer::kFloat64x2Add:
 3333 case MethodRecognizer::kFloat64x2Sub:
 3334 return InlineSimdOp(flow_graph, is_dynamic_call, call, receiver, kind,
 3335 graph_entry, entry, last, result);
 3336
 3337 case MethodRecognizer::kMathIntPow:
 3338 return InlineMathIntPow(flow_graph, call, graph_entry, entry, last,
 3339 result);
 3340
 3341 case MethodRecognizer::kObjectConstructor: {
 3342 *entry = new (Z)
 3343 FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
 3344 call->GetBlock()->try_index(), DeoptId::kNone);
 3345 (*entry)->InheritDeoptTarget(Z, call);
 3346 ASSERT(!call->HasUses());
 3347 *last = nullptr; // Empty body.
 3348 *result =
 3349 nullptr; // Since no uses of original call, result will be unused.
 3350 return true;
 3351 }
 3352
 3353 case MethodRecognizer::kObjectArrayAllocate: {
 3354 Value* num_elements = new (Z) Value(call->ArgumentAt(1));
 3355 intptr_t length = 0;
 // Only inline when the length is a compile-time Smi constant.
 3356 if (IsSmiValue(num_elements, &length)) {
 3358 Value* type = new (Z) Value(call->ArgumentAt(0));
 3359 *entry = new (Z)
 3360 FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
 3361 call->GetBlock()->try_index(), DeoptId::kNone);
 3362 (*entry)->InheritDeoptTarget(Z, call);
 3363 *last = new (Z) CreateArrayInstr(call->source(), type, num_elements,
 3364 call->deopt_id());
 3366 *entry, *last,
 3367 call->deopt_id() != DeoptId::kNone ? call->env() : nullptr,
 3369 *result = (*last)->AsDefinition();
 3370 return true;
 3371 }
 3372 }
 3373 return false;
 3374 }
 3375
 3376 case MethodRecognizer::kObjectRuntimeType: {
 // Constant-fold runtimeType when the receiver cid pins down the type.
 3378 if (receiver_cid == kDynamicCid) {
 3379 return false;
 3380 } else if (IsStringClassId(receiver_cid)) {
 3382 } else if (receiver_cid == kDoubleCid) {
 3383 type = Type::Double();
 3384 } else if (IsIntegerClassId(receiver_cid)) {
 3385 type = Type::IntType();
 3386 } else if (IsTypeClassId(receiver_cid)) {
 3388 } else if ((receiver_cid != kClosureCid) &&
 3389 (receiver_cid != kRecordCid)) {
 3390 const Class& cls = Class::Handle(
 3391 Z, flow_graph->isolate_group()->class_table()->At(receiver_cid));
 3392 if (!cls.IsGeneric()) {
 3393 type = cls.DeclarationType();
 3394 }
 3395 }
 3396
 3397 if (!type.IsNull()) {
 3398 *entry = new (Z)
 3399 FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
 3400 call->GetBlock()->try_index(), DeoptId::kNone);
 3401 (*entry)->InheritDeoptTarget(Z, call);
 3402 ConstantInstr* ctype = flow_graph->GetConstant(type);
 3403 // Create a synthetic (re)definition for return to flag insertion.
 3404 // TODO(ajcbik): avoid this mechanism altogether
 3405 RedefinitionInstr* redef =
 3406 new (Z) RedefinitionInstr(new (Z) Value(ctype));
 3408 *entry, redef,
 3409 call->deopt_id() != DeoptId::kNone ? call->env() : nullptr,
 3411 *last = *result = redef;
 3412 return true;
 3413 }
 3414 return false;
 3415 }
 3416
 3417 case MethodRecognizer::kWriteIntoOneByteString:
 3418 case MethodRecognizer::kWriteIntoTwoByteString: {
 3419 // This is an internal method, no need to check argument types nor
 3420 // range.
 3421 *entry = new (Z)
 3422 FunctionEntryInstr(graph_entry, flow_graph->allocate_block_id(),
 3423 call->GetBlock()->try_index(), DeoptId::kNone);
 3424 (*entry)->InheritDeoptTarget(Z, call);
 3425 Definition* str = call->ArgumentAt(0);
 3426 Definition* index = call->ArgumentAt(1);
 3427 Definition* value = call->ArgumentAt(2);
 3428
 3429 const bool is_onebyte = kind == MethodRecognizer::kWriteIntoOneByteString;
 3430 const intptr_t index_scale = is_onebyte ? 1 : 2;
 3431 const intptr_t cid = is_onebyte ? kOneByteStringCid : kTwoByteStringCid;
 3432
 3433 // Insert explicit unboxing instructions with truncation to avoid relying
 3434 // on [SelectRepresentations] which doesn't mark them as truncating.
 3436 new (Z) Value(value), call->deopt_id(),
 3438 flow_graph->AppendTo(*entry, value, call->env(), FlowGraph::kValue);
 3439
 3440 *last = new (Z) StoreIndexedInstr(
 3441 new (Z) Value(str), new (Z) Value(index), new (Z) Value(value),
 3442 kNoStoreBarrier, /*index_unboxed=*/false, index_scale, cid,
 3443 kAlignedAccess, call->deopt_id(), call->source());
 3445
 3446 // We need a return value to replace uses of the original definition.
 3447 // The final instruction is a use of 'void operator[]=()', so we use null.
 3449 return true;
 3450 }
 3451
 3452 default:
 3453 return false;
 3454 }
 3455}
3456
3457} // namespace dart
static float prev(float f)
#define check(reporter, ref, unref, make, kill)
Definition: RefCntTest.cpp:85
SI void store(P *ptr, const T &val)
SI T load(const P *ptr)
Definition: Transform_inl.h:98
static size_t element_size(Layout layout, SkSLType type)
#define UNREACHABLE()
Definition: assert.h:248
#define RELEASE_ASSERT(cond)
Definition: assert.h:327
GLenum type
#define IG
#define INIT_HANDLE(iface, type, cid)
#define Z
#define PUBLIC_TYPED_DATA_CLASS_LIST(V)
bool IsTopTypeForSubtyping() const
Definition: object.cc:21396
bool IsInt32x4Type() const
Definition: object.cc:21440
bool IsFloat64x2Type() const
Definition: object.cc:21434
virtual bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition: object.cc:21151
bool IsDoubleType() const
Definition: object.cc:21423
bool IsFloat32x4Type() const
Definition: object.cc:21428
bool IsIntType() const
Definition: object.cc:21411
static constexpr bool IsValidLength(intptr_t len)
Definition: object.h:10932
void Add(const T &value)
const T & At(intptr_t index) const
void SetLength(intptr_t new_length)
intptr_t length() const
bool OperandsAre(intptr_t cid) const
Definition: il.h:881
bool OperandsAreSmiOrMint() const
Definition: il.h:874
bool IncludesOperands(intptr_t cid) const
Definition: il.h:886
bool OperandsAreSmiOrNull() const
Definition: il.h:871
bool ArgumentIs(intptr_t cid) const
Definition: il.h:845
static const BinaryFeedback * CreateMonomorphic(Zone *zone, intptr_t receiver_cid, intptr_t argument_cid)
Definition: il.cc:4110
bool OperandsAreSmiOrDouble() const
Definition: il.h:877
intptr_t try_index() const
Definition: il.h:1730
bool Done() const
Definition: flow_graph.h:46
static const Bool & Get(bool value)
Definition: object.h:10801
static const Bool & True()
Definition: object.h:10797
static BoxInstr * Create(Representation from, Value *value)
Definition: il.cc:4007
static bool HasSingleConcreteImplementation(const Class &interface, intptr_t *implementation_cid)
Definition: cha.cc:132
virtual void VisitStaticCall(StaticCallInstr *instr)
void AddReceiverCheck(InstanceCallInstr *call)
bool TryReplaceWithEqualityOp(InstanceCallInstr *call, Token::Kind op_kind)
FlowGraph * flow_graph() const
void AddCheckNull(Value *to_check, const String &function_name, intptr_t deopt_id, Environment *deopt_environment, Instruction *insert_before)
void AddCheckClass(Definition *to_check, const Cids &cids, intptr_t deopt_id, Environment *deopt_environment, Instruction *insert_before)
void ReplaceCall(Definition *call, Definition *replacement)
bool TryInlineInstanceSetter(InstanceCallInstr *call)
void InsertBefore(Instruction *next, Instruction *instr, Environment *env, FlowGraph::UseKind use_kind)
void ReplaceCallWithResult(Definition *call, Instruction *replacement, Definition *result)
Thread * thread() const
virtual void VisitLoadCodeUnits(LoadCodeUnitsInstr *instr)
virtual bool TryCreateICData(InstanceCallInstr *call)
void InlineImplicitInstanceGetter(Definition *call, const Field &field)
bool TryInlineInstanceGetter(InstanceCallInstr *call)
bool TryReplaceWithRelationalOp(InstanceCallInstr *call, Token::Kind op_kind)
virtual bool TryOptimizeStaticCallUsingStaticTypes(StaticCallInstr *call)=0
bool TryReplaceWithUnaryOp(InstanceCallInstr *call, Token::Kind op_kind)
SpeculativeInliningPolicy * speculative_policy_
const Function & function() const
virtual void ReplaceInstanceCallsWithDispatchTableCalls()
void ReplaceWithInstanceOf(InstanceCallInstr *instr)
bool TryReplaceWithBinaryOp(InstanceCallInstr *call, Token::Kind op_kind)
bool TryInlineInstanceMethod(InstanceCallInstr *call)
virtual bool TryReplaceInstanceOfWithRangeCheck(InstanceCallInstr *call, const AbstractType &type)
void InsertSpeculativeBefore(Instruction *next, Instruction *instr, Environment *env, FlowGraph::UseKind use_kind)
static const CallTargets * CreateMonomorphic(Zone *zone, intptr_t receiver_cid, const Function &target)
Definition: il.cc:4118
bool HasSingleTarget() const
Definition: il.cc:5505
const Function & FirstTarget() const
Definition: il.cc:5513
StaticTypeExactnessState MonomorphicExactness() const
Definition: il.cc:811
Definition: il.h:736
static Cids * CreateMonomorphic(Zone *zone, intptr_t cid)
Definition: il.cc:691
intptr_t MonomorphicReceiverCid() const
Definition: il.cc:806
static Cids * CreateForArgument(Zone *zone, const BinaryFeedback &binary_feedback, int argument_number)
Definition: il.cc:697
bool IsMonomorphic() const
Definition: il.cc:801
ClassPtr At(intptr_t cid) const
Definition: class_table.h:362
intptr_t NumTypeArguments() const
Definition: object.cc:3640
static bool IsSubtypeOf(const Class &cls, const TypeArguments &type_arguments, Nullability nullability, const AbstractType &other, Heap::Space space, FunctionTypeMapping *function_type_equivalence=nullptr)
Definition: object.cc:5920
bool is_finalized() const
Definition: object.h:1723
bool is_nullable() const
Definition: compile_type.h:76
bool is_aot() const
static CompilerState & Current()
const Object & value() const
Definition: il.h:4230
Value * input_use_list() const
Definition: il.h:2575
CompileType * Type()
Definition: il.h:2521
virtual Definition * AsDefinition()
Definition: il.h:2683
static constexpr intptr_t kNone
Definition: deopt_id.h:27
static DoublePtr NewCanonical(double d)
Definition: object.cc:23418
bool is_final() const
Definition: object.h:4442
ClassPtr Owner() const
Definition: object.cc:11860
bool NeedsInitializationCheckOnLoad() const
Definition: object.h:4706
FieldPtr CloneFromOriginal() const
Definition: object.cc:11735
bool needs_length_check() const
Definition: object.h:4697
bool is_late() const
Definition: object.h:4444
StaticTypeExactnessState static_type_exactness_state() const
Definition: object.h:4633
StringPtr name() const
Definition: object.h:4430
bool needs_load_guard() const
Definition: object.h:4451
bool is_covariant() const
Definition: object.h:4476
intptr_t guarded_cid() const
Definition: object.cc:11749
bool is_generic_covariant_impl() const
Definition: object.h:4482
AbstractTypePtr type() const
Definition: object.h:4550
bool is_instance() const
Definition: object.h:4441
static const CallTargets * ResolveCallTargetsForReceiverCid(intptr_t cid, const String &selector, const Array &args_desc_array)
static bool SupportsUnboxedSimd128()
static bool CanConvertInt64ToDouble()
ForwardInstructionIterator * current_iterator() const
Definition: il.h:11846
ForwardInstructionIterator * current_iterator_
Definition: il.h:11859
virtual void VisitBlocks()
Definition: il.cc:1376
ConstantInstr * GetConstant(const Object &object, Representation representation=kTagged)
Definition: flow_graph.cc:187
IsolateGroup * isolate_group() const
Definition: flow_graph.h:262
Instruction * AppendTo(Instruction *prev, Instruction *instr, Environment *env, UseKind use_kind)
Definition: flow_graph.cc:298
Zone * zone() const
Definition: flow_graph.h:261
void ReplaceCurrentInstruction(ForwardInstructionIterator *iterator, Instruction *current, Instruction *replacement)
Definition: flow_graph.cc:141
Instruction * AppendSpeculativeTo(Instruction *prev, Instruction *instr, Environment *env, UseKind use_kind)
Definition: flow_graph.cc:312
Thread * thread() const
Definition: flow_graph.h:260
ToCheck CheckForInstanceCall(InstanceCallInstr *call, UntaggedFunction::Kind kind) const
Definition: flow_graph.cc:493
void AddExactnessGuard(InstanceCallInstr *call, intptr_t receiver_cid)
Definition: flow_graph.cc:627
Definition * CreateCheckBound(Definition *length, Definition *index, intptr_t deopt_id)
Definition: flow_graph.cc:616
void DiscoverBlocks()
Definition: flow_graph.cc:346
ConstantInstr * constant_null() const
Definition: flow_graph.h:270
Instruction * CreateCheckClass(Definition *to_check, const Cids &cids, intptr_t deopt_id, const InstructionSource &source)
Definition: flow_graph.cc:604
const ParsedFunction & parsed_function() const
Definition: flow_graph.h:129
BlockIterator reverse_postorder_iterator() const
Definition: flow_graph.h:219
void InsertBefore(Instruction *next, Instruction *instr, Environment *env, UseKind use_kind)
Definition: flow_graph.h:312
intptr_t allocate_block_id()
Definition: flow_graph.h:266
Instruction * Current() const
Definition: il.h:1853
static bool IsDynamicInvocationForwarderName(const String &name)
Definition: object.cc:4190
bool IsInvokeFieldDispatcher() const
Definition: object.h:3296
FieldPtr accessor_field() const
Definition: object.cc:8149
@ kOld
Definition: heap.h:39
intptr_t NumArgsTested() const
Definition: object.cc:16471
bool HasDeoptReason(ICData::DeoptReasonId reason) const
Definition: object.cc:16513
intptr_t GetReceiverClassIdAt(intptr_t index) const
Definition: object.cc:17020
intptr_t NumberOfChecks() const
Definition: object.cc:16577
Code::EntryKind entry_kind() const
Definition: il.h:4759
const Function & interface_target() const
Definition: il.h:4726
const String & function_name() const
Definition: il.h:4724
const CallTargets & Targets()
Definition: il.cc:5345
static intptr_t ElementSizeFor(intptr_t cid)
Definition: object.cc:20967
void InheritDeoptTarget(Zone *zone, Instruction *other)
Definition: il.cc:1569
virtual BlockEntryInstr * GetBlock()
Definition: il.cc:1352
Environment * env() const
Definition: il.h:1215
@ kNotSpeculative
Definition: il.h:975
void RemoveEnvironment()
Definition: il.cc:1282
const char * ToCString() const
Definition: il_printer.cc:1683
virtual intptr_t ArgumentCount() const
Definition: il.h:1041
Definition * ArgumentAt(intptr_t index) const
Definition: il.h:3441
void SetInputAt(intptr_t i, Value *value)
Definition: il.h:1014
InstructionSource source() const
Definition: il.h:1008
Value * ArgumentValueAt(intptr_t index) const
Definition: il.h:3435
intptr_t deopt_id() const
Definition: il.h:993
SafepointRwLock * program_lock()
Definition: isolate.h:537
static IsolateGroup * Current()
Definition: isolate.h:539
ClassTable * class_table() const
Definition: isolate.h:496
static LibraryPtr LookupLibrary(Thread *thread, const String &url)
Definition: object.cc:14599
bool can_pack_into_smi() const
Definition: il.h:6902
void set_representation(Representation repr)
Definition: il.h:6910
static Representation ReturnRepresentation(intptr_t array_cid)
Definition: il.cc:6867
Value * right() const
Definition: il.h:8970
Value * left() const
Definition: il.h:8969
static intptr_t MethodKindToReceiverCid(Kind kind)
static ObjectPtr null()
Definition: object.h:433
ObjectPtr ptr() const
Definition: object.h:332
bool IsNull() const
Definition: object.h:363
static Object & Handle()
Definition: object.h:407
static Object & ZoneHandle()
Definition: object.h:419
bool TryInline(MethodRecognizer::Kind kind)
SimdLowering(FlowGraph *flow_graph, Instruction *call, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
static SimdOpInstr * Create(Kind kind, Value *left, Value *right, intptr_t deopt_id)
Definition: il.h:11322
static SimdOpInstr * CreateFromCall(Zone *zone, MethodRecognizer::Kind kind, Definition *receiver, Instruction *call, intptr_t mask=0)
Definition: il.cc:8259
static SimdOpInstr * CreateFromFactoryCall(Zone *zone, MethodRecognizer::Kind kind, Instruction *call)
Definition: il.cc:8313
static Kind KindForOperator(MethodRecognizer::Kind kind)
Definition: il.cc:8234
static const Slot & Get(const Field &field, const ParsedFunction *parsed_function)
Definition: slot.cc:351
static const Slot & GetLengthFieldForArrayCid(intptr_t array_cid)
Definition: slot.cc:249
static const Slot & GetTypeArgumentsSlotFor(Thread *thread, const Class &cls)
Definition: slot.cc:276
static SmiPtr New(intptr_t value)
Definition: object.h:10006
bool IsAllowedForInlining(intptr_t call_deopt_id) const
Definition: inliner.h:43
static StaticCallInstr * FromCall(Zone *zone, const C *call, const Function &target, intptr_t call_count)
Definition: il.h:5584
static Representation ValueRepresentation(intptr_t array_cid)
Definition: il.cc:6920
intptr_t FirstArgIndex() const
Definition: il.h:4576
Value * Receiver() const
Definition: il.h:4577
void CheckForSafepoint()
Definition: thread.h:1104
IsolateGroup * isolate_group() const
Definition: thread.h:541
static bool IsTypeTestOperator(Kind tok)
Definition: token.h:244
static bool IsRelationalOperator(Kind tok)
Definition: token.h:232
static bool IsBinaryOperator(Token::Kind token)
Definition: token.cc:31
static bool IsEqualityOperator(Kind tok)
Definition: token.h:236
static TypePtr IntType()
static TypePtr Double()
static TypePtr StringType()
static TypePtr Int32x4()
static TypePtr Float64x2()
static TypePtr Float32x4()
static TypePtr DartTypeType()
virtual void VisitStaticCall(StaticCallInstr *instr)
virtual void VisitInstanceCall(InstanceCallInstr *instr)
static void Optimize(FlowGraph *flow_graph)
static UnboxInstr * Create(Representation to, Value *value, intptr_t deopt_id, SpeculativeMode speculative_mode=kGuardInputs)
Definition: il.cc:4043
static constexpr bool IsPowerOfTwo(T x)
Definition: utils.h:76
bool Done() const
Definition: il.h:83
Definition: il.h:75
bool BindsToConstant() const
Definition: il.cc:1183
const Object & BoundConstant() const
Definition: il.cc:1201
Value * CopyWithType(Zone *zone)
Definition: il.h:138
Definition * definition() const
Definition: il.h:103
CompileType * Type()
static word ElementSizeFor(intptr_t cid)
Definition: runtime_api.cc:581
#define COMPILER_TIMINGS_TIMER_SCOPE(thread, timer_id)
#define THR_Print(format,...)
Definition: log.h:20
#define ASSERT(E)
static int square(int x)
Definition: etc1.cpp:302
VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE auto & d
Definition: main.cc:19
SkBitmap source
Definition: examples.cpp:28
uint8_t value
GAsyncResult * result
uint32_t * target
Dart_NativeFunction function
Definition: fuchsia.cc:51
size_t length
double y
double x
exit(kErrorExitCode)
constexpr intptr_t kSmiBits
Definition: runtime_api.h:301
def link(from_root, to_root)
Definition: dart_pkg.py:44
Definition: dart_vm.cc:33
static bool CanConvertInt64ToDouble()
Definition: flow_graph.cc:1934
static bool CidTestResultsContains(const ZoneGrowableArray< intptr_t > &results, intptr_t test_cid)
static bool ShouldInlineSimd()
Definition: flow_graph.cc:1930
static bool InlineDoubleTestOp(FlowGraph *flow_graph, Instruction *call, Definition *receiver, MethodRecognizer::Kind kind, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
bool IsTypedDataBaseClassId(intptr_t index)
Definition: class_id.h:429
static bool IsSmiValue(Value *val, intptr_t *int_val)
static constexpr Representation kUnboxedUword
Definition: locations.h:171
static bool IsLengthOneString(Definition *d)
static bool InlineMathIntPow(FlowGraph *flow_graph, Instruction *call, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
@ TypedDataBase_length
Definition: il_test.cc:1250
bool IsTypeClassId(intptr_t index)
Definition: class_id.h:370
static bool InlineSetIndexed(FlowGraph *flow_graph, MethodRecognizer::Kind kind, const Function &target, Instruction *call, Definition *receiver, const InstructionSource &source, CallSpecializer::ExactnessInfo *exactness, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
static bool CheckMask(Definition *definition, intptr_t *mask_ptr)
static void RefineUseTypes(Definition *instr)
int32_t classid_t
Definition: globals.h:524
StoreBarrierType
Definition: il.h:6301
@ kNoStoreBarrier
Definition: il.h:6301
@ kEmitStoreBarrier
Definition: il.h:6301
static bool InlineDoubleOp(FlowGraph *flow_graph, Token::Kind op_kind, Instruction *call, Definition *receiver, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
@ kIllegalCid
Definition: class_id.h:214
@ kDynamicCid
Definition: class_id.h:253
Representation
Definition: locations.h:66
static bool ShouldSpecializeForDouble(const BinaryFeedback &binary_feedback)
static bool InlineGetIndexed(FlowGraph *flow_graph, bool can_speculate, bool is_dynamic_call, MethodRecognizer::Kind kind, Definition *call, Definition *receiver, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
static void TryAddTest(ZoneGrowableArray< intptr_t > *results, intptr_t test_cid, bool result)
static intptr_t PrepareInlineIndexedOp(FlowGraph *flow_graph, Instruction *call, intptr_t array_cid, Definition **array, Definition **index, Instruction **cursor)
static bool InlineStringBaseCodeUnitAt(FlowGraph *flow_graph, Instruction *call, Definition *receiver, intptr_t cid, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
static Instruction * InlineMul(FlowGraph *flow_graph, Instruction *cursor, Definition *x, Definition *y)
static bool InlineSimdOp(FlowGraph *flow_graph, bool is_dynamic_call, Instruction *call, Definition *receiver, MethodRecognizer::Kind kind, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
const intptr_t cid
static bool IsNumberCid(intptr_t cid)
static void PurgeNegativeTestCidsEntries(ZoneGrowableArray< intptr_t > *results)
static constexpr Representation kUnboxedIntPtr
Definition: locations.h:176
static bool InlineGrowableArraySetter(FlowGraph *flow_graph, const Slot &field, StoreBarrierType store_barrier_type, Instruction *call, Definition *receiver, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
bool IsIntegerClassId(intptr_t index)
Definition: class_id.h:340
static bool InlineStringBaseCharAt(FlowGraph *flow_graph, Instruction *call, Definition *receiver, intptr_t cid, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
static bool InlineLoadClassId(FlowGraph *flow_graph, Instruction *call, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result)
const char *const function_name
static CompileType * ResultType(Definition *call)
static Definition * PrepareInlineStringIndexOp(FlowGraph *flow_graph, Instruction *call, intptr_t cid, Definition *str, Definition *index, Instruction *cursor)
static bool InlineTypedDataIndexCheck(FlowGraph *flow_graph, Instruction *call, Definition *receiver, GraphEntryInstr *graph_entry, FunctionEntryInstr **entry, Instruction **last, Definition **result, const String &symbol)
static bool SmiFitsInDouble()
bool IsExternalTypedDataClassId(intptr_t index)
Definition: class_id.h:447
@ kAlignedAccess
Definition: il.h:6766
bool IsStringClassId(intptr_t index)
Definition: class_id.h:350
DECLARE_FLAG(bool, show_invisible_frames)
def call(args)
Definition: dom.py:159
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive keep the shell running after the Dart script has completed enable serial On low power devices with low core running concurrent GC tasks on threads can cause them to contend with the UI thread which could potentially lead to jank This option turns off all concurrent GC activities domain network policy
Definition: switches.h:248
dest
Definition: zip.py:79
static constexpr bool IsUnboxedInteger(Representation rep)
Definition: locations.h:92
static constexpr bool IsUnboxed(Representation rep)
Definition: locations.h:101
static Representation RepresentationOfArrayElement(classid_t cid)
Definition: locations.cc:79