#if defined(TARGET_ARCH_ARM)

#define __ compiler->assembler()->
#define Z (compiler->zone())

                                          const Instruction* instr,
                                          LocationSummary* locs) {
LocationSummary* LoadIndexedUnsafeInstr::MakeLocationSummary(Zone* zone,
                                                             bool opt) const {
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = ((representation() == kUnboxedDouble) ? 1 : 0);
  LocationSummary* locs = new (zone)
void LoadIndexedUnsafeInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
      const auto out = locs()->out(0).reg();
      __ LoadFromOffset(out, out, offset());
    case kUnboxedInt64: {
      const auto out_lo = locs()->out(0).AsPairLocation()->At(0).reg();
      const auto out_hi = locs()->out(0).AsPairLocation()->At(1).reg();
      __ LoadFromOffset(out_lo, out_hi, offset());
      __ LoadFromOffset(out_hi, out_hi, offset() + compiler::target::kWordSize);
    case kUnboxedDouble: {
      const auto tmp = locs()->temp(0).reg();
      __ LoadDFromOffset(out, tmp, offset());
  ASSERT(instr->RequiredInputRepresentation(
  __ add(TMP, instr->base_reg(), compiler::Operand(index, LSL, 1));
  __ str(value, compiler::Address(TMP, instr->offset()));

                 Fixed<Register, ARGS_DESC_REG>,
                 Temp<Register> temp)) {
  compiler->EmitTailCallToStub(instr->code());
  __ set_constant_pool_allowed(true);
static constexpr intptr_t kMaxMemoryCopyElementSize =
    2 * compiler::target::kWordSize;

static constexpr intptr_t kMemoryCopyPayloadTemps = 2;
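// Reading inferred from the uses below: the two payload temps hold the
// untagged source and destination payload addresses, and copies whose
// element spans at least kMaxMemoryCopyElementSize (two words) get one
// extra temp so ldm/stm can move a register block per iteration.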
LocationSummary* MemoryCopyInstr::MakeLocationSummary(Zone* zone,
                                                      bool opt) const {
  const intptr_t kNumInputs = 5;
  const intptr_t kNumTemps =
      kMemoryCopyPayloadTemps +
      (element_size_ >= kMaxMemoryCopyElementSize ? 1 : 0);
  LocationSummary* locs = new (zone)
  for (intptr_t i = 0; i < kNumTemps; i++) {
                                       intptr_t num_elements,
  const intptr_t num_bytes = num_elements * element_size_;
  const intptr_t mov_size =
  const intptr_t mov_repeat = num_bytes / mov_size;
  ASSERT(num_bytes % mov_size == 0);

  if (mov_size == kMaxMemoryCopyElementSize) {
    for (intptr_t i = kMemoryCopyPayloadTemps; i < locs()->temp_count(); i++) {
      temp_regs |= 1 << locs()->temp(i).reg();
    __ AddImmediate(src_reg, num_bytes);
    __ AddImmediate(dest_reg, num_bytes);
    for (intptr_t i = 0; i < mov_repeat; i++) {
      __ ldm(block_mode, src_reg, temp_regs);
      __ stm(block_mode, dest_reg, temp_regs);

  for (intptr_t i = 0; i < mov_repeat; i++) {
    const intptr_t byte_index =
        (reversed ? mov_repeat - (i + 1) : i) * mov_size;
        __ ldrb(TMP, compiler::Address(src_reg, byte_index));
        __ strb(TMP, compiler::Address(dest_reg, byte_index));
        __ ldrh(TMP, compiler::Address(src_reg, byte_index));
        __ strh(TMP, compiler::Address(dest_reg, byte_index));
        __ ldr(TMP, compiler::Address(src_reg, byte_index));
        __ str(TMP, compiler::Address(dest_reg, byte_index));
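// The unrolled copy above emits one load/store pair per element: ldm/stm
// moves a whole register block when the element spans two words, otherwise
// ldrb/ldrh/ldr pick the access width from mov_size. byte_index walks
// backwards in the reversed case, which keeps overlapping regions safe
// (no byte is clobbered before it has been read).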
                                     compiler::Label* done) {
  __ BranchIfZero(length_reg, done);
static void CopyUpToWordMultiple(FlowGraphCompiler* compiler,
                                 compiler::Label* done) {
  if (element_size >= compiler::target::kWordSize) return;

  const intptr_t base_shift =
  intptr_t tested_bits = 0;

  __ Comment("Copying until region is a multiple of word size");

  for (intptr_t bit = compiler::target::kWordSizeLog2 - 1; bit >= element_shift;
    const intptr_t bytes = 1 << bit;
    const intptr_t tested_bit = bit + base_shift;
    tested_bits |= (1 << tested_bit);
    __ tst(length_reg, compiler::Operand(1 << tested_bit));
    auto const sz = OperandSizeFor(bytes);
    __ Load(TMP, compiler::Address(src_reg, bytes, mode), sz, NOT_ZERO);
    __ Store(TMP, compiler::Address(dest_reg, bytes, mode), sz, NOT_ZERO);

  __ bics(length_reg, length_reg, compiler::Operand(tested_bits));
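// Each tested bit of length_reg corresponds to a power-of-two number of
// trailing bytes: tst sets the flags, and the conditional (NOT_ZERO)
// load/store pair copies exactly that many bytes using the chosen address
// mode `mode`. The final bics clears the handled bits, leaving a length
// that is a multiple of the word size for the main loop.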
                                   compiler::Label* done,
                                   compiler::Label* copy_forwards) {
  const bool reversed = copy_forwards != nullptr;
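// In the operand patterns below, `shift` may be negative: `ASR, -shift`
// scales length_reg down and `LSL, shift` scales it up, so the same
// add/sub sequences convert the (boxed or unboxed) length into a byte
// count either way.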
    __ add(src_reg, src_reg, compiler::Operand(length_reg, ASR, -shift));
    __ add(src_reg, src_reg, compiler::Operand(length_reg, LSL, shift));
  __ CompareRegisters(dest_reg, src_reg);
    __ sub(src_reg, src_reg, compiler::Operand(length_reg, ASR, -shift),
    __ sub(src_reg, src_reg, compiler::Operand(length_reg, LSL, shift),
    __ add(dest_reg, dest_reg, compiler::Operand(length_reg, ASR, -shift));
    __ add(dest_reg, dest_reg, compiler::Operand(length_reg, LSL, shift));
  CopyUpToWordMultiple(compiler, dest_reg, src_reg, length_reg, element_size_,
                       unboxed_inputs_, reversed, done);
  const auto load_mode =
  const auto load_multiple_mode =
  const intptr_t loop_subtract =
      Utils::Maximum<intptr_t>(1, compiler::target::kWordSize / element_size_)
  for (intptr_t i = kMemoryCopyPayloadTemps; i < locs()->temp_count(); i++) {
    temp_regs |= 1 << locs()->temp(i).reg();
  __ Comment("Copying by multiples of word size");
  compiler::Label loop;
  switch (element_size_) {
      __ ldr(TMP, compiler::Address(src_reg, 4, load_mode));
      __ str(TMP, compiler::Address(dest_reg, 4, load_mode));
      __ ldm(load_multiple_mode, src_reg, temp_regs);
      __ stm(load_multiple_mode, dest_reg, temp_regs);
      __ ldm(load_multiple_mode, src_reg, temp_regs);
      __ stm(load_multiple_mode, dest_reg, temp_regs);
      __ ldm(load_multiple_mode, src_reg, temp_regs);
      __ stm(load_multiple_mode, dest_reg, temp_regs);
  __ subs(length_reg, length_reg, compiler::Operand(loop_subtract));
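// subs both decrements the remaining length and sets the condition flags,
// so the (elided) branch back to `loop` can test the result directly
// without a separate cmp.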
void MemoryCopyInstr::EmitComputeStartPointer(FlowGraphCompiler* compiler,
  if (array_rep != kTagged) {
    case kOneByteStringCid:
    case kTwoByteStringCid:
  ASSERT(start_loc.IsRegister() || start_loc.IsConstant());
  if (start_loc.IsConstant()) {
    const auto& constant = start_loc.constant();
    ASSERT(constant.IsInteger());
    const int64_t start_value = Integer::Cast(constant).AsInt64Value();
        Utils::MulWithWrapAround<intptr_t>(start_value, element_size_), offset);
    __ AddImmediate(payload_reg, array_reg, add_value);
  const Register start_reg = start_loc.reg();
    __ add(payload_reg, array_reg, compiler::Operand(start_reg, ASR, -shift));
    __ add(payload_reg, array_reg, compiler::Operand(start_reg, LSL, shift));
  __ AddImmediate(payload_reg, offset);
LocationSummary* CalculateElementAddressInstr::MakeLocationSummary(
  const intptr_t kNumInputs = 3;
  const intptr_t kNumTemps = 0;
  auto* const summary = new (zone)

void CalculateElementAddressInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  const Register result_reg = locs()->out(0).reg();
  if (index_loc.IsConstant()) {
    if (offset_loc.IsConstant()) {
      ASSERT_EQUAL(Smi::Cast(index_loc.constant()).Value(), 0);
      ASSERT(Smi::Cast(offset_loc.constant()).Value() != 0);
      const int32_t offset_value = Smi::Cast(offset_loc.constant()).Value();
      __ AddImmediate(result_reg, base_reg, offset_value);
      __ add(result_reg, base_reg, compiler::Operand(offset_loc.reg()));
      const int32_t scaled_index =
          Smi::Cast(index_loc.constant()).Value() * index_scale();
      __ AddImmediate(result_reg, scaled_index);
    __ add(result_reg, base_reg,
           compiler::Operand(index_loc.reg(), LSL,
    if (offset_loc.IsConstant()) {
      const int32_t offset_value = Smi::Cast(offset_loc.constant()).Value();
      __ AddImmediate(result_reg, offset_value);
      __ AddRegisters(result_reg, offset_loc.reg());
LocationSummary* MoveArgumentInstr::MakeLocationSummary(Zone* zone,
                                                        bool opt) const {
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* locs = new (zone)
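// ArgumentsMover batches pushes of outgoing arguments: it collects
// registers destined for adjacent stack slots and flushes them with a
// single stm (store-multiple) when their SP-relative order matches the
// register order, falling back to individual stores otherwise.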
class ArgumentsMover : public ValueObject {
    if (pending_regs_ != 0) {
      if (is_single_register_) {
            lowest_register_, SP,
            lowest_register_sp_relative_index_ * compiler::target::kWordSize);
        if (lowest_register_sp_relative_index_ == 0) {
          __ stm(IA, SP, pending_regs_);
              lowest_register_sp_relative_index_ * compiler::target::kWordSize;
            if (((1 << reg) & pending_regs_) != 0) {
              offset += compiler::target::kWordSize;
      is_single_register_ = false;

  void MoveRegister(FlowGraphCompiler* compiler,
                    intptr_t sp_relative_index,
    if (pending_regs_ != 0) {
      ASSERT(lowest_register_ != kNoRegister);
      if (reg < lowest_register_) {
        ASSERT((sp_relative_index + 1) == lowest_register_sp_relative_index_);
        pending_regs_ |= (1 << reg);
        lowest_register_ = reg;
        is_single_register_ = false;
        lowest_register_sp_relative_index_ = sp_relative_index;
    pending_regs_ = (1 << reg);
    lowest_register_ = reg;
    is_single_register_ = true;
    lowest_register_sp_relative_index_ = sp_relative_index;
                                 Instruction* move_arg) {
    for (Instruction* instr = move_arg;; instr = instr->next()) {
      if (ParallelMoveInstr* parallel_move = instr->AsParallelMove()) {
        for (intptr_t i = 0, n = parallel_move->NumMoves(); i < n; ++i) {
          const auto src_loc = parallel_move->MoveOperandsAt(i)->src();
          if (src_loc.IsRegister()) {
            busy |= (1 << src_loc.reg());
          } else if (src_loc.IsPairLocation()) {
            busy |= (1 << src_loc.AsPairLocation()->At(0).reg());
            busy |= (1 << src_loc.AsPairLocation()->At(1).reg());
        ASSERT(instr->IsMoveArgument() || (instr->ArgumentCount() > 0));
        for (intptr_t i = 0, n = instr->locs()->input_count(); i < n; ++i) {
          const auto in_loc = instr->locs()->in(i);
          if (in_loc.IsRegister()) {
            busy |= (1 << in_loc.reg());
          } else if (in_loc.IsPairLocation()) {
            const auto pair_location = in_loc.AsPairLocation();
            busy |= (1 << pair_location->At(0).reg());
            busy |= (1 << pair_location->At(1).reg());
        if (instr->ArgumentCount() > 0) {
    if (pending_regs_ != 0) {
      Register reg = HighestAvailableRegister(busy, lowest_register_);
      if (reg != kNoRegister) {
  static_assert(((1 << LR) & kDartAvailableCpuRegs) == 0,
                "LR should not be allocatable");
  intptr_t lowest_register_sp_relative_index_ = -1;
  bool is_single_register_ = false;

  Register HighestAvailableRegister(intptr_t busy, Register upper_bound) {
    for (intptr_t i = upper_bound - 1; i >= 0; --i) {
      if ((busy & (1 << i)) == 0) {
void MoveArgumentInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  if (previous()->IsMoveArgument()) {
  ArgumentsMover pusher;
  for (MoveArgumentInstr* move_arg = this; move_arg != nullptr;
       move_arg = move_arg->next()->AsMoveArgument()) {
    if (value.IsRegister()) {
      pusher.MoveRegister(compiler, move_arg->location().stack_index(),
    } else if (value.IsPairLocation()) {
      auto pair = move_arg->location().AsPairLocation();
      pusher.MoveRegister(compiler, pair->At(1).stack_index(),
                          value.AsPairLocation()->At(1).reg());
      pusher.MoveRegister(compiler, pair->At(0).stack_index(),
                          value.AsPairLocation()->At(0).reg());
    } else if (value.IsFpuRegister()) {
          move_arg->location().stack_index() * compiler::target::kWordSize);
      if (value.IsConstant()) {
        __ LoadObject(reg, value.constant());
        const intptr_t value_offset = value.ToStackSlotOffset();
        __ LoadFromOffset(reg, value.base_reg(), value_offset);
      pusher.MoveRegister(compiler, move_arg->location().stack_index(), reg);
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* locs = new (zone)

  if (locs()->in(0).IsRegister()) {
  } else if (locs()->in(0).IsPairLocation()) {
  if (compiler->parsed_function().function().IsAsyncFunction() ||
      compiler->parsed_function().function().IsAsyncGenerator()) {
    const Code& stub = GetReturnStub(compiler);
  if (!compiler->flow_graph().graph_entry()->NeedsFrame()) {
  compiler::Label stack_ok;
  __ Comment("Stack Check");
  const intptr_t fp_sp_dist =
      (compiler::target::frame_layout.first_local_from_fp + 1 -
      compiler::target::kWordSize;
  __ sub(R2, SP, compiler::Operand(FP));
  __ CompareImmediate(R2, fp_sp_dist);
  __ LeaveDartFrameAndReturn();
  __ set_constant_pool_allowed(true);
static bool IsPowerOfTwoKind(intptr_t v1, intptr_t v2) {

  BranchLabels labels = {nullptr, nullptr, nullptr};
  const bool is_power_of_two_kind = IsPowerOfTwoKind(if_true_, if_false_);

  intptr_t true_value = if_true_;
  intptr_t false_value = if_false_;

  if (is_power_of_two_kind) {
    if (true_value == 0) {
    if (true_value == 0) {
      intptr_t temp = true_value;
      true_value = false_value;
  __ mov(result, compiler::Operand(1), true_condition);

  if (is_power_of_two_kind) {
    const intptr_t shift =
  if (false_value != 0) {
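// When one of {if_true_, if_false_} is zero and the other a power of two,
// the select reduces to materializing 0/1 from the condition and shifting:
// e.g. if_true_ == 4, if_false_ == 0 becomes (cond ? 1 : 0) << 2, with no
// branch needed.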
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone)

  const Array& arguments_descriptor =
  if (FLAG_precompiled_mode) {
    __ ldr(R2, compiler::FieldAddress(
                   R0, compiler::target::Closure::entry_point_offset()));
                    compiler::target::Function::code_offset()));
           compiler::FieldAddress(
               FUNCTION_REG, compiler::target::Function::entry_point_offset()));
  if (!FLAG_precompiled_mode) {
                                 UntaggedPcDescriptors::kOther, locs(), env());
LocationSummary* LoadLocalInstr::MakeLocationSummary(Zone* zone,

void LoadLocalInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
      compiler::target::FrameOffsetInBytesForVariable(&local()));

LocationSummary* StoreLocalInstr::MakeLocationSummary(Zone* zone,

void StoreLocalInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
      compiler::target::FrameOffsetInBytesForVariable(&local()));
LocationSummary* ConstantInstr::MakeLocationSummary(Zone* zone,

void ConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  if (!locs()->out(0).IsInvalid()) {

                                       intptr_t pair_index) {
  if (destination.IsRegister()) {
    if (value_.IsSmi() &&
      __ LoadImmediate(destination.reg(), pair_index == 0
                                              ? Utils::Low32Bits(v)
                                              : Utils::High32Bits(v));
      ASSERT(representation() == kTagged);
      __ LoadObject(destination.reg(), value_);
  } else if (destination.IsFpuRegister()) {
    switch (representation()) {
                          Double::Cast(value_).value());
                          Double::Cast(value_).value(), tmp);
      case kUnboxedFloat64x2:
        __ LoadQImmediate(destination.fpu_reg(),
                          Float64x2::Cast(value_).value());
      case kUnboxedFloat32x4:
        __ LoadQImmediate(destination.fpu_reg(),
                          Float32x4::Cast(value_).value());
      case kUnboxedInt32x4:
        __ LoadQImmediate(destination.fpu_reg(), Int32x4::Cast(value_).value());
  } else if (destination.IsDoubleStackSlot()) {
    __ LoadDImmediate(DTMP, Double::Cast(value_).value(), tmp);
    const intptr_t dest_offset = destination.ToStackSlotOffset();
    __ StoreDToOffset(DTMP, destination.base_reg(), dest_offset);
  } else if (destination.IsQuadStackSlot()) {
    switch (representation()) {
      case kUnboxedFloat64x2:
        __ LoadQImmediate(QTMP, Float64x2::Cast(value_).value());
      case kUnboxedFloat32x4:
        __ LoadQImmediate(QTMP, Float32x4::Cast(value_).value());
      case kUnboxedInt32x4:
        __ LoadQImmediate(QTMP, Int32x4::Cast(value_).value());
    const intptr_t dest_offset = destination.ToStackSlotOffset();
    ASSERT(destination.IsStackSlot());
    const intptr_t dest_offset = destination.ToStackSlotOffset();
  } else if (representation() == kUnboxedFloat) {
        bit_cast<int32_t, float>(Double::Cast(value_).value());
    ASSERT(representation() == kTagged);
    __ LoadObject(tmp, value_);
    __ StoreToOffset(tmp, destination.base_reg(), dest_offset);
LocationSummary* UnboxedConstantInstr::MakeLocationSummary(Zone* zone,
  const bool is_unboxed_int =
      compiler::target::kWordSize);
  const intptr_t kNumInputs = 0;
  const intptr_t kNumTemps = is_unboxed_int ? 0 : 1;
  LocationSummary* locs = new (zone)
  if (is_unboxed_int) {
    ASSERT(representation_ == kUnboxedDouble);
  if (kNumTemps > 0) {

void UnboxedConstantInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  if (!locs()->out(0).IsInvalid()) {
        locs()->temp_count() == 0 ? kNoRegister : locs()->temp(0).reg();
LocationSummary* AssertAssignableInstr::MakeLocationSummary(Zone* zone,
  auto const dst_type_loc =
  const intptr_t kNonChangeableInputRegs =
      ((dst_type_loc.IsRegister() ? 1 : 0) << TypeTestABI::kDstTypeReg) |
      (1 << TypeTestABI::kInstantiatorTypeArgumentsReg) |
      (1 << TypeTestABI::kFunctionTypeArgumentsReg);
  const intptr_t kCpuRegistersToPreserve =
  const intptr_t kFpuRegistersToPreserve =
  LocationSummary* summary = new (zone) LocationSummary(
  intptr_t next_temp = 0;
    const bool should_preserve = ((1 << i) & kCpuRegistersToPreserve) != 0;
    if (should_preserve) {
      summary->set_temp(next_temp++,
    const bool should_preserve = ((1 << i) & kFpuRegistersToPreserve) != 0;
    if (should_preserve) {
void AssertBooleanInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  ASSERT(locs()->always_calls());

  auto object_store = compiler->isolate_group()->object_store();
  const auto& assert_boolean_stub =
  compiler::Label done;
                             UntaggedPcDescriptors::kOther, locs(),
static bool CanBePairOfImmediateOperands(const dart::Object& constant,
                                         compiler::Operand* low,
                                         compiler::Operand* high) {

static bool CanBePairOfImmediateOperands(Value* value,
                                         compiler::Operand* low,
                                         compiler::Operand* high) {
  if (!value->BindsToConstant()) {
  return CanBePairOfImmediateOperands(value->BoundConstant(), low, high);
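// A 64-bit integer constant can be used directly in a mint comparison when
// both its low and high 32-bit halves are encodable as ARM immediate
// operands; these helpers check that and produce the two Operands.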
LocationSummary* EqualityCompareInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 1;
  LocationSummary* locs = new (zone)
  if (operation_cid() == kMintCid) {
    compiler::Operand o;
    const intptr_t kNumTemps = 0;
    LocationSummary* locs = new (zone)
    if (CanBePairOfImmediateOperands(left(), &o, &o)) {
    } else if (CanBePairOfImmediateOperands(right(), &o, &o)) {
  if (operation_cid() == kDoubleCid) {
    const intptr_t kNumTemps = 0;
    LocationSummary* locs = new (zone)
  if (operation_cid() == kSmiCid || operation_cid() == kIntegerCid) {
    const intptr_t kNumTemps = 0;
    LocationSummary* locs = new (zone)
    locs->set_in(1, locs->in(0).IsConstant()
static void LoadValueCid(FlowGraphCompiler* compiler,
                         compiler::Label* value_is_smi = nullptr) {
  if (value_is_smi == nullptr) {
    __ mov(value_cid_reg, compiler::Operand(kSmiCid));
  if (value_is_smi == nullptr) {
    __ LoadClassId(value_cid_reg, value_reg, NE);
    __ b(value_is_smi, EQ);
    __ LoadClassId(value_cid_reg, value_reg);
  switch (condition) {

static void EmitBranchOnCondition(FlowGraphCompiler* compiler,
                                  BranchLabels labels) {
  if (labels.fall_through == labels.false_label) {
    __ b(labels.true_label, true_condition);
    __ b(labels.false_label, false_condition);
    if (labels.fall_through != labels.true_label) {
      __ b(labels.true_label);
                                     LocationSummary* locs,
  Condition true_condition = TokenKindToIntCondition(kind);
  if (left.IsConstant()) {
    true_condition = FlipCondition(true_condition);
  } else if (right.IsConstant()) {
    __ cmp(left.reg(), compiler::Operand(right.reg()));
  return true_condition;
                                      LocationSummary* locs,
  Condition true_condition = TokenKindToIntCondition(kind);
  if (left.IsConstant()) {
    __ CompareImmediate(
        static_cast<uword>(Integer::Cast(left.constant()).AsInt64Value()));
    true_condition = FlipCondition(true_condition);
  } else if (right.IsConstant()) {
    __ CompareImmediate(
        static_cast<uword>(Integer::Cast(right.constant()).AsInt64Value()));
    __ cmp(left.reg(), compiler::Operand(right.reg()));
  return true_condition;
                                           LocationSummary* locs,
  PairLocation* left_pair;
  compiler::Operand right_lo, right_hi;
  if (locs->in(0).IsConstant()) {
    const bool ok = CanBePairOfImmediateOperands(locs->in(0).constant(),
                                                 &right_lo, &right_hi);
    left_pair = locs->in(1).AsPairLocation();
  } else if (locs->in(1).IsConstant()) {
    const bool ok = CanBePairOfImmediateOperands(locs->in(1).constant(),
                                                 &right_lo, &right_hi);
    left_pair = locs->in(0).AsPairLocation();
    left_pair = locs->in(0).AsPairLocation();
    PairLocation* right_pair = locs->in(1).AsPairLocation();
    right_lo = compiler::Operand(right_pair->At(0).reg());
    right_hi = compiler::Operand(right_pair->At(1).reg());
  Register left_lo = left_pair->At(0).reg();
  Register left_hi = left_pair->At(1).reg();

  __ cmp(left_lo, right_lo);
  __ cmp(left_hi, right_hi, EQ);
  return TokenKindToIntCondition(kind);
                                             LocationSummary* locs,
                                             BranchLabels labels) {
  PairLocation* left_pair;
  compiler::Operand right_lo, right_hi;
  Condition true_condition = TokenKindToIntCondition(kind);
  if (locs->in(0).IsConstant()) {
    const bool ok = CanBePairOfImmediateOperands(locs->in(0).constant(),
                                                 &right_lo, &right_hi);
    left_pair = locs->in(1).AsPairLocation();
    true_condition = FlipCondition(true_condition);
  } else if (locs->in(1).IsConstant()) {
    const bool ok = CanBePairOfImmediateOperands(locs->in(1).constant(),
                                                 &right_lo, &right_hi);
    left_pair = locs->in(0).AsPairLocation();
    left_pair = locs->in(0).AsPairLocation();
    PairLocation* right_pair = locs->in(1).AsPairLocation();
    right_lo = compiler::Operand(right_pair->At(0).reg());
    right_hi = compiler::Operand(right_pair->At(1).reg());
  Register left_lo = left_pair->At(0).reg();
  Register left_hi = left_pair->At(1).reg();

  switch (true_condition) {
      hi_cond = lo_cond = VS;
  __ cmp(left_hi, right_hi);
  __ b(labels.true_label, hi_cond);
  __ b(labels.false_label, FlipCondition(hi_cond));
  __ cmp(left_lo, right_lo);
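// A 64-bit comparison on 32-bit ARM is done in two steps: compare the high
// words first and branch if they already decide the result, then compare
// the low words with an unsigned condition (only the high word carries the
// sign for the ordering).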
                                              LocationSummary* locs,
                                              BranchLabels labels) {
  ASSERT((kind == Token::kEQ) || (kind == Token::kNE));
  const Register temp = locs->temp(0).reg();
  const Condition true_condition = TokenKindToIntCondition(kind);
  compiler::Label* equal_result =
      (true_condition == EQ) ? labels.true_label : labels.false_label;
  compiler::Label* not_equal_result =
      (true_condition == EQ) ? labels.false_label : labels.true_label;

  __ b(equal_result, EQ);
  __ BranchIfSmi(temp, not_equal_result);
  __ CompareClassId(left, kMintCid, temp);
  __ b(not_equal_result, NE);
  __ CompareClassId(right, kMintCid, temp);
  __ b(not_equal_result, NE);
  __ LoadFieldFromOffset(temp, left, compiler::target::Mint::value_offset());
  __ LoadFieldFromOffset(TMP, right, compiler::target::Mint::value_offset());
  __ cmp(temp, compiler::Operand(TMP));
  __ LoadFieldFromOffset(
      compiler::target::Mint::value_offset() + compiler::target::kWordSize,
  __ LoadFieldFromOffset(
      compiler::target::Mint::value_offset() + compiler::target::kWordSize,
  __ cmp(temp, compiler::Operand(TMP), EQ);
  return true_condition;
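// Null-aware equality first compares the two references directly (which
// handles identical objects and null == null), then falls back to comparing
// boxed Mint payloads word by word; the second cmp is predicated on EQ, so
// its result only stands when the first (low-word) comparison matched.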
                                        LocationSummary* locs,
                                        BranchLabels labels,
      __ vcmpd(dleft, dright);
      __ vcmpd(dleft, dright);
      __ vcmpd(dright, dleft);
      __ vcmpd(dleft, dright);
      __ vcmpd(dright, dleft);
      __ vcmpd(dleft, dright);
                                                   BranchLabels labels) {
    ASSERT(operation_cid() == kMintCid);
    return EmitNullAwareInt64ComparisonOp(compiler, locs(), kind(), labels);
  if (operation_cid() == kSmiCid) {
    return EmitSmiComparisonOp(compiler, locs(), kind());
  } else if (operation_cid() == kIntegerCid) {
    return EmitWordComparisonOp(compiler, locs(), kind());
  } else if (operation_cid() == kMintCid) {
    return EmitUnboxedMintEqualityOp(compiler, locs(), kind());
    ASSERT(operation_cid() == kDoubleCid);
    return EmitDoubleComparisonOp(compiler, locs(), labels, kind());
LocationSummary* TestSmiInstr::MakeLocationSummary(Zone* zone, bool opt) const {
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 0;
  LocationSummary* locs = new (zone)

                                          BranchLabels labels) {
  if (right.IsConstant()) {
    __ TestImmediate(left, imm);
  Condition true_condition = (kind() == Token::kNE) ? NE : EQ;
  return true_condition;
LocationSummary* TestCidsInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 1;
  LocationSummary* locs = new (zone)

                                           BranchLabels labels) {
  ASSERT((kind() == Token::kIS) || (kind() == Token::kISNOT));
  const Register val_reg = locs()->in(0).reg();
  const Register cid_reg = locs()->temp(0).reg();
  compiler::Label* deopt =
      ? compiler->AddDeoptStub(deopt_id(), ICData::kDeoptTestCids)
  const intptr_t true_result = (kind() == Token::kIS) ? 1 : 0;
  __ b(result ? labels.true_label : labels.false_label, EQ);
  __ LoadClassId(cid_reg, val_reg);
  for (intptr_t i = 2; i < data.length(); i += 2) {
    const intptr_t test_cid = data[i];
    ASSERT(test_cid != kSmiCid);
    __ CompareImmediate(cid_reg, test_cid);
    __ b(result ? labels.true_label : labels.false_label, EQ);
  if (deopt == nullptr) {
    compiler::Label* target = result ? labels.false_label : labels.true_label;
    if (target != labels.fall_through) {
LocationSummary* RelationalOpInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 0;
  if (operation_cid() == kMintCid) {
    compiler::Operand o;
    const intptr_t kNumTemps = 0;
    LocationSummary* locs = new (zone)
    if (CanBePairOfImmediateOperands(left(), &o, &o)) {
    } else if (CanBePairOfImmediateOperands(right(), &o, &o)) {
  if (operation_cid() == kDoubleCid) {
    LocationSummary* summary = new (zone)
  ASSERT(operation_cid() == kSmiCid);
  LocationSummary* summary = new (zone)
  summary->set_in(1, summary->in(0).IsConstant()

                                              BranchLabels labels) {
  if (operation_cid() == kSmiCid) {
    return EmitSmiComparisonOp(compiler, locs(), kind());
  } else if (operation_cid() == kMintCid) {
    return EmitUnboxedMintComparisonOp(compiler, locs(), kind(), labels);
  ASSERT(operation_cid() == kDoubleCid);
  return EmitDoubleComparisonOp(compiler, locs(), labels, kind());
         compiler::Operand((ArgumentCount() - 1) * compiler::target::kWordSize));
    stub = &StubCode::CallBootstrapNative();
    stub = &StubCode::CallBootstrapNative();
    stub = &StubCode::CallAutoScopeNative();
    stub = &StubCode::CallNoScopeNative();
  __ LoadImmediate(R1, argc_tag);
  compiler::ExternalLabel label(entry);
  __ LoadNativeEntry(R9, &label,
                         : compiler::ObjectPoolBuilderEntry::kNotPatchable);
        source(), *stub, UntaggedPcDescriptors::kOther, locs(),
    compiler->GenerateNonLazyDeoptableStubCall(
        source(), *stub, UntaggedPcDescriptors::kOther, locs(),
#define R(r) (1 << r)

                                              bool is_optimizing) const {
  return MakeLocationSummaryInternal(
      zone, is_optimizing,

  __ mov(saved_fp_or_sp,
  __ PushImmediate(0);
  __ LoadObject(CODE_REG, Object::null_object());
  __ set_constant_pool_allowed(false);
  __ EnterDartFrame(0, false);
  __ ReserveAlignedFrameSpace(marshaller_.RequiredStackSpaceInBytes());
#if defined(USING_MEMORY_SANITIZER)

  __ Comment(is_leaf_ ? "Leaf Call" : "Call");
#if !defined(PRODUCT)
        compiler::target::Thread::top_exit_frame_info_offset());
    __ StoreToOffset(branch, THR, compiler::target::Thread::vm_tag_offset());
#if !defined(PRODUCT)
    __ LoadImmediate(temp1, compiler::target::Thread::vm_tag_dart_id());
    __ StoreToOffset(temp1, THR, compiler::target::Thread::vm_tag_offset());
    __ LoadImmediate(temp1, 0);
    __ StoreToOffset(temp1, THR,
                     compiler::target::Thread::top_exit_frame_info_offset());
    __ mov(temp1, compiler::Operand(PC));
                       compiler::target::kWordSize));
        UntaggedPcDescriptors::Kind::kOther, locs(),
            THR, compiler::target::Thread::
                     call_native_through_safepoint_entry_point_offset()));
                  "NOTFP should be a reserved register");
    __ Comment("Check Dart_Handle for Error.");
    compiler::Label not_error;
                      compiler::target::LocalHandle::ptr_offset()));
    __ BranchIfSmi(temp1, &not_error);
    __ LoadClassId(temp1, temp1);
    __ Comment("Slow path: call Dart_PropagateError through stub.");
            THR, compiler::target::Thread::
                     call_native_through_safepoint_entry_point_offset()));
    __ ldr(branch, compiler::Address(
                       THR, kPropagateErrorRuntimeEntry.OffsetFromThread()));
    __ Bind(&not_error);

  if (FLAG_precompiled_mode) {
    __ SetupGlobalPoolAndDispatchTable();
  __ mov(SPREG, compiler::Operand(saved_fp_or_sp));
  __ LeaveDartFrame();
  __ set_constant_pool_allowed(true);
  __ PopRegister(temp1);
  __ LeaveDartFrame();

  const Register old_exit_through_ffi_reg = R4;
  __ Pop(old_exit_frame_reg);
  __ Pop(old_exit_through_ffi_reg);
  __ StoreToOffset(tmp, THR, compiler::target::Thread::top_resource_offset());
  __ TransitionGeneratedToNative(vm_tag_reg, old_exit_frame_reg,
                                 old_exit_through_ffi_reg, tmp,
  __ PopNativeCalleeSavedRegisters();
#if defined(DART_TARGET_OS_FUCHSIA) && defined(USING_SHADOW_CALL_STACK)
  RESTORES_LR_FROM_FRAME(__ LeaveFrame(1 << LR | 1 << FP));
  RESTORES_LR_FROM_FRAME(__ LeaveFrame(1 << LR | 1 << FP));
  __ set_constant_pool_allowed(true);
void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  __ set_constant_pool_allowed(false);
  SPILLS_LR_TO_FRAME(__ EnterFrame((1 << FP) | (1 << LR), 0));
  SPILLS_LR_TO_FRAME(__ EnterFrame((1 << FP) | (1 << LR), 0));
  __ PushImmediate(0);
#if defined(DART_TARGET_OS_FUCHSIA) && defined(USING_SHADOW_CALL_STACK)
  __ PushNativeCalleeSavedRegisters();

  __ LoadFromOffset(R0, THR, compiler::target::Thread::vm_tag_offset());
  const intptr_t top_resource_offset =
      compiler::target::Thread::top_resource_offset();
  __ LoadFromOffset(R0, THR, top_resource_offset);
  __ LoadImmediate(R0, 0);
  __ StoreToOffset(R0, THR, top_resource_offset);
      compiler::target::Thread::exit_through_ffi_offset());
      compiler::target::Thread::top_exit_frame_info_offset());
  __ EmitEntryFrameVerification(R0);
  __ TransitionNativeToGenerated(R0, R1,

  const Function& target_function = marshaller_.dart_signature();
  const intptr_t callback_id = target_function.FfiCallbackId();
  __ LoadFromOffset(R0, THR, compiler::target::Thread::isolate_group_offset());
  __ LoadFromOffset(R0, R0,
                    compiler::target::IsolateGroup::object_store_offset());
  __ LoadFromOffset(R0, R0,
                    compiler::target::ObjectStore::ffi_callback_code_offset());
  __ LoadFieldFromOffset(R0, R0,
                         compiler::target::GrowableObjectArray::data_offset());
      compiler::target::Array::data_offset() +
      callback_id * compiler::target::kWordSize);

  if (FLAG_precompiled_mode) {
    __ SetupGlobalPoolAndDispatchTable();
    __ LoadImmediate(PP, 0);
      compiler::target::Thread::invoke_dart_code_stub_offset());
  __ LoadFieldFromOffset(LR, LR,
                         compiler::target::Code::entry_point_offset());
  FunctionEntryInstr::EmitNativeCode(compiler);
#define R(r) (1 << r)

                                                 bool is_optimizing) const {

  __ MoveRegister(saved_fp, FPREG);
  const intptr_t frame_space = native_calling_convention_.StackTopInBytes();
  __ EnterCFrame(frame_space);
  __ str(target_address,
         compiler::Address(THR, compiler::target::Thread::vm_tag_offset()));
  __ CallCFunction(target_address);
  __ LoadImmediate(temp0, VMTag::kDartTagId);
         compiler::Address(THR, compiler::target::Thread::vm_tag_offset()));
LocationSummary* OneByteStringFromCharCodeInstr::MakeLocationSummary(
  const intptr_t kNumInputs = 1;

void OneByteStringFromCharCodeInstr::EmitNativeCode(
          THR, compiler::target::Thread::predefined_symbols_address_offset()));

LocationSummary* StringToCharCodeInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;

void StringToCharCodeInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  ASSERT(cid_ == kOneByteStringCid);
  __ ldr(result, compiler::FieldAddress(
                     str, compiler::target::String::length_offset()));
         compiler::FieldAddress(
             str, compiler::target::OneByteString::data_offset()),
LocationSummary* Utf8ScanInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 5;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone)

void Utf8ScanInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  const Register bytes_reg = locs()->in(1).reg();
  const Register start_reg = locs()->in(2).reg();
  const Register end_reg = locs()->in(3).reg();
  const Register table_reg = locs()->in(4).reg();
  const Register size_reg = locs()->out(0).reg();

  const Register bytes_ptr_reg = start_reg;
  const Register bytes_end_reg = end_reg;
  const Register flags_reg = bytes_reg;
  const Register decoder_temp_reg = start_reg;
  const Register flags_temp_reg = end_reg;

  const intptr_t kSizeMask = 0x03;
  const intptr_t kFlagsMask = 0x3C;
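// Each input byte indexes a scan table stored as a OneByteString
// (presumably 256 entries): the low two bits of an entry (kSizeMask) give
// the byte's contribution to the decoded size, and bits 2-5 (kFlagsMask)
// accumulate properties of the input; see the orr/and_ pair in the loop
// below.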
  compiler::Label loop, loop_in;

  __ LoadFromSlot(bytes_reg, bytes_reg, Slot::PointerBase_data());
      table_reg, table_reg,
      compiler::target::OneByteString::data_offset() - kHeapObjectTag);
  __ add(bytes_ptr_reg, bytes_reg, compiler::Operand(start_reg));
  __ add(bytes_end_reg, bytes_reg, compiler::Operand(end_reg));
  __ LoadImmediate(size_reg, 0);
  __ LoadImmediate(flags_reg, 0);
  __ ldrb(temp_reg, compiler::Address(table_reg, temp_reg));
  __ orr(flags_reg, flags_reg, compiler::Operand(temp_reg));
  __ and_(temp_reg, temp_reg, compiler::Operand(kSizeMask));
  __ add(size_reg, size_reg, compiler::Operand(temp_reg));
  __ cmp(bytes_ptr_reg, compiler::Operand(bytes_end_reg));
  __ AndImmediate(flags_reg, flags_reg, kFlagsMask);
  __ SmiTag(flags_reg);

  const Location decoder_location = locs()->in(0);
  if (decoder_location.IsStackSlot()) {
    decoder_reg = decoder_temp_reg;
    decoder_reg = decoder_location.reg();
  const auto scan_flags_field_offset = scan_flags_field_.offset_in_bytes();
  __ LoadFieldFromOffset(flags_temp_reg, decoder_reg, scan_flags_field_offset);
  __ orr(flags_temp_reg, flags_temp_reg, compiler::Operand(flags_reg));
  __ StoreFieldToOffset(flags_temp_reg, decoder_reg, scan_flags_field_offset);
LocationSummary* LoadIndexedInstr::MakeLocationSummary(Zone* zone,
  const bool directly_addressable = aligned() && rep != kUnboxedInt64;
  const intptr_t kNumInputs = 2;
  intptr_t kNumTemps = 0;
  if (!directly_addressable) {
    if (rep == kUnboxedFloat || rep == kUnboxedDouble) {
  LocationSummary* locs = new (zone)
  const bool can_be_constant =
  if (rep == kUnboxedInt64) {
  if (rep == kUnboxedFloat) {
  if (!directly_addressable) {
    if (rep == kUnboxedFloat || rep == kUnboxedDouble) {
void LoadIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  const bool directly_addressable = aligned() && rep != kUnboxedInt64;
      directly_addressable ? kNoRegister : locs()->temp(0).reg();
  if (directly_addressable) {
            ? __ ElementAddressForRegIndex(true,
            : __ ElementAddressForIntIndex(
    if (index.IsRegister()) {
      __ LoadElementAddressForRegIndex(address,
      __ LoadElementAddressForIntIndex(

  if (rep == kUnboxedInt64) {
    ASSERT(!directly_addressable);
    ASSERT(locs()->out(0).IsPairLocation());
    PairLocation* result_pair = locs()->out(0).AsPairLocation();
    const Register result_lo = result_pair->At(0).reg();
    const Register result_hi = result_pair->At(1).reg();
      __ ldr(result_lo, compiler::Address(address));
             compiler::Address(address, compiler::target::kWordSize));
      __ LoadWordUnaligned(result_lo, address, TMP);
      __ AddImmediate(address, address, compiler::target::kWordSize);
      __ LoadWordUnaligned(result_hi, address, TMP);
      case kUnboxedUint32:
      case kUnboxedUint16:
        __ LoadHalfWordUnsignedUnaligned(result, address, TMP);
  if (rep == kUnboxedFloat) {
      __ LoadWordUnaligned(value, address, TMP);
  } else if (rep == kUnboxedDouble) {
      __ vldrd(dresult0, element_address);
      __ LoadWordUnaligned(value, address, TMP);
      __ vmovdr(dresult0, 0, value);
      __ AddImmediate(address, address, 4);
      __ LoadWordUnaligned(value, address, TMP);
      __ vmovdr(dresult0, 1, value);
    ASSERT(rep == kUnboxedInt32x4 || rep == kUnboxedFloat32x4 ||
           rep == kUnboxedFloat64x2);
    ASSERT(element_address.Equals(compiler::Address(IP)));
    __ vldmd(IA, IP, dresult0, 2);
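// Unaligned accesses are assembled from narrower loads (LoadWordUnaligned
// et al.) since unaligned ldr/vldr is not generally available on ARM; a
// double is rebuilt by moving two unaligned words into the D register
// lanes with vmovdr.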
LocationSummary* StoreIndexedInstr::MakeLocationSummary(Zone* zone,
  const bool directly_addressable =
  const intptr_t kNumInputs = 3;
  LocationSummary* locs;
  intptr_t kNumTemps = 0;
  bool needs_base = false;
  const bool can_be_constant =
  if (can_be_constant) {
    if (!directly_addressable) {
    } else if (needs_base) {
    if (!directly_addressable) {
  for (intptr_t i = 0; i < kNumTemps; i++) {
  if (rep == kUnboxedInt64) {
  } else if (rep == kUnboxedInt8 || rep == kUnboxedUint8) {
  if (rep == kUnboxedFloat) {
  } else if (class_id() == kArrayCid) {
void StoreIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  const bool directly_addressable =
      (locs()->temp_count() > 0) ? locs()->temp(0).reg() : kNoRegister;
      (locs()->temp_count() > 1) ? locs()->temp(1).reg() : kNoRegister;
  if (directly_addressable) {
            ? __ ElementAddressForRegIndex(false,
            : __ ElementAddressForIntIndex(
    if (index.IsRegister()) {
      __ LoadElementAddressForRegIndex(temp,
      __ LoadElementAddressForIntIndex(

    ASSERT(rep == kUnboxedUint8);
      } else if (value < 0) {
      __ LoadImmediate(IP, static_cast<int8_t>(value));
      __ strb(IP, element_address);
      __ LoadImmediate(IP, 0xFF);
      __ mov(IP, compiler::Operand(0), LE);
      __ strb(IP, element_address);
  if (rep == kUnboxedInt64) {
    ASSERT(!directly_addressable);
    ASSERT(locs()->in(2).IsPairLocation());
    PairLocation* value_pair = locs()->in(2).AsPairLocation();
    Register value_lo = value_pair->At(0).reg();
    Register value_hi = value_pair->At(1).reg();
      __ str(value_lo, compiler::Address(temp));
      __ str(value_hi, compiler::Address(temp, compiler::target::kWordSize));
      __ StoreWordUnaligned(value_lo, temp, temp2);
      __ AddImmediate(temp, temp, compiler::target::kWordSize);
      __ StoreWordUnaligned(value_hi, temp, temp2);
  } else if (rep == kUnboxedInt8 || rep == kUnboxedUint8) {
      __ LoadImmediate(IP,
      __ strb(IP, element_address);
      __ strb(value, element_address);
      case kUnboxedUint32:
        __ StoreWordUnaligned(value, temp, temp2);
      case kUnboxedUint16:
        __ StoreHalfWordUnaligned(value, temp, temp2);
  if (rep == kUnboxedFloat) {
      __ vstrs(value_reg, element_address);
  } else if (rep == kUnboxedDouble) {
      __ vstrd(value_reg, element_address);
      __ AddImmediate(address, address, 4);
    ASSERT(rep == kUnboxedInt32x4 || rep == kUnboxedFloat32x4 ||
           rep == kUnboxedFloat64x2);
    ASSERT(element_address.Equals(compiler::Address(index.reg())));
    __ vstmd(IA, index.reg(), value_reg, 2);
  } else if (class_id() == kArrayCid) {
    __ StoreIntoArray(array, temp, value, CanValueBeSmi());
    const Object& constant = locs()->in(2).constant();
    __ StoreObjectIntoObjectNoBarrier(array, compiler::Address(temp),
    __ StoreIntoObjectNoBarrier(array, compiler::Address(temp), value);
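// In the store paths above, tagged stores into an Array use StoreIntoArray,
// which applies the generational write barrier when the value may be a heap
// object (CanValueBeSmi()); constants and values known not to need it go
// through the NoBarrier variants.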
LocationSummary* GuardFieldClassInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const bool emit_full_guard = !opt || (field_cid == kIllegalCid);
  const bool needs_value_cid_temp_reg =
      emit_full_guard || ((value_cid == kDynamicCid) && (field_cid != kSmiCid));
  const bool needs_field_temp_reg = emit_full_guard;

  intptr_t num_temps = 0;
  if (needs_value_cid_temp_reg) {
  if (needs_field_temp_reg) {
  LocationSummary* summary = new (zone)
  for (intptr_t i = 0; i < num_temps; i++) {

void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 20);
  ASSERT(sizeof(UntaggedField::guarded_cid_) == 4);
  ASSERT(sizeof(UntaggedField::is_nullable_) == 4);
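// guarded_cid_ and is_nullable_ are 4-byte fields, so the guard below can
// load and compare them with plain 32-bit ldr/cmp instead of field-width-
// specific loads; the ASSERTs pin down that layout assumption.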
  const bool emit_full_guard =
  const bool needs_value_cid_temp_reg =
      emit_full_guard || ((value_cid == kDynamicCid) && (field_cid != kSmiCid));
  const bool needs_field_temp_reg = emit_full_guard;

  const Register value_reg = locs()->in(0).reg();
      needs_value_cid_temp_reg ? locs()->temp(0).reg() : kNoRegister;
  const Register field_reg = needs_field_temp_reg
                                 ? locs()->temp(locs()->temp_count() - 1).reg()

  compiler::Label ok, fail_label;

  compiler::Label* deopt =
      ? compiler->AddDeoptStub(deopt_id(), ICData::kDeoptGuardField)

  compiler::Label* fail = (deopt != nullptr) ? deopt : &fail_label;

  if (emit_full_guard) {
    compiler::FieldAddress field_cid_operand(
        field_reg, compiler::target::Field::guarded_cid_offset());
    compiler::FieldAddress field_nullability_operand(
        field_reg, compiler::target::Field::is_nullable_offset());

      LoadValueCid(compiler, value_cid_reg, value_reg);
      __ ldr(IP, field_cid_operand);
      __ cmp(value_cid_reg, compiler::Operand(IP));
      __ ldr(IP, field_nullability_operand);
      __ cmp(value_cid_reg, compiler::Operand(IP));
    } else if (value_cid == kNullCid) {
      __ ldr(value_cid_reg, field_nullability_operand);
      __ CompareImmediate(value_cid_reg, value_cid);
      __ ldr(value_cid_reg, field_cid_operand);
      __ CompareImmediate(value_cid_reg, value_cid);

    if (!field().needs_length_check()) {
      __ ldr(IP, field_cid_operand);
        __ str(value_cid_reg, field_cid_operand);
        __ str(value_cid_reg, field_nullability_operand);
        __ LoadImmediate(IP, value_cid);
        __ str(IP, field_cid_operand);
        __ str(IP, field_nullability_operand);

    if (deopt == nullptr) {
      __ ldr(IP, compiler::FieldAddress(
                     field_reg, compiler::target::Field::guarded_cid_offset()));
      __ CallRuntime(kUpdateFieldCidRuntimeEntry, 2);
    ASSERT(deopt != nullptr);
    if (field_cid != kSmiCid) {
      __ LoadClassId(value_cid_reg, value_reg);
      __ CompareImmediate(value_cid_reg, field_cid);
      if (field_cid != kSmiCid) {
        __ CompareImmediate(value_cid_reg, kNullCid);
        __ CompareObject(value_reg, Object::null_object());
  } else if (value_cid == field_cid) {
    ASSERT(value_cid != nullability);
LocationSummary* GuardFieldLengthInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
    const intptr_t kNumTemps = 3;
    LocationSummary* summary = new (zone)
    const intptr_t kNumTemps = 1;
    LocationSummary* summary = new (zone)

void GuardFieldLengthInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  compiler::Label* deopt =
      ? compiler->AddDeoptStub(deopt_id(), ICData::kDeoptGuardField)

  const Register value_reg = locs()->in(0).reg();
    const Register field_reg = locs()->temp(0).reg();
    const Register offset_reg = locs()->temp(1).reg();
    const Register length_reg = locs()->temp(2).reg();
    __ ldrsb(offset_reg,
             compiler::FieldAddress(
                 field_reg, compiler::target::Field::
                                guarded_list_length_in_object_offset_offset()));
           compiler::FieldAddress(
               field_reg, compiler::target::Field::guarded_list_length_offset()));
    __ tst(offset_reg, compiler::Operand(offset_reg));
    __ ldr(IP, compiler::Address(value_reg, offset_reg));
    __ cmp(length_reg, compiler::Operand(IP));

    if (deopt == nullptr) {
      __ CallRuntime(kUpdateFieldCidRuntimeEntry, 2);
    ASSERT(field().guarded_list_length_in_object_offset() !=
    const Register length_reg = locs()->temp(0).reg();
           compiler::FieldAddress(
               value_reg, field().guarded_list_length_in_object_offset()));
    __ CompareImmediate(
LocationSummary* LoadCodeUnitsInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = might_box ? 2 : 0;
  LocationSummary* summary = new (zone) LocationSummary(
      zone, kNumInputs, kNumTemps,

void LoadCodeUnitsInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  const Register str = locs()->in(0).reg();
  compiler::Address element_address = __ ElementAddressForRegIndex(
    ASSERT(locs()->out(0).IsPairLocation());
    PairLocation* result_pair = locs()->out(0).AsPairLocation();
    Register result1 = result_pair->At(0).reg();
    Register result2 = result_pair->At(1).reg();
      case kOneByteStringCid:
        __ ldr(result1, element_address);
        __ eor(result2, result2, compiler::Operand(result2));
      case kTwoByteStringCid:
        __ ldr(result1, element_address);
        __ eor(result2, result2, compiler::Operand(result2));
      case kOneByteStringCid:
      case kTwoByteStringCid:
    Register temp = locs()->temp(1).reg();
    locs()->live_registers()->Add(locs()->temp(0), kUnboxedInt32);
    compiler::Label done;
    __ TestImmediate(value, 0xC0000000);
    __ eor(temp, temp, compiler::Operand(temp));
    __ StoreFieldToOffset(value, result,
                          compiler::target::Mint::value_offset());
    __ StoreFieldToOffset(
        compiler::target::Mint::value_offset() + compiler::target::kWordSize);
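// TestImmediate with 0xC0000000 checks whether the 32-bit code-unit value
// fits in a Smi (31 bits including the sign on 32-bit ARM): if either of
// the top two bits is set, the boxing path above stores the value into a
// Mint as a 64-bit pair, zeroing the high word via eor.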
LocationSummary* StoreStaticFieldInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 1;
  LocationSummary* locs = new (zone)

void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  const Register temp = locs()->temp(0).reg();

  __ LoadFromOffset(temp, THR,
                    compiler::target::Thread::field_table_values_offset());
                   compiler::target::FieldTable::OffsetOf(field()));
LocationSummary* InstanceOfInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 3;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone)

void InstanceOfInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 0;
  LocationSummary* locs = new (zone)
static void InlineArrayAllocation(FlowGraphCompiler* compiler,
                                  intptr_t num_elements,
                                  compiler::Label* slow_path,
                                  compiler::Label* done) {
  const int kInlineArraySize = 12;
  __ TryAllocateArray(kArrayCid, instance_size, slow_path,

  __ StoreIntoObjectNoBarrier(
          compiler::target::Array::type_arguments_offset()),
  __ StoreIntoObjectNoBarrier(
          compiler::target::Array::length_offset()),

  if (num_elements > 0) {
    const intptr_t array_size = instance_size - sizeof(UntaggedArray);
    __ LoadObject(R8, Object::null_object());
    if (num_elements >= 2) {
      __ mov(R9, compiler::Operand(R8));
      __ LoadImmediate(R9, 0x1);
    if (array_size < (kInlineArraySize * compiler::target::kWordSize)) {
      __ InitializeFieldsNoBarrierUnrolled(
          num_elements * compiler::target::kWordSize, R8, R9);

  TypeUsageInfo* type_usage_info = compiler->thread()->type_usage_info();
  if (type_usage_info != nullptr) {
    const Class& list_class =

  compiler::Label slow_path, done;
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    if (compiler->is_optimizing() && !FLAG_precompiled_mode &&
  __ Bind(&slow_path);
  auto object_store = compiler->isolate_group()->object_store();
  const auto& allocate_array_stub =
  const intptr_t kNumInputs = 0;
  const intptr_t kNumTemps = 3;
  LocationSummary* locs = new (zone) LocationSummary(

class AllocateContextSlowPath
    : public TemplateSlowPathCode<AllocateUninitializedContextInstr> {
  explicit AllocateContextSlowPath(
      AllocateUninitializedContextInstr* instruction)
      : TemplateSlowPathCode(instruction) {}

  virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
    __ Comment("AllocateContextSlowPath");
    __ Bind(entry_label());

    LocationSummary* locs = instruction()->locs();
    locs->live_registers()->Remove(locs->out(0));

    auto slow_path_env = compiler->SlowPathEnvironmentFor(
    ASSERT(slow_path_env != nullptr);

    auto object_store = compiler->isolate_group()->object_store();
    const auto& allocate_context_stub = Code::ZoneHandle(
        compiler->zone(), object_store->allocate_context_stub());
    __ LoadImmediate(R1, instruction()->num_context_variables());
    compiler->GenerateStubCall(instruction()->source(), allocate_context_stub,
                               UntaggedPcDescriptors::kOther, locs,
                               instruction()->deopt_id(), slow_path_env);
    ASSERT(instruction()->locs()->out(0).reg() == R0);
    compiler->RestoreLiveRegisters(instruction()->locs());

  AllocateContextSlowPath* slow_path = new AllocateContextSlowPath(this);
  compiler->AddSlowPathCode(slow_path);

  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    __ TryAllocateArray(kContextCid, instance_size, slow_path->entry_label(),
                        temp0, temp1, temp2);
           compiler::FieldAddress(
               result, compiler::target::Context::num_variables_offset()));
    __ Jump(slow_path->entry_label());
  __ Bind(slow_path->exit_label());
  const intptr_t kNumInputs = 0;
  const intptr_t kNumTemps = 1;
  LocationSummary* locs = new (zone)

  auto object_store = compiler->isolate_group()->object_store();
  const auto& allocate_context_stub =

LocationSummary* CloneContextInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* locs = new (zone)

void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  auto object_store = compiler->isolate_group()->object_store();
  const auto& clone_context_stub =
                             UntaggedPcDescriptors::kOther, locs(),
LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(Zone* zone,

void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  compiler->AddExceptionHandler(this);

  const intptr_t fp_sp_dist =
      (compiler::target::frame_layout.first_local_from_fp + 1 -
      compiler::target::kWordSize;
  __ AddImmediate(SP, FP, fp_sp_dist);

  if (raw_exception_var_ != nullptr) {
        compiler::target::FrameOffsetInBytesForVariable(raw_exception_var_));
  if (raw_stacktrace_var_ != nullptr) {
        compiler::target::FrameOffsetInBytesForVariable(raw_stacktrace_var_));
LocationSummary* CheckStackOverflowInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 0;
  const intptr_t kNumTemps = 2;
  LocationSummary* summary = new (zone)
      LocationSummary(zone, kNumInputs, kNumTemps,
                      : LocationSummary::kCallOnSlowPath);

class CheckStackOverflowSlowPath
    : public TemplateSlowPathCode<CheckStackOverflowInstr> {
  static constexpr intptr_t kNumSlowPathArgs = 0;

  explicit CheckStackOverflowSlowPath(CheckStackOverflowInstr* instruction)
      : TemplateSlowPathCode(instruction) {}

  virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
    if (compiler->isolate_group()->use_osr() && osr_entry_label()->IsLinked()) {
      const Register value = instruction()->locs()->temp(0).reg();
      __ Comment("CheckStackOverflowSlowPathOsr");
      __ Bind(osr_entry_label());
      __ LoadImmediate(value, Thread::kOsrRequest);
               THR, compiler::target::Thread::stack_overflow_flags_offset()));
    __ Comment("CheckStackOverflowSlowPath");
    __ Bind(entry_label());
    const bool using_shared_stub =
        instruction()->locs()->call_on_shared_slow_path();
    if (!using_shared_stub) {
      compiler->SaveLiveRegisters(instruction()->locs());
        compiler->SlowPathEnvironmentFor(instruction(), kNumSlowPathArgs);
    const bool has_frame = compiler->flow_graph().graph_entry()->NeedsFrame();
    if (using_shared_stub) {
        ASSERT(__ constant_pool_allowed());
        __ set_constant_pool_allowed(false);
        __ EnterDartFrame(0);
      const uword entry_point_offset = compiler::target::Thread::
          stack_overflow_shared_stub_entry_point_offset(
              instruction()->locs()->live_registers()->FpuRegisterCount() > 0);
      __ Call(compiler::Address(THR, entry_point_offset));
      compiler->RecordSafepoint(instruction()->locs(), kNumSlowPathArgs);
      compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kOther,
                                     instruction()->deopt_id(),
                                     instruction()->source());
        __ LeaveDartFrame();
        __ set_constant_pool_allowed(true);
      __ CallRuntime(kInterruptOrStackOverflowRuntimeEntry, kNumSlowPathArgs);
          instruction()->source(), instruction()->deopt_id(),
          UntaggedPcDescriptors::kOther, instruction()->locs(), env);
        instruction()->in_loop()) {
      compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kOsrEntry,
                                     instruction()->deopt_id(),
                                     InstructionSource());
    compiler->pending_deoptimization_env_ = nullptr;
    if (!using_shared_stub) {
      compiler->RestoreLiveRegisters(instruction()->locs());

  compiler::Label* osr_entry_label() {
    ASSERT(IsolateGroup::Current()->use_osr());
    return &osr_entry_label_;

  compiler::Label osr_entry_label_;
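// In the slow path above, the OSR entry stores Thread::kOsrRequest into the
// thread's stack-overflow flags before falling into the common path, so the
// runtime call can tell an on-stack-replacement request apart from a
// genuine stack overflow or interrupt.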
void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  __ ldr(IP, compiler::Address(THR,
                               compiler::target::Thread::stack_limit_offset()));
  __ cmp(SP, compiler::Operand(IP));

  auto object_store = compiler->isolate_group()->object_store();
  const bool live_fpu_regs = locs()->live_registers()->FpuRegisterCount() > 0;
          ? object_store->stack_overflow_stub_with_fpu_regs_stub()
          : object_store->stack_overflow_stub_without_fpu_regs_stub());
  const bool using_shared_stub = locs()->call_on_shared_slow_path();
  if (using_shared_stub && compiler->CanPcRelativeCall(stub) &&
      compiler->flow_graph().graph_entry()->NeedsFrame()) {
    __ GenerateUnRelocatedPcRelativeCall(LS);
    compiler->AddPcRelativeCallStubTarget(stub);

    auto extended_env = compiler->SlowPathEnvironmentFor(this, 0);
                               UntaggedPcDescriptors::kOther, locs(),

  CheckStackOverflowSlowPath* slow_path = new CheckStackOverflowSlowPath(this);
  compiler->AddSlowPathCode(slow_path);
  __ b(slow_path->entry_label(), LS);

    const intptr_t configured_optimization_counter_threshold =
        compiler->thread()->isolate_group()->optimization_counter_threshold();
    const int32_t threshold =
        configured_optimization_counter_threshold * (loop_depth() + 1);
           compiler::FieldAddress(
               function, compiler::target::Function::usage_counter_offset()));
           compiler::FieldAddress(
               function, compiler::target::Function::usage_counter_offset()));
    __ CompareImmediate(count, threshold);
    __ b(slow_path->osr_entry_label(), GE);
  if (compiler->ForceSlowPathForStackOverflow()) {
    __ b(slow_path->entry_label());
  __ Bind(slow_path->exit_label());
static void EmitSmiShiftLeft(FlowGraphCompiler* compiler,
                             BinarySmiOpInstr* shift_left) {
  const LocationSummary& locs = *shift_left->locs();
  compiler::Label* deopt =
      shift_left->CanDeoptimize()
          ? compiler->AddDeoptStub(shift_left->deopt_id(),
                                   ICData::kDeoptBinarySmiOp)
  if (locs.in(1).IsConstant()) {
    const Object& constant = locs.in(1).constant();
    const intptr_t kCountLimit = 0x1F;
    ASSERT((0 < value) && (value < kCountLimit));
    if (shift_left->can_overflow()) {
      __ Lsl(IP, left, compiler::Operand(value));
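// Overflow check for a constant shift: the value is shifted into IP first
// so the (elided) follow-up can compare `left` against IP shifted back
// right; a mismatch means high bits were lost and the code deoptimizes.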
  Range* right_range = shift_left->right_range();
  if (shift_left->left()->BindsToConstant() && shift_left->can_overflow()) {
    const Object& obj = shift_left->left()->BoundConstant();
    if (left_int == 0) {
      __ cmp(right, compiler::Operand(0));
      __ mov(result, compiler::Operand(0));
    const intptr_t max_right =
    const bool right_needs_check =
    if (right_needs_check) {
  const bool right_needs_check =
  if (!shift_left->can_overflow()) {
    if (right_needs_check) {
      ASSERT(shift_left->CanDeoptimize());
      __ cmp(right, compiler::Operand(0));
                           compiler::target::kSmiBits)));
    if (right_needs_check) {
      ASSERT(shift_left->CanDeoptimize());
                           compiler::target::kSmiBits)));
    const Register temp = locs.temp(0).reg();
LocationSummary* BinarySmiOpInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 2;
  intptr_t num_temps = 0;
  if (op_kind() == Token::kTRUNCDIV) {
  } else if (op_kind() == Token::kMOD) {
  LocationSummary* summary = new (zone)
  if (op_kind() == Token::kTRUNCDIV) {
  if (op_kind() == Token::kMOD) {

void BinarySmiOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  if (op_kind() == Token::kSHL) {

  compiler::Label* deopt = nullptr;
  if (CanDeoptimize()) {
    deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinarySmiOp);
    const Object& constant = locs()->in(1).constant();
        if (deopt == nullptr) {
        if (deopt == nullptr) {
        if (deopt == nullptr) {
          __ LoadImmediate(IP, value);
          __ LoadImmediate(IP, value);
      case Token::kTRUNCDIV: {
        const intptr_t shift_count =
        const Register temp = locs()->temp(0).reg();
        __ add(temp, left, compiler::Operand(IP, LSR, 32 - shift_count));
        __ mov(result, compiler::Operand(temp, ASR, shift_count));
3707 case Token::kBIT_AND: {
3709 compiler::Operand o;
3715 __ LoadImmediate(
IP, imm);
3720 case Token::kBIT_OR: {
3722 compiler::Operand o;
3726 __ LoadImmediate(
IP, imm);
3731 case Token::kBIT_XOR: {
3733 compiler::Operand o;
3737 __ LoadImmediate(
IP, imm);
3744 const intptr_t kCountLimit = 0x1F;
3752 case Token::kUSHR: {
3754 ASSERT((value > 0) && (value < 64));
3771 if (value < (64 - compiler::target::kSmiBits)) {
3772 if (deopt !=
nullptr) {
3773 __ CompareImmediate(
left, 0);
3782 if (value >= compiler::target::kSmiBits) {
3809 if (deopt ==
nullptr) {
3818 if (deopt ==
nullptr) {
3828 if (deopt ==
nullptr) {
3838 case Token::kBIT_AND: {
3843 case Token::kBIT_OR: {
3848 case Token::kBIT_XOR: {
3853 case Token::kTRUNCDIV: {
3856 __ cmp(
right, compiler::Operand(0));
3859 const Register temp = locs()->temp(0).reg();
3868 __ CompareImmediate(
result, 0x40000000);
3877 __ cmp(
right, compiler::Operand(0));
3880 const Register temp = locs()->temp(0).reg();
3896 compiler::Label
done;
3897 __ cmp(
result, compiler::Operand(0));
3900 __ cmp(
right, compiler::Operand(0));
3907 if (CanDeoptimize()) {
3908 __ CompareImmediate(
right, 0);
3913 const intptr_t kCountLimit = 0x1F;
3915 __ CompareImmediate(
IP, kCountLimit);
3916 __ LoadImmediate(
IP, kCountLimit,
GT);
3918 const Register temp = locs()->temp(0).reg();
3924 case Token::kUSHR: {
3925 compiler::Label
done;
3943 right_range(), 64 - compiler::target::kSmiBits - 1)) {
3951 __ CompareImmediate(
IP, 64 - compiler::target::kSmiBits);
3956 __ sub(
IP,
IP, compiler::Operand(32),
GE);
3965 if (deopt !=
nullptr) {
3975 compiler::target::kSmiBits - 1)) {
3976 __ CompareImmediate(
IP, compiler::target::kSmiBits);
3982 const Register temp = locs()->temp(0).reg();
static void EmitInt32ShiftLeft(FlowGraphCompiler* compiler,
                               BinaryInt32OpInstr* shift_left) {
  const LocationSummary& locs = *shift_left->locs();
  compiler::Label* deopt =
      shift_left->CanDeoptimize()
          ? compiler->AddDeoptStub(shift_left->deopt_id(),
                                   ICData::kDeoptBinarySmiOp)
  ASSERT(locs.in(1).IsConstant());
  const Object& constant = locs.in(1).constant();
  const intptr_t kCountLimit = 0x1F;
  ASSERT((0 < value) && (value < kCountLimit));
  if (shift_left->can_overflow()) {
    __ Lsl(IP, left, compiler::Operand(value));
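// BinaryInt32OpInstr mirrors BinarySmiOpInstr but operates on untagged 32-bit
// values, so no Smi tag adjustment is needed; only the shift-count limits and
// overflow checks differ.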
LocationSummary* BinaryInt32OpInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 2;
  intptr_t num_temps = 0;
  LocationSummary* summary = new (zone)
void BinaryInt32OpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  if (op_kind() == Token::kSHL) {
    EmitInt32ShiftLeft(compiler, this);
  compiler::Label* deopt = nullptr;
  if (CanDeoptimize()) {
    deopt = compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinarySmiOp);
    const Object& constant = locs()->in(1).constant();
        if (deopt == nullptr) {
        if (deopt == nullptr) {
        if (deopt == nullptr) {
          __ LoadImmediate(IP, value);
          __ LoadImmediate(IP, value);
      case Token::kBIT_AND: {
        compiler::Operand o;
          __ LoadImmediate(IP, value);
      case Token::kBIT_OR: {
        compiler::Operand o;
          __ LoadImmediate(IP, value);
      case Token::kBIT_XOR: {
        compiler::Operand o;
          __ LoadImmediate(IP, value);
        const intptr_t kCountLimit = 0x1F;
      case Token::kUSHR: {
        ASSERT((value > 0) && (value < 64));
        if (deopt != nullptr) {
      if (deopt == nullptr) {
      if (deopt == nullptr) {
      if (deopt == nullptr) {
    case Token::kBIT_AND: {
    case Token::kBIT_OR: {
    case Token::kBIT_XOR: {
LocationSummary* CheckEitherNonSmiInstr::MakeLocationSummary(Zone* zone,
  ASSERT((left_cid != kDoubleCid) && (right_cid != kDoubleCid));
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone)

void CheckEitherNonSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  compiler::Label* deopt =
      compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinaryDoubleOp);
  if (this->left()->definition() == this->right()->definition()) {
  } else if (left_cid == kSmiCid) {
  } else if (right_cid == kSmiCid) {
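// BoxInstr allocates a box object for an unboxed value (e.g. a Double) and
// stores the raw bits into its payload; the temp register serves the
// allocation fast path.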
LocationSummary* BoxInstr::MakeLocationSummary(Zone* zone, bool opt) const {
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 1;
  LocationSummary* summary = new (zone) LocationSummary(

void BoxInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  const Register out_reg = locs()->out(0).reg();
      out_reg, locs()->temp(0).reg());
    case kUnboxedDouble:
    case kUnboxedFloat32x4:
    case kUnboxedFloat64x2:
    case kUnboxedInt32x4:
      __ StoreMultipleDToOffset(value, 2, out_reg,
LocationSummary* UnboxInstr::MakeLocationSummary(Zone* zone, bool opt) const {
  ASSERT(BoxCid() != kSmiCid);
  const bool needs_temp = CanDeoptimize();
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = needs_temp ? 1 : 0;
  LocationSummary* summary = new (zone)

void UnboxInstr::EmitLoadFromBox(FlowGraphCompiler* compiler) {
  const Register box = locs()->in(0).reg();
    case kUnboxedInt64: {
      PairLocation* result = locs()->out(0).AsPairLocation();
      __ LoadFieldFromOffset(result->At(0).reg(), box, ValueOffset());
      __ LoadFieldFromOffset(result->At(1).reg(), box,
                             ValueOffset() + compiler::target::kWordSize);
    case kUnboxedDouble: {
    case kUnboxedFloat: {
    case kUnboxedFloat32x4:
    case kUnboxedFloat64x2:
    case kUnboxedInt32x4: {
      __ LoadMultipleDFromOffset(result, 2, box,
void UnboxInstr::EmitSmiConversion(FlowGraphCompiler* compiler) {
  const Register box = locs()->in(0).reg();
    case kUnboxedInt64: {
      PairLocation* result = locs()->out(0).AsPairLocation();
      __ SmiUntag(result->At(0).reg(), box);
    case kUnboxedDouble: {
      __ SmiUntag(IP, box);

void UnboxInstr::EmitLoadInt32FromBoxOrSmi(FlowGraphCompiler* compiler) {

void UnboxInstr::EmitLoadInt64FromBoxOrSmi(FlowGraphCompiler* compiler) {
  const Register box = locs()->in(0).reg();
  PairLocation* result = locs()->out(0).AsPairLocation();
  compiler::Label done;
  __ SignFill(result->At(1).reg(), box);
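// BoxInteger32Instr: if the 32-bit value fits in a Smi it is tagged inline;
// otherwise a Mint is allocated and the value is stored sign-extended into
// its 64-bit payload.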
LocationSummary* BoxInteger32Instr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  LocationSummary* summary = new (zone)
      LocationSummary(zone, kNumInputs, kNumTemps,

void BoxInteger32Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
  Register temp = locs()->temp(0).reg();
  compiler::Label done;
  __ cmp(value, compiler::Operand(out, ASR, 1));
  __ TestImmediate(value, 0xC0000000);
      compiler::Operand(compiler::target::kBitsPerWord - 1));
  __ eor(temp, temp, compiler::Operand(temp));
  __ StoreFieldToOffset(value, out, compiler::target::Mint::value_offset());
  __ StoreFieldToOffset(
      compiler::target::Mint::value_offset() + compiler::target::kWordSize);
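// BoxInt64Instr: the fast path tags the low word as a Smi and verifies that
// no bits were lost; the slow path allocates a Mint, optionally through a
// shared stub so many call sites can reuse one spill/restore sequence.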
LocationSummary* BoxInt64Instr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const bool stubs_in_vm_isolate =
      object_store->allocate_mint_with_fpu_regs_stub()
          ->InVMIsolateHeap() ||
      object_store->allocate_mint_without_fpu_regs_stub()
          ->InVMIsolateHeap();
  const bool shared_slow_path_call =
      SlowPathSharingSupported(opt) && !stubs_in_vm_isolate;
  LocationSummary* summary = new (zone) LocationSummary(
      zone, kNumInputs, kNumTemps,
  } else if (shared_slow_path_call) {

void BoxInt64Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
    PairLocation* value_pair = locs()->in(0).AsPairLocation();
    Register value_lo = value_pair->At(0).reg();
    Register out_reg = locs()->out(0).reg();
    __ SmiTag(out_reg, value_lo);

  PairLocation* value_pair = locs()->in(0).AsPairLocation();
  Register value_lo = value_pair->At(0).reg();
  Register value_hi = value_pair->At(1).reg();
  Register tmp = locs()->temp(0).reg();
  Register out_reg = locs()->out(0).reg();
  compiler::Label done;
  __ SmiTag(out_reg, value_lo);
  __ cmp(value_hi, compiler::Operand(out_reg, ASR, 31), EQ);
         compiler->intrinsic_slow_path_label(),
  } else if (locs()->call_on_shared_slow_path()) {
    const bool has_frame = compiler->flow_graph().graph_entry()->NeedsFrame();
      ASSERT(__ constant_pool_allowed());
      __ set_constant_pool_allowed(false);
      __ EnterDartFrame(0);
    auto object_store = compiler->isolate_group()->object_store();
    const bool live_fpu_regs = locs()->live_registers()->FpuRegisterCount() > 0;
        live_fpu_regs ? object_store->allocate_mint_with_fpu_regs_stub()
                      : object_store->allocate_mint_without_fpu_regs_stub());
    ASSERT(!locs()->live_registers()->ContainsRegister(
    auto extended_env = compiler->SlowPathEnvironmentFor(this, 0);
    compiler->GenerateStubCall(source(), stub, UntaggedPcDescriptors::kOther,
      __ LeaveDartFrame();
      __ set_constant_pool_allowed(true);
  __ StoreFieldToOffset(value_lo, out_reg,
                        compiler::target::Mint::value_offset());
  __ StoreFieldToOffset(
      compiler::target::Mint::value_offset() + compiler::target::kWordSize);
static void LoadInt32FromMint(FlowGraphCompiler* compiler,
                              compiler::Label* deopt) {
  __ LoadFieldFromOffset(result, mint, compiler::target::Mint::value_offset());
  if (deopt != nullptr) {
    __ LoadFieldFromOffset(
        compiler::target::Mint::value_offset() + compiler::target::kWordSize);
        compiler::Operand(result, ASR, compiler::target::kBitsPerWord - 1));
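// UnboxInteger32Instr: Smis are untagged directly; Mints are narrowed via
// LoadInt32FromMint above, which compares the high word against the sign
// extension of the low word and deoptimizes when the value does not fit in
// 32 bits.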
LocationSummary* UnboxInteger32Instr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = CanDeoptimize() ? 1 : 0;
  LocationSummary* summary = new (zone)
  if (kNumTemps > 0) {

void UnboxInteger32Instr::EmitNativeCode(FlowGraphCompiler* compiler) {
  compiler::Label* deopt =
      ? compiler->AddDeoptStub(GetDeoptId(), ICData::kDeoptUnboxInteger)
  compiler::Label* out_of_range = !is_truncating() ? deopt : nullptr;
  if (value_cid == kSmiCid) {
  } else if (value_cid == kMintCid) {
    LoadInt32FromMint(compiler, value, out, temp, out_of_range);
  } else if (!CanDeoptimize()) {
    compiler::Label done;
    compiler::Label done;
    __ CompareClassId(value, kMintCid, temp);
    LoadInt32FromMint(compiler, value, out, temp, out_of_range);
LocationSummary* BinaryDoubleOpInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone)

void BinaryDoubleOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

LocationSummary* DoubleTestOpInstr::MakeLocationSummary(Zone* zone,
  const bool needs_temp = op_kind() != MethodRecognizer::kDouble_getIsNaN;
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = needs_temp ? 1 : 0;
  LocationSummary* summary = new (zone)

    BranchLabels labels) {
  const bool is_negated = kind() != Token::kEQ;
    case MethodRecognizer::kDouble_getIsNaN: {
      // After comparing the value with itself, VS (unordered) means NaN.
      return is_negated ? VC : VS;
    case MethodRecognizer::kDouble_getIsInfinite: {
      const Register temp = locs()->temp(0).reg();
      compiler::Label done;
      __ cmp(TMP, compiler::Operand(0));
      __ b(is_negated ? labels.true_label : labels.false_label, NE);
      // Mask off the sign bit and look for the infinity bit pattern in the
      // high word (exponent all ones, mantissa zero).
      __ AndImmediate(temp, temp, 0x7FFFFFFF);
      __ CompareImmediate(temp, 0x7FF00000);
      return is_negated ? NE : EQ;
    case MethodRecognizer::kDouble_getIsNegative: {
      const Register temp = locs()->temp(0).reg();
      __ b(is_negated ? labels.true_label : labels.false_label, VS);
      __ cmp(temp, compiler::Operand(0), ZERO);
      return is_negated ? GE : LT;
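// The SIMD emitters below are defined with the DEFINE_EMIT macro, which
// expands to a static EmitXxx function taking the compiler, the SimdOpInstr,
// and a typed list of register constraints. MakeLocationSummaryFromEmitter
// and InvokeEmitter (used further down) derive the location summary and the
// dispatch from that same signature.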
#define DEFINE_EMIT(Name, Args)                                               \
  static void Emit##Name(FlowGraphCompiler* compiler, SimdOpInstr* instr,     \
                         PP_APPLY(PP_UNPACK, Args))

DEFINE_EMIT(Simd32x4BinaryOp,
  switch (instr->kind()) {
    case SimdOpInstr::kFloat32x4Add:
    case SimdOpInstr::kFloat32x4Sub:
    case SimdOpInstr::kFloat32x4Mul:
    case SimdOpInstr::kFloat32x4Div:
    case SimdOpInstr::kFloat32x4Equal:
    case SimdOpInstr::kFloat32x4NotEqual:
    case SimdOpInstr::kFloat32x4GreaterThan:
    case SimdOpInstr::kFloat32x4GreaterThanOrEqual:
    case SimdOpInstr::kFloat32x4LessThan:
    case SimdOpInstr::kFloat32x4LessThanOrEqual:
    case SimdOpInstr::kFloat32x4Min:
    case SimdOpInstr::kFloat32x4Max:
    case SimdOpInstr::kFloat32x4Scale:
    case SimdOpInstr::kInt32x4BitAnd:
    case SimdOpInstr::kInt32x4BitOr:
    case SimdOpInstr::kInt32x4BitXor:
    case SimdOpInstr::kInt32x4Add:
    case SimdOpInstr::kInt32x4Sub:

DEFINE_EMIT(Float64x2BinaryOp,
  switch (instr->kind()) {
    case SimdOpInstr::kFloat64x2Add:
    case SimdOpInstr::kFloat64x2Sub:
    case SimdOpInstr::kFloat64x2Mul:
    case SimdOpInstr::kFloat64x2Div:

DEFINE_EMIT(Simd32x4Shuffle,
            (FixedQRegisterView<Q6> result, FixedQRegisterView<Q5> value)) {
  switch (instr->kind()) {
    case SimdOpInstr::kFloat32x4GetX:
    case SimdOpInstr::kFloat32x4GetY:
    case SimdOpInstr::kFloat32x4GetZ:
    case SimdOpInstr::kFloat32x4GetW:
    case SimdOpInstr::kInt32x4Shuffle:
    case SimdOpInstr::kFloat32x4Shuffle: {
      if (instr->mask() == 0x00) {
      } else if (instr->mask() == 0x55) {
      } else if (instr->mask() == 0xAA) {
      } else if (instr->mask() == 0xFF) {
        // General shuffle: copy each output lane from the input lane selected
        // by the corresponding two bits of the mask.
        QRegisterView temp(QTMP);
        __ vmovq(temp, value);
        for (intptr_t i = 0; i < 4; i++) {
          __ vmovs(result.s(i), temp.s((instr->mask() >> (2 * i)) & 0x3));

DEFINE_EMIT(Simd32x4ShuffleMix,
            (FixedQRegisterView<Q6> result,
             FixedQRegisterView<Q4> left,
             FixedQRegisterView<Q5> right)) {
  __ vmovs(result.s(0), left.s((instr->mask() >> 0) & 0x3));
  __ vmovs(result.s(1), left.s((instr->mask() >> 2) & 0x3));

DEFINE_EMIT(Simd32x4GetSignMask,
            (Register out, FixedQRegisterView<Q5> value, Temp<Register> temp)) {
  // Collect the sign bit of each lane into the low four bits of out.
  __ Lsr(out, out, compiler::Operand(31));
  __ Lsr(temp, temp, compiler::Operand(31));
  __ orr(out, out, compiler::Operand(temp, LSL, 1));
  __ Lsr(temp, temp, compiler::Operand(31));
  __ orr(out, out, compiler::Operand(temp, LSL, 2));
  __ Lsr(temp, temp, compiler::Operand(31));
  __ orr(out, out, compiler::Operand(temp, LSL, 3));

DEFINE_EMIT(Float32x4FromDoubles,
            (FixedQRegisterView<Q6> out,
             QRegisterView q3)) {
  __ vcvtsd(out.s(0), q0.d(0));
  __ vcvtsd(out.s(1), q1.d(0));
  __ vcvtsd(out.s(2), q2.d(0));
  __ vcvtsd(out.s(3), q3.d(0));

DEFINE_EMIT(Float32x4Zero, (QRegister out)) {
  __ veorq(out, out, out);

DEFINE_EMIT(Float32x4Sqrt,
  switch (instr->kind()) {
    case SimdOpInstr::kFloat32x4Negate:
    case SimdOpInstr::kFloat32x4Abs:
    case SimdOpInstr::kFloat32x4Reciprocal:
    case SimdOpInstr::kFloat32x4ReciprocalSqrt:

DEFINE_EMIT(Simd32x4ToSimd32x4Conversion, (SameAsFirstInput, QRegister left)) {

DEFINE_EMIT(Float64x2Clamp,
             QRegisterView lower,
             QRegisterView upper)) {
  compiler::Label done0, done1;
  __ vcmpd(left.d(0), upper.d(0));
  __ vcmpd(left.d(1), upper.d(1));

DEFINE_EMIT(Float32x4With,
            (FixedQRegisterView<Q6> result,
             QRegisterView replacement,
  __ vcvtsd(STMP, replacement.d(0));
  switch (instr->kind()) {
    case SimdOpInstr::kFloat32x4WithX:
    case SimdOpInstr::kFloat32x4WithY:
    case SimdOpInstr::kFloat32x4WithZ:
    case SimdOpInstr::kFloat32x4WithW:

DEFINE_EMIT(Simd64x2Shuffle, (QRegisterView result, QRegisterView value)) {
  switch (instr->kind()) {
    case SimdOpInstr::kFloat64x2GetX:
    case SimdOpInstr::kFloat64x2GetY:

DEFINE_EMIT(Float64x2Zero, (QRegister q)) {

DEFINE_EMIT(Float64x2Splat, (QRegisterView result, QRegisterView value)) {

DEFINE_EMIT(Float64x2FromDoubles,
            (QRegisterView r, QRegisterView q0, QRegisterView q1)) {
  __ vmovd(r.d(0), q0.d(0));
  __ vmovd(r.d(1), q1.d(0));

DEFINE_EMIT(Float64x2ToFloat32x4, (FixedQRegisterView<Q6> r, QRegisterView q)) {
  __ vcvtsd(r.s(0), q.d(0));
  __ vcvtsd(r.s(1), q.d(1));

DEFINE_EMIT(Float32x4ToFloat64x2, (QRegisterView r, FixedQRegisterView<Q6> q)) {
  __ vcvtds(r.d(0), q.s(0));
  __ vcvtds(r.d(1), q.s(1));

DEFINE_EMIT(Float64x2GetSignMask,
            (Register out, FixedQRegisterView<Q6> value)) {
  __ Lsr(out, out, compiler::Operand(31));
  __ Lsr(TMP, TMP, compiler::Operand(31));
  __ orr(out, out, compiler::Operand(TMP, LSL, 1));

DEFINE_EMIT(Float64x2Unary, (QRegisterView result, QRegisterView value)) {
  switch (instr->kind()) {
    case SimdOpInstr::kFloat64x2Negate:
    case SimdOpInstr::kFloat64x2Abs:
    case SimdOpInstr::kFloat64x2Sqrt:

DEFINE_EMIT(Float64x2Binary,
            (SameAsFirstInput, QRegisterView left, QRegisterView right)) {
  switch (instr->kind()) {
    case SimdOpInstr::kFloat64x2Scale:
    case SimdOpInstr::kFloat64x2WithX:
    case SimdOpInstr::kFloat64x2WithY:
    case SimdOpInstr::kFloat64x2Min: {
    case SimdOpInstr::kFloat64x2Max: {

DEFINE_EMIT(Int32x4FromInts,

DEFINE_EMIT(Int32x4FromBools,
             Temp<Register> temp)) {
  // Materialize an all-ones lane value once, then each lane is set to all
  // ones or all zeros depending on the corresponding boolean.
  __ LoadImmediate(temp, 0xffffffff);
  __ cmp(v0, compiler::Operand(IP));
  __ cmp(v1, compiler::Operand(IP));
  __ cmp(v2, compiler::Operand(IP));
  __ cmp(v3, compiler::Operand(IP));

DEFINE_EMIT(Int32x4GetFlag, (Register result, FixedQRegisterView<Q6> value)) {
  switch (instr->kind()) {
    case SimdOpInstr::kInt32x4GetFlagX:
    case SimdOpInstr::kInt32x4GetFlagY:
    case SimdOpInstr::kInt32x4GetFlagZ:
    case SimdOpInstr::kInt32x4GetFlagW:

DEFINE_EMIT(Int32x4Select,
             Temp<QRegister> temp)) {
  // out = (mask & trueValue) | (~mask & falseValue).
  __ vmovq(temp, mask);
  __ vmvnq(temp, temp);
  __ vandq(mask, mask, trueValue);
  __ vandq(temp, temp, falseValue);
  __ vorrq(out, mask, temp);

DEFINE_EMIT(Int32x4WithFlag,
  __ LoadImmediate(TMP, 0xffffffff, EQ);
  switch (instr->kind()) {
    case SimdOpInstr::kInt32x4WithFlagX:
    case SimdOpInstr::kInt32x4WithFlagY:
    case SimdOpInstr::kInt32x4WithFlagZ:
    case SimdOpInstr::kInt32x4WithFlagW:
#define SIMD_OP_VARIANTS(CASE, ____, SIMPLE)                                   \
  CASE(Float32x4Add)                                                           \
  CASE(Float32x4Sub)                                                           \
  CASE(Float32x4Mul)                                                           \
  CASE(Float32x4Div)                                                           \
  CASE(Float32x4Equal)                                                         \
  CASE(Float32x4NotEqual)                                                      \
  CASE(Float32x4GreaterThan)                                                   \
  CASE(Float32x4GreaterThanOrEqual)                                            \
  CASE(Float32x4LessThan)                                                      \
  CASE(Float32x4LessThanOrEqual)                                               \
  CASE(Float32x4Min)                                                           \
  CASE(Float32x4Max)                                                           \
  CASE(Float32x4Scale)                                                         \
  CASE(Int32x4BitAnd)                                                          \
  CASE(Int32x4BitOr)                                                           \
  CASE(Int32x4BitXor)                                                          \
  ____(Simd32x4BinaryOp)                                                       \
  CASE(Float64x2Add)                                                           \
  CASE(Float64x2Sub)                                                           \
  CASE(Float64x2Mul)                                                           \
  CASE(Float64x2Div)                                                           \
  ____(Float64x2BinaryOp)                                                      \
  CASE(Float32x4GetX)                                                          \
  CASE(Float32x4GetY)                                                          \
  CASE(Float32x4GetZ)                                                          \
  CASE(Float32x4GetW)                                                          \
  CASE(Int32x4Shuffle)                                                         \
  CASE(Float32x4Shuffle)                                                       \
  ____(Simd32x4Shuffle)                                                        \
  CASE(Float32x4ShuffleMix)                                                    \
  CASE(Int32x4ShuffleMix)                                                      \
  ____(Simd32x4ShuffleMix)                                                     \
  CASE(Float32x4GetSignMask)                                                   \
  CASE(Int32x4GetSignMask)                                                     \
  ____(Simd32x4GetSignMask)                                                    \
  SIMPLE(Float32x4FromDoubles)                                                 \
  SIMPLE(Float32x4Zero)                                                        \
  SIMPLE(Float32x4Splat)                                                       \
  SIMPLE(Float32x4Sqrt)                                                        \
  CASE(Float32x4Negate)                                                        \
  CASE(Float32x4Abs)                                                           \
  CASE(Float32x4Reciprocal)                                                    \
  CASE(Float32x4ReciprocalSqrt)                                                \
  ____(Float32x4Unary)                                                         \
  CASE(Float32x4ToInt32x4)                                                     \
  CASE(Int32x4ToFloat32x4)                                                     \
  ____(Simd32x4ToSimd32x4Conversion)                                           \
  SIMPLE(Float32x4Clamp)                                                       \
  SIMPLE(Float64x2Clamp)                                                       \
  CASE(Float32x4WithX)                                                         \
  CASE(Float32x4WithY)                                                         \
  CASE(Float32x4WithZ)                                                         \
  CASE(Float32x4WithW)                                                         \
  ____(Float32x4With)                                                          \
  CASE(Float64x2GetX)                                                          \
  CASE(Float64x2GetY)                                                          \
  ____(Simd64x2Shuffle)                                                        \
  SIMPLE(Float64x2Zero)                                                        \
  SIMPLE(Float64x2Splat)                                                       \
  SIMPLE(Float64x2FromDoubles)                                                 \
  SIMPLE(Float64x2ToFloat32x4)                                                 \
  SIMPLE(Float32x4ToFloat64x2)                                                 \
  SIMPLE(Float64x2GetSignMask)                                                 \
  CASE(Float64x2Negate)                                                        \
  CASE(Float64x2Abs)                                                           \
  CASE(Float64x2Sqrt)                                                          \
  ____(Float64x2Unary)                                                         \
  CASE(Float64x2Scale)                                                         \
  CASE(Float64x2WithX)                                                         \
  CASE(Float64x2WithY)                                                         \
  CASE(Float64x2Min)                                                           \
  CASE(Float64x2Max)                                                           \
  ____(Float64x2Binary)                                                        \
  SIMPLE(Int32x4FromInts)                                                      \
  SIMPLE(Int32x4FromBools)                                                     \
  CASE(Int32x4GetFlagX)                                                        \
  CASE(Int32x4GetFlagY)                                                        \
  CASE(Int32x4GetFlagZ)                                                        \
  CASE(Int32x4GetFlagW)                                                        \
  ____(Int32x4GetFlag)                                                         \
  SIMPLE(Int32x4Select)                                                        \
  CASE(Int32x4WithFlagX)                                                       \
  CASE(Int32x4WithFlagY)                                                       \
  CASE(Int32x4WithFlagZ)                                                       \
  CASE(Int32x4WithFlagW)                                                       \
  ____(Int32x4WithFlag)

#define CASE(Name) case k##Name:
    return MakeLocationSummaryFromEmitter(zone, this, &Emit##Name);
#define SIMPLE(Name) CASE(Name) EMIT(Name)

#define CASE(Name) case k##Name:
      InvokeEmitter(compiler, this, &Emit##Name);                              \
#define SIMPLE(Name) CASE(Name) EMIT(Name)
LocationSummary* CaseInsensitiveCompareInstr::MakeLocationSummary(
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone)

void CaseInsensitiveCompareInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  compiler::LeafRuntimeScope rt(compiler->assembler(),

LocationSummary* MathMinMaxInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 1;
  LocationSummary* summary = new (zone)
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone)

void MathMinMaxInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
         (op_kind() == MethodRecognizer::kMathMax));
  const bool is_min = (op_kind() == MethodRecognizer::kMathMin);
    const Register temp = locs()->temp(0).reg();
    __ b(&returns_nan, VS);
        is_min ? TokenKindToDoubleCondition(Token::kGTE)
               : TokenKindToDoubleCondition(Token::kLTE);
    __ Bind(&returns_nan);
    __ LoadDImmediate(result, NAN, temp);
    __ cmp(temp, compiler::Operand(0));
LocationSummary* UnarySmiOpInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone)

void UnarySmiOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
    case Token::kNEGATE: {
      compiler::Label* deopt =
          compiler->AddDeoptStub(deopt_id(), ICData::kDeoptUnaryOp);
    case Token::kBIT_NOT:

LocationSummary* UnaryDoubleOpInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone)

void UnaryDoubleOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
    case Token::kNEGATE:
    case Token::kSQUARE:

LocationSummary* Int32ToDoubleInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* result = new (zone)

void Int32ToDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

LocationSummary* SmiToDoubleInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* result = new (zone)

void SmiToDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

LocationSummary* Int64ToDoubleInstr::MakeLocationSummary(Zone* zone,

void Int64ToDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
LocationSummary* DoubleToIntegerInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* result = new (zone) LocationSummary(

void DoubleToIntegerInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  DoubleToIntegerSlowPath* slow_path =
      new DoubleToIntegerSlowPath(this, locs()->in(0).fpu_reg());
  compiler->AddSlowPathCode(slow_path);
  // NaN compares unordered with itself; take the slow path in that case.
  __ vcmpd(value_double, value_double);
  __ b(slow_path->entry_label(), VS);
  __ vcvtid(STMP, value_double);
  // Check for overflow and that the result fits into a Smi.
  __ CompareImmediate(result, 0xC0000000);
  __ b(slow_path->entry_label(), MI);
  __ Bind(slow_path->exit_label());

LocationSummary* DoubleToSmiInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* result = new (zone)

void DoubleToSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  compiler::Label* deopt =
      compiler->AddDeoptStub(deopt_id(), ICData::kDeoptDoubleToSmi);
  __ CompareImmediate(result, 0xC0000000);
LocationSummary* DoubleToFloatInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* result = new (zone)

void DoubleToFloatInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

LocationSummary* FloatToDoubleInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* result = new (zone)

void FloatToDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

LocationSummary* FloatCompareInstr::MakeLocationSummary(Zone* zone,

void FloatCompareInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

  const intptr_t kNumTemps =
  LocationSummary* result = new (zone)
static void InvokeDoublePow(FlowGraphCompiler* compiler,
                            InvokeMathCFunctionInstr* instr) {
  ASSERT(instr->recognized_kind() == MethodRecognizer::kMathDoublePow);
  const intptr_t kInputCount = 2;
  ASSERT(instr->InputCount() == kInputCount);
  LocationSummary* locs = instr->locs();
  const Register temp = locs->temp(0).reg();
  compiler::Label skip_call, try_sqrt, check_base, return_nan;
  __ vmovd(saved_base, base);
  __ LoadDImmediate(result, 1.0, temp);
  __ b(&check_base, VS);  // NaN exponent: go check the base.
  __ b(&skip_call, EQ);   // Exponent is 0.0; result is already 1.0.
  compiler::Label return_base;
  __ b(&return_base, EQ);  // Exponent is 1.0.
  __ LoadDImmediate(DTMP, 2.0, temp);
  compiler::Label return_base_times_2;
  __ b(&return_base_times_2, EQ);  // Exponent is 2.0.
  __ LoadDImmediate(DTMP, 3.0, temp);
  __ b(&check_base, NE);
  __ vmuld(result, saved_base, saved_base);
  __ Bind(&return_base);
  __ Bind(&return_base_times_2);
  __ vmuld(result, saved_base, saved_base);
  __ Bind(&check_base);
  __ b(&return_nan, VS);
  __ b(&skip_call, EQ);
  __ vcmpd(saved_base, exp);
  __ b(&try_sqrt, VC);
  __ Bind(&return_nan);
  __ LoadDImmediate(result, NAN, temp);
  compiler::Label do_pow, return_zero;
  __ LoadDImmediate(result, 0.5, temp);
  __ vcmpdz(saved_base);
  __ b(&return_zero, EQ);
  __ Bind(&return_zero);
  __ LoadDImmediate(result, 0.0, temp);
  __ vmovd(base, saved_base);
    ASSERT(instr->TargetFunction().is_leaf());
    compiler::LeafRuntimeScope rt(compiler->assembler(),
    rt.Call(instr->TargetFunction(), kInputCount);
    compiler::LeafRuntimeScope rt(compiler->assembler(),
    rt.Call(instr->TargetFunction(), kInputCount);
  __ Bind(&skip_call);
    compiler::LeafRuntimeScope rt(compiler->assembler(),
    compiler::LeafRuntimeScope rt(compiler->assembler(),
LocationSummary* ExtractNthOutputInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  LocationSummary* summary =

void ExtractNthOutputInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  ASSERT(locs()->in(0).IsPairLocation());
  PairLocation* pair = locs()->in(0).AsPairLocation();
  __ mov(out, compiler::Operand(in));

LocationSummary* UnboxLaneInstr::MakeLocationSummary(Zone* zone,

void UnboxLaneInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

LocationSummary* BoxLanesInstr::MakeLocationSummary(Zone* zone,

void BoxLanesInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
LocationSummary* TruncDivModInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 2;
  LocationSummary* summary = new (zone)

void TruncDivModInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  compiler::Label* deopt =
      compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinarySmiOp);
  ASSERT(locs()->out(0).IsPairLocation());
  PairLocation* pair = locs()->out(0).AsPairLocation();
  const Register result_div = pair->At(0).reg();
  const Register result_mod = pair->At(1).reg();
  __ cmp(right, compiler::Operand(0));
  const Register temp = locs()->temp(0).reg();
  __ IntegerDivide(result_div, temp, IP, dtemp, DTMP);
  __ CompareImmediate(result_div, 0x40000000);
  // result_mod = left - right * result_div (mls: Rd = Ra - Rn * Rm).
  __ mls(result_mod, IP, result_div, temp);
  __ SmiTag(result_div);
  __ SmiTag(result_mod);
  compiler::Label done;
  __ cmp(result_mod, compiler::Operand(0));
  // Adjust the remainder so it is non-negative, matching Dart's % semantics:
  //   res = res - right  if right < 0
  //   res = res + right  if right >= 0
  __ cmp(right, compiler::Operand(0));
  __ sub(result_mod, result_mod, compiler::Operand(right), LT);
  __ add(result_mod, result_mod, compiler::Operand(right), GE);
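// Hashes a 64-bit integer held in a register pair by multiplying with the
// constant 0x2d51 and folding the partial products back into 32 bits, so
// that equal integer values produce equal hash codes regardless of how they
// are represented.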
static void EmitHashIntegerCodeSequence(FlowGraphCompiler* compiler,
  __ LoadImmediate(TMP, compiler::Immediate(0x2d51));
  __ umull(TMP, value_hi, value_hi, TMP);
  __ add(TMP, TMP, compiler::Operand(value_lo));
LocationSummary* HashDoubleOpInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 4;
  LocationSummary* summary = new (zone) LocationSummary(

void HashDoubleOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  const Register temp = locs()->temp(0).reg();
  const Register temp1 = locs()->temp(1).reg();
  ASSERT(locs()->temp(3).reg() == R4);
  const PairLocation* out_pair = locs()->out(0).AsPairLocation();
  ASSERT(out_pair->At(1).reg() == R1);
  compiler::Label hash_double, hash_double_value, try_convert;
  // If the exponent bits are all ones (infinity or NaN), hash the raw bits.
  __ AndImmediate(temp, temp, 0x7FF00000);
  __ CompareImmediate(temp, 0x7FF00000);
  __ b(&hash_double_value, EQ);
  compiler::Label slow_path;
  __ Bind(&try_convert);
  __ vmovrs(temp1, STMP);
  __ CompareImmediate(temp1, 0xC0000000);
  __ b(&slow_path, MI);
  __ vmovdr(DTMP, 0, temp1);
  __ vcvtdi(temp_double, STMP);
  __ b(&hash_double_value, NE);
  __ SignFill(temp, temp1);
  compiler::Label hash_integer, done;
  __ Bind(&hash_integer);
  __ Bind(&slow_path);
      compiler::target::Thread::unboxed_runtime_arg_offset());
  compiler::LeafRuntimeScope rt(compiler->assembler(), 0,
  __ mov(R0, compiler::Operand(THR));
  rt.Call(kTryDoubleAsIntegerRuntimeEntry, 1);
  __ mov(R4, compiler::Operand(R0));
  __ LoadFromOffset(temp1, THR,
                    compiler::target::Thread::unboxed_runtime_arg_offset());
  __ LoadFromOffset(temp, THR,
                    compiler::target::Thread::unboxed_runtime_arg_offset() +
                        compiler::target::kWordSize);
  __ cmp(R4, compiler::Operand(0));
  __ b(&hash_integer, NE);
  __ Bind(&hash_double_value);
  __ vmovrrd(temp, temp1, value);
  __ Bind(&hash_double);
  __ eor(result, temp1, compiler::Operand(temp));
  __ mov(R1, compiler::Operand(0));
LocationSummary* HashIntegerOpInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 1;
  LocationSummary* summary = new (zone)

void HashIntegerOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  Register temp = locs()->temp(0).reg();
  __ LoadFieldFromOffset(temp, value,
LocationSummary* CheckClassInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const bool need_mask_temp = IsBitTest();
  const intptr_t kNumTemps = !IsNullCheck() ? (need_mask_temp ? 2 : 1) : 0;
  LocationSummary* summary = new (zone)
  if (need_mask_temp) {

    compiler::Label* deopt) {
  __ CompareObject(locs()->in(0).reg(), Object::null_object());

    compiler::Label* deopt) {
  Register biased_cid = locs()->temp(0).reg();
  __ AddImmediate(biased_cid, -min);
  __ CompareImmediate(biased_cid, max - min);
  Register bit_reg = locs()->temp(1).reg();
  __ LoadImmediate(bit_reg, 1);
  __ Lsl(bit_reg, bit_reg, biased_cid);
  __ TestImmediate(bit_reg, mask);

int CheckClassInstr::EmitCheckCid(FlowGraphCompiler* compiler,
                                  compiler::Label* is_ok,
                                  compiler::Label* deopt,
                                  bool use_near_jump) {
  Register biased_cid = locs()->temp(0).reg();
  if (cid_start == cid_end) {
    __ CompareImmediate(biased_cid, cid_start - bias);
    // For a class id range, rebase and compare once so a single comparison
    // covers the whole range.
    __ AddImmediate(biased_cid, bias - cid_start);
    __ CompareImmediate(biased_cid, cid_end - cid_start);
  __ b(deopt, no_match);
LocationSummary* CheckSmiInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone)

void CheckSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  compiler::Label* deopt =
      compiler->AddDeoptStub(deopt_id(), ICData::kDeoptCheckSmi);
  __ BranchIfNotSmi(value, deopt);

void CheckNullInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  Register value_reg = locs()->in(0).reg();
  __ CompareObject(value_reg, Object::null_object());
  const bool live_fpu_regs = locs()->live_registers()->FpuRegisterCount() > 0;
  const bool using_shared_stub = locs()->call_on_shared_slow_path();
  if (using_shared_stub && compiler->CanPcRelativeCall(stub) &&
      compiler->flow_graph().graph_entry()->NeedsFrame()) {
    __ GenerateUnRelocatedPcRelativeCall(EQUAL);
    compiler->AddPcRelativeCallStubTarget(stub);
    auto extended_env = compiler->SlowPathEnvironmentFor(this, 0);
        UntaggedPcDescriptors::kOther, locs(),
  ThrowErrorSlowPathCode* slow_path = new NullErrorSlowPath(this);
  compiler->AddSlowPathCode(slow_path);
  __ BranchIf(EQUAL, slow_path->entry_label());
LocationSummary* CheckClassIdInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone)

void CheckClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  compiler::Label* deopt =
      compiler->AddDeoptStub(deopt_id(), ICData::kDeoptCheckClass);
  if (cids_.IsSingleCid()) {

LocationSummary* CheckArrayBoundInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 0;
  LocationSummary* locs = new (zone)

void CheckArrayBoundInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  compiler::Label* deopt =
      compiler->AddDeoptStub(deopt_id(), ICData::kDeoptCheckArrayBound, flags);
  if (length_loc.IsConstant() && index_loc.IsConstant()) {
  if (index_loc.IsConstant()) {
  } else if (length_loc.IsConstant()) {
    if (index_cid != kSmiCid) {
      __ BranchIfNotSmi(index, deopt);
        compiler::target::kSmiMax) {
    if (index_cid != kSmiCid) {
      __ BranchIfNotSmi(index, deopt);
LocationSummary* CheckWritableInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* locs = new (zone) LocationSummary(
      zone, kNumInputs, kNumTemps,

void CheckWritableInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  WriteErrorSlowPath* slow_path = new WriteErrorSlowPath(this);
  compiler->AddSlowPathCode(slow_path);
  __ ldrb(TMP, compiler::FieldAddress(locs()->in(0).reg(),
                                      compiler::target::Object::tags_offset()));
  // Take the slow path when the object's immutable tag bit is set.
  ASSERT(compiler::target::UntaggedObject::kImmutableBit < 8);
  __ TestImmediate(TMP, 1 << compiler::target::UntaggedObject::kImmutableBit);
LocationSummary* BinaryInt64OpInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = (op_kind() == Token::kMUL) ? 1 : 0;
  LocationSummary* summary = new (zone)
  compiler::Operand o;
  if (CanBePairOfImmediateOperands(right(), &o, &o) &&
  if (op_kind() == Token::kMUL) {

void BinaryInt64OpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  PairLocation* left_pair = locs()->in(0).AsPairLocation();
  Register left_lo = left_pair->At(0).reg();
  Register left_hi = left_pair->At(1).reg();
  PairLocation* out_pair = locs()->out(0).AsPairLocation();
  Register out_lo = out_pair->At(0).reg();
  Register out_hi = out_pair->At(1).reg();
  ASSERT(!CanDeoptimize());
  compiler::Operand right_lo, right_hi;
    const bool ok = CanBePairOfImmediateOperands(locs()->in(1).constant(),
                                                 &right_lo, &right_hi);
    PairLocation* right_pair = locs()->in(1).AsPairLocation();
    right_lo = compiler::Operand(right_pair->At(0).reg());
    right_hi = compiler::Operand(right_pair->At(1).reg());
    case Token::kBIT_AND: {
      __ and_(out_lo, left_lo, compiler::Operand(right_lo));
      __ and_(out_hi, left_hi, compiler::Operand(right_hi));
    case Token::kBIT_OR: {
      __ orr(out_lo, left_lo, compiler::Operand(right_lo));
      __ orr(out_hi, left_hi, compiler::Operand(right_hi));
    case Token::kBIT_XOR: {
      __ eor(out_lo, left_lo, compiler::Operand(right_lo));
      __ eor(out_hi, left_hi, compiler::Operand(right_hi));
      __ adds(out_lo, left_lo, compiler::Operand(right_lo));
      __ adcs(out_hi, left_hi, compiler::Operand(right_hi));
      __ subs(out_lo, left_lo, compiler::Operand(right_lo));
      __ sbcs(out_hi, left_hi, compiler::Operand(right_hi));
      PairLocation* right_pair = locs()->in(1).AsPairLocation();
      Register right_lo_reg = right_pair->At(0).reg();
      Register right_hi_reg = right_pair->At(1).reg();
      Register temp = locs()->temp(0).reg();
      // 64-bit multiply from 32-bit halves:
      //   out_hi = left_lo * right_hi + left_hi * right_lo
      //            + high32(left_lo * right_lo)
      //   out_lo = low32(left_lo * right_lo)
      __ mul(temp, left_lo, right_hi_reg);
      __ mla(out_hi, left_hi, right_lo_reg, temp);
      __ umull(out_lo, temp, left_lo, right_lo_reg);
      __ add(out_hi, out_hi, compiler::Operand(temp));
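// 64-bit shifts on 32-bit ARM are composed from 32-bit shifts over the
// register pair. For a constant count the cases below and above 32 bits are
// selected at compile time, and bits crossing the word boundary are
// recombined with an orr.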
static void EmitShiftInt64ByConstant(FlowGraphCompiler* compiler,
                                     const Object& right) {
  const int64_t shift = Integer::Cast(right).AsInt64Value();
      __ Lsl(out_lo, left_hi, compiler::Operand(32 - shift));
      __ orr(out_lo, out_lo, compiler::Operand(left_lo, LSR, shift));
      __ Asr(out_hi, left_hi, compiler::Operand(shift));
        __ mov(out_lo, compiler::Operand(left_hi));
      } else if (shift < 64) {
        __ Asr(out_lo, left_hi, compiler::Operand(shift - 32));
        __ Asr(out_lo, left_hi, compiler::Operand(31));
      __ Asr(out_hi, left_hi, compiler::Operand(31));
    case Token::kUSHR: {
        __ Lsl(out_lo, left_hi, compiler::Operand(32 - shift));
        __ orr(out_lo, out_lo, compiler::Operand(left_lo, LSR, shift));
        __ Lsr(out_hi, left_hi, compiler::Operand(shift));
          __ mov(out_lo, compiler::Operand(left_hi));
          __ Lsr(out_lo, left_hi, compiler::Operand(shift - 32));
        __ mov(out_hi, compiler::Operand(0));
        __ Lsr(out_hi, left_lo, compiler::Operand(32 - shift));
        __ orr(out_hi, out_hi, compiler::Operand(left_hi, LSL, shift));
        __ Lsl(out_lo, left_lo, compiler::Operand(shift));
        __ mov(out_hi, compiler::Operand(left_lo));
        __ Lsl(out_hi, left_lo, compiler::Operand(shift - 32));
        __ mov(out_lo, compiler::Operand(0));
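// For a variable count the emitter uses the classic register-pair sequence:
// rsbs computes (32 - count) and sets the flags, and the conditional moves
// that follow (MI for counts above 32, PL otherwise) choose between shifting
// within a word and pulling the spill-over bits from the other half.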
static void EmitShiftInt64ByRegister(FlowGraphCompiler* compiler,
      __ rsbs(IP, right, compiler::Operand(32));
      __ mov(out_lo, compiler::Operand(left_hi, ASR, IP), MI);
      __ mov(out_lo, compiler::Operand(left_lo, LSR, right), PL);
      __ orr(out_lo, out_lo, compiler::Operand(left_hi, LSL, IP), PL);
      __ mov(out_hi, compiler::Operand(left_hi, ASR, right));
    case Token::kUSHR: {
      __ rsbs(IP, right, compiler::Operand(32));
      __ mov(out_lo, compiler::Operand(left_hi, LSR, IP), MI);
      __ mov(out_lo, compiler::Operand(left_lo, LSR, right), PL);
      __ orr(out_lo, out_lo, compiler::Operand(left_hi, LSL, IP), PL);
      __ mov(out_hi, compiler::Operand(left_hi, LSR, right));
      __ rsbs(IP, right, compiler::Operand(32));
      __ mov(out_hi, compiler::Operand(left_lo, LSL, IP), MI);
      __ mov(out_hi, compiler::Operand(left_hi, LSL, right), PL);
      __ orr(out_hi, out_hi, compiler::Operand(left_lo, LSR, IP), PL);
      __ mov(out_lo, compiler::Operand(left_lo, LSL, right));
static void EmitShiftUint32ByConstant(FlowGraphCompiler* compiler,
                                      const Object& right) {
  const int64_t shift = Integer::Cast(right).AsInt64Value();
    __ LoadImmediate(out, 0);
      __ Lsr(out, left, compiler::Operand(shift));
      __ Lsl(out, left, compiler::Operand(shift));

static void EmitShiftUint32ByRegister(FlowGraphCompiler* compiler,
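// Shift instructions fall back to a slow path when the count does not fit
// the fast-path checks: a negative count throws ArgumentError, while an
// oversized non-negative count produces the saturated result directly. The
// slow paths below store the offending count into the thread's unboxed
// runtime argument slot before throwing.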
class ShiftInt64OpSlowPath : public ThrowErrorSlowPathCode {
  explicit ShiftInt64OpSlowPath(ShiftInt64OpInstr* instruction)
      : ThrowErrorSlowPathCode(instruction,
                               kArgumentErrorUnboxedInt64RuntimeEntry) {}

  const char* name() override { return "int64 shift"; }

  void EmitCodeAtSlowPathEntry(FlowGraphCompiler* compiler) override {
    PairLocation* left_pair = instruction()->locs()->in(0).AsPairLocation();
    Register left_hi = left_pair->At(1).reg();
    PairLocation* right_pair = instruction()->locs()->in(1).AsPairLocation();
    Register right_lo = right_pair->At(0).reg();
    Register right_hi = right_pair->At(1).reg();
    PairLocation* out_pair = instruction()->locs()->out(0).AsPairLocation();
    Register out_lo = out_pair->At(0).reg();
    Register out_hi = out_pair->At(1).reg();
    __ CompareImmediate(right_hi, 0);
    switch (instruction()->AsShiftInt64Op()->op_kind()) {
        __ Asr(out_hi, left_hi,
               compiler::Operand(compiler::target::kBitsPerWord - 1), GE);
        __ mov(out_lo, compiler::Operand(out_hi), GE);
        __ LoadImmediate(out_lo, 0, GE);
        __ LoadImmediate(out_hi, 0, GE);
    __ b(exit_label(), GE);
    __ StoreToOffset(right_lo, THR,
                     compiler::target::Thread::unboxed_runtime_arg_offset());
    __ StoreToOffset(right_hi, THR,
                     compiler::target::Thread::unboxed_runtime_arg_offset() +
                         compiler::target::kWordSize);

LocationSummary* ShiftInt64OpInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone) LocationSummary(

void ShiftInt64OpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  PairLocation* left_pair = locs()->in(0).AsPairLocation();
  Register left_lo = left_pair->At(0).reg();
  Register left_hi = left_pair->At(1).reg();
  PairLocation* out_pair = locs()->out(0).AsPairLocation();
  Register out_lo = out_pair->At(0).reg();
  Register out_hi = out_pair->At(1).reg();
    EmitShiftInt64ByConstant(compiler, op_kind(), out_lo, out_hi, left_lo,
                             left_hi, locs()->in(1).constant());
    PairLocation* right_pair = locs()->in(1).AsPairLocation();
    Register right_lo = right_pair->At(0).reg();
    Register right_hi = right_pair->At(1).reg();
    ShiftInt64OpSlowPath* slow_path = nullptr;
      slow_path = new (Z) ShiftInt64OpSlowPath(this);
      compiler->AddSlowPathCode(slow_path);
      __ CompareImmediate(right_hi, 0);
      __ b(slow_path->entry_label(), NE);
      __ CompareImmediate(right_lo, kShiftCountLimit);
      __ b(slow_path->entry_label(), HI);
    EmitShiftInt64ByRegister(compiler, op_kind(), out_lo, out_hi, left_lo,
    if (slow_path != nullptr) {
      __ Bind(slow_path->exit_label());
LocationSummary* SpeculativeShiftInt64OpInstr::MakeLocationSummary(
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone)

void SpeculativeShiftInt64OpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  PairLocation* left_pair = locs()->in(0).AsPairLocation();
  Register left_lo = left_pair->At(0).reg();
  Register left_hi = left_pair->At(1).reg();
  PairLocation* out_pair = locs()->out(0).AsPairLocation();
  Register out_lo = out_pair->At(0).reg();
  Register out_hi = out_pair->At(1).reg();
    EmitShiftInt64ByConstant(compiler, op_kind(), out_lo, out_hi, left_lo,
                             left_hi, locs()->in(1).constant());
    Register shift = locs()->in(1).reg();
      compiler::Label* deopt =
          compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinaryInt64Op);
      __ CompareImmediate(shift, kShiftCountLimit);
    EmitShiftInt64ByRegister(compiler, op_kind(), out_lo, out_hi, left_lo,
class ShiftUint32OpSlowPath : public ThrowErrorSlowPathCode {
  explicit ShiftUint32OpSlowPath(ShiftUint32OpInstr* instruction)
      : ThrowErrorSlowPathCode(instruction,
                               kArgumentErrorUnboxedInt64RuntimeEntry) {}

  const char* name() override { return "uint32 shift"; }

  void EmitCodeAtSlowPathEntry(FlowGraphCompiler* compiler) override {
    PairLocation* right_pair = instruction()->locs()->in(1).AsPairLocation();
    Register right_lo = right_pair->At(0).reg();
    Register right_hi = right_pair->At(1).reg();
    Register out = instruction()->locs()->out(0).reg();
    __ CompareImmediate(right_hi, 0);
    __ LoadImmediate(out, 0, GE);
    __ b(exit_label(), GE);
    __ StoreToOffset(right_lo, THR,
                     compiler::target::Thread::unboxed_runtime_arg_offset());
    __ StoreToOffset(right_hi, THR,
                     compiler::target::Thread::unboxed_runtime_arg_offset() +
                         compiler::target::kWordSize);

LocationSummary* ShiftUint32OpInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone) LocationSummary(

void ShiftUint32OpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
                              locs()->in(1).constant());
    PairLocation* right_pair = locs()->in(1).AsPairLocation();
    Register right_lo = right_pair->At(0).reg();
    Register right_hi = right_pair->At(1).reg();
    ShiftUint32OpSlowPath* slow_path = nullptr;
      slow_path = new (Z) ShiftUint32OpSlowPath(this);
      compiler->AddSlowPathCode(slow_path);
      __ CompareImmediate(right_hi, 0);
      __ b(slow_path->entry_label(), NE);
      __ CompareImmediate(right_lo, kUint32ShiftCountLimit);
      __ b(slow_path->entry_label(), HI);
    if (slow_path != nullptr) {
      __ Bind(slow_path->exit_label());
LocationSummary* SpeculativeShiftUint32OpInstr::MakeLocationSummary(
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 1;
  LocationSummary* summary = new (zone)

void SpeculativeShiftUint32OpInstr::EmitNativeCode(
  Register temp = locs()->temp(0).reg();
                              locs()->in(1).constant());
    const bool shift_count_in_range =
    if (!shift_count_in_range) {
      compiler::Label* deopt =
          compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinaryInt64Op);
      __ CompareImmediate(right, 0);
    if (!shift_count_in_range) {
      __ CompareImmediate(right, kUint32ShiftCountLimit);
      __ LoadImmediate(out, 0, HI);

LocationSummary* UnaryInt64OpInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone)

void UnaryInt64OpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  PairLocation* left_pair = locs()->in(0).AsPairLocation();
  Register left_lo = left_pair->At(0).reg();
  Register left_hi = left_pair->At(1).reg();
  PairLocation* out_pair = locs()->out(0).AsPairLocation();
  Register out_lo = out_pair->At(0).reg();
  Register out_hi = out_pair->At(1).reg();
    case Token::kBIT_NOT:
      __ mvn_(out_lo, compiler::Operand(left_lo));
      __ mvn_(out_hi, compiler::Operand(left_hi));
    case Token::kNEGATE:
      // 64-bit negation: 0 - lo with flags, then fold the borrow into the
      // high word before subtracting it.
      __ rsbs(out_lo, left_lo, compiler::Operand(0));
      __ sbc(out_hi, out_hi, compiler::Operand(out_hi));
      __ sub(out_hi, out_hi, compiler::Operand(left_hi));

LocationSummary* BinaryUint32OpInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone)

void BinaryUint32OpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
    case Token::kBIT_AND:
    case Token::kBIT_OR:
    case Token::kBIT_XOR:

LocationSummary* UnaryUint32OpInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone)

void UnaryUint32OpInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  __ mvn_(out, compiler::Operand(left));
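// IntConverterInstr changes representation without changing bits where
// possible: untagged <-> int32/uint32 conversions are no-ops in the same
// register, int64 -> int32 takes the low word (deoptimizing if the high word
// is not its sign extension), and int32/uint32 -> int64 widens by sign- or
// zero-filling the high word.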
LocationSummary* IntConverterInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 0;
  LocationSummary* summary = new (zone)
  if (from() == kUntagged || to() == kUntagged) {
    ASSERT((from() == kUntagged && to() == kUnboxedInt32) ||
           (from() == kUntagged && to() == kUnboxedUint32) ||
           (from() == kUnboxedInt32 && to() == kUntagged) ||
           (from() == kUnboxedUint32 && to() == kUntagged));
    ASSERT(!CanDeoptimize());
  } else if (from() == kUnboxedInt64) {
    ASSERT(to() == kUnboxedUint32 || to() == kUnboxedInt32);
  } else if (to() == kUnboxedInt64) {
    ASSERT(to() == kUnboxedUint32 || to() == kUnboxedInt32);

void IntConverterInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  const bool is_nop_conversion =
      (from() == kUntagged && to() == kUnboxedInt32) ||
      (from() == kUntagged && to() == kUnboxedUint32) ||
      (from() == kUnboxedInt32 && to() == kUntagged) ||
      (from() == kUnboxedUint32 && to() == kUntagged);
  if (is_nop_conversion) {
    ASSERT(locs()->in(0).reg() == locs()->out(0).reg());
  if (from() == kUnboxedInt32 && to() == kUnboxedUint32) {
    ASSERT(out == locs()->in(0).reg());
  } else if (from() == kUnboxedUint32 && to() == kUnboxedInt32) {
    ASSERT(out == locs()->in(0).reg());
    if (CanDeoptimize()) {
      compiler::Label* deopt =
          compiler->AddDeoptStub(deopt_id(), ICData::kDeoptUnboxInteger);
      __ tst(out, compiler::Operand(out));
  } else if (from() == kUnboxedInt64) {
    ASSERT(to() == kUnboxedUint32 || to() == kUnboxedInt32);
    PairLocation* in_pair = locs()->in(0).AsPairLocation();
    Register in_lo = in_pair->At(0).reg();
    Register in_hi = in_pair->At(1).reg();
    __ mov(out, compiler::Operand(in_lo));
    if (CanDeoptimize()) {
      compiler::Label* deopt =
          compiler->AddDeoptStub(deopt_id(), ICData::kDeoptUnboxInteger);
          compiler::Operand(in_lo, ASR, compiler::target::kBitsPerWord - 1));
  } else if (from() == kUnboxedUint32 || from() == kUnboxedInt32) {
    PairLocation* out_pair = locs()->out(0).AsPairLocation();
    Register out_lo = out_pair->At(0).reg();
    Register out_hi = out_pair->At(1).reg();
    __ mov(out_lo, compiler::Operand(in));
    if (from() == kUnboxedUint32) {
      __ eor(out_hi, out_hi, compiler::Operand(out_hi));
          compiler::Operand(in, ASR, compiler::target::kBitsPerWord - 1));
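// BitCastInstr reinterprets the raw bits of a value in another representation
// (float <-> int32, double <-> int64) by moving them between core and FPU
// registers; no conversion arithmetic is performed.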
LocationSummary* BitCastInstr::MakeLocationSummary(Zone* zone, bool opt) const {
  LocationSummary* summary =
      new (zone) LocationSummary(zone, InputCount(),
    case kUnboxedDouble:
    case kUnboxedDouble:

void BitCastInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
    case kUnboxedInt32: {
      const Register from_reg = locs()->in(0).reg();
      const FpuRegister to_reg = locs()->out(0).fpu_reg();
    case kUnboxedFloat: {
      const FpuRegister from_reg = locs()->in(0).fpu_reg();
      const Register to_reg = locs()->out(0).reg();
    case kUnboxedInt64: {
      const Register from_lo = locs()->in(0).AsPairLocation()->At(0).reg();
      const Register from_hi = locs()->in(0).AsPairLocation()->At(1).reg();
      const FpuRegister to_reg = locs()->out(0).fpu_reg();
    case kUnboxedDouble: {
      const FpuRegister from_reg = locs()->in(0).fpu_reg();
      const Register to_lo = locs()->out(0).AsPairLocation()->At(0).reg();
      const Register to_hi = locs()->out(0).AsPairLocation()->At(1).reg();
LocationSummary* StopInstr::MakeLocationSummary(Zone* zone, bool opt) const {

void StopInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  if (entry != nullptr) {
    if (!compiler->CanFallThroughTo(entry)) {
      FATAL("Checked function entry must have no offset");
  if (!compiler->CanFallThroughTo(entry)) {

LocationSummary* GotoInstr::MakeLocationSummary(Zone* zone, bool opt) const {

void GotoInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  if (FLAG_reorder_basic_blocks) {
    compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, GetDeoptId(),
                                   InstructionSource());

LocationSummary* IndirectGotoInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 1;
  const intptr_t kNumTemps = 2;
  LocationSummary* summary = new (zone)

void IndirectGotoInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  Register index_reg = locs()->in(0).reg();
  Register target_address_reg = locs()->temp(0).reg();
  Register offset_reg = locs()->temp(1).reg();
  __ LoadObject(offset_reg, offsets_);
  const auto element_address = __ ElementAddressForRegIndex(
      false, kTypedDataInt32ArrayCid,
      false, offset_reg, index_reg);
  __ ldr(offset_reg, element_address);
  // Compute the target address from the current PC and the stored offset,
  // then jump through the register.
  __ mov(target_address_reg, compiler::Operand(PC));
  __ AddImmediate(target_address_reg, -entry_to_pc_offset);
  __ add(target_address_reg, target_address_reg, compiler::Operand(offset_reg));
  __ bx(target_address_reg);
LocationSummary* StrictCompareInstr::MakeLocationSummary(Zone* zone,
  const intptr_t kNumInputs = 2;
  const intptr_t kNumTemps = 0;
    LocationSummary* locs = new (zone)
  LocationSummary* locs = new (zone)
  ConstantInstr* constant = left()->definition()->AsConstant();
  if ((constant != nullptr) && !left()->IsSingleUse()) {
  constant = right()->definition()->AsConstant();
  if ((constant != nullptr) && !right()->IsSingleUse()) {
  locs->set_in(1, locs->in(0).IsConstant()

Condition StrictCompareInstr::EmitComparisonCodeRegConstant(
    BranchLabels labels,
    const Object& obj) {
  compiler::Label is_true, is_false, done;
  BranchLabels labels = {&is_true, &is_false, &is_false};
  if (is_false.IsLinked() || is_true.IsLinked()) {
    EmitBranchOnCondition(compiler, true_condition, labels);
    BranchInstr* branch) {
  BranchLabels labels = compiler->CreateBranchLabels(branch);
  EmitBranchOnCondition(compiler, true_condition, labels);
LocationSummary* BooleanNegateInstr::MakeLocationSummary(Zone* zone,

void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  const Register input = locs()->in(0).reg();

LocationSummary* BoolToIntInstr::MakeLocationSummary(Zone* zone,

void BoolToIntInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

LocationSummary* IntToBoolInstr::MakeLocationSummary(Zone* zone,

void IntToBoolInstr::EmitNativeCode(FlowGraphCompiler* compiler) {

  const intptr_t kNumInputs = (type_arguments() != nullptr) ? 1 : 0;
  const intptr_t kNumTemps = 0;
  LocationSummary* locs = new (zone)
  TypeUsageInfo* type_usage_info = compiler->thread()->type_usage_info();
  if (type_usage_info != nullptr) {
  compiler->GenerateStubCall(source(), stub, UntaggedPcDescriptors::kOther,

void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  __ BranchLinkPatchable(StubCode::DebugStepCheck());
Register base_reg() const
virtual Representation representation() const
const LocalVariable & local() const
Location temp(intptr_t index) const
Location out(intptr_t index) const
static LocationSummary * Make(Zone *zone, intptr_t input_count, Location out, ContainsCall contains_call)
void set_temp(intptr_t index, Location loc)
void set_out(intptr_t index, Location loc)
bool always_calls() const
Location in(intptr_t index) const
void set_in(intptr_t index, Location loc)
static Location NoLocation()
static Location SameAsFirstInput()
static Location Pair(Location first, Location second)
static Location FpuRegisterLocation(FpuRegister reg)
static Location WritableRegister()
static Location RegisterLocation(Register reg)
PairLocation * AsPairLocation() const
static Location RequiresRegister()
static Location RequiresFpuRegister()
FpuRegister fpu_reg() const
static Location Constant(const ConstantInstr *obj, int pair_index=0)
intptr_t result_cid() const
MethodRecognizer::Kind op_kind() const
bool unboxed_inputs() const
DECLARE_INSTRUCTION_SERIALIZABLE_FIELDS(MemoryCopyInstr, TemplateInstruction, FIELD_LIST) private void EmitUnrolledCopy(FlowGraphCompiler *compiler, Register dest_reg, Register src_reg, intptr_t num_elements, bool reversed)
Value * src_start() const
void EmitLoopCopy(FlowGraphCompiler *compiler, Register dest_reg, Register src_reg, Register length_reg, compiler::Label *done, compiler::Label *copy_forwards=nullptr)
void PrepareLengthRegForLoop(FlowGraphCompiler *compiler, Register length_reg, compiler::Label *done)
Value * dest_start() const
static intptr_t value_offset()
virtual Representation representation() const
static int ComputeArgcTag(const Function &function)
bool is_auto_scope() const
bool is_bootstrap_native() const
const Function & function() const
NativeFunction native_c_function() const
static uword LinkNativeCallEntry()
static CodePtr GetStub(FlowGraphCompiler *compiler, CheckNullInstr::ExceptionType exception_type, bool save_fpu_registers)
static Object & ZoneHandle()
Value * char_code() const
Location At(intptr_t i) const
static bool Overlaps(Range *range, intptr_t min, intptr_t max)
static bool OnlyLessThanOrEqualTo(Range *range, intptr_t value)
static bool IsWithin(const Range *range, int64_t min, int64_t max)
static bool IsPositive(Range *range)
static bool CanBeZero(Range *range)
DECLARE_INSTRUCTION_SERIALIZABLE_FIELDS(ShiftIntegerOpInstr, BinaryIntegerOpInstr, FIELD_LIST) protected bool IsShiftCountInRange(int64_t max=kShiftCountLimit) const
Range * shift_range() const
const char * message() const
bool ShouldEmitStoreBarrier() const
virtual Representation RequiredInputRepresentation(intptr_t idx) const
intptr_t class_id() const
intptr_t index_scale() const
const LocalVariable & local() const
const Field & field() const
bool needs_number_check() const
static CodePtr GetAllocationStubForClass(const Class &cls)
static constexpr int kNullCharCodeSymbolOffset
static bool hardfp_supported()
intptr_t ArgumentCount() const
ArrayPtr GetArgumentsDescriptor() const
virtual intptr_t InputCount() const
const ZoneGrowableArray< intptr_t > & cid_results() const
static bool IsEqualityOperator(Kind tok)
virtual Representation representation() const
Token::Kind op_kind() const
Token::Kind op_kind() const
virtual Representation representation() const
bool is_truncating() const
virtual Representation representation() const
bool IsScanFlagsUnboxed() const
static int32_t Low32Bits(int64_t value)
static constexpr int CountOneBitsWord(uword x)
static constexpr T Maximum(T x, T y)
static constexpr int ShiftForPowerOfTwo(T x)
static int32_t High32Bits(int64_t value)
static T Minimum(T x, T y)
static T AddWithWrapAround(T a, T b)
static constexpr int CountOneBits64(uint64_t x)
static constexpr size_t HighestBit(int64_t v)
static constexpr bool IsPowerOfTwo(T x)
bool BindsToConstant() const
Definition * definition() const
intptr_t InputCount() const
void static bool EmittingComments()
static bool AddressCanHoldConstantIndex(const Object &constant, bool is_load, bool is_external, intptr_t cid, intptr_t index_scale, bool *needs_base=nullptr)
static bool CanHold(uint32_t immediate, Operand *o)
intptr_t StackTopInBytes() const
FlutterSemanticsFlag flag
FlutterSemanticsFlag flags
Dart_NativeFunction function
static float max(float r, float g, float b)
static float min(float r, float g, float b)
#define CASE(Arity, Mask, Name, Args, Result)
#define DEFINE_UNIMPLEMENTED_INSTRUCTION(Name)
#define DEFINE_BACKEND(Name, Args)
const intptr_t kResultIndex
word ToRawSmi(const dart::Object &a)
word SmiValue(const dart::Object &a)
bool HasIntegerValue(const dart::Object &object, int64_t *value)
Location LocationAnyOrConstant(Value *value)
Location LocationRegisterOrConstant(Value *value)
const Register kWriteBarrierSlotReg
static DRegister EvenDRegisterOf(QRegister q)
static Condition InvertCondition(Condition c)
static constexpr int kSavedCallerPcSlotFromFp
bool IsTypedDataBaseClassId(intptr_t index)
const Register kExceptionObjectReg
static DRegister OddDRegisterOf(QRegister q)
const RegList kReservedCpuRegisters
const Register kWriteBarrierObjectReg
constexpr int32_t kMinInt32
const Register kWriteBarrierValueReg
static constexpr bool IsCalleeSavedRegister(Register reg)
constexpr intptr_t kIntptrMin
static const ClassId kLastErrorCid
static const ClassId kFirstErrorCid
static SRegister OddSRegisterOf(DRegister d)
void RegisterTypeArgumentsUse(const Function &function, TypeUsageInfo *type_usage_info, const Class &klass, Definition *type_arguments)
const Register ARGS_DESC_REG
bool IsClampedTypedDataBaseClassId(intptr_t index)
Location LocationFixedRegisterOrConstant(Value *value, Register reg)
const int kNumberOfFpuRegisters
Location LocationWritableRegisterOrSmiConstant(Value *value, intptr_t min_value, intptr_t max_value)
bool IsExternalPayloadClassId(classid_t cid)
constexpr RegList kDartAvailableCpuRegs
const int kAbiPreservedFpuRegCount
static constexpr int kPcMarkerSlotFromFp
const Register FUNCTION_REG
const Register IC_DATA_REG
constexpr int32_t kMaxInt32
compiler::Address LocationToStackSlotAddress(Location loc)
static bool IsConstant(Definition *def, int64_t *val)
const Register kStackTraceObjectReg
static int8_t data[kExtLength]
Location LocationRegisterOrSmiConstant(Value *value, intptr_t min_value, intptr_t max_value)
constexpr intptr_t kBitsPerInt64
static SRegister EvenSRegisterOf(DRegister d)
const QRegister kAbiFirstPreservedFpuReg
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive mode
void Flush(SkSurface *surface)
static constexpr Register kResultReg
static constexpr Register kLengthReg
static constexpr Register kTypeArgumentsReg
static constexpr Register kResultReg
static constexpr Register kTempReg
static constexpr Register kTypeArgumentsReg
static constexpr Register kResultReg
static constexpr Register kObjectReg
static constexpr Representation NativeRepresentation(Representation rep)
static constexpr intptr_t kBoolVsNullMask
static constexpr intptr_t kBoolValueMask
static constexpr size_t ValueSize(Representation rep)
static constexpr bool IsUnboxedInteger(Representation rep)
static compiler::OperandSize OperandSize(Representation rep)
static constexpr bool IsUnboxed(Representation rep)
static bool IsUnsignedInteger(Representation rep)
static Representation RepresentationOfArrayElement(classid_t cid)
static constexpr Register kDstTypeReg
static constexpr Register kInstanceReg
static constexpr Register kFunctionTypeArgumentsReg
static constexpr Register kInstantiatorTypeArgumentsReg
@ kResetToBootstrapNative