7#if defined(TARGET_ARCH_IA32)
28#define __ compiler->assembler()->
29#define Z (compiler->zone())
36 const Instruction* instr,
37 LocationSummary* locs) {
40 ASSERT(instr->representation() == kTagged);
41 const intptr_t kNumInputs = 0;
42 const intptr_t kNumTemps = 0;
43 LocationSummary*
result =
new (zone)
50 ASSERT(instr->RequiredInputRepresentation(0) == kTagged);
51 ASSERT(instr->representation() == kTagged);
52 __ movl(out, compiler::Address(instr->base_reg(), index,
TIMES_2,
61 ASSERT(instr->RequiredInputRepresentation(
63 __ movl(compiler::Address(instr->base_reg(), index,
TIMES_2, instr->offset()),
72 Fixed<Register, ARGS_DESC_REG>,
73 Temp<Register> temp)) {
80LocationSummary* MemoryCopyInstr::MakeLocationSummary(Zone* zone,
88 const bool remove_loop =
90 const intptr_t kNumInputs = 5;
91 const intptr_t kNumTemps = remove_loop ? 1 : 0;
92 LocationSummary* locs =
new (zone)
100 const bool needs_writable_inputs =
101 (((element_size_ == 1) && !unboxed_inputs_) ||
102 ((element_size_ == 16) && unboxed_inputs_));
104 needs_writable_inputs
108 needs_writable_inputs
115 length()->definition()->OriginalDefinition()->AsConstant()));
126static inline intptr_t SizeOfMemoryCopyElements(intptr_t
element_size) {
127 return Utils::Minimum<intptr_t>(
element_size, compiler::target::kWordSize);
132 compiler::Label*
done) {
133 const intptr_t mov_size = SizeOfMemoryCopyElements(element_size_);
141 __ SmiUntag(length_reg);
142 }
else if (shift > 0) {
143 __ shll(length_reg, compiler::Immediate(shift));
151 compiler::Label*
done,
152 compiler::Label* copy_forwards) {
153 const intptr_t mov_size = SizeOfMemoryCopyElements(element_size_);
154 const bool reversed = copy_forwards !=
nullptr;
158 __ BranchIfZero(length_reg,
done);
162 __ leal(
ESI, compiler::Address(src_reg, length_reg,
scale, -mov_size));
163 __ CompareRegisters(dest_reg,
ESI);
169 compiler::Address(dest_reg, length_reg,
scale, -mov_size));
173 __ movl(
ESI, src_reg);
193void MemoryCopyInstr::EmitComputeStartPointer(FlowGraphCompiler*
compiler,
200 if (array_rep != kTagged) {
211 case kOneByteStringCid:
215 case kTwoByteStringCid:
224 ASSERT(start_loc.IsRegister() || start_loc.IsConstant());
225 if (start_loc.IsConstant()) {
226 const auto& constant = start_loc.constant();
227 ASSERT(constant.IsInteger());
228 const int64_t start_value = Integer::Cast(constant).AsInt64Value();
230 Utils::MulWithWrapAround<intptr_t>(start_value, element_size_),
offset);
231 __ leal(payload_reg, compiler::Address(array_reg, add_value));
235 const Register start_reg = start_loc.reg();
236 bool index_unboxed = unboxed_inputs_;
239 if (element_size_ == 1 && !index_unboxed) {
241 __ SmiUntag(start_reg);
242 index_unboxed =
true;
243 }
else if (element_size_ == 16 && index_unboxed) {
246 __ SmiTag(start_reg);
247 index_unboxed =
false;
250 __ leal(payload_reg, compiler::Address(array_reg, start_reg,
scale,
offset));
253LocationSummary* CalculateElementAddressInstr::MakeLocationSummary(
256 const intptr_t kNumInputs = 3;
257 const intptr_t kNumTemps = 0;
258 auto*
const summary =
new (zone)
274void CalculateElementAddressInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
278 const Register result_reg = locs()->out(0).reg();
280 if (index_loc.IsConstant()) {
281 if (offset_loc.IsConstant()) {
282 ASSERT_EQUAL(Smi::Cast(index_loc.constant()).Value(), 0);
283 ASSERT(Smi::Cast(offset_loc.constant()).Value() != 0);
285 const int32_t offset_value = Smi::Cast(offset_loc.constant()).Value();
286 __ leal(result_reg, compiler::Address(base_reg, offset_value));
290 const int32_t scaled_index =
291 Smi::Cast(index_loc.constant()).Value() *
index_scale();
292 __ leal(result_reg, compiler::Address(base_reg, offset_loc.reg(),
TIMES_1,
296 Register index_reg = index_loc.reg();
305 __ MoveAndSmiTagRegister(result_reg, index_reg);
306 index_reg = result_reg;
307 index_unboxed =
false;
310 if (offset_loc.IsConstant()) {
311 const int32_t offset_value = Smi::Cast(offset_loc.constant()).Value();
313 compiler::Address(base_reg, index_reg,
scale, offset_value));
317 __ leal(result_reg, compiler::Address(base_reg, index_reg,
scale,
319 __ AddRegisters(result_reg, offset_loc.reg());
324LocationSummary* MoveArgumentInstr::MakeLocationSummary(Zone* zone,
326 const intptr_t kNumInputs = 1;
327 const intptr_t kNumTemps = 0;
328 LocationSummary* locs =
new (zone)
335void MoveArgumentInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
340 if (
value.IsConstant()) {
341 __ Store(
value.constant(), dst);
350 const intptr_t kNumInputs = 1;
351 const intptr_t kNumTemps = 0;
352 LocationSummary*
locs =
new (zone)
366 if (
compiler->parsed_function().function().IsAsyncFunction() ||
367 compiler->parsed_function().function().IsAsyncGenerator()) {
369 const Code& stub = GetReturnStub(
compiler);
374 if (!
compiler->flow_graph().graph_entry()->NeedsFrame()) {
380 __ Comment(
"Stack Check");
381 compiler::Label
done;
382 const intptr_t fp_sp_dist =
383 (compiler::target::frame_layout.first_local_from_fp + 1 -
389 __ cmpl(
EDI, compiler::Immediate(fp_sp_dist));
402 bool return_in_st0 =
false;
407 return_in_st0 =
true;
426 const Register old_exit_through_ffi_reg = tmp;
428 __ popl(old_exit_frame_reg);
434 compiler::Address(
THR, compiler::target::Thread::top_resource_offset()),
437 __ movl(old_exit_through_ffi_reg, vm_tag_reg);
443 __ TransitionGeneratedToNative(vm_tag_reg, old_exit_frame_reg,
444 old_exit_through_ffi_reg,
451 .SizeInBytes() == 8) {
453 __ fldl(compiler::Address(
SPREG, -8));
456 __ flds(compiler::Address(
SPREG, -4));
465#if defined(DART_TARGET_OS_FUCHSIA) && defined(USING_SHADOW_CALL_STACK)
476LocationSummary* LoadLocalInstr::MakeLocationSummary(Zone* zone,
478 const intptr_t kNumInputs = 0;
479 const intptr_t stack_index =
480 compiler::target::frame_layout.FrameSlotForVariable(&
local());
486void LoadLocalInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
491LocationSummary* StoreLocalInstr::MakeLocationSummary(Zone* zone,
493 const intptr_t kNumInputs = 1;
498void StoreLocalInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
502 __ movl(compiler::Address(
503 EBP, compiler::target::FrameOffsetInBytesForVariable(&
local())),
507LocationSummary* ConstantInstr::MakeLocationSummary(Zone* zone,
509 const intptr_t kNumInputs = 0;
514 LocationSummary::kNoCall);
517void ConstantInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
521 if (
out.IsRegister()) {
530 intptr_t pair_index) {
531 if (destination.IsRegister()) {
536 if (value_.IsSmi() &&
542 __ movl(destination.reg(),
544 : Utils::High32Bits(v)));
546 ASSERT(representation() == kTagged);
547 __ LoadObjectSafely(destination.reg(), value_);
549 }
else if (destination.IsFpuRegister()) {
550 switch (representation()) {
552 __ LoadSImmediate(destination.fpu_reg(),
553 static_cast<float>(Double::Cast(value_).value()));
555 case kUnboxedDouble: {
556 const double value_as_double = Double::Cast(value_).value();
560 __ LoadObject(
EAX, value_);
561 __ movsd(destination.fpu_reg(),
565 __ xorps(destination.fpu_reg(), destination.fpu_reg());
571 case kUnboxedFloat64x2:
572 __ LoadQImmediate(destination.fpu_reg(),
573 Float64x2::Cast(value_).value());
575 case kUnboxedFloat32x4:
576 __ LoadQImmediate(destination.fpu_reg(),
577 Float32x4::Cast(value_).value());
579 case kUnboxedInt32x4:
580 __ LoadQImmediate(destination.fpu_reg(), Int32x4::Cast(value_).value());
585 }
else if (destination.IsDoubleStackSlot()) {
586 const double value_as_double = Double::Cast(value_).value();
590 __ LoadObject(
EAX, value_);
599 }
else if (destination.IsQuadStackSlot()) {
600 switch (representation()) {
601 case kUnboxedFloat64x2:
604 case kUnboxedFloat32x4:
607 case kUnboxedInt32x4:
615 ASSERT(destination.IsStackSlot());
622 : Utils::High32Bits(v)));
623 }
else if (representation() == kUnboxedFloat) {
625 bit_cast<int32_t, float>(Double::Cast(value_).
value());
629 ASSERT(representation() == kTagged);
632 compiler::Immediate(
static_cast<int32_t
>(value_.ptr())));
635 __ LoadObjectSafely(
EAX, value_);
643LocationSummary* UnboxedConstantInstr::MakeLocationSummary(Zone* zone,
645 const bool is_unboxed_int =
648 compiler::target::kWordSize);
649 const intptr_t kNumInputs = 0;
650 const intptr_t kNumTemps =
652 LocationSummary* locs =
new (zone)
660 if (kNumTemps == 1) {
666void UnboxedConstantInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
668 if (!locs()->
out(0).IsInvalid()) {
673LocationSummary* AssertAssignableInstr::MakeLocationSummary(Zone* zone,
676 const intptr_t kNumTemps = 0;
677 LocationSummary* summary =
new (zone)
692void AssertBooleanInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
693 ASSERT(locs()->always_calls());
695 auto object_store =
compiler->isolate_group()->object_store();
696 const auto& assert_boolean_stub =
699 compiler::Label
done;
705 UntaggedPcDescriptors::kOther, locs(),
730LocationSummary* EqualityCompareInstr::MakeLocationSummary(Zone* zone,
732 const intptr_t kNumInputs = 2;
733 if (operation_cid() == kMintCid) {
734 const intptr_t kNumTemps = 0;
735 LocationSummary* locs =
new (zone)
744 if (operation_cid() == kDoubleCid) {
745 const intptr_t kNumTemps = 0;
746 LocationSummary* locs =
new (zone)
753 if (operation_cid() == kSmiCid || operation_cid() == kIntegerCid) {
754 const intptr_t kNumTemps = 0;
755 LocationSummary* locs =
new (zone)
761 locs->set_in(1, locs->in(0).IsConstant()
771static void LoadValueCid(FlowGraphCompiler*
compiler,
774 compiler::Label* value_is_smi =
nullptr) {
775 compiler::Label
done;
776 if (value_is_smi ==
nullptr) {
777 __ movl(value_cid_reg, compiler::Immediate(kSmiCid));
780 if (value_is_smi ==
nullptr) {
785 __ LoadClassId(value_cid_reg, value_reg);
817static void EmitBranchOnCondition(
823 if (labels.fall_through == labels.false_label) {
825 __ j(true_condition, labels.true_label, jump_distance);
829 __ j(false_condition, labels.false_label, jump_distance);
832 if (labels.fall_through != labels.true_label) {
833 __ jmp(labels.true_label, jump_distance);
839 const LocationSummary& locs,
841 BranchLabels labels) {
846 Condition true_condition = TokenKindToIntCondition(kind);
848 if (
left.IsConstant()) {
850 true_condition = FlipCondition(true_condition);
851 }
else if (
right.IsConstant()) {
853 }
else if (
right.IsStackSlot()) {
858 return true_condition;
862 const LocationSummary& locs,
864 BranchLabels labels) {
869 Condition true_condition = TokenKindToIntCondition(kind);
871 if (
left.IsConstant()) {
874 static_cast<uword>(Integer::Cast(
left.constant()).AsInt64Value()));
875 true_condition = FlipCondition(true_condition);
876 }
else if (
right.IsConstant()) {
879 static_cast<uword>(Integer::Cast(
right.constant()).AsInt64Value()));
880 }
else if (
right.IsStackSlot()) {
885 return true_condition;
889 const LocationSummary& locs,
891 BranchLabels labels) {
893 PairLocation* left_pair = locs.in(0).AsPairLocation();
894 Register left1 = left_pair->At(0).reg();
895 Register left2 = left_pair->At(1).reg();
896 PairLocation* right_pair = locs.in(1).AsPairLocation();
897 Register right1 = right_pair->At(0).reg();
898 Register right2 = right_pair->At(1).reg();
899 compiler::Label
done;
901 __ cmpl(left1, right1);
904 __ cmpl(left2, right2);
906 Condition true_condition = TokenKindToIntCondition(kind);
907 return true_condition;
911 const LocationSummary& locs,
913 BranchLabels labels) {
914 PairLocation* left_pair = locs.in(0).AsPairLocation();
915 Register left1 = left_pair->At(0).reg();
916 Register left2 = left_pair->At(1).reg();
917 PairLocation* right_pair = locs.in(1).AsPairLocation();
918 Register right1 = right_pair->At(0).reg();
919 Register right2 = right_pair->At(1).reg();
944 __ cmpl(left2, right2);
945 __ j(hi_cond, labels.true_label);
946 __ j(FlipCondition(hi_cond), labels.false_label);
949 __ cmpl(left1, right1);
974 const LocationSummary& locs,
976 BranchLabels labels) {
982 Condition true_condition = TokenKindToDoubleCondition(kind);
983 compiler::Label* nan_result =
984 (true_condition ==
NOT_EQUAL) ? labels.true_label : labels.false_label;
986 return true_condition;
990 BranchLabels labels) {
995 if (operation_cid() == kSmiCid) {
996 return EmitSmiComparisonOp(
compiler, *locs(), kind(), labels);
997 }
else if (operation_cid() == kMintCid) {
998 return EmitUnboxedMintEqualityOp(
compiler, *locs(), kind(), labels);
999 }
else if (operation_cid() == kIntegerCid) {
1000 return EmitWordComparisonOp(
compiler, *locs(), kind(), labels);
1002 ASSERT(operation_cid() == kDoubleCid);
1003 return EmitDoubleComparisonOp(
compiler, *locs(), kind(), labels);
1008 compiler::Label is_true, is_false;
1009 BranchLabels labels = {&is_true, &is_false, &is_false};
1012 EmitBranchOnCondition(
compiler, true_condition, labels,
1017 compiler::Label
done;
1027 BranchInstr* branch) {
1028 BranchLabels labels =
compiler->CreateBranchLabels(branch);
1031 EmitBranchOnCondition(
compiler, true_condition, labels);
1035LocationSummary* TestSmiInstr::MakeLocationSummary(Zone* zone,
bool opt)
const {
1036 const intptr_t kNumInputs = 2;
1037 const intptr_t kNumTemps = 0;
1038 LocationSummary* locs =
new (zone)
1048 BranchLabels labels) {
1051 if (
right.IsConstant()) {
1053 const int32_t imm =
static_cast<int32_t
>(
right.constant().ptr());
1054 __ testl(
left, compiler::Immediate(imm));
1059 return true_condition;
1062LocationSummary* TestCidsInstr::MakeLocationSummary(Zone* zone,
1064 const intptr_t kNumInputs = 1;
1065 const intptr_t kNumTemps = 1;
1066 LocationSummary* locs =
new (zone)
1075 BranchLabels labels) {
1076 ASSERT((kind() == Token::kIS) || (kind() == Token::kISNOT));
1077 Register val_reg = locs()->in(0).reg();
1078 Register cid_reg = locs()->temp(0).reg();
1080 compiler::Label* deopt =
1082 ?
compiler->AddDeoptStub(deopt_id(), ICData::kDeoptTestCids)
1085 const intptr_t true_result = (kind() == Token::kIS) ? 1 : 0;
1090 __ j(
ZERO,
result ? labels.true_label : labels.false_label);
1091 __ LoadClassId(cid_reg, val_reg);
1092 for (intptr_t i = 2; i <
data.length(); i += 2) {
1093 const intptr_t test_cid =
data[i];
1094 ASSERT(test_cid != kSmiCid);
1096 __ cmpl(cid_reg, compiler::Immediate(test_cid));
1097 __ j(
EQUAL,
result ? labels.true_label : labels.false_label);
1100 if (deopt ==
nullptr) {
1104 compiler::Label*
target =
result ? labels.false_label : labels.true_label;
1105 if (
target != labels.fall_through) {
1116LocationSummary* RelationalOpInstr::MakeLocationSummary(Zone* zone,
1118 const intptr_t kNumInputs = 2;
1119 const intptr_t kNumTemps = 0;
1120 if (operation_cid() == kMintCid) {
1121 const intptr_t kNumTemps = 0;
1122 LocationSummary* locs =
new (zone)
1131 if (operation_cid() == kDoubleCid) {
1132 LocationSummary* summary =
new (zone)
1139 ASSERT(operation_cid() == kSmiCid);
1140 LocationSummary* summary =
new (zone)
1145 summary->set_in(1, summary->in(0).IsConstant()
1153 BranchLabels labels) {
1154 if (operation_cid() == kSmiCid) {
1155 return EmitSmiComparisonOp(
compiler, *locs(), kind(), labels);
1156 }
else if (operation_cid() == kMintCid) {
1157 return EmitUnboxedMintComparisonOp(
compiler, *locs(), kind(), labels);
1159 ASSERT(operation_cid() == kDoubleCid);
1160 return EmitDoubleComparisonOp(
compiler, *locs(), kind(), labels);
1172 __ movl(
EDX, compiler::Immediate(argc_tag));
1179 stub = &StubCode::CallBootstrapNative();
1181 stub = &StubCode::CallAutoScopeNative();
1183 stub = &StubCode::CallNoScopeNative();
1185 const compiler::ExternalLabel label(
1187 __ movl(
ECX, compiler::Immediate(label.address()));
1190 compiler->GenerateNonLazyDeoptableStubCall(
1191 source(), *stub, UntaggedPcDescriptors::kOther,
locs());
1197#define R(r) (1 << r)
1200 bool is_optimizing)
const {
1203 return MakeLocationSummaryInternal(
1204 zone, is_optimizing,
1228 intptr_t stack_required = marshaller_.RequiredStackSpaceInBytes();
1232 stack_required += compiler::target::kWordSize;
1235 __ pushl(compiler::Immediate(0));
1238 __ LoadObject(
CODE_REG, Object::null_object());
1239 __ EnterDartFrame(0);
1243 __ ReserveAlignedFrameSpace(stack_required);
1244#if defined(USING_MEMORY_SANITIZER)
1249 locs()->temp(1).reg());
1255 __ movl(compiler::Address(
SPREG, marshaller_.RequiredStackSpaceInBytes()),
1260 __ Comment(is_leaf_ ?
"Leaf Call" :
"Call");
1264#if !defined(PRODUCT)
1267 __ movl(compiler::Address(
1268 THR, compiler::target::Thread::top_exit_frame_info_offset()),
1275#if !defined(PRODUCT)
1277 compiler::Immediate(compiler::target::Thread::vm_tag_dart_id()));
1278 __ movl(compiler::Address(
1279 THR, compiler::target::Thread::top_exit_frame_info_offset()),
1280 compiler::Immediate(0));
1286 compiler::Label get_pc;
1289 UntaggedPcDescriptors::Kind::kOther,
locs(),
1303 THR, compiler::target::Thread::
1304 call_native_through_safepoint_entry_point_offset()));
1311 __ Comment(
"Check Dart_Handle for Error.");
1312 compiler::Label not_error;
1315 compiler::target::LocalHandle::ptr_offset()));
1316 __ BranchIfSmi(temp, ¬_error);
1317 __ LoadClassId(temp, temp);
1322 __ Comment(
"Slow path: call Dart_PropagateError through stub.");
1325 THR, compiler::target::Thread::
1326 call_native_through_safepoint_entry_point_offset()));
1328 __ movl(
EAX, compiler::Address(
1329 THR, kPropagateErrorRuntimeEntry.OffsetFromThread()));
1336 __ Bind(¬_error);
1347 __ subl(
SPREG, compiler::Immediate(compiler::target::kWordSize));
1368 compiler::Address(
SPREG, marshaller_.RequiredStackSpaceInBytes()));
1371 __ LeaveDartFrame();
1379void NativeEntryInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
1390#if defined(DART_TARGET_OS_FUCHSIA) && defined(USING_SHADOW_CALL_STACK)
1405 compiler::Address(
THR, compiler::target::Thread::top_resource_offset()));
1407 compiler::Address(
THR, compiler::target::Thread::top_resource_offset()),
1408 compiler::Immediate(0));
1410 __ pushl(compiler::Address(
1411 THR, compiler::target::Thread::exit_through_ffi_offset()));
1414 __ pushl(compiler::Address(
1415 THR, compiler::target::Thread::top_exit_frame_info_offset()));
1419 __ EmitEntryFrameVerification();
1422 __ TransitionNativeToGenerated(
EAX,
false);
1427 const Function& target_function = marshaller_.dart_signature();
1428 const intptr_t callback_id = target_function.FfiCallbackId();
1429 __ movl(
EAX, compiler::Address(
1430 THR, compiler::target::Thread::isolate_group_offset()));
1431 __ movl(
EAX, compiler::Address(
1432 EAX, compiler::target::IsolateGroup::object_store_offset()));
1435 EAX, compiler::target::ObjectStore::ffi_callback_code_offset()));
1436 __ movl(
EAX, compiler::FieldAddress(
1437 EAX, compiler::target::GrowableObjectArray::data_offset()));
1439 EAX, compiler::target::Array::data_offset() +
1440 callback_id * compiler::target::kWordSize));
1443 __ movl(compiler::Address(
FPREG,
1454 THR, compiler::target::Thread::invoke_dart_code_stub_offset()));
1455 __ pushl(compiler::FieldAddress(
1456 EAX, compiler::target::Code::entry_point_offset()));
1459 FunctionEntryInstr::EmitNativeCode(
compiler);
1462#define R(r) (1 << r)
1466 bool is_optimizing)
const {
1469 static_assert(saved_fp < temp0,
"Unexpected ordering of registers in set.");
1479 __ MoveRegister(saved_fp,
FPREG);
1480 const intptr_t frame_space = native_calling_convention_.
StackTopInBytes();
1481 __ EnterCFrame(frame_space);
1487 __ CallCFunction(target_address);
1489 compiler::Immediate(VMTag::kDartTagId));
1494LocationSummary* OneByteStringFromCharCodeInstr::MakeLocationSummary(
1497 const intptr_t kNumInputs = 1;
1503void OneByteStringFromCharCodeInstr::EmitNativeCode(
1507 __ movl(
result, compiler::Immediate(
1511 TIMES_HALF_WORD_SIZE,
1515LocationSummary* StringToCharCodeInstr::MakeLocationSummary(Zone* zone,
1517 const intptr_t kNumInputs = 1;
1522void StringToCharCodeInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
1523 ASSERT(cid_ == kOneByteStringCid);
1526 compiler::Label is_one,
done;
1538LocationSummary* Utf8ScanInstr::MakeLocationSummary(Zone* zone,
1540 const intptr_t kNumInputs = 5;
1541 const intptr_t kNumTemps = 0;
1542 LocationSummary* summary =
new (zone)
1553void Utf8ScanInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
1554 const Register bytes_reg = locs()->in(1).reg();
1555 const Register start_reg = locs()->in(2).reg();
1556 const Register end_reg = locs()->in(3).reg();
1557 const Register table_reg = locs()->in(4).reg();
1558 const Register size_reg = locs()->out(0).reg();
1560 const Register bytes_ptr_reg = start_reg;
1561 const Register flags_reg = end_reg;
1562 const Register temp_reg = bytes_reg;
1565 const intptr_t kBytesEndTempOffset = 1 * compiler::target::kWordSize;
1566 const intptr_t kBytesEndMinus16TempOffset = 0 * compiler::target::kWordSize;
1568 const intptr_t kSizeMask = 0x03;
1569 const intptr_t kFlagsMask = 0x3C;
1571 compiler::Label scan_ascii, ascii_loop, ascii_loop_in, nonascii_loop;
1572 compiler::Label rest, rest_loop, rest_loop_in,
done;
1575 __ LoadFromSlot(bytes_reg, bytes_reg, Slot::PointerBase_data());
1578 __ leal(bytes_ptr_reg, compiler::Address(bytes_reg, start_reg,
TIMES_1, 0));
1579 __ leal(temp_reg, compiler::Address(bytes_reg, end_reg,
TIMES_1, 0));
1581 __ leal(temp_reg, compiler::Address(temp_reg, -16));
1585 __ xorl(size_reg, size_reg);
1586 __ xorl(flags_reg, flags_reg);
1595 __ Bind(&ascii_loop);
1596 __ addl(bytes_ptr_reg, compiler::Immediate(16));
1597 __ Bind(&ascii_loop_in);
1600 __ cmpl(bytes_ptr_reg, compiler::Address(
ESP, kBytesEndMinus16TempOffset));
1607 __ movups(vector_reg, compiler::Address(bytes_ptr_reg, 0));
1608 __ pmovmskb(temp_reg, vector_reg);
1609 __ bsfl(temp_reg, temp_reg);
1613 __ addl(bytes_ptr_reg, temp_reg);
1614 __ addl(size_reg, bytes_ptr_reg);
1617 __ movzxb(temp_reg, compiler::Address(bytes_ptr_reg, 0));
1620 __ Bind(&nonascii_loop);
1621 __ addl(bytes_ptr_reg, compiler::Immediate(1));
1624 __ movzxb(temp_reg, compiler::FieldAddress(
1626 compiler::target::OneByteString::data_offset()));
1627 __ orl(flags_reg, temp_reg);
1628 __ andl(temp_reg, compiler::Immediate(kSizeMask));
1629 __ addl(size_reg, temp_reg);
1632 __ cmpl(bytes_ptr_reg, compiler::Address(
ESP, kBytesEndTempOffset));
1636 __ movzxb(temp_reg, compiler::Address(bytes_ptr_reg, 0));
1637 __ testl(temp_reg, compiler::Immediate(0x80));
1641 __ Bind(&scan_ascii);
1642 __ subl(size_reg, bytes_ptr_reg);
1643 __ jmp(&ascii_loop_in);
1649 __ addl(size_reg, bytes_ptr_reg);
1652 __ Bind(&rest_loop);
1655 __ movzxb(temp_reg, compiler::Address(bytes_ptr_reg, 0));
1656 __ addl(bytes_ptr_reg, compiler::Immediate(1));
1659 __ movzxb(temp_reg, compiler::FieldAddress(
1661 compiler::target::OneByteString::data_offset()));
1662 __ orl(flags_reg, temp_reg);
1663 __ andl(temp_reg, compiler::Immediate(kSizeMask));
1664 __ addl(size_reg, temp_reg);
1667 __ Bind(&rest_loop_in);
1668 __ cmpl(bytes_ptr_reg, compiler::Address(
ESP, kBytesEndTempOffset));
1673 __ addl(
ESP, compiler::Immediate(2 * compiler::target::kWordSize));
1676 __ andl(flags_reg, compiler::Immediate(kFlagsMask));
1678 __ SmiTag(flags_reg);
1681 const Location decoder_location = locs()->in(0);
1682 if (decoder_location.IsStackSlot()) {
1684 decoder_reg = temp_reg;
1686 decoder_reg = decoder_location.reg();
1688 const auto scan_flags_field_offset = scan_flags_field_.offset_in_bytes();
1689 __ orl(compiler::FieldAddress(decoder_reg, scan_flags_field_offset),
1693LocationSummary* LoadIndexedInstr::MakeLocationSummary(Zone* zone,
1700 const intptr_t kNumInputs = 2;
1701 const intptr_t kNumTemps = 0;
1702 LocationSummary* locs =
new (zone)
1707 const bool need_writable_index_register =
index_scale() == 1;
1708 const bool can_be_constant =
1716 : (need_writable_index_register ?
Location::WritableRegister()
1721 if (rep == kUnboxedInt64) {
1735void LoadIndexedInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
1740 bool index_unboxed = index_unboxed_;
1742 if (
index.IsRegister()) {
1744 index_unboxed =
true;
1750 compiler::Address element_address =
1754 :
compiler::Assembler::ElementAddressForIntIndex(
1762 if (rep == kUnboxedInt64) {
1763 ASSERT(locs()->
out(0).IsPairLocation());
1764 PairLocation* result_pair = locs()->out(0).AsPairLocation();
1765 const Register result_lo = result_pair->At(0).reg();
1766 const Register result_hi = result_pair->At(1).reg();
1767 __ movl(result_lo, element_address);
1773 :
compiler::Assembler::ElementAddressForIntIndex(
1776 __ movl(result_hi, element_address);
1783 if (rep == kUnboxedFloat) {
1785 }
else if (rep == kUnboxedDouble) {
1788 ASSERT(rep == kUnboxedInt32x4 || rep == kUnboxedFloat32x4 ||
1789 rep == kUnboxedFloat64x2);
1790 __ movups(
result, element_address);
1801LocationSummary* StoreIndexedInstr::MakeLocationSummary(Zone* zone,
1808 const intptr_t kNumInputs = 3;
1809 const intptr_t kNumTemps =
1811 LocationSummary* locs =
new (zone)
1816 const bool need_writable_index_register =
index_scale() == 1;
1817 const bool can_be_constant =
1824 : (need_writable_index_register ?
Location::WritableRegister()
1829 if (rep == kUnboxedUint8 || rep == kUnboxedInt8) {
1833 }
else if (rep == kUnboxedInt64) {
1842 }
else if (
class_id() == kArrayCid) {
1856void StoreIndexedInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
1861 bool index_unboxed = index_unboxed_;
1864 index_unboxed =
true;
1866 compiler::Address element_address =
1870 :
compiler::Assembler::ElementAddressForIntIndex(
1878 ASSERT(rep == kUnboxedUint8);
1880 const Smi& constant = Smi::Cast(locs()->in(2).constant());
1885 }
else if (
value < 0) {
1888 __ movb(element_address, compiler::Immediate(
static_cast<int8_t
>(
value)));
1891 compiler::Label store_value, store_0xff;
1892 __ cmpl(
EAX, compiler::Immediate(0xFF));
1898 __ Bind(&store_0xff);
1899 __ movl(
EAX, compiler::Immediate(0xFF));
1900 __ Bind(&store_value);
1901 __ movb(element_address,
AL);
1904 if (rep == kUnboxedUint8 || rep == kUnboxedInt8) {
1906 const Smi& constant = Smi::Cast(locs()->in(2).constant());
1907 __ movb(element_address,
1908 compiler::Immediate(
static_cast<int8_t
>(constant.Value())));
1911 __ movb(element_address,
AL);
1913 }
else if (rep == kUnboxedInt64) {
1914 ASSERT(locs()->in(2).IsPairLocation());
1915 PairLocation* value_pair = locs()->in(2).AsPairLocation();
1916 const Register value_lo = value_pair->At(0).reg();
1917 const Register value_hi = value_pair->At(1).reg();
1918 __ movl(element_address, value_lo);
1924 :
compiler::Assembler::ElementAddressForIntIndex(
1927 __ movl(element_address, value_hi);
1933 if (rep == kUnboxedFloat) {
1934 __ movss(element_address, locs()->in(2).fpu_reg());
1935 }
else if (rep == kUnboxedDouble) {
1936 __ movsd(element_address, locs()->in(2).fpu_reg());
1938 ASSERT(rep == kUnboxedInt32x4 || rep == kUnboxedFloat32x4 ||
1939 rep == kUnboxedFloat64x2);
1940 __ movups(element_address, locs()->in(2).fpu_reg());
1942 }
else if (
class_id() == kArrayCid) {
1946 Register slot = locs()->temp(0).reg();
1947 Register scratch = locs()->temp(1).reg();
1948 __ leal(slot, element_address);
1949 __ StoreIntoArray(
array, slot,
value, CanValueBeSmi(), scratch);
1951 const Object& constant = locs()->in(2).constant();
1952 __ StoreObjectIntoObjectNoBarrier(
array, element_address, constant);
1955 __ StoreIntoObjectNoBarrier(
array, element_address,
value);
1964LocationSummary* GuardFieldClassInstr::MakeLocationSummary(Zone* zone,
1966 const intptr_t kNumInputs = 1;
1971 const bool emit_full_guard = !opt || (field_cid ==
kIllegalCid);
1972 const bool needs_value_cid_temp_reg =
1973 (value_cid ==
kDynamicCid) && (emit_full_guard || (field_cid != kSmiCid));
1974 const bool needs_field_temp_reg = emit_full_guard;
1976 intptr_t num_temps = 0;
1977 if (needs_value_cid_temp_reg) {
1980 if (needs_field_temp_reg) {
1984 LocationSummary* summary =
new (zone)
1988 for (intptr_t i = 0; i < num_temps; i++) {
1995void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
1996 ASSERT(compiler::target::UntaggedObject::kClassIdTagSize == 20);
1997 ASSERT(
sizeof(UntaggedField::guarded_cid_) == 4);
1998 ASSERT(
sizeof(UntaggedField::is_nullable_) == 4);
2008 const bool emit_full_guard =
2011 const bool needs_value_cid_temp_reg =
2012 (value_cid ==
kDynamicCid) && (emit_full_guard || (field_cid != kSmiCid));
2014 const bool needs_field_temp_reg = emit_full_guard;
2016 const Register value_reg = locs()->in(0).reg();
2019 needs_value_cid_temp_reg ? locs()->temp(0).reg() :
kNoRegister;
2021 const Register field_reg = needs_field_temp_reg
2022 ? locs()->temp(locs()->temp_count() - 1).reg()
2025 compiler::Label
ok, fail_label;
2027 compiler::Label* deopt =
nullptr;
2029 deopt =
compiler->AddDeoptStub(deopt_id(), ICData::kDeoptGuardField);
2032 compiler::Label*
fail = (deopt !=
nullptr) ? deopt : &fail_label;
2034 if (emit_full_guard) {
2037 compiler::FieldAddress field_cid_operand(field_reg,
2039 compiler::FieldAddress field_nullability_operand(
2043 LoadValueCid(
compiler, value_cid_reg, value_reg);
2044 __ cmpl(value_cid_reg, field_cid_operand);
2046 __ cmpl(value_cid_reg, field_nullability_operand);
2047 }
else if (value_cid ==
kNullCid) {
2050 __ cmpl(field_nullability_operand, compiler::Immediate(value_cid));
2054 __ cmpl(field_cid_operand, compiler::Immediate(value_cid));
2064 if (!
field().needs_length_check()) {
2067 __ cmpl(field_cid_operand, compiler::Immediate(
kIllegalCid));
2074 __ movl(field_cid_operand, value_cid_reg);
2075 __ movl(field_nullability_operand, value_cid_reg);
2078 __ movl(field_cid_operand, compiler::Immediate(value_cid));
2079 __ movl(field_nullability_operand, compiler::Immediate(value_cid));
2085 if (deopt ==
nullptr) {
2092 __ pushl(field_reg);
2093 __ pushl(value_reg);
2095 __ CallRuntime(kUpdateFieldCidRuntimeEntry, 2);
2102 ASSERT(deopt !=
nullptr);
2110 if (field_cid != kSmiCid) {
2112 __ LoadClassId(value_cid_reg, value_reg);
2113 __ cmpl(value_cid_reg, compiler::Immediate(field_cid));
2118 if (field_cid != kSmiCid) {
2119 __ cmpl(value_cid_reg, compiler::Immediate(
kNullCid));
2121 const compiler::Immediate& raw_null =
2122 compiler::Immediate(
static_cast<intptr_t
>(
Object::null()));
2123 __ cmpl(value_reg, raw_null);
2127 }
else if (value_cid == field_cid) {
2132 ASSERT(value_cid != nullability);
2139LocationSummary* GuardFieldLengthInstr::MakeLocationSummary(Zone* zone,
2141 const intptr_t kNumInputs = 1;
2143 const intptr_t kNumTemps = 3;
2144 LocationSummary* summary =
new (zone)
2153 LocationSummary* summary =
new (zone)
2161void GuardFieldLengthInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
2166 compiler::Label* deopt =
2168 ?
compiler->AddDeoptStub(deopt_id(), ICData::kDeoptGuardField)
2171 const Register value_reg = locs()->in(0).reg();
2175 const Register field_reg = locs()->temp(0).reg();
2176 const Register offset_reg = locs()->temp(1).reg();
2177 const Register length_reg = locs()->temp(2).reg();
2185 compiler::FieldAddress(
2187 __ movl(length_reg, compiler::FieldAddress(
2190 __ cmpl(offset_reg, compiler::Immediate(0));
2197 __ cmpl(length_reg, compiler::Address(value_reg, offset_reg,
TIMES_1, 0));
2199 if (deopt ==
nullptr) {
2202 __ pushl(field_reg);
2203 __ pushl(value_reg);
2205 __ CallRuntime(kUpdateFieldCidRuntimeEntry, 2);
2215 ASSERT(
field().guarded_list_length_in_object_offset() !=
2218 __ cmpl(compiler::FieldAddress(
2219 value_reg,
field().guarded_list_length_in_object_offset()),
2225LocationSummary* StoreStaticFieldInstr::MakeLocationSummary(Zone* zone,
2227 LocationSummary* locs =
2235void StoreStaticFieldInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
2237 Register temp = locs()->temp(0).reg();
2243 THR, compiler::target::Thread::field_table_values_offset()));
2246 compiler::Address(temp, compiler::target::FieldTable::OffsetOf(
field())),
2250LocationSummary* InstanceOfInstr::MakeLocationSummary(Zone* zone,
2252 const intptr_t kNumInputs = 3;
2253 const intptr_t kNumTemps = 0;
2254 LocationSummary* summary =
new (zone)
2266void InstanceOfInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
2279 const intptr_t kNumInputs = 2;
2280 const intptr_t kNumTemps = 0;
2281 LocationSummary*
locs =
new (zone)
2292static void InlineArrayAllocation(FlowGraphCompiler*
compiler,
2293 intptr_t num_elements,
2294 compiler::Label* slow_path,
2295 compiler::Label*
done) {
2296 const int kInlineArraySize = 12;
2301 __ TryAllocateArray(kArrayCid, instance_size, slow_path,
2308 __ StoreIntoObjectNoBarrier(
2315 __ StoreIntoObjectNoBarrier(
2326 if (num_elements > 0) {
2327 const intptr_t array_size = instance_size -
sizeof(UntaggedArray);
2328 const compiler::Immediate& raw_null =
2329 compiler::Immediate(
static_cast<intptr_t
>(
Object::null()));
2331 sizeof(UntaggedArray)));
2332 if (array_size < (kInlineArraySize *
kWordSize)) {
2333 intptr_t current_offset = 0;
2334 __ movl(
EBX, raw_null);
2335 while (current_offset < array_size) {
2337 compiler::Address(
EDI, current_offset),
2342 compiler::Label init_loop;
2343 __ Bind(&init_loop);
2345 compiler::Address(
EDI, 0),
2346 Object::null_object());
2356 compiler::Label slow_path,
done;
2357 if (!FLAG_use_slow_path && FLAG_inline_alloc) {
2368 __ Bind(&slow_path);
2369 auto object_store =
compiler->isolate_group()->object_store();
2370 const auto& allocate_array_stub =
2382 const intptr_t kNumInputs = 0;
2383 const intptr_t kNumTemps = 2;
2384 LocationSummary*
locs =
new (zone) LocationSummary(
// Slow path for AllocateUninitializedContextInstr: when inline allocation
// fails, this saved-register slow path loads the variable count into EDX,
// calls the AllocateContext stub (result expected in EAX per the ASSERT),
// restores live registers, and jumps back to the exit label.
// NOTE(review): fragment is truncated — original line numbers jump
// (2397→2399, 2404→2408, 2419→…); the SaveLiveRegisters call, the
// GenerateStubCall invocation head, and the closing braces are missing
// from this extract.
2392class AllocateContextSlowPath
2393    :
 public TemplateSlowPathCode<AllocateUninitializedContextInstr> {
  // Takes a non-owning pointer to the instruction it services.
2395  explicit AllocateContextSlowPath(
2396      AllocateUninitializedContextInstr* instruction)
2397      : TemplateSlowPathCode(instruction) {}
2399  virtual void EmitNativeCode(FlowGraphCompiler*
 compiler) {
2400    __ Comment(
"AllocateContextSlowPath");
2401    __ Bind(entry_label());
2403    LocationSummary* locs = instruction()->locs();
    // Output register must not be in the live set we save/restore.
2404    ASSERT(!locs->live_registers()->Contains(locs->out(0)));
2408    auto slow_path_env =
 compiler->SlowPathEnvironmentFor(
2410    ASSERT(slow_path_env !=
 nullptr);
    // EDX carries the context-variable count into the stub.
2412    __ movl(EDX, compiler::Immediate(instruction()->num_context_variables()));
2414                               StubCode::AllocateContext(),
2415                               UntaggedPcDescriptors::kOther, locs,
2416                               instruction()->deopt_id(), slow_path_env);
    // Stub calling convention: allocated context comes back in EAX.
2417    ASSERT(instruction()->locs()->
out(0).reg() == EAX);
2418    compiler->RestoreLiveRegisters(instruction()->locs());
2419    __ jmp(exit_label());
2430 AllocateContextSlowPath* slow_path =
new AllocateContextSlowPath(
this);
2431 compiler->AddSlowPathCode(slow_path);
2434 if (!FLAG_use_slow_path && FLAG_inline_alloc) {
2435 __ TryAllocateArray(kContextCid, instance_size, slow_path->entry_label(),
2445 __ Jump(slow_path->entry_label());
2448 __ Bind(slow_path->exit_label());
2453 const intptr_t kNumInputs = 0;
2454 const intptr_t kNumTemps = 1;
2455 LocationSummary*
locs =
new (zone)
2467 compiler->GenerateStubCall(
source(), StubCode::AllocateContext(),
2472LocationSummary* CloneContextInstr::MakeLocationSummary(Zone* zone,
2474 const intptr_t kNumInputs = 1;
2475 const intptr_t kNumTemps = 0;
2476 LocationSummary* locs =
new (zone)
2483void CloneContextInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
2488 UntaggedPcDescriptors::kOther, locs(),
2492LocationSummary* CatchBlockEntryInstr::MakeLocationSummary(Zone* zone,
2497void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
2499 compiler->AddExceptionHandler(
this);
2506 const intptr_t fp_sp_dist =
2507 (compiler::target::frame_layout.first_local_from_fp + 1 -
2511 __ leal(
ESP, compiler::Address(
EBP, fp_sp_dist));
2514 if (raw_exception_var_ !=
nullptr) {
2515 __ movl(compiler::Address(
EBP,
2516 compiler::target::FrameOffsetInBytesForVariable(
2517 raw_exception_var_)),
2520 if (raw_stacktrace_var_ !=
nullptr) {
2521 __ movl(compiler::Address(
EBP,
2522 compiler::target::FrameOffsetInBytesForVariable(
2523 raw_stacktrace_var_)),
2529LocationSummary* CheckStackOverflowInstr::MakeLocationSummary(Zone* zone,
2531 const intptr_t kNumInputs = 0;
2532 const intptr_t kNumTemps = opt ? 0 : 1;
2533 LocationSummary* summary =
new (zone) LocationSummary(
2541class CheckStackOverflowSlowPath
2542 :
public TemplateSlowPathCode<CheckStackOverflowInstr> {
2544 static constexpr intptr_t kNumSlowPathArgs = 0;
2546 explicit CheckStackOverflowSlowPath(CheckStackOverflowInstr* instruction)
2547 : TemplateSlowPathCode(instruction) {}
2549 virtual void EmitNativeCode(FlowGraphCompiler*
compiler) {
2550 if (
compiler->isolate_group()->use_osr() && osr_entry_label()->IsLinked()) {
2551 __ Comment(
"CheckStackOverflowSlowPathOsr");
2552 __ Bind(osr_entry_label());
2553 __ movl(compiler::Address(THR, Thread::stack_overflow_flags_offset()),
2554 compiler::Immediate(Thread::kOsrRequest));
2556 __ Comment(
"CheckStackOverflowSlowPath");
2557 __ Bind(entry_label());
2558 compiler->SaveLiveRegisters(instruction()->locs());
2562 Environment*
env =
compiler->SlowPathEnvironmentFor(
2566 __ CallRuntime(kInterruptOrStackOverflowRuntimeEntry, kNumSlowPathArgs);
2568 instruction()->
source(), instruction()->deopt_id(),
2569 UntaggedPcDescriptors::kOther, instruction()->locs(),
env);
2572 instruction()->in_loop()) {
2574 compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kOsrEntry,
2575 instruction()->deopt_id(),
2576 InstructionSource());
2578 compiler->pending_deoptimization_env_ =
nullptr;
2579 compiler->RestoreLiveRegisters(instruction()->locs());
2580 __ jmp(exit_label());
2583 compiler::Label* osr_entry_label() {
2584 ASSERT(IsolateGroup::Current()->use_osr());
2585 return &osr_entry_label_;
2589 compiler::Label osr_entry_label_;
2592void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
2593 CheckStackOverflowSlowPath* slow_path =
new CheckStackOverflowSlowPath(
this);
2594 compiler->AddSlowPathCode(slow_path);
2603 const intptr_t configured_optimization_counter_threshold =
2604 compiler->thread()->isolate_group()->optimization_counter_threshold();
2605 const int32_t threshold =
2606 configured_optimization_counter_threshold * (
loop_depth() + 1);
2607 __ incl(compiler::FieldAddress(
EDI, Function::usage_counter_offset()));
2608 __ cmpl(compiler::FieldAddress(
EDI, Function::usage_counter_offset()),
2609 compiler::Immediate(threshold));
2612 if (
compiler->ForceSlowPathForStackOverflow()) {
2615 __ jmp(slow_path->entry_label());
2617 __ Bind(slow_path->exit_label());
2620static void EmitSmiShiftLeft(FlowGraphCompiler*
compiler,
2621 BinarySmiOpInstr* shift_left) {
2622 const LocationSummary& locs = *shift_left->locs();
2626 compiler::Label* deopt =
2627 shift_left->CanDeoptimize()
2628 ?
compiler->AddDeoptStub(shift_left->deopt_id(),
2629 ICData::kDeoptBinarySmiOp)
2631 if (locs.in(1).IsConstant()) {
2632 const Object& constant = locs.in(1).constant();
2633 ASSERT(constant.IsSmi());
2635 const intptr_t kCountLimit = 0x1F;
2636 const intptr_t
value = Smi::Cast(constant).Value();
2637 ASSERT((0 < value) && (value < kCountLimit));
2638 if (shift_left->can_overflow()) {
2641 __ shll(
left, compiler::Immediate(1));
2646 Register temp = locs.temp(0).reg();
2648 __ shll(
left, compiler::Immediate(value));
2649 __ sarl(
left, compiler::Immediate(value));
2654 __ shll(
left, compiler::Immediate(value));
2660 Range* right_range = shift_left->right_range();
2661 if (shift_left->left()->BindsToConstant() && shift_left->can_overflow()) {
2664 const Object& obj = shift_left->left()->BoundConstant();
2666 const intptr_t left_int = Smi::Cast(obj).Value();
2667 if (left_int == 0) {
2668 __ cmpl(
right, compiler::Immediate(0));
2673 const bool right_needs_check =
2675 if (right_needs_check) {
2677 compiler::Immediate(
static_cast<int32_t
>(
Smi::New(max_right))));
2686 const bool right_needs_check =
2689 if (!shift_left->can_overflow()) {
2690 if (right_needs_check) {
2692 ASSERT(shift_left->CanDeoptimize());
2693 __ cmpl(
right, compiler::Immediate(0));
2696 compiler::Label
done, is_not_zero;
2702 __ Bind(&is_not_zero);
2711 if (right_needs_check) {
2712 ASSERT(shift_left->CanDeoptimize());
2718 Register temp = locs.temp(0).reg();
// Returns true iff `constant` is a Smi whose untagged integer value equals
// `value`. Used below to special-case shift-by-1 in BinarySmiOp summaries.
// NOTE(review): the closing brace is absent from this extract (line-number
// gap 2733→2736); the function is otherwise complete.
2732static bool IsSmiValue(
 const Object& constant, intptr_t value) {
2733  return constant.IsSmi() && (Smi::Cast(constant).Value() ==
 value);
2736LocationSummary* BinarySmiOpInstr::MakeLocationSummary(Zone* zone,
2738 const intptr_t kNumInputs = 2;
2739 if (
op_kind() == Token::kTRUNCDIV) {
2740 const intptr_t kNumTemps = 1;
2741 LocationSummary* summary =
new (zone)
2759 }
else if (
op_kind() == Token::kMOD) {
2760 const intptr_t kNumTemps = 1;
2761 LocationSummary* summary =
new (zone)
2770 }
else if ((
op_kind() == Token::kSHR) || (
op_kind() == Token::kUSHR)) {
2771 const intptr_t kNumTemps = 0;
2772 LocationSummary* summary =
new (zone)
2778 }
else if (
op_kind() == Token::kSHL) {
2781 const bool shiftBy1 =
2782 (right_constant !=
nullptr) &&
IsSmiValue(right_constant->value(), 1);
2783 const intptr_t kNumTemps = (
can_overflow() && !shiftBy1) ? 1 : 0;
2784 LocationSummary* summary =
new (zone)
2788 if (kNumTemps == 1) {
2794 const intptr_t kNumTemps = 0;
2795 LocationSummary* summary =
new (zone)
2799 if (constant !=
nullptr) {
// Shared emitter for 32-bit integer arithmetic where the right operand may
// be either a Register or an Immediate (hence the OperandType template
// parameter). Dispatches on the token kind; `deopt` is the branch target
// when the operation overflows — presumably jumped to after an overflow
// check in the elided case bodies (TODO confirm; those lines are missing).
// NOTE(review): heavily truncated fragment — parameters between line 2810
// and 2813 (op kind, left register) and all case bodies are absent here.
2809template <
typename OperandType>
2810static void EmitIntegerArithmetic(FlowGraphCompiler*
 compiler,
2813                                  const OperandType&
 right,
2814                                  compiler::Label* deopt) {
2822    case Token::kBIT_AND:
2825    case Token::kBIT_OR:
2828    case Token::kBIT_XOR:
2840void BinarySmiOpInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
2841 if (
op_kind() == Token::kSHL) {
2849 compiler::Label* deopt =
nullptr;
2850 if (CanDeoptimize()) {
2851 deopt =
compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinarySmiOp);
2855 const Object& constant = locs()->in(1).constant();
2856 ASSERT(constant.IsSmi());
2857 const intptr_t
value = Smi::Cast(constant).Value();
2861 case Token::kBIT_AND:
2862 case Token::kBIT_OR:
2863 case Token::kBIT_XOR:
2865 const intptr_t imm =
2866 (
op_kind() == Token::kMUL) ? value : Smi::RawValue(
value);
2868 compiler::Immediate(imm), deopt);
2872 case Token::kTRUNCDIV: {
2875 const intptr_t shift_count =
2878 Register temp = locs()->temp(0).reg();
2880 __ sarl(temp, compiler::Immediate(31));
2882 __ shrl(temp, compiler::Immediate(32 - shift_count));
2885 __ sarl(
left, compiler::Immediate(shift_count));
2895 const intptr_t kCountLimit = 0x1F;
2896 __ sarl(
left, compiler::Immediate(
2902 case Token::kUSHR: {
2903 ASSERT((value > 0) && (value < 64));
2920 if (value < (64 - compiler::target::kSmiBits)) {
2921 if (deopt !=
nullptr) {
2931 if (value >= compiler::target::kSmiBits) {
2941 __ sarl(
left, compiler::Immediate(31));
2942 __ shrl(
left, compiler::Immediate(value - 32));
2955 if (locs()->in(1).IsStackSlot()) {
2957 if (
op_kind() == Token::kMUL) {
2969 case Token::kBIT_AND:
2970 case Token::kBIT_OR:
2971 case Token::kBIT_XOR:
2973 if (
op_kind() == Token::kMUL) {
2979 case Token::kTRUNCDIV: {
2996 __ cmpl(
result, compiler::Immediate(0x40000000));
3025 compiler::Label
done;
3026 __ cmpl(
result, compiler::Immediate(0));
3032 __ cmpl(
right, compiler::Immediate(0));
3050 if (CanDeoptimize()) {
3051 __ cmpl(
right, compiler::Immediate(0));
3056 const intptr_t kCountLimit = 0x1F;
3058 __ cmpl(
right, compiler::Immediate(kCountLimit));
3059 compiler::Label count_ok;
3061 __ movl(
right, compiler::Immediate(kCountLimit));
3070 case Token::kUSHR: {
3071 compiler::Label
done;
3089 right_range(), 64 - compiler::target::kSmiBits - 1)) {
3090 __ cmpl(
right, compiler::Immediate(64 - compiler::target::kSmiBits));
3091 compiler::Label shift_less_34;
3096 compiler::Label shift_less_64;
3101 __ Bind(&shift_less_64);
3108 __ subl(
right, compiler::Immediate(32));
3109 __ sarl(
left, compiler::Immediate(31));
3113 __ Bind(&shift_less_34);
3118 if (deopt !=
nullptr) {
3129 compiler::target::kSmiBits - 1)) {
3130 __ cmpl(
right, compiler::Immediate(compiler::target::kSmiBits));
3131 compiler::Label shift_less_30;
3136 __ Bind(&shift_less_30);
3165LocationSummary* BinaryInt32OpInstr::MakeLocationSummary(Zone* zone,
3167 const intptr_t kNumInputs = 2;
3168 if (
op_kind() == Token::kTRUNCDIV) {
3171 }
else if (
op_kind() == Token::kMOD) {
3174 }
else if ((
op_kind() == Token::kSHR) || (
op_kind() == Token::kUSHR)) {
3175 const intptr_t kNumTemps = 0;
3176 LocationSummary* summary =
new (zone)
3182 }
else if (
op_kind() == Token::kSHL) {
3184 LocationSummary* summary =
new (zone)
3194 const intptr_t kNumTemps = 0;
3195 LocationSummary* summary =
new (zone)
3199 if (constant !=
nullptr) {
// Emits a left shift of an unboxed int32 by a constant Smi amount
// (locs.in(1) is asserted constant). When the shift can overflow, the
// visible shll/sarl pair shifts then arithmetically shifts back so the
// (elided) comparison against the original value can detect overflow and
// branch to `deopt` — TODO confirm; the compare/branch lines are missing
// from this extract. Without overflow potential, a plain shll suffices.
// NOTE(review): truncated fragment — line numbers jump (3211→3215,
// 3218→3220, 3228→3230, 3233→3238) and the closing brace is absent.
3209static void EmitInt32ShiftLeft(FlowGraphCompiler*
 compiler,
3210                               BinaryInt32OpInstr* shift_left) {
3211  const LocationSummary& locs = *shift_left->
locs();
3215  compiler::Label* deopt =
3216      shift_left->CanDeoptimize()
3217          ?
 compiler->AddDeoptStub(shift_left->deopt_id(),
3218                                  ICData::kDeoptBinarySmiOp)
3220  ASSERT(locs.in(1).IsConstant());
3222  const Object& constant = locs.in(1).constant();
3223  ASSERT(constant.IsSmi());
  // IA32 shift instructions mask the count to 5 bits, hence the 0x1F limit.
3225  const intptr_t kCountLimit = 0x1F;
3226  const intptr_t
 value = Smi::Cast(constant).Value();
3227  ASSERT((0 < value) && (value < kCountLimit));
3228  if (shift_left->can_overflow()) {
3230    Register temp = locs.temp(0).reg();
3232    __ shll(
left, compiler::Immediate(value));
3233    __ sarl(
left, compiler::Immediate(value));
3238    __ shll(
left, compiler::Immediate(value));
3241void BinaryInt32OpInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
3242 if (
op_kind() == Token::kSHL) {
3243 EmitInt32ShiftLeft(
compiler,
this);
3250 compiler::Label* deopt =
nullptr;
3251 if (CanDeoptimize()) {
3252 deopt =
compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinarySmiOp);
3256 const Object& constant = locs()->in(1).constant();
3257 ASSERT(constant.IsSmi());
3258 const intptr_t
value = Smi::Cast(constant).Value();
3263 case Token::kBIT_AND:
3264 case Token::kBIT_OR:
3265 case Token::kBIT_XOR:
3267 compiler::Immediate(value), deopt);
3270 case Token::kTRUNCDIV: {
3277 const intptr_t kCountLimit = 0x1F;
3282 case Token::kUSHR: {
3283 ASSERT((value > 0) && (value < 64));
3297 if (deopt !=
nullptr) {
3310 __ shrl(
left, compiler::Immediate(value));
3316 __ sarl(
left, compiler::Immediate(31));
3317 __ shrl(
left, compiler::Immediate(value - 32));
3329 if (locs()->in(1).IsStackSlot()) {
3341 case Token::kBIT_AND:
3342 case Token::kBIT_OR:
3343 case Token::kBIT_XOR:
3353LocationSummary* BinaryUint32OpInstr::MakeLocationSummary(Zone* zone,
3355 const intptr_t kNumInputs = 2;
3356 const intptr_t kNumTemps = (
op_kind() == Token::kMUL) ? 1 : 0;
3357 LocationSummary* summary =
new (zone)
3359 if (
op_kind() == Token::kMUL) {
3370void BinaryUint32OpInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
3376 case Token::kBIT_AND:
3377 case Token::kBIT_OR:
3378 case Token::kBIT_XOR:
3394LocationSummary* CheckEitherNonSmiInstr::MakeLocationSummary(Zone* zone,
3398 ASSERT((left_cid != kDoubleCid) && (right_cid != kDoubleCid));
3399 const intptr_t kNumInputs = 2;
3401 (left_cid != kSmiCid) && (right_cid != kSmiCid);
3402 const intptr_t kNumTemps = need_temp ? 1 : 0;
3403 LocationSummary* summary =
new (zone)
3411void CheckEitherNonSmiInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
3412 compiler::Label* deopt =
3413 compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinaryDoubleOp);
3418 if (this->
left()->definition() == this->
right()->definition()) {
3420 }
else if (left_cid == kSmiCid) {
3422 }
else if (right_cid == kSmiCid) {
3425 Register temp = locs()->temp(0).reg();
3433LocationSummary* BoxInstr::MakeLocationSummary(Zone* zone,
bool opt)
const {
3434 const intptr_t kNumInputs = 1;
3435 const intptr_t kNumTemps = 1;
3436 LocationSummary* summary =
new (zone) LocationSummary(
3444void BoxInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
3445 Register out_reg = locs()->out(0).reg();
3450 out_reg, locs()->temp(0).reg());
3453 case kUnboxedDouble:
3454 __ movsd(compiler::FieldAddress(out_reg, ValueOffset()),
value);
3458 __ movsd(compiler::FieldAddress(out_reg, ValueOffset()),
FpuTMP);
3460 case kUnboxedFloat32x4:
3461 case kUnboxedFloat64x2:
3462 case kUnboxedInt32x4:
3463 __ movups(compiler::FieldAddress(out_reg, ValueOffset()),
value);
3471LocationSummary* UnboxInstr::MakeLocationSummary(Zone* zone,
bool opt)
const {
3472 ASSERT(BoxCid() != kSmiCid);
3473 const bool needs_temp =
3477 const intptr_t kNumInputs = 1;
3478 const intptr_t kNumTemps = needs_temp ? 1 : 0;
3479 LocationSummary* summary =
new (zone)
3496void UnboxInstr::EmitLoadFromBox(FlowGraphCompiler*
compiler) {
3497 const Register box = locs()->in(0).reg();
3500 case kUnboxedInt64: {
3501 PairLocation*
result = locs()->out(0).AsPairLocation();
3503 __ movl(
result->At(0).reg(), compiler::FieldAddress(box, ValueOffset()));
3505 compiler::FieldAddress(box, ValueOffset() +
kWordSize));
3509 case kUnboxedDouble: {
3511 __ movsd(
result, compiler::FieldAddress(box, ValueOffset()));
3515 case kUnboxedFloat: {
3517 __ movsd(
result, compiler::FieldAddress(box, ValueOffset()));
3522 case kUnboxedFloat32x4:
3523 case kUnboxedFloat64x2:
3524 case kUnboxedInt32x4: {
3526 __ movups(
result, compiler::FieldAddress(box, ValueOffset()));
3536void UnboxInstr::EmitSmiConversion(FlowGraphCompiler*
compiler) {
3537 const Register box = locs()->in(0).reg();
3540 case kUnboxedInt64: {
3541 PairLocation*
result = locs()->out(0).AsPairLocation();
3550 case kUnboxedDouble: {
3551 const Register temp = locs()->temp(0).reg();
3565void UnboxInstr::EmitLoadInt32FromBoxOrSmi(FlowGraphCompiler*
compiler) {
3571void UnboxInstr::EmitLoadInt64FromBoxOrSmi(FlowGraphCompiler*
compiler) {
3572 const Register box = locs()->in(0).reg();
3573 PairLocation*
result = locs()->out(0).AsPairLocation();
3576 compiler::Label
done;
3583LocationSummary* BoxInteger32Instr::MakeLocationSummary(Zone* zone,
3585 const intptr_t kNumInputs = 1;
3588 LocationSummary* summary =
new (zone)
3595 LocationSummary* summary =
new (zone) LocationSummary(
3608void BoxInteger32Instr::EmitNativeCode(FlowGraphCompiler*
compiler) {
3621 compiler::Label
done;
3626 __ testl(
value, compiler::Immediate(0xC0000000));
3635 locs()->live_registers()->Add(locs()->in(0), kUnboxedInt32);
3639 locs()->temp(0).reg());
3644 __ sarl(
value, compiler::Immediate(31));
3649 compiler::Immediate(0));
3654LocationSummary* BoxInt64Instr::MakeLocationSummary(Zone* zone,
3656 const intptr_t kNumInputs = 1;
3658 LocationSummary* summary =
new (zone)
3659 LocationSummary(zone, kNumInputs, kNumTemps,
3671void BoxInt64Instr::EmitNativeCode(FlowGraphCompiler*
compiler) {
3673 PairLocation* value_pair = locs()->in(0).AsPairLocation();
3674 Register value_lo = value_pair->At(0).reg();
3675 Register out_reg = locs()->out(0).reg();
3676 __ movl(out_reg, value_lo);
3681 PairLocation* value_pair = locs()->in(0).AsPairLocation();
3682 Register value_lo = value_pair->At(0).reg();
3683 Register value_hi = value_pair->At(1).reg();
3684 Register out_reg = locs()->out(0).reg();
3688 __ movl(out_reg, value_hi);
3692 compiler::Label not_smi,
done;
3696 __ addl(value_lo, compiler::Immediate(0x40000000));
3697 __ adcl(out_reg, compiler::Immediate(0));
3699 __ cmpl(value_lo, compiler::Immediate(0x80000000));
3700 __ sbbl(out_reg, compiler::Immediate(0));
3703 __ subl(value_lo, compiler::Immediate(0x40000000));
3704 __ movl(out_reg, value_lo);
3709 __ subl(value_lo, compiler::Immediate(0x40000000));
3712 out_reg, locs()->temp(0).reg());
3719LocationSummary* UnboxInteger32Instr::MakeLocationSummary(Zone* zone,
3722 const intptr_t kNumInputs = 1;
3723 intptr_t kNumTemps = 0;
3725 if (CanDeoptimize()) {
3726 if ((value_cid != kSmiCid) && (value_cid != kMintCid) && !
is_truncating()) {
3733 LocationSummary* summary =
new (zone)
3736 for (
int i = 0; i < kNumTemps; i++) {
3739 summary->set_out(0, ((value_cid == kSmiCid) || (value_cid != kMintCid))
// Loads the low 32 bits of a Mint (boxed 64-bit integer) from address `lo`
// into the result register. When `deopt` is non-null the value must fit in
// an int32: the visible sarl(temp, 31) computes the sign-extension of the
// low word so it can be compared against the high word at `hi` — mismatch
// presumably branches to `deopt` (TODO confirm; the load of temp and the
// compare/branch lines are elided from this extract).
// NOTE(review): truncated fragment — line numbers jump (3745→3747,
// 3748→3750, 3752→3755) and the function tail is missing.
3745static void LoadInt32FromMint(FlowGraphCompiler*
 compiler,
3747                              const compiler::Address& lo,
3748                              const compiler::Address& hi,
3750                              compiler::Label* deopt) {
3752  if (deopt !=
 nullptr) {
3755    __ sarl(temp, compiler::Immediate(31));
3761void UnboxInteger32Instr::EmitNativeCode(FlowGraphCompiler*
compiler) {
3766 compiler::Label* deopt =
nullptr;
3767 if (CanDeoptimize()) {
3768 deopt =
compiler->AddDeoptStub(GetDeoptId(), ICData::kDeoptUnboxInteger);
3770 compiler::Label* out_of_range = !
is_truncating() ? deopt :
nullptr;
3775 if (value_cid == kSmiCid) {
3778 }
else if (value_cid == kMintCid) {
3782 compiler::FieldAddress(
value, hi_offset), temp, out_of_range);
3783 }
else if (!CanDeoptimize()) {
3785 compiler::Label
done;
3792 compiler::Label
done;
3793 __ SmiUntagOrCheckClass(
value, kMintCid, temp, &
done);
3795 if (out_of_range !=
nullptr) {
3796 Register value_temp = locs()->temp(1).reg();
3802 compiler::Address(
value,
TIMES_2, hi_offset), temp, out_of_range);
3807LocationSummary* LoadCodeUnitsInstr::MakeLocationSummary(Zone* zone,
3810 const intptr_t kNumInputs = 2;
3811 const intptr_t kNumTemps = might_box ? 2 : 0;
3812 LocationSummary* summary =
new (zone) LocationSummary(
3813 zone, kNumInputs, kNumTemps,
3836void LoadCodeUnitsInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
3838 const Register str = locs()->in(0).reg();
3841 bool index_unboxed =
false;
3844 index_unboxed =
true;
3846 compiler::Address element_address =
3853 ASSERT(locs()->
out(0).IsPairLocation());
3854 PairLocation* result_pair = locs()->out(0).AsPairLocation();
3855 Register result1 = result_pair->At(0).reg();
3856 Register result2 = result_pair->At(1).reg();
3859 case kOneByteStringCid:
3861 __ movl(result1, element_address);
3862 __ xorl(result2, result2);
3864 case kTwoByteStringCid:
3866 __ movl(result1, element_address);
3867 __ xorl(result2, result2);
3876 case kOneByteStringCid:
3879 __ movzxb(
result, element_address);
3882 __ movzxw(
result, element_address);
3891 case kTwoByteStringCid:
3894 __ movzxw(
result, element_address);
3911 Register temp = locs()->temp(0).reg();
3912 Register temp2 = locs()->temp(1).reg();
3916 locs()->live_registers()->Add(locs()->temp(0), kUnboxedInt32);
3922 compiler::Label
done;
3923 __ testl(temp, compiler::Immediate(0xC0000000));
3929 compiler::Immediate(0));
3935LocationSummary* BinaryDoubleOpInstr::MakeLocationSummary(Zone* zone,
3937 const intptr_t kNumInputs = 2;
3938 const intptr_t kNumTemps = 0;
3939 LocationSummary* summary =
new (zone)
3947void BinaryDoubleOpInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
3971LocationSummary* DoubleTestOpInstr::MakeLocationSummary(Zone* zone,
3973 const intptr_t kNumInputs = 1;
3974 const intptr_t kNumTemps =
3975 op_kind() == MethodRecognizer::kDouble_getIsNegative
3977 : (
op_kind() == MethodRecognizer::kDouble_getIsInfinite ? 1 : 0);
3978 LocationSummary* summary =
new (zone)
3981 if (kNumTemps > 0) {
3983 if (
op_kind() == MethodRecognizer::kDouble_getIsNegative) {
3992 BranchLabels labels) {
3995 const bool is_negated = kind() != Token::kEQ;
3998 case MethodRecognizer::kDouble_getIsNaN: {
4002 case MethodRecognizer::kDouble_getIsInfinite: {
4003 const Register temp = locs()->temp(0).reg();
4004 compiler::Label check_upper;
4007 __ movl(temp, compiler::Address(
ESP, 0));
4009 __ cmpl(temp, compiler::Immediate(0));
4012 __ jmp(is_negated ? labels.true_label : labels.false_label);
4013 __ Bind(&check_upper);
4018 __ andl(temp, compiler::Immediate(0x7FFFFFFF));
4020 __ cmpl(temp, compiler::Immediate(0x7FF00000));
4023 case MethodRecognizer::kDouble_getIsNegative: {
4024 const Register temp = locs()->temp(0).reg();
4025 const FpuRegister temp_fpu = locs()->temp(1).fpu_reg();
4026 compiler::Label not_zero;
4027 __ xorpd(temp_fpu, temp_fpu);
4030 __ j(
PARITY_EVEN, is_negated ? labels.true_label : labels.false_label);
4033 __ testl(temp, compiler::Immediate(1));
4043#define DEFINE_EMIT(Name, Args) \
4044 static void Emit##Name(FlowGraphCompiler* compiler, SimdOpInstr* instr, \
4045 PP_APPLY(PP_UNPACK, Args))
4047#define SIMD_OP_FLOAT_ARITH(V, Name, op) \
4048 V(Float32x4##Name, op##ps) \
4049 V(Float64x2##Name, op##pd)
4051#define SIMD_OP_SIMPLE_BINARY(V) \
4052 SIMD_OP_FLOAT_ARITH(V, Add, add) \
4053 SIMD_OP_FLOAT_ARITH(V, Sub, sub) \
4054 SIMD_OP_FLOAT_ARITH(V, Mul, mul) \
4055 SIMD_OP_FLOAT_ARITH(V, Div, div) \
4056 SIMD_OP_FLOAT_ARITH(V, Min, min) \
4057 SIMD_OP_FLOAT_ARITH(V, Max, max) \
4058 V(Int32x4Add, addpl) \
4059 V(Int32x4Sub, subpl) \
4060 V(Int32x4BitAnd, andps) \
4061 V(Int32x4BitOr, orps) \
4062 V(Int32x4BitXor, xorps) \
4063 V(Float32x4Equal, cmppseq) \
4064 V(Float32x4NotEqual, cmppsneq) \
4065 V(Float32x4LessThan, cmppslt) \
4066 V(Float32x4LessThanOrEqual, cmppsle)
4068DEFINE_EMIT(SimdBinaryOp,
4070 switch (instr->kind()) {
4071#define EMIT(Name, op) \
4072 case SimdOpInstr::k##Name: \
4073 __ op(left, right); \
4075 SIMD_OP_SIMPLE_BINARY(EMIT)
4077 case SimdOpInstr::kFloat32x4Scale:
4079 __ shufps(
left,
left, compiler::Immediate(0x00));
4082 case SimdOpInstr::kFloat32x4ShuffleMix:
4083 case SimdOpInstr::kInt32x4ShuffleMix:
4084 __ shufps(
left,
right, compiler::Immediate(instr->mask()));
4086 case SimdOpInstr::kFloat64x2FromDoubles:
4092 case SimdOpInstr::kFloat64x2Scale:
4096 case SimdOpInstr::kFloat64x2WithX:
4097 case SimdOpInstr::kFloat64x2WithY: {
4100 (SimdOpInstr::kFloat64x2WithX + 1));
4101 const intptr_t lane_index = instr->kind() - SimdOpInstr::kFloat64x2WithX;
4102 ASSERT(0 <= lane_index && lane_index < 2);
4104 __ movups(compiler::Address(
ESP, 0),
left);
4106 __ movups(
left, compiler::Address(
ESP, 0));
4110 case SimdOpInstr::kFloat32x4WithX:
4111 case SimdOpInstr::kFloat32x4WithY:
4112 case SimdOpInstr::kFloat32x4WithZ:
4113 case SimdOpInstr::kFloat32x4WithW: {
4118 SimdOpInstr::kFloat32x4WithY == (SimdOpInstr::kFloat32x4WithX + 1) &&
4119 SimdOpInstr::kFloat32x4WithZ == (SimdOpInstr::kFloat32x4WithX + 2) &&
4120 SimdOpInstr::kFloat32x4WithW == (SimdOpInstr::kFloat32x4WithX + 3));
4121 const intptr_t lane_index = instr->kind() - SimdOpInstr::kFloat32x4WithX;
4122 ASSERT(0 <= lane_index && lane_index < 4);
4127 __ movups(
left, compiler::Address(
ESP, 0));
4136#define SIMD_OP_SIMPLE_UNARY(V) \
4137 SIMD_OP_FLOAT_ARITH(V, Sqrt, sqrt) \
4138 SIMD_OP_FLOAT_ARITH(V, Negate, negate) \
4139 SIMD_OP_FLOAT_ARITH(V, Abs, abs) \
4140 V(Float32x4Reciprocal, reciprocalps) \
4141 V(Float32x4ReciprocalSqrt, rsqrtps)
4143DEFINE_EMIT(SimdUnaryOp, (SameAsFirstInput,
XmmRegister value)) {
4147 switch (instr->kind()) {
4148#define EMIT(Name, op) \
4149 case SimdOpInstr::k##Name: \
4152 SIMD_OP_SIMPLE_UNARY(EMIT)
4154 case SimdOpInstr::kFloat32x4GetX:
4156 __ cvtss2sd(value, value);
4158 case SimdOpInstr::kFloat32x4GetY:
4159 __ shufps(value, value, compiler::Immediate(0x55));
4160 __ cvtss2sd(value, value);
4162 case SimdOpInstr::kFloat32x4GetZ:
4163 __ shufps(value, value, compiler::Immediate(0xAA));
4164 __ cvtss2sd(value, value);
4166 case SimdOpInstr::kFloat32x4GetW:
4167 __ shufps(value, value, compiler::Immediate(0xFF));
4168 __ cvtss2sd(value, value);
4170 case SimdOpInstr::kFloat32x4Shuffle:
4171 case SimdOpInstr::kInt32x4Shuffle:
4172 __ shufps(value, value, compiler::Immediate(instr->mask()));
4174 case SimdOpInstr::kFloat32x4Splat:
4176 __ cvtsd2ss(value, value);
4178 __ shufps(value, value, compiler::Immediate(0x00));
4180 case SimdOpInstr::kFloat64x2ToFloat32x4:
4181 __ cvtpd2ps(value, value);
4183 case SimdOpInstr::kFloat32x4ToFloat64x2:
4184 __ cvtps2pd(value, value);
4186 case SimdOpInstr::kFloat32x4ToInt32x4:
4187 case SimdOpInstr::kInt32x4ToFloat32x4:
4192 case SimdOpInstr::kFloat64x2GetX:
4195 case SimdOpInstr::kFloat64x2GetY:
4196 __ shufpd(value, value, compiler::Immediate(0x33));
4198 case SimdOpInstr::kFloat64x2Splat:
4199 __ shufpd(value, value, compiler::Immediate(0x0));
4207 switch (instr->kind()) {
4208 case SimdOpInstr::kFloat32x4GetSignMask:
4209 case SimdOpInstr::kInt32x4GetSignMask:
4210 __ movmskps(out, value);
4212 case SimdOpInstr::kFloat64x2GetSignMask:
4213 __ movmskpd(out, value);
4222 Float32x4FromDoubles,
4228 for (intptr_t i = 0; i < 4; i++) {
4229 __ cvtsd2ss(out, instr->locs()->in(i).fpu_reg());
4232 __ movups(out, compiler::Address(
ESP, 0));
4241 __ xorpd(value, value);
4244DEFINE_EMIT(Float32x4Clamp,
4253DEFINE_EMIT(Float64x2Clamp,
4262DEFINE_EMIT(Int32x4FromInts,
4266 for (intptr_t i = 0; i < 4; i++) {
4267 __ movl(compiler::Address(
ESP, i *
kInt32Size), instr->locs()->in(i).reg());
4273DEFINE_EMIT(Int32x4FromBools,
4277 for (intptr_t i = 0; i < 4; i++) {
4278 compiler::Label store_false,
done;
4279 __ CompareObject(instr->locs()->in(i).reg(),
Bool::True());
4282 compiler::Immediate(0xFFFFFFFF));
4284 __ Bind(&store_false);
4285 __ movl(compiler::Address(
ESP,
kInt32Size * i), compiler::Immediate(0x0));
4295 SimdOpInstr::kInt32x4GetFlagY == (SimdOpInstr::kInt32x4GetFlagX + 1) &&
4296 SimdOpInstr::kInt32x4GetFlagZ == (SimdOpInstr::kInt32x4GetFlagX + 2) &&
4297 SimdOpInstr::kInt32x4GetFlagW == (SimdOpInstr::kInt32x4GetFlagX + 3));
4298 const intptr_t lane_index = instr->kind() - SimdOpInstr::kInt32x4GetFlagX;
4299 ASSERT(0 <= lane_index && lane_index < 4);
4303 __ movups(compiler::Address(
ESP, 0), value);
4314 compiler::Address(
THR,
EDX,
TIMES_4, Thread::bool_true_offset()));
4318DEFINE_EMIT(Int32x4WithFlag,
4322 Temp<Fixed<Register, EDX> > temp)) {
4324 SimdOpInstr::kInt32x4WithFlagY == (SimdOpInstr::kInt32x4WithFlagX + 1) &&
4325 SimdOpInstr::kInt32x4WithFlagZ == (SimdOpInstr::kInt32x4WithFlagX + 2) &&
4326 SimdOpInstr::kInt32x4WithFlagW == (SimdOpInstr::kInt32x4WithFlagX + 3));
4327 const intptr_t lane_index = instr->kind() - SimdOpInstr::kInt32x4WithFlagX;
4328 ASSERT(0 <= lane_index && lane_index < 4);
4332 __ movups(compiler::Address(
ESP, 0), mask);
4343 __ movups(mask, compiler::Address(
ESP, 0));
4347DEFINE_EMIT(Int32x4Select,
4352 Temp<XmmRegister> temp)) {
4354 __ movaps(temp, mask);
4358 __ andps(mask, trueValue);
4360 __ andps(temp, falseValue);
4362 __ orps(mask, temp);
4371#define SIMD_OP_VARIANTS(CASE, ____, SIMPLE) \
4372 SIMD_OP_SIMPLE_BINARY(CASE) \
4373 CASE(Float32x4Scale) \
4374 CASE(Float32x4ShuffleMix) \
4375 CASE(Int32x4ShuffleMix) \
4376 CASE(Float64x2FromDoubles) \
4377 CASE(Float64x2Scale) \
4378 CASE(Float64x2WithX) \
4379 CASE(Float64x2WithY) \
4380 CASE(Float32x4WithX) \
4381 CASE(Float32x4WithY) \
4382 CASE(Float32x4WithZ) \
4383 CASE(Float32x4WithW) \
4384 ____(SimdBinaryOp) \
4385 SIMD_OP_SIMPLE_UNARY(CASE) \
4386 CASE(Float32x4GetX) \
4387 CASE(Float32x4GetY) \
4388 CASE(Float32x4GetZ) \
4389 CASE(Float32x4GetW) \
4390 CASE(Float32x4Shuffle) \
4391 CASE(Int32x4Shuffle) \
4392 CASE(Float32x4Splat) \
4393 CASE(Float32x4ToFloat64x2) \
4394 CASE(Float64x2ToFloat32x4) \
4395 CASE(Int32x4ToFloat32x4) \
4396 CASE(Float32x4ToInt32x4) \
4397 CASE(Float64x2GetX) \
4398 CASE(Float64x2GetY) \
4399 CASE(Float64x2Splat) \
4401 CASE(Float32x4GetSignMask) \
4402 CASE(Int32x4GetSignMask) \
4403 CASE(Float64x2GetSignMask) \
4404 ____(SimdGetSignMask) \
4405 SIMPLE(Float32x4FromDoubles) \
4406 SIMPLE(Int32x4FromInts) \
4407 SIMPLE(Int32x4FromBools) \
4408 SIMPLE(Float32x4Zero) \
4409 SIMPLE(Float64x2Zero) \
4410 SIMPLE(Float32x4Clamp) \
4411 SIMPLE(Float64x2Clamp) \
4412 CASE(Int32x4GetFlagX) \
4413 CASE(Int32x4GetFlagY) \
4414 CASE(Int32x4GetFlagZ) \
4415 CASE(Int32x4GetFlagW) \
4416 ____(Int32x4GetFlag) \
4417 CASE(Int32x4WithFlagX) \
4418 CASE(Int32x4WithFlagY) \
4419 CASE(Int32x4WithFlagZ) \
4420 CASE(Int32x4WithFlagW) \
4421 ____(Int32x4WithFlag) \
4422 SIMPLE(Int32x4Select)
4426#define CASE(Name, ...) case k##Name:
4428 return MakeLocationSummaryFromEmitter(zone, this, &Emit##Name);
4429#define SIMPLE(Name) CASE(Name) EMIT(Name)
4434 case SimdOpInstr::kFloat32x4GreaterThan:
4435 case SimdOpInstr::kFloat32x4GreaterThanOrEqual:
4446#define CASE(Name, ...) case k##Name:
4448 InvokeEmitter(compiler, this, &Emit##Name); \
4450#define SIMPLE(Name) CASE(Name) EMIT(Name)
4455 case SimdOpInstr::kFloat32x4GreaterThan:
4456 case SimdOpInstr::kFloat32x4GreaterThanOrEqual:
4465LocationSummary* CaseInsensitiveCompareInstr::MakeLocationSummary(
4468 const intptr_t kNumTemps = 0;
4469 LocationSummary* summary =
new (zone)
4479void CaseInsensitiveCompareInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
4480 compiler::LeafRuntimeScope rt(
compiler->assembler(),
4481 4 * compiler::target::kWordSize,
4483 __ movl(compiler::Address(
ESP, +0 *
kWordSize), locs()->in(0).reg());
4484 __ movl(compiler::Address(
ESP, +1 *
kWordSize), locs()->in(1).reg());
4485 __ movl(compiler::Address(
ESP, +2 *
kWordSize), locs()->in(2).reg());
4486 __ movl(compiler::Address(
ESP, +3 *
kWordSize), locs()->in(3).reg());
4490LocationSummary* MathMinMaxInstr::MakeLocationSummary(Zone* zone,
4493 const intptr_t kNumInputs = 2;
4494 const intptr_t kNumTemps = 1;
4495 LocationSummary* summary =
new (zone)
4506 const intptr_t kNumInputs = 2;
4507 const intptr_t kNumTemps = 0;
4508 LocationSummary* summary =
new (zone)
4517void MathMinMaxInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
4519 (
op_kind() == MethodRecognizer::kMathMax));
4520 const bool is_min = (
op_kind() == MethodRecognizer::kMathMin);
4526 Register temp = locs()->temp(0).reg();
4531 is_min ? TokenKindToDoubleCondition(Token::kLT)
4532 : TokenKindToDoubleCondition(Token::kGT);
4538 __ Bind(&returns_nan);
4539 static double kNaN = NAN;
4545 compiler::Label left_is_negative;
4553 __ testl(temp, compiler::Immediate(1));
4580LocationSummary* UnarySmiOpInstr::MakeLocationSummary(Zone* zone,
4582 const intptr_t kNumInputs = 1;
4587void UnarySmiOpInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
4591 case Token::kNEGATE: {
4592 compiler::Label* deopt =
4593 compiler->AddDeoptStub(deopt_id(), ICData::kDeoptUnaryOp);
4598 case Token::kBIT_NOT:
4608LocationSummary* UnaryDoubleOpInstr::MakeLocationSummary(Zone* zone,
4610 const intptr_t kNumInputs = 1;
4611 const intptr_t kNumTemps = 0;
4612 LocationSummary* summary =
new (zone)
4619void UnaryDoubleOpInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
4624 case Token::kNEGATE:
4630 case Token::kSQUARE:
4633 case Token::kTRUNCATE:
4639 case Token::kCEILING:
4647LocationSummary* Int32ToDoubleInstr::MakeLocationSummary(Zone* zone,
4649 const intptr_t kNumInputs = 1;
4650 const intptr_t kNumTemps = 0;
4651 LocationSummary*
result =
new (zone)
4658void Int32ToDoubleInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
4664LocationSummary* SmiToDoubleInstr::MakeLocationSummary(Zone* zone,
4666 const intptr_t kNumInputs = 1;
4667 const intptr_t kNumTemps = 0;
4668 LocationSummary*
result =
new (zone)
4675void SmiToDoubleInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
4682LocationSummary* Int64ToDoubleInstr::MakeLocationSummary(Zone* zone,
4684 const intptr_t kNumInputs = 1;
4685 const intptr_t kNumTemps = 0;
4686 LocationSummary*
result =
new (zone)
4694void Int64ToDoubleInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
4695 PairLocation* pair = locs()->in(0).AsPairLocation();
4696 Register in_lo = pair->At(0).reg();
4697 Register in_hi = pair->At(1).reg();
4706 __ fildl(compiler::Address(
ESP, 0));
4708 __ fstpl(compiler::Address(
ESP, 0));
4715LocationSummary* DoubleToIntegerInstr::MakeLocationSummary(Zone* zone,
4717 const intptr_t kNumInputs = 1;
4718 const intptr_t kNumTemps = 0;
4719 LocationSummary*
result =
new (zone) LocationSummary(
4726void DoubleToIntegerInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
4729 const XmmRegister value_double = locs()->in(0).fpu_reg();
4731 DoubleToIntegerSlowPath* slow_path =
4732 new DoubleToIntegerSlowPath(
this, value_double);
4733 compiler->AddSlowPathCode(slow_path);
4735 __ cvttsd2si(
result, value_double);
4738 __ cmpl(
result, compiler::Immediate(0xC0000000));
4741 __ Bind(slow_path->exit_label());
4744LocationSummary* DoubleToSmiInstr::MakeLocationSummary(Zone* zone,
4746 const intptr_t kNumInputs = 1;
4747 const intptr_t kNumTemps = 0;
4748 LocationSummary*
result =
new (zone)
4755void DoubleToSmiInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
4756 compiler::Label* deopt =
4757 compiler->AddDeoptStub(deopt_id(), ICData::kDeoptDoubleToSmi);
4762 __ cmpl(
result, compiler::Immediate(0xC0000000));
4767LocationSummary* DoubleToFloatInstr::MakeLocationSummary(Zone* zone,
4769 const intptr_t kNumInputs = 1;
4770 const intptr_t kNumTemps = 0;
4771 LocationSummary*
result =
new (zone)
4778void DoubleToFloatInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
4779 __ cvtsd2ss(locs()->
out(0).fpu_reg(), locs()->in(0).fpu_reg());
4782LocationSummary* FloatToDoubleInstr::MakeLocationSummary(Zone* zone,
4784 const intptr_t kNumInputs = 1;
4785 const intptr_t kNumTemps = 0;
4786 LocationSummary*
result =
new (zone)
4793void FloatToDoubleInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
4794 __ cvtss2sd(locs()->
out(0).fpu_reg(), locs()->in(0).fpu_reg());
4797LocationSummary* FloatCompareInstr::MakeLocationSummary(Zone* zone,
4803void FloatCompareInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
4810 const intptr_t kNumTemps =
4812 LocationSummary*
result =
new (zone)
4848static void InvokeDoublePow(FlowGraphCompiler*
compiler,
4849 InvokeMathCFunctionInstr* instr) {
4850 ASSERT(instr->recognized_kind() == MethodRecognizer::kMathDoublePow);
4851 const intptr_t kInputCount = 2;
4852 ASSERT(instr->InputCount() == kInputCount);
4853 LocationSummary* locs = instr->locs();
4862 __ xorps(zero_temp, zero_temp);
4866 compiler::Label check_base, skip_call;
4868 __ comisd(exp, zero_temp);
4874 compiler::Label return_base;
4881 compiler::Label return_base_times_2;
4896 __ Bind(&return_base);
4900 __ Bind(&return_base_times_2);
4905 __ Bind(&check_base);
4910 compiler::Label return_nan;
4916 compiler::Label try_sqrt;
4919 __ Bind(&return_nan);
4924 compiler::Label do_pow, return_zero;
4940 __ comisd(
base, zero_temp);
4946 __ Bind(&return_zero);
4952 compiler::LeafRuntimeScope rt(
compiler->assembler(),
4955 for (intptr_t i = 0; i < kInputCount; i++) {
4958 rt.Call(instr->TargetFunction(), kInputCount);
4959 __ fstpl(compiler::Address(
ESP, 0));
4960 __ movsd(locs->out(0).fpu_reg(), compiler::Address(
ESP, 0));
4962 __ Bind(&skip_call);
4972 compiler::LeafRuntimeScope rt(
compiler->assembler(),
4975 for (intptr_t i = 0; i <
InputCount(); i++) {
4977 locs()->in(i).fpu_reg());
4980 __ fstpl(compiler::Address(
ESP, 0));
4981 __ movsd(
locs()->
out(0).fpu_reg(), compiler::Address(
ESP, 0));
4985LocationSummary* ExtractNthOutputInstr::MakeLocationSummary(Zone* zone,
4989 const intptr_t kNumInputs = 1;
4990 LocationSummary* summary =
5017void ExtractNthOutputInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
5018 ASSERT(locs()->in(0).IsPairLocation());
5019 PairLocation* pair = locs()->in(0).AsPairLocation();
5033LocationSummary* UnboxLaneInstr::MakeLocationSummary(Zone* zone,
5039void UnboxLaneInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
5043LocationSummary* BoxLanesInstr::MakeLocationSummary(Zone* zone,
5049void BoxLanesInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
5053LocationSummary* TruncDivModInstr::MakeLocationSummary(Zone* zone,
5055 const intptr_t kNumInputs = 2;
5056 const intptr_t kNumTemps = 0;
5057 LocationSummary* summary =
new (zone)
5068void TruncDivModInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
5070 compiler::Label* deopt =
5071 compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinarySmiOp);
5074 ASSERT(locs()->
out(0).IsPairLocation());
5075 PairLocation* pair = locs()->out(0).AsPairLocation();
5076 Register result1 = pair->At(0).reg();
5077 Register result2 = pair->At(1).reg();
5096 __ cmpl(
EAX, compiler::Immediate(0x40000000));
5107 compiler::Label
done;
5108 __ cmpl(
EDX, compiler::Immediate(0));
5113 __ cmpl(
right, compiler::Immediate(0));
5119 }
else if (divisor_range()->IsPositive()) {
5133static void EmitHashIntegerCodeSequence(FlowGraphCompiler*
compiler,
5137 __ movl(
EDX, compiler::Immediate(0x2d51));
5140 __ movl(
EAX, value_hi);
5141 __ movl(value_hi,
EDX);
5142 __ movl(
EDX, compiler::Immediate(0x2d51));
5144 __ addl(
EAX, value_hi);
5148 __ andl(
EAX, compiler::Immediate(0x3fffffff));
5151LocationSummary* HashDoubleOpInstr::MakeLocationSummary(Zone* zone,
5153 const intptr_t kNumInputs = 1;
5154 const intptr_t kNumTemps = 4;
5155 LocationSummary* summary =
new (zone)
5167void HashDoubleOpInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
5169 const Register temp = locs()->temp(0).reg();
5172 const XmmRegister temp_double = locs()->temp(3).fpu_reg();
5173 PairLocation* result_pair = locs()->out(0).AsPairLocation();
5174 ASSERT(result_pair->At(0).reg() ==
EAX);
5175 ASSERT(result_pair->At(1).reg() ==
EDX);
5178 compiler::Label hash_double, try_convert;
5182 __ pextrd(temp,
value, compiler::Immediate(1));
5189 __ andl(temp, compiler::Immediate(0x7FF00000));
5190 __ cmpl(temp, compiler::Immediate(0x7FF00000));
5193 compiler::Label slow_path;
5194 __ Bind(&try_convert);
5197 __ cmpl(
EAX, compiler::Immediate(0x80000000));
5199 __ cvtsi2sd(temp_double,
EAX);
5200 __ comisd(
value, temp_double);
5205 compiler::Label hash_integer,
done;
5207 __ Bind(&hash_integer);
5211 __ Bind(&slow_path);
5215 compiler::target::Thread::unboxed_runtime_arg_offset());
5217 compiler::LeafRuntimeScope rt(
5219 1 * compiler::target::kWordSize,
5221 __ movl(compiler::Address(
ESP, 0 * compiler::target::kWordSize),
THR);
5224 rt.Call(kTryDoubleAsIntegerRuntimeEntry, 1);
5228 __ j(
ZERO, &hash_double);
5231 THR, compiler::target::Thread::unboxed_runtime_arg_offset()));
5234 THR, compiler::target::Thread::unboxed_runtime_arg_offset() +
5236 __ jmp(&hash_integer);
5238 __ Bind(&hash_double);
5240 __ pextrd(
EAX,
value, compiler::Immediate(0));
5241 __ pextrd(temp,
value, compiler::Immediate(1));
5245 __ movl(
EAX, compiler::Address(
ESP, 0));
5250 __ andl(
EAX, compiler::Immediate(compiler::target::kSmiMax));
5256LocationSummary* HashIntegerOpInstr::MakeLocationSummary(Zone* zone,
5258 const intptr_t kNumInputs = 1;
5259 const intptr_t kNumTemps = 3;
5260 LocationSummary* summary =
new (zone)
5270void HashIntegerOpInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
5273 Register temp = locs()->temp(0).reg();
5274 Register temp1 = locs()->temp(1).reg();
5283 __ LoadFieldFromOffset(temp,
EAX,
5299 EmitHashIntegerCodeSequence(
compiler,
EAX, temp, temp1);
5314LocationSummary* CheckClassInstr::MakeLocationSummary(Zone* zone,
5316 const intptr_t kNumInputs = 1;
5317 const bool need_mask_temp =
IsBitTest();
5318 const intptr_t kNumTemps = !
IsNullCheck() ? (need_mask_temp ? 2 : 1) : 0;
5319 LocationSummary* summary =
new (zone)
5324 if (need_mask_temp) {
5332 compiler::Label* deopt) {
5333 const compiler::Immediate& raw_null =
5334 compiler::Immediate(
static_cast<intptr_t
>(
Object::null()));
5335 __ cmpl(locs()->in(0).reg(), raw_null);
5345 compiler::Label* deopt) {
5346 Register biased_cid = locs()->temp(0).reg();
5347 __ subl(biased_cid, compiler::Immediate(
min));
5348 __ cmpl(biased_cid, compiler::Immediate(
max -
min));
5351 Register mask_reg = locs()->temp(1).reg();
5352 __ movl(mask_reg, compiler::Immediate(mask));
5353 __ bt(mask_reg, biased_cid);
5357int CheckClassInstr::EmitCheckCid(FlowGraphCompiler*
compiler,
5362 compiler::Label* is_ok,
5363 compiler::Label* deopt,
5364 bool use_near_jump) {
5365 Register biased_cid = locs()->temp(0).reg();
5367 if (cid_start == cid_end) {
5368 __ cmpl(biased_cid, compiler::Immediate(cid_start - bias));
5374 __ addl(biased_cid, compiler::Immediate(bias - cid_start));
5376 __ cmpl(biased_cid, compiler::Immediate(cid_end - cid_start));
5382 __ j(no_match, deopt);
5384 if (use_near_jump) {
5393LocationSummary* CheckSmiInstr::MakeLocationSummary(Zone* zone,
5395 const intptr_t kNumInputs = 1;
5396 const intptr_t kNumTemps = 0;
5397 LocationSummary* summary =
new (zone)
5403void CheckSmiInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
5405 compiler::Label* deopt =
5406 compiler->AddDeoptStub(deopt_id(), ICData::kDeoptCheckSmi);
5407 __ BranchIfNotSmi(
value, deopt);
5410void CheckNullInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
5411 ThrowErrorSlowPathCode* slow_path =
new NullErrorSlowPath(
this);
5412 compiler->AddSlowPathCode(slow_path);
5414 Register value_reg = locs()->in(0).reg();
5417 __ CompareObject(value_reg, Object::null_object());
5418 __ BranchIf(
EQUAL, slow_path->entry_label());
5421LocationSummary* CheckClassIdInstr::MakeLocationSummary(Zone* zone,
5423 const intptr_t kNumInputs = 1;
5424 const intptr_t kNumTemps = 0;
5425 LocationSummary* summary =
new (zone)
5432void CheckClassIdInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
5434 compiler::Label* deopt =
5435 compiler->AddDeoptStub(deopt_id(), ICData::kDeoptCheckClass);
5436 if (cids_.IsSingleCid()) {
5449LocationSummary* CheckArrayBoundInstr::MakeLocationSummary(Zone* zone,
5451 const intptr_t kNumInputs = 2;
5452 const intptr_t kNumTemps = 0;
5453 LocationSummary* locs =
new (zone)
5464void CheckArrayBoundInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
5466 compiler::Label* deopt =
5467 compiler->AddDeoptStub(deopt_id(), ICData::kDeoptCheckArrayBound,
flags);
5472 if (length_loc.IsConstant() && index_loc.IsConstant()) {
5473 ASSERT((Smi::Cast(length_loc.constant()).Value() <=
5474 Smi::Cast(index_loc.constant()).Value()) ||
5475 (Smi::Cast(index_loc.constant()).Value() < 0));
5483 if (length_loc.IsConstant()) {
5485 if (index_cid != kSmiCid) {
5486 __ BranchIfNotSmi(
index, deopt);
5488 const Smi&
length = Smi::Cast(length_loc.constant());
5493 __ cmpl(
index, compiler::Immediate(
static_cast<int32_t
>(
length.ptr())));
5496 }
else if (index_loc.IsConstant()) {
5497 const Smi&
index = Smi::Cast(index_loc.constant());
5498 if (length_loc.IsStackSlot()) {
5500 __ cmpl(
length, compiler::Immediate(
static_cast<int32_t
>(
index.ptr())));
5503 __ cmpl(
length, compiler::Immediate(
static_cast<int32_t
>(
index.ptr())));
5506 }
else if (length_loc.IsStackSlot()) {
5509 if (index_cid != kSmiCid) {
5510 __ BranchIfNotSmi(
index, deopt);
5517 if (index_cid != kSmiCid) {
5518 __ BranchIfNotSmi(
index, deopt);
5525LocationSummary* CheckWritableInstr::MakeLocationSummary(Zone* zone,
5527 const intptr_t kNumInputs = 1;
5528 const intptr_t kNumTemps = 1;
5529 LocationSummary* locs =
new (zone) LocationSummary(
5530 zone, kNumInputs, kNumTemps,
5538void CheckWritableInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
5539 WriteErrorSlowPath* slow_path =
new WriteErrorSlowPath(
this);
5540 const Register temp = locs()->temp(0).reg();
5541 compiler->AddSlowPathCode(slow_path);
5543 compiler::FieldAddress(locs()->in(0).reg(),
5544 compiler::target::Object::tags_offset()));
5545 __ testl(temp, compiler::Immediate(
5546 1 << compiler::target::UntaggedObject::kImmutableBit));
5550LocationSummary* BinaryInt64OpInstr::MakeLocationSummary(Zone* zone,
5552 const intptr_t kNumInputs = 2;
5554 case Token::kBIT_AND:
5555 case Token::kBIT_OR:
5556 case Token::kBIT_XOR:
5559 const intptr_t kNumTemps = 0;
5560 LocationSummary* summary =
new (zone) LocationSummary(
5570 const intptr_t kNumTemps = 1;
5571 LocationSummary* summary =
new (zone) LocationSummary(
5587void BinaryInt64OpInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
5588 PairLocation* left_pair = locs()->in(0).AsPairLocation();
5589 Register left_lo = left_pair->At(0).reg();
5590 Register left_hi = left_pair->At(1).reg();
5591 PairLocation* right_pair = locs()->in(1).AsPairLocation();
5592 Register right_lo = right_pair->At(0).reg();
5593 Register right_hi = right_pair->At(1).reg();
5594 PairLocation* out_pair = locs()->out(0).AsPairLocation();
5595 Register out_lo = out_pair->At(0).reg();
5596 Register out_hi = out_pair->At(1).reg();
5597 ASSERT(out_lo == left_lo);
5598 ASSERT(out_hi == left_hi);
5600 ASSERT(!CanDeoptimize());
5603 case Token::kBIT_AND:
5604 __ andl(left_lo, right_lo);
5605 __ andl(left_hi, right_hi);
5607 case Token::kBIT_OR:
5608 __ orl(left_lo, right_lo);
5609 __ orl(left_hi, right_hi);
5611 case Token::kBIT_XOR:
5612 __ xorl(left_lo, right_lo);
5613 __ xorl(left_hi, right_hi);
5617 if (
op_kind() == Token::kADD) {
5618 __ addl(left_lo, right_lo);
5619 __ adcl(left_hi, right_hi);
5621 __ subl(left_lo, right_lo);
5622 __ sbbl(left_hi, right_hi);
5632 Register temp = locs()->temp(0).reg();
5633 __ movl(temp, left_lo);
5634 __ imull(left_hi, right_lo);
5635 __ imull(temp, right_hi);
5636 __ addl(temp, left_hi);
5649static void EmitShiftInt64ByConstant(FlowGraphCompiler*
compiler,
5653 const Object&
right) {
5654 const int64_t shift = Integer::Cast(
right).AsInt64Value();
5659 __ movl(left_lo, left_hi);
5660 __ sarl(left_hi, compiler::Immediate(31));
5662 __ sarl(left_lo, compiler::Immediate(shift > 63 ? 31 : shift - 32));
5665 __ shrdl(left_lo, left_hi, compiler::Immediate(shift));
5666 __ sarl(left_hi, compiler::Immediate(shift));
5670 case Token::kUSHR: {
5673 __ movl(left_lo, left_hi);
5674 __ xorl(left_hi, left_hi);
5676 __ shrl(left_lo, compiler::Immediate(shift - 32));
5679 __ shrdl(left_lo, left_hi, compiler::Immediate(shift));
5680 __ shrl(left_hi, compiler::Immediate(shift));
5687 __ movl(left_hi, left_lo);
5688 __ xorl(left_lo, left_lo);
5690 __ shll(left_hi, compiler::Immediate(shift - 32));
5693 __ shldl(left_hi, left_lo, compiler::Immediate(shift));
5694 __ shll(left_lo, compiler::Immediate(shift));
5703static void EmitShiftInt64ByECX(FlowGraphCompiler*
compiler,
5709 compiler::Label
done, large_shift;
5712 __ cmpl(
ECX, compiler::Immediate(31));
5715 __ shrdl(left_lo, left_hi,
ECX);
5716 __ sarl(left_hi,
ECX);
5719 __ Bind(&large_shift);
5721 __ movl(left_lo, left_hi);
5722 __ sarl(left_hi, compiler::Immediate(31));
5723 __ sarl(left_lo,
ECX);
5726 case Token::kUSHR: {
5727 __ cmpl(
ECX, compiler::Immediate(31));
5730 __ shrdl(left_lo, left_hi,
ECX);
5731 __ shrl(left_hi,
ECX);
5734 __ Bind(&large_shift);
5736 __ movl(left_lo, left_hi);
5737 __ xorl(left_hi, left_hi);
5738 __ shrl(left_lo,
ECX);
5742 __ cmpl(
ECX, compiler::Immediate(31));
5745 __ shldl(left_hi, left_lo,
ECX);
5746 __ shll(left_lo,
ECX);
5749 __ Bind(&large_shift);
5751 __ movl(left_hi, left_lo);
5752 __ xorl(left_lo, left_lo);
5753 __ shll(left_hi,
ECX);
5762static void EmitShiftUint32ByConstant(FlowGraphCompiler*
compiler,
5765 const Object&
right) {
5766 const int64_t shift = Integer::Cast(
right).AsInt64Value();
5772 case Token::kUSHR: {
5773 __ shrl(
left, compiler::Immediate(shift));
5777 __ shll(
left, compiler::Immediate(shift));
5786static void EmitShiftUint32ByECX(FlowGraphCompiler*
compiler,
5791 case Token::kUSHR: {
5804class ShiftInt64OpSlowPath :
public ThrowErrorSlowPathCode {
5806 explicit ShiftInt64OpSlowPath(ShiftInt64OpInstr* instruction)
5807 : ThrowErrorSlowPathCode(instruction,
5808 kArgumentErrorUnboxedInt64RuntimeEntry) {}
5810 const char*
name()
override {
return "int64 shift"; }
5812 void EmitCodeAtSlowPathEntry(FlowGraphCompiler*
compiler)
override {
5813 PairLocation* right_pair = instruction()->locs()->in(1).AsPairLocation();
5814 Register right_lo = right_pair->At(0).reg();
5815 Register right_hi = right_pair->At(1).reg();
5816 PairLocation* out_pair = instruction()->locs()->out(0).AsPairLocation();
5817 Register out_lo = out_pair->At(0).reg();
5818 Register out_hi = out_pair->At(1).reg();
5820 PairLocation* left_pair = instruction()->locs()->in(0).AsPairLocation();
5821 Register left_lo = left_pair->At(0).reg();
5822 Register left_hi = left_pair->At(1).reg();
5823 ASSERT(out_lo == left_lo);
5824 ASSERT(out_hi == left_hi);
5827 compiler::Label throw_error;
5828 __ testl(right_hi, right_hi);
5829 __ j(NEGATIVE, &throw_error);
5831 switch (instruction()->AsShiftInt64Op()->op_kind()) {
5833 __ sarl(out_hi, compiler::Immediate(31));
5834 __ movl(out_lo, out_hi);
5838 __ xorl(out_lo, out_lo);
5839 __ xorl(out_hi, out_hi);
5845 __ jmp(exit_label());
5847 __ Bind(&throw_error);
5854 __ movl(compiler::Address(
5855 THR, compiler::target::Thread::unboxed_runtime_arg_offset()),
5857 __ movl(compiler::Address(
5858 THR, compiler::target::Thread::unboxed_runtime_arg_offset() +
5864LocationSummary* ShiftInt64OpInstr::MakeLocationSummary(Zone* zone,
5866 const intptr_t kNumInputs = 2;
5867 const intptr_t kNumTemps = 0;
5868 LocationSummary* summary =
new (zone) LocationSummary(
5884void ShiftInt64OpInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
5885 PairLocation* left_pair = locs()->in(0).AsPairLocation();
5886 Register left_lo = left_pair->At(0).reg();
5887 Register left_hi = left_pair->At(1).reg();
5888 PairLocation* out_pair = locs()->out(0).AsPairLocation();
5889 Register out_lo = out_pair->At(0).reg();
5890 Register out_hi = out_pair->At(1).reg();
5891 ASSERT(out_lo == left_lo);
5892 ASSERT(out_hi == left_hi);
5897 locs()->in(1).constant());
5900 ASSERT(locs()->in(1).AsPairLocation()->At(0).reg() ==
ECX);
5901 Register right_hi = locs()->in(1).AsPairLocation()->At(1).reg();
5904 ShiftInt64OpSlowPath* slow_path =
nullptr;
5906 slow_path =
new (
Z) ShiftInt64OpSlowPath(
this);
5907 compiler->AddSlowPathCode(slow_path);
5908 __ testl(right_hi, right_hi);
5910 __ cmpl(
ECX, compiler::Immediate(kShiftCountLimit));
5911 __ j(
ABOVE, slow_path->entry_label());
5916 if (slow_path !=
nullptr) {
5917 __ Bind(slow_path->exit_label());
5922LocationSummary* SpeculativeShiftInt64OpInstr::MakeLocationSummary(
5925 const intptr_t kNumInputs = 2;
5926 const intptr_t kNumTemps = 0;
5927 LocationSummary* summary =
new (zone)
5936void SpeculativeShiftInt64OpInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
5937 PairLocation* left_pair = locs()->in(0).AsPairLocation();
5938 Register left_lo = left_pair->At(0).reg();
5939 Register left_hi = left_pair->At(1).reg();
5940 PairLocation* out_pair = locs()->out(0).AsPairLocation();
5941 Register out_lo = out_pair->At(0).reg();
5942 Register out_hi = out_pair->At(1).reg();
5943 ASSERT(out_lo == left_lo);
5944 ASSERT(out_hi == left_hi);
5949 locs()->in(1).constant());
5957 compiler::Label* deopt =
5958 compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinaryInt64Op);
5959 __ cmpl(
ECX, compiler::Immediate(kShiftCountLimit));
5967class ShiftUint32OpSlowPath :
public ThrowErrorSlowPathCode {
5969 explicit ShiftUint32OpSlowPath(ShiftUint32OpInstr* instruction)
5970 : ThrowErrorSlowPathCode(instruction,
5971 kArgumentErrorUnboxedInt64RuntimeEntry) {}
5973 const char*
name()
override {
return "uint32 shift"; }
5975 void EmitCodeAtSlowPathEntry(FlowGraphCompiler*
compiler)
override {
5976 PairLocation* right_pair = instruction()->locs()->in(1).AsPairLocation();
5977 Register right_lo = right_pair->At(0).reg();
5978 Register right_hi = right_pair->At(1).reg();
5979 const Register out = instruction()->locs()->out(0).reg();
5980 ASSERT(out == instruction()->locs()->in(0).reg());
5982 compiler::Label throw_error;
5983 __ testl(right_hi, right_hi);
5984 __ j(NEGATIVE, &throw_error);
5987 __ jmp(exit_label());
5989 __ Bind(&throw_error);
5996 __ movl(compiler::Address(
5997 THR, compiler::target::Thread::unboxed_runtime_arg_offset()),
5999 __ movl(compiler::Address(
6000 THR, compiler::target::Thread::unboxed_runtime_arg_offset() +
6006LocationSummary* ShiftUint32OpInstr::MakeLocationSummary(Zone* zone,
6008 const intptr_t kNumInputs = 2;
6009 const intptr_t kNumTemps = 0;
6010 LocationSummary* summary =
new (zone) LocationSummary(
6025void ShiftUint32OpInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
6032 locs()->in(1).constant());
6035 ASSERT(locs()->in(1).AsPairLocation()->At(0).reg() ==
ECX);
6036 Register right_hi = locs()->in(1).AsPairLocation()->At(1).reg();
6039 ShiftUint32OpSlowPath* slow_path =
nullptr;
6041 slow_path =
new (
Z) ShiftUint32OpSlowPath(
this);
6042 compiler->AddSlowPathCode(slow_path);
6044 __ testl(right_hi, right_hi);
6046 __ cmpl(
ECX, compiler::Immediate(kUint32ShiftCountLimit));
6047 __ j(
ABOVE, slow_path->entry_label());
6052 if (slow_path !=
nullptr) {
6053 __ Bind(slow_path->exit_label());
6058LocationSummary* SpeculativeShiftUint32OpInstr::MakeLocationSummary(
6061 const intptr_t kNumInputs = 2;
6062 const intptr_t kNumTemps = 0;
6063 LocationSummary* summary =
new (zone)
6071void SpeculativeShiftUint32OpInstr::EmitNativeCode(
6079 locs()->in(1).constant());
6088 compiler::Label* deopt =
6089 compiler->AddDeoptStub(deopt_id(), ICData::kDeoptBinaryInt64Op);
6095 compiler::Label cont;
6096 __ cmpl(
ECX, compiler::Immediate(kUint32ShiftCountLimit));
6108LocationSummary* UnaryInt64OpInstr::MakeLocationSummary(Zone* zone,
6110 const intptr_t kNumInputs = 1;
6111 const intptr_t kNumTemps = 0;
6112 LocationSummary* summary =
new (zone)
6120void UnaryInt64OpInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
6121 PairLocation* left_pair = locs()->in(0).AsPairLocation();
6122 Register left_lo = left_pair->At(0).reg();
6123 Register left_hi = left_pair->At(1).reg();
6124 PairLocation* out_pair = locs()->out(0).AsPairLocation();
6125 Register out_lo = out_pair->At(0).reg();
6126 Register out_hi = out_pair->At(1).reg();
6127 ASSERT(out_lo == left_lo);
6128 ASSERT(out_hi == left_hi);
6130 case Token::kBIT_NOT:
6134 case Token::kNEGATE:
6136 __ adcl(left_hi, compiler::Immediate(0));
6144LocationSummary* UnaryUint32OpInstr::MakeLocationSummary(Zone* zone,
6146 const intptr_t kNumInputs = 1;
6147 const intptr_t kNumTemps = 0;
6148 LocationSummary* summary =
new (zone)
6155void UnaryUint32OpInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
6157 ASSERT(locs()->in(0).reg() == out);
6164LocationSummary* IntConverterInstr::MakeLocationSummary(Zone* zone,
6166 const intptr_t kNumInputs = 1;
6167 const intptr_t kNumTemps = 0;
6168 LocationSummary* summary =
new (zone)
6171 if (
from() == kUntagged ||
to() == kUntagged) {
6172 ASSERT((
from() == kUntagged &&
to() == kUnboxedInt32) ||
6173 (
from() == kUntagged &&
to() == kUnboxedUint32) ||
6174 (
from() == kUnboxedInt32 &&
to() == kUntagged) ||
6175 (
from() == kUnboxedUint32 &&
to() == kUntagged));
6176 ASSERT(!CanDeoptimize());
6179 }
else if ((
from() == kUnboxedInt32 ||
from() == kUnboxedUint32) &&
6180 (
to() == kUnboxedInt32 ||
to() == kUnboxedUint32)) {
6183 }
else if (
from() == kUnboxedInt64) {
6189 }
else if (
from() == kUnboxedUint32) {
6193 }
else if (
from() == kUnboxedInt32) {
6202void IntConverterInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
6203 const bool is_nop_conversion =
6204 (
from() == kUntagged &&
to() == kUnboxedInt32) ||
6205 (
from() == kUntagged &&
to() == kUnboxedUint32) ||
6206 (
from() == kUnboxedInt32 &&
to() == kUntagged) ||
6207 (
from() == kUnboxedUint32 &&
to() == kUntagged);
6208 if (is_nop_conversion) {
6209 ASSERT(locs()->in(0).reg() == locs()->
out(0).reg());
6213 if (
from() == kUnboxedInt32 &&
to() == kUnboxedUint32) {
6215 ASSERT(locs()->
out(0).reg() == locs()->in(0).reg());
6216 }
else if (
from() == kUnboxedUint32 &&
to() == kUnboxedInt32) {
6218 ASSERT(locs()->
out(0).reg() == locs()->in(0).reg());
6219 if (CanDeoptimize()) {
6220 compiler::Label* deopt =
6221 compiler->AddDeoptStub(deopt_id(), ICData::kDeoptUnboxInteger);
6222 __ testl(locs()->
out(0).reg(), locs()->
out(0).reg());
6225 }
else if (
from() == kUnboxedInt64) {
6230 ASSERT(
to() == kUnboxedInt32 ||
to() == kUnboxedUint32);
6231 PairLocation* in_pair = locs()->in(0).AsPairLocation();
6232 Register in_lo = in_pair->At(0).reg();
6233 Register in_hi = in_pair->At(1).reg();
6236 __ movl(out, in_lo);
6237 if (CanDeoptimize()) {
6238 compiler::Label* deopt =
6239 compiler->AddDeoptStub(deopt_id(), ICData::kDeoptUnboxInteger);
6240 __ sarl(in_lo, compiler::Immediate(31));
6241 __ cmpl(in_lo, in_hi);
6244 }
else if (
from() == kUnboxedUint32) {
6247 PairLocation* out_pair = locs()->out(0).AsPairLocation();
6248 Register out_lo = out_pair->At(0).reg();
6249 Register out_hi = out_pair->At(1).reg();
6251 __ movl(out_lo, in);
6253 __ xorl(out_hi, out_hi);
6254 }
else if (
from() == kUnboxedInt32) {
6256 PairLocation* out_pair = locs()->out(0).AsPairLocation();
6257 Register out_lo = out_pair->At(0).reg();
6258 Register out_hi = out_pair->At(1).reg();
6267LocationSummary* StopInstr::MakeLocationSummary(Zone* zone,
bool opt)
const {
6271void StopInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
6275void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
6277 if (entry !=
nullptr) {
6278 if (!
compiler->CanFallThroughTo(entry)) {
6279 FATAL(
"Checked function entry must have no offset");
6283 if (!
compiler->CanFallThroughTo(entry)) {
6289LocationSummary* GotoInstr::MakeLocationSummary(Zone* zone,
bool opt)
const {
6293void GotoInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
6295 if (FLAG_reorder_basic_blocks) {
6300 compiler->AddCurrentDescriptor(UntaggedPcDescriptors::kDeopt, GetDeoptId(),
6301 InstructionSource());
6314LocationSummary* IndirectGotoInstr::MakeLocationSummary(Zone* zone,
6316 const intptr_t kNumInputs = 1;
6317 const intptr_t kNumTemps = 2;
6319 LocationSummary* summary =
new (zone)
6329void IndirectGotoInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
6330 Register index_reg = locs()->in(0).reg();
6331 Register target_reg = locs()->temp(0).reg();
6337 false, kTypedDataInt32ArrayCid,
6339 false,
offset, index_reg));
6344 EBP, compiler::target::frame_layout.code_from_fp *
kWordSize));
6357LocationSummary* StrictCompareInstr::MakeLocationSummary(Zone* zone,
6359 const intptr_t kNumInputs = 2;
6360 const intptr_t kNumTemps = 0;
6362 LocationSummary* locs =
new (zone)
6369 LocationSummary* locs =
new (zone)
6374 locs->set_in(1, locs->in(0).IsConstant()
6381Condition StrictCompareInstr::EmitComparisonCodeRegConstant(
6383 BranchLabels labels,
6385 const Object& obj) {
6391static bool IsPowerOfTwoKind(intptr_t v1, intptr_t
v2) {
6415 BranchLabels labels = {
nullptr,
nullptr,
nullptr};
6419 const bool is_power_of_two_kind = IsPowerOfTwoKind(if_true_, if_false_);
6421 intptr_t true_value = if_true_;
6422 intptr_t false_value = if_false_;
6424 if (is_power_of_two_kind) {
6425 if (true_value == 0) {
6430 if (true_value == 0) {
6432 intptr_t temp = true_value;
6433 true_value = false_value;
6440 __ setcc(true_condition,
DL);
6442 if (is_power_of_two_kind) {
6443 const intptr_t shift =
6450 if (false_value != 0) {
6458 const intptr_t kNumInputs = 1;
6459 const intptr_t kNumTemps = 0;
6460 LocationSummary* summary =
new (zone)
6470 const Array& arguments_descriptor =
6485 UntaggedPcDescriptors::kOther,
locs(),
env());
6489LocationSummary* BooleanNegateInstr::MakeLocationSummary(Zone* zone,
6495void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
6496 Register input = locs()->in(0).reg();
6499 __ xorl(
result, compiler::Immediate(
6503LocationSummary* BoolToIntInstr::MakeLocationSummary(Zone* zone,
6509void BoolToIntInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
6513LocationSummary* IntToBoolInstr::MakeLocationSummary(Zone* zone,
6519void IntToBoolInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
6525 const intptr_t kNumInputs = (
type_arguments() !=
nullptr) ? 1 : 0;
6526 const intptr_t kNumTemps = 0;
6527 LocationSummary*
locs =
new (zone)
6540 compiler->GenerateStubCall(
source(), stub, UntaggedPcDescriptors::kOther,
6544void DebugStepCheckInstr::EmitNativeCode(FlowGraphCompiler*
compiler) {
6549 __ Call(StubCode::DebugStepCheck());
static void done(const char *config, const char *src, const char *srcOptions, const char *name)
static void fail(const SkString &err)
static bool match(const char *needle, const char *haystack)
static bool are_equal(skiatest::Reporter *reporter, const SkMatrix &a, const SkMatrix &b)
static int float_bits(float f)
static bool ok(int result)
static bool left(const SkPoint &p0, const SkPoint &p1)
static bool right(const SkPoint &p0, const SkPoint &p1)
static bool subtract(const R &a, const R &b, R *out)
#define ASSERT_EQUAL(expected, actual)
#define RELEASE_ASSERT(cond)
#define COMPILE_ASSERT(expr)
intptr_t num_context_variables() const
Value * type_arguments() const
const Class & cls() const
intptr_t num_context_variables() const
static intptr_t type_arguments_offset()
static intptr_t InstanceSize()
static constexpr bool IsValidLength(intptr_t len)
static intptr_t length_offset()
Token::Kind op_kind() const
bool can_overflow() const
Token::Kind op_kind() const
bool RightIsPowerOfTwoConstant() const
Range * right_range() const
ParallelMoveInstr * parallel_move() const
bool HasParallelMove() const
BlockEntryInstr(intptr_t block_id, intptr_t try_index, intptr_t deopt_id, intptr_t stack_depth)
static const Bool & False()
static const Bool & True()
static void Allocate(FlowGraphCompiler *compiler, Instruction *instruction, const Class &cls, Register result, Register temp)
Representation from_representation() const
virtual bool ValueFitsSmi() const
ComparisonInstr * comparison() const
virtual Representation RequiredInputRepresentation(intptr_t idx) const
intptr_t index_scale() const
static constexpr Register kFfiAnyNonAbiRegister
static constexpr Register kReturnReg
static constexpr Register kSecondNonArgumentRegister
static constexpr bool kUsesRet4
const RuntimeEntry & TargetFunction() const
bool IsDeoptIfNull() const
DECLARE_INSTRUCTION_SERIALIZABLE_FIELDS(CheckClassInstr, TemplateInstruction, FIELD_LIST) private void EmitBitTest(FlowGraphCompiler *compiler, intptr_t min, intptr_t max, intptr_t mask, compiler::Label *deopt)
void EmitNullCheck(FlowGraphCompiler *compiler, compiler::Label *deopt)
bool IsDeoptIfNotNull() const
intptr_t loop_depth() const
static intptr_t instructions_offset()
static intptr_t entry_point_offset(EntryKind kind=EntryKind::kNormal)
virtual void EmitBranchCode(FlowGraphCompiler *compiler, BranchInstr *branch)
virtual void EmitNativeCode(FlowGraphCompiler *compiler)
virtual Condition EmitComparisonCode(FlowGraphCompiler *compiler, BranchLabels labels)=0
const Object & value() const
void EmitMoveToLocation(FlowGraphCompiler *compiler, const Location &destination, Register tmp=kNoRegister, intptr_t pair_index=0)
static intptr_t num_variables_offset()
static intptr_t InstanceSize()
virtual Value * num_elements() const
virtual Representation representation() const
MethodRecognizer::Kind op_kind() const
MethodRecognizer::Kind recognized_kind() const
static intptr_t value_offset()
static DoublePtr NewCanonical(double d)
bool is_null_aware() const
void EmitReturnMoves(FlowGraphCompiler *compiler, const Register temp0, const Register temp1)
DECLARE_INSTRUCTION_SERIALIZABLE_FIELDS(FfiCallInstr, VariadicDefinition, FIELD_LIST) private void EmitParamMoves(FlowGraphCompiler *compiler, const Register saved_fp, const Register temp0, const Register temp1)
virtual Representation representation() const
static bool CanExecuteGeneratedCodeInSafepoint()
intptr_t TargetAddressIndex() const
static intptr_t guarded_cid_offset()
static intptr_t guarded_list_length_in_object_offset_offset()
intptr_t guarded_cid() const
static intptr_t is_nullable_offset()
static intptr_t guarded_list_length_offset()
static intptr_t entry_point_offset(CodeEntryKind entry_kind=CodeEntryKind::kNormal)
ParallelMoveInstr * parallel_move() const
BlockEntryInstr * block() const
bool HasParallelMove() const
JoinEntryInstr * successor() const
FunctionEntryInstr * normal_entry() const
OsrEntryInstr * osr_entry() const
const Field & field() const
ComparisonInstr * comparison() const
virtual Representation RequiredInputRepresentation(intptr_t idx) const
const AbstractType & type() const
Environment * env() const
virtual LocationSummary * MakeLocationSummary(Zone *zone, bool is_optimizing) const =0
virtual void EmitNativeCode(FlowGraphCompiler *compiler)
void InitializeLocationSummary(Zone *zone, bool optimizing)
InstructionSource source() const
intptr_t deopt_id() const
static LocationSummary * MakeCallSummary(Zone *zone, const Instruction *instr, LocationSummary *locs=nullptr)
static intptr_t HeaderSize()
Representation to() const
Representation from() const
const RuntimeEntry & TargetFunction() const
MethodRecognizer::Kind recognized_kind() const
static constexpr intptr_t kDoubleTempIndex
static constexpr intptr_t kObjectTempIndex
intptr_t TargetAddressIndex() const
void EmitParamMoves(FlowGraphCompiler *compiler, Register saved_fp, Register temp0)
LocationSummary * MakeLocationSummaryInternal(Zone *zone, const RegList temps) const
virtual Representation representation() const
intptr_t index_scale() const
bool can_pack_into_smi() const
intptr_t element_count() const
intptr_t class_id() const
intptr_t class_id() const
intptr_t index_scale() const
Representation representation() const
const LocalVariable & local() const
Location temp(intptr_t index) const
Location out(intptr_t index) const
static LocationSummary * Make(Zone *zone, intptr_t input_count, Location out, ContainsCall contains_call)
void set_temp(intptr_t index, Location loc)
void set_out(intptr_t index, Location loc)
Location in(intptr_t index) const
void set_in(intptr_t index, Location loc)
static Location StackSlot(intptr_t stack_index, Register base)
static Location NoLocation()
static Location SameAsFirstInput()
static Location Pair(Location first, Location second)
static Location FpuRegisterLocation(FpuRegister reg)
static Location WritableRegister()
static Location RegisterLocation(Register reg)
static Location PrefersRegister()
static Location RequiresRegister()
static Location RequiresFpuRegister()
static Location Constant(const ConstantInstr *obj, int pair_index=0)
intptr_t result_cid() const
MethodRecognizer::Kind op_kind() const
bool unboxed_inputs() const
Value * src_start() const
void EmitLoopCopy(FlowGraphCompiler *compiler, Register dest_reg, Register src_reg, Register length_reg, compiler::Label *done, compiler::Label *copy_forwards=nullptr)
void PrepareLengthRegForLoop(FlowGraphCompiler *compiler, Register length_reg, compiler::Label *done)
Value * dest_start() const
static intptr_t value_offset()
virtual Representation representation() const
Location location() const
static int ComputeArgcTag(const Function &function)
bool is_auto_scope() const
bool is_bootstrap_native() const
const Function & function() const
NativeFunction native_c_function() const
static Object & ZoneHandle()
Value * char_code() const
static intptr_t data_offset()
static bool Overlaps(Range *range, intptr_t min, intptr_t max)
static bool OnlyLessThanOrEqualTo(Range *range, intptr_t value)
static bool IsWithin(const Range *range, int64_t min, int64_t max)
static bool IsPositive(Range *range)
static bool CanBeZero(Range *range)
DECLARE_INSTRUCTION_SERIALIZABLE_FIELDS(ShiftIntegerOpInstr, BinaryIntegerOpInstr, FIELD_LIST) protected bool IsShiftCountInRange(int64_t max=kShiftCountLimit) const
Range * shift_range() const
static constexpr intptr_t kBits
static SmiPtr New(intptr_t value)
static constexpr intptr_t kMaxValue
static intptr_t RawValue(intptr_t value)
const char * message() const
bool ShouldEmitStoreBarrier() const
virtual Representation RequiredInputRepresentation(intptr_t idx) const
intptr_t class_id() const
intptr_t index_scale() const
const LocalVariable & local() const
const Field & field() const
bool needs_number_check() const
static intptr_t length_offset()
static CodePtr GetAllocationStubForClass(const Class &cls)
static constexpr int kNullCharCodeSymbolOffset
static StringPtr * PredefinedAddress()
static bool sse4_1_supported()
intptr_t ArgumentCount() const
ArrayPtr GetArgumentsDescriptor() const
virtual intptr_t InputCount() const
const ZoneGrowableArray< intptr_t > & cid_results() const
static intptr_t stack_limit_offset()
static bool IsEqualityOperator(Kind tok)
virtual Representation representation() const
Token::Kind op_kind() const
Token::Kind op_kind() const
virtual Representation representation() const
bool is_truncating() const
uword constant_address() const
virtual Representation representation() const
bool IsScanFlagsUnboxed() const
static int32_t Low32Bits(int64_t value)
static constexpr T Maximum(T x, T y)
static constexpr int ShiftForPowerOfTwo(T x)
static T Minimum(T x, T y)
static T AddWithWrapAround(T a, T b)
static bool DoublesBitEqual(const double a, const double b)
static constexpr size_t HighestBit(int64_t v)
static constexpr bool IsPowerOfTwo(T x)
bool BindsToConstant() const
intptr_t BoundSmiConstant() const
bool BindsToSmiConstant() const
Definition * definition() const
Value(Definition *definition)
intptr_t InputCount() const
static Address Absolute(const uword addr)
void static bool EmittingComments()
Address ElementAddressForRegIndex(bool is_load, bool is_external, intptr_t cid, intptr_t index_scale, bool index_unboxed, Register array, Register index)
static Address VMTagAddress()
static bool AddressCanHoldConstantIndex(const Object &constant, bool is_load, bool is_external, intptr_t cid, intptr_t index_scale, bool *needs_base=nullptr)
static bool IsSafeSmi(const Object &object)
static bool IsSafe(const Object &object)
intptr_t StackTopInBytes() const
FlutterSemanticsFlag flag
FlutterSemanticsFlag flags
static float max(float r, float g, float b)
static float min(float r, float g, float b)
#define CASE(Arity, Mask, Name, Args, Result)
#define DEFINE_UNIMPLEMENTED_INSTRUCTION(Name)
#define DEFINE_BACKEND(Name, Args)
const intptr_t kResultIndex
bool HasIntegerValue(const dart::Object &object, int64_t *value)
Location LocationRegisterOrConstant(Value *value)
const Register kWriteBarrierSlotReg
uword FindDoubleConstant(double value)
static Condition InvertCondition(Condition c)
static constexpr int kSavedCallerPcSlotFromFp
bool IsTypedDataBaseClassId(intptr_t index)
static bool IsSmiValue(Value *val, intptr_t *int_val)
const Register kExceptionObjectReg
const Register kWriteBarrierObjectReg
constexpr int32_t kMinInt32
const Register kWriteBarrierValueReg
static constexpr bool IsCalleeSavedRegister(Register reg)
constexpr intptr_t kIntptrMin
static const ClassId kLastErrorCid
constexpr intptr_t kSimd128Size
static const ClassId kFirstErrorCid
const Register ARGS_DESC_REG
bool IsClampedTypedDataBaseClassId(intptr_t index)
Location LocationFixedRegisterOrConstant(Value *value, Register reg)
bool IsExternalPayloadClassId(classid_t cid)
constexpr intptr_t kInt32Size
static constexpr int kPcMarkerSlotFromFp
const Register FUNCTION_REG
const Register IC_DATA_REG
constexpr int32_t kMaxInt32
compiler::Address LocationToStackSlotAddress(Location loc)
constexpr intptr_t kWordSize
Location LocationWritableRegisterOrConstant(Value *value)
static bool IsConstant(Definition *def, int64_t *val)
constexpr intptr_t kFloatSize
const Register kStackTraceObjectReg
static int8_t data[kExtLength]
constexpr intptr_t kDoubleSize
Location LocationFixedRegisterOrSmiConstant(Value *value, Register reg)
static ScaleFactor ToScaleFactor(intptr_t index_scale, bool index_unboxed)
Location LocationRegisterOrSmiConstant(Value *value, intptr_t min_value, intptr_t max_value)
constexpr intptr_t kBitsPerInt64
SIN Vec< N, uint16_t > mull(const Vec< N, uint8_t > &x, const Vec< N, uint8_t > &y)
static constexpr Register kResultReg
static constexpr Register kLengthReg
static constexpr Register kTypeArgumentsReg
static constexpr Register kTypeArgumentsReg
static constexpr Register kResultReg
static constexpr Register kObjectReg
static constexpr Representation NativeRepresentation(Representation rep)
static constexpr intptr_t kBoolVsNullMask
static constexpr intptr_t kBoolValueMask
static constexpr size_t ValueSize(Representation rep)
static constexpr bool IsUnboxedInteger(Representation rep)
static compiler::OperandSize OperandSize(Representation rep)
static constexpr bool IsUnboxed(Representation rep)
static bool IsUnsignedInteger(Representation rep)
static Representation RepresentationOfArrayElement(classid_t cid)
static constexpr Register kDstTypeReg
static constexpr Register kInstanceReg
static constexpr Register kFunctionTypeArgumentsReg
static constexpr Register kInstantiatorTypeArgumentsReg
#define ASSERT_BOOL_FALSE_FOLLOWS_BOOL_TRUE()