6#if defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
8#define SHOULD_NOT_INCLUDE_RUNTIME
// VM flag: forces far (auipc+jalr) branch sequences. Valid values are
// 0..2 (enforced by ASSERTs in the MicroAssembler constructor).
22DEFINE_FLAG(
int, far_branch_level, 0,
"Always use far branches");
// MicroAssembler constructor (fragment — the opening signature line and part
// of the initializer list are elided in this chunk). Stores far_branch_level_
// and validates that the flag value is within [0, 2].
27 intptr_t far_branch_level,
28 ExtensionSet extensions)
29 : AssemblerBase(object_pool_builder),
31 far_branch_level_(far_branch_level) {
32 ASSERT(far_branch_level >= 0);
33 ASSERT(far_branch_level <= 2);
// Trivial destructor; no resources beyond those owned by AssemblerBase.
36MicroAssembler::~MicroAssembler() {}
// Binds |label| to the current position and back-patches every pending
// branch that targets it. Each unresolved_* field heads an intrusive linked
// list threaded through the instruction stream: the patcher (update) returns
// the previous offset stored in the instruction, which is the delta to the
// next list entry; a stored offset of 0 terminates the chain.
// NOTE(review): this chunk is missing interior lines (macro tail / closing
// braces elided), so the visible text is not the complete function.
38void MicroAssembler::Bind(Label* label) {
40 intptr_t target_position = Position();
41 intptr_t branch_position;
43#define BIND(head, update) \
44 branch_position = label->head; \
45 while (branch_position >= 0) { \
46 ASSERT(Utils::IsAligned(branch_position, Supports(RV_C) ? 2 : 4)); \
47 intptr_t new_offset = target_position - branch_position; \
48 ASSERT(Utils::IsAligned(new_offset, Supports(RV_C) ? 2 : 4)); \
49 intptr_t old_offset = update(branch_position, new_offset); \
50 if (old_offset == 0) break; \
51 branch_position -= old_offset; \
// One fixup pass per branch flavor: compressed conditional (cb), compressed
// jump (cj), full conditional (b), full jump (j), and far auipc+jalr pairs.
55 BIND(unresolved_cb_, UpdateCBOffset);
56 BIND(unresolved_cj_, UpdateCJOffset);
57 BIND(unresolved_b_, UpdateBOffset);
58 BIND(unresolved_j_, UpdateJOffset);
59 BIND(unresolved_far_, UpdateFarOffset);
61 label->BindTo(target_position);
// Re-encodes the 16-bit compressed conditional branch (c.beqz/c.bnez) at
// |branch_position| with |new_offset|. The old b_imm is the link to the next
// unresolved branch in the label's chain; the return of old_offset and the
// range check guarding the FATAL are elided in this chunk.
64intptr_t MicroAssembler::UpdateCBOffset(intptr_t branch_position,
65 intptr_t new_offset) {
66 CInstr instr(Read16(branch_position));
67 ASSERT((instr.opcode() == C_BEQZ) || (instr.opcode() == C_BNEZ));
68 intptr_t old_offset = instr.b_imm();
// Offset no longer fits the compressed encoding: caller asked for kNearJump
// but the target is too far away.
70 FATAL(
"Incorrect Assembler::kNearJump");
72 Write16(branch_position,
73 instr.opcode() | EncodeCRs1p(instr.rs1p()) |
EncodeCBImm(new_offset));
// Re-encodes the 16-bit compressed jump (c.j/c.jal) at |branch_position|
// with |new_offset|. Mirrors UpdateCBOffset; the fits-check before FATAL and
// the return statement are elided in this chunk.
77intptr_t MicroAssembler::UpdateCJOffset(intptr_t branch_position,
78 intptr_t new_offset) {
79 CInstr instr(Read16(branch_position));
80 ASSERT((instr.opcode() == C_J) || (instr.opcode() == C_JAL));
81 intptr_t old_offset = instr.j_imm();
83 FATAL(
"Incorrect Assembler::kNearJump");
85 Write16(branch_position, instr.opcode() |
EncodeCJImm(new_offset));
// Re-encodes the 32-bit B-type conditional branch at |branch_position| with
// |new_offset|, preserving rs1/rs2/funct3/opcode. The B-type immediate term
// of the Write32 expression and the return are elided in this chunk.
89intptr_t MicroAssembler::UpdateBOffset(intptr_t branch_position,
90 intptr_t new_offset) {
91 Instr instr(Read32(branch_position));
92 ASSERT(instr.opcode() == BRANCH);
93 intptr_t old_offset = instr.btype_imm();
97 Write32(branch_position, EncodeRs2(instr.rs2()) | EncodeRs1(instr.rs1()) |
98 EncodeFunct3(instr.funct3()) |
99 EncodeOpcode(instr.opcode()) |
// Re-encodes the 32-bit J-type jump (jal) at |branch_position| with
// |new_offset|, preserving rd/opcode. The J-type immediate term and the
// return are elided in this chunk.
104intptr_t MicroAssembler::UpdateJOffset(intptr_t branch_position,
105 intptr_t new_offset) {
106 Instr instr(Read32(branch_position));
107 ASSERT(instr.opcode() == JAL);
108 intptr_t old_offset = instr.jtype_imm();
112 Write32(branch_position, EncodeRd(instr.rd()) | EncodeOpcode(instr.opcode()) |
// Re-encodes a far-branch pair (auipc FAR_TMP, hi ; jalr ZR, FAR_TMP, lo) at
// |branch_position| with |new_offset|. The combined previous offset is the
// sum of the U-type (hi) and I-type (lo) immediates. ImmHi/ImmLo split
// new_offset so that hi + sign-extended(lo) == new_offset; the range guard
// before the FATAL, the auipc Write32 operands, and the return are elided
// in this chunk.
117intptr_t MicroAssembler::UpdateFarOffset(intptr_t branch_position,
118 intptr_t new_offset) {
119 Instr auipc_instr(Read32(branch_position));
120 ASSERT(auipc_instr.opcode() == AUIPC);
121 ASSERT(auipc_instr.rd() == FAR_TMP);
122 Instr jr_instr(Read32(branch_position + 4));
123 ASSERT(jr_instr.opcode() == JALR);
124 ASSERT(jr_instr.rd() == ZR);
125 ASSERT(jr_instr.funct3() == F3_0);
126 ASSERT(jr_instr.rs1() == FAR_TMP);
127 intptr_t old_offset = auipc_instr.utype_imm() + jr_instr.itype_imm();
128 intx_t lo =
ImmLo(new_offset);
129 intx_t hi =
ImmHi(new_offset);
// Even the far form only reaches +/-2GB (auipc range).
131 FATAL(
"Jump/branch distance exceeds 2GB!");
133 Write32(branch_position,
135 Write32(branch_position + 4,
EncodeITypeImm(lo) | EncodeRs1(FAR_TMP) |
136 EncodeFunct3(F3_0) | EncodeRd(ZR) |
// lui: load upper immediate. When the C extension is available and the
// operands qualify (rd not ZR/SP, imm fits C-form), a compressed c.lui is
// presumably emitted — the call inside the guard is elided in this chunk.
// Otherwise falls through to the full 32-bit U-type encoding.
141void MicroAssembler::lui(Register rd, intptr_t imm) {
143 if (Supports(RV_C) && (rd != ZR) && (rd != SP) &&
IsCUImm(imm)) {
147 EmitUType(imm, rd, LUI);
// lui_fixed: always emits the full 4-byte lui (never compressed), so the
// instruction has a fixed size — useful for later patching.
150void MicroAssembler::lui_fixed(Register rd, intptr_t imm) {
152 EmitUType(imm, rd, LUI);
// auipc: add upper immediate to pc (no compressed form exists).
155void MicroAssembler::auipc(Register rd, intptr_t imm) {
157 EmitUType(imm, rd, AUIPC);
// jal: jump-and-link to |label|. Tries the compressed form when the C
// extension is present and either the caller asked for kNearJump or the
// label is already bound within c.j range; the compressed emission and the
// rd-based dispatch inside the guard are elided in this chunk. Falls back
// to EmitJump (full J-type, possibly far sequence depending on |distance|).
160void MicroAssembler::jal(Register rd, Label* label, JumpDistance distance) {
162 if (Supports(RV_C) &&
163 ((distance == kNearJump) ||
164 (label->IsBound() &&
IsCJImm(label->Position() - Position())))) {
176 EmitJump(rd, label, JAL, distance);
// jalr: indirect jump-and-link. With RV_C, zero-offset forms may compress
// (c.jr when rs1 != ZR, c.jalr when rd == RA — bodies elided here);
// otherwise emits the full I-type JALR.
179void MicroAssembler::jalr(Register rd, Register rs1, intptr_t
offset) {
181 if (Supports(RV_C)) {
182 if (rs1 != ZR &&
offset == 0) {
186 }
else if (rd ==
RA) {
192 EmitIType(
offset, rs1, F3_0, rd, JALR);
// jalr_fixed: always the full 4-byte encoding (never compressed), so the
// instruction size is stable for patching.
195void MicroAssembler::jalr_fixed(Register rd, Register rs1, intptr_t
offset) {
197 EmitIType(
offset, rs1, F3_0, rd, JALR);
// Conditional branches. beq/bne can compress to c.beqz/c.bnez when one
// operand is ZR, the other fits the 3-bit register set (IsCRs1p), and the
// target is near (the compressed emission calls are elided in this chunk).
// blt/bge/bltu/bgeu have no compressed forms and always use EmitBranch.
200void MicroAssembler::beq(Register rs1,
203 JumpDistance distance) {
205 if (Supports(RV_C) &&
206 ((distance == kNearJump) ||
207 (label->IsBound() &&
IsCBImm(label->Position() - Position())))) {
208 if ((rs1 == ZR) && IsCRs1p(rs2)) {
211 }
else if ((rs2 == ZR) && IsCRs1p(rs1)) {
216 EmitBranch(rs1, rs2, label, BEQ, distance);
219void MicroAssembler::bne(Register rs1,
222 JumpDistance distance) {
224 if (Supports(RV_C) &&
225 ((distance == kNearJump) ||
226 (label->IsBound() &&
IsCBImm(label->Position() - Position())))) {
227 if ((rs1 == ZR) && IsCRs1p(rs2)) {
230 }
else if ((rs2 == ZR) && IsCRs1p(rs1)) {
235 EmitBranch(rs1, rs2, label, BNE, distance);
238void MicroAssembler::blt(Register rs1,
241 JumpDistance distance) {
243 EmitBranch(rs1, rs2, label, BLT, distance);
246void MicroAssembler::bge(Register rs1,
249 JumpDistance distance) {
251 EmitBranch(rs1, rs2, label, BGE, distance);
254void MicroAssembler::bltu(Register rs1,
257 JumpDistance distance) {
259 EmitBranch(rs1, rs2, label, BLTU, distance);
262void MicroAssembler::bgeu(Register rs1,
265 JumpDistance distance) {
266 EmitBranch(rs1, rs2, label, BGEU, distance);
// Scalar loads and stores. Each emits an I-type (loads) or S-type (stores)
// instruction with the address's offset and base register. lw/sw check for
// RV_C first — the compressed-form candidates inside those guards are elided
// in this chunk.
269void MicroAssembler::lb(Register rd, Address addr) {
271 EmitIType(
addr.offset(),
addr.base(), LB, rd, LOAD);
274void MicroAssembler::lh(Register rd, Address addr) {
276 EmitIType(
addr.offset(),
addr.base(),
LH, rd, LOAD);
279void MicroAssembler::lw(Register rd, Address addr) {
281 if (Supports(RV_C)) {
291 EmitIType(
addr.offset(),
addr.base(), LW, rd, LOAD);
294void MicroAssembler::lbu(Register rd, Address addr) {
296 EmitIType(
addr.offset(),
addr.base(), LBU, rd, LOAD);
299void MicroAssembler::lhu(Register rd, Address addr) {
301 EmitIType(
addr.offset(),
addr.base(), LHU, rd, LOAD);
304void MicroAssembler::sb(Register rs2, Address addr) {
306 EmitSType(
addr.offset(), rs2,
addr.base(), SB, STORE);
309void MicroAssembler::sh(Register rs2, Address addr) {
311 EmitSType(
addr.offset(), rs2,
addr.base(),
SH, STORE);
314void MicroAssembler::sw(Register rs2, Address addr) {
316 if (Supports(RV_C)) {
326 EmitSType(
addr.offset(), rs2,
addr.base(),
SW, STORE);
// Register-immediate ALU ops. addi has the richest set of compressed
// alternatives (c.li, c.addi, c.addi16sp, c.addi4spn, c.nop, c.mv — the
// emission calls for several cases are elided in this chunk); the others
// compress only in narrower cases or not at all.
329void MicroAssembler::addi(Register rd, Register rs1, intptr_t imm) {
331 if (Supports(RV_C)) {
// rd = ZR + imm: load-immediate form.
332 if ((rd != ZR) && (rs1 == ZR) &&
IsCIImm(imm)) {
336 if ((rd == rs1) &&
IsCIImm(imm) && (imm != 0)) {
337 c_addi(rd, rs1, imm);
// SP-relative stack adjustment (16-byte-scaled immediate).
340 if ((rd == SP) && (rs1 == SP) &&
IsCI16Imm(imm) && (imm != 0)) {
341 c_addi16sp(rd, rs1, imm);
// Allocate/address into a 3-bit register from SP (4-byte-scaled).
344 if (IsCRdp(rd) && (rs1 == SP) &&
IsCI4SPNImm(imm) && (imm != 0)) {
345 c_addi4spn(rd, rs1, imm);
349 if ((rd == ZR) && (rs1 == ZR)) {
353 if ((rd != ZR) && (rs1 != ZR)) {
359 EmitIType(imm, rs1, ADDI, rd, OPIMM);
362void MicroAssembler::slti(Register rd, Register rs1, intptr_t imm) {
364 EmitIType(imm, rs1, SLTI, rd, OPIMM);
367void MicroAssembler::sltiu(Register rd, Register rs1, intptr_t imm) {
369 EmitIType(imm, rs1, SLTIU, rd, OPIMM);
372void MicroAssembler::xori(Register rd, Register rs1, intptr_t imm) {
374 EmitIType(imm, rs1, XORI, rd, OPIMM);
377void MicroAssembler::ori(Register rd, Register rs1, intptr_t imm) {
379 EmitIType(imm, rs1, ORI, rd, OPIMM);
382void MicroAssembler::andi(Register rd, Register rs1, intptr_t imm) {
384 if (Supports(RV_C)) {
385 if ((rd == rs1) && IsCRs1p(rs1) &&
IsCIImm(imm)) {
386 c_andi(rd, rs1, imm);
390 EmitIType(imm, rs1, ANDI, rd, OPIMM);
// Shifts-by-immediate: shamt must be in (0, XLEN); encoded in the rs2/funct7
// fields of an R-type-shaped OPIMM instruction.
393void MicroAssembler::slli(Register rd, Register rs1, intptr_t shamt) {
394 ASSERT((shamt > 0) && (shamt < XLEN));
396 if (Supports(RV_C)) {
397 if ((rd == rs1) && (shamt != 0) &&
IsCIImm(shamt)) {
398 c_slli(rd, rs1, shamt);
402 EmitRType(F7_0, shamt, rs1, SLLI, rd, OPIMM);
405void MicroAssembler::srli(Register rd, Register rs1, intptr_t shamt) {
406 ASSERT((shamt > 0) && (shamt < XLEN));
408 if (Supports(RV_C)) {
409 if ((rd == rs1) && IsCRs1p(rs1) && (shamt != 0) &&
IsCIImm(shamt)) {
410 c_srli(rd, rs1, shamt);
414 EmitRType(F7_0, shamt, rs1, SRI, rd, OPIMM);
417void MicroAssembler::srai(Register rd, Register rs1, intptr_t shamt) {
418 ASSERT((shamt > 0) && (shamt < XLEN));
420 if (Supports(RV_C)) {
421 if ((rd == rs1) && IsCRs1p(rs1) && (shamt != 0) &&
IsCIImm(shamt)) {
422 c_srai(rd, rs1, shamt);
// SRA funct7 bit distinguishes arithmetic from logical right shift.
426 EmitRType(SRA, shamt, rs1, SRI, rd, OPIMM);
// Register-register ALU ops. Commutative ops (add, xor_, or_, and_) can
// compress when rd aliases either source and both map into the compressed
// register set; sub only when rd == rs1. The compressed emission calls are
// elided in this chunk; the fall-through emits the full R-type form.
429void MicroAssembler::add(Register rd, Register rs1, Register rs2) {
431 if (Supports(RV_C)) {
441 EmitRType(F7_0, rs2, rs1, ADD, rd, OP);
444void MicroAssembler::sub(Register rd, Register rs1, Register rs2) {
446 if (Supports(RV_C)) {
447 if ((rd == rs1) && IsCRs1p(rs1) && IsCRs2p(rs2)) {
// SUB shares the ADD funct3; the SUB funct7 bit selects subtraction.
452 EmitRType(SUB, rs2, rs1, ADD, rd, OP);
455void MicroAssembler::sll(Register rd, Register rs1, Register rs2) {
457 EmitRType(F7_0, rs2, rs1, SLL, rd, OP);
460void MicroAssembler::slt(Register rd, Register rs1, Register rs2) {
462 EmitRType(F7_0, rs2, rs1, SLT, rd, OP);
465void MicroAssembler::sltu(Register rd, Register rs1, Register rs2) {
467 EmitRType(F7_0, rs2, rs1, SLTU, rd, OP);
470void MicroAssembler::xor_(Register rd, Register rs1, Register rs2) {
472 if (Supports(RV_C)) {
473 if ((rd == rs1) && IsCRs1p(rs1) && IsCRs2p(rs2)) {
477 if ((rd == rs2) && IsCRs1p(rs1) && IsCRs2p(rs2)) {
482 EmitRType(F7_0, rs2, rs1, XOR, rd, OP);
485void MicroAssembler::srl(Register rd, Register rs1, Register rs2) {
487 EmitRType(F7_0, rs2, rs1, SR, rd, OP);
490void MicroAssembler::sra(Register rd, Register rs1, Register rs2) {
492 EmitRType(SRA, rs2, rs1, SR, rd, OP);
495void MicroAssembler::or_(Register rd, Register rs1, Register rs2) {
497 if (Supports(RV_C)) {
498 if ((rd == rs1) && IsCRs1p(rs1) && IsCRs2p(rs2)) {
502 if ((rd == rs2) && IsCRs1p(rs1) && IsCRs2p(rs2)) {
507 EmitRType(F7_0, rs2, rs1, OR, rd, OP);
510void MicroAssembler::and_(Register rd, Register rs1, Register rs2) {
512 if (Supports(RV_C)) {
513 if ((rd == rs1) && IsCRs1p(rs1) && IsCRs2p(rs2)) {
517 if ((rd == rs2) && IsCRs1p(rs1) && IsCRs2p(rs2)) {
522 EmitRType(F7_0, rs2, rs1, AND, rd, OP);
// System and CSR instructions.
// fence packs the predecessor/successor effect sets into the I-type
// immediate (predecessor in bits 7..4, successor in bits 3..0).
525void MicroAssembler::fence(HartEffects predecessor, HartEffects successor) {
529 EmitIType((predecessor << 4) | successor, ZR, FENCE, ZR, MISCMEM);
532void MicroAssembler::fencei() {
534 EmitIType(0, ZR, FENCEI, ZR, MISCMEM);
537void MicroAssembler::ecall() {
539 EmitIType(ECALL, ZR, F3_0, ZR, SYSTEM);
// ebreak: may compress to c.ebreak when RV_C is present (call elided here).
541void MicroAssembler::ebreak() {
543 if (Supports(RV_C)) {
547 EmitIType(EBREAK, ZR, F3_0, ZR, SYSTEM);
// Simulator-only hook: an ECALL whose rs1 carries the object to print.
549void MicroAssembler::SimulatorPrintObject(Register rs1) {
551 EmitIType(ECALL, rs1, F3_0, ZR, SYSTEM);
// CSR read/write ops: the CSR number travels in the I-type immediate field.
554void MicroAssembler::csrrw(Register rd, uint32_t csr, Register rs1) {
556 EmitIType(csr, rs1, CSRRW, rd, SYSTEM);
559void MicroAssembler::csrrs(Register rd, uint32_t csr, Register rs1) {
561 EmitIType(csr, rs1, CSRRS, rd, SYSTEM);
564void MicroAssembler::csrrc(Register rd, uint32_t csr, Register rs1) {
566 EmitIType(csr, rs1, CSRRC, rd, SYSTEM);
// Immediate CSR variants: the 5-bit immediate is encoded in the rs1 field,
// hence the Register(imm) cast.
569void MicroAssembler::csrrwi(Register rd, uint32_t csr, uint32_t imm) {
571 EmitIType(csr,
Register(imm), CSRRWI, rd, SYSTEM);
574void MicroAssembler::csrrsi(Register rd, uint32_t csr, uint32_t imm) {
576 EmitIType(csr,
Register(imm), CSRRSI, rd, SYSTEM);
579void MicroAssembler::csrrci(Register rd, uint32_t csr, uint32_t imm) {
581 EmitIType(csr,
Register(imm), CSRRCI, rd, SYSTEM);
// trap: emits a guaranteed-illegal/trap instruction; the compressed and
// uncompressed emission calls are elided in this chunk.
584void MicroAssembler::trap() {
586 if (Supports(RV_C)) {
// RV64-only loads/stores and 32-bit-result arithmetic.
594void MicroAssembler::lwu(Register rd, Address addr) {
596 EmitIType(
addr.offset(),
addr.base(), LWU, rd, LOAD);
599void MicroAssembler::ld(Register rd, Address addr) {
601 if (Supports(RV_C)) {
611 EmitIType(
addr.offset(),
addr.base(), LD, rd, LOAD);
614void MicroAssembler::sd(Register rs2, Address addr) {
616 if (Supports(RV_C)) {
626 EmitSType(
addr.offset(), rs2,
addr.base(), SD, STORE);
// addiw: add-immediate word (sign-extends the 32-bit result). The c.li-style
// compressed case body is elided in this chunk.
629void MicroAssembler::addiw(Register rd, Register rs1, intptr_t imm) {
631 if (Supports(RV_C)) {
632 if ((rd != ZR) && (rs1 == ZR) &&
IsCIImm(imm)) {
636 if ((rd == rs1) && (rd != ZR) &&
IsCIImm(imm)) {
637 c_addiw(rd, rs1, imm);
641 EmitIType(imm, rs1, ADDI, rd, OPIMM32);
// RV64 32-bit (word) shift and arithmetic ops; results are sign-extended.
// NOTE(review): sraiw's ASSERT bounds shamt by XLEN while slliw/srliw use 32
// — presumably sraiw should also use 32; verify against the upstream file.
644void MicroAssembler::slliw(Register rd, Register rs1, intptr_t shamt) {
645 ASSERT((shamt > 0) && (shamt < 32));
647 EmitRType(F7_0, shamt, rs1, SLLI, rd, OPIMM32);
650void MicroAssembler::srliw(Register rd, Register rs1, intptr_t shamt) {
651 ASSERT((shamt > 0) && (shamt < 32));
653 EmitRType(F7_0, shamt, rs1, SRI, rd, OPIMM32);
656void MicroAssembler::sraiw(Register rd, Register rs1, intptr_t shamt) {
657 ASSERT((shamt > 0) && (shamt < XLEN));
659 EmitRType(SRA, shamt, rs1, SRI, rd, OPIMM32);
// addw/subw compress to c.addw/c.subw when operands fit the 3-bit register
// set; addw exploits commutativity by swapping sources when rd == rs2.
662void MicroAssembler::addw(Register rd, Register rs1, Register rs2) {
664 if (Supports(RV_C)) {
665 if ((rd == rs1) && IsCRs1p(rs1) && IsCRs2p(rs2)) {
666 c_addw(rd, rs1, rs2);
669 if ((rd == rs2) && IsCRs1p(rs1) && IsCRs2p(rs2)) {
670 c_addw(rd, rs2, rs1);
674 EmitRType(F7_0, rs2, rs1, ADD, rd, OP32);
677void MicroAssembler::subw(Register rd, Register rs1, Register rs2) {
679 if (Supports(RV_C)) {
680 if ((rd == rs1) && IsCRs1p(rs1) && IsCRs2p(rs2)) {
681 c_subw(rd, rs1, rs2);
685 EmitRType(SUB, rs2, rs1, ADD, rd, OP32);
688void MicroAssembler::sllw(Register rd, Register rs1, Register rs2) {
690 EmitRType(F7_0, rs2, rs1, SLL, rd, OP32);
693void MicroAssembler::srlw(Register rd, Register rs1, Register rs2) {
695 EmitRType(F7_0, rs2, rs1, SR, rd, OP32);
697void MicroAssembler::sraw(Register rd, Register rs1, Register rs2) {
699 EmitRType(SRA, rs2, rs1, SR, rd, OP32);
// M extension: integer multiply/divide. All use the MULDIV funct7 with the
// per-op funct3; *w variants (OP32) are the RV64 32-bit-result forms.
703void MicroAssembler::mul(Register rd, Register rs1, Register rs2) {
705 EmitRType(MULDIV, rs2, rs1, MUL, rd, OP);
708void MicroAssembler::mulh(Register rd, Register rs1, Register rs2) {
710 EmitRType(MULDIV, rs2, rs1, MULH, rd, OP);
713void MicroAssembler::mulhsu(Register rd, Register rs1, Register rs2) {
715 EmitRType(MULDIV, rs2, rs1, MULHSU, rd, OP);
718void MicroAssembler::mulhu(Register rd, Register rs1, Register rs2) {
720 EmitRType(MULDIV, rs2, rs1, MULHU, rd, OP);
723void MicroAssembler::div(Register rd, Register rs1, Register rs2) {
725 EmitRType(MULDIV, rs2, rs1, DIV, rd, OP);
728void MicroAssembler::divu(Register rd, Register rs1, Register rs2) {
730 EmitRType(MULDIV, rs2, rs1, DIVU, rd, OP);
733void MicroAssembler::rem(Register rd, Register rs1, Register rs2) {
735 EmitRType(MULDIV, rs2, rs1, REM, rd, OP);
738void MicroAssembler::remu(Register rd, Register rs1, Register rs2) {
740 EmitRType(MULDIV, rs2, rs1, REMU, rd, OP);
744void MicroAssembler::mulw(Register rd, Register rs1, Register rs2) {
746 EmitRType(MULDIV, rs2, rs1, MULW, rd, OP32);
749void MicroAssembler::divw(Register rd, Register rs1, Register rs2) {
751 EmitRType(MULDIV, rs2, rs1, DIVW, rd, OP32);
754void MicroAssembler::divuw(Register rd, Register rs1, Register rs2) {
756 EmitRType(MULDIV, rs2, rs1, DIVUW, rd, OP32);
759void MicroAssembler::remw(Register rd, Register rs1, Register rs2) {
761 EmitRType(MULDIV, rs2, rs1, REMW, rd, OP32);
764void MicroAssembler::remuw(Register rd, Register rs1, Register rs2) {
766 EmitRType(MULDIV, rs2, rs1, REMUW, rd, OP32);
// A extension: load-reserved/store-conditional and atomic memory operations.
// The std::memory_order argument occupies the funct7 position of EmitRType —
// presumably mapped to the aq/rl bits by the emitter; confirm upstream.
// WIDTH32 variants operate on 32-bit words, WIDTH64 on 64-bit doublewords.
// LR takes no source operand, hence the ZR in the rs2 slot.
770void MicroAssembler::lrw(Register rd, Address addr, std::memory_order order) {
773 EmitRType(
LR, order, ZR,
addr.base(), WIDTH32, rd, AMO);
775void MicroAssembler::scw(Register rd,
778 std::memory_order order) {
781 EmitRType(SC, order, rs2,
addr.base(), WIDTH32, rd, AMO);
784void MicroAssembler::amoswapw(Register rd,
787 std::memory_order order) {
790 EmitRType(AMOSWAP, order, rs2,
addr.base(), WIDTH32, rd, AMO);
793void MicroAssembler::amoaddw(Register rd,
796 std::memory_order order) {
799 EmitRType(AMOADD, order, rs2,
addr.base(), WIDTH32, rd, AMO);
802void MicroAssembler::amoxorw(Register rd,
805 std::memory_order order) {
808 EmitRType(AMOXOR, order, rs2,
addr.base(), WIDTH32, rd, AMO);
811void MicroAssembler::amoandw(Register rd,
814 std::memory_order order) {
817 EmitRType(AMOAND, order, rs2,
addr.base(), WIDTH32, rd, AMO);
820void MicroAssembler::amoorw(Register rd,
823 std::memory_order order) {
826 EmitRType(AMOOR, order, rs2,
addr.base(), WIDTH32, rd, AMO);
829void MicroAssembler::amominw(Register rd,
832 std::memory_order order) {
835 EmitRType(AMOMIN, order, rs2,
addr.base(), WIDTH32, rd, AMO);
838void MicroAssembler::amomaxw(Register rd,
841 std::memory_order order) {
844 EmitRType(AMOMAX, order, rs2,
addr.base(), WIDTH32, rd, AMO);
847void MicroAssembler::amominuw(Register rd,
850 std::memory_order order) {
853 EmitRType(AMOMINU, order, rs2,
addr.base(), WIDTH32, rd, AMO);
856void MicroAssembler::amomaxuw(Register rd,
859 std::memory_order order) {
862 EmitRType(AMOMAXU, order, rs2,
addr.base(), WIDTH32, rd, AMO);
// 64-bit (doubleword) forms of the same operations.
866void MicroAssembler::lrd(Register rd, Address addr, std::memory_order order) {
869 EmitRType(
LR, order, ZR,
addr.base(), WIDTH64, rd, AMO);
872void MicroAssembler::scd(Register rd,
875 std::memory_order order) {
878 EmitRType(SC, order, rs2,
addr.base(), WIDTH64, rd, AMO);
881void MicroAssembler::amoswapd(Register rd,
884 std::memory_order order) {
887 EmitRType(AMOSWAP, order, rs2,
addr.base(), WIDTH64, rd, AMO);
890void MicroAssembler::amoaddd(Register rd,
893 std::memory_order order) {
896 EmitRType(AMOADD, order, rs2,
addr.base(), WIDTH64, rd, AMO);
899void MicroAssembler::amoxord(Register rd,
902 std::memory_order order) {
905 EmitRType(AMOXOR, order, rs2,
addr.base(), WIDTH64, rd, AMO);
908void MicroAssembler::amoandd(Register rd,
911 std::memory_order order) {
914 EmitRType(AMOAND, order, rs2,
addr.base(), WIDTH64, rd, AMO);
917void MicroAssembler::amoord(Register rd,
920 std::memory_order order) {
923 EmitRType(AMOOR, order, rs2,
addr.base(), WIDTH64, rd, AMO);
926void MicroAssembler::amomind(Register rd,
929 std::memory_order order) {
932 EmitRType(AMOMIN, order, rs2,
addr.base(), WIDTH64, rd, AMO);
935void MicroAssembler::amomaxd(Register rd,
938 std::memory_order order) {
941 EmitRType(AMOMAX, order, rs2,
addr.base(), WIDTH64, rd, AMO);
944void MicroAssembler::amominud(Register rd,
947 std::memory_order order) {
950 EmitRType(AMOMINU, order, rs2,
addr.base(), WIDTH64, rd, AMO);
953void MicroAssembler::amomaxud(Register rd,
956 std::memory_order order) {
959 EmitRType(AMOMAXU, order, rs2,
addr.base(), WIDTH64, rd, AMO);
// F extension: single-precision floating point. flw/fsw try compressed
// forms first (bodies elided in this chunk). Arithmetic ops carry a
// RoundingMode in the funct3 slot; fused multiply-add forms use R4-type
// encodings with the F2_S format tag. For FCVT* and single-source ops the
// rs2 field is repurposed as a sub-opcode selector, hence the
// FRegister(W/WU/L/LU/0) casts.
963void MicroAssembler::flw(FRegister rd, Address addr) {
966 if (Supports(RV_C)) {
977 EmitIType(
addr.offset(),
addr.base(), S, rd, LOADFP);
980void MicroAssembler::fsw(FRegister rs2, Address addr) {
983 if (Supports(RV_C)) {
994 EmitSType(
addr.offset(), rs2,
addr.base(), S, STOREFP);
997void MicroAssembler::fmadds(FRegister rd,
1001 RoundingMode rounding) {
1003 EmitR4Type(rs3, F2_S, rs2, rs1, rounding, rd, FMADD);
1006void MicroAssembler::fmsubs(FRegister rd,
1010 RoundingMode rounding) {
1012 EmitR4Type(rs3, F2_S, rs2, rs1, rounding, rd, FMSUB);
1015void MicroAssembler::fnmsubs(FRegister rd,
1019 RoundingMode rounding) {
1021 EmitR4Type(rs3, F2_S, rs2, rs1, rounding, rd, FNMSUB);
1024void MicroAssembler::fnmadds(FRegister rd,
1028 RoundingMode rounding) {
1030 EmitR4Type(rs3, F2_S, rs2, rs1, rounding, rd, FNMADD);
1033void MicroAssembler::fadds(FRegister rd,
1036 RoundingMode rounding) {
1038 EmitRType(FADDS, rs2, rs1, rounding, rd, OPFP);
1041void MicroAssembler::fsubs(FRegister rd,
1044 RoundingMode rounding) {
1046 EmitRType(FSUBS, rs2, rs1, rounding, rd, OPFP);
1049void MicroAssembler::fmuls(FRegister rd,
1052 RoundingMode rounding) {
1054 EmitRType(FMULS, rs2, rs1, rounding, rd, OPFP);
1057void MicroAssembler::fdivs(FRegister rd,
1060 RoundingMode rounding) {
1062 EmitRType(FDIVS, rs2, rs1, rounding, rd, OPFP);
1065void MicroAssembler::fsqrts(FRegister rd,
1067 RoundingMode rounding) {
1069 EmitRType(FSQRTS,
FRegister(0), rs1, rounding, rd, OPFP);
// Sign-injection ops: funct3 selects copy (J), negate (JN), xor (JX).
1072void MicroAssembler::fsgnjs(FRegister rd, FRegister rs1, FRegister rs2) {
1074 EmitRType(FSGNJS, rs2, rs1,
J, rd, OPFP);
1077void MicroAssembler::fsgnjns(FRegister rd, FRegister rs1, FRegister rs2) {
1079 EmitRType(FSGNJS, rs2, rs1, JN, rd, OPFP);
1082void MicroAssembler::fsgnjxs(FRegister rd, FRegister rs1, FRegister rs2) {
1084 EmitRType(FSGNJS, rs2, rs1, JX, rd, OPFP);
1087void MicroAssembler::fmins(FRegister rd, FRegister rs1, FRegister rs2) {
1089 EmitRType(FMINMAXS, rs2, rs1, FMIN, rd, OPFP);
1092void MicroAssembler::fmaxs(FRegister rd, FRegister rs1, FRegister rs2) {
1094 EmitRType(FMINMAXS, rs2, rs1, FMAX, rd, OPFP);
// Comparisons write an integer 0/1 into rd.
1097void MicroAssembler::feqs(Register rd, FRegister rs1, FRegister rs2) {
1099 EmitRType(FCMPS, rs2, rs1, FEQ, rd, OPFP);
1102void MicroAssembler::flts(Register rd, FRegister rs1, FRegister rs2) {
1104 EmitRType(FCMPS, rs2, rs1, FLT, rd, OPFP);
1107void MicroAssembler::fles(Register rd, FRegister rs1, FRegister rs2) {
1109 EmitRType(FCMPS, rs2, rs1, FLE, rd, OPFP);
1112void MicroAssembler::fclasss(Register rd, FRegister rs1) {
1114 EmitRType(FCLASSS,
FRegister(0), rs1, F3_1, rd, OPFP);
1117void MicroAssembler::fcvtws(Register rd, FRegister rs1, RoundingMode rounding) {
1119 EmitRType(FCVTintS,
FRegister(
W), rs1, rounding, rd, OPFP);
1122void MicroAssembler::fcvtwus(Register rd,
1124 RoundingMode rounding) {
1126 EmitRType(FCVTintS,
FRegister(WU), rs1, rounding, rd, OPFP);
1129void MicroAssembler::fcvtsw(FRegister rd, Register rs1, RoundingMode rounding) {
1131 EmitRType(FCVTSint,
FRegister(
W), rs1, rounding, rd, OPFP);
1134void MicroAssembler::fcvtswu(FRegister rd,
1136 RoundingMode rounding) {
1138 EmitRType(FCVTSint,
FRegister(WU), rs1, rounding, rd, OPFP);
// Bit-pattern moves between integer and FP register files (no conversion).
1141void MicroAssembler::fmvxw(Register rd, FRegister rs1) {
1143 EmitRType(FMVXW,
FRegister(0), rs1, F3_0, rd, OPFP);
1146void MicroAssembler::fmvwx(FRegister rd, Register rs1) {
1148 EmitRType(FMVWX,
FRegister(0), rs1, F3_0, rd, OPFP);
// RV64-only 64-bit integer <-> single conversions (L/LU selectors).
1152void MicroAssembler::fcvtls(Register rd, FRegister rs1, RoundingMode rounding) {
1154 EmitRType(FCVTintS,
FRegister(L), rs1, rounding, rd, OPFP);
1157void MicroAssembler::fcvtlus(Register rd,
1159 RoundingMode rounding) {
1161 EmitRType(FCVTintS,
FRegister(LU), rs1, rounding, rd, OPFP);
1164void MicroAssembler::fcvtsl(FRegister rd, Register rs1, RoundingMode rounding) {
1166 EmitRType(FCVTSint,
FRegister(L), rs1, rounding, rd, OPFP);
1169void MicroAssembler::fcvtslu(FRegister rd,
1171 RoundingMode rounding) {
1173 EmitRType(FCVTSint,
FRegister(LU), rs1, rounding, rd, OPFP);
// D extension: double-precision floating point. Structure mirrors the
// single-precision group above: compressed load/store candidates (bodies
// elided), R4-type fused ops with the F2_D format tag, rounding mode in
// funct3, and rs2-field selector constants for conversions/single-source
// ops. fcvtsd narrows double->single (selector 1); fcvtds widens (0).
1177void MicroAssembler::fld(FRegister rd, Address addr) {
1179 if (Supports(RV_C)) {
1189 EmitIType(
addr.offset(),
addr.base(),
D, rd, LOADFP);
1192void MicroAssembler::fsd(FRegister rs2, Address addr) {
1194 if (Supports(RV_C)) {
1204 EmitSType(
addr.offset(), rs2,
addr.base(),
D, STOREFP);
1207void MicroAssembler::fmaddd(FRegister rd,
1211 RoundingMode rounding) {
1213 EmitR4Type(rs3, F2_D, rs2, rs1, rounding, rd, FMADD);
1216void MicroAssembler::fmsubd(FRegister rd,
1220 RoundingMode rounding) {
1222 EmitR4Type(rs3, F2_D, rs2, rs1, rounding, rd, FMSUB);
1225void MicroAssembler::fnmsubd(FRegister rd,
1229 RoundingMode rounding) {
1231 EmitR4Type(rs3, F2_D, rs2, rs1, rounding, rd, FNMSUB);
1234void MicroAssembler::fnmaddd(FRegister rd,
1238 RoundingMode rounding) {
1240 EmitR4Type(rs3, F2_D, rs2, rs1, rounding, rd, FNMADD);
1243void MicroAssembler::faddd(FRegister rd,
1246 RoundingMode rounding) {
1248 EmitRType(FADDD, rs2, rs1, rounding, rd, OPFP);
1251void MicroAssembler::fsubd(FRegister rd,
1254 RoundingMode rounding) {
1256 EmitRType(FSUBD, rs2, rs1, rounding, rd, OPFP);
1259void MicroAssembler::fmuld(FRegister rd,
1262 RoundingMode rounding) {
1264 EmitRType(FMULD, rs2, rs1, rounding, rd, OPFP);
1267void MicroAssembler::fdivd(FRegister rd,
1270 RoundingMode rounding) {
1272 EmitRType(FDIVD, rs2, rs1, rounding, rd, OPFP);
1275void MicroAssembler::fsqrtd(FRegister rd,
1277 RoundingMode rounding) {
1279 EmitRType(FSQRTD,
FRegister(0), rs1, rounding, rd, OPFP);
1282void MicroAssembler::fsgnjd(FRegister rd, FRegister rs1, FRegister rs2) {
1284 EmitRType(FSGNJD, rs2, rs1,
J, rd, OPFP);
1287void MicroAssembler::fsgnjnd(FRegister rd, FRegister rs1, FRegister rs2) {
1289 EmitRType(FSGNJD, rs2, rs1, JN, rd, OPFP);
1292void MicroAssembler::fsgnjxd(FRegister rd, FRegister rs1, FRegister rs2) {
1294 EmitRType(FSGNJD, rs2, rs1, JX, rd, OPFP);
1297void MicroAssembler::fmind(FRegister rd, FRegister rs1, FRegister rs2) {
1299 EmitRType(FMINMAXD, rs2, rs1, FMIN, rd, OPFP);
1302void MicroAssembler::fmaxd(FRegister rd, FRegister rs1, FRegister rs2) {
1304 EmitRType(FMINMAXD, rs2, rs1, FMAX, rd, OPFP);
1307void MicroAssembler::fcvtsd(FRegister rd,
1309 RoundingMode rounding) {
1311 EmitRType(FCVTS,
FRegister(1), rs1, rounding, rd, OPFP);
1314void MicroAssembler::fcvtds(FRegister rd,
1316 RoundingMode rounding) {
1318 EmitRType(FCVTD,
FRegister(0), rs1, rounding, rd, OPFP);
1321void MicroAssembler::feqd(Register rd, FRegister rs1, FRegister rs2) {
1323 EmitRType(FCMPD, rs2, rs1, FEQ, rd, OPFP);
1326void MicroAssembler::fltd(Register rd, FRegister rs1, FRegister rs2) {
1328 EmitRType(FCMPD, rs2, rs1, FLT, rd, OPFP);
1331void MicroAssembler::fled(Register rd, FRegister rs1, FRegister rs2) {
1333 EmitRType(FCMPD, rs2, rs1, FLE, rd, OPFP);
1336void MicroAssembler::fclassd(Register rd, FRegister rs1) {
1338 EmitRType(FCLASSD,
FRegister(0), rs1, F3_1, rd, OPFP);
1341void MicroAssembler::fcvtwd(Register rd, FRegister rs1, RoundingMode rounding) {
1343 EmitRType(FCVTintD,
FRegister(
W), rs1, rounding, rd, OPFP);
1346void MicroAssembler::fcvtwud(Register rd,
1348 RoundingMode rounding) {
1350 EmitRType(FCVTintD,
FRegister(WU), rs1, rounding, rd, OPFP);
1353void MicroAssembler::fcvtdw(FRegister rd, Register rs1, RoundingMode rounding) {
1355 EmitRType(FCVTDint,
FRegister(
W), rs1, rounding, rd, OPFP);
1358void MicroAssembler::fcvtdwu(FRegister rd,
1360 RoundingMode rounding) {
1362 EmitRType(FCVTDint,
FRegister(WU), rs1, rounding, rd, OPFP);
// RV64-only 64-bit integer <-> double conversions and bit moves.
1366void MicroAssembler::fcvtld(Register rd, FRegister rs1, RoundingMode rounding) {
1368 EmitRType(FCVTintD,
FRegister(L), rs1, rounding, rd, OPFP);
1371void MicroAssembler::fcvtlud(Register rd,
1373 RoundingMode rounding) {
1375 EmitRType(FCVTintD,
FRegister(LU), rs1, rounding, rd, OPFP);
1378void MicroAssembler::fmvxd(Register rd, FRegister rs1) {
1380 EmitRType(FMVXD,
FRegister(0), rs1, F3_0, rd, OPFP);
1383void MicroAssembler::fcvtdl(FRegister rd, Register rs1, RoundingMode rounding) {
1385 EmitRType(FCVTDint,
FRegister(L), rs1, rounding, rd, OPFP);
1388void MicroAssembler::fcvtdlu(FRegister rd,
1390 RoundingMode rounding) {
1392 EmitRType(FCVTDint,
FRegister(LU), rs1, rounding, rd, OPFP);
1395void MicroAssembler::fmvdx(FRegister rd, Register rs1) {
1397 EmitRType(FMVDX,
FRegister(0), rs1, F3_0, rd, OPFP);
// Zba extension: address-generation helpers. sh{1,2,3}add compute
// rd = (rs1 << n) + rs2; the *uw forms zero-extend the 32-bit rs1 first
// (encoded via OP32). slliuw is shift-left of the zero-extended word.
1402void MicroAssembler::adduw(Register rd, Register rs1, Register rs2) {
1403 ASSERT(Supports(RV_Zba));
1404 EmitRType(ADDUW, rs2, rs1, F3_0, rd, OP32);
1408void MicroAssembler::sh1add(Register rd, Register rs1, Register rs2) {
1409 ASSERT(Supports(RV_Zba));
1410 EmitRType(SHADD, rs2, rs1, SH1ADD, rd, OP);
1414void MicroAssembler::sh1adduw(Register rd, Register rs1, Register rs2) {
1415 ASSERT(Supports(RV_Zba));
1416 EmitRType(SHADD, rs2, rs1, SH1ADD, rd, OP32);
1420void MicroAssembler::sh2add(Register rd, Register rs1, Register rs2) {
1421 ASSERT(Supports(RV_Zba));
1422 EmitRType(SHADD, rs2, rs1, SH2ADD, rd, OP);
1426void MicroAssembler::sh2adduw(Register rd, Register rs1, Register rs2) {
1427 ASSERT(Supports(RV_Zba));
1428 EmitRType(SHADD, rs2, rs1, SH2ADD, rd, OP32);
1432void MicroAssembler::sh3add(Register rd, Register rs1, Register rs2) {
1433 ASSERT(Supports(RV_Zba));
1434 EmitRType(SHADD, rs2, rs1, SH3ADD, rd, OP);
1438void MicroAssembler::sh3adduw(Register rd, Register rs1, Register rs2) {
1439 ASSERT(Supports(RV_Zba));
1440 EmitRType(SHADD, rs2, rs1, SH3ADD, rd, OP32);
1443void MicroAssembler::slliuw(Register rd, Register rs1, intx_t shamt) {
1444 ASSERT((shamt > 0) && (shamt < 32));
1445 ASSERT(Supports(RV_Zba));
1446 EmitRType(SLLIUW, shamt, rs1, SLLI, rd, OPIMM32);
// Zbb extension: basic bit manipulation. andn/orn/xnor reuse the SUB funct7
// with the AND/OR/XOR funct3. The count ops (clz/ctz/cpop) encode a
// sub-opcode (0b00000/0b00001/0b00010) in the rs2 field under the COUNT
// funct7; sext/zext use literal funct7+selector pairs. rev8's encoding
// differs between RV32 and RV64 (0b0110100 vs 0b0110101) — the #if arms
// around the two EmitRType calls are elided in this chunk, as are the ones
// in zexth (OP vs OP32 variants).
1450void MicroAssembler::andn(Register rd, Register rs1, Register rs2) {
1451 ASSERT(Supports(RV_Zbb));
1452 EmitRType(SUB, rs2, rs1, AND, rd, OP);
1455void MicroAssembler::orn(Register rd, Register rs1, Register rs2) {
1456 ASSERT(Supports(RV_Zbb));
1457 EmitRType(SUB, rs2, rs1, OR, rd, OP);
1460void MicroAssembler::xnor(Register rd, Register rs1, Register rs2) {
1461 ASSERT(Supports(RV_Zbb));
1462 EmitRType(SUB, rs2, rs1, XOR, rd, OP);
1465void MicroAssembler::clz(Register rd, Register rs1) {
1466 ASSERT(Supports(RV_Zbb));
1467 EmitRType(
COUNT, 0b00000, rs1, F3_COUNT, rd, OPIMM);
1470void MicroAssembler::clzw(Register rd, Register rs1) {
1471 ASSERT(Supports(RV_Zbb));
1472 EmitRType(
COUNT, 0b00000, rs1, F3_COUNT, rd, OPIMM32);
1475void MicroAssembler::ctz(Register rd, Register rs1) {
1476 ASSERT(Supports(RV_Zbb));
1477 EmitRType(
COUNT, 0b00001, rs1, F3_COUNT, rd, OPIMM);
1480void MicroAssembler::ctzw(Register rd, Register rs1) {
1481 ASSERT(Supports(RV_Zbb));
1482 EmitRType(
COUNT, 0b00001, rs1, F3_COUNT, rd, OPIMM32);
1485void MicroAssembler::cpop(Register rd, Register rs1) {
1486 ASSERT(Supports(RV_Zbb));
1487 EmitRType(
COUNT, 0b00010, rs1, F3_COUNT, rd, OPIMM);
1490void MicroAssembler::cpopw(Register rd, Register rs1) {
1491 ASSERT(Supports(RV_Zbb));
1492 EmitRType(
COUNT, 0b00010, rs1, F3_COUNT, rd, OPIMM32);
1495void MicroAssembler::max(Register rd, Register rs1, Register rs2) {
1496 ASSERT(Supports(RV_Zbb));
1497 EmitRType(MINMAXCLMUL, rs2, rs1, MAX, rd, OP);
1500void MicroAssembler::maxu(Register rd, Register rs1, Register rs2) {
1501 ASSERT(Supports(RV_Zbb));
1502 EmitRType(MINMAXCLMUL, rs2, rs1, MAXU, rd, OP);
1505void MicroAssembler::min(Register rd, Register rs1, Register rs2) {
1506 ASSERT(Supports(RV_Zbb));
1507 EmitRType(MINMAXCLMUL, rs2, rs1, MIN, rd, OP);
1510void MicroAssembler::minu(Register rd, Register rs1, Register rs2) {
1511 ASSERT(Supports(RV_Zbb));
1512 EmitRType(MINMAXCLMUL, rs2, rs1, MINU, rd, OP);
1515void MicroAssembler::sextb(Register rd, Register rs1) {
1516 ASSERT(Supports(RV_Zbb));
1517 EmitRType((Funct7)0b0110000, 0b00100, rs1, SEXT, rd, OPIMM);
1520void MicroAssembler::sexth(Register rd, Register rs1) {
1521 ASSERT(Supports(RV_Zbb));
1522 EmitRType((Funct7)0b0110000, 0b00101, rs1, SEXT, rd, OPIMM);
1525void MicroAssembler::zexth(Register rd, Register rs1) {
1526 ASSERT(Supports(RV_Zbb));
1528 EmitRType((Funct7)0b0000100, 0b00000, rs1, ZEXT, rd, OP);
1530 EmitRType((Funct7)0b0000100, 0b00000, rs1, ZEXT, rd, OP32);
1536void MicroAssembler::rol(Register rd, Register rs1, Register rs2) {
1537 ASSERT(Supports(RV_Zbb));
1538 EmitRType(
ROTATE, rs2, rs1, ROL, rd, OP);
1541void MicroAssembler::rolw(Register rd, Register rs1, Register rs2) {
1542 ASSERT(Supports(RV_Zbb));
1543 EmitRType(
ROTATE, rs2, rs1, ROL, rd, OP32);
1546void MicroAssembler::ror(Register rd, Register rs1, Register rs2) {
1547 ASSERT(Supports(RV_Zbb));
1548 EmitRType(
ROTATE, rs2, rs1, ROR, rd, OP);
1551void MicroAssembler::rori(Register rd, Register rs1, intx_t shamt) {
1552 ASSERT(Supports(RV_Zbb));
1553 EmitRType(
ROTATE, shamt, rs1, ROR, rd, OPIMM);
1556void MicroAssembler::roriw(Register rd, Register rs1, intx_t shamt) {
1557 ASSERT(Supports(RV_Zbb));
1558 EmitRType(
ROTATE, shamt, rs1, ROR, rd, OPIMM32);
1561void MicroAssembler::rorw(Register rd, Register rs1, Register rs2) {
1562 ASSERT(Supports(RV_Zbb));
1563 EmitRType(
ROTATE, rs2, rs1, ROR, rd, OP32);
1566void MicroAssembler::orcb(Register rd, Register rs1) {
1567 ASSERT(Supports(RV_Zbb));
1568 EmitRType((Funct7)0b0010100, 0b00111, rs1, (Funct3)0b101, rd, OPIMM);
1571void MicroAssembler::rev8(Register rd, Register rs1) {
1572 ASSERT(Supports(RV_Zbb));
1574 EmitRType((Funct7)0b0110100, 0b11000, rs1, (Funct3)0b101, rd, OPIMM);
1576 EmitRType((Funct7)0b0110101, 0b11000, rs1, (Funct3)0b101, rd, OPIMM);
// Zbc extension: carry-less multiply (low half, high half, reversed).
1582void MicroAssembler::clmul(Register rd, Register rs1, Register rs2) {
1583 ASSERT(Supports(RV_Zbc));
1584 EmitRType(MINMAXCLMUL, rs2, rs1, CLMUL, rd, OP);
1587void MicroAssembler::clmulh(Register rd, Register rs1, Register rs2) {
1588 ASSERT(Supports(RV_Zbc));
1589 EmitRType(MINMAXCLMUL, rs2, rs1, CLMULH, rd, OP);
1592void MicroAssembler::clmulr(Register rd, Register rs1, Register rs2) {
1593 ASSERT(Supports(RV_Zbc));
1594 EmitRType(MINMAXCLMUL, rs2, rs1, CLMULR, rd, OP);
1597void MicroAssembler::bclr(Register rd, Register rs1, Register rs2) {
1598 ASSERT(Supports(RV_Zbs));
1599 EmitRType(BCLRBEXT, rs2, rs1, BCLR, rd, OP);
1602void MicroAssembler::bclri(Register rd, Register rs1, intx_t shamt) {
1603 ASSERT(Supports(RV_Zbs));
1604 EmitRType(BCLRBEXT, shamt, rs1, BCLR, rd, OPIMM);
1607void MicroAssembler::bext(Register rd, Register rs1, Register rs2) {
1608 ASSERT(Supports(RV_Zbs));
1609 EmitRType(BCLRBEXT, rs2, rs1, BEXT, rd, OP);
1612void MicroAssembler::bexti(Register rd, Register rs1, intx_t shamt) {
1613 ASSERT(Supports(RV_Zbs));
1614 EmitRType(BCLRBEXT, shamt, rs1, BEXT, rd, OPIMM);
1617void MicroAssembler::binv(Register rd, Register rs1, Register rs2) {
1618 ASSERT(Supports(RV_Zbs));
1619 EmitRType(BINV, rs2, rs1, F3_BINV, rd, OP);
1622void MicroAssembler::binvi(Register rd, Register rs1, intx_t shamt) {
1623 ASSERT(Supports(RV_Zbs));
1624 EmitRType(BINV, shamt, rs1, F3_BINV, rd, OPIMM);
1627void MicroAssembler::bset(Register rd, Register rs1, Register rs2) {
1628 ASSERT(Supports(RV_Zbs));
1629 EmitRType(BSET, rs2, rs1, F3_BSET, rd, OP);
1632void MicroAssembler::bseti(Register rd, Register rs1, intx_t shamt) {
1633 ASSERT(Supports(RV_Zbs));
1634 EmitRType(BSET, shamt, rs1, F3_BSET, rd, OPIMM);
// -------------------------------------------------------------------------
// Compressed (RV_C) load/store emitters.
// NOTE(review): this region is a lossy extraction — fused original line
// numbers, dropped lines (ASSERTs, offset-encoding terms such as the
// EncodeCMem*Imm(addr.offset()) operands, and closing braces). The visible
// fragments are kept byte-identical; recover the missing lines from version
// control before editing.
// -------------------------------------------------------------------------
// SP-relative compressed loads/stores; bodies were dropped entirely.
1637void MicroAssembler::c_lwsp(Register rd, Address addr) {
1645void MicroAssembler::c_flwsp(FRegister rd, Address addr) {
1652void MicroAssembler::c_ldsp(Register rd, Address addr) {
1660void MicroAssembler::c_fldsp(FRegister rd, Address addr) {
1667void MicroAssembler::c_swsp(Register rs2, Address addr) {
1674void MicroAssembler::c_fswsp(FRegister rs2, Address addr) {
1681void MicroAssembler::c_sdsp(Register rs2, Address addr) {
1687void MicroAssembler::c_fsdsp(FRegister rs2, Address addr) {
// Register-relative compressed loads; each Emit16 is missing its trailing
// offset-immediate term (lost in extraction).
1694void MicroAssembler::c_lw(Register rd, Address addr) {
1696 Emit16(C_LW | EncodeCRdp(rd) | EncodeCRs1p(
addr.base()) |
1700void MicroAssembler::c_ld(Register rd, Address addr) {
1702 Emit16(C_LD | EncodeCRdp(rd) | EncodeCRs1p(
addr.base()) |
1706void MicroAssembler::c_flw(FRegister rd, Address addr) {
1709 Emit16(C_FLW | EncodeCFRdp(rd) | EncodeCRs1p(
addr.base()) |
1713void MicroAssembler::c_fld(FRegister rd, Address addr) {
1716 Emit16(C_FLD | EncodeCFRdp(rd) | EncodeCRs1p(
addr.base()) |
// Register-relative compressed stores; same truncation as above.
1720void MicroAssembler::c_sw(Register rs2, Address addr) {
1722 Emit16(C_SW | EncodeCRs1p(
addr.base()) | EncodeCRs2p(rs2) |
1726void MicroAssembler::c_sd(Register rs2, Address addr) {
1728 Emit16(C_SD | EncodeCRs1p(
addr.base()) | EncodeCRs2p(rs2) |
1732void MicroAssembler::c_fsw(FRegister rs2, Address addr) {
1735 Emit16(C_FSW | EncodeCRs1p(
addr.base()) | EncodeCFRs2p(rs2) |
1739void MicroAssembler::c_fsd(FRegister rs2, Address addr) {
1742 Emit16(C_FSD | EncodeCRs1p(
addr.base()) | EncodeCFRs2p(rs2) |
1746void MicroAssembler::c_j(Label* label) {
1748 EmitCJump(label, C_J);
1752void MicroAssembler::c_jal(Label* label) {
1754 EmitCJump(label, C_JAL);
1758void MicroAssembler::c_jr(Register rs1) {
1761 Emit16(C_JR | EncodeCRs1(rs1) | EncodeCRs2(ZR));
1764void MicroAssembler::c_jalr(Register rs1) {
1766 Emit16(C_JALR | EncodeCRs1(rs1) | EncodeCRs2(ZR));
1769void MicroAssembler::c_beqz(Register rs1p, Label* label) {
1771 EmitCBranch(rs1p, label, C_BEQZ);
1774void MicroAssembler::c_bnez(Register rs1p, Label* label) {
1776 EmitCBranch(rs1p, label, C_BNEZ);
// -------------------------------------------------------------------------
// Compressed (RV_C) immediate/ALU emitters. Lossy extraction: several bodies
// (c_li, c_lui, c_addi16sp, c_addi4spn, c_nop, c_ebreak) were dropped, as
// were ASSERT lines and closing braces. Fragments kept byte-identical.
// -------------------------------------------------------------------------
1779void MicroAssembler::c_li(Register rd, intptr_t imm) {
1785void MicroAssembler::c_lui(Register rd, uintptr_t imm) {
1792void MicroAssembler::c_addi(Register rd, Register rs1, intptr_t imm) {
1796 Emit16(C_ADDI | EncodeCRd(rd) |
EncodeCIImm(imm));
1800void MicroAssembler::c_addiw(Register rd, Register rs1, intptr_t imm) {
1803 Emit16(C_ADDIW | EncodeCRd(rd) |
EncodeCIImm(imm));
1806void MicroAssembler::c_addi16sp(Register rd, Register rs1, intptr_t imm) {
1812void MicroAssembler::c_addi4spn(Register rdp, Register rs1, intptr_t imm) {
// Compressed shifts/and with immediate; note the shift forms encode through
// CI-format immediates.
1819void MicroAssembler::c_slli(Register rd, Register rs1, intptr_t imm) {
1823 Emit16(C_SLLI | EncodeCRd(rd) |
EncodeCIImm(imm));
1826void MicroAssembler::c_srli(Register rd, Register rs1, intptr_t imm) {
1830 Emit16(C_SRLI | EncodeCRs1p(rd) |
EncodeCIImm(imm));
1833void MicroAssembler::c_srai(Register rd, Register rs1, intptr_t imm) {
1837 Emit16(C_SRAI | EncodeCRs1p(rd) |
EncodeCIImm(imm));
1840void MicroAssembler::c_andi(Register rd, Register rs1, intptr_t imm) {
1843 Emit16(C_ANDI | EncodeCRs1p(rd) |
EncodeCIImm(imm));
// Compressed register-register ALU ops.
1846void MicroAssembler::c_mv(Register rd, Register rs2) {
1850 Emit16(C_MV | EncodeCRd(rd) | EncodeCRs2(rs2));
1853void MicroAssembler::c_add(Register rd, Register rs1, Register rs2) {
1858 Emit16(C_ADD | EncodeCRd(rd) | EncodeCRs2(rs2));
1861void MicroAssembler::c_and(Register rd, Register rs1, Register rs2) {
1864 Emit16(C_AND | EncodeCRs1p(rs1) | EncodeCRs2p(rs2));
1867void MicroAssembler::c_or(Register rd, Register rs1, Register rs2) {
1869 Emit16(C_OR | EncodeCRs1p(rs1) | EncodeCRs2p(rs2));
1872void MicroAssembler::c_xor(Register rd, Register rs1, Register rs2) {
1874 Emit16(C_XOR | EncodeCRs1p(rs1) | EncodeCRs2p(rs2));
1877void MicroAssembler::c_sub(Register rd, Register rs1, Register rs2) {
1879 Emit16(C_SUB | EncodeCRs1p(rs1) | EncodeCRs2p(rs2));
1883void MicroAssembler::c_addw(Register rd, Register rs1, Register rs2) {
1885 Emit16(C_ADDW | EncodeCRs1p(rs1) | EncodeCRs2p(rs2));
1888void MicroAssembler::c_subw(Register rd, Register rs1, Register rs2) {
1890 Emit16(C_SUBW | EncodeCRs1p(rs1) | EncodeCRs2p(rs2));
1894void MicroAssembler::c_nop() {
1899void MicroAssembler::c_ebreak() {
// -------------------------------------------------------------------------
// EmitBranch: core conditional-branch emitter with three strategies per
// far_branch_level: (0) direct B-type branch; (1) inverted branch over a
// JAL (+-1MB range); (2) inverted branch over AUIPC+JALR (+-2GB range).
// Lossy extraction: InvertFunct3's switch body, the IsBTypeImm/IsJTypeImm
// range checks, label bookkeeping, and braces were dropped. Fragments are
// kept byte-identical; restore from version control before editing.
// -------------------------------------------------------------------------
1904static Funct3 InvertFunct3(Funct3 func) {
1923void MicroAssembler::EmitBranch(Register rs1,
1927 JumpDistance distance) {
// Bound label: offset is known; pick an encoding that can reach it.
1929 if (label->IsBound()) {
1931 offset = label->Position() - Position();
1933 EmitBType(
offset, rs2, rs1, func, BRANCH);
// Mid-range fallback: inverted branch skips a JAL to the target.
1938 intptr_t
start = Position();
1939 const intptr_t kFarBranchLength = 8;
1940 EmitBType(kFarBranchLength, rs2, rs1, InvertFunct3(func), BRANCH);
1941 offset = label->Position() - Position();
1942 EmitJType(
offset, ZR, JAL);
1943 intptr_t
end = Position();
// Far fallback: inverted branch skips AUIPC+JALR (hi/lo split of offset).
1948 intptr_t
start = Position();
1949 const intptr_t kFarBranchLength = 12;
1950 EmitBType(kFarBranchLength, rs2, rs1, InvertFunct3(func), BRANCH);
1951 offset = label->Position() - Position();
1955 FATAL(
"Branch distance exceeds 2GB!");
1957 EmitUType(hi, FAR_TMP, AUIPC);
1958 EmitIType(lo, FAR_TMP, F3_0, ZR, JALR);
1959 intptr_t
end = Position();
// Unbound label: link into the label's pending-branch chains; the encoding
// chosen here is later patched by Bind().
1965 if (distance == kNearJump) {
1966 offset = label->link_b(Position());
1968 FATAL(
"Incorrect Assembler::kNearJump");
1970 EmitBType(
offset, rs2, rs1, func, BRANCH);
1971 }
else if (far_branch_level() == 0) {
1972 offset = label->link_b(Position());
1980 EmitBType(
offset, rs2, rs1, func, BRANCH);
1981 }
else if (far_branch_level() == 1) {
1982 intptr_t
start = Position();
1983 const intptr_t kFarBranchLength = 8;
1984 EmitBType(kFarBranchLength, rs2, rs1, InvertFunct3(func), BRANCH);
1985 offset = label->link_j(Position());
1986 EmitJType(
offset, ZR, JAL);
1987 intptr_t
end = Position();
1990 intptr_t
start = Position();
1991 const intptr_t kFarBranchLength = 12;
1992 EmitBType(kFarBranchLength, rs2, rs1, InvertFunct3(func), BRANCH);
1993 offset = label->link_far(Position());
1997 FATAL(
"Branch distance exceeds 2GB!");
1999 EmitUType(hi, FAR_TMP, AUIPC);
2000 EmitIType(lo, FAR_TMP, F3_0, ZR, JALR);
2001 intptr_t
end = Position();
// -------------------------------------------------------------------------
// EmitJump / EmitCBranch / EmitCJump: unconditional-jump and compressed
// branch/jump emitters. Same far-branch strategy as EmitBranch: JAL when in
// range, AUIPC+JALR (hi/lo) for +-2GB. Lossy extraction — range checks,
// Emit16 patch sites, and braces were dropped; fragments kept byte-identical.
// -------------------------------------------------------------------------
2007void MicroAssembler::EmitJump(Register rd,
2010 JumpDistance distance) {
2012 if (label->IsBound()) {
2014 offset = label->Position() - Position();
2017 EmitJType(
offset, rd, JAL);
2023 FATAL(
"Jump distance exceeds 2GB!");
2025 EmitUType(hi, FAR_TMP, AUIPC);
2026 EmitIType(lo, FAR_TMP, F3_0, ZR, JALR);
// Unbound label: link into the pending-jump chain for later patching.
2031 if (distance == kNearJump) {
2032 offset = label->link_j(Position());
2034 FATAL(
"Incorrect Assembler::kNearJump");
2036 EmitJType(
offset, rd, JAL);
2037 }
else if (far_branch_level() < 2) {
2038 offset = label->link_j(Position());
2042 EmitJType(
offset, rd, JAL);
2044 offset = label->link_far(Position());
2048 FATAL(
"Jump distance exceeds 2GB!");
2050 EmitUType(hi, FAR_TMP, AUIPC);
2051 EmitIType(lo, FAR_TMP, F3_0, ZR, JALR);
// Compressed conditional branch; CB-format range is small, hence the FATAL
// when a kNearJump target is out of reach.
2056void MicroAssembler::EmitCBranch(Register rs1p, Label* label, COpcode op) {
2058 if (label->IsBound()) {
2059 offset = label->Position() - Position();
2061 offset = label->link_cb(Position());
2064 FATAL(
"Incorrect Assembler::kNearJump");
// Compressed jump (CJ-format).
2069void MicroAssembler::EmitCJump(Label* label, COpcode op) {
2071 if (label->IsBound()) {
2072 offset = label->Position() - Position();
2074 offset = label->link_cj(Position());
2077 FATAL(
"Incorrect Assembler::kNearJump");
// -------------------------------------------------------------------------
// Low-level 32-bit instruction encoders (R/R4/I/S/B/U/J formats). Lossy
// extraction: parameter lists, the `uint32_t e = 0;` initializers, the
// EncodeRd/EncodeImm terms, and the final Emit32(e) calls were dropped.
// Fragments kept byte-identical.
// -------------------------------------------------------------------------
// AMO variant: maps a C++ std::memory_order onto the aq/rl bits folded into
// funct7 (switch case bodies lost in extraction).
2082void MicroAssembler::EmitRType(Funct5 funct5,
2083 std::memory_order order,
2089 intptr_t funct7 = funct5 << 2;
2091 case std::memory_order_acq_rel:
2094 case std::memory_order_acquire:
2097 case std::memory_order_release:
2100 case std::memory_order_relaxed:
2104 FATAL(
"Invalid memory order");
2106 EmitRType((Funct7)funct7, rs2, rs1, funct3, rd, opcode);
// Integer R-type.
2109void MicroAssembler::EmitRType(Funct7 funct7,
2116 e |= EncodeFunct7(funct7);
2117 e |= EncodeRs2(rs2);
2118 e |= EncodeRs1(rs1);
2119 e |= EncodeFunct3(funct3);
2121 e |= EncodeOpcode(opcode);
// FP R-type variants: the overloads below differ in which operands are FP
// registers and whether a rounding mode replaces funct3.
2125void MicroAssembler::EmitRType(Funct7 funct7,
2132 e |= EncodeFunct7(funct7);
2133 e |= EncodeFRs2(rs2);
2134 e |= EncodeFRs1(rs1);
2135 e |= EncodeFunct3(funct3);
2137 e |= EncodeOpcode(opcode);
2141void MicroAssembler::EmitRType(Funct7 funct7,
2148 e |= EncodeFunct7(funct7);
2149 e |= EncodeFRs2(rs2);
2150 e |= EncodeFRs1(rs1);
2151 e |= EncodeRoundingMode(
round);
2153 e |= EncodeOpcode(opcode);
2157void MicroAssembler::EmitRType(Funct7 funct7,
2164 e |= EncodeFunct7(funct7);
2165 e |= EncodeFRs2(rs2);
2166 e |= EncodeRs1(rs1);
2167 e |= EncodeRoundingMode(
round);
2169 e |= EncodeOpcode(opcode);
2173void MicroAssembler::EmitRType(Funct7 funct7,
2180 e |= EncodeFunct7(funct7);
2181 e |= EncodeFRs2(rs2);
2182 e |= EncodeRs1(rs1);
2183 e |= EncodeFunct3(funct3);
2185 e |= EncodeOpcode(opcode);
2189void MicroAssembler::EmitRType(Funct7 funct7,
2196 e |= EncodeFunct7(funct7);
2197 e |= EncodeFRs2(rs2);
2198 e |= EncodeFRs1(rs1);
2199 e |= EncodeFunct3(funct3);
2201 e |= EncodeOpcode(opcode);
2205void MicroAssembler::EmitRType(Funct7 funct7,
2212 e |= EncodeFunct7(funct7);
2213 e |= EncodeFRs2(rs2);
2214 e |= EncodeFRs1(rs1);
2215 e |= EncodeRoundingMode(
round);
2217 e |= EncodeOpcode(opcode);
// Shift-immediate R-type (shamt in the rs2 field).
2221void MicroAssembler::EmitRType(Funct7 funct7,
2228 e |= EncodeFunct7(funct7);
2229 e |= EncodeShamt(shamt);
2230 e |= EncodeRs1(rs1);
2231 e |= EncodeFunct3(funct3);
2233 e |= EncodeOpcode(opcode);
// R4-type: fused multiply-add family (three FP sources + rounding mode).
2237void MicroAssembler::EmitR4Type(FRegister rs3,
2245 e |= EncodeFRs3(rs3);
2246 e |= EncodeFunct2(funct2);
2247 e |= EncodeFRs2(rs2);
2248 e |= EncodeFRs1(rs1);
2249 e |= EncodeRoundingMode(
round);
2251 e |= EncodeOpcode(opcode);
// I-type (integer and FP destination variants).
2255void MicroAssembler::EmitIType(intptr_t imm,
2262 e |= EncodeRs1(rs1);
2263 e |= EncodeFunct3(funct3);
2265 e |= EncodeOpcode(opcode);
2269void MicroAssembler::EmitIType(intptr_t imm,
2276 e |= EncodeRs1(rs1);
2277 e |= EncodeFunct3(funct3);
2279 e |= EncodeOpcode(opcode);
// S-type stores (integer and FP source variants).
2283void MicroAssembler::EmitSType(intptr_t imm,
2290 e |= EncodeRs2(rs2);
2291 e |= EncodeRs1(rs1);
2292 e |= EncodeFunct3(funct3);
2293 e |= EncodeOpcode(opcode);
2297void MicroAssembler::EmitSType(intptr_t imm,
2304 e |= EncodeFRs2(rs2);
2305 e |= EncodeRs1(rs1);
2306 e |= EncodeFunct3(funct3);
2307 e |= EncodeOpcode(opcode);
// B-type conditional branches.
2311void MicroAssembler::EmitBType(intptr_t imm,
2318 e |= EncodeRs2(rs2);
2319 e |= EncodeRs1(rs1);
2320 e |= EncodeFunct3(funct3);
2321 e |= EncodeOpcode(opcode);
// U-type (LUI/AUIPC) and J-type (JAL).
2325void MicroAssembler::EmitUType(intptr_t imm, Register rd, Opcode opcode) {
2329 e |= EncodeOpcode(opcode);
2333void MicroAssembler::EmitJType(intptr_t imm, Register rd, Opcode opcode) {
2337 e |= EncodeOpcode(opcode);
// Assembler constructor: selects RV_GC vs RV_G based on the compressed-
// instruction flag and installs the write-barrier invocation thunks (the
// lambdas Call() into Thread-held entry points). Lossy extraction: the
// lambdas' Call(...) wrappers and closing braces were dropped; fragments
// kept byte-identical.
2341Assembler::Assembler(ObjectPoolBuilder* object_pool_builder,
2342 intptr_t far_branch_level)
2343 : MicroAssembler(object_pool_builder,
2345 FLAG_use_compressed_instructions ? RV_GC : RV_G),
2346 constant_pool_allowed_(
false) {
2347 generate_invoke_write_barrier_wrapper_ = [&](
Register reg) {
2350 Address(THR, target::Thread::write_barrier_wrappers_thread_offset(reg)));
2353 generate_invoke_array_write_barrier_ = [&]() {
2355 Address(THR, target::Thread::array_write_barrier_entry_point_offset()));
2359void Assembler::PushRegister(Register r) {
2361 subi(SP, SP, target::kWordSize);
2362 sx(r, Address(SP, 0));
2364void Assembler::PopRegister(Register r) {
2366 lx(r, Address(SP, 0));
2367 addi(SP, SP, target::kWordSize);
2370void Assembler::PushRegisterPair(Register r0, Register r1) {
2373 subi(SP, SP, 2 * target::kWordSize);
2374 sx(r1, Address(SP, target::kWordSize));
2375 sx(r0, Address(SP, 0));
2378void Assembler::PopRegisterPair(Register r0, Register r1) {
2381 lx(r1, Address(SP, target::kWordSize));
2382 lx(r0, Address(SP, 0));
2383 addi(SP, SP, 2 * target::kWordSize);
// -------------------------------------------------------------------------
// Bulk register save/restore. Pushes store CPU words plus FPU doubles in a
// single frame; pops restore in the mirrored order. Lossy extraction — the
// SP adjustments, loop headers for the pop paths, FpuRegister casts, and
// braces were dropped; fragments kept byte-identical.
// -------------------------------------------------------------------------
2386void Assembler::PushRegisters(
const RegisterSet& regs) {
// Total frame size = CPU words + FPU doubles (second term lost in paste).
2390 intptr_t
size = (regs.CpuRegisterCount() * target::kWordSize) +
2398 for (intptr_t i = kNumberOfFpuRegisters - 1; i >= 0; i--) {
2400 if (regs.ContainsFpuRegister(reg)) {
2402 fsd(reg, Address(SP,
offset));
2405 for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; i--) {
2407 if (regs.ContainsRegister(reg)) {
2408 offset -= target::kWordSize;
2409 sx(reg, Address(SP,
offset));
2415void Assembler::PopRegisters(
const RegisterSet& regs) {
2419 intptr_t
size = (regs.CpuRegisterCount() * target::kWordSize) +
2427 if (regs.ContainsRegister(reg)) {
2428 lx(reg, Address(SP,
offset));
2429 offset += target::kWordSize;
2434 if (regs.ContainsFpuRegister(reg)) {
2435 fld(reg, Address(SP,
offset));
// Pushes registers at decreasing offsets so the FIRST listed register ends
// up at the HIGHEST address (argument-order convention).
2443void Assembler::PushRegistersInOrder(std::initializer_list<Register> regs) {
2444 intptr_t
offset = regs.size() * target::kWordSize;
2446 for (Register reg : regs) {
2448 offset -= target::kWordSize;
2449 sx(reg, Address(SP,
offset));
// Native-ABI callee-saved save/restore around native calls.
2453void Assembler::PushNativeCalleeSavedRegisters() {
2454 RegisterSet regs(kAbiPreservedCpuRegs, kAbiPreservedFpuRegs);
2455 intptr_t
size = (regs.CpuRegisterCount() * target::kWordSize) +
2456 (regs.FpuRegisterCount() *
sizeof(double));
2461 if (regs.ContainsFpuRegister(reg)) {
2462 fsd(reg, Address(SP,
offset));
2463 offset +=
sizeof(double);
2468 if (regs.ContainsRegister(reg)) {
2469 sx(reg, Address(SP,
offset));
2470 offset += target::kWordSize;
2476void Assembler::PopNativeCalleeSavedRegisters() {
2477 RegisterSet regs(kAbiPreservedCpuRegs, kAbiPreservedFpuRegs);
2478 intptr_t
size = (regs.CpuRegisterCount() * target::kWordSize) +
2479 (regs.FpuRegisterCount() *
sizeof(double));
2483 if (regs.ContainsFpuRegister(reg)) {
2484 fld(reg, Address(SP,
offset));
2485 offset +=
sizeof(double);
2490 if (regs.ContainsRegister(reg)) {
2491 lx(reg, Address(SP,
offset));
2492 offset += target::kWordSize;
// ExtendValue: sign/zero-extend rn into rd according to OperandSize, no-op
// when rd == rn and no extension is needed. ExtendAndSmiTagValue: extend and
// shift left by kSmiTagShift in one slli+srli/srai pair (srli for unsigned
// sizes, srai for signed). Lossy extraction — the switch/case skeleton and
// braces were dropped; fragments kept byte-identical.
2499void Assembler::ExtendValue(Register rd, Register rn, OperandSize sz) {
2503 if (rd == rn)
return;
2508 return sextw(rd, rn);
2512 if (rd == rn)
return;
2525void Assembler::ExtendAndSmiTagValue(Register rd, Register rn, OperandSize sz) {
2526 if (sz == kWordBytes) {
// 32-bit: unsigned (srli) then signed (srai) variants.
2534 slli(rd, rn, XLEN - kBitsPerInt32);
2535 srli(rd, rd, XLEN - kBitsPerInt32 - kSmiTagShift);
2538 slli(rd, rn, XLEN - kBitsPerInt32);
2539 srai(rd, rd, XLEN - kBitsPerInt32 - kSmiTagShift);
// 16-bit variants.
2543 slli(rd, rn, XLEN - kBitsPerInt16);
2544 srli(rd, rd, XLEN - kBitsPerInt16 - kSmiTagShift);
2547 slli(rd, rn, XLEN - kBitsPerInt16);
2548 srai(rd, rd, XLEN - kBitsPerInt16 - kSmiTagShift);
// 8-bit variants.
2551 slli(rd, rn, XLEN - kBitsPerInt8);
2552 srli(rd, rd, XLEN - kBitsPerInt8 - kSmiTagShift);
2555 slli(rd, rn, XLEN - kBitsPerInt8);
2556 srai(rd, rd, XLEN - kBitsPerInt8 - kSmiTagShift);
// -------------------------------------------------------------------------
// Memory-ordering helpers and frame utilities. Lossy extraction — bodies,
// #endif lines, and braces were dropped; fragments kept byte-identical.
// -------------------------------------------------------------------------
2565void Assembler::Jump(
const Address& address) {
// TSan interop: route acquire/release operations through the runtime so the
// race detector observes them (LeafRuntimeScope preserves FPU regs: `true`).
2570#if defined(TARGET_USES_THREAD_SANITIZER)
2571void Assembler::TsanLoadAcquire(Register addr) {
2572 LeafRuntimeScope rt(
this, 0,
true);
2573 MoveRegister(A0, addr);
2574 rt.Call(kTsanLoadAcquireRuntimeEntry, 1);
2576void Assembler::TsanStoreRelease(Register addr) {
2577 LeafRuntimeScope rt(
this, 0,
true);
2578 MoveRegister(A0, addr);
2579 rt.Call(kTsanStoreReleaseRuntimeEntry, 1);
// Load-acquire: plain load followed by a read->memory fence.
2583void Assembler::LoadAcquire(Register dst,
2584 const Address& address,
2586 ASSERT(dst != address.base());
2587 Load(dst, address, size);
2588 fence(HartEffects::kRead, HartEffects::kMemory);
2590#if defined(TARGET_USES_THREAD_SANITIZER)
2591 if (address.offset() == 0) {
2592 TsanLoadAcquire(address.base());
2594 AddImmediate(TMP2, address.base(), address.offset());
2595 TsanLoadAcquire(TMP2);
// Store-release: memory->write fence before the plain store.
2600void Assembler::StoreRelease(Register src,
2601 const Address& address,
2603 fence(HartEffects::kMemory, HartEffects::kWrite);
2604 Store(src, address, size);
// Loads the memory operand into TMP2 and defers the comparison.
2607void Assembler::CompareWithMemoryValue(Register value,
2611 ASSERT(size == kEightBytes || size == kFourBytes);
2612 if (size == kFourBytes) {
2621 CompareRegisters(value, TMP2);
// Reserve stack space, keeping SP 16-byte aligned per the RISC-V ABI.
2624void Assembler::ReserveAlignedFrameSpace(intptr_t frame_space) {
2625 if (frame_space != 0) {
2626 addi(SP, SP, -frame_space);
2628 const intptr_t kAbiStackAlignment = 16;
2629 andi(SP, SP, ~(kAbiStackAlignment - 1));
// Debug check that SP sits at the exit-link slot of the entry frame.
2635void Assembler::EmitEntryFrameVerification() {
2638 ASSERT(!constant_pool_allowed());
2639 LoadImmediate(TMP, target::frame_layout.exit_link_slot_from_entry_fp *
2641 add(TMP, TMP, FPREG);
2642 beq(TMP, SPREG, &
done, kNearJump);
// -------------------------------------------------------------------------
// Deferred-comparison machinery: Compare*/Test* record operands in
// deferred_* members; BranchIf/SetIf later materialize the actual RISC-V
// branch/set sequence for the requested Condition. Lossy extraction — the
// switch case labels, deferred_reg_/deferred_right assignments, and braces
// were dropped; fragments kept byte-identical.
// -------------------------------------------------------------------------
2650void Assembler::CompareRegisters(Register rn, Register rm) {
2652 deferred_compare_ = kCompareReg;
2653 deferred_left_ = rn;
2656void Assembler::CompareObjectRegisters(Register rn, Register rm) {
2657 CompareRegisters(rn, rm);
2659void Assembler::TestRegisters(Register rn, Register rm) {
2661 deferred_compare_ = kTestReg;
2662 deferred_left_ = rn;
// Emits the branch for the pending comparison; a zero immediate compares
// directly against ZR, otherwise the immediate is materialized in TMP2.
2666void Assembler::BranchIf(Condition condition,
2668 JumpDistance distance) {
2671 if (deferred_compare_ == kCompareImm || deferred_compare_ == kCompareReg) {
2674 if (deferred_compare_ == kCompareImm) {
2675 if (deferred_imm_ == 0) {
2678 LoadImmediate(TMP2, deferred_imm_);
2682 right = deferred_reg_;
2684 switch (condition) {
2717 FATAL(
"Use Add/Subtract/MultiplyBranchOverflow instead.");
2721 }
else if (deferred_compare_ == kTestImm || deferred_compare_ == kTestReg) {
// Test: AND the operands into TMP2 and branch on zero/non-zero.
2722 if (deferred_compare_ == kTestImm) {
2723 AndImmediate(TMP2, deferred_left_, deferred_imm_);
2725 and_(TMP2, deferred_left_, deferred_reg_);
2727 switch (condition) {
2729 beqz(TMP2, label, distance);
2732 bnez(TMP2, label, distance);
// Comparison is consumed: reset the deferred state.
2740 deferred_compare_ =
kNone;
// SetIf: materialize the pending comparison as a 0/1 value in rd. Immediate
// compares are canonicalized to register compares (via ZR or TMP2) and the
// function recurses; test-immediate has fast paths for bit 0 (andi) and Zbs
// single-bit extract (bexti). Lossy extraction — switch case labels,
// slt/seqz sequences, and braces were dropped; fragments kept byte-identical.
2743void Assembler::SetIf(Condition condition, Register rd) {
2746 if (deferred_compare_ == kCompareImm) {
2747 if (deferred_imm_ == 0) {
// Zero immediate: re-dispatch as a register compare against ZR.
2748 deferred_compare_ = kCompareReg;
2750 SetIf(condition, rd);
2754 LoadImmediate(TMP2, deferred_imm_);
2755 deferred_compare_ = kCompareReg;
2756 deferred_reg_ =
TMP2;
2757 SetIf(condition, rd);
2761 intx_t
right = deferred_imm_;
2762 switch (condition) {
2802 }
else if (deferred_compare_ == kCompareReg) {
2805 switch (condition) {
2853 }
else if (deferred_compare_ == kTestImm) {
2854 uintx_t uimm = deferred_imm_;
// Fast path: testing bit 0 needs only an andi (plus xori for the inverse).
2855 if (deferred_imm_ == 1) {
2856 switch (condition) {
2858 andi(rd, deferred_left_, 1);
2862 andi(rd, deferred_left_, 1);
2867 }
else if (Supports(RV_Zbs) && Utils::IsPowerOfTwo(uimm)) {
// Zbs fast path: extract the single tested bit directly.
2868 switch (condition) {
2870 bexti(rd, deferred_left_, Utils::ShiftForPowerOfTwo(uimm));
2874 bexti(rd, deferred_left_, Utils::ShiftForPowerOfTwo(uimm));
2880 AndImmediate(rd, deferred_left_, deferred_imm_);
2881 switch (condition) {
2892 }
else if (deferred_compare_ == kTestReg) {
2893 and_(rd, deferred_left_, deferred_reg_);
2894 switch (condition) {
// Comparison is consumed: reset the deferred state.
2908 deferred_compare_ =
kNone;
// -------------------------------------------------------------------------
// Simple branch helpers and CompareWords. Lossy extraction — loop labels,
// parameter lists, and braces were dropped; fragments kept byte-identical.
// -------------------------------------------------------------------------
2911void Assembler::BranchIfZero(Register rn, Label* label, JumpDistance distance) {
2912 beqz(rn, label, distance);
// Branch on a single bit of rn (isolated into TMP2 with andi).
2915void Assembler::BranchIfBit(Register rn,
2916 intptr_t bit_number,
2917 Condition condition,
2919 JumpDistance distance) {
2921 andi(TMP2, rn, 1 << bit_number);
2922 if (condition == ZERO) {
2923 beqz(TMP2, label, distance);
2924 }
else if (condition == NOT_ZERO) {
2925 bnez(TMP2, label, distance);
// Smi checks: a Smi has tag bits of zero under kSmiTagMask.
2931void Assembler::BranchIfNotSmi(Register reg,
2933 JumpDistance distance) {
2935 andi(TMP2, reg, kSmiTagMask);
2936 bnez(TMP2, label, distance);
2938void Assembler::BranchIfSmi(Register reg, Label* label, JumpDistance distance) {
2940 andi(TMP2, reg, kSmiTagMask);
2941 beqz(TMP2, label, distance);
2944void Assembler::ArithmeticShiftRightImmediate(Register reg, intptr_t shift) {
2945 srai(reg, reg, shift);
// Word-by-word comparison loop over two heap objects; advances both pointers
// and loops while the words match, `count` words remaining.
2948void Assembler::CompareWords(Register reg1,
2956 BranchIfZero(
count,
equals, Assembler::kNearJump);
2957 AddImmediate(
count, -1);
2958 lx(temp, FieldAddress(reg1,
offset));
2959 lx(TMP, FieldAddress(reg2,
offset));
2960 addi(reg1, reg1, target::kWordSize);
2961 addi(reg2, reg2, target::kWordSize);
2962 beq(temp, TMP, &loop, Assembler::kNearJump);
// -------------------------------------------------------------------------
// JumpAndLink: call a Code object by loading it from the object pool and
// calling through its entry point. Lossy extraction — FindObject arguments,
// the Call bodies, and braces were dropped; fragments kept byte-identical.
// -------------------------------------------------------------------------
2965void Assembler::JumpAndLink(intptr_t target_code_pool_index,
2966 CodeEntryKind entry_kind) {
2973 LoadWordFromPoolIndex(code_reg, target_code_pool_index);
2974 Call(FieldAddress(code_reg, target::Code::entry_point_offset(entry_kind)));
// Overload that first interns the target Code in the object pool.
2977void Assembler::JumpAndLink(
2979 ObjectPoolBuilderEntry::Patchability patchable,
2980 CodeEntryKind entry_kind,
2981 ObjectPoolBuilderEntry::SnapshotBehavior snapshot_behavior) {
2982 const intptr_t index = object_pool_builder().FindObject(
2984 JumpAndLink(index, entry_kind);
// Variant that records an equivalence object for pool deduplication.
2987void Assembler::JumpAndLinkWithEquivalence(
const Code&
target,
2988 const Object& equivalence,
2989 CodeEntryKind entry_kind) {
2990 const intptr_t index =
2992 JumpAndLink(index, entry_kind);
// Plain calls through an address or register (bodies lost in extraction).
2995void Assembler::Call(Address
target) {
3000void Assembler::Call(Register
target) {
// AddShifted: dest = base + (index << shift). Uses Zba sh1add/sh2add/sh3add
// when available; negative shift means arithmetic right shift; the TMP2
// variants appear to preserve `index` when dest aliases it — TODO confirm
// the lost guarding conditions. Lossy extraction — the shift==0 condition,
// else lines, and braces were dropped; fragments kept byte-identical.
3004void Assembler::AddShifted(Register dest,
3009 add(dest, index,
base);
3010 }
else if (Supports(RV_Zba) && (shift == 1)) {
3011 sh1add(dest, index,
base);
3012 }
else if (Supports(RV_Zba) && (shift == 2)) {
3013 sh2add(dest, index,
base);
3014 }
else if (Supports(RV_Zba) && (shift == 3)) {
3015 sh3add(dest, index,
base);
3016 }
else if (shift < 0) {
3018 srai(dest, index, -shift);
3019 add(dest, dest,
base);
3021 srai(TMP2, index, -shift);
3022 add(dest, TMP2,
base);
3026 slli(dest, index, shift);
3027 add(dest, dest,
base);
3029 slli(TMP2, index, shift);
3030 add(dest, TMP2,
base);
// -------------------------------------------------------------------------
// Immediate-operand arithmetic/logic helpers. Each tries cheap encodings
// (no-op, single instruction, Zbb/Zbs shortcuts) before materializing the
// immediate in TMP/TMP2. Lossy extraction — addi/mul fast paths, else lines,
// and braces were dropped; fragments kept byte-identical.
// -------------------------------------------------------------------------
3035void Assembler::AddImmediate(Register rd,
3039 if ((imm == 0) && (rd == rs1)) {
3046 LoadImmediate(TMP2, imm);
// MulImmediate: power-of-two multiplies become shifts (slliw for 32-bit
// operand size on RV64); otherwise multiply via TMP.
3051void Assembler::MulImmediate(Register rd,
3055 if (Utils::IsPowerOfTwo(imm)) {
3056 const intx_t shift = Utils::ShiftForPowerOfTwo(imm);
3058 ASSERT(sz == kFourBytes || sz == kEightBytes);
3059 if (sz == kFourBytes) {
3060 slliw(rd, rs1, shift);
3062 slli(rd, rs1, shift);
3065 ASSERT(sz == kFourBytes);
3066 slli(rd, rs1, shift);
3069 LoadImmediate(TMP, imm);
3071 ASSERT(sz == kFourBytes || sz == kEightBytes);
3072 if (sz == kFourBytes) {
3078 ASSERT(sz == kFourBytes);
// AndImmediate fast paths: all-ones -> move; single clear bit -> bclri;
// low-mask -> zero-extend via slli/srli (zexth for a 16-bit mask with Zbb).
3084void Assembler::AndImmediate(Register rd,
3090 MoveRegister(rd, rs1);
3093 }
else if (Supports(RV_Zbs) && Utils::IsPowerOfTwo(~uimm)) {
3094 bclri(rd, rs1, Utils::ShiftForPowerOfTwo(~uimm));
3095 }
else if (Utils::IsPowerOfTwo(uimm + 1)) {
3096 intptr_t shift = Utils::ShiftForPowerOfTwo(uimm + 1);
3097 if (Supports(RV_Zbb) && (shift == 16)) {
3100 slli(rd, rs1, XLEN - shift);
3101 srli(rd, rd, XLEN - shift);
3105 LoadImmediate(TMP2, imm);
3106 and_(rd, rs1, TMP2);
// OrImmediate: zero -> move; single set bit -> bseti; else via TMP2.
3109void Assembler::OrImmediate(Register rd,
3115 MoveRegister(rd, rs1);
3118 }
else if (Supports(RV_Zbs) && Utils::IsPowerOfTwo(uimm)) {
3119 bseti(rd, rs1, Utils::ShiftForPowerOfTwo(uimm));
3122 LoadImmediate(TMP2, imm);
// XorImmediate: zero -> move; single bit -> binvi; else via TMP2.
3126void Assembler::XorImmediate(Register rd,
3132 MoveRegister(rd, rs1);
3135 }
else if (Supports(RV_Zbs) && Utils::IsPowerOfTwo(uimm)) {
3136 binvi(rd, rs1, Utils::ShiftForPowerOfTwo(uimm));
3139 LoadImmediate(TMP2, imm);
3140 xor_(rd, rs1, TMP2);
// Record deferred test/compare against an immediate (see BranchIf/SetIf).
3144void Assembler::TestImmediate(Register rn, intx_t imm, OperandSize sz) {
3146 deferred_compare_ = kTestImm;
3147 deferred_left_ = rn;
3148 deferred_imm_ = imm;
3150void Assembler::CompareImmediate(Register rn, intx_t imm, OperandSize sz) {
3152 deferred_compare_ = kCompareImm;
3153 deferred_left_ = rn;
3154 deferred_imm_ = imm;
// -------------------------------------------------------------------------
// Address preparation and sized load/store dispatch. PrepareLargeOffset
// splits an out-of-range offset into hi (into TMP2 via lui/add) + lo parts.
// Lossy extraction — the in-range fast path, switch case labels, and braces
// were dropped; fragments kept byte-identical.
// -------------------------------------------------------------------------
3157Address Assembler::PrepareLargeOffset(Register
base, int32_t
offset) {
3166 add(TMP2, TMP2,
base);
3167 return Address(TMP2, lo);
// Load: dispatch on OperandSize (the case labels were lost; pairs of lines
// below are the unsigned/signed variants per size).
3170void Assembler::Load(Register dest,
const Address& address, OperandSize sz) {
3171 Address
addr = PrepareLargeOffset(address.base(), address.offset());
3175 return ld(dest, addr);
3177 return lwu(dest, addr);
3180 return lw(dest, addr);
3183 return lw(dest, addr);
3185 return lhu(dest, addr);
3187 return lh(dest, addr);
3189 return lbu(dest, addr);
3191 return lb(dest, addr);
// Load from rebased index address, removing the heap-object tag.
3199void Assembler::LoadIndexedPayload(Register dest,
3201 int32_t payload_offset,
3206 LoadFromOffset(dest, TMP, payload_offset - kHeapObjectTag, sz);
// FP load helpers (bodies lost in extraction).
3209void Assembler::LoadSFromOffset(FRegister dest, Register
base, int32_t
offset) {
3213void Assembler::LoadDFromOffset(FRegister dest, Register
base, int32_t
offset) {
// SP-relative slot helpers.
3217void Assembler::LoadFromStack(Register dst, intptr_t depth) {
3218 LoadFromOffset(dst, SPREG, target::kWordSize * depth);
3220void Assembler::StoreToStack(Register src, intptr_t depth) {
3221 StoreToOffset(src, SPREG, target::kWordSize * depth);
3223void Assembler::CompareToStack(Register src, intptr_t depth) {
3224 CompareWithMemoryValue(src, Address(SPREG, target::kWordSize * depth));
// Store: sized dispatch mirroring Load.
3227void Assembler::Store(Register src,
const Address& address, OperandSize sz) {
3228 Address
addr = PrepareLargeOffset(address.base(), address.offset());
3232 return sd(src, addr);
3236 return sw(src, addr);
3239 return sh(src, addr);
3242 return sb(src, addr);
// FP store helpers (bodies lost in extraction).
3248void Assembler::StoreSToOffset(FRegister src, Register
base, int32_t
offset) {
3252void Assembler::StoreDToOffset(FRegister src, Register
base, int32_t
offset) {
// -------------------------------------------------------------------------
// Generational write barriers. Fast path: skip when the stored value is a
// Smi (optionally), then AND the object/value tag bytes and compare against
// the thread's barrier state; slow path marshals object/value into the
// expected registers and calls the barrier wrapper stub. Lossy extraction —
// label declarations, parameter lists, and braces were dropped; fragments
// kept byte-identical. NOTE(review): the DEBUG-only "value must be Smi or
// old" check appears between the Smi test and passed_check.
// -------------------------------------------------------------------------
3256void Assembler::StoreBarrier(Register
object,
3258 CanBeSmi can_value_be_smi,
3262 ASSERT(
object != scratch);
3263 ASSERT(value != scratch);
3270 ASSERT(scratch != kNoRegister);
3281 if (can_value_be_smi == kValueCanBeSmi) {
3282 BranchIfSmi(value, &
done, kNearJump);
3286 BranchIfNotSmi(value, &passed_check, kNearJump);
3288 Bind(&passed_check);
// Barrier test: (object tags >> overlap shift) & value tags vs state.
3291 lbu(scratch, FieldAddress(
object, target::Object::tags_offset()));
3292 lbu(TMP2, FieldAddress(value, target::Object::tags_offset()));
3293 srli(scratch, scratch, target::UntaggedObject::kBarrierOverlapShift);
3294 and_(scratch, scratch, TMP2);
3295 ble(scratch, WRITE_BARRIER_STATE, &
done, kNearJump);
// Slow path: spill/shuffle so value lands in kWriteBarrierValueReg and the
// object in a free register before invoking the wrapper stub.
3298 if (value != kWriteBarrierValueReg) {
3301 if (
object != kWriteBarrierValueReg) {
3302 PushRegister(kWriteBarrierValueReg);
3306 objectForCall = (
value ==
S3) ? S4 :
S3;
3307 PushRegisterPair(kWriteBarrierValueReg, objectForCall);
3308 mv(objectForCall,
object);
3310 mv(kWriteBarrierValueReg, value);
3314 generate_invoke_write_barrier_wrapper_(objectForCall);
3316 if (value != kWriteBarrierValueReg) {
3317 if (
object != kWriteBarrierValueReg) {
3318 PopRegister(kWriteBarrierValueReg);
3320 PopRegisterPair(kWriteBarrierValueReg, objectForCall);
// Array variant: same fast path; slow path requires object/value/slot in
// their fixed barrier registers (FATALs otherwise, per the lost lines).
3326void Assembler::ArrayStoreBarrier(Register
object,
3329 CanBeSmi can_value_be_smi,
3332 const bool spill_lr =
true;
3335 ASSERT(
object != scratch);
3338 ASSERT(value != scratch);
3347 ASSERT(scratch != kNoRegister);
3358 if (can_value_be_smi == kValueCanBeSmi) {
3359 BranchIfSmi(value, &
done, kNearJump);
3363 BranchIfNotSmi(value, &passed_check, kNearJump);
3365 Bind(&passed_check);
3368 lbu(scratch, FieldAddress(
object, target::Object::tags_offset()));
3369 lbu(TMP2, FieldAddress(value, target::Object::tags_offset()));
3370 srli(scratch, scratch, target::UntaggedObject::kBarrierOverlapShift);
3371 and_(scratch, scratch, TMP2);
3372 ble(scratch, WRITE_BARRIER_STATE, &
done, kNearJump);
3376 if ((
object != kWriteBarrierObjectReg) || (value != kWriteBarrierValueReg) ||
3377 (slot != kWriteBarrierSlotReg)) {
3383 generate_invoke_array_write_barrier_();
// -------------------------------------------------------------------------
// Barrier verification and barrier-free object store. Lossy extraction —
// label declarations, the Register value parameter, else lines, and braces
// were dropped; fragments kept byte-identical.
// -------------------------------------------------------------------------
// Debug aid: Stop()s if a store claimed to need no barrier actually stores
// a new-space value into an old, not-remembered object.
3390void Assembler::VerifyStoreNeedsNoWriteBarrier(Register
object,
3398 BranchIfSmi(value, &
done, kNearJump);
3399 lbu(TMP2, FieldAddress(value, target::Object::tags_offset()));
3400 andi(TMP2, TMP2, 1 << target::UntaggedObject::kNewBit);
3401 beqz(TMP2, &
done, kNearJump);
3402 lbu(TMP2, FieldAddress(
object, target::Object::tags_offset()));
3403 andi(TMP2, TMP2, 1 << target::UntaggedObject::kOldAndNotRememberedBit);
3404 beqz(TMP2, &
done, kNearJump);
3405 Stop(
"Write barrier is required");
// Stores a constant Object without a barrier; Smi zero stores ZR directly,
// other constants are loaded into TMP first; kRelease inserts a fence.
3409void Assembler::StoreObjectIntoObjectNoBarrier(Register
object,
3410 const Address& dest,
3411 const Object& value,
3412 MemoryOrder memory_order,
3420 }
else if (target::IsSmi(value) && (target::ToRawSmi(value) == 0)) {
3424 LoadObject(TMP, value);
3427 if (memory_order == kRelease) {
3428 fence(HartEffects::kMemory, HartEffects::kWrite);
3430 Store(value_reg, dest, size);
// -------------------------------------------------------------------------
// Pool-pointer and thread-state loads. Lossy extraction — bodies and braces
// were dropped; fragments kept byte-identical.
// -------------------------------------------------------------------------
3434void Assembler::StoreInternalPointer(Register
object,
3435 const Address& dest,
// Loads the untagged ObjectPool from CODE_REG; constant-pool use is only
// allowed when loading into the canonical PP register.
3441void Assembler::LoadPoolPointer(Register pp) {
3443 lx(pp, FieldAddress(CODE_REG, target::Code::object_pool_offset()));
3451 subi(pp, pp, kHeapObjectTag);
3452 set_constant_pool_allowed(pp == PP);
3455bool Assembler::CanLoadFromObjectPool(
const Object&
object)
const {
3457 if (!constant_pool_allowed()) {
// Interns the native function in the pool and loads its entry.
3465void Assembler::LoadNativeEntry(
3467 const ExternalLabel* label,
3468 ObjectPoolBuilderEntry::Patchability patchable) {
3469 const intptr_t index =
3470 object_pool_builder().FindNativeFunction(label, patchable);
3471 LoadWordFromPoolIndex(dst, index);
3473void Assembler::LoadIsolate(Register dst) {
3474 lx(dst, Address(THR, target::Thread::isolate_offset()));
3476void Assembler::LoadIsolateGroup(Register dst) {
3477 lx(dst, Address(THR, target::Thread::isolate_group_offset()));
// -------------------------------------------------------------------------
// Immediate materialization and object-pool word access. Lossy extraction —
// the small-immediate fast paths, lui lines, IsITypeImm checks, and braces
// were dropped; fragments kept byte-identical.
// -------------------------------------------------------------------------
// LoadImmediate strategies for >32-bit values: shifted small value, shifted
// lui, constant pool, or recursive hi+lo composition; 32-bit values use
// lui/addiw (ImmHi/ImmLo split).
3480void Assembler::LoadImmediate(Register reg, intx_t imm) {
3482 if (!Utils::IsInt(32, imm)) {
3483 int shift = Utils::CountTrailingZeros64(imm);
3485 li(reg, imm >> shift);
3486 slli(reg, reg, shift);
3489 if ((shift >= 12) &&
IsUTypeImm(imm >> (shift - 12))) {
3490 lui(reg, imm >> (shift - 12));
3491 slli(reg, reg, shift - 12);
3495 if (constant_pool_allowed()) {
3496 intptr_t index = object_pool_builder().FindImmediate(imm);
3497 LoadWordFromPoolIndex(reg, index);
3501 intx_t lo =
ImmLo(imm);
3502 intx_t hi = imm - lo;
3503 shift = Utils::CountTrailingZeros64(hi);
3505 LoadImmediate(reg, hi >> shift);
3506 slli(reg, reg, shift);
3514 intx_t lo =
ImmLo(imm);
3515 intx_t hi =
ImmHi(imm);
3524 addiw(reg, reg, lo);
// FP immediates go through the constant pool (bit-pattern keyed).
3530void Assembler::LoadSImmediate(FRegister reg,
float imms) {
3531 int32_t imm = bit_cast<int32_t, float>(imms);
3535 ASSERT(constant_pool_allowed());
3536 intptr_t index = object_pool_builder().FindImmediate(imm);
3537 intptr_t
offset = target::ObjectPool::element_offset(index);
3538 LoadSFromOffset(reg, PP,
offset);
3542void Assembler::LoadDImmediate(FRegister reg,
double immd) {
3543 int64_t imm = bit_cast<int64_t, double>(immd);
3551 ASSERT(constant_pool_allowed());
3552 intptr_t index = object_pool_builder().FindImmediate64(imm);
3553 intptr_t
offset = target::ObjectPool::element_offset(index);
3554 LoadDFromOffset(reg, PP,
offset);
3558void Assembler::LoadQImmediate(FRegister reg, simd128_value_t immq) {
// Pool element access: direct lx for small offsets, else hi part added into
// the destination/TMP first (those lines were lost in extraction).
3566void Assembler::LoadWordFromPoolIndex(Register dst,
3569 ASSERT((pp != PP) || constant_pool_allowed());
3571 const uint32_t
offset = target::ObjectPool::element_offset(index);
3576 lx(dst, Address(pp, lo));
3580 lx(dst, Address(dst, lo));
3584void Assembler::StoreWordToPoolIndex(Register src,
3587 ASSERT((pp != PP) || constant_pool_allowed());
3589 const uint32_t
offset = target::ObjectPool::element_offset(index);
3594 sx(src, Address(pp, lo));
3598 sx(src, Address(TMP, lo));
// -------------------------------------------------------------------------
// Object comparison and class-id extraction helpers. Lossy extraction —
// the IsSameObject/null fast-path condition, #if XLEN guards around the
// srliw/srli and lwu/lw pairs, lx through the class table, and braces were
// dropped; fragments kept byte-identical.
// -------------------------------------------------------------------------
// CompareObject: null compares against NULL_REG, Smis compare as raw
// immediates, other objects are loaded into TMP first.
3602void Assembler::CompareObject(Register reg,
const Object&
object) {
3605 CompareObjectRegisters(reg, NULL_REG);
3606 }
else if (target::IsSmi(
object)) {
3607 CompareImmediate(reg, target::ToRawSmi(
object), kObjectBytes);
3609 LoadObject(TMP,
object);
3610 CompareObjectRegisters(reg, TMP);
// Class id occupies tag bits [12, 32); the srliw/srli pair is presumably the
// RV64/RV32 split — confirm the lost #if guard.
3614void Assembler::ExtractClassIdFromTags(Register
result, Register tags) {
3615 ASSERT(target::UntaggedObject::kClassIdTagPos == 12);
3616 ASSERT(target::UntaggedObject::kClassIdTagSize == 20);
3618 srliw(
result, tags, target::UntaggedObject::kClassIdTagPos);
3620 srli(
result, tags, target::UntaggedObject::kClassIdTagPos);
// Instance size: tag bits [8, 12), scaled by the object alignment.
3624void Assembler::ExtractInstanceSizeFromTags(Register
result, Register tags) {
3625 ASSERT(target::UntaggedObject::kSizeTagPos == 8);
3626 ASSERT(target::UntaggedObject::kSizeTagSize == 4);
3627 srli(
result, tags, target::UntaggedObject::kSizeTagPos);
3628 andi(
result,
result, (1 << target::UntaggedObject::kSizeTagSize) - 1);
3629 slli(
result,
result, target::ObjectAlignment::kObjectAlignmentLog2);
// LoadClassId: load the tags word (lwu/lw pair is presumably the XLEN
// split) and shift the class id into place.
3632void Assembler::LoadClassId(Register
result, Register
object) {
3633 ASSERT(target::UntaggedObject::kClassIdTagPos == 12);
3634 ASSERT(target::UntaggedObject::kClassIdTagSize == 20);
3636 lwu(
result, FieldAddress(
object, target::Object::tags_offset()));
3638 lw(
result, FieldAddress(
object, target::Object::tags_offset()));
3640 srli(
result,
result, target::UntaggedObject::kClassIdTagPos);
// LoadClassById: index the isolate group's cached class table.
3643void Assembler::LoadClassById(Register
result, Register class_id) {
3646 const intptr_t table_offset =
3647 target::IsolateGroup::cached_class_table_table_offset();
3649 LoadIsolateGroup(
result);
3651 AddShifted(
result,
result, class_id, target::kWordSizeLog2);
3654void Assembler::CompareClassId(Register
object,
3657 ASSERT(scratch != kNoRegister);
3658 LoadClassId(scratch,
object);
3659 CompareImmediate(scratch, class_id);
// Smi-aware variant: Smis short-circuit past the LoadClassId (the Smi cid
// load was lost in extraction).
3662void Assembler::LoadClassIdMayBeSmi(Register
result, Register
object) {
3668 BranchIfSmi(
object, &
done, kNearJump);
3669 LoadClassId(
result,
object);
3672void Assembler::LoadTaggedClassIdMayBeSmi(Register
result, Register
object) {
3673 LoadClassIdMayBeSmi(
result,
object);
3676void Assembler::EnsureHasClassIdInDEBUG(intptr_t cid,
3681 Comment(
"Check that object in register has cid %" Pd "", cid);
3683 LoadClassIdMayBeSmi(scratch, src);
3684 CompareImmediate(scratch, cid);
3685 BranchIf(
EQUAL, &matches, Assembler::kNearJump);
3687 CompareImmediate(scratch, kNullCid);
3688 BranchIf(
EQUAL, &matches, Assembler::kNearJump);
3695void Assembler::EnterFrame(intptr_t frame_size) {
3698 subi(SP, SP, frame_size + 2 * target::kWordSize);
3699 sx(
RA, Address(SP, frame_size + 1 * target::kWordSize));
3700 sx(FP, Address(SP, frame_size + 0 * target::kWordSize));
3701 addi(FP, SP, frame_size + 2 * target::kWordSize);
3703void Assembler::LeaveFrame() {
3706 subi(SP, FP, 2 * target::kWordSize);
3707 lx(FP, Address(SP, 0 * target::kWordSize));
3708 lx(
RA, Address(SP, 1 * target::kWordSize));
3709 addi(SP, SP, 2 * target::kWordSize);
3712void Assembler::TransitionGeneratedToNative(Register destination,
3713 Register new_exit_frame,
3714 Register new_exit_through_ffi,
3715 bool enter_safepoint) {
3718 Address(THR, target::Thread::top_exit_frame_info_offset()));
3720 sx(new_exit_through_ffi,
3721 Address(THR, target::Thread::exit_through_ffi_offset()));
3722 Register tmp = new_exit_through_ffi;
3725 sx(destination, Address(THR, target::Thread::vm_tag_offset()));
3726 li(tmp, target::Thread::native_execution_state());
3727 sx(tmp, Address(THR, target::Thread::execution_state_offset()));
3729 if (enter_safepoint) {
3730 EnterFullSafepoint(tmp);
3734void Assembler::TransitionNativeToGenerated(Register
state,
3735 bool exit_safepoint,
3736 bool ignore_unwind_in_progress) {
3737 if (exit_safepoint) {
3738 ExitFullSafepoint(
state, ignore_unwind_in_progress);
3741 ASSERT(!ignore_unwind_in_progress);
3744 ASSERT(target::Thread::full_safepoint_state_acquired() != 0);
3745 li(
state, target::Thread::full_safepoint_state_acquired());
3746 lx(
RA, Address(THR, target::Thread::safepoint_state_offset()));
3749 beqz(
RA, &
ok, Assembler::kNearJump);
3756 li(
state, target::Thread::vm_tag_dart_id());
3757 sx(
state, Address(THR, target::Thread::vm_tag_offset()));
3758 li(
state, target::Thread::generated_execution_state());
3759 sx(
state, Address(THR, target::Thread::execution_state_offset()));
3762 sx(ZR, Address(THR, target::Thread::top_exit_frame_info_offset()));
3763 sx(ZR, Address(THR, target::Thread::exit_through_ffi_offset()));
3766void Assembler::EnterFullSafepoint(Register
state) {
3775 Label slow_path,
done, retry;
3777 j(&slow_path, Assembler::kNearJump);
3780 addi(addr, THR, target::Thread::safepoint_state_offset());
3782 lr(
state, Address(addr, 0));
3783 subi(
state,
state, target::Thread::full_safepoint_state_unacquired());
3784 bnez(
state, &slow_path, Assembler::kNearJump);
3786 li(
state, target::Thread::full_safepoint_state_acquired());
3788 beqz(
state, &
done, Assembler::kNearJump);
3791 j(&retry, Assembler::kNearJump);
3795 lx(addr, Address(THR, target::Thread::enter_safepoint_stub_offset()));
3796 lx(addr, FieldAddress(addr, target::Code::entry_point_offset()));
3802void Assembler::ExitFullSafepoint(Register
state,
3803 bool ignore_unwind_in_progress) {
3811 Label slow_path,
done, retry;
3813 j(&slow_path, Assembler::kNearJump);
3816 addi(addr, THR, target::Thread::safepoint_state_offset());
3818 lr(
state, Address(addr, 0));
3819 subi(
state,
state, target::Thread::full_safepoint_state_acquired());
3820 bnez(
state, &slow_path, Assembler::kNearJump);
3822 li(
state, target::Thread::full_safepoint_state_unacquired());
3824 beqz(
state, &
done, Assembler::kNearJump);
3827 j(&retry, Assembler::kNearJump);
3831 if (ignore_unwind_in_progress) {
3835 exit_safepoint_ignore_unwind_in_progress_stub_offset()));
3837 lx(addr, Address(THR, target::Thread::exit_safepoint_stub_offset()));
3839 lx(addr, FieldAddress(addr, target::Code::entry_point_offset()));
3845void Assembler::CheckFpSpDist(intptr_t fp_sp_dist) {
3849 Comment(
"CheckFpSpDist");
3851 CompareImmediate(TMP, fp_sp_dist);
3852 BranchIf(EQ, &
ok, compiler::Assembler::kNearJump);
3858void Assembler::CheckCodePointer() {
3860 if (!FLAG_check_code_pointer) {
3863 Comment(
"CheckCodePointer");
3864 Label cid_ok, instructions_ok;
3865 CompareClassId(CODE_REG, kCodeCid, TMP);
3866 BranchIf(EQ, &cid_ok, kNearJump);
3870 const intptr_t entry_offset =
3871 CodeSize() + target::Instructions::HeaderSize() -
kHeapObjectTag;
3872 intx_t imm = -entry_offset;
3873 intx_t lo =
ImmLo(imm);
3874 intx_t hi =
ImmHi(imm);
3877 lx(TMP2, FieldAddress(CODE_REG, target::Code::instructions_offset()));
3878 beq(TMP, TMP2, &instructions_ok, kNearJump);
3880 Bind(&instructions_ok);
3884void Assembler::RestoreCodePointer() {
3886 Address(FP, target::frame_layout.code_from_fp * target::kWordSize));
3890void Assembler::RestorePoolPointer() {
3891 if (FLAG_precompiled_mode) {
3892 lx(PP, Address(THR, target::Thread::global_object_pool_offset()));
3894 lx(PP, Address(FP, target::frame_layout.code_from_fp * target::kWordSize));
3895 lx(PP, FieldAddress(PP, target::Code::object_pool_offset()));
3897 subi(PP, PP, kHeapObjectTag);
3900void Assembler::RestorePinnedRegisters() {
3901 lx(WRITE_BARRIER_STATE,
3902 Address(THR, target::Thread::write_barrier_mask_offset()));
3903 lx(NULL_REG, Address(THR, target::Thread::object_null_offset()));
3928 xori(WRITE_BARRIER_STATE, WRITE_BARRIER_STATE,
3929 (target::UntaggedObject::kGenerationalBarrierMask << 1) - 1);
3933 ASSERT(target::UntaggedObject::kGenerationalBarrierMask ==
3934 (target::UntaggedObject::kIncrementalBarrierMask << 1));
3936 ASSERT(target::UntaggedObject::kIncrementalBarrierMask >
3937 target::UntaggedObject::kCanonicalBit);
3938 ASSERT(target::UntaggedObject::kIncrementalBarrierMask >
3939 target::UntaggedObject::kCardRememberedBit);
3942void Assembler::SetupGlobalPoolAndDispatchTable() {
3943 ASSERT(FLAG_precompiled_mode);
3944 lx(PP, Address(THR, target::Thread::global_object_pool_offset()));
3945 subi(PP, PP, kHeapObjectTag);
3946 lx(DISPATCH_TABLE_REG,
3947 Address(THR, target::Thread::dispatch_table_array_offset()));
3950void Assembler::EnterDartFrame(intptr_t frame_size, Register new_pp) {
3951 ASSERT(!constant_pool_allowed());
3953 if (!
IsITypeImm(frame_size + 4 * target::kWordSize)) {
3954 EnterDartFrame(0, new_pp);
3955 AddImmediate(SP, SP, -frame_size);
3961 if (FLAG_precompiled_mode) {
3962 subi(SP, SP, frame_size + 2 * target::kWordSize);
3963 sx(
RA, Address(SP, frame_size + 1 * target::kWordSize));
3964 sx(FP, Address(SP, frame_size + 0 * target::kWordSize));
3965 addi(FP, SP, frame_size + 2 * target::kWordSize);
3967 subi(SP, SP, frame_size + 4 * target::kWordSize);
3968 sx(
RA, Address(SP, frame_size + 3 * target::kWordSize));
3969 sx(FP, Address(SP, frame_size + 2 * target::kWordSize));
3970 sx(CODE_REG, Address(SP, frame_size + 1 * target::kWordSize));
3971 addi(PP, PP, kHeapObjectTag);
3972 sx(PP, Address(SP, frame_size + 0 * target::kWordSize));
3973 addi(FP, SP, frame_size + 4 * target::kWordSize);
3974 if (new_pp == kNoRegister) {
3980 set_constant_pool_allowed(
true);
3988void Assembler::EnterOsrFrame(intptr_t extra_size, Register new_pp) {
3989 ASSERT(!constant_pool_allowed());
3990 Comment(
"EnterOsrFrame");
3991 RestoreCodePointer();
3994 if (extra_size > 0) {
3995 AddImmediate(SP, -extra_size);
3999void Assembler::LeaveDartFrame() {
4002 if (!FLAG_precompiled_mode) {
4003 lx(PP, Address(FP, target::frame_layout.saved_caller_pp_from_fp *
4004 target::kWordSize));
4005 subi(PP, PP, kHeapObjectTag);
4007 set_constant_pool_allowed(
false);
4008 subi(SP, FP, 2 * target::kWordSize);
4009 lx(FP, Address(SP, 0 * target::kWordSize));
4010 lx(
RA, Address(SP, 1 * target::kWordSize));
4011 addi(SP, SP, 2 * target::kWordSize);
4014void Assembler::LeaveDartFrame(intptr_t fp_sp_dist) {
4015 intptr_t pp_offset =
4016 target::frame_layout.saved_caller_pp_from_fp * target::kWordSize -
4018 intptr_t fp_offset =
4019 target::frame_layout.saved_caller_fp_from_fp * target::kWordSize -
4021 intptr_t ra_offset =
4022 target::frame_layout.saved_caller_pc_from_fp * target::kWordSize -
4031 if (!FLAG_precompiled_mode) {
4032 lx(PP, Address(SP, pp_offset));
4033 subi(PP, PP, kHeapObjectTag);
4035 set_constant_pool_allowed(
false);
4036 lx(FP, Address(SP, fp_offset));
4037 lx(
RA, Address(SP, ra_offset));
4038 addi(SP, SP, -fp_sp_dist);
4041void Assembler::CallRuntime(
const RuntimeEntry& entry,
4043 ASSERT(!entry.is_leaf());
4046 lx(T5, compiler::Address(THR, entry.OffsetFromThread()));
4048 Call(Address(THR, target::Thread::call_to_runtime_entry_point_offset()));
4051static const RegisterSet kRuntimeCallSavedRegisters(kDartVolatileCpuRegs,
4052 kAbiVolatileFpuRegs);
4054#define __ assembler_->
4056LeafRuntimeScope::LeafRuntimeScope(Assembler* assembler,
4057 intptr_t frame_size,
4058 bool preserve_registers)
4059 : assembler_(assembler), preserve_registers_(preserve_registers) {
4062 __ subi(SP, SP, 4 * target::kWordSize);
4063 __ sx(
RA, Address(SP, 3 * target::kWordSize));
4064 __ sx(FP, Address(SP, 2 * target::kWordSize));
4065 __ sx(CODE_REG, Address(SP, 1 * target::kWordSize));
4066 __ sx(PP, Address(SP, 0 * target::kWordSize));
4067 __ addi(FP, SP, 4 * target::kWordSize);
4069 if (preserve_registers) {
4070 __ PushRegisters(kRuntimeCallSavedRegisters);
4082 __ ReserveAlignedFrameSpace(frame_size);
4085void LeafRuntimeScope::Call(
const RuntimeEntry& entry,
4088 __ lx(TMP2, compiler::Address(THR, entry.OffsetFromThread()));
4089 __ sx(TMP2, compiler::Address(THR, target::Thread::vm_tag_offset()));
4091 __ LoadImmediate(TMP2, VMTag::kDartTagId);
4092 __ sx(TMP2, compiler::Address(THR, target::Thread::vm_tag_offset()));
4095LeafRuntimeScope::~LeafRuntimeScope() {
4096 if (preserve_registers_) {
4097 const intptr_t kSavedRegistersSize =
4098 kRuntimeCallSavedRegisters.CpuRegisterCount() * target::kWordSize +
4100 4 * target::kWordSize;
4102 __ subi(SP, FP, kSavedRegistersSize);
4104 __ PopRegisters(kRuntimeCallSavedRegisters);
4107 __ subi(SP, FP, 4 * target::kWordSize);
4108 __ lx(PP, Address(SP, 0 * target::kWordSize));
4109 __ lx(CODE_REG, Address(SP, 1 * target::kWordSize));
4110 __ lx(FP, Address(SP, 2 * target::kWordSize));
4111 __ lx(
RA, Address(SP, 3 * target::kWordSize));
4112 __ addi(SP, SP, 4 * target::kWordSize);
4117void Assembler::EnterCFrame(intptr_t frame_space) {
4128 subi(SP, SP, frame_space + 3 * target::kWordSize);
4129 sx(
RA, Address(SP, frame_space + 2 * target::kWordSize));
4130 sx(FP, Address(SP, frame_space + 1 * target::kWordSize));
4131 sx(PP, Address(SP, frame_space + 0 * target::kWordSize));
4132 addi(FP, SP, frame_space + 3 * target::kWordSize);
4133 const intptr_t kAbiStackAlignment = 16;
4134 andi(SP, SP, ~(kAbiStackAlignment - 1));
4137void Assembler::LeaveCFrame() {
4140 subi(SP, FP, 3 * target::kWordSize);
4141 lx(PP, Address(SP, 0 * target::kWordSize));
4142 lx(FP, Address(SP, 1 * target::kWordSize));
4143 lx(
RA, Address(SP, 2 * target::kWordSize));
4144 addi(SP, SP, 3 * target::kWordSize);
4150void Assembler::MonomorphicCheckedEntryJIT() {
4151 has_monomorphic_entry_ =
true;
4152 const intptr_t saved_far_branch_level = far_branch_level();
4153 set_far_branch_level(0);
4154 const intptr_t
start = CodeSize();
4156 Label immediate, miss;
4158 lx(TMP, Address(THR, target::Thread::switchable_call_miss_entry_offset()));
4161 Comment(
"MonomorphicCheckedEntry");
4163 target::Instructions::kMonomorphicEntryOffsetJIT);
4166 const intptr_t cid_offset = target::Array::element_offset(0);
4167 const intptr_t count_offset = target::Array::element_offset(1);
4169 ASSERT(A1 != entries_reg);
4172 lx(TMP, FieldAddress(entries_reg, cid_offset));
4173 LoadTaggedClassIdMayBeSmi(A1, A0);
4174 bne(TMP, A1, &miss, kNearJump);
4176 lx(TMP, FieldAddress(entries_reg, count_offset));
4177 addi(TMP, TMP, target::ToRawSmi(1));
4178 sx(TMP, FieldAddress(entries_reg, count_offset));
4180 li(ARGS_DESC_REG, 0);
4184 target::Instructions::kPolymorphicEntryOffsetJIT);
4186 set_far_branch_level(saved_far_branch_level);
4192void Assembler::MonomorphicCheckedEntryAOT() {
4193 has_monomorphic_entry_ =
true;
4194 intptr_t saved_far_branch_level = far_branch_level();
4195 set_far_branch_level(0);
4197 const intptr_t
start = CodeSize();
4199 Label immediate, miss;
4201 lx(TMP, Address(THR, target::Thread::switchable_call_miss_entry_offset()));
4204 Comment(
"MonomorphicCheckedEntry");
4206 target::Instructions::kMonomorphicEntryOffsetAOT);
4207 LoadClassId(TMP, A0);
4209 bne(S5, TMP, &miss, kNearJump);
4213 target::Instructions::kPolymorphicEntryOffsetAOT);
4215 set_far_branch_level(saved_far_branch_level);
4218void Assembler::BranchOnMonomorphicCheckedEntryJIT(Label* label) {
4219 has_monomorphic_entry_ =
true;
4220 while (CodeSize() < target::Instructions::kMonomorphicEntryOffsetJIT) {
4224 while (CodeSize() < target::Instructions::kPolymorphicEntryOffsetJIT) {
4229void Assembler::CombineHashes(Register
hash, Register other) {
4234 slliw(other,
hash, 10);
4237 srliw(other,
hash, 6);
4243 slli(other,
hash, 10);
4246 srli(other,
hash, 6);
4251void Assembler::FinalizeHashForSize(intptr_t bit_size,
4257 ASSERT(bit_size <= kBitsPerInt32);
4258 ASSERT(scratch != kNoRegister);
4261 slliw(scratch,
hash, 3);
4264 srliw(scratch,
hash, 11);
4267 slliw(scratch,
hash, 15);
4271 slli(scratch,
hash, 3);
4274 srli(scratch,
hash, 11);
4277 slli(scratch,
hash, 15);
4281 if (bit_size < kBitsPerInt32) {
4282 AndImmediate(
hash,
hash, Utils::NBitMask(bit_size));
4285 seqz(scratch,
hash);
4290void Assembler::MaybeTraceAllocation(Register cid,
4293 JumpDistance distance) {
4294 LoadIsolateGroup(temp_reg);
4295 lx(temp_reg, Address(temp_reg, target::IsolateGroup::class_table_offset()));
4298 target::ClassTable::allocation_tracing_state_table_offset()));
4299 add(temp_reg, temp_reg, cid);
4300 LoadFromOffset(temp_reg, temp_reg,
4301 target::ClassTable::AllocationTracingStateSlotOffsetFor(0),
4303 bnez(temp_reg, trace);
4306void Assembler::MaybeTraceAllocation(intptr_t cid,
4309 JumpDistance distance) {
4311 LoadIsolateGroup(temp_reg);
4312 lx(temp_reg, Address(temp_reg, target::IsolateGroup::class_table_offset()));
4315 target::ClassTable::allocation_tracing_state_table_offset()));
4316 LoadFromOffset(temp_reg, temp_reg,
4317 target::ClassTable::AllocationTracingStateSlotOffsetFor(cid),
4319 bnez(temp_reg, trace);
4323void Assembler::TryAllocateObject(intptr_t cid,
4324 intptr_t instance_size,
4326 JumpDistance distance,
4327 Register instance_reg,
4328 Register temp_reg) {
4329 ASSERT(failure !=
nullptr);
4330 ASSERT(instance_size != 0);
4331 ASSERT(instance_reg != temp_reg);
4332 ASSERT(temp_reg != kNoRegister);
4333 ASSERT(Utils::IsAligned(instance_size,
4334 target::ObjectAlignment::kObjectAlignment));
4335 if (FLAG_inline_alloc &&
4336 target::Heap::IsAllocatableInNewSpace(instance_size)) {
4342 lx(instance_reg, Address(THR, target::Thread::top_offset()));
4343 lx(temp_reg, Address(THR, target::Thread::end_offset()));
4348 AddImmediate(instance_reg, instance_size);
4351 bleu(temp_reg, instance_reg, failure, distance);
4352 CheckAllocationCanary(instance_reg, temp_reg);
4356 sx(instance_reg, Address(THR, target::Thread::top_offset()));
4358 AddImmediate(instance_reg, -instance_size + kHeapObjectTag);
4360 const uword tags = target::MakeTagWordForNewSpaceObject(cid, instance_size);
4361 LoadImmediate(temp_reg, tags);
4362 Store(temp_reg, FieldAddress(instance_reg, target::Object::tags_offset()));
4364 j(failure, distance);
4368void Assembler::TryAllocateArray(intptr_t cid,
4369 intptr_t instance_size,
4372 Register end_address,
4375 if (FLAG_inline_alloc &&
4376 target::Heap::IsAllocatableInNewSpace(instance_size)) {
4382 lx(
instance, Address(THR, target::Thread::top_offset()));
4383 AddImmediate(end_address,
instance, instance_size);
4384 bltu(end_address,
instance, failure);
4389 lx(temp2, Address(THR, target::Thread::end_offset()));
4390 bgeu(end_address, temp2, failure);
4391 CheckAllocationCanary(
instance, temp2);
4395 sx(end_address, Address(THR, target::Thread::top_offset()));
4401 const uword tags = target::MakeTagWordForNewSpaceObject(cid, instance_size);
4402 LoadImmediate(temp2, tags);
4403 sx(temp2, FieldAddress(
instance, target::Object::tags_offset()));
4409void Assembler::CopyMemoryWords(Register src,
4414 beqz(size, &
done, kNearJump);
4416 lx(temp, Address(src));
4417 addi(src, src, target::kWordSize);
4418 sx(temp, Address(dst));
4419 addi(dst, dst, target::kWordSize);
4420 subi(size, size, target::kWordSize);
4421 bnez(size, &loop, kNearJump);
4425void Assembler::GenerateUnRelocatedPcRelativeCall(intptr_t offset_into_target) {
4427 intx_t lo =
ImmLo(offset_into_target);
4428 intx_t hi =
ImmHi(offset_into_target);
4430 jalr_fixed(
RA,
RA, lo);
4433void Assembler::GenerateUnRelocatedPcRelativeTailCall(
4434 intptr_t offset_into_target) {
4436 intx_t lo =
ImmLo(offset_into_target);
4437 intx_t hi =
ImmHi(offset_into_target);
4439 jalr_fixed(ZR, TMP, lo);
4442bool Assembler::AddressCanHoldConstantIndex(
const Object& constant,
4445 intptr_t index_scale) {
4446 if (!IsSafeSmi(constant))
return false;
4447 const int64_t index = target::SmiValue(constant);
4448 const int64_t
offset = index * index_scale + HeapDataOffset(is_external, cid);
4456Address Assembler::ElementAddressForIntIndex(
bool is_external,
4458 intptr_t index_scale,
4460 intptr_t index)
const {
4461 const int64_t
offset = index * index_scale + HeapDataOffset(is_external, cid);
4463 return Address(array,
static_cast<int32_t
>(
offset));
4465void Assembler::ComputeElementAddressForIntIndex(Register address,
4468 intptr_t index_scale,
4471 const int64_t
offset = index * index_scale + HeapDataOffset(is_external, cid);
4472 AddImmediate(address, array,
offset);
4475Address Assembler::ElementAddressForRegIndex(
bool is_external,
4477 intptr_t index_scale,
4483 const intptr_t boxing_shift = index_unboxed ? 0 : -
kSmiTagShift;
4484 const intptr_t shift = Utils::ShiftForPowerOfTwo(index_scale) + boxing_shift;
4485 const int32_t
offset = HeapDataOffset(is_external, cid);
4488 AddShifted(temp, array, index, shift);
4489 return Address(temp,
offset);
4492void Assembler::ComputeElementAddressForRegIndex(Register address,
4495 intptr_t index_scale,
4500 const intptr_t boxing_shift = index_unboxed ? 0 : -
kSmiTagShift;
4501 const intptr_t shift = Utils::ShiftForPowerOfTwo(index_scale) + boxing_shift;
4502 const int32_t
offset = HeapDataOffset(is_external, cid);
4503 ASSERT(array != address);
4504 ASSERT(index != address);
4505 AddShifted(address, array, index, shift);
4507 AddImmediate(address, address,
offset);
4511void Assembler::LoadStaticFieldAddress(Register address,
4514 LoadCompressedSmiFieldFromOffset(
4515 scratch, field, target::Field::host_offset_or_field_id_offset());
4516 const intptr_t field_table_offset =
4517 compiler::target::Thread::field_table_values_offset();
4518 LoadMemoryValue(address, THR,
static_cast<int32_t
>(field_table_offset));
4519 slli(scratch, scratch, target::kWordSizeLog2 - kSmiTagShift);
4520 add(address, address, scratch);
4523void Assembler::LoadFieldAddressForRegOffset(Register address,
4525 Register offset_in_words_as_smi) {
4526 AddShifted(address,
instance, offset_in_words_as_smi,
4527 target::kWordSizeLog2 - kSmiTagShift);
4528 addi(address, address, -kHeapObjectTag);
4532void Assembler::LoadObjectHelper(
4534 const Object&
object,
4536 ObjectPoolBuilderEntry::SnapshotBehavior snapshot_behavior) {
4545 if (
IsSameObject(CastHandle<Object>(compiler::TrueObject()),
object)) {
4546 addi(dst, NULL_REG, kTrueOffsetFromNull);
4549 if (
IsSameObject(CastHandle<Object>(compiler::FalseObject()),
object)) {
4550 addi(dst, NULL_REG, kFalseOffsetFromNull);
4554 if (target::CanLoadFromThread(
object, &
offset)) {
4555 lx(dst, Address(THR,
offset));
4558 if (target::IsSmi(
object)) {
4559 LoadImmediate(dst, target::ToRawSmi(
object));
4564 const intptr_t index =
4566 ? object_pool_builder().AddObject(
4567 object, ObjectPoolBuilderEntry::kPatchable, snapshot_behavior)
4568 : object_pool_builder().FindObject(
4569 object, ObjectPoolBuilderEntry::kNotPatchable,
4571 LoadWordFromPoolIndex(dst, index);
4574void Assembler::AddImmediateBranchOverflow(Register rd,
4581 AddImmediate(rd, rs1, imm);
4583 blt(rd, TMP2, overflow);
4584 }
else if (imm < 0) {
4585 bgt(rd, TMP2, overflow);
4588 AddImmediate(rd, rs1, imm);
4590 blt(rd, rs1, overflow);
4591 }
else if (imm < 0) {
4592 bgt(rd, rs1, overflow);
4596void Assembler::SubtractImmediateBranchOverflow(Register rd,
4601 AddImmediateBranchOverflow(rd, rs1, -imm, overflow);
4603void Assembler::MultiplyImmediateBranchOverflow(Register rd,
4612 LoadImmediate(TMP2, imm);
4615 mulh(TMP, rs1, TMP2);
4617 srai(TMP2, rd, XLEN - 1);
4618 bne(TMP, TMP2, overflow);
4620void Assembler::AddBranchOverflow(Register rd,
4631 if ((rd == rs1) && (rd == rs2)) {
4636 bltz(TMP, overflow);
4637 }
else if (rs1 == rs2) {
4642 bltz(TMP, overflow);
4643 }
else if (rd == rs1) {
4648 bne(TMP, TMP2, overflow);
4649 }
else if (rd == rs2) {
4654 bne(TMP, TMP2, overflow);
4659 bne(TMP, TMP2, overflow);
4663void Assembler::SubtractBranchOverflow(Register rd,
4674 if ((rd == rs1) && (rd == rs2)) {
4679 bltz(TMP, overflow);
4680 }
else if (rs1 == rs2) {
4685 bltz(TMP, overflow);
4686 }
else if (rd == rs1) {
4691 bne(TMP, TMP2, overflow);
4692 }
else if (rd == rs2) {
4697 bne(TMP, TMP2, overflow);
4702 bne(TMP, TMP2, overflow);
4706void Assembler::MultiplyBranchOverflow(Register rd,
4719 mulh(TMP, rs1, rs2);
4721 srai(TMP2, rd, XLEN - 1);
4722 bne(TMP, TMP2, overflow);
4725void Assembler::CountLeadingZeroes(Register rd, Register rs) {
4726 if (Supports(RV_Zbb)) {
4739 Label l0, l1, l2, l3, l4, l5;
4743 beqz(TMP, &l0, Assembler::kNearJump);
4744 subi(TMP2, TMP2, 32);
4749 beqz(TMP, &l1, Assembler::kNearJump);
4750 subi(TMP2, TMP2, 16);
4754 beqz(TMP, &l2, Assembler::kNearJump);
4755 subi(TMP2, TMP2, 8);
4759 beqz(TMP, &l3, Assembler::kNearJump);
4760 subi(TMP2, TMP2, 4);
4764 beqz(TMP, &l4, Assembler::kNearJump);
4765 subi(TMP2, TMP2, 2);
4770 beqz(TMP, &l5, Assembler::kNearJump);
4775void Assembler::RangeCheck(Register value,
4779 RangeCheckCondition condition,
4781 auto cc = condition == kIfInRange ?
LS :
HI;
4783 AddImmediate(to_check, value, -low);
4784 CompareImmediate(to_check, high - low);
static void done(const char *config, const char *src, const char *srcOptions, const char *name)
static void round(SkPoint *p)
static bool ok(int result)
static uint32_t hash(const SkShaderBase::GradientInfo &v)
static bool equals(T *a, T *b)
static bool left(const SkPoint &p0, const SkPoint &p1)
static bool right(const SkPoint &p0, const SkPoint &p1)
#define RA(width, name,...)
#define DEBUG_ASSERT(cond)
#define ASSERT_EQUAL(expected, actual)
#define RELEASE_ASSERT(cond)
#define COMPILE_ASSERT(expr)
MicroAssembler(ObjectPoolBuilder *object_pool_builder, intptr_t far_branch_level, ExtensionSet extensions)
#define DECLARE_FLAG(type, name)
#define DEFINE_FLAG(type, name, default_value, comment)
void BailoutWithBranchOffsetError()
bool IsOriginalObject(const Object &object)
bool IsInOldSpace(const Object &obj)
bool IsSameObject(const Object &a, const Object &b)
const Object & ToObject(const Code &handle)
uint32_t EncodeBTypeImm(intptr_t imm)
uint32_t EncodeCBImm(intptr_t imm)
constexpr bool IsAbiPreservedRegister(Register reg)
bool IsCI16Imm(intptr_t imm)
uint32_t EncodeUTypeImm(intptr_t imm)
uint32_t EncodeCSPStore4Imm(intptr_t imm)
bool IsCSPLoad4Imm(intptr_t imm)
bool IsCJImm(intptr_t imm)
uint32_t EncodeCJImm(intptr_t imm)
bool IsCIImm(intptr_t imm)
uint32_t EncodeCSPLoad8Imm(intptr_t imm)
const int kNumberOfFpuRegisters
bool IsCSPStore8Imm(intptr_t imm)
uint32_t EncodeCMem8Imm(intptr_t imm)
uint32_t EncodeCUImm(intptr_t imm)
uint32_t EncodeCI4SPNImm(intptr_t imm)
bool IsSTypeImm(intptr_t imm)
bool IsJTypeImm(intptr_t imm)
uint32_t EncodeCSPLoad4Imm(intptr_t imm)
uint32_t EncodeCIImm(intptr_t imm)
bool IsITypeImm(intptr_t imm)
bool IsCSPStore4Imm(intptr_t imm)
bool IsCUImm(intptr_t imm)
const Register IC_DATA_REG
bool IsUTypeImm(intptr_t imm)
bool IsCMem4Imm(intptr_t imm)
bool IsBTypeImm(intptr_t imm)
uint32_t EncodeITypeImm(intptr_t imm)
uint32_t EncodeSTypeImm(intptr_t imm)
uint32_t EncodeCMem4Imm(intptr_t imm)
uint32_t EncodeCSPStore8Imm(intptr_t imm)
bool IsCSPLoad8Imm(intptr_t imm)
bool IsCMem8Imm(intptr_t imm)
uint32_t EncodeJTypeImm(intptr_t imm)
bool IsCBImm(intptr_t imm)
bool IsCI4SPNImm(intptr_t imm)
uint32_t EncodeCI16Imm(intptr_t imm)
const int kFpuRegisterSize
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive keep the shell running after the Dart script has completed enable serial On low power devices with low core running concurrent GC tasks on threads can cause them to contend with the UI thread which could potentially lead to jank This option turns off all concurrent GC activities domain network JSON encoded network policy per domain This overrides the DisallowInsecureConnections switch Embedder can specify whether to allow or disallow insecure connections at a domain level old gen heap size
constexpr bool kTargetUsesThreadSanitizer
#define NOT_IN_PRODUCT(code)