#ifndef RUNTIME_VM_COMPILER_ASSEMBLER_ASSEMBLER_X64_H_
#define RUNTIME_VM_COMPILER_ASSEMBLER_ASSEMBLER_X64_H_

#if defined(DART_PRECOMPILED_RUNTIME)
#error "AOT runtime should not use compiler sources (including header files)"
#endif  // defined(DART_PRECOMPILED_RUNTIME)

#ifndef RUNTIME_VM_COMPILER_ASSEMBLER_ASSEMBLER_H_
#error Do not include assembler_x64.h directly; use assembler.h instead.
#endif
class FlowGraphCompiler;
class Immediate : public ValueObject {
 public:
  int64_t value() const { return value_; }
  // ...
};
class Operand : public ValueObject {
 public:
  uint8_t rex() const { return rex_; }

  uint8_t mod() const { return (encoding_at(0) >> 6) & 3; }
  Register rm() const {
    int rm_rex = (rex_ & REX_B) << 3;
    return static_cast<Register>(rm_rex + (encoding_at(0) & 7));
  }

  ScaleFactor scale() const {
    return static_cast<ScaleFactor>((encoding_at(1) >> 6) & 3);
  }

  Register index() const {
    int index_rex = (rex_ & REX_X) << 2;
    return static_cast<Register>(index_rex + ((encoding_at(1) >> 3) & 7));
  }

  Register base() const {
    int base_rex = (rex_ & REX_B) << 3;
    return static_cast<Register>(base_rex + (encoding_at(1) & 7));
  }

  int8_t disp8() const {
    return static_cast<int8_t>(encoding_[length_ - 1]);
  }

  int32_t disp32() const {
    return bit_copy<int32_t>(encoding_[length_ - 4]);
  }
  Operand(const Operand& other)
      : ValueObject(), length_(other.length_), rex_(other.rex_) {
    memmove(&encoding_[0], &other.encoding_[0], other.length_);
  }

  Operand& operator=(const Operand& other) {
    length_ = other.length_;
    rex_ = other.rex_;
    memmove(&encoding_[0], &other.encoding_[0], other.length_);
    return *this;
  }
  bool Equals(const Operand& other) const {
    if (length_ != other.length_) return false;
    if (rex_ != other.rex_) return false;
    for (uint8_t i = 0; i < length_; i++) {
      if (encoding_[i] != other.encoding_[i]) return false;
    }
    return true;
  }
 protected:
  void SetModRM(int mod, Register rm) {
    if ((rm > 7) && !((rm == R12) && (mod != 3))) {
      rex_ |= REX_B;
    }
    encoding_[0] = (mod << 6) | (rm & 7);
    length_ = 1;
  }
  void SetDisp8(int8_t disp) {
    ASSERT(length_ == 1 || length_ == 2);
    encoding_[length_++] = static_cast<uint8_t>(disp);
  }

  void SetDisp32(int32_t disp) {
    ASSERT(length_ == 1 || length_ == 2);
    memmove(&encoding_[length_], &disp, sizeof(disp));
    length_ += sizeof(disp);
  }
 private:
  uint8_t length_;
  uint8_t rex_;
  uint8_t encoding_[6];
  uint8_t encoding_at(intptr_t index) const {
    ASSERT(index < length_);
    return encoding_[index];
  }
  // Returns whether this operand is really just |reg| in disguise
  // (register-direct addressing of that register).
  bool IsRegister(Register reg) const {
    return ((reg > 7 ? 1 : 0) == (rex_ & REX_B))  // REX.B matches reg's high bit.
           && ((encoding_at(0) & 0xF8) == 0xC0)   // mod == 3, modrm reg field == 0.
           && ((encoding_at(0) & 0x07) == reg);   // Low three bits name |reg|.
  }
};
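// Illustrative only (not part of this header): how the Operand helpers above
// compose a ModRM/SIB byte sequence. For register-direct RCX, SetModRM(3, RCX)
// stores 0xC1 (mod = 3 in bits 7..6, rm = 1 in bits 2..0). For a register
// above 7, e.g. R9, the helper also sets REX_B and still stores only the low
// three bits: SetModRM(3, R9) -> REX.B plus 0xC1. A memory operand such as
// [RSP + 8] needs a SIB byte and a displacement:
//   SetModRM(1, RSP);           // mod = 1 (disp8 follows); rm = RSP selects SIB.
//   SetSIB(TIMES_1, RSP, RSP);  // scale/index/base; index = RSP means "no index".
//   SetDisp8(8);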
class Address : public Operand {
 public:
  Address(Register base, int32_t disp) {
    if ((disp == 0) && ((base & 7) != RBP)) {
      // ...
    }
    // ...
  }
  Address(Register base, Register index, ScaleFactor scale, int32_t disp) {
    if ((disp == 0) && ((base & 7) != RBP)) {
      // ...
    }
    // ...
  }
  static Address AddressRIPRelative(int32_t disp) {
    return Address(RIPRelativeDisp(disp));
  }

 private:
  struct RIPRelativeDisp {
    explicit RIPRelativeDisp(int32_t disp) : disp_(disp) {}
    int32_t disp_;
  };
  explicit Address(const RIPRelativeDisp& disp) {
    // ...
  }
};
class FieldAddress : public Address {
  // ...
};

#if !defined(DART_COMPRESSED_POINTERS)
class Assembler : public AssemblerBase {
 public:
  void call(const Address& address) { EmitUnaryL(address, 0xFF, 2); }

  void popq(const Address& address) { EmitUnaryL(address, 0x8F, 0); }
  void TransitionGeneratedToNative(Register destination_address,
                                   Register new_exit_frame,
                                   Register new_exit_through_ffi,
                                   bool enter_safepoint);
  void TransitionNativeToGenerated(bool leave_safepoint,
                                   bool ignore_unwind_in_progress = false,
                                   bool set_tag = true);
#define RR(width, name, ...)                                                   \
  void name(Register dst, Register src) { Emit##width(dst, src, __VA_ARGS__); }
#define RA(width, name, ...)                                                   \
  void name(Register dst, const Address& src) {                               \
    Emit##width(dst, src, __VA_ARGS__);                                       \
  }
#define RAB(name, ...)                                                         \
  void name(ByteRegister dst, const Address& src) {                           \
    EmitB(dst, src, __VA_ARGS__);                                             \
  }
#define AR(width, name, ...)                                                   \
  void name(const Address& dst, Register src) {                               \
    Emit##width(src, dst, __VA_ARGS__);                                       \
  }
#define ARB(name, ...)                                                         \
  void name(const Address& dst, ByteRegister src) {                           \
    EmitB(src, dst, __VA_ARGS__);                                             \
  }
#define REGULAR_INSTRUCTION(name, ...)                                         \
  RA(W, name##w, __VA_ARGS__)                                                  \
  RA(L, name##l, __VA_ARGS__)                                                  \
  RA(Q, name##q, __VA_ARGS__)                                                  \
  RR(W, name##w, __VA_ARGS__)                                                  \
  RR(L, name##l, __VA_ARGS__)                                                  \
  RR(Q, name##q, __VA_ARGS__)
#undef REGULAR_INSTRUCTION
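// Illustrative only: a hypothetical REGULAR_INSTRUCTION(foo, 0xAB) would
// expand, via the RR/RA macros above, into six overloads:
//   void foow(Register dst, Register src) { EmitW(dst, src, 0xAB); }
//   void fool(Register dst, Register src) { EmitL(dst, src, 0xAB); }
//   void fooq(Register dst, Register src) { EmitQ(dst, src, 0xAB); }
// plus the three Register/Address forms, i.e. 16-, 32- and 64-bit variants of
// one instruction from a single line.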
  AR(Q, cmpxchgq, 0xB1, 0x0F)
  RA(Q, cmpxchgq, 0xB1, 0x0F)
  RR(Q, cmpxchgq, 0xB1, 0x0F)
#define DECLARE_CMOV(name, code)                                               \
  RR(Q, cmov##name##q, 0x40 + code, 0x0F)                                      \
  RR(L, cmov##name##l, 0x40 + code, 0x0F)                                      \
  RA(Q, cmov##name##q, 0x40 + code, 0x0F)                                      \
  RA(L, cmov##name##l, 0x40 + code, 0x0F)
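// Illustrative only: x86 encodes CMOVcc as 0x0F, 0x40 + condition code, so an
// instantiation with the "equal" condition (code 4) would declare cmov*q and
// cmov*l overloads, for register and memory sources, that emit 0x0F 0x44.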
#define SIMPLE(name, ...)                                                      \
  void name() { EmitSimple(__VA_ARGS__); }

  SIMPLE(rep_movsq, 0xF3, 0x48, 0xA5)
#define XX(width, name, ...)                                                   \
  void name(XmmRegister dst, XmmRegister src) {                                \
    Emit##width(dst, src, __VA_ARGS__);                                        \
  }
#define XA(width, name, ...)                                                   \
  void name(XmmRegister dst, const Address& src) {                             \
    Emit##width(dst, src, __VA_ARGS__);                                        \
  }
#define AX(width, name, ...)                                                   \
  void name(const Address& dst, XmmRegister src) {                             \
    Emit##width(src, dst, __VA_ARGS__);                                        \
  }

#define DECLARE_XMM(name, code)                                                \
  XX(L, name##ps, 0x50 + code, 0x0F)                                           \
  XA(L, name##ps, 0x50 + code, 0x0F)                                           \
  AX(L, name##ps, 0x50 + code, 0x0F)                                           \
  XX(L, name##pd, 0x50 + code, 0x0F, 0x66)                                     \
  XA(L, name##pd, 0x50 + code, 0x0F, 0x66)                                     \
  AX(L, name##pd, 0x50 + code, 0x0F, 0x66)                                     \
  XX(L, name##sd, 0x50 + code, 0x0F, 0xF2)                                     \
  XA(L, name##sd, 0x50 + code, 0x0F, 0xF2)                                     \
  AX(L, name##sd, 0x50 + code, 0x0F, 0xF2)                                     \
  XX(L, name##ss, 0x50 + code, 0x0F, 0xF3)                                     \
  XA(L, name##ss, 0x50 + code, 0x0F, 0xF3)                                     \
  AX(L, name##ss, 0x50 + code, 0x0F, 0xF3)
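// Illustrative only: the SSE arithmetic opcodes sit at 0x50 + code, so a
// hypothetical DECLARE_XMM(add, 0x8) would declare addps/addpd/addsd/addss in
// register-register, register-memory and memory-register forms, all emitting
// 0x0F 0x58, with the 0x66 / 0xF2 / 0xF3 prefix selecting packed-double,
// scalar-double or scalar-single.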
#define DECLARE_CMPPS(name, code)                                              \
  void cmpps##name(XmmRegister dst, XmmRegister src) {                         \
    EmitL(dst, src, 0xC2, 0x0F);                                               \
    AssemblerBuffer::EnsureCapacity ensured(&buffer_);                         \
    EmitUint8(code);                                                           \
  }

#define DECLARE_SIMPLE(name, opcode)                                           \
  void name() { EmitSimple(opcode); }
  void movq(Register dst, XmmRegister src) {
    EmitQ(src, dst, 0x7E, 0x0F, 0x66);
  }
  void movl(Register dst, XmmRegister src) {
    EmitL(src, dst, 0x7E, 0x0F, 0x66);
  }
  void movss(const Address& dst, XmmRegister src) {
    EmitL(src, dst, 0x11, 0x0F, 0xF3);
  }
  void movsd(const Address& dst, XmmRegister src) {
    EmitL(src, dst, 0x11, 0x0F, 0xF2);
  }

  void movq(XmmRegister dst, Register src) {
    EmitQ(dst, src, 0x6E, 0x0F, 0x66);
  }
  void movd(XmmRegister dst, Register src) {
    EmitL(dst, src, 0x6E, 0x0F, 0x66);
  }
  void cvtsi2sdq(XmmRegister dst, Register src) {
    EmitQ(dst, src, 0x2A, 0x0F, 0xF2);
  }
  void cvtsi2sdl(XmmRegister dst, Register src) {
    EmitL(dst, src, 0x2A, 0x0F, 0xF2);
  }
  void cvttsd2siq(Register dst, XmmRegister src) {
    EmitQ(dst, src, 0x2C, 0x0F, 0xF2);
  }
  void cvttsd2sil(Register dst, XmmRegister src) {
    EmitL(dst, src, 0x2C, 0x0F, 0xF2);
  }
  void movmskpd(Register dst, XmmRegister src) {
    EmitL(dst, src, 0x50, 0x0F, 0x66);
  }
  void pmovmskb(Register dst, XmmRegister src) {
    EmitL(dst, src, 0xD7, 0x0F, 0x66);
  }

  void shldq(Register dst, Register src, Register shifter) {
    EmitQ(src, dst, 0xA5, 0x0F);
  }
  void shrdq(Register dst, Register src, Register shifter) {
    EmitQ(src, dst, 0xAD, 0x0F);
  }
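// Illustrative note: in the wrappers above, the first argument passed to
// EmitL/EmitQ is always the operand that ends up in the ModRM reg field.
// Store-style opcodes (0x7E, 0x11, 0xA5, 0xAD) therefore pass (src, dst),
// while load-style opcodes (0x6E, 0x2A, 0x2C, 0x50, 0xD7) pass (dst, src).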
#define DECLARE_ALU(op, c)                                                     \
  void op##w(Register dst, Register src) { EmitW(dst, src, c * 8 + 3); }       \
  void op##l(Register dst, Register src) { EmitL(dst, src, c * 8 + 3); }       \
  void op##q(Register dst, Register src) { EmitQ(dst, src, c * 8 + 3); }       \
  void op##w(Register dst, const Address& src) { EmitW(dst, src, c * 8 + 3); } \
  void op##l(Register dst, const Address& src) { EmitL(dst, src, c * 8 + 3); } \
  void op##q(Register dst, const Address& src) { EmitQ(dst, src, c * 8 + 3); } \
  void op##w(const Address& dst, Register src) { EmitW(src, dst, c * 8 + 1); } \
  void op##l(const Address& dst, Register src) { EmitL(src, dst, c * 8 + 1); } \
  void op##q(const Address& dst, Register src) { EmitQ(src, dst, c * 8 + 1); } \
  void op##l(Register dst, const Immediate& imm) { AluL(c, dst, imm); }        \
  void op##q(Register dst, const Immediate& imm) {                             \
    AluQ(c, c * 8 + 3, dst, imm);                                              \
  }                                                                            \
  void op##b(const Address& dst, const Immediate& imm) { AluB(c, dst, imm); }  \
  void op##w(const Address& dst, const Immediate& imm) { AluW(c, dst, imm); }  \
  void op##l(const Address& dst, const Immediate& imm) { AluL(c, dst, imm); }  \
  void op##q(const Address& dst, const Immediate& imm) {                       \
    AluQ(c, c * 8 + 3, dst, imm);                                              \
  }
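// Illustrative only: the classic x86 ALU group encodes "reg, r/m" as
// c * 8 + 3 and "r/m, reg" as c * 8 + 1. A hypothetical DECLARE_ALU(add, 0)
// would therefore generate addq(Register, Register) emitting opcode 0x03 and
// addq(const Address&, Register) emitting opcode 0x01, with the immediate
// forms routed through AluB/AluW/AluL/AluQ using ModRM extension /0.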
#define REGULAR_UNARY(name, opcode, modrm)                                     \
  void name##q(Register reg) { EmitUnaryQ(reg, opcode, modrm); }               \
  void name##l(Register reg) { EmitUnaryL(reg, opcode, modrm); }               \
  void name##q(const Address& address) { EmitUnaryQ(address, opcode, modrm); } \
  void name##l(const Address& address) { EmitUnaryL(address, opcode, modrm); }
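// Illustrative only: the x86 unary group shares one opcode byte and selects
// the operation with the ModRM /digit, so e.g. REGULAR_UNARY(neg, 0xF7, 3)
// would declare negq/negl for both register and memory operands (NEG is
// 0xF7 /3, NOT is 0xF7 /2).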
  void jmp(const Address& address) { EmitUnaryL(address, 0xFF, 4); }
  void AddScaled(Register dest, Register base, Register index,
                 ScaleFactor scale, int32_t disp) override {
    // ...
  }
  void CallWithEquivalence(const Code& code,
                           const Object& equivalence,
                           CodeEntryKind entry_kind = CodeEntryKind::kNormal);
#if defined(DART_COMPRESSED_POINTERS)
  void LockCmpxchgq(const Address& address, Register reg) {
    lock();
    cmpxchgq(address, reg);
  }
  void EnsureHasClassIdInDEBUG(intptr_t cid,
                               Register src,
                               Register scratch,
                               bool can_be_null = false) override;
#if defined(DART_COMPRESSED_POINTERS)
#if !defined(DART_COMPRESSED_POINTERS)
#if !defined(DART_COMPRESSED_POINTERS)
  void LoadIndexedPayload(Register dst, Register base, int32_t payload_offset,
                          Register index, ScaleFactor scale,
                          OperandSize sz = kEightBytes) override;
#if defined(DART_COMPRESSED_POINTERS)
  // Elided here: LoadAcquire, LoadAcquireCompressed (DART_COMPRESSED_POINTERS
  // only) and StoreRelease. Plain x64 loads and stores already have
  // acquire/release semantics, so these helpers only add extra instrumentation
  // when FLAG_target_thread_sanitizer is set (TsanLoadAcquire /
  // TsanStoreRelease).
  void CompareWithMemoryValue(Register value, Address address,
                              OperandSize size = kEightBytes) override {
    if (size == kFourBytes) {
      cmpl(value, address);
    } else {
      cmpq(value, address);
    }
  }
  void TryAllocateObject(intptr_t cid, intptr_t instance_size, Label* failure,
                         JumpDistance distance, Register instance_reg, Register temp) override;
  void TryAllocateArray(intptr_t cid, intptr_t instance_size, Label* failure,
                        JumpDistance distance, Register instance, Register end_address, Register temp);
  void CheckAllocationCanary(Register top) {
    // ...
    Stop("Allocation canary");
  }
  static bool AddressCanHoldConstantIndex(const Object& constant,
      bool is_external, intptr_t cid, intptr_t index_scale);
  static Address ElementAddressForIntIndex(bool is_external, intptr_t cid,
      intptr_t index_scale, Register array, intptr_t index);
  static Address ElementAddressForRegIndex(bool is_external, intptr_t cid,
      intptr_t index_scale, bool index_unboxed, Register array, Register index);
  void LoadStaticFieldAddress(Register address,
                              Register field,
                              Register scratch,
                              bool is_shared) {
    // ...
    const intptr_t field_table_offset =
        is_shared ? target::Thread::shared_field_table_values_offset()
                  : target::Thread::field_table_values_offset();
    // ...
    static_assert(kSmiTagShift == 1, "adjust scale factor");
    leaq(address, Address(address, scratch, TIMES_HALF_WORD_SIZE, 0));
  }
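// Scaling note (illustrative, not from this header): with kSmiTagShift == 1 a
// Smi stores 2 * value, so scaling the Smi by TIMES_HALF_WORD_SIZE
// (kWordSize / 2, i.e. 4 on x64) yields value * kWordSize, exactly the byte
// offset of the field-table slot; the static_assert guards this assumption.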
  void LoadFieldAddressForRegOffset(Register address,
                                    Register instance,
                                    Register offset_in_words_as_smi) override {
    static_assert(kSmiTagShift == 1, "adjust scale factor");
    // ...
  }

#if defined(DART_COMPRESSED_POINTERS)
  void LoadCompressedFieldAddressForRegOffset(
      Register address,
      Register instance,
      Register offset_in_words_as_smi) override {
    static_assert(kSmiTagShift == 1, "adjust scale factor");
    // ...
  }
#endif
  void LoadFieldAddressForOffset(Register address, Register instance,
                                 int32_t offset) override {
    // ...
  }
  bool constant_pool_allowed_;
  void CallCodeThroughPool(intptr_t target_code_pool_index, CodeEntryKind entry_kind);
  void LoadObjectHelper(Register dst, const Object& obj, bool is_unique,
                        ObjectPoolBuilderEntry::SnapshotBehavior snapshot_behavior);
  void AluQ(uint8_t modrm_opcode, uint8_t opcode, Register dst, const Immediate& imm);
  void AluQ(uint8_t modrm_opcode, uint8_t opcode, const Address& dst, const Immediate& imm);
  void EmitSimple(int opcode, int opcode2 = -1, int opcode3 = -1);
  void EmitUnaryQ(Register reg, int opcode, int modrm_code);
  void EmitUnaryL(Register reg, int opcode, int modrm_code);
  void EmitUnaryQ(const Address& address, int opcode, int modrm_code);
  void EmitUnaryL(const Address& address, int opcode, int modrm_code);
  void EmitQ(int dst, int src, int opcode, int prefix2 = -1, int prefix1 = -1);
  void EmitL(int dst, int src, int opcode, int prefix2 = -1, int prefix1 = -1);
  void EmitB(int reg, const Address& address, int opcode);
  inline void EmitUint8(uint8_t value);
  inline void EmitInt32(int32_t value);
  inline void EmitUInt32(uint32_t value);
  inline void EmitInt64(int64_t value);
  inline void EmitRegisterREX(Register reg,
                              uint8_t rex,
                              bool force_emit = false);
  inline void EmitOperandREX(int rm, const Operand& operand, uint8_t rex);
  inline void EmitRegisterOperand(int rm, int reg);
  inline void EmitOperandSizeOverride();
  inline void EmitRegRegRex(int reg, int base, uint8_t rex = REX_NONE);
  void EmitOperand(int rm, const Operand& operand);
  void EmitImmediate(const Immediate& imm);
  void EmitComplex(int rm, const Operand& operand, const Immediate& immediate);
  void EmitSignExtendedInt8(int rm,
                            const Operand& operand,
                            const Immediate& immediate);
  void EmitLabel(Label* label, intptr_t instruction_size);
  void EmitLabelLink(Label* label);
  void EmitNearLabelLink(Label* label);
  void EmitGenericShift(bool wide, int rm, Register operand, Register shifter);
  enum BarrierFilterMode {
    // ...
  };
  void StoreIntoArrayBarrier(Register object, Register slot, Register value,
                             CanBeSmi can_be_smi);
  DISALLOW_ALLOCATION();
};
inline void Assembler::EmitUint8(uint8_t value) {
  buffer_.Emit<uint8_t>(value);
}

inline void Assembler::EmitInt32(int32_t value) {
  buffer_.Emit<int32_t>(value);
}

inline void Assembler::EmitUInt32(uint32_t value) {
  buffer_.Emit<uint32_t>(value);
}

inline void Assembler::EmitInt64(int64_t value) {
  buffer_.Emit<int64_t>(value);
}
inline void Assembler::EmitRegisterREX(Register reg, uint8_t rex, bool force) {
  // ...
}
inline void Assembler::EmitOperandREX(int rm,
                                      const Operand& operand,
                                      uint8_t rex) {
  // ...
}
inline void Assembler::EmitRegRegRex(int reg, int base, uint8_t rex) {
  if (reg > 7) rex |= REX_R;
  if (base > 7) rex |= REX_B;
  if (rex != REX_NONE) EmitUint8(REX_PREFIX | rex);
}
inline void Assembler::EmitFixup(AssemblerFixup* fixup) {
  buffer_.EmitFixup(fixup);
}
inline void Assembler::EmitOperandSizeOverride() {
  EmitUint8(0x66);  // Operand-size override prefix.
}
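// Illustrative only: the REX helpers above assemble the x86-64 REX prefix,
// whose bits are 0100WRXB. W selects 64-bit operand size, while R, X and B
// extend the ModRM reg field, the SIB index and the ModRM rm/base field to
// reach R8..R15. For instance, a 64-bit register-to-register move where both
// operands are in R8..R15 needs REX_W | REX_R | REX_B, i.e. the prefix byte
// 0x4D, which is what EmitRegRegRex() computes before the opcode is emitted.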