22 "Verify instructions offset in code object."
23 "NOTE: This breaks the profiler.");
24#if defined(TARGET_ARCH_ARM)
25DEFINE_FLAG(
bool, use_far_branches,
false,
"Enable far branches for ARM.");
73 auto const can_be_smi =
89 can_be_smi, memory_order, scratch);
92 memory_order, scratch);
231 ASSERT(object != scratch);
262 memory_order, scratch, size);
289 ASSERT(object != scratch);
335 #if !defined(TARGET_ARCH_IA32)
381 Comment("MsanUnpoison base %s length_in_bytes %" Pd,
387 rt.Call(kMsanUnpoisonRuntimeEntry, 2);
391 Comment("MsanUnpoison base %s length_in_bytes %s",
398 if (length_in_bytes == a0) {
411 rt.Call(kMsanUnpoisonRuntimeEntry, 2);
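Both MsanUnpoison overloads above tell MemorySanitizer that a freshly written range is initialized, by calling kMsanUnpoisonRuntimeEntry with two arguments (base, length). A minimal self-contained sketch of the same idea outside the VM, assuming the standard sanitizer interface rather than the VM's runtime entry:

#include <cstddef>

#if defined(__has_feature)
#if __has_feature(memory_sanitizer)
#include <sanitizer/msan_interface.h>
#define SKETCH_MSAN 1
#endif
#endif

// Sketch only (not the VM's code): unpoison a freshly initialized range
// so MSan does not flag later reads of it.
void UnpoisonRange(void* base, size_t length_in_bytes) {
#if defined(SKETCH_MSAN)
  __msan_unpoison(base, length_in_bytes);
#else
  (void)base;            // no-op when MSan is not enabled
  (void)length_in_bytes;
#endif
}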
415 static void InitializeMemoryWithBreakpoints(uword data, intptr_t length) {
416 #if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
436 InitializeMemoryWithBreakpoints(result, capacity);
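Line 436 fills a newly allocated buffer with breakpoints. A plausible body for InitializeMemoryWithBreakpoints on the ARM targets gated at line 416, assuming the 32-bit kBreakPointInstruction from the declarations below; a sketch, not the VM's verbatim code:

// Sketch: fill a new assembler buffer with breakpoint instructions so a
// stray jump into unemitted space traps immediately. Assumes 4-byte
// alignment and a 32-bit kBreakPointInstruction (true on ARM/ARM64).
static void InitializeMemoryWithBreakpoints(uword data, intptr_t length) {
  ASSERT(Utils::IsAligned(data, 4));
  ASSERT(Utils::IsAligned(length, 4));
  int32_t* cursor = reinterpret_cast<int32_t*>(data);
  for (intptr_t i = 0; i < length / 4; i++) {
    cursor[i] = kBreakPointInstruction;
  }
}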
452 ASSERT(gap_ >= kMinimumGap);
455 buffer->has_ensured_capacity_ = true;
458 AssemblerBuffer::EnsureCapacity::~EnsureCapacity() {
460 buffer_->has_ensured_capacity_ = false;
463 intptr_t delta = gap_ - ComputeGap();
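Lines 452-463 show EnsureCapacity acting as a scope guard: the constructor asserts a minimum gap and marks the buffer, the destructor unmarks it and measures the delta. A hypothetical usage sketch; the Emit<int32_t> call is an assumption about the buffer's emit API:

// Sketch: reserve capacity once for a short emission sequence; the
// destructor checks the reservation was not exceeded.
void EmitOneInstruction(AssemblerBuffer* buffer, int32_t encoding) {
  AssemblerBuffer::EnsureCapacity ensured(buffer);  // sets has_ensured_capacity_
  buffer->Emit<int32_t>(encoding);  // assumed emit API; no per-byte check
}  // ~EnsureCapacity clears the flag and validates the remaining gap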
470 const intptr_t kInitialBufferCapacity = 4 * KB;
473 limit_ = ComputeLimit(contents_, kInitialBufferCapacity);
476 has_ensured_capacity_ = false;
477 fixups_processed_ = false;
481 ASSERT(Capacity() == kInitialBufferCapacity);
489 while (fixup != nullptr) {
491 fixup = fixup->previous();
501 ProcessFixups(instructions);
503 fixups_processed_ = true;
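Lines 489-503 walk the fixup chain during finalization. A minimal sketch of the whole loop, assuming each AssemblerFixup records its position alongside the Process hook declared below:

// Sketch: apply every pending fixup to the finalized region. Fixups
// form a singly linked list threaded through previous().
void AssemblerBuffer::ProcessFixups(const MemoryRegion& region) {
  AssemblerFixup* fixup = fixup_;  // head of the list (assumed member)
  while (fixup != nullptr) {
    fixup->Process(region, fixup->position());  // position() is assumed
    fixup = fixup->previous();
  }
}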
507 void AssemblerBuffer::ExtendCapacity() {
508 intptr_t old_size = Size();
509 intptr_t old_capacity = Capacity();
510 intptr_t new_capacity =
512 if (new_capacity < old_capacity) {
513 FATAL("Unexpected overflow in AssemblerBuffer::ExtendCapacity");
518 memmove(reinterpret_cast<void*>(new_contents),
519 reinterpret_cast<void*>(contents_), old_size);
522 intptr_t delta = new_contents - contents_;
523 contents_ = new_contents;
527 limit_ = ComputeLimit(new_contents, new_capacity);
530 ASSERT(Capacity() == new_capacity);
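The right-hand side of new_capacity at line 510 is elided in this excerpt and is left as-is above. Since the declarations below reference Utils::Minimum, a plausible growth policy is doubling with a cap; the following is an assumption, not the original line:

// Sketch (assumed policy, not the verbatim source): double the buffer,
// but grow very large buffers by at most 1 MB per extension. The check
// from line 512 then catches arithmetic overflow.
intptr_t new_capacity =
    Utils::Minimum(old_capacity * 2, old_capacity + 1 * MB);
if (new_capacity < old_capacity) {
  FATAL("Unexpected overflow in AssemblerBuffer::ExtendCapacity");
}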
538 : pointer_offsets_(pointer_offsets), object_(object) {}
544 region.StoreUnaligned<
const Object*>(position, &object_);
545 pointer_offsets_->
Add(position);
558 while (current != nullptr) {
560 current = current->previous_;
565 #if defined(TARGET_ARCH_IA32)
566 void AssemblerBuffer::EmitObject(const Object& object) {
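On IA32, EmitObject embeds an object pointer directly in the instruction stream via the PatchCodeWithHandle fixup from lines 538-545. A hedged sketch of the body, assuming cursor_ advances one target word to reserve space for the pointer:

// Sketch: queue a fixup that later writes the object pointer into the
// finalized region (line 544) and records the offset for the GC
// (line 545), then reserve one word for that pointer.
void AssemblerBuffer::EmitObject(const Object& object) {
  EmitFixup(new PatchCodeWithHandle(pointer_offsets_, object));
  cursor_ += target::kWordSize;  // assumed cursor bump for the pointer slot
}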
578 const char* format = "Unimplemented: %s";
586 const char* format = "Untested: %s";
594 const char* format = "Unreachable: %s";
616 return FLAG_code_comments || FLAG_disassemble || FLAG_disassemble_optimized ||
617 FLAG_disassemble_stubs;
626 switch (key.type()) {
628 return key.imm128_.int_storage[0] ^ key.imm128_.int_storage[1] ^
629 key.imm128_.int_storage[2] ^ key.imm128_.int_storage[3];
631 #if defined(TARGET_ARCH_IS_32_BIT)
632 case ObjectPoolBuilderEntry::kImmediate64:
647 for (intptr_t i = 0; i < object_pool_.length(); ++i) {
654 object_pool_.Clear();
655 object_pool_index_table_.Clear();
675 #if defined(TARGET_ARCH_IS_32_BIT)
692 (IsNotTemporaryScopedHandle(*entry.obj_) &&
699 if (zone_ != nullptr) {
707 #if defined(TARGET_ARCH_IS_32_BIT)
708 if (entry.type() == ObjectPoolBuilderEntry::kImmediate64) {
710 uint64_t imm = entry.imm64_;
713 object_pool_index_table_.Insert(ObjIndexPair(entry, idx));
722 object_pool_index_table_.Insert(ObjIndexPair(entry, idx));
736 object_pool_index_table_.Insert(ObjIndexPair(entry, idx));
741 const intptr_t idx = base_index_ + object_pool_.length();
742 object_pool_.Add(entry);
745 object_pool_index_table_.Insert(ObjIndexPair(entry, idx));
755 if (parent_ != nullptr) {
756 const intptr_t idx = parent_->object_pool_index_table_.LookupValue(entry);
758 used_from_parent_.Add(idx);
763 const intptr_t idx = object_pool_index_table_.LookupValue(entry);
779 const Object& equivalence) {
791 #if defined(TARGET_ARCH_IS_32_BIT)
814 ASSERT(parent_ != nullptr);
818 for (intptr_t i = 0; i < object_pool_.length(); i++) {
819 intptr_t idx = parent_->AddObject(object_pool_[i]);
820 ASSERT(idx == (base_index_ + i));
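The Insert/LookupValue pairs above (lines 713-763) give the pool its deduplication behavior, and lines 814-820 replay a child builder's entries into its parent so indices line up. A hypothetical usage sketch against the AddObject/FindObject declarations below; builder and obj are illustrative names:

// Sketch: adding the same object twice resolves to one pool slot,
// because every Add first consults object_pool_index_table_.
ObjectPoolBuilder builder;
const intptr_t first = builder.AddObject(obj);   // inserts a new entry
const intptr_t again = builder.FindObject(obj);  // hits the index table
ASSERT(first == again);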
#define DEBUG_ASSERT(cond)
static constexpr intptr_t RelocationIndex(Relocation reloc)
static const Register ArgumentRegisters[]
static constexpr int32_t kBreakPointInstruction
void CopyFrom(uword offset, const MemoryRegion &from) const
static const char * RegisterName(Register reg)
bool is_guarded_field() const
Representation representation() const
intptr_t offset_in_bytes() const
bool has_untagged_instance() const
bool is_compressed() const
static Thread * Current()
static int32_t Low32Bits(int64_t value)
static int SNPrint(char *str, size_t size, const char *format, ...) PRINTF_ATTRIBUTE(3, 4)
static int VSNPrint(char *str, size_t size, const char *format, va_list args)
static int32_t High32Bits(int64_t value)
static T Minimum(T x, T y)
static constexpr T RoundUp(T x, uintptr_t alignment, uintptr_t offset=0)
static constexpr bool IsAligned(T x, uintptr_t alignment, uintptr_t offset=0)
void * AllocUnsafe(intptr_t size)
virtual void StoreRelease(Register src, const Address &address, OperandSize size=kWordBytes)=0
void LoadCompressedField(Register dst, const FieldAddress &address)
void LoadCompressedSmiField(Register dst, const FieldAddress &address)
void UnrolledMemCopy(Register dst_base, intptr_t dst_offset, Register src_base, intptr_t src_offset, intptr_t size, Register temp)
void LoadCompressedSmi(Register dst, const Address &address)
virtual void StoreBarrier(Register object, Register value, CanBeSmi can_be_smi, Register scratch)=0
intptr_t CodeSize() const
void Untested(const char *message)
void Stop(const char *message)
void LoadCompressed(Register dst, const Address &address)
void LoadCompressedFieldFromOffset(Register dst, Register base, int32_t offset)
void LoadCompressedSmiFromOffset(Register dst, Register base, int32_t offset)
void LoadCompressedSmiFieldFromOffset(Register dst, Register base, int32_t offset)
void StoreReleaseToOffset(Register src, Register base, int32_t offset=0, OperandSize size=kWordBytes)
virtual void MoveRegister(Register dst, Register src)
void StoreIntoObjectNoBarrier(Register object, const Address &address, Register value, MemoryOrder memory_order=kRelaxedNonAtomic, OperandSize size=kWordBytes)
void StoreToSlot(Register src, Register base, const Slot &slot, CanBeSmi can_be_smi, MemoryOrder memory_order=kRelaxedNonAtomic, Register scratch=TMP)
void LoadAcquireFromOffset(Register dst, Register base, int32_t offset=0, OperandSize size=kWordBytes)
virtual void StoreFieldToOffset(Register src, Register base, int32_t offset, OperandSize sz=kWordBytes)
virtual void StoreIntoObjectOffsetNoBarrier(Register object, int32_t offset, Register value, MemoryOrder memory_order=kRelaxedNonAtomic, OperandSize size=kWordBytes)
void LoadCompressedFromOffset(Register dst, Register base, int32_t offset)
void LoadSmiFieldFromOffset(Register dst, Register base, int32_t offset)
void LoadField(Register dst, const FieldAddress &address, OperandSize sz=kWordBytes)
virtual void LoadAcquire(Register dst, const Address &address, OperandSize size=kWordBytes)=0
void LoadFromSlot(Register dst, Register base, const Slot &slot)
static bool EmittingComments()
void StoreToSlotNoBarrier(Register src, Register base, const Slot &slot, MemoryOrder memory_order=kRelaxedNonAtomic)
virtual void LoadFromOffset(Register dst, Register base, int32_t offset, OperandSize sz=kWordBytes)
virtual void CompareImmediate(Register reg, target::word imm, OperandSize width=kWordBytes)=0
void StoreCompressedIntoObjectOffsetNoBarrier(Register object, int32_t offset, Register value, MemoryOrder memory_order=kRelaxedNonAtomic)
void LoadAcquireCompressedFromOffset(Register dst, Register base, int32_t offset)
void StoreObjectIntoObjectOffsetNoBarrier(Register object, int32_t offset, const Object &value, MemoryOrder memory_order=kRelaxedNonAtomic, OperandSize size=kWordBytes)
virtual void VerifyStoreNeedsNoWriteBarrier(Register object, Register value)=0
void LoadTypeClassId(Register dst, Register src)
intptr_t InsertAlignedRelocation(BSS::Relocation reloc)
virtual void StoreIntoObjectOffset(Register object, int32_t offset, Register value, CanBeSmi can_be_smi=kValueCanBeSmi, MemoryOrder memory_order=kRelaxedNonAtomic, Register scratch=TMP, OperandSize size=kWordBytes)
virtual void LoadFieldFromOffset(Register dst, Register base, int32_t offset, OperandSize sz=kWordBytes)
void LoadAcquireCompressed(Register dst, const Address &address)
virtual void Load(Register dst, const Address &address, OperandSize sz=kWordBytes)=0
virtual void StoreObjectIntoObjectNoBarrier(Register object, const Address &address, const Object &value, MemoryOrder memory_order=kRelaxedNonAtomic, OperandSize size=kWordBytes)=0
void StoreCompressedIntoObjectOffset(Register object, int32_t offset, Register value, CanBeSmi can_be_smi=kValueCanBeSmi, MemoryOrder memory_order=kRelaxedNonAtomic, Register scratch=TMP)
virtual void ArrayStoreBarrier(Register object, Register slot, Register value, CanBeSmi can_be_smi, Register scratch)=0
void LoadSmi(Register dst, const Address &address)
virtual void Breakpoint()=0
virtual void StoreToOffset(Register src, Register base, int32_t offset, OperandSize sz=kWordBytes)
void LoadAbstractTypeNullability(Register dst, Register type)
virtual void LsrImmediate(Register dst, int32_t shift)=0
void LoadSmiFromOffset(Register dst, Register base, int32_t offset)
virtual void AndImmediate(Register dst, target::word imm)=0
void StoreIntoArray(Register object, Register slot, Register value, CanBeSmi can_value_be_smi=kValueCanBeSmi, Register scratch=TMP, OperandSize size=kWordBytes)
void LoadSmiField(Register dst, const FieldAddress &address)
void Comment(const char *format, ...) PRINTF_ATTRIBUTE(2, 3)
void StoreIntoObject(Register object, const Address &address, Register value, CanBeSmi can_be_smi=kValueCanBeSmi, MemoryOrder memory_order=kRelaxedNonAtomic, Register scratch=TMP, OperandSize size=kWordBytes)
virtual void EnsureHasClassIdInDEBUG(intptr_t cid, Register src, Register scratch, bool can_be_null=false)=0
void MsanUnpoison(Register base, intptr_t length_in_bytes)
void CompareAbstractTypeNullabilityWith(Register type, int8_t value, Register scratch)
void Unreachable(const char *message)
virtual void LoadImmediate(Register dst, target::word imm)=0
void Unimplemented(const char *message)
virtual void Store(Register src, const Address &address, OperandSize sz=kWordBytes)=0
EnsureCapacity(AssemblerBuffer *buffer)
void EmitFixup(AssemblerFixup *fixup)
void FinalizeInstructions(const MemoryRegion &region)
intptr_t CountPointerOffsets() const
intptr_t GetPosition() const
virtual bool IsPointerOffset() const =0
virtual void Process(const MemoryRegion &region, intptr_t position)=0
void Call(const RuntimeEntry &entry, intptr_t argument_count)
static constexpr intptr_t kNoIndex
static uword Hash(Key key)
intptr_t AddObject(const Object &obj, ObjectPoolBuilderEntry::Patchability patchable=ObjectPoolBuilderEntry::kNotPatchable, ObjectPoolBuilderEntry::SnapshotBehavior snapshot_behavior=ObjectPoolBuilderEntry::kSnapshotable)
intptr_t AddImmediate(uword imm, ObjectPoolBuilderEntry::Patchability patchable=ObjectPoolBuilderEntry::kNotPatchable, ObjectPoolBuilderEntry::SnapshotBehavior snapshotability=ObjectPoolBuilderEntry::kSnapshotable)
intptr_t AddImmediate64(uint64_t imm)
intptr_t CurrentLength() const
intptr_t FindImmediate128(simd128_value_t imm)
intptr_t FindObject(const Object &obj, ObjectPoolBuilderEntry::Patchability patchable=ObjectPoolBuilderEntry::kNotPatchable, ObjectPoolBuilderEntry::SnapshotBehavior snapshot_behavior=ObjectPoolBuilderEntry::kSnapshotable)
intptr_t FindImmediate(uword imm)
intptr_t FindImmediate64(uint64_t imm)
intptr_t FindNativeFunction(const ExternalLabel *label, ObjectPoolBuilderEntry::Patchability patchable)
intptr_t AddImmediate128(simd128_value_t imm)
PatchCodeWithHandle(ZoneGrowableArray< intptr_t > *pointer_offsets, const Object &object)
virtual bool IsPointerOffset() const
void Process(const MemoryRegion &region, intptr_t position)
static const word kNullabilityMask
static const word kTypeClassIdShift
static constexpr intptr_t kWordSize
Object & NewZoneHandle(Zone *zone)
intptr_t ObjectHash(const Object &obj)
void SetToNull(Object *obj)
const String & AllocateString(const char *buffer)
static uword NewContents(intptr_t capacity)
bool IsInOldSpace(const Object &obj)
void * malloc(size_t size)
static constexpr size_t ValueSize(Representation rep)
static constexpr bool IsUnboxedInteger(Representation rep)
static compiler::OperandSize OperandSize(Representation rep)
const Object * equivalence_
Patchability patchable() const
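Taken together, the store declarations above split into barriered and barrier-free forms. A hypothetical usage sketch; register names and the offset are illustrative, not from the source:

// Sketch: StoreIntoObject when `value` may be a new-space object (write
// barrier required); StoreIntoObjectNoBarrier when the value is provably
// a Smi or an old-space constant.
assembler->StoreIntoObject(obj, FieldAddress(obj, offset), value,
                           kValueCanBeSmi);
assembler->StoreIntoObjectNoBarrier(obj, FieldAddress(obj, offset),
                                    smi_value);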