|
| POINTER_FIELD (StringPtr, target_name) |
|
| POINTER_FIELD (ArrayPtr, args_descriptor) |
|
ObjectPtr * | to_snapshot (Snapshot::Kind kind) |
|
template<typename type , std::memory_order order = std::memory_order_relaxed> |
type | LoadPointer (type const *addr) const |
|
template<typename type , typename compressed_type , std::memory_order order = std::memory_order_relaxed> |
type | LoadCompressedPointer (compressed_type const *addr) const |
|
uword | heap_base () const |
|
template<typename type , std::memory_order order = std::memory_order_relaxed> |
void | StorePointer (type const *addr, type value) |
|
template<typename type , typename compressed_type , std::memory_order order = std::memory_order_relaxed> |
void | StoreCompressedPointer (compressed_type const *addr, type value) |
|
template<typename type > |
void | StorePointer (type const *addr, type value, Thread *thread) |
|
template<typename type , typename compressed_type > |
void | StoreCompressedPointer (compressed_type const *addr, type value, Thread *thread) |
|
template<typename type > |
void | StorePointerUnaligned (type const *addr, type value, Thread *thread) |
|
template<typename type , std::memory_order order = std::memory_order_relaxed, typename value_type = type> |
void | StoreArrayPointer (type const *addr, value_type value) |
|
template<typename type , typename value_type = type> |
void | StoreArrayPointer (type const *addr, value_type value, Thread *thread) |
|
template<typename type , typename compressed_type , std::memory_order order> |
void | StoreCompressedArrayPointer (compressed_type const *addr, type value) |
|
template<typename type , typename compressed_type , std::memory_order order> |
void | StoreCompressedArrayPointer (compressed_type const *addr, type value, Thread *thread) |
|
template<typename type , typename compressed_type > |
void | StoreCompressedArrayPointer (compressed_type const *addr, type value, Thread *thread) |
|
template<typename type , typename compressed_type , std::memory_order order = std::memory_order_relaxed> |
type | ExchangeCompressedPointer (compressed_type const *addr, type value) |
|
template<std::memory_order order = std::memory_order_relaxed> |
SmiPtr | LoadSmi (SmiPtr const *addr) const |
|
template<std::memory_order order = std::memory_order_relaxed> |
SmiPtr | LoadCompressedSmi (CompressedSmiPtr const *addr) const |
|
template<typename type , std::memory_order order = std::memory_order_relaxed> |
void | StoreSmi (type const *addr, type value) |
|
template<std::memory_order order = std::memory_order_relaxed> |
void | StoreCompressedSmi (CompressedSmiPtr const *addr, SmiPtr value) |
|
|
enum | TagBits {
    kCardRememberedBit = 0,
    kCanonicalBit = 1,
    kNotMarkedBit = 2,
    kNewOrEvacuationCandidateBit = 3,
    kAlwaysSetBit = 4,
    kOldAndNotRememberedBit = 5,
    kImmutableBit = 6,
    kReservedBit = 7,
    kSizeTagPos = kReservedBit + 1,
    kSizeTagSize = 4,
    kClassIdTagPos = kSizeTagPos + kSizeTagSize,
    kClassIdTagSize = 20,
    kHashTagPos = kClassIdTagPos + kClassIdTagSize,
    kHashTagSize = 32
} |
|
| COMPILE_ASSERT (kNotMarkedBit + kBarrierOverlapShift == kAlwaysSetBit) |
|
| COMPILE_ASSERT (kNewOrEvacuationCandidateBit + kBarrierOverlapShift == kOldAndNotRememberedBit) |
|
| COMPILE_ASSERT (kCardRememberedBit == 0) |
|
| COMPILE_ASSERT (kBitsPerByte * sizeof(ClassIdTagType) >= kClassIdTagSize) |
|
| COMPILE_ASSERT (kClassIdTagMax == (1 << kClassIdTagSize) - 1) |
|
bool | IsNewObject () const |
|
bool | IsOldObject () const |
|
uword | tags () const |
|
uword | tags_ignore_race () const |
|
bool | IsMarked () const |
|
void | SetMarkBit () |
|
void | SetMarkBitUnsynchronized () |
|
void | SetMarkBitRelease () |
|
void | ClearMarkBit () |
|
void | ClearMarkBitUnsynchronized () |
|
DART_WARN_UNUSED_RESULT bool | TryAcquireMarkBit () |
|
bool | IsEvacuationCandidate () |
|
void | SetIsEvacuationCandidate () |
|
void | SetIsEvacuationCandidateUnsynchronized () |
|
void | ClearIsEvacuationCandidateUnsynchronized () |
|
bool | IsCanonical () const |
|
void | SetCanonical () |
|
void | ClearCanonical () |
|
bool | IsImmutable () const |
|
void | SetImmutable () |
|
void | ClearImmutable () |
|
bool | InVMIsolateHeap () const |
|
bool | IsRemembered () const |
|
bool | TryAcquireRememberedBit () |
|
void | ClearRememberedBit () |
|
void | ClearRememberedBitUnsynchronized () |
|
DART_FORCE_INLINE void | EnsureInRememberedSet (Thread *thread) |
|
bool | IsCardRemembered () const |
|
void | SetCardRememberedBitUnsynchronized () |
|
intptr_t | GetClassId () const |
|
intptr_t | HeapSize () const |
|
intptr_t | HeapSize (uword tags) const |
|
bool | Contains (uword addr) const |
|
void | Validate (IsolateGroup *isolate_group) const |
|
intptr_t | VisitPointers (ObjectPointerVisitor *visitor) |
|
template<class V > |
DART_FORCE_INLINE intptr_t | VisitPointersNonvirtual (V *visitor) |
|
void | VisitPointersPrecise (ObjectPointerVisitor *visitor) |
|
static bool | IsMarked (uword tags) |
|
static bool | IsEvacuationCandidate (uword tags) |
|
static ObjectPtr | FromAddr (uword addr) |
|
static uword | ToAddr (const UntaggedObject *raw_obj) |
|
static uword | ToAddr (const ObjectPtr raw_obj) |
|
static bool | IsCanonical (intptr_t value) |
|
static constexpr intptr_t | kGenerationalBarrierMask |
|
static constexpr intptr_t | kIncrementalBarrierMask = 1 << kNotMarkedBit |
|
static constexpr intptr_t | kBarrierOverlapShift = 2 |
|
template<typename T > |
static DART_FORCE_INLINE uword | from_offset () |
|
template<typename T > |
static DART_FORCE_INLINE uword | to_offset (intptr_t length=0) |
|
template<> |
DART_FORCE_INLINE uword | from_offset () |
|
template<> |
DART_FORCE_INLINE uword | to_offset (intptr_t length) |
|
template<> |
DART_FORCE_INLINE uword | to_offset (intptr_t length) |
|
template<> |
DART_FORCE_INLINE uword | to_offset (intptr_t length) |
|
static constexpr bool | kContainsCompressedPointers = false |
|
static constexpr bool | kContainsPointerFields = false |
|
Definition at line 2546 of file raw_object.h.