5#ifndef RUNTIME_VM_BITFIELD_H_
6#define RUNTIME_VM_BITFIELD_H_
// Guards the atomic wrapper: wrapping T in std::atomic must not change its
// size. The racy-read path below reinterprets the atomic storage as a plain
// const T*, which relies on this layout guarantee.
24 static_assert(
sizeof(std::atomic<T>) ==
sizeof(
T),
25 "Size of type changes when made atomic");
// Implicit conversion to the contained raw value via a relaxed atomic load.
32 operator T()
const {
return field_.load(std::memory_order_relaxed); }
// Fragment: relaxed store of `tags` into the atomic container. The enclosing
// signature is not visible in this chunk -- presumably an assignment
// operator; confirm against the full source.
34 field_.store(tags, std::memory_order_relaxed);
// Raw load of the whole container with a caller-chosen memory order.
38 T load(std::memory_order order)
const {
return field_.load(order); }
// Raw store of the whole container with a caller-chosen memory order.
39 void store(
T value, std::memory_order order) { field_.store(
value, order); }
// Fragment: weak compare-and-swap of the whole container; per std::atomic
// semantics, on failure `old_tags` is updated with the current value. The
// enclosing signature is not visible in this chunk.
42 return field_.compare_exchange_weak(old_tags, new_tags, order);
// Decode a single bit field out of an atomic load of the whole container.
// `order` defaults to relaxed. The method's closing brace is not visible in
// this chunk.
45 template <
class TargetBitField,
46 std::memory_order order = std::memory_order_relaxed>
47 typename TargetBitField::Type
Read()
const {
48 return TargetBitField::decode(field_.load(order));
// Fragment: deliberately racy read path -- bypasses the atomic by
// reinterpreting the storage as a plain T (valid only because of the sizeof
// static_assert earlier in the original header). The enclosing signature is
// not visible here; presumably ReadIgnoreRace -- confirm in the full source.
51 template <
class TargetBitField>
53 return TargetBitField::decode(*
reinterpret_cast<const T*
>(&field_));
// Fragments of a boolean-field updater: fetch_or of encode(true) sets the
// field's bit(s); fetch_and of the complement clears them. The branch that
// selects between the two, and the enclosing signature, are not visible in
// this chunk.
56 template <
class TargetBitField,
57 std::memory_order order = std::memory_order_relaxed>
60 field_.fetch_or(TargetBitField::encode(
true), order);
62 field_.fetch_and(~TargetBitField::encode(
true), order);
// Atomically OR the encoded `value` into the container (relaxed ordering).
// The method's closing brace is not visible in this chunk.
66 template <
class TargetBitField>
67 void FetchOr(
typename TargetBitField::Type value) {
68 field_.fetch_or(TargetBitField::encode(
value), std::memory_order_relaxed);
// CAS retry loop: re-reads the container and re-applies
// TargetBitField::update until the weak compare-exchange succeeds, which
// preserves concurrent updates to the other fields sharing this word. The
// declaration of `new_field` and the `do {` opening the loop are not visible
// in this chunk.
71 template <
class TargetBitField>
72 void Update(
typename TargetBitField::Type value) {
73 T old_field = field_.load(std::memory_order_relaxed);
76 new_field = TargetBitField::update(
value, old_field);
77 }
while (!field_.compare_exchange_weak(old_field, new_field,
78 std::memory_order_relaxed));
// Fragment: read-modify-write with no CAS loop, so it appears safe only when
// there are no concurrent writers -- confirm against the full source. The
// enclosing signature (presumably UpdateUnsynchronized) and the store( call
// this argument list belongs to are not visible in this chunk.
81 template <
class TargetBitField>
84 TargetBitField::update(
value, field_.load(std::memory_order_relaxed)),
85 std::memory_order_relaxed);
// Conditional update: installs `value_to_be_set` only while the field
// currently decodes to `conditional_old_value`; returns value_to_be_set when
// its CAS succeeds. The surrounding retry loop, the return for a mismatched
// old value, and the closing braces are not visible in this chunk.
88 template <
class TargetBitField>
90 typename TargetBitField::Type value_to_be_set,
91 typename TargetBitField::Type conditional_old_value) {
92 T old_field = field_.load(std::memory_order_relaxed);
95 auto old_value = TargetBitField::decode(old_field);
96 if (old_value != conditional_old_value) {
99 T new_tags = TargetBitField::update(value_to_be_set, old_field);
100 if (field_.compare_exchange_weak(old_field, new_tags,
101 std::memory_order_relaxed)) {
102 return value_to_be_set;
// Atomically set a single-bit field and report whether this caller set it:
// fetch_or returns the previous word, so the result is true iff the bit was
// previously clear. The enclosing signature (presumably TryAcquire) is not
// visible in this chunk.
108 template <
class TargetBitField>
110 T mask = TargetBitField::encode(
true);
111 T old_field = field_.fetch_or(mask, std::memory_order_relaxed);
112 return !TargetBitField::decode(old_field);
// Fragment: atomically clear a single-bit field via fetch_and and return its
// previous decoded value. The computation of `mask` (presumably the
// complement of the encoded bit) and the enclosing signature are not visible
// in this chunk.
115 template <
class TargetBitField>
118 T old_field = field_.fetch_and(mask, std::memory_order_relaxed);
119 return TargetBitField::decode(old_field);
// The single atomic word that all bit fields of this container share.
123 std::atomic<T> field_;
// Fragments of the BitField<S, T, position, size, sign_extend> helper.
// Several declaration lines are missing between fragments in this chunk.
135 typename Enable =
void>
// Compile-time checks: the bit range [position, position + size) must fit in
// the storage type S, and sign extension is only meaningful for signed T.
140 static_assert((
sizeof(
S) *
kBitsPerByte) >= (position + size),
141 "BitField does not fit into the type.");
142 static_assert(!
sign_extend || std::is_signed<T>::value,
143 "Should only sign extend signed bitfield types");
// First bit index past this field, so a following field can start here.
145 static constexpr intptr_t
kNextBit = position + size;
// Layout accessors for this field within S.
161 static constexpr int shift() {
return position; }
164 static constexpr int bitsize() {
return size; }
// Fragment: encode() forwarding to encode_unchecked; any validity check on
// `value` is not visible in this chunk.
169 return encode_unchecked(
value);
// Sign-extending decode: shift the field up to the top of a 64-bit word,
// then arithmetic-shift back down. The right-hand shift amount is on a line
// missing from this chunk.
178 auto const u =
static_cast<uint64_t
>(
value);
179 return static_cast<T>((
static_cast<int64_t
>(u << (64 -
kNextBit))) >>
// Zero-extending decode: shift down and mask, via S's unsigned counterpart.
182 auto const u =
static_cast<typename std::make_unsigned<S>::type
>(
value);
183 return static_cast<T>((u >> position) &
mask());
// update(): replace only this field's bits within `original`.
191 return encode(
value) | (~mask_in_place() & original);
// Shift the masked value into position; the cast through the unsigned
// counterpart of S avoids shifting a signed operand.
196 static constexpr S encode_unchecked(
T value) {
197 auto const u =
static_cast<typename std::make_unsigned<S>::type
>(
value);
198 return (u &
mask()) << position;
// Fragment of a partial specialization: enable_if on
// std::is_base_of<AtomicBitFieldContainerBase, S> selects the BitField
// variant used when the storage type S is an atomic container. Parts of the
// declaration are missing from this chunk.
205template <
typename S,
typename T,
int position,
int size,
bool sign_extend>
211 typename
std::enable_if<
212 std::is_base_of<AtomicBitFieldContainerBase, S>::value,
// Partial specialization for atomic containers without an explicit
// sign_extend argument: forwards to BitField over the container's
// ContainedType with sign_extend fixed to false. Parts of the enclosing
// declaration are missing from this chunk.
219template <
typename S,
typename T,
int position,
int size>
225 typename
std::enable_if<
226 std::is_base_of<AtomicBitFieldContainerBase, S>::value,
228 :
public BitField<typename S::ContainedType, T, position, size, false> {};
// NOTE(review): the lines below look like an auto-extracted symbol index
// (bodiless signatures; the `encode(uint8_t[16], ...)` entry appears to come
// from an unrelated file) rather than compilable code. Confirm against the
// original header and remove them if so.
static void encode(uint8_t output[16], const uint32_t input[4])
void UpdateBool(bool value)
void FetchOr(typename TargetBitField::Type value)
TargetBitField::Type UpdateConditional(typename TargetBitField::Type value_to_be_set, typename TargetBitField::Type conditional_old_value)
AtomicBitFieldContainer()
void UpdateUnsynchronized(typename TargetBitField::Type value)
bool compare_exchange_weak(T old_tags, T new_tags, std::memory_order order)
void store(T value, std::memory_order order)
void Update(typename TargetBitField::Type value)
T load(std::memory_order order) const
NO_SANITIZE_THREAD TargetBitField::Type ReadIgnoreRace() const
TargetBitField::Type Read() const
static constexpr int bitsize()
static constexpr intptr_t kNextBit
static constexpr T decode(S value)
static constexpr S update(T value, S original)
static constexpr S mask_in_place()
static constexpr S mask()
static constexpr S encode(T value)
static constexpr bool is_valid(T value)
static constexpr int shift()
constexpr intptr_t kBitsPerByte
intx_t sign_extend(int32_t x)
static constexpr uword kUwordOne
#define NO_SANITIZE_THREAD