8#ifndef SkBlockAllocator_DEFINED
9#define SkBlockAllocator_DEFINED
// Forward deallocation to the global operator delete; pairs with the
// class-specific allocation path so Blocks sized at runtime are freed
// through the matching global deallocator.
void operator delete(void* p) {
    ::operator delete(p);
}
77 template <
size_t Align = 1,
size_t Padding = 0>
78 int avail()
const {
return std::max(0, fSize - this->cursor<Align, Padding>()); }
84 template <
size_t Align = 1,
size_t Padding = 0>
90 return reinterpret_cast<char*
>(
this) +
offset;
129 void poisonRange(
int start,
int end) {
132 void unpoisonRange(
int start,
int end) {
137 template <
size_t Align,
size_t Padding>
138 int cursor()
const {
return this->alignedOffset<Align, Padding>(fCursor); }
140 template <
size_t Align,
size_t Padding>
141 int alignedOffset(
int offset)
const;
143 bool isScratch()
const {
return fCursor < 0; }
144 void markAsScratch() {
146 this->poisonRange(kDataStart, fSize);
164 int fAllocatorMetadata;
185 size_t additionalPreallocBytes = 0);
// Route deallocation through the global operator delete so allocators
// created on the heap are released with the matching global deallocator.
void operator delete(void* p) {
    ::operator delete(p);
}
196 template<
size_t Align = 1,
size_t Padding = 0>
205 template<
size_t Align = 1,
size_t Padding = 0>
240 return fHead.fSize - kDataStart;
248 int metadata()
const {
return fHead.fAllocatorMetadata; }
282 template <
size_t Align,
size_t Padding = 0>
309 template <
size_t Align = 1,
size_t Padding = 0>
333 template <
size_t Align,
size_t Padding = 0>
336 template <
size_t Align,
size_t Padding = 0>
389 template <
bool Forward,
bool Const>
class BlockIter;
401 inline BlockIter<true, false>
blocks();
402 inline BlockIter<true, true>
blocks()
const;
403 inline BlockIter<false, false>
rblocks();
404 inline BlockIter<false, true>
rblocks()
const;
// Sentinel values checked in debug asserts (fSentinel == kAssignedMarker)
// to detect use of a Block after it has been freed or never assigned.
// NOTE(review): kFreedMarker presumably stamps released blocks — confirm
// against the release path, which is not visible in this chunk.
407 inline static constexpr uint32_t kAssignedMarker = 0xBEEFFACE;
408 inline static constexpr uint32_t kFreedMarker = 0xCAFEBABE;
410 void validate()
const;
// Offset of the first usable payload byte within a block: the data region
// begins immediately after the Block header itself (poisonRange is called
// with [kDataStart, fSize) when a block is marked as scratch).
417 inline static constexpr int kDataStart =
sizeof(
Block);
418 #ifdef SK_FORCE_8_BYTE_ALIGNMENT
426 static constexpr size_t kAddressAlign = 8;
430 static constexpr size_t kAddressAlign =
alignof(std::max_align_t);
435 template<
size_t Align,
size_t Padding>
436 static constexpr size_t MaxBlockSize();
438 static constexpr int BaseHeadBlockSize() {
445 void addBlock(
int minSize,
int maxSize);
447 int scratchBlockSize()
const {
return fHead.fPrev ? fHead.fPrev->fSize : 0; }
463 uint64_t fBlockIncrement : 16;
464 uint64_t fGrowthPolicy : 2;
473 alignas(kAddressAlign)
Block fHead;
520template<
size_t Align,
size_t Padding>
526template<
size_t Align,
size_t Padding>
534template<
size_t Align,
size_t Padding>
535constexpr size_t SkBlockAllocator::MaxBlockSize() {
542template<
size_t Align,
size_t Padding>
545 SK_ABORT(
"Allocation too large (%zu bytes requested)", size);
547 int iSize = (
int) size;
551 int blockSize = BlockOverhead<Align, Padding>() + iSize;
553 : MaxBlockSize<Align, Padding>();
554 SkASSERT((
size_t) maxSize <= (MaxBlockSize<Align, Padding>()));
557 this->addBlock(blockSize, maxSize);
565template <
size_t Align,
size_t Padding>
568 static constexpr int kBlockOverhead = (
int) BlockOverhead<Align, Padding>();
572 <= (
size_t) std::numeric_limits<int32_t>::max());
575 <= std::numeric_limits<int32_t>::max());
578 SK_ABORT(
"Allocation too large (%zu bytes requested)", size);
581 int iSize = (
int) size;
584 if (
end > fTail->fSize) {
585 this->addBlock(iSize + kBlockOverhead, MaxBlockSize<Align, Padding>());
594 offset - fTail->fCursor <= (
int) (Padding +
Align - 1));
597 int start = fTail->fCursor;
598 fTail->fCursor =
end;
600 fTail->unpoisonRange(
offset - Padding,
end);
605template <
size_t Align,
size_t Padding>
614 if (
Align <= kAddressAlign) {
616 (
reinterpret_cast<uintptr_t
>(p) -
start - Padding) & ~(
Align - 1));
617 SkASSERT(block->fSentinel == kAssignedMarker);
626template <
size_t Align,
size_t Padding>
627int SkBlockAllocator::Block::alignedOffset(
int offset)
const {
630 static_assert(MaxBlockSize<Align, Padding>() + Padding +
Align - 1
631 <= (size_t) std::numeric_limits<int32_t>::max());
633 if (
Align <= kAddressAlign) {
639 uintptr_t blockPtr =
reinterpret_cast<uintptr_t
>(
this);
640 uintptr_t alignedPtr = (blockPtr +
offset + Padding +
Align - 1) & ~(
Align - 1);
641 SkASSERT(alignedPtr - blockPtr <= (uintptr_t) std::numeric_limits<int32_t>::max());
642 return (
int) (alignedPtr - blockPtr);
647 SkASSERT(fSentinel == kAssignedMarker);
654 if (fCursor ==
end) {
655 int nextCursor =
end + deltaBytes;
658 if (nextCursor <= fSize && nextCursor >=
start) {
659 if (nextCursor < fCursor) {
661 this->poisonRange(nextCursor + 1,
end);
664 this->unpoisonRange(
end, nextCursor);
667 fCursor = nextCursor;
678 SkASSERT(fSentinel == kAssignedMarker);
683 if (fCursor ==
end) {
692template <
bool Forward,
bool Const>
695 using BlockT =
typename std::conditional<Const, const Block, Block>::type;
697 typename std::conditional<Const, const SkBlockAllocator, SkBlockAllocator>::type;
700 BlockIter(AllocatorT* allocator) : fAllocator(allocator) {}
709 this->advance(fNext);
716 Item(BlockT* block) { this->advance(block); }
718 void advance(BlockT* block) {
720 fNext = block ? (Forward ? block->fNext : block->fPrev) : nullptr;
721 if (!Forward && fNext && fNext->isScratch()) {
726 SkASSERT(!fNext || !fNext->isScratch());
734 Item begin()
const {
return Item(Forward ? &fAllocator->fHead : fAllocator->fTail); }
738 AllocatorT* fAllocator;
static float prev(float f)
static void sk_asan_poison_memory_region(void const volatile *addr, size_t size)
static void sk_asan_unpoison_memory_region(void const volatile *addr, size_t size)
static constexpr size_t SkAlignTo(size_t x, size_t alignment)
#define SK_ABORT(message,...)
SkBlockAllocator::Block Block
#define SK_MAKE_BITFIELD_OPS(X)
constexpr bool SkIsPow2(T value)
Type::kYUV Type::kRGBA() int(0.7 *637)
BlockT * operator*() const
bool operator!=(const Item &other) const
BlockIter(AllocatorT *allocator)
int firstAlignedOffset() const
const void * ptr(int offset) const
bool release(int start, int end)
bool resize(int start, int end, int deltaBytes)
void setMetadata(int value)
static constexpr size_t BlockOverhead()
size_t totalSpaceInUse() const
void reserve(size_t size, ReserveFlags flags=kNo_ReserveFlags)
static constexpr size_t Overhead()
size_t preallocUsableSpace() const
void setMetadata(int value)
void releaseBlock(Block *block)
void stealHeapBlocks(SkBlockAllocator *other)
const Block * findOwningBlock(const void *ptr) const
ByteRange allocate(size_t size)
size_t preallocSize() const
size_t totalUsableSpace() const
@ kIgnoreExistingBytes_Flag
@ kIgnoreGrowthPolicy_Flag
static constexpr int kGrowthPolicyCount
Block * findOwningBlock(const void *ptr)
static constexpr int kMaxAllocationSize
BlockIter< false, false > rblocks()
const Block * owningBlock(const void *ptr, int start) const
const Block * headBlock() const
BlockIter< true, false > blocks()
const Block * currentBlock() const
Block * owningBlock(const void *ptr, int start)
const SkBlockAllocator * allocator() const
const SkBlockAllocator * operator->() const
SkBlockAllocator * allocator()
SkSBlockAllocator(GrowthPolicy policy, size_t blockIncrementBytes)
SkBlockAllocator * operator->()
SkSBlockAllocator(GrowthPolicy policy)
FlutterSemanticsFlag flags