SkTArray.h
/*
 * Copyright 2011 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SkTArray_DEFINED
#define SkTArray_DEFINED

#include "include/private/base/SkASAN.h"  // IWYU pragma: keep
#include "include/private/base/SkAlignedStorage.h"
#include "include/private/base/SkAssert.h"
#include "include/private/base/SkAttributes.h"
#include "include/private/base/SkContainers.h"
#include "include/private/base/SkDebug.h"
#include "include/private/base/SkMalloc.h"
#include "include/private/base/SkMath.h"
#include "include/private/base/SkSpan_impl.h"
#include "include/private/base/SkTo.h"
#include "include/private/base/SkTypeTraits.h"  // IWYU pragma: keep

#include <algorithm>
#include <climits>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <initializer_list>
#include <new>
#include <utility>

namespace skia_private {
/** TArray<T> implements a typical, mostly std::vector-like array.
    Each T will be default-initialized on allocation, and ~T will be called on destruction.

    MEM_MOVE controls the behavior when a T needs to be moved (e.g. when the array is resized)
    - true: T will be bit-copied via memcpy.
    - false: T will be moved via move-constructors.
*/
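// Typical usage, as an illustrative sketch built only from the APIs declared below:
//
//     TArray<int> ints;
//     ints.push_back(1);            // copy-construct one element
//     ints.push_back_n(3, 7);       // append {7, 7, 7}
//     int last = ints.back();       // 7
//     ints.removeShuffle(0);        // O(1) removal; the last element takes index 0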
template <typename T, bool MEM_MOVE = sk_is_trivially_relocatable_v<T>> class TArray {
public:
    using value_type = T;

    /**
     * Creates an empty array with no initial storage
     */
    TArray() : fOwnMemory(true), fCapacity{0} {}

    /**
     * Creates an empty array that will preallocate space for reserveCount elements.
     */
    explicit TArray(int reserveCount) : TArray() { this->reserve_exact(reserveCount); }

    /**
     * Copies one array to another. The new array will be heap allocated.
     */
    TArray(const TArray& that) : TArray(that.fData, that.fSize) {}

    TArray(TArray&& that) {
        if (that.fOwnMemory) {
            this->setData(that);
            that.setData({});
        } else {
            this->initData(that.fSize);
            that.move(fData);
        }
        this->changeSize(that.fSize);
        that.changeSize(0);
    }

    /**
     * Creates a TArray by copying contents of a standard C array. The new
     * array will be heap allocated. Be careful not to use this constructor
     * when you really want the (void*, int) version.
     */
    TArray(const T* array, int count) {
        this->initData(count);
        this->copy(array);
    }

    /**
     * Creates a TArray by copying contents of an initializer list.
     */
    TArray(std::initializer_list<T> data) : TArray(data.begin(), data.size()) {}

    TArray& operator=(const TArray& that) {
        if (this == &that) {
            return *this;
        }
        this->clear();
        this->checkRealloc(that.size(), kExactFit);
        this->changeSize(that.fSize);
        this->copy(that.fData);
        return *this;
    }

    TArray& operator=(TArray&& that) {
        if (this != &that) {
            this->clear();
            this->unpoison();
            that.unpoison();
            if (that.fOwnMemory) {
                // The storage is on the heap, so move the data pointer.
                if (fOwnMemory) {
                    sk_free(fData);
                }

                fData = std::exchange(that.fData, nullptr);

                // Can't use exchange with bitfields.
                fCapacity = that.fCapacity;
                that.fCapacity = 0;

                fOwnMemory = true;

                this->changeSize(that.fSize);
            } else {
                // The data is stored inline in that, so move it element-by-element.
                this->checkRealloc(that.size(), kExactFit);
                this->changeSize(that.fSize);
                that.move(fData);
            }
            that.changeSize(0);
        }
        return *this;
    }

    ~TArray() {
        this->destroyAll();
        this->unpoison();
        if (fOwnMemory) {
            sk_free(fData);
        }
    }

    /**
     * Resets to size() = n newly constructed T objects and resets any reserve count.
     */
    void reset(int n) {
        SkASSERT(n >= 0);
        this->clear();
        this->checkRealloc(n, kExactFit);
        this->changeSize(n);
        for (int i = 0; i < this->size(); ++i) {
            new (fData + i) T;
        }
    }

    /**
     * Resets to a copy of a C array and resets any reserve count.
     */
    void reset(const T* array, int count) {
        SkASSERT(count >= 0);
        this->clear();
        this->checkRealloc(count, kExactFit);
        this->changeSize(count);
        this->copy(array);
    }

    /**
     * Ensures there is enough reserved space for at least n elements. This is guaranteed at least
     * until the array size grows above n and subsequently shrinks below n, any version of reset()
     * is called, or reserve() is called again.
     */
    void reserve(int n) {
        SkASSERT(n >= 0);
        if (n > this->size()) {
            this->checkRealloc(n - this->size(), kGrowing);
        }
    }

    /**
     * Ensures there is enough reserved space for exactly n elements. The same capacity guarantees
     * as above apply.
     */
    void reserve_exact(int n) {
        SkASSERT(n >= 0);
        if (n > this->size()) {
            this->checkRealloc(n - this->size(), kExactFit);
        }
    }

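    // For example (illustrative): on an empty array, reserve(10) may over-allocate using the
    // growing factor, while reserve_exact(10) requests space for exactly 10 elements; neither
    // changes size().
    //
    //     TArray<float> a;
    //     a.reserve_exact(10);   // capacity() >= 10, size() == 0
    //     a.push_back(1.0f);     // fits in the reserved space, no reallocation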
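    /**
     * Removes the element at index n in O(1) by destroying it and moving the last element into
     * its slot (a "swap-remove"); the relative order of the remaining elements is not preserved.
     */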
    void removeShuffle(int n) {
        SkASSERT(n < this->size());
        int newCount = fSize - 1;
        fData[n].~T();
        if (n != newCount) {
            this->move(n, newCount);
        }
        this->changeSize(newCount);
    }

    // Is the array empty.
    bool empty() const { return fSize == 0; }

    /**
     * Adds one new default-initialized T value and returns it by reference. Note that the
     * reference only remains valid until the next call that adds or removes elements.
     */
    T& push_back() {
        void* newT = this->push_back_raw(1);
        return *new (newT) T;
    }

    /**
     * Adds one new T value which is copy-constructed, returning it by reference. As always,
     * the reference only remains valid until the next call that adds or removes elements.
     */
    T& push_back(const T& t) {
        this->unpoison();
        T* newT;
        if (this->capacity() > fSize) SK_LIKELY {
            // Copy over the element directly.
            newT = new (fData + fSize) T(t);
        } else {
            newT = this->growAndConstructAtEnd(t);
        }

        this->changeSize(fSize + 1);
        return *newT;
    }

    /**
     * Adds one new T value which is move-constructed, returning it by reference.
     */
    T& push_back(T&& t) {
        this->unpoison();
        T* newT;
        if (this->capacity() > fSize) SK_LIKELY {
            // Move over the element directly.
            newT = new (fData + fSize) T(std::move(t));
        } else {
            newT = this->growAndConstructAtEnd(std::move(t));
        }

        this->changeSize(fSize + 1);
        return *newT;
    }

    /**
     * Constructs a new T at the back of this array, returning it by reference.
     */
    template <typename... Args> T& emplace_back(Args&&... args) {
        this->unpoison();
        T* newT;
        if (this->capacity() > fSize) SK_LIKELY {
            // Emplace the new element in directly.
            newT = new (fData + fSize) T(std::forward<Args>(args)...);
        } else {
            newT = this->growAndConstructAtEnd(std::forward<Args>(args)...);
        }

        this->changeSize(fSize + 1);
        return *newT;
    }

    /**
     * Allocates n more default-initialized T values, and returns the address of
     * the start of that new range. Note: this address is only valid until the
     * next API call made on the array that might add or remove elements.
     */
    T* push_back_n(int n) {
        SkASSERT(n >= 0);
        T* newTs = TCast(this->push_back_raw(n));
        for (int i = 0; i < n; ++i) {
            new (&newTs[i]) T;
        }
        return newTs;
    }

    /**
     * Version of above that uses a copy constructor to initialize all n items
     * to the same T.
     */
    T* push_back_n(int n, const T& t) {
        SkASSERT(n >= 0);
        T* newTs = TCast(this->push_back_raw(n));
        for (int i = 0; i < n; ++i) {
            new (&newTs[i]) T(t);
        }
        return static_cast<T*>(newTs);
    }

    /**
     * Version of above that uses a copy constructor to initialize the n items
     * to separate T values.
     */
    T* push_back_n(int n, const T t[]) {
        SkASSERT(n >= 0);
        this->checkRealloc(n, kGrowing);
        T* end = this->end();
        this->changeSize(fSize + n);
        for (int i = 0; i < n; ++i) {
            new (end + i) T(t[i]);
        }
        return end;
    }

    /**
     * Version of above that uses the move constructor to set n items.
     */
    T* move_back_n(int n, T* t) {
        SkASSERT(n >= 0);
        this->checkRealloc(n, kGrowing);
        T* end = this->end();
        this->changeSize(fSize + n);
        for (int i = 0; i < n; ++i) {
            new (end + i) T(std::move(t[i]));
        }
        return end;
    }

    /**
     * Removes the last element. Not safe to call when size() == 0.
     */
    void pop_back() {
        sk_collection_not_empty(this->empty());
        fData[fSize - 1].~T();
        this->changeSize(fSize - 1);
    }

    /**
     * Removes the last n elements. Not safe to call when size() < n.
     */
    void pop_back_n(int n) {
        SkASSERT(n >= 0);
        SkASSERT(this->size() >= n);
        int i = fSize;
        while (i-- > fSize - n) {
            (*this)[i].~T();
        }
        this->changeSize(fSize - n);
    }

    /**
     * Pushes or pops from the back to resize. Pushes will be default initialized.
     */
    void resize_back(int newCount) {
        SkASSERT(newCount >= 0);
        if (newCount > this->size()) {
            if (this->empty()) {
                // When the container is completely empty, grow to exactly the requested size.
                this->checkRealloc(newCount, kExactFit);
            }
            this->push_back_n(newCount - fSize);
        } else if (newCount < this->size()) {
            this->pop_back_n(fSize - newCount);
        }
    }

    /** Swaps the contents of this array with that array. Does a pointer swap if possible,
        otherwise copies the T values. */
    void swap(TArray& that) {
        using std::swap;
        if (this == &that) {
            return;
        }
        if (fOwnMemory && that.fOwnMemory) {
            swap(fData, that.fData);
            swap(fSize, that.fSize);

            // Can't use swap because fCapacity is a bit field.
            auto allocCount = fCapacity;
            fCapacity = that.fCapacity;
            that.fCapacity = allocCount;
        } else {
            // This could be more optimal...
            TArray copy(std::move(that));
            that = std::move(*this);
            *this = std::move(copy);
        }
    }

    /**
     * Moves all elements of `that` to the end of this array, leaving `that` empty.
     * This is a no-op if `that` is empty or equal to this array.
     */
    void move_back(TArray& that) {
        if (that.empty() || &that == this) {
            return;
        }
        void* dst = this->push_back_raw(that.size());
        // After move() returns, the contents of `dst` will have either been in-place initialized
        // using the move constructor (per-item from `that`'s elements), or will have been
        // mem-copied into when MEM_MOVE is true (now valid objects).
        that.move(dst);
        // All items in `that` have either been destroyed (when MEM_MOVE is false) or should be
        // considered invalid (when MEM_MOVE is true). Reset fSize to 0 directly to skip any
        // further per-item destruction.
        that.changeSize(0);
    }

    T* begin() {
        return fData;
    }
    const T* begin() const {
        return fData;
    }

    // It's safe to use fData + fSize because if fData is nullptr then adding 0 is
    // valid and returns nullptr. See [expr.add] in the C++ standard.
    T* end() {
        if (fData == nullptr) {
            SkASSERT(fSize == 0);
        }
        return fData + fSize;
    }
    const T* end() const {
        if (fData == nullptr) {
            SkASSERT(fSize == 0);
        }
        return fData + fSize;
    }
    T* data() { return fData; }
    const T* data() const { return fData; }
    int size() const { return fSize; }
    size_t size_bytes() const { return Bytes(fSize); }
    void resize(size_t count) { this->resize_back((int)count); }

    void clear() {
        this->destroyAll();
        this->changeSize(0);
    }

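    /**
     * Releases excess heap capacity. When the array is empty its allocation is freed entirely;
     * otherwise the elements are moved into a new allocation sized for size() elements. Has no
     * effect when the array does not own its memory or is already exactly full.
     */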
    void shrink_to_fit() {
        if (!fOwnMemory || fSize == fCapacity) {
            return;
        }
        this->unpoison();
        if (fSize == 0) {
            sk_free(fData);
            fData = nullptr;
            fCapacity = 0;
        } else {
            SkSpan<std::byte> allocation = Allocate(fSize);
            this->move(TCast(allocation.data()));
            if (fOwnMemory) {
                sk_free(fData);
            }
            // Poison is applied in `setDataFromBytes`.
            this->setDataFromBytes(allocation);
        }
    }

    /**
     * Get the i^th element.
     */
    T& operator[] (int i) {
        return fData[sk_collection_check_bounds(i, this->size())];
    }

    const T& operator[] (int i) const {
        return fData[sk_collection_check_bounds(i, this->size())];
    }

    T& at(int i) { return (*this)[i]; }
    const T& at(int i) const { return (*this)[i]; }

    /**
     * equivalent to operator[](0)
     */
    T& front() {
        sk_collection_not_empty(this->empty());
        return fData[0];
    }

    const T& front() const {
        sk_collection_not_empty(this->empty());
        return fData[0];
    }

    /**
     * equivalent to operator[](size() - 1)
     */
    T& back() {
        sk_collection_not_empty(this->empty());
        return fData[fSize - 1];
    }

    const T& back() const {
        sk_collection_not_empty(this->empty());
        return fData[fSize - 1];
    }

    /**
     * equivalent to operator[](size()-1-i)
     */
    T& fromBack(int i) {
        return (*this)[fSize - i - 1];
    }

    const T& fromBack(int i) const {
        return (*this)[fSize - i - 1];
    }

    bool operator==(const TArray<T, MEM_MOVE>& right) const {
        int leftCount = this->size();
        if (leftCount != right.size()) {
            return false;
        }
        for (int index = 0; index < leftCount; ++index) {
            if (fData[index] != right.fData[index]) {
                return false;
            }
        }
        return true;
    }

    bool operator!=(const TArray<T, MEM_MOVE>& right) const {
        return !(*this == right);
    }

    int capacity() const {
        return fCapacity;
    }

protected:
    // Creates an empty array that will use the passed storage block until it is insufficiently
    // large to hold the entire array.
    template <int InitialCapacity>
    TArray(SkAlignedSTStorage<InitialCapacity, T>* storage, int size = 0) {
        static_assert(InitialCapacity >= 0);
        SkASSERT(size >= 0);
        SkASSERT(storage->get() != nullptr);
        if (size > InitialCapacity) {
            this->initData(size);
        } else {
            this->setDataFromBytes(*storage);
            this->changeSize(size);

            // setDataFromBytes always sets fOwnMemory to true, but we are actually using static
            // storage here, which shouldn't ever be freed.
            fOwnMemory = false;
        }
    }

    // Copy a C array, using the pre-allocated storage if InitialCapacity >= size. Otherwise,
    // storage will only be used when the array shrinks to fit.
    template <int InitialCapacity>
    TArray(const T* array, int size, SkAlignedSTStorage<InitialCapacity, T>* storage)
            : TArray{storage, size} {
        this->copy(array);
    }

private:
    // Growth factors for checkRealloc.
    static constexpr double kExactFit = 1.0;
    static constexpr double kGrowing = 1.5;

    static constexpr int kMinHeapAllocCount = 8;
    static_assert(SkIsPow2(kMinHeapAllocCount), "min alloc count not power of two.");

    // Note for 32-bit machines kMaxCapacity will be <= SIZE_MAX. For 64-bit machines it will
    // just be INT_MAX if the sizeof(T) < 2^32.
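    // For example, with sizeof(T) == 4 on a 64-bit build, SIZE_MAX / sizeof(T) far exceeds
    // INT_MAX, so kMaxCapacity is INT_MAX; only a very large sizeof(T) (or a 32-bit size_t)
    // makes the SIZE_MAX / sizeof(T) term the limiting factor.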
    static constexpr int kMaxCapacity = SkToInt(std::min(SIZE_MAX / sizeof(T), (size_t)INT_MAX));

    void setDataFromBytes(SkSpan<std::byte> allocation) {
        T* data = TCast(allocation.data());
        // We may have gotten extra bytes back from the allocation limit, so pin to kMaxCapacity.
        // It would seem like the SkContainerAllocator should handle the divide, but it would
        // have to do a full divide instruction. If done here the size is known at compile time,
        // and usually can be implemented by a right shift. The full divide takes ~50X longer
        // than the shift.
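        // E.g. when sizeof(T) is a power of two such as 8, the division below typically compiles
        // down to a right shift (by 3 for sizeof(T) == 8).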
        size_t size = std::min(allocation.size() / sizeof(T), SkToSizeT(kMaxCapacity));
        this->setData(SkSpan<T>(data, size));
    }

    void setData(SkSpan<T> array) {
        this->unpoison();

        fData = array.data();
        fCapacity = SkToU32(array.size());
        fOwnMemory = true;

        this->poison();
    }

    void unpoison() {
#ifdef SK_SANITIZE_ADDRESS
        if (fData) {
            // SkDebugf("UNPOISONING %p : 0 -> %zu\n", fData, Bytes(fCapacity));
            sk_asan_unpoison_memory_region(this->begin(), Bytes(fCapacity));
        }
#endif
    }

    void poison() {
#ifdef SK_SANITIZE_ADDRESS
        if (fData && fCapacity > fSize) {
            // SkDebugf("  POISONING %p : %zu -> %zu\n", fData, Bytes(fSize), Bytes(fCapacity));
            sk_asan_poison_memory_region(this->end(), Bytes(fCapacity - fSize));
        }
#endif
    }

    void changeSize(int n) {
        this->unpoison();
        fSize = n;
        this->poison();
    }

    // We disable Control-Flow Integrity sanitization (go/cfi) when casting item-array buffers.
    // CFI flags this code as dangerous because we are casting `buffer` to a T* while the buffer's
    // contents might still be uninitialized memory. When T has a vtable, this is especially risky
    // because we could hypothetically access a virtual method on fData and jump to an
    // unpredictable location in memory. Of course, TArray won't actually use fData in this
    // way, and we don't want to construct a T before the user requests one. There's no real risk
    // here, so disable CFI when doing these casts.
    SK_CLANG_NO_SANITIZE("cfi")
    static T* TCast(void* buffer) {
        return (T*)buffer;
    }

    static size_t Bytes(int n) {
        SkASSERT(n <= kMaxCapacity);
        return SkToSizeT(n) * sizeof(T);
    }

    static SkSpan<std::byte> Allocate(int capacity, double growthFactor = 1.0) {
        return SkContainerAllocator{sizeof(T), kMaxCapacity}.allocate(capacity, growthFactor);
    }

    void initData(int count) {
        this->setDataFromBytes(Allocate(count));
        this->changeSize(count);
    }

    void destroyAll() {
        if (!this->empty()) {
            T* cursor = this->begin();
            T* const end = this->end();
            do {
                cursor->~T();
                cursor++;
            } while (cursor < end);
        }
    }

    /** In the following move and copy methods, 'dst' is assumed to be uninitialized raw storage.
     *  In the following move methods, 'src' is destroyed leaving behind uninitialized raw storage.
     */
    void copy(const T* src) {
        if constexpr (std::is_trivially_copyable_v<T>) {
            if (!this->empty() && src != nullptr) {
                sk_careful_memcpy(fData, src, this->size_bytes());
            }
        } else {
            for (int i = 0; i < this->size(); ++i) {
                new (fData + i) T(src[i]);
            }
        }
    }

    void move(int dst, int src) {
        if constexpr (MEM_MOVE) {
            memcpy(static_cast<void*>(&fData[dst]),
                   static_cast<const void*>(&fData[src]),
                   sizeof(T));
        } else {
            new (&fData[dst]) T(std::move(fData[src]));
            fData[src].~T();
        }
    }

    void move(void* dst) {
        if constexpr (MEM_MOVE) {
            sk_careful_memcpy(dst, fData, Bytes(fSize));
        } else {
            for (int i = 0; i < this->size(); ++i) {
                new (static_cast<char*>(dst) + Bytes(i)) T(std::move(fData[i]));
                fData[i].~T();
            }
        }
    }

    // Helper function that makes space for n objects, adjusts the count, but does not initialize
    // the new objects.
    void* push_back_raw(int n) {
        this->checkRealloc(n, kGrowing);
        void* ptr = fData + fSize;
        this->changeSize(fSize + n);
        return ptr;
    }

    template <typename... Args>
    SK_ALWAYS_INLINE T* growAndConstructAtEnd(Args&&... args) {
        SkSpan<std::byte> buffer = this->preallocateNewData(/*delta=*/1, kGrowing);
        T* newT = new (TCast(buffer.data()) + fSize) T(std::forward<Args>(args)...);
        this->installDataAndUpdateCapacity(buffer);

        return newT;
    }

    void checkRealloc(int delta, double growthFactor) {
        SkASSERT(delta >= 0);
        SkASSERT(fSize >= 0);
        SkASSERT(fCapacity >= 0);

        // Check if there are enough remaining allocated elements to satisfy the request.
        if (this->capacity() - fSize < delta) {
            // Looks like we need to reallocate.
            this->installDataAndUpdateCapacity(this->preallocateNewData(delta, growthFactor));
        }
    }

    SkSpan<std::byte> preallocateNewData(int delta, double growthFactor) {
        SkASSERT(delta >= 0);
        SkASSERT(fSize >= 0);
        SkASSERT(fCapacity >= 0);

        // Don't overflow fSize or size_t later in the memory allocation. Overflowing memory
        // allocation really only applies to fSizes on 32-bit machines; on 64-bit machines this
        // will probably never produce a check. Since kMaxCapacity is bounded above by INT_MAX,
        // this also checks the bounds of fSize.
        if (delta > kMaxCapacity - fSize) {
            sk_report_container_overflow_and_die();
        }
        const int newCount = fSize + delta;

        return Allocate(newCount, growthFactor);
    }

    void installDataAndUpdateCapacity(SkSpan<std::byte> allocation) {
        this->move(TCast(allocation.data()));
        if (fOwnMemory) {
            sk_free(fData);
        }
        this->setDataFromBytes(allocation);
        SkASSERT(fData != nullptr);
    }

    T* fData{nullptr};
    int fSize{0};
    uint32_t fOwnMemory : 1;
    uint32_t fCapacity : 31;
};

template <typename T, bool M> static inline void swap(TArray<T, M>& a, TArray<T, M>& b) {
    a.swap(b);
}

// Subclass of TArray that contains a pre-allocated memory block for the array.
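// Illustrative usage: elements live in the inline storage block until its capacity is exceeded,
// at which point the array falls back to the heap. For instance:
//
//     STArray<16, int> values;     // room for at least 16 ints with no heap allocation
//     values.push_back(42);        // stored inline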
template <int Nreq, typename T, bool MEM_MOVE = sk_is_trivially_relocatable_v<T>>
class STArray : private SkAlignedSTStorage<SkContainerAllocator::RoundUp<T>(Nreq), T>,
                public TArray<T, MEM_MOVE> {
    // We round up the requested array size to the next capacity multiple.
    // This space would likely otherwise go to waste.
    static constexpr int N = SkContainerAllocator::RoundUp<T>(Nreq);
    static_assert(Nreq > 0);
    static_assert(N >= Nreq);

    using Storage = SkAlignedSTStorage<N, T>;

public:
    STArray()
        : Storage{}
        , TArray<T, MEM_MOVE>(this) {}  // Must use () to avoid confusion with initializer_list
                                        // when T=bool because pointers are convertible to bool.

    STArray(const T* array, int count)
        : Storage{}
        , TArray<T, MEM_MOVE>{array, count, this} {}

    STArray(std::initializer_list<T> data)
        : STArray{data.begin(), SkToInt(data.size())} {}

    explicit STArray(int reserveCount)
        : STArray() { this->reserve_exact(reserveCount); }

    STArray(const STArray& that)
        : STArray() { *this = that; }

    explicit STArray(const TArray<T, MEM_MOVE>& that)
        : STArray() { *this = that; }

    STArray(STArray&& that)
        : STArray() { *this = std::move(that); }

    explicit STArray(TArray<T, MEM_MOVE>&& that)
        : STArray() { *this = std::move(that); }

    STArray& operator=(const STArray& that) {
        TArray<T, MEM_MOVE>::operator=(that);
        return *this;
    }

    STArray& operator=(const TArray<T, MEM_MOVE>& that) {
        TArray<T, MEM_MOVE>::operator=(that);
        return *this;
    }

    STArray& operator=(STArray&& that) {
        TArray<T, MEM_MOVE>::operator=(std::move(that));
        return *this;
    }

    STArray& operator=(TArray<T, MEM_MOVE>&& that) {
        TArray<T, MEM_MOVE>::operator=(std::move(that));
        return *this;
    }

    // Force the use of TArray for data() and size().
    using TArray<T, MEM_MOVE>::data;
    using TArray<T, MEM_MOVE>::size;
};
} // namespace skia_private
#endif  // SkTArray_DEFINED