Flutter Engine
The Flutter Engine
SkTArray.h
Go to the documentation of this file.
1/*
2 * Copyright 2011 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#ifndef SkTArray_DEFINED
9#define SkTArray_DEFINED
10
#include "include/private/base/SkASAN.h"  // IWYU pragma: keep
#include "include/private/base/SkAlignedStorage.h"
#include "include/private/base/SkAssert.h"
#include "include/private/base/SkAttributes.h"
#include "include/private/base/SkContainerAllocator.h"
#include "include/private/base/SkDebug.h"
#include "include/private/base/SkMalloc.h"
#include "include/private/base/SkMath.h"
#include "include/private/base/SkSpan_impl.h"
#include "include/private/base/SkTo.h"
#include "include/private/base/SkTypeTraits.h"  // IWYU pragma: keep

#include <algorithm>
#include <climits>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <initializer_list>
#include <new>
#include <utility>
31
32namespace skia_private {
33/** TArray<T> implements a typical, mostly std::vector-like array.
34 Each T will be default-initialized on allocation, and ~T will be called on destruction.
35
36 MEM_MOVE controls the behavior when a T needs to be moved (e.g. when the array is resized)
37 - true: T will be bit-copied via memcpy.
38 - false: T will be moved via move-constructors.
39*/
40template <typename T, bool MEM_MOVE = sk_is_trivially_relocatable_v<T>> class TArray {
41public:
42 using value_type = T;
43
/**
 * Creates an empty array with no initial storage.
 */
TArray() : fOwnMemory(true), fCapacity{0} {}

/**
 * Creates an empty array that will preallocate space for reserveCount elements.
 */
explicit TArray(int reserveCount) : TArray() { this->reserve_exact(reserveCount); }

/**
 * Copies one array to another. The new array will be heap allocated.
 */
TArray(const TArray& that) : TArray(that.fData, that.fSize) {}
58
// Move constructor. When `that` owns its heap allocation, steal the pointer/capacity
// outright; when `that` uses inline (static) storage, the buffer cannot be stolen, so
// allocate fresh storage and relocate element-by-element.
TArray(TArray&& that) {
    if (that.fOwnMemory) {
        // Adopt `that`'s backing store; leave `that` pointing at nothing.
        this->setData(that);
        that.setData({});
    } else {
        // Inline storage: allocate exactly fSize slots, then relocate each element.
        this->initData(that.fSize);
        that.move(fData);
    }
    this->changeSize(that.fSize);
    that.changeSize(0);
}
70
/**
 * Creates a TArray by copying contents of a standard C array. The new
 * array will be heap allocated. Be careful not to use this constructor
 * when you really want the (void*, int) version.
 *
 * `count` must be non-negative; exactly `count` slots are allocated.
 */
TArray(const T* array, int count) {
    this->initData(count);
    this->copy(array);
}
80
81 /**
82 * Creates a TArray by copying contents from an SkSpan. The new array will be heap allocated.
83 */
85
86 /**
87 * Creates a TArray by copying contents of an initializer list.
88 */
89 TArray(std::initializer_list<T> data) : TArray(data.begin(), data.size()) {}
90
// Copy assignment: destroys existing elements, sizes storage exactly to `that`, then copies.
TArray& operator=(const TArray& that) {
    if (this == &that) {
        return *this;
    }
    this->clear();
    // kExactFit: assignment drops any extra reserved capacity.
    this->checkRealloc(that.size(), kExactFit);
    this->changeSize(that.fSize);
    this->copy(that.fData);
    return *this;
}
101
103 if (this != &that) {
104 this->clear();
105 this->unpoison();
106 that.unpoison();
107 if (that.fOwnMemory) {
108 // The storage is on the heap, so move the data pointer.
109 if (fOwnMemory) {
110 sk_free(fData);
111 }
112
113 fData = std::exchange(that.fData, nullptr);
114
115 // Can't use exchange with bitfields.
116 fCapacity = that.fCapacity;
117 that.fCapacity = 0;
118
119 fOwnMemory = true;
120
121 this->changeSize(that.fSize);
122 } else {
123 // The data is stored inline in that, so move it element-by-element.
124 this->checkRealloc(that.size(), kExactFit);
125 this->changeSize(that.fSize);
126 that.move(fData);
127 }
128 that.changeSize(0);
129 }
130 return *this;
131 }
132
134 this->destroyAll();
135 this->unpoison();
136 if (fOwnMemory) {
137 sk_free(fData);
138 }
139 }
140
/**
 * Resets to size() = n newly constructed T objects and resets any reserve count.
 */
void reset(int n) {
    SkASSERT(n >= 0);
    this->clear();
    // kExactFit: reset() deliberately discards any extra reserved capacity.
    this->checkRealloc(n, kExactFit);
    this->changeSize(n);
    for (int i = 0; i < this->size(); ++i) {
        // Default-initialize each slot in place.
        new (fData + i) T;
    }
}

/**
 * Resets to a copy of a C array and resets any reserve count.
 */
void reset(const T* array, int count) {
    SkASSERT(count >= 0);
    this->clear();
    this->checkRealloc(count, kExactFit);
    this->changeSize(count);
    this->copy(array);
}
164
/**
 * Ensures there is enough reserved space for at least n elements. This is guaranteed at least
 * until the array size grows above n and subsequently shrinks below n, any version of reset()
 * is called, or reserve() is called again.
 */
void reserve(int n) {
    SkASSERT(n >= 0);
    if (n > this->size()) {
        // kGrowing over-allocates so repeated reserve/push cycles amortize.
        this->checkRealloc(n - this->size(), kGrowing);
    }
}

/**
 * Ensures there is enough reserved space for exactly n elements. The same capacity guarantees
 * as above apply.
 */
void reserve_exact(int n) {
    SkASSERT(n >= 0);
    if (n > this->size()) {
        this->checkRealloc(n - this->size(), kExactFit);
    }
}
187
// Removes element n in O(1) by destroying it and relocating the last element into the
// hole. Does NOT preserve element order.
void removeShuffle(int n) {
    SkASSERT(n < this->size());
    int newCount = fSize - 1;
    fData[n].~T();
    if (n != newCount) {
        // Relocate the former last element into slot n.
        this->move(n, newCount);
    }
    this->changeSize(newCount);
}

// Is the array empty.
bool empty() const { return fSize == 0; }
200
201 /**
202 * Adds one new default-initialized T value and returns it by reference. Note that the reference
203 * only remains valid until the next call that adds or removes elements.
204 */
206 void* newT = this->push_back_raw(1);
207 return *new (newT) T;
208 }
209
/**
 * Adds one new T value which is copy-constructed, returning it by reference. As always,
 * the reference only remains valid until the next call that adds or removes elements.
 */
T& push_back(const T& t) {
    this->unpoison();
    T* newT;
    if (this->capacity() > fSize) SK_LIKELY {
        // Copy over the element directly.
        newT = new (fData + fSize) T(t);
    } else {
        // Slow path: grow, constructing the new element before the old buffer is released.
        newT = this->growAndConstructAtEnd(t);
    }

    this->changeSize(fSize + 1);
    return *newT;
}

/**
 * Adds one new T value which is move-constructed, returning it by reference. As always,
 * the reference only remains valid until the next call that adds or removes elements.
 */
T& push_back(T&& t) {
    this->unpoison();
    T* newT;
    if (this->capacity() > fSize) SK_LIKELY {
        // Move over the element directly.
        newT = new (fData + fSize) T(std::move(t));
    } else {
        newT = this->growAndConstructAtEnd(std::move(t));
    }

    this->changeSize(fSize + 1);
    return *newT;
}

/**
 * Constructs a new T at the back of this array, returning it by reference.
 */
template <typename... Args> T& emplace_back(Args&&... args) {
    this->unpoison();
    T* newT;
    if (this->capacity() > fSize) SK_LIKELY {
        // Emplace the new element in directly.
        newT = new (fData + fSize) T(std::forward<Args>(args)...);
    } else {
        newT = this->growAndConstructAtEnd(std::forward<Args>(args)...);
    }

    this->changeSize(fSize + 1);
    return *newT;
}
261
262 /**
263 * Allocates n more default-initialized T values, and returns the address of
264 * the start of that new range. Note: this address is only valid until the
265 * next API call made on the array that might add or remove elements.
266 */
267 T* push_back_n(int n) {
268 SkASSERT(n >= 0);
269 T* newTs = TCast(this->push_back_raw(n));
270 for (int i = 0; i < n; ++i) {
271 new (&newTs[i]) T;
272 }
273 return newTs;
274 }
275
276 /**
277 * Version of above that uses a copy constructor to initialize all n items
278 * to the same T.
279 */
280 T* push_back_n(int n, const T& t) {
281 SkASSERT(n >= 0);
282 T* newTs = TCast(this->push_back_raw(n));
283 for (int i = 0; i < n; ++i) {
284 new (&newTs[i]) T(t);
285 }
286 return static_cast<T*>(newTs);
287 }
288
/**
 * Version of above that uses a copy constructor to initialize the n items
 * to separate T values.
 */
T* push_back_n(int n, const T t[]) {
    SkASSERT(n >= 0);
    this->checkRealloc(n, kGrowing);
    T* end = this->end();
    // Bump the size first; the new slots are constructed immediately below.
    this->changeSize(fSize + n);
    for (int i = 0; i < n; ++i) {
        new (end + i) T(t[i]);
    }
    return end;
}

/**
 * Version of above that uses the move constructor to set n items.
 */
T* move_back_n(int n, T* t) {
    SkASSERT(n >= 0);
    this->checkRealloc(n, kGrowing);
    T* end = this->end();
    this->changeSize(fSize + n);
    for (int i = 0; i < n; ++i) {
        // Source elements are left in a moved-from state.
        new (end + i) T(std::move(t[i]));
    }
    return end;
}
317
318 /**
319 * Removes the last element. Not safe to call when size() == 0.
320 */
321 void pop_back() {
323 fData[fSize - 1].~T();
324 this->changeSize(fSize - 1);
325 }
326
/**
 * Removes the last n elements. Not safe to call when size() < n.
 */
void pop_back_n(int n) {
    SkASSERT(n >= 0);
    SkASSERT(this->size() >= n);
    int i = fSize;
    // Destroy back-to-front, mirroring reverse construction order.
    while (i-- > fSize - n) {
        (*this)[i].~T();
    }
    this->changeSize(fSize - n);
}
339
/**
 * Pushes or pops from the back to resize. Pushes will be default initialized.
 */
void resize_back(int newCount) {
    SkASSERT(newCount >= 0);
    if (newCount > this->size()) {
        if (this->empty()) {
            // When the container is completely empty, grow to exactly the requested size.
            this->checkRealloc(newCount, kExactFit);
        }
        this->push_back_n(newCount - fSize);
    } else if (newCount < this->size()) {
        this->pop_back_n(fSize - newCount);
    }
}
355
/** Swaps the contents of this array with that array. Does a pointer swap if possible,
    otherwise copies the T values. */
void swap(TArray& that) {
    using std::swap;
    if (this == &that) {
        return;
    }
    if (fOwnMemory && that.fOwnMemory) {
        // Both heap-backed: O(1) pointer/size swap.
        swap(fData, that.fData);
        swap(fSize, that.fSize);

        // Can't use swap because fCapacity is a bit field.
        auto allocCount = fCapacity;
        fCapacity = that.fCapacity;
        that.fCapacity = allocCount;
    } else {
        // This could be more optimal...
        // At least one side uses inline storage: fall back to a three-way move.
        TArray copy(std::move(that));
        that = std::move(*this);
        *this = std::move(copy);
    }
}
378
/**
 * Moves all elements of `that` to the end of this array, leaving `that` empty.
 * This is a no-op if `that` is empty or equal to this array.
 */
void move_back(TArray& that) {
    if (that.empty() || &that == this) {
        return;
    }
    void* dst = this->push_back_raw(that.size());
    // After move() returns, the contents of `dst` will have either been in-place initialized
    // using the move constructor (per-item from `that`'s elements), or will have been
    // mem-copied into when MEM_MOVE is true (now valid objects).
    that.move(dst);
    // All items in `that` have either been destroyed (when MEM_MOVE is false) or should be
    // considered invalid (when MEM_MOVE is true). Reset fSize to 0 directly to skip any further
    // per-item destruction.
    that.changeSize(0);
}
397
T* begin() {
    return fData;
}
const T* begin() const {
    return fData;
}

// It's safe to use fData + fSize because if fData is nullptr then adding 0 is
// valid and returns nullptr. See [expr.add] in the C++ standard.
T* end() {
    if (fData == nullptr) {
        SkASSERT(fSize == 0);
    }
    return fData + fSize;
}
const T* end() const {
    if (fData == nullptr) {
        SkASSERT(fSize == 0);
    }
    return fData + fSize;
}
T* data() { return fData; }
const T* data() const { return fData; }
int size() const { return fSize; }
size_t size_bytes() const { return Bytes(fSize); }
// NOTE(review): unchecked size_t -> int narrowing; callers are expected to stay <= INT_MAX.
void resize(size_t count) { this->resize_back((int)count); }

// Destroys all elements and sets size to zero; capacity is retained.
void clear() {
    this->destroyAll();
    this->changeSize(0);
}
429
431 if (!fOwnMemory || fSize == fCapacity) {
432 return;
433 }
434 this->unpoison();
435 if (fSize == 0) {
436 sk_free(fData);
437 fData = nullptr;
438 fCapacity = 0;
439 } else {
440 SkSpan<std::byte> allocation = Allocate(fSize);
441 this->move(TCast(allocation.data()));
442 if (fOwnMemory) {
443 sk_free(fData);
444 }
445 // Poison is applied in `setDataFromBytes`.
446 this->setDataFromBytes(allocation);
447 }
448 }
449
450 /**
451 * Get the i^th element.
452 */
454 return fData[sk_collection_check_bounds(i, this->size())];
455 }
456
457 const T& operator[] (int i) const {
458 return fData[sk_collection_check_bounds(i, this->size())];
459 }
460
461 T& at(int i) { return (*this)[i]; }
462 const T& at(int i) const { return (*this)[i]; }
463
464 /**
465 * equivalent to operator[](0)
466 */
467 T& front() {
469 return fData[0];
470 }
471
472 const T& front() const {
474 return fData[0];
475 }
476
477 /**
478 * equivalent to operator[](size() - 1)
479 */
480 T& back() {
482 return fData[fSize - 1];
483 }
484
485 const T& back() const {
487 return fData[fSize - 1];
488 }
489
/**
 * equivalent to operator[](size()-1-i)
 */
T& fromBack(int i) {
    return (*this)[fSize - i - 1];
}

const T& fromBack(int i) const {
    return (*this)[fSize - i - 1];
}

// Element-wise equality; arrays of different sizes are never equal.
bool operator==(const TArray<T, MEM_MOVE>& right) const {
    int leftCount = this->size();
    if (leftCount != right.size()) {
        return false;
    }
    for (int index = 0; index < leftCount; ++index) {
        if (fData[index] != right.fData[index]) {
            return false;
        }
    }
    return true;
}

bool operator!=(const TArray<T, MEM_MOVE>& right) const {
    return !(*this == right);
}

// Number of elements the current allocation can hold.
int capacity() const {
    return fCapacity;
}
521
522protected:
523 // Creates an empty array that will use the passed storage block until it is insufficiently
524 // large to hold the entire array.
525 template <int InitialCapacity>
527 static_assert(InitialCapacity >= 0);
528 SkASSERT(size >= 0);
529 SkASSERT(storage->get() != nullptr);
530 if (size > InitialCapacity) {
531 this->initData(size);
532 } else {
533 this->setDataFromBytes(*storage);
534 this->changeSize(size);
535
536 // setDataFromBytes always sets fOwnMemory to true, but we are actually using static
537 // storage here, which shouldn't ever be freed.
538 fOwnMemory = false;
539 }
540 }
541
542 // Copy a C array, using pre-allocated storage if preAllocCount >= count. Otherwise, storage
543 // will only be used when array shrinks to fit.
544 template <int InitialCapacity>
546 : TArray{storage, size} {
547 this->copy(array);
548 }
549 template <int InitialCapacity>
551 : TArray{storage, static_cast<int>(data.size())} {
552 this->copy(data.begin());
553 }
554
private:
// Growth factors for checkRealloc.
static constexpr double kExactFit = 1.0;
static constexpr double kGrowing = 1.5;

// Smallest heap allocation, in elements; kept a power of two.
static constexpr int kMinHeapAllocCount = 8;
static_assert(SkIsPow2(kMinHeapAllocCount), "min alloc count not power of two.");

// Note for 32-bit machines kMaxCapacity will be <= SIZE_MAX. For 64-bit machines it will
// just be INT_MAX if the sizeof(T) < 2^32.
static constexpr int kMaxCapacity = SkToInt(std::min(SIZE_MAX / sizeof(T), (size_t)INT_MAX));
566
// Adopts `allocation` as the backing store, clamping the element capacity to kMaxCapacity.
void setDataFromBytes(SkSpan<std::byte> allocation) {
    T* data = TCast(allocation.data());
    // We have gotten extra bytes back from the allocation limit, pin to kMaxCapacity. It
    // would seem like the SkContainerAllocator should handle the divide, but it would have
    // to a full divide instruction. If done here the size is known at compile, and usually
    // can be implemented by a right shift. The full divide takes ~50X longer than the shift.
    size_t size = std::min(allocation.size() / sizeof(T), SkToSizeT(kMaxCapacity));
    this->setData(SkSpan<T>(data, size));
}

// Points this array at `array` (data + capacity) and marks the storage as owned.
void setData(SkSpan<T> array) {
    this->unpoison();

    fData = array.data();
    fCapacity = SkToU32(array.size());
    fOwnMemory = true;

    this->poison();
}

// ASAN bookkeeping: un-poisons the whole capacity so it may be legally accessed.
void unpoison() {
#ifdef SK_SANITIZE_ADDRESS
    if (fData) {
        // SkDebugf("UNPOISONING %p : 0 -> %zu\n", fData, Bytes(fCapacity));
        sk_asan_unpoison_memory_region(this->begin(), Bytes(fCapacity));
    }
#endif
}

// ASAN bookkeeping: poisons the unused tail [fSize, fCapacity) to catch OOB accesses.
void poison() {
#ifdef SK_SANITIZE_ADDRESS
    if (fData && fCapacity > fSize) {
        // SkDebugf(" POISONING %p : %zu -> %zu\n", fData, Bytes(fSize), Bytes(fCapacity));
        sk_asan_poison_memory_region(this->end(), Bytes(fCapacity - fSize));
    }
#endif
}

// Central size mutator; keeps the ASAN poison region in sync with fSize.
void changeSize(int n) {
    this->unpoison();
    fSize = n;
    this->poison();
}
610
611 // We disable Control-Flow Integrity sanitization (go/cfi) when casting item-array buffers.
612 // CFI flags this code as dangerous because we are casting `buffer` to a T* while the buffer's
613 // contents might still be uninitialized memory. When T has a vtable, this is especially risky
614 // because we could hypothetically access a virtual method on fItemArray and jump to an
615 // unpredictable location in memory. Of course, TArray won't actually use fItemArray in this
616 // way, and we don't want to construct a T before the user requests one. There's no real risk
617 // here, so disable CFI when doing these casts.
619 static T* TCast(void* buffer) {
620 return (T*)buffer;
621 }
622
623 static size_t Bytes(int n) {
624 SkASSERT(n <= kMaxCapacity);
625 return SkToSizeT(n) * sizeof(T);
626 }
627
628 static SkSpan<std::byte> Allocate(int capacity, double growthFactor = 1.0) {
629 return SkContainerAllocator{sizeof(T), kMaxCapacity}.allocate(capacity, growthFactor);
630 }
631
// Allocates heap storage for exactly `count` elements and sets the size to count.
void initData(int count) {
    this->setDataFromBytes(Allocate(count));
    this->changeSize(count);
}

// Runs ~T() on every live element; does not free or resize the storage.
void destroyAll() {
    if (!this->empty()) {
        T* cursor = this->begin();
        T* const end = this->end();
        do {
            cursor->~T();
            cursor++;
        } while (cursor < end);
    }
}
647
/** In the following move and copy methods, 'dst' is assumed to be uninitialized raw storage.
 * In the following move methods, 'src' is destroyed leaving behind uninitialized raw storage.
 */
void copy(const T* src) {
    if constexpr (std::is_trivially_copyable_v<T>) {
        if (!this->empty() && src != nullptr) {
            sk_careful_memcpy(fData, src, this->size_bytes());
        }
    } else {
        for (int i = 0; i < this->size(); ++i) {
            new (fData + i) T(src[i]);
        }
    }
}

// Relocates element `src` into slot `dst` within this array: raw memcpy when MEM_MOVE,
// otherwise move-construct then destroy the source.
void move(int dst, int src) {
    if constexpr (MEM_MOVE) {
        memcpy(static_cast<void*>(&fData[dst]),
               static_cast<const void*>(&fData[src]),
               sizeof(T));
    } else {
        new (&fData[dst]) T(std::move(fData[src]));
        fData[src].~T();
    }
}

// Relocates all fSize elements into the raw buffer `dst` (same MEM_MOVE split as above).
void move(void* dst) {
    if constexpr (MEM_MOVE) {
        sk_careful_memcpy(dst, fData, Bytes(fSize));
    } else {
        for (int i = 0; i < this->size(); ++i) {
            new (static_cast<char*>(dst) + Bytes(i)) T(std::move(fData[i]));
            fData[i].~T();
        }
    }
}
684
// Helper function that makes space for n objects, adjusts the count, but does not initialize
// the new objects.
void* push_back_raw(int n) {
    this->checkRealloc(n, kGrowing);
    void* ptr = fData + fSize;
    this->changeSize(fSize + n);
    return ptr;
}

// Grow path for push_back/emplace_back: the new element is constructed into the NEW buffer
// before the old data is relocated/freed, so `args` may safely reference an element of
// this array.
template <typename... Args>
SK_ALWAYS_INLINE T* growAndConstructAtEnd(Args&&... args) {
    SkSpan<std::byte> buffer = this->preallocateNewData(/*delta=*/1, kGrowing);
    T* newT = new (TCast(buffer.data()) + fSize) T(std::forward<Args>(args)...);
    this->installDataAndUpdateCapacity(buffer);

    return newT;
}

// Reallocates (using `growthFactor`) when fewer than `delta` free slots remain.
void checkRealloc(int delta, double growthFactor) {
    SkASSERT(delta >= 0);
    SkASSERT(fSize >= 0);
    SkASSERT(fCapacity >= 0);

    // Check if there are enough remaining allocated elements to satisfy the request.
    if (this->capacity() - fSize < delta) {
        // Looks like we need to reallocate.
        this->installDataAndUpdateCapacity(this->preallocateNewData(delta, growthFactor));
    }
}
714
715 SkSpan<std::byte> preallocateNewData(int delta, double growthFactor) {
716 SkASSERT(delta >= 0);
717 SkASSERT(fSize >= 0);
718 SkASSERT(fCapacity >= 0);
719
720 // Don't overflow fSize or size_t later in the memory allocation. Overflowing memory
721 // allocation really only applies to fSizes on 32-bit machines; on 64-bit machines this
722 // will probably never produce a check. Since kMaxCapacity is bounded above by INT_MAX,
723 // this also checks the bounds of fSize.
724 if (delta > kMaxCapacity - fSize) {
726 }
727 const int newCount = fSize + delta;
728
729 return Allocate(newCount, growthFactor);
730 }
731
// Relocates the existing elements into `allocation`, frees the old heap block (if owned),
// and adopts the new storage as fData/fCapacity.
void installDataAndUpdateCapacity(SkSpan<std::byte> allocation) {
    this->move(TCast(allocation.data()));
    if (fOwnMemory) {
        sk_free(fData);
    }
    this->setDataFromBytes(allocation);
    SkASSERT(fData != nullptr);
}

T* fData{nullptr};
int fSize{0};
// Bit-packed: 1 ownership bit + 31-bit capacity share one uint32_t, keeping the header
// at pointer + two 32-bit words.
uint32_t fOwnMemory : 1;
uint32_t fCapacity : 31;
745};
746
// Non-member swap so ADL (`using std::swap; swap(a, b);`) finds TArray's O(1) member swap.
template <typename T, bool M> static inline void swap(TArray<T, M>& a, TArray<T, M>& b) {
    a.swap(b);
}
750
751// Subclass of TArray that contains a pre-allocated memory block for the array.
752template <int Nreq, typename T, bool MEM_MOVE = sk_is_trivially_relocatable_v<T>>
753class STArray : private SkAlignedSTStorage<SkContainerAllocator::RoundUp<T>(Nreq), T>,
754 public TArray<T, MEM_MOVE> {
755 // We round up the requested array size to the next capacity multiple.
756 // This space would likely otherwise go to waste.
757 static constexpr int N = SkContainerAllocator::RoundUp<T>(Nreq);
758 static_assert(Nreq > 0);
759 static_assert(N >= Nreq);
760
762
763public:
765 : Storage{}
766 , TArray<T, MEM_MOVE>(this) {} // Must use () to avoid confusion with initializer_list
767 // when T=bool because * are convertable to bool.
768
769 STArray(const T* array, int count)
770 : Storage{}
771 , TArray<T, MEM_MOVE>{array, count, this} {}
772
774 : Storage{}
775 , TArray<T, MEM_MOVE>{data, this} {}
776
777 STArray(std::initializer_list<T> data)
778 : STArray{data.begin(), SkToInt(data.size())} {}
779
780 explicit STArray(int reserveCount)
781 : STArray() { this->reserve_exact(reserveCount); }
782
783 STArray(const STArray& that)
784 : STArray() { *this = that; }
785
786 explicit STArray(const TArray<T, MEM_MOVE>& that)
787 : STArray() { *this = that; }
788
790 : STArray() { *this = std::move(that); }
791
793 : STArray() { *this = std::move(that); }
794
795 STArray& operator=(const STArray& that) {
797 return *this;
798 }
799
802 return *this;
803 }
804
806 TArray<T, MEM_MOVE>::operator=(std::move(that));
807 return *this;
808 }
809
811 TArray<T, MEM_MOVE>::operator=(std::move(that));
812 return *this;
813 }
814
815 // Force the use of TArray for data() and size().
816 using TArray<T, MEM_MOVE>::data;
817 using TArray<T, MEM_MOVE>::size;
818};
819} // namespace skia_private
820#endif // SkTArray_DEFINED
int count
Definition: FontMgrTest.cpp:50
static void sk_asan_poison_memory_region(void const volatile *addr, size_t size)
Definition: SkASAN.h:34
static void sk_asan_unpoison_memory_region(void const volatile *addr, size_t size)
Definition: SkASAN.h:41
SK_API void sk_collection_not_empty(bool empty)
Definition: SkAssert.h:175
SK_API T sk_collection_check_bounds(T i, T size)
Definition: SkAssert.h:143
#define SkASSERT(cond)
Definition: SkAssert.h:116
#define SK_LIKELY
Definition: SkAssert.h:27
#define SK_CLANG_NO_SANITIZE(A)
Definition: SkAttributes.h:72
#define SK_ALWAYS_INLINE
Definition: SkAttributes.h:30
SK_SPI void sk_report_container_overflow_and_die()
SK_API void sk_free(void *)
static void * sk_careful_memcpy(void *dst, const void *src, size_t len)
Definition: SkMalloc.h:125
constexpr bool SkIsPow2(T value)
Definition: SkMath.h:51
void swap(sk_sp< T > &a, sk_sp< T > &b)
Definition: SkRefCnt.h:341
constexpr size_t SkToSizeT(S x)
Definition: SkTo.h:31
constexpr int SkToInt(S x)
Definition: SkTo.h:29
constexpr uint32_t SkToU32(S x)
Definition: SkTo.h:26
#define N
Definition: beziers.cpp:19
SkSpan< std::byte > allocate(int capacity, double growthFactor=1.0)
constexpr T * data() const
Definition: SkSpan_impl.h:94
constexpr T * begin() const
Definition: SkSpan_impl.h:90
constexpr size_t size() const
Definition: SkSpan_impl.h:95
STArray(const STArray &that)
Definition: SkTArray.h:783
STArray(SkSpan< const T > data)
Definition: SkTArray.h:773
STArray & operator=(TArray< T, MEM_MOVE > &&that)
Definition: SkTArray.h:810
STArray(STArray &&that)
Definition: SkTArray.h:789
STArray & operator=(const TArray< T, MEM_MOVE > &that)
Definition: SkTArray.h:800
STArray(std::initializer_list< T > data)
Definition: SkTArray.h:777
STArray & operator=(STArray &&that)
Definition: SkTArray.h:805
STArray & operator=(const STArray &that)
Definition: SkTArray.h:795
STArray(int reserveCount)
Definition: SkTArray.h:780
STArray(const TArray< T, MEM_MOVE > &that)
Definition: SkTArray.h:786
STArray(TArray< T, MEM_MOVE > &&that)
Definition: SkTArray.h:792
STArray(const T *array, int count)
Definition: SkTArray.h:769
TArray(const TArray &that)
Definition: SkTArray.h:57
T * push_back_n(int n, const T &t)
Definition: SkTArray.h:280
void reserve(int n)
Definition: SkTArray.h:170
T * push_back_n(int n, const T t[])
Definition: SkTArray.h:293
TArray(SkAlignedSTStorage< InitialCapacity, T > *storage, int size=0)
Definition: SkTArray.h:526
size_t size_bytes() const
Definition: SkTArray.h:422
T * push_back_n(int n)
Definition: SkTArray.h:267
void move_back(TArray &that)
Definition: SkTArray.h:383
int capacity() const
Definition: SkTArray.h:518
void reset(const T *array, int count)
Definition: SkTArray.h:157
bool empty() const
Definition: SkTArray.h:199
void resize_back(int newCount)
Definition: SkTArray.h:343
TArray(TArray &&that)
Definition: SkTArray.h:59
const T * begin() const
Definition: SkTArray.h:401
TArray(SkSpan< const T > data, SkAlignedSTStorage< InitialCapacity, T > *storage)
Definition: SkTArray.h:550
const T * end() const
Definition: SkTArray.h:413
const T & at(int i) const
Definition: SkTArray.h:462
void reset(int n)
Definition: SkTArray.h:144
const T & back() const
Definition: SkTArray.h:485
TArray(const T *array, int count)
Definition: SkTArray.h:76
const T & fromBack(int i) const
Definition: SkTArray.h:497
T & push_back(T &&t)
Definition: SkTArray.h:231
void resize(size_t count)
Definition: SkTArray.h:423
TArray & operator=(TArray &&that)
Definition: SkTArray.h:102
void pop_back_n(int n)
Definition: SkTArray.h:330
const T & front() const
Definition: SkTArray.h:472
T * move_back_n(int n, T *t)
Definition: SkTArray.h:307
T & fromBack(int i)
Definition: SkTArray.h:493
void removeShuffle(int n)
Definition: SkTArray.h:188
TArray(int reserveCount)
Definition: SkTArray.h:52
T & operator[](int i)
Definition: SkTArray.h:453
int size() const
Definition: SkTArray.h:421
TArray(SkSpan< const T > data)
Definition: SkTArray.h:84
T & push_back(const T &t)
Definition: SkTArray.h:214
bool operator!=(const TArray< T, MEM_MOVE > &right) const
Definition: SkTArray.h:514
const T * data() const
Definition: SkTArray.h:420
void swap(TArray &that)
Definition: SkTArray.h:358
void reserve_exact(int n)
Definition: SkTArray.h:181
bool operator==(const TArray< T, MEM_MOVE > &right) const
Definition: SkTArray.h:501
T & emplace_back(Args &&... args)
Definition: SkTArray.h:248
TArray(std::initializer_list< T > data)
Definition: SkTArray.h:89
TArray & operator=(const TArray &that)
Definition: SkTArray.h:91
TArray(const T *array, int size, SkAlignedSTStorage< InitialCapacity, T > *storage)
Definition: SkTArray.h:545
static bool b
struct MyStruct a[10]
G_BEGIN_DECLS G_MODULE_EXPORT FlValue * args
static float min(float r, float g, float b)
Definition: hsl.cpp:48
Definition: copy.py:1
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified vm service A custom Dart VM Service port The default is to pick a randomly available open port disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode disable vm service Disable mDNS Dart VM Service publication Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set endless trace buffer
Definition: switches.h:126
dst
Definition: cp.py:12
static void swap(TArray< T, M > &a, TArray< T, M > &b)
Definition: SkTArray.h:747
#define T
Definition: precompiler.cc:65