#include "src/core/SkMSAN.h"  // sk_msan_mark_initialized()
#include "src/core/SkOpts.h"  // the SkOpts function pointers swapped out below

#if (defined(__x86_64__) || defined(_M_X64)) && !defined(SK_ENABLE_OPTIMIZE_SIZE)

static const char* note = "MSAN can't see that repsto initializes memory.";
#if defined(_MSC_VER)
    #include <intrin.h>

    static inline void repsto(uint16_t* dst, uint16_t v, size_t n) {
        sk_msan_mark_initialized(dst, dst + n, note);
        __stosw(dst, v, n);
    }
    static inline void repsto(uint32_t* dst, uint32_t v, size_t n) {
        sk_msan_mark_initialized(dst, dst + n, note);
        // __stosd() takes unsigned long*, which matches uint32_t* on Windows.
        static_assert(sizeof(uint32_t) == sizeof(unsigned long));
        __stosd(reinterpret_cast<unsigned long*>(dst), v, n);
    }
    static inline void repsto(uint64_t* dst, uint64_t v, size_t n) {
        sk_msan_mark_initialized(dst, dst + n, note);
        __stosq(dst, v, n);
    }
#else
    static inline void repsto(uint16_t* dst, uint16_t v, size_t n) {
        sk_msan_mark_initialized(dst, dst + n, note);
        asm volatile("rep stosw" : "+D"(dst), "+c"(n) : "a"(v) : "memory");
    }
    static inline void repsto(uint32_t* dst, uint32_t v, size_t n) {
        sk_msan_mark_initialized(dst, dst + n, note);
        asm volatile("rep stosl" : "+D"(dst), "+c"(n) : "a"(v) : "memory");
    }
    static inline void repsto(uint64_t* dst, uint64_t v, size_t n) {
        sk_msan_mark_initialized(dst, dst + n, note);
        asm volatile("rep stosq" : "+D"(dst), "+c"(n) : "a"(v) : "memory");
    }
#endif
static void (*g_memset16_prev)     (uint16_t*, uint16_t, int);
static void (*g_memset32_prev)     (uint32_t*, uint32_t, int);
static void (*g_memset64_prev)     (uint64_t*, uint64_t, int);
static void (*g_rect_memset16_prev)(uint16_t*, uint16_t, int, size_t, int);
static void (*g_rect_memset32_prev)(uint32_t*, uint32_t, int, size_t, int);
static void (*g_rect_memset64_prev)(uint64_t*, uint64_t, int, size_t, int);
static bool small(size_t bytes) { return bytes < 1024; }
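// The 1024-byte cutoff above is the crossover point: below it, the fixed cost
// of "rep stos" outweighs its throughput, and the previous memset wins.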
static inline void memset16(uint16_t* dst, uint16_t v, int n) {
    return small(sizeof(v) * n) ? g_memset16_prev(dst, v, n)
                                : repsto(dst, v, n);
}
static inline void memset32(uint32_t* dst, uint32_t v, int n) {
    return small(sizeof(v) * n) ? g_memset32_prev(dst, v, n)
                                : repsto(dst, v, n);
}
static inline void memset64(uint64_t* dst, uint64_t v, int n) {
    return small(sizeof(v) * n) ? g_memset64_prev(dst, v, n)
                                : repsto(dst, v, n);
}
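// The rect_memset* variants fill a height-row rectangle, n pixels per row,
// with consecutive rows starting rowBytes apart, applying the same
// small-vs-repsto dispatch once for the whole rectangle.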
static inline void rect_memset16(uint16_t* dst, uint16_t v, int n,
                                 size_t rowBytes, int height) {
    if (small(sizeof(v) * n)) {
        return g_rect_memset16_prev(dst, v, n, rowBytes, height);
    }
    for (int stride = rowBytes / sizeof(v); height-- > 0; dst += stride) {
        repsto(dst, v, n);
    }
}
static inline void rect_memset32(uint32_t* dst, uint32_t v, int n,
                                 size_t rowBytes, int height) {
    if (small(sizeof(v) * n)) {
        return g_rect_memset32_prev(dst, v, n, rowBytes, height);
    }
    for (int stride = rowBytes / sizeof(v); height-- > 0; dst += stride) {
        repsto(dst, v, n);
    }
}
static inline void rect_memset64(uint64_t* dst, uint64_t v, int n,
                                 size_t rowBytes, int height) {
    if (small(sizeof(v) * n)) {
        return g_rect_memset64_prev(dst, v, n, rowBytes, height);
    }
    for (int stride = rowBytes / sizeof(v); height-- > 0; dst += stride) {
        repsto(dst, v, n);
    }
}

#endif  // (__x86_64__ || _M_X64) && !SK_ENABLE_OPTIMIZE_SIZE
// Install hook (a sketch, assuming the usual SkOpts pattern; only this #if
// line and the referenced declarations were captured): remember the previously
// selected implementations for the small-fill fallback, then point the SkOpts
// function pointers (declared in SkOpts.h) at the repsto-backed versions above.
namespace SkOpts {
    void Init_erms() {
    #if (defined(__x86_64__) || defined(_M_X64)) && !defined(SK_ENABLE_OPTIMIZE_SIZE)
        g_memset16_prev      = memset16;
        g_memset32_prev      = memset32;
        g_memset64_prev      = memset64;
        g_rect_memset16_prev = rect_memset16;
        g_rect_memset32_prev = rect_memset32;
        g_rect_memset64_prev = rect_memset64;

        memset16      = ::memset16;
        memset32      = ::memset32;
        memset64      = ::memset64;
        rect_memset16 = ::rect_memset16;
        rect_memset32 = ::rect_memset32;
        rect_memset64 = ::rect_memset64;
    #endif
    }
}