SkDEBUGF("Convert error: Invalid utf16 input");
SkDEBUGF("Convert error: Invalid utf8 input");
return std::u16string();
return std::u16string((char16_t *)utf16.data(), utf16Units);
SkDEBUGCODE(SK_SPI) SkThreadID SkGetThreadID()
static std::u16string convertUtf8ToUtf16(const char *utf8, int utf8Units)
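A minimal sketch of how a helper with this signature can be built on the SkUTF routines declared at the end of this section, assuming the two-pass convention (a null destination sizes the output, a negative return flags bad input) and std::vector scratch storage; the include paths are assumptions and may differ per checkout.

#include <cstdint>
#include <string>
#include <vector>
#include "src/base/SkUTF.h"                // assumed path for SkUTF::UTF8ToUTF16
#include "include/private/base/SkDebug.h"  // assumed path for SkDEBUGF

static std::u16string convertUtf8ToUtf16(const char* utf8, int utf8Units) {
    // Sizing pass: a null destination returns the required UTF-16 unit count,
    // or a negative value if the UTF-8 input is malformed.
    int utf16Units = SkUTF::UTF8ToUTF16(nullptr, 0, utf8, utf8Units);
    if (utf16Units < 0) {
        SkDEBUGF("Convert error: Invalid utf8 input");
        return std::u16string();
    }
    // Conversion pass into scratch storage sized by the first call.
    std::vector<uint16_t> utf16(utf16Units);
    SkUTF::UTF8ToUTF16(utf16.data(), utf16Units, utf8, utf8Units);
    return std::u16string((char16_t*)utf16.data(), utf16Units);
}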
static bool hasSoftLineBreakFlag(SkUnicode::CodeUnitFlags flags)
static bool hasTabulationFlag(SkUnicode::CodeUnitFlags flags)
static bool hasGraphemeStartFlag(SkUnicode::CodeUnitFlags flags)
static bool hasControlFlag(SkUnicode::CodeUnitFlags flags)
static bool hasPartOfWhiteSpaceBreakFlag(SkUnicode::CodeUnitFlags flags)
static bool hasHardLineBreakFlag(SkUnicode::CodeUnitFlags flags)
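These predicates are presumably thin bit tests over SkUnicode::CodeUnitFlags. A sketch of one of them follows; the kSoftLineBreakBefore enumerator name and the int casts (which sidestep whether bitwise operators are defined for the enum) are assumptions.

static bool hasSoftLineBreakFlag(SkUnicode::CodeUnitFlags flags) {
    // Test a single assumed flag bit; the sibling predicates would differ
    // only in which enumerator they check.
    return (static_cast<int>(flags) &
            static_cast<int>(SkUnicode::CodeUnitFlags::kSoftLineBreakBefore)) != 0;
}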
static SkString convertUtf16ToUtf8(const char16_t *utf16, int utf16Units)
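The reverse direction can be sketched the same way, assuming SkUTF::UTF16ToUTF8 and SkString's (const char*, size_t) constructor; this is an illustration, not the library's actual body.

#include "include/core/SkString.h"  // assumed path for SkString

static SkString convertUtf16ToUtf8(const char16_t* utf16, int utf16Units) {
    // Sizing pass over the UTF-16 input; a negative result means malformed input.
    int utf8Units = SkUTF::UTF16ToUTF8(nullptr, 0, (const uint16_t*)utf16, utf16Units);
    if (utf8Units < 0) {
        SkDEBUGF("Convert error: Invalid utf16 input");
        return SkString();
    }
    std::vector<char> utf8(utf8Units);
    SkUTF::UTF16ToUTF8(utf8.data(), utf8Units, (const uint16_t*)utf16, utf16Units);
    return SkString(utf8.data(), utf8Units);
}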
FlutterSemanticsFlag flags
SK_SPI int UTF8ToUTF16(uint16_t dst[], int dstCapacity, const char src[], size_t srcByteLength)
SK_SPI int UTF16ToUTF8(char dst[], int dstCapacity, const uint16_t src[], size_t srcLength)
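A small usage sketch of the two declarations above, assuming they sit in the SkUTF namespace; the round trip relies on the hypothetical helpers sketched earlier in this section.

#include <cstring>
#include <string>

// Round-trip a UTF-8 string through UTF-16 and back using the assumed
// two-pass helpers above; SkASSERT only fires in debug builds.
static void roundTripExample() {
    const char* text = "Skia";
    std::u16string wide = convertUtf8ToUtf16(text, (int)strlen(text));
    SkString narrow = convertUtf16ToUtf8(wide.data(), (int)wide.size());
    SkASSERT(narrow.equals(text));
}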