RelaxedAtomic<intptr_t> Zone::total_size_ = {0};

intptr_t size() const { return size_; }

uword address(intptr_t n) { return reinterpret_cast<uword>(this) + n; }

DISALLOW_IMPLICIT_CONSTRUCTORS(Segment);
// Zone::Segment::New:
if (size == kSegmentSize) {
  // ... (try to reuse a cached segment)
}
bool executable = false;
bool compressed = false;
// ...
total_size_.fetch_add(size);
// ...
memset(reinterpret_cast<void*>(result), kZapUninitializedByte, size);
// ...
result->alignment_ = nullptr;
// Zone::Segment::DeleteSegmentList:
while (current != nullptr) {
  // ...
  memset(reinterpret_cast<void*>(current), kZapDeletedByte, current->size());
  // ...
  if (size == kSegmentSize) {
    // ... (return the segment to the cache)
  }
  if (memory != nullptr) {
    // ...
  }
  // ...
}
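The (size == kSegmentSize) branches in Segment::New and Segment::DeleteSegmentList consult a small cache of standard-sized virtual-memory segments (segment_cache, segment_cache_size, segment_cache_mutex, and kSegmentCacheCapacity in the references further down), so the common segment size can be recycled without another round trip to the OS. Below is a minimal standalone sketch of that idea; it uses malloc/free and std::mutex in place of the VM's VirtualMemory and Mutex, and the SegmentCache name, block size, and capacity are illustrative assumptions rather than the VM's values.

#include <cstdlib>
#include <mutex>

// Simplified model of the segment cache: fixed-size blocks are returned to a
// small LIFO cache instead of being freed, and reused on the next allocation.
class SegmentCache {
 public:
  static constexpr size_t kSegmentSize = 64 * 1024;  // assumed block size
  static constexpr int kCacheCapacity = 16;          // assumed cache capacity

  ~SegmentCache() {
    for (int i = 0; i < cache_size_; i++) {
      std::free(cache_[i]);
    }
  }

  void* Allocate() {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      if (cache_size_ > 0) {
        return cache_[--cache_size_];  // reuse a previously freed block
      }
    }
    return std::malloc(kSegmentSize);  // cache empty: go to the system
  }

  void Free(void* block) {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      if (cache_size_ < kCacheCapacity) {
        cache_[cache_size_++] = block;  // keep the block for reuse
        return;
      }
    }
    std::free(block);  // cache full: actually release the memory
  }

 private:
  std::mutex mutex_;
  void* cache_[kCacheCapacity] = {};
  int cache_size_ = 0;
};

The VM's version differs in the details (it caches VirtualMemory objects and updates total_size_), but the allocate-or-reuse / cache-or-free structure mirrors the fragments above.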
// Zone constructor:
: position_(reinterpret_cast<uword>(&buffer_)),
  limit_(position_ + kInitialChunkSize),
// ...
memset(&buffer_, kZapUninitializedByte, kInitialChunkSize);

// Zone tear-down / reset:
if (FLAG_trace_zones) {
  // ...
}
memset(&buffer_, kZapDeletedByte, kInitialChunkSize);
// ...
position_ = reinterpret_cast<uword>(&buffer_);
limit_ = position_ + kInitialChunkSize;
// ...
small_segment_capacity_ = 0;
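position_ and limit_ delimit the chunk the zone is currently handing out memory from; resetting them to the inline buffer_ and its kInitialChunkSize, as above, returns the zone to its empty state. The following self-contained sketch shows the general bump-allocation pattern this pair implements; the chunk size, the malloc backing, and the BumpArena name are assumptions for illustration, not the VM's implementation.

#include <cstdint>
#include <cstdlib>

// Sketch of a bump allocator: position_ and limit_ bound the current chunk;
// when a request does not fit, a new chunk is grabbed (here with malloc) and
// chained so that everything can be released at once in the destructor.
class BumpArena {
 public:
  static constexpr size_t kAlignment = 8;
  static constexpr size_t kChunkSize = 64 * 1024;  // assumed chunk size

  ~BumpArena() {
    Chunk* c = chunks_;
    while (c != nullptr) {
      Chunk* next = c->next;
      std::free(c);
      c = next;
    }
  }

  void* Alloc(size_t size) {
    size = RoundUp(size, kAlignment);
    if (size > limit_ - position_) {
      Expand(size);
    }
    void* result = reinterpret_cast<void*>(position_);
    position_ += size;  // the fast path is just this pointer bump
    return result;
  }

 private:
  struct Chunk {
    Chunk* next;
  };

  static uintptr_t RoundUp(uintptr_t x, uintptr_t alignment) {
    return (x + alignment - 1) & ~(alignment - 1);
  }

  void Expand(size_t size) {
    size_t chunk_size = RoundUp(sizeof(Chunk) + kAlignment + size, kChunkSize);
    Chunk* chunk = static_cast<Chunk*>(std::malloc(chunk_size));
    chunk->next = chunks_;
    chunks_ = chunk;
    position_ = RoundUp(reinterpret_cast<uintptr_t>(chunk) + sizeof(Chunk),
                        kAlignment);
    limit_ = reinterpret_cast<uintptr_t>(chunk) + chunk_size;
  }

  Chunk* chunks_ = nullptr;
  uintptr_t position_ = 0;
  uintptr_t limit_ = 0;
};

Allocation is a pointer bump in the common case and freeing is wholesale, which is what makes zone allocation cheap for short-lived, scoped allocations.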
// Zone::CapacityInBytes:
uintptr_t size = kInitialChunkSize;
for (Segment* s = segments_; s != nullptr; s = s->next()) {
  // ...
}

// Dumping zone sizes (segments, scoped handles, zone handles):
intptr_t total_size = segment_size + scoped_handle_size + zone_handle_size;
// ...
             ", zone_handles: %" Pd ", total: %" Pd ")\n",
             this, segment_size, scoped_handle_size, zone_handle_size,
             total_size);
uword Zone::AllocateExpand(intptr_t size) {
  if (FLAG_trace_zones) {
    // ...
                 reinterpret_cast<intptr_t>(this));
  }
  // ...
  intptr_t free_size = (limit_ - position_);
  // ...
  if (size > max_size) {
    return AllocateLargeSegment(size);
  }

  const intptr_t kSuperPageSize = 2 * MB;
  // ...
  if (small_segment_capacity_ < kSuperPageSize) {
    // Grow linearly while the zone is still small.
    next_size = kSegmentSize;
  } else {
    // Grow geometrically once it is large, rounding up to the super-page size.
    next_size = Utils::RoundUp(small_segment_capacity_ >> 3, kSuperPageSize);
  }
  ASSERT(next_size >= kSegmentSize);
  // ...
  small_segment_capacity_ += next_size;
  // ...
  limit_ = segments_->end();
  // ...
  ASSERT(position_ <= limit_);
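The growth policy above is: while the combined capacity of small segments is below the 2 MB super-page size, each expansion adds one fixed-size segment; beyond that, the next segment is one eighth of the current small-segment capacity, rounded up to a multiple of the super page. A small self-contained sketch of that arithmetic follows; the 64 KB value for kSegmentSize is an assumption used only for illustration, while the 2 MB super page and the >> 3 factor come from the code above.

#include <cstdint>
#include <cstdio>

namespace {

constexpr int64_t KB = 1024;
constexpr int64_t MB = 1024 * KB;
constexpr int64_t kSegmentSize = 64 * KB;   // assumed small-segment size
constexpr int64_t kSuperPageSize = 2 * MB;  // matches the constant above

int64_t RoundUp(int64_t x, int64_t alignment) {
  return ((x + alignment - 1) / alignment) * alignment;
}

// Mirrors the next_size computation in Zone::AllocateExpand above.
int64_t NextSegmentSize(int64_t small_segment_capacity) {
  if (small_segment_capacity < kSuperPageSize) {
    return kSegmentSize;  // linear growth while the zone is small
  }
  // Geometric growth: 1/8 of current capacity, in super-page multiples.
  return RoundUp(small_segment_capacity >> 3, kSuperPageSize);
}

}  // namespace

int main() {
  int64_t capacity = 0;
  for (int step = 0; step < 40; step++) {
    const int64_t next = NextSegmentSize(capacity);
    capacity += next;
    std::printf("expand %2d: next = %6lld KB, capacity = %8lld KB\n", step,
                static_cast<long long>(next / KB),
                static_cast<long long>(capacity / KB));
  }
  return 0;
}

Capping each step at one eighth of the current capacity bounds the memory wasted by the last, partially used segment to roughly 12.5%.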
uword Zone::AllocateLargeSegment(intptr_t size) {
  // ...
  intptr_t free_size = (limit_ - position_);
// Zone::MakeCopyOfString:
intptr_t len = strlen(str) + 1;  // '\0'-terminated.

// Zone::MakeCopyOfStringN:
for (intptr_t i = 0; i < len; i++) {
  if (str[i] == '\0') {
    // ...
  }
}
char* copy = Alloc<char>(len + 1);

// Zone::ConcatStrings:
intptr_t a_len = (a == nullptr) ? 0 : strlen(a);
const intptr_t b_len = strlen(b) + 1;
const intptr_t len = a_len + b_len;
// ...
strncpy(copy, a, a_len);
// ...
strncpy(&copy[a_len], b, b_len);
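The sizes computed above give the ConcatStrings buffer a_len bytes for a (without its terminator), b_len bytes for b (with its terminator), plus one byte for the join character, which defaults to ',' per the declaration in the references below. A standalone model of the same sizing and copying, with malloc standing in for the zone allocator and an illustrative function name:

#include <cstdlib>
#include <cstring>

// Standalone model of the concatenation above: joins a and b with a separator,
// allocating with malloc instead of the zone. Illustrative only.
char* ConcatWithJoin(const char* a, const char* b, char join = ',') {
  size_t a_len = (a == nullptr) ? 0 : std::strlen(a);
  const size_t b_len = std::strlen(b) + 1;  // includes the terminating '\0'
  char* copy = static_cast<char*>(std::malloc(a_len + b_len + 1));  // +1: join
  if (a_len > 0) {
    std::memcpy(copy, a, a_len);
    copy[a_len++] = join;  // insert the separator between a and b
  }
  std::memcpy(&copy[a_len], b, b_len);  // copies b including its '\0'
  return copy;
}

With a == nullptr (or empty) the separator is skipped and the result is simply a copy of b; otherwise ConcatWithJoin("isolate", "main") yields "isolate,main".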
// Zone::VisitObjectPointers:
while (zone != nullptr) {
  // ...
  zone = zone->previous_;
}
#if defined(DART_USE_ABSL)
// ...
#endif

// StackZone::StackZone(ThreadState* thread):
if (FLAG_trace_zones) {
  // ...
               reinterpret_cast<intptr_t>(this),
               reinterpret_cast<intptr_t>(GetZone()));
}
// ...
Zone* lzone = GetZone();
lzone->Link(thread->zone());
thread->set_zone(lzone);

// StackZone::~StackZone():
Zone* lzone = GetZone();
ASSERT(thread()->zone() == lzone);
thread()->set_zone(lzone->previous_);
if (FLAG_trace_zones) {
  // ...
               reinterpret_cast<intptr_t>(this),
               reinterpret_cast<intptr_t>(lzone));
}

#if defined(DART_USE_ABSL)
// ...
#endif
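The constructor/destructor pair above is an RAII push/pop over the thread's zone chain: Link records the previous head in previous_, set_zone installs the new zone, and the destructor restores the previous one (which is also what lets VisitObjectPointers walk every live zone). A minimal standalone model of that chaining follows; the Zone, ThreadState, and ScopedZone types here are simplified stand-ins, not the VM's classes.

#include <cassert>

// Simplified stand-ins for the VM's zone and thread state.
struct Zone {
  Zone* previous_ = nullptr;
  void Link(Zone* previous) { previous_ = previous; }
};

struct ThreadState {
  Zone* zone_ = nullptr;
  Zone* zone() const { return zone_; }
  void set_zone(Zone* z) { zone_ = z; }
};

// RAII scope that pushes a zone on construction and pops it on destruction,
// mirroring the StackZone constructor/destructor fragments shown above.
class ScopedZone {
 public:
  explicit ScopedZone(ThreadState* thread) : thread_(thread) {
    zone_.Link(thread->zone());  // remember the previous top of the chain
    thread->set_zone(&zone_);    // make this zone the current one
  }
  ~ScopedZone() {
    assert(thread_->zone() == &zone_);
    thread_->set_zone(zone_.previous_);  // restore the previous zone
  }

 private:
  ThreadState* thread_;
  Zone zone_;
};

int main() {
  ThreadState thread;
  {
    ScopedZone outer(&thread);
    {
      ScopedZone inner(&thread);
      assert(thread.zone()->previous_ != nullptr);  // chained to the outer zone
    }
    // inner popped; the outer zone is current again.
  }
  assert(thread.zone() == nullptr);
  return 0;
}

Nesting works because each scope only restores the zone it replaced, so unwinding in reverse order rebuilds the original chain.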
#define ASAN_UNPOISON(ptr, len)
intptr_t ZoneHandlesCapacityInBytes() const
intptr_t ScopedHandlesCapacityInBytes() const
static void PrintErr(const char *format, ...) PRINTF_ATTRIBUTE(1, 2)
static char * VSCreate(Zone *zone, const char *format, va_list args)
T fetch_sub(T arg, std::memory_order order=std::memory_order_relaxed)
StackZone(ThreadState *thread)
static Thread * Current()
static constexpr T RoundUp(T x, uintptr_t alignment, uintptr_t offset=0)
static constexpr T RoundDown(T x, intptr_t alignment)
static constexpr bool IsAligned(T x, uintptr_t alignment, uintptr_t offset=0)
void VisitObjectPointers(ObjectPointerVisitor *visitor)
static intptr_t PageSize()
static VirtualMemory * Allocate(intptr_t size, bool is_executable, bool is_compressed, const char *name)
static void DeleteSegmentList(Segment *segment)
static Segment * New(intptr_t size, Segment *next)
VirtualMemory * memory() const
char * PrintToString(const char *format, ...) PRINTF_ATTRIBUTE(2, 3)
uintptr_t CapacityInBytes() const
uintptr_t SizeInBytes() const
char * MakeCopyOfStringN(const char *str, intptr_t len)
static constexpr intptr_t kAlignment
char * MakeCopyOfString(const char *str)
char * ConcatStrings(const char *a, const char *b, char join=',')
char * VPrint(const char *format, va_list args)
#define LSAN_UNREGISTER_ROOT_REGION(ptr, len)
#define LSAN_REGISTER_ROOT_REGION(ptr, len)
static intptr_t segment_cache_size
static VirtualMemory * segment_cache[kSegmentCacheCapacity]
static constexpr intptr_t kSegmentCacheCapacity
static Mutex * segment_cache_mutex