Flutter Engine
The Flutter Engine
Loading...
Searching...
No Matches
virtual_memory_win.cc
Go to the documentation of this file.
1// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/globals.h"
6#if defined(DART_HOST_OS_WINDOWS)
7
8#include "vm/virtual_memory.h"
9
10#include "platform/assert.h"
11#include "vm/isolate.h"
12#include "vm/os.h"
14
15namespace dart {
16
17DECLARE_FLAG(bool, write_protect_code);
18
19uword VirtualMemory::page_size_ = 0;
20VirtualMemory* VirtualMemory::compressed_heap_ = nullptr;
21
22intptr_t VirtualMemory::CalculatePageSize() {
23 SYSTEM_INFO info;
24 GetSystemInfo(&info);
25 const intptr_t page_size = info.dwPageSize;
26 ASSERT(page_size != 0);
27 ASSERT(Utils::IsPowerOfTwo(page_size));
28 return page_size;
29}
30
31static void* AllocateAlignedImpl(intptr_t size,
32 intptr_t alignment,
33 intptr_t reserved_size,
34 int prot,
35 void** out_reserved_address) {
36 void* address = VirtualAlloc(nullptr, reserved_size, MEM_RESERVE, prot);
37 if (address == nullptr) {
38 return nullptr;
39 }
40
41 void* aligned_address = reinterpret_cast<void*>(
42 Utils::RoundUp(reinterpret_cast<uword>(address), alignment));
43 if (VirtualAlloc(aligned_address, size, MEM_COMMIT, prot) !=
44 aligned_address) {
45 VirtualFree(address, reserved_size, MEM_RELEASE);
46 return nullptr;
47 }
48
49 if (out_reserved_address != nullptr) {
50 *out_reserved_address = address;
51 }
52 return aligned_address;
53}
54
  // NOTE(review): the enclosing definition header (presumably
  // `void VirtualMemory::Init() {`) and the `OS::PrintErr(` opening lines of
  // both warning calls below are missing from this extraction — confirm
  // against the upstream file before relying on this span.
  //
  // Clamp heap-sizing flags that exceed the physically addressable range.
  if (FLAG_old_gen_heap_size < 0 || FLAG_old_gen_heap_size > kMaxAddrSpaceMB) {
        "warning: value specified for --old_gen_heap_size %d is larger than"
        " the physically addressable range, using 0(unlimited) instead.`\n",
        FLAG_old_gen_heap_size);
    // Zero means "unlimited" for the old-gen heap size flag.
    FLAG_old_gen_heap_size = 0;
  }
  if (FLAG_new_gen_semi_max_size < 0 ||
      FLAG_new_gen_semi_max_size > kMaxAddrSpaceMB) {
        "warning: value specified for --new_gen_semi_max_size %d is larger"
        " than the physically addressable range, using %" Pd " instead.`\n",
        FLAG_new_gen_semi_max_size, kDefaultNewGenSemiMaxSize);
    FLAG_new_gen_semi_max_size = kDefaultNewGenSemiMaxSize;
  }
  // Cache the OS page size for all later allocation/alignment math.
  page_size_ = CalculatePageSize();
#if defined(DART_COMPRESSED_POINTERS)
  // Reserve the single large region backing all compressed-pointer
  // allocations up front; failing to obtain it is fatal.
  ASSERT(compressed_heap_ == nullptr);
  compressed_heap_ = Reserve(kCompressedHeapSize, kCompressedHeapAlignment);
  if (compressed_heap_ == nullptr) {
    int error = GetLastError();
    FATAL("Failed to reserve region for compressed heap: %d", error);
  }
  VirtualMemoryCompressedHeap::Init(compressed_heap_->address(),
                                    compressed_heap_->size());
#endif  // defined(DART_COMPRESSED_POINTERS)
}
83
  // NOTE(review): the enclosing definition header (presumably
  // `void VirtualMemory::Cleanup() {`) is missing from this extraction.
#if defined(DART_COMPRESSED_POINTERS)
  // Releases the reservation backing the compressed heap (see Init()).
  delete compressed_heap_;
#endif  // defined(DART_COMPRESSED_POINTERS)
  page_size_ = 0;
#if defined(DART_COMPRESSED_POINTERS)
  compressed_heap_ = nullptr;
  VirtualMemoryCompressedHeap::Cleanup();
#endif  // defined(DART_COMPRESSED_POINTERS)
}
94
// Allocates |size| bytes aligned to |alignment|, optionally from the
// compressed-pointer heap. |name| is not referenced in this body (kept for
// the cross-platform interface). Returns nullptr on failure.
// NOTE(review): one inner line (103) is missing from this extraction in the
// comment/assert region below — confirm against the upstream file.
VirtualMemory* VirtualMemory::AllocateAligned(intptr_t size,
                                              intptr_t alignment,
                                              bool is_executable,
                                              bool is_compressed,
                                              const char* name) {
  // When FLAG_write_protect_code is active, code memory (indicated by
  // is_executable = true) is allocated as non-executable and later
  // changed to executable via VirtualMemory::Protect.
  ASSERT(Utils::IsPowerOfTwo(alignment));
  ASSERT(Utils::IsAligned(alignment, PageSize()));

#if defined(DART_COMPRESSED_POINTERS)
  if (is_compressed) {
    // Executable mappings must never come from the compressed heap.
    RELEASE_ASSERT(!is_executable);
    MemoryRegion region =
        VirtualMemoryCompressedHeap::Allocate(size, alignment);
    if (region.pointer() == nullptr) {
      return nullptr;
    }
    Commit(region.pointer(), region.size());
    // Region and reservation coincide: the compressed heap owns the mapping.
    return new VirtualMemory(region, region);
  }
#endif  // defined(DART_COMPRESSED_POINTERS)

  // Over-reserve so that an |alignment|-aligned run of |size| bytes is
  // guaranteed to exist somewhere inside the reservation.
  intptr_t reserved_size = size + alignment - PageSize();
  int prot = (is_executable && !FLAG_write_protect_code)
                 ? PAGE_EXECUTE_READWRITE
                 : PAGE_READWRITE;

  void* reserved_address;
  void* aligned_address = AllocateAlignedImpl(size, alignment, reserved_size,
                                              prot, &reserved_address);
  if (aligned_address == nullptr) {
    return nullptr;
  }

  // |region| is the usable aligned range; |reserved| tracks the full
  // reservation so the destructor can release it.
  MemoryRegion region(aligned_address, size);
  MemoryRegion reserved(reserved_address, reserved_size);
  return new VirtualMemory(region, reserved);
}
136
// Reserves (without committing) |size| bytes aligned to |alignment|. The
// range is PAGE_NOACCESS until Commit() is called on (parts of) it.
// NOTE(review): one inner line (138, likely an ASSERT) is missing from this
// extraction — confirm against the upstream file.
VirtualMemory* VirtualMemory::Reserve(intptr_t size, intptr_t alignment) {
  ASSERT(Utils::IsPowerOfTwo(alignment));
  ASSERT(Utils::IsAligned(alignment, PageSize()));
  // Over-reserve so an |alignment|-aligned sub-range of |size| bytes exists.
  intptr_t reserved_size = size + alignment - PageSize();
  void* reserved_address =
      VirtualAlloc(nullptr, reserved_size, MEM_RESERVE, PAGE_NOACCESS);
  if (reserved_address == nullptr) {
    return nullptr;
  }

  // First aligned address inside the reservation becomes the usable region.
  void* aligned_address = reinterpret_cast<void*>(
      Utils::RoundUp(reinterpret_cast<uword>(reserved_address), alignment));
  MemoryRegion region(aligned_address, size);
  MemoryRegion reserved(reserved_address, reserved_size);
  return new VirtualMemory(region, reserved);
}
154
// Commits (backs with read-write pages) |size| bytes at |address|; the range
// is expected to lie inside an existing reservation. Failure is fatal.
// NOTE(review): inner lines 156-157 (likely ASSERTs) are missing from this
// extraction — confirm against the upstream file.
void VirtualMemory::Commit(void* address, intptr_t size) {
  void* result = VirtualAlloc(address, size, MEM_COMMIT, PAGE_READWRITE);
  if (result == nullptr) {
    int error = GetLastError();
    FATAL("Failed to commit: %d\n", error);
  }
}
164
// Decommits |size| bytes at |address|, returning the pages to the OS while
// keeping the enclosing reservation mapped. Failure is fatal.
// NOTE(review): inner lines 166-167 (likely ASSERTs) are missing from this
// extraction — confirm against the upstream file.
void VirtualMemory::Decommit(void* address, intptr_t size) {
  bool result = VirtualFree(address, size, MEM_DECOMMIT);
  if (!result) {
    int error = GetLastError();
    FATAL("Failed to decommit: %d\n", error);
  }
}
174
  // NOTE(review): the enclosing definition header (inner line 175, presumably
  // `VirtualMemory::~VirtualMemory() {`) is missing from this extraction.
  //
  // Note that the size of the reserved region might be set to 0 by
  // Truncate(0, true) but that does not actually release the mapping
  // itself. The only way to release the mapping is to invoke VirtualFree
  // with original base pointer and MEM_RELEASE.
#if defined(DART_COMPRESSED_POINTERS)
  // Regions carved out of the compressed heap (other than the heap's own
  // backing reservation) are decommitted and handed back to its allocator.
  if (VirtualMemoryCompressedHeap::Contains(reserved_.pointer()) &&
      (this != compressed_heap_)) {
    Decommit(reserved_.pointer(), reserved_.size());
    VirtualMemoryCompressedHeap::Free(reserved_.pointer(), reserved_.size());
    return;
  }
#endif  // defined(DART_COMPRESSED_POINTERS)
  // Memory this object merely views but does not own is left untouched.
  if (!vm_owns_region()) {
    return;
  }
  // dwSize must be 0 with MEM_RELEASE; the whole original reservation goes.
  if (VirtualFree(reserved_.pointer(), 0, MEM_RELEASE) == 0) {
    FATAL("VirtualFree failed: Error code %d\n", GetLastError());
  }
}
195
196bool VirtualMemory::FreeSubSegment(void* address, intptr_t size) {
197#if defined(DART_COMPRESSED_POINTERS)
198 // Don't free the sub segment if it's managed by the compressed pointer heap.
199 if (VirtualMemoryCompressedHeap::Contains(address)) {
200 return false;
201 }
202#endif // defined(DART_COMPRESSED_POINTERS)
203 if (VirtualFree(address, size, MEM_DECOMMIT) == 0) {
204 FATAL("VirtualFree failed: Error code %d\n", GetLastError());
205 }
206 return true;
207}
208
209void VirtualMemory::Protect(void* address, intptr_t size, Protection mode) {
210#if defined(DEBUG)
211 Thread* thread = Thread::Current();
212 ASSERT(thread == nullptr || thread->IsDartMutatorThread() ||
213 thread->isolate() == nullptr ||
214 thread->isolate()->mutator_thread()->IsAtSafepoint());
215#endif
216 uword start_address = reinterpret_cast<uword>(address);
217 uword end_address = start_address + size;
218 uword page_address = Utils::RoundDown(start_address, PageSize());
219 DWORD prot = 0;
220 switch (mode) {
221 case kNoAccess:
222 prot = PAGE_NOACCESS;
223 break;
224 case kReadOnly:
225 prot = PAGE_READONLY;
226 break;
227 case kReadWrite:
228 prot = PAGE_READWRITE;
229 break;
230 case kReadExecute:
231 prot = PAGE_EXECUTE_READ;
232 break;
234 prot = PAGE_EXECUTE_READWRITE;
235 break;
236 }
237 DWORD old_prot = 0;
238 if (VirtualProtect(reinterpret_cast<void*>(page_address),
239 end_address - page_address, prot, &old_prot) == 0) {
240 FATAL("VirtualProtect failed %d\n", GetLastError());
241 }
242}
243
// Intentionally a no-op on Windows: this implementation does not advise the
// OS about unused pages (cf. the POSIX implementations of DontNeed).
void VirtualMemory::DontNeed(void* address, intptr_t size) {}
245
246} // namespace dart
247
248#endif // defined(DART_HOST_OS_WINDOWS)
static void info(const char *fmt,...) SK_PRINTF_LIKE(1
Definition DM.cpp:213
#define RELEASE_ASSERT(cond)
Definition assert.h:327
void * pointer() const
uword size() const
static void static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
static Thread * Current()
Definition thread.h:361
static constexpr T RoundUp(T x, uintptr_t alignment, uintptr_t offset=0)
Definition utils.h:105
static constexpr T RoundDown(T x, intptr_t alignment)
Definition utils.h:93
static constexpr bool IsAligned(T x, uintptr_t alignment, uintptr_t offset=0)
Definition utils.h:77
static constexpr bool IsPowerOfTwo(T x)
Definition utils.h:61
static void Init()
static VirtualMemory * AllocateAligned(intptr_t size, intptr_t alignment, bool is_executable, bool is_compressed, const char *name)
static void Protect(void *address, intptr_t size, Protection mode)
static intptr_t PageSize()
bool vm_owns_region() const
intptr_t size() const
static void DontNeed(void *address, intptr_t size)
static void Cleanup()
void * address() const
#define ASSERT(E)
#define FATAL(error)
const uint8_t uint32_t uint32_t GError ** error
GAsyncResult * result
#define DECLARE_FLAG(type, name)
Definition flags.h:14
ClipOpAndAA opAA SkRegion region
Definition SkRecords.h:238
const char *const name
const intptr_t kDefaultNewGenSemiMaxSize
Definition globals.h:63
uintptr_t uword
Definition globals.h:501
const intptr_t kMaxAddrSpaceMB
Definition globals.h:49
#define Pd
Definition globals.h:408
WINBASEAPI _Check_return_ _Post_equals_last_error_ DWORD WINAPI GetLastError(VOID)
unsigned long DWORD