freelist_test.cc
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include <memory>

#include "platform/assert.h"
#include "vm/heap/freelist.h"
#include "vm/pointer_tagging.h"
#include "vm/unit_test.h"

namespace dart {

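// Allocates |size| bytes from |free_list|. When |is_protected| is true the
// freshly allocated block is re-protected to read/execute after the
// allocation, mirroring how allocations from write-protected (executable)
// heap pages are handed out.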
static uword Allocate(FreeList* free_list, intptr_t size, bool is_protected) {
  uword result = free_list->TryAllocate(size, is_protected);
  if ((result != 0u) && is_protected) {
    VirtualMemory::Protect(reinterpret_cast<void*>(result), size,
                           VirtualMemory::kReadExecute);
  }
  return result;
}

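// Returns a block to |free_list|. When |is_protected| is true the block is
// temporarily made writable so that the free-list bookkeeping can be written
// into it, and write protection is restored afterwards.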
static void Free(FreeList* free_list,
                 uword address,
                 intptr_t size,
                 bool is_protected) {
  if (is_protected) {
    VirtualMemory::Protect(reinterpret_cast<void*>(address), size,
                           VirtualMemory::kReadWrite);
  }
  free_list->Free(address, size);
  if (is_protected) {
    VirtualMemory::Protect(reinterpret_cast<void*>(address), size,
                           VirtualMemory::kReadExecute);
  }
}

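// Exercises splitting and reuse of free-list elements: carves a region into
// small, medium and large allocations, frees and re-allocates some of them,
// and checks that each allocation lands at the expected address. With
// |is_protected| set, the same scenario runs on a write-protected region.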
static void TestFreeList(VirtualMemory* region,
                         FreeList* free_list,
                         bool is_protected) {
  const intptr_t kSmallObjectSize = 4 * kWordSize;
  const intptr_t kMediumObjectSize = 16 * kWordSize;
  const intptr_t kLargeObjectSize = 8 * KB;
  uword blob = region->start();
  // Enqueue the large blob as one free block.
  free_list->Free(blob, region->size());

  if (is_protected) {
    // Write protect the whole region.
    region->Protect(VirtualMemory::kReadExecute);
  }

  // Allocate a small object. Expect it to be positioned as the first element.
  uword small_object = Allocate(free_list, kSmallObjectSize, is_protected);
  EXPECT_EQ(blob, small_object);
  // Freeing and allocating should give us the same memory back.
  Free(free_list, small_object, kSmallObjectSize, is_protected);
  small_object = Allocate(free_list, kSmallObjectSize, is_protected);
  EXPECT_EQ(blob, small_object);
  // Split the remainder further with small and medium objects.
  uword small_object2 = Allocate(free_list, kSmallObjectSize, is_protected);
  EXPECT_EQ(blob + kSmallObjectSize, small_object2);
  uword med_object = Allocate(free_list, kMediumObjectSize, is_protected);
  EXPECT_EQ(small_object2 + kSmallObjectSize, med_object);
  // Allocate a large object.
  uword large_object = Allocate(free_list, kLargeObjectSize, is_protected);
  EXPECT_EQ(med_object + kMediumObjectSize, large_object);
  // Make sure that small objects can still split the remainder.
  uword small_object3 = Allocate(free_list, kSmallObjectSize, is_protected);
  EXPECT_EQ(large_object + kLargeObjectSize, small_object3);
  // Split the large object.
  Free(free_list, large_object, kLargeObjectSize, is_protected);
  uword small_object4 = Allocate(free_list, kSmallObjectSize, is_protected);
  EXPECT_EQ(large_object, small_object4);
  // Get the full remainder of the large object.
  large_object =
      Allocate(free_list, kLargeObjectSize - kSmallObjectSize, is_protected);
  EXPECT_EQ(small_object4 + kSmallObjectSize, large_object);
  // Get another large object from the large unallocated remainder.
  uword large_object2 = Allocate(free_list, kLargeObjectSize, is_protected);
  EXPECT_EQ(small_object3 + kSmallObjectSize, large_object2);
}

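// Run the scenario above once on a plain writable region and once (in
// FreeListProtected below) on a write-protected region.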
TEST_CASE(FreeList) {
  FreeList* free_list = new FreeList();
  const intptr_t kBlobSize = 1 * MB;
  VirtualMemory* region = VirtualMemory::Allocate(
      kBlobSize, /*is_executable=*/false, /*is_compressed=*/false, "test");

  TestFreeList(region, free_list, false);

  // Delete the memory associated with the test.
  delete region;
  delete free_list;
}

TEST_CASE(FreeListProtected) {
  FreeList* free_list = new FreeList();
  const intptr_t kBlobSize = 1 * MB;
  VirtualMemory* region = VirtualMemory::Allocate(
      kBlobSize, /*is_executable=*/false, /*is_compressed=*/false, "test");

  TestFreeList(region, free_list, true);

  // Delete the memory associated with the test.
  delete region;
  delete free_list;
}

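// Fills an entire write-protected region with minimal two-word objects,
// verifies that allocation fails once the region is exhausted, and then frees
// everything again.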
TEST_CASE(FreeListProtectedTinyObjects) {
  FreeList* free_list = new FreeList();
  const intptr_t kBlobSize = 1 * MB;
  const intptr_t kObjectSize = 2 * kWordSize;
  uword* objects = new uword[kBlobSize / kObjectSize];

  VirtualMemory* blob = VirtualMemory::Allocate(
      kBlobSize, /*is_executable=*/false, /*is_compressed=*/false, "test");
  ASSERT(Utils::IsAligned(blob->start(), 4096));
  blob->Protect(VirtualMemory::kReadWrite);

  // Enqueue the large blob as one free block.
  free_list->Free(blob->start(), blob->size());

  // Write protect the whole region.
  blob->Protect(VirtualMemory::kReadExecute);

  // Allocate small objects.
  for (intptr_t i = 0; i < blob->size() / kObjectSize; i++) {
    objects[i] = Allocate(free_list, kObjectSize, true);  // is_protected
  }

  // All space is occupied. Expect failed allocation.
  ASSERT(Allocate(free_list, kObjectSize, true) == 0);

  // Free all objects again. Make the whole region writable for this.
  blob->Protect(VirtualMemory::kReadWrite);
  for (intptr_t i = 0; i < blob->size() / kObjectSize; i++) {
    free_list->Free(objects[i], kObjectSize);
  }

  // Delete the memory associated with the test.
  delete blob;
  delete free_list;
  delete[] objects;
}

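// Allocates and frees blocks of several different sizes on a write-protected
// region so that the free list holds more than one element, then allocates a
// block that almost exactly fills one of the freed elements.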
TEST_CASE(FreeListProtectedVariableSizeObjects) {
  FreeList* free_list = new FreeList();
  const intptr_t kBlobSize = 16 * KB;
  ASSERT(kBlobSize >= VirtualMemory::PageSize());
  const intptr_t kMinSize = 2 * kWordSize;
  uword* objects = new uword[kBlobSize / kMinSize];
  for (intptr_t i = 0; i < kBlobSize / kMinSize; ++i) {
    objects[i] = 0;
  }

  VirtualMemory* blob = VirtualMemory::Allocate(
      kBlobSize, /*is_executable=*/false, /*is_compressed=*/false, "test");
  blob->Protect(VirtualMemory::kReadWrite);

  // Enqueue the large blob as one free block.
  free_list->Free(blob->start(), blob->size());

  // Write protect the whole region.
  blob->Protect(VirtualMemory::kReadExecute);

  // Allocate and free objects so that the free list has more than one element.
  uword e0 = Allocate(free_list, 2 * KB, true);
  ASSERT(e0);
  uword e1 = Allocate(free_list, 6 * KB, true);
  ASSERT(e1);
  uword e2 = Allocate(free_list, 4 * KB, true);
  ASSERT(e2);
  uword e3 = Allocate(free_list, 4 * KB, true);
  ASSERT(e3);

  Free(free_list, e1, 6 * KB, true);
  Free(free_list, e2, 4 * KB, true);
  e0 = Allocate(free_list, 6 * KB - 2 * kWordSize, true);
  ASSERT(e0);

  // Delete the memory associated with the test.
  delete blob;
  delete free_list;
  delete[] objects;
}

static void TestRegress38528(intptr_t header_overlap) {
  // Test the following scenario.
  //
  //   | <------------ free list element -----------------> |
  //   | <allocated code> | <header> | <remainder - header> | <other code> |
  //                                ^
  //          page boundary around here, depending on header_overlap
  //
  // It is important that after the allocation has been re-protected, the
  // "<other code>" region is also still executable (and not writable).
  std::unique_ptr<FreeList> free_list(new FreeList());
  const intptr_t page = VirtualMemory::PageSize();
  std::unique_ptr<VirtualMemory> blob(VirtualMemory::Allocate(
      2 * page,
      /*is_executable=*/true, /*is_compressed=*/false, "test"));
  const intptr_t remainder_size = page / 2;
  const intptr_t alloc_size = page - header_overlap * kObjectAlignment;
  void* const other_code =
      reinterpret_cast<void*>(blob->start() + alloc_size + remainder_size);

  // Load a simple function into the "other code" section which just returns.
  // This is used to ensure that it's still executable.
#if defined(HOST_ARCH_X64) || defined(HOST_ARCH_IA32)
  const uint8_t ret[1] = {0xC3};                    // ret
#elif defined(HOST_ARCH_ARM)
  const uint8_t ret[4] = {0x1e, 0xff, 0x2f, 0xe1};  // bx lr
#elif defined(HOST_ARCH_ARM64)
  const uint8_t ret[4] = {0xc0, 0x03, 0x5f, 0xd6};  // ret
#elif defined(HOST_ARCH_RISCV32) || defined(HOST_ARCH_RISCV64)
  const uint8_t ret[2] = {0x82, 0x80};              // c.ret
#else
#error "Unknown architecture."
#endif
  memcpy(other_code, ret, sizeof(ret));  // NOLINT

  free_list->Free(blob->start(), alloc_size + remainder_size);
  blob->Protect(VirtualMemory::kReadExecute);  // not writable
  Allocate(free_list.get(), alloc_size, /*is_protected=*/true);
  VirtualMemory::Protect(blob->address(), alloc_size,
                         VirtualMemory::kReadWrite);

  reinterpret_cast<void (*)()>(other_code)();
}

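// Sweep header_overlap over a few object-alignment units so that the header
// of the free-list element lands just before, exactly on, and just after the
// page boundary.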
TEST_CASE(Regress38528) {
  for (const intptr_t i : {-2, -1, 0, 1, 2}) {
    TestRegress38528(i);
  }
}

}  // namespace dart