Flutter Engine
The Flutter Engine
relocation_test.cc
Go to the documentation of this file.
1// Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
#include "platform/assert.h"

#include "vm/allocation.h"
#include "vm/code_patcher.h"
#include "vm/compiler/assembler/assembler.h"
#include "vm/compiler/relocation.h"
#include "vm/instructions.h"
#include "vm/longjump.h"
#include "vm/unit_test.h"
14
15#define __ assembler->
16
17namespace dart {
18
19#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
20
21DECLARE_FLAG(int, lower_pc_relative_call_distance);
22DECLARE_FLAG(int, upper_pc_relative_call_distance);
23
24struct RelocatorTestHelper {
25 const intptr_t kTrampolineSize =
28
29 // The callers on arm/arm64 have to save LR before calling, so the call
30 // instruction will be 4 byte sinto the instruction stream.
31#if defined(TARGET_ARCH_ARM64)
32 static constexpr intptr_t kOffsetOfCall = 4;
33#elif defined(TARGET_ARCH_ARM)
34 static constexpr intptr_t kOffsetOfCall = 4;
35#elif defined(TARGET_ARCH_RISCV32)
36 static constexpr intptr_t kOffsetOfCall = 4;
37#elif defined(TARGET_ARCH_RISCV64)
38 static constexpr intptr_t kOffsetOfCall = 4;
39#else
40 static constexpr intptr_t kOffsetOfCall = 0;
41#endif
42
43 explicit RelocatorTestHelper(Thread* thread)
44 : thread(thread),
45 locker(thread, thread->isolate_group()->program_lock()),
46 safepoint_and_growth_scope(thread, SafepointLevel::kGC) {
47 // So the relocator uses the correct instruction size layout.
48 FLAG_precompiled_mode = true;
49
50 FLAG_lower_pc_relative_call_distance = -128;
51 FLAG_upper_pc_relative_call_distance = 128;
52 }
53 ~RelocatorTestHelper() { FLAG_precompiled_mode = false; }
54
55 void CreateInstructions(std::initializer_list<intptr_t> sizes) {
56 for (auto size : sizes) {
57 codes.Add(&Code::Handle(AllocationInstruction(size)));
58 }
59 }
60
61 CodePtr AllocationInstruction(uintptr_t size) {
62 const auto& instructions = Instructions::Handle(Instructions::New(
63 size, /*has_monomorphic=*/false, /*should_be_aligned=*/false));
64
65 uword addr = instructions.PayloadStart();
66 for (uintptr_t i = 0; i < (size / 4); ++i) {
67 *reinterpret_cast<uint32_t*>(addr + 4 * i) =
68 static_cast<uint32_t>(kBreakInstructionFiller);
69 }
70
71 const auto& code = Code::Handle(Code::New(0));
72 code.SetActiveInstructions(instructions, 0);
73 code.set_instructions(instructions);
74 return code.ptr();
75 }
76
77 void EmitPcRelativeCallFunction(intptr_t idx, intptr_t to_idx) {
78 const Code& code = *codes[idx];
79 const Code& target = *codes[to_idx];
80
81 EmitCodeFor(code, [&](compiler::Assembler* assembler) {
82#if defined(TARGET_ARCH_ARM64)
83 SPILLS_RETURN_ADDRESS_FROM_LR_TO_REGISTER(
84 __ stp(LR, R1,
85 compiler::Address(CSP, -2 * kWordSize,
87#elif defined(TARGET_ARCH_ARM)
88 SPILLS_RETURN_ADDRESS_FROM_LR_TO_REGISTER(__ PushList((1 << LR)));
89#elif defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
90 __ PushRegister(RA);
91#endif
92 __ GenerateUnRelocatedPcRelativeCall();
93 AddPcRelativeCallTargetAt(__ CodeSize(), code, target);
94#if defined(TARGET_ARCH_ARM64)
95 RESTORES_RETURN_ADDRESS_FROM_REGISTER_TO_LR(
96 __ ldp(LR, R1,
97 compiler::Address(CSP, 2 * kWordSize,
99#elif defined(TARGET_ARCH_ARM)
100 RESTORES_RETURN_ADDRESS_FROM_REGISTER_TO_LR(__ PopList((1 << LR)));
101#elif defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
102 __ PopRegister(RA);
103#endif
104 __ Ret();
105 });
106 }
107
108 void EmitReturn42Function(intptr_t idx) {
109 const Code& code = *codes[idx];
110 EmitCodeFor(code, [&](compiler::Assembler* assembler) {
111#if defined(TARGET_ARCH_X64)
112 __ LoadImmediate(RAX, 42);
113#elif defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
114 __ LoadImmediate(R0, 42);
115#elif defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
116 __ LoadImmediate(A0, 42);
117#endif
118 __ Ret();
119 });
120 }
121
122 void EmitCodeFor(const Code& code,
123 std::function<void(compiler::Assembler* assembler)> fun) {
124 const auto& inst = Instructions::Handle(code.instructions());
125
126 compiler::Assembler assembler(nullptr);
127 fun(&assembler);
128
129 const uword addr = inst.PayloadStart();
130 memmove(reinterpret_cast<void*>(addr),
131 reinterpret_cast<void*>(assembler.CodeAddress(0)),
132 assembler.CodeSize());
133
134 if (FLAG_disassemble) {
135 OS::PrintErr("Disassemble:\n");
136 code.Disassemble();
137 }
138 }
139
140 void AddPcRelativeCallTargetAt(intptr_t offset,
141 const Code& code,
142 const Code& target) {
143 const auto& kind_and_offset = Smi::Handle(
147 AddCall(code, target, kind_and_offset);
148 }
149
150 void AddCall(const Code& code,
151 const Code& target,
152 const Smi& kind_and_offset) {
153 auto& call_targets = Array::Handle(code.static_calls_target_table());
154 if (call_targets.IsNull()) {
156 } else {
157 call_targets = Array::Grow(
158 call_targets, call_targets.Length() + Code::kSCallTableEntryLength);
159 }
160
161 StaticCallsTable table(call_targets);
162 auto entry = table[table.Length() - 1];
163 entry.Set<Code::kSCallTableKindAndOffset>(kind_and_offset);
167 code.set_static_calls_target_table(call_targets);
168 }
169
170 void BuildImageAndRunTest(
171 std::function<void(const GrowableArray<ImageWriterCommand>&, uword*)>
172 fun) {
173 auto& image = Instructions::Handle();
174 uword entrypoint = 0;
175 {
176 GrowableArray<CodePtr> raw_codes;
177 for (auto code : codes) {
178 raw_codes.Add(code->ptr());
179 }
180
181 GrowableArray<ImageWriterCommand> commands;
182 CodeRelocator::Relocate(thread, &raw_codes, &commands,
183 /*is_vm_isolate=*/false);
184
185 uword expected_offset = 0;
186 fun(commands, &expected_offset);
187
188 image = BuildImage(&commands);
189 entrypoint = image.EntryPoint() + expected_offset;
190
191 for (intptr_t i = 0; i < commands.length(); ++i) {
193 delete[] commands[i].insert_trampoline_bytes.buffer;
194 commands[i].insert_trampoline_bytes.buffer = nullptr;
195 }
196 }
197 }
198 typedef intptr_t (*Fun)() DART_UNUSED;
199#if defined(TARGET_ARCH_X64)
200 EXPECT_EQ(42, reinterpret_cast<Fun>(entrypoint)());
201#elif defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_RISCV32)
202 EXPECT_EQ(42, EXECUTE_TEST_CODE_INT32(Fun, entrypoint));
203#elif defined(TARGET_ARCH_ARM64) || defined(TARGET_ARCH_RISCV64)
204 EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Fun, entrypoint));
205#endif
206 }
207
208 InstructionsPtr BuildImage(GrowableArray<ImageWriterCommand>* commands) {
209 intptr_t size = 0;
210 for (intptr_t i = 0; i < commands->length(); ++i) {
211 switch ((*commands)[i].op) {
213 size += (*commands)[i].insert_trampoline_bytes.buffer_length;
214 break;
216 size += (*commands)[i].insert_padding.padding_length;
217 break;
220 (*commands)[i].insert_instruction_of_code.code));
221 break;
222 }
223 }
224
225 auto& instructions = Instructions::Handle(Instructions::New(
226 size, /*has_monomorphic=*/false, /*should_be_aligned=*/false));
227 {
228 uword addr = instructions.PayloadStart();
229 for (intptr_t i = 0; i < commands->length(); ++i) {
230 switch ((*commands)[i].op) {
232 const auto entry = (*commands)[i].insert_trampoline_bytes;
233 const auto current_size = entry.buffer_length;
234 ASSERT(addr + current_size <= instructions.PayloadStart() + size);
235 memmove(reinterpret_cast<void*>(addr), entry.buffer, current_size);
236 addr += current_size;
237 break;
238 }
240 const auto entry = (*commands)[i].insert_padding;
241 const auto current_size = entry.padding_length;
242 ASSERT(addr + current_size <= instructions.PayloadStart() + size);
243 memset(reinterpret_cast<void*>(addr), 0, current_size);
244 addr += current_size;
245 break;
246 }
248 const auto entry = (*commands)[i].insert_instruction_of_code;
249 const auto current_size =
251 ASSERT(addr + current_size <= instructions.PayloadStart() + size);
252 memmove(reinterpret_cast<void*>(addr),
253 reinterpret_cast<void*>(Instructions::PayloadStart(
254 Code::InstructionsOf(entry.code))),
255 current_size);
256 addr += current_size;
257 break;
258 }
259 }
260 }
261
262 if (FLAG_write_protect_code) {
263 const uword address = UntaggedObject::ToAddr(instructions.ptr());
264 const auto size = instructions.ptr()->untag()->HeapSize();
265 VirtualMemory::Protect(reinterpret_cast<void*>(address), size,
267 }
268 CPU::FlushICache(instructions.PayloadStart(), instructions.Size());
269 }
270 return instructions.ptr();
271 }
272
273 Thread* thread;
274 SafepointWriteRwLocker locker;
275 ForceGrowthSafepointOperationScope safepoint_and_growth_scope;
276 GrowableArray<const Code*> codes;
277};
278
279ISOLATE_UNIT_TEST_CASE(CodeRelocator_DirectForwardCall) {
280 RelocatorTestHelper helper(thread);
281 const intptr_t fmax = FLAG_upper_pc_relative_call_distance;
282
283 // The gap is 8 bytes smaller than what could be directly forward-called,
284 // because the relocator's decision when to insert a trampoline is purely
285 // based on whether unresolved calls can reach such a trampoline if the next
286 // instruction is emitted (not taking into account that the next instruction
287 // might actually make some of those unresolved calls resolved).
288 helper.CreateInstructions({
289 20, // caller (call instruction @helper.kOffsetOfCall)
290 fmax - (20 - helper.kOffsetOfCall) - 8, // 8 bytes less than maximum gap
291 8 // forward call target
292 });
293 helper.EmitPcRelativeCallFunction(0, 2);
294 helper.EmitReturn42Function(2);
295 helper.BuildImageAndRunTest(
296 [&](const GrowableArray<ImageWriterCommand>& commands,
297 uword* entry_point) {
298 EXPECT_EQ(3, commands.length());
299
300 // This makes an in-range forward call.
303 // This is is the target of the forwards call.
305
306 *entry_point = commands[0].expected_offset;
307 });
308}
309
310ISOLATE_UNIT_TEST_CASE(CodeRelocator_OutOfRangeForwardCall) {
311 RelocatorTestHelper helper(thread);
312 const intptr_t fmax = FLAG_upper_pc_relative_call_distance;
313
314 helper.CreateInstructions({
315 20, // caller (call instruction @helper.kOffsetOfCall)
316 fmax - (20 - helper.kOffsetOfCall) + 4, // 4 bytes above maximum gap
317 8 // forwards call target
318 });
319 helper.EmitPcRelativeCallFunction(0, 2);
320 helper.EmitReturn42Function(2);
321 helper.BuildImageAndRunTest([&](const GrowableArray<ImageWriterCommand>&
322 commands,
323 uword* entry_point) {
324 EXPECT_EQ(4, commands.length());
325
326 // This makes an out-of-range forward call.
328 // This is the last change the relocator thinks it can ensure the
329 // out-of-range call above can call a trampoline - so it injets it here and
330 // no later.
333 // This is the target of the forwards call.
335
336 *entry_point = commands[0].expected_offset;
337 });
338}
339
340ISOLATE_UNIT_TEST_CASE(CodeRelocator_DirectBackwardCall) {
341 RelocatorTestHelper helper(thread);
342 const intptr_t bmax = -FLAG_lower_pc_relative_call_distance;
343
344 helper.CreateInstructions({
345 8, // backwards call target
346 bmax - 8 - helper.kOffsetOfCall, // maximize out backwards call range
347 20 // caller (call instruction @helper.kOffsetOfCall)
348 });
349 helper.EmitReturn42Function(0);
350 helper.EmitPcRelativeCallFunction(2, 0);
351 helper.BuildImageAndRunTest(
352 [&](const GrowableArray<ImageWriterCommand>& commands,
353 uword* entry_point) {
354 EXPECT_EQ(3, commands.length());
355
356 // This is the backwards call target.
359 // This makes an in-range backwards call.
361
362 *entry_point = commands[2].expected_offset;
363 });
364}
365
366ISOLATE_UNIT_TEST_CASE(CodeRelocator_OutOfRangeBackwardCall) {
367 RelocatorTestHelper helper(thread);
368 const intptr_t bmax = -FLAG_lower_pc_relative_call_distance;
369 const intptr_t fmax = FLAG_upper_pc_relative_call_distance;
370
371 helper.CreateInstructions({
372 8, // backward call target
373 bmax - 8 - helper.kOffsetOfCall + 4, // 4 bytes exceeding backwards range
374 20, // caller (call instruction @helper.kOffsetOfCall)
375 fmax - (20 - helper.kOffsetOfCall) -
376 4, // 4 bytes less than forward range
377 4,
378 4, // out-of-range, so trampoline has to be inserted before this
379 });
380 helper.EmitReturn42Function(0);
381 helper.EmitPcRelativeCallFunction(2, 0);
382 helper.BuildImageAndRunTest([&](const GrowableArray<ImageWriterCommand>&
383 commands,
384 uword* entry_point) {
385 EXPECT_EQ(7, commands.length());
386
387 // This is the backwards call target.
390 // This makes an out-of-range backwards call. The relocator will make the
391 // call go to a trampoline instead. It will delay insertion of the
392 // trampoline until it almost becomes out-of-range.
396 // This is the last change the relocator thinks it can ensure the
397 // out-of-range call above can call a trampoline - so it injets it here and
398 // no later.
401
402 *entry_point = commands[2].expected_offset;
403 });
404}
405
406ISOLATE_UNIT_TEST_CASE(CodeRelocator_OutOfRangeBackwardCall2) {
407 RelocatorTestHelper helper(thread);
408 const intptr_t bmax = -FLAG_lower_pc_relative_call_distance;
409
410 helper.CreateInstructions({
411 8, // backwards call target
412 bmax - 8 - helper.kOffsetOfCall + 4, // 4 bytes exceeding backwards range
413 20, // caller (call instruction @helper.kOffsetOfCall)
414 4,
415 });
416 helper.EmitReturn42Function(0);
417 helper.EmitPcRelativeCallFunction(2, 0);
418 helper.BuildImageAndRunTest(
419 [&](const GrowableArray<ImageWriterCommand>& commands,
420 uword* entry_point) {
421 EXPECT_EQ(5, commands.length());
422
423 // This is the backwards call target.
426 // This makes an out-of-range backwards call. The relocator will make
427 // the call go to a trampoline instead. It will delay insertion of the
428 // trampoline until it almost becomes out-of-range (or in this case no
429 // more instructions follow).
432 // There's no other instructions coming, so the relocator will resolve
433 // any pending out-of-range calls by inserting trampolines at the end.
435
436 *entry_point = commands[4].expected_offset;
437 });
438}
439
440UNIT_TEST_CASE(PCRelativeCallPatterns) {
441 {
442 uint8_t instruction[PcRelativeCallPattern::kLengthInBytes] = {};
443
444 PcRelativeCallPattern pattern(reinterpret_cast<uword>(&instruction));
445
446 pattern.set_distance(PcRelativeCallPattern::kLowerCallingRange);
447 EXPECT_EQ(PcRelativeCallPattern::kLowerCallingRange, pattern.distance());
448
449 pattern.set_distance(PcRelativeCallPattern::kUpperCallingRange);
450 EXPECT_EQ(PcRelativeCallPattern::kUpperCallingRange, pattern.distance());
451 }
452 {
453 uint8_t instruction[PcRelativeTailCallPattern::kLengthInBytes] = {};
454
455 PcRelativeTailCallPattern pattern(reinterpret_cast<uword>(&instruction));
456
459 pattern.distance());
460
463 pattern.distance());
464 }
465}
466
467#endif // defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
468
469} // namespace dart
SI F table(const skcms_Curve *curve, F v)
static ArrayPtr New(intptr_t len, Heap::Space space=Heap::kNew)
Definition: object.h:10959
static ArrayPtr Grow(const Array &source, intptr_t new_length, Heap::Space space=Heap::kNew)
Definition: object.cc:24853
static constexpr intptr_t encode(CallKind value)
Definition: bitfield.h:165
static void FlushICache(uword start, uword size)
@ kPcRelativeCall
Definition: object.h:6969
static InstructionsPtr InstructionsOf(const CodePtr code)
Definition: object.h:6775
@ kSCallTableEntryLength
Definition: object.h:6984
@ kSCallTableFunctionTarget
Definition: object.h:6983
@ kSCallTableCodeOrTypeTarget
Definition: object.h:6982
@ kSCallTableKindAndOffset
Definition: object.h:6981
@ kDefaultEntry
Definition: object.h:6976
static intptr_t SizeInSnapshot(ObjectPtr object)
uword PayloadStart() const
Definition: object.h:5745
static void static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
static ObjectPtr null()
Definition: object.h:433
static Object & Handle()
Definition: object.h:407
static constexpr int kLengthInBytes
static constexpr int32_t kLowerCallingRange
static constexpr int kLengthInBytes
static constexpr int32_t kUpperCallingRange
static constexpr int32_t kUpperCallingRange
static constexpr int32_t kLowerCallingRange
static SmiPtr New(intptr_t value)
Definition: object.h:10006
static uword ToAddr(const UntaggedObject *raw_obj)
Definition: raw_object.h:522
static constexpr T RoundUp(T x, uintptr_t alignment, uintptr_t offset=0)
Definition: utils.h:120
static void Protect(void *address, intptr_t size, Protection mode)
#define ASSERT(E)
uint32_t * target
Dart_NativeFunction function
Definition: fuchsia.cc:51
sk_sp< const SkImage > image
Definition: SkRecords.h:269
static void EmitCodeFor(FlowGraphCompiler *compiler, FlowGraph *graph)
Definition: dart_vm.cc:33
@ kGC
Definition: thread.h:291
UNIT_TEST_CASE(PRIORITY_HEAP_WITH_INDEX__INCREASING)
uintptr_t uword
Definition: globals.h:501
constexpr uword kBreakInstructionFiller
ISOLATE_UNIT_TEST_CASE(StackAllocatedDestruction)
constexpr intptr_t kWordSize
Definition: globals.h:509
ArrayOfTuplesView< Code::SCallTableEntry, std::tuple< Smi, Object, Function > > StaticCallsTable
Definition: object.h:13546
DECLARE_FLAG(bool, show_invisible_frames)
dictionary commands
Definition: dom.py:171
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive keep the shell running after the Dart script has completed enable serial On low power devices with low core running concurrent GC tasks on threads can cause them to contend with the UI thread which could potentially lead to jank This option turns off all concurrent GC activities domain network JSON encoded network policy per domain This overrides the DisallowInsecureConnections switch Embedder can specify whether to allow or disallow insecure connections at a domain level old gen heap size
Definition: switches.h:259
inst
Definition: malisc.py:37
#define DART_UNUSED
Definition: globals.h:269
#define __
SeparatedVector2 offset