Flutter Engine
The Flutter Engine
Loading...
Searching...
No Matches
relocation_test.cc
Go to the documentation of this file.
1// Copyright (c) 2021, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
#include "platform/assert.h"

#include "vm/allocation.h"
#include "vm/code_patcher.h"
#include "vm/compiler/relocation.h"
#include "vm/image_snapshot.h"
#include "vm/instructions.h"
#include "vm/longjump.h"
#include "vm/unit_test.h"
14
15#define __ assembler->
16
17namespace dart {
18
19#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
20
21DECLARE_FLAG(int, lower_pc_relative_call_distance);
22DECLARE_FLAG(int, upper_pc_relative_call_distance);
23
24struct RelocatorTestHelper {
25 const intptr_t kTrampolineSize =
26 Utils::RoundUp(PcRelativeTrampolineJumpPattern::kLengthInBytes,
27 compiler::target::Instructions::kBarePayloadAlignment);
28
29 // The callers on arm/arm64 have to save LR before calling, so the call
30 // instruction will be 4 byte sinto the instruction stream.
31#if defined(TARGET_ARCH_ARM64)
32 static constexpr intptr_t kOffsetOfCall = 4;
33#elif defined(TARGET_ARCH_ARM)
34 static constexpr intptr_t kOffsetOfCall = 4;
35#elif defined(TARGET_ARCH_RISCV32)
36 static constexpr intptr_t kOffsetOfCall = 4;
37#elif defined(TARGET_ARCH_RISCV64)
38 static constexpr intptr_t kOffsetOfCall = 4;
39#else
40 static constexpr intptr_t kOffsetOfCall = 0;
41#endif
42
43 explicit RelocatorTestHelper(Thread* thread)
44 : thread(thread),
45 locker(thread, thread->isolate_group()->program_lock()),
46 safepoint_and_growth_scope(thread, SafepointLevel::kGC) {
47 // So the relocator uses the correct instruction size layout.
48 FLAG_precompiled_mode = true;
49
50 FLAG_lower_pc_relative_call_distance = -128;
51 FLAG_upper_pc_relative_call_distance = 128;
52 }
53 ~RelocatorTestHelper() { FLAG_precompiled_mode = false; }
54
55 void CreateInstructions(std::initializer_list<intptr_t> sizes) {
56 for (auto size : sizes) {
57 codes.Add(&Code::Handle(AllocationInstruction(size)));
58 }
59 }
60
61 CodePtr AllocationInstruction(uintptr_t size) {
62 const auto& instructions = Instructions::Handle(Instructions::New(
63 size, /*has_monomorphic=*/false, /*should_be_aligned=*/false));
64
65 uword addr = instructions.PayloadStart();
66 for (uintptr_t i = 0; i < (size / 4); ++i) {
67 *reinterpret_cast<uint32_t*>(addr + 4 * i) =
68 static_cast<uint32_t>(kBreakInstructionFiller);
69 }
70
71 const auto& code = Code::Handle(Code::New(0));
72 code.SetActiveInstructions(instructions, 0);
73 code.set_instructions(instructions);
74 return code.ptr();
75 }
76
77 void EmitPcRelativeCallFunction(intptr_t idx, intptr_t to_idx) {
78 const Code& code = *codes[idx];
79 const Code& target = *codes[to_idx];
80
81 EmitCodeFor(code, [&](compiler::Assembler* assembler) {
82#if defined(TARGET_ARCH_ARM64)
83 SPILLS_RETURN_ADDRESS_FROM_LR_TO_REGISTER(
84 __ stp(LR, R1,
85 compiler::Address(CSP, -2 * kWordSize,
86 compiler::Address::PairPreIndex)));
87#elif defined(TARGET_ARCH_ARM)
88 SPILLS_RETURN_ADDRESS_FROM_LR_TO_REGISTER(__ PushList((1 << LR)));
89#elif defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
90 __ PushRegister(RA);
91#endif
92 __ GenerateUnRelocatedPcRelativeCall();
93 AddPcRelativeCallTargetAt(__ CodeSize(), code, target);
94#if defined(TARGET_ARCH_ARM64)
95 RESTORES_RETURN_ADDRESS_FROM_REGISTER_TO_LR(
96 __ ldp(LR, R1,
97 compiler::Address(CSP, 2 * kWordSize,
98 compiler::Address::PairPostIndex)));
99#elif defined(TARGET_ARCH_ARM)
100 RESTORES_RETURN_ADDRESS_FROM_REGISTER_TO_LR(__ PopList((1 << LR)));
101#elif defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
102 __ PopRegister(RA);
103#endif
104 __ Ret();
105 });
106 }
107
108 void EmitReturn42Function(intptr_t idx) {
109 const Code& code = *codes[idx];
110 EmitCodeFor(code, [&](compiler::Assembler* assembler) {
111#if defined(TARGET_ARCH_X64)
112 __ LoadImmediate(RAX, 42);
113#elif defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
114 __ LoadImmediate(R0, 42);
115#elif defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
116 __ LoadImmediate(A0, 42);
117#endif
118 __ Ret();
119 });
120 }
121
122 void EmitCodeFor(const Code& code,
123 std::function<void(compiler::Assembler* assembler)> fun) {
124 const auto& inst = Instructions::Handle(code.instructions());
125
126 compiler::Assembler assembler(nullptr);
127 fun(&assembler);
128
129 const uword addr = inst.PayloadStart();
130 memmove(reinterpret_cast<void*>(addr),
131 reinterpret_cast<void*>(assembler.CodeAddress(0)),
132 assembler.CodeSize());
133
134 if (FLAG_disassemble) {
135 OS::PrintErr("Disassemble:\n");
136 code.Disassemble();
137 }
138 }
139
140 void AddPcRelativeCallTargetAt(intptr_t offset,
141 const Code& code,
142 const Code& target) {
143 const auto& kind_and_offset = Smi::Handle(
144 Smi::New(Code::KindField::encode(Code::kPcRelativeCall) |
145 Code::EntryPointField::encode(Code::kDefaultEntry) |
146 Code::OffsetField::encode(offset)));
147 AddCall(code, target, kind_and_offset);
148 }
149
150 void AddCall(const Code& code,
151 const Code& target,
152 const Smi& kind_and_offset) {
153 auto& call_targets = Array::Handle(code.static_calls_target_table());
154 if (call_targets.IsNull()) {
155 call_targets = Array::New(Code::kSCallTableEntryLength);
156 } else {
157 call_targets = Array::Grow(
158 call_targets, call_targets.Length() + Code::kSCallTableEntryLength);
159 }
160
161 StaticCallsTable table(call_targets);
162 auto entry = table[table.Length() - 1];
163 entry.Set<Code::kSCallTableKindAndOffset>(kind_and_offset);
164 entry.Set<Code::kSCallTableCodeOrTypeTarget>(target);
165 entry.Set<Code::kSCallTableFunctionTarget>(
166 Function::Handle(Function::null()));
167 code.set_static_calls_target_table(call_targets);
168 }
169
170 void BuildImageAndRunTest(
171 std::function<void(const GrowableArray<ImageWriterCommand>&, uword*)>
172 fun) {
173 auto& image = Instructions::Handle();
174 uword entrypoint = 0;
175 {
176 GrowableArray<CodePtr> raw_codes;
177 for (auto code : codes) {
178 raw_codes.Add(code->ptr());
179 }
180
181 GrowableArray<ImageWriterCommand> commands;
182 CodeRelocator::Relocate(thread, &raw_codes, &commands,
183 /*is_vm_isolate=*/false);
184
185 uword expected_offset = 0;
186 fun(commands, &expected_offset);
187
188 image = BuildImage(&commands);
189 entrypoint = image.EntryPoint() + expected_offset;
190
191 for (intptr_t i = 0; i < commands.length(); ++i) {
192 if (commands[i].op == ImageWriterCommand::InsertBytesOfTrampoline) {
193 delete[] commands[i].insert_trampoline_bytes.buffer;
194 commands[i].insert_trampoline_bytes.buffer = nullptr;
195 }
196 }
197 }
198 typedef intptr_t (*Fun)() DART_UNUSED;
199#if defined(TARGET_ARCH_X64)
200 EXPECT_EQ(42, reinterpret_cast<Fun>(entrypoint)());
201#elif defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_RISCV32)
202 EXPECT_EQ(42, EXECUTE_TEST_CODE_INT32(Fun, entrypoint));
203#elif defined(TARGET_ARCH_ARM64) || defined(TARGET_ARCH_RISCV64)
204 EXPECT_EQ(42, EXECUTE_TEST_CODE_INT64(Fun, entrypoint));
205#endif
206 }
207
208 InstructionsPtr BuildImage(GrowableArray<ImageWriterCommand>* commands) {
209 intptr_t size = 0;
210 for (intptr_t i = 0; i < commands->length(); ++i) {
211 switch ((*commands)[i].op) {
212 case ImageWriterCommand::InsertBytesOfTrampoline:
213 size += (*commands)[i].insert_trampoline_bytes.buffer_length;
214 break;
215 case ImageWriterCommand::InsertPadding:
216 size += (*commands)[i].insert_padding.padding_length;
217 break;
218 case ImageWriterCommand::InsertInstructionOfCode:
219 size += ImageWriter::SizeInSnapshot(Code::InstructionsOf(
220 (*commands)[i].insert_instruction_of_code.code));
221 break;
222 }
223 }
224
225 auto& instructions = Instructions::Handle(Instructions::New(
226 size, /*has_monomorphic=*/false, /*should_be_aligned=*/false));
227 {
228 uword addr = instructions.PayloadStart();
229 for (intptr_t i = 0; i < commands->length(); ++i) {
230 switch ((*commands)[i].op) {
231 case ImageWriterCommand::InsertBytesOfTrampoline: {
232 const auto entry = (*commands)[i].insert_trampoline_bytes;
233 const auto current_size = entry.buffer_length;
234 ASSERT(addr + current_size <= instructions.PayloadStart() + size);
235 memmove(reinterpret_cast<void*>(addr), entry.buffer, current_size);
236 addr += current_size;
237 break;
238 }
239 case ImageWriterCommand::InsertPadding: {
240 const auto entry = (*commands)[i].insert_padding;
241 const auto current_size = entry.padding_length;
242 ASSERT(addr + current_size <= instructions.PayloadStart() + size);
243 memset(reinterpret_cast<void*>(addr), 0, current_size);
244 addr += current_size;
245 break;
246 }
247 case ImageWriterCommand::InsertInstructionOfCode: {
248 const auto entry = (*commands)[i].insert_instruction_of_code;
249 const auto current_size =
250 ImageWriter::SizeInSnapshot(Code::InstructionsOf(entry.code));
251 ASSERT(addr + current_size <= instructions.PayloadStart() + size);
252 memmove(reinterpret_cast<void*>(addr),
253 reinterpret_cast<void*>(Instructions::PayloadStart(
254 Code::InstructionsOf(entry.code))),
255 current_size);
256 addr += current_size;
257 break;
258 }
259 }
260 }
261
262 if (FLAG_write_protect_code) {
263 const uword address = UntaggedObject::ToAddr(instructions.ptr());
264 const auto size = instructions.ptr()->untag()->HeapSize();
265 VirtualMemory::Protect(reinterpret_cast<void*>(address), size,
266 VirtualMemory::kReadExecute);
267 }
268 CPU::FlushICache(instructions.PayloadStart(), instructions.Size());
269 }
270 return instructions.ptr();
271 }
272
273 Thread* thread;
274 SafepointWriteRwLocker locker;
275 ForceGrowthSafepointOperationScope safepoint_and_growth_scope;
276 GrowableArray<const Code*> codes;
277};
278
279ISOLATE_UNIT_TEST_CASE(CodeRelocator_DirectForwardCall) {
280 RelocatorTestHelper helper(thread);
281 const intptr_t fmax = FLAG_upper_pc_relative_call_distance;
282
283 // The gap is 8 bytes smaller than what could be directly forward-called,
284 // because the relocator's decision when to insert a trampoline is purely
285 // based on whether unresolved calls can reach such a trampoline if the next
286 // instruction is emitted (not taking into account that the next instruction
287 // might actually make some of those unresolved calls resolved).
288 helper.CreateInstructions({
289 20, // caller (call instruction @helper.kOffsetOfCall)
290 fmax - (20 - helper.kOffsetOfCall) - 8, // 8 bytes less than maximum gap
291 8 // forward call target
292 });
293 helper.EmitPcRelativeCallFunction(0, 2);
294 helper.EmitReturn42Function(2);
295 helper.BuildImageAndRunTest(
296 [&](const GrowableArray<ImageWriterCommand>& commands,
297 uword* entry_point) {
298 EXPECT_EQ(3, commands.length());
299
300 // This makes an in-range forward call.
301 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[0].op);
302 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[1].op);
303 // This is is the target of the forwards call.
304 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[2].op);
305
306 *entry_point = commands[0].expected_offset;
307 });
308}
309
310ISOLATE_UNIT_TEST_CASE(CodeRelocator_OutOfRangeForwardCall) {
311 RelocatorTestHelper helper(thread);
312 const intptr_t fmax = FLAG_upper_pc_relative_call_distance;
313
314 helper.CreateInstructions({
315 20, // caller (call instruction @helper.kOffsetOfCall)
316 fmax - (20 - helper.kOffsetOfCall) + 4, // 4 bytes above maximum gap
317 8 // forwards call target
318 });
319 helper.EmitPcRelativeCallFunction(0, 2);
320 helper.EmitReturn42Function(2);
321 helper.BuildImageAndRunTest([&](const GrowableArray<ImageWriterCommand>&
322 commands,
323 uword* entry_point) {
324 EXPECT_EQ(4, commands.length());
325
326 // This makes an out-of-range forward call.
327 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[0].op);
328 // This is the last change the relocator thinks it can ensure the
329 // out-of-range call above can call a trampoline - so it injets it here and
330 // no later.
331 EXPECT_EQ(ImageWriterCommand::InsertBytesOfTrampoline, commands[1].op);
332 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[2].op);
333 // This is the target of the forwards call.
334 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[3].op);
335
336 *entry_point = commands[0].expected_offset;
337 });
338}
339
340ISOLATE_UNIT_TEST_CASE(CodeRelocator_DirectBackwardCall) {
341 RelocatorTestHelper helper(thread);
342 const intptr_t bmax = -FLAG_lower_pc_relative_call_distance;
343
344 helper.CreateInstructions({
345 8, // backwards call target
346 bmax - 8 - helper.kOffsetOfCall, // maximize out backwards call range
347 20 // caller (call instruction @helper.kOffsetOfCall)
348 });
349 helper.EmitReturn42Function(0);
350 helper.EmitPcRelativeCallFunction(2, 0);
351 helper.BuildImageAndRunTest(
352 [&](const GrowableArray<ImageWriterCommand>& commands,
353 uword* entry_point) {
354 EXPECT_EQ(3, commands.length());
355
356 // This is the backwards call target.
357 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[0].op);
358 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[1].op);
359 // This makes an in-range backwards call.
360 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[2].op);
361
362 *entry_point = commands[2].expected_offset;
363 });
364}
365
366ISOLATE_UNIT_TEST_CASE(CodeRelocator_OutOfRangeBackwardCall) {
367 RelocatorTestHelper helper(thread);
368 const intptr_t bmax = -FLAG_lower_pc_relative_call_distance;
369 const intptr_t fmax = FLAG_upper_pc_relative_call_distance;
370
371 helper.CreateInstructions({
372 8, // backward call target
373 bmax - 8 - helper.kOffsetOfCall + 4, // 4 bytes exceeding backwards range
374 20, // caller (call instruction @helper.kOffsetOfCall)
375 fmax - (20 - helper.kOffsetOfCall) -
376 4, // 4 bytes less than forward range
377 4,
378 4, // out-of-range, so trampoline has to be inserted before this
379 });
380 helper.EmitReturn42Function(0);
381 helper.EmitPcRelativeCallFunction(2, 0);
382 helper.BuildImageAndRunTest([&](const GrowableArray<ImageWriterCommand>&
383 commands,
384 uword* entry_point) {
385 EXPECT_EQ(7, commands.length());
386
387 // This is the backwards call target.
388 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[0].op);
389 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[1].op);
390 // This makes an out-of-range backwards call. The relocator will make the
391 // call go to a trampoline instead. It will delay insertion of the
392 // trampoline until it almost becomes out-of-range.
393 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[2].op);
394 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[3].op);
395 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[4].op);
396 // This is the last change the relocator thinks it can ensure the
397 // out-of-range call above can call a trampoline - so it injets it here and
398 // no later.
399 EXPECT_EQ(ImageWriterCommand::InsertBytesOfTrampoline, commands[5].op);
400 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[6].op);
401
402 *entry_point = commands[2].expected_offset;
403 });
404}
405
406ISOLATE_UNIT_TEST_CASE(CodeRelocator_OutOfRangeBackwardCall2) {
407 RelocatorTestHelper helper(thread);
408 const intptr_t bmax = -FLAG_lower_pc_relative_call_distance;
409
410 helper.CreateInstructions({
411 8, // backwards call target
412 bmax - 8 - helper.kOffsetOfCall + 4, // 4 bytes exceeding backwards range
413 20, // caller (call instruction @helper.kOffsetOfCall)
414 4,
415 });
416 helper.EmitReturn42Function(0);
417 helper.EmitPcRelativeCallFunction(2, 0);
418 helper.BuildImageAndRunTest(
419 [&](const GrowableArray<ImageWriterCommand>& commands,
420 uword* entry_point) {
421 EXPECT_EQ(5, commands.length());
422
423 // This is the backwards call target.
424 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[0].op);
425 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[1].op);
426 // This makes an out-of-range backwards call. The relocator will make
427 // the call go to a trampoline instead. It will delay insertion of the
428 // trampoline until it almost becomes out-of-range (or in this case no
429 // more instructions follow).
430 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[2].op);
431 EXPECT_EQ(ImageWriterCommand::InsertInstructionOfCode, commands[3].op);
432 // There's no other instructions coming, so the relocator will resolve
433 // any pending out-of-range calls by inserting trampolines at the end.
434 EXPECT_EQ(ImageWriterCommand::InsertBytesOfTrampoline, commands[4].op);
435
436 *entry_point = commands[4].expected_offset;
437 });
438}
439
440UNIT_TEST_CASE(PCRelativeCallPatterns) {
441 {
442 uint8_t instruction[PcRelativeCallPattern::kLengthInBytes] = {};
443
444 PcRelativeCallPattern pattern(reinterpret_cast<uword>(&instruction));
445
446 pattern.set_distance(PcRelativeCallPattern::kLowerCallingRange);
447 EXPECT_EQ(PcRelativeCallPattern::kLowerCallingRange, pattern.distance());
448
449 pattern.set_distance(PcRelativeCallPattern::kUpperCallingRange);
450 EXPECT_EQ(PcRelativeCallPattern::kUpperCallingRange, pattern.distance());
451 }
452 {
453 uint8_t instruction[PcRelativeTailCallPattern::kLengthInBytes] = {};
454
455 PcRelativeTailCallPattern pattern(reinterpret_cast<uword>(&instruction));
456
459 pattern.distance());
460
463 pattern.distance());
464 }
465}
466
467#endif // defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
468
469} // namespace dart
SI F table(const skcms_Curve *curve, F v)
#define __
#define RA(width, name,...)
static constexpr int kLengthInBytes
static constexpr int32_t kLowerCallingRange
static constexpr int kLengthInBytes
static constexpr int32_t kUpperCallingRange
static constexpr int32_t kUpperCallingRange
static constexpr int32_t kLowerCallingRange
#define UNIT_TEST_CASE(name)
Definition unit_test.h:23
#define LR
#define ASSERT(E)
sk_sp< SkImage > image
Definition examples.cpp:29
uint32_t * target
#define DECLARE_FLAG(type, name)
Definition flags.h:14
uintptr_t uword
Definition globals.h:501
#define DART_UNUSED
Definition globals.h:269
Point offset
#define ISOLATE_UNIT_TEST_CASE(name)
Definition unit_test.h:64