Flutter Engine
relocation.cc
// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/compiler/relocation.h"

#include "vm/code_patcher.h"
#include "vm/heap/pages.h"
#include "vm/instructions.h"
#include "vm/object_store.h"
#include "vm/stub_code.h"

namespace dart {

#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)

// Only for testing.
DEFINE_FLAG(bool,
            always_generate_trampolines_for_testing,
            false,
            "Always generate trampolines (for testing purposes).");

DEFINE_FLAG(int,
            lower_tail_pc_relative_call_distance,
            -1,
            "Lower tail call distance.");
DEFINE_FLAG(int,
            upper_tail_pc_relative_call_distance,
            -1,
            "Upper tail call distance.");
DEFINE_FLAG(int, lower_pc_relative_call_distance, -1, "Lower call distance.");
DEFINE_FLAG(int, upper_pc_relative_call_distance, -1, "Upper call distance.");

struct TailCallDistanceLimits {
  static intptr_t Lower() {
    if (FLAG_lower_tail_pc_relative_call_distance != -1) {
      return FLAG_lower_tail_pc_relative_call_distance;
    }
    return PcRelativeTailCallPattern::kLowerCallingRange;
  }
  static intptr_t Upper() {
    if (FLAG_upper_tail_pc_relative_call_distance != -1) {
      return FLAG_upper_tail_pc_relative_call_distance;
    }
    return PcRelativeTailCallPattern::kUpperCallingRange;
  }
};

struct CallDistanceLimits {
  static intptr_t Lower() {
    if (FLAG_lower_pc_relative_call_distance != -1) {
      return FLAG_lower_pc_relative_call_distance;
    }
    return PcRelativeCallPattern::kLowerCallingRange;
  }
  static intptr_t Upper() {
    if (FLAG_upper_pc_relative_call_distance != -1) {
      return FLAG_upper_pc_relative_call_distance;
    }
    return PcRelativeCallPattern::kUpperCallingRange;
  }
};

const intptr_t kTrampolineSize =
    Utils::RoundUp(PcRelativeTrampolineJumpPattern::kLengthInBytes,
                   compiler::target::kWordSize);

CodeRelocator::CodeRelocator(Thread* thread,
                             GrowableArray<CodePtr>* code_objects,
                             GrowableArray<ImageWriterCommand>* commands)
    : StackResource(thread),
      thread_(thread),
      code_objects_(code_objects),
      commands_(commands),
      kind_type_and_offset_(Smi::Handle(thread->zone())),
      target_(Object::Handle(thread->zone())),
      destination_(Code::Handle(thread->zone())) {}

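// Emits every [Code] object's instructions into the text segment and fixes
// up pc-relative calls along the way:
//   * backwards calls are resolved immediately,
//   * forward calls are queued and resolved once their target is emitted,
//   * calls that would go out-of-range before their target is emitted are
//     redirected through trampolines, which are patched at the end.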
void CodeRelocator::Relocate(bool is_vm_isolate) {
  Zone* zone = Thread::Current()->zone();
  auto& current_caller = Code::Handle(zone);
  auto& call_targets = Array::Handle(zone);

  auto& next_caller = Code::Handle(zone);
  auto& next_caller_targets = Array::Handle(zone);

  // Emit all instructions and do relocations on the way.
  for (intptr_t i = 0; i < code_objects_->length(); ++i) {
    current_caller = (*code_objects_)[i];

    intptr_t code_text_offset;
    if (!AddInstructionsToText(current_caller.ptr(), &code_text_offset)) {
      continue;
    }

    call_targets = current_caller.static_calls_target_table();
    ScanCallTargets(current_caller, call_targets, code_text_offset);

    // Any unresolved calls to this instructions object can be fixed now.
    ResolveUnresolvedCallsTargeting(current_caller.instructions());

    // If we have forward/backwards calls which are almost out-of-range, we'll
    // create trampolines now.
    if (i < (code_objects_->length() - 1)) {
      next_caller = (*code_objects_)[i + 1];
      next_caller_targets = next_caller.static_calls_target_table();
    } else {
      next_caller = Code::null();
      next_caller_targets = Array::null();
    }
    BuildTrampolinesForAlmostOutOfRangeCalls(next_caller, next_caller_targets);
  }

  // We're guaranteed to have all calls resolved, since
  //   * backwards calls are resolved eagerly,
  //   * forward calls are resolved once the target is written.
  if (!all_unresolved_calls_.IsEmpty()) {
    for (auto call : all_unresolved_calls_) {
      OS::PrintErr("Unresolved call to %s from %s\n",
                   Object::Handle(call->callee).ToCString(),
                   Object::Handle(call->caller).ToCString());
    }
  }
  RELEASE_ASSERT(all_unresolved_calls_.IsEmpty());
  RELEASE_ASSERT(unresolved_calls_by_destination_.IsEmpty());

  // Any trampolines we created must be patched with the right offsets.
  auto it = trampolines_by_destination_.GetIterator();
  while (true) {
    auto entry = it.Next();
    if (entry == nullptr) break;

    UnresolvedTrampolineList* trampoline_list = entry->value;
    while (!trampoline_list->IsEmpty()) {
      auto unresolved_trampoline = trampoline_list->RemoveFirst();
      ResolveTrampoline(unresolved_trampoline);
      delete unresolved_trampoline;
    }
    delete trampoline_list;
  }
  trampolines_by_destination_.Clear();

  // Don't drop the static call targets table yet. The snapshotter will skip
  // it anyway; however, we might need it to write information into the V8
  // snapshot profile.
}

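// Appends [code]'s instructions to the text segment (via an ImageWriter
// command), inserting alignment padding if required. Returns false if the
// same [Instructions] object was already emitted for an earlier [Code].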
bool CodeRelocator::AddInstructionsToText(CodePtr code,
                                          intptr_t* code_text_offset) {
  InstructionsPtr instructions = Code::InstructionsOf(code);

  // If two [Code] objects point to the same [Instructions] object, we'll just
  // use the first one (they are equivalent for all practical purposes).
  if (text_offsets_.HasKey(instructions)) {
    return false;
  }

  if (Instructions::ShouldBeAligned(instructions) &&
      !Utils::IsAligned(next_text_offset_, kPreferredLoopAlignment)) {
    const intptr_t padding_size =
        Utils::RoundUp(next_text_offset_, kPreferredLoopAlignment) -
        next_text_offset_;

    commands_->Add(ImageWriterCommand(next_text_offset_, padding_size));
    next_text_offset_ += padding_size;
  }

  *code_text_offset = next_text_offset_;
  text_offsets_.Insert({instructions, next_text_offset_});
  commands_->Add(ImageWriterCommand(next_text_offset_, code));
  next_text_offset_ += ImageWriter::SizeInSnapshot(instructions);

  return true;
}

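// Searches the already-emitted trampolines targeting [unresolved_call]'s
// callee for one that is still in range and jumps to the same offset into
// the target. Returns nullptr if none can be reused.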
UnresolvedTrampoline* CodeRelocator::FindTrampolineFor(
    UnresolvedCall* unresolved_call) {
  auto destination = Code::InstructionsOf(unresolved_call->callee);
  auto entry = trampolines_by_destination_.Lookup(destination);
  if (entry != nullptr) {
    UnresolvedTrampolineList* trampolines = entry->value;
    ASSERT(!trampolines->IsEmpty());

    // For the destination of [unresolved_call] we might have multiple
    // trampolines. The trampolines are sorted according to insertion order,
    // which guarantees increasing text_offset's. So we go from the back of
    // the list as long as we have trampolines that are in-range and then
    // check whether the target offset matches.
    auto it = trampolines->End();
    --it;
    do {
      UnresolvedTrampoline* trampoline = *it;
      if (!IsTargetInRangeFor(unresolved_call, trampoline->text_offset)) {
        break;
      }
      if (trampoline->offset_into_target ==
          unresolved_call->offset_into_target) {
        return trampoline;
      }
      --it;
    } while (it != trampolines->Begin());
  }
  return nullptr;
}

void CodeRelocator::AddTrampolineToText(InstructionsPtr destination,
                                        uint8_t* trampoline_bytes,
                                        intptr_t trampoline_length) {
  commands_->Add(ImageWriterCommand(next_text_offset_, trampoline_bytes,
                                    trampoline_length));
  next_text_offset_ += trampoline_length;
}

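// Walks the static calls target table of [code] and, for every pc-relative
// call in it, either resolves the call immediately (if it is a backwards
// call in range) or queues it as an unresolved forward call.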
void CodeRelocator::ScanCallTargets(const Code& code,
                                    const Array& call_targets,
                                    intptr_t code_text_offset) {
  if (call_targets.IsNull()) {
    return;
  }
  StaticCallsTable calls(call_targets);
  for (auto call : calls) {
    kind_type_and_offset_ = call.Get<Code::kSCallTableKindAndOffset>();
    const auto kind = Code::KindField::decode(kind_type_and_offset_.Value());
    const auto return_pc_offset =
        Code::OffsetField::decode(kind_type_and_offset_.Value());
    const auto call_entry_point =
        Code::EntryPointField::decode(kind_type_and_offset_.Value());

    if (kind == Code::kCallViaCode) {
      continue;
    }

    destination_ = GetTarget(call);

    // A call site can decide not to jump to the beginning of a function but
    // rather into it at a certain offset.
    int32_t offset_into_target = 0;
    bool is_tail_call;
    intptr_t call_instruction_offset;
    if (kind == Code::kPcRelativeCall || kind == Code::kPcRelativeTTSCall) {
      call_instruction_offset =
          return_pc_offset - PcRelativeCallPattern::kLengthInBytes;
      PcRelativeCallPattern call(code.PayloadStart() + call_instruction_offset);
      ASSERT(call.IsValid());
      offset_into_target = call.distance();
      is_tail_call = false;
    } else {
      ASSERT(kind == Code::kPcRelativeTailCall);
      call_instruction_offset =
          return_pc_offset - PcRelativeTailCallPattern::kLengthInBytes;
      PcRelativeTailCallPattern call(code.PayloadStart() +
                                     call_instruction_offset);
      ASSERT(call.IsValid());
      offset_into_target = call.distance();
      is_tail_call = true;
    }

    const uword destination_payload = destination_.PayloadStart();
    const uword entry_point = call_entry_point == Code::kUncheckedEntry
                                  ? destination_.UncheckedEntryPoint()
                                  : destination_.EntryPoint();

    offset_into_target += (entry_point - destination_payload);

    const intptr_t text_offset =
        code_text_offset + AdjustPayloadOffset(call_instruction_offset);

    UnresolvedCall unresolved_call(code.ptr(), call_instruction_offset,
                                   text_offset, destination_.ptr(),
                                   offset_into_target, is_tail_call);
    if (!TryResolveBackwardsCall(&unresolved_call)) {
      EnqueueUnresolvedCall(new UnresolvedCall(unresolved_call));
    }
  }
}

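// Queues a forward call whose target has not been emitted yet, both in the
// global list and in the per-destination list.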
void CodeRelocator::EnqueueUnresolvedCall(UnresolvedCall* unresolved_call) {
  // Add it to the min-heap by .text offset.
  all_unresolved_calls_.Append(unresolved_call);

  // Add it to callers of destination.
  InstructionsPtr destination = Code::InstructionsOf(unresolved_call->callee);
  if (!unresolved_calls_by_destination_.HasKey(destination)) {
    unresolved_calls_by_destination_.Insert(
        {destination, new SameDestinationUnresolvedCallsList()});
  }
  unresolved_calls_by_destination_.LookupValue(destination)
      ->Append(unresolved_call);
}

void CodeRelocator::EnqueueUnresolvedTrampoline(
    UnresolvedTrampoline* unresolved_trampoline) {
  auto destination = Code::InstructionsOf(unresolved_trampoline->callee);
  auto entry = trampolines_by_destination_.Lookup(destination);

  UnresolvedTrampolineList* trampolines = nullptr;
  if (entry == nullptr) {
    trampolines = new UnresolvedTrampolineList();
    trampolines_by_destination_.Insert({destination, trampolines});
  } else {
    trampolines = entry->value;
  }
  trampolines->Append(unresolved_trampoline);
}

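// If the callee has already been emitted and is still within calling range,
// patches the call immediately and returns true; otherwise returns false.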
bool CodeRelocator::TryResolveBackwardsCall(UnresolvedCall* unresolved_call) {
  auto callee = Code::InstructionsOf(unresolved_call->callee);
  auto map_entry = text_offsets_.Lookup(callee);
  if (map_entry == nullptr) return false;

  if (IsTargetInRangeFor(unresolved_call, map_entry->value)) {
    ResolveCall(unresolved_call);
    return true;
  }
  return false;
}

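// Resolves and removes all queued forward calls whose target is the
// just-emitted [instructions] object.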
void CodeRelocator::ResolveUnresolvedCallsTargeting(
    const InstructionsPtr instructions) {
  if (unresolved_calls_by_destination_.HasKey(instructions)) {
    SameDestinationUnresolvedCallsList* calls =
        unresolved_calls_by_destination_.LookupValue(instructions);
    auto it = calls->Begin();
    while (it != calls->End()) {
      UnresolvedCall* unresolved_call = *it;
      ++it;
      ASSERT(Code::InstructionsOf(unresolved_call->callee) == instructions);
      ResolveCall(unresolved_call);

      // Remove the call from both lists.
      calls->Remove(unresolved_call);
      all_unresolved_calls_.Remove(unresolved_call);

      delete unresolved_call;
    }
    ASSERT(calls->IsEmpty());
    delete calls;
    bool ok = unresolved_calls_by_destination_.Remove(instructions);
    ASSERT(ok);
  }
}

void CodeRelocator::ResolveCall(UnresolvedCall* unresolved_call) {
  const intptr_t destination_text =
      FindDestinationInText(Code::InstructionsOf(unresolved_call->callee),
                            unresolved_call->offset_into_target);

  ResolveCallToDestination(unresolved_call, destination_text);
}

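// Patches the pc-relative call (or tail call) instruction in the caller so
// that it jumps to [destination_text], the resolved offset within the text
// segment.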
void CodeRelocator::ResolveCallToDestination(UnresolvedCall* unresolved_call,
                                             intptr_t destination_text) {
  const intptr_t call_text_offset = unresolved_call->text_offset;
  const intptr_t call_offset = unresolved_call->call_offset;

  const int32_t distance = destination_text - call_text_offset;
  {
    auto const caller = unresolved_call->caller;
    uword addr = Code::PayloadStartOf(caller) + call_offset;
    if (unresolved_call->is_tail_call) {
      PcRelativeTailCallPattern call(addr);
      ASSERT(call.IsValid());
      call.set_distance(static_cast<int32_t>(distance));
      ASSERT(call.distance() == distance);
    } else {
      PcRelativeCallPattern call(addr);
      ASSERT(call.IsValid());
      call.set_distance(static_cast<int32_t>(distance));
      ASSERT(call.distance() == distance);
    }
  }

  unresolved_call->caller = nullptr;
  unresolved_call->callee = nullptr;
}

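// Patches a trampoline's jump so that it lands on its final destination in
// the text segment.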
void CodeRelocator::ResolveTrampoline(
    UnresolvedTrampoline* unresolved_trampoline) {
  const intptr_t trampoline_text_offset = unresolved_trampoline->text_offset;
  const uword trampoline_start =
      reinterpret_cast<uword>(unresolved_trampoline->trampoline_bytes);

  auto callee = Code::InstructionsOf(unresolved_trampoline->callee);
  auto destination_text =
      FindDestinationInText(callee, unresolved_trampoline->offset_into_target);
  const int32_t distance = destination_text - trampoline_text_offset;

  PcRelativeTrampolineJumpPattern pattern(trampoline_start);
  pattern.Initialize();
  pattern.set_distance(distance);
  ASSERT(pattern.distance() == distance);
}

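// Returns true if a call at [unresolved_call]'s text offset can reach
// [target_text_offset] within the architecture's pc-relative (tail) call
// range.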
bool CodeRelocator::IsTargetInRangeFor(UnresolvedCall* unresolved_call,
                                       intptr_t target_text_offset) {
  const auto forward_distance =
      target_text_offset - unresolved_call->text_offset;
  if (unresolved_call->is_tail_call) {
    return TailCallDistanceLimits::Lower() <= forward_distance &&
           forward_distance <= TailCallDistanceLimits::Upper();
  } else {
    return CallDistanceLimits::Lower() <= forward_distance &&
           forward_distance <= CallDistanceLimits::Upper();
  }
}

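// Returns the [Code] object a static call table entry targets. For
// type-testing-stub calls the vm-isolate's default stubs are replaced with
// their isolate-specific equivalents, since pc-relative calls cannot reach
// the vm-isolate heap.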
CodePtr CodeRelocator::GetTarget(const StaticCallsTableEntry& call) {
  // The precompiler should have already replaced all function entries
  // with code entries.
  ASSERT(call.Get<Code::kSCallTableFunctionTarget>() == Function::null());

  target_ = call.Get<Code::kSCallTableCodeOrTypeTarget>();
  if (target_.IsAbstractType()) {
    target_ = AbstractType::Cast(target_).type_test_stub();
    destination_ = Code::Cast(target_).ptr();

    // The AssertAssignableInstr will emit pc-relative calls to the TTS iff
    // dst_type is instantiated. If we happened to not install an optimized
    // TTS but rather a default one, it will live in the vm-isolate (to
    // which we cannot make pc-relative calls).
    // Though we have "equivalent" isolate-specific stubs we can use as
    // targets instead.
    //
    // (We could make the AOT compiler install isolate-specific stubs
    // into the types directly, but that does not work for types which
    // live in the "vm-isolate" - such as `Type::dynamic_type()`).
    if (destination_.InVMIsolateHeap()) {
      auto object_store = thread_->isolate_group()->object_store();

      if (destination_.ptr() == StubCode::DefaultTypeTest().ptr()) {
        destination_ = object_store->default_tts_stub();
      } else if (destination_.ptr() ==
                 StubCode::DefaultNullableTypeTest().ptr()) {
        destination_ = object_store->default_nullable_tts_stub();
      } else if (destination_.ptr() == StubCode::TopTypeTypeTest().ptr()) {
        destination_ = object_store->top_type_tts_stub();
      } else if (destination_.ptr() == StubCode::UnreachableTypeTest().ptr()) {
        destination_ = object_store->unreachable_tts_stub();
      } else if (destination_.ptr() == StubCode::SlowTypeTest().ptr()) {
        destination_ = object_store->slow_tts_stub();
      } else if (destination_.ptr() ==
                 StubCode::NullableTypeParameterTypeTest().ptr()) {
        destination_ = object_store->nullable_type_parameter_tts_stub();
      } else if (destination_.ptr() ==
                 StubCode::TypeParameterTypeTest().ptr()) {
        destination_ = object_store->type_parameter_tts_stub();
      } else {
        UNREACHABLE();
      }
    }
  } else {
    ASSERT(target_.IsCode());
    destination_ = Code::Cast(target_).ptr();
  }
  ASSERT(!destination_.InVMIsolateHeap());
  return destination_.ptr();
}

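// Inspects the pending forward calls before the next [Code] object is
// emitted. If emitting it (plus worst-case trampoline space) could push a
// pending call out of range, redirects that call to an existing in-range
// trampoline or emits a new one at the current text offset.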
void CodeRelocator::BuildTrampolinesForAlmostOutOfRangeCalls(
    const Code& next_caller,
    const Array& next_caller_targets) {
  const bool all_functions_emitted = next_caller.IsNull();

  bool next_requires_alignment = false;
  uword next_size = 0;
  uword next_call_count = 0;
  if (!all_functions_emitted) {
    next_size = ImageWriter::SizeInSnapshot(next_caller.instructions());
    next_requires_alignment =
        Instructions::ShouldBeAligned(next_caller.instructions());
    if (!next_caller_targets.IsNull()) {
      StaticCallsTable calls(next_caller_targets);
      next_call_count = calls.Length();
    }
  }

  while (!all_unresolved_calls_.IsEmpty()) {
    UnresolvedCall* unresolved_call = all_unresolved_calls_.First();

    if (!all_functions_emitted) {
      // If we can emit another instructions object without causing the
      // unresolved forward calls to become out-of-range, we'll not resolve it
      // yet (maybe the target function will come very soon and we don't need
      // a trampoline at all).
      const intptr_t next_start =
          next_requires_alignment
              ? Utils::RoundUp(next_text_offset_, kPreferredLoopAlignment)
              : next_text_offset_;
      const intptr_t future_boundary =
          next_start + next_size +
          kTrampolineSize *
              (unresolved_calls_by_destination_.Length() + next_call_count - 1);
      if (IsTargetInRangeFor(unresolved_call, future_boundary) &&
          !FLAG_always_generate_trampolines_for_testing) {
        break;
      }
    }

    // We have a "critical" [unresolved_call] we have to resolve. If an
    // existing trampoline is in range, we use that; otherwise we create a new
    // trampoline.

    // In the worst case we'll make a new trampoline here, in which case the
    // current text offset must be in range for the "critical"
    // [unresolved_call].
    ASSERT(IsTargetInRangeFor(unresolved_call, next_text_offset_));

    // See if there is already a trampoline we could use.
    intptr_t trampoline_text_offset = -1;
    auto callee = Code::InstructionsOf(unresolved_call->callee);

    if (!FLAG_always_generate_trampolines_for_testing) {
      auto old_trampoline_entry = FindTrampolineFor(unresolved_call);
      if (old_trampoline_entry != nullptr) {
        trampoline_text_offset = old_trampoline_entry->text_offset;
      }
    }

    // If there is no trampoline yet, we'll create a new one.
    if (trampoline_text_offset == -1) {
      // The ownership of the trampoline bytes will be transferred to the
      // [ImageWriter], which will eventually write out the bytes and delete
      // the buffer.
      auto trampoline_bytes = new uint8_t[kTrampolineSize];
      ASSERT((kTrampolineSize % compiler::target::kWordSize) == 0);
      for (uint8_t* cur = trampoline_bytes;
           cur < trampoline_bytes + kTrampolineSize;
           cur += compiler::target::kWordSize) {
        *reinterpret_cast<compiler::target::uword*>(cur) =
            kBreakInstructionFiller;
      }
      auto unresolved_trampoline = new UnresolvedTrampoline{
          unresolved_call->callee,
          unresolved_call->offset_into_target,
          trampoline_bytes,
          next_text_offset_,
      };
      AddTrampolineToText(callee, trampoline_bytes, kTrampolineSize);
      EnqueueUnresolvedTrampoline(unresolved_trampoline);
      trampoline_text_offset = unresolved_trampoline->text_offset;
    }

    // Let the unresolved call to [destination] jump to the trampoline
    // instead.
    auto destination = Code::InstructionsOf(unresolved_call->callee);
    ResolveCallToDestination(unresolved_call, trampoline_text_offset);

    // Remove this unresolved call from the global list and the
    // per-destination list.
    auto calls = unresolved_calls_by_destination_.LookupValue(destination);
    calls->Remove(unresolved_call);
    all_unresolved_calls_.Remove(unresolved_call);
    delete unresolved_call;

    // If this destination no longer has any unresolved calls, remove it.
    if (calls->IsEmpty()) {
      unresolved_calls_by_destination_.Remove(destination);
      delete calls;
    }
  }
}

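// Maps an emitted [destination] instructions object plus an offset into its
// payload to an absolute offset within the text segment.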
intptr_t CodeRelocator::FindDestinationInText(const InstructionsPtr destination,
                                              intptr_t offset_into_target) {
  auto const destination_offset = text_offsets_.LookupValue(destination);
  return destination_offset + AdjustPayloadOffset(offset_into_target);
}

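// In precompiled mode the given payload offset is used as-is; otherwise the
// size of the Instructions object header is added first, since the payload
// does not start at the beginning of the object in that case.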
intptr_t CodeRelocator::AdjustPayloadOffset(intptr_t payload_offset) {
  if (FLAG_precompiled_mode) {
    return payload_offset;
  }
  return compiler::target::Instructions::HeaderSize() + payload_offset;
}

#endif  // defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)

}  // namespace dart