Flutter Engine
stub_code.cc
1// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/stub_code.h"
6
7#include "platform/assert.h"
8#include "platform/globals.h"
10#include "vm/flags.h"
11#include "vm/heap/safepoint.h"
12#include "vm/object_store.h"
13#include "vm/snapshot.h"
14#include "vm/virtual_memory.h"
15#include "vm/visitor.h"
16
17#if !defined(DART_PRECOMPILED_RUNTIME)
18#include "vm/compiler/aot/precompiler.h"
19#include "vm/compiler/assembler/disassembler.h"
20#endif // !defined(DART_PRECOMPILED_RUNTIME)
21
22namespace dart {
23
24DECLARE_FLAG(bool, precompiled_mode);
25
26StubCode::StubCodeEntry StubCode::entries_[kNumStubEntries] = {
27#if defined(DART_PRECOMPILED_RUNTIME)
28#define STUB_CODE_DECLARE(name) {nullptr, #name},
29#else
30#define STUB_CODE_DECLARE(name) \
31 {nullptr, #name, &compiler::StubCodeCompiler::Generate##name##Stub},
32#endif
33 VM_STUB_CODE_LIST(STUB_CODE_DECLARE)
34#undef STUB_CODE_DECLARE
35};
36AcqRelAtomic<bool> StubCode::initialized_ = {false};
37
38#if defined(DART_PRECOMPILED_RUNTIME)
39void StubCode::Init() {
40 // Stubs will be loaded from the snapshot.
42}
43
44#else
45
46void StubCode::Init() {
47 compiler::ObjectPoolBuilder object_pool_builder;
48
49 // Generate all the stubs.
50 for (size_t i = 0; i < ARRAY_SIZE(entries_); i++) {
51 entries_[i].code = Code::ReadOnlyHandle();
52 *(entries_[i].code) =
53 Generate(entries_[i].name, &object_pool_builder, entries_[i].generator);
54 }
55
56 const ObjectPool& object_pool =
57 ObjectPool::Handle(ObjectPool::NewFromBuilder(object_pool_builder));
58
59 for (size_t i = 0; i < ARRAY_SIZE(entries_); i++) {
60 entries_[i].code->set_object_pool(object_pool.ptr());
61 }
62
63 InitializationDone();
64
65#if defined(DART_PRECOMPILER)
66 {
67 // Set Function owner for UnknownDartCode stub so it pretends to
68 // be Dart code.
69 Zone* zone = Thread::Current()->zone();
70 const auto& signature = FunctionType::Handle(zone, FunctionType::New());
71 auto& owner = Object::Handle(zone);
72 owner = Object::void_class();
73 ASSERT(!owner.IsNull());
74 owner = Function::New(signature, Object::null_string(),
75 UntaggedFunction::kRegularFunction,
76 /*is_static=*/true,
77 /*is_const=*/false,
78 /*is_abstract=*/false,
79 /*is_external=*/false,
80 /*is_native=*/false, owner, TokenPosition::kNoSource);
81 StubCode::UnknownDartCode().set_owner(owner);
82 StubCode::UnknownDartCode().set_exception_handlers(
83 Object::empty_exception_handlers());
84 StubCode::UnknownDartCode().set_pc_descriptors(Object::empty_descriptors());
85 ASSERT(StubCode::UnknownDartCode().IsFunctionCode());
86 }
87#endif // defined(DART_PRECOMPILER)
88}
89
90#undef STUB_CODE_GENERATE
91#undef STUB_CODE_SET_OBJECT_POOL
92
93CodePtr StubCode::Generate(
94 const char* name,
95 compiler::ObjectPoolBuilder* object_pool_builder,
96 void (compiler::StubCodeCompiler::* GenerateStub)()) {
97 auto thread = Thread::Current();
98 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
99
100 compiler::Assembler assembler(object_pool_builder);
101 CompilerState compiler_state(thread, /*is_aot=*/FLAG_precompiled_mode,
102 /*is_optimizing=*/false);
103 Zone* zone = thread->zone();
104 auto* pc_descriptors_list = new (zone) DescriptorList(zone);
105 compiler::StubCodeCompiler stubCodeCompiler(&assembler, pc_descriptors_list);
106 (stubCodeCompiler.*GenerateStub)();
107 const Code& code = Code::Handle(
108 zone, Code::FinalizeCodeAndNotify(name, nullptr, &assembler,
109 Code::PoolAttachment::kNotAttachPool,
110 /*optimized=*/false));
111 const PcDescriptors& descriptors = PcDescriptors::Handle(
112 zone, pc_descriptors_list->FinalizePcDescriptors(code.PayloadStart()));
113 code.set_pc_descriptors(descriptors);
114
115#ifndef PRODUCT
116 if (FLAG_support_disassembler && FLAG_disassemble_stubs) {
117 Disassembler::DisassembleStub(name, code);
118 }
119#endif // !PRODUCT
120 return code.ptr();
121}
122#endif // defined(DART_PRECOMPILED_RUNTIME)
123
124void StubCode::Cleanup() {
125 initialized_.store(false, std::memory_order_release);
126
127 for (size_t i = 0; i < ARRAY_SIZE(entries_); i++) {
128 entries_[i].code = nullptr;
129 }
130}
131
132bool StubCode::InInvocationStub(uword pc) {
133 ASSERT(HasBeenInitialized());
134 uword entry = StubCode::InvokeDartCode().EntryPoint();
135 uword size = StubCode::InvokeDartCodeSize();
136 return (pc >= entry) && (pc < (entry + size));
137}
138
139bool StubCode::InJumpToFrameStub(uword pc) {
140 ASSERT(HasBeenInitialized());
141 uword entry = StubCode::JumpToFrame().EntryPoint();
142 uword size = StubCode::JumpToFrameSize();
143 return (pc >= entry) && (pc < (entry + size));
144}
145
146#if !defined(DART_PRECOMPILED_RUNTIME)
147ArrayPtr compiler::StubCodeCompiler::BuildStaticCallsTable(
148 Zone* zone,
149 compiler::UnresolvedPcRelativeCalls* unresolved_calls) {
150 if (unresolved_calls->length() == 0) {
151 return Array::null();
152 }
153 const intptr_t array_length =
154 unresolved_calls->length() * Code::kSCallTableEntryLength;
155 const auto& static_calls_table =
156 Array::Handle(zone, Array::New(array_length, Heap::kOld));
157 StaticCallsTable entries(static_calls_table);
158 auto& kind_type_and_offset = Smi::Handle(zone);
159 for (intptr_t i = 0; i < unresolved_calls->length(); i++) {
160 auto& unresolved_call = (*unresolved_calls)[i];
161 auto call_kind = unresolved_call->is_tail_call() ? Code::kPcRelativeTailCall
162 : Code::kPcRelativeCall;
163 kind_type_and_offset =
164 Smi::New(Code::KindField::encode(call_kind) |
165 Code::EntryPointField::encode(Code::kDefaultEntry) |
166 Code::OffsetField::encode(unresolved_call->offset()));
167 auto view = entries[i];
168 view.Set<Code::kSCallTableKindAndOffset>(kind_type_and_offset);
169 view.Set<Code::kSCallTableCodeOrTypeTarget>(unresolved_call->target());
170 }
171 return static_calls_table.ptr();
172}
173
174CodePtr StubCode::GetAllocationStubForClass(const Class& cls) {
175 Thread* thread = Thread::Current();
176 auto object_store = thread->isolate_group()->object_store();
177 Zone* zone = thread->zone();
178 const Error& error =
179 Error::Handle(zone, cls.EnsureIsAllocateFinalized(thread));
180 ASSERT(error.IsNull());
181 switch (cls.id()) {
182 case kArrayCid:
183 return object_store->allocate_array_stub();
184#if !defined(TARGET_ARCH_IA32)
185 case kGrowableObjectArrayCid:
186 return object_store->allocate_growable_array_stub();
187#endif // !defined(TARGET_ARCH_IA32)
188 case kContextCid:
189 return object_store->allocate_context_stub();
190 case kUnhandledExceptionCid:
191 return object_store->allocate_unhandled_exception_stub();
192 case kMintCid:
193 return object_store->allocate_mint_stub();
194 case kDoubleCid:
195 return object_store->allocate_double_stub();
196 case kFloat32x4Cid:
197 return object_store->allocate_float32x4_stub();
198 case kFloat64x2Cid:
199 return object_store->allocate_float64x2_stub();
200 case kInt32x4Cid:
201 return object_store->allocate_int32x4_stub();
202 case kClosureCid:
203 return object_store->allocate_closure_stub();
204 case kRecordCid:
205 return object_store->allocate_record_stub();
206 }
207 Code& stub = Code::Handle(zone, cls.allocation_stub());
208 if (stub.IsNull()) {
209 compiler::ObjectPoolBuilder object_pool_builder;
210 Precompiler* precompiler = Precompiler::Instance();
211
212 compiler::ObjectPoolBuilder* wrapper =
213 precompiler != nullptr ? precompiler->global_object_pool_builder()
214 : &object_pool_builder;
215
216 const auto pool_attachment = FLAG_precompiled_mode
217 ? Code::PoolAttachment::kNotAttachPool
218 : Code::PoolAttachment::kAttachPool;
219
220 auto zone = thread->zone();
221 auto object_store = thread->isolate_group()->object_store();
222 auto& allocate_object_stub = Code::ZoneHandle(zone);
223 auto& allocate_object_parametrized_stub = Code::ZoneHandle(zone);
224 if (FLAG_precompiled_mode) {
225 allocate_object_stub = object_store->allocate_object_stub();
226 allocate_object_parametrized_stub =
227 object_store->allocate_object_parametrized_stub();
228 }
229
230 compiler::Assembler assembler(wrapper);
231 CompilerState compiler_state(thread, /*is_aot=*/FLAG_precompiled_mode,
232 /*is_optimizing=*/false);
233 compiler::UnresolvedPcRelativeCalls unresolved_calls;
234 const char* name = cls.ToCString();
235 compiler::StubCodeCompiler stubCodeCompiler(&assembler, nullptr);
236 stubCodeCompiler.GenerateAllocationStubForClass(
237 &unresolved_calls, cls, allocate_object_stub,
238 allocate_object_parametrized_stub);
239
240 const auto& static_calls_table =
241 Array::Handle(zone, compiler::StubCodeCompiler::BuildStaticCallsTable(
242 zone, &unresolved_calls));
243
244 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
245
246 auto mutator_fun = [&]() {
247 stub = Code::FinalizeCode(nullptr, &assembler, pool_attachment,
248 /*optimized=*/false,
249 /*stats=*/nullptr);
250 // Check if some other thread has not already added the stub.
251 if (cls.allocation_stub() == Code::null()) {
252 stub.set_owner(cls);
253 if (!static_calls_table.IsNull()) {
254 stub.set_static_calls_target_table(static_calls_table);
255 }
256 cls.set_allocation_stub(stub);
257 }
258 };
259
260 // We have to ensure no mutators are running, because:
261 //
262 // a) We allocate an instructions object, which might cause us to
263 // temporarily flip page protections from (RX -> RW -> RX).
264 thread->isolate_group()->RunWithStoppedMutators(mutator_fun,
265 /*use_force_growth=*/true);
266
267 // We notify code observers after finalizing the code in order to be
268 // outside a [SafepointOperationScope].
269 Code::NotifyCodeObservers(name, stub, /*optimized=*/false);
270#ifndef PRODUCT
271 if (FLAG_support_disassembler && FLAG_disassemble_stubs) {
272 Disassembler::DisassembleStub(name, stub);
273 }
274#endif // !PRODUCT
275 }
276 return stub.ptr();
277}
278
279CodePtr StubCode::GetAllocationStubForTypedData(classid_t class_id) {
280 auto object_store = Thread::Current()->isolate_group()->object_store();
281 switch (class_id) {
282 case kTypedDataInt8ArrayCid:
283 return object_store->allocate_int8_array_stub();
284 case kTypedDataUint8ArrayCid:
285 return object_store->allocate_uint8_array_stub();
286 case kTypedDataUint8ClampedArrayCid:
287 return object_store->allocate_uint8_clamped_array_stub();
288 case kTypedDataInt16ArrayCid:
289 return object_store->allocate_int16_array_stub();
290 case kTypedDataUint16ArrayCid:
291 return object_store->allocate_uint16_array_stub();
292 case kTypedDataInt32ArrayCid:
293 return object_store->allocate_int32_array_stub();
294 case kTypedDataUint32ArrayCid:
295 return object_store->allocate_uint32_array_stub();
296 case kTypedDataInt64ArrayCid:
297 return object_store->allocate_int64_array_stub();
298 case kTypedDataUint64ArrayCid:
299 return object_store->allocate_uint64_array_stub();
300 case kTypedDataFloat32ArrayCid:
301 return object_store->allocate_float32_array_stub();
302 case kTypedDataFloat64ArrayCid:
303 return object_store->allocate_float64_array_stub();
304 case kTypedDataFloat32x4ArrayCid:
305 return object_store->allocate_float32x4_array_stub();
306 case kTypedDataInt32x4ArrayCid:
307 return object_store->allocate_int32x4_array_stub();
308 case kTypedDataFloat64x2ArrayCid:
309 return object_store->allocate_float64x2_array_stub();
310 }
311 UNREACHABLE();
312 return Code::null();
313}
314#endif // !defined(DART_PRECOMPILED_RUNTIME)
315
316const Code& StubCode::UnoptimizedStaticCallEntry(intptr_t num_args_tested) {
317 switch (num_args_tested) {
318 case 0:
319 return ZeroArgsUnoptimizedStaticCall();
320 case 1:
321 return OneArgUnoptimizedStaticCall();
322 case 2:
323 return TwoArgsUnoptimizedStaticCall();
324 default:
325 UNIMPLEMENTED();
326 return Code::Handle();
327 }
328}
329
330const char* StubCode::NameOfStub(uword entry_point) {
331 for (size_t i = 0; i < ARRAY_SIZE(entries_); i++) {
332 if ((entries_[i].code != nullptr) && !entries_[i].code->IsNull() &&
333 (entries_[i].code->EntryPoint() == entry_point)) {
334 return entries_[i].name;
335 }
336 }
337
338 auto object_store = IsolateGroup::Current()->object_store();
339
340#define MATCH(member, name) \
341 if (object_store->member() != Code::null() && \
342 entry_point == Code::EntryPointOf(object_store->member())) { \
343 return "_iso_stub_" #name "Stub"; \
344 }
345 OBJECT_STORE_STUB_CODE_LIST(MATCH)
346#undef MATCH
347 return nullptr;
348}
349
350} // namespace dart
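
The stub table in this file is built with an X-macro: VM_STUB_CODE_LIST expands STUB_CODE_DECLARE once per stub so that entries_ pairs each stub's name with its generator, Init() walks the table to generate every stub, and NameOfStub() walks it again to map an entry point back to a name. The sketch below shows that registration-table pattern in isolation; it is a minimal standalone example, and STUB_LIST, StubEntry, InitStubs, and the toy generators are hypothetical names used only for illustration, not Dart VM APIs.

#include <cstddef>
#include <cstdio>

// Hypothetical X-macro list standing in for VM_STUB_CODE_LIST(V).
#define STUB_LIST(V) \
  V(InvokeDartCode) \
  V(JumpToFrame) \
  V(AllocateArray)

struct StubEntry {
  const char* name;           // from the #name stringification
  void (*generator)();        // generator to run, as in the JIT branch of entries_
  unsigned long entry_point;  // filled in once the stub has been "generated"
};

// One trivial generator per stub, analogous to Generate##name##Stub.
#define DECLARE_GENERATOR(name) \
  static void Generate##name##Stub() { std::printf("generating %s\n", #name); }
STUB_LIST(DECLARE_GENERATOR)
#undef DECLARE_GENERATOR

// The registration table itself, mirroring StubCode::entries_.
#define STUB_DECLARE(name) {#name, &Generate##name##Stub, 0},
static StubEntry entries[] = {STUB_LIST(STUB_DECLARE)};
#undef STUB_DECLARE

static constexpr std::size_t kNumStubs = sizeof(entries) / sizeof(entries[0]);

// Analogous to StubCode::Init(): run each generator and record an entry point.
static void InitStubs() {
  for (std::size_t i = 0; i < kNumStubs; i++) {
    entries[i].generator();
    entries[i].entry_point = 0x1000 + 0x100 * i;  // fake addresses for the demo
  }
}

// Analogous to StubCode::NameOfStub(): reverse-map an entry point to a name.
static const char* NameOfStub(unsigned long entry_point) {
  for (std::size_t i = 0; i < kNumStubs; i++) {
    if (entries[i].entry_point == entry_point) return entries[i].name;
  }
  return nullptr;
}

int main() {
  InitStubs();
  std::printf("0x1100 belongs to %s\n", NameOfStub(0x1100));  // JumpToFrame
  return 0;
}

The value of the pattern is that the list of stubs is written exactly once: the table, the generator declarations, and the reverse lookup all stay in sync when a stub is added or removed.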