precompiler.cc
// Copyright (c) 2015, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/compiler/aot/precompiler.h"

#include <memory>

#include "platform/unicode.h"
#include "platform/utils.h"
#include "vm/canonical_tables.h"
#include "vm/class_finalizer.h"
#include "vm/code_patcher.h"
#include "vm/compiler/cha.h"
#include "vm/dart_entry.h"
#include "vm/exceptions.h"
#include "vm/flags.h"
#include "vm/hash_table.h"
#include "vm/isolate.h"
#include "vm/log.h"
#include "vm/longjump.h"
#include "vm/object.h"
#include "vm/object_store.h"
#include "vm/os.h"
#include "vm/parser.h"
#include "vm/program_visitor.h"
#include "vm/regexp_assembler.h"
#include "vm/regexp_parser.h"
#include "vm/resolver.h"
#include "vm/runtime_entry.h"
#include "vm/stack_trace.h"
#include "vm/symbols.h"
#include "vm/tags.h"
#include "vm/timeline.h"
#include "vm/timer.h"
#include "vm/version.h"
#include "vm/zone_text_buffer.h"

namespace dart {

#define T (thread())
#define IG (isolate_group())
#define Z (zone())

DEFINE_FLAG(bool,
            print_precompiler_timings,
            false,
            "Print per-phase breakdown of time spent precompiling");
DEFINE_FLAG(bool, print_unique_targets, false, "Print unique dynamic targets");
DEFINE_FLAG(charp,
            print_object_layout_to,
            nullptr,
            "Print layout of Dart objects to the given file");
DEFINE_FLAG(bool, trace_precompiler, false, "Trace precompiler.");
DEFINE_FLAG(
    int,
    max_speculative_inlining_attempts,
    1,
    "Max number of attempts with speculative inlining (precompilation only)");
DEFINE_FLAG(charp,
            write_retained_reasons_to,
            nullptr,
            "Print reasons for retaining objects to the given file");

DECLARE_FLAG(bool, print_flow_graph);
DECLARE_FLAG(bool, print_flow_graph_optimized);
DECLARE_FLAG(bool, trace_compiler);
DECLARE_FLAG(bool, trace_optimizing_compiler);
DECLARE_FLAG(bool, trace_bailout);
DECLARE_FLAG(bool, trace_failed_optimization_attempts);
DECLARE_FLAG(bool, trace_inlining_intervals);
DECLARE_FLAG(int, inlining_hotness);
DECLARE_FLAG(int, inlining_size_threshold);
DECLARE_FLAG(int, inlining_callee_size_threshold);
DECLARE_FLAG(int, inline_getters_setters_smaller_than);
DECLARE_FLAG(int, inlining_depth_threshold);
DECLARE_FLAG(int, inlining_caller_size_threshold);
DECLARE_FLAG(int, inlining_constant_arguments_max_size_threshold);
DECLARE_FLAG(int, inlining_constant_arguments_min_size_threshold);
DECLARE_FLAG(bool, print_instruction_stats);

Precompiler* Precompiler::singleton_ = nullptr;

#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)

// Reasons for retaining a given object.
struct RetainReasons : public AllStatic {
  // The LLVM pools are active and the object appears in one of them.
  static constexpr const char* kLLVMPool = "llvm pool";
  // The object is an invoke field dispatcher.
  static constexpr const char* kInvokeFieldDispatcher =
      "invoke field dispatcher";
  // The object is a dynamic invocation forwarder.
  static constexpr const char* kDynamicInvocationForwarder =
      "dynamic invocation forwarder";
  // The object is a method extractor.
  static constexpr const char* kMethodExtractor = "method extractor";
  // The object is for a compiled implicit closure.
  static constexpr const char* kImplicitClosure = "implicit closure";
  // The object is a local closure.
  static constexpr const char* kLocalClosure = "local closure";
  // The object is needed for async stack unwinding.
  static constexpr const char* kAsyncStackUnwinding =
      "needed for async stack unwinding";
  // The object is the initializer for a static field.
  static constexpr const char* kStaticFieldInitializer =
      "static field initializer";
  // The object is the initializer for an instance field.
  static constexpr const char* kInstanceFieldInitializer =
      "instance field initializer";
  // The object is the initializer for a late field.
  static constexpr const char* kLateFieldInitializer = "late field initializer";
  // The object is an implicit getter.
  static constexpr const char* kImplicitGetter = "implicit getter";
  // The object is an implicit setter.
  static constexpr const char* kImplicitSetter = "implicit setter";
  // The object is an implicit static getter.
  static constexpr const char* kImplicitStaticGetter = "implicit static getter";
  // The object is a function that is called through a getter method.
  static constexpr const char* kCalledThroughGetter = "called through getter";
  // The object is a function that is called via selector.
  static constexpr const char* kCalledViaSelector = "called via selector";
  // The object is a function and the flag --retain-function-objects is enabled.
  static constexpr const char* kForcedRetain = "forced via flag";
  // The object is a function and symbolic stack traces are enabled.
  static constexpr const char* kSymbolicStackTraces =
      "needed for symbolic stack traces";
  // The object is a parent function of a non-inlined local function.
  static constexpr const char* kLocalParent = "parent of a local function";
  // The object is the main function of the root library.
  static constexpr const char* kMainFunction =
      "main function of the root library";
  // The object has an entry point pragma that requires it be retained.
  static constexpr const char* kEntryPointPragma = "entry point pragma";
  // The function is a target of an FFI callback.
  static constexpr const char* kFfiCallbackTarget = "ffi callback target";
  // The signature is used in a closure function.
  static constexpr const char* kClosureSignature = "closure signature";
  // The signature is used in an FFI trampoline.
  static constexpr const char* kFfiTrampolineSignature =
      "FFI trampoline signature";
  // The signature is used in a native function.
  static constexpr const char* kNativeSignature = "native function signature";
  // The signature has required named parameters.
  static constexpr const char* kRequiredNamedParameters =
      "signature has required named parameters";
  // The signature is used in a function that has dynamic calls.
  static constexpr const char* kDynamicallyCalledSignature =
      "signature of dynamically called function";
  // The signature is used in a function with an entry point pragma.
  static constexpr const char* kEntryPointPragmaSignature =
      "signature of entry point function";
};

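// Collects the reasons recorded for retaining objects during precompilation
// and emits them as a JSON array (one entry per retained or dropped object)
// to the file named by --write_retained_reasons_to.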
class RetainedReasonsWriter : public ValueObject {
 public:
  explicit RetainedReasonsWriter(Zone* zone)
      : zone_(zone), retained_reasons_map_(zone) {}

  bool Init(const char* filename) {
    if (filename == nullptr) return false;

    if ((Dart::file_write_callback() == nullptr) ||
        (Dart::file_open_callback() == nullptr) ||
        (Dart::file_close_callback() == nullptr)) {
      OS::PrintErr("warning: Could not access file callbacks.");
      return false;
    }

    void* file = Dart::file_open_callback()(filename, /*write=*/true);
    if (file == nullptr) {
      OS::PrintErr("warning: Failed to write retained reasons: %s\n", filename);
      return false;
    }

    file_ = file;
    // We open the array here so that we can also print some objects to the
    // JSON as we go, instead of requiring all information be collected
    // and printed at one point. This avoids having to keep otherwise
    // unneeded information around.
    writer_.OpenArray();
    return true;
  }

  void AddDropped(const Object& obj) {
    if (HasReason(obj)) {
      FATAL("dropped object has reasons to retain");
    }
    writer_.OpenObject();
    WriteRetainedObjectSpecificFields(obj);
    writer_.PrintPropertyBool("retained", false);
    writer_.CloseObject();
  }

  bool HasReason(const Object& obj) const {
    return retained_reasons_map_.HasKey(&obj);
  }

  void AddReason(const Object& obj, const char* reason) {
    if (auto const kv = retained_reasons_map_.Lookup(&obj)) {
      if (kv->value->Lookup(reason) == nullptr) {
        kv->value->Insert(reason);
      }
      return;
    }
    auto const key = &Object::ZoneHandle(zone_, obj.ptr());
    auto const value = new (zone_) ZoneCStringSet(zone_);
    value->Insert(reason);
    retained_reasons_map_.Insert(RetainedReasonsTrait::Pair(key, value));
  }

  // Finalizes the JSON output and writes it.
  void Write() {
    if (file_ == nullptr) return;

    // Add all the objects for which we have reasons to retain.
    auto it = retained_reasons_map_.GetIterator();

    for (auto kv = it.Next(); kv != nullptr; kv = it.Next()) {
      writer_.OpenObject();
      WriteRetainedObjectSpecificFields(*kv->key);
      writer_.PrintPropertyBool("retained", true);

      writer_.OpenArray("reasons");
      auto it = kv->value->GetIterator();
      for (auto cstrp = it.Next(); cstrp != nullptr; cstrp = it.Next()) {
        ASSERT(*cstrp != nullptr);
        writer_.PrintValue(*cstrp);
      }
      writer_.CloseArray();

      writer_.CloseObject();
    }

    writer_.CloseArray();
    char* output = nullptr;
    intptr_t length = -1;
    writer_.Steal(&output, &length);

    if (const auto file_write = Dart::file_write_callback()) {
      file_write(output, length, file_);
    }

    if (const auto file_close = Dart::file_close_callback()) {
      file_close(file_);
    }

    free(output);
  }

 private:
  struct RetainedReasonsTrait {
    using Key = const Object*;
    using Value = ZoneCStringSet*;

    struct Pair {
      Key key;
      Value value;

      Pair() : key(nullptr), value(nullptr) {}
      Pair(Key key, Value value) : key(key), value(value) {}
    };

    static Key KeyOf(Pair kv) { return kv.key; }

    static Value ValueOf(Pair kv) { return kv.value; }

    static inline uword Hash(Key key) {
      if (key->IsFunction()) {
        return Function::Cast(*key).Hash();
      }
      if (key->IsClass()) {
        return Utils::WordHash(Class::Cast(*key).id());
      }
      if (key->IsAbstractType()) {
        return AbstractType::Cast(*key).Hash();
      }
      return Utils::WordHash(key->GetClassId());
    }

    static inline bool IsKeyEqual(Pair pair, Key key) {
      return pair.key->ptr() == key->ptr();
    }
  };

  using RetainedReasonsMap = DirectChainedHashMap<RetainedReasonsTrait>;

  void WriteRetainedObjectSpecificFields(const Object& obj) {
    if (obj.IsFunction()) {
      writer_.PrintProperty("type", "Function");
      const auto& function = Function::Cast(obj);
      writer_.PrintProperty("name",
                            function.ToLibNamePrefixedQualifiedCString());
      writer_.PrintProperty("kind",
                            UntaggedFunction::KindToCString(function.kind()));
      return;
    } else if (obj.IsFunctionType()) {
      writer_.PrintProperty("type", "FunctionType");
      const auto& sig = FunctionType::Cast(obj);
      writer_.PrintProperty("name", sig.ToCString());
      return;
    }
    FATAL("Unexpected object %s", obj.ToCString());
  }

  Zone* const zone_;
  RetainedReasonsMap retained_reasons_map_;
  JSONWriter writer_;
  void* file_;
};

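// Helper that runs a single parsed function through the AOT compilation
// pipeline and installs the resulting code on success.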
class PrecompileParsedFunctionHelper : public ValueObject {
 public:
  PrecompileParsedFunctionHelper(Precompiler* precompiler,
                                 ParsedFunction* parsed_function,
                                 bool optimized)
      : precompiler_(precompiler),
        parsed_function_(parsed_function),
        optimized_(optimized),
        thread_(Thread::Current()) {}

  bool Compile(CompilationPipeline* pipeline);

 private:
  ParsedFunction* parsed_function() const { return parsed_function_; }
  bool optimized() const { return optimized_; }
  Thread* thread() const { return thread_; }

  void FinalizeCompilation(compiler::Assembler* assembler,
                           FlowGraphCompiler* graph_compiler,
                           FlowGraph* flow_graph,
                           CodeStatistics* stats);

  Precompiler* precompiler_;
  ParsedFunction* parsed_function_;
  const bool optimized_;
  Thread* const thread_;

  DISALLOW_COPY_AND_ASSIGN(PrecompileParsedFunctionHelper);
};

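// Unwinds to the LongJumpScope set up in CompileAll, carrying [error].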
static void Jump(const Error& error) {
  Thread::Current()->long_jump_base()->Jump(1, error);
}

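// Entry point for AOT compilation. Runs the full precompilation pipeline
// under a LongJumpScope; errors raised via Jump() above are returned to the
// caller as the thread's sticky error.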
ErrorPtr Precompiler::CompileAll() {
  LongJumpScope jump;
  if (setjmp(*jump.Set()) == 0) {
    Precompiler precompiler(Thread::Current());
    precompiler.DoCompileAll();
    precompiler.ReportStats();
    return Error::null();
  } else {
    return Thread::Current()->StealStickyError();
  }
}

void Precompiler::ReportStats() {
  if (!FLAG_print_precompiler_timings) {
    return;
  }

  thread()->compiler_timings()->Print();
}

Precompiler::Precompiler(Thread* thread)
    : thread_(thread),
      zone_(nullptr),
      changed_(false),
      retain_root_library_caches_(false),
      function_count_(0),
      class_count_(0),
      selector_count_(0),
      dropped_function_count_(0),
      dropped_field_count_(0),
      dropped_class_count_(0),
      dropped_typearg_count_(0),
      dropped_type_count_(0),
      dropped_functiontype_count_(0),
      dropped_typeparam_count_(0),
      dropped_library_count_(0),
      dropped_constants_arrays_entries_count_(0),
      libraries_(GrowableObjectArray::Handle(
          thread->isolate_group()->object_store()->libraries())),
      pending_functions_(
          GrowableObjectArray::Handle(GrowableObjectArray::New())),
      sent_selectors_(),
      functions_called_dynamically_(
          HashTables::New<FunctionSet>(/*initial_capacity=*/1024)),
      functions_with_entry_point_pragmas_(
          HashTables::New<FunctionSet>(/*initial_capacity=*/1024)),
      seen_functions_(HashTables::New<FunctionSet>(/*initial_capacity=*/1024)),
      possibly_retained_functions_(
          HashTables::New<FunctionSet>(/*initial_capacity=*/1024)),
      fields_to_retain_(),
      functions_to_retain_(
          HashTables::New<FunctionSet>(/*initial_capacity=*/1024)),
      classes_to_retain_(),
      typeargs_to_retain_(),
      types_to_retain_(),
      functiontypes_to_retain_(),
      typeparams_to_retain_(),
      consts_to_retain_(),
      seen_table_selectors_(),
      api_uses_(),
      error_(Error::Handle()),
      get_runtime_type_is_unique_(false) {
  ASSERT(Precompiler::singleton_ == nullptr);
  Precompiler::singleton_ = this;

  if (FLAG_print_precompiler_timings) {
    thread->set_compiler_timings(new CompilerTimings());
  }
}

Precompiler::~Precompiler() {
  // We have to call Release() in DEBUG mode.
  functions_called_dynamically_.Release();
  functions_with_entry_point_pragmas_.Release();
  seen_functions_.Release();
  possibly_retained_functions_.Release();
  functions_to_retain_.Release();

  ASSERT(Precompiler::singleton_ == this);
  Precompiler::singleton_ = nullptr;

  delete thread()->compiler_timings();
  thread()->set_compiler_timings(nullptr);
}

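// The main precompilation driver: finalizes the class hierarchy, seeds the
// root set (main, entry-point pragmas, C++-visible roots), compiles to a
// fixed point, and then drops every function, field, class, and library
// that was never reached.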
void Precompiler::DoCompileAll() {
  PRECOMPILER_TIMER_SCOPE(this, CompileAll);
  {
    StackZone stack_zone(T);
    zone_ = stack_zone.GetZone();
    RetainedReasonsWriter reasons_writer(zone_);

    if (reasons_writer.Init(FLAG_write_retained_reasons_to)) {
      retained_reasons_writer_ = &reasons_writer;
    }

    // Since we keep the object pool until the end of AOT compilation, it
    // will hang on to its entries until the very end. Therefore we have
    // to use handles which survive that long, so we use [zone_] here.
    global_object_pool_builder_.InitializeWithZone(zone_);

    {
      HANDLESCOPE(T);

      // Make sure class hierarchy is stable before compilation so that CHA
      // can be used. Also ensures lookup of entry points won't miss functions
      // because their class hasn't been finalized yet.
      FinalizeAllClasses();
      ASSERT(Error::Handle(Z, T->sticky_error()).IsNull());

      if (FLAG_print_object_layout_to != nullptr) {
        IG->class_table()->PrintObjectLayout(FLAG_print_object_layout_to);
      }

      ClassFinalizer::SortClasses();

      // Collects type usage information which allows us to decide when/how to
      // optimize runtime type tests.
      TypeUsageInfo type_usage_info(T);

      // The cid-ranges of subclasses of a class are e.g. used for is/as checks
      // as well as other type checks.
      HierarchyInfo hierarchy_info(T);

      dispatch_table_generator_ = new compiler::DispatchTableGenerator(Z);
      dispatch_table_generator_->Initialize(IG->class_table());

      // After finding all code, and before starting to trace, populate the
      // assets map.

      // Precompile constructors to compute information such as
      // optimized instruction count (used in inlining heuristics).
      ClassFinalizer::ClearAllCode(
          /*including_nonchanging_cids=*/true);

      {
        CompilerState state(thread_, /*is_aot=*/true, /*is_optimizing=*/true);
        PrecompileConstructors();
      }

      ClassFinalizer::ClearAllCode(
          /*including_nonchanging_cids=*/true);

      tracer_ = PrecompilerTracer::StartTracingIfRequested(this);

      // All stubs have already been generated, and all of them share the same
      // pool. We use that pool to initialize our global object pool, to
      // guarantee stubs as well as code compiled from here on will have the
      // same pool.
      {
        // We use any stub here to get its object pool (all stubs share the
        // same object pool in bare instructions mode).
        const Code& code = StubCode::LazyCompile();
        const ObjectPool& stub_pool = ObjectPool::Handle(code.object_pool());

        global_object_pool_builder()->Reset();
        stub_pool.CopyInto(global_object_pool_builder());

        // We have various stubs we would like to generate inside the isolate,
        // to ensure the rest of the AOT compilation will use the
        // isolate-specific stubs (callable via pc-relative calls).
        auto& stub_code = Code::Handle();
#define DO(member, name)                                                       \
  stub_code = StubCode::BuildIsolateSpecific##name##Stub(                      \
      global_object_pool_builder());                                           \
  IG->object_store()->set_##member(stub_code);
        OBJECT_STORE_STUB_CODE_LIST(DO)
#undef DO
      }

      CollectDynamicFunctionNames();

      // Start with the allocations and invocations that happen from C++.
      {
        TracingScope scope(this);
        AddRoots();
        AddAnnotatedRoots();
      }

      // With the nnbd experiment enabled, these non-nullable type arguments
      // may not be retained, although they will be used and expected to be
      // canonical by Dart_NewListOfType.
      AddTypeArguments(
          TypeArguments::Handle(Z, IG->object_store()->type_argument_int()));
      AddTypeArguments(
          TypeArguments::Handle(Z, IG->object_store()->type_argument_double()));
      AddTypeArguments(
          TypeArguments::Handle(Z, IG->object_store()->type_argument_string()));
      AddTypeArguments(TypeArguments::Handle(
          Z, IG->object_store()->type_argument_string_dynamic()));
      AddTypeArguments(TypeArguments::Handle(
          Z, IG->object_store()->type_argument_string_string()));

      // Compile newly found targets and add their callees until we reach a
      // fixed point.
      Iterate();

      // Replace the default type testing stubs installed on [Type]s with new
      // [Type]-specialized stubs.
      AttachOptimizedTypeTestingStub();

      {
        // Now we generate the actual object pool instance and attach it to the
        // object store. The AOT runtime will use it from there in the enter
        // dart code stub.
        const auto& pool = ObjectPool::Handle(
            ObjectPool::NewFromBuilder(*global_object_pool_builder()));
        IG->object_store()->set_global_object_pool(pool);
        global_object_pool_builder()->Reset();

        if (FLAG_disassemble) {
          THR_Print("Global object pool:\n");
          pool.DebugPrint();
        }
      }

      if (tracer_ != nullptr) {
        tracer_->Finalize();
        tracer_ = nullptr;
      }

      {
        PRECOMPILER_TIMER_SCOPE(this, TraceForRetainedFunctions);
        TraceForRetainedFunctions();
      }

      FinalizeDispatchTable();
      ReplaceFunctionStaticCallEntries();

      {
        PRECOMPILER_TIMER_SCOPE(this, Drop);

        DropFunctions();
        DropFields();
        DropTransitiveUserDefinedConstants();
        TraceTypesFromRetainedClasses();

        // Clear these before dropping classes as they may hold onto otherwise
        // dead instances of classes we will remove or otherwise unused symbols.
        IG->object_store()->set_unique_dynamic_targets(Array::null_array());
        Library& null_library = Library::Handle(Z);
        Class& null_class = Class::Handle(Z);
        Function& null_function = Function::Handle(Z);
        Field& null_field = Field::Handle(Z);
        IG->object_store()->set_pragma_class(null_class);
        IG->object_store()->set_pragma_name(null_field);
        IG->object_store()->set_pragma_options(null_field);
        IG->object_store()->set_compiletime_error_class(null_class);
        IG->object_store()->set_growable_list_factory(null_function);
        IG->object_store()->set_simple_instance_of_function(null_function);
        IG->object_store()->set_simple_instance_of_true_function(null_function);
        IG->object_store()->set_simple_instance_of_false_function(
            null_function);
        IG->object_store()->set_async_star_stream_controller(null_class);
        IG->object_store()->set_native_assets_library(null_library);
        DropMetadata();
        DropLibraryEntries();
      }
    }

    {
      PRECOMPILER_TIMER_SCOPE(this, Drop);
      DropClasses();
      DropLibraries();
    }

    {
      PRECOMPILER_TIMER_SCOPE(this, Obfuscate);
      Obfuscate();
    }

#if defined(DEBUG)
    const auto& non_visited =
        Function::Handle(Z, FindUnvisitedRetainedFunction());
    if (!non_visited.IsNull()) {
      FATAL("Code visitor would miss the code for function \"%s\"\n",
            non_visited.ToFullyQualifiedCString());
    }
#endif
    DiscardCodeObjects();

    {
      PRECOMPILER_TIMER_SCOPE(this, Dedup);
      ProgramVisitor::Dedup(T);
    }

    PruneDictionaries();

    if (retained_reasons_writer_ != nullptr) {
      reasons_writer.Write();
      retained_reasons_writer_ = nullptr;
    }

    zone_ = nullptr;
  }

  intptr_t symbols_before = -1;
  intptr_t symbols_after = -1;
  intptr_t capacity = -1;
  if (FLAG_trace_precompiler) {
    Symbols::GetStats(IG, &symbols_before, &capacity);
  }

  if (FLAG_trace_precompiler) {
    Symbols::GetStats(IG, &symbols_after, &capacity);
    THR_Print("Precompiled %" Pd " functions,", function_count_);
    THR_Print(" %" Pd " dynamic types,", class_count_);
    THR_Print(" %" Pd " dynamic selectors.\n", selector_count_);

    THR_Print("Dropped %" Pd " functions,", dropped_function_count_);
    THR_Print(" %" Pd " fields,", dropped_field_count_);
    THR_Print(" %" Pd " symbols,", symbols_before - symbols_after);
    THR_Print(" %" Pd " types,", dropped_type_count_);
    THR_Print(" %" Pd " function types,", dropped_functiontype_count_);
    THR_Print(" %" Pd " type parameters,", dropped_typeparam_count_);
    THR_Print(" %" Pd " type arguments,", dropped_typearg_count_);
    THR_Print(" %" Pd " classes,", dropped_class_count_);
    THR_Print(" %" Pd " libraries,", dropped_library_count_);
    THR_Print(" %" Pd " constants arrays entries.\n",
              dropped_constants_arrays_entries_count_);
  }
}

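// Compiles all generative constructors up front so that later inlining
// heuristics can use their optimized instruction counts and the type
// information collected for final fields; the code is cleared again
// afterwards.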
void Precompiler::PrecompileConstructors() {
  PRECOMPILER_TIMER_SCOPE(this, PrecompileConstructors);
  class ConstructorVisitor : public FunctionVisitor {
   public:
    explicit ConstructorVisitor(Precompiler* precompiler, Zone* zone)
        : precompiler_(precompiler), zone_(zone) {}
    void VisitFunction(const Function& function) {
      if (!function.IsGenerativeConstructor()) return;
      if (function.HasCode()) {
        // Const constructors may have been visited before. Recompile them here
        // to collect type information for final fields for them as well.
        function.ClearCode();
      }
      if (FLAG_trace_precompiler) {
        THR_Print("Precompiling constructor %s\n", function.ToCString());
      }
      ASSERT(Class::Handle(zone_, function.Owner()).is_finalized());
      CompileFunction(precompiler_, Thread::Current(), zone_, function);
    }

   private:
    Precompiler* precompiler_;
    Zone* zone_;
  };

  phase_ = Phase::kCompilingConstructorsForInstructionCounts;
  HANDLESCOPE(T);
  ConstructorVisitor visitor(this, Z);
  ProgramVisitor::WalkProgram(Z, IG, &visitor);
  phase_ = Phase::kPreparation;
}

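// Seeds the worklists with the roots reachable from C++: the noSuchMethod
// and call selectors plus the root library's main function (and its
// implicit closure).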
void Precompiler::AddRoots() {
  HANDLESCOPE(T);
  AddSelector(Symbols::NoSuchMethod());
  AddSelector(Symbols::call());  // For speed, not correctness.

  // Add main as an entry point.
  const Library& lib = Library::Handle(IG->object_store()->root_library());
  if (lib.IsNull()) {
    const String& msg = String::Handle(
        Z, String::New("Cannot find root library in isolate.\n"));
    Jump(Error::Handle(Z, ApiError::New(msg)));
    UNREACHABLE();
  }

  const String& name = String::Handle(String::New("main"));
  Function& main = Function::Handle(lib.LookupFunctionAllowPrivate(name));
  if (main.IsNull()) {
    const Object& obj = Object::Handle(lib.LookupReExport(name));
    if (obj.IsFunction()) {
      main ^= obj.ptr();
    }
  }
  if (!main.IsNull()) {
    AddApiUse(main);
    if (lib.LookupFunctionAllowPrivate(name) == Function::null()) {
      retain_root_library_caches_ = true;
    }
    AddRetainReason(main, RetainReasons::kMainFunction);
    AddTypesOf(main);
    // Create closure object from main.
    main = main.ImplicitClosureFunction();
    AddConstObject(Closure::Handle(main.ImplicitStaticClosure()));
  } else {
    String& msg = String::Handle(
        Z, String::NewFormatted("Cannot find main in library %s\n",
                                lib.ToCString()));
    Jump(Error::Handle(Z, ApiError::New(msg)));
    UNREACHABLE();
  }
}

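// The fixed-point loop at the heart of the tree shaker: compiles pending
// functions, which may discover new selectors and allocated classes, until
// an iteration completes without any change.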
void Precompiler::Iterate() {
  PRECOMPILER_TIMER_SCOPE(this, Iterate);

  Function& function = Function::Handle(Z);

  phase_ = Phase::kFixpointCodeGeneration;
  while (changed_) {
    changed_ = false;

    while (pending_functions_.Length() > 0) {
      function ^= pending_functions_.RemoveLast();
      ProcessFunction(function);
    }

    CheckForNewDynamicFunctions();
    CollectCallbackFields();
  }
  phase_ = Phase::kDone;
}

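// For each instance field of function type whose getter selector has been
// sent, adds invoke-field dispatchers to every allocated concrete subclass,
// since the stored closure may be invoked like a method.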
void Precompiler::CollectCallbackFields() {
  PRECOMPILER_TIMER_SCOPE(this, CollectCallbackFields);
  HANDLESCOPE(T);
  Library& lib = Library::Handle(Z);
  Class& cls = Class::Handle(Z);
  Class& subcls = Class::Handle(Z);
  Array& fields = Array::Handle(Z);
  Field& field = Field::Handle(Z);
  FunctionType& signature = FunctionType::Handle(Z);
  Function& dispatcher = Function::Handle(Z);
  Array& args_desc = Array::Handle(Z);
  AbstractType& field_type = AbstractType::Handle(Z);
  String& field_name = String::Handle(Z);
  GrowableArray<intptr_t> cids;

  for (intptr_t i = 0; i < libraries_.Length(); i++) {
    lib ^= libraries_.At(i);
    HANDLESCOPE(T);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();

      if (!cls.is_allocated()) continue;

      fields = cls.fields();
      for (intptr_t k = 0; k < fields.Length(); k++) {
        field ^= fields.At(k);
        if (field.is_static()) continue;
        field_type = field.type();
        if (!field_type.IsFunctionType()) continue;
        field_name = field.name();
        if (!IsSent(field_name)) continue;
        // Create arguments descriptor with fixed parameters from
        // signature of field_type.
        signature ^= field_type.ptr();
        if (signature.IsGeneric()) continue;
        if (signature.HasOptionalParameters()) continue;
        if (FLAG_trace_precompiler) {
          THR_Print("Found callback field %s\n", field_name.ToCString());
        }

        // TODO(dartbug.com/33549): Update this code to use the size of the
        // parameters when supporting calls to non-static methods with
        // unboxed parameters.
        args_desc =
            ArgumentsDescriptor::NewBoxed(0,  // No type argument vector.
                                          signature.num_fixed_parameters());
        cids.Clear();
        if (CHA::ConcreteSubclasses(cls, &cids)) {
          for (intptr_t j = 0; j < cids.length(); ++j) {
            subcls = IG->class_table()->At(cids[j]);
            if (subcls.is_allocated()) {
              // Add dispatcher to cls.
              dispatcher = subcls.GetInvocationDispatcher(
                  field_name, args_desc,
                  UntaggedFunction::kInvokeFieldDispatcher,
                  /* create_if_absent = */ true);
              if (FLAG_trace_precompiler) {
                THR_Print("Added invoke-field-dispatcher for %s to %s\n",
                          field_name.ToCString(), subcls.ToCString());
              }
              AddFunction(dispatcher, RetainReasons::kInvokeFieldDispatcher);
            }
          }
        }
      }
    }
  }
}

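// Compiles one discovered function and then scans its generated code for
// further callees.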
void Precompiler::ProcessFunction(const Function& function) {
  HANDLESCOPE(T);
  const intptr_t gop_offset = global_object_pool_builder()->CurrentLength();
  RELEASE_ASSERT(!function.HasCode());
  // Ffi trampoline functions have no signature.
  ASSERT(function.kind() == UntaggedFunction::kFfiTrampoline ||
         FunctionType::Handle(Z, function.signature()).IsFinalized());

  TracingScope tracing_scope(this);
  function_count_++;

  if (FLAG_trace_precompiler) {
    THR_Print("Precompiling %" Pd " %s (%s, %s)\n", function_count_,
              function.ToLibNamePrefixedQualifiedCString(),
              function.token_pos().ToCString(),
              Function::KindToCString(function.kind()));
  }

  ASSERT(!function.is_abstract());

  error_ = CompileFunction(this, thread_, zone_, function);
  if (!error_.IsNull()) {
    Jump(error_);
  }

  // Used in the JIT to save type-feedback across compilations.
  function.ClearICDataArray();
  AddCalleesOf(function, gop_offset);
}

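// Walks everything the newly generated code for [function] references
// (static call targets, handled exception types, object pool entries added
// since [gop_offset], and inlined functions) and adds it to the worklists.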
void Precompiler::AddCalleesOf(const Function& function, intptr_t gop_offset) {
  PRECOMPILER_TIMER_SCOPE(this, AddCalleesOf);
  ASSERT(function.HasCode());

  const Code& code = Code::Handle(Z, function.CurrentCode());

  Object& entry = Object::Handle(Z);
  Class& cls = Class::Handle(Z);
  Function& target = Function::Handle(Z);

  const Array& table = Array::Handle(Z, code.static_calls_target_table());
  StaticCallsTable static_calls(table);
  for (auto& view : static_calls) {
    entry = view.Get<Code::kSCallTableFunctionTarget>();
    if (entry.IsFunction()) {
      // Since generally function objects are retained when symbolic stack
      // traces are enabled, only return kForcedRetain to mark that retention
      // was otherwise forced.
      const char* const reason =
          FLAG_retain_function_objects
              ? (!FLAG_dwarf_stack_traces_mode
                     ? RetainReasons::kSymbolicStackTraces
                     : RetainReasons::kForcedRetain)
              : nullptr;
      AddFunction(Function::Cast(entry), reason);
      ASSERT(view.Get<Code::kSCallTableCodeOrTypeTarget>() == Code::null());
      continue;
    }
    entry = view.Get<Code::kSCallTableCodeOrTypeTarget>();
    if (entry.IsCode() && Code::Cast(entry).IsAllocationStubCode()) {
      cls ^= Code::Cast(entry).owner();
      AddInstantiatedClass(cls);
    }
  }

  const ExceptionHandlers& handlers =
      ExceptionHandlers::Handle(Z, code.exception_handlers());
  if (!handlers.IsNull()) {
#if defined(PRODUCT)
    // List of handled types is only used by debugger and
    // can be removed in PRODUCT mode.
    for (intptr_t i = 0; i < handlers.num_entries(); i++) {
      handlers.SetHandledTypes(i, Array::empty_array());
    }
#else
    Array& types = Array::Handle(Z);
    AbstractType& type = AbstractType::Handle(Z);
    for (intptr_t i = 0; i < handlers.num_entries(); i++) {
      types = handlers.GetHandledTypes(i);
      for (intptr_t j = 0; j < types.Length(); j++) {
        type ^= types.At(j);
        AddType(type);
      }
    }
#endif  // defined(PRODUCT)
  }

#if defined(TARGET_ARCH_IA32)
  FATAL("Callee scanning unimplemented for IA32");
#endif

  String& selector = String::Handle(Z);
  // When tracing we want to scan the object pool attached to the code object
  // rather than scanning the global object pool, because we want to include
  // *all* outgoing references in the trace. Scanning the GOP would exclude
  // references that have been deduplicated.
  if (!is_tracing()) {
    for (intptr_t i = gop_offset;
         i < global_object_pool_builder()->CurrentLength(); i++) {
      const auto& wrapper_entry = global_object_pool_builder()->EntryAt(i);
      if (wrapper_entry.type() ==
          compiler::ObjectPoolBuilderEntry::kTaggedObject) {
        const auto& entry = *wrapper_entry.obj_;
        AddCalleesOfHelper(entry, &selector, &cls);
      }
    }
  } else {
    const auto& pool = ObjectPool::Handle(Z, code.object_pool());
    auto& entry = Object::Handle(Z);
    for (intptr_t i = 0; i < pool.Length(); i++) {
      if (pool.TypeAt(i) == ObjectPool::EntryType::kTaggedObject) {
        entry = pool.ObjectAt(i);
        AddCalleesOfHelper(entry, &selector, &cls);
      }
    }
  }

  if (!FLAG_dwarf_stack_traces_mode) {
    const Array& inlined_functions =
        Array::Handle(Z, code.inlined_id_to_function());
    for (intptr_t i = 0; i < inlined_functions.Length(); i++) {
      target ^= inlined_functions.At(i);
      AddRetainReason(target, RetainReasons::kSymbolicStackTraces);
      AddTypesOf(target);
    }
  }
}

static bool IsPotentialClosureCall(const String& selector) {
  return selector.ptr() == Symbols::call().ptr() ||
         selector.ptr() == Symbols::DynamicCall().ptr();
}

void Precompiler::AddCalleesOfHelper(const Object& entry,
                                     String* temp_selector,
                                     Class* temp_cls) {
  switch (entry.GetClassId()) {
    case kOneByteStringCid:
    case kNullCid:
      // Skip common leaf constants early in order to
      // process object pools faster.
      return;
    case kUnlinkedCallCid: {
      const auto& call_site = UnlinkedCall::Cast(entry);
      // A dynamic call.
      *temp_selector = call_site.target_name();
      AddSelector(*temp_selector);
      if (IsPotentialClosureCall(*temp_selector)) {
        const Array& arguments_descriptor =
            Array::Handle(Z, call_site.arguments_descriptor());
        AddClosureCall(*temp_selector, arguments_descriptor);
      }
      break;
    }
    case kMegamorphicCacheCid: {
      // A dynamic call.
      const auto& cache = MegamorphicCache::Cast(entry);
      *temp_selector = cache.target_name();
      AddSelector(*temp_selector);
      if (IsPotentialClosureCall(*temp_selector)) {
        const Array& arguments_descriptor =
            Array::Handle(Z, cache.arguments_descriptor());
        AddClosureCall(*temp_selector, arguments_descriptor);
      }
      break;
    }
    case kFieldCid: {
      // Potential need for field initializer.
      const auto& field = Field::Cast(entry);
      AddField(field);
      break;
    }
    case kFunctionCid: {
      // Local closure function.
      const auto& target = Function::Cast(entry);
      AddFunction(target, RetainReasons::kLocalClosure);
      if (target.IsFfiCallbackTrampoline()) {
        const auto& callback_target =
            Function::Handle(Z, target.FfiCallbackTarget());
        if (!callback_target.IsNull()) {
          AddFunction(callback_target, RetainReasons::kFfiCallbackTarget);
        }
        AddTypesOf(target);
      }
      break;
    }
    case kCodeCid: {
      const auto& target_code = Code::Cast(entry);
      if (target_code.IsAllocationStubCode()) {
        *temp_cls ^= target_code.owner();
        AddInstantiatedClass(*temp_cls);
      }
      break;
    }
    default:
      if (entry.IsInstance()) {
        // Const object, literal or args descriptor.
        const auto& instance = Instance::Cast(entry);
        AddConstObject(instance);
      }
      break;
  }
}

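// Marks a class as retained and traces the types it references: its
// interfaces, type parameters, and super type.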
void Precompiler::AddTypesOf(const Class& cls) {
  if (cls.IsNull()) return;
  if (classes_to_retain_.HasKey(&cls)) return;
  classes_to_retain_.Insert(&Class::ZoneHandle(Z, cls.ptr()));

  Array& interfaces = Array::Handle(Z, cls.interfaces());
  AbstractType& type = AbstractType::Handle(Z);
  for (intptr_t i = 0; i < interfaces.Length(); i++) {
    type ^= interfaces.At(i);
    AddType(type);
  }

  AddTypeParameters(TypeParameters::Handle(Z, cls.type_parameters()));

  type = cls.super_type();
  AddType(type);
}

void Precompiler::AddRetainReason(const Object& obj, const char* reason) {
  if (retained_reasons_writer_ == nullptr || reason == nullptr) return;
  retained_reasons_writer_->AddReason(obj, reason);
}

void Precompiler::AddTypesOf(const Function& function) {
  if (function.IsNull()) return;
  if (functions_to_retain_.ContainsKey(function)) return;
  functions_to_retain_.Insert(function);

  if (retained_reasons_writer_ != nullptr &&
      !retained_reasons_writer_->HasReason(function)) {
    FATAL("no retaining reasons given");
  }

  if (function.NeedsMonomorphicCheckedEntry(Z) ||
      Function::IsDynamicInvocationForwarderName(function.name())) {
    functions_called_dynamically_.Insert(function);
  }

  const FunctionType& signature = FunctionType::Handle(Z, function.signature());
  AddType(signature);

  // A class may have all functions inlined except a local function.
  const Class& owner = Class::Handle(Z, function.Owner());
  AddTypesOf(owner);

  if (function.IsFfiCallbackTrampoline()) {
    AddType(FunctionType::Handle(Z, function.FfiCSignature()));
  }

  const auto& parent_function = Function::Handle(Z, function.parent_function());
  if (parent_function.IsNull()) {
    return;
  }

  // It can happen that all uses of a function are inlined, leaving
  // a compiled local function with an uncompiled parent. Retain such
  // parents and their enclosing classes and libraries when needed.

  // We always retain parents if symbolic stack traces are enabled.
  if (!FLAG_dwarf_stack_traces_mode) {
    AddRetainReason(parent_function, RetainReasons::kSymbolicStackTraces);
    AddTypesOf(parent_function);
    return;
  }

  // We're not retaining the parent due to this function, so wrap it with
  // a weak serialization reference.
  const auto& data = ClosureData::CheckedHandle(Z, function.data());
  const auto& wsr =
      Object::Handle(Z, WeakSerializationReference::New(
                            parent_function, Object::null_function()));
  data.set_parent_function(wsr);
}

void Precompiler::AddType(const AbstractType& abstype) {
  if (abstype.IsNull()) return;

  if (abstype.IsTypeParameter()) {
    const auto& param = TypeParameter::Cast(abstype);
    if (typeparams_to_retain_.HasKey(&param)) return;
    typeparams_to_retain_.Insert(&TypeParameter::ZoneHandle(Z, param.ptr()));

    if (param.IsClassTypeParameter()) {
      AddTypesOf(Class::Handle(Z, param.parameterized_class()));
    } else {
      AddType(FunctionType::Handle(Z, param.parameterized_function_type()));
    }
    return;
  }

  if (abstype.IsFunctionType()) {
    if (functiontypes_to_retain_.HasKey(&FunctionType::Cast(abstype))) return;
    const FunctionType& signature =
        FunctionType::ZoneHandle(Z, FunctionType::Cast(abstype).ptr());
    functiontypes_to_retain_.Insert(&signature);

    AddTypeParameters(TypeParameters::Handle(Z, signature.type_parameters()));

    AbstractType& type = AbstractType::Handle(Z);
    type = signature.result_type();
    AddType(type);
    for (intptr_t i = 0; i < signature.NumParameters(); i++) {
      type = signature.ParameterTypeAt(i);
      AddType(type);
    }
    return;
  }

  if (types_to_retain_.HasKey(&abstype)) return;
  types_to_retain_.Insert(&AbstractType::ZoneHandle(Z, abstype.ptr()));

  if (abstype.IsType()) {
    const Type& type = Type::Cast(abstype);
    const Class& cls = Class::Handle(Z, type.type_class());
    AddTypesOf(cls);
    const TypeArguments& vector = TypeArguments::Handle(Z, type.arguments());
    AddTypeArguments(vector);
  } else if (abstype.IsRecordType()) {
    const auto& rec = RecordType::Cast(abstype);
    AbstractType& type = AbstractType::Handle(Z);
    for (intptr_t i = 0, n = rec.NumFields(); i < n; ++i) {
      type = rec.FieldTypeAt(i);
      AddType(type);
    }
  }
}

void Precompiler::AddTypeParameters(const TypeParameters& params) {
  if (params.IsNull()) return;

  TypeArguments& args = TypeArguments::Handle();
  args = params.bounds();
  AddTypeArguments(args);
  args = params.defaults();
  AddTypeArguments(args);
}

void Precompiler::AddTypeArguments(const TypeArguments& args) {
  if (args.IsNull()) return;

  if (typeargs_to_retain_.HasKey(&args)) return;
  typeargs_to_retain_.Insert(&TypeArguments::ZoneHandle(Z, args.ptr()));

  AbstractType& arg = AbstractType::Handle(Z);
  for (intptr_t i = 0; i < args.Length(); i++) {
    arg = args.TypeAt(i);
    AddType(arg);
  }
}

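// Retains a constant object along with its class, its type arguments, and,
// transitively, every instance reachable from its fields.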
void Precompiler::AddConstObject(const class Instance& instance) {
  // Types, type parameters, and type arguments require special handling.
  if (instance.IsAbstractType()) {  // Includes type parameter.
    AddType(AbstractType::Cast(instance));
    return;
  } else if (instance.IsTypeArguments()) {
    AddTypeArguments(TypeArguments::Cast(instance));
    return;
  }

  if (instance.ptr() == Object::sentinel().ptr() ||
      instance.ptr() == Object::transition_sentinel().ptr()) {
    return;
  }

  Class& cls = Class::Handle(Z, instance.clazz());
  AddInstantiatedClass(cls);

  if (instance.IsClosure()) {
    // An implicit static closure.
    const Function& func =
        Function::Handle(Z, Closure::Cast(instance).function());
    ASSERT(func.is_static());
    AddFunction(func, RetainReasons::kImplicitClosure);
    AddTypeArguments(TypeArguments::Handle(
        Z, Closure::Cast(instance).instantiator_type_arguments()));
    AddTypeArguments(TypeArguments::Handle(
        Z, Closure::Cast(instance).function_type_arguments()));
    AddTypeArguments(TypeArguments::Handle(
        Z, Closure::Cast(instance).delayed_type_arguments()));
    return;
  }

  if (instance.IsLibraryPrefix()) {
    const LibraryPrefix& prefix = LibraryPrefix::Cast(instance);
    ASSERT(prefix.is_deferred_load());
    const Library& target = Library::Handle(Z, prefix.GetLibrary(0));
    cls = target.toplevel_class();
    if (!classes_to_retain_.HasKey(&cls)) {
      classes_to_retain_.Insert(&Class::ZoneHandle(Z, cls.ptr()));
    }
    return;
  }

  // Can't ask immediate objects if they're canonical.
  if (instance.IsSmi()) return;

  // Some Instances in the ObjectPool aren't const objects, such as
  // argument descriptors.
  if (!instance.IsCanonical()) return;

  // Constants are canonicalized and we avoid repeated processing of them.
  if (consts_to_retain_.HasKey(&instance)) return;

  consts_to_retain_.Insert(&Instance::ZoneHandle(Z, instance.ptr()));

  if (cls.NumTypeArguments() > 0) {
    AddTypeArguments(TypeArguments::Handle(Z, instance.GetTypeArguments()));
  }

  class ConstObjectVisitor : public ObjectPointerVisitor {
   public:
    ConstObjectVisitor(Precompiler* precompiler, IsolateGroup* isolate_group)
        : ObjectPointerVisitor(isolate_group),
          precompiler_(precompiler),
          subinstance_(Object::Handle()) {}

    void VisitPointers(ObjectPtr* first, ObjectPtr* last) override {
      for (ObjectPtr* current = first; current <= last; current++) {
        subinstance_ = *current;
        if (subinstance_.IsInstance()) {
          precompiler_->AddConstObject(Instance::Cast(subinstance_));
        }
      }
      subinstance_ = Object::null();
    }

#if defined(DART_COMPRESSED_POINTERS)
    void VisitCompressedPointers(uword heap_base,
                                 CompressedObjectPtr* first,
                                 CompressedObjectPtr* last) override {
      for (CompressedObjectPtr* current = first; current <= last; current++) {
        subinstance_ = current->Decompress(heap_base);
        if (subinstance_.IsInstance()) {
          precompiler_->AddConstObject(Instance::Cast(subinstance_));
        }
      }
      subinstance_ = Object::null();
    }
#endif

   private:
    Precompiler* precompiler_;
    Object& subinstance_;
  };

  ConstObjectVisitor visitor(this, IG);
  instance.ptr()->untag()->VisitPointers(&visitor);
}

void Precompiler::AddClosureCall(const String& call_selector,
                                 const Array& arguments_descriptor) {
  const Class& cache_class =
      Class::Handle(Z, IG->object_store()->closure_class());
  const Function& dispatcher =
      Function::Handle(Z, cache_class.GetInvocationDispatcher(
                              call_selector, arguments_descriptor,
                              UntaggedFunction::kInvokeFieldDispatcher,
                              true /* create_if_absent */));
  AddFunction(dispatcher, RetainReasons::kInvokeFieldDispatcher);
}

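// Marks a field as retained. For static fields this also traces the field's
// current value; static and late fields with nontrivial initializers
// additionally get their initializer function enqueued.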
void Precompiler::AddField(const Field& field) {
  if (is_tracing()) {
    tracer_->WriteFieldRef(field);
  }

  if (fields_to_retain_.HasKey(&field)) return;

  fields_to_retain_.Insert(&Field::ZoneHandle(Z, field.ptr()));

  if (field.is_static()) {
    const Object& value =
        Object::Handle(Z, IG->initial_field_table()->At(field.field_id()));
    // Should not be in the middle of initialization while precompiling.
    ASSERT(value.ptr() != Object::transition_sentinel().ptr());

    if (value.ptr() != Object::sentinel().ptr() &&
        value.ptr() != Object::null()) {
      ASSERT(value.IsInstance());
      AddConstObject(Instance::Cast(value));
    }
  }

  if (field.has_nontrivial_initializer() &&
      (field.is_static() || field.is_late())) {
    const Function& initializer =
        Function::ZoneHandle(Z, field.EnsureInitializerFunction());
    const char* const reason = field.is_static()
                                   ? RetainReasons::kStaticFieldInitializer
                                   : RetainReasons::kLateFieldInitializer;
    AddFunction(initializer, reason);
  }
}

const char* Precompiler::MustRetainFunction(const Function& function) {
  // There are some cases where we must retain a function even when there is
  // no directly observable need for its function object at runtime. The cases
  // checked here are ones where the function is not marked with the
  // vm:entry-point pragma (which also forces retention):
  //
  // * Native functions (for LinkNativeCall)
  // * Selector matches a symbol used in Resolver::ResolveDynamic calls
  //   in dart_entry.cc or dart_api_impl.cc.
  // * _Closure.call (used in async stack handling)
  if (function.is_old_native()) {
    return "native function";
  }

  // Use the same check for _Closure.call as in stack_trace.{h|cc}.
  const auto& selector = String::Handle(Z, function.name());
  if (selector.ptr() == Symbols::call().ptr()) {
    const auto& name = String::Handle(Z, function.QualifiedScrubbedName());
    if (name.Equals(Symbols::_ClosureCall())) {
      return "_Closure.call";
    }
  }

  // We have to retain functions which can be a target of a SwitchableCall
  // at AOT runtime, since the AOT runtime needs to be able to find the
  // function object in the class.
  if (function.NeedsMonomorphicCheckedEntry(Z)) {
    return "needs monomorphic checked entry";
  }
  if (Function::IsDynamicInvocationForwarderName(function.name())) {
    return "dynamic invocation forwarder";
  }

  if (StackTraceUtils::IsNeededForAsyncAwareUnwinding(function)) {
    return RetainReasons::kAsyncStackUnwinding;
  }

  return nullptr;
}

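// Records a retention reason for [function] (if any applies) and enqueues
// it for compilation unless it has already been seen.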
void Precompiler::AddFunction(const Function& function,
                              const char* retain_reason) {
  ASSERT(!function.is_abstract());
  if (is_tracing()) {
    tracer_->WriteFunctionRef(function);
  }

  if (retain_reason == nullptr) {
    retain_reason = MustRetainFunction(function);
  }
  // Add even if we've already marked this function as possibly retained
  // because this could be an additional reason for doing so.
  AddRetainReason(function, retain_reason);

  if (possibly_retained_functions_.ContainsKey(function)) return;
  if (retain_reason != nullptr) {
    possibly_retained_functions_.Insert(function);
  }

  if (seen_functions_.ContainsKey(function)) return;
  seen_functions_.Insert(function);
  pending_functions_.Add(function);
  changed_ = true;
}

bool Precompiler::IsSent(const String& selector) {
  if (selector.IsNull()) {
    return false;
  }
  return sent_selectors_.HasKey(&selector);
}

void Precompiler::AddSelector(const String& selector) {
  if (is_tracing()) {
    tracer_->WriteSelectorRef(selector);
  }

  ASSERT(!selector.IsNull());
  if (!IsSent(selector)) {
    sent_selectors_.Insert(&String::ZoneHandle(Z, selector.ptr()));
    selector_count_++;
    changed_ = true;

    if (FLAG_trace_precompiler) {
      THR_Print("Enqueueing selector %" Pd " %s\n", selector_count_,
                selector.ToCString());
    }
  }
}

void Precompiler::AddTableSelector(const compiler::TableSelector* selector) {
  if (is_tracing()) {
    tracer_->WriteTableSelectorRef(selector->id);
  }

  if (seen_table_selectors_.HasKey(selector->id)) return;

  seen_table_selectors_.Insert(selector->id);
  changed_ = true;
}

bool Precompiler::IsHitByTableSelector(const Function& function) {
  const int32_t selector_id = selector_map()->SelectorId(function);
  if (selector_id == compiler::SelectorMap::kInvalidSelectorId) return false;
  return seen_table_selectors_.HasKey(selector_id);
}

void Precompiler::AddApiUse(const Object& obj) {
  api_uses_.Insert(&Object::ZoneHandle(Z, obj.ptr()));
}

bool Precompiler::HasApiUse(const Object& obj) {
  return api_uses_.HasKey(&obj);
}

void Precompiler::AddInstantiatedClass(const Class& cls) {
  if (is_tracing()) {
    tracer_->WriteClassInstantiationRef(cls);
  }

  if (cls.is_allocated()) return;

  class_count_++;
  cls.set_is_allocated_unsafe(true);
  error_ = cls.EnsureIsAllocateFinalized(T);
  if (!error_.IsNull()) {
    Jump(error_);
  }

  changed_ = true;

  if (FLAG_trace_precompiler) {
    THR_Print("Allocation %" Pd " %s\n", class_count_, cls.ToCString());
  }

  const Class& superclass = Class::Handle(cls.SuperClass());
  if (!superclass.IsNull()) {
    AddInstantiatedClass(superclass);
  }
}

// Adds all values annotated with @pragma('vm:entry-point') as roots.
void Precompiler::AddAnnotatedRoots() {
  HANDLESCOPE(T);
  auto& lib = Library::Handle(Z);
  auto& cls = Class::Handle(Z);
  auto& members = Array::Handle(Z);
  auto& function = Function::Handle(Z);
  auto& function2 = Function::Handle(Z);
  auto& field = Field::Handle(Z);
  auto& metadata = Array::Handle(Z);
  auto& reusable_object_handle = Object::Handle(Z);
  auto& reusable_field_handle = Field::Handle(Z);

  // Lists of fields which need implicit getter/setter/static final getter
  // added.
  auto& implicit_getters = GrowableObjectArray::Handle(Z);
  auto& implicit_setters = GrowableObjectArray::Handle(Z);
  auto& implicit_static_getters = GrowableObjectArray::Handle(Z);

  for (intptr_t i = 0; i < libraries_.Length(); i++) {
    lib ^= libraries_.At(i);
    HANDLESCOPE(T);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();

      // Check for @pragma on the class itself.
      if (cls.has_pragma()) {
        metadata ^= lib.GetMetadata(cls);
        if (FindEntryPointPragma(IG, metadata, &reusable_field_handle,
                                 &reusable_object_handle) ==
            EntryPointPragma::kAlways) {
          AddInstantiatedClass(cls);
          AddApiUse(cls);
        }
      }

      // Check for @pragma on any fields in the class.
      members = cls.fields();
      implicit_getters = GrowableObjectArray::New(members.Length());
      implicit_setters = GrowableObjectArray::New(members.Length());
      implicit_static_getters = GrowableObjectArray::New(members.Length());
      for (intptr_t k = 0; k < members.Length(); ++k) {
        field ^= members.At(k);
        if (field.has_pragma()) {
          metadata ^= lib.GetMetadata(field);
          if (metadata.IsNull()) continue;
          EntryPointPragma pragma = FindEntryPointPragma(
              IG, metadata, &reusable_field_handle, &reusable_object_handle);
          if (pragma == EntryPointPragma::kNever) continue;

          AddField(field);
          AddApiUse(field);

          if (!field.is_static()) {
            if (pragma != EntryPointPragma::kSetterOnly) {
              implicit_getters.Add(field);
            }
            if (pragma != EntryPointPragma::kGetterOnly) {
              implicit_setters.Add(field);
            }
          } else {
            implicit_static_getters.Add(field);
          }
        }
      }

      // Check for @pragma on any functions in the class.
      members = cls.current_functions();
      for (intptr_t k = 0; k < members.Length(); k++) {
        function ^= members.At(k);
        if (function.has_pragma()) {
          metadata ^= lib.GetMetadata(function);
          if (metadata.IsNull()) continue;
          auto type = FindEntryPointPragma(IG, metadata, &reusable_field_handle,
                                           &reusable_object_handle);

          if (type == EntryPointPragma::kAlways ||
              type == EntryPointPragma::kCallOnly) {
            functions_with_entry_point_pragmas_.Insert(function);
            AddApiUse(function);
            if (!function.is_abstract()) {
              AddFunction(function, RetainReasons::kEntryPointPragma);
            }
          }

          if ((type == EntryPointPragma::kAlways ||
               type == EntryPointPragma::kGetterOnly) &&
              function.kind() != UntaggedFunction::kConstructor &&
              !function.IsSetterFunction()) {
            function2 = function.ImplicitClosureFunction();
            functions_with_entry_point_pragmas_.Insert(function2);
            if (!function.is_abstract()) {
              AddFunction(function2, RetainReasons::kEntryPointPragma);
            }

            // Not `function2`: Dart_GetField will lookup the regular function
            // and get the implicit closure function from that.
            AddApiUse(function);
          }

          if (function.IsGenerativeConstructor()) {
            AddInstantiatedClass(cls);
            AddApiUse(function);
            AddApiUse(cls);
          }
        }
        if (function.kind() == UntaggedFunction::kImplicitGetter &&
            !implicit_getters.IsNull()) {
          for (intptr_t i = 0; i < implicit_getters.Length(); ++i) {
            field ^= implicit_getters.At(i);
            if (function.accessor_field() == field.ptr()) {
              functions_with_entry_point_pragmas_.Insert(function);
              AddFunction(function, RetainReasons::kImplicitGetter);
              AddApiUse(function);
            }
          }
        }
        if (function.kind() == UntaggedFunction::kImplicitSetter &&
            !implicit_setters.IsNull()) {
          for (intptr_t i = 0; i < implicit_setters.Length(); ++i) {
            field ^= implicit_setters.At(i);
            if (function.accessor_field() == field.ptr()) {
              functions_with_entry_point_pragmas_.Insert(function);
              AddFunction(function, RetainReasons::kImplicitSetter);
              AddApiUse(function);
            }
          }
        }
        if (function.kind() == UntaggedFunction::kImplicitStaticGetter &&
            !implicit_static_getters.IsNull()) {
          for (intptr_t i = 0; i < implicit_static_getters.Length(); ++i) {
            field ^= implicit_static_getters.At(i);
            if (function.accessor_field() == field.ptr()) {
              functions_with_entry_point_pragmas_.Insert(function);
              AddFunction(function, RetainReasons::kImplicitStaticGetter);
              AddApiUse(function);
            }
          }
        }
        if (function.is_old_native()) {
          // The embedder will need to lookup this library to provide the
          // native resolver, even if there are no embedder calls into the
          // library.
          AddApiUse(lib);
        }
      }

      implicit_getters = GrowableObjectArray::null();
      implicit_setters = GrowableObjectArray::null();
      implicit_static_getters = GrowableObjectArray::null();
    }
  }
}

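// Rescans all allocated classes for instance functions whose selectors have
// become "sent" since the last fixed-point iteration and enqueues them,
// along with any matching dynamic invocation forwarders, method extractors,
// and implicit closures.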
1650void Precompiler::CheckForNewDynamicFunctions() {
1651 PRECOMPILER_TIMER_SCOPE(this, CheckForNewDynamicFunctions);
1652 HANDLESCOPE(T);
1653 Library& lib = Library::Handle(Z);
1654 Class& cls = Class::Handle(Z);
1655 Array& functions = Array::Handle(Z);
1656 Function& function = Function::Handle(Z);
1657 Function& function2 = Function::Handle(Z);
1658 String& selector = String::Handle(Z);
1659 String& selector2 = String::Handle(Z);
1660 String& selector3 = String::Handle(Z);
1661 Field& field = Field::Handle(Z);
1662
1663 for (intptr_t i = 0; i < libraries_.Length(); i++) {
1664 lib ^= libraries_.At(i);
1665 HANDLESCOPE(T);
1666 ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
1667 while (it.HasNext()) {
1668 cls = it.GetNextClass();
1669
1670 if (!cls.is_allocated()) continue;
1671
1672 functions = cls.current_functions();
1673 for (intptr_t k = 0; k < functions.Length(); k++) {
1674 function ^= functions.At(k);
1675
1676 if (function.is_static() || function.is_abstract()) continue;
1677
1678 // Don't bail out early if there is already code, because we may discover
1679 // that the corresponding getter selector is sent in some later iteration.
1680 // if (function.HasCode()) continue;
1681
1682 selector = function.name();
1683 if (IsSent(selector)) {
1684 AddFunction(function, RetainReasons::kCalledViaSelector);
1685 }
1686 if (IsHitByTableSelector(function)) {
1687 AddFunction(function, FLAG_retain_function_objects
1688 ? RetainReasons::kForcedRetain
1689 : nullptr);
1690 }
1691
1692 bool found_metadata = false;
1693 kernel::ProcedureAttributesMetadata metadata;
1694
1695 // Handle the implicit call type conversions.
1696 if (Field::IsGetterName(selector) &&
1697 (function.kind() != UntaggedFunction::kMethodExtractor)) {
1698 // Call-through-getter.
1699 // Function is get:foo and somewhere foo (or dyn:foo) is called.
1700 // Note that we need to skip method extractors (which were potentially
1701 // created by DispatchTableGenerator): a call to foo will never
1702 // hit the method extractor get:foo, because it will hit the existing
1703 // method foo first.
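 // Illustrative example (not from the original source):
 //
 //   class C { void Function() get foo => () {}; }
 //   ... c.foo(); ...   // sends selector `foo`
 //
 // The send of `foo` is executed as `c.foo.call()`, so the getter
 // `get:foo` must be retained whenever `foo` is sent.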
1704 selector2 = Field::NameFromGetter(selector);
1705 if (IsSent(selector2)) {
1706 AddFunction(function, RetainReasons::kCalledThroughGetter);
1707 }
1708 selector2 = Function::CreateDynamicInvocationForwarderName(selector2);
1709 if (IsSent(selector2)) {
1710 selector2 =
1711 Function::CreateDynamicInvocationForwarderName(selector);
1712 function2 = function.GetDynamicInvocationForwarder(selector2);
1713 AddFunction(function2, RetainReasons::kDynamicInvocationForwarder);
1714 functions_called_dynamically_.Insert(function2);
1715 }
1716 } else if (function.kind() == UntaggedFunction::kRegularFunction) {
1717 selector2 = Field::GetterSymbol(selector);
1718 selector3 = Function::CreateDynamicInvocationForwarderName(selector2);
1719 if (IsSent(selector2) || IsSent(selector3)) {
1720 metadata = kernel::ProcedureAttributesOf(function, Z);
1721 found_metadata = true;
1722
1723 if (metadata.has_tearoff_uses) {
1724 // Closurization.
1725 // Function is foo and somewhere get:foo is called.
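 // Illustrative example (not from the original source):
 //
 //   class C { int foo() => 0; }
 //   ... c.foo; ...   // a tear-off; sends selector `get:foo`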
1726 function2 = function.ImplicitClosureFunction();
1727 AddFunction(function2, RetainReasons::kImplicitClosure);
1728
1729 // Add corresponding method extractor.
1730 function2 = function.GetMethodExtractor(selector2);
1731 AddFunction(function2, RetainReasons::kMethodExtractor);
1732 }
1733 }
1734 }
1735
1736 const bool is_getter =
1737 function.kind() == UntaggedFunction::kImplicitGetter ||
1738 function.kind() == UntaggedFunction::kGetterFunction;
1739 const bool is_setter =
1740 function.kind() == UntaggedFunction::kImplicitSetter ||
1741 function.kind() == UntaggedFunction::kSetterFunction;
1742 const bool is_regular =
1743 function.kind() == UntaggedFunction::kRegularFunction;
1744 if (is_getter || is_setter || is_regular) {
1745 selector2 = Function::CreateDynamicInvocationForwarderName(selector);
1746 if (IsSent(selector2)) {
1747 if (function.kind() == UntaggedFunction::kImplicitGetter ||
1748 function.kind() == UntaggedFunction::kImplicitSetter) {
1749 field = function.accessor_field();
1750 metadata = kernel::ProcedureAttributesOf(field, Z);
1751 } else if (!found_metadata) {
1752 metadata = kernel::ProcedureAttributesOf(function, Z);
1753 }
1754
1755 if (is_getter) {
1756 if (metadata.getter_called_dynamically) {
1757 function2 = function.GetDynamicInvocationForwarder(selector2);
1758 AddFunction(function2,
1759 RetainReasons::kDynamicInvocationForwarder);
1760 functions_called_dynamically_.Insert(function2);
1761 }
1762 } else {
1763 if (metadata.method_or_setter_called_dynamically) {
1764 function2 = function.GetDynamicInvocationForwarder(selector2);
1765 AddFunction(function2,
1766 RetainReasons::kDynamicInvocationForwarder);
1767 functions_called_dynamically_.Insert(function2);
1768 }
1769 }
1770 }
1771 }
1772 }
1773 }
1774 }
1775}
1776
1777class NameFunctionsTraits {
1778 public:
1779 static const char* Name() { return "NameFunctionsTraits"; }
1780 static bool ReportStats() { return false; }
1781
1782 static bool IsMatch(const Object& a, const Object& b) {
1783 return a.IsString() && b.IsString() &&
1784 String::Cast(a).Equals(String::Cast(b));
1785 }
1786 static uword Hash(const Object& obj) { return String::Cast(obj).Hash(); }
1787 static ObjectPtr NewKey(const String& str) { return str.ptr(); }
1788};
1789
1790typedef UnorderedHashMap<NameFunctionsTraits> Table;
1791
1792static void AddNameToFunctionsTable(Zone* zone,
1793 Table* table,
1794 const String& fname,
1795 const Function& function) {
1796 Array& farray = Array::Handle(zone);
1797 farray ^= table->InsertNewOrGetValue(fname, Array::empty_array());
1798 farray = Array::Grow(farray, farray.Length() + 1);
1799 farray.SetAt(farray.Length() - 1, function);
1800 table->UpdateValue(fname, farray);
1801}
1802
1803static void AddNamesToFunctionsTable(Zone* zone,
1804 Table* table,
1805 const String& fname,
1806 const Function& function,
1807 String* mangled_name,
1808 Function* dyn_function) {
1809 AddNameToFunctionsTable(zone, table, fname, function);
1810
1811 *dyn_function = function.ptr();
1812 if (kernel::NeedsDynamicInvocationForwarder(function)) {
1813 *mangled_name = function.name();
1814 *mangled_name =
1815 Function::CreateDynamicInvocationForwarderName(*mangled_name);
1816 *dyn_function = function.GetDynamicInvocationForwarder(*mangled_name,
1817 /*allow_add=*/true);
1818 }
1819 *mangled_name = Function::CreateDynamicInvocationForwarderName(fname);
1820 AddNameToFunctionsTable(zone, table, *mangled_name, *dyn_function);
1821}
1822
1823void Precompiler::CollectDynamicFunctionNames() {
1824 if (!FLAG_collect_dynamic_function_names) {
1825 return;
1826 }
1827 HANDLESCOPE(T);
1828 auto& lib = Library::Handle(Z);
1829 auto& cls = Class::Handle(Z);
1830 auto& functions = Array::Handle(Z);
1831 auto& function = Function::Handle(Z);
1832 auto& fname = String::Handle(Z);
1833 auto& farray = Array::Handle(Z);
1834 auto& mangled_name = String::Handle(Z);
1835 auto& dyn_function = Function::Handle(Z);
1836
1837 Table table(HashTables::New<Table>(100));
1838 for (intptr_t i = 0; i < libraries_.Length(); i++) {
1839 lib ^= libraries_.At(i);
1840 HANDLESCOPE(T);
1841 ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
1842 while (it.HasNext()) {
1843 cls = it.GetNextClass();
1844 functions = cls.current_functions();
1845
1846 const intptr_t length = functions.Length();
1847 for (intptr_t j = 0; j < length; j++) {
1848 function ^= functions.At(j);
1849 if (function.IsDynamicFunction()) {
1850 fname = function.name();
1851 if (function.IsSetterFunction() ||
1852 function.IsImplicitSetterFunction()) {
1853 AddNamesToFunctionsTable(zone(), &table, fname, function,
1854 &mangled_name, &dyn_function);
1855 } else if (function.IsGetterFunction() ||
1856 function.IsImplicitGetterFunction()) {
1857 // Enter both the getter and the non-getter name.
1858 AddNamesToFunctionsTable(zone(), &table, fname, function,
1859 &mangled_name, &dyn_function);
1860 fname = Field::NameFromGetter(fname);
1861 AddNamesToFunctionsTable(zone(), &table, fname, function,
1862 &mangled_name, &dyn_function);
1863 } else if (function.IsMethodExtractor()) {
1864 // Skip. We already add getter names for regular methods below.
1865 continue;
1866 } else {
1867 // Regular function. Enter both the getter and the non-getter name.
1868 AddNamesToFunctionsTable(zone(), &table, fname, function,
1869 &mangled_name, &dyn_function);
1870 fname = Field::GetterName(fname);
1871 AddNamesToFunctionsTable(zone(), &table, fname, function,
1872 &mangled_name, &dyn_function);
1873 }
1874 }
1875 }
1876 }
1877 }
1878
1879 // Locate all entries that have exactly one target function.
1880 Table::Iterator iter(&table);
1881 String& key = String::Handle(Z);
1882 String& key_demangled = String::Handle(Z);
1883 UniqueFunctionsMap functions_map(HashTables::New<UniqueFunctionsMap>(20));
1884 while (iter.MoveNext()) {
1885 intptr_t curr_key = iter.Current();
1886 key ^= table.GetKey(curr_key);
1887 farray ^= table.GetOrNull(key);
1888 ASSERT(!farray.IsNull());
1889 if (farray.Length() == 1) {
1890 function ^= farray.At(0);
1891
1892 // It looks like there is exactly one target for the given name, but we
1893 // have to be careful: e.g. a name like `dyn:get:foo` might have the
1894 // target `foo()`, while the actual target should be a lazily created
1895 // method extractor `get:foo` for the `foo` function.
1896 //
1897 // We'd like to prevent eager creation of functions which we normally
1898 // create lazily.
1899 // => We disable the unique target optimization if the target belongs to
1900 // the lazily created functions.
1901 key_demangled = key.ptr();
1902 if (Function::IsDynamicInvocationForwarderName(key)) {
1903 key_demangled = Function::DemangleDynamicInvocationForwarderName(key);
1904 }
1905 if (function.name() != key.ptr() &&
1906 function.name() != key_demangled.ptr()) {
1907 continue;
1908 }
1909 functions_map.UpdateOrInsert(key, function);
1910 }
1911 }
1912
1913 farray ^= table.GetOrNull(Symbols::GetRuntimeType());
1914
1915 get_runtime_type_is_unique_ = !farray.IsNull() && (farray.Length() == 1);
1916
1917 if (FLAG_print_unique_targets) {
1918 UniqueFunctionsMap::Iterator unique_iter(&functions_map);
1919 while (unique_iter.MoveNext()) {
1920 intptr_t curr_key = unique_iter.Current();
1921 function ^= functions_map.GetPayload(curr_key, 0);
1922 THR_Print("* %s\n", function.ToQualifiedCString());
1923 }
1924 THR_Print("%" Pd " of %" Pd " dynamic selectors are unique\n",
1925 functions_map.NumOccupied(), table.NumOccupied());
1926 }
1927
1928 IG->object_store()->set_unique_dynamic_targets(functions_map.Release());
1929 table.Release();
1930}
1931
1932void Precompiler::TraceForRetainedFunctions() {
1933 HANDLESCOPE(T);
1934 Library& lib = Library::Handle(Z);
1935 Class& cls = Class::Handle(Z);
1936 Array& functions = Array::Handle(Z);
1937 Function& function = Function::Handle(Z);
1938 Function& function2 = Function::Handle(Z);
1939 Array& fields = Array::Handle(Z);
1940 Field& field = Field::Handle(Z);
1941
1942 for (intptr_t i = 0; i < libraries_.Length(); i++) {
1943 lib ^= libraries_.At(i);
1944 HANDLESCOPE(T);
1945 ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
1946 while (it.HasNext()) {
1947 cls = it.GetNextClass();
1948 functions = cls.current_functions();
1949 for (intptr_t j = 0; j < functions.Length(); j++) {
1950 SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
1951 function ^= functions.At(j);
1952 function.DropUncompiledImplicitClosureFunction();
1953
1954 const bool retained =
1955 possibly_retained_functions_.ContainsKey(function);
1956 if (retained) {
1957 AddTypesOf(function);
1958 }
1959 if (function.HasImplicitClosureFunction()) {
1960 function2 = function.ImplicitClosureFunction();
1961
1962 if (possibly_retained_functions_.ContainsKey(function2)) {
1963 AddTypesOf(function2);
1964 // If the function has @pragma('vm:entry-point', 'get'), we need to keep
1965 // the function itself around so that the runtime can find it and
1966 // get to the implicit closure through it.
1967 if (!retained &&
1968 functions_with_entry_point_pragmas_.ContainsKey(function2)) {
1969 AddRetainReason(function, RetainReasons::kEntryPointPragma);
1970 AddTypesOf(function);
1971 }
1972 }
1973 }
1974 }
1975
1976 fields = cls.fields();
1977 for (intptr_t j = 0; j < fields.Length(); j++) {
1978 field ^= fields.At(j);
1979 if (fields_to_retain_.HasKey(&field) &&
1980 field.HasInitializerFunction()) {
1981 function = field.InitializerFunction();
1982 if (possibly_retained_functions_.ContainsKey(function)) {
1983 AddTypesOf(function);
1984 }
1985 }
1986 }
1987
1988 if (cls.invocation_dispatcher_cache() != Array::empty_array().ptr()) {
1989 DispatcherSet dispatchers(cls.invocation_dispatcher_cache());
1990 DispatcherSet::Iterator it(&dispatchers);
1991 while (it.MoveNext()) {
1992 function ^= dispatchers.GetKey(it.Current());
1993 if (possibly_retained_functions_.ContainsKey(function)) {
1994 AddTypesOf(function);
1995 }
1996 }
1997 dispatchers.Release();
1998 }
1999 }
2000 }
2001
2002 ClosureFunctionsCache::ForAllClosureFunctions([&](const Function& function) {
2003 if (possibly_retained_functions_.ContainsKey(function)) {
2004 AddTypesOf(function);
2005 }
2006 return true; // Continue iteration.
2007 });
2008
2009#ifdef DEBUG
2010 // Make sure functions_to_retain_ is a super-set of
2011 // possibly_retained_functions_.
2012 FunctionSet::Iterator it(&possibly_retained_functions_);
2013 while (it.MoveNext()) {
2014 function ^= possibly_retained_functions_.GetKey(it.Current());
2015 // FFI trampoline functions are not reachable from the program structure;
2016 // they are referenced only from code (the object pool).
2017 if (!functions_to_retain_.ContainsKey(function) &&
2018 !function.IsFfiCallbackTrampoline()) {
2019 FATAL("Function %s was not traced in TraceForRetainedFunctions\n",
2020 function.ToFullyQualifiedCString());
2021 }
2022 }
2023#endif // DEBUG
2024}
2025
2026void Precompiler::FinalizeDispatchTable() {
2027 PRECOMPILER_TIMER_SCOPE(this, FinalizeDispatchTable);
2028 HANDLESCOPE(T);
2029 // Build the entries used to serialize the dispatch table before
2030 // dropping functions, as we may clear references to Code objects.
2031 const auto& entries =
2032 Array::Handle(Z, dispatch_table_generator_->BuildCodeArray());
2033 IG->object_store()->set_dispatch_table_code_entries(entries);
2034 // Delete the dispatch table generator to ensure there's no attempt
2035 // to add new entries after this point.
2036 delete dispatch_table_generator_;
2037 dispatch_table_generator_ = nullptr;
2038
2039 if (FLAG_retain_function_objects || !FLAG_trace_precompiler) return;
2040
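 // What follows is purely diagnostic: it reports dispatch table entries
 // whose target functions are about to be dropped, which is only
 // interesting when function objects are not retained and tracing is on.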
2041 FunctionSet printed(HashTables::New<FunctionSet>(/*initial_capacity=*/1024));
2042 auto& code = Code::Handle(Z);
2043 auto& function = Function::Handle(Z);
2044 for (intptr_t i = 0; i < entries.Length(); i++) {
2045 code = Code::RawCast(entries.At(i));
2046 if (code.IsNull()) continue;
2047 if (!code.IsFunctionCode()) continue;
2048 function = code.function();
2049 ASSERT(!function.IsNull());
2050 if (printed.ContainsKey(function)) continue;
2051 if (functions_to_retain_.ContainsKey(function)) continue;
2052 THR_Print("Dispatch table references code for function to drop: %s\n",
2053 function.ToLibNamePrefixedQualifiedCString());
2054 printed.Insert(function);
2055 }
2056 printed.Release();
2057}
2058
2059void Precompiler::ReplaceFunctionStaticCallEntries() {
2060 PRECOMPILER_TIMER_SCOPE(this, ReplaceFunctionStaticCallEntries);
2061 class StaticCallTableEntryFixer : public CodeVisitor {
2062 public:
2063 explicit StaticCallTableEntryFixer(Zone* zone)
2064 : table_(Array::Handle(zone)),
2065 kind_and_offset_(Smi::Handle(zone)),
2066 target_function_(Function::Handle(zone)),
2067 target_code_(Code::Handle(zone)),
2068 pool_(ObjectPool::Handle(zone)) {}
2069
2070 void VisitCode(const Code& code) {
2071 if (!code.IsFunctionCode()) return;
2072 table_ = code.static_calls_target_table();
2073 StaticCallsTable static_calls(table_);
2074
2075 // With bare instructions, there is a global pool and per-Code local
2076 // pools. Instructions are generated to use offsets into the global pool,
2077 // but we still use the local pool to track which Code objects use which
2078 // pool values, both for analyzing snapshot size
2079 // (--write_v8_snapshot_profile_to and --print_instructions_sizes_to) and
2080 // for deferred loading to decide which snapshot each pool value goes in.
2081 // We don't keep track of which offsets in the local pools correspond to
2082 // which entries in the static call table, so we don't properly replace
2083 // the old references to the CallStaticFunction stub, but it is sufficient
2084 // for the local pool to include the actual call target.
2085 compiler::ObjectPoolBuilder builder;
2086 pool_ = code.object_pool();
2087 pool_.CopyInto(&builder);
2088
2089 for (auto& view : static_calls) {
2090 kind_and_offset_ = view.Get<Code::kSCallTableKindAndOffset>();
2091 auto const kind = Code::KindField::decode(kind_and_offset_.Value());
2092
2093 if ((kind != Code::kCallViaCode) && (kind != Code::kPcRelativeCall))
2094 continue;
2095
2096 target_function_ = view.Get<Code::kSCallTableFunctionTarget>();
2097 if (target_function_.IsNull()) continue;
2098
2099 ASSERT(view.Get<Code::kSCallTableCodeOrTypeTarget>() == Code::null());
2100 ASSERT(target_function_.HasCode());
2101 target_code_ = target_function_.CurrentCode();
2102 ASSERT(!target_code_.IsStubCode());
2103 view.Set<Code::kSCallTableCodeOrTypeTarget>(target_code_);
2104 view.Set<Code::kSCallTableFunctionTarget>(Object::null_function());
2105 if (kind == Code::kCallViaCode) {
2106 auto const pc_offset =
2107 Code::OffsetField::decode(kind_and_offset_.Value());
2108 const uword pc = pc_offset + code.PayloadStart();
2109 CodePatcher::PatchStaticCallAt(pc, code, target_code_);
2110 builder.AddObject(Object::ZoneHandle(target_code_.ptr()));
2111 }
2112 if (FLAG_trace_precompiler) {
2113 THR_Print("Updated static call entry to %s in \"%s\"\n",
2114 target_function_.ToFullyQualifiedCString(),
2115 code.ToCString());
2116 }
2117 }
2118
2119 code.set_object_pool(ObjectPool::NewFromBuilder(builder));
2120 }
2121
2122 private:
2123 Array& table_;
2124 Smi& kind_and_offset_;
2125 Function& target_function_;
2126 Code& target_code_;
2127 ObjectPool& pool_;
2128 };
2129
2130 HANDLESCOPE(T);
2131 StaticCallTableEntryFixer visitor(Z);
2132 ProgramVisitor::WalkProgram(Z, IG, &visitor);
2133}
2134
2135void Precompiler::DropFunctions() {
2136 HANDLESCOPE(T);
2137 Library& lib = Library::Handle(Z);
2138 Class& cls = Class::Handle(Z);
2139 Array& functions = Array::Handle(Z);
2140 Function& function = Function::Handle(Z);
2141 Function& target = Function::Handle(Z);
2142 Function& implicit_closure = Function::Handle(Z);
2143 Code& code = Code::Handle(Z);
2144 Object& owner = Object::Handle(Z);
2145 GrowableObjectArray& retained_functions = GrowableObjectArray::Handle(Z);
2146 auto& sig = FunctionType::Handle(Z);
2147 auto& ref = Object::Handle(Z);
2148
2149 auto trim_function = [&](const Function& function) {
2150 if (function.IsDynamicInvocationForwarder()) {
2151 // For dynamic invocation forwarders, sever the strong connection between
2152 // the forwarder and the target function if we are not going to retain the
2153 // target function anyway. The only use of the forwarding target outside
2154 // of the compilation pipeline is in Function::script(), and that should
2155 // not be used when we are dropping functions (because we are not going to
2156 // emit symbolic stack traces anyway).
2157 // Note that we still need Function::script() to work during snapshot
2158 // generation to generate DWARF; that's why we use a WSR instead of
2159 // simply setting the forwarding target to null.
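 // (A WeakSerializationReference acts like its target during compilation;
 // at serialization time, if the target is not otherwise retained, the
 // replacement, here the null function, is written instead.)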
2160 target = function.ForwardingTarget();
2161 if (!functions_to_retain_.ContainsKey(target)) {
2162 ref =
2163 WeakSerializationReference::New(target, Function::null_function());
2164 function.set_data(ref);
2165 }
2166 }
2167
2168 sig = function.signature();
2169 // In the AOT runtime, most calls are direct or through the dispatch table,
2170 // not resolved via dynamic lookup. Thus, we only need to retain the
2171 // function signature in the following cases:
2172 if (function.IsClosureFunction()) {
2173 // Dynamic calls to closures go through dynamic closure call dispatchers,
2174 // which need the signature.
2175 return AddRetainReason(sig, RetainReasons::kClosureSignature);
2176 }
2177 if (function.IsFfiCallbackTrampoline()) {
2178 // FFI trampolines may be dynamically called.
2179 return AddRetainReason(sig, RetainReasons::kFfiTrampolineSignature);
2180 }
2181 if (function.is_old_native()) {
2182 return AddRetainReason(sig, RetainReasons::kNativeSignature);
2183 }
2184 if (function.HasRequiredNamedParameters()) {
2185 // Required named parameters must be checked, so a NoSuchMethod exception
2186 // can be thrown if they are not provided.
2187 return AddRetainReason(sig, RetainReasons::kRequiredNamedParameters);
2188 }
2189 if (functions_called_dynamically_.ContainsKey(function)) {
2190 // Dynamic resolution of these functions checks for valid arguments.
2191 return AddRetainReason(sig, RetainReasons::kDynamicallyCalledSignature);
2192 }
2193 if (functions_with_entry_point_pragmas_.ContainsKey(function)) {
2194 // Dynamic resolution of entry points also checks for valid arguments.
2195 return AddRetainReason(sig, RetainReasons::kEntryPointPragmaSignature);
2196 }
2197 if (StackTraceUtils::IsNeededForAsyncAwareUnwinding(function)) {
2198 return AddRetainReason(sig, RetainReasons::kAsyncStackUnwinding);
2199 }
2200 if (FLAG_trace_precompiler) {
2201 THR_Print("Clearing signature for function %s\n",
2202 function.ToLibNamePrefixedQualifiedCString());
2203 }
2204 // Other functions not listed here may end up in dynamic resolution via
2205 // UnlinkedCalls. However, since such a call is not a dynamic invocation
2206 // and has been type checked at compile time, we already know the arguments
2207 // are valid. Thus, we can skip checking arguments for functions with
2208 // dropped signatures in ResolveDynamicForReceiverClassWithCustomLookup.
2209 ref = WeakSerializationReference::New(sig, Object::null_function_type());
2210 function.set_signature(ref);
2211 };
2212
2213 auto drop_function = [&](const Function& function) {
2214 if (function.HasCode()) {
2215 code = function.CurrentCode();
2216 function.ClearCode();
2217 // Wrap the owner of the code object in case the code object will be
2218 // serialized but the function object will not.
2219 owner = code.owner();
2220 owner = WeakSerializationReference::New(
2221 owner, Smi::Handle(Smi::New(owner.GetClassId())));
2222 code.set_owner(owner);
2223 }
2224 if (function.HasImplicitClosureFunction()) {
2225 // If we are going to drop a function which has a compiled
2226 // implicit closure, move the closure itself to the list of closures
2227 // attached to the object store so that ProgramVisitor can find it.
2228 // The list of closures is dropped during PRODUCT snapshotting,
2229 // so there is no overhead in doing so.
2230 implicit_closure = function.ImplicitClosureFunction();
2231 RELEASE_ASSERT(functions_to_retain_.ContainsKey(implicit_closure));
2232 ClosureFunctionsCache::AddClosureFunctionLocked(
2233 implicit_closure, /*allow_implicit_closure_functions=*/true);
2234 }
2235 dropped_function_count_++;
2236 if (FLAG_trace_precompiler) {
2237 THR_Print("Dropping function %s\n",
2238 function.ToLibNamePrefixedQualifiedCString());
2239 }
2240 if (retained_reasons_writer_ != nullptr) {
2241 retained_reasons_writer_->AddDropped(function);
2242 }
2243 };
2244
2245 SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
2246 for (intptr_t i = 0; i < libraries_.Length(); i++) {
2247 lib ^= libraries_.At(i);
2248 HANDLESCOPE(T);
2249 ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
2250 while (it.HasNext()) {
2251 cls = it.GetNextClass();
2252 functions = cls.functions();
2253 retained_functions = GrowableObjectArray::New();
2254 for (intptr_t j = 0; j < functions.Length(); j++) {
2255 function ^= functions.At(j);
2256 function.DropUncompiledImplicitClosureFunction();
2257 if (functions_to_retain_.ContainsKey(function)) {
2258 trim_function(function);
2259 retained_functions.Add(function);
2260 } else {
2261 drop_function(function);
2262 }
2263 }
2264
2265 if (retained_functions.Length() > 0) {
2266 functions = Array::MakeFixedLength(retained_functions);
2267 cls.SetFunctions(functions);
2268 } else {
2269 cls.SetFunctions(Object::empty_array());
2270 }
2271
2272 retained_functions = GrowableObjectArray::New();
2273 if (cls.invocation_dispatcher_cache() != Array::empty_array().ptr()) {
2274 DispatcherSet dispatchers(Z, cls.invocation_dispatcher_cache());
2275 DispatcherSet::Iterator it(&dispatchers);
2276 while (it.MoveNext()) {
2277 function ^= dispatchers.GetKey(it.Current());
2278 if (functions_to_retain_.ContainsKey(function)) {
2279 trim_function(function);
2280 retained_functions.Add(function);
2281 } else {
2282 drop_function(function);
2283 }
2284 }
2285 dispatchers.Release();
2286 }
2287 if (retained_functions.Length() == 0) {
2288 cls.set_invocation_dispatcher_cache(Array::empty_array());
2289 } else {
2290 DispatcherSet retained_dispatchers(
2291 Z, HashTables::New<DispatcherSet>(retained_functions.Length(),
2292 Heap::kOld));
2293 for (intptr_t j = 0; j < retained_functions.Length(); j++) {
2294 function ^= retained_functions.At(j);
2295 retained_dispatchers.Insert(function);
2296 }
2297 cls.set_invocation_dispatcher_cache(retained_dispatchers.Release());
2298 }
2299 }
2300 }
2301
2302 retained_functions = GrowableObjectArray::New();
2303 ClosureFunctionsCache::ForAllClosureFunctions([&](const Function& function) {
2304 if (functions_to_retain_.ContainsKey(function)) {
2305 trim_function(function);
2306 retained_functions.Add(function);
2307 } else {
2308 drop_function(function);
2309 }
2310 return true; // Continue iteration.
2311 });
2312
2313 // Note: in PRODUCT mode the snapshotter will drop this field when
2314 // serializing. This is done in ProgramSerializationRoots.
2315 IG->object_store()->set_closure_functions(retained_functions);
2316
2317 // Only needed during compilation.
2318 IG->object_store()->set_closure_functions_table(Object::null_array());
2319}
2320
2321void Precompiler::DropFields() {
2322 HANDLESCOPE(T);
2323 Library& lib = Library::Handle(Z);
2324 Class& cls = Class::Handle(Z);
2325 Array& fields = Array::Handle(Z);
2326 Field& field = Field::Handle(Z);
2327 GrowableObjectArray& retained_fields = GrowableObjectArray::Handle(Z);
2328 AbstractType& type = AbstractType::Handle(Z);
2329
2330 SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
2331 for (intptr_t i = 0; i < libraries_.Length(); i++) {
2332 lib ^= libraries_.At(i);
2333 HANDLESCOPE(T);
2334 ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
2335 while (it.HasNext()) {
2336 cls = it.GetNextClass();
2337 fields = cls.fields();
2338 retained_fields = GrowableObjectArray::New();
2339 for (intptr_t j = 0; j < fields.Length(); j++) {
2340 field ^= fields.At(j);
2341 bool retain = fields_to_retain_.HasKey(&field);
2342#if !defined(PRODUCT)
2343 if (field.is_instance() && cls.is_allocated()) {
2344 // Keep instance fields so their names are available to graph tools.
2345 retain = true;
2346 }
2347#endif
2348 if (retain) {
2349 if (FLAG_trace_precompiler) {
2350 THR_Print("Retaining %s field %s\n",
2351 field.is_static() ? "static" : "instance",
2352 field.ToCString());
2353 }
2354 retained_fields.Add(field);
2355 type = field.type();
2356 AddType(type);
2357 } else {
2358 dropped_field_count_++;
2359 if (FLAG_trace_precompiler) {
2360 THR_Print("Dropping %s field %s\n",
2361 field.is_static() ? "static" : "instance",
2362 field.ToCString());
2363 }
2364
2365 // This cleans up references to the field's current and initial values.
2366 if (field.is_static()) {
2367 field.SetStaticValue(Object::null_instance());
2368 field.SetStaticConstFieldValue(Object::null_instance(),
2369 /*assert_initializing_store=*/false);
2370 }
2371 }
2372 }
2373
2374 if (retained_fields.Length() > 0) {
2375 fields = Array::MakeFixedLength(retained_fields);
2376 cls.SetFields(fields);
2377 } else {
2378 cls.SetFields(Object::empty_array());
2379 }
2380 }
2381 }
2382}
2383
2384void Precompiler::AttachOptimizedTypeTestingStub() {
2385 PRECOMPILER_TIMER_SCOPE(this, AttachOptimizedTypeTestingStub);
2386 HANDLESCOPE(T);
2387 IsolateGroup::Current()->heap()->CollectAllGarbage();
2388 GrowableHandlePtrArray<const AbstractType> types(Z, 200);
2389 {
2390 class TypesCollector : public ObjectVisitor {
2391 public:
2392 explicit TypesCollector(Zone* zone,
2393 GrowableHandlePtrArray<const AbstractType>* types)
2394 : type_(AbstractType::Handle(zone)), types_(types) {}
2395
2396 void VisitObject(ObjectPtr obj) override {
2397 if (obj->GetClassId() == kTypeCid ||
2398 obj->GetClassId() == kFunctionTypeCid ||
2399 obj->GetClassId() == kRecordTypeCid) {
2400 type_ ^= obj;
2401 types_->Add(type_);
2402 }
2403 }
2404
2405 private:
2406 AbstractType& type_;
2407 GrowableHandlePtrArray<const AbstractType>* types_;
2408 };
2409
2410 HeapIterationScope his(T);
2411 TypesCollector visitor(Z, &types);
2412
2413 // Find all type objects in this isolate.
2414 IG->heap()->VisitObjects(&visitor);
2415
2416 // Find all type objects in the vm-isolate.
2417 Dart::vm_isolate_group()->heap()->VisitObjects(&visitor);
2418 }
2419
2420 TypeUsageInfo* type_usage_info = Thread::Current()->type_usage_info();
2421
2422 // At this point we're not generating any new code, so we build a picture of
2423 // which types we might type-test against.
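 // (Here a "type test" covers `is`/`as` checks as well as the implicit
 // checks the compiler emitted while generating code above.)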
2424 type_usage_info->BuildTypeUsageInformation();
2425
2426 TypeTestingStubGenerator type_testing_stubs;
2427 Code& code = Code::Handle();
2428 for (intptr_t i = 0; i < types.length(); i++) {
2429 const AbstractType& type = types.At(i);
2430
2431 if (type.InVMIsolateHeap()) {
2432 // The only important types in the VM isolate are
2433 // "dynamic"/"void"/"Never", which get their optimized
2434 // testing stubs installed at creation.
2435 continue;
2436 }
2437
2438 if (type_usage_info->IsUsedInTypeTest(type)) {
2439 code = type_testing_stubs.OptimizedCodeForType(type);
2440 type.SetTypeTestingStub(code);
2441
2442 // Ensure we retain the type.
2443 AddType(type);
2444 }
2445 }
2446
2447 ASSERT(Object::dynamic_type().type_test_stub_entry_point() ==
2448 StubCode::TopTypeTypeTest().EntryPoint());
2449}
2450
2451enum ConstantVisitedValue { kNotVisited = 0, kRetain, kDrop };
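// (kNotVisited is deliberately 0: a WeakTable lookup for an object without
// an entry returns 0.)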
2452
2453static bool IsUserDefinedClass(Zone* zone,
2454 ClassPtr cls,
2455 ObjectStore* object_store) {
2456 intptr_t cid = cls.untag()->id();
2457 if (cid < kNumPredefinedCids) {
2458 return false;
2459 }
2460
2461 return true;
2462}
2463
2464 /// Updates the |visited| weak table with information about whether an object
2465 /// (transitively) references constants of user-defined classes: |kDrop|
2466 /// indicates it does, |kRetain| that it does not.
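/// Illustrative example (not from the original source): for the Dart
/// constant `const [1, MyConst()]`, the outer ImmutableArray is a core
/// object, but it transitively references the user-defined `MyConst()`,
/// so both are marked |kDrop|.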
2467class ConstantInstanceVisitor {
2468 public:
2469 ConstantInstanceVisitor(Zone* zone,
2470 WeakTable* visited,
2471 ObjectStore* object_store)
2472 : zone_(zone),
2473 visited_(visited),
2474 object_store_(object_store),
2475 object_(Object::Handle(zone)),
2476 array_(Array::Handle(zone)) {}
2477
2478 void Visit(ObjectPtr object_ptr) {
2479 if (!object_ptr->IsHeapObject()) {
2480 return;
2481 }
2482 ConstantVisitedValue value = static_cast<ConstantVisitedValue>(
2483 visited_->GetValueExclusive(object_ptr));
2484 if (value != kNotVisited) {
2485 return;
2486 }
2487 object_ = object_ptr;
2488 if (IsUserDefinedClass(zone_, object_.clazz(), object_store_)) {
2489 visited_->SetValueExclusive(object_ptr, kDrop);
2490 return;
2491 }
2492
2493 // Conservatively assume an object will be retained.
2494 visited_->SetValueExclusive(object_ptr, kRetain);
2495 switch (object_ptr.untag()->GetClassId()) {
2496 case kImmutableArrayCid: {
2497 array_ ^= object_ptr;
2498 for (intptr_t i = 0; i < array_.Length(); i++) {
2499 ObjectPtr element = array_.At(i);
2500 Visit(element);
2501 if (static_cast<ConstantVisitedValue>(
2502 visited_->GetValueExclusive(element)) == kDrop) {
2503 visited_->SetValueExclusive(object_ptr, kDrop);
2504 break;
2505 }
2506 }
2507 break;
2508 }
2509 case kConstMapCid: {
2510 const Map& map = Map::Handle(Map::RawCast(object_ptr));
2511 Map::Iterator iterator(map);
2512 while (iterator.MoveNext()) {
2513 ObjectPtr element = iterator.CurrentKey();
2514 Visit(element);
2515 if (static_cast<ConstantVisitedValue>(
2516 visited_->GetValueExclusive(element)) == kDrop) {
2517 visited_->SetValueExclusive(object_ptr, kDrop);
2518 break;
2519 }
2520 element = iterator.CurrentValue();
2521 Visit(element);
2522 if (static_cast<ConstantVisitedValue>(
2523 visited_->GetValueExclusive(element)) == kDrop) {
2524 visited_->SetValueExclusive(object_ptr, kDrop);
2525 break;
2526 }
2527 }
2528 break;
2529 }
2530 case kConstSetCid: {
2531 const Set& set = Set::Handle(Set::RawCast(object_ptr));
2532 Set::Iterator iterator(set);
2533 while (iterator.MoveNext()) {
2534 ObjectPtr element = iterator.CurrentKey();
2535 Visit(element);
2536 if (static_cast<ConstantVisitedValue>(
2537 visited_->GetValueExclusive(element)) == kDrop) {
2538 visited_->SetValueExclusive(object_ptr, kDrop);
2539 break;
2540 }
2541 }
2542 break;
2543 }
2544 }
2545 }
2546
2547 private:
2548 Zone* zone_;
2549 WeakTable* visited_;
2550 ObjectStore* object_store_;
2551 Object& object_;
2552 Array& array_;
2553};
2554
2555// To reduce snapshot size, we remove from constant tables all constants that
2556// cannot be sent in messages between isolate groups. Such constants will not
2557// be canonicalized at runtime.
2558void Precompiler::DropTransitiveUserDefinedConstants() {
2559 HANDLESCOPE(T);
2560 auto& constants = Array::Handle(Z);
2561 auto& obj = Object::Handle(Z);
2562 auto& lib = Library::Handle(Z);
2563 auto& cls = Class::Handle(Z);
2564 auto& instance = Instance::Handle(Z);
2565
2566 {
2567 NoSafepointScope no_safepoint(T);
2568 std::unique_ptr<WeakTable> visited(new WeakTable());
2569 ObjectStore* object_store = IG->object_store();
2570 ConstantInstanceVisitor visitor(Z, visited.get(), object_store);
2571
2572 for (intptr_t i = 0; i < libraries_.Length(); i++) {
2573 lib ^= libraries_.At(i);
2574 HANDLESCOPE(T);
2575 ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
2576 while (it.HasNext()) {
2577 cls = it.GetNextClass();
2578 if (cls.constants() == Array::null()) {
2579 continue;
2580 }
2581 typedef UnorderedHashSet<CanonicalInstanceTraits> CanonicalInstancesSet;
2582
2583 CanonicalInstancesSet constants_set(cls.constants());
2584 CanonicalInstancesSet::Iterator iterator(&constants_set);
2585
2586 if (IsUserDefinedClass(Z, cls.ptr(), object_store)) {
2587 // All constants for user-defined classes can be dropped.
2588 constants = cls.constants();
2589 dropped_constants_arrays_entries_count_ += constants.Length();
2590 if (FLAG_trace_precompiler) {
2591 THR_Print("Dropping %" Pd " entries from constants for class %s\n",
2592 constants.Length(), cls.ToCString());
2593 }
2594 while (iterator.MoveNext()) {
2595 obj = constants_set.GetKey(iterator.Current());
2596 instance = Instance::RawCast(obj.ptr());
2597 consts_to_retain_.Remove(&instance);
2598 visited->SetValueExclusive(obj.ptr(), kDrop);
2599 }
2600 } else {
2601 // Core classes might have constants that refer to user-defined
2602 // classes. Those should be dropped too.
2603 while (iterator.MoveNext()) {
2604 obj = constants_set.GetKey(iterator.Current());
2605 ConstantVisitedValue value = static_cast<ConstantVisitedValue>(
2606 visited->GetValueExclusive(obj.ptr()));
2607 if (value == kNotVisited) {
2608 visitor.Visit(obj.ptr());
2609 value = static_cast<ConstantVisitedValue>(
2610 visited->GetValueExclusive(obj.ptr()));
2611 }
2612 ASSERT(value == kDrop || value == kRetain);
2613 if (value == kDrop) {
2614 dropped_constants_arrays_entries_count_++;
2615 if (FLAG_trace_precompiler) {
2616 THR_Print("Dropping constant entry for class %s instance:%s\n",
2617 cls.ToCString(), obj.ToCString());
2618 }
2619 instance = Instance::RawCast(obj.ptr());
2620 consts_to_retain_.Remove(&instance);
2621 }
2622 }
2623 }
2624 constants_set.Release();
2625 }
2626 }
2627 }
2628}
2629
2630void Precompiler::TraceTypesFromRetainedClasses() {
2631 HANDLESCOPE(T);
2632 auto& lib = Library::Handle(Z);
2633 auto& cls = Class::Handle(Z);
2634 auto& members = Array::Handle(Z);
2635 auto& constants = Array::Handle(Z);
2636 auto& retained_constants = GrowableObjectArray::Handle(Z);
2637 auto& obj = Object::Handle(Z);
2638 auto& constant = Instance::Handle(Z);
2639
2640 SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
2641 for (intptr_t i = 0; i < libraries_.Length(); i++) {
2642 lib ^= libraries_.At(i);
2643 HANDLESCOPE(T);
2644 ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
2645 while (it.HasNext()) {
2646 cls = it.GetNextClass();
2647
2648 bool retain = false;
2649 members = cls.fields();
2650 if (members.Length() > 0) {
2651 retain = true;
2652 }
2653 members = cls.current_functions();
2654 if (members.Length() > 0) {
2655 retain = true;
2656 }
2657 if (cls.is_allocated()) {
2658 retain = true;
2659 }
2660
2661 constants = cls.constants();
2662 retained_constants = GrowableObjectArray::New();
2663 if (!constants.IsNull()) {
2664 for (intptr_t j = 0; j < constants.Length(); j++) {
2665 obj = constants.At(j);
2666 if ((obj.ptr() == HashTableBase::UnusedMarker().ptr()) ||
2667 (obj.ptr() == HashTableBase::DeletedMarker().ptr())) {
2668 continue;
2669 }
2670 constant ^= obj.ptr();
2671 bool retain = consts_to_retain_.HasKey(&constant);
2672 if (retain) {
2673 retained_constants.Add(constant);
2674 }
2675 }
2676 }
2677 // Rehash: rebuild the constants table with only the retained constants.
2678 cls.set_constants(Object::null_array());
2679 for (intptr_t j = 0; j < retained_constants.Length(); j++) {
2680 constant ^= retained_constants.At(j);
2681 cls.InsertCanonicalConstant(Z, constant);
2682 }
2683
2684 if (retained_constants.Length() > 0) {
2685 ASSERT(retain); // This shouldn't be the reason we keep a class.
2686 retain = true;
2687 }
2688
2689 if (retain) {
2690 AddTypesOf(cls);
2691 }
2692 }
2693 }
2694}
2695
2696void Precompiler::DropMetadata() {
2697 HANDLESCOPE(T);
2698 SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
2699
2700 Library& lib = Library::Handle(Z);
2701 for (intptr_t i = 0; i < libraries_.Length(); i++) {
2702 lib ^= libraries_.At(i);
2703 lib.set_metadata(Array::null_array());
2704 }
2705}
2706
2707void Precompiler::DropLibraryEntries() {
2708 HANDLESCOPE(T);
2709 Library& lib = Library::Handle(Z);
2710 Array& dict = Array::Handle(Z);
2711 Object& entry = Object::Handle(Z);
2712
2713 for (intptr_t i = 0; i < libraries_.Length(); i++) {
2714 lib ^= libraries_.At(i);
2715
2716 dict = lib.dictionary();
2717 intptr_t dict_size = dict.Length() - 1;
2718 intptr_t used = 0;
2719 for (intptr_t j = 0; j < dict_size; j++) {
2720 entry = dict.At(j);
2721 if (entry.IsNull()) continue;
2722
2723 if (entry.IsClass()) {
2724 if (classes_to_retain_.HasKey(&Class::Cast(entry))) {
2725 used++;
2726 continue;
2727 }
2728 } else if (entry.IsFunction()) {
2729 if (functions_to_retain_.ContainsKey(Function::Cast(entry))) {
2730 used++;
2731 continue;
2732 }
2733 } else if (entry.IsField()) {
2734 if (fields_to_retain_.HasKey(&Field::Cast(entry))) {
2735 used++;
2736 continue;
2737 }
2738 } else if (entry.IsLibraryPrefix()) {
2739 // Always drop.
2740 } else {
2741 FATAL("Unexpected library entry: %s", entry.ToCString());
2742 }
2743 dict.SetAt(j, Object::null_object());
2744 }
2745
2746 lib.RehashDictionary(dict, used * 4 / 3 + 1);
2747 if (!(retain_root_library_caches_ &&
2748 (lib.ptr() == IG->object_store()->root_library()))) {
2749 lib.DropDependenciesAndCaches();
2750 }
2751 }
2752}
2753
2754void Precompiler::DropClasses() {
2755 HANDLESCOPE(T);
2756 Class& cls = Class::Handle(Z);
2757 Array& constants = Array::Handle(Z);
2758 GrowableObjectArray& implementors = GrowableObjectArray::Handle(Z);
2759 GrowableObjectArray& retained_implementors = GrowableObjectArray::Handle(Z);
2760 Class& implementor = Class::Handle(Z);
2761 GrowableObjectArray& subclasses = GrowableObjectArray::Handle(Z);
2762 GrowableObjectArray& retained_subclasses = GrowableObjectArray::Handle(Z);
2763 Class& subclass = Class::Handle(Z);
2764
2765 // We are about to remove classes from the class table. For this to be safe,
2766 // there must be no instances of these classes on the heap, not even
2767 // corpses, because the class table entry may be used to find the size of
2768 // corpses. Request a full GC and wait for the sweeper tasks to finish before
2769 // we continue.
2770 IG->heap()->CollectAllGarbage();
2771 IG->heap()->WaitForSweeperTasks(T);
2772
2773 SafepointWriteRwLocker ml(T, IG->program_lock());
2774 ClassTable* class_table = IG->class_table();
2775 intptr_t num_cids = class_table->NumCids();
2776
2777 for (intptr_t cid = 0; cid < num_cids; cid++) {
2778 if (!class_table->IsValidIndex(cid)) continue;
2779 if (!class_table->HasValidClassAt(cid)) continue;
2780 cls = class_table->At(cid);
2781 constants = cls.constants();
2782 HashTables::Weaken(constants);
2783 }
2784
2785 for (intptr_t cid = kNumPredefinedCids; cid < num_cids; cid++) {
2786 if (!class_table->IsValidIndex(cid)) continue;
2787 if (!class_table->HasValidClassAt(cid)) continue;
2788
2789 cls = class_table->At(cid);
2790 ASSERT(!cls.IsNull());
2791
2792 implementors = cls.direct_implementors();
2793 if (!implementors.IsNull()) {
2794 retained_implementors = GrowableObjectArray::New();
2795 for (intptr_t i = 0; i < implementors.Length(); i++) {
2796 implementor ^= implementors.At(i);
2797 if (classes_to_retain_.HasKey(&implementor)) {
2798 retained_implementors.Add(implementor);
2799 }
2800 }
2801 cls.set_direct_implementors(retained_implementors);
2802 }
2803
2804 subclasses = cls.direct_subclasses();
2805 if (!subclasses.IsNull()) {
2806 retained_subclasses = GrowableObjectArray::New();
2807 for (intptr_t i = 0; i < subclasses.Length(); i++) {
2808 subclass ^= subclasses.At(i);
2809 if (classes_to_retain_.HasKey(&subclass)) {
2810 retained_subclasses.Add(subclass);
2811 }
2812 }
2813 cls.set_direct_subclasses(retained_subclasses);
2814 }
2815
2816 if (cls.IsTopLevel()) {
2817 // Top-level classes are referenced directly from their library. They
2818 // will only be removed as a consequence of an entire library being
2819 // removed.
2820 continue;
2821 }
2822
2823 bool retain = classes_to_retain_.HasKey(&cls);
2824 if (retain) {
2825 continue;
2826 }
2827
2828 ASSERT(!cls.is_allocated());
2829 constants = cls.constants();
2830 ASSERT(constants.IsNull() || (constants.Length() == 0));
2831
2832 dropped_class_count_++;
2833 if (FLAG_trace_precompiler) {
2834 THR_Print("Dropping class %" Pd " %s\n", cid, cls.ToCString());
2835 }
2836
2837 cls.set_id(kIllegalCid); // We check this when serializing.
2838 }
2839}
2840
2841void Precompiler::DropLibraries() {
2842 HANDLESCOPE(T);
2843 const GrowableObjectArray& retained_libraries =
2844 GrowableObjectArray::Handle(Z, GrowableObjectArray::New());
2845 const Library& root_lib =
2846 Library::Handle(Z, IG->object_store()->root_library());
2847 Library& lib = Library::Handle(Z);
2848 Class& toplevel_class = Class::Handle(Z);
2849
2850 for (intptr_t i = 0; i < libraries_.Length(); i++) {
2851 lib ^= libraries_.At(i);
2852 HANDLESCOPE(T);
2853 intptr_t entries = 0;
2854 DictionaryIterator it(lib);
2855 while (it.HasNext()) {
2856 entries++;
2857 it.GetNext();
2858 }
2859 bool retain = false;
2860 if (entries > 0) {
2861 retain = true;
2862 } else if (lib.is_dart_scheme()) {
2863 // The core libraries are referenced from the object store.
2864 retain = true;
2865 } else if (lib.ptr() == root_lib.ptr()) {
2866 // The root library might have no surviving members if it only exports
2867 // main from another library. It will still be referenced from the object
2868 // store, so retain it.
2869 retain = true;
2870 } else {
2871 // A type for a top-level class may be referenced from an object pool as
2872 // part of an error message.
2873 toplevel_class = lib.toplevel_class();
2874 if (classes_to_retain_.HasKey(&toplevel_class)) {
2875 retain = true;
2876 }
2877 }
2878
2879 if (retain) {
2880 lib.set_index(retained_libraries.Length());
2881 retained_libraries.Add(lib);
2882 } else {
2883 toplevel_class = lib.toplevel_class();
2884
2885 IG->class_table()->UnregisterTopLevel(toplevel_class.id());
2886 toplevel_class.set_id(kIllegalCid); // We check this when serializing.
2887
2888 dropped_library_count_++;
2889 lib.set_index(-1);
2890 if (FLAG_trace_precompiler) {
2891 THR_Print("Dropping library %s\n", lib.ToCString());
2892 }
2893 }
2894 }
2895
2896 Library::RegisterLibraries(T, retained_libraries);
2897 libraries_ = retained_libraries.ptr();
2898}
2899
2900 // Traverses the program structure and marks as discarded those Code
2901 // objects which do not carry useful information.
2902 // Should be called after Precompiler::ReplaceFunctionStaticCallEntries().
2903 // Should be called before ProgramVisitor::Dedup(), as Dedup may clear the
2904 // static calls target table.
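// (As used here, a discarded Code object keeps its instructions while the
// Code metadata itself is not retained in the snapshot; this is inferred
// from the checks below, which keep any Code whose metadata is still
// needed, e.g. exception handlers or PC descriptors.)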
2905void Precompiler::DiscardCodeObjects() {
2906 class DiscardCodeVisitor : public CodeVisitor {
2907 public:
2908 DiscardCodeVisitor(Zone* zone,
2909 const FunctionSet& functions_to_retain,
2910 const FunctionSet& functions_called_dynamically)
2911 : zone_(zone),
2912 function_(Function::Handle(zone)),
2913 parent_function_(Function::Handle(zone)),
2914 class_(Class::Handle(zone)),
2915 library_(Library::Handle(zone)),
2916 loading_unit_(LoadingUnit::Handle(zone)),
2917 static_calls_target_table_(Array::Handle(zone)),
2918 kind_and_offset_(Smi::Handle(zone)),
2919 call_target_(Code::Handle(zone)),
2920 targets_of_calls_via_code_(
2921 GrowableObjectArray::Handle(zone, GrowableObjectArray::New())),
2922 functions_to_retain_(functions_to_retain),
2923 functions_called_dynamically_(functions_called_dynamically) {}
2924
2925 // Certain static calls (e.g. between different loading units) are
2926 // performed through Code objects indirectly. Such Code objects
2927 // cannot be fully discarded.
2928 void RecordCodeObjectsUsedForCalls(const Code& code) {
2929 static_calls_target_table_ = code.static_calls_target_table();
2930 if (static_calls_target_table_.IsNull()) return;
2931
2932 StaticCallsTable static_calls(static_calls_target_table_);
2933 for (const auto& view : static_calls) {
2934 kind_and_offset_ = view.Get<Code::kSCallTableKindAndOffset>();
2935 auto const kind = Code::KindField::decode(kind_and_offset_.Value());
2936 if (kind == Code::kCallViaCode) {
2937 call_target_ =
2938 Code::RawCast(view.Get<Code::kSCallTableCodeOrTypeTarget>());
2939 ASSERT(!call_target_.IsNull());
2940 targets_of_calls_via_code_.Add(call_target_);
2941 }
2942 }
2943 }
2944
2945 void VisitCode(const Code& code) override {
2946 ++total_code_objects_;
2947
2948 RecordCodeObjectsUsedForCalls(code);
2949
2950 // Only discard Code objects corresponding to Dart functions.
2951 if (!code.IsFunctionCode() || code.IsUnknownDartCode()) {
2952 ++non_function_codes_;
2953 return;
2954 }
2955
2956 // Retain Code object if it has exception handlers or PC descriptors.
2957 if (code.exception_handlers() !=
2958 Object::empty_exception_handlers().ptr()) {
2959 ++codes_with_exception_handlers_;
2960 return;
2961 }
2962 if (code.pc_descriptors() != Object::empty_descriptors().ptr()) {
2963 ++codes_with_pc_descriptors_;
2964 return;
2965 }
2966
2967 function_ = code.function();
2968 if (functions_to_retain_.ContainsKey(function_)) {
2969 // Retain Code objects corresponding to native functions
2970 // (to find native implementation).
2971 if (function_.is_old_native()) {
2972 ++codes_with_native_function_;
2973 return;
2974 }
2975
2976 // Retain Code objects corresponding to dynamically
2977 // called functions.
2978 if (functions_called_dynamically_.ContainsKey(function_)) {
2979 ++codes_with_dynamically_called_function_;
2980 return;
2981 }
2982
2983 if (StackTraceUtils::IsNeededForAsyncAwareUnwinding(function_)) {
2984 ++codes_with_function_needed_for_async_unwinding_;
2985 return;
2986 }
2987 } else {
2988 ASSERT(!functions_called_dynamically_.ContainsKey(function_));
2989 }
2990
2991 // Retain Code objects in non-root loading units, as
2992 // they are allocated while loading the root unit but filled
2993 // while loading another unit.
2994 class_ = function_.Owner();
2995 library_ = class_.library();
2996 loading_unit_ = library_.loading_unit();
2997 if (loading_unit_.id() != LoadingUnit::kRootId) {
2998 ++codes_with_deferred_function_;
2999 return;
3000 }
3001
3002 // Retain Code objects corresponding to FFI trampolines.
3003 if (function_.IsFfiCallbackTrampoline()) {
3004 ++codes_with_ffi_trampoline_function_;
3005 return;
3006 }
3007
3008 code.set_is_discarded(true);
3009 if (FLAG_trace_precompiler) {
3010 THR_Print("Discarding code object corresponding to %s\n",
3011 function_.ToFullyQualifiedCString());
3012 }
3013 ++discarded_codes_;
3014 }
3015
3016 void RetainCodeObjectsUsedAsCallTargets() {
3017 for (intptr_t i = 0, n = targets_of_calls_via_code_.Length(); i < n;
3018 ++i) {
3019 call_target_ = Code::RawCast(targets_of_calls_via_code_.At(i));
3020 if (call_target_.is_discarded()) {
3021 call_target_.set_is_discarded(false);
3022 ++codes_used_as_call_targets_;
3023 --discarded_codes_;
3024 }
3025 }
3026 }
3027
3028 void PrintStatistics() const {
3029 THR_Print("Discarding Code objects:\n");
3030 THR_Print(" %8" Pd " non-function Codes\n", non_function_codes_);
3031 THR_Print(" %8" Pd " Codes with exception handlers\n",
3032 codes_with_exception_handlers_);
3033 THR_Print(" %8" Pd " Codes with pc descriptors\n",
3034 codes_with_pc_descriptors_);
3035 THR_Print(" %8" Pd " Codes with native functions\n",
3036 codes_with_native_function_);
3037 THR_Print(" %8" Pd " Codes with dynamically called functions\n",
3038 codes_with_dynamically_called_function_);
3039 THR_Print(" %8" Pd " Codes with async unwinding related functions\n",
3040 codes_with_function_needed_for_async_unwinding_);
3041 THR_Print(" %8" Pd " Codes with deferred functions\n",
3042 codes_with_deferred_function_);
3043 THR_Print(" %8" Pd " Codes with ffi trampoline functions\n",
3044 codes_with_ffi_trampoline_function_);
3045 THR_Print(" %8" Pd " Codes used as call targets\n",
3046 codes_used_as_call_targets_);
3047 THR_Print(" %8" Pd " Codes discarded\n", discarded_codes_);
3048 THR_Print(" %8" Pd " Codes total\n", total_code_objects_);
3049 }
3050
3051 private:
3052 Zone* zone_;
3053 Function& function_;
3054 Function& parent_function_;
3055 Class& class_;
3056 Library& library_;
3057 LoadingUnit& loading_unit_;
3058 Array& static_calls_target_table_;
3059 Smi& kind_and_offset_;
3060 Code& call_target_;
3061 GrowableObjectArray& targets_of_calls_via_code_;
3062 const FunctionSet& functions_to_retain_;
3063 const FunctionSet& functions_called_dynamically_;
3064
3065 // Statistics
3066 intptr_t total_code_objects_ = 0;
3067 intptr_t non_function_codes_ = 0;
3068 intptr_t codes_with_exception_handlers_ = 0;
3069 intptr_t codes_with_pc_descriptors_ = 0;
3070 intptr_t codes_with_native_function_ = 0;
3071 intptr_t codes_with_dynamically_called_function_ = 0;
3072 intptr_t codes_with_function_needed_for_async_unwinding_ = 0;
3073 intptr_t codes_with_deferred_function_ = 0;
3074 intptr_t codes_with_ffi_trampoline_function_ = 0;
3075 intptr_t codes_used_as_call_targets_ = 0;
3076 intptr_t discarded_codes_ = 0;
3077 };
3078
3079 // Code objects are used by stack traces unless dwarf_stack_traces is on.
3080 // Code objects are used by the profiler in non-PRODUCT mode.
3081 if (!FLAG_dwarf_stack_traces_mode || FLAG_retain_code_objects) {
3082 return;
3083 }
3084
3085 HANDLESCOPE(T);
3086 DiscardCodeVisitor visitor(Z, functions_to_retain_,
3087 functions_called_dynamically_);
3088 ProgramVisitor::WalkProgram(Z, IG, &visitor);
3089 visitor.RetainCodeObjectsUsedAsCallTargets();
3090
3091 if (FLAG_trace_precompiler) {
3092 visitor.PrintStatistics();
3093 }
3094}
3095
3096void Precompiler::PruneDictionaries() {
3097#if defined(DEBUG)
3098 // Verify that api_uses_ is stable: any entry in it can be found. This
3099 // check serves to catch bugs when ProgramElementSet::Hash is accidentally
3100 // defined using unstable values.
3101 ProgramElementSet::Iterator it = api_uses_.GetIterator();
3102 while (auto entry = it.Next()) {
3103 ASSERT(api_uses_.HasKey(*entry));
3104 }
3105#endif
3106
3107 // PRODUCT-only: pruning interferes with various uses of the service protocol,
3108 // including heap analysis tools.
3109#if defined(PRODUCT)
3110 class PruneDictionariesVisitor {
3111 public:
3112 GrowableObjectArrayPtr PruneLibraries(
3113 const GrowableObjectArray& libraries) {
3114 for (intptr_t i = 0; i < libraries.Length(); i++) {
3115 lib_ ^= libraries.At(i);
3116 bool retain = PruneLibrary(lib_);
3117 if (retain) {
3118 lib_.set_index(retained_libraries_.Length());
3119 retained_libraries_.Add(lib_);
3120 } else {
3121 lib_.set_index(-1);
3122 lib_.set_private_key(null_string_);
3123 }
3124 }
3125
3126 Library::RegisterLibraries(Thread::Current(), retained_libraries_);
3127 return retained_libraries_.ptr();
3128 }
3129
3130 bool PruneLibrary(const Library& lib) {
3131 dict_ = lib.dictionary();
3132 intptr_t dict_size = dict_.Length() - 1;
3133 intptr_t used = 0;
3134 for (intptr_t i = 0; i < dict_size; i++) {
3135 entry_ = dict_.At(i);
3136 if (entry_.IsNull()) continue;
3137
3138 bool retain = false;
3139 if (entry_.IsClass()) {
3140 // dart:async: Fix async stack trace lookups in dart:async to annotate
3141 // entry points or fail gracefully.
3142 // dart:core, dart:collection, dart:typed_data: Isolate messaging
3143 // between groups allows any class in these libraries.
3144 retain = PruneClass(Class::Cast(entry_)) ||
3145 (lib.url() == Symbols::DartAsync().ptr()) ||
3146 (lib.url() == Symbols::DartCore().ptr()) ||
3147 (lib.url() == Symbols::DartCollection().ptr()) ||
3148 (lib.url() == Symbols::DartTypedData().ptr());
3149 } else if (entry_.IsFunction() || entry_.IsField()) {
3150 retain = precompiler_->HasApiUse(entry_);
3151 } else {
3152 FATAL("Unexpected library entry: %s", entry_.ToCString());
3153 }
3154 if (retain) {
3155 used++;
3156 } else {
3157 dict_.SetAt(i, Object::null_object());
3158 }
3159 }
3160 lib.RehashDictionary(dict_, used * 4 / 3 + 1);
3161
3162 bool retain = used > 0;
3163 cls_ = lib.toplevel_class();
3164 if (PruneClass(cls_)) {
3165 retain = true;
3166 }
3167 if (lib.is_dart_scheme()) {
3168 retain = true;
3169 }
3170 if (lib.ptr() == root_lib_.ptr()) {
3171 retain = true;
3172 }
3173 if (precompiler_->HasApiUse(lib)) {
3174 retain = true;
3175 }
3176 return retain;
3177 }
3178
3179 bool PruneClass(const Class& cls) {
3180 bool retain = precompiler_->HasApiUse(cls);
3181
3182 functions_ = cls.functions();
3183 retained_functions_ = GrowableObjectArray::New();
3184 for (intptr_t i = 0; i < functions_.Length(); i++) {
3185 function_ ^= functions_.At(i);
3186 if (precompiler_->HasApiUse(function_)) {
3187 retained_functions_.Add(function_);
3188 retain = true;
3189 } else if (precompiler_->functions_called_dynamically_.ContainsKey(
3190 function_)) {
3191 retained_functions_.Add(function_);
3192 // No `retain = true`: the function must appear in the method
3193 // dictionary for lookup, but the class may still be removed from the
3194 // library.
3195 }
3196 }
3197 if (retained_functions_.Length() > 0) {
3198 functions_ = Array::MakeFixedLength(retained_functions_);
3199 cls.SetFunctions(functions_);
3200 } else {
3201 cls.SetFunctions(Object::empty_array());
3202 }
3203
3204 fields_ = cls.fields();
3205 retained_fields_ = GrowableObjectArray::New();
3206 for (intptr_t i = 0; i < fields_.Length(); i++) {
3207 field_ ^= fields_.At(i);
3208 if (precompiler_->HasApiUse(field_)) {
3209 retained_fields_.Add(field_);
3210 retain = true;
3211 }
3212 }
3213 if (retained_fields_.Length() > 0) {
3214 fields_ = Array::MakeFixedLength(retained_fields_);
3215 cls.SetFields(fields_);
3216 } else {
3217 cls.SetFields(Object::empty_array());
3218 }
3219
3220 return retain;
3221 }
3222
3223 explicit PruneDictionariesVisitor(Precompiler* precompiler, Zone* zone)
3224 : precompiler_(precompiler),
3225 lib_(Library::Handle(zone)),
3226 dict_(Array::Handle(zone)),
3227 entry_(Object::Handle(zone)),
3228 cls_(Class::Handle(zone)),
3229 functions_(Array::Handle(zone)),
3230 fields_(Array::Handle(zone)),
3231 function_(Function::Handle(zone)),
3232 field_(Field::Handle(zone)),
3233 retained_functions_(GrowableObjectArray::Handle(zone)),
3234 retained_fields_(GrowableObjectArray::Handle(zone)),
3235 retained_libraries_(
3236 GrowableObjectArray::Handle(zone, GrowableObjectArray::New())),
3237 root_lib_(Library::Handle(
3238 zone,
3239 precompiler->isolate_group()->object_store()->root_library())),
3240 null_string_(String::Handle(zone)) {}
3241
3242 private:
3243 Precompiler* const precompiler_;
3244 Library& lib_;
3245 Array& dict_;
3246 Object& entry_;
3247 Class& cls_;
3248 Array& functions_;
3249 Array& fields_;
3250 Function& function_;
3251 Field& field_;
3252 GrowableObjectArray& retained_functions_;
3253 GrowableObjectArray& retained_fields_;
3254 const GrowableObjectArray& retained_libraries_;
3255 const Library& root_lib_;
3256 const String& null_string_;
3257 };
3258
3259 HANDLESCOPE(T);
3260 SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
3261 PruneDictionariesVisitor visitor(this, Z);
3262 libraries_ = visitor.PruneLibraries(libraries_);
3263#endif // defined(PRODUCT)
3264}
3265
3266// Traits for the HashTable template.
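// Note: hashing by Code::Size() is cheap, and collisions are harmless here
// because IsMatch compares pointer identity.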
3267struct CodeKeyTraits {
3268 static uint32_t Hash(const Object& key) { return Code::Cast(key).Size(); }
3269 static const char* Name() { return "CodeKeyTraits"; }
3270 static bool IsMatch(const Object& x, const Object& y) {
3271 return x.ptr() == y.ptr();
3272 }
3273 static bool ReportStats() { return false; }
3274};
3275
3276typedef UnorderedHashSet<CodeKeyTraits> CodeSet;
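// CodeKeyTraits is the traits policy consumed by the UnorderedHashSet
// template: Hash() buckets a Code object by its size (cheap, though it
// collides whenever sizes repeat) and IsMatch() resolves collisions by
// pointer identity. An illustrative sketch of how such a set is used,
// mirroring the calls in the DEBUG-only checker below:
//
//   CodeSet set(HashTables::New<CodeSet>(/*initial_capacity=*/16));
//   set.Insert(code);                   // keyed by Code::Size()
//   bool seen = set.ContainsKey(code);  // matched by ptr() identity
//   set.Release();                      // hand back the backing storage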
3277
3278#if defined(DEBUG)
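// Debug-only consistency check: walks every Code object reachable through
// the program structure and reports the first function in
// functions_to_retain_ whose current code was never visited, i.e. a
// function retained by the precompiler but unreachable from the program.
// Returns Function::null() when both views agree.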
3279FunctionPtr Precompiler::FindUnvisitedRetainedFunction() {
3280 class CodeChecker : public CodeVisitor {
3281 public:
3282 CodeChecker()
3283 : visited_code_(HashTables::New<CodeSet>(/*initial_capacity=*/1024)) {}
3284 ~CodeChecker() { visited_code_.Release(); }
3285
3286 const CodeSet& visited() const { return visited_code_; }
3287
3288 void VisitCode(const Code& code) { visited_code_.Insert(code); }
3289
3290 private:
3291 CodeSet visited_code_;
3292 };
3293
3294 CodeChecker visitor;
3295 ProgramVisitor::WalkProgram(Z, IG, &visitor);
3296 const CodeSet& visited = visitor.visited();
3297
3298 FunctionSet::Iterator it(&functions_to_retain_);
3299 Function& function = Function::Handle(Z);
3300 Code& code = Code::Handle(Z);
3301 while (it.MoveNext()) {
3302 function ^= functions_to_retain_.GetKey(it.Current());
3303 if (!function.HasCode()) continue;
3304 code = function.CurrentCode();
3305 if (!visited.ContainsKey(code)) return function.ptr();
3306 }
3307 return Function::null();
3308}
3309#endif
3310
3311void Precompiler::Obfuscate() {
3312 if (!IG->obfuscate()) {
3313 return;
3314 }
3315
3316 class ScriptsCollector : public ObjectVisitor {
3317 public:
3318 explicit ScriptsCollector(Zone* zone,
3319 GrowableHandlePtrArray<const Script>* scripts)
3320 : script_(Script::Handle(zone)), scripts_(scripts) {}
3321
3322 void VisitObject(ObjectPtr obj) override {
3323 if (obj->GetClassId() == kScriptCid) {
3324 script_ ^= obj;
3325 scripts_->Add(Script::Cast(script_));
3326 }
3327 }
3328
3329 private:
3330 Script& script_;
3331 GrowableHandlePtrArray<const Script>* scripts_;
3332 };
3333
3334 GrowableHandlePtrArray<const Script> scripts(Z, 100);
3335 IsolateGroup::Current()->heap()->CollectAllGarbage();
3336 {
3337 HeapIterationScope his(T);
3338 ScriptsCollector visitor(Z, &scripts);
3339 IG->heap()->VisitObjects(&visitor);
3340 }
3341
3342 {
3343 // Note: when this object is destroyed it will commit the obfuscation
3344 // mappings into the ObjectStore. Hence the block around it - to
3345 // ensure that the destructor runs before we save the obfuscation
3346 // mappings and clear the ObjectStore.
3347 Obfuscator obfuscator(T, /*private_key=*/String::Handle(Z));
3348 String& str = String::Handle(Z);
3349 for (intptr_t i = 0; i < scripts.length(); i++) {
3350 const Script& script = scripts.At(i);
3351
3352 str = script.url();
3353 str = Symbols::New(T, str);
3354 str = obfuscator.Rename(str, /*atomic=*/true);
3355 script.set_url(str);
3356 }
3357
3358 Library& lib = Library::Handle();
3359 for (intptr_t i = 0; i < libraries_.Length(); i++) {
3360 lib ^= libraries_.At(i);
3361 if (!lib.is_dart_scheme()) {
3362 str = lib.name();
3363 str = obfuscator.Rename(str, /*atomic=*/true);
3364 lib.set_name(str);
3365
3366 str = lib.url();
3367 str = Symbols::New(T, str);
3368 str = obfuscator.Rename(str, /*atomic=*/true);
3369 lib.set_url(str);
3370 }
3371 }
3372 Library::RegisterLibraries(T, libraries_);
3373 }
3374
3375 // Obfuscation is done. Move the obfuscation map into natively allocated memory.
3376 IG->set_obfuscation_map(Obfuscator::SerializeMap(T));
3377
3378 // Discard the obfuscation mappings to avoid including them in the snapshot.
3379 IG->object_store()->set_obfuscation_map(Array::Handle(Z));
3380}
3381
3382void Precompiler::FinalizeAllClasses() {
3383 // Create a fresh Zone because kernel reading during class finalization
3384 // may create zone handles. Those handles may prevent garbage collection of
3385 // otherwise unreachable constants of dropped classes, which would
3386 // cause assertion failures during GC after classes are dropped.
3387 StackZone stack_zone(thread());
3388
3389 error_ = Library::FinalizeAllClasses();
3390 if (!error_.IsNull()) {
3391 Jump(error_);
3392 }
3393 IG->set_all_classes_finalized(true);
3394}
3395
3396void PrecompileParsedFunctionHelper::FinalizeCompilation(
3397 compiler::Assembler* assembler,
3398 FlowGraphCompiler* graph_compiler,
3399 FlowGraph* flow_graph,
3400 CodeStatistics* stats) {
3401 const Function& function = parsed_function()->function();
3402 Zone* const zone = thread()->zone();
3403
3404 // CreateDeoptInfo uses the object pool and needs to be done before
3405 // FinalizeCode.
3406 const Array& deopt_info_array =
3407 Array::Handle(zone, graph_compiler->CreateDeoptInfo(assembler));
3408 // Allocates the Instructions object. Since this occurs only at a safepoint,
3409 // there can be no concurrent access to the instruction page.
3410 const auto pool_attachment = Code::PoolAttachment::kNotAttachPool;
3411
3412 SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
3413 const Code& code = Code::Handle(
3414 Code::FinalizeCodeAndNotify(function, graph_compiler, assembler,
3415 pool_attachment, optimized(), stats));
3416 code.set_is_optimized(optimized());
3417 code.set_owner(function);
3418 if (!function.IsOptimizable()) {
3419 // A function with huge unoptimized code can become non-optimizable
3420 // after generating unoptimized code.
3421 function.set_usage_counter(INT32_MIN);
3422 }
3423
3424 graph_compiler->FinalizePcDescriptors(code);
3425 code.set_deopt_info_array(deopt_info_array);
3426
3427 graph_compiler->FinalizeStackMaps(code);
3428 graph_compiler->FinalizeVarDescriptors(code);
3429 graph_compiler->FinalizeExceptionHandlers(code);
3430 graph_compiler->FinalizeCatchEntryMovesMap(code);
3431 graph_compiler->FinalizeStaticCallTargetsTable(code);
3432 graph_compiler->FinalizeCodeSourceMap(code);
3433
3434 if (optimized()) {
3435 // Installs code while at safepoint.
3436 ASSERT(thread()->IsDartMutatorThread());
3437 function.InstallOptimizedCode(code);
3438 } else { // not optimized.
3439 function.set_unoptimized_code(code);
3440 function.AttachCode(code);
3441 }
3442
3443 if (function.IsFfiCallbackTrampoline()) {
3444 compiler::ffi::SetFfiCallbackCode(thread(), function, code);
3445 }
3446}
3447
3448// Generate allocation stubs referenced by AllocateObject instructions.
3449static void GenerateNecessaryAllocationStubs(FlowGraph* flow_graph) {
3450 for (auto block : flow_graph->reverse_postorder()) {
3451 for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
3452 if (auto allocation = it.Current()->AsAllocateObject()) {
3453 StubCode::GetAllocationStubForClass(allocation->cls());
3454 }
3455 }
3456 }
3457}
3458
3459// Return false if bailed out.
3460bool PrecompileParsedFunctionHelper::Compile(CompilationPipeline* pipeline) {
3461 ASSERT(CompilerState::Current().is_aot());
3462 if (optimized() && !parsed_function()->function().IsOptimizable()) {
3463 // All functions compiled by precompiler must be optimizable.
3464 UNREACHABLE();
3465 return false;
3466 }
3467 volatile bool is_compiled = false;
3468 Zone* const zone = thread()->zone();
3469 HANDLESCOPE(thread());
3470
3471 // We may reattempt compilation if the function needs to be assembled using
3472 // far branches on ARM. In the else branch of the setjmp call, done is set
3473 // to false, and far_branch_level is incremented if there is a longjmp from
3474 // the ARM assembler. In all other paths through this while loop, done is
3475 // set to true. far_branch_level always stays zero on ia32 and x64.
3476 bool done = false;
3477 // volatile because the variable may be clobbered by a longjmp.
3478 volatile intptr_t far_branch_level = 0;
3479 SpeculativeInliningPolicy speculative_policy(
3480 true, FLAG_max_speculative_inlining_attempts);
3481
3482 while (!done) {
3483 LongJumpScope jump;
3484 const intptr_t val = setjmp(*jump.Set());
3485 if (val == 0) {
3486 FlowGraph* flow_graph = nullptr;
3487 ZoneGrowableArray<const ICData*>* ic_data_array = nullptr;
3488 const Function& function = parsed_function()->function();
3489
3490 CompilerState compiler_state(thread(), /*is_aot=*/true, optimized(),
3491 CompilerState::ShouldTrace(function));
3492 compiler_state.set_function(function);
3493
3494 {
3495 ic_data_array = new (zone) ZoneGrowableArray<const ICData*>();
3496
3497 TIMELINE_DURATION(thread(), CompilerVerbose, "BuildFlowGraph");
3498 COMPILER_TIMINGS_TIMER_SCOPE(thread(), BuildGraph);
3499 flow_graph =
3500 pipeline->BuildFlowGraph(zone, parsed_function(), ic_data_array,
3501 Compiler::kNoOSRDeoptId, optimized());
3502 }
3503
3504 if (optimized()) {
3505 flow_graph->PopulateWithICData(function);
3506 }
3507
3508 const bool print_flow_graph =
3509 (FLAG_print_flow_graph ||
3510 (optimized() && FLAG_print_flow_graph_optimized)) &&
3511 FlowGraphPrinter::ShouldPrint(function);
3512
3513 if (print_flow_graph && !optimized()) {
3514 FlowGraphPrinter::PrintGraph("Unoptimized Compilation", flow_graph);
3515 }
3516
3517 CompilerPassState pass_state(thread(), flow_graph, &speculative_policy,
3518 precompiler_);
3519
3520 if (optimized()) {
3521 TIMELINE_DURATION(thread(), CompilerVerbose, "OptimizationPasses");
3522
3523 AotCallSpecializer call_specializer(precompiler_, flow_graph,
3524 &speculative_policy);
3525 pass_state.call_specializer = &call_specializer;
3526
3527 flow_graph = CompilerPass::RunPipeline(CompilerPass::kAOT, &pass_state);
3528 }
3529
3530 ASSERT(pass_state.inline_id_to_function.length() ==
3531 pass_state.caller_inline_id.length());
3532
3533 ASSERT(precompiler_ != nullptr);
3534
3535 // When generating code in bare instruction mode all code objects
3536 // share the same global object pool. To reduce interleaving of
3537 // unrelated object pool entries from different code objects
3538 // we attempt to pregenerate stubs referenced by the code
3539 // we are going to generate.
3540 //
3541 // Reducing interleaving means reducing recompilations triggered by
3542 // failures to commit an object pool into the global object pool.
3543 GenerateNecessaryAllocationStubs(flow_graph);
3544
3545 // Even in bare instructions mode we don't directly add objects into
3546 // the global object pool because code generation can bail out
3547 // (e.g. due to speculative optimization or branch offsets being
3548 // too big). If we were adding objects into the global pool directly
3549 // these recompilations would leave dead entries behind.
3550 // Instead we add objects into an intermediary pool which gets
3551 // committed into the global object pool at the end of the compilation.
3552 // This assumes that the global object pool itself does not grow during
3553 // code generation - unfortunately this is not always the case because
3554 // we might have nested code generation (i.e. we might generate some
3555 // stubs). If this indeed happens we retry the compilation.
3556 // (See TryCommitToParent invocation below).
3557 compiler::ObjectPoolBuilder object_pool_builder(
3558 precompiler_->global_object_pool_builder());
3559 compiler::Assembler assembler(&object_pool_builder, far_branch_level);
3560
3561 CodeStatistics* function_stats = nullptr;
3562 if (FLAG_print_instruction_stats) {
3563 // At the moment we are leaking CodeStatistics objects for
3564 // simplicity because this is just a development mode flag.
3565 function_stats = new CodeStatistics(&assembler);
3566 }
3567
3568 FlowGraphCompiler graph_compiler(
3569 &assembler, flow_graph, *parsed_function(), optimized(),
3570 &speculative_policy, pass_state.inline_id_to_function,
3571 pass_state.inline_id_to_token_pos, pass_state.caller_inline_id,
3572 ic_data_array, function_stats);
3573 pass_state.graph_compiler = &graph_compiler;
3574 CompilerPass::GenerateCode(&pass_state);
3575 {
3576 COMPILER_TIMINGS_TIMER_SCOPE(thread(), FinalizeCode);
3577 TIMELINE_DURATION(thread(), CompilerVerbose, "FinalizeCompilation");
3578 ASSERT(thread()->IsDartMutatorThread());
3579 FinalizeCompilation(&assembler, &graph_compiler, flow_graph,
3580 function_stats);
3581 }
3582
3583 if (precompiler_->phase() ==
3584 Precompiler::Phase::kFixpointCodeGeneration) {
3585 for (intptr_t i = 0; i < graph_compiler.used_static_fields().length();
3586 i++) {
3587 precompiler_->AddField(*graph_compiler.used_static_fields().At(i));
3588 }
3589
3590 const GrowableArray<const compiler::TableSelector*>& call_selectors =
3591 graph_compiler.dispatch_table_call_targets();
3592 for (intptr_t i = 0; i < call_selectors.length(); i++) {
3593 precompiler_->AddTableSelector(call_selectors[i]);
3594 }
3595 } else {
3596 // We should not be generating code outside of these two specific
3597 // precompilation phases.
3598 RELEASE_ASSERT(
3599 precompiler_->phase() ==
3600 Precompiler::Phase::kCompilingConstructorsForInstructionCounts);
3601 }
3602
3603 // In bare instructions mode try adding all entries from the object
3604 // pool into the global object pool. This might fail if we have
3605 // nested code generation (i.e. we generated some stubs) which means
3606 // that some of the object indices we used are already occupied in the
3607 // global object pool.
3608 //
3609 // In this case we simply retry compilation assuming that we are not
3610 // going to hit this problem on the second attempt.
3611 //
3612 // Note: currently we can't assume that two compilations of the same
3613 // method will lead to the same IR due to instability of inlining
3614 // heuristics (under some conditions we might end up inlining
3615 // more aggressively on the second attempt).
3616 if (!object_pool_builder.TryCommitToParent()) {
3617 done = false;
3618 continue;
3619 }
3620 // Exit the loop and the function with the correct result value.
3621 is_compiled = true;
3622 done = true;
3623 } else {
3624 // We bailed out or we encountered an error.
3625 const Error& error = Error::Handle(thread()->StealStickyError());
3626
3627 if (error.ptr() == Object::branch_offset_error().ptr()) {
3628 // Compilation failed due to an out of range branch offset in the
3629 // assembler. We try again (done = false) with far branches enabled.
3630 done = false;
3631 RELEASE_ASSERT(far_branch_level < 2);
3632 far_branch_level++;
3633 } else if (error.ptr() == Object::speculative_inlining_error().ptr()) {
3634 // The return value of setjmp is the deopt id of the check instruction
3635 // that caused the bailout.
3636 done = false;
3637 if (!speculative_policy.AllowsSpeculativeInlining()) {
3638 // Assert that we don't repeatedly retry speculation.
3639 UNREACHABLE();
3640 }
3641 if (!speculative_policy.AddBlockedDeoptId(val)) {
3642 if (FLAG_trace_compiler || FLAG_trace_optimizing_compiler) {
3643 THR_Print("Disabled speculative inlining after %" Pd " attempts.\n",
3644 speculative_policy.length());
3645 }
3646 }
3647 } else {
3648 // If the error isn't due to an out of range branch offset, we don't
3649 // try again (done = true), and indicate that we did not finish
3650 // compiling (is_compiled = false).
3651 if (FLAG_trace_bailout) {
3652 THR_Print("%s\n", error.ToErrorCString());
3653 }
3654 done = true;
3655 }
3656
3657 if (error.IsLanguageError() &&
3658 (LanguageError::Cast(error).kind() == Report::kBailout)) {
3659 // Discard the error if it was not a real error, but just a bailout.
3660 } else {
3661 // Otherwise, continue propagating.
3662 thread()->set_sticky_error(error);
3663 }
3664 is_compiled = false;
3665 }
3666 }
3667 return is_compiled;
3668}
3669
3670static ErrorPtr PrecompileFunctionHelper(Precompiler* precompiler,
3671 CompilationPipeline* pipeline,
3672 const Function& function,
3673 bool optimized) {
3674 // Check that we compile optimized code, unless the function is not optimizable.
3675 ASSERT(CompilerState::Current().is_aot());
3676 ASSERT(!function.IsOptimizable() || optimized);
3677 ASSERT(!function.HasCode());
3678 LongJumpScope jump;
3679 if (setjmp(*jump.Set()) == 0) {
3680 Thread* const thread = Thread::Current();
3681 StackZone stack_zone(thread);
3682 Zone* const zone = stack_zone.GetZone();
3683 const bool trace_compiler =
3684 FLAG_trace_compiler || (FLAG_trace_optimizing_compiler && optimized);
3685 Timer per_compile_timer;
3686 per_compile_timer.Start();
3687
3688 ParsedFunction* parsed_function = new (zone)
3689 ParsedFunction(thread, Function::ZoneHandle(zone, function.ptr()));
3690 if (trace_compiler) {
3691 THR_Print("Precompiling %sfunction: '%s' @ token %" Pd ", size %" Pd "\n",
3692 (optimized ? "optimized " : ""),
3693 function.ToFullyQualifiedCString(), function.token_pos().Pos(),
3694 (function.end_token_pos().Pos() - function.token_pos().Pos()));
3695 }
3696 {
3697 HANDLESCOPE(thread);
3698 pipeline->ParseFunction(parsed_function);
3699 }
3700
3701 PrecompileParsedFunctionHelper helper(precompiler, parsed_function,
3702 optimized);
3703 const bool success = helper.Compile(pipeline);
3704 if (!success) {
3705 // We got an error during compilation.
3706 const Error& error = Error::Handle(thread->StealStickyError());
3707 ASSERT(error.IsLanguageError() &&
3708 LanguageError::Cast(error).kind() != Report::kBailout);
3709 return error.ptr();
3710 }
3711
3712 per_compile_timer.Stop();
3713
3714 if (trace_compiler) {
3715 THR_Print("--> '%s' entry: %#" Px " size: %" Pd " time: %" Pd64 " us\n",
3716 function.ToFullyQualifiedCString(),
3717 Code::Handle(function.CurrentCode()).PayloadStart(),
3718 Code::Handle(function.CurrentCode()).Size(),
3719 per_compile_timer.TotalElapsedTime());
3720 }
3721
3722 if (FLAG_disassemble && FlowGraphPrinter::ShouldPrint(function)) {
3723 Code& code = Code::Handle(function.CurrentCode());
3724 Disassembler::DisassembleCode(function, code, optimized);
3725 } else if (FLAG_disassemble_optimized && optimized &&
3726 FlowGraphPrinter::ShouldPrint(function)) {
3727 Code& code = Code::Handle(function.CurrentCode());
3728 Disassembler::DisassembleCode(function, code, true);
3729 }
3730 return Error::null();
3731 } else {
3732 Thread* const thread = Thread::Current();
3733 StackZone stack_zone(thread);
3734 // We got an error during compilation.
3735 const Error& error = Error::Handle(thread->StealStickyError());
3736 // Precompilation may encounter compile-time errors.
3737 // Do not attempt to optimize functions that can cause errors.
3738 function.set_is_optimizable(false);
3739 return error.ptr();
3740 }
3741 UNREACHABLE();
3742 return Error::null();
3743}
3744
3745ErrorPtr Precompiler::CompileFunction(Precompiler* precompiler,
3746 Thread* thread,
3747 Zone* zone,
3748 const Function& function) {
3749 PRECOMPILER_TIMER_SCOPE(precompiler, CompileFunction);
3750 NoActiveIsolateScope no_isolate_scope;
3751
3752 VMTagScope tagScope(thread, VMTag::kCompileUnoptimizedTagId);
3753 TIMELINE_FUNCTION_COMPILATION_DURATION(thread, "CompileFunction", function);
3754
3755 ASSERT(CompilerState::Current().is_aot());
3756 const bool optimized = function.IsOptimizable(); // False for natives.
3757 DartCompilationPipeline pipeline;
3758 if (precompiler->is_tracing()) {
3759 precompiler->tracer_->WriteCompileFunctionEvent(function);
3760 }
3761
3762 return PrecompileFunctionHelper(precompiler, &pipeline, function, optimized);
3763}
3764
3765Obfuscator::Obfuscator(Thread* thread, const String& private_key)
3766 : state_(nullptr) {
3767 auto isolate_group = thread->isolate_group();
3768 if (!isolate_group->obfuscate()) {
3769 // Nothing to do.
3770 return;
3771 }
3772 auto zone = thread->zone();
3773
3774 // Create ObfuscationState from ObjectStore::obfuscation_map().
3775 ObjectStore* store = isolate_group->object_store();
3776 Array& obfuscation_state = Array::Handle(zone, store->obfuscation_map());
3777
3778 if (store->obfuscation_map() == Array::null()) {
3779 // We are just starting the obfuscation. Create initial state.
3780 const int kInitialPrivateCapacity = 256;
3781 obfuscation_state = Array::New(kSavedStateSize);
3782 obfuscation_state.SetAt(
3783 1, Array::Handle(zone, HashTables::New<ObfuscationMap>(
3784 kInitialPrivateCapacity, Heap::kOld)));
3785 }
3786
3787 state_ = new (zone) ObfuscationState(thread, obfuscation_state, private_key);
3788
3789 if (store->obfuscation_map() == Array::null()) {
3790 // We are just starting the obfuscation. Initialize the renaming map.
3791 // Note: InitializeRenamingMap uses state_.
3792 InitializeRenamingMap();
3793 }
3794}
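// A note on the saved state's layout, as implied by the constructor above
// and SaveState() below: the state array holds the current name-counter
// string at kSavedStateNameIndex and the renames hash table at
// kSavedStateRenamesIndex (index 1, the slot initialized above). The
// private key is carried separately so that BuildRename() can re-append
// it to renamed private identifiers.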
3795
3796Obfuscator::~Obfuscator() {
3797 if (state_ != nullptr) {
3798 state_->SaveState();
3799 }
3800}
3801
3802void Obfuscator::InitializeRenamingMap() {
3803// Prevent renaming of all pseudo-keywords and operators.
3804// Note: not all pseudo-keywords are mentioned in DART_KEYWORD_LIST
3805// (for example 'hide', 'show', and async-related keywords are omitted).
3806// Those are protected from renaming as part of all symbols.
3807#define PREVENT_RENAMING(name, value, priority, attr) \
3808 do { \
3809 if (Token::CanBeOverloaded(Token::name) || \
3810 ((Token::attr & Token::kPseudoKeyword) != 0)) { \
3811 PreventRenaming(value); \
3812 } \
3813 } while (0);
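// The `do { ... } while (0);` wrapper - with its own trailing semicolon -
// makes each expansion below a complete, self-delimiting statement, since
// DART_TOKEN_LIST and DART_KEYWORD_LIST invoke the macro back to back
// with no separators between entries.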
3814
3815 DART_TOKEN_LIST(PREVENT_RENAMING)
3816 DART_KEYWORD_LIST(PREVENT_RENAMING)
3817#undef PREVENT_RENAMING
3818
3819 // "this" is a keyword token unless it occurs in a string interpolation,
3820 // where it would otherwise be subject to obfuscation.
3821 PreventRenaming("this");
3822
3823// Protect all symbols from renaming.
3824#define PREVENT_RENAMING(name, value) PreventRenaming(value);
3825 PREDEFINED_SYMBOLS_LIST(PREVENT_RENAMING)
3826#undef PREVENT_RENAMING
3827
3828 // Protect NativeFieldWrapperClassX names from being obfuscated. Those
3829 // classes are created manually by the runtime system.
3830 // TODO(dartbug.com/30524) instead call to Obfuscator::Rename from a place
3831 // where these are created.
3832 PreventRenaming("NativeFieldWrapperClass1");
3833 PreventRenaming("NativeFieldWrapperClass2");
3834 PreventRenaming("NativeFieldWrapperClass3");
3835 PreventRenaming("NativeFieldWrapperClass4");
3836
3837// Prevent renaming of ClassID.cid* fields. These fields are injected by
3838// runtime.
3839// TODO(dartbug.com/30524) instead call to Obfuscator::Rename from a place
3840// where these are created.
3841#define CLASS_LIST_WITH_NULL(V) \
3842 V(Null) \
3843 CLASS_LIST_NO_OBJECT(V)
3844#define PREVENT_RENAMING(clazz) PreventRenaming("cid" #clazz);
3845 CLASS_LIST_WITH_NULL(PREVENT_RENAMING)
3846#undef PREVENT_RENAMING
3847#undef CLASS_LIST_WITH_NULL
3848
3849// Prevent renaming of methods that are looked up by method recognizer.
3850// TODO(dartbug.com/30524) instead call to Obfuscator::Rename from a place
3851// where these are looked up.
3852#define PREVENT_RENAMING(class_name, function_name, recognized_enum, \
3853 fingerprint) \
3854 do { \
3855 PreventRenaming(#class_name); \
3856 PreventRenaming(#function_name); \
3857 } while (0);
3858 RECOGNIZED_LIST(PREVENT_RENAMING)
3859#undef PREVENT_RENAMING
3860
3861// Prevent renaming of methods that are looked up as polymorphic targets.
3862// TODO(dartbug.com/30524) instead call to Obfuscator::Rename from a place
3863// where these are looked up.
3864#define PREVENT_RENAMING(class_name, function_name, recognized_enum, \
3865 fingerprint) \
3866 do { \
3867 PreventRenaming(#class_name); \
3868 PreventRenaming(#function_name); \
3869 } while (0);
3870 POLYMORPHIC_TARGET_LIST(PREVENT_RENAMING)
3871#undef PREVENT_RENAMING
3872
3873 // These are not mentioned in the entry points but are still looked up by
3874 // name. (They are omitted from the entry points because they are not
3875 // needed after compilation.)
3876 PreventRenaming("_resolveScriptUri");
3877
3878 // The precompiler looks up "main".
3879 // TODO(dartbug.com/30524) instead call to Obfuscator::Rename from a place
3880 // where these are created.
3881 PreventRenaming("main");
3882
3883 // Fast path for common conditional import. See Deobfuscate method.
3884 PreventRenaming("dart");
3885 PreventRenaming("library");
3886 PreventRenaming("io");
3887 PreventRenaming("html");
3888
3889 // Looked up by name via "DartUtils::GetDartType".
3890 PreventRenaming("_RandomAccessFile");
3891 PreventRenaming("_RandomAccessFileOpsImpl");
3892 PreventRenaming("ResourceHandle");
3893 PreventRenaming("_ResourceHandleImpl");
3894 PreventRenaming("_SocketControlMessageImpl");
3895 PreventRenaming("_NamespaceImpl");
3896}
3897
3898StringPtr Obfuscator::ObfuscationState::RenameImpl(const String& name,
3899 bool atomic) {
3900 ASSERT(name.IsSymbol());
3901
3902 renamed_ ^= renames_.GetOrNull(name);
3903 if (renamed_.IsNull()) {
3904 renamed_ = BuildRename(name, atomic);
3905 renames_.UpdateOrInsert(name, renamed_);
3906 }
3907 return renamed_.ptr();
3908}
3909
3910static const char* const kGetterPrefix = "get:";
3911static const intptr_t kGetterPrefixLength = strlen(kGetterPrefix);
3912static const char* const kSetterPrefix = "set:";
3913static const intptr_t kSetterPrefixLength = strlen(kSetterPrefix);
3914
3915void Obfuscator::PreventRenaming(const char* name) {
3916 // For constructor names of the form Class.name, skip the class name (if any) and the dot.
3917 const char* dot = strchr(name, '.');
3918 if (dot != nullptr) {
3919 name = dot + 1;
3920 }
3921
3922 // Empty name: do nothing.
3923 if (name[0] == '\0') {
3924 return;
3925 }
3926
3927 // Skip get: and set: prefixes.
3928 if (strncmp(name, kGetterPrefix, kGetterPrefixLength) == 0) {
3929 name = name + kGetterPrefixLength;
3930 } else if (strncmp(name, kSetterPrefix, kSetterPrefixLength) == 0) {
3931 name = name + kSetterPrefixLength;
3932 }
3933
3934 state_->PreventRenaming(name);
3935}
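// Illustrative examples of the normalization above (hypothetical names):
//   PreventRenaming("Point.x")    -> protects "x" (constructor-style name).
//   PreventRenaming("get:length") -> protects "length".
//   PreventRenaming("List.")      -> empty after the dot is skipped; no-op.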
3936
3937void Obfuscator::ObfuscationState::SaveState() {
3938 saved_state_.SetAt(kSavedStateNameIndex, String::Handle(String::New(name_)));
3939 saved_state_.SetAt(kSavedStateRenamesIndex, renames_.Release());
3940 thread_->isolate_group()->object_store()->set_obfuscation_map(saved_state_);
3941}
3942
3943void Obfuscator::ObfuscationState::PreventRenaming(const char* name) {
3944 string_ = Symbols::New(thread_, name);
3945 PreventRenaming(string_);
3946}
3947
3948void Obfuscator::ObfuscationState::PreventRenaming(const String& name) {
3949 renames_.UpdateOrInsert(name, name);
3950}
3951
3952void Obfuscator::ObfuscationState::NextName() {
3953 // We apply the following rules:
3954 //
3955 // inc(a) = b, ... , inc(z) = A, ..., inc(Z) = a & carry.
3956 //
3957 for (intptr_t i = 0;; i++) {
3958 const char digit = name_[i];
3959 if (digit == '\0') {
3960 name_[i] = 'a';
3961 } else if (digit < 'Z') {
3962 name_[i]++;
3963 } else if (digit == 'Z') {
3964 name_[i] = 'a';
3965 continue; // Carry.
3966 } else if (digit < 'z') {
3967 name_[i]++;
3968 } else {
3969 name_[i] = 'A';
3970 }
3971 break;
3972 }
3973}
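// In effect name_ is a little-endian base-52 counter over [a-z][A-Z],
// least-significant character first. A worked trace:
//   "" -> "a" -> "b" -> ... -> "z" -> "A" -> ... -> "Z" -> "aa" -> "ba"
// ('Z' wraps position 0 back to 'a' and carries into the next position).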
3974
3975StringPtr Obfuscator::ObfuscationState::NewAtomicRename(
3976 bool should_be_private) {
3977 do {
3978 NextName();
3979 renamed_ = Symbols::NewFormatted(thread_, "%s%s",
3980 should_be_private ? "_" : "", name_);
3981 // Must check if our generated name clashes with something that will
3982 // have an identity renaming.
3983 } while (renames_.GetOrNull(renamed_) == renamed_.ptr());
3984 return renamed_.ptr();
3985}
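// Example of the clash being avoided: if "ab" was earlier protected via
// PreventRenaming(), renames_ maps "ab" to itself, so handing out "ab" as
// a fresh rename would collide with that identity entry; the loop above
// simply advances to the next candidate instead.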
3986
3987StringPtr Obfuscator::ObfuscationState::BuildRename(const String& name,
3988 bool atomic) {
3989 // Do not rename record positional field names $1, $2, etc.,
3990 // so that they are handled properly during dynamic invocations.
3991 if (Record::GetPositionalFieldIndexFromFieldName(name) >= 0) {
3992 return name.ptr();
3993 }
3994
3995 if (atomic) {
3996 return NewAtomicRename(name.CharAt(0) == '_');
3997 }
3998
3999 intptr_t start = 0;
4000 intptr_t end = name.Length();
4001
4002 // Follow the rules:
4003 //
4004 // Rename(get:foo) = get:Rename(foo).
4005 // Rename(set:foo) = set:Rename(foo).
4006 //
4007 bool is_getter = false;
4008 bool is_setter = false;
4009 if (Field::IsGetterName(name)) {
4010 is_getter = true;
4011 start = kGetterPrefixLength;
4012 } else if (Field::IsSetterName(name)) {
4013 is_setter = true;
4014 start = kSetterPrefixLength;
4015 }
4016
4017 // Follow the rule:
4018 //
4019 // Rename(_ident@key) = Rename(_ident)@private_key_.
4020 //
4021 const bool is_private = name.CharAt(start) == '_';
4022 if (is_private) {
4023 // Find the first '@'.
4024 intptr_t i = start;
4025 while (i < name.Length() && name.CharAt(i) != '@') {
4026 i++;
4027 }
4028 end = i;
4029 }
4030
4031 if (is_getter || is_setter || is_private) {
4032 string_ = Symbols::New(thread_, name, start, end - start);
4033 // It's OK to call RenameImpl() recursively because 'string_' is used
4034 // only if atomic == false.
4035 string_ = RenameImpl(string_, /*atomic=*/true);
4036 if (is_private && (end < name.Length())) {
4037 string_ = Symbols::FromConcat(thread_, string_, private_key_);
4038 }
4039 if (is_getter) {
4040 return Symbols::FromGet(thread_, string_);
4041 } else if (is_setter) {
4042 return Symbols::FromSet(thread_, string_);
4043 }
4044 return string_.ptr();
4045 } else {
4046 return NewAtomicRename(is_private);
4047 }
4048}
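// A worked example of the rules above, with a hypothetical identifier and
// "_ab" standing in for whatever NewAtomicRename() happens to produce:
//   BuildRename("get:_foo@17", atomic=false)
//     -> start = 4 (past "get:"), end = 8 (at '@'), core = "_foo";
//        the core is renamed atomically to "_ab", private_key_ is
//        re-appended since end < name.Length(), and the result is turned
//        back into a getter name via Symbols::FromGet().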
4049
4050void Obfuscator::Deobfuscate(Thread* thread,
4051 const GrowableObjectArray& pieces) {
4052 const Array& obfuscation_state =
4053 Array::Handle(thread->zone(),
4054 thread->isolate_group()->object_store()->obfuscation_map());
4055 if (obfuscation_state.IsNull()) {
4056 return;
4057 }
4058
4059 const Array& renames = Array::Handle(
4060 thread->zone(), GetRenamesFromSavedState(obfuscation_state));
4061
4062 ObfuscationMap renames_map(renames.ptr());
4063 String& piece = String::Handle();
4064 for (intptr_t i = 0; i < pieces.Length(); i++) {
4065 piece ^= pieces.At(i);
4066 ASSERT(piece.IsSymbol());
4067
4068 // Fast path: skip '.'
4069 if (piece.ptr() == Symbols::Dot().ptr()) {
4070 continue;
4071 }
4072
4073 // Fast path: check if piece has an identity obfuscation.
4074 if (renames_map.GetOrNull(piece) == piece.ptr()) {
4075 continue;
4076 }
4077
4078 // Search through the whole obfuscation map until a matching value is found.
4079 // We use linear search instead of generating a reverse mapping because we
4080 // assume that the Deobfuscate() method is almost never called.
4081 ObfuscationMap::Iterator it(&renames_map);
4082 while (it.MoveNext()) {
4083 const intptr_t entry = it.Current();
4084 if (renames_map.GetPayload(entry, 0) == piece.ptr()) {
4085 piece ^= renames_map.GetKey(entry);
4086 pieces.SetAt(i, piece);
4087 break;
4088 }
4089 }
4090 }
4091 renames_map.Release();
4092}
4093
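// Converts a Dart String into a freshly new[]-allocated, NUL-terminated
// UTF-8 C string. Ownership passes to the caller; here the strings end up
// in the serialized obfuscation map built by SerializeMap() below.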
4094static const char* StringToCString(const String& str) {
4095 const intptr_t len = Utf8::Length(str);
4096 char* result = new char[len + 1];
4097 str.ToUTF8(reinterpret_cast<uint8_t*>(result), len);
4098 result[len] = 0;
4099 return result;
4100}
4101
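// The serialized map is a flat, nullptr-terminated array of C strings
// holding (original, renamed) pairs:
//   { "orig1", "renamed1", "orig2", "renamed2", ..., nullptr }
// which is why `NumOccupied() * 2 + 1` slots are allocated below.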
4102const char** Obfuscator::SerializeMap(Thread* thread) {
4103 const Array& obfuscation_state =
4104 Array::Handle(thread->zone(),
4105 thread->isolate_group()->object_store()->obfuscation_map());
4106 if (obfuscation_state.IsNull()) {
4107 return nullptr;
4108 }
4109
4110 const Array& renames = Array::Handle(
4111 thread->zone(), GetRenamesFromSavedState(obfuscation_state));
4112 ObfuscationMap renames_map(renames.ptr());
4113
4114 const char** result = new const char*[renames_map.NumOccupied() * 2 + 1];
4115 intptr_t idx = 0;
4116 String& str = String::Handle();
4117
4118 ObfuscationMap::Iterator it(&renames_map);
4119 while (it.MoveNext()) {
4120 const intptr_t entry = it.Current();
4121 str ^= renames_map.GetKey(entry);
4122 result[idx++] = StringToCString(str);
4123 str ^= renames_map.GetPayload(entry, 0);
4124 result[idx++] = StringToCString(str);
4125 }
4126 result[idx++] = nullptr;
4127 renames_map.Release();
4128
4129 return result;
4130}
4131
4132#endif // defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
4133
4134} // namespace dart