#define IG (isolate_group())

DEFINE_FLAG(bool,
            print_precompiler_timings,
            false,
            "Print per-phase breakdown of time spent precompiling");
DEFINE_FLAG(bool, print_unique_targets, false, "Print unique dynamic targets");
DEFINE_FLAG(charp,
            print_object_layout_to,
            nullptr,
            "Print layout of Dart objects to the given file");
DEFINE_FLAG(bool, trace_precompiler, false, "Trace precompiler.");
DEFINE_FLAG(
    int,
    max_speculative_inlining_attempts,
    1,
    "Max number of attempts with speculative inlining (precompilation only)");
DEFINE_FLAG(charp,
            write_retained_reasons_to,
            nullptr,
            "Print reasons for retaining objects to the given file");
#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)

// Reasons why an object may be retained by the precompiler, recorded via
// RetainedReasonsWriter when FLAG_write_retained_reasons_to is set.
struct RetainReasons : public AllStatic {
  static constexpr const char* kLLVMPool = "llvm pool";
  static constexpr const char* kInvokeFieldDispatcher =
      "invoke field dispatcher";
  static constexpr const char* kDynamicInvocationForwarder =
      "dynamic invocation forwarder";
  static constexpr const char* kMethodExtractor = "method extractor";
  static constexpr const char* kImplicitClosure = "implicit closure";
  static constexpr const char* kLocalClosure = "local closure";
  static constexpr const char* kAsyncStackUnwinding =
      "needed for async stack unwinding";
  static constexpr const char* kStaticFieldInitializer =
      "static field initializer";
  static constexpr const char* kInstanceFieldInitializer =
      "instance field initializer";
  static constexpr const char* kLateFieldInitializer =
      "late field initializer";
  static constexpr const char* kImplicitGetter = "implicit getter";
  static constexpr const char* kImplicitSetter = "implicit setter";
  static constexpr const char* kImplicitStaticGetter =
      "implicit static getter";
  static constexpr const char* kCalledThroughGetter = "called through getter";
  static constexpr const char* kCalledViaSelector = "called via selector";
  static constexpr const char* kForcedRetain = "forced via flag";
  static constexpr const char* kSymbolicStackTraces =
      "needed for symbolic stack traces";
  static constexpr const char* kLocalParent = "parent of a local function";
  static constexpr const char* kMainFunction =
      "this is main function of the root library";
  static constexpr const char* kEntryPointPragma = "entry point pragma";
  static constexpr const char* kFfiCallbackTarget = "ffi callback target";
  static constexpr const char* kClosureSignature = "closure signature";
  static constexpr const char* kFfiTrampolineSignature =
      "FFI trampoline signature";
  static constexpr const char* kNativeSignature = "native function signature";
  static constexpr const char* kRequiredNamedParameters =
      "signature has required named parameters";
  static constexpr const char* kDynamicallyCalledSignature =
      "signature of dynamically called function";
  static constexpr const char* kEntryPointPragmaSignature =
      "signature of entry point function";
};
class RetainedReasonsWriter : public ValueObject {
 public:
  explicit RetainedReasonsWriter(Zone* zone)
      : zone_(zone), retained_reasons_map_(zone) {}

  bool Init(const char* filename) {
    if (filename == nullptr) return false;

    // The writer relies on embedder-provided file callbacks.
    if (Dart::file_write_callback() == nullptr ||
        Dart::file_open_callback() == nullptr ||
        Dart::file_close_callback() == nullptr) {
      OS::PrintErr("warning: Could not access file callbacks.");
      return false;
    }

    void* const file = Dart::file_open_callback()(filename, /*write=*/true);
    if (file == nullptr) {
      OS::PrintErr("warning: Failed to write retained reasons: %s\n",
                   filename);
      return false;
    }

    file_ = file;
    writer_.OpenArray();
    return true;
  }

  void AddDropped(const Object& obj) {
    if (HasReason(obj)) {
      FATAL("dropped object has reasons to retain");
    }
    writer_.OpenObject();
    WriteRetainedObjectSpecificFields(obj);
    writer_.PrintPropertyBool("retained", false);
    writer_.CloseObject();
  }

  bool HasReason(const Object& obj) const {
    return retained_reasons_map_.HasKey(&obj);
  }

  void AddReason(const Object& obj, const char* reason) {
    if (auto const kv = retained_reasons_map_.Lookup(&obj)) {
      if (kv->value->Lookup(reason) == nullptr) {
        kv->value->Insert(reason);
      }
      return;
    }
    auto const key = &Object::ZoneHandle(zone_, obj.ptr());
    auto const value = new (zone_) ZoneCStringSet(zone_);
    value->Insert(reason);
    retained_reasons_map_.Insert(RetainedReasonsTrait::Pair(key, value));
  }

  // Writes one JSON object per recorded entry and closes the top-level array.
  void Write() {
    if (file_ == nullptr) return;
    // ...
    auto it = retained_reasons_map_.GetIterator();
    for (auto kv = it.Next(); kv != nullptr; kv = it.Next()) {
      writer_.OpenObject();
      WriteRetainedObjectSpecificFields(*kv->key);
      writer_.PrintPropertyBool("retained", true);

      writer_.OpenArray("reasons");
      auto it = kv->value->GetIterator();
      for (auto cstrp = it.Next(); cstrp != nullptr; cstrp = it.Next()) {
        ASSERT(*cstrp != nullptr);
        writer_.PrintValue(*cstrp);
      }
      writer_.CloseArray();

      writer_.CloseObject();
    }
    writer_.CloseArray();
    // ...
  }

 private:
  struct RetainedReasonsTrait {
    using Key = const Object*;
    using Value = ZoneCStringSet*;
    // ...

    static Key KeyOf(Pair kv) { return kv.key; }
    static Value ValueOf(Pair kv) { return kv.value; }

    static inline uword Hash(Key key) {
      if (key->IsFunction()) {
        return Function::Cast(*key).Hash();
      }
      if (key->IsClass()) {
        // ...
      }
      if (key->IsAbstractType()) {
        return AbstractType::Cast(*key).Hash();
      }
      // ...
    }

    static inline bool IsKeyEqual(Pair pair, Key key) {
      return pair.key->ptr() == key->ptr();
    }
  };

  using RetainedReasonsMap = DirectChainedHashMap<RetainedReasonsTrait>;

  void WriteRetainedObjectSpecificFields(const Object& obj) {
    if (obj.IsFunction()) {
      writer_.PrintProperty("type", "Function");
      const auto& function = Function::Cast(obj);
      writer_.PrintProperty("name",
                            function.ToLibNamePrefixedQualifiedCString());
      writer_.PrintProperty("kind",
                            UntaggedFunction::KindToCString(function.kind()));
      return;
    } else if (obj.IsFunctionType()) {
      writer_.PrintProperty("type", "FunctionType");
      const auto& sig = FunctionType::Cast(obj);
      writer_.PrintProperty("name", sig.ToCString());
      return;
    }
    FATAL("Unexpected object %s", obj.ToCString());
  }

  Zone* const zone_;
  RetainedReasonsMap retained_reasons_map_;
  JSONWriter writer_;
  void* file_ = nullptr;
};
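
// Helper that runs the compilation pipeline for a single ParsedFunction
// during precompilation and installs the resulting Code object.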
class PrecompileParsedFunctionHelper : public ValueObject {
 public:
  PrecompileParsedFunctionHelper(Precompiler* precompiler,
                                 ParsedFunction* parsed_function,
                                 bool optimized)
      : precompiler_(precompiler),
        parsed_function_(parsed_function),
        optimized_(optimized),
        thread_(Thread::Current()) {}

  bool Compile(CompilationPipeline* pipeline);

 private:
  ParsedFunction* parsed_function() const { return parsed_function_; }
  bool optimized() const { return optimized_; }
  Thread* thread() const { return thread_; }

  void FinalizeCompilation(compiler::Assembler* assembler,
                           FlowGraphCompiler* graph_compiler,
                           FlowGraph* flow_graph,
                           CodeStatistics* stats);

  Precompiler* precompiler_;
  ParsedFunction* parsed_function_;
  const bool optimized_;
  Thread* const thread_;
};
static void Jump(const Error& error) {
  Thread::Current()->long_jump_base()->Jump(1, error);
}

ErrorPtr Precompiler::CompileAll() {
  LongJumpScope jump;
  if (setjmp(*jump.Set()) == 0) {
    Precompiler precompiler(Thread::Current());
    precompiler.DoCompileAll();
    precompiler.ReportStats();
    return Error::null();
  } else {
    return Thread::Current()->StealStickyError();
  }
}

void Precompiler::ReportStats() {
  if (!FLAG_print_precompiler_timings) {
    return;
  }
  // ...
}
Precompiler::Precompiler(Thread* thread)
    :  // ...
      retain_root_library_caches_(false),
      // ...
      dropped_function_count_(0),
      dropped_field_count_(0),
      dropped_class_count_(0),
      dropped_typearg_count_(0),
      dropped_type_count_(0),
      dropped_functiontype_count_(0),
      dropped_typeparam_count_(0),
      dropped_library_count_(0),
      dropped_constants_arrays_entries_count_(0),
      libraries_(GrowableObjectArray::Handle(
          thread->isolate_group()->object_store()->libraries())),
      pending_functions_(
          GrowableObjectArray::Handle(GrowableObjectArray::New())),
      functions_called_dynamically_(HashTables::New<FunctionSet>(1024)),
      functions_with_entry_point_pragmas_(HashTables::New<FunctionSet>(1024)),
      seen_functions_(HashTables::New<FunctionSet>(1024)),
      possibly_retained_functions_(HashTables::New<FunctionSet>(1024)),
      functions_to_retain_(HashTables::New<FunctionSet>(1024)),
      classes_to_retain_(),
      typeargs_to_retain_(),
      // ...
      functiontypes_to_retain_(),
      typeparams_to_retain_(),
      // ...
      seen_table_selectors_(),
      // ...
      error_(Error::Handle()),
      get_runtime_type_is_unique_(false) {
  ASSERT(Precompiler::singleton_ == nullptr);
  Precompiler::singleton_ = this;

  if (FLAG_print_precompiler_timings) {
    thread->set_compiler_timings(new CompilerTimings());
  }
}
Precompiler::~Precompiler() {
  // ...
  functions_called_dynamically_.Release();
  functions_with_entry_point_pragmas_.Release();
  seen_functions_.Release();
  possibly_retained_functions_.Release();
  functions_to_retain_.Release();

  ASSERT(Precompiler::singleton_ == this);
  Precompiler::singleton_ = nullptr;

  delete thread()->compiler_timings();
  thread()->set_compiler_timings(nullptr);
}
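
// Main driver of AOT precompilation: seeds the root set, iterates to a
// fixed point compiling everything reachable, then drops unreachable
// functions, fields, constants and classes, and deduplicates the result.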
void Precompiler::DoCompileAll() {
  {
    StackZone stack_zone(T);
    zone_ = stack_zone.GetZone();
    RetainedReasonsWriter reasons_writer(zone_);

    if (reasons_writer.Init(FLAG_write_retained_reasons_to)) {
      retained_reasons_writer_ = &reasons_writer;
    }

    // ...
    global_object_pool_builder_.InitializeWithZone(zone_);

    // ...
    FinalizeAllClasses();
    ASSERT(Error::Handle(Z, T->sticky_error()).IsNull());

    if (FLAG_print_object_layout_to != nullptr) {
      IG->class_table()->PrintObjectLayout(FLAG_print_object_layout_to);
    }

    ClassFinalizer::SortClasses();

    // Collects type usage information which allows us to decide how to
    // optimize runtime type tests.
    TypeUsageInfo type_usage_info(T);

    // The cid-ranges of subclasses of a class are used e.g. for is/as checks.
    HierarchyInfo hierarchy_info(T);

    dispatch_table_generator_ = new compiler::DispatchTableGenerator(Z);
    dispatch_table_generator_->Initialize(IG->class_table());

    // Precompile constructors to compute information such as
    // optimized instruction counts (used in inlining heuristics).
    ClassFinalizer::ClearAllCode(/* ... */);
    {
      CompilerState state(thread_, /*is_aot=*/true, /*is_optimizing=*/true);
      PrecompileConstructors();
    }
    ClassFinalizer::ClearAllCode(/* ... */);

    tracer_ = PrecompilerTracer::StartTracingIfRequested(this);

    // All stubs have already been generated and share one object pool; use
    // that pool to seed the global object pool.
    {
      const Code& code = StubCode::LazyCompile();
      const ObjectPool& stub_pool = ObjectPool::Handle(code.object_pool());

      global_object_pool_builder()->Reset();
      stub_pool.CopyInto(global_object_pool_builder());

      // Re-generate the isolate-specific stubs against the new pool.
      auto& stub_code = Code::Handle();
#define DO(member, name)                                                       \
  stub_code = StubCode::BuildIsolateSpecific##name##Stub(                      \
      global_object_pool_builder());                                           \
  IG->object_store()->set_##member(stub_code);
      // ... (DO is applied to each isolate-specific stub)
#undef DO
    }

    CollectDynamicFunctionNames();

    // Start with the allocations and invocations that happen from C++.
    {
      TracingScope scope(this);
      // ...
      AddTypeArguments(
          TypeArguments::Handle(Z, IG->object_store()->type_argument_int()));
      AddTypeArguments(TypeArguments::Handle(
          Z, IG->object_store()->type_argument_double()));
      AddTypeArguments(TypeArguments::Handle(
          Z, IG->object_store()->type_argument_string()));
      AddTypeArguments(TypeArguments::Handle(
          Z, IG->object_store()->type_argument_string_dynamic()));
      AddTypeArguments(TypeArguments::Handle(
          Z, IG->object_store()->type_argument_string_string()));
    }

    // ...
    AttachOptimizedTypeTestingStub();

    {
      // Generate the actual object pool instance and attach it to the object
      // store; its length is fixed after this point.
      const auto& pool = ObjectPool::Handle(
          ObjectPool::NewFromBuilder(*global_object_pool_builder()));
      IG->object_store()->set_global_object_pool(pool);
      global_object_pool_builder()->Reset();

      if (FLAG_disassemble) {
        // ...
      }
    }

    if (tracer_ != nullptr) {
      // ...
    }

    TraceForRetainedFunctions();
    FinalizeDispatchTable();
    ReplaceFunctionStaticCallEntries();

    // ... (drop unreachable functions, fields, classes, ...)
    DropTransitiveUserDefinedConstants();
    TraceTypesFromRetainedClasses();

    // Clear caches and internal hooks that are not needed in the snapshot.
    IG->object_store()->set_unique_dynamic_targets(Array::null_array());
    Library& null_library = Library::Handle(Z);
    Class& null_class = Class::Handle(Z);
    Function& null_function = Function::Handle(Z);
    Field& null_field = Field::Handle(Z);
    IG->object_store()->set_pragma_class(null_class);
    IG->object_store()->set_pragma_name(null_field);
    IG->object_store()->set_pragma_options(null_field);
    IG->object_store()->set_compiletime_error_class(null_class);
    IG->object_store()->set_growable_list_factory(null_function);
    IG->object_store()->set_simple_instance_of_function(null_function);
    IG->object_store()->set_simple_instance_of_true_function(null_function);
    IG->object_store()->set_simple_instance_of_false_function(null_function);
    IG->object_store()->set_async_star_stream_controller(null_class);
    IG->object_store()->set_native_assets_library(null_library);

    DropLibraryEntries();
    // ...

    {
      // Verify that all retained code is reachable by the program visitor.
      const auto& non_visited =
          Function::Handle(Z, FindUnvisitedRetainedFunction());
      if (!non_visited.IsNull()) {
        FATAL("Code visitor would miss the code for function \"%s\"\n",
              non_visited.ToFullyQualifiedCString());
      }
    }

    DiscardCodeObjects();
    // ...
    ProgramVisitor::Dedup(T);

    if (retained_reasons_writer_ != nullptr) {
      reasons_writer.Write();
      retained_reasons_writer_ = nullptr;
    }

    zone_ = nullptr;
  }

  intptr_t symbols_before = -1;
  intptr_t symbols_after = -1;
  intptr_t capacity = -1;
  if (FLAG_trace_precompiler) {
    Symbols::GetStats(IG, &symbols_before, &capacity);
  }

  // ...

  if (FLAG_trace_precompiler) {
    Symbols::GetStats(IG, &symbols_after, &capacity);
    THR_Print("Precompiled %" Pd " functions,", function_count_);
    THR_Print(" %" Pd " dynamic types,", class_count_);
    THR_Print(" %" Pd " dynamic selectors.\n", selector_count_);

    THR_Print("Dropped %" Pd " functions,", dropped_function_count_);
    THR_Print(" %" Pd " fields,", dropped_field_count_);
    THR_Print(" %" Pd " symbols,", symbols_before - symbols_after);
    THR_Print(" %" Pd " function types,", dropped_functiontype_count_);
    THR_Print(" %" Pd " type parameters,", dropped_typeparam_count_);
    THR_Print(" %" Pd " type arguments,", dropped_typearg_count_);
    THR_Print(" %" Pd " classes,", dropped_class_count_);
    THR_Print(" %" Pd " libraries,", dropped_library_count_);
    THR_Print(" %" Pd " constants arrays entries.\n",
              dropped_constants_arrays_entries_count_);
  }
}
void Precompiler::PrecompileConstructors() {
  // ...
  class ConstructorVisitor : public FunctionVisitor {
   public:
    explicit ConstructorVisitor(Precompiler* precompiler, Zone* zone)
        : precompiler_(precompiler), zone_(zone) {}
    void VisitFunction(const Function& function) {
      if (!function.IsGenerativeConstructor()) return;
      // ...
      if (FLAG_trace_precompiler) {
        // ...
      }
      CompileFunction(precompiler_, Thread::Current(), zone_, function);
    }

   private:
    Precompiler* precompiler_;
    Zone* zone_;
  };

  phase_ = Phase::kCompilingConstructorsForInstructionCounts;
  ConstructorVisitor visitor(this, Z);
  ProgramVisitor::WalkProgram(Z, IG, &visitor);
  phase_ = Phase::kPreparation;
}
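
// Seeds the precompilation worklist with what is reachable from the
// embedder: noSuchMethod and the root library's main (or its re-export).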
void Precompiler::AddRoots() {
  // ...
  AddSelector(Symbols::NoSuchMethod());
  // ...
  const Library& lib = Library::Handle(IG->object_store()->root_library());
  if (lib.IsNull()) {
    const String& msg = String::Handle(
        Z, String::New("Cannot find root library in isolate.\n"));
    Jump(Error::Handle(Z, ApiError::New(msg)));
    UNREACHABLE();
  }

  const String& name = String::Handle(String::New("main"));
  Function& main = Function::Handle(lib.LookupFunctionAllowPrivate(name));
  if (main.IsNull()) {
    const Object& obj = Object::Handle(lib.LookupReExport(name));
    if (obj.IsFunction()) {
      main ^= obj.ptr();
    }
  }
  if (!main.IsNull()) {
    // If main was only found via a re-export, the root library's lookup
    // caches must be kept so the re-export can be resolved at runtime.
    if (lib.LookupFunctionAllowPrivate(name) == Function::null()) {
      retain_root_library_caches_ = true;
    }
    AddRetainReason(main, RetainReasons::kMainFunction);
    // ...
    main = main.ImplicitClosureFunction();
    AddConstObject(Closure::Handle(main.ImplicitStaticClosure()));
  } else {
    String& msg = String::Handle(
        Z, String::NewFormatted("Cannot find main in library %s\n",
                                lib.ToCString()));
    // ...
    Jump(Error::Handle(Z, ApiError::New(msg)));
    UNREACHABLE();
  }
}
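
// Fixed-point loop: compile pending functions, which may discover new
// dynamic selectors and callback fields, until nothing changes.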
void Precompiler::Iterate() {
  // ...
  Function& function = Function::Handle(Z);

  phase_ = Phase::kFixpointCodeGeneration;

  while (changed_) {
    changed_ = false;

    while (pending_functions_.Length() > 0) {
      function ^= pending_functions_.RemoveLast();
      ProcessFunction(function);
    }

    CheckForNewDynamicFunctions();
    CollectCallbackFields();
  }

  phase_ = Phase::kDone;
}
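
// For every non-generic function-typed instance field whose name is sent
// dynamically, pre-create invoke-field dispatchers on all concrete
// subclasses so those calls can be dispatched in AOT mode.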
void Precompiler::CollectCallbackFields() {
  // ...
  Library& lib = Library::Handle(Z);
  Class& cls = Class::Handle(Z);
  Class& subcls = Class::Handle(Z);
  Array& fields = Array::Handle(Z);
  Field& field = Field::Handle(Z);
  FunctionType& signature = FunctionType::Handle(Z);
  Function& dispatcher = Function::Handle(Z);
  Array& args_desc = Array::Handle(Z);
  AbstractType& field_type = AbstractType::Handle(Z);
  String& field_name = String::Handle(Z);
  GrowableArray<intptr_t> cids;

  for (intptr_t i = 0; i < libraries_.Length(); i++) {
    lib ^= libraries_.At(i);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();

      if (!cls.is_allocated()) continue;

      fields = cls.fields();
      for (intptr_t k = 0; k < fields.Length(); k++) {
        field ^= fields.At(k);
        if (field.is_static()) continue;
        field_type = field.type();
        if (!field_type.IsFunctionType()) continue;
        field_name = field.name();
        if (!IsSent(field_name)) continue;
        // Create an arguments descriptor from the signature of field_type.
        signature ^= field_type.ptr();
        if (signature.IsGeneric()) continue;
        if (signature.HasOptionalParameters()) continue;
        if (FLAG_trace_precompiler) {
          THR_Print("Found callback field %s\n", field_name.ToCString());
        }

        // ...
        args_desc = ArgumentsDescriptor::NewBoxed(
            0,  // No type argument vector.
            signature.num_fixed_parameters());
        cids.Clear();
        if (CHA::ConcreteSubclasses(cls, &cids)) {
          for (intptr_t j = 0; j < cids.length(); ++j) {
            subcls = IG->class_table()->At(cids[j]);
            if (subcls.is_allocated()) {
              // Add the invoke-field dispatcher to the concrete subclass.
              dispatcher = subcls.GetInvocationDispatcher(
                  field_name, args_desc,
                  UntaggedFunction::kInvokeFieldDispatcher,
                  /*create_if_absent=*/true);
              if (FLAG_trace_precompiler) {
                THR_Print("Added invoke-field-dispatcher for %s to %s\n",
                          field_name.ToCString(), subcls.ToCString());
              }
              AddFunction(dispatcher, RetainReasons::kInvokeFieldDispatcher);
            }
          }
        }
      }
    }
  }
}
void Precompiler::ProcessFunction(const Function& function) {
  // ...
  const intptr_t gop_offset = global_object_pool_builder()->CurrentLength();
  // ...
  ASSERT(FunctionType::Handle(Z, function.signature()).IsFinalized());
  // ...
  TracingScope tracing_scope(this);
  function_count_++;

  if (FLAG_trace_precompiler) {
    THR_Print("Precompiling %" Pd " %s (%s, %s)\n", function_count_,
              function.ToLibNamePrefixedQualifiedCString(),
              function.token_pos().ToCString(),
              Function::KindToCString(function.kind()));
  }

  // ...
  error_ = CompileFunction(this, thread_, zone_, function);
  if (!error_.IsNull()) {
    Jump(error_);
  }
  // ...
}
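
// After compiling a function, scan its static call table, exception
// handlers, object pool entries and inlining metadata for new roots.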
void Precompiler::AddCalleesOf(const Function& function, intptr_t gop_offset) {
  // ...
  Object& entry = Object::Handle(Z);
  Class& cls = Class::Handle(Z);
  Function& target = Function::Handle(Z);

  const Code& code = Code::Handle(Z, function.CurrentCode());

  const Array& table = Array::Handle(Z, code.static_calls_target_table());
  StaticCallsTable static_calls(table);
  for (auto& view : static_calls) {
    entry = view.Get<Code::kSCallTableFunctionTarget>();
    if (entry.IsFunction()) {
      // ...
      const char* const reason =
          FLAG_retain_function_objects
              ? (!FLAG_dwarf_stack_traces_mode
                     ? RetainReasons::kSymbolicStackTraces
                     : RetainReasons::kForcedRetain)
              : nullptr;
      AddFunction(Function::Cast(entry), reason);
      ASSERT(view.Get<Code::kSCallTableCodeOrTypeTarget>() == Code::null());
      continue;
    }
    entry = view.Get<Code::kSCallTableCodeOrTypeTarget>();
    if (entry.IsCode() && Code::Cast(entry).IsAllocationStubCode()) {
      cls ^= Code::Cast(entry).owner();
      AddInstantiatedClass(cls);
    }
  }

  const ExceptionHandlers& handlers =
      ExceptionHandlers::Handle(Z, code.exception_handlers());
  if (!handlers.IsNull()) {
    // ... (either clear the handled types or trace them, depending on mode)
    for (intptr_t i = 0; i < handlers.num_entries(); i++) {
      handlers.SetHandledTypes(i, Array::empty_array());
    }
    // ...
    Array& types = Array::Handle(Z);
    AbstractType& type = AbstractType::Handle(Z);
    for (intptr_t i = 0; i < handlers.num_entries(); i++) {
      types = handlers.GetHandledTypes(i);
      for (intptr_t j = 0; j < types.Length(); j++) {
        type ^= types.At(j);
        AddType(type);
      }
    }
  }

#if defined(TARGET_ARCH_IA32)
  FATAL("Callee scanning unimplemented for IA32");
#endif

  String& selector = String::Handle(Z);

  // Scan the object pool entries this function refers to: either the entries
  // appended to the global object pool while compiling it, or the function's
  // own local object pool.
  // ...
  {
    for (intptr_t i = gop_offset;
         i < global_object_pool_builder()->CurrentLength(); i++) {
      const auto& wrapper_entry = global_object_pool_builder()->EntryAt(i);
      if (wrapper_entry.type() ==
          compiler::ObjectPoolBuilderEntry::kTaggedObject) {
        const auto& entry = *wrapper_entry.obj_;
        AddCalleesOfHelper(entry, &selector, &cls);
      }
    }
  }
  // ...
  {
    const auto& pool = ObjectPool::Handle(Z, code.object_pool());
    auto& entry = Object::Handle(Z);
    for (intptr_t i = 0; i < pool.Length(); i++) {
      if (pool.TypeAt(i) == ObjectPool::EntryType::kTaggedObject) {
        entry = pool.ObjectAt(i);
        AddCalleesOfHelper(entry, &selector, &cls);
      }
    }
  }

  if (!FLAG_dwarf_stack_traces_mode) {
    const Array& inlined_functions =
        Array::Handle(Z, code.inlined_id_to_function());
    for (intptr_t i = 0; i < inlined_functions.Length(); i++) {
      target ^= inlined_functions.At(i);
      AddRetainReason(target, RetainReasons::kSymbolicStackTraces);
      // ...
    }
  }
}
static bool IsPotentialClosureCall(const String& selector) {
  return /* selector is "call" || */
         selector.ptr() == Symbols::DynamicCall().ptr();
}
void Precompiler::AddCalleesOfHelper(const Object& entry,
                                     String* temp_selector,
                                     Class* temp_cls) {
  switch (entry.GetClassId()) {
    case kOneByteStringCid:
      // ... (skip common leaf constants)
      return;
    case kUnlinkedCallCid: {
      const auto& call_site = UnlinkedCall::Cast(entry);
      // A dynamic call.
      *temp_selector = call_site.target_name();
      AddSelector(*temp_selector);
      if (IsPotentialClosureCall(*temp_selector)) {
        const Array& arguments_descriptor =
            Array::Handle(Z, call_site.arguments_descriptor());
        AddClosureCall(*temp_selector, arguments_descriptor);
      }
      break;
    }
    case kMegamorphicCacheCid: {
      // A dynamic call.
      const auto& cache = MegamorphicCache::Cast(entry);
      *temp_selector = cache.target_name();
      AddSelector(*temp_selector);
      if (IsPotentialClosureCall(*temp_selector)) {
        const Array& arguments_descriptor =
            Array::Handle(Z, cache.arguments_descriptor());
        AddClosureCall(*temp_selector, arguments_descriptor);
      }
      break;
    }
    case kFieldCid: {
      // Potential need for the field's initializer.
      const auto& field = Field::Cast(entry);
      AddField(field);
      break;
    }
    case kFunctionCid: {
      // A local closure function.
      const auto& target = Function::Cast(entry);
      AddFunction(target, RetainReasons::kLocalClosure);
      if (target.IsFfiCallbackTrampoline()) {
        const auto& callback_target =
            Function::Handle(Z, target.FfiCallbackTarget());
        if (!callback_target.IsNull()) {
          AddFunction(callback_target, RetainReasons::kFfiCallbackTarget);
        }
      }
      break;
    }
    case kCodeCid: {
      const auto& target_code = Code::Cast(entry);
      if (target_code.IsAllocationStubCode()) {
        *temp_cls ^= target_code.owner();
        AddInstantiatedClass(*temp_cls);
      }
      break;
    }
    default:
      if (entry.IsInstance()) {
        // A const object, literal or arguments descriptor.
        const auto& instance = Instance::Cast(entry);
        AddConstObject(instance);
      }
      break;
  }
}
void Precompiler::AddTypesOf(const Class& cls) {
  if (cls.IsNull()) return;
  if (classes_to_retain_.HasKey(&cls)) return;
  classes_to_retain_.Insert(&Class::ZoneHandle(Z, cls.ptr()));

  Array& interfaces = Array::Handle(Z, cls.interfaces());
  AbstractType& type = AbstractType::Handle(Z);
  for (intptr_t i = 0; i < interfaces.Length(); i++) {
    type ^= interfaces.At(i);
    AddType(type);
  }

  AddTypeParameters(TypeParameters::Handle(Z, cls.type_parameters()));

  type = cls.super_type();
  AddType(type);
}
void Precompiler::AddRetainReason(const Object& obj, const char* reason) {
  if (retained_reasons_writer_ == nullptr || reason == nullptr) return;
  retained_reasons_writer_->AddReason(obj, reason);
}
void Precompiler::AddTypesOf(const Function& function) {
  // ...
  if (functions_to_retain_.ContainsKey(function)) return;
  functions_to_retain_.Insert(function);

  if (retained_reasons_writer_ != nullptr &&
      !retained_reasons_writer_->HasReason(function)) {
    FATAL("no retaining reasons given");
  }

  if (function.NeedsMonomorphicCheckedEntry(Z) ||
      Function::IsDynamicInvocationForwarderName(function.name())) {
    functions_called_dynamically_.Insert(function);
  }

  const FunctionType& signature =
      FunctionType::Handle(Z, function.signature());
  AddType(signature);
  // ...
  const Class& owner = Class::Handle(Z, function.Owner());
  AddTypesOf(owner);

  if (function.IsFfiCallbackTrampoline()) {
    AddType(FunctionType::Handle(Z, function.FfiCSignature()));
  }

  const auto& parent_function = Function::Handle(Z, function.parent_function());
  if (parent_function.IsNull()) {
    return;
  }

  // ...
  if (!FLAG_dwarf_stack_traces_mode) {
    AddRetainReason(parent_function, RetainReasons::kSymbolicStackTraces);
    AddTypesOf(parent_function);
    return;
  }

  // Otherwise the parent function is replaced by a weak reference.
  const auto& data = ClosureData::CheckedHandle(Z, function.data());
  const auto& wsr =
      Object::Handle(Z, WeakSerializationReference::New(
                            parent_function, Object::null_function()));
  data.set_parent_function(wsr);
}
void Precompiler::AddType(const AbstractType& abstype) {
  if (abstype.IsNull()) return;

  if (abstype.IsTypeParameter()) {
    const auto& param = TypeParameter::Cast(abstype);
    if (typeparams_to_retain_.HasKey(&param)) return;
    typeparams_to_retain_.Insert(&TypeParameter::ZoneHandle(Z, param.ptr()));

    if (param.IsClassTypeParameter()) {
      AddTypesOf(Class::Handle(Z, param.parameterized_class()));
    } else {
      AddType(FunctionType::Handle(Z, param.parameterized_function_type()));
    }
    return;
  }

  if (abstype.IsFunctionType()) {
    if (functiontypes_to_retain_.HasKey(&FunctionType::Cast(abstype))) return;
    const FunctionType& signature =
        FunctionType::ZoneHandle(Z, FunctionType::Cast(abstype).ptr());
    functiontypes_to_retain_.Insert(&signature);

    AddTypeParameters(TypeParameters::Handle(Z, signature.type_parameters()));

    AbstractType& type = AbstractType::Handle(Z);
    type = signature.result_type();
    AddType(type);
    for (intptr_t i = 0; i < signature.NumParameters(); i++) {
      type = signature.ParameterTypeAt(i);
      AddType(type);
    }
    return;
  }

  if (types_to_retain_.HasKey(&abstype)) return;
  types_to_retain_.Insert(&AbstractType::ZoneHandle(Z, abstype.ptr()));

  if (abstype.IsType()) {
    const Type& type = Type::Cast(abstype);
    const Class& cls = Class::Handle(Z, type.type_class());
    AddTypesOf(cls);
    const TypeArguments& vector = TypeArguments::Handle(Z, type.arguments());
    AddTypeArguments(vector);
  } else if (abstype.IsRecordType()) {
    const auto& rec = RecordType::Cast(abstype);
    AbstractType& type = AbstractType::Handle(Z);
    for (intptr_t i = 0, n = rec.NumFields(); i < n; ++i) {
      type = rec.FieldTypeAt(i);
      AddType(type);
    }
  }
}
void Precompiler::AddTypeParameters(const TypeParameters& params) {
  if (params.IsNull()) return;

  TypeArguments& args = TypeArguments::Handle();
  args = params.bounds();
  AddTypeArguments(args);
  args = params.defaults();
  AddTypeArguments(args);
}
void Precompiler::AddTypeArguments(const TypeArguments& args) {
  if (args.IsNull()) return;

  if (typeargs_to_retain_.HasKey(&args)) return;
  typeargs_to_retain_.Insert(&TypeArguments::ZoneHandle(Z, args.ptr()));

  AbstractType& arg = AbstractType::Handle(Z);
  for (intptr_t i = 0; i < args.Length(); i++) {
    arg = args.TypeAt(i);
    AddType(arg);
  }
}
void Precompiler::AddConstObject(const class Instance& instance) {
  // Types, type parameters, and type arguments require special handling.
  if (instance.IsAbstractType()) {
    AddType(AbstractType::Cast(instance));
    return;
  } else if (instance.IsTypeArguments()) {
    AddTypeArguments(TypeArguments::Cast(instance));
    return;
  }

  if (instance.ptr() == Object::sentinel().ptr() ||
      instance.ptr() == Object::transition_sentinel().ptr()) {
    return;
  }

  Class& cls = Class::Handle(Z, instance.clazz());
  AddInstantiatedClass(cls);

  if (instance.IsClosure()) {
    // An implicit static closure.
    const Function& func =
        Function::Handle(Z, Closure::Cast(instance).function());
    ASSERT(func.is_static());
    AddFunction(func, RetainReasons::kImplicitClosure);
    AddTypeArguments(TypeArguments::Handle(
        Z, Closure::Cast(instance).instantiator_type_arguments()));
    AddTypeArguments(TypeArguments::Handle(
        Z, Closure::Cast(instance).function_type_arguments()));
    AddTypeArguments(TypeArguments::Handle(
        Z, Closure::Cast(instance).delayed_type_arguments()));
    return;
  }

  if (instance.IsLibraryPrefix()) {
    const auto& prefix = LibraryPrefix::Cast(instance);
    const Library& target = Library::Handle(Z, prefix.GetLibrary(0));
    cls = target.toplevel_class();
    if (!classes_to_retain_.HasKey(&cls)) {
      classes_to_retain_.Insert(&Class::ZoneHandle(Z, cls.ptr()));
    }
    return;
  }

  // ...
  if (!instance.IsCanonical()) return;

  // Constants are canonicalized; avoid repeated processing of them.
  if (consts_to_retain_.HasKey(&instance)) return;

  consts_to_retain_.Insert(&Instance::ZoneHandle(Z, instance.ptr()));

  if (cls.NumTypeArguments() > 0) {
    AddTypeArguments(TypeArguments::Handle(Z, instance.GetTypeArguments()));
  }

  class ConstObjectVisitor : public ObjectPointerVisitor {
   public:
    ConstObjectVisitor(Precompiler* precompiler, IsolateGroup* isolate_group)
        : ObjectPointerVisitor(isolate_group),
          precompiler_(precompiler),
          subinstance_(Object::Handle()) {}

    void VisitPointers(ObjectPtr* first, ObjectPtr* last) override {
      for (ObjectPtr* current = first; current <= last; current++) {
        subinstance_ = *current;
        if (subinstance_.IsInstance()) {
          precompiler_->AddConstObject(Instance::Cast(subinstance_));
        }
      }
      subinstance_ = Object::null();
    }

#if defined(DART_COMPRESSED_POINTERS)
    void VisitCompressedPointers(uword heap_base,
                                 CompressedObjectPtr* first,
                                 CompressedObjectPtr* last) override {
      for (CompressedObjectPtr* current = first; current <= last; current++) {
        subinstance_ = current->Decompress(heap_base);
        if (subinstance_.IsInstance()) {
          precompiler_->AddConstObject(Instance::Cast(subinstance_));
        }
      }
      subinstance_ = Object::null();
    }
#endif

   private:
    Precompiler* precompiler_;
    Object& subinstance_;
  };

  ConstObjectVisitor visitor(this, IG);
  instance.ptr()->untag()->VisitPointers(&visitor);
}
void Precompiler::AddClosureCall(const String& call_selector,
                                 const Array& arguments_descriptor) {
  const Class& cache_class =
      Class::Handle(Z, IG->object_store()->closure_class());
  const Function& dispatcher =
      Function::Handle(Z, cache_class.GetInvocationDispatcher(
                              call_selector, arguments_descriptor,
                              UntaggedFunction::kInvokeFieldDispatcher,
                              /*create_if_absent=*/true));
  AddFunction(dispatcher, RetainReasons::kInvokeFieldDispatcher);
}
void Precompiler::AddField(const Field& field) {
  if (is_tracing()) {
    tracer_->WriteFieldRef(field);
  }

  if (fields_to_retain_.HasKey(&field)) return;

  fields_to_retain_.Insert(&Field::ZoneHandle(Z, field.ptr()));

  if (field.is_static()) {
    auto field_table = field.is_shared() ? IG->shared_initial_field_table()
                                         : IG->initial_field_table();
    const Object& value = Object::Handle(Z, field_table->At(field.field_id()));
    // Should not be in the middle of initialization while precompiling.
    ASSERT(value.ptr() != Object::transition_sentinel().ptr());

    if (value.ptr() != Object::sentinel().ptr() &&
        value.ptr() != Object::null()) {
      // ...
      AddConstObject(Instance::Cast(value));
    }
  }

  if (field.has_nontrivial_initializer() &&
      (field.is_static() || field.is_late())) {
    const Function& initializer =
        Function::ZoneHandle(Z, field.EnsureInitializerFunction());
    const char* const reason = field.is_static()
                                   ? RetainReasons::kStaticFieldInitializer
                                   : RetainReasons::kLateFieldInitializer;
    AddFunction(initializer, reason);
  }
}
const char* Precompiler::MustRetainFunction(const Function& function) {
  // There are some cases where a function must be retained even if there are
  // no observable dynamic calls to it:
  // ...
  if (function.is_native()) {
    return "native function";
  }

  // ...
  const auto& selector = String::Handle(Z, function.name());
  // ...
  const auto& name = String::Handle(Z, function.QualifiedScrubbedName());
  if (name.Equals(Symbols::_ClosureCall())) {
    return "_Closure.call";
  }
  // ...
  if (function.NeedsMonomorphicCheckedEntry(Z)) {
    return "needs monomorphic checked entry";
  }
  if (Function::IsDynamicInvocationForwarderName(function.name())) {
    return "dynamic invocation forwarder";
  }

  if (StackTraceUtils::IsNeededForAsyncAwareUnwinding(function)) {
    return RetainReasons::kAsyncStackUnwinding;
  }

  return nullptr;
}
void Precompiler::AddFunction(const Function& function,
                              const char* retain_reason) {
  if (is_tracing()) {
    tracer_->WriteFunctionRef(function);
  }

  if (retain_reason == nullptr) {
    retain_reason = MustRetainFunction(function);
  }
  // This may add an additional reason for an already-seen function.
  AddRetainReason(function, retain_reason);

  if (possibly_retained_functions_.ContainsKey(function)) return;
  if (retain_reason != nullptr) {
    possibly_retained_functions_.Insert(function);
  }

  if (seen_functions_.ContainsKey(function)) return;
  seen_functions_.Insert(function);
  pending_functions_.Add(function);
  changed_ = true;
}
bool Precompiler::IsSent(const String& selector) {
  if (selector.IsNull()) {
    return false;
  }
  return sent_selectors_.HasKey(&selector);
}
void Precompiler::AddSelector(const String& selector) {
  if (is_tracing()) {
    tracer_->WriteSelectorRef(selector);
  }

  ASSERT(!selector.IsNull());
  if (!IsSent(selector)) {
    sent_selectors_.Insert(&String::ZoneHandle(Z, selector.ptr()));
    selector_count_++;
    changed_ = true;

    if (FLAG_trace_precompiler) {
      THR_Print("Enqueueing selector %" Pd " %s\n", selector_count_,
                selector.ToCString());
    }
  }
}
void Precompiler::AddTableSelector(const compiler::TableSelector* selector) {
  if (is_tracing()) {
    tracer_->WriteTableSelectorRef(selector->id);
  }

  if (seen_table_selectors_.HasKey(selector->id)) return;

  seen_table_selectors_.Insert(selector->id);
  changed_ = true;
}
bool Precompiler::IsHitByTableSelector(const Function& function) {
  const int32_t selector_id = selector_map()->SelectorId(function);
  if (selector_id == compiler::SelectorMap::kInvalidSelectorId) return false;
  return seen_table_selectors_.HasKey(selector_id);
}
void Precompiler::AddApiUse(const Object& obj) {
  api_uses_.Insert(&Object::ZoneHandle(Z, obj.ptr()));
}

bool Precompiler::HasApiUse(const Object& obj) {
  return api_uses_.HasKey(&obj);
}
void Precompiler::AddInstantiatedClass(const Class& cls) {
  if (is_tracing()) {
    tracer_->WriteClassInstantiationRef(cls);
  }

  if (cls.is_allocated()) return;

  class_count_++;
  cls.set_is_allocated_unsafe(true);
  error_ = cls.EnsureIsAllocateFinalized(T);
  if (!error_.IsNull()) {
    Jump(error_);
  }

  changed_ = true;

  if (FLAG_trace_precompiler) {
    THR_Print("Allocation %" Pd " %s\n", class_count_, cls.ToCString());
  }

  const Class& superclass = Class::Handle(cls.SuperClass());
  if (!superclass.IsNull()) {
    AddInstantiatedClass(superclass);
  }
}
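
// Walk all libraries looking for @pragma('vm:entry-point') annotations on
// classes, fields and functions, and retain the annotated members.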
void Precompiler::AddAnnotatedRoots() {
  // ...
  auto& lib = Library::Handle(Z);
  auto& cls = Class::Handle(Z);
  auto& members = Array::Handle(Z);
  auto& function = Function::Handle(Z);
  auto& function2 = Function::Handle(Z);
  auto& field = Field::Handle(Z);
  auto& metadata = Array::Handle(Z);
  auto& reusable_object_handle = Object::Handle(Z);
  auto& reusable_field_handle = Field::Handle(Z);

  // Lists of fields whose implicit getters/setters/static getters must be
  // retained because of entry-point pragmas on the fields.
  auto& implicit_getters = GrowableObjectArray::Handle(Z);
  auto& implicit_setters = GrowableObjectArray::Handle(Z);
  auto& implicit_static_getters = GrowableObjectArray::Handle(Z);

  for (intptr_t i = 0; i < libraries_.Length(); i++) {
    lib ^= libraries_.At(i);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();

      // Check for an entry-point pragma on the class itself.
      if (cls.has_pragma()) {
        metadata ^= lib.GetMetadata(cls);
        if (FindEntryPointPragma(IG, metadata, &reusable_field_handle,
                                 &reusable_object_handle) ==
            EntryPointPragma::kAlways) {
          AddInstantiatedClass(cls);
        }
      }

      // Check for pragmas on any fields of the class.
      members = cls.fields();
      implicit_getters = GrowableObjectArray::New(members.Length());
      implicit_setters = GrowableObjectArray::New(members.Length());
      implicit_static_getters = GrowableObjectArray::New(members.Length());
      for (intptr_t k = 0; k < members.Length(); ++k) {
        field ^= members.At(k);
        if (field.has_pragma()) {
          metadata ^= lib.GetMetadata(field);
          if (metadata.IsNull()) continue;
          EntryPointPragma pragma = FindEntryPointPragma(
              IG, metadata, &reusable_field_handle, &reusable_object_handle);
          // ...
          if (!field.is_static()) {
            if (pragma != EntryPointPragma::kSetterOnly) {
              implicit_getters.Add(field);
            }
            if (pragma != EntryPointPragma::kGetterOnly) {
              implicit_setters.Add(field);
            }
          } else {
            implicit_static_getters.Add(field);
          }
        }
      }

      // Check for pragmas on any functions of the class.
      members = cls.current_functions();
      for (intptr_t k = 0; k < members.Length(); k++) {
        function ^= members.At(k);
        if (function.has_pragma()) {
          metadata ^= lib.GetMetadata(function);
          if (metadata.IsNull()) continue;
          auto type = FindEntryPointPragma(IG, metadata, &reusable_field_handle,
                                           &reusable_object_handle);

          if (type == EntryPointPragma::kAlways ||
              type == EntryPointPragma::kCallOnly) {
            functions_with_entry_point_pragmas_.Insert(function);
            AddFunction(function, RetainReasons::kEntryPointPragma);
            // ...
          }

          if ((type == EntryPointPragma::kAlways ||
               type == EntryPointPragma::kGetterOnly) &&
              function.kind() != UntaggedFunction::kConstructor &&
              !function.IsSetterFunction()) {
            function2 = function.ImplicitClosureFunction();
            functions_with_entry_point_pragmas_.Insert(function2);
            AddFunction(function2, RetainReasons::kEntryPointPragma);
          }
          // ...
          if (function.IsGenerativeConstructor()) {
            AddInstantiatedClass(cls);
          }
        }
        if (function.kind() == UntaggedFunction::kImplicitGetter &&
            !implicit_getters.IsNull()) {
          for (intptr_t i = 0; i < implicit_getters.Length(); ++i) {
            field ^= implicit_getters.At(i);
            if (function.accessor_field() == field.ptr()) {
              functions_with_entry_point_pragmas_.Insert(function);
              AddFunction(function, RetainReasons::kImplicitGetter);
            }
          }
        }
        if (function.kind() == UntaggedFunction::kImplicitSetter &&
            !implicit_setters.IsNull()) {
          for (intptr_t i = 0; i < implicit_setters.Length(); ++i) {
            field ^= implicit_setters.At(i);
            if (function.accessor_field() == field.ptr()) {
              functions_with_entry_point_pragmas_.Insert(function);
              AddFunction(function, RetainReasons::kImplicitSetter);
            }
          }
        }
        if (function.kind() == UntaggedFunction::kImplicitStaticGetter &&
            !implicit_static_getters.IsNull()) {
          for (intptr_t i = 0; i < implicit_static_getters.Length(); ++i) {
            field ^= implicit_static_getters.At(i);
            if (function.accessor_field() == field.ptr()) {
              functions_with_entry_point_pragmas_.Insert(function);
              AddFunction(function, RetainReasons::kImplicitStaticGetter);
            }
          }
        }
      }

      implicit_getters = GrowableObjectArray::null();
      implicit_setters = GrowableObjectArray::null();
      implicit_static_getters = GrowableObjectArray::null();
    }
  }
}
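
// Match the selectors observed so far against every allocated class to
// discover newly reachable methods, getters, setters, method extractors and
// dynamic invocation forwarders.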
void Precompiler::CheckForNewDynamicFunctions() {
  // ...
  Library& lib = Library::Handle(Z);
  Class& cls = Class::Handle(Z);
  Array& functions = Array::Handle(Z);
  Function& function = Function::Handle(Z);
  Function& function2 = Function::Handle(Z);
  String& selector = String::Handle(Z);
  String& selector2 = String::Handle(Z);
  String& selector3 = String::Handle(Z);
  Field& field = Field::Handle(Z);

  for (intptr_t i = 0; i < libraries_.Length(); i++) {
    lib ^= libraries_.At(i);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();

      if (!cls.is_allocated()) continue;

      functions = cls.current_functions();
      for (intptr_t k = 0; k < functions.Length(); k++) {
        function ^= functions.At(k);
        // ...
        selector = function.name();
        if (IsSent(selector)) {
          AddFunction(function, RetainReasons::kCalledViaSelector);
        }
        if (IsHitByTableSelector(function)) {
          AddFunction(function, FLAG_retain_function_objects
                                    ? RetainReasons::kForcedRetain
                                    : nullptr);
        }

        bool found_metadata = false;
        kernel::ProcedureAttributesMetadata metadata;

        // Handle the implicit call type conversions.
        if (Field::IsGetterName(selector) &&
            (function.kind() != UntaggedFunction::kMethodExtractor)) {
          // ...
          selector2 = Field::NameFromGetter(selector);
          if (IsSent(selector2)) {
            AddFunction(function, RetainReasons::kCalledThroughGetter);
          }
          selector2 = Function::CreateDynamicInvocationForwarderName(selector2);
          if (IsSent(selector2)) {
            selector2 =
                Function::CreateDynamicInvocationForwarderName(selector);
            function2 = function.GetDynamicInvocationForwarder(selector2);
            AddFunction(function2, RetainReasons::kDynamicInvocationForwarder);
            functions_called_dynamically_.Insert(function2);
          }
        } else if (function.kind() == UntaggedFunction::kRegularFunction) {
          selector2 = Field::GetterSymbol(selector);
          selector3 = Function::CreateDynamicInvocationForwarderName(selector2);
          if (IsSent(selector2) || IsSent(selector3)) {
            metadata = kernel::ProcedureAttributesOf(function, Z);
            found_metadata = true;

            if (metadata.has_tearoff_uses) {
              // Closurization: retain the implicit closure function and the
              // corresponding method extractor.
              function2 = function.ImplicitClosureFunction();
              AddFunction(function2, RetainReasons::kImplicitClosure);

              function2 = function.GetMethodExtractor(selector2);
              AddFunction(function2, RetainReasons::kMethodExtractor);
            }
          }
        }

        const bool is_getter =
            function.kind() == UntaggedFunction::kImplicitGetter ||
            function.kind() == UntaggedFunction::kGetterFunction;
        const bool is_setter =
            function.kind() == UntaggedFunction::kImplicitSetter ||
            function.kind() == UntaggedFunction::kSetterFunction;
        const bool is_regular =
            function.kind() == UntaggedFunction::kRegularFunction;
        if (is_getter || is_setter || is_regular) {
          selector2 = Function::CreateDynamicInvocationForwarderName(selector);
          if (IsSent(selector2)) {
            if (function.kind() == UntaggedFunction::kImplicitGetter ||
                function.kind() == UntaggedFunction::kImplicitSetter) {
              field = function.accessor_field();
              metadata = kernel::ProcedureAttributesOf(field, Z);
            } else if (!found_metadata) {
              metadata = kernel::ProcedureAttributesOf(function, Z);
            }

            if (is_getter) {
              if (metadata.getter_called_dynamically) {
                function2 = function.GetDynamicInvocationForwarder(selector2);
                AddFunction(function2,
                            RetainReasons::kDynamicInvocationForwarder);
                functions_called_dynamically_.Insert(function2);
              }
            } else {
              if (metadata.method_or_setter_called_dynamically) {
                function2 = function.GetDynamicInvocationForwarder(selector2);
                AddFunction(function2,
                            RetainReasons::kDynamicInvocationForwarder);
                functions_called_dynamically_.Insert(function2);
              }
            }
          }
        }
      }
    }
  }
}
class NameFunctionsTraits {
 public:
  static const char* Name() { return "NameFunctionsTraits"; }
  static bool ReportStats() { return false; }

  static bool IsMatch(const Object& a, const Object& b) {
    return a.IsString() && b.IsString() &&
           String::Cast(a).Equals(String::Cast(b));
  }
  static uword Hash(const Object& obj) { return String::Cast(obj).Hash(); }
  static ObjectPtr NewKey(const String& str) { return str.ptr(); }
};
typedef UnorderedHashMap<NameFunctionsTraits> Table;

static void AddNameToFunctionsTable(Zone* zone,
                                    Table* table,
                                    const String& fname,
                                    const Function& function) {
  Array& farray = Array::Handle(zone);
  farray ^= table->InsertNewOrGetValue(fname, Array::empty_array());
  farray = Array::Grow(farray, farray.Length() + 1);
  farray.SetAt(farray.Length() - 1, function);
  table->UpdateValue(fname, farray);
}

static void AddNamesToFunctionsTable(Zone* zone,
                                     Table* table,
                                     const String& fname,
                                     const Function& function,
                                     String* mangled_name,
                                     Function* dyn_function) {
  AddNameToFunctionsTable(zone, table, fname, function);

  // Also enter the dynamic-invocation-forwarder variant of the name.
  *mangled_name = function.name();
  *mangled_name =
      Function::CreateDynamicInvocationForwarderName(*mangled_name);
  *dyn_function = function.GetDynamicInvocationForwarder(*mangled_name);

  *mangled_name = Function::CreateDynamicInvocationForwarderName(fname);
  AddNameToFunctionsTable(zone, table, *mangled_name, *dyn_function);
}
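
// Build a table from selector name to the functions carrying that name;
// selectors with exactly one target become "unique dynamic targets" that
// dynamic calls can be devirtualized against.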
void Precompiler::CollectDynamicFunctionNames() {
  if (!FLAG_collect_dynamic_function_names) {
    return;
  }
  auto& lib = Library::Handle(Z);
  auto& cls = Class::Handle(Z);
  auto& functions = Array::Handle(Z);
  auto& function = Function::Handle(Z);
  auto& fname = String::Handle(Z);
  auto& farray = Array::Handle(Z);
  auto& mangled_name = String::Handle(Z);
  auto& dyn_function = Function::Handle(Z);

  Table table(HashTables::New<Table>(100));
  for (intptr_t i = 0; i < libraries_.Length(); i++) {
    lib ^= libraries_.At(i);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();
      functions = cls.current_functions();

      const intptr_t length = functions.Length();
      for (intptr_t j = 0; j < length; j++) {
        function ^= functions.At(j);
        if (function.IsDynamicFunction()) {
          fname = function.name();
          if (function.IsSetterFunction() ||
              function.IsImplicitSetterFunction()) {
            AddNamesToFunctionsTable(Z, &table, fname, function,
                                     &mangled_name, &dyn_function);
          } else if (function.IsGetterFunction() ||
                     function.IsImplicitGetterFunction()) {
            // Enter both the getter name and the plain field name.
            AddNamesToFunctionsTable(Z, &table, fname, function,
                                     &mangled_name, &dyn_function);
            fname = Field::NameFromGetter(fname);
            AddNamesToFunctionsTable(Z, &table, fname, function,
                                     &mangled_name, &dyn_function);
          } else if (function.IsMethodExtractor()) {
            // Skip.
          } else {
            // Regular function: enter both its name and the getter name.
            AddNamesToFunctionsTable(Z, &table, fname, function,
                                     &mangled_name, &dyn_function);
            fname = Field::GetterName(fname);
            AddNamesToFunctionsTable(Z, &table, fname, function,
                                     &mangled_name, &dyn_function);
          }
        }
      }
    }
  }

  // Locate all entries with exactly one function.
  Table::Iterator iter(&table);
  String& key = String::Handle(Z);
  String& key_demangled = String::Handle(Z);
  UniqueFunctionsMap functions_map(HashTables::New<UniqueFunctionsMap>(20));
  while (iter.MoveNext()) {
    intptr_t curr_key = iter.Current();
    key ^= table.GetKey(curr_key);
    farray ^= table.GetOrNull(key);
    ASSERT(!farray.IsNull());
    if (farray.Length() == 1) {
      function ^= farray.At(0);
      if (function.IsDynamicallyOverridden()) continue;

      // ...
      key_demangled = key.ptr();
      if (Function::IsDynamicInvocationForwarderName(key)) {
        key_demangled = Function::DemangleDynamicInvocationForwarderName(key);
      }
      if (function.name() != key.ptr() &&
          function.name() != key_demangled.ptr()) {
        continue;
      }
      functions_map.UpdateOrInsert(key, function);
    }
  }

  function ^= functions_map.GetOrNull(Symbols::GetRuntimeType());
  get_runtime_type_is_unique_ = !function.IsNull();

  if (FLAG_print_unique_targets) {
    UniqueFunctionsMap::Iterator unique_iter(&functions_map);
    while (unique_iter.MoveNext()) {
      intptr_t curr_key = unique_iter.Current();
      function ^= functions_map.GetPayload(curr_key, 0);
      // ...
    }
    THR_Print("%" Pd " of %" Pd " dynamic selectors are unique\n",
              functions_map.NumOccupied(), table.NumOccupied());
  }

  IG->object_store()->set_unique_dynamic_targets(functions_map.Release());
  table.Release();
}
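
// Transitively mark the types, owners and parents of every function that
// might be retained, so the later dropping passes keep a consistent program.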
void Precompiler::TraceForRetainedFunctions() {
  // ...
  Library& lib = Library::Handle(Z);
  Class& cls = Class::Handle(Z);
  Array& functions = Array::Handle(Z);
  Function& function = Function::Handle(Z);
  Function& function2 = Function::Handle(Z);
  Array& fields = Array::Handle(Z);
  Field& field = Field::Handle(Z);

  for (intptr_t i = 0; i < libraries_.Length(); i++) {
    lib ^= libraries_.At(i);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();
      functions = cls.current_functions();
      for (intptr_t j = 0; j < functions.Length(); j++) {
        SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
        function ^= functions.At(j);
        function.DropUncompiledImplicitClosureFunction();

        const bool retained =
            possibly_retained_functions_.ContainsKey(function);
        if (retained) {
          AddTypesOf(function);
        }
        if (function.HasImplicitClosureFunction()) {
          function2 = function.ImplicitClosureFunction();

          if (possibly_retained_functions_.ContainsKey(function2)) {
            AddTypesOf(function2);
            // If the implicit closure is kept because of an entry-point
            // pragma, the original function must be kept too.
            if (!retained &&
                functions_with_entry_point_pragmas_.ContainsKey(function2)) {
              AddRetainReason(function, RetainReasons::kEntryPointPragma);
              AddTypesOf(function);
            }
          }
        }
      }

      fields = cls.fields();
      for (intptr_t j = 0; j < fields.Length(); j++) {
        field ^= fields.At(j);
        if (fields_to_retain_.HasKey(&field) &&
            field.HasInitializerFunction()) {
          function = field.InitializerFunction();
          if (possibly_retained_functions_.ContainsKey(function)) {
            AddTypesOf(function);
          }
        }
      }

      if (cls.invocation_dispatcher_cache() != Array::empty_array().ptr()) {
        DispatcherSet dispatchers(cls.invocation_dispatcher_cache());
        DispatcherSet::Iterator it(&dispatchers);
        while (it.MoveNext()) {
          function ^= dispatchers.GetKey(it.Current());
          if (possibly_retained_functions_.ContainsKey(function)) {
            AddTypesOf(function);
          }
        }
        dispatchers.Release();
      }
    }
  }

  ClosureFunctionsCache::ForAllClosureFunctions([&](const Function& function) {
    if (possibly_retained_functions_.ContainsKey(function)) {
      AddTypesOf(function);
    }
    return true;  // Continue iteration.
  });

  // Verify that functions_to_retain_ covers possibly_retained_functions_.
  FunctionSet::Iterator it(&possibly_retained_functions_);
  while (it.MoveNext()) {
    function ^= possibly_retained_functions_.GetKey(it.Current());
    // FFI callback trampolines are only reachable from the object pool.
    if (!functions_to_retain_.ContainsKey(function) &&
        !function.IsFfiCallbackTrampoline()) {
      FATAL("Function %s was not traced in TraceForRetainedFunctions\n",
            function.ToFullyQualifiedCString());
    }
  }
}
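
// Freeze the dispatch table: serialize its code entries into the object
// store and delete the generator so no further entries can be added.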
void Precompiler::FinalizeDispatchTable() {
  // ...
  const auto& entries =
      Array::Handle(Z, dispatch_table_generator_->BuildCodeArray());
  IG->object_store()->set_dispatch_table_code_entries(entries);
  // Delete the dispatch table generator to ensure there's no attempt
  // to add new entries after this point.
  delete dispatch_table_generator_;
  dispatch_table_generator_ = nullptr;

  if (FLAG_retain_function_objects || !FLAG_trace_precompiler) return;

  FunctionSet printed(HashTables::New<FunctionSet>(1024));
  auto& code = Code::Handle(Z);
  auto& function = Function::Handle(Z);
  for (intptr_t i = 0; i < entries.Length(); i++) {
    code = Code::RawCast(entries.At(i));
    if (code.IsNull()) continue;
    if (!code.IsFunctionCode()) continue;
    function = code.function();
    if (printed.ContainsKey(function)) continue;
    if (functions_to_retain_.ContainsKey(function)) continue;
    THR_Print("Dispatch table references code for function to drop: %s\n",
              function.ToLibNamePrefixedQualifiedCString());
    printed.Insert(function);
  }
  printed.Release();
}
void Precompiler::ReplaceFunctionStaticCallEntries() {
  class StaticCallTableEntryFixer : public CodeVisitor {
   public:
    explicit StaticCallTableEntryFixer(Zone* zone)
        : table_(Array::Handle(zone)),
          kind_and_offset_(Smi::Handle(zone)),
          target_function_(Function::Handle(zone)),
          target_code_(Code::Handle(zone)),
          pool_(ObjectPool::Handle(zone)) {}

    void VisitCode(const Code& code) {
      if (!code.IsFunctionCode()) return;
      table_ = code.static_calls_target_table();
      StaticCallsTable static_calls(table_);

      // ...
      compiler::ObjectPoolBuilder builder;
      pool_ = code.object_pool();
      pool_.CopyInto(&builder);

      for (auto& view : static_calls) {
        kind_and_offset_ = view.Get<Code::kSCallTableKindAndOffset>();
        auto const kind = Code::KindField::decode(kind_and_offset_.Value());
        if ((kind != Code::kCallViaCode) && (kind != Code::kPcRelativeCall))
          continue;

        target_function_ = view.Get<Code::kSCallTableFunctionTarget>();
        if (target_function_.IsNull()) continue;

        ASSERT(view.Get<Code::kSCallTableCodeOrTypeTarget>() == Code::null());
        ASSERT(target_function_.HasCode());
        target_code_ = target_function_.CurrentCode();
        ASSERT(!target_code_.IsStubCode());
        view.Set<Code::kSCallTableCodeOrTypeTarget>(target_code_);
        view.Set<Code::kSCallTableFunctionTarget>(Object::null_function());
        if (kind == Code::kCallViaCode) {
          auto const pc_offset =
              Code::OffsetField::decode(kind_and_offset_.Value());
          const uword pc = pc_offset + code.PayloadStart();
          CodePatcher::PatchStaticCallAt(pc, code, target_code_);
          builder.AddObject(Object::ZoneHandle(target_code_.ptr()));
        }
        if (FLAG_trace_precompiler) {
          THR_Print("Updated static call entry to %s in \"%s\"\n",
                    target_function_.ToFullyQualifiedCString(),
                    code.ToCString());
        }
      }

      code.set_object_pool(ObjectPool::NewFromBuilder(builder));
    }

   private:
    Array& table_;
    Smi& kind_and_offset_;
    Function& target_function_;
    Code& target_code_;
    ObjectPool& pool_;
  };

  StaticCallTableEntryFixer visitor(Z);
  ProgramVisitor::WalkProgram(Z, IG, &visitor);
}
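
// Remove functions that were never marked for retention, trimming the
// signatures and weakening back-references of the ones that are kept.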
void Precompiler::DropFunctions() {
  // ...
  Library& lib = Library::Handle(Z);
  Class& cls = Class::Handle(Z);
  Array& functions = Array::Handle(Z);
  Function& function = Function::Handle(Z);
  Function& target = Function::Handle(Z);
  Function& implicit_closure = Function::Handle(Z);
  Code& code = Code::Handle(Z);
  Object& owner = Object::Handle(Z);
  GrowableObjectArray& retained_functions = GrowableObjectArray::Handle(Z);
  auto& sig = FunctionType::Handle(Z);
  auto& ref = Object::Handle(Z);

  auto trim_function = [&](const Function& function) {
    if (function.IsDynamicInvocationForwarder()) {
      // Drop the reference to the forwarder's target if the target itself
      // is not retained.
      // ...
      if (!functions_to_retain_.ContainsKey(target)) {
        ref =
            WeakSerializationReference::New(target, Function::null_function());
        // ...
      }
      // ...
    }

    sig = function.signature();
    if (function.IsClosureFunction()) {
      return AddRetainReason(sig, RetainReasons::kClosureSignature);
    }
    if (function.IsFfiCallbackTrampoline()) {
      return AddRetainReason(sig, RetainReasons::kFfiTrampolineSignature);
    }
    if (function.is_native()) {
      return AddRetainReason(sig, RetainReasons::kNativeSignature);
    }
    if (function.HasRequiredNamedParameters()) {
      return AddRetainReason(sig, RetainReasons::kRequiredNamedParameters);
    }
    if (functions_called_dynamically_.ContainsKey(function)) {
      return AddRetainReason(sig, RetainReasons::kDynamicallyCalledSignature);
    }
    if (functions_with_entry_point_pragmas_.ContainsKey(function)) {
      return AddRetainReason(sig, RetainReasons::kEntryPointPragmaSignature);
    }
    if (StackTraceUtils::IsNeededForAsyncAwareUnwinding(function)) {
      return AddRetainReason(sig, RetainReasons::kAsyncStackUnwinding);
    }
    if (FLAG_trace_precompiler) {
      THR_Print("Clearing signature for function %s\n",
                function.ToLibNamePrefixedQualifiedCString());
    }
    // Otherwise the signature is replaced by a weak reference.
    ref = WeakSerializationReference::New(sig, Object::null_function_type());
    function.set_signature(ref);
  };

  auto drop_function = [&](const Function& function) {
    if (function.HasCode()) {
      code = function.CurrentCode();
      function.ClearCode();
      // Remove the code's back-reference to its owner.
      owner = code.owner();
      owner = WeakSerializationReference::New(
          owner, Smi::Handle(Smi::New(owner.GetClassId())));
      code.set_owner(owner);
    }
    if (function.HasImplicitClosureFunction()) {
      // The implicit closure of a dropped function must itself be retained
      // and must stay reachable through the closure-functions cache.
      implicit_closure = function.ImplicitClosureFunction();
      RELEASE_ASSERT(functions_to_retain_.ContainsKey(implicit_closure));
      ClosureFunctionsCache::AddClosureFunctionLocked(implicit_closure, true);
    }
    dropped_function_count_++;
    if (FLAG_trace_precompiler) {
      THR_Print("Dropping function %s\n",
                function.ToLibNamePrefixedQualifiedCString());
    }
    if (retained_reasons_writer_ != nullptr) {
      retained_reasons_writer_->AddDropped(function);
    }
  };

  SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
  for (intptr_t i = 0; i < libraries_.Length(); i++) {
    lib ^= libraries_.At(i);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();
      functions = cls.functions();
      retained_functions = GrowableObjectArray::New();
      for (intptr_t j = 0; j < functions.Length(); j++) {
        function ^= functions.At(j);
        function.DropUncompiledImplicitClosureFunction();
        if (functions_to_retain_.ContainsKey(function)) {
          trim_function(function);
          retained_functions.Add(function);
        } else {
          drop_function(function);
        }
      }

      if (retained_functions.Length() > 0) {
        functions = Array::MakeFixedLength(retained_functions);
        cls.SetFunctions(functions);
      } else {
        cls.SetFunctions(Object::empty_array());
      }

      retained_functions = GrowableObjectArray::New();
      if (cls.invocation_dispatcher_cache() != Array::empty_array().ptr()) {
        DispatcherSet dispatchers(cls.invocation_dispatcher_cache());
        DispatcherSet::Iterator it(&dispatchers);
        while (it.MoveNext()) {
          function ^= dispatchers.GetKey(it.Current());
          if (functions_to_retain_.ContainsKey(function)) {
            trim_function(function);
            retained_functions.Add(function);
          } else {
            drop_function(function);
          }
        }
        dispatchers.Release();
      }
      if (retained_functions.Length() == 0) {
        cls.set_invocation_dispatcher_cache(Array::empty_array());
      } else {
        DispatcherSet retained_dispatchers(
            Z, HashTables::New<DispatcherSet>(retained_functions.Length(),
                                              Heap::kOld));
        for (intptr_t j = 0; j < retained_functions.Length(); j++) {
          function ^= retained_functions.At(j);
          retained_dispatchers.Insert(function);
        }
        cls.set_invocation_dispatcher_cache(retained_dispatchers.Release());
      }
    }
  }

  retained_functions = GrowableObjectArray::New();
  ClosureFunctionsCache::ForAllClosureFunctions([&](const Function& function) {
    if (functions_to_retain_.ContainsKey(function)) {
      trim_function(function);
      retained_functions.Add(function);
    } else {
      drop_function(function);
    }
    return true;  // Continue iteration.
  });
  // ...
  IG->object_store()->set_closure_functions(retained_functions);
  // ...
  IG->object_store()->set_closure_functions_table(Object::null_array());
}
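
// Remove fields that are not retained; for dropped static fields also clear
// their current and initial values.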
void Precompiler::DropFields() {
  // ...
  Library& lib = Library::Handle(Z);
  Class& cls = Class::Handle(Z);
  Array& fields = Array::Handle(Z);
  Field& field = Field::Handle(Z);
  GrowableObjectArray& retained_fields = GrowableObjectArray::Handle(Z);
  AbstractType& type = AbstractType::Handle(Z);

  SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
  for (intptr_t i = 0; i < libraries_.Length(); i++) {
    lib ^= libraries_.At(i);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();
      fields = cls.fields();
      retained_fields = GrowableObjectArray::New();
      for (intptr_t j = 0; j < fields.Length(); j++) {
        field ^= fields.At(j);
        bool retain = fields_to_retain_.HasKey(&field);
#if !defined(PRODUCT)
        if (field.is_instance() && cls.is_allocated()) {
          // ... (keep instance fields of allocated classes for the service)
        }
#endif
        if (retain) {
          if (FLAG_trace_precompiler) {
            THR_Print("Retaining %s field %s\n",
                      field.is_static() ? "static" : "instance",
                      field.ToCString());
          }
          retained_fields.Add(field);
          type = field.type();
          AddType(type);
        } else {
          dropped_field_count_++;
          if (FLAG_trace_precompiler) {
            THR_Print("Dropping %s field %s\n",
                      field.is_static() ? "static" : "instance",
                      field.ToCString());
          }

          // Clear references to the field's current and initial values.
          if (field.is_static()) {
            field.SetStaticValue(Object::null_instance());
            field.SetStaticConstFieldValue(Object::null_instance(),
                                           /* ... */ false);
          }
        }
      }

      if (retained_fields.Length() > 0) {
        fields = Array::MakeFixedLength(retained_fields);
        cls.SetFields(fields);
      } else {
        cls.SetFields(Object::empty_array());
      }
    }
  }
}
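
// Collect every type object in the heap and, for those actually used in
// type tests, install specialized type-testing stubs.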
void Precompiler::AttachOptimizedTypeTestingStub() {
  // ...
  IsolateGroup::Current()->heap()->CollectAllGarbage();
  GrowableHandlePtrArray<const AbstractType> types(Z, 200);
  {
    class TypesCollector : public ObjectVisitor {
     public:
      explicit TypesCollector(Zone* zone,
                              GrowableHandlePtrArray<const AbstractType>* types)
          : type_(AbstractType::Handle(zone)), types_(types) {}

      void VisitObject(ObjectPtr obj) override {
        if (obj->GetClassId() == kTypeCid ||
            obj->GetClassId() == kFunctionTypeCid ||
            obj->GetClassId() == kRecordTypeCid) {
          type_ ^= obj;
          types_->Add(type_);
        }
      }

     private:
      AbstractType& type_;
      GrowableHandlePtrArray<const AbstractType>* types_;
    };

    HeapIterationScope his(T);
    TypesCollector visitor(Z, &types);

    // Find all type objects in this isolate group.
    IG->heap()->VisitObjects(&visitor);

    // Find all type objects in the vm-isolate.
    Dart::vm_isolate_group()->heap()->VisitObjects(&visitor);
  }

  TypeUsageInfo* type_usage_info = Thread::Current()->type_usage_info();

  // No new code is generated at this point, so we can build a picture of
  // which types we might type-test against.
  type_usage_info->BuildTypeUsageInformation();

  TypeTestingStubGenerator type_testing_stubs;
  Code& code = Code::Handle();
  for (intptr_t i = 0; i < types.length(); i++) {
    const AbstractType& type = types.At(i);

    if (type.InVMIsolateHeap()) {
      // ... (vm-isolate types already have their stubs installed)
      continue;
    }

    if (type_usage_info->IsUsedInTypeTest(type)) {
      code = type_testing_stubs.OptimizedCodeForType(type);
      type.SetTypeTestingStub(code);

      // Ensure we retain the type.
      AddType(type);
    }
  }

  ASSERT(Object::dynamic_type().type_test_stub_entry_point() ==
         StubCode::TopTypeTypeTest().EntryPoint());
}
enum ConstantVisitedValue { kNotVisited = 0, kRetain, kDrop };

static bool IsUserDefinedClass(Zone* zone,
                               ClassPtr cls,
                               ObjectStore* object_store) {
  intptr_t cid = cls.untag()->id();
  // ...
}
class ConstantInstanceVisitor {
 public:
  ConstantInstanceVisitor(Zone* zone,
                          WeakTable* visited,
                          ObjectStore* object_store)
      : zone_(zone),
        visited_(visited),
        object_store_(object_store),
        object_(Object::Handle(zone)),
        array_(Array::Handle(zone)) {}

  void Visit(ObjectPtr object_ptr) {
    if (!object_ptr->IsHeapObject()) {
      return;
    }
    ConstantVisitedValue value = static_cast<ConstantVisitedValue>(
        visited_->GetValueExclusive(object_ptr));
    if (value != kNotVisited) {
      return;
    }
    object_ = object_ptr;
    if (IsUserDefinedClass(zone_, object_.clazz(), object_store_)) {
      visited_->SetValueExclusive(object_ptr, kDrop);
      return;
    }
    visited_->SetValueExclusive(object_ptr, kRetain);
    switch (object_ptr.untag()->GetClassId()) {
      case kImmutableArrayCid: {
        array_ ^= object_ptr;
        for (intptr_t i = 0; i < array_.Length(); i++) {
          ObjectPtr element = array_.At(i);
          Visit(element);
          if (static_cast<ConstantVisitedValue>(
                  visited_->GetValueExclusive(element)) == kDrop) {
            visited_->SetValueExclusive(object_ptr, kDrop);
            break;
          }
        }
        break;
      }
      case kConstMapCid: {
        const Map& map = Map::Handle(Map::RawCast(object_ptr));
        Map::Iterator iterator(map);
        while (iterator.MoveNext()) {
          ObjectPtr element = iterator.CurrentKey();
          Visit(element);
          if (static_cast<ConstantVisitedValue>(
                  visited_->GetValueExclusive(element)) == kDrop) {
            visited_->SetValueExclusive(object_ptr, kDrop);
            break;
          }
          element = iterator.CurrentValue();
          Visit(element);
          if (static_cast<ConstantVisitedValue>(
                  visited_->GetValueExclusive(element)) == kDrop) {
            visited_->SetValueExclusive(object_ptr, kDrop);
            break;
          }
        }
        break;
      }
      case kConstSetCid: {
        const Set& set = Set::Handle(Set::RawCast(object_ptr));
        Set::Iterator iterator(set);
        while (iterator.MoveNext()) {
          ObjectPtr element = iterator.CurrentKey();
          Visit(element);
          if (static_cast<ConstantVisitedValue>(
                  visited_->GetValueExclusive(element)) == kDrop) {
            visited_->SetValueExclusive(object_ptr, kDrop);
            break;
          }
        }
        break;
      }
    }
  }

 private:
  Zone* zone_;
  WeakTable* visited_;
  ObjectStore* object_store_;
  Object& object_;
  Array& array_;
};
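// Drops constants of user-defined classes from the per-class constant tables,
// together with any constant collection that transitively refers to such a
// constant, so they are no longer held by consts_to_retain_.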
void Precompiler::DropTransitiveUserDefinedConstants() {
  auto& constants = Array::Handle(Z);
  auto& obj = Object::Handle(Z);
  auto& lib = Library::Handle(Z);
  auto& cls = Class::Handle(Z);
  auto& instance = Instance::Handle(Z);

  {
    NoSafepointScope no_safepoint(T);
    std::unique_ptr<WeakTable> visited(new WeakTable());
    ObjectStore* object_store = IG->object_store();
    ConstantInstanceVisitor visitor(Z, visited.get(), object_store);

    for (intptr_t i = 0; i < libraries_.Length(); i++) {
      lib ^= libraries_.At(i);
      ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
      while (it.HasNext()) {
        cls = it.GetNextClass();
        if (cls.constants() == Array::null()) {
          continue;
        }
        CanonicalInstancesSet constants_set(Z, cls.constants());
        CanonicalInstancesSet::Iterator iterator(&constants_set);
        if (IsUserDefinedClass(Z, cls.ptr(), object_store)) {
          // All constants of a user-defined class are dropped.
          constants = cls.constants();
          dropped_constants_arrays_entries_count_ += constants.Length();
          if (FLAG_trace_precompiler) {
            THR_Print("Dropping %" Pd " entries from constants for class %s\n",
                      constants.Length(), cls.ToCString());
          }
          while (iterator.MoveNext()) {
            obj = constants_set.GetKey(iterator.Current());
            instance = Instance::RawCast(obj.ptr());
            consts_to_retain_.Remove(&instance);
            visited->SetValueExclusive(obj.ptr(), kDrop);
          }
        } else {
          // Visit each constant and drop those that transitively refer to a
          // constant of a user-defined class.
          while (iterator.MoveNext()) {
            obj = constants_set.GetKey(iterator.Current());
            ConstantVisitedValue value = static_cast<ConstantVisitedValue>(
                visited->GetValueExclusive(obj.ptr()));
            if (value == kNotVisited) {
              visitor.Visit(obj.ptr());
              value = static_cast<ConstantVisitedValue>(
                  visited->GetValueExclusive(obj.ptr()));
            }
            if (value == kDrop) {
              dropped_constants_arrays_entries_count_++;
              if (FLAG_trace_precompiler) {
                THR_Print("Dropping constant entry for class %s instance:%s\n",
                          cls.ToCString(), obj.ToCString());
              }
              instance = Instance::RawCast(obj.ptr());
              consts_to_retain_.Remove(&instance);
            }
          }
        }
        constants_set.Release();
      }
    }
  }
}
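// Rebuilds every class's canonical constant table so that it only contains
// constants present in consts_to_retain_, and traces the types of classes
// that still have fields, functions, allocations, or constants.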
void Precompiler::TraceTypesFromRetainedClasses() {
  auto& lib = Library::Handle(Z);
  auto& cls = Class::Handle(Z);
  auto& members = Array::Handle(Z);
  auto& constants = Array::Handle(Z);
  auto& retained_constants = GrowableObjectArray::Handle(Z);
  auto& obj = Object::Handle(Z);
  auto& constant = Instance::Handle(Z);

  SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
  for (intptr_t i = 0; i < libraries_.Length(); i++) {
    lib ^= libraries_.At(i);
    ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
    while (it.HasNext()) {
      cls = it.GetNextClass();

      // A class is retained if it has surviving fields or functions, has been
      // allocated, or still owns retained constants.
      bool retain = false;
      members = cls.fields();
      if (members.Length() > 0) {
        retain = true;
      }
      members = cls.current_functions();
      if (members.Length() > 0) {
        retain = true;
      }
      if (cls.is_allocated()) {
        retain = true;
      }

      constants = cls.constants();
      retained_constants = GrowableObjectArray::New();
      if (!constants.IsNull()) {
        for (intptr_t j = 0; j < constants.Length(); j++) {
          obj = constants.At(j);
          if ((obj.ptr() == HashTableBase::UnusedMarker().ptr()) ||
              (obj.ptr() == HashTableBase::DeletedMarker().ptr())) {
            continue;
          }
          constant ^= obj.ptr();
          bool retain = consts_to_retain_.HasKey(&constant);
          if (retain) {
            retained_constants.Add(constant);
          }
        }
      }
      // Rehash the constants table with only the retained constants.
      cls.set_constants(Object::null_array());
      for (intptr_t j = 0; j < retained_constants.Length(); j++) {
        constant ^= retained_constants.At(j);
        cls.InsertCanonicalConstant(Z, constant);
      }
      if (retained_constants.Length() > 0) {
        retain = true;
      }

      if (retain) {
        AddTypesOf(cls);
      }
    }
  }
}
void Precompiler::DropMetadata() {
  SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());

  Library& lib = Library::Handle(Z);
  for (intptr_t i = 0; i < libraries_.Length(); i++) {
    lib ^= libraries_.At(i);
    lib.set_metadata(Array::null_array());
  }
}
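// Clears library dictionary entries for classes, functions, and fields that
// are not in the corresponding *_to_retain_ sets, then rehashes each
// dictionary and drops dependency/cache state where permitted.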
void Precompiler::DropLibraryEntries() {
  Library& lib = Library::Handle(Z);
  Array& dict = Array::Handle(Z);
  Object& entry = Object::Handle(Z);

  for (intptr_t i = 0; i < libraries_.Length(); i++) {
    lib ^= libraries_.At(i);

    dict = lib.dictionary();
    intptr_t dict_size = dict.Length() - 1;
    intptr_t used = 0;
    for (intptr_t j = 0; j < dict_size; j++) {
      entry = dict.At(j);
      if (entry.IsNull()) continue;

      if (entry.IsClass()) {
        if (classes_to_retain_.HasKey(&Class::Cast(entry))) {
          used++;
          continue;
        }
      } else if (entry.IsFunction()) {
        if (functions_to_retain_.ContainsKey(Function::Cast(entry))) {
          used++;
          continue;
        }
      } else if (entry.IsField()) {
        if (fields_to_retain_.HasKey(&Field::Cast(entry))) {
          used++;
          continue;
        }
      } else if (entry.IsLibraryPrefix()) {
        // Always drop.
      } else {
        FATAL("Unexpected library entry: %s", entry.ToCString());
      }
      dict.SetAt(j, Object::null_object());
    }
    lib.RehashDictionary(dict, used * 4 / 3 + 1);
    if (!(retain_root_library_caches_ &&
          (lib.ptr() == IG->object_store()->root_library()))) {
      lib.DropDependenciesAndCaches();
    }
  }
}
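// Removes unretained classes from the class table. Constant tables are
// weakened first so that dropped constants do not keep classes alive, and
// direct-implementor/direct-subclass lists are pruned to retained classes.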
void Precompiler::DropClasses() {
  Class& cls = Class::Handle(Z);
  Array& constants = Array::Handle(Z);
  GrowableObjectArray& implementors = GrowableObjectArray::Handle(Z);
  GrowableObjectArray& retained_implementors = GrowableObjectArray::Handle(Z);
  Class& implementor = Class::Handle(Z);
  GrowableObjectArray& subclasses = GrowableObjectArray::Handle(Z);
  GrowableObjectArray& retained_subclasses = GrowableObjectArray::Handle(Z);
  Class& subclass = Class::Handle(Z);

  // Make sure there are no lingering instances of classes that are about to
  // be dropped.
  IG->heap()->CollectAllGarbage();
  IG->heap()->WaitForSweeperTasks(T);

  SafepointWriteRwLocker ml(T, IG->program_lock());
  ClassTable* class_table = IG->class_table();
  intptr_t num_cids = class_table->NumCids();

  // Weaken the constant tables so dropped constants do not keep their
  // classes alive.
  for (intptr_t cid = 0; cid < num_cids; cid++) {
    if (!class_table->IsValidIndex(cid)) continue;
    if (!class_table->HasValidClassAt(cid)) continue;
    cls = class_table->At(cid);
    constants = cls.constants();
    HashTables::Weaken(constants);
  }

  for (intptr_t cid = kNumPredefinedCids; cid < num_cids; cid++) {
    if (!class_table->IsValidIndex(cid)) continue;
    if (!class_table->HasValidClassAt(cid)) continue;
    cls = class_table->At(cid);

    implementors = cls.direct_implementors();
    if (!implementors.IsNull()) {
      retained_implementors = GrowableObjectArray::New();
      for (intptr_t i = 0; i < implementors.Length(); i++) {
        implementor ^= implementors.At(i);
        if (classes_to_retain_.HasKey(&implementor)) {
          retained_implementors.Add(implementor);
        }
      }
      cls.set_direct_implementors(retained_implementors);
    }

    subclasses = cls.direct_subclasses();
    if (!subclasses.IsNull()) {
      retained_subclasses = GrowableObjectArray::New();
      for (intptr_t i = 0; i < subclasses.Length(); i++) {
        subclass ^= subclasses.At(i);
        if (classes_to_retain_.HasKey(&subclass)) {
          retained_subclasses.Add(subclass);
        }
      }
      cls.set_direct_subclasses(retained_subclasses);
    }

    if (cls.IsTopLevel()) {
      // Top-level classes are dropped together with their library.
      continue;
    }

    bool retain = classes_to_retain_.HasKey(&cls);
    if (retain) {
      continue;
    }

    ASSERT(!cls.is_allocated());
    constants = cls.constants();
    ASSERT(constants.IsNull() || (constants.Length() == 0));

    dropped_class_count_++;
    if (FLAG_trace_precompiler) {
      THR_Print("Dropping class %" Pd " %s\n", cid, cls.ToCString());
    }
  }
}
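// Rebuilds the library list, keeping a library if it still has dictionary
// entries, uses the dart: scheme, is the root library, or has a retained
// top-level class; all other libraries are unregistered and dropped.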
void Precompiler::DropLibraries() {
  const GrowableObjectArray& retained_libraries =
      GrowableObjectArray::Handle(Z, GrowableObjectArray::New());
  const Library& root_lib =
      Library::Handle(Z, IG->object_store()->root_library());
  Library& lib = Library::Handle(Z);
  Class& toplevel_class = Class::Handle(Z);

  for (intptr_t i = 0; i < libraries_.Length(); i++) {
    lib ^= libraries_.At(i);
    intptr_t entries = 0;
    DictionaryIterator it(lib);
    while (it.HasNext()) {
      it.GetNext();
      entries++;
    }
    bool retain = false;
    if (entries > 0) {
      retain = true;
    } else if (lib.is_dart_scheme()) {
      // The core libraries are referenced from the object store.
      retain = true;
    } else if (lib.ptr() == root_lib.ptr()) {
      // The root library is referenced from the object store even if it has
      // no surviving members.
      retain = true;
    } else {
      // The top-level class may still be referenced.
      toplevel_class = lib.toplevel_class();
      if (classes_to_retain_.HasKey(&toplevel_class)) {
        retain = true;
      }
    }

    if (retain) {
      lib.set_index(retained_libraries.Length());
      retained_libraries.Add(lib);
    } else {
      toplevel_class = lib.toplevel_class();
      IG->class_table()->UnregisterTopLevel(toplevel_class.id());

      dropped_library_count_++;
      lib.set_index(-1);
      if (FLAG_trace_precompiler) {
        THR_Print("Dropping library %s\n", lib.ToCString());
      }
    }
  }

  Library::RegisterLibraries(T, retained_libraries);
  libraries_ = retained_libraries.ptr();
}
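// When --dwarf-stack-traces is enabled and --retain-code-objects is not,
// marks Code objects as discarded if none of their metadata (exception
// handlers, PC descriptors, dynamic dispatch, async unwinding, deferred
// loading, FFI) is needed at runtime. Codes that remain targets of
// call-via-Code static calls are un-discarded afterwards.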
void Precompiler::DiscardCodeObjects() {
  class DiscardCodeVisitor : public CodeVisitor {
   public:
    DiscardCodeVisitor(Zone* zone,
                       const FunctionSet& functions_to_retain,
                       const FunctionSet& functions_called_dynamically)
        : function_(Function::Handle(zone)),
          parent_function_(Function::Handle(zone)),
          class_(Class::Handle(zone)),
          library_(Library::Handle(zone)),
          loading_unit_(LoadingUnit::Handle(zone)),
          static_calls_target_table_(Array::Handle(zone)),
          kind_and_offset_(Smi::Handle(zone)),
          call_target_(Code::Handle(zone)),
          targets_of_calls_via_code_(
              GrowableObjectArray::Handle(zone, GrowableObjectArray::New())),
          functions_to_retain_(functions_to_retain),
          functions_called_dynamically_(functions_called_dynamically) {}

    // Remember Code objects that are targets of static calls via Code, so
    // they can be un-discarded later.
    void RecordCodeObjectsUsedForCalls(const Code& code) {
      static_calls_target_table_ = code.static_calls_target_table();
      if (static_calls_target_table_.IsNull()) return;

      StaticCallsTable static_calls(static_calls_target_table_);
      for (const auto& view : static_calls) {
        kind_and_offset_ = view.Get<Code::kSCallTableKindAndOffset>();
        auto const kind = Code::KindField::decode(kind_and_offset_.Value());
        if (kind == Code::kCallViaCode) {
          call_target_ =
              Code::RawCast(view.Get<Code::kSCallTableCodeOrTypeTarget>());
          ASSERT(!call_target_.IsNull());
          targets_of_calls_via_code_.Add(call_target_);
        }
      }
    }

    void VisitCode(const Code& code) override {
      ++total_code_objects_;

      RecordCodeObjectsUsedForCalls(code);

      // Only discard Code objects corresponding to Dart functions.
      if (!code.IsFunctionCode() || code.IsUnknownDartCode()) {
        ++non_function_codes_;
        return;
      }

      // Retain Code objects with exception handlers or PC descriptors.
      if (code.exception_handlers() !=
          Object::empty_exception_handlers().ptr()) {
        ++codes_with_exception_handlers_;
        return;
      }
      if (code.pc_descriptors() != Object::empty_descriptors().ptr()) {
        ++codes_with_pc_descriptors_;
        return;
      }

      function_ = code.function();
      if (functions_to_retain_.ContainsKey(function_)) {
        // Retain Code objects corresponding to native functions.
        if (function_.is_old_native()) {
          ++codes_with_native_function_;
          return;
        }

        // Retain Code objects corresponding to dynamically called functions.
        if (functions_called_dynamically_.ContainsKey(function_)) {
          ++codes_with_dynamically_called_function_;
          return;
        }

        if (StackTraceUtils::IsNeededForAsyncAwareUnwinding(function_)) {
          ++codes_with_function_needed_for_async_unwinding_;
          return;
        }
      } else {
        ASSERT(!functions_called_dynamically_.ContainsKey(function_));
      }

      // Retain Code objects of functions in non-root loading units and of
      // FFI callback trampolines.
      class_ = function_.Owner();
      library_ = class_.library();
      loading_unit_ = library_.loading_unit();
      if (loading_unit_.id() != LoadingUnit::kRootId) {
        ++codes_with_deferred_function_;
        return;
      }
      if (function_.IsFfiCallbackTrampoline()) {
        ++codes_with_ffi_trampoline_function_;
        return;
      }

      code.set_is_discarded(true);
      if (FLAG_trace_precompiler) {
        THR_Print("Discarding code object corresponding to %s\n",
                  function_.ToFullyQualifiedCString());
      }
      ++discarded_codes_;
    }

    void RetainCodeObjectsUsedAsCallTargets() {
      for (intptr_t i = 0, n = targets_of_calls_via_code_.Length(); i < n;
           ++i) {
        call_target_ = Code::RawCast(targets_of_calls_via_code_.At(i));
        if (call_target_.is_discarded()) {
          call_target_.set_is_discarded(false);
          ++codes_used_as_call_targets_;
          --discarded_codes_;
        }
      }
    }

    void PrintStatistics() const {
      THR_Print("Discarding Code objects:\n");
      THR_Print("  %8" Pd " non-function Codes\n", non_function_codes_);
      THR_Print("  %8" Pd " Codes with exception handlers\n",
                codes_with_exception_handlers_);
      THR_Print("  %8" Pd " Codes with pc descriptors\n",
                codes_with_pc_descriptors_);
      THR_Print("  %8" Pd " Codes with native functions\n",
                codes_with_native_function_);
      THR_Print("  %8" Pd " Codes with dynamically called functions\n",
                codes_with_dynamically_called_function_);
      THR_Print("  %8" Pd " Codes with async unwinding related functions\n",
                codes_with_function_needed_for_async_unwinding_);
      THR_Print("  %8" Pd " Codes with deferred functions\n",
                codes_with_deferred_function_);
      THR_Print("  %8" Pd " Codes with ffi trampoline functions\n",
                codes_with_ffi_trampoline_function_);
      THR_Print("  %8" Pd " Codes used as call targets\n",
                codes_used_as_call_targets_);
      THR_Print("  %8" Pd " Codes discarded\n", discarded_codes_);
      THR_Print("  %8" Pd " Codes total\n", total_code_objects_);
    }

   private:
    Function& function_;
    Function& parent_function_;
    Class& class_;
    Library& library_;
    LoadingUnit& loading_unit_;
    Array& static_calls_target_table_;
    Smi& kind_and_offset_;
    Code& call_target_;
    GrowableObjectArray& targets_of_calls_via_code_;
    const FunctionSet& functions_to_retain_;
    const FunctionSet& functions_called_dynamically_;

    intptr_t total_code_objects_ = 0;
    intptr_t non_function_codes_ = 0;
    intptr_t codes_with_exception_handlers_ = 0;
    intptr_t codes_with_pc_descriptors_ = 0;
    intptr_t codes_with_native_function_ = 0;
    intptr_t codes_with_dynamically_called_function_ = 0;
    intptr_t codes_with_function_needed_for_async_unwinding_ = 0;
    intptr_t codes_with_deferred_function_ = 0;
    intptr_t codes_with_ffi_trampoline_function_ = 0;
    intptr_t codes_used_as_call_targets_ = 0;
    intptr_t discarded_codes_ = 0;
  };

  // Code objects are only discarded when stack traces are symbolized offline
  // and the embedder did not ask to retain them.
  if (!FLAG_dwarf_stack_traces_mode || FLAG_retain_code_objects) {
    return;
  }

  DiscardCodeVisitor visitor(Z, functions_to_retain_,
                             functions_called_dynamically_);
  ProgramVisitor::WalkProgram(Z, IG, &visitor);
  visitor.RetainCodeObjectsUsedAsCallTargets();

  if (FLAG_trace_precompiler) {
    visitor.PrintStatistics();
  }
}
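// Prunes library dictionaries and class member lists down to program
// elements with an API use (or dynamically callable functions), then
// re-registers the surviving libraries.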
void Precompiler::PruneDictionaries() {
#if defined(DEBUG)
  // Verify that api_uses_ is stable.
  ProgramElementSet::Iterator it = api_uses_.GetIterator();
  while (auto entry = it.Next()) {
    ASSERT(api_uses_.HasKey(*entry));
  }
#endif

  class PruneDictionariesVisitor {
   public:
    GrowableObjectArrayPtr PruneLibraries(
        const GrowableObjectArray& libraries) {
      for (intptr_t i = 0; i < libraries.Length(); i++) {
        lib_ ^= libraries.At(i);
        bool retain = PruneLibrary(lib_);
        if (retain) {
          lib_.set_index(retained_libraries_.Length());
          retained_libraries_.Add(lib_);
        } else {
          lib_.set_private_key(null_string_);
        }
      }
      Library::RegisterLibraries(Thread::Current(), retained_libraries_);
      return retained_libraries_.ptr();
    }

    bool PruneLibrary(const Library& lib) {
      dict_ = lib.dictionary();
      intptr_t dict_size = dict_.Length() - 1;
      intptr_t used = 0;
      for (intptr_t i = 0; i < dict_size; i++) {
        entry_ = dict_.At(i);
        if (entry_.IsNull()) continue;

        bool retain = false;
        if (entry_.IsClass()) {
          // Classes of some core libraries are always retained.
          retain = PruneClass(Class::Cast(entry_)) ||
                   (lib.url() == Symbols::DartAsync().ptr()) ||
                   (lib.url() == Symbols::DartCore().ptr()) ||
                   (lib.url() == Symbols::DartCollection().ptr()) ||
                   (lib.url() == Symbols::DartTypedData().ptr());
        } else if (entry_.IsFunction() || entry_.IsField()) {
          retain = precompiler_->HasApiUse(entry_);
        } else {
          FATAL("Unexpected library entry: %s", entry_.ToCString());
        }
        if (retain) {
          used++;
        } else {
          dict_.SetAt(i, Object::null_object());
        }
      }
      lib.RehashDictionary(dict_, used * 4 / 3 + 1);

      bool retain = used > 0;
      cls_ = lib.toplevel_class();
      if (PruneClass(cls_)) {
        retain = true;
      }
      if (lib.is_dart_scheme()) {
        retain = true;
      }
      if (lib.ptr() == root_lib_.ptr()) {
        retain = true;
      }
      if (precompiler_->HasApiUse(lib)) {
        retain = true;
      }
      return retain;
    }

    bool PruneClass(const Class& cls) {
      bool retain = precompiler_->HasApiUse(cls);

      functions_ = cls.functions();
      retained_functions_ = GrowableObjectArray::New();
      for (intptr_t i = 0; i < functions_.Length(); i++) {
        function_ ^= functions_.At(i);
        if (precompiler_->HasApiUse(function_)) {
          retained_functions_.Add(function_);
          retain = true;
        } else if (precompiler_->functions_called_dynamically_.ContainsKey(
                       function_)) {
          // Keep the function for dynamic dispatch, but this alone does not
          // keep the class alive.
          retained_functions_.Add(function_);
        }
      }
      if (retained_functions_.Length() > 0) {
        functions_ = Array::MakeFixedLength(retained_functions_);
        cls.SetFunctions(functions_);
      } else {
        cls.SetFunctions(Object::empty_array());
      }

      fields_ = cls.fields();
      retained_fields_ = GrowableObjectArray::New();
      for (intptr_t i = 0; i < fields_.Length(); i++) {
        field_ ^= fields_.At(i);
        if (precompiler_->HasApiUse(field_)) {
          retained_fields_.Add(field_);
          retain = true;
        }
      }
      if (retained_fields_.Length() > 0) {
        fields_ = Array::MakeFixedLength(retained_fields_);
        cls.SetFields(fields_);
      } else {
        cls.SetFields(Object::empty_array());
      }

      return retain;
    }

    explicit PruneDictionariesVisitor(Precompiler* precompiler, Zone* zone)
        : precompiler_(precompiler),
          lib_(Library::Handle(zone)),
          dict_(Array::Handle(zone)),
          entry_(Object::Handle(zone)),
          cls_(Class::Handle(zone)),
          functions_(Array::Handle(zone)),
          fields_(Array::Handle(zone)),
          function_(Function::Handle(zone)),
          field_(Field::Handle(zone)),
          retained_functions_(GrowableObjectArray::Handle(zone)),
          retained_fields_(GrowableObjectArray::Handle(zone)),
          retained_libraries_(
              GrowableObjectArray::Handle(zone, GrowableObjectArray::New())),
          root_lib_(Library::Handle(
              zone,
              precompiler->isolate_group()->object_store()->root_library())),
          null_string_(String::Handle(zone)) {}

   private:
    Precompiler* const precompiler_;
    Library& lib_;
    Array& dict_;
    Object& entry_;
    Class& cls_;
    Array& functions_;
    Array& fields_;
    Function& function_;
    Field& field_;
    GrowableObjectArray& retained_functions_;
    GrowableObjectArray& retained_fields_;
    const GrowableObjectArray& retained_libraries_;
    const Library& root_lib_;
    const String& null_string_;
  };

  SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
  PruneDictionariesVisitor visitor(this, Z);
  libraries_ = visitor.PruneLibraries(libraries_);
}
struct CodeKeyTraits {
  static uint32_t Hash(const Object& key) { return Code::Cast(key).Size(); }
  static const char* Name() { return "CodeKeyTraits"; }
  static bool IsMatch(const Object& x, const Object& y) {
    return x.ptr() == y.ptr();
  }
  static bool ReportStats() { return false; }
};

typedef UnorderedHashSet<CodeKeyTraits> CodeSet;
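// Debug helper: walks the whole program and returns a function from
// functions_to_retain_ whose code was not visited by the walk, or null if
// every retained function's code is reachable.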
FunctionPtr Precompiler::FindUnvisitedRetainedFunction() {
  class CodeChecker : public CodeVisitor {
   public:
    CodeChecker() : visited_code_(HashTables::New<CodeSet>(1024)) {}
    ~CodeChecker() { visited_code_.Release(); }

    const CodeSet& visited() const { return visited_code_; }

    void VisitCode(const Code& code) { visited_code_.Insert(code); }

   private:
    CodeSet visited_code_;
  };

  CodeChecker visitor;
  ProgramVisitor::WalkProgram(Z, IG, &visitor);
  const CodeSet& visited = visitor.visited();

  FunctionSet::Iterator it(&functions_to_retain_);
  Function& function = Function::Handle(Z);
  Code& code = Code::Handle(Z);
  while (it.MoveNext()) {
    function ^= functions_to_retain_.GetKey(it.Current());
    if (!function.HasCode()) continue;
    code = function.CurrentCode();
    if (!visited.ContainsKey(code)) {
      return function.ptr();
    }
  }
  return Function::null();
}
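// When obfuscation is enabled, renames script URIs and library names/URIs
// through the Obfuscator so that the renaming map also covers them, then
// serializes the map and clears it from the object store.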
void Precompiler::Obfuscate() {
  if (!IG->obfuscate()) {
    return;
  }

  class ScriptsCollector : public ObjectVisitor {
   public:
    explicit ScriptsCollector(Zone* zone,
                              GrowableHandlePtrArray<const Script>* scripts)
        : script_(Script::Handle(zone)), scripts_(scripts) {}

    void VisitObject(ObjectPtr obj) override {
      if (obj->GetClassId() == kScriptCid) {
        script_ ^= obj;
        scripts_->Add(Script::Cast(script_));
      }
    }

   private:
    Script& script_;
    GrowableHandlePtrArray<const Script>* scripts_;
  };

  GrowableHandlePtrArray<const Script> scripts(Z, 100);
  IsolateGroup::Current()->heap()->CollectAllGarbage();
  {
    HeapIterationScope his(T);
    ScriptsCollector visitor(Z, &scripts);
    IG->heap()->VisitObjects(&visitor);
  }

  {
    // Note: when the Obfuscator is destroyed it saves the renaming map back
    // into the object store.
    Obfuscator obfuscator(T, /*private_key=*/String::Handle(Z));

    String& str = String::Handle(Z);
    for (intptr_t i = 0; i < scripts.length(); i++) {
      str = scripts.At(i).url();
      str = Symbols::New(T, str);
      str = obfuscator.Rename(str, /*atomic=*/true);
    }

    Library& lib = Library::Handle();
    for (intptr_t i = 0; i < libraries_.Length(); i++) {
      lib ^= libraries_.At(i);
      if (!lib.is_dart_scheme()) {
        str = lib.name();
        str = obfuscator.Rename(str, /*atomic=*/true);

        str = lib.url();
        str = Symbols::New(T, str);
        str = obfuscator.Rename(str, /*atomic=*/true);
      }
    }
  }
  Library::RegisterLibraries(T, libraries_);

  // Obfuscation is done; move the map into native memory.
  IG->set_obfuscation_map(Obfuscator::SerializeMap(T));

  // Discard the in-heap mapping so it is not included in the snapshot.
  IG->object_store()->set_obfuscation_map(Array::Handle(Z));
}
void Precompiler::FinalizeAllClasses() {
  // Use a fresh zone for class finalization.
  StackZone stack_zone(thread());

  error_ = Library::FinalizeAllClasses();
  if (!error_.IsNull()) {
    Jump(error_);
  }
  IG->set_all_classes_finalized(true);
}
void PrecompileParsedFunctionHelper::FinalizeCompilation(
    compiler::Assembler* assembler,
    FlowGraphCompiler* graph_compiler,
    FlowGraph* flow_graph,
    CodeStatistics* stats) {
  const Function& function = parsed_function()->function();
  Zone* const zone = thread()->zone();

  const Array& deopt_info_array =
      Array::Handle(zone, graph_compiler->CreateDeoptInfo(assembler));

  // In AOT mode the object pool is not attached to the Code object; the
  // global object pool is used instead.
  const auto pool_attachment = Code::PoolAttachment::kNotAttachPool;

  SafepointWriteRwLocker ml(T, T->isolate_group()->program_lock());
  const Code& code = Code::Handle(
      Code::FinalizeCodeAndNotify(function, graph_compiler, assembler,
                                  pool_attachment, optimized(), stats));
  code.set_is_optimized(optimized());
  function.set_usage_counter(INT32_MIN);

  graph_compiler->FinalizePcDescriptors(code);
  code.set_deopt_info_array(deopt_info_array);

  graph_compiler->FinalizeStackMaps(code);
  graph_compiler->FinalizeVarDescriptors(code);
  graph_compiler->FinalizeExceptionHandlers(code);
  graph_compiler->FinalizeCatchEntryMovesMap(code);
  graph_compiler->FinalizeStaticCallTargetsTable(code);
  graph_compiler->FinalizeCodeSourceMap(code);

  ASSERT(thread()->IsDartMutatorThread());
  if (function.IsFfiCallbackTrampoline()) {
    SetFfiCallbackCode(thread(), function, code);
  } else {
    function.AttachCode(code);
  }
}
static void GenerateNecessaryAllocationStubs(FlowGraph* flow_graph) {
  for (auto block : flow_graph->reverse_postorder()) {
    for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
      if (auto allocation = it.Current()->AsAllocateObject()) {
        StubCode::GetAllocationStubForClass(allocation->cls());
      }
    }
  }
}
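// Compiles a single parsed function through the AOT pipeline: builds the
// flow graph, runs the kAOT compiler passes, generates code into the global
// object pool, and finalizes the Code object. On bailouts it retries with
// far branches enabled or with the offending deopt id blocked for
// speculative inlining.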
bool PrecompileParsedFunctionHelper::Compile(CompilationPipeline* pipeline) {
  ASSERT(CompilerState::Current().is_aot());
  if (optimized() && !parsed_function()->function().IsOptimizable()) {
    // All functions compiled by the precompiler must be optimizable.
    return false;
  }
  volatile bool is_compiled = false;
  volatile bool done = false;
  Zone* const zone = thread()->zone();

  // We may reattempt compilation if the function needs to be assembled using
  // far branches or if the global object pool runs out of space.
  volatile intptr_t far_branch_level = 0;

  SpeculativeInliningPolicy speculative_policy(
      true, FLAG_max_speculative_inlining_attempts);

  while (!done) {
    LongJumpScope jump;
    const intptr_t val = setjmp(*jump.Set());
    if (val == 0) {
      FlowGraph* flow_graph = nullptr;
      ZoneGrowableArray<const ICData*>* ic_data_array = nullptr;
      const Function& function = parsed_function()->function();

      CompilerState compiler_state(thread(), /*is_aot=*/true, optimized(),
                                   CompilerState::ShouldTrace(function));
      compiler_state.set_function(function);

      {
        ic_data_array = new (zone) ZoneGrowableArray<const ICData*>();

        flow_graph =
            pipeline->BuildFlowGraph(zone, parsed_function(), ic_data_array,
                                     Compiler::kNoOSRDeoptId, optimized());
      }

      flow_graph->PopulateWithICData(function);

      const bool print_flow_graph =
          (FLAG_print_flow_graph ||
           (optimized() && FLAG_print_flow_graph_optimized)) &&
          FlowGraphPrinter::ShouldPrint(function);

      if (print_flow_graph && !optimized()) {
        FlowGraphPrinter::PrintGraph("Unoptimized Compilation", flow_graph);
      }

      CompilerPassState pass_state(thread(), flow_graph, &speculative_policy,
                                   precompiler_);

      AotCallSpecializer call_specializer(precompiler_, flow_graph,
                                          &speculative_policy);
      pass_state.call_specializer = &call_specializer;

      flow_graph = CompilerPass::RunPipeline(CompilerPass::kAOT, &pass_state);

      ASSERT(pass_state.inline_id_to_function.length() ==
             pass_state.caller_inline_id.length());

      ASSERT(precompiler_ != nullptr);

      // Ensure that allocation stubs referenced by the generated code exist.
      GenerateNecessaryAllocationStubs(flow_graph);

      compiler::ObjectPoolBuilder object_pool_builder(
          precompiler_->global_object_pool_builder());
      compiler::Assembler assembler(&object_pool_builder, far_branch_level);

      CodeStatistics* function_stats = nullptr;
      if (FLAG_print_instruction_stats) {
        function_stats = new CodeStatistics(&assembler);
      }

      FlowGraphCompiler graph_compiler(
          &assembler, flow_graph, *parsed_function(), optimized(),
          &speculative_policy, pass_state.inline_id_to_function,
          pass_state.inline_id_to_token_pos, pass_state.caller_inline_id,
          ic_data_array, function_stats);
      pass_state.graph_compiler = &graph_compiler;
      CompilerPass::GenerateCode(&pass_state);

      {
        ASSERT(thread()->IsDartMutatorThread());
        FinalizeCompilation(&assembler, &graph_compiler, flow_graph,
                            function_stats);
      }

      if (precompiler_->phase() ==
          Precompiler::Phase::kFixpointCodeGeneration) {
        for (intptr_t i = 0; i < graph_compiler.used_static_fields().length();
             i++) {
          precompiler_->AddField(*graph_compiler.used_static_fields().At(i));
        }

        const GrowableArray<const compiler::TableSelector*>& call_selectors =
            graph_compiler.dispatch_table_call_targets();
        for (intptr_t i = 0; i < call_selectors.length(); i++) {
          precompiler_->AddTableSelector(call_selectors[i]);
        }
      } else {
        ASSERT(precompiler_->phase() ==
               Precompiler::Phase::kCompilingConstructorsForInstructionCounts);
      }

      // Try to commit the object pool into the global object pool; retry the
      // compilation if this fails.
      if (!object_pool_builder.TryCommitToParent()) {
        continue;
      }
      is_compiled = true;
      done = true;
    } else {
      // We bailed out or we encountered an error.
      const Error& error = Error::Handle(thread()->StealStickyError());

      if (error.ptr() == Object::branch_offset_error().ptr()) {
        // Compilation failed due to an out of range branch offset in the
        // assembler. Retry with far branches enabled.
        far_branch_level++;
      } else if (error.ptr() == Object::speculative_inlining_error().ptr()) {
        // The return value of setjmp is the deopt id of the check instruction
        // that caused the bailout.
        if (!speculative_policy.AllowsSpeculativeInlining()) {
          UNREACHABLE();
        }
        if (!speculative_policy.AddBlockedDeoptId(val)) {
          if (FLAG_trace_compiler || FLAG_trace_optimizing_compiler) {
            THR_Print("Disabled speculative inlining after %" Pd " attempts.\n",
                      speculative_policy.length());
          }
        }
      } else {
        // Not a retryable error: we are done.
        done = true;
        if (FLAG_trace_bailout) {
          THR_Print("%s\n", error.ToErrorCString());
        }
      }

      if (error.IsLanguageError() &&
          (LanguageError::Cast(error).kind() == Report::kBailout)) {
        // Discard the error if it was just a bailout.
      } else {
        thread()->set_sticky_error(error);
      }
      is_compiled = false;
    }
  }
  return is_compiled;
}
static ErrorPtr PrecompileFunctionHelper(Precompiler* precompiler,
                                         CompilationPipeline* pipeline,
                                         const Function& function,
                                         bool optimized) {
  ASSERT(CompilerState::Current().is_aot());

  LongJumpScope jump;
  if (setjmp(*jump.Set()) == 0) {
    Thread* const thread = Thread::Current();
    StackZone stack_zone(thread);
    Zone* const zone = stack_zone.GetZone();
    const bool trace_compiler =
        FLAG_trace_compiler || (FLAG_trace_optimizing_compiler && optimized);
    Timer per_compile_timer;
    per_compile_timer.Start();

    ParsedFunction* parsed_function = new (zone)
        ParsedFunction(thread, Function::ZoneHandle(zone, function.ptr()));
    if (trace_compiler) {
      THR_Print("Precompiling %sfunction: '%s' @ token %" Pd ", size %" Pd "\n",
                (optimized ? "optimized " : ""),
                function.ToFullyQualifiedCString(), function.token_pos().Pos(),
                (function.end_token_pos().Pos() - function.token_pos().Pos()));
    }

    pipeline->ParseFunction(parsed_function);

    PrecompileParsedFunctionHelper helper(precompiler, parsed_function,
                                          optimized);
    const bool success = helper.Compile(pipeline);
    if (!success) {
      // Encountered a compile-time error.
      const Error& error = Error::Handle(thread->StealStickyError());
      ASSERT(error.IsLanguageError() &&
             LanguageError::Cast(error).kind() != Report::kBailout);
      return error.ptr();
    }

    per_compile_timer.Stop();

    if (trace_compiler) {
      THR_Print("--> '%s' entry: %#" Px " size: %" Pd " time: %" Pd64 " us\n",
                function.ToFullyQualifiedCString(),
                Code::Handle(function.CurrentCode()).PayloadStart(),
                Code::Handle(function.CurrentCode()).Size(),
                per_compile_timer.TotalElapsedTime());
    }

    if (FLAG_disassemble && FlowGraphPrinter::ShouldPrint(function)) {
      Code& code = Code::Handle(function.CurrentCode());
      Disassembler::DisassembleCode(function, code, optimized);
    } else if (FLAG_disassemble_optimized && optimized &&
               FlowGraphPrinter::ShouldPrint(function)) {
      Code& code = Code::Handle(function.CurrentCode());
      Disassembler::DisassembleCode(function, code, optimized);
    }
    return Error::null();
  } else {
    Thread* const thread = Thread::Current();
    StackZone stack_zone(thread);
    // We got an error during compilation.
    const Error& error = Error::Handle(thread->StealStickyError());
    // Precompilation may encounter compile-time errors; do not attempt to
    // optimize functions that can cause errors.
    function.set_is_optimizable(false);
    return error.ptr();
  }
  UNREACHABLE();
  return Error::null();
}
ErrorPtr Precompiler::CompileFunction(Precompiler* precompiler,
                                      Thread* thread,
                                      Zone* zone,
                                      const Function& function) {
  NoActiveIsolateScope no_isolate_scope;
  PRECOMPILER_TIMER_SCOPE(precompiler, CompileFunction);
  VMTagScope tagScope(thread, VMTag::kCompileUnoptimizedTagId);
  TIMELINE_FUNCTION_COMPILATION_DURATION(thread, "CompileFunction", function);

  ASSERT(CompilerState::Current().is_aot());
  const bool optimized = function.IsOptimizable();  // Default optimize.
  DartCompilationPipeline pipeline;
  if (precompiler->is_tracing()) {
    precompiler->tracer_->WriteCompileFunctionEvent(function);
  }

  return PrecompileFunctionHelper(precompiler, &pipeline, function, optimized);
}
Obfuscator::Obfuscator(Thread* thread, const String& private_key)
    : state_(nullptr) {
  auto isolate_group = thread->isolate_group();
  if (!isolate_group->obfuscate()) {
    // Nothing to do.
    return;
  }
  auto zone = thread->zone();

  // Reuse the saved obfuscation state from the object store, or create a
  // fresh one with an empty renaming map.
  ObjectStore* store = isolate_group->object_store();
  Array& obfuscation_state = Array::Handle(zone, store->obfuscation_map());
  if (obfuscation_state.IsNull()) {
    const int kInitialPrivateCapacity = 256;
    obfuscation_state = Array::New(kSavedStateSize);
    obfuscation_state.SetAt(
        kSavedStateRenamesIndex,
        Array::Handle(zone, HashTables::New<ObfuscationMap>(
                                kInitialPrivateCapacity, Heap::kOld)));
  }

  state_ = new (zone) ObfuscationState(thread, obfuscation_state, private_key);

  InitializeRenamingMap();
}

Obfuscator::~Obfuscator() {
  if (state_ != nullptr) {
    state_->SaveState();
  }
}
void Obfuscator::InitializeRenamingMap() {
  // Prevent renaming of all pseudo-keywords and operators.
#define PREVENT_RENAMING(name, value, priority, attr)                         \
  if (Token::CanBeOverloaded(Token::name) ||                                  \
      ((Token::attr & Token::kPseudoKeyword) != 0)) {                         \
    PreventRenaming(value);                                                   \
  }

  DART_TOKEN_LIST(PREVENT_RENAMING)
  DART_KEYWORD_LIST(PREVENT_RENAMING)
#undef PREVENT_RENAMING

  // "this" is a keyword but is not part of the keyword lists above.
  PreventRenaming("this");

  // Prevent renaming of all predefined symbols.
#define PREVENT_RENAMING(name, value) PreventRenaming(value);
  PREDEFINED_SYMBOLS_LIST(PREVENT_RENAMING)
#undef PREVENT_RENAMING

  // Prevent renaming of the native field wrapper classes, which are looked
  // up by name.
  PreventRenaming("NativeFieldWrapperClass1");
  PreventRenaming("NativeFieldWrapperClass2");
  PreventRenaming("NativeFieldWrapperClass3");
  PreventRenaming("NativeFieldWrapperClass4");

  // Prevent renaming of the "cid<ClassName>" identifiers.
#define CLASS_LIST_WITH_NULL(V)                                               \
  V(Null)                                                                     \
  CLASS_LIST_NO_OBJECT(V)
#define PREVENT_RENAMING(clazz) PreventRenaming("cid" #clazz);
  CLASS_LIST_WITH_NULL(PREVENT_RENAMING)
#undef PREVENT_RENAMING
#undef CLASS_LIST_WITH_NULL

  // Prevent renaming of classes and methods that are recognized by the VM.
#define PREVENT_RENAMING(class_name, function_name, recognized_enum,          \
                         fingerprint)                                         \
  PreventRenaming(#class_name);                                               \
  PreventRenaming(#function_name);
  RECOGNIZED_LIST(PREVENT_RENAMING)
#undef PREVENT_RENAMING

#define PREVENT_RENAMING(class_name, function_name, recognized_enum,          \
                         fingerprint)                                         \
  PreventRenaming(#class_name);                                               \
  PreventRenaming(#function_name);
  POLYMORPHIC_TARGET_LIST(PREVENT_RENAMING)
#undef PREVENT_RENAMING

  // Looked up by the VM.
  PreventRenaming("_resolveScriptUri");

  // The precompiler looks up "main".
  PreventRenaming("main");

  // Fast paths for common conditional imports; see Deobfuscate().
  PreventRenaming("dart");
  PreventRenaming("library");
  PreventRenaming("io");
  PreventRenaming("html");

  // Looked up by dart:io.
  PreventRenaming("_RandomAccessFile");
  PreventRenaming("_RandomAccessFileOpsImpl");
  PreventRenaming("ResourceHandle");
  PreventRenaming("_ResourceHandleImpl");
  PreventRenaming("_SocketControlMessageImpl");
  PreventRenaming("_NamespaceImpl");
}
StringPtr Obfuscator::ObfuscationState::RenameImpl(const String& name,
                                                   bool atomic) {
  renamed_ ^= renames_.GetOrNull(name);
  if (renamed_.IsNull()) {
    renamed_ = BuildRename(name, atomic);
    renames_.UpdateOrInsert(name, renamed_);
  }
  return renamed_.ptr();
}
void Obfuscator::PreventRenaming(const char* name) {
  // For constructor names Class.name skip the class name (if any) and the
  // dot.
  const char* dot = strchr(name, '.');
  if (dot != nullptr) {
    name = dot + 1;
  }

  // Empty name: do nothing.
  if (name[0] == '\0') {
    return;
  }

  state_->PreventRenaming(name);
}
void Obfuscator::ObfuscationState::SaveState() {
  saved_state_.SetAt(kSavedStateNameIndex, String::Handle(String::New(name_)));
  saved_state_.SetAt(kSavedStateRenamesIndex, renames_.Release());
  thread_->isolate_group()->object_store()->set_obfuscation_map(saved_state_);
}

void Obfuscator::ObfuscationState::PreventRenaming(const char* name) {
  string_ = Symbols::New(thread_, name);
  PreventRenaming(string_);
}

void Obfuscator::ObfuscationState::PreventRenaming(const String& name) {
  renames_.UpdateOrInsert(name, name);
}
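// Advances name_ to the next generated identifier, cycling each position
// through [a-zA-Z] and carrying into the following position when it
// overflows.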
void Obfuscator::ObfuscationState::NextName() {
  for (intptr_t i = 0;; i++) {
    const char digit = name_[i];
    if (digit == '\0') {
      // Grow the name by one position.
      name_[i] = 'a';
      name_[i + 1] = '\0';
      break;
    } else if (digit < 'Z') {
      name_[i] = digit + 1;
      break;
    } else if (digit == 'Z') {
      // Wrap around and carry into the next position.
      name_[i] = 'a';
    } else if (digit < 'z') {
      name_[i] = digit + 1;
      break;
    } else {
      // 'z' is followed by 'A'.
      name_[i] = 'A';
      break;
    }
  }
}
StringPtr Obfuscator::ObfuscationState::NewAtomicRename(
    bool should_be_private) {
  do {
    NextName();
    renamed_ = Symbols::NewFormatted(thread_, "%s%s",
                                     should_be_private ? "_" : "", name_);
    // Retry if the generated name clashes with a name that must keep its
    // identity renaming.
  } while (renames_.GetOrNull(renamed_) == renamed_.ptr());
  return renamed_.ptr();
}
StringPtr Obfuscator::ObfuscationState::BuildRename(const String& name,
                                                    bool atomic) {
  // Do not rename the positional field names of records ($1, $2, ...).
  if (Record::GetPositionalFieldIndexFromFieldName(name) >= 0) {
    return name.ptr();
  }

  if (atomic) {
    return NewAtomicRename(name.CharAt(0) == '_');
  }

  intptr_t start = 0;
  intptr_t end = name.Length();

  // Follow the rules:
  //
  //         get:foo -> get:newname, set:foo -> set:newname,
  //         _ident@key -> _newname@key.
  bool is_getter = false;
  bool is_setter = false;
  if (Field::IsGetterName(name)) {
    is_getter = true;
    start = kGetterPrefixLength;
  } else if (Field::IsSetterName(name)) {
    is_setter = true;
    start = kSetterPrefixLength;
  }

  const bool is_private = name.CharAt(start) == '_';
  if (is_private) {
    // Rename only the portion before the private key suffix '@'.
    intptr_t i = start;
    while (i < name.Length() && name.CharAt(i) != '@') {
      i++;
    }
    end = i;
  }

  if (is_getter || is_setter || is_private) {
    string_ = Symbols::New(thread_, name, start, end - start);
    // It is OK to call RenameImpl() recursively because it is used only
    // with atomic == true here.
    string_ = RenameImpl(string_, /*atomic=*/true);
    if (is_private && (end < name.Length())) {
      string_ = Symbols::FromConcat(thread_, string_, private_key_);
    }
    if (is_getter) {
      return Symbols::FromGet(thread_, string_);
    } else if (is_setter) {
      return Symbols::FromSet(thread_, string_);
    }
    return string_.ptr();
  } else {
    return NewAtomicRename(is_private);
  }
}
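// Maps obfuscated name pieces back to their original names by searching the
// saved renaming map; pieces with an identity mapping (and the '.'
// separator) are skipped on a fast path.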
void Obfuscator::Deobfuscate(Thread* thread,
                             const GrowableObjectArray& pieces) {
  const Array& obfuscation_state =
      Array::Handle(thread->zone(),
                    thread->isolate_group()->object_store()->obfuscation_map());
  if (obfuscation_state.IsNull()) {
    return;
  }

  const Array& renames = Array::Handle(
      thread->zone(), GetRenamesFromSavedState(obfuscation_state));

  ObfuscationMap renames_map(renames.ptr());
  String& piece = String::Handle();
  for (intptr_t i = 0; i < pieces.Length(); i++) {
    piece ^= pieces.At(i);
    ASSERT(piece.IsSymbol());

    // Fast path: skip '.'.
    if (piece.ptr() == Symbols::Dot().ptr()) {
      continue;
    }

    // Fast path: check if the piece has an identity renaming.
    if (renames_map.GetOrNull(piece) == piece.ptr()) {
      continue;
    }

    // Linear search through the renaming map for the original name.
    ObfuscationMap::Iterator it(&renames_map);
    while (it.MoveNext()) {
      const intptr_t entry = it.Current();
      if (renames_map.GetPayload(entry, 0) == piece.ptr()) {
        piece ^= renames_map.GetKey(entry);
        pieces.SetAt(i, piece);
        break;
      }
    }
  }
  renames_map.Release();
}
static const char* StringToCString(const String& str) {
  const intptr_t len = Utf8::Length(str);
  char* result = new char[len + 1];
  str.ToUTF8(reinterpret_cast<uint8_t*>(result), len);
  result[len] = 0;
  return result;
}

const char** Obfuscator::SerializeMap(Thread* thread) {
  const Array& obfuscation_state =
      Array::Handle(thread->zone(),
                    thread->isolate_group()->object_store()->obfuscation_map());
  if (obfuscation_state.IsNull()) {
    return nullptr;
  }

  const Array& renames = Array::Handle(
      thread->zone(), GetRenamesFromSavedState(obfuscation_state));
  ObfuscationMap renames_map(renames.ptr());

  const char** result = new const char*[renames_map.NumOccupied() * 2 + 1];
  intptr_t idx = 0;
  String& str = String::Handle();

  ObfuscationMap::Iterator it(&renames_map);
  while (it.MoveNext()) {
    const intptr_t entry = it.Current();
    str ^= renames_map.GetKey(entry);
    result[idx++] = StringToCString(str);
    str ^= renames_map.GetPayload(entry, 0);
    result[idx++] = StringToCString(str);
  }
  result[idx] = nullptr;

  renames_map.Release();
  return result;
}