7#if !defined(DART_PRECOMPILED_RUNTIME)
53 max_deoptimization_counter_threshold,
55 "How many times we allow deoptimization before we disallow optimization.");
59 "Optimize only named function");
// VM flag: --print_flow_graph (bool, default false).
// When enabled, the compiler dumps the IR flow graph for every compilation
// (the optimized-only variant is controlled by a separate flag).
60DEFINE_FLAG(
bool, print_flow_graph,
false,
"Print the IR flow graph.");
62 print_flow_graph_optimized,
64 "Print the IR flow graph when optimizing.");
68 "Print the deopt-id to ICData map in optimizing compiler.");
// VM flag: --print_code_source_map (bool, default false).
// Debugging aid: prints the code source map produced during compilation.
69DEFINE_FLAG(
bool, print_code_source_map,
false,
"Print code source map.");
71 stress_test_background_compilation,
73 "Keep background compiler running all the time");
75 stop_on_excessive_deoptimization,
77 "Debugging: stops program if deoptimizing same function too often");
// VM flag: --trace_compiler (bool, default false).
// Traces all compiler operations; checked throughout this file (e.g. to
// gate THR_Print diagnostics on compile start/abort paths).
78DEFINE_FLAG(
bool, trace_compiler,
false,
"Trace compiler operations.");
80 trace_failed_optimization_attempts,
82 "Traces all failed optimization attempts");
84 trace_optimizing_compiler,
86 "Trace only optimizing compiler operations.");
// VM flag: --trace_bailout (bool, default false).
// Prints a message when the SSA compiler bails out of a compilation
// (e.g. aborted background compilation).
87DEFINE_FLAG(
bool, trace_bailout,
false,
"Print bailout from ssa compiler.");
93#if defined(TARGET_ARCH_IA32)
94 FATAL(
"Precompilation not supported on IA32");
97 FLAG_background_compilation =
false;
98 FLAG_enable_mirrors =
false;
99 FLAG_interpret_irregexp =
true;
100 FLAG_lazy_dispatchers =
false;
101 FLAG_link_natives_lazily =
true;
102 FLAG_optimization_counter_threshold = -1;
103 FLAG_polymorphic_with_deopt =
false;
104 FLAG_precompiled_mode =
true;
105 FLAG_reorder_basic_blocks =
true;
106 FLAG_use_field_guards =
false;
107 FLAG_use_cha_deopt =
false;
109#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
112 FLAG_deoptimize_alot =
false;
113 FLAG_deoptimize_every = 0;
114 FLAG_use_osr =
false;
121 "Precompilation mode");
123#ifndef DART_PRECOMPILED_RUNTIME
147 VMTag::kCompileParseRegExpTagId);
148 Zone* zone = parsed_function->
zone();
159 if (compile_data->
simple) {
179 parsed_function, *ic_data_array, osr_id);
180 if (
result.error_message !=
nullptr) {
184 backtrack_goto_ =
result.backtrack_goto;
194 result.graph_entry->RelinkToOsrEntry(zone,
result.num_blocks);
204 if (
function.IsIrregexpFunction()) {
214 ASSERT(thread->IsDartMutatorThread());
215 const Function&
function = Function::CheckedHandle(zone, arguments.ArgAt(0));
248 if (
function.deoptimization_counter() >=
249 FLAG_max_deoptimization_counter_threshold) {
250 if (FLAG_trace_failed_optimization_attempts ||
251 FLAG_stop_on_excessive_deoptimization) {
252 THR_Print(
"Too many deoptimizations: %s\n",
253 function.ToFullyQualifiedCString());
254 if (FLAG_stop_on_excessive_deoptimization) {
255 FATAL(
"Stop on excessive deoptimization");
261 function.SetUsageCounter(INT32_MIN);
264 if (FLAG_optimization_filter !=
nullptr) {
269 intptr_t len = strlen(FLAG_optimization_filter) + 1;
270 char* filter =
new char[len];
271 strncpy(filter, FLAG_optimization_filter, len);
272 char* token = strtok_r(filter,
",", &save_ptr);
274 while (token !=
nullptr) {
279 token = strtok_r(
nullptr,
",", &save_ptr);
283 function.SetUsageCounter(INT32_MIN);
290 if (FLAG_trace_failed_optimization_attempts) {
293 function.SetUsageCounter(INT32_MIN);
309 : parsed_function_(parsed_function),
310 optimized_(optimized),
312 thread_(
Thread::Current()) {}
317 ParsedFunction* parsed_function()
const {
return parsed_function_; }
318 bool optimized()
const {
return optimized_; }
319 intptr_t osr_id()
const {
return osr_id_; }
320 Thread* thread()
const {
return thread_; }
321 IsolateGroup* isolate_group()
const {
return thread_->
isolate_group(); }
322 CodePtr FinalizeCompilation(compiler::Assembler* assembler,
323 FlowGraphCompiler* graph_compiler,
324 FlowGraph* flow_graph);
326 ParsedFunction* parsed_function_;
327 const bool optimized_;
328 const intptr_t osr_id_;
329 Thread*
const thread_;
334CodePtr CompileParsedFunctionHelper::FinalizeCompilation(
335 compiler::Assembler* assembler,
336 FlowGraphCompiler* graph_compiler,
337 FlowGraph* flow_graph) {
348 if (optimized() &&
function.ForceOptimize() &&
function.HasOptimizedCode()) {
351 Zone*
const zone = thread()->
zone();
355 Array& deopt_info_array =
Array::Handle(zone, Object::empty_array().ptr());
356 deopt_info_array = graph_compiler->CreateDeoptInfo(assembler);
363 code.set_is_optimized(optimized());
369 function.SetUsageCounter(INT32_MIN);
372 graph_compiler->FinalizePcDescriptors(code);
373 code.set_deopt_info_array(deopt_info_array);
375 graph_compiler->FinalizeStackMaps(code);
376 graph_compiler->FinalizeVarDescriptors(code);
377 graph_compiler->FinalizeExceptionHandlers(code);
378 graph_compiler->FinalizeCatchEntryMovesMap(code);
379 graph_compiler->FinalizeStaticCallTargetsTable(code);
380 graph_compiler->FinalizeCodeSourceMap(code);
383 ASSERT(optimized() && thread()->IsDartMutatorThread());
384 code.set_is_force_optimized(
true);
387 }
else if (optimized()) {
398 const bool trace_compiler =
399 FLAG_trace_compiler || FLAG_trace_optimizing_compiler;
400 bool code_is_valid =
true;
401 if (flow_graph->parsed_function().guarded_fields()->Length() != 0) {
403 flow_graph->parsed_function().guarded_fields();
405 FieldSet::Iterator it = guarded_fields->GetIterator();
406 while (
const Field** field = it.Next()) {
407 ASSERT(!(*field)->IsOriginal());
408 original = (*field)->Original();
409 if (!(*field)->IsConsistentWith(original)) {
410 code_is_valid =
false;
411 if (trace_compiler) {
412 THR_Print(
"--> FAIL: Field %s guarded state changed.",
413 (*field)->ToCString());
420 if (!thread()->compiler_state().cha().IsConsistentWithCurrentHierarchy()) {
421 code_is_valid =
false;
422 if (trace_compiler) {
423 THR_Print(
"--> FAIL: Class hierarchy has new subclasses.");
430 function.InstallOptimizedCode(code);
446 thread()->isolate_group()->optimization_counter_threshold() - 100);
450 if (!
code.IsNull()) {
457 flow_graph->parsed_function().guarded_fields();
459 FieldSet::Iterator it = guarded_fields->GetIterator();
460 while (
const Field** guarded_field = it.Next()) {
461 field = (*guarded_field)->Original();
462 field.RegisterDependentCode(code);
467 graph_compiler->deopt_id_to_ic_data(),
469 flow_graph->coverage_array());
470 function.set_unoptimized_code(code);
480 if (
function.IsFfiCallbackTrampoline()) {
489 ASSERT(!FLAG_precompiled_mode);
491 if (optimized() && !
function.IsOptimizable()) {
494 Zone*
const zone = thread()->
zone();
503 volatile bool done =
false;
505 volatile intptr_t far_branch_level = 0;
515 if (setjmp(*jump.
Set()) == 0) {
531 function.RestoreICDataMap(ic_data_array, clone_ic_data);
538 if (FLAG_print_ic_data_map) {
539 for (intptr_t i = 0; i < ic_data_array->
length(); i++) {
540 if ((*ic_data_array)[i] !=
nullptr) {
549 zone, parsed_function(), ic_data_array, osr_id(), optimized());
552 const bool print_flow_graph =
553 (FLAG_print_flow_graph ||
554 (optimized() && FLAG_print_flow_graph_optimized)) &&
557 if (print_flow_graph && !optimized()) {
563 "BlockScheduler::AssignEdgeWeights");
583 &assembler, flow_graph, *parsed_function(), optimized(),
593 auto install_code_fun = [&]() {
595 FinalizeCompilation(&assembler, &graph_compiler, flow_graph);
618 thread()->isolate_group()->program_lock());
633 install_code_fun,
true);
641 }
else if (FLAG_disassemble_optimized && optimized() &&
652 if (
error.ptr() == Object::branch_offset_error().ptr()) {
658 }
else if (
error.ptr() == Object::speculative_inlining_error().ptr()) {
664 if (FLAG_trace_bailout) {
685 volatile bool optimized,
690 ASSERT(!FLAG_precompiled_mode);
692 if (
function.ForceOptimize()) optimized =
true;
694 if (setjmp(*jump.
Set()) == 0) {
697 const bool trace_compiler =
698 FLAG_trace_compiler || (FLAG_trace_optimizing_compiler && optimized);
699 Timer per_compile_timer;
700 per_compile_timer.
Start();
704 if (trace_compiler) {
705 const intptr_t token_size =
function.SourceSize();
706 THR_Print(
"Compiling %s%sfunction %s: '%s' @ token %s, size %" Pd "\n",
708 (optimized ?
"optimized " :
""),
711 function.token_pos().ToCString(), token_size);
729 if (FLAG_trace_compiler) {
730 THR_Print(
"Aborted background compilation: %s\n",
731 function.ToFullyQualifiedCString());
736 if (
error.ptr() == Object::background_compilation_error().ptr()) {
737 if (FLAG_trace_compiler) {
739 "--> discarding background compilation for '%s' (will "
740 "try to re-compile again later)\n",
741 function.ToFullyQualifiedCString());
748 }
else if (
error.IsLanguageError() &&
750 if (FLAG_trace_compiler) {
751 THR_Print(
"--> disabling optimizations for '%s'\n",
752 function.ToFullyQualifiedCString());
764 if (
error.IsLanguageError() &&
769 if (trace_compiler) {
770 THR_Print(
"--> disabling optimizations for '%s'\n",
771 function.ToFullyQualifiedCString());
772 }
else if (FLAG_trace_failed_optimization_attempts) {
774 function.ToFullyQualifiedCString());
786 (
error.IsLanguageError() &&
793 per_compile_timer.
Stop();
795 if (trace_compiler) {
798 function.ToFullyQualifiedCString(), code.PayloadStart(),
809 if (
error.ptr() == Object::background_compilation_error().ptr()) {
811 if (FLAG_trace_bailout) {
812 THR_Print(
"Aborted background compilation: %s\n",
813 function.ToFullyQualifiedCString());
826#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
830#if defined(DART_PRECOMPILED_RUNTIME)
831 FATAL(
"Precompilation missed function %s (%s, %s)\n",
832 function.ToLibNamePrefixedQualifiedCString(),
837 VMTagScope tagScope(thread, VMTag::kCompileUnoptimizedTagId);
838#if defined(SUPPORT_TIMELINE)
839 const char* event_name;
841 event_name =
"CompileFunctionUnoptimizedBackground";
843 event_name =
"CompileFunction";
851 const bool optimized =
function.ForceOptimize();
863 original_code =
function.CurrentCode();
871 return Error::Cast(
result).ptr();
881 if (FLAG_trace_compiler) {
890 VMTagScope tag_scope(thread, VMTag::kCompileOptimizedTagId);
892#if defined(SUPPORT_TIMELINE)
893 const char* event_name;
895 event_name =
"CompileFunctionOptimizedOSR";
897 event_name =
"CompileFunctionOptimizedBackground";
899 event_name =
"CompileFunctionOptimized";
911 ASSERT(!code.is_optimized());
912 ASSERT(!FLAG_precompiled_mode);
923 if (setjmp(*jump.
Set()) == 0) {
932 parsed_function, ic_data_array, context_level_array,
934 builder.BuildGraph();
941 ASSERT(!var_descs.IsNull());
942 code.set_var_descriptors(var_descs);
958 for (
int i = 0; i < functions.
Length(); i++) {
959 func ^= functions.
At(i);
961 if (!func.
HasCode() && !func.is_abstract()) {
964 return Error::Cast(
result).ptr();
973 if (FLAG_trace_compiler) {
974 THR_Print(
"ABORT background compilation: %s\n", msg);
978 ASSERT(stream !=
nullptr);
979 TimelineEvent*
event = stream->StartEvent();
980 if (
event !=
nullptr) {
981 event->Instant(
"AbortBackgroundCompilation");
982 event->SetNumArguments(1);
983 event->CopyArgument(0,
"reason", msg);
989 deopt_id, Object::background_compilation_error());
996 : next_(nullptr), function_(
function.ptr()) {}
1010 return reinterpret_cast<ObjectPtr*
>(&function_);
1015 FunctionPtr function_;
1028 ASSERT(visitor !=
nullptr);
1030 while (p !=
nullptr) {
1036 bool IsEmpty()
const {
return first_ ==
nullptr; }
1041 if (first_ ==
nullptr) {
1043 ASSERT(last_ ==
nullptr);
1045 ASSERT(last_ !=
nullptr);
1049 ASSERT(first_ !=
nullptr && last_ !=
nullptr);
1059 return e->Function();
1064 ASSERT(first_ !=
nullptr);
1066 first_ = first_->
next();
1067 if (first_ ==
nullptr) {
1075 while (p !=
nullptr) {
1076 if (p->function() == obj.
ptr()) {
1089 ASSERT((first_ ==
nullptr) && (last_ ==
nullptr));
1102 : background_compiler_(background_compiler) {}
1106 virtual void Run() { background_compiler_->
Run(); }
1114 : isolate_group_(isolate_group),
1119 disabled_depth_(0) {}
1123 delete function_queue_;
1144 if (element !=
nullptr) {
1152 FLAG_stress_test_background_compilation) {
1186 if (disabled_depth_ > 0)
return false;
1187 if (!running_ && done_) {
1215void BackgroundCompiler::Stop() {
1221 StopLocked(thread, &ml);
1224void BackgroundCompiler::StopLocked(Thread* thread,
1225 SafepointMonitorLocker* locker) {
1227 function_queue_->
Clear();
1233void BackgroundCompiler::Enable() {
1235 ASSERT(!thread->BypassSafepoints());
1236 ASSERT(thread->CanAcquireSafepointLocks());
1238 SafepointMonitorLocker ml(&monitor_);
1240 if (disabled_depth_ < 0) {
1241 FATAL(
"Mismatched number of calls to BackgroundCompiler::Enable/Disable.");
1245void BackgroundCompiler::Disable() {
1247 ASSERT(!thread->BypassSafepoints());
1248 ASSERT(thread->CanAcquireSafepointLocks());
1250 SafepointMonitorLocker ml(&monitor_);
1253 StopLocked(thread, &ml);
1265 const Function&
function = Function::CheckedHandle(zone, arguments.ArgAt(0));
1266 FATAL(
"Precompilation missed function %s (%s, %s)\n",
1267 function.ToLibNamePrefixedQualifiedCString(),
1282 FATAL(
"Attempt to compile function %s",
function.ToCString());
1288 FATAL(
"Attempt to compile function %s",
function.ToCString());
1295 FATAL(
"Attempt to compile function %s",
function.ToCString());
1304 FATAL(
"Attempt to compile class %s", cls.ToCString());
1321void BackgroundCompiler::Stop() {
1325void BackgroundCompiler::Enable() {
1329void BackgroundCompiler::Disable() {
static void done(const char *config, const char *src, const char *srcOptions, const char *name)
#define DEBUG_ASSERT(cond)
#define RELEASE_ASSERT(cond)
ObjectPtr At(intptr_t index) const
void Add(QueueElement *value)
BackgroundCompilationQueue()
virtual ~BackgroundCompilationQueue()
void VisitObjectPointers(ObjectPointerVisitor *visitor)
QueueElement * Peek() const
bool ContainsObj(const Object &obj) const
FunctionPtr PeekFunction() const
BackgroundCompilerTask(BackgroundCompiler *background_compiler)
virtual ~BackgroundCompilerTask()
void VisitPointers(ObjectPointerVisitor *visitor)
BackgroundCompiler(IsolateGroup *isolate_group)
bool EnqueueCompilation(const Function &function)
BackgroundCompilationQueue * function_queue() const
virtual ~BackgroundCompiler()
static void AssignEdgeWeights(FlowGraph *flow_graph)
void RegisterDependencies(const Code &code) const
bool is_finalized() const
ArrayPtr current_functions() const
static void NotifyCodeObservers(const Code &code, bool optimized)
static CodePtr FinalizeCode(FlowGraphCompiler *compiler, compiler::Assembler *assembler, PoolAttachment pool_attachment, bool optimized, CodeStatistics *stats)
virtual FlowGraph * BuildFlowGraph(Zone *zone, ParsedFunction *parsed_function, ZoneGrowableArray< const ICData * > *ic_data_array, intptr_t osr_id, bool optimized)=0
virtual void ParseFunction(ParsedFunction *parsed_function)=0
static CompilationPipeline * New(Zone *zone, const Function &function)
CompileParsedFunctionHelper(ParsedFunction *parsed_function, bool optimized, intptr_t osr_id)
CodePtr Compile(CompilationPipeline *pipeline)
static DART_WARN_UNUSED_RESULT FlowGraph * RunPipeline(PipelineMode mode, CompilerPassState *state)
static void GenerateCode(CompilerPassState *state)
void set_function(const Function &function)
static bool ShouldTrace()
static CompilerState & Current()
static bool IsBackgroundCompilation()
static bool CanOptimizeFunction(Thread *thread, const Function &function)
static constexpr intptr_t kNoOSRDeoptId
static ErrorPtr EnsureUnoptimizedCode(Thread *thread, const Function &function)
static ObjectPtr CompileFunction(Thread *thread, const Function &function)
static ObjectPtr CompileOptimizedFunction(Thread *thread, const Function &function, intptr_t osr_id=kNoOSRDeoptId)
static ErrorPtr CompileAllFunctions(const Class &cls)
static void ComputeLocalVarDescriptors(const Code &code)
static void AbortBackgroundCompilation(intptr_t deopt_id, const char *msg)
void ParseFunction(ParsedFunction *parsed_function) override
FlowGraph * BuildFlowGraph(Zone *zone, ParsedFunction *parsed_function, ZoneGrowableArray< const ICData * > *ic_data_array, intptr_t osr_id, bool optimized) override
static ThreadPool * thread_pool()
static void DisassembleCode(const Function &function, const Code &code, bool optimized)
static void PrintGraph(const char *phase, FlowGraph *flow_graph)
static bool ShouldPrint(const Function &function, uint8_t **compiler_pass_filter=nullptr)
static void PrintICData(const ICData &ic_data, intptr_t num_checks_to_print=kPrintAll)
static constexpr CompilationMode CompilationModeFrom(bool is_optimizing)
bool should_reorder_blocks() const
static const char * KindToCString(UntaggedFunction::Kind kind)
CodePtr unoptimized_code() const
bool IsDebugging(Thread *thread, const Function &function)
void NotifyCompilation(const Function &func)
void ParseFunction(ParsedFunction *parsed_function) override
FlowGraph * BuildFlowGraph(Zone *zone, ParsedFunction *parsed_function, ZoneGrowableArray< const ICData * > *ic_data_array, intptr_t osr_id, bool optimized) override
GroupDebugger * debugger() const
intptr_t optimization_counter_threshold() const
static IsolateGroup * Current()
void RunWithStoppedMutators(T single_current_mutator, S otherwise, bool use_force_growth_in_otherwise=false)
LocalVarDescriptorsPtr GetVarDescriptors(const Function &func, ZoneGrowableArray< intptr_t > *context_level_array)
DART_NORETURN void Jump(int value, const Error &error)
void VisitPointer(ObjectPtr *p)
static Object & ZoneHandle()
void SetRegExpCompileData(RegExpCompileData *regexp_compile_data)
const Function & function() const
LocalScope * scope() const
void AllocateIrregexpVariables(intptr_t num_stack_locals)
RegExpCompileData * regexp_compile_data() const
void set_next(QueueElement *elem)
ObjectPtr function() const
ObjectPtr * function_untag()
QueueElement(const Function &function)
QueueElement * next() const
FunctionPtr Function() const
static CompilationResult CompileIR(RegExpCompileData *input, const ParsedFunction *parsed_function, const ZoneGrowableArray< const ICData * > &ic_data_array, intptr_t osr_id)
static void ParseRegExp(const String &input, RegExpFlags regexp_flags, RegExpCompileData *result)
void set_is_simple() const
StringPtr pattern() const
void set_is_complex() const
void set_num_bracket_expressions(SmiPtr value) const
void set_capture_name_map(const Array &array) const
RegExpFlags flags() const
static DART_NORETURN void LongJump(const Error &error)
static StringPtr New(const char *cstr, Heap::Space space=Heap::kNew)
LongJumpScope * long_jump_base() const
bool CanAcquireSafepointLocks() const
static Thread * Current()
DART_WARN_UNUSED_RESULT ErrorPtr StealStickyError()
CompilerState & compiler_state()
void set_sticky_error(const Error &value)
static void ExitIsolateGroupAsHelper(bool bypass_safepoint)
bool IsDartMutatorThread() const
Isolate * isolate() const
bool BypassSafepoints() const
IsolateGroup * isolate_group() const
static bool EnterIsolateGroupAsHelper(IsolateGroup *isolate_group, TaskKind kind, bool bypass_safepoint)
int64_t TotalElapsedTime() const
#define THR_Print(format,...)
const uint8_t uint32_t uint32_t GError ** error
#define DECLARE_FLAG(type, name)
#define DEFINE_FLAG(type, name, default_value, comment)
#define DEFINE_FLAG_HANDLER(handler, name, comment)
Dart_NativeFunction function
#define HANDLESCOPE(thread)
void SetFfiCallbackCode(Thread *thread, const Function &ffi_trampoline, const Code &code)
static void PrecompilationModeHandler(bool value)
DirectChainedHashMap< FieldKeyValueTrait > FieldSet
const char *const function_name
static ObjectPtr CompileFunctionHelper(CompilationPipeline *pipeline, const Function &function, volatile bool optimized, intptr_t osr_id)
#define DEFINE_RUNTIME_ENTRY(name, argument_count)
CallSpecializer * call_specializer
GrowableArray< TokenPosition > inline_id_to_token_pos
GrowableArray< intptr_t > caller_inline_id
GrowableArray< const Function * > inline_id_to_function
FlowGraphCompiler * graph_compiler
#define TIMELINE_FUNCTION_COMPILATION_DURATION(thread, name, function)
#define TIMELINE_DURATION(thread, stream, name)