Flutter Engine
The Flutter Engine
Loading...
Searching...
No Matches
compiler.cc
Go to the documentation of this file.
1// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
6
7#if !defined(DART_PRECOMPILED_RUNTIME)
8#include "vm/code_patcher.h"
22#include "vm/compiler/cha.h"
29#include "vm/dart_entry.h"
30#include "vm/debugger.h"
32#include "vm/exceptions.h"
33#include "vm/flags.h"
34#include "vm/kernel.h"
35#include "vm/longjump.h"
36#include "vm/object.h"
37#include "vm/object_store.h"
38#include "vm/os.h"
39#include "vm/parser.h"
40#include "vm/regexp_assembler.h"
41#include "vm/regexp_parser.h"
42#include "vm/runtime_entry.h"
43#include "vm/symbols.h"
44#include "vm/tags.h"
45#include "vm/timeline.h"
46#include "vm/timer.h"
47#endif
48
49namespace dart {
50
52 int,
53 max_deoptimization_counter_threshold,
54 16,
55 "How many times we allow deoptimization before we disallow optimization.");
57 optimization_filter,
58 nullptr,
59 "Optimize only named function");
60DEFINE_FLAG(bool, print_flow_graph, false, "Print the IR flow graph.");
62 print_flow_graph_optimized,
63 false,
64 "Print the IR flow graph when optimizing.");
66 print_ic_data_map,
67 false,
68 "Print the deopt-id to ICData map in optimizing compiler.");
69DEFINE_FLAG(bool, print_code_source_map, false, "Print code source map.");
71 stress_test_background_compilation,
72 false,
73 "Keep background compiler running all the time");
75 stop_on_excessive_deoptimization,
76 false,
77 "Debugging: stops program if deoptimizing same function too often");
78DEFINE_FLAG(bool, trace_compiler, false, "Trace compiler operations.");
80 trace_failed_optimization_attempts,
81 false,
82 "Traces all failed optimization attempts");
84 trace_optimizing_compiler,
85 false,
86 "Trace only optimizing compiler operations.");
87DEFINE_FLAG(bool, trace_bailout, false, "Print bailout from ssa compiler.");
88
89DECLARE_FLAG(bool, trace_failed_optimization_attempts);
90
91static void PrecompilationModeHandler(bool value) {
92 if (value) {
93#if defined(TARGET_ARCH_IA32)
94 FATAL("Precompilation not supported on IA32");
95#endif
96
97 FLAG_background_compilation = false;
98 FLAG_enable_mirrors = false;
99 FLAG_interpret_irregexp = true;
100 FLAG_lazy_dispatchers = false;
101 FLAG_link_natives_lazily = true;
102 FLAG_optimization_counter_threshold = -1;
103 FLAG_polymorphic_with_deopt = false;
104 FLAG_precompiled_mode = true;
105 FLAG_reorder_basic_blocks = true;
106 FLAG_use_field_guards = false;
107 FLAG_use_cha_deopt = false;
108
109#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
110 // Set flags affecting runtime accordingly for gen_snapshot.
111 // These flags are constants with PRODUCT and DART_PRECOMPILED_RUNTIME.
112 FLAG_deoptimize_alot = false; // Used in some tests.
113 FLAG_deoptimize_every = 0; // Used in some tests.
114 FLAG_use_osr = false;
115#endif
116 }
117}
118
120 precompilation,
121 "Precompilation mode");
122
123#ifndef DART_PRECOMPILED_RUNTIME
124
126 // Nothing to do here.
127}
128
130 Zone* zone,
131 ParsedFunction* parsed_function,
133 intptr_t osr_id,
134 bool optimized) {
135 kernel::FlowGraphBuilder builder(parsed_function, ic_data_array,
136 /* not building var desc */ nullptr,
137 /* not inlining */ nullptr, optimized,
138 osr_id);
139 FlowGraph* graph = builder.BuildGraph();
140 ASSERT(graph != nullptr);
141 return graph;
142}
143
// NOTE(review): the signature line is missing from this listing; judging by
// the kCompileParseRegExpTagId tag below, this is presumably the irregexp
// pipeline's ParseFunction — confirm against the full source.
// Parses the regexp pattern attached to [parsed_function]'s function and
// records the parse results (capture count, names, simple/complex) on the
// RegExp object and on [parsed_function].
    ParsedFunction* parsed_function) {
  VMTagScope tagScope(parsed_function->thread(),
                      VMTag::kCompileParseRegExpTagId);
  Zone* zone = parsed_function->zone();
  RegExp& regexp = RegExp::Handle(parsed_function->function().regexp());

  const String& pattern = String::Handle(regexp.pattern());

  RegExpCompileData* compile_data = new (zone) RegExpCompileData();
  // Parsing failures are handled in the RegExp factory constructor.
  RegExpParser::ParseRegExp(pattern, regexp.flags(), compile_data);

  // Record what the parser learned on the RegExp object itself.
  regexp.set_num_bracket_expressions(compile_data->capture_count);
  regexp.set_capture_name_map(compile_data->capture_name_map);
  if (compile_data->simple) {
    regexp.set_is_simple();
  } else {
    regexp.set_is_complex();
  }

  parsed_function->SetRegExpCompileData(compile_data);

  // Variables are allocated after compilation.
}
169
// NOTE(review): several lines are missing from this listing — the function
// signature, the ic_data_array parameter, and the irregexp-engine call that
// produces `result` consumed below. Consult the full source before editing.
// Builds a flow graph for an irregexp (regexp-matcher) function, surfacing
// engine failures as a LanguageError and wiring up OSR entry if requested.
    Zone* zone,
    ParsedFunction* parsed_function,
    intptr_t osr_id,
    bool optimized) {
  // Compile to the dart IR.
      parsed_function, *ic_data_array, osr_id);
  if (result.error_message != nullptr) {
    // Engine reported a failure; report it as a language error.
        LanguageError::New(String::Handle(String::New(result.error_message)))));
  }
  backtrack_goto_ = result.backtrack_goto;

  // Allocate variables now that we know the number of locals.
  parsed_function->AllocateIrregexpVariables(result.num_stack_locals);

  // When compiling for OSR, use a depth first search to find the OSR
  // entry and make graph entry jump to it instead of normal entry.
  // Catch entries are always considered reachable, even if they
  // become unreachable after OSR.
  if (osr_id != Compiler::kNoOSRDeoptId) {
    result.graph_entry->RelinkToOsrEntry(zone, result.num_blocks);
  }
  // Irregexp graphs have no prologue, hence the (-1, -1) sentinel range.
  PrologueInfo prologue_info(-1, -1);
  return new (zone)
      FlowGraph(*parsed_function, result.graph_entry, result.num_blocks,
                prologue_info, FlowGraph::CompilationModeFrom(optimized));
}
201
// NOTE(review): the signature line is missing from this listing; this is
// presumably the CompilationPipeline factory. Picks the irregexp pipeline
// for regexp-matcher functions, and the ordinary Dart pipeline otherwise.
    const Function& function) {
  if (function.IsIrregexpFunction()) {
    return new (zone) IrregexpCompilationPipeline();
  } else {
    return new (zone) DartCompilationPipeline();
  }
}
210
// Compile a function. Should be called only if the function has not been
// compiled yet.
// Arg0: function object.
// Note: `thread`, `zone` and `arguments` are injected by the
// DEFINE_RUNTIME_ENTRY macro.
DEFINE_RUNTIME_ENTRY(CompileFunction, 1) {
  ASSERT(thread->IsDartMutatorThread());
  const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(0));

  {
    // Another isolate's mutator thread may have created [function] and
    // published it via an ICData, MegamorphicCache etc. Entering the lock below
    // is an acquire operation that pairs with the release operation when the
    // other isolate exited the lock, ensuring the initializing stores for
    // [function] are visible in the current thread.
    // The scope is otherwise empty on purpose: only the acquire/release
    // pairing is needed, not holding the lock during compilation.
    SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
  }

  // Will throw if compilation failed (e.g. with compile-time error).
  function.EnsureHasCode();
}
229
// NOTE(review): the signature line is missing from this listing; from the
// logic this is presumably Compiler::CanOptimizeFunction(thread, function),
// returning true iff [function] should be optimized now — confirm against
// the full source.
#if !defined(PRODUCT)
  if (thread->isolate_group()->debugger()->IsDebugging(thread, function)) {
    // We cannot set breakpoints and single step in optimized code,
    // so do not optimize the function. Bump usage counter down to avoid
    // repeatedly entering the runtime for an optimization attempt.
    function.SetUsageCounter(0);

    // If the optimization counter = 1, the unoptimized code will come back here
    // immediately, causing an infinite compilation loop. The compiler raises
    // the threshold for functions with breakpoints, so we drop the unoptimized
    // code to force it to be recompiled.
    if (thread->isolate_group()->optimization_counter_threshold() < 2) {
      function.ClearCode();
    }
    return false;
  }
#endif
  // Give up optimizing functions that keep deoptimizing.
  if (function.deoptimization_counter() >=
      FLAG_max_deoptimization_counter_threshold) {
    if (FLAG_trace_failed_optimization_attempts ||
        FLAG_stop_on_excessive_deoptimization) {
      THR_Print("Too many deoptimizations: %s\n",
                function.ToFullyQualifiedCString());
      if (FLAG_stop_on_excessive_deoptimization) {
        FATAL("Stop on excessive deoptimization");
      }
    }
    // The function will not be optimized any longer. This situation can occur
    // mostly with small optimization counter thresholds.
    function.SetIsOptimizable(false);
    // INT32_MIN keeps the counter from re-triggering optimization attempts.
    function.SetUsageCounter(INT32_MIN);
    return false;
  }
  if (FLAG_optimization_filter != nullptr) {
    // FLAG_optimization_filter is a comma-separated list of strings that are
    // matched against the fully-qualified function name.
    char* save_ptr;  // Needed for strtok_r.
    const char* function_name = function.ToFullyQualifiedCString();
    intptr_t len = strlen(FLAG_optimization_filter) + 1;  // Length with \0.
    // A mutable copy is required because strtok_r writes into its argument.
    char* filter = new char[len];
    strncpy(filter, FLAG_optimization_filter, len);  // strtok modifies arg 1.
    char* token = strtok_r(filter, ",", &save_ptr);
    bool found = false;
    while (token != nullptr) {
      // Substring match: any token contained in the qualified name counts.
      if (strstr(function_name, token) != nullptr) {
        found = true;
        break;
      }
      token = strtok_r(nullptr, ",", &save_ptr);
    }
    delete[] filter;
    if (!found) {
      function.SetUsageCounter(INT32_MIN);
      return false;
    }
  }
  if (!function.IsOptimizable()) {
    // Huge methods (code size above --huge_method_cutoff_in_code_size) become
    // non-optimizable only after the code has been generated.
    if (FLAG_trace_failed_optimization_attempts) {
      THR_Print("Not optimizable: %s\n", function.ToFullyQualifiedCString());
    }
    function.SetUsageCounter(INT32_MIN);
    return false;
  }
  return true;
}
298
300 // For now: compilation in non mutator thread is the background compilation.
302}
303
// NOTE(review): the class-declaration line and the constructor's first line
// are missing from this listing; the members below belong to
// CompileParsedFunctionHelper (per DISALLOW_COPY_AND_ASSIGN), the helper
// that drives code generation for an already-parsed function.
 public:
      bool optimized,
      intptr_t osr_id)
      : parsed_function_(parsed_function),
        optimized_(optimized),
        osr_id_(osr_id),
        thread_(Thread::Current()) {}

  // Runs the given pipeline over the parsed function and returns the
  // installed Code, or Code::null() on bailout.
  CodePtr Compile(CompilationPipeline* pipeline);

 private:
  ParsedFunction* parsed_function() const { return parsed_function_; }
  bool optimized() const { return optimized_; }
  intptr_t osr_id() const { return osr_id_; }
  Thread* thread() const { return thread_; }
  IsolateGroup* isolate_group() const { return thread_->isolate_group(); }
  // Installs generated code on the function; see definition below.
  CodePtr FinalizeCompilation(compiler::Assembler* assembler,
                              FlowGraphCompiler* graph_compiler,
                              FlowGraph* flow_graph);

  ParsedFunction* parsed_function_;  // Not owned.
  const bool optimized_;             // Optimizing vs. unoptimized compile.
  const intptr_t osr_id_;            // Compiler::kNoOSRDeoptId if not OSR.
  Thread* const thread_;             // Thread that created this helper.

  DISALLOW_COPY_AND_ASSIGN(CompileParsedFunctionHelper);
};
333
// Finalizes JIT compilation: materializes the Code object from the
// assembler/graph-compiler output, validates the optimistic assumptions the
// optimized code was built under (field guards, CHA), and installs the code
// on the function. Returns the installed code, or Code::null() if optimized
// code turned out to be invalid.
// NOTE(review): a few lines are missing from this listing (most importantly
// the declaration of `code` via what is presumably Code::FinalizeCode);
// consult the full source before editing.
CodePtr CompileParsedFunctionHelper::FinalizeCompilation(
    compiler::Assembler* assembler,
    FlowGraphCompiler* graph_compiler,
    FlowGraph* flow_graph) {
  ASSERT(!CompilerState::Current().is_aot());
  const Function& function = parsed_function()->function();

  // If another thread compiled and installed unoptimized code already,
  // skip installation.
  if (!optimized() && function.unoptimized_code() != Code::null()) {
    return function.unoptimized_code();
  }
  // If another thread compiled and installed optimized code for the
  // force-optimized function, skip installation.
  if (optimized() && function.ForceOptimize() && function.HasOptimizedCode()) {
    return function.CurrentCode();
  }
  Zone* const zone = thread()->zone();

  // CreateDeoptInfo uses the object pool and needs to be done before
  // FinalizeCode.
  Array& deopt_info_array = Array::Handle(zone, Object::empty_array().ptr());
  deopt_info_array = graph_compiler->CreateDeoptInfo(assembler);

  // Allocates instruction object. Since this occurs only at safepoint,
  // there can be no concurrent access to the instruction page.
  // NOTE(review): the line declaring `code` is missing from this listing.
      graph_compiler, assembler, Code::PoolAttachment::kAttachPool, optimized(),
      /*stats=*/nullptr));
  code.set_is_optimized(optimized());
  code.set_owner(function);

  if (!function.IsOptimizable()) {
    // A function with huge unoptimized code can become non-optimizable
    // after generating unoptimized code.
    function.SetUsageCounter(INT32_MIN);
  }

  // Attach all per-code metadata produced during compilation.
  graph_compiler->FinalizePcDescriptors(code);
  code.set_deopt_info_array(deopt_info_array);

  graph_compiler->FinalizeStackMaps(code);
  graph_compiler->FinalizeVarDescriptors(code);
  graph_compiler->FinalizeExceptionHandlers(code);
  graph_compiler->FinalizeCatchEntryMovesMap(code);
  graph_compiler->FinalizeStaticCallTargetsTable(code);
  graph_compiler->FinalizeCodeSourceMap(code);

  if (function.ForceOptimize()) {
    // Force-optimized code is installed unconditionally: it cannot
    // deoptimize into unoptimized code.
    ASSERT(optimized() && thread()->IsDartMutatorThread());
    code.set_is_force_optimized(true);
    function.AttachCode(code);
    function.SetWasCompiled(true);
  } else if (optimized()) {
    // We cannot execute generated code while installing code.
    ASSERT(Thread::Current()->OwnsGCSafepoint() ||
           (Thread::Current()->IsDartMutatorThread() &&
            IsolateGroup::Current()->ContainsOnlyOneIsolate()));
    // We are validating our CHA / field guard / ... assumptions. To prevent
    // another thread from concurrently changing them, we have to guarantee
    // mutual exclusion.
    // NOTE(review): the first line of this assertion is missing from this
    // listing.
        IsolateGroup::Current()->program_lock()->IsCurrentThreadReader());

    const bool trace_compiler =
        FLAG_trace_compiler || FLAG_trace_optimizing_compiler;
    bool code_is_valid = true;
    // Re-check every guarded field the optimized code depends on: if any
    // guard state changed since graph construction, the code is stale.
    if (flow_graph->parsed_function().guarded_fields()->Length() != 0) {
      const FieldSet* guarded_fields =
          flow_graph->parsed_function().guarded_fields();
      Field& original = Field::Handle();
      FieldSet::Iterator it = guarded_fields->GetIterator();
      while (const Field** field = it.Next()) {
        ASSERT(!(*field)->IsOriginal());
        original = (*field)->Original();
        if (!(*field)->IsConsistentWith(original)) {
          code_is_valid = false;
          if (trace_compiler) {
            THR_Print("--> FAIL: Field %s guarded state changed.",
                      (*field)->ToCString());
          }
          break;
        }
      }
    }

    // Likewise re-validate the class-hierarchy assumptions (CHA).
    if (!thread()->compiler_state().cha().IsConsistentWithCurrentHierarchy()) {
      code_is_valid = false;
      if (trace_compiler) {
        THR_Print("--> FAIL: Class hierarchy has new subclasses.");
      }
    }

    // Setting breakpoints at runtime could make a function non-optimizable.
    if (code_is_valid && Compiler::CanOptimizeFunction(thread(), function)) {
      if (osr_id() == Compiler::kNoOSRDeoptId) {
        function.InstallOptimizedCode(code);
      } else {
        // OSR is not compiled in background.
        // NOTE(review): a line is missing here in this listing (presumably
        // the OSR installation path).
      }
      ASSERT(code.owner() == function.ptr());
    } else {
      // Discard invalid code instead of installing it.
      code = Code::null();
    }
    if (function.usage_counter() < 0) {
      // Reset to 0 so that it can be recompiled if needed.
      if (code_is_valid) {
        function.SetUsageCounter(0);
      } else {
        // Trigger another optimization pass soon.
        function.SetUsageCounter(
            thread()->isolate_group()->optimization_counter_threshold() - 100);
      }
    }

    if (!code.IsNull()) {
      // The generated code was compiled under certain assumptions about
      // class hierarchy and field types. Register these dependencies
      // to ensure that the code will be deoptimized if they are violated.
      thread()->compiler_state().cha().RegisterDependencies(code);

      const FieldSet* guarded_fields =
          flow_graph->parsed_function().guarded_fields();
      Field& field = Field::Handle();
      FieldSet::Iterator it = guarded_fields->GetIterator();
      while (const Field** guarded_field = it.Next()) {
        field = (*guarded_field)->Original();
        field.RegisterDependentCode(code);
      }
    }
  } else {  // not optimized.
    // Unoptimized path: persist type feedback so a later optimizing compile
    // can use it.
    function.SaveICDataMap(
        graph_compiler->deopt_id_to_ic_data(),
        Array::Handle(zone, graph_compiler->edge_counters_array()),
        flow_graph->coverage_array());
    function.set_unoptimized_code(code);
    function.AttachCode(code);
    function.SetWasCompiled(true);
    if (function.IsOptimizable() && (function.usage_counter() < 0)) {
      // While doing compilation in background, usage counter is set
      // to INT32_MIN. Reset counter so that function can be optimized further.
      function.SetUsageCounter(0);
    }
  }

  if (function.IsFfiCallbackTrampoline()) {
    // NOTE(review): the statement in this branch is missing from this
    // listing.
  }

  return code.ptr();
}
486
// Return null if bailed out.
// NOTE(review): the signature line is missing from this listing (presumably
// CompileParsedFunctionHelper::Compile(CompilationPipeline*)), along with a
// handful of statement lines flagged below. Consult the full source before
// editing — the setjmp/longjmp retry loop is sensitive to statement order
// and to which locals are `volatile`.
  ASSERT(!FLAG_precompiled_mode);
  const Function& function = parsed_function()->function();
  if (optimized() && !function.IsOptimizable()) {
    return Code::null();
  }
  Zone* const zone = thread()->zone();
  HANDLESCOPE(thread());
  EnterCompilerScope cs(thread());

  // We may reattempt compilation if the function needs to be assembled using
  // far branches on ARM. In the else branch of the setjmp call, done is set to
  // false, and use_far_branches is set to true if there is a longjmp from the
  // ARM assembler. In all other paths through this while loop, done is set to
  // true. use_far_branches is always false on ia32 and x64.
  volatile bool done = false;
  // volatile because the variable may be clobbered by a longjmp.
  volatile intptr_t far_branch_level = 0;

  // In the JIT case we allow speculative inlining and have no need for a
  // suppression, since we don't restart optimization.
  SpeculativeInliningPolicy speculative_policy(/*enable_suppression=*/false);

  Code* volatile result = &Code::ZoneHandle(zone);
  while (!done) {
    *result = Code::null();
    LongJumpScope jump;
    if (setjmp(*jump.Set()) == 0) {
      FlowGraph* flow_graph = nullptr;
      ZoneGrowableArray<const ICData*>* ic_data_array = nullptr;

      // NOTE(review): the last argument line of this constructor call is
      // missing from this listing.
      CompilerState compiler_state(thread(), /*is_aot=*/false, optimized(),
      compiler_state.set_function(function);

      {
        // Extract type feedback before the graph is built, as the graph
        // builder uses it to attach it to nodes.
        ic_data_array = new (zone) ZoneGrowableArray<const ICData*>();

        // Clone ICData for background compilation so that it does not
        // change while compiling.
        const bool clone_ic_data = Compiler::IsBackgroundCompilation();
        function.RestoreICDataMap(ic_data_array, clone_ic_data);

        if (optimized()) {
          ASSERT(function.ic_data_array() != Array::null() ||
                 function.ForceOptimize());
        }

        if (FLAG_print_ic_data_map) {
          for (intptr_t i = 0; i < ic_data_array->length(); i++) {
            if ((*ic_data_array)[i] != nullptr) {
              THR_Print("%" Pd " ", i);
              FlowGraphPrinter::PrintICData(*(*ic_data_array)[i]);
            }
          }
        }

        TIMELINE_DURATION(thread(), CompilerVerbose, "BuildFlowGraph");
        flow_graph = pipeline->BuildFlowGraph(
            zone, parsed_function(), ic_data_array, osr_id(), optimized());
      }

      // NOTE(review): the final operand of this condition is missing from
      // this listing.
      const bool print_flow_graph =
          (FLAG_print_flow_graph ||
           (optimized() && FLAG_print_flow_graph_optimized)) &&

      if (print_flow_graph && !optimized()) {
        FlowGraphPrinter::PrintGraph("Unoptimized Compilation", flow_graph);
      }

      if (flow_graph->should_reorder_blocks()) {
        TIMELINE_DURATION(thread(), CompilerVerbose,
                          "BlockScheduler::AssignEdgeWeights");
        // NOTE(review): the statement in this branch is missing from this
        // listing.
      }

      CompilerPassState pass_state(thread(), flow_graph, &speculative_policy);

      if (optimized()) {
        TIMELINE_DURATION(thread(), CompilerVerbose, "OptimizationPasses");

        JitCallSpecializer call_specializer(flow_graph, &speculative_policy);
        pass_state.call_specializer = &call_specializer;

        flow_graph = CompilerPass::RunPipeline(CompilerPass::kJIT, &pass_state);
      }

      ASSERT(pass_state.inline_id_to_function.length() ==
             pass_state.caller_inline_id.length());
      compiler::ObjectPoolBuilder object_pool_builder;
      // far_branch_level grows on each retry after a branch-offset overflow.
      compiler::Assembler assembler(&object_pool_builder, far_branch_level);
      FlowGraphCompiler graph_compiler(
          &assembler, flow_graph, *parsed_function(), optimized(),
          &speculative_policy, pass_state.inline_id_to_function,
          pass_state.inline_id_to_token_pos, pass_state.caller_inline_id,
          ic_data_array);
      pass_state.graph_compiler = &graph_compiler;
      CompilerPass::GenerateCode(&pass_state);

      {
        TIMELINE_DURATION(thread(), CompilerVerbose, "FinalizeCompilation");

        auto install_code_fun = [&]() {
          *result =
              FinalizeCompilation(&assembler, &graph_compiler, flow_graph);
#if !defined(PRODUCT)
          // Isolate debuggers need to be notified of compiled function right
          // away as code is installed because there might be latent breakpoints
          // in compiled function, which have to be activated before functions
          // code is executed. Otherwise concurrently running isolates might
          // execute code before it's patched and miss a need to pause at a
          // breakpoint.
          if (!result->IsNull()) {
            if (!function.HasOptimizedCode()) {
              // NOTE(review): the call receiving `function` here is missing
              // from this listing (presumably a debugger notification).
                  function);
            }
          }
#endif
        };

        // Grab write program_lock outside of potential safepoint, that lock
        // can't be waited for inside the safepoint.
        // Initially read lock was added to guard direct_subclasses field
        // access.
        // Read lock was upgraded to write lock to guard dependent code updates.
        SafepointWriteRwLocker ml(thread(),
                                  thread()->isolate_group()->program_lock());
        // We have to ensure no mutators are running, because:
        //
        //   a) We allocate an instructions object, which might cause us to
        //      temporarily flip page protections (RX -> RW -> RX).
        //
        //   b) We have to ensure the code generated does not violate
        //      assumptions (e.g. CHA, field guards), the validation has to
        //      happen while mutator is stopped.
        //
        //   c) We update the [Function] object with a new [Code] which
        //      requires updating several pointers: We have to ensure all of
        //      those writes are observed atomically.
        //
        // NOTE(review): the call invoking install_code_fun is missing from
        // this listing.
            install_code_fun, /*use_force_growth=*/true);
      }
      if (!result->IsNull()) {
        // Must be called outside of safepoint.
        // NOTE(review): the statement here and the disassembler calls in the
        // branches below are missing from this listing.

        if (FLAG_disassemble && FlowGraphPrinter::ShouldPrint(function)) {
        } else if (FLAG_disassemble_optimized && optimized() &&
        }
      }
      // Exit the loop and the function with the correct result value.
      done = true;
    } else {
      // We bailed out or we encountered an error.
      const Error& error = Error::Handle(thread()->StealStickyError());

      if (error.ptr() == Object::branch_offset_error().ptr()) {
        // Compilation failed due to an out of range branch offset in the
        // assembler. We try again (done = false) with far branches enabled.
        done = false;
        RELEASE_ASSERT(far_branch_level < 2);
        far_branch_level++;
      } else if (error.ptr() == Object::speculative_inlining_error().ptr()) {
        // Can only happen with precompilation.
        UNREACHABLE();
      } else {
        // If the error isn't due to an out of range branch offset, we don't
        // try again (done = true).
        if (FLAG_trace_bailout) {
          THR_Print("%s\n", error.ToErrorCString());
        }
        if (!Compiler::IsBackgroundCompilation() && error.IsLanguageError() &&
            (LanguageError::Cast(error).kind() == Report::kBailout)) {
          // If it is not a background compilation, discard the error if it was
          // not a real error, but just a bailout. If we're in a background
          // compilation this will be dealt with in the caller.
        } else {
          // Otherwise, continue propagating unless we will try again.
          thread()->set_sticky_error(error);
        }
        done = true;
      }
    }
  }
  return result->ptr();
}
682
// NOTE(review): the first line of this signature is missing from this
// listing (presumably a static CompileFunctionHelper taking the pipeline).
// Parses and compiles [function] (optimized or not), translating bailouts
// and background-compilation aborts into the appropriate return values:
// the compiled Code on success, Error::null() for recoverable bailouts, an
// Error for real failures.
    const Function& function,
    volatile bool optimized,
    intptr_t osr_id) {
  Thread* const thread = Thread::Current();
  NoActiveIsolateScope no_active_isolate(thread);

  ASSERT(!FLAG_precompiled_mode);
  ASSERT(!optimized || function.WasCompiled() || function.ForceOptimize());
  // Force-optimized functions are always compiled optimized.
  if (function.ForceOptimize()) optimized = true;
  LongJumpScope jump;
  if (setjmp(*jump.Set()) == 0) {
    StackZone stack_zone(thread);
    Zone* const zone = stack_zone.GetZone();
    const bool trace_compiler =
        FLAG_trace_compiler || (FLAG_trace_optimizing_compiler && optimized);
    Timer per_compile_timer;
    per_compile_timer.Start();

    ParsedFunction* parsed_function = new (zone)
        ParsedFunction(thread, Function::ZoneHandle(zone, function.ptr()));
    if (trace_compiler) {
      const intptr_t token_size = function.SourceSize();
      THR_Print("Compiling %s%sfunction %s: '%s' @ token %s, size %" Pd "\n",
                (osr_id == Compiler::kNoOSRDeoptId ? "" : "osr "),
                (optimized ? "optimized " : ""),
                (Compiler::IsBackgroundCompilation() ? "(background)" : ""),
                function.ToFullyQualifiedCString(),
                function.token_pos().ToCString(), token_size);
    }
    // Makes sure no classes are loaded during parsing in background.
    {
      HANDLESCOPE(thread);
      pipeline->ParseFunction(parsed_function);
    }

    CompileParsedFunctionHelper helper(parsed_function, optimized, osr_id);

    const Code& result = Code::Handle(helper.Compile(pipeline));

    if (result.IsNull()) {
      // Compilation returned no code; inspect the sticky error to decide
      // how to proceed.
      const Error& error = Error::Handle(thread->StealStickyError());

      // NOTE(review): a line is missing here in this listing (presumably the
      // start of a background-compilation branch).
      // Try again later, background compilation may abort because of
      // state change during compilation.
      if (FLAG_trace_compiler) {
        THR_Print("Aborted background compilation: %s\n",
                  function.ToFullyQualifiedCString());
      }

      // We got an error during compilation.
      // If it was a bailout, then disable optimization.
      if (error.ptr() == Object::background_compilation_error().ptr()) {
        if (FLAG_trace_compiler) {
          THR_Print(
              "--> discarding background compilation for '%s' (will "
              "try to re-compile again later)\n",
              function.ToFullyQualifiedCString());
        }

        // Trigger another optimization pass soon.
        // NOTE(review): the argument line of this call is missing from this
        // listing.
        function.SetUsageCounter(
        return Error::null();
      } else if (error.IsLanguageError() &&
                 LanguageError::Cast(error).kind() == Report::kBailout) {
        if (FLAG_trace_compiler) {
          THR_Print("--> disabling optimizations for '%s'\n",
                    function.ToFullyQualifiedCString());
        }
        function.SetIsOptimizable(false);
        return Error::null();
      } else {
        // The background compiler does not execute Dart code or handle
        // isolate messages.
        ASSERT(!error.IsUnwindError());
        return error.ptr();
      }
    }
    if (optimized) {
      if (error.IsLanguageError() &&
          LanguageError::Cast(error).kind() == Report::kBailout) {
        // Functions which cannot deoptimize should never bail out.
        ASSERT(!function.ForceOptimize());
        // Optimizer bailed out. Disable optimizations and never try again.
        if (trace_compiler) {
          THR_Print("--> disabling optimizations for '%s'\n",
                    function.ToFullyQualifiedCString());
        } else if (FLAG_trace_failed_optimization_attempts) {
          THR_Print("Cannot optimize: %s\n",
                    function.ToFullyQualifiedCString());
        }
        function.SetIsOptimizable(false);
        return Error::null();
      }
      return error.ptr();
    } else {
      ASSERT(!optimized);
      // The non-optimizing compiler can get an unhandled exception
      // due to OOM or Stack overflow errors, it should not however
      // bail out.
      ASSERT(error.IsUnhandledException() || error.IsUnwindError() ||
             (error.IsLanguageError() &&
              LanguageError::Cast(error).kind() != Report::kBailout));
      return error.ptr();
    }
    UNREACHABLE();
  }

  per_compile_timer.Stop();

  if (trace_compiler) {
    const auto& code = Code::Handle(function.CurrentCode());
    THR_Print("--> '%s' entry: %#" Px " size: %" Pd " time: %" Pd64 " us\n",
              function.ToFullyQualifiedCString(), code.PayloadStart(),
              code.Size(), per_compile_timer.TotalElapsedTime());
  }

  return result.ptr();
  } else {
    // Reached via longjmp: compilation (or background parsing) aborted.
    Thread* const thread = Thread::Current();
    StackZone stack_zone(thread);
    // We got an error during compilation or it is a bailout from background
    // compilation (e.g., during parsing with EnsureIsFinalized).
    const Error& error = Error::Handle(thread->StealStickyError());
    if (error.ptr() == Object::background_compilation_error().ptr()) {
      // Exit compilation, retry it later.
      if (FLAG_trace_bailout) {
        THR_Print("Aborted background compilation: %s\n",
                  function.ToFullyQualifiedCString());
      }
      return Object::null();
    }
    // Do not attempt to optimize functions that can cause errors.
    function.set_is_optimizable(false);
    return error.ptr();
  }
  UNREACHABLE();
  return Object::null();
}
824
// NOTE(review): the signature line is missing from this listing (presumably
// Compiler::CompileFunction(Thread*, const Function&)), along with the
// branch header around the timeline event-name selection and the pipeline
// construction line. Entry point for (unoptimized, or force-optimized)
// JIT compilation of a single function.
#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
  RELEASE_ASSERT(!FLAG_precompiled_mode);
#endif

#if defined(DART_PRECOMPILED_RUNTIME)
  // An AOT runtime must never reach the JIT compiler.
  FATAL("Precompilation missed function %s (%s, %s)\n",
        function.ToLibNamePrefixedQualifiedCString(),
        function.token_pos().ToCString(),
#endif  // defined(DART_PRECOMPILED_RUNTIME)

  VMTagScope tagScope(thread, VMTag::kCompileUnoptimizedTagId);
#if defined(SUPPORT_TIMELINE)
  const char* event_name;
    event_name = "CompileFunctionUnoptimizedBackground";
  } else {
    event_name = "CompileFunction";
  }
#endif  // defined(SUPPORT_TIMELINE)

  CompilationPipeline* pipeline =

  // ForceOptimize functions are compiled optimized from the start.
  const bool optimized = function.ForceOptimize();
  return CompileFunctionHelper(pipeline, function, optimized, kNoOSRDeoptId);
}
854
// NOTE(review): the first line of this signature and the pipeline/result
// construction lines are missing from this listing. Ensures [function] has
// unoptimized code available (compiling it if needed) without disturbing any
// currently-installed code.
    const Function& function) {
  ASSERT(!function.ForceOptimize());
  if (function.unoptimized_code() != Object::null()) {
    // Already have unoptimized code; nothing to do.
    return Error::null();
  }
  Code& original_code = Code::ZoneHandle(thread->zone());
  if (function.HasCode()) {
    original_code = function.CurrentCode();
  }
  CompilationPipeline* pipeline =
  CompileFunctionHelper(pipeline, function, false, /* not optimized */
  if (result.IsError()) {
    return Error::Cast(result).ptr();
  }
  // Since CompileFunctionHelper replaces the current code, re-attach the
  // original code if the function was already compiled.
  if (!original_code.IsNull() && result.ptr() == function.CurrentCode() &&
      !original_code.IsDisabled()) {
    function.AttachCode(original_code);
  }
  ASSERT(function.unoptimized_code() != Object::null());
  ASSERT(function.unoptimized_code() == result.ptr());
  if (FLAG_trace_compiler) {
    THR_Print("Ensure unoptimized code for %s\n", function.ToCString());
  }
  return Error::null();
}
886
// NOTE(review): the first line of this signature, the timeline-event line,
// and the pipeline construction line are missing from this listing. Entry
// point for optimizing compilation of [function], optionally at an OSR
// deopt id.
    const Function& function,
    intptr_t osr_id) {
  VMTagScope tag_scope(thread, VMTag::kCompileOptimizedTagId);

#if defined(SUPPORT_TIMELINE)
  // Pick a timeline event name that reflects how this compile was triggered.
  const char* event_name;
  if (osr_id != kNoOSRDeoptId) {
    event_name = "CompileFunctionOptimizedOSR";
  } else if (IsBackgroundCompilation()) {
    event_name = "CompileFunctionOptimizedBackground";
  } else {
    event_name = "CompileFunctionOptimized";
  }
#endif  // defined(SUPPORT_TIMELINE)

  CompilationPipeline* pipeline =
  return CompileFunctionHelper(pipeline, function, /* optimized = */ true,
                               osr_id);
}
909
// NOTE(review): the signature line is missing from this listing (presumably
// Compiler::ComputeLocalVarDescriptors(const Code&)), along with the
// allocation lines for the two arrays and the flow-graph-builder
// construction. Lazily computes local variable descriptors for already
// installed, unoptimized [code] by re-running graph construction.
  ASSERT(!code.is_optimized());
  ASSERT(!FLAG_precompiled_mode);
  const Function& function = Function::Handle(code.function());
  ASSERT(code.var_descriptors() == Object::null());
  // IsIrregexpFunction have eager var descriptors generation.
  ASSERT(!function.IsIrregexpFunction());
  // In background compilation, parser can produce 'errors': bailouts
  // if state changed while compiling in background.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  CompilerState state(thread, /*is_aot=*/false, /*is_optimizing=*/false);
  LongJumpScope jump;
  if (setjmp(*jump.Set()) == 0) {
    ParsedFunction* parsed_function =
        new ParsedFunction(thread, Function::ZoneHandle(zone, function.ptr()));
    ZoneGrowableArray<const ICData*>* ic_data_array =
    ZoneGrowableArray<intptr_t>* context_level_array =

        parsed_function, ic_data_array, context_level_array,
        /* not inlining */ nullptr, false, Compiler::kNoOSRDeoptId);
    // Graph is built only for its side effect of filling in scopes /
    // context levels; the graph itself is discarded.
    builder.BuildGraph();

    auto& var_descs = LocalVarDescriptors::Handle(zone);

    var_descs = parsed_function->scope()->GetVarDescriptors(
        function, context_level_array);

    ASSERT(!var_descs.IsNull());
    code.set_var_descriptors(var_descs);
  } else {
    // Only possible with background compilation.
    // NOTE(review): the statement in this branch is missing from this
    // listing.
  }
}
948
// NOTE(review): the signature line and the declaration of `result` are
// missing from this listing (presumably Compiler::CompileAllFunctions(const
// Class&)). Compiles every concrete, not-yet-compiled function of [cls],
// returning the first error encountered or Error::null() on success.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  // We don't expect functions() to change as the class was finalized.
  ASSERT(cls.is_finalized());
  Array& functions = Array::Handle(zone, cls.current_functions());
  Function& func = Function::Handle(zone);
  // Compile all the regular functions.
  for (int i = 0; i < functions.Length(); i++) {
    func ^= functions.At(i);
    ASSERT(!func.IsNull());
    // Abstract functions have no body to compile.
    if (!func.HasCode() && !func.is_abstract()) {
      result = CompileFunction(thread, func);
      if (result.IsError()) {
        return Error::Cast(result).ptr();
      }
      ASSERT(!result.IsNull());
    }
  }
  return Error::null();
}
971
972void Compiler::AbortBackgroundCompilation(intptr_t deopt_id, const char* msg) {
973 if (FLAG_trace_compiler) {
974 THR_Print("ABORT background compilation: %s\n", msg);
975 }
976#if !defined(PRODUCT)
977 TimelineStream* stream = Timeline::GetCompilerStream();
978 ASSERT(stream != nullptr);
979 TimelineEvent* event = stream->StartEvent();
980 if (event != nullptr) {
981 event->Instant("AbortBackgroundCompilation");
982 event->SetNumArguments(1);
983 event->CopyArgument(0, "reason", msg);
984 event->Complete();
985 }
986#endif // !defined(PRODUCT)
989 deopt_id, Object::background_compilation_error());
990}
991
992// C-heap allocated background compilation queue element.
994 public:
996 : next_(nullptr), function_(function.ptr()) {}
997
998 virtual ~QueueElement() {
999 next_ = nullptr;
1000 function_ = Function::null();
1001 }
1002
1003 FunctionPtr Function() const { return function_; }
1004
1005 void set_next(QueueElement* elem) { next_ = elem; }
1006 QueueElement* next() const { return next_; }
1007
1008 ObjectPtr function() const { return function_; }
1010 return reinterpret_cast<ObjectPtr*>(&function_);
1011 }
1012
1013 private:
1014 QueueElement* next_;
1015 FunctionPtr function_;
1016
1018};
1019
1020// Allocated in C-heap. Handles both input and output of background compilation.
1021// It implements a FIFO queue, using Peek, Add, Remove operations.
1023 public:
1024 BackgroundCompilationQueue() : first_(nullptr), last_(nullptr) {}
1026
1028 ASSERT(visitor != nullptr);
1029 QueueElement* p = first_;
1030 while (p != nullptr) {
1031 visitor->VisitPointer(p->function_untag());
1032 p = p->next();
1033 }
1034 }
1035
1036 bool IsEmpty() const { return first_ == nullptr; }
1037
1038 void Add(QueueElement* value) {
1039 ASSERT(value != nullptr);
1040 ASSERT(value->next() == nullptr);
1041 if (first_ == nullptr) {
1042 first_ = value;
1043 ASSERT(last_ == nullptr);
1044 } else {
1045 ASSERT(last_ != nullptr);
1046 last_->set_next(value);
1047 }
1048 last_ = value;
1049 ASSERT(first_ != nullptr && last_ != nullptr);
1050 }
1051
1052 QueueElement* Peek() const { return first_; }
1053
1054 FunctionPtr PeekFunction() const {
1055 QueueElement* e = Peek();
1056 if (e == nullptr) {
1057 return Function::null();
1058 } else {
1059 return e->Function();
1060 }
1061 }
1062
1064 ASSERT(first_ != nullptr);
1065 QueueElement* result = first_;
1066 first_ = first_->next();
1067 if (first_ == nullptr) {
1068 last_ = nullptr;
1069 }
1070 return result;
1071 }
1072
1073 bool ContainsObj(const Object& obj) const {
1074 QueueElement* p = first_;
1075 while (p != nullptr) {
1076 if (p->function() == obj.ptr()) {
1077 return true;
1078 }
1079 p = p->next();
1080 }
1081 return false;
1082 }
1083
1084 void Clear() {
1085 while (!IsEmpty()) {
1086 QueueElement* e = Remove();
1087 delete e;
1088 }
1089 ASSERT((first_ == nullptr) && (last_ == nullptr));
1090 }
1091
1092 private:
1093 QueueElement* first_;
1094 QueueElement* last_;
1095
1097};
1098
1100 public:
1101 explicit BackgroundCompilerTask(BackgroundCompiler* background_compiler)
1102 : background_compiler_(background_compiler) {}
1104
1105 private:
1106 virtual void Run() { background_compiler_->Run(); }
1107
1108 BackgroundCompiler* background_compiler_;
1109
1111};
1112
1114 : isolate_group_(isolate_group),
1115 monitor_(),
1116 function_queue_(new BackgroundCompilationQueue()),
1117 running_(false),
1118 done_(true),
1119 disabled_depth_(0) {}
1120
1121// Fields all deleted in ::Stop; here clear them.
1123 delete function_queue_;
1124}
1125
1128 isolate_group_, Thread::kCompilerTask, /*bypass_safepoint=*/false);
1129 ASSERT(result);
1130 {
1131 Thread* thread = Thread::Current();
1132 StackZone stack_zone(thread);
1133 Zone* zone = stack_zone.GetZone();
1134 HANDLESCOPE(thread);
1136 QueueElement* element = nullptr;
1137 {
1138 SafepointMonitorLocker ml(&monitor_);
1139 if (running_ && !function_queue()->IsEmpty()) {
1140 element = function_queue()->Remove();
1141 function ^= element->function();
1142 }
1143 }
1144 if (element != nullptr) {
1145 delete element;
1148
1149 // If an optimizable method is not optimized, put it back on
1150 // the background queue (unless it was passed to foreground).
1151 if ((!function.HasOptimizedCode() && function.IsOptimizable()) ||
1152 FLAG_stress_test_background_compilation) {
1154 SafepointMonitorLocker ml(&monitor_);
1155 if (running_) {
1156 QueueElement* repeat_qelem = new QueueElement(function);
1157 function_queue()->Add(repeat_qelem);
1158 }
1159 }
1160 }
1161 }
1162 }
1163 Thread::ExitIsolateGroupAsHelper(/*bypass_safepoint=*/false);
1164 {
1165 MonitorLocker ml(&monitor_);
1166 if (running_ && !function_queue()->IsEmpty() &&
1167 Dart::thread_pool()->Run<BackgroundCompilerTask>(this)) {
1168 // Successfully scheduled a new task.
1169 } else {
1170 // Background compiler done. This notification must happen after the
1171 // thread leaves to group to avoid a shutdown race with the thread
1172 // registry.
1173 running_ = false;
1174 done_ = true;
1175 ml.NotifyAll();
1176 }
1177 }
1178}
1179
1181 Thread* thread = Thread::Current();
1182 ASSERT(thread->IsDartMutatorThread());
1184
1185 SafepointMonitorLocker ml(&monitor_);
1186 if (disabled_depth_ > 0) return false;
1187 if (!running_ && done_) {
1188 running_ = true;
1189 done_ = false;
1190 // If we ever wanted to run the BG compiler on the
1191 // `IsolateGroup::mutator_pool()` we would need to ensure the BG compiler
1192 // stops when it's idle - otherwise the [MutatorThreadPool]-based idle
1193 // notification would not work anymore.
1194 if (!Dart::thread_pool()->Run<BackgroundCompilerTask>(this)) {
1195 running_ = false;
1196 done_ = true;
1197 return false;
1198 }
1199 }
1200
1201 ASSERT(running_);
1202 if (function_queue()->ContainsObj(function)) {
1203 return true;
1204 }
1205 QueueElement* elem = new QueueElement(function);
1206 function_queue()->Add(elem);
1207 ml.NotifyAll();
1208 return true;
1209}
1210
1212 function_queue_->VisitObjectPointers(visitor);
1213}
1214
1215void BackgroundCompiler::Stop() {
1216 Thread* thread = Thread::Current();
1217 ASSERT(thread->isolate() == nullptr || !thread->BypassSafepoints());
1219
1220 SafepointMonitorLocker ml(&monitor_);
1221 StopLocked(thread, &ml);
1222}
1223
1224void BackgroundCompiler::StopLocked(Thread* thread,
1225 SafepointMonitorLocker* locker) {
1226 running_ = false;
1227 function_queue_->Clear();
1228 while (!done_) {
1229 locker->Wait();
1230 }
1231}
1232
1233void BackgroundCompiler::Enable() {
1234 Thread* thread = Thread::Current();
1235 ASSERT(!thread->BypassSafepoints());
1236 ASSERT(thread->CanAcquireSafepointLocks());
1237
1238 SafepointMonitorLocker ml(&monitor_);
1239 disabled_depth_--;
1240 if (disabled_depth_ < 0) {
1241 FATAL("Mismatched number of calls to BackgroundCompiler::Enable/Disable.");
1242 }
1243}
1244
1245void BackgroundCompiler::Disable() {
1246 Thread* thread = Thread::Current();
1247 ASSERT(!thread->BypassSafepoints());
1248 ASSERT(thread->CanAcquireSafepointLocks());
1249
1250 SafepointMonitorLocker ml(&monitor_);
1251 disabled_depth_++;
1252 if (done_) return;
1253 StopLocked(thread, &ml);
1254}
1255
1256#else // DART_PRECOMPILED_RUNTIME
1257
1258CompilationPipeline* CompilationPipeline::New(Zone* zone,
1259 const Function& function) {
1260 UNREACHABLE();
1261 return nullptr;
1262}
1263
1264DEFINE_RUNTIME_ENTRY(CompileFunction, 1) {
1265 const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(0));
1266 FATAL("Precompilation missed function %s (%s, %s)\n",
1267 function.ToLibNamePrefixedQualifiedCString(),
1268 function.token_pos().ToCString(),
1270}
1271
1273 return false;
1274}
1275
1276bool Compiler::CanOptimizeFunction(Thread* thread, const Function& function) {
1277 UNREACHABLE();
1278 return false;
1279}
1280
1281ObjectPtr Compiler::CompileFunction(Thread* thread, const Function& function) {
1282 FATAL("Attempt to compile function %s", function.ToCString());
1283 return Error::null();
1284}
1285
1286ErrorPtr Compiler::EnsureUnoptimizedCode(Thread* thread,
1287 const Function& function) {
1288 FATAL("Attempt to compile function %s", function.ToCString());
1289 return Error::null();
1290}
1291
1292ObjectPtr Compiler::CompileOptimizedFunction(Thread* thread,
1293 const Function& function,
1294 intptr_t osr_id) {
1295 FATAL("Attempt to compile function %s", function.ToCString());
1296 return Error::null();
1297}
1298
1299void Compiler::ComputeLocalVarDescriptors(const Code& code) {
1300 UNREACHABLE();
1301}
1302
1303ErrorPtr Compiler::CompileAllFunctions(const Class& cls) {
1304 FATAL("Attempt to compile class %s", cls.ToCString());
1305 return Error::null();
1306}
1307
1308void Compiler::AbortBackgroundCompilation(intptr_t deopt_id, const char* msg) {
1309 UNREACHABLE();
1310}
1311
1313 UNREACHABLE();
1314 return false;
1315}
1316
1317void BackgroundCompiler::VisitPointers(ObjectPointerVisitor* visitor) {
1318 UNREACHABLE();
1319}
1320
1321void BackgroundCompiler::Stop() {
1322 UNREACHABLE();
1323}
1324
1325void BackgroundCompiler::Enable() {
1326 // NOP
1327}
1328
1329void BackgroundCompiler::Disable() {
1330 // NOP
1331}
1332
1333#endif // DART_PRECOMPILED_RUNTIME
1334
1335} // namespace dart
static void done(const char *config, const char *src, const char *srcOptions, const char *name)
Definition DM.cpp:263
#define UNREACHABLE()
Definition assert.h:248
#define DEBUG_ASSERT(cond)
Definition assert.h:321
#define RELEASE_ASSERT(cond)
Definition assert.h:327
ObjectPtr At(intptr_t index) const
Definition object.h:10854
intptr_t Length() const
Definition object.h:10808
void Add(QueueElement *value)
Definition compiler.cc:1038
void VisitObjectPointers(ObjectPointerVisitor *visitor)
Definition compiler.cc:1027
QueueElement * Peek() const
Definition compiler.cc:1052
bool ContainsObj(const Object &obj) const
Definition compiler.cc:1073
FunctionPtr PeekFunction() const
Definition compiler.cc:1054
BackgroundCompilerTask(BackgroundCompiler *background_compiler)
Definition compiler.cc:1101
void VisitPointers(ObjectPointerVisitor *visitor)
Definition compiler.cc:1211
BackgroundCompiler(IsolateGroup *isolate_group)
Definition compiler.cc:1113
bool EnqueueCompilation(const Function &function)
Definition compiler.cc:1180
BackgroundCompilationQueue * function_queue() const
Definition compiler.h:136
intptr_t length() const
static void AssignEdgeWeights(FlowGraph *flow_graph)
void RegisterDependencies(const Code &code) const
Definition cha.cc:266
bool is_finalized() const
Definition object.h:1725
ArrayPtr current_functions() const
Definition object.h:1643
bool IsDisabled() const
Definition object.h:7228
static void NotifyCodeObservers(const Code &code, bool optimized)
Definition object.cc:18191
static CodePtr FinalizeCode(FlowGraphCompiler *compiler, compiler::Assembler *assembler, PoolAttachment pool_attachment, bool optimized, CodeStatistics *stats)
Definition object.cc:18068
virtual FlowGraph * BuildFlowGraph(Zone *zone, ParsedFunction *parsed_function, ZoneGrowableArray< const ICData * > *ic_data_array, intptr_t osr_id, bool optimized)=0
virtual void ParseFunction(ParsedFunction *parsed_function)=0
static CompilationPipeline * New(Zone *zone, const Function &function)
Definition compiler.cc:202
CompileParsedFunctionHelper(ParsedFunction *parsed_function, bool optimized, intptr_t osr_id)
Definition compiler.cc:306
CodePtr Compile(CompilationPipeline *pipeline)
Definition compiler.cc:488
static DART_WARN_UNUSED_RESULT FlowGraph * RunPipeline(PipelineMode mode, CompilerPassState *state)
static void GenerateCode(CompilerPassState *state)
void set_function(const Function &function)
static bool ShouldTrace()
static CompilerState & Current()
static bool IsBackgroundCompilation()
Definition compiler.cc:299
static bool CanOptimizeFunction(Thread *thread, const Function &function)
Definition compiler.cc:230
static constexpr intptr_t kNoOSRDeoptId
Definition compiler.h:73
static ErrorPtr EnsureUnoptimizedCode(Thread *thread, const Function &function)
Definition compiler.cc:855
static ObjectPtr CompileFunction(Thread *thread, const Function &function)
Definition compiler.cc:825
static ObjectPtr CompileOptimizedFunction(Thread *thread, const Function &function, intptr_t osr_id=kNoOSRDeoptId)
Definition compiler.cc:887
static ErrorPtr CompileAllFunctions(const Class &cls)
Definition compiler.cc:949
static void ComputeLocalVarDescriptors(const Code &code)
Definition compiler.cc:910
static void AbortBackgroundCompilation(intptr_t deopt_id, const char *msg)
Definition compiler.cc:972
void ParseFunction(ParsedFunction *parsed_function) override
Definition compiler.cc:125
FlowGraph * BuildFlowGraph(Zone *zone, ParsedFunction *parsed_function, ZoneGrowableArray< const ICData * > *ic_data_array, intptr_t osr_id, bool optimized) override
Definition compiler.cc:129
static ThreadPool * thread_pool()
Definition dart.h:73
static void DisassembleCode(const Function &function, const Code &code, bool optimized)
static void PrintGraph(const char *phase, FlowGraph *flow_graph)
static bool ShouldPrint(const Function &function, uint8_t **compiler_pass_filter=nullptr)
static void PrintICData(const ICData &ic_data, intptr_t num_checks_to_print=kPrintAll)
static constexpr CompilationMode CompilationModeFrom(bool is_optimizing)
Definition flow_graph.h:587
bool should_reorder_blocks() const
Definition flow_graph.h:510
bool HasCode() const
Definition object.cc:7994
static const char * KindToCString(UntaggedFunction::Kind kind)
Definition object.cc:8477
CodePtr unoptimized_code() const
Definition object.h:3165
RegExpPtr regexp() const
Definition object.cc:8527
bool IsDebugging(Thread *thread, const Function &function)
Definition debugger.cc:3465
void NotifyCompilation(const Function &func)
Definition debugger.cc:2954
void ParseFunction(ParsedFunction *parsed_function) override
Definition compiler.cc:144
FlowGraph * BuildFlowGraph(Zone *zone, ParsedFunction *parsed_function, ZoneGrowableArray< const ICData * > *ic_data_array, intptr_t osr_id, bool optimized) override
Definition compiler.cc:170
GroupDebugger * debugger() const
Definition isolate.h:314
intptr_t optimization_counter_threshold() const
Definition isolate.h:305
static IsolateGroup * Current()
Definition isolate.h:534
void RunWithStoppedMutators(T single_current_mutator, S otherwise, bool use_force_growth_in_otherwise=false)
Definition isolate.h:604
LocalVarDescriptorsPtr GetVarDescriptors(const Function &func, ZoneGrowableArray< intptr_t > *context_level_array)
Definition scopes.cc:249
DART_NORETURN void Jump(int value, const Error &error)
Definition longjump.cc:22
jmp_buf * Set()
Definition longjump.cc:16
void VisitPointer(ObjectPtr *p)
Definition visitor.h:55
static ObjectPtr null()
Definition object.h:433
ObjectPtr ptr() const
Definition object.h:332
bool IsNull() const
Definition object.h:363
static Object & Handle()
Definition object.h:407
static Object & ZoneHandle()
Definition object.h:419
void SetRegExpCompileData(RegExpCompileData *regexp_compile_data)
Definition parser.cc:173
const Function & function() const
Definition parser.h:73
LocalScope * scope() const
Definition parser.h:76
void AllocateIrregexpVariables(intptr_t num_stack_locals)
Definition parser.cc:295
RegExpCompileData * regexp_compile_data() const
Definition parser.h:83
Zone * zone() const
Definition parser.h:213
Thread * thread() const
Definition parser.h:211
void set_next(QueueElement *elem)
Definition compiler.cc:1005
ObjectPtr function() const
Definition compiler.cc:1008
ObjectPtr * function_untag()
Definition compiler.cc:1009
virtual ~QueueElement()
Definition compiler.cc:998
QueueElement(const Function &function)
Definition compiler.cc:995
QueueElement * next() const
Definition compiler.cc:1006
FunctionPtr Function() const
Definition compiler.cc:1003
static CompilationResult CompileIR(RegExpCompileData *input, const ParsedFunction *parsed_function, const ZoneGrowableArray< const ICData * > &ic_data_array, intptr_t osr_id)
Definition regexp.cc:5298
static void ParseRegExp(const String &input, RegExpFlags regexp_flags, RegExpCompileData *result)
void set_is_simple() const
Definition object.h:12856
StringPtr pattern() const
Definition object.h:12771
void set_is_complex() const
Definition object.h:12857
void set_num_bracket_expressions(SmiPtr value) const
void set_capture_name_map(const Array &array) const
Definition object.cc:26737
RegExpFlags flags() const
Definition object.h:12865
static DART_NORETURN void LongJump(const Error &error)
Definition report.cc:86
Zone * GetZone()
Definition zone.h:213
static StringPtr New(const char *cstr, Heap::Space space=Heap::kNew)
Definition object.cc:23777
Zone * zone() const
LongJumpScope * long_jump_base() const
bool CanAcquireSafepointLocks() const
Definition thread.cc:1306
@ kCompilerTask
Definition thread.h:348
static Thread * Current()
Definition thread.h:361
DART_WARN_UNUSED_RESULT ErrorPtr StealStickyError()
Definition thread.cc:243
CompilerState & compiler_state()
Definition thread.h:583
void set_sticky_error(const Error &value)
Definition thread.cc:234
static void ExitIsolateGroupAsHelper(bool bypass_safepoint)
Definition thread.cc:494
bool IsDartMutatorThread() const
Definition thread.h:546
Isolate * isolate() const
Definition thread.h:533
bool BypassSafepoints() const
Definition thread.h:994
IsolateGroup * isolate_group() const
Definition thread.h:540
static bool EnterIsolateGroupAsHelper(IsolateGroup *isolate_group, TaskKind kind, bool bypass_safepoint)
Definition thread.cc:476
void Stop()
Definition timer.h:117
void Start()
Definition timer.h:111
int64_t TotalElapsedTime() const
Definition timer.h:123
#define THR_Print(format,...)
Definition log.h:20
#define ASSERT(E)
#define FATAL(error)
AtkStateType state
FlKeyEvent * event
const uint8_t uint32_t uint32_t GError ** error
uint8_t value
GAsyncResult * result
const char * charp
Definition flags.h:12
#define DECLARE_FLAG(type, name)
Definition flags.h:14
#define DEFINE_FLAG(type, name, default_value, comment)
Definition flags.h:16
#define DEFINE_FLAG_HANDLER(handler, name, comment)
Definition flags.h:20
Dart_NativeFunction function
Definition fuchsia.cc:51
#define HANDLESCOPE(thread)
Definition handles.h:321
void SetFfiCallbackCode(Thread *thread, const Function &ffi_trampoline, const Code &code)
Definition callback.cc:148
static void PrecompilationModeHandler(bool value)
Definition compiler.cc:91
DirectChainedHashMap< FieldKeyValueTrait > FieldSet
Definition parser.h:66
const char *const function_name
static ObjectPtr CompileFunctionHelper(CompilationPipeline *pipeline, const Function &function, volatile bool optimized, intptr_t osr_id)
Definition compiler.cc:683
#define Px
Definition globals.h:410
#define Pd64
Definition globals.h:416
#define Pd
Definition globals.h:408
#define DISALLOW_COPY_AND_ASSIGN(TypeName)
Definition globals.h:581
#define DEFINE_RUNTIME_ENTRY(name, argument_count)
CallSpecializer * call_specializer
GrowableArray< TokenPosition > inline_id_to_token_pos
GrowableArray< intptr_t > caller_inline_id
GrowableArray< const Function * > inline_id_to_function
FlowGraphCompiler * graph_compiler
#define TIMELINE_FUNCTION_COMPILATION_DURATION(thread, name, function)
Definition timeline.h:40
#define TIMELINE_DURATION(thread, stream, name)
Definition timeline.h:39