compiler.cc
// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/compiler/jit/compiler.h"

#if !defined(DART_PRECOMPILED_RUNTIME)
#include "vm/code_patcher.h"
#include "vm/compiler/assembler/assembler.h"
#include "vm/compiler/assembler/disassembler.h"
#include "vm/compiler/backend/block_scheduler.h"
#include "vm/compiler/backend/flow_graph.h"
#include "vm/compiler/backend/flow_graph_compiler.h"
#include "vm/compiler/backend/il_printer.h"
#include "vm/compiler/cha.h"
#include "vm/compiler/compiler_pass.h"
#include "vm/compiler/compiler_state.h"
#include "vm/compiler/ffi/callback.h"
#include "vm/compiler/frontend/kernel_to_il.h"
#include "vm/compiler/jit/jit_call_specializer.h"
#include "vm/dart_entry.h"
#include "vm/debugger.h"
#include "vm/exceptions.h"
#include "vm/flags.h"
#include "vm/kernel.h"
#include "vm/longjump.h"
#include "vm/object.h"
#include "vm/object_store.h"
#include "vm/os.h"
#include "vm/parser.h"
#include "vm/regexp.h"
#include "vm/regexp_assembler.h"
#include "vm/regexp_parser.h"
#include "vm/runtime_entry.h"
#include "vm/symbols.h"
#include "vm/tags.h"
#include "vm/timeline.h"
#include "vm/timer.h"
#endif

namespace dart {

DEFINE_FLAG(
    int,
    max_deoptimization_counter_threshold,
    16,
    "How many times we allow deoptimization before we disallow optimization.");
DEFINE_FLAG(charp,
            optimization_filter,
            nullptr,
            "Optimize only named function");
DEFINE_FLAG(bool, print_flow_graph, false, "Print the IR flow graph.");
DEFINE_FLAG(bool,
            print_flow_graph_optimized,
            false,
            "Print the IR flow graph when optimizing.");
DEFINE_FLAG(bool,
            print_ic_data_map,
            false,
            "Print the deopt-id to ICData map in optimizing compiler.");
DEFINE_FLAG(bool, print_code_source_map, false, "Print code source map.");
DEFINE_FLAG(bool,
            stress_test_background_compilation,
            false,
            "Keep background compiler running all the time");
DEFINE_FLAG(bool,
            stop_on_excessive_deoptimization,
            false,
            "Debugging: stops program if deoptimizing same function too often");
DEFINE_FLAG(bool, trace_compiler, false, "Trace compiler operations.");
DEFINE_FLAG(bool,
            trace_failed_optimization_attempts,
            false,
            "Traces all failed optimization attempts");
DEFINE_FLAG(bool,
            trace_optimizing_compiler,
            false,
            "Trace only optimizing compiler operations.");
DEFINE_FLAG(bool, trace_bailout, false, "Print bailout from ssa compiler.");
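
// Usage sketch (assumption, not part of this file): these are ordinary VM
// flags, so any embedder that forwards VM options can toggle them, e.g. with
// a JIT-mode `dart` binary:
//
//   dart --trace_compiler main.dart
//   dart --optimization_filter=MyClass. main.dart
//
// The VM flag parser treats '-' and '_' in flag names interchangeably, and
// boolean flags can be negated with a "no_" prefix.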

DECLARE_FLAG(bool, trace_failed_optimization_attempts);

static void PrecompilationModeHandler(bool value) {
  if (value) {
#if defined(TARGET_ARCH_IA32)
    FATAL("Precompilation not supported on IA32");
#endif

    FLAG_background_compilation = false;
    FLAG_enable_mirrors = false;
    FLAG_interpret_irregexp = true;
    FLAG_link_natives_lazily = true;
    FLAG_optimization_counter_threshold = -1;
    FLAG_polymorphic_with_deopt = false;
    FLAG_precompiled_mode = true;
    FLAG_reorder_basic_blocks = true;
    FLAG_use_field_guards = false;
    FLAG_use_cha_deopt = false;

#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
    // Set flags affecting the runtime accordingly for gen_snapshot.
    // These flags are constant in PRODUCT and DART_PRECOMPILED_RUNTIME builds.
    FLAG_deoptimize_alot = false;  // Used in some tests.
    FLAG_deoptimize_every = 0;     // Used in some tests.
    FLAG_use_osr = false;
#endif
  }
}

DEFINE_FLAG_HANDLER(PrecompilationModeHandler,
                    precompilation,
                    "Precompilation mode");

#ifndef DART_PRECOMPILED_RUNTIME

void DartCompilationPipeline::ParseFunction(ParsedFunction* parsed_function) {
  // Nothing to do here.
}

FlowGraph* DartCompilationPipeline::BuildFlowGraph(
    Zone* zone,
    ParsedFunction* parsed_function,
    ZoneGrowableArray<const ICData*>* ic_data_array,
    intptr_t osr_id,
    bool optimized) {
  kernel::FlowGraphBuilder builder(parsed_function, ic_data_array,
                                   /* not building var desc */ nullptr,
                                   /* not inlining */ nullptr, optimized,
                                   osr_id);
  FlowGraph* graph = builder.BuildGraph();
  ASSERT(graph != nullptr);
  return graph;
}

void IrregexpCompilationPipeline::ParseFunction(
    ParsedFunction* parsed_function) {
  VMTagScope tagScope(parsed_function->thread(),
                      VMTag::kCompileParseRegExpTagId);
  Zone* zone = parsed_function->zone();
  RegExp& regexp = RegExp::Handle(parsed_function->function().regexp());

  const String& pattern = String::Handle(regexp.pattern());

  RegExpCompileData* compile_data = new (zone) RegExpCompileData();
  // Parsing failures are handled in the RegExp factory constructor.
  RegExpParser::ParseRegExp(pattern, regexp.flags(), compile_data);

  regexp.set_num_bracket_expressions(compile_data->capture_count);
  regexp.set_capture_name_map(compile_data->capture_name_map);
  if (compile_data->simple) {
    regexp.set_is_simple();
  } else {
    regexp.set_is_complex();
  }

  parsed_function->SetRegExpCompileData(compile_data);

  // Variables are allocated after compilation.
}

FlowGraph* IrregexpCompilationPipeline::BuildFlowGraph(
    Zone* zone,
    ParsedFunction* parsed_function,
    ZoneGrowableArray<const ICData*>* ic_data_array,
    intptr_t osr_id,
    bool optimized) {
  // Compile to the dart IR.
  RegExpEngine::CompilationResult result =
      RegExpEngine::CompileIR(parsed_function->regexp_compile_data(),
                              parsed_function, *ic_data_array, osr_id);
  if (result.error_message != nullptr) {
    Report::LongJump(LanguageError::Handle(
        LanguageError::New(String::Handle(String::New(result.error_message)))));
  }
  backtrack_goto_ = result.backtrack_goto;

  // Allocate variables now that we know the number of locals.
  parsed_function->AllocateIrregexpVariables(result.num_stack_locals);

  // When compiling for OSR, use a depth first search to find the OSR
  // entry and make graph entry jump to it instead of normal entry.
  // Catch entries are always considered reachable, even if they
  // become unreachable after OSR.
  if (osr_id != Compiler::kNoOSRDeoptId) {
    result.graph_entry->RelinkToOsrEntry(zone, result.num_blocks);
  }
  PrologueInfo prologue_info(-1, -1);
  return new (zone)
      FlowGraph(*parsed_function, result.graph_entry, result.num_blocks,
                prologue_info, FlowGraph::CompilationModeFrom(optimized));
}

CompilationPipeline* CompilationPipeline::New(Zone* zone,
                                              const Function& function) {
  if (function.IsIrregexpFunction()) {
    return new (zone) IrregexpCompilationPipeline();
  } else {
    return new (zone) DartCompilationPipeline();
  }
}
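
// Usage sketch, mirroring CompileFunctionHelper further below: callers pick
// the pipeline once per function and then drive it in two steps.
//
//   CompilationPipeline* pipeline =
//       CompilationPipeline::New(thread->zone(), function);
//   pipeline->ParseFunction(parsed_function);
//   FlowGraph* flow_graph = pipeline->BuildFlowGraph(
//       zone, parsed_function, ic_data_array, osr_id, optimized);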

// Compile a function. Should only be called if the function has not been
// compiled yet.
// Arg0: function object.
DEFINE_RUNTIME_ENTRY(CompileFunction, 1) {
  ASSERT(thread->IsDartMutatorThread());
  const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(0));

  {
    // Another isolate's mutator thread may have created [function] and
    // published it via an ICData, MegamorphicCache etc. Entering the lock below
    // is an acquire operation that pairs with the release operation when the
    // other isolate exited the lock, ensuring the initializing stores for
    // [function] are visible in the current thread.
    SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
  }

  // Will throw if compilation failed (e.g. with compile-time error).
  function.EnsureHasCode();
}
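
// Note: this entry is reached via the VM's lazy-compile stub, which every
// not-yet-compiled function's entry point initially points at, so the first
// call of such a function traps into the runtime here.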

bool Compiler::CanOptimizeFunction(Thread* thread, const Function& function) {
#if !defined(PRODUCT)
  if (thread->isolate_group()->debugger()->IsDebugging(thread, function)) {
    // We cannot set breakpoints and single step in optimized code,
    // so do not optimize the function. Bump usage counter down to avoid
    // repeatedly entering the runtime for an optimization attempt.
    function.SetUsageCounter(0);

    // If the optimization counter = 1, the unoptimized code will come back
    // here immediately, causing an infinite compilation loop. The compiler
    // raises the threshold for functions with breakpoints, so we drop the
    // unoptimized code to force it to be recompiled.
    if (thread->isolate_group()->optimization_counter_threshold() < 2) {
      function.ClearCode();
    }
    return false;
  }
#endif
  if (function.deoptimization_counter() >=
      FLAG_max_deoptimization_counter_threshold) {
    if (FLAG_trace_failed_optimization_attempts ||
        FLAG_stop_on_excessive_deoptimization) {
      THR_Print("Too many deoptimizations: %s\n",
                function.ToFullyQualifiedCString());
      if (FLAG_stop_on_excessive_deoptimization) {
        FATAL("Stop on excessive deoptimization");
      }
    }
    // The function will not be optimized any longer. This situation occurs
    // mostly with small optimization counter thresholds.
    function.SetIsOptimizable(false);
    function.SetUsageCounter(INT32_MIN);
    return false;
  }
  if (FLAG_optimization_filter != nullptr) {
    // FLAG_optimization_filter is a comma-separated list of strings that are
    // matched against the fully-qualified function name.
    char* save_ptr;  // Needed for strtok_r.
    const char* function_name = function.ToFullyQualifiedCString();
    intptr_t len = strlen(FLAG_optimization_filter) + 1;  // Length with \0.
    char* filter = new char[len];
    strncpy(filter, FLAG_optimization_filter, len);  // strtok modifies arg 1.
    char* token = strtok_r(filter, ",", &save_ptr);
    bool found = false;
    while (token != nullptr) {
      if (strstr(function_name, token) != nullptr) {
        found = true;
        break;
      }
      token = strtok_r(nullptr, ",", &save_ptr);
    }
    delete[] filter;
    if (!found) {
      function.SetUsageCounter(INT32_MIN);
      return false;
    }
  }
  if (!function.IsOptimizable()) {
    // Huge methods (code size above --huge_method_cutoff_in_code_size) become
    // non-optimizable only after the code has been generated.
    if (FLAG_trace_failed_optimization_attempts) {
      THR_Print("Not optimizable: %s\n", function.ToFullyQualifiedCString());
    }
    function.SetUsageCounter(INT32_MIN);
    return false;
  }
  return true;
}
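
// Worked example for the filter above: with
//   --optimization_filter=ClassA.,_print
// a function stays eligible only if its fully-qualified name contains
// "ClassA." or "_print" as a substring; every other function has its usage
// counter pinned to INT32_MIN so the runtime stops requesting optimization.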

bool Compiler::IsBackgroundCompilation() {
  // For now: compilation on a non-mutator thread is background compilation.
  return !Thread::Current()->IsDartMutatorThread();
}

class CompileParsedFunctionHelper : public ValueObject {
 public:
  CompileParsedFunctionHelper(ParsedFunction* parsed_function,
                              bool optimized,
                              intptr_t osr_id)
      : parsed_function_(parsed_function),
        optimized_(optimized),
        osr_id_(osr_id),
        thread_(Thread::Current()) {}

  CodePtr Compile(CompilationPipeline* pipeline);

 private:
  ParsedFunction* parsed_function() const { return parsed_function_; }
  bool optimized() const { return optimized_; }
  intptr_t osr_id() const { return osr_id_; }
  Thread* thread() const { return thread_; }
  IsolateGroup* isolate_group() const { return thread_->isolate_group(); }
  CodePtr FinalizeCompilation(compiler::Assembler* assembler,
                              FlowGraphCompiler* graph_compiler,
                              FlowGraph* flow_graph);

  ParsedFunction* parsed_function_;
  const bool optimized_;
  const intptr_t osr_id_;
  Thread* const thread_;

  DISALLOW_COPY_AND_ASSIGN(CompileParsedFunctionHelper);
};

CodePtr CompileParsedFunctionHelper::FinalizeCompilation(
    compiler::Assembler* assembler,
    FlowGraphCompiler* graph_compiler,
    FlowGraph* flow_graph) {
  ASSERT(!CompilerState::Current().is_aot());
  const Function& function = parsed_function()->function();

  // If another thread compiled and installed unoptimized code already,
  // skip installation.
  if (!optimized() && function.unoptimized_code() != Code::null()) {
    return function.unoptimized_code();
  }
  // If another thread compiled and installed optimized code for the
  // force-optimized function, skip installation.
  if (optimized() && function.ForceOptimize() && function.HasOptimizedCode()) {
    return function.CurrentCode();
  }
  Zone* const zone = thread()->zone();

  // CreateDeoptInfo uses the object pool and needs to be done before
  // FinalizeCode.
  Array& deopt_info_array = Array::Handle(zone, Object::empty_array().ptr());
  deopt_info_array = graph_compiler->CreateDeoptInfo(assembler);

  // Allocates the instructions object. Since this occurs only at safepoint,
  // there can be no concurrent access to the instruction page.
  Code& code = Code::Handle(Code::FinalizeCode(
      graph_compiler, assembler, Code::PoolAttachment::kAttachPool, optimized(),
      /*stats=*/nullptr));
  code.set_is_optimized(optimized());
  code.set_owner(function);

  if (!function.IsOptimizable()) {
    // A function with huge unoptimized code can become non-optimizable
    // after generating unoptimized code.
    function.SetUsageCounter(INT32_MIN);
  }

  graph_compiler->FinalizePcDescriptors(code);
  code.set_deopt_info_array(deopt_info_array);

  graph_compiler->FinalizeStackMaps(code);
  graph_compiler->FinalizeVarDescriptors(code);
  graph_compiler->FinalizeExceptionHandlers(code);
  graph_compiler->FinalizeCatchEntryMovesMap(code);
  graph_compiler->FinalizeStaticCallTargetsTable(code);
  graph_compiler->FinalizeCodeSourceMap(code);

  if (function.ForceOptimize()) {
    ASSERT(optimized() && thread()->IsDartMutatorThread());
    code.set_is_force_optimized(true);
    function.AttachCode(code);
    function.SetWasCompiled(true);
  } else if (optimized()) {
    // We cannot execute generated code while installing code.
    ASSERT(Thread::Current()->OwnsGCSafepoint() ||
           (Thread::Current()->IsDartMutatorThread() &&
            IsolateGroup::Current()->ContainsOnlyOneIsolate()));
    // We are validating our CHA / field guard / ... assumptions. To prevent
    // another thread from concurrently changing them, we have to guarantee
    // mutual exclusion.
    DEBUG_ASSERT(
        IsolateGroup::Current()->program_lock()->IsCurrentThreadReader());

    const bool trace_compiler =
        FLAG_trace_compiler || FLAG_trace_optimizing_compiler;
    bool code_is_valid = true;
    if (flow_graph->parsed_function().guarded_fields()->Length() != 0) {
      const FieldSet* guarded_fields =
          flow_graph->parsed_function().guarded_fields();
      Field& original = Field::Handle();
      FieldSet::Iterator it = guarded_fields->GetIterator();
      while (const Field** field = it.Next()) {
        ASSERT(!(*field)->IsOriginal());
        original = (*field)->Original();
        if (!(*field)->IsConsistentWith(original)) {
          code_is_valid = false;
          if (trace_compiler) {
            THR_Print("--> FAIL: Field %s guarded state changed.",
                      (*field)->ToCString());
          }
          break;
        }
      }
    }

    if (!thread()->compiler_state().cha().IsConsistentWithCurrentHierarchy()) {
      code_is_valid = false;
      if (trace_compiler) {
        THR_Print("--> FAIL: Class hierarchy has new subclasses.");
      }
    }

    // Setting breakpoints at runtime could make a function non-optimizable.
    if (code_is_valid && Compiler::CanOptimizeFunction(thread(), function)) {
      if (osr_id() == Compiler::kNoOSRDeoptId) {
        function.InstallOptimizedCode(code);
      } else {
        // OSR is not compiled in background.
        ASSERT(!Compiler::IsBackgroundCompilation());
      }
      ASSERT(code.owner() == function.ptr());
    } else {
      code = Code::null();
    }
    if (function.usage_counter() < 0) {
      // Reset to 0 so that it can be recompiled if needed.
      if (code_is_valid) {
        function.SetUsageCounter(0);
      } else {
        // Trigger another optimization pass soon.
        function.SetUsageCounter(
            thread()->isolate_group()->optimization_counter_threshold() - 100);
      }
    }

    if (!code.IsNull()) {
      // The generated code was compiled under certain assumptions about
      // class hierarchy and field types. Register these dependencies
      // to ensure that the code will be deoptimized if they are violated.
      thread()->compiler_state().cha().RegisterDependencies(code);

      const FieldSet* guarded_fields =
          flow_graph->parsed_function().guarded_fields();
      Field& field = Field::Handle();
      FieldSet::Iterator it = guarded_fields->GetIterator();
      while (const Field** guarded_field = it.Next()) {
        field = (*guarded_field)->Original();
        field.RegisterDependentCode(code);
      }
    }
  } else {  // not optimized.
    function.SaveICDataMap(
        graph_compiler->deopt_id_to_ic_data(),
        Array::Handle(zone, graph_compiler->edge_counters_array()),
        flow_graph->coverage_array());
    function.set_unoptimized_code(code);
    function.AttachCode(code);
    function.SetWasCompiled(true);
    if (function.IsOptimizable() && (function.usage_counter() < 0)) {
      // While doing compilation in background, the usage counter is set
      // to INT32_MIN. Reset counter so that function can be optimized further.
      function.SetUsageCounter(0);
    }
  }

  if (function.IsFfiCallbackTrampoline()) {
    compiler::ffi::SetFfiCallbackCode(thread(), function, code);
  }

  return code.ptr();
}

// Return null if bailed out.
CodePtr CompileParsedFunctionHelper::Compile(CompilationPipeline* pipeline) {
  ASSERT(!FLAG_precompiled_mode);
  const Function& function = parsed_function()->function();
  if (optimized() && !function.IsOptimizable()) {
    return Code::null();
  }
  Zone* const zone = thread()->zone();
  HANDLESCOPE(thread());
  EnterCompilerScope cs(thread());

  // We may reattempt compilation if the function needs to be assembled using
  // far branches on ARM. In the else branch of the setjmp call, done is set
  // to false, and far_branch_level is incremented if there is a longjmp from
  // the ARM assembler. In all other paths through this while loop, done is
  // set to true. far_branch_level always stays 0 on ia32 and x64.
  volatile bool done = false;
  // volatile because the variable may be clobbered by a longjmp.
  volatile intptr_t far_branch_level = 0;

  // In the JIT case we allow speculative inlining and have no need for a
  // suppression, since we don't restart optimization.
  SpeculativeInliningPolicy speculative_policy(/*enable_suppression=*/false);

  Code* volatile result = &Code::ZoneHandle(zone);
  while (!done) {
    *result = Code::null();
    LongJumpScope jump;
    if (setjmp(*jump.Set()) == 0) {
      FlowGraph* flow_graph = nullptr;
      ZoneGrowableArray<const ICData*>* ic_data_array = nullptr;

      CompilerState compiler_state(thread(), /*is_aot=*/false, optimized(),
                                   CompilerState::ShouldTrace(function));
      compiler_state.set_function(function);

      {
        // Extract type feedback before the graph is built, as the graph
        // builder uses it to attach it to nodes.
        ic_data_array = new (zone) ZoneGrowableArray<const ICData*>();

        // Clone ICData for background compilation so that it does not
        // change while compiling.
        const bool clone_ic_data = Compiler::IsBackgroundCompilation();
        function.RestoreICDataMap(ic_data_array, clone_ic_data);

        if (optimized()) {
          ASSERT(function.ic_data_array() != Array::null() ||
                 function.ForceOptimize());
        }

        if (FLAG_print_ic_data_map) {
          for (intptr_t i = 0; i < ic_data_array->length(); i++) {
            if ((*ic_data_array)[i] != nullptr) {
              THR_Print("%" Pd " ", i);
              FlowGraphPrinter::PrintICData(*(*ic_data_array)[i]);
            }
          }
        }

        TIMELINE_DURATION(thread(), CompilerVerbose, "BuildFlowGraph");
        flow_graph = pipeline->BuildFlowGraph(
            zone, parsed_function(), ic_data_array, osr_id(), optimized());
      }

      const bool print_flow_graph =
          (FLAG_print_flow_graph ||
           (optimized() && FLAG_print_flow_graph_optimized)) &&
          FlowGraphPrinter::ShouldPrint(function);

      if (print_flow_graph && !optimized()) {
        FlowGraphPrinter::PrintGraph("Unoptimized Compilation", flow_graph);
      }

      if (flow_graph->should_reorder_blocks()) {
        TIMELINE_DURATION(thread(), CompilerVerbose,
                          "BlockScheduler::AssignEdgeWeights");
        BlockScheduler::AssignEdgeWeights(flow_graph);
      }

      CompilerPassState pass_state(thread(), flow_graph, &speculative_policy);

      if (optimized()) {
        TIMELINE_DURATION(thread(), CompilerVerbose, "OptimizationPasses");

        JitCallSpecializer call_specializer(flow_graph, &speculative_policy);
        pass_state.call_specializer = &call_specializer;

        flow_graph = CompilerPass::RunPipeline(CompilerPass::kJIT, &pass_state);
      }

      ASSERT(pass_state.inline_id_to_function.length() ==
             pass_state.caller_inline_id.length());
      compiler::ObjectPoolBuilder object_pool_builder;
      compiler::Assembler assembler(&object_pool_builder, far_branch_level);
      FlowGraphCompiler graph_compiler(
          &assembler, flow_graph, *parsed_function(), optimized(),
          &speculative_policy, pass_state.inline_id_to_function,
          pass_state.inline_id_to_token_pos, pass_state.caller_inline_id,
          ic_data_array);
      pass_state.graph_compiler = &graph_compiler;
      CompilerPass::GenerateCode(&pass_state);

      {
        TIMELINE_DURATION(thread(), CompilerVerbose, "FinalizeCompilation");

        auto install_code_fun = [&]() {
          *result =
              FinalizeCompilation(&assembler, &graph_compiler, flow_graph);
#if !defined(PRODUCT)
          // Isolate debuggers need to be notified of compiled functions right
          // away as the code is installed, because there might be latent
          // breakpoints in the compiled function which have to be activated
          // before the function's code is executed. Otherwise concurrently
          // running isolates might execute the code before it is patched and
          // miss a need to pause at a breakpoint.
          if (!result->IsNull()) {
            if (!function.HasOptimizedCode()) {
              thread()->isolate_group()->debugger()->NotifyCompilation(
                  function);
            }
          }
#endif
        };

        // Grab the program_lock write lock outside of the potential
        // safepoint; the lock cannot be waited for inside the safepoint.
        // Initially a read lock was added to guard direct_subclasses field
        // access; it was upgraded to a write lock to guard dependent code
        // updates.
        SafepointWriteRwLocker ml(thread(),
                                  thread()->isolate_group()->program_lock());
        // We have to ensure no mutators are running, because:
        //
        //   a) We allocate an instructions object, which might cause us to
        //      temporarily flip page protections (RX -> RW -> RX).
        //
        //   b) We have to ensure the code generated does not violate
        //      assumptions (e.g. CHA, field guards); the validation has to
        //      happen while the mutator is stopped.
        //
        //   c) We update the [Function] object with a new [Code], which
        //      requires updating several pointers: we have to ensure all of
        //      those writes are observed atomically.
        //
        isolate_group()->RunWithStoppedMutators(
            install_code_fun, /*use_force_growth=*/true);
      }
      if (!result->IsNull()) {
        // Must be called outside of safepoint.
        Code::NotifyCodeObservers(function, *result, optimized());

        if (FLAG_disassemble && FlowGraphPrinter::ShouldPrint(function)) {
          Disassembler::DisassembleCode(function, *result, optimized());
        } else if (FLAG_disassemble_optimized && optimized() &&
                   FlowGraphPrinter::ShouldPrint(function)) {
          Disassembler::DisassembleCode(function, *result, true);
        }
      }
      // Exit the loop and the function with the correct result value.
      done = true;
    } else {
      // We bailed out or we encountered an error.
      const Error& error = Error::Handle(thread()->StealStickyError());

      if (error.ptr() == Object::branch_offset_error().ptr()) {
        // Compilation failed due to an out of range branch offset in the
        // assembler. We try again (done = false) with far branches enabled.
        done = false;
        RELEASE_ASSERT(far_branch_level < 2);
        far_branch_level++;
      } else if (error.ptr() == Object::speculative_inlining_error().ptr()) {
        // Can only happen with precompilation.
        UNREACHABLE();
      } else {
        // If the error isn't due to an out of range branch offset, we don't
        // try again (done = true).
        if (FLAG_trace_bailout) {
          THR_Print("%s\n", error.ToErrorCString());
        }
        if (!Compiler::IsBackgroundCompilation() && error.IsLanguageError() &&
            (LanguageError::Cast(error).kind() == Report::kBailout)) {
          // If it is not a background compilation, discard the error if it
          // was not a real error, but just a bailout. If it is a background
          // compilation, this will be dealt with in the caller.
        } else {
          // Otherwise, continue propagating unless we will try again.
          thread()->set_sticky_error(error);
        }
        done = true;
      }
    }
  }
  return result->ptr();
}

static ObjectPtr CompileFunctionHelper(CompilationPipeline* pipeline,
                                       const Function& function,
                                       volatile bool optimized,
                                       intptr_t osr_id) {
  Thread* const thread = Thread::Current();
  NoActiveIsolateScope no_active_isolate(thread);

  ASSERT(!FLAG_precompiled_mode);
  ASSERT(!optimized || function.WasCompiled() || function.ForceOptimize());
  if (function.ForceOptimize()) optimized = true;
  LongJumpScope jump;
  if (setjmp(*jump.Set()) == 0) {
    StackZone stack_zone(thread);
    Zone* const zone = stack_zone.GetZone();
    const bool trace_compiler =
        FLAG_trace_compiler || (FLAG_trace_optimizing_compiler && optimized);
    Timer per_compile_timer;
    per_compile_timer.Start();

    ParsedFunction* parsed_function = new (zone)
        ParsedFunction(thread, Function::ZoneHandle(zone, function.ptr()));
    if (trace_compiler) {
      const intptr_t token_size = function.SourceSize();
      THR_Print("Compiling %s%sfunction %s: '%s' @ token %s, size %" Pd "\n",
                (osr_id == Compiler::kNoOSRDeoptId ? "" : "osr "),
                (optimized ? "optimized " : ""),
                (Compiler::IsBackgroundCompilation() ? "(background)" : ""),
                function.ToFullyQualifiedCString(),
                function.token_pos().ToCString(), token_size);
    }
    // Makes sure no classes are loaded during parsing in background.
    {
      HANDLESCOPE(thread);
      pipeline->ParseFunction(parsed_function);
    }

    CompileParsedFunctionHelper helper(parsed_function, optimized, osr_id);

    const Code& result = Code::Handle(helper.Compile(pipeline));

    if (result.IsNull()) {
      const Error& error = Error::Handle(thread->StealStickyError());

      if (Compiler::IsBackgroundCompilation()) {
        // Try again later; background compilation may abort because of
        // a state change during compilation.
        if (FLAG_trace_compiler) {
          THR_Print("Aborted background compilation: %s\n",
                    function.ToFullyQualifiedCString());
        }

        // We got an error during compilation.
        // If it was a bailout, then disable optimization.
        if (error.ptr() == Object::background_compilation_error().ptr()) {
          if (FLAG_trace_compiler) {
            THR_Print(
                "--> discarding background compilation for '%s' (will "
                "try to re-compile again later)\n",
                function.ToFullyQualifiedCString());
          }

          // Trigger another optimization pass soon.
          function.SetUsageCounter(
              thread->isolate_group()->optimization_counter_threshold() - 100);
          return Error::null();
        } else if (error.IsLanguageError() &&
                   LanguageError::Cast(error).kind() == Report::kBailout) {
          if (FLAG_trace_compiler) {
            THR_Print("--> disabling optimizations for '%s'\n",
                      function.ToFullyQualifiedCString());
          }
          function.SetIsOptimizable(false);
          return Error::null();
        } else {
          // The background compiler does not execute Dart code or handle
          // isolate messages.
          ASSERT(!error.IsUnwindError());
          return error.ptr();
        }
      }
      if (optimized) {
        if (error.IsLanguageError() &&
            LanguageError::Cast(error).kind() == Report::kBailout) {
          // Functions which cannot deoptimize should never bail out.
          ASSERT(!function.ForceOptimize());
          // Optimizer bailed out. Disable optimizations and never try again.
          if (trace_compiler) {
            THR_Print("--> disabling optimizations for '%s'\n",
                      function.ToFullyQualifiedCString());
          } else if (FLAG_trace_failed_optimization_attempts) {
            THR_Print("Cannot optimize: %s\n",
                      function.ToFullyQualifiedCString());
          }
          function.SetIsOptimizable(false);
          return Error::null();
        }
        return error.ptr();
      } else {
        ASSERT(!optimized);
        // The non-optimizing compiler can get an unhandled exception
        // due to OOM or stack overflow errors; it should not, however,
        // bail out.
        ASSERT(error.IsUnhandledException() || error.IsUnwindError() ||
               (error.IsLanguageError() &&
                LanguageError::Cast(error).kind() != Report::kBailout));
        return error.ptr();
      }
      UNREACHABLE();
    }

    per_compile_timer.Stop();

    if (trace_compiler) {
      const auto& code = Code::Handle(function.CurrentCode());
      THR_Print("--> '%s' entry: %#" Px " size: %" Pd " time: %" Pd64 " us\n",
                function.ToFullyQualifiedCString(), code.PayloadStart(),
                code.Size(), per_compile_timer.TotalElapsedTime());
    }

    return result.ptr();
  } else {
    Thread* const thread = Thread::Current();
    StackZone stack_zone(thread);
    // We got an error during compilation or it is a bailout from background
    // compilation (e.g., during parsing with EnsureIsFinalized).
    const Error& error = Error::Handle(thread->StealStickyError());
    if (error.ptr() == Object::background_compilation_error().ptr()) {
      // Exit compilation, retry it later.
      if (FLAG_trace_bailout) {
        THR_Print("Aborted background compilation: %s\n",
                  function.ToFullyQualifiedCString());
      }
      return Object::null();
    }
    // Do not attempt to optimize functions that can cause errors.
    function.set_is_optimizable(false);
    return error.ptr();
  }
  UNREACHABLE();
  return Object::null();
}

ObjectPtr Compiler::CompileFunction(Thread* thread, const Function& function) {
#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
  RELEASE_ASSERT(!FLAG_precompiled_mode);
#endif

#if defined(DART_PRECOMPILED_RUNTIME)
  FATAL("Precompilation missed function %s (%s, %s)\n",
        function.ToLibNamePrefixedQualifiedCString(),
        function.token_pos().ToCString(),
        Function::KindToCString(function.kind()));
#endif  // defined(DART_PRECOMPILED_RUNTIME)

  VMTagScope tagScope(thread, VMTag::kCompileUnoptimizedTagId);
#if defined(SUPPORT_TIMELINE)
  const char* event_name;
  if (IsBackgroundCompilation()) {
    event_name = "CompileFunctionUnoptimizedBackground";
  } else {
    event_name = "CompileFunction";
  }
  TIMELINE_FUNCTION_COMPILATION_DURATION(thread, event_name, function);
#endif  // defined(SUPPORT_TIMELINE)

  CompilationPipeline* pipeline =
      CompilationPipeline::New(thread->zone(), function);

  const bool optimized = function.ForceOptimize();
  return CompileFunctionHelper(pipeline, function, optimized, kNoOSRDeoptId);
}

ErrorPtr Compiler::EnsureUnoptimizedCode(Thread* thread,
                                         const Function& function) {
  ASSERT(!function.ForceOptimize());
  if (function.unoptimized_code() != Object::null()) {
    return Error::null();
  }
  Code& original_code = Code::ZoneHandle(thread->zone());
  if (function.HasCode()) {
    original_code = function.CurrentCode();
  }
  CompilationPipeline* pipeline =
      CompilationPipeline::New(thread->zone(), function);
  const Object& result = Object::Handle(
      CompileFunctionHelper(pipeline, function, false, /* not optimized */
                            kNoOSRDeoptId));
  if (result.IsError()) {
    return Error::Cast(result).ptr();
  }
  // Since CompileFunctionHelper replaces the current code, re-attach the
  // original code if the function was already compiled.
  if (!original_code.IsNull() && result.ptr() == function.CurrentCode() &&
      !original_code.IsDisabled()) {
    function.AttachCode(original_code);
  }
  ASSERT(function.unoptimized_code() != Object::null());
  ASSERT(function.unoptimized_code() == result.ptr());
  if (FLAG_trace_compiler) {
    THR_Print("Ensure unoptimized code for %s\n", function.ToCString());
  }
  return Error::null();
}
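
// Usage sketch (hypothetical caller): a deoptimizer needs the unoptimized
// version to exist without disturbing the currently installed code.
//
//   const Error& error =
//       Error::Handle(Compiler::EnsureUnoptimizedCode(thread, function));
//   if (!error.IsNull()) {
//     Exceptions::PropagateError(error);
//   }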

ObjectPtr Compiler::CompileOptimizedFunction(Thread* thread,
                                             const Function& function,
                                             intptr_t osr_id) {
  VMTagScope tag_scope(thread, VMTag::kCompileOptimizedTagId);

#if defined(SUPPORT_TIMELINE)
  const char* event_name;
  if (osr_id != kNoOSRDeoptId) {
    event_name = "CompileFunctionOptimizedOSR";
  } else if (IsBackgroundCompilation()) {
    event_name = "CompileFunctionOptimizedBackground";
  } else {
    event_name = "CompileFunctionOptimized";
  }
  TIMELINE_FUNCTION_COMPILATION_DURATION(thread, event_name, function);
#endif  // defined(SUPPORT_TIMELINE)

  CompilationPipeline* pipeline =
      CompilationPipeline::New(thread->zone(), function);
  return CompileFunctionHelper(pipeline, function, /* optimized = */ true,
                               osr_id);
}

void Compiler::ComputeLocalVarDescriptors(const Code& code) {
  ASSERT(!code.is_optimized());
  ASSERT(!FLAG_precompiled_mode);
  const Function& function = Function::Handle(code.function());
  ASSERT(code.var_descriptors() == Object::null());
  // Irregexp functions have eager var descriptor generation.
  ASSERT(!function.IsIrregexpFunction());
  // In background compilation, the parser can produce "errors": bailouts
  // if the state changed while compiling in the background.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  CompilerState state(thread, /*is_aot=*/false, /*is_optimizing=*/false);
  LongJumpScope jump;
  if (setjmp(*jump.Set()) == 0) {
    ParsedFunction* parsed_function =
        new ParsedFunction(thread, Function::ZoneHandle(zone, function.ptr()));
    ZoneGrowableArray<const ICData*>* ic_data_array =
        new ZoneGrowableArray<const ICData*>();
    ZoneGrowableArray<intptr_t>* context_level_array =
        new ZoneGrowableArray<intptr_t>();

    kernel::FlowGraphBuilder builder(
        parsed_function, ic_data_array, context_level_array,
        /* not inlining */ nullptr, false, Compiler::kNoOSRDeoptId);
    builder.BuildGraph();

    auto& var_descs = LocalVarDescriptors::Handle(zone);

    var_descs = parsed_function->scope()->GetVarDescriptors(
        function, context_level_array);

    ASSERT(!var_descs.IsNull());
    code.set_var_descriptors(var_descs);
  } else {
    // Only possible with background compilation.
    ASSERT(Compiler::IsBackgroundCompilation());
  }
}

ErrorPtr Compiler::CompileAllFunctions(const Class& cls) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Object& result = Object::Handle(zone);
  // We don't expect functions() to change as the class was finalized.
  ASSERT(cls.is_finalized());
  Array& functions = Array::Handle(zone, cls.current_functions());
  Function& func = Function::Handle(zone);
  // Compile all the regular functions.
  for (int i = 0; i < functions.Length(); i++) {
    func ^= functions.At(i);
    ASSERT(!func.IsNull());
    if (!func.HasCode() && !func.is_abstract()) {
      result = CompileFunction(thread, func);
      if (result.IsError()) {
        return Error::Cast(result).ptr();
      }
      ASSERT(!result.IsNull());
    }
  }
  return Error::null();
}

void Compiler::AbortBackgroundCompilation(intptr_t deopt_id, const char* msg) {
  if (FLAG_trace_compiler) {
    THR_Print("ABORT background compilation: %s\n", msg);
  }
#if !defined(PRODUCT)
  TimelineStream* stream = Timeline::GetCompilerStream();
  ASSERT(stream != nullptr);
  TimelineEvent* event = stream->StartEvent();
  if (event != nullptr) {
    event->Instant("AbortBackgroundCompilation");
    event->SetNumArguments(1);
    event->CopyArgument(0, "reason", msg);
    event->Complete();
  }
#endif  // !defined(PRODUCT)
  ASSERT(Compiler::IsBackgroundCompilation());
  Thread::Current()->long_jump_base()->Jump(
      deopt_id, Object::background_compilation_error());
}

// C-heap allocated background compilation queue element.
class QueueElement {
 public:
  explicit QueueElement(const Function& function)
      : next_(nullptr), function_(function.ptr()) {}

  virtual ~QueueElement() {
    next_ = nullptr;
    function_ = Function::null();
  }

  FunctionPtr Function() const { return function_; }

  void set_next(QueueElement* elem) { next_ = elem; }
  QueueElement* next() const { return next_; }

  ObjectPtr function() const { return function_; }
  ObjectPtr* function_untag() {
    return reinterpret_cast<ObjectPtr*>(&function_);
  }

 private:
  QueueElement* next_;
  FunctionPtr function_;

  DISALLOW_COPY_AND_ASSIGN(QueueElement);
};
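
// Ownership note: QueueElements live on the C heap; the producer
// (BackgroundCompiler::EnqueueCompilation below) allocates them with `new`,
// and the consumer frees them, either after Remove() in
// BackgroundCompiler::Run() or wholesale in BackgroundCompilationQueue::Clear().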

// Allocated in C-heap. Handles both input and output of background
// compilation. It implements a FIFO queue, using Peek, Add, Remove operations.
class BackgroundCompilationQueue {
 public:
  BackgroundCompilationQueue() : first_(nullptr), last_(nullptr) {}
  virtual ~BackgroundCompilationQueue() { Clear(); }

  void VisitObjectPointers(ObjectPointerVisitor* visitor) {
    ASSERT(visitor != nullptr);
    QueueElement* p = first_;
    while (p != nullptr) {
      visitor->VisitPointer(p->function_untag());
      p = p->next();
    }
  }

  bool IsEmpty() const { return first_ == nullptr; }

  void Add(QueueElement* value) {
    ASSERT(value != nullptr);
    ASSERT(value->next() == nullptr);
    if (first_ == nullptr) {
      first_ = value;
      ASSERT(last_ == nullptr);
    } else {
      ASSERT(last_ != nullptr);
      last_->set_next(value);
    }
    last_ = value;
    ASSERT(first_ != nullptr && last_ != nullptr);
  }

  QueueElement* Peek() const { return first_; }

  FunctionPtr PeekFunction() const {
    QueueElement* e = Peek();
    if (e == nullptr) {
      return Function::null();
    } else {
      return e->Function();
    }
  }

  QueueElement* Remove() {
    ASSERT(first_ != nullptr);
    QueueElement* result = first_;
    first_ = first_->next();
    if (first_ == nullptr) {
      last_ = nullptr;
    }
    return result;
  }

  bool ContainsObj(const Object& obj) const {
    QueueElement* p = first_;
    while (p != nullptr) {
      if (p->function() == obj.ptr()) {
        return true;
      }
      p = p->next();
    }
    return false;
  }

  void Clear() {
    while (!IsEmpty()) {
      QueueElement* e = Remove();
      delete e;
    }
    ASSERT((first_ == nullptr) && (last_ == nullptr));
  }

 private:
  QueueElement* first_;
  QueueElement* last_;

  DISALLOW_COPY_AND_ASSIGN(BackgroundCompilationQueue);
};
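
// Usage sketch, mirroring how BackgroundCompiler drives the queue below
// (all accesses happen under the compiler's monitor):
//
//   BackgroundCompilationQueue queue;
//   queue.Add(new QueueElement(function));  // producer side
//   if (!queue.IsEmpty()) {
//     QueueElement* e = queue.Remove();     // consumer side
//     function ^= e->function();
//     delete e;
//     // ... compile `function` ...
//   }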

class BackgroundCompilerTask : public ThreadPool::Task {
 public:
  explicit BackgroundCompilerTask(BackgroundCompiler* background_compiler)
      : background_compiler_(background_compiler) {}
  virtual ~BackgroundCompilerTask() {}

 private:
  virtual void Run() { background_compiler_->Run(); }

  BackgroundCompiler* background_compiler_;

  DISALLOW_COPY_AND_ASSIGN(BackgroundCompilerTask);
};

BackgroundCompiler::BackgroundCompiler(IsolateGroup* isolate_group)
    : isolate_group_(isolate_group),
      monitor_(),
      function_queue_(new BackgroundCompilationQueue()),
      running_(false),
      done_(true),
      disabled_depth_(0) {}

// The background task is stopped in ::Stop; here we only free the queue.
BackgroundCompiler::~BackgroundCompiler() {
  delete function_queue_;
}

void BackgroundCompiler::Run() {
  bool result = Thread::EnterIsolateGroupAsHelper(
      isolate_group_, Thread::kCompilerTask, /*bypass_safepoint=*/false);
  ASSERT(result);
  {
    Thread* thread = Thread::Current();
    StackZone stack_zone(thread);
    Zone* zone = stack_zone.GetZone();
    HANDLESCOPE(thread);
    Function& function = Function::Handle(zone);
    QueueElement* element = nullptr;
    {
      SafepointMonitorLocker ml(&monitor_);
      if (running_ && !function_queue()->IsEmpty()) {
        element = function_queue()->Remove();
        function ^= element->function();
      }
    }
    if (element != nullptr) {
      delete element;
      Compiler::CompileOptimizedFunction(thread, function);

      // If an optimizable method is not optimized, put it back on
      // the background queue (unless it was passed to foreground).
      if ((!function.HasOptimizedCode() && function.IsOptimizable()) ||
          FLAG_stress_test_background_compilation) {
        SafepointMonitorLocker ml(&monitor_);
        if (running_) {
          QueueElement* repeat_qelem = new QueueElement(function);
          function_queue()->Add(repeat_qelem);
        }
      }
    }
  }
  Thread::ExitIsolateGroupAsHelper(/*bypass_safepoint=*/false);
  {
    MonitorLocker ml(&monitor_);
    if (running_ && !function_queue()->IsEmpty() &&
        Dart::thread_pool()->Run<BackgroundCompilerTask>(this)) {
      // Successfully scheduled a new task.
    } else {
      // Background compiler done. This notification must happen after the
      // thread leaves the group to avoid a shutdown race with the thread
      // registry.
      running_ = false;
      done_ = true;
      ml.NotifyAll();
    }
  }
}

bool BackgroundCompiler::EnqueueCompilation(const Function& function) {
  Thread* thread = Thread::Current();
  ASSERT(thread->IsDartMutatorThread());
  ASSERT(thread->CanAcquireSafepointLocks());

  SafepointMonitorLocker ml(&monitor_);
  if (disabled_depth_ > 0) return false;
  if (!running_ && done_) {
    running_ = true;
    done_ = false;
    // If we ever wanted to run the BG compiler on the
    // `IsolateGroup::mutator_pool()` we would need to ensure the BG compiler
    // stops when it's idle - otherwise the [MutatorThreadPool]-based idle
    // notification would not work anymore.
    if (!Dart::thread_pool()->Run<BackgroundCompilerTask>(this)) {
      running_ = false;
      done_ = true;
      return false;
    }
  }

  ASSERT(running_);
  if (function_queue()->ContainsObj(function)) {
    return true;
  }
  QueueElement* elem = new QueueElement(function);
  function_queue()->Add(elem);
  ml.NotifyAll();
  return true;
}

void BackgroundCompiler::VisitPointers(ObjectPointerVisitor* visitor) {
  function_queue_->VisitObjectPointers(visitor);
}

void BackgroundCompiler::Stop() {
  Thread* thread = Thread::Current();
  ASSERT(thread->isolate() == nullptr || !thread->BypassSafepoints());
  ASSERT(thread->CanAcquireSafepointLocks());

  SafepointMonitorLocker ml(&monitor_);
  StopLocked(thread, &ml);
}

void BackgroundCompiler::StopLocked(Thread* thread,
                                    SafepointMonitorLocker* locker) {
  running_ = false;
  function_queue_->Clear();
  while (!done_) {
    locker->Wait();
  }
}

void BackgroundCompiler::Enable() {
  Thread* thread = Thread::Current();
  ASSERT(!thread->BypassSafepoints());
  ASSERT(thread->CanAcquireSafepointLocks());

  SafepointMonitorLocker ml(&monitor_);
  disabled_depth_--;
  if (disabled_depth_ < 0) {
    FATAL("Mismatched number of calls to BackgroundCompiler::Enable/Disable.");
  }
}

void BackgroundCompiler::Disable() {
  Thread* thread = Thread::Current();
  ASSERT(!thread->BypassSafepoints());
  ASSERT(thread->CanAcquireSafepointLocks());

  SafepointMonitorLocker ml(&monitor_);
  disabled_depth_++;
  if (done_) return;
  StopLocked(thread, &ml);
}

#else  // DART_PRECOMPILED_RUNTIME

CompilationPipeline* CompilationPipeline::New(Zone* zone,
                                              const Function& function) {
  UNREACHABLE();
  return nullptr;
}

DEFINE_RUNTIME_ENTRY(CompileFunction, 1) {
  const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(0));
  FATAL("Precompilation missed function %s (%s, %s)\n",
        function.ToLibNamePrefixedQualifiedCString(),
        function.token_pos().ToCString(),
        Function::KindToCString(function.kind()));
}

bool Compiler::IsBackgroundCompilation() {
  return false;
}

bool Compiler::CanOptimizeFunction(Thread* thread, const Function& function) {
  UNREACHABLE();
  return false;
}

ObjectPtr Compiler::CompileFunction(Thread* thread, const Function& function) {
  FATAL("Attempt to compile function %s", function.ToCString());
  return Error::null();
}

ErrorPtr Compiler::EnsureUnoptimizedCode(Thread* thread,
                                         const Function& function) {
  FATAL("Attempt to compile function %s", function.ToCString());
  return Error::null();
}

ObjectPtr Compiler::CompileOptimizedFunction(Thread* thread,
                                             const Function& function,
                                             intptr_t osr_id) {
  FATAL("Attempt to compile function %s", function.ToCString());
  return Error::null();
}

void Compiler::ComputeLocalVarDescriptors(const Code& code) {
  UNREACHABLE();
}

ErrorPtr Compiler::CompileAllFunctions(const Class& cls) {
  FATAL("Attempt to compile class %s", cls.ToCString());
  return Error::null();
}

void Compiler::AbortBackgroundCompilation(intptr_t deopt_id, const char* msg) {
  UNREACHABLE();
}

bool BackgroundCompiler::EnqueueCompilation(const Function& function) {
  UNREACHABLE();
  return false;
}

void BackgroundCompiler::VisitPointers(ObjectPointerVisitor* visitor) {
  UNREACHABLE();
}

void BackgroundCompiler::Stop() {
  UNREACHABLE();
}

void BackgroundCompiler::Enable() {
  // NOP
}

void BackgroundCompiler::Disable() {
  // NOP
}

#endif  // DART_PRECOMPILED_RUNTIME

}  // namespace dart