// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#ifndef RUNTIME_VM_COMPILER_FRONTEND_KERNEL_TO_IL_H_
#define RUNTIME_VM_COMPILER_FRONTEND_KERNEL_TO_IL_H_

#if defined(DART_PRECOMPILED_RUNTIME)
#error "AOT runtime should not use compiler sources (including header files)"
#endif  // defined(DART_PRECOMPILED_RUNTIME)

#include "vm/growable_array.h"
#include "vm/hash_map.h"

#include "vm/compiler/backend/flow_graph.h"
#include "vm/compiler/backend/il.h"
#include "vm/compiler/ffi/marshaller.h"
#include "vm/compiler/frontend/base_flow_graph_builder.h"
#include "vm/compiler/frontend/kernel_translation_helper.h"
#include "vm/compiler/frontend/scope_builder.h"

#include "vm/object_store.h"

namespace dart {

class InlineExitCollector;

namespace kernel {

class StreamingFlowGraphBuilder;
struct InferredTypeMetadata;
class BreakableBlock;
class CatchBlock;
class FlowGraphBuilder;
class SwitchBlock;
class TryCatchBlock;
class TryFinallyBlock;

class FlowGraphBuilder : public BaseFlowGraphBuilder {
 public:
  FlowGraphBuilder(ParsedFunction* parsed_function,
                   ZoneGrowableArray<const ICData*>* ic_data_array,
                   ZoneGrowableArray<intptr_t>* context_level_array,
                   InlineExitCollector* exit_collector,
                   bool optimizing,
                   intptr_t osr_id,
                   intptr_t first_block_id = 1,
                   bool inlining_unchecked_entry = false);
  virtual ~FlowGraphBuilder();

  FlowGraph* BuildGraph();

  // Returns true if given [function] is recognized for flow
  // graph building and its body is expressed in a custom-built IL.
  static bool IsRecognizedMethodForFlowGraph(const Function& function);

  // Returns true if custom flow graph for given [function]
  // needs an expression_temp_var().
  static bool IsExpressionTempVarUsedInRecognizedMethodFlowGraph(
      const Function& function);

 private:
  BlockEntryInstr* BuildPrologue(BlockEntryInstr* normal_entry,
                                 PrologueInfo* prologue_info);

  // Return names of optional named parameters of [function].
  ArrayPtr GetOptionalParameterNames(const Function& function);

  // Generate fragment which pushes all explicit parameters of [function].
  Fragment PushExplicitParameters(
      const Function& function,
      const Function& target = Function::null_function());

  FlowGraph* BuildGraphOfMethodExtractor(const Function& method);
  FlowGraph* BuildGraphOfNoSuchMethodDispatcher(const Function& function);
  FlowGraph* BuildGraphOfRecordFieldGetter(const Function& function);

  struct ClosureCallInfo;

  // Tests whether the closure function is generic and branches to the
  // appropriate fragment.
  Fragment TestClosureFunctionGeneric(const ClosureCallInfo& info,
                                      Fragment generic,
                                      Fragment not_generic);

  // Tests whether the function parameter at the given index is required and
  // branches to the appropriate fragment. Loads the parameter index to
  // check from info.vars->current_param_index.
  Fragment TestClosureFunctionNamedParameterRequired(
      const ClosureCallInfo& info,
      Fragment set,
      Fragment not_set);

  // Builds a fragment that, if there are no provided function type arguments,
  // calculates the appropriate TAV to use instead. Stores either the provided
  // or calculated function type arguments in vars->function_type_args.
  Fragment BuildClosureCallDefaultTypeHandling(const ClosureCallInfo& info);

  // The BuildClosureCall...Check methods differ from the checks built in the
  // PrologueBuilder in that they are built for invoke field dispatchers,
  // where the ArgumentsDescriptor is known at compile time but the specific
  // closure function is retrieved at runtime.

  // Builds checks that the given named arguments have valid argument names
  // and, in the case of null safe code, that all required named parameters
  // are provided.
  Fragment BuildClosureCallNamedArgumentsCheck(const ClosureCallInfo& info);

  // Builds checks for checking the arguments of a call are valid for the
  // function retrieved at runtime from the closure.
  Fragment BuildClosureCallArgumentsValidCheck(const ClosureCallInfo& info);

  // Builds checks that the type arguments of a call are consistent with the
  // bounds of the closure function type parameters. Assumes that the closure
  // function is generic.
  Fragment BuildClosureCallTypeArgumentsTypeCheck(const ClosureCallInfo& info);

  // Builds checks for type checking a given argument of the closure call using
  // parameter information from the closure function retrieved at runtime.
  //
  // For named arguments, arg_name is a compile-time constant retrieved from
  // the saved arguments descriptor. For positional arguments, null is passed.
  Fragment BuildClosureCallArgumentTypeCheck(const ClosureCallInfo& info,
                                             LocalVariable* param_index,
                                             intptr_t arg_index,
                                             const String& arg_name);

  // Builds checks for type checking the arguments of a call using parameter
  // information for the function retrieved at runtime from the closure.
  Fragment BuildClosureCallArgumentTypeChecks(const ClosureCallInfo& info);

  // Main entry point for building checks.
  Fragment BuildDynamicClosureCallChecks(LocalVariable* closure);
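
  // Example: for a dynamic invocation such as
  //
  //   dynamic f = <T>(T x, {required int y}) => x;
  //   f(1, y: 2);
  //
  // the callee is only known at runtime, so the fragments above must check
  // argument names, argument counts, type arguments, and argument types
  // against whatever closure function [closure] actually holds.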

  FlowGraph* BuildGraphOfInvokeFieldDispatcher(const Function& function);
  FlowGraph* BuildGraphOfFfiTrampoline(const Function& function);
  FlowGraph* BuildGraphOfSyncFfiCallback(const Function& function);
  FlowGraph* BuildGraphOfAsyncFfiCallback(const Function& function);

  // Resolves the address of a native symbol from the constant data of a
  // vm:ffi:native pragma.
  // Because it's used in optimized mode (as part of the implementation of
  // @Native functions), it pushes the value as an untagged value. This is safe
  // to use in unoptimized mode too as long as the untagged value is consumed
  // immediately.
  Fragment FfiNativeLookupAddress(const Instance& native);
  // Expects target address on stack.
  Fragment FfiCallFunctionBody(const Function& function,
                               const FunctionType& c_signature,
                               intptr_t first_argument_parameter_offset);
  Fragment FfiNativeFunctionBody(const Function& function);
  Fragment NativeFunctionBody(const Function& function,
                              LocalVariable* first_parameter);
  Fragment LoadNativeArg(const compiler::ffi::CallbackMarshaller& marshaller,
                         intptr_t arg_index);

  FlowGraph* BuildGraphOfRecognizedMethod(const Function& function);

  Fragment BuildTypedListGet(const Function& function, classid_t cid);
  Fragment BuildTypedListSet(const Function& function, classid_t cid);
  Fragment BuildTypedDataMemMove(const Function& function, classid_t cid);
  Fragment BuildTypedDataViewFactoryConstructor(const Function& function,
                                                classid_t cid);
  Fragment BuildTypedDataFactoryConstructor(const Function& function,
                                            classid_t cid);

  Fragment EnterScope(intptr_t kernel_offset,
                      const LocalScope** scope = nullptr);
  Fragment ExitScope(intptr_t kernel_offset);

  Fragment AdjustContextTo(int depth);

  Fragment PushContext(const LocalScope* scope);
  Fragment PopContext();

  Fragment LoadInstantiatorTypeArguments();
  Fragment LoadFunctionTypeArguments();
  Fragment TranslateInstantiatedTypeArguments(
      const TypeArguments& type_arguments);

  Fragment CatchBlockEntry(const Array& handler_types,
                           intptr_t handler_index,
                           bool needs_stacktrace,
                           bool is_synthesized);
  Fragment TryCatch(int try_handler_index);
  Fragment CheckStackOverflowInPrologue(TokenPosition position);
  Fragment CloneContext(const ZoneGrowableArray<const Slot*>& context_slots);

  Fragment InstanceCall(
      TokenPosition position,
      const String& name,
      Token::Kind kind,
      intptr_t type_args_len,
      intptr_t argument_count,
      const Array& argument_names,
      intptr_t checked_argument_count,
      const Function& interface_target = Function::null_function(),
      const Function& tearoff_interface_target = Function::null_function(),
      const InferredTypeMetadata* result_type = nullptr,
      bool use_unchecked_entry = false,
      const CallSiteAttributesMetadata* call_site_attrs = nullptr,
      bool receiver_is_not_smi = false,
      bool is_call_on_this = false);

  Fragment FfiCall(const compiler::ffi::CallMarshaller& marshaller,
                   bool is_leaf);

  Fragment CallLeafRuntimeEntry(
      const RuntimeEntry& entry,
      Representation return_representation,
      const ZoneGrowableArray<Representation>& argument_representations);

  Fragment RethrowException(TokenPosition position, int catch_try_index);
  Fragment LoadLocal(LocalVariable* variable);
  IndirectGotoInstr* IndirectGoto(intptr_t target_count);
  Fragment StoreLateField(const Field& field,
                          LocalVariable* instance,
                          LocalVariable* setter_value);
  Fragment NativeCall(const String& name, const Function& function);
  Fragment Return(TokenPosition position, bool omit_result_type_check = false);
  void SetResultTypeForStaticCall(StaticCallInstr* call,
                                  const Function& target,
                                  intptr_t argument_count,
                                  const InferredTypeMetadata* result_type);
  Fragment StaticCall(TokenPosition position,
                      const Function& target,
                      intptr_t argument_count,
                      ICData::RebindRule rebind_rule);
  Fragment StaticCall(TokenPosition position,
                      const Function& target,
                      intptr_t argument_count,
                      const Array& argument_names,
                      ICData::RebindRule rebind_rule,
                      const InferredTypeMetadata* result_type = nullptr,
                      intptr_t type_args_len = 0,
                      bool use_unchecked_entry = false);
  Fragment CachableIdempotentCall(TokenPosition position,
                                  Representation representation,
                                  const Function& target,
                                  intptr_t argument_count,
                                  const Array& argument_names,
                                  intptr_t type_args_len = 0);
  Fragment StringInterpolateSingle(TokenPosition position);
  Fragment StringInterpolate(TokenPosition position);
  Fragment ThrowTypeError();

  // [incompatible_arguments] should be true if the NSM is due to a mismatch
  // between the provided arguments and the function signature.
  Fragment ThrowNoSuchMethodError(TokenPosition position,
                                  const Function& target,
                                  bool incompatible_arguments,
                                  bool receiver_pushed = false);
  Fragment ThrowNoSuchMethodError(TokenPosition position,
                                  const String& selector,
                                  InvocationMirror::Level level,
                                  InvocationMirror::Kind kind,
                                  bool receiver_pushed = false);
  Fragment ThrowLateInitializationError(TokenPosition position,
                                        const char* throw_method_name,
                                        const String& name);
  Fragment BuildImplicitClosureCreation(TokenPosition position,
                                        const Function& target);

  Fragment EvaluateAssertion();
  Fragment CheckVariableTypeInCheckedMode(const AbstractType& dst_type,
                                          const String& name_symbol);
  Fragment CheckBoolean(TokenPosition position);
  Fragment CheckAssignable(
      const AbstractType& dst_type,
      const String& dst_name,
      AssertAssignableInstr::Kind kind = AssertAssignableInstr::kUnknown,
      TokenPosition token_pos = TokenPosition::kNoSource);

  Fragment AssertAssignableLoadTypeArguments(
      TokenPosition position,
      const AbstractType& dst_type,
      const String& dst_name,
      AssertAssignableInstr::Kind kind = AssertAssignableInstr::kUnknown);
  Fragment AssertSubtype(TokenPosition position,
                         const AbstractType& sub_type,
                         const AbstractType& super_type,
                         const String& dst_name);
  // Assumes destination name, supertype, and subtype are the top of the stack.
  Fragment AssertSubtype(TokenPosition position);

  bool NeedsDebugStepCheck(const Function& function, TokenPosition position);
  bool NeedsDebugStepCheck(Value* value, TokenPosition position);

  // Deals with StoreIndexed not working with kUnboxedFloat.
  // TODO(dartbug.com/43448): Remove this workaround.
  Fragment StoreIndexedTypedDataUnboxed(Representation unboxed_representation,
                                        intptr_t index_scale,
                                        bool index_unboxed);
  // Deals with LoadIndexed not working with kUnboxedFloat.
  // TODO(dartbug.com/43448): Remove this workaround.
  Fragment LoadIndexedTypedDataUnboxed(Representation unboxed_representation,
                                       intptr_t index_scale,
                                       bool index_unboxed);

  // Truncates (instead of deoptimizing) if the origin does not fit into the
  // target representation.
  Fragment UnboxTruncate(Representation to);
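
  // Example: UnboxTruncate(kUnboxedUint32) applied to the int64 value
  // 0x100000001 produces 1; the high 32 bits are silently dropped rather
  // than triggering a deoptimization.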

  // Loads the (untagged) thread address.
  Fragment LoadThread();

  // Loads the (untagged) isolate address.
  Fragment LoadIsolate();

  // Loads the (untagged) current IsolateGroup address.
  Fragment LoadIsolateGroup();

  // Loads the (untagged) current ObjectStore address.
  Fragment LoadObjectStore();

  // Loads the (untagged) service extension stream address.
  Fragment LoadServiceExtensionStream();

  // Converts true to 1 and false to 0.
  Fragment BoolToInt();

  // Converts 0 to false and the rest to true.
  Fragment IntToBool();

  // Compares arbitrary integers.
  Fragment IntRelationalOp(TokenPosition position, Token::Kind kind);

  // Pops a Dart object and pushes the unboxed native version, according to the
  // semantics of FFI argument translation.
  //
  // Works for FFI call arguments, and FFI callback return values.
  //
  // If `marshaller.IsCompoundPointer(arg_index)`, then [variable] must point to
  // a valid LocalVariable.
  Fragment FfiConvertPrimitiveToNative(
      const compiler::ffi::BaseMarshaller& marshaller,
      intptr_t arg_index,
      LocalVariable* variable = nullptr);

  // Pops an unboxed native value, and pushes a Dart object, according to the
  // semantics of FFI argument translation.
  //
  // Works for FFI call return values, and FFI callback arguments.
  Fragment FfiConvertPrimitiveToDart(
      const compiler::ffi::BaseMarshaller& marshaller,
      intptr_t arg_index);

  // We pass in `variable` instead of on top of the stack so that we can have
  // multiple consecutive calls that keep only compound parts on the stack with
  // no compound parts in between.
  Fragment LoadTail(LocalVariable* variable,
                    intptr_t size,
                    intptr_t offset_in_bytes,
                    Representation representation);
  Fragment FfiCallConvertCompoundArgumentToNative(
      LocalVariable* variable,
      const compiler::ffi::BaseMarshaller& marshaller,
      intptr_t arg_index);

  Fragment FfiCallConvertCompoundReturnToDart(
      const compiler::ffi::BaseMarshaller& marshaller,
      intptr_t arg_index);

  // We pass in multiple `definitions`, which are also expected to be the top
  // of the stack. This eases storing each definition in the resulting struct
  // or union.
  Fragment FfiCallbackConvertCompoundArgumentToDart(
      const compiler::ffi::BaseMarshaller& marshaller,
      intptr_t arg_index,
      ZoneGrowableArray<LocalVariable*>* definitions);

  Fragment FfiCallbackConvertCompoundReturnToNative(
      const compiler::ffi::CallbackMarshaller& marshaller,
      intptr_t arg_index);

  // Takes a TypedDataBase from the stack and wraps it in a subclass of
  // _Compound.
  Fragment WrapTypedDataBaseInCompound(const AbstractType& compound_type);

  // Loads the _typedDataBase field from a subclass of _Compound.
  Fragment LoadTypedDataBaseFromCompound();
  Fragment LoadOffsetInBytesFromCompound();

  // Copy `definitions` into TypedData.
  //
  // Expects the TypedData on top of the stack and `definitions` right under it.
  //
  // Leaves TypedData on stack.
  //
  // The compound contents are heterogeneous, so pass in `representations` to
  // know what representation to load.
  Fragment PopFromStackToTypedDataBase(
      ZoneGrowableArray<LocalVariable*>* definitions,
      const GrowableArray<Representation>& representations);

  // Wrap the current exception and stacktrace in an unhandled exception.
  Fragment UnhandledException();

  // Return from a native -> Dart callback. Can only be used in conjunction
  // with NativeEntry and NativeParameter.
  Fragment NativeReturn(const compiler::ffi::CallbackMarshaller& marshaller);

  // Bit-wise cast between representations.
  // Pops the input and pushes the converted result.
  // Currently only works with equal sizes and floating point <-> integer.
  Fragment BitCast(Representation from, Representation to);
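
  // Example: BitCast(kUnboxedDouble, kUnboxedInt64) reinterprets the 64 bits
  // of a double unchanged, so 1.0 comes out as 0x3FF0000000000000, its
  // IEEE-754 bit pattern.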

  // Generates Call1ArgStub instruction.
  Fragment Call1ArgStub(TokenPosition position,
                        Call1ArgStubInstr::StubId stub_id);

  // Generates Suspend instruction.
  Fragment Suspend(TokenPosition position, SuspendInstr::StubId stub_id);

  LocalVariable* LookupVariable(intptr_t kernel_offset);

  // Build type argument type checks for the current function.
  // ParsedFunction should have the following information:
  // - is_forwarding_stub()
  // - forwarding_stub_super_target()
  // Scope should be populated with parameter variables including
  // - needs_type_check()
  // - is_explicit_covariant_parameter()
  void BuildTypeArgumentTypeChecks(TypeChecksToBuild mode,
                                   Fragment* implicit_checks);

  // Build argument type checks for the current function.
  // ParsedFunction should have the following information:
  // - is_forwarding_stub()
  // - forwarding_stub_super_target()
  // Scope should be populated with parameter variables including
  // - needs_type_check()
  // - is_explicit_covariant_parameter()
  void BuildArgumentTypeChecks(Fragment* explicit_checks,
                               Fragment* implicit_checks,
                               Fragment* implicit_redefinitions);

  // Builds flow graph for noSuchMethod forwarder.
  //
  // If throw_no_such_method_error is set to true, an
  // instance of NoSuchMethodError is thrown. Otherwise, the instance
  // noSuchMethod is called.
  //
  // ParsedFunction should have the following information:
  // - default_parameter_values()
  // - is_forwarding_stub()
  // - forwarding_stub_super_target()
  //
  // Scope should be populated with parameter variables including
  // - needs_type_check()
  // - is_explicit_covariant_parameter()
  //
  FlowGraph* BuildGraphOfNoSuchMethodForwarder(
      const Function& function,
      bool is_implicit_closure_function,
      bool throw_no_such_method_error);

  // If no type arguments are passed to a generic function, we need to fill the
  // type arguments in with the default types stored on the TypeParameter nodes
  // in Kernel.
  //
  // ParsedFunction should have the following information:
  // - DefaultFunctionTypeArguments()
  // - function_type_arguments()
  Fragment BuildDefaultTypeHandling(const Function& function);

  FunctionEntryInstr* BuildSharedUncheckedEntryPoint(
      Fragment prologue_from_normal_entry,
      Fragment skippable_checks,
      Fragment redefinitions_if_skipped,
      Fragment body);
  FunctionEntryInstr* BuildSeparateUncheckedEntryPoint(
      BlockEntryInstr* normal_entry,
      Fragment normal_prologue,
      Fragment extra_prologue,
      Fragment shared_prologue,
      Fragment body);

  // Builds flow graph for implicit closure function (tear-off).
  //
  // ParsedFunction should have the following information:
  // - DefaultFunctionTypeArguments()
  // - function_type_arguments()
  // - default_parameter_values()
  // - is_forwarding_stub()
  // - forwarding_stub_super_target()
  //
  // Scope should be populated with parameter variables including
  // - needs_type_check()
  // - is_explicit_covariant_parameter()
  //
  FlowGraph* BuildGraphOfImplicitClosureFunction(const Function& function);

  // Builds flow graph of implicit field getter, setter, or a
  // dynamic invocation forwarder to a field setter.
  //
  // If field is const, its value should be evaluated and stored in
  // - StaticValue()
  //
  // Scope should be populated with parameter variables including
  // - needs_type_check()
  //
  FlowGraph* BuildGraphOfFieldAccessor(const Function& function);

  // Builds flow graph of dynamic invocation forwarder.
  //
  // ParsedFunction should have the following information:
  // - DefaultFunctionTypeArguments()
  // - function_type_arguments()
  // - default_parameter_values()
  // - is_forwarding_stub()
  // - forwarding_stub_super_target()
  //
  // Scope should be populated with parameter variables including
  // - needs_type_check()
  // - is_explicit_covariant_parameter()
  //
  FlowGraph* BuildGraphOfDynamicInvocationForwarder(const Function& function);

  void SetConstantRangeOfCurrentDefinition(const Fragment& fragment,
                                           int64_t min,
                                           int64_t max);

  // Extracts a packed field out of the unboxed value with representation
  // [rep] on the top of the stack. Picks a sequence that keeps unboxed values
  // on the expression stack only as needed, switching to Smis as soon as
  // possible.
  template <typename T>
  Fragment BuildExtractUnboxedSlotBitFieldIntoSmi(const Slot& slot) {
    ASSERT(RepresentationUtils::IsUnboxedInteger(slot.representation()));
    Fragment instructions;
    if (!Boxing::RequiresAllocation(slot.representation())) {
      // We don't need to allocate to box this value, so it already fits in
      // a Smi (and thus the mask must also).
      instructions += LoadNativeField(slot);
      instructions += Box(slot.representation());
      instructions += IntConstant(T::mask_in_place());
      instructions += SmiBinaryOp(Token::kBIT_AND);
    } else {
      // Since kBIT_AND never throws or deoptimizes, we require that the result
      // of masking the field in place fits into a Smi, so we can use Smi
      // operations for the shift.
      static_assert(T::mask_in_place() <= compiler::target::kSmiMax,
                    "Cannot fit results of masking in place into a Smi");
      instructions += LoadNativeField(slot);
      instructions +=
          UnboxedIntConstant(T::mask_in_place(), slot.representation());
      instructions += BinaryIntegerOp(Token::kBIT_AND, slot.representation());
      // Set the range of the definition that will be used as the value in the
      // box so that ValueFitsSmi() returns true even in unoptimized code.
      SetConstantRangeOfCurrentDefinition(instructions, 0, T::mask_in_place());
      instructions += Box(slot.representation());
    }
    if (T::shift() != 0) {
      // Only add the shift operation if it's necessary.
      instructions += IntConstant(T::shift());
      instructions += SmiBinaryOp(Token::kSHR);
    }
    return instructions;
  }
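
  // Worked example (illustrative): for a hypothetical bit field T with
  // T::shift() == 8 and T::mask_in_place() == 0xFF00, a loaded field value
  // of 0x1234 yields (0x1234 & 0xFF00) >> 8 == 0x12 as a Smi.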

  Fragment BuildDoubleHashCode();
  Fragment BuildIntegerHashCode(bool smi);

  TranslationHelper translation_helper_;
  Thread* thread_;
  Zone* zone_;

  ParsedFunction* parsed_function_;
  const bool optimizing_;
  ZoneGrowableArray<const ICData*>& ic_data_array_;

  intptr_t next_function_id_;
  intptr_t AllocateFunctionId() { return next_function_id_++; }

  intptr_t loop_depth_;
  intptr_t try_depth_;
  intptr_t catch_depth_;
  intptr_t block_expression_depth_;

  GraphEntryInstr* graph_entry_;

  ScopeBuildingResult* scopes_;

  LocalVariable* CurrentException() {
    return scopes_->exception_variables[catch_depth_ - 1];
  }
  LocalVariable* CurrentStackTrace() {
    return scopes_->stack_trace_variables[catch_depth_ - 1];
  }
  LocalVariable* CurrentRawException() {
    return scopes_->raw_exception_variables[catch_depth_ - 1];
  }
  LocalVariable* CurrentRawStackTrace() {
    return scopes_->raw_stack_trace_variables[catch_depth_ - 1];
  }
  LocalVariable* CurrentCatchContext() {
    return scopes_->catch_context_variables[try_depth_];
  }

  TryCatchBlock* CurrentTryCatchBlock() const { return try_catch_block_; }

  void SetCurrentTryCatchBlock(TryCatchBlock* try_catch_block);

  // A chained list of breakable blocks. Chaining and lookup is done by the
  // [BreakableBlock] class.
  BreakableBlock* breakable_block_;

  // A chained list of switch blocks. Chaining and lookup is done by the
  // [SwitchBlock] class.
  SwitchBlock* switch_block_;

  // A chained list of try-catch blocks. Chaining and lookup is done by the
  // [TryCatchBlock] class.
  TryCatchBlock* try_catch_block_;

  // A chained list of try-finally blocks. Chaining and lookup is done by the
  // [TryFinallyBlock] class.
  TryFinallyBlock* try_finally_block_;

  // A chained list of catch blocks. Chaining and lookup is done by the
  // [CatchBlock] class.
  CatchBlock* catch_block_;

  ActiveClass active_class_;

  // Cached _PrependTypeArguments.
  Function& prepend_type_arguments_;

  // Returns the function _PrependTypeArguments from dart:_internal. If the
  // cached version is null, retrieves it and updates the cache.
  const Function& PrependTypeArgumentsFunction();

  friend class BreakableBlock;
  friend class CatchBlock;
  friend class ProgramState;
  friend class StreamingFlowGraphBuilder;
  friend class SwitchBlock;
  friend class TryCatchBlock;
  friend class TryFinallyBlock;

  DISALLOW_COPY_AND_ASSIGN(FlowGraphBuilder);
};

// Convenience class to save/restore program state.
// This snapshot denotes a partial state of the flow
// graph builder that is needed when recursing into
// the statements and expressions of a finalizer block.
class ProgramState {
 public:
  ProgramState(BreakableBlock* breakable_block,
               SwitchBlock* switch_block,
               intptr_t loop_depth,
               intptr_t try_depth,
               intptr_t catch_depth,
               intptr_t block_expression_depth)
      : breakable_block_(breakable_block),
        switch_block_(switch_block),
        loop_depth_(loop_depth),
        try_depth_(try_depth),
        catch_depth_(catch_depth),
        block_expression_depth_(block_expression_depth) {}

  void assignTo(FlowGraphBuilder* builder) const {
    builder->breakable_block_ = breakable_block_;
    builder->switch_block_ = switch_block_;
    builder->loop_depth_ = loop_depth_;
    builder->try_depth_ = try_depth_;
    builder->catch_depth_ = catch_depth_;
    builder->block_expression_depth_ = block_expression_depth_;
  }

 private:
  BreakableBlock* const breakable_block_;
  SwitchBlock* const switch_block_;
  const intptr_t loop_depth_;
  const intptr_t try_depth_;
  const intptr_t catch_depth_;
  const intptr_t block_expression_depth_;
};
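
// For instance, TryFinallyBlock captures a ProgramState when it is
// constructed, and the builder can later be rewound to that state before
// translating the finalizer, along the lines of (illustrative):
//
//   const ProgramState& saved = try_finally_block->state();
//   saved.assignTo(flow_graph_builder);  // restore block chains and depths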

class SwitchBlock {
 public:
  SwitchBlock(FlowGraphBuilder* builder, intptr_t case_count)
      : builder_(builder),
        outer_(builder->switch_block_),
        outer_finally_(builder->try_finally_block_),
        case_count_(case_count),
        context_depth_(builder->context_depth_),
        try_index_(builder->CurrentTryIndex()) {
    builder_->switch_block_ = this;
    if (outer_ != nullptr) {
      depth_ = outer_->depth_ + outer_->case_count_;
    } else {
      depth_ = 0;
    }
  }
  ~SwitchBlock() { builder_->switch_block_ = outer_; }

  bool HadJumper(intptr_t case_num) {
    return destinations_.Lookup(case_num) != nullptr;
  }

  // Get destination via absolute target number (i.e. the correct destination
  // is not necessarily in this block).
  JoinEntryInstr* Destination(intptr_t target_index,
                              TryFinallyBlock** outer_finally = nullptr,
                              intptr_t* context_depth = nullptr) {
    // Verify consistency of program state.
    ASSERT(builder_->switch_block_ == this);
    // Find corresponding destination.
    SwitchBlock* block = this;
    while (block->depth_ > target_index) {
      block = block->outer_;
      ASSERT(block != nullptr);
    }

    // Set the outer finally block.
    if (outer_finally != nullptr) {
      *outer_finally = block->outer_finally_;
      *context_depth = block->context_depth_;
    }

    // Ensure there's a [JoinEntryInstr] for that [SwitchCase].
    return block->EnsureDestination(target_index - block->depth_);
  }

  // Get destination via relative target number (i.e. relative to this block,
  // 0 is first case in this block etc).
  JoinEntryInstr* DestinationDirect(intptr_t case_num,
                                    TryFinallyBlock** outer_finally = nullptr,
                                    intptr_t* context_depth = nullptr) {
    // Set the outer finally block.
    if (outer_finally != nullptr) {
      *outer_finally = outer_finally_;
      *context_depth = context_depth_;
    }

    // Ensure there's a [JoinEntryInstr] for that [SwitchCase].
    return EnsureDestination(case_num);
  }

 private:
  JoinEntryInstr* EnsureDestination(intptr_t case_num) {
    JoinEntryInstr* cached_inst = destinations_.Lookup(case_num);
    if (cached_inst == nullptr) {
      JoinEntryInstr* inst = builder_->BuildJoinEntry(try_index_);
      destinations_.Insert(case_num, inst);
      return inst;
    }
    return cached_inst;
  }

  FlowGraphBuilder* builder_;
  SwitchBlock* outer_;

  IntMap<JoinEntryInstr*> destinations_;

  TryFinallyBlock* outer_finally_;
  intptr_t case_count_;
  intptr_t depth_;
  intptr_t context_depth_;
  intptr_t try_index_;
};
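
// Example (illustrative): if an outer switch has 3 cases (depth_ == 0) and a
// switch nested in one of its cases has 2 cases (depth_ == 3), absolute
// targets 0-2 name the outer cases and 3-4 the inner ones. From the inner
// block, Destination(1) walks outward (3 > 1) and resolves relative case
// 1 - 0 == 1 of the outer block, while DestinationDirect(1) yields the inner
// block's own second case.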

class TryCatchBlock {
 public:
  explicit TryCatchBlock(FlowGraphBuilder* builder,
                         intptr_t try_handler_index = -1)
      : builder_(builder),
        outer_(builder->CurrentTryCatchBlock()),
        try_index_(try_handler_index == -1 ? builder->AllocateTryIndex()
                                           : try_handler_index) {
    builder->SetCurrentTryCatchBlock(this);
  }

  ~TryCatchBlock() { builder_->SetCurrentTryCatchBlock(outer_); }

  intptr_t try_index() { return try_index_; }
  TryCatchBlock* outer() const { return outer_; }

 private:
  FlowGraphBuilder* const builder_;
  TryCatchBlock* const outer_;
  intptr_t const try_index_;

  DISALLOW_COPY_AND_ASSIGN(TryCatchBlock);
};

class TryFinallyBlock {
 public:
  TryFinallyBlock(FlowGraphBuilder* builder, intptr_t finalizer_kernel_offset)
      : builder_(builder),
        outer_(builder->try_finally_block_),
        finalizer_kernel_offset_(finalizer_kernel_offset),
        context_depth_(builder->context_depth_),
        try_index_(builder_->CurrentTryIndex()),
        // Finalizers are executed outside of the try block, hence the try
        // depth of a finalizer is one less than the current try depth. The
        // rest of the program state is a snapshot of the current state.
        state_(builder_->breakable_block_,
               builder_->switch_block_,
               builder_->loop_depth_,
               builder_->try_depth_ - 1,
               builder_->catch_depth_,
               builder_->block_expression_depth_) {
    builder_->try_finally_block_ = this;
  }
  ~TryFinallyBlock() { builder_->try_finally_block_ = outer_; }

  TryFinallyBlock* outer() const { return outer_; }
  intptr_t finalizer_kernel_offset() const { return finalizer_kernel_offset_; }
  intptr_t context_depth() const { return context_depth_; }
  intptr_t try_index() const { return try_index_; }
  const ProgramState& state() const { return state_; }

 private:
  FlowGraphBuilder* const builder_;
  TryFinallyBlock* const outer_;
  const intptr_t finalizer_kernel_offset_;
  const intptr_t context_depth_;
  const intptr_t try_index_;
  const ProgramState state_;

  DISALLOW_COPY_AND_ASSIGN(TryFinallyBlock);
};

class BreakableBlock {
 public:
  explicit BreakableBlock(FlowGraphBuilder* builder)
      : builder_(builder),
        outer_(builder->breakable_block_),
        destination_(nullptr),
        outer_finally_(builder->try_finally_block_),
        context_depth_(builder->context_depth_),
        try_index_(builder->CurrentTryIndex()) {
    if (builder_->breakable_block_ == nullptr) {
      index_ = 0;
    } else {
      index_ = builder_->breakable_block_->index_ + 1;
    }
    builder_->breakable_block_ = this;
  }
  ~BreakableBlock() { builder_->breakable_block_ = outer_; }

  bool HadJumper() { return destination_ != nullptr; }

  JoinEntryInstr* destination() { return destination_; }

  JoinEntryInstr* BreakDestination(intptr_t label_index,
                                   TryFinallyBlock** outer_finally,
                                   intptr_t* context_depth) {
    // Verify consistency of program state.
    ASSERT(builder_->breakable_block_ == this);
    // Find corresponding destination.
    BreakableBlock* block = this;
    while (block->index_ != label_index) {
      block = block->outer_;
      ASSERT(block != nullptr);
    }
    *outer_finally = block->outer_finally_;
    *context_depth = block->context_depth_;
    return block->EnsureDestination();
  }

 private:
  JoinEntryInstr* EnsureDestination() {
    if (destination_ == nullptr) {
      destination_ = builder_->BuildJoinEntry(try_index_);
    }
    return destination_;
  }

  FlowGraphBuilder* builder_;
  intptr_t index_;
  BreakableBlock* outer_;
  JoinEntryInstr* destination_;
  TryFinallyBlock* outer_finally_;
  intptr_t context_depth_;
  intptr_t try_index_;

  DISALLOW_COPY_AND_ASSIGN(BreakableBlock);
};

class CatchBlock {
 public:
  CatchBlock(FlowGraphBuilder* builder,
             LocalVariable* exception_var,
             LocalVariable* stack_trace_var,
             intptr_t catch_try_index)
      : builder_(builder),
        outer_(builder->catch_block_),
        exception_var_(exception_var),
        stack_trace_var_(stack_trace_var),
        catch_try_index_(catch_try_index) {
    builder_->catch_block_ = this;
  }
  ~CatchBlock() { builder_->catch_block_ = outer_; }

  LocalVariable* exception_var() { return exception_var_; }
  LocalVariable* stack_trace_var() { return stack_trace_var_; }
  intptr_t catch_try_index() { return catch_try_index_; }

 private:
  FlowGraphBuilder* builder_;
  CatchBlock* outer_;
  LocalVariable* exception_var_;
  LocalVariable* stack_trace_var_;
  intptr_t catch_try_index_;

  DISALLOW_COPY_AND_ASSIGN(CatchBlock);
};

enum SwitchDispatch {
  kSwitchDispatchAuto = -1,
  kSwitchDispatchLinearScan,
  kSwitchDispatchBinarySearch,
  kSwitchDispatchJumpTable,
};

// Collected information for a switch expression.
class SwitchExpression {
 public:
  SwitchExpression(intptr_t case_index,
                   TokenPosition position,
                   const Instance& value)
      : case_index_(case_index), position_(position), value_(&value) {}

  intptr_t case_index() const { return case_index_; }
  const TokenPosition& position() const { return position_; }
  // Constant value of the expression.
  const Instance& value() const { return *value_; }

  // Integer representation of the expression.
  // For Integers it is the value itself and for Enums it is the index.
  const Integer& integer() const {
    ASSERT(integer_ != nullptr);
    return *integer_;
  }

  void set_integer(const Integer& integer) {
    ASSERT(integer_ == nullptr);
    integer_ = &integer;
  }

 private:
  intptr_t case_index_;
  TokenPosition position_;
  const Instance* value_;
  const Integer* integer_ = nullptr;
};

// A range that is covered by a branch in a binary search switch.
// Leaves are represented by a range where min == max.
class SwitchRange {
 public:
  static SwitchRange Leaf(intptr_t index,
                          Fragment branch_instructions,
                          bool is_bounds_checked = false) {
    return SwitchRange(index, index, branch_instructions, is_bounds_checked);
  }

  static SwitchRange Branch(intptr_t min,
                            intptr_t max,
                            Fragment branch_instructions) {
    return SwitchRange(min, max, branch_instructions,
                       /*is_bounds_checked=*/false);
  }

  // min and max are indexes into a sorted array of case expressions.
  intptr_t min() const { return min_; }
  intptr_t max() const { return max_; }
  // The fragment to continue building code for the branch.
  Fragment branch_instructions() const { return branch_instructions_; }
  // For leaves, whether the branch is known to be in the bounds of the
  // overall switch.
  bool is_bounds_checked() const { return is_bounds_checked_; }
  bool is_leaf() const { return min_ == max_; }

 private:
  SwitchRange(intptr_t min,
              intptr_t max,
              Fragment branch_instructions,
              bool is_bounds_checked)
      : min_(min),
        max_(max),
        branch_instructions_(branch_instructions),
        is_bounds_checked_(is_bounds_checked) {}

  intptr_t min_;
  intptr_t max_;
  Fragment branch_instructions_;
  bool is_bounds_checked_;
};

// Helper for building flow graph for a switch statement.
class SwitchHelper {
 public:
  SwitchHelper(Zone* zone,
               TokenPosition position,
               bool is_exhaustive,
               const AbstractType& expression_type,
               SwitchBlock* switch_block,
               intptr_t case_count);

  // A switch statement is optimizable if the static type of the scrutinee
  // expression is a non-nullable int or enum, and all case expressions
  // are instances of the scrutinee static type.
  bool is_optimizable() const { return is_optimizable_; }
  const TokenPosition& position() const { return position_; }
  bool is_exhaustive() const { return is_exhaustive_; }
  SwitchBlock* switch_block() { return switch_block_; }
  intptr_t case_count() const { return case_count_; }

  // Index of default case.
  intptr_t default_case() const { return default_case_; }
  void set_default_case(intptr_t index) {
    ASSERT(default_case_ == -1);
    default_case_ = index;
  }

  const GrowableArray<Fragment>& case_bodies() const { return case_bodies_; }

  // Array of the expression counts for all cases.
  const GrowableArray<intptr_t>& case_expression_counts() const {
    return case_expression_counts_;
  }

  const GrowableArray<SwitchExpression>& expressions() const {
    return expressions_;
  }

  const GrowableArray<SwitchExpression*>& sorted_expressions() const {
    return sorted_expressions_;
  }

  // Static type of the scrutinee expression.
  const AbstractType& expression_type() const { return expression_type_; }

  const Integer& expression_min() const {
    ASSERT(expression_min_ != nullptr);
    return *expression_min_;
  }
  const Integer& expression_max() const {
    ASSERT(expression_max_ != nullptr);
    return *expression_max_;
  }

  bool has_default() const { return default_case_ >= 0; }

  bool is_enum_switch() const { return is_enum_switch_; }

  // Returns size of [min..max] range, or kMaxInt64 on overflow.
  int64_t ExpressionRange() const;
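  // For example, an int switch whose case values span 10..19 has an
  // expression range of 19 - 10 + 1 == 10.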

  bool RequiresLowerBoundCheck() const;
  bool RequiresUpperBoundCheck() const;

  SwitchDispatch SelectDispatchStrategy();

  void AddCaseBody(Fragment body) { case_bodies_.Add(body); }

  void AddExpression(intptr_t case_index,
                     TokenPosition position,
                     const Instance& value);

 private:
  void PrepareForOptimizedSwitch();

  Zone* zone_;
  bool is_optimizable_ = false;
  bool is_enum_switch_ = false;
  const TokenPosition position_;
  const bool is_exhaustive_;
  const AbstractType& expression_type_;
  SwitchBlock* const switch_block_;
  const intptr_t case_count_;
  intptr_t default_case_ = -1;
  GrowableArray<Fragment> case_bodies_;
  GrowableArray<intptr_t> case_expression_counts_;
  GrowableArray<SwitchExpression> expressions_;
  GrowableArray<SwitchExpression*> sorted_expressions_;
  const Integer* expression_min_ = nullptr;
  const Integer* expression_max_ = nullptr;
};

}  // namespace kernel
}  // namespace dart

#endif  // RUNTIME_VM_COMPILER_FRONTEND_KERNEL_TO_IL_H_