Flutter Engine
flow_graph_compiler.h
1// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#ifndef RUNTIME_VM_COMPILER_BACKEND_FLOW_GRAPH_COMPILER_H_
6#define RUNTIME_VM_COMPILER_BACKEND_FLOW_GRAPH_COMPILER_H_
7
9#if defined(DART_PRECOMPILED_RUNTIME)
10#error "AOT runtime should not use compiler sources (including header files)"
11#endif // defined(DART_PRECOMPILED_RUNTIME)
12
13#include <functional>
14
15#include "vm/allocation.h"
16#include "vm/code_descriptors.h"
22#include "vm/runtime_entry.h"
23
24namespace dart {
25
26// Forward declarations.
27class CatchEntryMovesMapBuilder;
28class Code;
29class DeoptInfoBuilder;
30class FlowGraph;
31class FlowGraphCompiler;
32class Function;
33template <typename T>
34class GrowableArray;
35class ParsedFunction;
36class SpeculativeInliningPolicy;
37
38namespace compiler {
39struct TableSelector;
40}
41
42// Used in methods which need conditional access to a temporary register.
43// May only be used to allocate a single temporary register.
44class TemporaryRegisterAllocator : public ValueObject {
45 public:
46 virtual ~TemporaryRegisterAllocator() {}
47 virtual Register AllocateTemporary() = 0;
48 virtual void ReleaseTemporary() = 0;
49};
50
51class ConstantTemporaryAllocator : public TemporaryRegisterAllocator {
52 public:
53 explicit ConstantTemporaryAllocator(Register tmp) : tmp_(tmp) {}
54
55 Register AllocateTemporary() override { return tmp_; }
56 void ReleaseTemporary() override {}
57
58 private:
59 Register const tmp_;
60};
61
62class NoTemporaryAllocator : public TemporaryRegisterAllocator {
63 public:
64 Register AllocateTemporary() override { UNREACHABLE(); }
65 void ReleaseTemporary() override { UNREACHABLE(); }
66};
67
68// Used for describing a deoptimization point after call (lazy deoptimization).
69// For deoptimization before instruction use class CompilerDeoptInfoWithStub.
70class CompilerDeoptInfo : public ZoneAllocated {
71 public:
72 CompilerDeoptInfo(intptr_t deopt_id,
73 ICData::DeoptReasonId reason,
74 uint32_t flags,
75 Environment* deopt_env)
76 : pc_offset_(-1),
77 deopt_id_(deopt_id),
78 reason_(reason),
79 flags_(flags),
80 deopt_env_(deopt_env) {
81 ASSERT(deopt_env != nullptr);
82 }
83 virtual ~CompilerDeoptInfo() {}
84
85 TypedDataPtr CreateDeoptInfo(FlowGraphCompiler* compiler,
86 DeoptInfoBuilder* builder,
87 const Array& deopt_table);
88
89 // No code needs to be generated.
90 virtual void GenerateCode(FlowGraphCompiler* compiler, intptr_t stub_ix) {}
91
92 intptr_t pc_offset() const { return pc_offset_; }
93 void set_pc_offset(intptr_t offset) { pc_offset_ = offset; }
94
95 intptr_t deopt_id() const { return deopt_id_; }
96 ICData::DeoptReasonId reason() const { return reason_; }
97 uint32_t flags() const { return flags_; }
98 const Environment* deopt_env() const { return deopt_env_; }
99
100 private:
101 void EmitMaterializations(Environment* env, DeoptInfoBuilder* builder);
102
103 void AllocateOutgoingArguments(Environment* env);
104
105 intptr_t pc_offset_;
106 const intptr_t deopt_id_;
107 const ICData::DeoptReasonId reason_;
108 const uint32_t flags_;
109 Environment* deopt_env_;
110
111 DISALLOW_COPY_AND_ASSIGN(CompilerDeoptInfo);
112};
113
114class CompilerDeoptInfoWithStub : public CompilerDeoptInfo {
115 public:
116 CompilerDeoptInfoWithStub(intptr_t deopt_id,
117 ICData::DeoptReasonId reason,
118 uint32_t flags,
119 Environment* deopt_env)
120 : CompilerDeoptInfo(deopt_id, reason, flags, deopt_env), entry_label_() {
121 ASSERT(reason != ICData::kDeoptAtCall);
122 }
123
124 compiler::Label* entry_label() { return &entry_label_; }
125
126 // Implementation is in architecture specific file.
127 virtual void GenerateCode(FlowGraphCompiler* compiler, intptr_t stub_ix);
128
129 const char* Name() const {
130 const char* kFormat = "Deopt stub for id %d, reason: %s";
131 const intptr_t len = Utils::SNPrint(nullptr, 0, kFormat, deopt_id(),
132 DeoptReasonToCString(reason())) +
133 1;
134 char* chars = Thread::Current()->zone()->Alloc<char>(len);
135 Utils::SNPrint(chars, len, kFormat, deopt_id(),
136 DeoptReasonToCString(reason()));
137 return chars;
138 }
139
140 private:
141 compiler::Label entry_label_;
142
143 DISALLOW_COPY_AND_ASSIGN(CompilerDeoptInfoWithStub);
144};
145
146class SlowPathCode : public ZoneAllocated {
147 public:
148 explicit SlowPathCode(Instruction* instruction)
149 : instruction_(instruction), entry_label_(), exit_label_() {}
150 virtual ~SlowPathCode() {}
151
152 Instruction* instruction() const { return instruction_; }
153 compiler::Label* entry_label() { return &entry_label_; }
154 compiler::Label* exit_label() { return &exit_label_; }
155
156 void GenerateCode(FlowGraphCompiler* compiler) {
157 EmitNativeCode(compiler);
158 ASSERT(entry_label_.IsBound());
159 }
160
161 private:
162 virtual void EmitNativeCode(FlowGraphCompiler* compiler) = 0;
163
164 Instruction* instruction_;
165 compiler::Label entry_label_;
166 compiler::Label exit_label_;
167
168 DISALLOW_COPY_AND_ASSIGN(SlowPathCode);
169};
170
171template <typename T>
172class TemplateSlowPathCode : public SlowPathCode {
173 public:
174 explicit TemplateSlowPathCode(T* instruction) : SlowPathCode(instruction) {}
175
176 T* instruction() const {
177 return static_cast<T*>(SlowPathCode::instruction());
178 }
179};
180
181class BoxAllocationSlowPath : public TemplateSlowPathCode<Instruction> {
182 public:
183 BoxAllocationSlowPath(Instruction* instruction,
184 const Class& cls,
185 Register result)
186 : TemplateSlowPathCode(instruction), cls_(cls), result_(result) {}
187
188 virtual void EmitNativeCode(FlowGraphCompiler* compiler);
189
190 static void Allocate(FlowGraphCompiler* compiler,
191 Instruction* instruction,
192 const Class& cls,
193 Register result,
194 Register temp);
195
196 private:
197 const Class& cls_;
198 const Register result_;
199};
200
201class DoubleToIntegerSlowPath
202 : public TemplateSlowPathCode<DoubleToIntegerInstr> {
203 public:
204 DoubleToIntegerSlowPath(DoubleToIntegerInstr* instruction,
205 FpuRegister value_reg)
206 : TemplateSlowPathCode(instruction), value_reg_(value_reg) {}
207
208 virtual void EmitNativeCode(FlowGraphCompiler* compiler);
209
210 private:
211 FpuRegister value_reg_;
212};
213
214// Slow path code which calls runtime entry to throw an exception.
215class ThrowErrorSlowPathCode : public TemplateSlowPathCode<Instruction> {
216 public:
217 ThrowErrorSlowPathCode(Instruction* instruction,
218 const RuntimeEntry& runtime_entry)
219 : TemplateSlowPathCode(instruction), runtime_entry_(runtime_entry) {}
220
221 // This name appears in disassembly.
222 virtual const char* name() = 0;
223
224 // Subclasses can override these methods to customize slow path code.
225 virtual void EmitCodeAtSlowPathEntry(FlowGraphCompiler* compiler) {}
226 virtual void AddMetadataForRuntimeCall(FlowGraphCompiler* compiler) {}
227 virtual void PushArgumentsForRuntimeCall(FlowGraphCompiler* compiler) {}
228
229 // Returns number of arguments for runtime call (if shared stub is not used).
230 virtual intptr_t GetNumberOfArgumentsForRuntimeCall() { return 0; }
231
232 virtual void EmitSharedStubCall(FlowGraphCompiler* compiler,
233 bool save_fpu_registers) {
234 UNREACHABLE();
235 }
236
237 virtual void EmitNativeCode(FlowGraphCompiler* compiler);
238
239 private:
240 const RuntimeEntry& runtime_entry_;
241};
242
243class NullErrorSlowPath : public ThrowErrorSlowPathCode {
244 public:
245 explicit NullErrorSlowPath(CheckNullInstr* instruction)
246 : ThrowErrorSlowPathCode(instruction,
247 GetRuntimeEntry(instruction->exception_type())) {
248 }
249
250 CheckNullInstr::ExceptionType exception_type() const {
251 return instruction()->AsCheckNull()->exception_type();
252 }
253
254 const char* name() override;
255
256 void EmitSharedStubCall(FlowGraphCompiler* compiler,
257 bool save_fpu_registers) override;
258
259 void AddMetadataForRuntimeCall(FlowGraphCompiler* compiler) override {
260 CheckNullInstr::AddMetadataForRuntimeCall(instruction()->AsCheckNull(),
261 compiler);
262 }
263
264 static CodePtr GetStub(FlowGraphCompiler* compiler,
265 CheckNullInstr::ExceptionType exception_type,
266 bool save_fpu_registers);
267
268 private:
269 static const RuntimeEntry& GetRuntimeEntry(
270 CheckNullInstr::ExceptionType exception_type);
271};
272
273class RangeErrorSlowPath : public ThrowErrorSlowPathCode {
274 public:
275 explicit RangeErrorSlowPath(GenericCheckBoundInstr* instruction)
276 : ThrowErrorSlowPathCode(
277 instruction,
278 GenericCheckBoundInstr::UseUnboxedRepresentation()
279 ? kRangeErrorUnboxedInt64RuntimeEntry
280 : kRangeErrorRuntimeEntry) {}
281 virtual const char* name() { return "check bound"; }
282
283 virtual intptr_t GetNumberOfArgumentsForRuntimeCall() {
284 if (GenericCheckBoundInstr::UseUnboxedRepresentation()) {
285 return 0; // Unboxed arguments are passed through Thread.
286 }
287 return 2; // length and index
288 }
289
290 virtual void PushArgumentsForRuntimeCall(FlowGraphCompiler* compiler);
291
292 virtual void EmitSharedStubCall(FlowGraphCompiler* compiler,
293 bool save_fpu_registers);
294};
295
296class WriteErrorSlowPath : public ThrowErrorSlowPathCode {
297 public:
298 explicit WriteErrorSlowPath(CheckWritableInstr* instruction)
299 : ThrowErrorSlowPathCode(instruction, kWriteErrorRuntimeEntry) {}
300 virtual const char* name() { return "check writable"; }
301
302 virtual void EmitSharedStubCall(FlowGraphCompiler* compiler,
303 bool save_fpu_registers);
304
305 virtual void PushArgumentsForRuntimeCall(FlowGraphCompiler* compiler);
306
307 virtual intptr_t GetNumberOfArgumentsForRuntimeCall() {
308 return 2; // receiver, kind
309 }
310};
311
312class LateInitializationErrorSlowPath : public ThrowErrorSlowPathCode {
313 public:
314 explicit LateInitializationErrorSlowPath(Instruction* instruction)
315 : ThrowErrorSlowPathCode(instruction,
316 kLateFieldNotInitializedErrorRuntimeEntry) {
317 ASSERT(instruction->IsLoadField() || instruction->IsLoadStaticField());
318 }
319 virtual const char* name() { return "late initialization error"; }
320
321 virtual intptr_t GetNumberOfArgumentsForRuntimeCall() {
322 return 1; // field
323 }
324
325 virtual void PushArgumentsForRuntimeCall(FlowGraphCompiler* compiler);
326
327 virtual void EmitSharedStubCall(FlowGraphCompiler* compiler,
328 bool save_fpu_registers);
329
330 private:
331 FieldPtr OriginalField() const {
332 return instruction()->IsLoadField()
333 ? instruction()->AsLoadField()->slot().field().Original()
334 : instruction()->AsLoadStaticField()->field().Original();
335 }
336};
337
338class FlowGraphCompiler : public ValueObject {
339 private:
340 class BlockInfo : public ZoneAllocated {
341 public:
342 BlockInfo()
343 : block_label_(),
344 jump_label_(&block_label_),
345 next_nonempty_label_(nullptr),
346 is_marked_(false) {}
347
348 // The label to jump to when control is transferred to this block. For
349 // nonempty blocks it is the label of the block itself. For empty
350 // blocks it is the label of the first nonempty successor block.
351 compiler::Label* jump_label() const { return jump_label_; }
352 void set_jump_label(compiler::Label* label) { jump_label_ = label; }
353
354 // The label of the first nonempty block after this one in the block
355 // order, or nullptr if there is no nonempty block following this one.
356 compiler::Label* next_nonempty_label() const {
357 return next_nonempty_label_;
358 }
359 void set_next_nonempty_label(compiler::Label* label) {
360 next_nonempty_label_ = label;
361 }
362
363 bool WasCompacted() const { return jump_label_ != &block_label_; }
364
365 // Block compaction is recursive. Block info for already-compacted
366 // blocks is marked so as to avoid cycles in the graph.
367 bool is_marked() const { return is_marked_; }
368 void mark() { is_marked_ = true; }
369
370 private:
371 compiler::Label block_label_;
372
373 compiler::Label* jump_label_;
374 compiler::Label* next_nonempty_label_;
375
376 bool is_marked_;
377 };
378
379 public:
380 FlowGraphCompiler(compiler::Assembler* assembler,
381 FlowGraph* flow_graph,
382 const ParsedFunction& parsed_function,
383 bool is_optimizing,
384 SpeculativeInliningPolicy* speculative_policy,
385 const GrowableArray<const Function*>& inline_id_to_function,
386 const GrowableArray<TokenPosition>& inline_id_to_token_pos,
387 const GrowableArray<intptr_t>& caller_inline_id,
388 ZoneGrowableArray<const ICData*>* deopt_id_to_ic_data,
389 CodeStatistics* stats = nullptr);
390
392
394
398
399 // Accessors.
400 compiler::Assembler* assembler() const { return assembler_; }
401 const ParsedFunction& parsed_function() const { return parsed_function_; }
402 const Function& function() const { return parsed_function_.function(); }
403 const GrowableArray<BlockEntryInstr*>& block_order() const {
404 return block_order_;
405 }
406 const GrowableArray<const compiler::TableSelector*>&
407 dispatch_table_call_targets() const {
408 return dispatch_table_call_targets_;
409 }
410
411 // If 'ForcedOptimization()' returns 'true', we are compiling in optimized
412 // mode for a function which cannot deoptimize. Certain optimizations, e.g.
413 // speculative optimizations and call patching are disabled.
414 bool ForcedOptimization() const { return function().ForceOptimize(); }
415
416 const FlowGraph& flow_graph() const {
417 return intrinsic_mode() ? *intrinsic_flow_graph_ : flow_graph_;
418 }
419
420 BlockEntryInstr* current_block() const { return current_block_; }
421 void set_current_block(BlockEntryInstr* value) { current_block_ = value; }
422
423 Instruction* current_instruction() const { return current_instruction_; }
424
425 bool CanOptimize() const;
426 bool CanOptimizeFunction() const;
427 bool CanOSRFunction() const;
428 bool is_optimizing() const { return is_optimizing_; }
429
431 void LoadBSSEntry(BSS::Relocation relocation, Register dst, Register tmp);
432
433 // The function was fully intrinsified, so the body is unreachable.
434 //
435 // We still need to compile the body in unoptimized mode because the
436 // 'ICData's are added to the function's 'ic_data_array_' when instance
437 // calls are compiled.
439 return fully_intrinsified_ && is_optimizing();
440 }
441
444 bool intrinsic_mode() const { return intrinsic_mode_; }
445
447 intrinsic_flow_graph_ = &flow_graph;
448 }
449
451 ASSERT(intrinsic_slow_path_label_ == nullptr || label == nullptr);
452 intrinsic_slow_path_label_ = label;
453 }
455 ASSERT(intrinsic_slow_path_label_ != nullptr);
456 return intrinsic_slow_path_label_;
457 }
458
460
461 const GrowableArray<BlockInfo*>& block_info() const { return block_info_; }
462
463 void StatsBegin(Instruction* instr) {
464 if (stats_ != nullptr) stats_->Begin(instr);
465 }
466
467 void StatsEnd(Instruction* instr) {
468 if (stats_ != nullptr) stats_->End(instr);
469 }
470
471 void SpecialStatsBegin(intptr_t tag) {
472 if (stats_ != nullptr) stats_->SpecialBegin(tag);
473 }
474
475 void SpecialStatsEnd(intptr_t tag) {
476 if (stats_ != nullptr) stats_->SpecialEnd(tag);
477 }
478
480 return used_static_fields_;
481 }
482
483 // Constructor is lightweight, major initialization work should occur here.
484 // This makes it easier to measure time spent in the compiler.
485 void InitCompiler();
486
487 void CompileGraph();
488
490
491 void VisitBlocks();
492
494
495 // Bail out of the flow graph compiler. Does not return to the caller.
496 void Bailout(const char* reason);
497
498 // Returns 'true' if regular code generation should be skipped.
499 bool TryIntrinsify();
500
501 // Emits code for a generic move from a location 'src' to a location 'dst'.
502 //
503 // Note that Location does not include a size (that can only be deduced from
504 // a Representation), so these moves might overapproximate the size needed
505 // to move. The maximal overapproximation is moving 8 bytes instead of 4 on
506 // 64 bit architectures. This overapproximation is not a problem, because
507 // the Dart calling convention only uses word-sized stack slots.
508 //
509 // TODO(dartbug.com/40400): Express this in terms of EmitMove(NativeLocation
510 // NativeLocation) to remove code duplication.
511 void EmitMove(Location dst, Location src, TemporaryRegisterAllocator* temp);
512
513 // Emits code for a move from a location `src` to a location `dst`.
514 //
515 // Takes into account the payload and container representations of `dst` and
516 // `src` to do the smallest move possible, and sign (or zero) extend or
517 // truncate if needed.
518 //
519 // Makes use of TMP, FpuTMP, and `temp`.
520 void EmitNativeMove(const compiler::ffi::NativeLocation& dst,
521 const compiler::ffi::NativeLocation& src,
522 TemporaryRegisterAllocator* temp);
523
524 // Helper method to move from a Location to a NativeLocation.
525 void EmitMoveToNative(const compiler::ffi::NativeLocation& dst,
526 Location src_loc,
527 Representation src_type,
528 TemporaryRegisterAllocator* temp);
529
530 // Helper method to move from a NativeLocation to a Location.
531 void EmitMoveFromNative(Location dst_loc,
532 Representation dst_type,
533 const compiler::ffi::NativeLocation& src,
534 TemporaryRegisterAllocator* temp);
535
536 // Helper method to move a Dart const to a native location.
537 void EmitMoveConst(const compiler::ffi::NativeLocation& dst,
538 Location src,
539 Representation src_type,
540 TemporaryRegisterAllocator* temp);
541
542 bool CheckAssertAssignableTypeTestingABILocations(
543 const LocationSummary& locs);
544
545 void GenerateAssertAssignable(CompileType* receiver_type,
547 intptr_t deopt_id,
549 const String& dst_name,
550 LocationSummary* locs);
551
552#if !defined(TARGET_ARCH_IA32)
554 const AbstractType& dst_type,
556
558 intptr_t deopt_id,
560 Register reg_with_type,
561 const AbstractType& dst_type,
562 const String& dst_name,
563 LocationSummary* locs);
564
566 Register reg_with_type,
567 intptr_t sub_type_cache_index);
568#endif
569
571 const Code& stub,
573 LocationSummary* locs,
574 intptr_t deopt_id,
576
579 const Code& stub,
581 LocationSummary* locs,
582 ObjectPool::SnapshotBehavior snapshot_behavior =
584
587 const Code& stub,
589 LocationSummary* locs,
590 ObjectPool::SnapshotBehavior snapshot_behavior =
592
593 void GenerateDartCall(intptr_t deopt_id,
595 const Code& stub,
597 LocationSummary* locs,
598 Code::EntryKind entry_kind = Code::EntryKind::kNormal);
599
601 intptr_t deopt_id,
604 LocationSummary* locs,
605 const Function& target,
606 Code::EntryKind entry_kind = Code::EntryKind::kNormal);
607
609 intptr_t deopt_id,
611 const AbstractType& type,
612 LocationSummary* locs);
613
614 void GenerateInstanceCall(intptr_t deopt_id,
616 LocationSummary* locs,
617 const ICData& ic_data,
618 Code::EntryKind entry_kind,
619 bool receiver_can_be_smi);
620
622 intptr_t deopt_id,
624 const Function& function,
625 ArgumentsInfo args_info,
626 LocationSummary* locs,
627 const ICData& ic_data_in,
628 ICData::RebindRule rebind_rule,
629 Code::EntryKind entry_kind = Code::EntryKind::kNormal);
630
631 void GenerateNumberTypeCheck(Register kClassIdReg,
632 const AbstractType& type,
633 compiler::Label* is_instance_lbl,
634 compiler::Label* is_not_instance_lbl);
635 void GenerateStringTypeCheck(Register kClassIdReg,
636 compiler::Label* is_instance_lbl,
637 compiler::Label* is_not_instance_lbl);
638 void GenerateListTypeCheck(Register kClassIdReg,
639 compiler::Label* is_instance_lbl);
640
641 // Returns true if no further checks are necessary but the code coming after
642 // the emitted code here is still required to do a runtime call (for the
643 // negative case of throwing an exception).
644 bool GenerateSubtypeRangeCheck(Register class_id_reg,
645 const Class& type_class,
646 compiler::Label* is_subtype_lbl);
647
648 // We test up to 4 different cid ranges; if we would need to test more in
649 // order to get a definite answer, we fall back to the old mechanism (namely
650 // going into the subtyping cache).
651 static constexpr intptr_t kMaxNumberOfCidRangesToTest = 4;
652
653 // If [fall_through_if_inside] is `true`, then [outside_range_lbl] must be
654 // supplied, since it will be jumped to in the last case if the cid is outside
655 // the range.
656 //
657 // Returns whether [class_id_reg] is clobbered by the check.
658 static bool GenerateCidRangesCheck(
660 Register class_id_reg,
661 const CidRangeVector& cid_ranges,
662 compiler::Label* inside_range_lbl,
663 compiler::Label* outside_range_lbl = nullptr,
664 bool fall_through_if_inside = false);
665
667 const Code& stub,
668 const ICData& ic_data,
669 intptr_t deopt_id,
671 LocationSummary* locs,
672 Code::EntryKind entry_kind = Code::EntryKind::kNormal);
673
674 void EmitInstanceCallJIT(const Code& stub,
675 const ICData& ic_data,
676 intptr_t deopt_id,
678 LocationSummary* locs,
679 Code::EntryKind entry_kind);
680
682 const CallTargets& targets,
683 ArgumentsInfo args_info,
684 intptr_t deopt_id,
686 LocationSummary* locs,
687 bool complete,
688 intptr_t total_call_count,
689 bool receiver_can_be_smi = true);
690
692 intptr_t deopt_id,
694 LocationSummary* locs) {
695 const String& name = String::Handle(icdata.target_name());
696 const Array& arguments_descriptor =
698 EmitMegamorphicInstanceCall(name, arguments_descriptor, deopt_id, source,
699 locs);
700 }
701
703 const Array& arguments_descriptor,
704 intptr_t deopt_id,
706 LocationSummary* locs);
707
709 const ICData& ic_data,
710 intptr_t deopt_id,
712 LocationSummary* locs,
713 Code::EntryKind entry_kind = Code::EntryKind::kNormal,
714 bool receiver_can_be_smi = true);
715
716 void EmitTestAndCall(const CallTargets& targets,
717 const String& function_name,
718 ArgumentsInfo args_info,
719 compiler::Label* failed,
720 compiler::Label* match_found,
721 intptr_t deopt_id,
722 const InstructionSource& source_index,
723 LocationSummary* locs,
724 bool complete,
725 intptr_t total_ic_calls,
726 Code::EntryKind entry_kind = Code::EntryKind::kNormal);
727
728 void EmitDispatchTableCall(int32_t selector_offset,
729 const Array& arguments_descriptor);
730
732 const Object& obj,
733 bool needs_number_check,
735 intptr_t deopt_id);
738 bool needs_number_check,
740 intptr_t deopt_id);
742
744
745 void EmitEdgeCounter(intptr_t edge_id);
746
748
749 void EmitCallToStub(const Code& stub,
750 ObjectPool::SnapshotBehavior snapshot_behavior =
752 void EmitJumpToStub(const Code& stub);
753 void EmitTailCallToStub(const Code& stub);
754
755 void EmitDropArguments(intptr_t count);
756
757 // Emits the following metadata for the current PC:
758 //
759 // * Attaches current try index
760 // * Attaches stackmaps
761 // * Attaches catch entry moves (in AOT)
762 // * Deoptimization information (in JIT)
763 //
764 // If [env] is not `nullptr` it will be used instead of the
765 // `pending_deoptimization_env`.
767 intptr_t deopt_id,
769 LocationSummary* locs,
771
773 intptr_t yield_index);
774
775 void EmitComment(Instruction* instr);
776
777 // Returns stack size (number of variables on stack for unoptimized
778 // code, or number of spill slots for optimized code).
779 intptr_t StackSize() const;
780
781 // Returns the number of extra stack slots used during an Osr entry
782 // (values for all [ParameterInstr]s, representing local variables
783 // and expression stack values, are already on the stack).
784 intptr_t ExtraStackSlotsOnOsrEntry() const;
785
786#if defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
787 // Changes the base register of this Location if this allows us to utilize
788 // a better addressing mode. For RISC-V, this is the wider range of compressed
789 // instructions available for SP-relative load compared to FP-relative loads.
790 // Assumes `StackSize` accounts for everything at the point of use.
791 Location RebaseIfImprovesAddressing(Location loc) const;
792#endif // defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
793
794 // Returns assembler label associated with the given block entry.
795 compiler::Label* GetJumpLabel(BlockEntryInstr* block_entry) const;
796 bool WasCompacted(BlockEntryInstr* block_entry) const;
797
798 // Returns the label of the fall-through of the current block.
800
801 // Returns true if there is a next block after the current one in
802 // the block order and if it is the given block.
803 bool CanFallThroughTo(BlockEntryInstr* block_entry) const;
804
805 // Return true-, false- and fall-through label for a branch instruction.
807
809 void SetNeedsStackTrace(intptr_t try_index);
811 intptr_t deopt_id,
813 void AddDescriptor(
815 intptr_t pc_offset,
816 intptr_t deopt_id,
818 intptr_t try_index,
819 intptr_t yield_index = UntaggedPcDescriptors::kInvalidYieldIndex);
820
821 // Add NullCheck information for the current PC.
822 void AddNullCheck(const InstructionSource& source, const String& name);
823
825 intptr_t slow_path_argument_count = 0);
826
827 compiler::Label* AddDeoptStub(intptr_t deopt_id,
829 uint32_t flags = 0);
830
833
834 void AddSlowPathCode(SlowPathCode* slow_path);
835
836 void FinalizeExceptionHandlers(const Code& code);
837 void FinalizePcDescriptors(const Code& code);
839 void FinalizeStackMaps(const Code& code);
840 void FinalizeVarDescriptors(const Code& code);
841 void FinalizeCatchEntryMovesMap(const Code& code);
842 void FinalizeStaticCallTargetsTable(const Code& code);
843 void FinalizeCodeSourceMap(const Code& code);
844
845 const Class& double_class() const { return double_class_; }
846 const Class& mint_class() const { return mint_class_; }
847 const Class& float32x4_class() const { return float32x4_class_; }
848 const Class& float64x2_class() const { return float64x2_class_; }
849 const Class& int32x4_class() const { return int32x4_class_; }
850
851 const Class& BoxClassFor(Representation rep);
852
855#if defined(DEBUG)
856 void ClobberDeadTempRegisters(LocationSummary* locs);
857#endif
858
859 // Returns a new environment based on [env] which accounts for the new
860 // locations of values in the slow path call.
862 intptr_t num_slow_path_args) {
863 if (inst->env() == nullptr && is_optimizing()) {
864 if (pending_deoptimization_env_ == nullptr) {
865 return nullptr;
866 }
867 return SlowPathEnvironmentFor(pending_deoptimization_env_, inst->locs(),
868 num_slow_path_args);
869 }
870 return SlowPathEnvironmentFor(inst->env(), inst->locs(),
871 num_slow_path_args);
872 }
873
875 LocationSummary* locs,
876 intptr_t num_slow_path_args);
877
878 intptr_t CurrentTryIndex() const {
879 if (current_block_ == nullptr) {
880 return kInvalidTryIndex;
881 }
882 return current_block_->try_index();
883 }
884
885 bool may_reoptimize() const { return may_reoptimize_; }
886
887 // Use in unoptimized compilation to preserve/reuse ICData.
888 //
889 // If [binary_smi_target] is non-null and we have to create the ICData, the
890 // ICData will get an (kSmiCid, kSmiCid, binary_smi_target) entry.
891 const ICData* GetOrAddInstanceCallICData(intptr_t deopt_id,
892 const String& target_name,
893 const Array& arguments_descriptor,
894 intptr_t num_args_tested,
895 const AbstractType& receiver_type,
896 const Function& binary_smi_target);
897
898 const ICData* GetOrAddStaticCallICData(intptr_t deopt_id,
899 const Function& target,
900 const Array& arguments_descriptor,
901 intptr_t num_args_tested,
902 ICData::RebindRule rebind_rule);
903
905 intptr_t cid,
906 const String& selector,
907 const Array& args_desc_array);
908
910 return *deopt_id_to_ic_data_;
911 }
912
913 Thread* thread() const { return thread_; }
914 IsolateGroup* isolate_group() const { return thread_->isolate_group(); }
915 Zone* zone() const { return zone_; }
916
917 void AddStubCallTarget(const Code& code);
919
920 ArrayPtr edge_counters_array() const { return edge_counters_array_.ptr(); }
921
922 ArrayPtr InliningIdToFunction() const;
923
926
927 static bool LookupMethodFor(int class_id,
928 const String& name,
929 const ArgumentsDescriptor& args_desc,
930 Function* fn_return,
931 bool* class_is_abstract_return = nullptr);
932
933 // Returns new class-id bias.
934 //
935 // TODO(kustermann): We should move this code out of the [FlowGraphCompiler]!
937 compiler::Label* label,
938 Register class_id_reg,
939 const CidRangeValue& range,
940 int bias,
941 bool jump_on_miss = true);
942
943 bool IsEmptyBlock(BlockEntryInstr* block) const;
944
946 const Function& function,
947 const Array& arguments_descriptor,
948 intptr_t size_with_type_args,
949 intptr_t deopt_id,
951 LocationSummary* locs,
952 Code::EntryKind entry_kind = Code::EntryKind::kNormal);
953
954 private:
955 friend class BoxInt64Instr; // For AddPcRelativeCallStubTarget().
956 friend class CheckNullInstr; // For AddPcRelativeCallStubTarget().
957 friend class NullErrorSlowPath; // For AddPcRelativeCallStubTarget().
958 friend class CheckStackOverflowInstr; // For AddPcRelativeCallStubTarget().
959 friend class StoreIndexedInstr; // For AddPcRelativeCallStubTarget().
960 friend class StoreFieldInstr; // For AddPcRelativeCallStubTarget().
961 friend class CheckStackOverflowSlowPath; // For pending_deoptimization_env_.
962 friend class GraphIntrinsicCodeGenScope; // For optimizing_.
963
964 // Architecture specific implementation of simple native moves.
965 void EmitNativeMoveArchitecture(const compiler::ffi::NativeLocation& dst,
967 void EmitNativeLoad(Register dst,
969 intptr_t offset,
971
972 void EmitFrameEntry();
973
974 bool TryIntrinsifyHelper();
975 void AddPcRelativeCallTarget(const Function& function,
976 Code::EntryKind entry_kind);
977 void AddPcRelativeCallStubTarget(const Code& stub_code);
978 void AddPcRelativeTailCallStubTarget(const Code& stub_code);
979 void AddPcRelativeTTSCallTypeTarget(const AbstractType& type);
980 void AddStaticCallTarget(const Function& function,
981 Code::EntryKind entry_kind);
982
983 void GenerateDeferredCode();
984
985 void EmitInstructionPrologue(Instruction* instr);
986 void EmitInstructionEpilogue(Instruction* instr);
987
988 // Emit code to load a Value into register 'dst'.
989 void LoadValue(Register dst, Value* value);
990
991 void EmitUnoptimizedStaticCall(
992 intptr_t size_with_type_args,
993 intptr_t deopt_id,
995 LocationSummary* locs,
996 const ICData& ic_data,
997 Code::EntryKind entry_kind = Code::EntryKind::kNormal);
998
999 // Helper for TestAndCall that calculates a good bias that
1000 // allows more compact instructions to be emitted.
1001 intptr_t ComputeGoodBiasForCidComparison(const CallTargets& sorted,
1002 intptr_t max_immediate);
1003
1004 // More helpers for EmitTestAndCall.
1005
1006 static Register EmitTestCidRegister();
1007
1008 void EmitTestAndCallLoadReceiver(intptr_t count_without_type_args,
1009 const Array& arguments_descriptor);
1010
1011 void EmitTestAndCallSmiBranch(compiler::Label* label, bool jump_if_smi);
1012
1013 void EmitTestAndCallLoadCid(Register class_id_reg);
1014
1015 // Type checking helper methods.
1016 void CheckClassIds(Register class_id_reg,
1017 const GrowableArray<intptr_t>& class_ids,
1018 compiler::Label* is_instance_lbl,
1019 compiler::Label* is_not_instance_lbl);
1020
1021 SubtypeTestCachePtr GenerateInlineInstanceof(
1023 const AbstractType& type,
1024 compiler::Label* is_instance_lbl,
1025 compiler::Label* is_not_instance_lbl);
1026
1027 SubtypeTestCachePtr GenerateInstantiatedTypeWithArgumentsTest(
1029 const AbstractType& dst_type,
1030 compiler::Label* is_instance_lbl,
1031 compiler::Label* is_not_instance_lbl);
1032
1033 bool GenerateInstantiatedTypeNoArgumentsTest(
1035 const AbstractType& dst_type,
1036 compiler::Label* is_instance_lbl,
1037 compiler::Label* is_not_instance_lbl);
1038
1039 SubtypeTestCachePtr GenerateUninstantiatedTypeTest(
1041 const AbstractType& dst_type,
1042 compiler::Label* is_instance_lbl,
1043 compiler::Label* is_not_instance_label);
1044
1045 SubtypeTestCachePtr GenerateFunctionTypeTest(
1047 const AbstractType& dst_type,
1048 compiler::Label* is_instance_lbl,
1049 compiler::Label* is_not_instance_label);
1050
1051 SubtypeTestCachePtr GenerateSubtype1TestCacheLookup(
1053 const Class& type_class,
1054 compiler::Label* is_instance_lbl,
1055 compiler::Label* is_not_instance_lbl);
1056
1057 enum class TypeTestStubKind {
1058 // Just check the instance cid (no closures).
1059 kTestTypeOneArg = 1,
1060 // Also check the instance type arguments.
1061 kTestTypeTwoArgs = 2,
1062 // Also check the instantiator type arguments for the destination type.
1063 kTestTypeThreeArgs = 3,
1064 // Also check the function type arguments for the destination type.
1065 kTestTypeFourArgs = 4,
1066 // Also check the parent function and delayed type arguments for a closure.
1067 kTestTypeSixArgs = 6,
1068 // Also check the destination type, as it is not known at compile time.
1069 kTestTypeSevenArgs = 7,
1070 };
1071
1072 static_assert(static_cast<intptr_t>(TypeTestStubKind::kTestTypeSevenArgs) ==
1074 "Need to adjust kTestTypeMaxArgs");
1075 static constexpr TypeTestStubKind kTestTypeMaxArgs =
1076 TypeTestStubKind::kTestTypeSevenArgs;
1077
1078 // Returns the number of used inputs for a given type test stub kind.
1079 intptr_t UsedInputsForTTSKind(TypeTestStubKind kind) {
1080 return static_cast<intptr_t>(kind);
1081 }
1082
1083 // Returns type test stub kind for a type test against type parameter type.
1084 TypeTestStubKind GetTypeTestStubKindForTypeParameter(
1085 const TypeParameter& type_param);
1086
1087 // Takes input from TypeTestABI registers (or stack on IA32), see
1088 // StubCodeCompiler::GenerateSubtypeNTestCacheStub for caller-save registers.
1089 SubtypeTestCachePtr GenerateCallSubtypeTestStub(
1090 TypeTestStubKind test_kind,
1091 compiler::Label* is_instance_lbl,
1092 compiler::Label* is_not_instance_lbl);
1093
1094 void GenerateBoolToJump(Register bool_reg,
1095 compiler::Label* is_true,
1096 compiler::Label* is_false);
1097
1098 // Perform a greedy local register allocation. Consider all registers free.
1099 void AllocateRegistersLocally(Instruction* instr);
1100
1101 // Map a block number in a forward iteration into the block number in the
1102 // corresponding reverse iteration. Used to obtain an index into
1103 // block_order for reverse iterations.
1104 intptr_t reverse_index(intptr_t index) const {
1105 return block_order_.length() - index - 1;
1106 }
1107
1108 void set_current_instruction(Instruction* current_instruction) {
1109 current_instruction_ = current_instruction;
1110 }
1111
1112 void CompactBlock(BlockEntryInstr* block);
1113 void CompactBlocks();
1114
1115 bool IsListClass(const Class& cls) const {
1116 return cls.ptr() == list_class_.ptr();
1117 }
1118
1119 void EmitSourceLine(Instruction* instr);
1120
1121 intptr_t GetOptimizationThreshold() const;
1122
1123#if defined(DEBUG)
1124 void FrameStateUpdateWith(Instruction* instr);
1125 void FrameStatePush(Definition* defn);
1126 void FrameStatePop(intptr_t count);
1127 bool FrameStateIsSafeToCall();
1128 void FrameStateClear();
1129#endif
1130
1131 // Returns true if instruction lookahead (window size one)
1132 // is amenable to a peephole optimization.
1133 bool IsPeephole(Instruction* instr) const;
1134
1135#if defined(DEBUG)
1136 bool CanCallDart() const {
1137 return current_instruction_ == nullptr ||
1138 current_instruction_->CanCallDart();
1139 }
1140#else
1141 bool CanCallDart() const { return true; }
1142#endif
1143
1144 bool CanPcRelativeCall(const Function& target) const;
1145 bool CanPcRelativeCall(const Code& target) const;
1146 bool CanPcRelativeCall(const AbstractType& target) const;
1147
1148 // This struct contains either function or code, the other one being nullptr.
1149 class StaticCallsStruct : public ZoneAllocated {
1150 public:
1151 Code::CallKind call_kind;
1152 Code::CallEntryPoint entry_point;
1153 const intptr_t offset;
1154 const Function* function; // Can be nullptr.
1155 const Code* code; // Can be nullptr.
1156 const AbstractType* dst_type; // Can be nullptr.
1157 StaticCallsStruct(Code::CallKind call_kind,
1158 Code::CallEntryPoint entry_point,
1159 intptr_t offset_arg,
1160 const Function* function_arg,
1161 const Code* code_arg,
1162 const AbstractType* dst_type)
1163 : call_kind(call_kind),
1164 entry_point(entry_point),
1165 offset(offset_arg),
1166 function(function_arg),
1167 code(code_arg),
1168 dst_type(dst_type) {
1169 DEBUG_ASSERT(function == nullptr ||
1170 function->IsNotTemporaryScopedHandle());
1171 DEBUG_ASSERT(code == nullptr || code->IsNotTemporaryScopedHandle());
1172 DEBUG_ASSERT(dst_type == nullptr ||
1173 dst_type->IsNotTemporaryScopedHandle());
1174 ASSERT(code == nullptr || dst_type == nullptr);
1175 }
1176
1177 private:
1178 DISALLOW_COPY_AND_ASSIGN(StaticCallsStruct);
1179 };
1180
1181 Thread* thread_;
1182 Zone* zone_;
1183 compiler::Assembler* assembler_;
1184 const ParsedFunction& parsed_function_;
1185 const FlowGraph& flow_graph_;
1186 const FlowGraph* intrinsic_flow_graph_ = nullptr;
1187 const GrowableArray<BlockEntryInstr*>& block_order_;
1188
1189#if defined(DEBUG)
1190 GrowableArray<Representation> frame_state_;
1191#endif
1192
1193 // Compiler specific per-block state. Indexed by postorder block number
1194 // for convenience. This is not the block's index in the block order,
1195 // which is reverse postorder.
1196 BlockEntryInstr* current_block_;
1197 ExceptionHandlerList* exception_handlers_list_;
1198 DescriptorList* pc_descriptors_list_;
1199 CompressedStackMapsBuilder* compressed_stackmaps_builder_;
1200 CodeSourceMapBuilder* code_source_map_builder_;
1201 CatchEntryMovesMapBuilder* catch_entry_moves_maps_builder_;
1202 GrowableArray<BlockInfo*> block_info_;
1203 GrowableArray<CompilerDeoptInfo*> deopt_infos_;
1204 GrowableArray<SlowPathCode*> slow_path_code_;
1205 // Fields that were referenced by generated code.
1206 // This list is needed by precompiler to ensure they are retained.
1207 GrowableArray<const Field*> used_static_fields_;
1208 // Stores static call targets as well as stub targets.
1209 // TODO(srdjan): Evaluate if we should store allocation stub targets into a
1210 // separate table?
1211 GrowableArray<StaticCallsStruct*> static_calls_target_table_;
1212 // The table selectors of all dispatch table calls in the current function.
1213 GrowableArray<const compiler::TableSelector*> dispatch_table_call_targets_;
1214 GrowableArray<IndirectGotoInstr*> indirect_gotos_;
1215 bool is_optimizing_;
1216 SpeculativeInliningPolicy* speculative_policy_;
1217 // Set to true if optimized code has IC calls.
1218 bool may_reoptimize_;
1219 // True while emitting intrinsic code.
1220 bool intrinsic_mode_;
1221 compiler::Label* intrinsic_slow_path_label_ = nullptr;
1222 bool fully_intrinsified_ = false;
1223 CodeStatistics* stats_;
1224
1225 // The definition whose value is supposed to be at the top of the
1226 // expression stack. Used by peephole optimization (window size one)
1227 // to eliminate redundant push/pop pairs.
1228 Definition* top_of_stack_ = nullptr;
1229
1230 const Class& double_class_;
1231 const Class& mint_class_;
1232 const Class& float32x4_class_;
1233 const Class& float64x2_class_;
1234 const Class& int32x4_class_;
1235 const Class& list_class_;
1236
1237 // Currently instructions generate deopt stubs internally by
1238 // calling AddDeoptStub. To communicate deoptimization environment
1239 // that should be used when deoptimizing we store it in this variable.
1240 // In future AddDeoptStub should be moved out of the instruction template.
1241 Environment* pending_deoptimization_env_;
1242
1243 ZoneGrowableArray<const ICData*>* deopt_id_to_ic_data_;
1244 Array& edge_counters_array_;
1245
1246 // Instruction currently running EmitNativeCode().
1247 Instruction* current_instruction_ = nullptr;
1248
1249 DISALLOW_COPY_AND_ASSIGN(FlowGraphCompiler);
1250};
1251
1252} // namespace dart
1253
1254#endif // RUNTIME_VM_COMPILER_BACKEND_FLOW_GRAPH_COMPILER_H_