Flutter Engine
base_flow_graph_builder.cc
1// Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/compiler/frontend/base_flow_graph_builder.h"
6
7#include <utility>
8
9#include "vm/compiler/backend/range_analysis.h" // For Range.
10#include "vm/compiler/frontend/flow_graph_builder.h" // For InlineExitCollector.
12#include "vm/compiler/jit/compiler.h" // For Compiler::IsBackgroundCompilation().
14#include "vm/growable_array.h"
15#include "vm/object_store.h"
16#include "vm/resolver.h"
17
18namespace dart {
19namespace kernel {
20
21#define Z (zone_)
22#define IG (thread_->isolate_group())
23
24static bool SupportsCoverage() {
25#if defined(PRODUCT)
26 return false;
27#else
28 return true;
29#endif
30}
31
32Fragment& Fragment::operator+=(const Fragment& other) {
33 ASSERT(is_valid());
34 ASSERT(other.is_valid());
35 if (entry == nullptr) {
36 entry = other.entry;
37 current = other.current;
38 } else if (other.entry != nullptr) {
39 if (current != nullptr) {
40 current->LinkTo(other.entry);
41 }
42 // Although [other.entry] could be unreachable (if this fragment is
43 // closed), there could be a yield continuation point in the middle of
44 // [other] fragment so [other.current] is still reachable.
45 current = other.current;
46 }
47 return *this;
48}
49
50Fragment& Fragment::operator<<=(Instruction* next) {
51 ASSERT(is_valid());
52 if (entry == nullptr) {
53 entry = current = next;
54 } else if (current != nullptr) {
55 current->LinkTo(next);
56 current = next;
57 }
58 return *this;
59}
60
61void Fragment::Prepend(Instruction* start) {
62 ASSERT(is_valid());
63 if (entry == nullptr) {
64 entry = current = start;
65 } else {
66 start->LinkTo(entry);
67 entry = start;
68 }
69}
70
71Fragment Fragment::closed() {
72 ASSERT(entry != nullptr);
73 return Fragment(entry, nullptr);
74}
75
76Fragment operator+(const Fragment& first, const Fragment& second) {
77 Fragment result = first;
78 result += second;
79 return result;
80}
81
82Fragment operator<<(const Fragment& fragment, Instruction* next) {
83 Fragment result = fragment;
84 result <<= next;
85 return result;
86}
87
88TestFragment::TestFragment(Instruction* entry, BranchInstr* branch)
89 : entry(entry),
90 true_successor_addresses(new SuccessorAddressArray(1)),
91 false_successor_addresses(new SuccessorAddressArray(1)) {
92 true_successor_addresses->Add(branch->true_successor_address());
93 false_successor_addresses->Add(branch->false_successor_address());
94}
95
100 ASSERT(!branches.is_empty());
101 for (auto branch : branches) {
102 *branch = builder->BuildTargetEntry();
103 (*branch)->Goto(join);
104 }
105}
106
106
107BlockEntryInstr* TestFragment::CreateSuccessorFor(
108 BaseFlowGraphBuilder* builder,
109 const TestFragment::SuccessorAddressArray& branches) {
110 ASSERT(!branches.is_empty());
111
112 if (branches.length() == 1) {
113 TargetEntryInstr* target = builder->BuildTargetEntry();
114 *(branches[0]) = target;
115 return target;
116 }
117
118 JoinEntryInstr* join = builder->BuildJoinEntry();
119 ConnectBranchesTo(builder, branches, join);
120 return join;
121}
122
123BlockEntryInstr* TestFragment::CreateTrueSuccessor(
124 BaseFlowGraphBuilder* builder) {
125 ASSERT(true_successor_addresses != nullptr);
126 return CreateSuccessorFor(builder, *true_successor_addresses);
127}
128
129BlockEntryInstr* TestFragment::CreateFalseSuccessor(
130 BaseFlowGraphBuilder* builder) {
131 ASSERT(false_successor_addresses != nullptr);
132 return CreateSuccessorFor(builder, *false_successor_addresses);
133}
134
135Fragment BaseFlowGraphBuilder::LoadContextAt(int depth) {
136 intptr_t delta = context_depth_ - depth;
137 ASSERT(delta >= 0);
138 Fragment instructions = LoadLocal(parsed_function_->current_context_var());
139 while (delta-- > 0) {
140 instructions += LoadNativeField(Slot::Context_parent());
141 }
142 return instructions;
143}
144
145Fragment BaseFlowGraphBuilder::StrictCompare(TokenPosition position,
146 Token::Kind kind,
147 bool number_check /* = false */) {
148 Value* right = Pop();
149 Value* left = Pop();
150 StrictCompareInstr* compare =
151 new (Z) StrictCompareInstr(InstructionSource(position), kind, left, right,
152 number_check, GetNextDeoptId());
153 Push(compare);
154 return Fragment(compare);
155}
156
157Fragment BaseFlowGraphBuilder::StrictCompare(Token::Kind kind,
158 bool number_check /* = false */) {
159 Value* right = Pop();
160 Value* left = Pop();
161 StrictCompareInstr* compare = new (Z) StrictCompareInstr(
162 InstructionSource(), kind, left, right, number_check, GetNextDeoptId());
163 Push(compare);
164 return Fragment(compare);
165}
166
167Fragment BaseFlowGraphBuilder::BranchIfTrue(TargetEntryInstr** then_entry,
168 TargetEntryInstr** otherwise_entry,
169 bool negate) {
170 Fragment instructions = Constant(Bool::True());
171 return instructions + BranchIfEqual(then_entry, otherwise_entry, negate);
172}
173
174Fragment BaseFlowGraphBuilder::BranchIfNull(TargetEntryInstr** then_entry,
175 TargetEntryInstr** otherwise_entry,
176 bool negate) {
177 Fragment instructions = NullConstant();
178 return instructions + BranchIfEqual(then_entry, otherwise_entry, negate);
179}
180
181Fragment BaseFlowGraphBuilder::BranchIfEqual(TargetEntryInstr** then_entry,
182 TargetEntryInstr** otherwise_entry,
183 bool negate) {
184 Value* right_value = Pop();
185 Value* left_value = Pop();
186 StrictCompareInstr* compare = new (Z) StrictCompareInstr(
187 InstructionSource(), negate ? Token::kNE_STRICT : Token::kEQ_STRICT,
188 left_value, right_value, false, GetNextDeoptId());
189 BranchInstr* branch = new (Z) BranchInstr(compare, GetNextDeoptId());
190 *then_entry = *branch->true_successor_address() = BuildTargetEntry();
191 *otherwise_entry = *branch->false_successor_address() = BuildTargetEntry();
192 return Fragment(branch).closed();
193}
194
195Fragment BaseFlowGraphBuilder::BranchIfStrictEqual(
196 TargetEntryInstr** then_entry,
197 TargetEntryInstr** otherwise_entry) {
198 Value* rhs = Pop();
199 Value* lhs = Pop();
200 StrictCompareInstr* compare =
201 new (Z) StrictCompareInstr(InstructionSource(), Token::kEQ_STRICT, lhs,
202 rhs, false, GetNextDeoptId());
203 BranchInstr* branch = new (Z) BranchInstr(compare, GetNextDeoptId());
204 *then_entry = *branch->true_successor_address() = BuildTargetEntry();
205 *otherwise_entry = *branch->false_successor_address() = BuildTargetEntry();
206 return Fragment(branch).closed();
207}
208
209Fragment BaseFlowGraphBuilder::Return(TokenPosition position) {
210 Fragment instructions;
211
212 Value* value = Pop();
213 ASSERT(stack_ == nullptr);
214
215 const Representation representation =
216 FlowGraph::ReturnRepresentationOf(function_);
217 DartReturnInstr* return_instr = new (Z) DartReturnInstr(
218 InstructionSource(position), value, GetNextDeoptId(), representation);
219 if (exit_collector_ != nullptr) exit_collector_->AddExit(return_instr);
220
221 instructions <<= return_instr;
222
223 return instructions.closed();
224}
225
226Fragment BaseFlowGraphBuilder::CheckStackOverflow(TokenPosition position,
227 intptr_t stack_depth,
228 intptr_t loop_depth) {
229 return Fragment(new (Z) CheckStackOverflowInstr(
230 InstructionSource(position), stack_depth, loop_depth, GetNextDeoptId(),
231 CheckStackOverflowInstr::kOsrAndPreemption));
232}
233
234Fragment BaseFlowGraphBuilder::CheckStackOverflowInPrologue(
235 TokenPosition position) {
236 if (IsInlining()) {
237 // If we are inlining don't actually attach the stack check. We must still
238 // create the stack check in order to allocate a deopt id.
239 CheckStackOverflow(position, 0, 0);
240 return Fragment();
241 }
242 return CheckStackOverflow(position, 0, 0);
243}
244
245Fragment BaseFlowGraphBuilder::Constant(const Object& value) {
246 DEBUG_ASSERT(value.IsNotTemporaryScopedHandle());
247 ConstantInstr* constant = new (Z) ConstantInstr(value);
248 Push(constant);
249 return Fragment(constant);
250}
251
252Fragment BaseFlowGraphBuilder::Goto(JoinEntryInstr* destination) {
253 return Fragment(new (Z) GotoInstr(destination, GetNextDeoptId())).closed();
254}
255
256Fragment BaseFlowGraphBuilder::IntConstant(int64_t value) {
257 return Fragment(
259}
260
261Fragment BaseFlowGraphBuilder::UnboxedIntConstant(
262 int64_t value,
263 Representation representation) {
265 auto const constant = new (Z) UnboxedConstantInstr(obj, representation);
266 Push(constant);
267 return Fragment(constant);
268}
269
270Fragment BaseFlowGraphBuilder::MemoryCopy(classid_t src_cid,
271 classid_t dest_cid,
272 bool unboxed_inputs,
273 bool can_overlap) {
274 Value* length = Pop();
275 Value* dest_start = Pop();
276 Value* src_start = Pop();
277 Value* dest = Pop();
278 Value* src = Pop();
279 auto copy =
280 new (Z) MemoryCopyInstr(src, src_cid, dest, dest_cid, src_start,
281 dest_start, length, unboxed_inputs, can_overlap);
282 return Fragment(copy);
283}
284
285Fragment BaseFlowGraphBuilder::TailCall(const Code& code) {
286 Value* arg_desc = Pop();
287 return Fragment(new (Z) TailCallInstr(code, arg_desc)).closed();
288}
289
290void BaseFlowGraphBuilder::InlineBailout(const char* reason) {
291 if (IsInlining()) {
292 parsed_function_->function().set_is_inlinable(false);
293 parsed_function_->Bailout("kernel::BaseFlowGraphBuilder", reason);
294 }
295}
296
299 const ArgumentsDescriptor descriptor(saved_args_desc_array());
300 // Double-check that compile-time Size() matches runtime size on target.
302 function_, descriptor.Count()));
304 }
307}
308
309Fragment BaseFlowGraphBuilder::TestTypeArgsLen(Fragment eq_branch,
310 Fragment neq_branch,
311 intptr_t num_type_args) {
313
314 // Compile-time arguments descriptor case.
317 return descriptor.TypeArgsLen() == num_type_args ? eq_branch : neq_branch;
318 }
319
320 // Runtime arguments descriptor case.
321 TargetEntryInstr* eq_entry;
322 TargetEntryInstr* neq_entry;
323
324 Fragment test = LoadArgDescriptor();
325 test += LoadNativeField(Slot::ArgumentsDescriptor_type_args_len());
326 test += IntConstant(num_type_args);
327 test += BranchIfEqual(&eq_entry, &neq_entry);
328
329 eq_branch.Prepend(eq_entry);
330 neq_branch.Prepend(neq_entry);
331
332 JoinEntryInstr* join = BuildJoinEntry();
333 eq_branch += Goto(join);
334 neq_branch += Goto(join);
335
336 return Fragment(test.entry, join);
337}
338
339Fragment BaseFlowGraphBuilder::TestDelayedTypeArgs(LocalVariable* closure,
340 Fragment present,
341 Fragment absent) {
342 Fragment test;
343
344 TargetEntryInstr* absent_entry;
345 TargetEntryInstr* present_entry;
346
347 test += LoadLocal(closure);
348 test += LoadNativeField(Slot::Closure_delayed_type_arguments());
349 test += Constant(Object::empty_type_arguments());
350 test += BranchIfEqual(&absent_entry, &present_entry);
351
352 present.Prepend(present_entry);
353 absent.Prepend(absent_entry);
354
355 JoinEntryInstr* join = BuildJoinEntry();
356 absent += Goto(join);
357 present += Goto(join);
358
359 return Fragment(test.entry, join);
360}
361
362Fragment BaseFlowGraphBuilder::TestAnyTypeArgs(Fragment present,
363 Fragment absent) {
364 if (function_.IsClosureFunction()) {
365 LocalVariable* closure = parsed_function_->ParameterVariable(0);
366
367 JoinEntryInstr* complete = BuildJoinEntry();
368 JoinEntryInstr* present_entry = BuildJoinEntry();
369
370 Fragment test = TestTypeArgsLen(
371 TestDelayedTypeArgs(closure, Goto(present_entry), absent),
372 Goto(present_entry), 0);
373 test += Goto(complete);
374
375 Fragment(present_entry) + present + Goto(complete);
376
377 return Fragment(test.entry, complete);
378 } else {
379 return TestTypeArgsLen(absent, present, 0);
380 }
381}
382
383Fragment BaseFlowGraphBuilder::LoadIndexed(classid_t class_id,
384 intptr_t index_scale,
385 bool index_unboxed,
386 AlignmentType alignment) {
387 Value* index = Pop();
388 // A C pointer if index_unboxed, otherwise a boxed Dart value.
389 Value* array = Pop();
390
391 // We use C behavior when dereferencing pointers, so we use aligned access in
392 // all cases.
393 LoadIndexedInstr* instr = new (Z)
394 LoadIndexedInstr(array, index, index_unboxed, index_scale, class_id,
395 alignment, DeoptId::kNone, InstructionSource());
396 Push(instr);
397 return Fragment(instr);
398}
399
400Fragment BaseFlowGraphBuilder::GenericCheckBound() {
401 Value* index = Pop();
402 Value* length = Pop();
403 auto* instr = new (Z) GenericCheckBoundInstr(length, index, GetNextDeoptId());
404 Push(instr);
405 return Fragment(instr);
406}
407
408Fragment BaseFlowGraphBuilder::LoadUntagged(intptr_t offset) {
409 Value* object = Pop();
410 auto load = new (Z) LoadUntaggedInstr(object, offset);
411 Push(load);
412 return Fragment(load);
413}
414
416 Value* value = Pop();
417 auto converted = new (Z)
419 converted->mark_truncating();
420 Push(converted);
421 return Fragment(converted);
422}
423
425 Value* value = Pop();
426 auto converted = new (Z)
428 converted->mark_truncating();
429 Push(converted);
430 return Fragment(converted);
431}
432
433Fragment BaseFlowGraphBuilder::CalculateElementAddress(intptr_t index_scale) {
434 Value* offset = Pop();
435 Value* index = Pop();
436 Value* base = Pop();
437 auto adjust =
438 new (Z) CalculateElementAddressInstr(base, index, index_scale, offset);
439 Push(adjust);
440 return Fragment(adjust);
441}
442
444 Value* value = Pop();
446 Push(instr);
447 return Fragment(instr);
448}
449
451 Value* value = Pop();
453 value, DeoptId::kNone, Instruction::SpeculativeMode::kNotSpeculative);
454 Push(instr);
455 return Fragment(instr);
456}
457
458Fragment BaseFlowGraphBuilder::LoadField(const Field& field,
459 bool calls_initializer) {
460 return LoadNativeField(Slot::Get(MayCloneField(Z, field), parsed_function_),
461 calls_initializer);
462}
463
464Fragment BaseFlowGraphBuilder::LoadNativeField(
465 const Slot& native_field,
466 InnerPointerAccess loads_inner_pointer,
467 bool calls_initializer) {
468 LoadFieldInstr* load = new (Z) LoadFieldInstr(
469 Pop(), native_field, loads_inner_pointer, InstructionSource(),
470 calls_initializer, calls_initializer ? GetNextDeoptId() : DeoptId::kNone);
471 Push(load);
472 return Fragment(load);
473}
474
475Fragment BaseFlowGraphBuilder::LoadNativeField(const Slot& native_field,
476 bool calls_initializer) {
477 const InnerPointerAccess loads_inner_pointer =
478 native_field.representation() == kUntagged
479 ? (native_field.may_contain_inner_pointer()
480 ? InnerPointerAccess::kMayBeInnerPointer
481 : InnerPointerAccess::kCannotBeInnerPointer)
482 : InnerPointerAccess::kNotUntagged;
483 return LoadNativeField(native_field, loads_inner_pointer, calls_initializer);
484}
485
486Fragment BaseFlowGraphBuilder::LoadLocal(LocalVariable* variable) {
487 ASSERT(!variable->is_captured());
489 Push(load);
490 return Fragment(load);
491}
492
493Fragment BaseFlowGraphBuilder::NullConstant() {
494 return Constant(Object::ZoneHandle(Z, Object::null()));
495}
496
497Fragment BaseFlowGraphBuilder::GuardFieldLength(const Field& field,
498 intptr_t deopt_id) {
499 return Fragment(new (Z) GuardFieldLengthInstr(Pop(), field, deopt_id));
500}
501
502Fragment BaseFlowGraphBuilder::GuardFieldClass(const Field& field,
503 intptr_t deopt_id) {
504 return Fragment(new (Z) GuardFieldClassInstr(Pop(), field, deopt_id));
505}
506
507const Field& BaseFlowGraphBuilder::MayCloneField(Zone* zone,
508 const Field& field) {
509 if (CompilerState::Current().should_clone_fields() && field.IsOriginal()) {
510 return Field::ZoneHandle(zone, field.CloneFromOriginal());
511 } else {
512 DEBUG_ASSERT(field.IsNotTemporaryScopedHandle());
513 return field;
514 }
515}
516
517Fragment BaseFlowGraphBuilder::StoreNativeField(
518 TokenPosition position,
519 const Slot& slot,
520 InnerPointerAccess stores_inner_pointer,
521 StoreFieldInstr::Kind kind /* = StoreFieldInstr::Kind::kOther */,
522 StoreBarrierType emit_store_barrier /* = kEmitStoreBarrier */,
523 compiler::Assembler::MemoryOrder memory_order /* = kRelaxed */) {
524 Value* value = Pop();
525 Value* instance = Pop();
526 StoreFieldInstr* store = new (Z)
527 StoreFieldInstr(slot, instance, value, emit_store_barrier,
528 stores_inner_pointer, InstructionSource(position), kind);
529 return Fragment(store);
530}
531
532Fragment BaseFlowGraphBuilder::StoreField(
533 const Field& field,
534 StoreFieldInstr::Kind kind /* = StoreFieldInstr::Kind::kOther */,
535 StoreBarrierType emit_store_barrier) {
536 return StoreNativeField(TokenPosition::kNoSource,
537 Slot::Get(MayCloneField(Z, field), parsed_function_),
538 kind, emit_store_barrier);
539}
540
541Fragment BaseFlowGraphBuilder::StoreFieldGuarded(
542 const Field& field,
543 StoreFieldInstr::Kind kind /* = StoreFieldInstr::Kind::kOther */) {
544 Fragment instructions;
545 const Field& field_clone = MayCloneField(Z, field);
546 if (IG->use_field_guards()) {
547 LocalVariable* store_expression = MakeTemporary();
548
549 // Note: unboxing decision can only change due to hot reload at which
550 // point all code will be cleared, so there is no need to worry about
551 // stability of deopt id numbering.
552 if (!field_clone.is_unboxed()) {
553 instructions += LoadLocal(store_expression);
554 instructions += GuardFieldClass(field_clone, GetNextDeoptId());
555 }
556
557 // Field length guard can be omitted if it is not needed.
558 // However, it is possible that we were tracking list length previously,
559 // and generated length guards in the past. We need to generate same IL
560 // to keep deopt ids stable, but we can discard generated IL fragment
561 // if length guard is not needed.
562 Fragment length_guard;
563 length_guard += LoadLocal(store_expression);
564 length_guard += GuardFieldLength(field_clone, GetNextDeoptId());
565
566 if (field_clone.needs_length_check()) {
567 instructions += length_guard;
568 }
569
570 // If we are tracking exactness of the static type of the field then
571 // emit appropriate guard.
572 if (field_clone.static_type_exactness_state().IsTracking()) {
573 instructions += LoadLocal(store_expression);
574 instructions <<=
575 new (Z) GuardFieldTypeInstr(Pop(), field_clone, GetNextDeoptId());
576 }
577 }
578 instructions +=
579 StoreNativeField(Slot::Get(field_clone, parsed_function_), kind);
580 return instructions;
581}
582
583Fragment BaseFlowGraphBuilder::LoadStaticField(const Field& field,
584 bool calls_initializer) {
585 LoadStaticFieldInstr* load = new (Z) LoadStaticFieldInstr(
586 field, InstructionSource(), calls_initializer,
587 calls_initializer ? GetNextDeoptId() : DeoptId::kNone);
588 Push(load);
589 return Fragment(load);
590}
591
592Fragment BaseFlowGraphBuilder::RedefinitionWithType(const AbstractType& type) {
593 auto redefinition = new (Z) RedefinitionInstr(Pop());
594 redefinition->set_constrained_type(
595 new (Z) CompileType(CompileType::FromAbstractType(
596 type, CompileType::kCanBeNull, CompileType::kCannotBeSentinel)));
597 Push(redefinition);
598 return Fragment(redefinition);
599}
600
601Fragment BaseFlowGraphBuilder::ReachabilityFence() {
602 Fragment instructions;
603 instructions <<= new (Z) ReachabilityFenceInstr(Pop());
604 return instructions;
605}
606
607Fragment BaseFlowGraphBuilder::Utf8Scan() {
608 Value* table = Pop();
609 Value* end = Pop();
610 Value* start = Pop();
611 Value* bytes = Pop();
612 Value* decoder = Pop();
613 const Field& scan_flags_field =
615 auto scan = new (Z) Utf8ScanInstr(
616 decoder, bytes, start, end, table,
617 Slot::Get(MayCloneField(Z, scan_flags_field), parsed_function_));
618 Push(scan);
619 return Fragment(scan);
620}
621
622Fragment BaseFlowGraphBuilder::StoreStaticField(TokenPosition position,
623 const Field& field) {
624 return Fragment(new (Z) StoreStaticFieldInstr(MayCloneField(Z, field), Pop(),
625 InstructionSource(position)));
626}
627
628Fragment BaseFlowGraphBuilder::StoreIndexed(classid_t class_id) {
629 // This fragment builder cannot be used for typed data accesses.
630 ASSERT(!IsTypedDataBaseClassId(class_id));
631 Value* value = Pop();
632 Value* index = Pop();
633 const StoreBarrierType emit_store_barrier =
634 value->BindsToConstant() ? kNoStoreBarrier : kEmitStoreBarrier;
635 StoreIndexedInstr* store = new (Z) StoreIndexedInstr(
636 Pop(), // Array.
637 index, value, emit_store_barrier, /*index_unboxed=*/false,
640 return Fragment(store);
641}
642
643Fragment BaseFlowGraphBuilder::StoreIndexedTypedData(classid_t class_id,
644 intptr_t index_scale,
645 bool index_unboxed,
646 AlignmentType alignment) {
648 Value* value = Pop();
649 Value* index = Pop();
650 Value* c_pointer = Pop();
651 StoreIndexedInstr* instr = new (Z) StoreIndexedInstr(
652 c_pointer, index, value, kNoStoreBarrier, index_unboxed, index_scale,
653 class_id, alignment, DeoptId::kNone, InstructionSource(),
654 Instruction::SpeculativeMode::kNotSpeculative);
655 return Fragment(instr);
656}
657
658Fragment BaseFlowGraphBuilder::StoreLocal(TokenPosition position,
659 LocalVariable* variable) {
660 if (variable->is_captured()) {
661 Fragment instructions;
662 LocalVariable* value = MakeTemporary();
663 instructions += LoadContextAt(variable->owner()->context_level());
664 instructions += LoadLocal(value);
665 instructions += StoreNativeField(
666 position, Slot::GetContextVariableSlotFor(thread_, *variable));
667 return instructions;
668 }
669 return StoreLocalRaw(position, variable);
670}
671
672Fragment BaseFlowGraphBuilder::StoreLocalRaw(TokenPosition position,
673 LocalVariable* variable) {
674 ASSERT(!variable->is_captured());
675 Value* value = Pop();
676 StoreLocalInstr* store =
677 new (Z) StoreLocalInstr(*variable, value, InstructionSource(position));
678 Fragment instructions(store);
679 Push(store);
680 return instructions;
681}
682
683LocalVariable* BaseFlowGraphBuilder::MakeTemporary(const char* suffix) {
684 static constexpr intptr_t kTemporaryNameLength = 64;
685 char name[kTemporaryNameLength];
686 intptr_t index = stack_->definition()->temp_index();
687 if (suffix != nullptr) {
688 Utils::SNPrint(name, kTemporaryNameLength, ":t_%s", suffix);
689 } else {
690 Utils::SNPrint(name, kTemporaryNameLength, ":t%" Pd, index);
691 }
692 const String& symbol_name =
693 String::ZoneHandle(Z, Symbols::New(thread_, name));
694 LocalVariable* variable =
695 new (Z) LocalVariable(TokenPosition::kNoSource, TokenPosition::kNoSource,
696 symbol_name, Object::dynamic_type());
697 // Set the index relative to the base of the expression stack including
698 // outgoing arguments.
699 variable->set_index(
701
702 // The value on top of the stack has uses as if it were a local variable.
703 // Mark all definitions on the stack as used so that their temp indices
704 // will not be cleared (causing them to never be materialized in the
705 // expression stack and skew stack depth).
706 for (Value* item = stack_; item != nullptr; item = item->next_use()) {
707 item->definition()->set_ssa_temp_index(0);
708 }
709
710 return variable;
711}
712
713Fragment BaseFlowGraphBuilder::DropTemporary(LocalVariable** temp) {
714 ASSERT(temp != nullptr && *temp != nullptr && (*temp)->HasIndex());
715 // Check that the temporary matches the current stack definition.
718 -(*temp)->index().value() - parsed_function_->num_stack_locals());
719 *temp = nullptr; // Clear to avoid inadvertent usage after dropping.
720 return Drop();
721}
722
723void BaseFlowGraphBuilder::SetTempIndex(Definition* definition) {
724 definition->set_temp_index(
725 stack_ == nullptr ? 0 : stack_->definition()->temp_index() + 1);
726}
727
728void BaseFlowGraphBuilder::Push(Definition* definition) {
729 SetTempIndex(definition);
730 Value::AddToList(new (Z) Value(definition), &stack_);
731}
732
733Definition* BaseFlowGraphBuilder::Peek(intptr_t depth) {
734 Value* head = stack_;
735 for (intptr_t i = 0; i < depth; ++i) {
736 ASSERT(head != nullptr);
737 head = head->next_use();
738 }
739 ASSERT(head != nullptr);
740 return head->definition();
741}
742
743Value* BaseFlowGraphBuilder::Pop() {
744 ASSERT(stack_ != nullptr);
745 Value* value = stack_;
746 stack_ = value->next_use();
747 if (stack_ != nullptr) stack_->set_previous_use(nullptr);
748
749 value->set_next_use(nullptr);
750 value->set_previous_use(nullptr);
751 value->definition()->ClearSSATempIndex();
752 return value;
753}
754
755Fragment BaseFlowGraphBuilder::Drop() {
756 ASSERT(stack_ != nullptr);
757 Fragment instructions;
758 Definition* definition = stack_->definition();
759 // The SSA renaming implementation doesn't like [LoadLocal]s without a
760 // tempindex.
761 if (definition->HasSSATemp() || definition->IsLoadLocal()) {
762 instructions <<= new (Z) DropTempsInstr(1, nullptr);
763 } else {
764 definition->ClearTempIndex();
765 }
766
767 Pop();
768 return instructions;
769}
770
771Fragment BaseFlowGraphBuilder::DropTempsPreserveTop(
772 intptr_t num_temps_to_drop) {
773 Value* top = Pop();
774
775 for (intptr_t i = 0; i < num_temps_to_drop; ++i) {
776 Pop();
777 }
778
779 DropTempsInstr* drop_temps = new (Z) DropTempsInstr(num_temps_to_drop, top);
780 Push(drop_temps);
781
782 return Fragment(drop_temps);
783}
784
785Fragment BaseFlowGraphBuilder::MakeTemp() {
786 MakeTempInstr* make_temp = new (Z) MakeTempInstr(Z);
787 Push(make_temp);
788 return Fragment(make_temp);
789}
790
791TargetEntryInstr* BaseFlowGraphBuilder::BuildTargetEntry() {
792 return new (Z) TargetEntryInstr(AllocateBlockId(), CurrentTryIndex(),
793 GetNextDeoptId(), GetStackDepth());
794}
795
796FunctionEntryInstr* BaseFlowGraphBuilder::BuildFunctionEntry(
797 GraphEntryInstr* graph_entry) {
798 return new (Z) FunctionEntryInstr(graph_entry, AllocateBlockId(),
799 CurrentTryIndex(), GetNextDeoptId());
800}
801
802JoinEntryInstr* BaseFlowGraphBuilder::BuildJoinEntry(intptr_t try_index) {
803 return new (Z) JoinEntryInstr(AllocateBlockId(), try_index, GetNextDeoptId(),
804 GetStackDepth());
805}
806
807JoinEntryInstr* BaseFlowGraphBuilder::BuildJoinEntry() {
808 return new (Z) JoinEntryInstr(AllocateBlockId(), CurrentTryIndex(),
809 GetNextDeoptId(), GetStackDepth());
810}
811
812IndirectEntryInstr* BaseFlowGraphBuilder::BuildIndirectEntry(
813 intptr_t indirect_id,
814 intptr_t try_index) {
815 return new (Z) IndirectEntryInstr(AllocateBlockId(), indirect_id, try_index,
816 GetNextDeoptId());
817}
818
819InputsArray BaseFlowGraphBuilder::GetArguments(int count) {
820 InputsArray arguments(Z, count);
821 arguments.SetLength(count);
822 for (intptr_t i = count - 1; i >= 0; --i) {
823 arguments[i] = Pop();
824 }
825 return arguments;
826}
827
828Fragment BaseFlowGraphBuilder::SmiRelationalOp(Token::Kind kind) {
829 Value* right = Pop();
830 Value* left = Pop();
831 RelationalOpInstr* instr = new (Z) RelationalOpInstr(
832 InstructionSource(), kind, left, right, kSmiCid, GetNextDeoptId());
833 Push(instr);
834 return Fragment(instr);
835}
836
837Fragment BaseFlowGraphBuilder::SmiBinaryOp(Token::Kind kind,
838 bool is_truncating) {
839 return BinaryIntegerOp(kind, kTagged, is_truncating);
840}
841
842Fragment BaseFlowGraphBuilder::BinaryIntegerOp(Token::Kind kind,
843 Representation representation,
844 bool is_truncating) {
845 ASSERT(representation == kUnboxedInt32 || representation == kUnboxedUint32 ||
846 representation == kUnboxedInt64 || representation == kTagged);
847 Value* right = Pop();
848 Value* left = Pop();
849 BinaryIntegerOpInstr* instr = BinaryIntegerOpInstr::Make(
850 representation, kind, left, right, GetNextDeoptId());
851 ASSERT(instr != nullptr);
852 if (is_truncating) {
853 instr->mark_truncating();
854 }
855 Push(instr);
856 return Fragment(instr);
857}
858
859Fragment BaseFlowGraphBuilder::LoadFpRelativeSlot(
860 intptr_t offset,
861 CompileType result_type,
862 Representation representation) {
863 LoadIndexedUnsafeInstr* instr = new (Z)
864 LoadIndexedUnsafeInstr(Pop(), offset, result_type, representation);
865 Push(instr);
866 return Fragment(instr);
867}
868
869Fragment BaseFlowGraphBuilder::StoreFpRelativeSlot(intptr_t offset) {
870 Value* value = Pop();
871 Value* index = Pop();
872 StoreIndexedUnsafeInstr* instr =
873 new (Z) StoreIndexedUnsafeInstr(index, value, offset);
874 return Fragment(instr);
875}
876
877JoinEntryInstr* BaseFlowGraphBuilder::BuildThrowNoSuchMethod() {
878 JoinEntryInstr* nsm = BuildJoinEntry();
879
880 Fragment failing(nsm);
881 const Code& nsm_handler = Code::ZoneHandle(
882 Z, IG->object_store()->call_closure_no_such_method_stub());
883 failing += LoadArgDescriptor();
884 failing += TailCall(nsm_handler);
885
886 return nsm;
887}
888
889Fragment BaseFlowGraphBuilder::ThrowException(TokenPosition position) {
890 Fragment instructions;
891 Value* exception = Pop();
892 instructions += Fragment(new (Z) ThrowInstr(InstructionSource(position),
893 GetNextDeoptId(), exception))
894 .closed();
895 // Use its side effect of leaving a constant on the stack (does not change
896 // the graph).
897 NullConstant();
898
899 return instructions;
900}
901
902Fragment BaseFlowGraphBuilder::AssertBool(TokenPosition position) {
903 Value* value = Pop();
904 AssertBooleanInstr* instr = new (Z)
905 AssertBooleanInstr(InstructionSource(position), value, GetNextDeoptId());
906 Push(instr);
907 return Fragment(instr);
908}
909
910Fragment BaseFlowGraphBuilder::BooleanNegate() {
911 BooleanNegateInstr* negate = new (Z) BooleanNegateInstr(Pop());
912 Push(negate);
913 return Fragment(negate);
914}
915
916Fragment BaseFlowGraphBuilder::AllocateContext(
917 const ZoneGrowableArray<const Slot*>& context_slots) {
918 AllocateContextInstr* allocate = new (Z) AllocateContextInstr(
919 InstructionSource(), context_slots, GetNextDeoptId());
920 Push(allocate);
921 return Fragment(allocate);
922}
923
924Fragment BaseFlowGraphBuilder::AllocateClosure(TokenPosition position,
925 bool has_instantiator_type_args,
926 bool is_generic,
927 bool is_tear_off) {
928 Value* instantiator_type_args =
929 (has_instantiator_type_args ? Pop() : nullptr);
930 auto const context = Pop();
931 auto const function = Pop();
932 auto* allocate = new (Z) AllocateClosureInstr(
933 InstructionSource(position), function, context, instantiator_type_args,
934 is_generic, is_tear_off, GetNextDeoptId());
935 Push(allocate);
936 return Fragment(allocate);
937}
938
939Fragment BaseFlowGraphBuilder::CreateArray() {
940 Value* element_count = Pop();
941 CreateArrayInstr* array =
943 Pop(), // Element type.
944 element_count, GetNextDeoptId());
945 Push(array);
946 return Fragment(array);
947}
948
949Fragment BaseFlowGraphBuilder::AllocateRecord(TokenPosition position,
950 RecordShape shape) {
951 AllocateRecordInstr* allocate = new (Z)
952 AllocateRecordInstr(InstructionSource(position), shape, GetNextDeoptId());
953 Push(allocate);
954 return Fragment(allocate);
955}
956
957Fragment BaseFlowGraphBuilder::AllocateSmallRecord(TokenPosition position,
958 RecordShape shape) {
959 const intptr_t num_fields = shape.num_fields();
960 ASSERT(num_fields == 2 || num_fields == 3);
961 Value* value2 = (num_fields > 2) ? Pop() : nullptr;
962 Value* value1 = Pop();
963 Value* value0 = Pop();
964 AllocateSmallRecordInstr* allocate = new (Z)
965 AllocateSmallRecordInstr(InstructionSource(position), shape, value0,
966 value1, value2, GetNextDeoptId());
967 Push(allocate);
968 return Fragment(allocate);
969}
970
971Fragment BaseFlowGraphBuilder::AllocateTypedData(TokenPosition position,
972 classid_t class_id) {
973 Value* num_elements = Pop();
974 auto* instr = new (Z) AllocateTypedDataInstr(
975 InstructionSource(position), class_id, num_elements, GetNextDeoptId());
976 Push(instr);
977 return Fragment(instr);
978}
979
980Fragment BaseFlowGraphBuilder::InstantiateType(const AbstractType& type) {
981 Value* function_type_args = Pop();
982 Value* instantiator_type_args = Pop();
983 InstantiateTypeInstr* instr = new (Z)
984 InstantiateTypeInstr(InstructionSource(), type, instantiator_type_args,
985 function_type_args, GetNextDeoptId());
986 Push(instr);
987 return Fragment(instr);
988}
989
990Fragment BaseFlowGraphBuilder::InstantiateTypeArguments(
991 const TypeArguments& type_arguments_value) {
992 Fragment instructions;
993 instructions += Constant(type_arguments_value);
994
995 Value* type_arguments = Pop();
996 Value* function_type_args = Pop();
997 Value* instantiator_type_args = Pop();
998 const Class& instantiator_class = Class::ZoneHandle(Z, function_.Owner());
1000 InstructionSource(), instantiator_type_args, function_type_args,
1001 type_arguments, instantiator_class, function_, GetNextDeoptId());
1002 Push(instr);
1003 instructions += Fragment(instr);
1004 return instructions;
1005}
1006
1008 Value* type_arguments = Pop();
1009 Value* function_type_args = Pop();
1010 Value* instantiator_type_args = Pop();
1011 const Function& function = Object::null_function();
1012 const Class& instantiator_class = Class::ZoneHandle(Z);
1014 InstructionSource(), instantiator_type_args, function_type_args,
1015 type_arguments, instantiator_class, function, GetNextDeoptId());
1016 Push(instr);
1017 return Fragment(instr);
1018}
1019
1022 Push(load);
1023 return Fragment(load);
1024}
1025
1026Fragment BaseFlowGraphBuilder::AllocateObject(TokenPosition position,
1027 const Class& klass,
1028 intptr_t argument_count) {
1029 ASSERT((argument_count == 0) || (argument_count == 1));
1030 Value* type_arguments = (argument_count > 0) ? Pop() : nullptr;
1031 AllocateObjectInstr* allocate = new (Z) AllocateObjectInstr(
1032 InstructionSource(position), klass, GetNextDeoptId(), type_arguments);
1033 Push(allocate);
1034 return Fragment(allocate);
1035}
1036
1037Fragment BaseFlowGraphBuilder::Box(Representation from) {
1038 Fragment instructions;
1039 if (from == kUnboxedFloat) {
1040 instructions += FloatToDouble();
1041 from = kUnboxedDouble;
1042 }
1043 BoxInstr* box = BoxInstr::Create(from, Pop());
1044 instructions <<= box;
1045 Push(box);
1046 return instructions;
1047}
1048
1049Fragment BaseFlowGraphBuilder::DebugStepCheck(TokenPosition position) {
1050#ifdef PRODUCT
1051 return Fragment();
1052#else
1053 return Fragment(new (Z) DebugStepCheckInstr(
1054 InstructionSource(position), UntaggedPcDescriptors::kRuntimeCall,
1055 GetNextDeoptId()));
1056#endif
1057}
1058
1059Fragment BaseFlowGraphBuilder::CheckNull(TokenPosition position,
1060 LocalVariable* receiver,
1061 const String& function_name) {
1062 Fragment instructions = LoadLocal(receiver);
1063
1064 CheckNullInstr* check_null = new (Z) CheckNullInstr(
1068
1069 // Does not use the redefinition, no `Push(check_null)`.
1070 instructions <<= check_null;
1071
1072 return instructions;
1073}
1074
1075Fragment BaseFlowGraphBuilder::CheckNullOptimized(
1076 const String& function_name,
1077 CheckNullInstr::ExceptionType exception_type,
1078 TokenPosition position) {
1079 Value* value = Pop();
1080 CheckNullInstr* check_null =
1081 new (Z) CheckNullInstr(value, function_name, GetNextDeoptId(),
1082 InstructionSource(position), exception_type);
1083 Push(check_null); // Use the redefinition.
1084 return Fragment(check_null);
1085}
1086
1087Fragment BaseFlowGraphBuilder::CheckNotDeeplyImmutable(
1088 CheckWritableInstr::Kind kind) {
1089 Value* value = Pop();
1090 auto* check_writable = new (Z)
1092 return Fragment(check_writable);
1093}
1094
1095void BaseFlowGraphBuilder::RecordUncheckedEntryPoint(
1096 GraphEntryInstr* graph_entry,
1097 FunctionEntryInstr* unchecked_entry) {
1098 // Closures always check all arguments on their checked entry-point, most
1099 // call-sites are unchecked, and they're inlined less often, so it's very
1100 // beneficial to build multiple entry-points for them. Regular methods however
1101 // have fewer checks to begin with since they have dynamic invocation
1102 // forwarders, so in AOT we implement a more conservative time-space tradeoff
1103 // by only building the unchecked entry-point when inlining. We should
1104 // reconsider this heuristic if we identify non-inlined type-checks in
1105 // hotspots of new benchmarks.
1108 graph_entry->set_unchecked_entry(unchecked_entry);
1109 } else if (InliningUncheckedEntry()) {
1110 graph_entry->set_normal_entry(unchecked_entry);
1111 }
1112}
1113
1114Fragment BaseFlowGraphBuilder::BuildEntryPointsIntrospection() {
1115 if (!FLAG_enable_testing_pragmas) return Drop();
1116
1118
1119 if (function.IsImplicitClosureFunction()) {
1120 const auto& parent = Function::Handle(Z, function.parent_function());
1121 const auto& func_name = String::Handle(Z, parent.name());
1122 const auto& owner = Class::Handle(Z, parent.Owner());
1123 if (owner.EnsureIsFinalized(thread_) == Error::null()) {
1124 function = Resolver::ResolveFunction(Z, owner, func_name);
1125 }
1126 }
1127
1129 if (!Library::FindPragma(thread_, /*only_core=*/false, function,
1130 Symbols::vm_trace_entrypoints(), /*multiple=*/false,
1131 &options) ||
1132 options.IsNull() || !options.IsClosure()) {
1133 return Drop();
1134 }
1135 auto& closure = Closure::ZoneHandle(Z, Closure::Cast(options).ptr());
1136 LocalVariable* entry_point_num = MakeTemporary("entry_point_num");
1137
1139 Z, String::New(function.ToLibNamePrefixedQualifiedCString(), Heap::kOld));
1143 Heap::kOld);
1144 }
1145 if (!function_name.IsCanonical()) {
1147 }
1148
1149 Fragment call_hook;
1150 call_hook += Constant(closure);
1151 call_hook += Constant(function_name);
1152 call_hook += LoadLocal(entry_point_num);
1153 if (FLAG_precompiled_mode) {
1154 call_hook += Constant(closure);
1155 } else {
1156 call_hook += Constant(Function::ZoneHandle(Z, closure.function()));
1157 }
1158 call_hook += ClosureCall(Function::null_function(), TokenPosition::kNoSource,
1159 /*type_args_len=*/0, /*argument_count=*/3,
1160 /*argument_names=*/Array::ZoneHandle(Z));
1161 call_hook += Drop(); // result of closure call
1162 call_hook += DropTemporary(&entry_point_num); // entrypoint number
1163 return call_hook;
1164}
1165
1166Fragment BaseFlowGraphBuilder::ClosureCall(
1167 const Function& target_function,
1168 TokenPosition position,
1169 intptr_t type_args_len,
1170 intptr_t argument_count,
1171 const Array& argument_names,
1172 const InferredTypeMetadata* result_type) {
1173 Fragment instructions = RecordCoverage(position);
1174 const intptr_t total_count =
1175 (type_args_len > 0 ? 1 : 0) + argument_count +
1176 /*closure (bare instructions) or function (otherwise)*/ 1;
1177 InputsArray arguments = GetArguments(total_count);
1178 ClosureCallInstr* call = new (Z) ClosureCallInstr(
1179 target_function, std::move(arguments), type_args_len, argument_names,
1180 InstructionSource(position), GetNextDeoptId());
1181 Push(call);
1182 instructions <<= call;
1183 if (result_type != nullptr && result_type->IsConstant()) {
1184 instructions += Drop();
1185 instructions += Constant(result_type->constant_value);
1186 }
1187 return instructions;
1188}
1189
1190void BaseFlowGraphBuilder::reset_context_depth_for_deopt_id(intptr_t deopt_id) {
1191 if (context_level_array_ != nullptr) {
1192 for (intptr_t i = 0, n = context_level_array_->length(); i < n; i += 2) {
1193 if (context_level_array_->At(i) == deopt_id) {
1194 (*context_level_array_)[i + 1] = context_depth_;
1195 return;
1196 }
1197 ASSERT(context_level_array_->At(i) < deopt_id);
1198 }
1199 }
1200}
1201
1202Fragment BaseFlowGraphBuilder::AssertAssignable(
1203 TokenPosition position,
1204 const String& dst_name,
1205 AssertAssignableInstr::Kind kind) {
1206 Value* function_type_args = Pop();
1207 Value* instantiator_type_args = Pop();
1208 Value* dst_type = Pop();
1209 Value* value = Pop();
1210
1211 AssertAssignableInstr* instr = new (Z) AssertAssignableInstr(
1212 InstructionSource(position), value, dst_type, instantiator_type_args,
1213 function_type_args, dst_name, GetNextDeoptId(), kind);
1214 Push(instr);
1215
1216 return Fragment(instr);
1217}
1218
1219Fragment BaseFlowGraphBuilder::InitConstantParameters() {
1220 Fragment instructions;
1221 const intptr_t parameter_count = parsed_function_->function().NumParameters();
1222 for (intptr_t i = 0; i < parameter_count; ++i) {
1223 LocalVariable* raw_parameter = parsed_function_->RawParameterVariable(i);
1224 const Object* param_value = raw_parameter->inferred_arg_value();
1225 if (param_value != nullptr) {
1226 instructions += Constant(*param_value);
1227 instructions += StoreLocalRaw(TokenPosition::kNoSource, raw_parameter);
1228 instructions += Drop();
1229 }
1230 }
1231 return instructions;
1232}
1233
1234Fragment BaseFlowGraphBuilder::InvokeMathCFunction(
1235 MethodRecognizer::Kind recognized_kind,
1236 intptr_t num_inputs) {
1237 InputsArray args = GetArguments(num_inputs);
1238 auto* instr = new (Z) InvokeMathCFunctionInstr(
1239 std::move(args), GetNextDeoptId(), recognized_kind,
1240 InstructionSource(TokenPosition::kNoSource));
1241 Push(instr);
1242 return Fragment(instr);
1243}
1244
1245Fragment BaseFlowGraphBuilder::DoubleToInteger(
1246 MethodRecognizer::Kind recognized_kind) {
1247 Value* value = Pop();
1248 auto* instr =
1249 new (Z) DoubleToIntegerInstr(value, recognized_kind, GetNextDeoptId());
1250 Push(instr);
1251 return Fragment(instr);
1252}
1253
1254Fragment BaseFlowGraphBuilder::UnaryDoubleOp(Token::Kind op) {
1255 Value* value = Pop();
1256 auto* instr = new (Z) UnaryDoubleOpInstr(op, value, GetNextDeoptId(),
1258 Push(instr);
1259 return Fragment(instr);
1260}
1261
1262Fragment BaseFlowGraphBuilder::RecordCoverage(TokenPosition position) {
1263 return RecordCoverageImpl(position, false /** is_branch_coverage **/);
1264}
1265
1266Fragment BaseFlowGraphBuilder::RecordBranchCoverage(TokenPosition position) {
1267 return RecordCoverageImpl(position, true /** is_branch_coverage **/);
1268}
1269
1270Fragment BaseFlowGraphBuilder::RecordCoverageImpl(TokenPosition position,
1271 bool is_branch_coverage) {
1272 Fragment instructions;
1273 if (!SupportsCoverage()) return instructions;
1274 if (!IG->coverage()) return instructions;
1275 if (!position.IsReal()) return instructions;
1276 if (is_branch_coverage && !IG->branch_coverage()) return instructions;
1277
1278 const intptr_t coverage_index =
1279 GetCoverageIndexFor(position.EncodeCoveragePosition(is_branch_coverage));
1280 instructions <<= new (Z) RecordCoverageInstr(coverage_array(), coverage_index,
1281 InstructionSource(position));
1282 return instructions;
1283}
1284
1285intptr_t BaseFlowGraphBuilder::GetCoverageIndexFor(intptr_t encoded_position) {
1286 if (coverage_array_.IsNull()) {
1287 // We have not yet created coverage_array, this is the first time we are
1288 // building the graph for this function. Collect coverage positions.
1289 intptr_t value =
1290 coverage_state_index_for_position_.Lookup(encoded_position);
1291 if (value > 0) {
1292 // Found.
1293 return value;
1294 }
1295 // Not found: Insert.
1296 const auto index = 2 * coverage_state_index_for_position_.Length() + 1;
1297 coverage_state_index_for_position_.Insert(encoded_position, index);
1298 return index;
1299 }
1300
1302 // coverage_array was already created, but we don't want to search
1303 // it linearly: Fill in the coverage_state_index_for_position_ to do
1304 // fast lookups.
1305 // TODO(jensj): If Length is small enough it's probably better to just do
1306 // the linear search.
1307 for (intptr_t i = 0; i < coverage_array_.Length(); i += 2) {
1308 intptr_t key = Smi::Value(static_cast<SmiPtr>(coverage_array_.At(i)));
1309 intptr_t value = i + 1;
1310 coverage_state_index_for_position_.Insert(key, value);
1311 }
1312 }
1313
1314 intptr_t value = coverage_state_index_for_position_.Lookup(encoded_position);
1315
1316 if (value > 0) {
1317 // Found.
1318 return value;
1319 }
1320
1321 // Reaching here indicates that the graph is constructed in an unstable way.
1322 UNREACHABLE();
1323 return 1;
1324}
1325
1326void BaseFlowGraphBuilder::FinalizeCoverageArray() {
1327 if (!coverage_array_.IsNull()) {
1328 return;
1329 }
1330
1332 coverage_array_ = Array::empty_array().ptr();
1333 return;
1334 }
1335
1336 coverage_array_ =
1337 Array::New(coverage_state_index_for_position_.Length() * 2, Heap::kOld);
1338
1339 Smi& value = Smi::Handle();
1340 auto it = coverage_state_index_for_position_.GetIterator();
1341 for (auto* p = it.Next(); p != nullptr; p = it.Next()) {
1342 value = Smi::New(p->key);
1343 // p->value is the index at which coverage state is stored, the
1344 // full coverage entry begins at the previous index.
1345 const intptr_t coverage_entry_index = p->value - 1;
1346 coverage_array_.SetAt(coverage_entry_index, value);
1347 value = Smi::New(0); // no coverage recorded.
1348 coverage_array_.SetAt(p->value, value);
1349 }
1350}
1351
1352} // namespace kernel
1353} // namespace dart