base_flow_graph_builder.cc
1// Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/compiler/frontend/base_flow_graph_builder.h"
6
7#include <utility>
8
9#include "vm/compiler/backend/range_analysis.h" // For Range.
10#include "vm/compiler/frontend/flow_graph_builder.h" // For InlineExitCollector.
12#include "vm/compiler/jit/compiler.h" // For Compiler::IsBackgroundCompilation().
14#include "vm/growable_array.h"
15#include "vm/object_store.h"
16#include "vm/resolver.h"
17
18namespace dart {
19namespace kernel {
20
21#define Z (zone_)
22#define IG (thread_->isolate_group())
23
24static bool SupportsCoverage() {
25#if defined(PRODUCT)
26 return false;
27#else
28 return !CompilerState::Current().is_aot();
29#endif
30}
31
32Fragment& Fragment::operator+=(const Fragment& other) {
33 ASSERT(is_valid());
34 ASSERT(other.is_valid());
35 if (entry == nullptr) {
36 entry = other.entry;
37 current = other.current;
38 } else if (other.entry != nullptr) {
39 if (current != nullptr) {
40 current->LinkTo(other.entry);
41 }
42 // Although [other.entry] could be unreachable (if this fragment is
43 // closed), there could be a yield continuation point in the middle of
44 // [other] fragment so [other.current] is still reachable.
45 current = other.current;
46 }
47 return *this;
48}
49
50Fragment& Fragment::operator<<=(Instruction* next) {
51 ASSERT(is_valid());
52 if (entry == nullptr) {
53 entry = current = next;
54 } else if (current != nullptr) {
55 current->LinkTo(next);
56 current = next;
57 }
58 return *this;
59}
60
61void Fragment::Prepend(Instruction* start) {
62 ASSERT(is_valid());
63 if (entry == nullptr) {
64 entry = current = start;
65 } else {
66 start->LinkTo(entry);
67 entry = start;
68 }
69}
70
71Fragment Fragment::closed() {
72 ASSERT(entry != nullptr);
73 return Fragment(entry, nullptr);
74}
75
76Fragment operator+(const Fragment& first, const Fragment& second) {
77 Fragment result = first;
78 result += second;
79 return result;
80}
81
82Fragment operator<<(const Fragment& fragment, Instruction* next) {
83 Fragment result = fragment;
84 result <<= next;
85 return result;
86}
87
89 : entry(entry),
90 true_successor_addresses(new SuccessorAddressArray(1)),
91 false_successor_addresses(new SuccessorAddressArray(1)) {
94}
95
96void TestFragment::ConnectBranchesTo(
97 BaseFlowGraphBuilder* builder,
98 const TestFragment::SuccessorAddressArray& branches,
99 JoinEntryInstr* join) {
100 ASSERT(!branches.is_empty());
101 for (auto branch : branches) {
102 *branch = builder->BuildTargetEntry();
103 (*branch)->Goto(join);
104 }
105}
106
107BlockEntryInstr* TestFragment::CreateSuccessorFor(
108 BaseFlowGraphBuilder* builder,
109 const TestFragment::SuccessorAddressArray& branches) {
110 ASSERT(!branches.is_empty());
111
112 if (branches.length() == 1) {
113 TargetEntryInstr* target = builder->BuildTargetEntry();
114 *(branches[0]) = target;
115 return target;
116 }
117
118 JoinEntryInstr* join = builder->BuildJoinEntry();
119 ConnectBranchesTo(builder, branches, join);
120 return join;
121}
122
128
134
136 intptr_t delta = context_depth_ - depth;
137 ASSERT(delta >= 0);
139 while (delta-- > 0) {
140 instructions += LoadNativeField(Slot::Context_parent());
141 }
142 return instructions;
143}
144
146 Token::Kind kind,
147 bool number_check /* = false */) {
148 Value* right = Pop();
149 Value* left = Pop();
151 new (Z) StrictCompareInstr(InstructionSource(position), kind, left, right,
152 number_check, GetNextDeoptId());
153 Push(compare);
154 return Fragment(compare);
155}
156
158 bool number_check /* = false */) {
159 Value* right = Pop();
160 Value* left = Pop();
162 InstructionSource(), kind, left, right, number_check, GetNextDeoptId());
163 Push(compare);
164 return Fragment(compare);
165}
166
168 TargetEntryInstr** otherwise_entry,
169 bool negate) {
170 Fragment instructions = Constant(Bool::True());
171 return instructions + BranchIfEqual(then_entry, otherwise_entry, negate);
172}
173
175 TargetEntryInstr** otherwise_entry,
176 bool negate) {
177 Fragment instructions = NullConstant();
178 return instructions + BranchIfEqual(then_entry, otherwise_entry, negate);
179}
180
182 TargetEntryInstr** otherwise_entry,
183 bool negate) {
184 Value* right_value = Pop();
185 Value* left_value = Pop();
187 InstructionSource(), negate ? Token::kNE_STRICT : Token::kEQ_STRICT,
188 left_value, right_value, false, GetNextDeoptId());
189 BranchInstr* branch = new (Z) BranchInstr(compare, GetNextDeoptId());
190 *then_entry = *branch->true_successor_address() = BuildTargetEntry();
191 *otherwise_entry = *branch->false_successor_address() = BuildTargetEntry();
192 return Fragment(branch).closed();
193}
194
196 TargetEntryInstr** then_entry,
197 TargetEntryInstr** otherwise_entry) {
198 Value* rhs = Pop();
199 Value* lhs = Pop();
201 new (Z) StrictCompareInstr(InstructionSource(), Token::kEQ_STRICT, lhs,
202 rhs, false, GetNextDeoptId());
203 BranchInstr* branch = new (Z) BranchInstr(compare, GetNextDeoptId());
204 *then_entry = *branch->true_successor_address() = BuildTargetEntry();
205 *otherwise_entry = *branch->false_successor_address() = BuildTargetEntry();
206 return Fragment(branch).closed();
207}
208
210 Fragment instructions;
211
212 Value* value = Pop();
213 ASSERT(stack_ == nullptr);
215 const Representation representation =
217 DartReturnInstr* return_instr = new (Z) DartReturnInstr(
218 InstructionSource(position), value, GetNextDeoptId(), representation);
219 if (exit_collector_ != nullptr) exit_collector_->AddExit(return_instr);
220
221 instructions <<= return_instr;
222
223 return instructions.closed();
224}
225
227 intptr_t stack_depth,
228 intptr_t loop_depth) {
230 InstructionSource(position), stack_depth, loop_depth, GetNextDeoptId(),
232}
233
235 TokenPosition position) {
236 if (IsInlining()) {
237 // If we are inlining don't actually attach the stack check. We must still
238 // create the stack check in order to allocate a deopt id.
239 CheckStackOverflow(position, 0, 0);
240 return Fragment();
241 }
242 return CheckStackOverflow(position, 0, 0);
243}
244
246 DEBUG_ASSERT(value.IsNotTemporaryScopedHandle());
247 ConstantInstr* constant = new (Z) ConstantInstr(value);
248 Push(constant);
249 return Fragment(constant);
250}
251
253 return Fragment(new (Z) GotoInstr(destination, GetNextDeoptId())).closed();
254}
255
260
262 int64_t value,
263 Representation representation) {
265 auto const constant = new (Z) UnboxedConstantInstr(obj, representation);
266 Push(constant);
267 return Fragment(constant);
268}
269
271 classid_t dest_cid,
272 bool unboxed_inputs,
273 bool can_overlap) {
274 Value* length = Pop();
275 Value* dest_start = Pop();
276 Value* src_start = Pop();
277 Value* dest = Pop();
278 Value* src = Pop();
279 auto copy =
280 new (Z) MemoryCopyInstr(src, src_cid, dest, dest_cid, src_start,
281 dest_start, length, unboxed_inputs, can_overlap);
282 return Fragment(copy);
283}
284
286 Value* arg_desc = Pop();
287 return Fragment(new (Z) TailCallInstr(code, arg_desc)).closed();
288}
289
290void BaseFlowGraphBuilder::InlineBailout(const char* reason) {
291 if (IsInlining()) {
292 parsed_function_->function().set_is_inlinable(false);
293 parsed_function_->Bailout("kernel::BaseFlowGraphBuilder", reason);
294 }
295}
296
299 const ArgumentsDescriptor descriptor(saved_args_desc_array());
300 // Double-check that compile-time Size() matches runtime size on target.
302 function_, descriptor.Count()));
304 }
307}
308
310 Fragment neq_branch,
311 intptr_t num_type_args) {
313
314 // Compile-time arguments descriptor case.
317 return descriptor.TypeArgsLen() == num_type_args ? eq_branch : neq_branch;
318 }
319
320 // Runtime arguments descriptor case.
321 TargetEntryInstr* eq_entry;
322 TargetEntryInstr* neq_entry;
323
325 test += LoadNativeField(Slot::ArgumentsDescriptor_type_args_len());
326 test += IntConstant(num_type_args);
327 test += BranchIfEqual(&eq_entry, &neq_entry);
328
329 eq_branch.Prepend(eq_entry);
330 neq_branch.Prepend(neq_entry);
331
333 eq_branch += Goto(join);
334 neq_branch += Goto(join);
335
336 return Fragment(test.entry, join);
337}
338
340 Fragment present,
341 Fragment absent) {
343
344 TargetEntryInstr* absent_entry;
345 TargetEntryInstr* present_entry;
346
347 test += LoadLocal(closure);
348 test += LoadNativeField(Slot::Closure_delayed_type_arguments());
349 test += Constant(Object::empty_type_arguments());
350 test += BranchIfEqual(&absent_entry, &present_entry);
351
352 present.Prepend(present_entry);
353 absent.Prepend(absent_entry);
354
356 absent += Goto(join);
357 present += Goto(join);
358
359 return Fragment(test.entry, join);
360}
361
363 Fragment absent) {
366
367 JoinEntryInstr* complete = BuildJoinEntry();
368 JoinEntryInstr* present_entry = BuildJoinEntry();
369
371 TestDelayedTypeArgs(closure, Goto(present_entry), absent),
372 Goto(present_entry), 0);
373 test += Goto(complete);
374
375 Fragment(present_entry) + present + Goto(complete);
376
377 return Fragment(test.entry, complete);
378 } else {
379 return TestTypeArgsLen(absent, present, 0);
380 }
381}
382
384 intptr_t index_scale,
385 bool index_unboxed,
386 AlignmentType alignment) {
387 Value* index = Pop();
388 // A C pointer if index_unboxed, otherwise a boxed Dart value.
389 Value* array = Pop();
390
391 // We use C behavior when dereferencing pointers, so we use aligned access in
392 // all cases.
393 LoadIndexedInstr* instr = new (Z)
394 LoadIndexedInstr(array, index, index_unboxed, index_scale, class_id,
395 alignment, DeoptId::kNone, InstructionSource());
396 Push(instr);
397 return Fragment(instr);
398}
399
401 Value* index = Pop();
402 Value* length = Pop();
403 auto* instr = new (Z) GenericCheckBoundInstr(length, index, GetNextDeoptId());
404 Push(instr);
405 return Fragment(instr);
406}
407
409 Value* object = Pop();
410 auto load = new (Z) LoadUntaggedInstr(object, offset);
411 Push(load);
412 return Fragment(load);
413}
414
416 Value* value = Pop();
417 auto converted = new (Z)
419 converted->mark_truncating();
420 Push(converted);
421 return Fragment(converted);
422}
423
425 Value* value = Pop();
426 auto converted = new (Z)
428 converted->mark_truncating();
429 Push(converted);
430 return Fragment(converted);
431}
432
434 Value* offset = Pop();
435 Value* index = Pop();
436 Value* base = Pop();
437 auto adjust =
438 new (Z) CalculateElementAddressInstr(base, index, index_scale, offset);
439 Push(adjust);
440 return Fragment(adjust);
441}
442
449
457
459 bool calls_initializer) {
461 calls_initializer);
462}
463
465 const Slot& native_field,
466 InnerPointerAccess loads_inner_pointer,
467 bool calls_initializer) {
469 Pop(), native_field, loads_inner_pointer, InstructionSource(),
470 calls_initializer, calls_initializer ? GetNextDeoptId() : DeoptId::kNone);
471 Push(load);
472 return Fragment(load);
473}
474
476 bool calls_initializer) {
477 const InnerPointerAccess loads_inner_pointer =
478 native_field.representation() == kUntagged
479 ? (native_field.may_contain_inner_pointer()
483 return LoadNativeField(native_field, loads_inner_pointer, calls_initializer);
484}
485
487 ASSERT(!variable->is_captured());
489 Push(load);
490 return Fragment(load);
491}
492
496
498 intptr_t deopt_id) {
499 return Fragment(new (Z) GuardFieldLengthInstr(Pop(), field, deopt_id));
500}
501
503 intptr_t deopt_id) {
504 return Fragment(new (Z) GuardFieldClassInstr(Pop(), field, deopt_id));
505}
506
508 const Field& field) {
509 if (CompilerState::Current().should_clone_fields() && field.IsOriginal()) {
510 return Field::ZoneHandle(zone, field.CloneFromOriginal());
511 } else {
512 DEBUG_ASSERT(field.IsNotTemporaryScopedHandle());
513 return field;
514 }
515}
516
518 TokenPosition position,
519 const Slot& slot,
520 InnerPointerAccess stores_inner_pointer,
521 StoreFieldInstr::Kind kind /* = StoreFieldInstr::Kind::kOther */,
522 StoreBarrierType emit_store_barrier /* = kEmitStoreBarrier */,
523 compiler::Assembler::MemoryOrder memory_order /* = kRelaxed */) {
524 Value* value = Pop();
525 if (value->BindsToConstant()) {
526 emit_store_barrier = kNoStoreBarrier;
527 }
528 StoreFieldInstr* store = new (Z)
529 StoreFieldInstr(slot, Pop(), value, emit_store_barrier,
530 stores_inner_pointer, InstructionSource(position), kind);
531 return Fragment(store);
532}
533
535 const Field& field,
536 StoreFieldInstr::Kind kind /* = StoreFieldInstr::Kind::kOther */,
537 StoreBarrierType emit_store_barrier) {
538 return StoreNativeField(TokenPosition::kNoSource,
540 kind, emit_store_barrier);
541}
542
544 const Field& field,
545 StoreFieldInstr::Kind kind /* = StoreFieldInstr::Kind::kOther */) {
546 Fragment instructions;
547 const Field& field_clone = MayCloneField(Z, field);
548 if (IG->use_field_guards()) {
549 LocalVariable* store_expression = MakeTemporary();
550
551 // Note: unboxing decision can only change due to hot reload at which
552 // point all code will be cleared, so there is no need to worry about
553 // stability of deopt id numbering.
554 if (!field_clone.is_unboxed()) {
555 instructions += LoadLocal(store_expression);
556 instructions += GuardFieldClass(field_clone, GetNextDeoptId());
557 }
558
559 // Field length guard can be omitted if it is not needed.
560 // However, it is possible that we were tracking list length previously,
561 // and generated length guards in the past. We need to generate same IL
562 // to keep deopt ids stable, but we can discard generated IL fragment
563 // if length guard is not needed.
564 Fragment length_guard;
565 length_guard += LoadLocal(store_expression);
566 length_guard += GuardFieldLength(field_clone, GetNextDeoptId());
567
568 if (field_clone.needs_length_check()) {
569 instructions += length_guard;
570 }
571
572 // If we are tracking exactness of the static type of the field then
573 // emit appropriate guard.
574 if (field_clone.static_type_exactness_state().IsTracking()) {
575 instructions += LoadLocal(store_expression);
576 instructions <<=
577 new (Z) GuardFieldTypeInstr(Pop(), field_clone, GetNextDeoptId());
578 }
579 }
580 instructions +=
581 StoreNativeField(Slot::Get(field_clone, parsed_function_), kind);
582 return instructions;
583}
584
586 bool calls_initializer) {
588 field, InstructionSource(), calls_initializer,
589 calls_initializer ? GetNextDeoptId() : DeoptId::kNone);
590 Push(load);
591 return Fragment(load);
592}
593
595 auto redefinition = new (Z) RedefinitionInstr(Pop());
596 redefinition->set_constrained_type(
599 Push(redefinition);
600 return Fragment(redefinition);
601}
602
604 Fragment instructions;
605 instructions <<= new (Z) ReachabilityFenceInstr(Pop());
606 return instructions;
607}
608
610 Value* table = Pop();
611 Value* end = Pop();
612 Value* start = Pop();
613 Value* bytes = Pop();
614 Value* decoder = Pop();
615 const Field& scan_flags_field =
617 auto scan = new (Z) Utf8ScanInstr(
618 decoder, bytes, start, end, table,
619 Slot::Get(MayCloneField(Z, scan_flags_field), parsed_function_));
620 Push(scan);
621 return Fragment(scan);
622}
623
625 const Field& field) {
626 return Fragment(new (Z) StoreStaticFieldInstr(MayCloneField(Z, field), Pop(),
627 InstructionSource(position)));
628}
629
631 // This fragment builder cannot be used for typed data accesses.
632 ASSERT(!IsTypedDataBaseClassId(class_id));
633 Value* value = Pop();
634 Value* index = Pop();
635 const StoreBarrierType emit_store_barrier =
636 value->BindsToConstant() ? kNoStoreBarrier : kEmitStoreBarrier;
638 Pop(), // Array.
639 index, value, emit_store_barrier, /*index_unboxed=*/false,
640 compiler::target::Instance::ElementSizeFor(class_id), class_id,
642 return Fragment(store);
643}
644
646 intptr_t index_scale,
647 bool index_unboxed,
648 AlignmentType alignment) {
650 Value* value = Pop();
651 Value* index = Pop();
652 Value* c_pointer = Pop();
654 c_pointer, index, value, kNoStoreBarrier, index_unboxed, index_scale,
655 class_id, alignment, DeoptId::kNone, InstructionSource(),
657 return Fragment(instr);
658}
659
661 LocalVariable* variable) {
662 if (variable->is_captured()) {
663 Fragment instructions;
665 instructions += LoadContextAt(variable->owner()->context_level());
666 instructions += LoadLocal(value);
667 instructions += StoreNativeField(
668 position, Slot::GetContextVariableSlotFor(thread_, *variable));
669 return instructions;
670 }
671 return StoreLocalRaw(position, variable);
672}
673
675 LocalVariable* variable) {
676 ASSERT(!variable->is_captured());
677 Value* value = Pop();
679 new (Z) StoreLocalInstr(*variable, value, InstructionSource(position));
680 Fragment instructions(store);
681 Push(store);
682 return instructions;
683}
684
686 static constexpr intptr_t kTemporaryNameLength = 64;
687 char name[kTemporaryNameLength];
688 intptr_t index = stack_->definition()->temp_index();
689 if (suffix != nullptr) {
690 Utils::SNPrint(name, kTemporaryNameLength, ":t_%s", suffix);
691 } else {
692 Utils::SNPrint(name, kTemporaryNameLength, ":t%" Pd, index);
693 }
694 const String& symbol_name =
696 LocalVariable* variable =
697 new (Z) LocalVariable(TokenPosition::kNoSource, TokenPosition::kNoSource,
698 symbol_name, Object::dynamic_type());
699 // Set the index relative to the base of the expression stack including
700 // outgoing arguments.
701 variable->set_index(
703
704 // The value on top of the stack has uses as if it were a local variable.
705 // Mark all definitions on the stack as used so that their temp indices
706 // will not be cleared (causing them to never be materialized in the
707 // expression stack and skew stack depth).
708 for (Value* item = stack_; item != nullptr; item = item->next_use()) {
709 item->definition()->set_ssa_temp_index(0);
710 }
711
712 return variable;
713}
714
716 ASSERT(temp != nullptr && *temp != nullptr && (*temp)->HasIndex());
717 // Check that the temporary matches the current stack definition.
720 -(*temp)->index().value() - parsed_function_->num_stack_locals());
721 *temp = nullptr; // Clear to avoid inadvertent usage after dropping.
722 return Drop();
723}
724
726 definition->set_temp_index(
727 stack_ == nullptr ? 0 : stack_->definition()->temp_index() + 1);
728}
729
731 SetTempIndex(definition);
732 Value::AddToList(new (Z) Value(definition), &stack_);
733}
734
736 Value* head = stack_;
737 for (intptr_t i = 0; i < depth; ++i) {
738 ASSERT(head != nullptr);
739 head = head->next_use();
740 }
741 ASSERT(head != nullptr);
742 return head->definition();
743}
744
746 ASSERT(stack_ != nullptr);
747 Value* value = stack_;
748 stack_ = value->next_use();
749 if (stack_ != nullptr) stack_->set_previous_use(nullptr);
750
751 value->set_next_use(nullptr);
752 value->set_previous_use(nullptr);
753 value->definition()->ClearSSATempIndex();
754 return value;
755}
756
758 ASSERT(stack_ != nullptr);
759 Fragment instructions;
760 Definition* definition = stack_->definition();
761 // The SSA renaming implementation doesn't like [LoadLocal]s without a
762 // tempindex.
763 if (definition->HasSSATemp() || definition->IsLoadLocal()) {
764 instructions <<= new (Z) DropTempsInstr(1, nullptr);
765 } else {
766 definition->ClearTempIndex();
767 }
768
769 Pop();
770 return instructions;
771}
772
774 intptr_t num_temps_to_drop) {
775 Value* top = Pop();
776
777 for (intptr_t i = 0; i < num_temps_to_drop; ++i) {
778 Pop();
779 }
780
781 DropTempsInstr* drop_temps = new (Z) DropTempsInstr(num_temps_to_drop, top);
782 Push(drop_temps);
783
784 return Fragment(drop_temps);
785}
786
788 MakeTempInstr* make_temp = new (Z) MakeTempInstr(Z);
789 Push(make_temp);
790 return Fragment(make_temp);
791}
792
797
803
805 return new (Z) JoinEntryInstr(AllocateBlockId(), try_index, GetNextDeoptId(),
806 GetStackDepth());
807}
808
813
815 intptr_t indirect_id,
816 intptr_t try_index) {
817 return new (Z) IndirectEntryInstr(AllocateBlockId(), indirect_id, try_index,
819}
820
822 InputsArray arguments(Z, count);
823 arguments.SetLength(count);
824 for (intptr_t i = count - 1; i >= 0; --i) {
825 arguments[i] = Pop();
826 }
827 return arguments;
828}
829
831 Value* right = Pop();
832 Value* left = Pop();
834 InstructionSource(), kind, left, right, kSmiCid, GetNextDeoptId());
835 Push(instr);
836 return Fragment(instr);
837}
838
840 bool is_truncating) {
841 return BinaryIntegerOp(kind, kTagged, is_truncating);
842}
843
845 Representation representation,
846 bool is_truncating) {
847 ASSERT(representation == kUnboxedInt32 || representation == kUnboxedUint32 ||
848 representation == kUnboxedInt64 || representation == kTagged);
849 Value* right = Pop();
850 Value* left = Pop();
852 representation, kind, left, right, GetNextDeoptId());
853 ASSERT(instr != nullptr);
854 if (is_truncating) {
855 instr->mark_truncating();
856 }
857 Push(instr);
858 return Fragment(instr);
859}
860
862 intptr_t offset,
863 CompileType result_type,
864 Representation representation) {
865 LoadIndexedUnsafeInstr* instr = new (Z)
866 LoadIndexedUnsafeInstr(Pop(), offset, result_type, representation);
867 Push(instr);
868 return Fragment(instr);
869}
870
872 Value* value = Pop();
873 Value* index = Pop();
875 new (Z) StoreIndexedUnsafeInstr(index, value, offset);
876 return Fragment(instr);
877}
878
881
882 Fragment failing(nsm);
883 const Code& nsm_handler = Code::ZoneHandle(
884 Z, IG->object_store()->call_closure_no_such_method_stub());
885 failing += LoadArgDescriptor();
886 failing += TailCall(nsm_handler);
887
888 return nsm;
889}
890
892 Fragment instructions;
893 Value* exception = Pop();
894 instructions += Fragment(new (Z) ThrowInstr(InstructionSource(position),
895 GetNextDeoptId(), exception))
896 .closed();
897 // Use its side effect of leaving a constant on the stack (does not change
898 // the graph).
899 NullConstant();
900
901 return instructions;
902}
903
905 Value* value = Pop();
906 AssertBooleanInstr* instr = new (Z)
908 Push(instr);
909 return Fragment(instr);
910}
911
913 BooleanNegateInstr* negate = new (Z) BooleanNegateInstr(Pop());
914 Push(negate);
915 return Fragment(negate);
916}
917
919 const ZoneGrowableArray<const Slot*>& context_slots) {
921 InstructionSource(), context_slots, GetNextDeoptId());
922 Push(allocate);
923 return Fragment(allocate);
924}
925
927 bool has_instantiator_type_args,
928 bool is_generic,
929 bool is_tear_off) {
930 Value* instantiator_type_args =
931 (has_instantiator_type_args ? Pop() : nullptr);
932 auto const context = Pop();
933 auto const function = Pop();
934 auto* allocate = new (Z) AllocateClosureInstr(
935 InstructionSource(position), function, context, instantiator_type_args,
936 is_generic, is_tear_off, GetNextDeoptId());
937 Push(allocate);
938 return Fragment(allocate);
939}
940
942 Value* element_count = Pop();
943 CreateArrayInstr* array =
945 Pop(), // Element type.
946 element_count, GetNextDeoptId());
947 Push(array);
948 return Fragment(array);
949}
950
952 RecordShape shape) {
953 AllocateRecordInstr* allocate = new (Z)
955 Push(allocate);
956 return Fragment(allocate);
957}
958
960 RecordShape shape) {
961 const intptr_t num_fields = shape.num_fields();
962 ASSERT(num_fields == 2 || num_fields == 3);
963 Value* value2 = (num_fields > 2) ? Pop() : nullptr;
964 Value* value1 = Pop();
965 Value* value0 = Pop();
966 AllocateSmallRecordInstr* allocate = new (Z)
967 AllocateSmallRecordInstr(InstructionSource(position), shape, value0,
968 value1, value2, GetNextDeoptId());
969 Push(allocate);
970 return Fragment(allocate);
971}
972
974 classid_t class_id) {
975 Value* num_elements = Pop();
976 auto* instr = new (Z) AllocateTypedDataInstr(
977 InstructionSource(position), class_id, num_elements, GetNextDeoptId());
978 Push(instr);
979 return Fragment(instr);
980}
981
983 Value* function_type_args = Pop();
984 Value* instantiator_type_args = Pop();
985 InstantiateTypeInstr* instr = new (Z)
986 InstantiateTypeInstr(InstructionSource(), type, instantiator_type_args,
987 function_type_args, GetNextDeoptId());
988 Push(instr);
989 return Fragment(instr);
990}
991
993 const TypeArguments& type_arguments_value) {
994 Fragment instructions;
995 instructions += Constant(type_arguments_value);
996
997 Value* type_arguments = Pop();
998 Value* function_type_args = Pop();
999 Value* instantiator_type_args = Pop();
1000 const Class& instantiator_class = Class::ZoneHandle(Z, function_.Owner());
1002 InstructionSource(), instantiator_type_args, function_type_args,
1003 type_arguments, instantiator_class, function_, GetNextDeoptId());
1004 Push(instr);
1005 instructions += Fragment(instr);
1006 return instructions;
1007}
1008
1010 Value* type_arguments = Pop();
1011 Value* function_type_args = Pop();
1012 Value* instantiator_type_args = Pop();
1013 const Function& function = Object::null_function();
1014 const Class& instantiator_class = Class::ZoneHandle(Z);
1016 InstructionSource(), instantiator_type_args, function_type_args,
1017 type_arguments, instantiator_class, function, GetNextDeoptId());
1018 Push(instr);
1019 return Fragment(instr);
1020}
1021
1027
1029 const Class& klass,
1030 intptr_t argument_count) {
1031 ASSERT((argument_count == 0) || (argument_count == 1));
1032 Value* type_arguments = (argument_count > 0) ? Pop() : nullptr;
1033 AllocateObjectInstr* allocate = new (Z) AllocateObjectInstr(
1034 InstructionSource(position), klass, GetNextDeoptId(), type_arguments);
1035 Push(allocate);
1036 return Fragment(allocate);
1037}
1038
1040 Fragment instructions;
1041 if (from == kUnboxedFloat) {
1042 instructions += FloatToDouble();
1043 from = kUnboxedDouble;
1044 }
1045 BoxInstr* box = BoxInstr::Create(from, Pop());
1046 instructions <<= box;
1047 Push(box);
1048 return instructions;
1049}
1050
1052#ifdef PRODUCT
1053 return Fragment();
1054#else
1055 return Fragment(new (Z) DebugStepCheckInstr(
1056 InstructionSource(position), UntaggedPcDescriptors::kRuntimeCall,
1057 GetNextDeoptId()));
1058#endif
1059}
1060
1062 LocalVariable* receiver,
1063 const String& function_name) {
1064 Fragment instructions = LoadLocal(receiver);
1065
1066 CheckNullInstr* check_null = new (Z) CheckNullInstr(
1070
1071 // Does not use the redefinition, no `Push(check_null)`.
1072 instructions <<= check_null;
1073
1074 return instructions;
1075}
1076
1078 const String& function_name,
1079 CheckNullInstr::ExceptionType exception_type,
1080 TokenPosition position) {
1081 Value* value = Pop();
1082 CheckNullInstr* check_null =
1084 InstructionSource(position), exception_type);
1085 Push(check_null); // Use the redefinition.
1086 return Fragment(check_null);
1087}
1088
1091 Value* value = Pop();
1092 auto* check_writable = new (Z)
1094 return Fragment(check_writable);
1095}
1096
1098 GraphEntryInstr* graph_entry,
1099 FunctionEntryInstr* unchecked_entry) {
1100 // Closures always check all arguments on their checked entry-point, most
1101 // call-sites are unchecked, and they're inlined less often, so it's very
1102 // beneficial to build multiple entry-points for them. Regular methods however
1103 // have fewer checks to begin with since they have dynamic invocation
1104 // forwarders, so in AOT we implement a more conservative time-space tradeoff
1105 // by only building the unchecked entry-point when inlining. We should
1106 // reconsider this heuristic if we identify non-inlined type-checks in
1107 // hotspots of new benchmarks.
1110 graph_entry->set_unchecked_entry(unchecked_entry);
1111 } else if (InliningUncheckedEntry()) {
1112 graph_entry->set_normal_entry(unchecked_entry);
1113 }
1114}
1115
1117 if (!FLAG_enable_testing_pragmas) return Drop();
1118
1120
1121 if (function.IsImplicitClosureFunction()) {
1122 const auto& parent = Function::Handle(Z, function.parent_function());
1123 const auto& func_name = String::Handle(Z, parent.name());
1124 const auto& owner = Class::Handle(Z, parent.Owner());
1125 if (owner.EnsureIsFinalized(thread_) == Error::null()) {
1126 function = Resolver::ResolveFunction(Z, owner, func_name);
1127 }
1128 }
1129
1131 if (!Library::FindPragma(thread_, /*only_core=*/false, function,
1132 Symbols::vm_trace_entrypoints(), /*multiple=*/false,
1133 &options) ||
1134 options.IsNull() || !options.IsClosure()) {
1135 return Drop();
1136 }
1137 auto& closure = Closure::ZoneHandle(Z, Closure::Cast(options).ptr());
1138 LocalVariable* entry_point_num = MakeTemporary("entry_point_num");
1139
1141 Z, String::New(function.ToLibNamePrefixedQualifiedCString(), Heap::kOld));
1145 Heap::kOld);
1146 }
1147 if (!function_name.IsCanonical()) {
1149 }
1150
1151 Fragment call_hook;
1152 call_hook += Constant(closure);
1153 call_hook += Constant(function_name);
1154 call_hook += LoadLocal(entry_point_num);
1155 if (FLAG_precompiled_mode) {
1156 call_hook += Constant(closure);
1157 } else {
1158 call_hook += Constant(Function::ZoneHandle(Z, closure.function()));
1159 }
1160 call_hook += ClosureCall(Function::null_function(), TokenPosition::kNoSource,
1161 /*type_args_len=*/0, /*argument_count=*/3,
1162 /*argument_names=*/Array::ZoneHandle(Z));
1163 call_hook += Drop(); // result of closure call
1164 call_hook += DropTemporary(&entry_point_num); // entrypoint number
1165 return call_hook;
1166}
1167
1169 const Function& target_function,
1170 TokenPosition position,
1171 intptr_t type_args_len,
1172 intptr_t argument_count,
1173 const Array& argument_names,
1174 const InferredTypeMetadata* result_type) {
1175 Fragment instructions = RecordCoverage(position);
1176 const intptr_t total_count =
1177 (type_args_len > 0 ? 1 : 0) + argument_count +
1178 /*closure (bare instructions) or function (otherwise)*/ 1;
1179 InputsArray arguments = GetArguments(total_count);
1180 ClosureCallInstr* call = new (Z) ClosureCallInstr(
1181 target_function, std::move(arguments), type_args_len, argument_names,
1182 InstructionSource(position), GetNextDeoptId());
1183 Push(call);
1184 instructions <<= call;
1185 if (result_type != nullptr && result_type->IsConstant()) {
1186 instructions += Drop();
1187 instructions += Constant(result_type->constant_value);
1188 }
1189 return instructions;
1190}
1191
1194 for (intptr_t i = 0, n = context_level_array_->length(); i < n; i += 2) {
1195 if (context_level_array_->At(i) == deopt_id) {
1196 (*context_level_array_)[i + 1] = context_depth_;
1197 return;
1198 }
1199 ASSERT(context_level_array_->At(i) < deopt_id);
1200 }
1201 }
1202}
1203
1205 TokenPosition position,
1206 const String& dst_name,
1208 Value* function_type_args = Pop();
1209 Value* instantiator_type_args = Pop();
1210 Value* dst_type = Pop();
1211 Value* value = Pop();
1212
1214 InstructionSource(position), value, dst_type, instantiator_type_args,
1215 function_type_args, dst_name, GetNextDeoptId(), kind);
1216 Push(instr);
1217
1218 return Fragment(instr);
1219}
1220
1222 Fragment instructions;
1223 const intptr_t parameter_count = parsed_function_->function().NumParameters();
1224 for (intptr_t i = 0; i < parameter_count; ++i) {
1226 const Object* param_value = raw_parameter->inferred_arg_value();
1227 if (param_value != nullptr) {
1228 instructions += Constant(*param_value);
1229 instructions += StoreLocalRaw(TokenPosition::kNoSource, raw_parameter);
1230 instructions += Drop();
1231 }
1232 }
1233 return instructions;
1234}
1235
1237 MethodRecognizer::Kind recognized_kind,
1238 intptr_t num_inputs) {
1239 InputsArray args = GetArguments(num_inputs);
1240 auto* instr = new (Z) InvokeMathCFunctionInstr(
1241 std::move(args), GetNextDeoptId(), recognized_kind,
1242 InstructionSource(TokenPosition::kNoSource));
1243 Push(instr);
1244 return Fragment(instr);
1245}
1246
1248 MethodRecognizer::Kind recognized_kind) {
1249 Value* value = Pop();
1250 auto* instr =
1251 new (Z) DoubleToIntegerInstr(value, recognized_kind, GetNextDeoptId());
1252 Push(instr);
1253 return Fragment(instr);
1254}
1255
1257 Value* value = Pop();
1258 auto* instr = new (Z) UnaryDoubleOpInstr(op, value, GetNextDeoptId(),
1260 Push(instr);
1261 return Fragment(instr);
1262}
1263
1265 return RecordCoverageImpl(position, false /** is_branch_coverage **/);
1266}
1267
1269 return RecordCoverageImpl(position, true /** is_branch_coverage **/);
1270}
1271
1273 bool is_branch_coverage) {
1274 Fragment instructions;
1275 if (!SupportsCoverage()) return instructions;
1276 if (!position.IsReal()) return instructions;
1277 if (is_branch_coverage && !IG->branch_coverage()) return instructions;
1278
1279 const intptr_t coverage_index =
1280 GetCoverageIndexFor(position.EncodeCoveragePosition(is_branch_coverage));
1281 instructions <<= new (Z) RecordCoverageInstr(coverage_array(), coverage_index,
1282 InstructionSource(position));
1283 return instructions;
1284}
1285
1286intptr_t BaseFlowGraphBuilder::GetCoverageIndexFor(intptr_t encoded_position) {
1287 if (coverage_array_.IsNull()) {
1288 // We have not yet created coverage_array, this is the first time we are
1289 // building the graph for this function. Collect coverage positions.
1290 for (intptr_t i = 0; i < coverage_array_positions_.length(); i++) {
1291 if (coverage_array_positions_.At(i) == encoded_position) {
1292 return 2 * i + 1;
1293 }
1294 }
1295 const auto index = 2 * coverage_array_positions_.length() + 1;
1296 coverage_array_positions_.Add(encoded_position);
1297 return index;
1298 }
1299
1300 for (intptr_t i = 0; i < coverage_array_.Length(); i += 2) {
1301 if (Smi::Value(static_cast<SmiPtr>(coverage_array_.At(i))) ==
1302 encoded_position) {
1303 return i + 1;
1304 }
1305 }
1306 // Reaching here indicates that the graph is constructed in an unstable way.
1307 UNREACHABLE();
1308 return 1;
1309}
1310
1312 if (!coverage_array_.IsNull()) {
1313 return;
1314 }
1315
1317 coverage_array_ = Array::empty_array().ptr();
1318 return;
1319 }
1320
1323
1324 Smi& value = Smi::Handle();
1325 for (intptr_t i = 0; i < coverage_array_positions_.length(); i++) {
1327 coverage_array_.SetAt(2 * i, value);
1328 value = Smi::New(0); // no coverage recorded.
1329 coverage_array_.SetAt(2 * i + 1, value);
1330 }
1331}
1332
1333} // namespace kernel
1334} // namespace dart