65 {
66#define DECLARE_REPRESENTATION(name, __, ___, ____) k##name,
68#undef DECLARE_REPRESENTATION
70};
71
73
75 switch (rep) {
76 case kPairOfTagged:
77 return 2;
78 case kUnboxedInt64:
79 return compiler::target::kWordSize == 8 ? 1 : 2;
80 default:
81 return 1;
82 }
83}
84
// Static query helpers over Representation values. AllStatic: never
// instantiated; every member is static.
struct RepresentationUtils : AllStatic {
  // Expands to a `case` that answers "rep is in this set" with true; any rep
  // not listed falls through to the switch's `default: return false;`.
#define REP_IN_SET_CLAUSE(name, __, ___, ____)                                 \
  case k##name:                                                                \
    return true;

  // True for unboxed integer representations.
  // NOTE(review): the macro-invocation line that enumerates the integer
  // representations (via REP_IN_SET_CLAUSE) appears to be missing from this
  // copy of the switch body — as written, this always returns false. Restore
  // from upstream before relying on it.
  static constexpr bool IsUnboxedInteger(Representation rep) {
    switch (rep) {
      default:
        return false;
    }
  }

  // True for any unboxed (non-tagged) representation.
  // NOTE(review): the macro-invocation line enumerating the unboxed
  // representations appears to be missing here as well.
  static constexpr bool IsUnboxed(Representation rep) {
    switch (rep) {
      default:
        return false;
    }
  }
#undef REP_IN_SET_CLAUSE

  // Size in bytes of a value carried in representation |rep|;
  // representations without an explicit sizeof clause default to one target
  // machine word.
  // NOTE(review): the invocation of REP_SIZEOF_CLAUSE over the
  // representation list appears to be missing between the #define and the
  // #undef in this copy.
  static constexpr size_t ValueSize(Representation rep) {
    switch (rep) {
#define REP_SIZEOF_CLAUSE(name, __, ___, type)                                 \
  case k##name:                                                                \
    return sizeof(type);
#undef REP_SIZEOF_CLAUSE
      default:
        return compiler::target::kWordSize;
    }
  }

  // True when |rep| is an unsigned integer representation; non-integer
  // representations answer false via the default clause.
  // NOTE(review): the REP_IS_UNSIGNED_CLAUSE invocation line appears to be
  // missing from this copy of the switch body.
  static bool IsUnsignedInteger(Representation rep) {
    switch (rep) {
#define REP_IS_UNSIGNED_CLAUSE(name, __, unsigned, ___)                        \
  case k##name:                                                                \
    return unsigned;
#undef REP_IS_UNSIGNED_CLAUSE
      default:
        return false;
    }
  }

  // Assembler operand size used to load/store a value of |rep|.
  static compiler::OperandSize OperandSize(Representation rep);

  // Smallest value representable in |rep| (integer representations).
  static int64_t MinValue(Representation rep);

  // Largest value representable in |rep| (integer representations).
  static int64_t MaxValue(Representation rep);

  // Whether |value| fits within representation |rep|.
  static bool IsRepresentable(Representation rep, int64_t value);

  // Representation of an element loaded from an array with class id |cid|.
  static Representation RepresentationOfArrayElement(classid_t cid);

  // Human-readable name of |rep| for debugging.
  static const char* ToCString(Representation rep);
};
161
162
164 compiler::target::kWordSize == 4 ? kUnboxedInt32 : kUnboxedInt64;
165
166
167
168
169
171 compiler::target::kWordSize == 4 ? kUnboxedUint32 : kUnboxedInt64;
172
173
174
176
177
178
179
180
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
// A Location describes where a value lives during code generation: in a CPU
// or FPU register, in a stack slot, as a compile-time constant, or as a
// not-yet-allocated location carrying a register-allocator policy. The whole
// description packs into a single word (value_): a small kind field plus a
// kind-specific payload. Constant and pair locations are instead stored as
// tagged pointers, distinguished by the low two bits (kLocationTagMask).
class Location : public ValueObject {
 private:
  enum {
    // Bit layout of value_ for the non-tagged (shifted-kind) encodings.
    kKindBitsPos = 0,
    kKindBitsSize = 5,

    kPayloadBitsPos = kKindBitsPos + kKindBitsSize,
    // NOTE(review): the kPayloadBitsSize enumerator (used by PayloadField and
    // kBitsForStackIndex below) appears to be missing from this copy.
  };

  static constexpr uword kInvalidLocation = 0;
  // Low two bits distinguish the tagged-pointer encodings (constant / pair).
  static constexpr uword kLocationTagMask = 0x3;

 public:
  // Location kinds. The two tag values live in the low bits of a pointer;
  // the remaining kinds are shifted left by two so their low tag bits stay
  // clear (verified by the COMPILE_ASSERTs in the default constructor).
  enum Kind : intptr_t {
    // NOTE(review): the enumerator for the invalid kind (value 0) appears to
    // be missing from this copy.

    // Constant value: value_ is a tagged ConstantInstr pointer.
    kConstantTag = 1,

    // Pair: value_ is a tagged PairLocation pointer.
    kPairLocationTag = 2,

    // Unallocated location carrying a Policy for the register allocator.
    kUnallocated = 1 << 2,

    // Spill slots of increasing width.
    kStackSlot = 2 << 2,
    kDoubleStackSlot = 3 << 2,
    kQuadStackSlot = 4 << 2,

    // CPU register.
    kRegister = 5 << 2,

    // FPU register.
    kFpuRegister = 6 << 2,
  };

  // Default: invalid location. The COMPILE_ASSERTs verify that no shifted
  // kind collides with a pointer tag value in its low two bits.
  Location() : value_(kInvalidLocation) {
    COMPILE_ASSERT((kUnallocated & kLocationTagMask) != kConstantTag);
    COMPILE_ASSERT((kUnallocated & kLocationTagMask) != kPairLocationTag);

    // NOTE(review): the matching assert for kStackSlot vs kConstantTag
    // appears to be missing from this copy.
    COMPILE_ASSERT((kStackSlot & kLocationTagMask) != kPairLocationTag);

    COMPILE_ASSERT((kDoubleStackSlot & kLocationTagMask) != kConstantTag);
    COMPILE_ASSERT((kDoubleStackSlot & kLocationTagMask) != kPairLocationTag);

    COMPILE_ASSERT((kQuadStackSlot & kLocationTagMask) != kConstantTag);
    COMPILE_ASSERT((kQuadStackSlot & kLocationTagMask) != kPairLocationTag);

    // NOTE(review): the matching assert for kRegister vs kConstantTag
    // appears to be missing from this copy.
    COMPILE_ASSERT((kRegister & kLocationTagMask) != kPairLocationTag);

    COMPILE_ASSERT((kFpuRegister & kLocationTagMask) != kConstantTag);
    COMPILE_ASSERT((kFpuRegister & kLocationTagMask) != kPairLocationTag);

    // The tag values must be their own tags.
    COMPILE_ASSERT((kConstantTag & kLocationTagMask) == kConstantTag);
    COMPILE_ASSERT((kPairLocationTag & kLocationTagMask) == kPairLocationTag);
  }

  // NOTE(review): the copy constructor and the copy-assignment operator's
  // signature line appear to be missing from this copy; the statements below
  // are the tail of the (trivial, bitwise) copy-assignment operator.
    value_ = other.value_;
    return *this;
  }

  bool IsInvalid() const { return value_ == kInvalidLocation; }

  // Constant locations.
  bool IsConstant() const { return (value_ & kConstantTag) == kConstantTag; }

  // Builds a constant location referring to |obj|. |pair_index| (0 or 1)
  // records which half of a pair this constant feeds; it is stored in the
  // kPairLocationTag bit of the tagged pointer.
  static Location Constant(const ConstantInstr* obj, int pair_index = 0) {
    ASSERT((pair_index == 0) || (pair_index == 1));
    // NOTE(review): the declaration of |loc| (tagging the |obj| pointer with
    // the bits below) appears to be missing from this copy.
        (pair_index != 0 ? static_cast<uword>(kPairLocationTag) : 0) |
        static_cast<uword>(kConstantTag));
    // Round-trip checks: the encoding must be lossless.
    ASSERT(obj == loc.constant_instruction());
    ASSERT(loc.pair_index() == pair_index);
    return loc;
  }

  // Which half of a pair a constant location stands for (see Constant()).
  intptr_t pair_index() const {
    return (value_ & kPairLocationTag) != 0 ? 1 : 0;
  }

  // Strips the tag bits to recover the ConstantInstr pointer.
  ConstantInstr* constant_instruction() const {
    return reinterpret_cast<ConstantInstr*>(value_ & ~kLocationTagMask);
  }

  const Object& constant() const;

  // Pair locations.
  bool IsPairLocation() const {
    return (value_ & kLocationTagMask) == kPairLocationTag;
  }

  PairLocation* AsPairLocation() const;

  // Component |i| (0 or 1) of a pair location.
  Location Component(intptr_t i) const;

  // Register-allocation policies for unallocated locations.
  enum Policy {
    // NOTE(review): the first enumerator (presumably the "any location is
    // acceptable" policy used by Any() below) appears to be missing from
    // this copy.
    kPrefersRegister,
    kRequiresRegister,
    kRequiresFpuRegister,
    kWritableRegister,
    kSameAsFirstInput,

    // Forces the value onto the stack.
    kRequiresStack,
  };

  bool IsUnallocated() const { return kind() == kUnallocated; }

  bool IsRegisterBeneficial() {
    // NOTE(review): this function's body appears to be missing from this
    // copy.
  }

  // Wraps a Policy into an unallocated location.
  static Location UnallocatedLocation(Policy policy) {
    return Location(kUnallocated, PolicyField::encode(policy));
  }

  // Factory helpers, one per policy.
  static Location Any() { return UnallocatedLocation(kAny); }

  // NOTE(review): this factory's signature line (presumably
  // `static Location RequiresStack() {`) appears to be missing.
    return UnallocatedLocation(kRequiresStack);
  }

  // NOTE(review): this factory's signature line (presumably
  // `static Location PrefersRegister() {`) appears to be missing.
    return UnallocatedLocation(kPrefersRegister);
  }

  static Location RequiresRegister() {
    return UnallocatedLocation(kRequiresRegister);
  }

  static Location RequiresFpuRegister() {
    return UnallocatedLocation(kRequiresFpuRegister);
  }

  static Location WritableRegister() {
    return UnallocatedLocation(kWritableRegister);
  }

  static Location SameAsFirstInput() {
    return UnallocatedLocation(kSameAsFirstInput);
  }

  // NOTE(review): the policy accessor's signature line (presumably
  // `Policy policy() const {` plus an IsUnallocated() assert) appears to be
  // missing from this copy.
    return PolicyField::decode(payload());
  }

  // CPU-register locations.
  static Location RegisterLocation(Register reg) {
    // NOTE(review): this body (presumably
    // `return Location(kRegister, reg);`) appears to be missing.
  }

  bool IsRegister() const { return kind() == kRegister; }

  // NOTE(review): the register accessor's signature line (presumably
  // `Register reg() const {` plus an IsRegister() assert) appears to be
  // missing from this copy.
    return static_cast<Register>(payload());
  }

  // FPU-register locations.
  static Location FpuRegisterLocation(FpuRegister reg) {
    // NOTE(review): this body (presumably
    // `return Location(kFpuRegister, reg);`) appears to be missing.
  }

  bool IsFpuRegister() const { return kind() == kFpuRegister; }

  // NOTE(review): the FPU-register accessor (presumably
  // `FpuRegister fpu_reg() const { ... }`) is missing from this copy except
  // for its closing brace below.
  }

  static bool IsMachineRegisterKind(Kind kind) {
    return (kind == kRegister) || (kind == kFpuRegister);
  }

  // Builds a location of either machine-register kind from a raw register
  // code.
  static Location MachineRegisterLocation(Kind kind, intptr_t reg) {
    if (kind == kRegister) {
      return RegisterLocation(static_cast<Register>(reg));
    } else {
      ASSERT(kind == kFpuRegister);
      return FpuRegisterLocation(static_cast<FpuRegister>(reg));
    }
  }

  bool IsMachineRegister() const { return IsMachineRegisterKind(kind()); }

  // Raw register number, valid for either machine-register kind.
  intptr_t register_code() const {
    ASSERT(IsMachineRegister());
    return static_cast<intptr_t>(payload());
  }

  // Biases |stack_index| so negative indices fit the unsigned
  // StackIndexField; decoded by stack_index() below.
  static uword EncodeStackIndex(intptr_t stack_index) {
    ASSERT((-kStackIndexBias <= stack_index) &&
           (stack_index < kStackIndexBias));
    return static_cast<uword>(kStackIndexBias + stack_index);
  }

  // Word-sized spill slot at |stack_index| relative to register |base|.
  static Location StackSlot(intptr_t stack_index, Register base) {
    uword payload = StackSlotBaseField::encode(base) |
                    StackIndexField::encode(EncodeStackIndex(stack_index));
    // NOTE(review): the declaration of |loc| (presumably
    // `Location loc(kStackSlot, payload);`) appears to be missing here.
    ASSERT(loc.stack_index() == stack_index);
    return loc;
  }

  bool IsStackSlot() const { return kind() == kStackSlot; }

  // 64-bit spill slot at |stack_index| relative to register |base|.
  static Location DoubleStackSlot(intptr_t stack_index, Register base) {
    uword payload = StackSlotBaseField::encode(base) |
                    StackIndexField::encode(EncodeStackIndex(stack_index));
    Location loc(kDoubleStackSlot, payload);
    // Round-trip check: the index encoding must be lossless.
    ASSERT(loc.stack_index() == stack_index);
    return loc;
  }

  bool IsDoubleStackSlot() const { return kind() == kDoubleStackSlot; }

  // 128-bit spill slot at |stack_index| relative to register |base|.
  static Location QuadStackSlot(intptr_t stack_index, Register base) {
    uword payload = StackSlotBaseField::encode(base) |
                    StackIndexField::encode(EncodeStackIndex(stack_index));
    Location loc(kQuadStackSlot, payload);
    ASSERT(loc.stack_index() == stack_index);
    return loc;
  }

  bool IsQuadStackSlot() const { return kind() == kQuadStackSlot; }

  // NOTE(review): the base-register accessor's signature line (presumably
  // `Register base_reg() const {` plus a HasStackIndex() assert) appears to
  // be missing from this copy.
    return StackSlotBaseField::decode(payload());
  }

  // Un-biases the stored index (inverse of EncodeStackIndex).
  intptr_t stack_index() const {
    return StackIndexField::decode(payload()) - kStackIndexBias;
  }

  bool HasStackIndex() const {
    return IsStackSlot() || IsDoubleStackSlot() || IsQuadStackSlot();
  }

  // Byte offset of this stack location from its base register.
  intptr_t ToStackSlotOffset() const;

  // Rewrites an FP-relative stack location as SP-relative given the
  // FP-to-SP distance.
  Location ToSpRelative(intptr_t fp_to_sp_delta) const;

  Location ToCallerSpRelative() const;

  // Debug-printing helpers.
  const char* Name() const;
  void PrintTo(BaseTextBuffer* f) const;
  void Print() const;
  const char* ToCString() const;

  // Bitwise equality of the packed representation.
  bool Equals(Location other) const { return value_ == other.value_; }

  Kind kind() const { return KindField::decode(value_); }

  // Flow-graph serialization support.
  void Write(FlowGraphSerializer* s) const;
  // NOTE(review): the matching deserializing-constructor declaration appears
  // to be missing from this copy.

 private:
  // In-place mutators used when rewriting stack locations.
  void set_stack_index(intptr_t index) {
    value_ = PayloadField::update(
        StackIndexField::update(EncodeStackIndex(index), payload()), value_);
  }

  void set_base_reg(Register reg) {
    value_ = PayloadField::update(StackSlotBaseField::update(reg, payload()),
                                  value_);
  }

  // NOTE(review): this constructor's signature line (presumably
  // `Location(Kind kind, uword payload)`) appears to be missing; the
  // initializer list below packs kind and payload into value_.
      : value_(KindField::encode(kind) | PayloadField::encode(payload)) {}

  uword payload() const { return PayloadField::decode(value_); }

  // Bitfield accessors over value_ for the shifted-kind encodings.
  class KindField : public BitField<uword, Kind, kKindBitsPos, kKindBitsSize> {
  };
  class PayloadField
      : public BitField<uword, uword, kPayloadBitsPos, kPayloadBitsSize> {};

  // Payload layout for kUnallocated locations.
  typedef BitField<uword, Policy, 0, 3> PolicyField;

  // Payload layout for stack slots: base register plus biased index.
#if defined(ARCH_IS_64_BIT)
  static constexpr intptr_t kBitsForBaseReg = 6;
#else
  static constexpr intptr_t kBitsForBaseReg = 5;
#endif
  static constexpr intptr_t kBitsForStackIndex =
      kPayloadBitsSize - kBitsForBaseReg;
  class StackSlotBaseField
      : public BitField<uword, Register, 0, kBitsForBaseReg> {};
  class StackIndexField
      : public BitField<uword, intptr_t, kBitsForBaseReg, kBitsForStackIndex> {
  };

  // Bias letting the unsigned index field hold negative stack indices.
  static constexpr intptr_t kStackIndexBias = static_cast<intptr_t>(1)
                                              << (kBitsForStackIndex - 1);

  // NOTE(review): the `value_` member declaration (presumably
  // `uword value_;`) appears to be missing from this copy, though value_ is
  // used throughout.
};
580
584
588 intptr_t min_value = compiler::target::kSmiMin,
589 intptr_t max_value = compiler::target::kSmiMax);
593 intptr_t min_value = compiler::target::kSmiMin,
594 intptr_t max_value = compiler::target::kSmiMax);
598
600 Definition* def,
601 intptr_t* cpu_reg_slots,
602 intptr_t* fpu_reg_slots);
603
604
606
// Holds the two Locations that together describe a value occupying a pair of
// locations (see Location::kPairLocationTag). Zone-allocated.
class PairLocation : public ZoneAllocated {
 public:
  // Both slots must start out invalid (Location's default-constructed state).
  PairLocation() {
    for (intptr_t i = 0; i < kPairLength; i++) {
      ASSERT(locations_[i].IsInvalid());
    }
  }

  // Always kPairLength (2).
  intptr_t length() const { return kPairLength; }

  // NOTE(review): this accessor's signature line (presumably
  // `Location At(intptr_t i) const {` plus bounds asserts on |i|) appears to
  // be missing from this copy.
    return locations_[i];
  }

  void SetAt(intptr_t i, Location loc) {
    // NOTE(review): bounds asserts on |i| appear to be missing here.
    locations_[i] = loc;
  }

  // NOTE(review): this slot accessor's signature line (presumably
  // `Location* SlotAt(intptr_t i) {` plus bounds asserts) appears to be
  // missing from this copy.
    return &locations_[i];
  }

 private:
  static constexpr intptr_t kPairLength = 2;
  // NOTE(review): the backing array declaration (presumably
  // `Location locations_[kPairLength];`) appears to be missing from this
  // copy, though locations_ is used above.
};
639
640template <typename T>
641class SmallSet {
642 public:
643 SmallSet() : data_(0) {}
644
645 explicit SmallSet(uintptr_t data) : data_(
data) {}
646
647 bool Contains(
T value)
const {
return (data_ & ToMask(value)) != 0; }
648
649 void Add(
T value) { data_ |= ToMask(value); }
650
651 void Remove(
T value) { data_ &= ~ToMask(value); }
652
653 bool IsEmpty() const { return data_ == 0; }
654
655 void Clear() { data_ = 0; }
656
657 uintptr_t
data()
const {
return data_; }
658
659 private:
660 static uintptr_t ToMask(
T value) {
661 ASSERT(
static_cast<uintptr_t
>(value) < (kWordSize * kBitsPerByte));
662 return static_cast<uintptr_t>(1) << static_cast<uintptr_t>(value);
663 }
664
665 uintptr_t data_;
666};
667
// Tracks a set of live CPU and FPU registers, plus which CPU registers hold
// untagged (non-GC-visible) values. Backed by SmallSet bit masks.
class RegisterSet : public ValueObject {
 public:
  // Empty set. The asserts check every register fits in one word-wide mask.
  RegisterSet()
      : cpu_registers_(), untagged_cpu_registers_(), fpu_registers_() {
    ASSERT(kNumberOfCpuRegisters <= (kWordSize * kBitsPerByte));
    ASSERT(kNumberOfFpuRegisters <= (kWordSize * kBitsPerByte));
  }

  // Builds a set from raw bit masks; registers are added as tagged.
  explicit RegisterSet(uintptr_t cpu_register_mask, uintptr_t fpu_register_mask)
      : RegisterSet() {
    AddTaggedRegisters(cpu_register_mask, fpu_register_mask);
  }

  // Adds every allocatable CPU register (skipping the reserved ones) and,
  // optionally, every FPU register.
  void AddAllNonReservedRegisters(bool include_fpu_registers) {
    for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; --i) {
      if ((kReservedCpuRegisters & (1 << i)) != 0u) continue;
      Add(Location::RegisterLocation(static_cast<Register>(i)));
    }

    if (include_fpu_registers) {
      for (intptr_t i = kNumberOfFpuRegisters - 1; i >= 0; --i) {
        Add(Location::FpuRegisterLocation(static_cast<FpuRegister>(i)));
      }
    }
  }

  // Adds all general-purpose and FPU registers, excluding per-architecture
  // special registers (frame/stack pointer, and e.g. PC on ARM).
  void AddAllGeneralRegisters() {
    for (intptr_t i = kNumberOfCpuRegisters - 1; i >= 0; --i) {
      // NOTE(review): the declaration of |reg| (presumably
      // `Register reg = static_cast<Register>(i);`) appears to be missing
      // from this copy.
      if (reg == FPREG || reg == SPREG) continue;
#if defined(TARGET_ARCH_ARM)
      if (reg == PC) continue;
#elif defined(TARGET_ARCH_ARM64)
      if (reg == R31) continue;
#elif defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
      if (reg == ZR || reg == TP || reg == GP) continue;
#endif
      Add(Location::RegisterLocation(reg));
    }

    for (intptr_t i = kNumberOfFpuRegisters - 1; i >= 0; --i) {
      Add(Location::FpuRegisterLocation(static_cast<FpuRegister>(i)));
    }
  }

  // Adds every CPU/FPU register used for argument passing in the native
  // calling convention (not supported on IA32).
  void AddAllArgumentRegisters() {
#if !defined(TARGET_ARCH_IA32)
    // NOTE(review): the loop header iterating over CPU registers (defining
    // |reg|) appears to be missing from this copy.
      if (IsArgumentRegister(reg)) {
        Add(Location::RegisterLocation(reg));
      }
    }
    // NOTE(review): the loop header iterating over FPU registers (defining
    // |reg|) appears to be missing from this copy.
      if (IsFpuArgumentRegister(reg)) {
        Add(Location::FpuRegisterLocation(reg));
      }
    }
#endif
  }

  // Adds, as tagged values, every register whose bit is set in the masks.
  void AddTaggedRegisters(uintptr_t cpu_register_mask,
                          uintptr_t fpu_register_mask) {
    // NOTE(review): the loop header over CPU register indices |i| appears to
    // be missing from this copy.
      if (Utils::TestBit(cpu_register_mask, i)) {
        // NOTE(review): the declaration of |reg| appears to be missing here.
        Add(Location::RegisterLocation(reg));
      }
    }
    // NOTE(review): the loop header over FPU register indices |i| appears to
    // be missing from this copy.
      if (Utils::TestBit(fpu_register_mask, i)) {
        // NOTE(review): the declaration of |reg| appears to be missing here.
        Add(Location::FpuRegisterLocation(reg));
      }
    }
  }

  // Convenience wrapper over Add() for a bare CPU register.
  void AddRegister(Register reg, Representation rep = kTagged) {
    Add(Location::RegisterLocation(reg), rep);
  }

  // Adds the register named by |loc|; a non-kTagged |rep| marks a CPU
  // register as holding an untagged value (invisible to the GC).
  void Add(Location loc, Representation rep = kTagged) {
    if (loc.IsRegister()) {
      cpu_registers_.Add(loc.reg());
      if (rep != kTagged) {
        MarkUntagged(loc);
      }
    } else if (loc.IsFpuRegister()) {
      fpu_registers_.Add(loc.fpu_reg());
    }
  }

  // NOTE(review): this method's signature line (presumably
  // `void Remove(Location loc) {`) appears to be missing from this copy.
    if (loc.IsRegister()) {
      cpu_registers_.Remove(loc.reg());
    } else if (loc.IsFpuRegister()) {
      fpu_registers_.Remove(loc.fpu_reg());
    }
  }

  // NOTE(review): this method's signature line (presumably
  // `bool Contains(Location loc) {`) appears to be missing from this copy.
    if (loc.IsRegister()) {
      return ContainsRegister(loc.reg());
    } else if (loc.IsFpuRegister()) {
      return ContainsFpuRegister(loc.fpu_reg());
    } else {
      // NOTE(review): a line (possibly an unreachable-marker) appears to be
      // missing before this fallback return.
      return false;
    }
  }

  void DebugPrint();

  // NOTE(review): this method's signature line (presumably
  // `void MarkUntagged(Location loc) {` plus an IsRegister() assert) appears
  // to be missing from this copy.
    untagged_cpu_registers_.Add(loc.reg());
  }

  // True when the set holds any value the GC must not scan (untagged CPU
  // values or FPU values).
  bool HasUntaggedValues() const {
    return !untagged_cpu_registers_.IsEmpty() || !fpu_registers_.IsEmpty();
  }

  // Whether |reg| holds a tagged (GC-visible) value.
  bool IsTagged(Register reg) const {
    return !untagged_cpu_registers_.Contains(reg);
  }

  bool ContainsRegister(Register reg) const {
    return cpu_registers_.Contains(reg);
  }

  bool ContainsFpuRegister(FpuRegister fpu_reg) const {
    return fpu_registers_.Contains(fpu_reg);
  }

  // Population counts of the two register masks.
  intptr_t CpuRegisterCount() const { return RegisterCount(cpu_registers()); }
  intptr_t FpuRegisterCount() const { return RegisterCount(fpu_registers()); }

  bool IsEmpty() const {
    return CpuRegisterCount() == 0 && FpuRegisterCount() == 0;
  }

  static intptr_t RegisterCount(intptr_t registers);
  // Tests bit |reg| of a raw register mask.
  static bool Contains(uintptr_t register_set, intptr_t reg) {
    return (register_set & (static_cast<uintptr_t>(1) << reg)) != 0;
  }

  // Raw bit masks of the live CPU / FPU registers.
  uintptr_t cpu_registers() const { return cpu_registers_.data(); }
  uintptr_t fpu_registers() const { return fpu_registers_.data(); }

  // Empties all three sets.
  void Clear() {
    cpu_registers_.Clear();
    fpu_registers_.Clear();
    untagged_cpu_registers_.Clear();
  }

  // Flow-graph (de)serialization support.
  void Write(FlowGraphSerializer* s) const;
  explicit RegisterSet(FlowGraphDeserializer* d);

 private:
  SmallSet<Register> cpu_registers_;
  SmallSet<Register> untagged_cpu_registers_;
  SmallSet<FpuRegister> fpu_registers_;

  // NOTE(review): a trailing line (possibly a copy-disabling macro) appears
  // to be missing from this copy.
};
838
839
// Describes, for one IR instruction, where its inputs, temporaries, and
// output must live, plus whether the instruction can call into the runtime
// (which determines which registers must be preserved / recorded in the
// stack bitmap). Zone-allocated.
class LocationSummary : public ZoneAllocated {
 public:
  enum ContainsCall {
    // Never calls.
    kNoCall,
    // Always calls; only machine-register temps allowed (see set_temp).
    kCall,
    // Calls, but callee preserves registers.
    kCallCalleeSafe,
    // Calls only on a slow path.
    kCallOnSlowPath,
    // Calls a shared slow-path stub.
    kCallOnSharedSlowPath,
    // Calls a native leaf function.
    kNativeLeafCall
  };

  LocationSummary(Zone* zone,
                  intptr_t input_count,
                  intptr_t temp_count,
                  LocationSummary::ContainsCall contains_call);

  intptr_t input_count() const { return num_inputs_; }

  // NOTE(review): this accessor's signature line (presumably
  // `Location in(intptr_t index) const {` plus a lower-bound assert) appears
  // to be missing from this copy.
    ASSERT(index < num_inputs_);
    return input_locations_[index];
  }

  // NOTE(review): this accessor's signature line (presumably
  // `Location* in_slot(intptr_t index) {` plus a lower-bound assert) appears
  // to be missing from this copy.
    ASSERT(index < num_inputs_);
    return &input_locations_[index];
  }

  void set_in(intptr_t index, Location loc);

  intptr_t temp_count() const { return num_temps_; }

  // Temporary location |index|.
  Location temp(intptr_t index) const {
    // NOTE(review): a lower-bound assert on |index| appears to be missing.
    ASSERT(index < num_temps_);
    return temp_locations_[index];
  }

  // Writable slot for temporary |index| (used by the register allocator).
  Location* temp_slot(intptr_t index) {
    // NOTE(review): a lower-bound assert on |index| appears to be missing.
    ASSERT(index < num_temps_);
    return &temp_locations_[index];
  }

  void set_temp(intptr_t index, Location loc) {
    // NOTE(review): a lower-bound assert on |index| appears to be missing.
    ASSERT(index < num_temps_);
    // Instructions that always call may only use machine-register temps.
    ASSERT(!always_calls() || loc.IsMachineRegister());
    temp_locations_[index] = loc;
  }

  // Exactly one output per instruction.
  intptr_t output_count() const { return 1; }

  // NOTE(review): this accessor's signature line (presumably
  // `Location out(intptr_t index) const {` plus an index == 0 assert)
  // appears to be missing from this copy.
    return output_location_;
  }

  // Writable slot for the (single) output location.
  Location* out_slot(intptr_t index) {
    // NOTE(review): an index == 0 assert appears to be missing here.
    return &output_location_;
  }

  void set_out(intptr_t index, Location loc);

  // Lazily-created bitmap of stack slots holding tagged values across a call.
  const BitmapBuilder& stack_bitmap() { return EnsureStackBitmap(); }
  void SetStackBit(intptr_t index) { EnsureStackBitmap().Set(index, true); }

  // True when the instruction calls unconditionally (not just on a slow
  // path).
  bool always_calls() const {
    return contains_call_ == kCall || contains_call_ == kCallCalleeSafe;
  }

  bool callee_safe_call() const { return contains_call_ == kCallCalleeSafe; }

  bool can_call() { return contains_call_ != kNoCall; }

  // Calls, but only on a slow path.
  bool HasCallOnSlowPath() { return can_call() && !always_calls(); }

  bool call_on_shared_slow_path() const {
    return contains_call_ == kCallOnSharedSlowPath;
  }

  bool native_leaf_call() const { return contains_call_ == kNativeLeafCall; }

  void PrintTo(BaseTextBuffer* f) const;

  // Convenience factory for simple summaries.
  // NOTE(review): a parameter line (between input_count and contains_call)
  // appears to be missing from this declaration in this copy.
  static LocationSummary* Make(Zone* zone,
                               intptr_t input_count,
                               ContainsCall contains_call);

  // Registers live across this instruction's call, if any.
  RegisterSet* live_registers() { return &live_registers_; }

#if defined(DEBUG)
  // Debug-only verification that writable inputs are not aliased.
  void DiscoverWritableInputs();
  void CheckWritableInputs();
#endif

  // Flow-graph (de)serialization support.
  void Write(FlowGraphSerializer* s) const;
  explicit LocationSummary(FlowGraphDeserializer* d);

 private:
  // Allocates the stack bitmap on first use.
  BitmapBuilder& EnsureStackBitmap() {
    if (stack_bitmap_ == nullptr) {
      stack_bitmap_ = new BitmapBuilder();
    }
    return *stack_bitmap_;
  }

  const intptr_t num_inputs_;
  // NOTE(review): the input_locations_ member declaration appears to be
  // missing from this copy, though it is used above.
  const intptr_t num_temps_;
  // NOTE(review): the temp_locations_ and output_location_ member
  // declarations appear to be missing from this copy, though both are used
  // above.

  BitmapBuilder* stack_bitmap_;

  const ContainsCall contains_call_;
  RegisterSet live_registers_;

#if defined(DEBUG)
  intptr_t writable_inputs_;
#endif
};
975
976}
977
978#endif
static std::unique_ptr< SkEncoder > Make(SkWStream *dst, const SkPixmap *src, const SkYUVAPixmaps *srcYUVA, const SkColorSpace *srcYUVAColorSpace, const SkJpegEncoder::Options &options)
static void encode(uint8_t output[16], const uint32_t input[4])
#define COMPILE_ASSERT(expr)
bool Equals(const SkPath &a, const SkPath &b)
VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE auto & d
void PrintTo(FlValue *v, std::ostream *os)
#define REP_SIZEOF_CLAUSE(name, __, ___, type)
#define FOR_EACH_REPRESENTATION_KIND(M)
#define FOR_EACH_INTEGER_REPRESENTATION_KIND(M)
#define REP_IS_UNSIGNED_CLAUSE(name, __, unsigned, ___)
#define FOR_EACH_UNBOXED_REPRESENTATION_KIND(M)
#define FOR_EACH_SIMPLE_REPRESENTATION_KIND(M)
#define REP_IN_SET_CLAUSE(name, __, ___, ____)
#define DECLARE_REPRESENTATION(name, __, ___, ____)
SK_API bool Read(SkStreamSeekable *src, SkDocumentPage *dstArray, int dstArrayCount, const SkDeserialProcs *=nullptr)
bool Contains(const Container &container, const Value &value)
Location LocationAnyOrConstant(Value *value)
Location LocationRegisterOrConstant(Value *value)
constexpr intptr_t kBitsPerWord
static constexpr Representation kUnboxedUword
static constexpr intptr_t kMaxLocationCount
Location LocationExceptionLocation()
intptr_t LocationCount(Representation rep)
Location LocationFixedRegisterOrConstant(Value *value, Register reg)
const int kNumberOfFpuRegisters
Location LocationWritableRegisterOrSmiConstant(Value *value, intptr_t min_value, intptr_t max_value)
Location LocationArgumentsDescriptorLocation()
Location LocationRemapForSlowPath(Location loc, Definition *def, intptr_t *cpu_reg_slots, intptr_t *fpu_reg_slots)
static constexpr Representation kUnboxedAddress
compiler::Address LocationToStackSlotAddress(Location loc)
Location LocationStackTraceLocation()
Location LocationWritableRegisterOrConstant(Value *value)
static constexpr Representation kUnboxedIntPtr
static constexpr Representation kUnboxedWord
Location LocationFixedRegisterOrSmiConstant(Value *value, Register reg)
Location LocationRegisterOrSmiConstant(Value *value, intptr_t min_value, intptr_t max_value)
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot data
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive keep the shell running after the Dart script has completed enable serial On low power devices with low core running concurrent GC tasks on threads can cause them to contend with the UI thread which could potentially lead to jank This option turns off all concurrent GC activities domain network policy
std::enable_if_t< sknonstd::is_bitmask_enum< E >::value, bool > constexpr Any(E e)