#define SHOULD_NOT_INCLUDE_RUNTIME

    if (reg == cpu_register) break;
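// Initializes a non-late static field via the runtime, which evaluates the
// field's initializer expression and stores the result.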
void StubCodeCompiler::GenerateInitStaticFieldStub() {
  __ CallRuntime(kInitStaticFieldRuntimeEntry, 1);
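// Initializes a late static field: calls the field's initializer function
// and stores the result. For a late final field, the stub reloads the field
// after the initializer returns and, if it no longer holds the sentinel
// value, raises the field-assigned-during-initialization error instead of
// storing.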
void StubCodeCompiler::GenerateInitLateStaticFieldStub(bool is_final,
                                                       bool is_shared) {
  __ Comment("Calling initializer function");
  __ PushRegister(kFieldReg);
  __ LoadCompressedFieldFromOffset(
  if (!FLAG_precompiled_mode) {
  __ PopRegister(kFieldReg);
  __ LoadStaticFieldAddress(kAddressReg, kFieldReg, kScratchReg, is_shared);

  Label throw_exception;
  __ Comment("Checking that initializer did not set late final field");
  __ LoadFromOffset(kScratchReg, kAddressReg, 0);
  __ StoreToOffset(kResultReg, kAddressReg, 0);

#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
  __ set_lr_state(compiler::LRState::OnEntry().EnterFrame());
  __ PushRegister(kFieldReg);
  __ CallRuntime(kLateFieldAssignedDuringInitializationErrorRuntimeEntry,
void StubCodeCompiler::GenerateInitLateStaticFieldStub() {
  GenerateInitLateStaticFieldStub(/*is_final=*/false, /*is_shared=*/false);
}

void StubCodeCompiler::GenerateInitLateFinalStaticFieldStub() {
  GenerateInitLateStaticFieldStub(/*is_final=*/true, /*is_shared=*/false);
}

void StubCodeCompiler::GenerateInitSharedLateStaticFieldStub() {
  GenerateInitLateStaticFieldStub(/*is_final=*/false, /*is_shared=*/true);
}

void StubCodeCompiler::GenerateInitSharedLateFinalStaticFieldStub() {
  GenerateInitLateStaticFieldStub(/*is_final=*/true, /*is_shared=*/true);
}
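// Initializes a non-late instance field via the runtime; the instance and
// the field object are passed as the two runtime-call arguments.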
void StubCodeCompiler::GenerateInitInstanceFieldStub() {
  __ PushRegistersInOrder(
  __ CallRuntime(kInitInstanceFieldRuntimeEntry, 2);
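// Initializes a late instance field: invokes the initializer function, then
// stores the result into the instance at the field's offset. For a late
// final field the current value is re-checked first, and a non-sentinel
// value triggers the assigned-during-initialization error path below.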
void StubCodeCompiler::GenerateInitLateInstanceFieldStub(bool is_final) {
  __ PushRegistersInOrder({kFieldReg, kInstanceReg, kInstanceReg});
  __ Comment("Result is a return value from initializer");
  __ LoadCompressedFieldFromOffset(
  if (!FLAG_precompiled_mode) {
  __ PopRegisterPair(kInstanceReg, kFieldReg);
  __ LoadCompressedFieldFromOffset(
#if defined(DART_COMPRESSED_POINTERS)
  __ SmiUntag(kScratchReg);
  __ SmiTag(kScratchReg);
  __ LoadCompressedFieldAddressForRegOffset(kAddressReg, kInstanceReg,
                                            kScratchReg);

  Label throw_exception;
  __ LoadCompressed(kScratchReg, Address(kAddressReg, 0));

#if defined(TARGET_ARCH_IA32)
  __ StoreIntoObject(kInstanceReg, Address(kAddressReg, 0), kScratchReg);
  __ StoreCompressedIntoObject(kInstanceReg, Address(kAddressReg, 0),

#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
  __ set_lr_state(compiler::LRState::OnEntry().EnterFrame());
  __ Bind(&throw_exception);
  __ PushRegister(kFieldReg);
  __ CallRuntime(kLateFieldAssignedDuringInitializationErrorRuntimeEntry,
void StubCodeCompiler::GenerateInitLateInstanceFieldStub() {
  GenerateInitLateInstanceFieldStub(/*is_final=*/false);
}

void StubCodeCompiler::GenerateInitLateFinalInstanceFieldStub() {
  GenerateInitLateInstanceFieldStub(/*is_final=*/true);
}
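// The throw/rethrow and assertion-failure stubs delegate directly to the
// corresponding runtime entries, which unwind to the active exception
// handler and therefore never return.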
void StubCodeCompiler::GenerateThrowStub() {
  __ CallRuntime(kThrowRuntimeEntry, 1);

void StubCodeCompiler::GenerateReThrowStub() {
  __ PushRegistersInOrder(
  __ CallRuntime(kReThrowRuntimeEntry, 3);

void StubCodeCompiler::GenerateAssertBooleanStub() {
  __ CallRuntime(kNonBoolTypeErrorRuntimeEntry, 1);

void StubCodeCompiler::GenerateAssertSubtypeStub() {
  __ CallRuntime(kSubtypeCheckRuntimeEntry, 5);
void StubCodeCompiler::GenerateAssertAssignableStub() {
#if !defined(TARGET_ARCH_IA32)
  __ PushObject(Object::null_object());
  __ CallRuntime(kTypeCheckRuntimeEntry, 7);
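// Instantiates a TypeArguments vector. Uninstantiated vectors carry a cache
// of prior instantiations keyed by the (instantiator, function) type
// argument pair; the stub probes that cache (linearly while it is small,
// hash-based once it grows) and only calls the runtime on a miss.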
void StubCodeCompiler::GenerateInstantiateTypeArgumentsStub() {
                "Must handle possibility of inst tav reg being spilled");
                "Must handle possibility of function tav reg being spilled");

  auto check_entry = [&](compiler::Label* found, compiler::Label* not_found) {
    __ Comment("Check cache entry");
                  "sentinel is not same index as instantiator type args");
    __ LoadAcquireCompressedFromOffset(
    compiler::Label next;
        compiler::Address(kEntryReg,
    __ LoadAcquireCompressedFromOffset(

  compiler::Label linear_cache_loop, hash_cache_search, cache_hit,
      call_runtime;
                   Slot::Array_length());

#if defined(TARGET_ARCH_IA32)
  __ BranchIf(GREATER, &hash_cache_search);

  __ Comment("Check linear cache");
  __ Bind(&linear_cache_loop);
  check_entry(&cache_hit, &call_runtime);
#if !defined(TARGET_ARCH_IA32)
  __ Bind(&hash_cache_search);
  __ Comment("Check hash-based cache");

  compiler::Label pop_before_success, pop_before_failure;
  if (!saved_registers.IsEmpty()) {
    __ Comment("Spills due to register pressure");
    __ PushRegisters(saved_registers);

  __ Comment("Calculate address of first entry");
  __ Comment("Calculate probe mask");
  __ LoadAcquireCompressedFromOffset(

  Label is_not_null, done;
  __ LoadFromSlot(dst, src, Slot::TypeArguments_hash());
  __ BranchIfZero(dst, &pop_before_failure);

  __ Comment("Calculate initial probe from type argument vector hashes");

  compiler::Label loop;
  __ Comment("Loop over hash cache entries");
  check_entry(&pop_before_success, &pop_before_failure);

  __ Bind(&pop_before_failure);
  if (!saved_registers.IsEmpty()) {
    __ Comment("Restore spilled registers on cache miss");
    __ PopRegisters(saved_registers);

  __ Comment("Cache miss");
#if !defined(DART_ASSEMBLER_HAS_NULL_REG)
  __ PushObject(Object::null_object());
#if defined(TARGET_ARCH_ARM)
                "Should be ordered to push arguments with one instruction");
  __ PushRegistersInOrder({
#if defined(DART_ASSEMBLER_HAS_NULL_REG)
  __ CallRuntime(kInstantiateTypeArgumentsRuntimeEntry, 3);

#if !defined(TARGET_ARCH_IA32)
  __ Bind(&pop_before_success);
  if (!saved_registers.IsEmpty()) {
    __ Comment("Restore spilled registers on cache hit");
    __ PopRegisters(saved_registers);

  __ Comment("Cache hit");
      compiler::Address(kEntryReg,
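// Fast paths for the common cases where the instantiated vector can simply
// share the instantiator (or function) type arguments; if sharing is not
// possible, control falls through to the regular cache lookup above.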
void StubCodeCompiler::
    GenerateInstantiateTypeArgumentsMayShareInstantiatorTAStub() {
  compiler::Label cache_lookup;
  __ LoadCompressedSmi(
  __ LoadCompressedSmi(
  __ AndRegisters(kScratch2Reg, kScratch1Reg);
  __ CompareRegisters(kScratch2Reg, kScratch1Reg);

  GenerateInstantiateTypeArgumentsStub();

void StubCodeCompiler::
    GenerateInstantiateTypeArgumentsMayShareFunctionTAStub() {
  compiler::Label cache_lookup;
  __ LoadCompressedSmi(
  __ LoadCompressedSmi(
  __ AndRegisters(kScratch2Reg, kScratch1Reg);
  __ CompareRegisters(kScratch2Reg, kScratch1Reg);

  GenerateInstantiateTypeArgumentsStub();

  __ PushObject(Object::null_object());
  __ CallRuntime(kInstantiateTypeRuntimeEntry, 3);
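// Instantiates a single type parameter by indexing into the corresponding
// type-argument vector; a null vector means the parameter resolves to
// dynamic, and values that are not themselves types are handled on a
// separate path.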
static void BuildInstantiateTypeParameterStub(Assembler* assembler,
                                              Nullability nullability,
                                              bool is_function_parameter) {
  Label runtime_call, return_dynamic, type_parameter_value_is_not_type;

  if (is_function_parameter) {
                     TypeArguments::null_object());
    __ BranchIf(EQUAL, &return_dynamic);
    __ LoadFieldFromOffset(
                     TypeArguments::null_object());
    __ BranchIf(EQUAL, &return_dynamic);
    __ LoadFieldFromOffset(
  switch (nullability) {
      __ CompareAbstractTypeNullabilityWith(

void StubCodeCompiler::
    GenerateInstantiateTypeNonNullableClassTypeParameterStub() {

void StubCodeCompiler::GenerateInstantiateTypeNullableClassTypeParameterStub() {

void StubCodeCompiler::
    GenerateInstantiateTypeNonNullableFunctionTypeParameterStub() {

void StubCodeCompiler::
    GenerateInstantiateTypeNullableFunctionTypeParameterStub() {

void StubCodeCompiler::GenerateInstantiateTypeStub() {

void StubCodeCompiler::GenerateInstanceOfStub() {
  __ CallRuntime(kInstanceofRuntimeEntry, 5);
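// Debug-only sanity check that a register holds a Type, FunctionType, or
// TypeParameter; otherwise the VM stops with a diagnostic message.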
  __ LoadClassIdMayBeSmi(scratch_reg, type_reg);
  __ CompareImmediate(scratch_reg, kTypeParameterCid);
  __ BranchIf(EQUAL, &is_type_param_or_type_or_function_type,
  __ CompareImmediate(scratch_reg, kTypeCid);
  __ BranchIf(EQUAL, &is_type_param_or_type_or_function_type,
  __ CompareImmediate(scratch_reg, kFunctionTypeCid);
  __ BranchIf(EQUAL, &is_type_param_or_type_or_function_type,
  __ Stop("not a type or function type or type parameter");
  __ Bind(&is_type_param_or_type_or_function_type);
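// Computes whether a type is a top type for subtyping purposes (such as
// dynamic, void, or Object?), unwrapping FutureOr wrappers as needed, and
// leaves the answer in the output register.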
void StubCodeCompiler::GenerateTypeIsTopTypeForSubtypingStub() {
  const Register output_reg = scratch1_reg;
#if defined(TARGET_ARCH_IA32)
  __ PushRegister(scratch2_reg);
  static_assert(scratch1_reg != scratch2_reg,
                "both scratch registers are the same");
  __ CompareClassId(scratch1_reg, kTypeCid, scratch2_reg);
  __ LoadTypeClassId(scratch2_reg, scratch1_reg);
  __ CompareImmediate(scratch2_reg, kFutureOrCid);
  __ CompareImmediate(scratch2_reg, kInstanceCid);
  __ CompareAbstractTypeNullabilityWith(
  __ LoadImmediate(output_reg, 0);
#if defined(TARGET_ARCH_IA32)
  __ PopRegister(scratch2_reg);
  __ Bind(&unwrap_future_or);
  __ LoadCompressedField(
  __ CompareObject(scratch2_reg, Object::null_object());
  __ LoadCompressedField(
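// Determines without a runtime call whether null is assignable to a type:
// nullable and top types accept null, while FutureOr and type parameters
// are unwrapped iteratively and re-checked.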
void StubCodeCompiler::GenerateNullIsAssignableToTypeStub() {
  const Register kOutputReg = kCurrentTypeReg;
#if defined(TARGET_ARCH_IA32)
  __ PushRegister(kScratchReg);
  static_assert(kCurrentTypeReg != kScratchReg,
                "code assumes distinct scratch registers");

  compiler::Label is_assignable, done;
  compiler::Label check_null_assignable;
  __ Bind(&check_null_assignable);
  compiler::Label is_not_type;
  __ CompareClassId(kCurrentTypeReg, kTypeCid, kScratchReg);
  __ CompareAbstractTypeNullabilityWith(
  __ LoadTypeClassId(kScratchReg, kCurrentTypeReg);
  __ CompareImmediate(kScratchReg, kFutureOrCid);
  __ LoadCompressedField(
      compiler::FieldAddress(kCurrentTypeReg,
  __ CompareObject(kScratchReg, Object::null_object());
  __ BranchIf(EQUAL, &is_assignable);
  __ LoadCompressedField(
      compiler::FieldAddress(
  __ CompareAbstractTypeNullabilityWith(

  auto handle_case = [&](Register tav) {
    auto const kIndexReg = kCurrentTypeReg;
    __ LoadFieldFromOffset(kIndexReg, kCurrentTypeReg,
    __ LoadIndexedCompressed(kCurrentTypeReg, tav,
    __ Jump(&check_null_assignable);

  Label function_type_param;
                  Slot::AbstractType_flags());
  __ BranchIfBit(kScratchReg,
  __ Bind(&function_type_param);
#if defined(TARGET_ARCH_IA32)
  __ LoadImmediate(kOutputReg, 0);
#if defined(TARGET_ARCH_IA32)
  __ PopRegister(kScratchReg);
#if !defined(TARGET_ARCH_IA32)

void StubCodeCompiler::GenerateDefaultTypeTestStub() {

void StubCodeCompiler::GenerateDefaultNullableTypeTestStub() {

void StubCodeCompiler::GenerateTopTypeTypeTestStub() {

void StubCodeCompiler::GenerateUnreachableTypeTestStub() {
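// Type-testing stubs (TTS) entry points. The default stubs defer to the
// slow path, while specialized stubs are generated per type; the type
// parameter variants below load the parameter's instantiation from the
// appropriate type-argument vector and dispatch to its own stub.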
  auto handle_case = [&](Register tav) {

  Label function_type_param;
                  Slot::AbstractType_flags());
  __ Bind(&function_type_param);

void StubCodeCompiler::GenerateNullableTypeParameterTypeTestStub() {

void StubCodeCompiler::GenerateTypeParameterTypeTestStub() {

  __ CallRuntime(kTypeCheckRuntimeEntry, 7);
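// Lazy specialization: the first time a default type-testing stub runs, it
// calls the runtime, which builds a specialized stub for the type and
// patches it in for subsequent checks.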
void StubCodeCompiler::GenerateLazySpecializeTypeTestStub() {
  __ EnterStubFrame();
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateLazySpecializeNullableTypeTestStub() {
  __ EnterStubFrame();
  __ LeaveStubFrame();
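// Shared slow path for all type-testing stubs: probes the SubtypeTestCache
// associated with the check, dispatching on the number of inputs the cache
// was created with, and falls back to the type-check runtime entry on a
// cache miss.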
void StubCodeCompiler::GenerateSlowTypeTestStub() {
  Label done, call_runtime;

  if (!FLAG_precompiled_mode) {
  __ EnterStubFrame();
  __ BranchIf(EQUAL, &call_runtime);

  Label call_2, call_3, call_4, call_6;
  __ Comment("Check number of STC inputs");
                  Slot::SubtypeTestCache_num_inputs());
  __ Comment("Call 7 input STC check");
  __ Comment("Call 6 input STC check");
  __ Comment("Call 4 input STC check");
  __ Comment("Call 3 input STC check");
  __ Comment("Call 2 input STC check");
  __ Comment("Call runtime");
  __ LeaveStubFrame();
#define GENERATE_BREAKPOINT_STUB(Name)                                        \
  void StubCodeCompiler::Generate##Name##Stub() {                             \

#undef GENERATE_BREAKPOINT_STUB
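// Allocates a Closure. The fast path bump-allocates from new space and
// initializes the function, context, and type-argument slots inline
// (including the entry point in AOT mode); the slow path allocates through
// the runtime.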
void StubCodeCompiler::GenerateAllocateClosureStub(
    bool has_instantiator_type_args,
    bool is_generic) {
  const intptr_t instance_size =
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    __ Comment("Inline allocation of uninitialized closure");
    __ TryAllocateObject(kClosureCid, instance_size, &slow_case, distance,

    __ Comment("Inline initialization of allocated closure");
    if (has_instantiator_type_args) {
                         Slot::Closure_instantiator_type_arguments());
                         Slot::Closure_instantiator_type_arguments());
                       Slot::Closure_function_type_arguments());
                       Slot::Closure_delayed_type_arguments());
                       Slot::Closure_function());
                       Slot::Closure_context());
                       Slot::Closure_hash());
                         Slot::Closure_delayed_type_arguments());
#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
    if (FLAG_precompiled_mode) {
                         Slot::Function_entry_point());
                         Slot::Closure_entry_point());

  __ Comment("Closure allocation via runtime");
  __ EnterStubFrame();
  __ PushRegistersInOrder(
  if (has_instantiator_type_args) {
  __ CallRuntime(kAllocateClosureRuntimeEntry, 4);
  if (has_instantiator_type_args) {
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateAllocateClosureStub() {
  GenerateAllocateClosureStub(/*has_instantiator_type_args=*/false,
                              /*is_generic=*/false);
}

void StubCodeCompiler::GenerateAllocateClosureGenericStub() {
  GenerateAllocateClosureStub(/*has_instantiator_type_args=*/false,
                              /*is_generic=*/true);
}

void StubCodeCompiler::GenerateAllocateClosureTAStub() {
  GenerateAllocateClosureStub(/*has_instantiator_type_args=*/true,
                              /*is_generic=*/false);
}

void StubCodeCompiler::GenerateAllocateClosureTAGenericStub() {
  GenerateAllocateClosureStub(/*has_instantiator_type_args=*/true,
                              /*is_generic=*/true);
}
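// Allocates a GrowableObjectArray, inlining the new-space allocation and
// field initialization on the fast path and falling back to a runtime
// allocation otherwise.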
void StubCodeCompiler::GenerateAllocateGrowableArrayStub() {
#if defined(TARGET_ARCH_IA32)
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    __ Comment("Inline allocation of GrowableList");
    __ TryAllocateObject(kGrowableObjectArrayCid, instance_size, &slow_case,
    __ StoreIntoObjectNoBarrier(
                                     kGrowableObjectArrayCid, instance_size);
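// Allocates a Record whose shape (field count and names) is passed in the
// shape register: the fast path computes the instance size from the field
// count, bump-allocates, writes the header tags and shape, and
// null-initializes all fields.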
void StubCodeCompiler::GenerateAllocateRecordStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
                                   temp_reg, shape_reg,
    const intptr_t fixed_size_plus_alignment_padding =
                    fixed_size_plus_alignment_padding);
    __ MoveRegister(new_top_reg, temp_reg);
    __ AddRegisters(new_top_reg, result_reg);
    __ CompareWithMemoryValue(new_top_reg,
    __ CheckAllocationCanary(result_reg);

    Label size_tag_overflow, done;
    __ LslImmediate(temp_reg,
    __ Bind(&size_tag_overflow);
    __ LoadImmediate(temp_reg, 0);
    __ OrImmediate(temp_reg, tags);
    __ StoreFieldToOffset(temp_reg, result_reg,
    __ StoreCompressedIntoObjectNoBarrier(

    const Register field_reg = shape_reg;
#if defined(TARGET_ARCH_ARM64) || defined(TARGET_ARCH_RISCV32) ||             \
    defined(TARGET_ARCH_RISCV64)
    const Register null_reg = temp_reg;
    __ CompareRegisters(field_reg, new_top_reg);
    __ StoreCompressedIntoObjectNoBarrier(
        result_reg, FieldAddress(field_reg, offset), null_reg);
    __ CompareRegisters(field_reg, new_top_reg);
    __ WriteAllocationCanary(new_top_reg);

  __ EnterStubFrame();
  __ PushRegister(shape_reg);
  __ CallRuntime(kAllocateRecordRuntimeEntry, 1);
  __ LeaveStubFrame();
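// Specialized allocation stubs for records with exactly two or three
// fields; with no named fields the shape is a known constant, so the field
// values can be stored immediately after the inline allocation.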
void StubCodeCompiler::GenerateAllocateSmallRecordStub(intptr_t num_fields,
                                                       bool has_named_fields) {
  ASSERT(num_fields == 2 || num_fields == 3);
  if ((num_fields > 2) && (value2_reg == kNoRegister)) {
                        &slow_case, distance, result_reg, temp_reg);

  if (!has_named_fields) {
  __ StoreCompressedIntoObjectNoBarrier(
  __ StoreCompressedIntoObjectNoBarrier(
  __ StoreCompressedIntoObjectNoBarrier(
  if (num_fields > 2) {
    __ StoreCompressedIntoObjectNoBarrier(

  __ EnterStubFrame();
  if (has_named_fields) {
    __ PushRegister(shape_reg);
  __ PushRegistersInOrder({value0_reg, value1_reg});
  if (num_fields > 2) {
    __ PushRegister(value2_reg);
  __ CallRuntime(kAllocateSmallRecordRuntimeEntry, 4);
  __ PopRegister(result_reg);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateAllocateRecord2Stub() {
  GenerateAllocateSmallRecordStub(2, /*has_named_fields=*/false);
}

void StubCodeCompiler::GenerateAllocateRecord2NamedStub() {
  GenerateAllocateSmallRecordStub(2, /*has_named_fields=*/true);
}

void StubCodeCompiler::GenerateAllocateRecord3Stub() {
  GenerateAllocateSmallRecordStub(3, /*has_named_fields=*/false);
}

void StubCodeCompiler::GenerateAllocateRecord3NamedStub() {
  GenerateAllocateSmallRecordStub(3, /*has_named_fields=*/true);
}
void StubCodeCompiler::GenerateAllocateUnhandledExceptionStub() {
  auto class_table = thread->isolate_group()->class_table();
  ASSERT(class_table->HasValidClassAt(kUnhandledExceptionCid));
             class_table->At(kUnhandledExceptionCid));

#define TYPED_DATA_ALLOCATION_STUB(clazz)                                     \
  void StubCodeCompiler::GenerateAllocate##clazz##Stub() {                    \
    GenerateAllocateTypedDataArrayStub(kTypedData##clazz##Cid);               \

#undef TYPED_DATA_ALLOCATION_STUB
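// Shared slow-path stubs for common runtime errors. Each comes in two
// variants, with and without FPU registers saved, so callers only pay for
// preserving FPU state when live FPU values must survive the call.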
void StubCodeCompiler::GenerateLateInitializationError(bool with_fpu_regs) {
  auto perform_runtime_call = [&]() {
    __ CallRuntime(kLateFieldNotInitializedErrorRuntimeEntry,
  GenerateSharedStubGeneric(
      with_fpu_regs
          ? target::Thread::
                late_initialization_error_shared_with_fpu_regs_stub_offset()
          : target::Thread::
                late_initialization_error_shared_without_fpu_regs_stub_offset(),
      /*allow_return=*/false, perform_runtime_call);

void StubCodeCompiler::
    GenerateLateInitializationErrorSharedWithoutFPURegsStub() {
  GenerateLateInitializationError(/*with_fpu_regs=*/false);
}

void StubCodeCompiler::GenerateLateInitializationErrorSharedWithFPURegsStub() {
  GenerateLateInitializationError(/*with_fpu_regs=*/true);
}
void StubCodeCompiler::GenerateNullErrorSharedWithoutFPURegsStub() {
      false, &kNullErrorRuntimeEntry,

void StubCodeCompiler::GenerateNullErrorSharedWithFPURegsStub() {
      true, &kNullErrorRuntimeEntry,

void StubCodeCompiler::GenerateNullArgErrorSharedWithoutFPURegsStub() {
      false, &kArgumentNullErrorRuntimeEntry,

void StubCodeCompiler::GenerateNullArgErrorSharedWithFPURegsStub() {
      true, &kArgumentNullErrorRuntimeEntry,

void StubCodeCompiler::GenerateNullCastErrorSharedWithoutFPURegsStub() {
      false, &kNullCastErrorRuntimeEntry,

void StubCodeCompiler::GenerateNullCastErrorSharedWithFPURegsStub() {
      true, &kNullCastErrorRuntimeEntry,

void StubCodeCompiler::GenerateStackOverflowSharedWithoutFPURegsStub() {
      false, &kInterruptOrStackOverflowRuntimeEntry,

void StubCodeCompiler::GenerateStackOverflowSharedWithFPURegsStub() {
      true, &kInterruptOrStackOverflowRuntimeEntry,

void StubCodeCompiler::GenerateRangeErrorSharedWithoutFPURegsStub() {
  GenerateRangeError(/*with_fpu_regs=*/false);
}

void StubCodeCompiler::GenerateRangeErrorSharedWithFPURegsStub() {
  GenerateRangeError(/*with_fpu_regs=*/true);
}

void StubCodeCompiler::GenerateWriteErrorSharedWithoutFPURegsStub() {
  GenerateWriteError(/*with_fpu_regs=*/false);
}

void StubCodeCompiler::GenerateWriteErrorSharedWithFPURegsStub() {
  GenerateWriteError(/*with_fpu_regs=*/true);
}
void StubCodeCompiler::GenerateFrameAwaitingMaterializationStub() {

void StubCodeCompiler::GenerateAsynchronousGapMarkerStub() {

void StubCodeCompiler::GenerateUnknownDartCodeStub() {
  __ EnterStubFrame();

void StubCodeCompiler::GenerateNotLoadedStub() {
  __ EnterStubFrame();
  __ CallRuntime(kNotLoadedRuntimeEntry, 0);
#define EMIT_BOX_ALLOCATION(Name)                                             \
  void StubCodeCompiler::GenerateAllocate##Name##Stub() {                     \
    Label call_runtime;                                                       \
    if (!FLAG_use_slow_path && FLAG_inline_alloc) {                           \
      __ TryAllocate(compiler::Name##Class(), &call_runtime,                  \
                     Assembler::kNearJump, AllocateBoxABI::kResultReg,        \
                     AllocateBoxABI::kTempReg);                               \
    __ Bind(&call_runtime);                                                   \
    __ EnterStubFrame();                                                      \
    __ PushObject(NullObject());                                              \
    __ CallRuntime(kAllocate##Name##RuntimeEntry, 0);                         \
    __ PopRegister(AllocateBoxABI::kResultReg);                               \
    __ LeaveStubFrame();                                                      \

#undef EMIT_BOX_ALLOCATION
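// Boxes an unboxed FPU value: the fast path allocates the box object inline
// and stores the value with the supplied store instruction; the slow path
// boxes via the runtime.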
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    (assembler->*store_value)(
  __ EnterStubFrame();
  __ CallRuntime(runtime_entry, 0);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateBoxDoubleStub() {
                          kBoxDoubleRuntimeEntry,

void StubCodeCompiler::GenerateBoxFloat32x4Stub() {
#if !defined(TARGET_ARCH_RISCV32) && !defined(TARGET_ARCH_RISCV64)
                          kBoxFloat32x4RuntimeEntry,
  __ Stop("Not supported on RISC-V.");

void StubCodeCompiler::GenerateBoxFloat64x2Stub() {
#if !defined(TARGET_ARCH_RISCV32) && !defined(TARGET_ARCH_RISCV64)
                          kBoxFloat64x2RuntimeEntry,
  __ Stop("Not supported on RISC-V.");
void StubCodeCompiler::GenerateDoubleToIntegerStub() {
  __ EnterStubFrame();
  __ CallRuntime(kDoubleToIntegerRuntimeEntry, 1);
  __ LeaveStubFrame();
static void CallDartCoreLibraryFunction(
    Assembler* assembler,
    intptr_t entry_point_offset_in_thread,
    intptr_t function_offset_in_object_store,
    bool uses_args_desc = false) {
  if (FLAG_precompiled_mode) {
                             function_offset_in_object_store);
  if (!uses_args_desc) {
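// Allocates a SuspendState object large enough to hold a copy of the
// suspending function's frame: the fast path bump-allocates from new space
// and writes the header tags, frame capacity, and frame size, while
// tracing, oversized frames, or disabled inline allocation take the slow
// case.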
  if (FLAG_use_slow_path || !FLAG_inline_alloc) {
  NOT_IN_PRODUCT(
      __ MaybeTraceAllocation(kSuspendStateCid, slow_case, temp_reg));

  const intptr_t fixed_size_plus_alignment_padding =
  __ AddImmediate(temp_reg, frame_size_reg, fixed_size_plus_alignment_padding);
  __ AddRegisters(temp_reg, result_reg);
  __ CompareWithMemoryValue(temp_reg,
  __ CheckAllocationCanary(result_reg);
  __ SubRegisters(temp_reg, result_reg);

  if (!FLAG_precompiled_mode) {
  __ AddImmediate(temp_reg, temp_reg,
  __ StoreFieldToOffset(temp_reg, result_reg,

  __ LslImmediate(temp_reg,
  __ Bind(&size_tag_overflow);
  __ LoadImmediate(temp_reg, 0);
  __ OrImmediate(temp_reg, tags);
  __ StoreFieldToOffset(temp_reg, result_reg,
  __ StoreFieldToOffset(frame_size_reg, result_reg,
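// Suspends the current function: copies its frame into a SuspendState
// (allocating or resizing one as needed), then calls the suspend function
// from the core library (for example _SuspendState's await handler) and
// returns its result to the caller. Old-generation SuspendStates are
// remembered so the copied frame's pointers stay visible to the GC.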
void StubCodeCompiler::GenerateSuspendStub(
    bool call_suspend_function,
    bool pass_type_arguments,
    intptr_t suspend_entry_point_offset_in_thread,
    intptr_t suspend_function_offset_in_object_store) {
  Label alloc_slow_case, alloc_done, init_done, resize_suspend_state,
      remember_object, call_dart;

#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
  SPILLS_LR_TO_FRAME({});
  __ SubRegisters(kFrameSize, SPREG);
  __ EnterStubFrame();
  if (pass_type_arguments) {
    __ PushRegister(kTypeArgs);
  __ CompareClassId(kSuspendState, kSuspendStateCid, kTemp);
  if (FLAG_precompiled_mode) {
    __ BranchIf(EQUAL, &init_done);
  Label alloc_suspend_state;
  __ CompareWithMemoryValue(
      FieldAddress(kSuspendState,
  __ StoreFieldToOffset(kFrameSize, kSuspendState,
  __ Jump(&init_done);
  __ Bind(&alloc_suspend_state);
  __ Comment("Allocate SuspendState");
  __ MoveRegister(kFunctionData, kSuspendState);
  __ StoreCompressedIntoObjectNoBarrier(
#if defined(TARGET_ARCH_ARM64) || defined(TARGET_ARCH_RISCV32) ||             \
    defined(TARGET_ARCH_RISCV64)
  __ StoreCompressedIntoObjectNoBarrier(
      FieldAddress(kSuspendState,
  __ StoreCompressedIntoObjectNoBarrier(
      FieldAddress(kSuspendState,

  __ Comment("Save SuspendState to frame");

  __ Comment("Copy frame to SuspendState");
  __ LoadFieldFromOffset(kTemp, kSuspendState,
  __ CompareRegisters(kTemp, kFrameSize);
  if (kSrcFrame == THR) {
    __ PushRegister(THR);
  __ AddImmediate(kDstFrame, kSuspendState,
  __ CopyMemoryWords(kSrcFrame, kDstFrame, kFrameSize, kTemp);
  if (kSrcFrame == THR) {
    __ PopRegister(THR);
  __ StoreFieldToOffset(kTemp, kSuspendState,
  __ LoadFieldFromOffset(kTemp, kSuspendState,
  __ AddRegisters(kTemp, kSuspendState);
  __ LoadFieldFromOffset(
  __ CompareRegisters(kTemp, kSuspendState);
  if (call_suspend_function) {
    __ PushRegistersInOrder({kSuspendState, kArgument});
  __ CompareRegisters(kSuspendState, kTemp);
  if (call_suspend_function) {
    __ Comment("Call suspend Dart function");
    if (pass_type_arguments) {
                              suspend_function_offset_in_object_store,
                              pass_type_arguments);
  __ LeaveStubFrame();
#if !defined(TARGET_ARCH_X64) && !defined(TARGET_ARCH_IA32)
  __ LeaveDartFrame();
#elif defined(TARGET_ARCH_X64)
  if (!FLAG_precompiled_mode) {
#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
  SPILLS_LR_TO_FRAME({});
  SPILLS_LR_TO_FRAME({});

  __ Bind(&alloc_slow_case);
  __ Comment("SuspendState Allocation slow case");
  __ PushRegistersInOrder({kArgument, kFrameSize});
  __ SmiTag(kFrameSize);
  __ PushRegistersInOrder({kFrameSize, kFunctionData});
  __ CallRuntime(kAllocateSuspendStateRuntimeEntry, 2);
  __ PopRegister(kSuspendState);
  __ PopRegister(kFrameSize);
  __ PopRegister(kArgument);
  __ Jump(&alloc_done);

  __ Bind(&resize_suspend_state);
  __ Comment("Resize SuspendState");
  __ PushRegistersInOrder({kArgument, kFrameSize});
  __ SmiTag(kFrameSize);
  __ PushRegistersInOrder({kFrameSize, kSuspendState});
  __ CallRuntime(kAllocateSuspendStateRuntimeEntry, 2);
  __ PopRegister(kSuspendState);
  __ PopRegister(kFrameSize);
  __ PopRegister(kArgument);
  __ Jump(&alloc_done);

  __ Bind(&remember_object);
  __ Comment("Old gen SuspendState slow case");
  if (!call_suspend_function) {
    __ PushRegister(kArgument);
#if defined(TARGET_ARCH_IA32)
  rt.Call(kEnsureRememberedAndMarkingDeferredRuntimeEntry, 2);
  if (!call_suspend_function) {
    __ PopRegister(kArgument);
  __ Jump(&call_dart);
void StubCodeCompiler::GenerateAwaitStub() {
  GenerateSuspendStub(

void StubCodeCompiler::GenerateAwaitWithTypeCheckStub() {
  GenerateSuspendStub(

void StubCodeCompiler::GenerateYieldAsyncStarStub() {
  GenerateSuspendStub(

void StubCodeCompiler::GenerateSuspendSyncStarAtStartStub() {
  GenerateSuspendStub(
          suspend_state_suspend_sync_star_at_start_entry_point_offset(),

void StubCodeCompiler::GenerateSuspendSyncStarAtYieldStub() {
  GenerateSuspendStub(
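// Sets up the suspendable-function state at entry to an async, async*, or
// sync* function by calling the matching core-library init function.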
void StubCodeCompiler::GenerateInitSuspendableFunctionStub(
    intptr_t init_entry_point_offset_in_thread,
    intptr_t init_function_offset_in_object_store) {
  __ EnterStubFrame();
  __ PushRegister(kTypeArgs);
                              init_function_offset_in_object_store,
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateInitAsyncStub() {
  GenerateInitSuspendableFunctionStub(

void StubCodeCompiler::GenerateInitAsyncStarStub() {
  GenerateInitSuspendableFunctionStub(

void StubCodeCompiler::GenerateInitSyncStarStub() {
  GenerateInitSuspendableFunctionStub(
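// Resumes a suspended function: validates the SuspendState, copies the
// saved frame back onto the stack, re-establishes FP, SP, and the pool
// pointer, and jumps to the saved resume pc. When an exception or pending
// deopt must be delivered instead, the runtime is invoked at the resumption
// point.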
void StubCodeCompiler::GenerateResumeStub() {
  __ EnterDartFrame(0);
  const intptr_t param_offset =
  __ CompareClassId(kSuspendState, kSuspendStateCid, kTemp);
  __ LoadFieldFromOffset(kTemp, kSuspendState,
  __ CompareImmediate(kTemp, 0);
  __ LoadFieldFromOffset(kFrameSize, kSuspendState,
  __ MoveRegister(kTemp, kFrameSize);
  __ AddRegisters(kTemp, kSuspendState);
  __ LoadFieldFromOffset(
  __ CompareRegisters(kTemp, kSuspendState);
  if (!FLAG_precompiled_mode) {
  __ MoveRegister(kTemp, kSuspendState);
  __ AddRegisters(kTemp, kFrameSize);
#if !defined(TARGET_ARCH_IA32)
  __ LoadPoolPointer(PP);
  __ SubRegisters(SPREG, kFrameSize);

  __ Comment("Copy frame from SuspendState");
  intptr_t num_saved_regs = 0;
  if (kSrcFrame == THR) {
    __ PushRegister(THR);
  __ AddImmediate(kSrcFrame, kSuspendState,
  __ CopyMemoryWords(kSrcFrame, kDstFrame, kFrameSize, kTemp);
  if (kSrcFrame == THR) {
    __ PopRegister(THR);

  __ Comment("Transfer control");
  __ LoadFieldFromOffset(kResumePc, kSuspendState,
#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_IA32)
  static_assert((kException != CODE_REG) && (kException != PP),
                "should not interfere");
  if (!FLAG_precompiled_mode) {
  __ CompareWithMemoryValue(
#if !defined(PRODUCT)
  __ LoadIsolate(kTemp);
  __ LoadFromOffset(kTemp, kTemp,
  __ CompareImmediate(kTemp, 0);

  __ Comment("Call runtime to throw exception or deopt");
  static_assert((kStackTrace != CODE_REG) && (kStackTrace != PP),
                "should not interfere");
  __ SetReturnAddress(kResumePc);
  if (!FLAG_precompiled_mode) {
#if !defined(TARGET_ARCH_IA32)
  __ set_constant_pool_allowed(false);
  __ EnterStubFrame();
  __ PushRegistersInOrder({kException, kStackTrace});
  __ CallRuntime(kResumeFrameRuntimeEntry, 2);
  if (FLAG_precompiled_mode) {
  __ LeaveStubFrame();
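// Returns from a suspended function: drops the Dart frame and calls the
// core-library return function (returnAsync and friends) that completes
// the awaiter with the returned value.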
void StubCodeCompiler::GenerateReturnStub(
    intptr_t return_entry_point_offset_in_thread,
    intptr_t return_function_offset_in_object_store,
    intptr_t return_stub_offset_in_thread) {
#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
  SPILLS_LR_TO_FRAME({});
  __ LeaveDartFrame();
  if (!FLAG_precompiled_mode) {
    __ LoadFromOffset(CODE_REG, THR, return_stub_offset_in_thread);
  __ EnterStubFrame();
                              return_function_offset_in_object_store);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateReturnAsyncStub() {

void StubCodeCompiler::GenerateReturnAsyncNotFutureStub() {
          suspend_state_return_async_not_future_entry_point_offset(),

void StubCodeCompiler::GenerateReturnAsyncStarStub() {
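// Routes an exception thrown by a suspendable function: if there is a live
// SuspendState, the core-library exception handler completes the
// corresponding future or stream with the error; otherwise the exception is
// rethrown to the caller.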
void StubCodeCompiler::GenerateAsyncExceptionHandlerStub() {
  Label rethrow_exception;

#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
  SPILLS_LR_TO_FRAME({});
  __ BranchIf(EQUAL, &rethrow_exception);
  __ LeaveDartFrame();
  if (!FLAG_precompiled_mode) {
  __ EnterStubFrame();
  __ PushRegistersInOrder(
  __ LeaveStubFrame();

#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
  SPILLS_LR_TO_FRAME({});
  __ Comment("Rethrow exception");
  __ Bind(&rethrow_exception);
  __ LeaveDartFrame();
  if (!FLAG_precompiled_mode) {
  __ EnterStubFrame();
  __ CallRuntime(kReThrowRuntimeEntry, 3);
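// Clones a SuspendState, copying the saved frame and the then/error
// callback fields into a freshly allocated object; used when a suspended
// body must be resumable from more than one consumer.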
void StubCodeCompiler::GenerateCloneSuspendStateStub() {
  Label alloc_slow_case;
  __ CompareImmediate(kTemp, 0);
  __ LoadFieldFromOffset(kFrameSize, kSource,
  __ LoadCompressedFieldFromOffset(
  __ StoreCompressedIntoObjectNoBarrier(
  __ LoadCompressedFieldFromOffset(
  __ StoreCompressedIntoObjectNoBarrier(
  __ LoadCompressedFieldFromOffset(
  __ StoreCompressedIntoObjectNoBarrier(
  if (kSrcFrame == THR) {
    __ PushRegister(THR);
  __ AddImmediate(kSrcFrame, kSource, offset);
  __ AddImmediate(kDstFrame, kDestination, offset);
  __ CopyMemoryWords(kSrcFrame, kDstFrame, kFrameSize, kTemp);
  if (kSrcFrame == THR) {
    __ PopRegister(THR);
  __ LoadFieldFromOffset(kTemp, kDestination,
  __ AddRegisters(kTemp, kDestination);
  __ StoreFieldToOffset(
      kDestination, kTemp,

  __ Bind(&alloc_slow_case);
  __ Comment("CloneSuspendState slow case");
  __ EnterStubFrame();
  __ PushRegister(kSource);
  __ CallRuntime(kCloneSuspendStateRuntimeEntry, 1);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateFfiAsyncCallbackSendStub() {
  __ EnterStubFrame();
  __ CallRuntime(kFfiAsyncCallbackSendRuntimeEntry, 1);
  __ LeaveStubFrame();

  ASSERT(pc_descriptors_list_ != nullptr);
      UntaggedPcDescriptors::kBSSRelocation, pc_offset,
      TokenPosition::kNoSource,
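// Subtype-test-cache (STC) search helpers. A cache entry records up to
// seven inputs (instance class id or closure signature, instance type
// arguments, instantiator, function, parent-function, and delayed type
// arguments, plus the destination type); the loop body emitted below
// compares only the first n inputs the cache was created with.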
#if !defined(TARGET_ARCH_IA32)

static void GenerateSubtypeTestCacheLoopBody(
    Assembler* assembler, int n, Register null_reg, Register cache_entry_reg,
    Register instance_cid_or_sig_reg, Register instance_type_args_reg,
    Register parent_fun_type_args_reg, Register delayed_type_args_reg,
    Label* found, Label* not_found, Label* next_iteration) {
  __ LoadAcquireFromOffset(
  __ CompareWithMemoryValue(
      instance_type_args_reg,
  __ CompareWithMemoryValue(
  __ CompareWithMemoryValue(
  __ CompareWithMemoryValue(
      parent_fun_type_args_reg,
  __ CompareWithMemoryValue(
      delayed_type_args_reg,
  __ CompareWithMemoryValue(
      : assembler(assembler), reg_(reg), depth_(depth), alt_(alt) {
    __ LoadFromStack(alt_, depth_);
    __ StoreToStack(alt_, depth_);
  const intptr_t depth_;
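// StackRegisterScope lets a value that was spilled to a stack slot be used
// through an alternate register for the duration of the scope: the slot is
// loaded into the register on entry and stored back on exit, which gives
// the hash-based search below enough registers for its loop state.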
  const intptr_t kTestEntryLengthLog2 =

  __ Comment("Hash cache traversal");
  __ Comment("Calculating number of entries");
  __ Comment("Calculating starting entry address");
  __ AddImmediate(cache_entry_reg,
  __ MoveRegister(cache_contents_size_reg, cache_entry_reg);
  __ PushRegister(cache_entry_reg);
  __ Comment("Calculating end of entries address");

  intptr_t kHashStackElements = 0;
  kProbeDistanceDepth = 0;
  kHashStackElements++;
  kProbeDistanceDepth++;
  kHashStackElements++;
  kCacheContentsSizeDepth = 0;
  kProbeDistanceDepth++;
  kCacheContentsSizeDepth++;
  kHashStackElements++;
  kCacheArrayEndDepth = 0;

  Label found, not_found;
    __ Comment("Loading %s type hash", name);
    __ LoadFromSlot(dst, src, Slot::AbstractType_hash());
    __ CompareImmediate(dst, 0);
    __ BranchIf(EQUAL, &not_found);
    __ Comment("Loading %s type arguments hash", name);
    __ CompareRegisters(src, null_reg);
    __ LoadFromSlot(dst, src, Slot::TypeArguments_hash());
    __ CompareImmediate(dst, 0);
    __ BranchIf(EQUAL, &not_found);

  __ Comment("Hash the entry inputs");
  __ MoveRegister(cache_entry_reg, instance_cid_or_sig_reg);
  __ SmiUntag(cache_entry_reg);
  get_abstract_type_hash(cache_entry_reg, instance_cid_or_sig_reg,
                         "closure signature");

  __ Comment("Converting hash to probe entry index");
  __ AddImmediate(probe_distance_reg, -1);
  __ AndRegisters(cache_entry_reg, probe_distance_reg);

  __ Comment("Set initial probe distance");
  __ LoadImmediate(probe_distance_reg,

  __ Comment("Converting probe entry index to probe entry address");
  __ LslImmediate(cache_entry_reg,
  __ AddRegisters(cache_entry_reg, cache_contents_size_reg);

  __ Comment("Set negated cache contents size");
  __ SubRegisters(cache_contents_size_reg, cache_entry_end_reg);
  __ LoadFromStack(TMP, kCacheArrayEndDepth);
  __ SubRegisters(cache_contents_size_reg, TMP);

  Label loop, next_iteration;
  GenerateSubtypeTestCacheLoopBody(
      assembler, n, null_reg, cache_entry_reg, instance_cid_or_sig_reg,
      instance_type_args_reg, parent_fun_type_args_reg, delayed_type_args_reg,
      &found, &not_found, &next_iteration);
  __ Bind(&next_iteration);
  __ Comment("Move to next entry");
  __ AddRegisters(cache_entry_reg, probe_distance_reg);
  __ Comment("Adjust probe distance");
  __ AddImmediate(probe_distance_reg,

  __ Comment("Check for leaving array");
  __ CompareRegisters(cache_entry_reg, cache_entry_end_reg);
  __ CompareToStack(cache_entry_reg, kCacheArrayEndDepth);
  __ Comment("Wrap around to start of entries");
  __ AddRegisters(cache_entry_reg, cache_contents_size_reg);

  __ Comment("Hash found");
  __ Drop(kHashStackElements);
  gen_found(assembler, n);
  __ Comment("Hash not found");
  __ Drop(kHashStackElements);
  gen_not_found(assembler, n);
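// Linear traversal used while the cache is small: entries are scanned in
// order until a match or the sentinel terminating the cache is found.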
  __ Comment("Linear cache traversal");
  __ AddImmediate(cache_entry_reg,

  Label found, not_found, loop, next_iteration;
  GenerateSubtypeTestCacheLoopBody(
      assembler, n, null_reg, cache_entry_reg, instance_cid_or_sig_reg,
      instance_type_args_reg, parent_fun_type_args_reg, delayed_type_args_reg,
      &found, &not_found, &next_iteration);
  __ Bind(&next_iteration);
  __ Comment("Next iteration");

  __ Comment("Linear found");
  gen_found(assembler, n);
  __ Comment("Linear not found");
  gen_not_found(assembler, n);
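// Top-level STC search: loads the inputs from the instance (closure
// signature and type-argument vectors for closures, class id and instance
// type arguments otherwise), then dispatches to the linear or hash-based
// traversal depending on the cache's size.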
void StubCodeCompiler::GenerateSubtypeTestCacheSearch(
    Assembler* assembler,
                 Slot::SubtypeTestCache_num_inputs());
  __ LoadAcquireCompressedFromOffset(

  Label initialized, not_closure;
  __ CompareImmediate(instance_cid_or_sig_reg, kClosureCid);

  __ Comment("Closure");
  __ LoadCompressed(instance_cid_or_sig_reg,
  __ LoadCompressed(instance_cid_or_sig_reg,
                    FieldAddress(instance_cid_or_sig_reg,
      instance_type_args_reg,
      parent_fun_type_args_reg,
      delayed_type_args_reg,

  __ Comment("Non-Closure");
  Label has_no_type_arguments;
  __ MoveRegister(instance_type_args_reg, null_reg);
  __ LoadFieldFromOffset(
  __ LoadIndexedCompressed(instance_type_args_reg,
  __ Bind(&has_no_type_arguments);
  __ Comment("No type arguments");
  __ SmiTag(instance_cid_or_sig_reg);
  __ MoveRegister(parent_fun_type_args_reg, null_reg);
  __ MoveRegister(delayed_type_args_reg, null_reg);
                 Slot::Array_length());
  GenerateSubtypeTestCacheLinearSearch(
      assembler, n, null_reg, cache_entry_reg, instance_cid_or_sig_reg,
      instance_type_args_reg, parent_fun_type_args_reg, delayed_type_args_reg,
      gen_found, gen_not_found);
  GenerateSubtypeTestCacheHashSearch(
      assembler, n, null_reg, cache_entry_reg, instance_cid_or_sig_reg,
      instance_type_args_reg, parent_fun_type_args_reg, delayed_type_args_reg,
      cache_entry_end_reg, cache_contents_size_reg, probe_distance_reg,
      gen_found, gen_not_found);
void StubCodeCompiler::GenerateSubtype1TestCacheStub() {
  GenerateSubtypeNTestCacheStub(assembler, 1);
}

void StubCodeCompiler::GenerateSubtype2TestCacheStub() {
  GenerateSubtypeNTestCacheStub(assembler, 2);
}

void StubCodeCompiler::GenerateSubtype3TestCacheStub() {
  GenerateSubtypeNTestCacheStub(assembler, 3);
}

void StubCodeCompiler::GenerateSubtype4TestCacheStub() {
  GenerateSubtypeNTestCacheStub(assembler, 4);
}

void StubCodeCompiler::GenerateSubtype6TestCacheStub() {
  GenerateSubtypeNTestCacheStub(assembler, 6);
}

void StubCodeCompiler::GenerateSubtype7TestCacheStub() {
  GenerateSubtypeNTestCacheStub(assembler, 7);
}