12#define SHOULD_NOT_INCLUDE_RUNTIME
31 intptr_t slots_from_fp = target::frame_layout.param_end_from_fp + 1;
34 if (reg == cpu_register)
break;
// Stub that initializes a non-late static field.
// NOTE(review): interior lines are elided in this chunk; only the runtime
// call is visible — confirm the full body against the complete source file.
42void StubCodeCompiler::GenerateInitStaticFieldStub() {
// Delegate initialization to the runtime; one argument (the field).
 46 __ CallRuntime(kInitStaticFieldRuntimeEntry, 1);
// Stub that runs the initializer function of a late static field and stores
// its result into the field's static slot.  `is_final` presumably selects the
// late-final check (field must not be assigned during its own initialization)
// — TODO(review): elided lines hide its exact use; confirm in the full file.
53void StubCodeCompiler::GenerateInitLateStaticFieldStub(
    bool is_final) {
 61 __ Comment(
    "Calling initializer function");
// Preserve the field across the call, then fetch its initializer function.
 62 __ PushRegister(kFieldReg);
 63 __ LoadCompressedFieldFromOffset(
 64 FUNCTION_REG, kFieldReg, target::Field::initializer_function_offset());
// JIT mode also needs the Code object loaded (elided here); then call the
// function through its entry point.
 65 if (!FLAG_precompiled_mode) {
 67 target::Function::code_offset());
 71 __ Call(FieldAddress(
    FUNCTION_REG, target::Function::entry_point_offset()));
// Restore the field and compute the address of its static storage slot.
 73 __ PopRegister(kFieldReg);
 74 __ LoadStaticFieldAddress(kAddressReg, kFieldReg, kScratchReg);
 76 Label throw_exception;
 78 __ Comment(
    "Checking that initializer did not set late final field");
// Read the slot's current value, then store the initializer's result.
 79 __ LoadFromOffset(kScratchReg, kAddressReg, 0);
 84 __ StoreToOffset(kResultReg, kAddressReg, 0);
 89#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
// Keep the assembler's link-register bookkeeping consistent on ARM targets.
 92 __ set_lr_state(compiler::LRState::OnEntry().EnterFrame());
 94 __ Bind(&throw_exception);
// Error path: the late final field was assigned during initialization.
 96 __ PushRegister(kFieldReg);
 97 __ CallRuntime(kLateFieldAssignedDuringInitializationErrorRuntimeEntry,
// Non-final variant: dispatches to the shared late-static-field stub
// generator with is_final == false.
103void StubCodeCompiler::GenerateInitLateStaticFieldStub() {
 104 GenerateInitLateStaticFieldStub(
    false);
// Final variant: dispatches to the shared late-static-field stub generator
// with is_final == true.
107void StubCodeCompiler::GenerateInitLateFinalStaticFieldStub() {
 108 GenerateInitLateStaticFieldStub(
    true);
// Stub that initializes a non-late instance field via the runtime.
// NOTE(review): the register list pushed here is elided in this chunk; the
// runtime entry receives 2 arguments — confirm against the full file.
111void StubCodeCompiler::GenerateInitInstanceFieldStub() {
 114 __ PushRegistersInOrder(
 116 __ CallRuntime(kInitInstanceFieldRuntimeEntry, 2);
// Stub that runs the initializer function of a late instance field and stores
// the result into the instance.  `is_final` presumably selects the late-final
// assigned-during-initialization check — TODO(review): elided lines hide its
// exact use; confirm in the full file.
123void StubCodeCompiler::GenerateInitLateInstanceFieldStub(
    bool is_final) {
// Save field + instance (instance twice: once as call argument, once to
// survive the call — NOTE(review): elided lines, verify).
 132 __ PushRegistersInOrder({kFieldReg, kInstanceReg, kInstanceReg});
 136 "Result is a return value from initializer");
// Fetch the field's initializer function and call it through its entry point.
 138 __ LoadCompressedFieldFromOffset(
 140 target::Field::initializer_function_offset());
 141 if (!FLAG_precompiled_mode) {
 143 target::Function::code_offset());
 147 __ Call(FieldAddress(
    FUNCTION_REG, target::Function::entry_point_offset()));
 150 __ PopRegisterPair(kInstanceReg, kFieldReg);
// Turn the field's host offset/id into an in-instance address for the slot.
 151 __ LoadCompressedFieldFromOffset(
 152 kScratchReg, kFieldReg, target::Field::host_offset_or_field_id_offset());
 153#if defined(DART_COMPRESSED_POINTERS)
// Untag/retag normalizes the Smi-encoded offset under compressed pointers.
 156 __ SmiUntag(kScratchReg);
 157 __ SmiTag(kScratchReg);
 159 __ LoadCompressedFieldAddressForRegOffset(kAddressReg, kInstanceReg,
 162 Label throw_exception;
// Read the current slot value, then store the result with the appropriate
// write barrier (IA32 has no compressed-pointer store variant).
 164 __ LoadCompressed(kScratchReg, Address(kAddressReg, 0));
 169#if defined(TARGET_ARCH_IA32)
 173 __ StoreIntoObject(kInstanceReg, Address(kAddressReg, 0), kScratchReg);
 175 __ StoreCompressedIntoObject(kInstanceReg, Address(kAddressReg, 0),
 183#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
// Keep the assembler's link-register bookkeeping consistent on ARM targets.
 186 __ set_lr_state(compiler::LRState::OnEntry().EnterFrame());
 188 __ Bind(&throw_exception);
// Error path: the late final field was assigned during initialization.
 190 __ PushRegister(kFieldReg);
 191 __ CallRuntime(kLateFieldAssignedDuringInitializationErrorRuntimeEntry,
// Non-final variant: dispatches to the shared late-instance-field stub
// generator with is_final == false.
197void StubCodeCompiler::GenerateInitLateInstanceFieldStub() {
 198 GenerateInitLateInstanceFieldStub(
    false);
// Final variant: dispatches to the shared late-instance-field stub generator
// with is_final == true.
201void StubCodeCompiler::GenerateInitLateFinalInstanceFieldStub() {
 202 GenerateInitLateInstanceFieldStub(
    true);
// Stub that throws an exception: forwards one argument (the exception
// object, pushed in elided lines) to the Throw runtime entry.
205void StubCodeCompiler::GenerateThrowStub() {
 209 __ CallRuntime(kThrowRuntimeEntry, 1);
// Stub that rethrows an exception: pushes its arguments (register list
// elided here) and calls the ReThrow runtime entry with 3 arguments.
213void StubCodeCompiler::GenerateReThrowStub() {
 216 __ PushRegistersInOrder(
 219 __ CallRuntime(kReThrowRuntimeEntry, 3);
// Stub for failed boolean assertions: reports the non-bool value (argument
// setup elided) via the NonBoolTypeError runtime entry.
223void StubCodeCompiler::GenerateAssertBooleanStub() {
 227 __ CallRuntime(kNonBoolTypeErrorRuntimeEntry, 1);
// Stub for subtype assertions: calls the SubtypeCheck runtime entry with
// 5 arguments (argument pushes elided in this chunk).
231void StubCodeCompiler::GenerateAssertSubtypeStub() {
 238 __ CallRuntime(kSubtypeCheckRuntimeEntry, 5);
// Stub for assignability (type) checks; funnels 7 arguments to the TypeCheck
// runtime entry.  Non-IA32 targets push a null object directly; IA32 pushes
// values from the caller frame via EBP-relative addressing.
244void StubCodeCompiler::GenerateAssertAssignableStub() {
 245#if !defined(TARGET_ARCH_IA32)
 249 __ PushObject(Object::null_object());
 257 __ pushl(Address(
    EBP, target::kWordSize *
 262 __ CallRuntime(kTypeCheckRuntimeEntry, 7);
278void StubCodeCompiler::GenerateInstantiateTypeArgumentsStub() {
290 "Must handle possibility of inst tav reg being spilled");
293 "Must handle possibility of function tav reg being spilled");
297 auto check_entry = [&](compiler::Label* found, compiler::Label* not_found) {
298 __ Comment(
"Check cache entry");
302 "sentinel is not same index as instantiator type args");
303 __ LoadAcquireCompressedFromOffset(
306 target::kCompressedWordSize);
310 compiler::Label
next;
316 compiler::Address(kEntryReg,
318 target::kCompressedWordSize));
326 __ LoadAcquireCompressedFromOffset(
332 target::Array::data_offset());
334 compiler::Label linear_cache_loop, hash_cache_search, cache_hit, call_runtime;
340 Slot::Array_length());
344#if defined(TARGET_ARCH_IA32)
350 __ BranchIf(
GREATER, &hash_cache_search);
353 __ Comment(
"Check linear cache");
356 target::kCompressedWordSize);
357 __ Bind(&linear_cache_loop);
358 check_entry(&cache_hit, &call_runtime);
360 target::kCompressedWordSize);
363#if !defined(TARGET_ARCH_IA32)
364 __ Bind(&hash_cache_search);
365 __ Comment(
"Check hash-based cache");
367 compiler::Label pop_before_success, pop_before_failure;
368 if (!saved_registers.IsEmpty()) {
369 __ Comment(
"Spills due to register pressure");
370 __ PushRegisters(saved_registers);
373 __ Comment(
"Calculate address of first entry");
378 __ Comment(
"Calculate probe mask");
379 __ LoadAcquireCompressedFromOffset(
394 Label is_not_null,
done;
399 __ Bind(&is_not_null);
400 __ LoadFromSlot(dst, src, Slot::TypeArguments_hash());
403 __ BranchIfZero(dst, &pop_before_failure);
407 __ Comment(
"Calculate initial probe from type argument vector hashes");
423 compiler::Label loop;
425 __ Comment(
"Loop over hash cache entries");
429 target::kCompressedWordSize);
431 check_entry(&pop_before_success, &pop_before_failure);
441 __ Bind(&pop_before_failure);
442 if (!saved_registers.IsEmpty()) {
443 __ Comment(
"Restore spilled registers on cache miss");
444 __ PopRegisters(saved_registers);
450 __ Bind(&call_runtime);
451 __ Comment(
"Cache miss");
453#if !defined(DART_ASSEMBLER_HAS_NULL_REG)
454 __ PushObject(Object::null_object());
456#if defined(TARGET_ARCH_ARM)
461 "Should be ordered to push arguments with one instruction");
463 __ PushRegistersInOrder({
464#if defined(DART_ASSEMBLER_HAS_NULL_REG)
471 __ CallRuntime(kInstantiateTypeArgumentsRuntimeEntry, 3);
477#if !defined(TARGET_ARCH_IA32)
478 __ Bind(&pop_before_success);
479 if (!saved_registers.IsEmpty()) {
480 __ Comment(
"Restore spilled registers on cache hit");
481 __ PopRegisters(saved_registers);
486 __ Comment(
"Cache hit");
489 compiler::Address(kEntryReg,
491 target::kCompressedWordSize));
495void StubCodeCompiler::
496 GenerateInstantiateTypeArgumentsMayShareInstantiatorTAStub() {
501 compiler::Label cache_lookup;
502 __ LoadCompressedSmi(
505 target::TypeArguments::nullability_offset()));
506 __ LoadCompressedSmi(
509 target::TypeArguments::nullability_offset()));
510 __ AndRegisters(kScratch2Reg, kScratch1Reg);
511 __ CompareRegisters(kScratch2Reg, kScratch1Reg);
517 __ Bind(&cache_lookup);
518 GenerateInstantiateTypeArgumentsStub();
521void StubCodeCompiler::
522 GenerateInstantiateTypeArgumentsMayShareFunctionTAStub() {
527 compiler::Label cache_lookup;
528 __ LoadCompressedSmi(
531 target::TypeArguments::nullability_offset()));
532 __ LoadCompressedSmi(
535 target::TypeArguments::nullability_offset()));
536 __ AndRegisters(kScratch2Reg, kScratch1Reg);
537 __ CompareRegisters(kScratch2Reg, kScratch1Reg);
543 __ Bind(&cache_lookup);
544 GenerateInstantiateTypeArgumentsStub();
549 __ PushObject(Object::null_object());
553 __ CallRuntime(kInstantiateTypeRuntimeEntry, 3);
562 bool is_function_parameter) {
563 Label runtime_call, return_dynamic, type_parameter_value_is_not_type;
565 if (is_function_parameter) {
567 TypeArguments::null_object());
568 __ BranchIf(
EQUAL, &return_dynamic);
569 __ LoadFieldFromOffset(
574 target::TypeArguments::types_offset(),
578 TypeArguments::null_object());
579 __ BranchIf(
EQUAL, &return_dynamic);
580 __ LoadFieldFromOffset(
585 target::TypeArguments::types_offset(),
592 switch (nullability) {
597 __ CompareAbstractTypeNullabilityWith(
605 __ CompareAbstractTypeNullabilityWith(
609 __ BranchIf(
EQUAL, &runtime_call);
614 __ Bind(&return_dynamic);
618 __ Bind(&runtime_call);
622void StubCodeCompiler::
623 GenerateInstantiateTypeNonNullableClassTypeParameterStub() {
628void StubCodeCompiler::GenerateInstantiateTypeNullableClassTypeParameterStub() {
633void StubCodeCompiler::GenerateInstantiateTypeLegacyClassTypeParameterStub() {
638void StubCodeCompiler::
639 GenerateInstantiateTypeNonNullableFunctionTypeParameterStub() {
644void StubCodeCompiler::
645 GenerateInstantiateTypeNullableFunctionTypeParameterStub() {
650void StubCodeCompiler::
651 GenerateInstantiateTypeLegacyFunctionTypeParameterStub() {
656void StubCodeCompiler::GenerateInstantiateTypeStub() {
// Stub implementing `instanceof` checks: calls the Instanceof runtime entry
// with 5 arguments (argument setup elided in this chunk).
660void StubCodeCompiler::GenerateInstanceOfStub() {
 667 __ CallRuntime(kInstanceofRuntimeEntry, 5);
681 __ LoadClassIdMayBeSmi(scratch_reg, type_reg);
682 __ CompareImmediate(scratch_reg, kTypeParameterCid);
683 __ BranchIf(
EQUAL, &is_type_param_or_type_or_function_type,
685 __ CompareImmediate(scratch_reg, kTypeCid);
686 __ BranchIf(
EQUAL, &is_type_param_or_type_or_function_type,
688 __ CompareImmediate(scratch_reg, kFunctionTypeCid);
689 __ BranchIf(
EQUAL, &is_type_param_or_type_or_function_type,
691 __ Stop(
"not a type or function type or type parameter");
692 __ Bind(&is_type_param_or_type_or_function_type);
710void StubCodeCompiler::GenerateTypeIsTopTypeForSubtypingStub() {
719 const Register output_reg = scratch1_reg;
720#if defined(TARGET_ARCH_IA32)
726 __ PushRegister(scratch2_reg);
730 static_assert(scratch1_reg != scratch2_reg,
731 "both scratch registers are the same");
738 __ Bind(&check_top_type);
743 __ CompareClassId(scratch1_reg, kTypeCid, scratch2_reg);
748 __ LoadTypeClassId(scratch2_reg, scratch1_reg);
754 __ CompareImmediate(scratch2_reg, kFutureOrCid);
756 __ CompareImmediate(scratch2_reg, kInstanceCid);
759 __ CompareAbstractTypeNullabilityWith(
763 __ Bind(&is_top_type);
764 __ LoadImmediate(output_reg, 0);
766#if defined(TARGET_ARCH_IA32)
768 __ PopRegister(scratch2_reg);
772 __ Bind(&unwrap_future_or);
773 __ LoadCompressedField(
776 compiler::target::Type::arguments_offset()));
777 __ CompareObject(scratch2_reg, Object::null_object());
780 __ LoadCompressedField(
783 scratch2_reg, compiler::target::TypeArguments::type_at_offset(0)));
804void StubCodeCompiler::GenerateNullIsAssignableToTypeStub() {
814 const Register kOutputReg = kCurrentTypeReg;
815#if defined(TARGET_ARCH_IA32)
821 __ PushRegister(kScratchReg);
825 static_assert(kCurrentTypeReg != kScratchReg,
826 "code assumes distinct scratch registers");
828 compiler::Label is_assignable,
done;
835 compiler::Label check_null_assignable;
838 __ Bind(&check_null_assignable);
842 compiler::Label is_not_type;
843 __ CompareClassId(kCurrentTypeReg, kTypeCid, kScratchReg);
845 __ CompareAbstractTypeNullabilityWith(
852 __ LoadTypeClassId(kScratchReg, kCurrentTypeReg);
853 __ CompareImmediate(kScratchReg, kFutureOrCid);
855 __ LoadCompressedField(
857 compiler::FieldAddress(kCurrentTypeReg,
858 compiler::target::Type::arguments_offset()));
859 __ CompareObject(kScratchReg, Object::null_object());
862 __ BranchIf(
EQUAL, &is_assignable);
863 __ LoadCompressedField(
865 compiler::FieldAddress(
866 kScratchReg, compiler::target::TypeArguments::type_at_offset(0)));
868 __ Bind(&is_not_type);
871 __ CompareAbstractTypeNullabilityWith(
877 auto handle_case = [&](
Register tav) {
880 auto const kIndexReg = kCurrentTypeReg;
885 __ LoadFieldFromOffset(kIndexReg, kCurrentTypeReg,
886 target::TypeParameter::index_offset(),
888 __ LoadIndexedCompressed(kCurrentTypeReg, tav,
889 target::TypeArguments::types_offset(), kIndexReg);
890 __ Jump(&check_null_assignable);
893 Label function_type_param;
895 Slot::AbstractType_flags());
896 __ BranchIfBit(kScratchReg,
897 target::UntaggedTypeParameter::kIsFunctionTypeParameterBit,
900 __ Bind(&function_type_param);
901#if defined(TARGET_ARCH_IA32)
908 __ Bind(&is_assignable);
909 __ LoadImmediate(kOutputReg, 0);
911#if defined(TARGET_ARCH_IA32)
913 __ PopRegister(kScratchReg);
918#if !defined(TARGET_ARCH_IA32)
// Default type-test stub: loads the slow type-test stub's Code object from
// the Thread (load elided) and tail-jumps to its entry point.
938void StubCodeCompiler::GenerateDefaultTypeTestStub() {
 940 target::Thread::slow_type_test_stub_offset());
 941 __ Jump(FieldAddress(
    CODE_REG, target::Code::entry_point_offset()));
// Nullable variant of the default type-test stub: (after a null check elided
// here) tail-jumps to the slow type-test stub's entry point.
945void StubCodeCompiler::GenerateDefaultNullableTypeTestStub() {
 953 target::Thread::slow_type_test_stub_offset());
 954 __ Jump(FieldAddress(
    CODE_REG, target::Code::entry_point_offset()));
960void StubCodeCompiler::GenerateTopTypeTypeTestStub() {
964void StubCodeCompiler::GenerateUnreachableTypeTestStub() {
977 auto handle_case = [&](
Register tav) {
985 target::TypeParameter::index_offset(),
988 target::TypeArguments::types_offset(),
992 target::AbstractType::type_test_stub_entry_point_offset()));
995 Label function_type_param;
997 Slot::AbstractType_flags());
999 target::UntaggedTypeParameter::kIsFunctionTypeParameterBit,
1002 __ Bind(&function_type_param);
1008void StubCodeCompiler::GenerateNullableTypeParameterTypeTestStub() {
1012void StubCodeCompiler::GenerateTypeParameterTypeTestStub() {
1025 __ CallRuntime(kTypeCheckRuntimeEntry, 7);
1036void StubCodeCompiler::GenerateLazySpecializeTypeTestStub() {
1038 target::Thread::lazy_specialize_type_test_stub_offset());
1039 __ EnterStubFrame();
1041 __ LeaveStubFrame();
1046void StubCodeCompiler::GenerateLazySpecializeNullableTypeTestStub() {
1053 target::Thread::lazy_specialize_type_test_stub_offset());
1054 __ EnterStubFrame();
1056 __ LeaveStubFrame();
1062void StubCodeCompiler::GenerateSlowTypeTestStub() {
1063 Label
done, call_runtime;
1065 if (!FLAG_precompiled_mode) {
1067 target::Thread::slow_type_test_stub_offset());
1069 __ EnterStubFrame();
1073 __ BranchIf(
EQUAL, &call_runtime);
1076 Label call_2, call_3, call_4, call_6;
1077 __ Comment(
"Check number of STC inputs");
1079 Slot::SubtypeTestCache_num_inputs());
1091 __ Comment(
"Call 7 input STC check");
1101 __ Comment(
"Call 6 input STC check");
1111 __ Comment(
"Call 4 input STC check");
1121 __ Comment(
"Call 3 input STC check");
1131 __ Comment(
"Call 2 input STC check");
1139 __ Bind(&call_runtime);
1140 __ Comment(
"Call runtime");
1146 __ LeaveStubFrame();
1151#define GENERATE_BREAKPOINT_STUB(Name) \
1152 void StubCodeCompiler::Generate##Name##Stub() { \
1158#undef GENERATE_BREAKPOINT_STUB
1170void StubCodeCompiler::GenerateAllocateClosureStub(
1171 bool has_instantiator_type_args,
1173 const intptr_t instance_size =
1174 target::RoundedAllocationSize(target::Closure::InstanceSize());
1177 if (!FLAG_use_slow_path && FLAG_inline_alloc) {
1179 __ Comment(
"Inline allocation of uninitialized closure");
1186 __ TryAllocateObject(kClosureCid, instance_size, &slow_case, distance,
1190 __ Comment(
"Inline initialization of allocated closure");
1196 if (has_instantiator_type_args) {
1199 Slot::Closure_instantiator_type_arguments());
1203 Slot::Closure_instantiator_type_arguments());
1207 Slot::Closure_function_type_arguments());
1211 Slot::Closure_delayed_type_arguments());
1215 Slot::Closure_function());
1218 Slot::Closure_context());
1221 Slot::Closure_hash());
1226 Slot::Closure_delayed_type_arguments());
1228#if defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
1229 if (FLAG_precompiled_mode) {
1236 Slot::Function_entry_point());
1239 Slot::Closure_entry_point());
1246 __ Bind(&slow_case);
1249 __ Comment(
"Closure allocation via runtime");
1250 __ EnterStubFrame();
1252 __ PushRegistersInOrder(
1254 if (has_instantiator_type_args) {
1264 __ CallRuntime(kAllocateClosureRuntimeEntry, 4);
1265 if (has_instantiator_type_args) {
1276 __ LeaveStubFrame();
// Closure allocation, no instantiator type arguments (first flag false;
// second argument elided in this chunk).
1282void StubCodeCompiler::GenerateAllocateClosureStub() {
 1283 GenerateAllocateClosureStub(
    false,
// Generic closure allocation, no instantiator type arguments (first flag
// false; second argument elided in this chunk).
1287void StubCodeCompiler::GenerateAllocateClosureGenericStub() {
 1288 GenerateAllocateClosureStub(
    false,
// Closure allocation with instantiator type arguments (first flag true;
// second argument elided in this chunk).
1292void StubCodeCompiler::GenerateAllocateClosureTAStub() {
 1293 GenerateAllocateClosureStub(
    true,
// Generic closure allocation with instantiator type arguments (first flag
// true; second argument elided in this chunk).
1297void StubCodeCompiler::GenerateAllocateClosureTAGenericStub() {
 1298 GenerateAllocateClosureStub(
    true,
1305void StubCodeCompiler::GenerateAllocateGrowableArrayStub() {
1306#if defined(TARGET_ARCH_IA32)
1312 const intptr_t instance_size = target::RoundedAllocationSize(
1313 target::GrowableObjectArray::InstanceSize());
1315 if (!FLAG_use_slow_path && FLAG_inline_alloc) {
1317 __ Comment(
"Inline allocation of GrowableList");
1318 __ TryAllocateObject(kGrowableObjectArrayCid, instance_size, &slow_case,
1321 __ StoreIntoObjectNoBarrier(
1324 target::GrowableObjectArray::type_arguments_offset()),
1328 __ Bind(&slow_case);
1332 kGrowableObjectArrayCid, instance_size);
1335 Address(
THR, target::Thread::allocate_object_slow_entry_point_offset()));
1339void StubCodeCompiler::GenerateAllocateRecordStub() {
1345 if (!FLAG_use_slow_path && FLAG_inline_alloc) {
1353 temp_reg, shape_reg,
1354 compiler::target::RecordShape::kNumFieldsMask <<
kSmiTagShift);
1357 const intptr_t fixed_size_plus_alignment_padding =
1358 (target::Record::field_offset(0) +
1361 fixed_size_plus_alignment_padding);
1365 __ LoadFromOffset(result_reg,
THR, target::Thread::top_offset());
1366 __ MoveRegister(new_top_reg, temp_reg);
1367 __ AddRegisters(new_top_reg, result_reg);
1369 __ CompareWithMemoryValue(new_top_reg,
1370 Address(
THR, target::Thread::end_offset()));
1372 __ CheckAllocationCanary(result_reg);
1376 __ StoreToOffset(new_top_reg,
THR, target::Thread::top_offset());
1381 Label size_tag_overflow,
done;
1382 __ CompareImmediate(temp_reg, target::UntaggedObject::kSizeTagMaxSizeTag);
1384 __ LslImmediate(temp_reg,
1385 target::UntaggedObject::kTagBitsSizeTagPos -
1389 __ Bind(&size_tag_overflow);
1391 __ LoadImmediate(temp_reg, 0);
1395 __ OrImmediate(temp_reg, tags);
1396 __ StoreFieldToOffset(temp_reg, result_reg,
1397 target::Object::tags_offset());
1400 __ StoreCompressedIntoObjectNoBarrier(
1401 result_reg, FieldAddress(result_reg, target::Record::shape_offset()),
1406 const Register field_reg = shape_reg;
1407#if defined(TARGET_ARCH_ARM64) || defined(TARGET_ARCH_RISCV32) || \
1408 defined(TARGET_ARCH_RISCV64)
1411 const Register null_reg = temp_reg;
1416 __ AddImmediate(field_reg, result_reg, target::Record::field_offset(0));
1417 __ CompareRegisters(field_reg, new_top_reg);
1421 for (intptr_t
offset = 0;
offset < target::kObjectAlignment;
1422 offset += target::kCompressedWordSize) {
1423 __ StoreCompressedIntoObjectNoBarrier(
1424 result_reg, FieldAddress(field_reg,
offset), null_reg);
1428 __ AddImmediate(field_reg, target::kObjectAlignment);
1429 __ CompareRegisters(field_reg, new_top_reg);
1434 __ WriteAllocationCanary(new_top_reg);
1437 __ Bind(&slow_case);
1440 __ EnterStubFrame();
1442 __ PushRegister(shape_reg);
1443 __ CallRuntime(kAllocateRecordRuntimeEntry, 1);
1448 __ LeaveStubFrame();
1452void StubCodeCompiler::GenerateAllocateSmallRecordStub(intptr_t num_fields,
1453 bool has_named_fields) {
1454 ASSERT(num_fields == 2 || num_fields == 3);
1463 if ((num_fields > 2) && (value2_reg ==
kNoRegister)) {
1476 __ TryAllocateObject(kRecordCid, target::Record::InstanceSize(num_fields),
1477 &slow_case, distance, result_reg, temp_reg);
1479 if (!has_named_fields) {
1483 __ StoreCompressedIntoObjectNoBarrier(
1484 result_reg, FieldAddress(result_reg, target::Record::shape_offset()),
1487 __ StoreCompressedIntoObjectNoBarrier(
1488 result_reg, FieldAddress(result_reg, target::Record::field_offset(0)),
1491 __ StoreCompressedIntoObjectNoBarrier(
1492 result_reg, FieldAddress(result_reg, target::Record::field_offset(1)),
1495 if (num_fields > 2) {
1496 __ StoreCompressedIntoObjectNoBarrier(
1497 result_reg, FieldAddress(result_reg, target::Record::field_offset(2)),
1503 __ Bind(&slow_case);
1505 __ EnterStubFrame();
1507 if (has_named_fields) {
1508 __ PushRegister(shape_reg);
1513 __ PushRegistersInOrder({value0_reg, value1_reg});
1514 if (num_fields > 2) {
1515 __ PushRegister(value2_reg);
1519 __ CallRuntime(kAllocateSmallRecordRuntimeEntry, 4);
1521 __ PopRegister(result_reg);
1524 __ LeaveStubFrame();
// Allocates a 2-field record with positional (unnamed) fields.
1528void StubCodeCompiler::GenerateAllocateRecord2Stub() {
 1529 GenerateAllocateSmallRecordStub(2,
    false);
// Allocates a 2-field record with named fields.
1532void StubCodeCompiler::GenerateAllocateRecord2NamedStub() {
 1533 GenerateAllocateSmallRecordStub(2,
    true);
// Allocates a 3-field record with positional (unnamed) fields.
1536void StubCodeCompiler::GenerateAllocateRecord3Stub() {
 1537 GenerateAllocateSmallRecordStub(3,
    false);
// Allocates a 3-field record with named fields.
1540void StubCodeCompiler::GenerateAllocateRecord3NamedStub() {
 1541 GenerateAllocateSmallRecordStub(3,
    true);
1546void StubCodeCompiler::GenerateAllocateUnhandledExceptionStub() {
1548 auto class_table = thread->isolate_group()->class_table();
1549 ASSERT(class_table->HasValidClassAt(kUnhandledExceptionCid));
1551 class_table->At(kUnhandledExceptionCid));
1558#define TYPED_DATA_ALLOCATION_STUB(clazz) \
1559 void StubCodeCompiler::GenerateAllocate##clazz##Stub() { \
1560 GenerateAllocateTypedDataArrayStub(kTypedData##clazz##Cid); \
1563#undef TYPED_DATA_ALLOCATION_STUB
1565void StubCodeCompiler::GenerateLateInitializationError(
bool with_fpu_regs) {
1566 auto perform_runtime_call = [&]() {
1568 __ CallRuntime(kLateFieldNotInitializedErrorRuntimeEntry,
1571 GenerateSharedStubGeneric(
1575 late_initialization_error_shared_with_fpu_regs_stub_offset()
1577 late_initialization_error_shared_without_fpu_regs_stub_offset(),
1578 false, perform_runtime_call);
// Shared late-initialization-error stub, variant that does not save FPU
// registers (with_fpu_regs == false).
1581void StubCodeCompiler::
 1582 GenerateLateInitializationErrorSharedWithoutFPURegsStub() {
 1583 GenerateLateInitializationError(
    false);
// Shared late-initialization-error stub, variant that saves FPU registers
// (with_fpu_regs == true).
1586void StubCodeCompiler::GenerateLateInitializationErrorSharedWithFPURegsStub() {
 1587 GenerateLateInitializationError(
    true);
1590void StubCodeCompiler::GenerateNullErrorSharedWithoutFPURegsStub() {
1592 false, &kNullErrorRuntimeEntry,
1593 target::Thread::null_error_shared_without_fpu_regs_stub_offset(),
1597void StubCodeCompiler::GenerateNullErrorSharedWithFPURegsStub() {
1599 true, &kNullErrorRuntimeEntry,
1600 target::Thread::null_error_shared_with_fpu_regs_stub_offset(),
1604void StubCodeCompiler::GenerateNullArgErrorSharedWithoutFPURegsStub() {
1606 false, &kArgumentNullErrorRuntimeEntry,
1607 target::Thread::null_arg_error_shared_without_fpu_regs_stub_offset(),
1611void StubCodeCompiler::GenerateNullArgErrorSharedWithFPURegsStub() {
1613 true, &kArgumentNullErrorRuntimeEntry,
1614 target::Thread::null_arg_error_shared_with_fpu_regs_stub_offset(),
1618void StubCodeCompiler::GenerateNullCastErrorSharedWithoutFPURegsStub() {
1620 false, &kNullCastErrorRuntimeEntry,
1621 target::Thread::null_cast_error_shared_without_fpu_regs_stub_offset(),
1625void StubCodeCompiler::GenerateNullCastErrorSharedWithFPURegsStub() {
1627 true, &kNullCastErrorRuntimeEntry,
1628 target::Thread::null_cast_error_shared_with_fpu_regs_stub_offset(),
1632void StubCodeCompiler::GenerateStackOverflowSharedWithoutFPURegsStub() {
1634 false, &kInterruptOrStackOverflowRuntimeEntry,
1635 target::Thread::stack_overflow_shared_without_fpu_regs_stub_offset(),
1639void StubCodeCompiler::GenerateStackOverflowSharedWithFPURegsStub() {
1641 true, &kInterruptOrStackOverflowRuntimeEntry,
1642 target::Thread::stack_overflow_shared_with_fpu_regs_stub_offset(),
// Shared range-error stub; the flag presumably selects whether FPU registers
// are saved (false here, per the stub's name) — confirm in the full file.
1646void StubCodeCompiler::GenerateRangeErrorSharedWithoutFPURegsStub() {
 1647 GenerateRangeError(
    false);
// Shared range-error stub; FPU-register-saving variant (true, per the stub's
// name) — confirm in the full file.
1650void StubCodeCompiler::GenerateRangeErrorSharedWithFPURegsStub() {
 1651 GenerateRangeError(
    true);
// Shared write-error stub; non-FPU-saving variant (false, per the stub's
// name) — confirm in the full file.
1654void StubCodeCompiler::GenerateWriteErrorSharedWithoutFPURegsStub() {
 1655 GenerateWriteError(
    false);
// Shared write-error stub; FPU-register-saving variant (true, per the stub's
// name) — confirm in the full file.
1658void StubCodeCompiler::GenerateWriteErrorSharedWithFPURegsStub() {
 1659 GenerateWriteError(
    true);
1662void StubCodeCompiler::GenerateFrameAwaitingMaterializationStub() {
1666void StubCodeCompiler::GenerateAsynchronousGapMarkerStub() {
1670void StubCodeCompiler::GenerateUnknownDartCodeStub() {
1672 __ EnterStubFrame();
1676void StubCodeCompiler::GenerateNotLoadedStub() {
1677 __ EnterStubFrame();
1678 __ CallRuntime(kNotLoadedRuntimeEntry, 0);
1682#define EMIT_BOX_ALLOCATION(Name) \
1683 void StubCodeCompiler::GenerateAllocate##Name##Stub() { \
1684 Label call_runtime; \
1685 if (!FLAG_use_slow_path && FLAG_inline_alloc) { \
1686 __ TryAllocate(compiler::Name##Class(), &call_runtime, \
1687 Assembler::kNearJump, AllocateBoxABI::kResultReg, \
1688 AllocateBoxABI::kTempReg); \
1691 __ Bind(&call_runtime); \
1692 __ EnterStubFrame(); \
1693 __ PushObject(NullObject()); \
1694 __ CallRuntime(kAllocate##Name##RuntimeEntry, 0); \
1695 __ PopRegister(AllocateBoxABI::kResultReg); \
1696 __ LeaveStubFrame(); \
1706#undef EMIT_BOX_ALLOCATION
1715 if (!FLAG_use_slow_path && FLAG_inline_alloc) {
1718 (assembler->*store_value)(
1723 __ Bind(&call_runtime);
1724 __ EnterStubFrame();
1727 target::Thread::unboxed_runtime_arg_offset());
1728 __ CallRuntime(runtime_entry, 0);
1730 __ LeaveStubFrame();
1734void StubCodeCompiler::GenerateBoxDoubleStub() {
1736 kBoxDoubleRuntimeEntry,
1740void StubCodeCompiler::GenerateBoxFloat32x4Stub() {
1741#if !defined(TARGET_ARCH_RISCV32) && !defined(TARGET_ARCH_RISCV64)
1743 kBoxFloat32x4RuntimeEntry,
1746 __ Stop(
"Not supported on RISC-V.");
1750void StubCodeCompiler::GenerateBoxFloat64x2Stub() {
1751#if !defined(TARGET_ARCH_RISCV32) && !defined(TARGET_ARCH_RISCV64)
1753 kBoxFloat64x2RuntimeEntry,
1756 __ Stop(
"Not supported on RISC-V.");
1760void StubCodeCompiler::GenerateDoubleToIntegerStub() {
1761 __ EnterStubFrame();
1763 target::Thread::unboxed_runtime_arg_offset());
1766 __ CallRuntime(kDoubleToIntegerRuntimeEntry, 1);
1769 __ LeaveStubFrame();
1774 return compiler::target::frame_layout.FrameSlotForVariableIndex(
1776 compiler::target::kWordSize;
1781 intptr_t entry_point_offset_in_thread,
1782 intptr_t function_offset_in_object_store,
1783 bool uses_args_desc =
false) {
1784 if (FLAG_precompiled_mode) {
1789 target::IsolateGroup::object_store_offset());
1791 function_offset_in_object_store);
1793 target::Function::code_offset());
1794 if (!uses_args_desc) {
1817 if (FLAG_use_slow_path || !FLAG_inline_alloc) {
1824 __ MaybeTraceAllocation(kSuspendStateCid, slow_case, temp_reg));
1827 const intptr_t fixed_size_plus_alignment_padding =
1828 (target::SuspendState::HeaderSize() +
1829 target::SuspendState::FrameSizeGrowthGap() * target::kWordSize +
1831 __ AddImmediate(temp_reg, frame_size_reg, fixed_size_plus_alignment_padding);
1835 __ LoadFromOffset(result_reg,
THR, target::Thread::top_offset());
1836 __ AddRegisters(temp_reg, result_reg);
1838 __ CompareWithMemoryValue(temp_reg,
1841 __ CheckAllocationCanary(result_reg);
1845 __ StoreToOffset(temp_reg,
THR, target::Thread::top_offset());
1846 __ SubRegisters(temp_reg, result_reg);
1849 if (!FLAG_precompiled_mode) {
1851 __ AddImmediate(temp_reg, temp_reg,
1852 -target::SuspendState::payload_offset());
1853 __ StoreFieldToOffset(temp_reg, result_reg,
1854 target::SuspendState::frame_capacity_offset());
1856 __ AddImmediate(temp_reg, temp_reg, target::SuspendState::payload_offset());
1862 __ CompareImmediate(temp_reg, target::UntaggedObject::kSizeTagMaxSizeTag);
1864 __ LslImmediate(temp_reg,
1865 target::UntaggedObject::kTagBitsSizeTagPos -
1869 __ Bind(&size_tag_overflow);
1871 __ LoadImmediate(temp_reg, 0);
1875 __ OrImmediate(temp_reg, tags);
1876 __ StoreFieldToOffset(temp_reg, result_reg,
1877 target::Object::tags_offset());
1880 __ StoreFieldToOffset(frame_size_reg, result_reg,
1881 target::SuspendState::frame_size_offset());
1884void StubCodeCompiler::GenerateSuspendStub(
1885 bool call_suspend_function,
1886 bool pass_type_arguments,
1887 intptr_t suspend_entry_point_offset_in_thread,
1888 intptr_t suspend_function_offset_in_object_store) {
1897 Label alloc_slow_case, alloc_done, init_done, resize_suspend_state,
1898 remember_object, call_dart;
1900#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
1901 SPILLS_LR_TO_FRAME({});
1908 -target::frame_layout.last_param_from_entry_sp * target::kWordSize);
1909 __ SubRegisters(kFrameSize,
SPREG);
1911 __ EnterStubFrame();
1913 if (pass_type_arguments) {
1914 __ PushRegister(kTypeArgs);
1917 __ CompareClassId(kSuspendState, kSuspendStateCid, kTemp);
1919 if (FLAG_precompiled_mode) {
1920 __ BranchIf(
EQUAL, &init_done);
1922 Label alloc_suspend_state;
1925 __ CompareWithMemoryValue(
1927 FieldAddress(kSuspendState,
1928 target::SuspendState::frame_capacity_offset()));
1931 __ StoreFieldToOffset(kFrameSize, kSuspendState,
1932 target::SuspendState::frame_size_offset());
1933 __ Jump(&init_done);
1935 __ Bind(&alloc_suspend_state);
1938 __ Comment(
"Allocate SuspendState");
1939 __ MoveRegister(kFunctionData, kSuspendState);
1944 __ StoreCompressedIntoObjectNoBarrier(
1946 FieldAddress(kSuspendState, target::SuspendState::function_data_offset()),
1950#if defined(TARGET_ARCH_ARM64) || defined(TARGET_ARCH_RISCV32) || \
1951 defined(TARGET_ARCH_RISCV64)
1957 __ StoreCompressedIntoObjectNoBarrier(
1959 FieldAddress(kSuspendState,
1960 target::SuspendState::then_callback_offset()),
1962 __ StoreCompressedIntoObjectNoBarrier(
1964 FieldAddress(kSuspendState,
1965 target::SuspendState::error_callback_offset()),
1969 __ Bind(&alloc_done);
1971 __ Comment(
"Save SuspendState to frame");
1975 __ Bind(&init_done);
1976 __ Comment(
"Copy frame to SuspendState");
1982 __ LoadFieldFromOffset(kTemp, kSuspendState,
1983 target::SuspendState::frame_size_offset());
1984 __ CompareRegisters(kTemp, kFrameSize);
1991 if (kSrcFrame ==
THR) {
1992 __ PushRegister(
THR);
1995 __ AddImmediate(kDstFrame, kSuspendState,
1997 __ CopyMemoryWords(kSrcFrame, kDstFrame, kFrameSize, kTemp);
1998 if (kSrcFrame ==
THR) {
1999 __ PopRegister(
THR);
2003 __ StoreFieldToOffset(kTemp, kSuspendState,
2004 target::SuspendState::pc_offset());
2011 __ LoadFieldFromOffset(kTemp, kSuspendState,
2012 target::SuspendState::frame_size_offset());
2013 __ AddRegisters(kTemp, kSuspendState);
2014 __ LoadFieldFromOffset(
2017 __ CompareRegisters(kTemp, kSuspendState);
2024 if (call_suspend_function) {
2027 __ PushRegistersInOrder({kSuspendState, kArgument});
2032 __ LoadFromOffset(kTemp, kTemp, target::Page::original_top_offset());
2033 __ CompareRegisters(kSuspendState, kTemp);
2039 __ Bind(&call_dart);
2040 if (call_suspend_function) {
2041 __ Comment(
"Call suspend Dart function");
2042 if (pass_type_arguments) {
2048 suspend_function_offset_in_object_store,
2049 pass_type_arguments);
2058 __ LeaveStubFrame();
2060#if !defined(TARGET_ARCH_X64) && !defined(TARGET_ARCH_IA32)
2063 __ LeaveDartFrame();
2064#elif defined(TARGET_ARCH_X64)
2067 if (!FLAG_precompiled_mode) {
2070 target::frame_layout.saved_caller_pp_from_fp * target::kWordSize);
2075#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
2077 SPILLS_LR_TO_FRAME({});
2078 SPILLS_LR_TO_FRAME({});
2080 __ Bind(&alloc_slow_case);
2081 __ Comment(
"SuspendState Allocation slow case");
2083 __ PushRegistersInOrder({kArgument, kFrameSize});
2085 __ SmiTag(kFrameSize);
2087 __ PushRegistersInOrder({kFrameSize, kFunctionData});
2088 __ CallRuntime(kAllocateSuspendStateRuntimeEntry, 2);
2090 __ PopRegister(kSuspendState);
2091 __ PopRegister(kFrameSize);
2092 __ PopRegister(kArgument);
2093 __ Jump(&alloc_done);
2095 __ Bind(&resize_suspend_state);
2096 __ Comment(
"Resize SuspendState");
2098 __ PushRegistersInOrder({kArgument, kFrameSize});
2100 __ SmiTag(kFrameSize);
2102 __ PushRegistersInOrder({kFrameSize, kSuspendState});
2106 __ CallRuntime(kAllocateSuspendStateRuntimeEntry, 2);
2108 __ PopRegister(kSuspendState);
2109 __ PopRegister(kFrameSize);
2110 __ PopRegister(kArgument);
2111 __ Jump(&alloc_done);
2113 __ Bind(&remember_object);
2114 __ Comment(
"Old gen SuspendState slow case");
2115 if (!call_suspend_function) {
2118 __ PushRegister(kArgument);
2121#if defined(TARGET_ARCH_IA32)
2122 LeafRuntimeScope rt(
assembler, 2 * target::kWordSize,
2124 __ movl(Address(
ESP, 1 * target::kWordSize),
THR);
2125 __ movl(Address(
ESP, 0 * target::kWordSize), kSuspendState);
2132 rt.Call(kEnsureRememberedAndMarkingDeferredRuntimeEntry, 2);
2134 if (!call_suspend_function) {
2135 __ PopRegister(kArgument);
2137 __ Jump(&call_dart);
// `await` suspension stub: delegates to GenerateSuspendStub with the
// Thread entry point and ObjectStore function for `_SuspendState.await`
// (the leading boolean arguments are elided in this chunk).
2140void StubCodeCompiler::GenerateAwaitStub() {
 2141 GenerateSuspendStub(
 2144 target::Thread::suspend_state_await_entry_point_offset(),
 2145 target::ObjectStore::suspend_state_await_offset());
2148void StubCodeCompiler::GenerateAwaitWithTypeCheckStub() {
2149 GenerateSuspendStub(
2153 target::Thread::suspend_state_await_with_type_check_entry_point_offset(),
2154 target::ObjectStore::suspend_state_await_with_type_check_offset());
2157void StubCodeCompiler::GenerateYieldAsyncStarStub() {
2158 GenerateSuspendStub(
2162 target::Thread::suspend_state_yield_async_star_entry_point_offset(),
2163 target::ObjectStore::suspend_state_yield_async_star_offset());
2166void StubCodeCompiler::GenerateSuspendSyncStarAtStartStub() {
2167 GenerateSuspendStub(
2172 suspend_state_suspend_sync_star_at_start_entry_point_offset(),
2173 target::ObjectStore::suspend_state_suspend_sync_star_at_start_offset());
2176void StubCodeCompiler::GenerateSuspendSyncStarAtYieldStub() {
2177 GenerateSuspendStub(
2182void StubCodeCompiler::GenerateInitSuspendableFunctionStub(
2183 intptr_t init_entry_point_offset_in_thread,
2184 intptr_t init_function_offset_in_object_store) {
2187 __ EnterStubFrame();
2190 __ PushRegister(kTypeArgs);
2192 init_function_offset_in_object_store,
2194 __ LeaveStubFrame();
2202void StubCodeCompiler::GenerateInitAsyncStub() {
2203 GenerateInitSuspendableFunctionStub(
2204 target::Thread::suspend_state_init_async_entry_point_offset(),
2205 target::ObjectStore::suspend_state_init_async_offset());
2208void StubCodeCompiler::GenerateInitAsyncStarStub() {
2209 GenerateInitSuspendableFunctionStub(
2210 target::Thread::suspend_state_init_async_star_entry_point_offset(),
2211 target::ObjectStore::suspend_state_init_async_star_offset());
2214void StubCodeCompiler::GenerateInitSyncStarStub() {
2215 GenerateInitSuspendableFunctionStub(
2216 target::Thread::suspend_state_init_sync_star_entry_point_offset(),
2217 target::ObjectStore::suspend_state_init_sync_star_offset());
2220void StubCodeCompiler::GenerateResumeStub() {
2234 __ EnterDartFrame(0);
2236 const intptr_t param_offset =
2237 target::frame_layout.param_end_from_fp * target::kWordSize;
2238 __ LoadFromOffset(kSuspendState,
FPREG, param_offset + 4 * target::kWordSize);
2242 __ CompareClassId(kSuspendState, kSuspendStateCid, kTemp);
2249 __ LoadFieldFromOffset(kTemp, kSuspendState,
2250 target::SuspendState::pc_offset());
2251 __ CompareImmediate(kTemp, 0);
2258 __ LoadFieldFromOffset(kFrameSize, kSuspendState,
2259 target::SuspendState::frame_size_offset());
2263 __ MoveRegister(kTemp, kFrameSize);
2264 __ AddRegisters(kTemp, kSuspendState);
2265 __ LoadFieldFromOffset(
2268 __ CompareRegisters(kTemp, kSuspendState);
2274 if (!FLAG_precompiled_mode) {
2277 __ MoveRegister(kTemp, kSuspendState);
2278 __ AddRegisters(kTemp, kFrameSize);
2282 target::frame_layout.code_from_fp * target::kWordSize);
2284 target::frame_layout.code_from_fp * target::kWordSize);
2285#if !defined(TARGET_ARCH_IA32)
2286 __ LoadPoolPointer(
PP);
2290 __ AddImmediate(kFrameSize, (target::frame_layout.first_local_from_fp + 1) *
2292 __ SubRegisters(
SPREG, kFrameSize);
2294 __ Comment(
"Copy frame from SuspendState");
2295 intptr_t num_saved_regs = 0;
2296 if (kSrcFrame ==
THR) {
2297 __ PushRegister(
THR);
2304 __ AddImmediate(kSrcFrame, kSuspendState,
2306 __ AddImmediate(kDstFrame,
SPREG, num_saved_regs * target::kWordSize);
2307 __ CopyMemoryWords(kSrcFrame, kDstFrame, kFrameSize, kTemp);
2311 if (kSrcFrame ==
THR) {
2312 __ PopRegister(
THR);
2315 __ Comment(
"Transfer control");
2317 __ LoadFieldFromOffset(kResumePc, kSuspendState,
2318 target::SuspendState::pc_offset());
2319 __ StoreZero(FieldAddress(kSuspendState, target::SuspendState::pc_offset()),
2322#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_IA32)
2329 static_assert((kException !=
CODE_REG) && (kException !=
PP),
2330 "should not interfere");
2331 __ LoadFromOffset(kException,
FPREG, param_offset + 2 * target::kWordSize);
2335 if (!FLAG_precompiled_mode) {
2338 target::Code::instructions_offset());
2339 __ CompareWithMemoryValue(
2341 FieldAddress(
CODE_REG, target::Code::active_instructions_offset()));
2344#if !defined(PRODUCT)
2346 __ LoadIsolate(kTemp);
2347 __ LoadFromOffset(kTemp, kTemp,
2348 target::Isolate::has_resumption_breakpoints_offset(),
2350 __ CompareImmediate(kTemp, 0);
2356 param_offset + 3 * target::kWordSize);
2360 __ Comment(
"Call runtime to throw exception or deopt");
2361 __ Bind(&call_runtime);
2363 __ LoadFromOffset(kStackTrace,
FPREG, param_offset + 1 * target::kWordSize);
2364 static_assert((kStackTrace !=
CODE_REG) && (kStackTrace !=
PP),
2365 "should not interfere");
2369 __ SetReturnAddress(kResumePc);
2371 if (!FLAG_precompiled_mode) {
2372 __ LoadFromOffset(
CODE_REG,
THR, target::Thread::resume_stub_offset());
2374#if !defined(TARGET_ARCH_IA32)
2375 __ set_constant_pool_allowed(
false);
2377 __ EnterStubFrame();
2379 __ PushRegistersInOrder({kException, kStackTrace});
2380 __ CallRuntime(kResumeFrameRuntimeEntry, 2);
2382 if (FLAG_precompiled_mode) {
2385 __ LeaveStubFrame();
2387 param_offset + 3 * target::kWordSize);
2393void StubCodeCompiler::GenerateReturnStub(
2394 intptr_t return_entry_point_offset_in_thread,
2395 intptr_t return_function_offset_in_object_store,
2396 intptr_t return_stub_offset_in_thread) {
2399#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
2400 SPILLS_LR_TO_FRAME({});
2413 __ LeaveDartFrame();
2414 if (!FLAG_precompiled_mode) {
2415 __ LoadFromOffset(
CODE_REG,
THR, return_stub_offset_in_thread);
2417 __ EnterStubFrame();
2420 return_function_offset_in_object_store);
2421 __ LeaveStubFrame();
2425void StubCodeCompiler::GenerateReturnAsyncStub() {
2427 target::Thread::suspend_state_return_async_entry_point_offset(),
2428 target::ObjectStore::suspend_state_return_async_offset(),
2429 target::Thread::return_async_stub_offset());
2432void StubCodeCompiler::GenerateReturnAsyncNotFutureStub() {
2435 suspend_state_return_async_not_future_entry_point_offset(),
2436 target::ObjectStore::suspend_state_return_async_not_future_offset(),
2437 target::Thread::return_async_not_future_stub_offset());
2440void StubCodeCompiler::GenerateReturnAsyncStarStub() {
2442 target::Thread::suspend_state_return_async_star_entry_point_offset(),
2443 target::ObjectStore::suspend_state_return_async_star_offset(),
2444 target::Thread::return_async_star_stub_offset());
2447void StubCodeCompiler::GenerateAsyncExceptionHandlerStub() {
2451 Label rethrow_exception;
2453#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
2454 SPILLS_LR_TO_FRAME({});
2463 __ BranchIf(
EQUAL, &rethrow_exception);
2465 __ LeaveDartFrame();
2466 if (!FLAG_precompiled_mode) {
2468 target::Thread::async_exception_handler_stub_offset());
2470 __ EnterStubFrame();
2471 __ PushRegistersInOrder(
2475 target::Thread::suspend_state_handle_exception_entry_point_offset(),
2476 target::ObjectStore::suspend_state_handle_exception_offset());
2477 __ LeaveStubFrame();
2480#if defined(TARGET_ARCH_ARM) || defined(TARGET_ARCH_ARM64)
2482 SPILLS_LR_TO_FRAME({});
2484 __ Comment(
"Rethrow exception");
2485 __ Bind(&rethrow_exception);
2486 __ LeaveDartFrame();
2487 if (!FLAG_precompiled_mode) {
2489 target::Thread::async_exception_handler_stub_offset());
2491 __ EnterStubFrame();
2495 __ CallRuntime(kReThrowRuntimeEntry, 3);
2499void StubCodeCompiler::GenerateCloneSuspendStateStub() {
2506 Label alloc_slow_case;
2512 __ LoadFieldFromOffset(kTemp, kSource, target::SuspendState::pc_offset());
2513 __ CompareImmediate(kTemp, 0);
2520 __ LoadFieldFromOffset(kFrameSize, kSource,
2521 target::SuspendState::frame_size_offset());
2527 __ LoadFieldFromOffset(kTemp, kSource, target::SuspendState::pc_offset());
2528 __ StoreFieldToOffset(kTemp, kDestination, target::SuspendState::pc_offset());
2531 __ LoadCompressedFieldFromOffset(
2532 kTemp, kSource, target::SuspendState::function_data_offset());
2533 __ StoreCompressedIntoObjectNoBarrier(
2535 FieldAddress(kDestination, target::SuspendState::function_data_offset()),
2539 __ LoadCompressedFieldFromOffset(
2540 kTemp, kSource, target::SuspendState::then_callback_offset());
2541 __ StoreCompressedIntoObjectNoBarrier(
2543 FieldAddress(kDestination, target::SuspendState::then_callback_offset()),
2547 __ LoadCompressedFieldFromOffset(
2548 kTemp, kSource, target::SuspendState::error_callback_offset());
2549 __ StoreCompressedIntoObjectNoBarrier(
2551 FieldAddress(kDestination, target::SuspendState::error_callback_offset()),
2555 if (kSrcFrame ==
THR) {
2556 __ PushRegister(
THR);
2559 __ AddImmediate(kSrcFrame, kSource,
offset);
2560 __ AddImmediate(kDstFrame, kDestination,
offset);
2561 __ CopyMemoryWords(kSrcFrame, kDstFrame, kFrameSize, kTemp);
2562 if (kSrcFrame ==
THR) {
2563 __ PopRegister(
THR);
2568 __ LoadFieldFromOffset(kTemp, kDestination,
2569 target::SuspendState::frame_size_offset());
2570 __ AddRegisters(kTemp, kDestination);
2571 __ StoreFieldToOffset(
2572 kDestination, kTemp,
2579 __ Bind(&alloc_slow_case);
2580 __ Comment(
"CloneSuspendState slow case");
2581 __ EnterStubFrame();
2583 __ PushRegister(kSource);
2584 __ CallRuntime(kCloneSuspendStateRuntimeEntry, 1);
2587 __ LeaveStubFrame();
2591void StubCodeCompiler::GenerateFfiAsyncCallbackSendStub() {
2592 __ EnterStubFrame();
2595 __ CallRuntime(kFfiAsyncCallbackSendRuntimeEntry, 1);
2598 __ LeaveStubFrame();
2603 ASSERT(pc_descriptors_list_ !=
nullptr);
2606 UntaggedPcDescriptors::kBSSRelocation, pc_offset,
2608 TokenPosition::kNoSource,
2613#if !defined(TARGET_ARCH_IA32)
2624 Label* next_iteration) {
2632 __ LoadAcquireFromOffset(
2634 target::kCompressedWordSize *
2635 target::SubtypeTestCache::kInstanceCidOrSignature,
2646 __ CompareWithMemoryValue(
2647 instance_type_args_reg,
2649 target::kCompressedWordSize *
2650 target::SubtypeTestCache::kInstanceTypeArguments),
2658 __ CompareWithMemoryValue(
2661 target::kCompressedWordSize *
2662 target::SubtypeTestCache::kInstantiatorTypeArguments),
2670 __ CompareWithMemoryValue(
2673 target::kCompressedWordSize *
2674 target::SubtypeTestCache::kFunctionTypeArguments),
2682 __ CompareWithMemoryValue(
2683 parent_fun_type_args_reg,
2686 target::kCompressedWordSize *
2687 target::SubtypeTestCache::kInstanceParentFunctionTypeArguments),
2695 __ CompareWithMemoryValue(
2696 delayed_type_args_reg,
2699 target::kCompressedWordSize *
2700 target::SubtypeTestCache::kInstanceDelayedFunctionTypeArguments),
2708 __ CompareWithMemoryValue(
2710 Address(cache_entry_reg, target::kCompressedWordSize *
2711 target::SubtypeTestCache::kDestinationType),
2741 : assembler(assembler), reg_(reg), depth_(depth), alt_(alt) {
2746 __ LoadFromStack(alt_, depth_);
2755 __ StoreToStack(alt_, depth_);
2765 const intptr_t depth_;
2791 const intptr_t kTestEntryLengthLog2 =
2798 __ Comment(
"Hash cache traversal");
2799 __ Comment(
"Calculating number of entries");
2809 __ Comment(
"Calculating starting entry address");
2810 __ AddImmediate(cache_entry_reg,
2813 __ MoveRegister(cache_contents_size_reg, cache_entry_reg);
2815 __ PushRegister(cache_entry_reg);
2818 __ Comment(
"Calculating end of entries address");
2820 kTestEntryLengthLog2 + target::kCompressedWordSizeLog2);
2843 intptr_t kHashStackElements = 0;
2845 kProbeDistanceDepth = 0;
2846 kHashStackElements++;
2850 kProbeDistanceDepth++;
2851 kHashStackElements++;
2852 kCacheContentsSizeDepth = 0;
2856 kProbeDistanceDepth++;
2857 kCacheContentsSizeDepth++;
2858 kHashStackElements++;
2859 kCacheArrayEndDepth = 0;
2864 Label found, not_found;
2872 __ Comment(
"Loading %s type hash",
name);
2873 __ LoadFromSlot(dst, src, Slot::AbstractType_hash());
2875 __ CompareImmediate(dst, 0);
2876 __ BranchIf(
EQUAL, ¬_found);
2883 __ Comment(
"Loading %s type arguments hash",
name);
2887 __ CompareRegisters(src, null_reg);
2889 __ LoadFromSlot(dst, src, Slot::TypeArguments_hash());
2891 __ CompareImmediate(dst, 0);
2892 __ BranchIf(
EQUAL, ¬_found);
2896 __ Comment(
"Hash the entry inputs");
2900 __ MoveRegister(cache_entry_reg, instance_cid_or_sig_reg);
2901 __ SmiUntag(cache_entry_reg);
2903 get_abstract_type_hash(cache_entry_reg, instance_cid_or_sig_reg,
2904 "closure signature");
2941 __ Comment(
"Converting hash to probe entry index");
2946 __ AddImmediate(probe_distance_reg, -1);
2947 __ AndRegisters(cache_entry_reg, probe_distance_reg);
2949 __ Comment(
"Set initial probe distance");
2950 __ LoadImmediate(probe_distance_reg,
2951 target::kCompressedWordSize *
2952 target::SubtypeTestCache::kTestEntryLength);
2956 __ Comment(
"Converting probe entry index to probe entry address");
2960 __ LslImmediate(cache_entry_reg,
2961 kTestEntryLengthLog2 + target::kCompressedWordSizeLog2);
2962 __ AddRegisters(cache_entry_reg, cache_contents_size_reg);
2964 __ Comment(
"Set negated cache contents size");
2966 __ SubRegisters(cache_contents_size_reg, cache_entry_end_reg);
2968 __ LoadFromStack(
TMP, kCacheArrayEndDepth);
2969 __ SubRegisters(cache_contents_size_reg,
TMP);
2973 Label loop, next_iteration;
2976 assembler, n, null_reg, cache_entry_reg, instance_cid_or_sig_reg,
2977 instance_type_args_reg, parent_fun_type_args_reg, delayed_type_args_reg,
2978 &found, ¬_found, &next_iteration);
2979 __ Bind(&next_iteration);
2980 __ Comment(
"Move to next entry");
2984 __ AddRegisters(cache_entry_reg, probe_distance_reg);
2985 __ Comment(
"Adjust probe distance");
2986 __ AddImmediate(probe_distance_reg,
2987 target::kCompressedWordSize *
2988 target::SubtypeTestCache::kTestEntryLength);
2990 __ Comment(
"Check for leaving array");
2993 __ CompareRegisters(cache_entry_reg, cache_entry_end_reg);
2995 __ CompareToStack(cache_entry_reg, kCacheArrayEndDepth);
2998 __ Comment(
"Wrap around to start of entries");
3001 __ AddRegisters(cache_entry_reg, cache_contents_size_reg);
3009 __ Comment(
"Hash found");
3010 __ Drop(kHashStackElements);
3011 gen_found(assembler, n);
3012 __ Bind(¬_found);
3013 __ Comment(
"Hash not found");
3014 __ Drop(kHashStackElements);
3015 gen_not_found(assembler, n);
3034 __ Comment(
"Linear cache traversal");
3035 __ AddImmediate(cache_entry_reg,
3038 Label found, not_found, loop, next_iteration;
3041 assembler, n, null_reg, cache_entry_reg, instance_cid_or_sig_reg,
3042 instance_type_args_reg, parent_fun_type_args_reg, delayed_type_args_reg,
3043 &found, ¬_found, &next_iteration);
3044 __ Bind(&next_iteration);
3045 __ Comment(
"Next iteration");
3048 target::kCompressedWordSize * target::SubtypeTestCache::kTestEntryLength);
3052 __ Comment(
"Linear found");
3053 gen_found(assembler, n);
3054 __ Bind(¬_found);
3055 __ Comment(
"Linear not found");
3056 gen_not_found(assembler, n);
3059void StubCodeCompiler::GenerateSubtypeTestCacheSearch(
3060 Assembler* assembler,
3130 Slot::SubtypeTestCache_num_inputs());
3134 __ Bind(&search_stc);
3137 __ LoadAcquireCompressedFromOffset(
3142 Label initialized, not_closure;
3151 __ CompareImmediate(instance_cid_or_sig_reg, kClosureCid);
3156 __ Comment(
"Closure");
3157 __ LoadCompressed(instance_cid_or_sig_reg,
3159 target::Closure::function_offset()));
3160 __ LoadCompressed(instance_cid_or_sig_reg,
3161 FieldAddress(instance_cid_or_sig_reg,
3162 target::Function::signature_offset()));
3165 instance_type_args_reg,
3167 target::Closure::instantiator_type_arguments_offset()));
3171 parent_fun_type_args_reg,
3173 target::Closure::function_type_arguments_offset()));
3177 delayed_type_args_reg,
3179 target::Closure::delayed_type_arguments_offset()));
3187 __ Comment(
"Non-Closure");
3188 __ Bind(¬_closure);
3190 Label has_no_type_arguments;
3192 __ MoveRegister(instance_type_args_reg, null_reg);
3193 __ LoadFieldFromOffset(
3195 target::Class::host_type_arguments_field_offset_in_words_offset(),
3198 target::Class::kNoTypeArguments,
kFourBytes);
3200 __ LoadIndexedCompressed(instance_type_args_reg,
3203 __ Bind(&has_no_type_arguments);
3204 __ Comment(
"No type arguments");
3206 __ SmiTag(instance_cid_or_sig_reg);
3208 __ MoveRegister(parent_fun_type_args_reg, null_reg);
3211 __ MoveRegister(delayed_type_args_reg, null_reg);
3215 __ Bind(&initialized);
3224 Slot::Array_length());
3230 assembler, n, null_reg, cache_entry_reg, instance_cid_or_sig_reg,
3231 instance_type_args_reg, parent_fun_type_args_reg, delayed_type_args_reg,
3232 gen_found, gen_not_found);
3236 assembler, n, null_reg, cache_entry_reg, instance_cid_or_sig_reg,
3237 instance_type_args_reg, parent_fun_type_args_reg, delayed_type_args_reg,
3238 cache_entry_end_reg, cache_contents_size_reg, probe_distance_reg,
3239 gen_found, gen_not_found);
3244void StubCodeCompiler::GenerateSubtype1TestCacheStub() {
3245 GenerateSubtypeNTestCacheStub(
assembler, 1);
3249void StubCodeCompiler::GenerateSubtype2TestCacheStub() {
3250 GenerateSubtypeNTestCacheStub(
assembler, 2);
3254void StubCodeCompiler::GenerateSubtype3TestCacheStub() {
3255 GenerateSubtypeNTestCacheStub(
assembler, 3);
3259void StubCodeCompiler::GenerateSubtype4TestCacheStub() {
3260 GenerateSubtypeNTestCacheStub(
assembler, 4);
3264void StubCodeCompiler::GenerateSubtype6TestCacheStub() {
3265 GenerateSubtypeNTestCacheStub(
assembler, 6);
3269void StubCodeCompiler::GenerateSubtype7TestCacheStub() {
3270 GenerateSubtypeNTestCacheStub(
assembler, 7);
static void done(const char *config, const char *src, const char *srcOptions, const char *name)
static float next(float f)
#define CLASS_LIST_TYPED_DATA(V)
static constexpr int shift()
static const Register ArgumentRegisters[]
static constexpr Register kReturnReg
static constexpr intptr_t kNone
void AddDescriptor(UntaggedPcDescriptors::Kind kind, intptr_t pc_offset, intptr_t deopt_id, TokenPosition token_pos, intptr_t try_index, intptr_t yield_index)
static Object & ZoneHandle()
static RecordShape ForUnnamed(intptr_t num_fields)
void AddRegister(Register reg, Representation rep=kTagged)
bool Contains(Location loc)
bool ContainsRegister(Register reg) const
static SmiPtr New(intptr_t value)
static intptr_t RawValue(intptr_t value)
static constexpr intptr_t kMaxLinearCacheSize
static constexpr intptr_t kSuspendStateVarIndex
static Thread * Current()
@ kInstantiatorTypeArgsIndex
@ kInstantiatedTypeArgsIndex
static constexpr intptr_t kMaxLinearCacheSize
static constexpr intptr_t kAllDynamicHash
static constexpr intptr_t kInvalidYieldIndex
static constexpr int ShiftForPowerOfTwo(T x)
intptr_t InsertAlignedRelocation(BSS::Relocation reloc)
void StoreUnboxedDouble(FpuRegister src, Register base, int32_t offset)
void StoreUnboxedSimd128(FpuRegister src, Register base, int32_t offset)
StackRegisterScope(Assembler *assembler, Register *reg, intptr_t depth, Register alt=TMP)
static constexpr intptr_t kNoDepth
void EnsureIsNewOrRemembered()
std::function< void(Assembler *, int)> STCSearchExitGenerator
void GenerateAllocationStubForClass(UnresolvedPcRelativeCalls *unresolved_calls, const Class &cls, const dart::Code &allocate_object, const dart::Code &allocat_object_parametrized)
static intptr_t WordOffsetFromFpToCpuRegister(Register cpu_register)
uword MakeTagWordForNewSpaceObject(classid_t cid, uword instance_size)
word ToRawSmi(const dart::Object &a)
bool WillAllocateNewOrRememberedObject(intptr_t instance_size)
static void InvokeTypeCheckFromTypeTestStub(Assembler *assembler, TypeCheckMode mode)
static intptr_t SuspendStateFpOffset()
static void CallDartCoreLibraryFunction(Assembler *assembler, intptr_t entry_point_offset_in_thread, intptr_t function_offset_in_object_store, bool uses_args_desc=false)
static void EnsureIsTypeOrFunctionTypeOrTypeParameter(Assembler *assembler, Register type_reg, Register scratch_reg)
static void BuildInstantiateTypeRuntimeCall(Assembler *assembler)
const Class & Float64x2Class()
static void GenerateSubtypeTestCacheHashSearch(Assembler *assembler, int n, Register null_reg, Register cache_entry_reg, Register instance_cid_or_sig_reg, Register instance_type_args_reg, Register parent_fun_type_args_reg, Register delayed_type_args_reg, Register cache_entry_end_reg, Register cache_contents_size_reg, Register probe_distance_reg, const StubCodeCompiler::STCSearchExitGenerator &gen_found, const StubCodeCompiler::STCSearchExitGenerator &gen_not_found)
const Class & Float32x4Class()
static void GenerateBoxFpuValueStub(Assembler *assembler, const dart::Class &cls, const RuntimeEntry &runtime_entry, void(Assembler::*store_value)(FpuRegister, Register, int32_t))
const Array & ArgumentsDescriptorBoxed(intptr_t type_args_len, intptr_t num_arguments)
const Code & StubCodeSubtype2TestCache()
const Bool & TrueObject()
const Code & StubCodeSubtype6TestCache()
const Code & StubCodeSubtype7TestCache()
const Code & StubCodeSubtype3TestCache()
const Object & SentinelObject()
static void GenerateAllocateSuspendState(Assembler *assembler, Label *slow_case, Register result_reg, Register frame_size_reg, Register temp_reg)
const Object & NullObject()
const Class & DoubleClass()
static void BuildTypeParameterTypeTestStub(Assembler *assembler, bool allow_null)
static void GenerateSubtypeTestCacheLinearSearch(Assembler *assembler, int n, Register null_reg, Register cache_entry_reg, Register instance_cid_or_sig_reg, Register instance_type_args_reg, Register parent_fun_type_args_reg, Register delayed_type_args_reg, const StubCodeCompiler::STCSearchExitGenerator &gen_found, const StubCodeCompiler::STCSearchExitGenerator &gen_not_found)
const Object & EmptyTypeArguments()
const Code & StubCodeSubtype4TestCache()
static void GenerateSubtypeTestCacheLoopBody(Assembler *assembler, int n, Register null_reg, Register cache_entry_reg, Register instance_cid_or_sig_reg, Register instance_type_args_reg, Register parent_fun_type_args_reg, Register delayed_type_args_reg, Label *found, Label *not_found, Label *next_iteration)
static void BuildInstantiateTypeParameterStub(Assembler *assembler, Nullability nullability, bool is_function_parameter)
@ TIMES_COMPRESSED_HALF_WORD_SIZE
static constexpr int kSavedCallerPcSlotFromFp
const Register kExceptionObjectReg
uint32_t CombineHashes(uint32_t hash, uint32_t other_hash)
constexpr intptr_t kIntptrMin
static constexpr int kSavedCallerFpSlotFromFp
const Register ARGS_DESC_REG
static constexpr int kCallerSpSlotFromFp
constexpr RegList kDartAvailableCpuRegs
uint32_t FinalizeHash(uint32_t hash, intptr_t hashbits=kBitsPerInt32)
@ kTypeCheckFromLazySpecializeStub
const Register FUNCTION_REG
static constexpr intptr_t kAllocationRedZoneSize
const Register kStackTraceObjectReg
static constexpr Register kFunctionReg
static constexpr Register kContextReg
static constexpr Register kResultReg
static constexpr Register kInstantiatorTypeArgsReg
static constexpr Register kScratchReg
static constexpr Register kTypeArgumentsReg
static constexpr Register kResultReg
static constexpr Register kTagsReg
static constexpr Register kShapeReg
static constexpr Register kResultReg
static constexpr Register kTemp1Reg
static constexpr Register kTemp2Reg
static constexpr Register kResultReg
static constexpr Register kShapeReg
static constexpr Register kValue2Reg
static constexpr Register kValue0Reg
static constexpr Register kTempReg
static constexpr Register kValue1Reg
static constexpr Register kDstNameReg
static constexpr intptr_t kFunctionTAVSlotFromFp
static constexpr intptr_t kDstTypeSlotFromFp
static constexpr Register kSubtypeTestReg
static constexpr intptr_t kInstanceSlotFromFp
static constexpr intptr_t kInstantiatorTAVSlotFromFp
static constexpr Register kObjectReg
static constexpr Register kSubTypeReg
static constexpr Register kSuperTypeReg
static constexpr Register kFunctionTypeArgumentsReg
static constexpr Register kInstantiatorTypeArgumentsReg
static constexpr Register kDstNameReg
static constexpr Register kSuspendStateReg
static constexpr Register kTempReg
static constexpr Register kResultReg
static constexpr FpuRegister kValueReg
static constexpr Register kDestinationReg
static constexpr Register kSrcFrameReg
static constexpr Register kFrameSizeReg
static constexpr Register kSourceReg
static constexpr Register kTempReg
static constexpr Register kDstFrameReg
static constexpr Register kResultReg
static constexpr Register kRecognizedKindReg
static constexpr FpuRegister kInputReg
static constexpr Register kArgsReg
static constexpr Register kFieldReg
static constexpr Register kResultReg
static constexpr Register kInstanceReg
static constexpr Register kAddressReg
static constexpr Register kScratchReg
static constexpr Register kAddressReg
static constexpr Register kScratchReg
static constexpr Register kResultReg
static constexpr Register kFieldReg
static constexpr Register kTypeArgsReg
static constexpr Register kEntryStartReg
static constexpr intptr_t kSavedRegisters
static constexpr Register kCurrentEntryIndexReg
static constexpr Register kProbeMaskReg
static constexpr Register kProbeDistanceReg
static constexpr Register kFunctionTypeArgumentsReg
static constexpr Register kTypeReg
static constexpr Register kScratchReg
static constexpr Register kInstantiatorTypeArgumentsReg
static constexpr Register kResultTypeReg
static constexpr Register kInstantiatorTypeArgumentsReg
static constexpr Register kScratchReg
static constexpr Register kUninstantiatedTypeArgumentsReg
static constexpr Register kResultTypeArgumentsReg
static constexpr Register kFunctionTypeArgumentsReg
static constexpr Register kFieldReg
static constexpr intptr_t kObjectAlignmentLog2
static constexpr intptr_t kObjectAlignment
static constexpr Register kStackTraceReg
static constexpr Register kExceptionReg
static constexpr Register kSrcFrameReg
static constexpr Register kDstFrameReg
static constexpr Register kFrameSizeReg
static constexpr Register kSuspendStateReg
static constexpr Register kExceptionReg
static constexpr Register kTempReg
static constexpr Register kResumePcReg
static constexpr Register kStackTraceReg
static constexpr Register kSuspendStateReg
static constexpr Register kSrcFrameReg
static constexpr Register kFunctionDataReg
static constexpr Register kSuspendStateReg
static constexpr intptr_t kResumePcDistance
static constexpr Register kTempReg
static constexpr Register kArgumentReg
static constexpr Register kDstFrameReg
static constexpr Register kTypeArgsReg
static constexpr Register kFrameSizeReg
static constexpr Register kExceptionReg
static constexpr Register kSubtypeTestCacheReg
static constexpr Register kDstTypeReg
static constexpr Register kInstanceReg
static constexpr Register kFunctionTypeArgumentsReg
static constexpr Register kInstantiatorTypeArgumentsReg
static constexpr Register kSubtypeTestCacheResultReg
static constexpr Register kScratchReg
static constexpr Register kInstanceOfResultReg
#define TYPED_DATA_ALLOCATION_STUB(clazz)
#define EMIT_BOX_ALLOCATION(Name)
#define VM_TYPE_TESTING_STUB_CODE_LIST(V)
#define NOT_IN_PRODUCT(code)