#define SHOULD_NOT_INCLUDE_RUNTIME

#if defined(TARGET_ARCH_ARM64)

void StubCodeCompiler::EnsureIsNewOrRemembered() {
  __ LoadFromOffset(TMP, TMP, target::Page::original_top_offset());

    rt.Call(kEnsureRememberedAndMarkingDeferredRuntimeEntry, 2);
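
// Wraps a native/runtime call so that, when running under ThreadSanitizer,
// an exception unwinding out of the call is observed by TSAN: a setjmp
// buffer is installed in Thread::tsan_utils_ before the call, and on a
// non-zero setjmp return the saved exception PC/SP/FP are reloaded and
// control tail-jumps to the JumpToFrame entry point.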
static void WithExceptionCatchingTrampoline(Assembler* assembler,
                                            std::function<void()> fun) {
#if defined(TARGET_USES_THREAD_SANITIZER) && !defined(USING_SIMULATOR)
  const intptr_t kJumpBufferSize = sizeof(jmp_buf);

  const RegisterSet volatile_registers(

  __ ldr(kTsanUtilsReg, Address(THR, target::Thread::tsan_utils_offset()));
         Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()));

  __ AddImmediate(SP, -kJumpBufferSize);
  __ str(SP,
         Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()));

  __ PushRegisters(volatile_registers);

  __ ldr(kTsanUtilsReg, Address(THR, target::Thread::tsan_utils_offset()));
         Address(kTsanUtilsReg, target::TsanUtils::setjmp_function_offset()));

  __ PopRegisters(volatile_registers);

  __ cbz(&do_native_call, R0);

  __ AddImmediate(SP, kJumpBufferSize);
  __ ldr(kTsanUtilsReg, Address(THR, target::Thread::tsan_utils_offset()));
         Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()));

  __ ldr(R0, Address(kTsanUtilsReg, target::TsanUtils::exception_pc_offset()));
  __ ldr(R1, Address(kTsanUtilsReg, target::TsanUtils::exception_sp_offset()));
  __ ldr(R2, Address(kTsanUtilsReg, target::TsanUtils::exception_fp_offset()));

  __ Jump(Address(THR, target::Thread::jump_to_frame_entry_point_offset()));

  __ Bind(&do_native_call);
  __ MoveRegister(kSavedRspReg, SP);

#if defined(TARGET_USES_THREAD_SANITIZER) && !defined(USING_SIMULATOR)
  __ MoveRegister(SP, kSavedRspReg);
  __ AddImmediate(SP, kJumpBufferSize);
  const Register kTsanUtilsReg2 = kSavedRspReg;
  __ ldr(kTsanUtilsReg2, Address(THR, target::Thread::tsan_utils_offset()));
         Address(kTsanUtilsReg2, target::TsanUtils::setjmp_buffer_offset()));
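
// Calls a C++ runtime entry from Dart code. R5 carries the entry to call and
// is recorded in Thread::vm_tag for the duration of the call; FP is saved as
// the top exit frame so the GC can walk the Dart portion of the stack.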
void StubCodeCompiler::GenerateCallToRuntimeStub() {
  const intptr_t thread_offset = target::NativeArguments::thread_offset();
  const intptr_t argc_tag_offset = target::NativeArguments::argc_tag_offset();
  const intptr_t argv_offset = target::NativeArguments::argv_offset();
  const intptr_t retval_offset = target::NativeArguments::retval_offset();

  __ Comment("CallToRuntimeStub");
  __ ldr(CODE_REG,
         Address(THR, target::Thread::call_to_runtime_stub_offset()));
  __ SetPrologueOffset();

  __ StoreToOffset(FP, THR, target::Thread::top_exit_frame_info_offset());

  __ LoadImmediate(R8, target::Thread::exit_through_runtime_call());
  __ StoreToOffset(R8, THR, target::Thread::exit_through_ffi_offset());

  __ LoadFromOffset(R8, THR, target::Thread::vm_tag_offset());
  __ CompareImmediate(R8, VMTag::kDartTagId);
  __ Stop("Not coming from Dart code.");

  __ StoreToOffset(R5, THR, target::Thread::vm_tag_offset());

  WithExceptionCatchingTrampoline(assembler, [&]() {
    __ Comment("align stack");
    ASSERT(target::NativeArguments::StructSize() == 4 * target::kWordSize);
    __ ReserveAlignedFrameSpace(target::NativeArguments::StructSize());

    ASSERT(thread_offset == 0 * target::kWordSize);
    ASSERT(argc_tag_offset == 1 * target::kWordSize);
    ASSERT(argv_offset == 2 * target::kWordSize);
                    target::frame_layout.param_end_from_fp * target::kWordSize);
    ASSERT(retval_offset == 3 * target::kWordSize);
    __ AddImmediate(R3, R2, target::kWordSize);

    __ StoreToOffset(R0, SP, thread_offset);
    __ StoreToOffset(R1, SP, argc_tag_offset);
    __ StoreToOffset(R2, SP, argv_offset);
    __ StoreToOffset(R3, SP, retval_offset);

    __ Comment("CallToRuntimeStub return");

  __ RestorePinnedRegisters();

  __ LoadImmediate(R2, VMTag::kDartTagId);
  __ StoreToOffset(R2, THR, target::Thread::vm_tag_offset());

  __ StoreToOffset(ZR, THR, target::Thread::exit_through_ffi_offset());

  __ StoreToOffset(ZR, THR, target::Thread::top_exit_frame_info_offset());

  if (FLAG_precompiled_mode) {
    __ SetupGlobalPoolAndDispatchTable();

  __ LoadImmediate(R0, 0);
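
// Generic shared-stub scaffolding: spills all non-reserved registers (so
// they appear as part of the caller's frame in stack maps), runs the runtime
// call supplied as a std::function, and restores everything before returning
// through LR.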
void StubCodeCompiler::GenerateSharedStubGeneric(
    bool save_fpu_registers,
    intptr_t self_code_stub_offset_from_thread,
    bool allow_return,
    std::function<void()> perform_runtime_call) {
  RegisterSet all_registers;
  all_registers.AddAllNonReservedRegisters(save_fpu_registers);

  READS_RETURN_ADDRESS_FROM_LR(__ Push(LR));
  __ PushRegisters(all_registers);
  __ ldr(CODE_REG, Address(THR, self_code_stub_offset_from_thread));
  perform_runtime_call();
  __ PopRegisters(all_registers);
  READS_RETURN_ADDRESS_FROM_LR(__ ret(LR));

void StubCodeCompiler::GenerateSharedStub(
    bool save_fpu_registers,
    const RuntimeEntry* target,
    intptr_t self_code_stub_offset_from_thread,
    bool allow_return,
    bool store_runtime_result_in_result_register) {
  ASSERT(!store_runtime_result_in_result_register || allow_return);
  auto perform_runtime_call = [&]() {
    if (store_runtime_result_in_result_register) {
    if (store_runtime_result_in_result_register) {
             Address(FP, target::kWordSize *

  GenerateSharedStubGeneric(save_fpu_registers,
                            self_code_stub_offset_from_thread, allow_return,
                            perform_runtime_call);
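
// Safepoint transition stubs. All general-purpose registers are preserved
// and the C stack is realigned before calling the enter/exit safepoint leaf
// runtime entries, whose addresses are read from the current Thread.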
void StubCodeCompiler::GenerateEnterSafepointStub() {
  RegisterSet all_registers;
  all_registers.AddAllGeneralRegisters();

  __ PushRegisters(all_registers);
  __ ReserveAlignedFrameSpace(0);
  __ ldr(R0, Address(THR, kEnterSafepointRuntimeEntry.OffsetFromThread()));
  __ PopRegisters(all_registers);

static void GenerateExitSafepointStubCommon(Assembler* assembler,
                                            uword runtime_entry_offset) {
  RegisterSet all_registers;
  all_registers.AddAllGeneralRegisters();

  __ PushRegisters(all_registers);
  __ ReserveAlignedFrameSpace(0);

  __ LoadImmediate(R0, target::Thread::vm_execution_state());
  __ str(R0, Address(THR, target::Thread::execution_state_offset()));

  __ ldr(R0, Address(THR, runtime_entry_offset));
  __ PopRegisters(all_registers);

void StubCodeCompiler::GenerateExitSafepointStub() {
  GenerateExitSafepointStubCommon(
      assembler, kExitSafepointRuntimeEntry.OffsetFromThread());

void StubCodeCompiler::GenerateExitSafepointIgnoreUnwindInProgressStub() {
  GenerateExitSafepointStubCommon(
      assembler,
      kExitSafepointIgnoreUnwindInProgressRuntimeEntry.OffsetFromThread());
void StubCodeCompiler::GenerateCallNativeThroughSafepointStub() {
  SPILLS_RETURN_ADDRESS_FROM_LR_TO_REGISTER(__ mov(R19, LR));
  __ LoadImmediate(R10, target::Thread::exit_through_ffi());

  __ TransitionNativeToGenerated(R10, true);

  compiler::Label skip_reloc;
  InsertBSSRelocation(relocation);
  __ Bind(&skip_reloc);

  __ adr(tmp, compiler::Immediate(-compiler::target::kWordSize));

  __ ldr(dst, compiler::Address(tmp));

  __ add(tmp, tmp, compiler::Operand(dst));

  __ ldr(dst, compiler::Address(tmp));

void StubCodeCompiler::GenerateLoadFfiCallbackMetadataRuntimeFunction(
    uword function_index,
    Register dst) {
  const intptr_t code_size = __ CodeSize();
  __ adr(dst, Immediate(-code_size));
  __ LoadFromOffset(dst, dst,
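
// FFI callback trampolines: every callback gets a tiny fixed-size trampoline
// (kNativeCallbackTrampolineSize) that captures its own PC with adr and then
// falls through to one shared stub; the COMPILE_ASSERT and the final
// kNativeCallbackSharedStubSize check keep those layout assumptions honest.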
void StubCodeCompiler::GenerateFfiCallbackTrampolineStub() {
#if defined(USING_SIMULATOR) && !defined(DART_PRECOMPILER)

  __ adr(R9, Immediate(0));
         FfiCallbackMetadata::kNativeCallbackTrampolineSize *

  const intptr_t shared_stub_start = __ CodeSize();

  COMPILE_ASSERT(FfiCallbackMetadata::kNativeCallbackTrampolineStackDelta == 2);
  SPILLS_LR_TO_FRAME(__ stp(

  RegisterSet all_registers;
  all_registers.AddAllArgumentRegisters();

  __ PushRegisters(all_registers);

  __ AddImmediate(SP, SP, -compiler::target::kWordSize);
  __ AddImmediate(SP, SP, -compiler::target::kWordSize);

  __ ReserveAlignedFrameSpace(0);

#if defined(DART_TARGET_OS_FUCHSIA)
  if (FLAG_precompiled_mode) {
    GenerateLoadFfiCallbackMetadataRuntimeFunction(

  __ PopRegisters(all_registers);

  __ cmp(THR, Operand(0));

  __ EnterFullSafepoint(R9);

  __ ReserveAlignedFrameSpace(0);

#if defined(DART_TARGET_OS_FUCHSIA)
  if (FLAG_precompiled_mode) {
    GenerateLoadFfiCallbackMetadataRuntimeFunction(

  RESTORES_LR_FROM_FRAME(__ ldp(
                 FfiCallbackMetadata::kNativeCallbackSharedStubSize);
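
// Error stubs. These route through GenerateSharedStubGeneric with
// allow_return == false (the runtime entries they invoke always throw), so
// no register state needs to survive past the runtime call.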
void StubCodeCompiler::GenerateDispatchTableNullErrorStub() {
  __ CallRuntime(kDispatchTableNullErrorRuntimeEntry, 1);

void StubCodeCompiler::GenerateRangeError(bool with_fpu_regs) {
  auto perform_runtime_call = [&]() {
#if !defined(DART_COMPRESSED_POINTERS)
      __ CallRuntime(kAllocateMintRuntimeEntry, 0);
             Address(FP, target::kWordSize *
                 target::Mint::value_offset()));
             Address(FP, target::kWordSize *

    __ PushRegistersInOrder(
    __ CallRuntime(kRangeErrorRuntimeEntry, 2);

  GenerateSharedStubGeneric(
      with_fpu_regs,
      with_fpu_regs
          ? target::Thread::range_error_shared_with_fpu_regs_stub_offset()
          : target::Thread::range_error_shared_without_fpu_regs_stub_offset(),
      false, perform_runtime_call);

void StubCodeCompiler::GenerateWriteError(bool with_fpu_regs) {
  auto perform_runtime_call = [&]() {
    __ CallRuntime(kWriteErrorRuntimeEntry, 2);

  GenerateSharedStubGeneric(
      with_fpu_regs,
      with_fpu_regs
          ? target::Thread::write_error_shared_with_fpu_regs_stub_offset()
          : target::Thread::write_error_shared_without_fpu_regs_stub_offset(),
      false, perform_runtime_call);
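
// Shared body of the native-call stubs: sets up an exit frame and VM tag
// exactly like GenerateCallToRuntimeStub, then invokes the native function
// through one of the wrapper entry points stored on the Thread (no-scope,
// auto-scope, or bootstrap; see the three stubs that follow).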
static void GenerateCallNativeWithWrapperStub(Assembler* assembler,
                                              Address wrapper) {
  const intptr_t thread_offset = target::NativeArguments::thread_offset();
  const intptr_t argc_tag_offset = target::NativeArguments::argc_tag_offset();
  const intptr_t argv_offset = target::NativeArguments::argv_offset();
  const intptr_t retval_offset = target::NativeArguments::retval_offset();

  __ StoreToOffset(FP, THR, target::Thread::top_exit_frame_info_offset());

  __ LoadImmediate(R6, target::Thread::exit_through_runtime_call());
  __ StoreToOffset(R6, THR, target::Thread::exit_through_ffi_offset());

  __ LoadFromOffset(R6, THR, target::Thread::vm_tag_offset());
  __ CompareImmediate(R6, VMTag::kDartTagId);
  __ Stop("Not coming from Dart code.");

  __ StoreToOffset(R5, THR, target::Thread::vm_tag_offset());

  WithExceptionCatchingTrampoline(assembler, [&]() {
    __ ReserveAlignedFrameSpace(target::NativeArguments::StructSize());

    ASSERT(thread_offset == 0 * target::kWordSize);
    ASSERT(argc_tag_offset == 1 * target::kWordSize);
    ASSERT(argv_offset == 2 * target::kWordSize);
    ASSERT(retval_offset == 3 * target::kWordSize);
        (target::frame_layout.param_end_from_fp + 1) * target::kWordSize);

    __ StoreToOffset(R0, SP, thread_offset);
    __ StoreToOffset(R1, SP, argc_tag_offset);
    __ StoreToOffset(R2, SP, argv_offset);
    __ StoreToOffset(R3, SP, retval_offset);

  __ RestorePinnedRegisters();

  __ LoadImmediate(R2, VMTag::kDartTagId);
  __ StoreToOffset(R2, THR, target::Thread::vm_tag_offset());

  __ StoreToOffset(ZR, THR, target::Thread::exit_through_ffi_offset());

  __ StoreToOffset(ZR, THR, target::Thread::top_exit_frame_info_offset());

  if (FLAG_precompiled_mode) {
    __ SetupGlobalPoolAndDispatchTable();
void StubCodeCompiler::GenerateCallNoScopeNativeStub() {
  GenerateCallNativeWithWrapperStub(
      assembler,
      Address(THR,
              target::Thread::no_scope_native_wrapper_entry_point_offset()));

void StubCodeCompiler::GenerateCallAutoScopeNativeStub() {
  GenerateCallNativeWithWrapperStub(
      assembler,
      Address(THR,
              target::Thread::auto_scope_native_wrapper_entry_point_offset()));

void StubCodeCompiler::GenerateCallBootstrapNativeStub() {
  GenerateCallNativeWithWrapperStub(
      assembler,
      Address(THR,
              target::Thread::bootstrap_native_wrapper_entry_point_offset()));
void StubCodeCompiler::GenerateCallStaticFunctionStub() {
  __ CallRuntime(kPatchStaticCallRuntimeEntry, 0);
  __ LoadFieldFromOffset(R0, CODE_REG, target::Code::entry_point_offset());

void StubCodeCompiler::GenerateFixCallersTargetStub() {
  __ BranchOnMonomorphicCheckedEntryJIT(&monomorphic);
         Address(THR, target::Thread::fix_callers_target_code_offset()));
  __ CallRuntime(kFixCallersTargetRuntimeEntry, 0);
  __ LoadFieldFromOffset(R0, CODE_REG, target::Code::entry_point_offset());

  __ Bind(&monomorphic);
         Address(THR, target::Thread::fix_callers_target_code_offset()));
  __ CallRuntime(kFixCallersTargetMonomorphicRuntimeEntry, 2);
  __ LoadFieldFromOffset(

void StubCodeCompiler::GenerateFixAllocationStubTargetStub() {
         Address(THR, target::Thread::fix_allocation_stub_code_offset()));
  __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);
  __ LoadFieldFromOffset(R0, CODE_REG, target::Code::entry_point_offset());

void StubCodeCompiler::GenerateFixParameterizedAllocationStubTargetStub() {
         Address(THR, target::Thread::fix_allocation_stub_code_offset()));
  __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);
  __ LoadFieldFromOffset(R0, CODE_REG, target::Code::entry_point_offset());
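
// Copies the caller's arguments into a new array: R1 walks the stack slots
// downward while R3 walks the array payload upward, with R2 counting down.
// Used by the noSuchMethod dispatcher paths below.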
static void PushArrayOfArguments(Assembler* assembler) {
  __ add(R1, FP, Operand(R2, LSL, target::kWordSizeLog2));
                  target::frame_layout.param_end_from_fp * target::kWordSize);

  Label loop, loop_exit;
  __ CompareRegisters(R2, ZR);
  __ b(&loop_exit, LE);
  __ AddImmediate(R1, -target::kWordSize);
  __ AddImmediate(R3, target::kCompressedWordSize);
  __ AddImmediate(R2, R2, -1);
  __ StoreCompressedIntoObject(R0, Address(R3, -target::kCompressedWordSize),
  __ Bind(&loop_exit);
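
// Deoptimization sequence. All CPU registers are spilled so the deoptimizer
// can use them as materialization sources; the saved_*_slot_from_fp values
// locate the result (or the exception/stacktrace pair, for lazy deopt from a
// throw) inside that spill area.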
static void GenerateDeoptimizationSequence(Assembler* assembler,
                                           DeoptStubKind kind) {
  __ EnterStubFrame();

  const intptr_t saved_result_slot_from_fp =
      target::frame_layout.first_local_from_fp + 1 -
  const intptr_t saved_exception_slot_from_fp =
      target::frame_layout.first_local_from_fp + 1 -
  const intptr_t saved_stacktrace_slot_from_fp =
      target::frame_layout.first_local_from_fp + 1 -

      __ ldr(R25, Address(FP, 2 * target::kWordSize));
    } else if (r == R15) {
    } else if (r == R31) {

    LeafRuntimeScope rt(assembler,
    __ LoadImmediate(R1, is_lazy ? 1 : 0);
    rt.Call(kDeoptimizeCopyFrameRuntimeEntry, 2);

    __ LoadFromOffset(R1, FP, saved_result_slot_from_fp * target::kWordSize);

    __ LoadFromOffset(R1, FP, saved_exception_slot_from_fp * target::kWordSize);
    __ LoadFromOffset(R2, FP,
                      saved_stacktrace_slot_from_fp * target::kWordSize);

  __ RestoreCodePointer();
  __ LeaveStubFrame();

  __ EnterStubFrame();

    LeafRuntimeScope rt(assembler,
    rt.Call(kDeoptimizeFillFrameRuntimeEntry, 1);

        R1, FP, target::frame_layout.first_local_from_fp * target::kWordSize);
        R1, FP, target::frame_layout.first_local_from_fp * target::kWordSize);
        (target::frame_layout.first_local_from_fp - 1) * target::kWordSize);

  __ RestoreCodePointer();
  __ LeaveStubFrame();

  __ EnterStubFrame();
  __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0);
  __ LeaveStubFrame();

  __ EnterStubFrame();
  __ CallRuntime(kReThrowRuntimeEntry, 3);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateDeoptimizeLazyFromReturnStub() {
         Address(THR, target::Thread::lazy_deopt_from_return_stub_offset()));

void StubCodeCompiler::GenerateDeoptimizeLazyFromThrowStub() {
         Address(THR, target::Thread::lazy_deopt_from_throw_stub_offset()));

void StubCodeCompiler::GenerateDeoptimizeStub() {
  __ ldr(CODE_REG, Address(THR, target::Thread::deoptimize_stub_offset()));

static void GenerateNoSuchMethodDispatcherBody(Assembler* assembler) {
  __ EnterStubFrame();
             target::CallSiteData::arguments_descriptor_offset()));

  __ LoadCompressedSmiFieldFromOffset(
  __ add(TMP, FP, Operand(R2, LSL, target::kWordSizeLog2 - 1));
                  target::frame_layout.param_end_from_fp * target::kWordSize);

  __ LoadCompressedSmiFieldFromOffset(
      R3, ARGS_DESC_REG, target::ArgumentsDescriptor::type_args_len_offset());

  PushArrayOfArguments(assembler);
  const intptr_t kNumArgs = 4;
  __ CallRuntime(kNoSuchMethodFromCallStubRuntimeEntry, kNumArgs);
  __ LeaveStubFrame();

static void GenerateDispatcherCode(Assembler* assembler,
                                   Label* call_target_function) {
  __ Comment("NoSuchMethodDispatch");
  __ b(call_target_function, NE);
  GenerateNoSuchMethodDispatcherBody(assembler);

void StubCodeCompiler::GenerateNoSuchMethodDispatcherStub() {
  GenerateNoSuchMethodDispatcherBody(assembler);
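
// Array allocation fast path: bump-allocates from Thread::top, branching to
// slow_case on overflow (CS) or when the length exceeds the maximum new-space
// length; the tag word is assembled from the size tag and class id, and the
// payload is nulled two words at a time (hence the kObjectAlignment assert).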
void StubCodeCompiler::GenerateAllocateArrayStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    const intptr_t max_len =
    __ b(&slow_case, HI);

    const intptr_t cid = kArrayCid;
           Address(THR, target::Thread::top_offset()));

    intptr_t fixed_size_plus_alignment_padding =
        target::Array::header_size() +
    __ LoadImmediate(R3, fixed_size_plus_alignment_padding);

#if defined(DART_COMPRESSED_POINTERS)
    __ b(&slow_case, CS);

    __ LoadFromOffset(TMP, THR, target::Thread::end_offset());
    __ b(&slow_case, CS);

    __ str(R7, Address(THR, target::Thread::top_offset()));

    __ StoreCompressedIntoObjectOffsetNoBarrier(
        target::Array::length_offset(),

    const intptr_t shift = target::UntaggedObject::kTagBitsSizeTagPos -
    __ CompareImmediate(R3, target::UntaggedObject::kSizeTagMaxSizeTag);
    __ LslImmediate(TMP, R3, shift);

    __ LoadImmediate(TMP, tags);
        target::Array::tags_offset());

#if defined(DART_COMPRESSED_POINTERS)
    __ andi(kWordOfNulls, NULL_REG, Immediate(0xFFFFFFFF));
    __ orr(kWordOfNulls, kWordOfNulls, Operand(kWordOfNulls, LSL, 32));

    ASSERT(target::kObjectAlignment == 2 * target::kWordSize);
    __ stp(kWordOfNulls, kWordOfNulls,

    __ CompareRegisters(R3, R7);
    __ WriteAllocationCanary(R7);

    __ Bind(&slow_case);

  __ EnterStubFrame();
  __ CallRuntime(kAllocateArrayRuntimeEntry, 2);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateAllocateMintSharedWithFPURegsStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    __ Bind(&slow_case);

  GenerateSharedStub(
      true, &kAllocateMintRuntimeEntry,
      target::Thread::allocate_mint_with_fpu_regs_stub_offset(),

void StubCodeCompiler::GenerateAllocateMintSharedWithoutFPURegsStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    __ Bind(&slow_case);

  GenerateSharedStub(
      false, &kAllocateMintRuntimeEntry,
      target::Thread::allocate_mint_without_fpu_regs_stub_offset(),
void StubCodeCompiler::GenerateInvokeDartCodeStub() {
  __ Comment("InvokeDartCodeStub");

  __ SetupCSPFromThread(R3);

  __ ldr(TMP, Address(R3, target::Thread::invoke_dart_code_stub_offset()));

#if defined(DART_TARGET_OS_FUCHSIA)
  __ str(R18, Address(R3, target::Thread::saved_shadow_call_stack_offset()));
#elif defined(USING_SHADOW_CALL_STACK)

  __ PushNativeCalleeSavedRegisters();

  __ RestorePinnedRegisters();

  __ LoadFromOffset(R4, THR, target::Thread::vm_tag_offset());

  __ LoadFromOffset(R6, THR, target::Thread::top_resource_offset());
  __ StoreToOffset(ZR, THR, target::Thread::top_resource_offset());

  __ LoadFromOffset(R6, THR, target::Thread::exit_through_ffi_offset());

  __ StoreToOffset(ZR, THR, target::Thread::exit_through_ffi_offset());

  __ LoadFromOffset(R6, THR, target::Thread::top_exit_frame_info_offset());
  __ StoreToOffset(ZR, THR, target::Thread::top_exit_frame_info_offset());

#if defined(DART_TARGET_OS_FUCHSIA)
  ASSERT(target::frame_layout.exit_link_slot_from_entry_fp == -24);
  ASSERT(target::frame_layout.exit_link_slot_from_entry_fp == -23);

  __ EmitEntryFrameVerification();

  __ LoadImmediate(R6, VMTag::kDartTagId);
  __ StoreToOffset(R6, THR, target::Thread::vm_tag_offset());

  __ LoadCompressedSmiFieldFromOffset(
      R5, R4, target::ArgumentsDescriptor::count_offset());
  __ LoadCompressedSmiFieldFromOffset(
      R3, R4, target::ArgumentsDescriptor::type_args_len_offset());

  Label push_arguments;
  Label done_push_arguments;
  __ cmp(R5, Operand(0));
  __ b(&done_push_arguments, EQ);
  __ LoadImmediate(R1, 0);
  __ Bind(&push_arguments);
  __ LoadCompressed(R3, Address(R2));

  __ add(R1, R1, Operand(1));
  __ add(R2, R2, Operand(target::kCompressedWordSize));

  __ b(&push_arguments, LT);
  __ Bind(&done_push_arguments);

  if (FLAG_precompiled_mode) {
    __ SetupGlobalPoolAndDispatchTable();

  __ ldr(R0, FieldAddress(CODE_REG, target::Code::entry_point_offset()));

  __ Comment("InvokeDartCodeStub return");
      target::frame_layout.exit_link_slot_from_entry_fp * target::kWordSize);

  __ StoreToOffset(R6, THR, target::Thread::top_exit_frame_info_offset());

  __ StoreToOffset(R6, THR, target::Thread::exit_through_ffi_offset());

  __ StoreToOffset(R6, THR, target::Thread::top_resource_offset());

  __ StoreToOffset(R4, THR, target::Thread::vm_tag_offset());

  __ PopNativeCalleeSavedRegisters();
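
// Context allocation helper: computes the allocation size from the variable
// count in R1 plus header and alignment padding, bump-allocates, and writes
// the tags and num_variables fields, leaving parent/slot initialization to
// the callers (allocate and clone, below).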
static void GenerateAllocateContextSpaceStub(Assembler* assembler,
                                             Label* slow_case) {
  intptr_t fixed_size_plus_alignment_padding =
      target::Context::header_size() +
  __ LoadImmediate(R2, fixed_size_plus_alignment_padding);

  __ ldr(R0, Address(THR, target::Thread::top_offset()));

  __ ldr(TMP, Address(THR, target::Thread::end_offset()));
  __ b(slow_case, CS);
  __ CheckAllocationCanary(R0);

  __ str(R3, Address(THR, target::Thread::top_offset()));

  const intptr_t shift = target::UntaggedObject::kTagBitsSizeTagPos -
  __ CompareImmediate(R2, target::UntaggedObject::kSizeTagMaxSizeTag);
  __ LslImmediate(TMP, R2, shift);

  __ LoadImmediate(TMP, tags);
  __ StoreFieldToOffset(R2, R0, target::Object::tags_offset());

  __ StoreFieldToOffset(R1, R0, target::Context::num_variables_offset(),
void StubCodeCompiler::GenerateAllocateContextStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    GenerateAllocateContextSpaceStub(assembler, &slow_case);

    __ StoreCompressedIntoObjectOffset(R0, target::Context::parent_offset(),

#if defined(DART_COMPRESSED_POINTERS)
    __ andi(kWordOfNulls, NULL_REG, Immediate(0xFFFFFFFF));
    __ orr(kWordOfNulls, kWordOfNulls, Operand(kWordOfNulls, LSL, 32));

    ASSERT(target::kObjectAlignment == 2 * target::kWordSize);
    __ stp(kWordOfNulls, kWordOfNulls,

        Operand(target::kObjectAlignment / target::kCompressedWordSize));

    __ ldr(TMP2, Address(THR, target::Thread::top_offset()));
    __ WriteAllocationCanary(TMP2);

    __ Bind(&slow_case);

  __ EnterStubFrame();
  __ CallRuntime(kAllocateContextRuntimeEntry, 1);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateCloneContextStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    __ ldr(R1, FieldAddress(R5, target::Context::num_variables_offset()),

    GenerateAllocateContextSpaceStub(assembler, &slow_case);

    __ LoadCompressed(R3, FieldAddress(R5, target::Context::parent_offset()));
    __ StoreCompressedIntoObjectNoBarrier(
        R0, FieldAddress(R0, target::Context::parent_offset()), R3);

    __ subs(R1, R1, Operand(1));

    __ Bind(&slow_case);

  __ EnterStubFrame();
  __ CallRuntime(kCloneContextRuntimeEntry, 1);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateWriteBarrierWrappersStub() {
    intptr_t start = __ CodeSize();
    SPILLS_LR_TO_FRAME(__ Push(LR));
    __ Call(Address(THR, target::Thread::write_barrier_entry_point_offset()));
    RESTORES_LR_FROM_FRAME(__ Pop(LR));
    READS_RETURN_ADDRESS_FROM_LR(__ ret(LR));
    intptr_t end = __ CodeSize();
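
// Write-barrier slow path shared by the two stubs below (cards == true adds
// the card-table path used by large pages). The marking half pushes the
// stored value onto the thread's marking-stack block; the generational half
// adds the source object to the store-buffer block. The runtime is called
// only when a block fills up or a card must be remembered.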
static void GenerateWriteBarrierStubHelper(Assembler* assembler, bool cards) {
  RegisterSet spill_set((1 << R2) | (1 << R3) | (1 << R4), 0);

  __ ldr(TMP, FieldAddress(R0, target::Object::tags_offset()));
  __ ldr(TMP2, Address(THR, target::Thread::write_barrier_mask_offset()));

  __ tsti(TMP, Immediate(target::UntaggedObject::kIncrementalBarrierMask));

  __ PushRegisters(spill_set);

    __ LoadImmediate(TMP, 1 << target::UntaggedObject::kNotMarkedBit);
    __ tbz(&done, TMP, target::UntaggedObject::kNotMarkedBit);

    __ tbz(&done, R2, target::UntaggedObject::kNotMarkedBit);
    __ AndImmediate(R2, R2, ~(1 << target::UntaggedObject::kNotMarkedBit));
    __ cbnz(&retry, R4);

  __ LoadFromOffset(R4, THR, target::Thread::marking_stack_block_offset());
  __ add(R3, R4, Operand(R2, LSL, target::kWordSizeLog2));
  __ add(R2, R2, Operand(1));

    LeafRuntimeScope rt(assembler,
    rt.Call(kMarkingStackBlockProcessRuntimeEntry, 1);

  __ PopRegisters(spill_set);

  Label add_to_remembered_set, remember_card;
  __ Bind(&skip_marking);
  __ ldr(TMP, FieldAddress(R1, target::Object::tags_offset()));
  __ ldr(TMP2, FieldAddress(R0, target::Object::tags_offset()));
          Operand(TMP, LSR, target::UntaggedObject::kBarrierOverlapShift));
  __ tsti(TMP, Immediate(target::UntaggedObject::kGenerationalBarrierMask));

  __ Bind(&add_to_remembered_set);

    __ tbnz(&remember_card, TMP, target::UntaggedObject::kCardRememberedBit);

    __ tbz(&ok, TMP, target::UntaggedObject::kCardRememberedBit);
    __ Stop("Wrong barrier");

  __ PushRegisters(spill_set);

    __ LoadImmediate(TMP,
                     1 << target::UntaggedObject::kOldAndNotRememberedBit);
    __ tbz(&done, TMP, target::UntaggedObject::kOldAndNotRememberedBit);

    __ tbz(&done, R2, target::UntaggedObject::kOldAndNotRememberedBit);
        ~(1 << target::UntaggedObject::kOldAndNotRememberedBit));
    __ cbnz(&retry, R4);

  __ LoadFromOffset(R4, THR, target::Thread::store_buffer_block_offset());
  __ add(R3, R4, Operand(R2, LSL, target::kWordSizeLog2));
  __ add(R2, R2, Operand(1));

    LeafRuntimeScope rt(assembler,
    rt.Call(kStoreBufferBlockProcessRuntimeEntry, 1);

  __ PopRegisters(spill_set);

  Label remember_card_slow;

  __ Bind(&remember_card);
         Address(TMP, target::Page::card_table_offset()));
  __ cbz(&remember_card_slow, TMP2);

  __ LsrImmediate(R25, R25, target::Page::kBytesPerCardLog2);
  __ LoadImmediate(TMP, 1);
  __ LsrImmediate(R25, R25, target::kBitsPerWordLog2);

  __ Bind(&remember_card_slow);
    LeafRuntimeScope rt(assembler,
    rt.Call(kRememberCardRuntimeEntry, 2);
void StubCodeCompiler::GenerateWriteBarrierStub() {
  GenerateWriteBarrierStubHelper(assembler, false);

void StubCodeCompiler::GenerateArrayWriteBarrierStub() {
  GenerateWriteBarrierStubHelper(assembler, true);
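
// Shared object-allocation fast path: the instance size is decoded from the
// tag word in kTagsReg, the object is bump-allocated and null-initialized in
// word pairs, and for parameterized classes the type-arguments slot (located
// via the class's host_type_arguments_field_offset_in_words) is also filled.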
static void GenerateAllocateObjectHelper(Assembler* assembler,
                                         bool is_cls_parameterized) {
#if !defined(PRODUCT)
    __ MaybeTraceAllocation(kCidRegister, &slow_case,
                            kTraceAllocationTempReg);

    __ ExtractInstanceSizeFromTags(kInstanceSizeReg, kTagsReg);
           Operand(kInstanceSizeReg));
    __ CompareRegisters(kEndReg, kNewTopReg);

    __ str(kNewTopReg, Address(THR, target::Thread::top_offset()));
            target::Object::tags_offset()));
          target::Instance::first_field_offset());

#if defined(DART_COMPRESSED_POINTERS)
      __ andi(kWordOfNulls, NULL_REG, Immediate(0xFFFFFFFF));
      __ orr(kWordOfNulls, kWordOfNulls, Operand(kWordOfNulls, LSL, 32));

      ASSERT(target::kObjectAlignment == 2 * target::kWordSize);
      __ stp(kWordOfNulls, kWordOfNulls,

      __ CompareRegisters(kFieldReg, kNewTopReg);
    __ WriteAllocationCanary(kNewTopReg);

  if (is_cls_parameterized) {
    Label not_parameterized_case;

    __ ExtractClassIdFromTags(kClsIdReg, kTagsReg);
    __ LoadClassById(kTypeOffsetReg, kClsIdReg);
           FieldAddress(kTypeOffsetReg,
                        host_type_arguments_field_offset_in_words_offset()),

    __ StoreCompressedIntoObjectNoBarrier(

    __ Bind(&not_parameterized_case);

  __ Bind(&slow_case);

  if (!is_cls_parameterized) {
         Address(THR,
                 target::Thread::allocate_object_slow_entry_point_offset()));
void StubCodeCompiler::GenerateAllocateObjectStub() {
  GenerateAllocateObjectHelper(assembler, false);

void StubCodeCompiler::GenerateAllocateObjectParameterizedStub() {
  GenerateAllocateObjectHelper(assembler, true);
void StubCodeCompiler::GenerateAllocateObjectSlowStub() {
  if (!FLAG_precompiled_mode) {
           Address(THR, target::Thread::call_to_runtime_stub_offset()));

  __ EnterStubFrame();
  __ CallRuntime(kAllocateObjectRuntimeEntry, 2);
  __ LeaveStubFrame();

    const Code& allocate_object,
    const Code& allocat_object_parametrized) {
  classid_t cls_id = target::Class::GetId(cls);

  const bool is_cls_parameterized = target::Class::NumTypeArguments(cls) > 0;
  ASSERT(!is_cls_parameterized || target::Class::TypeArgumentsFieldOffset(
                                      cls) != target::Class::kNoTypeArguments);

  const intptr_t instance_size = target::Class::GetInstanceSize(cls);
  ASSERT(instance_size > 0);

  __ LoadImmediate(kTagsReg, tags);

  if (!FLAG_use_slow_path && FLAG_inline_alloc &&
      !target::Class::TraceAllocation(cls) &&
    RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));

    if (is_cls_parameterized) {
          CastHandle<Object>(allocat_object_parametrized))) {
        __ GenerateUnRelocatedPcRelativeTailCall();
        unresolved_calls->Add(new UnresolvedPcRelativeCall(
            __ CodeSize(), allocat_object_parametrized, true));
                allocate_object_parameterized_entry_point_offset()));

        __ GenerateUnRelocatedPcRelativeTailCall();
        unresolved_calls->Add(new UnresolvedPcRelativeCall(
            __ CodeSize(), allocate_object, true));
           Address(THR, target::Thread::allocate_object_entry_point_offset()));

  if (!is_cls_parameterized) {
            target::Thread::allocate_object_slow_entry_point_offset()));
void StubCodeCompiler::GenerateCallClosureNoSuchMethodStub() {
  __ EnterStubFrame();

  __ LoadCompressedSmiFieldFromOffset(
      R2, R4, target::ArgumentsDescriptor::size_offset());
  __ add(TMP, FP, Operand(R2, LSL, target::kWordSizeLog2 - 1));
                  target::frame_layout.param_end_from_fp * target::kWordSize);

  __ LoadCompressedFieldFromOffset(TMP, R6,
                                   target::Closure::function_offset());

  __ LoadCompressedSmiFieldFromOffset(
      R3, R4, target::ArgumentsDescriptor::type_args_len_offset());

  const intptr_t kNumArgs = 4;
  __ CallRuntime(kNoSuchMethodFromPrologueRuntimeEntry, kNumArgs);
void StubCodeCompiler::GenerateOptimizedUsageCounterIncrement() {
  if (FLAG_precompiled_mode) {
  if (FLAG_trace_optimized_ic_calls) {
    __ EnterStubFrame();
    __ CallRuntime(kTraceICCallRuntimeEntry, 2);
    __ LeaveStubFrame();

  __ LoadFieldFromOffset(R7, func_reg,
                         target::Function::usage_counter_offset(), kFourBytes);
  __ add(R7, R7, Operand(1));
  __ StoreFieldToOffset(R7, func_reg,
                        target::Function::usage_counter_offset(), kFourBytes);

void StubCodeCompiler::GenerateUsageCounterIncrement(Register temp_reg) {
  if (FLAG_precompiled_mode) {
  if (FLAG_optimization_counter_threshold >= 0) {
    __ Comment("Increment function counter");
        target::ICData::owner_offset());
    __ LoadFieldFromOffset(R7, func_reg,
                           target::Function::usage_counter_offset(),
                           kFourBytes);
    __ AddImmediate(R7, 1);
    __ StoreFieldToOffset(R7, func_reg,
                          target::Function::usage_counter_offset(),
                          kFourBytes);
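
// Inline Smi fast path for the +, <, and == IC stubs: both operands are
// loaded from the stack, verified to be Smis, and combined inline; non-Smi
// operands or overflow (VS) fall through to the regular IC lookup.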
static void EmitFastSmiOp(Assembler* assembler,
                          Token::Kind kind,
                          intptr_t num_args,
                          Label* not_smi_or_overflow) {
  __ Comment("Fast Smi op");
  __ ldr(R0, Address(SP, +1 * target::kWordSize));
  __ ldr(R1, Address(SP, +0 * target::kWordSize));

  __ BranchIfNotSmi(TMP, not_smi_or_overflow);
      __ b(not_smi_or_overflow, VS);
      __ CompareObjectRegisters(R0, R1);
      __ CompareObjectRegisters(R0, R1);

  __ LoadFieldFromOffset(R6, R5, target::ICData::entries_offset());

  __ LoadCompressedSmiFromOffset(R1, R6, 0);
  __ LoadCompressedSmiFromOffset(R1, R6, target::kCompressedWordSize);
  __ Stop("Incorrect IC data");

  if (FLAG_optimization_counter_threshold >= 0) {
    const intptr_t count_offset =
        target::ICData::CountIndexFor(num_args) * target::kCompressedWordSize;
    __ LoadCompressedSmiFromOffset(R1, R6, count_offset);
static void GenerateRecordEntryPoint(Assembler* assembler) {
  __ BindUncheckedEntryPoint();

void StubCodeCompiler::GenerateNArgsCheckInlineCacheStub(
    intptr_t num_args,
    const RuntimeEntry& handle_ic_miss,
    Token::Kind kind,
    Optimized optimized,
    CallType type,
    Exactness exactness) {
  const bool save_entry_point = kind == Token::kILLEGAL;
  if (FLAG_precompiled_mode) {
  if (save_entry_point) {

  ASSERT(num_args == 1 || num_args == 2);

  __ LoadFromOffset(R6, R5,
  ASSERT(target::ICData::NumArgsTestedShift() == 0);
  __ andi(R6, R6, Immediate(target::ICData::NumArgsTestedMask()));
  __ CompareImmediate(R6, num_args);
  __ Stop("Incorrect stub for IC data");

#if !defined(PRODUCT)
  Label stepping, done_stepping;
  __ Comment("Check single stepping");
  __ LoadFromOffset(R6, R6, target::Isolate::single_step_offset(),
  __ CompareRegisters(R6, ZR);
  __ b(&stepping, NE);
  __ Bind(&done_stepping);

  Label not_smi_or_overflow;
  if (kind != Token::kILLEGAL) {
    EmitFastSmiOp(assembler, kind, num_args, &not_smi_or_overflow);
  __ Bind(&not_smi_or_overflow);

  __ Comment("Extract ICData initial values and receiver cid");
  __ LoadFieldFromOffset(R6, R5, target::ICData::entries_offset());
    __ LoadTaggedClassIdMayBeSmi(R3, R0);
           target::CallSiteData::arguments_descriptor_offset());
    if (num_args == 2) {
      __ LoadCompressedSmiFieldFromOffset(
      __ sub(R7, R7, Operand(2));
      __ LoadTaggedClassIdMayBeSmi(R1, R1);
           target::CallSiteData::arguments_descriptor_offset());
    __ LoadCompressedSmiFieldFromOffset(
    __ sub(R7, R7, Operand(1));
    __ LoadTaggedClassIdMayBeSmi(R3, R0);
    if (num_args == 2) {
      __ AddImmediate(R1, R7, -1);
      __ LoadTaggedClassIdMayBeSmi(R1, R1);

  const bool optimize = kind == Token::kILLEGAL;

  Label loop, found, miss;
  __ Comment("ICData loop");

  for (int unroll = optimize ? 4 : 2; unroll >= 0; unroll--) {
    __ LoadCompressedSmiFromOffset(R2, R6, 0);
    __ CompareObjectRegisters(R3, R2);
    if (num_args == 2) {
      __ LoadCompressedSmiFromOffset(R2, R6, target::kCompressedWordSize);
      __ CompareObjectRegisters(R1, R2);

    const intptr_t entry_size = target::ICData::TestEntryLengthFor(
                                target::kCompressedWordSize;
    __ AddImmediate(R6, entry_size);

  __ Comment("IC miss");

  __ LoadCompressedSmiFieldFromOffset(
  __ sub(R7, R7, Operand(1));

  __ EnterStubFrame();

  if (save_entry_point) {

  for (intptr_t i = 0; i < num_args; i++) {
    __ LoadFromOffset(TMP, R7, -i * target::kWordSize);

  __ CallRuntime(handle_ic_miss, num_args + 1);
  __ Drop(num_args + 1);

  if (save_entry_point) {

  __ RestoreCodePointer();
  __ LeaveStubFrame();
  Label call_target_function;
  if (!FLAG_lazy_dispatchers) {
    GenerateDispatcherCode(assembler, &call_target_function);
    __ b(&call_target_function);

  const intptr_t target_offset =
      target::ICData::TargetIndexFor(num_args) * target::kCompressedWordSize;
  const intptr_t count_offset =
      target::ICData::CountIndexFor(num_args) * target::kCompressedWordSize;
  const intptr_t exactness_offset =
      target::ICData::ExactnessIndexFor(num_args) * target::kCompressedWordSize;

  Label call_target_function_through_unchecked_entry;

    __ LoadCompressedSmi(R1, Address(R6, exactness_offset));
    __ CompareImmediate(
    __ BranchIf(LESS, &exactness_ok);
    __ BranchIf(EQUAL, &call_target_function_through_unchecked_entry);

        R2, FieldAddress(R5, target::ICData::receivers_static_type_offset()));
    __ LoadCompressed(R2, FieldAddress(R2, target::Type::arguments_offset()));

#if defined(DART_COMPRESSED_POINTERS)
    __ CompareObjectRegisters(R2, R3);
    __ BranchIf(EQUAL, &call_target_function_through_unchecked_entry);

    __ Bind(&exactness_ok);

  if (FLAG_optimization_counter_threshold >= 0) {
    __ Comment("Update caller's counter");
    __ LoadCompressedSmiFromOffset(R1, R6, count_offset);

  __ Comment("Call target");
  __ Bind(&call_target_function);
      target::Function::code_offset());
  if (save_entry_point) {
    __ ldr(R2, Address(R2, 0));
        target::Function::entry_point_offset());

    __ Bind(&call_target_function_through_unchecked_entry);
    if (FLAG_optimization_counter_threshold >= 0) {
      __ Comment("Update ICData counter");
      __ LoadCompressedSmiFromOffset(R1, R6, count_offset);

    __ Comment("Call target (via unchecked entry point)");
        target::Function::code_offset());
    __ LoadFieldFromOffset(

#if !defined(PRODUCT)
  __ EnterStubFrame();
  if (save_entry_point) {
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  if (save_entry_point) {
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ b(&done_stepping);
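
// The IC-call stubs below are thin wrappers over
// GenerateNArgsCheckInlineCacheStub, differing only in checked argument
// count, miss handler, token kind (for the Smi fast paths), optimization
// state, and exactness tracking.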
void StubCodeCompiler::GenerateOneArgCheckInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateOneArgCheckInlineCacheWithExactnessCheckStub() {
  GenerateNArgsCheckInlineCacheStub(
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateTwoArgsCheckInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateSmiAddInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD,
      kUnoptimized,

void StubCodeCompiler::GenerateSmiLessInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kLT,
      kUnoptimized,

void StubCodeCompiler::GenerateSmiEqualInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ,
      kUnoptimized,

void StubCodeCompiler::GenerateOneArgOptimizedCheckInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,
      kOptimized,

void StubCodeCompiler::
    GenerateOneArgOptimizedCheckInlineCacheWithExactnessCheckStub() {
  GenerateNArgsCheckInlineCacheStub(
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,
      kOptimized,

void StubCodeCompiler::GenerateTwoArgsOptimizedCheckInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,
void StubCodeCompiler::GenerateZeroArgsUnoptimizedStaticCallStub() {
  __ LoadFromOffset(R6, R5,
  ASSERT(target::ICData::NumArgsTestedShift() == 0);
  __ andi(R6, R6, Immediate(target::ICData::NumArgsTestedMask()));
  __ CompareImmediate(R6, 0);
  __ Stop("Incorrect IC data for unoptimized static call");

#if !defined(PRODUCT)
  Label stepping, done_stepping;
  __ LoadFromOffset(R6, R6, target::Isolate::single_step_offset(),
  __ CompareImmediate(R6, 0);
  __ b(&stepping, NE);
  __ Bind(&done_stepping);

  __ LoadFieldFromOffset(R6, R5, target::ICData::entries_offset());

  const intptr_t target_offset =
      target::ICData::TargetIndexFor(0) * target::kCompressedWordSize;
  const intptr_t count_offset =
      target::ICData::CountIndexFor(0) * target::kCompressedWordSize;

  if (FLAG_optimization_counter_threshold >= 0) {
    __ LoadCompressedSmiFromOffset(R1, R6, count_offset);

       target::CallSiteData::arguments_descriptor_offset());
      target::Function::code_offset());
  __ ldr(R2, Address(R2, 0));

#if !defined(PRODUCT)
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ b(&done_stepping);
void StubCodeCompiler::GenerateOneArgUnoptimizedStaticCallStub() {

void StubCodeCompiler::GenerateTwoArgsUnoptimizedStaticCallStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateLazyCompileStub() {
  __ EnterStubFrame();
  __ CallRuntime(kCompileFunctionRuntimeEntry, 1);
  __ LeaveStubFrame();
      target::Function::code_offset());
      target::Function::entry_point_offset());
void StubCodeCompiler::GenerateICCallBreakpointStub() {
  __ Stop("No debugging in PRODUCT mode");
  __ EnterStubFrame();
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();
  __ LoadFieldFromOffset(TMP, CODE_REG, target::Code::entry_point_offset());

void StubCodeCompiler::GenerateUnoptStaticCallBreakpointStub() {
  __ Stop("No debugging in PRODUCT mode");
  __ EnterStubFrame();
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();
  __ LoadFieldFromOffset(TMP, CODE_REG, target::Code::entry_point_offset());

void StubCodeCompiler::GenerateRuntimeCallBreakpointStub() {
  __ Stop("No debugging in PRODUCT mode");
  __ EnterStubFrame();
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();
  __ LoadFieldFromOffset(R0, CODE_REG, target::Code::entry_point_offset());

void StubCodeCompiler::GenerateDebugStepCheckStub() {
  __ Stop("No debugging in PRODUCT mode");

  Label stepping, done_stepping;
  __ LoadFromOffset(R1, R1, target::Isolate::single_step_offset(),
  __ CompareImmediate(R1, 0);
  __ b(&stepping, NE);
  __ Bind(&done_stepping);

  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();
  __ b(&done_stepping);
void StubCodeCompiler::GenerateSubtypeNTestCacheStub(Assembler* assembler,

  GenerateSubtypeTestCacheSearch(
          Address(kCacheArrayReg, target::kCompressedWordSize *
                                      target::SubtypeTestCache::kTestResult));
void StubCodeCompiler::GenerateGetCStackPointerStub() {

void StubCodeCompiler::GenerateJumpToFrameStub() {
  __ set_lr_state(compiler::LRState::Clobbered());

  __ SetupCSPFromThread(THR);
#if defined(DART_TARGET_OS_FUCHSIA)
  __ ldr(R18, Address(THR, target::Thread::saved_shadow_call_stack_offset()));
#elif defined(USING_SHADOW_CALL_STACK)

  Label exit_through_non_ffi;
  __ LoadFromOffset(tmp1, THR,
                    compiler::target::Thread::exit_through_ffi_offset());
  __ LoadImmediate(tmp2, target::Thread::exit_through_ffi());
  __ cmp(tmp1, Operand(tmp2));
  __ b(&exit_through_non_ffi, NE);
  __ TransitionNativeToGenerated(tmp1, true,
  __ Bind(&exit_through_non_ffi);

  __ RestorePinnedRegisters();

  __ LoadImmediate(R2, VMTag::kDartTagId);
  __ StoreToOffset(R2, THR, target::Thread::vm_tag_offset());

  __ StoreToOffset(ZR, THR, target::Thread::top_exit_frame_info_offset());

  __ RestoreCodePointer();
  if (FLAG_precompiled_mode) {
    __ SetupGlobalPoolAndDispatchTable();

  __ LoadPoolPointer();
void StubCodeCompiler::GenerateRunExceptionHandlerStub() {
  WRITES_RETURN_ADDRESS_TO_LR(
      __ LoadFromOffset(LR, THR, target::Thread::resume_pc_offset()));

  word offset_from_thread = 0;
  __ LoadFromOffset(R2, THR, offset_from_thread);

  __ LoadFromOffset(R0, THR, target::Thread::active_exception_offset());
  __ StoreToOffset(R2, THR, target::Thread::active_exception_offset());

  __ LoadFromOffset(R1, THR, target::Thread::active_stacktrace_offset());
  __ StoreToOffset(R2, THR, target::Thread::active_stacktrace_offset());

void StubCodeCompiler::GenerateDeoptForRewindStub() {
  WRITES_RETURN_ADDRESS_TO_LR(
      __ LoadFromOffset(LR, THR, target::Thread::resume_pc_offset()));

  __ EnterStubFrame();
  __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateOptimizeFunctionStub() {
  __ LoadFromOffset(CODE_REG, THR, target::Thread::optimize_stub_offset());
  __ EnterStubFrame();
  __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1);
      target::Function::code_offset());
      target::Function::entry_point_offset());
  __ LeaveStubFrame();
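
// identical() with number semantics: two doubles or two Mints compare by
// their 64-bit values even when they are distinct heap objects; all other
// cases fall through to a plain reference comparison.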
static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler,
                                                 const Register left,
                                                 const Register right) {
  Label reference_compare, check_mint;

  __ BranchIfSmi(left, &reference_compare);
  __ BranchIfSmi(right, &reference_compare);

  __ CompareClassId(left, kDoubleCid);
  __ b(&check_mint, NE);
  __ CompareClassId(right, kDoubleCid);
  __ b(&reference_compare, NE);

  __ LoadFieldFromOffset(left, left, target::Double::value_offset());
  __ LoadFieldFromOffset(right, right, target::Double::value_offset());

  __ Bind(&check_mint);
  __ CompareClassId(left, kMintCid);
  __ b(&reference_compare, NE);
  __ CompareClassId(right, kMintCid);
  __ b(&reference_compare, NE);
  __ LoadFieldFromOffset(left, left, target::Mint::value_offset());
  __ LoadFieldFromOffset(right, right, target::Mint::value_offset());

  __ Bind(&reference_compare);
void StubCodeCompiler::GenerateUnoptimizedIdenticalWithNumberCheckStub() {
#if !defined(PRODUCT)
  Label stepping, done_stepping;
  __ LoadFromOffset(R1, R1, target::Isolate::single_step_offset(),
  __ CompareImmediate(R1, 0);
  __ b(&stepping, NE);
  __ Bind(&done_stepping);

  __ LoadFromOffset(left, SP, 1 * target::kWordSize);
  __ LoadFromOffset(right, SP, 0 * target::kWordSize);

#if !defined(PRODUCT)
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ b(&done_stepping);

void StubCodeCompiler::GenerateOptimizedIdenticalWithNumberCheckStub() {
  __ LoadFromOffset(left, SP, 1 * target::kWordSize);
  __ LoadFromOffset(right, SP, 0 * target::kWordSize);
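
// Megamorphic lookup: probes the MegamorphicCache hash table (buckets and
// mask are loaded from IC_DATA_REG) with the receiver's class id; a hit
// loads the target Function and jumps to its entry point, while a miss
// re-probes and ultimately falls back to the switchable-call-miss path.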
void StubCodeCompiler::GenerateMegamorphicCallStub() {
  __ BranchIfSmi(R0, &smi_case);

  __ Bind(&cid_loaded);
         FieldAddress(IC_DATA_REG, target::MegamorphicCache::buckets_offset()));
         FieldAddress(IC_DATA_REG, target::MegamorphicCache::mask_offset()));

  ASSERT(target::MegamorphicCache::kSpreadFactor == 7);
  __ LslImmediate(R3, R8, 3);

  const intptr_t base = target::Array::data_offset();

  __ LoadCompressedSmiFieldFromOffset(R6, TMP, base);
  __ CompareObjectRegisters(R6, R8);
  __ b(&probe_failed, NE);

  __ Bind(&load_target);
      FieldAddress(TMP, base + target::kCompressedWordSize));
      FieldAddress(FUNCTION_REG, target::Function::entry_point_offset()));
             target::CallSiteData::arguments_descriptor_offset()));
  if (!FLAG_precompiled_mode) {

  __ Bind(&probe_failed);

  __ LoadImmediate(R8, kSmiCid);

  GenerateSwitchableCallMissStub();
void StubCodeCompiler::GenerateICCallThroughCodeStub() {
  Label loop, found, miss;
  __ ldr(R8, FieldAddress(IC_DATA_REG, target::ICData::entries_offset()));
             target::CallSiteData::arguments_descriptor_offset()));

  __ LoadTaggedClassIdMayBeSmi(R1, R0);

  __ LoadCompressedSmi(R2, Address(R8, 0));

  const intptr_t entry_length =
      target::ICData::TestEntryLengthFor(1, false) *
      target::kCompressedWordSize;
  __ AddImmediate(R8, entry_length);

  if (FLAG_precompiled_mode) {
    const intptr_t entry_offset =
        target::ICData::EntryPointIndexFor(1) * target::kCompressedWordSize;
    __ LoadCompressed(R1, Address(R8, entry_offset));
    __ ldr(R1, FieldAddress(R1, target::Function::entry_point_offset()));

    const intptr_t code_offset =
        target::ICData::CodeIndexFor(1) * target::kCompressedWordSize;
    __ ldr(R1, FieldAddress(CODE_REG, target::Code::entry_point_offset()));

  __ ldr(R1,
         Address(THR, target::Thread::switchable_call_miss_entry_offset()));
void StubCodeCompiler::GenerateMonomorphicSmiableCheckStub() {
  __ LoadClassIdMayBeSmi(IP0, R0);
      FieldAddress(R5, target::MonomorphicSmiableCall::expected_cid_offset()));
      FieldAddress(R5, target::MonomorphicSmiableCall::entrypoint_offset()));
      Address(THR, target::Thread::switchable_call_miss_entry_offset()));

void StubCodeCompiler::GenerateSwitchableCallMissStub() {
         Address(THR, target::Thread::switchable_call_miss_stub_offset()));
  __ EnterStubFrame();
  __ CallRuntime(kSwitchableCallMissRuntimeEntry, 2);
  __ LeaveStubFrame();

  __ ldr(R1, FieldAddress(CODE_REG, target::Code::entry_point_offset(

void StubCodeCompiler::GenerateSingleTargetCallStub() {
  __ LoadClassIdMayBeSmi(R1, R0);
  __ ldr(R2, FieldAddress(R5, target::SingleTargetCache::lower_limit_offset()),
  __ ldr(R3, FieldAddress(R5, target::SingleTargetCache::upper_limit_offset()),

  __ ldr(R1, FieldAddress(R5, target::SingleTargetCache::entry_point_offset()));
      FieldAddress(R5, target::SingleTargetCache::target_offset()));

  __ EnterStubFrame();
  __ CallRuntime(kSwitchableCallMissRuntimeEntry, 2);
  __ LeaveStubFrame();

  __ ldr(R1, FieldAddress(CODE_REG, target::Code::entry_point_offset(
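
// TypedData allocation fast path: the Smi element count in R2 is scaled by
// the element size and rounded up with header and alignment padding, the
// object is bump-allocated, and the inner data pointer is set to the payload
// via StoreInternalPointer; non-Smi or oversized lengths go to call_runtime.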
static int GetScaleFactor(intptr_t size) {

void StubCodeCompiler::GenerateAllocateTypedDataArrayStub(intptr_t cid) {
  const intptr_t scale_shift = GetScaleFactor(element_size);

  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    __ BranchIfNotSmi(R2, &call_runtime);
    __ b(&call_runtime, HI);
    __ LslImmediate(R2, R2, scale_shift);
    const intptr_t fixed_size_plus_alignment_padding =
        target::TypedData::HeaderSize() +
    __ AddImmediate(R2, fixed_size_plus_alignment_padding);

    __ ldr(R0, Address(THR, target::Thread::top_offset()));
    __ b(&call_runtime, CS);

    __ ldr(R6, Address(THR, target::Thread::end_offset()));
    __ b(&call_runtime, CS);
    __ CheckAllocationCanary(R0);

    __ str(R1, Address(THR, target::Thread::top_offset()));

    __ CompareImmediate(R2, target::UntaggedObject::kSizeTagMaxSizeTag);
        target::UntaggedObject::kTagBitsSizeTagPos -
    __ LoadImmediate(TMP, tags);
    __ str(R2, FieldAddress(R0, target::Object::tags_offset()));

    __ StoreCompressedIntoObjectNoBarrier(
        R0, FieldAddress(R0, target::TypedDataBase::length_offset()), R2);

    __ AddImmediate(R2, R0, target::TypedData::HeaderSize() - 1);
    __ StoreInternalPointer(
        R0, FieldAddress(R0, target::PointerBase::data_offset()), R2);

    ASSERT(target::kObjectAlignment == 2 * target::kWordSize);

    __ WriteAllocationCanary(R1);

    __ Bind(&call_runtime);

  __ EnterStubFrame();
  __ CallRuntime(kAllocateTypedDataRuntimeEntry, 2);
  __ LeaveStubFrame();