14#define SHOULD_NOT_INCLUDE_RUNTIME
19#if defined(TARGET_ARCH_X64)
47 __ LoadFromOffset(
TMP,
TMP, target::Page::original_top_offset());
56 rt.Call(kEnsureRememberedAndMarkingDeferredRuntimeEntry, 2);
75static void WithExceptionCatchingTrampoline(Assembler* assembler,
76 std::function<
void()> fun) {
77#if defined(TARGET_USES_THREAD_SANITIZER) && !defined(USING_SIMULATOR)
81 const intptr_t kJumpBufferSize =
sizeof(jmp_buf);
83 const RegisterSet volatile_registers(
95 __ movq(kTsanUtilsReg, Address(
THR, target::Thread::tsan_utils_offset()));
96 __ pushq(Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()));
101 __ AddImmediate(
RSP, Immediate(-kJumpBufferSize));
102 __ movq(Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()),
107 __ PushRegisters(volatile_registers);
109 __ MoveRegister(kSavedRspReg,
RSP);
112 __ movq(kTsanUtilsReg, Address(
THR, target::Thread::tsan_utils_offset()));
114 Address(kTsanUtilsReg, target::TsanUtils::setjmp_function_offset()),
117 __ MoveRegister(
RSP, kSavedRspReg);
119 __ PopRegisters(volatile_registers);
122 __ CompareImmediate(
RAX, 0);
123 __ BranchIf(
EQUAL, &do_native_call);
128 __ AddImmediate(
RSP, Immediate(kJumpBufferSize));
129 __ movq(kTsanUtilsReg, Address(
THR, target::Thread::tsan_utils_offset()));
130 __ popq(Address(kTsanUtilsReg, target::TsanUtils::setjmp_buffer_offset()));
133 Address(kTsanUtilsReg, target::TsanUtils::exception_pc_offset()));
135 Address(kTsanUtilsReg, target::TsanUtils::exception_sp_offset()));
137 Address(kTsanUtilsReg, target::TsanUtils::exception_fp_offset()));
139 __ jmp(Address(
THR, target::Thread::jump_to_frame_entry_point_offset()));
143 __ Bind(&do_native_call);
144 __ MoveRegister(kSavedRspReg,
RSP);
149#if defined(TARGET_USES_THREAD_SANITIZER) && !defined(USING_SIMULATOR)
150 __ MoveRegister(
RSP, kSavedRspReg);
151 __ AddImmediate(
RSP, Immediate(kJumpBufferSize));
152 const Register kTsanUtilsReg2 = kSavedRspReg;
153 __ movq(kTsanUtilsReg2, Address(
THR, target::Thread::tsan_utils_offset()));
154 __ popq(Address(kTsanUtilsReg2, target::TsanUtils::setjmp_buffer_offset()));
166void StubCodeCompiler::GenerateCallToRuntimeStub() {
167 const intptr_t thread_offset = target::NativeArguments::thread_offset();
168 const intptr_t argc_tag_offset = target::NativeArguments::argc_tag_offset();
169 const intptr_t argv_offset = target::NativeArguments::argv_offset();
170 const intptr_t retval_offset = target::NativeArguments::retval_offset();
173 Address(
THR, target::Thread::call_to_runtime_stub_offset()));
178 __ movq(Address(
THR, target::Thread::top_exit_frame_info_offset()),
RBP);
181 __ movq(Address(
THR, target::Thread::exit_through_ffi_offset()),
182 Immediate(target::Thread::exit_through_runtime_call()));
188 __ movq(
RAX, Immediate(VMTag::kDartTagId));
191 __ Stop(
"Not coming from Dart code.");
199 WithExceptionCatchingTrampoline(
assembler, [&]() {
201 __ subq(
RSP, Immediate(target::NativeArguments::StructSize()));
207 __ movq(Address(
RSP, thread_offset),
THR);
208 __ movq(Address(
RSP, argc_tag_offset),
212 target::frame_layout.param_end_from_fp *
214 __ movq(Address(
RSP, argv_offset),
218 Immediate(1 * target::kWordSize));
219 __ movq(Address(
RSP, retval_offset),
221#if defined(DART_TARGET_OS_WINDOWS)
222 ASSERT(target::NativeArguments::StructSize() >
223 CallingConventions::kRegisterTransferLimit);
226 __ CallCFunction(
RBX);
232 __ movq(Address(
THR, target::Thread::exit_through_ffi_offset()),
236 __ movq(Address(
THR, target::Thread::top_exit_frame_info_offset()),
241 if (FLAG_precompiled_mode) {
242 __ movq(
PP, Address(
THR, target::Thread::global_object_pool_offset()));
257void StubCodeCompiler::GenerateSharedStubGeneric(
258 bool save_fpu_registers,
259 intptr_t self_code_stub_offset_from_thread,
261 std::function<
void()> perform_runtime_call) {
264 const RegisterSet saved_registers(
266 __ PushRegisters(saved_registers);
268 const intptr_t kSavedCpuRegisterSlots =
270 const intptr_t kSavedFpuRegisterSlots =
274 const intptr_t kAllSavedRegistersSlots =
275 kSavedCpuRegisterSlots + kSavedFpuRegisterSlots;
278 __ pushq(Address(
RSP, kAllSavedRegistersSlots * target::kWordSize));
279 __ movq(
CODE_REG, Address(
THR, self_code_stub_offset_from_thread));
281 perform_runtime_call();
289 __ movq(Address(
RSP, kAllSavedRegistersSlots * target::kWordSize),
TMP);
290 __ PopRegisters(saved_registers);
294void StubCodeCompiler::GenerateSharedStub(
295 bool save_fpu_registers,
296 const RuntimeEntry*
target,
297 intptr_t self_code_stub_offset_from_thread,
299 bool store_runtime_result_in_result_register) {
300 auto perform_runtime_call = [&]() {
301 if (store_runtime_result_in_result_register) {
302 __ PushImmediate(Immediate(0));
305 if (store_runtime_result_in_result_register) {
307 __ movq(Address(
RBP, target::kWordSize *
313 GenerateSharedStubGeneric(save_fpu_registers,
314 self_code_stub_offset_from_thread, allow_return,
315 perform_runtime_call);
318void StubCodeCompiler::GenerateEnterSafepointStub() {
319 RegisterSet all_registers;
320 all_registers.AddAllGeneralRegisters();
321 __ PushRegisters(all_registers);
324 __ ReserveAlignedFrameSpace(0);
325 __ movq(
RAX, Address(
THR, kEnterSafepointRuntimeEntry.OffsetFromThread()));
326 __ CallCFunction(
RAX);
329 __ PopRegisters(all_registers);
333static void GenerateExitSafepointStubCommon(Assembler* assembler,
334 uword runtime_entry_offset) {
335 RegisterSet all_registers;
336 all_registers.AddAllGeneralRegisters();
337 __ PushRegisters(all_registers);
340 __ ReserveAlignedFrameSpace(0);
345 __ movq(Address(
THR, target::Thread::execution_state_offset()),
346 Immediate(target::Thread::vm_execution_state()));
348 __ movq(
RAX, Address(
THR, runtime_entry_offset));
349 __ CallCFunction(
RAX);
352 __ PopRegisters(all_registers);
356void StubCodeCompiler::GenerateExitSafepointStub() {
357 GenerateExitSafepointStubCommon(
358 assembler, kExitSafepointRuntimeEntry.OffsetFromThread());
361void StubCodeCompiler::GenerateExitSafepointIgnoreUnwindInProgressStub() {
362 GenerateExitSafepointStubCommon(
364 kExitSafepointIgnoreUnwindInProgressRuntimeEntry.OffsetFromThread());
376void StubCodeCompiler::GenerateCallNativeThroughSafepointStub() {
377 __ movq(
R12, compiler::Immediate(target::Thread::exit_through_ffi()));
382 __ CallCFunction(
RBX,
true);
384 __ TransitionNativeToGenerated(
true);
394 compiler::Label skip_reloc;
396 InsertBSSRelocation(relocation);
397 const intptr_t reloc_end =
__ CodeSize();
398 __ Bind(&skip_reloc);
400 const intptr_t kLeaqLength = 7;
402 -kLeaqLength - compiler::target::kWordSize));
403 ASSERT((
__ CodeSize() - reloc_end) == kLeaqLength);
406 __ movq(tmp, compiler::Address(dst, 0));
414 __ movq(dst, compiler::Address(dst, 0));
417void StubCodeCompiler::GenerateLoadFfiCallbackMetadataRuntimeFunction(
418 uword function_index,
424 const intptr_t kLeaqLength = 7;
425 const intptr_t code_size =
__ CodeSize();
432 __ LoadFromOffset(dst, dst,
436static const RegisterSet kArgumentRegisterSet(
440void StubCodeCompiler::GenerateFfiCallbackTrampolineStub() {
451 const intptr_t kLeaqLength = 7;
452 const intptr_t size_before =
__ CodeSize();
454 const intptr_t size_after =
__ CodeSize();
460 FfiCallbackMetadata::kNativeCallbackTrampolineSize *
465 const intptr_t shared_stub_start =
__ CodeSize();
471 COMPILE_ASSERT(2 == FfiCallbackMetadata::kNativeCallbackTrampolineStackDelta);
474 __ PushRegisters(kArgumentRegisterSet);
488 __ pushq(Immediate(0));
494 __ pushq(Immediate(0));
497#if defined(DART_TARGET_OS_FUCHSIA)
499 if (FLAG_precompiled_mode) {
503 __ movq(
RAX, Immediate(
507 GenerateLoadFfiCallbackMetadataRuntimeFunction(
512 __ ReserveAlignedFrameSpace(0);
514 __ CallCFunction(
RAX);
527 __ PopRegisters(kArgumentRegisterSet);
544 __ cmpq(
THR, Immediate(0));
561 __ EnterFullSafepoint();
576#if defined(DART_TARGET_OS_FUCHSIA)
578 if (FLAG_precompiled_mode) {
585 GenerateLoadFfiCallbackMetadataRuntimeFunction(
590 __ ReserveAlignedFrameSpace(0);
592 __ CallCFunction(
RAX);
607 FfiCallbackMetadata::kNativeCallbackSharedStubSize);
617void StubCodeCompiler::GenerateDispatchTableNullErrorStub() {
621 __ CallRuntime(kDispatchTableNullErrorRuntimeEntry, 1);
626void StubCodeCompiler::GenerateRangeError(
bool with_fpu_regs) {
627 auto perform_runtime_call = [&]() {
634#if !defined(DART_COMPRESSED_POINTERS)
640 __ sarq(
TMP, Immediate(30));
641 __ addq(
TMP, Immediate(1));
642 __ cmpq(
TMP, Immediate(2));
647 __ PushImmediate(Immediate(0));
648 __ CallRuntime(kAllocateMintRuntimeEntry, 0);
652 Address(
RBP, target::kWordSize *
656 target::Mint::value_offset()),
660 Address(
RBP, target::kWordSize *
670 __ PushRegistersInOrder(
672 __ CallRuntime(kRangeErrorRuntimeEntry, 2);
676 GenerateSharedStubGeneric(
679 ? target::Thread::range_error_shared_with_fpu_regs_stub_offset()
680 : target::Thread::range_error_shared_without_fpu_regs_stub_offset(),
681 false, perform_runtime_call);
684void StubCodeCompiler::GenerateWriteError(
bool with_fpu_regs) {
685 auto perform_runtime_call = [&]() {
686 __ CallRuntime(kWriteErrorRuntimeEntry, 2);
690 GenerateSharedStubGeneric(
693 ? target::Thread::write_error_shared_with_fpu_regs_stub_offset()
694 : target::Thread::write_error_shared_without_fpu_regs_stub_offset(),
695 false, perform_runtime_call);
704static void GenerateCallNativeWithWrapperStub(Assembler* assembler,
705 Address wrapper_address) {
706 const intptr_t native_args_struct_offset = 0;
707 const intptr_t thread_offset =
708 target::NativeArguments::thread_offset() + native_args_struct_offset;
709 const intptr_t argc_tag_offset =
710 target::NativeArguments::argc_tag_offset() + native_args_struct_offset;
711 const intptr_t argv_offset =
712 target::NativeArguments::argv_offset() + native_args_struct_offset;
713 const intptr_t retval_offset =
714 target::NativeArguments::retval_offset() + native_args_struct_offset;
720 __ movq(Address(
THR, target::Thread::top_exit_frame_info_offset()),
RBP);
723 __ movq(Address(
THR, target::Thread::exit_through_ffi_offset()),
724 Immediate(target::Thread::exit_through_runtime_call()));
730 __ movq(
R8, Immediate(VMTag::kDartTagId));
733 __ Stop(
"Not coming from Dart code.");
741 WithExceptionCatchingTrampoline(assembler, [&]() {
745 __ subq(
RSP, Immediate(target::NativeArguments::StructSize()));
752 __ movq(Address(
RSP, thread_offset),
THR);
754 __ movq(Address(
RSP, argc_tag_offset),
R10);
756 __ movq(Address(
RSP, argv_offset),
R13);
758 __ leaq(
RAX, Address(
RBP, (target::frame_layout.param_end_from_fp + 1) *
761 __ movq(Address(
RSP, retval_offset),
RAX);
768 __ movq(
RAX, wrapper_address);
769 __ CallCFunction(
RAX);
775 __ movq(Address(
THR, target::Thread::exit_through_ffi_offset()),
779 __ movq(Address(
THR, target::Thread::top_exit_frame_info_offset()),
784 if (FLAG_precompiled_mode) {
785 __ movq(
PP, Address(
THR, target::Thread::global_object_pool_offset()));
793void StubCodeCompiler::GenerateCallNoScopeNativeStub() {
794 GenerateCallNativeWithWrapperStub(
797 target::Thread::no_scope_native_wrapper_entry_point_offset()));
800void StubCodeCompiler::GenerateCallAutoScopeNativeStub() {
801 GenerateCallNativeWithWrapperStub(
804 target::Thread::auto_scope_native_wrapper_entry_point_offset()));
813void StubCodeCompiler::GenerateCallBootstrapNativeStub() {
814 GenerateCallNativeWithWrapperStub(
817 target::Thread::bootstrap_native_wrapper_entry_point_offset()));
822void StubCodeCompiler::GenerateCallStaticFunctionStub() {
826 __ pushq(Immediate(0));
827 __ CallRuntime(kPatchStaticCallRuntimeEntry, 0);
833 __ movq(
RBX, FieldAddress(
CODE_REG, target::Code::entry_point_offset()));
840void StubCodeCompiler::GenerateFixCallersTargetStub() {
842 __ BranchOnMonomorphicCheckedEntryJIT(&monomorphic);
849 Address(
THR, target::Thread::fix_callers_target_code_offset()));
853 __ pushq(Immediate(0));
854 __ CallRuntime(kFixCallersTargetRuntimeEntry, 0);
857 __ movq(
RAX, FieldAddress(
CODE_REG, target::Code::entry_point_offset()));
862 __ Bind(&monomorphic);
868 Address(
THR, target::Thread::fix_callers_target_code_offset()));
870 __ pushq(Immediate(0));
873 __ CallRuntime(kFixCallersTargetMonomorphicRuntimeEntry, 2);
877 __ movq(
RAX, FieldAddress(
CODE_REG, target::Code::entry_point_offset(
886void StubCodeCompiler::GenerateFixAllocationStubTargetStub() {
891 Address(
THR, target::Thread::fix_allocation_stub_code_offset()));
894 __ pushq(Immediate(0));
895 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);
897 __ movq(
RAX, FieldAddress(
CODE_REG, target::Code::entry_point_offset()));
905void StubCodeCompiler::GenerateFixParameterizedAllocationStubTargetStub() {
910 Address(
THR, target::Thread::fix_allocation_stub_code_offset()));
914 __ pushq(Immediate(0));
915 __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);
918 __ movq(
RAX, FieldAddress(
CODE_REG, target::Code::entry_point_offset()));
927static void PushArrayOfArguments(Assembler* assembler) {
938 target::frame_layout.param_end_from_fp * target::kWordSize));
939 __ leaq(
RBX, FieldAddress(
RAX, target::Array::data_offset()));
942 Label loop, loop_condition;
948 __ jmp(&loop_condition, kJumpLength);
952 __ StoreCompressedIntoObject(
RAX, Address(
RBX, 0),
RDI);
953 __ addq(
RBX, Immediate(target::kCompressedWordSize));
954 __ subq(
R12, Immediate(target::kWordSize));
955 __ Bind(&loop_condition);
986static void GenerateDeoptimizationSequence(Assembler* assembler,
994 const intptr_t saved_result_slot_from_fp =
995 target::frame_layout.first_local_from_fp + 1 -
997 const intptr_t saved_exception_slot_from_fp =
998 target::frame_layout.first_local_from_fp + 1 -
1000 const intptr_t saved_stacktrace_slot_from_fp =
1001 target::frame_layout.first_local_from_fp + 1 -
1011 __ pushq(Address(
RBP, 2 * target::kWordSize));
1027 LeafRuntimeScope rt(assembler,
1033 rt.Call(kDeoptimizeCopyFrameRuntimeEntry, 2);
1039 __ movq(
RBX, Address(
RBP, saved_result_slot_from_fp * target::kWordSize));
1043 Address(
RBP, saved_exception_slot_from_fp * target::kWordSize));
1045 Address(
RBP, saved_stacktrace_slot_from_fp * target::kWordSize));
1049 __ RestoreCodePointer();
1050 __ LeaveStubFrame();
1059 __ EnterStubFrame();
1069 LeafRuntimeScope rt(assembler,
1072 rt.Call(kDeoptimizeFillFrameRuntimeEntry, 1);
1076 __ movq(
RBX, Address(
RBP, target::frame_layout.first_local_from_fp *
1077 target::kWordSize));
1080 __ movq(
RBX, Address(
RBP, target::frame_layout.first_local_from_fp *
1081 target::kWordSize));
1083 __ movq(
RDX, Address(
RBP, (target::frame_layout.first_local_from_fp - 1) *
1084 target::kWordSize));
1088 __ RestoreCodePointer();
1089 __ LeaveStubFrame();
1095 __ EnterStubFrame();
1105 __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0);
1117 __ LeaveStubFrame();
1127 __ EnterStubFrame();
1132 __ CallRuntime(kReThrowRuntimeEntry, 3);
1133 __ LeaveStubFrame();
1138void StubCodeCompiler::GenerateDeoptimizeLazyFromReturnStub() {
1144 Address(
THR, target::Thread::lazy_deopt_from_return_stub_offset()));
1151void StubCodeCompiler::GenerateDeoptimizeLazyFromThrowStub() {
1157 Address(
THR, target::Thread::lazy_deopt_from_throw_stub_offset()));
1162void StubCodeCompiler::GenerateDeoptimizeStub() {
1166 __ movq(
CODE_REG, Address(
THR, target::Thread::deoptimize_stub_offset()));
1174static void GenerateNoSuchMethodDispatcherBody(Assembler* assembler,
1176 __ pushq(Immediate(0));
1177 __ pushq(receiver_reg);
1183 target::ArgumentsDescriptor::type_args_len_offset()),
1186 Label args_count_ok;
1190 __ Bind(&args_count_ok);
1193 PushArrayOfArguments(assembler);
1194 const intptr_t kNumArgs = 4;
1195 __ CallRuntime(kNoSuchMethodFromCallStubRuntimeEntry, kNumArgs);
1198 __ LeaveStubFrame();
1205static void GenerateDispatcherCode(Assembler* assembler,
1206 Label* call_target_function) {
1207 __ Comment(
"NoSuchMethodDispatch");
1213 __ EnterStubFrame();
1216 target::ArgumentsDescriptor::size_offset()));
1218 Address(
RBP,
RDI, TIMES_HALF_WORD_SIZE,
1219 target::frame_layout.param_end_from_fp * target::kWordSize));
1221 GenerateNoSuchMethodDispatcherBody(assembler,
RAX);
1227void StubCodeCompiler::GenerateNoSuchMethodDispatcherStub() {
1228 __ EnterStubFrame();
1232 target::CallSiteData::arguments_descriptor_offset()));
1234 target::ArgumentsDescriptor::size_offset()));
1247void StubCodeCompiler::GenerateAllocateArrayStub() {
1248 if (!FLAG_use_slow_path && FLAG_inline_alloc) {
1261 const Immediate& max_len =
1269 const intptr_t fixed_size_plus_alignment_padding =
1270 target::Array::header_size() +
1274 fixed_size_plus_alignment_padding));
1278 const intptr_t
cid = kArrayCid;
1280 Address(
THR, target::Thread::top_offset()));
1291 __ cmpq(
RCX, Address(
THR, target::Thread::end_offset()));
1297 __ movq(Address(
THR, target::Thread::top_offset()),
RCX);
1304 Label size_tag_overflow,
done;
1305 __ cmpq(
RDI, Immediate(target::UntaggedObject::kSizeTagMaxSizeTag));
1307 __ shlq(
RDI, Immediate(target::UntaggedObject::kTagBitsSizeTagPos -
1311 __ Bind(&size_tag_overflow);
1312 __ LoadImmediate(
RDI, Immediate(0));
1317 __ orq(
RDI, Immediate(tags));
1318 __ movq(FieldAddress(
RAX, target::Array::tags_offset()),
RDI);
1324 __ StoreCompressedIntoObjectNoBarrier(
1327 target::Array::type_arguments_offset()),
1331 __ StoreCompressedIntoObjectNoBarrier(
1334 target::Array::length_offset()),
1344 target::Array::header_size()));
1347 for (intptr_t
offset = 0;
offset < target::kObjectAlignment;
1348 offset += target::kCompressedWordSize) {
1355 __ addq(
RDI, Immediate(target::kObjectAlignment));
1358 __ WriteAllocationCanary(
RCX);
1363 __ Bind(&slow_case);
1367 __ EnterStubFrame();
1368 __ pushq(Immediate(0));
1371 __ CallRuntime(kAllocateArrayRuntimeEntry, 2);
1382 __ LeaveStubFrame();
1386void StubCodeCompiler::GenerateAllocateMintSharedWithFPURegsStub() {
1388 if (!FLAG_use_slow_path && FLAG_inline_alloc) {
1394 __ Bind(&slow_case);
1398 GenerateSharedStub(
true, &kAllocateMintRuntimeEntry,
1399 target::Thread::allocate_mint_with_fpu_regs_stub_offset(),
1404void StubCodeCompiler::GenerateAllocateMintSharedWithoutFPURegsStub() {
1406 if (!FLAG_use_slow_path && FLAG_inline_alloc) {
1412 __ Bind(&slow_case);
1417 false, &kAllocateMintRuntimeEntry,
1418 target::Thread::allocate_mint_without_fpu_regs_stub_offset(),
1423static const RegisterSet kCalleeSavedRegisterSet(
1434void StubCodeCompiler::GenerateInvokeDartCodeStub() {
1443 __ pushq(Address(kThreadReg, target::Thread::invoke_dart_code_stub_offset()));
1450 const intptr_t kInitialOffset = 2;
1452 const intptr_t kArgumentsDescOffset = -(kInitialOffset)*target::kWordSize;
1453 __ pushq(kArgDescReg);
1456 __ PushRegisters(kCalleeSavedRegisterSet);
1462 if (
THR != kThreadReg) {
1463 __ movq(
THR, kThreadReg);
1466#if defined(USING_SHADOW_CALL_STACK)
1476 __ movq(
RAX, Address(
THR, target::Thread::top_resource_offset()));
1478 __ movq(Address(
THR, target::Thread::top_resource_offset()), Immediate(0));
1480 __ movq(
RAX, Address(
THR, target::Thread::exit_through_ffi_offset()));
1482 __ movq(Address(
THR, target::Thread::exit_through_ffi_offset()),
1485 __ movq(
RAX, Address(
THR, target::Thread::top_exit_frame_info_offset()));
1490 __ EmitEntryFrameVerification();
1492 __ movq(Address(
THR, target::Thread::top_exit_frame_info_offset()),
1500 __ movq(
R10, kArgDescReg);
1507 FieldAddress(
R10, target::ArgumentsDescriptor::count_offset()));
1509 FieldAddress(
R10, target::ArgumentsDescriptor::type_args_len_offset()),
1511 Label args_count_ok;
1514 __ Bind(&args_count_ok);
1516 __ movq(Address(
RBP, kArgumentsDescOffset),
RBX);
1520 __ leaq(
RDX, FieldAddress(kArgsReg, target::Array::data_offset()));
1523 Label push_arguments;
1524 Label done_push_arguments;
1526 __ LoadImmediate(
RAX, Immediate(0));
1527 __ Bind(&push_arguments);
1528#if defined(DART_COMPRESSED_POINTERS)
1537 __ Bind(&done_push_arguments);
1540 if (FLAG_precompiled_mode) {
1541 __ movq(
PP, Address(
THR, target::Thread::global_object_pool_offset()));
1547 FieldAddress(
CODE_REG, target::Code::entry_point_offset()));
1549 __ call(kTargetReg);
1552 __ movq(
RDX, Address(
RBP, kArgumentsDescOffset));
1559 __ popq(Address(
THR, target::Thread::top_exit_frame_info_offset()));
1560 __ popq(Address(
THR, target::Thread::exit_through_ffi_offset()));
1561 __ popq(Address(
THR, target::Thread::top_resource_offset()));
1566#if defined(USING_SHADOW_CALL_STACK)
1571 __ PopRegisters(kCalleeSavedRegisterSet);
1572 __ set_constant_pool_allowed(
false);
1588static void GenerateAllocateContextSpaceStub(Assembler* assembler,
1592 intptr_t fixed_size_plus_alignment_padding =
1593 (target::Context::header_size() +
1596 fixed_size_plus_alignment_padding));
1604 __ movq(
RAX, Address(
THR, target::Thread::top_offset()));
1610 __ cmpq(
R13, Address(
THR, target::Thread::end_offset()));
1612 __ CheckAllocationCanary(
RAX);
1619 __ movq(Address(
THR, target::Thread::top_offset()),
R13);
1629 Label size_tag_overflow,
done;
1631 fixed_size_plus_alignment_padding));
1633 __ cmpq(
R13, Immediate(target::UntaggedObject::kSizeTagMaxSizeTag));
1635 __ shlq(
R13, Immediate(target::UntaggedObject::kTagBitsSizeTagPos -
1639 __ Bind(&size_tag_overflow);
1641 __ LoadImmediate(
R13, Immediate(0));
1648 __ orq(
R13, Immediate(tags));
1649 __ movq(FieldAddress(
RAX, target::Object::tags_offset()),
R13);
1655 __ movl(FieldAddress(
RAX, target::Context::num_variables_offset()),
R10);
1665void StubCodeCompiler::GenerateAllocateContextStub() {
1667 if (!FLAG_use_slow_path && FLAG_inline_alloc) {
1670 GenerateAllocateContextSpaceStub(
assembler, &slow_case);
1676 __ StoreCompressedIntoObjectNoBarrier(
1677 RAX, FieldAddress(
RAX, target::Context::parent_offset()),
R9);
1684 __ leaq(
R13, FieldAddress(
RAX, target::Context::variable_offset(0)));
1690 __ jmp(&entry, kJumpLength);
1694 __ StoreCompressedIntoObjectNoBarrier(
1697 __ cmpq(
R10, Immediate(0));
1705 __ Bind(&slow_case);
1708 __ EnterStubFrame();
1712 __ CallRuntime(kAllocateContextRuntimeEntry, 1);
1722 __ LeaveStubFrame();
1734void StubCodeCompiler::GenerateCloneContextStub() {
1735 if (!FLAG_use_slow_path && FLAG_inline_alloc) {
1739 __ movsxd(
R10, FieldAddress(
R9, target::Context::num_variables_offset()));
1742 GenerateAllocateContextSpaceStub(
assembler, &slow_case);
1745 __ LoadCompressed(
R13, FieldAddress(
R9, target::Context::parent_offset()));
1749 __ StoreCompressedIntoObjectNoBarrier(
1750 RAX, FieldAddress(
RAX, target::Context::parent_offset()),
R13);
1761 target::Context::variable_offset(0)));
1762 __ StoreCompressedIntoObjectNoBarrier(
1765 target::Context::variable_offset(0)),
1768 __ cmpq(
R10, Immediate(0));
1776 __ Bind(&slow_case);
1780 __ EnterStubFrame();
1784 __ CallRuntime(kCloneContextRuntimeEntry, 1);
1795 __ LeaveStubFrame();
1800void StubCodeCompiler::GenerateWriteBarrierWrappersStub() {
1805 intptr_t
start =
__ CodeSize();
1808 __ call(Address(
THR, target::Thread::write_barrier_entry_point_offset()));
1811 intptr_t
end =
__ CodeSize();
1827static void GenerateWriteBarrierStubHelper(Assembler* assembler,
bool cards) {
1829 __ movq(
TMP, FieldAddress(
RAX, target::Object::tags_offset()));
1830 __ andq(
TMP, Address(
THR, target::Thread::write_barrier_mask_offset()));
1831 __ testq(
TMP, Immediate(target::UntaggedObject::kIncrementalBarrierMask));
1832 __ j(
ZERO, &skip_marking);
1840 __ movq(
RAX, FieldAddress(
TMP, target::Object::tags_offset()));
1844 __ testq(
RCX, Immediate(1 << target::UntaggedObject::kNotMarkedBit));
1847 __ andq(
RCX, Immediate(~(1 << target::UntaggedObject::kNotMarkedBit)));
1850 __ LockCmpxchgq(FieldAddress(
TMP, target::Object::tags_offset()),
RCX);
1853 __ movq(
RAX, Address(
THR, target::Thread::marking_stack_block_offset()));
1864 LeafRuntimeScope rt(assembler,
1868 rt.Call(kMarkingStackBlockProcessRuntimeEntry, 1);
1876 Label add_to_remembered_set, remember_card;
1877 __ Bind(&skip_marking);
1878 __ movq(
TMP, FieldAddress(
RDX, target::Object::tags_offset()));
1879 __ shrl(
TMP, Immediate(target::UntaggedObject::kBarrierOverlapShift));
1880 __ andq(
TMP, FieldAddress(
RAX, target::Object::tags_offset()));
1881 __ testq(
TMP, Immediate(target::UntaggedObject::kGenerationalBarrierMask));
1885 __ Bind(&add_to_remembered_set);
1887 __ movl(
TMP, FieldAddress(
RDX, target::Object::tags_offset()));
1888 __ testl(
TMP, Immediate(1 << target::UntaggedObject::kCardRememberedBit));
1893 __ movl(
TMP, FieldAddress(
RDX, target::Object::tags_offset()));
1894 __ testl(
TMP, Immediate(1 << target::UntaggedObject::kCardRememberedBit));
1896 __ Stop(
"Wrong barrier");
1905 __ movq(
RAX, FieldAddress(
RDX, target::Object::tags_offset()));
1910 Immediate(1 << target::UntaggedObject::kOldAndNotRememberedBit));
1913 Immediate(~(1 << target::UntaggedObject::kOldAndNotRememberedBit)));
1916 __ LockCmpxchgq(FieldAddress(
RDX, target::Object::tags_offset()),
RCX);
1922 __ movq(
RAX, Address(
THR, target::Thread::store_buffer_block_offset()));
1937 LeafRuntimeScope rt(assembler,
1941 rt.Call(kStoreBufferBlockProcessRuntimeEntry, 1);
1951 Label remember_card_slow;
1954 __ Bind(&remember_card);
1957 __ cmpq(Address(
TMP, target::Page::card_table_offset()), Immediate(0));
1966 Address(
TMP, target::Page::card_table_offset()));
1967 __ shrq(
R13, Immediate(target::Page::kBytesPerCardLog2));
1969 __ shrq(
R13, Immediate(target::kBitsPerWordLog2));
1970 __ movq(
RAX, Immediate(1));
1978 __ Bind(&remember_card_slow);
1980 LeafRuntimeScope rt(assembler,
1985 rt.Call(kRememberCardRuntimeEntry, 2);
1991void StubCodeCompiler::GenerateWriteBarrierStub() {
1992 GenerateWriteBarrierStubHelper(
assembler,
false);
1995void StubCodeCompiler::GenerateArrayWriteBarrierStub() {
1996 GenerateWriteBarrierStubHelper(
assembler,
true);
1999static void GenerateAllocateObjectHelper(Assembler* assembler,
2000 bool is_cls_parameterized) {
2008#if !defined(PRODUCT)
2012 __ MaybeTraceAllocation(kCidRegister, &slow_case,
TMP);
2020 __ ExtractInstanceSizeFromTags(kInstanceSizeReg, kTagsReg);
2023 Address(
THR, target::Thread::top_offset()));
2025 kInstanceSizeReg,
TIMES_1, 0));
2027 __ cmpq(kNewTopReg, Address(
THR, target::Thread::end_offset()));
2031 __ movq(Address(
THR, target::Thread::top_offset()), kNewTopReg);
2045 __ leaq(kNextFieldReg,
2047 target::Instance::first_field_offset()));
2055 for (intptr_t
offset = 0;
offset < target::kObjectAlignment;
2056 offset += target::kCompressedWordSize) {
2058 Address(kNextFieldReg,
offset),
2063 __ addq(kNextFieldReg, Immediate(target::kObjectAlignment));
2064 __ cmpq(kNextFieldReg, kNewTopReg);
2068 __ WriteAllocationCanary(kNewTopReg);
2070 if (is_cls_parameterized) {
2071 Label not_parameterized_case;
2076 __ ExtractClassIdFromTags(kClsIdReg, kTagsReg);
2079 __ LoadClassById(kTypeOffsetReg, kClsIdReg);
2082 FieldAddress(kTypeOffsetReg,
2084 host_type_arguments_field_offset_in_words_offset()));
2087 __ StoreCompressedIntoObject(
2093 __ Bind(¬_parameterized_case);
2098 __ Bind(&slow_case);
2102 if (!is_cls_parameterized) {
2107 Address(
THR, target::Thread::allocate_object_slow_entry_point_offset()));
2111void StubCodeCompiler::GenerateAllocateObjectStub() {
2112 GenerateAllocateObjectHelper(
assembler,
false);
2115void StubCodeCompiler::GenerateAllocateObjectParameterizedStub() {
2116 GenerateAllocateObjectHelper(
assembler,
true);
2119void StubCodeCompiler::GenerateAllocateObjectSlowStub() {
2120 if (!FLAG_precompiled_mode) {
2122 Address(
THR, target::Thread::call_to_runtime_stub_offset()));
2130 __ EnterStubFrame();
2143 __ CallRuntime(kAllocateObjectRuntimeEntry, 2);
2155 __ LeaveStubFrame();
2164 const Code& allocate_object,
2165 const Code& allocat_object_parametrized) {
2166 classid_t cls_id = target::Class::GetId(cls);
2169 const intptr_t cls_type_arg_field_offset =
2170 target::Class::TypeArgumentsFieldOffset(cls);
2173 const bool is_cls_parameterized = target::Class::NumTypeArguments(cls) > 0;
2174 ASSERT(!is_cls_parameterized ||
2175 cls_type_arg_field_offset != target::Class::kNoTypeArguments);
2177 const intptr_t instance_size = target::Class::GetInstanceSize(cls);
2178 ASSERT(instance_size > 0);
2184 __ movq(kTagsReg, Immediate(tags));
2187 if (!FLAG_use_slow_path && FLAG_inline_alloc &&
2188 !target::Class::TraceAllocation(cls) &&
2191 RELEASE_ASSERT(target::Heap::IsAllocatableInNewSpace(instance_size));
2193 if (is_cls_parameterized) {
2195 CastHandle<Object>(allocat_object_parametrized))) {
2196 __ GenerateUnRelocatedPcRelativeTailCall();
2197 unresolved_calls->Add(
new UnresolvedPcRelativeCall(
2198 __ CodeSize(), allocat_object_parametrized,
true));
2202 allocate_object_parameterized_entry_point_offset()));
2206 __ GenerateUnRelocatedPcRelativeTailCall();
2207 unresolved_calls->Add(
new UnresolvedPcRelativeCall(
2208 __ CodeSize(), allocate_object,
true));
2211 Address(
THR, target::Thread::allocate_object_entry_point_offset()));
2215 if (!is_cls_parameterized) {
2219 target::Thread::allocate_object_slow_entry_point_offset()));
2230void StubCodeCompiler::GenerateCallClosureNoSuchMethodStub() {
2231 __ EnterStubFrame();
2236 __ LoadCompressedSmi(
2237 R13, FieldAddress(
R10, target::ArgumentsDescriptor::size_offset()));
2240 target::frame_layout.param_end_from_fp * target::kWordSize));
2243 __ LoadCompressed(
RBX, FieldAddress(
RAX, target::Closure::function_offset()));
2245 __ pushq(Immediate(0));
2252 FieldAddress(
R10, target::ArgumentsDescriptor::type_args_len_offset()),
2255 Label args_count_ok;
2258 __ Bind(&args_count_ok);
2263 const intptr_t kNumArgs = 4;
2264 __ CallRuntime(kNoSuchMethodFromPrologueRuntimeEntry, kNumArgs);
2272 if (FLAG_precompiled_mode) {
2278 if (FLAG_trace_optimized_ic_calls) {
2279 __ EnterStubFrame();
2284 __ CallRuntime(kTraceICCallRuntimeEntry, 2);
2289 __ LeaveStubFrame();
2291 __ incl(FieldAddress(func_reg, target::Function::usage_counter_offset()));
2296 if (FLAG_precompiled_mode) {
2300 if (FLAG_optimization_counter_threshold >= 0) {
2303 __ Comment(
"Increment function counter");
2305 FieldAddress(
IC_DATA_REG, target::ICData::owner_offset()));
2306 __ incl(FieldAddress(func_reg, target::Function::usage_counter_offset()));
2314static void EmitFastSmiOp(Assembler* assembler,
2317 Label* not_smi_or_overflow) {
2318 __ Comment(
"Fast Smi op");
2320 __ movq(
RAX, Address(
RSP, +2 * target::kWordSize));
2321 __ movq(
RCX, Address(
RSP, +1 * target::kWordSize));
2337 Address(
THR,
RAX,
TIMES_8, target::Thread::bool_true_offset()));
2338 ASSERT(target::Thread::bool_true_offset() + 8 ==
2339 target::Thread::bool_false_offset());
2347 Address(
THR,
RAX,
TIMES_8, target::Thread::bool_true_offset()));
2348 ASSERT(target::Thread::bool_true_offset() + 8 ==
2349 target::Thread::bool_false_offset());
2357 __ movq(
R13, FieldAddress(
RBX, target::ICData::entries_offset()));
2359 __ leaq(
R13, FieldAddress(
R13, target::Array::data_offset()));
2365 __ OBJ(cmp)(Address(
R13, 0 * target::kCompressedWordSize), imm_smi_cid);
2367 __ OBJ(cmp)(Address(
R13, 1 * target::kCompressedWordSize), imm_smi_cid);
2370 __ Stop(
"Incorrect IC data");
2374 if (FLAG_optimization_counter_threshold >= 0) {
2375 const intptr_t count_offset =
2376 target::ICData::CountIndexFor(num_args) * target::kCompressedWordSize;
2388static void GenerateRecordEntryPoint(Assembler* assembler) {
2391 Immediate(target::Function::entry_point_offset() -
kHeapObjectTag));
2393 __ BindUncheckedEntryPoint();
2394 __ movq(
R8, Immediate(target::Function::entry_point_offset(
2413 const RuntimeEntry& handle_ic_miss,
2415 Optimized optimized,
2417 Exactness exactness) {
2418 if (FLAG_precompiled_mode) {
2423 const bool save_entry_point = kind == Token::kILLEGAL;
2424 if (save_entry_point) {
2434 ASSERT(num_args == 1 || num_args == 2);
2440 __ movl(
RCX, FieldAddress(
RBX, target::ICData::state_bits_offset()));
2441 ASSERT(target::ICData::NumArgsTestedShift() == 0);
2442 __ andq(
RCX, Immediate(target::ICData::NumArgsTestedMask()));
2443 __ cmpq(
RCX, Immediate(num_args));
2445 __ Stop(
"Incorrect stub for IC data");
2450#if !defined(PRODUCT)
2451 Label stepping, done_stepping;
2453 __ Comment(
"Check single stepping");
2454 __ LoadIsolate(
RAX);
2455 __ cmpb(Address(
RAX, target::Isolate::single_step_offset()), Immediate(0));
2457 __ Bind(&done_stepping);
2461 Label not_smi_or_overflow;
2462 if (kind != Token::kILLEGAL) {
2463 EmitFastSmiOp(
assembler, kind, num_args, ¬_smi_or_overflow);
2465 __ Bind(¬_smi_or_overflow);
2467 __ Comment(
"Extract ICData initial values and receiver cid");
2469 __ movq(
R13, FieldAddress(
RBX, target::ICData::entries_offset()));
2471 __ leaq(
R13, FieldAddress(
R13, target::Array::data_offset()));
2475 __ LoadTaggedClassIdMayBeSmi(
RAX,
RDX);
2478 FieldAddress(
RBX, target::CallSiteData::arguments_descriptor_offset()));
2479 if (num_args == 2) {
2482 target::ArgumentsDescriptor::count_offset()));
2484 __ LoadTaggedClassIdMayBeSmi(
RCX,
R9);
2489 FieldAddress(
RBX, target::CallSiteData::arguments_descriptor_offset()));
2491 target::ArgumentsDescriptor::count_offset()));
2493 __ LoadTaggedClassIdMayBeSmi(
RAX,
RDX);
2494 if (num_args == 2) {
2496 __ LoadTaggedClassIdMayBeSmi(
RCX,
R9);
2504 Label loop, found, miss;
2505 __ Comment(
"ICData loop");
2508 const bool optimize = kind == Token::kILLEGAL;
2509 const intptr_t target_offset =
2510 target::ICData::TargetIndexFor(num_args) * target::kCompressedWordSize;
2511 const intptr_t count_offset =
2512 target::ICData::CountIndexFor(num_args) * target::kCompressedWordSize;
2513 const intptr_t exactness_offset =
2514 target::ICData::ExactnessIndexFor(num_args) * target::kCompressedWordSize;
2517 for (
int unroll =
optimize ? 4 : 2; unroll >= 0; unroll--) {
2521 if (num_args == 2) {
2523 __ OBJ(mov)(
R9, Address(
R13, target::kCompressedWordSize));
2531 const intptr_t entry_size = target::ICData::TestEntryLengthFor(
2533 target::kCompressedWordSize;
2534 __ addq(
R13, Immediate(entry_size));
2545 __ Comment(
"IC miss");
2549 target::ArgumentsDescriptor::count_offset()));
2551 __ EnterStubFrame();
2552 if (save_entry_point) {
2558 __ pushq(Immediate(0));
2560 for (intptr_t i = 0; i < num_args; i++) {
2561 __ movq(
RCX, Address(
RAX, -target::kWordSize * i));
2565 __ CallRuntime(handle_ic_miss, num_args + 1);
2567 for (intptr_t i = 0; i < num_args + 1; i++) {
2573 if (save_entry_point) {
2577 __ RestoreCodePointer();
2578 __ LeaveStubFrame();
2579 Label call_target_function;
2580 if (!FLAG_lazy_dispatchers) {
2581 GenerateDispatcherCode(
assembler, &call_target_function);
2583 __ jmp(&call_target_function);
2588 Label call_target_function_through_unchecked_entry;
2596 __ j(
LESS, &exactness_ok);
2597 __ j(
EQUAL, &call_target_function_through_unchecked_entry);
2603 FieldAddress(
RBX, target::ICData::receivers_static_type_offset()));
2604 __ LoadCompressed(
RCX, FieldAddress(
RCX, target::Type::arguments_offset()));
2608#if defined(DART_COMPRESSED_POINTERS)
2613 __ j(
EQUAL, &call_target_function_through_unchecked_entry);
2616 __ OBJ(mov)(Address(
R13, exactness_offset),
2619 __ Bind(&exactness_ok);
2623 if (FLAG_optimization_counter_threshold >= 0) {
2624 __ Comment(
"Update ICData counter");
2629 __ Comment(
"Call target (via specified entry point)");
2630 __ Bind(&call_target_function);
2634 if (save_entry_point) {
2636 __ jmp(Address(
R8, 0));
2638 __ jmp(FieldAddress(
FUNCTION_REG, target::Function::entry_point_offset()));
2642 __ Bind(&call_target_function_through_unchecked_entry);
2643 if (FLAG_optimization_counter_threshold >= 0) {
2644 __ Comment(
"Update ICData counter");
2648 __ Comment(
"Call target (via unchecked entry point)");
2652 __ jmp(FieldAddress(
FUNCTION_REG, target::Function::entry_point_offset(
2656#if !defined(PRODUCT)
2659 __ EnterStubFrame();
2664 if (save_entry_point) {
2668 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
2669 if (save_entry_point) {
2677 __ RestoreCodePointer();
2678 __ LeaveStubFrame();
2679 __ jmp(&done_stepping);
// Unoptimized IC call stub checking one argument's class id; forwards to
// GenerateNArgsCheckInlineCacheStub with the 1-arg miss handler and no fast
// Smi op (Token::kILLEGAL).
// NOTE(review): the wrapping call expression is partially elided in this
// extraction — verify the full argument list against the complete source.
2687void StubCodeCompiler::GenerateOneArgCheckInlineCacheStub() {
2689 1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,
// Same as the 1-arg IC stub above, but with a static-type exactness check
// (used for fields/calls where the receiver's exact type is tracked).
// NOTE(review): call expression partially elided in this extraction.
2696void StubCodeCompiler::GenerateOneArgCheckInlineCacheWithExactnessCheckStub() {
2698 1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,
// Unoptimized IC call stub checking two arguments' class ids; forwards to
// GenerateNArgsCheckInlineCacheStub with the 2-arg miss handler.
// NOTE(review): call expression partially elided in this extraction.
2705void StubCodeCompiler::GenerateTwoArgsCheckInlineCacheStub() {
2707 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,
// 2-arg unoptimized IC stub with a fast path for Smi '+' (Token::kADD);
// non-Smi or overflow falls through to the normal IC lookup.
// NOTE(review): call expression partially elided in this extraction.
2714void StubCodeCompiler::GenerateSmiAddInlineCacheStub() {
2716 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD,
kUnoptimized,
// 2-arg unoptimized IC stub with a fast path for Smi '<' (Token::kLT).
// NOTE(review): call expression partially elided in this extraction.
2723void StubCodeCompiler::GenerateSmiLessInlineCacheStub() {
2725 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kLT,
kUnoptimized,
// 2-arg unoptimized IC stub with a fast path for Smi '==' (Token::kEQ).
// NOTE(review): call expression partially elided in this extraction.
2732void StubCodeCompiler::GenerateSmiEqualInlineCacheStub() {
2734 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ,
kUnoptimized,
// 1-arg IC stub variant used from optimized code (kOptimized): same lookup,
// but generated for call sites in optimized functions.
// NOTE(review): call expression partially elided in this extraction.
2742void StubCodeCompiler::GenerateOneArgOptimizedCheckInlineCacheStub() {
2744 1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,
kOptimized,
// Optimized-code variant of the 1-arg IC stub with exactness tracking.
// NOTE(review): call expression partially elided in this extraction.
2752void StubCodeCompiler::
2753 GenerateOneArgOptimizedCheckInlineCacheWithExactnessCheckStub() {
2755 1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,
kOptimized,
// Optimized-code variant of the 2-arg IC stub.
// NOTE(review): call expression partially elided in this extraction.
2763void StubCodeCompiler::GenerateTwoArgsOptimizedCheckInlineCacheStub() {
2765 2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,
// Stub for unoptimized static calls that test zero arguments: the ICData
// (in RBX) is used only to count invocations and fetch the single target.
// NOTE(review): many interior lines are elided in this extraction; comments
// below describe only what the visible instructions establish.
2771void StubCodeCompiler::GenerateZeroArgsUnoptimizedStaticCallStub() {
// Debug-mode sanity check: the ICData's NumArgsTested field must be 0
// for this stub, otherwise stop with an error.
2779 __ movl(
RCX, FieldAddress(
RBX, target::ICData::state_bits_offset()));
2780 ASSERT(target::ICData::NumArgsTestedShift() == 0);
2781 __ andq(
RCX, Immediate(target::ICData::NumArgsTestedMask()));
2782 __ cmpq(
RCX, Immediate(0));
2784 __ Stop(
"Incorrect IC data for unoptimized static call");
// Non-product builds: check the isolate's single-stepping flag and divert
// to the stepping path (runtime call further below) when it is set.
2789#if !defined(PRODUCT)
2791 Label stepping, done_stepping;
2792 __ LoadIsolate(
RAX);
2793 __ movzxb(
RAX, Address(
RAX, target::Isolate::single_step_offset()));
2794 __ cmpq(
RAX, Immediate(0));
2801 __ Bind(&done_stepping);
// Load the ICData entries array and compute the address of its first
// entry (R12 points into the array's data).
2805 __ movq(
R12, FieldAddress(
RBX, target::ICData::entries_offset()));
2807 __ leaq(
R12, FieldAddress(
R12, target::Array::data_offset()));
// Offsets (in compressed words) of the target and invocation-count slots
// within a 0-args-tested ICData entry.
2809 const intptr_t target_offset =
2810 target::ICData::TargetIndexFor(0) * target::kCompressedWordSize;
2811 const intptr_t count_offset =
2812 target::ICData::CountIndexFor(0) * target::kCompressedWordSize;
// Bump the usage counter only when counter-driven optimization is on.
2814 if (FLAG_optimization_counter_threshold >= 0) {
2822 FieldAddress(
RBX, target::CallSiteData::arguments_descriptor_offset()));
// Tail-jump to the target's entry point (held in R8; the load into R8 is
// among the elided lines — presumably the selected entry point).
2830 __ jmp(Address(
R8, 0));
// Single-stepping slow path: call the debugger's step handler, then
// resume at done_stepping (resume jump elided in this view).
2832#if !defined(PRODUCT)
2834 __ EnterStubFrame();
2838 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
2842 __ RestoreCodePointer();
2843 __ LeaveStubFrame();
// Unoptimized static call stub testing one argument.
// NOTE(review): the entire body (original lines 2851-2857) is missing from
// this extraction — presumably a forward to GenerateNArgsCheckInlineCacheStub
// like its siblings; confirm against the full source.
2850void StubCodeCompiler::GenerateOneArgUnoptimizedStaticCallStub() {
// Unoptimized static call stub testing two arguments; forwards to the
// common N-args IC generator with the 2-arg static-call miss handler.
// NOTE(review): call expression partially elided in this extraction.
2858void StubCodeCompiler::GenerateTwoArgsUnoptimizedStaticCallStub() {
2860 2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,
// Stub invoked for a function that has not been compiled yet: calls the
// runtime to compile it, then tail-jumps to the freshly compiled entry point
// via FUNCTION_REG. (Argument push/pop around the runtime call is elided in
// this extraction.)
2867void StubCodeCompiler::GenerateLazyCompileStub() {
2868 __ EnterStubFrame();
2871 __ CallRuntime(kCompileFunctionRuntimeEntry, 1);
2874 __ LeaveStubFrame();
// Jump through the function's entry point field.
2879 FieldAddress(
FUNCTION_REG, target::Function::entry_point_offset()));
// Debugger breakpoint stub for IC calls. In PRODUCT builds this path must
// never be reached (__ Stop). Otherwise: call the breakpoint runtime handler
// and continue at the original code's entry point returned via CODE_REG.
2885void StubCodeCompiler::GenerateICCallBreakpointStub() {
2887 __ Stop(
"No debugging in PRODUCT mode");
2889 __ EnterStubFrame();
// Slot for the runtime call's result.
2892 __ pushq(Immediate(0));
2893 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
2897 __ LeaveStubFrame();
// Fetch the original code's entry point to resume execution there
// (the final jump is among the elided lines).
2899 __ movq(
RAX, FieldAddress(
CODE_REG, target::Code::entry_point_offset()));
// Debugger breakpoint stub for unoptimized static calls; same shape as the
// IC-call breakpoint stub above: runtime handler, then resume via the
// original code's entry point.
2904void StubCodeCompiler::GenerateUnoptStaticCallBreakpointStub() {
2906 __ Stop(
"No debugging in PRODUCT mode");
2908 __ EnterStubFrame();
// Slot for the runtime call's result.
2910 __ pushq(Immediate(0));
2911 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
2914 __ LeaveStubFrame();
2916 __ movq(
RAX, FieldAddress(
CODE_REG, target::Code::entry_point_offset()));
// Debugger breakpoint stub for runtime calls; same pattern as the other
// breakpoint stubs: invoke the breakpoint handler, resume at the original
// code's entry point.
2922void StubCodeCompiler::GenerateRuntimeCallBreakpointStub() {
2924 __ Stop(
"No debugging in PRODUCT mode");
2926 __ EnterStubFrame();
// Slot for the runtime call's result.
2927 __ pushq(Immediate(0));
2928 __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
2930 __ LeaveStubFrame();
2932 __ movq(
RAX, FieldAddress(
CODE_REG, target::Code::entry_point_offset()));
// Checks the isolate's single-stepping flag; when set, calls the
// single-step handler in the runtime. Unreachable in PRODUCT builds.
2938void StubCodeCompiler::GenerateDebugStepCheckStub() {
2940 __ Stop(
"No debugging in PRODUCT mode");
2943 Label stepping, done_stepping;
// Load Isolate and test its single_step byte; the branch to &stepping
// is among the elided lines.
2944 __ LoadIsolate(
RAX);
2945 __ movzxb(
RAX, Address(
RAX, target::Isolate::single_step_offset()));
2946 __ cmpq(
RAX, Immediate(0));
2948 __ Bind(&done_stepping);
// Stepping path: call the debugger's step handler.
2952 __ EnterStubFrame();
2953 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
2954 __ LeaveStubFrame();
// Subtype-test-cache probe stub: saves the registers it clobbers (some cache
// scratch registers are aliased onto PP and CODE_REG, which must therefore
// be preserved), runs the shared GenerateSubtypeTestCacheSearch, and restores
// the saved registers on both the found and not-found exits.
// NOTE(review): parameter list and several register assignments are elided
// in this extraction; verify the aliasing choices against the full source.
2972void StubCodeCompiler::GenerateSubtypeNTestCacheStub(Assembler* assembler,
2979 RegisterSet saved_registers;
// Parent function type arguments register reuses PP — must be saved.
2991 kInstanceParentFunctionTypeArgumentsReg =
PP;
2992 saved_registers.AddRegister(kInstanceParentFunctionTypeArgumentsReg);
// Delayed function type arguments register reuses CODE_REG — must be saved.
2996 kInstanceDelayedFunctionTypeArgumentsReg =
CODE_REG;
2997 saved_registers.AddRegister(kInstanceDelayedFunctionTypeArgumentsReg);
// Cache-walk scratch register also reuses PP.
3008 kCacheContentsSizeReg =
PP;
3009 saved_registers.AddRegister(kCacheContentsSizeReg);
3015 saved_registers.AddRegister(kProbeDistanceReg);
3027 __ PushRegisters(saved_registers);
// Shared search over the subtype test cache; on a hit the result slot of
// the matching entry is loaded (see kTestResult offset below).
3030 GenerateSubtypeTestCacheSearch(
3034 kInstanceParentFunctionTypeArgumentsReg,
3035 kInstanceDelayedFunctionTypeArgumentsReg, kCacheEntryEndReg,
3036 kCacheContentsSizeReg, kProbeDistanceReg,
3040 target::kCompressedWordSize *
3041 target::SubtypeTestCache::kTestResult));
// Restore clobbered registers on the hit path...
3042 __ PopRegisters(saved_registers);
// ...and on the miss path (intervening lines elided).
3048 __ PopRegisters(saved_registers);
// Returns (in RAX) the C stack pointer as it was at the point of the call:
// RSP plus one word skips the return address pushed by the call.
3057void StubCodeCompiler::GenerateGetCStackPointerStub() {
3058 __ leaq(
RAX, Address(
RSP, target::kWordSize));
// Jumps to a Dart frame during exception dispatch / long-jump: if the thread
// exited through FFI, transition back from native to generated code; clear
// the top exit frame info; restore CODE_REG and the pool pointer.
// NOTE(review): target pc/sp/fp register setup lines are elided in this view.
3069void StubCodeCompiler::GenerateJumpToFrameStub() {
3073#if defined(USING_SHADOW_CALL_STACK)
3076 Label exit_through_non_ffi;
// Only run the native->generated transition if the thread recorded that it
// exited through FFI (branch to exit_through_non_ffi elided).
3083 __ cmpq(compiler::Address(
3084 THR, compiler::target::Thread::exit_through_ffi_offset()),
3085 compiler::Immediate(target::Thread::exit_through_ffi()));
3087 __ TransitionNativeToGenerated(
true,
3089 __ Bind(&exit_through_non_ffi);
// Clear the top exit frame info: we are now leaving native frames behind.
3094 __ movq(Address(
THR, target::Thread::top_exit_frame_info_offset()),
3097 __ RestoreCodePointer();
// In AOT the global object pool lives on the thread; in JIT re-derive PP
// from the code object.
3098 if (FLAG_precompiled_mode) {
3099 __ movq(
PP, Address(
THR, target::Thread::global_object_pool_offset()));
3101 __ LoadPoolPointer(
PP);
// Runs an exception handler: loads the resume pc from the thread, moves the
// thread's active exception and stacktrace into the exception/stacktrace
// registers (clearing the thread slots — the null-store halves are elided),
// then jumps to the handler.
3110void StubCodeCompiler::GenerateRunExceptionHandlerStub() {
3114 Address(
THR, target::Thread::resume_pc_offset()));
3116 word offset_from_thread = 0;
// offset_from_thread is presumably set to the thread offset of the null
// object by an elided CanLoadFromThread call — TODO confirm.
3119 __ movq(
TMP, Address(
THR, offset_from_thread));
// Exchange Thread::active_exception with the exception register.
3122 Address exception_addr(
THR, target::Thread::active_exception_offset());
3124 __ movq(exception_addr,
TMP);
// Exchange Thread::active_stacktrace with the stacktrace register.
3127 Address stacktrace_addr(
THR, target::Thread::active_stacktrace_offset());
3129 __ movq(stacktrace_addr,
TMP);
// Deoptimizes a frame so the debugger can rewind execution: pushes the
// resume pc as the return address, then (after the elided deopt sequence)
// calls the post-deopt rewind runtime entry.
3137void StubCodeCompiler::GenerateDeoptForRewindStub() {
3142 __ pushq(Address(
THR, target::Thread::resume_pc_offset()));
3143#if defined(USING_SHADOW_CALL_STACK)
3149 __ EnterStubFrame();
3150 __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0);
3151 __ LeaveStubFrame();
// Calls the runtime to optimize the current function, then tail-jumps to the
// (new) entry point of the function via FUNCTION_REG.
3158void StubCodeCompiler::GenerateOptimizeFunctionStub() {
3159 __ movq(
CODE_REG, Address(
THR, target::Thread::optimize_stub_offset()));
3160 __ EnterStubFrame();
// Slot for the runtime call's result.
3162 __ pushq(Immediate(0));
3164 __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1);
3168 __ LeaveStubFrame();
// Jump through the function's entry point field (jmp opcode elided).
3172 FieldAddress(
FUNCTION_REG, target::Function::entry_point_offset()));
// Compares `left` and `right` for identity, treating equal-valued Doubles
// and Mints as identical (their 64-bit payloads are compared bitwise).
// If either operand is a Smi (ZF set by the elided tag tests), falls through
// to plain reference comparison.
3182static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler,
3185 Label reference_compare,
done, check_mint;
// Smi operands: value equality == reference equality, so compare refs.
3188 __ j(
ZERO, &reference_compare);
3190 __ j(
ZERO, &reference_compare);
// Both Double? Compare the raw 64-bit double payloads.
3193 __ CompareClassId(
left, kDoubleCid);
3195 __ CompareClassId(
right, kDoubleCid);
3199 __ movq(
left, FieldAddress(
left, target::Double::value_offset()));
3200 __ cmpq(
left, FieldAddress(
right, target::Double::value_offset()));
// Both Mint? Compare the 64-bit integer payloads.
3203 __ Bind(&check_mint);
3204 __ CompareClassId(
left, kMintCid);
3206 __ CompareClassId(
right, kMintCid);
3208 __ movq(
left, FieldAddress(
left, target::Mint::value_offset()));
3209 __ cmpq(
left, FieldAddress(
right, target::Mint::value_offset()));
// Fallback: plain pointer comparison.
3212 __ Bind(&reference_compare);
// Unoptimized-code entry for identical-with-number-check: in non-PRODUCT
// builds first honors single-stepping, then loads the two operands from the
// stack and delegates to GenerateIdenticalWithNumberCheckStub (call elided).
3222void StubCodeCompiler::GenerateUnoptimizedIdenticalWithNumberCheckStub() {
3223#if !defined(PRODUCT)
3225 Label stepping, done_stepping;
// Single-stepping check (branch to &stepping elided).
3226 __ LoadIsolate(
RAX);
3227 __ movzxb(
RAX, Address(
RAX, target::Isolate::single_step_offset()));
3228 __ cmpq(
RAX, Immediate(0));
3230 __ Bind(&done_stepping);
// Load left (deeper on the stack) and right operands.
3236 __ movq(
left, Address(
RSP, 2 * target::kWordSize));
3237 __ movq(
right, Address(
RSP, 1 * target::kWordSize));
// Stepping slow path: call the step handler, then retry.
3241#if !defined(PRODUCT)
3243 __ EnterStubFrame();
3244 __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
3245 __ RestoreCodePointer();
3246 __ LeaveStubFrame();
3247 __ jmp(&done_stepping);
// Optimized-code entry: same as the unoptimized variant but without the
// single-stepping check — just load both operands off the stack and delegate
// (the delegation call is elided in this extraction).
3256void StubCodeCompiler::GenerateOptimizedIdenticalWithNumberCheckStub() {
3260 __ movq(
left, Address(
RSP, 2 * target::kWordSize));
3261 __ movq(
right, Address(
RSP, 1 * target::kWordSize));
// Megamorphic call stub: hashes the receiver's cid into the megamorphic
// cache's bucket array (mask/buckets loaded from IC_DATA_REG), linear-probes
// for a matching cid, and tail-jumps to the found target. Smi receivers load
// kSmiCid explicitly; a probe failure falls back to the switchable-call-miss
// stub. NOTE(review): probe loop itself is largely elided in this extraction.
3273void StubCodeCompiler::GenerateMegamorphicCallStub() {
3284 __ Bind(&cid_loaded);
// Load hash mask and bucket array from the MegamorphicCache.
3286 FieldAddress(
IC_DATA_REG, target::MegamorphicCache::mask_offset()));
3288 target::MegamorphicCache::buckets_offset()));
// The cache guarantees free slots via this load-factor invariant, so the
// probe loop below cannot run forever.
3296 ASSERT(target::MegamorphicCache::kSpreadFactor == 7);
3305 const intptr_t
base = target::Array::data_offset();
// Hit: the entry's second word holds the target function.
3312 __ Bind(&load_target);
3319 base + target::kCompressedWordSize));
3322 target::CallSiteData::arguments_descriptor_offset()));
3324 FieldAddress(
FUNCTION_REG, target::Function::entry_point_offset()));
3325 if (!FLAG_precompiled_mode) {
3332 __ Bind(&probe_failed);
// Smi receiver: use kSmiCid and re-enter the probe.
3344 __ movq(
RAX, Immediate(kSmiCid));
3345 __ jmp(&cid_loaded);
// Cache miss: emit the switchable-call miss handling inline.
3348 GenerateSwitchableCallMissStub();
// AOT IC call stub: walks the ICData entries array comparing the receiver's
// cid (loop body partially elided); on a hit jumps either through the cached
// Function entry point (precompiled) or the cached Code object's entry point
// (JIT); on a miss tail-jumps to the thread's switchable-call-miss entry.
3354void StubCodeCompiler::GenerateICCallThroughCodeStub() {
3355 Label loop, found, miss;
3359 target::CallSiteData::arguments_descriptor_offset()));
// R13 -> first IC entry.
3360 __ leaq(
R13, FieldAddress(
R13, target::Array::data_offset()));
// Receiver cid (Smi receivers get kSmiCid).
3362 __ LoadTaggedClassIdMayBeSmi(
RAX,
RDX);
// Advance to the next 1-arg entry (no exactness slot: `false`).
3374 const intptr_t entry_length =
3375 target::ICData::TestEntryLengthFor(1,
false) *
3376 target::kCompressedWordSize;
3377 __ addq(
R13, Immediate(entry_length));
3381 if (FLAG_precompiled_mode) {
// AOT: entries store the target function; jump via its entry point.
3382 const intptr_t entry_offset =
3383 target::ICData::EntryPointIndexFor(1) * target::kCompressedWordSize;
3384 __ LoadCompressed(
RCX, Address(
R13, entry_offset));
3385 __ jmp(FieldAddress(
RCX, target::Function::entry_point_offset()));
// JIT: entries store a Code object; jump via its entry point.
3387 const intptr_t code_offset =
3388 target::ICData::CodeIndexFor(1) * target::kCompressedWordSize;
3390 __ jmp(FieldAddress(
CODE_REG, target::Code::entry_point_offset()));
// Miss: defer to the generic switchable-call miss handler.
3394 __ jmp(Address(
THR, target::Thread::switchable_call_miss_entry_offset()));
// Monomorphic call stub that also accepts Smi receivers: compares the
// receiver's cid against the cache's expected_cid; on a match jumps to the
// cached entrypoint, otherwise falls through to the switchable-call-miss
// entry. (cid-load and compare/jump lines are elided in this extraction.)
3397void StubCodeCompiler::GenerateMonomorphicSmiableCheckStub() {
3398 Label have_cid, miss;
// Smi receiver: cid is kSmiCid.
3400 __ movq(
RAX, Immediate(kSmiCid));
3403 FieldAddress(
RBX, target::MonomorphicSmiableCall::expected_cid_offset()));
// Hit: jump to the cached raw entry point.
3412 FieldAddress(
RBX, target::MonomorphicSmiableCall::entrypoint_offset()));
// Miss: generic switchable-call miss handling.
3415 __ jmp(Address(
THR, target::Thread::switchable_call_miss_entry_offset()));
// Called when a switchable (monomorphic/megamorphic) call site misses:
// calls the runtime to patch the call site with a new cache/target, then
// continues through the entry point of the code the runtime returned.
3420void StubCodeCompiler::GenerateSwitchableCallMissStub() {
3422 Address(
THR, target::Thread::switchable_call_miss_stub_offset()));
3423 __ EnterStubFrame();
// Two result slots for the runtime call (new data and new target).
3426 __ pushq(Immediate(0));
3427 __ pushq(Immediate(0));
3429 __ CallRuntime(kSwitchableCallMissRuntimeEntry, 2);
3435 __ LeaveStubFrame();
// Resume via the returned code's entry point (the entry-point kind
// argument and final jump are elided in this extraction).
3437 __ movq(
RCX, FieldAddress(
CODE_REG, target::Code::entry_point_offset(
// Single-target call stub: if the receiver's cid lies within the cache's
// [lower_limit, upper_limit] range, jump straight to the cached entry point;
// otherwise fall back to the switchable-call-miss runtime path, same as
// GenerateSwitchableCallMissStub. (cid load and range-compare branches are
// elided in this extraction.)
3447void StubCodeCompiler::GenerateSingleTargetCallStub() {
// Range check against the SingleTargetCache limits.
3451 FieldAddress(
RBX, target::SingleTargetCache::lower_limit_offset()));
3453 FieldAddress(
RBX, target::SingleTargetCache::upper_limit_offset()));
// In range: jump to the cached target's entry point.
3459 FieldAddress(
RBX, target::SingleTargetCache::entry_point_offset()));
3461 FieldAddress(
RBX, target::SingleTargetCache::target_offset()));
// Miss path: ask the runtime to re-link the call site.
3465 __ EnterStubFrame();
// Two result slots for the runtime call.
3468 __ pushq(Immediate(0));
3469 __ pushq(Immediate(0));
3471 __ CallRuntime(kSwitchableCallMissRuntimeEntry, 2);
3477 __ LeaveStubFrame();
// Resume via the returned code's entry point (final jump elided).
3479 __ movq(
RCX, FieldAddress(
CODE_REG, target::Code::entry_point_offset(
3501void StubCodeCompiler::GenerateAllocateTypedDataArrayStub(intptr_t
cid) {
3509 if (!FLAG_use_slow_path && FLAG_inline_alloc) {
3524 __ cmpq(
RDI, Immediate(max_len));
3533 const intptr_t fixed_size_plus_alignment_padding =
3534 target::TypedData::HeaderSize() +
3536 __ leaq(
RDI, Address(
RDI, scale_factor, fixed_size_plus_alignment_padding));
3538 __ movq(
RAX, Address(
THR, target::Thread::top_offset()));
3549 __ cmpq(
RCX, Address(
THR, target::Thread::end_offset()));
3551 __ CheckAllocationCanary(
RAX);
3555 __ movq(Address(
THR, target::Thread::top_offset()),
RCX);
3563 Label size_tag_overflow,
done;
3564 __ cmpq(
RDI, Immediate(target::UntaggedObject::kSizeTagMaxSizeTag));
3566 __ shlq(
RDI, Immediate(target::UntaggedObject::kTagBitsSizeTagPos -
3570 __ Bind(&size_tag_overflow);
3571 __ LoadImmediate(
RDI, Immediate(0));
3577 __ orq(
RDI, Immediate(tags));
3578 __ movq(FieldAddress(
RAX, target::Object::tags_offset()),
3585 __ StoreCompressedIntoObjectNoBarrier(
3586 RAX, FieldAddress(
RAX, target::TypedDataBase::length_offset()),
RDI);
3594 __ leaq(
RDI, FieldAddress(
RAX, target::TypedData::HeaderSize()));
3595 __ StoreInternalPointer(
3596 RAX, FieldAddress(
RAX, target::PointerBase::data_offset()),
RDI);
3603 __ addq(
RDI, Immediate(target::kObjectAlignment));
3607 __ WriteAllocationCanary(
RCX);
3610 __ Bind(&call_runtime);
3614 __ EnterStubFrame();
3615 __ PushObject(Object::null_object());
3618 __ CallRuntime(kAllocateTypedDataRuntimeEntry, 2);
3621 __ LeaveStubFrame();
static void done(const char *config, const char *src, const char *srcOptions, const char *name)
static void async_callback(void *c, std::unique_ptr< const SkImage::AsyncReadResult > result)
static bool ok(int result)
static SkTileMode optimize(SkTileMode tm, int dimension)
static bool left(const SkPoint &p0, const SkPoint &p1)
static bool right(const SkPoint &p0, const SkPoint &p1)
#define ASSERT_LESS_OR_EQUAL(expected, actual)
#define ASSERT_EQUAL(expected, actual)
#define RELEASE_ASSERT(cond)
#define COMPILE_ASSERT(expr)
virtual bool WillAllocateNewOrRemembered() const
@ DRT_GetFfiCallbackMetadata
@ DRT_ExitTemporaryIsolate
static constexpr intptr_t kCalleeSaveCpuRegisters
static constexpr intptr_t kVolatileCpuRegisters
static constexpr intptr_t kFpuArgumentRegisters
static constexpr Register kArg3Reg
static constexpr Register kArg1Reg
static constexpr intptr_t kArgumentRegisters
static constexpr Register kArg2Reg
static constexpr Register kArg4Reg
static constexpr intptr_t kCalleeSaveXmmRegisters
static bool UseUnboxedRepresentation()
static intptr_t ActivationFrameAlignment()
static intptr_t pointers_offset()
static intptr_t top_offset()
static StaticTypeExactnessState HasExactSuperType()
static StaticTypeExactnessState NotExact()
static constexpr intptr_t kMaxInputs
static constexpr int CountOneBitsWord(uword x)
static Address AddressRIPRelative(int32_t disp)
static Address VMTagAddress()
void GenerateNArgsCheckInlineCacheStub(intptr_t num_args, const RuntimeEntry &handle_ic_miss, Token::Kind kind, Optimized optimized, CallType type, Exactness exactness)
void EnsureIsNewOrRemembered()
void GenerateUsageCounterIncrement(Register temp_reg)
void GenerateAllocationStubForClass(UnresolvedPcRelativeCalls *unresolved_calls, const Class &cls, const dart::Code &allocate_object, const dart::Code &allocat_object_parametrized)
void GenerateOptimizedUsageCounterIncrement()
static intptr_t WordOffsetFromFpToCpuRegister(Register cpu_register)
const uint8_t uint32_t uint32_t GError ** error
uword MakeTagWordForNewSpaceObject(classid_t cid, uword instance_size)
bool CanLoadFromThread(const dart::Object &object, intptr_t *offset)
word ToRawSmi(const dart::Object &a)
bool SizeFitsInSizeTag(uword instance_size)
word TypedDataMaxNewSpaceElements(classid_t cid)
word TypedDataElementSizeInBytes(classid_t cid)
GrowableArray< UnresolvedPcRelativeCall * > UnresolvedPcRelativeCalls
bool IsSameObject(const Object &a, const Object &b)
const Object & NullObject()
const Code & StubCodeAllocateArray()
const Class & MintClass()
const Register kWriteBarrierSlotReg
@ TIMES_COMPRESSED_HALF_WORD_SIZE
@ TIMES_COMPRESSED_WORD_SIZE
const Register kExceptionObjectReg
const Register kWriteBarrierObjectReg
Thread * DLRT_GetFfiCallbackMetadata(FfiCallbackMetadata::Trampoline trampoline, uword *out_entry_point, uword *out_trampoline_type)
const RegList kAllFpuRegistersList
const Register kWriteBarrierValueReg
static constexpr bool IsCalleeSavedRegister(Register reg)
static constexpr uword kZapReturnAddress
void DLRT_ExitTemporaryIsolate()
const Register ARGS_DESC_REG
const int kNumberOfFpuRegisters
static constexpr bool IsArgumentRegister(Register reg)
constexpr RegList kDartAvailableCpuRegs
const intptr_t kStoreBufferWrapperSize
const Register FUNCTION_REG
const Register IC_DATA_REG
static constexpr intptr_t kAllocationRedZoneSize
static constexpr uword kZapCodeReg
const Register kStackTraceObjectReg
const int kFpuRegisterSize
ByteRegister ByteRegisterOf(Register reg)
static constexpr Register kResultReg
static constexpr Register kLengthReg
static constexpr Register kTypeArgumentsReg
static constexpr Register kResultReg
static constexpr Register kTempReg
static constexpr Register kTypeArgumentsReg
static constexpr Register kResultReg
static constexpr Register kTagsReg
static constexpr Register kLengthReg
static constexpr Register kResultReg
static constexpr Register kClassIdReg
static constexpr intptr_t kObjectAlignmentLog2
static constexpr intptr_t kObjectAlignment
static constexpr Register kLengthReg
static constexpr Register kIndexReg
static constexpr Register kInstanceInstantiatorTypeArgumentsReg
static constexpr Register kInstanceCidOrSignatureReg
static constexpr Register kCacheEntryReg
static constexpr Register kResultReg
static constexpr Register kDstTypeReg
static constexpr Register kSubtypeTestCacheResultReg
#define NOT_IN_PRODUCT(code)