#define SHOULD_NOT_INCLUDE_RUNTIME

#if defined(TARGET_ARCH_X64)

  rt.Call(kEnsureRememberedAndMarkingDeferredRuntimeEntry, 2);
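// Wraps generation of a native call so that, when compiling for the thread
// sanitizer (FLAG_target_thread_sanitizer), the call runs under a
// setjmp-based trampoline: space for a jmp_buf is reserved on the stack so
// that exceptions thrown across the runtime boundary unwind in a way TSAN
// can observe. (The elided lines presumably install the jmp_buf through the
// Thread's tsan_utils/setjmp state before performing the call.)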
static void WithExceptionCatchingTrampoline(Assembler* assembler,
#if !defined(USING_SIMULATOR)
  const intptr_t kJumpBufferSize = sizeof(jmp_buf);
  const RegisterSet volatile_registers(
  if (FLAG_target_thread_sanitizer) {
    __ AddImmediate(RSP, Immediate(-kJumpBufferSize));
    __ PushRegisters(volatile_registers);
    __ MoveRegister(kSavedRspReg, RSP);
    __ MoveRegister(RSP, kSavedRspReg);
    __ PopRegisters(volatile_registers);
    __ CompareImmediate(RAX, 0);
    __ BranchIf(EQUAL, &do_native_call);
    __ AddImmediate(RSP, Immediate(kJumpBufferSize));
    __ MoveRegister(kSavedRspReg, RSP);
#if !defined(USING_SIMULATOR)
  if (FLAG_target_thread_sanitizer) {
    __ MoveRegister(RSP, kSavedRspReg);
    __ AddImmediate(RSP, Immediate(kJumpBufferSize));
    const Register kTsanUtilsReg2 = kSavedRspReg;
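// Calls a C++ runtime entry from Dart code. RBX appears to hold the address
// of the runtime function (it is the CallCFunction target below) and, by
// analogy with GenerateCallNativeWithWrapperStub later in this file, R10 the
// argument count; a NativeArguments structure (thread, argc_tag, argv,
// retval) is assembled on the stack before the C call.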
void StubCodeCompiler::GenerateCallToRuntimeStub() {
  __ movq(RAX, Immediate(VMTag::kDartTagId));
  __ Stop("Not coming from Dart code.");
  WithExceptionCatchingTrampoline(assembler, [&]() {
    __ movq(Address(RSP, thread_offset), THR);
    __ movq(Address(RSP, argc_tag_offset),
    __ movq(Address(RSP, argv_offset),
    __ movq(Address(RSP, retval_offset),
#if defined(DART_TARGET_OS_WINDOWS)
        CallingConventions::kRegisterTransferLimit);
    __ CallCFunction(RBX);
  if (FLAG_precompiled_mode) {
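// Generates a shared slow-path stub: saves the live registers (optionally
// including FPU registers), restores CODE_REG from a Thread-relative slot so
// the stub frame is well-formed, runs the supplied runtime call, and then
// restores the saved registers.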
void StubCodeCompiler::GenerateSharedStubGeneric(
    bool save_fpu_registers,
    intptr_t self_code_stub_offset_from_thread,
  const RegisterSet saved_registers(
  __ PushRegisters(saved_registers);
  const intptr_t kSavedCpuRegisterSlots =
  const intptr_t kSavedFpuRegisterSlots =
  const intptr_t kAllSavedRegistersSlots =
      kSavedCpuRegisterSlots + kSavedFpuRegisterSlots;
  __ movq(CODE_REG, Address(THR, self_code_stub_offset_from_thread));
  perform_runtime_call();
  __ PopRegisters(saved_registers);
void StubCodeCompiler::GenerateSharedStub(
    bool save_fpu_registers,
    const RuntimeEntry* target,
    intptr_t self_code_stub_offset_from_thread,
    bool store_runtime_result_in_result_register) {
  auto perform_runtime_call = [&]() {
    if (store_runtime_result_in_result_register) {
      __ PushImmediate(Immediate(0));
    if (store_runtime_result_in_result_register) {
  GenerateSharedStubGeneric(save_fpu_registers,
                            self_code_stub_offset_from_thread, allow_return,
                            perform_runtime_call);
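// Enters a safepoint: all general-purpose registers are saved because this
// stub is invoked from generated code that may have live values anywhere,
// then the stack is aligned for the C ABI and the enter-safepoint function
// is called through its Thread-relative slot.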
void StubCodeCompiler::GenerateEnterSafepointStub() {
  RegisterSet all_registers;
  all_registers.AddAllGeneralRegisters();
  __ PushRegisters(all_registers);
  __ ReserveAlignedFrameSpace(0);
  __ movq(RAX, Address(THR, kEnterSafepointRuntimeEntry.OffsetFromThread()));
  __ CallCFunction(RAX);
  __ PopRegisters(all_registers);
static void GenerateExitSafepointStubCommon(Assembler* assembler,
                                            uword runtime_entry_offset) {
  RegisterSet all_registers;
  all_registers.AddAllGeneralRegisters();
  __ PushRegisters(all_registers);
  __ ReserveAlignedFrameSpace(0);
  __ movq(RAX, Address(THR, runtime_entry_offset));
  __ CallCFunction(RAX);
  __ PopRegisters(all_registers);

void StubCodeCompiler::GenerateExitSafepointStub() {
  GenerateExitSafepointStubCommon(
      assembler, kExitSafepointRuntimeEntry.OffsetFromThread());

void StubCodeCompiler::GenerateExitSafepointIgnoreUnwindInProgressStub() {
  GenerateExitSafepointStubCommon(
      kExitSafepointIgnoreUnwindInProgressRuntimeEntry.OffsetFromThread());
void StubCodeCompiler::GenerateCallNativeThroughSafepointStub() {
  __ CallCFunction(RBX, true);
  __ TransitionNativeToGenerated(true);
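// Loads a BSS entry via a PC-relative leaq (a fixed 7-byte encoding on x64,
// asserted below) and then dereferences it; the relocation recorded by
// InsertBSSRelocation is patched when the snapshot is loaded.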
  compiler::Label skip_reloc;
  InsertBSSRelocation(relocation);
  const intptr_t reloc_end = __ CodeSize();
  const intptr_t kLeaqLength = 7;
  ASSERT((__ CodeSize() - reloc_end) == kLeaqLength);
  __ movq(tmp, compiler::Address(dst, 0));
  __ movq(dst, compiler::Address(dst, 0));
void StubCodeCompiler::GenerateLoadFfiCallbackMetadataRuntimeFunction(
    uword function_index,
  const intptr_t kLeaqLength = 7;
  const intptr_t code_size = __ CodeSize();
static const RegisterSet kArgumentRegisterSet(
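// Generates the FFI callback trampolines: small fixed-size per-callback
// entry sequences (kNativeCallbackTrampolineSize each, checked via the
// size_before/size_after bookkeeping below) followed by one shared stub of
// at most kNativeCallbackSharedStubSize bytes. The shared stub appears to
// save the argument registers, obtain the callback's Thread via
// DLRT_GetFfiCallbackMetadata (called through RAX), call into Dart, and on
// return re-enter the safepoint (EnterFullSafepoint).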
void StubCodeCompiler::GenerateFfiCallbackTrampolineStub() {
  const intptr_t kLeaqLength = 7;
  const intptr_t size_before = __ CodeSize();
  const intptr_t size_after = __ CodeSize();
      FfiCallbackMetadata::kNativeCallbackTrampolineSize *
  const intptr_t shared_stub_start = __ CodeSize();
  COMPILE_ASSERT(2 == FfiCallbackMetadata::kNativeCallbackTrampolineStackDelta);
  __ PushRegisters(kArgumentRegisterSet);
  __ pushq(Immediate(0));
  __ pushq(Immediate(0));
#if defined(DART_TARGET_OS_FUCHSIA)
  if (FLAG_precompiled_mode) {
  __ movq(RAX, Immediate(
  GenerateLoadFfiCallbackMetadataRuntimeFunction(
  __ ReserveAlignedFrameSpace(0);
  __ CallCFunction(RAX);
  __ PopRegisters(kArgumentRegisterSet);
  __ cmpq(THR, Immediate(0));
  __ EnterFullSafepoint();
#if defined(DART_TARGET_OS_FUCHSIA)
  if (FLAG_precompiled_mode) {
  GenerateLoadFfiCallbackMetadataRuntimeFunction(
  __ ReserveAlignedFrameSpace(0);
  __ CallCFunction(RAX);
      FfiCallbackMetadata::kNativeCallbackSharedStubSize);
void StubCodeCompiler::GenerateDispatchTableNullErrorStub() {
  __ CallRuntime(kDispatchTableNullErrorRuntimeEntry, 1);
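// Range-error slow path. The shift/compare sequence below appears to test
// whether the unboxed length or index fits in a Smi; if not, it is boxed
// as a Mint via kAllocateMintRuntimeEntry before kRangeErrorRuntimeEntry is
// called with the length and index as its two arguments.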
void StubCodeCompiler::GenerateRangeError(bool with_fpu_regs) {
  auto perform_runtime_call = [&]() {
#if !defined(DART_COMPRESSED_POINTERS)
    __ sarq(TMP, Immediate(30));
    __ addq(TMP, Immediate(1));
    __ cmpq(TMP, Immediate(2));
    __ PushImmediate(Immediate(0));
    __ CallRuntime(kAllocateMintRuntimeEntry, 0);
    __ PushRegistersInOrder(
    __ CallRuntime(kRangeErrorRuntimeEntry, 2);
  GenerateSharedStubGeneric(
      false, perform_runtime_call);
void StubCodeCompiler::GenerateWriteError(bool with_fpu_regs) {
  auto perform_runtime_call = [&]() {
    __ CallRuntime(kWriteErrorRuntimeEntry, 2);
  GenerateSharedStubGeneric(
      false, perform_runtime_call);
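// Calls a native function through a C wrapper (no-scope, auto-scope, or
// bootstrap; see the three stubs that follow). R8 is used for the VM-tag
// check here, and the NativeArguments structure is assembled on the stack
// from THR (thread), R10 (argc_tag), R13 (argv), and RAX (retval slot).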
static void GenerateCallNativeWithWrapperStub(Assembler* assembler,
                                              Address wrapper_address) {
  const intptr_t native_args_struct_offset = 0;
  const intptr_t thread_offset =
  const intptr_t argc_tag_offset =
  const intptr_t argv_offset =
  const intptr_t retval_offset =
  __ movq(R8, Immediate(VMTag::kDartTagId));
  __ Stop("Not coming from Dart code.");
  WithExceptionCatchingTrampoline(assembler, [&]() {
    __ movq(Address(RSP, thread_offset), THR);
    __ movq(Address(RSP, argc_tag_offset), R10);
    __ movq(Address(RSP, argv_offset), R13);
    __ movq(Address(RSP, retval_offset), RAX);
    __ movq(RAX, wrapper_address);
    __ CallCFunction(RAX);
  if (FLAG_precompiled_mode) {
void StubCodeCompiler::GenerateCallNoScopeNativeStub() {
  GenerateCallNativeWithWrapperStub(

void StubCodeCompiler::GenerateCallAutoScopeNativeStub() {
  GenerateCallNativeWithWrapperStub(

void StubCodeCompiler::GenerateCallBootstrapNativeStub() {
  GenerateCallNativeWithWrapperStub(
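// In the patching stubs that follow, the pushq(Immediate(0)) before each
// runtime call reserves a stack slot for the call's result (the Code object
// to continue execution with).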
void StubCodeCompiler::GenerateCallStaticFunctionStub() {
  __ pushq(Immediate(0));
  __ CallRuntime(kPatchStaticCallRuntimeEntry, 0);

void StubCodeCompiler::GenerateFixCallersTargetStub() {
  __ BranchOnMonomorphicCheckedEntryJIT(&monomorphic);
  __ pushq(Immediate(0));
  __ CallRuntime(kFixCallersTargetRuntimeEntry, 0);
  __ pushq(Immediate(0));
  __ CallRuntime(kFixCallersTargetMonomorphicRuntimeEntry, 2);

void StubCodeCompiler::GenerateFixAllocationStubTargetStub() {
  __ pushq(Immediate(0));
  __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);

void StubCodeCompiler::GenerateFixParameterizedAllocationStubTargetStub() {
  __ pushq(Immediate(0));
  __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);
static void PushArrayOfArguments(Assembler* assembler) {
  Label loop, loop_condition;
  __ jmp(&loop_condition, kJumpLength);
  __ StoreCompressedIntoObject(RAX, Address(RBX, 0), RDI);
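// Deoptimization runs in three phases, visible in the runtime calls below:
// kDeoptimizeCopyFrame copies the optimized frame out to a side buffer,
// kDeoptimizeFillFrame rebuilds the unoptimized frame(s) in its place, and
// kDeoptimizeMaterialize materializes any objects the optimizer had
// eliminated. The saved result/exception/stacktrace slots preserve the live
// value(s) across the frame rewrite.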
static void GenerateDeoptimizationSequence(Assembler* assembler,
  const intptr_t saved_result_slot_from_fp =
  const intptr_t saved_exception_slot_from_fp =
  const intptr_t saved_stacktrace_slot_from_fp =
  LeafRuntimeScope rt(assembler,
  rt.Call(kDeoptimizeCopyFrameRuntimeEntry, 2);
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ EnterStubFrame();
  LeafRuntimeScope rt(assembler,
  rt.Call(kDeoptimizeFillFrameRuntimeEntry, 1);
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ EnterStubFrame();
  __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0);
  __ LeaveStubFrame();
  __ EnterStubFrame();
  __ CallRuntime(kReThrowRuntimeEntry, 3);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateDeoptimizeLazyFromReturnStub() {

void StubCodeCompiler::GenerateDeoptimizeLazyFromThrowStub() {

void StubCodeCompiler::GenerateDeoptimizeStub() {
static void GenerateNoSuchMethodDispatcherBody(Assembler* assembler,
  __ pushq(Immediate(0));
  __ pushq(receiver_reg);
  Label args_count_ok;
  PushArrayOfArguments(assembler);
  const intptr_t kNumArgs = 4;
  __ CallRuntime(kNoSuchMethodFromCallStubRuntimeEntry, kNumArgs);
  __ LeaveStubFrame();

static void GenerateDispatcherCode(Assembler* assembler,
                                   Label* call_target_function) {
  __ Comment("NoSuchMethodDispatch");
  __ EnterStubFrame();
      Address(RBP, RDI, TIMES_HALF_WORD_SIZE,
  GenerateNoSuchMethodDispatcherBody(assembler, RAX);

void StubCodeCompiler::GenerateNoSuchMethodDispatcherStub() {
  __ EnterStubFrame();
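// Array allocation fast path: attempts an inline new-space allocation when
// FLAG_inline_alloc is on, checking the requested length against max_len.
// At size_tag_overflow the size-tag bits are set to zero, which in the Dart
// object layout means the object is too large for the inline size tag and
// its size must be derived from the class instead. The slow path calls
// kAllocateArrayRuntimeEntry with the length and element type arguments.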
void StubCodeCompiler::GenerateAllocateArrayStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    const Immediate& max_len =
    const intptr_t fixed_size_plus_alignment_padding =
        fixed_size_plus_alignment_padding));
    const intptr_t cid = kArrayCid;
    Label size_tag_overflow, done;
    __ Bind(&size_tag_overflow);
    __ LoadImmediate(RDI, Immediate(0));
    __ orq(RDI, Immediate(tags));
    __ StoreCompressedIntoObjectNoBarrier(
    __ StoreCompressedIntoObjectNoBarrier(
    __ WriteAllocationCanary(RCX);
  __ EnterStubFrame();
  __ pushq(Immediate(0));
  __ CallRuntime(kAllocateArrayRuntimeEntry, 2);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateAllocateMintSharedWithFPURegsStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
  GenerateSharedStub(
      true, &kAllocateMintRuntimeEntry,

void StubCodeCompiler::GenerateAllocateMintSharedWithoutFPURegsStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
      false, &kAllocateMintRuntimeEntry,
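// Entry from C++ into Dart code. The stub saves the C-ABI callee-saved
// registers (kCalleeSavedRegisterSet), installs the current Thread into THR,
// records the arguments descriptor in the entry frame, pushes the Dart
// arguments one by one (compressed pointers handled separately), and calls
// the target's entry point; on return it restores the callee-saved state.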
static const RegisterSet kCalleeSavedRegisterSet(

void StubCodeCompiler::GenerateInvokeDartCodeStub() {
  const intptr_t kInitialOffset = 2;
  __ pushq(kArgDescReg);
  __ PushRegisters(kCalleeSavedRegisterSet);
  if (THR != kThreadReg) {
    __ movq(THR, kThreadReg);
#if defined(USING_SHADOW_CALL_STACK)
  __ EmitEntryFrameVerification();
  __ movq(R10, kArgDescReg);
  Label args_count_ok;
  __ movq(Address(RBP, kArgumentsDescOffset), RBX);
  Label push_arguments;
  Label done_push_arguments;
  __ LoadImmediate(RAX, Immediate(0));
  __ Bind(&push_arguments);
#if defined(DART_COMPRESSED_POINTERS)
  __ Bind(&done_push_arguments);
  if (FLAG_precompiled_mode) {
  __ movq(RDX, Address(RBP, kArgumentsDescOffset));
#if defined(USING_SHADOW_CALL_STACK)
  __ PopRegisters(kCalleeSavedRegisterSet);
  __ set_constant_pool_allowed(false);
static void GenerateAllocateContextSpaceStub(Assembler* assembler,
  intptr_t fixed_size_plus_alignment_padding =
      fixed_size_plus_alignment_padding));
  __ CheckAllocationCanary(RAX);
  Label size_tag_overflow, done;
      fixed_size_plus_alignment_padding));
  __ Bind(&size_tag_overflow);
  __ LoadImmediate(R13, Immediate(0));
  __ orq(R13, Immediate(tags));
void StubCodeCompiler::GenerateAllocateContextStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    GenerateAllocateContextSpaceStub(assembler, &slow_case);
    __ StoreCompressedIntoObjectNoBarrier(
    __ jmp(&entry, kJumpLength);
    __ StoreCompressedIntoObjectNoBarrier(
    __ cmpq(R10, Immediate(0));
  __ EnterStubFrame();
  __ CallRuntime(kAllocateContextRuntimeEntry, 1);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateCloneContextStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    GenerateAllocateContextSpaceStub(assembler, &slow_case);
    __ StoreCompressedIntoObjectNoBarrier(
    __ StoreCompressedIntoObjectNoBarrier(
    __ cmpq(R10, Immediate(0));
  __ EnterStubFrame();
  __ CallRuntime(kCloneContextRuntimeEntry, 1);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateWriteBarrierWrappersStub() {
  intptr_t start = __ CodeSize();
  intptr_t end = __ CodeSize();
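// Write-barrier slow path, shared by the plain and array barriers (the
// `cards` parameter). If incremental marking is active, the stored value is
// pushed onto a marking stack block (old or new generation, per the two
// runtime entries below); if the store created an old->new pointer, the
// object is added to the store buffer, or, for large arrays when `cards` is
// true, its card-table entry is marked instead, with
// kRememberCardRuntimeEntry as the out-of-line fallback.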
static void GenerateWriteBarrierStubHelper(Assembler* assembler, bool cards) {
  __ j(ZERO, &skip_marking);
  Label retry, is_new, done;
  auto mark_stack_push = [&](intptr_t offset, const RuntimeEntry& entry) {
  LeafRuntimeScope rt(assembler,
      kOldMarkingStackBlockProcessRuntimeEntry);
      kNewMarkingStackBlockProcessRuntimeEntry);
  Label add_to_remembered_set, remember_card;
  __ Bind(&add_to_remembered_set);
  __ Stop("Wrong barrier");
  LeafRuntimeScope rt(assembler,
  rt.Call(kStoreBufferBlockProcessRuntimeEntry, 1);
  Label remember_card_slow;
  __ movq(RAX, Immediate(1));
  __ Bind(&remember_card_slow);
  LeafRuntimeScope rt(assembler,
  rt.Call(kRememberCardRuntimeEntry, 2);
void StubCodeCompiler::GenerateWriteBarrierStub() {
  GenerateWriteBarrierStubHelper(assembler, false);

void StubCodeCompiler::GenerateArrayWriteBarrierStub() {
  GenerateWriteBarrierStubHelper(assembler, true);
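// Generic inline object allocation: the instance size is extracted from the
// tags word, new-space top is bumped, and the fields between the header and
// the new top are initialized (the kNextFieldReg loop below). For
// parameterized classes, the type arguments are stored at the field offset
// recorded in the class (host_type_arguments_field_offset_in_words). Falls
// back to the slow stub when allocation tracing is on or allocation fails.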
static void GenerateAllocateObjectHelper(Assembler* assembler,
                                         bool is_cls_parameterized) {
#if !defined(PRODUCT)
  __ MaybeTraceAllocation(kCidRegister, &slow_case, TMP);
  __ ExtractInstanceSizeFromTags(kInstanceSizeReg, kTagsReg);
      kInstanceSizeReg, TIMES_1, 0));
  __ leaq(kNextFieldReg,
      Address(kNextFieldReg, offset),
  __ cmpq(kNextFieldReg, kNewTopReg);
  __ WriteAllocationCanary(kNewTopReg);
  if (is_cls_parameterized) {
    Label not_parameterized_case;
    __ ExtractClassIdFromTags(kClsIdReg, kTagsReg);
    __ LoadClassById(kTypeOffsetReg, kClsIdReg);
        FieldAddress(kTypeOffsetReg,
            host_type_arguments_field_offset_in_words_offset()));
    __ StoreCompressedIntoObject(
    __ Bind(&not_parameterized_case);
  if (!is_cls_parameterized) {
void StubCodeCompiler::GenerateAllocateObjectStub() {
  GenerateAllocateObjectHelper(assembler, false);

void StubCodeCompiler::GenerateAllocateObjectParameterizedStub() {
  GenerateAllocateObjectHelper(assembler, true);
void StubCodeCompiler::GenerateAllocateObjectSlowStub() {
  if (!FLAG_precompiled_mode) {
  __ EnterStubFrame();
  __ CallRuntime(kAllocateObjectRuntimeEntry, 2);
  __ LeaveStubFrame();
    const Code& allocate_object,
    const Code& allocat_object_parametrized) {
  const intptr_t cls_type_arg_field_offset =
  ASSERT(!is_cls_parameterized ||
  ASSERT(instance_size > 0);
  __ movq(kTagsReg, Immediate(tags));
  if (!FLAG_use_slow_path && FLAG_inline_alloc &&
    if (is_cls_parameterized) {
          CastHandle<Object>(allocat_object_parametrized))) {
        __ GenerateUnRelocatedPcRelativeTailCall();
        unresolved_calls->Add(new UnresolvedPcRelativeCall(
            __ CodeSize(), allocat_object_parametrized, true));
            allocate_object_parameterized_entry_point_offset()));
        __ GenerateUnRelocatedPcRelativeTailCall();
        unresolved_calls->Add(new UnresolvedPcRelativeCall(
            __ CodeSize(), allocate_object, true));
  if (!is_cls_parameterized) {
void StubCodeCompiler::GenerateCallClosureNoSuchMethodStub() {
  __ EnterStubFrame();
  __ LoadCompressedSmi(
  __ pushq(Immediate(0));
  Label args_count_ok;
  const intptr_t kNumArgs = 4;
  __ CallRuntime(kNoSuchMethodFromPrologueRuntimeEntry, kNumArgs);
  if (FLAG_precompiled_mode) {
  if (FLAG_trace_optimized_ic_calls) {
    __ EnterStubFrame();
    __ CallRuntime(kTraceICCallRuntimeEntry, 2);
    __ LeaveStubFrame();
  if (FLAG_precompiled_mode) {
  if (FLAG_optimization_counter_threshold >= 0) {
    __ Comment("Increment function counter");
static void EmitFastSmiOp(Assembler* assembler,
                          Label* not_smi_or_overflow) {
  __ Comment("Fast Smi op");
  __ Stop("Incorrect IC data");
  if (FLAG_optimization_counter_threshold >= 0) {
    const intptr_t count_offset =
static void GenerateRecordEntryPoint(Assembler* assembler) {
  __ BindUncheckedEntryPoint();
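// The n-argument inline-cache stub: after an optional fast-path Smi
// operation (EmitFastSmiOp) and a single-stepping check, it loads the
// receiver's (and second argument's) class ids and scans the ICData entries
// in a partially unrolled loop, comparing cids entry by entry. On a hit it
// bumps the entry's count and tail-jumps to the cached target; on a miss it
// calls handle_ic_miss with the arguments plus the ICData. The
// exactness-check variants additionally track whether the receiver's static
// type is exact, which allows dispatch through the unchecked entry point.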
    const RuntimeEntry& handle_ic_miss,
    Optimized optimized,
    Exactness exactness) {
  if (FLAG_precompiled_mode) {
  const bool save_entry_point = kind == Token::kILLEGAL;
  if (save_entry_point) {
  ASSERT(num_args == 1 || num_args == 2);
  __ cmpq(RCX, Immediate(num_args));
  __ Stop("Incorrect stub for IC data");
#if !defined(PRODUCT)
  Label stepping, done_stepping;
  __ Comment("Check single stepping");
  __ LoadIsolate(RAX);
  Label not_smi_or_overflow;
  if (kind != Token::kILLEGAL) {
    EmitFastSmiOp(assembler, kind, num_args, &not_smi_or_overflow);
  __ Bind(&not_smi_or_overflow);
  __ Comment("Extract ICData initial values and receiver cid");
  __ LoadTaggedClassIdMayBeSmi(RAX, RDX);
  if (num_args == 2) {
    __ LoadTaggedClassIdMayBeSmi(RCX, R9);
  __ LoadTaggedClassIdMayBeSmi(RAX, RDX);
  if (num_args == 2) {
    __ LoadTaggedClassIdMayBeSmi(RCX, R9);
  Label loop, found, miss;
  __ Comment("ICData loop");
  const bool optimize = kind == Token::kILLEGAL;
  const intptr_t target_offset =
  const intptr_t count_offset =
  const intptr_t exactness_offset =
  for (int unroll = optimize ? 4 : 2; unroll >= 0; unroll--) {
    if (num_args == 2) {
    __ addq(R13, Immediate(entry_size));
  __ Comment("IC miss");
  __ EnterStubFrame();
  if (save_entry_point) {
  __ pushq(Immediate(0));
  for (intptr_t i = 0; i < num_args; i++) {
  __ CallRuntime(handle_ic_miss, num_args + 1);
  for (intptr_t i = 0; i < num_args + 1; i++) {
  if (save_entry_point) {
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  Label call_target_function;
  if (FLAG_precompiled_mode) {
    GenerateDispatcherCode(assembler, &call_target_function);
    __ jmp(&call_target_function);
  Label call_target_function_through_unchecked_entry;
  __ j(LESS, &exactness_ok);
  __ j(EQUAL, &call_target_function_through_unchecked_entry);
#if defined(DART_COMPRESSED_POINTERS)
  __ j(EQUAL, &call_target_function_through_unchecked_entry);
  __ OBJ(mov)(Address(R13, exactness_offset),
  if (FLAG_optimization_counter_threshold >= 0) {
    __ Comment("Update ICData counter");
  __ Comment("Call target (via specified entry point)");
  __ Bind(&call_target_function);
  if (save_entry_point) {
    __ jmp(Address(R8, 0));
  __ Bind(&call_target_function_through_unchecked_entry);
  if (FLAG_optimization_counter_threshold >= 0) {
    __ Comment("Update ICData counter");
  __ Comment("Call target (via unchecked entry point)");
#if !defined(PRODUCT)
  __ EnterStubFrame();
  if (save_entry_point) {
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  if (save_entry_point) {
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ jmp(&done_stepping);
void StubCodeCompiler::GenerateOneArgCheckInlineCacheStub() {
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateOneArgCheckInlineCacheWithExactnessCheckStub() {
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateTwoArgsCheckInlineCacheStub() {
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateSmiAddInlineCacheStub() {
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD, kUnoptimized,

void StubCodeCompiler::GenerateSmiLessInlineCacheStub() {
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kLT, kUnoptimized,

void StubCodeCompiler::GenerateSmiEqualInlineCacheStub() {
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ, kUnoptimized,

void StubCodeCompiler::GenerateOneArgOptimizedCheckInlineCacheStub() {
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL, kOptimized,

void StubCodeCompiler::
    GenerateOneArgOptimizedCheckInlineCacheWithExactnessCheckStub() {
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL, kOptimized,

void StubCodeCompiler::GenerateTwoArgsOptimizedCheckInlineCacheStub() {
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,
void StubCodeCompiler::GenerateZeroArgsUnoptimizedStaticCallStub() {
  __ cmpq(RCX, Immediate(0));
  __ Stop("Incorrect IC data for unoptimized static call");
#if !defined(PRODUCT)
  Label stepping, done_stepping;
  __ LoadIsolate(RAX);
  __ cmpq(RAX, Immediate(0));
  const intptr_t target_offset =
  const intptr_t count_offset =
  if (FLAG_optimization_counter_threshold >= 0) {
  __ jmp(Address(R8, 0));
#if !defined(PRODUCT)
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ RestoreCodePointer();
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateOneArgUnoptimizedStaticCallStub() {

void StubCodeCompiler::GenerateTwoArgsUnoptimizedStaticCallStub() {
      2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateLazyCompileStub() {
  __ EnterStubFrame();
  __ CallRuntime(kCompileFunctionRuntimeEntry, 1);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateICCallBreakpointStub() {
  __ Stop("No debugging in PRODUCT mode");
  __ EnterStubFrame();
  __ pushq(Immediate(0));
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateUnoptStaticCallBreakpointStub() {
  __ Stop("No debugging in PRODUCT mode");
  __ EnterStubFrame();
  __ pushq(Immediate(0));
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateRuntimeCallBreakpointStub() {
  __ Stop("No debugging in PRODUCT mode");
  __ EnterStubFrame();
  __ pushq(Immediate(0));
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateDebugStepCheckStub() {
  __ Stop("No debugging in PRODUCT mode");
  Label stepping, done_stepping;
  __ LoadIsolate(RAX);
  __ cmpq(RAX, Immediate(0));
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateSubtypeNTestCacheStub(Assembler* assembler,
  RegisterSet saved_registers;
  kInstanceParentFunctionTypeArgumentsReg = PP;
  saved_registers.AddRegister(kInstanceParentFunctionTypeArgumentsReg);
  kInstanceDelayedFunctionTypeArgumentsReg = CODE_REG;
  saved_registers.AddRegister(kInstanceDelayedFunctionTypeArgumentsReg);
  kCacheContentsSizeReg = PP;
  saved_registers.AddRegister(kCacheContentsSizeReg);
  saved_registers.AddRegister(kProbeDistanceReg);
  __ PushRegisters(saved_registers);
  GenerateSubtypeTestCacheSearch(
      kInstanceParentFunctionTypeArgumentsReg,
      kInstanceDelayedFunctionTypeArgumentsReg, kCacheEntryEndReg,
      kCacheContentsSizeReg, kProbeDistanceReg,
  __ PopRegisters(saved_registers);
  __ PopRegisters(saved_registers);
void StubCodeCompiler::GenerateGetCStackPointerStub() {

void StubCodeCompiler::GenerateJumpToFrameStub() {
#if defined(USING_SHADOW_CALL_STACK)
  Label exit_through_non_ffi;
  __ cmpq(compiler::Address(
  __ TransitionNativeToGenerated(true,
  __ Bind(&exit_through_non_ffi);
  __ RestoreCodePointer();
  if (FLAG_precompiled_mode) {
    __ LoadPoolPointer(PP);
void StubCodeCompiler::GenerateRunExceptionHandlerStub() {
  word offset_from_thread = 0;
  __ movq(TMP, Address(THR, offset_from_thread));
  __ movq(exception_addr, TMP);
  __ movq(stacktrace_addr, TMP);
void StubCodeCompiler::GenerateDeoptForRewindStub() {
#if defined(USING_SHADOW_CALL_STACK)
  __ EnterStubFrame();
  __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateOptimizeFunctionStub() {
  __ EnterStubFrame();
  __ pushq(Immediate(0));
  __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1);
  __ LeaveStubFrame();
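// identical() with number semantics: doubles and mints are compared by
// value (their class ids are checked first), everything else by reference
// via CompareObjectRegisters. Smi operands appear to short-circuit to the
// reference compare, since equal Smis are already identical pointers.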
static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler,
  Label reference_compare, done, check_mint;
  __ j(ZERO, &reference_compare);
  __ j(ZERO, &reference_compare);
  __ CompareClassId(left, kDoubleCid);
  __ CompareClassId(right, kDoubleCid);
  __ CompareClassId(left, kMintCid);
  __ CompareClassId(right, kMintCid);
  __ Bind(&reference_compare);
  __ CompareObjectRegisters(left, right);

void StubCodeCompiler::GenerateUnoptimizedIdenticalWithNumberCheckStub() {
#if !defined(PRODUCT)
  Label stepping, done_stepping;
  __ LoadIsolate(RAX);
  __ cmpq(RAX, Immediate(0));
  GenerateIdenticalWithNumberCheckStub(assembler, left, right);
#if !defined(PRODUCT)
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ jmp(&done_stepping);

void StubCodeCompiler::GenerateOptimizedIdenticalWithNumberCheckStub() {
  GenerateIdenticalWithNumberCheckStub(assembler, left, right);
void StubCodeCompiler::GenerateMegamorphicCallStub() {
  if (!FLAG_precompiled_mode) {
  __ movq(RAX, Immediate(kSmiCid));
  __ jmp(&cid_loaded);
  GenerateSwitchableCallMissStub();

void StubCodeCompiler::GenerateICCallThroughCodeStub() {
  Label loop, found, miss;
  __ LoadTaggedClassIdMayBeSmi(RAX, RDX);
  const intptr_t entry_length =
  __ addq(R13, Immediate(entry_length));
  if (FLAG_precompiled_mode) {
    const intptr_t entry_offset =
    __ LoadCompressed(RCX, Address(R13, entry_offset));
    const intptr_t code_offset =
void StubCodeCompiler::GenerateMonomorphicSmiableCheckStub() {
  Label have_cid, miss;
  __ movq(RAX, Immediate(kSmiCid));

void StubCodeCompiler::GenerateSwitchableCallMissStub() {
  __ EnterStubFrame();
  __ pushq(Immediate(0));
  __ pushq(Immediate(0));
  __ CallRuntime(kSwitchableCallMissRuntimeEntry, 2);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateSingleTargetCallStub() {
  __ EnterStubFrame();
  __ pushq(Immediate(0));
  __ pushq(Immediate(0));
  __ CallRuntime(kSwitchableCallMissRuntimeEntry, 2);
  __ LeaveStubFrame();
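// Typed-data allocation fast path for the given cid: the element count in
// RDI is checked against the class's maximum new-space length, scaled to a
// byte size, and rounded up to the allocation alignment. The data pointer
// is stored with StoreInternalPointer since it points into the object
// itself. The slow path appears to reserve a result slot with
// PushObject(null_object) before calling kAllocateTypedDataRuntimeEntry
// with its two arguments.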
void StubCodeCompiler::GenerateAllocateTypedDataArrayStub(intptr_t cid) {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    __ cmpq(RDI, Immediate(max_len));
    const intptr_t fixed_size_plus_alignment_padding =
    __ leaq(RDI, Address(RDI, scale_factor, fixed_size_plus_alignment_padding));
    __ CheckAllocationCanary(RAX);
    Label size_tag_overflow, done;
    __ Bind(&size_tag_overflow);
    __ LoadImmediate(RDI, Immediate(0));
    __ orq(RDI, Immediate(tags));
    __ StoreCompressedIntoObjectNoBarrier(
    __ StoreInternalPointer(
    __ WriteAllocationCanary(RCX);
  __ EnterStubFrame();
  __ PushObject(Object::null_object());
  __ CallRuntime(kAllocateTypedDataRuntimeEntry, 2);
  __ LeaveStubFrame();