#define SHOULD_NOT_INCLUDE_RUNTIME

#if defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)

  rt.Call(kEnsureRememberedAndMarkingDeferredRuntimeEntry,
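// CallToRuntime stub: the transition from compiled Dart code into a C++
// runtime entry. The fragments below store the NativeArguments fields
// (thread, argc_tag, argv, retval) to the stack, check via the VM tag that
// the caller is Dart code, and restore the Dart tag, pinned registers and
// (in AOT mode) the global pool on the way out.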
void StubCodeCompiler::GenerateCallToRuntimeStub() {
  __ Comment("CallToRuntimeStub");
  __ SetPrologueOffset();

  __ CompareImmediate(TMP, VMTag::kDartTagId);
  __ Stop("Not coming from Dart code.");

  __ Comment("align stack");

  __ StoreToOffset(THR, SP, thread_offset);
  __ StoreToOffset(T4, SP, argc_tag_offset);
  __ StoreToOffset(T2, SP, argv_offset);
  __ StoreToOffset(T3, SP, retval_offset);

  __ Comment("CallToRuntimeStub return");
  __ RestorePinnedRegisters();
  __ LoadImmediate(TMP, VMTag::kDartTagId);

  if (FLAG_precompiled_mode) {
    __ SetupGlobalPoolAndDispatchTable();

  __ LoadImmediate(A0, 0);
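// Generates a shared slow-path stub: saves all non-reserved registers
// (optionally including FPU registers), loads this stub's own Code object
// from the Thread so the stack walker can identify the frame, performs the
// supplied runtime call, and restores the registers afterwards.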
void StubCodeCompiler::GenerateSharedStubGeneric(
    bool save_fpu_registers,
    intptr_t self_code_stub_offset_from_thread,
    bool allow_return,
    std::function<void()> perform_runtime_call) {
  RegisterSet all_registers;
  all_registers.AddAllNonReservedRegisters(save_fpu_registers);

  __ PushRegisters(all_registers);
  __ lx(CODE_REG, Address(THR, self_code_stub_offset_from_thread));
  perform_runtime_call();
  __ PopRegisters(all_registers);
void StubCodeCompiler::GenerateSharedStub(
    bool save_fpu_registers,
    const RuntimeEntry* target,
    intptr_t self_code_stub_offset_from_thread,
    bool allow_return,
    bool store_runtime_result_in_result_register) {
  ASSERT(!store_runtime_result_in_result_register || allow_return);
  auto perform_runtime_call = [&]() {
    if (store_runtime_result_in_result_register) {
    if (store_runtime_result_in_result_register) {
  GenerateSharedStubGeneric(save_fpu_registers,
                            self_code_stub_offset_from_thread, allow_return,
                            perform_runtime_call);
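// Enter/exit safepoint stubs: spill all general-purpose registers, reserve
// an aligned C frame, and call the corresponding leaf runtime entry whose
// address is loaded from the Thread object.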
void StubCodeCompiler::GenerateEnterSafepointStub() {
  RegisterSet all_registers;
  all_registers.AddAllGeneralRegisters();

  __ PushRegisters(all_registers);
  __ ReserveAlignedFrameSpace(0);
  __ lx(TMP, Address(THR, kEnterSafepointRuntimeEntry.OffsetFromThread()));
  __ PopRegisters(all_registers);
static void GenerateExitSafepointStubCommon(Assembler* assembler,
                                            uword runtime_entry_offset) {
  RegisterSet all_registers;
  all_registers.AddAllGeneralRegisters();

  __ PushRegisters(all_registers);
  __ ReserveAlignedFrameSpace(0);
  __ lx(TMP, Address(THR, runtime_entry_offset));
  __ PopRegisters(all_registers);
void StubCodeCompiler::GenerateExitSafepointStub() {
  GenerateExitSafepointStubCommon(
      assembler, kExitSafepointRuntimeEntry.OffsetFromThread());
}

void StubCodeCompiler::GenerateExitSafepointIgnoreUnwindInProgressStub() {
  GenerateExitSafepointStubCommon(
      assembler,
      kExitSafepointIgnoreUnwindInProgressRuntimeEntry.OffsetFromThread());
}
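// Calls a native target while the thread transitions out of (and back into)
// generated code. The InsertBSSRelocation / PC-relative load fragments below
// belong to the helper that resolves an entry in the BSS segment, keeping
// the generated code position independent.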
void StubCodeCompiler::GenerateCallNativeThroughSafepointStub() {
  __ TransitionNativeToGenerated(T1, true);

  compiler::Label skip_reloc;
  InsertBSSRelocation(relocation);
  __ lx(dst, compiler::Address(tmp));
  __ add(tmp, tmp, dst);
  __ lx(dst, compiler::Address(tmp));
void StubCodeCompiler::GenerateLoadFfiCallbackMetadataRuntimeFunction(
    uword function_index,
    Register dst) {
  const intptr_t code_size = __ CodeSize();
  __ AddImmediate(dst, -code_size);
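// FFI callback trampoline: a short per-callback entry sequence followed by a
// stub shared by all callbacks. The fragments below load the FFI callback
// metadata runtime function (PC-relatively in AOT / on Fuchsia, otherwise via
// GenerateLoadFfiCallbackMetadataRuntimeFunction), enter a full safepoint for
// the native-to-generated transition, and assert that the generated code fits
// within FfiCallbackMetadata::kNativeCallbackSharedStubSize.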
void StubCodeCompiler::GenerateFfiCallbackTrampolineStub() {
#if defined(USING_SIMULATOR) && !defined(DART_PRECOMPILER)
      FfiCallbackMetadata::kNativeCallbackTrampolineSize *
  const intptr_t shared_stub_start = __ CodeSize();
  COMPILE_ASSERT(FfiCallbackMetadata::kNativeCallbackTrampolineStackDelta == 2);

#if defined(DART_TARGET_OS_FUCHSIA)
  if (FLAG_precompiled_mode) {
    const intptr_t kPCRelativeLoadOffset = 12;
    intptr_t start = __ CodeSize();
    __ lx(T1, Address(T1, kPCRelativeLoadOffset));
  GenerateLoadFfiCallbackMetadataRuntimeFunction(

  __ EnterFullSafepoint(T1);
  __ ReserveAlignedFrameSpace(0);

#if defined(DART_TARGET_OS_FUCHSIA)
  if (FLAG_precompiled_mode) {
    const intptr_t kPCRelativeLoadOffset = 12;
    intptr_t start = __ CodeSize();
    __ lx(T1, Address(T1, kPCRelativeLoadOffset));
  GenerateLoadFfiCallbackMetadataRuntimeFunction(
      FfiCallbackMetadata::kNativeCallbackSharedStubSize);
void StubCodeCompiler::GenerateDispatchTableNullErrorStub() {
  __ CallRuntime(kDispatchTableNullErrorRuntimeEntry, 1);
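// Range and write error stubs are built on top of GenerateSharedStubGeneric:
// the lambda pushes the operands (boxing a length/index that does not fit a
// Smi into a Mint first) and calls the corresponding runtime entry, which
// never returns.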
void StubCodeCompiler::GenerateRangeError(bool with_fpu_regs) {
  auto perform_runtime_call = [&]() {
    __ CallRuntime(kAllocateMintRuntimeEntry, 0);
    __ PushRegistersInOrder(
    __ CallRuntime(kRangeErrorRuntimeEntry, 2);

  GenerateSharedStubGeneric(
      false, perform_runtime_call);
void StubCodeCompiler::GenerateWriteError(bool with_fpu_regs) {
  auto perform_runtime_call = [&]() {
    __ CallRuntime(kWriteErrorRuntimeEntry, 2);

  GenerateSharedStubGeneric(
      false, perform_runtime_call);
static void GenerateCallNativeWithWrapperStub(Assembler* assembler,
  __ CompareImmediate(TMP, VMTag::kDartTagId);
  __ BranchIf(EQ, &ok);
  __ Stop("Not coming from Dart code.");

  __ StoreToOffset(THR, SP, thread_offset);
  __ StoreToOffset(T1, SP, argc_tag_offset);
  __ StoreToOffset(T2, SP, argv_offset);
  __ StoreToOffset(T3, SP, retval_offset);

  __ RestorePinnedRegisters();
  __ LoadImmediate(TMP, VMTag::kDartTagId);

  if (FLAG_precompiled_mode) {
    __ SetupGlobalPoolAndDispatchTable();
void StubCodeCompiler::GenerateCallNoScopeNativeStub() {
  GenerateCallNativeWithWrapperStub(

void StubCodeCompiler::GenerateCallAutoScopeNativeStub() {
  GenerateCallNativeWithWrapperStub(

void StubCodeCompiler::GenerateCallBootstrapNativeStub() {
  GenerateCallNativeWithWrapperStub(
void StubCodeCompiler::GenerateCallStaticFunctionStub() {
  __ CallRuntime(kPatchStaticCallRuntimeEntry, 0);
void StubCodeCompiler::GenerateFixCallersTargetStub() {
  __ BranchOnMonomorphicCheckedEntryJIT(&monomorphic);
  __ CallRuntime(kFixCallersTargetRuntimeEntry, 0);

  __ PushRegistersInOrder({ZR, A0, S5});
  __ CallRuntime(kFixCallersTargetMonomorphicRuntimeEntry, 2);
  __ LoadFieldFromOffset(
void StubCodeCompiler::GenerateFixAllocationStubTargetStub() {
  __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);

void StubCodeCompiler::GenerateFixParameterizedAllocationStubTargetStub() {
  __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);
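// Copies the calling frame's arguments into a newly allocated array. Used by
// the noSuchMethod dispatchers below before calling into the runtime.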
static void PushArrayOfArguments(Assembler* assembler) {
  Label loop, loop_exit;
  __ beqz(T2, &loop_exit);
  __ lx(T6, Address(T1, 0));
  __ StoreCompressedIntoObject(A0, Address(T3, 0), T6);
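// Deoptimization sequence: copies the optimized frame into a deopt buffer
// (kDeoptimizeCopyFrame), rebuilds an unoptimized frame from it
// (kDeoptimizeFillFrame), materializes any deferred objects
// (kDeoptimizeMaterialize), and preserves the result / exception and
// stacktrace registers across the phases. The trailing fragment re-throws a
// pending exception via kReThrowRuntimeEntry.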
static void GenerateDeoptimizationSequence(Assembler* assembler,
  const intptr_t saved_result_slot_from_fp =
  const intptr_t saved_exception_slot_from_fp =
  const intptr_t saved_stacktrace_slot_from_fp =

  LeafRuntimeScope rt(assembler,
  __ li(A1, is_lazy ? 1 : 0);
  rt.Call(kDeoptimizeCopyFrameRuntimeEntry, 2);

  __ RestoreCodePointer();
  __ PushRegistersInOrder({T1, T2});

  LeafRuntimeScope rt(assembler,
  rt.Call(kDeoptimizeFillFrameRuntimeEntry, 1);

  __ RestoreCodePointer();
  __ LeaveStubFrame();

  __ EnterStubFrame();
  __ PushRegister(T1);
  __ PushRegistersInOrder({T1, T2});
  __ PushRegister(ZR);
  __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0);
  __ LeaveStubFrame();

  __ EnterStubFrame();
  __ PushRegister(ZR);
  __ PushRegister(A0);
  __ PushRegister(A1);
  __ CallRuntime(kReThrowRuntimeEntry, 3);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateDeoptimizeLazyFromReturnStub() {
  __ PushRegister(TMP);

void StubCodeCompiler::GenerateDeoptimizeLazyFromThrowStub() {
  __ PushRegister(TMP);

void StubCodeCompiler::GenerateDeoptimizeStub() {
static void GenerateNoSuchMethodDispatcherBody(Assembler* assembler) {
  __ EnterStubFrame();
  __ LoadCompressedSmiFieldFromOffset(
  __ LoadCompressedSmiFieldFromOffset(
  Label args_count_ok;
  PushArrayOfArguments(assembler);
  const intptr_t kNumArgs = 4;
  __ CallRuntime(kNoSuchMethodFromCallStubRuntimeEntry, kNumArgs);
  __ LeaveStubFrame();

static void GenerateDispatcherCode(Assembler* assembler,
                                   Label* call_target_function) {
  __ Comment("NoSuchMethodDispatch");
  GenerateNoSuchMethodDispatcherBody(assembler);

void StubCodeCompiler::GenerateNoSuchMethodDispatcherStub() {
  GenerateNoSuchMethodDispatcherBody(assembler);
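// Array allocation stub. The inline fast path (when --inline_alloc is
// enabled and --use_slow_path is not) checks the length against the maximum
// new-space payload, computes the instance size with alignment padding,
// writes the tags word and length field, and places an allocation canary;
// otherwise it falls back to kAllocateArrayRuntimeEntry inside a stub frame.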
void StubCodeCompiler::GenerateAllocateArrayStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    const intptr_t max_len =
    __ BranchIf(HI, &slow_case);

    const intptr_t cid = kArrayCid;
    intptr_t fixed_size_plus_alignment_padding =
    __ AddImmediate(T3, fixed_size_plus_alignment_padding);

    compiler::Label zero_tag;
    __ OrImmediate(T5, T5, tags);
    __ StoreCompressedIntoObjectOffsetNoBarrier(
    __ WriteAllocationCanary(T4);

  __ EnterStubFrame();
  __ CallRuntime(kAllocateArrayRuntimeEntry, 2);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateAllocateMintSharedWithFPURegsStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
  GenerateSharedStub(true, &kAllocateMintRuntimeEntry,

void StubCodeCompiler::GenerateAllocateMintSharedWithoutFPURegsStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
  GenerateSharedStub(false, &kAllocateMintRuntimeEntry,
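// Main entry point from C++ into Dart code. Saves the native callee-saved
// registers (and, where configured, the shadow call stack state), sets the
// Dart VM tag, verifies the entry frame, pushes the Dart arguments from the
// native argument array in a loop, sets up the object pool / dispatch table
// in AOT mode, and restores everything on return.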
void StubCodeCompiler::GenerateInvokeDartCodeStub() {
  __ Comment("InvokeDartCodeStub");

#if defined(DART_TARGET_OS_FUCHSIA) || defined(DART_TARGET_OS_ANDROID)
#elif defined(USING_SHADOW_CALL_STACK)

  __ PushNativeCalleeSavedRegisters();
  __ RestorePinnedRegisters();
  __ EmitEntryFrameVerification();
  __ LoadImmediate(TMP, VMTag::kDartTagId);

  Label push_arguments;
  Label done_push_arguments;
  __ beqz(T5, &done_push_arguments);
  __ LoadImmediate(T2, 0);
  __ Bind(&push_arguments);
  __ lx(T3, Address(A2, 0));
  __ PushRegister(T3);
  __ Bind(&done_push_arguments);

  if (FLAG_precompiled_mode) {
    __ SetupGlobalPoolAndDispatchTable();

  __ Comment("InvokeDartCodeStub return");
  __ PopNativeCalleeSavedRegisters();
static void GenerateAllocateContextSpaceStub(Assembler* assembler,
                                             Label* slow_case) {
  intptr_t fixed_size_plus_alignment_padding =
  __ AddImmediate(T2, fixed_size_plus_alignment_padding);

  __ BranchIf(CS, slow_case);
  __ CheckAllocationCanary(A0);

  compiler::Label zero_tag;
  __ BranchIf(HI, &zero_tag);
  __ OrImmediate(T3, T3, tags);
void StubCodeCompiler::GenerateAllocateContextStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    GenerateAllocateContextSpaceStub(assembler, &slow_case);

  __ EnterStubFrame();
  __ PushRegister(T1);
  __ CallRuntime(kAllocateContextRuntimeEntry, 1);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateCloneContextStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    GenerateAllocateContextSpaceStub(assembler, &slow_case);

    __ StoreCompressedIntoObjectNoBarrier(
    __ lx(T5, Address(T4, 0));
    __ sx(T5, Address(T3, 0));

  __ EnterStubFrame();
  __ CallRuntime(kCloneContextRuntimeEntry, 1);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateWriteBarrierWrappersStub() {
  intptr_t start = __ CodeSize();
  intptr_t end = __ CodeSize();
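// Write barrier slow path. When the incremental (marking) barrier fires, the
// target object is pushed onto the thread's new- or old-space marking stack
// and the corresponding marking-stack-block-process runtime entry is invoked
// once a block fills up. When the generational barrier fires, the source
// object is added to the store buffer (or, with cards == true, its card table
// entry is marked), falling back to kStoreBufferBlockProcess / kRememberCard
// runtime entries as needed. "Wrong barrier!" is a debug check for a store
// that should not have reached this stub.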
static void GenerateWriteBarrierStubHelper(Assembler* assembler, bool cards) {
  RegisterSet spill_set((1 << T2) | (1 << T3) | (1 << T4), 0);

  __ beqz(TMP, &skip_marking);
  __ PushRegisters(spill_set);

  auto mark_stack_push = [&](intptr_t offset, const RuntimeEntry& entry) {
    LeafRuntimeScope rt(assembler, 0,
                  kOldMarkingStackBlockProcessRuntimeEntry);
                  kNewMarkingStackBlockProcessRuntimeEntry);
  __ PopRegisters(spill_set);

  Label add_to_remembered_set, remember_card;
  __ bnez(TMP, &add_to_remembered_set);
  __ Bind(&add_to_remembered_set);
  __ bnez(TMP2, &remember_card);
  __ Stop("Wrong barrier!");

  __ PushRegisters(spill_set);
  LeafRuntimeScope rt(assembler, 0,
  rt.Call(kStoreBufferBlockProcessRuntimeEntry, 1);
  __ PopRegisters(spill_set);

  Label remember_card_slow;
  __ beqz(TMP2, &remember_card_slow);
  __ Bind(&remember_card_slow);
  LeafRuntimeScope rt(assembler, 0,
  rt.Call(kRememberCardRuntimeEntry, 2);
void StubCodeCompiler::GenerateWriteBarrierStub() {
  GenerateWriteBarrierStubHelper(assembler, /*cards=*/false);
}

void StubCodeCompiler::GenerateArrayWriteBarrierStub() {
  GenerateWriteBarrierStubHelper(assembler, /*cards=*/true);
}
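// Shared instance allocation fast path. The class id, tags word and instance
// size arrive in fixed registers; the stub bumps new-space top, initializes
// the header and fields, writes the allocation canary, and (for parameterized
// classes) stores the type arguments at the offset recorded in the class.
// Allocation tracing (in non-PRODUCT builds) and any failure branch to the
// slow case.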
static void GenerateAllocateObjectHelper(Assembler* assembler,
                                         bool is_cls_parameterized) {
#if !defined(PRODUCT)
  __ MaybeTraceAllocation(kCidRegister, &slow_case, TMP);

  __ ExtractInstanceSizeFromTags(kInstanceSizeReg, kTagsReg);
  __ CompareRegisters(kEndReg, kNewTopReg);

                     Address(kFieldReg, offset),
  __ bltu(kFieldReg, kNewTopReg, &loop);
  __ WriteAllocationCanary(kNewTopReg);

  if (is_cls_parameterized) {
    Label not_parameterized_case;
    __ ExtractClassIdFromTags(kClsIdReg, kTagsReg);
    __ LoadClassById(kTypeOffsetReg, kClsIdReg);
            FieldAddress(kTypeOffsetReg,
                         host_type_arguments_field_offset_in_words_offset()));
    __ StoreCompressedIntoObjectNoBarrier(
    __ Bind(&not_parameterized_case);

  if (!is_cls_parameterized) {
void StubCodeCompiler::GenerateAllocateObjectStub() {
  GenerateAllocateObjectHelper(assembler, /*is_cls_parameterized=*/false);
}

void StubCodeCompiler::GenerateAllocateObjectParameterizedStub() {
  GenerateAllocateObjectHelper(assembler, /*is_cls_parameterized=*/true);
}
void StubCodeCompiler::GenerateAllocateObjectSlowStub() {
  if (!FLAG_precompiled_mode) {

  __ EnterStubFrame();
  __ CallRuntime(kAllocateObjectRuntimeEntry, 2);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateAllocationStubForClass(
    UnresolvedPcRelativeCalls* unresolved_calls,
    const Class& cls,
    const Code& allocate_object,
    const Code& allocat_object_parametrized) {
  ASSERT(instance_size > 0);

  __ LoadImmediate(kTagsReg, tags);

  if (!FLAG_use_slow_path && FLAG_inline_alloc &&
    if (is_cls_parameterized) {
          CastHandle<Object>(allocat_object_parametrized))) {
        __ GenerateUnRelocatedPcRelativeTailCall();
        unresolved_calls->Add(new UnresolvedPcRelativeCall(
            __ CodeSize(), allocat_object_parametrized, true));
            allocate_object_parameterized_entry_point_offset()));
      __ GenerateUnRelocatedPcRelativeTailCall();
      unresolved_calls->Add(new UnresolvedPcRelativeCall(
          __ CodeSize(), allocate_object, true));

  if (!is_cls_parameterized) {
void StubCodeCompiler::GenerateCallClosureNoSuchMethodStub() {
  __ EnterStubFrame();
  __ LoadCompressedSmiFieldFromOffset(
  __ LoadCompressedSmiFieldFromOffset(
  Label args_count_ok;
  const intptr_t kNumArgs = 4;
  __ CallRuntime(kNoSuchMethodFromPrologueRuntimeEntry, kNumArgs);
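// The fragments below belong to GenerateOptimizedUsageCounterIncrement and
// GenerateUsageCounterIncrement: in JIT mode they bump the target function's
// usage counter so that it can eventually be optimized.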
  if (FLAG_precompiled_mode) {
  if (FLAG_trace_optimized_ic_calls) {
    __ Stop("Unimplemented");

  if (FLAG_precompiled_mode) {
  if (FLAG_optimization_counter_threshold >= 0) {
    __ Comment("Increment function counter");
    __ LoadFieldFromOffset(
    __ AddImmediate(A1, 1);
    __ StoreFieldToOffset(A1, func_reg,
static void EmitFastSmiOp(Assembler* assembler,
                          Token::Kind kind,
                          intptr_t num_args,
                          Label* not_smi_or_overflow) {
  __ Comment("Fast Smi op");
  __ bnez(TMP2, not_smi_or_overflow);
  __ AddBranchOverflow(A0, A0, A1, not_smi_or_overflow);

  Label load_true, done;
  Label load_true, done;

  __ LoadCompressedSmiFromOffset(TMP, A6, 0);
  __ CompareImmediate(TMP, imm_smi_cid);
  __ CompareImmediate(TMP, imm_smi_cid);
  __ BranchIf(EQ, &ok);
  __ Stop("Incorrect IC data");

  if (FLAG_optimization_counter_threshold >= 0) {
    const intptr_t count_offset =
    __ LoadCompressedSmiFromOffset(A1, A6, count_offset);
    __ StoreToOffset(A1, A6, count_offset);
static void GenerateRecordEntryPoint(Assembler* assembler) {
  __ BindUncheckedEntryPoint();
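// N-argument inline cache stub. Checks the receiver's (and optionally the
// second argument's) class id against the ICData entries, unrolling the
// lookup loop more aggressively for optimized code. On a hit it bumps the
// entry's count and tail-calls the cached target (optionally through the
// unchecked entry point when the exactness check passes); on a miss it calls
// the supplied handle_ic_miss runtime entry inside a stub frame. In
// non-PRODUCT builds it also checks for single stepping.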
void StubCodeCompiler::GenerateNArgsCheckInlineCacheStub(
    intptr_t num_args,
    const RuntimeEntry& handle_ic_miss,
    Token::Kind kind,
    Optimized optimized,
    CallType type,
    Exactness exactness) {
  const bool save_entry_point = kind == Token::kILLEGAL;
  if (FLAG_precompiled_mode) {
  if (save_entry_point) {

  ASSERT(num_args == 1 || num_args == 2);

  __ CompareImmediate(TMP2, num_args);
  __ Stop("Incorrect stub for IC data");

#if !defined(PRODUCT)
  Label stepping, done_stepping;
  __ Comment("Check single stepping");
  __ LoadIsolate(TMP);
  __ bnez(TMP, &stepping);

  Label not_smi_or_overflow;
  if (kind != Token::kILLEGAL) {
    EmitFastSmiOp(assembler, kind, num_args, &not_smi_or_overflow);
  __ Bind(&not_smi_or_overflow);
  __ Comment("Extract ICData initial values and receiver cid");
  __ LoadTaggedClassIdMayBeSmi(T1, A0);
  if (num_args == 2) {
    __ LoadCompressedSmiFieldFromOffset(
    __ LoadTaggedClassIdMayBeSmi(T2, A6);

  __ LoadCompressedSmiFieldFromOffset(
  __ LoadTaggedClassIdMayBeSmi(T1, A6);
  if (num_args == 2) {
    __ LoadTaggedClassIdMayBeSmi(T2, A6);

  const bool optimize = kind == Token::kILLEGAL;

  Label loop, found, miss;
  __ Comment("ICData loop");
  for (int unroll = optimize ? 4 : 2; unroll >= 0; unroll--) {
    __ LoadCompressedSmiFromOffset(A7, A1, 0);
    if (num_args == 1) {
    __ AddImmediate(A1, entry_size);
    __ BranchIf(NE, &loop);
  __ BranchIf(EQ, &miss);

  __ Comment("IC miss");
  __ LoadCompressedSmiFieldFromOffset(
  __ EnterStubFrame();
  if (save_entry_point) {
    __ PushRegister(T6);
  __ PushRegister(ZR);
  for (intptr_t i = 0; i < num_args; i++) {
    __ PushRegister(TMP);
  __ CallRuntime(handle_ic_miss, num_args + 1);
  __ Drop(num_args + 1);
  if (save_entry_point) {
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  Label call_target_function;
  if (FLAG_precompiled_mode) {
    GenerateDispatcherCode(assembler, &call_target_function);
  __ j(&call_target_function);

  const intptr_t target_offset =
  const intptr_t count_offset =
  const intptr_t exactness_offset =

  Label call_target_function_through_unchecked_entry;
  __ LoadCompressedSmi(T1, Address(A1, exactness_offset));
  __ blt(T1, TMP, &exactness_ok);
  __ beq(T1, TMP, &call_target_function_through_unchecked_entry);
  __ beq(T2, TMP, &call_target_function_through_unchecked_entry);
  if (FLAG_optimization_counter_threshold >= 0) {
    __ Comment("Update caller's counter");
    __ LoadCompressedSmiFromOffset(TMP, A1, count_offset);

  __ Comment("Call target");
  __ Bind(&call_target_function);
  if (save_entry_point) {
    __ lx(A7, Address(A7, 0));

  __ Bind(&call_target_function_through_unchecked_entry);
  if (FLAG_optimization_counter_threshold >= 0) {
    __ Comment("Update ICData counter");
    __ LoadCompressedSmiFromOffset(TMP, A1, count_offset);
  __ Comment("Call target (via unchecked entry point)");
  __ LoadFieldFromOffset(

#if !defined(PRODUCT)
  __ EnterStubFrame();
  __ PushRegister(A0);
  if (save_entry_point) {
    __ PushRegister(T6);
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  if (save_entry_point) {
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ j(&done_stepping);
void StubCodeCompiler::GenerateOneArgCheckInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateOneArgCheckInlineCacheWithExactnessCheckStub() {
  GenerateNArgsCheckInlineCacheStub(
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateTwoArgsCheckInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateSmiAddInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD, kUnoptimized,

void StubCodeCompiler::GenerateSmiLessInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kLT, kUnoptimized,

void StubCodeCompiler::GenerateSmiEqualInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ, kUnoptimized,

void StubCodeCompiler::GenerateOneArgOptimizedCheckInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL, kOptimized,

void StubCodeCompiler::
    GenerateOneArgOptimizedCheckInlineCacheWithExactnessCheckStub() {
  GenerateNArgsCheckInlineCacheStub(
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL, kOptimized,

void StubCodeCompiler::GenerateTwoArgsOptimizedCheckInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,
void StubCodeCompiler::GenerateZeroArgsUnoptimizedStaticCallStub() {
  __ CompareImmediate(TMP, 0);
  __ BranchIf(EQ, &ok);
  __ Stop("Incorrect IC data for unoptimized static call");

#if !defined(PRODUCT)
  Label stepping, done_stepping;
  __ LoadIsolate(TMP);

  const intptr_t target_offset =
  const intptr_t count_offset =

  if (FLAG_optimization_counter_threshold >= 0) {
    __ LoadCompressedSmiFromOffset(TMP, A0, count_offset);
    __ StoreToOffset(TMP, A0, count_offset);

#if !defined(PRODUCT)
  __ EnterStubFrame();
  __ PushRegister(T6);
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ j(&done_stepping);
void StubCodeCompiler::GenerateOneArgUnoptimizedStaticCallStub() {

void StubCodeCompiler::GenerateTwoArgsUnoptimizedStaticCallStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,
void StubCodeCompiler::GenerateLazyCompileStub() {
  __ EnterStubFrame();
  __ CallRuntime(kCompileFunctionRuntimeEntry, 1);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateICCallBreakpointStub() {
  __ Stop("No debugging in PRODUCT mode");
  __ EnterStubFrame();
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateUnoptStaticCallBreakpointStub() {
  __ Stop("No debugging in PRODUCT mode");
  __ EnterStubFrame();
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateRuntimeCallBreakpointStub() {
  __ Stop("No debugging in PRODUCT mode");
  __ EnterStubFrame();
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateDebugStepCheckStub() {
  __ Stop("No debugging in PRODUCT mode");

  Label stepping, done_stepping;
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();
  __ j(&done_stepping);
void StubCodeCompiler::GenerateSubtypeNTestCacheStub(Assembler* assembler,
  GenerateSubtypeTestCacheSearch(
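// Frame unwinding support: GetCStackPointer returns the C stack pointer, and
// JumpToFrame tears the thread back down to a given Dart frame when an
// exception is thrown, exiting any FFI transition, restoring the pinned
// registers, the VM tag and (in AOT) the global pool before jumping to the
// handler. RunExceptionHandler and DeoptForRewind build on it.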
void StubCodeCompiler::GenerateGetCStackPointerStub() {

void StubCodeCompiler::GenerateJumpToFrameStub() {
#if defined(DART_TARGET_OS_FUCHSIA) || defined(DART_TARGET_OS_ANDROID)
#elif defined(USING_SHADOW_CALL_STACK)

  Label exit_through_non_ffi;
  __ bne(TMP, TMP2, &exit_through_non_ffi);
  __ TransitionNativeToGenerated(TMP, true,
  __ Bind(&exit_through_non_ffi);

  __ RestorePinnedRegisters();
  __ LoadImmediate(TMP, VMTag::kDartTagId);
  __ RestoreCodePointer();
  if (FLAG_precompiled_mode) {
    __ SetupGlobalPoolAndDispatchTable();
    __ LoadPoolPointer();
void StubCodeCompiler::GenerateRunExceptionHandlerStub() {

void StubCodeCompiler::GenerateDeoptForRewindStub() {
  __ PushRegister(TMP);
  __ EnterStubFrame();
  __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateOptimizeFunctionStub() {
  __ EnterStubFrame();
  __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1);
  __ LeaveStubFrame();
static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler,
  Label reference_compare, check_mint, done;

  __ CompareClassId(left, kDoubleCid, TMP);
  __ CompareClassId(right, kDoubleCid, TMP);
  __ CompareClassId(left, kMintCid, TMP);
  __ CompareClassId(right, kMintCid, TMP);

  __ Bind(&reference_compare);
  __ xor_(TMP, left, right);
void StubCodeCompiler::GenerateUnoptimizedIdenticalWithNumberCheckStub() {
#if !defined(PRODUCT)
  Label stepping, done_stepping;
  __ LoadIsolate(TMP);
  __ bnez(TMP, &stepping);

  GenerateIdenticalWithNumberCheckStub(assembler, left, right);

#if !defined(PRODUCT)
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ j(&done_stepping);

void StubCodeCompiler::GenerateOptimizedIdenticalWithNumberCheckStub() {
  GenerateIdenticalWithNumberCheckStub(assembler, left, right);
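// Megamorphic call stub: probes the megamorphic cache with the receiver's
// class id, comparing against the entry's cid Smi and jumping to the cached
// target on a hit. Smi receivers are redirected to the kSmiCid entry, and a
// miss falls through to the switchable-call-miss path.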
void StubCodeCompiler::GenerateMegamorphicCallStub() {
  __ BranchIfSmi(A0, &smi_case);

  __ LoadCompressedSmiFieldFromOffset(T4, TMP, base);
  __ CompareObjectRegisters(T4, T5);
  __ BranchIf(NE, &probe_failed);

  if (!FLAG_precompiled_mode) {

  __ LoadImmediate(T5, kSmiCid);

  GenerateSwitchableCallMissStub();
void StubCodeCompiler::GenerateICCallThroughCodeStub() {
  Label loop, found, miss;

  __ LoadTaggedClassIdMayBeSmi(A1, A0);
  __ LoadCompressedSmi(T2, Address(T1, 0));
  __ BranchIf(EQ, &miss);

  const intptr_t entry_length =
  __ AddImmediate(T1, entry_length);

  if (FLAG_precompiled_mode) {
    const intptr_t entry_offset =
    __ LoadCompressed(A1, Address(T1, entry_offset));
    const intptr_t code_offset =
void StubCodeCompiler::GenerateMonomorphicSmiableCheckStub() {
  __ LoadClassIdMayBeSmi(T1, A0);
void StubCodeCompiler::GenerateSwitchableCallMissStub() {
  __ EnterStubFrame();
  __ CallRuntime(kSwitchableCallMissRuntimeEntry, 2);
  __ LeaveStubFrame();
void StubCodeCompiler::GenerateSingleTargetCallStub() {
  __ LoadClassIdMayBeSmi(A1, A0);
  __ EnterStubFrame();
  __ CallRuntime(kSwitchableCallMissRuntimeEntry, 2);
  __ LeaveStubFrame();
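// Typed data allocation. GetScaleFactor maps an element size to a shift, and
// the stub's fast path checks that the length register holds a Smi, scales it
// into a byte size with alignment padding, bump-allocates in new space,
// writes the tags and length, initializes the internal data pointer, and
// places the allocation canary; otherwise it calls
// kAllocateTypedDataRuntimeEntry inside a stub frame.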
static int GetScaleFactor(intptr_t size) {

void StubCodeCompiler::GenerateAllocateTypedDataArrayStub(intptr_t cid) {
  const intptr_t scale_shift = GetScaleFactor(element_size);
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    __ BranchIfNotSmi(T3, &call_runtime);

    if (scale_shift != 0) {
      __ slli(T3, T3, scale_shift);

    const intptr_t fixed_size_plus_alignment_padding =
    __ AddImmediate(T3, fixed_size_plus_alignment_padding);

    __ bltu(T4, A0, &call_runtime);
    __ bgeu(T4, TMP, &call_runtime);
    __ CheckAllocationCanary(A0);

    compiler::Label zero_tags;
    __ BranchIf(HI, &zero_tags);
    __ OrImmediate(T5, T5, tags);

    __ StoreCompressedIntoObjectNoBarrier(
    __ StoreInternalPointer(
    __ WriteAllocationCanary(T4);

  __ EnterStubFrame();
  __ PushRegister(ZR);
  __ CallRuntime(kAllocateTypedDataRuntimeEntry, 2);
  __ LeaveStubFrame();

#endif  // defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)