#define SHOULD_NOT_INCLUDE_RUNTIME

#if defined(TARGET_ARCH_ARM)

void StubCodeCompiler::EnsureIsNewOrRemembered() {
  rt.Call(kEnsureRememberedAndMarkingDeferredRuntimeEntry, 2);
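
// Stub for transitioning from compiled Dart code into the C++ runtime: it
// records the exit frame and VM tag on the Thread, performs the runtime call,
// then restores the Dart VM tag (and, in AOT mode, the global object pool and
// dispatch table) before returning.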
void StubCodeCompiler::GenerateCallToRuntimeStub() {
  __ CompareImmediate(R8, VMTag::kDartTagId);
  __ Stop("Not coming from Dart code.");
  __ ReserveAlignedFrameSpace(0);
  __ LoadImmediate(R2, VMTag::kDartTagId);
  __ LoadImmediate(R2, 0);
  if (FLAG_precompiled_mode) {
    __ SetupGlobalPoolAndDispatchTable();
  }
  __ LoadImmediate(R0, 0);

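// Scaffolding shared by the specialized shared stubs below: saves all
// non-reserved registers (optionally including FPU registers), loads this
// stub's own Code object from the Thread so the frame is walkable, runs the
// supplied runtime call, then restores the registers and returns.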
void StubCodeCompiler::GenerateSharedStubGeneric(
    bool save_fpu_registers,
    intptr_t self_code_stub_offset_from_thread,
    bool allow_return,
    std::function<void()> perform_runtime_call) {
  RegisterSet all_registers;
  all_registers.AddAllNonReservedRegisters(save_fpu_registers);
  READS_RETURN_ADDRESS_FROM_LR(__ Push(LR));
  __ PushRegisters(all_registers);
  __ ldr(CODE_REG, Address(THR, self_code_stub_offset_from_thread));
  perform_runtime_call();
  __ PopRegisters(all_registers);
  READS_RETURN_ADDRESS_FROM_LR(__ bx(LR));

void StubCodeCompiler::GenerateSharedStub(
    bool save_fpu_registers,
    const RuntimeEntry* target,
    intptr_t self_code_stub_offset_from_thread,
    bool allow_return,
    bool store_runtime_result_in_result_register) {
  ASSERT(!store_runtime_result_in_result_register || allow_return);
  auto perform_runtime_call = [&]() {
    if (store_runtime_result_in_result_register) {
    if (store_runtime_result_in_result_register) {
  GenerateSharedStubGeneric(save_fpu_registers,
                            self_code_stub_offset_from_thread, allow_return,
                            perform_runtime_call);

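// Enters a safepoint on behalf of leaf native code. All general-purpose
// registers are preserved around a leaf call to the EnterSafepoint runtime
// function, whose address is read from the Thread object.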
void StubCodeCompiler::GenerateEnterSafepointStub() {
  RegisterSet all_registers;
  all_registers.AddAllGeneralRegisters();
  __ PushRegisters(all_registers);
  SPILLS_LR_TO_FRAME(__ EnterFrame((1 << FP) | (1 << LR), 0));
  __ ReserveAlignedFrameSpace(0);
  __ ldr(R0, Address(THR, kEnterSafepointRuntimeEntry.OffsetFromThread()));
  RESTORES_LR_FROM_FRAME(__ LeaveFrame((1 << FP) | (1 << LR), 0));
  __ PopRegisters(all_registers);

static void GenerateExitSafepointStubCommon(Assembler* assembler,
                                            uword runtime_entry_offset) {
  RegisterSet all_registers;
  all_registers.AddAllGeneralRegisters();
  __ PushRegisters(all_registers);
  SPILLS_LR_TO_FRAME(__ EnterFrame((1 << FP) | (1 << LR), 0));
  __ ReserveAlignedFrameSpace(0);
  __ ldr(R0, Address(THR, runtime_entry_offset));
  RESTORES_LR_FROM_FRAME(__ LeaveFrame((1 << FP) | (1 << LR), 0));
  __ PopRegisters(all_registers);

void StubCodeCompiler::GenerateExitSafepointStub() {
  GenerateExitSafepointStubCommon(
      assembler, kExitSafepointRuntimeEntry.OffsetFromThread());

void StubCodeCompiler::GenerateExitSafepointIgnoreUnwindInProgressStub() {
  GenerateExitSafepointStubCommon(
      assembler,
      kExitSafepointIgnoreUnwindInProgressRuntimeEntry.OffsetFromThread());

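// Calls a native function across a safepoint transition. The return address
// is stashed in R4 so that LR stays free while the thread transitions between
// the generated and native execution states.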
void StubCodeCompiler::GenerateCallNativeThroughSafepointStub() {
  SPILLS_RETURN_ADDRESS_FROM_LR_TO_REGISTER(__ mov(R4, Operand(LR)));
  __ TransitionNativeToGenerated(R9, NOTFP,

  compiler::Label skip_reloc;
  InsertBSSRelocation(relocation);
  __ ldr(dst, compiler::Address(tmp));
  __ add(tmp, tmp, compiler::Operand(dst));
  __ ldr(dst, compiler::Address(tmp));

void StubCodeCompiler::GenerateLoadFfiCallbackMetadataRuntimeFunction(
    uword function_index,
  const intptr_t code_size = __ CodeSize();
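
// FFI callback entry: each callback gets a tiny per-callback trampoline that
// materializes its metadata index and jumps to the shared stub below, which
// recovers THR, handles the safepoint transition, and dispatches to the Dart
// target; the final PopList returns by popping directly into PC.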
void StubCodeCompiler::GenerateFfiCallbackTrampolineStub() {
#if defined(USING_SIMULATOR) && !defined(DART_PRECOMPILER)
      FfiCallbackMetadata::kNativeCallbackTrampolineSize *
  const intptr_t shared_stub_start = __ CodeSize();
  COMPILE_ASSERT(FfiCallbackMetadata::kNativeCallbackTrampolineStackDelta == 4);
  __ PushList((1 << LR) | (1 << THR) | (1 << R4) | (1 << R5));
  RegisterSet argument_registers;
  argument_registers.AddAllArgumentRegisters();
  __ PushRegisters(argument_registers);
  __ EnterFrame(1 << FP, 0);
  __ ReserveAlignedFrameSpace(0);
  GenerateLoadFfiCallbackMetadataRuntimeFunction(
  __ LeaveFrame(1 << FP);
  __ PopRegisters(argument_registers);
  __ cmp(THR, Operand(0));
  __ EnterFullSafepoint(R4, R5);
  __ EnterFrame(1 << FP, 0);
  __ ReserveAlignedFrameSpace(0);
  GenerateLoadFfiCallbackMetadataRuntimeFunction(
  __ LeaveFrame(1 << FP);
  __ PopList((1 << PC) | (1 << THR) | (1 << R4) | (1 << R5));
      FfiCallbackMetadata::kNativeCallbackSharedStubSize);

void StubCodeCompiler::GenerateDispatchTableNullErrorStub() {
  __ CallRuntime(kDispatchTableNullErrorRuntimeEntry, 1);

void StubCodeCompiler::GenerateRangeError(bool with_fpu_regs) {
  auto perform_runtime_call = [&]() {
    __ PushRegistersInOrder(
    __ CallRuntime(kRangeErrorRuntimeEntry, 2);
  GenerateSharedStubGeneric(
      false, perform_runtime_call);

void StubCodeCompiler::GenerateWriteError(bool with_fpu_regs) {
  auto perform_runtime_call = [&]() {
    __ CallRuntime(kWriteErrorRuntimeEntry, 2);
  GenerateSharedStubGeneric(
      false, perform_runtime_call);

static void GenerateCallNativeWithWrapperStub(Assembler* assembler,
  __ CompareImmediate(R8, VMTag::kDartTagId);
  __ Stop("Not coming from Dart code.");
  __ LoadImmediate(R2, VMTag::kDartTagId);
  __ LoadImmediate(R2, 0);
  if (FLAG_precompiled_mode) {
    __ SetupGlobalPoolAndDispatchTable();
  }

void StubCodeCompiler::GenerateCallNoScopeNativeStub() {
  GenerateCallNativeWithWrapperStub(

void StubCodeCompiler::GenerateCallAutoScopeNativeStub() {
  GenerateCallNativeWithWrapperStub(

void StubCodeCompiler::GenerateCallBootstrapNativeStub() {
  GenerateCallNativeWithWrapperStub(

void StubCodeCompiler::GenerateCallStaticFunctionStub() {
  __ LoadImmediate(R0, 0);
  __ CallRuntime(kPatchStaticCallRuntimeEntry, 0);

void StubCodeCompiler::GenerateFixCallersTargetStub() {
  __ BranchOnMonomorphicCheckedEntryJIT(&monomorphic);
  __ LoadImmediate(R0, 0);
  __ CallRuntime(kFixCallersTargetRuntimeEntry, 0);
  __ LoadImmediate(R1, 0);
  __ CallRuntime(kFixCallersTargetMonomorphicRuntimeEntry, 2);
  __ Branch(FieldAddress(

void StubCodeCompiler::GenerateFixAllocationStubTargetStub() {
  __ LoadImmediate(R0, 0);
  __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);

void StubCodeCompiler::GenerateFixParameterizedAllocationStubTargetStub() {
  __ LoadImmediate(R0, 0);
  __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);
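
// Copies the outgoing call arguments from the stack into a freshly allocated
// Array object, as expected by the noSuchMethod runtime entries used below.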
static void PushArrayOfArguments(Assembler* assembler) {
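
// Deoptimization runs in three runtime phases: DeoptimizeCopyFrame sizes and
// copies the optimized frame out, DeoptimizeFillFrame rebuilds the
// corresponding unoptimized frame(s) in place, and DeoptimizeMaterialize
// recreates any objects the optimizer had dematerialized.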
static void GenerateDeoptimizationSequence(Assembler* assembler,
  __ EnterDartFrame(0);
  __ LoadPoolPointer();
  const intptr_t saved_result_slot_from_fp =
  const intptr_t saved_exception_slot_from_fp =
  const intptr_t saved_stacktrace_slot_from_fp =
    } else if (i == SP) {
  {
    LeafRuntimeScope rt(assembler,
    __ mov(R1, Operand(is_lazy ? 1 : 0));
    rt.Call(kDeoptimizeCopyFrameRuntimeEntry, 2);
  }
  __ RestoreCodePointer();
  {
    LeafRuntimeScope rt(assembler,
    rt.Call(kDeoptimizeFillFrameRuntimeEntry, 1);
  }
  __ RestoreCodePointer();
  __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0);
  __ CallRuntime(kReThrowRuntimeEntry, 3);

void StubCodeCompiler::GenerateDeoptimizeLazyFromReturnStub() {

void StubCodeCompiler::GenerateDeoptimizeLazyFromThrowStub() {

void StubCodeCompiler::GenerateDeoptimizeStub() {

static void GenerateNoSuchMethodDispatcherBody(Assembler* assembler) {
  __ LoadImmediate(IP, 0);
  __ cmp(R3, Operand(0));
  PushArrayOfArguments(assembler);
  const intptr_t kNumArgs = 4;
  __ CallRuntime(kNoSuchMethodFromCallStubRuntimeEntry, kNumArgs);
  __ LeaveStubFrame();

static void GenerateDispatcherCode(Assembler* assembler,
                                   Label* call_target_function) {
  __ Comment("NoSuchMethodDispatch");
  __ b(call_target_function, NE);
  GenerateNoSuchMethodDispatcherBody(assembler);

void StubCodeCompiler::GenerateNoSuchMethodDispatcherStub() {
  GenerateNoSuchMethodDispatcherBody(assembler);
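
// Fast-path allocation of a fixed-length Array: falls back to the runtime for
// non-Smi or over-long lengths, bump-allocates from new space, writes the
// header tags, and fills the type-arguments and length slots with no write
// barrier (none is needed for a freshly allocated object).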
void StubCodeCompiler::GenerateAllocateArrayStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    __ b(&slow_case, NE);
    const intptr_t max_len =
    __ CompareImmediate(R3, max_len);
    __ b(&slow_case, HI);
    const intptr_t cid = kArrayCid;
    const intptr_t fixed_size_plus_alignment_padding =
    __ LoadImmediate(R9, fixed_size_plus_alignment_padding);
    __ b(&slow_case, CS);
    __ b(&slow_case, CS);
    __ mov(R8, Operand(0), HI);
    __ LoadImmediate(TMP, tags);
    __ StoreIntoObjectNoBarrier(
  __ EnterStubFrame();
  __ LoadImmediate(TMP, 0);
  __ CallRuntime(kAllocateArrayRuntimeEntry, 2);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateAllocateMintSharedWithFPURegsStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
  GenerateSharedStub(
      true, &kAllocateMintRuntimeEntry,

void StubCodeCompiler::GenerateAllocateMintSharedWithoutFPURegsStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
  GenerateSharedStub(
      false, &kAllocateMintRuntimeEntry,
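
// Bridge from C++ into Dart: sets up a native frame, saves the native
// callee-saved registers, marks the thread as executing Dart code, pushes the
// argument array onto the Dart stack in a loop, and calls the target's entry
// point; everything is undone in reverse order on the way out.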
void StubCodeCompiler::GenerateInvokeDartCodeStub() {
  SPILLS_LR_TO_FRAME(__ EnterFrame((1 << FP) | (1 << LR), 0));
  __ PushNativeCalleeSavedRegisters();
#if defined(USING_SHADOW_CALL_STACK)
  __ LoadImmediate(R8, 0);
  __ LoadImmediate(R8, 0);
#if defined(DART_TARGET_OS_MACOS) || defined(DART_TARGET_OS_MACOS_IOS)
  __ EmitEntryFrameVerification(R9);
  __ LoadImmediate(R9, VMTag::kDartTagId);
  __ cmp(R3, Operand(0));
  Label push_arguments;
  Label done_push_arguments;
  __ CompareImmediate(R9, 0);
  __ b(&done_push_arguments, EQ);
  __ LoadImmediate(R1, 0);
  __ Bind(&push_arguments);
  __ AddImmediate(R1, 1);
  __ b(&push_arguments, LT);
  __ Bind(&done_push_arguments);
  if (FLAG_precompiled_mode) {
    __ SetupGlobalPoolAndDispatchTable();
  } else {
    __ LoadImmediate(PP, 0);
  }
#if defined(USING_SHADOW_CALL_STACK)
  __ PopNativeCalleeSavedRegisters();
  __ set_constant_pool_allowed(false);
  RESTORES_LR_FROM_FRAME(__ LeaveFrame((1 << FP) | (1 << LR)));

static void GenerateAllocateContext(Assembler* assembler, Label* slow_case) {
  const intptr_t fixed_size_plus_alignment_padding =
  __ LoadImmediate(R2, fixed_size_plus_alignment_padding);
  __ b(slow_case, CS);
  __ CheckAllocationCanary(R0);
  __ mov(R9, Operand(0), HI);
  __ LoadImmediate(IP, tags);

void StubCodeCompiler::GenerateAllocateContextStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    GenerateAllocateContext(assembler, &slow_case);
    __ StoreIntoObjectNoBarrier(
  __ EnterStubFrame();
  __ LoadImmediate(R2, 0);
  __ PushList((1 << R1) | (1 << R2));
  __ CallRuntime(kAllocateContextRuntimeEntry, 1);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateCloneContextStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    GenerateAllocateContext(assembler, &slow_case);
    __ StoreIntoObjectNoBarrier(
    __ subs(R1, R1, Operand(1));
  __ EnterStubFrame();
  __ LoadImmediate(R0, 0);
  __ PushRegisterPair(R4, R0);
  __ CallRuntime(kCloneContextRuntimeEntry, 1);
  __ PopRegisterPair(R4, R0);
  __ LeaveStubFrame();
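
// Emits one small write-barrier wrapper per available CPU register, so
// compiled code can invoke the barrier without first shuffling the object
// into a fixed register; start/end below measure each wrapper's code size.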
void StubCodeCompiler::GenerateWriteBarrierWrappersStub() {
    intptr_t start = __ CodeSize();
    RESTORES_LR_FROM_FRAME(
    READS_RETURN_ADDRESS_FROM_LR(__ bx(LR));
    intptr_t end = __ CodeSize();
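
// Shared body of the write-barrier stubs. The generational barrier records
// the target object in the thread's store buffer; the incremental-marking
// barrier pushes the stored value onto the old- or new-space marking stack;
// and when |cards| is set (large arrays), the slot's card-table bit is marked
// instead of using the remembered set.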
static void GenerateWriteBarrierStubHelper(Assembler* assembler, bool cards) {
  Label retry, is_new, done;
  __ PushList((1 << R2) | (1 << R3) | (1 << R4));
  __ cmp(R4, Operand(1));
  auto mark_stack_push = [&](intptr_t offset, const RuntimeEntry& entry) {
    __ add(R2, R2, Operand(1));
    LeafRuntimeScope rt(assembler, 0,
                  kOldMarkingStackBlockProcessRuntimeEntry);
                  kNewMarkingStackBlockProcessRuntimeEntry);
  __ PopList((1 << R2) | (1 << R3) | (1 << R4));
  Label add_to_remembered_set, remember_card;
  __ Bind(&add_to_remembered_set);
  __ Stop("Wrong barrier");
  __ PushList((1 << R2) | (1 << R3) | (1 << R4));
  __ cmp(R4, Operand(1));
  __ add(R2, R2, Operand(1));
  LeafRuntimeScope rt(assembler, 0,
  rt.Call(kStoreBufferBlockProcessRuntimeEntry, 1);
  __ PopList((1 << R2) | (1 << R3) | (1 << R4));
  Label remember_card_slow, retry;
  __ cmp(TMP, Operand(0));
  __ b(&remember_card_slow, EQ);
  __ PushList((1 << R0) | (1 << R1) | (1 << R2));
  __ LoadImmediate(R0, 1);
  __ cmp(R2, Operand(1));
  __ PopList((1 << R0) | (1 << R1) | (1 << R2));
  __ Bind(&remember_card_slow);
  LeafRuntimeScope rt(assembler, 0,
  rt.Call(kRememberCardRuntimeEntry, 2);

void StubCodeCompiler::GenerateWriteBarrierStub() {
  GenerateWriteBarrierStubHelper(assembler, false);

void StubCodeCompiler::GenerateArrayWriteBarrierStub() {
  GenerateWriteBarrierStubHelper(assembler, true);
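
// Fast-path instance allocation shared by the parameterized and
// non-parameterized allocation stubs: bump-allocates using the instance size
// decoded from the class's tags, null-initializes the fields, and, for
// generic classes, stores the type arguments at the offset recorded in the
// class.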
static void GenerateAllocateObjectHelper(Assembler* assembler,
                                         bool is_cls_parameterized) {
#if !defined(PRODUCT)
  __ MaybeTraceAllocation(kCidRegister, &slow_case, kTraceAllocationTempReg);
  __ ExtractInstanceSizeFromTags(kInstanceSizeReg, kTagsReg);
         Operand(kInstanceSizeReg));
  __ CompareRegisters(kEndReg, kNewTopReg);
  Label done, init_loop;
  __ CompareRegisters(kFieldReg, kNewTopReg);
  if (is_cls_parameterized) {
    Label not_parameterized_case;
    __ ExtractClassIdFromTags(kClsIdReg, kTagsReg);
    __ LoadClassById(kTypeOffsetReg, kClsIdReg);
        FieldAddress(kTypeOffsetReg,
                     host_type_arguments_field_offset_in_words_offset()));
        FieldAddress(kTypeOffsetReg, 0),
    __ Bind(&not_parameterized_case);
  if (!is_cls_parameterized) {

void StubCodeCompiler::GenerateAllocateObjectStub() {
  GenerateAllocateObjectHelper(assembler, false);

void StubCodeCompiler::GenerateAllocateObjectParameterizedStub() {
  GenerateAllocateObjectHelper(assembler, true);

void StubCodeCompiler::GenerateAllocateObjectSlowStub() {
  if (!FLAG_precompiled_mode) {
  __ EnterStubFrame();
  __ CallRuntime(kAllocateObjectRuntimeEntry, 2);
  __ LeaveDartFrameAndReturn();

void StubCodeCompiler::GenerateAllocationStubForClass(
    UnresolvedPcRelativeCalls* unresolved_calls,
    const Class& cls,
    const Code& allocate_object,
    const Code& allocat_object_parametrized) {
  ASSERT(instance_size > 0);
  __ LoadImmediate(kTagsReg, tags);
  if (!FLAG_use_slow_path && FLAG_inline_alloc &&
    if (is_cls_parameterized) {
        CastHandle<Object>(allocat_object_parametrized))) {
      __ GenerateUnRelocatedPcRelativeTailCall();
      unresolved_calls->Add(new UnresolvedPcRelativeCall(
          __ CodeSize(), allocat_object_parametrized, true));
          allocate_object_parameterized_entry_point_offset()));
      __ GenerateUnRelocatedPcRelativeTailCall();
      unresolved_calls->Add(new UnresolvedPcRelativeCall(
          __ CodeSize(), allocate_object, true));
  if (!is_cls_parameterized) {

void StubCodeCompiler::GenerateCallClosureNoSuchMethodStub() {
  __ EnterStubFrame();
  __ LoadImmediate(IP, 0);
  __ PushList((1 << R4) | (1 << R6) | (1 << R8) | (1 << IP));
  __ cmp(R3, Operand(0));
  const intptr_t kNumArgs = 4;
  __ CallRuntime(kNoSuchMethodFromPrologueRuntimeEntry, kNumArgs);

void StubCodeCompiler::GenerateOptimizedUsageCounterIncrement() {
  if (FLAG_precompiled_mode) {
  if (FLAG_trace_optimized_ic_calls) {
    __ EnterStubFrame();
    __ PushList((1 << R9) | (1 << R8));
    __ CallRuntime(kTraceICCallRuntimeEntry, 2);
    __ PopList((1 << R9) | (1 << R8));
    __ LeaveStubFrame();
  }

void StubCodeCompiler::GenerateUsageCounterIncrement(Register temp_reg) {
  if (FLAG_precompiled_mode) {
  if (FLAG_optimization_counter_threshold >= 0) {
    __ Comment("Increment function counter");

static void EmitFastSmiOp(Assembler* assembler,
                          Token::Kind kind,
                          intptr_t num_args,
                          Label* not_smi_or_overflow) {
  __ Comment("Fast Smi op");
  __ b(not_smi_or_overflow, NE);
  __ b(not_smi_or_overflow, VS);
  __ ldr(R1, Address(R8, 0));
  __ CompareImmediate(R1, imm_smi_cid);
  __ CompareImmediate(R1, imm_smi_cid);
  __ Stop("Incorrect IC data");
  if (FLAG_optimization_counter_threshold >= 0) {
    const intptr_t count_offset =
    __ LoadFromOffset(R1, R8, count_offset);
    __ StoreIntoSmiField(Address(R8, count_offset), R1);

static void GenerateRecordEntryPoint(Assembler* assembler) {
  __ BindUncheckedEntryPoint();
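
// Core of the unoptimized/optimized inline caches: optionally tries a fast
// Smi arithmetic path, then walks the ICData entry array comparing the
// receiver's (and optionally the first argument's) class id against each
// entry. A hit bumps the entry's usage count and tail-calls the cached
// target; a miss pushes the arguments and calls |handle_ic_miss| in the
// runtime to update the cache.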
void StubCodeCompiler::GenerateNArgsCheckInlineCacheStub(
    intptr_t num_args,
    const RuntimeEntry& handle_ic_miss,
    Token::Kind kind,
    Optimized optimized,
    CallType type,
    Exactness exactness) {
  if (FLAG_precompiled_mode) {
  const bool save_entry_point = kind == Token::kILLEGAL;
  if (save_entry_point) {
  __ CheckCodePointer();
  ASSERT(num_args == 1 || num_args == 2);
  __ CompareImmediate(R8, num_args);
  __ Stop("Incorrect stub for IC data");
#if !defined(PRODUCT)
  Label stepping, done_stepping;
  __ Comment("Check single stepping");
  __ CompareImmediate(R8, 0);
  __ b(&stepping, NE);
  Label not_smi_or_overflow;
  if (kind != Token::kILLEGAL) {
    EmitFastSmiOp(assembler, kind, num_args, &not_smi_or_overflow);
  }
  __ Bind(&not_smi_or_overflow);
  __ Comment("Extract ICData initial values and receiver cid");
  __ LoadTaggedClassIdMayBeSmi(NOTFP, R0);
  if (num_args == 2) {
    __ LoadTaggedClassIdMayBeSmi(R1, R1);
  __ LoadTaggedClassIdMayBeSmi(NOTFP, R0);
  if (num_args == 2) {
    __ LoadTaggedClassIdMayBeSmi(R1, R1);
  Label loop, found, miss;
  __ Comment("ICData loop");
  const bool optimize = kind == Token::kILLEGAL;
  for (int unroll = optimize ? 4 : 2; unroll >= 0; unroll--) {
    __ ldr(R2, Address(R8, kIcDataOffset));
    if (num_args == 2) {
    __ AddImmediate(R8, entry_size);
  __ Comment("IC miss");
  __ EnterStubFrame();
  __ LoadImmediate(R0, 0);
  if (save_entry_point) {
  for (intptr_t i = 0; i < num_args; i++) {
  __ CallRuntime(handle_ic_miss, num_args + 1);
  __ Drop(num_args + 1);
  if (save_entry_point) {
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  Label call_target_function;
  if (FLAG_precompiled_mode) {
    GenerateDispatcherCode(assembler, &call_target_function);
  } else {
    __ b(&call_target_function);
  }
  const intptr_t target_offset =
  const intptr_t count_offset =
  const intptr_t exactness_offset =
  Label call_target_function_through_unchecked_entry;
  __ ldr(R1, Address(R8, kIcDataOffset + exactness_offset));
  __ CompareImmediate(
  __ BranchIf(LESS, &exactness_ok);
  __ BranchIf(EQUAL, &call_target_function_through_unchecked_entry);
  __ CompareObjectRegisters(R2, TMP);
  __ BranchIf(EQUAL, &call_target_function_through_unchecked_entry);
  __ str(R1, Address(R8, kIcDataOffset + exactness_offset));
  if (FLAG_optimization_counter_threshold >= 0) {
    __ Comment("Update caller's counter");
    __ LoadFromOffset(R1, R8, kIcDataOffset + count_offset);
    __ StoreIntoSmiField(Address(R8, kIcDataOffset + count_offset), R1);
  __ Comment("Call target");
  __ Bind(&call_target_function);
  if (save_entry_point) {
  __ Bind(&call_target_function_through_unchecked_entry);
  if (FLAG_optimization_counter_threshold >= 0) {
    __ Comment("Update ICData counter");
    __ LoadFromOffset(R1, R8, kIcDataOffset + count_offset);
    __ StoreIntoSmiField(Address(R8, kIcDataOffset + count_offset), R1);
  __ Comment("Call target (via unchecked entry point)");
#if !defined(PRODUCT)
  __ EnterStubFrame();
  if (save_entry_point) {
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  if (save_entry_point) {
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ b(&done_stepping);

void StubCodeCompiler::GenerateOneArgCheckInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateOneArgCheckInlineCacheWithExactnessCheckStub() {
  GenerateNArgsCheckInlineCacheStub(
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateTwoArgsCheckInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateSmiAddInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD, kUnoptimized,

void StubCodeCompiler::GenerateSmiLessInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kLT, kUnoptimized,

void StubCodeCompiler::GenerateSmiEqualInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ, kUnoptimized,

void StubCodeCompiler::GenerateOneArgOptimizedCheckInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL, kOptimized,

void StubCodeCompiler::
    GenerateOneArgOptimizedCheckInlineCacheWithExactnessCheckStub() {
  GenerateNArgsCheckInlineCacheStub(
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL, kOptimized,

void StubCodeCompiler::GenerateTwoArgsOptimizedCheckInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateZeroArgsUnoptimizedStaticCallStub() {
  __ CompareImmediate(R8, 0);
  __ Stop("Incorrect IC data for unoptimized static call");
#if !defined(PRODUCT)
  Label stepping, done_stepping;
  __ CompareImmediate(R8, 0);
  __ b(&stepping, NE);
  const intptr_t target_offset =
  const intptr_t count_offset =
  if (FLAG_optimization_counter_threshold >= 0) {
    __ LoadFromOffset(R1, R8, count_offset);
    __ StoreIntoSmiField(Address(R8, count_offset), R1);
#if !defined(PRODUCT)
  __ EnterStubFrame();
  __ PushList((1 << R9) | (1 << R3));
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ PopList((1 << R9) | (1 << R3));
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ b(&done_stepping);

void StubCodeCompiler::GenerateOneArgUnoptimizedStaticCallStub() {

void StubCodeCompiler::GenerateTwoArgsUnoptimizedStaticCallStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateLazyCompileStub() {
  __ EnterStubFrame();
  __ CallRuntime(kCompileFunctionRuntimeEntry, 1);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateICCallBreakpointStub() {
#if defined(PRODUCT)
  __ Stop("No debugging in PRODUCT mode");
#else
  __ EnterStubFrame();
  __ PushImmediate(0);
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateUnoptStaticCallBreakpointStub() {
#if defined(PRODUCT)
  __ Stop("No debugging in PRODUCT mode");
#else
  __ EnterStubFrame();
  __ PushImmediate(0);
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateRuntimeCallBreakpointStub() {
#if defined(PRODUCT)
  __ Stop("No debugging in PRODUCT mode");
#else
  __ EnterStubFrame();
  __ LoadImmediate(R0, 0);
  __ PushList((1 << R0));
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateDebugStepCheckStub() {
#if defined(PRODUCT)
  __ Stop("No debugging in PRODUCT mode");
#else
  Label stepping, done_stepping;
  __ CompareImmediate(R1, 0);
  __ b(&stepping, NE);
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();
  __ b(&done_stepping);
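
// Probes a SubtypeTestCache: saves the registers the search clobbers, gathers
// the instance's class id/signature and up to three type-argument vectors,
// and delegates to GenerateSubtypeTestCacheSearch; the cached result (or null
// on a miss) comes back in the designated result register.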
void StubCodeCompiler::GenerateSubtypeNTestCacheStub(Assembler* assembler,
  RegisterSet saved_registers;
  saved_registers.AddRegister(kCacheArrayReg);
  kInstanceInstantiatorTypeArgumentsReg = PP;
  saved_registers.AddRegister(kInstanceInstantiatorTypeArgumentsReg);
  kInstanceParentFunctionTypeArgumentsReg =
  saved_registers.AddRegister(kInstanceParentFunctionTypeArgumentsReg);
  saved_registers.AddRegister(kInstanceDelayedFunctionTypeArgumentsReg);
  kCacheContentsSizeReg =
  saved_registers.AddRegister(kCacheContentsSizeReg);
  saved_registers.AddRegister(kProbeDistanceReg);
  __ PushRegisters(saved_registers);
  GenerateSubtypeTestCacheSearch(
      kInstanceInstantiatorTypeArgumentsReg,
      kInstanceParentFunctionTypeArgumentsReg,
      kInstanceDelayedFunctionTypeArgumentsReg, kCacheEntryEndReg,
      kCacheContentsSizeReg, kProbeDistanceReg,
  __ PopRegisters(saved_registers);
  __ PopRegisters(saved_registers);

void StubCodeCompiler::GenerateGetCStackPointerStub() {
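
// Long-jumps into an existing Dart frame during exception handling: restores
// the saved frame state, leaves an FFI safepoint first if the jump crosses
// one (otherwise taking the exit_through_non_ffi path), restores the Dart VM
// tag, and re-establishes the pool pointer before resuming.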
void StubCodeCompiler::GenerateJumpToFrameStub() {
#if defined(USING_SHADOW_CALL_STACK)
  Label exit_through_non_ffi;
  __ LoadFromOffset(tmp1, THR,
  __ cmp(tmp1, Operand(tmp2));
  __ b(&exit_through_non_ffi, NE);
  __ TransitionNativeToGenerated(tmp1, tmp2,
  __ Bind(&exit_through_non_ffi);
  __ LoadImmediate(R2, VMTag::kDartTagId);
  __ LoadImmediate(R2, 0);
  __ RestoreCodePointer();
  if (FLAG_precompiled_mode) {
    __ SetupGlobalPoolAndDispatchTable();
    __ set_constant_pool_allowed(true);
  } else {
    __ LoadPoolPointer();
  }

void StubCodeCompiler::GenerateRunExceptionHandlerStub() {
  WRITES_RETURN_ADDRESS_TO_LR(
  word offset_from_thread = 0;
  __ LoadFromOffset(R2, THR, offset_from_thread);
  READS_RETURN_ADDRESS_FROM_LR(

void StubCodeCompiler::GenerateDeoptForRewindStub() {
  WRITES_RETURN_ADDRESS_TO_LR(
  __ EnterStubFrame();
  __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateOptimizeFunctionStub() {
  __ EnterStubFrame();
  __ LoadImmediate(IP, 0);
  __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1);
  __ LeaveStubFrame();

static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler,
                                                 Register left,
                                                 Register right,
                                                 Register temp) {
  Label reference_compare, done, check_mint;
  __ b(&reference_compare, EQ);
  __ b(&reference_compare, EQ);
  __ CompareClassId(left, kDoubleCid, temp);
  __ b(&check_mint, NE);
  __ CompareClassId(right, kDoubleCid, temp);
  __ cmp(temp, Operand(IP));
  __ cmp(temp, Operand(IP));
  __ CompareClassId(left, kMintCid, temp);
  __ b(&reference_compare, NE);
  __ CompareClassId(right, kMintCid, temp);
  __ ldr(temp, FieldAddress(
  __ ldr(IP, FieldAddress(
  __ cmp(temp, Operand(IP));
  __ ldr(temp, FieldAddress(
  __ ldr(IP, FieldAddress(
  __ cmp(temp, Operand(IP));
  __ Bind(&reference_compare);
  __ cmp(left, Operand(right));

void StubCodeCompiler::GenerateUnoptimizedIdenticalWithNumberCheckStub() {
#if !defined(PRODUCT)
  Label stepping, done_stepping;
  __ CompareImmediate(R1, 0);
  __ b(&stepping, NE);
  GenerateIdenticalWithNumberCheckStub(assembler, left, right, temp);
#if !defined(PRODUCT)
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ b(&done_stepping);

void StubCodeCompiler::GenerateOptimizedIdenticalWithNumberCheckStub() {
  GenerateIdenticalWithNumberCheckStub(assembler, left, right, temp);
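
// Megamorphic call-site lookup: hashes the receiver's class id into the
// MegamorphicCache's bucket array and probes linearly from there; if the
// probe fails, control falls through to the switchable-call miss path.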
void StubCodeCompiler::GenerateMegamorphicCallStub() {
  __ LoadTaggedClassIdMayBeSmi(R8, R0);
  __ b(&probe_failed, NE);
  if (!FLAG_precompiled_mode) {
  GenerateSwitchableCallMissStub();

void StubCodeCompiler::GenerateICCallThroughCodeStub() {
  Label loop, found, miss;
  __ LoadTaggedClassIdMayBeSmi(R1, R0);
  __ ldr(R2, Address(R8, 0));
  const intptr_t entry_length =
  __ AddImmediate(R8, entry_length);
  if (FLAG_precompiled_mode) {
    const intptr_t entry_offset =
    __ LoadCompressed(R0, Address(R8, entry_offset));
    const intptr_t code_offset =

void StubCodeCompiler::GenerateMonomorphicSmiableCheckStub() {
  __ LoadClassIdMayBeSmi(IP, R0);
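
// Miss handling for switchable calls: pushes the receiver and calls
// SwitchableCallMiss in the runtime, which computes and installs a new
// target for the call site; the caller then re-dispatches through it.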
static void CallSwitchableCallMissRuntimeEntry(Assembler* assembler,
                                               Register receiver_reg) {
  __ LoadImmediate(IP, 0);
  __ Push(receiver_reg);
  __ CallRuntime(kSwitchableCallMissRuntimeEntry, 2);

void StubCodeCompiler::GenerateSwitchableCallMissStub() {
  __ EnterStubFrame();
  CallSwitchableCallMissRuntimeEntry(assembler, R0);
  __ LeaveStubFrame();
  __ Branch(FieldAddress(

void StubCodeCompiler::GenerateSingleTargetCallStub() {
  __ LoadClassIdMayBeSmi(R1, R0);
  __ EnterStubFrame();
  CallSwitchableCallMissRuntimeEntry(assembler, R0);
  __ LeaveStubFrame();
  __ Branch(FieldAddress(

static int GetScaleFactor(intptr_t size) {
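
// Fast-path TypedData allocation: scales the Smi length by the element size,
// rejects lengths over the class's new-space limit, bump-allocates, writes
// the header tags and length, sets up the internal data pointer, and
// zero-fills the payload before returning.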
void StubCodeCompiler::GenerateAllocateTypedDataArrayStub(intptr_t cid) {
  const intptr_t scale_shift = GetScaleFactor(element_size);
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    __ b(&call_runtime, NE);
    __ CompareImmediate(R2, max_len);
    __ b(&call_runtime, HI);
    __ mov(R2, Operand(R2, LSL, scale_shift));
    const intptr_t fixed_size_plus_alignment_padding =
    __ AddImmediate(R2, fixed_size_plus_alignment_padding);
    __ b(&call_runtime, CS);
    __ b(&call_runtime, CS);
    __ CheckAllocationCanary(R0);
    __ mov(R3, Operand(0), HI);
    __ LoadImmediate(TMP, tags);
    __ StoreIntoObjectNoBarrier(
    __ LoadImmediate(R8, 0);
    __ StoreInternalPointer(
    __ b(&init_loop, CC);
    __ WriteAllocationCanary(R1);
  __ EnterStubFrame();
  __ PushObject(Object::null_object());
  __ CallRuntime(kAllocateTypedDataRuntimeEntry, 2);
  __ LeaveStubFrame();

#endif  // defined(TARGET_ARCH_ARM)