#define SHOULD_NOT_INCLUDE_RUNTIME

#if defined(TARGET_ARCH_ARM64)

  rt.Call(kEnsureRememberedAndMarkingDeferredRuntimeEntry,

static void WithExceptionCatchingTrampoline(Assembler* assembler,
#if !defined(USING_SIMULATOR)
  const intptr_t kJumpBufferSize = sizeof(jmp_buf);
  const RegisterSet volatile_registers(
  if (FLAG_target_thread_sanitizer) {
    __ AddImmediate(SP, -kJumpBufferSize);
    __ PushRegisters(volatile_registers);
    __ PopRegisters(volatile_registers);
    __ cbz(&do_native_call, R0);
    __ AddImmediate(SP, kJumpBufferSize);
    __ MoveRegister(kSavedRspReg, SP);
#if !defined(USING_SIMULATOR)
  if (FLAG_target_thread_sanitizer) {
    __ MoveRegister(SP, kSavedRspReg);
    __ AddImmediate(SP, kJumpBufferSize);
    const Register kTsanUtilsReg2 = kSavedRspReg;
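// Sketch (inferred from the fragments below, not the full source):
// GenerateCallToRuntimeStub verifies the VM tag, builds a NativeArguments
// block (thread, argc_tag, argv, retval) on the stack, invokes the C++ runtime
// entry through the exception-catching trampoline above (which, under TSAN,
// brackets the call with a setjmp-style jump buffer so a longjmp can unwind
// it), and then restores the Dart VM tag, pinned registers and, in AOT mode,
// the global object pool and dispatch table.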
void StubCodeCompiler::GenerateCallToRuntimeStub() {
  __ Comment("CallToRuntimeStub");
  __ SetPrologueOffset();
  __ CompareImmediate(R8, VMTag::kDartTagId);
  __ Stop("Not coming from Dart code.");
  WithExceptionCatchingTrampoline(assembler, [&]() {
    __ Comment("align stack");
    __ StoreToOffset(R0, SP, thread_offset);
    __ StoreToOffset(R1, SP, argc_tag_offset);
    __ StoreToOffset(R2, SP, argv_offset);
    __ StoreToOffset(R3, SP, retval_offset);
    __ Comment("CallToRuntimeStub return");
    __ RestorePinnedRegisters();
    __ LoadImmediate(R2, VMTag::kDartTagId);
    if (FLAG_precompiled_mode) {
      __ SetupGlobalPoolAndDispatchTable();
    __ LoadImmediate(R0, 0);
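// The shared stubs below save the return address and all non-reserved
// registers, load their own Code object from a Thread offset, run the supplied
// perform_runtime_call closure, then restore registers and return.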
void StubCodeCompiler::GenerateSharedStubGeneric(
    bool save_fpu_registers,
    intptr_t self_code_stub_offset_from_thread,
  RegisterSet all_registers;
  all_registers.AddAllNonReservedRegisters(save_fpu_registers);
  READS_RETURN_ADDRESS_FROM_LR(__ Push(LR));
  __ PushRegisters(all_registers);
  __ ldr(CODE_REG, Address(THR, self_code_stub_offset_from_thread));
  perform_runtime_call();
  __ PopRegisters(all_registers);
  READS_RETURN_ADDRESS_FROM_LR(__ ret(LR));

void StubCodeCompiler::GenerateSharedStub(
    bool save_fpu_registers,
    const RuntimeEntry* target,
    intptr_t self_code_stub_offset_from_thread,
    bool store_runtime_result_in_result_register) {
  ASSERT(!store_runtime_result_in_result_register || allow_return);
  auto perform_runtime_call = [&]() {
    if (store_runtime_result_in_result_register) {
    if (store_runtime_result_in_result_register) {
  GenerateSharedStubGeneric(save_fpu_registers,
                            self_code_stub_offset_from_thread, allow_return,
                            perform_runtime_call);
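// Safepoint transition stubs: spill all general-purpose registers, set up an
// aligned C frame, and call the enter/exit-safepoint leaf runtime function
// whose entry point is cached on the Thread object.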
void StubCodeCompiler::GenerateEnterSafepointStub() {
  RegisterSet all_registers;
  all_registers.AddAllGeneralRegisters();
  __ PushRegisters(all_registers);
  __ ReserveAlignedFrameSpace(0);
  __ ldr(R0, Address(THR, kEnterSafepointRuntimeEntry.OffsetFromThread()));
  __ PopRegisters(all_registers);

static void GenerateExitSafepointStubCommon(Assembler* assembler,
                                            uword runtime_entry_offset) {
  RegisterSet all_registers;
  all_registers.AddAllGeneralRegisters();
  __ PushRegisters(all_registers);
  __ ReserveAlignedFrameSpace(0);
  __ ldr(R0, Address(THR, runtime_entry_offset));
  __ PopRegisters(all_registers);

void StubCodeCompiler::GenerateExitSafepointStub() {
  GenerateExitSafepointStubCommon(
      assembler, kExitSafepointRuntimeEntry.OffsetFromThread());

void StubCodeCompiler::GenerateExitSafepointIgnoreUnwindInProgressStub() {
  GenerateExitSafepointStubCommon(
      kExitSafepointIgnoreUnwindInProgressRuntimeEntry.OffsetFromThread());
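// Likely behavior (inferred from the fragments): the stub below parks LR in
// R19, calls the native target while the thread is marked as being in native
// code, and uses TransitionNativeToGenerated on the way back; the BSS
// relocation fragments load runtime addresses indirectly so the stub stays
// position independent.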
void StubCodeCompiler::GenerateCallNativeThroughSafepointStub() {
  SPILLS_RETURN_ADDRESS_FROM_LR_TO_REGISTER(__ mov(R19, LR));
  __ TransitionNativeToGenerated(R10, true);
  compiler::Label skip_reloc;
  InsertBSSRelocation(relocation);
  __ ldr(dst, compiler::Address(tmp));
  __ add(tmp, tmp, compiler::Operand(dst));
  __ ldr(dst, compiler::Address(tmp));

void StubCodeCompiler::GenerateLoadFfiCallbackMetadataRuntimeFunction(
    uword function_index,
  const intptr_t code_size = __ CodeSize();
  __ adr(dst, Immediate(-code_size));
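// FFI callback trampolines (hedged sketch): each per-callback trampoline uses
// adr to identify itself, then falls into a shared stub that saves the
// argument registers, fetches the callback metadata via a runtime function
// located by GenerateLoadFfiCallbackMetadataRuntimeFunction, performs the
// required safepoint transitions, and tail-calls the Dart entry point. Most of
// the details are elided in this excerpt.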
void StubCodeCompiler::GenerateFfiCallbackTrampolineStub() {
#if defined(USING_SIMULATOR) && !defined(DART_PRECOMPILER)
  __ adr(R9, Immediate(0));
      FfiCallbackMetadata::kNativeCallbackTrampolineSize *
  const intptr_t shared_stub_start = __ CodeSize();
  COMPILE_ASSERT(FfiCallbackMetadata::kNativeCallbackTrampolineStackDelta == 2);
  SPILLS_LR_TO_FRAME(__ stp(
  RegisterSet all_registers;
  all_registers.AddAllArgumentRegisters();
  __ PushRegisters(all_registers);
  __ ReserveAlignedFrameSpace(0);
#if defined(DART_TARGET_OS_FUCHSIA)
  if (FLAG_precompiled_mode) {
  GenerateLoadFfiCallbackMetadataRuntimeFunction(
  __ PopRegisters(all_registers);
  __ cmp(THR, Operand(0));
  __ EnterFullSafepoint(R9);
  __ ReserveAlignedFrameSpace(0);
#if defined(DART_TARGET_OS_FUCHSIA)
  if (FLAG_precompiled_mode) {
  GenerateLoadFfiCallbackMetadataRuntimeFunction(
  RESTORES_LR_FROM_FRAME(__ ldp(
                 FfiCallbackMetadata::kNativeCallbackSharedStubSize);

void StubCodeCompiler::GenerateDispatchTableNullErrorStub() {
  __ CallRuntime(kDispatchTableNullErrorRuntimeEntry, 1);

void StubCodeCompiler::GenerateRangeError(bool with_fpu_regs) {
  auto perform_runtime_call = [&]() {
#if !defined(DART_COMPRESSED_POINTERS)
    __ CallRuntime(kAllocateMintRuntimeEntry, 0);
    __ PushRegistersInOrder(
    __ CallRuntime(kRangeErrorRuntimeEntry, 2);
  GenerateSharedStubGeneric(
      false, perform_runtime_call);

void StubCodeCompiler::GenerateWriteError(bool with_fpu_regs) {
  auto perform_runtime_call = [&]() {
    __ CallRuntime(kWriteErrorRuntimeEntry, 2);
  GenerateSharedStubGeneric(
      false, perform_runtime_call);
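// As with CallToRuntime above, the native-call wrappers verify the VM tag,
// build a NativeArguments block (thread, argc_tag, argv, retval) on the stack
// inside the exception-catching trampoline, invoke the native wrapper, and
// restore the Dart VM tag (and, in AOT mode, the object pool) afterwards.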
static void GenerateCallNativeWithWrapperStub(Assembler* assembler,
  __ CompareImmediate(R6, VMTag::kDartTagId);
  __ Stop("Not coming from Dart code.");
  WithExceptionCatchingTrampoline(assembler, [&]() {
    __ StoreToOffset(R0, SP, thread_offset);
    __ StoreToOffset(R1, SP, argc_tag_offset);
    __ StoreToOffset(R2, SP, argv_offset);
    __ StoreToOffset(R3, SP, retval_offset);
    __ RestorePinnedRegisters();
    __ LoadImmediate(R2, VMTag::kDartTagId);
    if (FLAG_precompiled_mode) {
      __ SetupGlobalPoolAndDispatchTable();

void StubCodeCompiler::GenerateCallNoScopeNativeStub() {
  GenerateCallNativeWithWrapperStub(

void StubCodeCompiler::GenerateCallAutoScopeNativeStub() {
  GenerateCallNativeWithWrapperStub(

void StubCodeCompiler::GenerateCallBootstrapNativeStub() {
  GenerateCallNativeWithWrapperStub(

void StubCodeCompiler::GenerateCallStaticFunctionStub() {
  __ CallRuntime(kPatchStaticCallRuntimeEntry, 0);

void StubCodeCompiler::GenerateFixCallersTargetStub() {
  __ BranchOnMonomorphicCheckedEntryJIT(&monomorphic);
  __ CallRuntime(kFixCallersTargetRuntimeEntry, 0);
  __ CallRuntime(kFixCallersTargetMonomorphicRuntimeEntry, 2);
  __ LoadFieldFromOffset(

void StubCodeCompiler::GenerateFixAllocationStubTargetStub() {
  __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);

void StubCodeCompiler::GenerateFixParameterizedAllocationStubTargetStub() {
  __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);

static void PushArrayOfArguments(Assembler* assembler) {
  Label loop, loop_exit;
  __ CompareRegisters(R2, ZR);
  __ b(&loop_exit, LE);
  __ AddImmediate(R2, R2, -1);
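// Deoptimization sequence (inferred): push the live registers, call
// DeoptimizeCopyFrame to size and copy the optimized frame, unwind to the
// saved FP, call DeoptimizeFillFrame to rebuild the unoptimized frame(s), and
// finally materialize deferred objects before resuming (or re-throwing, for
// the lazy-deopt-from-throw variant).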
static void GenerateDeoptimizationSequence(Assembler* assembler,
  __ EnterStubFrame();
  const intptr_t saved_result_slot_from_fp =
  const intptr_t saved_exception_slot_from_fp =
  const intptr_t saved_stacktrace_slot_from_fp =
    } else if (r == R15) {
    } else if (r == R31) {
  LeafRuntimeScope rt(assembler,
  __ LoadImmediate(R1, is_lazy ? 1 : 0);
  rt.Call(kDeoptimizeCopyFrameRuntimeEntry, 2);
  __ LoadFromOffset(R2, FP,
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ EnterStubFrame();
  LeafRuntimeScope rt(assembler,
  rt.Call(kDeoptimizeFillFrameRuntimeEntry, 1);
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ EnterStubFrame();
  __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0);
  __ LeaveStubFrame();
  __ EnterStubFrame();
  __ CallRuntime(kReThrowRuntimeEntry, 3);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateDeoptimizeLazyFromReturnStub() {

void StubCodeCompiler::GenerateDeoptimizeLazyFromThrowStub() {

void StubCodeCompiler::GenerateDeoptimizeStub() {

static void GenerateNoSuchMethodDispatcherBody(Assembler* assembler) {
  __ EnterStubFrame();
  __ LoadCompressedSmiFieldFromOffset(
  __ LoadCompressedSmiFieldFromOffset(
  PushArrayOfArguments(assembler);
  const intptr_t kNumArgs = 4;
  __ CallRuntime(kNoSuchMethodFromCallStubRuntimeEntry, kNumArgs);
  __ LeaveStubFrame();

static void GenerateDispatcherCode(Assembler* assembler,
                                   Label* call_target_function) {
  __ Comment("NoSuchMethodDispatch");
  __ b(call_target_function, NE);
  GenerateNoSuchMethodDispatcherBody(assembler);

void StubCodeCompiler::GenerateNoSuchMethodDispatcherStub() {
  GenerateNoSuchMethodDispatcherBody(assembler);
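// Array allocation fast path (sketch): check the requested length against a
// maximum, attempt a bump allocation in new space, store the tags, type
// arguments and length, and fill the payload with null two compressed words at
// a time via stp; any overflow or failed check branches to the slow case,
// which calls the AllocateArray runtime entry.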
void StubCodeCompiler::GenerateAllocateArrayStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    const intptr_t max_len =
    __ b(&slow_case, HI);
    const intptr_t cid = kArrayCid;
    intptr_t fixed_size_plus_alignment_padding =
    __ LoadImmediate(R3, fixed_size_plus_alignment_padding);
#if defined(DART_COMPRESSED_POINTERS)
    __ b(&slow_case, CS);
    __ b(&slow_case, CS);
    __ LslImmediate(TMP, R3, shift);
    __ LoadImmediate(TMP, tags);
    __ StoreCompressedIntoObjectOffsetNoBarrier(
#if defined(DART_COMPRESSED_POINTERS)
    __ andi(kWordOfNulls, NULL_REG, Immediate(0xFFFFFFFF));
    __ orr(kWordOfNulls, kWordOfNulls, Operand(kWordOfNulls, LSL, 32));
    __ stp(kWordOfNulls, kWordOfNulls,
    __ CompareRegisters(R3, R7);
    __ WriteAllocationCanary(R7);
  __ EnterStubFrame();
  __ CallRuntime(kAllocateArrayRuntimeEntry, 2);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateAllocateMintSharedWithFPURegsStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
  GenerateSharedStub(true, &kAllocateMintRuntimeEntry,

void StubCodeCompiler::GenerateAllocateMintSharedWithoutFPURegsStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
      false, &kAllocateMintRuntimeEntry,
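// InvokeDartCode is the C++ -> Dart entry point: it appears to set up CSP from
// the Thread, save the native callee-saved registers, switch the VM tag to
// Dart, push the argument array onto the Dart stack in a loop, call the
// target's entry point, and restore everything on the way out.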
void StubCodeCompiler::GenerateInvokeDartCodeStub() {
  __ Comment("InvokeDartCodeStub");
  __ SetupCSPFromThread(R3);
#if defined(DART_TARGET_OS_FUCHSIA)
#elif defined(USING_SHADOW_CALL_STACK)
  __ PushNativeCalleeSavedRegisters();
  __ RestorePinnedRegisters();
#if defined(DART_TARGET_OS_FUCHSIA)
  __ EmitEntryFrameVerification();
  __ LoadImmediate(R6, VMTag::kDartTagId);
  __ LoadCompressedSmiFieldFromOffset(
  __ LoadCompressedSmiFieldFromOffset(
  Label push_arguments;
  Label done_push_arguments;
  __ cmp(R5, Operand(0));
  __ b(&done_push_arguments, EQ);
  __ LoadImmediate(R1, 0);
  __ Bind(&push_arguments);
  __ LoadCompressed(R3, Address(R2));
  __ add(R1, R1, Operand(1));
  __ b(&push_arguments, LT);
  __ Bind(&done_push_arguments);
  if (FLAG_precompiled_mode) {
    __ SetupGlobalPoolAndDispatchTable();
  __ Comment("InvokeDartCodeStub return");
  __ PopNativeCalleeSavedRegisters();

static void GenerateAllocateContextSpaceStub(Assembler* assembler,
  intptr_t fixed_size_plus_alignment_padding =
  __ LoadImmediate(R2, fixed_size_plus_alignment_padding);
  __ b(slow_case, CS);
  __ CheckAllocationCanary(R0);
  __ LslImmediate(TMP, R2, shift);
  __ LoadImmediate(TMP, tags);

void StubCodeCompiler::GenerateAllocateContextStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    GenerateAllocateContextSpaceStub(assembler, &slow_case);
#if defined(DART_COMPRESSED_POINTERS)
    __ andi(kWordOfNulls, NULL_REG, Immediate(0xFFFFFFFF));
    __ orr(kWordOfNulls, kWordOfNulls, Operand(kWordOfNulls, LSL, 32));
    __ stp(kWordOfNulls, kWordOfNulls,
    __ WriteAllocationCanary(TMP2);
  __ EnterStubFrame();
  __ CallRuntime(kAllocateContextRuntimeEntry, 1);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateCloneContextStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    GenerateAllocateContextSpaceStub(assembler, &slow_case);
  __ StoreCompressedIntoObjectNoBarrier(
  __ subs(R1, R1, Operand(1));
  __ EnterStubFrame();
  __ CallRuntime(kCloneContextRuntimeEntry, 1);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateWriteBarrierWrappersStub() {
    intptr_t start = __ CodeSize();
    SPILLS_LR_TO_FRAME(__ Push(LR));
    RESTORES_LR_FROM_FRAME(__ Pop(LR));
    READS_RETURN_ADDRESS_FROM_LR(__ ret(LR));
    intptr_t end = __ CodeSize();
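// Write barrier helper (sketch): spills R2-R4, pushes the stored-into object
// onto the old/new marking stack block and/or the store buffer, calling the
// corresponding *BlockProcess runtime entries when a block fills up; the
// `cards` variant instead sets a card-table bit for large arrays, with
// RememberCard as the slow path.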
static void GenerateWriteBarrierStubHelper(Assembler* assembler, bool cards) {
  RegisterSet spill_set((1 << R2) | (1 << R3) | (1 << R4), 0);
  Label retry, is_new, done;
  __ PushRegisters(spill_set);
  __ cbnz(&retry, R4);
  auto mark_stack_push = [&](intptr_t offset, const RuntimeEntry& entry) {
    __ add(R2, R2, Operand(1));
    LeafRuntimeScope rt(assembler,
                  kOldMarkingStackBlockProcessRuntimeEntry);
                  kNewMarkingStackBlockProcessRuntimeEntry);
  __ PopRegisters(spill_set);
  Label add_to_remembered_set, remember_card;
  __ Bind(&add_to_remembered_set);
  __ Stop("Wrong barrier");
  __ PushRegisters(spill_set);
  __ LoadImmediate(TMP,
  __ cbnz(&retry, R4);
  __ add(R2, R2, Operand(1));
  LeafRuntimeScope rt(assembler,
  rt.Call(kStoreBufferBlockProcessRuntimeEntry, 1);
  __ PopRegisters(spill_set);
  Label remember_card_slow, retry;
  __ cbz(&remember_card_slow, TMP2);
  __ LoadImmediate(TMP, 1);
  __ PushRegister(R0);
  __ cbnz(&retry, R0);
  __ Bind(&remember_card_slow);
  LeafRuntimeScope rt(assembler,
  rt.Call(kRememberCardRuntimeEntry, 2);

void StubCodeCompiler::GenerateWriteBarrierStub() {
  GenerateWriteBarrierStubHelper(assembler, false);

void StubCodeCompiler::GenerateArrayWriteBarrierStub() {
  GenerateWriteBarrierStubHelper(assembler, true);
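// Generic object allocation fast path (sketch): read the instance size out of
// the tags word, bump-allocate in new space, initialize every field to null
// via paired stores of compressed null, and, for parameterized classes, store
// the type arguments at the field offset recorded in the class; tracing hooks
// and the slow path are mostly elided here.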
static void GenerateAllocateObjectHelper(Assembler* assembler,
                                         bool is_cls_parameterized) {
#if !defined(PRODUCT)
  __ MaybeTraceAllocation(kCidRegister, &slow_case, kTraceAllocationTempReg);
  __ ExtractInstanceSizeFromTags(kInstanceSizeReg, kTagsReg);
         Operand(kInstanceSizeReg));
  __ CompareRegisters(kEndReg, kNewTopReg);
#if defined(DART_COMPRESSED_POINTERS)
  __ andi(kWordOfNulls, NULL_REG, Immediate(0xFFFFFFFF));
  __ orr(kWordOfNulls, kWordOfNulls, Operand(kWordOfNulls, LSL, 32));
  __ stp(kWordOfNulls, kWordOfNulls,
  __ CompareRegisters(kFieldReg, kNewTopReg);
  __ WriteAllocationCanary(kNewTopReg);
  if (is_cls_parameterized) {
    Label not_parameterized_case;
    __ ExtractClassIdFromTags(kClsIdReg, kTagsReg);
    __ LoadClassById(kTypeOffsetReg, kClsIdReg);
        FieldAddress(kTypeOffsetReg,
                     host_type_arguments_field_offset_in_words_offset()),
    __ StoreCompressedIntoObjectNoBarrier(
    __ Bind(&not_parameterized_case);
  if (!is_cls_parameterized) {

void StubCodeCompiler::GenerateAllocateObjectStub() {
  GenerateAllocateObjectHelper(assembler, false);

void StubCodeCompiler::GenerateAllocateObjectParameterizedStub() {
  GenerateAllocateObjectHelper(assembler, true);

void StubCodeCompiler::GenerateAllocateObjectSlowStub() {
  if (!FLAG_precompiled_mode) {
  __ EnterStubFrame();
  __ CallRuntime(kAllocateObjectRuntimeEntry, 2);
  __ LeaveStubFrame();

    const Code& allocate_object,
    const Code& allocat_object_parametrized) {
  ASSERT(instance_size > 0);
  __ LoadImmediate(kTagsReg, tags);
  if (!FLAG_use_slow_path && FLAG_inline_alloc &&
    if (is_cls_parameterized) {
          CastHandle<Object>(allocat_object_parametrized))) {
      __ GenerateUnRelocatedPcRelativeTailCall();
      unresolved_calls->Add(new UnresolvedPcRelativeCall(
          __ CodeSize(), allocat_object_parametrized, true));
              allocate_object_parameterized_entry_point_offset()));
      __ GenerateUnRelocatedPcRelativeTailCall();
      unresolved_calls->Add(new UnresolvedPcRelativeCall(
          __ CodeSize(), allocate_object, true));
  if (!is_cls_parameterized) {

void StubCodeCompiler::GenerateCallClosureNoSuchMethodStub() {
  __ EnterStubFrame();
  __ LoadCompressedSmiFieldFromOffset(
  __ LoadCompressedSmiFieldFromOffset(
  const intptr_t kNumArgs = 4;
  __ CallRuntime(kNoSuchMethodFromPrologueRuntimeEntry, kNumArgs);
  if (FLAG_precompiled_mode) {
  if (FLAG_trace_optimized_ic_calls) {
    __ EnterStubFrame();
    __ CallRuntime(kTraceICCallRuntimeEntry, 2);
    __ LeaveStubFrame();
  __ add(R7, R7, Operand(1));
  if (FLAG_precompiled_mode) {
  if (FLAG_optimization_counter_threshold >= 0) {
    __ Comment("Increment function counter");
    __ LoadFieldFromOffset(
    __ AddImmediate(R7, 1);
    __ StoreFieldToOffset(R7, func_reg,

static void EmitFastSmiOp(Assembler* assembler,
                          Label* not_smi_or_overflow) {
  __ Comment("Fast Smi op");
  __ BranchIfNotSmi(TMP, not_smi_or_overflow);
  __ b(not_smi_or_overflow, VS);
  __ CompareObjectRegisters(R0, R1);
  __ CompareObjectRegisters(R0, R1);
  __ LoadCompressedSmiFromOffset(R1, R6, 0);
  __ Stop("Incorrect IC data");
  if (FLAG_optimization_counter_threshold >= 0) {
    const intptr_t count_offset =
    __ LoadCompressedSmiFromOffset(R1, R6, count_offset);

static void GenerateRecordEntryPoint(Assembler* assembler) {
  __ BindUncheckedEntryPoint();
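// The fragments below belong to the shared GenerateNArgsCheckInlineCacheStub
// body (parameters: num_args, handle_ic_miss, kind, optimized, type,
// exactness): it validates the ICData, optionally tries a fast Smi operation,
// probes the IC entries for the receiver class id(s) with a partially unrolled
// loop, updates hit counters, and either tail-calls the cached target or calls
// the IC-miss handler in the runtime. (Summary inferred from the excerpt.)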
    const RuntimeEntry& handle_ic_miss,
    Optimized optimized,
    Exactness exactness) {
  const bool save_entry_point = kind == Token::kILLEGAL;
  if (FLAG_precompiled_mode) {
  if (save_entry_point) {
  ASSERT(num_args == 1 || num_args == 2);
  __ LoadFromOffset(R6, R5,
  __ CompareImmediate(R6, num_args);
  __ Stop("Incorrect stub for IC data");
#if !defined(PRODUCT)
  Label stepping, done_stepping;
  __ Comment("Check single stepping");
  __ CompareRegisters(R6, ZR);
  __ b(&stepping, NE);
  Label not_smi_or_overflow;
  if (kind != Token::kILLEGAL) {
    EmitFastSmiOp(assembler, kind, num_args, &not_smi_or_overflow);
  __ Bind(&not_smi_or_overflow);
  __ Comment("Extract ICData initial values and receiver cid");
  __ LoadTaggedClassIdMayBeSmi(R3, R0);
  if (num_args == 2) {
    __ LoadCompressedSmiFieldFromOffset(
    __ sub(R7, R7, Operand(2));
    __ LoadTaggedClassIdMayBeSmi(R1, R1);
  __ LoadCompressedSmiFieldFromOffset(
  __ sub(R7, R7, Operand(1));
  __ LoadTaggedClassIdMayBeSmi(R3, R0);
  if (num_args == 2) {
    __ AddImmediate(R1, R7, -1);
    __ LoadTaggedClassIdMayBeSmi(R1, R1);
  const bool optimize = kind == Token::kILLEGAL;
  Label loop, found, miss;
  __ Comment("ICData loop");
  for (int unroll = optimize ? 4 : 2; unroll >= 0; unroll--) {
    __ LoadCompressedSmiFromOffset(R2, R6, 0);
    __ CompareObjectRegisters(R3, R2);
    if (num_args == 2) {
      __ CompareObjectRegisters(R1, R2);
    __ AddImmediate(R6, entry_size);
  __ Comment("IC miss");
  __ LoadCompressedSmiFieldFromOffset(
  __ sub(R7, R7, Operand(1));
  __ EnterStubFrame();
  if (save_entry_point) {
  for (intptr_t i = 0; i < num_args; i++) {
  __ CallRuntime(handle_ic_miss, num_args + 1);
  __ Drop(num_args + 1);
  if (save_entry_point) {
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  Label call_target_function;
  if (FLAG_precompiled_mode) {
    GenerateDispatcherCode(assembler, &call_target_function);
    __ b(&call_target_function);
  const intptr_t target_offset =
  const intptr_t count_offset =
  const intptr_t exactness_offset =
  Label call_target_function_through_unchecked_entry;
  __ LoadCompressedSmi(R1, Address(R6, exactness_offset));
  __ CompareImmediate(
  __ BranchIf(LESS, &exactness_ok);
  __ BranchIf(EQUAL, &call_target_function_through_unchecked_entry);
#if defined(DART_COMPRESSED_POINTERS)
  __ CompareObjectRegisters(R2, R3);
  __ BranchIf(EQUAL, &call_target_function_through_unchecked_entry);
  if (FLAG_optimization_counter_threshold >= 0) {
    __ Comment("Update caller's counter");
    __ LoadCompressedSmiFromOffset(R1, R6, count_offset);
  __ Comment("Call target");
  __ Bind(&call_target_function);
  if (save_entry_point) {
    __ ldr(R2, Address(R2, 0));
  __ Bind(&call_target_function_through_unchecked_entry);
  if (FLAG_optimization_counter_threshold >= 0) {
    __ Comment("Update ICData counter");
    __ LoadCompressedSmiFromOffset(R1, R6, count_offset);
  __ Comment("Call target (via unchecked entry point)");
  __ LoadFieldFromOffset(
#if !defined(PRODUCT)
  __ EnterStubFrame();
  if (save_entry_point) {
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  if (save_entry_point) {
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ b(&done_stepping);
void StubCodeCompiler::GenerateOneArgCheckInlineCacheStub() {
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateOneArgCheckInlineCacheWithExactnessCheckStub() {
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateTwoArgsCheckInlineCacheStub() {
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateSmiAddInlineCacheStub() {
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD, kUnoptimized,

void StubCodeCompiler::GenerateSmiLessInlineCacheStub() {
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kLT, kUnoptimized,

void StubCodeCompiler::GenerateSmiEqualInlineCacheStub() {
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ, kUnoptimized,

void StubCodeCompiler::GenerateOneArgOptimizedCheckInlineCacheStub() {
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL, kOptimized,

void StubCodeCompiler::
    GenerateOneArgOptimizedCheckInlineCacheWithExactnessCheckStub() {
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL, kOptimized,

void StubCodeCompiler::GenerateTwoArgsOptimizedCheckInlineCacheStub() {
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateZeroArgsUnoptimizedStaticCallStub() {
  __ LoadFromOffset(R6, R5,
  __ CompareImmediate(R6, 0);
  __ Stop("Incorrect IC data for unoptimized static call");
#if !defined(PRODUCT)
  Label stepping, done_stepping;
  __ CompareImmediate(R6, 0);
  __ b(&stepping, NE);
  const intptr_t target_offset =
  const intptr_t count_offset =
  if (FLAG_optimization_counter_threshold >= 0) {
    __ LoadCompressedSmiFromOffset(R1, R6, count_offset);
  __ ldr(R2, Address(R2, 0));
#if !defined(PRODUCT)
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ b(&done_stepping);

void StubCodeCompiler::GenerateOneArgUnoptimizedStaticCallStub() {

void StubCodeCompiler::GenerateTwoArgsUnoptimizedStaticCallStub() {
      2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL,

void StubCodeCompiler::GenerateLazyCompileStub() {
  __ EnterStubFrame();
  __ CallRuntime(kCompileFunctionRuntimeEntry, 1);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateICCallBreakpointStub() {
  __ Stop("No debugging in PRODUCT mode");
  __ EnterStubFrame();
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateUnoptStaticCallBreakpointStub() {
  __ Stop("No debugging in PRODUCT mode");
  __ EnterStubFrame();
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateRuntimeCallBreakpointStub() {
  __ Stop("No debugging in PRODUCT mode");
  __ EnterStubFrame();
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateDebugStepCheckStub() {
  __ Stop("No debugging in PRODUCT mode");
  Label stepping, done_stepping;
  __ CompareImmediate(R1, 0);
  __ b(&stepping, NE);
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ LeaveStubFrame();
  __ b(&done_stepping);
void StubCodeCompiler::GenerateSubtypeNTestCacheStub(Assembler* assembler,
  GenerateSubtypeTestCacheSearch(

void StubCodeCompiler::GenerateGetCStackPointerStub() {

void StubCodeCompiler::GenerateJumpToFrameStub() {
  __ set_lr_state(compiler::LRState::Clobbered());
  __ SetupCSPFromThread(THR);
#if defined(DART_TARGET_OS_FUCHSIA)
#elif defined(USING_SHADOW_CALL_STACK)
  Label exit_through_non_ffi;
  __ LoadFromOffset(tmp1, THR,
  __ cmp(tmp1, Operand(tmp2));
  __ b(&exit_through_non_ffi, NE);
  __ TransitionNativeToGenerated(tmp1, true,
  __ Bind(&exit_through_non_ffi);
  __ RestorePinnedRegisters();
  __ LoadImmediate(R2, VMTag::kDartTagId);
  __ RestoreCodePointer();
  if (FLAG_precompiled_mode) {
    __ SetupGlobalPoolAndDispatchTable();
  __ LoadPoolPointer();

void StubCodeCompiler::GenerateRunExceptionHandlerStub() {
  WRITES_RETURN_ADDRESS_TO_LR(
  word offset_from_thread = 0;
  __ LoadFromOffset(R2, THR, offset_from_thread);

void StubCodeCompiler::GenerateDeoptForRewindStub() {
  WRITES_RETURN_ADDRESS_TO_LR(
  __ EnterStubFrame();
  __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateOptimizeFunctionStub() {
  __ EnterStubFrame();
  __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1);
  __ LeaveStubFrame();

static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler,
  Label reference_compare, check_mint;
  __ BranchIfSmi(left, &reference_compare);
  __ BranchIfSmi(right, &reference_compare);
  __ CompareClassId(left, kDoubleCid);
  __ b(&check_mint, NE);
  __ CompareClassId(right, kDoubleCid);
  __ b(&reference_compare, NE);
  __ CompareRegisters(left, right);
  __ CompareClassId(left, kMintCid);
  __ b(&reference_compare, NE);
  __ CompareClassId(right, kMintCid);
  __ b(&reference_compare, NE);
  __ CompareRegisters(left, right);
  __ Bind(&reference_compare);
  __ CompareObjectRegisters(left, right);

void StubCodeCompiler::GenerateUnoptimizedIdenticalWithNumberCheckStub() {
#if !defined(PRODUCT)
  Label stepping, done_stepping;
  __ CompareImmediate(R1, 0);
  __ b(&stepping, NE);
  GenerateIdenticalWithNumberCheckStub(assembler, left, right);
#if !defined(PRODUCT)
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  __ RestoreCodePointer();
  __ LeaveStubFrame();
  __ b(&done_stepping);

void StubCodeCompiler::GenerateOptimizedIdenticalWithNumberCheckStub() {
  GenerateIdenticalWithNumberCheckStub(assembler, left, right);
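// Megamorphic and switchable call stubs (sketch): hash the receiver's class id
// into the MegamorphicCache buckets, probe for a matching entry, and jump to
// the cached target; on a probe failure, fall through to the
// SwitchableCallMiss runtime entry, which patches the call site.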
void StubCodeCompiler::GenerateMegamorphicCallStub() {
  __ BranchIfSmi(R0, &smi_case);
  __ LslImmediate(R3, R8, 3);
  __ LoadCompressedSmiFieldFromOffset(R6, TMP, base);
  __ CompareObjectRegisters(R6, R8);
  __ b(&probe_failed, NE);
  if (!FLAG_precompiled_mode) {
  __ LoadImmediate(R8, kSmiCid);
  GenerateSwitchableCallMissStub();

void StubCodeCompiler::GenerateICCallThroughCodeStub() {
  Label loop, found, miss;
  __ LoadTaggedClassIdMayBeSmi(R1, R0);
  __ LoadCompressedSmi(R2, Address(R8, 0));
  const intptr_t entry_length =
  __ AddImmediate(R8, entry_length);
  if (FLAG_precompiled_mode) {
    const intptr_t entry_offset =
    __ LoadCompressed(R1, Address(R8, entry_offset));
    const intptr_t code_offset =

void StubCodeCompiler::GenerateMonomorphicSmiableCheckStub() {
  __ LoadClassIdMayBeSmi(IP0, R0);

void StubCodeCompiler::GenerateSwitchableCallMissStub() {
  __ EnterStubFrame();
  __ CallRuntime(kSwitchableCallMissRuntimeEntry, 2);
  __ LeaveStubFrame();

void StubCodeCompiler::GenerateSingleTargetCallStub() {
  __ LoadClassIdMayBeSmi(R1, R0);
  __ EnterStubFrame();
  __ CallRuntime(kSwitchableCallMissRuntimeEntry, 2);
  __ LeaveStubFrame();

static int GetScaleFactor(intptr_t size) {
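// TypedData allocation (sketch): convert the Smi length into a byte size using
// the element-size scale factor, attempt a new-space bump allocation with
// overflow and limit checks, write the tags, length and internal data pointer,
// and fall back to the AllocateTypedData runtime entry on failure.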
void StubCodeCompiler::GenerateAllocateTypedDataArrayStub(intptr_t cid) {
  const intptr_t scale_shift = GetScaleFactor(element_size);
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    __ BranchIfNotSmi(R2, &call_runtime);
    __ b(&call_runtime, HI);
    __ LslImmediate(R2, R2, scale_shift);
    const intptr_t fixed_size_plus_alignment_padding =
    __ AddImmediate(R2, fixed_size_plus_alignment_padding);
    __ b(&call_runtime, CS);
    __ b(&call_runtime, CS);
    __ CheckAllocationCanary(R0);
    __ LoadImmediate(TMP, tags);
    __ StoreCompressedIntoObjectNoBarrier(
    __ StoreInternalPointer(
    __ WriteAllocationCanary(R1);
  __ EnterStubFrame();
  __ CallRuntime(kAllocateTypedDataRuntimeEntry, 2);
  __ LeaveStubFrame();