#define SHOULD_NOT_INCLUDE_RUNTIME

#if defined(TARGET_ARCH_IA32)

// ...
  rt.Call(kEnsureRememberedAndMarkingDeferredRuntimeEntry, 2);
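// NOTE (added annotation, inferred from the surviving lines below): the stub
// that follows bridges generated Dart code into the C++ runtime. It fills in
// a NativeArguments structure on the C stack with the thread pointer, the
// argument-count tag, the argument vector, and the return-value slot before
// invoking the runtime entry.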
void StubCodeCompiler::GenerateCallToRuntimeStub() {
  // ...
  __ Stop("Not coming from Dart code.");
  // ...
  __ movl(Address(ESP, thread_offset), THR);
  __ movl(Address(ESP, argc_tag_offset), EDX);
  // ...
  __ movl(Address(ESP, argv_offset), EAX);
  // ...
  __ movl(Address(ESP, retval_offset), EAX);
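// NOTE (added annotation): the safepoint stubs below switch the current
// thread between the generated-code and native execution states so the GC
// can run while the thread is blocked in native code; the runtime entry to
// invoke is loaded from a Thread-relative offset.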
void StubCodeCompiler::GenerateEnterSafepointStub() {
  // ...
  __ ReserveAlignedFrameSpace(0);
  __ movl(EAX, Address(THR, kEnterSafepointRuntimeEntry.OffsetFromThread()));
static void GenerateExitSafepointStubCommon(Assembler* assembler,
                                            uword runtime_entry_offset) {
  // ...
  __ ReserveAlignedFrameSpace(0);
  // ...
  __ movl(EAX, Address(THR, runtime_entry_offset));
  // ...

void StubCodeCompiler::GenerateExitSafepointStub() {
  GenerateExitSafepointStubCommon(
      assembler, kExitSafepointRuntimeEntry.OffsetFromThread());
}

void StubCodeCompiler::GenerateExitSafepointIgnoreUnwindInProgressStub() {
  GenerateExitSafepointStubCommon(
      assembler,
      kExitSafepointIgnoreUnwindInProgressRuntimeEntry.OffsetFromThread());
}
void StubCodeCompiler::GenerateCallNativeThroughSafepointStub() {
  // ...
  __ TransitionNativeToGenerated(ECX, true);
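// NOTE (added annotation): GenerateFfiCallbackTrampolineStub emits a block of
// small per-callback trampolines followed by one shared stub. Each trampoline
// performs a PC-relative call into the shared code, so the return address
// identifies which trampoline (and therefore which callback) was invoked; the
// shared stub then looks up the callback's metadata, handles the
// native-to-Dart thread-state transition, and invokes the target. Summary
// inferred from the kCallLength / load_tramp_addr / FfiCallbackMetadata
// constants used below.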
void StubCodeCompiler::GenerateFfiCallbackTrampolineStub() {
  // ...
  Label body, load_tramp_addr;
  const intptr_t kCallLength = 5;
  // ...
  const intptr_t size_before = __ CodeSize();
  __ call(&load_tramp_addr);
  const intptr_t size_after = __ CodeSize();
  // ...
      FfiCallbackMetadata::kNativeCallbackTrampolineSize *
  // ...
  const intptr_t shared_stub_start = __ CodeSize();
  // ...
  __ Bind(&load_tramp_addr);
  // ...
  __ subl(EAX, Immediate(kCallLength));
  // ...
  COMPILE_ASSERT(FfiCallbackMetadata::kNativeCallbackTrampolineStackDelta == 4);
  // ...
  __ cmpl(THR, Immediate(0));
  // ...
  __ EnterFullSafepoint(ECX);
  // ...
  __ ret(Immediate(4));
  // ...
  __ ReserveAlignedFrameSpace(0);
  // ...
  __ CallCFunction(EAX);
  // ...
      FfiCallbackMetadata::kNativeCallbackSharedStubSize);
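// NOTE (added annotation, inferred from the signatures below): the shared
// stub generators emit the common slow-path pattern used by the error stubs
// that follow (range error, write error, dispatch-table null error): save the
// requested registers, enter a stub frame, call the given runtime entry, and
// optionally leave the result in the result register.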
void StubCodeCompiler::GenerateSharedStubGeneric(
    bool save_fpu_registers,
    intptr_t self_code_stub_offset_from_thread,
    /* ... */) {
  // ...

void StubCodeCompiler::GenerateSharedStub(
    bool save_fpu_registers,
    const RuntimeEntry* target,
    intptr_t self_code_stub_offset_from_thread,
    /* ... */
    bool store_runtime_result_in_result_register) {
  // ...

void StubCodeCompiler::GenerateRangeError(bool with_fpu_regs) {
  // ...

void StubCodeCompiler::GenerateWriteError(bool with_fpu_regs) {
  // ...

void StubCodeCompiler::GenerateDispatchTableNullErrorStub() {
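// NOTE (added annotation): GenerateCallNativeWithWrapperStub builds the
// NativeArguments structure for a native call and dispatches through the
// wrapper entry point it is given; the no-scope, auto-scope, and bootstrap
// native-call stubs below differ only in which Thread-stored wrapper address
// they pass. The truncated offset initializers in this fragment have been
// reconstructed from the NativeArguments offset helpers they reference; treat
// them as a best-effort restoration rather than a verbatim copy.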
static void GenerateCallNativeWithWrapperStub(Assembler* assembler,
                                              Address wrapper_address) {
  const intptr_t native_args_struct_offset =
      target::NativeEntry::kNumCallWrapperArguments * target::kWordSize;
  const intptr_t thread_offset =
      target::NativeArguments::thread_offset() + native_args_struct_offset;
  const intptr_t argc_tag_offset =
      target::NativeArguments::argc_tag_offset() + native_args_struct_offset;
  const intptr_t argv_offset =
      target::NativeArguments::argv_offset() + native_args_struct_offset;
  const intptr_t retval_offset =
      target::NativeArguments::retval_offset() + native_args_struct_offset;
  // ...
  __ Stop("Not coming from Dart code.");
  // ...
  __ movl(Address(ESP, thread_offset), THR);
  __ movl(Address(ESP, argc_tag_offset), EDX);
  __ movl(Address(ESP, argv_offset), EAX);
  // ...
  __ movl(Address(ESP, retval_offset), EAX);
void StubCodeCompiler::GenerateCallNoScopeNativeStub() {
  GenerateCallNativeWithWrapperStub(
      // ...

void StubCodeCompiler::GenerateCallAutoScopeNativeStub() {
  GenerateCallNativeWithWrapperStub(
      // ...

void StubCodeCompiler::GenerateCallBootstrapNativeStub() {
  GenerateCallNativeWithWrapperStub(
      // ...
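// NOTE (added annotation): the next group of stubs handles JIT code patching.
// CallStaticFunction calls the runtime to resolve and patch a static call
// target, while the FixCallersTarget and FixAllocationStubTarget stubs
// re-resolve a caller's target after the callee has been invalidated or its
// allocation stub regenerated.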
void StubCodeCompiler::GenerateCallStaticFunctionStub() {
  // ...
  __ pushl(Immediate(0));
  __ CallRuntime(kPatchStaticCallRuntimeEntry, 0);
  // ...

void StubCodeCompiler::GenerateFixCallersTargetStub() {
  // ...
  __ BranchOnMonomorphicCheckedEntryJIT(&monomorphic);
  // ...
  __ pushl(Immediate(0));
  __ CallRuntime(kFixCallersTargetRuntimeEntry, 0);
  // ...
  __ pushl(Immediate(0));
  // ...
  __ CallRuntime(kFixCallersTargetMonomorphicRuntimeEntry, 2);
  // ...

void StubCodeCompiler::GenerateFixAllocationStubTargetStub() {
  // ...
  __ pushl(Immediate(0));
  __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);
  // ...

void StubCodeCompiler::GenerateFixParameterizedAllocationStubTargetStub() {
  // ...
  __ pushl(Immediate(0));
  __ CallRuntime(kFixAllocationStubTargetRuntimeEntry, 0);
static void PushArrayOfArguments(Assembler* assembler) {
  // ...
  const Immediate& raw_null = Immediate(target::ToRawPointer(NullObject()));
  __ movl(ECX, raw_null);
  // ...
  Label loop, loop_condition;
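// NOTE (added annotation): GenerateDeoptimizationSequence drives lazy
// deoptimization. It copies the optimized frame into a deoptimization
// context, rebuilds the unoptimized frame, and then calls the runtime to
// materialize objects removed by optimizations; the lazy-from-return and
// lazy-from-throw variants differ only in whether a result or an exception
// and stack trace must be preserved across the transition.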
static void GenerateDeoptimizationSequence(Assembler* assembler, /* ... */) {
  __ EnterDartFrame(0);
  // ...
  const intptr_t saved_result_slot_from_fp = /* ... */;
  const intptr_t saved_exception_slot_from_fp = /* ... */;
  const intptr_t saved_stacktrace_slot_from_fp = /* ... */;
  // ...
  LeafRuntimeScope rt(assembler, /* ... */);
  rt.Call(kDeoptimizeCopyFrameRuntimeEntry, 2);
  // ...
  __ EnterDartFrame(0);
  // ...
  LeafRuntimeScope rt(assembler, /* ... */);
  rt.Call(kDeoptimizeFillFrameRuntimeEntry, 1);
  // ...
  __ CallRuntime(kDeoptimizeMaterializeRuntimeEntry, 0);
  // ...
  __ CallRuntime(kReThrowRuntimeEntry, 3);
  // ...

void StubCodeCompiler::GenerateDeoptimizeLazyFromReturnStub() {
  // ...

void StubCodeCompiler::GenerateDeoptimizeLazyFromThrowStub() {
  // ...

void StubCodeCompiler::GenerateDeoptimizeStub() {
static void GenerateNoSuchMethodDispatcherCode(Assembler* assembler) {
  // ...
  __ movl(EDX, FieldAddress(/* ... */));
  // ...
          Address(EBP, EDI, TIMES_HALF_WORD_SIZE, /* ... */));
  __ pushl(Immediate(0));
  // ...
  PushArrayOfArguments(assembler);
  const intptr_t kNumArgs = 4;
  __ CallRuntime(kNoSuchMethodFromCallStubRuntimeEntry, kNumArgs);
  // ...

void StubCodeCompiler::GenerateNoSuchMethodDispatcherStub() {
  GenerateNoSuchMethodDispatcherCode(assembler);
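// NOTE (added annotation, summary inferred from the surviving lines): the
// array allocation stub tries an inline new-space allocation first (guarded
// by FLAG_inline_alloc): it checks the requested length, computes the rounded
// instance size, forms the tags word (falling back to a zero size tag when
// the size exceeds the size-tag range), and initializes the header fields.
// If the fast path fails it enters a stub frame and calls
// kAllocateArrayRuntimeEntry.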
void StubCodeCompiler::GenerateAllocateArrayStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    // ...
    const Immediate& max_len = /* ... */;
    // ...
    const intptr_t fixed_size_plus_alignment_padding = /* ... */;
    // ...
        fixed_size_plus_alignment_padding));
    // ...
    const intptr_t cid = kArrayCid;
    // ...
    Label size_tag_overflow, done;
    // ...
    __ Bind(&size_tag_overflow);
    __ movl(EDI, Immediate(0));
    // ...
    __ orl(EDI, Immediate(tags));
    // ...
    __ StoreIntoObjectNoBarrier(/* ... */);
    // ...
    __ WriteAllocationCanary(EBX);
  }
  // ...
  __ EnterStubFrame();
  __ pushl(Immediate(0));
  // ...
  __ CallRuntime(kAllocateArrayRuntimeEntry, 2);
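// NOTE (added annotation): GenerateInvokeDartCodeStub is the C++-to-Dart
// entry point. It sets up an entry frame, loads the Thread, arguments
// descriptor, and argument array from the incoming C stack slots, pushes the
// Dart arguments one by one, and then calls the target code's entry point.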
void StubCodeCompiler::GenerateInvokeDartCodeStub() {
  // ...
  __ movl(EAX, Address(EBP, kThreadOffset));
  // ...
#if defined(USING_SHADOW_CALL_STACK)
  // ...
  __ EmitEntryFrameVerification();
  // ...
  __ movl(EDX, Address(EBP, kArgumentsDescOffset));
  // ...
  Label args_count_ok;
  // ...
  __ movl(Address(EBP, kArgumentsDescOffset), EBX);
  // ...
  Label push_arguments;
  Label done_push_arguments;
  // ...
  __ movl(EAX, Immediate(0));
  // ...
  __ movl(EDI, Address(EBP, kArgumentsOffset));
  // ...
  __ Bind(&push_arguments);
  // ...
  __ Bind(&done_push_arguments);
  // ...
  __ movl(EAX, Address(EBP, kTargetCodeOffset));
  // ...
  __ movl(EDX, Address(EBP, kArgumentsDescOffset));
  // ...
#if defined(USING_SHADOW_CALL_STACK)
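// NOTE (added annotation, summary inferred from the surviving lines): the
// context stubs share GenerateAllocateContextSpaceStub for the inline
// new-space allocation, which scales the variable count into an allocation
// size and writes the tags and length. AllocateContext then null-initializes
// the parent and variables, while CloneContext copies them from an existing
// context; both fall back to a runtime call when inline allocation fails.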
static void GenerateAllocateContextSpaceStub(Assembler* assembler,
                                             Label* slow_case) {
  // ...
  intptr_t fixed_size_plus_alignment_padding = /* ... */;
  // ...
  __ leal(EBX, Address(EDX, TIMES_4, fixed_size_plus_alignment_padding));
  // ...
  __ CheckAllocationCanary(EAX);
  // ...
  Label size_tag_overflow, done;
  __ leal(EBX, Address(EDX, TIMES_4, fixed_size_plus_alignment_padding));
  // ...
  __ Bind(&size_tag_overflow);
  __ movl(EBX, Immediate(0));
  // ...
  __ orl(EBX, Immediate(tags));
void StubCodeCompiler::GenerateAllocateContextStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    // ...
    GenerateAllocateContextSpaceStub(assembler, &slow_case);
    // ...
    __ StoreObjectIntoObjectNoBarrier(/* ... */);
    // ...
    __ cmpl(EDX, Immediate(0));
    // ...
  }
  __ EnterStubFrame();
  __ pushl(Immediate(0));
  // ...
  __ CallRuntime(kAllocateContextRuntimeEntry, 1);
  // ...

void StubCodeCompiler::GenerateCloneContextStub() {
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    // ...
    GenerateAllocateContextSpaceStub(assembler, &slow_case);
    // ...
    __ StoreIntoObjectNoBarrier(/* ... */);
    // ...
    __ StoreIntoObjectNoBarrier(/* ... */);
    // ...
    __ cmpl(EDX, Immediate(0));
    // ...
  }
  __ EnterStubFrame();
  __ pushl(Immediate(0));
  // ...
  __ CallRuntime(kCloneContextRuntimeEntry, 1);
void StubCodeCompiler::GenerateWriteBarrierWrappersStub() {
  // ...
  intptr_t start = __ CodeSize();
  // ...
  intptr_t end = __ CodeSize();
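// NOTE (added annotation): GenerateWriteBarrierStubHelper emits both barrier
// flavors. The marking path pushes the written value onto the old- or
// new-space marking stack, calling the corresponding block-process runtime
// entry when a block fills up; the generational path adds the written-to
// object to the store buffer or, when "cards" is true (large arrays), marks
// the page's card table, with kRememberCardRuntimeEntry as the slow path.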
static void GenerateWriteBarrierStubHelper(Assembler* assembler, bool cards) {
  // ...
  __ j(ZERO, &skip_marking);
  // ...
  Label retry, is_new, done;
  // ...
  auto mark_stack_push = [&](intptr_t offset, const RuntimeEntry& entry) {
    // ...
    LeafRuntimeScope rt(assembler, /* ... */);
    // ...
  // ...
      kOldMarkingStackBlockProcessRuntimeEntry);
  // ...
      kNewMarkingStackBlockProcessRuntimeEntry);
  // ...
  Label add_to_remembered_set, remember_card;
  // ...
  __ Bind(&add_to_remembered_set);
  // ...
  __ Stop("Wrong barrier");
  // ...
  LeafRuntimeScope rt(assembler, /* ... */);
  rt.Call(kStoreBufferBlockProcessRuntimeEntry, 1);
  // ...
  Label remember_card_slow;
  // ...
  __ movl(EBX, Immediate(1));
  // ...
  __ Bind(&remember_card_slow);
  // ...
  LeafRuntimeScope rt(assembler, /* ... */);
  rt.Call(kRememberCardRuntimeEntry, 2);
void StubCodeCompiler::GenerateWriteBarrierStub() {
  GenerateWriteBarrierStubHelper(assembler, /*cards=*/false);
}

void StubCodeCompiler::GenerateArrayWriteBarrierStub() {
  GenerateWriteBarrierStubHelper(assembler, /*cards=*/true);
}

void StubCodeCompiler::GenerateAllocateObjectStub() {
  // ...

void StubCodeCompiler::GenerateAllocateObjectParameterizedStub() {
  // ...

void StubCodeCompiler::GenerateAllocateObjectSlowStub() {
  // ...
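// NOTE (added annotation): GenerateAllocationStubForClass emits a
// class-specific allocation stub. The opening of its signature was missing
// from this fragment and has been restored from its declaration
// (unresolved_calls, cls, allocate_object, allocat_object_parametrized, which
// is the identifier the SDK itself uses). The fast path inline-allocates and
// null-initializes the instance, stores the type arguments when the class is
// parameterized, and otherwise falls back to kAllocateObjectRuntimeEntry.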
void StubCodeCompiler::GenerateAllocationStubForClass(
    UnresolvedPcRelativeCalls* unresolved_calls,
    const Class& cls,
    const Code& allocate_object,
    const Code& allocat_object_parametrized) {
  const Immediate& raw_null = Immediate(target::ToRawPointer(NullObject()));
  // ...
  const int kInlineInstanceSize = 12;
  // ...
  ASSERT(instance_size > 0);
  // ...
  if (!FLAG_use_slow_path && FLAG_inline_alloc &&
      /* ... */) {
    // ...
         current_offset < instance_size;
    // ...
    __ StoreObjectIntoObjectNoBarrier(/* ... */);
    // ...
    __ WriteAllocationCanary(EBX);
    // ...
    if (is_cls_parameterized) {
      // ...
      __ StoreIntoObjectNoBarrier(/* ... */);
    }
    // ...
  }
  __ EnterStubFrame();
  // ...
      CastHandle<Object>(cls));
  if (is_cls_parameterized) {
    // ...
  }
  // ...
  __ CallRuntime(kAllocateObjectRuntimeEntry, 2);
void StubCodeCompiler::GenerateCallClosureNoSuchMethodStub() {
  __ EnterStubFrame();
  // ...
  __ pushl(Immediate(0));
  // ...
  Label args_count_ok;
  // ...
  const intptr_t kNumArgs = 4;
  __ CallRuntime(kNoSuchMethodFromPrologueRuntimeEntry, kNumArgs);
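// NOTE (added annotation): the next two fragments belong to the usage-counter
// helpers, GenerateOptimizedUsageCounterIncrement (which optionally traces
// optimized IC calls) and GenerateUsageCounterIncrement (which bumps the
// function's usage counter when optimization is enabled). Their headers were
// missing here and have been restored from their declarations.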
void StubCodeCompiler::GenerateOptimizedUsageCounterIncrement() {
  // ...
  if (FLAG_trace_optimized_ic_calls) {
    __ EnterStubFrame();
    // ...
    __ CallRuntime(kTraceICCallRuntimeEntry, 2);
    // ...
  }
  // ...

void StubCodeCompiler::GenerateUsageCounterIncrement(Register temp_reg) {
  if (FLAG_optimization_counter_threshold >= 0) {
    // ...
    __ Comment("Increment function counter");
static void EmitFastSmiOp(Assembler* assembler,
                          Token::Kind kind,
                          intptr_t num_args,
                          Label* not_smi_or_overflow) {
  __ Comment("Fast Smi op");
  // ...
  Label done, is_true;
  // ...
  Label done, is_true;
  // ...
  __ Stop("Incorrect IC data");
  // ...
  if (FLAG_optimization_counter_threshold >= 0) {
    const intptr_t count_offset = /* ... */;
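// NOTE (added annotation): the inline-cache (IC) stubs are generated by
// GenerateNArgsCheckInlineCacheStub, which emits the check twice: once for
// the normal entry and once for the unchecked entry point. The opening lines
// of both signatures below were missing from this fragment and have been
// restored from their declarations (num_args, handle_ic_miss, kind,
// optimized, type, exactness, and, for the per-entry-kind variant,
// entry_kind); treat them as reconstructions.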
void StubCodeCompiler::GenerateNArgsCheckInlineCacheStub(
    intptr_t num_args, const RuntimeEntry& handle_ic_miss, Token::Kind kind,
    Optimized optimized, CallType type, Exactness exactness) {
  GenerateNArgsCheckInlineCacheStubForEntryKind(num_args, handle_ic_miss, kind,
                                                optimized, type, exactness,
                                                CodeEntryKind::kNormal);
  __ BindUncheckedEntryPoint();
  GenerateNArgsCheckInlineCacheStubForEntryKind(num_args, handle_ic_miss, kind,
                                                optimized, type, exactness,
                                                CodeEntryKind::kUnchecked);
}

void StubCodeCompiler::GenerateNArgsCheckInlineCacheStubForEntryKind(
    intptr_t num_args, const RuntimeEntry& handle_ic_miss, Token::Kind kind,
    Optimized optimized, CallType type, Exactness exactness,
    CodeEntryKind entry_kind) {
  ASSERT(num_args == 1 || num_args == 2);
  // ...
  __ cmpl(EAX, Immediate(num_args));
  // ...
  __ Stop("Incorrect stub for IC data");
  // ...
#if !defined(PRODUCT)
  Label stepping, done_stepping;
  // ...
  __ Comment("Check single stepping");
  __ LoadIsolate(EAX);
  // ...
  Label not_smi_or_overflow;
  if (kind != Token::kILLEGAL) {
    EmitFastSmiOp(assembler, kind, num_args, &not_smi_or_overflow);
  }
  __ Bind(&not_smi_or_overflow);

  __ Comment("Extract ICData initial values and receiver cid");
  // ...
  Label loop, found, miss;
  // ...
  if (num_args == 1) {
    // ...
    __ LoadTaggedClassIdMayBeSmi(EAX, EDI);
  }
  // ...
  __ Comment("ICData loop");

  bool optimize = kind == Token::kILLEGAL;
  const intptr_t target_offset = /* ... */;
  const intptr_t count_offset = /* ... */;
  const intptr_t exactness_offset = /* ... */;
  // ...
  for (int unroll = optimize ? 4 : 2; unroll >= 0; unroll--) {
    // ...
    if (num_args == 1) {
      // ...
      __ addl(EBX, Immediate(entry_size));
      // ...
      __ LoadTaggedClassIdMayBeSmi(EDI, EDI);
      // ...
      __ LoadTaggedClassIdMayBeSmi(EDI, EDI);
      // ...
      __ addl(EBX, Immediate(entry_size));
      __ cmpl(Address(EBX, -entry_size), /* ... */);
  // ...
  __ Comment("IC miss");
  // ...
  __ EnterStubFrame();
  // ...
  __ pushl(Immediate(0));
  // ...
  for (intptr_t i = 0; i < num_args; i++) {
    // ...
  }
  __ CallRuntime(handle_ic_miss, num_args + 1);
  // ...
  for (intptr_t i = 0; i < num_args + 1; i++) {
    // ...
  }
  // ...
  Label call_target_function;
  ASSERT(!FLAG_precompiled_mode);
  __ jmp(&call_target_function);
  // ...
  Label call_target_function_through_unchecked_entry;
  // ...
  __ movl(EDI, Address(EBX, exactness_offset));
  // ...
  __ j(LESS, &exactness_ok);
  __ j(EQUAL, &call_target_function_through_unchecked_entry);
  // ...
  __ j(EQUAL, &call_target_function_through_unchecked_entry);
  // ...
  __ movl(Address(EBX, exactness_offset), /* ... */);
  // ...
  if (FLAG_optimization_counter_threshold >= 0) {
    __ Comment("Update caller's counter");
    // ...
  }
  __ Bind(&call_target_function);
  __ Comment("Call target");
  // ...
  __ Bind(&call_target_function_through_unchecked_entry);
  if (FLAG_optimization_counter_threshold >= 0) {
    __ Comment("Update ICData counter");
    // ...
  }
  __ Comment("Call target (via unchecked entry point)");
  // ...
#if !defined(PRODUCT)
  // ...
  __ EnterStubFrame();
  // ...
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  // ...
  __ jmp(&done_stepping);
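// NOTE (added annotation): each of the concrete IC stubs below dispatches to
// GenerateNArgsCheckInlineCacheStub with the appropriate argument count, miss
// handler, token kind, and optimization mode. The call-opening lines were
// missing from this fragment and have been restored; trailing arguments that
// were elided are marked with /* ... */.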
void StubCodeCompiler::GenerateOneArgCheckInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL, /* ... */);
}

void StubCodeCompiler::GenerateOneArgCheckInlineCacheWithExactnessCheckStub() {
  GenerateNArgsCheckInlineCacheStub(
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL, /* ... */);
}

void StubCodeCompiler::GenerateAllocateMintSharedWithFPURegsStub() {
  __ Stop("Unimplemented");
}

void StubCodeCompiler::GenerateAllocateMintSharedWithoutFPURegsStub() {
  __ Stop("Unimplemented");
}

void StubCodeCompiler::GenerateTwoArgsCheckInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL, /* ... */);
}

void StubCodeCompiler::GenerateSmiAddInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kADD, kUnoptimized,
      /* ... */);
}

void StubCodeCompiler::GenerateSmiLessInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kLT, kUnoptimized,
      /* ... */);
}

void StubCodeCompiler::GenerateSmiEqualInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kEQ, kUnoptimized,
      /* ... */);
}

void StubCodeCompiler::GenerateOneArgOptimizedCheckInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL, kOptimized,
      /* ... */);
}

void StubCodeCompiler::
    GenerateOneArgOptimizedCheckInlineCacheWithExactnessCheckStub() {
  GenerateNArgsCheckInlineCacheStub(
      1, kInlineCacheMissHandlerOneArgRuntimeEntry, Token::kILLEGAL, kOptimized,
      /* ... */);
}

void StubCodeCompiler::GenerateTwoArgsOptimizedCheckInlineCacheStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kInlineCacheMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL, /* ... */);
}
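// NOTE (added annotation): the unoptimized static-call stubs below reuse the
// same IC machinery. The zero-argument variant bumps the usage counter and
// calls through the ICData target directly, while the one- and two-argument
// variants dispatch through GenerateNArgsCheckInlineCacheStub with the
// static-call miss handler. The entry-kind parameter and the CodeEntryKind
// arguments restored below are reconstructions of lines elided from this
// fragment.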
static void GenerateZeroArgsUnoptimizedStaticCallForEntryKind(
    StubCodeCompiler* stub_code_compiler,
    CodeEntryKind entry_kind) {
  stub_code_compiler->GenerateUsageCounterIncrement(EAX);
  auto* const assembler = stub_code_compiler->assembler;
  // ...
  __ cmpl(EBX, Immediate(0));
  // ...
  __ Stop("Incorrect IC data for unoptimized static call");
  // ...
#if !defined(PRODUCT)
  Label stepping, done_stepping;
  __ LoadIsolate(EAX);
  // ...
  const intptr_t target_offset = /* ... */;
  const intptr_t count_offset = /* ... */;
  // ...
  if (FLAG_optimization_counter_threshold >= 0) {
    // ...
  }
  // ...
#if !defined(PRODUCT)
  // ...
  __ EnterStubFrame();
  // ...
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  // ...

void StubCodeCompiler::GenerateZeroArgsUnoptimizedStaticCallStub() {
  GenerateZeroArgsUnoptimizedStaticCallForEntryKind(this,
                                                    CodeEntryKind::kNormal);
  __ BindUncheckedEntryPoint();
  GenerateZeroArgsUnoptimizedStaticCallForEntryKind(this,
                                                    CodeEntryKind::kUnchecked);
}

void StubCodeCompiler::GenerateOneArgUnoptimizedStaticCallStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL, /* ... */);
}

void StubCodeCompiler::GenerateTwoArgsUnoptimizedStaticCallStub() {
  GenerateNArgsCheckInlineCacheStub(
      2, kStaticCallMissHandlerTwoArgsRuntimeEntry, Token::kILLEGAL, /* ... */);
}
void StubCodeCompiler::GenerateLazyCompileStub() {
  __ EnterStubFrame();
  // ...
  __ CallRuntime(kCompileFunctionRuntimeEntry, 1);
  // ...

void StubCodeCompiler::GenerateICCallBreakpointStub() {
  // ...
  __ Stop("No debugging in PRODUCT mode");
  // ...
  __ EnterStubFrame();
  // ...
  __ pushl(Immediate(0));
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  // ...

void StubCodeCompiler::GenerateUnoptStaticCallBreakpointStub() {
  // ...
  __ Stop("No debugging in PRODUCT mode");
  // ...
  __ EnterStubFrame();
  // ...
  __ pushl(Immediate(0));
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  // ...

void StubCodeCompiler::GenerateRuntimeCallBreakpointStub() {
  // ...
  __ Stop("No debugging in PRODUCT mode");
  // ...
  __ EnterStubFrame();
  // ...
  __ pushl(Immediate(0));
  __ CallRuntime(kBreakpointRuntimeHandlerRuntimeEntry, 0);
  // ...

void StubCodeCompiler::GenerateDebugStepCheckStub() {
  // ...
  __ Stop("No debugging in PRODUCT mode");
  // ...
  Label stepping, done_stepping;
  __ LoadIsolate(EAX);
  // ...
  __ cmpl(EAX, Immediate(0));
  // ...
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
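// NOTE (added annotation): the subtype-test-cache (STC) machinery follows.
// STCInternal fixes the stack depths and registers used by the IA32
// implementation, GenerateSubtypeTestCacheLoop emits the comparison of one
// cache entry against the inputs on the stack, and
// GenerateSubtypeNTestCacheStub loads the instance's cid or closure signature
// plus its type arguments and walks the cache until it finds a matching
// entry or the null terminator. Parameters missing from this fragment (an
// input-count `n`, whose exact type is assumed, and the found / not_found
// labels) have been filled in from the call sites.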
struct STCInternal : AllStatic {
  // ...
  static constexpr intptr_t kNoDepth = kIntptrMin;
  // ...
  static constexpr intptr_t kFunctionTypeArgumentsDepth = 1;
  static constexpr intptr_t kInstantiatorTypeArgumentsDepth = 2;
  static constexpr intptr_t kDestinationTypeDepth = 3;
  static constexpr intptr_t kInstanceDepth = 4;
  static constexpr intptr_t kCacheDepth = 5;
  // ...
  static constexpr Register kCacheArrayReg = /* ... */;
  static constexpr Register kInstanceCidOrSignatureReg = /* ... */;
  static constexpr Register kInstanceInstantiatorTypeArgumentsReg = /* ... */;
static void GenerateSubtypeTestCacheLoop(
    Assembler* assembler,
    intptr_t n,
    intptr_t original_tos_offset,
    intptr_t parent_function_type_args_depth,
    intptr_t delayed_type_args_depth,
    Label* found,
    Label* not_found,
    Label* next_iteration) {
  const auto& raw_null = Immediate(target::ToRawPointer(NullObject()));
  // ...
  auto compare_to_stack = [&](Register src, intptr_t depth) {
    ASSERT(original_tos_offset + depth >= 0);
    __ CompareToStack(src, original_tos_offset + depth);
    // ...
  // ...
  __ LoadAcquireCompressedFromOffset(
      STCInternal::kScratchReg, STCInternal::kCacheArrayReg,
      /* ... */);
  // ...
  __ cmpl(STCInternal::kScratchReg, raw_null);
  // ...
  __ cmpl(STCInternal::kScratchReg, STCInternal::kInstanceCidOrSignatureReg);
  // ...
  __ cmpl(STCInternal::kInstanceInstantiatorTypeArgumentsReg,
          Address(STCInternal::kCacheArrayReg, /* ... */));
  // ...
  __ movl(STCInternal::kScratchReg,
          Address(STCInternal::kCacheArrayReg, /* ... */));
  compare_to_stack(STCInternal::kScratchReg,
                   STCInternal::kInstantiatorTypeArgumentsDepth);
  // ...
  __ movl(STCInternal::kScratchReg,
          Address(STCInternal::kCacheArrayReg, /* ... */));
  compare_to_stack(STCInternal::kScratchReg,
                   STCInternal::kFunctionTypeArgumentsDepth);
  // ...
      STCInternal::kScratchReg,
      STCInternal::kCacheArrayReg,
      // ...
  compare_to_stack(STCInternal::kScratchReg, parent_function_type_args_depth);
  // ...
      STCInternal::kScratchReg,
      STCInternal::kCacheArrayReg,
      // ...
  compare_to_stack(STCInternal::kScratchReg, delayed_type_args_depth);
  // ...
      STCInternal::kScratchReg,
      Address(STCInternal::kCacheArrayReg, /* ... */),
      // ...
  compare_to_stack(STCInternal::kScratchReg,
                   STCInternal::kDestinationTypeDepth);
void StubCodeCompiler::GenerateSubtypeNTestCacheStub(Assembler* assembler,
                                                     intptr_t n) {
  // ...
  const auto& raw_null = Immediate(target::ToRawPointer(NullObject()));
  // ...
  __ LoadFromStack(STCInternal::kCacheArrayReg, STCInternal::kCacheDepth);
  // ...
  __ LoadFromSlot(STCInternal::kScratchReg, STCInternal::kCacheArrayReg,
                  Slot::SubtypeTestCache_num_inputs());
  __ CompareImmediate(STCInternal::kScratchReg, n);
  // ...
  __ movl(STCInternal::kCacheArrayReg,
          FieldAddress(STCInternal::kCacheArrayReg, /* ... */));
  // ...
  __ LoadFromSlot(STCInternal::kScratchReg, STCInternal::kCacheArrayReg,
                  Slot::Array_length());
  __ CompareImmediate(STCInternal::kScratchReg, /* ... */);
  // ...
  __ AddImmediate(STCInternal::kCacheArrayReg, /* ... */);
  // ...
  Label loop, not_closure;
  // ...
  __ LoadClassIdMayBeSmi(STCInternal::kInstanceCidOrSignatureReg, /* ... */);
  // ...
  __ LoadClassId(STCInternal::kInstanceCidOrSignatureReg, /* ... */);
  // ...
  __ cmpl(STCInternal::kInstanceCidOrSignatureReg, Immediate(kClosureCid));
  // ...
  __ movl(STCInternal::kInstanceCidOrSignatureReg, /* ... */);
  // ...
  __ movl(STCInternal::kInstanceCidOrSignatureReg,
          FieldAddress(STCInternal::kInstanceCidOrSignatureReg, /* ... */));
  // ...
      STCInternal::kInstanceInstantiatorTypeArgumentsReg,
  // ...
  Label has_no_type_arguments;
  __ LoadClassById(STCInternal::kScratchReg,
                   STCInternal::kInstanceCidOrSignatureReg);
  __ movl(STCInternal::kInstanceInstantiatorTypeArgumentsReg, raw_null);
  // ...
      STCInternal::kScratchReg,
      FieldAddress(STCInternal::kScratchReg,
                   /* ... */ host_type_arguments_field_offset_in_words_offset()));
  __ cmpl(STCInternal::kScratchReg, /* ... */);
  // ...
  __ movl(STCInternal::kInstanceInstantiatorTypeArgumentsReg, /* ... */);
  // ...
  __ Bind(&has_no_type_arguments);
  // ...
  __ SmiTag(STCInternal::kInstanceCidOrSignatureReg);
  // ...
  intptr_t original_tos_offset = 0;
  // ...
  intptr_t kInstanceParentFunctionTypeArgumentsDepth = STCInternal::kNoDepth;
  intptr_t kInstanceDelayedFunctionTypeArgumentsDepth = STCInternal::kNoDepth;
  // ...
    original_tos_offset++;
    kInstanceParentFunctionTypeArgumentsDepth = -original_tos_offset;
  // ...
    original_tos_offset++;
    kInstanceDelayedFunctionTypeArgumentsDepth = -original_tos_offset;
  // ...
  Label found, not_found, done, next_iteration;
  // ...
  GenerateSubtypeTestCacheLoop(assembler, n, original_tos_offset,
                               kInstanceParentFunctionTypeArgumentsDepth,
                               kInstanceDelayedFunctionTypeArgumentsDepth,
                               &found, &not_found, &next_iteration);
  __ Bind(&next_iteration);
  __ addl(STCInternal::kCacheArrayReg, /* ... */);
  // ...
  __ Drop(original_tos_offset);
  // ...
      Address(STCInternal::kCacheArrayReg, /* ... */);
  // ...
  __ Drop(original_tos_offset);
void StubCodeCompiler::GenerateGetCStackPointerStub() {
  // ...

void StubCodeCompiler::GenerateJumpToFrameStub() {
  // ...
#if defined(USING_SHADOW_CALL_STACK)
  // ...
  Label exit_through_non_ffi;
  // ...
  __ cmpl(compiler::Address(/* ... */), /* ... */);
  // ...
  __ TransitionNativeToGenerated(ECX, true, /* ... */);
  __ Bind(&exit_through_non_ffi);
  // ...

void StubCodeCompiler::GenerateRunExceptionHandlerStub() {
  // ...
  __ movl(exception_addr, ECX);
  // ...
  __ movl(stacktrace_addr, ECX);
void StubCodeCompiler::GenerateDeoptForRewindStub() {
  // ...
  __ EnterStubFrame();
  __ CallRuntime(kRewindPostDeoptRuntimeEntry, 0);
  // ...

void StubCodeCompiler::GenerateOptimizeFunctionStub() {
  // ...
  __ EnterStubFrame();
  // ...
  __ pushl(Immediate(0));
  // ...
  __ CallRuntime(kOptimizeInvokedFunctionRuntimeEntry, 1);
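// NOTE (added annotation): the identical-with-number-check helper compares
// two objects for identity, treating doubles and mints by value and falling
// back to a reference comparison otherwise. The register parameters missing
// from this fragment (left, right, temp) have been filled in from the call
// sites below.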
static void GenerateIdenticalWithNumberCheckStub(Assembler* assembler,
                                                 const Register left,
                                                 const Register right,
                                                 const Register temp) {
  // ...
  Label reference_compare, done, check_mint;
  // ...
  __ CompareClassId(left, kDoubleCid, temp);
  // ...
  __ CompareClassId(right, kDoubleCid, temp);
  // ...
  __ CompareClassId(left, kMintCid, temp);
  // ...
  __ CompareClassId(right, kMintCid, temp);
  // ...
  __ Bind(&reference_compare);
  __ cmpl(left, right);
  // ...

void StubCodeCompiler::GenerateUnoptimizedIdenticalWithNumberCheckStub() {
#if !defined(PRODUCT)
  // ...
  Label stepping, done_stepping;
  __ LoadIsolate(EAX);
  // ...
  __ cmpl(EAX, Immediate(0));
  // ...
  GenerateIdenticalWithNumberCheckStub(assembler, left, right, temp);
  // ...
#if !defined(PRODUCT)
  // ...
  __ EnterStubFrame();
  __ CallRuntime(kSingleStepHandlerRuntimeEntry, 0);
  // ...
  __ jmp(&done_stepping);
  // ...

void StubCodeCompiler::GenerateOptimizedIdenticalWithNumberCheckStub() {
  // ...
  GenerateIdenticalWithNumberCheckStub(assembler, left, right, temp);
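// NOTE (added annotation): the megamorphic and switchable call stubs below
// implement the polymorphic call paths. MegamorphicCall hashes the receiver's
// class id into the megamorphic cache (loading kSmiCid for Smi receivers) and
// on a miss falls through to SwitchableCallMiss, which enters a stub frame
// and asks the runtime to re-resolve and patch the call site.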
void StubCodeCompiler::GenerateMegamorphicCallStub() {
  // ...
  __ movl(EAX, Immediate(kSmiCid));
  __ jmp(&cid_loaded);
  // ...
  GenerateSwitchableCallMissStub();
  // ...

void StubCodeCompiler::GenerateICCallThroughCodeStub() {
  // ...

void StubCodeCompiler::GenerateMonomorphicSmiableCheckStub() {
  // ...

void StubCodeCompiler::GenerateSwitchableCallMissStub() {
  // ...
  __ EnterStubFrame();
  // ...
  __ pushl(Immediate(0));
  __ pushl(Immediate(0));
  // ...
  __ CallRuntime(kSwitchableCallMissRuntimeEntry, 2);
  // ...

void StubCodeCompiler::GenerateSingleTargetCallStub() {
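// NOTE (added annotation): the typed-data allocation stub is parameterized by
// class id. Its fast path checks the requested length against the class's
// maximum new-space element count, computes the rounded allocation size,
// writes the tags, length, and internal data pointer, and falls back to
// kAllocateTypedDataRuntimeEntry when inline allocation is disabled or fails.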
void StubCodeCompiler::GenerateAllocateTypedDataArrayStub(intptr_t cid) {
  // ...
  if (!FLAG_use_slow_path && FLAG_inline_alloc) {
    // ...
    __ cmpl(EDI, Immediate(max_len));
    // ...
    const intptr_t fixed_size_plus_alignment_padding = /* ... */;
    // ...
    __ leal(EDI, Address(EDI, scale_factor, fixed_size_plus_alignment_padding));
    // ...
    __ CheckAllocationCanary(EAX);
    // ...
    Label size_tag_overflow, done;
    // ...
    __ Bind(&size_tag_overflow);
    __ movl(EDI, Immediate(0));
    // ...
    __ orl(EDI, Immediate(tags));
    // ...
    __ StoreIntoObjectNoBarrier(/* ... */);
    // ...
    __ StoreInternalPointer(/* ... */);
    // ...
    __ WriteAllocationCanary(EBX);
    // ...
  }
  __ EnterStubFrame();
  __ PushObject(Object::null_object());
  // ...
  __ CallRuntime(kAllocateTypedDataRuntimeEntry, 2);
  // ...
  __ LeaveStubFrame();
  // ...
}

#endif  // defined(TARGET_ARCH_IA32)