39#if !defined(DART_PRECOMPILED_RUNTIME)
49 max_subtype_cache_entries,
51 "Maximum number of subtype cache entries (number of checks cached).");
54 regexp_optimization_counter_threshold,
56 "RegExp's usage-counter value before it is optimized, -1 means never");
58 reoptimization_counter_threshold,
60 "Counter threshold before a function gets reoptimized.");
64 "Use old-space for allocation via runtime calls.");
66 runtime_allocate_spill_tlab,
68 "Ensure results of allocation via runtime calls are not in an "
70DEFINE_FLAG(
bool, trace_deoptimization,
false,
"Trace deoptimization");
72 trace_deoptimization_verbose,
74 "Trace deoptimization verbose");
81DEFINE_FLAG(
bool, trace_osr,
false,
"Trace attempts at on-stack replacement.");
83DEFINE_FLAG(
int, gc_every, 0,
"Run major GC on every N stack overflow checks");
87 "Compute debugger stacktrace on every N stack overflow checks");
91 "Compute stacktrace in named function on stack overflow checks");
95 "Deoptimize in named function on stack overflow checks");
97 deoptimize_on_runtime_call_name_filter,
99 "Runtime call name filter for --deoptimize-on-runtime-call-every.");
102 unopt_monomorphic_calls,
104 "Enable specializing monomorphic calls from unoptimized code.");
106 unopt_megamorphic_calls,
108 "Enable specializing megamorphic calls from unoptimized code.");
110 verbose_stack_overflow,
112 "Print additional details about stack overflow.");
119 const Instance&
length = Instance::CheckedHandle(zone, arguments.ArgAt(0));
120 const Instance& index = Instance::CheckedHandle(zone, arguments.ArgAt(1));
121 if (!
length.IsInteger()) {
125 args.SetAt(1, Symbols::Length());
129 if (!index.IsInteger()) {
132 args.SetAt(0, index);
133 args.SetAt(1, Symbols::Index());
139 args.SetAt(0, index);
143 zone, Integer::Cast(
length).ArithmeticOp(
145 args.SetAt(3, Symbols::Length());
150 int64_t unboxed_length = thread->unboxed_int64_runtime_arg();
151 int64_t unboxed_index = thread->unboxed_int64_runtime_second_arg();
156 args.SetAt(0, index);
160 zone, Integer::Cast(
length).ArithmeticOp(
162 args.SetAt(3, Symbols::Length());
167 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
168 const Smi& kind = Smi::CheckedHandle(zone, arguments.ArgAt(1));
170 switch (kind.
Value()) {
177 "Cannot attach NativeFinalizer to deeply immutable object: %s",
188 bool is_param_name =
false) {
205 zone,
String::New(
"Null check operator used on a null value")));
222 args.SetAt(0, Object::null_object());
223 args.SetAt(1, selector);
224 args.SetAt(2, invocation_type);
225 args.SetAt(3, Object::smi_zero());
226 args.SetAt(4, Object::null_object());
227 args.SetAt(5, Object::null_object());
228 args.SetAt(6, Object::null_object());
241 const uword pc_offset = caller_frame->
pc() - code.PayloadStart();
243 if (FLAG_shared_slow_path_triggers_gc) {
252 Function::null_function());
257 member_name ^=
pool.ObjectAt(name_index);
259 member_name = Symbols::OptimizedOut().
ptr();
277 buffer.Printf(
"hit null error with cid %" Pd ", caller context: ",
cid);
279 const intptr_t kMaxSlotsCollected = 5;
280 const auto slots =
reinterpret_cast<ObjectPtr*
>(caller_frame->
sp());
281 const intptr_t num_slots_in_frame =
282 reinterpret_cast<ObjectPtr*
>(caller_frame->
fp()) - slots;
283 const auto num_slots_to_collect =
286 for (intptr_t i = 0; i < num_slots_to_collect; i++) {
288 buffer.Printf(
"%s[sp+%" Pd "] %" Pp "", comma ?
", " :
"", i,
289 static_cast<uword>(ptr));
290 if (ptr->IsHeapObject() &&
304 const Smi&
cid = Smi::CheckedHandle(zone, arguments.ArgAt(0));
319 const String& selector = String::CheckedHandle(zone, arguments.ArgAt(0));
332 const Instance&
value = Instance::CheckedHandle(zone, arguments.ArgAt(0));
338 int64_t unboxed_value = arguments.thread()->unboxed_int64_runtime_arg();
345 double val = arguments.thread()->unboxed_double_runtime_arg();
346 const Smi& recognized_kind = Smi::CheckedHandle(zone, arguments.ArgAt(0));
347 switch (recognized_kind.
Value()) {
348 case MethodRecognizer::kDoubleToInteger:
350 case MethodRecognizer::kDoubleFloorToInt:
353 case MethodRecognizer::kDoubleCeilToInt:
372 if (
UNLIKELY(FLAG_runtime_allocate_spill_tlab)) {
374 if ((
count++ % 10) == 0) {
387 const Instance&
length = Instance::CheckedHandle(zone, arguments.ArgAt(0));
388 if (!
length.IsInteger()) {
392 args.SetAt(1, Symbols::Length());
396 const int64_t len = Integer::Cast(
length).AsInt64Value();
410 TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
417 arguments.SetReturn(array);
422 if (FLAG_shared_slow_path_triggers_gc) {
431 const double val = thread->unboxed_double_runtime_arg();
438 const auto val = thread->unboxed_simd128_runtime_arg();
445 const auto val = thread->unboxed_simd128_runtime_arg();
452 if (FLAG_shared_slow_path_triggers_gc) {
461 if (FLAG_shared_slow_path_triggers_gc) {
470 if (FLAG_shared_slow_path_triggers_gc) {
479 if (FLAG_shared_slow_path_triggers_gc) {
492 const intptr_t
cid = Smi::CheckedHandle(zone, arguments.ArgAt(0)).Value();
493 const auto&
length = Instance::CheckedHandle(zone, arguments.ArgAt(1));
494 if (!
length.IsInteger()) {
499 const int64_t len = Integer::Cast(
length).AsInt64Value();
503 }
else if (len >
max) {
506 const auto& typed_data =
509 arguments.SetReturn(typed_data);
518 ASSERT(caller_frame !=
nullptr);
535 const Class& cls = Class::CheckedHandle(zone, arguments.ArgAt(0));
541 ASSERT(Instance::CheckedHandle(zone, arguments.ArgAt(1)).IsNull());
543 const auto& type_arguments =
544 TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
548 ASSERT(type_arguments.IsNull() ||
549 (type_arguments.IsInstantiated() &&
551 instance.SetTypeArguments(type_arguments);
558 EnsureRememberedAndMarkingDeferred,
573 bool add_to_remembered_set =
true;
574 if (object->IsNewObject()) {
575 add_to_remembered_set =
false;
576 }
else if (object->IsArray()) {
578 add_to_remembered_set =
580 }
else if (object->IsContext()) {
581 const intptr_t num_context_variables =
583 add_to_remembered_set =
585 num_context_variables);
588 if (add_to_remembered_set) {
589 object->untag()->EnsureInRememberedSet(thread);
599 return static_cast<uword>(object);
611 TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
613 TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
619 type =
type.InstantiateFrom(instantiator_type_arguments,
622 arguments.SetReturn(
type);
632 TypeArguments::CheckedHandle(zone, arguments.ArgAt(0));
634 TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
636 TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
646 instantiator_type_arguments, function_type_arguments);
648 arguments.SetReturn(type_arguments);
658 ASSERT(caller_frame !=
nullptr);
661 THR_Print(
"SubtypeCheck: '%s' %d %s '%s' %d (pc: %#" Px ").\n",
668 if (
function.HasSavedArgumentsDescriptor()) {
687 TypeArguments::CheckedHandle(zone, arguments.ArgAt(0));
689 TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
690 AbstractType& subtype = AbstractType::CheckedHandle(zone, arguments.ArgAt(2));
692 AbstractType::CheckedHandle(zone, arguments.ArgAt(3));
693 const String& dst_name = String::CheckedHandle(zone, arguments.ArgAt(4));
704 &subtype, &supertype, instantiator_type_args, function_type_args)) {
705 if (FLAG_trace_type_checks) {
711 if (FLAG_trace_type_checks) {
730 const auto&
function = Function::CheckedHandle(zone, arguments.ArgAt(0));
732 const auto& instantiator_type_args =
733 TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
734 const auto& delayed_type_args =
735 TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
737 zone,
Closure::New(instantiator_type_args, Object::null_type_arguments(),
738 delayed_type_args,
function, context,
740 arguments.SetReturn(closure);
748 const Smi& num_variables = Smi::CheckedHandle(zone, arguments.ArgAt(0));
751 arguments.SetReturn(context);
760 const Context& ctx = Context::CheckedHandle(zone, arguments.ArgAt(0));
767 cloned_ctx.
SetAt(i, inst);
769 arguments.SetReturn(cloned_ctx);
780 arguments.SetReturn(record);
790 const auto& value0 = Instance::CheckedHandle(zone, arguments.ArgAt(1));
791 const auto& value1 = Instance::CheckedHandle(zone, arguments.ArgAt(2));
792 const auto& value2 = Instance::CheckedHandle(zone, arguments.ArgAt(3));
795 const intptr_t num_fields = shape.
num_fields();
796 ASSERT(num_fields == 2 || num_fields == 3);
799 if (num_fields > 2) {
802 arguments.SetReturn(record);
811 const intptr_t frame_size =
812 Smi::CheckedHandle(zone, arguments.ArgAt(0)).Value();
815 if (previous_state.IsSuspendState()) {
816 const auto& suspend_state = SuspendState::Cast(previous_state);
817 const auto& function_data =
819 ObjectStore* object_store = thread->isolate_group()->object_store();
820 if (function_data.GetClassId() ==
821 Class::Handle(zone, object_store->async_star_stream_controller())
826 function_data.SetField(
829 object_store->async_star_stream_controller_async_star_body()),
830 Object::null_object());
834 if (function_data.GetClassId() ==
835 Class::Handle(zone, object_store->sync_star_iterator_class()).id()) {
837 function_data.SetField(
838 Field::Handle(zone, object_store->sync_star_iterator_state()),
845 arguments.SetReturn(
result);
854 SuspendState::CheckedHandle(zone, arguments.ArgAt(0));
857 arguments.SetReturn(dst);
871 ASSERT(caller_frame !=
nullptr);
878 if (
type.IsInstantiated()) {
882 type.NameCString(),
type.type_class_id(), caller_frame->
pc());
886 type.InstantiateFrom(instantiator_type_arguments,
888 THR_Print(
"%s: '%s' %s '%s' instantiated from '%s' (pc: %#" Px ").\n",
896 if (
function.HasSavedArgumentsDescriptor()) {
906#if defined(TARGET_ARCH_IA32)
907static BoolPtr CheckHashBasedSubtypeTestCache(
911 const AbstractType& destination_type,
912 const TypeArguments& instantiator_type_arguments,
913 const TypeArguments& function_type_arguments,
914 const SubtypeTestCache& cache) {
932 if (instance_class.IsClosureClass()) {
935 instance_class_id_or_signature =
function.signature();
936 instance_type_arguments =
closure.instantiator_type_arguments();
937 instance_parent_function_type_arguments =
closure.function_type_arguments();
938 instance_delayed_type_arguments =
closure.delayed_type_arguments();
940 instance_class_id_or_signature =
Smi::New(instance_class.id());
941 if (instance_class.NumTypeArguments() > 0) {
942 instance_type_arguments =
instance.GetTypeArguments();
948 if (
cache.HasCheck(instance_class_id_or_signature, destination_type,
949 instance_type_arguments, instantiator_type_arguments,
950 function_type_arguments,
951 instance_parent_function_type_arguments,
952 instance_delayed_type_arguments, &index, &
result)) {
989 if (FLAG_trace_type_checks) {
990 THR_Print(
"Not updating subtype test cache for the record instance.\n");
1008 const auto& closure = Closure::Cast(
instance);
1010 instance_class_id_or_signature =
function.signature();
1011 ASSERT(instance_class_id_or_signature.IsFunctionType());
1012 instance_type_arguments = closure.instantiator_type_arguments();
1013 instance_parent_function_type_arguments = closure.function_type_arguments();
1014 instance_delayed_type_arguments = closure.delayed_type_arguments();
1015 ASSERT(instance_class_id_or_signature.IsCanonical());
1016 ASSERT(instance_type_arguments.IsCanonical());
1017 ASSERT(instance_parent_function_type_arguments.IsCanonical());
1018 ASSERT(instance_delayed_type_arguments.IsCanonical());
1020 instance_class_id_or_signature =
Smi::New(instance_class.
id());
1022 instance_type_arguments =
instance.GetTypeArguments();
1023 ASSERT(instance_type_arguments.IsCanonical());
1026 if (FLAG_trace_type_checks) {
1027 const auto& instance_class_name =
1030 buffer.Printf(
" Updating test cache %#" Px " with result %s for:\n",
1037 buffer.Printf(
" class: %s (%" Pd ")\n", instance_class_name.ToCString(),
1038 instance_class.
id());
1040 " raw entry: [ %#" Px ", %#" Px ", %#" Px ", %#" Px ", %#" Px
1041 ", %#" Px ", %#" Px ", %#" Px " ]\n",
1042 static_cast<uword>(instance_class_id_or_signature.ptr()),
1043 static_cast<uword>(instance_type_arguments.ptr()),
1044 static_cast<uword>(instantiator_type_arguments.
ptr()),
1045 static_cast<uword>(function_type_arguments.
ptr()),
1046 static_cast<uword>(instance_parent_function_type_arguments.ptr()),
1047 static_cast<uword>(instance_delayed_type_arguments.ptr()),
1048 static_cast<uword>(destination_type.
ptr()),
1056 if (len >= FLAG_max_subtype_cache_entries) {
1057 if (FLAG_trace_type_checks) {
1058 THR_Print(
"Not updating subtype test cache as its length reached %d\n",
1059 FLAG_max_subtype_cache_entries);
1063 intptr_t colliding_index = -1;
1066 instance_class_id_or_signature, destination_type,
1067 instance_type_arguments, instantiator_type_arguments,
1068 function_type_arguments, instance_parent_function_type_arguments,
1069 instance_delayed_type_arguments, &colliding_index, &old_result)) {
1070 if (FLAG_trace_type_checks) {
1072 buffer.Printf(
" Collision for test cache %#" Px " at index %" Pd ":\n",
1073 static_cast<uword>(new_cache.
ptr()), colliding_index);
1074 buffer.Printf(
" entry: ");
1078 if (old_result.ptr() !=
result.ptr()) {
1079 FATAL(
"Existing subtype test cache entry has result %s, not %s",
1080 old_result.ToCString(),
result.ToCString());
1086 const intptr_t new_index = new_cache.
AddCheck(
1087 instance_class_id_or_signature, destination_type,
1088 instance_type_arguments, instantiator_type_arguments,
1089 function_type_arguments, instance_parent_function_type_arguments,
1090 instance_delayed_type_arguments,
result);
1091 if (FLAG_trace_type_checks) {
1093 buffer.Printf(
" Added new entry to test cache %#" Px " at index %" Pd
1095 static_cast<uword>(new_cache.
ptr()), new_index);
1096 buffer.Printf(
" new entry: ");
1113 const Instance&
instance = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1115 AbstractType::CheckedHandle(zone, arguments.ArgAt(1));
1117 TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
1119 TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
1121 SubtypeTestCache::CheckedHandle(zone, arguments.ArgAt(4));
1125#if defined(TARGET_ARCH_IA32)
1127 if (cache.IsHash()) {
1129 zone, CheckHashBasedSubtypeTestCache(zone, thread,
instance,
type,
1130 instantiator_type_arguments,
1131 function_type_arguments, cache));
1134 arguments.SetReturn(
result);
1140 type, instantiator_type_arguments, function_type_arguments));
1141 if (FLAG_trace_type_checks) {
1143 function_type_arguments,
result);
1146 function_type_arguments,
result, cache);
1147 arguments.SetReturn(
result);
// Test-only observability hook: set to true when the runtime is entered from
// a type-testing-stub invocation (see the TTS runtime entry below, which
// assigns it). Starts out false.
bool TESTING_runtime_entered_on_TTS_invocation = false;
1169 Instance::CheckedHandle(zone, arguments.ArgAt(0));
1171 AbstractType::CheckedHandle(zone, arguments.ArgAt(1));
1173 TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
1175 TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
1177 dst_name ^= arguments.ArgAt(4);
1181 cache ^= arguments.ArgAt(5);
1182 ASSERT(cache.IsNull() || cache.IsSubtypeTestCache());
1185 Smi::CheckedHandle(zone, arguments.ArgAt(6)).Value());
1188 TESTING_runtime_entered_on_TTS_invocation =
true;
1191#if defined(TARGET_ARCH_IA32)
1194 if (cache.IsHash()) {
1196 zone, CheckHashBasedSubtypeTestCache(
1197 zone, thread, src_instance, dst_type,
1198 instantiator_type_arguments, function_type_arguments, cache));
1201 arguments.SetReturn(
result);
1211 dst_type, instantiator_type_arguments, function_type_arguments);
1213 if (FLAG_trace_type_checks) {
1215 instantiator_type_arguments, function_type_arguments,
1222 auto resolve_dst_name = [&]() {
1223 if (!dst_name.
IsNull())
return;
1224#if !defined(TARGET_ARCH_IA32)
1233 const Code& caller_code =
1239 const intptr_t dst_name_idx = stc_pool_idx + 1;
1240 dst_name ^=
pool.ObjectAt(dst_name_idx);
1246 if (!is_instance_of) {
1248 if (dst_name.
ptr() ==
1249 Symbols::dynamic_assert_assignable_stc_check().ptr()) {
1250#if !defined(TARGET_ARCH_IA32)
1261 const auto& dispatcher =
1263 ASSERT(dispatcher.IsInvokeFieldDispatcher());
1264 const auto& orig_arguments_desc =
1270 for (intptr_t i = 0; i < arg_count; i++) {
1273 orig_arguments.SetAt(i, obj);
1275 const auto& receiver = Closure::CheckedHandle(
1279 zone,
function.DoArgumentTypesMatch(orig_arguments, args_desc));
1291 const auto& src_type =
1294 if (!reported_type.IsInstantiated()) {
1296 reported_type = reported_type.InstantiateFrom(instantiator_type_arguments,
1297 function_type_arguments,
1305 bool should_update_cache =
true;
1306#if !defined(TARGET_ARCH_IA32)
1307 bool would_update_cache_if_not_lazy =
false;
1308#if !defined(DART_PRECOMPILED_RUNTIME)
1313 if (tts_type.IsTypeParameter()) {
1314 const auto& param = TypeParameter::Cast(tts_type);
1315 tts_type = param.GetFromTypeArguments(instantiator_type_arguments,
1316 function_type_arguments);
1318 ASSERT(!tts_type.IsTypeParameter());
1321 if (FLAG_trace_type_checks) {
1322 THR_Print(
" Specializing type testing stub for %s\n",
1323 tts_type.ToCString());
1327 tts_type.SetTypeTestingStub(code);
1331 would_update_cache_if_not_lazy =
1332 (!src_instance.
IsNull() &&
1333 tts_type.type_test_stub() ==
1334 StubCode::DefaultNullableTypeTest().ptr()) ||
1335 tts_type.type_test_stub() == StubCode::DefaultTypeTest().ptr();
1336 should_update_cache = would_update_cache_if_not_lazy && cache.IsNull();
1342 (tts_type.type_test_stub() != StubCode::DefaultNullableTypeTest().ptr() &&
1343 tts_type.type_test_stub() != StubCode::DefaultTypeTest().ptr())) {
1347 if (FLAG_trace_type_checks) {
1348 THR_Print(
" Rebuilding type testing stub for %s\n",
1349 tts_type.ToCString());
1351 const auto& old_code =
Code::Handle(zone, tts_type.type_test_stub());
1354 ASSERT(old_code.ptr() != new_code.ptr());
1356 ASSERT(new_code.ptr() != StubCode::DefaultNullableTypeTest().ptr() &&
1357 new_code.ptr() != StubCode::DefaultTypeTest().ptr());
1358 const auto& old_instructions =
1360 const auto& new_instructions =
1368 should_update_cache = old_instructions.Equals(new_instructions);
1369 if (FLAG_trace_type_checks) {
1370 THR_Print(
" %s rebuilt type testing stub for %s\n",
1371 should_update_cache ?
"Discarding" :
"Installing",
1372 tts_type.ToCString());
1374 if (!should_update_cache) {
1375 tts_type.SetTypeTestingStub(new_code);
1381 if (should_update_cache) {
1382 if (cache.IsNull()) {
1383#if !defined(TARGET_ARCH_IA32)
1386 would_update_cache_if_not_lazy));
1392 const Code& caller_code =
1402 cache ^=
pool.ObjectAt<std::memory_order_acquire>(stc_pool_idx);
1403 if (cache.IsNull()) {
1407 const intptr_t num_inputs =
1409 Symbols::dynamic_assert_assignable_stc_check().ptr()
1413 pool.SetObjectAt<std::memory_order_release>(stc_pool_idx, cache);
1414 if (FLAG_trace_type_checks) {
1415 THR_Print(
" Installed new subtype test cache %#" Px " with %" Pd
1416 " inputs at index %" Pd " of pool for %s\n",
1417 static_cast<uword>(cache.ptr()), num_inputs, stc_pool_idx,
1428 instantiator_type_arguments, function_type_arguments,
1432 arguments.SetReturn(src_instance);
1443 Instance::CheckedHandle(zone, arguments.ArgAt(0));
1445 if (src_instance.
IsNull()) {
1451 "Failed assertion: boolean expression must not be null")));
1455 args.SetAt(2, Object::smi_zero());
1456 args.SetAt(3, Object::smi_zero());
1463 ASSERT(!src_instance.IsBool());
1468 Symbols::BooleanExpression());
1473 const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1478 const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1480 Instance::CheckedHandle(zone, arguments.ArgAt(1));
1481 const Smi& bypass_debugger = Smi::CheckedHandle(zone, arguments.ArgAt(2));
1483 bypass_debugger.
Value() != 0);
1489#if !defined(DART_PRECOMPILED_RUNTIME)
1493 ASSERT(caller_frame !=
nullptr);
1502 if (target_code.
ptr() !=
1505 if (target_code.
ptr() !=
1510 if (FLAG_trace_patching) {
1511 THR_Print(
"PatchStaticCall: patching caller pc %#" Px
1513 " to '%s' new entry point %#" Px " (%s)\n",
1516 target_code.
is_optimized() ?
"optimized" :
"unoptimized");
1520 arguments.SetReturn(target_code);
1526#if defined(PRODUCT) || defined(DART_PRECOMPILED_RUNTIME)
1538 ASSERT(caller_frame !=
nullptr);
1541 isolate->group()->debugger()->GetPatchedStubAddress(caller_frame->
pc());
1543 Error::Handle(zone, isolate->debugger()->PauseBreakpoint());
1545 arguments.SetReturn(orig_stub);
1550#if defined(PRODUCT) || defined(DART_PRECOMPILED_RUNTIME)
1564 const String& target_name,
1566 const Array& arguments_descriptor,
1569 const int kTypeArgsLen = 0;
1570 const int kNumArguments = 1;
1575 receiver_class, getter_name, args_desc));
1584 target_name, arguments_descriptor,
1585 UntaggedFunction::kInvokeFieldDispatcher, FLAG_lazy_dispatchers));
1586 ASSERT(!target_function.
IsNull() || !FLAG_lazy_dispatchers);
1587 if (FLAG_trace_ic) {
1589 "InvokeField IC miss: adding <%s> id:%" Pd " -> <%s>\n",
1599 const Array& args_descriptor,
1600 const String& target_name) {
1604 const String* demangled = &target_name;
1613 args_descriptor, &
result)) {
1617 *demangled, args_descriptor,
1618 UntaggedFunction::kNoSuchMethodDispatcher, FLAG_lazy_dispatchers));
1619 if (FLAG_trace_ic) {
1621 "NoSuchMethod IC miss: adding <%s> id:%" Pd " -> <%s>\n",
1633#if !defined(DART_PRECOMPILED_RUNTIME)
1636 const Code& caller_code,
1640 auto zone = thread->
zone();
1650#if !defined(PRODUCT)
1666#if !defined(PRODUCT)
1678 if (FLAG_unopt_monomorphic_calls && (num_checks == 1)) {
1690 if (FLAG_trace_ic) {
1692 " switching to monomorphic dispatch, %s\n",
1699 if (FLAG_unopt_megamorphic_calls &&
1700 (num_checks > FLAG_max_polymorphic_checks)) {
1702 const Array& descriptor =
1708 StubCode::MegamorphicCall());
1709 if (FLAG_trace_ic) {
1711 " switching to megamorphic dispatch, %s\n",
1724 Object::null_type_arguments());
1728 :
store->simple_instance_of_false_function());
1737 const Class& receiver_class,
1739 const Array& descriptor) {
1748 if (caller_arguments.
length() == 2 &&
1751 ->simple_instance_of_function()) {
1758 if (target_function.IsNull()) {
1761 if (target_function.IsNull()) {
1762 ASSERT(!FLAG_lazy_dispatchers);
1765 return target_function.ptr();
1773 const Instance& arg = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1774 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(1));
1781 if (FLAG_trace_ic) {
1785 ASSERT(caller_frame !=
nullptr);
1789 arguments.SetReturn(
target);
1798 const Instance& arg0 = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1799 const Instance& arg1 = Instance::CheckedHandle(zone, arguments.ArgAt(1));
1800 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(2));
1809 if (FLAG_trace_ic) {
1813 ASSERT(caller_frame !=
nullptr);
1816 caller_frame->
pc(),
target.ToCString(), cids[0], cids[1]);
1818 arguments.SetReturn(
target);
1821#if defined(DART_PRECOMPILED_RUNTIME)
1823static bool IsSingleTarget(IsolateGroup* isolate_group,
1828 const String&
name) {
1830 ClassTable*
table = isolate_group->class_table();
1832 for (intptr_t
cid = lower_cid;
cid <= upper_cid;
cid++) {
1833 if (!
table->HasValidClassAt(
cid))
continue;
1835 if (cls.is_abstract())
continue;
1836 if (!cls.is_allocated())
continue;
1839 if (other_target.ptr() !=
target.ptr()) {
1846class SavedUnlinkedCallMapKeyEqualsTraits :
public AllStatic {
// Human-readable name of this hash-map traits class (used for diagnostics).
static const char* Name() { return "SavedUnlinkedCallMapKeyEqualsTraits "; }
// Hash-map traits hook: statistics reporting is disabled for this map.
static bool ReportStats() { return false; }
1851 static bool IsMatch(
const Object& key1,
const Object& key2) {
1852 if (!key1.IsInteger() || !key2.IsInteger())
return false;
1853 return Integer::Cast(key1).Equals(Integer::Cast(key2));
1855 static uword Hash(
const Object&
key) {
1856 return Integer::Cast(
key).CanonicalizeHash();
1860using UnlinkedCallMap = UnorderedHashMap<SavedUnlinkedCallMapKeyEqualsTraits>;
1862static void SaveUnlinkedCall(Zone* zone,
1865 const UnlinkedCall& unlinked_call) {
1866 IsolateGroup* isolate_group = isolate->group();
1868 SafepointMutexLocker ml(isolate_group->unlinked_call_map_mutex());
1869 if (isolate_group->saved_unlinked_calls() ==
Array::null()) {
1870 const auto& initial_map =
1872 isolate_group->set_saved_unlinked_calls(initial_map);
1875 UnlinkedCallMap unlinked_call_map(zone,
1876 isolate_group->saved_unlinked_calls());
1882 unlinked_call_map.InsertOrGetValue(pc, unlinked_call)));
1884 isolate_group->set_saved_unlinked_calls(unlinked_call_map.Release());
1887static UnlinkedCallPtr LoadUnlinkedCall(Zone* zone,
1890 IsolateGroup* isolate_group = isolate->group();
1892 SafepointMutexLocker ml(isolate_group->unlinked_call_map_mutex());
1894 UnlinkedCallMap unlinked_call_map(zone,
1895 isolate_group->saved_unlinked_calls());
1898 const auto& unlinked_call = UnlinkedCall::Cast(
1900 isolate_group->set_saved_unlinked_calls(unlinked_call_map.Release());
1901 return unlinked_call.ptr();
1955 const Code& caller_code,
1957 : isolate_(thread->isolate()),
1959 zone_(thread->zone()),
1960 caller_arguments_(caller_arguments),
1961 miss_handler_(miss_handler),
1962 arguments_(arguments),
1963 caller_frame_(caller_frame),
1964 caller_code_(caller_code),
1965 caller_function_(caller_function),
1967 args_descriptor_(
Array::Handle()) {
1969 ASSERT(caller_arguments_.length() == 1 || !FLAG_precompiled_mode);
1975 FunctionPtr ResolveTargetFunction(
const Object&
data);
1977#if defined(DART_PRECOMPILED_RUNTIME)
1978 void HandleMissAOT(
const Object& old_data,
1984 void DoMonomorphicMissAOT(
const Object& old_data,
1989 bool CanExtendSingleTargetRange(
const String&
name,
1995 void HandleMissJIT(
const Object& old_data,
1996 const Code& old_target,
1999 void DoMonomorphicMissJIT(
const Object& old_data,
2008 void UpdateICDataWithTarget(
const ICData& ic_data,
2010 void TrySwitch(
const ICData& ic_data,
const Function& target_function);
2014 void ReturnJITorAOT(
const Code& stub,
2018 const Instance& receiver() {
return *caller_arguments_[0]; }
2020 bool should_consider_patching() {
2022 if (FLAG_precompiled_mode)
return true;
2035 ICDataPtr NewICData();
2036 ICDataPtr NewICDataWithTarget(intptr_t
cid,
const Function&
target);
2041 const GrowableArray<const Instance*>& caller_arguments_;
2043 NativeArguments arguments_;
2044 StackFrame* caller_frame_;
2045 const Code& caller_code_;
2046 const Function& caller_function_;
2050 Array& args_descriptor_;
2051 bool is_monomorphic_hit_ =
false;
2054#if defined(DART_PRECOMPILED_RUNTIME)
2055void PatchableCallHandler::DoUnlinkedCallAOT(
const UnlinkedCall& unlinked,
2056 const Function& target_function) {
2059 target_function.IsNull()
2061 : NewICDataWithTarget(receiver().GetClassId(), target_function));
2073 if (!target_function.IsNull() &&
2074 !target_function.PrologueNeedsArgumentsDescriptor()) {
2076 ASSERT(target_function.HasCode());
2077 const Code& target_code =
2079 const Smi& expected_cid =
2082 if (unlinked.can_patch_to_monomorphic()) {
2083 object = expected_cid.ptr();
2084 code = target_code.ptr();
2088 code = StubCode::MonomorphicSmiableCheck().ptr();
2096 ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
2099bool PatchableCallHandler::CanExtendSingleTargetRange(
2101 const Function& old_target,
2102 const Function& target_function,
2105 if (old_target.ptr() != target_function.ptr()) {
2108 intptr_t unchecked_lower, unchecked_upper;
2109 if (receiver().GetClassId() < *lower) {
2111 unchecked_upper = *
lower - 1;
2115 unchecked_lower = *upper + 1;
2119 return IsSingleTarget(isolate_->
group(), zone_, unchecked_lower,
2120 unchecked_upper, target_function,
name);
2124#if defined(DART_PRECOMPILED_RUNTIME)
2125void PatchableCallHandler::DoMonomorphicMissAOT(
2126 const Object& old_data,
2127 const Function& target_function) {
2129 if (old_data.IsSmi()) {
2130 old_expected_cid = Smi::Cast(old_data).Value();
2133 old_expected_cid = MonomorphicSmiableCall::Cast(old_data).expected_cid();
2135 const bool is_monomorphic_hit = old_expected_cid == receiver().
GetClassId();
2139 zone_,
Resolve(thread_, zone_, caller_arguments_, old_receiver_class,
2140 name_, args_descriptor_));
2143 zone_, old_target.IsNull()
2145 : NewICDataWithTarget(old_expected_cid, old_target));
2147 if (is_monomorphic_hit) {
2150 ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
2154 intptr_t
lower = old_expected_cid;
2155 intptr_t upper = old_expected_cid;
2156 if (CanExtendSingleTargetRange(name_, old_target, target_function, &lower,
2158 const SingleTargetCache&
cache =
2161 cache.set_target(code);
2162 cache.set_entry_point(
code.EntryPoint());
2163 cache.set_lower_limit(lower);
2164 cache.set_upper_limit(upper);
2165 const Code& stub = StubCode::SingleTargetCall();
2170 ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
2175 const Code& stub = StubCode::ICCallThroughCode();
2181 ReturnAOT(stub, ic_data);
2185#if !defined(DART_PRECOMPILED_RUNTIME)
2186void PatchableCallHandler::DoMonomorphicMissJIT(
2187 const Object& old_data,
2188 const Function& target_function) {
2190 const auto& old_ic_data_entries = Array::Cast(old_data);
2192 const auto& ic_data =
2196 if (ic_data.NumberOfChecksIs(1) &&
2197 (ic_data.GetReceiverClassIdAt(0) == receiver().GetClassId())) {
2200 if (FLAG_trace_ic) {
2202 " updating code (old code was disabled)\n",
2203 caller_frame_->
pc());
2212 ReturnJIT(code,
data, target_function);
2216 ASSERT(ic_data.NumArgsTested() == 1);
2217 const Code& stub = ic_data.is_tracking_exactness()
2218 ? StubCode::OneArgCheckInlineCacheWithExactnessCheck()
2219 : StubCode::OneArgCheckInlineCache();
2220 if (FLAG_trace_ic) {
2222 " switching monomorphic to polymorphic dispatch, %s\n",
2223 caller_frame_->
pc(), ic_data.ToCString());
2228 ASSERT(caller_arguments_.length() == 1);
2229 UpdateICDataWithTarget(ic_data, target_function);
2230 ASSERT(should_consider_patching());
2232 ic_data, target_function);
2233 ReturnJIT(stub, ic_data, target_function);
2237#if defined(DART_PRECOMPILED_RUNTIME)
2238void PatchableCallHandler::DoSingleTargetMissAOT(
2239 const SingleTargetCache&
data,
2240 const Function& target_function) {
2242 const Function& old_target =
2248 target_function.IsNull()
2250 : NewICDataWithTarget(receiver().GetClassId(), target_function));
2253 intptr_t upper =
data.upper_limit();
2254 if (CanExtendSingleTargetRange(name_, old_target, target_function, &lower,
2256 data.set_lower_limit(lower);
2257 data.set_upper_limit(upper);
2260 ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
2265 const Code& stub = StubCode::ICCallThroughCode();
2271 ReturnAOT(stub, ic_data);
2275#if defined(DART_PRECOMPILED_RUNTIME)
2276void PatchableCallHandler::DoICDataMissAOT(
const ICData& ic_data,
2277 const Function& target_function) {
2279 const Class& cls =
Class::Handle(zone_, receiver().clazz());
2281 const Array& descriptor =
2282 Array::CheckedHandle(zone_, ic_data.arguments_descriptor());
2283 ArgumentsDescriptor args_desc(descriptor);
2284 if (FLAG_trace_ic || FLAG_trace_ic_miss_in_optimized) {
2286 cls.ToCString(), args_desc.TypeArgsLen(),
name.ToCString());
2289 if (target_function.IsNull()) {
2290 ReturnAOT(StubCode::NoSuchMethodDispatcher(), ic_data);
2294 const intptr_t number_of_checks = ic_data.NumberOfChecks();
2296 if ((number_of_checks == 0) &&
2297 (!FLAG_precompiled_mode || ic_data.receiver_cannot_be_smi()) &&
2298 !target_function.PrologueNeedsArgumentsDescriptor()) {
2305 const Code& target_code =
2307 const Smi& expected_cid =
2309 ASSERT(target_code.HasMonomorphicEntry());
2311 expected_cid, target_code);
2312 ReturnAOT(target_code, expected_cid);
2314 ic_data.EnsureHasReceiverCheck(receiver().GetClassId(), target_function);
2315 if (number_of_checks > FLAG_max_polymorphic_checks) {
2319 const Code& stub = StubCode::MegamorphicCall();
2323 ReturnAOT(stub, cache);
2325 ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
2331#if !defined(DART_PRECOMPILED_RUNTIME)
2332void PatchableCallHandler::DoICDataMissJIT(
const ICData& ic_data,
2333 const Object& old_code,
2334 const Function& target_function) {
2335 ASSERT(ic_data.NumArgsTested() == caller_arguments_.length());
2337 if (ic_data.NumArgsTested() == 1) {
2338 ASSERT(old_code.ptr() == StubCode::OneArgCheckInlineCache().ptr() ||
2340 StubCode::OneArgCheckInlineCacheWithExactnessCheck().ptr() ||
2342 StubCode::OneArgOptimizedCheckInlineCache().ptr() ||
2344 StubCode::OneArgOptimizedCheckInlineCacheWithExactnessCheck()
2346 old_code.ptr() == StubCode::ICCallBreakpoint().ptr() ||
2347 (old_code.IsNull() && !should_consider_patching()));
2348 UpdateICDataWithTarget(ic_data, target_function);
2349 if (should_consider_patching()) {
2351 caller_function_, ic_data, target_function);
2354 zone_, ic_data.is_tracking_exactness()
2355 ? StubCode::OneArgCheckInlineCacheWithExactnessCheck().ptr()
2356 : StubCode::OneArgCheckInlineCache().ptr());
2357 ReturnJIT(stub, ic_data, target_function);
2359 ASSERT(old_code.ptr() == StubCode::TwoArgsCheckInlineCache().ptr() ||
2360 old_code.ptr() == StubCode::SmiAddInlineCache().ptr() ||
2361 old_code.ptr() == StubCode::SmiLessInlineCache().ptr() ||
2362 old_code.ptr() == StubCode::SmiEqualInlineCache().ptr() ||
2364 StubCode::TwoArgsOptimizedCheckInlineCache().ptr() ||
2365 old_code.ptr() == StubCode::ICCallBreakpoint().ptr() ||
2366 (old_code.IsNull() && !should_consider_patching()));
2367 UpdateICDataWithTarget(ic_data, target_function);
2368 ReturnJIT(StubCode::TwoArgsCheckInlineCache(), ic_data, target_function);
2373void PatchableCallHandler::DoMegamorphicMiss(
const MegamorphicCache&
data,
2374 const Function& target_function) {
2376 const Class& cls =
Class::Handle(zone_, receiver().clazz());
2378 const Array& descriptor =
2379 Array::CheckedHandle(zone_,
data.arguments_descriptor());
2380 ArgumentsDescriptor args_desc(descriptor);
2381 if (FLAG_trace_ic || FLAG_trace_ic_miss_in_optimized) {
2382 OS::PrintErr(
"Megamorphic miss, class=%s, function<%" Pd ">=%s\n",
2383 cls.ToCString(), args_desc.TypeArgsLen(),
name.ToCString());
2385 if (target_function.IsNull()) {
2386 ReturnJITorAOT(StubCode::NoSuchMethodDispatcher(),
data, target_function);
2392 data.EnsureContains(class_id, target_function);
2393 ReturnJITorAOT(StubCode::MegamorphicCall(),
data, target_function);
2396void PatchableCallHandler::UpdateICDataWithTarget(
2397 const ICData& ic_data,
2398 const Function& target_function) {
2399 if (target_function.IsNull())
return;
2405 const bool call_target_directly =
2407 const intptr_t invocation_count = call_target_directly ? 1 : 0;
2409 if (caller_arguments_.length() == 1) {
2411#if !defined(DART_PRECOMPILED_RUNTIME)
2412 if (ic_data.is_tracking_exactness()) {
2413 exactness = receiver().
IsNull()
2415 : StaticTypeExactnessState::Compute(
2416 Type::Cast(AbstractType::Handle(
2417 ic_data.receivers_static_type())),
2421 ic_data.EnsureHasReceiverCheck(receiver().GetClassId(), target_function,
2422 invocation_count, exactness);
2424 GrowableArray<intptr_t> class_ids(caller_arguments_.length());
2425 ASSERT(ic_data.NumArgsTested() == caller_arguments_.length());
2426 for (intptr_t i = 0; i < caller_arguments_.length(); i++) {
2427 class_ids.Add(caller_arguments_[i]->GetClassId());
2429 ic_data.EnsureHasCheck(class_ids, target_function, invocation_count);
2433void PatchableCallHandler::ReturnAOT(
const Code& stub,
const Object&
data) {
2439void PatchableCallHandler::ReturnJIT(
const Code& stub,
2441 const Function&
target) {
2444 switch (miss_handler_) {
2462void PatchableCallHandler::ReturnJITorAOT(
const Code& stub,
2464 const Function&
target) {
2465#if defined(DART_PRECOMPILED_MODE)
2466 ReturnAOT(stub,
data);
2472ICDataPtr PatchableCallHandler::NewICData() {
2473 return ICData::New(caller_function_, name_, args_descriptor_,
DeoptId::kNone,
2474 1, ICData::kInstance);
2477ICDataPtr PatchableCallHandler::NewICDataWithTarget(intptr_t
cid,
2478 const Function&
target) {
2479 GrowableArray<intptr_t> cids(1);
2483 ICData::kInstance, &cids,
target);
2486FunctionPtr PatchableCallHandler::ResolveTargetFunction(
const Object&
data) {
2487 switch (
data.GetClassId()) {
2488 case kUnlinkedCallCid: {
2489 const auto& unlinked_call = UnlinkedCall::Cast(
data);
2491#if defined(DART_PRECOMPILED_RUNTIME)
2504 SaveUnlinkedCall(zone_, isolate_, caller_frame_->
pc(), unlinked_call);
2507 name_ = unlinked_call.target_name();
2508 args_descriptor_ = unlinked_call.arguments_descriptor();
2511 case kMonomorphicSmiableCallCid:
2513#if defined(DART_PRECOMPILED_RUNTIME)
2516 case kSingleTargetCacheCid: {
2518 zone_, LoadUnlinkedCall(zone_, isolate_, caller_frame_->
pc()));
2519 name_ = unlinked_call.target_name();
2520 args_descriptor_ = unlinked_call.arguments_descriptor();
2526 const auto& ic_data_entries = Array::Cast(
data);
2528 const auto& ic_data =
2530 args_descriptor_ = ic_data.arguments_descriptor();
2531 name_ = ic_data.target_name();
2537 case kMegamorphicCacheCid: {
2538 const CallSiteData& call_site_data = CallSiteData::Cast(
data);
2539 name_ = call_site_data.target_name();
2540 args_descriptor_ = call_site_data.arguments_descriptor();
2546 const Class& cls =
Class::Handle(zone_, receiver().clazz());
2547 return Resolve(thread_, zone_, caller_arguments_, cls, name_,
2554 const auto& target_function =
2567#if defined(DART_PRECOMPILED_RUNTIME)
2570 uword target_entry = 0;
2572 caller_frame_->
pc(), caller_code_));
2573 HandleMissAOT(
data, target_entry, target_function);
2576 if (should_consider_patching()) {
2580 ASSERT(old_data.IsICData() || old_data.IsMegamorphicCache());
2583 HandleMissJIT(
data, code, target_function);
2587#if defined(DART_PRECOMPILED_RUNTIME)
2589void PatchableCallHandler::HandleMissAOT(
const Object& old_data,
2593 case kUnlinkedCallCid:
2595 StubCode::SwitchableCallMiss().MonomorphicEntryPoint());
2596 DoUnlinkedCallAOT(UnlinkedCall::Cast(old_data), target_function);
2598 case kMonomorphicSmiableCallCid:
2600 StubCode::MonomorphicSmiableCheck().MonomorphicEntryPoint());
2603 DoMonomorphicMissAOT(old_data, target_function);
2605 case kSingleTargetCacheCid:
2606 ASSERT(old_entry == StubCode::SingleTargetCall().MonomorphicEntryPoint());
2607 DoSingleTargetMissAOT(SingleTargetCache::Cast(old_data), target_function);
2611 StubCode::ICCallThroughCode().MonomorphicEntryPoint());
2612 DoICDataMissAOT(ICData::Cast(old_data), target_function);
2614 case kMegamorphicCacheCid:
2615 ASSERT(old_entry == StubCode::MegamorphicCall().MonomorphicEntryPoint());
2616 DoMegamorphicMiss(MegamorphicCache::Cast(old_data), target_function);
2625void PatchableCallHandler::HandleMissJIT(
const Object& old_data,
2626 const Code& old_code,
2627 const Function& target_function) {
2628 switch (old_data.GetClassId()) {
2632 DoMonomorphicMissJIT(old_data, target_function);
2635 DoICDataMissJIT(ICData::Cast(old_data), old_code, target_function);
2637 case kMegamorphicCacheCid:
2638 ASSERT(old_code.ptr() == StubCode::MegamorphicCall().ptr() ||
2639 (old_code.IsNull() && !should_consider_patching()));
2640 DoMegamorphicMiss(MegamorphicCache::Cast(old_data), target_function);
2653#if !defined(DART_PRECOMPILED_RUNTIME)
2658 const auto& caller_function =
2662 native_arguments, caller_frame, caller_code,
2677 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
2678 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(1));
2681 args.Add(&receiver);
2692 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
2693 const Instance& other = Instance::CheckedHandle(zone, arguments.ArgAt(1));
2694 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(2));
2697 args.Add(&receiver);
2707 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(1));
2726#if defined(DART_PRECOMPILED_RUNTIME)
2734 caller_arguments.
Add(&receiver);
2737 caller_frame, caller_code, caller_function);
2748 const String& target_name,
2749 const Array& orig_arguments,
2750 const Array& orig_arguments_desc) {
2751 ASSERT(!FLAG_lazy_dispatchers);
2752 const bool is_dynamic_call =
2755 if (is_dynamic_call) {
2756 demangled_target_name =
2783#if !defined(DART_PRECOMPILED_RUNTIME)
2785 function.GetMethodExtractor(demangled_target_name))));
2796 if (receiver.IsRecord()) {
2797 const Record& record = Record::Cast(receiver);
2798 const intptr_t field_index =
2800 if (field_index >= 0) {
2801 return record.
FieldAt(field_index);
2811 if ((target_name.
ptr() == Symbols::call().ptr()) && receiver.IsClosure()) {
2816 orig_arguments_desc);
2821 const auto& getter_name =
2824 zone, is_dynamic_call
2826 : getter_name.ptr());
2838 if (is_dynamic_call) {
2848 if (is_dynamic_call) {
2856 getter_arguments.
SetAt(0, receiver);
2859 if (getter_result.IsError()) {
2860 return getter_result.
ptr();
2862 ASSERT(getter_result.
IsNull() || getter_result.IsInstance());
2864 orig_arguments.
SetAt(args_desc.FirstArgIndex(), getter_result);
2866 orig_arguments_desc);
2871 if (receiver.IsRecord()) {
2872 const Record& record = Record::Cast(receiver);
2873 const intptr_t field_index =
2875 if (field_index >= 0) {
2876 const Object& getter_result =
2878 ASSERT(getter_result.
IsNull() || getter_result.IsInstance());
2879 orig_arguments.
SetAt(args_desc.FirstArgIndex(), getter_result);
2881 orig_arguments_desc);
2889 orig_arguments, orig_arguments_desc));
2899 ASSERT(!FLAG_lazy_dispatchers);
2900 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
2902 const Array& orig_arguments_desc =
2903 Array::CheckedHandle(zone, arguments.ArgAt(2));
2904 const Array& orig_arguments = Array::CheckedHandle(zone, arguments.ArgAt(3));
2906 if (ic_data_or_cache.IsICData()) {
2907 target_name = ICData::Cast(ic_data_or_cache).target_name();
2909 ASSERT(ic_data_or_cache.IsMegamorphicCache());
2910 target_name = MegamorphicCache::Cast(ic_data_or_cache).target_name();
2915 thread, zone, receiver, target_name,
2916 orig_arguments, orig_arguments_desc));
2918 arguments.SetReturn(
result);
2927 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
2928 const Function&
function = Function::CheckedHandle(zone, arguments.ArgAt(1));
2929 const Array& orig_arguments_desc =
2930 Array::CheckedHandle(zone, arguments.ArgAt(2));
2931 const Array& orig_arguments = Array::CheckedHandle(zone, arguments.ArgAt(3));
2934 if ((
function.kind() == UntaggedFunction::kClosureFunction) ||
2935 (
function.kind() == UntaggedFunction::kImplicitClosureFunction)) {
2939 orig_function_name =
function.QualifiedUserVisibleName();
2941 orig_function_name =
function.name();
2946 orig_arguments, orig_arguments_desc));
2948 arguments.SetReturn(
result);
2951#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
2958 auto isolate = thread->
isolate();
2961 if (FLAG_shared_slow_path_triggers_gc) {
2965 bool do_deopt =
false;
2966 bool do_stacktrace =
false;
2967 bool do_reload =
false;
2969 const intptr_t isolate_reload_every =
2970 isolate->group()->reload_every_n_stack_overflow_checks();
2971 if ((FLAG_deoptimize_every > 0) || (FLAG_stacktrace_every > 0) ||
2972 (FLAG_gc_every > 0) || (isolate_reload_every > 0)) {
2978 if (FLAG_deoptimize_every > 0 && (
count % FLAG_deoptimize_every) == 0) {
2981 if (FLAG_stacktrace_every > 0 && (
count % FLAG_stacktrace_every) == 0) {
2982 do_stacktrace =
true;
2984 if (FLAG_gc_every > 0 && (
count % FLAG_gc_every) == 0) {
2987 if ((isolate_reload_every > 0) && (
count % isolate_reload_every) == 0) {
2988 do_reload = isolate->group()->CanReload();
2992 if ((FLAG_deoptimize_filter !=
nullptr) ||
2993 (FLAG_stacktrace_filter !=
nullptr) || (FLAG_reload_every != 0)) {
3000 code =
frame->LookupDartCode();
3005 if ((FLAG_deoptimize_filter !=
nullptr) ||
3006 (FLAG_stacktrace_filter !=
nullptr)) {
3010 if (!code.IsNull()) {
3011 if (!code.is_optimized() && FLAG_reload_every_optimized) {
3015 if (code.is_optimized() && FLAG_deoptimize_filter !=
nullptr &&
3019 function.ToFullyQualifiedCString());
3023 if (FLAG_stacktrace_filter !=
nullptr &&
3026 function.ToFullyQualifiedCString());
3027 do_stacktrace =
true;
3036 isolate_group->MaybeIncreaseReloadEveryNStackOverflowChecks();
3039 const char* script_uri = isolate_group->source()->script_uri;
3041 const bool success =
3042 isolate_group->ReloadSources(&js,
true, script_uri);
3044 FATAL(
"*** Isolate reload failed:\n%s\n", js.ToCString());
3047 if (do_stacktrace) {
3051 intptr_t num_frames = stack->
Length();
3052 for (intptr_t i = 0; i < num_frames; i++) {
3056#if !defined(DART_PRECOMPILED_RUNTIME)
3057 if (!
frame->function().ForceOptimize()) {
3060 num_vars =
frame->NumLocalVariables();
3064 for (intptr_t v = 0; v < num_vars; v++) {
3068 if (FLAG_stress_async_stacks) {
3078#if !defined(DART_PRECOMPILED_RUNTIME)
3081 ASSERT(isolate_group->use_osr());
3088 ASSERT(!code.is_optimized());
3094 if (code.ptr() !=
function.unoptimized_code()) {
3114 if (FLAG_trace_osr) {
3116 function.ToFullyQualifiedCString(), osr_id,
3128 uword optimized_entry = code.EntryPoint();
3129 frame->set_pc(optimized_entry);
3130 frame->set_pc_marker(code.ptr());
3136#if defined(USING_SIMULATOR)
3139 if (stack_pos == 0) {
3142 stack_pos = thread->saved_stack_limit();
3150 uword stack_overflow_flags = thread->GetAndClearStackOverflowFlags();
3155 if (!thread->os_thread()->HasStackHeadroom() ||
3157 if (FLAG_verbose_stack_overflow) {
3160 thread->saved_stack_limit());
3167 while (
frame !=
nullptr) {
3183#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
3194#if !defined(DART_PRECOMPILED_RUNTIME)
3204 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(0));
3205 const Function&
function = Function::CheckedHandle(zone, arguments.ArgAt(1));
3211 "IC call @%#" Px ": ICData: %#" Px " cnt:%" Pd " nchecks: %" Pd " %s\n",
3220#if !defined(DART_PRECOMPILED_RUNTIME)
3221 const Function&
function = Function::CheckedHandle(zone, arguments.ArgAt(0));
3226 auto isolate_group = thread->isolate_group();
3227 if (FLAG_background_compilation) {
3228 if (isolate_group->background_compiler()->EnqueueCompilation(
function)) {
3233 function.SetUsageCounter(INT32_MIN);
3243 if (FLAG_trace_compiler || FLAG_trace_optimizing_compiler) {
3245 THR_Print(
"ReCompiling function: '%s' \n",
3246 function.ToFullyQualifiedCString());
3262#if !defined(DART_PRECOMPILED_RUNTIME)
3267 while (
frame->IsStubFrame() ||
frame->IsExitFrame()) {
3271 if (
frame->IsEntryFrame()) {
3282 const Code& current_target_code =
3286 if (FLAG_trace_patching) {
3288 "FixCallersTarget: caller %#" Px
3290 "target '%s' -> %#" Px " (%s)\n",
3293 current_target_code.
is_optimized() ?
"optimized" :
"unoptimized");
3295 arguments.SetReturn(current_target_code);
3304#if !defined(DART_PRECOMPILED_RUNTIME)
3305 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
3306 const Array& switchable_call_data =
3307 Array::CheckedHandle(zone, arguments.ArgAt(1));
3313 const auto& caller_function =
3317 caller_arguments.
Add(&receiver);
3320 arguments, caller_frame, caller_code, caller_function);
3330#if !defined(DART_PRECOMPILED_RUNTIME)
3335 while (
frame->IsStubFrame() ||
frame->IsExitFrame()) {
3339 if (
frame->IsEntryFrame()) {
3349 alloc_class ^= stub.
owner();
3351 if (alloc_stub.
IsNull()) {
3357 if (FLAG_trace_patching) {
3363 arguments.SetReturn(alloc_stub);
3370 switch (deopt_reason) {
3371#define DEOPT_REASON_TO_TEXT(name) \
3372 case ICData::kDeopt##name: \
3375#undef DEOPT_REASON_TO_TEXT
3385 if (!
function.IsSuspendableFunction()) {
3392 return suspend_state.IsSuspendState() &&
3393 (SuspendState::Cast(suspend_state).pc() != 0);
3397 const Code& optimized_code,
3410 if (!
error.IsNull()) {
3413 const Code& unoptimized_code =
3418 function.SwitchToUnoptimizedCode();
3423 if (FLAG_trace_deoptimization) {
3426 }
else if (
frame->IsMarkedForLazyDeopt()) {
3428 if (FLAG_trace_deoptimization) {
3442 frame->MarkForLazyDeopt();
3444 if (FLAG_trace_deoptimization) {
3445 THR_Print(
"Lazy deopt scheduled for fp=%" Pp ", pc=%" Pp "\n",
3446 frame->fp(), deopt_pc);
3461 auto isolate_group = thread->isolate_group();
3462 isolate_group->RunWithStoppedMutators([&]() {
3464 isolate_group->ForEachIsolate(
3467 if (mutator_thread ==
nullptr) {
3473 while (
frame !=
nullptr) {
3474 optimized_code =
frame->LookupDartCode();
3491 auto isolate = thread->isolate();
3492 auto isolate_group = thread->isolate_group();
3493 isolate_group->RunWithStoppedMutators([&]() {
3494 auto mutator_thread = isolate->mutator_thread();
3495 if (mutator_thread ==
nullptr) {
3501 if (
frame !=
nullptr) {
3503 if (optimized_code.is_optimized() &&
3504 !optimized_code.is_force_optimized()) {
3511#if !defined(DART_PRECOMPILED_RUNTIME)
3517 intptr_t** cpu_registers) {
3522 MSAN_UNPOISON(
reinterpret_cast<void*
>(saved_registers_address),
3529 ASSERT(fpu_registers_copy !=
nullptr);
3531 fpu_registers_copy[i] =
3535 *fpu_registers = fpu_registers_copy;
3539 ASSERT(cpu_registers_copy !=
nullptr);
3541 cpu_registers_copy[i] =
3542 *
reinterpret_cast<intptr_t*
>(saved_registers_address);
3545 *cpu_registers = cpu_registers_copy;
3551 int64_t int_value =
static_cast<int64_t
>(
value);
3552 double converted_double =
static_cast<double>(int_value);
3553 if (converted_double !=
value) {
3567 DeoptimizeCopyFrame,
3569 uword saved_registers_address,
3570 uword is_lazy_deopt) {
3571#if !defined(DART_PRECOMPILED_RUNTIME)
3577 const uword last_fp =
3587 ASSERT(caller_frame !=
nullptr);
3593 if (FLAG_trace_deoptimization) {
3595 THR_Print(
"== Deoptimizing code for '%s', %s, %s\n",
3596 function.ToFullyQualifiedCString(),
3597 deoptimizing_code ?
"code & frame" :
"frame",
3598 (is_lazy_deopt != 0u) ?
"lazy-deopt" :
"");
3601 if (is_lazy_deopt != 0u) {
3602 const uword deopt_pc =
3607 caller_frame->
set_pc(deopt_pc);
3608 ASSERT(caller_frame->
pc() == deopt_pc);
3613 if (FLAG_trace_deoptimization) {
3615 caller_frame->
pc());
3621 intptr_t* cpu_registers;
3627 fpu_registers, cpu_registers, is_lazy_deopt != 0, deoptimizing_code);
3642#if !defined(DART_PRECOMPILED_RUNTIME)
3651 ASSERT(caller_frame !=
nullptr);
3664 ASSERT(code.ptr() == optimized_code.
ptr());
3686#if !defined(DART_PRECOMPILED_RUNTIME)
3694 DeoptContext* deopt_context = isolate->deopt_context();
3696 isolate->set_deopt_context(
nullptr);
3697 delete deopt_context;
3708#if !defined(DART_PRECOMPILED_RUNTIME)
3709#if !defined(PRODUCT)
3710 isolate->debugger()->RewindPostDeopt();
3725 const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
3727 Instance::CheckedHandle(zone, arguments.ArgAt(1));
3729#if !defined(DART_PRECOMPILED_RUNTIME)
3730#if !defined(PRODUCT)
3731 if (isolate->has_resumption_breakpoints()) {
3732 isolate->debugger()->ResumptionBreakpoint();
3741 .IsSuspendableFunction());
3746 thread->pending_deopts().AddPendingDeopt(
frame->fp(), deopt_pc);
3747 frame->MarkForLazyDeopt();
3749 if (FLAG_trace_deoptimization) {
3750 THR_Print(
"Lazy deopt scheduled for resumed frame fp=%" Pp ", pc=%" Pp
3752 frame->fp(), deopt_pc);
3757 if (!exception.
IsNull()) {
3763 const char* runtime_call_name,
3764 bool can_lazy_deopt) {
3765 ASSERT(FLAG_deoptimize_on_runtime_call_every > 0);
3766 if (FLAG_precompiled_mode) {
3772 const bool is_deopt_related =
3773 strstr(runtime_call_name,
"Deoptimize") !=
nullptr;
3774 if (is_deopt_related) {
3779 if (can_lazy_deopt) {
3780 if (FLAG_deoptimize_on_runtime_call_name_filter !=
nullptr &&
3781 (strlen(runtime_call_name) !=
3782 strlen(FLAG_deoptimize_on_runtime_call_name_filter) ||
3783 strstr(runtime_call_name,
3784 FLAG_deoptimize_on_runtime_call_name_filter) ==
nullptr)) {
3788 if ((
count % FLAG_deoptimize_on_runtime_call_every) == 0) {
3796 if (remainder == 0.0) {
3799 }
else if (remainder < 0.0) {
3814#if !defined(DART_PRECOMPILED_RUNTIME)
3815 const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
3824 const Instance&
instance = Instance::CheckedHandle(zone, arguments.ArgAt(0));
3825 const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(1));
3830 (
result.ptr() != Object::transition_sentinel().ptr()));
3831 arguments.SetReturn(
result);
3835 const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
3840 (
result.ptr() != Object::transition_sentinel().ptr()));
3841 arguments.SetReturn(
result);
3845 const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
3851 const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
3858 FATAL(
"Not loaded");
3868 isolate->group()->api_state()->AllocatePersistentHandle();
3877typedef void* (*MemMoveCFunction)(
void* dest,
const void* src,
size_t n);
4029 uword* out_entry_point,
4030 uword* out_trampoline_type) {
4033 reinterpret_cast<void*
>(trampoline));
4034 ASSERT(out_entry_point !=
nullptr);
4035 ASSERT(out_trampoline_type !=
nullptr);
4039 auto metadata = fcm->LookupMetadataForTrampoline(trampoline);
4042 if (metadata.trampoline_type() ==
4048 auto metadata2 = fcm->LookupMetadataForTrampoline(trampoline);
4049 *out_trampoline_type =
static_cast<uword>(metadata2.trampoline_type());
4054 if (!metadata.IsLive() || !metadata.IsSameCallback(metadata2)) {
4056 reinterpret_cast<void*
>(trampoline));
4060 *out_entry_point = metadata.target_entry_point();
4061 Isolate* target_isolate = metadata.target_isolate();
4063 Isolate* current_isolate =
nullptr;
4064 if (current_thread !=
nullptr) {
4065 current_isolate = current_thread->
isolate();
4074 if (current_isolate ==
nullptr ||
4075 current_isolate->
group() != target_isolate->
group()) {
4076 if (current_isolate !=
nullptr) {
4082 ASSERT(temp_thread !=
nullptr);
4085 reinterpret_cast<intptr_t
>(current_isolate));
4092 if (!metadata.IsLive()) {
4093 FATAL(
"Callback invoked after it has been deleted.");
4095 Isolate* target_isolate = metadata.target_isolate();
4096 *out_entry_point = metadata.target_entry_point();
4097 *out_trampoline_type =
static_cast<uword>(metadata.trampoline_type());
4098 if (current_thread ==
nullptr) {
4099 FATAL(
"Cannot invoke native callback outside an isolate.");
4102 FATAL(
"Cannot invoke native callback when API callbacks are prohibited.");
4105 FATAL(
"Cannot invoke native callback while unwind error propagates.");
4108 FATAL(
"Native callbacks must be invoked on the mutator thread.");
4110 if (current_thread->
isolate() != target_isolate) {
4111 FATAL(
"Cannot invoke native callback from a different isolate.");
4125 (
void*)*out_entry_point);
4127 (
void*)*out_trampoline_type);
4128 return current_thread;
4134 ASSERT(thread !=
nullptr);
4139 const bool inside_temp_isolate =
4140 source_isolate ==
nullptr || source_isolate != thread->
isolate();
4141 if (inside_temp_isolate) {
4143 if (source_isolate !=
nullptr) {
4161 return return_value;
4184 return_value->
set_ptr(Object::sentinel().ptr());
4186 return return_value;
4219#if !defined(USING_MEMORY_SANITIZER)
4228#if !defined(USING_THREAD_SANITIZER)
static void round(SkPoint *p)
static bool left(const SkPoint &p0, const SkPoint &p1)
static bool right(const SkPoint &p0, const SkPoint &p1)
#define RELEASE_ASSERT(cond)
virtual classid_t type_class_id() const
bool IsTopTypeForSubtyping() const
virtual bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
static bool InstantiateAndTestSubtype(AbstractType *subtype, AbstractType *supertype, const TypeArguments &instantiator_type_args, const TypeArguments &function_type_args)
const char * NameCString() const
bool IsDynamicType() const
const Function & function() const
LocalHandles * local_handles()
const char * ToCString() const
static ArrayPtr NewBoxed(intptr_t type_args_len, intptr_t num_arguments, const Array &optional_arguments_names, Heap::Space space=Heap::kOld)
intptr_t CountWithTypeArgs() const
intptr_t FirstArgIndex() const
static constexpr intptr_t kMaxElements
static ArrayPtr New(intptr_t len, Heap::Space space=Heap::kNew)
virtual void SetTypeArguments(const TypeArguments &value) const
static intptr_t LengthOf(const ArrayPtr array)
void SetAt(intptr_t index, const Object &value) const
static const Bool & Get(bool value)
static const Bool & True()
StringPtr target_name() const
ArrayPtr arguments_descriptor() const
ClassPtr At(intptr_t cid) const
CodePtr allocation_stub() const
FunctionPtr GetInvocationDispatcher(const String &target_name, const Array &args_desc, UntaggedFunction::Kind kind, bool create_if_absent) const
intptr_t NumTypeArguments() const
bool IsClosureClass() const
ErrorPtr EnsureIsFinalized(Thread *thread) const
ClassPtr SuperClass(ClassTable *class_table=nullptr) const
bool is_allocate_finalized() const
static ClosurePtr New(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const Function &function, const Object &context, Heap::Space space=Heap::kNew)
static void PatchInstanceCallAt(uword return_address, const Code &caller_code, const Object &data, const Code &target)
static CodePtr GetStaticCallTargetAt(uword return_address, const Code &code)
static void PatchSwitchableCallAt(uword return_address, const Code &caller_code, const Object &data, const Code &target)
static uword GetSwitchableCallTargetEntryAt(uword return_address, const Code &caller_code)
static ObjectPtr GetSwitchableCallDataAt(uword return_address, const Code &caller_code)
static CodePtr GetInstanceCallAt(uword return_address, const Code &caller_code, Object *data)
static void PatchStaticCallAt(uword return_address, const Code &code, const Code &new_target)
intptr_t GetNullCheckNameIndexAt(int32_t pc_offset)
FunctionPtr function() const
void SetStaticCallTargetCodeAt(uword pc, const Code &code) const
bool is_optimized() const
bool is_force_optimized() const
void SetStubCallTargetCodeAt(uword pc, const Code &code) const
bool ContainsInstructionAt(uword addr) const
void set_is_alive(bool value) const
ObjectPoolPtr GetObjectPool() const
FunctionPtr GetStaticCallTargetFunctionAt(uword pc) const
static bool CanOptimizeFunction(Thread *thread, const Function &function)
static constexpr intptr_t kNoOSRDeoptId
static ErrorPtr EnsureUnoptimizedCode(Thread *thread, const Function &function)
static ObjectPtr CompileOptimizedFunction(Thread *thread, const Function &function, intptr_t osr_id=kNoOSRDeoptId)
static ContextPtr New(intptr_t num_variables, Heap::Space space=Heap::kNew)
static intptr_t NumVariables(const ContextPtr context)
void set_parent(const Context &parent) const
void SetAt(intptr_t context_index, const Object &value) const
ObjectPtr At(intptr_t context_index) const
intptr_t num_variables() const
ContextPtr parent() const
static ObjectPtr InvokeNoSuchMethod(Thread *thread, const Instance &receiver, const String &target_name, const Array &arguments, const Array &arguments_descriptor)
static ObjectPtr InvokeClosure(Thread *thread, const Array &arguments)
static ObjectPtr InvokeFunction(const Function &function, const Array &arguments)
static IsolateGroup * vm_isolate_group()
ActivationFrame * FrameAt(int i) const
static DebuggerStackTrace * CollectAsyncAwaiters()
intptr_t MaterializeDeferredObjects()
void set_dest_frame(const StackFrame *frame)
intptr_t DestStackAdjustment() const
static constexpr intptr_t kNone
static DoublePtr New(double d, Heap::Space space=Heap::kNew)
static DART_NORETURN void ThrowByType(ExceptionType type, const Array &arguments)
static DART_NORETURN void ThrowOOM()
static DART_NORETURN void ThrowRangeError(const char *argument_name, const Integer &argument_value, intptr_t expected_from, intptr_t expected_to)
static DART_NORETURN void ThrowLateFieldAssignedDuringInitialization(const String &name)
static DART_NORETURN void Throw(Thread *thread, const Instance &exception)
static DART_NORETURN void ThrowArgumentError(const Instance &arg)
@ kIntegerDivisionByZeroException
static DART_NORETURN void ThrowLateFieldNotInitialized(const String &name)
static DART_NORETURN void ReThrow(Thread *thread, const Instance &exception, const Instance &stacktrace, bool bypass_debugger=false)
static void CreateAndThrowTypeError(TokenPosition location, const AbstractType &src_type, const AbstractType &dst_type, const String &dst_name)
static DART_NORETURN void PropagateError(const Error &error)
DART_WARN_UNUSED_RESULT ErrorPtr InitializeInstance(const Instance &instance) const
static bool IsGetterName(const String &function_name)
DART_WARN_UNUSED_RESULT ErrorPtr InitializeStatic() const
static bool IsSetterName(const String &function_name)
ObjectPtr StaticValue() const
static StringPtr GetterName(const String &field_name)
static StringPtr NameFromGetter(const String &getter_name)
void RecordStore(const Object &value) const
static Float32x4Ptr New(float value0, float value1, float value2, float value3, Heap::Space space=Heap::kNew)
static Float64x2Ptr New(double value0, double value1, Heap::Space space=Heap::kNew)
bool PrologueNeedsArgumentsDescriptor() const
static bool IsDynamicInvocationForwarderName(const String &name)
void EnsureHasCompiledUnoptimizedCode() const
const char * ToFullyQualifiedCString() const
ClosurePtr ImplicitInstanceClosure(const Instance &receiver) const
static StringPtr DemangleDynamicInvocationForwarderName(const String &name)
bool HasOptimizedCode() const
static StringPtr CreateDynamicInvocationForwarderName(const String &name)
bool IsMethodExtractor() const
CodePtr unoptimized_code() const
CodePtr EnsureHasCode() const
bool IsDebugging(Thread *thread, const Function &function)
void CollectAllGarbage(GCReason reason=GCReason::kFull, bool compact=false)
bool Contains(uword addr) const
void EnsureHasCheck(const GrowableArray< intptr_t > &class_ids, const Function &target, intptr_t count=1) const
static ICDataPtr ICDataOfEntriesArray(const Array &array)
static ICDataPtr NewWithCheck(const Function &owner, const String &target_name, const Array &arguments_descriptor, intptr_t deopt_id, intptr_t num_args_tested, RebindRule rebind_rule, GrowableArray< intptr_t > *cids, const Function &target, const AbstractType &receiver_type=Object::null_abstract_type())
intptr_t NumArgsTested() const
RebindRule rebind_rule() const
bool NumberOfChecksIs(intptr_t n) const
FunctionPtr GetTargetAt(intptr_t index) const
bool is_tracking_exactness() const
void set_is_megamorphic(bool value) const
void EnsureHasReceiverCheck(intptr_t receiver_class_id, const Function &target, intptr_t count=1, StaticTypeExactnessState exactness=StaticTypeExactnessState::NotTracking()) const
intptr_t NumberOfChecks() const
bool IsInstanceOf(const AbstractType &other, const TypeArguments &other_instantiator_type_arguments, const TypeArguments &other_function_type_arguments) const
AbstractTypePtr GetType(Heap::Space space) const
bool IsAssignableTo(const AbstractType &other, const TypeArguments &other_instantiator_type_arguments, const TypeArguments &other_function_type_arguments) const
static InstancePtr NewAlreadyFinalized(const Class &cls, Heap::Space space=Heap::kNew)
static Int32x4Ptr New(int32_t value0, int32_t value1, int32_t value2, int32_t value3, Heap::Space space=Heap::kNew)
static IntegerPtr New(const String &str, Heap::Space space=Heap::kNew)
static IntegerPtr NewFromUint64(uint64_t value, Heap::Space space=Heap::kNew)
static int EncodeType(Level level, Kind kind)
GroupDebugger * debugger() const
static bool IsSystemIsolateGroup(const IsolateGroup *group)
ObjectStore * object_store() const
static IsolateGroup * Current()
ClassTable * class_table() const
static void ExitTemporaryIsolate()
Mutex * patchable_call_mutex()
Isolate * EnterTemporaryIsolate()
Mutex * subtype_test_cache_mutex()
void set_deopt_context(DeoptContext *value)
static bool IsSystemIsolate(const Isolate *isolate)
IsolateObjectStore * isolate_object_store() const
bool has_attempted_stepping() const
IsolateGroup * group() const
DeoptContext * deopt_context() const
Thread * mutator_thread() const
void set_ptr(ObjectPtr ptr)
LocalHandle * AllocateHandle()
static MegamorphicCachePtr Lookup(Thread *thread, const String &name, const Array &descriptor)
static std::unique_ptr< Message > New(Args &&... args)
static MonomorphicSmiableCallPtr New(classid_t expected_cid, const Code &target)
void SetReturn(const Object &value) const
void SetArgAt(int index, const Object &value) const
static uword GetCurrentStackPointer()
static void static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
UntaggedObject * untag() const
intptr_t GetClassId() const
virtual const char * ToCString() const
static ObjectPtr RawCast(ObjectPtr obj)
static Object & ZoneHandle()
PatchableCallHandler(Thread *thread, const GrowableArray< const Instance * > &caller_arguments, MissHandler miss_handler, NativeArguments arguments, StackFrame *caller_frame, const Code &caller_code, const Function &caller_function)
void ResolveSwitchAndReturn(const Object &data)
uword FindPendingDeopt(uword fp)
void ClearPendingDeoptsAtOrBelow(uword fp, ClearReason reason)
void AddPendingDeopt(uword fp, uword pc)
void set_ptr(ObjectPtr ref)
static bool PostMessage(std::unique_ptr< Message > message, bool before_events=false)
intptr_t num_fields() const
static RecordPtr New(RecordShape shape, Heap::Space space=Heap::kNew)
void SetFieldAt(intptr_t field_index, const Object &value) const
intptr_t GetFieldIndexByName(Thread *thread, const String &field_name) const
ObjectPtr FieldAt(intptr_t field_index) const
static FunctionPtr ResolveDynamicFunction(Zone *zone, const Class &receiver_class, const String &function_name)
static FunctionPtr ResolveDynamicAnyArgs(Zone *zone, const Class &receiver_class, const String &function_name, bool allow_add=true)
static FunctionPtr ResolveDynamicForReceiverClass(const Class &receiver_class, const String &function_name, const ArgumentsDescriptor &args_desc, bool allow_add=true)
intptr_t AbandonRemainingTLAB(Thread *thread)
static Simulator * Current()
static SingleTargetCachePtr New()
static SmiPtr New(intptr_t value)
@ kAllowCrossThreadIteration
@ kNoCrossThreadIteration
virtual bool IsExitFrame() const
CodePtr LookupDartCode() const
virtual bool IsStubFrame() const
TokenPosition GetTokenPos() const
virtual bool IsDartFrame(bool validate=true) const
FunctionPtr LookupDartFunction() const
static StaticTypeExactnessState NotExact()
static StaticTypeExactnessState NotTracking()
static StringPtr NewFormatted(const char *format,...) PRINTF_ATTRIBUTE(1
static StringPtr New(const char *cstr, Heap::Space space=Heap::kNew)
static const char * ToCString(Thread *thread, StringPtr ptr)
static CodePtr GetAllocationStubForClass(const Class &cls)
void WriteEntryToBuffer(Zone *zone, BaseTextBuffer *buffer, intptr_t index, const char *line_prefix=nullptr) const
static SubtypeTestCachePtr New(intptr_t num_inputs)
static constexpr intptr_t kMaxInputs
static constexpr intptr_t MaxEntriesForCacheAllocatedFor(intptr_t count)
static intptr_t UsedInputsForType(const AbstractType &type)
intptr_t NumberOfChecks() const
intptr_t AddCheck(const Object &instance_class_id_or_signature, const AbstractType &destination_type, const TypeArguments &instance_type_arguments, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const TypeArguments &instance_parent_function_type_arguments, const TypeArguments &instance_delayed_type_arguments, const Bool &test_result) const
bool HasCheck(const Object &instance_class_id_or_signature, const AbstractType &destination_type, const TypeArguments &instance_type_arguments, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const TypeArguments &instance_parent_function_type_arguments, const TypeArguments &instance_delayed_type_arguments, intptr_t *index, Bool *result) const
static SuspendStatePtr Clone(Thread *thread, const SuspendState &src, Heap::Space space=Heap::kNew)
static constexpr intptr_t kSuspendStateVarIndex
static SuspendStatePtr New(intptr_t frame_size, const Instance &function_data, Heap::Space space=Heap::kNew)
void set_execution_state(ExecutionState state)
ApiLocalScope * api_top_scope() const
void DeferredMarkingStackAddObject(ObjectPtr obj)
int32_t no_callback_scope_depth() const
static Thread * Current()
PendingDeopts & pending_deopts()
void set_unboxed_int64_runtime_arg(int64_t value)
static bool IsAtSafepoint(SafepointLevel level, uword state)
void SetUnwindErrorInProgress(bool value)
double unboxed_double_runtime_arg() const
int64_t unboxed_int64_runtime_arg() const
bool is_unwind_in_progress() const
uword top_exit_frame_info() const
int64_t unboxed_int64_runtime_second_arg() const
bool IsDartMutatorThread() const
ExecutionState execution_state() const
Isolate * isolate() const
int32_t IncrementAndGetStackOverflowCount()
uint32_t IncrementAndGetRuntimeCallCount()
IsolateGroup * isolate_group() const
static void EnterIsolate(Isolate *isolate)
static void ExitIsolate(bool isolate_shutdown=false)
void set_unboxed_int64_runtime_second_arg(int64_t value)
bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
TypeArgumentsPtr InstantiateAndCanonicalizeFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments) const
bool IsUninstantiatedIdentity() const
intptr_t GetSubtypeTestCachePoolIndex()
static CodePtr SpecializeStubFor(Thread *thread, const AbstractType &type)
static TypePtr BoolType()
static intptr_t MaxElements(intptr_t class_id)
static TypedDataPtr New(intptr_t class_id, intptr_t len, Heap::Space space=Heap::kNew)
static uword ToAddr(const UntaggedObject *raw_obj)
static constexpr T Maximum(T x, T y)
#define THR_Print(format,...)
struct _Dart_Handle * Dart_Handle
G_BEGIN_DECLS G_MODULE_EXPORT FlValue * args
static const uint8_t buffer[]
const uint8_t uint32_t uint32_t GError ** error
#define DECLARE_FLAG(type, name)
#define DEFINE_FLAG(type, name, default_value, comment)
Dart_NativeFunction function
static float max(float r, float g, float b)
#define MSAN_UNPOISON(ptr, len)
bool WillAllocateNewOrRememberedContext(intptr_t num_context_variables)
bool WillAllocateNewOrRememberedArray(intptr_t length)
constexpr int64_t kMaxInt64
static AbstractTypePtr InstantiateType(const AbstractType &type, const AbstractType &instantiator)
static void InlineCacheMissHandler(Thread *thread, Zone *zone, const GrowableArray< const Instance * > &args, const ICData &ic_data, NativeArguments native_arguments)
FunctionPtr InlineCacheMissHelper(const Class &receiver_class, const Array &args_descriptor, const String &target_name)
void DLRT_PropagateError(Dart_Handle handle)
IntegerPtr DoubleToInteger(Zone *zone, double val)
void DeoptimizeFunctionsOnStack()
static void CopySavedRegisters(uword saved_registers_address, fpu_register_t **fpu_registers, intptr_t **cpu_registers)
static bool IsSuspendedFrame(Zone *zone, const Function &function, StackFrame *frame)
Thread * DLRT_GetFfiCallbackMetadata(FfiCallbackMetadata::Trampoline trampoline, uword *out_entry_point, uword *out_trampoline_type)
static void DeoptimizeLastDartFrameIfOptimized()
static void PrintSubtypeCheck(const AbstractType &subtype, const AbstractType &supertype, const bool result)
static void HandleOSRRequest(Thread *thread)
double DartModulo(double left, double right)
static DART_FORCE_INLINE uword ParamAddress(uword fp, intptr_t reverse_index)
static void NullErrorHelper(Zone *zone, const String &selector, bool is_param_name=false)
static void UpdateTypeTestCache(Zone *zone, Thread *thread, const Instance &instance, const AbstractType &destination_type, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const Bool &result, const SubtypeTestCache &new_cache)
DART_EXPORT void Dart_PropagateError(Dart_Handle handle)
@ kFixCallersTargetMonomorphic
static TokenPosition GetCallerLocation()
void *(* MemMoveCFunction)(void *dest, const void *src, size_t n)
void DFLRT_ExitSafepointIgnoreUnwindInProgress(NativeArguments __unusable_)
static void PrintTypeCheck(const char *message, const Instance &instance, const AbstractType &type, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const Bool &result)
void DLRT_ExitHandleScope(Thread *thread)
LocalHandle * DLRT_AllocateHandle(ApiLocalScope *scope)
void __tsan_acquire(void *addr)
void DLRT_ExitTemporaryIsolate()
double(* BinaryMathCFunction)(double x, double y)
void __msan_unpoison(const volatile void *, size_t)
DART_EXPORT bool Dart_IsError(Dart_Handle handle)
ApiLocalScope * DLRT_EnterHandleScope(Thread *thread)
const int kNumberOfFpuRegisters
static constexpr intptr_t kNumberOfSavedFpuRegisters
void DeoptimizeAt(Thread *mutator_thread, const Code &optimized_code, StackFrame *frame)
static bool ResolveCallThroughGetter(const Class &receiver_class, const String &target_name, const String &demangled, const Array &arguments_descriptor, Function *result)
static FunctionPtr ComputeTypeCheckTarget(const Instance &receiver, const AbstractType &type, const ArgumentsDescriptor &desc)
static void RuntimeAllocationEpilogue(Thread *thread)
static constexpr intptr_t kDefaultMaxSubtypeCacheEntries
static FunctionPtr Resolve(Thread *thread, Zone *zone, const GrowableArray< const Instance * > &caller_arguments, const Class &receiver_class, const String &name, const Array &descriptor)
static void HandleStackOverflowTestCases(Thread *thread)
static Heap::Space SpaceForRuntimeAllocation()
double(* UnaryMathCFunction)(double x)
static constexpr intptr_t kNumberOfSavedCpuRegisters
@ kTypeCheckFromLazySpecializeStub
FrameLayout runtime_frame_layout
static void ThrowIfError(const Object &result)
static DART_FORCE_INLINE uword LocalVarAddress(uword fp, intptr_t index)
static InstancePtr AllocateObject(Thread *thread, const Class &cls)
constexpr intptr_t kWordSize
void DFLRT_ExitSafepoint(NativeArguments __unusable_)
static void TrySwitchInstanceCall(Thread *thread, StackFrame *caller_frame, const Code &caller_code, const Function &caller_function, const ICData &ic_data, const Function &target_function)
void __msan_unpoison_param(size_t)
void DFLRT_EnterSafepoint(NativeArguments __unusable_)
static DART_FORCE_INLINE bool IsCalleeFrameOf(uword fp, uword other_fp)
const char *const function_name
static int8_t data[kExtLength]
static void DoThrowNullError(Isolate *isolate, Thread *thread, Zone *zone, bool is_param)
const char * DeoptReasonToCString(ICData::DeoptReasonId deopt_reason)
void __tsan_release(void *addr)
static ObjectPtr InvokeCallThroughGetterOrNoSuchMethod(Thread *thread, Zone *zone, const Instance &receiver, const String &target_name, const Array &orig_arguments, const Array &orig_arguments_desc)
void ReportImpossibleNullError(intptr_t cid, StackFrame *caller_frame, Thread *thread)
void OnEveryRuntimeEntryCall(Thread *thread, const char *runtime_call_name, bool can_lazy_deopt)
const int kFpuRegisterSize
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified vm service A custom Dart VM Service port The default is to pick a randomly available open port disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode disable vm service Disable mDNS Dart VM Service publication Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set endless trace Enable an endless trace buffer The default is a ring buffer This is useful when very old events need to viewed For during application launch Memory usage will continue to grow indefinitely however Start app with an specific route defined on the framework flutter assets Path to the Flutter assets directory enable service port Allow the VM service to fallback to automatic port selection if binding to a specified port fails trace Trace early application lifecycle Automatically switches to an endless trace buffer trace skia Filters out all Skia trace event categories except those that are specified in this comma separated list dump skp on shader Automatically dump the skp that triggers new shader compilations This is useful for writing custom ShaderWarmUp to reduce jank By this is not enabled to reduce the overhead purge persistent cache
std::function< void()> closure
#define CHECK_STACK_ALIGNMENT
#define DEOPT_REASON_TO_TEXT(name)
#define DEFINE_RUNTIME_ENTRY(name, argument_count)
#define TRACE_RUNTIME_CALL(format, name)
#define DEFINE_RAW_LEAF_RUNTIME_ENTRY(name, argument_count, is_float, func)
#define DEFINE_RUNTIME_ENTRY_NO_LAZY_DEOPT(name, argument_count)
#define END_LEAF_RUNTIME_ENTRY
#define DEFINE_LEAF_RUNTIME_ENTRY(type, name, argument_count,...)
intptr_t first_local_from_fp
intptr_t FrameSlotForVariableIndex(intptr_t index) const