#if !defined(DART_PRECOMPILED_RUNTIME)
DEFINE_FLAG(
    int,
    max_subtype_cache_entries,
    100,
    "Maximum number of subtype cache entries (number of checks cached).");
DEFINE_FLAG(
    int,
    regexp_optimization_counter_threshold,
    1000,
    "RegExp's usage-counter value before it is optimized, -1 means never");
DEFINE_FLAG(int,
            reoptimization_counter_threshold,
            4000,
            "Counter threshold before a function gets reoptimized.");
DEFINE_FLAG(bool,
            runtime_allocate_old,
            false,
            "Use old-space for allocation via runtime calls.");
DEFINE_FLAG(bool,
            runtime_allocate_spill_tlab,
            false,
            "Ensure results of allocation via runtime calls are not in an "
            "active TLAB.");
DEFINE_FLAG(bool, trace_deoptimization, false, "Trace deoptimization");
DEFINE_FLAG(bool,
            trace_deoptimization_verbose,
            false,
            "Trace deoptimization verbose");
DEFINE_FLAG(bool, trace_osr, false, "Trace attempts at on-stack replacement.");
DEFINE_FLAG(int, gc_every, 0, "Run major GC on every N stack overflow checks");
DEFINE_FLAG(int,
            stacktrace_every,
            0,
            "Compute debugger stacktrace on every N stack overflow checks");
DEFINE_FLAG(charp,
            stacktrace_filter,
            nullptr,
            "Compute stacktrace in named function on stack overflow checks");
DEFINE_FLAG(charp,
            deoptimize_filter,
            nullptr,
            "Deoptimize in named function on stack overflow checks");
DEFINE_FLAG(charp,
            deoptimize_on_runtime_call_name_filter,
            nullptr,
            "Runtime call name filter for --deoptimize-on-runtime-call-every.");
DEFINE_FLAG(bool,
            unopt_monomorphic_calls,
            true,
            "Enable specializing monomorphic calls from unoptimized code.");
DEFINE_FLAG(bool,
            unopt_megamorphic_calls,
            true,
            "Enable specializing megamorphic calls from unoptimized code.");
DEFINE_FLAG(bool,
            verbose_stack_overflow,
            false,
            "Print additional details about stack overflow.");
  const Instance& length = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Instance& index = Instance::CheckedHandle(zone, arguments.ArgAt(1));
  if (!length.IsInteger()) {
    args.SetAt(1, Symbols::Length());
  if (!index.IsInteger()) {
    args.SetAt(0, index);
    args.SetAt(1, Symbols::Index());
  args.SetAt(0, index);
      zone, Integer::Cast(length).ArithmeticOp(
  args.SetAt(3, Symbols::Length());
  int64_t unboxed_length = thread->unboxed_int64_runtime_arg();
  int64_t unboxed_index = thread->unboxed_int64_runtime_second_arg();
  args.SetAt(0, index);
      zone, Integer::Cast(length).ArithmeticOp(
  args.SetAt(3, Symbols::Length());
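// Both the boxed and the unboxed RangeError entries above enforce the same
// invariant before throwing: a valid index i into a receiver of length n
// satisfies 0 <= i < n. A self-contained illustration of that check
// (hypothetical helper, not VM API):
#include <cstdint>
static inline bool IsIndexInRange(int64_t index, int64_t length) {
  return (index >= 0) && (index < length);  // valid range is [0, length)
}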
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Smi& kind = Smi::CheckedHandle(zone, arguments.ArgAt(1));
  switch (kind.Value()) {
          "Cannot attach NativeFinalizer to deeply immutable object: %s",
                            bool is_param_name = false) {
        zone, String::New("Null check operator used on a null value")));
  args.SetAt(0, Object::null_object());
  args.SetAt(1, selector);
  args.SetAt(2, invocation_type);
  args.SetAt(3, Object::smi_zero());
  args.SetAt(4, Object::null_object());
  args.SetAt(5, Object::null_object());
  args.SetAt(6, Object::null_object());
  const uword pc_offset = caller_frame->pc() - code.PayloadStart();
  if (FLAG_shared_slow_path_triggers_gc) {
                        Function::null_function());
    member_name ^= pool.ObjectAt(name_index);
    member_name = Symbols::OptimizedOut().ptr();
    buffer.Printf("hit null error with cid %" Pd ", caller context: ", cid);
    const intptr_t kMaxSlotsCollected = 5;
    const auto slots = reinterpret_cast<ObjectPtr*>(caller_frame->sp());
    const intptr_t num_slots_in_frame =
        reinterpret_cast<ObjectPtr*>(caller_frame->fp()) - slots;
    const auto num_slots_to_collect =
    for (intptr_t i = 0; i < num_slots_to_collect; i++) {
      buffer.Printf("%s[sp+%" Pd "] %" Pp "", comma ? ", " : "", i,
                    static_cast<uword>(ptr));
      if (ptr->IsHeapObject() &&
  const Smi& cid = Smi::CheckedHandle(zone, arguments.ArgAt(0));
  const String& selector = String::CheckedHandle(zone, arguments.ArgAt(0));
  const Instance& value = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  int64_t unboxed_value = arguments.thread()->unboxed_int64_runtime_arg();
  double val = arguments.thread()->unboxed_double_runtime_arg();
  const Smi& recognized_kind = Smi::CheckedHandle(zone, arguments.ArgAt(0));
  switch (recognized_kind.Value()) {
    case MethodRecognizer::kDoubleToInteger:
    case MethodRecognizer::kDoubleFloorToInt:
    case MethodRecognizer::kDoubleCeilToInt:
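// The recognized kinds above select the rounding mode used before a double is
// converted to an integer. A self-contained sketch of the three conversions
// (illustrative only; the VM performs the conversion on Dart objects):
#include <cmath>
static double RoundForRecognizedKind(bool is_floor, bool is_ceil, double v) {
  if (is_floor) return std::floor(v);  // kDoubleFloorToInt
  if (is_ceil) return std::ceil(v);    // kDoubleCeilToInt
  return std::trunc(v);  // kDoubleToInteger truncates toward zero
}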
  if (UNLIKELY(FLAG_runtime_allocate_spill_tlab)) {
    if ((count++ % 10) == 0) {
  const Instance& length = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  if (!length.IsInteger()) {
    args.SetAt(1, Symbols::Length());
  const int64_t len = Integer::Cast(length).AsInt64Value();
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  arguments.SetReturn(array);
  if (FLAG_shared_slow_path_triggers_gc) {
  const double val = thread->unboxed_double_runtime_arg();
  const auto val = thread->unboxed_simd128_runtime_arg();
  const auto val = thread->unboxed_simd128_runtime_arg();
  if (FLAG_shared_slow_path_triggers_gc) {
  if (FLAG_shared_slow_path_triggers_gc) {
  if (FLAG_shared_slow_path_triggers_gc) {
  if (FLAG_shared_slow_path_triggers_gc) {
  const intptr_t cid = Smi::CheckedHandle(zone, arguments.ArgAt(0)).Value();
  const auto& length = Instance::CheckedHandle(zone, arguments.ArgAt(1));
  if (!length.IsInteger()) {
  const int64_t len = Integer::Cast(length).AsInt64Value();
  const auto& typed_data =
  arguments.SetReturn(typed_data);
  ASSERT(caller_frame != nullptr);
  const Class& cls = Class::CheckedHandle(zone, arguments.ArgAt(0));
  ASSERT(Instance::CheckedHandle(zone, arguments.ArgAt(1)).IsNull());
  const auto& type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  ASSERT(type_arguments.IsNull() ||
         (type_arguments.IsInstantiated() &&
  instance.SetTypeArguments(type_arguments);
    EnsureRememberedAndMarkingDeferred,
  bool add_to_remembered_set = true;
  if (object->IsNewObject()) {
    add_to_remembered_set = false;
  } else if (object->IsArray()) {
    add_to_remembered_set =
  } else if (object->IsContext()) {
    const intptr_t num_context_variables =
    add_to_remembered_set =
        num_context_variables);
  if (add_to_remembered_set) {
    object->untag()->EnsureInRememberedSet(thread);
  return static_cast<uword>(object);
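// EnsureRememberedAndMarkingDeferred above decides whether a store target
// must be added to the remembered set of a generational heap: new-space
// objects never need an entry, and sufficiently large arrays and contexts are
// handled by other barrier strategies instead. A minimal self-contained
// sketch of the same decision shape (hypothetical types, not the VM's):
struct FakeObject {
  bool is_new;         // allocated in new space?
  int payload_length;  // e.g. array length or context variable count
};
static bool NeedsRememberedSetEntry(const FakeObject& obj, int big_threshold) {
  if (obj.is_new) return false;  // new-space stores need no remembered entry
  return obj.payload_length <= big_threshold;  // huge objects handled elsewhere
}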
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  type = type.InstantiateFrom(instantiator_type_arguments,
  arguments.SetReturn(type);
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(0));
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
          instantiator_type_arguments, function_type_arguments);
  arguments.SetReturn(type_arguments);
  ASSERT(caller_frame != nullptr);
    THR_Print("SubtypeCheck: '%s' %d %s '%s' %d (pc: %#" Px ").\n",
  if (function.HasSavedArgumentsDescriptor()) {
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(0));
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  AbstractType& subtype = AbstractType::CheckedHandle(zone, arguments.ArgAt(2));
      AbstractType::CheckedHandle(zone, arguments.ArgAt(3));
  const String& dst_name = String::CheckedHandle(zone, arguments.ArgAt(4));
          &subtype, &supertype, instantiator_type_args, function_type_args)) {
    if (FLAG_trace_type_checks) {
  if (FLAG_trace_type_checks) {
  const auto& function = Function::CheckedHandle(zone, arguments.ArgAt(0));
  const auto& instantiator_type_args =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  const auto& delayed_type_args =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
      zone, Closure::New(instantiator_type_args, Object::null_type_arguments(),
                         delayed_type_args, function, context,
  const Smi& num_variables = Smi::CheckedHandle(zone, arguments.ArgAt(0));
  arguments.SetReturn(context);
  const Context& ctx = Context::CheckedHandle(zone, arguments.ArgAt(0));
  arguments.SetReturn(cloned_ctx);
  arguments.SetReturn(record);
  const auto& value0 = Instance::CheckedHandle(zone, arguments.ArgAt(1));
  const auto& value1 = Instance::CheckedHandle(zone, arguments.ArgAt(2));
  const auto& value2 = Instance::CheckedHandle(zone, arguments.ArgAt(3));
  const intptr_t num_fields = shape.num_fields();
  ASSERT(num_fields == 2 || num_fields == 3);
  if (num_fields > 2) {
  arguments.SetReturn(record);
  const intptr_t frame_size =
      Smi::CheckedHandle(zone, arguments.ArgAt(0)).Value();
  if (previous_state.IsSuspendState()) {
    const auto& suspend_state = SuspendState::Cast(previous_state);
    const auto& function_data =
  ObjectStore* object_store = thread->isolate_group()->object_store();
  if (function_data.GetClassId() ==
      Class::Handle(zone, object_store->async_star_stream_controller())
    function_data.SetField(
            object_store->async_star_stream_controller_async_star_body()),
        Object::null_object());
  if (function_data.GetClassId() ==
      Class::Handle(zone, object_store->sync_star_iterator_class()).id()) {
    function_data.SetField(
        Field::Handle(zone, object_store->sync_star_iterator_state()),
  arguments.SetReturn(result);
      SuspendState::CheckedHandle(zone, arguments.ArgAt(0));
  arguments.SetReturn(dst);
  ASSERT(caller_frame != nullptr);
  if (type.IsInstantiated()) {
              type.NameCString(), type.type_class_id(), caller_frame->pc());
      type.InstantiateFrom(instantiator_type_arguments,
    THR_Print("%s: '%s' %s '%s' instantiated from '%s' (pc: %#" Px ").\n",
  if (function.HasSavedArgumentsDescriptor()) {
#if defined(TARGET_ARCH_IA32)
static BoolPtr CheckHashBasedSubtypeTestCache(
    const AbstractType& destination_type,
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    const SubtypeTestCache& cache) {
  if (instance_class.IsClosureClass()) {
    instance_class_id_or_signature = function.signature();
    instance_type_arguments = closure.instantiator_type_arguments();
    instance_parent_function_type_arguments = closure.function_type_arguments();
    instance_delayed_type_arguments = closure.delayed_type_arguments();
    instance_class_id_or_signature = Smi::New(instance_class.id());
    if (instance_class.NumTypeArguments() > 0) {
      instance_type_arguments = instance.GetTypeArguments();
  if (cache.HasCheck(instance_class_id_or_signature, destination_type,
                     instance_type_arguments, instantiator_type_arguments,
                     function_type_arguments,
                     instance_parent_function_type_arguments,
                     instance_delayed_type_arguments, &index, &result)) {
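// As the HasCheck call above shows, a subtype-test cache entry is keyed on
// the full tuple that determines a type test's outcome: the instance's class
// id (or function signature for closures), its type arguments, the
// instantiator and function type argument vectors, the closure's parent and
// delayed type arguments, and the destination type. A sketch of such a
// tuple-keyed cache using standard containers (hypothetical key layout; the
// VM uses a flat array reachable from generated code):
#include <cstdint>
#include <map>
#include <tuple>
using FakeStcKey = std::tuple<intptr_t,    // class id or signature
                              uintptr_t,   // instance type arguments
                              uintptr_t,   // instantiator type arguments
                              uintptr_t,   // function type arguments
                              uintptr_t>;  // destination type
static std::map<FakeStcKey, bool> fake_subtype_cache;  // cached test result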
  if (FLAG_trace_type_checks) {
    THR_Print("Not updating subtype test cache for the record instance.\n");
    instance_class_id_or_signature = function.signature();
    ASSERT(instance_class_id_or_signature.IsFunctionType());
    instance_type_arguments = closure.instantiator_type_arguments();
    instance_parent_function_type_arguments = closure.function_type_arguments();
    instance_delayed_type_arguments = closure.delayed_type_arguments();
    ASSERT(instance_class_id_or_signature.IsCanonical());
    ASSERT(instance_type_arguments.IsCanonical());
    ASSERT(instance_parent_function_type_arguments.IsCanonical());
    ASSERT(instance_delayed_type_arguments.IsCanonical());
    instance_class_id_or_signature = Smi::New(instance_class.id());
      instance_type_arguments = instance.GetTypeArguments();
      ASSERT(instance_type_arguments.IsCanonical());
  if (FLAG_trace_type_checks) {
    const auto& instance_class_name =
    buffer.Printf("  Updating test cache %#" Px " with result %s for:\n",
    buffer.Printf("    class: %s (%" Pd ")\n", instance_class_name.ToCString(),
                  instance_class.id());
        "    raw entry: [ %#" Px ", %#" Px ", %#" Px ", %#" Px ", %#" Px
        ", %#" Px ", %#" Px ", %#" Px " ]\n",
        static_cast<uword>(instance_class_id_or_signature.ptr()),
        static_cast<uword>(instance_type_arguments.ptr()),
        static_cast<uword>(instantiator_type_arguments.ptr()),
        static_cast<uword>(function_type_arguments.ptr()),
        static_cast<uword>(instance_parent_function_type_arguments.ptr()),
        static_cast<uword>(instance_delayed_type_arguments.ptr()),
        static_cast<uword>(destination_type.ptr()),
  if (len >= FLAG_max_subtype_cache_entries) {
    if (FLAG_trace_type_checks) {
      THR_Print("Not updating subtype test cache as its length reached %d\n",
                FLAG_max_subtype_cache_entries);
  intptr_t colliding_index = -1;
          instance_class_id_or_signature, destination_type,
          instance_type_arguments, instantiator_type_arguments,
          function_type_arguments, instance_parent_function_type_arguments,
          instance_delayed_type_arguments, &colliding_index, &old_result)) {
    if (FLAG_trace_type_checks) {
      buffer.Printf("  Collision for test cache %#" Px " at index %" Pd ":\n",
                    static_cast<uword>(new_cache.ptr()), colliding_index);
      buffer.Printf("    entry: ");
    if (old_result.ptr() != result.ptr()) {
      FATAL("Existing subtype test cache entry has result %s, not %s",
            old_result.ToCString(), result.ToCString());
  const intptr_t new_index = new_cache.AddCheck(
      instance_class_id_or_signature, destination_type,
      instance_type_arguments, instantiator_type_arguments,
      function_type_arguments, instance_parent_function_type_arguments,
      instance_delayed_type_arguments, result);
  if (FLAG_trace_type_checks) {
    buffer.Printf("  Added new entry to test cache %#" Px " at index %" Pd
                  static_cast<uword>(new_cache.ptr()), new_index);
    buffer.Printf("    new entry: ");
  const Instance& instance = Instance::CheckedHandle(zone, arguments.ArgAt(0));
      AbstractType::CheckedHandle(zone, arguments.ArgAt(1));
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
      SubtypeTestCache::CheckedHandle(zone, arguments.ArgAt(4));
#if defined(TARGET_ARCH_IA32)
  if (cache.IsHash()) {
        zone, CheckHashBasedSubtypeTestCache(zone, thread, instance, type,
                                             instantiator_type_arguments,
                                             function_type_arguments, cache));
    arguments.SetReturn(result);
          type, instantiator_type_arguments, function_type_arguments));
  if (FLAG_trace_type_checks) {
                   function_type_arguments, result);
  arguments.SetReturn(result);
bool TESTING_runtime_entered_on_TTS_invocation = false;
      Instance::CheckedHandle(zone, arguments.ArgAt(0));
      AbstractType::CheckedHandle(zone, arguments.ArgAt(1));
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
  dst_name ^= arguments.ArgAt(4);
  cache ^= arguments.ArgAt(5);
      Smi::CheckedHandle(zone, arguments.ArgAt(6)).Value());
  TESTING_runtime_entered_on_TTS_invocation = true;
#if defined(TARGET_ARCH_IA32)
  if (cache.IsHash()) {
        zone, CheckHashBasedSubtypeTestCache(
                  zone, thread, src_instance, dst_type,
                  instantiator_type_arguments, function_type_arguments, cache));
    arguments.SetReturn(result);
      dst_type, instantiator_type_arguments, function_type_arguments);
  if (FLAG_trace_type_checks) {
                 instantiator_type_arguments, function_type_arguments,
  auto resolve_dst_name = [&]() {
    if (!dst_name.IsNull()) return;
#if !defined(TARGET_ARCH_IA32)
    const Code& caller_code =
    const intptr_t dst_name_idx = stc_pool_idx + 1;
    dst_name ^= pool.ObjectAt(dst_name_idx);
  if (!is_instance_of) {
    if (dst_name.ptr() ==
        Symbols::dynamic_assert_assignable_stc_check().ptr()) {
#if !defined(TARGET_ARCH_IA32)
      const auto& dispatcher =
      ASSERT(dispatcher.IsInvokeFieldDispatcher());
      const auto& orig_arguments_desc =
      for (intptr_t i = 0; i < arg_count; i++) {
        orig_arguments.SetAt(i, obj);
      const auto& receiver = Closure::CheckedHandle(
          zone, function.DoArgumentTypesMatch(orig_arguments, args_desc));
    const auto& src_type =
    if (!reported_type.IsInstantiated()) {
      reported_type = reported_type.InstantiateFrom(instantiator_type_arguments,
                                                    function_type_arguments,
  bool should_update_cache = true;
#if !defined(TARGET_ARCH_IA32)
  bool would_update_cache_if_not_lazy = false;
#if !defined(DART_PRECOMPILED_RUNTIME)
  if (tts_type.IsTypeParameter()) {
    const auto& param = TypeParameter::Cast(tts_type);
    tts_type = param.GetFromTypeArguments(instantiator_type_arguments,
                                          function_type_arguments);
  ASSERT(!tts_type.IsTypeParameter());
  if (FLAG_trace_type_checks) {
    THR_Print("  Specializing type testing stub for %s\n",
              tts_type.ToCString());
  tts_type.SetTypeTestingStub(code);
  would_update_cache_if_not_lazy =
      (!src_instance.IsNull() &&
       tts_type.type_test_stub() ==
           StubCode::DefaultNullableTypeTest().ptr()) ||
      tts_type.type_test_stub() == StubCode::DefaultTypeTest().ptr();
  should_update_cache = would_update_cache_if_not_lazy && cache.IsNull();
      (tts_type.type_test_stub() != StubCode::DefaultNullableTypeTest().ptr() &&
       tts_type.type_test_stub() != StubCode::DefaultTypeTest().ptr())) {
    if (FLAG_trace_type_checks) {
      THR_Print("  Rebuilding type testing stub for %s\n",
                tts_type.ToCString());
    const auto& old_code = Code::Handle(zone, tts_type.type_test_stub());
    ASSERT(old_code.ptr() != new_code.ptr());
    ASSERT(new_code.ptr() != StubCode::DefaultNullableTypeTest().ptr() &&
           new_code.ptr() != StubCode::DefaultTypeTest().ptr());
    const auto& old_instructions =
    const auto& new_instructions =
    should_update_cache = old_instructions.Equals(new_instructions);
    if (FLAG_trace_type_checks) {
      THR_Print("  %s rebuilt type testing stub for %s\n",
                should_update_cache ? "Discarding" : "Installing",
                tts_type.ToCString());
    if (!should_update_cache) {
      tts_type.SetTypeTestingStub(new_code);
  if (should_update_cache) {
    if (cache.IsNull()) {
#if !defined(TARGET_ARCH_IA32)
             would_update_cache_if_not_lazy));
      const Code& caller_code =
      cache ^= pool.ObjectAt<std::memory_order_acquire>(stc_pool_idx);
      if (cache.IsNull()) {
        const intptr_t num_inputs =
                Symbols::dynamic_assert_assignable_stc_check().ptr()
        pool.SetObjectAt<std::memory_order_release>(stc_pool_idx, cache);
        if (FLAG_trace_type_checks) {
          THR_Print("  Installed new subtype test cache %#" Px " with %" Pd
                    " inputs at index %" Pd " of pool for %s\n",
                    static_cast<uword>(cache.ptr()), num_inputs, stc_pool_idx,
                       instantiator_type_arguments, function_type_arguments,
  arguments.SetReturn(src_instance);
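// The pool update above pairs an acquire load with a release store so that a
// subtype-test cache created by one thread is published to others only after
// it is fully initialized. The same pattern with std::atomic, as a
// self-contained sketch (hypothetical cache type; unlike the VM, this sketch
// does not serialize concurrent installers, so a losing thread would leak):
#include <atomic>
struct FakeCache { /* fully built before publication */ };
static std::atomic<FakeCache*> fake_pool_slot{nullptr};
static FakeCache* GetOrInstallCache() {
  FakeCache* cache = fake_pool_slot.load(std::memory_order_acquire);
  if (cache == nullptr) {
    FakeCache* fresh = new FakeCache();  // initialize completely first...
    fake_pool_slot.store(fresh, std::memory_order_release);  // ...then publish
    cache = fresh;
  }
  return cache;
}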
      Instance::CheckedHandle(zone, arguments.ArgAt(0));
  if (src_instance.IsNull()) {
                  "Failed assertion: boolean expression must not be null")));
    args.SetAt(2, Object::smi_zero());
    args.SetAt(3, Object::smi_zero());
  ASSERT(!src_instance.IsBool());
                                 Symbols::BooleanExpression());
  const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
      Instance::CheckedHandle(zone, arguments.ArgAt(1));
  const Smi& bypass_debugger = Smi::CheckedHandle(zone, arguments.ArgAt(2));
                     bypass_debugger.Value() != 0);
#if !defined(DART_PRECOMPILED_RUNTIME)
  ASSERT(caller_frame != nullptr);
  if (target_code.ptr() !=
  if (target_code.ptr() !=
  if (FLAG_trace_patching) {
    THR_Print("PatchStaticCall: patching caller pc %#" Px
              " to '%s' new entry point %#" Px " (%s)\n",
              target_code.is_optimized() ? "optimized" : "unoptimized");
  arguments.SetReturn(target_code);
#if defined(PRODUCT) || defined(DART_PRECOMPILED_RUNTIME)
  ASSERT(caller_frame != nullptr);
      isolate->group()->debugger()->GetPatchedStubAddress(caller_frame->pc());
      Error::Handle(zone, isolate->debugger()->PauseBreakpoint());
  arguments.SetReturn(orig_stub);
#if defined(PRODUCT) || defined(DART_PRECOMPILED_RUNTIME)
    const String& target_name,
    const Array& arguments_descriptor,
  const bool create_if_absent = !FLAG_precompiled_mode;
  const int kTypeArgsLen = 0;
  const int kNumArguments = 1;
          receiver_class, getter_name, args_desc, create_if_absent));
          target_name, arguments_descriptor,
          UntaggedFunction::kInvokeFieldDispatcher, create_if_absent));
  ASSERT(!create_if_absent || !target_function.IsNull());
  if (FLAG_trace_ic) {
        "InvokeField IC miss: adding <%s> id:%" Pd " -> <%s>\n",
    const Array& args_descriptor,
    const String& target_name) {
  const String* demangled = &target_name;
#if defined(DART_PRECOMPILED_RUNTIME)
  const bool create_if_absent = false;
  const bool create_if_absent = true;
          args_descriptor, &result)) {
          *demangled, args_descriptor,
          UntaggedFunction::kNoSuchMethodDispatcher, create_if_absent));
  if (FLAG_trace_ic) {
        "NoSuchMethod IC miss: adding <%s> id:%" Pd " -> <%s>\n",
#if !defined(DART_PRECOMPILED_RUNTIME)
    const Code& caller_code,
  auto zone = thread->zone();
#if !defined(PRODUCT)
#if !defined(PRODUCT)
  if (FLAG_unopt_monomorphic_calls && (num_checks == 1)) {
    if (FLAG_trace_ic) {
                " switching to monomorphic dispatch, %s\n",
  if (FLAG_unopt_megamorphic_calls &&
      (num_checks > FLAG_max_polymorphic_checks)) {
    const Array& descriptor =
                                    StubCode::MegamorphicCall());
    if (FLAG_trace_ic) {
                " switching to megamorphic dispatch, %s\n",
                               Object::null_type_arguments());
                 : store->simple_instance_of_false_function());
    const Class& receiver_class,
    const Array& descriptor) {
  const bool allow_add = !FLAG_precompiled_mode;
      receiver_class, name, args_desc, allow_add);
  if (caller_arguments.length() == 2 &&
          ->simple_instance_of_function()) {
  if (target_function.IsNull()) {
  ASSERT(!allow_add || !target_function.IsNull());
  return target_function.ptr();
  const Instance& arg = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(1));
  if (FLAG_trace_ic) {
    ASSERT(caller_frame != nullptr);
  arguments.SetReturn(target);
  const Instance& arg0 = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Instance& arg1 = Instance::CheckedHandle(zone, arguments.ArgAt(1));
  const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(2));
  if (FLAG_trace_ic) {
    ASSERT(caller_frame != nullptr);
              caller_frame->pc(), target.ToCString(), cids[0], cids[1]);
  arguments.SetReturn(target);
#if defined(DART_PRECOMPILED_RUNTIME)
static bool IsSingleTarget(IsolateGroup* isolate_group,
                           const String& name) {
  ClassTable* table = isolate_group->class_table();
  for (intptr_t cid = lower_cid; cid <= upper_cid; cid++) {
    if (!table->HasValidClassAt(cid)) continue;
    if (cls.is_abstract()) continue;
    if (!cls.is_allocated()) continue;
    if (other_target.ptr() != target.ptr()) {
class SavedUnlinkedCallMapKeyEqualsTraits : public AllStatic {
  static const char* Name() { return "SavedUnlinkedCallMapKeyEqualsTraits "; }
  static bool ReportStats() { return false; }
  static bool IsMatch(const Object& key1, const Object& key2) {
    if (!key1.IsInteger() || !key2.IsInteger()) return false;
    return Integer::Cast(key1).Equals(Integer::Cast(key2));
    return Integer::Cast(key).CanonicalizeHash();
using UnlinkedCallMap = UnorderedHashMap<SavedUnlinkedCallMapKeyEqualsTraits>;
static void SaveUnlinkedCall(Zone* zone,
                             const UnlinkedCall& unlinked_call) {
  IsolateGroup* isolate_group = isolate->group();
  SafepointMutexLocker ml(isolate_group->unlinked_call_map_mutex());
  if (isolate_group->saved_unlinked_calls() == Array::null()) {
    const auto& initial_map =
    isolate_group->set_saved_unlinked_calls(initial_map);
  UnlinkedCallMap unlinked_call_map(zone,
                                    isolate_group->saved_unlinked_calls());
      unlinked_call_map.InsertOrGetValue(pc, unlinked_call)));
  isolate_group->set_saved_unlinked_calls(unlinked_call_map.Release());
static UnlinkedCallPtr LoadUnlinkedCall(Zone* zone,
  IsolateGroup* isolate_group = isolate->group();
  SafepointMutexLocker ml(isolate_group->unlinked_call_map_mutex());
  UnlinkedCallMap unlinked_call_map(zone,
                                    isolate_group->saved_unlinked_calls());
  const auto& unlinked_call = UnlinkedCall::Cast(
  isolate_group->set_saved_unlinked_calls(unlinked_call_map.Release());
  return unlinked_call.ptr();
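// SaveUnlinkedCall/LoadUnlinkedCall above stash the original unlinked-call
// metadata keyed by return address, so a call site that was patched to a
// faster stub can later be demoted and still recover its target name and
// arguments descriptor. The same idea with standard containers (hypothetical
// value type; the VM keys a GC-visible hash map under a safepoint mutex):
#include <cstdint>
#include <mutex>
#include <unordered_map>
struct FakeUnlinkedCall { /* target name, arguments descriptor, ... */ };
static std::mutex fake_map_mutex;
static std::unordered_map<uintptr_t, FakeUnlinkedCall> fake_unlinked_calls;
static void SaveCallInfo(uintptr_t return_pc, const FakeUnlinkedCall& info) {
  std::lock_guard<std::mutex> lock(fake_map_mutex);
  fake_unlinked_calls.emplace(return_pc, info);  // keyed by the caller's pc
}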
    const Code& caller_code,
    : isolate_(thread->isolate()),
      zone_(thread->zone()),
      caller_arguments_(caller_arguments),
      miss_handler_(miss_handler),
      arguments_(arguments),
      caller_frame_(caller_frame),
      caller_code_(caller_code),
      caller_function_(caller_function),
      args_descriptor_(Array::Handle()) {
  ASSERT(caller_arguments_.length() == 1 || !FLAG_precompiled_mode);
  FunctionPtr ResolveTargetFunction(const Object& data);
#if defined(DART_PRECOMPILED_RUNTIME)
  void HandleMissAOT(const Object& old_data,
  void DoMonomorphicMissAOT(const Object& old_data,
  bool CanExtendSingleTargetRange(const String& name,
  void HandleMissJIT(const Object& old_data,
                     const Code& old_target,
  void DoMonomorphicMissJIT(const Object& old_data,
  void UpdateICDataWithTarget(const ICData& ic_data,
  void TrySwitch(const ICData& ic_data, const Function& target_function);
  void ReturnJITorAOT(const Code& stub,
  const Instance& receiver() { return *caller_arguments_[0]; }
  bool should_consider_patching() {
    if (FLAG_precompiled_mode) return true;
  ICDataPtr NewICData();
  ICDataPtr NewICDataWithTarget(intptr_t cid, const Function& target);
  const GrowableArray<const Instance*>& caller_arguments_;
  NativeArguments arguments_;
  StackFrame* caller_frame_;
  const Code& caller_code_;
  const Function& caller_function_;
  Array& args_descriptor_;
  bool is_monomorphic_hit_ = false;
#if defined(DART_PRECOMPILED_RUNTIME)
void PatchableCallHandler::DoUnlinkedCallAOT(const UnlinkedCall& unlinked,
                                             const Function& target_function) {
      target_function.IsNull()
          : NewICDataWithTarget(receiver().GetClassId(), target_function));
  if (!target_function.IsNull() &&
      !target_function.PrologueNeedsArgumentsDescriptor()) {
    ASSERT(target_function.HasCode());
    const Code& target_code =
    const Smi& expected_cid =
    if (unlinked.can_patch_to_monomorphic()) {
      object = expected_cid.ptr();
      code = target_code.ptr();
      code = StubCode::MonomorphicSmiableCheck().ptr();
  ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
bool PatchableCallHandler::CanExtendSingleTargetRange(
    const Function& old_target,
    const Function& target_function,
  if (old_target.ptr() != target_function.ptr()) {
  intptr_t unchecked_lower, unchecked_upper;
  if (receiver().GetClassId() < *lower) {
    unchecked_upper = *lower - 1;
    unchecked_lower = *upper + 1;
  return IsSingleTarget(isolate_->group(), zone_, unchecked_lower,
                        unchecked_upper, target_function, name);
#if defined(DART_PRECOMPILED_RUNTIME)
void PatchableCallHandler::DoMonomorphicMissAOT(
    const Object& old_data,
    const Function& target_function) {
  if (old_data.IsSmi()) {
    old_expected_cid = Smi::Cast(old_data).Value();
    old_expected_cid = MonomorphicSmiableCall::Cast(old_data).expected_cid();
  const bool is_monomorphic_hit = old_expected_cid == receiver().GetClassId();
      zone_, Resolve(thread_, zone_, caller_arguments_, old_receiver_class,
                     name_, args_descriptor_));
      zone_, old_target.IsNull()
                 : NewICDataWithTarget(old_expected_cid, old_target));
  if (is_monomorphic_hit) {
    ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
  intptr_t lower = old_expected_cid;
  intptr_t upper = old_expected_cid;
  if (CanExtendSingleTargetRange(name_, old_target, target_function, &lower,
    const SingleTargetCache& cache =
    cache.set_entry_point(code.EntryPoint());
    cache.set_upper_limit(upper);
    const Code& stub = StubCode::SingleTargetCall();
    ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
  const Code& stub = StubCode::ICCallThroughCode();
  ReturnAOT(stub, ic_data);
#if !defined(DART_PRECOMPILED_RUNTIME)
void PatchableCallHandler::DoMonomorphicMissJIT(
    const Object& old_data,
    const Function& target_function) {
  const auto& old_ic_data_entries = Array::Cast(old_data);
  const auto& ic_data =
  if (ic_data.NumberOfChecksIs(1) &&
      (ic_data.GetReceiverClassIdAt(0) == receiver().GetClassId())) {
    if (FLAG_trace_ic) {
                " updating code (old code was disabled)\n",
                caller_frame_->pc());
    ReturnJIT(code, data, target_function);
  ASSERT(ic_data.NumArgsTested() == 1);
  const Code& stub = ic_data.is_tracking_exactness()
                         ? StubCode::OneArgCheckInlineCacheWithExactnessCheck()
                         : StubCode::OneArgCheckInlineCache();
  if (FLAG_trace_ic) {
              " switching monomorphic to polymorphic dispatch, %s\n",
              caller_frame_->pc(), ic_data.ToCString());
  ASSERT(caller_arguments_.length() == 1);
  UpdateICDataWithTarget(ic_data, target_function);
  ASSERT(should_consider_patching());
                                  ic_data, target_function);
  ReturnJIT(stub, ic_data, target_function);
#if defined(DART_PRECOMPILED_RUNTIME)
void PatchableCallHandler::DoSingleTargetMissAOT(
    const SingleTargetCache& data,
    const Function& target_function) {
  const Function& old_target =
      target_function.IsNull()
          : NewICDataWithTarget(receiver().GetClassId(), target_function));
  intptr_t upper = data.upper_limit();
  if (CanExtendSingleTargetRange(name_, old_target, target_function, &lower,
    data.set_upper_limit(upper);
    ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
  const Code& stub = StubCode::ICCallThroughCode();
  ReturnAOT(stub, ic_data);
#if defined(DART_PRECOMPILED_RUNTIME)
void PatchableCallHandler::DoICDataMissAOT(const ICData& ic_data,
                                           const Function& target_function) {
  const Class& cls = Class::Handle(zone_, receiver().clazz());
  const Array& descriptor =
      Array::CheckedHandle(zone_, ic_data.arguments_descriptor());
  ArgumentsDescriptor args_desc(descriptor);
  if (FLAG_trace_ic || FLAG_trace_ic_miss_in_optimized) {
                 cls.ToCString(), args_desc.TypeArgsLen(), name.ToCString());
  if (target_function.IsNull()) {
    ReturnAOT(StubCode::NoSuchMethodDispatcher(), ic_data);
  const intptr_t number_of_checks = ic_data.NumberOfChecks();
  if ((number_of_checks == 0) &&
      (!FLAG_precompiled_mode || ic_data.receiver_cannot_be_smi()) &&
      !target_function.PrologueNeedsArgumentsDescriptor()) {
    const Code& target_code =
    const Smi& expected_cid =
    ASSERT(target_code.HasMonomorphicEntry());
                                     expected_cid, target_code);
    ReturnAOT(target_code, expected_cid);
    ic_data.EnsureHasReceiverCheck(receiver().GetClassId(), target_function);
    if (number_of_checks > FLAG_max_polymorphic_checks) {
      const Code& stub = StubCode::MegamorphicCall();
      ReturnAOT(stub, cache);
    ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
#if !defined(DART_PRECOMPILED_RUNTIME)
void PatchableCallHandler::DoICDataMissJIT(const ICData& ic_data,
                                           const Object& old_code,
                                           const Function& target_function) {
  ASSERT(ic_data.NumArgsTested() == caller_arguments_.length());
  if (ic_data.NumArgsTested() == 1) {
    ASSERT(old_code.ptr() == StubCode::OneArgCheckInlineCache().ptr() ||
               StubCode::OneArgCheckInlineCacheWithExactnessCheck().ptr() ||
               StubCode::OneArgOptimizedCheckInlineCache().ptr() ||
               StubCode::OneArgOptimizedCheckInlineCacheWithExactnessCheck()
           old_code.ptr() == StubCode::ICCallBreakpoint().ptr() ||
           (old_code.IsNull() && !should_consider_patching()));
    UpdateICDataWithTarget(ic_data, target_function);
    if (should_consider_patching()) {
                                    caller_function_, ic_data, target_function);
        zone_, ic_data.is_tracking_exactness()
                   ? StubCode::OneArgCheckInlineCacheWithExactnessCheck().ptr()
                   : StubCode::OneArgCheckInlineCache().ptr());
    ReturnJIT(stub, ic_data, target_function);
    ASSERT(old_code.ptr() == StubCode::TwoArgsCheckInlineCache().ptr() ||
           old_code.ptr() == StubCode::SmiAddInlineCache().ptr() ||
           old_code.ptr() == StubCode::SmiLessInlineCache().ptr() ||
           old_code.ptr() == StubCode::SmiEqualInlineCache().ptr() ||
               StubCode::TwoArgsOptimizedCheckInlineCache().ptr() ||
           old_code.ptr() == StubCode::ICCallBreakpoint().ptr() ||
           (old_code.IsNull() && !should_consider_patching()));
    UpdateICDataWithTarget(ic_data, target_function);
    ReturnJIT(StubCode::TwoArgsCheckInlineCache(), ic_data, target_function);
void PatchableCallHandler::DoMegamorphicMiss(const MegamorphicCache& data,
                                             const Function& target_function) {
  const Class& cls = Class::Handle(zone_, receiver().clazz());
  const Array& descriptor =
      Array::CheckedHandle(zone_, data.arguments_descriptor());
  ArgumentsDescriptor args_desc(descriptor);
  if (FLAG_trace_ic || FLAG_trace_ic_miss_in_optimized) {
    OS::PrintErr("Megamorphic miss, class=%s, function<%" Pd ">=%s\n",
                 cls.ToCString(), args_desc.TypeArgsLen(), name.ToCString());
  if (target_function.IsNull()) {
    ReturnJITorAOT(StubCode::NoSuchMethodDispatcher(), data, target_function);
  data.EnsureContains(class_id, target_function);
  ReturnJITorAOT(StubCode::MegamorphicCall(), data, target_function);
void PatchableCallHandler::UpdateICDataWithTarget(
    const ICData& ic_data,
    const Function& target_function) {
  if (target_function.IsNull()) return;
  const bool call_target_directly =
  const intptr_t invocation_count = call_target_directly ? 1 : 0;
  if (caller_arguments_.length() == 1) {
#if !defined(DART_PRECOMPILED_RUNTIME)
    if (ic_data.is_tracking_exactness()) {
      exactness = receiver().IsNull()
                      : StaticTypeExactnessState::Compute(
                            Type::Cast(AbstractType::Handle(
                                ic_data.receivers_static_type())),
    ic_data.EnsureHasReceiverCheck(receiver().GetClassId(), target_function,
                                   invocation_count, exactness);
    GrowableArray<intptr_t> class_ids(caller_arguments_.length());
    ASSERT(ic_data.NumArgsTested() == caller_arguments_.length());
    for (intptr_t i = 0; i < caller_arguments_.length(); i++) {
      class_ids.Add(caller_arguments_[i]->GetClassId());
    ic_data.EnsureHasCheck(class_ids, target_function, invocation_count);
void PatchableCallHandler::ReturnAOT(const Code& stub, const Object& data) {
void PatchableCallHandler::ReturnJIT(const Code& stub,
                                     const Function& target) {
  switch (miss_handler_) {
void PatchableCallHandler::ReturnJITorAOT(const Code& stub,
                                          const Function& target) {
#if defined(DART_PRECOMPILED_MODE)
  ReturnAOT(stub, data);
ICDataPtr PatchableCallHandler::NewICData() {
  return ICData::New(caller_function_, name_, args_descriptor_, DeoptId::kNone,
                     1, ICData::kInstance);
ICDataPtr PatchableCallHandler::NewICDataWithTarget(intptr_t cid,
                                                    const Function& target) {
  GrowableArray<intptr_t> cids(1);
                     ICData::kInstance, &cids, target);
FunctionPtr PatchableCallHandler::ResolveTargetFunction(const Object& data) {
  switch (data.GetClassId()) {
    case kUnlinkedCallCid: {
      const auto& unlinked_call = UnlinkedCall::Cast(data);
#if defined(DART_PRECOMPILED_RUNTIME)
        SaveUnlinkedCall(zone_, isolate_, caller_frame_->pc(), unlinked_call);
      name_ = unlinked_call.target_name();
      args_descriptor_ = unlinked_call.arguments_descriptor();
    case kMonomorphicSmiableCallCid:
#if defined(DART_PRECOMPILED_RUNTIME)
    case kSingleTargetCacheCid: {
          zone_, LoadUnlinkedCall(zone_, isolate_, caller_frame_->pc()));
      name_ = unlinked_call.target_name();
      args_descriptor_ = unlinked_call.arguments_descriptor();
      const auto& ic_data_entries = Array::Cast(data);
      const auto& ic_data =
      args_descriptor_ = ic_data.arguments_descriptor();
      name_ = ic_data.target_name();
    case kMegamorphicCacheCid: {
      const CallSiteData& call_site_data = CallSiteData::Cast(data);
      name_ = call_site_data.target_name();
      args_descriptor_ = call_site_data.arguments_descriptor();
  const Class& cls = Class::Handle(zone_, receiver().clazz());
  return Resolve(thread_, zone_, caller_arguments_, cls, name_,
  const auto& target_function =
#if defined(DART_PRECOMPILED_RUNTIME)
  uword target_entry = 0;
                             caller_frame_->pc(), caller_code_));
  HandleMissAOT(data, target_entry, target_function);
  if (should_consider_patching()) {
    ASSERT(old_data.IsICData() || old_data.IsMegamorphicCache());
  HandleMissJIT(data, code, target_function);
#if defined(DART_PRECOMPILED_RUNTIME)
void PatchableCallHandler::HandleMissAOT(const Object& old_data,
    case kUnlinkedCallCid:
             StubCode::SwitchableCallMiss().MonomorphicEntryPoint());
      DoUnlinkedCallAOT(UnlinkedCall::Cast(old_data), target_function);
    case kMonomorphicSmiableCallCid:
             StubCode::MonomorphicSmiableCheck().MonomorphicEntryPoint());
      DoMonomorphicMissAOT(old_data, target_function);
    case kSingleTargetCacheCid:
      ASSERT(old_entry == StubCode::SingleTargetCall().MonomorphicEntryPoint());
      DoSingleTargetMissAOT(SingleTargetCache::Cast(old_data), target_function);
             StubCode::ICCallThroughCode().MonomorphicEntryPoint());
      DoICDataMissAOT(ICData::Cast(old_data), target_function);
    case kMegamorphicCacheCid:
      ASSERT(old_entry == StubCode::MegamorphicCall().MonomorphicEntryPoint());
      DoMegamorphicMiss(MegamorphicCache::Cast(old_data), target_function);
void PatchableCallHandler::HandleMissJIT(const Object& old_data,
                                         const Code& old_code,
                                         const Function& target_function) {
  switch (old_data.GetClassId()) {
      DoMonomorphicMissJIT(old_data, target_function);
      DoICDataMissJIT(ICData::Cast(old_data), old_code, target_function);
    case kMegamorphicCacheCid:
      ASSERT(old_code.ptr() == StubCode::MegamorphicCall().ptr() ||
             (old_code.IsNull() && !should_consider_patching()));
      DoMegamorphicMiss(MegamorphicCache::Cast(old_data), target_function);
#if !defined(DART_PRECOMPILED_RUNTIME)
  const auto& caller_function =
                              native_arguments, caller_frame, caller_code,
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(1));
  args.Add(&receiver);
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Instance& other = Instance::CheckedHandle(zone, arguments.ArgAt(1));
  const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(2));
  args.Add(&receiver);
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(1));
#if defined(DART_PRECOMPILED_RUNTIME)
  caller_arguments.Add(&receiver);
                              caller_frame, caller_code, caller_function);
#if defined(DART_PRECOMPILED_RUNTIME)
static ObjectPtr InvokeCallThroughGetterOrNoSuchMethod(
    const Instance& receiver,
    const String& target_name,
    const Array& orig_arguments,
    const Array& orig_arguments_desc) {
  const bool is_dynamic_call =
  String& demangled_target_name = String::Handle(zone, target_name.ptr());
  if (is_dynamic_call) {
    demangled_target_name =
  while (!cls.IsNull()) {
    if (cls.EnsureIsFinalized(thread) == Error::null()) {
      const Function& closure_function =
          zone, closure_function.ImplicitInstanceClosure(receiver));
    cls = cls.SuperClass();
  if (receiver.IsRecord()) {
    const Record& record = Record::Cast(receiver);
    const intptr_t field_index =
    if (field_index >= 0) {
      return record.FieldAt(field_index);
  if ((target_name.ptr() == Symbols::call().ptr()) && receiver.IsClosure()) {
                                 orig_arguments_desc);
  const auto& getter_name =
      zone, is_dynamic_call
                : getter_name.ptr());
  ArgumentsDescriptor args_desc(orig_arguments_desc);
  while (!cls.IsNull()) {
    if (cls.EnsureIsFinalized(thread) == Error::null()) {
      if (is_dynamic_call) {
      if (is_dynamic_call) {
      getter_arguments.SetAt(0, receiver);
      if (getter_result.IsError()) {
        return getter_result.ptr();
      ASSERT(getter_result.IsNull() || getter_result.IsInstance());
      orig_arguments.SetAt(args_desc.FirstArgIndex(), getter_result);
                                 orig_arguments_desc);
    cls = cls.SuperClass();
  if (receiver.IsRecord()) {
    const Record& record = Record::Cast(receiver);
    const intptr_t field_index =
        record.GetFieldIndexByName(thread, demangled_target_name);
    if (field_index >= 0) {
      const Object& getter_result =
      ASSERT(getter_result.IsNull() || getter_result.IsInstance());
      orig_arguments.SetAt(args_desc.FirstArgIndex(), getter_result);
                                 orig_arguments_desc);
                     orig_arguments, orig_arguments_desc));
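// The fallback above mirrors Dart's member resolution order when a regular
// lookup fails: first try to close over an instance method of the same name,
// then try a getter (or record field) of that name and, if it yields a value,
// invoke that value as a callable; only when all of that fails is
// noSuchMethod reached. A schematic of the ordering (hypothetical helpers,
// shown as pseudocode only):
//
//   if (HasMethod(cls, name)) return CloseOver(receiver, name);
//   if (HasGetter(cls, name)) return CallValue(InvokeGetter(receiver, name));
//   return InvokeNoSuchMethod(receiver, name, args);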
  if (ic_data_or_cache.IsICData()) {
    target_name = ICData::Cast(ic_data_or_cache).target_name();
    ASSERT(ic_data_or_cache.IsMegamorphicCache());
    target_name = MegamorphicCache::Cast(ic_data_or_cache).target_name();
#if defined(DART_PRECOMPILED_RUNTIME)
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Array& orig_arguments_desc =
      Array::CheckedHandle(zone, arguments.ArgAt(2));
  const Array& orig_arguments = Array::CheckedHandle(zone, arguments.ArgAt(3));
                 thread, zone, receiver, target_name,
                 orig_arguments, orig_arguments_desc));
  arguments.SetReturn(result);
  FATAL("Dispatcher for %s should have been lazily created",
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(1));
  const Array& orig_arguments_desc =
      Array::CheckedHandle(zone, arguments.ArgAt(2));
  const Array& orig_arguments = Array::CheckedHandle(zone, arguments.ArgAt(3));
  if ((function.kind() == UntaggedFunction::kClosureFunction) ||
      (function.kind() == UntaggedFunction::kImplicitClosureFunction)) {
    orig_function_name = function.QualifiedUserVisibleName();
    orig_function_name = function.name();
                 orig_arguments, orig_arguments_desc));
  arguments.SetReturn(result);
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
  auto isolate = thread->isolate();
  if (FLAG_shared_slow_path_triggers_gc) {
  bool do_deopt = false;
  bool do_stacktrace = false;
  bool do_reload = false;
  const intptr_t isolate_reload_every =
      isolate->group()->reload_every_n_stack_overflow_checks();
  if ((FLAG_deoptimize_every > 0) || (FLAG_stacktrace_every > 0) ||
      (FLAG_gc_every > 0) || (isolate_reload_every > 0)) {
    if (FLAG_deoptimize_every > 0 && (count % FLAG_deoptimize_every) == 0) {
    if (FLAG_stacktrace_every > 0 && (count % FLAG_stacktrace_every) == 0) {
      do_stacktrace = true;
    if (FLAG_gc_every > 0 && (count % FLAG_gc_every) == 0) {
    if ((isolate_reload_every > 0) && (count % isolate_reload_every) == 0) {
      do_reload = isolate->group()->CanReload();
  if ((FLAG_deoptimize_filter != nullptr) ||
      (FLAG_stacktrace_filter != nullptr) || (FLAG_reload_every != 0)) {
    if ((FLAG_deoptimize_filter != nullptr) ||
        (FLAG_stacktrace_filter != nullptr)) {
      if (!code.IsNull()) {
        if (!code.is_optimized() && FLAG_reload_every_optimized) {
        if (code.is_optimized() && FLAG_deoptimize_filter != nullptr &&
                     function.ToFullyQualifiedCString());
      if (FLAG_stacktrace_filter != nullptr &&
                     function.ToFullyQualifiedCString());
        do_stacktrace = true;
    isolate_group->MaybeIncreaseReloadEveryNStackOverflowChecks();
    const char* script_uri = isolate_group->source()->script_uri;
    const bool success =
        isolate_group->ReloadSources(&js, true, script_uri);
      FATAL("*** Isolate reload failed:\n%s\n", js.ToCString());
  if (do_stacktrace) {
    intptr_t num_frames = stack->Length();
    for (intptr_t i = 0; i < num_frames; i++) {
#if !defined(DART_PRECOMPILED_RUNTIME)
      if (!frame->function().ForceOptimize()) {
        frame->function().EnsureHasCompiledUnoptimizedCode();
        num_vars = frame->NumLocalVariables();
      for (intptr_t v = 0; v < num_vars; v++) {
  if (FLAG_stress_async_stacks) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  ASSERT(isolate_group->use_osr());
  if (FLAG_trace_osr) {
              function.ToFullyQualifiedCString(), osr_id,
    uword optimized_entry = code.EntryPoint();
    frame->set_pc(optimized_entry);
#if defined(USING_SIMULATOR)
  if (stack_pos == 0) {
    stack_pos = thread->saved_stack_limit();
  uword stack_overflow_flags = thread->GetAndClearStackOverflowFlags();
  if (!thread->os_thread()->HasStackHeadroom() ||
    if (FLAG_verbose_stack_overflow) {
                thread->saved_stack_limit());
  while (frame != nullptr) {
#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
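// The StackOverflow entry doubles as the VM's interrupt check: generated code
// compares the stack pointer against a per-thread limit, and the runtime then
// distinguishes a real overflow (no OS-thread headroom left) from a limit
// that was lowered artificially to request a GC, deopt, stack trace, or
// reload, as the flag checks above show. A minimal sketch of the check
// itself (illustrative struct, not the VM's Thread layout):
#include <cstdint>
struct FakeThread {
  uintptr_t stack_limit;  // lowered to force a trap into the runtime
};
static inline bool NeedsRuntimeCheck(const FakeThread& t, uintptr_t sp) {
  return sp < t.stack_limit;  // stacks grow down: below the limit means trap
}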
#if !defined(DART_PRECOMPILED_RUNTIME)
  const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(0));
  const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(1));
      "IC call @%#" Px ": ICData: %#" Px " cnt:%" Pd " nchecks: %" Pd " %s\n",
#if !defined(DART_PRECOMPILED_RUNTIME)
  const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(0));
  auto isolate_group = thread->isolate_group();
  if (FLAG_background_compilation) {
    if (isolate_group->background_compiler()->EnqueueCompilation(function)) {
      function.SetUsageCounter(INT32_MIN);
  if (FLAG_trace_compiler || FLAG_trace_optimizing_compiler) {
    THR_Print("ReCompiling function: '%s' \n",
              function.ToFullyQualifiedCString());
#if !defined(DART_PRECOMPILED_RUNTIME)
  while (frame->IsStubFrame() || frame->IsExitFrame()) {
    if (frame->IsEntryFrame()) {
  const Code& current_target_code =
  if (FLAG_trace_patching) {
        "FixCallersTarget: caller %#" Px
        "target '%s' -> %#" Px " (%s)\n",
        current_target_code.is_optimized() ? "optimized" : "unoptimized");
  arguments.SetReturn(current_target_code);
#if !defined(DART_PRECOMPILED_RUNTIME)
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Array& switchable_call_data =
      Array::CheckedHandle(zone, arguments.ArgAt(1));
  const auto& caller_function =
  caller_arguments.Add(&receiver);
                              arguments, caller_frame, caller_code,
                              caller_function);
#if !defined(DART_PRECOMPILED_RUNTIME)
  while (frame->IsStubFrame() || frame->IsExitFrame()) {
    if (frame->IsEntryFrame()) {
  alloc_class ^= stub.owner();
  if (alloc_stub.IsNull()) {
  if (FLAG_trace_patching) {
  arguments.SetReturn(alloc_stub);
  switch (deopt_reason) {
#define DEOPT_REASON_TO_TEXT(name)                                            \
  case ICData::kDeopt##name:                                                  \
#undef DEOPT_REASON_TO_TEXT
  if (!function.IsSuspendableFunction()) {
  return suspend_state.IsSuspendState() &&
         (SuspendState::Cast(suspend_state).pc() != 0);
    const Code& optimized_code,
  if (!error.IsNull()) {
  const Code& unoptimized_code =
    function.SwitchToUnoptimizedCode();
  if (FLAG_trace_deoptimization) {
  } else if (frame->IsMarkedForLazyDeopt()) {
    if (FLAG_trace_deoptimization) {
    frame->MarkForLazyDeopt();
    if (FLAG_trace_deoptimization) {
      THR_Print("Lazy deopt scheduled for fp=%" Pp ", pc=%" Pp "\n",
                frame->fp(), deopt_pc);
  auto isolate_group = thread->isolate_group();
  isolate_group->RunWithStoppedMutators([&]() {
    isolate_group->ForEachIsolate(
          if (mutator_thread == nullptr) {
          while (frame != nullptr) {
            optimized_code = frame->LookupDartCode();
  auto isolate = thread->isolate();
  auto isolate_group = thread->isolate_group();
  isolate_group->RunWithStoppedMutators([&]() {
    auto mutator_thread = isolate->mutator_thread();
    if (mutator_thread == nullptr) {
    if (frame != nullptr) {
      if (optimized_code.is_optimized() &&
          !optimized_code.is_force_optimized()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
                               intptr_t** cpu_registers) {
  MSAN_UNPOISON(reinterpret_cast<void*>(saved_registers_address),
  ASSERT(fpu_registers_copy != nullptr);
    fpu_registers_copy[i] =
  *fpu_registers = fpu_registers_copy;
  ASSERT(cpu_registers_copy != nullptr);
    cpu_registers_copy[i] =
        *reinterpret_cast<intptr_t*>(saved_registers_address);
  *cpu_registers = cpu_registers_copy;
  int64_t int_value = static_cast<int64_t>(value);
  double converted_double = static_cast<double>(int_value);
  if (converted_double != value) {
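// The cast-and-compare above is the standard trick for detecting doubles
// that are not exactly representable as a 64-bit integer: convert to int64,
// convert back, and compare. If the round trip changes the value (including
// NaN, whose comparison is always false), the fast path must bail out to the
// generic conversion. A self-contained illustration:
#include <cstdint>
static bool FitsInt64Exactly(double value) {
  int64_t as_int = static_cast<int64_t>(value);  // edge cases are
                                                 // implementation-defined here
  double round_trip = static_cast<double>(as_int);  // back to double
  return round_trip == value;  // NaN compares false, so NaN also bails out
}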
    DeoptimizeCopyFrame,
    uword saved_registers_address,
    uword is_lazy_deopt) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  const uword last_fp =
  ASSERT(caller_frame != nullptr);
  if (FLAG_trace_deoptimization) {
    THR_Print("== Deoptimizing code for '%s', %s, %s\n",
              function.ToFullyQualifiedCString(),
              deoptimizing_code ? "code & frame" : "frame",
              (is_lazy_deopt != 0u) ? "lazy-deopt" : "");
  if (is_lazy_deopt != 0u) {
    const uword deopt_pc =
    caller_frame->set_pc(deopt_pc);
    ASSERT(caller_frame->pc() == deopt_pc);
  if (FLAG_trace_deoptimization) {
              caller_frame->pc());
  intptr_t* cpu_registers;
      fpu_registers, cpu_registers, is_lazy_deopt != 0, deoptimizing_code);
#if !defined(DART_PRECOMPILED_RUNTIME)
  ASSERT(caller_frame != nullptr);
#if !defined(DART_PRECOMPILED_RUNTIME)
  DeoptContext* deopt_context = isolate->deopt_context();
  isolate->set_deopt_context(nullptr);
  delete deopt_context;
#if !defined(DART_PRECOMPILED_RUNTIME)
#if !defined(PRODUCT)
  isolate->debugger()->RewindPostDeopt();
  const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
      Instance::CheckedHandle(zone, arguments.ArgAt(1));
#if !defined(DART_PRECOMPILED_RUNTIME)
#if !defined(PRODUCT)
  if (isolate->has_resumption_breakpoints()) {
    isolate->debugger()->ResumptionBreakpoint();
           .IsSuspendableFunction());
    thread->pending_deopts().AddPendingDeopt(frame->fp(), deopt_pc);
    frame->MarkForLazyDeopt();
    if (FLAG_trace_deoptimization) {
      THR_Print("Lazy deopt scheduled for resumed frame fp=%" Pp ", pc=%" Pp
                frame->fp(), deopt_pc);
  if (!exception.IsNull()) {
    const char* runtime_call_name,
    bool can_lazy_deopt) {
  ASSERT(FLAG_deoptimize_on_runtime_call_every > 0);
  if (FLAG_precompiled_mode) {
  const bool is_deopt_related =
      strstr(runtime_call_name, "Deoptimize") != nullptr;
  if (is_deopt_related) {
  if (can_lazy_deopt) {
    if (FLAG_deoptimize_on_runtime_call_name_filter != nullptr &&
        (strlen(runtime_call_name) !=
             strlen(FLAG_deoptimize_on_runtime_call_name_filter) ||
         strstr(runtime_call_name,
                FLAG_deoptimize_on_runtime_call_name_filter) == nullptr)) {
    if ((count % FLAG_deoptimize_on_runtime_call_every) == 0) {
  double remainder = fmod_ieee(left, right);
  if (remainder == 0.0) {
  } else if (remainder < 0.0) {
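// Dart's % on doubles is defined so the result is never negative: when fmod
// produces a negative remainder, the divisor's magnitude is added back. A
// self-contained version of that adjustment using the C library (assuming
// fmod_ieee above matches std::fmod semantics):
#include <cmath>
static double DartStyleModulo(double left, double right) {
  double remainder = std::fmod(left, right);
  if (remainder == 0.0) {
    return 0.0;  // normalize -0.0 to +0.0
  } else if (remainder < 0.0) {
    // e.g. -5 % 3: fmod gives -2, Dart's % yields 1.
    return (right < 0.0) ? remainder - right : remainder + right;
  }
  return remainder;
}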
#if !defined(DART_PRECOMPILED_RUNTIME)
  const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
  const Instance& instance = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(1));
         (result.ptr() != Object::transition_sentinel().ptr()));
  arguments.SetReturn(result);
  const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
         (result.ptr() != Object::transition_sentinel().ptr()));
  arguments.SetReturn(result);
  const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
  const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
  FATAL("Not loaded");
      isolate->group()->api_state()->AllocatePersistentHandle();
typedef void* (*MemMoveCFunction)(void* dest, const void* src, size_t n);
    uword* out_entry_point,
    uword* out_trampoline_type) {
        reinterpret_cast<void*>(trampoline));
  ASSERT(out_entry_point != nullptr);
  ASSERT(out_trampoline_type != nullptr);
  auto metadata = fcm->LookupMetadataForTrampoline(trampoline);
  if (metadata.trampoline_type() ==
    auto metadata2 = fcm->LookupMetadataForTrampoline(trampoline);
    *out_trampoline_type = static_cast<uword>(metadata2.trampoline_type());
    if (!metadata.IsLive() || !metadata.IsSameCallback(metadata2)) {
            reinterpret_cast<void*>(trampoline));
    *out_entry_point = metadata.target_entry_point();
    Isolate* target_isolate = metadata.target_isolate();
    Isolate* current_isolate = nullptr;
    if (current_thread != nullptr) {
      current_isolate = current_thread->isolate();
    if (current_isolate == nullptr ||
        current_isolate->group() != target_isolate->group()) {
      if (current_isolate != nullptr) {
      ASSERT(temp_thread != nullptr);
          reinterpret_cast<intptr_t>(current_isolate));
  if (!metadata.IsLive()) {
    FATAL("Callback invoked after it has been deleted.");
  Isolate* target_isolate = metadata.target_isolate();
  *out_entry_point = metadata.target_entry_point();
  *out_trampoline_type = static_cast<uword>(metadata.trampoline_type());
  if (current_thread == nullptr) {
    FATAL("Cannot invoke native callback outside an isolate.");
    FATAL("Cannot invoke native callback when API callbacks are prohibited.");
    FATAL("Cannot invoke native callback while unwind error propagates.");
    FATAL("Native callbacks must be invoked on the mutator thread.");
  if (current_thread->isolate() != target_isolate) {
    FATAL("Cannot invoke native callback from a different isolate.");
                     (void*)*out_entry_point);
                     (void*)*out_trampoline_type);
  return current_thread;
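// Before handing control to an FFI callback, the resolver above validates a
// strict contract with fail-fast guards: the metadata must still be live, a
// current VM thread must exist, API callbacks must be permitted, no unwind
// error may be in flight, and the thread must be the mutator of the
// callback's isolate; any violation is fatal rather than undefined behavior.
// A compact illustration of the same guard-clause style (hypothetical state):
#include <cstdlib>
struct FakeCallbackState {
  bool is_live;
  bool has_thread;
  bool same_isolate;
};
static void CheckCallbackInvariants(const FakeCallbackState& s) {
  if (!s.is_live) abort();       // callback deleted: fail fast, not UB
  if (!s.has_thread) abort();    // must be invoked on a VM thread
  if (!s.same_isolate) abort();  // isolates must match
}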
  ASSERT(thread != nullptr);
  const bool inside_temp_isolate =
      source_isolate == nullptr || source_isolate != thread->isolate();
  if (inside_temp_isolate) {
    if (source_isolate != nullptr) {
  return return_value;
  return_value->set_ptr(Object::sentinel().ptr());
  return return_value;
#if !defined(USING_MEMORY_SANITIZER)
#if !defined(USING_THREAD_SANITIZER)
static bool IsDynamicInvocationForwarderName(const String &name)
const char * ToFullyQualifiedCString() const
static StringPtr DemangleDynamicInvocationForwarderName(const String &name)
bool HasOptimizedCode() const
static StringPtr CreateDynamicInvocationForwarderName(const String &name)
bool IsMethodExtractor() const
CodePtr unoptimized_code() const
CodePtr EnsureHasCode() const
bool IsDebugging(Thread *thread, const Function &function)
void CollectAllGarbage(GCReason reason=GCReason::kFull, bool compact=false)
bool Contains(uword addr) const
void EnsureHasCheck(const GrowableArray< intptr_t > &class_ids, const Function &target, intptr_t count=1) const
static ICDataPtr ICDataOfEntriesArray(const Array &array)
static ICDataPtr NewWithCheck(const Function &owner, const String &target_name, const Array &arguments_descriptor, intptr_t deopt_id, intptr_t num_args_tested, RebindRule rebind_rule, GrowableArray< intptr_t > *cids, const Function &target, const AbstractType &receiver_type=Object::null_abstract_type())
intptr_t NumArgsTested() const
RebindRule rebind_rule() const
bool NumberOfChecksIs(intptr_t n) const
FunctionPtr GetTargetAt(intptr_t index) const
bool is_tracking_exactness() const
void set_is_megamorphic(bool value) const
void EnsureHasReceiverCheck(intptr_t receiver_class_id, const Function &target, intptr_t count=1, StaticTypeExactnessState exactness=StaticTypeExactnessState::NotTracking()) const
intptr_t NumberOfChecks() const
bool IsInstanceOf(const AbstractType &other, const TypeArguments &other_instantiator_type_arguments, const TypeArguments &other_function_type_arguments) const
AbstractTypePtr GetType(Heap::Space space) const
bool IsAssignableTo(const AbstractType &other, const TypeArguments &other_instantiator_type_arguments, const TypeArguments &other_function_type_arguments) const
static InstancePtr NewAlreadyFinalized(const Class &cls, Heap::Space space=Heap::kNew)
static Int32x4Ptr New(int32_t value0, int32_t value1, int32_t value2, int32_t value3, Heap::Space space=Heap::kNew)
static IntegerPtr New(const String &str, Heap::Space space=Heap::kNew)
static IntegerPtr NewFromUint64(uint64_t value, Heap::Space space=Heap::kNew)
static int EncodeType(Level level, Kind kind)
GroupDebugger * debugger() const
static bool IsSystemIsolateGroup(const IsolateGroup *group)
ObjectStore * object_store() const
static IsolateGroup * Current()
ClassTable * class_table() const
static void ExitTemporaryIsolate()
Mutex * patchable_call_mutex()
Isolate * EnterTemporaryIsolate()
Mutex * subtype_test_cache_mutex()
void set_deopt_context(DeoptContext *value)
static bool IsSystemIsolate(const Isolate *isolate)
IsolateObjectStore * isolate_object_store() const
bool has_attempted_stepping() const
IsolateGroup * group() const
DeoptContext * deopt_context() const
Thread * mutator_thread() const
void set_ptr(ObjectPtr ptr)
LocalHandle * AllocateHandle()
static MegamorphicCachePtr Lookup(Thread *thread, const String &name, const Array &descriptor)
static std::unique_ptr< Message > New(Args &&... args)
static MonomorphicSmiableCallPtr New(classid_t expected_cid, const Code &target)
void SetReturn(const Object &value) const
void SetArgAt(int index, const Object &value) const
static uword GetCurrentStackPointer()
static void static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
UntaggedObject * untag() const
intptr_t GetClassId() const
virtual const char * ToCString() const
static ObjectPtr RawCast(ObjectPtr obj)
static Object & ZoneHandle()
PatchableCallHandler(Thread *thread, const GrowableArray< const Instance * > &caller_arguments, MissHandler miss_handler, NativeArguments arguments, StackFrame *caller_frame, const Code &caller_code, const Function &caller_function)
void ResolveSwitchAndReturn(const Object &data)
uword FindPendingDeopt(uword fp)
void ClearPendingDeoptsAtOrBelow(uword fp, ClearReason reason)
void AddPendingDeopt(uword fp, uword pc)
void set_ptr(ObjectPtr ref)
static bool PostMessage(std::unique_ptr< Message > message, bool before_events=false)
intptr_t num_fields() const
static RecordPtr New(RecordShape shape, Heap::Space space=Heap::kNew)
void SetFieldAt(intptr_t field_index, const Object &value) const
static FunctionPtr ResolveDynamicAnyArgs(Zone *zone, const Class &receiver_class, const String &function_name, bool allow_add)
static FunctionPtr ResolveDynamicFunction(Zone *zone, const Class &receiver_class, const String &function_name)
static FunctionPtr ResolveDynamicForReceiverClass(const Class &receiver_class, const String &function_name, const ArgumentsDescriptor &args_desc, bool allow_add)
intptr_t AbandonRemainingTLAB(Thread *thread)
static Simulator * Current()
static SingleTargetCachePtr New()
static SmiPtr New(intptr_t value)
@ kAllowCrossThreadIteration
@ kNoCrossThreadIteration
virtual bool IsExitFrame() const
CodePtr LookupDartCode() const
virtual bool IsStubFrame() const
TokenPosition GetTokenPos() const
virtual bool IsDartFrame(bool validate=true) const
FunctionPtr LookupDartFunction() const
static StaticTypeExactnessState NotExact()
static StaticTypeExactnessState NotTracking()
static StringPtr NewFormatted(const char *format,...) PRINTF_ATTRIBUTE(1
static StringPtr New(const char *cstr, Heap::Space space=Heap::kNew)
static const char * ToCString(Thread *thread, StringPtr ptr)
static CodePtr GetAllocationStubForClass(const Class &cls)
void WriteEntryToBuffer(Zone *zone, BaseTextBuffer *buffer, intptr_t index, const char *line_prefix=nullptr) const
static SubtypeTestCachePtr New(intptr_t num_inputs)
static constexpr intptr_t kMaxInputs
static constexpr intptr_t MaxEntriesForCacheAllocatedFor(intptr_t count)
static intptr_t UsedInputsForType(const AbstractType &type)
intptr_t NumberOfChecks() const
intptr_t AddCheck(const Object &instance_class_id_or_signature, const AbstractType &destination_type, const TypeArguments &instance_type_arguments, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const TypeArguments &instance_parent_function_type_arguments, const TypeArguments &instance_delayed_type_arguments, const Bool &test_result) const
bool HasCheck(const Object &instance_class_id_or_signature, const AbstractType &destination_type, const TypeArguments &instance_type_arguments, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const TypeArguments &instance_parent_function_type_arguments, const TypeArguments &instance_delayed_type_arguments, intptr_t *index, Bool *result) const
static SuspendStatePtr Clone(Thread *thread, const SuspendState &src, Heap::Space space=Heap::kNew)
static constexpr intptr_t kSuspendStateVarIndex
static SuspendStatePtr New(intptr_t frame_size, const Instance &function_data, Heap::Space space=Heap::kNew)
void set_execution_state(ExecutionState state)
ApiLocalScope * api_top_scope() const
void DeferredMarkingStackAddObject(ObjectPtr obj)
int32_t no_callback_scope_depth() const
static Thread * Current()
PendingDeopts & pending_deopts()
void set_unboxed_int64_runtime_arg(int64_t value)
static bool IsAtSafepoint(SafepointLevel level, uword state)
void SetUnwindErrorInProgress(bool value)
double unboxed_double_runtime_arg() const
int64_t unboxed_int64_runtime_arg() const
bool is_unwind_in_progress() const
uword top_exit_frame_info() const
int64_t unboxed_int64_runtime_second_arg() const
bool IsDartMutatorThread() const
ExecutionState execution_state() const
Isolate * isolate() const
int32_t IncrementAndGetStackOverflowCount()
uint32_t IncrementAndGetRuntimeCallCount()
IsolateGroup * isolate_group() const
static void EnterIsolate(Isolate *isolate)
static void ExitIsolate(bool isolate_shutdown=false)
void set_unboxed_int64_runtime_second_arg(int64_t value)
bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
TypeArgumentsPtr InstantiateAndCanonicalizeFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments) const
bool IsUninstantiatedIdentity() const
intptr_t GetSubtypeTestCachePoolIndex()
static CodePtr SpecializeStubFor(Thread *thread, const AbstractType &type)
static TypePtr BoolType()
static intptr_t MaxElements(intptr_t class_id)
static TypedDataPtr New(intptr_t class_id, intptr_t len, Heap::Space space=Heap::kNew)
static uword ToAddr(const UntaggedObject *raw_obj)
static constexpr T Maximum(T x, T y)
#define THR_Print(format,...)
struct _Dart_Handle * Dart_Handle
G_BEGIN_DECLS G_MODULE_EXPORT FlValue * args
const uint8_t uint32_t uint32_t GError ** error
Dart_NativeFunction function
static float max(float r, float g, float b)
#define MSAN_UNPOISON(ptr, len)
bool WillAllocateNewOrRememberedContext(intptr_t num_context_variables)
bool WillAllocateNewOrRememberedArray(intptr_t length)
constexpr int64_t kMaxInt64
static AbstractTypePtr InstantiateType(const AbstractType &type, const AbstractType &instantiator)
static void InlineCacheMissHandler(Thread *thread, Zone *zone, const GrowableArray< const Instance * > &args, const ICData &ic_data, NativeArguments native_arguments)
FunctionPtr InlineCacheMissHelper(const Class &receiver_class, const Array &args_descriptor, const String &target_name)
void DLRT_PropagateError(Dart_Handle handle)
IntegerPtr DoubleToInteger(Zone *zone, double val)
void DeoptimizeFunctionsOnStack()
static void CopySavedRegisters(uword saved_registers_address, fpu_register_t **fpu_registers, intptr_t **cpu_registers)
static bool IsSuspendedFrame(Zone *zone, const Function &function, StackFrame *frame)
Thread * DLRT_GetFfiCallbackMetadata(FfiCallbackMetadata::Trampoline trampoline, uword *out_entry_point, uword *out_trampoline_type)
static void DeoptimizeLastDartFrameIfOptimized()
static void PrintSubtypeCheck(const AbstractType &subtype, const AbstractType &supertype, const bool result)
static void HandleOSRRequest(Thread *thread)
double DartModulo(double left, double right)
static DART_FORCE_INLINE uword ParamAddress(uword fp, intptr_t reverse_index)
static void NullErrorHelper(Zone *zone, const String &selector, bool is_param_name=false)
static void UpdateTypeTestCache(Zone *zone, Thread *thread, const Instance &instance, const AbstractType &destination_type, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const Bool &result, const SubtypeTestCache &new_cache)
DART_EXPORT void Dart_PropagateError(Dart_Handle handle)
@ kFixCallersTargetMonomorphic
static TokenPosition GetCallerLocation()
void *(* MemMoveCFunction)(void *dest, const void *src, size_t n)
void DFLRT_ExitSafepointIgnoreUnwindInProgress(NativeArguments __unusable_)
static void PrintTypeCheck(const char *message, const Instance &instance, const AbstractType &type, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const Bool &result)
void DLRT_ExitHandleScope(Thread *thread)
LocalHandle * DLRT_AllocateHandle(ApiLocalScope *scope)
void __tsan_acquire(void *addr)
void DLRT_ExitTemporaryIsolate()
double(* BinaryMathCFunction)(double x, double y)
void __msan_unpoison(const volatile void *, size_t)
DART_EXPORT bool Dart_IsError(Dart_Handle handle)
ApiLocalScope * DLRT_EnterHandleScope(Thread *thread)
const int kNumberOfFpuRegisters
static constexpr intptr_t kNumberOfSavedFpuRegisters
void DeoptimizeAt(Thread *mutator_thread, const Code &optimized_code, StackFrame *frame)
DEFINE_RUNTIME_ENTRY_NO_LAZY_DEOPT(AllocateDouble, 0)
DEFINE_RAW_LEAF_RUNTIME_ENTRY(CaseInsensitiveCompareUCS2, 4, false, CaseInsensitiveCompareUCS2)
static bool ResolveCallThroughGetter(const Class &receiver_class, const String &target_name, const String &demangled, const Array &arguments_descriptor, Function *result)
static FunctionPtr ComputeTypeCheckTarget(const Instance &receiver, const AbstractType &type, const ArgumentsDescriptor &desc)
static void RuntimeAllocationEpilogue(Thread *thread)
static constexpr intptr_t kDefaultMaxSubtypeCacheEntries
static FunctionPtr Resolve(Thread *thread, Zone *zone, const GrowableArray< const Instance * > &caller_arguments, const Class &receiver_class, const String &name, const Array &descriptor)
static void HandleStackOverflowTestCases(Thread *thread)
DEFINE_FLAG(bool, print_cluster_information, false, "Print information about clusters written to snapshot")
static Heap::Space SpaceForRuntimeAllocation()
double(* UnaryMathCFunction)(double x)
static constexpr intptr_t kNumberOfSavedCpuRegisters
@ kTypeCheckFromLazySpecializeStub
FrameLayout runtime_frame_layout
static void ThrowIfError(const Object &result)
static uint32_t Hash(uint32_t key)
static DART_FORCE_INLINE uword LocalVarAddress(uword fp, intptr_t index)
static InstancePtr AllocateObject(Thread *thread, const Class &cls)
constexpr intptr_t kWordSize
void DFLRT_ExitSafepoint(NativeArguments __unusable_)
static void TrySwitchInstanceCall(Thread *thread, StackFrame *caller_frame, const Code &caller_code, const Function &caller_function, const ICData &ic_data, const Function &target_function)
DEFINE_RUNTIME_ENTRY(CompileFunction, 1)
void __msan_unpoison_param(size_t)
void DFLRT_EnterSafepoint(NativeArguments __unusable_)
static DART_FORCE_INLINE bool IsCalleeFrameOf(uword fp, uword other_fp)
const char *const function_name
static int8_t data[kExtLength]
static void DoThrowNullError(Isolate *isolate, Thread *thread, Zone *zone, bool is_param)
const char * DeoptReasonToCString(ICData::DeoptReasonId deopt_reason)
void __tsan_release(void *addr)
DEFINE_LEAF_RUNTIME_ENTRY(void, StoreBufferBlockProcess, 1, Thread *thread)
void ReportImpossibleNullError(intptr_t cid, StackFrame *caller_frame, Thread *thread)
void OnEveryRuntimeEntryCall(Thread *thread, const char *runtime_call_name, bool can_lazy_deopt)
const int kFpuRegisterSize
DECLARE_FLAG(bool, show_invisible_frames)
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified vm service A custom Dart VM Service port The default is to pick a randomly available open port disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode disable vm service Disable mDNS Dart VM Service publication Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set endless trace Enable an endless trace buffer The default is a ring buffer This is useful when very old events need to viewed For during application launch Memory usage will continue to grow indefinitely however Start app with an specific route defined on the framework flutter assets Path to the Flutter assets directory enable service port Allow the VM service to fallback to automatic port selection if binding to a specified port fails trace Trace early application lifecycle Automatically switches to an endless trace buffer trace skia Filters out all Skia trace event categories except those that are specified in this comma separated list dump skp on shader Automatically dump the skp that triggers new shader compilations This is useful for writing custom ShaderWarmUp to reduce jank By this is not enabled to reduce the overhead purge persistent cache
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified vm service A custom Dart VM Service port The default is to pick a randomly available open port disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode disable vm service Disable mDNS Dart VM Service publication Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set endless trace buffer
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive mode
std::function< void()> closure
SIN Vec< N, float > trunc(const Vec< N, float > &x)
SI auto map(std::index_sequence< I... >, Fn &&fn, const Args &... args) -> skvx::Vec< sizeof...(I), decltype(fn(args[0]...))>
SIN Vec< N, float > floor(const Vec< N, float > &x)
SIN Vec< N, float > ceil(const Vec< N, float > &x)
#define CHECK_STACK_ALIGNMENT
#define DEOPT_REASON_TO_TEXT(name)
#define TRACE_RUNTIME_CALL(format, name)
#define END_LEAF_RUNTIME_ENTRY
intptr_t first_local_from_fp
intptr_t FrameSlotForVariableIndex(intptr_t index) const