Flutter Engine
The Flutter Engine
runtime_entry.cc
Go to the documentation of this file.
1// Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/runtime_entry.h"
6
7#include <memory>
8
11#include "vm/code_descriptors.h"
12#include "vm/code_patcher.h"
16#include "vm/dart_api_impl.h"
17#include "vm/dart_api_state.h"
18#include "vm/dart_entry.h"
19#include "vm/debugger.h"
21#include "vm/exceptions.h"
23#include "vm/flags.h"
24#include "vm/heap/verifier.h"
25#include "vm/instructions.h"
26#include "vm/kernel_isolate.h"
27#include "vm/message.h"
28#include "vm/message_handler.h"
29#include "vm/object_store.h"
30#include "vm/parser.h"
31#include "vm/resolver.h"
32#include "vm/service_isolate.h"
33#include "vm/stack_frame.h"
34#include "vm/symbols.h"
35#include "vm/thread.h"
37#include "vm/zone_text_buffer.h"
38
39#if !defined(DART_PRECOMPILED_RUNTIME)
41#endif // !defined(DART_PRECOMPILED_RUNTIME)
42
43namespace dart {
44
45static constexpr intptr_t kDefaultMaxSubtypeCacheEntries =
48 int,
49 max_subtype_cache_entries,
51 "Maximum number of subtype cache entries (number of checks cached).");
53 int,
54 regexp_optimization_counter_threshold,
55 1000,
56 "RegExp's usage-counter value before it is optimized, -1 means never");
58 reoptimization_counter_threshold,
59 4000,
60 "Counter threshold before a function gets reoptimized.");
62 runtime_allocate_old,
63 false,
64 "Use old-space for allocation via runtime calls.");
66 runtime_allocate_spill_tlab,
67 false,
68 "Ensure results of allocation via runtime calls are not in an "
69 "active TLAB.");
70DEFINE_FLAG(bool, trace_deoptimization, false, "Trace deoptimization");
72 trace_deoptimization_verbose,
73 false,
74 "Trace deoptimization verbose");
75
76DECLARE_FLAG(int, max_deoptimization_counter_threshold);
77DECLARE_FLAG(bool, trace_compiler);
78DECLARE_FLAG(bool, trace_optimizing_compiler);
79DECLARE_FLAG(int, max_polymorphic_checks);
80
81DEFINE_FLAG(bool, trace_osr, false, "Trace attempts at on-stack replacement.");
82
83DEFINE_FLAG(int, gc_every, 0, "Run major GC on every N stack overflow checks");
85 stacktrace_every,
86 0,
87 "Compute debugger stacktrace on every N stack overflow checks");
89 stacktrace_filter,
90 nullptr,
91 "Compute stacktrace in named function on stack overflow checks");
93 deoptimize_filter,
94 nullptr,
95 "Deoptimize in named function on stack overflow checks");
97 deoptimize_on_runtime_call_name_filter,
98 nullptr,
99 "Runtime call name filter for --deoptimize-on-runtime-call-every.");
100
102 unopt_monomorphic_calls,
103 true,
104 "Enable specializing monomorphic calls from unoptimized code.");
106 unopt_megamorphic_calls,
107 true,
108 "Enable specializing megamorphic calls from unoptimized code.");
110 verbose_stack_overflow,
111 false,
112 "Print additional details about stack overflow.");
113
114DECLARE_FLAG(int, reload_every);
115DECLARE_FLAG(bool, reload_every_optimized);
116DECLARE_FLAG(bool, reload_every_back_off);
117
118DEFINE_RUNTIME_ENTRY(RangeError, 2) {
119 const Instance& length = Instance::CheckedHandle(zone, arguments.ArgAt(0));
120 const Instance& index = Instance::CheckedHandle(zone, arguments.ArgAt(1));
121 if (!length.IsInteger()) {
122 // Throw: new ArgumentError.value(length, "length", "is not an integer");
123 const Array& args = Array::Handle(zone, Array::New(3));
124 args.SetAt(0, length);
125 args.SetAt(1, Symbols::Length());
126 args.SetAt(2, String::Handle(zone, String::New("is not an integer")));
128 }
129 if (!index.IsInteger()) {
130 // Throw: new ArgumentError.value(index, "index", "is not an integer");
131 const Array& args = Array::Handle(zone, Array::New(3));
132 args.SetAt(0, index);
133 args.SetAt(1, Symbols::Index());
134 args.SetAt(2, String::Handle(zone, String::New("is not an integer")));
136 }
137 // Throw: new RangeError.range(index, 0, length - 1, "length");
138 const Array& args = Array::Handle(zone, Array::New(4));
139 args.SetAt(0, index);
140 args.SetAt(1, Integer::Handle(zone, Integer::New(0)));
141 args.SetAt(
143 zone, Integer::Cast(length).ArithmeticOp(
144 Token::kSUB, Integer::Handle(zone, Integer::New(1)))));
145 args.SetAt(3, Symbols::Length());
147}
148
149DEFINE_RUNTIME_ENTRY(RangeErrorUnboxedInt64, 0) {
150 int64_t unboxed_length = thread->unboxed_int64_runtime_arg();
151 int64_t unboxed_index = thread->unboxed_int64_runtime_second_arg();
152 const auto& length = Integer::Handle(zone, Integer::New(unboxed_length));
153 const auto& index = Integer::Handle(zone, Integer::New(unboxed_index));
154 // Throw: new RangeError.range(index, 0, length - 1, "length");
155 const Array& args = Array::Handle(zone, Array::New(4));
156 args.SetAt(0, index);
157 args.SetAt(1, Integer::Handle(zone, Integer::New(0)));
158 args.SetAt(
160 zone, Integer::Cast(length).ArithmeticOp(
161 Token::kSUB, Integer::Handle(zone, Integer::New(1)))));
162 args.SetAt(3, Symbols::Length());
164}
165
166DEFINE_RUNTIME_ENTRY(WriteError, 2) {
167 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
168 const Smi& kind = Smi::CheckedHandle(zone, arguments.ArgAt(1));
169 auto& message = String::Handle(zone);
170 switch (kind.Value()) {
171 case 0: // CheckWritableInstr::Kind::kWriteUnmodifiableTypedData:
172 message = String::NewFormatted("Cannot modify an unmodifiable list: %s",
173 receiver.ToCString());
174 break;
175 case 1: // CheckWritableInstr::Kind::kDeeplyImmutableAttachNativeFinalizer:
177 "Cannot attach NativeFinalizer to deeply immutable object: %s",
178 receiver.ToCString());
179 break;
180 }
181 const Array& args = Array::Handle(Array::New(1));
182 args.SetAt(0, message);
184}
185
186static void NullErrorHelper(Zone* zone,
187 const String& selector,
188 bool is_param_name = false) {
189 if (is_param_name) {
190 const String& error = String::Handle(
191 selector.IsNull()
192 ? String::New("argument value is null")
193 : String::NewFormatted("argument value for '%s' is null",
194 selector.ToCString()));
196 return;
197 }
198
199 // If the selector is null, this must be a null check that wasn't due to a
200 // method invocation, so was due to the null check operator.
201 if (selector.IsNull()) {
202 const Array& args = Array::Handle(zone, Array::New(4));
203 args.SetAt(
205 zone, String::New("Null check operator used on a null value")));
207 return;
208 }
209
211 if (Field::IsGetterName(selector)) {
213 } else if (Field::IsSetterName(selector)) {
215 }
216
217 const Smi& invocation_type = Smi::Handle(
218 zone,
220
221 const Array& args = Array::Handle(zone, Array::New(7));
222 args.SetAt(0, /* instance */ Object::null_object());
223 args.SetAt(1, selector);
224 args.SetAt(2, invocation_type);
225 args.SetAt(3, /* func_type_args_length */ Object::smi_zero());
226 args.SetAt(4, /* func_type_args */ Object::null_object());
227 args.SetAt(5, /* func_args */ Object::null_object());
228 args.SetAt(6, /* func_arg_names */ Object::null_object());
230}
231
232static void DoThrowNullError(Isolate* isolate,
233 Thread* thread,
234 Zone* zone,
235 bool is_param) {
236 DartFrameIterator iterator(thread,
238 const StackFrame* caller_frame = iterator.NextFrame();
239 ASSERT(caller_frame->IsDartFrame());
240 const Code& code = Code::Handle(zone, caller_frame->LookupDartCode());
241 const uword pc_offset = caller_frame->pc() - code.PayloadStart();
242
243 if (FLAG_shared_slow_path_triggers_gc) {
245 }
246
247 const CodeSourceMap& map =
248 CodeSourceMap::Handle(zone, code.code_source_map());
249 String& member_name = String::Handle(zone);
250 if (!map.IsNull()) {
251 CodeSourceMapReader reader(map, Array::null_array(),
252 Function::null_function());
253 const intptr_t name_index = reader.GetNullCheckNameIndexAt(pc_offset);
254 RELEASE_ASSERT(name_index >= 0);
255
256 const ObjectPool& pool = ObjectPool::Handle(zone, code.GetObjectPool());
257 member_name ^= pool.ObjectAt(name_index);
258 } else {
259 member_name = Symbols::OptimizedOut().ptr();
260 }
261
262 NullErrorHelper(zone, member_name, is_param);
263}
264
265DEFINE_RUNTIME_ENTRY(NullError, 0) {
266 DoThrowNullError(isolate, thread, zone, /*is_param=*/false);
267}
268
269// Collects information about pointers within the top |kMaxSlotsCollected|
270// slots on the stack.
271// TODO(b/179632636) This code is added in attempt to better understand
272// b/179632636 and should be removed in the future.
274 StackFrame* caller_frame,
275 Thread* thread) {
276 TextBuffer buffer(512);
277 buffer.Printf("hit null error with cid %" Pd ", caller context: ", cid);
278
279 const intptr_t kMaxSlotsCollected = 5;
280 const auto slots = reinterpret_cast<ObjectPtr*>(caller_frame->sp());
281 const intptr_t num_slots_in_frame =
282 reinterpret_cast<ObjectPtr*>(caller_frame->fp()) - slots;
283 const auto num_slots_to_collect =
284 Utils::Maximum(kMaxSlotsCollected, num_slots_in_frame);
285 bool comma = false;
286 for (intptr_t i = 0; i < num_slots_to_collect; i++) {
287 const ObjectPtr ptr = slots[i];
288 buffer.Printf("%s[sp+%" Pd "] %" Pp "", comma ? ", " : "", i,
289 static_cast<uword>(ptr));
290 if (ptr->IsHeapObject() &&
293 thread->heap()->Contains(UntaggedObject::ToAddr(ptr)))) {
294 buffer.Printf("(%" Pp ")", static_cast<uword>(ptr->untag()->tags_));
295 }
296 comma = true;
297 }
298
299 const char* message = buffer.buffer();
300 FATAL("%s", message);
301}
302
303DEFINE_RUNTIME_ENTRY(DispatchTableNullError, 1) {
304 const Smi& cid = Smi::CheckedHandle(zone, arguments.ArgAt(0));
305 if (cid.Value() != kNullCid) {
306 // We hit null error, but receiver is not null itself. This most likely
307 // is a memory corruption. Crash the VM but provide some additional
308 // information about the arguments on the stack.
309 DartFrameIterator iterator(thread,
311 StackFrame* caller_frame = iterator.NextFrame();
312 RELEASE_ASSERT(caller_frame->IsDartFrame());
313 ReportImpossibleNullError(cid.Value(), caller_frame, thread);
314 }
315 DoThrowNullError(isolate, thread, zone, /*is_param=*/false);
316}
317
318DEFINE_RUNTIME_ENTRY(NullErrorWithSelector, 1) {
319 const String& selector = String::CheckedHandle(zone, arguments.ArgAt(0));
320 NullErrorHelper(zone, selector);
321}
322
323DEFINE_RUNTIME_ENTRY(NullCastError, 0) {
324 NullErrorHelper(zone, String::null_string());
325}
326
327DEFINE_RUNTIME_ENTRY(ArgumentNullError, 0) {
328 DoThrowNullError(isolate, thread, zone, /*is_param=*/true);
329}
330
331DEFINE_RUNTIME_ENTRY(ArgumentError, 1) {
332 const Instance& value = Instance::CheckedHandle(zone, arguments.ArgAt(0));
334}
335
336DEFINE_RUNTIME_ENTRY(ArgumentErrorUnboxedInt64, 0) {
337 // Unboxed value is passed through a dedicated slot in Thread.
338 int64_t unboxed_value = arguments.thread()->unboxed_int64_runtime_arg();
339 const Integer& value = Integer::Handle(zone, Integer::New(unboxed_value));
341}
342
344 // Unboxed value is passed through a dedicated slot in Thread.
345 double val = arguments.thread()->unboxed_double_runtime_arg();
346 const Smi& recognized_kind = Smi::CheckedHandle(zone, arguments.ArgAt(0));
347 switch (recognized_kind.Value()) {
348 case MethodRecognizer::kDoubleToInteger:
349 break;
350 case MethodRecognizer::kDoubleFloorToInt:
351 val = floor(val);
352 break;
353 case MethodRecognizer::kDoubleCeilToInt:
354 val = ceil(val);
355 break;
356 default:
357 UNREACHABLE();
358 }
359 arguments.SetReturn(Integer::Handle(zone, DoubleToInteger(zone, val)));
360}
361
362DEFINE_RUNTIME_ENTRY(IntegerDivisionByZeroException, 0) {
363 const Array& args = Array::Handle(zone, Array::New(0));
365}
366
368 return UNLIKELY(FLAG_runtime_allocate_old) ? Heap::kOld : Heap::kNew;
369}
370
371static void RuntimeAllocationEpilogue(Thread* thread) {
372 if (UNLIKELY(FLAG_runtime_allocate_spill_tlab)) {
373 static RelaxedAtomic<uword> count = 0;
374 if ((count++ % 10) == 0) {
375 thread->heap()->new_space()->AbandonRemainingTLAB(thread);
376 }
377 }
378}
379
380// Allocation of a fixed length array of given element type.
381// This runtime entry is never called for allocating a List of a generic type,
382// because a prior run time call instantiates the element type if necessary.
383// Arg0: array length.
384// Arg1: array type arguments, i.e. vector of 1 type, the element type.
385// Return value: newly allocated array of length arg0.
386DEFINE_RUNTIME_ENTRY(AllocateArray, 2) {
387 const Instance& length = Instance::CheckedHandle(zone, arguments.ArgAt(0));
388 if (!length.IsInteger()) {
389 // Throw: new ArgumentError.value(length, "length", "is not an integer");
390 const Array& args = Array::Handle(zone, Array::New(3));
391 args.SetAt(0, length);
392 args.SetAt(1, Symbols::Length());
393 args.SetAt(2, String::Handle(zone, String::New("is not an integer")));
395 }
396 const int64_t len = Integer::Cast(length).AsInt64Value();
397 if (len < 0) {
398 // Throw: new RangeError.range(length, 0, Array::kMaxElements, "length");
399 Exceptions::ThrowRangeError("length", Integer::Cast(length), 0,
401 }
402 if (len > Array::kMaxElements) {
404 }
405
406 const Array& array = Array::Handle(
407 zone,
408 Array::New(static_cast<intptr_t>(len), SpaceForRuntimeAllocation()));
409 TypeArguments& element_type =
410 TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
411 // An Array is raw or takes one type argument. However, its type argument
412 // vector may be longer than 1 due to a type optimization reusing the type
413 // argument vector of the instantiator.
414 ASSERT(element_type.IsNull() ||
415 (element_type.Length() >= 1 && element_type.IsInstantiated()));
416 array.SetTypeArguments(element_type); // May be null.
417 arguments.SetReturn(array);
419}
420
422 if (FLAG_shared_slow_path_triggers_gc) {
423 isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
424 }
425 arguments.SetReturn(
428}
429
431 const double val = thread->unboxed_double_runtime_arg();
432 arguments.SetReturn(
435}
436
438 const auto val = thread->unboxed_simd128_runtime_arg();
439 arguments.SetReturn(
442}
443
445 const auto val = thread->unboxed_simd128_runtime_arg();
446 arguments.SetReturn(
449}
450
452 if (FLAG_shared_slow_path_triggers_gc) {
453 isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
454 }
455 arguments.SetReturn(Object::Handle(
458}
459
461 if (FLAG_shared_slow_path_triggers_gc) {
462 isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
463 }
464 arguments.SetReturn(Object::Handle(
465 zone, Float32x4::New(0.0, 0.0, 0.0, 0.0, SpaceForRuntimeAllocation())));
467}
468
470 if (FLAG_shared_slow_path_triggers_gc) {
471 isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
472 }
473 arguments.SetReturn(Object::Handle(
474 zone, Float64x2::New(0.0, 0.0, SpaceForRuntimeAllocation())));
476}
477
479 if (FLAG_shared_slow_path_triggers_gc) {
480 isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
481 }
482 arguments.SetReturn(Object::Handle(
483 zone, Int32x4::New(0, 0, 0, 0, SpaceForRuntimeAllocation())));
485}
486
487// Allocate typed data array of given class id and length.
488// Arg0: class id.
489// Arg1: number of elements.
490// Return value: newly allocated typed data array.
491DEFINE_RUNTIME_ENTRY(AllocateTypedData, 2) {
492 const intptr_t cid = Smi::CheckedHandle(zone, arguments.ArgAt(0)).Value();
493 const auto& length = Instance::CheckedHandle(zone, arguments.ArgAt(1));
494 if (!length.IsInteger()) {
495 const Array& args = Array::Handle(zone, Array::New(1));
496 args.SetAt(0, length);
498 }
499 const int64_t len = Integer::Cast(length).AsInt64Value();
500 const intptr_t max = TypedData::MaxElements(cid);
501 if (len < 0) {
502 Exceptions::ThrowRangeError("length", Integer::Cast(length), 0, max);
503 } else if (len > max) {
505 }
506 const auto& typed_data =
507 TypedData::Handle(zone, TypedData::New(cid, static_cast<intptr_t>(len),
509 arguments.SetReturn(typed_data);
511}
512
513// Helper returning the token position of the Dart caller.
517 StackFrame* caller_frame = iterator.NextFrame();
518 ASSERT(caller_frame != nullptr);
519 return caller_frame->GetTokenPos();
520}
521
522// Result of an invoke may be an unhandled exception, in which case we
523// rethrow it.
524static void ThrowIfError(const Object& result) {
525 if (!result.IsNull() && result.IsError()) {
527 }
528}
529
530// Allocate a new object.
531// Arg0: class of the object that needs to be allocated.
532// Arg1: type arguments of the object that needs to be allocated.
533// Return value: newly allocated object.
535 const Class& cls = Class::CheckedHandle(zone, arguments.ArgAt(0));
539 if (cls.NumTypeArguments() == 0) {
540 // No type arguments required for a non-parameterized type.
541 ASSERT(Instance::CheckedHandle(zone, arguments.ArgAt(1)).IsNull());
542 } else {
543 const auto& type_arguments =
544 TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
545 // Unless null (for a raw type), the type argument vector may be longer than
546 // necessary due to a type optimization reusing the type argument vector of
547 // the instantiator.
548 ASSERT(type_arguments.IsNull() ||
549 (type_arguments.IsInstantiated() &&
550 (type_arguments.Length() >= cls.NumTypeArguments())));
551 instance.SetTypeArguments(type_arguments);
552 }
553 arguments.SetReturn(instance);
555}
556
558 EnsureRememberedAndMarkingDeferred,
559 2,
560 uword /*ObjectPtr*/ object_in,
561 Thread* thread) {
562 ObjectPtr object = static_cast<ObjectPtr>(object_in);
563
564 // If we eliminate a generational write barriers on allocations of an object
565 // we need to ensure it's either a new-space object or it has been added to
566 // the remembered set.
567 //
568 // NOTE: We use static_cast<>() instead of ::RawCast() to avoid handle
569 // allocations in debug mode. Handle allocations in leaf runtimes can cause
570 // memory leaks because they will allocate into a handle scope from the next
571 // outermost runtime code (to which the generated Dart code might not return
572 // in a long time).
573 bool add_to_remembered_set = true;
574 if (object->IsNewObject()) {
575 add_to_remembered_set = false;
576 } else if (object->IsArray()) {
577 const intptr_t length = Array::LengthOf(static_cast<ArrayPtr>(object));
578 add_to_remembered_set =
580 } else if (object->IsContext()) {
581 const intptr_t num_context_variables =
582 Context::NumVariables(static_cast<ContextPtr>(object));
583 add_to_remembered_set =
585 num_context_variables);
586 }
587
588 if (add_to_remembered_set) {
589 object->untag()->EnsureInRememberedSet(thread);
590 }
591
592 // For incremental write barrier elimination, we need to ensure that the
593 // allocation ends up in the new space or else the object needs to added
594 // to deferred marking stack so it will be [re]scanned.
595 if (thread->is_marking()) {
596 thread->DeferredMarkingStackAddObject(object);
597 }
598
599 return static_cast<uword>(object);
600}
602
603// Instantiate type.
604// Arg0: uninstantiated type.
605// Arg1: instantiator type arguments.
606// Arg2: function type arguments.
607// Return value: instantiated type.
609 AbstractType& type = AbstractType::CheckedHandle(zone, arguments.ArgAt(0));
610 const TypeArguments& instantiator_type_arguments =
611 TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
612 const TypeArguments& function_type_arguments =
613 TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
614 ASSERT(!type.IsNull());
615 ASSERT(instantiator_type_arguments.IsNull() ||
616 instantiator_type_arguments.IsInstantiated());
617 ASSERT(function_type_arguments.IsNull() ||
618 function_type_arguments.IsInstantiated());
619 type = type.InstantiateFrom(instantiator_type_arguments,
620 function_type_arguments, kAllFree, Heap::kOld);
621 ASSERT(!type.IsNull() && type.IsInstantiated());
622 arguments.SetReturn(type);
623}
624
625// Instantiate type arguments.
626// Arg0: uninstantiated type arguments.
627// Arg1: instantiator type arguments.
628// Arg2: function type arguments.
629// Return value: instantiated type arguments.
630DEFINE_RUNTIME_ENTRY(InstantiateTypeArguments, 3) {
631 TypeArguments& type_arguments =
632 TypeArguments::CheckedHandle(zone, arguments.ArgAt(0));
633 const TypeArguments& instantiator_type_arguments =
634 TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
635 const TypeArguments& function_type_arguments =
636 TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
637 ASSERT(!type_arguments.IsNull() && !type_arguments.IsInstantiated());
638 ASSERT(instantiator_type_arguments.IsNull() ||
639 instantiator_type_arguments.IsInstantiated());
640 ASSERT(function_type_arguments.IsNull() ||
641 function_type_arguments.IsInstantiated());
642 // Code inlined in the caller should have optimized the case where the
643 // instantiator can be reused as type argument vector.
644 ASSERT(!type_arguments.IsUninstantiatedIdentity());
645 type_arguments = type_arguments.InstantiateAndCanonicalizeFrom(
646 instantiator_type_arguments, function_type_arguments);
647 ASSERT(type_arguments.IsNull() || type_arguments.IsInstantiated());
648 arguments.SetReturn(type_arguments);
649}
650
651// Helper routine for tracing a subtype check.
652static void PrintSubtypeCheck(const AbstractType& subtype,
653 const AbstractType& supertype,
654 const bool result) {
657 StackFrame* caller_frame = iterator.NextFrame();
658 ASSERT(caller_frame != nullptr);
659
660 LogBlock lb;
661 THR_Print("SubtypeCheck: '%s' %d %s '%s' %d (pc: %#" Px ").\n",
662 subtype.NameCString(), subtype.type_class_id(),
663 result ? "is" : "is !", supertype.NameCString(),
664 supertype.type_class_id(), caller_frame->pc());
665
666 const Function& function =
667 Function::Handle(caller_frame->LookupDartFunction());
668 if (function.HasSavedArgumentsDescriptor()) {
669 const auto& args_desc_array = Array::Handle(function.saved_args_desc());
670 const ArgumentsDescriptor args_desc(args_desc_array);
671 THR_Print(" -> Function %s [%s]\n", function.ToFullyQualifiedCString(),
672 args_desc.ToCString());
673 } else {
674 THR_Print(" -> Function %s\n", function.ToFullyQualifiedCString());
675 }
676}
677
678// Instantiate type.
679// Arg0: instantiator type arguments
680// Arg1: function type arguments
681// Arg2: type to be a subtype of the other
682// Arg3: type to be a supertype of the other
683// Arg4: variable name of the subtype parameter
684// No return value.
685DEFINE_RUNTIME_ENTRY(SubtypeCheck, 5) {
686 const TypeArguments& instantiator_type_args =
687 TypeArguments::CheckedHandle(zone, arguments.ArgAt(0));
688 const TypeArguments& function_type_args =
689 TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
690 AbstractType& subtype = AbstractType::CheckedHandle(zone, arguments.ArgAt(2));
691 AbstractType& supertype =
692 AbstractType::CheckedHandle(zone, arguments.ArgAt(3));
693 const String& dst_name = String::CheckedHandle(zone, arguments.ArgAt(4));
694
695 ASSERT(!supertype.IsNull());
696 ASSERT(!subtype.IsNull());
697
698 // Now that AssertSubtype may be checking types only available at runtime,
699 // we can't guarantee the supertype isn't the top type.
700 if (supertype.IsTopTypeForSubtyping()) return;
701
702 // The supertype or subtype may not be instantiated.
704 &subtype, &supertype, instantiator_type_args, function_type_args)) {
705 if (FLAG_trace_type_checks) {
706 // The supertype and subtype are now instantiated. Subtype check passed.
707 PrintSubtypeCheck(subtype, supertype, true);
708 }
709 return;
710 }
711 if (FLAG_trace_type_checks) {
712 // The supertype and subtype are now instantiated. Subtype check failed.
713 PrintSubtypeCheck(subtype, supertype, false);
714 }
715
716 // Throw a dynamic type error.
717 const TokenPosition location = GetCallerLocation();
718 Exceptions::CreateAndThrowTypeError(location, subtype, supertype, dst_name);
719 UNREACHABLE();
720}
721
722// Allocate a new closure and initializes its function, context,
723// instantiator type arguments and delayed type arguments fields.
724// Arg0: function.
725// Arg1: context.
726// Arg2: instantiator type arguments.
727// Arg3: delayed type arguments.
728// Return value: newly allocated closure.
729DEFINE_RUNTIME_ENTRY(AllocateClosure, 4) {
730 const auto& function = Function::CheckedHandle(zone, arguments.ArgAt(0));
731 const auto& context = Object::Handle(zone, arguments.ArgAt(1));
732 const auto& instantiator_type_args =
733 TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
734 const auto& delayed_type_args =
735 TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
737 zone, Closure::New(instantiator_type_args, Object::null_type_arguments(),
738 delayed_type_args, function, context,
740 arguments.SetReturn(closure);
742}
743
744// Allocate a new context large enough to hold the given number of variables.
745// Arg0: number of variables.
746// Return value: newly allocated context.
747DEFINE_RUNTIME_ENTRY(AllocateContext, 1) {
748 const Smi& num_variables = Smi::CheckedHandle(zone, arguments.ArgAt(0));
749 const Context& context = Context::Handle(
750 zone, Context::New(num_variables.Value(), SpaceForRuntimeAllocation()));
751 arguments.SetReturn(context);
753}
754
755// Make a copy of the given context, including the values of the captured
756// variables.
757// Arg0: the context to be cloned.
758// Return value: newly allocated context.
759DEFINE_RUNTIME_ENTRY(CloneContext, 1) {
760 const Context& ctx = Context::CheckedHandle(zone, arguments.ArgAt(0));
761 Context& cloned_ctx = Context::Handle(
763 cloned_ctx.set_parent(Context::Handle(zone, ctx.parent()));
764 Object& inst = Object::Handle(zone);
765 for (int i = 0; i < ctx.num_variables(); i++) {
766 inst = ctx.At(i);
767 cloned_ctx.SetAt(i, inst);
768 }
769 arguments.SetReturn(cloned_ctx);
771}
772
773// Allocate a new record instance.
774// Arg0: record shape id.
775// Return value: newly allocated record.
776DEFINE_RUNTIME_ENTRY(AllocateRecord, 1) {
777 const RecordShape shape(Smi::RawCast(arguments.ArgAt(0)));
778 const Record& record =
780 arguments.SetReturn(record);
782}
783
784// Allocate a new small record instance and initialize its fields.
785// Arg0: record shape id.
786// Arg1-Arg3: field values.
787// Return value: newly allocated record.
788DEFINE_RUNTIME_ENTRY(AllocateSmallRecord, 4) {
789 const RecordShape shape(Smi::RawCast(arguments.ArgAt(0)));
790 const auto& value0 = Instance::CheckedHandle(zone, arguments.ArgAt(1));
791 const auto& value1 = Instance::CheckedHandle(zone, arguments.ArgAt(2));
792 const auto& value2 = Instance::CheckedHandle(zone, arguments.ArgAt(3));
793 const Record& record =
795 const intptr_t num_fields = shape.num_fields();
796 ASSERT(num_fields == 2 || num_fields == 3);
797 record.SetFieldAt(0, value0);
798 record.SetFieldAt(1, value1);
799 if (num_fields > 2) {
800 record.SetFieldAt(2, value2);
801 }
802 arguments.SetReturn(record);
804}
805
806// Allocate a SuspendState object.
807// Arg0: frame size.
808// Arg1: existing SuspendState object or function data.
809// Return value: newly allocated object.
810DEFINE_RUNTIME_ENTRY(AllocateSuspendState, 2) {
811 const intptr_t frame_size =
812 Smi::CheckedHandle(zone, arguments.ArgAt(0)).Value();
813 const Object& previous_state = Object::Handle(zone, arguments.ArgAt(1));
815 if (previous_state.IsSuspendState()) {
816 const auto& suspend_state = SuspendState::Cast(previous_state);
817 const auto& function_data =
818 Instance::Handle(zone, suspend_state.function_data());
819 ObjectStore* object_store = thread->isolate_group()->object_store();
820 if (function_data.GetClassId() ==
821 Class::Handle(zone, object_store->async_star_stream_controller())
822 .id()) {
823 // Reset _AsyncStarStreamController.asyncStarBody to null in order
824 // to create a new callback closure during next yield.
825 // The new callback closure will capture the reallocated SuspendState.
826 function_data.SetField(
828 zone,
829 object_store->async_star_stream_controller_async_star_body()),
830 Object::null_object());
831 }
832 result = SuspendState::New(frame_size, function_data,
834 if (function_data.GetClassId() ==
835 Class::Handle(zone, object_store->sync_star_iterator_class()).id()) {
836 // Refresh _SyncStarIterator._state with the new SuspendState object.
837 function_data.SetField(
838 Field::Handle(zone, object_store->sync_star_iterator_state()),
839 result);
840 }
841 } else {
842 result = SuspendState::New(frame_size, Instance::Cast(previous_state),
844 }
845 arguments.SetReturn(result);
847}
848
849// Makes a copy of the given SuspendState object, including the payload frame.
850// Arg0: the SuspendState object to be cloned.
851// Return value: newly allocated object.
852DEFINE_RUNTIME_ENTRY(CloneSuspendState, 1) {
853 const SuspendState& src =
854 SuspendState::CheckedHandle(zone, arguments.ArgAt(0));
857 arguments.SetReturn(dst);
859}
860
861// Helper routine for tracing a type check.
862static void PrintTypeCheck(const char* message,
863 const Instance& instance,
864 const AbstractType& type,
865 const TypeArguments& instantiator_type_arguments,
866 const TypeArguments& function_type_arguments,
867 const Bool& result) {
870 StackFrame* caller_frame = iterator.NextFrame();
871 ASSERT(caller_frame != nullptr);
872
873 const AbstractType& instance_type =
875 ASSERT(instance_type.IsInstantiated() ||
876 (instance.IsClosure() && instance_type.IsInstantiated(kCurrentClass)));
877 LogBlock lb;
878 if (type.IsInstantiated()) {
879 THR_Print("%s: '%s' %d %s '%s' %d (pc: %#" Px ").\n", message,
880 instance_type.NameCString(), instance_type.type_class_id(),
881 (result.ptr() == Bool::True().ptr()) ? "is" : "is !",
882 type.NameCString(), type.type_class_id(), caller_frame->pc());
883 } else {
884 // Instantiate type before printing.
885 const AbstractType& instantiated_type = AbstractType::Handle(
886 type.InstantiateFrom(instantiator_type_arguments,
887 function_type_arguments, kAllFree, Heap::kOld));
888 THR_Print("%s: '%s' %s '%s' instantiated from '%s' (pc: %#" Px ").\n",
889 message, instance_type.NameCString(),
890 (result.ptr() == Bool::True().ptr()) ? "is" : "is !",
891 instantiated_type.NameCString(), type.NameCString(),
892 caller_frame->pc());
893 }
894 const Function& function =
895 Function::Handle(caller_frame->LookupDartFunction());
896 if (function.HasSavedArgumentsDescriptor()) {
897 const auto& args_desc_array = Array::Handle(function.saved_args_desc());
898 const ArgumentsDescriptor args_desc(args_desc_array);
899 THR_Print(" -> Function %s [%s]\n", function.ToFullyQualifiedCString(),
900 args_desc.ToCString());
901 } else {
902 THR_Print(" -> Function %s\n", function.ToFullyQualifiedCString());
903 }
904}
905
906#if defined(TARGET_ARCH_IA32)
907static BoolPtr CheckHashBasedSubtypeTestCache(
908 Zone* zone,
909 Thread* thread,
910 const Instance& instance,
911 const AbstractType& destination_type,
912 const TypeArguments& instantiator_type_arguments,
913 const TypeArguments& function_type_arguments,
914 const SubtypeTestCache& cache) {
915 ASSERT(cache.IsHash());
916 // Record instances are not added to the cache as they don't have a valid
917 // key (type of a record depends on types of all its fields).
918 if (instance.IsRecord()) return Bool::null();
919 Class& instance_class = Class::Handle(zone);
920 if (instance.IsSmi()) {
921 instance_class = Smi::Class();
922 } else {
923 instance_class = instance.clazz();
924 }
925 // If the type is uninstantiated and refers to parent function type
926 // parameters, the function_type_arguments have been canonicalized
927 // when concatenated.
928 auto& instance_class_id_or_signature = Object::Handle(zone);
929 auto& instance_type_arguments = TypeArguments::Handle(zone);
930 auto& instance_parent_function_type_arguments = TypeArguments::Handle(zone);
931 auto& instance_delayed_type_arguments = TypeArguments::Handle(zone);
932 if (instance_class.IsClosureClass()) {
933 const auto& closure = Closure::Cast(instance);
934 const auto& function = Function::Handle(zone, closure.function());
935 instance_class_id_or_signature = function.signature();
936 instance_type_arguments = closure.instantiator_type_arguments();
937 instance_parent_function_type_arguments = closure.function_type_arguments();
938 instance_delayed_type_arguments = closure.delayed_type_arguments();
939 } else {
940 instance_class_id_or_signature = Smi::New(instance_class.id());
941 if (instance_class.NumTypeArguments() > 0) {
942 instance_type_arguments = instance.GetTypeArguments();
943 }
944 }
945
946 intptr_t index = -1;
947 auto& result = Bool::Handle(zone);
948 if (cache.HasCheck(instance_class_id_or_signature, destination_type,
949 instance_type_arguments, instantiator_type_arguments,
950 function_type_arguments,
951 instance_parent_function_type_arguments,
952 instance_delayed_type_arguments, &index, &result)) {
953 return result.ptr();
954 }
955
956 return Bool::null();
957}
958#endif // defined(TARGET_ARCH_IA32)
959
960// This updates the type test cache, an array containing 8 elements:
961// - instance class (or function if the instance is a closure)
962// - instance type arguments (null if the instance class is not generic)
963// - instantiator type arguments (null if the type is instantiated)
964// - function type arguments (null if the type is instantiated)
965// - instance parent function type arguments (null if instance is not a closure)
966// - instance delayed type arguments (null if instance is not a closure)
967// - destination type (null if the type was known at compile time)
968// - test result
969// It can be applied to classes with type arguments in which case it contains
970// just the result of the class subtype test, not including the evaluation of
971// type arguments.
972// This operation is currently very slow (lookup of code is not efficient yet).
974 Zone* zone,
975 Thread* thread,
976 const Instance& instance,
977 const AbstractType& destination_type,
978 const TypeArguments& instantiator_type_arguments,
979 const TypeArguments& function_type_arguments,
980 const Bool& result,
981 const SubtypeTestCache& new_cache) {
982 ASSERT(!new_cache.IsNull());
983 ASSERT(destination_type.IsCanonical());
984 ASSERT(instantiator_type_arguments.IsCanonical());
985 ASSERT(function_type_arguments.IsCanonical());
986 if (instance.IsRecord()) {
987 // Do not add record instances to cache as they don't have a valid
988 // key (type of a record depends on types of all its fields).
989 if (FLAG_trace_type_checks) {
990 THR_Print("Not updating subtype test cache for the record instance.\n");
991 }
992 return;
993 }
994 Class& instance_class = Class::Handle(zone);
995 if (instance.IsSmi()) {
996 instance_class = Smi::Class();
997 } else {
998 instance_class = instance.clazz();
999 }
1000 // If the type is uninstantiated and refers to parent function type
1001 // parameters, the function_type_arguments have been canonicalized
1002 // when concatenated.
1003 auto& instance_class_id_or_signature = Object::Handle(zone);
1004 auto& instance_type_arguments = TypeArguments::Handle(zone);
1005 auto& instance_parent_function_type_arguments = TypeArguments::Handle(zone);
1006 auto& instance_delayed_type_arguments = TypeArguments::Handle(zone);
1007 if (instance_class.IsClosureClass()) {
1008 const auto& closure = Closure::Cast(instance);
1009 const auto& function = Function::Handle(zone, closure.function());
1010 instance_class_id_or_signature = function.signature();
1011 ASSERT(instance_class_id_or_signature.IsFunctionType());
1012 instance_type_arguments = closure.instantiator_type_arguments();
1013 instance_parent_function_type_arguments = closure.function_type_arguments();
1014 instance_delayed_type_arguments = closure.delayed_type_arguments();
1015 ASSERT(instance_class_id_or_signature.IsCanonical());
1016 ASSERT(instance_type_arguments.IsCanonical());
1017 ASSERT(instance_parent_function_type_arguments.IsCanonical());
1018 ASSERT(instance_delayed_type_arguments.IsCanonical());
1019 } else {
1020 instance_class_id_or_signature = Smi::New(instance_class.id());
1021 if (instance_class.NumTypeArguments() > 0) {
1022 instance_type_arguments = instance.GetTypeArguments();
1023 ASSERT(instance_type_arguments.IsCanonical());
1024 }
1025 }
1026 if (FLAG_trace_type_checks) {
1027 const auto& instance_class_name =
1028 String::Handle(zone, instance_class.Name());
1029 TextBuffer buffer(256);
1030 buffer.Printf(" Updating test cache %#" Px " with result %s for:\n",
1031 static_cast<uword>(new_cache.ptr()), result.ToCString());
1032 if (instance.IsString()) {
1033 buffer.Printf(" instance: '%s'\n", instance.ToCString());
1034 } else {
1035 buffer.Printf(" instance: %s\n", instance.ToCString());
1036 }
1037 buffer.Printf(" class: %s (%" Pd ")\n", instance_class_name.ToCString(),
1038 instance_class.id());
1039 buffer.Printf(
1040 " raw entry: [ %#" Px ", %#" Px ", %#" Px ", %#" Px ", %#" Px
1041 ", %#" Px ", %#" Px ", %#" Px " ]\n",
1042 static_cast<uword>(instance_class_id_or_signature.ptr()),
1043 static_cast<uword>(instance_type_arguments.ptr()),
1044 static_cast<uword>(instantiator_type_arguments.ptr()),
1045 static_cast<uword>(function_type_arguments.ptr()),
1046 static_cast<uword>(instance_parent_function_type_arguments.ptr()),
1047 static_cast<uword>(instance_delayed_type_arguments.ptr()),
1048 static_cast<uword>(destination_type.ptr()),
1049 static_cast<uword>(result.ptr()));
1050 THR_Print("%s", buffer.buffer());
1051 }
1052 {
1055 const intptr_t len = new_cache.NumberOfChecks();
1056 if (len >= FLAG_max_subtype_cache_entries) {
1057 if (FLAG_trace_type_checks) {
1058 THR_Print("Not updating subtype test cache as its length reached %d\n",
1059 FLAG_max_subtype_cache_entries);
1060 }
1061 return;
1062 }
1063 intptr_t colliding_index = -1;
1064 auto& old_result = Bool::Handle(zone);
1065 if (new_cache.HasCheck(
1066 instance_class_id_or_signature, destination_type,
1067 instance_type_arguments, instantiator_type_arguments,
1068 function_type_arguments, instance_parent_function_type_arguments,
1069 instance_delayed_type_arguments, &colliding_index, &old_result)) {
1070 if (FLAG_trace_type_checks) {
1071 TextBuffer buffer(256);
1072 buffer.Printf(" Collision for test cache %#" Px " at index %" Pd ":\n",
1073 static_cast<uword>(new_cache.ptr()), colliding_index);
1074 buffer.Printf(" entry: ");
1075 new_cache.WriteEntryToBuffer(zone, &buffer, colliding_index, " ");
1076 THR_Print("%s\n", buffer.buffer());
1077 }
1078 if (old_result.ptr() != result.ptr()) {
1079 FATAL("Existing subtype test cache entry has result %s, not %s",
1080 old_result.ToCString(), result.ToCString());
1081 }
1082 // Some other isolate might have updated the cache between entry was
1083 // found missing and now.
1084 return;
1085 }
1086 const intptr_t new_index = new_cache.AddCheck(
1087 instance_class_id_or_signature, destination_type,
1088 instance_type_arguments, instantiator_type_arguments,
1089 function_type_arguments, instance_parent_function_type_arguments,
1090 instance_delayed_type_arguments, result);
1091 if (FLAG_trace_type_checks) {
1092 TextBuffer buffer(256);
1093 buffer.Printf(" Added new entry to test cache %#" Px " at index %" Pd
1094 ":\n",
1095 static_cast<uword>(new_cache.ptr()), new_index);
1096 buffer.Printf(" new entry: ");
1097 new_cache.WriteEntryToBuffer(zone, &buffer, new_index, " ");
1098 THR_Print("%s\n", buffer.buffer());
1099 }
1100 }
1101}
1102
1103// Check that the given instance is an instance of the given type.
1104// Tested instance may be null, because a null test cannot always be inlined,
1105// e.g 'null is T' yields true if T = Null, but false if T = bool.
1106// Arg0: instance being checked.
1107// Arg1: type.
1108// Arg2: type arguments of the instantiator of the type.
1109// Arg3: type arguments of the function of the type.
1110// Arg4: SubtypeTestCache.
1111// Return value: true or false.
1112DEFINE_RUNTIME_ENTRY(Instanceof, 5) {
1113 const Instance& instance = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1114 const AbstractType& type =
1115 AbstractType::CheckedHandle(zone, arguments.ArgAt(1));
1116 const TypeArguments& instantiator_type_arguments =
1117 TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
1118 const TypeArguments& function_type_arguments =
1119 TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
1120 const SubtypeTestCache& cache =
1121 SubtypeTestCache::CheckedHandle(zone, arguments.ArgAt(4));
1122 ASSERT(type.IsFinalized());
1123 ASSERT(!type.IsDynamicType()); // No need to check assignment.
1124 ASSERT(!cache.IsNull());
1125#if defined(TARGET_ARCH_IA32)
1126 // Hash-based caches are still not handled by the stubs on IA32.
1127 if (cache.IsHash()) {
1128 const auto& result = Bool::Handle(
1129 zone, CheckHashBasedSubtypeTestCache(zone, thread, instance, type,
1130 instantiator_type_arguments,
1131 function_type_arguments, cache));
1132 if (!result.IsNull()) {
1133 // Early exit because an entry already exists in the cache.
1134 arguments.SetReturn(result);
1135 return;
1136 }
1137 }
1138#endif // defined(TARGET_ARCH_IA32)
1139 const Bool& result = Bool::Get(instance.IsInstanceOf(
1140 type, instantiator_type_arguments, function_type_arguments));
1141 if (FLAG_trace_type_checks) {
1142 PrintTypeCheck("InstanceOf", instance, type, instantiator_type_arguments,
1143 function_type_arguments, result);
1144 }
1145 UpdateTypeTestCache(zone, thread, instance, type, instantiator_type_arguments,
1146 function_type_arguments, result, cache);
1147 arguments.SetReturn(result);
1148}
1149
1150#if defined(TESTING)
1151// Used only in type_testing_stubs_test.cc. If DRT_TypeCheck is entered, then
1152// this flag is set to true.
1153bool TESTING_runtime_entered_on_TTS_invocation = false;
1154#endif
1155
1156// Check that the type of the given instance is a subtype of the given type and
1157// can therefore be assigned.
1158// Tested instance may not be null, because a null test is always inlined.
1159// Arg0: instance being assigned.
1160// Arg1: type being assigned to.
1161// Arg2: type arguments of the instantiator of the type being assigned to.
1162// Arg3: type arguments of the function of the type being assigned to.
1163// Arg4: name of variable being assigned to.
1164// Arg5: SubtypeTestCache.
1165// Arg6: invocation mode (see TypeCheckMode)
1166// Return value: instance if a subtype, otherwise throw a TypeError.
1168 const Instance& src_instance =
1169 Instance::CheckedHandle(zone, arguments.ArgAt(0));
1170 const AbstractType& dst_type =
1171 AbstractType::CheckedHandle(zone, arguments.ArgAt(1));
1172 const TypeArguments& instantiator_type_arguments =
1173 TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
1174 const TypeArguments& function_type_arguments =
1175 TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
1176 String& dst_name = String::Handle(zone);
1177 dst_name ^= arguments.ArgAt(4);
1178 ASSERT(dst_name.IsNull() || dst_name.IsString());
1179
1181 cache ^= arguments.ArgAt(5);
1182 ASSERT(cache.IsNull() || cache.IsSubtypeTestCache());
1183
1184 const TypeCheckMode mode = static_cast<TypeCheckMode>(
1185 Smi::CheckedHandle(zone, arguments.ArgAt(6)).Value());
1186
1187#if defined(TESTING)
1188 TESTING_runtime_entered_on_TTS_invocation = true;
1189#endif
1190
1191#if defined(TARGET_ARCH_IA32)
1193 // Hash-based caches are still not handled by the stubs on IA32.
1194 if (cache.IsHash()) {
1195 const auto& result = Bool::Handle(
1196 zone, CheckHashBasedSubtypeTestCache(
1197 zone, thread, src_instance, dst_type,
1198 instantiator_type_arguments, function_type_arguments, cache));
1199 if (!result.IsNull()) {
1200 // Early exit because an entry already exists in the cache.
1201 arguments.SetReturn(result);
1202 return;
1203 }
1204 }
1205#endif // defined(TARGET_ARCH_IA32)
1206
1207 // This is guaranteed on the calling side.
1208 ASSERT(!dst_type.IsDynamicType());
1209
1210 const bool is_instance_of = src_instance.IsAssignableTo(
1211 dst_type, instantiator_type_arguments, function_type_arguments);
1212
1213 if (FLAG_trace_type_checks) {
1214 PrintTypeCheck("TypeCheck", src_instance, dst_type,
1215 instantiator_type_arguments, function_type_arguments,
1216 Bool::Get(is_instance_of));
1217 }
1218
1219 // Most paths through this runtime entry don't need to know what the
1220 // destination name was or if this was a dynamic assert assignable call,
1221 // so only walk the stack to find the stored destination name when necessary.
1222 auto resolve_dst_name = [&]() {
1223 if (!dst_name.IsNull()) return;
1224#if !defined(TARGET_ARCH_IA32)
1225 // Can only come here from type testing stub.
1227
1228 // Grab the [dst_name] from the pool. It's stored at one pool slot after
1229 // the subtype-test-cache.
1230 DartFrameIterator iterator(thread,
1232 StackFrame* caller_frame = iterator.NextFrame();
1233 const Code& caller_code =
1234 Code::Handle(zone, caller_frame->LookupDartCode());
1235 const ObjectPool& pool =
1236 ObjectPool::Handle(zone, caller_code.GetObjectPool());
1237 TypeTestingStubCallPattern tts_pattern(caller_frame->pc());
1238 const intptr_t stc_pool_idx = tts_pattern.GetSubtypeTestCachePoolIndex();
1239 const intptr_t dst_name_idx = stc_pool_idx + 1;
1240 dst_name ^= pool.ObjectAt(dst_name_idx);
1241#else
1242 UNREACHABLE();
1243#endif
1244 };
1245
1246 if (!is_instance_of) {
1247 resolve_dst_name();
1248 if (dst_name.ptr() ==
1249 Symbols::dynamic_assert_assignable_stc_check().ptr()) {
1250#if !defined(TARGET_ARCH_IA32)
1251 // Can only come here from type testing stub via dynamic AssertAssignable.
1253#endif
1254 // This was a dynamic closure call where the destination name was not
1255 // known at compile-time. Thus, fetch the original arguments and arguments
1256 // descriptor and re-do the type check in the runtime, which causes the
1257 // error with the proper destination name to be thrown.
1258 DartFrameIterator iterator(thread,
1260 StackFrame* caller_frame = iterator.NextFrame();
1261 const auto& dispatcher =
1262 Function::Handle(zone, caller_frame->LookupDartFunction());
1263 ASSERT(dispatcher.IsInvokeFieldDispatcher());
1264 const auto& orig_arguments_desc =
1265 Array::Handle(zone, dispatcher.saved_args_desc());
1266 const ArgumentsDescriptor args_desc(orig_arguments_desc);
1267 const intptr_t arg_count = args_desc.CountWithTypeArgs();
1268 const auto& orig_arguments = Array::Handle(zone, Array::New(arg_count));
1269 auto& obj = Object::Handle(zone);
1270 for (intptr_t i = 0; i < arg_count; i++) {
1271 obj = *reinterpret_cast<ObjectPtr*>(
1272 ParamAddress(caller_frame->fp(), arg_count - i));
1273 orig_arguments.SetAt(i, obj);
1274 }
1275 const auto& receiver = Closure::CheckedHandle(
1276 zone, orig_arguments.At(args_desc.FirstArgIndex()));
1277 const auto& function = Function::Handle(zone, receiver.function());
1278 const auto& result = Object::Handle(
1279 zone, function.DoArgumentTypesMatch(orig_arguments, args_desc));
1280 if (result.IsError()) {
1281 Exceptions::PropagateError(Error::Cast(result));
1282 }
1283 // IsAssignableTo returned false, so we should have thrown a type
1284 // error in DoArgumentsTypesMatch.
1285 UNREACHABLE();
1286 }
1287
1288 ASSERT(!dst_name.IsNull());
1289 // Throw a dynamic type error.
1290 const TokenPosition location = GetCallerLocation();
1291 const auto& src_type =
1292 AbstractType::Handle(zone, src_instance.GetType(Heap::kNew));
1293 auto& reported_type = AbstractType::Handle(zone, dst_type.ptr());
1294 if (!reported_type.IsInstantiated()) {
1295 // Instantiate dst_type before reporting the error.
1296 reported_type = reported_type.InstantiateFrom(instantiator_type_arguments,
1297 function_type_arguments,
1299 }
1300 Exceptions::CreateAndThrowTypeError(location, src_type, reported_type,
1301 dst_name);
1302 UNREACHABLE();
1303 }
1304
1305 bool should_update_cache = true;
1306#if !defined(TARGET_ARCH_IA32)
1307 bool would_update_cache_if_not_lazy = false;
1308#if !defined(DART_PRECOMPILED_RUNTIME)
1309 // Checks against type parameters are done by loading the corresponding type
1310 // argument at runtime and calling the type argument's TTS. Thus, we install
1311 // specialized TTSes on the type argument, not the parameter itself.
1312 auto& tts_type = AbstractType::Handle(zone, dst_type.ptr());
1313 if (tts_type.IsTypeParameter()) {
1314 const auto& param = TypeParameter::Cast(tts_type);
1315 tts_type = param.GetFromTypeArguments(instantiator_type_arguments,
1316 function_type_arguments);
1317 }
1318 ASSERT(!tts_type.IsTypeParameter());
1319
1321 if (FLAG_trace_type_checks) {
1322 THR_Print(" Specializing type testing stub for %s\n",
1323 tts_type.ToCString());
1324 }
1325 const Code& code = Code::Handle(
1326 zone, TypeTestingStubGenerator::SpecializeStubFor(thread, tts_type));
1327 tts_type.SetTypeTestingStub(code);
1328
1329 // Only create the cache if we failed to create a specialized TTS and doing
1330 // the same check would cause an update to the cache.
1331 would_update_cache_if_not_lazy =
1332 (!src_instance.IsNull() &&
1333 tts_type.type_test_stub() ==
1334 StubCode::DefaultNullableTypeTest().ptr()) ||
1335 tts_type.type_test_stub() == StubCode::DefaultTypeTest().ptr();
1336 should_update_cache = would_update_cache_if_not_lazy && cache.IsNull();
1337 }
1338
1339 // Since dst_type is not a top type or type parameter, then the only default
1340 // stubs it can use are DefaultTypeTest or DefaultNullableTypeTest.
1341 if ((mode == kTypeCheckFromSlowStub) &&
1342 (tts_type.type_test_stub() != StubCode::DefaultNullableTypeTest().ptr() &&
1343 tts_type.type_test_stub() != StubCode::DefaultTypeTest().ptr())) {
1344 // The specialized type testing stub returned a false negative. That means
1345 // the specialization may have been generated using outdated cid ranges and
1346 // new classes appeared since the stub was generated. Try respecializing.
1347 if (FLAG_trace_type_checks) {
1348 THR_Print(" Rebuilding type testing stub for %s\n",
1349 tts_type.ToCString());
1350 }
1351 const auto& old_code = Code::Handle(zone, tts_type.type_test_stub());
1352 const auto& new_code = Code::Handle(
1353 zone, TypeTestingStubGenerator::SpecializeStubFor(thread, tts_type));
1354 ASSERT(old_code.ptr() != new_code.ptr());
1355 // A specialized stub should always respecialize to a non-default stub.
1356 ASSERT(new_code.ptr() != StubCode::DefaultNullableTypeTest().ptr() &&
1357 new_code.ptr() != StubCode::DefaultTypeTest().ptr());
1358 const auto& old_instructions =
1359 Instructions::Handle(old_code.instructions());
1360 const auto& new_instructions =
1361 Instructions::Handle(new_code.instructions());
1362 // Check if specialization produced exactly the same sequence of
1363 // instructions. If it did, then we have a false negative, which can
1364 // happen in some cases involving uninstantiated types. In these cases,
1365 // update the cache, because the only case in which these false negatives
1366 // could possibly turn into true positives is with reloads, which clear
1367 // all the SubtypeTestCaches.
1368 should_update_cache = old_instructions.Equals(new_instructions);
1369 if (FLAG_trace_type_checks) {
1370 THR_Print(" %s rebuilt type testing stub for %s\n",
1371 should_update_cache ? "Discarding" : "Installing",
1372 tts_type.ToCString());
1373 }
1374 if (!should_update_cache) {
1375 tts_type.SetTypeTestingStub(new_code);
1376 }
1377 }
1378#endif // !defined(DART_PRECOMPILED_RUNTIME)
1379#endif // !defined(TARGET_ARCH_IA32)
1380
1381 if (should_update_cache) {
1382 if (cache.IsNull()) {
1383#if !defined(TARGET_ARCH_IA32)
1386 would_update_cache_if_not_lazy));
1387 // We lazily create [SubtypeTestCache] for those call sites which actually
1388 // need one and will patch the pool entry.
1389 DartFrameIterator iterator(thread,
1391 StackFrame* caller_frame = iterator.NextFrame();
1392 const Code& caller_code =
1393 Code::Handle(zone, caller_frame->LookupDartCode());
1394 const ObjectPool& pool =
1395 ObjectPool::Handle(zone, caller_code.GetObjectPool());
1396 TypeTestingStubCallPattern tts_pattern(caller_frame->pc());
1397 const intptr_t stc_pool_idx = tts_pattern.GetSubtypeTestCachePoolIndex();
1398 // Ensure we do have a STC (lazily create it if not) and all threads use
1399 // the same STC.
1400 {
1401 SafepointMutexLocker ml(isolate->group()->subtype_test_cache_mutex());
1402 cache ^= pool.ObjectAt<std::memory_order_acquire>(stc_pool_idx);
1403 if (cache.IsNull()) {
1404 resolve_dst_name();
1405 // If this is a dynamic AssertAssignable check, then we must assume
1406 // all inputs may be needed, as the type may vary from call to call.
1407 const intptr_t num_inputs =
1408 dst_name.ptr() ==
1409 Symbols::dynamic_assert_assignable_stc_check().ptr()
1412 cache = SubtypeTestCache::New(num_inputs);
1413 pool.SetObjectAt<std::memory_order_release>(stc_pool_idx, cache);
1414 if (FLAG_trace_type_checks) {
1415 THR_Print(" Installed new subtype test cache %#" Px " with %" Pd
1416 " inputs at index %" Pd " of pool for %s\n",
1417 static_cast<uword>(cache.ptr()), num_inputs, stc_pool_idx,
1418 caller_code.ToCString());
1419 }
1420 }
1421 }
1422#else
1423 UNREACHABLE();
1424#endif
1425 }
1426
1427 UpdateTypeTestCache(zone, thread, src_instance, dst_type,
1428 instantiator_type_arguments, function_type_arguments,
1429 Bool::True(), cache);
1430 }
1431
1432 arguments.SetReturn(src_instance);
1433}
1434
1435// Report that the type of the given object is not bool in conditional context.
1436// Throw assertion error if the object is null. (cf. Boolean Conversion
1437// in language Spec.)
1438// Arg0: bad object.
1439// Return value: none, throws TypeError or AssertionError.
1440DEFINE_RUNTIME_ENTRY(NonBoolTypeError, 1) {
1441 const TokenPosition location = GetCallerLocation();
1442 const Instance& src_instance =
1443 Instance::CheckedHandle(zone, arguments.ArgAt(0));
1444
1445 if (src_instance.IsNull()) {
1446 const Array& args = Array::Handle(zone, Array::New(5));
1447 args.SetAt(
1448 0, String::Handle(
1449 zone,
1451 "Failed assertion: boolean expression must not be null")));
1452
1453 // No source code for this assertion, set url to null.
1454 args.SetAt(1, String::Handle(zone, String::null()));
1455 args.SetAt(2, Object::smi_zero());
1456 args.SetAt(3, Object::smi_zero());
1457 args.SetAt(4, String::Handle(zone, String::null()));
1458
1460 UNREACHABLE();
1461 }
1462
1463 ASSERT(!src_instance.IsBool());
1464 const Type& bool_interface = Type::Handle(Type::BoolType());
1465 const AbstractType& src_type =
1466 AbstractType::Handle(zone, src_instance.GetType(Heap::kNew));
1467 Exceptions::CreateAndThrowTypeError(location, src_type, bool_interface,
1468 Symbols::BooleanExpression());
1469 UNREACHABLE();
1470}
1471
1473 const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1474 Exceptions::Throw(thread, exception);
1475}
1476
1478 const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1479 const Instance& stacktrace =
1480 Instance::CheckedHandle(zone, arguments.ArgAt(1));
1481 const Smi& bypass_debugger = Smi::CheckedHandle(zone, arguments.ArgAt(2));
1482 Exceptions::ReThrow(thread, exception, stacktrace,
1483 bypass_debugger.Value() != 0);
1484}
1485
1486// Patches static call in optimized code with the target's entry point.
1487// Compiles target if necessary.
1488DEFINE_RUNTIME_ENTRY(PatchStaticCall, 0) {
1489#if !defined(DART_PRECOMPILED_RUNTIME)
1490 DartFrameIterator iterator(thread,
1492 StackFrame* caller_frame = iterator.NextFrame();
1493 ASSERT(caller_frame != nullptr);
1494 const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
1495 ASSERT(!caller_code.IsNull());
1496 ASSERT(caller_code.is_optimized());
1497 const Function& target_function = Function::Handle(
1498 zone, caller_code.GetStaticCallTargetFunctionAt(caller_frame->pc()));
1499 const Code& target_code = Code::Handle(zone, target_function.EnsureHasCode());
1500 // Before patching verify that we are not repeatedly patching to the same
1501 // target.
1502 if (target_code.ptr() !=
1503 CodePatcher::GetStaticCallTargetAt(caller_frame->pc(), caller_code)) {
1504 GcSafepointOperationScope safepoint(thread);
1505 if (target_code.ptr() !=
1506 CodePatcher::GetStaticCallTargetAt(caller_frame->pc(), caller_code)) {
1507 CodePatcher::PatchStaticCallAt(caller_frame->pc(), caller_code,
1508 target_code);
1509 caller_code.SetStaticCallTargetCodeAt(caller_frame->pc(), target_code);
1510 if (FLAG_trace_patching) {
1511 THR_Print("PatchStaticCall: patching caller pc %#" Px
1512 ""
1513 " to '%s' new entry point %#" Px " (%s)\n",
1514 caller_frame->pc(), target_function.ToFullyQualifiedCString(),
1515 target_code.EntryPoint(),
1516 target_code.is_optimized() ? "optimized" : "unoptimized");
1517 }
1518 }
1519 }
1520 arguments.SetReturn(target_code);
1521#else
1522 UNREACHABLE();
1523#endif
1524}
1525
1526#if defined(PRODUCT) || defined(DART_PRECOMPILED_RUNTIME)
1527DEFINE_RUNTIME_ENTRY(BreakpointRuntimeHandler, 0) {
1528 UNREACHABLE();
1529 return;
1530}
1531#else
1532// Gets called from debug stub when code reaches a breakpoint
1533// set on a runtime stub call.
1534DEFINE_RUNTIME_ENTRY(BreakpointRuntimeHandler, 0) {
1535 DartFrameIterator iterator(thread,
1537 StackFrame* caller_frame = iterator.NextFrame();
1538 ASSERT(caller_frame != nullptr);
1539 Code& orig_stub = Code::Handle(zone);
1540 orig_stub =
1541 isolate->group()->debugger()->GetPatchedStubAddress(caller_frame->pc());
1542 const Error& error =
1543 Error::Handle(zone, isolate->debugger()->PauseBreakpoint());
1545 arguments.SetReturn(orig_stub);
1546}
1547#endif
1548
1549DEFINE_RUNTIME_ENTRY(SingleStepHandler, 0) {
1550#if defined(PRODUCT) || defined(DART_PRECOMPILED_RUNTIME)
1551 UNREACHABLE();
1552#else
1553 const Error& error =
1554 Error::Handle(zone, isolate->debugger()->PauseStepping());
1556#endif
1557}
1558
1559// An instance call of the form o.f(...) could not be resolved. Check if
1560// there is a getter with the same name. If so, invoke it. If the value is
1561// a closure, invoke it with the given arguments. If the value is a
1562// non-closure, attempt to invoke "call" on it.
1563static bool ResolveCallThroughGetter(const Class& receiver_class,
1564 const String& target_name,
1565 const String& demangled,
1566 const Array& arguments_descriptor,
1567 Function* result) {
1568 const bool create_if_absent = !FLAG_precompiled_mode;
1569 const String& getter_name = String::Handle(Field::GetterName(demangled));
1570 const int kTypeArgsLen = 0;
1571 const int kNumArguments = 1;
1573 ArgumentsDescriptor::NewBoxed(kTypeArgsLen, kNumArguments)));
1574 const Function& getter =
1576 receiver_class, getter_name, args_desc, create_if_absent));
1577 if (getter.IsNull() || getter.IsMethodExtractor()) {
1578 return false;
1579 }
1580 // We do this on the target_name, _not_ on the demangled name, so that
1581 // FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher can detect dynamic
1582 // calls from the dyn: tag on the name of the dispatcher.
1583 const Function& target_function =
1585 target_name, arguments_descriptor,
1586 UntaggedFunction::kInvokeFieldDispatcher, create_if_absent));
1587 ASSERT(!create_if_absent || !target_function.IsNull());
1588 if (FLAG_trace_ic) {
1590 "InvokeField IC miss: adding <%s> id:%" Pd " -> <%s>\n",
1591 receiver_class.ToCString(), receiver_class.id(),
1592 target_function.IsNull() ? "null" : target_function.ToCString());
1593 }
1594 *result = target_function.ptr();
1595 return true;
1596}
1597
1598// Handle other invocations (implicit closures, noSuchMethod).
1599FunctionPtr InlineCacheMissHelper(const Class& receiver_class,
1600 const Array& args_descriptor,
1601 const String& target_name) {
1602 // Create a demangled version of the target_name, if necessary, This is used
1603 // for the field getter in ResolveCallThroughGetter and as the target name
1604 // for the NoSuchMethod dispatcher (if needed).
1605 const String* demangled = &target_name;
1607 demangled = &String::Handle(
1609 }
1610 const bool is_getter = Field::IsGetterName(*demangled);
1612#if defined(DART_PRECOMPILED_RUNTIME)
1613 const bool create_if_absent = false;
1614#else
1615 const bool create_if_absent = true;
1616#endif
1617 if (is_getter ||
1618 !ResolveCallThroughGetter(receiver_class, target_name, *demangled,
1619 args_descriptor, &result)) {
1620 ArgumentsDescriptor desc(args_descriptor);
1621 const Function& target_function =
1623 *demangled, args_descriptor,
1624 UntaggedFunction::kNoSuchMethodDispatcher, create_if_absent));
1625 if (FLAG_trace_ic) {
1627 "NoSuchMethod IC miss: adding <%s> id:%" Pd " -> <%s>\n",
1628 receiver_class.ToCString(), receiver_class.id(),
1629 target_function.IsNull() ? "null" : target_function.ToCString());
1630 }
1631 result = target_function.ptr();
1632 }
1633 // May be null if in the precompiled runtime, in which case dispatch will be
1634 // handled by NoSuchMethodFromCallStub.
1635 ASSERT(!create_if_absent || !result.IsNull());
1636 return result.ptr();
1637}
1638
1639#if !defined(DART_PRECOMPILED_RUNTIME)
1640static void TrySwitchInstanceCall(Thread* thread,
1641 StackFrame* caller_frame,
1642 const Code& caller_code,
1643 const Function& caller_function,
1644 const ICData& ic_data,
1645 const Function& target_function) {
1646 ASSERT(!target_function.IsNull());
1647 auto zone = thread->zone();
1648
1649 // Monomorphic/megamorphic calls only check the receiver CID.
1650 if (ic_data.NumArgsTested() != 1) return;
1651
1652 ASSERT(ic_data.rebind_rule() == ICData::kInstance);
1653
1654 // Monomorphic/megamorphic calls don't record exactness.
1655 if (ic_data.is_tracking_exactness()) return;
1656
1657#if !defined(PRODUCT)
1658 // Monomorphic/megamorphic do not check the isolate's stepping flag.
1659 if (thread->isolate()->has_attempted_stepping()) return;
1660#endif
1661
1662 // Monomorphic/megamorphic calls are only for unoptimized code.
1663 ASSERT(!caller_code.is_optimized());
1664
1665 // Code is detached from its function. This will prevent us from resetting
1666 // the switchable call later because resets are function based and because
1667 // the ic_data_array belongs to the function instead of the code. This should
1668 // only happen because of reload, but it sometimes happens with KBC mixed mode
1669 // probably through a race between foreground and background compilation.
1670 if (caller_function.unoptimized_code() != caller_code.ptr()) {
1671 return;
1672 }
1673#if !defined(PRODUCT)
1674 // Skip functions that contain breakpoints or when debugger is in single
1675 // stepping mode.
1676 if (thread->isolate_group()->debugger()->IsDebugging(thread,
1677 caller_function)) {
1678 return;
1679 }
1680#endif
1681
1682 const intptr_t num_checks = ic_data.NumberOfChecks();
1683
1684 // Monomorphic call.
1685 if (FLAG_unopt_monomorphic_calls && (num_checks == 1)) {
1686 // A call site in the monomorphic state does not load the arguments
1687 // descriptor, so do not allow transition to this state if the callee
1688 // needs it.
1689 if (target_function.PrologueNeedsArgumentsDescriptor()) {
1690 return;
1691 }
1692
1693 const Array& data = Array::Handle(zone, ic_data.entries());
1694 const Code& target = Code::Handle(zone, target_function.EnsureHasCode());
1695 CodePatcher::PatchInstanceCallAt(caller_frame->pc(), caller_code, data,
1696 target);
1697 if (FLAG_trace_ic) {
1698 OS::PrintErr("Instance call at %" Px
1699 " switching to monomorphic dispatch, %s\n",
1700 caller_frame->pc(), ic_data.ToCString());
1701 }
1702 return; // Success.
1703 }
1704
1705 // Megamorphic call.
1706 if (FLAG_unopt_megamorphic_calls &&
1707 (num_checks > FLAG_max_polymorphic_checks)) {
1708 const String& name = String::Handle(zone, ic_data.target_name());
1709 const Array& descriptor =
1710 Array::Handle(zone, ic_data.arguments_descriptor());
1712 zone, MegamorphicCacheTable::Lookup(thread, name, descriptor));
1713 ic_data.set_is_megamorphic(true);
1714 CodePatcher::PatchInstanceCallAt(caller_frame->pc(), caller_code, cache,
1715 StubCode::MegamorphicCall());
1716 if (FLAG_trace_ic) {
1717 OS::PrintErr("Instance call at %" Px
1718 " switching to megamorphic dispatch, %s\n",
1719 caller_frame->pc(), ic_data.ToCString());
1720 }
1721 return; // Success.
1722 }
1723}
1724#endif // !defined(DART_PRECOMPILED_RUNTIME)
1725
1726// Perform the subtype and return constant function based on the result.
1727static FunctionPtr ComputeTypeCheckTarget(const Instance& receiver,
1728 const AbstractType& type,
1729 const ArgumentsDescriptor& desc) {
1730 const bool result = receiver.IsInstanceOf(type, Object::null_type_arguments(),
1731 Object::null_type_arguments());
1733 const Function& target =
1734 Function::Handle(result ? store->simple_instance_of_true_function()
1735 : store->simple_instance_of_false_function());
1736 ASSERT(!target.IsNull());
1737 return target.ptr();
1738}
1739
1740static FunctionPtr Resolve(
1741 Thread* thread,
1742 Zone* zone,
1743 const GrowableArray<const Instance*>& caller_arguments,
1744 const Class& receiver_class,
1745 const String& name,
1746 const Array& descriptor) {
1747 ASSERT(name.IsSymbol());
1748 auto& target_function = Function::Handle(zone);
1749 ArgumentsDescriptor args_desc(descriptor);
1750
1751 const bool allow_add = !FLAG_precompiled_mode;
1752 if (receiver_class.EnsureIsFinalized(thread) == Error::null()) {
1754 receiver_class, name, args_desc, allow_add);
1755 }
1756 if (caller_arguments.length() == 2 &&
1757 target_function.ptr() == thread->isolate_group()
1758 ->object_store()
1759 ->simple_instance_of_function()) {
1760 // Replace the target function with constant function.
1761 const AbstractType& type = AbstractType::Cast(*caller_arguments[1]);
1762 target_function =
1763 ComputeTypeCheckTarget(*caller_arguments[0], type, args_desc);
1764 }
1765
1766 if (target_function.IsNull()) {
1767 target_function = InlineCacheMissHelper(receiver_class, descriptor, name);
1768 }
1769 ASSERT(!allow_add || !target_function.IsNull());
1770 return target_function.ptr();
1771}
1772
1773// Handles a static call in unoptimized code that has one argument type not
1774// seen before. Compile the target if necessary and update the ICData.
1775// Arg0: argument.
1776// Arg1: IC data object.
1777DEFINE_RUNTIME_ENTRY(StaticCallMissHandlerOneArg, 2) {
1778 const Instance& arg = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1779 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(1));
1780 // IC data for static call is prepopulated with the statically known target.
1781 ASSERT(ic_data.NumberOfChecksIs(1));
1782 const Function& target = Function::Handle(zone, ic_data.GetTargetAt(0));
1783 target.EnsureHasCode();
1784 ASSERT(!target.IsNull() && target.HasCode());
1785 ic_data.EnsureHasReceiverCheck(arg.GetClassId(), target, 1);
1786 if (FLAG_trace_ic) {
1787 DartFrameIterator iterator(thread,
1789 StackFrame* caller_frame = iterator.NextFrame();
1790 ASSERT(caller_frame != nullptr);
1791 OS::PrintErr("StaticCallMissHandler at %#" Px " target %s (%" Pd ")\n",
1792 caller_frame->pc(), target.ToCString(), arg.GetClassId());
1793 }
1794 arguments.SetReturn(target);
1795}
1796
1797// Handles a static call in unoptimized code that has two argument types not
1798// seen before. Compile the target if necessary and update the ICData.
1799// Arg0: argument 0.
1800// Arg1: argument 1.
1801// Arg2: IC data object.
1802DEFINE_RUNTIME_ENTRY(StaticCallMissHandlerTwoArgs, 3) {
1803 const Instance& arg0 = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1804 const Instance& arg1 = Instance::CheckedHandle(zone, arguments.ArgAt(1));
1805 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(2));
1806 // IC data for static call is prepopulated with the statically known target.
1807 ASSERT(!ic_data.NumberOfChecksIs(0));
1808 const Function& target = Function::Handle(zone, ic_data.GetTargetAt(0));
1809 target.EnsureHasCode();
1811 cids.Add(arg0.GetClassId());
1812 cids.Add(arg1.GetClassId());
1813 ic_data.EnsureHasCheck(cids, target);
1814 if (FLAG_trace_ic) {
1815 DartFrameIterator iterator(thread,
1817 StackFrame* caller_frame = iterator.NextFrame();
1818 ASSERT(caller_frame != nullptr);
1819 OS::PrintErr("StaticCallMissHandler at %#" Px " target %s (%" Pd ", %" Pd
1820 ")\n",
1821 caller_frame->pc(), target.ToCString(), cids[0], cids[1]);
1822 }
1823 arguments.SetReturn(target);
1824}
1825
1826#if defined(DART_PRECOMPILED_RUNTIME)
1827
1828static bool IsSingleTarget(IsolateGroup* isolate_group,
1829 Zone* zone,
1830 intptr_t lower_cid,
1831 intptr_t upper_cid,
1832 const Function& target,
1833 const String& name) {
1834 Class& cls = Class::Handle(zone);
1835 ClassTable* table = isolate_group->class_table();
1836 Function& other_target = Function::Handle(zone);
1837 for (intptr_t cid = lower_cid; cid <= upper_cid; cid++) {
1838 if (!table->HasValidClassAt(cid)) continue;
1839 cls = table->At(cid);
1840 if (cls.is_abstract()) continue;
1841 if (!cls.is_allocated()) continue;
1842 other_target = Resolver::ResolveDynamicAnyArgs(zone, cls, name,
1843 /*allow_add=*/false);
1844 if (other_target.ptr() != target.ptr()) {
1845 return false;
1846 }
1847 }
1848 return true;
1849}
1850
1851class SavedUnlinkedCallMapKeyEqualsTraits : public AllStatic {
1852 public:
1853 static const char* Name() { return "SavedUnlinkedCallMapKeyEqualsTraits "; }
1854 static bool ReportStats() { return false; }
1855
1856 static bool IsMatch(const Object& key1, const Object& key2) {
1857 if (!key1.IsInteger() || !key2.IsInteger()) return false;
1858 return Integer::Cast(key1).Equals(Integer::Cast(key2));
1859 }
1860 static uword Hash(const Object& key) {
1861 return Integer::Cast(key).CanonicalizeHash();
1862 }
1863};
1864
1865using UnlinkedCallMap = UnorderedHashMap<SavedUnlinkedCallMapKeyEqualsTraits>;
1866
1867static void SaveUnlinkedCall(Zone* zone,
1868 Isolate* isolate,
1869 uword frame_pc,
1870 const UnlinkedCall& unlinked_call) {
1871 IsolateGroup* isolate_group = isolate->group();
1872
1873 SafepointMutexLocker ml(isolate_group->unlinked_call_map_mutex());
1874 if (isolate_group->saved_unlinked_calls() == Array::null()) {
1875 const auto& initial_map =
1876 Array::Handle(zone, HashTables::New<UnlinkedCallMap>(16, Heap::kOld));
1877 isolate_group->set_saved_unlinked_calls(initial_map);
1878 }
1879
1880 UnlinkedCallMap unlinked_call_map(zone,
1881 isolate_group->saved_unlinked_calls());
1882 const auto& pc = Integer::Handle(zone, Integer::NewFromUint64(frame_pc));
1883 // Some other isolate might have updated unlinked_call_map[pc] too, but
1884 // their update should be identical to ours.
1885 const auto& new_or_old_value = UnlinkedCall::Handle(
1887 unlinked_call_map.InsertOrGetValue(pc, unlinked_call)));
1888 RELEASE_ASSERT(new_or_old_value.ptr() == unlinked_call.ptr());
1889 isolate_group->set_saved_unlinked_calls(unlinked_call_map.Release());
1890}
1891
1892static UnlinkedCallPtr LoadUnlinkedCall(Zone* zone,
1893 Isolate* isolate,
1894 uword pc) {
1895 IsolateGroup* isolate_group = isolate->group();
1896
1897 SafepointMutexLocker ml(isolate_group->unlinked_call_map_mutex());
1898 ASSERT(isolate_group->saved_unlinked_calls() != Array::null());
1899 UnlinkedCallMap unlinked_call_map(zone,
1900 isolate_group->saved_unlinked_calls());
1901
1902 const auto& pc_integer = Integer::Handle(zone, Integer::NewFromUint64(pc));
1903 const auto& unlinked_call = UnlinkedCall::Cast(
1904 Object::Handle(zone, unlinked_call_map.GetOrDie(pc_integer)));
1905 isolate_group->set_saved_unlinked_calls(unlinked_call_map.Release());
1906 return unlinked_call.ptr();
1907}
1908
1909// NOTE: Right now we never delete [UnlinkedCall] objects. They are needed while
1910// a call site is in Unlinked/Monomorphic/MonomorphicSmiable/SingleTarget
1911// states.
1912//
1913// Theoretically we could free the [UnlinkedCall] object once we transition the
1914// call site to use ICData/MegamorphicCache, but that would require careful
1915// coordination between the deleter and a possible concurrent reader.
1916//
1917// To simplify the code we decided not to do that atm (only a very small
1918// fraction of callsites in AOT use switchable calls, the name/args-descriptor
1919// objects are kept alive anyways -> there is little memory savings from
1920// freeing the [UnlinkedCall] objects).
1921
1922#endif // defined(DART_PRECOMPILED_RUNTIME)
1923
1924enum class MissHandler {
1928};
1929
1930// Handles updating of type feedback and possible patching of instance calls.
1931//
1932// It works in 3 separate steps:
1933// - resolve the actual target
1934// - update type feedback & (optionally) perform call site transition
1935// - return the right values
1936//
1937// Depending on the JIT/AOT mode we obtain current and patch new (target, data)
1938// differently:
1939//
1940// - JIT calls must be patched with CodePatcher::PatchInstanceCallAt()
1941// - AOT calls must be patched with CodePatcher::PatchSwitchableCallAt()
1942//
1943// Independent of which miss handler was used or how we will return, we look at
1944// current (target, data) and see if we need to transition the call site to a
1945// new (target, data). We do this while holding `IG->patchable_call_mutex()`.
1946//
1947// Depending on which miss handler got called we might need to return
1948// differently:
1949//
1950// - SwitchableCallMiss will get get (stub, data) return value
1951// - InlineCache*Miss will get get function as return value
1952//
1954 public:
1956 const GrowableArray<const Instance*>& caller_arguments,
1957 MissHandler miss_handler,
1958 NativeArguments arguments,
1959 StackFrame* caller_frame,
1960 const Code& caller_code,
1961 const Function& caller_function)
1962 : isolate_(thread->isolate()),
1963 thread_(thread),
1964 zone_(thread->zone()),
1965 caller_arguments_(caller_arguments),
1966 miss_handler_(miss_handler),
1967 arguments_(arguments),
1968 caller_frame_(caller_frame),
1969 caller_code_(caller_code),
1970 caller_function_(caller_function),
1971 name_(String::Handle()),
1972 args_descriptor_(Array::Handle()) {
1973 // We only have two arg IC calls in JIT mode.
1974 ASSERT(caller_arguments_.length() == 1 || !FLAG_precompiled_mode);
1975 }
1976
1977 void ResolveSwitchAndReturn(const Object& data);
1978
1979 private:
1980 FunctionPtr ResolveTargetFunction(const Object& data);
1981
1982#if defined(DART_PRECOMPILED_RUNTIME)
1983 void HandleMissAOT(const Object& old_data,
1984 uword old_entry,
1985 const Function& target_function);
1986
1987 void DoUnlinkedCallAOT(const UnlinkedCall& unlinked,
1988 const Function& target_function);
1989 void DoMonomorphicMissAOT(const Object& old_data,
1990 const Function& target_function);
1991 void DoSingleTargetMissAOT(const SingleTargetCache& data,
1992 const Function& target_function);
1993 void DoICDataMissAOT(const ICData& data, const Function& target_function);
1994 bool CanExtendSingleTargetRange(const String& name,
1995 const Function& old_target,
1996 const Function& target_function,
1997 intptr_t* lower,
1998 intptr_t* upper);
1999#else
2000 void HandleMissJIT(const Object& old_data,
2001 const Code& old_target,
2002 const Function& target_function);
2003
2004 void DoMonomorphicMissJIT(const Object& old_data,
2005 const Function& target_function);
2006 void DoICDataMissJIT(const ICData& data,
2007 const Object& old_data,
2008 const Function& target_function);
2009#endif // !defined(DART_PRECOMPILED_RUNTIME)
2010 void DoMegamorphicMiss(const MegamorphicCache& data,
2011 const Function& target_function);
2012
2013 void UpdateICDataWithTarget(const ICData& ic_data,
2014 const Function& target_function);
2015 void TrySwitch(const ICData& ic_data, const Function& target_function);
2016
2017 void ReturnAOT(const Code& stub, const Object& data);
2018 void ReturnJIT(const Code& stub, const Object& data, const Function& target);
2019 void ReturnJITorAOT(const Code& stub,
2020 const Object& data,
2021 const Function& target);
2022
2023 const Instance& receiver() { return *caller_arguments_[0]; }
2024
2025 bool should_consider_patching() {
2026 // In AOT we use switchable calls.
2027 if (FLAG_precompiled_mode) return true;
2028
2029 // In JIT instance calls use a different calling sequence in unoptimized vs
2030 // optimized code (see [FlowGraphCompiler::EmitInstanceCallJIT] vs
2031 // [FlowGraphCompiler::EmitOptimizedInstanceCall]).
2032 //
2033 // The [CodePatcher::GetInstanceCallAt], [CodePatcher::PatchInstanceCallAt]
2034 // only recognize unoptimized call pattern.
2035 //
2036 // So we will not try to switch optimized instance calls.
2037 return !caller_code_.is_optimized();
2038 }
2039
2040 ICDataPtr NewICData();
2041 ICDataPtr NewICDataWithTarget(intptr_t cid, const Function& target);
2042
2043 Isolate* isolate_;
2044 Thread* thread_;
2045 Zone* zone_;
2046 const GrowableArray<const Instance*>& caller_arguments_;
2047 MissHandler miss_handler_;
2048 NativeArguments arguments_;
2049 StackFrame* caller_frame_;
2050 const Code& caller_code_;
2051 const Function& caller_function_;
2052
2053 // Call-site information populated during resolution.
2054 String& name_;
2055 Array& args_descriptor_;
2056 bool is_monomorphic_hit_ = false;
2057};
2058
2059#if defined(DART_PRECOMPILED_RUNTIME)
2060void PatchableCallHandler::DoUnlinkedCallAOT(const UnlinkedCall& unlinked,
2061 const Function& target_function) {
2062 const auto& ic_data = ICData::Handle(
2063 zone_,
2064 target_function.IsNull()
2065 ? NewICData()
2066 : NewICDataWithTarget(receiver().GetClassId(), target_function));
2067
2068 Object& object = Object::Handle(zone_, ic_data.ptr());
2069 Code& code = Code::Handle(zone_, StubCode::ICCallThroughCode().ptr());
2070 // If the target function has optional parameters or is generic, it's
2071 // prologue requires ARGS_DESC_REG to be populated. Yet the switchable calls
2072 // do not populate that on the call site, which is why we don't transition
2073 // those call sites to monomorphic, but rather directly to call via stub
2074 // (which will populate the ARGS_DESC_REG from the ICData).
2075 //
2076 // Because of this we also don't generate monomorphic checks for those
2077 // functions.
2078 if (!target_function.IsNull() &&
2079 !target_function.PrologueNeedsArgumentsDescriptor()) {
2080 // Patch to monomorphic call.
2081 ASSERT(target_function.HasCode());
2082 const Code& target_code =
2083 Code::Handle(zone_, target_function.CurrentCode());
2084 const Smi& expected_cid =
2085 Smi::Handle(zone_, Smi::New(receiver().GetClassId()));
2086
2087 if (unlinked.can_patch_to_monomorphic()) {
2088 object = expected_cid.ptr();
2089 code = target_code.ptr();
2090 ASSERT(code.HasMonomorphicEntry());
2091 } else {
2092 object = MonomorphicSmiableCall::New(expected_cid.Value(), target_code);
2093 code = StubCode::MonomorphicSmiableCheck().ptr();
2094 }
2095 }
2096 CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, object,
2097 code);
2098
2099 // Return the ICData. The miss stub will jump to continue in the IC lookup
2100 // stub.
2101 ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
2102}
2103
2104bool PatchableCallHandler::CanExtendSingleTargetRange(
2105 const String& name,
2106 const Function& old_target,
2107 const Function& target_function,
2108 intptr_t* lower,
2109 intptr_t* upper) {
2110 if (old_target.ptr() != target_function.ptr()) {
2111 return false;
2112 }
2113 intptr_t unchecked_lower, unchecked_upper;
2114 if (receiver().GetClassId() < *lower) {
2115 unchecked_lower = receiver().GetClassId();
2116 unchecked_upper = *lower - 1;
2117 *lower = receiver().GetClassId();
2118 } else {
2119 unchecked_upper = receiver().GetClassId();
2120 unchecked_lower = *upper + 1;
2121 *upper = receiver().GetClassId();
2122 }
2123
2124 return IsSingleTarget(isolate_->group(), zone_, unchecked_lower,
2125 unchecked_upper, target_function, name);
2126}
2127#endif // defined(DART_PRECOMPILED_RUNTIME)
2128
2129#if defined(DART_PRECOMPILED_RUNTIME)
2130void PatchableCallHandler::DoMonomorphicMissAOT(
2131 const Object& old_data,
2132 const Function& target_function) {
2133 classid_t old_expected_cid;
2134 if (old_data.IsSmi()) {
2135 old_expected_cid = Smi::Cast(old_data).Value();
2136 } else {
2137 RELEASE_ASSERT(old_data.IsMonomorphicSmiableCall());
2138 old_expected_cid = MonomorphicSmiableCall::Cast(old_data).expected_cid();
2139 }
2140 const bool is_monomorphic_hit = old_expected_cid == receiver().GetClassId();
2141 const auto& old_receiver_class = Class::Handle(
2142 zone_, isolate_->group()->class_table()->At(old_expected_cid));
2143 const auto& old_target = Function::Handle(
2144 zone_, Resolve(thread_, zone_, caller_arguments_, old_receiver_class,
2145 name_, args_descriptor_));
2146
2147 const auto& ic_data = ICData::Handle(
2148 zone_, old_target.IsNull()
2149 ? NewICData()
2150 : NewICDataWithTarget(old_expected_cid, old_target));
2151
2152 if (is_monomorphic_hit) {
2153 // The site just have been updated to monomorphic state with same
2154 // exact class id - do nothing in that case: stub will call through ic data.
2155 ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
2156 return;
2157 }
2158
2159 intptr_t lower = old_expected_cid;
2160 intptr_t upper = old_expected_cid;
2161 if (CanExtendSingleTargetRange(name_, old_target, target_function, &lower,
2162 &upper)) {
2163 const SingleTargetCache& cache =
2165 const Code& code = Code::Handle(zone_, target_function.CurrentCode());
2166 cache.set_target(code);
2167 cache.set_entry_point(code.EntryPoint());
2168 cache.set_lower_limit(lower);
2169 cache.set_upper_limit(upper);
2170 const Code& stub = StubCode::SingleTargetCall();
2171 CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, cache,
2172 stub);
2173 // Return the ICData. The miss stub will jump to continue in the IC call
2174 // stub.
2175 ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
2176 return;
2177 }
2178
2179 // Patch to call through stub.
2180 const Code& stub = StubCode::ICCallThroughCode();
2181 CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, ic_data,
2182 stub);
2183
2184 // Return the ICData. The miss stub will jump to continue in the IC lookup
2185 // stub.
2186 ReturnAOT(stub, ic_data);
2187}
2188#endif // defined(DART_PRECOMPILED_RUNTIME)
2189
2190#if !defined(DART_PRECOMPILED_RUNTIME)
2191void PatchableCallHandler::DoMonomorphicMissJIT(
2192 const Object& old_data,
2193 const Function& target_function) {
2194 // Monomorphic calls use the ICData::entries() as their data.
2195 const auto& old_ic_data_entries = Array::Cast(old_data);
2196 // Any non-empty ICData::entries() has a backref to it's ICData.
2197 const auto& ic_data =
2198 ICData::Handle(zone_, ICData::ICDataOfEntriesArray(old_ic_data_entries));
2199
2200 // The target didn't change, so we can stay inside monomorphic state.
2201 if (ic_data.NumberOfChecksIs(1) &&
2202 (ic_data.GetReceiverClassIdAt(0) == receiver().GetClassId())) {
2203 // No need to update ICData - it's already up-to-date.
2204
2205 if (FLAG_trace_ic) {
2206 OS::PrintErr("Instance call at %" Px
2207 " updating code (old code was disabled)\n",
2208 caller_frame_->pc());
2209 }
2210
2211 // We stay in monomorphic state, patch the code object and reload the icdata
2212 // entries array.
2213 const auto& code = Code::Handle(zone_, target_function.EnsureHasCode());
2214 const auto& data = Object::Handle(zone_, ic_data.entries());
2215 CodePatcher::PatchInstanceCallAt(caller_frame_->pc(), caller_code_, data,
2216 code);
2217 ReturnJIT(code, data, target_function);
2218 return;
2219 }
2220
2221 ASSERT(ic_data.NumArgsTested() == 1);
2222 const Code& stub = ic_data.is_tracking_exactness()
2223 ? StubCode::OneArgCheckInlineCacheWithExactnessCheck()
2224 : StubCode::OneArgCheckInlineCache();
2225 if (FLAG_trace_ic) {
2226 OS::PrintErr("Instance call at %" Px
2227 " switching monomorphic to polymorphic dispatch, %s\n",
2228 caller_frame_->pc(), ic_data.ToCString());
2229 }
2230 CodePatcher::PatchInstanceCallAt(caller_frame_->pc(), caller_code_, ic_data,
2231 stub);
2232
2233 ASSERT(caller_arguments_.length() == 1);
2234 UpdateICDataWithTarget(ic_data, target_function);
2235 ASSERT(should_consider_patching());
2236 TrySwitchInstanceCall(thread_, caller_frame_, caller_code_, caller_function_,
2237 ic_data, target_function);
2238 ReturnJIT(stub, ic_data, target_function);
2239}
2240#endif // !defined(DART_PRECOMPILED_RUNTIME)
2241
2242#if defined(DART_PRECOMPILED_RUNTIME)
2243void PatchableCallHandler::DoSingleTargetMissAOT(
2244 const SingleTargetCache& data,
2245 const Function& target_function) {
2246 const Code& old_target_code = Code::Handle(zone_, data.target());
2247 const Function& old_target =
2248 Function::Handle(zone_, Function::RawCast(old_target_code.owner()));
2249
2250 // We lost the original ICData when we patched to the monomorphic case.
2251 const auto& ic_data = ICData::Handle(
2252 zone_,
2253 target_function.IsNull()
2254 ? NewICData()
2255 : NewICDataWithTarget(receiver().GetClassId(), target_function));
2256
2257 intptr_t lower = data.lower_limit();
2258 intptr_t upper = data.upper_limit();
2259 if (CanExtendSingleTargetRange(name_, old_target, target_function, &lower,
2260 &upper)) {
2261 data.set_lower_limit(lower);
2262 data.set_upper_limit(upper);
2263 // Return the ICData. The single target stub will jump to continue in the
2264 // IC call stub.
2265 ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
2266 return;
2267 }
2268
2269 // Call site is not single target, switch to call using ICData.
2270 const Code& stub = StubCode::ICCallThroughCode();
2271 CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, ic_data,
2272 stub);
2273
2274 // Return the ICData. The single target stub will jump to continue in the
2275 // IC call stub.
2276 ReturnAOT(stub, ic_data);
2277}
2278#endif // defined(DART_PRECOMPILED_RUNTIME)
2279
2280#if defined(DART_PRECOMPILED_RUNTIME)
2281void PatchableCallHandler::DoICDataMissAOT(const ICData& ic_data,
2282 const Function& target_function) {
2283 const String& name = String::Handle(zone_, ic_data.target_name());
2284 const Class& cls = Class::Handle(zone_, receiver().clazz());
2285 ASSERT(!cls.IsNull());
2286 const Array& descriptor =
2287 Array::CheckedHandle(zone_, ic_data.arguments_descriptor());
2288 ArgumentsDescriptor args_desc(descriptor);
2289 if (FLAG_trace_ic || FLAG_trace_ic_miss_in_optimized) {
2290 OS::PrintErr("ICData miss, class=%s, function<%" Pd ">=%s\n",
2291 cls.ToCString(), args_desc.TypeArgsLen(), name.ToCString());
2292 }
2293
2294 if (target_function.IsNull()) {
2295 ReturnAOT(StubCode::NoSuchMethodDispatcher(), ic_data);
2296 return;
2297 }
2298
2299 const intptr_t number_of_checks = ic_data.NumberOfChecks();
2300
2301 if ((number_of_checks == 0) &&
2302 (!FLAG_precompiled_mode || ic_data.receiver_cannot_be_smi()) &&
2303 !target_function.PrologueNeedsArgumentsDescriptor()) {
2304 // This call site is unlinked: transition to a monomorphic direct call.
2305 // Note we cannot do this if the target has optional parameters because
2306 // the monomorphic direct call does not load the arguments descriptor.
2307 // We cannot do this if we are still in the middle of precompiling because
2308 // the monomorphic case hides a live instance selector from the
2309 // treeshaker.
2310 const Code& target_code =
2311 Code::Handle(zone_, target_function.EnsureHasCode());
2312 const Smi& expected_cid =
2313 Smi::Handle(zone_, Smi::New(receiver().GetClassId()));
2314 ASSERT(target_code.HasMonomorphicEntry());
2315 CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_,
2316 expected_cid, target_code);
2317 ReturnAOT(target_code, expected_cid);
2318 } else {
2319 ic_data.EnsureHasReceiverCheck(receiver().GetClassId(), target_function);
2320 if (number_of_checks > FLAG_max_polymorphic_checks) {
2321 // Switch to megamorphic call.
2322 const MegamorphicCache& cache = MegamorphicCache::Handle(
2323 zone_, MegamorphicCacheTable::Lookup(thread_, name, descriptor));
2324 const Code& stub = StubCode::MegamorphicCall();
2325
2326 CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_,
2327 cache, stub);
2328 ReturnAOT(stub, cache);
2329 } else {
2330 ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
2331 }
2332 }
2333}
2334#endif // defined(DART_PRECOMPILED_RUNTIME)
2335
2336#if !defined(DART_PRECOMPILED_RUNTIME)
2337void PatchableCallHandler::DoICDataMissJIT(const ICData& ic_data,
2338 const Object& old_code,
2339 const Function& target_function) {
2340 ASSERT(ic_data.NumArgsTested() == caller_arguments_.length());
2341
2342 if (ic_data.NumArgsTested() == 1) {
2343 ASSERT(old_code.ptr() == StubCode::OneArgCheckInlineCache().ptr() ||
2344 old_code.ptr() ==
2345 StubCode::OneArgCheckInlineCacheWithExactnessCheck().ptr() ||
2346 old_code.ptr() ==
2347 StubCode::OneArgOptimizedCheckInlineCache().ptr() ||
2348 old_code.ptr() ==
2349 StubCode::OneArgOptimizedCheckInlineCacheWithExactnessCheck()
2350 .ptr() ||
2351 old_code.ptr() == StubCode::ICCallBreakpoint().ptr() ||
2352 (old_code.IsNull() && !should_consider_patching()));
2353 UpdateICDataWithTarget(ic_data, target_function);
2354 if (should_consider_patching()) {
2355 TrySwitchInstanceCall(thread_, caller_frame_, caller_code_,
2356 caller_function_, ic_data, target_function);
2357 }
2358 const Code& stub = Code::Handle(
2359 zone_, ic_data.is_tracking_exactness()
2360 ? StubCode::OneArgCheckInlineCacheWithExactnessCheck().ptr()
2361 : StubCode::OneArgCheckInlineCache().ptr());
2362 ReturnJIT(stub, ic_data, target_function);
2363 } else {
2364 ASSERT(old_code.ptr() == StubCode::TwoArgsCheckInlineCache().ptr() ||
2365 old_code.ptr() == StubCode::SmiAddInlineCache().ptr() ||
2366 old_code.ptr() == StubCode::SmiLessInlineCache().ptr() ||
2367 old_code.ptr() == StubCode::SmiEqualInlineCache().ptr() ||
2368 old_code.ptr() ==
2369 StubCode::TwoArgsOptimizedCheckInlineCache().ptr() ||
2370 old_code.ptr() == StubCode::ICCallBreakpoint().ptr() ||
2371 (old_code.IsNull() && !should_consider_patching()));
2372 UpdateICDataWithTarget(ic_data, target_function);
2373 ReturnJIT(StubCode::TwoArgsCheckInlineCache(), ic_data, target_function);
2374 }
2375}
2376#endif // !defined(DART_PRECOMPILED_RUNTIME)
2377
2378void PatchableCallHandler::DoMegamorphicMiss(const MegamorphicCache& data,
2379 const Function& target_function) {
2380 const String& name = String::Handle(zone_, data.target_name());
2381 const Class& cls = Class::Handle(zone_, receiver().clazz());
2382 ASSERT(!cls.IsNull());
2383 const Array& descriptor =
2384 Array::CheckedHandle(zone_, data.arguments_descriptor());
2385 ArgumentsDescriptor args_desc(descriptor);
2386 if (FLAG_trace_ic || FLAG_trace_ic_miss_in_optimized) {
2387 OS::PrintErr("Megamorphic miss, class=%s, function<%" Pd ">=%s\n",
2388 cls.ToCString(), args_desc.TypeArgsLen(), name.ToCString());
2389 }
2390 if (target_function.IsNull()) {
2391 ReturnJITorAOT(StubCode::NoSuchMethodDispatcher(), data, target_function);
2392 return;
2393 }
2394
2395 // Insert function found into cache.
2396 const Smi& class_id = Smi::Handle(zone_, Smi::New(cls.id()));
2397 data.EnsureContains(class_id, target_function);
2398 ReturnJITorAOT(StubCode::MegamorphicCall(), data, target_function);
2399}
2400
2401void PatchableCallHandler::UpdateICDataWithTarget(
2402 const ICData& ic_data,
2403 const Function& target_function) {
2404 if (target_function.IsNull()) return;
2405
2406 // If, upon return of the runtime, we will invoke the target directly we have
2407 // to increment the call count here in the ICData.
2408 // If we instead only insert a new ICData entry and will return to the IC stub
2409 // which will call the target, the stub will take care of the increment.
2410 const bool call_target_directly =
2411 miss_handler_ == MissHandler::kInlineCacheMiss;
2412 const intptr_t invocation_count = call_target_directly ? 1 : 0;
2413
2414 if (caller_arguments_.length() == 1) {
2415 auto exactness = StaticTypeExactnessState::NotTracking();
2416#if !defined(DART_PRECOMPILED_RUNTIME)
2417 if (ic_data.is_tracking_exactness()) {
2418 exactness = receiver().IsNull()
2420 : StaticTypeExactnessState::Compute(
2421 Type::Cast(AbstractType::Handle(
2422 ic_data.receivers_static_type())),
2423 receiver());
2424 }
2425#endif // !defined(DART_PRECOMPILED_RUNTIME)
2426 ic_data.EnsureHasReceiverCheck(receiver().GetClassId(), target_function,
2427 invocation_count, exactness);
2428 } else {
2429 GrowableArray<intptr_t> class_ids(caller_arguments_.length());
2430 ASSERT(ic_data.NumArgsTested() == caller_arguments_.length());
2431 for (intptr_t i = 0; i < caller_arguments_.length(); i++) {
2432 class_ids.Add(caller_arguments_[i]->GetClassId());
2433 }
2434 ic_data.EnsureHasCheck(class_ids, target_function, invocation_count);
2435 }
2436}
2437
2438void PatchableCallHandler::ReturnAOT(const Code& stub, const Object& data) {
2439 ASSERT(miss_handler_ == MissHandler::kSwitchableCallMiss);
2440 arguments_.SetArgAt(0, stub); // Second return value.
2441 arguments_.SetReturn(data);
2442}
2443
2444void PatchableCallHandler::ReturnJIT(const Code& stub,
2445 const Object& data,
2446 const Function& target) {
2447 // In JIT we can have two different miss handlers to which we return slightly
2448 // differently.
2449 switch (miss_handler_) {
2451 arguments_.SetArgAt(0, stub); // Second return value.
2452 arguments_.SetReturn(data);
2453 break;
2454 }
2456 arguments_.SetArgAt(1, data); // Second return value.
2457 arguments_.SetReturn(stub);
2458 break;
2459 }
2461 arguments_.SetReturn(target);
2462 break;
2463 }
2464 }
2465}
2466
2467void PatchableCallHandler::ReturnJITorAOT(const Code& stub,
2468 const Object& data,
2469 const Function& target) {
2470#if defined(DART_PRECOMPILED_MODE)
2471 ReturnAOT(stub, data);
2472#else
2473 ReturnJIT(stub, data, target);
2474#endif
2475}
2476
2477ICDataPtr PatchableCallHandler::NewICData() {
2478 return ICData::New(caller_function_, name_, args_descriptor_, DeoptId::kNone,
2479 /*num_args_tested=*/1, ICData::kInstance);
2480}
2481
2482ICDataPtr PatchableCallHandler::NewICDataWithTarget(intptr_t cid,
2483 const Function& target) {
2484 GrowableArray<intptr_t> cids(1);
2485 cids.Add(cid);
2486 return ICData::NewWithCheck(caller_function_, name_, args_descriptor_,
2487 DeoptId::kNone, /*num_args_tested=*/1,
2488 ICData::kInstance, &cids, target);
2489}
2490
2491FunctionPtr PatchableCallHandler::ResolveTargetFunction(const Object& data) {
2492 switch (data.GetClassId()) {
2493 case kUnlinkedCallCid: {
2494 const auto& unlinked_call = UnlinkedCall::Cast(data);
2495
2496#if defined(DART_PRECOMPILED_RUNTIME)
2497 // When transitioning out of UnlinkedCall to other states (e.g.
2498 // Monomorphic, MonomorphicSmiable, SingleTarget) we lose
2499 // name/arg-descriptor in AOT mode and cannot recover it.
2500 //
2501 // Even if we could recover an old target function (which was missed) -
2502 // which we cannot in AOT bare mode - we can still lose the name due to a
2503 // dyn:* call site potentially targeting non-dyn:* targets.
2504 //
2505 // => We will therefore retain the unlinked call here.
2506 //
2507 // In JIT mode we always use ICData from the call site, which has the
2508 // correct name/args-descriptor.
2509 SaveUnlinkedCall(zone_, isolate_, caller_frame_->pc(), unlinked_call);
2510#endif // defined(DART_PRECOMPILED_RUNTIME)
2511
2512 name_ = unlinked_call.target_name();
2513 args_descriptor_ = unlinked_call.arguments_descriptor();
2514 break;
2515 }
2516 case kMonomorphicSmiableCallCid:
2518#if defined(DART_PRECOMPILED_RUNTIME)
2519 case kSmiCid:
2521 case kSingleTargetCacheCid: {
2522 const auto& unlinked_call = UnlinkedCall::Handle(
2523 zone_, LoadUnlinkedCall(zone_, isolate_, caller_frame_->pc()));
2524 name_ = unlinked_call.target_name();
2525 args_descriptor_ = unlinked_call.arguments_descriptor();
2526 break;
2527 }
2528#else
2529 case kArrayCid: {
2530 // Monomorphic calls use the ICData::entries() as their data.
2531 const auto& ic_data_entries = Array::Cast(data);
2532 // Any non-empty ICData::entries() has a backref to it's ICData.
2533 const auto& ic_data =
2534 ICData::Handle(zone_, ICData::ICDataOfEntriesArray(ic_data_entries));
2535 args_descriptor_ = ic_data.arguments_descriptor();
2536 name_ = ic_data.target_name();
2537 break;
2538 }
2539#endif // defined(DART_PRECOMPILED_RUNTIME)
2540 case kICDataCid:
2542 case kMegamorphicCacheCid: {
2543 const CallSiteData& call_site_data = CallSiteData::Cast(data);
2544 name_ = call_site_data.target_name();
2545 args_descriptor_ = call_site_data.arguments_descriptor();
2546 break;
2547 }
2548 default:
2549 UNREACHABLE();
2550 }
2551 const Class& cls = Class::Handle(zone_, receiver().clazz());
2552 return Resolve(thread_, zone_, caller_arguments_, cls, name_,
2553 args_descriptor_);
2554}
2555
2557 // Find out actual target (which can be time consuming) without holding any
2558 // locks.
2559 const auto& target_function =
2560 Function::Handle(zone_, ResolveTargetFunction(old_data));
2561
2562 auto& data = Object::Handle(zone_);
2563
2564 // We ensure any transition in a patchable calls are done in an atomic
2565 // manner, we ensure we always transition forward (e.g. Monomorphic ->
2566 // Polymorphic).
2567 //
2568 // Mutators are only stopped if we actually need to patch a patchable call.
2569 // We may not do that if we e.g. just add one more check to an ICData.
2571
2572#if defined(DART_PRECOMPILED_RUNTIME)
2573 data =
2574 CodePatcher::GetSwitchableCallDataAt(caller_frame_->pc(), caller_code_);
2575 uword target_entry = 0;
2577 caller_frame_->pc(), caller_code_));
2578 HandleMissAOT(data, target_entry, target_function);
2579#else
2580 auto& code = Code::Handle(zone_);
2581 if (should_consider_patching()) {
2582 code ^= CodePatcher::GetInstanceCallAt(caller_frame_->pc(), caller_code_,
2583 &data);
2584 } else {
2585 ASSERT(old_data.IsICData() || old_data.IsMegamorphicCache());
2586 data = old_data.ptr();
2587 }
2588 HandleMissJIT(data, code, target_function);
2589#endif
2590}
2591
2592#if defined(DART_PRECOMPILED_RUNTIME)
2593
2594void PatchableCallHandler::HandleMissAOT(const Object& old_data,
2595 uword old_entry,
2596 const Function& target_function) {
2597 switch (old_data.GetClassId()) {
2598 case kUnlinkedCallCid:
2599 ASSERT(old_entry ==
2600 StubCode::SwitchableCallMiss().MonomorphicEntryPoint());
2601 DoUnlinkedCallAOT(UnlinkedCall::Cast(old_data), target_function);
2602 break;
2603 case kMonomorphicSmiableCallCid:
2604 ASSERT(old_entry ==
2605 StubCode::MonomorphicSmiableCheck().MonomorphicEntryPoint());
2607 case kSmiCid:
2608 DoMonomorphicMissAOT(old_data, target_function);
2609 break;
2610 case kSingleTargetCacheCid:
2611 ASSERT(old_entry == StubCode::SingleTargetCall().MonomorphicEntryPoint());
2612 DoSingleTargetMissAOT(SingleTargetCache::Cast(old_data), target_function);
2613 break;
2614 case kICDataCid:
2615 ASSERT(old_entry ==
2616 StubCode::ICCallThroughCode().MonomorphicEntryPoint());
2617 DoICDataMissAOT(ICData::Cast(old_data), target_function);
2618 break;
2619 case kMegamorphicCacheCid:
2620 ASSERT(old_entry == StubCode::MegamorphicCall().MonomorphicEntryPoint());
2621 DoMegamorphicMiss(MegamorphicCache::Cast(old_data), target_function);
2622 break;
2623 default:
2624 UNREACHABLE();
2625 }
2626}
2627
2628#else
2629
2630void PatchableCallHandler::HandleMissJIT(const Object& old_data,
2631 const Code& old_code,
2632 const Function& target_function) {
2633 switch (old_data.GetClassId()) {
2634 case kArrayCid:
2635 // ICData three-element array: Smi(receiver CID), Smi(count),
2636 // Function(target). It is the Array from ICData::entries_.
2637 DoMonomorphicMissJIT(old_data, target_function);
2638 break;
2639 case kICDataCid:
2640 DoICDataMissJIT(ICData::Cast(old_data), old_code, target_function);
2641 break;
2642 case kMegamorphicCacheCid:
2643 ASSERT(old_code.ptr() == StubCode::MegamorphicCall().ptr() ||
2644 (old_code.IsNull() && !should_consider_patching()));
2645 DoMegamorphicMiss(MegamorphicCache::Cast(old_data), target_function);
2646 break;
2647 default:
2648 UNREACHABLE();
2649 }
2650}
2651#endif // defined(DART_PRECOMPILED_RUNTIME)
2652
2653static void InlineCacheMissHandler(Thread* thread,
2654 Zone* zone,
2656 const ICData& ic_data,
2657 NativeArguments native_arguments) {
2658#if !defined(DART_PRECOMPILED_RUNTIME)
2659 DartFrameIterator iterator(thread,
2661 StackFrame* caller_frame = iterator.NextFrame();
2662 const auto& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
2663 const auto& caller_function =
2664 Function::Handle(zone, caller_frame->LookupDartFunction());
2665
2667 native_arguments, caller_frame, caller_code,
2668 caller_function);
2669
2670 handler.ResolveSwitchAndReturn(ic_data);
2671#else
2672 UNREACHABLE();
2673#endif // !defined(DART_PRECOMPILED_RUNTIME)
2674}
2675
2676// Handles inline cache misses by updating the IC data array of the call site.
2677// Arg0: Receiver object.
2678// Arg1: IC data object.
2679// Returns: target function with compiled code or null.
2680// Modifies the instance call to hold the updated IC data array.
2681DEFINE_RUNTIME_ENTRY(InlineCacheMissHandlerOneArg, 2) {
2682 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
2683 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(1));
2684 RELEASE_ASSERT(!FLAG_precompiled_mode);
2686 args.Add(&receiver);
2687 InlineCacheMissHandler(thread, zone, args, ic_data, arguments);
2688}
2689
2690// Handles inline cache misses by updating the IC data array of the call site.
2691// Arg0: Receiver object.
2692// Arg1: Argument after receiver.
2693// Arg2: IC data object.
2694// Returns: target function with compiled code or null.
2695// Modifies the instance call to hold the updated IC data array.
2696DEFINE_RUNTIME_ENTRY(InlineCacheMissHandlerTwoArgs, 3) {
2697 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
2698 const Instance& other = Instance::CheckedHandle(zone, arguments.ArgAt(1));
2699 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(2));
2700 RELEASE_ASSERT(!FLAG_precompiled_mode);
2702 args.Add(&receiver);
2703 args.Add(&other);
2704 InlineCacheMissHandler(thread, zone, args, ic_data, arguments);
2705}
2706
2707// Handle the first use of an instance call
2708// Arg1: Receiver.
2709// Arg0: Stub out.
2710// Returns: the ICData used to continue with the call.
2711DEFINE_RUNTIME_ENTRY(SwitchableCallMiss, 2) {
2712 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(1));
2713
2716 StackFrame* exit_frame = iterator.NextFrame();
2717 ASSERT(exit_frame->IsExitFrame());
2718 StackFrame* miss_handler_frame = iterator.NextFrame();
2719 // This runtime entry can be called either from miss stub or from
2720 // switchable_call_miss "dart" stub/function set up in
2721 // [MegamorphicCacheTable::InitMissHandler].
2722 ASSERT(miss_handler_frame->IsStubFrame() ||
2723 miss_handler_frame->IsDartFrame());
2724 StackFrame* caller_frame = iterator.NextFrame();
2725 ASSERT(caller_frame->IsDartFrame());
2726 const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
2727 const Function& caller_function =
2728 Function::Handle(zone, caller_frame->LookupDartFunction());
2729
2730 auto& old_data = Object::Handle(zone);
2731#if defined(DART_PRECOMPILED_RUNTIME)
2732 old_data =
2733 CodePatcher::GetSwitchableCallDataAt(caller_frame->pc(), caller_code);
2734#else
2735 CodePatcher::GetInstanceCallAt(caller_frame->pc(), caller_code, &old_data);
2736#endif
2737
2738 GrowableArray<const Instance*> caller_arguments(1);
2739 caller_arguments.Add(&receiver);
2740 PatchableCallHandler handler(thread, caller_arguments,
2742 caller_frame, caller_code, caller_function);
2743 handler.ResolveSwitchAndReturn(old_data);
2744}
2745
2746#if defined(DART_PRECOMPILED_RUNTIME)
2747// Used to find the correct receiver and function to invoke or to fall back to
2748// invoking noSuchMethod when lazy dispatchers are disabled. Returns the
2749// result of the invocation or an Error.
2750static ObjectPtr InvokeCallThroughGetterOrNoSuchMethod(
2751 Thread* thread,
2752 Zone* zone,
2753 const Instance& receiver,
2754 const String& target_name,
2755 const Array& orig_arguments,
2756 const Array& orig_arguments_desc) {
2757 const bool is_dynamic_call =
2759 String& demangled_target_name = String::Handle(zone, target_name.ptr());
2760 if (is_dynamic_call) {
2761 demangled_target_name =
2763 }
2764 Class& cls = Class::Handle(zone, receiver.clazz());
2765 Function& function = Function::Handle(zone);
2766
2767 // Dart distinguishes getters and regular methods and allows their calls
2768 // to mix with conversions, and its selectors are independent of arity. So do
2769 // a zigzagged lookup to see if this call failed because of an arity mismatch,
2770 // need for conversion, or there really is no such method.
2771
2772 const bool is_getter = Field::IsGetterName(demangled_target_name);
2773 if (is_getter) {
2774 // Tear-off of a method
2775 // o.foo (o.get:foo) failed, closurize o.foo() if it exists.
2776 const auto& function_name =
2777 String::Handle(zone, Field::NameFromGetter(demangled_target_name));
2778 while (!cls.IsNull()) {
2779 // We don't generate dyn:* forwarders for method extractors so there is no
2780 // need to try to find a dyn:get:foo first.
2781 if (function.IsNull()) {
2782 if (cls.EnsureIsFinalized(thread) == Error::null()) {
2784 }
2785 }
2786 if (!function.IsNull()) {
2787 const Function& closure_function =
2788 Function::Handle(zone, function.ImplicitClosureFunction());
2789 const Object& result = Object::Handle(
2790 zone, closure_function.ImplicitInstanceClosure(receiver));
2791 return result.ptr();
2792 }
2793 cls = cls.SuperClass();
2794 }
2795
2796 if (receiver.IsRecord()) {
2797 const Record& record = Record::Cast(receiver);
2798 const intptr_t field_index =
2799 record.GetFieldIndexByName(thread, function_name);
2800 if (field_index >= 0) {
2801 return record.FieldAt(field_index);
2802 }
2803 }
2804
2805 // Fall through for noSuchMethod
2806 } else {
2807 // Call through field.
2808 // o.foo(...) failed, invoke noSuchMethod is foo exists but has the wrong
2809 // number of arguments, or try (o.foo).call(...)
2810
2811 if ((target_name.ptr() == Symbols::call().ptr()) && receiver.IsClosure()) {
2812 // Special case: closures are implemented with a call getter instead of a
2813 // call method and with lazy dispatchers the field-invocation-dispatcher
2814 // would perform the closure call.
2815 return DartEntry::InvokeClosure(thread, orig_arguments,
2816 orig_arguments_desc);
2817 }
2818
2819 // Dynamic call sites have to use the dynamic getter as well (if it was
2820 // created).
2821 const auto& getter_name =
2822 String::Handle(zone, Field::GetterName(demangled_target_name));
2823 const auto& dyn_getter_name = String::Handle(
2824 zone, is_dynamic_call
2826 : getter_name.ptr());
2827 ArgumentsDescriptor args_desc(orig_arguments_desc);
2828 while (!cls.IsNull()) {
2829 // If there is a function with the target name but mismatched arguments
2830 // we need to call `receiver.noSuchMethod()`.
2831 if (cls.EnsureIsFinalized(thread) == Error::null()) {
2832 function = Resolver::ResolveDynamicFunction(zone, cls, target_name);
2833 }
2834 if (!function.IsNull()) {
2835 ASSERT(!function.AreValidArguments(args_desc, nullptr));
2836 break; // mismatch, invoke noSuchMethod
2837 }
2838 if (is_dynamic_call) {
2839 function =
2840 Resolver::ResolveDynamicFunction(zone, cls, demangled_target_name);
2841 if (!function.IsNull()) {
2842 ASSERT(!function.AreValidArguments(args_desc, nullptr));
2843 break; // mismatch, invoke noSuchMethod
2844 }
2845 }
2846
2847 // If there is a getter we need to call-through-getter.
2848 if (is_dynamic_call) {
2849 function = Resolver::ResolveDynamicFunction(zone, cls, dyn_getter_name);
2850 }
2851 if (function.IsNull()) {
2852 function = Resolver::ResolveDynamicFunction(zone, cls, getter_name);
2853 }
2854 if (!function.IsNull()) {
2855 const Array& getter_arguments = Array::Handle(Array::New(1));
2856 getter_arguments.SetAt(0, receiver);
2857 const Object& getter_result = Object::Handle(
2858 zone, DartEntry::InvokeFunction(function, getter_arguments));
2859 if (getter_result.IsError()) {
2860 return getter_result.ptr();
2861 }
2862 ASSERT(getter_result.IsNull() || getter_result.IsInstance());
2863
2864 orig_arguments.SetAt(args_desc.FirstArgIndex(), getter_result);
2865 return DartEntry::InvokeClosure(thread, orig_arguments,
2866 orig_arguments_desc);
2867 }
2868 cls = cls.SuperClass();
2869 }
2870
2871 if (receiver.IsRecord()) {
2872 const Record& record = Record::Cast(receiver);
2873 const intptr_t field_index =
2874 record.GetFieldIndexByName(thread, demangled_target_name);
2875 if (field_index >= 0) {
2876 const Object& getter_result =
2877 Object::Handle(zone, record.FieldAt(field_index));
2878 ASSERT(getter_result.IsNull() || getter_result.IsInstance());
2879 orig_arguments.SetAt(args_desc.FirstArgIndex(), getter_result);
2880 return DartEntry::InvokeClosure(thread, orig_arguments,
2881 orig_arguments_desc);
2882 }
2883 }
2884 }
2885
2886 const Object& result = Object::Handle(
2887 zone,
2888 DartEntry::InvokeNoSuchMethod(thread, receiver, demangled_target_name,
2889 orig_arguments, orig_arguments_desc));
2890 return result.ptr();
2891}
2892#endif
2893
2894// Invoke appropriate noSuchMethod or closure from getter.
2895// Arg0: receiver
2896// Arg1: ICData or MegamorphicCache
2897// Arg2: arguments descriptor array
2898// Arg3: arguments array
2899DEFINE_RUNTIME_ENTRY(NoSuchMethodFromCallStub, 4) {
2900 const Object& ic_data_or_cache = Object::Handle(zone, arguments.ArgAt(1));
2901 String& target_name = String::Handle(zone);
2902 if (ic_data_or_cache.IsICData()) {
2903 target_name = ICData::Cast(ic_data_or_cache).target_name();
2904 } else {
2905 ASSERT(ic_data_or_cache.IsMegamorphicCache());
2906 target_name = MegamorphicCache::Cast(ic_data_or_cache).target_name();
2907 }
2908#if defined(DART_PRECOMPILED_RUNTIME)
2909 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
2910 const Array& orig_arguments_desc =
2911 Array::CheckedHandle(zone, arguments.ArgAt(2));
2912 const Array& orig_arguments = Array::CheckedHandle(zone, arguments.ArgAt(3));
2913 const auto& result =
2914 Object::Handle(zone, InvokeCallThroughGetterOrNoSuchMethod(
2915 thread, zone, receiver, target_name,
2916 orig_arguments, orig_arguments_desc));
2918 arguments.SetReturn(result);
2919#else
2920 FATAL("Dispatcher for %s should have been lazily created",
2921 target_name.ToCString());
2922#endif
2923}
2924
2925// Invoke appropriate noSuchMethod function.
2926// Arg0: receiver
2927// Arg1: function
2928// Arg1: arguments descriptor array.
2929// Arg3: arguments array.
2930DEFINE_RUNTIME_ENTRY(NoSuchMethodFromPrologue, 4) {
2931 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
2932 const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(1));
2933 const Array& orig_arguments_desc =
2934 Array::CheckedHandle(zone, arguments.ArgAt(2));
2935 const Array& orig_arguments = Array::CheckedHandle(zone, arguments.ArgAt(3));
2936
2937 String& orig_function_name = String::Handle(zone);
2938 if ((function.kind() == UntaggedFunction::kClosureFunction) ||
2939 (function.kind() == UntaggedFunction::kImplicitClosureFunction)) {
2940 // For closure the function name is always 'call'. Replace it with the
2941 // name of the closurized function so that exception contains more
2942 // relevant information.
2943 orig_function_name = function.QualifiedUserVisibleName();
2944 } else {
2945 orig_function_name = function.name();
2946 }
2947
2948 const Object& result = Object::Handle(
2949 zone, DartEntry::InvokeNoSuchMethod(thread, receiver, orig_function_name,
2950 orig_arguments, orig_arguments_desc));
2952 arguments.SetReturn(result);
2953}
2954
2955#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
2956// The following code is used to stress test
2957// - deoptimization
2958// - debugger stack tracing
2959// - garbage collection
2960// - hot reload
2962 auto isolate = thread->isolate();
2963 auto isolate_group = thread->isolate_group();
2964
2965 if (FLAG_shared_slow_path_triggers_gc) {
2966 isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
2967 }
2968
2969 bool do_deopt = false;
2970 bool do_stacktrace = false;
2971 bool do_reload = false;
2972 bool do_gc = false;
2973 const intptr_t isolate_reload_every =
2974 isolate->group()->reload_every_n_stack_overflow_checks();
2975 if ((FLAG_deoptimize_every > 0) || (FLAG_stacktrace_every > 0) ||
2976 (FLAG_gc_every > 0) || (isolate_reload_every > 0)) {
2977 if (!Isolate::IsSystemIsolate(isolate)) {
2978 // TODO(turnidge): To make --deoptimize_every and
2979 // --stacktrace-every faster we could move this increment/test to
2980 // the generated code.
2981 int32_t count = thread->IncrementAndGetStackOverflowCount();
2982 if (FLAG_deoptimize_every > 0 && (count % FLAG_deoptimize_every) == 0) {
2983 do_deopt = true;
2984 }
2985 if (FLAG_stacktrace_every > 0 && (count % FLAG_stacktrace_every) == 0) {
2986 do_stacktrace = true;
2987 }
2988 if (FLAG_gc_every > 0 && (count % FLAG_gc_every) == 0) {
2989 do_gc = true;
2990 }
2991 if ((isolate_reload_every > 0) && (count % isolate_reload_every) == 0) {
2992 do_reload = isolate->group()->CanReload();
2993 }
2994 }
2995 }
2996 if ((FLAG_deoptimize_filter != nullptr) ||
2997 (FLAG_stacktrace_filter != nullptr) || (FLAG_reload_every != 0)) {
2998 DartFrameIterator iterator(thread,
3000 StackFrame* frame = iterator.NextFrame();
3001 ASSERT(frame != nullptr);
3002 Code& code = Code::Handle();
3004 code = frame->LookupDartCode();
3005 ASSERT(!code.IsNull());
3006 function = code.function();
3007 ASSERT(!function.IsNull());
3008 const char* function_name = nullptr;
3009 if ((FLAG_deoptimize_filter != nullptr) ||
3010 (FLAG_stacktrace_filter != nullptr)) {
3011 function_name = function.ToFullyQualifiedCString();
3012 ASSERT(function_name != nullptr);
3013 }
3014 if (!code.IsNull()) {
3015 if (!code.is_optimized() && FLAG_reload_every_optimized) {
3016 // Don't do the reload if we aren't inside optimized code.
3017 do_reload = false;
3018 }
3019 if (code.is_optimized() && FLAG_deoptimize_filter != nullptr &&
3020 strstr(function_name, FLAG_deoptimize_filter) != nullptr &&
3021 !function.ForceOptimize()) {
3022 OS::PrintErr("*** Forcing deoptimization (%s)\n",
3023 function.ToFullyQualifiedCString());
3024 do_deopt = true;
3025 }
3026 }
3027 if (FLAG_stacktrace_filter != nullptr &&
3028 strstr(function_name, FLAG_stacktrace_filter) != nullptr) {
3029 OS::PrintErr("*** Computing stacktrace (%s)\n",
3030 function.ToFullyQualifiedCString());
3031 do_stacktrace = true;
3032 }
3033 }
3034 if (do_deopt) {
3035 // TODO(turnidge): Consider using DeoptimizeAt instead.
3037 }
3038 if (do_reload) {
3039 // Maybe adjust the rate of future reloads.
3040 isolate_group->MaybeIncreaseReloadEveryNStackOverflowChecks();
3041
3042 // Issue a reload.
3043 const char* script_uri = isolate_group->source()->script_uri;
3044 JSONStream js;
3045 const bool success =
3046 isolate_group->ReloadSources(&js, /*force_reload=*/true, script_uri);
3047 if (!success) {
3048 FATAL("*** Isolate reload failed:\n%s\n", js.ToCString());
3049 }
3050 }
3051 if (do_stacktrace) {
3052 String& var_name = String::Handle();
3053 Instance& var_value = Instance::Handle();
3054 DebuggerStackTrace* stack = isolate->debugger()->StackTrace();
3055 intptr_t num_frames = stack->Length();
3056 for (intptr_t i = 0; i < num_frames; i++) {
3057 ActivationFrame* frame = stack->FrameAt(i);
3058 int num_vars = 0;
3059 // Variable locations and number are unknown when precompiling.
3060#if !defined(DART_PRECOMPILED_RUNTIME)
3061 if (!frame->function().ForceOptimize()) {
3062 // Ensure that we have unoptimized code.
3063 frame->function().EnsureHasCompiledUnoptimizedCode();
3064 num_vars = frame->NumLocalVariables();
3065 }
3066#endif
3067 TokenPosition unused = TokenPosition::kNoSource;
3068 for (intptr_t v = 0; v < num_vars; v++) {
3069 frame->VariableAt(v, &var_name, &unused, &unused, &unused, &var_value);
3070 }
3071 }
3072 if (FLAG_stress_async_stacks) {
3074 }
3075 }
3076 if (do_gc) {
3077 isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
3078 }
3079}
3080#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
3081
3082#if !defined(DART_PRECOMPILED_RUNTIME)
3083static void HandleOSRRequest(Thread* thread) {
3084 auto isolate_group = thread->isolate_group();
3085 ASSERT(isolate_group->use_osr());
3086 DartFrameIterator iterator(thread,
3088 StackFrame* frame = iterator.NextFrame();
3089 ASSERT(frame != nullptr);
3090 const Code& code = Code::ZoneHandle(frame->LookupDartCode());
3091 ASSERT(!code.IsNull());
3092 ASSERT(!code.is_optimized());
3093 const Function& function = Function::Handle(code.function());
3094 ASSERT(!function.IsNull());
3095
3096 // If the code of the frame does not match the function's unoptimized code,
3097 // we bail out since the code was reset by an isolate reload.
3098 if (code.ptr() != function.unoptimized_code()) {
3099 return;
3100 }
3101
3102 // Since the code is referenced from the frame and the ZoneHandle,
3103 // it cannot have been removed from the function.
3104 ASSERT(function.HasCode());
3105 // Don't do OSR on intrinsified functions: The intrinsic code expects to be
3106 // called like a regular function and can't be entered via OSR.
3108 function.is_intrinsic()) {
3109 return;
3110 }
3111
3112 // The unoptimized code is on the stack and should never be detached from
3113 // the function at this point.
3114 ASSERT(function.unoptimized_code() != Object::null());
3115 intptr_t osr_id =
3116 Code::Handle(function.unoptimized_code()).GetDeoptIdForOsr(frame->pc());
3118 if (FLAG_trace_osr) {
3119 OS::PrintErr("Attempting OSR for %s at id=%" Pd ", count=%" Pd "\n",
3120 function.ToFullyQualifiedCString(), osr_id,
3121 function.usage_counter());
3122 }
3123
3124 // Since the code is referenced from the frame and the ZoneHandle,
3125 // it cannot have been removed from the function.
3126 const Object& result = Object::Handle(
3129
3130 if (!result.IsNull()) {
3131 const Code& code = Code::Cast(result);
3132 uword optimized_entry = code.EntryPoint();
3133 frame->set_pc(optimized_entry);
3134 frame->set_pc_marker(code.ptr());
3135 }
3136}
3137#endif // !defined(DART_PRECOMPILED_RUNTIME)
3138
3139DEFINE_RUNTIME_ENTRY(InterruptOrStackOverflow, 0) {
3140#if defined(USING_SIMULATOR)
3141 uword stack_pos = Simulator::Current()->get_sp();
3142 // If simulator was never called it may return 0 as a value of SPREG.
3143 if (stack_pos == 0) {
3144 // Use any reasonable value which would not be treated
3145 // as stack overflow.
3146 stack_pos = thread->saved_stack_limit();
3147 }
3148#else
3150#endif
3151 // Always clear the stack overflow flags. They are meant for this
3152 // particular stack overflow runtime call and are not meant to
3153 // persist.
3154 uword stack_overflow_flags = thread->GetAndClearStackOverflowFlags();
3155
3156 // If an interrupt happens at the same time as a stack overflow, we
3157 // process the stack overflow now and leave the interrupt for next
3158 // time.
3159 if (!thread->os_thread()->HasStackHeadroom() ||
3160 IsCalleeFrameOf(thread->saved_stack_limit(), stack_pos)) {
3161 if (FLAG_verbose_stack_overflow) {
3162 OS::PrintErr("Stack overflow\n");
3163 OS::PrintErr(" Native SP = %" Px ", stack limit = %" Px "\n", stack_pos,
3164 thread->saved_stack_limit());
3165 OS::PrintErr("Call stack:\n");
3166 OS::PrintErr("size | frame\n");
3169 uword fp = stack_pos;
3170 StackFrame* frame = frames.NextFrame();
3171 while (frame != nullptr) {
3172 uword delta = (frame->fp() - fp);
3173 fp = frame->fp();
3174 OS::PrintErr("%4" Pd " %s\n", delta, frame->ToCString());
3175 frame = frames.NextFrame();
3176 }
3177 }
3178
3179 // Use the preallocated stack overflow exception to avoid calling
3180 // into dart code.
3181 const Instance& exception =
3182 Instance::Handle(isolate->group()->object_store()->stack_overflow());
3183 Exceptions::Throw(thread, exception);
3184 UNREACHABLE();
3185 }
3186
3187#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
3189#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
3190
3191 // Handle interrupts:
3192 // - store buffer overflow
3193 // - OOB message (vm-service or dart:isolate)
3194 // - marking ready for finalization
3195 const Error& error = Error::Handle(thread->HandleInterrupts());
3197
3198#if !defined(DART_PRECOMPILED_RUNTIME)
3199 if ((stack_overflow_flags & Thread::kOsrRequest) != 0) {
3200 HandleOSRRequest(thread);
3201 }
3202#else
3203 ASSERT((stack_overflow_flags & Thread::kOsrRequest) == 0);
3204#endif // !defined(DART_PRECOMPILED_RUNTIME)
3205}
3206
3207DEFINE_RUNTIME_ENTRY(TraceICCall, 2) {
3208 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(0));
3209 const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(1));
3210 DartFrameIterator iterator(thread,
3212 StackFrame* frame = iterator.NextFrame();
3213 ASSERT(frame != nullptr);
3215 "IC call @%#" Px ": ICData: %#" Px " cnt:%" Pd " nchecks: %" Pd " %s\n",
3216 frame->pc(), static_cast<uword>(ic_data.ptr()), function.usage_counter(),
3217 ic_data.NumberOfChecks(), function.ToFullyQualifiedCString());
3218}
3219
3220// This is called from function that needs to be optimized.
3221// The requesting function can be already optimized (reoptimization).
3222// Returns the Code object where to continue execution.
3223DEFINE_RUNTIME_ENTRY(OptimizeInvokedFunction, 1) {
3224#if !defined(DART_PRECOMPILED_RUNTIME)
3225 const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(0));
3226 ASSERT(!function.IsNull());
3227 ASSERT(function.HasCode());
3228
3230 auto isolate_group = thread->isolate_group();
3231 if (FLAG_background_compilation) {
3232 if (isolate_group->background_compiler()->EnqueueCompilation(function)) {
3233 // Reduce the chance of triggering a compilation while the function is
3234 // being compiled in the background. INT32_MIN should ensure that it
3235 // takes long time to trigger a compilation.
3236 // Note that the background compilation queue rejects duplicate entries.
3237 function.SetUsageCounter(INT32_MIN);
3238 // Continue in the same code.
3239 arguments.SetReturn(function);
3240 return;
3241 }
3242 }
3243
3244 // Reset usage counter for reoptimization before calling optimizer to
3245 // prevent recursive triggering of function optimization.
3246 function.SetUsageCounter(0);
3247 if (FLAG_trace_compiler || FLAG_trace_optimizing_compiler) {
3248 if (function.HasOptimizedCode()) {
3249 THR_Print("ReCompiling function: '%s' \n",
3250 function.ToFullyQualifiedCString());
3251 }
3252 }
3256 }
3257 arguments.SetReturn(function);
3258#else
3259 UNREACHABLE();
3260#endif // !DART_PRECOMPILED_RUNTIME
3261}
3262
3263// The caller must be a static call in a Dart frame, or an entry frame.
3264// Patch static call to point to valid code's entry point.
3265DEFINE_RUNTIME_ENTRY(FixCallersTarget, 0) {
3266#if !defined(DART_PRECOMPILED_RUNTIME)
3269 StackFrame* frame = iterator.NextFrame();
3270 ASSERT(frame != nullptr);
3271 while (frame->IsStubFrame() || frame->IsExitFrame()) {
3272 frame = iterator.NextFrame();
3273 ASSERT(frame != nullptr);
3274 }
3275 if (frame->IsEntryFrame()) {
3276 // Since function's current code is always unpatched, the entry frame always
3277 // calls to unpatched code.
3278 UNREACHABLE();
3279 }
3280 ASSERT(frame->IsDartFrame());
3281 const Code& caller_code = Code::Handle(zone, frame->LookupDartCode());
3282 RELEASE_ASSERT(caller_code.is_optimized());
3283 const Function& target_function = Function::Handle(
3284 zone, caller_code.GetStaticCallTargetFunctionAt(frame->pc()));
3285
3286 const Code& current_target_code =
3287 Code::Handle(zone, target_function.EnsureHasCode());
3288 CodePatcher::PatchStaticCallAt(frame->pc(), caller_code, current_target_code);
3289 caller_code.SetStaticCallTargetCodeAt(frame->pc(), current_target_code);
3290 if (FLAG_trace_patching) {
3292 "FixCallersTarget: caller %#" Px
3293 " "
3294 "target '%s' -> %#" Px " (%s)\n",
3295 frame->pc(), target_function.ToFullyQualifiedCString(),
3296 current_target_code.EntryPoint(),
3297 current_target_code.is_optimized() ? "optimized" : "unoptimized");
3298 }
3299 arguments.SetReturn(current_target_code);
3300#else
3301 UNREACHABLE();
3302#endif
3303}
3304
3305// The caller must be a monomorphic call from unoptimized code.
3306// Patch call to point to new target.
3307DEFINE_RUNTIME_ENTRY(FixCallersTargetMonomorphic, 2) {
3308#if !defined(DART_PRECOMPILED_RUNTIME)
3309 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
3310 const Array& switchable_call_data =
3311 Array::CheckedHandle(zone, arguments.ArgAt(1));
3312
3313 DartFrameIterator iterator(thread,
3315 StackFrame* caller_frame = iterator.NextFrame();
3316 const auto& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
3317 const auto& caller_function =
3318 Function::Handle(zone, caller_frame->LookupDartFunction());
3319
3320 GrowableArray<const Instance*> caller_arguments(1);
3321 caller_arguments.Add(&receiver);
3322 PatchableCallHandler handler(
3323 thread, caller_arguments, MissHandler::kFixCallersTargetMonomorphic,
3324 arguments, caller_frame, caller_code, caller_function);
3325 handler.ResolveSwitchAndReturn(switchable_call_data);
3326#else
3327 UNREACHABLE();
3328#endif
3329}
3330
3331// The caller tried to allocate an instance via an invalidated allocation
3332// stub.
3333DEFINE_RUNTIME_ENTRY(FixAllocationStubTarget, 0) {
3334#if !defined(DART_PRECOMPILED_RUNTIME)
3337 StackFrame* frame = iterator.NextFrame();
3338 ASSERT(frame != nullptr);
3339 while (frame->IsStubFrame() || frame->IsExitFrame()) {
3340 frame = iterator.NextFrame();
3341 ASSERT(frame != nullptr);
3342 }
3343 if (frame->IsEntryFrame()) {
3344 // There must be a valid Dart frame.
3345 UNREACHABLE();
3346 }
3347 ASSERT(frame->IsDartFrame());
3348 const Code& caller_code = Code::Handle(zone, frame->LookupDartCode());
3349 ASSERT(!caller_code.IsNull());
3350 const Code& stub = Code::Handle(
3351 CodePatcher::GetStaticCallTargetAt(frame->pc(), caller_code));
3352 Class& alloc_class = Class::ZoneHandle(zone);
3353 alloc_class ^= stub.owner();
3354 Code& alloc_stub = Code::Handle(zone, alloc_class.allocation_stub());
3355 if (alloc_stub.IsNull()) {
3356 alloc_stub = StubCode::GetAllocationStubForClass(alloc_class);
3357 ASSERT(!alloc_stub.IsDisabled());
3358 }
3359 CodePatcher::PatchStaticCallAt(frame->pc(), caller_code, alloc_stub);
3360 caller_code.SetStubCallTargetCodeAt(frame->pc(), alloc_stub);
3361 if (FLAG_trace_patching) {
3362 OS::PrintErr("FixAllocationStubTarget: caller %#" Px
3363 " alloc-class %s "
3364 " -> %#" Px "\n",
3365 frame->pc(), alloc_class.ToCString(), alloc_stub.EntryPoint());
3366 }
3367 arguments.SetReturn(alloc_stub);
3368#else
3369 UNREACHABLE();
3370#endif
3371}
3372
3374 switch (deopt_reason) {
3375#define DEOPT_REASON_TO_TEXT(name) \
3376 case ICData::kDeopt##name: \
3377 return #name;
3379#undef DEOPT_REASON_TO_TEXT
3380 default:
3381 UNREACHABLE();
3382 return "";
3383 }
3384}
3385
3386static bool IsSuspendedFrame(Zone* zone,
3387 const Function& function,
3388 StackFrame* frame) {
3389 if (!function.IsSuspendableFunction()) {
3390 return false;
3391 }
3392 auto& suspend_state = Object::Handle(
3393 zone, *reinterpret_cast<ObjectPtr*>(LocalVarAddress(
3396 return suspend_state.IsSuspendState() &&
3397 (SuspendState::Cast(suspend_state).pc() != 0);
3398}
3399
3400void DeoptimizeAt(Thread* mutator_thread,
3401 const Code& optimized_code,
3402 StackFrame* frame) {
3403 ASSERT(optimized_code.is_optimized());
3404
3405 // Force-optimized code is optimized code which cannot deoptimize and doesn't
3406 // have unoptimized code to fall back to.
3407 ASSERT(!optimized_code.is_force_optimized());
3408
3409 Thread* thread = Thread::Current();
3410 Zone* zone = thread->zone();
3411 const Function& function = Function::Handle(zone, optimized_code.function());
3412 const Error& error =
3414 if (!error.IsNull()) {
3416 }
3417 const Code& unoptimized_code =
3418 Code::Handle(zone, function.unoptimized_code());
3419 ASSERT(!unoptimized_code.IsNull());
3420 // The switch to unoptimized code may have already occurred.
3421 if (function.HasOptimizedCode()) {
3422 function.SwitchToUnoptimizedCode();
3423 }
3424
3425 if (IsSuspendedFrame(zone, function, frame)) {
3426 // Frame is suspended and going to be removed from the stack.
3427 if (FLAG_trace_deoptimization) {
3428 THR_Print("Not deoptimizing suspended frame, fp=%" Pp "\n", frame->fp());
3429 }
3430 } else if (frame->IsMarkedForLazyDeopt()) {
3431 // Deopt already scheduled.
3432 if (FLAG_trace_deoptimization) {
3433 THR_Print("Lazy deopt already scheduled for fp=%" Pp "\n", frame->fp());
3434 }
3435 } else {
3436 uword deopt_pc = frame->pc();
3437 ASSERT(optimized_code.ContainsInstructionAt(deopt_pc));
3438
3439#if defined(DEBUG)
3440 ValidateFrames();
3441#endif
3442
3443 // N.B.: Update the pending deopt table before updating the frame. The
3444 // profiler may attempt a stack walk in between.
3445 mutator_thread->pending_deopts().AddPendingDeopt(frame->fp(), deopt_pc);
3446 frame->MarkForLazyDeopt();
3447
3448 if (FLAG_trace_deoptimization) {
3449 THR_Print("Lazy deopt scheduled for fp=%" Pp ", pc=%" Pp "\n",
3450 frame->fp(), deopt_pc);
3451 }
3452 }
3453
3454 // Mark code as dead (do not GC its embedded objects).
3455 optimized_code.set_is_alive(false);
3456}
3457
3458// Currently checks only that all optimized frames have kDeoptIndex
3459// and unoptimized code has the kDeoptAfter.
3461 auto thread = Thread::Current();
3462 // Have to grab program_lock before stopping everybody else.
3463 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
3464
3465 auto isolate_group = thread->isolate_group();
3466 isolate_group->RunWithStoppedMutators([&]() {
3467 Code& optimized_code = Code::Handle();
3468 isolate_group->ForEachIsolate(
3469 [&](Isolate* isolate) {
3470 auto mutator_thread = isolate->mutator_thread();
3471 if (mutator_thread == nullptr) {
3472 return;
3473 }
3474 DartFrameIterator iterator(
3476 StackFrame* frame = iterator.NextFrame();
3477 while (frame != nullptr) {
3478 optimized_code = frame->LookupDartCode();
3479 if (optimized_code.is_optimized() &&
3480 !optimized_code.is_force_optimized()) {
3481 DeoptimizeAt(mutator_thread, optimized_code, frame);
3482 }
3483 frame = iterator.NextFrame();
3484 }
3485 },
3486 /*at_safepoint=*/true);
3487 });
3488}
3489
3491 auto thread = Thread::Current();
3492 // Have to grab program_lock before stopping everybody else.
3493 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
3494
3495 auto isolate = thread->isolate();
3496 auto isolate_group = thread->isolate_group();
3497 isolate_group->RunWithStoppedMutators([&]() {
3498 auto mutator_thread = isolate->mutator_thread();
3499 if (mutator_thread == nullptr) {
3500 return;
3501 }
3502 DartFrameIterator iterator(mutator_thread,
3504 StackFrame* frame = iterator.NextFrame();
3505 if (frame != nullptr) {
3506 const auto& optimized_code = Code::Handle(frame->LookupDartCode());
3507 if (optimized_code.is_optimized() &&
3508 !optimized_code.is_force_optimized()) {
3509 DeoptimizeAt(mutator_thread, optimized_code, frame);
3510 }
3511 }
3512 });
3513}
3514
3515#if !defined(DART_PRECOMPILED_RUNTIME)
3518
3519static void CopySavedRegisters(uword saved_registers_address,
3520 fpu_register_t** fpu_registers,
3521 intptr_t** cpu_registers) {
3522 // Tell MemorySanitizer this region is initialized by generated code. This
3523 // region isn't already (fully) unpoisoned by FrameSetIterator::Unpoison
3524 // because it is in an exit frame and stack frame iteration doesn't have
3525 // access to true SP for exit frames.
3526 MSAN_UNPOISON(reinterpret_cast<void*>(saved_registers_address),
3529
3531 fpu_register_t* fpu_registers_copy =
3533 ASSERT(fpu_registers_copy != nullptr);
3534 for (intptr_t i = 0; i < kNumberOfSavedFpuRegisters; i++) {
3535 fpu_registers_copy[i] =
3536 *reinterpret_cast<fpu_register_t*>(saved_registers_address);
3537 saved_registers_address += kFpuRegisterSize;
3538 }
3539 *fpu_registers = fpu_registers_copy;
3540
3541 ASSERT(sizeof(intptr_t) == kWordSize);
3542 intptr_t* cpu_registers_copy = new intptr_t[kNumberOfSavedCpuRegisters];
3543 ASSERT(cpu_registers_copy != nullptr);
3544 for (intptr_t i = 0; i < kNumberOfSavedCpuRegisters; i++) {
3545 cpu_registers_copy[i] =
3546 *reinterpret_cast<intptr_t*>(saved_registers_address);
3547 saved_registers_address += kWordSize;
3548 }
3549 *cpu_registers = cpu_registers_copy;
3550}
3551#endif
3552
3553DEFINE_LEAF_RUNTIME_ENTRY(bool, TryDoubleAsInteger, 1, Thread* thread) {
3554 double value = thread->unboxed_double_runtime_arg();
3555 int64_t int_value = static_cast<int64_t>(value);
3556 double converted_double = static_cast<double>(int_value);
3557 if (converted_double != value) {
3558 return false;
3559 }
3560 thread->set_unboxed_int64_runtime_arg(int_value);
3561 return true;
3562}
3564
3565// Copies saved registers and caller's frame into temporary buffers.
3566// Returns the stack size of unoptimized frame.
3567// The calling code must be optimized, but its function may not have
3568// have optimized code if the code is OSR code, or if the code was invalidated
3569// through class loading/finalization or field guard.
3571 DeoptimizeCopyFrame,
3572 2,
3573 uword saved_registers_address,
3574 uword is_lazy_deopt) {
3575#if !defined(DART_PRECOMPILED_RUNTIME)
3576 Thread* thread = Thread::Current();
3577 Isolate* isolate = thread->isolate();
3578 StackZone zone(thread);
3579
3580 // All registers have been saved below last-fp as if they were locals.
3581 const uword last_fp =
3582 saved_registers_address + (kNumberOfSavedCpuRegisters * kWordSize) +
3585
3586 // Get optimized code and frame that need to be deoptimized.
3587 DartFrameIterator iterator(last_fp, thread,
3589
3590 StackFrame* caller_frame = iterator.NextFrame();
3591 ASSERT(caller_frame != nullptr);
3592 const Code& optimized_code = Code::Handle(caller_frame->LookupDartCode());
3593 ASSERT(optimized_code.is_optimized());
3594 const Function& top_function =
3595 Function::Handle(thread->zone(), optimized_code.function());
3596 const bool deoptimizing_code = top_function.HasOptimizedCode();
3597 if (FLAG_trace_deoptimization) {
3598 const Function& function = Function::Handle(optimized_code.function());
3599 THR_Print("== Deoptimizing code for '%s', %s, %s\n",
3600 function.ToFullyQualifiedCString(),
3601 deoptimizing_code ? "code & frame" : "frame",
3602 (is_lazy_deopt != 0u) ? "lazy-deopt" : "");
3603 }
3604
3605 if (is_lazy_deopt != 0u) {
3606 const uword deopt_pc =
3607 thread->pending_deopts().FindPendingDeopt(caller_frame->fp());
3608
3609 // N.B.: Update frame before updating pending deopt table. The profiler
3610 // may attempt a stack walk in between.
3611 caller_frame->set_pc(deopt_pc);
3612 ASSERT(caller_frame->pc() == deopt_pc);
3613 ASSERT(optimized_code.ContainsInstructionAt(caller_frame->pc()));
3615 caller_frame->fp(), PendingDeopts::kClearDueToDeopt);
3616 } else {
3617 if (FLAG_trace_deoptimization) {
3618 THR_Print("Eager deopt fp=%" Pp " pc=%" Pp "\n", caller_frame->fp(),
3619 caller_frame->pc());
3620 }
3621 }
3622
3623 // Copy the saved registers from the stack.
3624 fpu_register_t* fpu_registers;
3625 intptr_t* cpu_registers;
3626 CopySavedRegisters(saved_registers_address, &fpu_registers, &cpu_registers);
3627
3628 // Create the DeoptContext.
3629 DeoptContext* deopt_context = new DeoptContext(
3630 caller_frame, optimized_code, DeoptContext::kDestIsOriginalFrame,
3631 fpu_registers, cpu_registers, is_lazy_deopt != 0, deoptimizing_code);
3632 isolate->set_deopt_context(deopt_context);
3633
3634 // Stack size (FP - SP) in bytes.
3635 return deopt_context->DestStackAdjustment() * kWordSize;
3636#else
3637 UNREACHABLE();
3638 return 0;
3639#endif // !DART_PRECOMPILED_RUNTIME
3640}
3642
3643// The stack has been adjusted to fit all values for unoptimized frame.
3644// Fill the unoptimized frame.
3645DEFINE_LEAF_RUNTIME_ENTRY(void, DeoptimizeFillFrame, 1, uword last_fp) {
3646#if !defined(DART_PRECOMPILED_RUNTIME)
3647 Thread* thread = Thread::Current();
3648 Isolate* isolate = thread->isolate();
3649 StackZone zone(thread);
3650
3651 DeoptContext* deopt_context = isolate->deopt_context();
3652 DartFrameIterator iterator(last_fp, thread,
3654 StackFrame* caller_frame = iterator.NextFrame();
3655 ASSERT(caller_frame != nullptr);
3656
3657#if defined(DEBUG)
3658 {
3659 // The code from the deopt_context.
3660 const Code& code = Code::Handle(deopt_context->code());
3661
3662 // The code from our frame.
3663 const Code& optimized_code = Code::Handle(caller_frame->LookupDartCode());
3664 const Function& function = Function::Handle(optimized_code.function());
3665 ASSERT(!function.IsNull());
3666
3667 // The code will be the same as before.
3668 ASSERT(code.ptr() == optimized_code.ptr());
3669
3670 // Some sanity checking of the optimized code.
3671 ASSERT(!optimized_code.IsNull() && optimized_code.is_optimized());
3672 }
3673#endif
3674
3675 deopt_context->set_dest_frame(caller_frame);
3676 deopt_context->FillDestFrame();
3677
3678#else
3679 UNREACHABLE();
3680#endif // !DART_PRECOMPILED_RUNTIME
3681}
3683
3684// This is the last step in the deoptimization, GC can occur.
3685// Returns number of bytes to remove from the expression stack of the
3686// bottom-most deoptimized frame. Those arguments were artificially injected
3687// under return address to keep them discoverable by GC that can occur during
3688// materialization phase.
3689DEFINE_RUNTIME_ENTRY(DeoptimizeMaterialize, 0) {
3690#if !defined(DART_PRECOMPILED_RUNTIME)
3691#if defined(DEBUG)
3692 {
3693 // We may rendezvous for a safepoint at entry or GC from the allocations
3694 // below. Check the stack is walkable.
3695 ValidateFrames();
3696 }
3697#endif
3698 DeoptContext* deopt_context = isolate->deopt_context();
3699 intptr_t deopt_arg_count = deopt_context->MaterializeDeferredObjects();
3700 isolate->set_deopt_context(nullptr);
3701 delete deopt_context;
3702
3703 // Return value tells deoptimization stub to remove the given number of bytes
3704 // from the stack.
3705 arguments.SetReturn(Smi::Handle(Smi::New(deopt_arg_count * kWordSize)));
3706#else
3707 UNREACHABLE();
3708#endif // !DART_PRECOMPILED_RUNTIME
3709}
3710
3711DEFINE_RUNTIME_ENTRY(RewindPostDeopt, 0) {
3712#if !defined(DART_PRECOMPILED_RUNTIME)
3713#if !defined(PRODUCT)
3714 isolate->debugger()->RewindPostDeopt();
3715#endif // !PRODUCT
3716#endif // !DART_PRECOMPILED_RUNTIME
3717 UNREACHABLE();
3718}
3719
3720// Handle slow path actions for the resumed frame after it was
3721// copied back to the stack:
3722// 1) deoptimization;
3723// 2) breakpoint at resumption;
3724// 3) throwing an exception.
3725//
3726// Arg0: exception
3727// Arg1: stack trace
3728DEFINE_RUNTIME_ENTRY(ResumeFrame, 2) {
3729 const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
3730 const Instance& stacktrace =
3731 Instance::CheckedHandle(zone, arguments.ArgAt(1));
3732
3733#if !defined(DART_PRECOMPILED_RUNTIME)
3734#if !defined(PRODUCT)
3735 if (isolate->has_resumption_breakpoints()) {
3736 isolate->debugger()->ResumptionBreakpoint();
3737 }
3738#endif
3739
3740 DartFrameIterator iterator(thread,
3742 StackFrame* frame = iterator.NextFrame();
3743 ASSERT(frame->IsDartFrame());
3744 ASSERT(Function::Handle(zone, frame->LookupDartFunction())
3745 .IsSuspendableFunction());
3746 const Code& caller_code = Code::Handle(zone, frame->LookupDartCode());
3747 if (caller_code.IsDisabled() && caller_code.is_optimized() &&
3748 !caller_code.is_force_optimized()) {
3749 const uword deopt_pc = frame->pc();
3750 thread->pending_deopts().AddPendingDeopt(frame->fp(), deopt_pc);
3751 frame->MarkForLazyDeopt();
3752
3753 if (FLAG_trace_deoptimization) {
3754 THR_Print("Lazy deopt scheduled for resumed frame fp=%" Pp ", pc=%" Pp
3755 "\n",
3756 frame->fp(), deopt_pc);
3757 }
3758 }
3759#endif
3760
3761 if (!exception.IsNull()) {
3762 Exceptions::ReThrow(thread, exception, stacktrace);
3763 }
3764}
3765
3767 const char* runtime_call_name,
3768 bool can_lazy_deopt) {
3769 ASSERT(FLAG_deoptimize_on_runtime_call_every > 0);
3770 if (FLAG_precompiled_mode) {
3771 return;
3772 }
3774 return;
3775 }
3776 const bool is_deopt_related =
3777 strstr(runtime_call_name, "Deoptimize") != nullptr;
3778 if (is_deopt_related) {
3779 return;
3780 }
3781 // For --deoptimize-on-every-runtime-call we only consider runtime calls that
3782 // can lazy-deopt.
3783 if (can_lazy_deopt) {
3784 if (FLAG_deoptimize_on_runtime_call_name_filter != nullptr &&
3785 (strlen(runtime_call_name) !=
3786 strlen(FLAG_deoptimize_on_runtime_call_name_filter) ||
3787 strstr(runtime_call_name,
3788 FLAG_deoptimize_on_runtime_call_name_filter) == nullptr)) {
3789 return;
3790 }
3791 const uint32_t count = thread->IncrementAndGetRuntimeCallCount();
3792 if ((count % FLAG_deoptimize_on_runtime_call_every) == 0) {
3794 }
3795 }
3796}
3797
3798double DartModulo(double left, double right) {
3799 double remainder = fmod_ieee(left, right);
3800 if (remainder == 0.0) {
3801 // We explicitly switch to the positive 0.0 (just in case it was negative).
3802 remainder = +0.0;
3803 } else if (remainder < 0.0) {
3804 if (right < 0) {
3805 remainder -= right;
3806 } else {
3807 remainder += right;
3808 }
3809 }
3810 return remainder;
3811}
3812
3813// Update global type feedback recorded for a field recording the assignment
3814// of the given value.
3815// Arg0: Field object;
3816// Arg1: Value that is being stored.
3817DEFINE_RUNTIME_ENTRY(UpdateFieldCid, 2) {
3818#if !defined(DART_PRECOMPILED_RUNTIME)
3819 const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
3820 const Object& value = Object::Handle(arguments.ArgAt(1));
3821 field.RecordStore(value);
3822#else
3823 UNREACHABLE();
3824#endif
3825}
3826
3827DEFINE_RUNTIME_ENTRY(InitInstanceField, 2) {
3828 const Instance& instance = Instance::CheckedHandle(zone, arguments.ArgAt(0));
3829 const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(1));
3832 result = instance.GetField(field);
3833 ASSERT((result.ptr() != Object::sentinel().ptr()) &&
3834 (result.ptr() != Object::transition_sentinel().ptr()));
3835 arguments.SetReturn(result);
3836}
3837
3838DEFINE_RUNTIME_ENTRY(InitStaticField, 1) {
3839 const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
3840 Object& result = Object::Handle(zone, field.InitializeStatic());
3842 result = field.StaticValue();
3843 ASSERT((result.ptr() != Object::sentinel().ptr()) &&
3844 (result.ptr() != Object::transition_sentinel().ptr()));
3845 arguments.SetReturn(result);
3846}
3847
3848DEFINE_RUNTIME_ENTRY(LateFieldAssignedDuringInitializationError, 1) {
3849 const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
3851 String::Handle(field.name()));
3852}
3853
3854DEFINE_RUNTIME_ENTRY(LateFieldNotInitializedError, 1) {
3855 const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
3857}
3858
3860 // We could just use a trap instruction in the stub, but we get better stack
3861 // traces when there is an exit frame.
3862 FATAL("Not loaded");
3863}
3864
3865DEFINE_RUNTIME_ENTRY(FfiAsyncCallbackSend, 1) {
3867 TRACE_RUNTIME_CALL("FfiAsyncCallbackSend %p", (void*)target_port);
3868 const Object& message = Object::Handle(zone, arguments.ArgAt(0));
3869 const Array& msg_array = Array::Handle(zone, Array::New(3));
3870 msg_array.SetAt(0, message);
3871 PersistentHandle* handle =
3872 isolate->group()->api_state()->AllocatePersistentHandle();
3873 handle->set_ptr(msg_array);
3875 Message::New(target_port, handle, Message::kNormalPriority));
3876}
3877
3878// Use expected function signatures to help MSVC compiler resolve overloading.
3879typedef double (*UnaryMathCFunction)(double x);
3880typedef double (*BinaryMathCFunction)(double x, double y);
3881typedef void* (*MemMoveCFunction)(void* dest, const void* src, size_t n);
3882
3884 /*argument_count=*/2,
3885 /*is_float=*/true,
3886 static_cast<BinaryMathCFunction>(pow));
3887
3889 /*argument_count=*/2,
3890 /*is_float=*/true,
3891 static_cast<BinaryMathCFunction>(DartModulo));
3892
3894 2,
3895 /*is_float=*/true,
3896 static_cast<BinaryMathCFunction>(fmod_ieee));
3897
3899 2,
3900 /*is_float=*/true,
3901 static_cast<BinaryMathCFunction>(atan2_ieee));
3902
3904 /*argument_count=*/1,
3905 /*is_float=*/true,
3906 static_cast<UnaryMathCFunction>(floor));
3907
3909 /*argument_count=*/1,
3910 /*is_float=*/true,
3911 static_cast<UnaryMathCFunction>(ceil));
3912
3914 /*argument_count=*/1,
3915 /*is_float=*/true,
3916 static_cast<UnaryMathCFunction>(trunc));
3917
3919 /*argument_count=*/1,
3920 /*is_float=*/true,
3921 static_cast<UnaryMathCFunction>(round));
3922
3924 /*argument_count=*/1,
3925 /*is_float=*/true,
3926 static_cast<UnaryMathCFunction>(cos));
3927
3929 /*argument_count=*/1,
3930 /*is_float=*/true,
3931 static_cast<UnaryMathCFunction>(sin));
3932
3934 /*argument_count=*/1,
3935 /*is_float=*/true,
3936 static_cast<UnaryMathCFunction>(asin));
3937
3939 /*argument_count=*/1,
3940 /*is_float=*/true,
3941 static_cast<UnaryMathCFunction>(acos));
3942
3944 /*argument_count=*/1,
3945 /*is_float=*/true,
3946 static_cast<UnaryMathCFunction>(tan));
3947
3949 /*argument_count=*/1,
3950 /*is_float=*/true,
3951 static_cast<UnaryMathCFunction>(atan));
3952
3954 /*argument_count=*/1,
3955 /*is_float=*/true,
3956 static_cast<UnaryMathCFunction>(exp));
3957
3959 /*argument_count=*/1,
3960 /*is_float=*/true,
3961 static_cast<UnaryMathCFunction>(log));
3962
3964 /*argument_count=*/3,
3965 /*is_float=*/false,
3966 static_cast<MemMoveCFunction>(memmove));
3967
3968extern "C" void DFLRT_EnterSafepoint(NativeArguments __unusable_) {
3970 TRACE_RUNTIME_CALL("%s", "EnterSafepoint");
3971 Thread* thread = Thread::Current();
3972 ASSERT(thread->top_exit_frame_info() != 0);
3974 thread->EnterSafepoint();
3975 TRACE_RUNTIME_CALL("%s", "EnterSafepoint done");
3976}
3978 /*argument_count=*/0,
3979 /*is_float=*/false,
3981
3982extern "C" void DFLRT_ExitSafepoint(NativeArguments __unusable_) {
3984 TRACE_RUNTIME_CALL("%s", "ExitSafepoint");
3985 Thread* thread = Thread::Current();
3986 ASSERT(thread->top_exit_frame_info() != 0);
3987
3989 if (thread->is_unwind_in_progress()) {
3990 // Clean up safepoint unwind error marker to prevent safepoint tripping.
3991 // The safepoint marker will get restored just before jumping back
3992 // to generated code.
3993 thread->SetUnwindErrorInProgress(false);
3994 NoSafepointScope no_safepoint;
3995 Error unwind_error;
3996 unwind_error ^=
3997 thread->isolate()->isolate_object_store()->preallocated_unwind_error();
3998 Exceptions::PropagateError(unwind_error);
3999 }
4000 thread->ExitSafepoint();
4001
4002 TRACE_RUNTIME_CALL("%s", "ExitSafepoint done");
4003}
4005 /*argument_count=*/0,
4006 /*is_float=*/false,
4008
4009// This is expected to be invoked when jumping to destination frame,
4010// during exception handling.
4012 NativeArguments __unusable_) {
4014 TRACE_RUNTIME_CALL("%s", "ExitSafepointIgnoreUnwindInProgress");
4015 Thread* thread = Thread::Current();
4016 ASSERT(thread->top_exit_frame_info() != 0);
4017
4019
4020 // Compared to ExitSafepoint above we are going to ignore
4021 // is_unwind_in_progress flag because this is called as part of JumpToFrame
4022 // exception handler - we want this transition to complete so that the next
4023 // safepoint check does error propagation.
4024 thread->ExitSafepoint();
4025
4026 TRACE_RUNTIME_CALL("%s", "ExitSafepointIgnoreUnwindInProgress done");
4027}
4028DEFINE_RAW_LEAF_RUNTIME_ENTRY(ExitSafepointIgnoreUnwindInProgress,
4029 /*argument_count=*/0,
4030 /*is_float*/ false,
4032
4033// This is called by a native callback trampoline
4034// (see StubCodeCompiler::GenerateFfiCallbackTrampolineStub). Not registered as
4035// a runtime entry because we can't use Thread to look it up.
4038 uword* out_entry_point,
4039 uword* out_trampoline_type) {
4041 TRACE_RUNTIME_CALL("GetFfiCallbackMetadata %p",
4042 reinterpret_cast<void*>(trampoline));
4043 ASSERT(out_entry_point != nullptr);
4044 ASSERT(out_trampoline_type != nullptr);
4045
4046 Thread* const current_thread = Thread::Current();
4047 auto* fcm = FfiCallbackMetadata::Instance();
4048 auto metadata = fcm->LookupMetadataForTrampoline(trampoline);
4049
4050 // Is this an async callback?
4051 if (metadata.trampoline_type() ==
4053 // It's possible that the callback was deleted, or the target isolate was
4054 // shut down, in between looking up the metadata above, and this point. So
4055 // grab the lock and then check that the callback is still alive.
4056 MutexLocker locker(fcm->lock());
4057 auto metadata2 = fcm->LookupMetadataForTrampoline(trampoline);
4058 *out_trampoline_type = static_cast<uword>(metadata2.trampoline_type());
4059
4060 // Check IsLive, but also check that the metdata hasn't changed. This is
4061 // for the edge case that the callback was destroyed and recycled in between
4062 // the two lookups.
4063 if (!metadata.IsLive() || !metadata.IsSameCallback(metadata2)) {
4064 TRACE_RUNTIME_CALL("GetFfiCallbackMetadata callback deleted %p",
4065 reinterpret_cast<void*>(trampoline));
4066 return nullptr;
4067 }
4068
4069 *out_entry_point = metadata.target_entry_point();
4070 Isolate* target_isolate = metadata.target_isolate();
4071
4072 Isolate* current_isolate = nullptr;
4073 if (current_thread != nullptr) {
4074 current_isolate = current_thread->isolate();
4075 ASSERT(current_thread->execution_state() == Thread::kThreadInNative);
4076 current_thread->ExitSafepoint();
4078 }
4079
4080 // Enter the temporary isolate. If the current isolate is in the same group
4081 // as the target isolate, we can skip entering the temp isolate, and marshal
4082 // the args on the current isolate.
4083 if (current_isolate == nullptr ||
4084 current_isolate->group() != target_isolate->group()) {
4085 if (current_isolate != nullptr) {
4086 Thread::ExitIsolate(/*isolate_shutdown=*/false);
4087 }
4088 target_isolate->group()->EnterTemporaryIsolate();
4089 }
4090 Thread* const temp_thread = Thread::Current();
4091 ASSERT(temp_thread != nullptr);
4092 temp_thread->set_unboxed_int64_runtime_arg(metadata.send_port());
4094 reinterpret_cast<intptr_t>(current_isolate));
4095 ASSERT(!temp_thread->IsAtSafepoint());
4096 return temp_thread;
4097 }
4098
4099 // Otherwise, this is a sync callback, so verify that we're already entered
4100 // into the target isolate.
4101 if (!metadata.IsLive()) {
4102 FATAL("Callback invoked after it has been deleted.");
4103 }
4104 Isolate* target_isolate = metadata.target_isolate();
4105 *out_entry_point = metadata.target_entry_point();
4106 *out_trampoline_type = static_cast<uword>(metadata.trampoline_type());
4107 if (current_thread == nullptr) {
4108 FATAL("Cannot invoke native callback outside an isolate.");
4109 }
4110 if (current_thread->no_callback_scope_depth() != 0) {
4111 FATAL("Cannot invoke native callback when API callbacks are prohibited.");
4112 }
4113 if (current_thread->is_unwind_in_progress()) {
4114 FATAL("Cannot invoke native callback while unwind error propagates.");
4115 }
4116 if (!current_thread->IsDartMutatorThread()) {
4117 FATAL("Native callbacks must be invoked on the mutator thread.");
4118 }
4119 if (current_thread->isolate() != target_isolate) {
4120 FATAL("Cannot invoke native callback from a different isolate.");
4121 }
4122
4123 // Set the execution state to VM while waiting for the safepoint to end.
4124 // This isn't strictly necessary but enables tests to check that we're not
4125 // in native code anymore. See tests/ffi/function_gc_test.dart for example.
4127
4128 current_thread->ExitSafepoint();
4129
4130 current_thread->set_unboxed_int64_runtime_arg(metadata.context());
4131
4132 TRACE_RUNTIME_CALL("GetFfiCallbackMetadata thread %p", current_thread);
4133 TRACE_RUNTIME_CALL("GetFfiCallbackMetadata entry_point %p",
4134 (void*)*out_entry_point);
4135 TRACE_RUNTIME_CALL("GetFfiCallbackMetadata trampoline_type %p",
4136 (void*)*out_trampoline_type);
4137 return current_thread;
4138}
4139
4140extern "C" void DLRT_ExitTemporaryIsolate() {
4141 TRACE_RUNTIME_CALL("ExitTemporaryIsolate%s", "");
4142 Thread* thread = Thread::Current();
4143 ASSERT(thread != nullptr);
4144 Isolate* source_isolate =
4145 reinterpret_cast<Isolate*>(thread->unboxed_int64_runtime_second_arg());
4146
4147 // We're either inside a temp isolate, or inside the source_isolate.
4148 const bool inside_temp_isolate =
4149 source_isolate == nullptr || source_isolate != thread->isolate();
4150 if (inside_temp_isolate) {
4152 if (source_isolate != nullptr) {
4153 TRACE_RUNTIME_CALL("ExitTemporaryIsolate re-entering source isolate %p",
4154 source_isolate);
4155 Thread::EnterIsolate(source_isolate);
4157 }
4158 } else {
4159 thread->EnterSafepoint();
4160 }
4161 TRACE_RUNTIME_CALL("ExitTemporaryIsolate %s", "done");
4162}
4163
4166 TRACE_RUNTIME_CALL("EnterHandleScope %p", thread);
4167 thread->EnterApiScope();
4168 ApiLocalScope* return_value = thread->api_top_scope();
4169 TRACE_RUNTIME_CALL("EnterHandleScope returning %p", return_value);
4170 return return_value;
4171}
4173 /*argument_count=*/1,
4174 /*is_float=*/false,
4176
4177extern "C" void DLRT_ExitHandleScope(Thread* thread) {
4179 TRACE_RUNTIME_CALL("ExitHandleScope %p", thread);
4180 thread->ExitApiScope();
4181 TRACE_RUNTIME_CALL("ExitHandleScope %s", "done");
4182}
4184 /*argument_count=*/1,
4185 /*is_float=*/false,
4187
4190 TRACE_RUNTIME_CALL("AllocateHandle %p", scope);
4191 LocalHandle* return_value = scope->local_handles()->AllocateHandle();
4192 // Don't return an uninitialised handle.
4193 return_value->set_ptr(Object::sentinel().ptr());
4194 TRACE_RUNTIME_CALL("AllocateHandle returning %p", return_value);
4195 return return_value;
4196}
4197
4199 /*argument_count=*/1,
4200 /*is_float=*/false,
4202
4203// Enables reusing `Dart_PropagateError` from `FfiCallInstr`.
4204// `Dart_PropagateError` requires the native state and transitions into the VM.
4205// So the flow is:
4206// - FfiCallInstr (slow path)
4207// - TransitionGeneratedToNative
4208// - DLRT_PropagateError (this)
4209// - Dart_PropagateError
4210// - TransitionNativeToVM
4211// - Throw
4212extern "C" void DLRT_PropagateError(Dart_Handle handle) {
4214 TRACE_RUNTIME_CALL("PropagateError %p", handle);
4215 ASSERT(Thread::Current()->execution_state() == Thread::kThreadInNative);
4216 ASSERT(Dart_IsError(handle));
4217 Dart_PropagateError(handle);
4218 // We should never exit through normal control flow.
4219 UNREACHABLE();
4220}
4221
4222// Not a leaf-function, throws error.
4224 /*argument_count=*/1,
4225 /*is_float=*/false,
4227
4228#if !defined(USING_MEMORY_SANITIZER)
4229extern "C" void __msan_unpoison(const volatile void*, size_t) {
4230 UNREACHABLE();
4231}
4232extern "C" void __msan_unpoison_param(size_t) {
4233 UNREACHABLE();
4234}
4235#endif
4236
4237#if !defined(USING_THREAD_SANITIZER)
4238extern "C" void __tsan_acquire(void* addr) {
4239 UNREACHABLE();
4240}
4241extern "C" void __tsan_release(void* addr) {
4242 UNREACHABLE();
4243}
4244#endif
4245
4246// These runtime entries are defined even when not using MSAN / TSAN to keep
4247// offsets on Thread consistent.
4248
4250 /*argument_count=*/2,
4251 /*is_float=*/false,
4253
4255 /*argument_count=*/1,
4256 /*is_float=*/false,
4258
4260 /*argument_count=*/1,
4261 /*is_float=*/false,
4263
4265 /*argument_count=*/1,
4266 /*is_float=*/false,
4268
4269} // namespace dart
AutoreleasePool pool
static bool unused
int count
Definition: FontMgrTest.cpp:50
static void round(SkPoint *p)
SI void store(P *ptr, const T &val)
SI F table(const skcms_Curve *curve, F v)
#define UNREACHABLE()
Definition: assert.h:248
#define RELEASE_ASSERT(cond)
Definition: assert.h:327
GLenum type
virtual classid_t type_class_id() const
Definition: object.cc:21033
bool IsTopTypeForSubtyping() const
Definition: object.cc:21396
virtual bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition: object.cc:21151
static bool InstantiateAndTestSubtype(AbstractType *subtype, AbstractType *supertype, const TypeArguments &instantiator_type_args, const TypeArguments &function_type_args)
Definition: object.cc:4287
const char * NameCString() const
Definition: object.cc:21324
bool IsDynamicType() const
Definition: object.h:9186
LocalHandles * local_handles()
const char * ToCString() const
Definition: dart_entry.cc:438
static ArrayPtr NewBoxed(intptr_t type_args_len, intptr_t num_arguments, const Array &optional_arguments_names, Heap::Space space=Heap::kOld)
Definition: dart_entry.h:83
intptr_t CountWithTypeArgs() const
Definition: dart_entry.h:38
intptr_t FirstArgIndex() const
Definition: dart_entry.h:37
static constexpr intptr_t kMaxElements
Definition: object.h:10924
static ArrayPtr New(intptr_t len, Heap::Space space=Heap::kNew)
Definition: object.h:10959
virtual void SetTypeArguments(const TypeArguments &value) const
Definition: object.h:10908
static intptr_t LengthOf(const ArrayPtr array)
Definition: object.h:10830
void SetAt(intptr_t index, const Object &value) const
Definition: object.h:10880
void Add(const T &value)
intptr_t length() const
static const Bool & Get(bool value)
Definition: object.h:10801
static const Bool & True()
Definition: object.h:10797
StringPtr target_name() const
Definition: object.h:2372
ArrayPtr arguments_descriptor() const
Definition: object.h:2373
ClassPtr At(intptr_t cid) const
Definition: class_table.h:362
CodePtr allocation_stub() const
Definition: object.h:1800
FunctionPtr GetInvocationDispatcher(const String &target_name, const Array &args_desc, UntaggedFunction::Kind kind, bool create_if_absent) const
Definition: object.cc:3847
intptr_t id() const
Definition: object.h:1233
intptr_t NumTypeArguments() const
Definition: object.cc:3640
bool IsClosureClass() const
Definition: object.h:1577
StringPtr Name() const
Definition: object.cc:2977
ErrorPtr EnsureIsFinalized(Thread *thread) const
Definition: object.cc:4924
bool is_allocate_finalized() const
Definition: object.h:1732
static ClosurePtr New(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const Function &function, const Object &context, Heap::Space space=Heap::kNew)
Definition: object.cc:25942
static void PatchInstanceCallAt(uword return_address, const Code &caller_code, const Object &data, const Code &target)
static CodePtr GetStaticCallTargetAt(uword return_address, const Code &code)
static void PatchSwitchableCallAt(uword return_address, const Code &caller_code, const Object &data, const Code &target)
static uword GetSwitchableCallTargetEntryAt(uword return_address, const Code &caller_code)
static ObjectPtr GetSwitchableCallDataAt(uword return_address, const Code &caller_code)
static CodePtr GetInstanceCallAt(uword return_address, const Code &caller_code, Object *data)
static void PatchStaticCallAt(uword return_address, const Code &code, const Code &new_target)
intptr_t GetNullCheckNameIndexAt(int32_t pc_offset)
FunctionPtr function() const
Definition: object.h:7130
uword EntryPoint() const
Definition: object.h:6864
void SetStaticCallTargetCodeAt(uword pc, const Code &code) const
Definition: object.cc:17815
bool is_optimized() const
Definition: object.h:6817
bool is_force_optimized() const
Definition: object.h:6825
void SetStubCallTargetCodeAt(uword pc, const Code &code) const
Definition: object.cc:17829
bool ContainsInstructionAt(uword addr) const
Definition: object.h:6915
bool IsDisabled() const
Definition: object.h:7257
ObjectPtr owner() const
Definition: object.h:7135
void set_is_alive(bool value) const
Definition: object.cc:17658
ObjectPoolPtr GetObjectPool() const
Definition: object.cc:17723
FunctionPtr GetStaticCallTargetFunctionAt(uword pc) const
Definition: object.cc:17800
static bool CanOptimizeFunction(Thread *thread, const Function &function)
Definition: compiler.cc:229
static constexpr intptr_t kNoOSRDeoptId
Definition: compiler.h:73
static ErrorPtr EnsureUnoptimizedCode(Thread *thread, const Function &function)
Definition: compiler.cc:854
static ObjectPtr CompileOptimizedFunction(Thread *thread, const Function &function, intptr_t osr_id=kNoOSRDeoptId)
Definition: compiler.cc:886
static ContextPtr New(intptr_t num_variables, Heap::Space space=Heap::kNew)
Definition: object.cc:18511
static intptr_t NumVariables(const ContextPtr context)
Definition: object.h:7418
void set_parent(const Context &parent) const
Definition: object.h:7407
void SetAt(intptr_t context_index, const Object &value) const
Definition: object.h:13303
ObjectPtr At(intptr_t context_index) const
Definition: object.h:7422
intptr_t num_variables() const
Definition: object.h:7414
ContextPtr parent() const
Definition: object.h:7406
static ObjectPtr InvokeNoSuchMethod(Thread *thread, const Instance &receiver, const String &target_name, const Array &arguments, const Array &arguments_descriptor)
Definition: dart_entry.cc:307
static ObjectPtr InvokeClosure(Thread *thread, const Array &arguments)
Definition: dart_entry.cc:282
static ObjectPtr InvokeFunction(const Function &function, const Array &arguments)
Definition: dart_entry.cc:31
StackFrame * NextFrame()
Definition: stack_frame.h:352
static IsolateGroup * vm_isolate_group()
Definition: dart.h:69
intptr_t Length() const
Definition: debugger.h:465
ActivationFrame * FrameAt(int i) const
Definition: debugger.h:467
static DebuggerStackTrace * CollectAsyncAwaiters()
Definition: debugger.cc:1732
intptr_t MaterializeDeferredObjects()
void set_dest_frame(const StackFrame *frame)
CodePtr code() const
intptr_t DestStackAdjustment() const
static constexpr intptr_t kNone
Definition: deopt_id.h:27
static DoublePtr New(double d, Heap::Space space=Heap::kNew)
Definition: object.cc:23402
static DART_NORETURN void ThrowByType(ExceptionType type, const Array &arguments)
Definition: exceptions.cc:1052
static DART_NORETURN void ThrowOOM()
Definition: exceptions.cc:1066
static DART_NORETURN void ThrowRangeError(const char *argument_name, const Integer &argument_value, intptr_t expected_from, intptr_t expected_to)
Definition: exceptions.cc:1094
static DART_NORETURN void ThrowLateFieldAssignedDuringInitialization(const String &name)
Definition: exceptions.cc:1124
static DART_NORETURN void Throw(Thread *thread, const Instance &exception)
Definition: exceptions.cc:979
static DART_NORETURN void ThrowArgumentError(const Instance &arg)
Definition: exceptions.cc:1082
@ kIntegerDivisionByZeroException
Definition: exceptions.h:60
static DART_NORETURN void ThrowLateFieldNotInitialized(const String &name)
Definition: exceptions.cc:1118
static DART_NORETURN void ReThrow(Thread *thread, const Instance &exception, const Instance &stacktrace, bool bypass_debugger=false)
Definition: exceptions.cc:986
static void CreateAndThrowTypeError(TokenPosition location, const AbstractType &src_type, const AbstractType &dst_type, const String &dst_name)
Definition: exceptions.cc:896
static DART_NORETURN void PropagateError(const Error &error)
Definition: exceptions.cc:1003
static FfiCallbackMetadata * Instance()
DART_WARN_UNUSED_RESULT ErrorPtr InitializeInstance(const Instance &instance) const
Definition: object.cc:12339
static bool IsGetterName(const String &function_name)
Definition: object.cc:11831
DART_WARN_UNUSED_RESULT ErrorPtr InitializeStatic() const
Definition: object.cc:12377
static bool IsSetterName(const String &function_name)
Definition: object.cc:11835
StringPtr name() const
Definition: object.h:4430
ObjectPtr StaticValue() const
Definition: object.h:13279
static StringPtr GetterName(const String &field_name)
Definition: object.cc:11792
static StringPtr NameFromGetter(const String &getter_name)
Definition: object.cc:11816
void RecordStore(const Object &value) const
Definition: object.cc:13027
static Float32x4Ptr New(float value0, float value1, float value2, float value3, Heap::Space space=Heap::kNew)
Definition: object.cc:25307
static Float64x2Ptr New(double value0, double value1, Heap::Space space=Heap::kNew)
Definition: object.cc:25475
bool PrologueNeedsArgumentsDescriptor() const
Definition: object.cc:11437
static bool IsDynamicInvocationForwarderName(const String &name)
Definition: object.cc:4190
const char * ToFullyQualifiedCString() const
Definition: object.cc:9762
static StringPtr DemangleDynamicInvocationForwarderName(const String &name)
Definition: object.cc:4198
bool HasOptimizedCode() const
Definition: object.cc:10974
static StringPtr CreateDynamicInvocationForwarderName(const String &name)
Definition: object.cc:4205
bool IsMethodExtractor() const
Definition: object.h:3284
CodePtr unoptimized_code() const
Definition: object.h:3185
CodePtr EnsureHasCode() const
Definition: object.cc:11338
bool IsDebugging(Thread *thread, const Function &function)
Definition: debugger.cc:3488
@ kNew
Definition: heap.h:38
@ kOld
Definition: heap.h:39
Scavenger * new_space()
Definition: heap.h:62
void CollectAllGarbage(GCReason reason=GCReason::kFull, bool compact=false)
Definition: heap.cc:573
bool Contains(uword addr) const
Definition: heap.cc:239
void EnsureHasCheck(const GrowableArray< intptr_t > &class_ids, const Function &target, intptr_t count=1) const
Definition: object.cc:16775
static ICDataPtr ICDataOfEntriesArray(const Array &array)
Definition: object.cc:17484
static ICDataPtr NewWithCheck(const Function &owner, const String &target_name, const Array &arguments_descriptor, intptr_t deopt_id, intptr_t num_args_tested, RebindRule rebind_rule, GrowableArray< intptr_t > *cids, const Function &target, const AbstractType &receiver_type=Object::null_abstract_type())
Definition: object.cc:17360
intptr_t NumArgsTested() const
Definition: object.cc:16471
ArrayPtr entries() const
Definition: object.h:2783
RebindRule rebind_rule() const
Definition: object.cc:16547
bool NumberOfChecksIs(intptr_t n) const
Definition: object.cc:16582
FunctionPtr GetTargetAt(intptr_t index) const
Definition: object.cc:17029
bool is_tracking_exactness() const
Definition: object.h:2483
void set_is_megamorphic(bool value) const
Definition: object.h:2555
void EnsureHasReceiverCheck(intptr_t receiver_class_id, const Function &target, intptr_t count=1, StaticTypeExactnessState exactness=StaticTypeExactnessState::NotTracking()) const
Definition: object.cc:16876
intptr_t NumberOfChecks() const
Definition: object.cc:16577
bool IsInstanceOf(const AbstractType &other, const TypeArguments &other_instantiator_type_arguments, const TypeArguments &other_function_type_arguments) const
Definition: object.cc:20614
AbstractTypePtr GetType(Heap::Space space) const
Definition: object.cc:20520
bool IsAssignableTo(const AbstractType &other, const TypeArguments &other_instantiator_type_arguments, const TypeArguments &other_function_type_arguments) const
Definition: object.cc:20629
static InstancePtr NewAlreadyFinalized(const Class &cls, Heap::Space space=Heap::kNew)
Definition: object.cc:20943
static Int32x4Ptr New(int32_t value0, int32_t value1, int32_t value2, int32_t value3, Heap::Space space=Heap::kNew)
Definition: object.cc:25391
static IntegerPtr New(const String &str, Heap::Space space=Heap::kNew)
Definition: object.cc:22984
static IntegerPtr NewFromUint64(uint64_t value, Heap::Space space=Heap::kNew)
Definition: object.cc:23026
static int EncodeType(Level level, Kind kind)
GroupDebugger * debugger() const
Definition: isolate.h:315
static bool IsSystemIsolateGroup(const IsolateGroup *group)
Definition: isolate.cc:3605
Heap * heap() const
Definition: isolate.h:296
ObjectStore * object_store() const
Definition: isolate.h:510
static IsolateGroup * Current()
Definition: isolate.h:539
ClassTable * class_table() const
Definition: isolate.h:496
static void ExitTemporaryIsolate()
Definition: isolate.cc:880
Mutex * patchable_call_mutex()
Definition: isolate.h:519
Isolate * EnterTemporaryIsolate()
Definition: isolate.cc:871
Mutex * subtype_test_cache_mutex()
Definition: isolate.h:516
void set_deopt_context(DeoptContext *value)
Definition: isolate.h:1256
static bool IsSystemIsolate(const Isolate *isolate)
Definition: isolate.h:1445
IsolateObjectStore * isolate_object_store() const
Definition: isolate.h:1007
bool has_attempted_stepping() const
Definition: isolate.h:1421
IsolateGroup * group() const
Definition: isolate.h:1037
DeoptContext * deopt_context() const
Definition: isolate.h:1255
Thread * mutator_thread() const
Definition: isolate.cc:1920
void set_ptr(ObjectPtr ptr)
LocalHandle * AllocateHandle()
static MegamorphicCachePtr Lookup(Thread *thread, const String &name, const Array &descriptor)
static std::unique_ptr< Message > New(Args &&... args)
Definition: message.h:72
@ kNormalPriority
Definition: message.h:28
static MonomorphicSmiableCallPtr New(classid_t expected_cid, const Code &target)
Definition: object.cc:16350
void SetReturn(const Object &value) const
void SetArgAt(int index, const Object &value) const
static uword GetCurrentStackPointer()
Definition: os_thread.cc:132
static void static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
UntaggedObject * untag() const
static ObjectPtr null()
Definition: object.h:433
intptr_t GetClassId() const
Definition: object.h:341
ObjectPtr ptr() const
Definition: object.h:332
bool IsCanonical() const
Definition: object.h:335
virtual const char * ToCString() const
Definition: object.h:366
bool IsNull() const
Definition: object.h:363
static Object & Handle()
Definition: object.h:407
static ObjectPtr RawCast(ObjectPtr obj)
Definition: object.h:325
static Object & ZoneHandle()
Definition: object.h:419
PatchableCallHandler(Thread *thread, const GrowableArray< const Instance * > &caller_arguments, MissHandler miss_handler, NativeArguments arguments, StackFrame *caller_frame, const Code &caller_code, const Function &caller_function)
void ResolveSwitchAndReturn(const Object &data)
uword FindPendingDeopt(uword fp)
void ClearPendingDeoptsAtOrBelow(uword fp, ClearReason reason)
void AddPendingDeopt(uword fp, uword pc)
void set_ptr(ObjectPtr ref)
static bool PostMessage(std::unique_ptr< Message > message, bool before_events=false)
Definition: port.cc:152
intptr_t num_fields() const
Definition: object.h:11314
static RecordPtr New(RecordShape shape, Heap::Space space=Heap::kNew)
Definition: object.cc:27741
void SetFieldAt(intptr_t field_index, const Object &value) const
Definition: object.h:11436
static FunctionPtr ResolveDynamicAnyArgs(Zone *zone, const Class &receiver_class, const String &function_name, bool allow_add)
Definition: resolver.cc:185
static FunctionPtr ResolveDynamicFunction(Zone *zone, const Class &receiver_class, const String &function_name)
Definition: resolver.cc:176
static FunctionPtr ResolveDynamicForReceiverClass(const Class &receiver_class, const String &function_name, const ArgumentsDescriptor &args_desc, bool allow_add)
Definition: resolver.cc:148
intptr_t AbandonRemainingTLAB(Thread *thread)
Definition: scavenger.cc:1863
int32_t get_sp() const
Definition: simulator_arm.h:61
static Simulator * Current()
static SingleTargetCachePtr New()
Definition: object.cc:16321
static SmiPtr New(intptr_t value)
Definition: object.h:10006
intptr_t Value() const
Definition: object.h:9990
friend class Class
Definition: object.h:10047
StackFrame * NextFrame()
Definition: stack_frame.cc:549
uword pc() const
Definition: stack_frame.h:43
virtual bool IsExitFrame() const
Definition: stack_frame.h:103
CodePtr LookupDartCode() const
Definition: stack_frame.cc:336
virtual bool IsStubFrame() const
Definition: stack_frame.cc:150
void set_pc(uword value)
Definition: stack_frame.h:72
TokenPosition GetTokenPos() const
Definition: stack_frame.cc:429
uword fp() const
Definition: stack_frame.h:42
uword sp() const
Definition: stack_frame.h:41
virtual bool IsDartFrame(bool validate=true) const
Definition: stack_frame.h:97
FunctionPtr LookupDartFunction() const
Definition: stack_frame.cc:325
static StaticTypeExactnessState NotExact()
static StaticTypeExactnessState NotTracking()
static StringPtr NewFormatted(const char *format,...) PRINTF_ATTRIBUTE(1
Definition: object.cc:24004
static StringPtr New(const char *cstr, Heap::Space space=Heap::kNew)
Definition: object.cc:23698
static const char * ToCString(Thread *thread, StringPtr ptr)
Definition: object.cc:24126
static CodePtr GetAllocationStubForClass(const Class &cls)
Definition: stub_code.cc:174
void WriteEntryToBuffer(Zone *zone, BaseTextBuffer *buffer, intptr_t index, const char *line_prefix=nullptr) const
Definition: object.cc:19496
static SubtypeTestCachePtr New(intptr_t num_inputs)
Definition: object.cc:18924
static constexpr intptr_t kMaxInputs
Definition: object.h:7705
static constexpr intptr_t MaxEntriesForCacheAllocatedFor(intptr_t count)
Definition: object.h:7843
static intptr_t UsedInputsForType(const AbstractType &type)
Definition: object.cc:19668
intptr_t NumberOfChecks() const
Definition: object.cc:18954
intptr_t AddCheck(const Object &instance_class_id_or_signature, const AbstractType &destination_type, const TypeArguments &instance_type_arguments, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const TypeArguments &instance_parent_function_type_arguments, const TypeArguments &instance_delayed_type_arguments, const Bool &test_result) const
Definition: object.cc:18978
bool HasCheck(const Object &instance_class_id_or_signature, const AbstractType &destination_type, const TypeArguments &instance_type_arguments, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const TypeArguments &instance_parent_function_type_arguments, const TypeArguments &instance_delayed_type_arguments, intptr_t *index, Bool *result) const
Definition: object.cc:19464
static SuspendStatePtr Clone(Thread *thread, const SuspendState &src, Heap::Space space=Heap::kNew)
Definition: object.cc:26532
static constexpr intptr_t kSuspendStateVarIndex
Definition: object.h:12617
static SuspendStatePtr New(intptr_t frame_size, const Instance &function_data, Heap::Space space=Heap::kNew)
Definition: object.cc:26508
Zone * zone() const
Definition: thread_state.h:37
void set_execution_state(ExecutionState state)
Definition: thread.h:1048
@ kOsrRequest
Definition: thread.h:425
ApiLocalScope * api_top_scope() const
Definition: thread.h:513
void DeferredMarkingStackAddObject(ObjectPtr obj)
Definition: thread.cc:871
int32_t no_callback_scope_depth() const
Definition: thread.h:623
static Thread * Current()
Definition: thread.h:362
PendingDeopts & pending_deopts()
Definition: thread.h:1144
void set_unboxed_int64_runtime_arg(int64_t value)
Definition: thread.h:828
static bool IsAtSafepoint(SafepointLevel level, uword state)
Definition: thread.h:911
void SetUnwindErrorInProgress(bool value)
Definition: thread.h:1016
bool is_marking() const
Definition: thread.h:676
Heap * heap() const
Definition: thread.cc:943
double unboxed_double_runtime_arg() const
Definition: thread.h:837
void ExitSafepoint()
Definition: thread.h:1094
void EnterApiScope()
Definition: thread.cc:1301
void ExitApiScope()
Definition: thread.cc:1314
int64_t unboxed_int64_runtime_arg() const
Definition: thread.h:825
bool is_unwind_in_progress() const
Definition: thread.h:643
uword top_exit_frame_info() const
Definition: thread.h:691
int64_t unboxed_int64_runtime_second_arg() const
Definition: thread.h:831
bool IsDartMutatorThread() const
Definition: thread.h:551
void EnterSafepoint()
Definition: thread.h:1076
ExecutionState execution_state() const
Definition: thread.h:1040
Isolate * isolate() const
Definition: thread.h:534
int32_t IncrementAndGetStackOverflowCount()
Definition: thread.h:447
uint32_t IncrementAndGetRuntimeCallCount()
Definition: thread.h:451
@ kThreadInNative
Definition: thread.h:1036
IsolateGroup * isolate_group() const
Definition: thread.h:541
static void EnterIsolate(Isolate *isolate)
Definition: thread.cc:371
static void ExitIsolate(bool isolate_shutdown=false)
Definition: thread.cc:428
void set_unboxed_int64_runtime_second_arg(int64_t value)
Definition: thread.h:834
bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition: object.h:8707
intptr_t Length() const
Definition: object.cc:7294
TypeArgumentsPtr InstantiateAndCanonicalizeFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments) const
Definition: object.cc:7620
bool IsUninstantiatedIdentity() const
Definition: object.cc:7352
static CodePtr SpecializeStubFor(Thread *thread, const AbstractType &type)
static TypePtr BoolType()
static intptr_t MaxElements(intptr_t class_id)
Definition: object.h:11684
static TypedDataPtr New(intptr_t class_id, intptr_t len, Heap::Space space=Heap::kNew)
Definition: object.cc:25587
static uword ToAddr(const UntaggedObject *raw_obj)
Definition: raw_object.h:522
static constexpr T Maximum(T x, T y)
Definition: utils.h:41
#define THR_Print(format,...)
Definition: log.h:20
int64_t Dart_Port
Definition: dart_api.h:1525
struct _Dart_Handle * Dart_Handle
Definition: dart_api.h:258
#define ASSERT(E)
VkInstance instance
Definition: main.cc:48
double frame
Definition: examples.cpp:31
#define FATAL(error)
G_BEGIN_DECLS G_MODULE_EXPORT FlValue * args
const uint8_t uint32_t uint32_t GError ** error
uint8_t value
GAsyncResult * result
uint32_t * target
const char * charp
Definition: flags.h:12
Dart_NativeFunction function
Definition: fuchsia.cc:51
static float max(float r, float g, float b)
Definition: hsl.cpp:49
size_t length
Win32Message message
#define MSAN_UNPOISON(ptr, len)
double y
double x
ImplicitString Name
Definition: DMSrcSink.h:38
bool WillAllocateNewOrRememberedContext(intptr_t num_context_variables)
Definition: runtime_api.cc:40
bool WillAllocateNewOrRememberedArray(intptr_t length)
Definition: runtime_api.cc:46
Definition: dart_vm.cc:33
constexpr int64_t kMaxInt64
Definition: globals.h:486
static AbstractTypePtr InstantiateType(const AbstractType &type, const AbstractType &instantiator)
Definition: mirrors.cc:614
static void InlineCacheMissHandler(Thread *thread, Zone *zone, const GrowableArray< const Instance * > &args, const ICData &ic_data, NativeArguments native_arguments)
FunctionPtr InlineCacheMissHelper(const Class &receiver_class, const Array &args_descriptor, const String &target_name)
const char *const name
void DLRT_PropagateError(Dart_Handle handle)
IntegerPtr DoubleToInteger(Zone *zone, double val)
void DeoptimizeFunctionsOnStack()
static void CopySavedRegisters(uword saved_registers_address, fpu_register_t **fpu_registers, intptr_t **cpu_registers)
static bool IsSuspendedFrame(Zone *zone, const Function &function, StackFrame *frame)
Thread * DLRT_GetFfiCallbackMetadata(FfiCallbackMetadata::Trampoline trampoline, uword *out_entry_point, uword *out_trampoline_type)
static void DeoptimizeLastDartFrameIfOptimized()
static void PrintSubtypeCheck(const AbstractType &subtype, const AbstractType &supertype, const bool result)
static void HandleOSRRequest(Thread *thread)
double DartModulo(double left, double right)
int32_t classid_t
Definition: globals.h:524
static DART_FORCE_INLINE uword ParamAddress(uword fp, intptr_t reverse_index)
Definition: stack_frame.h:417
static void NullErrorHelper(Zone *zone, const String &selector, bool is_param_name=false)
static void UpdateTypeTestCache(Zone *zone, Thread *thread, const Instance &instance, const AbstractType &destination_type, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const Bool &result, const SubtypeTestCache &new_cache)
DART_EXPORT void Dart_PropagateError(Dart_Handle handle)
static TokenPosition GetCallerLocation()
@ kNullCid
Definition: class_id.h:252
void *(* MemMoveCFunction)(void *dest, const void *src, size_t n)
void DFLRT_ExitSafepointIgnoreUnwindInProgress(NativeArguments __unusable_)
static void PrintTypeCheck(const char *message, const Instance &instance, const AbstractType &type, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const Bool &result)
void DLRT_ExitHandleScope(Thread *thread)
LocalHandle * DLRT_AllocateHandle(ApiLocalScope *scope)
void __tsan_acquire(void *addr)
uintptr_t uword
Definition: globals.h:501
void DLRT_ExitTemporaryIsolate()
double(* BinaryMathCFunction)(double x, double y)
void __msan_unpoison(const volatile void *, size_t)
DART_EXPORT bool Dart_IsError(Dart_Handle handle)
ApiLocalScope * DLRT_EnterHandleScope(Thread *thread)
const uint32_t fp
@ kNumberOfCpuRegisters
Definition: constants_arm.h:98
const int kNumberOfFpuRegisters
static constexpr intptr_t kNumberOfSavedFpuRegisters
void DeoptimizeAt(Thread *mutator_thread, const Code &optimized_code, StackFrame *frame)
DEFINE_RUNTIME_ENTRY_NO_LAZY_DEOPT(AllocateDouble, 0)
DEFINE_RAW_LEAF_RUNTIME_ENTRY(CaseInsensitiveCompareUCS2, 4, false, CaseInsensitiveCompareUCS2)
static bool ResolveCallThroughGetter(const Class &receiver_class, const String &target_name, const String &demangled, const Array &arguments_descriptor, Function *result)
static FunctionPtr ComputeTypeCheckTarget(const Instance &receiver, const AbstractType &type, const ArgumentsDescriptor &desc)
static void RuntimeAllocationEpilogue(Thread *thread)
static constexpr intptr_t kDefaultMaxSubtypeCacheEntries
static FunctionPtr Resolve(Thread *thread, Zone *zone, const GrowableArray< const Instance * > &caller_arguments, const Class &receiver_class, const String &name, const Array &descriptor)
static void HandleStackOverflowTestCases(Thread *thread)
DEFINE_FLAG(bool, print_cluster_information, false, "Print information about clusters written to snapshot")
static Heap::Space SpaceForRuntimeAllocation()
double(* UnaryMathCFunction)(double x)
const intptr_t cid
static constexpr intptr_t kNumberOfSavedCpuRegisters
@ kTypeCheckFromLazySpecializeStub
@ kTypeCheckFromInline
@ kTypeCheckFromSlowStub
FrameLayout runtime_frame_layout
Definition: stack_frame.cc:81
static void ThrowIfError(const Object &result)
static uint32_t Hash(uint32_t key)
Definition: hashmap_test.cc:65
static DART_FORCE_INLINE uword LocalVarAddress(uword fp, intptr_t index)
Definition: stack_frame.h:429
static InstancePtr AllocateObject(Thread *thread, const Class &cls)
constexpr intptr_t kWordSize
Definition: globals.h:509
void DFLRT_ExitSafepoint(NativeArguments __unusable_)
static void TrySwitchInstanceCall(Thread *thread, StackFrame *caller_frame, const Code &caller_code, const Function &caller_function, const ICData &ic_data, const Function &target_function)
DEFINE_RUNTIME_ENTRY(CompileFunction, 1)
Definition: compiler.cc:212
@ kCurrentClass
Definition: object.h:2250
void __msan_unpoison_param(size_t)
void DFLRT_EnterSafepoint(NativeArguments __unusable_)
static DART_FORCE_INLINE bool IsCalleeFrameOf(uword fp, uword other_fp)
Definition: stack_frame.h:422
const char *const function_name
static int8_t data[kExtLength]
static void DoThrowNullError(Isolate *isolate, Thread *thread, Zone *zone, bool is_param)
const char * DeoptReasonToCString(ICData::DeoptReasonId deopt_reason)
void __tsan_release(void *addr)
DEFINE_LEAF_RUNTIME_ENTRY(void, StoreBufferBlockProcess, 1, Thread *thread)
void ReportImpossibleNullError(intptr_t cid, StackFrame *caller_frame, Thread *thread)
@ kAllFree
Definition: object.h:2940
void OnEveryRuntimeEntryCall(Thread *thread, const char *runtime_call_name, bool can_lazy_deopt)
const int kFpuRegisterSize
DECLARE_FLAG(bool, show_invisible_frames)
def call(args)
Definition: dom.py:159
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified vm service A custom Dart VM Service port The default is to pick a randomly available open port disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode disable vm service Disable mDNS Dart VM Service publication Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set endless trace Enable an endless trace buffer The default is a ring buffer This is useful when very old events need to viewed For during application launch Memory usage will continue to grow indefinitely however Start app with an specific route defined on the framework flutter assets Path to the Flutter assets directory enable service port Allow the VM service to fallback to automatic port selection if binding to a specified port fails trace Trace early application lifecycle Automatically switches to an endless trace buffer trace skia Filters out all Skia trace event categories except those that are specified in this comma separated list dump skp on shader Automatically dump the skp that triggers new shader compilations This is useful for writing custom ShaderWarmUp to reduce jank By this is not enabled to reduce the overhead purge persistent cache
Definition: switches.h:191
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified vm service A custom Dart VM Service port The default is to pick a randomly available open port disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode disable vm service Disable mDNS Dart VM Service publication Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set endless trace buffer
Definition: switches.h:126
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive mode
Definition: switches.h:228
std::function< void()> closure
Definition: closure.h:14
dst
Definition: cp.py:12
inst
Definition: malisc.py:37
SIN Vec< N, float > trunc(const Vec< N, float > &x)
Definition: SkVx.h:704
SI auto map(std::index_sequence< I... >, Fn &&fn, const Args &... args) -> skvx::Vec< sizeof...(I), decltype(fn(args[0]...))>
Definition: SkVx.h:680
SIN Vec< N, float > floor(const Vec< N, float > &x)
Definition: SkVx.h:703
SIN Vec< N, float > ceil(const Vec< N, float > &x)
Definition: SkVx.h:702
dest
Definition: zip.py:79
#define CHECK_STACK_ALIGNMENT
#define DEOPT_REASONS(V)
Definition: object.h:2493
#define Pp
Definition: globals.h:425
#define FALL_THROUGH
Definition: globals.h:15
#define Px
Definition: globals.h:410
#define DEBUG_ONLY(code)
Definition: globals.h:141
#define UNLIKELY(cond)
Definition: globals.h:261
#define Pd
Definition: globals.h:408
#define DEOPT_REASON_TO_TEXT(name)
#define TRACE_RUNTIME_CALL(format, name)
Definition: runtime_entry.h:76
#define END_LEAF_RUNTIME_ENTRY
intptr_t first_local_from_fp
Definition: frame_layout.h:37
intptr_t FrameSlotForVariableIndex(intptr_t index) const
Definition: stack_frame.cc:89
double fmod_ieee(double x, double y)
double atan2_ieee(double y, double x)