Flutter Engine
runtime_entry.cc

// Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/runtime_entry.h"

#include <memory>

#include "vm/code_descriptors.h"
#include "vm/code_patcher.h"
#include "vm/dart_api_impl.h"
#include "vm/dart_api_state.h"
#include "vm/dart_entry.h"
#include "vm/debugger.h"
#include "vm/exceptions.h"
#include "vm/flags.h"
#include "vm/heap/verifier.h"
#include "vm/instructions.h"
#include "vm/kernel_isolate.h"
#include "vm/message.h"
#include "vm/message_handler.h"
#include "vm/object_store.h"
#include "vm/parser.h"
#include "vm/resolver.h"
#include "vm/service_isolate.h"
#include "vm/stack_frame.h"
#include "vm/symbols.h"
#include "vm/thread.h"
#include "vm/zone_text_buffer.h"

#if !defined(DART_PRECOMPILED_RUNTIME)
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

namespace dart {

static constexpr intptr_t kDefaultMaxSubtypeCacheEntries =
    SubtypeTestCache::MaxEntriesForCacheAllocatedFor(1000);
DEFINE_FLAG(
    int,
    max_subtype_cache_entries,
    kDefaultMaxSubtypeCacheEntries,
    "Maximum number of subtype cache entries (number of checks cached).");
DEFINE_FLAG(
    int,
    regexp_optimization_counter_threshold,
    1000,
    "RegExp's usage-counter value before it is optimized, -1 means never");
DEFINE_FLAG(int,
            reoptimization_counter_threshold,
            4000,
            "Counter threshold before a function gets reoptimized.");
DEFINE_FLAG(bool,
            runtime_allocate_old,
            false,
            "Use old-space for allocation via runtime calls.");
DEFINE_FLAG(bool,
            runtime_allocate_spill_tlab,
            false,
            "Ensure results of allocation via runtime calls are not in an "
            "active TLAB.");
DEFINE_FLAG(bool, trace_deoptimization, false, "Trace deoptimization");
DEFINE_FLAG(bool,
            trace_deoptimization_verbose,
            false,
            "Trace deoptimization verbose");

DECLARE_FLAG(int, max_deoptimization_counter_threshold);
DECLARE_FLAG(bool, trace_compiler);
DECLARE_FLAG(bool, trace_optimizing_compiler);
DECLARE_FLAG(int, max_polymorphic_checks);

DEFINE_FLAG(bool, trace_osr, false, "Trace attempts at on-stack replacement.");

DEFINE_FLAG(int, gc_every, 0, "Run major GC on every N stack overflow checks");
DEFINE_FLAG(int,
            stacktrace_every,
            0,
            "Compute debugger stacktrace on every N stack overflow checks");
DEFINE_FLAG(charp,
            stacktrace_filter,
            nullptr,
            "Compute stacktrace in named function on stack overflow checks");
DEFINE_FLAG(charp,
            deoptimize_filter,
            nullptr,
            "Deoptimize in named function on stack overflow checks");
DEFINE_FLAG(charp,
            deoptimize_on_runtime_call_name_filter,
            nullptr,
            "Runtime call name filter for --deoptimize-on-runtime-call-every.");

DEFINE_FLAG(bool,
            unopt_monomorphic_calls,
            true,
            "Enable specializing monomorphic calls from unoptimized code.");
DEFINE_FLAG(bool,
            unopt_megamorphic_calls,
            true,
            "Enable specializing megamorphic calls from unoptimized code.");
DEFINE_FLAG(bool,
            verbose_stack_overflow,
            false,
            "Print additional details about stack overflow.");

DECLARE_FLAG(int, reload_every);
DECLARE_FLAG(bool, reload_every_optimized);
DECLARE_FLAG(bool, reload_every_back_off);

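// Range error check failed, or an index/length operand was not an integer.
// Arg0: length of the indexable object.
// Arg1: index to check.
// Return value: none, throws ArgumentError or RangeError.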
DEFINE_RUNTIME_ENTRY(RangeError, 2) {
  const Instance& length = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Instance& index = Instance::CheckedHandle(zone, arguments.ArgAt(1));
  if (!length.IsInteger()) {
    // Throw: new ArgumentError.value(length, "length", "is not an integer");
    const Array& args = Array::Handle(zone, Array::New(3));
    args.SetAt(0, length);
    args.SetAt(1, Symbols::Length());
    args.SetAt(2, String::Handle(zone, String::New("is not an integer")));
    Exceptions::ThrowByType(Exceptions::kArgumentValue, args);
  }
  if (!index.IsInteger()) {
    // Throw: new ArgumentError.value(index, "index", "is not an integer");
    const Array& args = Array::Handle(zone, Array::New(3));
    args.SetAt(0, index);
    args.SetAt(1, Symbols::Index());
    args.SetAt(2, String::Handle(zone, String::New("is not an integer")));
    Exceptions::ThrowByType(Exceptions::kArgumentValue, args);
  }
  // Throw: new RangeError.range(index, 0, length - 1, "length");
  const Array& args = Array::Handle(zone, Array::New(4));
  args.SetAt(0, index);
  args.SetAt(1, Integer::Handle(zone, Integer::New(0)));
  args.SetAt(
      2, Integer::Handle(
             zone, Integer::Cast(length).ArithmeticOp(
                       Token::kSUB, Integer::Handle(zone, Integer::New(1)))));
  args.SetAt(3, Symbols::Length());
  Exceptions::ThrowByType(Exceptions::kRange, args);
}

DEFINE_RUNTIME_ENTRY(RangeErrorUnboxedInt64, 0) {
  int64_t unboxed_length = thread->unboxed_int64_runtime_arg();
  int64_t unboxed_index = thread->unboxed_int64_runtime_second_arg();
  const auto& length = Integer::Handle(zone, Integer::New(unboxed_length));
  const auto& index = Integer::Handle(zone, Integer::New(unboxed_index));
  // Throw: new RangeError.range(index, 0, length - 1, "length");
  const Array& args = Array::Handle(zone, Array::New(4));
  args.SetAt(0, index);
  args.SetAt(1, Integer::Handle(zone, Integer::New(0)));
  args.SetAt(
      2, Integer::Handle(
             zone, Integer::Cast(length).ArithmeticOp(
                       Token::kSUB, Integer::Handle(zone, Integer::New(1)))));
  args.SetAt(3, Symbols::Length());
  Exceptions::ThrowByType(Exceptions::kRange, args);
}

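// A write to an unmodifiable or deeply immutable object was attempted.
// Arg0: receiver of the failed write.
// Arg1: kind of the failed writability check (see CheckWritableInstr::Kind).
// Return value: none, throws an error with the message built below.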
DEFINE_RUNTIME_ENTRY(WriteError, 2) {
  const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Smi& kind = Smi::CheckedHandle(zone, arguments.ArgAt(1));
  auto& message = String::Handle(zone);
  switch (kind.Value()) {
    case 0:  // CheckWritableInstr::Kind::kWriteUnmodifiableTypedData:
      message = String::NewFormatted("Cannot modify an unmodifiable list: %s",
                                     receiver.ToCString());
      break;
    case 1:  // CheckWritableInstr::Kind::kDeeplyImmutableAttachNativeFinalizer:
      message = String::NewFormatted(
          "Cannot attach NativeFinalizer to deeply immutable object: %s",
          receiver.ToCString());
      break;
  }
  const Array& args = Array::Handle(Array::New(1));
  args.SetAt(0, message);
  Exceptions::ThrowByType(Exceptions::kUnsupported, args);
}

static void NullErrorHelper(Zone* zone,
                            const String& selector,
                            bool is_param_name = false) {
  if (is_param_name) {
    const String& error = String::Handle(
        selector.IsNull()
            ? String::New("argument value is null")
            : String::NewFormatted("argument value for '%s' is null",
                                   selector.ToCString()));
    Exceptions::ThrowArgumentError(error);
    return;
  }

  // If the selector is null, this must be a null check that wasn't due to a
  // method invocation, so it was due to the null check operator.
  if (selector.IsNull()) {
    const Array& args = Array::Handle(zone, Array::New(4));
    args.SetAt(
        3, String::Handle(
               zone, String::New("Null check operator used on a null value")));
    Exceptions::ThrowByType(Exceptions::kCast, args);
    return;
  }

  InvocationMirror::Kind kind = InvocationMirror::kMethod;
  if (Field::IsGetterName(selector)) {
    kind = InvocationMirror::kGetter;
  } else if (Field::IsSetterName(selector)) {
    kind = InvocationMirror::kSetter;
  }

  const Smi& invocation_type = Smi::Handle(
      zone,
      Smi::New(InvocationMirror::EncodeType(InvocationMirror::kDynamic, kind)));

  const Array& args = Array::Handle(zone, Array::New(7));
  args.SetAt(0, /* instance */ Object::null_object());
  args.SetAt(1, selector);
  args.SetAt(2, invocation_type);
  args.SetAt(3, /* func_type_args_length */ Object::smi_zero());
  args.SetAt(4, /* func_type_args */ Object::null_object());
  args.SetAt(5, /* func_args */ Object::null_object());
  args.SetAt(6, /* func_arg_names */ Object::null_object());
  Exceptions::ThrowByType(Exceptions::kNoSuchMethod, args);
}

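// Shared helper for the null-error runtime entries below: recovers the member
// name involved in the null check by mapping the caller's return address
// through the Code's CodeSourceMap to an object-pool index, falling back to
// a placeholder when the information was optimized out.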
static void DoThrowNullError(Isolate* isolate,
                             Thread* thread,
                             Zone* zone,
                             bool is_param) {
  DartFrameIterator iterator(thread,
                             StackFrameIterator::kNoCrossThreadIteration);
  const StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame->IsDartFrame());
  const Code& code = Code::Handle(zone, caller_frame->LookupDartCode());
  const uword pc_offset = caller_frame->pc() - code.PayloadStart();

  if (FLAG_shared_slow_path_triggers_gc) {
    isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
  }

  const CodeSourceMap& map =
      CodeSourceMap::Handle(zone, code.code_source_map());
  String& member_name = String::Handle(zone);
  if (!map.IsNull()) {
    CodeSourceMapReader reader(map, Array::null_array(),
                               Function::null_function());
    const intptr_t name_index = reader.GetNullCheckNameIndexAt(pc_offset);
    RELEASE_ASSERT(name_index >= 0);

    const ObjectPool& pool = ObjectPool::Handle(zone, code.GetObjectPool());
    member_name ^= pool.ObjectAt(name_index);
  } else {
    member_name = Symbols::OptimizedOut().ptr();
  }

  NullErrorHelper(zone, member_name, is_param);
}

DEFINE_RUNTIME_ENTRY(NullError, 0) {
  DoThrowNullError(isolate, thread, zone, /*is_param=*/false);
}

// Collects information about pointers within the top |kMaxSlotsCollected|
// slots on the stack.
// TODO(b/179632636) This code was added in an attempt to better understand
// b/179632636 and should be removed in the future.
static void ReportImpossibleNullError(intptr_t cid,
                                      StackFrame* caller_frame,
                                      Thread* thread) {
  TextBuffer buffer(512);
  buffer.Printf("hit null error with cid %" Pd ", caller context: ", cid);

  const intptr_t kMaxSlotsCollected = 5;
  const auto slots = reinterpret_cast<ObjectPtr*>(caller_frame->sp());
  const intptr_t num_slots_in_frame =
      reinterpret_cast<ObjectPtr*>(caller_frame->fp()) - slots;
  const auto num_slots_to_collect =
      Utils::Minimum(kMaxSlotsCollected, num_slots_in_frame);
  bool comma = false;
  for (intptr_t i = 0; i < num_slots_to_collect; i++) {
    const ObjectPtr ptr = slots[i];
    buffer.Printf("%s[sp+%" Pd "] %" Pp "", comma ? ", " : "", i,
                  static_cast<uword>(ptr));
    if (ptr->IsHeapObject() &&
        (Dart::vm_isolate_group()->heap()->Contains(
             UntaggedObject::ToAddr(ptr)) ||
         thread->heap()->Contains(UntaggedObject::ToAddr(ptr)))) {
      buffer.Printf("(%" Pp ")", static_cast<uword>(ptr->untag()->tags_));
    }
    comma = true;
  }

  const char* message = buffer.buffer();
  FATAL("%s", message);
}

DEFINE_RUNTIME_ENTRY(DispatchTableNullError, 1) {
  const Smi& cid = Smi::CheckedHandle(zone, arguments.ArgAt(0));
  if (cid.Value() != kNullCid) {
    // We hit a null error, but the receiver is not actually null. This most
    // likely indicates memory corruption. Crash the VM but provide some
    // additional information about the arguments on the stack.
    DartFrameIterator iterator(thread,
                               StackFrameIterator::kNoCrossThreadIteration);
    StackFrame* caller_frame = iterator.NextFrame();
    RELEASE_ASSERT(caller_frame->IsDartFrame());
    ReportImpossibleNullError(cid.Value(), caller_frame, thread);
  }
  DoThrowNullError(isolate, thread, zone, /*is_param=*/false);
}

DEFINE_RUNTIME_ENTRY(NullErrorWithSelector, 1) {
  const String& selector = String::CheckedHandle(zone, arguments.ArgAt(0));
  NullErrorHelper(zone, selector);
}

DEFINE_RUNTIME_ENTRY(NullCastError, 0) {
  NullErrorHelper(zone, String::null_string());
}

DEFINE_RUNTIME_ENTRY(ArgumentNullError, 0) {
  DoThrowNullError(isolate, thread, zone, /*is_param=*/true);
}

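// An argument check failed for the given value.
// Arg0: value that failed the argument check.
// Return value: none, throws ArgumentError.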
DEFINE_RUNTIME_ENTRY(ArgumentError, 1) {
  const Instance& value = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  Exceptions::ThrowArgumentError(value);
}

DEFINE_RUNTIME_ENTRY(ArgumentErrorUnboxedInt64, 0) {
  // Unboxed value is passed through a dedicated slot in Thread.
  int64_t unboxed_value = arguments.thread()->unboxed_int64_runtime_arg();
  const Integer& value = Integer::Handle(zone, Integer::New(unboxed_value));
  Exceptions::ThrowArgumentError(value);
}

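// Converts the unboxed double passed in the dedicated Thread slot to an
// integer, first applying floor/ceil for the corresponding recognized methods.
// Arg0: recognized method kind (see MethodRecognizer).
// Return value: the resulting integer.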
DEFINE_RUNTIME_ENTRY(DoubleToInteger, 1) {
  // Unboxed value is passed through a dedicated slot in Thread.
  double val = arguments.thread()->unboxed_double_runtime_arg();
  const Smi& recognized_kind = Smi::CheckedHandle(zone, arguments.ArgAt(0));
  switch (recognized_kind.Value()) {
    case MethodRecognizer::kDoubleToInteger:
      break;
    case MethodRecognizer::kDoubleFloorToInt:
      val = floor(val);
      break;
    case MethodRecognizer::kDoubleCeilToInt:
      val = ceil(val);
      break;
    default:
      UNREACHABLE();
  }
  arguments.SetReturn(Integer::Handle(zone, DoubleToInteger(zone, val)));
}

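// Picks the space for objects allocated via these runtime entries: new-space
// by default, old-space when --runtime_allocate_old is set.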
static Heap::Space SpaceForRuntimeAllocation() {
  return UNLIKELY(FLAG_runtime_allocate_old) ? Heap::kOld : Heap::kNew;
}

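// Testing hook: with --runtime_allocate_spill_tlab, periodically abandon the
// active TLAB so that results of runtime allocations don't remain in one.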
static void RuntimeAllocationEpilogue(Thread* thread) {
  if (UNLIKELY(FLAG_runtime_allocate_spill_tlab)) {
    static RelaxedAtomic<uword> count = 0;
    if ((count++ % 10) == 0) {
      thread->heap()->new_space()->AbandonRemainingTLAB(thread);
    }
  }
}

// Allocation of a fixed length array of given element type.
// This runtime entry is never called for allocating a List of a generic type,
// because a prior run time call instantiates the element type if necessary.
// Arg0: array length.
// Arg1: array type arguments, i.e. vector of 1 type, the element type.
// Return value: newly allocated array of length arg0.
DEFINE_RUNTIME_ENTRY(AllocateArray, 2) {
  const Instance& length = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  if (!length.IsInteger()) {
    // Throw: new ArgumentError.value(length, "length", "is not an integer");
    const Array& args = Array::Handle(zone, Array::New(3));
    args.SetAt(0, length);
    args.SetAt(1, Symbols::Length());
    args.SetAt(2, String::Handle(zone, String::New("is not an integer")));
    Exceptions::ThrowByType(Exceptions::kArgumentValue, args);
  }
  const int64_t len = Integer::Cast(length).AsInt64Value();
  if (len < 0) {
    // Throw: new RangeError.range(length, 0, Array::kMaxElements, "length");
    Exceptions::ThrowRangeError("length", Integer::Cast(length), 0,
                                Array::kMaxElements);
  }
  if (len > Array::kMaxElements) {
    Exceptions::ThrowOOM();
  }

  const Array& array = Array::Handle(
      zone,
      Array::New(static_cast<intptr_t>(len), SpaceForRuntimeAllocation()));
  TypeArguments& element_type =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  // An Array is raw or takes one type argument. However, its type argument
  // vector may be longer than 1 due to a type optimization reusing the type
  // argument vector of the instantiator.
  ASSERT(element_type.IsNull() ||
         (element_type.Length() >= 1 && element_type.IsInstantiated()));
  array.SetTypeArguments(element_type);  // May be null.
  arguments.SetReturn(array);
  RuntimeAllocationEpilogue(thread);
}

DEFINE_RUNTIME_ENTRY(AllocateDouble, 0) {
  if (FLAG_shared_slow_path_triggers_gc) {
    isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
  }
  arguments.SetReturn(
      Object::Handle(zone, Double::New(0.0, SpaceForRuntimeAllocation())));
  RuntimeAllocationEpilogue(thread);
}

DEFINE_RUNTIME_ENTRY(BoxDouble, 0) {
  const double val = thread->unboxed_double_runtime_arg();
  arguments.SetReturn(
      Object::Handle(zone, Double::New(val, SpaceForRuntimeAllocation())));
  RuntimeAllocationEpilogue(thread);
}

DEFINE_RUNTIME_ENTRY(BoxFloat32x4, 0) {
  const auto val = thread->unboxed_simd128_runtime_arg();
  arguments.SetReturn(
      Object::Handle(zone, Float32x4::New(val, SpaceForRuntimeAllocation())));
  RuntimeAllocationEpilogue(thread);
}

DEFINE_RUNTIME_ENTRY(BoxFloat64x2, 0) {
  const auto val = thread->unboxed_simd128_runtime_arg();
  arguments.SetReturn(
      Object::Handle(zone, Float64x2::New(val, SpaceForRuntimeAllocation())));
  RuntimeAllocationEpilogue(thread);
}

DEFINE_RUNTIME_ENTRY(AllocateMint, 0) {
  if (FLAG_shared_slow_path_triggers_gc) {
    isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
  }
  arguments.SetReturn(Object::Handle(
      zone, Integer::New(kMaxInt64, SpaceForRuntimeAllocation())));
  RuntimeAllocationEpilogue(thread);
}

DEFINE_RUNTIME_ENTRY(AllocateFloat32x4, 0) {
  if (FLAG_shared_slow_path_triggers_gc) {
    isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
  }
  arguments.SetReturn(Object::Handle(
      zone, Float32x4::New(0.0, 0.0, 0.0, 0.0, SpaceForRuntimeAllocation())));
  RuntimeAllocationEpilogue(thread);
}

DEFINE_RUNTIME_ENTRY(AllocateFloat64x2, 0) {
  if (FLAG_shared_slow_path_triggers_gc) {
    isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
  }
  arguments.SetReturn(Object::Handle(
      zone, Float64x2::New(0.0, 0.0, SpaceForRuntimeAllocation())));
  RuntimeAllocationEpilogue(thread);
}

DEFINE_RUNTIME_ENTRY(AllocateInt32x4, 0) {
  if (FLAG_shared_slow_path_triggers_gc) {
    isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
  }
  arguments.SetReturn(Object::Handle(
      zone, Int32x4::New(0, 0, 0, 0, SpaceForRuntimeAllocation())));
  RuntimeAllocationEpilogue(thread);
}

// Allocate typed data array of given class id and length.
// Arg0: class id.
// Arg1: number of elements.
// Return value: newly allocated typed data array.
DEFINE_RUNTIME_ENTRY(AllocateTypedData, 2) {
  const intptr_t cid = Smi::CheckedHandle(zone, arguments.ArgAt(0)).Value();
  const auto& length = Instance::CheckedHandle(zone, arguments.ArgAt(1));
  if (!length.IsInteger()) {
    const Array& args = Array::Handle(zone, Array::New(1));
    args.SetAt(0, length);
    Exceptions::ThrowByType(Exceptions::kArgument, args);
  }
  const int64_t len = Integer::Cast(length).AsInt64Value();
  const intptr_t max = TypedData::MaxElements(cid);
  if (len < 0) {
    Exceptions::ThrowRangeError("length", Integer::Cast(length), 0, max);
  } else if (len > max) {
    Exceptions::ThrowOOM();
  }
  const auto& typed_data =
      TypedData::Handle(zone, TypedData::New(cid, static_cast<intptr_t>(len),
                                             SpaceForRuntimeAllocation()));
  arguments.SetReturn(typed_data);
  RuntimeAllocationEpilogue(thread);
}

// Helper returning the token position of the Dart caller.
static TokenPosition GetCallerLocation() {
  DartFrameIterator iterator(Thread::Current(),
                             StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame != nullptr);
  return caller_frame->GetTokenPos();
}

// Result of an invoke may be an unhandled exception, in which case we
// rethrow it.
static void ThrowIfError(const Object& result) {
  if (!result.IsNull() && result.IsError()) {
    Exceptions::PropagateError(Error::Cast(result));
  }
}

// Allocate a new object.
// Arg0: class of the object that needs to be allocated.
// Arg1: type arguments of the object that needs to be allocated.
// Return value: newly allocated object.
DEFINE_RUNTIME_ENTRY(AllocateObject, 2) {
  const Class& cls = Class::CheckedHandle(zone, arguments.ArgAt(0));
  ASSERT(cls.is_allocate_finalized());
  const Instance& instance = Instance::Handle(
      zone, Instance::NewAlreadyFinalized(cls, SpaceForRuntimeAllocation()));
  if (cls.NumTypeArguments() == 0) {
    // No type arguments required for a non-parameterized type.
    ASSERT(Instance::CheckedHandle(zone, arguments.ArgAt(1)).IsNull());
  } else {
    const auto& type_arguments =
        TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
    // Unless null (for a raw type), the type argument vector may be longer
    // than necessary due to a type optimization reusing the type argument
    // vector of the instantiator.
    ASSERT(type_arguments.IsNull() ||
           (type_arguments.IsInstantiated() &&
            (type_arguments.Length() >= cls.NumTypeArguments())));
    instance.SetTypeArguments(type_arguments);
  }
  arguments.SetReturn(instance);
  RuntimeAllocationEpilogue(thread);
}

DEFINE_LEAF_RUNTIME_ENTRY(uword /*ObjectPtr*/,
                          EnsureRememberedAndMarkingDeferred,
                          2,
                          uword /*ObjectPtr*/ object_in,
                          Thread* thread) {
  ObjectPtr object = static_cast<ObjectPtr>(object_in);

  // If we eliminate a generational write barrier on allocation of an object,
  // we need to ensure it's either a new-space object or it has been added to
  // the remembered set.
  //
  // NOTE: We use static_cast<>() instead of ::RawCast() to avoid handle
  // allocations in debug mode. Handle allocations in leaf runtimes can cause
  // memory leaks because they will allocate into a handle scope from the next
  // outermost runtime code (to which the generated Dart code might not return
  // in a long time).
  bool add_to_remembered_set = true;
  if (object->IsNewObject()) {
    add_to_remembered_set = false;
  } else if (object->IsArray()) {
    const intptr_t length = Array::LengthOf(static_cast<ArrayPtr>(object));
    add_to_remembered_set =
        compiler::target::WillAllocateNewOrRememberedArray(length);
  } else if (object->IsContext()) {
    const intptr_t num_context_variables =
        Context::NumVariables(static_cast<ContextPtr>(object));
    add_to_remembered_set =
        compiler::target::WillAllocateNewOrRememberedContext(
            num_context_variables);
  }

  if (add_to_remembered_set) {
    object->untag()->EnsureInRememberedSet(thread);
  }

  // For incremental write barrier elimination, we need to ensure that the
  // allocation ends up in the new space or else the object needs to be added
  // to the deferred marking stack so it will be [re]scanned.
  if (thread->is_marking()) {
    thread->DeferredMarkingStackAddObject(object);
  }

  return static_cast<uword>(object);
}
END_LEAF_RUNTIME_ENTRY

// Instantiate type.
// Arg0: uninstantiated type.
// Arg1: instantiator type arguments.
// Arg2: function type arguments.
// Return value: instantiated type.
DEFINE_RUNTIME_ENTRY(InstantiateType, 3) {
  AbstractType& type = AbstractType::CheckedHandle(zone, arguments.ArgAt(0));
  const TypeArguments& instantiator_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  const TypeArguments& function_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  ASSERT(!type.IsNull());
  ASSERT(instantiator_type_arguments.IsNull() ||
         instantiator_type_arguments.IsInstantiated());
  ASSERT(function_type_arguments.IsNull() ||
         function_type_arguments.IsInstantiated());
  type = type.InstantiateFrom(instantiator_type_arguments,
                              function_type_arguments, kAllFree, Heap::kOld);
  ASSERT(!type.IsNull() && type.IsInstantiated());
  arguments.SetReturn(type);
}

// Instantiate type arguments.
// Arg0: uninstantiated type arguments.
// Arg1: instantiator type arguments.
// Arg2: function type arguments.
// Return value: instantiated type arguments.
DEFINE_RUNTIME_ENTRY(InstantiateTypeArguments, 3) {
  TypeArguments& type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(0));
  const TypeArguments& instantiator_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  const TypeArguments& function_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  ASSERT(!type_arguments.IsNull() && !type_arguments.IsInstantiated());
  ASSERT(instantiator_type_arguments.IsNull() ||
         instantiator_type_arguments.IsInstantiated());
  ASSERT(function_type_arguments.IsNull() ||
         function_type_arguments.IsInstantiated());
  // Code inlined in the caller should have optimized the case where the
  // instantiator can be reused as type argument vector.
  ASSERT(!type_arguments.IsUninstantiatedIdentity());
  type_arguments = type_arguments.InstantiateAndCanonicalizeFrom(
      instantiator_type_arguments, function_type_arguments);
  ASSERT(type_arguments.IsNull() || type_arguments.IsInstantiated());
  arguments.SetReturn(type_arguments);
}

// Helper routine for tracing a subtype check.
static void PrintSubtypeCheck(const AbstractType& subtype,
                              const AbstractType& supertype,
                              const bool result) {
  DartFrameIterator iterator(Thread::Current(),
                             StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame != nullptr);

  LogBlock lb;
  THR_Print("SubtypeCheck: '%s' %d %s '%s' %d (pc: %#" Px ").\n",
            subtype.NameCString(), subtype.type_class_id(),
            result ? "is" : "is !", supertype.NameCString(),
            supertype.type_class_id(), caller_frame->pc());

  const Function& function =
      Function::Handle(caller_frame->LookupDartFunction());
  if (function.HasSavedArgumentsDescriptor()) {
    const auto& args_desc_array = Array::Handle(function.saved_args_desc());
    const ArgumentsDescriptor args_desc(args_desc_array);
    THR_Print(" -> Function %s [%s]\n", function.ToFullyQualifiedCString(),
              args_desc.ToCString());
  } else {
    THR_Print(" -> Function %s\n", function.ToFullyQualifiedCString());
  }
}

// Check that one type is a subtype of another, instantiating the types first
// if necessary, and throw a dynamic type error if the check fails.
// Arg0: instantiator type arguments
// Arg1: function type arguments
// Arg2: type to be a subtype of the other
// Arg3: type to be a supertype of the other
// Arg4: variable name of the subtype parameter
// No return value.
DEFINE_RUNTIME_ENTRY(SubtypeCheck, 5) {
  const TypeArguments& instantiator_type_args =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(0));
  const TypeArguments& function_type_args =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(1));
  AbstractType& subtype = AbstractType::CheckedHandle(zone, arguments.ArgAt(2));
  AbstractType& supertype =
      AbstractType::CheckedHandle(zone, arguments.ArgAt(3));
  const String& dst_name = String::CheckedHandle(zone, arguments.ArgAt(4));

  ASSERT(!supertype.IsNull());
  ASSERT(!subtype.IsNull());

  // Now that AssertSubtype may be checking types only available at runtime,
  // we can't guarantee the supertype isn't the top type.
  if (supertype.IsTopTypeForSubtyping()) return;

  // The supertype or subtype may not be instantiated.
  if (AbstractType::InstantiateAndTestSubtype(
          &subtype, &supertype, instantiator_type_args, function_type_args)) {
    if (FLAG_trace_type_checks) {
      // The supertype and subtype are now instantiated. Subtype check passed.
      PrintSubtypeCheck(subtype, supertype, true);
    }
    return;
  }
  if (FLAG_trace_type_checks) {
    // The supertype and subtype are now instantiated. Subtype check failed.
    PrintSubtypeCheck(subtype, supertype, false);
  }

  // Throw a dynamic type error.
  const TokenPosition location = GetCallerLocation();
  Exceptions::CreateAndThrowTypeError(location, subtype, supertype, dst_name);
  UNREACHABLE();
}

// Allocate a new closure and initialize its function, context,
// instantiator type arguments and delayed type arguments fields.
// Arg0: function.
// Arg1: context.
// Arg2: instantiator type arguments.
// Arg3: delayed type arguments.
// Return value: newly allocated closure.
DEFINE_RUNTIME_ENTRY(AllocateClosure, 4) {
  const auto& function = Function::CheckedHandle(zone, arguments.ArgAt(0));
  const auto& context = Object::Handle(zone, arguments.ArgAt(1));
  const auto& instantiator_type_args =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  const auto& delayed_type_args =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
  const Closure& closure = Closure::Handle(
      zone, Closure::New(instantiator_type_args, Object::null_type_arguments(),
                         delayed_type_args, function, context,
                         SpaceForRuntimeAllocation()));
  arguments.SetReturn(closure);
  RuntimeAllocationEpilogue(thread);
}

// Allocate a new context large enough to hold the given number of variables.
// Arg0: number of variables.
// Return value: newly allocated context.
DEFINE_RUNTIME_ENTRY(AllocateContext, 1) {
  const Smi& num_variables = Smi::CheckedHandle(zone, arguments.ArgAt(0));
  const Context& context = Context::Handle(
      zone, Context::New(num_variables.Value(), SpaceForRuntimeAllocation()));
  arguments.SetReturn(context);
  RuntimeAllocationEpilogue(thread);
}

// Make a copy of the given context, including the values of the captured
// variables.
// Arg0: the context to be cloned.
// Return value: newly allocated context.
DEFINE_RUNTIME_ENTRY(CloneContext, 1) {
  const Context& ctx = Context::CheckedHandle(zone, arguments.ArgAt(0));
  Context& cloned_ctx = Context::Handle(
      zone, Context::New(ctx.num_variables(), SpaceForRuntimeAllocation()));
  cloned_ctx.set_parent(Context::Handle(zone, ctx.parent()));
  Object& inst = Object::Handle(zone);
  for (int i = 0; i < ctx.num_variables(); i++) {
    inst = ctx.At(i);
    cloned_ctx.SetAt(i, inst);
  }
  arguments.SetReturn(cloned_ctx);
  RuntimeAllocationEpilogue(thread);
}

// Allocate a new record instance.
// Arg0: record shape id.
// Return value: newly allocated record.
DEFINE_RUNTIME_ENTRY(AllocateRecord, 1) {
  const RecordShape shape(Smi::RawCast(arguments.ArgAt(0)));
  const Record& record =
      Record::Handle(zone, Record::New(shape, SpaceForRuntimeAllocation()));
  arguments.SetReturn(record);
  RuntimeAllocationEpilogue(thread);
}

// Allocate a new small record instance and initialize its fields.
// Arg0: record shape id.
// Arg1-Arg3: field values.
// Return value: newly allocated record.
DEFINE_RUNTIME_ENTRY(AllocateSmallRecord, 4) {
  const RecordShape shape(Smi::RawCast(arguments.ArgAt(0)));
  const auto& value0 = Instance::CheckedHandle(zone, arguments.ArgAt(1));
  const auto& value1 = Instance::CheckedHandle(zone, arguments.ArgAt(2));
  const auto& value2 = Instance::CheckedHandle(zone, arguments.ArgAt(3));
  const Record& record =
      Record::Handle(zone, Record::New(shape, SpaceForRuntimeAllocation()));
  const intptr_t num_fields = shape.num_fields();
  ASSERT(num_fields == 2 || num_fields == 3);
  record.SetFieldAt(0, value0);
  record.SetFieldAt(1, value1);
  if (num_fields > 2) {
    record.SetFieldAt(2, value2);
  }
  arguments.SetReturn(record);
  RuntimeAllocationEpilogue(thread);
}

// Allocate a SuspendState object.
// Arg0: frame size.
// Arg1: existing SuspendState object or function data.
// Return value: newly allocated object.
DEFINE_RUNTIME_ENTRY(AllocateSuspendState, 2) {
  const intptr_t frame_size =
      Smi::CheckedHandle(zone, arguments.ArgAt(0)).Value();
  const Object& previous_state = Object::Handle(zone, arguments.ArgAt(1));
  SuspendState& result = SuspendState::Handle(zone);
  if (previous_state.IsSuspendState()) {
    const auto& suspend_state = SuspendState::Cast(previous_state);
    const auto& function_data =
        Instance::Handle(zone, suspend_state.function_data());
    ObjectStore* object_store = thread->isolate_group()->object_store();
    if (function_data.GetClassId() ==
        Class::Handle(zone, object_store->async_star_stream_controller())
            .id()) {
      // Reset _AsyncStarStreamController.asyncStarBody to null in order
      // to create a new callback closure during next yield.
      // The new callback closure will capture the reallocated SuspendState.
      function_data.SetField(
          Field::Handle(
              zone,
              object_store->async_star_stream_controller_async_star_body()),
          Object::null_object());
    }
    result = SuspendState::New(frame_size, function_data,
                               SpaceForRuntimeAllocation());
    if (function_data.GetClassId() ==
        Class::Handle(zone, object_store->sync_star_iterator_class()).id()) {
      // Refresh _SyncStarIterator._state with the new SuspendState object.
      function_data.SetField(
          Field::Handle(zone, object_store->sync_star_iterator_state()),
          result);
    }
  } else {
    result = SuspendState::New(frame_size, Instance::Cast(previous_state),
                               SpaceForRuntimeAllocation());
  }
  arguments.SetReturn(result);
  RuntimeAllocationEpilogue(thread);
}

// Makes a copy of the given SuspendState object, including the payload frame.
// Arg0: the SuspendState object to be cloned.
// Return value: newly allocated object.
DEFINE_RUNTIME_ENTRY(CloneSuspendState, 1) {
  const SuspendState& src =
      SuspendState::CheckedHandle(zone, arguments.ArgAt(0));
  const SuspendState& dst = SuspendState::Handle(
      zone, SuspendState::Clone(thread, src, SpaceForRuntimeAllocation()));
  arguments.SetReturn(dst);
  RuntimeAllocationEpilogue(thread);
}

// Helper routine for tracing a type check.
static void PrintTypeCheck(const char* message,
                           const Instance& instance,
                           const AbstractType& type,
                           const TypeArguments& instantiator_type_arguments,
                           const TypeArguments& function_type_arguments,
                           const Bool& result) {
  DartFrameIterator iterator(Thread::Current(),
                             StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame != nullptr);

  const AbstractType& instance_type =
      AbstractType::Handle(instance.GetType(Heap::kNew));
  ASSERT(instance_type.IsInstantiated() ||
         (instance.IsClosure() && instance_type.IsInstantiated(kCurrentClass)));
  LogBlock lb;
  if (type.IsInstantiated()) {
    THR_Print("%s: '%s' %d %s '%s' %d (pc: %#" Px ").\n", message,
              instance_type.NameCString(), instance_type.type_class_id(),
              (result.ptr() == Bool::True().ptr()) ? "is" : "is !",
              type.NameCString(), type.type_class_id(), caller_frame->pc());
  } else {
    // Instantiate type before printing.
    const AbstractType& instantiated_type = AbstractType::Handle(
        type.InstantiateFrom(instantiator_type_arguments,
                             function_type_arguments, kAllFree, Heap::kOld));
    THR_Print("%s: '%s' %s '%s' instantiated from '%s' (pc: %#" Px ").\n",
              message, instance_type.NameCString(),
              (result.ptr() == Bool::True().ptr()) ? "is" : "is !",
              instantiated_type.NameCString(), type.NameCString(),
              caller_frame->pc());
  }
  const Function& function =
      Function::Handle(caller_frame->LookupDartFunction());
  if (function.HasSavedArgumentsDescriptor()) {
    const auto& args_desc_array = Array::Handle(function.saved_args_desc());
    const ArgumentsDescriptor args_desc(args_desc_array);
    THR_Print(" -> Function %s [%s]\n", function.ToFullyQualifiedCString(),
              args_desc.ToCString());
  } else {
    THR_Print(" -> Function %s\n", function.ToFullyQualifiedCString());
  }
}

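// On IA32, hash-based subtype test caches are not searched by the stubs, so
// the lookup is performed here in the runtime (see Instanceof and TypeCheck
// below) before falling back to the full type check.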
#if defined(TARGET_ARCH_IA32)
static BoolPtr CheckHashBasedSubtypeTestCache(
    Zone* zone,
    Thread* thread,
    const Instance& instance,
    const AbstractType& destination_type,
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    const SubtypeTestCache& cache) {
  ASSERT(cache.IsHash());
  // Record instances are not added to the cache as they don't have a valid
  // key (type of a record depends on types of all its fields).
  if (instance.IsRecord()) return Bool::null();
  Class& instance_class = Class::Handle(zone);
  if (instance.IsSmi()) {
    instance_class = Smi::Class();
  } else {
    instance_class = instance.clazz();
  }
  // If the type is uninstantiated and refers to parent function type
  // parameters, the function_type_arguments have been canonicalized
  // when concatenated.
  auto& instance_class_id_or_signature = Object::Handle(zone);
  auto& instance_type_arguments = TypeArguments::Handle(zone);
  auto& instance_parent_function_type_arguments = TypeArguments::Handle(zone);
  auto& instance_delayed_type_arguments = TypeArguments::Handle(zone);
  if (instance_class.IsClosureClass()) {
    const auto& closure = Closure::Cast(instance);
    const auto& function = Function::Handle(zone, closure.function());
    instance_class_id_or_signature = function.signature();
    instance_type_arguments = closure.instantiator_type_arguments();
    instance_parent_function_type_arguments = closure.function_type_arguments();
    instance_delayed_type_arguments = closure.delayed_type_arguments();
  } else {
    instance_class_id_or_signature = Smi::New(instance_class.id());
    if (instance_class.NumTypeArguments() > 0) {
      instance_type_arguments = instance.GetTypeArguments();
    }
  }

  intptr_t index = -1;
  auto& result = Bool::Handle(zone);
  if (cache.HasCheck(instance_class_id_or_signature, destination_type,
                     instance_type_arguments, instantiator_type_arguments,
                     function_type_arguments,
                     instance_parent_function_type_arguments,
                     instance_delayed_type_arguments, &index, &result)) {
    return result.ptr();
  }

  return Bool::null();
}
#endif  // defined(TARGET_ARCH_IA32)

// This updates the type test cache, an array containing 8 elements:
// - instance class (or function if the instance is a closure)
// - instance type arguments (null if the instance class is not generic)
// - instantiator type arguments (null if the type is instantiated)
// - function type arguments (null if the type is instantiated)
// - instance parent function type arguments (null if instance is not a closure)
// - instance delayed type arguments (null if instance is not a closure)
// - destination type (null if the type was known at compile time)
// - test result
// It can be applied to classes with type arguments in which case it contains
// just the result of the class subtype test, not including the evaluation of
// type arguments.
// This operation is currently very slow (lookup of code is not efficient yet).
static void UpdateTypeTestCache(
    Zone* zone,
    Thread* thread,
    const Instance& instance,
    const AbstractType& destination_type,
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    const Bool& result,
    const SubtypeTestCache& new_cache) {
  ASSERT(!new_cache.IsNull());
  ASSERT(destination_type.IsCanonical());
  ASSERT(instantiator_type_arguments.IsCanonical());
  ASSERT(function_type_arguments.IsCanonical());
  if (instance.IsRecord()) {
    // Do not add record instances to cache as they don't have a valid
    // key (type of a record depends on types of all its fields).
    if (FLAG_trace_type_checks) {
      THR_Print("Not updating subtype test cache for the record instance.\n");
    }
    return;
  }
  Class& instance_class = Class::Handle(zone);
  if (instance.IsSmi()) {
    instance_class = Smi::Class();
  } else {
    instance_class = instance.clazz();
  }
  // If the type is uninstantiated and refers to parent function type
  // parameters, the function_type_arguments have been canonicalized
  // when concatenated.
  auto& instance_class_id_or_signature = Object::Handle(zone);
  auto& instance_type_arguments = TypeArguments::Handle(zone);
  auto& instance_parent_function_type_arguments = TypeArguments::Handle(zone);
  auto& instance_delayed_type_arguments = TypeArguments::Handle(zone);
  if (instance_class.IsClosureClass()) {
    const auto& closure = Closure::Cast(instance);
    const auto& function = Function::Handle(zone, closure.function());
    instance_class_id_or_signature = function.signature();
    ASSERT(instance_class_id_or_signature.IsFunctionType());
    instance_type_arguments = closure.instantiator_type_arguments();
    instance_parent_function_type_arguments = closure.function_type_arguments();
    instance_delayed_type_arguments = closure.delayed_type_arguments();
    ASSERT(instance_class_id_or_signature.IsCanonical());
    ASSERT(instance_type_arguments.IsCanonical());
    ASSERT(instance_parent_function_type_arguments.IsCanonical());
    ASSERT(instance_delayed_type_arguments.IsCanonical());
  } else {
    instance_class_id_or_signature = Smi::New(instance_class.id());
    if (instance_class.NumTypeArguments() > 0) {
      instance_type_arguments = instance.GetTypeArguments();
      ASSERT(instance_type_arguments.IsCanonical());
    }
  }
  if (FLAG_trace_type_checks) {
    const auto& instance_class_name =
        String::Handle(zone, instance_class.Name());
    TextBuffer buffer(256);
    buffer.Printf("  Updating test cache %#" Px " with result %s for:\n",
                  static_cast<uword>(new_cache.ptr()), result.ToCString());
    if (instance.IsString()) {
      buffer.Printf("  instance: '%s'\n", instance.ToCString());
    } else {
      buffer.Printf("  instance: %s\n", instance.ToCString());
    }
    buffer.Printf("  class: %s (%" Pd ")\n", instance_class_name.ToCString(),
                  instance_class.id());
    buffer.Printf(
        "  raw entry: [ %#" Px ", %#" Px ", %#" Px ", %#" Px ", %#" Px
        ", %#" Px ", %#" Px ", %#" Px " ]\n",
        static_cast<uword>(instance_class_id_or_signature.ptr()),
        static_cast<uword>(instance_type_arguments.ptr()),
        static_cast<uword>(instantiator_type_arguments.ptr()),
        static_cast<uword>(function_type_arguments.ptr()),
        static_cast<uword>(instance_parent_function_type_arguments.ptr()),
        static_cast<uword>(instance_delayed_type_arguments.ptr()),
        static_cast<uword>(destination_type.ptr()),
        static_cast<uword>(result.ptr()));
    THR_Print("%s", buffer.buffer());
  }
  {
    SafepointMutexLocker ml(
        thread->isolate_group()->subtype_test_cache_mutex());
    const intptr_t len = new_cache.NumberOfChecks();
    if (len >= FLAG_max_subtype_cache_entries) {
      if (FLAG_trace_type_checks) {
        THR_Print("Not updating subtype test cache as its length reached %d\n",
                  FLAG_max_subtype_cache_entries);
      }
      return;
    }
    intptr_t colliding_index = -1;
    auto& old_result = Bool::Handle(zone);
    if (new_cache.HasCheck(
            instance_class_id_or_signature, destination_type,
            instance_type_arguments, instantiator_type_arguments,
            function_type_arguments, instance_parent_function_type_arguments,
            instance_delayed_type_arguments, &colliding_index, &old_result)) {
      if (FLAG_trace_type_checks) {
        TextBuffer buffer(256);
        buffer.Printf("  Collision for test cache %#" Px " at index %" Pd ":\n",
                      static_cast<uword>(new_cache.ptr()), colliding_index);
        buffer.Printf("  entry: ");
        new_cache.WriteEntryToBuffer(zone, &buffer, colliding_index, "  ");
        THR_Print("%s\n", buffer.buffer());
      }
      if (old_result.ptr() != result.ptr()) {
        FATAL("Existing subtype test cache entry has result %s, not %s",
              old_result.ToCString(), result.ToCString());
      }
      // Some other isolate might have updated the cache between the time the
      // entry was found missing and now.
      return;
    }
    const intptr_t new_index = new_cache.AddCheck(
        instance_class_id_or_signature, destination_type,
        instance_type_arguments, instantiator_type_arguments,
        function_type_arguments, instance_parent_function_type_arguments,
        instance_delayed_type_arguments, result);
    if (FLAG_trace_type_checks) {
      TextBuffer buffer(256);
      buffer.Printf("  Added new entry to test cache %#" Px " at index %" Pd
                    ":\n",
                    static_cast<uword>(new_cache.ptr()), new_index);
      buffer.Printf("  new entry: ");
      new_cache.WriteEntryToBuffer(zone, &buffer, new_index, "  ");
      THR_Print("%s\n", buffer.buffer());
    }
  }
}

// Check that the given instance is an instance of the given type.
// Tested instance may be null, because a null test cannot always be inlined,
// e.g. 'null is T' yields true if T = Null, but false if T = bool.
// Arg0: instance being checked.
// Arg1: type.
// Arg2: type arguments of the instantiator of the type.
// Arg3: type arguments of the function of the type.
// Arg4: SubtypeTestCache.
// Return value: true or false.
DEFINE_RUNTIME_ENTRY(Instanceof, 5) {
  const Instance& instance = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const AbstractType& type =
      AbstractType::CheckedHandle(zone, arguments.ArgAt(1));
  const TypeArguments& instantiator_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  const TypeArguments& function_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
  const SubtypeTestCache& cache =
      SubtypeTestCache::CheckedHandle(zone, arguments.ArgAt(4));
  ASSERT(type.IsFinalized());
  ASSERT(!type.IsDynamicType());  // No need to check assignment.
  ASSERT(!cache.IsNull());
#if defined(TARGET_ARCH_IA32)
  // Hash-based caches are still not handled by the stubs on IA32.
  if (cache.IsHash()) {
    const auto& result = Bool::Handle(
        zone, CheckHashBasedSubtypeTestCache(zone, thread, instance, type,
                                             instantiator_type_arguments,
                                             function_type_arguments, cache));
    if (!result.IsNull()) {
      // Early exit because an entry already exists in the cache.
      arguments.SetReturn(result);
      return;
    }
  }
#endif  // defined(TARGET_ARCH_IA32)
  const Bool& result = Bool::Get(instance.IsInstanceOf(
      type, instantiator_type_arguments, function_type_arguments));
  if (FLAG_trace_type_checks) {
    PrintTypeCheck("InstanceOf", instance, type, instantiator_type_arguments,
                   function_type_arguments, result);
  }
  UpdateTypeTestCache(zone, thread, instance, type, instantiator_type_arguments,
                      function_type_arguments, result, cache);
  arguments.SetReturn(result);
}

#if defined(TESTING)
// Used only in type_testing_stubs_test.cc. If DRT_TypeCheck is entered, then
// this flag is set to true.
bool TESTING_runtime_entered_on_TTS_invocation = false;
#endif

// Check that the type of the given instance is a subtype of the given type and
// can therefore be assigned.
// Tested instance may not be null, because a null test is always inlined.
// Arg0: instance being assigned.
// Arg1: type being assigned to.
// Arg2: type arguments of the instantiator of the type being assigned to.
// Arg3: type arguments of the function of the type being assigned to.
// Arg4: name of variable being assigned to.
// Arg5: SubtypeTestCache.
// Arg6: invocation mode (see TypeCheckMode)
// Return value: instance if a subtype, otherwise throw a TypeError.
DEFINE_RUNTIME_ENTRY(TypeCheck, 7) {
  const Instance& src_instance =
      Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const AbstractType& dst_type =
      AbstractType::CheckedHandle(zone, arguments.ArgAt(1));
  const TypeArguments& instantiator_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(2));
  const TypeArguments& function_type_arguments =
      TypeArguments::CheckedHandle(zone, arguments.ArgAt(3));
  String& dst_name = String::Handle(zone);
  dst_name ^= arguments.ArgAt(4);
  ASSERT(dst_name.IsNull() || dst_name.IsString());

  SubtypeTestCache& cache = SubtypeTestCache::Handle(zone);
  cache ^= arguments.ArgAt(5);
  ASSERT(cache.IsNull() || cache.IsSubtypeTestCache());

  const TypeCheckMode mode = static_cast<TypeCheckMode>(
      Smi::CheckedHandle(zone, arguments.ArgAt(6)).Value());

#if defined(TESTING)
  TESTING_runtime_entered_on_TTS_invocation = true;
#endif

#if defined(TARGET_ARCH_IA32)
  ASSERT(mode == kTypeCheckFromInline);
  // Hash-based caches are still not handled by the stubs on IA32.
  if (cache.IsHash()) {
    const auto& result = Bool::Handle(
        zone, CheckHashBasedSubtypeTestCache(
                  zone, thread, src_instance, dst_type,
                  instantiator_type_arguments, function_type_arguments, cache));
    if (!result.IsNull()) {
      // Early exit because an entry already exists in the cache.
      arguments.SetReturn(result);
      return;
    }
  }
#endif  // defined(TARGET_ARCH_IA32)

  // This is guaranteed on the calling side.
  ASSERT(!dst_type.IsDynamicType());

  const bool is_instance_of = src_instance.IsAssignableTo(
      dst_type, instantiator_type_arguments, function_type_arguments);

  if (FLAG_trace_type_checks) {
    PrintTypeCheck("TypeCheck", src_instance, dst_type,
                   instantiator_type_arguments, function_type_arguments,
                   Bool::Get(is_instance_of));
  }

  // Most paths through this runtime entry don't need to know what the
  // destination name was or if this was a dynamic assert assignable call,
  // so only walk the stack to find the stored destination name when necessary.
  auto resolve_dst_name = [&]() {
    if (!dst_name.IsNull()) return;
#if !defined(TARGET_ARCH_IA32)
    // Can only come here from type testing stub.
    ASSERT(mode != kTypeCheckFromInline);

    // Grab the [dst_name] from the pool. It's stored at one pool slot after
    // the subtype-test-cache.
    DartFrameIterator iterator(thread,
                               StackFrameIterator::kNoCrossThreadIteration);
    StackFrame* caller_frame = iterator.NextFrame();
    const Code& caller_code =
        Code::Handle(zone, caller_frame->LookupDartCode());
    const ObjectPool& pool =
        ObjectPool::Handle(zone, caller_code.GetObjectPool());
    TypeTestingStubCallPattern tts_pattern(caller_frame->pc());
    const intptr_t stc_pool_idx = tts_pattern.GetSubtypeTestCachePoolIndex();
    const intptr_t dst_name_idx = stc_pool_idx + 1;
    dst_name ^= pool.ObjectAt(dst_name_idx);
#else
    UNREACHABLE();
#endif
  };

  if (!is_instance_of) {
    resolve_dst_name();
    if (dst_name.ptr() ==
        Symbols::dynamic_assert_assignable_stc_check().ptr()) {
#if !defined(TARGET_ARCH_IA32)
      // Can only come here from type testing stub via dynamic AssertAssignable.
      ASSERT(mode != kTypeCheckFromInline);
#endif
      // This was a dynamic closure call where the destination name was not
      // known at compile-time. Thus, fetch the original arguments and arguments
      // descriptor and re-do the type check in the runtime, which causes the
      // error with the proper destination name to be thrown.
      DartFrameIterator iterator(thread,
                                 StackFrameIterator::kNoCrossThreadIteration);
      StackFrame* caller_frame = iterator.NextFrame();
      const auto& dispatcher =
          Function::Handle(zone, caller_frame->LookupDartFunction());
      ASSERT(dispatcher.IsInvokeFieldDispatcher());
      const auto& orig_arguments_desc =
          Array::Handle(zone, dispatcher.saved_args_desc());
      const ArgumentsDescriptor args_desc(orig_arguments_desc);
      const intptr_t arg_count = args_desc.CountWithTypeArgs();
      const auto& orig_arguments = Array::Handle(zone, Array::New(arg_count));
      auto& obj = Object::Handle(zone);
      for (intptr_t i = 0; i < arg_count; i++) {
        obj = *reinterpret_cast<ObjectPtr*>(
            ParamAddress(caller_frame->fp(), arg_count - i));
        orig_arguments.SetAt(i, obj);
      }
      const auto& receiver = Closure::CheckedHandle(
          zone, orig_arguments.At(args_desc.FirstArgIndex()));
      const auto& function = Function::Handle(zone, receiver.function());
      const auto& result = Object::Handle(
          zone, function.DoArgumentTypesMatch(orig_arguments, args_desc));
      if (result.IsError()) {
        Exceptions::PropagateError(Error::Cast(result));
      }
      // IsAssignableTo returned false, so we should have thrown a type
      // error in DoArgumentTypesMatch.
      UNREACHABLE();
    }

    ASSERT(!dst_name.IsNull());
    // Throw a dynamic type error.
    const TokenPosition location = GetCallerLocation();
    const auto& src_type =
        AbstractType::Handle(zone, src_instance.GetType(Heap::kNew));
    auto& reported_type = AbstractType::Handle(zone, dst_type.ptr());
    if (!reported_type.IsInstantiated()) {
      // Instantiate dst_type before reporting the error.
      reported_type = reported_type.InstantiateFrom(instantiator_type_arguments,
                                                    function_type_arguments,
                                                    kAllFree, Heap::kNew);
    }
    Exceptions::CreateAndThrowTypeError(location, src_type, reported_type,
                                        dst_name);
    UNREACHABLE();
  }

  bool should_update_cache = true;
#if !defined(TARGET_ARCH_IA32)
  bool would_update_cache_if_not_lazy = false;
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Checks against type parameters are done by loading the corresponding type
  // argument at runtime and calling the type argument's TTS. Thus, we install
  // specialized TTSes on the type argument, not the parameter itself.
  auto& tts_type = AbstractType::Handle(zone, dst_type.ptr());
  if (tts_type.IsTypeParameter()) {
    const auto& param = TypeParameter::Cast(tts_type);
    tts_type = param.GetFromTypeArguments(instantiator_type_arguments,
                                          function_type_arguments);
  }
  ASSERT(!tts_type.IsTypeParameter());

  if (mode == kTypeCheckFromLazySpecializeStub) {
    if (FLAG_trace_type_checks) {
      THR_Print("  Specializing type testing stub for %s\n",
                tts_type.ToCString());
    }
    const Code& code = Code::Handle(
        zone, TypeTestingStubGenerator::SpecializeStubFor(thread, tts_type));
    tts_type.SetTypeTestingStub(code);

    // Only create the cache if we failed to create a specialized TTS and doing
    // the same check would cause an update to the cache.
    would_update_cache_if_not_lazy =
        (!src_instance.IsNull() &&
         tts_type.type_test_stub() ==
             StubCode::DefaultNullableTypeTest().ptr()) ||
        tts_type.type_test_stub() == StubCode::DefaultTypeTest().ptr();
    should_update_cache = would_update_cache_if_not_lazy && cache.IsNull();
  }

  // Since dst_type is not a top type or type parameter, then the only default
  // stubs it can use are DefaultTypeTest or DefaultNullableTypeTest.
  if ((mode == kTypeCheckFromSlowStub) &&
      (tts_type.type_test_stub() != StubCode::DefaultNullableTypeTest().ptr() &&
       tts_type.type_test_stub() != StubCode::DefaultTypeTest().ptr())) {
    // The specialized type testing stub returned a false negative. That means
    // the specialization may have been generated using outdated cid ranges and
    // new classes appeared since the stub was generated. Try respecializing.
    if (FLAG_trace_type_checks) {
      THR_Print("  Rebuilding type testing stub for %s\n",
                tts_type.ToCString());
    }
    const auto& old_code = Code::Handle(zone, tts_type.type_test_stub());
    const auto& new_code = Code::Handle(
        zone, TypeTestingStubGenerator::SpecializeStubFor(thread, tts_type));
    ASSERT(old_code.ptr() != new_code.ptr());
    // A specialized stub should always respecialize to a non-default stub.
    ASSERT(new_code.ptr() != StubCode::DefaultNullableTypeTest().ptr() &&
           new_code.ptr() != StubCode::DefaultTypeTest().ptr());
    const auto& old_instructions =
        Instructions::Handle(old_code.instructions());
    const auto& new_instructions =
        Instructions::Handle(new_code.instructions());
    // Check if specialization produced exactly the same sequence of
    // instructions. If it did, then we have a false negative, which can
    // happen in some cases involving uninstantiated types. In these cases,
    // update the cache, because the only case in which these false negatives
    // could possibly turn into true positives is with reloads, which clear
    // all the SubtypeTestCaches.
    should_update_cache = old_instructions.Equals(new_instructions);
    if (FLAG_trace_type_checks) {
      THR_Print("  %s rebuilt type testing stub for %s\n",
                should_update_cache ? "Discarding" : "Installing",
                tts_type.ToCString());
    }
    if (!should_update_cache) {
      tts_type.SetTypeTestingStub(new_code);
    }
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
#endif  // !defined(TARGET_ARCH_IA32)

  if (should_update_cache) {
    if (cache.IsNull()) {
#if !defined(TARGET_ARCH_IA32)
      ASSERT(mode == kTypeCheckFromSlowStub ||
             (mode == kTypeCheckFromLazySpecializeStub &&
              would_update_cache_if_not_lazy));
      // We lazily create [SubtypeTestCache] for those call sites which actually
      // need one and will patch the pool entry.
      DartFrameIterator iterator(thread,
                                 StackFrameIterator::kNoCrossThreadIteration);
      StackFrame* caller_frame = iterator.NextFrame();
      const Code& caller_code =
          Code::Handle(zone, caller_frame->LookupDartCode());
      const ObjectPool& pool =
          ObjectPool::Handle(zone, caller_code.GetObjectPool());
      TypeTestingStubCallPattern tts_pattern(caller_frame->pc());
      const intptr_t stc_pool_idx = tts_pattern.GetSubtypeTestCachePoolIndex();
      // Ensure we do have a STC (lazily create it if not) and all threads use
      // the same STC.
      {
        SafepointMutexLocker ml(isolate->group()->subtype_test_cache_mutex());
        cache ^= pool.ObjectAt<std::memory_order_acquire>(stc_pool_idx);
        if (cache.IsNull()) {
          resolve_dst_name();
          // If this is a dynamic AssertAssignable check, then we must assume
          // all inputs may be needed, as the type may vary from call to call.
          const intptr_t num_inputs =
              dst_name.ptr() ==
                      Symbols::dynamic_assert_assignable_stc_check().ptr()
                  ? SubtypeTestCache::kMaxInputs
                  : SubtypeTestCache::UsedInputsForType(dst_type);
          cache = SubtypeTestCache::New(num_inputs);
          pool.SetObjectAt<std::memory_order_release>(stc_pool_idx, cache);
          if (FLAG_trace_type_checks) {
            THR_Print("  Installed new subtype test cache %#" Px " with %" Pd
                      " inputs at index %" Pd " of pool for %s\n",
                      static_cast<uword>(cache.ptr()), num_inputs, stc_pool_idx,
                      caller_code.ToCString());
          }
        }
      }
#else
      UNREACHABLE();
#endif
    }

    UpdateTypeTestCache(zone, thread, src_instance, dst_type,
                        instantiator_type_arguments, function_type_arguments,
                        Bool::True(), cache);
  }

  arguments.SetReturn(src_instance);
}

// Report that the type of the given object is not bool in conditional context.
// Throw an assertion error if the object is null. (cf. Boolean Conversion
// in the language specification.)
// Arg0: bad object.
// Return value: none, throws TypeError or AssertionError.
DEFINE_RUNTIME_ENTRY(NonBoolTypeError, 1) {
  const TokenPosition location = GetCallerLocation();
  const Instance& src_instance =
      Instance::CheckedHandle(zone, arguments.ArgAt(0));

  if (src_instance.IsNull()) {
    const Array& args = Array::Handle(zone, Array::New(5));
    args.SetAt(
        0, String::Handle(
               zone,
               String::New(
                   "Failed assertion: boolean expression must not be null")));

    // No source code for this assertion, set url to null.
    args.SetAt(1, String::Handle(zone, String::null()));
    args.SetAt(2, Object::smi_zero());
    args.SetAt(3, Object::smi_zero());
    args.SetAt(4, String::Handle(zone, String::null()));

    Exceptions::ThrowByType(Exceptions::kAssertion, args);
    UNREACHABLE();
  }

  ASSERT(!src_instance.IsBool());
  const Type& bool_interface = Type::Handle(Type::BoolType());
  const AbstractType& src_type =
      AbstractType::Handle(zone, src_instance.GetType(Heap::kNew));
  Exceptions::CreateAndThrowTypeError(location, src_type, bool_interface,
                                      Symbols::BooleanExpression());
  UNREACHABLE();
}

DEFINE_RUNTIME_ENTRY(Throw, 1) {
  const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  Exceptions::Throw(thread, exception);
}

DEFINE_RUNTIME_ENTRY(ReThrow, 3) {
  const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
  const Instance& stacktrace =
      Instance::CheckedHandle(zone, arguments.ArgAt(1));
  const Smi& bypass_debugger = Smi::CheckedHandle(zone, arguments.ArgAt(2));
  Exceptions::ReThrow(thread, exception, stacktrace,
                      bypass_debugger.Value() != 0);
}

// Patches static call in optimized code with the target's entry point.
// Compiles target if necessary.
DEFINE_RUNTIME_ENTRY(PatchStaticCall, 0) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  DartFrameIterator iterator(thread,
                             StackFrameIterator::kNoCrossThreadIteration);
  StackFrame* caller_frame = iterator.NextFrame();
  ASSERT(caller_frame != nullptr);
  const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
  ASSERT(!caller_code.IsNull());
  ASSERT(caller_code.is_optimized());
  const Function& target_function = Function::Handle(
      zone, caller_code.GetStaticCallTargetFunctionAt(caller_frame->pc()));
  const Code& target_code = Code::Handle(zone, target_function.EnsureHasCode());
  // Before patching verify that we are not repeatedly patching to the same
  // target.
  if (target_code.ptr() !=
      CodePatcher::GetStaticCallTargetAt(caller_frame->pc(), caller_code)) {
    GcSafepointOperationScope safepoint(thread);
    if (target_code.ptr() !=
        CodePatcher::GetStaticCallTargetAt(caller_frame->pc(), caller_code)) {
      CodePatcher::PatchStaticCallAt(caller_frame->pc(), caller_code,
                                     target_code);
      caller_code.SetStaticCallTargetCodeAt(caller_frame->pc(), target_code);
      if (FLAG_trace_patching) {
        THR_Print("PatchStaticCall: patching caller pc %#" Px
                  " to '%s' new entry point %#" Px " (%s)\n",
                  caller_frame->pc(), target_function.ToFullyQualifiedCString(),
                  target_code.EntryPoint(),
                  target_code.is_optimized() ? "optimized" : "unoptimized");
      }
    }
  }
  arguments.SetReturn(target_code);
#else
  UNREACHABLE();
#endif
}

#if defined(PRODUCT) || defined(DART_PRECOMPILED_RUNTIME)
DEFINE_RUNTIME_ENTRY(BreakpointRuntimeHandler, 0) {
  UNREACHABLE();
  return;
}
#else
1532// Gets called from debug stub when code reaches a breakpoint
1533// set on a runtime stub call.
1534DEFINE_RUNTIME_ENTRY(BreakpointRuntimeHandler, 0) {
1535 DartFrameIterator iterator(thread,
1536 StackFrameIterator::kNoCrossThreadIteration);
1537 StackFrame* caller_frame = iterator.NextFrame();
1538 ASSERT(caller_frame != nullptr);
1539 Code& orig_stub = Code::Handle(zone);
1540 orig_stub =
1541 isolate->group()->debugger()->GetPatchedStubAddress(caller_frame->pc());
1542 const Error& error =
1543 Error::Handle(zone, isolate->debugger()->PauseBreakpoint());
1544 ThrowIfError(error);
1545 arguments.SetReturn(orig_stub);
1546}
1547#endif
1548
1549DEFINE_RUNTIME_ENTRY(SingleStepHandler, 0) {
1550#if defined(PRODUCT) || defined(DART_PRECOMPILED_RUNTIME)
1551 UNREACHABLE();
1552#else
1553 const Error& error =
1554 Error::Handle(zone, isolate->debugger()->PauseStepping());
1555 ThrowIfError(error);
1556#endif
1557}
1558
1559// An instance call of the form o.f(...) could not be resolved. Check if
1560// there is a getter with the same name. If so, invoke it. If the value is
1561// a closure, invoke it with the given arguments. If the value is a
1562// non-closure, attempt to invoke "call" on it.
1563static bool ResolveCallThroughGetter(const Class& receiver_class,
1564 const String& target_name,
1565 const String& demangled,
1566 const Array& arguments_descriptor,
1567 Function* result) {
1568 const String& getter_name = String::Handle(Field::GetterName(demangled));
1569 const int kTypeArgsLen = 0;
1570 const int kNumArguments = 1;
1571 ArgumentsDescriptor args_desc(Array::Handle(
1572 ArgumentsDescriptor::NewBoxed(kTypeArgsLen, kNumArguments)));
1573 const Function& getter =
1574 Function::Handle(Resolver::ResolveDynamicForReceiverClass(
1575 receiver_class, getter_name, args_desc));
1576 if (getter.IsNull() || getter.IsMethodExtractor()) {
1577 return false;
1578 }
1579 // We do this on the target_name, _not_ on the demangled name, so that
1580 // FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher can detect dynamic
1581 // calls from the dyn: tag on the name of the dispatcher.
1582 const Function& target_function =
1583 Function::Handle(receiver_class.GetInvocationDispatcher(
1584 target_name, arguments_descriptor,
1585 UntaggedFunction::kInvokeFieldDispatcher, FLAG_lazy_dispatchers));
1586 ASSERT(!target_function.IsNull() || !FLAG_lazy_dispatchers);
1587 if (FLAG_trace_ic) {
1588 OS::PrintErr(
1589 "InvokeField IC miss: adding <%s> id:%" Pd " -> <%s>\n",
1590 receiver_class.ToCString(), receiver_class.id(),
1591 target_function.IsNull() ? "null" : target_function.ToCString());
1592 }
1593 *result = target_function.ptr();
1594 return true;
1595}
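// Editorial sketch (not part of the original source): for a receiver class
// that declares only a getter `foo`, a failed call `o.foo(1)` resolves via
// the path above into an invoke-field dispatcher, i.e. semantically:
//
//   var f = o.foo;  // run the getter
//   f(1);           // then invoke the result (or its `call`)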
1596
1597// Handle other invocations (implicit closures, noSuchMethod).
1598FunctionPtr InlineCacheMissHelper(const Class& receiver_class,
1599 const Array& args_descriptor,
1600 const String& target_name) {
1601 // Create a demangled version of the target_name, if necessary. This is used
1602 // for the field getter in ResolveCallThroughGetter and as the target name
1603 // for the NoSuchMethod dispatcher (if needed).
1604 const String* demangled = &target_name;
1605 if (Function::IsDynamicInvocationForwarderName(target_name)) {
1606 demangled = &String::Handle(
1607 Function::DemangleDynamicInvocationForwarderName(target_name));
1608 }
1609 const bool is_getter = Field::IsGetterName(*demangled);
1610 Function& result = Function::Handle();
1611 if (is_getter ||
1612 !ResolveCallThroughGetter(receiver_class, target_name, *demangled,
1613 args_descriptor, &result)) {
1614 ArgumentsDescriptor desc(args_descriptor);
1615 const Function& target_function =
1616 Function::Handle(receiver_class.GetInvocationDispatcher(
1617 *demangled, args_descriptor,
1618 UntaggedFunction::kNoSuchMethodDispatcher, FLAG_lazy_dispatchers));
1619 if (FLAG_trace_ic) {
1620 OS::PrintErr(
1621 "NoSuchMethod IC miss: adding <%s> id:%" Pd " -> <%s>\n",
1622 receiver_class.ToCString(), receiver_class.id(),
1623 target_function.IsNull() ? "null" : target_function.ToCString());
1624 }
1625 result = target_function.ptr();
1626 }
1627 // May be null if --no-lazy-dispatchers, in which case dispatch will be
1628 // handled by NoSuchMethodFromCallStub.
1629 ASSERT(!result.IsNull() || !FLAG_lazy_dispatchers);
1630 return result.ptr();
1631}
1632
1633#if !defined(DART_PRECOMPILED_RUNTIME)
1634static void TrySwitchInstanceCall(Thread* thread,
1635 StackFrame* caller_frame,
1636 const Code& caller_code,
1637 const Function& caller_function,
1638 const ICData& ic_data,
1639 const Function& target_function) {
1640 auto zone = thread->zone();
1641
1642 // Monomorphic/megamorphic calls only check the receiver CID.
1643 if (ic_data.NumArgsTested() != 1) return;
1644
1645 ASSERT(ic_data.rebind_rule() == ICData::kInstance);
1646
1647 // Monomorphic/megamorphic calls don't record exactness.
1648 if (ic_data.is_tracking_exactness()) return;
1649
1650#if !defined(PRODUCT)
1651 // Monomorphic/megamorphic do not check the isolate's stepping flag.
1652 if (thread->isolate()->has_attempted_stepping()) return;
1653#endif
1654
1655 // Monomorphic/megamorphic calls are only for unoptimized code.
1656 ASSERT(!caller_code.is_optimized());
1657
1658 // Code is detached from its function. This will prevent us from resetting
1659 // the switchable call later because resets are function-based and because
1660 // the ic_data_array belongs to the function instead of the code. This should
1661 // only happen because of reload, but it sometimes happens with KBC mixed
1662 // mode, probably through a race between foreground and background compilation.
1663 if (caller_function.unoptimized_code() != caller_code.ptr()) {
1664 return;
1665 }
1666#if !defined(PRODUCT)
1667 // Skip functions that contain breakpoints or when debugger is in single
1668 // stepping mode.
1669 if (thread->isolate_group()->debugger()->IsDebugging(thread,
1670 caller_function)) {
1671 return;
1672 }
1673#endif
1674
1675 const intptr_t num_checks = ic_data.NumberOfChecks();
1676
1677 // Monomorphic call.
1678 if (FLAG_unopt_monomorphic_calls && (num_checks == 1)) {
1679 // A call site in the monomorphic state does not load the arguments
1680 // descriptor, so do not allow transition to this state if the callee
1681 // needs it.
1682 if (target_function.PrologueNeedsArgumentsDescriptor()) {
1683 return;
1684 }
1685
1686 const Array& data = Array::Handle(zone, ic_data.entries());
1687 const Code& target = Code::Handle(zone, target_function.EnsureHasCode());
1688 CodePatcher::PatchInstanceCallAt(caller_frame->pc(), caller_code, data,
1689 target);
1690 if (FLAG_trace_ic) {
1691 OS::PrintErr("Instance call at %" Px
1692 " switching to monomorphic dispatch, %s\n",
1693 caller_frame->pc(), ic_data.ToCString());
1694 }
1695 return; // Success.
1696 }
1697
1698 // Megamorphic call.
1699 if (FLAG_unopt_megamorphic_calls &&
1700 (num_checks > FLAG_max_polymorphic_checks)) {
1701 const String& name = String::Handle(zone, ic_data.target_name());
1702 const Array& descriptor =
1703 Array::Handle(zone, ic_data.arguments_descriptor());
1704 const MegamorphicCache& cache = MegamorphicCache::Handle(
1705 zone, MegamorphicCacheTable::Lookup(thread, name, descriptor));
1706 ic_data.set_is_megamorphic(true);
1707 CodePatcher::PatchInstanceCallAt(caller_frame->pc(), caller_code, cache,
1708 StubCode::MegamorphicCall());
1709 if (FLAG_trace_ic) {
1710 OS::PrintErr("Instance call at %" Px
1711 " switching to megamorphic dispatch, %s\n",
1712 caller_frame->pc(), ic_data.ToCString());
1713 }
1714 return; // Success.
1715 }
1716}
1717#endif // !defined(DART_PRECOMPILED_RUNTIME)
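// Editorial summary (not part of the original source): the transitions chosen
// by TrySwitchInstanceCall for an unoptimized call site, in terms of the
// counts and flags used above:
//
//   num_checks == 1 -> monomorphic dispatch
//     (skipped if the callee's prologue needs the arguments descriptor)
//   1 < num_checks <= FLAG_max_polymorphic_checks -> keep ICData dispatch
//   num_checks > FLAG_max_polymorphic_checks -> megamorphic cache dispatch
//
// subject to --unopt-monomorphic-calls / --unopt-megamorphic-calls.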
1718
1719// Perform the subtype check and return a constant function based on the result.
1720static FunctionPtr ComputeTypeCheckTarget(const Instance& receiver,
1721 const AbstractType& type,
1722 const ArgumentsDescriptor& desc) {
1723 const bool result = receiver.IsInstanceOf(type, Object::null_type_arguments(),
1724 Object::null_type_arguments());
1725 ObjectStore* store = IsolateGroup::Current()->object_store();
1726 const Function& target =
1727 Function::Handle(result ? store->simple_instance_of_true_function()
1728 : store->simple_instance_of_false_function());
1729 ASSERT(!target.IsNull());
1730 return target.ptr();
1731}
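// Editorial note (not part of the original source): this lets a hot `x is T`
// site whose receiver class stays stable skip the full subtype test; the IC
// entry for that class id points straight at a function returning the
// precomputed true/false constant.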
1732
1733static FunctionPtr Resolve(
1734 Thread* thread,
1735 Zone* zone,
1736 const GrowableArray<const Instance*>& caller_arguments,
1737 const Class& receiver_class,
1738 const String& name,
1739 const Array& descriptor) {
1740 ASSERT(name.IsSymbol());
1741 auto& target_function = Function::Handle(zone);
1742 ArgumentsDescriptor args_desc(descriptor);
1743
1744 if (receiver_class.EnsureIsFinalized(thread) == Error::null()) {
1745 target_function = Resolver::ResolveDynamicForReceiverClass(receiver_class,
1746 name, args_desc);
1747 }
1748 if (caller_arguments.length() == 2 &&
1749 target_function.ptr() == thread->isolate_group()
1750 ->object_store()
1751 ->simple_instance_of_function()) {
1752 // Replace the target function with constant function.
1753 const AbstractType& type = AbstractType::Cast(*caller_arguments[1]);
1754 target_function =
1755 ComputeTypeCheckTarget(*caller_arguments[0], type, args_desc);
1756 }
1757
1758 if (target_function.IsNull()) {
1759 target_function = InlineCacheMissHelper(receiver_class, descriptor, name);
1760 }
1761 if (target_function.IsNull()) {
1762 ASSERT(!FLAG_lazy_dispatchers);
1763 }
1764
1765 return target_function.ptr();
1766}
1767
1768// Handles a static call in unoptimized code that has one argument type not
1769// seen before. Compile the target if necessary and update the ICData.
1770// Arg0: argument.
1771// Arg1: IC data object.
1772DEFINE_RUNTIME_ENTRY(StaticCallMissHandlerOneArg, 2) {
1773 const Instance& arg = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1774 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(1));
1775 // IC data for static call is prepopulated with the statically known target.
1776 ASSERT(ic_data.NumberOfChecksIs(1));
1777 const Function& target = Function::Handle(zone, ic_data.GetTargetAt(0));
1778 target.EnsureHasCode();
1779 ASSERT(!target.IsNull() && target.HasCode());
1780 ic_data.EnsureHasReceiverCheck(arg.GetClassId(), target, 1);
1781 if (FLAG_trace_ic) {
1782 DartFrameIterator iterator(thread,
1783 StackFrameIterator::kNoCrossThreadIteration);
1784 StackFrame* caller_frame = iterator.NextFrame();
1785 ASSERT(caller_frame != nullptr);
1786 OS::PrintErr("StaticCallMissHandler at %#" Px " target %s (%" Pd ")\n",
1787 caller_frame->pc(), target.ToCString(), arg.GetClassId());
1788 }
1789 arguments.SetReturn(target);
1790}
1791
1792// Handles a static call in unoptimized code that has two argument types not
1793// seen before. Compile the target if necessary and update the ICData.
1794// Arg0: argument 0.
1795// Arg1: argument 1.
1796// Arg2: IC data object.
1797DEFINE_RUNTIME_ENTRY(StaticCallMissHandlerTwoArgs, 3) {
1798 const Instance& arg0 = Instance::CheckedHandle(zone, arguments.ArgAt(0));
1799 const Instance& arg1 = Instance::CheckedHandle(zone, arguments.ArgAt(1));
1800 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(2));
1801 // IC data for static call is prepopulated with the statically known target.
1802 ASSERT(!ic_data.NumberOfChecksIs(0));
1803 const Function& target = Function::Handle(zone, ic_data.GetTargetAt(0));
1804 target.EnsureHasCode();
1805 GrowableArray<intptr_t> cids(2);
1806 cids.Add(arg0.GetClassId());
1807 cids.Add(arg1.GetClassId());
1808 ic_data.EnsureHasCheck(cids, target);
1809 if (FLAG_trace_ic) {
1810 DartFrameIterator iterator(thread,
1811 StackFrameIterator::kNoCrossThreadIteration);
1812 StackFrame* caller_frame = iterator.NextFrame();
1813 ASSERT(caller_frame != nullptr);
1814 OS::PrintErr("StaticCallMissHandler at %#" Px " target %s (%" Pd ", %" Pd
1815 ")\n",
1816 caller_frame->pc(), target.ToCString(), cids[0], cids[1]);
1817 }
1818 arguments.SetReturn(target);
1819}
1820
1821#if defined(DART_PRECOMPILED_RUNTIME)
1822
1823static bool IsSingleTarget(IsolateGroup* isolate_group,
1824 Zone* zone,
1825 intptr_t lower_cid,
1826 intptr_t upper_cid,
1827 const Function& target,
1828 const String& name) {
1829 Class& cls = Class::Handle(zone);
1830 ClassTable* table = isolate_group->class_table();
1831 Function& other_target = Function::Handle(zone);
1832 for (intptr_t cid = lower_cid; cid <= upper_cid; cid++) {
1833 if (!table->HasValidClassAt(cid)) continue;
1834 cls = table->At(cid);
1835 if (cls.is_abstract()) continue;
1836 if (!cls.is_allocated()) continue;
1837 other_target = Resolver::ResolveDynamicAnyArgs(zone, cls, name,
1838 /*allow_add=*/false);
1839 if (other_target.ptr() != target.ptr()) {
1840 return false;
1841 }
1842 }
1843 return true;
1844}
1845
1846class SavedUnlinkedCallMapKeyEqualsTraits : public AllStatic {
1847 public:
1848 static const char* Name() { return "SavedUnlinkedCallMapKeyEqualsTraits "; }
1849 static bool ReportStats() { return false; }
1850
1851 static bool IsMatch(const Object& key1, const Object& key2) {
1852 if (!key1.IsInteger() || !key2.IsInteger()) return false;
1853 return Integer::Cast(key1).Equals(Integer::Cast(key2));
1854 }
1855 static uword Hash(const Object& key) {
1856 return Integer::Cast(key).CanonicalizeHash();
1857 }
1858};
1859
1860using UnlinkedCallMap = UnorderedHashMap<SavedUnlinkedCallMapKeyEqualsTraits>;
1861
1862static void SaveUnlinkedCall(Zone* zone,
1863 Isolate* isolate,
1864 uword frame_pc,
1865 const UnlinkedCall& unlinked_call) {
1866 IsolateGroup* isolate_group = isolate->group();
1867
1868 SafepointMutexLocker ml(isolate_group->unlinked_call_map_mutex());
1869 if (isolate_group->saved_unlinked_calls() == Array::null()) {
1870 const auto& initial_map =
1871 Array::Handle(zone, HashTables::New<UnlinkedCallMap>(16, Heap::kOld));
1872 isolate_group->set_saved_unlinked_calls(initial_map);
1873 }
1874
1875 UnlinkedCallMap unlinked_call_map(zone,
1876 isolate_group->saved_unlinked_calls());
1877 const auto& pc = Integer::Handle(zone, Integer::NewFromUint64(frame_pc));
1878 // Some other isolate might have updated unlinked_call_map[pc] too, but
1879 // their update should be identical to ours.
1880 const auto& new_or_old_value = UnlinkedCall::Handle(
1881 zone, UnlinkedCall::RawCast(
1882 unlinked_call_map.InsertOrGetValue(pc, unlinked_call)));
1883 RELEASE_ASSERT(new_or_old_value.ptr() == unlinked_call.ptr());
1884 isolate_group->set_saved_unlinked_calls(unlinked_call_map.Release());
1885}
1886
1887static UnlinkedCallPtr LoadUnlinkedCall(Zone* zone,
1888 Isolate* isolate,
1889 uword pc) {
1890 IsolateGroup* isolate_group = isolate->group();
1891
1892 SafepointMutexLocker ml(isolate_group->unlinked_call_map_mutex());
1893 ASSERT(isolate_group->saved_unlinked_calls() != Array::null());
1894 UnlinkedCallMap unlinked_call_map(zone,
1895 isolate_group->saved_unlinked_calls());
1896
1897 const auto& pc_integer = Integer::Handle(zone, Integer::NewFromUint64(pc));
1898 const auto& unlinked_call = UnlinkedCall::Cast(
1899 Object::Handle(zone, unlinked_call_map.GetOrDie(pc_integer)));
1900 isolate_group->set_saved_unlinked_calls(unlinked_call_map.Release());
1901 return unlinked_call.ptr();
1902}
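// Editorial usage sketch (not part of the original source): the two helpers
// above form a pair keyed by the call's return address, roughly
//
//   SaveUnlinkedCall(zone, isolate, pc, unlinked);   // on leaving Unlinked
//   ...
//   unlinked = LoadUnlinkedCall(zone, isolate, pc);  // on a later miss at pc
//
// so the name/args-descriptor survive after the call site has been patched to
// a state that no longer stores them.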
1903
1904// NOTE: Right now we never delete [UnlinkedCall] objects. They are needed while
1905// a call site is in Unlinked/Monomorphic/MonomorphicSmiable/SingleTarget
1906// states.
1907//
1908// Theoretically we could free the [UnlinkedCall] object once we transition the
1909// call site to use ICData/MegamorphicCache, but that would require careful
1910// coordination between the deleter and a possible concurrent reader.
1911//
1912// To simplify the code we decided not to do that for now (only a very small
1913// fraction of call sites in AOT use switchable calls, and the
1914// name/args-descriptor objects are kept alive anyway, so freeing the
1915// [UnlinkedCall] objects would yield little memory savings).
1916
1917#endif // defined(DART_PRECOMPILED_RUNTIME)
1918
1919enum class MissHandler {
1920 kInlineCacheMiss,
1921 kSwitchableCallMiss,
1922 kFixCallersTargetMonomorphic,
1923};
1924
1925// Handles updating of type feedback and possible patching of instance calls.
1926//
1927// It works in 3 separate steps:
1928// - resolve the actual target
1929// - update type feedback & (optionally) perform call site transition
1930// - return the right values
1931//
1932// Depending on the JIT/AOT mode we obtain current and patch new (target, data)
1933// differently:
1934//
1935// - JIT calls must be patched with CodePatcher::PatchInstanceCallAt()
1936// - AOT calls must be patched with CodePatcher::PatchSwitchableCallAt()
1937//
1938// Independent of which miss handler was used or how we will return, we look at
1939// current (target, data) and see if we need to transition the call site to a
1940// new (target, data). We do this while holding `IG->patchable_call_mutex()`.
1941//
1942// Depending on which miss handler got called we might need to return
1943// differently:
1944//
1945// - SwitchableCallMiss will get a (stub, data) return value
1946// - InlineCache*Miss will get the target function as return value
1947//
1948class PatchableCallHandler {
1949 public:
1950 PatchableCallHandler(Thread* thread,
1951 const GrowableArray<const Instance*>& caller_arguments,
1952 MissHandler miss_handler,
1953 NativeArguments arguments,
1954 StackFrame* caller_frame,
1955 const Code& caller_code,
1956 const Function& caller_function)
1957 : isolate_(thread->isolate()),
1958 thread_(thread),
1959 zone_(thread->zone()),
1960 caller_arguments_(caller_arguments),
1961 miss_handler_(miss_handler),
1962 arguments_(arguments),
1963 caller_frame_(caller_frame),
1964 caller_code_(caller_code),
1965 caller_function_(caller_function),
1966 name_(String::Handle()),
1967 args_descriptor_(Array::Handle()) {
1968 // We only have two-arg IC calls in JIT mode.
1969 ASSERT(caller_arguments_.length() == 1 || !FLAG_precompiled_mode);
1970 }
1971
1972 void ResolveSwitchAndReturn(const Object& data);
1973
1974 private:
1975 FunctionPtr ResolveTargetFunction(const Object& data);
1976
1977#if defined(DART_PRECOMPILED_RUNTIME)
1978 void HandleMissAOT(const Object& old_data,
1979 uword old_entry,
1980 const Function& target_function);
1981
1982 void DoUnlinkedCallAOT(const UnlinkedCall& unlinked,
1983 const Function& target_function);
1984 void DoMonomorphicMissAOT(const Object& old_data,
1985 const Function& target_function);
1986 void DoSingleTargetMissAOT(const SingleTargetCache& data,
1987 const Function& target_function);
1988 void DoICDataMissAOT(const ICData& data, const Function& target_function);
1989 bool CanExtendSingleTargetRange(const String& name,
1990 const Function& old_target,
1991 const Function& target_function,
1992 intptr_t* lower,
1993 intptr_t* upper);
1994#else
1995 void HandleMissJIT(const Object& old_data,
1996 const Code& old_target,
1997 const Function& target_function);
1998
1999 void DoMonomorphicMissJIT(const Object& old_data,
2000 const Function& target_function);
2001 void DoICDataMissJIT(const ICData& data,
2002 const Object& old_data,
2003 const Function& target_function);
2004#endif // !defined(DART_PRECOMPILED_RUNTIME)
2005 void DoMegamorphicMiss(const MegamorphicCache& data,
2006 const Function& target_function);
2007
2008 void UpdateICDataWithTarget(const ICData& ic_data,
2009 const Function& target_function);
2010 void TrySwitch(const ICData& ic_data, const Function& target_function);
2011
2012 void ReturnAOT(const Code& stub, const Object& data);
2013 void ReturnJIT(const Code& stub, const Object& data, const Function& target);
2014 void ReturnJITorAOT(const Code& stub,
2015 const Object& data,
2016 const Function& target);
2017
2018 const Instance& receiver() { return *caller_arguments_[0]; }
2019
2020 bool should_consider_patching() {
2021 // In AOT we use switchable calls.
2022 if (FLAG_precompiled_mode) return true;
2023
2024 // In JIT instance calls use a different calling sequence in unoptimized vs
2025 // optimized code (see [FlowGraphCompiler::EmitInstanceCallJIT] vs
2026 // [FlowGraphCompiler::EmitOptimizedInstanceCall]).
2027 //
2028 // The [CodePatcher::GetInstanceCallAt], [CodePatcher::PatchInstanceCallAt]
2029 // only recognize unoptimized call pattern.
2030 //
2031 // So we will not try to switch optimized instance calls.
2032 return !caller_code_.is_optimized();
2033 }
2034
2035 ICDataPtr NewICData();
2036 ICDataPtr NewICDataWithTarget(intptr_t cid, const Function& target);
2037
2038 Isolate* isolate_;
2039 Thread* thread_;
2040 Zone* zone_;
2041 const GrowableArray<const Instance*>& caller_arguments_;
2042 MissHandler miss_handler_;
2043 NativeArguments arguments_;
2044 StackFrame* caller_frame_;
2045 const Code& caller_code_;
2046 const Function& caller_function_;
2047
2048 // Call-site information populated during resolution.
2049 String& name_;
2050 Array& args_descriptor_;
2051 bool is_monomorphic_hit_ = false;
2052};
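// Editorial overview (not part of the original source): the handler picks a
// miss routine from the class id of the data object currently installed at
// the call site, approximately:
//
//   kUnlinkedCallCid -> DoUnlinkedCallAOT (AOT)
//   kSmiCid / kMonomorphicSmiableCallCid -> DoMonomorphicMissAOT (AOT)
//   kSingleTargetCacheCid -> DoSingleTargetMissAOT (AOT)
//   kArrayCid -> DoMonomorphicMissJIT (JIT)
//   kICDataCid -> DoICDataMissAOT / DoICDataMissJIT
//   kMegamorphicCacheCid -> DoMegamorphicMiss (JIT and AOT)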
2053
2054#if defined(DART_PRECOMPILED_RUNTIME)
2055void PatchableCallHandler::DoUnlinkedCallAOT(const UnlinkedCall& unlinked,
2056 const Function& target_function) {
2057 const auto& ic_data = ICData::Handle(
2058 zone_,
2059 target_function.IsNull()
2060 ? NewICData()
2061 : NewICDataWithTarget(receiver().GetClassId(), target_function));
2062
2063 Object& object = Object::Handle(zone_, ic_data.ptr());
2064 Code& code = Code::Handle(zone_, StubCode::ICCallThroughCode().ptr());
2065 // If the target function has optional parameters or is generic, its
2066 // prologue requires ARGS_DESC_REG to be populated. Yet the switchable calls
2067 // do not populate that on the call site, which is why we don't transition
2068 // those call sites to monomorphic, but rather directly to call via stub
2069 // (which will populate the ARGS_DESC_REG from the ICData).
2070 //
2071 // Because of this we also don't generate monomorphic checks for those
2072 // functions.
2073 if (!target_function.IsNull() &&
2074 !target_function.PrologueNeedsArgumentsDescriptor()) {
2075 // Patch to monomorphic call.
2076 ASSERT(target_function.HasCode());
2077 const Code& target_code =
2078 Code::Handle(zone_, target_function.CurrentCode());
2079 const Smi& expected_cid =
2080 Smi::Handle(zone_, Smi::New(receiver().GetClassId()));
2081
2082 if (unlinked.can_patch_to_monomorphic()) {
2083 object = expected_cid.ptr();
2084 code = target_code.ptr();
2085 ASSERT(code.HasMonomorphicEntry());
2086 } else {
2087 object = MonomorphicSmiableCall::New(expected_cid.Value(), target_code);
2088 code = StubCode::MonomorphicSmiableCheck().ptr();
2089 }
2090 }
2091 CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, object,
2092 code);
2093
2094 // Return the ICData. The miss stub will jump to continue in the IC lookup
2095 // stub.
2096 ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
2097}
2098
2099bool PatchableCallHandler::CanExtendSingleTargetRange(
2100 const String& name,
2101 const Function& old_target,
2102 const Function& target_function,
2103 intptr_t* lower,
2104 intptr_t* upper) {
2105 if (old_target.ptr() != target_function.ptr()) {
2106 return false;
2107 }
2108 intptr_t unchecked_lower, unchecked_upper;
2109 if (receiver().GetClassId() < *lower) {
2110 unchecked_lower = receiver().GetClassId();
2111 unchecked_upper = *lower - 1;
2112 *lower = receiver().GetClassId();
2113 } else {
2114 unchecked_upper = receiver().GetClassId();
2115 unchecked_lower = *upper + 1;
2116 *upper = receiver().GetClassId();
2117 }
2118
2119 return IsSingleTarget(isolate_->group(), zone_, unchecked_lower,
2120 unchecked_upper, target_function, name);
2121}
2122#endif // defined(DART_PRECOMPILED_RUNTIME)
2123
2124#if defined(DART_PRECOMPILED_RUNTIME)
2125void PatchableCallHandler::DoMonomorphicMissAOT(
2126 const Object& old_data,
2127 const Function& target_function) {
2128 classid_t old_expected_cid;
2129 if (old_data.IsSmi()) {
2130 old_expected_cid = Smi::Cast(old_data).Value();
2131 } else {
2132 RELEASE_ASSERT(old_data.IsMonomorphicSmiableCall());
2133 old_expected_cid = MonomorphicSmiableCall::Cast(old_data).expected_cid();
2134 }
2135 const bool is_monomorphic_hit = old_expected_cid == receiver().GetClassId();
2136 const auto& old_receiver_class = Class::Handle(
2137 zone_, isolate_->group()->class_table()->At(old_expected_cid));
2138 const auto& old_target = Function::Handle(
2139 zone_, Resolve(thread_, zone_, caller_arguments_, old_receiver_class,
2140 name_, args_descriptor_));
2141
2142 const auto& ic_data = ICData::Handle(
2143 zone_, old_target.IsNull()
2144 ? NewICData()
2145 : NewICDataWithTarget(old_expected_cid, old_target));
2146
2147 if (is_monomorphic_hit) {
2148 // The site has just been updated to the monomorphic state with the same
2149 // class id - do nothing in that case: the stub will call through ic data.
2150 ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
2151 return;
2152 }
2153
2154 intptr_t lower = old_expected_cid;
2155 intptr_t upper = old_expected_cid;
2156 if (CanExtendSingleTargetRange(name_, old_target, target_function, &lower,
2157 &upper)) {
2158 const SingleTargetCache& cache =
2159 SingleTargetCache::Handle(zone_, SingleTargetCache::New());
2160 const Code& code = Code::Handle(zone_, target_function.CurrentCode());
2161 cache.set_target(code);
2162 cache.set_entry_point(code.EntryPoint());
2163 cache.set_lower_limit(lower);
2164 cache.set_upper_limit(upper);
2165 const Code& stub = StubCode::SingleTargetCall();
2166 CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, cache,
2167 stub);
2168 // Return the ICData. The miss stub will jump to continue in the IC call
2169 // stub.
2170 ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
2171 return;
2172 }
2173
2174 // Patch to call through stub.
2175 const Code& stub = StubCode::ICCallThroughCode();
2176 CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, ic_data,
2177 stub);
2178
2179 // Return the ICData. The miss stub will jump to continue in the IC lookup
2180 // stub.
2181 ReturnAOT(stub, ic_data);
2182}
2183#endif // defined(DART_PRECOMPILED_RUNTIME)
2184
2185#if !defined(DART_PRECOMPILED_RUNTIME)
2186void PatchableCallHandler::DoMonomorphicMissJIT(
2187 const Object& old_data,
2188 const Function& target_function) {
2189 // Monomorphic calls use the ICData::entries() as their data.
2190 const auto& old_ic_data_entries = Array::Cast(old_data);
2191 // Any non-empty ICData::entries() has a backref to its ICData.
2192 const auto& ic_data =
2193 ICData::Handle(zone_, ICData::ICDataOfEntriesArray(old_ic_data_entries));
2194
2195 // The target didn't change, so we can stay inside monomorphic state.
2196 if (ic_data.NumberOfChecksIs(1) &&
2197 (ic_data.GetReceiverClassIdAt(0) == receiver().GetClassId())) {
2198 // No need to update ICData - it's already up-to-date.
2199
2200 if (FLAG_trace_ic) {
2201 OS::PrintErr("Instance call at %" Px
2202 " updating code (old code was disabled)\n",
2203 caller_frame_->pc());
2204 }
2205
2206 // We stay in monomorphic state, patch the code object and reload the icdata
2207 // entries array.
2208 const auto& code = Code::Handle(zone_, target_function.EnsureHasCode());
2209 const auto& data = Object::Handle(zone_, ic_data.entries());
2210 CodePatcher::PatchInstanceCallAt(caller_frame_->pc(), caller_code_, data,
2211 code);
2212 ReturnJIT(code, data, target_function);
2213 return;
2214 }
2215
2216 ASSERT(ic_data.NumArgsTested() == 1);
2217 const Code& stub = ic_data.is_tracking_exactness()
2218 ? StubCode::OneArgCheckInlineCacheWithExactnessCheck()
2219 : StubCode::OneArgCheckInlineCache();
2220 if (FLAG_trace_ic) {
2221 OS::PrintErr("Instance call at %" Px
2222 " switching monomorphic to polymorphic dispatch, %s\n",
2223 caller_frame_->pc(), ic_data.ToCString());
2224 }
2225 CodePatcher::PatchInstanceCallAt(caller_frame_->pc(), caller_code_, ic_data,
2226 stub);
2227
2228 ASSERT(caller_arguments_.length() == 1);
2229 UpdateICDataWithTarget(ic_data, target_function);
2230 ASSERT(should_consider_patching());
2231 TrySwitchInstanceCall(thread_, caller_frame_, caller_code_, caller_function_,
2232 ic_data, target_function);
2233 ReturnJIT(stub, ic_data, target_function);
2234}
2235#endif // !defined(DART_PRECOMPILED_RUNTIME)
2236
2237#if defined(DART_PRECOMPILED_RUNTIME)
2238void PatchableCallHandler::DoSingleTargetMissAOT(
2239 const SingleTargetCache& data,
2240 const Function& target_function) {
2241 const Code& old_target_code = Code::Handle(zone_, data.target());
2242 const Function& old_target =
2243 Function::Handle(zone_, Function::RawCast(old_target_code.owner()));
2244
2245 // We lost the original ICData when we patched to the monomorphic case.
2246 const auto& ic_data = ICData::Handle(
2247 zone_,
2248 target_function.IsNull()
2249 ? NewICData()
2250 : NewICDataWithTarget(receiver().GetClassId(), target_function));
2251
2252 intptr_t lower = data.lower_limit();
2253 intptr_t upper = data.upper_limit();
2254 if (CanExtendSingleTargetRange(name_, old_target, target_function, &lower,
2255 &upper)) {
2256 data.set_lower_limit(lower);
2257 data.set_upper_limit(upper);
2258 // Return the ICData. The single target stub will jump to continue in the
2259 // IC call stub.
2260 ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
2261 return;
2262 }
2263
2264 // Call site is not single target, switch to call using ICData.
2265 const Code& stub = StubCode::ICCallThroughCode();
2266 CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_, ic_data,
2267 stub);
2268
2269 // Return the ICData. The single target stub will jump to continue in the
2270 // IC call stub.
2271 ReturnAOT(stub, ic_data);
2272}
2273#endif // defined(DART_PRECOMPILED_RUNTIME)
2274
2275#if defined(DART_PRECOMPILED_RUNTIME)
2276void PatchableCallHandler::DoICDataMissAOT(const ICData& ic_data,
2277 const Function& target_function) {
2278 const String& name = String::Handle(zone_, ic_data.target_name());
2279 const Class& cls = Class::Handle(zone_, receiver().clazz());
2280 ASSERT(!cls.IsNull());
2281 const Array& descriptor =
2282 Array::CheckedHandle(zone_, ic_data.arguments_descriptor());
2283 ArgumentsDescriptor args_desc(descriptor);
2284 if (FLAG_trace_ic || FLAG_trace_ic_miss_in_optimized) {
2285 OS::PrintErr("ICData miss, class=%s, function<%" Pd ">=%s\n",
2286 cls.ToCString(), args_desc.TypeArgsLen(), name.ToCString());
2287 }
2288
2289 if (target_function.IsNull()) {
2290 ReturnAOT(StubCode::NoSuchMethodDispatcher(), ic_data);
2291 return;
2292 }
2293
2294 const intptr_t number_of_checks = ic_data.NumberOfChecks();
2295
2296 if ((number_of_checks == 0) &&
2297 (!FLAG_precompiled_mode || ic_data.receiver_cannot_be_smi()) &&
2298 !target_function.PrologueNeedsArgumentsDescriptor()) {
2299 // This call site is unlinked: transition to a monomorphic direct call.
2300 // Note we cannot do this if the target has optional parameters because
2301 // the monomorphic direct call does not load the arguments descriptor.
2302 // We cannot do this if we are still in the middle of precompiling because
2303 // the monomorphic case hides a live instance selector from the
2304 // treeshaker.
2305 const Code& target_code =
2306 Code::Handle(zone_, target_function.EnsureHasCode());
2307 const Smi& expected_cid =
2308 Smi::Handle(zone_, Smi::New(receiver().GetClassId()));
2309 ASSERT(target_code.HasMonomorphicEntry());
2310 CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_,
2311 expected_cid, target_code);
2312 ReturnAOT(target_code, expected_cid);
2313 } else {
2314 ic_data.EnsureHasReceiverCheck(receiver().GetClassId(), target_function);
2315 if (number_of_checks > FLAG_max_polymorphic_checks) {
2316 // Switch to megamorphic call.
2317 const MegamorphicCache& cache = MegamorphicCache::Handle(
2318 zone_, MegamorphicCacheTable::Lookup(thread_, name, descriptor));
2319 const Code& stub = StubCode::MegamorphicCall();
2320
2321 CodePatcher::PatchSwitchableCallAt(caller_frame_->pc(), caller_code_,
2322 cache, stub);
2323 ReturnAOT(stub, cache);
2324 } else {
2325 ReturnAOT(StubCode::ICCallThroughCode(), ic_data);
2326 }
2327 }
2328}
2329#endif // defined(DART_PRECOMPILED_RUNTIME)
2330
2331#if !defined(DART_PRECOMPILED_RUNTIME)
2332void PatchableCallHandler::DoICDataMissJIT(const ICData& ic_data,
2333 const Object& old_code,
2334 const Function& target_function) {
2335 ASSERT(ic_data.NumArgsTested() == caller_arguments_.length());
2336
2337 if (ic_data.NumArgsTested() == 1) {
2338 ASSERT(old_code.ptr() == StubCode::OneArgCheckInlineCache().ptr() ||
2339 old_code.ptr() ==
2340 StubCode::OneArgCheckInlineCacheWithExactnessCheck().ptr() ||
2341 old_code.ptr() ==
2342 StubCode::OneArgOptimizedCheckInlineCache().ptr() ||
2343 old_code.ptr() ==
2344 StubCode::OneArgOptimizedCheckInlineCacheWithExactnessCheck()
2345 .ptr() ||
2346 old_code.ptr() == StubCode::ICCallBreakpoint().ptr() ||
2347 (old_code.IsNull() && !should_consider_patching()));
2348 UpdateICDataWithTarget(ic_data, target_function);
2349 if (should_consider_patching()) {
2350 TrySwitchInstanceCall(thread_, caller_frame_, caller_code_,
2351 caller_function_, ic_data, target_function);
2352 }
2353 const Code& stub = Code::Handle(
2354 zone_, ic_data.is_tracking_exactness()
2355 ? StubCode::OneArgCheckInlineCacheWithExactnessCheck().ptr()
2356 : StubCode::OneArgCheckInlineCache().ptr());
2357 ReturnJIT(stub, ic_data, target_function);
2358 } else {
2359 ASSERT(old_code.ptr() == StubCode::TwoArgsCheckInlineCache().ptr() ||
2360 old_code.ptr() == StubCode::SmiAddInlineCache().ptr() ||
2361 old_code.ptr() == StubCode::SmiLessInlineCache().ptr() ||
2362 old_code.ptr() == StubCode::SmiEqualInlineCache().ptr() ||
2363 old_code.ptr() ==
2364 StubCode::TwoArgsOptimizedCheckInlineCache().ptr() ||
2365 old_code.ptr() == StubCode::ICCallBreakpoint().ptr() ||
2366 (old_code.IsNull() && !should_consider_patching()));
2367 UpdateICDataWithTarget(ic_data, target_function);
2368 ReturnJIT(StubCode::TwoArgsCheckInlineCache(), ic_data, target_function);
2369 }
2370}
2371#endif // !defined(DART_PRECOMPILED_RUNTIME)
2372
2373void PatchableCallHandler::DoMegamorphicMiss(const MegamorphicCache& data,
2374 const Function& target_function) {
2375 const String& name = String::Handle(zone_, data.target_name());
2376 const Class& cls = Class::Handle(zone_, receiver().clazz());
2377 ASSERT(!cls.IsNull());
2378 const Array& descriptor =
2379 Array::CheckedHandle(zone_, data.arguments_descriptor());
2380 ArgumentsDescriptor args_desc(descriptor);
2381 if (FLAG_trace_ic || FLAG_trace_ic_miss_in_optimized) {
2382 OS::PrintErr("Megamorphic miss, class=%s, function<%" Pd ">=%s\n",
2383 cls.ToCString(), args_desc.TypeArgsLen(), name.ToCString());
2384 }
2385 if (target_function.IsNull()) {
2386 ReturnJITorAOT(StubCode::NoSuchMethodDispatcher(), data, target_function);
2387 return;
2388 }
2389
2390 // Insert function found into cache.
2391 const Smi& class_id = Smi::Handle(zone_, Smi::New(cls.id()));
2392 data.EnsureContains(class_id, target_function);
2393 ReturnJITorAOT(StubCode::MegamorphicCall(), data, target_function);
2394}
2395
2396void PatchableCallHandler::UpdateICDataWithTarget(
2397 const ICData& ic_data,
2398 const Function& target_function) {
2399 if (target_function.IsNull()) return;
2400
2401 // If, upon return of the runtime, we will invoke the target directly we have
2402 // to increment the call count here in the ICData.
2403 // If we instead only insert a new ICData entry and will return to the IC stub
2404 // which will call the target, the stub will take care of the increment.
2405 const bool call_target_directly =
2406 miss_handler_ == MissHandler::kInlineCacheMiss;
2407 const intptr_t invocation_count = call_target_directly ? 1 : 0;
2408
2409 if (caller_arguments_.length() == 1) {
2410 auto exactness = StaticTypeExactnessState::NotTracking();
2411#if !defined(DART_PRECOMPILED_RUNTIME)
2412 if (ic_data.is_tracking_exactness()) {
2413 exactness = receiver().IsNull()
2414 ? StaticTypeExactnessState::NotExact()
2415 : StaticTypeExactnessState::Compute(
2416 Type::Cast(AbstractType::Handle(
2417 ic_data.receivers_static_type())),
2418 receiver());
2419 }
2420#endif // !defined(DART_PRECOMPILED_RUNTIME)
2421 ic_data.EnsureHasReceiverCheck(receiver().GetClassId(), target_function,
2422 invocation_count, exactness);
2423 } else {
2424 GrowableArray<intptr_t> class_ids(caller_arguments_.length());
2425 ASSERT(ic_data.NumArgsTested() == caller_arguments_.length());
2426 for (intptr_t i = 0; i < caller_arguments_.length(); i++) {
2427 class_ids.Add(caller_arguments_[i]->GetClassId());
2428 }
2429 ic_data.EnsureHasCheck(class_ids, target_function, invocation_count);
2430 }
2431}
2432
2433void PatchableCallHandler::ReturnAOT(const Code& stub, const Object& data) {
2434 ASSERT(miss_handler_ == MissHandler::kSwitchableCallMiss);
2435 arguments_.SetArgAt(0, stub); // Second return value.
2436 arguments_.SetReturn(data);
2437}
2438
2439void PatchableCallHandler::ReturnJIT(const Code& stub,
2440 const Object& data,
2441 const Function& target) {
2442 // In JIT we can have two different miss handlers to which we return slightly
2443 // differently.
2444 switch (miss_handler_) {
2445 case MissHandler::kSwitchableCallMiss: {
2446 arguments_.SetArgAt(0, stub); // Second return value.
2447 arguments_.SetReturn(data);
2448 break;
2449 }
2450 case MissHandler::kFixCallersTargetMonomorphic: {
2451 arguments_.SetArgAt(1, data); // Second return value.
2452 arguments_.SetReturn(stub);
2453 break;
2454 }
2455 case MissHandler::kInlineCacheMiss: {
2456 arguments_.SetReturn(target);
2457 break;
2458 }
2459 }
2460}
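// Editorial note (not part of the original source): the three conventions
// above mirror what each calling stub expects, approximately:
//
//   kSwitchableCallMiss -> (data, stub): stub through out-arg 0
//   kFixCallersTargetMonomorphic -> (stub, data): data through out-arg 1
//   kInlineCacheMiss -> the target Function only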
2461
2462void PatchableCallHandler::ReturnJITorAOT(const Code& stub,
2463 const Object& data,
2464 const Function& target) {
2465#if defined(DART_PRECOMPILED_RUNTIME)
2466 ReturnAOT(stub, data);
2467#else
2468 ReturnJIT(stub, data, target);
2469#endif
2470}
2471
2472ICDataPtr PatchableCallHandler::NewICData() {
2473 return ICData::New(caller_function_, name_, args_descriptor_, DeoptId::kNone,
2474 /*num_args_tested=*/1, ICData::kInstance);
2475}
2476
2477ICDataPtr PatchableCallHandler::NewICDataWithTarget(intptr_t cid,
2478 const Function& target) {
2479 GrowableArray<intptr_t> cids(1);
2480 cids.Add(cid);
2481 return ICData::NewWithCheck(caller_function_, name_, args_descriptor_,
2482 DeoptId::kNone, /*num_args_tested=*/1,
2483 ICData::kInstance, &cids, target);
2484}
2485
2486FunctionPtr PatchableCallHandler::ResolveTargetFunction(const Object& data) {
2487 switch (data.GetClassId()) {
2488 case kUnlinkedCallCid: {
2489 const auto& unlinked_call = UnlinkedCall::Cast(data);
2490
2491#if defined(DART_PRECOMPILED_RUNTIME)
2492 // When transitioning out of UnlinkedCall to other states (e.g.
2493 // Monomorphic, MonomorphicSmiable, SingleTarget) we lose
2494 // name/arg-descriptor in AOT mode and cannot recover it.
2495 //
2496 // Even if we could recover an old target function (which was missed) -
2497 // which we cannot in AOT bare mode - we can still lose the name due to a
2498 // dyn:* call site potentially targeting non-dyn:* targets.
2499 //
2500 // => We will therefore retain the unlinked call here.
2501 //
2502 // In JIT mode we always use ICData from the call site, which has the
2503 // correct name/args-descriptor.
2504 SaveUnlinkedCall(zone_, isolate_, caller_frame_->pc(), unlinked_call);
2505#endif // defined(DART_PRECOMPILED_RUNTIME)
2506
2507 name_ = unlinked_call.target_name();
2508 args_descriptor_ = unlinked_call.arguments_descriptor();
2509 break;
2510 }
2511 case kMonomorphicSmiableCallCid:
2512 FALL_THROUGH;
2513#if defined(DART_PRECOMPILED_RUNTIME)
2514 case kSmiCid:
2515 FALL_THROUGH;
2516 case kSingleTargetCacheCid: {
2517 const auto& unlinked_call = UnlinkedCall::Handle(
2518 zone_, LoadUnlinkedCall(zone_, isolate_, caller_frame_->pc()));
2519 name_ = unlinked_call.target_name();
2520 args_descriptor_ = unlinked_call.arguments_descriptor();
2521 break;
2522 }
2523#else
2524 case kArrayCid: {
2525 // Monomorphic calls use the ICData::entries() as their data.
2526 const auto& ic_data_entries = Array::Cast(data);
2527 // Any non-empty ICData::entries() has a backref to it's ICData.
2528 const auto& ic_data =
2529 ICData::Handle(zone_, ICData::ICDataOfEntriesArray(ic_data_entries));
2530 args_descriptor_ = ic_data.arguments_descriptor();
2531 name_ = ic_data.target_name();
2532 break;
2533 }
2534#endif // defined(DART_PRECOMPILED_RUNTIME)
2535 case kICDataCid:
2536 FALL_THROUGH;
2537 case kMegamorphicCacheCid: {
2538 const CallSiteData& call_site_data = CallSiteData::Cast(data);
2539 name_ = call_site_data.target_name();
2540 args_descriptor_ = call_site_data.arguments_descriptor();
2541 break;
2542 }
2543 default:
2544 UNREACHABLE();
2545 }
2546 const Class& cls = Class::Handle(zone_, receiver().clazz());
2547 return Resolve(thread_, zone_, caller_arguments_, cls, name_,
2548 args_descriptor_);
2549}
2550
2551void PatchableCallHandler::ResolveSwitchAndReturn(const Object& old_data) {
2552 // Find out the actual target (which can be time consuming) without holding
2553 // any locks.
2554 const auto& target_function =
2555 Function::Handle(zone_, ResolveTargetFunction(old_data));
2556
2557 auto& data = Object::Handle(zone_);
2558
2559 // We ensure that any transition of a patchable call is done in an atomic
2560 // manner and that we always transition forward (e.g. Monomorphic ->
2561 // Polymorphic).
2562 //
2563 // Mutators are only stopped if we actually need to patch a patchable call.
2564 // We may not do that if we e.g. just add one more check to an ICData.
2565 SafepointMutexLocker ml(thread_->isolate_group()->patchable_call_mutex());
2566
2567#if defined(DART_PRECOMPILED_RUNTIME)
2568 data =
2569 CodePatcher::GetSwitchableCallDataAt(caller_frame_->pc(), caller_code_);
2570 uword target_entry = 0;
2571 DEBUG_ONLY(target_entry = CodePatcher::GetSwitchableCallTargetEntryAt(
2572 caller_frame_->pc(), caller_code_));
2573 HandleMissAOT(data, target_entry, target_function);
2574#else
2575 auto& code = Code::Handle(zone_);
2576 if (should_consider_patching()) {
2577 code ^= CodePatcher::GetInstanceCallAt(caller_frame_->pc(), caller_code_,
2578 &data);
2579 } else {
2580 ASSERT(old_data.IsICData() || old_data.IsMegamorphicCache());
2581 data = old_data.ptr();
2582 }
2583 HandleMissJIT(data, code, target_function);
2584#endif
2585}
2586
2587#if defined(DART_PRECOMPILED_RUNTIME)
2588
2589void PatchableCallHandler::HandleMissAOT(const Object& old_data,
2590 uword old_entry,
2591 const Function& target_function) {
2592 switch (old_data.GetClassId()) {
2593 case kUnlinkedCallCid:
2594 ASSERT(old_entry ==
2595 StubCode::SwitchableCallMiss().MonomorphicEntryPoint());
2596 DoUnlinkedCallAOT(UnlinkedCall::Cast(old_data), target_function);
2597 break;
2598 case kMonomorphicSmiableCallCid:
2599 ASSERT(old_entry ==
2600 StubCode::MonomorphicSmiableCheck().MonomorphicEntryPoint());
2601 FALL_THROUGH;
2602 case kSmiCid:
2603 DoMonomorphicMissAOT(old_data, target_function);
2604 break;
2605 case kSingleTargetCacheCid:
2606 ASSERT(old_entry == StubCode::SingleTargetCall().MonomorphicEntryPoint());
2607 DoSingleTargetMissAOT(SingleTargetCache::Cast(old_data), target_function);
2608 break;
2609 case kICDataCid:
2610 ASSERT(old_entry ==
2611 StubCode::ICCallThroughCode().MonomorphicEntryPoint());
2612 DoICDataMissAOT(ICData::Cast(old_data), target_function);
2613 break;
2614 case kMegamorphicCacheCid:
2615 ASSERT(old_entry == StubCode::MegamorphicCall().MonomorphicEntryPoint());
2616 DoMegamorphicMiss(MegamorphicCache::Cast(old_data), target_function);
2617 break;
2618 default:
2619 UNREACHABLE();
2620 }
2621}
2622
2623#else
2624
2625void PatchableCallHandler::HandleMissJIT(const Object& old_data,
2626 const Code& old_code,
2627 const Function& target_function) {
2628 switch (old_data.GetClassId()) {
2629 case kArrayCid:
2630 // ICData three-element array: Smi(receiver CID), Smi(count),
2631 // Function(target). It is the Array from ICData::entries_.
2632 DoMonomorphicMissJIT(old_data, target_function);
2633 break;
2634 case kICDataCid:
2635 DoICDataMissJIT(ICData::Cast(old_data), old_code, target_function);
2636 break;
2637 case kMegamorphicCacheCid:
2638 ASSERT(old_code.ptr() == StubCode::MegamorphicCall().ptr() ||
2639 (old_code.IsNull() && !should_consider_patching()));
2640 DoMegamorphicMiss(MegamorphicCache::Cast(old_data), target_function);
2641 break;
2642 default:
2643 UNREACHABLE();
2644 }
2645}
2646#endif // defined(DART_PRECOMPILED_RUNTIME)
2647
2648static void InlineCacheMissHandler(Thread* thread,
2649 Zone* zone,
2650 const GrowableArray<const Instance*>& args,
2651 const ICData& ic_data,
2652 NativeArguments native_arguments) {
2653#if !defined(DART_PRECOMPILED_RUNTIME)
2654 DartFrameIterator iterator(thread,
2655 StackFrameIterator::kNoCrossThreadIteration);
2656 StackFrame* caller_frame = iterator.NextFrame();
2657 const auto& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
2658 const auto& caller_function =
2659 Function::Handle(zone, caller_frame->LookupDartFunction());
2660
2661 PatchableCallHandler handler(thread, args, MissHandler::kInlineCacheMiss,
2662 native_arguments, caller_frame, caller_code,
2663 caller_function);
2664
2665 handler.ResolveSwitchAndReturn(ic_data);
2666#else
2667 UNREACHABLE();
2668#endif // !defined(DART_PRECOMPILED_RUNTIME)
2669}
2670
2671// Handles inline cache misses by updating the IC data array of the call site.
2672// Arg0: Receiver object.
2673// Arg1: IC data object.
2674// Returns: target function with compiled code or null.
2675// Modifies the instance call to hold the updated IC data array.
2676DEFINE_RUNTIME_ENTRY(InlineCacheMissHandlerOneArg, 2) {
2677 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
2678 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(1));
2679 RELEASE_ASSERT(!FLAG_precompiled_mode);
2680 GrowableArray<const Instance*> args(1);
2681 args.Add(&receiver);
2682 InlineCacheMissHandler(thread, zone, args, ic_data, arguments);
2683}
2684
2685// Handles inline cache misses by updating the IC data array of the call site.
2686// Arg0: Receiver object.
2687// Arg1: Argument after receiver.
2688// Arg2: IC data object.
2689// Returns: target function with compiled code or null.
2690// Modifies the instance call to hold the updated IC data array.
2691DEFINE_RUNTIME_ENTRY(InlineCacheMissHandlerTwoArgs, 3) {
2692 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
2693 const Instance& other = Instance::CheckedHandle(zone, arguments.ArgAt(1));
2694 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(2));
2695 RELEASE_ASSERT(!FLAG_precompiled_mode);
2696 GrowableArray<const Instance*> args(2);
2697 args.Add(&receiver);
2698 args.Add(&other);
2699 InlineCacheMissHandler(thread, zone, args, ic_data, arguments);
2700}
2701
2702// Handles the first use of an instance call.
2703// Arg1: Receiver.
2704// Arg0: Stub out.
2705// Returns: the ICData used to continue with the call.
2706DEFINE_RUNTIME_ENTRY(SwitchableCallMiss, 2) {
2707 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(1));
2708
2709 StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames, thread,
2710 StackFrameIterator::kNoCrossThreadIteration);
2711 StackFrame* exit_frame = iterator.NextFrame();
2712 ASSERT(exit_frame->IsExitFrame());
2713 StackFrame* miss_handler_frame = iterator.NextFrame();
2714 // This runtime entry can be called either from the miss stub or from the
2715 // switchable_call_miss "dart" stub/function set up in
2716 // [MegamorphicCacheTable::InitMissHandler].
2717 ASSERT(miss_handler_frame->IsStubFrame() ||
2718 miss_handler_frame->IsDartFrame());
2719 StackFrame* caller_frame = iterator.NextFrame();
2720 ASSERT(caller_frame->IsDartFrame());
2721 const Code& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
2722 const Function& caller_function =
2723 Function::Handle(zone, caller_frame->LookupDartFunction());
2724
2725 auto& old_data = Object::Handle(zone);
2726#if defined(DART_PRECOMPILED_RUNTIME)
2727 old_data =
2728 CodePatcher::GetSwitchableCallDataAt(caller_frame->pc(), caller_code);
2729#else
2730 CodePatcher::GetInstanceCallAt(caller_frame->pc(), caller_code, &old_data);
2731#endif
2732
2733 GrowableArray<const Instance*> caller_arguments(1);
2734 caller_arguments.Add(&receiver);
2735 PatchableCallHandler handler(thread, caller_arguments,
2736 MissHandler::kSwitchableCallMiss, arguments,
2737 caller_frame, caller_code, caller_function);
2738 handler.ResolveSwitchAndReturn(old_data);
2739}
2740
2741// Used to find the correct receiver and function to invoke or to fall back to
2742// invoking noSuchMethod when lazy dispatchers are disabled. Returns the
2743// result of the invocation or an Error.
2744static ObjectPtr InvokeCallThroughGetterOrNoSuchMethod(
2745 Thread* thread,
2746 Zone* zone,
2747 const Instance& receiver,
2748 const String& target_name,
2749 const Array& orig_arguments,
2750 const Array& orig_arguments_desc) {
2751 ASSERT(!FLAG_lazy_dispatchers);
2752 const bool is_dynamic_call =
2753 Function::IsDynamicInvocationForwarderName(target_name);
2754 String& demangled_target_name = String::Handle(zone, target_name.ptr());
2755 if (is_dynamic_call) {
2756 demangled_target_name =
2757 Function::DemangleDynamicInvocationForwarderName(target_name);
2758 }
2759
2760 Class& cls = Class::Handle(zone, receiver.clazz());
2761 Function& function = Function::Handle(zone);
2762
2763 // Dart distinguishes getters and regular methods, allows their calls to
2764 // mix with conversions, and its selectors are independent of arity. So do
2765 // a zigzagged lookup to see if this call failed because of an arity mismatch,
2766 // a need for conversion, or because there really is no such method.
2767
2768 const bool is_getter = Field::IsGetterName(demangled_target_name);
2769 if (is_getter) {
2770 // Tear-off of a method
2771 // o.foo (o.get:foo) failed, closurize o.foo() if it exists.
2772 const auto& function_name =
2773 String::Handle(zone, Field::NameFromGetter(demangled_target_name));
2774 while (!cls.IsNull()) {
2775 // We don't generate dyn:* forwarders for method extractors so there is no
2776 // need to try to find a dyn:get:foo first (see assertion below)
2777 if (function.IsNull()) {
2778 if (cls.EnsureIsFinalized(thread) == Error::null()) {
2779 function = Resolver::ResolveDynamicFunction(zone, cls, function_name);
2780 }
2781 }
2782 if (!function.IsNull()) {
2783#if !defined(DART_PRECOMPILED_RUNTIME)
2784 ASSERT(!kernel::NeedsDynamicInvocationForwarder(Function::Handle(
2785 function.GetMethodExtractor(demangled_target_name))));
2786#endif
2787 const Function& closure_function =
2788 Function::Handle(zone, function.ImplicitClosureFunction());
2789 const Object& result = Object::Handle(
2790 zone, closure_function.ImplicitInstanceClosure(receiver));
2791 return result.ptr();
2792 }
2793 cls = cls.SuperClass();
2794 }
2795
2796 if (receiver.IsRecord()) {
2797 const Record& record = Record::Cast(receiver);
2798 const intptr_t field_index =
2799 record.GetFieldIndexByName(thread, function_name);
2800 if (field_index >= 0) {
2801 return record.FieldAt(field_index);
2802 }
2803 }
2804
2805 // Fall through for noSuchMethod
2806 } else {
2807 // Call through field.
2808 // o.foo(...) failed: invoke noSuchMethod if foo exists but has the wrong
2809 // number of arguments, or try (o.foo).call(...).
2810
2811 if ((target_name.ptr() == Symbols::call().ptr()) && receiver.IsClosure()) {
2812 // Special case: closures are implemented with a call getter instead of a
2813 // call method and with lazy dispatchers the field-invocation-dispatcher
2814 // would perform the closure call.
2815 return DartEntry::InvokeClosure(thread, orig_arguments,
2816 orig_arguments_desc);
2817 }
2818
2819 // Dynamic call sites have to use the dynamic getter as well (if it was
2820 // created).
2821 const auto& getter_name =
2822 String::Handle(zone, Field::GetterName(demangled_target_name));
2823 const auto& dyn_getter_name = String::Handle(
2824 zone, is_dynamic_call
2825 ? Function::CreateDynamicInvocationForwarderName(getter_name)
2826 : getter_name.ptr());
2827 ArgumentsDescriptor args_desc(orig_arguments_desc);
2828 while (!cls.IsNull()) {
2829 // If there is a function with the target name but mismatched arguments
2830 // we need to call `receiver.noSuchMethod()`.
2831 if (cls.EnsureIsFinalized(thread) == Error::null()) {
2832 function = Resolver::ResolveDynamicFunction(zone, cls, target_name);
2833 }
2834 if (!function.IsNull()) {
2835 ASSERT(!function.AreValidArguments(args_desc, nullptr));
2836 break; // mismatch, invoke noSuchMethod
2837 }
2838 if (is_dynamic_call) {
2839 function =
2840 Resolver::ResolveDynamicFunction(zone, cls, demangled_target_name);
2841 if (!function.IsNull()) {
2842 ASSERT(!function.AreValidArguments(args_desc, nullptr));
2843 break; // mismatch, invoke noSuchMethod
2844 }
2845 }
2846
2847 // If there is a getter we need to call-through-getter.
2848 if (is_dynamic_call) {
2849 function = Resolver::ResolveDynamicFunction(zone, cls, dyn_getter_name);
2850 }
2851 if (function.IsNull()) {
2852 function = Resolver::ResolveDynamicFunction(zone, cls, getter_name);
2853 }
2854 if (!function.IsNull()) {
2855 const Array& getter_arguments = Array::Handle(Array::New(1));
2856 getter_arguments.SetAt(0, receiver);
2857 const Object& getter_result = Object::Handle(
2858 zone, DartEntry::InvokeFunction(function, getter_arguments));
2859 if (getter_result.IsError()) {
2860 return getter_result.ptr();
2861 }
2862 ASSERT(getter_result.IsNull() || getter_result.IsInstance());
2863
2864 orig_arguments.SetAt(args_desc.FirstArgIndex(), getter_result);
2865 return DartEntry::InvokeClosure(thread, orig_arguments,
2866 orig_arguments_desc);
2867 }
2868 cls = cls.SuperClass();
2869 }
2870
2871 if (receiver.IsRecord()) {
2872 const Record& record = Record::Cast(receiver);
2873 const intptr_t field_index =
2874 record.GetFieldIndexByName(thread, demangled_target_name);
2875 if (field_index >= 0) {
2876 const Object& getter_result =
2877 Object::Handle(zone, record.FieldAt(field_index));
2878 ASSERT(getter_result.IsNull() || getter_result.IsInstance());
2879 orig_arguments.SetAt(args_desc.FirstArgIndex(), getter_result);
2880 return DartEntry::InvokeClosure(thread, orig_arguments,
2881 orig_arguments_desc);
2882 }
2883 }
2884 }
2885
2886 const Object& result = Object::Handle(
2887 zone,
2888 DartEntry::InvokeNoSuchMethod(thread, receiver, demangled_target_name,
2889 orig_arguments, orig_arguments_desc));
2890 return result.ptr();
2891}
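// Editorial example (not part of the original source): for a closure-valued
// field, the zigzag above turns a failed method call into a getter invocation
// followed by a closure call, e.g. in Dart terms:
//
//   class C { final void Function(int) foo; C(this.foo); }
//   C(print).foo(1); // no foo(int) method: read `foo`, then call it with 1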
2892
2893// Invoke appropriate noSuchMethod or closure from getter.
2894// Arg0: receiver
2895// Arg1: ICData or MegamorphicCache
2896// Arg2: arguments descriptor array
2897// Arg3: arguments array
2898DEFINE_RUNTIME_ENTRY(NoSuchMethodFromCallStub, 4) {
2899 ASSERT(!FLAG_lazy_dispatchers);
2900 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
2901 const Object& ic_data_or_cache = Object::Handle(zone, arguments.ArgAt(1));
2902 const Array& orig_arguments_desc =
2903 Array::CheckedHandle(zone, arguments.ArgAt(2));
2904 const Array& orig_arguments = Array::CheckedHandle(zone, arguments.ArgAt(3));
2905 String& target_name = String::Handle(zone);
2906 if (ic_data_or_cache.IsICData()) {
2907 target_name = ICData::Cast(ic_data_or_cache).target_name();
2908 } else {
2909 ASSERT(ic_data_or_cache.IsMegamorphicCache());
2910 target_name = MegamorphicCache::Cast(ic_data_or_cache).target_name();
2911 }
2912
2913 const auto& result =
2914 Object::Handle(zone, InvokeCallThroughGetterOrNoSuchMethod(
2915 thread, zone, receiver, target_name,
2916 orig_arguments, orig_arguments_desc));
2917 ThrowIfError(result);
2918 arguments.SetReturn(result);
2919}
2920
2921// Invoke appropriate noSuchMethod function.
2922// Arg0: receiver
2923// Arg1: function
2924// Arg2: arguments descriptor array.
2925// Arg3: arguments array.
2926DEFINE_RUNTIME_ENTRY(NoSuchMethodFromPrologue, 4) {
2927 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
2928 const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(1));
2929 const Array& orig_arguments_desc =
2930 Array::CheckedHandle(zone, arguments.ArgAt(2));
2931 const Array& orig_arguments = Array::CheckedHandle(zone, arguments.ArgAt(3));
2932
2933 String& orig_function_name = String::Handle(zone);
2934 if ((function.kind() == UntaggedFunction::kClosureFunction) ||
2935 (function.kind() == UntaggedFunction::kImplicitClosureFunction)) {
2936 // For closures the function name is always 'call'. Replace it with the
2937 // name of the closurized function so that the exception contains more
2938 // relevant information.
2939 orig_function_name = function.QualifiedUserVisibleName();
2940 } else {
2941 orig_function_name = function.name();
2942 }
2943
2944 const Object& result = Object::Handle(
2945 zone, DartEntry::InvokeNoSuchMethod(thread, receiver, orig_function_name,
2946 orig_arguments, orig_arguments_desc));
2947 ThrowIfError(result);
2948 arguments.SetReturn(result);
2949}
2950
2951#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
2952// The following code is used to stress test
2953// - deoptimization
2954// - debugger stack tracing
2955// - garbage collection
2956// - hot reload
2957static void HandleStackOverflowTestCases(Thread* thread) {
2958 auto isolate = thread->isolate();
2959 auto isolate_group = thread->isolate_group();
2960
2961 if (FLAG_shared_slow_path_triggers_gc) {
2962 isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
2963 }
2964
2965 bool do_deopt = false;
2966 bool do_stacktrace = false;
2967 bool do_reload = false;
2968 bool do_gc = false;
2969 const intptr_t isolate_reload_every =
2970 isolate->group()->reload_every_n_stack_overflow_checks();
2971 if ((FLAG_deoptimize_every > 0) || (FLAG_stacktrace_every > 0) ||
2972 (FLAG_gc_every > 0) || (isolate_reload_every > 0)) {
2973 if (!Isolate::IsSystemIsolate(isolate)) {
2974 // TODO(turnidge): To make --deoptimize_every and
2975 // --stacktrace-every faster we could move this increment/test to
2976 // the generated code.
2977 int32_t count = thread->IncrementAndGetStackOverflowCount();
2978 if (FLAG_deoptimize_every > 0 && (count % FLAG_deoptimize_every) == 0) {
2979 do_deopt = true;
2980 }
2981 if (FLAG_stacktrace_every > 0 && (count % FLAG_stacktrace_every) == 0) {
2982 do_stacktrace = true;
2983 }
2984 if (FLAG_gc_every > 0 && (count % FLAG_gc_every) == 0) {
2985 do_gc = true;
2986 }
2987 if ((isolate_reload_every > 0) && (count % isolate_reload_every) == 0) {
2988 do_reload = isolate->group()->CanReload();
2989 }
2990 }
2991 }
2992 if ((FLAG_deoptimize_filter != nullptr) ||
2993 (FLAG_stacktrace_filter != nullptr) || (FLAG_reload_every != 0)) {
2994 DartFrameIterator iterator(thread,
2995 StackFrameIterator::kNoCrossThreadIteration);
2996 StackFrame* frame = iterator.NextFrame();
2997 ASSERT(frame != nullptr);
2998 Code& code = Code::Handle();
2999 Function& function = Function::Handle();
3000 code = frame->LookupDartCode();
3001 ASSERT(!code.IsNull());
3002 function = code.function();
3003 ASSERT(!function.IsNull());
3004 const char* function_name = nullptr;
3005 if ((FLAG_deoptimize_filter != nullptr) ||
3006 (FLAG_stacktrace_filter != nullptr)) {
3007 function_name = function.ToFullyQualifiedCString();
3008 ASSERT(function_name != nullptr);
3009 }
3010 if (!code.IsNull()) {
3011 if (!code.is_optimized() && FLAG_reload_every_optimized) {
3012 // Don't do the reload if we aren't inside optimized code.
3013 do_reload = false;
3014 }
3015 if (code.is_optimized() && FLAG_deoptimize_filter != nullptr &&
3016 strstr(function_name, FLAG_deoptimize_filter) != nullptr &&
3017 !function.ForceOptimize()) {
3018 OS::PrintErr("*** Forcing deoptimization (%s)\n",
3019 function.ToFullyQualifiedCString());
3020 do_deopt = true;
3021 }
3022 }
3023 if (FLAG_stacktrace_filter != nullptr &&
3024 strstr(function_name, FLAG_stacktrace_filter) != nullptr) {
3025 OS::PrintErr("*** Computing stacktrace (%s)\n",
3026 function.ToFullyQualifiedCString());
3027 do_stacktrace = true;
3028 }
3029 }
3030 if (do_deopt) {
3031 // TODO(turnidge): Consider using DeoptimizeAt instead.
3032 DeoptimizeFunctionsOnStack();
3033 }
3034 if (do_reload) {
3035 // Maybe adjust the rate of future reloads.
3036 isolate_group->MaybeIncreaseReloadEveryNStackOverflowChecks();
3037
3038 // Issue a reload.
3039 const char* script_uri = isolate_group->source()->script_uri;
3040 JSONStream js;
3041 const bool success =
3042 isolate_group->ReloadSources(&js, /*force_reload=*/true, script_uri);
3043 if (!success) {
3044 FATAL("*** Isolate reload failed:\n%s\n", js.ToCString());
3045 }
3046 }
3047 if (do_stacktrace) {
3048 String& var_name = String::Handle();
3049 Instance& var_value = Instance::Handle();
3050 DebuggerStackTrace* stack = isolate->debugger()->StackTrace();
3051 intptr_t num_frames = stack->Length();
3052 for (intptr_t i = 0; i < num_frames; i++) {
3053 ActivationFrame* frame = stack->FrameAt(i);
3054 int num_vars = 0;
3055 // Variable locations and counts are unknown when precompiling.
3056#if !defined(DART_PRECOMPILED_RUNTIME)
3057 if (!frame->function().ForceOptimize()) {
3058 // Ensure that we have unoptimized code.
3059 frame->function().EnsureHasCompiledUnoptimizedCode();
3060 num_vars = frame->NumLocalVariables();
3061 }
3062#endif
3063 TokenPosition unused = TokenPosition::kNoSource;
3064 for (intptr_t v = 0; v < num_vars; v++) {
3065 frame->VariableAt(v, &var_name, &unused, &unused, &unused, &var_value);
3066 }
3067 }
3068 if (FLAG_stress_async_stacks) {
3069 DebuggerStackTrace::CollectAsyncAwaiters();
3070 }
3071 }
3072 if (do_gc) {
3073 isolate->group()->heap()->CollectAllGarbage(GCReason::kDebugging);
3074 }
3075}
3076#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
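// Aside (annotation, not VM source): the stress triggers above all share one
// counter-modulus pattern; this hypothetical helper makes it explicit. With
// every=1000, the action fires on checks 1000, 2000, 3000, ...
static inline bool ShouldTriggerEveryN(int32_t count, int32_t every) {
  return every > 0 && (count % every) == 0;
}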
3077
3078#if !defined(DART_PRECOMPILED_RUNTIME)
3079static void HandleOSRRequest(Thread* thread) {
3080 auto isolate_group = thread->isolate_group();
3081 ASSERT(isolate_group->use_osr());
3082 DartFrameIterator iterator(thread,
3083 StackFrameIterator::kNoCrossThreadIteration);
3084 StackFrame* frame = iterator.NextFrame();
3085 ASSERT(frame != nullptr);
3086 const Code& code = Code::ZoneHandle(frame->LookupDartCode());
3087 ASSERT(!code.IsNull());
3088 ASSERT(!code.is_optimized());
3089 const Function& function = Function::Handle(code.function());
3090 ASSERT(!function.IsNull());
3091
3092 // If the code of the frame does not match the function's unoptimized code,
3093 // we bail out since the code was reset by an isolate reload.
3094 if (code.ptr() != function.unoptimized_code()) {
3095 return;
3096 }
3097
3098 // Since the code is referenced from the frame and the ZoneHandle,
3099 // it cannot have been removed from the function.
3100 ASSERT(function.HasCode());
3101 // Don't do OSR on intrinsified functions: The intrinsic code expects to be
3102 // called like a regular function and can't be entered via OSR.
3103 if (!Compiler::CanOptimizeFunction(thread, function) ||
3104 function.is_intrinsic()) {
3105 return;
3106 }
3107
3108 // The unoptimized code is on the stack and should never be detached from
3109 // the function at this point.
3110 ASSERT(function.unoptimized_code() != Object::null());
3111 intptr_t osr_id =
3112 Code::Handle(function.unoptimized_code()).GetDeoptIdForOsr(frame->pc());
3113 ASSERT(osr_id != Compiler::kNoOSRDeoptId);
3114 if (FLAG_trace_osr) {
3115 OS::PrintErr("Attempting OSR for %s at id=%" Pd ", count=%" Pd "\n",
3116 function.ToFullyQualifiedCString(), osr_id,
3117 function.usage_counter());
3118 }
3119
3120 // Since the code is referenced from the frame and the ZoneHandle,
3121 // it cannot have been removed from the function.
3122 const Object& result = Object::Handle(
3123 Compiler::CompileOptimizedFunction(thread, function, osr_id));
3124 ThrowIfError(result);
3125
3125
3126 if (!result.IsNull()) {
3127 const Code& code = Code::Cast(result);
3128 uword optimized_entry = code.EntryPoint();
3129 frame->set_pc(optimized_entry);
3130 frame->set_pc_marker(code.ptr());
3131 }
3132}
3133#endif // !defined(DART_PRECOMPILED_RUNTIME)
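// Aside (annotation, not VM source): a sketch with hypothetical types of what
// the successful OSR hand-off above does to the interrupted frame: redirect
// the continuation pc into the freshly compiled code and keep the pc marker
// in sync so stack walkers still resolve the frame to the right Code object.
struct OsrFrameSketch {
  uintptr_t pc;
  uintptr_t pc_marker;
};
static inline void RedirectToOsrCode(OsrFrameSketch* frame,
                                     uintptr_t osr_entry,
                                     uintptr_t code_marker) {
  frame->pc = osr_entry;
  frame->pc_marker = code_marker;
}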
3134
3135DEFINE_RUNTIME_ENTRY(InterruptOrStackOverflow, 0) {
3136#if defined(USING_SIMULATOR)
3137 uword stack_pos = Simulator::Current()->get_sp();
3138 // If the simulator was never called, it may return 0 as the value of SPREG.
3139 if (stack_pos == 0) {
3140 // Use any reasonable value which would not be treated
3141 // as a stack overflow.
3142 stack_pos = thread->saved_stack_limit();
3143 }
3144#else
3145 uword stack_pos = OSThread::GetCurrentStackPointer();
3146#endif
3147 // Always clear the stack overflow flags. They are meant for this
3148 // particular stack overflow runtime call and are not meant to
3149 // persist.
3150 uword stack_overflow_flags = thread->GetAndClearStackOverflowFlags();
3151
3152 // If an interrupt happens at the same time as a stack overflow, we
3153 // process the stack overflow now and leave the interrupt for next
3154 // time.
3155 if (!thread->os_thread()->HasStackHeadroom() ||
3156 IsCalleeFrameOf(thread->saved_stack_limit(), stack_pos)) {
3157 if (FLAG_verbose_stack_overflow) {
3158 OS::PrintErr("Stack overflow\n");
3159 OS::PrintErr(" Native SP = %" Px ", stack limit = %" Px "\n", stack_pos,
3160 thread->saved_stack_limit());
3161 OS::PrintErr("Call stack:\n");
3162 OS::PrintErr("size | frame\n");
3163 StackFrameIterator frames(ValidationPolicy::kDontValidateFrames, thread,
3164 StackFrameIterator::kNoCrossThreadIteration);
3165 uword fp = stack_pos;
3166 StackFrame* frame = frames.NextFrame();
3167 while (frame != nullptr) {
3168 uword delta = (frame->fp() - fp);
3169 fp = frame->fp();
3170 OS::PrintErr("%4" Pd " %s\n", delta, frame->ToCString());
3171 frame = frames.NextFrame();
3172 }
3173 }
3174
3175 // Use the preallocated stack overflow exception to avoid calling
3176 // into dart code.
3177 const Instance& exception =
3178 Instance::Handle(isolate->group()->object_store()->stack_overflow());
3179 Exceptions::Throw(thread, exception);
3180 UNREACHABLE();
3181 }
3182
3183#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
3184 HandleStackOverflowTestCases(thread);
3185#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
3186
3187 // Handle interrupts:
3188 // - store buffer overflow
3189 // - OOB message (vm-service or dart:isolate)
3190 // - marking ready for finalization
3191 const Error& error = Error::Handle(thread->HandleInterrupts());
3192 ThrowIfError(error);
3193
3194#if !defined(DART_PRECOMPILED_RUNTIME)
3195 if ((stack_overflow_flags & Thread::kOsrRequest) != 0) {
3196 HandleOSRRequest(thread);
3197 }
3198#else
3199 ASSERT((stack_overflow_flags & Thread::kOsrRequest) == 0);
3200#endif // !defined(DART_PRECOMPILED_RUNTIME)
3201}
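// Aside (annotation, not VM source): the overflow test above reduces to a
// pointer comparison on a downward-growing stack, as sketched by this
// hypothetical helper: the stack has overflowed once SP sinks below the
// saved limit.
static inline bool StackLimitExceeded(uintptr_t sp, uintptr_t limit) {
  return sp < limit;
}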
3202
3203DEFINE_RUNTIME_ENTRY(TraceICCall, 2) {
3204 const ICData& ic_data = ICData::CheckedHandle(zone, arguments.ArgAt(0));
3205 const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(1));
3206 DartFrameIterator iterator(thread,
3207 StackFrameIterator::kNoCrossThreadIteration);
3208 StackFrame* frame = iterator.NextFrame();
3209 ASSERT(frame != nullptr);
3210 OS::PrintErr(
3211 "IC call @%#" Px ": ICData: %#" Px " cnt:%" Pd " nchecks: %" Pd " %s\n",
3212 frame->pc(), static_cast<uword>(ic_data.ptr()), function.usage_counter(),
3213 ic_data.NumberOfChecks(), function.ToFullyQualifiedCString());
3214}
3215
3216// This is called from a function that needs to be optimized.
3217// The requesting function may already be optimized (reoptimization).
3218// Returns the Code object where execution continues.
3219DEFINE_RUNTIME_ENTRY(OptimizeInvokedFunction, 1) {
3220#if !defined(DART_PRECOMPILED_RUNTIME)
3221 const Function& function = Function::CheckedHandle(zone, arguments.ArgAt(0));
3222 ASSERT(!function.IsNull());
3223 ASSERT(function.HasCode());
3224
3225 if (Compiler::CanOptimizeFunction(thread, function)) {
3226 auto isolate_group = thread->isolate_group();
3227 if (FLAG_background_compilation) {
3228 if (isolate_group->background_compiler()->EnqueueCompilation(function)) {
3229 // Reduce the chance of triggering a compilation while the function is
3230 // being compiled in the background. INT32_MIN should ensure that it
3231 // takes a long time to trigger a compilation.
3232 // Note that the background compilation queue rejects duplicate entries.
3233 function.SetUsageCounter(INT32_MIN);
3234 // Continue in the same code.
3235 arguments.SetReturn(function);
3236 return;
3237 }
3238 }
3239
3240 // Reset usage counter for reoptimization before calling optimizer to
3241 // prevent recursive triggering of function optimization.
3242 function.SetUsageCounter(0);
3243 if (FLAG_trace_compiler || FLAG_trace_optimizing_compiler) {
3244 if (function.HasOptimizedCode()) {
3245 THR_Print("ReCompiling function: '%s' \n",
3246 function.ToFullyQualifiedCString());
3247 }
3248 }
3249 const Object& result = Object::Handle(
3250 zone, Compiler::CompileOptimizedFunction(thread, function));
3251 ThrowIfError(result);
3252 }
3253 arguments.SetReturn(function);
3254#else
3255 UNREACHABLE();
3256#endif // !DART_PRECOMPILED_RUNTIME
3257}
3258
3259// The caller must be a static call in a Dart frame, or an entry frame.
3260// Patch the static call to point to the valid code's entry point.
3261DEFINE_RUNTIME_ENTRY(FixCallersTarget, 0) {
3262#if !defined(DART_PRECOMPILED_RUNTIME)
3263 StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames, thread,
3264 StackFrameIterator::kNoCrossThreadIteration);
3265 StackFrame* frame = iterator.NextFrame();
3266 ASSERT(frame != nullptr);
3267 while (frame->IsStubFrame() || frame->IsExitFrame()) {
3268 frame = iterator.NextFrame();
3269 ASSERT(frame != nullptr);
3270 }
3271 if (frame->IsEntryFrame()) {
3272 // Since a function's current code is always unpatched, the entry frame
3273 // always calls into unpatched code.
3274 UNREACHABLE();
3275 }
3276 ASSERT(frame->IsDartFrame());
3277 const Code& caller_code = Code::Handle(zone, frame->LookupDartCode());
3278 RELEASE_ASSERT(caller_code.is_optimized());
3279 const Function& target_function = Function::Handle(
3280 zone, caller_code.GetStaticCallTargetFunctionAt(frame->pc()));
3281
3282 const Code& current_target_code =
3283 Code::Handle(zone, target_function.EnsureHasCode());
3284 CodePatcher::PatchStaticCallAt(frame->pc(), caller_code, current_target_code);
3285 caller_code.SetStaticCallTargetCodeAt(frame->pc(), current_target_code);
3286 if (FLAG_trace_patching) {
3287 OS::PrintErr(
3288 "FixCallersTarget: caller %#" Px
3289 " "
3290 "target '%s' -> %#" Px " (%s)\n",
3291 frame->pc(), target_function.ToFullyQualifiedCString(),
3292 current_target_code.EntryPoint(),
3293 current_target_code.is_optimized() ? "optimized" : "unoptimized");
3294 }
3295 arguments.SetReturn(current_target_code);
3296#else
3297 UNREACHABLE();
3298#endif
3299}
3300
3301// The caller must be a monomorphic call from unoptimized code.
3302// Patch call to point to new target.
3303DEFINE_RUNTIME_ENTRY(FixCallersTargetMonomorphic, 2) {
3304#if !defined(DART_PRECOMPILED_RUNTIME)
3305 const Instance& receiver = Instance::CheckedHandle(zone, arguments.ArgAt(0));
3306 const Array& switchable_call_data =
3307 Array::CheckedHandle(zone, arguments.ArgAt(1));
3308
3309 DartFrameIterator iterator(thread,
3310 StackFrameIterator::kNoCrossThreadIteration);
3311 StackFrame* caller_frame = iterator.NextFrame();
3312 const auto& caller_code = Code::Handle(zone, caller_frame->LookupDartCode());
3313 const auto& caller_function =
3314 Function::Handle(zone, caller_frame->LookupDartFunction());
3315
3316 GrowableArray<const Instance*> caller_arguments(1);
3317 caller_arguments.Add(&receiver);
3318 PatchableCallHandler handler(
3319 thread, caller_arguments, MissHandler::kFixCallersTargetMonomorphic,
3320 arguments, caller_frame, caller_code, caller_function);
3321 handler.ResolveSwitchAndReturn(switchable_call_data);
3322#else
3323 UNREACHABLE();
3324#endif
3325}
3326
3327// The caller tried to allocate an instance via an invalidated allocation
3328// stub.
3329DEFINE_RUNTIME_ENTRY(FixAllocationStubTarget, 0) {
3330#if !defined(DART_PRECOMPILED_RUNTIME)
3331 StackFrameIterator iterator(ValidationPolicy::kDontValidateFrames, thread,
3332 StackFrameIterator::kNoCrossThreadIteration);
3333 StackFrame* frame = iterator.NextFrame();
3334 ASSERT(frame != nullptr);
3335 while (frame->IsStubFrame() || frame->IsExitFrame()) {
3336 frame = iterator.NextFrame();
3337 ASSERT(frame != nullptr);
3338 }
3339 if (frame->IsEntryFrame()) {
3340 // There must be a valid Dart frame.
3341 UNREACHABLE();
3342 }
3343 ASSERT(frame->IsDartFrame());
3344 const Code& caller_code = Code::Handle(zone, frame->LookupDartCode());
3345 ASSERT(!caller_code.IsNull());
3346 const Code& stub = Code::Handle(
3347 CodePatcher::GetStaticCallTargetAt(frame->pc(), caller_code));
3348 Class& alloc_class = Class::ZoneHandle(zone);
3349 alloc_class ^= stub.owner();
3350 Code& alloc_stub = Code::Handle(zone, alloc_class.allocation_stub());
3351 if (alloc_stub.IsNull()) {
3352 alloc_stub = StubCode::GetAllocationStubForClass(alloc_class);
3353 ASSERT(!alloc_stub.IsDisabled());
3354 }
3355 CodePatcher::PatchStaticCallAt(frame->pc(), caller_code, alloc_stub);
3356 caller_code.SetStubCallTargetCodeAt(frame->pc(), alloc_stub);
3357 if (FLAG_trace_patching) {
3358 OS::PrintErr("FixAllocationStubTarget: caller %#" Px
3359 " alloc-class %s "
3360 " -> %#" Px "\n",
3361 frame->pc(), alloc_class.ToCString(), alloc_stub.EntryPoint());
3362 }
3363 arguments.SetReturn(alloc_stub);
3364#else
3365 UNREACHABLE();
3366#endif
3367}
3368
3369static const char* DeoptReasonToCString(ICData::DeoptReasonId deopt_reason) {
3370 switch (deopt_reason) {
3371#define DEOPT_REASON_TO_TEXT(name) \
3372 case ICData::kDeopt##name: \
3373 return #name;
3374 DEOPT_REASONS(DEOPT_REASON_TO_TEXT)
3375#undef DEOPT_REASON_TO_TEXT
3376 default:
3377 UNREACHABLE();
3378 return "";
3379 }
3380}
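// Aside (annotation, not VM source): a self-contained sketch of the X-macro
// pattern above, using a hypothetical two-entry reason list to show what the
// DEOPT_REASONS expansion produces.
#define SKETCH_DEOPT_REASONS(V) V(BinarySmiOp) V(CheckClass)
enum SketchDeoptReason {
#define SKETCH_DECLARE(name) kSketchDeopt##name,
  SKETCH_DEOPT_REASONS(SKETCH_DECLARE)
#undef SKETCH_DECLARE
};
static const char* SketchDeoptReasonToCString(SketchDeoptReason reason) {
  switch (reason) {
#define SKETCH_TO_TEXT(name)                                                  \
  case kSketchDeopt##name:                                                    \
    return #name;
    SKETCH_DEOPT_REASONS(SKETCH_TO_TEXT)
#undef SKETCH_TO_TEXT
  }
  return "";
}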
3381
3382static bool IsSuspendedFrame(Zone* zone,
3383 const Function& function,
3384 StackFrame* frame) {
3385 if (!function.IsSuspendableFunction()) {
3386 return false;
3387 }
3388 auto& suspend_state = Object::Handle(
3389 zone, *reinterpret_cast<ObjectPtr*>(LocalVarAddress(
3390 frame->fp(), runtime_frame_layout.FrameSlotForVariableIndex(
3391 SuspendState::kSuspendStateVarIndex))));
3392 return suspend_state.IsSuspendState() &&
3393 (SuspendState::Cast(suspend_state).pc() != 0);
3394}
3395
3396void DeoptimizeAt(Thread* mutator_thread,
3397 const Code& optimized_code,
3398 StackFrame* frame) {
3399 ASSERT(optimized_code.is_optimized());
3400
3401 // Force-optimized code is optimized code which cannot deoptimize and doesn't
3402 // have unoptimized code to fall back to.
3403 ASSERT(!optimized_code.is_force_optimized());
3404
3405 Thread* thread = Thread::Current();
3406 Zone* zone = thread->zone();
3407 const Function& function = Function::Handle(zone, optimized_code.function());
3408 const Error& error =
3409 Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, function));
3410 if (!error.IsNull()) {
3411 Exceptions::PropagateError(error);
3412 }
3413 const Code& unoptimized_code =
3414 Code::Handle(zone, function.unoptimized_code());
3415 ASSERT(!unoptimized_code.IsNull());
3416 // The switch to unoptimized code may have already occurred.
3417 if (function.HasOptimizedCode()) {
3418 function.SwitchToUnoptimizedCode();
3419 }
3420
3421 if (IsSuspendedFrame(zone, function, frame)) {
3422 // Frame is suspended and going to be removed from the stack.
3423 if (FLAG_trace_deoptimization) {
3424 THR_Print("Not deoptimizing suspended frame, fp=%" Pp "\n", frame->fp());
3425 }
3426 } else if (frame->IsMarkedForLazyDeopt()) {
3427 // Deopt already scheduled.
3428 if (FLAG_trace_deoptimization) {
3429 THR_Print("Lazy deopt already scheduled for fp=%" Pp "\n", frame->fp());
3430 }
3431 } else {
3432 uword deopt_pc = frame->pc();
3433 ASSERT(optimized_code.ContainsInstructionAt(deopt_pc));
3434
3435#if defined(DEBUG)
3436 ValidateFrames();
3437#endif
3438
3439 // N.B.: Update the pending deopt table before updating the frame. The
3440 // profiler may attempt a stack walk in between.
3441 mutator_thread->pending_deopts().AddPendingDeopt(frame->fp(), deopt_pc);
3442 frame->MarkForLazyDeopt();
3443
3444 if (FLAG_trace_deoptimization) {
3445 THR_Print("Lazy deopt scheduled for fp=%" Pp ", pc=%" Pp "\n",
3446 frame->fp(), deopt_pc);
3447 }
3448 }
3449
3450 // Mark code as dead (do not GC its embedded objects).
3451 optimized_code.set_is_alive(false);
3452}
3453
3454// Currently checks only that all optimized frames have kDeoptIndex
3455// and that unoptimized code has the kDeoptAfter.
3456void DeoptimizeFunctionsOnStack() {
3457 auto thread = Thread::Current();
3458 // Have to grab program_lock before stopping everybody else.
3459 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
3460
3461 auto isolate_group = thread->isolate_group();
3462 isolate_group->RunWithStoppedMutators([&]() {
3463 Code& optimized_code = Code::Handle();
3464 isolate_group->ForEachIsolate(
3465 [&](Isolate* isolate) {
3466 auto mutator_thread = isolate->mutator_thread();
3467 if (mutator_thread == nullptr) {
3468 return;
3469 }
3470 DartFrameIterator iterator(
3471 mutator_thread, StackFrameIterator::kAllowCrossThreadIteration);
3472 StackFrame* frame = iterator.NextFrame();
3473 while (frame != nullptr) {
3474 optimized_code = frame->LookupDartCode();
3475 if (optimized_code.is_optimized() &&
3476 !optimized_code.is_force_optimized()) {
3477 DeoptimizeAt(mutator_thread, optimized_code, frame);
3478 }
3479 frame = iterator.NextFrame();
3480 }
3481 },
3482 /*at_safepoint=*/true);
3483 });
3484}
3485
3486static void DeoptimizeLastDartFrameIfOptimized() {
3487 auto thread = Thread::Current();
3488 // Have to grab program_lock before stopping everybody else.
3489 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
3490
3491 auto isolate = thread->isolate();
3492 auto isolate_group = thread->isolate_group();
3493 isolate_group->RunWithStoppedMutators([&]() {
3494 auto mutator_thread = isolate->mutator_thread();
3495 if (mutator_thread == nullptr) {
3496 return;
3497 }
3498 DartFrameIterator iterator(mutator_thread,
3499 StackFrameIterator::kNoCrossThreadIteration);
3500 StackFrame* frame = iterator.NextFrame();
3501 if (frame != nullptr) {
3502 const auto& optimized_code = Code::Handle(frame->LookupDartCode());
3503 if (optimized_code.is_optimized() &&
3504 !optimized_code.is_force_optimized()) {
3505 DeoptimizeAt(mutator_thread, optimized_code, frame);
3506 }
3507 }
3508 });
3509}
3510
3511#if !defined(DART_PRECOMPILED_RUNTIME)
3512static const intptr_t kNumberOfSavedCpuRegisters = kNumberOfCpuRegisters;
3513static const intptr_t kNumberOfSavedFpuRegisters = kNumberOfFpuRegisters;
3514
3515static void CopySavedRegisters(uword saved_registers_address,
3516 fpu_register_t** fpu_registers,
3517 intptr_t** cpu_registers) {
3518 // Tell MemorySanitizer this region is initialized by generated code. This
3519 // region isn't already (fully) unpoisoned by FrameSetIterator::Unpoison
3520 // because it is in an exit frame and stack frame iteration doesn't have
3521 // access to true SP for exit frames.
3522 MSAN_UNPOISON(reinterpret_cast<void*>(saved_registers_address),
3523 kNumberOfSavedFpuRegisters * kFpuRegisterSize +
3524 kNumberOfSavedCpuRegisters * kWordSize);
3525
3526 ASSERT(sizeof(fpu_register_t) == kFpuRegisterSize);
3527 fpu_register_t* fpu_registers_copy =
3528 new fpu_register_t[kNumberOfSavedFpuRegisters];
3529 ASSERT(fpu_registers_copy != nullptr);
3530 for (intptr_t i = 0; i < kNumberOfSavedFpuRegisters; i++) {
3531 fpu_registers_copy[i] =
3532 *reinterpret_cast<fpu_register_t*>(saved_registers_address);
3533 saved_registers_address += kFpuRegisterSize;
3534 }
3535 *fpu_registers = fpu_registers_copy;
3536
3537 ASSERT(sizeof(intptr_t) == kWordSize);
3538 intptr_t* cpu_registers_copy = new intptr_t[kNumberOfSavedCpuRegisters];
3539 ASSERT(cpu_registers_copy != nullptr);
3540 for (intptr_t i = 0; i < kNumberOfSavedCpuRegisters; i++) {
3541 cpu_registers_copy[i] =
3542 *reinterpret_cast<intptr_t*>(saved_registers_address);
3543 saved_registers_address += kWordSize;
3544 }
3545 *cpu_registers = cpu_registers_copy;
3546}
3547#endif
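// Aside (annotation, not VM source): the copy loops above reduced to their
// essentials as a hypothetical helper. Values are read from a raw stack
// address into a heap buffer; in the real code the DeoptContext takes
// ownership of the separate CPU and FPU buffers.
static int64_t* CopyRawRegisterBlock(uintptr_t from, intptr_t count) {
  int64_t* copy = new int64_t[count];
  for (intptr_t i = 0; i < count; i++) {
    copy[i] = *reinterpret_cast<int64_t*>(from);
    from += sizeof(int64_t);
  }
  return copy;
}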
3548
3549DEFINE_LEAF_RUNTIME_ENTRY(bool, TryDoubleAsInteger, 1, Thread* thread) {
3550 double value = thread->unboxed_double_runtime_arg();
3551 int64_t int_value = static_cast<int64_t>(value);
3552 double converted_double = static_cast<double>(int_value);
3553 if (converted_double != value) {
3554 return false;
3555 }
3556 thread->set_unboxed_int64_runtime_arg(int_value);
3557 return true;
3558}
3559END_LEAF_RUNTIME_ENTRY
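// Aside (annotation, not VM source): the round-trip test above as an isolated
// hypothetical helper. A double is exactly representable as an int64 iff the
// cast there and back preserves the value; NaN fails because NaN != NaN.
static inline bool FitsInInt64(double value) {
  int64_t as_int = static_cast<int64_t>(value);
  return static_cast<double>(as_int) == value;
}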
3560
3561// Copies saved registers and the caller's frame into temporary buffers.
3562// Returns the stack size of the unoptimized frame.
3563// The calling code must be optimized, but its function may not have
3564// optimized code if the code is OSR code, or if the code was invalidated
3565// through class loading/finalization or a field guard.
3566DEFINE_LEAF_RUNTIME_ENTRY(intptr_t,
3567 DeoptimizeCopyFrame,
3568 2,
3569 uword saved_registers_address,
3570 uword is_lazy_deopt) {
3571#if !defined(DART_PRECOMPILED_RUNTIME)
3572 Thread* thread = Thread::Current();
3573 Isolate* isolate = thread->isolate();
3574 StackZone zone(thread);
3575
3576 // All registers have been saved below last-fp as if they were locals.
3577 const uword last_fp =
3578 saved_registers_address + (kNumberOfSavedCpuRegisters * kWordSize) +
3579 (kNumberOfSavedFpuRegisters * kFpuRegisterSize) -
3580 ((kFirstLocalSlotFromFp + 1) * kWordSize);
3581
3582 // Get optimized code and frame that need to be deoptimized.
3583 DartFrameIterator iterator(last_fp, thread,
3584 StackFrameIterator::kNoCrossThreadIteration);
3585
3586 StackFrame* caller_frame = iterator.NextFrame();
3587 ASSERT(caller_frame != nullptr);
3588 const Code& optimized_code = Code::Handle(caller_frame->LookupDartCode());
3589 ASSERT(optimized_code.is_optimized());
3590 const Function& top_function =
3591 Function::Handle(thread->zone(), optimized_code.function());
3592 const bool deoptimizing_code = top_function.HasOptimizedCode();
3593 if (FLAG_trace_deoptimization) {
3594 const Function& function = Function::Handle(optimized_code.function());
3595 THR_Print("== Deoptimizing code for '%s', %s, %s\n",
3596 function.ToFullyQualifiedCString(),
3597 deoptimizing_code ? "code & frame" : "frame",
3598 (is_lazy_deopt != 0u) ? "lazy-deopt" : "");
3599 }
3600
3601 if (is_lazy_deopt != 0u) {
3602 const uword deopt_pc =
3603 thread->pending_deopts().FindPendingDeopt(caller_frame->fp());
3604
3605 // N.B.: Update frame before updating pending deopt table. The profiler
3606 // may attempt a stack walk in between.
3607 caller_frame->set_pc(deopt_pc);
3608 ASSERT(caller_frame->pc() == deopt_pc);
3609 ASSERT(optimized_code.ContainsInstructionAt(caller_frame->pc()));
3610 thread->pending_deopts().ClearPendingDeoptsAtOrBelow(
3611 caller_frame->fp(), PendingDeopts::kClearDueToDeopt);
3612 } else {
3613 if (FLAG_trace_deoptimization) {
3614 THR_Print("Eager deopt fp=%" Pp " pc=%" Pp "\n", caller_frame->fp(),
3615 caller_frame->pc());
3616 }
3617 }
3618
3619 // Copy the saved registers from the stack.
3620 fpu_register_t* fpu_registers;
3621 intptr_t* cpu_registers;
3622 CopySavedRegisters(saved_registers_address, &fpu_registers, &cpu_registers);
3623
3624 // Create the DeoptContext.
3625 DeoptContext* deopt_context = new DeoptContext(
3626 caller_frame, optimized_code, DeoptContext::kDestIsOriginalFrame,
3627 fpu_registers, cpu_registers, is_lazy_deopt != 0, deoptimizing_code);
3628 isolate->set_deopt_context(deopt_context);
3629
3630 // Stack size (FP - SP) in bytes.
3631 return deopt_context->DestStackAdjustment() * kWordSize;
3632#else
3633 UNREACHABLE();
3634 return 0;
3635#endif // !DART_PRECOMPILED_RUNTIME
3636}
3637END_LEAF_RUNTIME_ENTRY
3638
3639// The stack has been adjusted to fit all values for the unoptimized frame.
3640// Fill the unoptimized frame.
3641DEFINE_LEAF_RUNTIME_ENTRY(void, DeoptimizeFillFrame, 1, uword last_fp) {
3642#if !defined(DART_PRECOMPILED_RUNTIME)
3643 Thread* thread = Thread::Current();
3644 Isolate* isolate = thread->isolate();
3645 StackZone zone(thread);
3646
3647 DeoptContext* deopt_context = isolate->deopt_context();
3648 DartFrameIterator iterator(last_fp, thread,
3649 StackFrameIterator::kNoCrossThreadIteration);
3650 StackFrame* caller_frame = iterator.NextFrame();
3651 ASSERT(caller_frame != nullptr);
3652
3653#if defined(DEBUG)
3654 {
3655 // The code from the deopt_context.
3656 const Code& code = Code::Handle(deopt_context->code());
3657
3658 // The code from our frame.
3659 const Code& optimized_code = Code::Handle(caller_frame->LookupDartCode());
3660 const Function& function = Function::Handle(optimized_code.function());
3661 ASSERT(!function.IsNull());
3662
3663 // The code will be the same as before.
3664 ASSERT(code.ptr() == optimized_code.ptr());
3665
3666 // Some sanity checking of the optimized code.
3667 ASSERT(!optimized_code.IsNull() && optimized_code.is_optimized());
3668 }
3669#endif
3670
3671 deopt_context->set_dest_frame(caller_frame);
3672 deopt_context->FillDestFrame();
3673
3674#else
3675 UNREACHABLE();
3676#endif // !DART_PRECOMPILED_RUNTIME
3677}
3678END_LEAF_RUNTIME_ENTRY
3679
3680// This is the last step in the deoptimization; GC can occur.
3681// Returns the number of bytes to remove from the expression stack of the
3682// bottom-most deoptimized frame. Those arguments were artificially injected
3683// under the return address to keep them discoverable by the GC that can
3684// occur during the materialization phase.
3685DEFINE_RUNTIME_ENTRY(DeoptimizeMaterialize, 0) {
3686#if !defined(DART_PRECOMPILED_RUNTIME)
3687#if defined(DEBUG)
3688 {
3689 // We may rendezvous for a safepoint at entry or GC from the allocations
3690 // below. Check the stack is walkable.
3691 ValidateFrames();
3692 }
3693#endif
3694 DeoptContext* deopt_context = isolate->deopt_context();
3695 intptr_t deopt_arg_count = deopt_context->MaterializeDeferredObjects();
3696 isolate->set_deopt_context(nullptr);
3697 delete deopt_context;
3698
3699 // Return value tells deoptimization stub to remove the given number of bytes
3700 // from the stack.
3701 arguments.SetReturn(Smi::Handle(Smi::New(deopt_arg_count * kWordSize)));
3702#else
3703 UNREACHABLE();
3704#endif // !DART_PRECOMPILED_RUNTIME
3705}
3706
3707DEFINE_RUNTIME_ENTRY(RewindPostDeopt, 0) {
3708#if !defined(DART_PRECOMPILED_RUNTIME)
3709#if !defined(PRODUCT)
3710 isolate->debugger()->RewindPostDeopt();
3711#endif // !PRODUCT
3712#endif // !DART_PRECOMPILED_RUNTIME
3713 UNREACHABLE();
3714}
3715
3716// Handle slow path actions for the resumed frame after it was
3717// copied back to the stack:
3718// 1) deoptimization;
3719// 2) breakpoint at resumption;
3720// 3) throwing an exception.
3721//
3722// Arg0: exception
3723// Arg1: stack trace
3724DEFINE_RUNTIME_ENTRY(ResumeFrame, 2) {
3725 const Instance& exception = Instance::CheckedHandle(zone, arguments.ArgAt(0));
3726 const Instance& stacktrace =
3727 Instance::CheckedHandle(zone, arguments.ArgAt(1));
3728
3729#if !defined(DART_PRECOMPILED_RUNTIME)
3730#if !defined(PRODUCT)
3731 if (isolate->has_resumption_breakpoints()) {
3732 isolate->debugger()->ResumptionBreakpoint();
3733 }
3734#endif
3735
3736 DartFrameIterator iterator(thread,
3737 StackFrameIterator::kNoCrossThreadIteration);
3738 StackFrame* frame = iterator.NextFrame();
3739 ASSERT(frame->IsDartFrame());
3740 ASSERT(Function::Handle(zone, frame->LookupDartFunction())
3741 .IsSuspendableFunction());
3742 const Code& caller_code = Code::Handle(zone, frame->LookupDartCode());
3743 if (caller_code.IsDisabled() && caller_code.is_optimized() &&
3744 !caller_code.is_force_optimized()) {
3745 const uword deopt_pc = frame->pc();
3746 thread->pending_deopts().AddPendingDeopt(frame->fp(), deopt_pc);
3747 frame->MarkForLazyDeopt();
3748
3749 if (FLAG_trace_deoptimization) {
3750 THR_Print("Lazy deopt scheduled for resumed frame fp=%" Pp ", pc=%" Pp
3751 "\n",
3752 frame->fp(), deopt_pc);
3753 }
3754 }
3755#endif
3756
3757 if (!exception.IsNull()) {
3758 Exceptions::ReThrow(thread, exception, stacktrace);
3759 }
3760}
3761
3762void OnEveryRuntimeEntryCall(Thread* thread,
3763 const char* runtime_call_name,
3764 bool can_lazy_deopt) {
3765 ASSERT(FLAG_deoptimize_on_runtime_call_every > 0);
3766 if (FLAG_precompiled_mode) {
3767 return;
3768 }
3769 if (IsolateGroup::IsSystemIsolateGroup(thread->isolate_group())) {
3770 return;
3771 }
3772 const bool is_deopt_related =
3773 strstr(runtime_call_name, "Deoptimize") != nullptr;
3774 if (is_deopt_related) {
3775 return;
3776 }
3777 // For --deoptimize-on-runtime-call-every we only consider runtime calls
3778 // that can lazy-deopt.
3779 if (can_lazy_deopt) {
3780 if (FLAG_deoptimize_on_runtime_call_name_filter != nullptr &&
3781 (strlen(runtime_call_name) !=
3782 strlen(FLAG_deoptimize_on_runtime_call_name_filter) ||
3783 strstr(runtime_call_name,
3784 FLAG_deoptimize_on_runtime_call_name_filter) == nullptr)) {
3785 return;
3786 }
3787 const uint32_t count = thread->IncrementAndGetRuntimeCallCount();
3788 if ((count % FLAG_deoptimize_on_runtime_call_every) == 0) {
3789 DeoptimizeLastDartFrameIfOptimized();
3790 }
3791 }
3792}
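// Aside (annotation, not VM source): the filter comparison above as a
// hypothetical helper (requires <cstring>): strlen equality plus strstr
// containment together amount to an exact match on the call name.
static inline bool NameMatchesFilterExactly(const char* name,
                                            const char* filter) {
  return strlen(name) == strlen(filter) && strstr(name, filter) != nullptr;
}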
3793
3794double DartModulo(double left, double right) {
3795 double remainder = fmod_ieee(left, right);
3796 if (remainder == 0.0) {
3797 // We explicitly switch to the positive 0.0 (just in case it was negative).
3798 remainder = +0.0;
3799 } else if (remainder < 0.0) {
3800 if (right < 0) {
3801 remainder -= right;
3802 } else {
3803 remainder += right;
3804 }
3805 }
3806 return remainder;
3807}
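// Aside (annotation, not VM source): expected values for the definition
// above, as a usage sketch. Unlike C's fmod, which takes the dividend's
// sign, the result here is always non-negative.
static void DartModuloExamples() {
  ASSERT(DartModulo(-5.0, 3.0) == 1.0);  // fmod alone would yield -2.0.
  ASSERT(DartModulo(5.0, -3.0) == 2.0);  // The divisor's sign is ignored.
  ASSERT(DartModulo(-6.0, 3.0) == 0.0);  // -0.0 is normalized to +0.0.
}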
3808
3809// Update the global type feedback recorded for a field to reflect the
3810// assignment of the given value.
3811// Arg0: Field object;
3812// Arg1: Value that is being stored.
3813DEFINE_RUNTIME_ENTRY(UpdateFieldCid, 2) {
3814#if !defined(DART_PRECOMPILED_RUNTIME)
3815 const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
3816 const Object& value = Object::Handle(arguments.ArgAt(1));
3817 field.RecordStore(value);
3818#else
3819 UNREACHABLE();
3820#endif
3821}
3822
3823DEFINE_RUNTIME_ENTRY(InitInstanceField, 2) {
3824 const Instance& instance = Instance::CheckedHandle(zone, arguments.ArgAt(0));
3825 const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(1));
3826 Object& result = Object::Handle(zone, field.InitializeInstance(instance));
3827 ThrowIfError(result);
3828 result = instance.GetField(field);
3829 ASSERT((result.ptr() != Object::sentinel().ptr()) &&
3830 (result.ptr() != Object::transition_sentinel().ptr()));
3831 arguments.SetReturn(result);
3832}
3833
3834DEFINE_RUNTIME_ENTRY(InitStaticField, 1) {
3835 const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
3836 Object& result = Object::Handle(zone, field.InitializeStatic());
3837 ThrowIfError(result);
3838 result = field.StaticValue();
3839 ASSERT((result.ptr() != Object::sentinel().ptr()) &&
3840 (result.ptr() != Object::transition_sentinel().ptr()));
3841 arguments.SetReturn(result);
3842}
3843
3844DEFINE_RUNTIME_ENTRY(LateFieldAssignedDuringInitializationError, 1) {
3845 const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
3846 Exceptions::ThrowLateFieldAssignedDuringInitialization(
3847 String::Handle(field.name()));
3848}
3849
3850DEFINE_RUNTIME_ENTRY(LateFieldNotInitializedError, 1) {
3851 const Field& field = Field::CheckedHandle(zone, arguments.ArgAt(0));
3852 Exceptions::ThrowLateFieldNotInitialized(String::Handle(field.name()));
3853}
3854
3855DEFINE_RUNTIME_ENTRY(NotLoaded, 0) {
3856 // We could just use a trap instruction in the stub, but we get better stack
3857 // traces when there is an exit frame.
3858 FATAL("Not loaded");
3859}
3860
3861DEFINE_RUNTIME_ENTRY(FfiAsyncCallbackSend, 1) {
3862 const Dart_Port target_port = thread->unboxed_int64_runtime_arg();
3863 TRACE_RUNTIME_CALL("FfiAsyncCallbackSend %p", (void*)target_port);
3864 const Object& message = Object::Handle(zone, arguments.ArgAt(0));
3865 const Array& msg_array = Array::Handle(zone, Array::New(3));
3866 msg_array.SetAt(0, message);
3867 PersistentHandle* handle =
3868 isolate->group()->api_state()->AllocatePersistentHandle();
3869 handle->set_ptr(msg_array);
3870 PortMap::PostMessage(
3871 Message::New(target_port, handle, Message::kNormalPriority));
3872}
3873
3874// Use expected function signatures to help the MSVC compiler resolve overloading.
3875typedef double (*UnaryMathCFunction)(double x);
3876typedef double (*BinaryMathCFunction)(double x, double y);
3877typedef void* (*MemMoveCFunction)(void* dest, const void* src, size_t n);
3878
3879DEFINE_RAW_LEAF_RUNTIME_ENTRY(LibcPow,
3880 /*argument_count=*/2,
3881 /*is_float=*/true,
3882 static_cast<BinaryMathCFunction>(pow));
3883
3884DEFINE_RAW_LEAF_RUNTIME_ENTRY(DartModulo,
3885 /*argument_count=*/2,
3886 /*is_float=*/true,
3887 static_cast<BinaryMathCFunction>(DartModulo));
3888
3889DEFINE_RAW_LEAF_RUNTIME_ENTRY(LibcAtan2,
3890 2,
3891 /*is_float=*/true,
3892 static_cast<BinaryMathCFunction>(atan2_ieee));
3893
3894DEFINE_RAW_LEAF_RUNTIME_ENTRY(LibcFloor,
3895 /*argument_count=*/1,
3896 /*is_float=*/true,
3897 static_cast<UnaryMathCFunction>(floor));
3898
3899DEFINE_RAW_LEAF_RUNTIME_ENTRY(LibcCeil,
3900 /*argument_count=*/1,
3901 /*is_float=*/true,
3902 static_cast<UnaryMathCFunction>(ceil));
3903
3904DEFINE_RAW_LEAF_RUNTIME_ENTRY(LibcTrunc,
3905 /*argument_count=*/1,
3906 /*is_float=*/true,
3907 static_cast<UnaryMathCFunction>(trunc));
3908
3909DEFINE_RAW_LEAF_RUNTIME_ENTRY(LibcRound,
3910 /*argument_count=*/1,
3911 /*is_float=*/true,
3912 static_cast<UnaryMathCFunction>(round));
3913
3914DEFINE_RAW_LEAF_RUNTIME_ENTRY(LibcCos,
3915 /*argument_count=*/1,
3916 /*is_float=*/true,
3917 static_cast<UnaryMathCFunction>(cos));
3918
3919DEFINE_RAW_LEAF_RUNTIME_ENTRY(LibcSin,
3920 /*argument_count=*/1,
3921 /*is_float=*/true,
3922 static_cast<UnaryMathCFunction>(sin));
3923
3924DEFINE_RAW_LEAF_RUNTIME_ENTRY(LibcAsin,
3925 /*argument_count=*/1,
3926 /*is_float=*/true,
3927 static_cast<UnaryMathCFunction>(asin));
3928
3929DEFINE_RAW_LEAF_RUNTIME_ENTRY(LibcAcos,
3930 /*argument_count=*/1,
3931 /*is_float=*/true,
3932 static_cast<UnaryMathCFunction>(acos));
3933
3934DEFINE_RAW_LEAF_RUNTIME_ENTRY(LibcTan,
3935 /*argument_count=*/1,
3936 /*is_float=*/true,
3937 static_cast<UnaryMathCFunction>(tan));
3938
3939DEFINE_RAW_LEAF_RUNTIME_ENTRY(LibcAtan,
3940 /*argument_count=*/1,
3941 /*is_float=*/true,
3942 static_cast<UnaryMathCFunction>(atan));
3943
3944DEFINE_RAW_LEAF_RUNTIME_ENTRY(LibcExp,
3945 /*argument_count=*/1,
3946 /*is_float=*/true,
3947 static_cast<UnaryMathCFunction>(exp));
3948
3949DEFINE_RAW_LEAF_RUNTIME_ENTRY(LibcLog,
3950 /*argument_count=*/1,
3951 /*is_float=*/true,
3952 static_cast<UnaryMathCFunction>(log));
3953
3954DEFINE_RAW_LEAF_RUNTIME_ENTRY(MemoryMove,
3955 /*argument_count=*/3,
3956 /*is_float=*/false,
3957 static_cast<MemMoveCFunction>(memmove));
3958
3959extern "C" void DFLRT_EnterSafepoint(NativeArguments __unusable_) {
3960 CHECK_STACK_ALIGNMENT;
3961 TRACE_RUNTIME_CALL("%s", "EnterSafepoint");
3962 Thread* thread = Thread::Current();
3963 ASSERT(thread->top_exit_frame_info() != 0);
3964 ASSERT(thread->execution_state() == Thread::kThreadInNative);
3965 thread->EnterSafepoint();
3966 TRACE_RUNTIME_CALL("%s", "EnterSafepoint done");
3967}
3968DEFINE_RAW_LEAF_RUNTIME_ENTRY(EnterSafepoint,
3969 /*argument_count=*/0,
3970 /*is_float=*/false,
3971 reinterpret_cast<RuntimeFunction>(
3972 &DFLRT_EnterSafepoint));
3973extern "C" void DFLRT_ExitSafepoint(NativeArguments __unusable_) {
3974 CHECK_STACK_ALIGNMENT;
3975 TRACE_RUNTIME_CALL("%s", "ExitSafepoint");
3976 Thread* thread = Thread::Current();
3977 ASSERT(thread->top_exit_frame_info() != 0);
3978
3979 ASSERT(thread->execution_state() == Thread::kThreadInNative);
3980 if (thread->is_unwind_in_progress()) {
3981 // Clean up safepoint unwind error marker to prevent safepoint tripping.
3982 // The safepoint marker will get restored just before jumping back
3983 // to generated code.
3984 thread->SetUnwindErrorInProgress(false);
3985 NoSafepointScope no_safepoint;
3986 Error unwind_error;
3987 unwind_error ^=
3988 thread->isolate()->isolate_object_store()->preallocated_unwind_error();
3989 Exceptions::PropagateError(unwind_error);
3990 }
3991 thread->ExitSafepoint();
3992
3993 TRACE_RUNTIME_CALL("%s", "ExitSafepoint done");
3994}
3995DEFINE_RAW_LEAF_RUNTIME_ENTRY(ExitSafepoint,
3996 /*argument_count=*/0,
3997 /*is_float=*/false,
3998 reinterpret_cast<RuntimeFunction>(
3999 &DFLRT_ExitSafepoint));
4000// This is expected to be invoked when jumping to the destination frame,
4001// during exception handling.
4002extern "C" void DFLRT_ExitSafepointIgnoreUnwindInProgress(
4003 NativeArguments __unusable_) {
4004 CHECK_STACK_ALIGNMENT;
4005 TRACE_RUNTIME_CALL("%s", "ExitSafepointIgnoreUnwindInProgress");
4006 Thread* thread = Thread::Current();
4007 ASSERT(thread->top_exit_frame_info() != 0);
4008
4009 ASSERT(thread->execution_state() == Thread::kThreadInNative);
4010
4011 // Compared to ExitSafepoint above, we ignore the is_unwind_in_progress
4012 // flag because this is called as part of the JumpToFrame exception
4013 // handler: we want this transition to complete so that the next
4014 // safepoint check performs error propagation.
4015 thread->ExitSafepoint();
4016
4017 TRACE_RUNTIME_CALL("%s", "ExitSafepointIgnoreUnwindInProgress done");
4018}
4019DEFINE_RAW_LEAF_RUNTIME_ENTRY(ExitSafepointIgnoreUnwindInProgress,
4020 /*argument_count=*/0,
4021 /*is_float*/ false,
4022 reinterpret_cast<RuntimeFunction>(
4023 &DFLRT_ExitSafepointIgnoreUnwindInProgress));
4024// This is called by a native callback trampoline
4025// (see StubCodeCompiler::GenerateFfiCallbackTrampolineStub). Not registered as
4026// a runtime entry because we can't use Thread to look it up.
4027extern "C" Thread* DLRT_GetFfiCallbackMetadata(
4028 FfiCallbackMetadata::Trampoline trampoline,
4029 uword* out_entry_point,
4030 uword* out_trampoline_type) {
4031 CHECK_STACK_ALIGNMENT;
4032 TRACE_RUNTIME_CALL("GetFfiCallbackMetadata %p",
4033 reinterpret_cast<void*>(trampoline));
4034 ASSERT(out_entry_point != nullptr);
4035 ASSERT(out_trampoline_type != nullptr);
4036
4037 Thread* const current_thread = Thread::Current();
4038 auto* fcm = FfiCallbackMetadata::Instance();
4039 auto metadata = fcm->LookupMetadataForTrampoline(trampoline);
4040
4041 // Is this an async callback?
4042 if (metadata.trampoline_type() ==
4043 FfiCallbackMetadata::TrampolineType::kAsync) {
4044 // It's possible that the callback was deleted, or the target isolate was
4045 // shut down, between the metadata lookup above and this point. So grab
4046 // the lock and then check that the callback is still alive.
4047 MutexLocker locker(fcm->lock());
4048 auto metadata2 = fcm->LookupMetadataForTrampoline(trampoline);
4049 *out_trampoline_type = static_cast<uword>(metadata2.trampoline_type());
4050
4051 // Check IsLive, but also check that the metadata hasn't changed. This is
4052 // for the edge case that the callback was destroyed and recycled in between
4053 // the two lookups.
4054 if (!metadata.IsLive() || !metadata.IsSameCallback(metadata2)) {
4055 TRACE_RUNTIME_CALL("GetFfiCallbackMetadata callback deleted %p",
4056 reinterpret_cast<void*>(trampoline));
4057 return nullptr;
4058 }
4059
4060 *out_entry_point = metadata.target_entry_point();
4061 Isolate* target_isolate = metadata.target_isolate();
4062
4063 Isolate* current_isolate = nullptr;
4064 if (current_thread != nullptr) {
4065 current_isolate = current_thread->isolate();
4066 ASSERT(current_thread->execution_state() == Thread::kThreadInNative);
4067 current_thread->ExitSafepoint();
4068 current_thread->set_execution_state(Thread::kThreadInVM);
4069 }
4070
4071 // Enter the temporary isolate. If the current isolate is in the same group
4072 // as the target isolate, we can skip entering the temp isolate, and marshal
4073 // the args on the current isolate.
4074 if (current_isolate == nullptr ||
4075 current_isolate->group() != target_isolate->group()) {
4076 if (current_isolate != nullptr) {
4077 Thread::ExitIsolate(/*isolate_shutdown=*/false);
4078 }
4079 target_isolate->group()->EnterTemporaryIsolate();
4080 }
4081 Thread* const temp_thread = Thread::Current();
4082 ASSERT(temp_thread != nullptr);
4083 temp_thread->set_unboxed_int64_runtime_arg(metadata.send_port());
4084 temp_thread->set_unboxed_int64_runtime_second_arg(
4085 reinterpret_cast<intptr_t>(current_isolate));
4086 ASSERT(!temp_thread->IsAtSafepoint());
4087 return temp_thread;
4088 }
4089
4090 // Otherwise, this is a sync callback, so verify that we have already
4091 // entered the target isolate.
4092 if (!metadata.IsLive()) {
4093 FATAL("Callback invoked after it has been deleted.");
4094 }
4095 Isolate* target_isolate = metadata.target_isolate();
4096 *out_entry_point = metadata.target_entry_point();
4097 *out_trampoline_type = static_cast<uword>(metadata.trampoline_type());
4098 if (current_thread == nullptr) {
4099 FATAL("Cannot invoke native callback outside an isolate.");
4100 }
4101 if (current_thread->no_callback_scope_depth() != 0) {
4102 FATAL("Cannot invoke native callback when API callbacks are prohibited.");
4103 }
4104 if (current_thread->is_unwind_in_progress()) {
4105 FATAL("Cannot invoke native callback while unwind error propagates.");
4106 }
4107 if (!current_thread->IsDartMutatorThread()) {
4108 FATAL("Native callbacks must be invoked on the mutator thread.");
4109 }
4110 if (current_thread->isolate() != target_isolate) {
4111 FATAL("Cannot invoke native callback from a different isolate.");
4112 }
4113
4114 // Set the execution state to VM while waiting for the safepoint to end.
4115 // This isn't strictly necessary but enables tests to check that we're not
4116 // in native code anymore. See tests/ffi/function_gc_test.dart for example.
4117 current_thread->set_execution_state(Thread::kThreadInVM);
4118
4119 current_thread->ExitSafepoint();
4120
4121 current_thread->set_unboxed_int64_runtime_arg(metadata.context());
4122
4123 TRACE_RUNTIME_CALL("GetFfiCallbackMetadata thread %p", current_thread);
4124 TRACE_RUNTIME_CALL("GetFfiCallbackMetadata entry_point %p",
4125 (void*)*out_entry_point);
4126 TRACE_RUNTIME_CALL("GetFfiCallbackMetadata trampoline_type %p",
4127 (void*)*out_trampoline_type);
4128 return current_thread;
4129}
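// Aside (annotation, not VM source): the async-callback path above follows a
// lookup / lock / re-lookup discipline, sketched here with hypothetical
// Registry and Entry types: anything read without the lock must be
// revalidated under it, since the entry may have been freed and recycled.
template <typename Registry, typename Entry>
static bool LookupRevalidated(Registry* registry, uintptr_t key, Entry* out) {
  Entry unlocked = registry->Lookup(key);  // Fast path; may be stale.
  MutexLocker locker(registry->lock());    // Same lock the writer holds.
  Entry locked = registry->Lookup(key);    // Authoritative under the lock.
  if (!locked.IsLive() || !unlocked.IsSame(locked)) {
    return false;  // Deleted, or deleted and recycled, between the lookups.
  }
  *out = locked;
  return true;
}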
4130
4131extern "C" void DLRT_ExitTemporaryIsolate() {
4132 TRACE_RUNTIME_CALL("ExitTemporaryIsolate%s", "");
4133 Thread* thread = Thread::Current();
4134 ASSERT(thread != nullptr);
4135 Isolate* source_isolate =
4136 reinterpret_cast<Isolate*>(thread->unboxed_int64_runtime_second_arg());
4137
4138 // We're either inside a temp isolate, or inside the source_isolate.
4139 const bool inside_temp_isolate =
4140 source_isolate == nullptr || source_isolate != thread->isolate();
4141 if (inside_temp_isolate) {
4142 IsolateGroup::ExitTemporaryIsolate();
4143 if (source_isolate != nullptr) {
4144 TRACE_RUNTIME_CALL("ExitTemporaryIsolate re-entering source isolate %p",
4145 source_isolate);
4146 Thread::EnterIsolate(source_isolate);
4147 Thread::Current()->EnterSafepoint();
4148 }
4149 } else {
4150 thread->EnterSafepoint();
4151 }
4152 TRACE_RUNTIME_CALL("ExitTemporaryIsolate %s", "done");
4153}
4154
4155extern "C" ApiLocalScope* DLRT_EnterHandleScope(Thread* thread) {
4156 CHECK_STACK_ALIGNMENT;
4157 TRACE_RUNTIME_CALL("EnterHandleScope %p", thread);
4158 thread->EnterApiScope();
4159 ApiLocalScope* return_value = thread->api_top_scope();
4160 TRACE_RUNTIME_CALL("EnterHandleScope returning %p", return_value);
4161 return return_value;
4162}
4163DEFINE_RAW_LEAF_RUNTIME_ENTRY(EnterHandleScope,
4164 /*argument_count=*/1,
4165 /*is_float=*/false,
4166 reinterpret_cast<RuntimeFunction>(
4167 &DLRT_EnterHandleScope));
4168extern "C" void DLRT_ExitHandleScope(Thread* thread) {
4169 CHECK_STACK_ALIGNMENT;
4170 TRACE_RUNTIME_CALL("ExitHandleScope %p", thread);
4171 thread->ExitApiScope();
4172 TRACE_RUNTIME_CALL("ExitHandleScope %s", "done");
4173}
4174DEFINE_RAW_LEAF_RUNTIME_ENTRY(ExitHandleScope,
4175 /*argument_count=*/1,
4176 /*is_float=*/false,
4177 reinterpret_cast<RuntimeFunction>(
4178 &DLRT_ExitHandleScope));
4179extern "C" LocalHandle* DLRT_AllocateHandle(ApiLocalScope* scope) {
4180 CHECK_STACK_ALIGNMENT;
4181 TRACE_RUNTIME_CALL("AllocateHandle %p", scope);
4182 LocalHandle* return_value = scope->local_handles()->AllocateHandle();
4183 // Don't return an uninitialised handle.
4184 return_value->set_ptr(Object::sentinel().ptr());
4185 TRACE_RUNTIME_CALL("AllocateHandle returning %p", return_value);
4186 return return_value;
4187}
4188
4189DEFINE_RAW_LEAF_RUNTIME_ENTRY(AllocateHandle,
4190 /*argument_count=*/1,
4191 /*is_float=*/false,
4192 reinterpret_cast<RuntimeFunction>(
4193 &DLRT_AllocateHandle));
4194// Enables reusing `Dart_PropagateError` from `FfiCallInstr`.
4195// `Dart_PropagateError` requires the native state and transitions into the VM.
4196// So the flow is:
4197// - FfiCallInstr (slow path)
4198// - TransitionGeneratedToNative
4199// - DLRT_PropagateError (this)
4200// - Dart_PropagateError
4201// - TransitionNativeToVM
4202// - Throw
4203extern "C" void DLRT_PropagateError(Dart_Handle handle) {
4204 CHECK_STACK_ALIGNMENT;
4205 TRACE_RUNTIME_CALL("PropagateError %p", handle);
4206 ASSERT(Thread::Current()->execution_state() == Thread::kThreadInNative);
4207 ASSERT(Dart_IsError(handle));
4208 Dart_PropagateError(handle);
4209 // We should never exit through normal control flow.
4210 UNREACHABLE();
4211}
4212
4213// Not a leaf-function, throws error.
4214DEFINE_RAW_LEAF_RUNTIME_ENTRY(PropagateError,
4215 /*argument_count=*/1,
4216 /*is_float=*/false,
4217 reinterpret_cast<RuntimeFunction>(
4218 &DLRT_PropagateError));
4219#if !defined(USING_MEMORY_SANITIZER)
4220extern "C" void __msan_unpoison(const volatile void*, size_t) {
4221 UNREACHABLE();
4222}
4223extern "C" void __msan_unpoison_param(size_t) {
4224 UNREACHABLE();
4225}
4226#endif
4227
4228#if !defined(USING_THREAD_SANITIZER)
4229extern "C" void __tsan_acquire(void* addr) {
4230 UNREACHABLE();
4231}
4232extern "C" void __tsan_release(void* addr) {
4233 UNREACHABLE();
4234}
4235#endif
4236
4237// These runtime entries are defined even when not using MSAN / TSAN to keep
4238// offsets on Thread consistent.
4239
4241 /*argument_count=*/2,
4242 /*is_float=*/false,
4244
4246 /*argument_count=*/1,
4247 /*is_float=*/false,
4249
4251 /*argument_count=*/1,
4252 /*is_float=*/false,
4254
4256 /*argument_count=*/1,
4257 /*is_float=*/false,
4259
4260} // namespace dart
static ArrayPtr New(intptr_t len, Heap::Space space=Heap::kNew)
Definition object.h:10933
virtual void SetTypeArguments(const TypeArguments &value) const
Definition object.h:10882
static intptr_t LengthOf(const ArrayPtr array)
Definition object.h:10809
void SetAt(intptr_t index, const Object &value) const
Definition object.h:10858
void Add(const T &value)
intptr_t length() const
static const Bool & Get(bool value)
Definition object.h:10780
static const Bool & True()
Definition object.h:10776
StringPtr target_name() const
Definition object.h:2352
ArrayPtr arguments_descriptor() const
Definition object.h:2353
ClassPtr At(intptr_t cid) const
CodePtr allocation_stub() const
Definition object.h:1802
FunctionPtr GetInvocationDispatcher(const String &target_name, const Array &args_desc, UntaggedFunction::Kind kind, bool create_if_absent) const
Definition object.cc:3897
intptr_t id() const
Definition object.h:1235
intptr_t NumTypeArguments() const
Definition object.cc:3690
bool IsClosureClass() const
Definition object.h:1579
StringPtr Name() const
Definition object.cc:3038
ErrorPtr EnsureIsFinalized(Thread *thread) const
Definition object.cc:4979
ClassPtr SuperClass(ClassTable *class_table=nullptr) const
Definition object.cc:3715
bool is_allocate_finalized() const
Definition object.h:1734
static ClosurePtr New(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const Function &function, const Object &context, Heap::Space space=Heap::kNew)
Definition object.cc:26021
static void PatchInstanceCallAt(uword return_address, const Code &caller_code, const Object &data, const Code &target)
static CodePtr GetStaticCallTargetAt(uword return_address, const Code &code)
static void PatchSwitchableCallAt(uword return_address, const Code &caller_code, const Object &data, const Code &target)
static uword GetSwitchableCallTargetEntryAt(uword return_address, const Code &caller_code)
static ObjectPtr GetSwitchableCallDataAt(uword return_address, const Code &caller_code)
static CodePtr GetInstanceCallAt(uword return_address, const Code &caller_code, Object *data)
static void PatchStaticCallAt(uword return_address, const Code &code, const Code &new_target)
intptr_t GetNullCheckNameIndexAt(int32_t pc_offset)
FunctionPtr function() const
Definition object.h:7101
uword EntryPoint() const
Definition object.h:6837
void SetStaticCallTargetCodeAt(uword pc, const Code &code) const
Definition object.cc:17865
bool is_optimized() const
Definition object.h:6790
bool is_force_optimized() const
Definition object.h:6798
void SetStubCallTargetCodeAt(uword pc, const Code &code) const
Definition object.cc:17879
bool ContainsInstructionAt(uword addr) const
Definition object.h:6888
bool IsDisabled() const
Definition object.h:7228
ObjectPtr owner() const
Definition object.h:7106
void set_is_alive(bool value) const
Definition object.cc:17708
ObjectPoolPtr GetObjectPool() const
Definition object.cc:17773
FunctionPtr GetStaticCallTargetFunctionAt(uword pc) const
Definition object.cc:17850
static bool CanOptimizeFunction(Thread *thread, const Function &function)
Definition compiler.cc:230
static constexpr intptr_t kNoOSRDeoptId
Definition compiler.h:73
static ErrorPtr EnsureUnoptimizedCode(Thread *thread, const Function &function)
Definition compiler.cc:855
static ObjectPtr CompileOptimizedFunction(Thread *thread, const Function &function, intptr_t osr_id=kNoOSRDeoptId)
Definition compiler.cc:887
static ContextPtr New(intptr_t num_variables, Heap::Space space=Heap::kNew)
Definition object.cc:18561
static intptr_t NumVariables(const ContextPtr context)
Definition object.h:7389
void set_parent(const Context &parent) const
Definition object.h:7378
void SetAt(intptr_t context_index, const Object &value) const
Definition object.h:13277
ObjectPtr At(intptr_t context_index) const
Definition object.h:7393
intptr_t num_variables() const
Definition object.h:7385
ContextPtr parent() const
Definition object.h:7377
static ObjectPtr InvokeNoSuchMethod(Thread *thread, const Instance &receiver, const String &target_name, const Array &arguments, const Array &arguments_descriptor)
static ObjectPtr InvokeClosure(Thread *thread, const Array &arguments)
static ObjectPtr InvokeFunction(const Function &function, const Array &arguments)
Definition dart_entry.cc:31
StackFrame * NextFrame()
static IsolateGroup * vm_isolate_group()
Definition dart.h:69
intptr_t Length() const
Definition debugger.h:463
ActivationFrame * FrameAt(int i) const
Definition debugger.h:465
static DebuggerStackTrace * CollectAsyncAwaiters()
Definition debugger.cc:1732
intptr_t MaterializeDeferredObjects()
void set_dest_frame(const StackFrame *frame)
intptr_t DestStackAdjustment() const
static constexpr intptr_t kNone
Definition deopt_id.h:27
static DoublePtr New(double d, Heap::Space space=Heap::kNew)
Definition object.cc:23481
static DART_NORETURN void ThrowByType(ExceptionType type, const Array &arguments)
static DART_NORETURN void ThrowOOM()
static DART_NORETURN void ThrowRangeError(const char *argument_name, const Integer &argument_value, intptr_t expected_from, intptr_t expected_to)
static DART_NORETURN void ThrowLateFieldAssignedDuringInitialization(const String &name)
static DART_NORETURN void Throw(Thread *thread, const Instance &exception)
static DART_NORETURN void ThrowArgumentError(const Instance &arg)
@ kIntegerDivisionByZeroException
Definition exceptions.h:60
static DART_NORETURN void ThrowLateFieldNotInitialized(const String &name)
static DART_NORETURN void ReThrow(Thread *thread, const Instance &exception, const Instance &stacktrace, bool bypass_debugger=false)
static void CreateAndThrowTypeError(TokenPosition location, const AbstractType &src_type, const AbstractType &dst_type, const String &dst_name)
static DART_NORETURN void PropagateError(const Error &error)
static FfiCallbackMetadata * Instance()
DART_WARN_UNUSED_RESULT ErrorPtr InitializeInstance(const Instance &instance) const
Definition object.cc:12389
static bool IsGetterName(const String &function_name)
Definition object.cc:11882
DART_WARN_UNUSED_RESULT ErrorPtr InitializeStatic() const
Definition object.cc:12427
static bool IsSetterName(const String &function_name)
Definition object.cc:11886
StringPtr name() const
Definition object.h:4408
ObjectPtr StaticValue() const
Definition object.h:13253
static StringPtr GetterName(const String &field_name)
Definition object.cc:11843
static StringPtr NameFromGetter(const String &getter_name)
Definition object.cc:11867
void RecordStore(const Object &value) const
Definition object.cc:13074
static Float32x4Ptr New(float value0, float value1, float value2, float value3, Heap::Space space=Heap::kNew)
Definition object.cc:25386
static Float64x2Ptr New(double value0, double value1, Heap::Space space=Heap::kNew)
Definition object.cc:25554
bool PrologueNeedsArgumentsDescriptor() const
Definition object.cc:11488
static bool IsDynamicInvocationForwarderName(const String &name)
Definition object.cc:4240
void EnsureHasCompiledUnoptimizedCode() const
Definition object.cc:8026
const char * ToFullyQualifiedCString() const
Definition object.cc:9820
ClosurePtr ImplicitInstanceClosure(const Instance &receiver) const
Definition object.cc:10780
static StringPtr DemangleDynamicInvocationForwarderName(const String &name)
Definition object.cc:4248
bool HasOptimizedCode() const
Definition object.cc:11032
static StringPtr CreateDynamicInvocationForwarderName(const String &name)
Definition object.cc:4255
bool IsMethodExtractor() const
Definition object.h:3264
CodePtr unoptimized_code() const
Definition object.h:3165
CodePtr EnsureHasCode() const
Definition object.cc:11396
bool IsDebugging(Thread *thread, const Function &function)
Definition debugger.cc:3465
@ kNew
Definition heap.h:38
@ kOld
Definition heap.h:39
Scavenger * new_space()
Definition heap.h:62
void CollectAllGarbage(GCReason reason=GCReason::kFull, bool compact=false)
Definition heap.cc:562
bool Contains(uword addr) const
Definition heap.cc:238
void EnsureHasCheck(const GrowableArray< intptr_t > &class_ids, const Function &target, intptr_t count=1) const
Definition object.cc:16822
static ICDataPtr ICDataOfEntriesArray(const Array &array)
Definition object.cc:17534
static ICDataPtr NewWithCheck(const Function &owner, const String &target_name, const Array &arguments_descriptor, intptr_t deopt_id, intptr_t num_args_tested, RebindRule rebind_rule, GrowableArray< intptr_t > *cids, const Function &target, const AbstractType &receiver_type=Object::null_abstract_type())
Definition object.cc:17407
intptr_t NumArgsTested() const
Definition object.cc:16518
ArrayPtr entries() const
Definition object.h:2763
RebindRule rebind_rule() const
Definition object.cc:16594
bool NumberOfChecksIs(intptr_t n) const
Definition object.cc:16629
FunctionPtr GetTargetAt(intptr_t index) const
Definition object.cc:17076
bool is_tracking_exactness() const
Definition object.h:2463
void set_is_megamorphic(bool value) const
Definition object.h:2535
void EnsureHasReceiverCheck(intptr_t receiver_class_id, const Function &target, intptr_t count=1, StaticTypeExactnessState exactness=StaticTypeExactnessState::NotTracking()) const
Definition object.cc:16923
intptr_t NumberOfChecks() const
Definition object.cc:16624
bool IsInstanceOf(const AbstractType &other, const TypeArguments &other_instantiator_type_arguments, const TypeArguments &other_function_type_arguments) const
Definition object.cc:20655
AbstractTypePtr GetType(Heap::Space space) const
Definition object.cc:20561
bool IsAssignableTo(const AbstractType &other, const TypeArguments &other_instantiator_type_arguments, const TypeArguments &other_function_type_arguments) const
Definition object.cc:20670
static InstancePtr NewAlreadyFinalized(const Class &cls, Heap::Space space=Heap::kNew)
Definition object.cc:20984
static Int32x4Ptr New(int32_t value0, int32_t value1, int32_t value2, int32_t value3, Heap::Space space=Heap::kNew)
Definition object.cc:25470
static IntegerPtr New(const String &str, Heap::Space space=Heap::kNew)
Definition object.cc:23063
static IntegerPtr NewFromUint64(uint64_t value, Heap::Space space=Heap::kNew)
Definition object.cc:23105
static int EncodeType(Level level, Kind kind)
GroupDebugger * debugger() const
Definition isolate.h:314
static bool IsSystemIsolateGroup(const IsolateGroup *group)
Definition isolate.cc:3559
Heap * heap() const
Definition isolate.h:295
ObjectStore * object_store() const
Definition isolate.h:505
static IsolateGroup * Current()
Definition isolate.h:534
ClassTable * class_table() const
Definition isolate.h:491
static void ExitTemporaryIsolate()
Definition isolate.cc:844
Mutex * patchable_call_mutex()
Definition isolate.h:514
Isolate * EnterTemporaryIsolate()
Definition isolate.cc:835
Mutex * subtype_test_cache_mutex()
Definition isolate.h:511
void set_deopt_context(DeoptContext *value)
Definition isolate.h:1209
static bool IsSystemIsolate(const Isolate *isolate)
Definition isolate.h:1398
IsolateObjectStore * isolate_object_store() const
Definition isolate.h:960
bool has_attempted_stepping() const
Definition isolate.h:1374
IsolateGroup * group() const
Definition isolate.h:990
DeoptContext * deopt_context() const
Definition isolate.h:1208
Thread * mutator_thread() const
Definition isolate.cc:1884
void set_ptr(ObjectPtr ptr)
LocalHandle * AllocateHandle()
static MegamorphicCachePtr Lookup(Thread *thread, const String &name, const Array &descriptor)
static std::unique_ptr< Message > New(Args &&... args)
Definition message.h:72
@ kNormalPriority
Definition message.h:28
static MonomorphicSmiableCallPtr New(classid_t expected_cid, const Code &target)
Definition object.cc:16397
void SetReturn(const Object &value) const
void SetArgAt(int index, const Object &value) const
static uword GetCurrentStackPointer()
Definition os_thread.cc:132
static void static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
UntaggedObject * untag() const
static ObjectPtr null()
Definition object.h:433
intptr_t GetClassId() const
Definition object.h:341
ObjectPtr ptr() const
Definition object.h:332
bool IsCanonical() const
Definition object.h:335
virtual const char * ToCString() const
Definition object.h:366
bool IsNull() const
Definition object.h:363
static Object & Handle()
Definition object.h:407
static ObjectPtr RawCast(ObjectPtr obj)
Definition object.h:325
static Object & ZoneHandle()
Definition object.h:419
ClassPtr clazz() const
Definition object.h:13192
PatchableCallHandler(Thread *thread, const GrowableArray< const Instance * > &caller_arguments, MissHandler miss_handler, NativeArguments arguments, StackFrame *caller_frame, const Code &caller_code, const Function &caller_function)
void ResolveSwitchAndReturn(const Object &data)
uword FindPendingDeopt(uword fp)
void ClearPendingDeoptsAtOrBelow(uword fp, ClearReason reason)
void AddPendingDeopt(uword fp, uword pc)
void set_ptr(ObjectPtr ref)
static bool PostMessage(std::unique_ptr< Message > message, bool before_events=false)
Definition port.cc:152
intptr_t num_fields() const
Definition object.h:11288
static RecordPtr New(RecordShape shape, Heap::Space space=Heap::kNew)
Definition object.cc:27823
void SetFieldAt(intptr_t field_index, const Object &value) const
Definition object.h:11410
intptr_t GetFieldIndexByName(Thread *thread, const String &field_name) const
Definition object.cc:27944
ObjectPtr FieldAt(intptr_t field_index) const
Definition object.h:11407
static FunctionPtr ResolveDynamicFunction(Zone *zone, const Class &receiver_class, const String &function_name)
Definition resolver.cc:189
static FunctionPtr ResolveDynamicAnyArgs(Zone *zone, const Class &receiver_class, const String &function_name, bool allow_add=true)
Definition resolver.cc:198
static FunctionPtr ResolveDynamicForReceiverClass(const Class &receiver_class, const String &function_name, const ArgumentsDescriptor &args_desc, bool allow_add=true)
Definition resolver.cc:160
intptr_t AbandonRemainingTLAB(Thread *thread)
int32_t get_sp() const
static Simulator * Current()
static SingleTargetCachePtr New()
Definition object.cc:16368
static SmiPtr New(intptr_t value)
Definition object.h:9985
intptr_t Value() const
Definition object.h:9969
friend class Class
Definition object.h:10026
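A minimal sketch of the Smi boxing helpers listed above:

const Smi& boxed = Smi::Handle(zone, Smi::New(42));  // box an intptr_t
const intptr_t raw = boxed.Value();                  // raw == 42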
uword pc() const
Definition stack_frame.h:43
virtual bool IsExitFrame() const
CodePtr LookupDartCode() const
virtual bool IsStubFrame() const
void set_pc(uword value)
Definition stack_frame.h:72
TokenPosition GetTokenPos() const
uword fp() const
Definition stack_frame.h:42
uword sp() const
Definition stack_frame.h:41
virtual bool IsDartFrame(bool validate=true) const
Definition stack_frame.h:97
FunctionPtr LookupDartFunction() const
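A hedged sketch of walking Dart frames with the StackFrame accessors above, the usual pattern runtime entries use to find their caller:

DartFrameIterator iterator(thread, StackFrameIterator::kNoCrossThreadIteration);
for (StackFrame* frame = iterator.NextFrame(); frame != nullptr;
     frame = iterator.NextFrame()) {
  const Code& code = Code::Handle(zone, frame->LookupDartCode());
  // frame->pc(), fp(), and sp() expose the raw frame slots;
  // GetTokenPos() maps the return pc back to a source position.
}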
static StaticTypeExactnessState NotExact()
static StaticTypeExactnessState NotTracking()
static StringPtr NewFormatted(const char *format, ...) PRINTF_ATTRIBUTE(1, 2)
Definition object.cc:24083
static StringPtr New(const char *cstr, Heap::Space space=Heap::kNew)
Definition object.cc:23777
static const char * ToCString(Thread *thread, StringPtr ptr)
Definition object.cc:24205
static CodePtr GetAllocationStubForClass(const Class &cls)
Definition stub_code.cc:174
void WriteEntryToBuffer(Zone *zone, BaseTextBuffer *buffer, intptr_t index, const char *line_prefix=nullptr) const
Definition object.cc:19546
static SubtypeTestCachePtr New(intptr_t num_inputs)
Definition object.cc:18974
static constexpr intptr_t kMaxInputs
Definition object.h:7676
static constexpr intptr_t MaxEntriesForCacheAllocatedFor(intptr_t count)
Definition object.h:7814
static intptr_t UsedInputsForType(const AbstractType &type)
Definition object.cc:19718
intptr_t NumberOfChecks() const
Definition object.cc:19004
intptr_t AddCheck(const Object &instance_class_id_or_signature, const AbstractType &destination_type, const TypeArguments &instance_type_arguments, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const TypeArguments &instance_parent_function_type_arguments, const TypeArguments &instance_delayed_type_arguments, const Bool &test_result) const
Definition object.cc:19028
bool HasCheck(const Object &instance_class_id_or_signature, const AbstractType &destination_type, const TypeArguments &instance_type_arguments, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const TypeArguments &instance_parent_function_type_arguments, const TypeArguments &instance_delayed_type_arguments, intptr_t *index, Bool *result) const
Definition object.cc:19514
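A sketch of the probe-then-fill cycle the SubtypeTestCache entries above suggest; every argument name here is a placeholder for values the real type-check entries compute:

intptr_t index = -1;
Bool& cached = Bool::Handle(zone);
if (!cache.HasCheck(id_or_signature, destination_type, instance_type_args,
                    instantiator_type_args, function_type_args,
                    parent_function_type_args, delayed_type_args, &index,
                    &cached)) {
  // Not seen before: record the freshly computed result for next time.
  cache.AddCheck(id_or_signature, destination_type, instance_type_args,
                 instantiator_type_args, function_type_args,
                 parent_function_type_args, delayed_type_args,
                 Bool::Get(test_result));
}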
static SuspendStatePtr Clone(Thread *thread, const SuspendState &src, Heap::Space space=Heap::kNew)
Definition object.cc:26611
static constexpr intptr_t kSuspendStateVarIndex
Definition object.h:12591
static SuspendStatePtr New(intptr_t frame_size, const Instance &function_data, Heap::Space space=Heap::kNew)
Definition object.cc:26587
Zone * zone() const
void set_execution_state(ExecutionState state)
Definition thread.h:1035
ApiLocalScope * api_top_scope() const
Definition thread.h:512
void DeferredMarkingStackAddObject(ObjectPtr obj)
Definition thread.cc:833
int32_t no_callback_scope_depth() const
Definition thread.h:618
static Thread * Current()
Definition thread.h:361
PendingDeopts & pending_deopts()
Definition thread.h:1131
void set_unboxed_int64_runtime_arg(int64_t value)
Definition thread.h:815
static bool IsAtSafepoint(SafepointLevel level, uword state)
Definition thread.h:898
void SetUnwindErrorInProgress(bool value)
Definition thread.h:1003
bool is_marking() const
Definition thread.h:669
Heap * heap() const
Definition thread.cc:876
double unboxed_double_runtime_arg() const
Definition thread.h:824
void ExitSafepoint()
Definition thread.h:1081
void EnterApiScope()
Definition thread.cc:1235
void ExitApiScope()
Definition thread.cc:1248
int64_t unboxed_int64_runtime_arg() const
Definition thread.h:812
bool is_unwind_in_progress() const
Definition thread.h:638
uword top_exit_frame_info() const
Definition thread.h:678
int64_t unboxed_int64_runtime_second_arg() const
Definition thread.h:818
bool IsDartMutatorThread() const
Definition thread.h:546
void EnterSafepoint()
Definition thread.h:1063
ExecutionState execution_state() const
Definition thread.h:1027
Isolate * isolate() const
Definition thread.h:533
int32_t IncrementAndGetStackOverflowCount()
Definition thread.h:446
uint32_t IncrementAndGetRuntimeCallCount()
Definition thread.h:450
@ kThreadInNative
Definition thread.h:1023
IsolateGroup * isolate_group() const
Definition thread.h:540
static void EnterIsolate(Isolate *isolate)
Definition thread.cc:366
static void ExitIsolate(bool isolate_shutdown=false)
Definition thread.cc:423
void set_unboxed_int64_runtime_second_arg(int64_t value)
Definition thread.h:821
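A hedged sketch of the safepoint transition pattern implied by the Thread entries above, roughly what the VM's scoped transition helpers expand to:

Thread* thread = Thread::Current();
thread->set_execution_state(Thread::kThreadInNative);
thread->EnterSafepoint();   // GC and other safepoint operations may run now
// ... blocking native work ...
thread->ExitSafepoint();
thread->set_execution_state(Thread::kThreadInVM);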
bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition object.h:8681
intptr_t Length() const
Definition object.cc:7352
TypeArgumentsPtr InstantiateAndCanonicalizeFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments) const
Definition object.cc:7678
bool IsUninstantiatedIdentity() const
Definition object.cc:7410
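A sketch of guarded instantiation with the TypeArguments entries above; the three handles are assumed to be in scope:

if (!type_args.IsNull() && !type_args.IsInstantiated()) {
  // Skip the expensive path when the arguments are already concrete.
  type_args = type_args.InstantiateAndCanonicalizeFrom(
      instantiator_type_args, function_type_args);
}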
static CodePtr SpecializeStubFor(Thread *thread, const AbstractType &type)
static TypePtr BoolType()
Definition object.cc:21882
static intptr_t MaxElements(intptr_t class_id)
Definition object.h:11658
static TypedDataPtr New(intptr_t class_id, intptr_t len, Heap::Space space=Heap::kNew)
Definition object.cc:25666
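A minimal sketch of a bounds-checked TypedData allocation using the helpers above; the length and class id are placeholders:

const intptr_t len = 16;
ASSERT(len <= TypedData::MaxElements(kTypedDataUint8ArrayCid));
const TypedData& data = TypedData::Handle(
    zone, TypedData::New(kTypedDataUint8ArrayCid, len, Heap::kNew));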
static uword ToAddr(const UntaggedObject *raw_obj)
Definition raw_object.h:501
static constexpr T Maximum(T x, T y)
Definition utils.h:26
#define THR_Print(format,...)
Definition log.h:20
int64_t Dart_Port
Definition dart_api.h:1524
struct _Dart_Handle * Dart_Handle
Definition dart_api.h:258
#define ASSERT(E)
VkInstance instance
Definition main.cc:48
double frame
Definition examples.cpp:31
#define FATAL(error)
G_BEGIN_DECLS G_MODULE_EXPORT FlValue * args
static const uint8_t buffer[]
const uint8_t uint32_t uint32_t GError ** error
uint8_t value
GAsyncResult * result
uint32_t * target
const char * charp
Definition flags.h:12
#define DECLARE_FLAG(type, name)
Definition flags.h:14
#define DEFINE_FLAG(type, name, default_value, comment)
Definition flags.h:16
Dart_NativeFunction function
Definition fuchsia.cc:51
static float max(float r, float g, float b)
Definition hsl.cpp:49
size_t length
Win32Message message
#define MSAN_UNPOISON(ptr, len)
double y
double x
ImplicitString Name
Definition DMSrcSink.h:38
bool WillAllocateNewOrRememberedContext(intptr_t num_context_variables)
bool WillAllocateNewOrRememberedArray(intptr_t length)
constexpr int64_t kMaxInt64
Definition globals.h:486
static AbstractTypePtr InstantiateType(const AbstractType &type, const AbstractType &instantiator)
Definition mirrors.cc:614
static void InlineCacheMissHandler(Thread *thread, Zone *zone, const GrowableArray< const Instance * > &args, const ICData &ic_data, NativeArguments native_arguments)
FunctionPtr InlineCacheMissHelper(const Class &receiver_class, const Array &args_descriptor, const String &target_name)
const char *const name
void DLRT_PropagateError(Dart_Handle handle)
IntegerPtr DoubleToInteger(Zone *zone, double val)
void DeoptimizeFunctionsOnStack()
static void CopySavedRegisters(uword saved_registers_address, fpu_register_t **fpu_registers, intptr_t **cpu_registers)
static bool IsSuspendedFrame(Zone *zone, const Function &function, StackFrame *frame)
Thread * DLRT_GetFfiCallbackMetadata(FfiCallbackMetadata::Trampoline trampoline, uword *out_entry_point, uword *out_trampoline_type)
static void DeoptimizeLastDartFrameIfOptimized()
static void PrintSubtypeCheck(const AbstractType &subtype, const AbstractType &supertype, const bool result)
static void HandleOSRRequest(Thread *thread)
double DartModulo(double left, double right)
int32_t classid_t
Definition globals.h:524
static DART_FORCE_INLINE uword ParamAddress(uword fp, intptr_t reverse_index)
static void NullErrorHelper(Zone *zone, const String &selector, bool is_param_name=false)
static void UpdateTypeTestCache(Zone *zone, Thread *thread, const Instance &instance, const AbstractType &destination_type, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const Bool &result, const SubtypeTestCache &new_cache)
DART_EXPORT void Dart_PropagateError(Dart_Handle handle)
static TokenPosition GetCallerLocation()
@ kNullCid
Definition class_id.h:252
void *(* MemMoveCFunction)(void *dest, const void *src, size_t n)
void DFLRT_ExitSafepointIgnoreUnwindInProgress(NativeArguments __unusable_)
static void PrintTypeCheck(const char *message, const Instance &instance, const AbstractType &type, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const Bool &result)
void DLRT_ExitHandleScope(Thread *thread)
LocalHandle * DLRT_AllocateHandle(ApiLocalScope *scope)
void __tsan_acquire(void *addr)
uintptr_t uword
Definition globals.h:501
void DLRT_ExitTemporaryIsolate()
double(* BinaryMathCFunction)(double x, double y)
void __msan_unpoison(const volatile void *, size_t)
DART_EXPORT bool Dart_IsError(Dart_Handle handle)
ApiLocalScope * DLRT_EnterHandleScope(Thread *thread)
const uint32_t fp
@ kNumberOfCpuRegisters
const int kNumberOfFpuRegisters
static constexpr intptr_t kNumberOfSavedFpuRegisters
void DeoptimizeAt(Thread *mutator_thread, const Code &optimized_code, StackFrame *frame)
static bool ResolveCallThroughGetter(const Class &receiver_class, const String &target_name, const String &demangled, const Array &arguments_descriptor, Function *result)
static FunctionPtr ComputeTypeCheckTarget(const Instance &receiver, const AbstractType &type, const ArgumentsDescriptor &desc)
static void RuntimeAllocationEpilogue(Thread *thread)
@ kAllFree
Definition object.h:2920
static constexpr intptr_t kDefaultMaxSubtypeCacheEntries
static FunctionPtr Resolve(Thread *thread, Zone *zone, const GrowableArray< const Instance * > &caller_arguments, const Class &receiver_class, const String &name, const Array &descriptor)
static void HandleStackOverflowTestCases(Thread *thread)
static Heap::Space SpaceForRuntimeAllocation()
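A sketch of what SpaceForRuntimeAllocation plausibly selects, given the --runtime_allocate_old flag documented at the top of this file; treat it as an assumption, not the verified body:

static Heap::Space SpaceForRuntimeAllocationSketch() {
  // Old space when the flag forces it, otherwise the ordinary nursery.
  return FLAG_runtime_allocate_old ? Heap::kOld : Heap::kNew;
}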
double(* UnaryMathCFunction)(double x)
const intptr_t cid
static constexpr intptr_t kNumberOfSavedCpuRegisters
@ kTypeCheckFromLazySpecializeStub
@ kTypeCheckFromInline
@ kTypeCheckFromSlowStub
FrameLayout runtime_frame_layout
static void ThrowIfError(const Object &result)
static DART_FORCE_INLINE uword LocalVarAddress(uword fp, intptr_t index)
static InstancePtr AllocateObject(Thread *thread, const Class &cls)
constexpr intptr_t kWordSize
Definition globals.h:509
void DFLRT_ExitSafepoint(NativeArguments __unusable_)
static void TrySwitchInstanceCall(Thread *thread, StackFrame *caller_frame, const Code &caller_code, const Function &caller_function, const ICData &ic_data, const Function &target_function)
@ kCurrentClass
Definition object.h:2230
void __msan_unpoison_param(size_t)
void DFLRT_EnterSafepoint(NativeArguments __unusable_)
static DART_FORCE_INLINE bool IsCalleeFrameOf(uword fp, uword other_fp)
const char *const function_name
static int8_t data[kExtLength]
static void DoThrowNullError(Isolate *isolate, Thread *thread, Zone *zone, bool is_param)
const char * DeoptReasonToCString(ICData::DeoptReasonId deopt_reason)
void __tsan_release(void *addr)
static ObjectPtr InvokeCallThroughGetterOrNoSuchMethod(Thread *thread, Zone *zone, const Instance &receiver, const String &target_name, const Array &orig_arguments, const Array &orig_arguments_desc)
void ReportImpossibleNullError(intptr_t cid, StackFrame *caller_frame, Thread *thread)
void OnEveryRuntimeEntryCall(Thread *thread, const char *runtime_call_name, bool can_lazy_deopt)
const int kFpuRegisterSize
DEF_SWITCHES_START
Definition switches.h:191
std::function< void()> closure
Definition closure.h:14
#define CHECK_STACK_ALIGNMENT
#define DEOPT_REASONS(V)
Definition object.h:2473
#define Pp
Definition globals.h:425
#define FALL_THROUGH
Definition globals.h:15
#define Px
Definition globals.h:410
#define DEBUG_ONLY(code)
Definition globals.h:141
#define UNLIKELY(cond)
Definition globals.h:261
#define Pd
Definition globals.h:408
#define DEOPT_REASON_TO_TEXT(name)
#define DEFINE_RUNTIME_ENTRY(name, argument_count)
#define TRACE_RUNTIME_CALL(format, name)
#define DEFINE_RAW_LEAF_RUNTIME_ENTRY(name, argument_count, is_float, func)
#define DEFINE_RUNTIME_ENTRY_NO_LAZY_DEOPT(name, argument_count)
#define END_LEAF_RUNTIME_ENTRY
#define DEFINE_LEAF_RUNTIME_ENTRY(type, name, argument_count,...)
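A hypothetical example (the name LeafAddDoubles is invented) of how the leaf-entry macros above are used; leaf entries run without a safepoint transition and must not allocate:

DEFINE_LEAF_RUNTIME_ENTRY(double, LeafAddDoubles, 2, double x, double y) {
  return x + y;
}
END_LEAF_RUNTIME_ENTRY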
intptr_t first_local_from_fp
intptr_t FrameSlotForVariableIndex(intptr_t index) const
double fmod_ieee(double x, double y)
double atan2_ieee(double y, double x)
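A sketch of the sign adjustment that DartModulo (listed earlier) layers on fmod_ieee so the result matches Dart's non-negative % semantics; this mirrors the documented behavior rather than quoting the exact body:

double DartModuloSketch(double left, double right) {
  double remainder = fmod_ieee(left, right);
  if (remainder == 0.0) {
    remainder = +0.0;                              // normalize -0.0
  } else if (remainder < 0.0) {
    remainder += (right < 0.0) ? -right : right;   // shift into [0, |right|)
  }
  return remainder;
}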