Flutter Engine
The Flutter Engine
All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Properties Friends Macros Modules Pages
object.cc
Go to the documentation of this file.
1// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/object.h"
6
7#include <memory>
8
10#include "include/dart_api.h"
11#include "lib/integers.h"
12#include "lib/stacktrace.h"
13#include "platform/assert.h"
15#include "platform/unaligned.h"
16#include "platform/unicode.h"
17#include "vm/bit_vector.h"
18#include "vm/bootstrap.h"
19#include "vm/canonical_tables.h"
20#include "vm/class_finalizer.h"
21#include "vm/class_id.h"
23#include "vm/code_comments.h"
24#include "vm/code_descriptors.h"
25#include "vm/code_observers.h"
29#include "vm/cpu.h"
30#include "vm/dart.h"
31#include "vm/dart_api_state.h"
32#include "vm/dart_entry.h"
33#include "vm/datastream.h"
34#include "vm/debugger.h"
37#include "vm/elf.h"
38#include "vm/exceptions.h"
39#include "vm/growable_array.h"
40#include "vm/hash.h"
41#include "vm/hash_table.h"
42#include "vm/heap/become.h"
43#include "vm/heap/heap.h"
44#include "vm/heap/sampler.h"
45#include "vm/heap/weak_code.h"
46#include "vm/image_snapshot.h"
47#include "vm/isolate_reload.h"
48#include "vm/kernel.h"
49#include "vm/kernel_binary.h"
50#include "vm/kernel_isolate.h"
51#include "vm/kernel_loader.h"
52#include "vm/log.h"
53#include "vm/native_symbol.h"
54#include "vm/object_graph.h"
55#include "vm/object_store.h"
56#include "vm/os.h"
57#include "vm/parser.h"
58#include "vm/profiler.h"
59#include "vm/regexp.h"
60#include "vm/resolver.h"
61#include "vm/reusable_handles.h"
63#include "vm/runtime_entry.h"
64#include "vm/scopes.h"
65#include "vm/stack_frame.h"
66#include "vm/stub_code.h"
67#include "vm/symbols.h"
68#include "vm/tags.h"
69#include "vm/thread_registry.h"
70#include "vm/timeline.h"
72#include "vm/zone_text_buffer.h"
73
74#if !defined(DART_PRECOMPILED_RUNTIME)
82#endif // !defined(DART_PRECOMPILED_RUNTIME)
83
84namespace dart {
85
86DEFINE_FLAG(uint64_t,
87 huge_method_cutoff_in_code_size,
88 200000,
89 "Huge method cutoff in unoptimized code size (in bytes).");
91 bool,
92 show_internal_names,
93 false,
94 "Show names of internal classes (e.g. \"OneByteString\") in error messages "
95 "instead of showing the corresponding interface names (e.g. \"String\"). "
96 "Also show legacy nullability in type names.");
97
99 remove_script_timestamps_for_test,
100 false,
101 "Remove script timestamps to allow for deterministic testing.");
102
103#if !defined(DART_PRECOMPILED_RUNTIME)
104DEFINE_FLAG(bool, use_register_cc, true, "Use register calling conventions");
105#endif
106
107DECLARE_FLAG(bool, intrinsify);
108DECLARE_FLAG(bool, trace_deoptimization);
109DECLARE_FLAG(bool, trace_deoptimization_verbose);
110DECLARE_FLAG(bool, trace_reload);
111DECLARE_FLAG(bool, write_protect_code);
112DECLARE_FLAG(bool, precompiled_mode);
113DECLARE_FLAG(int, max_polymorphic_checks);
114
// Name-mangling prefixes the VM puts on getters ("get:foo"), setters
// ("set:foo") and field initializers ("init:foo"), with their lengths
// precomputed once at static-init time.
static const char* const kGetterPrefix = "get:";
static const intptr_t kGetterPrefixLength = strlen(kGetterPrefix);
static const char* const kSetterPrefix = "set:";
static const intptr_t kSetterPrefixLength = strlen(kSetterPrefix);
static const char* const kInitPrefix = "init:";
static const intptr_t kInitPrefixLength = strlen(kInitPrefix);
121
// A cache of VM heap allocated preinitialized empty ic data entry arrays.
ArrayPtr ICData::cached_icdata_arrays_[kCachedICDataArrayCount];

// C++ vtable pointers for the predefined class ids, populated from fake
// handles (see the INIT_VTABLE blocks further down) so that handles for
// these cids can be stamped with the right vtable.
cpp_vtable Object::builtin_vtables_[kNumPredefinedCids] = {};
126
127// These are initialized to a value that will force an illegal memory access if
128// they are being used.
129#if defined(RAW_NULL)
130#error RAW_NULL should not be defined.
131#endif
132#define RAW_NULL static_cast<uword>(kHeapObjectTag)
133
134#define CHECK_ERROR(error) \
135 { \
136 ErrorPtr err = (error); \
137 if (err != Error::null()) { \
138 return err; \
139 } \
140 }
141
142#define DEFINE_SHARED_READONLY_HANDLE(Type, name) \
143 Type* Object::name##_ = nullptr;
145#undef DEFINE_SHARED_READONLY_HANDLE
146
// Shared read-only roots of the VM isolate. Each starts as RAW_NULL (a bare
// kHeapObjectTag, i.e. a misaligned "pointer") so that any dereference
// before Object::Init has run faults immediately instead of silently
// reading garbage.
ObjectPtr Object::null_ = static_cast<ObjectPtr>(RAW_NULL);
BoolPtr Object::true_ = static_cast<BoolPtr>(RAW_NULL);
BoolPtr Object::false_ = static_cast<BoolPtr>(RAW_NULL);
ClassPtr Object::class_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::dynamic_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::void_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::type_parameters_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::type_arguments_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::patch_class_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::function_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::closure_data_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::ffi_trampoline_data_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::field_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::script_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::library_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::namespace_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::kernel_program_info_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::code_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::instructions_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::instructions_section_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::instructions_table_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::object_pool_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::pc_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::code_source_map_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::compressed_stackmaps_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::var_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::exception_handlers_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::context_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::context_scope_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::sentinel_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::singletargetcache_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::unlinkedcall_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::monomorphicsmiablecall_class_ =
    static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::icdata_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::megamorphic_cache_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::subtypetestcache_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::loadingunit_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::api_error_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::language_error_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::unhandled_exception_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::unwind_error_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::weak_serialization_reference_class_ =
    static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::weak_array_class_ = static_cast<ClassPtr>(RAW_NULL);
192
194 const char* name,
195 intptr_t start_pos,
196 intptr_t len) {
197 buffer->Printf("%.*s", static_cast<int>(len), &name[start_pos]);
198}
199
200// Used to define setters and getters for untagged object fields that are
201// defined with the WSR_COMPRESSED_POINTER_FIELD macro. See
202// PRECOMPILER_WSR_FIELD_DECLARATION in object.h for more information.
#if defined(DART_PRECOMPILER)
// Precompiler build: the generated getter must unwrap a possible
// WeakSerializationReference wrapper before casting the stored value back
// to its declared type.
#define PRECOMPILER_WSR_FIELD_DEFINITION(Class, Type, Name)                    \
  Type##Ptr Class::Name() const {                                              \
    return Type::RawCast(WeakSerializationReference::Unwrap(untag()->Name())); \
  }
#else
// Non-precompiler build: only the setter is generated here; the getter is
// presumably provided elsewhere (e.g. inline via object.h) — confirm
// against PRECOMPILER_WSR_FIELD_DECLARATION.
#define PRECOMPILER_WSR_FIELD_DEFINITION(Class, Type, Name)                    \
  void Class::set_##Name(const Type& value) const {                            \
    untag()->set_##Name(value.ptr());                                          \
  }
#endif

// Fields that may be wrapped in a WeakSerializationReference by the
// precompiler.
PRECOMPILER_WSR_FIELD_DEFINITION(ClosureData, Function, parent_function)
PRECOMPILER_WSR_FIELD_DEFINITION(Function, FunctionType, signature)

#undef PRECOMPILER_WSR_FIELD_DEFINITION
219
#if defined(_MSC_VER)
// MSVC's (traditional) preprocessor drops the trailing comma on its own when
// __VA_ARGS__ expands to nothing, so plain __VA_ARGS__ is used here.
#define TRACE_TYPE_CHECKS_VERBOSE(format, ...)                                 \
  if (FLAG_trace_type_checks_verbose) {                                        \
    OS::PrintErr(format, __VA_ARGS__);                                         \
  }
#else
// GCC/Clang need the ##__VA_ARGS__ extension to swallow the comma when the
// macro is invoked with a format string and no further arguments.
#define TRACE_TYPE_CHECKS_VERBOSE(format, ...)                                 \
  if (FLAG_trace_type_checks_verbose) {                                        \
    OS::PrintErr(format, ##__VA_ARGS__);                                       \
  }
#endif
231
232// Remove private keys, but retain getter/setter/constructor/mixin manglings.
234 ASSERT(name.IsOneByteString());
235 GrowableArray<uint8_t> without_key(name.Length());
236 intptr_t i = 0;
237 while (i < name.Length()) {
238 while (i < name.Length()) {
239 uint8_t c = name.CharAt(i++);
240 if (c == '@') break;
241 without_key.Add(c);
242 }
243 while (i < name.Length()) {
244 uint8_t c = name.CharAt(i);
245 if ((c < '0') || (c > '9')) break;
246 i++;
247 }
248 }
249
250 return String::FromLatin1(without_key.data(), without_key.length());
251}
252
253// Takes a vm internal name and makes it suitable for external user.
254//
255// Examples:
256//
257// Internal getter and setter prefixes are changed:
258//
259// get:foo -> foo
260// set:foo -> foo=
261//
262// Private name mangling is removed, possibly multiple times:
263//
264// _ReceivePortImpl@709387912 -> _ReceivePortImpl
265// _ReceivePortImpl@709387912._internal@709387912 ->
266// _ReceivePortImpl._internal
267// _C@6328321&_E@6328321&_F@6328321 -> _C&_E&_F
268//
269// The trailing . on the default constructor name is dropped:
270//
271// List. -> List
272//
273// And so forth:
274//
275// get:foo@6328321 -> foo
276// _MyClass@6328321. -> _MyClass
277// _MyClass@6328321.named -> _MyClass.named
278//
279// For extension methods the following demangling is done
280// ext|func -> ext.func (instance extension method)
281// ext|get#prop -> ext.prop (instance extension getter)
282// ext|set#prop -> ext.prop= (instance extension setter)
283// ext|sfunc -> ext.sfunc (static extension method)
284// get:ext|sprop -> ext.sprop (static extension getter)
285// set:ext|sprop -> ext.sprop= (static extension setter)
286//
const char* String::ScrubName(const String& name, bool is_extension) {
  Thread* thread = Thread::Current();
  // NOTE(review): NoSafepointScope — presumably protects the raw 'cname'
  // pointer taken below from a GC moving the underlying data; confirm.
  NoSafepointScope no_safepoint(thread);
  Zone* zone = thread->zone();
  ZoneTextBuffer printer(zone);

#if !defined(DART_PRECOMPILED_RUNTIME)
  if (name.Equals(Symbols::TopLevel())) {
    // Name of invisible top-level class.
    return "";
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  const char* cname = name.ToCString();
  ASSERT(strlen(cname) == static_cast<size_t>(name.Length()));
  const intptr_t name_len = name.Length();
  // First remove all private name mangling and if 'is_extension' is true
  // substitute the first '|' character with '.'.
  intptr_t start_pos = 0;
  intptr_t sum_segment_len = 0;
  for (intptr_t i = 0; i < name_len; i++) {
    // A private key looks like '@' immediately followed by a digit.
    if ((cname[i] == '@') && ((i + 1) < name_len) && (cname[i + 1] >= '0') &&
        (cname[i + 1] <= '9')) {
      // Append the current segment to the unmangled name.
      const intptr_t segment_len = i - start_pos;
      sum_segment_len += segment_len;
      AppendSubString(&printer, cname, start_pos, segment_len);
      // Advance until past the name mangling. The private keys are only
      // numbers so we skip until the first non-number.
      i++;  // Skip the '@'.
      while ((i < name.Length()) && (name.CharAt(i) >= '0') &&
             (name.CharAt(i) <= '9')) {
        i++;
      }
      start_pos = i;
      i--;  // Account for for-loop increment.
    } else if (is_extension && cname[i] == '|') {
      // Append the current segment to the unmangled name.
      const intptr_t segment_len = i - start_pos;
      AppendSubString(&printer, cname, start_pos, segment_len);
      // Append the '.' character (replaces '|' with '.').
      AppendSubString(&printer, ".", 0, 1);
      start_pos = i + 1;
      // Account for length of segments added so far.
      sum_segment_len += (segment_len + 1);
    }
  }

  const char* unmangled_name = nullptr;
  if (start_pos == 0) {
    // No name unmangling needed, reuse the name that was passed in.
    unmangled_name = cname;
    sum_segment_len = name_len;
  } else if (name.Length() != start_pos) {
    // Append the last segment.
    const intptr_t segment_len = name.Length() - start_pos;
    sum_segment_len += segment_len;
    AppendSubString(&printer, cname, start_pos, segment_len);
  }
  if (unmangled_name == nullptr) {
    // Merge unmangled_segments.
    unmangled_name = printer.buffer();
  }

  // Second pass (below) strips getter/setter prefixes and the extension
  // 'ext.' qualifier from the now-unmangled name.
  printer.Clear();
  intptr_t start = 0;
  intptr_t len = sum_segment_len;
  bool is_setter = false;
  if (is_extension) {
    // First scan till we see the '.' character.
    for (intptr_t i = 0; i < len; i++) {
      if (unmangled_name[i] == '.') {
        intptr_t slen = i + 1;
        intptr_t plen = slen - start;
        AppendSubString(&printer, unmangled_name, start, plen);
        // Continue scrubbing on the part after the extension qualifier.
        unmangled_name += slen;
        len -= slen;
        break;
      } else if (unmangled_name[i] == ':') {
        if (start != 0) {
          // Reset and break.
          start = 0;
          is_setter = false;
          break;
        }
        if (unmangled_name[0] == 's') {
          is_setter = true;
        }
        start = i + 1;
      }
    }
  }
  intptr_t dot_pos = -1;  // Position of '.' in the name, if any.
  start = 0;
  for (intptr_t i = start; i < len; i++) {
    if (unmangled_name[i] == ':' ||
        (is_extension && unmangled_name[i] == '#')) {
      if (start != 0) {
        // Reset and break.
        start = 0;
        dot_pos = -1;
        break;
      }
      ASSERT(start == 0);  // Only one : is possible in getters or setters.
      if (unmangled_name[0] == 's') {
        ASSERT(!is_setter);
        is_setter = true;
      }
      start = i + 1;
    } else if (unmangled_name[i] == '.') {
      if (dot_pos != -1) {
        // Reset and break.
        start = 0;
        dot_pos = -1;
        break;
      }
      ASSERT(dot_pos == -1);  // Only one dot is supported.
      dot_pos = i;
    }
  }

  if (!is_extension && (start == 0) && (dot_pos == -1)) {
    // This unmangled_name is fine as it is.
    return unmangled_name;
  }

  // Drop the trailing dot if needed (default constructors, e.g. "List.").
  intptr_t end = ((dot_pos + 1) == len) ? dot_pos : len;

  intptr_t substr_len = end - start;
  AppendSubString(&printer, unmangled_name, start, substr_len);
  if (is_setter) {
    // Setters are rendered with a trailing '=' (e.g. "foo=").
    const char* equals = Symbols::Equals().ToCString();
    const intptr_t equals_len = strlen(equals);
    AppendSubString(&printer, equals, 0, equals_len);
  }

  return printer.buffer();
}
426
428 bool is_extension) {
429#if !defined(DART_PRECOMPILED_RUNTIME)
430 intptr_t len = name.Length();
431 intptr_t start = 0;
432 intptr_t at_pos = -1; // Position of '@' in the name, if any.
433 bool is_setter = false;
434
436
437 // If extension strip out the leading prefix e.g" ext|func would strip out
438 // 'ext|'.
439 if (is_extension) {
440 // First scan till we see the '|' character.
441 for (intptr_t i = 0; i < len; i++) {
442 if (name.CharAt(i) == '|') {
445 start = i + 1;
446 break;
447 } else if (name.CharAt(i) == ':') {
448 if (start != 0) {
449 // Reset and break.
450 start = 0;
451 is_setter = false;
452 break;
453 }
454 if (name.CharAt(0) == 's') {
455 is_setter = true;
456 }
457 start = i + 1;
458 }
459 }
460 }
461
462 for (intptr_t i = start; i < len; i++) {
463 if (name.CharAt(i) == ':' || (is_extension && name.CharAt(i) == '#')) {
464 // Only one : is possible in getters or setters.
465 ASSERT(is_extension || start == 0);
466 if (name.CharAt(start) == 's') {
467 is_setter = true;
468 }
469 start = i + 1;
470 } else if (name.CharAt(i) == '@') {
471 // Setters should have only one @ so we know where to put the =.
472 ASSERT(!is_setter || (at_pos == -1));
473 at_pos = i;
474 }
475 }
476
477 if (start == 0) {
478 // This unmangled_name is fine as it is.
479 return name.ptr();
480 }
481
482 if (is_extension) {
483 const String& fname =
485 result = String::Concat(result, fname);
486 } else {
488 }
489
490 if (is_setter) {
491 // Setters need to end with '='.
492 if (at_pos == -1) {
494 } else {
495 const String& pre_at =
496 String::Handle(String::SubString(result, 0, at_pos - 4));
497 const String& post_at =
498 String::Handle(String::SubString(name, at_pos, len - at_pos));
500 result = String::Concat(result, post_at);
501 }
502 }
503
504 return result.ptr();
505#endif // !defined(DART_PRECOMPILED_RUNTIME)
506 return name.ptr(); // In AOT, return argument unchanged.
507}
508
509template <typename type>
511 return ((value == '"') || (value == '\n') || (value == '\f') ||
512 (value == '\b') || (value == '\t') || (value == '\v') ||
513 (value == '\r') || (value == '\\') || (value == '$'));
514}
515
// Returns true for non-printable ASCII: the C0 control range (0x00..0x1F)
// and DEL (0x7F). Negative values and code points >= 128 are not treated
// as non-printable by this check.
static inline bool IsAsciiNonprintable(int32_t c) {
  if (c == 127) {
    return true;
  }
  return (c >= 0) && (c <= 31);
}
519
520static int32_t EscapeOverhead(int32_t c) {
521 if (IsSpecialCharacter(c)) {
522 return 1; // 1 additional byte for the backslash.
523 } else if (IsAsciiNonprintable(c)) {
524 return 3; // 3 additional bytes to encode c as \x00.
525 }
526 return 0;
527}
528
529template <typename type>
531 if (value == '"') {
532 return '"';
533 } else if (value == '\n') {
534 return 'n';
535 } else if (value == '\f') {
536 return 'f';
537 } else if (value == '\b') {
538 return 'b';
539 } else if (value == '\t') {
540 return 't';
541 } else if (value == '\v') {
542 return 'v';
543 } else if (value == '\r') {
544 return 'r';
545 } else if (value == '\\') {
546 return '\\';
547 } else if (value == '$') {
548 return '$';
549 }
550 UNREACHABLE();
551 return '\0';
552}
553
555 // Should only be run by the vm isolate.
556 ASSERT(isolate_group == Dart::vm_isolate_group());
557 Thread* thread = Thread::Current();
558 auto heap = isolate_group->heap();
559
560 // TODO(iposva): NoSafepointScope needs to be added here.
561 ASSERT(class_class() == null_);
562
563 // Allocate and initialize the null instance.
564 // 'null_' must be the first object allocated as it is used in allocation to
565 // clear the pointer fields of objects.
566 {
567 uword address =
568 heap->Allocate(thread, Instance::InstanceSize(), Heap::kOld);
569 null_ = static_cast<InstancePtr>(address + kHeapObjectTag);
570 InitializeObjectVariant<Instance>(address, kNullCid);
571 null_->untag()->SetCanonical();
572 }
573
574 // Allocate and initialize the bool instances.
575 // These must be allocated such that at kBoolValueBitPosition, the address
576 // of true is 0 and the address of false is 1, and their addresses are
577 // otherwise identical.
578 {
579 // Allocate a dummy bool object to give true the desired alignment.
580 uword address = heap->Allocate(thread, Bool::InstanceSize(), Heap::kOld);
581 InitializeObject<Bool>(address);
582 static_cast<BoolPtr>(address + kHeapObjectTag)->untag()->value_ = false;
583 }
584 {
585 // Allocate true.
586 uword address = heap->Allocate(thread, Bool::InstanceSize(), Heap::kOld);
587 true_ = static_cast<BoolPtr>(address + kHeapObjectTag);
588 InitializeObject<Bool>(address);
589 true_->untag()->value_ = true;
590 true_->untag()->SetCanonical();
591 }
592 {
593 // Allocate false.
594 uword address = heap->Allocate(thread, Bool::InstanceSize(), Heap::kOld);
595 false_ = static_cast<BoolPtr>(address + kHeapObjectTag);
596 InitializeObject<Bool>(address);
597 false_->untag()->value_ = false;
598 false_->untag()->SetCanonical();
599 }
600
601 // Check that the objects have been allocated at appropriate addresses.
602 ASSERT(static_cast<uword>(true_) ==
603 static_cast<uword>(null_) + kTrueOffsetFromNull);
604 ASSERT(static_cast<uword>(false_) ==
605 static_cast<uword>(null_) + kFalseOffsetFromNull);
606 ASSERT((static_cast<uword>(true_) & kBoolValueMask) == 0);
607 ASSERT((static_cast<uword>(false_) & kBoolValueMask) != 0);
608 ASSERT(static_cast<uword>(false_) ==
609 (static_cast<uword>(true_) | kBoolValueMask));
610 ASSERT((static_cast<uword>(null_) & kBoolVsNullMask) == 0);
611 ASSERT((static_cast<uword>(true_) & kBoolVsNullMask) != 0);
612 ASSERT((static_cast<uword>(false_) & kBoolVsNullMask) != 0);
613}
614
616 {
617 Object fake_handle;
618 builtin_vtables_[kObjectCid] = fake_handle.vtable();
619 }
620
621#define INIT_VTABLE(clazz) \
622 { \
623 clazz fake_handle; \
624 builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \
625 }
628#undef INIT_VTABLE
629
630#define INIT_VTABLE(clazz) \
631 { \
632 Map fake_handle; \
633 builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \
634 }
636#undef INIT_VTABLE
637
638#define INIT_VTABLE(clazz) \
639 { \
640 Set fake_handle; \
641 builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \
642 }
644#undef INIT_VTABLE
645
646#define INIT_VTABLE(clazz) \
647 { \
648 Array fake_handle; \
649 builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \
650 }
652#undef INIT_VTABLE
653
654#define INIT_VTABLE(clazz) \
655 { \
656 String fake_handle; \
657 builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \
658 }
660#undef INIT_VTABLE
661
662 {
663 Instance fake_handle;
664 builtin_vtables_[kFfiNativeTypeCid] = fake_handle.vtable();
665 }
666
667#define INIT_VTABLE(clazz) \
668 { \
669 Instance fake_handle; \
670 builtin_vtables_[kFfi##clazz##Cid] = fake_handle.vtable(); \
671 }
673#undef INIT_VTABLE
674
675 {
676 Instance fake_handle;
677 builtin_vtables_[kFfiNativeFunctionCid] = fake_handle.vtable();
678 }
679
680 {
681 Pointer fake_handle;
682 builtin_vtables_[kPointerCid] = fake_handle.vtable();
683 }
684
685 {
686 DynamicLibrary fake_handle;
687 builtin_vtables_[kDynamicLibraryCid] = fake_handle.vtable();
688 }
689
690#define INIT_VTABLE(clazz) \
691 { \
692 TypedData fake_internal_handle; \
693 builtin_vtables_[kTypedData##clazz##Cid] = fake_internal_handle.vtable(); \
694 TypedDataView fake_view_handle; \
695 builtin_vtables_[kTypedData##clazz##ViewCid] = fake_view_handle.vtable(); \
696 builtin_vtables_[kUnmodifiableTypedData##clazz##ViewCid] = \
697 fake_view_handle.vtable(); \
698 ExternalTypedData fake_external_handle; \
699 builtin_vtables_[kExternalTypedData##clazz##Cid] = \
700 fake_external_handle.vtable(); \
701 }
703#undef INIT_VTABLE
704
705 {
706 TypedDataView fake_handle;
707 builtin_vtables_[kByteDataViewCid] = fake_handle.vtable();
708 builtin_vtables_[kUnmodifiableByteDataViewCid] = fake_handle.vtable();
709 }
710
711 {
712 Instance fake_handle;
713 builtin_vtables_[kByteBufferCid] = fake_handle.vtable();
714 builtin_vtables_[kNullCid] = fake_handle.vtable();
715 builtin_vtables_[kDynamicCid] = fake_handle.vtable();
716 builtin_vtables_[kVoidCid] = fake_handle.vtable();
717 builtin_vtables_[kNeverCid] = fake_handle.vtable();
718 }
719}
720
721void Object::Init(IsolateGroup* isolate_group) {
722 // Should only be run by the vm isolate.
723 ASSERT(isolate_group == Dart::vm_isolate_group());
724 Heap* heap = isolate_group->heap();
725 Thread* thread = Thread::Current();
726 ASSERT(thread != nullptr);
727 // Ensure lock checks in setters are happy.
728 SafepointWriteRwLocker ml(thread, isolate_group->program_lock());
729
730 InitVtables();
731
732// Allocate the read only object handles here.
733#define INITIALIZE_SHARED_READONLY_HANDLE(Type, name) \
734 name##_ = Type::ReadOnlyHandle();
736#undef INITIALIZE_SHARED_READONLY_HANDLE
737
738 *null_object_ = Object::null();
739 *null_class_ = Class::null();
740 *null_array_ = Array::null();
741 *null_string_ = String::null();
742 *null_instance_ = Instance::null();
743 *null_function_ = Function::null();
744 *null_function_type_ = FunctionType::null();
745 *null_record_type_ = RecordType::null();
746 *null_type_arguments_ = TypeArguments::null();
747 *null_closure_ = Closure::null();
748 *empty_type_arguments_ = TypeArguments::null();
749 *null_abstract_type_ = AbstractType::null();
750 *null_compressed_stackmaps_ = CompressedStackMaps::null();
751 *bool_true_ = true_;
752 *bool_false_ = false_;
753
754 // Initialize the empty array and empty instantiations cache array handles to
755 // null_ in order to be able to check if the empty and zero arrays were
756 // allocated (RAW_NULL is not available).
757 *empty_array_ = Array::null();
758 *empty_instantiations_cache_array_ = Array::null();
759 *empty_subtype_test_cache_array_ = Array::null();
760
761 Class& cls = Class::Handle();
762
763 // Allocate and initialize the class class.
764 {
765 intptr_t size = Class::InstanceSize();
766 uword address = heap->Allocate(thread, size, Heap::kOld);
767 class_class_ = static_cast<ClassPtr>(address + kHeapObjectTag);
768 InitializeObject<Class>(address);
769
770 Class fake;
771 // Initialization from Class::New<Class>.
772 // Directly set ptr_ to break a circular dependency: SetRaw will attempt
773 // to lookup class class in the class table where it is not registered yet.
774 cls.ptr_ = class_class_;
775 ASSERT(builtin_vtables_[kClassCid] == fake.vtable());
779 const intptr_t host_next_field_offset = Class::NextFieldOffset();
780 const intptr_t target_next_field_offset = RTN::Class::NextFieldOffset();
781 cls.set_next_field_offset(host_next_field_offset, target_next_field_offset);
783 cls.set_state_bits(0);
791 cls.InitEmptyFields();
792 isolate_group->class_table()->Register(cls);
793 }
794
795 // Allocate and initialize the null class.
796 cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate_group);
798 isolate_group->object_store()->set_null_class(cls);
799
800 // Allocate and initialize Never class.
801 cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate_group);
806 isolate_group->object_store()->set_never_class(cls);
807
808 // Allocate and initialize the free list element class.
810 RTN::FreeListElement::FakeInstance>(kFreeListElement,
811 isolate_group);
816
817 // Allocate and initialize the forwarding corpse class.
819 RTN::ForwardingCorpse::FakeInstance>(kForwardingCorpse,
820 isolate_group);
825
826 // Allocate and initialize Sentinel class.
827 cls = Class::New<Sentinel, RTN::Sentinel>(isolate_group);
828 sentinel_class_ = cls.ptr();
829
830 // Allocate and initialize the sentinel values.
831 {
832 *sentinel_ ^= Sentinel::New();
833 *transition_sentinel_ ^= Sentinel::New();
834 }
835
836 // Allocate and initialize optimizing compiler constants.
837 {
838 *unknown_constant_ ^= Sentinel::New();
839 *non_constant_ ^= Sentinel::New();
840 *optimized_out_ ^= Sentinel::New();
841 }
842
843 // Allocate the remaining VM internal classes.
844 cls = Class::New<TypeParameters, RTN::TypeParameters>(isolate_group);
845 type_parameters_class_ = cls.ptr();
846
847 cls = Class::New<TypeArguments, RTN::TypeArguments>(isolate_group);
848 type_arguments_class_ = cls.ptr();
849
850 cls = Class::New<PatchClass, RTN::PatchClass>(isolate_group);
851 patch_class_class_ = cls.ptr();
852
853 cls = Class::New<Function, RTN::Function>(isolate_group);
854 function_class_ = cls.ptr();
855
856 cls = Class::New<ClosureData, RTN::ClosureData>(isolate_group);
857 closure_data_class_ = cls.ptr();
858
859 cls = Class::New<FfiTrampolineData, RTN::FfiTrampolineData>(isolate_group);
860 ffi_trampoline_data_class_ = cls.ptr();
861
862 cls = Class::New<Field, RTN::Field>(isolate_group);
863 field_class_ = cls.ptr();
864
865 cls = Class::New<Script, RTN::Script>(isolate_group);
866 script_class_ = cls.ptr();
867
868 cls = Class::New<Library, RTN::Library>(isolate_group);
869 library_class_ = cls.ptr();
870
871 cls = Class::New<Namespace, RTN::Namespace>(isolate_group);
872 namespace_class_ = cls.ptr();
873
874 cls = Class::New<KernelProgramInfo, RTN::KernelProgramInfo>(isolate_group);
875 kernel_program_info_class_ = cls.ptr();
876
877 cls = Class::New<Code, RTN::Code>(isolate_group);
878 code_class_ = cls.ptr();
879
880 cls = Class::New<Instructions, RTN::Instructions>(isolate_group);
881 instructions_class_ = cls.ptr();
882
883 cls =
884 Class::New<InstructionsSection, RTN::InstructionsSection>(isolate_group);
885 instructions_section_class_ = cls.ptr();
886
887 cls = Class::New<InstructionsTable, RTN::InstructionsTable>(isolate_group);
888 instructions_table_class_ = cls.ptr();
889
890 cls = Class::New<ObjectPool, RTN::ObjectPool>(isolate_group);
891 object_pool_class_ = cls.ptr();
892
893 cls = Class::New<PcDescriptors, RTN::PcDescriptors>(isolate_group);
894 pc_descriptors_class_ = cls.ptr();
895
896 cls = Class::New<CodeSourceMap, RTN::CodeSourceMap>(isolate_group);
897 code_source_map_class_ = cls.ptr();
898
899 cls =
900 Class::New<CompressedStackMaps, RTN::CompressedStackMaps>(isolate_group);
901 compressed_stackmaps_class_ = cls.ptr();
902
903 cls =
904 Class::New<LocalVarDescriptors, RTN::LocalVarDescriptors>(isolate_group);
905 var_descriptors_class_ = cls.ptr();
906
907 cls = Class::New<ExceptionHandlers, RTN::ExceptionHandlers>(isolate_group);
908 exception_handlers_class_ = cls.ptr();
909
910 cls = Class::New<Context, RTN::Context>(isolate_group);
911 context_class_ = cls.ptr();
912
913 cls = Class::New<ContextScope, RTN::ContextScope>(isolate_group);
914 context_scope_class_ = cls.ptr();
915
916 cls = Class::New<SingleTargetCache, RTN::SingleTargetCache>(isolate_group);
917 singletargetcache_class_ = cls.ptr();
918
919 cls = Class::New<UnlinkedCall, RTN::UnlinkedCall>(isolate_group);
920 unlinkedcall_class_ = cls.ptr();
921
922 cls = Class::New<MonomorphicSmiableCall, RTN::MonomorphicSmiableCall>(
923 isolate_group);
924 monomorphicsmiablecall_class_ = cls.ptr();
925
926 cls = Class::New<ICData, RTN::ICData>(isolate_group);
927 icdata_class_ = cls.ptr();
928
929 cls = Class::New<MegamorphicCache, RTN::MegamorphicCache>(isolate_group);
930 megamorphic_cache_class_ = cls.ptr();
931
932 cls = Class::New<SubtypeTestCache, RTN::SubtypeTestCache>(isolate_group);
933 subtypetestcache_class_ = cls.ptr();
934
935 cls = Class::New<LoadingUnit, RTN::LoadingUnit>(isolate_group);
936 loadingunit_class_ = cls.ptr();
937
938 cls = Class::New<ApiError, RTN::ApiError>(isolate_group);
939 api_error_class_ = cls.ptr();
940
941 cls = Class::New<LanguageError, RTN::LanguageError>(isolate_group);
942 language_error_class_ = cls.ptr();
943
944 cls = Class::New<UnhandledException, RTN::UnhandledException>(isolate_group);
945 unhandled_exception_class_ = cls.ptr();
946
947 cls = Class::New<UnwindError, RTN::UnwindError>(isolate_group);
948 unwind_error_class_ = cls.ptr();
949
950 cls = Class::New<WeakSerializationReference, RTN::WeakSerializationReference>(
951 isolate_group);
952 weak_serialization_reference_class_ = cls.ptr();
953
954 cls = Class::New<WeakArray, RTN::WeakArray>(isolate_group);
955 weak_array_class_ = cls.ptr();
956
957 ASSERT(class_class() != null_);
958
959 // Pre-allocate classes in the vm isolate so that we can for example create a
960 // symbol table and populate it with some frequently used strings as symbols.
961 cls = Class::New<Array, RTN::Array>(isolate_group);
962 isolate_group->object_store()->set_array_class(cls);
966 cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate_group);
967 isolate_group->object_store()->set_immutable_array_class(cls);
971 // In order to be able to canonicalize arguments descriptors early.
973 cls =
974 Class::New<GrowableObjectArray, RTN::GrowableObjectArray>(isolate_group);
975 isolate_group->object_store()->set_growable_object_array_class(cls);
980 cls = Class::NewStringClass(kOneByteStringCid, isolate_group);
981 isolate_group->object_store()->set_one_byte_string_class(cls);
982 cls = Class::NewStringClass(kTwoByteStringCid, isolate_group);
983 isolate_group->object_store()->set_two_byte_string_class(cls);
984 cls = Class::New<Mint, RTN::Mint>(isolate_group);
985 isolate_group->object_store()->set_mint_class(cls);
986 cls = Class::New<Double, RTN::Double>(isolate_group);
987 isolate_group->object_store()->set_double_class(cls);
988 cls = Class::New<Float32x4, RTN::Float32x4>(isolate_group);
989 isolate_group->object_store()->set_float32x4_class(cls);
990 cls = Class::New<Float64x2, RTN::Float64x2>(isolate_group);
991 isolate_group->object_store()->set_float64x2_class(cls);
992 cls = Class::New<Int32x4, RTN::Int32x4>(isolate_group);
993 isolate_group->object_store()->set_int32x4_class(cls);
994
995 // Ensure that class kExternalTypedDataUint8ArrayCid is registered as we
996 // need it when reading in the token stream of bootstrap classes in the VM
997 // isolate.
998 Class::NewExternalTypedDataClass(kExternalTypedDataUint8ArrayCid,
999 isolate_group);
1000
1001 // Needed for object pools of VM isolate stubs.
1002 Class::NewTypedDataClass(kTypedDataInt8ArrayCid, isolate_group);
1003
1004 // Allocate and initialize the empty_array instance.
1005 {
1006 uword address = heap->Allocate(thread, Array::InstanceSize(0), Heap::kOld);
1007 InitializeObjectVariant<Array>(address, kImmutableArrayCid, 0);
1008 Array::initializeHandle(empty_array_,
1009 static_cast<ArrayPtr>(address + kHeapObjectTag));
1010 empty_array_->untag()->set_length(Smi::New(0));
1011 empty_array_->SetCanonical();
1012 }
1013
1014 Smi& smi = Smi::Handle();
1015 // Allocate and initialize the empty instantiations cache array instance,
1016 // which contains metadata as the first element and a sentinel value
1017 // at the start of the first entry.
1018 {
1019 const intptr_t array_size =
1021 uword address =
1022 heap->Allocate(thread, Array::InstanceSize(array_size), Heap::kOld);
1023 InitializeObjectVariant<Array>(address, kImmutableArrayCid, array_size);
1024 Array::initializeHandle(empty_instantiations_cache_array_,
1025 static_cast<ArrayPtr>(address + kHeapObjectTag));
1026 empty_instantiations_cache_array_->untag()->set_length(
1027 Smi::New(array_size));
1028 // The empty cache has no occupied entries and is not a hash-based cache.
1029 smi = Smi::New(0);
1030 empty_instantiations_cache_array_->SetAt(
1032 // Make the first (and only) entry unoccupied by setting its first element
1033 // to the sentinel value.
1035 InstantiationsCacheTable table(*empty_instantiations_cache_array_);
1037 // The other contents of the array are immaterial.
1038 empty_instantiations_cache_array_->SetCanonical();
1039 }
1040
1041 // Allocate and initialize the empty subtype test cache array instance,
1042 // which contains a single unoccupied entry.
1043 {
1044 const intptr_t array_size = SubtypeTestCache::kTestEntryLength;
1045 uword address =
1046 heap->Allocate(thread, Array::InstanceSize(array_size), Heap::kOld);
1047 InitializeObjectVariant<Array>(address, kImmutableArrayCid, array_size);
1048 Array::initializeHandle(empty_subtype_test_cache_array_,
1049 static_cast<ArrayPtr>(address + kHeapObjectTag));
1050 empty_subtype_test_cache_array_->untag()->set_length(Smi::New(array_size));
1051 // Make the first (and only) entry unoccupied by setting its first element
1052 // to the null value.
1053 empty_subtype_test_cache_array_->SetAt(
1054 SubtypeTestCache::kInstanceCidOrSignature, Object::null_object());
1056 SubtypeTestCacheTable table(*empty_subtype_test_cache_array_);
1058 Object::null_object());
1059 // The other contents of the array are immaterial.
1060 empty_subtype_test_cache_array_->SetCanonical();
1061 }
1062
1063 // Allocate and initialize the canonical empty context scope object.
1064 {
1065 uword address =
1067 InitializeObject<ContextScope>(address, 0);
1068 ContextScope::initializeHandle(
1069 empty_context_scope_,
1070 static_cast<ContextScopePtr>(address + kHeapObjectTag));
1071 empty_context_scope_->StoreNonPointer(
1072 &empty_context_scope_->untag()->num_variables_, 0);
1073 empty_context_scope_->StoreNonPointer(
1074 &empty_context_scope_->untag()->is_implicit_, true);
1075 empty_context_scope_->SetCanonical();
1076 }
1077
1078 // Allocate and initialize the canonical empty object pool object.
1079 {
1080 uword address =
1081 heap->Allocate(thread, ObjectPool::InstanceSize(0), Heap::kOld);
1082 InitializeObject<ObjectPool>(address, 0);
1083 ObjectPool::initializeHandle(
1084 empty_object_pool_,
1085 static_cast<ObjectPoolPtr>(address + kHeapObjectTag));
1086 empty_object_pool_->StoreNonPointer(&empty_object_pool_->untag()->length_,
1087 0);
1088 empty_object_pool_->SetCanonical();
1089 }
1090
1091 // Allocate and initialize the empty_compressed_stackmaps instance.
1092 {
1093 const intptr_t instance_size = CompressedStackMaps::InstanceSize(0);
1094 uword address = heap->Allocate(thread, instance_size, Heap::kOld);
1095 InitializeObject<CompressedStackMaps>(address, 0);
1096 CompressedStackMaps::initializeHandle(
1097 empty_compressed_stackmaps_,
1098 static_cast<CompressedStackMapsPtr>(address + kHeapObjectTag));
1099 empty_compressed_stackmaps_->untag()->payload()->set_flags_and_size(0);
1100 empty_compressed_stackmaps_->SetCanonical();
1101 }
1102
1103 // Allocate and initialize the empty_descriptors instance.
1104 {
1105 uword address =
1107 InitializeObject<PcDescriptors>(address, 0);
1108 PcDescriptors::initializeHandle(
1109 empty_descriptors_,
1110 static_cast<PcDescriptorsPtr>(address + kHeapObjectTag));
1111 empty_descriptors_->StoreNonPointer(&empty_descriptors_->untag()->length_,
1112 0);
1113 empty_descriptors_->SetCanonical();
1114 }
1115
1116 // Allocate and initialize the canonical empty variable descriptor object.
1117 {
1118 uword address = heap->Allocate(thread, LocalVarDescriptors::InstanceSize(0),
1119 Heap::kOld);
1120 InitializeObject<LocalVarDescriptors>(address, 0);
1121 LocalVarDescriptors::initializeHandle(
1122 empty_var_descriptors_,
1123 static_cast<LocalVarDescriptorsPtr>(address + kHeapObjectTag));
1124 empty_var_descriptors_->StoreNonPointer(
1125 &empty_var_descriptors_->untag()->num_entries_, 0);
1126 empty_var_descriptors_->SetCanonical();
1127 }
1128
1129 // Allocate and initialize the canonical empty exception handler info object.
1130 // The vast majority of all functions do not contain an exception handler
1131 // and can share this canonical descriptor.
1132 {
1133 uword address =
1135 InitializeObject<ExceptionHandlers>(address, 0);
1136 ExceptionHandlers::initializeHandle(
1137 empty_exception_handlers_,
1138 static_cast<ExceptionHandlersPtr>(address + kHeapObjectTag));
1139 empty_exception_handlers_->StoreNonPointer(
1140 &empty_exception_handlers_->untag()->packed_fields_, 0);
1141 empty_exception_handlers_->SetCanonical();
1142 }
1143
1144 // Empty exception handlers for async/async* functions.
1145 {
1146 uword address =
1148 InitializeObject<ExceptionHandlers>(address, 0);
1149 ExceptionHandlers::initializeHandle(
1150 empty_async_exception_handlers_,
1151 static_cast<ExceptionHandlersPtr>(address + kHeapObjectTag));
1152 empty_async_exception_handlers_->StoreNonPointer(
1153 &empty_async_exception_handlers_->untag()->packed_fields_,
1155 empty_async_exception_handlers_->SetCanonical();
1156 }
1157
1158 // Allocate and initialize the canonical empty type arguments object.
1159 {
1160 uword address =
1162 InitializeObject<TypeArguments>(address, 0);
1163 TypeArguments::initializeHandle(
1164 empty_type_arguments_,
1165 static_cast<TypeArgumentsPtr>(address + kHeapObjectTag));
1166 empty_type_arguments_->untag()->set_length(Smi::New(0));
1167 empty_type_arguments_->untag()->set_hash(Smi::New(0));
1168 empty_type_arguments_->ComputeHash();
1169 empty_type_arguments_->SetCanonical();
1170 }
1171
1172 // The VM isolate snapshot object table is initialized to an empty array
1173 // as we do not have any VM isolate snapshot at this time.
1174 *vm_isolate_snapshot_object_table_ = Object::empty_array().ptr();
1175
1176 cls = Class::New<Instance, RTN::Instance>(kDynamicCid, isolate_group);
1177 cls.set_is_abstract();
1182 dynamic_class_ = cls.ptr();
1183
1184 cls = Class::New<Instance, RTN::Instance>(kVoidCid, isolate_group);
1189 void_class_ = cls.ptr();
1190
1191 cls = Class::New<Type, RTN::Type>(isolate_group);
1195
1196 cls = Class::New<FunctionType, RTN::FunctionType>(isolate_group);
1200
1201 cls = Class::New<RecordType, RTN::RecordType>(isolate_group);
1205
1206 cls = dynamic_class_;
1207 *dynamic_type_ =
1208 Type::New(cls, Object::null_type_arguments(), Nullability::kNullable);
1209 dynamic_type_->SetIsFinalized();
1210 dynamic_type_->ComputeHash();
1211 dynamic_type_->SetCanonical();
1212
1213 cls = void_class_;
1214 *void_type_ =
1215 Type::New(cls, Object::null_type_arguments(), Nullability::kNullable);
1216 void_type_->SetIsFinalized();
1217 void_type_->ComputeHash();
1218 void_type_->SetCanonical();
1219
1220 // Since TypeArguments objects are passed as function arguments, make them
1221 // behave as Dart instances, although they are just VM objects.
1222 // Note that we cannot set the super type to ObjectType, which does not live
1223 // in the vm isolate. See special handling in Class::SuperClass().
1224 cls = type_arguments_class_;
1225 cls.set_interfaces(Object::empty_array());
1226 cls.SetFields(Object::empty_array());
1227 cls.SetFunctions(Object::empty_array());
1228
1229 cls = Class::New<Bool, RTN::Bool>(isolate_group);
1230 isolate_group->object_store()->set_bool_class(cls);
1231
1232 *smi_illegal_cid_ = Smi::New(kIllegalCid);
1233 *smi_zero_ = Smi::New(0);
1234
1235 String& error_str = String::Handle();
1236 error_str = String::New(
1237 "Callbacks into the Dart VM are currently prohibited. Either there are "
1238 "outstanding pointers from Dart_TypedDataAcquireData that have not been "
1239 "released with Dart_TypedDataReleaseData, or a finalizer is running.",
1240 Heap::kOld);
1241 *no_callbacks_error_ = ApiError::New(error_str, Heap::kOld);
1242 error_str = String::New(
1243 "No api calls are allowed while unwind is in progress", Heap::kOld);
1244 *unwind_in_progress_error_ = UnwindError::New(error_str, Heap::kOld);
1245 error_str = String::New("SnapshotWriter Error", Heap::kOld);
1246 *snapshot_writer_error_ =
1247 LanguageError::New(error_str, Report::kError, Heap::kOld);
1248 error_str = String::New("Branch offset overflow", Heap::kOld);
1249 *branch_offset_error_ =
1250 LanguageError::New(error_str, Report::kBailout, Heap::kOld);
1251 error_str = String::New("Speculative inlining failed", Heap::kOld);
1252 *speculative_inlining_error_ =
1253 LanguageError::New(error_str, Report::kBailout, Heap::kOld);
1254 error_str = String::New("Background Compilation Failed", Heap::kOld);
1255 *background_compilation_error_ =
1256 LanguageError::New(error_str, Report::kBailout, Heap::kOld);
1257 error_str = String::New("Out of memory", Heap::kOld);
1258 *out_of_memory_error_ =
1259 LanguageError::New(error_str, Report::kError, Heap::kOld);
1260
1261 // Allocate the parameter types and names for synthetic getters.
1262 *synthetic_getter_parameter_types_ = Array::New(1, Heap::kOld);
1263 synthetic_getter_parameter_types_->SetAt(0, Object::dynamic_type());
1264 *synthetic_getter_parameter_names_ = Array::New(1, Heap::kOld);
1265 // Fill in synthetic_getter_parameter_names_ later, after symbols are
1266 // initialized (in Object::FinalizeVMIsolate).
1267 // synthetic_getter_parameter_names_ object needs to be created earlier as
1268 // VM isolate snapshot reader references it before Object::FinalizeVMIsolate.
1269
1270 // Some thread fields need to be reinitialized as null constants have not been
1271 // initialized until now.
1272 thread->ClearStickyError();
1273
1274 ASSERT(!null_object_->IsSmi());
1275 ASSERT(!null_class_->IsSmi());
1276 ASSERT(null_class_->IsClass());
1277 ASSERT(!null_array_->IsSmi());
1278 ASSERT(null_array_->IsArray());
1279 ASSERT(!null_string_->IsSmi());
1280 ASSERT(null_string_->IsString());
1281 ASSERT(!null_instance_->IsSmi());
1282 ASSERT(null_instance_->IsInstance());
1283 ASSERT(!null_function_->IsSmi());
1284 ASSERT(null_function_->IsFunction());
1285 ASSERT(!null_function_type_->IsSmi());
1286 ASSERT(null_function_type_->IsFunctionType());
1287 ASSERT(!null_record_type_->IsSmi());
1288 ASSERT(null_record_type_->IsRecordType());
1289 ASSERT(!null_type_arguments_->IsSmi());
1290 ASSERT(null_type_arguments_->IsTypeArguments());
1291 ASSERT(!null_compressed_stackmaps_->IsSmi());
1292 ASSERT(null_compressed_stackmaps_->IsCompressedStackMaps());
1293 ASSERT(!empty_array_->IsSmi());
1294 ASSERT(empty_array_->IsArray());
1295 ASSERT(!empty_instantiations_cache_array_->IsSmi());
1296 ASSERT(empty_instantiations_cache_array_->IsArray());
1297 ASSERT(!empty_subtype_test_cache_array_->IsSmi());
1298 ASSERT(empty_subtype_test_cache_array_->IsArray());
1299 ASSERT(!empty_type_arguments_->IsSmi());
1300 ASSERT(empty_type_arguments_->IsTypeArguments());
1301 ASSERT(!empty_context_scope_->IsSmi());
1302 ASSERT(empty_context_scope_->IsContextScope());
1303 ASSERT(!empty_compressed_stackmaps_->IsSmi());
1304 ASSERT(empty_compressed_stackmaps_->IsCompressedStackMaps());
1305 ASSERT(!empty_descriptors_->IsSmi());
1306 ASSERT(empty_descriptors_->IsPcDescriptors());
1307 ASSERT(!empty_var_descriptors_->IsSmi());
1308 ASSERT(empty_var_descriptors_->IsLocalVarDescriptors());
1309 ASSERT(!empty_exception_handlers_->IsSmi());
1310 ASSERT(empty_exception_handlers_->IsExceptionHandlers());
1311 ASSERT(!empty_async_exception_handlers_->IsSmi());
1312 ASSERT(empty_async_exception_handlers_->IsExceptionHandlers());
1313 ASSERT(!sentinel_->IsSmi());
1314 ASSERT(sentinel_->IsSentinel());
1315 ASSERT(!transition_sentinel_->IsSmi());
1316 ASSERT(transition_sentinel_->IsSentinel());
1317 ASSERT(!unknown_constant_->IsSmi());
1318 ASSERT(unknown_constant_->IsSentinel());
1319 ASSERT(!non_constant_->IsSmi());
1320 ASSERT(non_constant_->IsSentinel());
1321 ASSERT(!optimized_out_->IsSmi());
1322 ASSERT(optimized_out_->IsSentinel());
1323 ASSERT(!bool_true_->IsSmi());
1324 ASSERT(bool_true_->IsBool());
1325 ASSERT(!bool_false_->IsSmi());
1326 ASSERT(bool_false_->IsBool());
1327 ASSERT(smi_illegal_cid_->IsSmi());
1328 ASSERT(smi_zero_->IsSmi());
1329 ASSERT(!no_callbacks_error_->IsSmi());
1330 ASSERT(no_callbacks_error_->IsApiError());
1331 ASSERT(!unwind_in_progress_error_->IsSmi());
1332 ASSERT(unwind_in_progress_error_->IsUnwindError());
1333 ASSERT(!snapshot_writer_error_->IsSmi());
1334 ASSERT(snapshot_writer_error_->IsLanguageError());
1335 ASSERT(!branch_offset_error_->IsSmi());
1336 ASSERT(branch_offset_error_->IsLanguageError());
1337 ASSERT(!speculative_inlining_error_->IsSmi());
1338 ASSERT(speculative_inlining_error_->IsLanguageError());
1339 ASSERT(!background_compilation_error_->IsSmi());
1340 ASSERT(background_compilation_error_->IsLanguageError());
1341 ASSERT(!out_of_memory_error_->IsSmi());
1342 ASSERT(out_of_memory_error_->IsLanguageError());
1343 ASSERT(!vm_isolate_snapshot_object_table_->IsSmi());
1344 ASSERT(vm_isolate_snapshot_object_table_->IsArray());
1345 ASSERT(!synthetic_getter_parameter_types_->IsSmi());
1346 ASSERT(synthetic_getter_parameter_types_->IsArray());
1347 ASSERT(!synthetic_getter_parameter_names_->IsSmi());
1348 ASSERT(synthetic_getter_parameter_names_->IsArray());
1349}
1350
1351 void Object::FinishInit(IsolateGroup* isolate_group) {
1352 // The type testing stubs we initialize in AbstractType objects for the
1353 // canonical type of kDynamicCid/kVoidCid need to be set in this
1354 // method, which is called after StubCode::InitOnce().
1355 Code& code = Code::Handle();
1356
// NOTE(review): the assignments that load |code| with the appropriate type
// testing stub (original lines 1357 and 1360) are elided in this listing;
// confirm against the full source before relying on this view.
1358 dynamic_type_->InitializeTypeTestingStubNonAtomic(code);
1359
1361 void_type_->InitializeTypeTestingStubNonAtomic(code);
1362}
1363
// Resets every cached VM-singleton class pointer back to RAW_NULL so no
// stale pointers survive VM shutdown.
// NOTE(review): the enclosing signature (original line 1364, presumably
// "void Object::Cleanup() {") is elided in this listing.
1365 null_ = static_cast<ObjectPtr>(RAW_NULL);
1366 true_ = static_cast<BoolPtr>(RAW_NULL);
1367 false_ = static_cast<BoolPtr>(RAW_NULL);
1368 class_class_ = static_cast<ClassPtr>(RAW_NULL);
1369 dynamic_class_ = static_cast<ClassPtr>(RAW_NULL);
1370 void_class_ = static_cast<ClassPtr>(RAW_NULL);
1371 type_parameters_class_ = static_cast<ClassPtr>(RAW_NULL);
1372 type_arguments_class_ = static_cast<ClassPtr>(RAW_NULL);
1373 patch_class_class_ = static_cast<ClassPtr>(RAW_NULL);
1374 function_class_ = static_cast<ClassPtr>(RAW_NULL);
1375 closure_data_class_ = static_cast<ClassPtr>(RAW_NULL);
1376 ffi_trampoline_data_class_ = static_cast<ClassPtr>(RAW_NULL);
1377 field_class_ = static_cast<ClassPtr>(RAW_NULL);
1378 script_class_ = static_cast<ClassPtr>(RAW_NULL);
1379 library_class_ = static_cast<ClassPtr>(RAW_NULL);
1380 namespace_class_ = static_cast<ClassPtr>(RAW_NULL);
1381 kernel_program_info_class_ = static_cast<ClassPtr>(RAW_NULL);
1382 code_class_ = static_cast<ClassPtr>(RAW_NULL);
1383 instructions_class_ = static_cast<ClassPtr>(RAW_NULL);
1384 instructions_section_class_ = static_cast<ClassPtr>(RAW_NULL);
1385 instructions_table_class_ = static_cast<ClassPtr>(RAW_NULL);
1386 object_pool_class_ = static_cast<ClassPtr>(RAW_NULL);
1387 pc_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
1388 code_source_map_class_ = static_cast<ClassPtr>(RAW_NULL);
1389 compressed_stackmaps_class_ = static_cast<ClassPtr>(RAW_NULL);
1390 var_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
1391 exception_handlers_class_ = static_cast<ClassPtr>(RAW_NULL);
1392 context_class_ = static_cast<ClassPtr>(RAW_NULL);
1393 context_scope_class_ = static_cast<ClassPtr>(RAW_NULL);
1394 singletargetcache_class_ = static_cast<ClassPtr>(RAW_NULL);
1395 unlinkedcall_class_ = static_cast<ClassPtr>(RAW_NULL);
1396 monomorphicsmiablecall_class_ = static_cast<ClassPtr>(RAW_NULL);
1397 icdata_class_ = static_cast<ClassPtr>(RAW_NULL);
1398 megamorphic_cache_class_ = static_cast<ClassPtr>(RAW_NULL);
1399 subtypetestcache_class_ = static_cast<ClassPtr>(RAW_NULL);
1400 loadingunit_class_ = static_cast<ClassPtr>(RAW_NULL);
1401 api_error_class_ = static_cast<ClassPtr>(RAW_NULL);
1402 language_error_class_ = static_cast<ClassPtr>(RAW_NULL);
1403 unhandled_exception_class_ = static_cast<ClassPtr>(RAW_NULL);
1404 unwind_error_class_ = static_cast<ClassPtr>(RAW_NULL);
1405}
1406
1407// An object visitor which will mark all visited objects. This is used to
1408// premark all objects in the vm_isolate_ heap. Also precalculates hash
1409// codes so that we can get the identity hash code of objects in the read-
1410// only VM isolate.
// NOTE(review): the class declaration (original line 1411, presumably
// "class FinalizeVMIsolateVisitor : public ObjectVisitor {"), the
// constructor name (line 1413), and the VisitObject(ObjectPtr) override
// signature (line 1420) are elided in this listing; confirm against the
// full source.
1412 public:
1414#if defined(HASH_IN_OBJECT_HEADER)
// Seed for the per-object identity-hash counter (arbitrary nonzero value).
1415 : counter_(1337)
1416#endif
1417 {
1418 }
1419
1421 // Free list elements should never be marked.
1422 ASSERT(!obj->untag()->IsMarked());
1423 // No forwarding corpses in the VM isolate.
1424 ASSERT(!obj->IsForwardingCorpse());
// NOTE(review): the statement that actually sets the mark bit on |obj|
// (original lines 1426-1427) is elided in this listing.
1425 if (!obj->IsFreeListElement()) {
1428#if defined(HASH_IN_OBJECT_HEADER)
1429 // These objects end up in the read-only VM isolate which is shared
1430 // between isolates, so we have to prepopulate them with identity hash
1431 // codes, since we can't add hash codes later.
1432 if (Object::GetCachedHash(obj) == 0) {
1433 // Some classes have identity hash codes that depend on their contents,
1434 // not per object.
1435 ASSERT(!obj->IsStringInstance());
1436 if (obj == Object::null()) {
1437 Object::SetCachedHashIfNotSet(obj, kNullIdentityHash);
1438 } else if (obj == Object::bool_true().ptr()) {
1439 Object::SetCachedHashIfNotSet(obj, kTrueIdentityHash);
1440 } else if (obj == Object::bool_false().ptr()) {
1441 Object::SetCachedHashIfNotSet(obj, kFalseIdentityHash);
1442 } else if (!obj->IsMint() && !obj->IsDouble()) {
1443 counter_ += 2011; // The year Dart was announced and a prime.
1444 counter_ &= 0x3fffffff;
// Keep the counter nonzero: zero means "no cached hash".
1445 if (counter_ == 0) counter_++;
1446 Object::SetCachedHashIfNotSet(obj, counter_);
1447 }
1448 }
1449#endif
1450#if !defined(DART_PRECOMPILED_RUNTIME)
1451 if (obj->IsClass()) {
1452 // Won't be able to update read-only VM isolate classes if implementors
1453 // are discovered later.
1454 static_cast<ClassPtr>(obj)->untag()->implementor_cid_ = kDynamicCid;
1455 }
1456#endif
1457 }
1458 }
1459
1460 private:
1461#if defined(HASH_IN_OBJECT_HEADER)
// Rolling counter used to hand out distinct identity hashes.
1462 int32_t counter_;
1463#endif
1464};
1465
// Helper macro: fetches the VM singleton class via the accessor
// |class_name|_class() and assigns it the symbol Symbols::|name|() as its
// user-visible class name. Expects a Class handle named |cls| to be in
// scope at the expansion site.
1466 #define SET_CLASS_NAME(class_name, name) \
1467 cls = class_name##_class(); \
1468 cls.set_name(Symbols::name());
1469
// NOTE(review): the signature line (original line 1470, presumably
// "void Object::FinalizeVMIsolate(IsolateGroup* isolate_group) {") and a
// few SET_CLASS_NAME entries (original lines 1487, 1491, 1497) are elided
// in this listing; confirm against the full source.
1471 // Should only be run by the vm isolate.
1472 ASSERT(isolate_group == Dart::vm_isolate_group());
1473
1474 // Finish initialization of synthetic_getter_parameter_names_ which was
1475 // started in Object::InitOnce().
1476 synthetic_getter_parameter_names_->SetAt(0, Symbols::This());
1477
1478 // Set up names for all VM singleton classes.
1479 Class& cls = Class::Handle();
1480
1481 SET_CLASS_NAME(class, Class);
1482 SET_CLASS_NAME(dynamic, Dynamic);
1483 SET_CLASS_NAME(void, Void);
1484 SET_CLASS_NAME(type_parameters, TypeParameters);
1485 SET_CLASS_NAME(type_arguments, TypeArguments);
1486 SET_CLASS_NAME(patch_class, PatchClass);
1488 SET_CLASS_NAME(closure_data, ClosureData);
1489 SET_CLASS_NAME(ffi_trampoline_data, FfiTrampolineData);
1490 SET_CLASS_NAME(field, Field);
1492 SET_CLASS_NAME(library, LibraryClass);
1493 SET_CLASS_NAME(namespace, Namespace);
1494 SET_CLASS_NAME(kernel_program_info, KernelProgramInfo);
1495 SET_CLASS_NAME(weak_serialization_reference, WeakSerializationReference);
1496 SET_CLASS_NAME(weak_array, WeakArray);
1498 SET_CLASS_NAME(instructions, Instructions);
1499 SET_CLASS_NAME(instructions_section, InstructionsSection);
1500 SET_CLASS_NAME(instructions_table, InstructionsTable);
1501 SET_CLASS_NAME(object_pool, ObjectPool);
1502 SET_CLASS_NAME(code_source_map, CodeSourceMap);
1503 SET_CLASS_NAME(pc_descriptors, PcDescriptors);
1504 SET_CLASS_NAME(compressed_stackmaps, CompressedStackMaps);
1505 SET_CLASS_NAME(var_descriptors, LocalVarDescriptors);
1506 SET_CLASS_NAME(exception_handlers, ExceptionHandlers);
1507 SET_CLASS_NAME(context, Context);
1508 SET_CLASS_NAME(context_scope, ContextScope);
1509 SET_CLASS_NAME(sentinel, Sentinel);
1510 SET_CLASS_NAME(singletargetcache, SingleTargetCache);
1511 SET_CLASS_NAME(unlinkedcall, UnlinkedCall);
1512 SET_CLASS_NAME(monomorphicsmiablecall, MonomorphicSmiableCall);
1513 SET_CLASS_NAME(icdata, ICData);
1514 SET_CLASS_NAME(megamorphic_cache, MegamorphicCache);
1515 SET_CLASS_NAME(subtypetestcache, SubtypeTestCache);
1516 SET_CLASS_NAME(loadingunit, LoadingUnit);
1517 SET_CLASS_NAME(api_error, ApiError);
1518 SET_CLASS_NAME(language_error, LanguageError);
1519 SET_CLASS_NAME(unhandled_exception, UnhandledException);
1520 SET_CLASS_NAME(unwind_error, UnwindError);
1521
1522 // Set up names for classes which are also pre-allocated in the vm isolate.
1523 cls = isolate_group->object_store()->array_class();
1524 cls.set_name(Symbols::_List());
1525 cls = isolate_group->object_store()->one_byte_string_class();
1526 cls.set_name(Symbols::OneByteString());
1527 cls = isolate_group->object_store()->never_class();
1528 cls.set_name(Symbols::Never());
1529
1530 // Set up names for the pseudo-classes for free list elements and forwarding
1531 // corpses. Mainly this makes VM debugging easier.
1532 cls = isolate_group->class_table()->At(kFreeListElement);
1533 cls.set_name(Symbols::FreeListElement());
1534 cls = isolate_group->class_table()->At(kForwardingCorpse);
1535 cls.set_name(Symbols::ForwardingCorpse());
1536
1537#if defined(DART_PRECOMPILER)
// Give the shared "unknown Dart code" stub's function a readable name.
1538 const auto& function =
1539 Function::Handle(StubCode::UnknownDartCode().function());
1540 function.set_name(Symbols::OptimizedOut());
1541#endif // defined(DART_PRECOMPILER)
1542
// Premark every old-space object (and assign identity hashes) so the VM
// isolate heap can be treated as read-only afterwards.
1543 {
1544 ASSERT(isolate_group == Dart::vm_isolate_group());
1545 Thread* thread = Thread::Current();
1546 WritableVMIsolateScope scope(thread);
1547 HeapIterationScope iteration(thread);
1548 FinalizeVMIsolateVisitor premarker;
1549 ASSERT(isolate_group->heap()->UsedInWords(Heap::kNew) == 0);
1550 iteration.IterateOldObjectsNoImagePages(&premarker);
1551 // Make the VM isolate read-only again after setting all objects as marked.
1552 // Note objects in image pages are already pre-marked.
1553 }
1554}
1555
// NOTE(review): the signature line (original line 1556, presumably
// "void Object::FinalizeReadOnlyObject(ObjectPtr object) {") is elided in
// this listing, as are the hash-store lines and several |size|
// declarations (original lines 1563, 1573, 1582, 1588, 1594).
// The visible code zeroes the slack between each object's unrounded
// payload size and its allocated HeapSize() — presumably so read-only
// pages contain deterministic bytes; confirm against the full source.
1557 NoSafepointScope no_safepoint;
1558 intptr_t cid = object->GetClassId();
1559 if (cid == kOneByteStringCid) {
1560 OneByteStringPtr str = static_cast<OneByteStringPtr>(object);
// Strings must have their hash cached now; it cannot be set once the
// object lives in the read-only VM isolate.
1561 if (String::GetCachedHash(str) == 0) {
1562 intptr_t hash = String::Hash(str);
1564 }
1565 intptr_t size = OneByteString::UnroundedSize(str);
1566 ASSERT(size <= str->untag()->HeapSize());
1567 memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(str) + size), 0,
1568 str->untag()->HeapSize() - size);
1569 } else if (cid == kTwoByteStringCid) {
1570 TwoByteStringPtr str = static_cast<TwoByteStringPtr>(object);
1571 if (String::GetCachedHash(str) == 0) {
1572 intptr_t hash = String::Hash(str);
1574 }
1575 ASSERT(String::GetCachedHash(str) != 0);
1576 intptr_t size = TwoByteString::UnroundedSize(str);
1577 ASSERT(size <= str->untag()->HeapSize());
1578 memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(str) + size), 0,
1579 str->untag()->HeapSize() - size);
1580 } else if (cid == kCodeSourceMapCid) {
1581 CodeSourceMapPtr map = CodeSourceMap::RawCast(object);
1583 ASSERT(size <= map->untag()->HeapSize());
1584 memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(map) + size), 0,
1585 map->untag()->HeapSize() - size);
1586 } else if (cid == kCompressedStackMapsCid) {
1587 CompressedStackMapsPtr maps = CompressedStackMaps::RawCast(object);
1589 ASSERT(size <= maps->untag()->HeapSize());
1590 memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(maps) + size), 0,
1591 maps->untag()->HeapSize() - size);
1592 } else if (cid == kPcDescriptorsCid) {
1593 PcDescriptorsPtr desc = PcDescriptors::RawCast(object);
1595 ASSERT(size <= desc->untag()->HeapSize());
1596 memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(desc) + size), 0,
1597 desc->untag()->HeapSize() - size);
1598 }
1599}
1600
// Installs |table| as the VM isolate snapshot object table.
// NOTE(review): the signature (original lines 1601-1602, presumably
// "void Object::set_vm_isolate_snapshot_object_table(const Array& table) {")
// is elided in this listing.
1603 *vm_isolate_snapshot_object_table_ = table.ptr();
1604}
1605
1606// Make unused space in an object whose type has been transformed safe
1607// for traversing during GC.
1608// The unused part of the transformed object is marked as a FreeListElement
1609// object that is not inserted into the freelist.
// NOTE(review): the first line of the signature (original line 1610,
// presumably "void Object::MakeUnusedSpaceTraversable(const Object& obj,")
// is elided in this listing; only the trailing parameters are visible.
1611 intptr_t original_size,
1612 intptr_t used_size) {
// Must run without a safepoint: the GC must never observe the object in a
// half-transformed state.
1613 ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
1614 ASSERT(!obj.IsNull());
1615 ASSERT(original_size >= used_size);
1616 if (original_size > used_size) {
// Stamp the leftover tail as a FreeListElement (never linked into any
// freelist) so heap walkers can step over it.
1617 intptr_t leftover_size = original_size - used_size;
1618 uword addr = UntaggedObject::ToAddr(obj.ptr()) + used_size;
1619 if (obj.ptr()->IsNewObject()) {
1620 FreeListElement::AsElementNew(addr, leftover_size);
1621 } else {
1622 FreeListElement::AsElement(addr, leftover_size);
1623 }
1624 // On architectures with a relaxed memory model, the concurrent marker may
1625 // observe the write of the filler object's header before observing the
1626 // new array length, and so treat it as a pointer. Ensure it is a Smi so
1627 // the marker won't dereference it.
1628 ASSERT((*reinterpret_cast<uword*>(addr) & kSmiTagMask) == kSmiTag);
1629 ASSERT((*reinterpret_cast<uword*>(addr + kWordSize) & kSmiTagMask) ==
1630 kSmiTag);
1631 }
1632}
1633
// Debug-only sanity check: every valid predefined class id must have a
// registered builtin vtable, while pseudo-cids (illegal, free-list,
// forwarding-corpse) must not.
// NOTE(review): the signature (original line 1634) and the line defining
// |table| (original line 1639, presumably the isolate group's class table)
// are elided in this listing.
1635#if defined(DEBUG)
1636 ASSERT(builtin_vtables_[kIllegalCid] == 0);
1637 ASSERT(builtin_vtables_[kFreeListElement] == 0);
1638 ASSERT(builtin_vtables_[kForwardingCorpse] == 0);
1640 for (intptr_t cid = kObjectCid; cid < kNumPredefinedCids; cid++) {
1641 if (table->HasValidClassAt(cid)) {
1642 ASSERT(builtin_vtables_[cid] != 0);
1643 }
1644 }
1645#endif
1646}
1647
1648void Object::RegisterClass(const Class& cls,
1649 const String& name,
1650 const Library& lib) {
1651 ASSERT(name.Length() > 0);
1652 ASSERT(name.CharAt(0) != '_');
1653 cls.set_name(name);
1654 lib.AddClass(cls);
1655}
1656
1657void Object::RegisterPrivateClass(const Class& cls,
1658 const String& public_class_name,
1659 const Library& lib) {
1660 ASSERT(public_class_name.Length() > 0);
1661 ASSERT(public_class_name.CharAt(0) == '_');
1662 String& str = String::Handle();
1663 str = lib.PrivateName(public_class_name);
1664 cls.set_name(str);
1665 lib.AddClass(cls);
1666}
1667
1668// Initialize a new isolate from source or from a snapshot.
1669//
1670// There are three possibilities:
1671// 1. Running a Kernel binary. This function will bootstrap from the KERNEL
1672// file.
1673// 2. There is no vm snapshot. This function will bootstrap from source.
1674// 3. There is a vm snapshot. The caller should initialize from the snapshot.
1675//
1676// A non-null kernel argument indicates (1).
1677// A nullptr kernel indicates (2) or (3).
1678ErrorPtr Object::Init(IsolateGroup* isolate_group,
1679 const uint8_t* kernel_buffer,
1680 intptr_t kernel_buffer_size) {
1681 Thread* thread = Thread::Current();
1682 Zone* zone = thread->zone();
1683 ASSERT(isolate_group == thread->isolate_group());
1684 TIMELINE_DURATION(thread, Isolate, "Object::Init");
1685
1686#if defined(DART_PRECOMPILED_RUNTIME)
1687 const bool bootstrapping = false;
1688#else
1689 const bool is_kernel = (kernel_buffer != nullptr);
1690 const bool bootstrapping =
1691 (Dart::vm_snapshot_kind() == Snapshot::kNone) || is_kernel;
1692#endif // defined(DART_PRECOMPILED_RUNTIME).
1693
1694 if (bootstrapping) {
1695#if !defined(DART_PRECOMPILED_RUNTIME)
1696 // Object::Init version when we are bootstrapping from source or from a
1697 // Kernel binary.
1698 // This will initialize isolate group object_store, shared by all isolates
1699 // running in the isolate group.
1700 ObjectStore* object_store = isolate_group->object_store();
1701 SafepointWriteRwLocker ml(thread, isolate_group->program_lock());
1702
1703 Class& cls = Class::Handle(zone);
1704 Type& type = Type::Handle(zone);
1705 Array& array = Array::Handle(zone);
1706 WeakArray& weak_array = WeakArray::Handle(zone);
1707 Library& lib = Library::Handle(zone);
1708 TypeArguments& type_args = TypeArguments::Handle(zone);
1709
1710 // All RawArray fields will be initialized to an empty array, therefore
1711 // initialize array class first.
1712 cls = Class::New<Array, RTN::Array>(isolate_group);
1713 ASSERT(object_store->array_class() == Class::null());
1714 object_store->set_array_class(cls);
1715
1716 // VM classes that are parameterized (Array, ImmutableArray,
1717 // GrowableObjectArray, Map, ConstMap,
1718 // Set, ConstSet) are also pre-finalized, so
1719 // CalculateFieldOffsets() is not called, so we need to set the offset
1720 // of their type_arguments_ field, which is explicitly
1721 // declared in their respective Raw* classes.
1725
1726 // Set up the growable object array class (Has to be done after the array
1727 // class is setup as one of its field is an array object).
1728 cls = Class::New<GrowableObjectArray, RTN::GrowableObjectArray>(
1729 isolate_group);
1730 object_store->set_growable_object_array_class(cls);
1735
1736 // Initialize hash set for regexp_table_.
1737 const intptr_t kInitialCanonicalRegExpSize = 4;
1738 weak_array = HashTables::New<CanonicalRegExpSet>(
1739 kInitialCanonicalRegExpSize, Heap::kOld);
1740 object_store->set_regexp_table(weak_array);
1741
1742 // Initialize hash set for canonical types.
1743 const intptr_t kInitialCanonicalTypeSize = 16;
1744 array = HashTables::New<CanonicalTypeSet>(kInitialCanonicalTypeSize,
1745 Heap::kOld);
1746 object_store->set_canonical_types(array);
1747
1748 // Initialize hash set for canonical function types.
1749 const intptr_t kInitialCanonicalFunctionTypeSize = 16;
1750 array = HashTables::New<CanonicalFunctionTypeSet>(
1751 kInitialCanonicalFunctionTypeSize, Heap::kOld);
1752 object_store->set_canonical_function_types(array);
1753
1754 // Initialize hash set for canonical record types.
1755 const intptr_t kInitialCanonicalRecordTypeSize = 16;
1756 array = HashTables::New<CanonicalRecordTypeSet>(
1757 kInitialCanonicalRecordTypeSize, Heap::kOld);
1758 object_store->set_canonical_record_types(array);
1759
1760 // Initialize hash set for canonical type parameters.
1761 const intptr_t kInitialCanonicalTypeParameterSize = 4;
1762 array = HashTables::New<CanonicalTypeParameterSet>(
1763 kInitialCanonicalTypeParameterSize, Heap::kOld);
1764 object_store->set_canonical_type_parameters(array);
1765
1766 // Initialize hash set for canonical_type_arguments_.
1767 const intptr_t kInitialCanonicalTypeArgumentsSize = 4;
1768 array = HashTables::New<CanonicalTypeArgumentsSet>(
1769 kInitialCanonicalTypeArgumentsSize, Heap::kOld);
1770 object_store->set_canonical_type_arguments(array);
1771
1772 // Setup type class early in the process.
1773 const Class& type_cls =
1774 Class::Handle(zone, Class::New<Type, RTN::Type>(isolate_group));
1775 const Class& function_type_cls = Class::Handle(
1776 zone, Class::New<FunctionType, RTN::FunctionType>(isolate_group));
1777 const Class& record_type_cls = Class::Handle(
1778 zone, Class::New<RecordType, RTN::RecordType>(isolate_group));
1779 const Class& type_parameter_cls = Class::Handle(
1780 zone, Class::New<TypeParameter, RTN::TypeParameter>(isolate_group));
1781 const Class& library_prefix_cls = Class::Handle(
1782 zone, Class::New<LibraryPrefix, RTN::LibraryPrefix>(isolate_group));
1783
1784 // Pre-allocate the OneByteString class needed by the symbol table.
1785 cls = Class::NewStringClass(kOneByteStringCid, isolate_group);
1786 object_store->set_one_byte_string_class(cls);
1787
1788 // Pre-allocate the TwoByteString class needed by the symbol table.
1789 cls = Class::NewStringClass(kTwoByteStringCid, isolate_group);
1790 object_store->set_two_byte_string_class(cls);
1791
1792 // Setup the symbol table for the symbols created in the isolate.
1793 Symbols::SetupSymbolTable(isolate_group);
1794
1795 // Set up the libraries array before initializing the core library.
1796 const GrowableObjectArray& libraries =
1798 object_store->set_libraries(libraries);
1799
1800 // Pre-register the core library.
1801 Library::InitCoreLibrary(isolate_group);
1802
1803 // Basic infrastructure has been setup, initialize the class dictionary.
1804 const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
1805 ASSERT(!core_lib.IsNull());
1806
1807 const GrowableObjectArray& pending_classes =
1809 object_store->set_pending_classes(pending_classes);
1810
1811 // Now that the symbol table is initialized and that the core dictionary as
1812 // well as the core implementation dictionary have been setup, preallocate
1813 // remaining classes and register them by name in the dictionaries.
1814 String& name = String::Handle(zone);
1815 cls = object_store->array_class(); // Was allocated above.
1816 RegisterPrivateClass(cls, Symbols::_List(), core_lib);
1817 pending_classes.Add(cls);
1818 // We cannot use NewNonParameterizedType(), because Array is
1819 // parameterized. Warning: class _List has not been patched yet. Its
1820 // declared number of type parameters is still 0. It will become 1 after
1821 // patching. The array type allocated below represents the raw type _List
1822 // and not _List<E> as we could expect. Use with caution.
1823 type = Type::New(Class::Handle(zone, cls.ptr()),
1824 Object::null_type_arguments(), Nullability::kNonNullable);
1825 type.SetIsFinalized();
1826 type ^= type.Canonicalize(thread);
1827 object_store->set_array_type(type);
1828
1829 cls = object_store->growable_object_array_class(); // Was allocated above.
1830 RegisterPrivateClass(cls, Symbols::_GrowableList(), core_lib);
1831 pending_classes.Add(cls);
1832
1833 cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate_group);
1834 object_store->set_immutable_array_class(cls);
1838 ASSERT(object_store->immutable_array_class() !=
1839 object_store->array_class());
1840 cls.set_is_prefinalized();
1841 RegisterPrivateClass(cls, Symbols::_ImmutableList(), core_lib);
1842 pending_classes.Add(cls);
1843
1844 cls = object_store->one_byte_string_class(); // Was allocated above.
1845 RegisterPrivateClass(cls, Symbols::OneByteString(), core_lib);
1846 pending_classes.Add(cls);
1847
1848 cls = object_store->two_byte_string_class(); // Was allocated above.
1849 RegisterPrivateClass(cls, Symbols::TwoByteString(), core_lib);
1850 pending_classes.Add(cls);
1851
1852 // Pre-register the isolate library so the native class implementations can
1853 // be hooked up before compiling it.
1854 Library& isolate_lib = Library::Handle(
1855 zone, Library::LookupLibrary(thread, Symbols::DartIsolate()));
1856 if (isolate_lib.IsNull()) {
1857 isolate_lib = Library::NewLibraryHelper(Symbols::DartIsolate(), true);
1858 isolate_lib.SetLoadRequested();
1859 isolate_lib.Register(thread);
1860 }
1861 object_store->set_bootstrap_library(ObjectStore::kIsolate, isolate_lib);
1862 ASSERT(!isolate_lib.IsNull());
1863 ASSERT(isolate_lib.ptr() == Library::IsolateLibrary());
1864
1865 cls = Class::New<Capability, RTN::Capability>(isolate_group);
1866 RegisterPrivateClass(cls, Symbols::_Capability(), isolate_lib);
1867 pending_classes.Add(cls);
1868
1869 cls = Class::New<ReceivePort, RTN::ReceivePort>(isolate_group);
1870 RegisterPrivateClass(cls, Symbols::_RawReceivePort(), isolate_lib);
1871 pending_classes.Add(cls);
1872
1873 cls = Class::New<SendPort, RTN::SendPort>(isolate_group);
1874 RegisterPrivateClass(cls, Symbols::_SendPort(), isolate_lib);
1875 pending_classes.Add(cls);
1876
1877 cls = Class::New<TransferableTypedData, RTN::TransferableTypedData>(
1878 isolate_group);
1879 RegisterPrivateClass(cls, Symbols::_TransferableTypedDataImpl(),
1880 isolate_lib);
1881 pending_classes.Add(cls);
1882
1883 const Class& stacktrace_cls = Class::Handle(
1884 zone, Class::New<StackTrace, RTN::StackTrace>(isolate_group));
1885 RegisterPrivateClass(stacktrace_cls, Symbols::_StackTrace(), core_lib);
1886 pending_classes.Add(stacktrace_cls);
1887 // Super type set below, after Object is allocated.
1888
1889 cls = Class::New<RegExp, RTN::RegExp>(isolate_group);
1890 RegisterPrivateClass(cls, Symbols::_RegExp(), core_lib);
1891 pending_classes.Add(cls);
1892
1893 // Initialize the base interfaces used by the core VM classes.
1894
1895 // Allocate and initialize the pre-allocated classes in the core library.
1896 // The script and token index of these pre-allocated classes is set up when
1897 // the corelib script is compiled.
1898 cls = Class::New<Instance, RTN::Instance>(kInstanceCid, isolate_group);
1899 object_store->set_object_class(cls);
1900 cls.set_name(Symbols::Object());
1902 cls.set_is_prefinalized();
1903 cls.set_is_const();
1904 core_lib.AddClass(cls);
1905 pending_classes.Add(cls);
1907 ASSERT(type.IsCanonical());
1908 object_store->set_object_type(type);
1910 ASSERT(type.IsCanonical());
1911 object_store->set_non_nullable_object_type(type);
1912 type = type.ToNullability(Nullability::kNullable, Heap::kOld);
1913 ASSERT(type.IsCanonical());
1914 object_store->set_nullable_object_type(type);
1915
1916 cls = Class::New<Bool, RTN::Bool>(isolate_group);
1917 object_store->set_bool_class(cls);
1918 RegisterClass(cls, Symbols::Bool(), core_lib);
1919 pending_classes.Add(cls);
1920
1921 cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate_group);
1922 object_store->set_null_class(cls);
1924 cls.set_is_prefinalized();
1925 RegisterClass(cls, Symbols::Null(), core_lib);
1926 pending_classes.Add(cls);
1927
1928 cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate_group);
1933 cls.set_name(Symbols::Never());
1934 object_store->set_never_class(cls);
1935
1936 ASSERT(!library_prefix_cls.IsNull());
1937 RegisterPrivateClass(library_prefix_cls, Symbols::_LibraryPrefix(),
1938 core_lib);
1939 pending_classes.Add(library_prefix_cls);
1940
1941 RegisterPrivateClass(type_cls, Symbols::_Type(), core_lib);
1942 pending_classes.Add(type_cls);
1943
1944 RegisterPrivateClass(function_type_cls, Symbols::_FunctionType(), core_lib);
1945 pending_classes.Add(function_type_cls);
1946
1947 RegisterPrivateClass(record_type_cls, Symbols::_RecordType(), core_lib);
1948 pending_classes.Add(record_type_cls);
1949
1950 RegisterPrivateClass(type_parameter_cls, Symbols::_TypeParameter(),
1951 core_lib);
1952 pending_classes.Add(type_parameter_cls);
1953
1954 cls = Class::New<Integer, RTN::Integer>(isolate_group);
1955 object_store->set_integer_implementation_class(cls);
1956 RegisterPrivateClass(cls, Symbols::_IntegerImplementation(), core_lib);
1957 pending_classes.Add(cls);
1958
1959 cls = Class::New<Smi, RTN::Smi>(isolate_group);
1960 object_store->set_smi_class(cls);
1961 RegisterPrivateClass(cls, Symbols::_Smi(), core_lib);
1962 pending_classes.Add(cls);
1963
1964 cls = Class::New<Mint, RTN::Mint>(isolate_group);
1965 object_store->set_mint_class(cls);
1966 RegisterPrivateClass(cls, Symbols::_Mint(), core_lib);
1967 pending_classes.Add(cls);
1968
1969 cls = Class::New<Double, RTN::Double>(isolate_group);
1970 object_store->set_double_class(cls);
1971 RegisterPrivateClass(cls, Symbols::_Double(), core_lib);
1972 pending_classes.Add(cls);
1973
1974 // Class that represents the Dart class _Closure and C++ class Closure.
1975 cls = Class::New<Closure, RTN::Closure>(isolate_group);
1976 object_store->set_closure_class(cls);
1977 RegisterPrivateClass(cls, Symbols::_Closure(), core_lib);
1978 pending_classes.Add(cls);
1979
1980 cls = Class::New<Record, RTN::Record>(isolate_group);
1981 RegisterPrivateClass(cls, Symbols::_Record(), core_lib);
1982 pending_classes.Add(cls);
1983
1984 cls = Class::New<WeakProperty, RTN::WeakProperty>(isolate_group);
1985 object_store->set_weak_property_class(cls);
1986 RegisterPrivateClass(cls, Symbols::_WeakProperty(), core_lib);
1987
1988 cls = Class::New<WeakReference, RTN::WeakReference>(isolate_group);
1993 object_store->set_weak_reference_class(cls);
1994 RegisterPrivateClass(cls, Symbols::_WeakReference(), core_lib);
1995
1996 // Pre-register the mirrors library so we can place the vm class
1997 // MirrorReference there rather than the core library.
1998 lib = Library::LookupLibrary(thread, Symbols::DartMirrors());
1999 if (lib.IsNull()) {
2000 lib = Library::NewLibraryHelper(Symbols::DartMirrors(), true);
2001 lib.SetLoadRequested();
2002 lib.Register(thread);
2003 }
2004 object_store->set_bootstrap_library(ObjectStore::kMirrors, lib);
2005 ASSERT(!lib.IsNull());
2007
2008 cls = Class::New<MirrorReference, RTN::MirrorReference>(isolate_group);
2009 RegisterPrivateClass(cls, Symbols::_MirrorReference(), lib);
2010
2011 // Pre-register the collection library so we can place the vm class
2012 // Map there rather than the core library.
2013 lib = Library::LookupLibrary(thread, Symbols::DartCollection());
2014 if (lib.IsNull()) {
2015 lib = Library::NewLibraryHelper(Symbols::DartCollection(), true);
2016 lib.SetLoadRequested();
2017 lib.Register(thread);
2018 }
2019
2020 object_store->set_bootstrap_library(ObjectStore::kCollection, lib);
2021 ASSERT(!lib.IsNull());
2023 cls = Class::New<Map, RTN::Map>(isolate_group);
2024 object_store->set_map_impl_class(cls);
2028 RegisterPrivateClass(cls, Symbols::_Map(), lib);
2029 pending_classes.Add(cls);
2030
2031 cls = Class::New<Map, RTN::Map>(kConstMapCid, isolate_group);
2032 object_store->set_const_map_impl_class(cls);
2036 cls.set_is_prefinalized();
2037 RegisterPrivateClass(cls, Symbols::_ConstMap(), lib);
2038 pending_classes.Add(cls);
2039
2040 cls = Class::New<Set, RTN::Set>(isolate_group);
2041 object_store->set_set_impl_class(cls);
2045 RegisterPrivateClass(cls, Symbols::_Set(), lib);
2046 pending_classes.Add(cls);
2047
2048 cls = Class::New<Set, RTN::Set>(kConstSetCid, isolate_group);
2049 object_store->set_const_set_impl_class(cls);
2053 cls.set_is_prefinalized();
2054 RegisterPrivateClass(cls, Symbols::_ConstSet(), lib);
2055 pending_classes.Add(cls);
2056
2057 // Pre-register the async library so we can place the vm class
2058 // FutureOr there rather than the core library.
2059 lib = Library::LookupLibrary(thread, Symbols::DartAsync());
2060 if (lib.IsNull()) {
2061 lib = Library::NewLibraryHelper(Symbols::DartAsync(), true);
2062 lib.SetLoadRequested();
2063 lib.Register(thread);
2064 }
2065 object_store->set_bootstrap_library(ObjectStore::kAsync, lib);
2066 ASSERT(!lib.IsNull());
2067 ASSERT(lib.ptr() == Library::AsyncLibrary());
2068 cls = Class::New<FutureOr, RTN::FutureOr>(isolate_group);
2072 RegisterClass(cls, Symbols::FutureOr(), lib);
2073 pending_classes.Add(cls);
2074 object_store->set_future_or_class(cls);
2075
2076 cls = Class::New<SuspendState, RTN::SuspendState>(isolate_group);
2077 RegisterPrivateClass(cls, Symbols::_SuspendState(), lib);
2078 pending_classes.Add(cls);
2079
2080 // Pre-register the developer library so we can place the vm class
2081 // UserTag there rather than the core library.
2082 lib = Library::LookupLibrary(thread, Symbols::DartDeveloper());
2083 if (lib.IsNull()) {
2084 lib = Library::NewLibraryHelper(Symbols::DartDeveloper(), true);
2085 lib.SetLoadRequested();
2086 lib.Register(thread);
2087 }
2088 object_store->set_bootstrap_library(ObjectStore::kDeveloper, lib);
2089 ASSERT(!lib.IsNull());
2091 cls = Class::New<UserTag, RTN::UserTag>(isolate_group);
2092 RegisterPrivateClass(cls, Symbols::_UserTag(), lib);
2093 pending_classes.Add(cls);
2094
2095 // Setup some default native field classes which can be extended for
2096 // specifying native fields in dart classes.
2097 Library::InitNativeWrappersLibrary(isolate_group, is_kernel);
2098 ASSERT(object_store->native_wrappers_library() != Library::null());
2099
2100 // Pre-register the typed_data library so the native class implementations
2101 // can be hooked up before compiling it.
2102 lib = Library::LookupLibrary(thread, Symbols::DartTypedData());
2103 if (lib.IsNull()) {
2104 lib = Library::NewLibraryHelper(Symbols::DartTypedData(), true);
2105 lib.SetLoadRequested();
2106 lib.Register(thread);
2107 }
2108 object_store->set_bootstrap_library(ObjectStore::kTypedData, lib);
2109 ASSERT(!lib.IsNull());
2111#define REGISTER_TYPED_DATA_CLASS(clazz) \
2112 cls = Class::NewTypedDataClass(kTypedData##clazz##ArrayCid, isolate_group); \
2113 RegisterPrivateClass(cls, Symbols::_##clazz##List(), lib);
2114
2116#undef REGISTER_TYPED_DATA_CLASS
2117#define REGISTER_TYPED_DATA_VIEW_CLASS(clazz) \
2118 cls = \
2119 Class::NewTypedDataViewClass(kTypedData##clazz##ViewCid, isolate_group); \
2120 RegisterPrivateClass(cls, Symbols::_##clazz##View(), lib); \
2121 pending_classes.Add(cls); \
2122 cls = Class::NewUnmodifiableTypedDataViewClass( \
2123 kUnmodifiableTypedData##clazz##ViewCid, isolate_group); \
2124 RegisterPrivateClass(cls, Symbols::_Unmodifiable##clazz##View(), lib); \
2125 pending_classes.Add(cls);
2126
2128
2129 cls = Class::NewTypedDataViewClass(kByteDataViewCid, isolate_group);
2130 RegisterPrivateClass(cls, Symbols::_ByteDataView(), lib);
2131 pending_classes.Add(cls);
2133 isolate_group);
2134 RegisterPrivateClass(cls, Symbols::_UnmodifiableByteDataView(), lib);
2135 pending_classes.Add(cls);
2136
2137#undef REGISTER_TYPED_DATA_VIEW_CLASS
2138#define REGISTER_EXT_TYPED_DATA_CLASS(clazz) \
2139 cls = Class::NewExternalTypedDataClass(kExternalTypedData##clazz##Cid, \
2140 isolate_group); \
2141 RegisterPrivateClass(cls, Symbols::_External##clazz(), lib);
2142
2143 cls = Class::New<Instance, RTN::Instance>(kByteBufferCid, isolate_group,
2144 /*register_class=*/false);
2145 cls.set_instance_size(0, 0);
2147 isolate_group->class_table()->Register(cls);
2148 RegisterPrivateClass(cls, Symbols::_ByteBuffer(), lib);
2149 pending_classes.Add(cls);
2150
2152#undef REGISTER_EXT_TYPED_DATA_CLASS
2153 // Register Float32x4, Int32x4, and Float64x2 in the object store.
2154 cls = Class::New<Float32x4, RTN::Float32x4>(isolate_group);
2155 RegisterPrivateClass(cls, Symbols::_Float32x4(), lib);
2156 pending_classes.Add(cls);
2157 object_store->set_float32x4_class(cls);
2158
2159 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2160 /*register_class=*/true,
2161 /*is_abstract=*/true);
2162 RegisterClass(cls, Symbols::Float32x4(), lib);
2164 cls.set_is_prefinalized();
2166 object_store->set_float32x4_type(type);
2167
2168 cls = Class::New<Int32x4, RTN::Int32x4>(isolate_group);
2169 RegisterPrivateClass(cls, Symbols::_Int32x4(), lib);
2170 pending_classes.Add(cls);
2171 object_store->set_int32x4_class(cls);
2172
2173 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2174 /*register_class=*/true,
2175 /*is_abstract=*/true);
2176 RegisterClass(cls, Symbols::Int32x4(), lib);
2178 cls.set_is_prefinalized();
2180 object_store->set_int32x4_type(type);
2181
2182 cls = Class::New<Float64x2, RTN::Float64x2>(isolate_group);
2183 RegisterPrivateClass(cls, Symbols::_Float64x2(), lib);
2184 pending_classes.Add(cls);
2185 object_store->set_float64x2_class(cls);
2186
2187 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2188 /*register_class=*/true,
2189 /*is_abstract=*/true);
2190 RegisterClass(cls, Symbols::Float64x2(), lib);
2192 cls.set_is_prefinalized();
2194 object_store->set_float64x2_type(type);
2195
2196 // Set the super type of class StackTrace to Object type so that the
2197 // 'toString' method is implemented.
2198 type = object_store->object_type();
2199 stacktrace_cls.set_super_type(type);
2200
2201 // Abstract class that represents the Dart class Type.
2202 // Note that this class is implemented by Dart class _AbstractType.
2203 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2204 /*register_class=*/true,
2205 /*is_abstract=*/true);
2207 cls.set_is_prefinalized();
2208 RegisterClass(cls, Symbols::Type(), core_lib);
2209 pending_classes.Add(cls);
2211 object_store->set_type_type(type);
2212
2213 // Abstract class that represents the Dart class Function.
2214 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2215 /*register_class=*/true,
2216 /*is_abstract=*/true);
2218 cls.set_is_prefinalized();
2219 RegisterClass(cls, Symbols::Function(), core_lib);
2220 pending_classes.Add(cls);
2222 object_store->set_function_type(type);
2223
2224 // Abstract class that represents the Dart class Record.
2225 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2226 /*register_class=*/true,
2227 /*is_abstract=*/true);
2228 RegisterClass(cls, Symbols::Record(), core_lib);
2229 pending_classes.Add(cls);
2230 object_store->set_record_class(cls);
2231
2232 cls = Class::New<Number, RTN::Number>(isolate_group);
2233 RegisterClass(cls, Symbols::Number(), core_lib);
2234 pending_classes.Add(cls);
2236 object_store->set_number_type(type);
2237 type = type.ToNullability(Nullability::kNullable, Heap::kOld);
2238 object_store->set_nullable_number_type(type);
2239
2240 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2241 /*register_class=*/true,
2242 /*is_abstract=*/true);
2243 RegisterClass(cls, Symbols::Int(), core_lib);
2245 cls.set_is_prefinalized();
2246 pending_classes.Add(cls);
2248 object_store->set_int_type(type);
2250 object_store->set_non_nullable_int_type(type);
2251 type = type.ToNullability(Nullability::kNullable, Heap::kOld);
2252 object_store->set_nullable_int_type(type);
2253
2254 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2255 /*register_class=*/true,
2256 /*is_abstract=*/true);
2257 RegisterClass(cls, Symbols::Double(), core_lib);
2259 cls.set_is_prefinalized();
2260 pending_classes.Add(cls);
2262 object_store->set_double_type(type);
2263 type = type.ToNullability(Nullability::kNullable, Heap::kOld);
2264 object_store->set_nullable_double_type(type);
2265
2266 name = Symbols::_String().ptr();
2267 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2268 /*register_class=*/true,
2269 /*is_abstract=*/true);
2270 RegisterClass(cls, name, core_lib);
2272 cls.set_is_prefinalized();
2273 pending_classes.Add(cls);
2275 object_store->set_string_type(type);
2276
2277 cls = object_store->bool_class();
2279 object_store->set_bool_type(type);
2280
2281 cls = object_store->smi_class();
2283 object_store->set_smi_type(type);
2284
2285 cls = object_store->mint_class();
2287 object_store->set_mint_type(type);
2288
2289 // The classes 'void' and 'dynamic' are phony classes to make type checking
2290 // more regular; they live in the VM isolate. The class 'void' is not
2291 // registered in the class dictionary because its name is a reserved word.
2292 // The class 'dynamic' is registered in the class dictionary because its
2293 // name is a built-in identifier (this is wrong). The corresponding types
2294 // are stored in the object store.
2295 cls = object_store->null_class();
2296 type =
2297 Type::New(cls, Object::null_type_arguments(), Nullability::kNullable);
2298 type.SetIsFinalized();
2299 type ^= type.Canonicalize(thread);
2300 object_store->set_null_type(type);
2301 cls.set_declaration_type(type);
2302 ASSERT(type.IsNullable());
2303
2304 // Consider removing when/if Null becomes an ordinary class.
2305 type = object_store->object_type();
2306 cls.set_super_type(type);
2307
2308 cls = object_store->never_class();
2309 type = Type::New(cls, Object::null_type_arguments(),
2311 type.SetIsFinalized();
2312 type ^= type.Canonicalize(thread);
2313 object_store->set_never_type(type);
2314 type_args = TypeArguments::New(1);
2315 type_args.SetTypeAt(0, type);
2316 type_args = type_args.Canonicalize(thread);
2317 object_store->set_type_argument_never(type_args);
2318
2319 // Create and cache commonly used type arguments <int>, <double>,
2320 // <String>, <String, dynamic> and <String, String>.
2321 type_args = TypeArguments::New(1);
2322 type = object_store->int_type();
2323 type_args.SetTypeAt(0, type);
2324 type_args = type_args.Canonicalize(thread);
2325 object_store->set_type_argument_int(type_args);
2326
2327 type_args = TypeArguments::New(1);
2328 type = object_store->double_type();
2329 type_args.SetTypeAt(0, type);
2330 type_args = type_args.Canonicalize(thread);
2331 object_store->set_type_argument_double(type_args);
2332
2333 type_args = TypeArguments::New(1);
2334 type = object_store->string_type();
2335 type_args.SetTypeAt(0, type);
2336 type_args = type_args.Canonicalize(thread);
2337 object_store->set_type_argument_string(type_args);
2338
2339 type_args = TypeArguments::New(2);
2340 type = object_store->string_type();
2341 type_args.SetTypeAt(0, type);
2342 type_args.SetTypeAt(1, Object::dynamic_type());
2343 type_args = type_args.Canonicalize(thread);
2344 object_store->set_type_argument_string_dynamic(type_args);
2345
2346 type_args = TypeArguments::New(2);
2347 type = object_store->string_type();
2348 type_args.SetTypeAt(0, type);
2349 type_args.SetTypeAt(1, type);
2350 type_args = type_args.Canonicalize(thread);
2351 object_store->set_type_argument_string_string(type_args);
2352
2353 lib = Library::LookupLibrary(thread, Symbols::DartFfi());
2354 if (lib.IsNull()) {
2355 lib = Library::NewLibraryHelper(Symbols::DartFfi(), true);
2356 lib.SetLoadRequested();
2357 lib.Register(thread);
2358 }
2359 object_store->set_bootstrap_library(ObjectStore::kFfi, lib);
2360
2361 cls = Class::New<Instance, RTN::Instance>(kFfiNativeTypeCid, isolate_group);
2363 cls.set_is_prefinalized();
2364 pending_classes.Add(cls);
2365 object_store->set_ffi_native_type_class(cls);
2366 RegisterClass(cls, Symbols::FfiNativeType(), lib);
2367
2368#define REGISTER_FFI_TYPE_MARKER(clazz) \
2369 cls = Class::New<Instance, RTN::Instance>(kFfi##clazz##Cid, isolate_group); \
2370 cls.set_num_type_arguments_unsafe(0); \
2371 cls.set_is_prefinalized(); \
2372 pending_classes.Add(cls); \
2373 RegisterClass(cls, Symbols::Ffi##clazz(), lib);
2375#undef REGISTER_FFI_TYPE_MARKER
2376
2377 cls = Class::New<Instance, RTN::Instance>(kFfiNativeFunctionCid,
2378 isolate_group);
2382 cls.set_is_prefinalized();
2383 pending_classes.Add(cls);
2384 RegisterClass(cls, Symbols::FfiNativeFunction(), lib);
2385
2386 cls = Class::NewPointerClass(kPointerCid, isolate_group);
2387 object_store->set_ffi_pointer_class(cls);
2388 pending_classes.Add(cls);
2389 RegisterClass(cls, Symbols::FfiPointer(), lib);
2390
2391 cls = Class::New<DynamicLibrary, RTN::DynamicLibrary>(kDynamicLibraryCid,
2392 isolate_group);
2396 cls.set_is_prefinalized();
2397 pending_classes.Add(cls);
2398 RegisterClass(cls, Symbols::FfiDynamicLibrary(), lib);
2399
2400 cls = Class::New<NativeFinalizer, RTN::NativeFinalizer>(isolate_group);
2401 object_store->set_native_finalizer_class(cls);
2402 RegisterPrivateClass(cls, Symbols::_NativeFinalizer(), lib);
2403
2404 cls = Class::New<Finalizer, RTN::Finalizer>(isolate_group);
2409 object_store->set_finalizer_class(cls);
2410 pending_classes.Add(cls);
2411 RegisterPrivateClass(cls, Symbols::_FinalizerImpl(), core_lib);
2412
2413 // Pre-register the internal library so we can place the vm class
2414 // FinalizerEntry there rather than the core library.
2415 lib = Library::LookupLibrary(thread, Symbols::DartInternal());
2416 if (lib.IsNull()) {
2417 lib = Library::NewLibraryHelper(Symbols::DartInternal(), true);
2418 lib.SetLoadRequested();
2419 lib.Register(thread);
2420 }
2421 object_store->set_bootstrap_library(ObjectStore::kInternal, lib);
2422 ASSERT(!lib.IsNull());
2424
2425 cls = Class::New<FinalizerEntry, RTN::FinalizerEntry>(isolate_group);
2426 object_store->set_finalizer_entry_class(cls);
2427 pending_classes.Add(cls);
2428 RegisterClass(cls, Symbols::FinalizerEntry(), lib);
2429
2430 // Finish the initialization by compiling the bootstrap scripts containing
2431 // the base interfaces and the implementation of the internal classes.
2432 const Error& error = Error::Handle(
2433 zone, Bootstrap::DoBootstrapping(kernel_buffer, kernel_buffer_size));
2434 if (!error.IsNull()) {
2435 return error.ptr();
2436 }
2437
2438 isolate_group->class_table()->CopySizesFromClassObjects();
2439
2441
2442 // Set up the intrinsic state of all functions (core, math and typed data).
2444
2445 // Adds static const fields (class ids) to the class 'ClassID');
2446 lib = Library::LookupLibrary(thread, Symbols::DartInternal());
2447 ASSERT(!lib.IsNull());
2448 cls = lib.LookupClassAllowPrivate(Symbols::ClassID());
2449 ASSERT(!cls.IsNull());
2450 const bool injected = cls.InjectCIDFields();
2451 ASSERT(injected);
2452
2453 // Set up recognized state of all functions (core, math and typed data).
2455#endif // !defined(DART_PRECOMPILED_RUNTIME)
2456 } else {
2457 // Object::Init version when we are running in a version of dart that has a
2458 // full snapshot linked in and an isolate is initialized using the full
2459 // snapshot.
2460 ObjectStore* object_store = isolate_group->object_store();
2461 SafepointWriteRwLocker ml(thread, isolate_group->program_lock());
2462
2463 Class& cls = Class::Handle(zone);
2464
2465 // Set up empty classes in the object store, these will get initialized
2466 // correctly when we read from the snapshot. This is done to allow
2467 // bootstrapping of reading classes from the snapshot. Some classes are not
2468 // stored in the object store. Yet we still need to create their Class
2469 // object so that they get put into the class_table (as a side effect of
2470 // Class::New()).
2471 cls = Class::New<Instance, RTN::Instance>(kInstanceCid, isolate_group);
2472 object_store->set_object_class(cls);
2473
2474 cls = Class::New<LibraryPrefix, RTN::LibraryPrefix>(isolate_group);
2475 cls = Class::New<Type, RTN::Type>(isolate_group);
2476 cls = Class::New<FunctionType, RTN::FunctionType>(isolate_group);
2477 cls = Class::New<RecordType, RTN::RecordType>(isolate_group);
2478 cls = Class::New<TypeParameter, RTN::TypeParameter>(isolate_group);
2479
2480 cls = Class::New<Array, RTN::Array>(isolate_group);
2481 object_store->set_array_class(cls);
2482
2483 cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate_group);
2484 object_store->set_immutable_array_class(cls);
2485
2486 cls = Class::New<GrowableObjectArray, RTN::GrowableObjectArray>(
2487 isolate_group);
2488 object_store->set_growable_object_array_class(cls);
2489
2490 cls = Class::New<Map, RTN::Map>(isolate_group);
2491 object_store->set_map_impl_class(cls);
2492
2493 cls = Class::New<Map, RTN::Map>(kConstMapCid, isolate_group);
2494 object_store->set_const_map_impl_class(cls);
2495
2496 cls = Class::New<Set, RTN::Set>(isolate_group);
2497 object_store->set_set_impl_class(cls);
2498
2499 cls = Class::New<Set, RTN::Set>(kConstSetCid, isolate_group);
2500 object_store->set_const_set_impl_class(cls);
2501
2502 cls = Class::New<Float32x4, RTN::Float32x4>(isolate_group);
2503 object_store->set_float32x4_class(cls);
2504
2505 cls = Class::New<Int32x4, RTN::Int32x4>(isolate_group);
2506 object_store->set_int32x4_class(cls);
2507
2508 cls = Class::New<Float64x2, RTN::Float64x2>(isolate_group);
2509 object_store->set_float64x2_class(cls);
2510
2511#define REGISTER_TYPED_DATA_CLASS(clazz) \
2512 cls = Class::NewTypedDataClass(kTypedData##clazz##Cid, isolate_group);
2514#undef REGISTER_TYPED_DATA_CLASS
2515#define REGISTER_TYPED_DATA_VIEW_CLASS(clazz) \
2516 cls = \
2517 Class::NewTypedDataViewClass(kTypedData##clazz##ViewCid, isolate_group); \
2518 cls = Class::NewUnmodifiableTypedDataViewClass( \
2519 kUnmodifiableTypedData##clazz##ViewCid, isolate_group);
2521#undef REGISTER_TYPED_DATA_VIEW_CLASS
2522 cls = Class::NewTypedDataViewClass(kByteDataViewCid, isolate_group);
2524 isolate_group);
2525#define REGISTER_EXT_TYPED_DATA_CLASS(clazz) \
2526 cls = Class::NewExternalTypedDataClass(kExternalTypedData##clazz##Cid, \
2527 isolate_group);
2529#undef REGISTER_EXT_TYPED_DATA_CLASS
2530
2531 cls = Class::New<Instance, RTN::Instance>(kFfiNativeTypeCid, isolate_group);
2532 object_store->set_ffi_native_type_class(cls);
2533
2534#define REGISTER_FFI_CLASS(clazz) \
2535 cls = Class::New<Instance, RTN::Instance>(kFfi##clazz##Cid, isolate_group);
2537#undef REGISTER_FFI_CLASS
2538
2539 cls = Class::New<Instance, RTN::Instance>(kFfiNativeFunctionCid,
2540 isolate_group);
2541
2542 cls = Class::NewPointerClass(kPointerCid, isolate_group);
2543 object_store->set_ffi_pointer_class(cls);
2544
2545 cls = Class::New<DynamicLibrary, RTN::DynamicLibrary>(kDynamicLibraryCid,
2546 isolate_group);
2547
2548 cls = Class::New<Instance, RTN::Instance>(kByteBufferCid, isolate_group,
2549 /*register_isolate_group=*/false);
2551 isolate_group->class_table()->Register(cls);
2552
2553 cls = Class::New<Integer, RTN::Integer>(isolate_group);
2554 object_store->set_integer_implementation_class(cls);
2555
2556 cls = Class::New<Smi, RTN::Smi>(isolate_group);
2557 object_store->set_smi_class(cls);
2558
2559 cls = Class::New<Mint, RTN::Mint>(isolate_group);
2560 object_store->set_mint_class(cls);
2561
2562 cls = Class::New<Double, RTN::Double>(isolate_group);
2563 object_store->set_double_class(cls);
2564
2565 cls = Class::New<Closure, RTN::Closure>(isolate_group);
2566 object_store->set_closure_class(cls);
2567
2568 cls = Class::New<Record, RTN::Record>(isolate_group);
2569
2570 cls = Class::NewStringClass(kOneByteStringCid, isolate_group);
2571 object_store->set_one_byte_string_class(cls);
2572
2573 cls = Class::NewStringClass(kTwoByteStringCid, isolate_group);
2574 object_store->set_two_byte_string_class(cls);
2575
2576 cls = Class::New<Bool, RTN::Bool>(isolate_group);
2577 object_store->set_bool_class(cls);
2578
2579 cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate_group);
2580 object_store->set_null_class(cls);
2581
2582 cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate_group);
2583 object_store->set_never_class(cls);
2584
2585 cls = Class::New<Capability, RTN::Capability>(isolate_group);
2586 cls = Class::New<ReceivePort, RTN::ReceivePort>(isolate_group);
2587 cls = Class::New<SendPort, RTN::SendPort>(isolate_group);
2588 cls = Class::New<StackTrace, RTN::StackTrace>(isolate_group);
2589 cls = Class::New<SuspendState, RTN::SuspendState>(isolate_group);
2590 cls = Class::New<RegExp, RTN::RegExp>(isolate_group);
2591 cls = Class::New<Number, RTN::Number>(isolate_group);
2592
2593 cls = Class::New<WeakProperty, RTN::WeakProperty>(isolate_group);
2594 object_store->set_weak_property_class(cls);
2595 cls = Class::New<WeakReference, RTN::WeakReference>(isolate_group);
2596 object_store->set_weak_reference_class(cls);
2597 cls = Class::New<Finalizer, RTN::Finalizer>(isolate_group);
2598 object_store->set_finalizer_class(cls);
2599 cls = Class::New<NativeFinalizer, RTN::NativeFinalizer>(isolate_group);
2600 object_store->set_native_finalizer_class(cls);
2601 cls = Class::New<FinalizerEntry, RTN::FinalizerEntry>(isolate_group);
2602 object_store->set_finalizer_entry_class(cls);
2603
2604 cls = Class::New<MirrorReference, RTN::MirrorReference>(isolate_group);
2605 cls = Class::New<UserTag, RTN::UserTag>(isolate_group);
2606 cls = Class::New<FutureOr, RTN::FutureOr>(isolate_group);
2607 object_store->set_future_or_class(cls);
2608 cls = Class::New<TransferableTypedData, RTN::TransferableTypedData>(
2609 isolate_group);
2610 }
2611 return Error::null();
2612}
2613
2614#if defined(DEBUG)
// Debug-only: whether this object resides in the read-only VM isolate heap
// (delegates to the untagged object's own check).
2615bool Object::InVMIsolateHeap() const {
2616 return ptr()->untag()->InVMIsolateHeap();
2617}
2618#endif // DEBUG
2619
2620void Object::Print() const {
2621 THR_Print("%s\n", ToCString());
2622}
2623
// Base implementation: a plain Object has no name for library-dictionary
// lookup, so return the null string.
2624StringPtr Object::DictionaryName() const {
2625 return String::null();
2626}
2627
// Decides whether the immutability header bit should be set for a class id:
// predefined cids use the static per-cid helper; other cids consult the
// runtime class table. NOTE(review): the signature line and part of the
// else-branch call are elided in this rendering — verify against the full file.
2629 if (class_id < kNumPredefinedCids) {
2630 return ShouldHaveImmutabilityBitSetCid(class_id);
2631 } else {
2633 IsolateGroup::Current()->class_table()->At(class_id));
2634 }
2635}
2636
// Initializes a freshly allocated object at 'address': fills the payload
// before, between and after the pointer-field range with GC-safe values
// (null for pointer fields, 0 or break instructions elsewhere), then writes
// the header tags word last. 'ptr_field_end_offset' is inclusive.
2637void Object::InitializeObject(uword address,
2638 intptr_t class_id,
2639 intptr_t size,
2640 bool compressed,
2641 uword ptr_field_start_offset,
2642 uword ptr_field_end_offset) {
2643 // Note: we skip the header word here to avoid a racy read in the concurrent
2644 // marker from observing the null object when it reads into a heap page
2645 // allocated after marking started.
2646 uword cur = address + sizeof(UntaggedObject);
2647 uword ptr_field_start = address + ptr_field_start_offset;
2648 uword ptr_field_end = address + ptr_field_end_offset;
2649 uword end = address + size;
2650 // The start of pointer fields should always be past the object header, even
2651 // if there are no pointer fields (ptr_field_end < ptr_field_start).
2652 ASSERT(cur <= ptr_field_start);
2653 // The start of pointer fields can be at the end for empty payload objects.
2654 ASSERT(ptr_field_start <= end);
2655 // The end of pointer fields should always be before the end, as the end of
2656 // pointer fields is inclusive (the address of the last field to initialize).
2657 ASSERT(ptr_field_end < end);
2658 bool needs_init = true;
2659 if (IsTypedDataBaseClassId(class_id) || class_id == kArrayCid) {
2660 // If the size is greater than both kNewAllocatableSize and
2661 // kAllocatablePageSize, the object must have been allocated to a new
2662 // large page, which must already have been zero initialized by the OS.
2663 // Note that zero is a GC-safe value.
2664 //
2665 // For arrays, the caller will then initialize the fields to null with
2666 // safepoint checks to avoid blocking for the full duration of
2667 // initializing this array.
2668 needs_init =
// NOTE(review): the right-hand side of this assignment (the size comparison
// described by the comment above) is elided in this rendering.
2670 }
2671 if (needs_init) {
2672 // Initialize the memory prior to any pointer fields with 0. (This loop
2673 // and the next will be a no-op if the object has no pointer fields.)
2674 uword initial_value = 0;
2675 while (cur < ptr_field_start) {
2676 *reinterpret_cast<uword*>(cur) = initial_value;
2677 cur += kWordSize;
2678 }
2679 // Initialize any pointer fields with Object::null().
2680 initial_value = static_cast<uword>(null_);
2681#if defined(DART_COMPRESSED_POINTERS)
// For compressed pointers, replicate the low 32 bits into both halves of the
// word so whole-word stores initialize two compressed slots at once.
2682 if (compressed) {
2683 initial_value &= 0xFFFFFFFF;
2684 initial_value |= initial_value << 32;
2685 }
2686 const bool has_pointer_fields = ptr_field_start <= ptr_field_end;
2687 // If there are compressed pointer fields and the first compressed pointer
2688 // field is not at a word start, then initialize it to Object::null().
2689 if (compressed && has_pointer_fields &&
2690 (ptr_field_start % kWordSize != 0)) {
2691 *reinterpret_cast<compressed_uword*>(ptr_field_start) = initial_value;
2692 }
2693#endif
2694 while (cur <= ptr_field_end) {
2695 *reinterpret_cast<uword*>(cur) = initial_value;
2696 cur += kWordSize;
2697 }
2698 // Initialize the memory after any pointer fields with 0, unless this is
2699 // an instructions object in which case we use the break instruction.
2700 initial_value = class_id == kInstructionsCid ? kBreakInstructionFiller : 0;
2701#if defined(DART_COMPRESSED_POINTERS)
2702 // If there are compressed pointer fields and the last compressed pointer
2703 // field is the start of a word, then initialize the other part of the word
2704 // to the new initial value.
2705 //
2706 // (We're guaranteed there's always space in the object after the last
2707 // pointer field in this case since objects are allocated in multiples of
2708 // the word size.)
2709 if (compressed && has_pointer_fields && (ptr_field_end % kWordSize == 0)) {
2710 *reinterpret_cast<compressed_uword*>(ptr_field_end +
2711 kCompressedWordSize) = initial_value;
2712 }
2713#endif
2714 while (cur < end) {
2715 *reinterpret_cast<uword*>(cur) = initial_value;
2716 cur += kWordSize;
2717 }
2718 } else {
2719 // Check that MemorySanitizer understands this is initialized.
2720 MSAN_CHECK_INITIALIZED(reinterpret_cast<void*>(address), size);
2721#if defined(DEBUG)
2722 const uword initial_value = 0;
2723 while (cur < end) {
2724 ASSERT_EQUAL(*reinterpret_cast<uword*>(cur), initial_value);
2725 cur += kWordSize;
2726 }
2727#endif
2728 }
// Assemble and publish the header tags word. Written after the body so the
// concurrent marker never sees a tagged-but-uninitialized object.
2729 uword tags = 0;
2730 ASSERT(class_id != kIllegalCid);
2731 tags = UntaggedObject::ClassIdTag::update(class_id, tags);
// NOTE(review): several tag-update lines (size tag, old/new bits, the start
// of the immutability-bit update) are elided in this rendering.
2733 const bool is_old =
2735 tags = UntaggedObject::AlwaysSetBit::update(true, tags);
2736 tags = UntaggedObject::NotMarkedBit::update(true, tags);
2740 Object::ShouldHaveImmutabilityBitSet(class_id), tags);
2741#if defined(HASH_IN_OBJECT_HEADER)
2742 tags = UntaggedObject::HashTag::update(0, tags);
2743#endif
2744 reinterpret_cast<UntaggedObject*>(address)->tags_ = tags;
2745}
2746
// Debug-only consistency check: a non-null handle's C++ vtable must match the
// builtin vtable registered for its class id (user-defined cids collapse to
// kInstanceCid). NOTE(review): the function signature line is elided in this
// rendering — verify the enclosing declaration against the full file.
2748#if defined(DEBUG)
2749 if (ptr_ != Object::null()) {
2750 intptr_t cid = ptr_->GetClassIdMayBeSmi();
2751 if (cid >= kNumPredefinedCids) {
2752 cid = kInstanceCid;
2753 }
2754 ASSERT(vtable() == builtin_vtables_[cid]);
2755 }
2756#endif
2757}
2758
// Allocates and initializes a heap object of 'cls_id' in 'space', handling
// out-of-memory via longjmp, the preallocated OOM exception, or a fatal
// error; also cooperates with concurrent marking (black allocation), the
// heap profiler sampler, and allocation tracing. NOTE(review): the first
// signature line and a few statements are elided in this rendering.
2760 intptr_t size,
2761 Heap::Space space,
2762 bool compressed,
2763 uword ptr_field_start_offset,
2764 uword ptr_field_end_offset) {
2766 Thread* thread = Thread::Current();
2768 ASSERT(thread->no_safepoint_scope_depth() == 0);
2769 ASSERT(thread->no_callback_scope_depth() == 0);
2770 Heap* heap = thread->heap();
2771
2772 uword address = heap->Allocate(thread, size, space);
2773 if (UNLIKELY(address == 0)) {
2774 // SuspendLongJumpScope during Dart entry ensures that if a longjmp base is
2775 // available, it is the innermost error handler, so check for a longjmp base
2776 // before checking for an exit frame.
2777 if (thread->long_jump_base() != nullptr) {
2778 Report::LongJump(Object::out_of_memory_error());
2779 UNREACHABLE();
2780 } else if (thread->top_exit_frame_info() != 0) {
2781 // Use the preallocated out of memory exception to avoid calling
2782 // into dart code or allocating any code.
// NOTE(review): the throw of the preallocated OOM exception is elided here.
2784 UNREACHABLE();
2785 } else {
2786 // Nowhere to propagate an exception to.
2787 OUT_OF_MEMORY();
2788 }
2789 }
2790
2791 ObjectPtr raw_obj;
2792 NoSafepointScope no_safepoint(thread);
2793 InitializeObject(address, cls_id, size, compressed, ptr_field_start_offset,
2794 ptr_field_end_offset);
2795 raw_obj = static_cast<ObjectPtr>(address + kHeapObjectTag);
2796 ASSERT(cls_id == UntaggedObject::ClassIdTag::decode(raw_obj->untag()->tags_));
2797 if (raw_obj->IsOldObject() && UNLIKELY(thread->is_marking())) {
2798 // Black allocation. Prevents a data race between the mutator and
2799 // concurrent marker on ARM and ARM64 (the marker may observe a
2800 // publishing store of this object before the stores that initialize its
2801 // slots), and helps the collection to finish sooner.
2802 // release: Setting the mark bit must not be ordered after a publishing
2803 // store of this object. Compare Scavenger::ScavengePointer.
2804 raw_obj->untag()->SetMarkBitRelease();
2805 heap->old_space()->AllocateBlack(size);
2806 }
2807
2808#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
// Report this allocation to the heap profiler if a sample is pending.
2809 HeapProfileSampler& heap_sampler = thread->heap_sampler();
2810 if (heap_sampler.HasOutstandingSample()) {
2812 void* data = heap_sampler.InvokeCallbackForLastSample(cls_id);
2813 heap->SetHeapSamplingData(raw_obj, data);
2815 }
2816#endif // !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
2817
2818#if !defined(PRODUCT)
// Allocation tracing: record a profiler sample for traced class ids.
2819 auto class_table = thread->isolate_group()->class_table();
2820 if (class_table->ShouldTraceAllocationFor(cls_id)) {
2821 uint32_t hash =
2822 HeapSnapshotWriter::GetHeapSnapshotIdentityHash(thread, raw_obj);
2823 Profiler::SampleAllocation(thread, cls_id, hash);
2824 }
2825#endif // !defined(PRODUCT)
2826 return raw_obj;
2827}
2828
// Visitor that re-applies the write barrier for every heap-object slot of an
// old-space object (used e.g. after cloning), choosing the array-specific or
// generic store check per slot. NOTE(review): the class header and
// constructor signature lines are elided in this rendering.
2830 public:
2833 thread_(thread),
2834 old_obj_(obj) {
2835 ASSERT(old_obj_->IsOldObject());
2836 }
2837
// 'to' is inclusive, hence slot <= to.
2838 void VisitPointers(ObjectPtr* from, ObjectPtr* to) override {
2839 if (old_obj_->IsArray()) {
2840 for (ObjectPtr* slot = from; slot <= to; ++slot) {
2841 ObjectPtr value = *slot;
2842 if (value->IsHeapObject()) {
2843 old_obj_->untag()->CheckArrayPointerStore(slot, value, thread_);
2844 }
2845 }
2846 } else {
2847 for (ObjectPtr* slot = from; slot <= to; ++slot) {
2848 ObjectPtr value = *slot;
2849 if (value->IsHeapObject()) {
2850 old_obj_->untag()->CheckHeapPointerStore(value, thread_);
2851 }
2852 }
2853 }
2854 }
2855
2856#if defined(DART_COMPRESSED_POINTERS)
// Same as VisitPointers, but slots hold compressed pointers that must be
// decompressed against 'heap_base' before the store check.
2857 void VisitCompressedPointers(uword heap_base,
2858 CompressedObjectPtr* from,
2859 CompressedObjectPtr* to) override {
2860 if (old_obj_->IsArray()) {
2861 for (CompressedObjectPtr* slot = from; slot <= to; ++slot) {
2862 ObjectPtr value = slot->Decompress(heap_base);
2863 if (value->IsHeapObject()) {
2864 old_obj_->untag()->CheckArrayPointerStore(slot, value, thread_);
2865 }
2866 }
2867 } else {
2868 for (CompressedObjectPtr* slot = from; slot <= to; ++slot) {
2869 ObjectPtr value = slot->Decompress(heap_base);
2870 if (value->IsHeapObject()) {
2871 old_obj_->untag()->CheckHeapPointerStore(value, thread_);
2872 }
2873 }
2874 }
2875 }
2876#endif
2877
2878 private:
2879 Thread* thread_;
2880 ObjectPtr old_obj_;
2881
2882 DISALLOW_COPY_AND_ASSIGN(WriteBarrierUpdateVisitor);
2883};
2884
2885#if defined(DEBUG)
// Debug-only: true if this handle was allocated in a VM zone.
2886bool Object::IsZoneHandle() const {
2887 return VMHandles::IsZoneHandle(reinterpret_cast<uword>(this));
2888}
2889
// Debug-only: true if this handle is one of the VM's read-only handles.
2890bool Object::IsReadOnlyHandle() const {
2891 return Dart::IsReadOnlyHandle(reinterpret_cast<uword>(this));
2892}
2893
// Debug-only: a handle is safe to retain (not a temporary scoped handle)
// iff it is a zone handle or a read-only handle.
2894bool Object::IsNotTemporaryScopedHandle() const {
2895 return (IsZoneHandle() || IsReadOnlyHandle());
2896}
2897#endif
2898
// Shallow-clones an object: allocates a same-class, same-size object and
// copies the body (optionally with relaxed atomic loads), recomputes the
// inner data pointer for typed data, and re-applies the write barrier for
// old-space clones. NOTE(review): the first signature line (return type and
// 'orig' parameter) is elided in this rendering.
2900 Heap::Space space,
2901 bool load_with_relaxed_atomics) {
2902 // Generic function types should be cloned with FunctionType::Clone.
2903 ASSERT(!orig.IsFunctionType() || !FunctionType::Cast(orig).IsGeneric());
2904 const Class& cls = Class::Handle(orig.clazz());
2905 intptr_t size = orig.ptr()->untag()->HeapSize();
2906 // All fields (including non-SmiPtr fields) will be initialized with Smi 0,
2907 // but the contents of the original object are copied over before the thread
2908 // is allowed to reach a safepoint.
2909 ObjectPtr raw_clone =
2910 Object::Allocate(cls.id(), size, space, cls.HasCompressedPointers(),
2911 from_offset<Object>(), to_offset<Object>());
2912 NoSafepointScope no_safepoint;
2913 // Copy the body of the original into the clone.
2914 uword orig_addr = UntaggedObject::ToAddr(orig.ptr());
2915 uword clone_addr = UntaggedObject::ToAddr(raw_clone);
2916 const intptr_t kHeaderSizeInBytes = sizeof(UntaggedObject);
2917 if (load_with_relaxed_atomics) {
// Word-by-word relaxed atomic copy: tolerates concurrent mutation of the
// source without data-race UB.
2918 auto orig_atomics_ptr = reinterpret_cast<std::atomic<uword>*>(orig_addr);
2919 auto clone_ptr = reinterpret_cast<uword*>(clone_addr);
2920 for (intptr_t i = kHeaderSizeInBytes / kWordSize; i < size / kWordSize;
2921 i++) {
2922 *(clone_ptr + i) =
2923 (orig_atomics_ptr + i)->load(std::memory_order_relaxed);
2924 }
2925 } else {
2926 memmove(reinterpret_cast<uint8_t*>(clone_addr + kHeaderSizeInBytes),
2927 reinterpret_cast<uint8_t*>(orig_addr + kHeaderSizeInBytes),
2928 size - kHeaderSizeInBytes);
2929 }
2930
// The copied data pointer of a TypedData object points into the original;
// recompute it to point into the clone.
2931 if (IsTypedDataClassId(raw_clone->GetClassId())) {
2932 auto raw_typed_data = TypedData::RawCast(raw_clone);
2933 raw_typed_data.untag()->RecomputeDataField();
2934 }
2935
2936 // Add clone to store buffer, if needed.
2937 if (!raw_clone->IsOldObject()) {
2938 // No need to remember an object in new space.
2939 return raw_clone;
2940 }
2941 WriteBarrierUpdateVisitor visitor(Thread::Current(), raw_clone);
2942 raw_clone->untag()->VisitPointers(&visitor);
2943 return raw_clone;
2944}
2945
// Maps this class's id to the ContainsCompressedPointers() answer of the
// corresponding C++ object layout; user-defined cids share the Instance
// layout. NOTE(review): the signature, the macro-expansion list lines and a
// couple of case bodies are elided in this rendering.
2947 const intptr_t cid = id();
2948 switch (cid) {
2949 case kByteBufferCid:
2951#define HANDLE_CASE(clazz) \
2952 case k##clazz##Cid: \
2953 return dart::clazz::ContainsCompressedPointers();
2955#undef HANDLE_CASE
2956#define HANDLE_CASE(clazz) \
2957 case kTypedData##clazz##Cid: \
2958 return dart::TypedData::ContainsCompressedPointers(); \
2959 case kTypedData##clazz##ViewCid: \
2960 case kUnmodifiableTypedData##clazz##ViewCid: \
2961 return dart::TypedDataView::ContainsCompressedPointers(); \
2962 case kExternalTypedData##clazz##Cid: \
2963 return dart::ExternalTypedData::ContainsCompressedPointers();
2965#undef HANDLE_CASE
2966 default:
2967 if (cid >= kNumPredefinedCids) {
2969 }
2970 }
// Reaching here means a predefined cid with no translation: fatal error.
2971 FATAL("Unsupported class for compressed pointers translation: %s (id=%" Pd
2972 ", kNumPredefinedCids=%" Pd ")\n",
2974 return false;
2975}
2976
// Returns the class's stored (internal) name.
2977StringPtr Class::Name() const {
2978 return untag()->name();
2979}
2980
// Returns the class name with internal mangling removed.
// NOTE(review): the body line is elided in this rendering.
2981StringPtr Class::ScrubbedName() const {
2983}
2984
// C-string variant of ScrubbedName().
// NOTE(review): the body line is elided in this rendering.
2985const char* Class::ScrubbedNameCString() const {
2987}
2988
// Returns the user-visible class name: the cached user_name in non-PRODUCT
// builds (the #if branch returns early), regenerated on every call in
// PRODUCT builds.
2989StringPtr Class::UserVisibleName() const {
2990#if !defined(PRODUCT)
2991 ASSERT(untag()->user_name() != String::null());
2992 return untag()->user_name();
2993#endif // !defined(PRODUCT)
2994 // No caching in PRODUCT, regenerate.
2995 return Symbols::New(Thread::Current(), GenerateUserVisibleName());
2996}
2997
// C-string variant of UserVisibleName(): cached in non-PRODUCT, regenerated
// in PRODUCT. NOTE(review): the signature line is elided in this rendering.
2999#if !defined(PRODUCT)
3000 ASSERT(untag()->user_name() != String::null());
3001 return String::Handle(untag()->user_name()).ToCString();
3002#endif // !defined(PRODUCT)
3003 return GenerateUserVisibleName(); // No caching in PRODUCT, regenerate.
3004}
3005
// Dispatches to the name accessor matching the requested visibility.
// NOTE(review): the case labels for the three visibilities are elided in
// this rendering.
3006const char* Class::NameCString(NameVisibility name_visibility) const {
3007 switch (name_visibility) {
3009 return String::Handle(Name()).ToCString();
3011 return ScrubbedNameCString();
3013 return UserVisibleNameCString();
3014 default:
3015 UNREACHABLE();
3016 return nullptr;
3017 }
3018}
3019
// Returns the mixin class: the type class of the relevant interface entry
// when this class is a mixin application, otherwise this class itself.
// NOTE(review): the guard condition and the interface-selection line are
// elided in this rendering.
3020ClassPtr Class::Mixin() const {
3022 const Array& interfaces = Array::Handle(this->interfaces());
3023 const Type& mixin_type =
3025 return mixin_type.type_class();
3026 }
3027 return ptr();
3028}
3029
// Decodes a flag from this class's library's flags_ word inside a
// no-safepoint scope. NOTE(review): the signature and the decode call are
// elided in this rendering — verify against the full file.
3031 NoSafepointScope no_safepoint;
3033 untag()->library()->untag()->flags_);
3034}
3035
// Returns this class's "rare type": the declaration type for non-generic
// classes, otherwise a non-nullable Type instantiated to bounds.
// NOTE(review): a few lines (precondition, bounds lookup, finalization) are
// elided in this rendering.
3036TypePtr Class::RareType() const {
3037 if (!IsGeneric()) {
3038 return DeclarationType();
3039 }
3041 Thread* const thread = Thread::Current();
3042 Zone* const zone = thread->zone();
3043 const auto& inst_to_bounds =
3045 ASSERT(inst_to_bounds.ptr() != Object::empty_type_arguments().ptr());
3046 auto& type = Type::Handle(
3047 zone, Type::New(*this, inst_to_bounds, Nullability::kNonNullable));
3049 return type.ptr();
3050}
3051
// Creates the VM-internal Class object backing the C++ layout 'FakeObject'
// (with 'TargetFakeObject' providing cross-compilation target sizes):
// sets sizes/offsets/ids, marks internal classes fully finalized and other
// VM-backed classes prefinalized, and optionally registers the class in the
// isolate group's class table. NOTE(review): a few argument/continuation
// lines are elided in this rendering.
3052template <class FakeObject, class TargetFakeObject>
3053ClassPtr Class::New(IsolateGroup* isolate_group, bool register_class) {
3055 const auto& result = Class::Handle(Object::Allocate<Class>(Heap::kOld));
3056 Object::VerifyBuiltinVtable<FakeObject>(FakeObject::kClassId);
3057 NOT_IN_PRECOMPILED(result.set_token_pos(TokenPosition::kNoSource));
3058 NOT_IN_PRECOMPILED(result.set_end_token_pos(TokenPosition::kNoSource));
3059 result.set_instance_size(FakeObject::InstanceSize(),
3061 TargetFakeObject::InstanceSize()));
3062 result.set_type_arguments_field_offset_in_words(kNoTypeArguments,
3064 const intptr_t host_next_field_offset = FakeObject::NextFieldOffset();
3065 const intptr_t target_next_field_offset = TargetFakeObject::NextFieldOffset();
3066 result.set_next_field_offset(host_next_field_offset,
3068 COMPILE_ASSERT((FakeObject::kClassId != kInstanceCid));
3069 result.set_id(FakeObject::kClassId);
3070 NOT_IN_PRECOMPILED(result.set_implementor_cid(kIllegalCid));
3071 result.set_num_type_arguments_unsafe(0);
3072 result.set_num_native_fields(0);
3073 result.set_state_bits(0);
3074 if (IsInternalOnlyClassId(FakeObject::kClassId) ||
3075 (FakeObject::kClassId == kTypeArgumentsCid)) {
3076 // VM internal classes are done. There is no finalization needed or
3077 // possible in this case.
3078 result.set_is_declaration_loaded();
3079 result.set_is_type_finalized();
3080 result.set_is_allocate_finalized();
3081 } else if (FakeObject::kClassId != kClosureCid) {
3082 // VM backed classes are almost ready: run checks and resolve class
3083 // references, but do not recompute size.
3084 result.set_is_prefinalized();
3085 }
3086 if (FakeObject::kClassId < kNumPredefinedCids &&
3087 IsDeeplyImmutableCid(FakeObject::kClassId)) {
3088 result.set_is_deeply_immutable(true);
3089 }
3090 NOT_IN_PRECOMPILED(result.set_kernel_offset(0));
3091 result.InitEmptyFields();
3092 if (register_class) {
3093 isolate_group->class_table()->Register(result);
3094 }
3095 return result.ptr();
3096}
3097
3098#if !defined(DART_PRECOMPILED_RUNTIME)
// Reports a compile-time error when a class's cumulative type-parameter
// count overflows the storable range, then aborts. NOTE(review): the start
// of the report call is elided in this rendering.
3099static void ReportTooManyTypeArguments(const Class& cls) {
3102 "too many type parameters declared in class '%s' or in its "
3103 "super classes",
3104 String::Handle(cls.Name()).ToCString());
3105 UNREACHABLE();
3106}
3107#endif // !defined(DART_PRECOMPILED_RUNTIME)
3108
// Stores the class's total type-argument count (must fit in int16_t, else
// a too-many-type-arguments error is reported). Relaxed store: concurrent
// writers compute the same value. NOTE(review): the signature and the
// error-report call are elided in this rendering.
3110#if defined(DART_PRECOMPILED_RUNTIME)
3111 UNREACHABLE();
3112#else
3113 if (!Utils::IsInt(16, value)) {
3115 }
3116 // We allow concurrent calculation of the number of type arguments. If two
3117 // threads perform this operation it doesn't matter which one wins.
3118 DEBUG_ONLY(intptr_t old_value = num_type_arguments());
3119 DEBUG_ASSERT(old_value == kUnknownNumTypeArguments || old_value == value);
3120 StoreNonPointer<int16_t, int16_t, std::memory_order_relaxed>(
3121 &untag()->num_type_arguments_, value);
3122#endif // defined(DART_PRECOMPILED_RUNTIME)
3123}
3124
// Unsynchronized store of the type-argument count; see the checked setter
// above. NOTE(review): the signature line is elided in this rendering.
3126 StoreNonPointer(&untag()->num_type_arguments_, value);
3127}
3128
// Updates the HasPragma flag inside the class's state bits.
// NOTE(review): the signature line is elided in this rendering.
3130 set_state_bits(HasPragmaBit::update(value, state_bits()));
3131}
3132
// Updates the IsIsolateUnsendable state bit; requires the program-lock
// writer. NOTE(review): the signature line is elided in this rendering.
3134 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
3135 set_state_bits(IsIsolateUnsendableBit::update(value, state_bits()));
3136}
3137
// Updates the IsIsolateUnsendableDueToPragma state bit; requires the
// program-lock writer. NOTE(review): the signature line is elided here.
3139 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
3140 set_state_bits(
3141 IsIsolateUnsendableDueToPragmaBit::update(value, state_bits()));
3142}
3143
// Updates the IsDeeplyImmutable state bit; requires the program-lock writer.
// NOTE(review): the signature line is elided in this rendering.
3145 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
3146 set_state_bits(IsDeeplyImmutableBit::update(value, state_bits()));
3147}
3148
// Updates the IsFutureSubtype state bit; requires the program-lock writer.
// NOTE(review): the signature line is elided in this rendering.
3150 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
3151 set_state_bits(IsFutureSubtypeBit::update(value, state_bits()));
3152}
3153
// Updates the CanBeFuture state bit; requires the program-lock writer.
// NOTE(review): the signature line is elided in this rendering.
3155 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
3156 set_state_bits(CanBeFutureBit::update(value, state_bits()));
3157}
3158
// Updates the IsDynamicallyExtendable state bit; requires the program-lock
// writer. NOTE(review): the signature line is elided in this rendering.
3160 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
3161 set_state_bits(IsDynamicallyExtendableBit::update(value, state_bits()));
3162}
3163
// Updates the HasDynamicallyExtendableSubtypes state bit; requires the
// program-lock writer. NOTE(review): the signature line is elided here.
3165 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
3166 set_state_bits(
3167 HasDynamicallyExtendableSubtypesBit::update(value, state_bits()));
3168}
3169
3170// Initialize class fields of type Array with empty array.
3171void Class::InitEmptyFields() const {
3172 if (Object::empty_array().ptr() == Array::null()) {
3173 // The empty array has not been initialized yet.
3174 return;
3175 }
3176 untag()->set_interfaces(Object::empty_array().ptr());
3177 untag()->set_constants(Object::null_array().ptr());
3178 set_functions(Object::empty_array());
3179 set_fields(Object::empty_array());
3180 set_invocation_dispatcher_cache(Object::empty_array());
3181}
3182
// Lazily builds (and caches, with acquire/release publication) an array
// mapping compressed-word offsets within an instance to the Field at that
// offset, walking this class and its superclasses. NOTE(review): the first
// signature line and the array/fields handle declarations are elided in
// this rendering.
3184 ClassTable* class_table /* = nullptr */) const {
3186 if (untag()->offset_in_words_to_field<std::memory_order_acquire>() ==
3187 Array::null()) {
3188 // Even if multiple threads are calling this concurrently, all of them would
3189 // compute the same array, so we intentionally don't acquire any locks here.
3190 const intptr_t length = untag()->host_instance_size_in_words_;
3192 Class& cls = Class::Handle(this->ptr());
3194 Field& f = Field::Handle();
3195 while (!cls.IsNull()) {
3196 fields = cls.fields();
3197 for (intptr_t i = 0; i < fields.Length(); ++i) {
3198 f ^= fields.At(i);
3199 if (f.is_instance()) {
3200 array.SetAt(f.HostOffset() >> kCompressedWordSizeLog2, f);
3201 }
3202 }
3203 cls = cls.SuperClass(class_table);
3204 }
3205 untag()->set_offset_in_words_to_field<std::memory_order_release>(
3206 array.ptr());
3207 }
3208 return untag()->offset_in_words_to_field<std::memory_order_acquire>();
3209}
3210
// True if any field declared by this class is a non-static (instance)
// field. NOTE(review): the signature line is elided in this rendering.
3212 const Array& field_array = Array::Handle(fields());
3213 Field& field = Field::Handle();
3214 for (intptr_t i = 0; i < field_array.Length(); ++i) {
3215 field ^= field_array.At(i);
3216 if (!field.is_static()) {
3217 return true;
3218 }
3219 }
3220 return false;
3221}
3222
// Lookup key wrapping a function name: matches by pointer identity for
// symbols, otherwise by string equality via a caller-provided scratch
// String handle. NOTE(review): the class declaration line is elided in this
// rendering.
3224 public:
3225 FunctionName(const String& name, String* tmp_string)
3226 : name_(name), tmp_string_(tmp_string) {}
3227 bool Matches(const Function& function) const {
3228 if (name_.IsSymbol()) {
// Symbols are canonical, so pointer comparison suffices.
3229 return name_.ptr() == function.name();
3230 } else {
3231 *tmp_string_ = function.name();
3232 return name_.Equals(*tmp_string_);
3233 }
3234 }
3235 intptr_t Hash() const { return name_.Hash(); }
3236
3237 private:
3238 const String& name_;
3239 String* tmp_string_;
3240};
3241
3242// Traits for looking up Functions by name.
3244 public:
3245 static const char* Name() { return "ClassFunctionsTraits"; }
3246 static bool ReportStats() { return false; }
3247
3248 // Called when growing the table.
3249 static bool IsMatch(const Object& a, const Object& b) {
3250 ASSERT(a.IsFunction() && b.IsFunction());
3251 // Function objects are always canonical.
3252 return a.ptr() == b.ptr();
3253 }
3254 static bool IsMatch(const FunctionName& name, const Object& obj) {
3255 return name.Matches(Function::Cast(obj));
3256 }
3257 static uword Hash(const Object& key) {
3258 return String::HashRawSymbol(Function::Cast(key).name());
3259 }
3260 static uword Hash(const FunctionName& name) { return name.Hash(); }
3261};
3263
// Replaces this class's function list; builds a name-lookup hash table when
// the list reaches kFunctionLookupHashThreshold, otherwise clears it.
// NOTE(review): two DEBUG-block lines (safepoint assertion, function handle
// declaration) are elided in this rendering.
3264void Class::SetFunctions(const Array& value) const {
3265 ASSERT(!value.IsNull());
3266 const intptr_t len = value.Length();
3267#if defined(DEBUG)
3268 Thread* thread = Thread::Current();
3270 if (is_finalized()) {
3272 FunctionType& signature = FunctionType::Handle();
3273 for (intptr_t i = 0; i < len; ++i) {
3274 function ^= value.At(i);
3275 signature = function.signature();
3276 ASSERT(signature.IsFinalized());
3277 }
3278 }
3279#endif
3280 set_functions(value);
3281 if (len >= kFunctionLookupHashThreshold) {
3282 ClassFunctionsSet set(HashTables::New<ClassFunctionsSet>(len, Heap::kOld));
3283 Function& func = Function::Handle();
3284 for (intptr_t i = 0; i < len; ++i) {
3285 func ^= value.At(i);
3286 // Verify that all the functions in the array have this class as owner.
3287 ASSERT(func.Owner() == ptr());
3288 set.Insert(func);
3289 }
3290 untag()->set_functions_hash_table(set.Release().ptr());
3291 } else {
3292 untag()->set_functions_hash_table(Array::null());
3293 }
3294}
3295
// Appends one function to this class's function list, transitioning to (or
// maintaining) the name-lookup hash table once the threshold is reached.
// NOTE(review): the signature line and one DEBUG assertion line are elided
// in this rendering.
3297#if defined(DEBUG)
3298 Thread* thread = Thread::Current();
3299 ASSERT(thread->IsDartMutatorThread());
3301 ASSERT(!is_finalized() ||
3302 FunctionType::Handle(function.signature()).IsFinalized());
3303#endif
3304 const Array& arr = Array::Handle(functions());
3305 const Array& new_array =
3306 Array::Handle(Array::Grow(arr, arr.Length() + 1, Heap::kOld));
3307 new_array.SetAt(arr.Length(), function);
3308 set_functions(new_array);
3309 // Add to hash table, if any.
3310 const intptr_t new_len = new_array.Length();
3311 if (new_len == kFunctionLookupHashThreshold) {
3312 // Transition to using hash table.
3313 SetFunctions(new_array);
3314 } else if (new_len > kFunctionLookupHashThreshold) {
3315 ClassFunctionsSet set(untag()->functions_hash_table());
3316 set.Insert(function);
3317 untag()->set_functions_hash_table(set.Release().ptr());
3318 }
3319}
3320
// Linear search for 'needle' in this class's function list; returns its
// index, or -1 if absent or finalization fails. NOTE(review): the reusable
// handle scope lines are elided in this rendering.
3321intptr_t Class::FindFunctionIndex(const Function& needle) const {
3322 Thread* thread = Thread::Current();
3323 if (EnsureIsFinalized(thread) != Error::null()) {
3324 return -1;
3325 }
3328 Array& funcs = thread->ArrayHandle();
3329 Function& function = thread->FunctionHandle();
3330 funcs = current_functions();
3331 ASSERT(!funcs.IsNull());
3332 const intptr_t len = funcs.Length();
3333 for (intptr_t i = 0; i < len; i++) {
3334 function ^= funcs.At(i);
3335 if (needle.ptr() == function.ptr()) {
3336 return i;
3337 }
3338 }
3339 // No function found.
3340 return -1;
3341}
3342
3343FunctionPtr Class::FunctionFromIndex(intptr_t idx) const {
3344 const Array& funcs = Array::Handle(current_functions());
3345 if ((idx < 0) || (idx >= funcs.Length())) {
3346 return Function::null();
3347 }
3348 Function& func = Function::Handle();
3349 func ^= funcs.At(idx);
3350 ASSERT(!func.IsNull());
3351 return func.ptr();
3352}
3353
// Returns the implicit closure function of the function at 'idx', or null
// if there is no such function or it has no implicit closure.
// NOTE(review): the line initializing 'func' from the index is elided in
// this rendering.
3354FunctionPtr Class::ImplicitClosureFunctionFromIndex(intptr_t idx) const {
3356 if (func.IsNull() || !func.HasImplicitClosureFunction()) {
3357 return Function::null();
3358 }
3359 func = func.ImplicitClosureFunction();
3360 ASSERT(!func.IsNull());
3361 return func.ptr();
3362}
3363
// Finds the index of the function whose implicit closure function is
// 'needle'; returns -1 if none matches or finalization fails.
// NOTE(review): the signature line and reusable handle scope lines are
// elided in this rendering.
3365 Thread* thread = Thread::Current();
3366 if (EnsureIsFinalized(thread) != Error::null()) {
3367 return -1;
3368 }
3371 Array& funcs = thread->ArrayHandle();
3372 Function& function = thread->FunctionHandle();
3373 funcs = current_functions();
3374 ASSERT(!funcs.IsNull());
3375 Function& implicit_closure = Function::Handle(thread->zone());
3376 const intptr_t len = funcs.Length();
3377 for (intptr_t i = 0; i < len; i++) {
3378 function ^= funcs.At(i);
3379 implicit_closure = function.implicit_closure_function();
3380 if (implicit_closure.IsNull()) {
3381 // Skip non-implicit closure functions.
3382 continue;
3383 }
3384 if (needle.ptr() == implicit_closure.ptr()) {
3385 return i;
3386 }
3387 }
3388 // No function found.
3389 return -1;
3390}
3391
// Finds the index of 'needle' in the invocation-dispatcher cache (a mixed
// table in which only some entries are Functions); returns -1 if absent or
// finalization fails. NOTE(review): the first signature line and reusable
// handle scope lines are elided in this rendering.
3393 const Function& needle) const {
3394 Thread* thread = Thread::Current();
3395 if (EnsureIsFinalized(thread) != Error::null()) {
3396 return -1;
3397 }
3400 Array& funcs = thread->ArrayHandle();
3401 Object& object = thread->ObjectHandle();
3402 funcs = invocation_dispatcher_cache();
3403 ASSERT(!funcs.IsNull());
3404 const intptr_t len = funcs.Length();
3405 for (intptr_t i = 0; i < len; i++) {
3406 object = funcs.At(i);
3407 // The invocation_dispatcher_cache is a table with some entries that
3408 // are functions.
3409 if (object.IsFunction()) {
3410 if (Function::Cast(object).ptr() == needle.ptr()) {
3411 return i;
3412 }
3413 }
3414 }
3415 // No function found.
3416 return -1;
3417}
3418
// Returns the entry at 'idx' in the invocation-dispatcher cache if it is a
// Function, null otherwise. NOTE(review): the reusable handle scope lines
// are elided in this rendering.
3419FunctionPtr Class::InvocationDispatcherFunctionFromIndex(intptr_t idx) const {
3420 Thread* thread = Thread::Current();
3423 Array& dispatcher_cache = thread->ArrayHandle();
3424 Object& object = thread->ObjectHandle();
3425 dispatcher_cache = invocation_dispatcher_cache();
3426 object = dispatcher_cache.At(idx);
3427 if (!object.IsFunction()) {
3428 return Function::null();
3429 }
3430 return Function::Cast(object).ptr();
3431}
3432
3433void Class::set_state_bits(intptr_t bits) const {
3434 StoreNonPointer<uint32_t, uint32_t, std::memory_order_release>(
3435 &untag()->state_bits_, static_cast<uint32_t>(bits));
3436}
3437
// Stores the owning library reference. NOTE(review): the signature line is
// elided in this rendering.
3439 untag()->set_library(value.ptr());
3440}
3441
// Stores the class's type parameters; only legal before the type-argument
// count is computed, or for prefinalized classes. NOTE(review): the
// signature line is elided in this rendering.
3443 ASSERT((num_type_arguments() == kUnknownNumTypeArguments) ||
3444 is_prefinalized());
3445 untag()->set_type_parameters(value.ptr());
3446}
3447
// Release-stores the function list so its elements are visible to readers
// before the array itself.
3448void Class::set_functions(const Array& value) const {
3449 // Ensure all writes to the [Function]s are visible by the time the array
3450 // is visible.
3451 untag()->set_functions<std::memory_order_release>(value.ptr());
3452}
3453
// Release-stores the field list so its elements are visible to readers
// before the array itself.
3454void Class::set_fields(const Array& value) const {
3455 // Ensure all writes to the [Field]s are visible by the time the array
3456 // is visible.
3457 untag()->set_fields<std::memory_order_release>(value.ptr());
3458}
3459
// Release-stores the invocation-dispatcher cache so its entries are visible
// to readers before the array itself.
3460void Class::set_invocation_dispatcher_cache(const Array& cache) const {
3461 // Ensure all writes to the cache are visible by the time the array
3462 // is visible.
3463 untag()->set_invocation_dispatcher_cache<std::memory_order_release>(
3464 cache.ptr());
3465}
3466
// Write-once cache setter for the declaration instance type arguments:
// value must be null or canonical+old, and any existing value must match.
// Release store pairs with acquire loads by concurrent readers.
3467void Class::set_declaration_instance_type_arguments(
3468 const TypeArguments& value) const {
3469 ASSERT(value.IsNull() || (value.IsCanonical() && value.IsOld()));
3470 ASSERT((declaration_instance_type_arguments() == TypeArguments::null()) ||
3471 (declaration_instance_type_arguments() == value.ptr()));
3472 untag()->set_declaration_instance_type_arguments<std::memory_order_release>(
3473 value.ptr());
3474}
3475
// Computes (and caches, double-checked under the program lock) the full
// instance type-argument vector for this class's declaration, prepending
// the super type's arguments when they do not fully overlap this class's
// own parameters. NOTE(review): the signature line and two statements are
// elided in this rendering.
3477 const intptr_t num_type_arguments = NumTypeArguments();
3478 if (num_type_arguments == 0) {
3479 return TypeArguments::null();
3480 }
3481 if (declaration_instance_type_arguments() != TypeArguments::null()) {
3482 return declaration_instance_type_arguments();
3483 }
3484 Thread* thread = Thread::Current();
3485 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
// Re-check under the lock: another thread may have filled the cache.
3486 if (declaration_instance_type_arguments() != TypeArguments::null()) {
3487 return declaration_instance_type_arguments();
3488 }
3489 Zone* zone = thread->zone();
3490 auto& args = TypeArguments::Handle(zone);
3491 auto& type = AbstractType::Handle(zone);
3492 const intptr_t num_type_parameters = NumTypeParameters(thread);
3493 if (num_type_arguments == num_type_parameters) {
3495 args = Type::Cast(type).arguments();
3496 } else {
3497 type = super_type();
3498 const auto& super_args = TypeArguments::Handle(
3499 zone, Type::Cast(type).GetInstanceTypeArguments(thread));
3500 if ((num_type_parameters == 0) ||
3501 (!super_args.IsNull() && (super_args.Length() == num_type_arguments))) {
3502 args = super_args.ptr();
3503 } else {
// Concatenate the super prefix with this class's own declared arguments.
3504 args = TypeArguments::New(num_type_arguments);
3505 const intptr_t offset = num_type_arguments - num_type_parameters;
3506 for (intptr_t i = 0; i < offset; ++i) {
3507 type = super_args.TypeAtNullSafe(i);
3508 args.SetTypeAt(i, type);
3509 }
3511 const auto& decl_args =
3512 TypeArguments::Handle(zone, Type::Cast(type).arguments());
3513 for (intptr_t i = 0; i < num_type_parameters; ++i) {
3514 type = decl_args.TypeAt(i);
3515 args.SetTypeAt(offset + i, type);
3516 }
3517 }
3518 }
3519 args = args.Canonicalize(thread);
3520 set_declaration_instance_type_arguments(args);
3521 return args.ptr();
3522}
3523
// Expands a vector of this class's declared type arguments into the full
// instance type-argument vector (instantiating the cached declaration
// vector when parameters and arguments differ in count), optionally
// canonicalizing the result. NOTE(review): the first signature line and
// two statements are elided in this rendering.
3525 Thread* thread,
3526 const TypeArguments& type_arguments,
3527 bool canonicalize) const {
3528 const intptr_t num_type_arguments = NumTypeArguments();
3529 if (num_type_arguments == 0) {
3530 return TypeArguments::null();
3531 }
3532 Zone* zone = thread->zone();
3533 auto& args = TypeArguments::Handle(zone);
3534 const intptr_t num_type_parameters = NumTypeParameters(thread);
3535 ASSERT(type_arguments.IsNull() ||
3536 type_arguments.Length() == num_type_parameters);
3537 if (num_type_arguments == num_type_parameters) {
3538 args = type_arguments.ptr();
3539 } else {
3541 if (num_type_parameters == 0) {
3542 return args.ptr();
3543 }
3544 args = args.InstantiateFrom(
3546 zone, type_arguments.ToInstantiatorTypeArguments(thread, *this)),
3547 Object::null_type_arguments(), kAllFree, Heap::kOld);
3548 }
3549 if (canonicalize) {
3550 args = args.Canonicalize(thread);
3551 }
3552 return args.ptr();
3553}
3554
// Returns the number of type parameters declared by this class. Before the
// declaration is loaded, the core list classes are special-cased to 1.
// NOTE(review): two guard lines (an assertion and the null-type-parameters
// early-out) and the reusable handle scope line are elided in this
// rendering.
3555intptr_t Class::NumTypeParameters(Thread* thread) const {
3556 if (!is_declaration_loaded()) {
3558 const intptr_t cid = id();
3559 if ((cid == kArrayCid) || (cid == kImmutableArrayCid) ||
3560 (cid == kGrowableObjectArrayCid)) {
3561 return 1; // List's type parameter may not have been parsed yet.
3562 }
3563 return 0;
3564 }
3566 return 0;
3567 }
3569 TypeParameters& type_params = thread->TypeParametersHandle();
3570 type_params = type_parameters();
3571 return type_params.Length();
3572}
3573
// Computes the total length of this class's type-argument vector: the
// superclass's vector plus this class's own type parameters, minus any
// overlap between a prefix of the parameters and a suffix of the super
// type's arguments.
// NOTE(review): original line 3575 was dropped by the doc extractor.
intptr_t Class::ComputeNumTypeArguments() const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  auto isolate_group = thread->isolate_group();
  const intptr_t num_type_params = NumTypeParameters();

  // No supertype, or deriving directly from Object: nothing is inherited.
  if ((super_type() == AbstractType::null()) ||
      (super_type() == isolate_group->object_store()->object_type())) {
    return num_type_params;
  }

  const auto& sup_type = Type::Handle(zone, super_type());
  const auto& sup_class = Class::Handle(zone, sup_type.type_class());
  const intptr_t sup_class_num_type_args = sup_class.NumTypeArguments();
  if (num_type_params == 0) {
    return sup_class_num_type_args;
  }

  const auto& sup_type_args = TypeArguments::Handle(zone, sup_type.arguments());
  if (sup_type_args.IsNull()) {
    // The super type is raw or the super class is non generic.
    // In either case, overlapping is not possible.
    return sup_class_num_type_args + num_type_params;
  }

  const intptr_t sup_type_args_length = sup_type_args.Length();
  // Determine the maximum overlap of a prefix of the vector consisting of the
  // type parameters of this class with a suffix of the vector consisting of the
  // type arguments of the super type of this class.
  // The number of own type arguments of this class is the number of its type
  // parameters minus the number of type arguments in the overlap.
  // Attempt to overlap the whole vector of type parameters; reduce the size
  // of the vector (keeping the first type parameter) until it fits or until
  // its size is zero.
  auto& sup_type_arg = AbstractType::Handle(zone);
  for (intptr_t num_overlapping_type_args =
           (num_type_params < sup_type_args_length) ? num_type_params
                                                    : sup_type_args_length;
       num_overlapping_type_args > 0; num_overlapping_type_args--) {
    intptr_t i = 0;
    for (; i < num_overlapping_type_args; i++) {
      sup_type_arg = sup_type_args.TypeAt(sup_type_args_length -
                                          num_overlapping_type_args + i);
      ASSERT(!sup_type_arg.IsNull());
      // Only a type parameter at the matching position (and non-nullable)
      // can participate in the overlap.
      if (!sup_type_arg.IsTypeParameter()) break;
      // The only type parameters appearing in the type arguments of the super
      // type are those declared by this class. Their finalized indices depend
      // on the number of type arguments being computed here. Therefore, they
      // cannot possibly be finalized yet.
      ASSERT(!TypeParameter::Cast(sup_type_arg).IsFinalized());
      if (TypeParameter::Cast(sup_type_arg).index() != i ||
          TypeParameter::Cast(sup_type_arg).IsNullable()) {
        break;
      }
    }
    if (i == num_overlapping_type_args) {
      // Overlap found.
      return sup_class_num_type_args + num_type_params -
             num_overlapping_type_args;
    }
  }
  // No overlap found.
  return sup_class_num_type_args + num_type_params;
}
3639
3640intptr_t Class::NumTypeArguments() const {
3641 // Return cached value if already calculated.
3642 intptr_t num_type_args = num_type_arguments();
3643 if (num_type_args != kUnknownNumTypeArguments) {
3644 return num_type_args;
3645 }
3646
3647#if defined(DART_PRECOMPILED_RUNTIME)
3648 UNREACHABLE();
3649 return 0;
3650#else
3651 num_type_args = ComputeNumTypeArguments();
3652 ASSERT(num_type_args != kUnknownNumTypeArguments);
3653 set_num_type_arguments(num_type_args);
3654 return num_type_args;
3655#endif // defined(DART_PRECOMPILED_RUNTIME)
3656}
3657
// Returns the default type arguments for this class's type parameters.
// NOTE(review): the guard condition on original line 3659 was dropped by
// the doc extractor; the early return below belongs to it.
TypeArgumentsPtr Class::DefaultTypeArguments(Zone* zone) const {
    return Object::empty_type_arguments().ptr();
  }
  return TypeParameters::Handle(zone, type_parameters()).defaults();
}
3664
3665ClassPtr Class::SuperClass(ClassTable* class_table /* = nullptr */) const {
3666 Thread* thread = Thread::Current();
3667 Zone* zone = thread->zone();
3668 if (class_table == nullptr) {
3669 class_table = thread->isolate_group()->class_table();
3670 }
3671
3672 if (super_type() == AbstractType::null()) {
3673 if (id() == kTypeArgumentsCid) {
3674 // Pretend TypeArguments objects are Dart instances.
3675 return class_table->At(kInstanceCid);
3676 }
3677 return Class::null();
3678 }
3679 const AbstractType& sup_type = AbstractType::Handle(zone, super_type());
3680 const intptr_t type_class_id = sup_type.type_class_id();
3681 return class_table->At(type_class_id);
3682}
3683
// NOTE(review): the declaration line (Class::set_super_type) was dropped by
// the doc extractor.
  // The supertype may be absent (null) but must never be `dynamic`.
  ASSERT(value.IsNull() || !value.IsDynamicType());
  untag()->set_super_type(value.ptr());
}
3688
3689TypeParameterPtr Class::TypeParameterAt(intptr_t index,
3690 Nullability nullability) const {
3691 ASSERT(index >= 0 && index < NumTypeParameters());
3692 TypeParameter& type_param =
3693 TypeParameter::Handle(TypeParameter::New(*this, 0, index, nullability));
3694 // Finalize type parameter only if its declaring class is
3695 // finalized and available in the current class table.
3696 if (is_type_finalized() && (type_param.parameterized_class() == ptr())) {
3697 type_param ^= ClassFinalizer::FinalizeType(type_param);
3698 }
3699 return type_param.ptr();
3700}
3701
// NOTE(review): the function's declaration line was dropped by the doc
// extractor; the switch maps a guarded class id to the byte size of the
// corresponding unboxed field payload.
  switch (cid) {
    case kDoubleCid:
      return sizeof(UntaggedDouble::value_);
    case kFloat32x4Cid:
      return sizeof(UntaggedFloat32x4::value_);
    case kFloat64x2Cid:
      return sizeof(UntaggedFloat64x2::value_);
    default:
      // All remaining unboxable cids store a Mint-sized integer payload.
      return sizeof(UntaggedMint::value_);
  }
}
3714
// Assigns host and target (cross-compilation) offsets to every instance
// field of this class, continuing from the superclass's last field, decides
// which fields stay unboxed, and returns the bitmap of words holding
// unboxed (non-pointer) data.
// NOTE(review): several lines of this function were dropped by the doc
// extractor (e.g. originals 3741-3743, 3746, 3759, 3809, 3820-3821, 3826);
// consult the original source before editing.
UnboxedFieldBitmap Class::CalculateFieldOffsets() const {
  Array& flds = Array::Handle(fields());
  const Class& super = Class::Handle(SuperClass());
  intptr_t host_offset = 0;
  UnboxedFieldBitmap host_bitmap{};
  // Target offsets might differ if the word size are different
  intptr_t target_offset = 0;
  intptr_t host_type_args_field_offset = kNoTypeArguments;
  intptr_t target_type_args_field_offset = RTN::Class::kNoTypeArguments;
  if (super.IsNull()) {
    // Root of the hierarchy: fields start right after the object header.
    host_offset = Instance::NextFieldOffset();
    target_offset = RTN::Instance::NextFieldOffset();
    ASSERT(host_offset > 0);
    ASSERT(target_offset > 0);
  } else {
    ASSERT(super.is_finalized() || super.is_prefinalized());
    // Inherit the type_arguments slot and continue after the super's fields.
    host_type_args_field_offset = super.host_type_arguments_field_offset();
    target_type_args_field_offset = super.target_type_arguments_field_offset();
    host_offset = super.host_next_field_offset();
    ASSERT(host_offset > 0);
    target_offset = super.target_next_field_offset();
    ASSERT(target_offset > 0);
    // We should never call CalculateFieldOffsets for native wrapper
    // classes, assert this.
    ASSERT(num_native_fields() == 0);
    const intptr_t num_native_fields = super.num_native_fields();
  }

      super.id());
  }
  // If the super class is parameterized, use the same type_arguments field,
  // otherwise, if this class is the first in the super chain to be
  // parameterized, introduce a new type_arguments field.
  if (host_type_args_field_offset == kNoTypeArguments) {
    ASSERT(target_type_args_field_offset == RTN::Class::kNoTypeArguments);
    if (IsGeneric()) {
      // The instance needs a type_arguments field.
      host_type_args_field_offset = host_offset;
      target_type_args_field_offset = target_offset;
      host_offset += kCompressedWordSize;
    }
  } else {
    ASSERT(target_type_args_field_offset != RTN::Class::kNoTypeArguments);
  }

  set_type_arguments_field_offset(host_type_args_field_offset,
                                  target_type_args_field_offset);
  ASSERT(host_offset > 0);
  ASSERT(target_offset > 0);
  Field& field = Field::Handle();
  const intptr_t len = flds.Length();
  for (intptr_t i = 0; i < len; i++) {
    field ^= flds.At(i);
    // Offset is computed only for instance fields.
    if (!field.is_static()) {
      ASSERT(field.HostOffset() == 0);
      ASSERT(field.TargetOffset() == 0);
      field.SetOffset(host_offset, target_offset);

      if (field.is_unboxed()) {
        const intptr_t field_size =
            UnboxedFieldSizeInBytesByCid(field.guarded_cid());

        const intptr_t host_num_words = field_size / kCompressedWordSize;
        const intptr_t host_next_offset = host_offset + field_size;
        const intptr_t host_next_position =
            host_next_offset / kCompressedWordSize;

        const intptr_t target_next_offset = target_offset + field_size;
        const intptr_t target_next_position =
            target_next_offset / compiler::target::kCompressedWordSize;

        // The bitmap has fixed length. Checks if the offset position is smaller
        // than its length. If it is not, than the field should be boxed
        if (host_next_position <= UnboxedFieldBitmap::Length() &&
            target_next_position <= UnboxedFieldBitmap::Length()) {
          for (intptr_t j = 0; j < host_num_words; j++) {
            // Activate the respective bit in the bitmap, indicating that the
            // content is not a pointer
            host_bitmap.Set(host_offset / kCompressedWordSize);
            host_offset += kCompressedWordSize;
          }

          ASSERT(host_offset == host_next_offset);
          target_offset = target_next_offset;
        } else {
          // Make the field boxed
          field.set_is_unboxed(false);
          host_offset += kCompressedWordSize;
        }
      } else {
        // Boxed field: one compressed pointer per field.
        host_offset += kCompressedWordSize;
      }
    }
  }

  const intptr_t host_instance_size = RoundedAllocationSize(host_offset);
  const intptr_t target_instance_size =
    // Many parts of the compiler assume offsets can be represented with
    // int32_t.
    FATAL("Too many fields in %s\n", UserVisibleNameCString());
  }
  set_next_field_offset(host_offset, target_offset);
  return host_bitmap;
}
3830
// NOTE(review): the declaration line (Class::AddInvocationDispatcher) and
// one interior line were dropped by the doc extractor.
                                     const Array& args_desc,
                                     const Function& dispatcher) const {
  auto thread = Thread::Current();

  ASSERT(target_name.ptr() == dispatcher.name());

  // Lazily create the dispatcher hash set on first insert; otherwise wrap
  // the existing backing array.
  DispatcherSet dispatchers(invocation_dispatcher_cache() ==
                                    Array::empty_array().ptr()
                                ? HashTables::New<DispatcherSet>(4, Heap::kOld)
                                : invocation_dispatcher_cache());
  dispatchers.Insert(dispatcher);
  // Publish the (possibly grown) backing array back into the class.
  set_invocation_dispatcher_cache(dispatchers.Release());
}
3846
// Looks up (and optionally creates) the invocation dispatcher for
// `target_name` with the given arguments descriptor. Uses a lock-free read
// first, then re-checks under the program write lock before creating.
// NOTE(review): the `kind` parameter line (original 3849) was dropped by
// the doc extractor.
FunctionPtr Class::GetInvocationDispatcher(const String& target_name,
                                           const Array& args_desc,
                                           bool create_if_absent) const {
  ASSERT(kind == UntaggedFunction::kNoSuchMethodDispatcher ||
         kind == UntaggedFunction::kInvokeFieldDispatcher ||
         kind == UntaggedFunction::kDynamicInvocationForwarder);
  auto thread = Thread::Current();
  auto Z = thread->zone();
  auto& function = Function::Handle(Z);

  // First we'll try to find it without using locks.
  DispatcherKey key(target_name, args_desc, kind);
  if (invocation_dispatcher_cache() != Array::empty_array().ptr()) {
    DispatcherSet dispatchers(Z, invocation_dispatcher_cache());
    function ^= dispatchers.GetOrNull(key);
    dispatchers.Release();
  }
  if (!function.IsNull() || !create_if_absent) {
    return function.ptr();
  }

  // If we failed to find it and possibly need to create it, use a write lock.
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());

  // Try to find it again & return if it was added in the meantime.
  if (invocation_dispatcher_cache() != Array::empty_array().ptr()) {
    DispatcherSet dispatchers(Z, invocation_dispatcher_cache());
    function ^= dispatchers.GetOrNull(key);
    dispatchers.Release();
  }
  if (!function.IsNull()) return function.ptr();

  // Otherwise create it & add it.
  function = CreateInvocationDispatcher(target_name, args_desc, kind);
  AddInvocationDispatcher(target_name, args_desc, function);
  return function.ptr();
}
3885
// Builds a synthetic dispatcher function (noSuchMethod / invoke-field /
// dynamic-forwarder) whose signature mirrors the given arguments
// descriptor: a receiver, positional parameters, named parameters, and —
// when type arguments are passed — fresh unbounded type parameters.
// NOTE(review): a few lines were dropped by the doc extractor (e.g. the
// `signature` handle creation and the owner/token-position arguments);
// consult the original source before editing.
FunctionPtr Class::CreateInvocationDispatcher(
    const String& target_name,
    const Array& args_desc,
    UntaggedFunction::Kind kind) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Function& invocation = Function::Handle(
      zone, Function::New(
                signature,
                String::Handle(zone, Symbols::New(thread, target_name)), kind,
                false,  // Not static.
                false,  // Not const.
                false,  // Not abstract.
                false,  // Not external.
                false,  // Not native.
  ArgumentsDescriptor desc(args_desc);
  const intptr_t type_args_len = desc.TypeArgsLen();
  if (type_args_len > 0) {
    // Make dispatcher function generic, since type arguments are passed.
    const auto& type_parameters =
        TypeParameters::Handle(zone, TypeParameters::New(type_args_len));
    // Allow any type, as any type checking is compiled into the dispatcher.
    auto& bound = Type::Handle(
        zone, IsolateGroup::Current()->object_store()->nullable_object_type());
    for (intptr_t i = 0; i < type_args_len; i++) {
      // The name of the type parameter does not matter, as a type error using
      // it should never be thrown.
      type_parameters.SetNameAt(i, Symbols::OptimizedOut());
      type_parameters.SetBoundAt(i, bound);
      // Type arguments will always be provided, so the default is not used.
      type_parameters.SetDefaultAt(i, Object::dynamic_type());
    }
  }

  signature.set_num_fixed_parameters(desc.PositionalCount());
  signature.SetNumOptionalParameters(desc.NamedCount(),
                                     false);  // Not positional.
  signature.set_parameter_types(
      Array::Handle(zone, Array::New(desc.Count(), Heap::kOld)));
  invocation.CreateNameArray();
  // Receiver.
  signature.SetParameterTypeAt(0, Object::dynamic_type());
  invocation.SetParameterNameAt(0, Symbols::This());
  // Remaining positional parameters.
  for (intptr_t i = 1; i < desc.PositionalCount(); i++) {
    signature.SetParameterTypeAt(i, Object::dynamic_type());
    // Synthetic positional parameters get placeholder names ":p1", ":p2"...
    char name[64];
    Utils::SNPrint(name, 64, ":p%" Pd, i);
    invocation.SetParameterNameAt(
        i, String::Handle(zone, Symbols::New(thread, name)));
  }

  // Named parameters.
  for (intptr_t i = 0; i < desc.NamedCount(); i++) {
    const intptr_t param_index = desc.PositionAt(i);
    const auto& param_name = String::Handle(zone, desc.NameAt(i));
    signature.SetParameterTypeAt(param_index, Object::dynamic_type());
    signature.SetParameterNameAt(param_index, param_name);
  }
  signature.FinalizeNameArray();
  signature.set_result_type(Object::dynamic_type());
  // Dispatchers are invisible implementation details: hide them from the
  // debugger, stack traces, and mirrors.
  invocation.set_is_debuggable(false);
  invocation.set_is_visible(false);
  invocation.set_is_reflectable(false);
  invocation.set_saved_args_desc(args_desc);

  signature ^= ClassFinalizer::FinalizeType(signature);
  invocation.SetSignature(signature);

  return invocation.ptr();
}
3961
3962// Method extractors are used to create implicit closures from methods.
3963// When an expression obj.M is evaluated for the first time and receiver obj
3964// does not have a getter called M but has a method called M then an extractor
3965// is created and injected as a getter (under the name get:M) into the class
3966// owning method M.
// Creates the getter function (named "get:M") that closurizes this method
// when a property access finds a method instead of a getter. See the
// comment block above for the full rationale.
// NOTE(review): two lines were dropped by the doc extractor (originals
// 3972 and 3975, including the `signature` handle creation).
FunctionPtr Function::CreateMethodExtractor(const String& getter_name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(Field::IsGetterName(getter_name));
  const Function& closure_function =

  const Class& owner = Class::Handle(zone, closure_function.Owner());
  const Function& extractor = Function::Handle(
      zone,
      Function::New(signature,
                    String::Handle(zone, Symbols::New(thread, getter_name)),
                    UntaggedFunction::kMethodExtractor,
                    false,  // Not static.
                    false,  // Not const.
                    is_abstract(),
                    false,  // Not external.
                    false,  // Not native.
                    owner, TokenPosition::kMethodExtractor));

  // Initialize signature: receiver is a single fixed parameter.
  const intptr_t kNumParameters = 1;
  signature.set_num_fixed_parameters(kNumParameters);
  signature.SetNumOptionalParameters(0, false);
  signature.set_parameter_types(Object::synthetic_getter_parameter_types());
#if !defined(DART_PRECOMPILED_RUNTIME)
  extractor.set_positional_parameter_names(
      Object::synthetic_getter_parameter_names());
#endif
  signature.set_result_type(Object::dynamic_type());

  extractor.InheritKernelOffsetFrom(*this);

  extractor.set_extracted_method_closure(closure_function);
  // Extractors are synthetic: keep them out of the debugger and stack traces.
  extractor.set_is_debuggable(false);
  extractor.set_is_visible(false);

  signature ^= ClassFinalizer::FinalizeType(signature);
  extractor.SetSignature(signature);

  owner.AddFunction(extractor);

  return extractor.ptr();
}
4012
// Returns the method extractor for `getter_name`, creating it under the
// program write lock if it does not exist yet.
// NOTE(review): two lines were dropped by the doc extractor (originals
// 4016 and 4023, including the `result` handle creation).
FunctionPtr Function::GetMethodExtractor(const String& getter_name) const {
  ASSERT(Field::IsGetterName(getter_name));
  const Function& closure_function =
  const Class& owner = Class::Handle(closure_function.Owner());
  Thread* thread = Thread::Current();
  if (owner.EnsureIsFinalized(thread) != Error::null()) {
    return Function::null();
  }
  IsolateGroup* group = thread->isolate_group();
      Resolver::ResolveDynamicFunction(thread->zone(), owner, getter_name));
  if (result.IsNull()) {
    // Re-check under the write lock before creating, in case another thread
    // added the extractor in the meantime.
    SafepointWriteRwLocker ml(thread, group->program_lock());
    result = owner.LookupDynamicFunctionUnsafe(getter_name);
    if (result.IsNull()) {
      result = CreateMethodExtractor(getter_name);
    }
  }
  ASSERT(result.kind() == UntaggedFunction::kMethodExtractor);
  return result.ptr();
}
4035
// Record field getters are used to access fields of arbitrary
// record instances dynamically.
// NOTE(review): three lines were dropped by the doc extractor (originals
// 4041, 4043, 4054, including the `signature` handle creation).
FunctionPtr Class::CreateRecordFieldGetter(const String& getter_name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(Field::IsGetterName(getter_name));
  const Function& getter = Function::Handle(
      zone,
      Function::New(signature,
                    String::Handle(zone, Symbols::New(thread, getter_name)),
                    UntaggedFunction::kRecordFieldGetter,
                    false,  // Not static.
                    false,  // Not const.
                    false,  // Not abstract.
                    false,  // Not external.
                    false,  // Not native.

  // Initialize signature: receiver is a single fixed parameter.
  const intptr_t kNumParameters = 1;
  signature.set_num_fixed_parameters(kNumParameters);
  signature.SetNumOptionalParameters(0, false);
  signature.set_parameter_types(Object::synthetic_getter_parameter_types());
#if !defined(DART_PRECOMPILED_RUNTIME)
  getter.set_positional_parameter_names(
      Object::synthetic_getter_parameter_names());
#endif
  signature.set_result_type(Object::dynamic_type());

  // Synthetic getter: keep it out of the debugger and stack traces.
  getter.set_is_debuggable(false);
  getter.set_is_visible(false);

  signature ^= ClassFinalizer::FinalizeType(signature);
  getter.SetSignature(signature);

  AddFunction(getter);

  return getter.ptr();
}
4077
// Returns the record field getter for `getter_name`, creating it under the
// program write lock if it does not exist yet.
// NOTE(review): original line 4079 was dropped by the doc extractor.
FunctionPtr Class::GetRecordFieldGetter(const String& getter_name) const {
  ASSERT(Field::IsGetterName(getter_name));
  Thread* thread = Thread::Current();
  // Lookup and creation both happen under the write lock, so at most one
  // getter per name is ever created.
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  Function& result = Function::Handle(thread->zone(),
                                      LookupDynamicFunctionUnsafe(getter_name));
  if (result.IsNull()) {
    result = CreateRecordFieldGetter(getter_name);
  }
  ASSERT(result.kind() == UntaggedFunction::kRecordFieldGetter);
  return result.ptr();
}
4091
// Scans an evaluated metadata array for @pragma annotations whose `name`
// field equals `pragma_name`. With `multiple` set, collects every match's
// options into a growable array stored through `options`; otherwise returns
// on the first match, optionally storing its options.
// NOTE(review): the declaration line (original 4092) was dropped by the doc
// extractor.
                                const Object& metadata_obj,
                                const String& pragma_name,
                                bool multiple,
                                Object* options) {
  auto IG = T->isolate_group();
  auto Z = T->zone();

  // If there is a compile-time error while evaluating the metadata, we will
  // simply claim there was no @pragma annotation.
  if (metadata_obj.IsNull() || metadata_obj.IsLanguageError()) {
    return false;
  }
  ASSERT(metadata_obj.IsArray());

  auto& metadata = Array::Cast(metadata_obj);
  auto& pragma_class = Class::Handle(Z, IG->object_store()->pragma_class());
  if (pragma_class.IsNull()) {
    // Precompiler may drop pragma class.
    return false;
  }
  auto& pragma_name_field =
      Field::Handle(Z, pragma_class.LookupField(Symbols::name()));
  auto& pragma_options_field =
      Field::Handle(Z, pragma_class.LookupField(Symbols::options()));

  auto& pragma = Object::Handle(Z);
  bool found = false;
  auto& options_value = Object::Handle(Z);
  auto& results = GrowableObjectArray::Handle(Z);
  if (multiple) {
    ASSERT(options != nullptr);
    results ^= GrowableObjectArray::New(1);
  }
  for (intptr_t i = 0; i < metadata.Length(); ++i) {
    pragma = metadata.At(i);
    // Skip annotations that are not pragma instances or have a different name.
    if (pragma.clazz() != pragma_class.ptr() ||
        Instance::Cast(pragma).GetField(pragma_name_field) !=
            pragma_name.ptr()) {
      continue;
    }
    options_value = Instance::Cast(pragma).GetField(pragma_options_field);
    found = true;
    if (multiple) {
      results.Add(options_value);
      continue;
    }
    if (options != nullptr) {
      *options = options_value.ptr();
    }
    return true;
  }

  // In `multiple` mode, hand back all collected options even though the
  // return value below is false on this path.
  if (found && options != nullptr) {
    *options = results.ptr();
  }
  return false;
}
4150
// Resolves the library owning `obj` (a Library, Class, Function, or Field),
// fetches `obj`'s metadata from it, and delegates the @pragma search to
// FindPragmaInMetadata. Returns false early when the object carries no
// pragma flag or when `only_core` excludes the owning library.
// NOTE(review): the declaration line (original 4151) was dropped by the doc
// extractor.
                        bool only_core,
                        const Object& obj,
                        const String& pragma_name,
                        bool multiple,
                        Object* options) {
  auto Z = T->zone();
  auto& lib = Library::Handle(Z);

  if (obj.IsLibrary()) {
    lib = Library::Cast(obj).ptr();
  } else if (obj.IsClass()) {
    auto& klass = Class::Cast(obj);
    // The has_pragma bit is a cheap pre-filter before loading metadata.
    if (!klass.has_pragma()) return false;
    lib = klass.library();
  } else if (obj.IsFunction()) {
    auto& function = Function::Cast(obj);
    if (!function.has_pragma()) return false;
    lib = Class::Handle(Z, function.Owner()).library();
  } else if (obj.IsField()) {
    auto& field = Field::Cast(obj);
    if (!field.has_pragma()) return false;
    lib = Class::Handle(Z, field.Owner()).library();
  } else {
    UNREACHABLE();
  }

  if (only_core && !lib.IsAnyCoreLibrary()) {
    return false;
  }

  Object& metadata_obj = Object::Handle(Z, lib.GetMetadata(obj));
  // Unwind errors (e.g. isolate shutdown) must propagate, not be swallowed.
  if (metadata_obj.IsUnwindError()) {
    Report::LongJump(UnwindError::Cast(metadata_obj));
  }

  return FindPragmaInMetadata(T, metadata_obj, pragma_name, multiple, options);
}
4189
4192}
4193
4195 return String::StartsWith(name, Symbols::DynamicPrefix().ptr());
4196}
4197
// NOTE(review): the declaration line (original 4198) was dropped by the doc
// extractor. Strips the "dyn:" prefix from a mangled forwarder name and
// returns the original symbol.
  const intptr_t kDynamicPrefixLength = 4;  // "dyn:"
  ASSERT(Symbols::DynamicPrefix().Length() == kDynamicPrefixLength);
  return Symbols::New(Thread::Current(), name, kDynamicPrefixLength,
                      name.Length() - kDynamicPrefixLength);
}
4204
4206 return Symbols::FromConcat(Thread::Current(), Symbols::DynamicPrefix(), name);
4207}
4208
4209#if !defined(DART_PRECOMPILED_RUNTIME)
// Clones this function into a dynamic invocation forwarder named
// `mangled_name` ("dyn:" + name): a synthetic function that performs the
// checks needed for a dynamic call before delegating to the original.
// NOTE(review): the declaration line and two interior lines (originals
// 4210, 4218, 4225) were dropped by the doc extractor.
    const String& mangled_name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  Function& forwarder = Function::Handle(zone);
  // Start from a full clone so signature and metadata match the target.
  forwarder ^= Object::Clone(*this, Heap::kOld);

  forwarder.set_name(mangled_name);
  forwarder.set_is_native(false);
  // TODO(dartbug.com/37737): Currently, we intentionally keep the recognized
  // kind when creating the dynamic invocation forwarder.
  forwarder.set_kind(UntaggedFunction::kDynamicInvocationForwarder);
  forwarder.set_is_debuggable(false);

  // TODO(vegorov) for error reporting reasons it is better to make this
  // function visible and instead use a TailCall to invoke the target.
  // Our TailCall instruction is not ready for such usage though it
  // blocks inlining and can't take Function-s only Code objects.
  forwarder.set_is_visible(false);

  // Reset all compilation state inherited from the clone; the forwarder
  // must be compiled fresh.
  forwarder.ClearICDataArray();
  forwarder.ClearCode();
  forwarder.set_usage_counter(0);
  forwarder.set_deoptimization_counter(0);
  forwarder.set_optimized_instruction_count(0);
  forwarder.set_inlining_depth(0);
  forwarder.set_optimized_call_site_count(0);

  forwarder.InheritKernelOffsetFrom(*this);
  forwarder.SetForwardingTarget(*this);

  return forwarder.ptr();
}
4247
// Returns the dynamic invocation forwarder for `mangled_name`, creating it
// if needed; returns this function itself when no forwarder is required.
// NOTE(review): several lines were dropped by the doc extractor (originals
// 4248, 4250, 4254, 4257, 4264, 4273, 4280, including the `result` handle
// creation and the needs-forwarder computation).
    const String& mangled_name) const {
  auto thread = Thread::Current();
  auto zone = thread->zone();
  const Class& owner = Class::Handle(zone, Owner());

  // First we'll try to find it without using locks.
      mangled_name, Array::null_array(),
      UntaggedFunction::kDynamicInvocationForwarder,
      /*create_if_absent=*/false);
  if (!result.IsNull()) return result.ptr();

  const bool needs_dyn_forwarder =
  if (!needs_dyn_forwarder) {
    // The function can be invoked dynamically as-is; no forwarder needed.
    return ptr();
  }

  // If we failed to find it and possibly need to create it, use a write lock.
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());

  // Try to find it again & return if it was added in the mean time.
      mangled_name, Array::null_array(),
      UntaggedFunction::kDynamicInvocationForwarder,
      /*create_if_absent=*/false);
  if (!result.IsNull()) return result.ptr();

  // Otherwise create it & add it.
  owner.AddInvocationDispatcher(mangled_name, Array::null_array(), result);
  return result.ptr();
}
4284
4285#endif
4286
// Instantiates `subtype` and `supertype` (in place) with the given type
// argument vectors when needed, then tests the subtype relation.
// NOTE(review): the declaration line (original 4287) was dropped by the doc
// extractor.
                                      AbstractType* subtype,
                                      AbstractType* supertype,
                                      const TypeArguments& instantiator_type_args,
                                      const TypeArguments& function_type_args) {
  if (!subtype->IsInstantiated()) {
    *subtype = subtype->InstantiateFrom(
        instantiator_type_args, function_type_args, kAllFree, Heap::kOld);
  }
  if (!supertype->IsInstantiated()) {
    *supertype = supertype->InstantiateFrom(
        instantiator_type_args, function_type_args, kAllFree, Heap::kOld);
  }
  return subtype->IsSubtypeOf(*supertype, Heap::kOld);
}
4302
4303ArrayPtr Class::invocation_dispatcher_cache() const {
4304 return untag()->invocation_dispatcher_cache<std::memory_order_acquire>();
4305}
4306
// Finalizes this class: computes field offsets / instance size for
// non-prefinalized classes and publishes them to the class table.
// NOTE(review): two lines were dropped by the doc extractor (originals
// 4310 and 4344, the latter presumably marking the class finalized).
void Class::Finalize() const {
  auto thread = Thread::Current();
  auto isolate_group = thread->isolate_group();
  ASSERT(!is_finalized());
  // Prefinalized classes have a VM internal representation and no Dart fields.
  // Their instance size is precomputed and field offsets are known.
  if (!is_prefinalized()) {
    // Compute offsets of instance fields, instance size and bitmap for unboxed
    // fields.
    const auto host_bitmap = CalculateFieldOffsets();
    if (ptr() == isolate_group->class_table()->At(id())) {
      if (!ClassTable::IsTopLevelCid(id())) {
        // Unless class is top-level, which don't get instantiated,
        // sets the new size in the class table.
        isolate_group->class_table()->UpdateClassSize(id(), ptr());
        isolate_group->class_table()->SetUnboxedFieldsMapAt(id(), host_bitmap);
      }
    }
  }

#if defined(DEBUG)
  if (is_const()) {
    // Double-check that all fields are final (CFE should guarantee that if it
    // marks the class as having a constant constructor).
    auto Z = thread->zone();
    const auto& super_class = Class::Handle(Z, SuperClass());
    ASSERT(super_class.IsNull() || super_class.is_const());
    const auto& fields = Array::Handle(Z, this->fields());
    auto& field = Field::Handle(Z);
    for (intptr_t i = 0; i < fields.Length(); ++i) {
      field ^= fields.At(i);
      ASSERT(field.is_static() || field.is_final());
    }
  }
#endif

}
4346
4347#if defined(DEBUG)
4348static bool IsMutatorOrAtDeoptSafepoint() {
4349 Thread* thread = Thread::Current();
4350 return thread->IsDartMutatorThread() || thread->OwnsDeoptSafepoint();
4351}
4352#endif
4353
4354#if !defined(DART_PRECOMPILED_RUNTIME)
4355
4357 public:
4358 explicit CHACodeArray(const Class& cls)
4359 : WeakCodeReferences(WeakArray::Handle(cls.dependent_code())),
4360 cls_(cls) {}
4361
4362 virtual void UpdateArrayTo(const WeakArray& value) {
4363 // TODO(fschneider): Fails for classes in the VM isolate.
4365 }
4366
4367 virtual void ReportDeoptimization(const Code& code) {
4368 if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
4369 Function& function = Function::Handle(code.function());
4370 THR_Print("Deoptimizing %s because CHA optimized (%s).\n",
4371 function.ToFullyQualifiedCString(), cls_.ToCString());
4372 }
4373 }
4374
4375 virtual void ReportSwitchingCode(const Code& code) {
4376 if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
4377 Function& function = Function::Handle(code.function());
4378 THR_Print(
4379 "Switching %s to unoptimized code because CHA invalid"
4380 " (%s)\n",
4381 function.ToFullyQualifiedCString(), cls_.ToCString());
4382 }
4383 }
4384
4385 private:
4386 const Class& cls_;
4387 DISALLOW_COPY_AND_ASSIGN(CHACodeArray);
4388};
4389
// NOTE(review): the declaration line (Class::RegisterCHACode, original
// 4390) was dropped by the doc extractor. Registers optimized `code` as
// depending on CHA facts about this class, so it can be invalidated when
// the hierarchy changes.
  if (FLAG_trace_cha) {
    THR_Print("RegisterCHACode '%s' depends on class '%s'\n",
              Function::Handle(code.function()).ToQualifiedCString(),
              ToCString());
  }
  DEBUG_ASSERT(IsMutatorOrAtDeoptSafepoint());
  ASSERT(code.is_optimized());
  CHACodeArray a(*this);
  a.Register(code);
}
4401
// NOTE(review): the declaration line and the start of the DEBUG_ASSERT
// (originals 4402-4403) were dropped by the doc extractor. Disables all
// code registered as depending on CHA facts about this class, e.g. when
// `subclass` is newly added to the hierarchy.
      IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  CHACodeArray a(*this);
  if (FLAG_trace_deoptimization && a.HasCodes()) {
    if (subclass.IsNull()) {
      THR_Print("Deopt for CHA (all)\n");
    } else {
      THR_Print("Deopt for CHA (new subclass %s)\n", subclass.ToCString());
    }
  }
  a.DisableCode(/*are_mutators_stopped=*/false);
}
4415
4418}
4419
// Returns the weak array of Code objects depending on CHA facts about this
// class. NOTE(review): the start of the DEBUG_ASSERT (original 4421) was
// dropped by the doc extractor; reading requires at least the program
// read lock.
WeakArrayPtr Class::dependent_code() const {
      IsolateGroup::Current()->program_lock()->IsCurrentThreadReader());
  return untag()->dependent_code();
}
4425
// Replaces the dependent-code weak array. NOTE(review): the start of the
// DEBUG_ASSERT (original 4427) was dropped by the doc extractor; writing
// requires the program write lock.
void Class::set_dependent_code(const WeakArray& array) const {
      IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  untag()->set_dependent_code(array.ptr());
}
4431
4432#endif // !defined(DART_PRECOMPILED_RUNTIME)
4433
4434bool Class::TraceAllocation(IsolateGroup* isolate_group) const {
4435#ifndef PRODUCT
4436 auto class_table = isolate_group->class_table();
4437 return class_table->ShouldTraceAllocationFor(id());
4438#else
4439 return false;
4440#endif
4441}
4442
// Enables or disables allocation tracing for this class. Must not be
// called in PRODUCT builds. NOTE(review): one IA32-specific line (original
// 4451) was dropped by the doc extractor.
void Class::SetTraceAllocation(bool trace_allocation) const {
#ifndef PRODUCT
  auto isolate_group = IsolateGroup::Current();
  // Only touch the class table when the flag actually flips.
  const bool changed = trace_allocation != this->TraceAllocation(isolate_group);
  if (changed) {
    auto class_table = isolate_group->class_table();
    class_table->SetTraceAllocationFor(id(), trace_allocation);
#ifdef TARGET_ARCH_IA32
#endif
  }
#else
  UNREACHABLE();
#endif
}
4458
4459// Conventions:
4460// * For throwing a NSM in a library or top-level class (i.e., level is
4461// kTopLevel), if a method was found but was incompatible, we pass the
4462// signature of the found method as a string, otherwise the null instance.
4463// * Otherwise, for throwing a NSM in a class klass we use its runtime type as
4464// receiver, i.e., klass.RareType().
// Builds the 7-element argument vector expected by
// NoSuchMethodError._throwNew and invokes it, producing the thrown error
// (or an unhandled-exception error object). See the conventions comment
// above for the meaning of `receiver` per level.
// NOTE(review): the `level` parameter line and the EncodeType call
// (originals 4469, 4472) were dropped by the doc extractor.
static ObjectPtr ThrowNoSuchMethod(const Instance& receiver,
                                   const String& function_name,
                                   const Array& arguments,
                                   const Array& argument_names,
                                   const InvocationMirror::Kind kind) {
  const Smi& invocation_type =

  ASSERT(!receiver.IsNull() || level == InvocationMirror::Level::kTopLevel);
  ASSERT(level != InvocationMirror::Level::kTopLevel || receiver.IsString());
  const Array& args = Array::Handle(Array::New(7));
  args.SetAt(0, receiver);
  args.SetAt(1, function_name);
  args.SetAt(2, invocation_type);
  args.SetAt(3, Object::smi_zero());  // Type arguments length.
  args.SetAt(4, Object::null_type_arguments());
  args.SetAt(5, arguments);
  args.SetAt(6, argument_names);

  const Library& libcore = Library::Handle(Library::CoreLibrary());
  const Class& cls =
      Class::Handle(libcore.LookupClass(Symbols::NoSuchMethodError()));
  ASSERT(!cls.IsNull());
  const auto& error = cls.EnsureIsFinalized(Thread::Current());
  ASSERT(error == Error::null());
  const Function& throwNew =
      Function::Handle(cls.LookupFunctionAllowPrivate(Symbols::ThrowNew()));
  return DartEntry::InvokeFunction(throwNew, args);
}
4495
// Invokes _TypeError._throwNew with the serialized token position, the
// offending value, the expected type and the destination name, producing
// the thrown error object.
// NOTE(review): the declaration line (original 4496, with the token
// position parameter) was dropped by the doc extractor.
                                const Instance& src_value,
                                const AbstractType& dst_type,
                                const String& dst_name) {
  const Array& args = Array::Handle(Array::New(4));
  // Token position travels as a Smi so Dart code can decode it.
  const Smi& pos = Smi::Handle(Smi::New(token_pos.Serialize()));
  args.SetAt(0, pos);
  args.SetAt(1, src_value);
  args.SetAt(2, dst_type);
  args.SetAt(3, dst_name);

  const Library& libcore = Library::Handle(Library::CoreLibrary());
  const Class& cls =
      Class::Handle(libcore.LookupClassAllowPrivate(Symbols::TypeError()));
  const auto& error = cls.EnsureIsFinalized(Thread::Current());
  ASSERT(error == Error::null());
  const Function& throwNew =
      Function::Handle(cls.LookupFunctionAllowPrivate(Symbols::ThrowNew()));
  return DartEntry::InvokeFunction(throwNew, args);
}
4516
// Presumably Class::InvokeGetter (signature line elided in this listing):
// reads a static field / invokes a static getter named by the caller on this
// class. Returns the value, Object::sentinel() when nothing was found and
// [throw_nsm_if_absent] is false, or the result of throwing NoSuchMethodError.
4518 bool throw_nsm_if_absent,
4519 bool respect_reflectable,
4520 bool check_is_entrypoint) const {
4521 Thread* thread = Thread::Current();
4522 Zone* zone = thread->zone();
4523
4525
4526 // Note static fields do not have implicit getters.
4527 const Field& field = Field::Handle(zone, LookupStaticField(getter_name));
4528
// Entry-point checks (bodies elided here) guard native/embedder access.
4529 if (!field.IsNull() && check_is_entrypoint) {
4531 }
4532
// Uninitialized fields are read through their (lazy-initializing) getter.
4533 if (field.IsNull() || field.IsUninitialized()) {
4534 const String& internal_getter_name =
4535 String::Handle(zone, Field::GetterName(getter_name));
4536 Function& getter =
4537 Function::Handle(zone, LookupStaticFunction(internal_getter_name));
4538
4539 if (field.IsNull() && !getter.IsNull() && check_is_entrypoint) {
4541 }
4542
4543 if (getter.IsNull() || (respect_reflectable && !getter.is_reflectable())) {
4544 if (getter.IsNull()) {
4545 getter = LookupStaticFunction(getter_name);
4546 if (!getter.IsNull()) {
4547 if (check_is_entrypoint) {
4549 }
4550 if (getter.SafeToClosurize()) {
4551 // Looking for a getter but found a regular method: closurize it.
4552 const Function& closure_function =
4554 return closure_function.ImplicitStaticClosure();
4555 }
4556 }
4557 }
4558 if (throw_nsm_if_absent) {
4559 return ThrowNoSuchMethod(
4560 AbstractType::Handle(zone, RareType()), getter_name,
4561 Object::null_array(), Object::null_array(),
4563 }
4564 // Fall through case: Indicate that we didn't find any function or field
4565 // using a special null instance. This is different from a field being
4566 // null. Callers make sure that this null does not leak into Dartland.
4567 return Object::sentinel().ptr();
4568 }
4569
4570 // Invoke the getter and return the result.
4571 return DartEntry::InvokeFunction(getter, Object::empty_array());
4572 }
4573
// Initialized static field: return its cached value directly.
4574 return field.StaticValue();
4575}
4576
// Presumably Class::InvokeSetter (signature line elided in this listing):
// stores [value] into a static field or invokes a static setter. On a failed
// runtime type check it throws a TypeError; on a missing/forbidden setter it
// throws NoSuchMethodError (throw expressions partially elided below).
4578 const Instance& value,
4579 bool respect_reflectable,
4580 bool check_is_entrypoint) const {
4581 Thread* thread = Thread::Current();
4582 Zone* zone = thread->zone();
4583
4585
4586 // Check for real fields and user-defined setters.
4587 const Field& field = Field::Handle(zone, LookupStaticField(setter_name));
4588 const String& internal_setter_name =
4589 String::Handle(zone, Field::SetterName(setter_name));
4590
4591 if (!field.IsNull() && check_is_entrypoint) {
4593 }
4594
4595 AbstractType& parameter_type = AbstractType::Handle(zone);
// No field of that name: route the store through a user-defined setter.
4596 if (field.IsNull()) {
4597 const Function& setter =
4598 Function::Handle(zone, LookupStaticFunction(internal_setter_name));
4599 if (!setter.IsNull() && check_is_entrypoint) {
4601 }
4602 const int kNumArgs = 1;
4603 const Array& args = Array::Handle(zone, Array::New(kNumArgs));
4604 args.SetAt(0, value);
4605 if (setter.IsNull() || (respect_reflectable && !setter.is_reflectable())) {
4607 internal_setter_name, args, Object::null_array(),
4610 }
// Enforce the setter's declared parameter type before invoking it.
4611 parameter_type = setter.ParameterTypeAt(0);
4612 if (!value.RuntimeTypeIsSubtypeOf(parameter_type,
4613 Object::null_type_arguments(),
4614 Object::null_type_arguments())) {
4615 const String& argument_name =
4616 String::Handle(zone, setter.ParameterNameAt(0));
4617 return ThrowTypeError(setter.token_pos(), value, parameter_type,
4618 argument_name);
4619 }
4620 // Invoke the setter and return the result.
4621 return DartEntry::InvokeFunction(setter, args);
4622 }
4623
// Final or non-reflectable fields cannot be assigned: throw NSM.
4624 if (field.is_final() || (respect_reflectable && !field.is_reflectable())) {
4625 const int kNumArgs = 1;
4626 const Array& args = Array::Handle(zone, Array::New(kNumArgs));
4627 args.SetAt(0, value);
4629 internal_setter_name, args, Object::null_array(),
4632 }
4633
// Enforce the field's declared type before storing.
4634 parameter_type = field.type();
4635 if (!value.RuntimeTypeIsSubtypeOf(parameter_type,
4636 Object::null_type_arguments(),
4637 Object::null_type_arguments())) {
4638 const String& argument_name = String::Handle(zone, field.name());
4639 return ThrowTypeError(field.token_pos(), value, parameter_type,
4640 argument_name);
4641 }
4642 field.SetStaticValue(value);
4643 return value.ptr();
4644}
4645
4646// Creates a new array of boxed arguments suitable for invoking the callable
4647// from the original boxed arguments for a static call. Also sets the contents
4648// of the handle pointed to by [callable_args_desc_array_out] to an appropriate
4649// arguments descriptor array for the new arguments.
4650//
4651// Assumes [arg_names] are consistent with [static_args_descriptor].
// (Function name/return-type line elided in this listing; call sites use it
// as CreateCallableArgumentsFromStatic(zone, receiver, args, names, desc).)
4653 Zone* zone,
4654 const Instance& receiver,
4655 const Array& static_args,
4656 const Array& arg_names,
4657 const ArgumentsDescriptor& static_args_descriptor) {
4658 const intptr_t num_static_type_args = static_args_descriptor.TypeArgsLen();
4659 const intptr_t num_static_args = static_args_descriptor.Count();
4660 // Double check that the static args descriptor expects boxed arguments
4661 // and the static args descriptor is consistent with the static arguments.
4662 ASSERT_EQUAL(static_args_descriptor.Size(), num_static_args);
4663 ASSERT_EQUAL(static_args.Length(),
4664 num_static_args + (num_static_type_args > 0 ? 1 : 0));
4665 // Add an additional slot to store the callable as the receiver.
4666 const auto& callable_args =
4667 Array::Handle(zone, Array::New(static_args.Length() + 1));
4668 const intptr_t first_arg_index = static_args_descriptor.FirstArgIndex();
4669 auto& temp = Object::Handle(zone);
4670 // Copy the static args into the corresponding slots of the callable args.
// Slot 0 holds the type-arguments vector when one is present.
4671 if (num_static_type_args > 0) {
4672 temp = static_args.At(0);
4673 callable_args.SetAt(0, temp);
4674 }
// Every positional/named argument shifts right by one to make room for the
// receiver slot.
4675 for (intptr_t i = first_arg_index; i < static_args.Length(); i++) {
4676 temp = static_args.At(i);
4677 callable_args.SetAt(i + 1, temp);
4678 }
4679 // Set the receiver slot in the callable args.
4680 callable_args.SetAt(first_arg_index, receiver);
4681 return callable_args.ptr();
4682}
4683
// Presumably Class::Invoke (signature line elided in this listing): invokes a
// static method named [function_name] with [args]/[arg_names]. When no method
// exists it falls back to "getter returning a callable" semantics; when the
// lookup or argument validation fails it throws NoSuchMethodError.
4685 const Array& args,
4686 const Array& arg_names,
4687 bool respect_reflectable,
4688 bool check_is_entrypoint) const {
4689 Thread* thread = Thread::Current();
4690 Zone* zone = thread->zone();
4692
4693 // We don't pass any explicit type arguments, which will be understood as
4694 // using dynamic for any function type arguments by lower layers.
4695 const int kTypeArgsLen = 0;
4696 const Array& args_descriptor_array = Array::Handle(
4697 zone, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(),
4698 arg_names, Heap::kNew));
4699 ArgumentsDescriptor args_descriptor(args_descriptor_array);
4700
4703
4704 if (!function.IsNull() && check_is_entrypoint) {
4705 CHECK_ERROR(function.VerifyCallEntryPoint());
4706 }
4707
4708 if (function.IsNull()) {
4709 // Didn't find a method: try to find a getter and invoke call on its result.
// Getter returning sentinel means "nothing found" (see InvokeGetter).
4710 const Object& getter_result = Object::Handle(
4711 zone, InvokeGetter(function_name, false, respect_reflectable,
4712 check_is_entrypoint));
4713 if (getter_result.ptr() != Object::sentinel().ptr()) {
4714 if (check_is_entrypoint) {
4716 }
// Re-descriptor with one extra slot: the callable becomes the receiver.
4717 const auto& call_args_descriptor_array = Array::Handle(
4718 zone, ArgumentsDescriptor::NewBoxed(args_descriptor.TypeArgsLen(),
4719 args_descriptor.Count() + 1,
4720 arg_names, Heap::kNew));
4721 const auto& call_args = Array::Handle(
4722 zone,
4723 CreateCallableArgumentsFromStatic(zone, Instance::Cast(getter_result),
4724 args, arg_names, args_descriptor));
4725 return DartEntry::InvokeClosure(thread, call_args,
4726 call_args_descriptor_array);
4727 }
4728 }
4729
4730 if (function.IsNull() ||
4731 !function.AreValidArguments(args_descriptor, nullptr) ||
4732 (respect_reflectable && !function.is_reflectable())) {
4733 return ThrowNoSuchMethod(
4734 AbstractType::Handle(zone, RareType()), function_name, args, arg_names,
4736 }
4737 // This is a static function, so we pass an empty instantiator tav.
4738 ASSERT(function.is_static());
4739 ObjectPtr type_error = function.DoArgumentTypesMatch(
4740 args, args_descriptor, Object::empty_type_arguments());
4741 if (type_error != Error::null()) {
4742 return type_error;
4743 }
4744 return DartEntry::InvokeFunction(function, args, args_descriptor_array);
4745}
4746
4747#if !defined(DART_PRECOMPILED_RUNTIME)
4748
// Loads the expression-evaluation function from [kernel_buffer] produced by
// the kernel isolate, scoped to [library_url] / [klass]. Returns the loaded
// Function, or an Error on malformed kernel / load failure. (Signature line
// and the kernel-program construction expression are elided in this listing.)
4750 Zone* zone,
4751 const ExternalTypedData& kernel_buffer,
4752 const String& library_url,
4753 const String& klass) {
4754 std::unique_ptr<kernel::Program> kernel_pgm =
4756
4757 if (kernel_pgm == nullptr) {
4758 return ApiError::New(String::Handle(
4759 zone, String::New("Kernel isolate returned ill-formed kernel.")));
4760 }
4761
4762 auto& result = Object::Handle(zone);
4763 {
4764 kernel::KernelLoader loader(kernel_pgm.get(),
4765 /*uri_to_source_table=*/nullptr);
4766 result = loader.LoadExpressionEvaluationFunction(library_url, klass);
// Release the kernel program eagerly; the loaded function no longer needs it.
4767 kernel_pgm.reset();
4768 }
4769 if (result.IsError()) return result.ptr();
4770 return Function::Cast(result).ptr();
4771}
4772
// Returns whether [eval_function] actually reads its receiver (`this`), by
// building its scopes from kernel and asking the parsed function.
// (Signature line elided in this listing; the ParsedFunction is zone-allocated
// via `new`, so no explicit delete is needed here.)
4774 Zone* zone,
4775 const Function& eval_function) {
4776 auto parsed_function = new ParsedFunction(
4777 thread, Function::ZoneHandle(zone, eval_function.ptr()));
4778 parsed_function->EnsureKernelScopes();
4779 return parsed_function->is_receiver_used();
4780}
4781
// Invokes [eval_function] with [arguments], prepending the type-arguments
// vector as an extra leading argument when any type arguments were supplied.
// (Signature line and part of the descriptor construction are elided in this
// listing.)
4783 Zone* zone,
4784 const Function& eval_function,
4785 const Array& type_definitions,
4786 const Array& arguments,
4787 const TypeArguments& type_arguments) {
4788 // type_arguments is null if all type arguments are dynamic.
4789 if (type_definitions.Length() == 0 || type_arguments.IsNull()) {
4790 return DartEntry::InvokeFunction(eval_function, arguments);
4791 }
4792
4793 intptr_t num_type_args = type_arguments.Length();
// Shift every argument right by one; slot 0 carries the type arguments.
4794 const auto& real_arguments =
4795 Array::Handle(zone, Array::New(arguments.Length() + 1));
4796 real_arguments.SetAt(0, type_arguments);
4797 Object& arg = Object::Handle(zone);
4798 for (intptr_t i = 0; i < arguments.Length(); ++i) {
4799 arg = arguments.At(i);
4800 real_arguments.SetAt(i + 1, arg);
4801 }
4802
4803 const Array& args_desc =
4805 num_type_args, arguments.Length(), Heap::kNew));
4806 return DartEntry::InvokeFunction(eval_function, real_arguments, args_desc);
4807}
4808
4809#endif // !defined(DART_PRECOMPILED_RUNTIME)
4810
// Presumably Library::EvaluateCompiledExpression (signature line elided):
// forwards evaluation to this library's top-level pseudo-class.
4812 const ExternalTypedData& kernel_buffer,
4813 const Array& type_definitions,
4814 const Array& arguments,
4815 const TypeArguments& type_arguments) const {
4816 const auto& klass = Class::Handle(toplevel_class());
4817 return klass.EvaluateCompiledExpression(kernel_buffer, type_definitions,
4818 arguments, type_arguments);
4819}
4820
// Presumably Class::EvaluateCompiledExpression (signature line elided):
// delegates to the static Instance helper with a null receiver, since there
// is no instance in a class/static evaluation context. The `return` keyword
// preceding the call is on an elided line.
4822 const ExternalTypedData& kernel_buffer,
4823 const Array& type_definitions,
4824 const Array& arguments,
4825 const TypeArguments& type_arguments) const {
4826 auto thread = Thread::Current();
4827 const auto& library = Library::Handle(thread->zone(), this->library());
4829 thread, Instance::null_object(), library, *this, kernel_buffer,
4830 type_definitions, arguments, type_arguments);
4831}
4832
// Presumably Instance::EvaluateCompiledExpression (signature line elided):
// evaluates with *this as the receiver, resolving the library from [klass].
4834 const Class& klass,
4835 const ExternalTypedData& kernel_buffer,
4836 const Array& type_definitions,
4837 const Array& arguments,
4838 const TypeArguments& type_arguments) const {
4839 auto thread = Thread::Current();
4840 auto zone = thread->zone();
4841 const auto& library = Library::Handle(zone, klass.library());
4842 return Instance::EvaluateCompiledExpression(thread, *this, library, klass,
4843 kernel_buffer, type_definitions,
4844 arguments, type_arguments);
4845}
4846
// Static worker for expression evaluation (first signature line elided in
// this listing): loads the evaluation function from [kernel_buffer], then
// invokes it with [arguments], prepending [receiver] for instance contexts.
// Returns the evaluation result, an Error, or Object::optimized_out() when
// the required receiver is unavailable.
4848 Thread* thread,
4849 const Object& receiver,
4850 const Library& library,
4851 const Class& klass,
4852 const ExternalTypedData& kernel_buffer,
4853 const Array& type_definitions,
4854 const Array& arguments,
4855 const TypeArguments& type_arguments) {
4856 auto zone = Thread::Current()->zone();
4857#if defined(DART_PRECOMPILED_RUNTIME)
// AOT has no kernel loader/compiler, so evaluation is unsupported.
4858 const auto& error_str = String::Handle(
4859 zone,
4860 String::New("Expression evaluation not available in precompiled mode."));
4861 return ApiError::New(error_str);
4862#else
// VM-internal classes (and TypeArguments) are not valid evaluation contexts.
4863 if (IsInternalOnlyClassId(klass.id()) || (klass.id() == kTypeArgumentsCid)) {
4864 const auto& exception = Instance::Handle(
4865 zone, String::New("Expressions can be evaluated only with regular Dart "
4866 "instances/classes."));
4867 return UnhandledException::New(exception, StackTrace::null_instance());
4868 }
4869
4870 const auto& url = String::Handle(zone, library.url());
4871 const auto& klass_name = klass.IsTopLevel()
4872 ? String::null_string()
4873 : String::Handle(zone, klass.UserVisibleName());
4874
4875 const auto& result = Object::Handle(
4876 zone,
4877 LoadExpressionEvaluationFunction(zone, kernel_buffer, url, klass_name));
4878 if (result.IsError()) return result.ptr();
4879
4880 const auto& eval_function = Function::Cast(result);
4881
4882#if defined(DEBUG)
// Callers must not pass optimized-out sentinels as arguments.
4883 for (intptr_t i = 0; i < arguments.Length(); ++i) {
4884 ASSERT(arguments.At(i) != Object::optimized_out().ptr());
4885 }
4886#endif // defined(DEBUG)
4887
4888 auto& all_arguments = Array::Handle(zone, arguments.ptr());
4889 if (!eval_function.is_static()) {
4890 // `this` may be optimized out (e.g. not accessible from breakpoint due to
4891 // not being captured by closure). We allow this as long as the evaluation
4892 // function doesn't actually need `this`.
4893 if (receiver.IsNull() || receiver.ptr() == Object::optimized_out().ptr()) {
4894 if (EvaluationFunctionNeedsReceiver(thread, zone, eval_function)) {
4895 return Object::optimized_out().ptr();
4896 }
4897 }
4898
// Instance evaluation: prepend the receiver to the argument vector.
4899 all_arguments = Array::New(1 + arguments.Length());
4900 auto& param = PassiveObject::Handle();
4901 all_arguments.SetAt(0, receiver);
4902 for (intptr_t i = 0; i < arguments.Length(); i++) {
4903 param = arguments.At(i);
4904 all_arguments.SetAt(i + 1, param);
4905 }
4906 }
4907
4908 return EvaluateCompiledExpressionHelper(zone, eval_function, type_definitions,
4909 all_arguments, type_arguments);
4910#endif // !defined(DART_PRECOMPILED_RUNTIME)
4911}
4912
// Presumably Class::EnsureDeclarationLoaded (signature line elided): aborts
// with a fatal error if this class's declaration has not been loaded yet.
// In AOT all declarations are loaded at snapshot time, hence UNREACHABLE.
4914 if (!is_declaration_loaded()) {
4915#if defined(DART_PRECOMPILED_RUNTIME)
4916 UNREACHABLE();
4917#else
4918 FATAL("Unable to use class %s which is not loaded yet.", ToCString());
4919#endif
4920 }
4921}
4922
4923// Ensure that top level parsing of the class has been done.
// Returns Error::null() on success; finalizes the class under the program
// write lock if needed. The finalization call itself (line between the
// `const Error& error =` declaration and the null check) is elided in this
// listing — presumably ClassFinalizer; confirm against the full file.
4924ErrorPtr Class::EnsureIsFinalized(Thread* thread) const {
4925 ASSERT(!IsNull());
// Fast path: avoid taking the program lock when already finalized.
4926 if (is_finalized()) {
4927 return Error::null();
4928 }
4929#if defined(DART_PRECOMPILED_RUNTIME)
4930 UNREACHABLE();
4931 return Error::null();
4932#else
4933 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
// Re-check under the lock: another thread may have finalized concurrently.
4934 if (is_finalized()) {
4935 return Error::null();
4936 }
4937 LeaveCompilerScope ncs(thread);
4938 ASSERT(thread != nullptr);
4939 const Error& error =
4941 if (!error.IsNull()) {
4942 ASSERT(thread == Thread::Current());
// With a long-jump base installed, propagate the error non-locally.
4943 if (thread->long_jump_base() != nullptr) {
4945 UNREACHABLE();
4946 }
4947 }
4948 return error.ptr();
4949#endif // defined(DART_PRECOMPILED_RUNTIME)
4950}
4951
4952// Ensure that code outdated by finalized class is cleaned up, new instance of
4953// this class is ready to be allocated.
// Presumably Class::EnsureIsAllocateFinalized (signature line elided).
// Returns Error::null() on success; the JIT-only allocate-finalization call
// before the final return is also elided in this listing.
4955 ASSERT(!IsNull());
// Fast path before taking the program write lock.
4956 if (is_allocate_finalized()) {
4957 return Error::null();
4958 }
4959 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
4960 if (is_allocate_finalized()) {
4961 return Error::null();
4962 }
4963 ASSERT(thread != nullptr);
4964 Error& error = Error::Handle(thread->zone(), EnsureIsFinalized(thread));
4965 if (!error.IsNull()) {
4966 ASSERT(thread == Thread::Current());
4967 if (thread->long_jump_base() != nullptr) {
4969 UNREACHABLE();
4970 }
4971 }
4972 // May be allocate-finalized recursively during EnsureIsFinalized.
4973 if (is_allocate_finalized()) {
4974 return Error::null();
4975 }
4976#if defined(DART_PRECOMPILED_RUNTIME)
4977 UNREACHABLE();
4978#else
4980#endif // defined(DART_PRECOMPILED_RUNTIME)
4981 return error.ptr();
4982}
4983
// Replaces this class's field array wholesale with [value]. Debug builds
// verify every element is an original field owned by this class.
4984void Class::SetFields(const Array& value) const {
4985 ASSERT(!value.IsNull());
4986#if defined(DEBUG)
4987 Thread* thread = Thread::Current();
4989 // Verify that all the fields in the array have this class as owner.
4990 Field& field = Field::Handle();
4991 intptr_t len = value.Length();
4992 for (intptr_t i = 0; i < len; i++) {
4993 field ^= value.At(i);
4994 ASSERT(field.IsOriginal());
4995 ASSERT(field.Owner() == ptr());
4996 }
4997#endif
4998 // The value of static fields is already initialized to null.
4999 set_fields(value);
5000}
5001
// Appends a single [field] to this class's field array by growing the array
// by one slot and re-installing it via SetFields (which re-validates).
5002void Class::AddField(const Field& field) const {
5003#if defined(DEBUG)
5004 Thread* thread = Thread::Current();
5006#endif
5007 const Array& arr = Array::Handle(fields());
5008 const Array& new_arr = Array::Handle(Array::Grow(arr, arr.Length() + 1));
5009 new_arr.SetAt(arr.Length(), field);
5010 SetFields(new_arr);
5011}
5012
// Appends all of [new_fields] to this class's field array in one grow step;
// no-op when the list is empty. The grown array lives in old space.
5013void Class::AddFields(const GrowableArray<const Field*>& new_fields) const {
5014#if defined(DEBUG)
5015 Thread* thread = Thread::Current();
5017#endif
5018 const intptr_t num_new_fields = new_fields.length();
5019 if (num_new_fields == 0) return;
5020 const Array& arr = Array::Handle(fields());
5021 const intptr_t num_old_fields = arr.Length();
5022 const Array& new_arr = Array::Handle(
5023 Array::Grow(arr, num_old_fields + num_new_fields, Heap::kOld));
5024 for (intptr_t i = 0; i < num_new_fields; i++) {
5025 new_arr.SetAt(i + num_old_fields, *new_fields.At(i));
5026 }
5027 SetFields(new_arr);
5028}
5029
// Returns the index of [needle] in this class's field array (identity
// comparison), or -1 when absent or when finalization fails. Uses the
// thread's reusable Array/Field handles (acquisition lines elided here).
5030intptr_t Class::FindFieldIndex(const Field& needle) const {
5031 Thread* thread = Thread::Current();
5032 if (EnsureIsFinalized(thread) != Error::null()) {
5033 return -1;
5034 }
5037 Array& fields = thread->ArrayHandle();
5038 Field& field = thread->FieldHandle();
5039 fields = this->fields();
5040 ASSERT(!fields.IsNull());
5041 for (intptr_t i = 0, n = fields.Length(); i < n; ++i) {
5042 field ^= fields.At(i);
5043 if (needle.ptr() == field.ptr()) {
5044 return i;
5045 }
5046 }
5047 // Not found.
5048 return -1;
5049}
5050
5051FieldPtr Class::FieldFromIndex(intptr_t idx) const {
5052 Array& fields = Array::Handle(this->fields());
5053 if ((idx < 0) || (idx >= fields.Length())) {
5054 return Field::null();
5055 }
5056 return Field::RawCast(fields.At(idx));
5057}
5058
// Presumably Class::InjectCIDFields (signature line elided): for the
// dart:_internal ClassID class only, injects one const static int field per
// predefined class id (cidArray, cidInt8List, ...). Returns false for every
// other class, true after injecting.
5060 if (library() != Library::InternalLibrary() ||
5061 Name() != Symbols::ClassID().ptr()) {
5062 return false;
5063 }
5064
5065 auto thread = Thread::Current();
5066 auto isolate_group = thread->isolate_group();
5067 auto zone = thread->zone();
5068 Field& field = Field::Handle(zone);
5069 Smi& value = Smi::Handle(zone);
5070 String& field_name = String::Handle(zone);
5071
5072 // clang-format off
// Table of (field name, class id) pairs expanded from the class-id macros.
5073 static const struct {
5074 const char* const field_name;
5075 const intptr_t cid;
5076 } cid_fields[] = {
5077#define CLASS_LIST_WITH_NULL(V) \
5078 V(Null) \
5079 CLASS_LIST_NO_OBJECT(V)
5080#define ADD_SET_FIELD(clazz) \
5081 {"cid" #clazz, k##clazz##Cid},
5083#undef ADD_SET_FIELD
5084#undef CLASS_LIST_WITH_NULL
5085#define ADD_SET_FIELD(clazz) \
5086 {"cid" #clazz, kTypedData##clazz##Cid}, \
5087 {"cid" #clazz "View", kTypedData##clazz##ViewCid}, \
5088 {"cidExternal" #clazz, kExternalTypedData##clazz##Cid}, \
5089 {"cidUnmodifiable" #clazz "View", kUnmodifiableTypedData##clazz##ViewCid}, \
5090 CLASS_LIST_TYPED_DATA(ADD_SET_FIELD)
5091#undef ADD_SET_FIELD
5092 // Used in const hashing to determine whether we're dealing with a
5093 // user-defined const. See lib/_internal/vm/lib/compact_hash.dart.
5094 {"numPredefinedCids", kNumPredefinedCids},
5095 };
5096 // clang-format on
5097
// Each injected field is a const, non-reflectable static int.
5098 const AbstractType& field_type = Type::Handle(zone, Type::IntType());
5099 for (size_t i = 0; i < ARRAY_SIZE(cid_fields); i++) {
5100 field_name = Symbols::New(thread, cid_fields[i].field_name);
5101 field = Field::New(field_name, /* is_static = */ true,
5102 /* is_final = */ false,
5103 /* is_const = */ true,
5104 /* is_reflectable = */ false,
5105 /* is_late = */ false, *this, field_type,
5107 value = Smi::New(cid_fields[i].cid);
5108 isolate_group->RegisterStaticField(field, value);
5109 AddField(field);
5110 }
5111
5112 return true;
5113}
5114
// Shared allocator for raw Class objects: allocates in old space, verifies
// the fake-instance vtable, and initializes sizes/offsets/state to their
// "not yet finalized" defaults. Several setter lines (instance size, type
// arguments offset, next field offset) are partially elided in this listing.
5115template <class FakeInstance, class TargetFakeInstance>
5116ClassPtr Class::NewCommon(intptr_t index) {
5118 const auto& result = Class::Handle(Object::Allocate<Class>(Heap::kOld));
5119 // Here kIllegalCid means not-yet-assigned.
5120 Object::VerifyBuiltinVtable<FakeInstance>(index == kIllegalCid ? kInstanceCid
5121 : index);
5122 NOT_IN_PRECOMPILED(result.set_token_pos(TokenPosition::kNoSource));
5123 NOT_IN_PRECOMPILED(result.set_end_token_pos(TokenPosition::kNoSource));
5124 const intptr_t host_instance_size = FakeInstance::InstanceSize();
5126 TargetFakeInstance::InstanceSize());
5128 result.set_type_arguments_field_offset_in_words(kNoTypeArguments,
5130 const intptr_t host_next_field_offset = FakeInstance::NextFieldOffset();
5131 const intptr_t target_next_field_offset =
5132 TargetFakeInstance::NextFieldOffset();
5133 result.set_next_field_offset(host_next_field_offset,
5135 result.set_id(index);
5136 NOT_IN_PRECOMPILED(result.set_implementor_cid(kIllegalCid));
5137 result.set_num_type_arguments_unsafe(kUnknownNumTypeArguments);
5138 result.set_num_native_fields(0);
5139 result.set_state_bits(0);
5140 NOT_IN_PRECOMPILED(result.set_kernel_offset(0));
5141 result.InitEmptyFields();
5142 return result.ptr();
5143}
5144
5145template <class FakeInstance, class TargetFakeInstance>
5146ClassPtr Class::New(intptr_t index,
5147 IsolateGroup* isolate_group,
5148 bool register_class,
5149 bool is_abstract) {
5150 Class& result =
5151 Class::Handle(NewCommon<FakeInstance, TargetFakeInstance>(index));
5152 if (is_abstract) {
5153 result.set_is_abstract();
5154 }
5155 if (register_class) {
5156 isolate_group->class_table()->Register(result);
5157 }
5158 return result.ptr();
5159}
5160
// Creates an unfinalized user-visible class with the given name/script in
// [lib]. Instance size stays 0 until finalization computes it. The class-
// table registration statement inside the register_class branch is elided
// in this listing.
5161ClassPtr Class::New(const Library& lib,
5162 const String& name,
5163 const Script& script,
5164 TokenPosition token_pos,
5165 bool register_class) {
5166 Class& result =
5167 Class::Handle(NewCommon<Instance, RTN::Instance>(kIllegalCid));
5168 result.set_library(lib);
5169 result.set_name(name);
5170 result.set_script(script);
5171 NOT_IN_PRECOMPILED(result.set_token_pos(token_pos));
5172
5173 // The size gets initialized to 0. Once the class gets finalized the class
5174 // finalizer will set the correct size.
5175 ASSERT(!result.is_finalized() && !result.is_prefinalized());
5176 result.set_instance_size_in_words(0, 0);
5177
5178 if (register_class) {
5180 }
5181 return result.ptr();
5182}
5183
// Convenience factory for a plain instance class with a not-yet-assigned
// class id; trailing arguments of the Class::New call are on an elided line.
5184ClassPtr Class::NewInstanceClass() {
5185 return Class::New<Instance, RTN::Instance>(kIllegalCid,
5187}
5188
// Creates a native-wrapper class named [name] in [library] with
// [field_count] native fields, pre-finalized for allocation. Returns
// Class::null() when a class of that name already exists. Many size/state
// setter lines are elided in this listing.
5189ClassPtr Class::NewNativeWrapper(const Library& library,
5190 const String& name,
5191 int field_count) {
5192 Class& cls = Class::Handle(library.LookupClass(name));
5193 if (cls.IsNull()) {
5194 cls = New(library, name, Script::Handle(), TokenPosition::kNoSource);
5195 cls.SetFields(Object::empty_array());
5196 cls.SetFunctions(Object::empty_array());
5197 // Set super class to Object.
5199 // Compute instance size. First word contains a pointer to a properly
5200 // sized typed array once the first native field has been set.
5201 const intptr_t host_instance_size =
5203#if defined(DART_PRECOMPILER)
5204 const intptr_t target_instance_size =
5207#else
5208 const intptr_t target_instance_size =
5210#endif
5215 cls.set_num_native_fields(field_count);
5217 // The signature of the constructor yet to be added to this class will have
5218 // to be finalized explicitly, since the class is prematurely marked as
5219 // 'is_allocate_finalized' and finalization of member types will not occur.
// Native wrappers hold raw native state, so they must never cross isolates.
5223 cls.set_is_isolate_unsendable(true);
5224 NOT_IN_PRECOMPILED(cls.set_implementor_cid(kDynamicCid));
5225 library.AddClass(cls);
5226 return cls.ptr();
5227 } else {
5228 return Class::null();
5229 }
5230}
5231
// Creates and registers the VM class for one of the string representations
// (one-byte or two-byte). The per-representation instance-size computations
// are on elided lines. Strings are deeply immutable by construction.
5232ClassPtr Class::NewStringClass(intptr_t class_id, IsolateGroup* isolate_group) {
5234 if (class_id == kOneByteStringCid) {
5238 } else {
5239 ASSERT(class_id == kTwoByteStringCid);
5243 }
5244 Class& result = Class::Handle(New<String, RTN::String>(
5245 class_id, isolate_group, /*register_class=*/false));
5247
5249 const intptr_t target_next_field_offset = RTN::String::NextFieldOffset();
5250 result.set_next_field_offset(host_next_field_offset,
5252 result.set_is_prefinalized();
5253 ASSERT(IsDeeplyImmutableCid(class_id));
5254 result.set_is_deeply_immutable(true);
5255 isolate_group->class_table()->Register(result);
5256 return result.ptr();
5257}
5258
// Creates and registers the VM class for an internal typed-data cid
// (Int8List, Float64List, ...). Size computations are on elided lines.
5259ClassPtr Class::NewTypedDataClass(intptr_t class_id,
5260 IsolateGroup* isolate_group) {
5261 ASSERT(IsTypedDataClassId(class_id));
5263 const intptr_t target_instance_size =
5265 Class& result = Class::Handle(New<TypedData, RTN::TypedData>(
5266 class_id, isolate_group, /*register_class=*/false));
5268
5270 const intptr_t host_next_field_offset = RTN::TypedData::NextFieldOffset();
5271 result.set_next_field_offset(host_next_field_offset,
5273 result.set_is_prefinalized();
5274 isolate_group->class_table()->Register(result);
5275 return result.ptr();
5276}
5277
// Creates and registers the VM class for a typed-data *view* cid. Instance
// size computations are on elided lines.
5278ClassPtr Class::NewTypedDataViewClass(intptr_t class_id,
5279 IsolateGroup* isolate_group) {
5280 ASSERT(IsTypedDataViewClassId(class_id));
5284 Class& result = Class::Handle(New<TypedDataView, RTN::TypedDataView>(
5285 class_id, isolate_group, /*register_class=*/false));
5287
5289 const intptr_t target_next_field_offset =
5290 RTN::TypedDataView::NextFieldOffset();
5291 result.set_next_field_offset(host_next_field_offset,
5293 result.set_is_prefinalized();
5294 isolate_group->class_table()->Register(result);
5295 return result.ptr();
5296}
5297
// Presumably Class::NewUnmodifiableTypedDataViewClass (signature line
// elided): same layout as a typed-data view, registered under the
// unmodifiable-view cid. Assertion and size lines are elided in this listing.
5299 IsolateGroup* isolate_group) {
5304 Class& result = Class::Handle(New<TypedDataView, RTN::TypedDataView>(
5305 class_id, isolate_group, /*register_class=*/false));
5307
5309 const intptr_t target_next_field_offset =
5310 RTN::TypedDataView::NextFieldOffset();
5311 result.set_next_field_offset(host_next_field_offset,
5313 result.set_is_prefinalized();
5314 isolate_group->class_table()->Register(result);
5315 return result.ptr();
5316}
5317
// Creates and registers the VM class for an external typed-data cid (data
// buffer lives outside the Dart heap). Assertion and size lines are elided.
5318ClassPtr Class::NewExternalTypedDataClass(intptr_t class_id,
5319 IsolateGroup* isolate_group) {
5324 Class& result = Class::Handle(New<ExternalTypedData, RTN::ExternalTypedData>(
5325 class_id, isolate_group, /*register_class=*/false));
5326
5328 const intptr_t target_next_field_offset =
5329 RTN::ExternalTypedData::NextFieldOffset();
5331 result.set_next_field_offset(host_next_field_offset,
5333 result.set_is_prefinalized();
5334 isolate_group->class_table()->Register(result);
5335 return result.ptr();
5336}
5337
// Creates and registers the VM class for dart:ffi Pointer, including its
// type-arguments field offset (Pointer<T> carries one type argument). Size
// computation lines are elided in this listing.
5338ClassPtr Class::NewPointerClass(intptr_t class_id,
5339 IsolateGroup* isolate_group) {
5340 ASSERT(IsFfiPointerClassId(class_id));
5342 intptr_t target_instance_size =
5344 Class& result = Class::Handle(New<Pointer, RTN::Pointer>(
5345 class_id, isolate_group, /*register_class=*/false));
5347 result.set_type_arguments_field_offset(Pointer::type_arguments_offset(),
5349
5351 const intptr_t target_next_field_offset = RTN::Pointer::NextFieldOffset();
5352
5353 result.set_next_field_offset(host_next_field_offset,
5355 result.set_is_prefinalized();
5356 isolate_group->class_table()->Register(result);
5357 return result.ptr();
5358}
5359
5360void Class::set_name(const String& value) const {
5361 ASSERT(untag()->name() == String::null());
5362 ASSERT(value.IsSymbol());
5363 untag()->set_name(value.ptr());
5364#if !defined(PRODUCT)
5365 if (untag()->user_name() == String::null()) {
5366 // TODO(johnmccutchan): Eagerly set user name for VM isolate classes,
5367 // lazily set user name for the other classes.
5368 // Generate and set user_name.
5369 const String& user_name = String::Handle(
5370 Symbols::New(Thread::Current(), GenerateUserVisibleName()));
5371 set_user_name(user_name);
5372 }
5373#endif // !defined(PRODUCT)
5374}
5375
5376#if !defined(PRODUCT)
5377void Class::set_user_name(const String& value) const {
5378 untag()->set_user_name(value.ptr());
5379}
5380#endif // !defined(PRODUCT)
5381
5382#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
// Presumably Class::SetUserVisibleNameInClassTable (signature line elided):
// publishes this class's user-visible name into the shared class table as a
// malloc'd C string, first-writer-wins. The line producing `name` between
// the null check and the store is elided — confirm against the full file.
5384 IsolateGroup* isolate_group = IsolateGroup::Current();
5385 auto class_table = isolate_group->class_table();
5386 if (class_table->UserVisibleNameFor(id()) == nullptr) {
5388 class_table->SetUserVisibleNameFor(id(), name.ToMallocCString());
5389 }
5390}
5391#endif // !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
5392
5393const char* Class::GenerateUserVisibleName() const {
5394 if (FLAG_show_internal_names) {
5395 return String::Handle(Name()).ToCString();
5396 }
5397 switch (id()) {
5398 case kFloat32x4Cid:
5399 return Symbols::Float32x4().ToCString();
5400 case kFloat64x2Cid:
5401 return Symbols::Float64x2().ToCString();
5402 case kInt32x4Cid:
5403 return Symbols::Int32x4().ToCString();
5404 case kTypedDataInt8ArrayCid:
5405 case kExternalTypedDataInt8ArrayCid:
5406 return Symbols::Int8List().ToCString();
5407 case kTypedDataUint8ArrayCid:
5408 case kExternalTypedDataUint8ArrayCid:
5409 return Symbols::Uint8List().ToCString();
5410 case kTypedDataUint8ClampedArrayCid:
5411 case kExternalTypedDataUint8ClampedArrayCid:
5412 return Symbols::Uint8ClampedList().ToCString();
5413 case kTypedDataInt16ArrayCid:
5414 case kExternalTypedDataInt16ArrayCid:
5415 return Symbols::Int16List().ToCString();
5416 case kTypedDataUint16ArrayCid:
5417 case kExternalTypedDataUint16ArrayCid:
5418 return Symbols::Uint16List().ToCString();
5419 case kTypedDataInt32ArrayCid:
5420 case kExternalTypedDataInt32ArrayCid:
5421 return Symbols::Int32List().ToCString();
5422 case kTypedDataUint32ArrayCid:
5423 case kExternalTypedDataUint32ArrayCid:
5424 return Symbols::Uint32List().ToCString();
5425 case kTypedDataInt64ArrayCid:
5426 case kExternalTypedDataInt64ArrayCid:
5427 return Symbols::Int64List().ToCString();
5428 case kTypedDataUint64ArrayCid:
5429 case kExternalTypedDataUint64ArrayCid:
5430 return Symbols::Uint64List().ToCString();
5431 case kTypedDataInt32x4ArrayCid:
5432 case kExternalTypedDataInt32x4ArrayCid:
5433 return Symbols::Int32x4List().ToCString();
5434 case kTypedDataFloat32x4ArrayCid:
5435 case kExternalTypedDataFloat32x4ArrayCid:
5436 return Symbols::Float32x4List().ToCString();
5437 case kTypedDataFloat64x2ArrayCid:
5438 case kExternalTypedDataFloat64x2ArrayCid:
5439 return Symbols::Float64x2List().ToCString();
5440 case kTypedDataFloat32ArrayCid:
5441 case kExternalTypedDataFloat32ArrayCid:
5442 return Symbols::Float32List().ToCString();
5443 case kTypedDataFloat64ArrayCid:
5444 case kExternalTypedDataFloat64ArrayCid:
5445 return Symbols::Float64List().ToCString();
5446 case kPointerCid:
5447 return Symbols::FfiPointer().ToCString();
5448 case kDynamicLibraryCid:
5449 return Symbols::FfiDynamicLibrary().ToCString();
5450 case kNullCid:
5451 return Symbols::Null().ToCString();
5452 case kDynamicCid:
5453 return Symbols::Dynamic().ToCString();
5454 case kVoidCid:
5455 return Symbols::Void().ToCString();
5456 case kNeverCid:
5457 return Symbols::Never().ToCString();
5458 case kClassCid:
5459 return Symbols::Class().ToCString();
5460 case kTypeParametersCid:
5461 return Symbols::TypeParameters().ToCString();
5462 case kTypeArgumentsCid:
5463 return Symbols::TypeArguments().ToCString();
5464 case kPatchClassCid:
5465 return Symbols::PatchClass().ToCString();
5466 case kFunctionCid:
5467 return Symbols::Function().ToCString();
5468 case kClosureDataCid:
5469 return Symbols::ClosureData().ToCString();
5470 case kFfiTrampolineDataCid:
5471 return Symbols::FfiTrampolineData().ToCString();
5472 case kFieldCid:
5473 return Symbols::Field().ToCString();
5474 case kScriptCid:
5475 return Symbols::Script().ToCString();
5476 case kLibraryCid:
5477 return Symbols::Library().ToCString();
5478 case kLibraryPrefixCid:
5479 return Symbols::LibraryPrefix().ToCString();
5480 case kNamespaceCid:
5481 return Symbols::Namespace().ToCString();
5482 case kKernelProgramInfoCid:
5483 return Symbols::KernelProgramInfo().ToCString();
5484 case kWeakSerializationReferenceCid:
5485 return Symbols::WeakSerializationReference().ToCString();
5486 case kWeakArrayCid:
5487 return Symbols::WeakArray().ToCString();
5488 case kCodeCid:
5489 return Symbols::Code().ToCString();
5490 case kInstructionsCid:
5491 return Symbols::Instructions().ToCString();
5492 case kInstructionsSectionCid:
5493 return Symbols::InstructionsSection().ToCString();
5494 case kInstructionsTableCid:
5495 return Symbols::InstructionsTable().ToCString();
5496 case kObjectPoolCid:
5497 return Symbols::ObjectPool().ToCString();
5498 case kCodeSourceMapCid:
5499 return Symbols::CodeSourceMap().ToCString();
5500 case kPcDescriptorsCid:
5501 return Symbols::PcDescriptors().ToCString();
5502 case kCompressedStackMapsCid:
5503 return Symbols::CompressedStackMaps().ToCString();
5504 case kLocalVarDescriptorsCid:
5505 return Symbols::LocalVarDescriptors().ToCString();
5506 case kExceptionHandlersCid:
5507 return Symbols::ExceptionHandlers().ToCString();
5508 case kContextCid:
5509 return Symbols::Context().ToCString();
5510 case kContextScopeCid:
5511 return Symbols::ContextScope().ToCString();
5512 case kSentinelCid:
5513 return Symbols::Sentinel().ToCString();
5514 case kSingleTargetCacheCid:
5515 return Symbols::SingleTargetCache().ToCString();
5516 case kICDataCid:
5517 return Symbols::ICData().ToCString();
5518 case kMegamorphicCacheCid:
5519 return Symbols::MegamorphicCache().ToCString();
5520 case kSubtypeTestCacheCid:
5521 return Symbols::SubtypeTestCache().ToCString();
5522 case kLoadingUnitCid:
5523 return Symbols::LoadingUnit().ToCString();
5524 case kApiErrorCid:
5525 return Symbols::ApiError().ToCString();
5526 case kLanguageErrorCid:
5527 return Symbols::LanguageError().ToCString();
5528 case kUnhandledExceptionCid:
5529 return Symbols::UnhandledException().ToCString();
5530 case kUnwindErrorCid:
5531 return Symbols::UnwindError().ToCString();
5532 case kIntegerCid:
5533 case kSmiCid:
5534 case kMintCid:
5535 return Symbols::Int().ToCString();
5536 case kDoubleCid:
5537 return Symbols::Double().ToCString();
5538 case kOneByteStringCid:
5539 case kTwoByteStringCid:
5540 return Symbols::_String().ToCString();
5541 case kArrayCid:
5542 case kImmutableArrayCid:
5543 case kGrowableObjectArrayCid:
5544 return Symbols::List().ToCString();
5545 }
5546 String& name = String::Handle(Name());
5548 if (name.ptr() == Symbols::_Future().ptr() &&
5550 return Symbols::Future().ToCString();
5551 }
5552 return name.ToCString();
5553}
5554
// Sets the script from which this class was declared.
void Class::set_script(const Script& value) const {
  untag()->set_script(value.ptr());
}
5558
5559#if !defined(DART_PRECOMPILED_RUNTIME)
5560KernelProgramInfoPtr Class::KernelProgramInfo() const {
5561 const auto& lib = Library::Handle(library());
5562 return lib.kernel_program_info();
5563}
5564
5567 StoreNonPointer(&untag()->token_pos_, token_pos);
5568}
5569
5572 StoreNonPointer(&untag()->end_token_pos_, token_pos);
5573}
5574
5575void Class::set_implementor_cid(intptr_t value) const {
5577 StoreNonPointer(&untag()->implementor_cid_, value);
5578}
5579
5580bool Class::NoteImplementor(const Class& implementor) const {
5581 ASSERT(!implementor.is_abstract());
5582 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5583 if (implementor_cid() == kDynamicCid) {
5584 return false;
5585 } else if (implementor_cid() == implementor.id()) {
5586 return false;
5587 } else if (implementor_cid() == kIllegalCid) {
5588 set_implementor_cid(implementor.id());
5589 return true; // None -> One
5590 } else {
5591 set_implementor_cid(kDynamicCid);
5592 return true; // One -> Many
5593 }
5594}
5595#endif // !defined(DART_PRECOMPILED_RUNTIME)
5596
// Hash of this class, delegating to the static variant on the raw pointer.
uint32_t Class::Hash() const {
  return Class::Hash(ptr());
}
// Static variant: hashes the class's name symbol of raw class [obj].
uint32_t Class::Hash(ClassPtr obj) {
  return String::HashRawSymbol(obj.untag()->name());
}
5603
5605#if !defined(DART_PRECOMPILED_RUNTIME)
5607 *this);
5608#else
5609 return 0;
5610#endif // !defined(DART_PRECOMPILED_RUNTIME)
5611}
5612
5614 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5616}
5617
5619 set_state_bits(ImplementedBit::update(true, state_bits()));
5620}
5621
5623 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5624 set_state_bits(AbstractBit::update(true, state_bits()));
5625}
5626
5628 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5630}
5631
5634 set_state_bits(ClassLoadingBits::update(UntaggedClass::kDeclarationLoaded,
5635 state_bits()));
5636}
5637
5639 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5642 set_state_bits(
5643 ClassLoadingBits::update(UntaggedClass::kTypeFinalized, state_bits()));
5644}
5645
5647 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5649}
5650
5652 set_state_bits(SynthesizedClassBit::update(true, state_bits()));
5653}
5654
5656 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5657 set_state_bits(EnumBit::update(true, state_bits()));
5658}
5659
5661 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5662 set_state_bits(ConstBit::update(true, state_bits()));
5663}
5664
5666 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5667 set_state_bits(TransformedMixinApplicationBit::update(true, state_bits()));
5668}
5669
5671 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5672 set_state_bits(SealedBit::update(true, state_bits()));
5673}
5674
5676 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5677 set_state_bits(MixinClassBit::update(true, state_bits()));
5678}
5679
5681 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5682 set_state_bits(BaseClassBit::update(true, state_bits()));
5683}
5684
5686 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5687 set_state_bits(InterfaceClassBit::update(true, state_bits()));
5688}
5689
5691 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5692 set_state_bits(FinalBit::update(true, state_bits()));
5693}
5694
5696 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5697 set_state_bits(FieldsMarkedNullableBit::update(true, state_bits()));
5698}
5699
5701 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5703}
5704
5706 set_state_bits(IsAllocatedBit::update(value, state_bits()));
5707}
5708
// Updates the IsLoaded state bit. Requires the program lock to be held
// for writing.
void Class::set_is_loaded(bool value) const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  set_state_bits(IsLoadedBit::update(value, state_bits()));
}
5713
5715 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5716 ASSERT(!is_finalized());
5718}
5719
5721 set_state_bits(
5722 ClassFinalizedBits::update(UntaggedClass::kFinalized, state_bits()));
5723}
5724
5726 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5728 set_state_bits(ClassFinalizedBits::update(UntaggedClass::kAllocateFinalized,
5729 state_bits()));
5730}
5731
5733 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5734 ASSERT(!is_finalized());
5735 set_state_bits(
5736 ClassFinalizedBits::update(UntaggedClass::kPreFinalized, state_bits()));
5737}
5738
5740 ASSERT(!value.IsNull());
5741 untag()->set_interfaces(value.ptr());
5742}
5743
5744#if !defined(DART_PRECOMPILED_RUNTIME)
5745
5746void Class::AddDirectImplementor(const Class& implementor,
5747 bool is_mixin) const {
5748 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5750 ASSERT(!implementor.IsNull());
5753 if (direct_implementors.IsNull()) {
5755 untag()->set_direct_implementors(direct_implementors.ptr());
5756 }
5757#if defined(DEBUG)
5758 // Verify that the same class is not added twice.
5759 // The only exception is mixins: when mixin application is transformed,
5760 // mixin is added to the end of interfaces list and may be duplicated:
5761 // class X = A with B implements B;
5762 // This is rare and harmless.
5763 if (!is_mixin) {
5764 for (intptr_t i = 0; i < direct_implementors.Length(); i++) {
5765 ASSERT(direct_implementors.At(i) != implementor.ptr());
5766 }
5767 }
5768#endif
5769 direct_implementors.Add(implementor, Heap::kOld);
5770}
5771
5773 const GrowableObjectArray& implementors) const {
5774 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5775 untag()->set_direct_implementors(implementors.ptr());
5776}
5777
5778void Class::AddDirectSubclass(const Class& subclass) const {
5779 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5780 ASSERT(!subclass.IsNull());
5781 ASSERT(subclass.SuperClass() == ptr());
5782 // Do not keep track of the direct subclasses of class Object.
5786 if (direct_subclasses.IsNull()) {
5788 untag()->set_direct_subclasses(direct_subclasses.ptr());
5789 }
5790#if defined(DEBUG)
5791 // Verify that the same class is not added twice.
5792 for (intptr_t i = 0; i < direct_subclasses.Length(); i++) {
5793 ASSERT(direct_subclasses.At(i) != subclass.ptr());
5794 }
5795#endif
5796 direct_subclasses.Add(subclass, Heap::kOld);
5797}
5798
5800 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5801 untag()->set_direct_subclasses(subclasses.ptr());
5802}
5803
5804#endif // !defined(DART_PRECOMPILED_RUNTIME)
5805
// Returns the table backing this class's set of canonical constants
// (Array::null() if no constant has been canonicalized yet).
ArrayPtr Class::constants() const {
  return untag()->constants();
}
5809
5811 untag()->set_constants(value.ptr());
5812}
5813
// Caches [value] as this class's declaration type. Set at most once (during
// the class's own finalization); [value] must be canonical and in old space.
void Class::set_declaration_type(const Type& value) const {
  ASSERT(id() != kDynamicCid && id() != kVoidCid);
  ASSERT(!value.IsNull() && value.IsCanonical() && value.IsOld());
  ASSERT((declaration_type() == Object::null()) ||
         (declaration_type() == value.ptr()));  // Set during own finalization.
  // Since DeclarationType is used as the runtime type of instances of a
  // non-generic class, its nullability must be kNonNullable.
  // The exception is DeclarationType of Null which is kNullable.
  ASSERT(value.type_class_id() != kNullCid || value.IsNullable());
  ASSERT(value.type_class_id() == kNullCid || value.IsNonNullable());
  // Release store pairs with readers that check declaration_type() without
  // holding the program lock (see DeclarationType()).
  untag()->set_declaration_type<std::memory_order_release>(value.ptr());
}
5826
5827TypePtr Class::DeclarationType() const {
5829 if (IsNullClass()) {
5830 return Type::NullType();
5831 }
5832 if (IsDynamicClass()) {
5833 return Type::DynamicType();
5834 }
5835 if (IsVoidClass()) {
5836 return Type::VoidType();
5837 }
5838 if (declaration_type() != Type::null()) {
5839 return declaration_type();
5840 }
5841 {
5842 auto thread = Thread::Current();
5843 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
5844 if (declaration_type() != Type::null()) {
5845 return declaration_type();
5846 }
5847 // For efficiency, the runtimeType intrinsic returns the type cached by
5848 // DeclarationType without checking its nullability. Therefore, we
5849 // consistently cache the kNonNullable version of the type.
5850 // The exception is type Null which is stored as kNullable.
5851 TypeArguments& type_args = TypeArguments::Handle();
5852 const intptr_t num_type_params = NumTypeParameters();
5853 if (num_type_params > 0) {
5854 type_args = TypeArguments::New(num_type_params);
5855 TypeParameter& type_param = TypeParameter::Handle();
5856 for (intptr_t i = 0; i < num_type_params; i++) {
5857 type_param = TypeParameterAt(i);
5858 type_args.SetTypeAt(i, type_param);
5859 }
5860 }
5861 Type& type =
5864 set_declaration_type(type);
5865 return type.ptr();
5866 }
5867}
5868
5869#if !defined(DART_PRECOMPILED_RUNTIME)
5871 // Never clear the stub as it may still be a target, but will be GC-d if
5872 // not referenced.
5873 ASSERT(!value.IsNull());
5875 untag()->set_allocation_stub(value.ptr());
5876}
5877#endif // !defined(DART_PRECOMPILED_RUNTIME)
5878
5880#if defined(DART_PRECOMPILED_RUNTIME)
5881 UNREACHABLE();
5882#else
5883 {
5884 const Code& existing_stub = Code::Handle(allocation_stub());
5885 if (existing_stub.IsNull()) {
5886 return;
5887 }
5888 }
5889 auto thread = Thread::Current();
5890 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
5891 const Code& existing_stub = Code::Handle(allocation_stub());
5892 if (existing_stub.IsNull()) {
5893 return;
5894 }
5895 ASSERT(!existing_stub.IsDisabled());
5896 // Change the stub so that the next caller will regenerate the stub.
5897 existing_stub.DisableStubCode(NumTypeParameters() > 0);
5898 // Disassociate the existing stub from class.
5899 untag()->set_allocation_stub(Code::null());
5900#endif // defined(DART_PRECOMPILED_RUNTIME)
5901}
5902
5904 return ptr() == Type::Handle(Type::DartFunctionType()).type_class();
5905}
5906
5908 // Looking up future_class in the object store would not work, because
5909 // this function is called during class finalization, before the object store
5910 // field would be initialized by InitKnownObjects().
5911 return (Name() == Symbols::Future().ptr()) &&
5913}
5914
5915// Checks if type T0 is a subtype of type T1.
5916// Type T0 is specified by class 'cls' parameterized with 'type_arguments' and
5917// by 'nullability', and type T1 is specified by 'other' and must have a type
5918// class.
5919// [type_arguments] should be a flattened instance type arguments vector.
5921 const TypeArguments& type_arguments,
5922 Nullability nullability,
5923 const AbstractType& other,
5924 Heap::Space space,
5925 FunctionTypeMapping* function_type_equivalence) {
5926 TRACE_TYPE_CHECKS_VERBOSE(" Class::IsSubtypeOf(%s %s, %s)\n",
5927 cls.ToCString(), type_arguments.ToCString(),
5928 other.ToCString());
5929 // This function does not support Null, Never, dynamic, or void as type T0.
5930 classid_t this_cid = cls.id();
5931 ASSERT(this_cid != kNullCid && this_cid != kNeverCid &&
5932 this_cid != kDynamicCid && this_cid != kVoidCid);
5933 ASSERT(type_arguments.IsNull() ||
5934 (type_arguments.Length() >= cls.NumTypeArguments()));
5935 // Type T1 must have a type class (e.g. not a type param or a function type).
5936 ASSERT(other.HasTypeClass());
5937 const classid_t other_cid = other.type_class_id();
5938 if (other_cid == kDynamicCid || other_cid == kVoidCid) {
5939 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (right is top)\n");
5940 return true;
5941 }
5942 // Left nullable:
5943 // if T0 is S0? then:
5944 // T0 <: T1 iff S0 <: T1 and Null <: T1
5945 if ((nullability == Nullability::kNullable) &&
5947 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (nullability)\n");
5948 return false;
5949 }
5950
5951 // Right Object.
5952 if (other_cid == kObjectCid) {
5953 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (right is Object)\n");
5954 return true;
5955 }
5956
5957 Thread* thread = Thread::Current();
5958 Zone* zone = thread->zone();
5959 const Class& other_class = Class::Handle(zone, other.type_class());
5960 const TypeArguments& other_type_arguments =
5961 TypeArguments::Handle(zone, other.arguments());
5962 // Use the 'this_class' object as if it was the receiver of this method, but
5963 // instead of recursing, reset it to the super class and loop.
5964 Class& this_class = Class::Handle(zone, cls.ptr());
5965 while (true) {
5966 // Apply additional subtyping rules if T0 or T1 are 'FutureOr'.
5967
5968 // Left FutureOr:
5969 // if T0 is FutureOr<S0> then:
5970 // T0 <: T1 iff Future<S0> <: T1 and S0 <: T1
5971 if (this_cid == kFutureOrCid) {
5972 // Check Future<S0> <: T1.
5973 ObjectStore* object_store = IsolateGroup::Current()->object_store();
5974 const Class& future_class =
5975 Class::Handle(zone, object_store->future_class());
5976 ASSERT(!future_class.IsNull() && future_class.NumTypeParameters() == 1 &&
5977 this_class.NumTypeParameters() == 1);
5978 ASSERT(type_arguments.IsNull() || type_arguments.Length() >= 1);
5979 if (Class::IsSubtypeOf(future_class, type_arguments,
5980 Nullability::kNonNullable, other, space,
5981 function_type_equivalence)) {
5982 // Check S0 <: T1.
5983 const AbstractType& type_arg =
5984 AbstractType::Handle(zone, type_arguments.TypeAtNullSafe(0));
5985 if (type_arg.IsSubtypeOf(other, space, function_type_equivalence)) {
5986 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (left is FutureOr)\n");
5987 return true;
5988 }
5989 }
5990 }
5991
5992 // Right FutureOr:
5993 // if T1 is FutureOr<S1> then:
5994 // T0 <: T1 iff any of the following hold:
5995 // either T0 <: Future<S1>
5996 // or T0 <: S1
5997 // or T0 is X0 and X0 has bound S0 and S0 <: T1 (checked elsewhere)
5998 if (other_cid == kFutureOrCid) {
5999 const AbstractType& other_type_arg =
6000 AbstractType::Handle(zone, other_type_arguments.TypeAtNullSafe(0));
6001 // Check if S1 is a top type.
6002 if (other_type_arg.IsTopTypeForSubtyping()) {
6004 " - result: true (right is FutureOr top)\n");
6005 return true;
6006 }
6007 // Check T0 <: Future<S1> when T0 is Future<S0>.
6008 if (this_class.IsFutureClass()) {
6009 const AbstractType& type_arg =
6010 AbstractType::Handle(zone, type_arguments.TypeAtNullSafe(0));
6011 // If T0 is Future<S0>, then T0 <: Future<S1>, iff S0 <: S1.
6012 if (type_arg.IsSubtypeOf(other_type_arg, space,
6013 function_type_equivalence)) {
6015 " - result: true (left is Future, right is FutureOr)\n");
6016 return true;
6017 }
6018 }
6019 // Check T0 <: Future<S1> when T0 is FutureOr<S0> is already done.
6020 // Check T0 <: S1.
6021 if (other_type_arg.HasTypeClass() &&
6022 Class::IsSubtypeOf(this_class, type_arguments, nullability,
6023 other_type_arg, space,
6024 function_type_equivalence)) {
6026 " - result: true (right is FutureOr, subtype of arg)\n");
6027 return true;
6028 }
6029 }
6030
6031 // Check for reflexivity.
6032 if (this_class.ptr() == other_class.ptr()) {
6033 const intptr_t num_type_params = this_class.NumTypeParameters();
6034 if (num_type_params == 0) {
6036 " - result: true (same non-generic class)\n");
6037 return true;
6038 }
6039 // Check for covariance.
6040 if (other_type_arguments.IsNull()) {
6042 " - result: true (same class, dynamic type args)\n");
6043 return true;
6044 }
6045 const intptr_t num_type_args = this_class.NumTypeArguments();
6046 const intptr_t from_index = num_type_args - num_type_params;
6047 ASSERT(other_type_arguments.Length() == num_type_params);
6049 AbstractType& other_type = AbstractType::Handle(zone);
6050 for (intptr_t i = 0; i < num_type_params; ++i) {
6051 type = type_arguments.TypeAtNullSafe(from_index + i);
6052 other_type = other_type_arguments.TypeAt(i);
6053 ASSERT(!type.IsNull() && !other_type.IsNull());
6054 if (!type.IsSubtypeOf(other_type, space, function_type_equivalence)) {
6056 " - result: false (same class, type args mismatch)\n");
6057 return false;
6058 }
6059 }
6061 " - result: true (same class, matching type args)\n");
6062 return true;
6063 }
6064
6065 // _Closure <: Function
6066 if (this_class.IsClosureClass() && other_class.IsDartFunctionClass()) {
6068 " - result: true (left is closure, right is Function)\n");
6069 return true;
6070 }
6071
6072 // Check for 'direct super type' specified in the implements clause
6073 // and check for transitivity at the same time.
6074 Array& interfaces = Array::Handle(zone, this_class.interfaces());
6075 Type& interface = Type::Handle(zone);
6076 Class& interface_class = Class::Handle(zone);
6077 TypeArguments& interface_args = TypeArguments::Handle(zone);
6078 for (intptr_t i = 0; i < interfaces.Length(); i++) {
6079 interface ^= interfaces.At(i);
6080 ASSERT(interface.IsFinalized());
6081 interface_class = interface.type_class();
6082 interface_args = interface.arguments();
6083 if (!interface_args.IsNull() && !interface_args.IsInstantiated()) {
6084 // This type class implements an interface that is parameterized with
6085 // generic type(s), e.g. it implements List<T>.
6086 // The uninstantiated type T must be instantiated using the type
6087 // parameters of this type before performing the type test.
6088 // The type arguments of this type that are referred to by the type
6089 // parameters of the interface are at the end of the type vector,
6090 // after the type arguments of the super type of this type.
6091 // The index of the type parameters is adjusted upon finalization.
6092 interface_args = interface_args.InstantiateFrom(
6093 type_arguments, Object::null_type_arguments(), kNoneFree, space);
6094 }
6095 interface_args = interface_class.GetInstanceTypeArguments(
6096 thread, interface_args, /*canonicalize=*/false);
6097 // In Dart 2, implementing Function has no meaning.
6098 // TODO(regis): Can we encounter and skip Object as well?
6099 if (interface_class.IsDartFunctionClass()) {
6100 continue;
6101 }
6102 if (Class::IsSubtypeOf(interface_class, interface_args,
6103 Nullability::kNonNullable, other, space,
6104 function_type_equivalence)) {
6105 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (interface found)\n");
6106 return true;
6107 }
6108 }
6109 // "Recurse" up the class hierarchy until we have reached the top.
6110 this_class = this_class.SuperClass();
6111 if (this_class.IsNull()) {
6112 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (supertype not found)\n");
6113 return false;
6114 }
6115 this_cid = this_class.id();
6116 }
6117 UNREACHABLE();
6118 return false;
6119}
6120
// Returns whether this is the synthetic top-level class, identified by its
// name being the TopLevel symbol.
bool Class::IsTopLevel() const {
  return Name() == Symbols::TopLevel().ptr();
}
6124
6125bool Class::IsPrivate() const {
6127}
6128
6130 return LookupFunctionReadLocked(name, kInstance);
6131}
6132
6134 return LookupFunctionAllowPrivate(name, kInstance);
6135}
6136
6137FunctionPtr Class::LookupStaticFunction(const String& name) const {
6138 Thread* thread = Thread::Current();
6139 SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
6140 return LookupFunctionReadLocked(name, kStatic);
6141}
6142
6144 return LookupFunctionAllowPrivate(name, kStatic);
6145}
6146
6147FunctionPtr Class::LookupConstructor(const String& name) const {
6148 Thread* thread = Thread::Current();
6149 SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
6150 return LookupFunctionReadLocked(name, kConstructor);
6151}
6152
6154 return LookupFunctionAllowPrivate(name, kConstructor);
6155}
6156
6157FunctionPtr Class::LookupFactory(const String& name) const {
6158 Thread* thread = Thread::Current();
6159 SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
6160 return LookupFunctionReadLocked(name, kFactory);
6161}
6162
6164 return LookupFunctionAllowPrivate(name, kFactory);
6165}
6166
6169}
6170
6173}
6174
6175// Returns true if 'prefix' and 'accessor_name' match 'name'.
6177 const char* prefix,
6178 intptr_t prefix_length,
6179 const String& accessor_name) {
6180 intptr_t name_len = name.Length();
6181 intptr_t accessor_name_len = accessor_name.Length();
6182
6183 if (name_len != (accessor_name_len + prefix_length)) {
6184 return false;
6185 }
6186 for (intptr_t i = 0; i < prefix_length; i++) {
6187 if (name.CharAt(i) != prefix[i]) {
6188 return false;
6189 }
6190 }
6191 for (intptr_t i = 0, j = prefix_length; i < accessor_name_len; i++, j++) {
6192 if (name.CharAt(j) != accessor_name.CharAt(i)) {
6193 return false;
6194 }
6195 }
6196 return true;
6197}
6198
6199FunctionPtr Class::CheckFunctionType(const Function& func, MemberKind kind) {
6200 if ((kind == kInstance) || (kind == kInstanceAllowAbstract)) {
6201 if (func.IsDynamicFunction(kind == kInstanceAllowAbstract)) {
6202 return func.ptr();
6203 }
6204 } else if (kind == kStatic) {
6205 if (func.IsStaticFunction()) {
6206 return func.ptr();
6207 }
6208 } else if (kind == kConstructor) {
6209 if (func.IsGenerativeConstructor()) {
6210 ASSERT(!func.is_static());
6211 return func.ptr();
6212 }
6213 } else if (kind == kFactory) {
6214 if (func.IsFactory()) {
6215 ASSERT(func.is_static());
6216 return func.ptr();
6217 }
6218 } else if (kind == kAny) {
6219 return func.ptr();
6220 }
6221 return Function::null();
6222}
6223
6224FunctionPtr Class::LookupFunctionReadLocked(const String& name,
6225 MemberKind kind) const {
6226 ASSERT(!IsNull());
6227 Thread* thread = Thread::Current();
6229 // Caller needs to ensure they grab program_lock because this method
6230 // can be invoked with either ReadRwLock or WriteRwLock.
6231#if defined(DEBUG)
6232 ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadReader());
6233#endif
6237 Array& funcs = thread->ArrayHandle();
6238 funcs = functions();
6239 const intptr_t len = funcs.Length();
6240 Function& function = thread->FunctionHandle();
6241 if (len >= kFunctionLookupHashThreshold) {
6242 // TODO(dartbug.com/36097): We require currently a read lock in the resolver
6243 // to avoid read-write race access to this hash table.
6244 // If we want to increase resolver speed by avoiding the need for read lock,
6245 // we could make change this hash table to be lock-free for the reader.
6246 const Array& hash_table =
6247 Array::Handle(thread->zone(), untag()->functions_hash_table());
6248 if (!hash_table.IsNull()) {
6249 ClassFunctionsSet set(hash_table.ptr());
6251 function ^= set.GetOrNull(FunctionName(name, &(thread->StringHandle())));
6252 // No mutations.
6253 ASSERT(set.Release().ptr() == hash_table.ptr());
6254 return function.IsNull() ? Function::null()
6255 : CheckFunctionType(function, kind);
6256 }
6257 }
6258 if (name.IsSymbol()) {
6259 // Quick Symbol compare.
6260 NoSafepointScope no_safepoint;
6261 for (intptr_t i = 0; i < len; i++) {
6262 function ^= funcs.At(i);
6263 if (function.name() == name.ptr()) {
6264 return CheckFunctionType(function, kind);
6265 }
6266 }
6267 } else {
6269 String& function_name = thread->StringHandle();
6270 for (intptr_t i = 0; i < len; i++) {
6271 function ^= funcs.At(i);
6272 function_name = function.name();
6273 if (function_name.Equals(name)) {
6274 return CheckFunctionType(function, kind);
6275 }
6276 }
6277 }
6278 // No function found.
6279 return Function::null();
6280}
6281
6282FunctionPtr Class::LookupFunctionAllowPrivate(const String& name,
6283 MemberKind kind) const {
6284 ASSERT(!IsNull());
6285 Thread* thread = Thread::Current();
6287 SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
6291 Array& funcs = thread->ArrayHandle();
6292 funcs = current_functions();
6293 ASSERT(!funcs.IsNull());
6294 const intptr_t len = funcs.Length();
6295 Function& function = thread->FunctionHandle();
6296 String& function_name = thread->StringHandle();
6297 for (intptr_t i = 0; i < len; i++) {
6298 function ^= funcs.At(i);
6299 function_name = function.name();
6301 return CheckFunctionType(function, kind);
6302 }
6303 }
6304 // No function found.
6305 return Function::null();
6306}
6307
// Looks up the accessor for [name] carrying the getter name prefix.
FunctionPtr Class::LookupGetterFunction(const String& name) const {
  return LookupAccessorFunction(kGetterPrefix, kGetterPrefixLength, name);
}
6311
// Looks up the accessor for [name] carrying the setter name prefix.
FunctionPtr Class::LookupSetterFunction(const String& name) const {
  return LookupAccessorFunction(kSetterPrefix, kSetterPrefixLength, name);
}
6315
6316FunctionPtr Class::LookupAccessorFunction(const char* prefix,
6317 intptr_t prefix_length,
6318 const String& name) const {
6319 ASSERT(!IsNull());
6320 Thread* thread = Thread::Current();
6321 if (EnsureIsFinalized(thread) != Error::null()) {
6322 return Function::null();
6323 }
6327 Array& funcs = thread->ArrayHandle();
6328 funcs = current_functions();
6329 intptr_t len = funcs.Length();
6330 Function& function = thread->FunctionHandle();
6331 String& function_name = thread->StringHandle();
6332 for (intptr_t i = 0; i < len; i++) {
6333 function ^= funcs.At(i);
6334 function_name = function.name();
6335 if (MatchesAccessorName(function_name, prefix, prefix_length, name)) {
6336 return function.ptr();
6337 }
6338 }
6339
6340 // No function found.
6341 return Function::null();
6342}
6343
6345 return LookupField(name, kInstance);
6346}
6347
// Looks up a static field [name] declared in this class.
FieldPtr Class::LookupStaticField(const String& name) const {
  return LookupField(name, kStatic);
}
6351
// Looks up a field [name] of any kind (instance or static).
FieldPtr Class::LookupField(const String& name) const {
  return LookupField(name, kAny);
}
6355
6356FieldPtr Class::LookupField(const String& name, MemberKind kind) const {
6357 ASSERT(!IsNull());
6358 Thread* thread = Thread::Current();
6359 if (EnsureIsFinalized(thread) != Error::null()) {
6360 return Field::null();
6361 }
6365 Array& flds = thread->ArrayHandle();
6366 flds = fields();
6367 ASSERT(!flds.IsNull());
6368 intptr_t len = flds.Length();
6369 Field& field = thread->FieldHandle();
6370 if (name.IsSymbol()) {
6371 // Use fast raw pointer string compare for symbols.
6372 for (intptr_t i = 0; i < len; i++) {
6373 field ^= flds.At(i);
6374 if (name.ptr() == field.name()) {
6375 if (kind == kInstance) {
6376 return field.is_static() ? Field::null() : field.ptr();
6377 } else if (kind == kStatic) {
6378 return field.is_static() ? field.ptr() : Field::null();
6379 }
6380 ASSERT(kind == kAny);
6381 return field.ptr();
6382 }
6383 }
6384 } else {
6385 String& field_name = thread->StringHandle();
6386 for (intptr_t i = 0; i < len; i++) {
6387 field ^= flds.At(i);
6388 field_name = field.name();
6389 if (name.Equals(field_name)) {
6390 if (kind == kInstance) {
6391 return field.is_static() ? Field::null() : field.ptr();
6392 } else if (kind == kStatic) {
6393 return field.is_static() ? field.ptr() : Field::null();
6394 }
6395 ASSERT(kind == kAny);
6396 return field.ptr();
6397 }
6398 }
6399 }
6400 return Field::null();
6401}
6402
6404 bool instance_only) const {
6405 ASSERT(!IsNull());
6406 // Use slow string compare, ignoring privacy name mangling.
6407 Thread* thread = Thread::Current();
6408 if (EnsureIsFinalized(thread) != Error::null()) {
6409 return Field::null();
6410 }
6414 Array& flds = thread->ArrayHandle();
6415 flds = fields();
6416 ASSERT(!flds.IsNull());
6417 intptr_t len = flds.Length();
6418 Field& field = thread->FieldHandle();
6419 String& field_name = thread->StringHandle();
6420 for (intptr_t i = 0; i < len; i++) {
6421 field ^= flds.At(i);
6422 field_name = field.name();
6423 if (field.is_static() && instance_only) {
6424 // If we only care about instance fields, skip statics.
6425 continue;
6426 }
6427 if (String::EqualsIgnoringPrivateKey(field_name, name)) {
6428 return field.ptr();
6429 }
6430 }
6431 return Field::null();
6432}
6433
6436 if (!field.IsNull() && !field.is_static()) {
6437 return field.ptr();
6438 }
6439 return Field::null();
6440}
6441
6444 if (!field.IsNull() && field.is_static()) {
6445 return field.ptr();
6446 }
6447 return Field::null();
6448}
6449
6450const char* Class::ToCString() const {
6451 NoSafepointScope no_safepoint;
6452 const Library& lib = Library::Handle(library());
6453 const char* library_name = lib.IsNull() ? "" : lib.ToCString();
6454 const char* class_name = String::Handle(Name()).ToCString();
6455 return OS::SCreate(Thread::Current()->zone(), "%s Class: %s", library_name,
6456 class_name);
6457}
6458
6459// Thomas Wang, Integer Hash Functions.
6460// https://gist.github.com/badboy/6267743
6461// "64 bit to 32 bit Hash Functions"
6462static uword Hash64To32(uint64_t v) {
6463 v = ~v + (v << 18);
6464 v = v ^ (v >> 31);
6465 v = v * 21;
6466 v = v ^ (v >> 11);
6467 v = v + (v << 6);
6468 v = v ^ (v >> 22);
6469 return static_cast<uint32_t>(v);
6470}
6471
6473 const Instance& value) const {
6474 ASSERT(this->ptr() == value.clazz());
6476 Instance& canonical_value = Instance::Handle(zone);
6477 if (this->constants() != Array::null()) {
6479 canonical_value ^= constants.GetOrNull(CanonicalInstanceKey(value));
6480 this->set_constants(constants.Release());
6481 }
6482 return canonical_value.ptr();
6483}
6484
6486 const Instance& constant) const {
6487 ASSERT(constant.IsCanonical());
6488 ASSERT(this->ptr() == constant.clazz());
6489 Instance& canonical_value = Instance::Handle(zone);
6490 if (this->constants() == Array::null()) {
6492 HashTables::New<CanonicalInstancesSet>(128, Heap::kOld));
6493 canonical_value ^= constants.InsertNewOrGet(CanonicalInstanceKey(constant));
6494 this->set_constants(constants.Release());
6495 } else {
6497 this->constants());
6498 canonical_value ^= constants.InsertNewOrGet(CanonicalInstanceKey(constant));
6499 this->set_constants(constants.Release());
6500 }
6501 return canonical_value.ptr();
6502}
6503
6504// Scoped mapping FunctionType -> FunctionType.
6505// Used for tracking and updating nested generic function types
6506// and their type parameters.
6508 public:
6510 FunctionTypeMapping** mapping,
6511 const FunctionType& from,
6512 const FunctionType& to)
6513 : zone_(zone), parent_(*mapping), from_(from), to_(to) {
6514 // Add self to the linked list.
6515 *mapping = this;
6516 }
6517
6518 const FunctionType* Find(const Object& from) const {
6519 if (!from.IsFunctionType()) {
6520 return nullptr;
6521 }
6522 for (const FunctionTypeMapping* scope = this; scope != nullptr;
6523 scope = scope->parent_) {
6524 if (scope->from_.ptr() == from.ptr()) {
6525 return &(scope->to_);
6526 }
6527 }
6528 return nullptr;
6529 }
6530
6531 TypeParameterPtr MapTypeParameter(const TypeParameter& type_param) const {
6532 ASSERT(type_param.IsFunctionTypeParameter());
6533 const FunctionType* new_owner = Find(
6535 if (new_owner != nullptr) {
6536 return new_owner->TypeParameterAt(type_param.index() - type_param.base(),
6537 type_param.nullability());
6538 }
6539 return type_param.ptr();
6540 }
6541
6543 const TypeParameter& p2) const {
6544 auto& from = FunctionType::Handle(zone_, p1.parameterized_function_type());
6545 const FunctionType* to = Find(from);
6546 if (to != nullptr) {
6547 return to->ptr() == p2.parameterized_function_type();
6548 }
6549 from = p2.parameterized_function_type();
6550 to = Find(from);
6551 if (to != nullptr) {
6552 return to->ptr() == p1.parameterized_function_type();
6553 }
6554 return false;
6555 }
6556
6557 private:
6558 Zone* zone_;
6559 const FunctionTypeMapping* const parent_;
6560 const FunctionType& from_;
6561 const FunctionType& to_;
6562};
6563
6564intptr_t TypeParameters::Length() const {
6565 if (IsNull() || untag()->names() == Array::null()) return 0;
6566 return Smi::Value(untag()->names()->untag()->length());
6567}
6568
// Sets the array of type parameter names. The value must be non-null; the
// number of type parameters is derived from this array (see Length()).
void TypeParameters::set_names(const Array& value) const {
  ASSERT(!value.IsNull());
  untag()->set_names(value.ptr());
}
6573
6574StringPtr TypeParameters::NameAt(intptr_t index) const {
6575 const Array& names_array = Array::Handle(names());
6576 return String::RawCast(names_array.At(index));
6577}
6578
6579void TypeParameters::SetNameAt(intptr_t index, const String& value) const {
6580 const Array& names_array = Array::Handle(names());
6581 names_array.SetAt(index, value);
6582}
6583
// Sets the flags array (Smi bitfields, one bit per type parameter). A null
// value means all flags are cleared (see OptimizeFlags()).
void TypeParameters::set_flags(const Array& value) const {
  untag()->set_flags(value.ptr());
}
6587
// Sets the vector of upper bounds of the type parameters.
void TypeParameters::set_bounds(const TypeArguments& value) const {
  // A null value represents a vector of dynamic.
  untag()->set_bounds(value.ptr());
}
6592
6593AbstractTypePtr TypeParameters::BoundAt(intptr_t index) const {
6594 const TypeArguments& upper_bounds = TypeArguments::Handle(bounds());
6595 return upper_bounds.IsNull() ? Type::DynamicType()
6596 : upper_bounds.TypeAt(index);
6597}
6598
6599void TypeParameters::SetBoundAt(intptr_t index,
6600 const AbstractType& value) const {
6601 const TypeArguments& upper_bounds = TypeArguments::Handle(bounds());
6602 upper_bounds.SetTypeAt(index, value);
6603}
6604
6606 return bounds() == TypeArguments::null();
6607}
6608
// Sets the vector of default types of the type parameters.
void TypeParameters::set_defaults(const TypeArguments& value) const {
  // The null value represents a vector of dynamic.
  untag()->set_defaults(value.ptr());
}
6613
6614AbstractTypePtr TypeParameters::DefaultAt(intptr_t index) const {
6615 const TypeArguments& default_type_args = TypeArguments::Handle(defaults());
6616 return default_type_args.IsNull() ? Type::DynamicType()
6617 : default_type_args.TypeAt(index);
6618}
6619
6621 const AbstractType& value) const {
6622 const TypeArguments& default_type_args = TypeArguments::Handle(defaults());
6623 default_type_args.SetTypeAt(index, value);
6624}
6625
6627 return defaults() == TypeArguments::null();
6628}
6629
6630void TypeParameters::AllocateFlags(Heap::Space space) const {
6631 const intptr_t len = (Length() + kFlagsPerSmiMask) >> kFlagsPerSmiShift;
6632 const Array& flags_array = Array::Handle(Array::New(len, space));
6633 // Initialize flags to 0.
6634 const Smi& zero = Smi::Handle(Smi::New(0));
6635 for (intptr_t i = 0; i < len; i++) {
6636 flags_array.SetAt(i, zero);
6637 }
6638 set_flags(flags_array);
6639}
6640
6641void TypeParameters::OptimizeFlags() const {
6642 if (untag()->flags() == Array::null()) return; // Already optimized.
6643 const intptr_t len = (Length() + kFlagsPerSmiMask) >> kFlagsPerSmiShift;
6644 const Array& flags_array = Array::Handle(flags());
6645 const Smi& zero = Smi::Handle(Smi::New(0));
6646 for (intptr_t i = 0; i < len; i++) {
6647 if (flags_array.At(i) != zero.ptr()) return;
6648 }
6649 set_flags(Object::null_array());
6650}
6651
6653 if (untag()->flags() == Array::null()) return false;
6654 const intptr_t flag = Smi::Value(
6655 Smi::RawCast(Array::Handle(flags()).At(index >> kFlagsPerSmiShift)));
6656 return (flag >> (index & kFlagsPerSmiMask)) != 0;
6657}
6658
6660 bool value) const {
6661 const Array& flg = Array::Handle(flags());
6662 intptr_t flag = Smi::Value(Smi::RawCast(flg.At(index >> kFlagsPerSmiShift)));
6663 if (value) {
6664 flag |= 1 << (index % kFlagsPerSmiMask);
6665 } else {
6666 flag &= ~(1 << (index % kFlagsPerSmiMask));
6667 }
6669}
6670
6672 Zone* zone,
6673 bool are_class_type_parameters,
6674 intptr_t base,
6675 NameVisibility name_visibility,
6676 BaseTextBuffer* printer) const {
6677 String& name = String::Handle(zone);
6679 const intptr_t num_type_params = Length();
6680 for (intptr_t i = 0; i < num_type_params; i++) {
6681 if (are_class_type_parameters) {
6682 name = NameAt(i);
6683 printer->AddString(name.ToCString());
6684 } else {
6686 are_class_type_parameters, base, base + i));
6687 }
6688 if (FLAG_show_internal_names || !AllDynamicBounds()) {
6689 type = BoundAt(i);
6690 // Do not print default bound.
6691 if (!type.IsNull() && (FLAG_show_internal_names || !type.IsObjectType() ||
6692 type.IsNonNullable())) {
6693 printer->AddString(" extends ");
6694 type.PrintName(name_visibility, printer);
6695 if (FLAG_show_internal_names && !AllDynamicDefaults()) {
6696 type = DefaultAt(i);
6697 if (!type.IsNull() &&
6698 (FLAG_show_internal_names || !type.IsDynamicType())) {
6699 printer->AddString(" defaults to ");
6700 type.PrintName(name_visibility, printer);
6701 }
6702 }
6703 }
6704 }
6705 if (i != num_type_params - 1) {
6706 printer->AddString(", ");
6707 }
6708 }
6709}
6710
6711const char* TypeParameters::ToCString() const {
6712 if (IsNull()) {
6713 return "TypeParameters: null";
6714 }
6715 auto thread = Thread::Current();
6716 auto zone = thread->zone();
6717 ZoneTextBuffer buffer(zone);
6718 buffer.AddString("TypeParameters: ");
6719 Print(thread, zone, true, 0, kInternalName, &buffer);
6720 return buffer.buffer();
6721}
6722
6723TypeParametersPtr TypeParameters::New(Heap::Space space) {
6725 return Object::Allocate<TypeParameters>(space);
6726}
6727
6728TypeParametersPtr TypeParameters::New(intptr_t count, Heap::Space space) {
6729 const TypeParameters& result =
6731 // Create an [ Array ] of [ String ] objects to represent the names.
6732 // Create a [ TypeArguments ] vector representing the bounds.
6733 // Create a [ TypeArguments ] vector representing the defaults.
6734 // Create an [ Array ] of [ Smi] objects to represent the flags.
6735 const Array& names_array = Array::Handle(Array::New(count, space));
6736 result.set_names(names_array);
6737 TypeArguments& type_args = TypeArguments::Handle();
6738 type_args = TypeArguments::New(count, Heap::kNew); // Will get canonicalized.
6739 result.set_bounds(type_args);
6740 type_args = TypeArguments::New(count, Heap::kNew); // Will get canonicalized.
6741 result.set_defaults(type_args);
6742 result.AllocateFlags(space); // Will get optimized.
6743 return result.ptr();
6744}
6745
6746intptr_t TypeArguments::ComputeNullability() const {
6747 if (IsNull()) return 0;
6748 const intptr_t num_types = Length();
6749 intptr_t result = 0;
6750 if (num_types <= kNullabilityMaxTypes) {
6752 for (intptr_t i = 0; i < num_types; i++) {
6753 type = TypeAt(i);
6754 intptr_t type_bits = 0;
6755 if (!type.IsNull()) {
6756 switch (type.nullability()) {
6758 type_bits = kNullableBit;
6759 break;
6761 type_bits = kNonNullableBit;
6762 break;
6763 default:
6764 UNREACHABLE();
6765 }
6766 }
6767 result |= (type_bits << (i * kNullabilityBitsPerType));
6768 }
6769 }
6770 set_nullability(result);
6771 return result;
6772}
6773
// Caches the per-type nullability bit vector (see ComputeNullability()).
void TypeArguments::set_nullability(intptr_t value) const {
  untag()->set_nullability(Smi::New(value));
}
6777
6778uword TypeArguments::HashForRange(intptr_t from_index, intptr_t len) const {
6779 if (IsNull()) return kAllDynamicHash;
6780 if (IsRaw(from_index, len)) return kAllDynamicHash;
6781 uint32_t result = 0;
6783 for (intptr_t i = 0; i < len; i++) {
6784 type = TypeAt(from_index + i);
6785 ASSERT(!type.IsNull());
6786 result = CombineHashes(result, type.Hash());
6787 }
6789 return result;
6790}
6791
6792uword TypeArguments::ComputeHash() const {
6793 if (IsNull()) return kAllDynamicHash;
6794 const uword result = HashForRange(0, Length());
6795 ASSERT(result != 0);
6796 SetHash(result);
6797 return result;
6798}
6799
6800TypeArgumentsPtr TypeArguments::Prepend(Zone* zone,
6801 const TypeArguments& other,
6802 intptr_t other_length,
6803 intptr_t total_length) const {
6804 if (other_length == 0) {
6806 return ptr();
6807 } else if (other_length == total_length) {
6808 ASSERT(other.IsCanonical());
6809 return other.ptr();
6810 } else if (IsNull() && other.IsNull()) {
6811 return TypeArguments::null();
6812 }
6813 const TypeArguments& result =
6816 for (intptr_t i = 0; i < other_length; i++) {
6817 type = other.IsNull() ? Type::DynamicType() : other.TypeAt(i);
6818 result.SetTypeAt(i, type);
6819 }
6820 for (intptr_t i = other_length; i < total_length; i++) {
6821 type = IsNull() ? Type::DynamicType() : TypeAt(i - other_length);
6822 result.SetTypeAt(i, type);
6823 }
6824 return result.Canonicalize(Thread::Current());
6825}
6826
6828 Zone* zone,
6829 const TypeArguments& other) const {
6830 ASSERT(!IsNull() && !other.IsNull());
6831 const intptr_t this_len = Length();
6832 const intptr_t other_len = other.Length();
6833 const auto& result = TypeArguments::Handle(
6834 zone, TypeArguments::New(this_len + other_len, Heap::kNew));
6835 auto& type = AbstractType::Handle(zone);
6836 for (intptr_t i = 0; i < this_len; ++i) {
6837 type = TypeAt(i);
6838 result.SetTypeAt(i, type);
6839 }
6840 for (intptr_t i = 0; i < other_len; ++i) {
6841 type = other.TypeAt(i);
6842 result.SetTypeAt(this_len + i, type);
6843 }
6844 return result.ptr();
6845}
6846
6848 const Function* function,
6849 const Class* cls) const {
6850 if (IsNull() || IsInstantiated()) {
6852 }
6853 if (function != nullptr) {
6856 }
6857 if (cls == nullptr) {
6858 cls = &Class::Handle(zone, function->Owner());
6859 }
6860 }
6861 if (cls != nullptr) {
6864 }
6865 }
6867}
6868
6869StringPtr TypeArguments::Name() const {
6870 Thread* thread = Thread::Current();
6871 ZoneTextBuffer printer(thread->zone());
6872 PrintSubvectorName(0, Length(), kInternalName, &printer);
6873 return Symbols::New(thread, printer.buffer());
6874}
6875
6877 Thread* thread = Thread::Current();
6878 ZoneTextBuffer printer(thread->zone());
6880 return Symbols::New(thread, printer.buffer());
6881}
6882
6883void TypeArguments::PrintSubvectorName(intptr_t from_index,
6884 intptr_t len,
6885 NameVisibility name_visibility,
6886 BaseTextBuffer* printer) const {
6887 printer->AddString("<");
6889 for (intptr_t i = 0; i < len; i++) {
6890 if (from_index + i < Length()) {
6891 type = TypeAt(from_index + i);
6892 if (type.IsNull()) {
6893 printer->AddString("null"); // Unfinalized vector.
6894 } else {
6895 type.PrintName(name_visibility, printer);
6896 }
6897 } else {
6898 printer->AddString("dynamic");
6899 }
6900 if (i < len - 1) {
6901 printer->AddString(", ");
6902 }
6903 }
6904 printer->AddString(">");
6905}
6906
6908 buffer->AddString("TypeArguments: ");
6909 if (IsNull()) {
6910 return buffer->AddString("null");
6911 }
6912 buffer->Printf("(H%" Px ")", Smi::Value(untag()->hash()));
6913 auto& type_at = AbstractType::Handle();
6914 for (intptr_t i = 0; i < Length(); i++) {
6915 type_at = TypeAt(i);
6916 buffer->Printf(" [%s]", type_at.IsNull() ? "null" : type_at.ToCString());
6917 }
6918}
6919
6921 const TypeArguments& other,
6922 intptr_t from_index,
6923 intptr_t len,
6924 TypeEquality kind,
6925 FunctionTypeMapping* function_type_equivalence) const {
6926 if (this->ptr() == other.ptr()) {
6927 return true;
6928 }
6929 if (kind == TypeEquality::kCanonical) {
6930 if (IsNull() || other.IsNull()) {
6931 return false;
6932 }
6933 if (Length() != other.Length()) {
6934 return false;
6935 }
6936 }
6938 AbstractType& other_type = AbstractType::Handle();
6939 for (intptr_t i = from_index; i < from_index + len; i++) {
6941 ASSERT(!type.IsNull());
6942 other_type = other.IsNull() ? Type::DynamicType() : other.TypeAt(i);
6943 ASSERT(!other_type.IsNull());
6944 if (!type.IsEquivalent(other_type, kind, function_type_equivalence)) {
6945 return false;
6946 }
6947 }
6948 return true;
6949}
6950
6951bool TypeArguments::IsDynamicTypes(bool raw_instantiated,
6952 intptr_t from_index,
6953 intptr_t len) const {
6954 ASSERT(Length() >= (from_index + len));
6956 Class& type_class = Class::Handle();
6957 for (intptr_t i = 0; i < len; i++) {
6958 type = TypeAt(from_index + i);
6959 if (type.IsNull()) {
6960 return false;
6961 }
6962 if (!type.HasTypeClass()) {
6963 if (raw_instantiated && type.IsTypeParameter()) {
6964 // An uninstantiated type parameter is equivalent to dynamic.
6965 continue;
6966 }
6967 return false;
6968 }
6969 type_class = type.type_class();
6970 if (!type_class.IsDynamicClass()) {
6971 return false;
6972 }
6973 }
6974 return true;
6975}
6976
6978 : zone_(ASSERT_NOTNULL(zone)),
6979 cache_container_(&source),
6980 data_(Array::Handle(source.instantiations())),
6981 smi_handle_(Smi::Handle(zone)) {
6983 ->type_arguments_canonicalization_mutex()
6984 ->IsOwnedByCurrentThread());
6985}
6986
6988 : zone_(ASSERT_NOTNULL(zone)),
6989 cache_container_(nullptr),
6990 data_(Array::Handle(array.ptr())),
6991 smi_handle_(Smi::Handle(zone)) {
6993 ->type_arguments_canonicalization_mutex()
6994 ->IsOwnedByCurrentThread());
6995}
6996
// Whether [array] uses the hash-based cache layout: any backing storage
// larger than the maximum linear cache size is hash-based.
bool TypeArguments::Cache::IsHash(const Array& array) {
  return array.Length() > kMaxLinearCacheSize;
}
7000
7001intptr_t TypeArguments::Cache::NumOccupied(const Array& array) {
7003 RawSmiValue(Smi::RawCast(array.AtAcquire(kMetadataIndex))));
7004}
7005
7006#if defined(DEBUG)
7007bool TypeArguments::Cache::IsValidStorageLocked(const Array& array) {
7008 // We only require the mutex be held so we don't need to use acquire/release
7009 // semantics to access and set the number of occupied entries in the header.
7011 ->type_arguments_canonicalization_mutex()
7012 ->IsOwnedByCurrentThread());
7013 // Quick check against the empty linear cache.
7014 if (array.ptr() == EmptyStorage().ptr()) return true;
7015 const intptr_t num_occupied = NumOccupied(array);
7016 // We should be using the same shared value for an empty cache.
7017 if (num_occupied == 0) return false;
7018 const intptr_t storage_len = array.Length();
7019 // All caches have the metadata followed by a series of entries.
7020 if ((storage_len % kEntrySize) != kHeaderSize) return false;
7021 const intptr_t num_entries = NumEntries(array);
7022 // Linear caches contain at least one unoccupied entry, and hash-based caches
7023 // grow prior to hitting 100% occupancy.
7024 if (num_occupied >= num_entries) return false;
7025 // In a linear cache, all entries with indexes smaller than [num_occupied]
7026 // should be occupied and ones greater than or equal should be unoccupied.
7027 const bool is_linear_cache = IsLinear(array);
7028 // The capacity of a hash-based cache must be a power of two (see
7029 // EnsureCapacityLocked as to why).
7030 if (!is_linear_cache) {
7031 if (!Utils::IsPowerOfTwo(num_entries)) return false;
7032 const intptr_t metadata =
7033 RawSmiValue(Smi::RawCast(array.AtAcquire(kMetadataIndex)));
7034 if ((1 << EntryCountLog2Bits::decode(metadata)) != num_entries) {
7035 return false;
7036 }
7037 }
7038 for (intptr_t i = 0; i < num_entries; i++) {
7039 const intptr_t index = kHeaderSize + i * kEntrySize;
7040 if (array.At(index + kSentinelIndex) == Sentinel()) {
7041 if (is_linear_cache && i < num_occupied) return false;
7042 continue;
7043 }
7044 if (is_linear_cache && i >= num_occupied) return false;
7045 // The elements of an occupied entry are all TypeArguments values.
7046 for (intptr_t j = index; j < index + kEntrySize; j++) {
7047 if (!array.At(j)->IsHeapObject()) return false;
7048 if (array.At(j) == Object::null()) continue; // null is a valid TAV.
7049 if (!array.At(j)->IsTypeArguments()) return false;
7050 }
7051 }
7052 return true;
7053}
7054#endif
7055
7056bool TypeArguments::Cache::IsOccupied(intptr_t entry) const {
7058 ASSERT(entry >= 0 && entry < table.Length());
7059 return table.At(entry).Get<kSentinelIndex>() != Sentinel();
7060}
7061
7062TypeArgumentsPtr TypeArguments::Cache::Retrieve(intptr_t entry) const {
7063 ASSERT(IsOccupied(entry));
7065 return table.At(entry).Get<kInstantiatedTypeArgsIndex>();
7066}
7067
7068intptr_t TypeArguments::Cache::NumEntries(const Array& array) {
7070 return table.Length();
7071}
7072
7074 const Array& array,
7075 const TypeArguments& instantiator_tav,
7076 const TypeArguments& function_tav) {
7077 const bool is_hash = IsHash(array);
7079 const intptr_t num_entries = table.Length();
7080 // For a linear cache, start at the first entry and probe linearly. This can
7081 // be done because a linear cache always has at least one unoccupied entry
7082 // after all the occupied ones.
7083 intptr_t probe = 0;
7084 intptr_t probe_distance = 1;
7085 if (is_hash) {
7086 // For a hash-based cache, instead start at an entry determined by the hash
7087 // of the keys.
7088 auto hash = FinalizeHash(
7089 CombineHashes(instantiator_tav.Hash(), function_tav.Hash()));
7090 probe = hash & (num_entries - 1);
7091 }
7092 while (true) {
7093 const auto& tuple = table.At(probe);
7094 if (tuple.Get<kSentinelIndex>() == Sentinel()) break;
7095 if ((tuple.Get<kInstantiatorTypeArgsIndex>() == instantiator_tav.ptr()) &&
7096 (tuple.Get<kFunctionTypeArgsIndex>() == function_tav.ptr())) {
7097 return {probe, true};
7098 }
7099 // Advance probe by the current probing distance.
7100 probe = probe + probe_distance;
7101 if (is_hash) {
7102 // Wrap around if the probe goes off the end of the entries array.
7103 probe = probe & (num_entries - 1);
7104 // We had a collision, so increase the probe distance. See comment in
7105 // EnsureCapacityLocked for an explanation of how this hits all slots.
7106 probe_distance++;
7107 }
7108 }
7109 // We should always get the next slot for a linear cache.
7110 ASSERT(is_hash || probe == NumOccupied(array));
7111 return {probe, false};
7112}
7113
7115 intptr_t entry,
7116 const TypeArguments& instantiator_tav,
7117 const TypeArguments& function_tav,
7118 const TypeArguments& instantiated_tav) const {
7119 // We don't do mutating operations in tests without a TypeArguments object.
7120 ASSERT(cache_container_ != nullptr);
7121#if defined(DEBUG)
7122 auto loc = FindKeyOrUnused(instantiator_tav, function_tav);
7123 ASSERT_EQUAL(loc.entry, entry);
7124 ASSERT(!loc.present);
7125#endif
7126 // Double-check we got the expected entry index when adding to a linear array.
7127 ASSERT(!IsLinear() || entry == NumOccupied());
7128 const intptr_t new_occupied = NumOccupied() + 1;
7129 const bool storage_changed = EnsureCapacity(new_occupied);
7130 // Note that this call to IsLinear() may return a different result than the
7131 // earlier, since EnsureCapacity() may have swapped to hash-based storage.
7132 if (storage_changed && !IsLinear()) {
7133 // The capacity of the array has changed, and the capacity is used when
7134 // probing further into the array due to collisions. Thus, we need to redo
7135 // the entry index calculation.
7136 auto loc = FindKeyOrUnused(instantiator_tav, function_tav);
7137 ASSERT(!loc.present);
7138 entry = loc.entry;
7139 }
7140
7141 // Go ahead and increment the number of occupied entries prior to adding the
7142 // entry. Use a store-release barrier in case of concurrent readers.
7143 const intptr_t metadata = RawSmiValue(Smi::RawCast(data_.At(kMetadataIndex)));
7144 smi_handle_ = Smi::New(NumOccupiedBits::update(new_occupied, metadata));
7145 data_.SetAtRelease(kMetadataIndex, smi_handle_);
7146
7148 const auto& tuple = table.At(entry);
7149 // The parts of the tuple that aren't used for sentinel checking are only
7150 // retrieved if the entry is occupied. Entries in the cache are never deleted,
7151 // so once the entry is marked as occupied, the contents of that entry never
7152 // change. Thus, we don't need store-release barriers here.
7153 tuple.Set<kFunctionTypeArgsIndex>(function_tav);
7154 tuple.Set<kInstantiatedTypeArgsIndex>(instantiated_tav);
7155 // For the sentinel position, though, we do.
7156 static_assert(
7157 kSentinelIndex == kInstantiatorTypeArgsIndex,
7158 "the sentinel position is not protected with a store-release barrier");
7159 tuple.Set<kInstantiatorTypeArgsIndex, std::memory_order_release>(
7160 instantiator_tav);
7161
7162 if (storage_changed) {
7163 // Only check for validity on growth, just to keep the overhead on DEBUG
7164 // builds down.
7165 DEBUG_ASSERT(IsValidStorageLocked(data_));
7166 // Update the container of the original cache to point to the new one.
7167 cache_container_->set_instantiations(data_);
7168 }
7169
7170 return {entry, true};
7171}
7172
7174 return Smi::New(kSentinelValue);
7175}
7176
7177bool TypeArguments::Cache::EnsureCapacity(intptr_t new_occupied) const {
7178 ASSERT(new_occupied > NumOccupied());
7179 // How many entries are in the current array (including unoccupied entries).
7180 const intptr_t current_capacity = NumEntries();
7181
7182 // Early returns for cases where no growth is needed.
7183 const bool is_linear = IsLinear();
7184 if (is_linear) {
7185 // We need at least one unoccupied entry in addition to the occupied ones.
7186 if (current_capacity > new_occupied) return false;
7187 } else {
7188 if (LoadFactor(new_occupied, current_capacity) < kMaxLoadFactor) {
7189 return false;
7190 }
7191 }
7192
7193 if (new_occupied <= kMaxLinearCacheEntries) {
7195 // Not enough room for both the new entry and at least one unoccupied
7196 // entry, so grow the tuple capacity of the linear cache by about 50%,
7197 // ensuring that space for at least one new tuple is added, capping the
7198 // total number of occupied entries to the max allowed.
7199 const intptr_t new_capacity =
7200 Utils::Minimum(current_capacity + (current_capacity >> 1),
7201 kMaxLinearCacheEntries) +
7202 1;
7203 const intptr_t cache_size = kHeaderSize + new_capacity * kEntrySize;
7204 ASSERT(cache_size <= kMaxLinearCacheSize);
7205 data_ = Array::Grow(data_, cache_size, Heap::kOld);
7206 ASSERT(!data_.IsNull());
7207 // No need to adjust the number of occupied entries or old entries, as they
7208 // are copied over by Array::Grow. Just mark any new entries as unoccupied.
7209 smi_handle_ = Sentinel();
7211 for (intptr_t i = current_capacity; i < new_capacity; i++) {
7212 const auto& tuple = table.At(i);
7213 tuple.Set<kSentinelIndex>(smi_handle_);
7214 }
7215 return true;
7216 }
7217
7218 // Either we're converting a linear cache into a hash-based cache, or the
7219 // load factor of the hash-based cache has increased to the point where we
7220 // need to grow it.
7221 const intptr_t new_capacity =
7222 is_linear ? kNumInitialHashCacheEntries : 2 * current_capacity;
7223 // Because we use quadratic (actually triangle number) probing it is
7224 // important that the size is a power of two (otherwise we could fail to
7225 // find an empty slot). This is described in Knuth's The Art of Computer
7226 // Programming Volume 2, Chapter 6.4, exercise 20 (solution in the
7227 // appendix, 2nd edition).
7228 ASSERT(Utils::IsPowerOfTwo(new_capacity));
7229 ASSERT(LoadFactor(new_occupied, new_capacity) < kMaxLoadFactor);
7230 const intptr_t new_size = kHeaderSize + new_capacity * kEntrySize;
7231 const auto& new_data =
7233 ASSERT(!new_data.IsNull());
7234 // First set up the metadata in new_data.
7235 const intptr_t metadata = RawSmiValue(Smi::RawCast(data_.At(kMetadataIndex)));
7236 smi_handle_ = Smi::New(EntryCountLog2Bits::update(
7237 Utils::ShiftForPowerOfTwo(new_capacity), metadata));
7238 new_data.SetAt(kMetadataIndex, smi_handle_);
7239 // Then mark all the entries in new_data as unoccupied.
7240 smi_handle_ = Sentinel();
7241 InstantiationsCacheTable to_table(new_data);
7242 for (const auto& tuple : to_table) {
7243 tuple.Set<kSentinelIndex>(smi_handle_);
7244 }
7245 // Finally, copy over the entries.
7246 auto& instantiator_tav = TypeArguments::Handle(zone_);
7247 auto& function_tav = TypeArguments::Handle(zone_);
7248 auto& result_tav = TypeArguments::Handle(zone_);
7249 const InstantiationsCacheTable from_table(data_);
7250 for (const auto& from_tuple : from_table) {
7251 // Skip unoccupied entries.
7252 if (from_tuple.Get<kSentinelIndex>() == Sentinel()) continue;
7253 instantiator_tav ^= from_tuple.Get<kInstantiatorTypeArgsIndex>();
7254 function_tav = from_tuple.Get<kFunctionTypeArgsIndex>();
7255 result_tav = from_tuple.Get<kInstantiatedTypeArgsIndex>();
7256 // Since new_data has a different total capacity, we can't use the old
7257 // entry indexes, but must recalculate them.
7258 auto loc = FindKeyOrUnused(new_data, instantiator_tav, function_tav);
7259 ASSERT(!loc.present);
7260 const auto& to_tuple = to_table.At(loc.entry);
7261 to_tuple.Set<kInstantiatorTypeArgsIndex>(instantiator_tav);
7262 to_tuple.Set<kFunctionTypeArgsIndex>(function_tav);
7263 to_tuple.Set<kInstantiatedTypeArgsIndex>(result_tav);
7264 }
7265 data_ = new_data.ptr();
7266 return true;
7267}
7268
7270 return instantiations() != Cache::EmptyStorage().ptr();
7271}
7272
// Returns the backing storage of the instantiations cache.
ArrayPtr TypeArguments::instantiations() const {
  // We rely on the fact that any loads from the array are dependent loads and
  // avoid the load-acquire barrier here.
  return untag()->instantiations();
}
7278
// Publishes a new (never null) instantiations cache array.
void TypeArguments::set_instantiations(const Array& value) const {
  // We have to ensure that initializing stores to the array are available
  // when releasing the pointer to the array pointer.
  // => We have to use store-release here.
  ASSERT(!value.IsNull());
  untag()->set_instantiations<std::memory_order_release>(value.ptr());
}
7286
7287bool TypeArguments::HasCount(intptr_t count) const {
7288 if (IsNull()) {
7289 return true;
7290 }
7291 return Length() == count;
7292}
7293
7294intptr_t TypeArguments::Length() const {
7295 if (IsNull()) {
7296 return 0;
7297 }
7298 return Smi::Value(untag()->length());
7299}
7300
7302 if (IsNull()) {
7303 return 0;
7304 }
7305 return Smi::Value(untag()->nullability());
7306}
7307
// Returns the type at [index]. The receiver must be non-null and [index]
// must be within bounds; use TypeAtNullSafe() for a null-tolerant variant.
AbstractTypePtr TypeArguments::TypeAt(intptr_t index) const {
  ASSERT(!IsNull());
  ASSERT((index >= 0) && (index < Length()));
  return untag()->element(index);
}
7313
7314AbstractTypePtr TypeArguments::TypeAtNullSafe(intptr_t index) const {
7315 if (IsNull()) {
7316 // null vector represents infinite list of dynamics
7317 return Type::dynamic_type().ptr();
7318 }
7319 ASSERT((index >= 0) && (index < Length()));
7320 return TypeAt(index);
7321}
7322
// Stores [value] at [index]. Canonical vectors are immutable and must not be
// modified.
void TypeArguments::SetTypeAt(intptr_t index, const AbstractType& value) const {
  ASSERT(!IsCanonical());
  ASSERT((index >= 0) && (index < Length()));
  return untag()->set_element(index, value.ptr());
}
7328
7330 intptr_t from_index,
7331 intptr_t len,
7332 Genericity genericity,
7333 intptr_t num_free_fun_type_params) const {
7334 ASSERT(!IsNull());
7336 for (intptr_t i = 0; i < len; i++) {
7337 type = TypeAt(from_index + i);
7338 // If this type argument T is null, the type A containing T in its flattened
7339 // type argument vector V is recursive and is still being finalized.
7340 // T is the type argument of a super type of A. T is being instantiated
7341 // during finalization of V, which is also the instantiator. T depends
7342 // solely on the type parameters of A and will be replaced by a non-null
7343 // type before A is marked as finalized.
7344 if (!type.IsNull() &&
7345 !type.IsInstantiated(genericity, num_free_fun_type_params)) {
7346 return false;
7347 }
7348 }
7349 return true;
7350}
7351
7354 const intptr_t num_types = Length();
7355 for (intptr_t i = 0; i < num_types; i++) {
7356 type = TypeAt(i);
7357 if (type.IsNull()) {
7358 return false; // Still unfinalized, too early to tell.
7359 }
7360 if (!type.IsTypeParameter()) {
7361 return false;
7362 }
7363 const TypeParameter& type_param = TypeParameter::Cast(type);
7364 ASSERT(type_param.IsFinalized());
7365 if ((type_param.index() != i) || type_param.IsFunctionTypeParameter()) {
7366 return false;
7367 }
7368 // Instantiating nullable type parameters may change
7369 // nullability of a type, so type arguments vector containing such type
7370 // parameters cannot be substituted with instantiator type arguments.
7371 if (type_param.IsNullable()) {
7372 return false;
7373 }
7374 }
7375 return true;
7376 // Note that it is not necessary to verify at runtime that the instantiator
7377 // type vector is long enough, since this uninstantiated vector contains as
7378 // many different type parameters as it is long.
7379}
7380
7381// Return true if this uninstantiated type argument vector, once instantiated
7382// at runtime, is a prefix of the type argument vector of its instantiator.
7383// A runtime check may be required, as indicated by with_runtime_check.
7385 const Class& instantiator_class,
7386 bool* with_runtime_check) const {
7388 if (with_runtime_check != nullptr) {
7389 *with_runtime_check = false;
7390 }
7391 const intptr_t num_type_args = Length();
7392 const intptr_t num_instantiator_type_args =
7393 instantiator_class.NumTypeArguments();
7394 if (num_type_args > num_instantiator_type_args) {
7395 // This vector cannot be a prefix of a shorter vector.
7396 return false;
7397 }
7398 const intptr_t num_instantiator_type_params =
7399 instantiator_class.NumTypeParameters();
7400 const intptr_t first_type_param_offset =
7401 num_instantiator_type_args - num_instantiator_type_params;
7402 // At compile time, the type argument vector of the instantiator consists of
7403 // the type argument vector of its super type, which may refer to the type
7404 // parameters of the instantiator class, followed by (or overlapping partially
7405 // or fully with) the type parameters of the instantiator class in declaration
7406 // order.
7407 // In other words, the only variables are the type parameters of the
7408 // instantiator class.
7409 // This uninstantiated type argument vector is also expressed in terms of the
7410 // type parameters of the instantiator class. Therefore, in order to be a
7411 // prefix once instantiated at runtime, every one of its type argument must be
7412 // equal to the type argument of the instantiator vector at the same index.
7413
7414 // As a first requirement, the last num_instantiator_type_params type
7415 // arguments of this type argument vector must refer to the corresponding type
7416 // parameters of the instantiator class.
7417 AbstractType& type_arg = AbstractType::Handle();
7418 for (intptr_t i = first_type_param_offset; i < num_type_args; i++) {
7419 type_arg = TypeAt(i);
7420 if (!type_arg.IsTypeParameter()) {
7421 return false;
7422 }
7423 const TypeParameter& type_param = TypeParameter::Cast(type_arg);
7424 ASSERT(type_param.IsFinalized());
7425 if ((type_param.index() != i) || type_param.IsFunctionTypeParameter()) {
7426 return false;
7427 }
7428 // Instantiating nullable type parameters may change nullability
7429 // of a type, so type arguments vector containing such type parameters
7430 // cannot be substituted with instantiator type arguments, unless we check
7431 // at runtime the nullability of the first 1 or 2 type arguments of the
7432 // instantiator.
7433 // Note that the presence of non-overlapping super type arguments (i.e.
7434 // first_type_param_offset > 0) will prevent this optimization.
7435 if (type_param.IsNullable()) {
7436 if (with_runtime_check == nullptr || i >= kNullabilityMaxTypes) {
7437 return false;
7438 }
7439 *with_runtime_check = true;
7440 }
7441 }
7442 // As a second requirement, the type arguments corresponding to the super type
7443 // must be identical. Overlapping ones have already been checked starting at
7444 // first_type_param_offset.
7445 if (first_type_param_offset == 0) {
7446 return true;
7447 }
7448 Type& super_type = Type::Handle(instantiator_class.super_type());
7449 const TypeArguments& super_type_args =
7451 Thread::Current(), /*canonicalize=*/false));
7452 if (super_type_args.IsNull()) {
7454 return false;
7455 }
7456 AbstractType& super_type_arg = AbstractType::Handle();
7457 for (intptr_t i = 0; (i < first_type_param_offset) && (i < num_type_args);
7458 i++) {
7459 type_arg = TypeAt(i);
7460 super_type_arg = super_type_args.TypeAt(i);
7461 if (!type_arg.Equals(super_type_arg)) {
7463 return false;
7464 }
7465 }
7466 return true;
7467}
7468
7469// Return true if this uninstantiated type argument vector, once instantiated
7470// at runtime, is a prefix of the enclosing function type arguments.
7471// A runtime check may be required, as indicated by with_runtime_check.
7473 const Function& function,
7474 bool* with_runtime_check) const {
7476 if (with_runtime_check != nullptr) {
7477 *with_runtime_check = false;
7478 }
7479 const intptr_t num_type_args = Length();
7480 const intptr_t num_parent_type_args = function.NumParentTypeArguments();
7481 const intptr_t num_function_type_params = function.NumTypeParameters();
7482 const intptr_t num_function_type_args =
7483 num_parent_type_args + num_function_type_params;
7484 if (num_type_args > num_function_type_args) {
7485 // This vector cannot be a prefix of a shorter vector.
7486 return false;
7487 }
7488 AbstractType& type_arg = AbstractType::Handle();
7489 for (intptr_t i = 0; i < num_type_args; i++) {
7490 type_arg = TypeAt(i);
7491 if (!type_arg.IsTypeParameter()) {
7492 return false;
7493 }
7494 const TypeParameter& type_param = TypeParameter::Cast(type_arg);
7495 ASSERT(type_param.IsFinalized());
7496 if ((type_param.index() != i) || !type_param.IsFunctionTypeParameter()) {
7497 return false;
7498 }
7499 // Instantiating nullable type parameters may change nullability
7500 // of a type, so type arguments vector containing such type parameters
7501 // cannot be substituted with the enclosing function type arguments, unless
7502 // we check at runtime the nullability of the first 1 or 2 type arguments of
7503 // the enclosing function type arguments.
7504 if (type_param.IsNullable()) {
7505 if (with_runtime_check == nullptr || i >= kNullabilityMaxTypes) {
7506 return false;
7507 }
7508 *with_runtime_check = true;
7509 }
7510 }
7511 return true;
7512}
7513
7514TypeArgumentsPtr TypeArguments::TruncatedTo(intptr_t length) const {
7515 Thread* thread = Thread::Current();
7516 Zone* zone = thread->zone();
7517 const TypeArguments& result =
7520 for (intptr_t i = 0; i < length; i++) {
7521 type = TypeAt(i);
7522 result.SetTypeAt(i, type);
7523 }
7524 return result.Canonicalize(thread);
7525}
7526
7528 ASSERT(!IsNull());
7530 const intptr_t num_types = Length();
7531 for (intptr_t i = 0; i < num_types; i++) {
7532 type = TypeAt(i);
7533 if (!type.IsFinalized()) {
7534 return false;
7535 }
7536 }
7537 return true;
7538}
7539
7541 const TypeArguments& instantiator_type_arguments,
7542 const TypeArguments& function_type_arguments,
7543 intptr_t num_free_fun_type_params,
7544 Heap::Space space,
7545 FunctionTypeMapping* function_type_mapping,
7546 intptr_t num_parent_type_args_adjustment) const {
7548 if ((instantiator_type_arguments.IsNull() ||
7549 instantiator_type_arguments.Length() == Length()) &&
7551 return instantiator_type_arguments.ptr();
7552 }
7553 const intptr_t num_types = Length();
7554 TypeArguments& instantiated_array =
7555 TypeArguments::Handle(TypeArguments::New(num_types, space));
7557 for (intptr_t i = 0; i < num_types; i++) {
7558 type = TypeAt(i);
7559 // If this type argument T is null, the type A containing T in its flattened
7560 // type argument vector V is recursive and is still being finalized.
7561 // T is the type argument of a super type of A. T is being instantiated
7562 // during finalization of V, which is also the instantiator. T depends
7563 // solely on the type parameters of A and will be replaced by a non-null
7564 // type before A is marked as finalized.
7565 if (!type.IsNull() && !type.IsInstantiated()) {
7566 type = type.InstantiateFrom(
7567 instantiator_type_arguments, function_type_arguments,
7568 num_free_fun_type_params, space, function_type_mapping,
7569 num_parent_type_args_adjustment);
7570 // A returned null type indicates a failed instantiation in dead code that
7571 // must be propagated up to the caller, the optimizing compiler.
7572 if (type.IsNull()) {
7573 return Object::empty_type_arguments().ptr();
7574 }
7575 }
7576 instantiated_array.SetTypeAt(i, type);
7577 }
7578 return instantiated_array.ptr();
7579}
7580
7582 intptr_t num_parent_type_args_adjustment,
7583 intptr_t num_free_fun_type_params,
7584 Heap::Space space,
7585 FunctionTypeMapping* function_type_mapping) const {
7586 Zone* zone = Thread::Current()->zone();
7587 TypeArguments* updated_args = nullptr;
7589 AbstractType& updated = AbstractType::Handle(zone);
7590 for (intptr_t i = 0, n = Length(); i < n; ++i) {
7591 type = TypeAt(i);
7592 updated = type.UpdateFunctionTypes(num_parent_type_args_adjustment,
7593 num_free_fun_type_params, space,
7594 function_type_mapping);
7595 if (type.ptr() != updated.ptr()) {
7596 if (updated_args == nullptr) {
7597 updated_args =
7598 &TypeArguments::Handle(zone, TypeArguments::New(n, space));
7599 for (intptr_t j = 0; j < i; ++j) {
7600 type = TypeAt(j);
7601 updated_args->SetTypeAt(j, type);
7602 }
7603 }
7604 }
7605 if (updated_args != nullptr) {
7606 updated_args->SetTypeAt(i, updated);
7607 }
7608 }
7609 return (updated_args != nullptr) ? updated_args->ptr() : ptr();
7610}
7611
7612#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
7613// A local flag used only in object_test.cc that, when true, causes a failure
7614// when a cache entry for the given instantiator and function type arguments
7615// already exists. Used to check that the InstantiateTypeArguments stub found
7616// the cache entry instead of calling the runtime.
7618#endif
7619
7621 const TypeArguments& instantiator_type_arguments,
7622 const TypeArguments& function_type_arguments) const {
7623 auto thread = Thread::Current();
7624 auto zone = thread->zone();
7627
7629 ASSERT(instantiator_type_arguments.IsNull() ||
7630 instantiator_type_arguments.IsCanonical());
7631 ASSERT(function_type_arguments.IsNull() ||
7632 function_type_arguments.IsCanonical());
7633 // Lookup instantiators and if found, return instantiated result.
7634 Cache cache(zone, *this);
7635 auto const loc = cache.FindKeyOrUnused(instantiator_type_arguments,
7636 function_type_arguments);
7637 if (loc.present) {
7638#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
7640 TextBuffer buffer(1024);
7641 buffer.Printf("for\n");
7642 buffer.Printf(" * uninstantiated type arguments %s\n", ToCString());
7643 buffer.Printf(" * instantiation type arguments: %s (hash: %" Pu ")\n",
7644 instantiator_type_arguments.ToCString(),
7645 instantiator_type_arguments.Hash());
7646 buffer.Printf(" * function type arguments: %s (hash: %" Pu ")\n",
7647 function_type_arguments.ToCString(),
7648 function_type_arguments.Hash());
7649 buffer.Printf(" * number of occupied entries in cache: %" Pd "\n",
7650 cache.NumOccupied());
7651 buffer.Printf(" * number of total entries in cache: %" Pd "\n",
7652 cache.NumEntries());
7653 buffer.Printf("expected to find entry %" Pd
7654 " of cache in stub, but reached runtime",
7655 loc.entry);
7656 FATAL("%s", buffer.buffer());
7657 }
7658#endif
7659 return cache.Retrieve(loc.entry);
7660 }
7661 // Cache lookup failed. Instantiate the type arguments.
7663 result = InstantiateFrom(instantiator_type_arguments, function_type_arguments,
7665 // Canonicalize type arguments.
7666 result = result.Canonicalize(thread);
7667 // InstantiateAndCanonicalizeFrom is not reentrant. It cannot have been called
7668 // indirectly, so the prior_instantiations array cannot have grown.
7669 ASSERT(cache.data_.ptr() == instantiations());
7670 cache.AddEntry(loc.entry, instantiator_type_arguments,
7671 function_type_arguments, result);
7672 return result.ptr();
7673}
7674
7675TypeArgumentsPtr TypeArguments::New(intptr_t len, Heap::Space space) {
7676 if (len < 0 || len > kMaxElements) {
7677 // This should be caught before we reach here.
7678 FATAL("Fatal error in TypeArguments::New: invalid len %" Pd "\n", len);
7679 }
7681 {
7682 auto raw = Object::Allocate<TypeArguments>(space, len);
7683 NoSafepointScope no_safepoint;
7684 result = raw;
7685 // Length must be set before we start storing into the array.
7686 result.SetLength(len);
7687 result.SetHash(0);
7688 result.set_nullability(0);
7689 }
7690 // The array used as storage for an empty linear cache should be initialized.
7692 result.set_instantiations(Cache::EmptyStorage());
7693 return result.ptr();
7694}
7695
// Stores the length slot of this (not yet canonical) type argument vector.
void TypeArguments::SetLength(intptr_t value) const {
  ASSERT(!IsCanonical());
  // This is only safe because we create a new Smi, which does not cause
  // heap allocation.
  untag()->set_length(Smi::New(value));
}
7702
7703TypeArgumentsPtr TypeArguments::Canonicalize(Thread* thread) const {
7704 if (IsNull() || IsCanonical()) {
7705 ASSERT(IsOld());
7706 return this->ptr();
7707 }
7708 const intptr_t num_types = Length();
7709 if (num_types == 0) {
7710 return TypeArguments::empty_type_arguments().ptr();
7711 } else if (IsRaw(0, num_types)) {
7712 return TypeArguments::null();
7713 }
7714 Zone* zone = thread->zone();
7715 auto isolate_group = thread->isolate_group();
7716 ObjectStore* object_store = isolate_group->object_store();
7718 {
7719 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
7721 object_store->canonical_type_arguments());
7722 result ^= table.GetOrNull(CanonicalTypeArgumentsKey(*this));
7723 object_store->set_canonical_type_arguments(table.Release());
7724 }
7725 if (result.IsNull()) {
7726 // Canonicalize each type argument.
7727 AbstractType& type_arg = AbstractType::Handle(zone);
7728 GrowableHandlePtrArray<const AbstractType> canonicalized_types(zone,
7729 num_types);
7730 for (intptr_t i = 0; i < num_types; i++) {
7731 type_arg = TypeAt(i);
7732 type_arg = type_arg.Canonicalize(thread);
7733 canonicalized_types.Add(type_arg);
7734 }
7735 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
7737 object_store->canonical_type_arguments());
7738 // Since we canonicalized some type arguments above we need to lookup
7739 // in the table again to make sure we don't already have an equivalent
7740 // canonical entry.
7741 result ^= table.GetOrNull(CanonicalTypeArgumentsKey(*this));
7742 if (result.IsNull()) {
7743 for (intptr_t i = 0; i < num_types; i++) {
7744 SetTypeAt(i, canonicalized_types.At(i));
7745 }
7746 // Make sure we have an old space object and add it to the table.
7747 if (this->IsNew()) {
7748 result ^= Object::Clone(*this, Heap::kOld);
7749 } else {
7750 result = this->ptr();
7751 }
7752 ASSERT(result.IsOld());
7753 result.ComputeNullability();
7754 result.SetCanonical(); // Mark object as being canonical.
7755 // Now add this TypeArgument into the canonical list of type arguments.
7756 bool present = table.Insert(result);
7757 ASSERT(!present);
7758 }
7759 object_store->set_canonical_type_arguments(table.Release());
7760 }
7761 ASSERT(result.Equals(*this));
7762 ASSERT(!result.IsNull());
7763 ASSERT(result.IsTypeArguments());
7764 ASSERT(result.IsCanonical());
7765 return result.ptr();
7766}
7767
7769 Thread* thread,
7770 const Class& cls) const {
7771 if (IsNull()) {
7772 return ptr();
7773 }
7774 const intptr_t num_type_arguments = cls.NumTypeArguments();
7775 const intptr_t num_type_parameters = cls.NumTypeParameters(thread);
7776 ASSERT(Length() >= num_type_arguments);
7777 if (Length() == num_type_parameters) {
7778 return ptr();
7779 }
7780 if (num_type_parameters == 0) {
7781 return TypeArguments::null();
7782 }
7783 Zone* zone = thread->zone();
7784 const auto& args =
7785 TypeArguments::Handle(zone, TypeArguments::New(num_type_parameters));
7786 const intptr_t offset = num_type_arguments - num_type_parameters;
7787 auto& type = AbstractType::Handle(zone);
7788 for (intptr_t i = 0; i < num_type_parameters; ++i) {
7789 type = TypeAt(offset + i);
7790 args.SetTypeAt(i, type);
7791 }
7792 return args.ptr();
7793}
7794
7796 Thread* thread,
7797 const Class& cls) const {
7798 if (IsNull()) {
7799 return ptr();
7800 }
7801 const intptr_t num_type_arguments = cls.NumTypeArguments();
7802 const intptr_t num_type_parameters = cls.NumTypeParameters(thread);
7803 ASSERT(Length() == num_type_parameters);
7804 if (num_type_arguments == num_type_parameters) {
7805 return ptr();
7806 }
7807 Zone* zone = thread->zone();
7808 const auto& args =
7809 TypeArguments::Handle(zone, TypeArguments::New(num_type_arguments));
7810 const intptr_t offset = num_type_arguments - num_type_parameters;
7811 auto& type = AbstractType::Handle(zone);
7812 for (intptr_t i = 0; i < num_type_parameters; ++i) {
7813 type = TypeAt(i);
7814 args.SetTypeAt(offset + i, type);
7815 }
7816 return args.ptr();
7817}
7818
7820 if (IsNull()) {
7821 return;
7822 }
7823 Thread* thread = Thread::Current();
7824 Zone* zone = thread->zone();
7826 const intptr_t num_types = Length();
7827 for (intptr_t i = 0; i < num_types; i++) {
7828 type = TypeAt(i);
7829 type.EnumerateURIs(uris);
7830 }
7831}
7832
7833const char* TypeArguments::ToCString() const {
7834 if (IsNull()) {
7835 return "TypeArguments: null"; // Optimizing the frequent case.
7836 }
7837 ZoneTextBuffer buffer(Thread::Current()->zone());
7838 PrintTo(&buffer);
7839 return buffer.buffer();
7840}
7841
7842const char* PatchClass::ToCString() const {
7843 const Class& cls = Class::Handle(wrapped_class());
7844 const char* cls_name = cls.ToCString();
7845 return OS::SCreate(Thread::Current()->zone(), "PatchClass for %s", cls_name);
7846}
7847
7848PatchClassPtr PatchClass::New(const Class& wrapped_class,
7849 const KernelProgramInfo& info,
7850 const Script& script) {
7851 const PatchClass& result = PatchClass::Handle(PatchClass::New());
7852 result.set_wrapped_class(wrapped_class);
7854 result.untag()->set_kernel_program_info(info.ptr()));
7855 result.set_script(script);
7856 result.set_kernel_library_index(-1);
7857 return result.ptr();
7858}
7859
7860PatchClassPtr PatchClass::New() {
7862 return Object::Allocate<PatchClass>(Heap::kOld);
7863}
7864
// Sets the class that this patch class augments.
void PatchClass::set_wrapped_class(const Class& value) const {
  untag()->set_wrapped_class(value.ptr());
}
7868
7869#if !defined(DART_PRECOMPILED_RUNTIME)
7871 untag()->set_kernel_program_info(info.ptr());
7872}
7873#endif
7874
// Sets the script holding this patch class's source.
void PatchClass::set_script(const Script& value) const {
  untag()->set_script(value.ptr());
}
7878
7881 if (IsClosureFunction()) {
7882 hash = hash ^ token_pos().Hash();
7883 }
7884 if (Owner()->IsClass()) {
7885 hash = hash ^ Class::Hash(Class::RawCast(Owner()));
7886 }
7887 return hash;
7888}
7889
7891#if defined(PRODUCT)
7892 return false;
7893#else
7894 auto thread = Thread::Current();
7895 return thread->isolate_group()->debugger()->HasBreakpoint(thread, *this);
7896#endif
7897}
7898
7900 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
7901 // We may not have previous code if FLAG_precompile is set.
7902 // Hot-reload may have already disabled the current code.
7903 if (HasCode() && !Code::Handle(CurrentCode()).IsDisabled()) {
7904 Code::Handle(CurrentCode()).DisableDartCode();
7905 }
7906 AttachCode(code);
7907}
7908
7910 // Ensure that nobody is executing this function when we install it.
7911 if (untag()->code() != Code::null() && HasCode()) {
7913 SetInstructionsSafe(value);
7914 } else {
7915 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
7916 SetInstructionsSafe(value);
7917 }
7918}
7919
7921 untag()->set_code<std::memory_order_release>(value.ptr());
7922 StoreNonPointer(&untag()->entry_point_, value.EntryPoint());
7923 StoreNonPointer(&untag()->unchecked_entry_point_,
7924 value.UncheckedEntryPoint());
7925}
7926
// Makes |value| the active code of this function. The code's owner must be
// set before the code is activated so that the object is fully initialized
// by the time it becomes reachable through this function.
void Function::AttachCode(const Code& value) const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  // Finish setting up code before activating it.
  value.set_owner(*this);
  SetInstructions(value);
  // The code either has no associated function or points back at this one.
  ASSERT(Function::Handle(value.function()).IsNull() ||
         (value.function() == this->ptr()));
}
7935
7936bool Function::HasCode() const {
7937 NoSafepointScope no_safepoint;
7938 ASSERT(untag()->code() != Code::null());
7939 return untag()->code() != StubCode::LazyCompile().ptr();
7940}
7941
7942bool Function::HasCode(FunctionPtr function) {
7943 NoSafepointScope no_safepoint;
7944 ASSERT(function->untag()->code() != Code::null());
7945 return function->untag()->code() != StubCode::LazyCompile().ptr();
7946}
7947
7949#if defined(DART_PRECOMPILED_RUNTIME)
7950 UNREACHABLE();
7951#else
7952 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
7953 untag()->set_unoptimized_code(Code::null());
7954 SetInstructions(StubCode::LazyCompile());
7955#endif // defined(DART_PRECOMPILED_RUNTIME)
7956}
7957
7959#if defined(DART_PRECOMPILED_RUNTIME)
7960 UNREACHABLE();
7961#else
7962 untag()->set_unoptimized_code(Code::null());
7963
7964 SetInstructionsSafe(StubCode::LazyCompile());
7965#endif // defined(DART_PRECOMPILED_RUNTIME)
7966}
7967
7969 ASSERT(!ForceOptimize());
7970 Thread* thread = Thread::Current();
7971 ASSERT(thread->IsDartMutatorThread());
7972 // TODO(35224): DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame());
7973 Zone* zone = thread->zone();
7974
7975 const Error& error =
7976 Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, *this));
7977 if (!error.IsNull()) {
7979 }
7980}
7981
7983 ASSERT(HasOptimizedCode());
7984 ASSERT(!ForceOptimize());
7985 Thread* thread = Thread::Current();
7988 Zone* zone = thread->zone();
7989 // TODO(35224): DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame());
7990 const Code& current_code = Code::Handle(zone, CurrentCode());
7991
7992 if (FLAG_trace_deoptimization_verbose) {
7993 THR_Print("Disabling optimized code: '%s' entry: %#" Px "\n",
7994 ToFullyQualifiedCString(), current_code.EntryPoint());
7995 }
7996 current_code.DisableDartCode();
7997 const Error& error =
7998 Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, *this));
7999 if (!error.IsNull()) {
8001 }
8002 const Code& unopt_code = Code::Handle(zone, unoptimized_code());
8003 unopt_code.Enable();
8004 AttachCode(unopt_code);
8005}
8006
8008#if defined(DART_PRECOMPILED_RUNTIME)
8009 UNREACHABLE();
8010#else
8011 if (!HasOptimizedCode()) {
8012 return;
8013 }
8014
8015 Thread* thread = Thread::Current();
8016 Zone* zone = thread->zone();
8017 ASSERT(thread->IsDartMutatorThread());
8018
8019 const Code& current_code = Code::Handle(zone, CurrentCode());
8020 TIR_Print("Disabling optimized code for %s\n", ToCString());
8021 current_code.DisableDartCode();
8022
8023 const Code& unopt_code = Code::Handle(zone, unoptimized_code());
8024 if (unopt_code.IsNull()) {
8025 // Set the lazy compile stub code.
8026 TIR_Print("Switched to lazy compile stub for %s\n", ToCString());
8027 SetInstructions(StubCode::LazyCompile());
8028 return;
8029 }
8030
8031 TIR_Print("Switched to unoptimized code for %s\n", ToCString());
8032
8033 AttachCode(unopt_code);
8034 unopt_code.Enable();
8035#endif
8036}
8037
8039#if defined(DART_PRECOMPILED_RUNTIME)
8040 UNREACHABLE();
8041#else
8042 DEBUG_ASSERT(IsMutatorOrAtDeoptSafepoint());
8043 ASSERT(value.IsNull() || !value.is_optimized());
8044 untag()->set_unoptimized_code(value.ptr());
8045#endif
8046}
8047
8048ContextScopePtr Function::context_scope() const {
8049 if (IsClosureFunction()) {
8050 const Object& obj = Object::Handle(untag()->data());
8051 ASSERT(!obj.IsNull());
8052 return ClosureData::Cast(obj).context_scope();
8053 }
8054 return ContextScope::null();
8055}
8056
8058 if (IsClosureFunction()) {
8059 const Object& obj = Object::Handle(untag()->data());
8060 ASSERT(!obj.IsNull());
8061 ClosureData::Cast(obj).set_context_scope(value);
8062 return;
8063 }
8064 UNREACHABLE();
8065}
8066
8068 if (IsClosureFunction()) {
8069 const Object& obj = Object::Handle(untag()->data());
8070 ASSERT(!obj.IsNull());
8071 return ClosureData::Cast(obj).awaiter_link();
8072 }
8073 UNREACHABLE();
8074 return {};
8075}
8076
8078 if (IsClosureFunction()) {
8079 const Object& obj = Object::Handle(untag()->data());
8080 ASSERT(!obj.IsNull());
8081 ClosureData::Cast(obj).set_awaiter_link(link);
8082 return;
8083 }
8084 UNREACHABLE();
8085}
8086
8087ClosurePtr Function::implicit_static_closure() const {
8088 if (IsImplicitStaticClosureFunction()) {
8089 const Object& obj = Object::Handle(untag()->data());
8090 ASSERT(!obj.IsNull());
8091 return ClosureData::Cast(obj).implicit_static_closure();
8092 }
8093 return Closure::null();
8094}
8095
8096void Function::set_implicit_static_closure(const Closure& closure) const {
8097 if (IsImplicitStaticClosureFunction()) {
8098 const Object& obj = Object::Handle(untag()->data());
8099 ASSERT(!obj.IsNull());
8100 ClosureData::Cast(obj).set_implicit_static_closure(closure);
8101 return;
8102 }
8103 UNREACHABLE();
8104}
8105
8106ScriptPtr Function::eval_script() const {
8107 const Object& obj = Object::Handle(untag()->data());
8108 if (obj.IsScript()) {
8109 return Script::Cast(obj).ptr();
8110 }
8111 return Script::null();
8112}
8113
// Records the script of an eval function. May only be set while the
// overloaded data_ field is still unused.
void Function::set_eval_script(const Script& script) const {
  ASSERT(token_pos() == TokenPosition::kMinSource);
  ASSERT(untag()->data() == Object::null());
  set_data(script);
}
8119
8121 ASSERT(kind() == UntaggedFunction::kMethodExtractor);
8122 const Object& obj = Object::Handle(untag()->data());
8123 ASSERT(obj.IsFunction());
8124 return Function::Cast(obj).ptr();
8125}
8126
8128 ASSERT(kind() == UntaggedFunction::kMethodExtractor);
8129 ASSERT(untag()->data() == Object::null());
8130 set_data(value);
8131}
8132
8134 if (kind() == UntaggedFunction::kDynamicInvocationForwarder) {
8135 return Array::null();
8136 }
8137 ASSERT(kind() == UntaggedFunction::kNoSuchMethodDispatcher ||
8138 kind() == UntaggedFunction::kInvokeFieldDispatcher);
8139 return Array::RawCast(untag()->data());
8140}
8141
8143 ASSERT(kind() == UntaggedFunction::kNoSuchMethodDispatcher ||
8144 kind() == UntaggedFunction::kInvokeFieldDispatcher);
8145 ASSERT(untag()->data() == Object::null());
8146 set_data(value);
8147}
8148
8150 ASSERT(kind() == UntaggedFunction::kImplicitGetter ||
8151 kind() == UntaggedFunction::kImplicitSetter ||
8152 kind() == UntaggedFunction::kImplicitStaticGetter ||
8153 kind() == UntaggedFunction::kFieldInitializer);
8154 return Field::RawCast(untag()->data());
8155}
8156
8158 ASSERT(kind() == UntaggedFunction::kImplicitGetter ||
8159 kind() == UntaggedFunction::kImplicitSetter ||
8160 kind() == UntaggedFunction::kImplicitStaticGetter ||
8161 kind() == UntaggedFunction::kFieldInitializer);
8162 // Top level classes may be finalized multiple times.
8163 ASSERT(untag()->data() == Object::null() || untag()->data() == value.ptr());
8164 set_data(value);
8165}
8166
8167FunctionPtr Function::parent_function() const {
8168 if (!IsClosureFunction()) return Function::null();
8169 Object& obj = Object::Handle(untag()->data());
8170 ASSERT(!obj.IsNull());
8171 return ClosureData::Cast(obj).parent_function();
8172}
8173
8174void Function::set_parent_function(const Function& value) const {
8175 ASSERT(IsClosureFunction());
8176 const Object& obj = Object::Handle(untag()->data());
8177 ASSERT(!obj.IsNull());
8178 ClosureData::Cast(obj).set_parent_function(value);
8179}
8180
8181TypeArgumentsPtr Function::DefaultTypeArguments(Zone* zone) const {
8182 if (type_parameters() == TypeParameters::null()) {
8183 return Object::empty_type_arguments().ptr();
8184 }
8185 return TypeParameters::Handle(zone, type_parameters()).defaults();
8186}
8187
8189 if (!IsClosureFunction()) {
8190 UNREACHABLE();
8191 }
8192 return ClosureData::DefaultTypeArgumentsInstantiationMode(
8194}
8195
8197 InstantiationMode value) const {
8198 if (!IsClosureFunction()) {
8199 UNREACHABLE();
8200 }
8201 const auto& closure_data = ClosureData::Handle(ClosureData::RawCast(data()));
8202 ASSERT(!closure_data.IsNull());
8203 closure_data.set_default_type_arguments_instantiation_mode(value);
8204}
8205
8206// Enclosing outermost function of this local function.
8208 FunctionPtr parent = parent_function();
8209 if (parent == Object::null()) {
8210 return ptr();
8211 }
8213 do {
8214 function = parent;
8215 parent = function.parent_function();
8216 } while (parent != Object::null());
8217 return function.ptr();
8218}
8219
// Returns the implicit closure function (tear-off) of this function, or null
// if none has been created yet or this kind of function cannot have one.
FunctionPtr Function::implicit_closure_function() const {
  // Closure functions and the listed synthetic kinds never get a tear-off.
  if (IsClosureFunction() || IsDispatcherOrImplicitAccessor() ||
      IsFieldInitializer() || IsFfiCallbackTrampoline() ||
      IsMethodExtractor()) {
    return Function::null();
  }
  // The overloaded data_ field (see the table above set_data) may hold the
  // tear-off directly, or — for native functions — inside an array.
  const Object& obj = Object::Handle(data());
  ASSERT(obj.IsNull() || obj.IsScript() || obj.IsFunction() || obj.IsArray());
  if (obj.IsNull() || obj.IsScript()) {
    return Function::null();
  }
  if (obj.IsFunction()) {
    return Function::Cast(obj).ptr();
  }
  // Native function: the tear-off slot (index 1) is published with a release
  // store in set_implicit_closure_function; pair it with an acquire load.
  ASSERT(is_native());
  ASSERT(obj.IsArray());
  const Object& res = Object::Handle(Array::Cast(obj).AtAcquire(1));
  return res.IsNull() ? Function::null() : Function::Cast(res).ptr();
}
8239
8240void Function::set_implicit_closure_function(const Function& value) const {
8242 IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
8243 ASSERT(!IsClosureFunction());
8244 const Object& old_data = Object::Handle(data());
8245 if (is_old_native()) {
8246 ASSERT(old_data.IsArray());
8247 const auto& pair = Array::Cast(old_data);
8248 ASSERT(pair.AtAcquire(NativeFunctionData::kTearOff) == Object::null() ||
8249 value.IsNull());
8250 pair.SetAtRelease(NativeFunctionData::kTearOff, value);
8251 } else {
8252 ASSERT(old_data.IsNull() || value.IsNull());
8253 set_data(value);
8254 }
8255}
8256
8258 ASSERT(IsFfiCallbackTrampoline());
8259 const Object& obj = Object::Handle(data());
8260 ASSERT(!obj.IsNull());
8261 FfiTrampolineData::Cast(obj).set_c_signature(sig);
8262}
8263
// Returns the C signature of this FFI function. For callback trampolines it
// is stored in the trampoline data; for ffi-native functions and ffi-call
// closures it is recovered from the single type argument of the respective
// pragma annotation value.
FunctionTypePtr Function::FfiCSignature() const {
  auto* const zone = Thread::Current()->zone();
  if (IsFfiCallbackTrampoline()) {
    const Object& obj = Object::Handle(zone, data());
    ASSERT(!obj.IsNull());
    return FfiTrampolineData::Cast(obj).c_signature();
  }
  auto& pragma_value = Instance::Handle(zone);
  if (is_ffi_native()) {
    pragma_value = GetNativeAnnotation();
  } else if (IsFfiCallClosure()) {
    pragma_value = GetFfiCallClosurePragmaValue();
  } else {
    UNREACHABLE();
  }
  // The pragma instance carries exactly one type argument: the native
  // function type.
  const auto& type_args =
      TypeArguments::Handle(zone, pragma_value.GetTypeArguments());
  ASSERT(type_args.Length() == 1);
  const auto& native_type =
      FunctionType::Cast(AbstractType::ZoneHandle(zone, type_args.TypeAt(0)));
  return native_type.ptr();
}
8286
8288 const FunctionType& c_signature = FunctionType::Handle(FfiCSignature());
8289 return c_signature.ContainsHandles();
8290}
8291
8293 const intptr_t num_params = num_fixed_parameters();
8294 for (intptr_t i = 0; i < num_params; i++) {
8295 const bool is_handle =
8296 AbstractType::Handle(ParameterTypeAt(i)).type_class_id() ==
8297 kFfiHandleCid;
8298 if (is_handle) {
8299 return true;
8300 }
8301 }
8302 return AbstractType::Handle(result_type()).type_class_id() == kFfiHandleCid;
8303}
8304
8305// Keep consistent with BaseMarshaller::IsCompound.
8307 ASSERT(IsFfiCallbackTrampoline());
8308 Zone* zone = Thread::Current()->zone();
8309 const auto& c_signature = FunctionType::Handle(zone, FfiCSignature());
8310 const auto& type = AbstractType::Handle(zone, c_signature.result_type());
8311 if (IsFfiTypeClassId(type.type_class_id())) {
8312 return false;
8313 }
8314 const auto& cls = Class::Handle(zone, type.type_class());
8315 const auto& superClass = Class::Handle(zone, cls.SuperClass());
8316 const bool is_abi_specific_int =
8317 String::Handle(zone, superClass.UserVisibleName())
8318 .Equals(Symbols::AbiSpecificInteger());
8319 if (is_abi_specific_int) {
8320 return false;
8321 }
8322#ifdef DEBUG
8323 const bool is_struct = String::Handle(zone, superClass.UserVisibleName())
8324 .Equals(Symbols::Struct());
8325 const bool is_union = String::Handle(zone, superClass.UserVisibleName())
8326 .Equals(Symbols::Union());
8327 ASSERT(is_struct || is_union);
8328#endif
8329 return true;
8330}
8331
8333 ASSERT(IsFfiCallbackTrampoline());
8334
8335 const auto& obj = Object::Handle(data());
8336 ASSERT(!obj.IsNull());
8337 const auto& trampoline_data = FfiTrampolineData::Cast(obj);
8338
8339 ASSERT(trampoline_data.callback_id() != -1);
8340
8341 return trampoline_data.callback_id();
8342}
8343
8344void Function::AssignFfiCallbackId(int32_t callback_id) const {
8345 ASSERT(IsFfiCallbackTrampoline());
8346
8347 const auto& obj = Object::Handle(data());
8348 ASSERT(!obj.IsNull());
8349 const auto& trampoline_data = FfiTrampolineData::Cast(obj);
8350
8351 ASSERT(trampoline_data.callback_id() == -1);
8352 trampoline_data.set_callback_id(callback_id);
8353}
8354
8356 Zone* zone = Thread::Current()->zone();
8357 auto& pragma_value = Instance::Handle(zone);
8358 if (is_ffi_native()) {
8359 pragma_value = GetNativeAnnotation();
8360 } else if (IsFfiCallClosure()) {
8361 pragma_value = GetFfiCallClosurePragmaValue();
8362 } else {
8363 UNREACHABLE();
8364 }
8365 const auto& pragma_value_class = Class::Handle(zone, pragma_value.clazz());
8366 const auto& pragma_value_fields =
8367 Array::Handle(zone, pragma_value_class.fields());
8368 ASSERT(pragma_value_fields.Length() >= 1);
8369 const auto& is_leaf_field = Field::Handle(
8370 zone,
8371 Field::RawCast(pragma_value_fields.At(pragma_value_fields.Length() - 1)));
8372 ASSERT(is_leaf_field.name() == Symbols::isLeaf().ptr());
8373 return Bool::Handle(zone, Bool::RawCast(pragma_value.GetField(is_leaf_field)))
8374 .value();
8375}
8376
8377FunctionPtr Function::FfiCallbackTarget() const {
8378 ASSERT(IsFfiCallbackTrampoline());
8379 const Object& obj = Object::Handle(data());
8380 ASSERT(!obj.IsNull());
8381 return FfiTrampolineData::Cast(obj).callback_target();
8382}
8383
8385 ASSERT(IsFfiCallbackTrampoline());
8386 const Object& obj = Object::Handle(data());
8387 ASSERT(!obj.IsNull());
8388 FfiTrampolineData::Cast(obj).set_callback_target(target);
8389}
8390
8392 ASSERT(IsFfiCallbackTrampoline());
8393 const Object& obj = Object::Handle(data());
8394 ASSERT(!obj.IsNull());
8395 return FfiTrampolineData::Cast(obj).callback_exceptional_return();
8396}
8397
8399 ASSERT(IsFfiCallbackTrampoline());
8400 const Object& obj = Object::Handle(data());
8401 ASSERT(!obj.IsNull());
8402 FfiTrampolineData::Cast(obj).set_callback_exceptional_return(value);
8403}
8404
8406 ASSERT(IsFfiCallbackTrampoline());
8407 const Object& obj = Object::Handle(data());
8408 ASSERT(!obj.IsNull());
8409 return FfiTrampolineData::Cast(obj).ffi_function_kind();
8410}
8411
8413 ASSERT(IsFfiCallbackTrampoline());
8414 const Object& obj = Object::Handle(data());
8415 ASSERT(!obj.IsNull());
8416 FfiTrampolineData::Cast(obj).set_ffi_function_kind(value);
8417}
8418
8421}
8422
8423FunctionPtr Function::ForwardingTarget() const {
8424 ASSERT(kind() == UntaggedFunction::kDynamicInvocationForwarder);
8426}
8427
8429 ASSERT(kind() == UntaggedFunction::kDynamicInvocationForwarder);
8430 set_data(target);
8431}
8432
8433// This field is heavily overloaded:
8434// kernel eval function: Array[0] = Script
8435// Array[1] = KernelProgramInfo
8436// Array[2] = Kernel index of enclosing library
8437// method extractor: Function extracted closure function
8438// implicit getter: Field
8439// implicit setter: Field
// impl. static final getter: Field
8441// field initializer: Field
8442// noSuchMethod dispatcher: Array arguments descriptor
8443// invoke-field dispatcher: Array arguments descriptor
8444// closure function: ClosureData
8445// irregexp function: Array[0] = RegExp
8446// Array[1] = Smi string specialization cid
8447// native function: Array[0] = String native name
8448// Array[1] = Function implicit closure function
8449// regular function: Function for implicit closure function
8450// constructor, factory: Function for implicit closure function
8451// ffi trampoline function: FfiTrampolineData (Dart->C)
8452// dyn inv forwarder: Forwarding target, a WSR pointing to it or null
8453// (null can only occur if forwarding target was
8454// dropped)
// Setter for the heavily overloaded data_ field (see the table above for
// what it holds per function kind). Uses a release store so that a reader
// on another thread observes a fully-initialized value once it sees the
// new pointer.
void Function::set_data(const Object& value) const {
  untag()->set_data<std::memory_order_release>(value.ptr());
}
8458
// Sets the function's name. Names must already be canonicalized as
// Symbols, which makes name comparisons pointer comparisons.
void Function::set_name(const String& value) const {
  ASSERT(value.IsSymbol());
  untag()->set_name(value.ptr());
}
8463
8465 ASSERT(!value.IsNull());
8466 untag()->set_owner(value.ptr());
8467}
8468
8469RegExpPtr Function::regexp() const {
8470 ASSERT(kind() == UntaggedFunction::kIrregexpFunction);
8471 const Array& pair = Array::Cast(Object::Handle(data()));
8472 return RegExp::RawCast(pair.At(0));
8473}
8474
// BitField layout helper for the Smi stored with an irregexp function's
// RegExp data: a bool "sticky" flag at bit 0 (the string specialization
// cid occupies the bits above it).
class StickySpecialization : public BitField<intptr_t, bool, 0, 1> {};
8477 : public BitField<intptr_t, intptr_t, 1, UntaggedObject::kClassIdTagSize> {
8478};
8479
8481 ASSERT(kind() == UntaggedFunction::kIrregexpFunction);
8482 const Array& pair = Array::Cast(Object::Handle(data()));
8484}
8485
8487 ASSERT(kind() == UntaggedFunction::kIrregexpFunction);
8488 const Array& pair = Array::Cast(Object::Handle(data()));
8490}
8491
8493 intptr_t string_specialization_cid,
8494 bool sticky) const {
8495 ASSERT(kind() == UntaggedFunction::kIrregexpFunction);
8496 ASSERT(IsStringClassId(string_specialization_cid));
8497 ASSERT(data() == Object::null());
8498 const Array& pair = Array::Handle(Array::New(2, Heap::kOld));
8499 pair.SetAt(0, regexp);
8502 string_specialization_cid))));
8503 set_data(pair);
8504}
8505
8506StringPtr Function::native_name() const {
8507 ASSERT(is_native());
8508 const Object& obj = Object::Handle(data());
8509 ASSERT(obj.IsArray());
8510 return String::RawCast(Array::Cast(obj).At(0));
8511}
8512
8514 ASSERT(is_native());
8515 const auto& pair = Array::Cast(Object::Handle(data()));
8516 ASSERT(pair.At(0) == Object::null());
8517 pair.SetAt(NativeFunctionData::kNativeName, value);
8518}
8519
8521 ASSERT(is_ffi_native());
8522 Zone* zone = Thread::Current()->zone();
8523 auto& pragma_value = Object::Handle(zone);
8524 Library::FindPragma(dart::Thread::Current(), /*only_core=*/false,
8525 Object::Handle(zone, ptr()),
8526 String::Handle(zone, Symbols::vm_ffi_native().ptr()),
8527 /*multiple=*/false, &pragma_value);
8528 auto const& native_instance = Instance::Cast(pragma_value);
8529 ASSERT(!native_instance.IsNull());
8530#if defined(DEBUG)
8531 const auto& native_class = Class::Handle(zone, native_instance.clazz());
8532 ASSERT(String::Handle(zone, native_class.UserVisibleName())
8533 .Equals(Symbols::FfiNative()));
8534#endif
8535 return native_instance.ptr();
8536}
8537
8539 return is_native() && !is_external();
8540}
8541
8543 return is_native() && is_external();
8544}
8545
8547 set_signature(value);
8548 ASSERT(NumImplicitParameters() == value.num_implicit_parameters());
8549 if (IsClosureFunction() && value.IsGeneric()) {
8550 Zone* zone = Thread::Current()->zone();
8551 const TypeParameters& type_params =
8552 TypeParameters::Handle(zone, value.type_parameters());
8553 const TypeArguments& defaults =
8554 TypeArguments::Handle(zone, type_params.defaults());
8555 auto mode = defaults.GetInstantiationMode(zone, this);
8556 set_default_type_arguments_instantiation_mode(mode);
8557 }
8558}
8559
8560TypeParameterPtr FunctionType::TypeParameterAt(intptr_t index,
8561 Nullability nullability) const {
8562 ASSERT(index >= 0 && index < NumTypeParameters());
8563 Thread* thread = Thread::Current();
8564 Zone* zone = thread->zone();
8566 zone, TypeParameter::New(*this, NumParentTypeArguments(),
8567 NumParentTypeArguments() + index, nullability));
8568 type_param.SetIsFinalized();
8569 if (IsFinalized()) {
8570 type_param ^= type_param.Canonicalize(thread);
8571 }
8572 return type_param.ptr();
8573}
8574
8576 ASSERT(!value.IsNull());
8577 untag()->set_result_type(value.ptr());
8578}
8579
8580AbstractTypePtr Function::ParameterTypeAt(intptr_t index) const {
8581 const Array& types = Array::Handle(parameter_types());
8582 return AbstractType::RawCast(types.At(index));
8583}
8584
8585AbstractTypePtr FunctionType::ParameterTypeAt(intptr_t index) const {
8586 const Array& parameter_types = Array::Handle(untag()->parameter_types());
8587 return AbstractType::RawCast(parameter_types.At(index));
8588}
8589
8591 const AbstractType& value) const {
8592 ASSERT(!value.IsNull());
8593 const Array& parameter_types = Array::Handle(untag()->parameter_types());
8594 parameter_types.SetAt(index, value);
8595}
8596
8598 ASSERT(value.IsNull() || value.Length() > 0);
8599 untag()->set_parameter_types(value.ptr());
8600}
8601
// Returns the name of the parameter at |index|. Positional parameter
// names are stored on the Function itself; named parameter names are
// stored on the signature's named_parameter_names array, offset past the
// fixed parameters.
StringPtr Function::ParameterNameAt(intptr_t index) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  if (signature() == FunctionType::null()) {
    // Without the signature, we're guaranteed not to have any name information.
    return Symbols::OptimizedOut().ptr();
  }
#endif
  const intptr_t num_fixed = num_fixed_parameters();
  if (HasOptionalNamedParameters() && index >= num_fixed) {
    // Named parameter: look it up on the signature.
    const Array& parameter_names =
        Array::Handle(signature()->untag()->named_parameter_names());
    return String::RawCast(parameter_names.At(index - num_fixed));
  }
#if defined(DART_PRECOMPILED_RUNTIME)
  // Positional parameter names are not retained in the precompiled runtime.
  return Symbols::OptimizedOut().ptr();
#else
  const Array& names = Array::Handle(untag()->positional_parameter_names());
  return String::RawCast(names.At(index));
#endif
}
8622
8623void Function::SetParameterNameAt(intptr_t index, const String& value) const {
8624#if defined(DART_PRECOMPILED_RUNTIME)
8625 UNREACHABLE();
8626#else
8627 ASSERT(!value.IsNull() && value.IsSymbol());
8628 if (HasOptionalNamedParameters() && index >= num_fixed_parameters()) {
8629 // These should be set on the signature, not the function.
8630 UNREACHABLE();
8631 }
8632 const Array& parameter_names =
8633 Array::Handle(untag()->positional_parameter_names());
8634 parameter_names.SetAt(index, value);
8635#endif
8636}
8637
#if !defined(DART_PRECOMPILED_RUNTIME)
// Stores the positional parameter name array. An empty array must be the
// canonical Object::empty_array() instance rather than a fresh zero-length
// allocation.
void Function::set_positional_parameter_names(const Array& value) const {
  ASSERT(value.ptr() == Object::empty_array().ptr() || value.Length() > 0);
  untag()->set_positional_parameter_names(value.ptr());
}
#endif
8644
8645StringPtr FunctionType::ParameterNameAt(intptr_t index) const {
8646 const intptr_t num_fixed = num_fixed_parameters();
8647 if (!HasOptionalNamedParameters() || index < num_fixed) {
8648 // The positional parameter names are stored on the function, not here.
8649 UNREACHABLE();
8650 }
8651 const Array& parameter_names =
8652 Array::Handle(untag()->named_parameter_names());
8653 return String::RawCast(parameter_names.At(index - num_fixed));
8654}
8655
8657 const String& value) const {
8658#if defined(DART_PRECOMPILED_RUNTIME)
8659 UNREACHABLE();
8660#else
8661 ASSERT(!value.IsNull() && value.IsSymbol());
8662 const intptr_t num_fixed = num_fixed_parameters();
8663 if (!HasOptionalNamedParameters() || index < num_fixed) {
8664 UNREACHABLE();
8665 }
8666 const Array& parameter_names =
8667 Array::Handle(untag()->named_parameter_names());
8668 parameter_names.SetAt(index - num_fixed, value);
8669#endif
8670}
8671
8673 ASSERT(value.ptr() == Object::empty_array().ptr() || value.Length() > 0);
8674 untag()->set_named_parameter_names(value.ptr());
8675}
8676
8678#if defined(DART_PRECOMPILED_RUNTIME)
8679 UNREACHABLE();
8680#else
8681 const intptr_t num_positional_params =
8682 num_fixed_parameters() + NumOptionalPositionalParameters();
8683 if (num_positional_params == 0) {
8684 set_positional_parameter_names(Object::empty_array());
8685 } else {
8686 set_positional_parameter_names(
8687 Array::Handle(Array::New(num_positional_params, space)));
8688 }
8689#endif
8690}
8691
8693#if defined(DART_PRECOMPILED_RUNTIME)
8694 UNREACHABLE();
8695#else
8696 const intptr_t num_named_parameters = NumOptionalNamedParameters();
8697 if (num_named_parameters == 0) {
8698 return set_named_parameter_names(Object::empty_array());
8699 }
8700 // Currently, we only store flags for named parameters.
8701 const intptr_t last_index = (num_named_parameters - 1) /
8703 const intptr_t num_flag_slots = last_index + 1;
8704 intptr_t num_total_slots = num_named_parameters + num_flag_slots;
8705 auto& array = Array::Handle(Array::New(num_total_slots, space));
8706 // Set flag slots to Smi 0 before handing off.
8707 auto& empty_flags_smi = Smi::Handle(Smi::New(0));
8708 for (intptr_t i = num_named_parameters; i < num_total_slots; i++) {
8709 array.SetAt(i, empty_flags_smi);
8710 }
8711 set_named_parameter_names(array);
8712#endif
8713}
8714
8716 intptr_t* flag_mask) const {
8717 // If these calculations change, also change
8718 // FlowGraphBuilder::BuildClosureCallHasRequiredNamedArgumentsCheck.
8719 ASSERT(HasOptionalNamedParameters());
8720 ASSERT(flag_mask != nullptr);
8721 ASSERT(index >= num_fixed_parameters());
8722 index -= num_fixed_parameters();
8724 << ((static_cast<uintptr_t>(index) %
8727 return NumOptionalNamedParameters() +
8729}
8730
8732#if defined(DART_PRECOMPILED_RUNTIME)
8733 if (signature() == FunctionType::null()) {
8734 // Signatures for functions with required named parameters are not dropped.
8735 return false;
8736 }
8737#endif
8738 return FunctionType::Handle(signature()).HasRequiredNamedParameters();
8739}
8740
8741bool Function::IsRequiredAt(intptr_t index) const {
8742#if defined(DART_PRECOMPILED_RUNTIME)
8743 if (signature() == FunctionType::null()) {
8744 // Signature is not dropped in aot when any named parameter is required.
8745 return false;
8746 }
8747#endif
8748 if (!HasOptionalNamedParameters() || index < num_fixed_parameters()) {
8749 return false;
8750 }
8751 const FunctionType& sig = FunctionType::Handle(signature());
8752 return sig.IsRequiredAt(index);
8753}
8754
8755bool FunctionType::IsRequiredAt(intptr_t index) const {
8756 if (!HasOptionalNamedParameters() || index < num_fixed_parameters()) {
8757 return false;
8758 }
8759 intptr_t flag_mask;
8760 const intptr_t flag_index = GetRequiredFlagIndex(index, &flag_mask);
8761 const Array& parameter_names =
8762 Array::Handle(untag()->named_parameter_names());
8763 if (flag_index >= parameter_names.Length()) {
8764 return false;
8765 }
8766 const intptr_t flags =
8767 Smi::Value(Smi::RawCast(parameter_names.At(flag_index)));
8768 return (flags & flag_mask) != 0;
8769}
8770
8771void FunctionType::SetIsRequiredAt(intptr_t index) const {
8772#if defined(DART_PRECOMPILER_RUNTIME)
8773 UNREACHABLE();
8774#else
8775 intptr_t flag_mask;
8776 const intptr_t flag_index = GetRequiredFlagIndex(index, &flag_mask);
8777 const Array& parameter_names =
8778 Array::Handle(untag()->named_parameter_names());
8779 ASSERT(flag_index < parameter_names.Length());
8780 const intptr_t flags =
8781 Smi::Value(Smi::RawCast(parameter_names.At(flag_index)));
8782 parameter_names.SetAt(flag_index, Smi::Handle(Smi::New(flags | flag_mask)));
8783#endif
8784}
8785
8787#if defined(DART_PRECOMPILER_RUNTIME)
8788 UNREACHABLE();
8789#else
8790 const intptr_t num_named_parameters = NumOptionalNamedParameters();
8791 if (num_named_parameters == 0) {
8792 ASSERT(untag()->named_parameter_names() == Object::empty_array().ptr());
8793 return;
8794 }
8795 const Array& parameter_names =
8796 Array::Handle(untag()->named_parameter_names());
8797 // Truncate the parameter names array to remove unused flags from the end.
8798 intptr_t last_used = parameter_names.Length() - 1;
8799 for (; last_used >= num_named_parameters; --last_used) {
8800 if (Smi::Value(Smi::RawCast(parameter_names.At(last_used))) != 0) {
8801 break;
8802 }
8803 }
8804 parameter_names.Truncate(last_used + 1);
8805#endif
8806}
8807
8809 const intptr_t num_named_params = NumOptionalNamedParameters();
8810 if (num_named_params == 0) return false;
8811 // Check for flag slots in the named parameter names array.
8812 const auto& parameter_names = Array::Handle(named_parameter_names());
8813 ASSERT(!parameter_names.IsNull());
8814 return parameter_names.Length() > num_named_params;
8815}
8816
8818 Report::MessageF(Report::kError, Script::Handle(), TokenPosition::kNoSource,
8820 "too many type parameters declared in signature '%s' or in "
8821 "its enclosing signatures",
8822 sig.ToUserVisibleCString());
8823 UNREACHABLE();
8824}
8825
8827 untag()->set_type_parameters(value.ptr());
8828 const intptr_t count = value.Length();
8831 }
8832 untag()->packed_type_parameter_counts_.Update<PackedNumTypeParameters>(count);
8833}
8834
8836 ASSERT(value >= 0);
8839 }
8840 untag()->packed_type_parameter_counts_.Update<PackedNumParentTypeArguments>(
8841 value);
8842}
8843
8845 return FunctionType::IsGeneric(signature());
8846}
8848 return FunctionType::NumTypeParametersOf(signature());
8849}
8851 return FunctionType::NumParentTypeArgumentsOf(signature());
8852}
8854 return FunctionType::NumTypeArgumentsOf(signature());
8855}
8857 return FunctionType::NumFixedParametersOf(signature());
8858}
8860 return FunctionType::HasOptionalParameters(signature());
8861}
8863 return FunctionType::HasOptionalNamedParameters(signature());
8864}
8867}
8869 return FunctionType::NumOptionalParametersOf(signature());
8870}
8873}
8876}
// Total number of parameters (fixed plus optional), delegated to this
// function's signature.
intptr_t Function::NumParameters() const {
  return FunctionType::NumParametersOf(signature());
}
8880
8881TypeParameterPtr Function::TypeParameterAt(intptr_t index,
8882 Nullability nullability) const {
8883 const FunctionType& sig = FunctionType::Handle(signature());
8884 return sig.TypeParameterAt(index, nullability);
8885}
8886
// Updates the function-kind bits within the packed kind_tag_ word.
void Function::set_kind(UntaggedFunction::Kind value) const {
  untag()->kind_tag_.Update<KindBits>(value);
}
8890
8892 untag()->kind_tag_.Update<ModifierBits>(value);
8893}
8894
8896 // Prevent multiple settings of kind.
8897 ASSERT((value == MethodRecognizer::kUnknown) || !IsRecognized());
8898 untag()->kind_tag_.Update<RecognizedBits>(value);
8899}
8900
8902#if defined(DART_PRECOMPILED_RUNTIME)
8903 UNREACHABLE();
8904#else
8905 ASSERT(!token_pos.IsClassifying() || IsMethodExtractor());
8906 StoreNonPointer(&untag()->token_pos_, token_pos);
8907#endif
8908}
8909
// Overwrites the entire packed kind/modifier/flag word in one store.
void Function::set_kind_tag(uint32_t value) const {
  untag()->kind_tag_ = value;
}
8913
8914bool Function::is_eval_function() const {
8915 if (data()->IsArray()) {
8916 const intptr_t len = Array::LengthOf(Array::RawCast(data()));
8917 return len == static_cast<intptr_t>(EvalFunctionData::kLength);
8918 }
8919 return false;
8920}
8921
// Stores the packed parameter-count fields. Not available in the
// precompiled runtime.
void Function::set_packed_fields(uint32_t packed_fields) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  StoreNonPointer(&untag()->packed_fields_, packed_fields);
#endif
}
8929
8931 if (FLAG_precompiled_mode) {
8932 return true;
8933 }
8934 if (ForceOptimize()) return true;
8935 if (is_old_native()) {
8936 // Native methods don't need to be optimized.
8937 return false;
8938 }
8939 if (is_optimizable() && (script() != Script::null())) {
8940 // Additional check needed for implicit getters.
8941 return (unoptimized_code() == Object::null()) ||
8942 (Code::Handle(unoptimized_code()).Size() <
8943 FLAG_huge_method_cutoff_in_code_size);
8944 }
8945 return false;
8946}
8947
8949 ASSERT(!is_native());
8950 set_is_optimizable(value);
8951 if (!value) {
8952 set_is_inlinable(false);
8953 set_usage_counter(INT32_MIN);
8954 }
8955}
8956
8958 switch (recognized_kind()) {
8959 case MethodRecognizer::kTypedData_ByteDataView_factory:
8960 case MethodRecognizer::kTypedData_Int8ArrayView_factory:
8961 case MethodRecognizer::kTypedData_Uint8ArrayView_factory:
8962 case MethodRecognizer::kTypedData_Uint8ClampedArrayView_factory:
8963 case MethodRecognizer::kTypedData_Int16ArrayView_factory:
8964 case MethodRecognizer::kTypedData_Uint16ArrayView_factory:
8965 case MethodRecognizer::kTypedData_Int32ArrayView_factory:
8966 case MethodRecognizer::kTypedData_Uint32ArrayView_factory:
8967 case MethodRecognizer::kTypedData_Int64ArrayView_factory:
8968 case MethodRecognizer::kTypedData_Uint64ArrayView_factory:
8969 case MethodRecognizer::kTypedData_Float32ArrayView_factory:
8970 case MethodRecognizer::kTypedData_Float64ArrayView_factory:
8971 case MethodRecognizer::kTypedData_Float32x4ArrayView_factory:
8972 case MethodRecognizer::kTypedData_Int32x4ArrayView_factory:
8973 case MethodRecognizer::kTypedData_Float64x2ArrayView_factory:
8974 return true;
8975 default:
8976 return false;
8977 }
8978}
8979
8981 switch (recognized_kind()) {
8982 case MethodRecognizer::kTypedData_UnmodifiableByteDataView_factory:
8983 case MethodRecognizer::kTypedData_UnmodifiableInt8ArrayView_factory:
8984 case MethodRecognizer::kTypedData_UnmodifiableUint8ArrayView_factory:
8985 case MethodRecognizer::kTypedData_UnmodifiableUint8ClampedArrayView_factory:
8986 case MethodRecognizer::kTypedData_UnmodifiableInt16ArrayView_factory:
8987 case MethodRecognizer::kTypedData_UnmodifiableUint16ArrayView_factory:
8988 case MethodRecognizer::kTypedData_UnmodifiableInt32ArrayView_factory:
8989 case MethodRecognizer::kTypedData_UnmodifiableUint32ArrayView_factory:
8990 case MethodRecognizer::kTypedData_UnmodifiableInt64ArrayView_factory:
8991 case MethodRecognizer::kTypedData_UnmodifiableUint64ArrayView_factory:
8992 case MethodRecognizer::kTypedData_UnmodifiableFloat32ArrayView_factory:
8993 case MethodRecognizer::kTypedData_UnmodifiableFloat64ArrayView_factory:
8994 case MethodRecognizer::kTypedData_UnmodifiableFloat32x4ArrayView_factory:
8995 case MethodRecognizer::kTypedData_UnmodifiableInt32x4ArrayView_factory:
8996 case MethodRecognizer::kTypedData_UnmodifiableFloat64x2ArrayView_factory:
8997 return true;
8998 default:
8999 return false;
9000 }
9001}
9002
9003static bool InVmTests(const Function& function) {
9004#if defined(TESTING)
9005 return true;
9006#else
9007 auto* zone = Thread::Current()->zone();
9008 const auto& cls = Class::Handle(zone, function.Owner());
9009 const auto& lib = Library::Handle(zone, cls.library());
9010 const auto& url = String::Handle(zone, lib.url());
9011 const bool in_vm_tests =
9012 strstr(url.ToCString(), "runtime/tests/vm/") != nullptr;
9013 return in_vm_tests;
9014#endif
9015}
9016
9018 if (RecognizedKindForceOptimize() || IsFfiCallClosure() ||
9019 IsFfiCallbackTrampoline() || is_ffi_native() ||
9020 IsTypedDataViewFactory() || IsUnmodifiableTypedDataViewFactory()) {
9021 return true;
9022 }
9023
9024 if (!has_pragma()) return false;
9025
9026 const bool has_vm_pragma = Library::FindPragma(
9027 Thread::Current(), false, *this, Symbols::vm_force_optimize());
9028 if (!has_vm_pragma) return false;
9029
9030 // For run_vm_tests and runtime/tests/vm allow marking arbitrary functions as
9031 // force-optimize via `@pragma('vm:force-optimize')`.
9032 return InVmTests(*this);
9033}
9034
9036 if (!has_pragma()) return false;
9037
9038 return Library::FindPragma(Thread::Current(), /*only_core=*/false, *this,
9039 Symbols::vm_prefer_inline());
9040}
9041
9043 if (!has_pragma()) return false;
9044
9045#if defined(TESTING)
9046 const bool kAllowOnlyForCoreLibFunctions = false;
9047#else
9048 const bool kAllowOnlyForCoreLibFunctions = true;
9049#endif // defined(TESTING)
9050
9051 return Library::FindPragma(Thread::Current(), kAllowOnlyForCoreLibFunctions,
9052 *this, Symbols::vm_idempotent());
9053}
9054
9056 if (!has_pragma()) return false;
9057
9058 const bool has_vm_pragma =
9059 Library::FindPragma(Thread::Current(), /*only_core=*/false, *this,
9060 Symbols::vm_cachable_idempotent());
9061 if (!has_vm_pragma) return false;
9062
9063 // For run_vm_tests and runtime/tests/vm allow marking arbitrary functions.
9064 return InVmTests(*this);
9065}
9066
9068 if (!IsNonImplicitClosureFunction()) return false;
9069 if (!has_pragma()) return false;
9070 return Library::FindPragma(Thread::Current(), /*only_core=*/false, *this,
9071 Symbols::vm_ffi_call_closure());
9072}
9073
9075 ASSERT(IsFfiCallClosure());
9076 Thread* thread = Thread::Current();
9077 Zone* zone = thread->zone();
9078 auto& pragma_value = Object::Handle(zone);
9079 Library::FindPragma(thread, /*only_core=*/false, *this,
9080 Symbols::vm_ffi_call_closure(),
9081 /*multiple=*/false, &pragma_value);
9082 ASSERT(!pragma_value.IsNull());
9083 return Instance::Cast(pragma_value).ptr();
9084}
9085
9087 switch (recognized_kind()) {
9088 // Uses unboxed/untagged data not supported in unoptimized, or uses
9089 // LoadIndexed/StoreIndexed/MemoryCopy instructions with typed data
9090 // arrays, which requires optimization for payload extraction.
9091 case MethodRecognizer::kObjectArrayGetIndexed:
9092 case MethodRecognizer::kGrowableArrayGetIndexed:
9093#define TYPED_DATA_GET_INDEXED_CASES(clazz) \
9094 case MethodRecognizer::k##clazz##ArrayGetIndexed: \
9095 FALL_THROUGH; \
9096 case MethodRecognizer::kExternal##clazz##ArrayGetIndexed: \
9097 FALL_THROUGH; \
9098 case MethodRecognizer::k##clazz##ArrayViewGetIndexed: \
9099 FALL_THROUGH;
9101#undef TYPED_DATA_GET_INDEXED_CASES
9102 case MethodRecognizer::kCopyRangeFromUint8ListToOneByteString:
9103 case MethodRecognizer::kFinalizerBase_getIsolateFinalizers:
9104 case MethodRecognizer::kFinalizerBase_setIsolate:
9105 case MethodRecognizer::kFinalizerBase_setIsolateFinalizers:
9106 case MethodRecognizer::kFinalizerEntry_getExternalSize:
9107 case MethodRecognizer::kExtensionStreamHasListener:
9108 case MethodRecognizer::kFfiLoadInt8:
9109 case MethodRecognizer::kFfiLoadInt16:
9110 case MethodRecognizer::kFfiLoadInt32:
9111 case MethodRecognizer::kFfiLoadInt64:
9112 case MethodRecognizer::kFfiLoadUint8:
9113 case MethodRecognizer::kFfiLoadUint16:
9114 case MethodRecognizer::kFfiLoadUint32:
9115 case MethodRecognizer::kFfiLoadUint64:
9116 case MethodRecognizer::kFfiLoadFloat:
9117 case MethodRecognizer::kFfiLoadFloatUnaligned:
9118 case MethodRecognizer::kFfiLoadDouble:
9119 case MethodRecognizer::kFfiLoadDoubleUnaligned:
9120 case MethodRecognizer::kFfiLoadPointer:
9121 case MethodRecognizer::kFfiStoreInt8:
9122 case MethodRecognizer::kFfiStoreInt16:
9123 case MethodRecognizer::kFfiStoreInt32:
9124 case MethodRecognizer::kFfiStoreInt64:
9125 case MethodRecognizer::kFfiStoreUint8:
9126 case MethodRecognizer::kFfiStoreUint16:
9127 case MethodRecognizer::kFfiStoreUint32:
9128 case MethodRecognizer::kFfiStoreUint64:
9129 case MethodRecognizer::kFfiStoreFloat:
9130 case MethodRecognizer::kFfiStoreFloatUnaligned:
9131 case MethodRecognizer::kFfiStoreDouble:
9132 case MethodRecognizer::kFfiStoreDoubleUnaligned:
9133 case MethodRecognizer::kFfiStorePointer:
9134 case MethodRecognizer::kFfiFromAddress:
9135 case MethodRecognizer::kFfiGetAddress:
9136 case MethodRecognizer::kFfiAsExternalTypedDataInt8:
9137 case MethodRecognizer::kFfiAsExternalTypedDataInt16:
9138 case MethodRecognizer::kFfiAsExternalTypedDataInt32:
9139 case MethodRecognizer::kFfiAsExternalTypedDataInt64:
9140 case MethodRecognizer::kFfiAsExternalTypedDataUint8:
9141 case MethodRecognizer::kFfiAsExternalTypedDataUint16:
9142 case MethodRecognizer::kFfiAsExternalTypedDataUint32:
9143 case MethodRecognizer::kFfiAsExternalTypedDataUint64:
9144 case MethodRecognizer::kFfiAsExternalTypedDataFloat:
9145 case MethodRecognizer::kFfiAsExternalTypedDataDouble:
9146 case MethodRecognizer::kGetNativeField:
9147 case MethodRecognizer::kRecord_fieldNames:
9148 case MethodRecognizer::kRecord_numFields:
9149 case MethodRecognizer::kStringBaseCodeUnitAt:
9150 case MethodRecognizer::kUtf8DecoderScan:
9151 case MethodRecognizer::kDouble_hashCode:
9152 case MethodRecognizer::kTypedList_GetInt8:
9153 case MethodRecognizer::kTypedList_SetInt8:
9154 case MethodRecognizer::kTypedList_GetUint8:
9155 case MethodRecognizer::kTypedList_SetUint8:
9156 case MethodRecognizer::kTypedList_GetInt16:
9157 case MethodRecognizer::kTypedList_SetInt16:
9158 case MethodRecognizer::kTypedList_GetUint16:
9159 case MethodRecognizer::kTypedList_SetUint16:
9160 case MethodRecognizer::kTypedList_GetInt32:
9161 case MethodRecognizer::kTypedList_SetInt32:
9162 case MethodRecognizer::kTypedList_GetUint32:
9163 case MethodRecognizer::kTypedList_SetUint32:
9164 case MethodRecognizer::kTypedList_GetInt64:
9165 case MethodRecognizer::kTypedList_SetInt64:
9166 case MethodRecognizer::kTypedList_GetUint64:
9167 case MethodRecognizer::kTypedList_SetUint64:
9168 case MethodRecognizer::kTypedList_GetFloat32:
9169 case MethodRecognizer::kTypedList_SetFloat32:
9170 case MethodRecognizer::kTypedList_GetFloat64:
9171 case MethodRecognizer::kTypedList_SetFloat64:
9172 case MethodRecognizer::kTypedList_GetInt32x4:
9173 case MethodRecognizer::kTypedList_SetInt32x4:
9174 case MethodRecognizer::kTypedList_GetFloat32x4:
9175 case MethodRecognizer::kTypedList_SetFloat32x4:
9176 case MethodRecognizer::kTypedList_GetFloat64x2:
9177 case MethodRecognizer::kTypedList_SetFloat64x2:
9178 case MethodRecognizer::kTypedData_memMove1:
9179 case MethodRecognizer::kTypedData_memMove2:
9180 case MethodRecognizer::kTypedData_memMove4:
9181 case MethodRecognizer::kTypedData_memMove8:
9182 case MethodRecognizer::kTypedData_memMove16:
9183 case MethodRecognizer::kMemCopy:
9184 // Prevent the GC from running so that the operation is atomic from
9185 // a GC point of view. Always double check implementation in
9186 // kernel_to_il.cc that no GC can happen in between the relevant IL
9187 // instructions.
9188 // TODO(https://dartbug.com/48527): Support inlining.
9189 case MethodRecognizer::kFinalizerBase_exchangeEntriesCollectedWithNull:
9190 // Both unboxed/untagged data and atomic-to-GC operation.
9191 case MethodRecognizer::kFinalizerEntry_allocate:
9192 return true;
9193 default:
9194 return false;
9195 }
9196}
9197
9198#if !defined(DART_PRECOMPILED_RUNTIME)
9200 if (ForceOptimize()) {
9201 if (IsFfiCallClosure() || IsFfiCallbackTrampoline() || is_ffi_native()) {
9202 // We currently don't support inlining FFI trampolines. Some of them
9203 // are naturally non-inlinable because they contain a try/catch block,
9204 // but this condition is broader than strictly necessary.
9205 // The work necessary for inlining FFI trampolines is tracked by
9206 // http://dartbug.com/45055.
9207 return false;
9208 }
9209 if (CompilerState::Current().is_aot()) {
9210 return true;
9211 }
9212 // Inlining of force-optimized functions requires target function to be
9213 // idempotent becase if deoptimization is needed in inlined body, the
9214 // execution of the force-optimized will be restarted at the beginning of
9215 // the function.
9216 ASSERT(!IsPreferInline() || IsIdempotent());
9217 return IsIdempotent();
9218 }
9219
9220 if (HasBreakpoint()) {
9221 return false;
9222 }
9223
9224 return is_inlinable();
9225}
9226#endif // !defined(DART_PRECOMPILED_RUNTIME)
9227
9229 const UntaggedFunction::Kind k = kind();
9230 if (k == UntaggedFunction::kConstructor) {
9231 // Type arguments for factory; instance for generative constructor.
9232 return 1;
9233 }
9234 if ((k == UntaggedFunction::kClosureFunction) ||
9235 (k == UntaggedFunction::kImplicitClosureFunction) ||
9236 (k == UntaggedFunction::kFfiTrampoline)) {
9237 return 1; // Closure object.
9238 }
9239 if (!is_static()) {
9240 // Closure functions defined inside instance (i.e. non-static) functions are
9241 // marked as non-static, but they do not have a receiver.
9242 // Closures are handled above.
9243 ASSERT((k != UntaggedFunction::kClosureFunction) &&
9244 (k != UntaggedFunction::kImplicitClosureFunction));
9245 return 1; // Receiver.
9246 }
9247 return 0; // No implicit parameters.
9248}
9249
// Checks whether the given argument-shape counts are compatible with this
// function's signature: the number of type arguments must match the type
// parameter count (or be zero), there must not be more named arguments
// than named parameters, and the positional argument count must fall
// within [num_fixed_parameters, num_fixed_parameters + optional
// positionals]. On failure returns false and, if |error_message| is
// non-null, stores a user-visible description of the mismatch (implicit
// parameters such as the receiver are hidden from the reported counts).
bool Function::AreValidArgumentCounts(intptr_t num_type_arguments,
                                      intptr_t num_arguments,
                                      intptr_t num_named_arguments,
                                      String* error_message) const {
  // Type-argument count: callers may pass zero to mean "instantiate to
  // bounds"; otherwise the count must match exactly.
  if ((num_type_arguments != 0) &&
      (num_type_arguments != NumTypeParameters())) {
    if (error_message != nullptr) {
      const intptr_t kMessageBufferSize = 64;
      char message_buffer[kMessageBufferSize];
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd " type arguments passed, but %" Pd " expected",
                     num_type_arguments, NumTypeParameters());
      // Allocate in old space because it can be invoked in background
      // optimizing compilation.
      *error_message = String::New(message_buffer, Heap::kOld);
    }
    return false;  // Too many type arguments.
  }
  if (num_named_arguments > NumOptionalNamedParameters()) {
    if (error_message != nullptr) {
      const intptr_t kMessageBufferSize = 64;
      char message_buffer[kMessageBufferSize];
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd " named passed, at most %" Pd " expected",
                     num_named_arguments, NumOptionalNamedParameters());
      // Allocate in old space because it can be invoked in background
      // optimizing compilation.
      *error_message = String::New(message_buffer, Heap::kOld);
    }
    return false;  // Too many named arguments.
  }
  // Positional arguments are whatever is left after the named ones.
  const intptr_t num_pos_args = num_arguments - num_named_arguments;
  const intptr_t num_opt_pos_params = NumOptionalPositionalParameters();
  const intptr_t num_pos_params = num_fixed_parameters() + num_opt_pos_params;
  if (num_pos_args > num_pos_params) {
    if (error_message != nullptr) {
      const intptr_t kMessageBufferSize = 64;
      char message_buffer[kMessageBufferSize];
      // Hide implicit parameters to the user.
      const intptr_t num_hidden_params = NumImplicitParameters();
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd "%s passed, %s%" Pd " expected",
                     num_pos_args - num_hidden_params,
                     num_opt_pos_params > 0 ? " positional" : "",
                     num_opt_pos_params > 0 ? "at most " : "",
                     num_pos_params - num_hidden_params);
      // Allocate in old space because it can be invoked in background
      // optimizing compilation.
      *error_message = String::New(message_buffer, Heap::kOld);
    }
    return false;  // Too many fixed and/or positional arguments.
  }
  if (num_pos_args < num_fixed_parameters()) {
    if (error_message != nullptr) {
      const intptr_t kMessageBufferSize = 64;
      char message_buffer[kMessageBufferSize];
      // Hide implicit parameters to the user.
      const intptr_t num_hidden_params = NumImplicitParameters();
      Utils::SNPrint(message_buffer, kMessageBufferSize,
                     "%" Pd "%s passed, %s%" Pd " expected",
                     num_pos_args - num_hidden_params,
                     num_opt_pos_params > 0 ? " positional" : "",
                     num_opt_pos_params > 0 ? "at least " : "",
                     num_fixed_parameters() - num_hidden_params);
      // Allocate in old space because it can be invoked in background
      // optimizing compilation.
      *error_message = String::New(message_buffer, Heap::kOld);
    }
    return false;  // Too few fixed and/or positional arguments.
  }
  return true;
}
9322
9323bool Function::AreValidArguments(intptr_t num_type_arguments,
9324 intptr_t num_arguments,
9325 const Array& argument_names,
9326 String* error_message) const {
9327 const Array& args_desc_array = Array::Handle(ArgumentsDescriptor::NewBoxed(
9328 num_type_arguments, num_arguments, argument_names, Heap::kNew));
9329 ArgumentsDescriptor args_desc(args_desc_array);
9330 return AreValidArguments(args_desc, error_message);
9331}
9332
// Validates a concrete call site against this function's signature:
// 1) argument counts are checked via AreValidArgumentCounts;
// 2) every named argument in the descriptor must match the name of one of
//    this function's named (trailing) parameters;
// 3) every parameter marked required must be supplied by name.
// On failure, fills *error_message (when non-null) and returns false.
 9334                                  String* error_message) const {
 9335   const intptr_t num_type_arguments = args_desc.TypeArgsLen();
 9336   const intptr_t num_arguments = args_desc.Count();
 9337   const intptr_t num_named_arguments = args_desc.NamedCount();
 9338 
 9339   if (!AreValidArgumentCounts(num_type_arguments, num_arguments,
 9340                               num_named_arguments, error_message)) {
 9341     return false;
 9342   }
 9343   // Verify that all argument names are valid parameter names.
 9344   Thread* thread = Thread::Current();
 9345   Zone* zone = thread->zone();
 9346   String& argument_name = String::Handle(zone);
 9347   String& parameter_name = String::Handle(zone);
 9348   const intptr_t num_positional_args = num_arguments - num_named_arguments;
 9349   const intptr_t num_parameters = NumParameters();
 9350   for (intptr_t i = 0; i < num_named_arguments; i++) {
 9351     argument_name = args_desc.NameAt(i);
 9352     ASSERT(argument_name.IsSymbol());
 9353     bool found = false;
       // Named parameters are stored after all positional parameters, so the
       // scan starts at num_positional_args.
 9354     for (intptr_t j = num_positional_args; j < num_parameters; j++) {
 9355       parameter_name = ParameterNameAt(j);
 9356       ASSERT(parameter_name.IsSymbol());
 9357       if (argument_name.Equals(parameter_name)) {
 9358         found = true;
 9359         break;
 9360       }
 9361     }
 9362     if (!found) {
 9363       if (error_message != nullptr) {
 9364         const intptr_t kMessageBufferSize = 64;
 9365         char message_buffer[kMessageBufferSize];
 9366         Utils::SNPrint(message_buffer, kMessageBufferSize,
 9367                        "no optional formal parameter named '%s'",
 9368                        argument_name.ToCString());
 9369         *error_message = String::New(message_buffer);
 9370       }
 9371       return false;
 9372     }
 9373   }
 9374   // Verify that all required named parameters are filled.
 9375   for (intptr_t j = num_parameters - NumOptionalNamedParameters();
 9376        j < num_parameters; j++) {
 9377     if (IsRequiredAt(j)) {
 9378       parameter_name = ParameterNameAt(j);
 9379       ASSERT(parameter_name.IsSymbol());
 9380       bool found = false;
 9381       for (intptr_t i = 0; i < num_named_arguments; i++) {
 9382         argument_name = args_desc.NameAt(i);
 9383         ASSERT(argument_name.IsSymbol());
 9384         if (argument_name.Equals(parameter_name)) {
 9385           found = true;
 9386           break;
 9387         }
 9388       }
 9389       if (!found) {
 9390         if (error_message != nullptr) {
 9391           const intptr_t kMessageBufferSize = 64;
 9392           char message_buffer[kMessageBufferSize];
 9393           Utils::SNPrint(message_buffer, kMessageBufferSize,
 9394                          "missing required named parameter '%s'",
 9395                          parameter_name.ToCString());
 9396           *error_message = String::New(message_buffer);
 9397         }
 9398         return false;
 9399       }
 9400     }
 9401   }
 9402   return true;
 9403 }
9404
9405 // Retrieves the function type arguments, if any. This could be explicitly
9406 // passed type from the arguments array, delayed type arguments in closures,
9407 // or instantiated bounds for the type parameters if no other source for
9408 // function type arguments are found.
// Returns the full type-argument vector (parent type arguments prepended to
// this function's own) to be passed to a call of 'function'.
9409 static TypeArgumentsPtr RetrieveFunctionTypeArguments(
9410     Thread* thread,
9411     Zone* zone,
9412     const Function& function,
9413     const Instance& receiver,
9414     const TypeArguments& instantiator_type_args,
9415     const Array& args,
9416     const ArgumentsDescriptor& args_desc) {
9417   ASSERT(!function.IsNull());
9418 
9419   const intptr_t kNumCurrentTypeArgs = function.NumTypeParameters();
9420   const intptr_t kNumParentTypeArgs = function.NumParentTypeArguments();
9421   const intptr_t kNumTypeArgs = kNumCurrentTypeArgs + kNumParentTypeArgs;
9422   // Non-generic functions don't receive type arguments.
9423   if (kNumTypeArgs == 0) return Object::empty_type_arguments().ptr();
9424   // Closure functions require that the receiver be provided (and is a closure).
9425   ASSERT(!function.IsClosureFunction() || receiver.IsClosure());
9426 
9427   // Only closure functions should have possibly generic parents.
9428   ASSERT(function.IsClosureFunction() || kNumParentTypeArgs == 0);
     // For closures the enclosing functions' type arguments are captured on
     // the closure object itself; otherwise there are none.
9429   const auto& parent_type_args =
9430       function.IsClosureFunction()
9432                 zone, Closure::Cast(receiver).function_type_arguments())
9433           : Object::empty_type_arguments();
9434   // We don't try to instantiate the parent type parameters to their bounds
9435   // if not provided or check any closed-over type arguments against the parent
9436   // type parameter bounds (since they have been type checked already).
9437   if (kNumCurrentTypeArgs == 0) return parent_type_args.ptr();
9438 
9439   auto& function_type_args = TypeArguments::Handle(zone);
9440   // First check for delayed type arguments before using either provided or
9441   // default type arguments.
9442   bool has_delayed_type_args = false;
9443   if (function.IsClosureFunction()) {
9444     const auto& closure = Closure::Cast(receiver);
9445     function_type_args = closure.delayed_type_arguments();
9446     has_delayed_type_args =
9447         function_type_args.ptr() != Object::empty_type_arguments().ptr();
9448   }
9449 
9450   if (args_desc.TypeArgsLen() > 0) {
9451     // We should never end up here when the receiver is a closure with delayed
9452     // type arguments unless this dynamically called closure function was
9453     // retrieved directly from the closure instead of going through
9454     // DartEntry::ResolveCallable, which appropriately checks for this case.
9455     ASSERT(!has_delayed_type_args);
       // Explicit type arguments travel in slot 0 of the arguments array.
9456     function_type_args ^= args.At(0);
9457   } else if (!has_delayed_type_args) {
9458     // We have no explicitly provided function type arguments, so instantiate
9459     // the type parameters to bounds or replace as appropriate.
9460     function_type_args = function.DefaultTypeArguments(zone);
9461     auto const mode =
9462         function.IsClosureFunction()
9463             ? function.default_type_arguments_instantiation_mode()
9464             : function_type_args.GetInstantiationMode(zone, &function);
     // Dispatch on how the default type arguments must be instantiated.
9465     switch (mode) {
9467       // Nothing left to do.
9468       break;
9470       function_type_args = function_type_args.InstantiateAndCanonicalizeFrom(
9471           instantiator_type_args, parent_type_args);
9472       break;
9474       function_type_args = instantiator_type_args.ptr();
9475       break;
9477       function_type_args = parent_type_args.ptr();
9478       break;
9479     }
9480   }
9481 
  // Prepend the parent function type arguments to form the full vector.
9482   return function_type_args.Prepend(zone, parent_type_args, kNumParentTypeArgs,
9483                                     kNumTypeArgs);
9484 }
9485
9486 // Retrieves the instantiator type arguments, if any, from the receiver.
// Closures carry their instantiator type arguments directly; otherwise the
// type arguments are read off a non-null receiver whose class is generic.
// Falls back to the empty type argument vector.
9488     Zone* zone,
9489     const Function& function,
9490     const Instance& receiver) {
9491   if (function.IsClosureFunction()) {
9492     ASSERT(receiver.IsClosure());
9493     const auto& closure = Closure::Cast(receiver);
9494     return closure.instantiator_type_arguments();
9495   }
9496   if (!receiver.IsNull()) {
9497     const auto& cls = Class::Handle(zone, receiver.clazz());
9498     if (cls.NumTypeArguments() > 0) {
9499       return receiver.GetTypeArguments();
9500     }
9501   }
9502   return Object::empty_type_arguments().ptr();
9503 }
9504
// Two-argument entry point: derives the instantiator type arguments from the
// receiver (when present) and delegates to the three-argument overload.
9506     const Array& args,
9507     const ArgumentsDescriptor& args_desc) const {
9508 #if defined(DART_PRECOMPILED_RUNTIME)
9509   if (signature() == FunctionType::null()) {
9510     // Precompiler deleted signature because of missing entry point pragma.
9511     return EntryPointMemberInvocationError(*this);
9512   }
9513 #endif
9514   Thread* thread = Thread::Current();
9515   Zone* zone = thread->zone();
9516 
  // The receiver (if any) sits at the first argument slot after the
  // (optional) type-arguments slot.
9517   auto& receiver = Instance::Handle(zone);
9518   if (IsClosureFunction() || HasThisParameter()) {
9519     receiver ^= args.At(args_desc.FirstArgIndex());
9520   }
9521   const auto& instantiator_type_arguments = TypeArguments::Handle(
9522       zone, RetrieveInstantiatorTypeArguments(zone, *this, receiver));
9523   return Function::DoArgumentTypesMatch(args, args_desc,
9524                                         instantiator_type_arguments);
9525 }
9526
// Three-argument entry point: derives the function type arguments (explicit,
// delayed, or defaults) and delegates to the four-argument overload.
9528     const Array& args,
9529     const ArgumentsDescriptor& args_desc,
9530     const TypeArguments& instantiator_type_arguments) const {
9531 #if defined(DART_PRECOMPILED_RUNTIME)
9532   if (signature() == FunctionType::null()) {
9533     // Precompiler deleted signature because of missing entry point pragma.
9534     return EntryPointMemberInvocationError(*this);
9535   }
9536 #endif
9537   Thread* thread = Thread::Current();
9538   Zone* zone = thread->zone();
9539 
9540   auto& receiver = Instance::Handle(zone);
9541   if (IsClosureFunction() || HasThisParameter()) {
9542     receiver ^= args.At(args_desc.FirstArgIndex());
9543   }
9544 
9545   const auto& function_type_arguments = TypeArguments::Handle(
9546       zone, RetrieveFunctionTypeArguments(thread, zone, *this, receiver,
9547                                           instantiator_type_arguments, args,
9548                                           args_desc));
9550       args, args_desc, instantiator_type_arguments, function_type_arguments);
9551 }
9552
// Core dynamic-call type check: verifies the provided function type arguments
// against the non-covariant type-parameter bounds, then checks each explicit
// positional and named argument against its declared parameter type.
// Returns Error::null() on success, or the error raised by ThrowTypeError.
9554     const Array& args,
9555     const ArgumentsDescriptor& args_desc,
9556     const TypeArguments& instantiator_type_arguments,
9557     const TypeArguments& function_type_arguments) const {
9558 #if defined(DART_PRECOMPILED_RUNTIME)
9559   if (signature() == FunctionType::null()) {
9560     // Precompiler deleted signature because of missing entry point pragma.
9561     return EntryPointMemberInvocationError(*this);
9562   }
9563 #endif
9564   Thread* thread = Thread::Current();
9565   Zone* zone = thread->zone();
9566 
9567   // Perform any non-covariant bounds checks on the provided function type
9568   // arguments to make sure they are appropriate subtypes of the bounds.
9569   const intptr_t kNumLocalTypeArgs = NumTypeParameters();
9570   if (kNumLocalTypeArgs > 0) {
9571     const intptr_t kNumParentTypeArgs = NumParentTypeArguments();
9572     ASSERT(function_type_arguments.HasCount(kNumParentTypeArgs +
9573                                             kNumLocalTypeArgs));
9574     const auto& params = TypeParameters::Handle(zone, type_parameters());
9575     // No checks are needed if all bounds are dynamic.
9576     if (!params.AllDynamicBounds()) {
9577       auto& param = AbstractType::Handle(zone);
9578       auto& bound = AbstractType::Handle(zone);
9579       for (intptr_t i = 0; i < kNumLocalTypeArgs; i++) {
9580         bound = params.BoundAt(i);
9581         // Only perform non-covariant checks where the bound is not
9582         // the top type.
9583         if (params.IsGenericCovariantImplAt(i) ||
9584             bound.IsTopTypeForSubtyping()) {
9585           continue;
9586         }
9587         param = TypeParameterAt(i);
9589             &param, &bound, instantiator_type_arguments,
9590             function_type_arguments)) {
9591           const auto& names = Array::Handle(zone, params.names());
9592           auto& name = String::Handle(zone);
9593           name ^= names.At(i);
9594           return Error::RawCast(
9595               ThrowTypeError(token_pos(), param, bound, name));
9596         }
9597       }
9598     }
9599   } else {
9600     ASSERT(function_type_arguments.HasCount(NumParentTypeArguments()));
9601   }
9602 
9604   Instance& argument = Instance::Handle(zone);
9605 
  // Shared assignability predicate: top types always pass; null arguments are
  // checked with the null-assignability rules; otherwise a normal
  // IsAssignableTo check is performed.
9606   auto check_argument = [](const Instance& argument, const AbstractType& type,
9607                            const TypeArguments& instantiator_type_args,
9608                            const TypeArguments& function_type_args) -> bool {
9609     // If the argument type is the top type, no need to check.
9610     if (type.IsTopTypeForSubtyping()) return true;
9611     if (argument.IsNull()) {
9612       return Instance::NullIsAssignableTo(type, instantiator_type_args,
9613                                           function_type_args);
9614     }
9615     return argument.IsAssignableTo(type, instantiator_type_args,
9616                                    function_type_args);
9617   };
9618 
9619   // Check types of the provided arguments against the expected parameter types.
9620   const intptr_t arg_offset = args_desc.FirstArgIndex();
9621   // Only check explicit arguments.
9622   const intptr_t arg_start = arg_offset + NumImplicitParameters();
9623   const intptr_t end_positional_args = arg_offset + args_desc.PositionalCount();
9624   for (intptr_t arg_index = arg_start; arg_index < end_positional_args;
9625        ++arg_index) {
9626     argument ^= args.At(arg_index);
9627     // Adjust for type arguments when they're present.
9628     const intptr_t param_index = arg_index - arg_offset;
9629     type = ParameterTypeAt(param_index);
9630     if (!check_argument(argument, type, instantiator_type_arguments,
9631                         function_type_arguments)) {
9632       auto& name = String::Handle(zone, ParameterNameAt(param_index));
       // Instantiate the expected type so the error message shows the
       // concrete type rather than a free type parameter.
9633       if (!type.IsInstantiated()) {
9634         type =
9635             type.InstantiateFrom(instantiator_type_arguments,
9636                                  function_type_arguments, kAllFree, Heap::kNew);
9637       }
9638       return ThrowTypeError(token_pos(), argument, type, name);
9639     }
9640   }
9641 
9642   const intptr_t num_named_arguments = args_desc.NamedCount();
9643   if (num_named_arguments == 0) {
9644     return Error::null();
9645   }
9646 
9647   const int num_parameters = NumParameters();
9648   const int num_fixed_params = num_fixed_parameters();
9649 
9650   String& argument_name = String::Handle(zone);
9651   String& parameter_name = String::Handle(zone);
9652 
9653   // Check types of named arguments against expected parameter type.
9654   for (intptr_t named_index = 0; named_index < num_named_arguments;
9655        named_index++) {
9656     argument_name = args_desc.NameAt(named_index);
9657     ASSERT(argument_name.IsSymbol());
9658     argument ^= args.At(arg_offset + args_desc.PositionAt(named_index));
9659 
9660     // Try to find the named parameter that matches the provided argument.
9661     // Even when annotated with @required, named parameters are still stored
9662     // as if they were optional and so come after the fixed parameters.
9663     // Currently O(n^2) as there's no guarantee from either the CFE or the
9664     // VM that named parameters and named arguments are sorted in the same way.
9665     intptr_t param_index = num_fixed_params;
9666     for (; param_index < num_parameters; param_index++) {
9667       parameter_name = ParameterNameAt(param_index);
9668       ASSERT(parameter_name.IsSymbol());
9669 
9670       if (!parameter_name.Equals(argument_name)) continue;
9671 
9672       type = ParameterTypeAt(param_index);
9673       if (!check_argument(argument, type, instantiator_type_arguments,
9674                           function_type_arguments)) {
9675         auto& name = String::Handle(zone, ParameterNameAt(param_index));
9676         if (!type.IsInstantiated()) {
9677           type = type.InstantiateFrom(instantiator_type_arguments,
9678                                       function_type_arguments, kAllFree,
9679                                       Heap::kNew);
9680         }
9681         return ThrowTypeError(token_pos(), argument, type, name);
9682       }
9683       break;
9684     }
9685     // Only should fail if AreValidArguments returns a false positive.
9686     ASSERT(param_index < num_parameters);
9687   }
9688   return Error::null();
9689 }
9690
9691// Helper allocating a C string buffer in the zone, printing the fully qualified
9692// name of a function in it, and replacing ':' by '_' to make sure the
9693 // constructed name is a valid C++ identifier for debugging purposes.
9694// Set 'chars' to allocated buffer and return number of written characters.
9695
9700
// Recursively builds the fully qualified name of 'function' into a
// zone-allocated buffer (*chars): walks up parent_function() until the
// outermost function, prefixes library and class names as requested, then
// appends this function's name and replaces every ':' with '_'.
// Returns the number of characters written so far (used by the recursion to
// append at the right offset); 'reserve_len' accumulates the space needed.
9702     const Function& function,
9703     char** chars,
9704     intptr_t reserve_len,
9705     bool with_lib,
9706     QualifiedFunctionLibKind lib_kind) {
9707   Zone* zone = Thread::Current()->zone();
9708   const char* name = String::Handle(zone, function.name()).ToCString();
  // Inner (nested) names are joined with '_' to the already-reserved prefix.
9709   const char* function_format = (reserve_len == 0) ? "%s" : "%s_";
  // SNPrint with a null buffer only measures the required length.
9710   reserve_len += Utils::SNPrint(nullptr, 0, function_format, name);
9711   const Function& parent = Function::Handle(zone, function.parent_function());
9712   intptr_t written = 0;
9713   if (parent.IsNull()) {
     // Outermost function: allocate the buffer and emit the library/class
     // prefix first.
9714     const Class& function_class = Class::Handle(zone, function.Owner());
9715     ASSERT(!function_class.IsNull());
9716     const char* class_name =
9717         String::Handle(zone, function_class.Name()).ToCString();
9718     ASSERT(class_name != nullptr);
9719     const char* library_name = nullptr;
9720     const char* lib_class_format = nullptr;
9721     if (with_lib) {
9722       const Library& library = Library::Handle(zone, function_class.library());
9723       ASSERT(!library.IsNull());
       // lib_kind selects whether the library URL or its name is used.
9724       switch (lib_kind) {
9726           library_name = String::Handle(zone, library.url()).ToCString();
9727           break;
9729           library_name = String::Handle(zone, library.name()).ToCString();
9730           break;
9731         default:
9732           UNREACHABLE();
9733       }
9734       ASSERT(library_name != nullptr);
9735       lib_class_format = (library_name[0] == '\0') ? "%s%s_" : "%s_%s_";
9736     } else {
9737       library_name = "";
9738       lib_class_format = "%s%s.";
9739     }
9740     reserve_len +=
9741         Utils::SNPrint(nullptr, 0, lib_class_format, library_name, class_name);
9742     ASSERT(chars != nullptr);
9743     *chars = zone->Alloc<char>(reserve_len + 1);
9744     written = Utils::SNPrint(*chars, reserve_len + 1, lib_class_format,
9745                              library_name, class_name);
9746   } else {
     // Nested function: let the parent allocate the buffer and write its
     // own prefix first.
9747     written = ConstructFunctionFullyQualifiedCString(parent, chars, reserve_len,
9748                                                      with_lib, lib_kind);
9749   }
9750   ASSERT(*chars != nullptr);
9751   char* next = *chars + written;
9752   written += Utils::SNPrint(next, reserve_len + 1, function_format, name);
9753   // Replace ":" with "_".
  // Only scans from 'next', i.e. the part appended by this frame.
9754   while (true) {
9755     next = strchr(next, ':');
9756     if (next == nullptr) break;
9757     *next = '_';
9758   }
9759   return written;
9760 }
9761
// Returns the zone-allocated fully qualified name, including the library
// prefix, via ConstructFunctionFullyQualifiedCString.
9763   char* chars = nullptr;
9764   ConstructFunctionFullyQualifiedCString(*this, &chars, 0, true,
9766   return chars;
9767 }
9768
// Same as ToFullyQualifiedCString but with a different library-prefix kind
// (passed as the trailing lib_kind argument).
9770   char* chars = nullptr;
9771   ConstructFunctionFullyQualifiedCString(*this, &chars, 0, true,
9773   return chars;
9774 }
9775
// Returns the zone-allocated qualified name WITHOUT a library prefix
// (with_lib == false).
9776 const char* Function::ToQualifiedCString() const {
9777   char* chars = nullptr;
9778   ConstructFunctionFullyQualifiedCString(*this, &chars, 0, false,
9780   return chars;
9781 }
9782
// Instantiates this (generic) function type: builds a new FunctionType whose
// type-parameter bounds/defaults, result type and parameter types have been
// instantiated from the given instantiator/function type arguments.
// With kCurrentAndEnclosingFree the type parameters themselves are deleted.
// Returns FunctionType::null() if any nested instantiation fails (dead code
// detected during optimizing compilation). Result is NOT canonicalized.
9784     const TypeArguments& instantiator_type_arguments,
9785     const TypeArguments& function_type_arguments,
9786     intptr_t num_free_fun_type_params,
9787     Heap::Space space,
9788     FunctionTypeMapping* function_type_mapping,
9789     intptr_t num_parent_type_args_adjustment) const {
9791   Zone* zone = Thread::Current()->zone();
9792   const intptr_t num_parent_type_args = NumParentTypeArguments();
9793   bool delete_type_parameters = false;
9794   if (num_free_fun_type_params == kCurrentAndEnclosingFree) {
9795     // See the comment on kCurrentAndEnclosingFree to understand why we don't
9796     // adjust 'num_free_fun_type_params' downward in this case.
9797     num_free_fun_type_params = kAllFree;
9798     delete_type_parameters = true;
9799   } else {
9800     ASSERT(!IsInstantiated(kAny, num_free_fun_type_params));
9801     // We only consider the function type parameters declared by the parents
9802     // of this signature function as free.
9803     if (num_parent_type_args < num_free_fun_type_params) {
9804       num_free_fun_type_params = num_parent_type_args;
9805     }
9806   }
9807 
9808   // The number of parent type parameters that remain uninstantiated.
9809   const intptr_t remaining_parent_type_params =
9810       num_free_fun_type_params < num_parent_type_args
9811           ? num_parent_type_args - num_free_fun_type_params
9812           : 0;
9813 
9814   // Adjust number of parent type arguments for all nested substituted types.
9815   num_parent_type_args_adjustment =
9816       remaining_parent_type_params +
9817       (delete_type_parameters ? 0 : NumTypeParameters());
9818 
9820       FunctionType::New(remaining_parent_type_params, nullability(), space));
9822 
  // Record the old->new signature mapping so nested type parameters can be
  // re-pointed at the new signature.
9823   FunctionTypeMapping scope(zone, &function_type_mapping, *this, sig);
9824 
9825   // Copy the type parameters and instantiate their bounds and defaults.
9826   if (!delete_type_parameters) {
9827     const TypeParameters& type_params =
9828         TypeParameters::Handle(zone, type_parameters());
9829     if (!type_params.IsNull()) {
9830       const TypeParameters& sig_type_params =
9832       // No need to set names that are ignored in a signature, however, the
9833       // length of the names array defines the number of type parameters.
9834       sig_type_params.set_names(Array::Handle(zone, type_params.names()));
9835       sig_type_params.set_flags(Array::Handle(zone, type_params.flags()));
9836       sig.SetTypeParameters(sig_type_params);
9837       TypeArguments& type_args = TypeArguments::Handle(zone);
9838       type_args = type_params.bounds();
9839       if (!type_args.IsNull() && !type_args.IsInstantiated()) {
9840         type_args = type_args.InstantiateFrom(
9841             instantiator_type_arguments, function_type_arguments,
9842             num_free_fun_type_params, space, function_type_mapping,
9843             num_parent_type_args_adjustment);
9844       }
9845       sig_type_params.set_bounds(type_args);
9846       type_args = type_params.defaults();
9847       if (!type_args.IsNull() && !type_args.IsInstantiated()) {
9848         type_args = type_args.InstantiateFrom(
9849             instantiator_type_arguments, function_type_arguments,
9850             num_free_fun_type_params, space, function_type_mapping,
9851             num_parent_type_args_adjustment);
9852       }
9853       sig_type_params.set_defaults(type_args);
9854     }
9855   }
9856 
9857   type = result_type();
9858   if (!type.IsInstantiated()) {
9859     type = type.InstantiateFrom(
9860         instantiator_type_arguments, function_type_arguments,
9861         num_free_fun_type_params, space, function_type_mapping,
9862         num_parent_type_args_adjustment);
9863     // A returned null type indicates a failed instantiation in dead code that
9864     // must be propagated up to the caller, the optimizing compiler.
9865     if (type.IsNull()) {
9866       return FunctionType::null();
9867     }
9868   }
9869   sig.set_result_type(type);
9870   const intptr_t num_params = NumParameters();
9871   sig.set_num_implicit_parameters(num_implicit_parameters());
9872   sig.set_num_fixed_parameters(num_fixed_parameters());
9873   sig.SetNumOptionalParameters(NumOptionalParameters(),
9874                                HasOptionalPositionalParameters());
9875   sig.set_parameter_types(Array::Handle(Array::New(num_params, space)));
9876   for (intptr_t i = 0; i < num_params; i++) {
9877     type = ParameterTypeAt(i);
9878     if (!type.IsInstantiated()) {
9879       type = type.InstantiateFrom(
9880           instantiator_type_arguments, function_type_arguments,
9881           num_free_fun_type_params, space, function_type_mapping,
9882           num_parent_type_args_adjustment);
9883       // A returned null type indicates a failed instantiation in dead code that
9884       // must be propagated up to the caller, the optimizing compiler.
9885       if (type.IsNull()) {
9886         return FunctionType::null();
9887       }
9888     }
9889     sig.SetParameterTypeAt(i, type);
9890   }
9891   sig.set_named_parameter_names(Array::Handle(zone, named_parameter_names()));
9892 
9893   if (delete_type_parameters) {
9894     ASSERT(sig.IsInstantiated(kFunctions));
9895   }
9896 
9897   sig.SetIsFinalized();
9898 
9899   // Canonicalization is not part of instantiation.
9900   return sig.ptr();
9901 }
9902
// Produces a copy of this function type with the number of parent type
// arguments increased by 'num_parent_type_args_adjustment', recursively
// rewriting bounds, defaults, result type and parameter types so that nested
// type parameter indices stay consistent.
9904     intptr_t num_parent_type_args_adjustment,
9905     intptr_t num_free_fun_type_params,
9906     Heap::Space space,
9907     FunctionTypeMapping* function_type_mapping) const {
9908   ASSERT(num_parent_type_args_adjustment >= 0);
9910   Zone* zone = Thread::Current()->zone();
9911 
9912   const intptr_t old_num_parent_type_args = NumParentTypeArguments();
9913   // From now on, adjust all type parameter types
9914   // which belong to this or nested function types.
9915   if (num_free_fun_type_params > old_num_parent_type_args) {
9916     num_free_fun_type_params = old_num_parent_type_args;
9917   }
9918 
9920       zone, FunctionType::New(
9921                 NumParentTypeArguments() + num_parent_type_args_adjustment,
9922                 nullability(), space));
9924 
  // Record the old->new signature mapping for nested type parameters.
9925   FunctionTypeMapping scope(zone, &function_type_mapping, *this, new_type);
9926 
9927   const TypeParameters& type_params =
9928       TypeParameters::Handle(zone, type_parameters());
9929   if (!type_params.IsNull()) {
9930     const TypeParameters& new_type_params =
9932     // No need to set names that are ignored in a signature, however, the
9933     // length of the names array defines the number of type parameters.
9934     new_type_params.set_names(Array::Handle(zone, type_params.names()));
9935     new_type_params.set_flags(Array::Handle(zone, type_params.flags()));
9936     TypeArguments& type_args = TypeArguments::Handle(zone);
9937     type_args = type_params.bounds();
9938     if (!type_args.IsNull()) {
9939       type_args = type_args.UpdateFunctionTypes(num_parent_type_args_adjustment,
9940                                                 num_free_fun_type_params, space,
9941                                                 function_type_mapping);
9942     }
9943     new_type_params.set_bounds(type_args);
9944     type_args = type_params.defaults();
9945     if (!type_args.IsNull()) {
9946       type_args = type_args.UpdateFunctionTypes(num_parent_type_args_adjustment,
9947                                                 num_free_fun_type_params, space,
9948                                                 function_type_mapping);
9949     }
9950     new_type_params.set_defaults(type_args);
9951     new_type.SetTypeParameters(new_type_params);
9952   }
9953 
9954   type = result_type();
9955   type = type.UpdateFunctionTypes(num_parent_type_args_adjustment,
9956                                   num_free_fun_type_params, space,
9957                                   function_type_mapping);
9958   new_type.set_result_type(type);
9959 
  // Copy the parameter shape (implicit/fixed/optional counts) unchanged.
9960   const intptr_t num_params = NumParameters();
9961   new_type.set_num_implicit_parameters(num_implicit_parameters());
9962   new_type.set_num_fixed_parameters(num_fixed_parameters());
9963   new_type.SetNumOptionalParameters(NumOptionalParameters(),
9964                                     HasOptionalPositionalParameters());
9965   new_type.set_parameter_types(Array::Handle(Array::New(num_params, space)));
9966   for (intptr_t i = 0; i < num_params; i++) {
9967     type = ParameterTypeAt(i);
9968     type = type.UpdateFunctionTypes(num_parent_type_args_adjustment,
9969                                     num_free_fun_type_params, space,
9970                                     function_type_mapping);
9971     new_type.SetParameterTypeAt(i, type);
9972   }
9974                                       Array::Handle(zone, named_parameter_names()));
9975   new_type.SetIsFinalized();
9976 
9977   return new_type.ptr();
9978 }
9979
9980 // Checks if the type of the specified parameter of this signature is a
9981 // supertype of the type of the specified parameter of the other signature
9982 // (i.e. check parameter contravariance).
9983 // Note that types marked as covariant are already dealt with in the front-end.
9985     intptr_t parameter_position,
9986     const FunctionType& other,
9987     intptr_t other_parameter_position,
9988     Heap::Space space,
9989     FunctionTypeMapping* function_type_equivalence) const {
9990   const AbstractType& param_type =
9991       AbstractType::Handle(ParameterTypeAt(parameter_position));
  // A top-type parameter accepts anything, so the check trivially passes.
9992   if (param_type.IsTopTypeForSubtyping()) {
9993     return true;
9994   }
9995   const AbstractType& other_param_type =
9996       AbstractType::Handle(other.ParameterTypeAt(other_parameter_position));
  // Contravariance: the OTHER parameter type must be a subtype of ours.
9997   return other_param_type.IsSubtypeOf(param_type, space,
9998                                       function_type_equivalence);
9999 }
10000
// Compares the generic shape of two function types: same number of type
// parameters, and bounds that are mutual subtypes (in a subtype test) or
// equivalent (otherwise). For canonical equality, default type arguments and
// the type-parameter flags must match as well.
10002     const FunctionType& other,
10003     TypeEquality kind,
10004     FunctionTypeMapping* function_type_equivalence) const {
10005   Zone* const zone = Thread::Current()->zone();
10007       " FunctionType::HasSameTypeParametersAndBounds(%s, %s)\n", ToCString(),
10008       other.ToCString());
10009 
10010   const intptr_t num_type_params = NumTypeParameters();
10011   if (num_type_params != other.NumTypeParameters()) {
10013         " - result: false (number of type parameters)\n");
10014     return false;
10015   }
10016   if (num_type_params > 0) {
10017     const TypeParameters& type_params =
10018         TypeParameters::Handle(zone, type_parameters());
10019     ASSERT(!type_params.IsNull());
10020     const TypeParameters& other_type_params =
10022     ASSERT(!other_type_params.IsNull());
10023     if (kind == TypeEquality::kInSubtypeTest) {
       // In a subtype test, bounds only need to be mutual subtypes; skip the
       // work entirely when both sides have all-dynamic bounds.
10024       if (!type_params.AllDynamicBounds() ||
10025           !other_type_params.AllDynamicBounds()) {
10026         AbstractType& bound = AbstractType::Handle(zone);
10027         AbstractType& other_bound = AbstractType::Handle(zone);
10028         for (intptr_t i = 0; i < num_type_params; i++) {
10029           bound = type_params.BoundAt(i);
10030           other_bound = other_type_params.BoundAt(i);
10031           // Bounds that are mutual subtypes are considered equal.
10032           if (!bound.IsSubtypeOf(other_bound, Heap::kOld,
10033                                  function_type_equivalence) ||
10034               !other_bound.IsSubtypeOf(bound, Heap::kOld,
10035                                        function_type_equivalence)) {
10037                 " - result: false (bounds are not mutual subtypes)\n");
10038             return false;
10039           }
10040         }
10041       }
10042     } else {
       // For (canonical) equality the full bound vectors must be equivalent.
10043       if (NumParentTypeArguments() != other.NumParentTypeArguments()) {
10045             " - result: false (mismatch in number of type arguments)\n");
10046         return false;
10047       }
10048       const TypeArguments& bounds =
10049           TypeArguments::Handle(zone, type_params.bounds());
10050       const TypeArguments& other_bounds =
10051           TypeArguments::Handle(zone, other_type_params.bounds());
10052       if (!bounds.IsEquivalent(other_bounds, kind, function_type_equivalence)) {
10054             " - result: false (bounds are not equivalent)\n");
10055         return false;
10056       }
10057       if (kind == TypeEquality::kCanonical) {
10058         // Compare default arguments.
10059         const TypeArguments& defaults =
10060             TypeArguments::Handle(zone, type_params.defaults());
10061         const TypeArguments& other_defaults =
10062             TypeArguments::Handle(zone, other_type_params.defaults());
10063         if (defaults.IsNull()) {
10064           if (!other_defaults.IsNull()) {
10066                 " - result: false (mismatch in defaults)\n");
10067             return false;
10068           }
10069         } else if (!defaults.IsEquivalent(other_defaults, kind,
10070                                           function_type_equivalence)) {
10072               " - result: false (default types are not equivalent)\n");
10073           return false;
10074         }
10075       }
10076     }
10077     if (kind != TypeEquality::kInSubtypeTest) {
10078       // Compare flags (IsGenericCovariantImpl).
10079       if (!Array::Equals(type_params.flags(), other_type_params.flags())) {
10080         TRACE_TYPE_CHECKS_VERBOSE(" - result: false (flags are not equal)\n");
10081         return false;
10082       }
10083     }
10084   }
10085   TRACE_TYPE_CHECKS_VERBOSE(" - result: true\n");
10086   return true;
10087 }
10088
10090 const FunctionType& other,
10091 Heap::Space space,
10092 FunctionTypeMapping* function_type_equivalence) const {
10093 TRACE_TYPE_CHECKS_VERBOSE(" FunctionType::IsSubtypeOf(%s, %s)\n",
10094 ToCString(), other.ToCString());
10095 const intptr_t num_fixed_params = num_fixed_parameters();
10096 const intptr_t num_opt_pos_params = NumOptionalPositionalParameters();
10097 const intptr_t num_opt_named_params = NumOptionalNamedParameters();
10098 const intptr_t other_num_fixed_params = other.num_fixed_parameters();
10099 const intptr_t other_num_opt_pos_params =
10101 const intptr_t other_num_opt_named_params =
10103 // This signature requires the same arguments or less and accepts the same
10104 // arguments or more. We can ignore implicit parameters.
10105 const intptr_t num_ignored_params = num_implicit_parameters();
10106 const intptr_t other_num_ignored_params = other.num_implicit_parameters();
10107 if (((num_fixed_params - num_ignored_params) >
10108 (other_num_fixed_params - other_num_ignored_params)) ||
10109 ((num_fixed_params - num_ignored_params + num_opt_pos_params) <
10110 (other_num_fixed_params - other_num_ignored_params +
10111 other_num_opt_pos_params)) ||
10112 (num_opt_named_params < other_num_opt_named_params)) {
10114 " - result: false (mismatch in number of parameters)\n");
10115 return false;
10116 }
10117 Thread* thread = Thread::Current();
10118 Zone* zone = thread->zone();
10119 FunctionTypeMapping scope(zone, &function_type_equivalence, *this, other);
10120
10121 // Check the type parameters and bounds of generic functions.
10122 if (!HasSameTypeParametersAndBounds(other, TypeEquality::kInSubtypeTest,
10123 function_type_equivalence)) {
10125 " - result: false (mismatch in type parameters)\n");
10126 return false;
10127 }
10128 // Check the result type.
10129 const AbstractType& other_res_type =
10130 AbstractType::Handle(zone, other.result_type());
10131 // 'void Function()' is a subtype of 'Object Function()'.
10132 if (!other_res_type.IsTopTypeForSubtyping()) {
10133 const AbstractType& res_type = AbstractType::Handle(zone, result_type());
10134 if (!res_type.IsSubtypeOf(other_res_type, space,
10135 function_type_equivalence)) {
10136 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (result type)\n");
10137 return false;
10138 }
10139 }
10140 // Check the types of fixed and optional positional parameters.
10141 for (intptr_t i = 0; i < (other_num_fixed_params - other_num_ignored_params +
10142 other_num_opt_pos_params);
10143 i++) {
10144 if (!IsContravariantParameter(i + num_ignored_params, other,
10145 i + other_num_ignored_params, space,
10146 function_type_equivalence)) {
10147 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (parameter type)\n");
10148 return false;
10149 }
10150 }
10151 // Check that for each optional named parameter of type T of the other
10152 // function type, there exists an optional named parameter of this function
10153 // type with an identical name and with a type S that is a supertype of T.
10154 // Note that SetParameterNameAt() guarantees that names are symbols, so we
10155 // can compare their raw pointers.
10156 const int num_params = num_fixed_params + num_opt_named_params;
10157 const int other_num_params =
10158 other_num_fixed_params + other_num_opt_named_params;
10159 bool found_param_name;
10160 String& other_param_name = String::Handle(zone);
10161 for (intptr_t i = other_num_fixed_params; i < other_num_params; i++) {
10162 other_param_name = other.ParameterNameAt(i);
10163 ASSERT(other_param_name.IsSymbol());
10164 found_param_name = false;
10165 for (intptr_t j = num_fixed_params; j < num_params; j++) {
10166 ASSERT(String::Handle(zone, ParameterNameAt(j)).IsSymbol());
10167 if (ParameterNameAt(j) == other_param_name.ptr()) {
10168 found_param_name = true;
10169 if (!IsContravariantParameter(j, other, i, space,
10170 function_type_equivalence)) {
10172 " - result: false (optional parameter type)\n");
10173 return false;
10174 }
10175 break;
10176 }
10177 }
10178 if (!found_param_name) {
10180 " - result: false (named parameter not found)\n");
10181 return false;
10182 }
10183 }
10184 // Check that for each required named parameter in this function, there's a
10185 // corresponding required named parameter in the other function.
10186 String& param_name = other_param_name;
10187 for (intptr_t j = num_params - num_opt_named_params; j < num_params; j++) {
10188 if (IsRequiredAt(j)) {
10189 param_name = ParameterNameAt(j);
10190 ASSERT(param_name.IsSymbol());
10191 bool found = false;
10192 for (intptr_t i = other_num_fixed_params; i < other_num_params; i++) {
10193 ASSERT(String::Handle(zone, other.ParameterNameAt(i)).IsSymbol());
10194 if (other.ParameterNameAt(i) == param_name.ptr()) {
10195 found = true;
10196 if (!other.IsRequiredAt(i)) {
10198 " - result: false (mismatch in required named "
10199 "parameters)\n");
10200 return false;
10201 }
10202 }
10203 }
10204 if (!found) {
10206 " - result: false (required named parameter not found)\n");
10207 return false;
10208 }
10209 }
10210 }
10211 TRACE_TYPE_CHECKS_VERBOSE(" - result: true\n");
10212 return true;
10213}
10214
10215// The compiler generates an implicit constructor if a class definition
10216// does not contain an explicit constructor or factory. The implicit
10217// constructor has the same token position as the owner class.
// NOTE(review): the declaration line of this predicate was lost in
// extraction; the body identifies a compiler-synthesized generative
// constructor by its zero-width token range.
10219 return IsGenerativeConstructor() && (token_pos() == end_token_pos());
10220}
10221
// Raw-pointer predicate (declaration line missing): true when `func` is an
// implicit closure function (tear-off) whose static bit is set. The relaxed
// atomic load of kind_tag_ happens inside a NoSafepointScope so the untagged
// pointer cannot be moved by the GC while it is being read.
10223 NoSafepointScope no_safepoint;
10224 uint32_t kind_tag = func->untag()->kind_tag_.load(std::memory_order_relaxed);
10225 return (KindBits::decode(kind_tag) ==
10226 UntaggedFunction::kImplicitClosureFunction) &&
10227 StaticBit::decode(kind_tag);
10228}
10229
// Companion predicate (declaration line missing): same kind check as above,
// but for an *instance* tear-off — the static bit must be clear.
10231 NoSafepointScope no_safepoint;
10232 uint32_t kind_tag = func->untag()->kind_tag_.load(std::memory_order_relaxed);
10233 return (KindBits::decode(kind_tag) ==
10234 UntaggedFunction::kImplicitClosureFunction) &&
10235 !StaticBit::decode(kind_tag);
10236}
10237
// Allocates an uninitialized Function object in the given heap space.
// NOTE(review): line 10239 was dropped in extraction — presumably an
// ASSERT/setup statement; confirm against the original source.
10238FunctionPtr Function::New(Heap::Space space) {
10240 return Object::Allocate<Function>(space);
10241}
10242
// Creates and fully initializes a new Function with the given signature,
// name, kind and attribute flags, owned by `owner` (a Class or PatchClass).
// NOTE(review): several lines were dropped in extraction (10245: the `kind`
// parameter declaration; 10256: the `result` handle construction; 10263 and
// 10319: interior statements) — confirm against the original source.
10243FunctionPtr Function::New(const FunctionType& signature,
10244 const String& name,
10246 bool is_static,
10247 bool is_const,
10248 bool is_abstract,
10249 bool is_external,
10250 bool is_native,
10251 const Object& owner,
10252 TokenPosition token_pos,
10253 Heap::Space space) {
10254 ASSERT(!owner.IsNull());
10255 ASSERT(!signature.IsNull());
10257 result.set_kind_tag(0);
10258 result.set_packed_fields(0);
10259 result.set_name(name);
10260 result.set_kind_tag(0); // Ensure determinism of uninitialized bits.
10261 result.set_kind(kind);
10262 result.set_recognized_kind(MethodRecognizer::kUnknown);
10264 result.set_is_static(is_static);
10265 result.set_is_const(is_const);
10266 result.set_is_abstract(is_abstract);
10267 result.set_is_external(is_external);
10268 result.set_is_native(is_native);
10269 result.set_is_reflectable(true); // Will be computed later.
10270 result.set_is_visible(true); // Will be computed later.
10271 result.set_is_debuggable(true); // Will be computed later.
10272 result.set_is_intrinsic(false);
10273 result.set_has_pragma(false);
10274 result.set_is_polymorphic_target(false);
10275 result.set_is_synthetic(false);
10276 NOT_IN_PRECOMPILED(result.set_state_bits(0));
10277 result.set_owner(owner);
10278 NOT_IN_PRECOMPILED(result.set_token_pos(token_pos));
10279 NOT_IN_PRECOMPILED(result.set_end_token_pos(token_pos));
10280 NOT_IN_PRECOMPILED(result.set_usage_counter(0));
10281 NOT_IN_PRECOMPILED(result.set_deoptimization_counter(0));
10282 NOT_IN_PRECOMPILED(result.set_optimized_instruction_count(0));
10283 NOT_IN_PRECOMPILED(result.set_optimized_call_site_count(0));
10284 NOT_IN_PRECOMPILED(result.set_inlining_depth(0));
10285 NOT_IN_PRECOMPILED(result.set_kernel_offset(0));
10286 result.set_is_optimizable(is_native ? false : true);
10287 result.set_is_inlinable(true);
10288 result.reset_unboxed_parameters_and_return();
// Every function starts out pointing at the lazy-compile stub.
10289 result.SetInstructionsSafe(StubCode::LazyCompile());
10290
10291 // See Function::set_data() for more information.
// The data_ slot is kind-specific: ClosureData for (implicit) closures,
// FfiTrampolineData for FFI trampolines, native-function data for old-style
// natives (line 10304 with the handle construction was dropped in
// extraction).
10292 if (kind == UntaggedFunction::kClosureFunction ||
10293 kind == UntaggedFunction::kImplicitClosureFunction) {
10294 ASSERT(space == Heap::kOld);
10295 const ClosureData& data = ClosureData::Handle(ClosureData::New());
10296 data.set_awaiter_link({});
10297 result.set_data(data);
10298 } else if (kind == UntaggedFunction::kFfiTrampoline) {
10299 const FfiTrampolineData& data =
10300 FfiTrampolineData::Handle(FfiTrampolineData::New());
10301 result.set_data(data);
10302 } else if (result.is_old_native()) {
10303 const auto& data =
10305 result.set_data(data);
10306 } else {
10307 // Functions other than signature functions have no reason to be allocated
10308 // in new space.
10309 ASSERT(space == Heap::kOld);
10310 }
10311
10312 // Force-optimized functions are not debuggable because they cannot
10313 // deoptimize.
10314 if (result.ForceOptimize()) {
10315 result.set_is_debuggable(false);
10316 }
10317 signature.set_num_implicit_parameters(result.NumImplicitParameters());
10318 result.SetSignature(signature);
10320 result.set_positional_parameter_names(Object::empty_array()));
10321 return result.ptr();
10322}
10323
// Creates a closure function (explicit or implicit/tear-off) with `parent`
// as its enclosing function. NOTE(review): the declaration line (10324) and
// the signature-allocation lines (10334, 10337) were dropped in extraction —
// confirm against the original source.
10325 const String& name,
10326 const Function& parent,
10327 bool is_static,
10328 TokenPosition token_pos,
10329 const Object& owner) {
10330 ASSERT((kind == UntaggedFunction::kClosureFunction) ||
10331 (kind == UntaggedFunction::kImplicitClosureFunction));
10332 ASSERT(!parent.IsNull());
10333 ASSERT(!owner.IsNull());
// Explicit closures inherit the parent's type-argument count; implicit
// closures start from zero (the visible ternary feeds a dropped ctor line).
10335 kind == UntaggedFunction::kClosureFunction ? parent.NumTypeArguments()
10336 : 0));
10338 Function::New(signature, name, kind,
10339 /* is_static = */ is_static,
10340 /* is_const = */ false,
10341 /* is_abstract = */ false,
10342 /* is_external = */ false,
10343 /* is_native = */ false, owner, token_pos));
10344 result.set_parent_function(parent);
10345 return result.ptr();
10346}
10347
// Convenience wrapper for an explicit (non-implicit) closure function
// (declaration line 10348 missing).
10349 const Function& parent,
10350 TokenPosition token_pos) {
10351 // Use the owner defining the parent function and not the class containing it.
10352 const Object& parent_owner = Object::Handle(parent.RawOwner());
10353 return NewClosureFunctionWithKind(UntaggedFunction::kClosureFunction, name,
10354 parent, parent.is_static(), token_pos,
10355 parent_owner);
10356}
10357
// Convenience wrapper for an implicit closure function (tear-off); a
// constructor tear-off is treated as static (declaration line 10358 missing).
10359 const Function& parent,
10360 TokenPosition token_pos) {
10361 // Use the owner defining the parent function and not the class containing it.
10362 const Object& parent_owner = Object::Handle(parent.RawOwner());
10363 return NewClosureFunctionWithKind(
10364 UntaggedFunction::kImplicitClosureFunction, name, parent,
10365 parent.is_static() || parent.IsConstructor(), token_pos, parent_owner);
10366}
10367
// Predicate (declaration line 10368 missing): in the AOT runtime a function
// can only be closurized if its implicit closure was pre-created; in JIT it
// can always be created on demand.
10369#if defined(DART_PRECOMPILED_RUNTIME)
10370 return HasImplicitClosureFunction();
10371#else
10372 return true;
10373#endif
10374}
10375
// Predicate (declaration line 10376 missing): true when this is the
// invoke-field dispatcher for dynamic closure calls — owned by the internal
// closure class and named Symbols::DynamicCall.
10377 if (!IsInvokeFieldDispatcher()) return false;
10378 if (thread->isolate_group()->object_store()->closure_class() != Owner()) {
10379 return false;
10380 }
10381 const auto& handle = String::Handle(thread->zone(), name());
10382 return handle.Equals(Symbols::DynamicCall());
10383}
10384
// Returns (creating on first use, JIT only) the implicit closure function
// (tear-off) for this function. NOTE(review): the declaration line (10385)
// and a few interior lines (10415, 10425, 10480) were dropped in extraction —
// confirm against the original source.
10386 // Return the existing implicit closure function if any.
10387 if (implicit_closure_function() != Function::null()) {
10388 return implicit_closure_function();
10389 }
10390
10391#if defined(DART_PRECOMPILED_RUNTIME)
10392 // In AOT mode all implicit closures are pre-created.
10393 FATAL("Cannot create implicit closure in AOT!");
10394 return Function::null();
10395#else
10396 ASSERT(!IsClosureFunction());
10397 Thread* thread = Thread::Current();
10398 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
10399
// Double-checked under the program lock: another thread may have created
// the closure between the unlocked check above and acquiring the lock.
10400 if (implicit_closure_function() != Function::null()) {
10401 return implicit_closure_function();
10402 }
10403
10404 // Create closure function.
10405 Zone* zone = thread->zone();
10406 const String& closure_name = String::Handle(zone, name());
10407 const Function& closure_function = Function::Handle(
10408 zone, NewImplicitClosureFunction(closure_name, *this, token_pos()));
10409
10410 // Set closure function's context scope.
10411 if (is_static() || IsConstructor()) {
10412 closure_function.set_context_scope(Object::empty_context_scope());
10413 } else {
// (line 10415 — the ContextScope factory call — dropped in extraction)
10414 const ContextScope& context_scope = ContextScope::Handle(
10416 closure_function.set_context_scope(context_scope);
10417 }
10418
10419 FunctionType& closure_signature =
10420 FunctionType::Handle(zone, closure_function.signature());
10421
10422 const auto& cls = Class::Handle(zone, Owner());
10423
// (line 10425 — remainder of this condition — dropped in extraction)
10424 if (!is_static() && !IsConstructor() &&
10426 closure_function.set_awaiter_link({0, 0});
10427 }
10428
// Constructor tear-offs are generic over the class's type parameters;
// everything else is generic over the function's own type parameters.
10429 const intptr_t num_type_params =
10430 IsConstructor() ? cls.NumTypeParameters() : NumTypeParameters();
10431
10432 TypeArguments& instantiator_type_arguments = TypeArguments::Handle(zone);
10433 TypeArguments& function_type_arguments = TypeArguments::Handle(zone);
10434
10435 FunctionTypeMapping* function_type_mapping = nullptr;
10436 FunctionTypeMapping scope(zone, &function_type_mapping,
10437 FunctionType::Handle(zone, signature()),
10438 closure_signature);
10439
// Rewrites a type from the original signature into the closure signature:
// for constructors, class type parameters become function type parameters
// and are then instantiated; otherwise type-parameter references are remapped
// via function_type_mapping.
10440 auto transform_type = [&](AbstractType& type) {
10441 if (num_type_params > 0) {
10442 if (IsConstructor()) {
10443 type = type.UpdateFunctionTypes(num_type_params, kAllFree, Heap::kOld,
10444 nullptr);
10445 if (!type.IsInstantiated(kCurrentClass)) {
10446 type = type.InstantiateFrom(
10447 instantiator_type_arguments, function_type_arguments,
10448 kNoneFree /* avoid truncating parent type args */, Heap::kOld);
10449 }
10450 } else {
10451 type = type.UpdateFunctionTypes(0, kNoneFree, Heap::kOld,
10452 function_type_mapping);
10453 }
10454 }
10455 };
10456
// Same transformation, applied element-wise to a TypeArguments vector.
10457 auto transform_type_args = [&](TypeArguments& type_args) {
10458 ASSERT(num_type_params > 0);
10459 if (!type_args.IsNull()) {
10460 if (IsConstructor()) {
10461 type_args = type_args.UpdateFunctionTypes(num_type_params, kAllFree,
10462 Heap::kOld, nullptr);
10463 if (!type_args.IsInstantiated(kCurrentClass)) {
10464 type_args = type_args.InstantiateFrom(
10465 instantiator_type_arguments, function_type_arguments,
10466 kNoneFree /* avoid truncating parent type args */, Heap::kOld);
10467 }
10468 } else {
10469 type_args = type_args.UpdateFunctionTypes(0, kNoneFree, Heap::kOld,
10470 function_type_mapping);
10471 }
10472 }
10473 };
10474
10475 // Set closure function's type parameters.
10476 if (num_type_params > 0) {
10477 const TypeParameters& old_type_params = TypeParameters::Handle(
10478 zone, IsConstructor() ? cls.type_parameters() : type_parameters());
// (line 10480 — the TypeParameters::New call — dropped in extraction)
10479 const TypeParameters& new_type_params =
10481 // No need to set names that are ignored in a signature, however, the
10482 // length of the names array defines the number of type parameters.
10483 new_type_params.set_names(Array::Handle(zone, old_type_params.names()));
10484 new_type_params.set_flags(Array::Handle(zone, old_type_params.flags()));
10485
10486 closure_signature.SetTypeParameters(new_type_params);
10487 ASSERT(closure_signature.NumTypeParameters() == num_type_params);
10488
// Build a vector of the closure's own type parameters, used to instantiate
// the original signature's types below.
10489 TypeArguments& type_args = TypeArguments::Handle(zone);
10490 type_args = TypeArguments::New(num_type_params);
10491 TypeParameter& type_param = TypeParameter::Handle(zone);
10492 for (intptr_t i = 0; i < num_type_params; i++) {
10493 type_param = closure_signature.TypeParameterAt(i);
10494 type_args.SetTypeAt(i, type_param);
10495 }
10496
10497 if (IsConstructor()) {
10498 instantiator_type_arguments =
10499 type_args.ToInstantiatorTypeArguments(thread, cls);
10500 } else {
10501 ASSERT(NumTypeArguments() == type_args.Length());
10502 function_type_arguments = type_args.ptr();
10503 }
10504
10505 type_args = old_type_params.bounds();
10506 transform_type_args(type_args);
10507 new_type_params.set_bounds(type_args);
10508
10509 type_args = old_type_params.defaults();
10510 transform_type_args(type_args);
10511 new_type_params.set_defaults(type_args);
10512 }
10513
10514 // Set closure function's result type.
// A constructor tear-off returns an instance of the class rather than the
// constructor's (void) result type.
10515 AbstractType& result_type = AbstractType::Handle(zone);
10516 if (IsConstructor()) {
10517 result_type = cls.DeclarationType();
10518 } else {
10519 result_type = this->result_type();
10520 }
10521 transform_type(result_type);
10522 closure_signature.set_result_type(result_type);
10523
10524 // Set closure function's end token to this end token.
10525 closure_function.set_end_token_pos(end_token_pos());
10526
10527 // The closurized method stub just calls into the original method and should
10528 // therefore be skipped by the debugger and in stack traces.
10529 closure_function.set_is_debuggable(false);
10530 closure_function.set_is_visible(false);
10531
10532 // Set closure function's formal parameters to this formal parameters,
10533 // removing the receiver if this is an instance method and adding the closure
10534 // object as first parameter.
10535 const int kClosure = 1;
10536 const int num_implicit_params = NumImplicitParameters();
10537 const int num_fixed_params =
10538 kClosure - num_implicit_params + num_fixed_parameters();
10539 const int num_opt_params = NumOptionalParameters();
10540 const bool has_opt_pos_params = HasOptionalPositionalParameters();
10541 const int num_params = num_fixed_params + num_opt_params;
10542 const int num_pos_params = has_opt_pos_params ? num_params : num_fixed_params;
10543 closure_signature.set_num_fixed_parameters(num_fixed_params);
10544 closure_signature.SetNumOptionalParameters(num_opt_params,
10545 has_opt_pos_params);
10546 closure_signature.set_parameter_types(
10547 Array::Handle(zone, Array::New(num_params, Heap::kOld)));
10548 closure_function.CreateNameArray();
10549 closure_signature.CreateNameArrayIncludingFlags();
10550 AbstractType& param_type = AbstractType::Handle(zone);
10551 String& param_name = String::Handle(zone);
10552 // Add implicit closure object parameter.
10553 param_type = Type::DynamicType();
10554 closure_signature.SetParameterTypeAt(0, param_type);
10555 closure_function.SetParameterNameAt(0, Symbols::ClosureParameter());
// Copy positional parameters, shifting indices to skip the original
// implicit parameters (receiver etc.).
10556 for (int i = kClosure; i < num_pos_params; i++) {
10557 param_type = ParameterTypeAt(num_implicit_params - kClosure + i);
10558 transform_type(param_type);
10559 closure_signature.SetParameterTypeAt(i, param_type);
10560 param_name = ParameterNameAt(num_implicit_params - kClosure + i);
10561 // Set the name in the function for positional parameters.
10562 closure_function.SetParameterNameAt(i, param_name);
10563 }
// Copy named parameters, preserving their `required` flags.
10564 for (int i = num_pos_params; i < num_params; i++) {
10565 param_type = ParameterTypeAt(num_implicit_params - kClosure + i);
10566 transform_type(param_type);
10567 closure_signature.SetParameterTypeAt(i, param_type);
10568 param_name = ParameterNameAt(num_implicit_params - kClosure + i);
10569 // Set the name in the signature for named parameters.
10570 closure_signature.SetParameterNameAt(i, param_name);
10571 if (IsRequiredAt(num_implicit_params - kClosure + i)) {
10572 closure_signature.SetIsRequiredAt(i);
10573 }
10574 }
10575 closure_signature.FinalizeNameArray();
10576 closure_function.InheritKernelOffsetFrom(*this);
10577
10578 if (!is_static() && !IsConstructor()) {
10579 // Change covariant parameter types to Object?.
// Covariant parameters are checked dynamically at the call site, so the
// tear-off's static signature widens them to the top type.
10580 BitVector is_covariant(zone, NumParameters());
10581 BitVector is_generic_covariant_impl(zone, NumParameters());
10582 kernel::ReadParameterCovariance(*this, &is_covariant,
10583 &is_generic_covariant_impl);
10584
10585 ObjectStore* object_store = IsolateGroup::Current()->object_store();
10586 const auto& object_type =
10587 Type::Handle(zone, object_store->nullable_object_type());
10588 ASSERT(object_type.IsCanonical());
10589 for (intptr_t i = kClosure; i < num_params; ++i) {
10590 const intptr_t original_param_index = num_implicit_params - kClosure + i;
10591 if (is_covariant.Contains(original_param_index) ||
10592 is_generic_covariant_impl.Contains(original_param_index)) {
10593 closure_signature.SetParameterTypeAt(i, object_type);
10594 }
10595 }
10596 }
10597 ASSERT(!closure_signature.IsFinalized());
10598 closure_signature ^= ClassFinalizer::FinalizeType(closure_signature);
10599 closure_function.SetSignature(closure_signature);
10600 set_implicit_closure_function(closure_function);
10601 ASSERT(closure_function.IsImplicitClosureFunction());
10602 ASSERT(HasImplicitClosureFunction());
10603 return closure_function.ptr();
10604#endif // defined(DART_PRECOMPILED_RUNTIME)
10605}
10606
// Cache-eviction helper (declaration line 10607 missing): drops the cached
// implicit closure function if it never got compiled, so it can be rebuilt.
10608 if (implicit_closure_function() != Function::null()) {
10609 const Function& func = Function::Handle(implicit_closure_function());
10610 if (!func.HasCode()) {
10611 set_implicit_closure_function(Function::Handle());
10612 }
10613 }
10614}
10615
// Returns this function's signature as an internal-format symbol
// (declaration line 10616 missing). In AOT the signature may have been
// dropped by the precompiler, in which case null is returned.
10617#if defined(DART_PRECOMPILED_RUNTIME)
10618 if (signature() == FunctionType::null()) {
10619 return String::null();
10620 }
10621#endif
10622 Thread* thread = Thread::Current();
10623 ZoneTextBuffer printer(thread->zone());
10624 const FunctionType& sig = FunctionType::Handle(signature());
10625 sig.Print(kInternalName, &printer);
10626 return Symbols::New(thread, printer.buffer());
10627}
10628
// Same as above but prints the signature with user-visible names
// (declaration line 10629 missing).
10630#if defined(DART_PRECOMPILED_RUNTIME)
10631 if (signature() == FunctionType::null()) {
10632 return String::null();
10633 }
10634#endif
10635 Thread* thread = Thread::Current();
10636 ZoneTextBuffer printer(thread->zone());
10637 const FunctionType& sig = FunctionType::Handle(signature());
10638 sig.Print(kUserVisibleName, &printer);
10639 return Symbols::New(thread, printer.buffer());
10640}
10641
// Prints this signature's parameter list into `printer`
// (declaration line 10642 missing): fixed parameters first, then optional
// positional in [...] or optional named in {...}, with `required` markers
// and names for named parameters. User-visible mode hides implicit
// parameters (receiver etc.).
10643 Zone* zone,
10644 NameVisibility name_visibility,
10645 BaseTextBuffer* printer) const {
10646 AbstractType& param_type = AbstractType::Handle(zone);
10647 const intptr_t num_params = NumParameters();
10648 const intptr_t num_fixed_params = num_fixed_parameters();
10649 const intptr_t num_opt_pos_params = NumOptionalPositionalParameters();
10650 const intptr_t num_opt_named_params = NumOptionalNamedParameters();
10651 const intptr_t num_opt_params = num_opt_pos_params + num_opt_named_params;
10652 ASSERT((num_fixed_params + num_opt_params) == num_params);
10653 intptr_t i = 0;
10654 if (name_visibility == kUserVisibleName) {
10655 // Hide implicit parameters.
10656 i = num_implicit_parameters();
10657 }
10658 String& name = String::Handle(zone);
10659 while (i < num_fixed_params) {
10660 param_type = ParameterTypeAt(i);
10661 ASSERT(!param_type.IsNull());
10662 param_type.PrintName(name_visibility, printer);
10663 if (i != (num_params - 1)) {
10664 printer->AddString(", ");
10665 }
10666 i++;
10667 }
10668 if (num_opt_params > 0) {
// A signature has either optional positional or optional named parameters,
// never both — so the bracket style is decided once.
10669 if (num_opt_pos_params > 0) {
10670 printer->AddString("[");
10671 } else {
10672 printer->AddString("{");
10673 }
10674 for (intptr_t i = num_fixed_params; i < num_params; i++) {
10675 if (num_opt_named_params > 0 && IsRequiredAt(i)) {
10676 printer->AddString("required ");
10677 }
10678 param_type = ParameterTypeAt(i);
10679 ASSERT(!param_type.IsNull());
10680 param_type.PrintName(name_visibility, printer);
10681 // The parameter name of an optional positional parameter does not need
10682 // to be part of the signature, since it is not used.
10683 if (num_opt_named_params > 0) {
10684 name = ParameterNameAt(i);
10685 printer->AddString(" ");
10686 printer->AddString(name.ToCString());
10687 }
10688 if (i != (num_params - 1)) {
10689 printer->AddString(", ");
10690 }
10691 }
10692 if (num_opt_pos_params > 0) {
10693 printer->AddString("]");
10694 } else {
10695 printer->AddString("}");
10696 }
10697 }
10698}
10699
// Returns the canonical closure object for a static tear-off, creating and
// caching it on first use (declaration line 10700 missing). Uses the
// program lock with a second check after acquisition so concurrent callers
// agree on a single closure instance.
10701 ASSERT(IsImplicitStaticClosureFunction());
10702 if (implicit_static_closure() != Closure::null()) {
10703 return implicit_static_closure();
10704 }
10705
10706 auto thread = Thread::Current();
10707 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
10708
10709 if (implicit_static_closure() != Closure::null()) {
10710 return implicit_static_closure();
10711 }
10712
// Static closures need no instantiator/function type arguments and no
// receiver; they live in old space because they are cached on the function.
10713 Zone* zone = thread->zone();
10714 const auto& closure =
10715 Closure::Handle(zone, Closure::New(Object::null_type_arguments(),
10716 Object::null_type_arguments(), *this,
10717 Object::null_object(), Heap::kOld));
10718 set_implicit_static_closure(closure);
10719 return implicit_static_closure();
10720}
10721
10722ClosurePtr Function::ImplicitInstanceClosure(const Instance& receiver) const {
10723 ASSERT(IsImplicitClosureFunction());
10724 Zone* zone = Thread::Current()->zone();
10725 TypeArguments& instantiator_type_arguments = TypeArguments::Handle(zone);
10726 if (!HasInstantiatedSignature(kCurrentClass)) {
10727 instantiator_type_arguments = receiver.GetTypeArguments();
10728 }
10729 ASSERT(!HasGenericParent()); // No generic parent function.
10730 return Closure::New(instantiator_type_arguments,
10731 Object::null_type_arguments(), *this, receiver);
10732}
10733
// Resolves the target method that this implicit closure forwards to,
// re-resolving by name in the parent's owner class. After a hot reload the
// freshly resolved function can differ from the cached parent.
// NOTE(review): line 10748 (the body of the inner mismatch branch) was
// dropped in extraction — confirm its effect against the original source.
10734FunctionPtr Function::ImplicitClosureTarget(Zone* zone) const {
10735 const auto& parent = Function::Handle(zone, parent_function());
10736 const auto& func_name = String::Handle(zone, parent.name());
10737 const auto& owner = Class::Handle(zone, parent.Owner());
10738 Thread* thread = Thread::Current();
10739 const auto& error = owner.EnsureIsFinalized(thread);
10740 ASSERT(error == Error::null());
10741 auto& target =
10742 Function::Handle(zone, Resolver::ResolveFunction(zone, owner, func_name));
10743
// A different resolution result should only happen after a reload.
10744 if (!target.IsNull() && (target.ptr() != parent.ptr())) {
10745 DEBUG_ASSERT(IsolateGroup::Current()->HasAttemptedReload());
10746 if ((target.is_static() != parent.is_static()) ||
10747 (target.kind() != parent.kind())) {
10749 }
10750 }
10751
10752 return target.ptr();
10753}
10754
// Prints this function type as "<T...>(params) => result" into `printer`
// (declaration line 10755 missing). A null signature prints as "null",
// which happens when the precompiler optimized the signature away.
10756 BaseTextBuffer* printer) const {
10757 if (IsNull()) {
10758 printer->AddString("null"); // Signature optimized out in precompiler.
10759 return;
10760 }
10761 Thread* thread = Thread::Current();
10762 Zone* zone = thread->zone();
10763 const TypeParameters& type_params =
10764 TypeParameters::Handle(zone, type_parameters());
10765 if (!type_params.IsNull()) {
10766 printer->AddString("<");
10767 const intptr_t base = NumParentTypeArguments();
10768 const bool kIsClassTypeParameter = false;
10769 // Type parameter names are meaningless after canonicalization.
10770 type_params.Print(thread, zone, kIsClassTypeParameter, base,
10771 name_visibility, printer);
10772 printer->AddString(">");
10773 }
10774 printer->AddString("(");
10775 PrintParameters(thread, zone, name_visibility, printer);
10776 printer->AddString(") => ");
10777 const AbstractType& res_type = AbstractType::Handle(zone, result_type());
10778 if (!res_type.IsNull()) {
10779 res_type.PrintName(name_visibility, printer);
10780 } else {
10781 printer->AddString("null");
10782 }
10783}
10784
// Function::IsInstantiated (declaration line 10785 missing): delegates to
// the signature's FunctionType::IsInstantiated below.
10786 Genericity genericity,
10787 intptr_t num_free_fun_type_params) const {
10788 return FunctionType::Handle(signature())
10789 .IsInstantiated(genericity, num_free_fun_type_params);
10790}
10791
// FunctionType::IsInstantiated (declaration line 10792 missing): true when
// the result type, all parameter types, and all non-dynamic type-parameter
// bounds contain no free type parameters of the requested genericity.
10793 intptr_t num_free_fun_type_params) const {
10794 if (num_free_fun_type_params == kCurrentAndEnclosingFree) {
10795 num_free_fun_type_params = kAllFree;
10796 } else if (genericity != kCurrentClass) {
10797 const intptr_t num_parent_type_args = NumParentTypeArguments();
10798 if (num_parent_type_args > 0 && num_free_fun_type_params > 0) {
10799 // The number of parent type arguments is cached in the FunctionType, so
10800 // we can't consider any FunctionType with free parent type arguments as
10801 // fully instantiated. Instead, the FunctionType must be instantiated to
10802 // reduce the number of parent type arguments, even if they're unused in
10803 // its component types.
10804 return false;
10805 }
10806 // Don't consider local function type parameters as free.
10807 if (num_free_fun_type_params > num_parent_type_args) {
10808 num_free_fun_type_params = num_parent_type_args;
10809 }
10810 }
10811 AbstractType& type = AbstractType::Handle(result_type());
10812 if (!type.IsInstantiated(genericity, num_free_fun_type_params)) {
10813 return false;
10814 }
10815 const intptr_t num_parameters = NumParameters();
10816 for (intptr_t i = 0; i < num_parameters; i++) {
10817 type = ParameterTypeAt(i);
10818 if (!type.IsInstantiated(genericity, num_free_fun_type_params)) {
10819 return false;
10820 }
10821 }
10822 const intptr_t num_type_params = NumTypeParameters();
10823 if (num_type_params > 0) {
10824 TypeParameters& type_params = TypeParameters::Handle(type_parameters());
10825 if (!type_params.AllDynamicBounds()) {
10826 for (intptr_t i = 0; i < type_params.Length(); ++i) {
10827 type = type_params.BoundAt(i);
10828 if (!type.IsInstantiated(genericity, num_free_fun_type_params)) {
10829 return false;
10830 }
10831 }
10832 }
10833 }
10834 return true;
10835}
10836
10839}
10840
10841ClassPtr Function::Owner() const {
10842 ASSERT(untag()->owner() != Object::null());
10843 if (untag()->owner()->IsClass()) {
10844 return Class::RawCast(untag()->owner());
10845 }
10846 const Object& obj = Object::Handle(untag()->owner());
10847 ASSERT(obj.IsPatchClass());
10848 return PatchClass::Cast(obj).wrapped_class();
10849}
10850
// Copies the kernel offset from another Function (declaration line 10851
// missing). Kernel offsets do not exist in the AOT runtime.
10852#if defined(DART_PRECOMPILED_RUNTIME)
10853 UNREACHABLE();
10854#else
10855 StoreNonPointer(&untag()->kernel_offset_, src.untag()->kernel_offset_);
10856#endif
10857}
10858
// Overload copying the kernel offset from a Field (declaration line 10859
// missing).
10860#if defined(DART_PRECOMPILED_RUNTIME)
10861 UNREACHABLE();
10862#else
10863 set_kernel_offset(src.kernel_offset());
10864#endif
10865}
10866
// Stores eval-function metadata (script, kernel program info, kernel library
// index) into this function's data_ slot as a fixed-layout array
// (declaration line 10867 missing).
10868 const Script& script,
10869 const class KernelProgramInfo& kernel_program_info,
10870 intptr_t index) const {
10871 Array& data_field = Array::Handle(
10872 Array::New(static_cast<intptr_t>(EvalFunctionData::kLength)));
10873 data_field.SetAt(static_cast<intptr_t>(EvalFunctionData::kScript), script);
10874 data_field.SetAt(static_cast<intptr_t>(EvalFunctionData::kKernelProgramInfo),
10875 kernel_program_info);
10876 data_field.SetAt(static_cast<intptr_t>(EvalFunctionData::kKernelLibraryIndex),
10877 Smi::Handle(Smi::New(index)));
10878 set_data(data_field);
10879}
10880
10881ScriptPtr Function::script() const {
10882 // NOTE(turnidge): If you update this function, you probably want to
10883 // update Class::PatchFieldsAndFunctions() at the same time.
10884 if (IsDynamicInvocationForwarder()) {
10885 const Function& target = Function::Handle(ForwardingTarget());
10886 return target.IsNull() ? Script::null() : target.script();
10887 }
10888 if (IsImplicitGetterOrSetter()) {
10889 const auto& field = Field::Handle(accessor_field());
10890 return field.IsNull() ? Script::null() : field.Script();
10891 }
10892 if (is_eval_function()) {
10893 const auto& fdata = Array::Handle(Array::RawCast(data()));
10894 return Script::RawCast(
10895 fdata.At(static_cast<intptr_t>(EvalFunctionData::kScript)));
10896 }
10897 if (token_pos() == TokenPosition::kMinSource) {
10898 // Testing for position 0 is an optimization that relies on temporary
10899 // eval functions having token position 0.
10900 const Script& script = Script::Handle(eval_script());
10901 if (!script.IsNull()) {
10902 return script.ptr();
10903 }
10904 }
10905 const Object& obj = Object::Handle(untag()->owner());
10906 if (obj.IsPatchClass()) {
10907 return PatchClass::Cast(obj).script();
10908 }
10909 if (IsClosureFunction()) {
10910 const Function& function = Function::Handle(parent_function());
10911 if (function.IsNull()) return Script::null();
10912 return function.script();
10913 }
10914 ASSERT(obj.IsClass());
10915 return Class::Cast(obj).script();
10916}
10917
10918#if !defined(DART_PRECOMPILED_RUNTIME)
// Returns the kernel program info for this function: from eval metadata,
// from the enclosing function for closures, or from the owning
// Class/PatchClass. NOTE(review): line 10922 (the RawCast return opener)
// was dropped in extraction.
10919KernelProgramInfoPtr Function::KernelProgramInfo() const {
10920 if (is_eval_function()) {
10921 const auto& fdata = Array::Handle(Array::RawCast(data()));
10923 fdata.At(static_cast<intptr_t>(EvalFunctionData::kKernelProgramInfo)));
10924 }
10925 if (IsClosureFunction()) {
10926 const auto& parent = Function::Handle(parent_function());
10927 return parent.KernelProgramInfo();
10928 }
10929 const auto& owner = Object::Handle(RawOwner());
10930 if (owner.IsClass()) {
10931 return Class::Cast(owner).KernelProgramInfo();
10932 }
10933 return PatchClass::Cast(owner).kernel_program_info();
10934}
10935
// Returns a view of this function's kernel library data.
// NOTE(review): line 10937 (the `info` handle construction) was dropped in
// extraction.
10936TypedDataViewPtr Function::KernelLibrary() const {
10938 return info.KernelLibrary(KernelLibraryIndex());
10939}
10940
// Returns the start offset of this function's kernel library, or 0 when
// there is no library index (declaration line 10941 and the `info` handle
// line 10944 were dropped in extraction).
10942 const intptr_t kernel_library_index = KernelLibraryIndex();
10943 if (kernel_library_index == -1) return 0;
10945 return info.KernelLibraryStartOffset(kernel_library_index);
10946}
10947
// Returns this function's kernel library index, or -1 for synthetic
// dispatchers/trampolines that have no kernel source (declaration line 10948
// missing). Eval functions read it from their metadata array; closures
// delegate to their parent; otherwise the owning library is consulted.
10949 if (IsNoSuchMethodDispatcher() || IsInvokeFieldDispatcher() ||
10950 IsFfiCallbackTrampoline()) {
10951 return -1;
10952 }
10953 if (is_eval_function()) {
10954 const auto& fdata = Array::Handle(Array::RawCast(data()));
10955 return Smi::Value(static_cast<SmiPtr>(fdata.At(
10956 static_cast<intptr_t>(EvalFunctionData::kKernelLibraryIndex))));
10957 }
10958 if (IsClosureFunction()) {
10959 const auto& parent = Function::Handle(parent_function());
10960 ASSERT(!parent.IsNull());
10961 return parent.KernelLibraryIndex();
10962 }
10963
10964 const auto& obj = Object::Handle(untag()->owner());
10965 if (obj.IsClass()) {
10966 const auto& lib = Library::Handle(Class::Cast(obj).library());
10967 return lib.kernel_library_index();
10968 }
10969 ASSERT(obj.IsPatchClass());
10970 return PatchClass::Cast(obj).kernel_library_index();
10971}
10972#endif
10973
// Predicate (declaration line 10974 missing): true when this function
// currently has code installed and that code is an optimized compilation.
10975 return HasCode() && Code::Handle(CurrentCode()).is_optimized();
10976}
10977
10978const char* Function::NameCString(NameVisibility name_visibility) const {
10979 switch (name_visibility) {
10980 case kInternalName:
10981 return String::Handle(name()).ToCString();
10982 case kScrubbedName:
10983 case kUserVisibleName:
10984 return UserVisibleNameCString();
10985 }
10986 UNREACHABLE();
10987 return nullptr;
10988}
10989
// UserVisibleNameCString (declaration line 10990 missing): returns the raw
// internal name when --show_internal_names is set; otherwise a scrubbed name
// (the scrubbing call on the dropped line 10995 presumably wraps the visible
// argument expression — confirm against the original source).
10991 if (FLAG_show_internal_names) {
10992 return String::Handle(name()).ToCString();
10993 }
10994 is_extension_type_member();
10996 is_extension_member() || is_extension_type_member());
10997}
10998
// Symbol variant of the above; lines 11004-11005 (the scrubbing call inside
// Symbols::New) were dropped in extraction.
10999StringPtr Function::UserVisibleName() const {
11000 if (FLAG_show_internal_names) {
11001 return name();
11002 }
11003 return Symbols::New(
11006 is_extension_member() || is_extension_type_member()));
11007}
11008
// Qualified scrubbed name as a symbol (declaration line 11009 missing).
11010 Thread* thread = Thread::Current();
11011 ZoneTextBuffer printer(thread->zone());
11012 PrintName(NameFormattingParams(kScrubbedName), &printer);
11013 return Symbols::New(thread, printer.buffer());
11014}
11015
// Qualified scrubbed name as a zone-allocated C string (declaration line
// 11016 missing).
11017 Thread* thread = Thread::Current();
11018 ZoneTextBuffer printer(thread->zone());
11019 PrintName(NameFormattingParams(kScrubbedName), &printer);
11020 return printer.buffer();
11021}
11022
// Qualified user-visible name as a symbol (declaration line 11023 missing).
11024 Thread* thread = Thread::Current();
11025 ZoneTextBuffer printer(thread->zone());
11026 PrintName(NameFormattingParams(kUserVisibleName), &printer);
11027 return Symbols::New(thread, printer.buffer());
11028}
11029
// Qualified user-visible name as a zone-allocated C string (declaration line
// 11030 missing).
11031 Thread* thread = Thread::Current();
11032 ZoneTextBuffer printer(thread->zone());
11033 PrintName(NameFormattingParams(kUserVisibleName), &printer);
11034 return printer.buffer();
11035}
11036
11037static void FunctionPrintNameHelper(const Function& fun,
11039 BaseTextBuffer* printer) {
11040 if (fun.IsNonImplicitClosureFunction()) {
11041 if (params.include_parent_name) {
11042 const auto& parent = Function::Handle(fun.parent_function());
11043 if (parent.IsNull()) {
11044 printer->AddString(Symbols::OptimizedOut().ToCString());
11045 } else {
11046 parent.PrintName(params, printer);
11047 }
11048 // A function's scrubbed name and its user visible name are identical.
11049 printer->AddString(".");
11050 }
11051 if (params.disambiguate_names &&
11052 fun.name() == Symbols::AnonymousClosure().ptr()) {
11053 if (fun.token_pos().IsReal()) {
11054 printer->Printf("<anonymous closure @%" Pd ">", fun.token_pos().Pos());
11055 } else {
11056 printer->Printf("<anonymous closure @no position>");
11057 }
11058 } else {
11059 printer->AddString(fun.NameCString(params.name_visibility));
11060 if (params.disambiguate_names) {
11061 if (fun.token_pos().IsReal()) {
11062 printer->Printf("@<%" Pd ">", fun.token_pos().Pos());
11063 } else {
11064 printer->Printf("@<no position>");
11065 }
11066 }
11067 }
11068 return;
11069 }
11070 if (params.disambiguate_names) {
11071 if (fun.IsInvokeFieldDispatcher()) {
11072 printer->AddString("[invoke-field] ");
11073 }
11074 if (fun.IsNoSuchMethodDispatcher()) {
11075 printer->AddString("[no-such-method] ");
11076 }
11077 if (fun.IsImplicitClosureFunction()) {
11078 printer->AddString("[tear-off] ");
11079 }
11080 if (fun.IsMethodExtractor()) {
11081 printer->AddString("[tear-off-extractor] ");
11082 }
11083 }
11084
11085 if (fun.kind() == UntaggedFunction::kConstructor) {
11086 printer->AddString("new ");
11087 } else if (params.include_class_name) {
11088 const Class& cls = Class::Handle(fun.Owner());
11089 if (!cls.IsTopLevel()) {
11090 const Class& mixin = Class::Handle(cls.Mixin());
11091 printer->AddString(params.name_visibility == Object::kUserVisibleName
11092 ? mixin.UserVisibleNameCString()
11093 : cls.NameCString(params.name_visibility));
11094 printer->AddString(".");
11095 }
11096 }
11097
11098 printer->AddString(fun.NameCString(params.name_visibility));
11099
11100 // Dispatchers that are created with an arguments descriptor need both the
11101 // name and the saved arguments descriptor to disambiguate.
11102 if (params.disambiguate_names && fun.HasSavedArgumentsDescriptor()) {
11103 const auto& args_desc_array = Array::Handle(fun.saved_args_desc());
11104 const ArgumentsDescriptor args_desc(args_desc_array);
11105 args_desc.PrintTo(printer);
11106 }
11107}
11108
11110 BaseTextBuffer* printer) const {
11111 if (!IsLocalFunction()) {
11112 FunctionPrintNameHelper(*this, params, printer);
11113 return;
11114 }
11115 auto& fun = Function::Handle(ptr());
11116 FunctionPrintNameHelper(fun, params, printer);
11117}
11118
// Returns the source text of this function, or String::null() when no
// source is available (implicit constructors, synthetic members, or
// unresolvable token positions). Returns Symbols::OptimizedOut() when the
// script exists but its source text is unavailable.
StringPtr Function::GetSource() const {
  if (IsImplicitConstructor() || is_synthetic()) {
    // We may need to handle more cases when the restrictions on mixins are
    // relaxed. In particular we might start associating some source with the
    // forwarding constructors when it becomes possible to specify a particular
    // constructor from the mixin to use.
    return String::null();
  }
  Zone* zone = Thread::Current()->zone();
  const Script& func_script = Script::Handle(zone, script());

  // Resolve start and end token positions to line/column pairs; bail out if
  // either cannot be mapped.
  intptr_t from_line, from_col;
  if (!func_script.GetTokenLocation(token_pos(), &from_line, &from_col)) {
    return String::null();
  }
  intptr_t to_line, to_col;
  if (!func_script.GetTokenLocation(end_token_pos(), &to_line, &to_col)) {
    return String::null();
  }
  // Length (in characters) of the final token; negative means unknown.
  intptr_t to_length = func_script.GetTokenLength(end_token_pos());
  if (to_length < 0) {
    return String::null();
  }

  if (to_length == 1) {
    // Handle special cases for end tokens of closures (where we exclude the
    // last token):
    // (1) "foo(() => null, bar);": End token is `,', but we don't print it.
    // (2) "foo(() => null);": End token is ')`, but we don't print it.
    // (3) "var foo = () => null;": End token is `;', but in this case the
    // token semicolon belongs to the assignment so we skip it.
    const String& src = String::Handle(func_script.Source());
    if (src.IsNull() || src.Length() == 0) {
      return Symbols::OptimizedOut().ptr();
    }
    uint16_t end_char = src.CharAt(end_token_pos().Pos());
    if ((end_char == ',') ||  // Case 1.
        (end_char == ')') ||  // Case 2.
        (end_char == ';' && String::Handle(zone, name())
                                .Equals("<anonymous closure>"))) {  // Case 3.
      to_length = 0;
    }
  }

  return func_script.GetSnippet(from_line, from_col, to_line,
                                to_col + to_length);
}
11166
11167// Construct fingerprint from token stream. The token stream contains also
11168// arguments.
11170#if !defined(DART_PRECOMPILED_RUNTIME)
11172 *this);
11173#else
11174 return 0;
11175#endif // !defined(DART_PRECOMPILED_RUNTIME)
11176}
11177
11179 const ZoneGrowableArray<const ICData*>& deopt_id_to_ic_data,
11180 const Array& edge_counters_array,
11181 const Array& coverage_array) const {
11182#if !defined(DART_PRECOMPILED_RUNTIME)
11183 // Already installed nothing to do.
11184 if (ic_data_array() != Array::null()) {
11185 ASSERT(coverage_array.ptr() == GetCoverageArray());
11186 return;
11187 }
11188
11189 // Compute number of ICData objects to save.
11190 intptr_t count = 0;
11191 for (intptr_t i = 0; i < deopt_id_to_ic_data.length(); i++) {
11192 if (deopt_id_to_ic_data[i] != nullptr) {
11193 count++;
11194 }
11195 }
11196
11197 // Compress sparse deopt_id_to_ic_data mapping into a linear sequence of
11198 // ICData objects.
11199 const Array& array = Array::Handle(
11200 Array::New(ICDataArrayIndices::kFirstICData + count, Heap::kOld));
11201 for (intptr_t i = 0, pos = ICDataArrayIndices::kFirstICData;
11202 i < deopt_id_to_ic_data.length(); i++) {
11203 if (deopt_id_to_ic_data[i] != nullptr) {
11204 ASSERT(i == deopt_id_to_ic_data[i]->deopt_id());
11205 array.SetAt(pos++, *deopt_id_to_ic_data[i]);
11206 }
11207 }
11208 array.SetAt(ICDataArrayIndices::kEdgeCounters, edge_counters_array);
11209 // Preserve coverage_array which is stored early after graph construction.
11210 array.SetAt(ICDataArrayIndices::kCoverageData, coverage_array);
11211 set_ic_data_array(array);
11212#else // DART_PRECOMPILED_RUNTIME
11213 UNREACHABLE();
11214#endif // DART_PRECOMPILED_RUNTIME
11215}
11216
11218 ZoneGrowableArray<const ICData*>* deopt_id_to_ic_data,
11219 bool clone_ic_data) const {
11220#if !defined(DART_PRECOMPILED_RUNTIME)
11221 if (FLAG_force_clone_compiler_objects) {
11222 clone_ic_data = true;
11223 }
11224 ASSERT(deopt_id_to_ic_data->is_empty());
11225 Zone* zone = Thread::Current()->zone();
11226 const Array& saved_ic_data = Array::Handle(zone, ic_data_array());
11227 if (saved_ic_data.IsNull()) {
11228 // Could happen with not-yet compiled unoptimized code or force-optimized
11229 // functions.
11230 return;
11231 }
11232 const intptr_t saved_length = saved_ic_data.Length();
11233 ASSERT(saved_length > 0);
11234 if (saved_length > ICDataArrayIndices::kFirstICData) {
11235 const intptr_t restored_length =
11236 ICData::Cast(Object::Handle(zone, saved_ic_data.At(saved_length - 1)))
11237 .deopt_id() +
11238 1;
11239 deopt_id_to_ic_data->SetLength(restored_length);
11240 for (intptr_t i = 0; i < restored_length; i++) {
11241 (*deopt_id_to_ic_data)[i] = nullptr;
11242 }
11243 for (intptr_t i = ICDataArrayIndices::kFirstICData; i < saved_length; i++) {
11244 ICData& ic_data = ICData::ZoneHandle(zone);
11245 ic_data ^= saved_ic_data.At(i);
11246 if (clone_ic_data) {
11247 const ICData& original_ic_data = ICData::Handle(zone, ic_data.ptr());
11248 ic_data = ICData::Clone(ic_data);
11249 ic_data.SetOriginal(original_ic_data);
11250 }
11251 ASSERT(deopt_id_to_ic_data->At(ic_data.deopt_id()) == nullptr);
11252 (*deopt_id_to_ic_data)[ic_data.deopt_id()] = &ic_data;
11253 }
11254 }
11255#else // DART_PRECOMPILED_RUNTIME
11256 UNREACHABLE();
11257#endif // DART_PRECOMPILED_RUNTIME
11258}
11259
11261 const Array& arr = Array::Handle(ic_data_array());
11262 if (arr.IsNull()) {
11263 return Array::null();
11264 }
11265 return Array::RawCast(arr.At(ICDataArrayIndices::kCoverageData));
11266}
11267
11268void Function::set_ic_data_array(const Array& value) const {
11269 untag()->set_ic_data_array<std::memory_order_release>(value.ptr());
11270}
11271
11272ArrayPtr Function::ic_data_array() const {
11273 return untag()->ic_data_array<std::memory_order_acquire>();
11274}
11275
11277 set_ic_data_array(Array::null_array());
11278}
11279
11280ICDataPtr Function::FindICData(intptr_t deopt_id) const {
11281 const Array& array = Array::Handle(ic_data_array());
11282 ICData& ic_data = ICData::Handle();
11283 for (intptr_t i = ICDataArrayIndices::kFirstICData; i < array.Length(); i++) {
11284 ic_data ^= array.At(i);
11285 if (ic_data.deopt_id() == deopt_id) {
11286 return ic_data.ptr();
11287 }
11288 }
11289 return ICData::null();
11290}
11291
11292void Function::SetDeoptReasonForAll(intptr_t deopt_id,
11293 ICData::DeoptReasonId reason) {
11294 const Array& array = Array::Handle(ic_data_array());
11295 ICData& ic_data = ICData::Handle();
11296 for (intptr_t i = ICDataArrayIndices::kFirstICData; i < array.Length(); i++) {
11297 ic_data ^= array.At(i);
11298 if (ic_data.deopt_id() == deopt_id) {
11299 ic_data.AddDeoptReason(reason);
11300 }
11301 }
11302}
11303
11304bool Function::CheckSourceFingerprint(int32_t fp, const char* kind) const {
11305#if !defined(DEBUG)
11306 return true; // Only check on debug.
11307#endif
11308
11309#if !defined(DART_PRECOMPILED_RUNTIME)
11310 // Check that the function is marked as recognized via the vm:recognized
11311 // pragma. This is so that optimizations that change the signature will know
11312 // not to touch it.
11313 if (kind != nullptr && !MethodRecognizer::IsMarkedAsRecognized(*this, kind)) {
11315 "Recognized method %s should be marked with: "
11316 "@pragma(\"vm:recognized\", \"%s\")\n",
11317 ToQualifiedCString(), kind);
11318 return false;
11319 }
11320#endif
11321
11322 if (IsolateGroup::Current()->obfuscate() || FLAG_precompiled_mode ||
11324 return true; // The kernel structure has been altered, skip checking.
11325 }
11326
11327 if (SourceFingerprint() != fp) {
11328 // This output can be copied into a file, then used with sed
11329 // to replace the old values.
11330 // sed -i.bak -f /tmp/newkeys \
11331 // runtime/vm/compiler/recognized_methods_list.h
11332 THR_Print("s/0x%08x/0x%08x/\n", fp, SourceFingerprint());
11333 return false;
11334 }
11335 return true;
11336}
11337
11339 if (HasCode()) return CurrentCode();
11340 Thread* thread = Thread::Current();
11341 ASSERT(thread->IsDartMutatorThread());
11342 DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame());
11343 Zone* zone = thread->zone();
11344 const Object& result =
11345 Object::Handle(zone, Compiler::CompileFunction(thread, *this));
11346 if (result.IsError()) {
11347 if (result.ptr() == Object::out_of_memory_error().ptr()) {
11349 UNREACHABLE();
11350 }
11351 if (result.IsLanguageError()) {
11352 Exceptions::ThrowCompileTimeError(LanguageError::Cast(result));
11353 UNREACHABLE();
11354 }
11355 Exceptions::PropagateError(Error::Cast(result));
11356 UNREACHABLE();
11357 }
11358 // Compiling in unoptimized mode should never fail if there are no errors.
11359 RELEASE_ASSERT(HasCode());
11360 ASSERT(ForceOptimize() || unoptimized_code() == result.ptr());
11361 return CurrentCode();
11362}
11363
11365#if !defined(DART_PRECOMPILED_RUNTIME)
11366 if (!IsDynamicFunction()) {
11367 return false;
11368 }
11369
11370 // For functions which need an args descriptor the switchable call sites will
11371 // transition directly to calling via a stub (and therefore never call the
11372 // monomorphic entry).
11373 //
11374 // See runtime_entry.cc:DEFINE_RUNTIME_ENTRY(UnlinkedCall)
11375 if (PrologueNeedsArgumentsDescriptor()) {
11376 return false;
11377 }
11378
11379 // All dyn:* forwarders are called via SwitchableCalls and all except the ones
11380 // with `PrologueNeedsArgumentsDescriptor()` transition into monomorphic
11381 // state.
11383 return true;
11384 }
11385
11386 // AOT mode uses table dispatch.
11387 // In JIT mode all instance calls use switchable calls.
11388 if (!FLAG_precompiled_mode) {
11389 return true;
11390 }
11391
11392 // Any method from the class with a dynamically loaded subtype
11393 // can be called via switchable call (when cid range check fails
11394 // during conditional table dispatch).
11395 if (Class::Handle(zone, Owner()).has_dynamically_extendable_subtypes()) {
11396 return true;
11397 }
11398
11399 // Only if there are dynamic callers and if we didn't create a dyn:* forwarder
11400 // for it do we need the monomorphic checked entry.
11401 return HasDynamicCallers(zone) &&
11403#else
11404 UNREACHABLE();
11405 return true;
11406#endif
11407}
11408
11410#if !defined(DART_PRECOMPILED_RUNTIME)
11411 // Issue(dartbug.com/42719):
11412 // Right now the metadata of _Closure.call says there are no dynamic callers -
11413 // even though there can be. To be conservative we return true.
11414 if ((name() == Symbols::GetCall().ptr() || name() == Symbols::call().ptr()) &&
11415 Class::IsClosureClass(Owner())) {
11416 return true;
11417 }
11418
11419 // Use the results of TFA to determine whether this function is ever
11420 // called dynamically, i.e. using switchable calls.
11422 metadata = kernel::ProcedureAttributesOf(*this, zone);
11423 if (IsGetterFunction() || IsImplicitGetterFunction() || IsMethodExtractor()) {
11424 // Dynamic method call through field/getter involves dynamic call of
11425 // the field/getter.
11426 return metadata.getter_called_dynamically ||
11428 } else {
11430 }
11431#else
11432 UNREACHABLE();
11433 return true;
11434#endif
11435}
11436
11438 // These functions have a saved compile-time arguments descriptor that is
11439 // used in lieu of the runtime arguments descriptor in generated IL.
11440 if (HasSavedArgumentsDescriptor()) {
11441 return false;
11442 }
11443 // The prologue of those functions need to examine the arg descriptor for
11444 // various purposes.
11445 return IsGeneric() || HasOptionalParameters();
11446}
11447
11449 return FLAG_enable_multiple_entrypoints &&
11450 (NeedsTypeArgumentTypeChecks() || NeedsArgumentTypeChecks());
11451}
11452
11453intptr_t Function::SourceSize() const {
11454 const TokenPosition& start = token_pos();
11455 const TokenPosition& end = end_token_pos();
11456 if (!end.IsReal() || start.IsNoSource() || start.IsClassifying()) {
11457 // No source information, so just return 0.
11458 return 0;
11459 }
11460 if (start.IsSynthetic()) {
11461 // Try and approximate the source size using the parent's source size.
11462 const auto& parent = Function::Handle(parent_function());
11463 ASSERT(!parent.IsNull());
11464 const intptr_t parent_size = parent.SourceSize();
11465 if (parent_size == 0) {
11466 return parent_size;
11467 }
11468 // Parent must have a real ending position.
11469 return parent_size - (parent.end_token_pos().Pos() - end.Pos());
11470 }
11471 return end.Pos() - start.Pos();
11472}
11473
11474const char* Function::ToCString() const {
11475 if (IsNull()) {
11476 return "Function: null";
11477 }
11478 Zone* zone = Thread::Current()->zone();
11479 ZoneTextBuffer buffer(zone);
11480 buffer.Printf("Function '%s':", String::Handle(zone, name()).ToCString());
11481 if (is_static()) {
11482 buffer.AddString(" static");
11483 }
11484 if (is_abstract()) {
11485 buffer.AddString(" abstract");
11486 }
11487 switch (kind()) {
11488 case UntaggedFunction::kRegularFunction:
11489 case UntaggedFunction::kClosureFunction:
11490 case UntaggedFunction::kImplicitClosureFunction:
11491 case UntaggedFunction::kGetterFunction:
11492 case UntaggedFunction::kSetterFunction:
11493 break;
11494 case UntaggedFunction::kConstructor:
11495 buffer.AddString(is_static() ? " factory" : " constructor");
11496 break;
11497 case UntaggedFunction::kImplicitGetter:
11498 buffer.AddString(" getter");
11499 break;
11500 case UntaggedFunction::kImplicitSetter:
11501 buffer.AddString(" setter");
11502 break;
11503 case UntaggedFunction::kImplicitStaticGetter:
11504 buffer.AddString(" static-getter");
11505 break;
11506 case UntaggedFunction::kFieldInitializer:
11507 buffer.AddString(" field-initializer");
11508 break;
11509 case UntaggedFunction::kMethodExtractor:
11510 buffer.AddString(" method-extractor");
11511 break;
11512 case UntaggedFunction::kNoSuchMethodDispatcher:
11513 buffer.AddString(" no-such-method-dispatcher");
11514 break;
11515 case UntaggedFunction::kDynamicInvocationForwarder:
11516 buffer.AddString(" dynamic-invocation-forwarder");
11517 break;
11518 case UntaggedFunction::kInvokeFieldDispatcher:
11519 buffer.AddString(" invoke-field-dispatcher");
11520 break;
11521 case UntaggedFunction::kIrregexpFunction:
11522 buffer.AddString(" irregexp-function");
11523 break;
11524 case UntaggedFunction::kFfiTrampoline:
11525 buffer.AddString(" ffi-trampoline-function");
11526 break;
11527 case UntaggedFunction::kRecordFieldGetter:
11528 buffer.AddString(" record-field-getter");
11529 break;
11530 default:
11531 UNREACHABLE();
11532 }
11533 if (HasSavedArgumentsDescriptor()) {
11534 const auto& args_desc_array = Array::Handle(zone, saved_args_desc());
11535 const ArgumentsDescriptor args_desc(args_desc_array);
11536 buffer.AddChar('[');
11537 args_desc.PrintTo(&buffer);
11538 buffer.AddChar(']');
11539 }
11540 if (is_const()) {
11541 buffer.AddString(" const");
11542 }
11543 buffer.AddChar('.');
11544 return buffer.buffer();
11545}
11546
11548 uint32_t packed_parameter_counts) const {
11549 untag()->packed_parameter_counts_ = packed_parameter_counts;
11550}
11551
11553 uint16_t packed_type_parameter_counts) const {
11554 untag()->packed_type_parameter_counts_ = packed_type_parameter_counts;
11555}
11556
11558 ASSERT(value >= 0);
11559 untag()->packed_parameter_counts_.Update<PackedNumImplicitParameters>(value);
11560}
11561
11562void ClosureData::set_default_type_arguments_instantiation_mode(
11563 InstantiationMode value) const {
11564 untag()->packed_fields_.Update<PackedInstantiationMode>(value);
11565}
11566
11567Function::AwaiterLink ClosureData::awaiter_link() const {
11568 const uint8_t depth =
11569 untag()
11570 ->packed_fields_.Read<UntaggedClosureData::PackedAwaiterLinkDepth>();
11571 const uint8_t index =
11572 untag()
11573 ->packed_fields_.Read<UntaggedClosureData::PackedAwaiterLinkIndex>();
11574 return {depth, index};
11575}
11576
11577void ClosureData::set_awaiter_link(Function::AwaiterLink link) const {
11578 untag()->packed_fields_.Update<UntaggedClosureData::PackedAwaiterLinkDepth>(
11579 link.depth);
11580 untag()->packed_fields_.Update<UntaggedClosureData::PackedAwaiterLinkIndex>(
11581 link.index);
11582}
11583
11584ClosureDataPtr ClosureData::New() {
11586 return Object::Allocate<ClosureData>(Heap::kOld);
11587}
11588
11589const char* ClosureData::ToCString() const {
11590 if (IsNull()) {
11591 return "ClosureData: null";
11592 }
11593 auto const zone = Thread::Current()->zone();
11594 ZoneTextBuffer buffer(zone);
11595 buffer.Printf("ClosureData: context_scope: 0x%" Px "",
11596 static_cast<uword>(context_scope()));
11597 buffer.AddString(" parent_function: ");
11598 if (parent_function() == Object::null()) {
11599 buffer.AddString("null");
11600 } else {
11601 buffer.AddString(Object::Handle(parent_function()).ToCString());
11602 }
11603 buffer.Printf(" implicit_static_closure: 0x%" Px "",
11604 static_cast<uword>(implicit_static_closure()));
11605 return buffer.buffer();
11606}
11607
11609 ASSERT(value >= 0);
11610 untag()->packed_parameter_counts_.Update<PackedNumFixedParameters>(value);
11611}
11612
11613void FfiTrampolineData::set_callback_target(const Function& value) const {
11614 untag()->set_callback_target(value.ptr());
11615}
11616
11618 intptr_t value,
11619 bool are_optional_positional) const {
11620 // HasOptionalNamedParameters only checks this bit, so only set it if there
11621 // are actual named parameters.
11622 untag()->packed_parameter_counts_.Update<PackedHasNamedOptionalParameters>(
11623 (value > 0) && !are_optional_positional);
11624 untag()->packed_parameter_counts_.Update<PackedNumOptionalParameters>(value);
11625}
11626
11627FunctionTypePtr FunctionType::New(Heap::Space space) {
11628 return Object::Allocate<FunctionType>(space);
11629}
11630
11631FunctionTypePtr FunctionType::New(intptr_t num_parent_type_arguments,
11633 Heap::Space space) {
11634 Zone* Z = Thread::Current()->zone();
11635 const FunctionType& result =
11637 result.set_packed_parameter_counts(0);
11638 result.set_packed_type_parameter_counts(0);
11639 result.set_named_parameter_names(Object::empty_array());
11640 result.SetNumParentTypeArguments(num_parent_type_arguments);
11641 result.SetHash(0);
11642 result.set_flags(0);
11643 result.set_nullability(nullability);
11645 result.InitializeTypeTestingStubNonAtomic(
11647 return result.ptr();
11648}
11649
11650FunctionTypePtr FunctionType::Clone(const FunctionType& orig,
11651 Heap::Space space) {
11652 if (orig.IsGeneric()) {
11653 // Need a deep clone in order to update owners of type parameters.
11654 return FunctionType::RawCast(
11655 orig.UpdateFunctionTypes(0, kAllFree, space, nullptr));
11656 } else {
11657 return FunctionType::RawCast(Object::Clone(orig, space));
11658 }
11659}
11660
11662 Zone* zone = Thread::Current()->zone();
11663 ZoneTextBuffer printer(zone);
11664 Print(kUserVisibleName, &printer);
11665 return printer.buffer();
11666}
11667
11669 Thread* thread = Thread::Current();
11670 ZoneTextBuffer printer(thread->zone());
11671 Print(kUserVisibleName, &printer);
11672 return Symbols::New(thread, printer.buffer());
11673}
11674
11675const char* FunctionType::ToCString() const {
11676 if (IsNull()) {
11677 return "FunctionType: null";
11678 }
11679 Zone* zone = Thread::Current()->zone();
11680 ZoneTextBuffer printer(zone);
11681 const char* suffix = NullabilitySuffix(kInternalName);
11682 if (suffix[0] != '\0') {
11683 printer.AddString("(");
11684 }
11685 Print(kInternalName, &printer);
11686 if (suffix[0] != '\0') {
11687 printer.AddString(")");
11688 printer.AddString(suffix);
11689 }
11690 return printer.buffer();
11691}
11692
// Stores the captured context scope of the closure.
void ClosureData::set_context_scope(const ContextScope& value) const {
  untag()->set_context_scope(value.ptr());
}

// Caches the implicit static closure; write-once (slot must still be null)
// with release ordering so concurrent readers observe a fully initialized
// closure object.
void ClosureData::set_implicit_static_closure(const Closure& closure) const {
  ASSERT(!closure.IsNull());
  ASSERT(untag()->closure() == Closure::null());
  untag()->set_closure<std::memory_order_release>(closure.ptr());
}

// Stores the C signature of the native side of this FFI trampoline.
void FfiTrampolineData::set_c_signature(const FunctionType& value) const {
  untag()->set_c_signature(value.ptr());
}

// Records the callback id (-1 is the unset value assigned in New()).
void FfiTrampolineData::set_callback_id(int32_t callback_id) const {
  StoreNonPointer(&untag()->callback_id_, callback_id);
}

// Value the callback returns if the Dart callee throws.
void FfiTrampolineData::set_callback_exceptional_return(
    const Instance& value) const {
  untag()->set_callback_exceptional_return(value.ptr());
}

// Stores the FFI callback kind, narrowed to its uint8_t representation.
void FfiTrampolineData::set_ffi_function_kind(FfiCallbackKind kind) const {
  StoreNonPointer(&untag()->ffi_function_kind_, static_cast<uint8_t>(kind));
}
11719
11720FfiTrampolineDataPtr FfiTrampolineData::New() {
11722 const auto& data = FfiTrampolineData::Handle(
11723 Object::Allocate<FfiTrampolineData>(Heap::kOld));
11724 data.set_callback_id(-1);
11725 return data.ptr();
11726}
11727
11728const char* FfiTrampolineData::ToCString() const {
11729 const FunctionType& c_sig = FunctionType::Handle(c_signature());
11730 return OS::SCreate(Thread::Current()->zone(),
11731 "TrampolineData: c_signature=%s",
11732 c_sig.ToUserVisibleCString());
11733}
11734
11736 return this->Clone(*this);
11737}
11738
11739FieldPtr Field::Original() const {
11740 if (IsNull()) {
11741 return Field::null();
11742 }
11743 if (untag()->owner()->IsField()) {
11744 return static_cast<FieldPtr>(untag()->owner());
11745 }
11746 return this->ptr();
11747}
11748
11749intptr_t Field::guarded_cid() const {
11750#if defined(DEBUG)
11751 // This assertion ensures that the cid seen by the background compiler is
11752 // consistent. So the assertion passes if the field is a clone. It also
11753 // passes if the field is static, because we don't use field guards on
11754 // static fields. It also passes if we're compiling unoptimized
11755 // code (in which case the caller might get different answers if it obtains
11756 // the guarded cid multiple times).
11757 Thread* thread = Thread::Current();
11758#if defined(DART_PRECOMPILED_RUNTIME)
11759 ASSERT(!thread->IsInsideCompiler() || is_static());
11760#else
11761 ASSERT(!thread->IsInsideCompiler() ||
11762 ((CompilerState::Current().should_clone_fields() == !IsOriginal())) ||
11763 is_static());
11764#endif
11765#endif
11766 return LoadNonPointer<ClassIdTagType, std::memory_order_relaxed>(
11767 &untag()->guarded_cid_);
11768}
11769
11771#if defined(DEBUG)
11772 // Same assert as guarded_cid(), because is_nullable() also needs to be
11773 // consistent for the background compiler.
11774 Thread* thread = Thread::Current();
11775#if defined(DART_PRECOMPILED_RUNTIME)
11776 ASSERT(!thread->IsInsideCompiler() || is_static());
11777#else
11778 ASSERT(!thread->IsInsideCompiler() ||
11779 ((CompilerState::Current().should_clone_fields() == !IsOriginal())) ||
11780 is_static());
11781#endif
11782#endif
11783 return is_nullable_unsafe();
11784}
11785
// Points this clone back at its original field via the owner slot.
void Field::SetOriginal(const Field& value) const {
  ASSERT(value.IsOriginal());
  ASSERT(!value.IsNull());
  untag()->set_owner(static_cast<ObjectPtr>(value.ptr()));
}

// Prepends the getter prefix to |field_name| (plain String, not symbolized).
StringPtr Field::GetterName(const String& field_name) {
  return String::Concat(Symbols::GetterPrefix(), field_name);
}

// Getter name for |field_name| as a canonical Symbol.
StringPtr Field::GetterSymbol(const String& field_name) {
  return Symbols::FromGet(Thread::Current(), field_name);
}

// Lookup-only variant of GetterSymbol (presumably does not create a new
// symbol when absent — see Symbols::LookupFromGet).
StringPtr Field::LookupGetterSymbol(const String& field_name) {
  return Symbols::LookupFromGet(Thread::Current(), field_name);
}

// Prepends the setter prefix to |field_name| (plain String, not symbolized).
StringPtr Field::SetterName(const String& field_name) {
  return String::Concat(Symbols::SetterPrefix(), field_name);
}

// Setter name for |field_name| as a canonical Symbol.
StringPtr Field::SetterSymbol(const String& field_name) {
  return Symbols::FromSet(Thread::Current(), field_name);
}

// Lookup-only variant of SetterSymbol (presumably does not create a new
// symbol when absent — see Symbols::LookupFromSet).
StringPtr Field::LookupSetterSymbol(const String& field_name) {
  return Symbols::LookupFromSet(Thread::Current(), field_name);
}

// Strips the getter prefix from |getter_name|, returning the field name.
StringPtr Field::NameFromGetter(const String& getter_name) {
  return Symbols::New(Thread::Current(), getter_name, kGetterPrefixLength,
                      getter_name.Length() - kGetterPrefixLength);
}

// Strips the setter prefix from |setter_name|, returning the field name.
StringPtr Field::NameFromSetter(const String& setter_name) {
  return Symbols::New(Thread::Current(), setter_name, kSetterPrefixLength,
                      setter_name.Length() - kSetterPrefixLength);
}

// Strips the init prefix from |init_name|, returning the field name.
StringPtr Field::NameFromInit(const String& init_name) {
  return Symbols::New(Thread::Current(), init_name, kInitPrefixLength,
                      init_name.Length() - kInitPrefixLength);
}
11830
11832 return function_name.StartsWith(Symbols::GetterPrefix());
11833}
11834
11836 return function_name.StartsWith(Symbols::SetterPrefix());
11837}
11838
11840 return function_name.StartsWith(Symbols::InitPrefix());
11841}
11842
11843void Field::set_name(const String& value) const {
11844 ASSERT(value.IsSymbol());
11845 ASSERT(IsOriginal());
11846 untag()->set_name(value.ptr());
11847}
11848
11850 if (IsOriginal()) {
11851 return untag()->owner();
11852 } else {
11853 const Field& field = Field::Handle(Original());
11854 ASSERT(field.IsOriginal());
11855 ASSERT(!Object::Handle(field.untag()->owner()).IsField());
11856 return field.untag()->owner();
11857 }
11858}
11859
11860ClassPtr Field::Owner() const {
11861 const Field& field = Field::Handle(Original());
11862 ASSERT(field.IsOriginal());
11863 const Object& obj = Object::Handle(field.untag()->owner());
11864 if (obj.IsClass()) {
11865 return Class::Cast(obj).ptr();
11866 }
11867 ASSERT(obj.IsPatchClass());
11868 return PatchClass::Cast(obj).wrapped_class();
11869}
11870
11871ScriptPtr Field::Script() const {
11872 // NOTE(turnidge): If you update this function, you probably want to
11873 // update Class::PatchFieldsAndFunctions() at the same time.
11874 const Field& field = Field::Handle(Original());
11875 ASSERT(field.IsOriginal());
11876 const Object& obj = Object::Handle(field.untag()->owner());
11877 if (obj.IsClass()) {
11878 return Class::Cast(obj).script();
11879 }
11880 ASSERT(obj.IsPatchClass());
11881 return PatchClass::Cast(obj).script();
11882}
11883
11884#if !defined(DART_PRECOMPILED_RUNTIME)
11885KernelProgramInfoPtr Field::KernelProgramInfo() const {
11886 const auto& owner = Object::Handle(RawOwner());
11887 if (owner.IsClass()) {
11888 return Class::Cast(owner).KernelProgramInfo();
11889 }
11890 return PatchClass::Cast(owner).kernel_program_info();
11891}
11892#endif
11893
11894uint32_t Field::Hash() const {
11895 return String::HashRawSymbol(name());
11896}
11897
11899#if defined(DART_PRECOMPILED_RUNTIME)
11900 UNREACHABLE();
11901#else
11902 StoreNonPointer(&untag()->kernel_offset_, src.untag()->kernel_offset_);
11903#endif
11904}
11905
11906#if !defined(DART_PRECOMPILED_RUNTIME)
11907TypedDataViewPtr Field::KernelLibrary() const {
11909 return info.KernelLibrary(KernelLibraryIndex());
11910}
11911
11913 const intptr_t kernel_library_index = KernelLibraryIndex();
11914 if (kernel_library_index == -1) return 0;
11916 return info.KernelLibraryStartOffset(kernel_library_index);
11917}
11918
11920 const Object& obj = Object::Handle(untag()->owner());
11921 // During background JIT compilation field objects are copied
11922 // and copy points to the original field via the owner field.
11923 if (obj.IsField()) {
11924 return Field::Cast(obj).KernelLibraryIndex();
11925 } else if (obj.IsClass()) {
11926 const auto& lib = Library::Handle(Class::Cast(obj).library());
11927 return lib.kernel_library_index();
11928 }
11929 ASSERT(obj.IsPatchClass());
11930 return PatchClass::Cast(obj).kernel_library_index();
11931}
11932#endif // !defined(DART_PRECOMPILED_RUNTIME)
11933
11935 ASSERT(IsOriginal());
11936 ASSERT(!value.IsNull());
11937 if (value.ptr() != type()) {
11938 untag()->set_type(value.ptr());
11939 }
11940}
11941
11942// Called at finalization time
11945 IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
11946 SetFieldTypeSafe(value);
11947}
11948
11949FieldPtr Field::New() {
11951 return Object::Allocate<Field>(Heap::kOld);
11952}
11953
11954void Field::InitializeNew(const Field& result,
11955 const String& name,
11956 bool is_static,
11957 bool is_final,
11958 bool is_const,
11959 bool is_reflectable,
11960 bool is_late,
11961 const Object& owner,
11962 TokenPosition token_pos,
11963 TokenPosition end_token_pos) {
11964 result.set_kind_bits(0);
11965 result.set_name(name);
11966 result.set_is_static(is_static);
11967 if (is_static) {
11968 result.set_field_id_unsafe(-1);
11969 } else {
11970 result.SetOffset(0, 0);
11971 }
11972 result.set_is_final(is_final);
11973 result.set_is_const(is_const);
11974 result.set_is_reflectable(is_reflectable);
11975 result.set_is_late(is_late);
11976 result.set_owner(owner);
11977 result.set_token_pos(token_pos);
11978 result.set_end_token_pos(end_token_pos);
11979 result.set_has_nontrivial_initializer_unsafe(false);
11980 result.set_has_initializer_unsafe(false);
11981 // We will make unboxing decision once we read static type or
11982 // in KernelLoader::ReadInferredType.
11983 result.set_is_unboxed_unsafe(false);
11984 result.set_initializer_changed_after_initialization(false);
11985 NOT_IN_PRECOMPILED(result.set_kernel_offset(0));
11986 result.set_has_pragma(false);
11987 result.set_static_type_exactness_state_unsafe(
11989 auto isolate_group = IsolateGroup::Current();
11990
11991// Use field guards if they are enabled and the isolate has never reloaded.
11992// TODO(johnmccutchan): The reload case assumes the worst case (everything is
11993// dynamic and possibly null). Attempt to relax this later.
11994//
11995// Do not use field guards for late fields as late field initialization
11996// doesn't update guarded cid and length.
11997#if defined(PRODUCT)
11998 const bool use_guarded_cid =
11999 FLAG_precompiled_mode || (isolate_group->use_field_guards() && !is_late);
12000#else
12001 const bool use_guarded_cid =
12002 FLAG_precompiled_mode ||
12003 (isolate_group->use_field_guards() &&
12004 !isolate_group->HasAttemptedReload() && !is_late);
12005#endif // !defined(PRODUCT)
12006 result.set_guarded_cid_unsafe(use_guarded_cid ? kIllegalCid : kDynamicCid);
12007 result.set_is_nullable_unsafe(use_guarded_cid ? false : true);
12008 result.set_guarded_list_length_in_object_offset_unsafe(
12010 // Presently, we only attempt to remember the list length for final fields.
12011 if (is_final && use_guarded_cid) {
12012 result.set_guarded_list_length_unsafe(Field::kUnknownFixedLength);
12013 } else {
12014 result.set_guarded_list_length_unsafe(Field::kNoFixedLength);
12015 }
12016}
12017
12018FieldPtr Field::New(const String& name,
12019 bool is_static,
12020 bool is_final,
12021 bool is_const,
12022 bool is_reflectable,
12023 bool is_late,
12024 const Object& owner,
12025 const AbstractType& type,
12026 TokenPosition token_pos,
12027 TokenPosition end_token_pos) {
12028 ASSERT(!owner.IsNull());
12029 const Field& result = Field::Handle(Field::New());
12030 InitializeNew(result, name, is_static, is_final, is_const, is_reflectable,
12031 is_late, owner, token_pos, end_token_pos);
12032 result.SetFieldTypeSafe(type);
12033#if !defined(DART_PRECOMPILED_RUNTIME)
12035#endif
12036 return result.ptr();
12037}
12038
12040 bool is_final,
12041 bool is_const,
12042 bool is_late,
12043 const Object& owner,
12044 TokenPosition token_pos,
12045 TokenPosition end_token_pos) {
12046 ASSERT(!owner.IsNull());
12047 const Field& result = Field::Handle(Field::New());
12048 InitializeNew(result, name, true, /* is_static */
12049 is_final, is_const, true, /* is_reflectable */
12050 is_late, owner, token_pos, end_token_pos);
12051 return result.ptr();
12052}
12053
12054FieldPtr Field::Clone(const Field& original) const {
12055 if (original.IsNull()) {
12056 return Field::null();
12057 }
12058 ASSERT(original.IsOriginal());
12059 Field& clone = Field::Handle();
12060 // Using relaxed loading is fine because concurrent fields changes are all
12061 // guarded, will be reconciled during optimized code installation.
12062 clone ^= Object::Clone(*this, Heap::kOld, /*load_with_relaxed_atomics=*/true);
12063 clone.SetOriginal(original);
12064 clone.InheritKernelOffsetFrom(original);
12065 return clone.ptr();
12066}
12067
12069#if !defined(DART_PRECOMPILED_RUNTIME)
12071 *this);
12072#else
12073 return 0;
12074#endif // !defined(DART_PRECOMPILED_RUNTIME)
12075}
12076
12078 UNREACHABLE();
12079 return String::null();
12080}
12081
12083 NoSafepointScope no_safepoint;
12084 if (FLAG_show_internal_names) {
12085 return String::Handle(name()).ToCString();
12086 }
12088 is_extension_member() || is_extension_type_member());
12089}
12090
12091StringPtr Field::UserVisibleName() const {
12092 if (FLAG_show_internal_names) {
12093 return name();
12094 }
12095 return Symbols::New(
12098 is_extension_member() || is_extension_type_member()));
12099}
12100
12102 return Smi::Value(untag()->guarded_list_length());
12103}
12104
// Stores |list_length| (boxed as a Smi) as this field's guarded list length.
// Only legal on the original field, never on a clone. The "_unsafe" suffix
// presumably signals that the caller owns any required synchronization —
// TODO confirm against the other *_unsafe setters' call sites.
void Field::set_guarded_list_length_unsafe(intptr_t list_length) const {
  ASSERT(IsOriginal());
  untag()->set_guarded_list_length(Smi::New(list_length));
}
12109
12111 return untag()->guarded_list_length_in_object_offset_ + kHeapObjectTag;
12112}
12113
12115 intptr_t list_length_offset) const {
12116 ASSERT(IsOriginal());
12117 StoreNonPointer<int8_t, int8_t, std::memory_order_relaxed>(
12118 &untag()->guarded_list_length_in_object_offset_,
12119 static_cast<int8_t>(list_length_offset - kHeapObjectTag));
12120 ASSERT(guarded_list_length_in_object_offset() == list_length_offset);
12121}
12122
12124 // According to the Dart language specification, final fields don't have
12125 // a setter, except late final fields without initializer.
12126 if (is_final()) {
12127 // Late final fields without initializer always need a setter to check
12128 // if they are already initialized.
12129 if (is_late() && !has_initializer()) {
12130 return true;
12131 }
12132 return false;
12133 }
12134
12135 // Instance non-final fields always need a setter.
12136 if (!is_static()) {
12137 return true;
12138 }
12139
12140 // Otherwise, setters for static fields can be omitted
12141 // and fields can be accessed directly.
12142 return false;
12143}
12144
12146 // All instance fields need a getter.
12147 if (!is_static()) return true;
12148
12149 // Static fields also need a getter if they have a non-trivial initializer,
12150 // because it needs to be initialized lazily.
12151 if (has_nontrivial_initializer()) return true;
12152
12153 // Static late fields with no initializer also need a getter, to check if it's
12154 // been initialized.
12155 return is_late() && !has_initializer();
12156}
12157
12158const char* Field::ToCString() const {
12159 NoSafepointScope no_safepoint;
12160 if (IsNull()) {
12161 return "Field: null";
12162 }
12163 const char* kF0 = is_static() ? " static" : "";
12164 const char* kF1 = is_late() ? " late" : "";
12165 const char* kF2 = is_final() ? " final" : "";
12166 const char* kF3 = is_const() ? " const" : "";
12167 const char* kF4 = is_shared() ? " shared" : "";
12168 const char* field_name = String::Handle(name()).ToCString();
12169 const Class& cls = Class::Handle(Owner());
12170 const char* cls_name = String::Handle(cls.Name()).ToCString();
12171 return OS::SCreate(Thread::Current()->zone(), "Field <%s.%s>:%s%s%s%s%s",
12172 cls_name, field_name, kF0, kF1, kF2, kF3, kF4);
12173}
12174
12175// Build a closure object that gets (or sets) the contents of a static
12176// field f and cache the closure in a newly created static field
12177// named #f (or #f= in case of a setter).
12178InstancePtr Field::AccessorClosure(bool make_setter) const {
12179 Thread* thread = Thread::Current();
12180 Zone* zone = thread->zone();
12181 ASSERT(is_static());
12182 const Class& field_owner = Class::Handle(zone, Owner());
12183
12184 String& closure_name = String::Handle(zone, this->name());
12185 closure_name = Symbols::FromConcat(thread, Symbols::HashMark(), closure_name);
12186 if (make_setter) {
12187 closure_name =
12188 Symbols::FromConcat(thread, Symbols::HashMark(), closure_name);
12189 }
12190
12191 Field& closure_field = Field::Handle(zone);
12192 closure_field = field_owner.LookupStaticField(closure_name);
12193 if (!closure_field.IsNull()) {
12194 ASSERT(closure_field.is_static());
12195 const Instance& closure =
12196 Instance::Handle(zone, Instance::RawCast(closure_field.StaticValue()));
12197 ASSERT(!closure.IsNull());
12198 ASSERT(closure.IsClosure());
12199 return closure.ptr();
12200 }
12201
12202 UNREACHABLE();
12203 return Instance::null();
12204}
12205
// Returns the cached closure that reads this static field
// (see AccessorClosure).
InstancePtr Field::GetterClosure() const {
  return AccessorClosure(false);
}
12209
// Returns the cached closure that writes this static field
// (see AccessorClosure).
InstancePtr Field::SetterClosure() const {
  return AccessorClosure(true);
}
12213
12214WeakArrayPtr Field::dependent_code() const {
12216 IsolateGroup::Current()->program_lock()->IsCurrentThreadReader());
12217 return untag()->dependent_code();
12218}
12219
12220void Field::set_dependent_code(const WeakArray& array) const {
12221 ASSERT(IsOriginal());
12223 IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
12224 untag()->set_dependent_code(array.ptr());
12225}
12226
12228 public:
12229 explicit FieldDependentArray(const Field& field)
12230 : WeakCodeReferences(WeakArray::Handle(field.dependent_code())),
12231 field_(field) {}
12232
12233 virtual void UpdateArrayTo(const WeakArray& value) {
12234 field_.set_dependent_code(value);
12235 }
12236
12237 virtual void ReportDeoptimization(const Code& code) {
12238 if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
12239 Function& function = Function::Handle(code.function());
12240 THR_Print("Deoptimizing %s because guard on field %s failed.\n",
12241 function.ToFullyQualifiedCString(), field_.ToCString());
12242 }
12243 }
12244
12245 virtual void ReportSwitchingCode(const Code& code) {
12246 if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
12247 Function& function = Function::Handle(code.function());
12248 THR_Print(
12249 "Switching '%s' to unoptimized code because guard"
12250 " on field '%s' was violated.\n",
12251 function.ToFullyQualifiedCString(), field_.ToCString());
12252 }
12253 }
12254
12255 private:
12256 const Field& field_;
12258};
12259
12261 ASSERT(IsOriginal());
12262 DEBUG_ASSERT(IsMutatorOrAtDeoptSafepoint());
12263 ASSERT(code.is_optimized());
12264 FieldDependentArray a(*this);
12265 a.Register(code);
12266}
12267
12268void Field::DeoptimizeDependentCode(bool are_mutators_stopped) const {
12270 IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
12271 ASSERT(IsOriginal());
12272 FieldDependentArray a(*this);
12273 if (FLAG_trace_deoptimization && a.HasCodes()) {
12274 THR_Print("Deopt for field guard (field %s)\n", ToCString());
12275 }
12276 a.DisableCode(are_mutators_stopped);
12277}
12278
12279bool Field::IsConsistentWith(const Field& other) const {
12280 return (untag()->guarded_cid_ == other.untag()->guarded_cid_) &&
12281 (untag()->is_nullable_ == other.untag()->is_nullable_) &&
12282 (untag()->guarded_list_length() ==
12283 other.untag()->guarded_list_length()) &&
12284 (is_unboxed() == other.is_unboxed()) &&
12285 (static_type_exactness_state().Encode() ==
12287}
12288
12290 Thread* thread = Thread::Current();
12291 const FieldTable* field_table = thread->isolate()->field_table();
12292 const ObjectPtr raw_value = field_table->At(field_id());
12293 ASSERT(raw_value != Object::transition_sentinel().ptr());
12294 return raw_value == Object::sentinel().ptr();
12295}
12296
12298 ASSERT(has_nontrivial_initializer());
12299 ASSERT(IsOriginal());
12300 Thread* thread = Thread::Current();
12301 Zone* zone = thread->zone();
12302 Function& initializer = Function::Handle(zone, InitializerFunction());
12303 if (initializer.IsNull()) {
12304#if defined(DART_PRECOMPILED_RUNTIME)
12305 UNREACHABLE();
12306#else
12309 // Double check after grabbing the lock.
12310 initializer = InitializerFunction();
12311 if (initializer.IsNull()) {
12313 }
12314#endif
12315 }
12316 return initializer.ptr();
12317}
12318
12320#if defined(DART_PRECOMPILED_RUNTIME)
12321 UNREACHABLE();
12322#else
12323 ASSERT(IsOriginal());
12325 ->initializer_functions_mutex()
12326 ->IsOwnedByCurrentThread());
12327 // We have to ensure that all stores into the initializer function object
12328 // happen before releasing the pointer to the initializer as it may be
12329 // accessed without grabbing the lock.
12330 untag()->set_initializer_function<std::memory_order_release>(
12331 initializer.ptr());
12332#endif
12333}
12334
12336 return untag()->initializer_function() != Function::null();
12337}
12338
12340 ASSERT(IsOriginal());
12341 ASSERT(is_instance());
12342 ASSERT(instance.GetField(*this) == Object::sentinel().ptr());
12344
12345 if (has_nontrivial_initializer()) {
12346 const Function& initializer = Function::Handle(EnsureInitializerFunction());
12347 const Array& args = Array::Handle(Array::New(1));
12348 args.SetAt(0, instance);
12350 if (!value.IsNull() && value.IsError()) {
12351 return Error::Cast(value).ptr();
12352 }
12353 } else {
12354 if (is_late() && !has_initializer()) {
12356 UNREACHABLE();
12357 }
12358#if defined(DART_PRECOMPILED_RUNTIME)
12359 UNREACHABLE();
12360#else
12361 // Our trivial initializer is `null`. Any non-`null` initializer is
12362 // non-trivial (see `KernelLoader::CheckForInitializer()`).
12363 value = Object::null();
12364#endif
12365 }
12366 ASSERT(value.IsNull() || value.IsInstance());
12367 if (is_late() && is_final() &&
12368 (instance.GetField(*this) != Object::sentinel().ptr())) {
12370 String::Handle(name()));
12371 UNREACHABLE();
12372 }
12373 instance.SetField(*this, value);
12374 return Error::null();
12375}
12376
12377ErrorPtr Field::InitializeStatic() const {
12378 ASSERT(IsOriginal());
12379 ASSERT(is_static());
12380 if (StaticValue() == Object::sentinel().ptr()) {
12381 auto& value = Object::Handle();
12382 if (is_late()) {
12383 if (!has_initializer()) {
12385 UNREACHABLE();
12386 }
12387 value = EvaluateInitializer();
12388 if (value.IsError()) {
12389 return Error::Cast(value).ptr();
12390 }
12391 if (is_final() && (StaticValue() != Object::sentinel().ptr())) {
12393 String::Handle(name()));
12394 UNREACHABLE();
12395 }
12396 } else {
12397 SetStaticValue(Object::transition_sentinel());
12398 value = EvaluateInitializer();
12399 if (value.IsError()) {
12400 SetStaticValue(Object::null_instance());
12401 return Error::Cast(value).ptr();
12402 }
12403 }
12404 ASSERT(value.IsNull() || value.IsInstance());
12405 SetStaticValue(value.IsNull() ? Instance::null_instance()
12406 : Instance::Cast(value));
12407 return Error::null();
12408 } else if (StaticValue() == Object::transition_sentinel().ptr()) {
12409 ASSERT(!is_late());
12410 const Array& ctor_args = Array::Handle(Array::New(1));
12411 const String& field_name = String::Handle(name());
12412 ctor_args.SetAt(0, field_name);
12414 UNREACHABLE();
12415 }
12416 return Error::null();
12417}
12418
12420 ASSERT(is_static() &&
12421 (is_const() || (is_final() && has_trivial_initializer())));
12422
12423 auto thread = Thread::Current();
12424 auto zone = thread->zone();
12425 auto initial_field_table = thread->isolate_group()->initial_field_table();
12426
12427 // We can safely cache the value of the static const field in the initial
12428 // field table.
12429 ASSERT(!is_shared());
12430 auto& value = Object::Handle(
12431 zone, initial_field_table->At(field_id(), /*concurrent_use=*/true));
12432 if (value.ptr() == Object::sentinel().ptr()) {
12433 // Fields with trivial initializers get their initial value
12434 // eagerly when they are registered.
12435 ASSERT(is_const());
12436 ASSERT(has_initializer());
12437 ASSERT(has_nontrivial_initializer());
12438 value = EvaluateInitializer();
12439 if (!value.IsError()) {
12440 ASSERT(value.IsNull() || value.IsInstance());
12441 SetStaticConstFieldValue(value.IsNull() ? Instance::null_instance()
12442 : Instance::Cast(value));
12443 }
12444 }
12445 return value.ptr();
12446}
12447
12449 bool assert_initializing_store) const {
12450 ASSERT(is_static());
12451 ASSERT(!is_shared());
12452 auto thread = Thread::Current();
12453 auto initial_field_table = thread->isolate_group()->initial_field_table();
12454
12455 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
12456 ASSERT(initial_field_table->At(field_id()) == Object::sentinel().ptr() ||
12457 initial_field_table->At(field_id()) == value.ptr() ||
12458 !assert_initializing_store);
12459 initial_field_table->SetAt(field_id(),
12460 value.IsNull() ? Instance::null_instance().ptr()
12461 : Instance::Cast(value).ptr(),
12462 /*concurrent_use=*/true);
12463}
12464
12466 ASSERT(Thread::Current()->IsDartMutatorThread());
12467
12468#if !defined(DART_PRECOMPILED_RUNTIME)
12469 if (is_static() && is_const()) {
12471 }
12472#endif // !defined(DART_PRECOMPILED_RUNTIME)
12473
12474 const Function& initializer = Function::Handle(EnsureInitializerFunction());
12475 return DartEntry::InvokeFunction(initializer, Object::empty_array());
12476}
12477
12478static intptr_t GetListLength(const Object& value) {
12479 if (value.IsTypedDataBase()) {
12480 return TypedDataBase::Cast(value).Length();
12481 } else if (value.IsArray()) {
12482 return Array::Cast(value).Length();
12483 } else if (value.IsGrowableObjectArray()) {
12484 // List length is variable.
12485 return Field::kNoFixedLength;
12486 }
12487 return Field::kNoFixedLength;
12488}
12489
12490static intptr_t GetListLengthOffset(intptr_t cid) {
12494 return TypedData::length_offset();
12495 } else if (cid == kArrayCid || cid == kImmutableArrayCid) {
12496 return Array::length_offset();
12497 } else if (cid == kGrowableObjectArrayCid) {
12498 // List length is variable.
12500 }
12502}
12503
12505 if (guarded_cid() == kIllegalCid) {
12506 return "<?>";
12507 } else if (guarded_cid() == kDynamicCid) {
12508 ASSERT(!static_type_exactness_state().IsExactOrUninitialized());
12509 return "<*>";
12510 }
12511
12512 Zone* zone = Thread::Current()->zone();
12513
12514 const char* exactness = "";
12515 if (static_type_exactness_state().IsTracking()) {
12516 exactness =
12517 zone->PrintToString(" {%s}", static_type_exactness_state().ToCString());
12518 }
12519
12520 const Class& cls =
12521 Class::Handle(IsolateGroup::Current()->class_table()->At(guarded_cid()));
12522 const char* class_name = String::Handle(cls.Name()).ToCString();
12523
12524 if (IsBuiltinListClassId(guarded_cid()) && !is_nullable() && is_final()) {
12525 ASSERT(guarded_list_length() != kUnknownFixedLength);
12526 if (guarded_list_length() == kNoFixedLength) {
12527 return zone->PrintToString("<%s [*]%s>", class_name, exactness);
12528 } else {
12529 return zone->PrintToString(
12530 "<%s [%" Pd " @%" Pd "]%s>", class_name, guarded_list_length(),
12531 guarded_list_length_in_object_offset(), exactness);
12532 }
12533 }
12534
12535 return zone->PrintToString("<%s %s%s>",
12536 is_nullable() ? "nullable" : "not-nullable",
12537 class_name, exactness);
12538}
12539
12543 ASSERT(IsOriginal());
12544 if (needs_length_check() &&
12545 (guarded_list_length() != Field::kUnknownFixedLength)) {
12546 const intptr_t offset = GetListLengthOffset(guarded_cid());
12547 (this->*setter)(offset);
12549 } else {
12550 (this->*setter)(Field::kUnknownLengthOffset);
12551 }
12552}
12553
12555 public:
12556 FieldGuardUpdater(const Field* field, const Object& value);
12557
12559 return does_guarded_cid_need_update_ || does_is_nullable_need_update_ ||
12560 does_list_length_and_offset_need_update_ ||
12561 does_static_type_exactness_state_need_update_;
12562 }
12563 void DoUpdate();
12564
12565 private:
12566 void ReviewExactnessState();
12567 void ReviewGuards();
12568
12569 intptr_t guarded_cid() { return guarded_cid_; }
12570 void set_guarded_cid(intptr_t guarded_cid) {
12571 guarded_cid_ = guarded_cid;
12572 does_guarded_cid_need_update_ = true;
12573 }
12574
12575 bool is_nullable() { return is_nullable_; }
12576 void set_is_nullable(bool is_nullable) {
12577 is_nullable_ = is_nullable;
12578 does_is_nullable_need_update_ = true;
12579 }
12580
12581 intptr_t guarded_list_length() { return list_length_; }
12582 void set_guarded_list_length_and_offset(
12583 intptr_t list_length,
12584 intptr_t list_length_in_object_offset) {
12585 list_length_ = list_length;
12586 list_length_in_object_offset_ = list_length_in_object_offset;
12587 does_list_length_and_offset_need_update_ = true;
12588 }
12589
12590 StaticTypeExactnessState static_type_exactness_state() {
12591 return static_type_exactness_state_;
12592 }
12593 void set_static_type_exactness_state(StaticTypeExactnessState state) {
12594 static_type_exactness_state_ = state;
12595 does_static_type_exactness_state_need_update_ = true;
12596 }
12597
12598 const Field* field_;
12599 const Object& value_;
12600
12601 intptr_t guarded_cid_;
12602 bool is_nullable_;
12603 intptr_t list_length_;
12604 intptr_t list_length_in_object_offset_;
12605 StaticTypeExactnessState static_type_exactness_state_;
12606
12607 bool does_guarded_cid_need_update_ = false;
12608 bool does_is_nullable_need_update_ = false;
12609 bool does_list_length_and_offset_need_update_ = false;
12610 bool does_static_type_exactness_state_need_update_ = false;
12611};
12612
12613void FieldGuardUpdater::ReviewGuards() {
12614 ASSERT(field_->IsOriginal());
12615 const intptr_t cid = value_.GetClassId();
12616
12617 if (guarded_cid() == kIllegalCid) {
12618 set_guarded_cid(cid);
12619 set_is_nullable(cid == kNullCid);
12620
12621 // Start tracking length if needed.
12622 ASSERT((guarded_list_length() == Field::kUnknownFixedLength) ||
12623 (guarded_list_length() == Field::kNoFixedLength));
12624 if (field_->needs_length_check()) {
12625 ASSERT(guarded_list_length() == Field::kUnknownFixedLength);
12626 set_guarded_list_length_and_offset(GetListLength(value_),
12628 }
12629
12630 if (FLAG_trace_field_guards) {
12631 THR_Print(" => %s\n", field_->GuardedPropertiesAsCString());
12632 }
12633 return;
12634 }
12635
12636 if ((cid == guarded_cid()) || ((cid == kNullCid) && is_nullable())) {
12637 // Class id of the assigned value matches expected class id and nullability.
12638
12639 // If we are tracking length check if it has matches.
12640 if (field_->needs_length_check() &&
12641 (guarded_list_length() != GetListLength(value_))) {
12642 ASSERT(guarded_list_length() != Field::kUnknownFixedLength);
12643 set_guarded_list_length_and_offset(Field::kNoFixedLength,
12645 return;
12646 }
12647
12648 // Everything matches.
12649 return;
12650 }
12651
12652 if ((cid == kNullCid) && !is_nullable()) {
12653 // Assigning null value to a non-nullable field makes it nullable.
12654 set_is_nullable(true);
12655 } else if ((cid != kNullCid) && (guarded_cid() == kNullCid)) {
12656 // Assigning non-null value to a field that previously contained only null
12657 // turns it into a nullable field with the given class id.
12658 ASSERT(is_nullable());
12659 set_guarded_cid(cid);
12660 } else {
12661 // Give up on tracking class id of values contained in this field.
12662 ASSERT(guarded_cid() != cid);
12663 set_guarded_cid(kDynamicCid);
12664 set_is_nullable(true);
12665 }
12666
12667 // If we were tracking length drop collected feedback.
12668 if (field_->needs_length_check()) {
12669 ASSERT(guarded_list_length() != Field::kUnknownFixedLength);
12670 set_guarded_list_length_and_offset(Field::kNoFixedLength,
12672 }
12673}
12674
12676 const Class& cls,
12678 bool consider_only_super_classes) const {
12680 if (cls.ptr() == ptr()) {
12681 return true; // Found instantiation.
12682 }
12683
12684 Class& cls2 = Class::Handle(zone);
12685 Type& super = Type::Handle(zone, super_type());
12686 if (!super.IsNull() && !super.IsObjectType()) {
12687 cls2 = super.type_class();
12688 if (path != nullptr) {
12689 path->Add(&super);
12690 }
12691 if (cls2.FindInstantiationOf(zone, cls, path,
12692 consider_only_super_classes)) {
12693 return true; // Found instantiation.
12694 }
12695 if (path != nullptr) {
12696 path->RemoveLast();
12697 }
12698 }
12699
12700 if (!consider_only_super_classes) {
12701 Array& super_interfaces = Array::Handle(zone, interfaces());
12702 for (intptr_t i = 0; i < super_interfaces.Length(); i++) {
12703 super ^= super_interfaces.At(i);
12704 cls2 = super.type_class();
12705 if (path != nullptr) {
12706 path->Add(&super);
12707 }
12708 if (cls2.FindInstantiationOf(zone, cls, path)) {
12709 return true; // Found instantiation.
12710 }
12711 if (path != nullptr) {
12712 path->RemoveLast();
12713 }
12714 }
12715 }
12716
12717 return false; // Not found.
12718}
12719
12721 const Type& type,
12723 bool consider_only_super_classes) const {
12724 return FindInstantiationOf(zone, Class::Handle(zone, type.type_class()), path,
12725 consider_only_super_classes);
12726}
12727
12728TypePtr Class::GetInstantiationOf(Zone* zone, const Class& cls) const {
12729 if (ptr() == cls.ptr()) {
12730 return DeclarationType();
12731 }
12732 if (FindInstantiationOf(zone, cls, /*consider_only_super_classes=*/true)) {
12733 // Since [cls] is a superclass of [this], use [cls]'s declaration type.
12734 return cls.DeclarationType();
12735 }
12736 const auto& decl_type = Type::Handle(zone, DeclarationType());
12738 if (!FindInstantiationOf(zone, cls, &path)) {
12739 return Type::null();
12740 }
12741 Thread* thread = Thread::Current();
12742 ASSERT(!path.is_empty());
12743 auto& calculated_type = Type::Handle(zone, decl_type.ptr());
12744 auto& calculated_type_class =
12745 Class::Handle(zone, calculated_type.type_class());
12746 auto& calculated_type_args =
12747 TypeArguments::Handle(zone, calculated_type.arguments());
12748 calculated_type_args = calculated_type_args.ToInstantiatorTypeArguments(
12749 thread, calculated_type_class);
12750 for (auto* const type : path) {
12751 calculated_type ^= type->ptr();
12752 if (!calculated_type.IsInstantiated()) {
12753 calculated_type ^= calculated_type.InstantiateFrom(
12754 calculated_type_args, Object::null_type_arguments(), kAllFree,
12755 Heap::kNew);
12756 }
12757 calculated_type_class = calculated_type.type_class();
12758 calculated_type_args = calculated_type.arguments();
12759 calculated_type_args = calculated_type_args.ToInstantiatorTypeArguments(
12760 thread, calculated_type_class);
12761 }
12762 ASSERT_EQUAL(calculated_type.type_class_id(), cls.id());
12763 return calculated_type.ptr();
12764}
12765
// Convenience overload: computes the instantiation for |type|'s class.
TypePtr Class::GetInstantiationOf(Zone* zone, const Type& type) const {
  return GetInstantiationOf(zone, Class::Handle(zone, type.type_class()));
}
12769
12771 ASSERT(!is_shared());
12772 auto thread = Thread::Current();
12773 ASSERT(thread->IsDartMutatorThread());
12774 ASSERT(value.IsNull() || value.IsSentinel() || value.IsInstance());
12775
12776 ASSERT(is_static()); // Valid only for static dart fields.
12777 const intptr_t id = field_id();
12778 ASSERT(id >= 0);
12779
12780 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
12781 thread->isolate()->field_table()->SetAt(id, value.ptr());
12782}
12783
12785 const intptr_t type_arguments_offset = cls.host_type_arguments_field_offset();
12786 ASSERT(type_arguments_offset != Class::kNoTypeArguments);
12788 type_arguments_offset / kCompressedWordSize)) {
12789 return StaticTypeExactnessState::TriviallyExact(type_arguments_offset /
12791 } else {
12793 }
12794}
12795
12797 return (args.ptr() == TypeArguments::null()) ? "<null>" : args.ToCString();
12798}
12799
12801 const Type& static_type,
12802 const Instance& value,
12803 bool print_trace /* = false */) {
12804 ASSERT(!value.IsNull()); // Should be handled by the caller.
12805 ASSERT(value.ptr() != Object::sentinel().ptr());
12806 ASSERT(value.ptr() != Object::transition_sentinel().ptr());
12807
12808 Thread* thread = Thread::Current();
12809 Zone* const zone = thread->zone();
12810 const TypeArguments& static_type_args =
12811 TypeArguments::Handle(zone, static_type.GetInstanceTypeArguments(thread));
12812
12814
12815 ASSERT(static_type.IsFinalized());
12816 const Class& cls = Class::Handle(zone, value.clazz());
12818
12819 bool is_super_class = true;
12820 if (!cls.FindInstantiationOf(zone, static_type, &path,
12821 /*consider_only_super_classes=*/true)) {
12822 is_super_class = false;
12823 bool found_super_interface =
12824 cls.FindInstantiationOf(zone, static_type, &path);
12825 ASSERT(found_super_interface);
12826 }
12827
12828 // Trivial case: field has type G<T0, ..., Tn> and value has type
12829 // G<U0, ..., Un>. Check if type arguments match.
12830 if (path.is_empty()) {
12831 ASSERT(cls.ptr() == static_type.type_class());
12832 args = value.GetTypeArguments();
12833 // TODO(dartbug.com/34170) Evaluate if comparing relevant subvectors (that
12834 // disregards superclass own arguments) improves precision of the
12835 // tracking.
12836 if (args.ptr() == static_type_args.ptr()) {
12837 return TrivialTypeExactnessFor(cls);
12838 }
12839
12840 if (print_trace) {
12841 THR_Print(" expected %s got %s type arguments\n",
12842 SafeTypeArgumentsToCString(static_type_args),
12844 }
12846 }
12847
12848 // Value has type C<U0, ..., Un> and field has type G<T0, ..., Tn> and G != C.
12849 // Compute C<X0, ..., Xn> at G (Xi are free type arguments).
12850 // Path array contains a chain of immediate supertypes S0 <: S1 <: ... Sn,
12851 // such that S0 is an immediate supertype of C and Sn is G<...>.
12852 // Each Si might depend on type parameters of the previous supertype S{i-1}.
12853 // To compute C<X0, ..., Xn> at G we walk the chain backwards and
12854 // instantiate Si using type parameters of S{i-1} which gives us a type
12855 // depending on type parameters of S{i-2}.
12856 Type& type = Type::Handle(zone, path.Last()->ptr());
12857 for (intptr_t i = path.length() - 2; (i >= 0) && !type.IsInstantiated();
12858 i--) {
12859 args = path[i]->GetInstanceTypeArguments(thread, /*canonicalize=*/false);
12860 type ^= type.InstantiateFrom(args, TypeArguments::null_type_arguments(),
12862 }
12863
12864 if (type.IsInstantiated()) {
12865 // C<X0, ..., Xn> at G is fully instantiated and does not depend on
12866 // Xi. In this case just check if type arguments match.
12867 args = type.GetInstanceTypeArguments(thread, /*canonicalize=*/false);
12868 if (args.Equals(static_type_args)) {
12869 return is_super_class ? StaticTypeExactnessState::HasExactSuperClass()
12871 }
12872
12873 if (print_trace) {
12874 THR_Print(" expected %s got %s type arguments\n",
12875 SafeTypeArgumentsToCString(static_type_args),
12877 }
12878
12880 }
12881
12882 // The most complicated case: C<X0, ..., Xn> at G depends on
12883 // Xi values. To compare type arguments we would need to instantiate
12884 // it fully from value's type arguments and compare with <U0, ..., Un>.
12885 // However this would complicate fast path in the native code. To avoid this
12886 // complication we would optimize for the trivial case: we check if
12887 // C<X0, ..., Xn> at G is exactly G<X0, ..., Xn> which means we can simply
12888 // compare values type arguments (<T0, ..., Tn>) to fields type arguments
12889 // (<U0, ..., Un>) to establish if field type is exact.
12890 ASSERT(cls.IsGeneric());
12891 const intptr_t num_type_params = cls.NumTypeParameters();
12892 bool trivial_case =
12893 (num_type_params ==
12894 Class::Handle(zone, static_type.type_class()).NumTypeParameters()) &&
12895 (value.GetTypeArguments() == static_type_args.ptr());
12896 if (!trivial_case && FLAG_trace_field_guards) {
12897 THR_Print("Not a simple case: %" Pd " vs %" Pd
12898 " type parameters, %s vs %s type arguments\n",
12899 num_type_params,
12900 Class::Handle(zone, static_type.type_class()).NumTypeParameters(),
12902 TypeArguments::Handle(zone, value.GetTypeArguments())),
12903 SafeTypeArgumentsToCString(static_type_args));
12904 }
12905
12906 AbstractType& type_arg = AbstractType::Handle(zone);
12907 args = type.GetInstanceTypeArguments(thread, /*canonicalize=*/false);
12908 for (intptr_t i = 0; (i < num_type_params) && trivial_case; i++) {
12909 type_arg = args.TypeAt(i);
12910 if (!type_arg.IsTypeParameter() ||
12911 (TypeParameter::Cast(type_arg).index() != i)) {
12912 if (FLAG_trace_field_guards) {
12913 THR_Print(" => encountered %s at index % " Pd "\n",
12914 type_arg.ToCString(), i);
12915 }
12916 trivial_case = false;
12917 }
12918 }
12919
12920 return trivial_case ? TrivialTypeExactnessFor(cls)
12922}
12923
12925 if (!IsTracking()) {
12926 return "not-tracking";
12927 } else if (!IsExactOrUninitialized()) {
12928 return "not-exact";
12929 } else if (IsTriviallyExact()) {
12930 return Thread::Current()->zone()->PrintToString(
12931 "trivially-exact(%hhu)", GetTypeArgumentsOffsetInWords());
12932 } else if (IsHasExactSuperType()) {
12933 return "has-exact-super-type";
12934 } else if (IsHasExactSuperClass()) {
12935 return "has-exact-super-class";
12936 } else {
12937 ASSERT(IsUninitialized());
12938 return "uninitialized-exactness";
12939 }
12940}
12941
// Re-evaluates the pending static-type-exactness state for the value being
// stored (value_), downgrading tracking when the store invalidates it. Only
// records the decision via set_static_type_exactness_state(); the field
// itself is mutated later in DoUpdate().
void FieldGuardUpdater::ReviewExactnessState() {
  if (!static_type_exactness_state().IsExactOrUninitialized()) {
    // Nothing to update.
    return;
  }

  // Once the guarded cid has degraded to dynamic, exactness tracking is
  // switched off for good.
  if (guarded_cid() == kDynamicCid) {
    if (FLAG_trace_field_guards) {
      THR_Print(
          " => switching off exactness tracking because guarded cid is "
          "dynamic\n");
    }
    set_static_type_exactness_state(StaticTypeExactnessState::NotExact());
    return;
  }

  // If we are storing null into a field or we have an exact super type
  // then there is nothing to do.
  if (value_.IsNull() || static_type_exactness_state().IsHasExactSuperType() ||
      static_type_exactness_state().IsHasExactSuperClass()) {
    return;
  }

  // If we are storing a non-null value into a field that is considered
  // to be trivially exact then we need to check if value has an appropriate
  // type.
  ASSERT(guarded_cid() != kNullCid);

  const Type& field_type = Type::Cast(AbstractType::Handle(field_->type()));
  const Instance& instance = Instance::Cast(value_);

  if (static_type_exactness_state().IsTriviallyExact()) {
    // Trivially exact: the value's type arguments must be pointer-identical
    // to the field type's instance type arguments to stay exact.
    const TypeArguments& args =
        TypeArguments::Handle(instance.GetTypeArguments());
    const TypeArguments& field_type_args = TypeArguments::Handle(
        field_type.GetInstanceTypeArguments(Thread::Current()));
    if (args.ptr() == field_type_args.ptr()) {
      return;
    }

    if (FLAG_trace_field_guards) {
      THR_Print("  expected %s got %s type arguments\n",
                field_type_args.ToCString(), args.ToCString());
    }

    set_static_type_exactness_state(StaticTypeExactnessState::NotExact());
    return;
  }

  // First non-null store into a still-uninitialized state: compute the
  // exactness classification from the field type and the stored value.
  ASSERT(static_type_exactness_state().IsUninitialized());
  set_static_type_exactness_state(StaticTypeExactnessState::Compute(
      field_type, instance, FLAG_trace_field_guards));
  return;
}
12996
12998 : field_(field),
12999 value_(value),
13000 guarded_cid_(field->guarded_cid()),
13001 is_nullable_(field->is_nullable()),
13002 list_length_(field->guarded_list_length()),
13003 list_length_in_object_offset_(
13004 field->guarded_list_length_in_object_offset()),
13005 static_type_exactness_state_(field->static_type_exactness_state()) {
13006 ReviewGuards();
13007 ReviewExactnessState();
13008}
13009
13011 if (does_guarded_cid_need_update_) {
13012 field_->set_guarded_cid(guarded_cid_);
13013 }
13014 if (does_is_nullable_need_update_) {
13015 field_->set_is_nullable(is_nullable_);
13016 }
13017 if (does_list_length_and_offset_need_update_) {
13018 field_->set_guarded_list_length(list_length_);
13020 list_length_in_object_offset_);
13021 }
13022 if (does_static_type_exactness_state_need_update_) {
13023 field_->set_static_type_exactness_state(static_type_exactness_state_);
13024 }
13025}
13026
// Records a store of |value| into this field and, when field guards are
// enabled, updates the field's guard state (guarded cid, nullability, list
// length, exactness) under the program lock, deoptimizing dependent code.
void Field::RecordStore(const Object& value) const {
  ASSERT(IsOriginal());
  Thread* const thread = Thread::Current();
  if (!thread->isolate_group()->use_field_guards()) {
    return;
  }

  // We should never try to record a sentinel.
  ASSERT(value.ptr() != Object::sentinel().ptr());

  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  if ((guarded_cid() == kDynamicCid) ||
      (is_nullable() && value.ptr() == Object::null())) {
    // Nothing to do: the field is not guarded or we are storing null into
    // a nullable field.
    return;
  }

  if (FLAG_trace_field_guards) {
    THR_Print("Store %s %s <- %s\n", ToCString(), GuardedPropertiesAsCString(),
              value.ToCString());
  }

  FieldGuardUpdater updater(this, value);
  if (updater.IsUpdateNeeded()) {
    if (FLAG_trace_field_guards) {
      // NOTE(review): a trace THR_Print statement appears to have been
      // dropped from this branch during extraction — confirm against VCS.
    }
    // Nobody else could have updated guard state since we are holding write
    // program lock. But we need to ensure we stop mutators as we update
    // guard state as we can't have optimized code running with updated fields.
    auto isolate_group = IsolateGroup::Current();
    isolate_group->RunWithStoppedMutators([&]() {
      updater.DoUpdate();
      DeoptimizeDependentCode(/*are_mutators_stopped=*/true);
    });
  }
}
13065
// Widens this field's guard state so that any value is admissible.
// NOTE(review): several statements in this function (e.g. the actual
// cid/length resets and the exactness-state reset in the empty branch
// below, plus the final deoptimization call) appear to have been dropped
// during extraction — confirm against VCS history.
void Field::ForceDynamicGuardedCidAndLength() const {
  if (!is_unboxed()) {
    set_is_nullable(true);
  }
  if (static_type_exactness_state().IsTracking()) {
  }
  // Drop any code that relied on the above assumptions.
}
13079
// Returns the resolved URL of this script.
StringPtr Script::resolved_url() const {
#if defined(DART_PRECOMPILER)
  // NOTE(review): the argument of this cast was dropped during extraction
  // (presumably it unwraps a weakly-serialized reference) — confirm.
  return String::RawCast(
#else
  return untag()->resolved_url();
#endif
}
13088
13089bool Script::HasSource() const {
13090 return untag()->source() != String::null();
13091}
13092
13093StringPtr Script::Source() const {
13094 return untag()->source();
13095}
13096
13098 const String& script_url = String::Handle(url());
13099 return (script_url.StartsWith(Symbols::DartScheme()) ||
13100 script_url.StartsWith(Symbols::DartSchemePrivate()));
13101}
13102
13103#if !defined(DART_PRECOMPILED_RUNTIME)
13104void Script::LoadSourceFromKernel(const uint8_t* kernel_buffer,
13105 intptr_t kernel_buffer_len) const {
13108 kernel_buffer, kernel_buffer_len, uri));
13109 set_source(source);
13110}
13111
13113 const KernelProgramInfo& info,
13114 intptr_t script_index,
13115 const TypedData& line_starts,
13116 const TypedDataView& constant_coverage) const {
13117 StoreNonPointer(&untag()->kernel_script_index_, script_index);
13118 untag()->set_kernel_program_info(info.ptr());
13119 untag()->set_line_starts(line_starts.ptr());
13120 untag()->set_debug_positions(Array::null_array().ptr());
13121 NOT_IN_PRODUCT(untag()->set_constant_coverage(constant_coverage.ptr()));
13122}
13123#endif
13124
// Builds the service-protocol line-number table for this script:
// a flat array of [null, line, tokenPos, column, tokenPos, column, ...]
// groups, one group per source line that has debug positions.
GrowableObjectArrayPtr Script::GenerateLineNumberArray() const {
  Zone* zone = Thread::Current()->zone();
  // NOTE(review): the allocation of |info| was dropped during extraction.
  const GrowableObjectArray& info =
  const Object& line_separator = Object::Handle(zone);
  if (line_starts() == TypedData::null()) {
    // Scripts in the AOT snapshot do not have a line starts array.
    // A well-formed line number array has a leading null.
    info.Add(line_separator);  // New line.
    return info.ptr();
  }
#if !defined(DART_PRECOMPILED_RUNTIME)
  Smi& value = Smi::Handle(zone);
  const TypedData& line_starts_data = TypedData::Handle(zone, line_starts());
  intptr_t line_count = line_starts_data.Length();
  const Array& debug_positions_array = Array::Handle(debug_positions());
  intptr_t token_count = debug_positions_array.Length();
  int token_index = 0;

  kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone);
  for (int line_index = 0; line_index < line_count; ++line_index) {
    intptr_t start = line_starts_reader.At(line_index);
    // Output the rest of the tokens if we have no next line.
    // NOTE(review): the declaration/default of |end| was dropped during
    // extraction — confirm against VCS history.
    if (line_index + 1 < line_count) {
      end = line_starts_reader.At(line_index + 1);
    }
    bool first = true;
    // Consume all debug positions that fall before the next line start.
    while (token_index < token_count) {
      value ^= debug_positions_array.At(token_index);
      intptr_t debug_position = value.Value();
      if (debug_position >= end) break;

      if (first) {
        info.Add(line_separator);  // New line.
        value = Smi::New(line_index + 1);  // Line number.
        info.Add(value);
        first = false;
      }

      value ^= debug_positions_array.At(token_index);
      info.Add(value);  // Token position.
      value = Smi::New(debug_position - start + 1);  // Column.
      info.Add(value);
      ++token_index;
    }
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  return info.ptr();
}
13175
13177#if !defined(DART_PRECOMPILED_RUNTIME)
13178 if (HasCachedMaxPosition()) {
13181 untag()->flags_and_max_position_));
13182 }
13183 auto const zone = Thread::Current()->zone();
13184 if (!HasCachedMaxPosition() && line_starts() != TypedData::null()) {
13185 const auto& starts = TypedData::Handle(zone, line_starts());
13186 kernel::KernelLineStartsReader reader(starts, zone);
13187 const intptr_t max_position = reader.MaxPosition();
13188 SetCachedMaxPosition(max_position);
13189 SetHasCachedMaxPosition(true);
13190 return TokenPosition::Deserialize(max_position);
13191 }
13192#endif
13193 return TokenPosition::kNoSource;
13194}
13195
13196void Script::set_url(const String& value) const {
13197 untag()->set_url(value.ptr());
13198}
13199
13200void Script::set_resolved_url(const String& value) const {
13201 untag()->set_resolved_url(value.ptr());
13202}
13203
13204void Script::set_source(const String& value) const {
13205 untag()->set_source(value.ptr());
13206}
13207
13208#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
13209TypedDataViewPtr Script::constant_coverage() const {
13210 return untag()->constant_coverage();
13211}
13212#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
13213
13214void Script::set_debug_positions(const Array& value) const {
13215 untag()->set_debug_positions(value.ptr());
13216}
13217
13218TypedDataPtr Script::line_starts() const {
13219 return untag()->line_starts();
13220}
13221
// Returns the (lazily created) array of debug token positions for this
// script.
ArrayPtr Script::debug_positions() const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  Array& debug_positions_array = Array::Handle(untag()->debug_positions());
  if (debug_positions_array.IsNull()) {
    // This is created lazily. Now we need it.
    // NOTE(review): the call that populates the array was dropped during
    // extraction — confirm against VCS history.
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
  return untag()->debug_positions();
}
13232
13233#if !defined(DART_PRECOMPILED_RUNTIME)
// Whether the maximum token position has already been computed and cached
// in flags_and_max_position_.
bool Script::HasCachedMaxPosition() const {
  // NOTE(review): the start of the return expression (presumably a
  // bit-field decode) was dropped during extraction — confirm against VCS.
      untag()->flags_and_max_position_);
}
13238
// Records whether a cached max position is present.
void Script::SetHasCachedMaxPosition(bool value) const {
  StoreNonPointer(&untag()->flags_and_max_position_,
  // NOTE(review): the bit-field update expression wrapping these arguments
  // was dropped during extraction — confirm against VCS history.
                  value, untag()->flags_and_max_position_));
}
13244
// Stores the computed maximum token position into the packed
// flags_and_max_position_ field.
void Script::SetCachedMaxPosition(intptr_t value) const {
  StoreNonPointer(&untag()->flags_and_max_position_,
  // NOTE(review): the bit-field update expression wrapping these arguments
  // was dropped during extraction — confirm against VCS history.
                  value, untag()->flags_and_max_position_));
}
13250#endif
13251
13252void Script::set_load_timestamp(int64_t value) const {
13253 StoreNonPointer(&untag()->load_timestamp_, value);
13254}
13255
13257 const TokenPosition& max_position = MaxPosition();
13258 // We may end up with scripts that have the empty string as a source file
13259 // in testing and the like, so allow any token position when the max position
13260 // is 0 as well as when it is kNoSource.
13261 return !max_position.IsReal() || !token_pos.IsReal() ||
13262 max_position.Pos() == 0 || token_pos <= max_position;
13263}
13264
13265#if !defined(DART_PRECOMPILED_RUNTIME)
// True when |c| is an ASCII letter (A-Z or a-z).
static bool IsLetter(int32_t c) {
  const bool is_upper = (c >= 'A') && (c <= 'Z');
  const bool is_lower = (c >= 'a') && (c <= 'z');
  return is_upper || is_lower;
}
13269
// True when |c| is an ASCII decimal digit (0-9).
static bool IsDecimalDigit(int32_t c) {
  return (c >= '0') && (c <= '9');
}
13273
13274static bool IsIdentStartChar(int32_t c) {
13275 return IsLetter(c) || (c == '_') || (c == '$');
13276}
13277
13278static bool IsIdentChar(int32_t c) {
13279 return IsLetter(c) || IsDecimalDigit(c) || (c == '_') || (c == '$');
13280}
13281#endif // !defined(DART_PRECOMPILED_RUNTIME)
13282
13284 intptr_t* line,
13285 intptr_t* column) const {
13286 ASSERT(line != nullptr);
13287#if defined(DART_PRECOMPILED_RUNTIME)
13288 // Scripts in the AOT snapshot do not have a line starts array.
13289 return false;
13290#else
13291 if (!token_pos.IsReal()) return false;
13292
13293 auto const zone = Thread::Current()->zone();
13294 const TypedData& line_starts_data = TypedData::Handle(zone, line_starts());
13295 if (line_starts_data.IsNull()) return false;
13296 kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone);
13297 return line_starts_reader.LocationForPosition(token_pos.Pos(), line, column);
13298#endif // defined(DART_PRECOMPILED_RUNTIME)
13299}
13300
13301intptr_t Script::GetTokenLength(const TokenPosition& token_pos) const {
13302#if defined(DART_PRECOMPILED_RUNTIME)
13303 // Scripts in the AOT snapshot do not have their source.
13304 return -1;
13305#else
13306 if (!HasSource() || !token_pos.IsReal()) return -1;
13307 auto const zone = Thread::Current()->zone();
13308 // We don't explicitly save this data: Load the source and find it from there.
13309 const String& source = String::Handle(zone, Source());
13310 const intptr_t start = token_pos.Pos();
13311 if (start >= source.Length()) return -1; // Can't determine token_len.
13312 intptr_t end = start;
13313 if (IsIdentStartChar(source.CharAt(end++))) {
13314 for (; end < source.Length(); ++end) {
13315 if (!IsIdentChar(source.CharAt(end))) break;
13316 }
13317 }
13318 return end - start;
13319#endif
13320}
13321
// Computes the token-position range covered by the (1-based) |line_number|
// into *first_token_index / *last_token_index. Returns false when the line
// number is invalid or no line-starts data is available.
bool Script::TokenRangeAtLine(intptr_t line_number,
                              TokenPosition* first_token_index,
                              TokenPosition* last_token_index) const {
  ASSERT(first_token_index != nullptr && last_token_index != nullptr);
#if defined(DART_PRECOMPILED_RUNTIME)
  // Scripts in the AOT snapshot do not have a line starts array.
  return false;
#else
  // Line numbers are 1-indexed.
  if (line_number <= 0) return false;
  Zone* zone = Thread::Current()->zone();
  const TypedData& line_starts_data = TypedData::Handle(zone, line_starts());
  kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone);
  if (!line_starts_reader.TokenRangeAtLine(line_number, first_token_index,
                                           last_token_index)) {
    return false;
  }
#if defined(DEBUG)
  // Sanity check: the last token must lie within the source text, or, when
  // the source is unavailable, within the recorded debug positions.
  intptr_t source_length;
  if (!HasSource()) {
    Smi& value = Smi::Handle(zone);
    const Array& debug_positions_array = Array::Handle(zone, debug_positions());
    value ^= debug_positions_array.At(debug_positions_array.Length() - 1);
    source_length = value.Value();
  } else {
    const String& source = String::Handle(zone, Source());
    source_length = source.Length();
  }
  ASSERT(last_token_index->Serialize() <= source_length);
#endif
  return true;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
13355
// Returns the index in the given source string for the given (1-based) absolute
// line and column numbers. The line and column offsets are used to calculate
// the absolute line and column number for the starting index in the source.
//
// If the given line number is outside the range of lines represented by the
// source, the given column number invalid for the given line, or a negative
// starting index is given, a negative value is returned to indicate failure.
static intptr_t GetRelativeSourceIndex(const String& src,
                                       intptr_t line,
                                       intptr_t line_offset = 0,
                                       intptr_t column = 1,
                                       intptr_t column_offset = 0,
                                       intptr_t starting_index = 0) {
  // Reject positions that lie before the position described by the offsets.
  if (starting_index < 0 || line < 1 || column < 1 || line <= line_offset ||
      (line == line_offset + 1 && column <= column_offset)) {
    return -1;
  }
  intptr_t len = src.Length();
  intptr_t current_line = line_offset + 1;
  intptr_t current_index = starting_index;
  // Scan forward to the start of the requested line.
  for (; current_index < len; current_index++) {
    if (current_line == line) {
      break;
    }
    const uint16_t c = src.CharAt(current_index);
    if (c == '\n' || c == '\r') {
      current_line++;
    }
    if (c == '\r' && current_index + 1 < len &&
        src.CharAt(current_index + 1) == '\n') {
      // \r\n is treated as a single line terminator.
      current_index++;
    }
  }
  if (current_line != line) {
    return -1;
  }
  // Only adjust with column offset when still on the first line.
  intptr_t current_column = 1 + (line == line_offset + 1 ? column_offset : 0);
  // Scan within the line to the requested column; stop at a line break.
  for (; current_index < len; current_index++, current_column++) {
    if (current_column == column) {
      return current_index;
    }
    const uint16_t c = src.CharAt(current_index);
    if (c == '\n' || c == '\r') {
      break;
    }
  }
  // Check for a column value representing the source's end.
  if (current_column == column) {
    return current_index;
  }
  return -1;
}
13410
13411StringPtr Script::GetLine(intptr_t line_number, Heap::Space space) const {
13412 if (!HasSource()) {
13413 return Symbols::OptimizedOut().ptr();
13414 }
13415 const String& src = String::Handle(Source());
13416 const intptr_t start =
13417 GetRelativeSourceIndex(src, line_number, line_offset());
13418 if (start < 0) {
13419 return Symbols::Empty().ptr();
13420 }
13421 intptr_t end = start;
13422 for (; end < src.Length(); end++) {
13423 const uint16_t c = src.CharAt(end);
13424 if (c == '\n' || c == '\r') {
13425 break;
13426 }
13427 }
13428 return String::SubString(src, start, end - start, space);
13429}
13430
13431StringPtr Script::GetSnippet(intptr_t from_line,
13432 intptr_t from_column,
13433 intptr_t to_line,
13434 intptr_t to_column) const {
13435 if (!HasSource()) {
13436 return Symbols::OptimizedOut().ptr();
13437 }
13438 const String& src = String::Handle(Source());
13439 const intptr_t start = GetRelativeSourceIndex(src, from_line, line_offset(),
13440 from_column, col_offset());
13441 // Lines and columns are 1-based, so need to subtract one to get offsets.
13442 const intptr_t end = GetRelativeSourceIndex(
13443 src, to_line, from_line - 1, to_column, from_column - 1, start);
13444 // Only need to check end, because a negative start results in a negative end.
13445 if (end < 0) {
13446 return String::null();
13447 }
13448 return String::SubString(src, start, end - start);
13449}
13450
13451ScriptPtr Script::New(const String& url, const String& source) {
13452 return Script::New(url, url, source);
13453}
13454
// Allocates a new Script in old space with symbolized |url| and
// |resolved_url|, the given |source|, and a load timestamp (unless
// timestamps are suppressed for testing).
ScriptPtr Script::New(const String& url,
                      const String& resolved_url,
                      const String& source) {
  // NOTE(review): a line between the signature and the Thread lookup was
  // dropped during extraction — confirm against VCS history.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Script& result =
      Script::Handle(zone, Object::Allocate<Script>(Heap::kOld));
  result.set_url(String::Handle(zone, Symbols::New(thread, url)));
  result.set_resolved_url(
      String::Handle(zone, Symbols::New(thread, resolved_url)));
  result.set_source(source);
  NOT_IN_PRECOMPILED(ASSERT_EQUAL(result.HasCachedMaxPosition(), false));
  ASSERT_EQUAL(result.kernel_script_index(), 0);
  if (FLAG_remove_script_timestamps_for_test) {
    ASSERT_EQUAL(result.load_timestamp(), 0);
  } else {
    result.set_load_timestamp(OS::GetCurrentTimeMillis());
  }
  return result.ptr();
}
13476
13477const char* Script::ToCString() const {
13478 const String& name = String::Handle(url());
13479 return OS::SCreate(Thread::Current()->zone(), "Script(%s)", name.ToCString());
13480}
13481
// Searches all loaded libraries for one whose loaded-scripts array
// contains this script; returns Library::null() when none does.
LibraryPtr Script::FindLibrary() const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  auto isolate_group = thread->isolate_group();
  // NOTE(review): the declaration of |libs| was dropped during extraction —
  // confirm against VCS history.
      zone, isolate_group->object_store()->libraries());
  Library& lib = Library::Handle(zone);
  Array& scripts = Array::Handle(zone);
  for (intptr_t i = 0; i < libs.Length(); i++) {
    lib ^= libs.At(i);
    scripts = lib.LoadedScripts();
    for (intptr_t j = 0; j < scripts.Length(); j++) {
      if (scripts.At(j) == ptr()) {
        return lib.ptr();
      }
    }
  }
  return Library::null();
}
13501
13503 : array_(Array::Handle(library.dictionary())),
13504 // Last element in array is a Smi indicating the number of entries used.
13505 size_(Array::Handle(library.dictionary()).Length() - 1),
13506 next_ix_(0) {
13507 MoveToNextObject();
13508}
13509
13511 ASSERT(HasNext());
13512 int ix = next_ix_++;
13513 MoveToNextObject();
13514 ASSERT(array_.At(ix) != Object::null());
13515 return array_.At(ix);
13516}
13517
13518void DictionaryIterator::MoveToNextObject() {
13519 Object& obj = Object::Handle(array_.At(next_ix_));
13520 while (obj.IsNull() && HasNext()) {
13521 next_ix_++;
13522 obj = array_.At(next_ix_);
13523 }
13524}
13525
13527 IterationKind kind)
13528 : DictionaryIterator(library),
13529 toplevel_class_(Class::Handle((kind == kIteratePrivate)
13530 ? library.toplevel_class()
13531 : Class::null())) {
13532 MoveToNextClass();
13533}
13534
13536 ASSERT(HasNext());
13537 Class& cls = Class::Handle();
13538 if (next_ix_ < size_) {
13539 int ix = next_ix_++;
13540 cls ^= array_.At(ix);
13541 MoveToNextClass();
13542 return cls.ptr();
13543 }
13544 ASSERT(!toplevel_class_.IsNull());
13545 cls = toplevel_class_.ptr();
13546 toplevel_class_ = Class::null();
13547 return cls.ptr();
13548}
13549
13550void ClassDictionaryIterator::MoveToNextClass() {
13551 Object& obj = Object::Handle();
13552 while (next_ix_ < size_) {
13553 obj = array_.At(next_ix_);
13554 if (obj.IsClass()) {
13555 return;
13556 }
13557 next_ix_++;
13558 }
13559}
13560
// Reports a fatal error when a library exceeds the supported number of
// imports; never returns.
static void ReportTooManyImports(const Library& lib) {
  const String& url = String::Handle(lib.url());
  // NOTE(review): the start of the report call was dropped during
  // extraction — confirm against VCS history.
      TokenPosition::kNoSource, Report::AtLocation,
      "too many imports in library '%s'", url.ToCString());
  UNREACHABLE();
}
13568
13570 String& url_str = Thread::Current()->StringHandle();
13571 url_str = url();
13572 return url_str.StartsWith(Symbols::DartScheme()) ||
13573 url_str.StartsWith(Symbols::DartSchemePrivate());
13574}
13575
13576void Library::set_num_imports(intptr_t value) const {
13577 if (!Utils::IsUint(16, value)) {
13578 ReportTooManyImports(*this);
13579 }
13580 StoreNonPointer(&untag()->num_imports_, value);
13581}
13582
13583void Library::set_name(const String& name) const {
13584 ASSERT(name.IsSymbol());
13585 untag()->set_name(name.ptr());
13586}
13587
13588void Library::set_url(const String& url) const {
13589 untag()->set_url(url.ptr());
13590}
13591
13592void Library::set_private_key(const String& key) const {
13593 untag()->set_private_key(key.ptr());
13594}
13595
13596#if !defined(DART_PRECOMPILED_RUNTIME)
13598 untag()->set_kernel_program_info(info.ptr());
13599}
13600
13601TypedDataViewPtr Library::KernelLibrary() const {
13603 return info.KernelLibrary(kernel_library_index());
13604}
13605
13608 return info.KernelLibraryStartOffset(kernel_library_index());
13609}
13610#endif
13611
13613 untag()->set_loading_unit(value.ptr());
13614}
13615
13616void Library::SetName(const String& name) const {
13617 // Only set name once.
13618 ASSERT(!Loaded());
13619 set_name(name);
13620}
13621
13623 // Must not already be in the process of being loaded.
13624 ASSERT(untag()->load_state_ <= UntaggedLibrary::kLoadRequested);
13625 StoreNonPointer(&untag()->load_state_, UntaggedLibrary::kLoadInProgress);
13626}
13627
13629 // Must not be already loaded.
13630 ASSERT(untag()->load_state_ == UntaggedLibrary::kAllocated);
13631 StoreNonPointer(&untag()->load_state_, UntaggedLibrary::kLoadRequested);
13632}
13633
13635 // Should not be already loaded or just allocated.
13637 StoreNonPointer(&untag()->load_state_, UntaggedLibrary::kLoaded);
13638}
13639
// Associates |declaration| with the kernel offset of its (not yet
// evaluated) metadata annotations in this library's metadata map.
void Library::AddMetadata(const Object& declaration,
                          intptr_t kernel_offset) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  Thread* thread = Thread::Current();
  // NOTE(review): a statement after the Thread lookup was dropped during
  // extraction (possibly a lock assertion) — confirm against VCS history.

  MetadataMap map(metadata());
  map.UpdateOrInsert(declaration, Smi::Handle(Smi::New(kernel_offset)));
  set_metadata(map.Release());
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
13653
// Returns the (evaluated) metadata annotations of |declaration|. A Smi in
// the metadata map marks not-yet-evaluated metadata identified by its
// kernel offset; it is evaluated on demand and the result cached.
ObjectPtr Library::GetMetadata(const Object& declaration) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  return Object::empty_array().ptr();
#else
  RELEASE_ASSERT(declaration.IsClass() || declaration.IsField() ||
                 declaration.IsFunction() || declaration.IsLibrary() ||
                 declaration.IsTypeParameter() || declaration.IsNamespace());

  auto thread = Thread::Current();
  auto zone = thread->zone();

  if (declaration.IsLibrary()) {
    // Ensure top-level class is loaded as it may contain annotations of
    // a library.
    const auto& cls = Class::Handle(zone, toplevel_class());
    if (!cls.IsNull()) {
      cls.EnsureDeclarationLoaded();
    }
  }
  Object& value = Object::Handle(zone);
  {
    // Read the cached entry under the program read lock.
    SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
    MetadataMap map(metadata());
    value = map.GetOrNull(declaration);
    set_metadata(map.Release());
  }
  if (value.IsNull()) {
    // There is no metadata for this object.
    return Object::empty_array().ptr();
  }
  if (!value.IsSmi()) {
    // Metadata is already evaluated.
    ASSERT(value.IsArray());
    return value.ptr();
  }
  // A Smi payload is the kernel offset of unevaluated metadata.
  const auto& smi_value = Smi::Cast(value);
  intptr_t kernel_offset = smi_value.Value();
  ASSERT(kernel_offset > 0);
  const auto& evaluated_value = Object::Handle(
  // NOTE(review): the metadata-evaluation call wrapping these arguments
  // was dropped during extraction — confirm against VCS history.
      *this, kernel_offset,
      /* is_annotations_offset = */ declaration.IsLibrary() ||
          declaration.IsNamespace()));
  if (evaluated_value.IsArray() || evaluated_value.IsNull()) {
    ASSERT(evaluated_value.ptr() != Object::empty_array().ptr());
    // Cache the result unless another thread evaluated it concurrently.
    SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
    MetadataMap map(metadata());
    if (map.GetOrNull(declaration) == smi_value.ptr()) {
      map.UpdateOrInsert(declaration, evaluated_value);
    } else {
      ASSERT(map.GetOrNull(declaration) == evaluated_value.ptr());
    }
    set_metadata(map.Release());
  }
  return evaluated_value.ptr();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
13711
13712#if !defined(DART_PRECOMPILED_RUNTIME)
13713static bool HasPragma(const Object& declaration) {
13714 return (declaration.IsClass() && Class::Cast(declaration).has_pragma()) ||
13715 (declaration.IsFunction() &&
13716 Function::Cast(declaration).has_pragma()) ||
13717 (declaration.IsField() && Field::Cast(declaration).has_pragma());
13718}
13719
13721 Object& declaration = Object::Handle();
13724 {
13725 auto thread = Thread::Current();
13726 SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
13727 MetadataMap map(metadata());
13728 MetadataMap::Iterator it(&map);
13729 while (it.MoveNext()) {
13730 const intptr_t entry = it.Current();
13731 ASSERT(entry != -1);
13732 declaration = map.GetKey(entry);
13733 if (HasPragma(declaration)) {
13734 declarations.Add(declaration);
13735 }
13736 }
13737 set_metadata(map.Release());
13738 }
13739 for (intptr_t i = 0; i < declarations.Length(); ++i) {
13740 declaration = declarations.At(i);
13741 GetMetadata(declaration);
13742 }
13743}
13744
// Copies the already-evaluated pragma metadata entries from |old_lib| into
// this library's metadata map (used during hot reload).
void Library::CopyPragmas(const Library& old_lib) {
  auto thread = Thread::Current();
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  MetadataMap new_map(metadata());
  MetadataMap old_map(old_lib.metadata());
  Object& declaration = Object::Handle();
  // NOTE(review): the declaration of |value| was dropped during
  // extraction — confirm against VCS history.
  MetadataMap::Iterator it(&old_map);
  while (it.MoveNext()) {
    const intptr_t entry = it.Current();
    ASSERT(entry != -1);
    declaration = old_map.GetKey(entry);
    if (HasPragma(declaration)) {
      value = old_map.GetPayload(entry, 0);
      ASSERT(!value.IsNull());
      // Pragmas should be evaluated during hot reload phase 1
      // (when checkpointing libraries).
      ASSERT(!value.IsSmi());
      new_map.UpdateOrInsert(declaration, value);
    }
  }
  old_lib.set_metadata(old_map.Release());
  set_metadata(new_map.Release());
}
13769#endif // !defined(DART_PRECOMPILED_RUNTIME)
13770
13771static bool ShouldBePrivate(const String& name) {
13772 return (name.Length() >= 1 && name.CharAt(0) == '_') ||
13773 (name.Length() >= 5 &&
13774 (name.CharAt(4) == '_' &&
13775 (name.CharAt(0) == 'g' || name.CharAt(0) == 's') &&
13776 name.CharAt(1) == 'e' && name.CharAt(2) == 't' &&
13777 name.CharAt(3) == ':'));
13778}
13779
13780void Library::RehashDictionary(const Array& old_dict,
13781 intptr_t new_dict_size) const {
13782 intptr_t old_dict_size = old_dict.Length() - 1;
13783 const Array& new_dict =
13784 Array::Handle(Array::New(new_dict_size + 1, Heap::kOld));
13785 // Rehash all elements from the original dictionary
13786 // to the newly allocated array.
13787 Object& entry = Class::Handle();
13788 String& entry_name = String::Handle();
13789 Object& new_entry = Object::Handle();
13790 intptr_t used = 0;
13791 for (intptr_t i = 0; i < old_dict_size; i++) {
13792 entry = old_dict.At(i);
13793 if (!entry.IsNull()) {
13794 entry_name = entry.DictionaryName();
13795 ASSERT(!entry_name.IsNull());
13796 const intptr_t hash = entry_name.Hash();
13797 intptr_t index = hash % new_dict_size;
13798 new_entry = new_dict.At(index);
13799 while (!new_entry.IsNull()) {
13800 index = (index + 1) % new_dict_size; // Move to next element.
13801 new_entry = new_dict.At(index);
13802 }
13803 new_dict.SetAt(index, entry);
13804 used++;
13805 }
13806 }
13807 // Set used count.
13808 ASSERT(used < new_dict_size); // Need at least one empty slot.
13809 new_entry = Smi::New(used);
13810 new_dict.SetAt(new_dict_size, new_entry);
13811 // Remember the new dictionary now.
13812 untag()->set_dictionary(new_dict.ptr());
13813}
13814
13815void Library::AddObject(const Object& obj, const String& name) const {
13816 ASSERT(Thread::Current()->IsDartMutatorThread());
13817 ASSERT(obj.IsClass() || obj.IsFunction() || obj.IsField() ||
13818 obj.IsLibraryPrefix());
13819 ASSERT(name.Equals(String::Handle(obj.DictionaryName())));
13820 ASSERT(LookupLocalObject(name) == Object::null());
13821 const Array& dict = Array::Handle(dictionary());
13822 intptr_t dict_size = dict.Length() - 1;
13823 intptr_t index = name.Hash() % dict_size;
13824
13825 Object& entry = Object::Handle();
13826 entry = dict.At(index);
13827 // An empty spot will be found because we keep the hash set at most 75% full.
13828 while (!entry.IsNull()) {
13829 index = (index + 1) % dict_size;
13830 entry = dict.At(index);
13831 }
13832
13833 // Insert the object at the empty slot.
13834 dict.SetAt(index, obj);
13835 // One more element added.
13836 intptr_t used_elements = Smi::Value(Smi::RawCast(dict.At(dict_size))) + 1;
13837 const Smi& used = Smi::Handle(Smi::New(used_elements));
13838 dict.SetAt(dict_size, used); // Update used count.
13839
13840 // Rehash if symbol_table is 75% full.
13841 if (used_elements > ((dict_size / 4) * 3)) {
13842 // TODO(iposva): Avoid exponential growth.
13843 RehashDictionary(dict, 2 * dict_size);
13844 }
13845
13846 // Invalidate the cache of loaded scripts.
13847 if (loaded_scripts() != Array::null()) {
13848 untag()->set_loaded_scripts(Array::null());
13849 }
13850}
13851
13852// Lookup a name in the library's re-export namespace.
13853// This lookup can occur from two different threads: background compiler and
13854// mutator thread.
13856 ZoneGrowableArray<intptr_t>* trail) const {
13857 if (!HasExports()) {
13858 return Object::null();
13859 }
13860
13861 if (trail == nullptr) {
13862 trail = new ZoneGrowableArray<intptr_t>();
13863 }
13864 Object& obj = Object::Handle();
13865
13866 const intptr_t lib_id = this->index();
13867 ASSERT(lib_id >= 0); // We use -1 to indicate that a cycle was found.
13868 trail->Add(lib_id);
13869 const Array& exports = Array::Handle(this->exports());
13871 for (int i = 0; i < exports.Length(); i++) {
13872 ns ^= exports.At(i);
13873 obj = ns.Lookup(name, trail);
13874 if (!obj.IsNull()) {
13875 // The Lookup call above may return a setter x= when we are looking
13876 // for the name x. Make sure we only return when a matching name
13877 // is found.
13878 String& obj_name = String::Handle(obj.DictionaryName());
13879 if (Field::IsSetterName(obj_name) == Field::IsSetterName(name)) {
13880 break;
13881 }
13882 }
13883 }
13884 trail->RemoveLast();
13885 return obj.ptr();
13886}
13887
// Looks up |name| in this library's dictionary. On success returns the
// entry with *index at its slot; on failure returns Object::null() with
// *index at the empty slot that ended the probe sequence.
ObjectPtr Library::LookupEntry(const String& name, intptr_t* index) const {
  ASSERT(!IsNull());
  Thread* thread = Thread::Current();
  // NOTE(review): reusable-handle scope lines appear to have been dropped
  // here during extraction — confirm against VCS history.
  Array& dict = thread->ArrayHandle();
  dict = dictionary();
  intptr_t dict_size = dict.Length() - 1;
  *index = name.Hash() % dict_size;
  Object& entry = thread->ObjectHandle();
  String& entry_name = thread->StringHandle();
  entry = dict.At(*index);
  // Search the entry in the hash set.
  while (!entry.IsNull()) {
    entry_name = entry.DictionaryName();
    ASSERT(!entry_name.IsNull());
    if (entry_name.Equals(name)) {
      return entry.ptr();
    }
    *index = (*index + 1) % dict_size;
    entry = dict.At(*index);
  }
  return Object::null();
}
13913
// Adds [cls] to this library's dictionary under its own name and links the
// class back to this library as its owner.
void Library::AddClass(const Class& cls) const {
  const String& class_name = String::Handle(cls.Name());
  AddObject(cls, class_name);
  // Link class to this library.
  cls.set_library(*this);
}
13921
// Appends [candidate] to [scripts] unless it is null or already present.
// NOTE(review): the first line of this static helper's signature is not
// visible in this listing.
                              const Script& candidate) {
  if (candidate.IsNull()) {
    return;
  }
  Script& script_obj = Script::Handle();

  // Linear membership scan; script lists are short so O(n) is acceptable.
  for (int i = 0; i < scripts.Length(); i++) {
    script_obj ^= scripts.At(i);
    if (script_obj.ptr() == candidate.ptr()) {
      // We already have a reference to this script.
      return;
    }
  }
  // Add script to the list of scripts.
  scripts.Add(candidate);
}
13939
// Returns (computing and caching on first use) the array of all scripts
// that contribute code to this library.
ArrayPtr Library::LoadedScripts() const {
  // We compute the list of loaded scripts lazily. The result is
  // cached in loaded_scripts_.
  if (loaded_scripts() == Array::null()) {
    // TODO(jensj): This can be cleaned up.
    // It really should just return the content of `used_scripts`, and there
    // should be no need to do the O(n) call to `AddScriptIfUnique` per script.

    // Iterate over the library dictionary and collect all scripts.
    // NOTE(review): the declarations of `scripts` and `patches` are not
    // visible in this listing.
    Object& entry = Object::Handle();
    Class& cls = Class::Handle();
    Script& owner_script = Script::Handle();
    DictionaryIterator it(*this);
    while (it.HasNext()) {
      entry = it.GetNext();
      // Only classes, functions and fields carry an owning script.
      if (entry.IsClass()) {
        owner_script = Class::Cast(entry).script();
      } else if (entry.IsFunction()) {
        owner_script = Function::Cast(entry).script();
      } else if (entry.IsField()) {
        owner_script = Field::Cast(entry).Script();
      } else {
        continue;
      }
      AddScriptIfUnique(scripts, owner_script);
    }

    // Add all scripts from patch classes.
    for (intptr_t i = 0; i < patches.Length(); i++) {
      entry = patches.At(i);
      if (entry.IsClass()) {
        owner_script = Class::Cast(entry).script();
      } else {
        ASSERT(entry.IsScript());
        owner_script = Script::Cast(entry).ptr();
      }
      AddScriptIfUnique(scripts, owner_script);
    }

    cls = toplevel_class();
    if (!cls.IsNull()) {
      owner_script = cls.script();
      AddScriptIfUnique(scripts, owner_script);
      // Special case: Scripts that only contain external top-level functions
      // are not included above, but can be referenced through a library's
      // anonymous classes. Example: dart-core:identical.dart.
      Function& func = Function::Handle();
      Array& functions = Array::Handle(cls.current_functions());
      for (intptr_t j = 0; j < functions.Length(); j++) {
        func ^= functions.At(j);
        if (func.is_external()) {
          owner_script = func.script();
          AddScriptIfUnique(scripts, owner_script);
        }
      }
    }

    // Create the array of scripts and cache it in loaded_scripts_.
    const Array& scripts_array = Array::Handle(Array::MakeFixedLength(scripts));
    untag()->set_loaded_scripts(scripts_array.ptr());
  }
  return loaded_scripts();
}
14006
// TODO(hausner): we might want to add a script dictionary to the
// library class to make this lookup faster.
// Searches this library's loaded scripts for one whose URL (or resolved URL
// when [useResolvedUri] is set) equals [url] or ends with "/<url>".
// NOTE(review): the declarations of `scripts` and `script` are not visible
// in this listing.
ScriptPtr Library::LookupScript(const String& url,
                                bool useResolvedUri /* = false */) const {
  const intptr_t url_length = url.Length();
  if (url_length == 0) {
    return Script::null();
  }
  String& script_url = String::Handle();
  const intptr_t num_scripts = scripts.Length();
  for (int i = 0; i < num_scripts; i++) {
    script ^= scripts.At(i);
    if (useResolvedUri) {
      // Use for urls with 'org-dartlang-sdk:' or 'file:' schemes
      script_url = script.resolved_url();
    } else {
      // Use for urls with 'dart:', 'package:', or 'file:' schemes
      script_url = script.url();
    }
    // start_idx == 0: lengths match exactly; > 0: [url] may be a suffix.
    const intptr_t start_idx = script_url.Length() - url_length;
    if ((start_idx == 0) && url.Equals(script_url)) {
      return script.ptr();
    } else if (start_idx > 0) {
      // If we do a suffix match, only match if the partial path
      // starts at or immediately after the path separator.
      if (((url.CharAt(0) == '/') ||
           (script_url.CharAt(start_idx - 1) == '/')) &&
          url.Equals(script_url, start_idx, url_length)) {
        return script.ptr();
      }
    }
  }
  return Script::null();
}
14043
// Finalizes this library's top-level class if present and not yet
// finalized.
// NOTE(review): the signature line and the error-propagation statement are
// not visible in this listing.
  if (toplevel_class() == Object::null()) {
    return;
  }
  Thread* thread = Thread::Current();
  const Class& cls = Class::Handle(thread->zone(), toplevel_class());
  if (cls.is_finalized()) {
    return;
  }
  const Error& error =
      Error::Handle(thread->zone(), cls.EnsureIsFinalized(thread));
  if (!error.IsNull()) {
    // (Error handling statement not visible in this listing.)
  }
}
14059
14060ObjectPtr Library::LookupLocalObject(const String& name) const {
14061 intptr_t index;
14062 return LookupEntry(name, &index);
14063}
14064
// Prefers a non-prefix entry from this library's own dictionary; falls back
// to searching the re-export namespaces.
// NOTE(review): the signature line is not visible in this listing.
  intptr_t index;
  const Object& result = Object::Handle(LookupEntry(name, &index));
  if (!result.IsNull() && !result.IsLibraryPrefix()) {
    return result.ptr();
  }
  return LookupReExport(name);
}
14074
// Returns the field named [name] (library-private names included), or
// Field::null() when absent or not a field.
// NOTE(review): the signature line is not visible in this listing.
  Object& obj = Object::Handle(LookupLocalObjectAllowPrivate(name));
  if (obj.IsField()) {
    return Field::Cast(obj).ptr();
  }
  return Field::null();
}
14083
// Returns the function named [name] (library-private names included), or
// Function::null() when absent or not a function.
// NOTE(review): the signature line is not visible in this listing.
  Object& obj = Object::Handle(LookupLocalObjectAllowPrivate(name));
  if (obj.IsFunction()) {
    return Function::Cast(obj).ptr();
  }
  return Function::null();
}
14092
14093ObjectPtr Library::LookupLocalObjectAllowPrivate(const String& name) const {
14094 Thread* thread = Thread::Current();
14095 Zone* zone = thread->zone();
14096 Object& obj = Object::Handle(zone, Object::null());
14097 obj = LookupLocalObject(name);
14098 if (obj.IsNull() && ShouldBePrivate(name)) {
14099 String& private_name = String::Handle(zone, PrivateName(name));
14100 obj = LookupLocalObject(private_name);
14101 }
14102 return obj.ptr();
14103}
14104
14105ClassPtr Library::LookupClass(const String& name) const {
14106 Object& obj = Object::Handle(LookupLocalObject(name));
14107 if (obj.IsClass()) {
14108 return Class::Cast(obj).ptr();
14109 }
14110 return Class::null();
14111}
14112
// Returns the class named [name] (library-private names included), or
// Class::null() when absent or not a class.
// NOTE(review): the signature line is not visible in this listing.
  Object& obj = Object::Handle(LookupLocalObjectAllowPrivate(name));
  if (obj.IsClass()) {
    return Class::Cast(obj).ptr();
  }
  return Class::null();
}
14120
14121LibraryPrefixPtr Library::LookupLocalLibraryPrefix(const String& name) const {
14122 const Object& obj = Object::Handle(LookupLocalObject(name));
14123 if (obj.IsLibraryPrefix()) {
14124 return LibraryPrefix::Cast(obj).ptr();
14125 }
14126 return LibraryPrefix::null();
14127}
14128
// Stores the library's synthetic top-level class.
// NOTE(review): the signature line is not visible in this listing.
  untag()->set_toplevel_class(value.ptr());
}
14133
// Stores the array of library dependencies recorded for this library.
void Library::set_dependencies(const Array& deps) const {
  untag()->set_dependencies(deps.ptr());
}
14137
// Stores the metadata map, skipping the write when the value is unchanged.
// NOTE(review): the start of the writer-lock assertion is not visible in
// this listing.
void Library::set_metadata(const Array& value) const {
  if (untag()->metadata() != value.ptr()) {
        IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
    untag()->set_metadata(value.ptr());
  }
}
14145
// Returns the target library of the import at [index], or Library::null()
// when there is no namespace at that index.
// NOTE(review): the declaration of `import` is not visible in this listing.
LibraryPtr Library::ImportLibraryAt(intptr_t index) const {
  if (import.IsNull()) {
    return Library::null();
  }
  return import.target();
}
14153
14154NamespacePtr Library::ImportAt(intptr_t index) const {
14155 if ((index < 0) || index >= num_imports()) {
14156 return Namespace::null();
14157 }
14158 const Array& import_list = Array::Handle(imports());
14159 return Namespace::RawCast(import_list.At(index));
14160}
14161
// Resets imports/exports to empty and clears cached state (loaded scripts,
// dependencies; used scripts in PRODUCT mode).
// NOTE(review): the signature line is not visible in this listing.
  untag()->set_imports(Object::empty_array().ptr());
  untag()->set_exports(Object::empty_array().ptr());
  StoreNonPointer(&untag()->num_imports_, 0);
  untag()->set_loaded_scripts(Array::null());
  untag()->set_dependencies(Array::null());
#if defined(PRODUCT)
  // used_scripts is only used by vm-service.
  untag()->set_used_scripts(GrowableObjectArray::null());
#endif
}
14173
14174void Library::AddImport(const Namespace& ns) const {
14175 Array& imports = Array::Handle(this->imports());
14176 intptr_t capacity = imports.Length();
14177 if (num_imports() == capacity) {
14178 capacity = capacity + kImportsCapacityIncrement + (capacity >> 2);
14179 imports = Array::Grow(imports, capacity);
14180 untag()->set_imports(imports.ptr());
14181 }
14182 intptr_t index = num_imports();
14183 imports.SetAt(index, ns);
14184 set_num_imports(index + 1);
14185}
14186
// Convenience function to determine whether the export list is
// non-empty.
bool Library::HasExports() const {
  // An empty export list is canonically the shared empty array.
  return exports() != Object::empty_array().ptr();
}
14192
14193// We add one namespace at a time to the exports array and don't
14194// pre-allocate any unused capacity. The assumption is that
14195// re-exports are quite rare.
14196void Library::AddExport(const Namespace& ns) const {
14197 Array& exports = Array::Handle(this->exports());
14198 intptr_t num_exports = exports.Length();
14199 exports = Array::Grow(exports, num_exports + 1);
14200 untag()->set_exports(exports.ptr());
14201 exports.SetAt(num_exports, ns);
14202}
14203
14204static ArrayPtr NewDictionary(intptr_t initial_size) {
14205 const Array& dict = Array::Handle(Array::New(initial_size + 1, Heap::kOld));
14206 // The last element of the dictionary specifies the number of in use slots.
14207 dict.SetAt(initial_size, Object::smi_zero());
14208 return dict.ptr();
14209}
14210
// Allocates and installs this library's initial (empty) dictionary.
// NOTE(review): the reusable-handle scope line is not visible in this
// listing.
void Library::InitClassDictionary() const {
  Thread* thread = Thread::Current();
  ASSERT(thread->IsDartMutatorThread());
  Array& dictionary = thread->ArrayHandle();
  // TODO(iposva): Find reasonable initial size.
  const int kInitialElementCount = 16;
  dictionary = NewDictionary(kInitialElementCount);
  untag()->set_dictionary(dictionary.ptr());
}
14221
14222void Library::InitImportList() const {
14223 const Array& imports =
14224 Array::Handle(Array::New(kInitialImportsCapacity, Heap::kOld));
14225 untag()->set_imports(imports.ptr());
14226 StoreNonPointer(&untag()->num_imports_, 0);
14227}
14228
// Allocates a raw, uninitialized Library object in old space.
// NOTE(review): an assertion line appears to be missing from this listing.
LibraryPtr Library::New() {
  return Object::Allocate<Library>(Heap::kOld);
}
14233
// Allocates and fully initializes a new Library for [url]; when
// [import_core_lib] is set, dart:core is added to the new library's imports.
// NOTE(review): two NOT_IN_PRECOMPILED(...) wrapper lines are not visible in
// this listing (their continuation lines remain below).
LibraryPtr Library::NewLibraryHelper(const String& url, bool import_core_lib) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(thread->IsDartMutatorThread());
  // Force the url to have a hash code.
  url.Hash();
  const bool dart_scheme = url.StartsWith(Symbols::DartScheme());
  const Library& result = Library::Handle(zone, Library::New());
  result.untag()->set_name(Symbols::Empty().ptr());
  result.untag()->set_url(url.ptr());
  result.untag()->set_dictionary(Object::empty_array().ptr());
  Array& array = Array::Handle(zone);
  array = HashTables::New<MetadataMap>(4, Heap::kOld);
  result.untag()->set_metadata(array.ptr());
  result.untag()->set_toplevel_class(Class::null());
  GrowableObjectArray& list = GrowableObjectArray::Handle(zone);
  list = GrowableObjectArray::New(Object::empty_array(), Heap::kOld);
  result.untag()->set_used_scripts(list.ptr());
  result.untag()->set_imports(Object::empty_array().ptr());
  result.untag()->set_exports(Object::empty_array().ptr());
      result.untag()->set_kernel_program_info(KernelProgramInfo::null()));
  result.untag()->set_loaded_scripts(Array::null());
  result.set_native_entry_resolver(nullptr);
  result.set_native_entry_symbol_resolver(nullptr);
  result.set_ffi_native_resolver(nullptr);
  result.set_flags(0);
  result.set_is_in_fullsnapshot(false);
  // This logic is also in the DAP debug adapter in DDS to avoid needing
  // to call setLibraryDebuggable for every library for every isolate.
  // If these defaults change, the same should be done there in
  // dap/IsolateManager._getIsLibraryDebuggableByDefault.
  if (dart_scheme) {
    // Only debug dart: libraries if we have been requested to show invisible
    // frames.
    result.set_debuggable(FLAG_show_invisible_frames);
  } else {
    // Default to debuggable for all other libraries.
    result.set_debuggable(true);
  }
  result.set_is_dart_scheme(dart_scheme);
      result.StoreNonPointer(&result.untag()->kernel_library_index_, -1));
  result.StoreNonPointer(&result.untag()->load_state_,
                         UntaggedLibrary::kAllocated);
  result.StoreNonPointer(&result.untag()->index_, -1);
  result.InitClassDictionary();
  result.InitImportList();
  result.AllocatePrivateKey();
  if (import_core_lib) {
    // Make dart:core implicitly visible to the new library.
    const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
    ASSERT(!core_lib.IsNull());
    const Namespace& ns =
        Namespace::Handle(zone, Namespace::New(core_lib, Object::null_array(),
                                               Object::null_array(), result));
    result.AddImport(ns);
  }
  return result.ptr();
}
14293
14294LibraryPtr Library::New(const String& url) {
14295 return NewLibraryHelper(url, false);
14296}
14297
// Writes the raw bitfield of library flags.
void Library::set_flags(uint8_t flags) const {
  StoreNonPointer(&untag()->flags_, flags);
}
14301
// Creates and registers dart:core for the isolate group; the root library
// starts out unset.
// NOTE(review): the signature line and the start of the
// set_bootstrap_library call are not visible in this listing.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const String& core_lib_url = Symbols::DartCore();
  const Library& core_lib =
      Library::Handle(zone, Library::NewLibraryHelper(core_lib_url, false));
  core_lib.SetLoadRequested();
  core_lib.Register(thread);
      core_lib);
  isolate_group->object_store()->set_root_library(Library::Handle());
}
14314
// Invoke the function, or noSuchMethod if it is null.
// NOTE(review): the first line of this static helper's signature is not
// visible in this listing.
    Thread* thread,
    const Instance& receiver,
    const Function& function,
    const String& target_name,
    const Array& args,
    const Array& args_descriptor_array,
    bool respect_reflectable,
    const TypeArguments& instantiator_type_args) {
  // Note "args" is already the internal arguments with the receiver as the
  // first element.
  ArgumentsDescriptor args_descriptor(args_descriptor_array);
  // Fall back to noSuchMethod when there is no target, the arguments do not
  // match the signature, or reflection on a non-reflectable member is
  // disallowed.
  if (function.IsNull() ||
      !function.AreValidArguments(args_descriptor, nullptr) ||
      (respect_reflectable && !function.is_reflectable())) {
    return DartEntry::InvokeNoSuchMethod(thread, receiver, target_name, args,
                                         args_descriptor_array);
  }
  // Dynamic invocations still type-check the actual arguments.
  ObjectPtr type_error = function.DoArgumentTypesMatch(args, args_descriptor,
                                                       instantiator_type_args);
  if (type_error != Error::null()) {
    return type_error;
  }
  return DartEntry::InvokeFunction(function, args, args_descriptor_array);
}
14341
// Reads the top-level getter/field [getter_name]; may closurize a regular
// method when a getter is requested. Returns Object::sentinel() when
// nothing was found and [throw_nsm_if_absent] is false.
// NOTE(review): the signature line, the declaration of `obj`, and several
// CHECK_ERROR / ThrowNoSuchMethod argument lines are not visible in this
// listing.
    bool throw_nsm_if_absent,
    bool respect_reflectable,
    bool check_is_entrypoint) const {
  Function& getter = Function::Handle();
  if (obj.IsField()) {
    const Field& field = Field::Cast(obj);
    if (check_is_entrypoint) {
    }
    // An initialized field's value can be returned directly.
    if (!field.IsUninitialized()) {
      return field.StaticValue();
    }
    // An uninitialized field was found. Check for a getter in the field's
    // owner class.
    const Class& klass = Class::Handle(field.Owner());
    const String& internal_getter_name =
        String::Handle(Field::GetterName(getter_name));
    getter = klass.LookupStaticFunction(internal_getter_name);
  } else {
    // No field found. Check for a getter in the lib.
    const String& internal_getter_name =
        String::Handle(Field::GetterName(getter_name));
    obj = LookupLocalOrReExportObject(internal_getter_name);
    if (obj.IsFunction()) {
      getter = Function::Cast(obj).ptr();
      if (check_is_entrypoint) {
      }
    } else {
      obj = LookupLocalOrReExportObject(getter_name);
      // Normally static top-level methods cannot be closurized through the
      // native API even if they are marked as entry-points, with the one
      // exception of "main".
      if (obj.IsFunction() && check_is_entrypoint) {
        if (!getter_name.Equals(String::Handle(String::New("main"))) ||
            ptr() != IsolateGroup::Current()->object_store()->root_library()) {
          CHECK_ERROR(Function::Cast(obj).VerifyClosurizedEntryPoint());
        }
      }
      if (obj.IsFunction() && Function::Cast(obj).SafeToClosurize()) {
        // Looking for a getter but found a regular method: closurize it.
        const Function& closure_function =
            Function::Handle(Function::Cast(obj).ImplicitClosureFunction());
        return closure_function.ImplicitStaticClosure();
      }
    }
  }

  if (getter.IsNull() || (respect_reflectable && !getter.is_reflectable())) {
    if (throw_nsm_if_absent) {
      return ThrowNoSuchMethod(Object::null_string(), getter_name,
                               Object::null_array(), Object::null_array(),
    }

    // Fall through case: Indicate that we didn't find any function or field
    // using a special null instance. This is different from a field being null.
    // Callers make sure that this null does not leak into Dartland.
    return Object::sentinel().ptr();
  }

  // Invoke the getter and return the result.
  return DartEntry::InvokeFunction(getter, Object::empty_array());
}
14409
// Assigns [value] to the top-level field or setter [setter_name], applying
// dynamic type checks and reflectability rules.
// NOTE(review): the signature line, the declaration of `obj`, and several
// CHECK_ERROR / ThrowNoSuchMethod argument lines are not visible in this
// listing.
    const Instance& value,
    bool respect_reflectable,
    bool check_is_entrypoint) const {
  const String& internal_setter_name =
      String::Handle(Field::SetterName(setter_name));
  AbstractType& setter_type = AbstractType::Handle();
  AbstractType& argument_type = AbstractType::Handle(value.GetType(Heap::kOld));
  if (obj.IsField()) {
    const Field& field = Field::Cast(obj);
    if (check_is_entrypoint) {
    }
    setter_type = field.type();
    // Dynamic type check of the assigned value against the field's type.
    if (!argument_type.IsNullType() && !setter_type.IsDynamicType() &&
        !value.IsInstanceOf(setter_type, Object::null_type_arguments(),
                            Object::null_type_arguments())) {
      return ThrowTypeError(field.token_pos(), value, setter_type, setter_name);
    }
    // Final or non-reflectable fields cannot be assigned: NoSuchMethod.
    if (field.is_final() || (respect_reflectable && !field.is_reflectable())) {
      const int kNumArgs = 1;
      const Array& args = Array::Handle(Array::New(kNumArgs));
      args.SetAt(0, value);

      return ThrowNoSuchMethod(Object::null_string(), internal_setter_name,
                               args, Object::null_array(),
    }
    field.SetStaticValue(value);
    return value.ptr();
  }

  // No field: look for a top-level setter function instead.
  Function& setter = Function::Handle();
  obj = LookupLocalOrReExportObject(internal_setter_name);
  if (obj.IsFunction()) {
    setter ^= obj.ptr();
  }

  if (!setter.IsNull() && check_is_entrypoint) {
  }

  const int kNumArgs = 1;
  const Array& args = Array::Handle(Array::New(kNumArgs));
  args.SetAt(0, value);
  if (setter.IsNull() || (respect_reflectable && !setter.is_reflectable())) {
    return ThrowNoSuchMethod(Object::null_string(), internal_setter_name, args,
                             Object::null_array(), InvocationMirror::kTopLevel,
  }

  // Type-check the value against the setter's parameter type.
  setter_type = setter.ParameterTypeAt(0);
  if (!argument_type.IsNullType() && !setter_type.IsDynamicType() &&
      !value.IsInstanceOf(setter_type, Object::null_type_arguments(),
                          Object::null_type_arguments())) {
    return ThrowTypeError(setter.token_pos(), value, setter_type, setter_name);
  }

  return DartEntry::InvokeFunction(setter, args);
}
14472
// Invokes the top-level function [function_name] with [args]/[arg_names].
// When no function is found, falls back to reading a same-named getter and
// invoking "call" on its result.
// NOTE(review): the signature line, the initializer of `result`, and
// several CHECK_ERROR / ThrowNoSuchMethod argument lines are not visible in
// this listing.
    const Array& args,
    const Array& arg_names,
    bool respect_reflectable,
    bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  // We don't pass any explicit type arguments, which will be understood as
  // using dynamic for any function type arguments by lower layers.
  const int kTypeArgsLen = 0;
  const Array& args_descriptor_array = Array::Handle(
      zone, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(),
                                          arg_names, Heap::kNew));
  ArgumentsDescriptor args_descriptor(args_descriptor_array);

  auto& function = Function::Handle(zone);
  auto& result =
  if (result.IsFunction()) {
    function ^= result.ptr();
  }

  if (!function.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(function.VerifyCallEntryPoint());
  }

  if (function.IsNull()) {
    // Didn't find a method: try to find a getter and invoke call on its result.
    const Object& getter_result = Object::Handle(
        zone, InvokeGetter(function_name, false, respect_reflectable,
                           check_is_entrypoint));
    if (getter_result.ptr() != Object::sentinel().ptr()) {
      if (check_is_entrypoint) {
      }
      // Shift the arguments to make room for the callable receiver and
      // dispatch through the closure-invocation path.
      const auto& call_args_descriptor_array = Array::Handle(
          zone, ArgumentsDescriptor::NewBoxed(args_descriptor.TypeArgsLen(),
                                              args_descriptor.Count() + 1,
                                              arg_names, Heap::kNew));
      const auto& call_args = Array::Handle(
          zone,
          CreateCallableArgumentsFromStatic(zone, Instance::Cast(getter_result),
                                            args, arg_names, args_descriptor));
      return DartEntry::InvokeClosure(thread, call_args,
                                      call_args_descriptor_array);
    }
  }

  if (function.IsNull() ||
      (respect_reflectable && !function.is_reflectable())) {
    return ThrowNoSuchMethod(Object::null_string(), function_name, args,
                             arg_names, InvocationMirror::kTopLevel,
  }
  if (!function.AreValidArguments(args_descriptor, nullptr)) {
    return ThrowNoSuchMethod(
        String::Handle(function.UserVisibleSignature()), function_name, args,
  }
  // This is a static function, so we pass an empty instantiator tav.
  ASSERT(function.is_static());
  ObjectPtr type_error = function.DoArgumentTypesMatch(
      args, args_descriptor, Object::empty_type_arguments());
  if (type_error != Error::null()) {
    return type_error;
  }
  return DartEntry::InvokeFunction(function, args, args_descriptor_array);
}
14542
// Creates and registers dart:nativewrappers along with its
// NativeFieldWrapperClass1..4 classes.
// NOTE(review): the signature line is not visible in this listing.
    bool is_kernel) {
  const int kNumNativeWrappersClasses = 4;
  COMPILE_ASSERT((kNumNativeWrappersClasses > 0) &&
                 (kNumNativeWrappersClasses < 10));
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const String& native_flds_lib_url = Symbols::DartNativeWrappers();
  const Library& native_flds_lib = Library::Handle(
      zone, Library::NewLibraryHelper(native_flds_lib_url, false));
  const String& native_flds_lib_name = Symbols::DartNativeWrappersLibName();
  native_flds_lib.SetName(native_flds_lib_name);
  native_flds_lib.SetLoadRequested();
  native_flds_lib.Register(thread);
  native_flds_lib.SetLoadInProgress();
  isolate_group->object_store()->set_native_wrappers_library(native_flds_lib);
  const char* const kNativeWrappersClass = "NativeFieldWrapperClass";
  // Buffer fits "NativeFieldWrapperClass" + one digit + NUL (see ASSERT).
  const int kNameLength = 25;
  ASSERT(kNameLength == (strlen(kNativeWrappersClass) + 1 + 1));
  char name_buffer[kNameLength];
  String& cls_name = String::Handle(zone);
  for (int fld_cnt = 1; fld_cnt <= kNumNativeWrappersClasses; fld_cnt++) {
    Utils::SNPrint(name_buffer, kNameLength, "%s%d", kNativeWrappersClass,
                   fld_cnt);
    cls_name = Symbols::New(thread, name_buffer);
    Class::NewNativeWrapper(native_flds_lib, cls_name, fld_cnt);
  }
  // NOTE: If we bootstrap from a Kernel IR file we want to generate the
  // synthetic constructors for the native wrapper classes. We leave this up to
  // the [KernelLoader] who will take care of it later.
  if (!is_kernel) {
    native_flds_lib.SetLoaded();
  }
}
14577
14578// LibraryLookupSet maps URIs to libraries.
14580 public:
14581 static const char* Name() { return "LibraryLookupTraits"; }
14582 static bool ReportStats() { return false; }
14583
14584 static bool IsMatch(const Object& a, const Object& b) {
14585 const String& a_str = String::Cast(a);
14586 const String& b_str = String::Cast(b);
14587
14588 ASSERT(a_str.HasHash() && b_str.HasHash());
14589 return a_str.Equals(b_str);
14590 }
14591
14592 static uword Hash(const Object& key) { return String::Cast(key).Hash(); }
14593
14594 static ObjectPtr NewKey(const String& str) { return str.ptr(); }
14595};
14597
14598// Returns library with given url in current isolate, or nullptr.
14599LibraryPtr Library::LookupLibrary(Thread* thread, const String& url) {
14600 Zone* zone = thread->zone();
14601 ObjectStore* object_store = thread->isolate_group()->object_store();
14602
14603 // Make sure the URL string has an associated hash code
14604 // to speed up the repeated equality checks.
14605 url.Hash();
14606
14607 // Use the libraries map to lookup the library by URL.
14608 Library& lib = Library::Handle(zone);
14609 if (object_store->libraries_map() == Array::null()) {
14610 return Library::null();
14611 } else {
14612 LibraryLookupMap map(object_store->libraries_map());
14613 lib ^= map.GetOrNull(url);
14614 ASSERT(map.Release().ptr() == object_store->libraries_map());
14615 }
14616 return lib.ptr();
14617}
14618
// Returns true when the name is library-private, or contains a private
// component (e.g. factory names such as List._fromLiteral).
// NOTE(review): the signature line is not visible in this listing.
  if (ShouldBePrivate(name)) return true;
  // Factory names: List._fromLiteral.
  for (intptr_t i = 1; i < name.Length() - 1; i++) {
    if (name.CharAt(i) == '.') {
      // A '.' followed by '_' marks a private constructor/factory name.
      if (name.CharAt(i + 1) == '_') {
        return true;
      }
    }
  }
  return false;
}
14631
// Create a private key for this library. It is based on the hash of the
// library URI and the sequence number of the library to guarantee unique
// private keys without having to verify.
// NOTE(review): the line completing the initialization of `key` is not
// visible in this listing.
void Library::AllocatePrivateKey() const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  auto isolate_group = thread->isolate_group();

#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
  if (isolate_group->IsReloading()) {
    // When reloading, we need to make sure we use the original private key
    // if this library previously existed.
    ProgramReloadContext* program_reload_context =
        isolate_group->program_reload_context();
    const String& original_key =
        String::Handle(program_reload_context->FindLibraryPrivateKey(*this));
    if (!original_key.IsNull()) {
      untag()->set_private_key(original_key.ptr());
      return;
    }
  }
#endif  // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)

  // Format of the private key is: "@<sequence number><6 digits of hash>
  const intptr_t hash_mask = 0x7FFFF;

  const String& url = String::Handle(zone, this->url());
  intptr_t hash_value = url.Hash() & hash_mask;

  // The number of registered libraries serves as the unique sequence number.
  const GrowableObjectArray& libs = GrowableObjectArray::Handle(
      zone, isolate_group->object_store()->libraries());
  intptr_t sequence_value = libs.Length();

  char private_key[32];
  Utils::SNPrint(private_key, sizeof(private_key), "%c%" Pd "%06" Pd "",
                 kPrivateKeySeparator, sequence_value, hash_value);
  const String& key =
  key.Hash();  // This string may end up in the VM isolate.
  untag()->set_private_key(key.ptr());
}
14673
// Mangles [member] with dart:core's private key and returns the result as a
// zone handle. NOTE(review): the signature line is not visible in this
// listing.
  const Library& core_lib = Library::Handle(Library::CoreLibrary());
  const String& private_name = String::ZoneHandle(core_lib.PrivateName(member));
  return private_name;
}
14679
// Returns whether the queried name equals [member] mangled with dart:core's
// private key. NOTE(review): the signature line is not visible in this
// listing.
  Zone* zone = Thread::Current()->zone();
  const auto& core_lib = Library::Handle(zone, Library::CoreLibrary());
  const auto& private_key = String::Handle(zone, core_lib.private_key());

  ASSERT(core_lib.IsPrivate(member));
  // Compare against member + private_key without materializing the concat.
  return name.EqualsConcat(member, private_key);
}
14688
// Looks up [class_name] in dart:core, mangling private identifiers with the
// core library's private key first. NOTE(review): the signature line is not
// visible in this listing.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
  String& name = String::Handle(zone, class_name.ptr());
  if (class_name.CharAt(0) == kPrivateIdentifierStart) {
    // Private identifiers are mangled on a per library basis.
    name = Symbols::FromConcat(thread, name,
                               String::Handle(zone, core_lib.private_key()));
  }
  return core_lib.LookupClass(name);
}
14701
// Cannot handle qualified names properly as it only appends private key to
// the end (e.g. _Alfa.foo -> _Alfa.foo@...).
StringPtr Library::PrivateName(const String& name) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  // ASSERT(strchr(name, '@') == nullptr);
  String& str = String::Handle(zone);
  str = name.ptr();
  // Append this library's private key to mangle the name.
  str = Symbols::FromConcat(thread, str,
                            String::Handle(zone, this->private_key()));
  return str.ptr();
}
14715
// Returns the library at [index] in the isolate group's library list, or
// Library::null() when the index is out of range.
// NOTE(review): the first line of the declaration of `libs` is not visible
// in this listing.
LibraryPtr Library::GetLibrary(intptr_t index) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  auto isolate_group = thread->isolate_group();
      zone, isolate_group->object_store()->libraries());
  ASSERT(!libs.IsNull());
  if ((0 <= index) && (index < libs.Length())) {
    Library& lib = Library::Handle(zone);
    lib ^= libs.At(index);
    return lib.ptr();
  }
  return Library::null();
}
14730
// Registers this (not-yet-registered) library with the isolate group: it is
// appended to the library list (which defines its index) and inserted into
// the URL->library map.
// NOTE(review): the first line of the declaration of `libs` is not visible
// in this listing.
void Library::Register(Thread* thread) const {
  Zone* zone = thread->zone();
  auto isolate_group = thread->isolate_group();
  ObjectStore* object_store = isolate_group->object_store();

  // A library is "registered" in two places:
  // - A growable array mapping from index to library.
  const String& lib_url = String::Handle(zone, url());
  ASSERT(Library::LookupLibrary(thread, lib_url) == Library::null());
  ASSERT(lib_url.HasHash());
      GrowableObjectArray::Handle(zone, object_store->libraries());
  ASSERT(!libs.IsNull());
  set_index(libs.Length());
  libs.Add(*this);

  // - A map from URL string to library.
  if (object_store->libraries_map() == Array::null()) {
    // Lazily create the map on first registration.
    LibraryLookupMap map(HashTables::New<LibraryLookupMap>(16, Heap::kOld));
    object_store->set_libraries_map(map.Release());
  }

  LibraryLookupMap map(object_store->libraries_map());
  bool present = map.UpdateOrInsert(lib_url, *this);
  ASSERT(!present);
  object_store->set_libraries_map(map.Release());
}
14758
// Rebuilds the URL->library map for [libs] and installs both the list and
// the map in the isolate group's object store.
// NOTE(review): the first line of the signature is not visible in this
// listing.
    const GrowableObjectArray& libs) {
  Zone* zone = thread->zone();
  auto isolate_group = thread->isolate_group();
  Library& lib = Library::Handle(zone);
  String& lib_url = String::Handle(zone);

  LibraryLookupMap map(HashTables::New<LibraryLookupMap>(16, Heap::kOld));

  intptr_t len = libs.Length();
  for (intptr_t i = 0; i < len; i++) {
    lib ^= libs.At(i);
    lib_url = lib.url();
    map.InsertNewOrGetValue(lib_url, lib);
  }
  // Now remember these in the isolate's object store.
  isolate_group->object_store()->set_libraries(libs);
  isolate_group->object_store()->set_libraries_map(map.Release());
}
14778
// The bodies below belong to the well-known library accessors
// (presumably AsyncLibrary, ConvertLibrary, CoreLibrary, CollectionLibrary,
// DeveloperLibrary, FfiLibrary, InternalLibrary, IsolateLibrary,
// MathLibrary, MirrorsLibrary, NativeWrappersLibrary, TypedDataLibrary,
// VMServiceLibrary — TODO confirm); each returns the corresponding library
// from the current isolate group's object store.
// NOTE(review): every signature line is missing from this listing.
  return IsolateGroup::Current()->object_store()->async_library();
}

  return IsolateGroup::Current()->object_store()->convert_library();
}

  return IsolateGroup::Current()->object_store()->core_library();
}

  return IsolateGroup::Current()->object_store()->collection_library();
}

  return IsolateGroup::Current()->object_store()->developer_library();
}

  return IsolateGroup::Current()->object_store()->ffi_library();
}

  return IsolateGroup::Current()->object_store()->_internal_library();
}

  return IsolateGroup::Current()->object_store()->isolate_library();
}

  return IsolateGroup::Current()->object_store()->math_library();
}

#if !defined(DART_PRECOMPILED_RUNTIME)
  return IsolateGroup::Current()->object_store()->mirrors_library();
}
#endif

  return IsolateGroup::Current()->object_store()->native_wrappers_library();
}

  return IsolateGroup::Current()->object_store()->typed_data_library();
}

  return IsolateGroup::Current()->object_store()->_vmservice_library();
}
14832
14833const char* Library::ToCString() const {
14834 NoSafepointScope no_safepoint;
14835 const String& name = String::Handle(url());
14836 return OS::SCreate(Thread::Current()->zone(), "Library:'%s'",
14837 name.ToCString());
14838}
14839
14840LibraryPtr LibraryPrefix::GetLibrary(int index) const {
14841 if ((index >= 0) || (index < num_imports())) {
14842 const Array& imports = Array::Handle(this->imports());
14843 Namespace& import = Namespace::Handle();
14844 import ^= imports.At(index);
14845 return import.target();
14846 }
14847 return Library::null();
14848}
14849
14850void LibraryPrefix::AddImport(const Namespace& import) const {
14851 intptr_t num_current_imports = num_imports();
14852
14853 // Prefixes with deferred libraries can only contain one library.
14854 ASSERT((num_current_imports == 0) || !is_deferred_load());
14855
14856 // The library needs to be added to the list.
14857 Array& imports = Array::Handle(this->imports());
14858 const intptr_t length = (imports.IsNull()) ? 0 : imports.Length();
14859 // Grow the list if it is full.
14860 if (num_current_imports >= length) {
14861 const intptr_t new_length = length + kIncrementSize + (length >> 2);
14862 imports = Array::Grow(imports, new_length, Heap::kOld);
14863 set_imports(imports);
14864 }
14865 imports.SetAt(num_current_imports, import);
14866 set_num_imports(num_current_imports + 1);
14867}
14868
// Allocates an uninitialized LibraryPrefix in old space.
LibraryPrefixPtr LibraryPrefix::New() {
  return Object::Allocate<LibraryPrefix>(Heap::kOld);
}
14872
14873LibraryPrefixPtr LibraryPrefix::New(const String& name,
14874 const Namespace& import,
14875 bool deferred_load,
14876 const Library& importer) {
14877 const LibraryPrefix& result = LibraryPrefix::Handle(LibraryPrefix::New());
14878 result.set_name(name);
14879 result.set_num_imports(0);
14880 result.set_importer(importer);
14881 result.StoreNonPointer(&result.untag()->is_deferred_load_, deferred_load);
14882 result.set_imports(Array::Handle(Array::New(kInitialSize)));
14883 result.AddImport(import);
14884 return result.ptr();
14885}
14886
// Stores the prefix name; prefix names are always interned symbols.
void LibraryPrefix::set_name(const String& value) const {
  ASSERT(value.IsSymbol());
  untag()->set_name(value.ptr());
}
14891
// Stores the backing array of imported namespaces.
void LibraryPrefix::set_imports(const Array& value) const {
  untag()->set_imports(value.ptr());
}
14895
// Stores the import count, which must fit in 16 bits.
void LibraryPrefix::set_num_imports(intptr_t value) const {
  if (!Utils::IsUint(16, value)) {
    // NOTE(review): the overflow-reporting statement that belongs here is
    // not visible in this view.
  }
  StoreNonPointer(&untag()->num_imports_, value);
}
14902
// Stores the library that declares this prefix.
void LibraryPrefix::set_importer(const Library& value) const {
  untag()->set_importer(value.ptr());
}
14906
14907const char* LibraryPrefix::ToCString() const {
14908 const String& prefix = String::Handle(name());
14909 return prefix.ToCString();
14910}
14911
14912const char* Namespace::ToCString() const {
14913 const Library& lib = Library::Handle(target());
14914 return OS::SCreate(Thread::Current()->zone(), "Namespace for library '%s'",
14915 lib.ToCString());
14916}
14917
  // Quick check for common case with no combinators.
  if (hide_names() == show_names()) {
    return false;
  }
  // NOTE(review): the signature of this predicate and the getter-name
  // canonicalization branch are not visible in this view; the remaining
  // code is annotated as-is.
  const String* plain_name = &name;
  } else if (Field::IsSetterName(name)) {
  }
  // Check whether the name is in the list of explicitly hidden names.
  if (hide_names() != Array::null()) {
    const Array& names = Array::Handle(hide_names());
    String& hidden = String::Handle();
    intptr_t num_names = names.Length();
    for (intptr_t i = 0; i < num_names; i++) {
      hidden ^= names.At(i);
      if (plain_name->Equals(hidden)) {
        return true;
      }
    }
  }
  // The name is not explicitly hidden. Now check whether it is in the
  // list of explicitly visible names, if there is one.
  if (show_names() != Array::null()) {
    const Array& names = Array::Handle(show_names());
    String& shown = String::Handle();
    intptr_t num_names = names.Length();
    for (intptr_t i = 0; i < num_names; i++) {
      shown ^= names.At(i);
      if (plain_name->Equals(shown)) {
        return false;
      }
    }
    // There is a list of visible names. The name we're looking for is not
    // contained in the list, so it is hidden.
    return true;
  }
  // The name is not filtered out.
  return false;
}
14961
// Look up object with given name in library and filter out hidden
// names. Also look up getters and setters.
// NOTE(review): the signature line of this definition is not visible in
// this view.
    ZoneGrowableArray<intptr_t>* trail) const {
  Zone* zone = Thread::Current()->zone();
  const Library& lib = Library::Handle(zone, target());

  if (trail != nullptr) {
    // Look for cycle in reexport graph.
    for (int i = 0; i < trail->length(); i++) {
      if (trail->At(i) == lib.index()) {
        // Cycle found: invalidate the remainder of the trail and give up.
        for (int j = i + 1; j < trail->length(); j++) {
          (*trail)[j] = -1;
        }
        return Object::null();
      }
    }
  }

  intptr_t ignore = 0;
  // Lookup the name in the library's symbols.
  Object& obj = Object::Handle(zone, lib.LookupEntry(name, &ignore));
  // NOTE(review): the start of the condition guarding this accessor-lookup
  // block is not visible in this view.
    (obj.IsNull() || obj.IsLibraryPrefix())) {
    String& accessor_name = String::Handle(zone);
    accessor_name = Field::LookupGetterSymbol(name);
    if (!accessor_name.IsNull()) {
      obj = lib.LookupEntry(accessor_name, &ignore);
    }
    if (obj.IsNull()) {
      accessor_name = Field::LookupSetterSymbol(name);
      if (!accessor_name.IsNull()) {
        obj = lib.LookupEntry(accessor_name, &ignore);
      }
    }
  }

  // Library prefixes are not exported.
  if (obj.IsNull() || obj.IsLibraryPrefix()) {
    // Lookup in the re-exported symbols.
    obj = lib.LookupReExport(name, trail);
    if (obj.IsNull() && !Field::IsSetterName(name)) {
      // LookupReExport() only returns objects that match the given name.
      // If there is no field/func/getter, try finding a setter.
      const String& setter_name =
      if (!setter_name.IsNull()) {
        obj = lib.LookupReExport(setter_name, trail);
      }
    }
  }
  if (obj.IsNull() || HidesName(name) || obj.IsLibraryPrefix()) {
    return Object::null();
  }
  return obj.ptr();
}
15020
NamespacePtr Namespace::New() {
  // Allocates an uninitialized Namespace in old space.
  // NOTE(review): one line between the signature and the return is not
  // visible in this view.
  return Object::Allocate<Namespace>(Heap::kOld);
}
15025
15026NamespacePtr Namespace::New(const Library& target,
15027 const Array& show_names,
15028 const Array& hide_names,
15029 const Library& owner) {
15030 ASSERT(show_names.IsNull() || (show_names.Length() > 0));
15031 ASSERT(hide_names.IsNull() || (hide_names.Length() > 0));
15032 const Namespace& result = Namespace::Handle(Namespace::New());
15033 result.untag()->set_target(target.ptr());
15034 result.untag()->set_show_names(show_names.ptr());
15035 result.untag()->set_hide_names(hide_names.ptr());
15036 result.untag()->set_owner(owner.ptr());
15037 return result.ptr();
15038}
15039
// Allocates an uninitialized KernelProgramInfo in old space.
KernelProgramInfoPtr KernelProgramInfo::New() {
  return Object::Allocate<KernelProgramInfo>(Heap::kOld);
}
15043
15044KernelProgramInfoPtr KernelProgramInfo::New(
15045 const TypedDataBase& kernel_component,
15046 const TypedDataView& string_data,
15047 const TypedDataView& metadata_payloads,
15048 const TypedDataView& metadata_mappings,
15049 const TypedDataView& constants_table,
15050 const TypedData& string_offsets,
15051 const TypedData& canonical_names,
15052 const Array& scripts,
15053 const Array& libraries_cache,
15054 const Array& classes_cache) {
15055 ASSERT(kernel_component.IsExternalOrExternalView());
15056 ASSERT(string_data.IsExternalOrExternalView());
15057 ASSERT(metadata_payloads.IsExternalOrExternalView());
15058 ASSERT(metadata_mappings.IsExternalOrExternalView());
15059 ASSERT(constants_table.IsExternalOrExternalView());
15060
15061 const auto& info = KernelProgramInfo::Handle(KernelProgramInfo::New());
15062 info.untag()->set_kernel_component(kernel_component.ptr());
15063 info.untag()->set_string_offsets(string_offsets.ptr());
15064 info.untag()->set_string_data(string_data.ptr());
15065 info.untag()->set_canonical_names(canonical_names.ptr());
15066 info.untag()->set_metadata_payloads(metadata_payloads.ptr());
15067 info.untag()->set_metadata_mappings(metadata_mappings.ptr());
15068 info.untag()->set_scripts(scripts.ptr());
15069 info.untag()->set_constants_table(constants_table.ptr());
15070 info.untag()->set_libraries_cache(libraries_cache.ptr());
15071 info.untag()->set_classes_cache(classes_cache.ptr());
15072 return info.ptr();
15073}
15074
// Fixed debug name; KernelProgramInfo carries no printable identity.
const char* KernelProgramInfo::ToCString() const {
  return "[KernelProgramInfo]";
}
15078
15079ScriptPtr KernelProgramInfo::ScriptAt(intptr_t index) const {
15080 const Array& all_scripts = Array::Handle(scripts());
15081 ObjectPtr script = all_scripts.At(index);
15082 return Script::RawCast(script);
15083}
15084
  // Stores the script table (signature line not visible in this view).
  untag()->set_scripts(scripts.ptr());
}
15088
// Stores the evaluated constants array.
void KernelProgramInfo::set_constants(const Array& constants) const {
  untag()->set_constants(constants.ptr());
}
15092
    intptr_t library_index) const {
  // NOTE(review): the signature line is not visible in this view. Reads the
  // kernel component's trailing index: the library count is the
  // second-to-last big-endian 32-bit word, preceded by a table of library
  // offsets from which this library's start offset is fetched.
  const auto& blob = TypedDataBase::Handle(kernel_component());
  const intptr_t library_count =
      Utils::BigEndianToHost32(LoadUnaligned(reinterpret_cast<uint32_t*>(
          blob.DataAddr(blob.LengthInBytes() - 2 * 4))));
  const intptr_t library_start =
      Utils::BigEndianToHost32(LoadUnaligned(reinterpret_cast<uint32_t*>(
          blob.DataAddr(blob.LengthInBytes() -
                        (2 + 1 + (library_count - library_index)) * 4))));
  return library_start;
}
15105
    intptr_t library_index) const {
  // NOTE(review): the signature line is not visible in this view. Returns a
  // view of the kernel component covering the byte range of the library at
  // |library_index|.
  const intptr_t start_offset = KernelLibraryStartOffset(library_index);
  const intptr_t end_offset = KernelLibraryEndOffset(library_index);
  const auto& component = TypedDataBase::Handle(kernel_component());
  return component.ViewFromTo(start_offset, end_offset);
}
15113
    intptr_t library_index) const {
  // NOTE(review): the signature line is not visible in this view. Mirrors
  // KernelLibraryStartOffset but fetches the end offset of the library at
  // |library_index| from the component's trailing index.
  const auto& blob = TypedDataBase::Handle(kernel_component());
  const intptr_t library_count =
      Utils::BigEndianToHost32(LoadUnaligned(reinterpret_cast<uint32_t*>(
          blob.DataAddr(blob.LengthInBytes() - 2 * 4))));
  const intptr_t library_end = Utils::BigEndianToHost32(
      LoadUnaligned(reinterpret_cast<uint32_t*>(blob.DataAddr(
          blob.LengthInBytes() - (2 + (library_count - library_index)) * 4))));
  return library_end;
}
15125
  // Stores the constants table view (signature line not visible here).
  untag()->set_constants_table(value.ptr());
}
15129
  // Stores the libraries cache array (signature line not visible here).
  untag()->set_libraries_cache(cache.ptr());
}
15133
    const Smi& name_index) const {
  // NOTE(review): the signature and the reusable-handle/table-setup lines
  // of this definition are not visible in this view. The visible body looks
  // |name_index| up in the libraries cache and returns the cached library
  // (or null when absent).
  Array& data = thread->ArrayHandle();
  Library& result = thread->LibraryHandle();
  Object& key = thread->ObjectHandle();
  Smi& value = thread->SmiHandle();
  {
    ASSERT(!data.IsNull());
    result ^= table.GetOrNull(name_index);
    table.Release();
  }
  return result.ptr();
}
15155
    const Smi& name_index,
    const Library& lib) const {
  // NOTE(review): the signature and the reusable-handle/table-setup lines
  // are not visible in this view. The visible body inserts |lib| into the
  // libraries cache under |name_index| (or returns the existing entry) and
  // writes the updated table back.
  Array& data = thread->ArrayHandle();
  Library& result = thread->LibraryHandle();
  Object& key = thread->ObjectHandle();
  Smi& value = thread->SmiHandle();
  {
    ASSERT(!data.IsNull());
    result ^= table.InsertOrGetValue(name_index, lib);
    set_libraries_cache(table.Release());
  }
  return result.ptr();
}
15178
  // Stores the classes cache array (signature line not visible here).
  untag()->set_classes_cache(cache.ptr());
}
15182
    const Smi& name_index) const {
  // NOTE(review): the signature and reusable-handle/table-setup lines are
  // not visible in this view. The visible body looks |name_index| up in the
  // classes cache and returns the cached class (or null when absent).
  Array& data = thread->ArrayHandle();
  Class& result = thread->ClassHandle();
  Object& key = thread->ObjectHandle();
  Smi& value = thread->SmiHandle();
  {
    data = classes_cache();
    ASSERT(!data.IsNull());
    result ^= table.GetOrNull(name_index);
    table.Release();
  }
  return result.ptr();
}
15204
    const Smi& name_index,
    const Class& klass) const {
  // NOTE(review): the signature and reusable-handle/table-setup lines are
  // not visible in this view. The visible body inserts |klass| into the
  // classes cache under |name_index| (or returns the existing entry) and
  // writes the updated table back.
  Array& data = thread->ArrayHandle();
  Class& result = thread->ClassHandle();
  Object& key = thread->ObjectHandle();
  Smi& value = thread->SmiHandle();
  {
    data = classes_cache();
    ASSERT(!data.IsNull());
    result ^= table.InsertOrGetValue(name_index, klass);
    set_classes_cache(table.Release());
  }
  return result.ptr();
}
15227
ErrorPtr Library::CompileAll(bool ignore_error /* = false */) {
  // Eagerly finalizes and compiles every class and closure function in all
  // registered libraries. Returns the first error unless |ignore_error|.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Error& error = Error::Handle(zone);
  // NOTE(review): several lines of this definition are not visible in this
  // view (the |libs| handle declaration, the class-dictionary iterator, one
  // compile call, and the closure-iteration entry point).
    IsolateGroup::Current()->object_store()->libraries());
  Library& lib = Library::Handle(zone);
  Class& cls = Class::Handle(zone);
  for (int i = 0; i < libs.Length(); i++) {
    lib ^= libs.At(i);
    while (it.HasNext()) {
      cls = it.GetNextClass();
      error = cls.EnsureIsFinalized(thread);
      if (!error.IsNull()) {
        if (ignore_error) continue;
        return error.ptr();
      }
      if (!error.IsNull()) {
        if (ignore_error) continue;
        return error.ptr();
      }
    }
  }

  Object& result = Object::Handle(zone);
    if (!func.HasCode()) {
      result = Compiler::CompileFunction(thread, func);
      if (result.IsError()) {
        error = Error::Cast(result).ptr();
        return false;  // Stop iteration.
      }
    }
    return true;  // Continue iteration.
  });
  return error.ptr();
}
15267
15268#if !defined(DART_PRECOMPILED_RUNTIME)
15269
15270ErrorPtr Library::FinalizeAllClasses() {
15271 Thread* thread = Thread::Current();
15272 ASSERT(thread->IsDartMutatorThread());
15273 Zone* zone = thread->zone();
15274 Error& error = Error::Handle(zone);
15275 const GrowableObjectArray& libs = GrowableObjectArray::Handle(
15276 IsolateGroup::Current()->object_store()->libraries());
15277 Library& lib = Library::Handle(zone);
15278 Class& cls = Class::Handle(zone);
15279 for (int i = 0; i < libs.Length(); i++) {
15280 lib ^= libs.At(i);
15281 if (!lib.Loaded()) {
15282 String& uri = String::Handle(zone, lib.url());
15283 String& msg = String::Handle(
15284 zone,
15285 String::NewFormatted("Library '%s' is not loaded. "
15286 "Did you forget to call Dart_FinalizeLoading?",
15287 uri.ToCString()));
15288 return ApiError::New(msg);
15289 }
15290 ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
15291 while (it.HasNext()) {
15292 cls = it.GetNextClass();
15293 error = cls.EnsureIsFinalized(thread);
15294 if (!error.IsNull()) {
15295 return error.ptr();
15296 }
15297 }
15298 }
15299 return Error::null();
15300}
15301
15302#endif // !defined(DART_PRECOMPILED_RUNTIME)
15303
// Return Function::null() if function does not exist in libs.
// NOTE(review): the signature line of this definition is not visible in
// this view; |libs| is a growable array of Library pointers searched in
// order.
    const char* class_name,
    const char* function_name) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Function& func = Function::Handle(zone);
  String& class_str = String::Handle(zone);
  String& func_str = String::Handle(zone);
  Class& cls = Class::Handle(zone);
  for (intptr_t l = 0; l < libs.length(); l++) {
    const Library& lib = *libs[l];
    if (strcmp(class_name, "::") == 0) {
      // "::" selects the library's top-level class.
      cls = lib.toplevel_class();
    } else {
      class_str = String::New(class_name);
      cls = lib.LookupClassAllowPrivate(class_str);
    }
    if (!cls.IsNull()) {
      if (cls.EnsureIsFinalized(thread) == Error::null()) {
        func_str = String::New(function_name);
        if (function_name[0] == '.') {
          // A leading '.' — presumably a named constructor — gets the class
          // name prepended to form the full lookup name.
          func_str = String::Concat(class_str, func_str);
        }
        func = cls.LookupFunctionAllowPrivate(func_str);
      }
    }
    if (!func.IsNull()) {
      return func.ptr();
    }
  }
  return Function::null();
}
15337
15338ObjectPtr Library::GetFunctionClosure(const String& name) const {
15339 Thread* thread = Thread::Current();
15340 Zone* zone = thread->zone();
15341 Function& func = Function::Handle(zone, LookupFunctionAllowPrivate(name));
15342 if (func.IsNull()) {
15343 // Check whether the function is reexported into the library.
15344 const Object& obj = Object::Handle(zone, LookupReExport(name));
15345 if (obj.IsFunction()) {
15346 func ^= obj.ptr();
15347 } else {
15348 // Check if there is a getter of 'name', in which case invoke it
15349 // and return the result.
15350 const String& getter_name = String::Handle(zone, Field::GetterName(name));
15351 func = LookupFunctionAllowPrivate(getter_name);
15352 if (func.IsNull()) {
15353 return Closure::null();
15354 }
15355 // Invoke the getter and return the result.
15356 return DartEntry::InvokeFunction(func, Object::empty_array());
15357 }
15358 }
15359 func = func.ImplicitClosureFunction();
15360 return func.ImplicitStaticClosure();
15361}
15362
15363#if defined(DEBUG) && !defined(DART_PRECOMPILED_RUNTIME)
void Library::CheckFunctionFingerprints() {
  // Verifies that the source fingerprints recorded for recognized methods,
  // intrinsics and recognized factories still match the current sources.
  // Dies with FATAL on any mismatch or missing function (DEBUG/JIT only).
  GrowableArray<Library*> all_libs;
  Function& func = Function::Handle();
  bool fingerprints_match = true;

#define CHECK_FINGERPRINTS_INNER(class_name, function_name, dest, fp, kind) \
  func = GetFunction(all_libs, #class_name, #function_name); \
  if (func.IsNull()) { \
    fingerprints_match = false; \
    OS::PrintErr("Function not found %s.%s\n", #class_name, #function_name); \
  } else { \
    fingerprints_match = \
        func.CheckSourceFingerprint(fp, kind) && fingerprints_match; \
  }

#define CHECK_FINGERPRINTS(class_name, function_name, dest, fp) \
  CHECK_FINGERPRINTS_INNER(class_name, function_name, dest, fp, nullptr)
#define CHECK_FINGERPRINTS_ASM_INTRINSIC(class_name, function_name, dest, fp) \
  CHECK_FINGERPRINTS_INNER(class_name, function_name, dest, fp, "asm-intrinsic")
#define CHECK_FINGERPRINTS_GRAPH_INTRINSIC(class_name, function_name, dest, \
                                           fp) \
  CHECK_FINGERPRINTS_INNER(class_name, function_name, dest, fp, \
                           "graph-intrinsic")
#define CHECK_FINGERPRINTS_OTHER(class_name, function_name, dest, fp) \
  CHECK_FINGERPRINTS_INNER(class_name, function_name, dest, fp, "other")

  // dart:core intrinsics are looked up in the core library only.
  all_libs.Add(&Library::ZoneHandle(Library::CoreLibrary()));
  CORE_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS_ASM_INTRINSIC);
  CORE_INTEGER_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS_ASM_INTRINSIC);
  GRAPH_CORE_INTRINSICS_LIST(CHECK_FINGERPRINTS_GRAPH_INTRINSIC);

  // The remaining lists may reference any of the bootstrap libraries.
  all_libs.Add(&Library::ZoneHandle(Library::AsyncLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::MathLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::TypedDataLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::CollectionLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::ConvertLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::InternalLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::IsolateLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::FfiLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::NativeWrappersLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::DeveloperLibrary()));
  INTERNAL_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS_ASM_INTRINSIC);
  OTHER_RECOGNIZED_LIST(CHECK_FINGERPRINTS_OTHER);
  POLYMORPHIC_TARGET_LIST(CHECK_FINGERPRINTS);
  GRAPH_TYPED_DATA_INTRINSICS_LIST(CHECK_FINGERPRINTS_GRAPH_INTRINSIC);

  all_libs.Clear();
  all_libs.Add(&Library::ZoneHandle(Library::DeveloperLibrary()));
  DEVELOPER_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS_ASM_INTRINSIC);

#undef CHECK_FINGERPRINTS_INNER
#undef CHECK_FINGERPRINTS
#undef CHECK_FINGERPRINTS_ASM_INTRINSIC
#undef CHECK_FINGERPRINTS_GRAPH_INTRINSIC
#undef CHECK_FINGERPRINTS_OTHER

#define CHECK_FACTORY_FINGERPRINTS(symbol, class_name, factory_name, cid, fp) \
  func = GetFunction(all_libs, #class_name, #factory_name); \
  if (func.IsNull()) { \
    fingerprints_match = false; \
    OS::PrintErr("Function not found %s.%s\n", #class_name, #factory_name); \
  } else { \
    fingerprints_match = \
        func.CheckSourceFingerprint(fp) && fingerprints_match; \
  }

  all_libs.Clear();
  all_libs.Add(&Library::ZoneHandle(Library::CoreLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::TypedDataLibrary()));
  RECOGNIZED_LIST_FACTORY_LIST(CHECK_FACTORY_FINGERPRINTS);

#undef CHECK_FACTORY_FINGERPRINTS

  if (!fingerprints_match) {
    // Private names are mangled. Mangling depends on Library::private_key_.
    // If registering a new bootstrap library, add at the end.
    FATAL(
        "FP mismatch while recognizing methods. If the behavior of "
        "these functions has changed, then changes are also needed in "
        "the VM's compiler. Otherwise the fingerprint can simply be "
        "updated in recognized_methods_list.h\n");
  }
}
15447#endif // defined(DEBUG) && !defined(DART_PRECOMPILED_RUNTIME).
15448
// Allocates an Instructions object of |size| payload bytes in code space.
InstructionsPtr Instructions::New(intptr_t size,
                                  bool has_monomorphic_entry,
                                  bool should_be_aligned) {
  ASSERT(size >= 0);
  ASSERT(Object::instructions_class() != Class::null());
  if (size < 0 || size > kMaxElements) {
    // This should be caught before we reach here.
    FATAL("Fatal error in Instructions::New: invalid size %" Pd "\n", size);
  }
  Instructions& result = Instructions::Handle();
  {
    auto raw = Object::Allocate<Instructions>(Heap::kCode, size);
    NoSafepointScope no_safepoint;
    result = raw;
    result.SetSize(size);
    // Set this within the NoSafepointScope as well since it is contained in
    // the same bitfield as the size.
    result.SetHasMonomorphicEntry(has_monomorphic_entry);
    result.SetShouldBeAligned(should_be_aligned);
  }
  ASSERT(result.stats() == nullptr);
  return result.ptr();
}
15472
// Fixed debug name; Instructions carry no printable identity of their own.
const char* Instructions::ToCString() const {
  return "Instructions";
}
15476
#if defined(DART_PRECOMPILER)
  // NOTE(review): the signature line is not visible in this view. The code
  // statistics are attached to this object via the heap's peer table and
  // only exist in the precompiler.
  return reinterpret_cast<CodeStatistics*>(
      Thread::Current()->heap()->GetPeer(ptr()));
#else
  return nullptr;
#endif
}
15485
// Attaches |stats| to this object via the heap's peer table. No-op outside
// the precompiler.
void Instructions::set_stats(CodeStatistics* stats) const {
#if defined(DART_PRECOMPILER)
  Thread::Current()->heap()->SetPeer(ptr(), stats);
#endif
}
15491
// Fixed debug name for instructions sections.
const char* InstructionsSection::ToCString() const {
  return "InstructionsSection";
}
15495
// Stores the number of entries (non-GC'd field).
void InstructionsTable::set_length(intptr_t value) const {
  StoreNonPointer(&untag()->length_, value);
}
15499
// Stores the first PC covered by this table (non-GC'd field).
void InstructionsTable::set_start_pc(uword value) const {
  StoreNonPointer(&untag()->start_pc_, value);
}
15503
// Stores the (exclusive) end PC covered by this table (non-GC'd field).
void InstructionsTable::set_end_pc(uword value) const {
  StoreNonPointer(&untag()->end_pc_, value);
}
15507
// Stores the backing array of Code objects.
void InstructionsTable::set_code_objects(const Array& value) const {
  untag()->set_code_objects(value.ptr());
}
15511
// Reinterprets the raw address as a pointer to the table's read-only entry
// data and stores it as a non-GC'd field.
void InstructionsTable::set_rodata(uword value) const {
  StoreNonPointer(
      &untag()->rodata_,
      reinterpret_cast<const UntaggedInstructionsTable::Data*>(value));
}
15517
15518InstructionsTablePtr InstructionsTable::New(intptr_t length,
15519 uword start_pc,
15520 uword end_pc,
15521 uword rodata) {
15522 ASSERT(Object::instructions_table_class() != Class::null());
15523 ASSERT(length >= 0);
15524 ASSERT(start_pc <= end_pc);
15525 auto* const zone = Thread::Current()->zone();
15526 const Array& code_objects =
15527 (length == 0) ? Object::empty_array()
15528 : Array::Handle(zone, Array::New(length, Heap::kOld));
15529 const auto& result = InstructionsTable::Handle(
15530 zone, Object::Allocate<InstructionsTable>(Heap::kOld));
15531 result.set_code_objects(code_objects);
15532 result.set_length(length);
15533 result.set_start_pc(start_pc);
15534 result.set_end_pc(end_pc);
15535 result.set_rodata(rodata);
15536 return result.ptr();
15537}
15538
// Bounds-checked store of |code| into the code_objects backing array.
void InstructionsTable::SetCodeAt(intptr_t index, CodePtr code) const {
  ASSERT((0 <= index) &&
         (index < Smi::Value(code_objects()->untag()->length())));
  code_objects()->untag()->set_element(index, code);
}
15544
15545bool InstructionsTable::ContainsPc(InstructionsTablePtr table, uword pc) {
15546 return (InstructionsTable::start_pc(table) <= pc) &&
15547 (pc < InstructionsTable::end_pc(table));
15548}
15549
// Converts an absolute |pc| (which must lie within |table|'s range) into a
// 32-bit offset from the table's start PC.
uint32_t InstructionsTable::ConvertPcToOffset(InstructionsTablePtr table,
                                              uword pc) {
  ASSERT(InstructionsTable::ContainsPc(table, pc));
  const uint32_t pc_offset =
      static_cast<uint32_t>(pc - InstructionsTable::start_pc(table));
  ASSERT(InstructionsTable::start_pc(table) + pc_offset == pc); // No overflow.
  return pc_offset;
}
15558
// Binary search for the entry whose [pc_offset, next.pc_offset) range
// contains |pc|, starting the search at |start_index|. Returns the entry
// index, or -1 when |pc| is not covered by |table|.
intptr_t InstructionsTable::FindEntry(InstructionsTablePtr table,
                                      uword pc,
                                      intptr_t start_index /* = 0 */) {
  // This can run in the middle of GC and must not allocate handles.
  NoSafepointScope no_safepoint;
  if (!InstructionsTable::ContainsPc(table, pc)) return -1;
  const uint32_t pc_offset = InstructionsTable::ConvertPcToOffset(table, pc);

  const auto rodata = table.untag()->rodata_;
  const auto entries = rodata->entries();
  intptr_t lo = start_index;
  intptr_t hi = rodata->length - 1;
  while (lo <= hi) {
    intptr_t mid = (hi - lo + 1) / 2 + lo;
    ASSERT(mid >= lo);
    ASSERT(mid <= hi);
    if (pc_offset < entries[mid].pc_offset) {
      hi = mid - 1;
    } else if ((mid != hi) && (pc_offset >= entries[mid + 1].pc_offset)) {
      lo = mid + 1;
    } else {
      // entries[mid].pc_offset <= pc_offset, and either mid is the last
      // candidate or pc_offset is below the next entry's start.
      return mid;
    }
  }
  return -1;
}
15585
15586const UntaggedCompressedStackMaps::Payload*
15587InstructionsTable::GetCanonicalStackMap(InstructionsTablePtr table) {
15588 const auto rodata = table.untag()->rodata_;
15589 return rodata->canonical_stack_map_entries_offset != 0
15590 ? rodata->StackMapAt(rodata->canonical_stack_map_entries_offset)
15591 : nullptr;
15592}
15593
15594const UntaggedCompressedStackMaps::Payload* InstructionsTable::FindStackMap(
15595 InstructionsTablePtr table,
15596 uword pc,
15597 uword* start_pc) {
15598 // This can run in the middle of GC and must not allocate handles.
15599 NoSafepointScope no_safepoint;
15600 const intptr_t idx = FindEntry(table, pc);
15601 if (idx != -1) {
15602 const auto rodata = table.untag()->rodata_;
15603 const auto entries = rodata->entries();
15604 *start_pc = InstructionsTable::start_pc(table) + entries[idx].pc_offset;
15605 return rodata->StackMapAt(entries[idx].stack_map_offset);
15606 }
15607 return nullptr;
15608}
15609
// Maps |pc| to the Code object covering it, the UnknownDartCode stub for
// PCs below the first entry that has a Code object, or Code::null() when
// |pc| is outside |table|.
CodePtr InstructionsTable::FindCode(InstructionsTablePtr table, uword pc) {
  // This can run in the middle of GC and must not allocate handles.
  NoSafepointScope no_safepoint;
  if (!InstructionsTable::ContainsPc(table, pc)) return Code::null();

  const auto rodata = table.untag()->rodata_;

  const auto pc_offset = InstructionsTable::ConvertPcToOffset(table, pc);

  // PCs at or below the first entry with code map to the UnknownDartCode
  // stub.
  if (pc_offset <= rodata->entries()[rodata->first_entry_with_code].pc_offset) {
    return StubCode::UnknownDartCode().ptr();
  }

  const auto idx =
      FindEntry(table, pc, table.untag()->rodata_->first_entry_with_code);
  if (idx != -1) {
    const intptr_t code_index = idx - rodata->first_entry_with_code;
    ASSERT(code_index >= 0);
    ASSERT(code_index <
           Smi::Value(table.untag()->code_objects()->untag()->length()));
    // NOTE(review): the declaration line of |result| is not visible in this
    // view.
        table.untag()->code_objects()->untag()->element(code_index);
    ASSERT(result->IsCode());
    // Note: can't use Code::RawCast(...) here because it allocates handles
    // in DEBUG mode.
    return static_cast<CodePtr>(result);
  }

  return Code::null();
}
15640
// Translates the entry's recorded pc_offset back into an absolute PC.
uword InstructionsTable::EntryPointAt(intptr_t code_index) const {
  ASSERT(0 <= code_index);
  ASSERT(code_index < static_cast<intptr_t>(rodata()->length));
  return InstructionsTable::start_pc(this->ptr()) +
         rodata()->entries()[code_index].pc_offset;
}
15647
// Fixed debug name for instructions tables.
const char* InstructionsTable::ToCString() const {
  return "InstructionsTable";
}
15651
// Allocates an object pool of |len| entries in old space; entries start as
// patchable zero immediates (set up by InitializeObject during allocation).
ObjectPoolPtr ObjectPool::New(intptr_t len) {
  ASSERT(Object::object_pool_class() != Class::null());
  if (len < 0 || len > kMaxElements) {
    // This should be caught before we reach here.
    FATAL("Fatal error in ObjectPool::New: invalid length %" Pd "\n", len);
  }
  // We only verify the entry bits in DEBUG, so only allocate a handle there.
  DEBUG_ONLY(auto& result = ObjectPool::Handle());
  auto raw = Object::Allocate<ObjectPool>(Heap::kOld, len);
  NoSafepointScope no_safepoint;
  raw->untag()->length_ = len;
#if defined(DEBUG)
  result = raw;
  for (intptr_t i = 0; i < len; i++) {
    // Verify that InitializeObject() already set the payload as expected.
    ASSERT_EQUAL(result.PatchableAt(i), ObjectPool::Patchability::kPatchable);
    ASSERT_EQUAL(result.TypeAt(i), ObjectPool::EntryType::kImmediate);
    ASSERT_EQUAL(result.RawValueAt(i), 0);
  }
#endif
  return raw;
}
15674
15675#if !defined(DART_PRECOMPILED_RUNTIME)
ObjectPoolPtr ObjectPool::NewFromBuilder(
    // NOTE(review): the parameter line of this signature is not visible in
    // this view; |builder| is used below. Materializes the builder's
    // entries into a new pool, sharing the canonical empty pool for zero
    // entries.
  const intptr_t len = builder.CurrentLength();
  if (len == 0) {
    return Object::empty_object_pool().ptr();
  }
  const ObjectPool& result = ObjectPool::Handle(ObjectPool::New(len));
  for (intptr_t i = 0; i < len; i++) {
    auto entry = builder.EntryAt(i);
    auto type = entry.type();
    auto patchable = entry.patchable();
    auto snapshot_behavior = entry.snapshot_behavior();
    result.SetTypeAt(i, type, patchable, snapshot_behavior);
    if (type == EntryType::kTaggedObject) {
      result.SetObjectAt(i, *entry.obj_);
    } else {
#if defined(TARGET_ARCH_IS_32_BIT)
      ASSERT(type != EntryType::kImmediate64);
#endif
      ASSERT(type != EntryType::kImmediate128);
      result.SetRawValueAt(i, entry.imm_);
    }
  }
  return result.ptr();
}
15701
// Replays every pool entry (type, patchability, snapshot behavior and
// payload) into the empty |builder|.
void ObjectPool::CopyInto(compiler::ObjectPoolBuilder* builder) const {
  ASSERT(builder->CurrentLength() == 0);
  for (intptr_t i = 0; i < Length(); i++) {
    auto type = TypeAt(i);
    auto patchable = PatchableAt(i);
    auto snapshot_behavior = SnapshotBehaviorAt(i);
    switch (type) {
      case compiler::ObjectPoolBuilderEntry::kTaggedObject: {
        compiler::ObjectPoolBuilderEntry entry(&Object::ZoneHandle(ObjectAt(i)),
                                               patchable, snapshot_behavior);
        builder->AddObject(entry);
        break;
      }
      case compiler::ObjectPoolBuilderEntry::kImmediate:
      case compiler::ObjectPoolBuilderEntry::kNativeFunction: {
        // Raw-word entries carry their payload as an immediate value.
        compiler::ObjectPoolBuilderEntry entry(RawValueAt(i), type, patchable,
                                               snapshot_behavior);
        builder->AddObject(entry);
        break;
      }
      default:
        UNREACHABLE();
    }
  }
  ASSERT(builder->CurrentLength() == Length());
}
15728#endif
15729
15730const char* ObjectPool::ToCString() const {
15731 Zone* zone = Thread::Current()->zone();
15732 return zone->PrintToString("ObjectPool len:%" Pd, Length());
15733}
15734
// Dumps every pool entry with its PP-relative addressing form; whether the
// printed offset is tagged depends on the target architecture.
void ObjectPool::DebugPrint() const {
  THR_Print("ObjectPool len:%" Pd " {\n", Length());
  for (intptr_t i = 0; i < Length(); i++) {
#if defined(DART_PRECOMPILED_RUNTIME)
    intptr_t offset = ObjectPool::element_offset(i);
#else
    intptr_t offset = compiler::target::ObjectPool::element_offset(i);
#endif
#if defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
    THR_Print(" %" Pd "(pp) ", offset); // PP is untagged
#elif defined(TARGET_ARCH_ARM64)
    THR_Print(" [pp, #%" Pd "] ", offset); // PP is untagged
#elif defined(TARGET_ARCH_ARM32)
    THR_Print(" [pp, #%" Pd "] ", offset - kHeapObjectTag); // PP is tagged
#else
    THR_Print(" [pp+0x%" Px "] ", offset - kHeapObjectTag); // PP is tagged
#endif
    if (TypeAt(i) == EntryType::kTaggedObject) {
      const Object& obj = Object::Handle(ObjectAt(i));
      THR_Print("%s (obj)\n", obj.ToCString());
    } else if (TypeAt(i) == EntryType::kNativeFunction) {
      uword pc = RawValueAt(i);
      uintptr_t start = 0;
      const char* name = NativeSymbolResolver::LookupSymbolName(pc, &start);
      const char* dso_name;
      uword dso_base;
      if (name != nullptr) {
        THR_Print("%s (native function)\n", name);
        NativeSymbolResolver::FreeSymbolName(name);
      } else if (NativeSymbolResolver::LookupSharedObject(pc, &dso_base,
                                                          &dso_name)) {
        // No symbol name available; fall back to <dso>+<offset>.
        uword dso_offset = pc - dso_base;
        THR_Print("%s+0x%" Px " (native function)\n", dso_name, dso_offset);
        NativeSymbolResolver::FreeSymbolName(dso_name);
      } else {
        THR_Print("0x%" Px " (native function)\n", pc);
      }
    } else {
      THR_Print("0x%" Px " (raw)\n", RawValueAt(i));
    }
  }
  THR_Print("}\n");
}
15778
// Size in bytes of the delta-encoded descriptor payload.
intptr_t PcDescriptors::Length() const {
  return untag()->length_;
}
15782
// Stores the payload length into the untagged header (a non-pointer field,
// hence StoreNonPointer: no write barrier needed).
void PcDescriptors::SetLength(intptr_t value) const {
  StoreNonPointer(&untag()->length_, value);
}
15786
// Copies `size` bytes of encoded descriptor data into this object's payload.
// The object must have been allocated with at least `size` payload bytes.
void PcDescriptors::CopyData(const void* bytes, intptr_t size) {
  // The raw interior pointer below is only valid while no GC can move us.
  NoSafepointScope no_safepoint;
  uint8_t* data = UnsafeMutableNonPointer(&untag()->data()[0]);
  // We're guaranteed these memory spaces do not overlap.
  memcpy(data, bytes, size);  // NOLINT
}
15793
// Allocates a PcDescriptors table in old space and fills it with the given
// delta-encoded data; `size` is the payload size in bytes.
PcDescriptorsPtr PcDescriptors::New(const void* delta_encoded_data,
                                    intptr_t size) {
  ASSERT(Object::pc_descriptors_class() != Class::null());
  Thread* thread = Thread::Current();
  PcDescriptors& result = PcDescriptors::Handle(thread->zone());
  {
    auto raw = Object::Allocate<PcDescriptors>(Heap::kOld, size);
    // Initialize the header before any safepoint can observe the object.
    NoSafepointScope no_safepoint;
    result = raw;
    result.SetLength(size);
  }
  result.CopyData(delta_encoded_data, size);
  return result.ptr();
}
15808
// Allocates an old-space PcDescriptors table with room for `length` payload
// bytes; the payload is left for the caller to fill.
PcDescriptorsPtr PcDescriptors::New(intptr_t length) {
  ASSERT(Object::pc_descriptors_class() != Class::null());
  Thread* thread = Thread::Current();
  PcDescriptors& result = PcDescriptors::Handle(thread->zone());
  {
    auto raw = Object::Allocate<PcDescriptors>(Heap::kOld, length);
    // Initialize the header before any safepoint can observe the object.
    NoSafepointScope no_safepoint;
    result = raw;
    result.SetLength(length);
  }
  return result.ptr();
}
15821
// Maps a descriptor kind to a fixed, human-readable label for printing.
const char* PcDescriptors::KindAsStr(UntaggedPcDescriptors::Kind kind) {
  switch (kind) {
    case UntaggedPcDescriptors::kDeopt:
      return "deopt ";
    case UntaggedPcDescriptors::kIcCall:
      return "ic-call";
    case UntaggedPcDescriptors::kUnoptStaticCall:
      return "unopt-call";
    case UntaggedPcDescriptors::kRuntimeCall:
      return "runtime-call";
    case UntaggedPcDescriptors::kOsrEntry:
      return "osr-entry";
    case UntaggedPcDescriptors::kRewind:
      return "rewind";
    case UntaggedPcDescriptors::kBSSRelocation:
      return "bss reloc";
    case UntaggedPcDescriptors::kOther:
      return "other";
    case UntaggedPcDescriptors::kAnyKind:
      // kAnyKind is a query mask, never a stored kind.
      UNREACHABLE();
      break;
  }
  UNREACHABLE();
  return "";
}
15847
// Writes a formatted table of all descriptor records (one per line) into
// `buffer`; `base` is added to each pc offset to print absolute addresses.
void PcDescriptors::WriteToBuffer(BaseTextBuffer* buffer, uword base) const {
  // 4 bits per hex digit.
  const int addr_width = kBitsPerWord / 4;
  // "*" in a printf format specifier tells it to read the field width from
  // the printf argument list.
  buffer->Printf(
      "%-*s kind deopt-id tok-ix try-ix yield-idx\n",
      addr_width, "pc");
  Iterator iter(*this, UntaggedPcDescriptors::kAnyKind);
  while (iter.MoveNext()) {
    buffer->Printf("%#-*" Px " %-13s % 8" Pd " %-10s % 8" Pd " % 8" Pd
                   "\n",
                   addr_width, base + iter.PcOffset(), KindAsStr(iter.Kind()),
                   iter.DeoptId(), iter.TokenPos().ToCString(), iter.TryIndex(),
                   iter.YieldIndex());
  }
}
15865
// Zone-allocated textual dump of the whole table (relative pc offsets).
const char* PcDescriptors::ToCString() const {
  if (Length() == 0) {
    return "empty PcDescriptors";
  }
  ZoneTextBuffer buffer(Thread::Current()->zone());
  WriteToBuffer(&buffer, /*base=*/0);
  return buffer.buffer();
}
15874
// Verify assumptions (in debug mode only).
// - No two deopt descriptors have the same deoptimization id.
// - No two ic-call descriptors have the same deoptimization id (type feedback).
// A function without unique ids is marked as non-optimizable (e.g., because of
// finally blocks).
void PcDescriptors::Verify(const Function& function) const {
#if defined(DEBUG)
  // Only check ids for unoptimized code that is optimizable.
  if (!function.IsOptimizable()) {
    return;
  }
  // First pass: find the largest deopt id so the bit vectors below can be
  // sized to cover the full id range.
  intptr_t max_deopt_id = 0;
  Iterator max_iter(
      *this, UntaggedPcDescriptors::kDeopt | UntaggedPcDescriptors::kIcCall);
  while (max_iter.MoveNext()) {
    if (max_iter.DeoptId() > max_deopt_id) {
      max_deopt_id = max_iter.DeoptId();
    }
  }

  // Second pass: record each id per kind and assert it was not seen before.
  Zone* zone = Thread::Current()->zone();
  BitVector* deopt_ids = new (zone) BitVector(zone, max_deopt_id + 1);
  BitVector* iccall_ids = new (zone) BitVector(zone, max_deopt_id + 1);
  Iterator iter(*this,
                UntaggedPcDescriptors::kDeopt | UntaggedPcDescriptors::kIcCall);
  while (iter.MoveNext()) {
    // 'deopt_id' is set for kDeopt and kIcCall and must be unique for one kind.
    if (DeoptId::IsDeoptAfter(iter.DeoptId())) {
      // TODO(vegorov): some instructions contain multiple calls and have
      // multiple "after" targets recorded. Right now it is benign but might
      // lead to issues in the future. Fix that and enable verification.
      continue;
    }
    if (iter.Kind() == UntaggedPcDescriptors::kDeopt) {
      ASSERT(!deopt_ids->Contains(iter.DeoptId()));
      deopt_ids->Add(iter.DeoptId());
    } else {
      ASSERT(!iccall_ids->Contains(iter.DeoptId()));
      iccall_ids->Add(iter.DeoptId());
    }
  }
#endif  // DEBUG
}
15918
// Stores the payload length (non-pointer header field, no write barrier).
void CodeSourceMap::SetLength(intptr_t value) const {
  StoreNonPointer(&untag()->length_, value);
}
15922
// Allocates an old-space CodeSourceMap with room for `length` payload bytes.
CodeSourceMapPtr CodeSourceMap::New(intptr_t length) {
  ASSERT(Object::code_source_map_class() != Class::null());
  Thread* thread = Thread::Current();
  CodeSourceMap& result = CodeSourceMap::Handle(thread->zone());
  {
    auto raw = Object::Allocate<CodeSourceMap>(Heap::kOld, length);
    // Initialize the header before any safepoint can observe the object.
    NoSafepointScope no_safepoint;
    result = raw;
    result.SetLength(length);
  }
  return result.ptr();
}
15935
// Fixed label; the encoded payload is not expanded here.
const char* CodeSourceMap::ToCString() const {
  return "CodeSourceMap";
}
15939
15941 NoSafepointScope scope;
15942 uint8_t* data = UnsafeMutableNonPointer(&untag()->payload()->data()[0]);
15943 uint8_t* end = data + payload_size();
15944 uint32_t hash = payload_size();
15945 for (uint8_t* cursor = data; cursor < end; cursor++) {
15946 hash = CombineHashes(hash, *cursor);
15947 }
15948 return FinalizeHash(hash, kHashBits);
15949}
15950
// Writes each entry as "pc: bitstring" into `buffer`, separated by
// `separator`; `base` is added to each stored pc offset. Bit i is "1" iff
// slot i holds a tagged object.
void CompressedStackMaps::WriteToBuffer(BaseTextBuffer* buffer,
                                        uword base,
                                        const char* separator) const {
  auto it = iterator(Thread::Current());
  bool first_entry = true;
  while (it.MoveNext()) {
    if (!first_entry) {
      buffer->AddString(separator);
    }
    buffer->Printf("0x%.8" Px ": ", base + it.pc_offset());
    for (intptr_t i = 0, n = it.Length(); i < n; i++) {
      buffer->AddString(it.IsObject(i) ? "1" : "0");
    }
    first_entry = false;
  }
}
15967
15969CompressedStackMaps::iterator(Thread* thread) const {
15971 *this, CompressedStackMaps::Handle(
15972 thread->zone(), thread->isolate_group()
15973 ->object_store()
15974 ->canonicalized_stack_map_entries()));
15975}
15976
15977CompressedStackMapsPtr CompressedStackMaps::New(const void* payload,
15978 intptr_t size,
15979 bool is_global_table,
15980 bool uses_global_table) {
15981 ASSERT(Object::compressed_stackmaps_class() != Class::null());
15982 // We don't currently allow both flags to be true.
15983 ASSERT(!is_global_table || !uses_global_table);
15984 // The canonical empty instance should be used instead.
15985 ASSERT(size != 0);
15986
15988 FATAL(
15989 "Fatal error in CompressedStackMaps::New: "
15990 "invalid payload size %" Pu "\n",
15991 size);
15992 }
15993
15994 auto& result = CompressedStackMaps::Handle();
15995 {
15996 // CompressedStackMaps data objects are associated with a code object,
15997 // allocate them in old generation.
15998 auto raw = Object::Allocate<CompressedStackMaps>(Heap::kOld, size);
15999 NoSafepointScope no_safepoint;
16000 result = raw;
16001 result.untag()->payload()->set_flags_and_size(
16005 // Perform the copy under the NoSafepointScope since it uses a raw pointer
16006 // to the payload, and so the object should not move during the copy.
16007 auto cursor =
16008 result.UnsafeMutableNonPointer(result.untag()->payload()->data());
16009 memcpy(cursor, payload, size); // NOLINT
16010 }
16011
16012 ASSERT(!result.IsGlobalTable() || !result.UsesGlobalTable());
16013
16014 return result.ptr();
16015}
16016
// Zone-allocated dump of all entries; not valid on the global table itself.
const char* CompressedStackMaps::ToCString() const {
  ASSERT(!IsGlobalTable());
  if (payload_size() == 0) {
    return "CompressedStackMaps()";
  }
  auto const t = Thread::Current();
  ZoneTextBuffer buffer(t->zone(), 100);
  buffer.AddString("CompressedStackMaps(");
  WriteToBuffer(&buffer, /*base=*/0, ", ");
  buffer.AddString(")");
  return buffer.buffer();
}
16029
// Returns the name of the variable stored at `var_index`.
StringPtr LocalVarDescriptors::GetName(intptr_t var_index) const {
  ASSERT(var_index < Length());
  ASSERT(Object::Handle(ptr()->untag()->name(var_index)).IsString());
  return ptr()->untag()->name(var_index);
}
16035
16036void LocalVarDescriptors::SetVar(
16037 intptr_t var_index,
16038 const String& name,
16040 ASSERT(var_index < Length());
16041 ASSERT(!name.IsNull());
16042 ptr()->untag()->set_name(var_index, name.ptr());
16043 ptr()->untag()->data()[var_index] = *info;
16044}
16045
16046void LocalVarDescriptors::GetInfo(
16047 intptr_t var_index,
16049 ASSERT(var_index < Length());
16050 *info = ptr()->untag()->data()[var_index];
16051}
16052
16053static int PrintVarInfo(char* buffer,
16054 int len,
16055 intptr_t i,
16056 const String& var_name,
16059 const int32_t index = info.index();
16060 if (kind == UntaggedLocalVarDescriptors::kContextLevel) {
16061 return Utils::SNPrint(buffer, len,
16062 "%2" Pd
16063 " %-13s level=%-3d"
16064 " begin=%-3d end=%d\n",
16065 i, LocalVarDescriptors::KindToCString(kind), index,
16066 static_cast<int>(info.begin_pos.Pos()),
16067 static_cast<int>(info.end_pos.Pos()));
16068 } else if (kind == UntaggedLocalVarDescriptors::kContextVar) {
16069 return Utils::SNPrint(
16070 buffer, len,
16071 "%2" Pd
16072 " %-13s level=%-3d index=%-3d"
16073 " begin=%-3d end=%-3d name=%s\n",
16074 i, LocalVarDescriptors::KindToCString(kind), info.scope_id, index,
16075 static_cast<int>(info.begin_pos.Pos()),
16076 static_cast<int>(info.end_pos.Pos()), var_name.ToCString());
16077 } else {
16078 return Utils::SNPrint(
16079 buffer, len,
16080 "%2" Pd
16081 " %-13s scope=%-3d index=%-3d"
16082 " begin=%-3d end=%-3d name=%s\n",
16083 i, LocalVarDescriptors::KindToCString(kind), info.scope_id, index,
16084 static_cast<int>(info.begin_pos.Pos()),
16085 static_cast<int>(info.end_pos.Pos()), var_name.ToCString());
16086 }
16087}
16088
// Zone-allocated dump of all variable records. Uses the classic two-pass
// snprintf pattern: first pass measures, second pass formats into a buffer
// of exactly the measured size.
const char* LocalVarDescriptors::ToCString() const {
  if (IsNull()) {
    return "LocalVarDescriptors: null";
  }
  if (Length() == 0) {
    return "empty LocalVarDescriptors";
  }
  intptr_t len = 1;  // Trailing '\0'.
  String& var_name = String::Handle();
  for (intptr_t i = 0; i < Length(); i++) {
    UntaggedLocalVarDescriptors::VarInfo info;
    var_name = GetName(i);
    GetInfo(i, &info);
    // PrintVarInfo(nullptr, 0, ...) only measures; nothing is written.
    len += PrintVarInfo(nullptr, 0, i, var_name, info);
  }
  char* buffer = Thread::Current()->zone()->Alloc<char>(len + 1);
  buffer[0] = '\0';
  intptr_t num_chars = 0;
  for (intptr_t i = 0; i < Length(); i++) {
    UntaggedLocalVarDescriptors::VarInfo info;
    var_name = GetName(i);
    GetInfo(i, &info);
    num_chars += PrintVarInfo((buffer + num_chars), (len - num_chars), i,
                              var_name, info);
  }
  return buffer;
}
16116
16117const char* LocalVarDescriptors::KindToCString(
16119 switch (kind) {
16120 case UntaggedLocalVarDescriptors::kStackVar:
16121 return "StackVar";
16122 case UntaggedLocalVarDescriptors::kContextVar:
16123 return "ContextVar";
16124 case UntaggedLocalVarDescriptors::kContextLevel:
16125 return "ContextLevel";
16126 case UntaggedLocalVarDescriptors::kSavedCurrentContext:
16127 return "CurrentCtx";
16128 default:
16129 UNIMPLEMENTED();
16130 return nullptr;
16131 }
16132}
16133
// Allocates an old-space LocalVarDescriptors with `num_variables` slots.
LocalVarDescriptorsPtr LocalVarDescriptors::New(intptr_t num_variables) {
  ASSERT(Object::var_descriptors_class() != Class::null());
  if (num_variables < 0 || num_variables > kMaxElements) {
    // This should be caught before we reach here.
    FATAL(
        "Fatal error in LocalVarDescriptors::New: "
        "invalid num_variables %" Pd ". Maximum is: %d\n",
        num_variables, UntaggedLocalVarDescriptors::kMaxIndex);
  }
  auto raw = Object::Allocate<LocalVarDescriptors>(Heap::kOld, num_variables);
  // Set the entry count before any safepoint can observe the object.
  NoSafepointScope no_safepoint;
  raw->untag()->num_entries_ = num_variables;
  return raw;
}
16148
// Number of variable records in this descriptor table.
intptr_t LocalVarDescriptors::Length() const {
  return untag()->num_entries_;
}
16152
// Number of try-block handler records in this table.
intptr_t ExceptionHandlers::num_entries() const {
  return untag()->num_entries();
}
16156
16157bool ExceptionHandlers::has_async_handler() const {
16159 untag()->packed_fields_);
16160}
16161
// Updates the async-handler bit inside the packed header word.
void ExceptionHandlers::set_has_async_handler(bool value) const {
  StoreNonPointer(&untag()->packed_fields_,
                  UntaggedExceptionHandlers::AsyncHandlerBit::update(
                      value, untag()->packed_fields_));
}
16167
16168void ExceptionHandlers::SetHandlerInfo(intptr_t try_index,
16169 intptr_t outer_try_index,
16170 uword handler_pc_offset,
16171 bool needs_stacktrace,
16172 bool has_catch_all,
16173 bool is_generated) const {
16174 ASSERT((try_index >= 0) && (try_index < num_entries()));
16175 NoSafepointScope no_safepoint;
16177 UnsafeMutableNonPointer(&untag()->data()[try_index]);
16178 info->outer_try_index = outer_try_index;
16179 // Some C compilers warn about the comparison always being true when using <=
16180 // due to limited range of data type.
16181 ASSERT((handler_pc_offset == static_cast<uword>(kMaxUint32)) ||
16182 (handler_pc_offset < static_cast<uword>(kMaxUint32)));
16183 info->handler_pc_offset = handler_pc_offset;
16184 info->needs_stacktrace = static_cast<int8_t>(needs_stacktrace);
16185 info->has_catch_all = static_cast<int8_t>(has_catch_all);
16186 info->is_generated = static_cast<int8_t>(is_generated);
16187}
16188
// Copies the handler record at `try_index` into *info.
void ExceptionHandlers::GetHandlerInfo(intptr_t try_index,
                                       ExceptionHandlerInfo* info) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  ASSERT(info != nullptr);
  *info = untag()->data()[try_index];
}
16195
// Code-relative pc offset of the handler for `try_index`.
uword ExceptionHandlers::HandlerPCOffset(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return untag()->data()[try_index].handler_pc_offset;
}
16200
// Try-index of the enclosing try block for `try_index`.
intptr_t ExceptionHandlers::OuterTryIndex(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return untag()->data()[try_index].outer_try_index;
}
16205
// Whether the handler at `try_index` requires a stack trace object.
bool ExceptionHandlers::NeedsStackTrace(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return untag()->data()[try_index].needs_stacktrace != 0;
}
16210
// Whether the handler at `try_index` was compiler-generated.
bool ExceptionHandlers::IsGenerated(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return untag()->data()[try_index].is_generated != 0;
}
16215
// Whether the handler at `try_index` catches all exception types.
bool ExceptionHandlers::HasCatchAll(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return untag()->data()[try_index].has_catch_all != 0;
}
16220
// Associates the array of caught types with the handler at `try_index`.
void ExceptionHandlers::SetHandledTypes(intptr_t try_index,
                                        const Array& handled_types) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  ASSERT(!handled_types.IsNull());
  const Array& handled_types_data =
      Array::Handle(untag()->handled_types_data());
  handled_types_data.SetAt(try_index, handled_types);
}
16229
// Returns the array of caught types for the handler at `try_index`.
ArrayPtr ExceptionHandlers::GetHandledTypes(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  Array& array = Array::Handle(untag()->handled_types_data());
  array ^= array.At(try_index);
  return array.ptr();
}
16236
// Installs the backing array of per-handler caught-type arrays.
void ExceptionHandlers::set_handled_types_data(const Array& value) const {
  untag()->set_handled_types_data(value.ptr());
}
16240
// Allocates an ExceptionHandlers table with `num_handlers` records, backed
// by a freshly allocated (or the canonical empty) handled-types array.
ExceptionHandlersPtr ExceptionHandlers::New(intptr_t num_handlers) {
  ASSERT(Object::exception_handlers_class() != Class::null());
  if ((num_handlers < 0) || (num_handlers >= kMaxHandlers)) {
    FATAL(
        "Fatal error in ExceptionHandlers::New(): "
        "invalid num_handlers %" Pd "\n",
        num_handlers);
  }
  const Array& handled_types_data =
      (num_handlers == 0) ? Object::empty_array()
                          : Array::Handle(Array::New(num_handlers, Heap::kOld));
  return ExceptionHandlers::New(handled_types_data);
}
16254
16255ExceptionHandlersPtr ExceptionHandlers::New(const Array& handled_types_data) {
16256 ASSERT(Object::exception_handlers_class() != Class::null());
16257 const intptr_t num_handlers = handled_types_data.Length();
16258 if ((num_handlers < 0) || (num_handlers >= kMaxHandlers)) {
16259 FATAL(
16260 "Fatal error in ExceptionHandlers::New(): "
16261 "invalid num_handlers %" Pd "\n",
16262 num_handlers);
16263 }
16264 ExceptionHandlers& result = ExceptionHandlers::Handle();
16265 {
16266 auto raw = Object::Allocate<ExceptionHandlers>(Heap::kOld, num_handlers);
16267 NoSafepointScope no_safepoint;
16268 result = raw;
16269 result.untag()->packed_fields_ =
16271 }
16272 result.set_handled_types_data(handled_types_data);
16273 return result.ptr();
16274}
16275
16276void ExceptionHandlers::WriteToBuffer(BaseTextBuffer* buffer,
16277 uword base) const {
16278 auto& handled_types = Array::Handle();
16279 auto& type = AbstractType::Handle();
16281 for (intptr_t i = 0; i < num_entries(); i++) {
16282 GetHandlerInfo(i, &info);
16283 handled_types = GetHandledTypes(i);
16284 const intptr_t num_types =
16285 handled_types.IsNull() ? 0 : handled_types.Length();
16286 buffer->Printf("%" Pd " => %#" Px " (%" Pd " types) (outer %d)%s%s\n", i,
16287 base + info.handler_pc_offset, num_types,
16288 info.outer_try_index,
16289 ((info.needs_stacktrace != 0) ? " (needs stack trace)" : ""),
16290 ((info.is_generated != 0) ? " (generated)" : ""));
16291 for (int k = 0; k < num_types; k++) {
16292 type ^= handled_types.At(k);
16293 ASSERT(!type.IsNull());
16294 buffer->Printf(" %d. %s\n", k, type.ToCString());
16295 }
16296 }
16297 if (has_async_handler()) {
16298 buffer->AddString("<async handler>\n");
16299 }
16300}
16301
16302const char* ExceptionHandlers::ToCString() const {
16303 if (num_entries() == 0) {
16304 return has_async_handler()
16305 ? "empty ExceptionHandlers (with <async handler>)"
16306 : "empty ExceptionHandlers";
16307 }
16308 ZoneTextBuffer buffer(Thread::Current()->zone());
16309 WriteToBuffer(&buffer, /*base=*/0);
16310 return buffer.buffer();
16311}
16312
// Installs the cached target code object.
void SingleTargetCache::set_target(const Code& value) const {
  untag()->set_target(value.ptr());
}
16316
// Fixed label; cache contents are not expanded here.
const char* SingleTargetCache::ToCString() const {
  return "SingleTargetCache";
}
16320
// Allocates an empty SingleTargetCache in old space.
SingleTargetCachePtr SingleTargetCache::New() {
  return Object::Allocate<SingleTargetCache>(Heap::kOld);
}
16324
// Controls whether this call site may be patched to a monomorphic call.
void UnlinkedCall::set_can_patch_to_monomorphic(bool value) const {
  StoreNonPointer(&untag()->can_patch_to_monomorphic_, value);
}
16328
16330 return String::Handle(target_name()).Hash();
16331}
16332
16333bool UnlinkedCall::Equals(const UnlinkedCall& other) const {
16334 return (target_name() == other.target_name()) &&
16335 (arguments_descriptor() == other.arguments_descriptor()) &&
16336 (can_patch_to_monomorphic() == other.can_patch_to_monomorphic());
16337}
16338
// Fixed label; call-site details are not expanded here.
const char* UnlinkedCall::ToCString() const {
  return "UnlinkedCall";
}
16342
// Allocates an UnlinkedCall; monomorphic patching is enabled only in JIT
// mode (disabled when running precompiled).
UnlinkedCallPtr UnlinkedCall::New() {
  const auto& result =
      UnlinkedCall::Handle(Object::Allocate<UnlinkedCall>(Heap::kOld));
  result.set_can_patch_to_monomorphic(!FLAG_precompiled_mode);
  return result.ptr();
}
16349
// Allocates a MonomorphicSmiableCall caching the expected receiver class id
// and the target's entry point address.
MonomorphicSmiableCallPtr MonomorphicSmiableCall::New(classid_t expected_cid,
                                                      const Code& target) {
  const auto& result = MonomorphicSmiableCall::Handle(
      Object::Allocate<MonomorphicSmiableCall>(Heap::kOld));
  result.StoreNonPointer(&result.untag()->expected_cid_, expected_cid);
  result.StoreNonPointer(&result.untag()->entrypoint_, target.EntryPoint());
  return result.ptr();
}
16358
// Fixed label; cached cid/entrypoint are not expanded here.
const char* MonomorphicSmiableCall::ToCString() const {
  return "MonomorphicSmiableCall";
}
16362
const char* CallSiteData::ToCString() const {
  // CallSiteData is an abstract class. We should never reach here.
  UNREACHABLE();
  return "CallSiteData";
}
16368
// Installs the (canonical, non-null) selector name for this call site.
void CallSiteData::set_target_name(const String& value) const {
  ASSERT(!value.IsNull());
  ASSERT(value.IsCanonical());
  untag()->set_target_name(value.ptr());
}
16374
// Installs the (non-null) arguments descriptor array for this call site.
void CallSiteData::set_arguments_descriptor(const Array& value) const {
  ASSERT(!value.IsNull());
  untag()->set_args_descriptor(value.ptr());
}
16379
16380#if !defined(DART_PRECOMPILED_RUNTIME)
// Records the receiver's static type and, when profitable (single tested
// argument with an instantiated, non-FutureOr generic class), turns on
// exactness tracking for this call site.
void ICData::SetReceiversStaticType(const AbstractType& type) const {
  untag()->set_receivers_static_type(type.ptr());

  if (!type.IsNull() && type.HasTypeClass() && (NumArgsTested() == 1) &&
      type.IsInstantiated() && !type.IsFutureOrType()) {
    const Class& cls = Class::Handle(type.type_class());
    if (cls.IsGeneric()) {
      // Only generic classes can benefit from exactness tracking.
      set_tracking_exactness(true);
    }
  }
}
16392#endif
16393
// Writes the call target into an entry of the IC data array starting at
// `data_pos`. JIT stores the Function; AOT additionally caches the Code so
// calls avoid a lookup through the function.
void ICData::SetTargetAtPos(const Array& data,
                            intptr_t data_pos,
                            intptr_t num_args_tested,
                            const Function& target) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  // JIT
  data.SetAt(data_pos + TargetIndexFor(num_args_tested), target);
#else
  // AOT
  ASSERT(target.HasCode());
  const Code& code = Code::Handle(target.CurrentCode());
  data.SetAt(data_pos + CodeIndexFor(num_args_tested), code);
  data.SetAt(data_pos + EntryPointIndexFor(num_args_tested), target);
#endif
}
16409
16411 return String::HashRawSymbol(target_name()) ^ deopt_id();
16412}
16413
// Zone-allocated one-line summary of this IC data (selector + counts).
const char* ICData::ToCString() const {
  Zone* zone = Thread::Current()->zone();
  const String& name = String::Handle(zone, target_name());
  return zone->PrintToString("ICData(%s num-args: %" Pd " num-checks: %" Pd
                             " type-args-len: %" Pd ", deopt-id: %" Pd ")",
                             name.ToCString(), NumArgsTested(),
                             NumberOfChecks(), TypeArgsLen(), deopt_id());
}
16422
// Returns the function owning this IC data. The owner slot may instead hold
// the "original" ICData (set up via SetOriginal); in that case recurse to
// its owner. A null owner is only legal in full-AOT snapshots.
FunctionPtr ICData::Owner() const {
  Object& obj = Object::Handle(untag()->owner());
  if (obj.IsNull()) {
    ASSERT(Dart::vm_snapshot_kind() == Snapshot::kFullAOT);
    return Function::null();
  } else if (obj.IsFunction()) {
    return Function::Cast(obj).ptr();
  } else {
    // owner() refers to the original ICData; delegate to it.
    ICData& original = ICData::Handle();
    original ^= obj.ptr();
    return original.Owner();
  }
}
16436
// Returns the original ICData this one was cloned from, or itself if it is
// the original (owner slot holds a Function, not an ICData).
ICDataPtr ICData::Original() const {
  if (IsNull()) {
    return ICData::null();
  }
  if (untag()->owner()->IsICData()) {
    return static_cast<ICDataPtr>(untag()->owner());
  }
  return this->ptr();
}
16446
// Links this (cloned) ICData back to its original via the owner slot.
void ICData::SetOriginal(const ICData& value) const {
  ASSERT(value.IsOriginal());
  ASSERT(!value.IsNull());
  untag()->set_owner(static_cast<ObjectPtr>(value.ptr()));
}
16452
// Sets the owning function (marks this ICData as an original).
void ICData::set_owner(const Function& value) const {
  untag()->set_owner(static_cast<ObjectPtr>(value.ptr()));
}
16456
16457void ICData::set_deopt_id(intptr_t value) const {
16458#if defined(DART_PRECOMPILED_RUNTIME)
16459 UNREACHABLE();
16460#else
16462 StoreNonPointer(&untag()->deopt_id_, value);
16463#endif
16464}
16465
// Installs the backing entries array. Uses a release store so concurrent
// readers of entries() observe a fully initialized array.
void ICData::set_entries(const Array& value) const {
  ASSERT(!value.IsNull());
  untag()->set_entries<std::memory_order_release>(value.ptr());
}
16470
// Number of arguments whose class ids are checked by each entry.
intptr_t ICData::NumArgsTested() const {
  return untag()->state_bits_.Read<NumArgsTestedBits>();
}
16474
// Sets the tested-argument count (stored in a 2-bit field, hence 0..3).
void ICData::SetNumArgsTested(intptr_t value) const {
  ASSERT(Utils::IsUint(2, value));
  untag()->state_bits_.Update<NumArgsTestedBits>(value);
}
16479
// Number of type arguments at this call site (from the args descriptor).
intptr_t CallSiteData::TypeArgsLen() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.TypeArgsLen();
}
16484
// Argument count including the type-arguments vector, if any.
intptr_t CallSiteData::CountWithTypeArgs() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.CountWithTypeArgs();
}
16489
// Argument count excluding the type-arguments vector.
intptr_t CallSiteData::CountWithoutTypeArgs() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.Count();
}
16494
// Frame-slot size of the arguments, excluding the type-arguments vector.
intptr_t CallSiteData::SizeWithoutTypeArgs() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.Size();
}
16499
// Frame-slot size of the arguments, including the type-arguments vector.
intptr_t CallSiteData::SizeWithTypeArgs() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.SizeWithTypeArgs();
}
16504
// Bitmask of deopt reasons recorded at this call site.
uint32_t ICData::DeoptReasons() const {
  return untag()->state_bits_.Read<DeoptReasonBits>();
}
16508
// Overwrites the deopt-reason bitmask.
void ICData::SetDeoptReasons(uint32_t reasons) const {
  untag()->state_bits_.Update<DeoptReasonBits>(reasons);
}
16512
// Whether `reason` has been recorded; only recordable reasons are queryable.
bool ICData::HasDeoptReason(DeoptReasonId reason) const {
  ASSERT(reason <= kLastRecordedDeoptReason);
  return (DeoptReasons() & (1 << reason)) != 0;
}
16517
// Records `reason` (atomically OR-ed in); reasons past the recordable range
// are silently dropped.
void ICData::AddDeoptReason(DeoptReasonId reason) const {
  if (reason <= kLastRecordedDeoptReason) {
    untag()->state_bits_.FetchOr<DeoptReasonBits>(1 << reason);
  }
}
16523
16524const char* ICData::RebindRuleToCString(RebindRule r) {
16525 switch (r) {
16526#define RULE_CASE(Name) \
16527 case RebindRule::k##Name: \
16528 return #Name;
16530#undef RULE_CASE
16531 default:
16532 return nullptr;
16533 }
16534}
16535
16536bool ICData::ParseRebindRule(const char* str, RebindRule* out) {
16537#define RULE_CASE(Name) \
16538 if (strcmp(str, #Name) == 0) { \
16539 *out = RebindRule::k##Name; \
16540 return true; \
16541 }
16543#undef RULE_CASE
16544 return false;
16545}
16546
// Reads the rebind rule from the packed state bits.
ICData::RebindRule ICData::rebind_rule() const {
  return RebindRule(untag()->state_bits_.Read<RebindRuleBits>());
}
16550
// Writes the rebind rule into the packed state bits.
void ICData::set_rebind_rule(uint32_t rebind_rule) const {
  untag()->state_bits_.Update<ICData::RebindRuleBits>(rebind_rule);
}
16554
// Every rebind rule other than kInstance denotes a static call site.
bool ICData::is_static_call() const {
  return rebind_rule() != kInstance;
}
16558
// Resets the entire packed state word (num-args, deopt reasons, rule, ...).
void ICData::clear_state_bits() const {
  untag()->state_bits_ = 0;
}
16562
16563intptr_t ICData::TestEntryLengthFor(intptr_t num_args,
16564 bool tracking_exactness) {
16565 return num_args + 1 /* target function*/ + 1 /* frequency */ +
16566 (tracking_exactness ? 1 : 0) /* exactness state */;
16567}
16568
// Slot count of one entry for this instance's configuration.
intptr_t ICData::TestEntryLength() const {
  return TestEntryLengthFor(NumArgsTested(), is_tracking_exactness());
}
16572
// Total number of entries in the backing array, including the sentinel.
intptr_t ICData::Length() const {
  return (Smi::Value(entries()->untag()->length()) / TestEntryLength());
}
16576
// Number of real check entries (total minus the trailing sentinel entry).
intptr_t ICData::NumberOfChecks() const {
  DEBUG_ONLY(AssertInvariantsAreSatisfied());
  return Length() - 1;
}
16581
// Convenience predicate for asserting an exact check count.
bool ICData::NumberOfChecksIs(intptr_t n) const {
  DEBUG_ONLY(AssertInvariantsAreSatisfied());
  return NumberOfChecks() == n;
}
16586
#if defined(DEBUG)
void ICData::AssertInvariantsAreSatisfied() const {
  // See layout and invariant of [ICData] in class comment in object.h.
  //
  // This method can be called without holding any locks, it will grab a
  // snapshot of `entries()` and do it's verification logic on that.
  auto zone = Thread::Current()->zone();
  const auto& array = Array::Handle(zone, entries());

  const intptr_t entry_length = TestEntryLength();
  const intptr_t num_checks = array.Length() / entry_length - 1;
  const intptr_t num_args = NumArgsTested();

  /// Backing store must be multiple of entry length.
  ASSERT((array.Length() % entry_length) == 0);

  /// Entries must be valid.
  for (intptr_t i = 0; i < num_checks; ++i) {
    // Should be valid entry.
    const intptr_t start = entry_length * i;
    // NOTE(review): the inner loop variable shadows the outer `i`; correct as
    // written (the outer index is captured in `start` first) but fragile.
    for (intptr_t i = 0; i < num_args; ++i) {
      // Class-id slots hold Smis and never the illegal-cid marker.
      ASSERT(!array.At(start + i)->IsHeapObject());
      ASSERT(array.At(start + i) != smi_illegal_cid().ptr());
    }
    ASSERT(array.At(start + TargetIndexFor(num_args))->IsHeapObject());
    if (is_tracking_exactness()) {
      ASSERT(!array.At(start + ExactnessIndexFor(num_args))->IsHeapObject());
    }
  }

  /// Sentinel at end must be valid.
  const intptr_t sentinel_start = num_checks * entry_length;
  for (intptr_t i = 0; i < entry_length - 1; ++i) {
    ASSERT(array.At(sentinel_start + i) == smi_illegal_cid().ptr());
  }
  if (num_checks == 0) {
    // Empty IC data must use the shared pre-allocated backing store.
    ASSERT(array.At(sentinel_start + entry_length - 1) ==
           smi_illegal_cid().ptr());
    ASSERT(ICData::CachedEmptyICDataArray(num_args, is_tracking_exactness()) ==
           array.ptr());
  } else {
    // Non-empty: the sentinel's last slot is a back-reference to this ICData.
    ASSERT(array.At(sentinel_start + entry_length - 1) == ptr());
  }

  // Invariants for ICData of static calls.
  if (num_args == 0) {
    ASSERT(Length() == 2);
    ASSERT(TestEntryLength() == 2);
  }
}
#endif  // defined(DEBUG)
16638
// Discounts any checks with usage of zero.
intptr_t ICData::NumberOfUsedChecks() const {
  const intptr_t n = NumberOfChecks();
  intptr_t count = 0;
  for (intptr_t i = 0; i < n; i++) {
    if (GetCountAt(i) > 0) {
      count++;
    }
  }
  return count;
}
16650
// Initializes the trailing sentinel entry of `data`: every slot gets the
// illegal-cid marker except the last, which stores `back_ref` (normally the
// owning ICData, used to find the ICData from its entries array).
void ICData::WriteSentinel(const Array& data,
                           intptr_t test_entry_length,
                           const Object& back_ref) {
  ASSERT(!data.IsNull());
  RELEASE_ASSERT(smi_illegal_cid().Value() == kIllegalCid);
  const intptr_t entry_start = data.Length() - test_entry_length;
  for (intptr_t i = 0; i < test_entry_length - 1; i++) {
    data.SetAt(entry_start + i, smi_illegal_cid());
  }
  data.SetAt(entry_start + test_entry_length - 1, back_ref);
}
16662
#if defined(DEBUG)
// Used in asserts to verify that a check is not added twice.
bool ICData::HasCheck(const GrowableArray<intptr_t>& cids) const {
  return FindCheck(cids) != -1;
}
#endif  // DEBUG
16669
// Returns the index of the entry whose class ids match `cids` exactly, or
// -1 if none. `cids` must have (at least) NumArgsTested elements.
intptr_t ICData::FindCheck(const GrowableArray<intptr_t>& cids) const {
  const intptr_t len = NumberOfChecks();
  GrowableArray<intptr_t> class_ids;
  for (intptr_t i = 0; i < len; i++) {
    GetClassIdsAt(i, &class_ids);
    bool matches = true;
    for (intptr_t k = 0; k < class_ids.length(); k++) {
      ASSERT(class_ids[k] != kIllegalCid);
      if (class_ids[k] != cids[k]) {
        matches = false;
        break;
      }
    }
    if (matches) {
      return i;
    }
  }
  return -1;
}
16689
// Shrinks this ICData to its first |num_checks| checks, rewriting the
// sentinel entry. Only valid during reload (witnessed by |proof_of_reload|).
void ICData::TruncateTo(intptr_t num_checks,
                        const CallSiteResetter& proof_of_reload) const {
  USE(proof_of_reload); // This method can only be called during reload.

  DEBUG_ONLY(AssertInvariantsAreSatisfied());
  ASSERT(num_checks <= NumberOfChecks());

  // Nothing to do.
  if (NumberOfChecks() == num_checks) return;

  auto thread = Thread::Current();
  // NOTE(review): thread->ArrayHandle() is a reusable handle; a
  // REUSABLE_ARRAY_HANDLESCOPE guard appears to be elided in this view --
  // confirm against upstream before editing.
  auto& array = thread->ArrayHandle();

  // If we make the ICData empty, use the pre-allocated shared backing stores.
  const intptr_t num_args = NumArgsTested();
  if (num_checks == 0) {
    array = ICData::CachedEmptyICDataArray(num_args, is_tracking_exactness());
    set_entries(array);
    return;
  }

  // Otherwise truncate array and initialize sentinel.
  // Use kSmiCid for all slots in the entry except the last, which is a backref
  // to ICData. The "+ 1" accounts for the sentinel entry itself.
  const intptr_t entry_length = TestEntryLength();
  array = entries();
  array.Truncate((num_checks + 1) * entry_length);
  WriteSentinel(array, entry_length, *this);
}
16720
16721void ICData::ClearCountAt(intptr_t index,
16722 const CallSiteResetter& proof_of_reload) const {
16723 USE(proof_of_reload); // This method can only be called during reload.
16724
16725 ASSERT(index >= 0);
16726 ASSERT(index < NumberOfChecks());
16727 SetCountAt(index, 0);
16728}
16729
// During reload, drops collected type feedback and re-seeds the ICData with
// a single (kObjectCid, ..., func) entry whose count is zero.
void ICData::ClearAndSetStaticTarget(
    const Function& func,
    const CallSiteResetter& proof_of_reload) const {
  USE(proof_of_reload); // This method can only be called during reload.

  // The final entry is always the sentinel.
  DEBUG_ONLY(AssertInvariantsAreSatisfied());

  // Shared (immutable) empty arrays cannot and need not be rewritten.
  if (IsImmutable()) return;
  if (NumberOfChecks() == 0) return;

  // Leave one entry.
  TruncateTo(/*num_checks=*/1, proof_of_reload);

  // Reinitialize the one and only entry.
  const intptr_t num_args = NumArgsTested();
  Thread* thread = Thread::Current();
  // NOTE(review): thread->ArrayHandle() is a reusable handle; a
  // REUSABLE_ARRAY_HANDLESCOPE guard appears to be elided in this view --
  // confirm against upstream before editing.
  Array& data = thread->ArrayHandle();
  data = entries();
  const Smi& object_cid = Smi::Handle(Smi::New(kObjectCid));
  for (intptr_t i = 0; i < num_args; i++) {
    data.SetAt(i, object_cid);
  }
  data.SetAt(TargetIndexFor(num_args), func);
  data.SetAt(CountIndexFor(num_args), Object::smi_zero());
}
16757
16758bool ICData::ValidateInterceptor(const Function& target) const {
16759#if !defined(DART_PRECOMPILED_RUNTIME)
16760 const String& name = String::Handle(target_name());
16761 if (Function::IsDynamicInvocationForwarderName(name)) {
16762 return Function::DemangleDynamicInvocationForwarderName(name) ==
16763 target.name();
16764 }
16765#endif
16766 ObjectStore* store = IsolateGroup::Current()->object_store();
16767 ASSERT((target.ptr() == store->simple_instance_of_true_function()) ||
16768 (target.ptr() == store->simple_instance_of_false_function()));
16769 const String& instance_of_name = String::Handle(
16770 Library::PrivateCoreLibName(Symbols::_simpleInstanceOf()).ptr());
16771 ASSERT(target_name() == instance_of_name.ptr());
16772 return true;
16773}
16774
16775void ICData::EnsureHasCheck(const GrowableArray<intptr_t>& class_ids,
16776 const Function& target,
16777 intptr_t count) const {
16778 SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());
16779
16780 if (FindCheck(class_ids) != -1) return;
16781 AddCheckInternal(class_ids, target, count);
16782}
16783
16784void ICData::AddCheck(const GrowableArray<intptr_t>& class_ids,
16785 const Function& target,
16786 intptr_t count) const {
16787 SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());
16788 AddCheckInternal(class_ids, target, count);
16789}
16790
// Appends a new check entry (cid tuple -> target, count) to the entries
// array. Caller must hold the isolate group's type feedback mutex.
void ICData::AddCheckInternal(const GrowableArray<intptr_t>& class_ids,
                              const Function& target,
                              intptr_t count) const {
  ASSERT(
      IsolateGroup::Current()->type_feedback_mutex()->IsOwnedByCurrentThread());

  ASSERT(!is_tracking_exactness());
  ASSERT(!target.IsNull());
  ASSERT((target.name() == target_name()) || ValidateInterceptor(target));
  DEBUG_ASSERT(!HasCheck(class_ids));
  ASSERT(NumArgsTested() > 1); // Otherwise use 'AddReceiverCheck'.
  const intptr_t num_args_tested = NumArgsTested();
  ASSERT(class_ids.length() == num_args_tested);
  const intptr_t old_num = NumberOfChecks();
  Array& data = Array::Handle(entries());

  // ICData of static calls with NumArgsTested() > 0 have initially a
  // dummy set of cids entered (see ICData::NewForStaticCall). That entry is
  // overwritten by first real type feedback data.
  if (old_num == 1 && num_args_tested == 2) {
    const bool has_dummy_entry =
        Smi::Value(Smi::RawCast(data.At(0))) == kObjectCid &&
        Smi::Value(Smi::RawCast(data.At(1))) == kObjectCid;
    if (has_dummy_entry) {
      ASSERT(target.ptr() == data.At(TargetIndexFor(num_args_tested)));
      // Replace dummy entry in place; the target is already correct.
      Smi& value = Smi::Handle();
      for (intptr_t i = 0; i < NumArgsTested(); i++) {
        ASSERT(class_ids[i] != kIllegalCid);
        value = Smi::New(class_ids[i]);
        data.SetAt(i, value);
      }
      return;
    }
  }
  // Grow the backing array by one entry; |index| is the new entry's slot.
  intptr_t index = -1;
  data = Grow(&index);
  ASSERT(!data.IsNull());
  intptr_t data_pos = index * TestEntryLength();
  Smi& value = Smi::Handle();
  for (intptr_t i = 0; i < class_ids.length(); i++) {
    // kIllegalCid is used as terminating value, do not add it.
    ASSERT(class_ids[i] != kIllegalCid);
    value = Smi::New(class_ids[i]);
    data.SetAt(data_pos + i, value);
  }
  ASSERT(!target.IsNull());
  data.SetAt(data_pos + TargetIndexFor(num_args_tested), target);
  value = Smi::New(count);
  data.SetAt(data_pos + CountIndexFor(num_args_tested), value);
  // Multithreaded access to ICData requires setting of array to be the last
  // operation.
  set_entries(data);
}
16845
16846ArrayPtr ICData::Grow(intptr_t* index) const {
16847 DEBUG_ONLY(AssertInvariantsAreSatisfied());
16848
16849 *index = NumberOfChecks();
16850 Array& data = Array::Handle(entries());
16851 const intptr_t new_len = data.Length() + TestEntryLength();
16852 data = Array::Grow(data, new_len, Heap::kOld);
16853 WriteSentinel(data, TestEntryLength(), *this);
16854 return data.ptr();
16855}
16856
16857void ICData::DebugDump() const {
16858 const Function& owner = Function::Handle(Owner());
16859 THR_Print("ICData::DebugDump\n");
16860 THR_Print("Owner = %s [deopt=%" Pd "]\n", owner.ToCString(), deopt_id());
16861 THR_Print("NumArgsTested = %" Pd "\n", NumArgsTested());
16862 THR_Print("Length = %" Pd "\n", Length());
16863 THR_Print("NumberOfChecks = %" Pd "\n", NumberOfChecks());
16864
16865 GrowableArray<intptr_t> class_ids;
16866 for (intptr_t i = 0; i < NumberOfChecks(); i++) {
16867 THR_Print("Check[%" Pd "]:", i);
16868 GetClassIdsAt(i, &class_ids);
16869 for (intptr_t c = 0; c < class_ids.length(); c++) {
16870 THR_Print(" %" Pd "", class_ids[c]);
16871 }
16872 THR_Print("--- %" Pd " hits\n", GetCountAt(i));
16873 }
16874}
16875
16876void ICData::EnsureHasReceiverCheck(intptr_t receiver_class_id,
16877 const Function& target,
16878 intptr_t count,
16879 StaticTypeExactnessState exactness) const {
16880 SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());
16881
16882 GrowableArray<intptr_t> class_ids(1);
16883 class_ids.Add(receiver_class_id);
16884 if (FindCheck(class_ids) != -1) return;
16885
16886 AddReceiverCheckInternal(receiver_class_id, target, count, exactness);
16887}
16888
16889void ICData::AddReceiverCheck(intptr_t receiver_class_id,
16890 const Function& target,
16891 intptr_t count,
16892 StaticTypeExactnessState exactness) const {
16893 SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());
16894 AddReceiverCheckInternal(receiver_class_id, target, count, exactness);
16895}
16896
// Appends a single-receiver check entry. If the new cid is kSmiCid, the
// entry is placed in slot 0 (and the previous slot-0 entry is moved) so the
// fast path that probes slot 0 first finds Smi receivers immediately.
void ICData::AddReceiverCheckInternal(
    intptr_t receiver_class_id,
    const Function& target,
    intptr_t count,
    StaticTypeExactnessState exactness) const {
#if defined(DEBUG)
  GrowableArray<intptr_t> class_ids(1);
  class_ids.Add(receiver_class_id);
  ASSERT(!HasCheck(class_ids));
#endif // DEBUG
  ASSERT(!target.IsNull());
  const intptr_t kNumArgsTested = 1;
  ASSERT(NumArgsTested() == kNumArgsTested); // Otherwise use 'AddCheck'.
  ASSERT(receiver_class_id != kIllegalCid);

  intptr_t index = -1;
  Array& data = Array::Handle(Grow(&index));
  intptr_t data_pos = index * TestEntryLength();
  if ((receiver_class_id == kSmiCid) && (data_pos > 0)) {
    ASSERT(GetReceiverClassIdAt(0) != kSmiCid);
    // Move class occupying position 0 to the data_pos.
    for (intptr_t i = 0; i < TestEntryLength(); i++) {
      data.SetAt(data_pos + i, Object::Handle(data.At(i)));
    }
    // Insert kSmiCid in position 0.
    data_pos = 0;
  }
  data.SetAt(data_pos, Smi::Handle(Smi::New(receiver_class_id)));
  SetTargetAtPos(data, data_pos, kNumArgsTested, target);

#if !defined(DART_PRECOMPILED_RUNTIME)
  // Counts (and optional exactness state) only exist in the JIT.
  data.SetAt(data_pos + CountIndexFor(kNumArgsTested),
             Smi::Handle(Smi::New(count)));
  if (is_tracking_exactness()) {
    data.SetAt(data_pos + ExactnessIndexFor(kNumArgsTested),
               Smi::Handle(Smi::New(exactness.Encode())));
  }
#endif

  // Multithreaded access to ICData requires setting of array to be the last
  // operation.
  set_entries(data);
}
16940
// Returns the recorded static-type exactness state for check |index|, or
// NotTracking() when this ICData does not track exactness.
StaticTypeExactnessState ICData::GetExactnessAt(intptr_t index) const {
  if (!is_tracking_exactness()) {
    return StaticTypeExactnessState::NotTracking();
  }
  Thread* thread = Thread::Current();
  // NOTE(review): thread->ArrayHandle() is a reusable handle; a
  // REUSABLE_ARRAY_HANDLESCOPE guard appears to be elided in this view --
  // confirm against upstream before editing.
  Array& data = thread->ArrayHandle();
  data = entries();
  intptr_t data_pos =
      index * TestEntryLength() + ExactnessIndexFor(NumArgsTested());
  // NOTE(review): the `return StaticTypeExactnessState::Decode(` line appears
  // to be elided in this view, leaving the expression below orphaned --
  // confirm against upstream before editing.
      Smi::Value(Smi::RawCast(data.At(data_pos))));
}
16954
// Reads check |index| into |class_ids| (one cid per tested argument) and
// |target| (the cached target function for that cid tuple).
void ICData::GetCheckAt(intptr_t index,
                        GrowableArray<intptr_t>* class_ids,
                        Function* target) const {
  ASSERT(index < NumberOfChecks());
  ASSERT(class_ids != nullptr);
  ASSERT(target != nullptr);
  class_ids->Clear();
  Thread* thread = Thread::Current();
  // NOTE(review): thread->ArrayHandle() is a reusable handle; a
  // REUSABLE_ARRAY_HANDLESCOPE guard appears to be elided in this view --
  // confirm against upstream before editing.
  Array& data = thread->ArrayHandle();
  data = entries();
  intptr_t data_pos = index * TestEntryLength();
  for (intptr_t i = 0; i < NumArgsTested(); i++) {
    class_ids->Add(Smi::Value(Smi::RawCast(data.At(data_pos + i))));
  }
  (*target) ^= data.At(data_pos + TargetIndexFor(NumArgsTested()));
}
16972
// Reads only the cid tuple of check |index| into |class_ids| (one cid per
// tested argument), without touching the target or count slots.
void ICData::GetClassIdsAt(intptr_t index,
                           GrowableArray<intptr_t>* class_ids) const {
  ASSERT(index < Length());
  ASSERT(class_ids != nullptr);
  ASSERT(IsValidEntryIndex(index));
  class_ids->Clear();
  Thread* thread = Thread::Current();
  // NOTE(review): thread->ArrayHandle() is a reusable handle; a
  // REUSABLE_ARRAY_HANDLESCOPE guard appears to be elided in this view --
  // confirm against upstream before editing.
  Array& data = thread->ArrayHandle();
  data = entries();
  intptr_t data_pos = index * TestEntryLength();
  for (intptr_t i = 0; i < NumArgsTested(); i++) {
    class_ids->Add(Smi::Value(Smi::RawCast(data.At(data_pos++))));
  }
}
16988
// Reads check |index| of a single-argument ICData into |class_id| and
// |target|. Only valid when exactly one argument is tested.
void ICData::GetOneClassCheckAt(intptr_t index,
                                intptr_t* class_id,
                                Function* target) const {
  ASSERT(class_id != nullptr);
  ASSERT(target != nullptr);
  ASSERT(NumArgsTested() == 1);
  Thread* thread = Thread::Current();
  // NOTE(review): thread->ArrayHandle() is a reusable handle; a
  // REUSABLE_ARRAY_HANDLESCOPE guard appears to be elided in this view --
  // confirm against upstream before editing.
  Array& data = thread->ArrayHandle();
  data = entries();
  const intptr_t data_pos = index * TestEntryLength();
  *class_id = Smi::Value(Smi::RawCast(data.At(data_pos)));
  *target ^= data.At(data_pos + TargetIndexFor(NumArgsTested()));
}
17003
// Returns the (receiver) cid of check |index| for a single-argument ICData.
intptr_t ICData::GetCidAt(intptr_t index) const {
  ASSERT(NumArgsTested() == 1);
  Thread* thread = Thread::Current();
  // NOTE(review): thread->ArrayHandle() is a reusable handle; a
  // REUSABLE_ARRAY_HANDLESCOPE guard appears to be elided in this view --
  // confirm against upstream before editing.
  Array& data = thread->ArrayHandle();
  data = entries();
  const intptr_t data_pos = index * TestEntryLength();
  return Smi::Value(Smi::RawCast(data.At(data_pos)));
}
17013
17014intptr_t ICData::GetClassIdAt(intptr_t index, intptr_t arg_nr) const {
17015 GrowableArray<intptr_t> class_ids;
17016 GetClassIdsAt(index, &class_ids);
17017 return class_ids[arg_nr];
17018}
17019
17020intptr_t ICData::GetReceiverClassIdAt(intptr_t index) const {
17021 ASSERT(index < Length());
17022 ASSERT(IsValidEntryIndex(index));
17023 const intptr_t data_pos = index * TestEntryLength();
17024 NoSafepointScope no_safepoint;
17025 ArrayPtr raw_data = entries();
17026 return Smi::Value(Smi::RawCast(raw_data->untag()->element(data_pos)));
17027}
17028
// Returns the cached target function of check |index| without allocating a
// handle for the result (JIT only; targets are not stored this way in the
// precompiled runtime).
FunctionPtr ICData::GetTargetAt(intptr_t index) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
  return nullptr;
#else
  const intptr_t data_pos =
      index * TestEntryLength() + TargetIndexFor(NumArgsTested());
  // Verify (with handle-based access) that the slot holds a Function before
  // performing the raw, handle-free read below.
  ASSERT(Object::Handle(Array::Handle(entries()).At(data_pos)).IsFunction());

  NoSafepointScope no_safepoint;
  ArrayPtr raw_data = entries();
  return static_cast<FunctionPtr>(raw_data->untag()->element(data_pos));
#endif
}
17043
17044void ICData::IncrementCountAt(intptr_t index, intptr_t value) const {
17045 ASSERT(0 <= value);
17046 ASSERT(value <= Smi::kMaxValue);
17047 SetCountAt(index, Utils::Minimum(GetCountAt(index) + value, Smi::kMaxValue));
17048}
17049
// Stores |value| as the usage counter of check |index|.
void ICData::SetCountAt(intptr_t index, intptr_t value) const {
  ASSERT(0 <= value);
  ASSERT(value <= Smi::kMaxValue);

  Thread* thread = Thread::Current();
  // NOTE(review): thread->ArrayHandle() is a reusable handle; a
  // REUSABLE_ARRAY_HANDLESCOPE guard appears to be elided in this view --
  // confirm against upstream before editing.
  Array& data = thread->ArrayHandle();
  data = entries();
  const intptr_t data_pos =
      index * TestEntryLength() + CountIndexFor(NumArgsTested());
  data.SetAt(data_pos, Smi::Handle(Smi::New(value)));
}
17062
// Returns the usage counter of check |index|. A (rare) negative counter --
// the result of overflow -- is reset to zero on read.
intptr_t ICData::GetCountAt(intptr_t index) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
  return 0;
#else
  Thread* thread = Thread::Current();
  // NOTE(review): thread->ArrayHandle() is a reusable handle; a
  // REUSABLE_ARRAY_HANDLESCOPE guard appears to be elided in this view --
  // confirm against upstream before editing.
  Array& data = thread->ArrayHandle();
  data = entries();
  const intptr_t data_pos =
      index * TestEntryLength() + CountIndexFor(NumArgsTested());
  intptr_t value = Smi::Value(Smi::RawCast(data.At(data_pos)));
  if (value >= 0) return value;

  // The counter very rarely overflows to a negative value, but if it does, we
  // would rather just reset it to zero.
  SetCountAt(index, 0);
  return 0;
#endif
}
17083
17084intptr_t ICData::AggregateCount() const {
17085 if (IsNull()) return 0;
17086 const intptr_t len = NumberOfChecks();
17087 intptr_t count = 0;
17088 for (intptr_t i = 0; i < len; i++) {
17089 count += GetCountAt(i);
17090 }
17091 return count;
17092}
17093
17094#if !defined(DART_PRECOMPILED_RUNTIME)
// Collapses this ICData's checks down to unary checks on argument |arg_nr|:
// entries sharing the same cid are merged (their counts summed) and entries
// with a zero count are dropped.
ICDataPtr ICData::AsUnaryClassChecksForArgNr(intptr_t arg_nr) const {
  ASSERT(!IsNull());
  ASSERT(NumArgsTested() > arg_nr);
  if ((arg_nr == 0) && (NumArgsTested() == 1)) {
    // Frequent case.
    return ptr();
  }
  const intptr_t kNumArgsTested = 1;
  ICData& result = ICData::Handle(ICData::NewFrom(*this, kNumArgsTested));
  const intptr_t len = NumberOfChecks();
  for (intptr_t i = 0; i < len; i++) {
    const intptr_t class_id = GetClassIdAt(i, arg_nr);
    const intptr_t count = GetCountAt(i);
    if (count == 0) {
      continue;
    }
    // Linear scan of the result for an entry already holding this cid.
    intptr_t duplicate_class_id = -1;
    const intptr_t result_len = result.NumberOfChecks();
    for (intptr_t k = 0; k < result_len; k++) {
      if (class_id == result.GetReceiverClassIdAt(k)) {
        duplicate_class_id = k;
        break;
      }
    }
    if (duplicate_class_id >= 0) {
      // This check is valid only when checking the receiver.
      ASSERT((arg_nr != 0) ||
             (result.GetTargetAt(duplicate_class_id) == GetTargetAt(i)));
      result.IncrementCountAt(duplicate_class_id, count);
    } else {
      // This will make sure that Smi is first if it exists.
      result.AddReceiverCheckInternal(class_id,
                                      Function::Handle(GetTargetAt(i)), count,
                                      StaticTypeExactnessState::NotTracking());
    }
  }

  return result.ptr();
}
17134
// (cid, count) tuple used to sort ICData by count.
struct CidCount {
  CidCount(intptr_t cid_, intptr_t count_, Function* f_)
      : cid(cid_), count(count_), function(f_) {}

  // Comparator for GrowableArray::Sort: descending by |count|.
  static int HighestCountFirst(const CidCount* a, const CidCount* b);

  intptr_t cid;
  intptr_t count;
  // NOTE(review): the constructor initializes |function| but no such member
  // is visible here -- a `Function* function;` declaration appears to be
  // elided in this view; confirm against upstream.
};
17146
17147int CidCount::HighestCountFirst(const CidCount* a, const CidCount* b) {
17148 if (a->count > b->count) {
17149 return -1;
17150 }
17151 return (a->count < b->count) ? 1 : 0;
17152}
17153
// Produces a unary-check copy of this ICData whose entries are aggregated by
// cid and sorted by descending usage count.
ICDataPtr ICData::AsUnaryClassChecksSortedByCount() const {
  ASSERT(!IsNull());
  const intptr_t kNumArgsTested = 1;
  const intptr_t len = NumberOfChecks();
  if (len <= 1) {
    // No sorting needed.
    return AsUnaryClassChecks();
  }
  // Aggregate counts per cid, skipping unused (zero count) checks.
  GrowableArray<CidCount> aggregate;
  for (intptr_t i = 0; i < len; i++) {
    const intptr_t class_id = GetClassIdAt(i, 0);
    const intptr_t count = GetCountAt(i);
    if (count == 0) {
      continue;
    }
    bool found = false;
    for (intptr_t r = 0; r < aggregate.length(); r++) {
      if (aggregate[r].cid == class_id) {
        aggregate[r].count += count;
        found = true;
        break;
      }
    }
    if (!found) {
      aggregate.Add(
          CidCount(class_id, count, &Function::ZoneHandle(GetTargetAt(i))));
    }
  }
  aggregate.Sort(CidCount::HighestCountFirst);

  ICData& result = ICData::Handle(ICData::NewFrom(*this, kNumArgsTested));
  ASSERT(result.NumberOfChecksIs(0));
  // Room for all entries and the sentinel.
  const intptr_t data_len = result.TestEntryLength() * (aggregate.length() + 1);
  // Allocate the array but do not assign it to result until we have populated
  // it with the aggregate data and the terminating sentinel.
  const Array& data = Array::Handle(Array::New(data_len, Heap::kOld));
  intptr_t pos = 0;
  for (intptr_t i = 0; i < aggregate.length(); i++) {
    data.SetAt(pos + 0, Smi::Handle(Smi::New(aggregate[i].cid)));
    data.SetAt(pos + TargetIndexFor(1), *aggregate[i].function);
    data.SetAt(pos + CountIndexFor(1),
               Smi::Handle(Smi::New(aggregate[i].count)));

    pos += result.TestEntryLength();
  }
  WriteSentinel(data, result.TestEntryLength(), result);
  result.set_entries(data);
  ASSERT(result.NumberOfChecksIs(aggregate.length()));
  return result.ptr();
}
17205
17206UnlinkedCallPtr ICData::AsUnlinkedCall() const {
17207 ASSERT(NumArgsTested() == 1);
17208 ASSERT(!is_tracking_exactness());
17209 const UnlinkedCall& result = UnlinkedCall::Handle(UnlinkedCall::New());
17210 result.set_target_name(String::Handle(target_name()));
17211 result.set_arguments_descriptor(Array::Handle(arguments_descriptor()));
17212 result.set_can_patch_to_monomorphic(!FLAG_precompiled_mode ||
17213 receiver_cannot_be_smi());
17214 return result.ptr();
17215}
17216
17217bool ICData::HasReceiverClassId(intptr_t class_id) const {
17218 ASSERT(NumArgsTested() > 0);
17219 const intptr_t len = NumberOfChecks();
17220 for (intptr_t i = 0; i < len; i++) {
17221 if (IsUsedAt(i)) {
17222 const intptr_t test_class_id = GetReceiverClassIdAt(i);
17223 if (test_class_id == class_id) {
17224 return true;
17225 }
17226 }
17227 }
17228 return false;
17229}
17230#endif
17231
17232bool ICData::IsUsedAt(intptr_t i) const {
17233 if (GetCountAt(i) <= 0) {
17234 // Do not mistake unoptimized static call ICData for unused.
17235 // See ICData::AddTarget.
17236 // TODO(srdjan): Make this test more robust.
17237 if (NumArgsTested() > 0) {
17238 const intptr_t cid = GetReceiverClassIdAt(i);
17239 if (cid == kObjectCid) {
17240 return true;
17241 }
17242 }
17243 return false;
17244 }
17245 return true;
17246}
17247
17249 for (int i = 0; i <= kCachedICDataMaxArgsTestedWithoutExactnessTracking;
17250 i++) {
17251 cached_icdata_arrays_
17252 [kCachedICDataZeroArgTestedWithoutExactnessTrackingIdx + i] =
17253 ICData::NewNonCachedEmptyICDataArray(i, false);
17254 }
17255 cached_icdata_arrays_[kCachedICDataOneArgWithExactnessTrackingIdx] =
17256 ICData::NewNonCachedEmptyICDataArray(1, true);
17257}
17258
17260 for (int i = 0; i < kCachedICDataArrayCount; ++i) {
17261 cached_icdata_arrays_[i] = nullptr;
17262 }
17263}
17264
17265ArrayPtr ICData::NewNonCachedEmptyICDataArray(intptr_t num_args_tested,
17266 bool tracking_exactness) {
17267 // IC data array must be null terminated (sentinel entry).
17268 const intptr_t len = TestEntryLengthFor(num_args_tested, tracking_exactness);
17269 const Array& array = Array::Handle(Array::New(len, Heap::kOld));
17270 // Only empty [ICData]s are allowed to have a non-ICData backref.
17271 WriteSentinel(array, len, /*back_ref=*/smi_illegal_cid());
17272 array.MakeImmutable();
17273 return array.ptr();
17274}
17275
17276ArrayPtr ICData::CachedEmptyICDataArray(intptr_t num_args_tested,
17277 bool tracking_exactness) {
17278 if (tracking_exactness) {
17279 ASSERT(num_args_tested == 1);
17280 return cached_icdata_arrays_[kCachedICDataOneArgWithExactnessTrackingIdx];
17281 } else {
17282 ASSERT(num_args_tested >= 0);
17283 ASSERT(num_args_tested <=
17284 kCachedICDataMaxArgsTestedWithoutExactnessTracking);
17285 return cached_icdata_arrays_
17286 [kCachedICDataZeroArgTestedWithoutExactnessTrackingIdx +
17287 num_args_tested];
17288 }
17289}
17290
17291bool ICData::IsCachedEmptyEntry(const Array& array) {
17292 for (int i = 0; i < kCachedICDataArrayCount; ++i) {
17293 if (cached_icdata_arrays_[i] == array.ptr()) return true;
17294 }
17295 return false;
17296}
17297
// Allocates and initializes an ICData descriptor (owner, selector, shape,
// rebind rule). Does not initialize the ICData entries array -- the caller
// must attach one via set_entries().
ICDataPtr ICData::NewDescriptor(Zone* zone,
                                const Function& owner,
                                const String& target_name,
                                const Array& arguments_descriptor,
                                intptr_t deopt_id,
                                intptr_t num_args_tested,
                                RebindRule rebind_rule,
                                const AbstractType& receivers_static_type) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  // We should only have null owners in the precompiled runtime, if the
  // owning function for a Code object was optimized out.
  ASSERT(!owner.IsNull());
#endif
  ASSERT(!target_name.IsNull());
  ASSERT(!arguments_descriptor.IsNull());
  ASSERT(Object::icdata_class() != Class::null());
  ASSERT(num_args_tested >= 0);
  // IC data objects are long living objects, allocate them in old generation.
  const auto& result =
      ICData::Handle(zone, Object::Allocate<ICData>(Heap::kOld));
  result.set_owner(owner);
  result.set_target_name(target_name);
  result.set_arguments_descriptor(arguments_descriptor);
  NOT_IN_PRECOMPILED(result.set_deopt_id(deopt_id));
  ASSERT_EQUAL(result.untag()->state_bits_, 0);
  result.set_rebind_rule(rebind_rule);
  result.SetNumArgsTested(num_args_tested);
  NOT_IN_PRECOMPILED(result.SetReceiversStaticType(receivers_static_type));
  return result.ptr();
}
17329
17330bool ICData::IsImmutable() const {
17331 return entries()->IsImmutableArray();
17332}
17333
17334ICDataPtr ICData::New() {
17335 // IC data objects are long living objects, allocate them in old generation.
17336 const auto& result = ICData::Handle(Object::Allocate<ICData>(Heap::kOld));
17337 ASSERT_EQUAL(result.untag()->state_bits_, 0);
17338 result.set_deopt_id(DeoptId::kNone);
17339 return result.ptr();
17340}
17341
17342ICDataPtr ICData::New(const Function& owner,
17343 const String& target_name,
17344 const Array& arguments_descriptor,
17345 intptr_t deopt_id,
17346 intptr_t num_args_tested,
17347 RebindRule rebind_rule,
17348 const AbstractType& receivers_static_type) {
17349 Zone* zone = Thread::Current()->zone();
17350 const ICData& result = ICData::Handle(
17351 zone,
17352 NewDescriptor(zone, owner, target_name, arguments_descriptor, deopt_id,
17353 num_args_tested, rebind_rule, receivers_static_type));
17354 result.set_entries(Array::Handle(
17355 zone,
17356 CachedEmptyICDataArray(num_args_tested, result.is_tracking_exactness())));
17357 return result.ptr();
17358}
17359
// Allocates an ICData pre-populated with a single check (cids -> target,
// count 0) followed by the sentinel entry.
ICDataPtr ICData::NewWithCheck(const Function& owner,
                               const String& target_name,
                               const Array& arguments_descriptor,
                               intptr_t deopt_id,
                               intptr_t num_args_tested,
                               RebindRule rebind_rule,
                               // NOTE(review): the |cids| parameter
                               // declaration appears to be elided in this
                               // view -- confirm against upstream.
                               const Function& target,
                               const AbstractType& receiver_type) {
  ASSERT((cids != nullptr) && !target.IsNull());
  ASSERT(cids->length() == num_args_tested);

  Zone* zone = Thread::Current()->zone();
  const auto& result = ICData::Handle(
      zone,
      NewDescriptor(zone, owner, target_name, arguments_descriptor, deopt_id,
                    num_args_tested, rebind_rule, receiver_type));

  const intptr_t kNumEntries = 2; // 1 entry and a sentinel.
  const intptr_t entry_len =
      TestEntryLengthFor(num_args_tested, result.is_tracking_exactness());
  const auto& array =
      Array::Handle(zone, Array::New(kNumEntries * entry_len, Heap::kOld));

  auto& cid = Smi::Handle(zone);
  for (intptr_t i = 0; i < num_args_tested; ++i) {
    cid = Smi::New((*cids)[i]);
    array.SetAt(i, cid);
  }

  SetTargetAtPos(array, 0, num_args_tested, target);
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Usage counters only exist in the JIT.
  array.SetAt(CountIndexFor(num_args_tested), Object::smi_zero());
#endif
  WriteSentinel(array, entry_len, result);

  result.set_entries(array);

  return result.ptr();
}
17400
17401ICDataPtr ICData::NewForStaticCall(const Function& owner,
17402 const Function& target,
17403 const Array& arguments_descriptor,
17404 intptr_t deopt_id,
17405 intptr_t num_args_tested,
17406 RebindRule rebind_rule) {
17407 // See `MethodRecognizer::NumArgsCheckedForStaticCall`.
17408 ASSERT(num_args_tested == 0 || num_args_tested == 2);
17409 ASSERT(!target.IsNull());
17410
17411 Zone* zone = Thread::Current()->zone();
17412 const auto& target_name = String::Handle(zone, target.name());
17413 GrowableArray<intptr_t> cids(num_args_tested);
17414 if (num_args_tested == 2) {
17415 cids.Add(kObjectCid);
17416 cids.Add(kObjectCid);
17417 }
17418 return ICData::NewWithCheck(owner, target_name, arguments_descriptor,
17419 deopt_id, num_args_tested, rebind_rule, &cids,
17420 target, Object::null_abstract_type());
17421}
17422
17423#if !defined(DART_PRECOMPILED_RUNTIME)
17424ICDataPtr ICData::NewFrom(const ICData& from, intptr_t num_args_tested) {
17425 // See comment in [ICData::Clone] why we access the megamorphic bit first.
17426 const bool is_megamorphic = from.is_megamorphic();
17427
17428 const ICData& result = ICData::Handle(ICData::New(
17429 Function::Handle(from.Owner()), String::Handle(from.target_name()),
17430 Array::Handle(from.arguments_descriptor()), from.deopt_id(),
17431 num_args_tested, from.rebind_rule(),
17432 AbstractType::Handle(from.receivers_static_type())));
17433 // Copy deoptimization reasons.
17434 result.SetDeoptReasons(from.DeoptReasons());
17435 result.set_is_megamorphic(is_megamorphic);
17436 return result.ptr();
17437}
17438
// Deep-copies |from|, including its entries array (with the backref in the
// sentinel re-pointed at the clone).
ICDataPtr ICData::Clone(const ICData& from) {
  Zone* zone = Thread::Current()->zone();

  // We have to check the megamorphic bit before accessing the entries of the
  // ICData to ensure all writes to the entries have been flushed and are
  // visible at this point.
  //
  // This will allow us to maintain the invariant that if the megamorphic bit is
  // set, the number of entries in the ICData have reached the limit.
  const bool is_megamorphic = from.is_megamorphic();

  const ICData& result = ICData::Handle(
      zone, ICData::NewDescriptor(
                zone, Function::Handle(zone, from.Owner()),
                String::Handle(zone, from.target_name()),
                Array::Handle(zone, from.arguments_descriptor()),
                from.deopt_id(), from.NumArgsTested(), from.rebind_rule(),
                AbstractType::Handle(zone, from.receivers_static_type())));
  // Clone entry array. The shared empty arrays can be reused directly since
  // their sentinel holds a smi rather than a backref.
  const Array& from_array = Array::Handle(zone, from.entries());
  if (ICData::IsCachedEmptyEntry(from_array)) {
    result.set_entries(from_array);
  } else {
    const intptr_t len = from_array.Length();
    const Array& cloned_array =
        Array::Handle(zone, Array::New(len, Heap::kOld));
    Object& obj = Object::Handle(zone);
    for (intptr_t i = 0; i < len; i++) {
      obj = from_array.At(i);
      cloned_array.SetAt(i, obj);
    }
    // Update backref in our clone.
    cloned_array.SetAt(cloned_array.Length() - 1, result);
    result.set_entries(cloned_array);
  }
  // Copy deoptimization reasons.
  result.SetDeoptReasons(from.DeoptReasons());
  result.set_is_megamorphic(is_megamorphic);

  DEBUG_ONLY(result.AssertInvariantsAreSatisfied());

  return result.ptr();
}
17482#endif
17483
17484ICDataPtr ICData::ICDataOfEntriesArray(const Array& array) {
17485 const auto& back_ref = Object::Handle(array.At(array.Length() - 1));
17486 if (back_ref.ptr() == smi_illegal_cid().ptr()) {
17487 ASSERT(IsCachedEmptyEntry(array));
17488 return ICData::null();
17489 }
17490
17491 const auto& ic_data = ICData::Cast(back_ref);
17492 DEBUG_ONLY(ic_data.AssertInvariantsAreSatisfied());
17493 return ic_data.ptr();
17494}
17495
17496const char* WeakSerializationReference::ToCString() const {
17497 return Object::Handle(target()).ToCString();
17498}
17499
17500ObjectPtr WeakSerializationReference::New(const Object& target,
17501 const Object& replacement) {
17502 ASSERT(Object::weak_serialization_reference_class() != Class::null());
17503 // Don't wrap any object in the VM heap, as all objects in the VM isolate
17504 // heap are currently serialized.
17505 //
17506 // Note that we _do_ wrap Smis if requested. Smis are serialized in the Mint
17507 // cluster, and so dropping them if not strongly referenced saves space in
17508 // the snapshot.
17509 if (target.ptr()->IsHeapObject() && target.InVMIsolateHeap()) {
17510 return target.ptr();
17511 }
17512 // If the target is a WSR that already uses the replacement, then return it.
17513 if (target.IsWeakSerializationReference() &&
17514 WeakSerializationReference::Cast(target).replacement() ==
17515 replacement.ptr()) {
17516 return target.ptr();
17517 }
17518 const auto& result = WeakSerializationReference::Handle(
17519 Object::Allocate<WeakSerializationReference>(Heap::kOld));
17520 // Don't nest WSRs, instead just use the old WSR's target.
17521 result.untag()->set_target(target.IsWeakSerializationReference()
17522 ? WeakSerializationReference::Unwrap(target)
17523 : target.ptr());
17524 result.untag()->set_replacement(replacement.ptr());
17525 return result.ptr();
17526}
17527
17528const char* WeakArray::ToCString() const {
17529 return Thread::Current()->zone()->PrintToString("WeakArray len:%" Pd,
17530 Length());
17531}
17532
17533WeakArrayPtr WeakArray::New(intptr_t length, Heap::Space space) {
17534 ASSERT(Object::weak_array_class() != Class::null());
17535 if (!IsValidLength(length)) {
17536 // This should be caught before we reach here.
17537 FATAL("Fatal error in WeakArray::New: invalid len %" Pd "\n", length);
17538 }
17539 auto raw = Object::Allocate<WeakArray>(space, length);
17540 NoSafepointScope no_safepoint;
17541 raw->untag()->set_length(Smi::New(length));
17542 return raw;
17543}
17544
17545#if defined(INCLUDE_IL_PRINTER)
17546Code::Comments& Code::Comments::New(intptr_t count) {
17547 Comments* comments;
17548 if (count < 0 || count > (kIntptrMax / kNumberOfEntries)) {
17549 // This should be caught before we reach here.
17550 FATAL("Fatal error in Code::Comments::New: invalid count %" Pd "\n", count);
17551 }
17552 if (count == 0) {
17553 comments = new Comments(Object::empty_array());
17554 } else {
17555 const Array& data =
17556 Array::Handle(Array::New(count * kNumberOfEntries, Heap::kOld));
17557 comments = new Comments(data);
17558 }
17559 return *comments;
17560}
17561
17562intptr_t Code::Comments::Length() const {
17563 if (comments_.IsNull()) {
17564 return 0;
17565 }
17566 return comments_.Length() / kNumberOfEntries;
17567}
17568
17569intptr_t Code::Comments::PCOffsetAt(intptr_t idx) const {
17570 return Smi::Value(
17571 Smi::RawCast(comments_.At(idx * kNumberOfEntries + kPCOffsetEntry)));
17572}
17573
17574void Code::Comments::SetPCOffsetAt(intptr_t idx, intptr_t pc) {
17575 comments_.SetAt(idx * kNumberOfEntries + kPCOffsetEntry,
17576 Smi::Handle(Smi::New(pc)));
17577}
17578
17579const char* Code::Comments::CommentAt(intptr_t idx) const {
17580 string_ ^= comments_.At(idx * kNumberOfEntries + kCommentEntry);
17581 return string_.ToCString();
17582}
17583
17584void Code::Comments::SetCommentAt(intptr_t idx, const String& comment) {
17585 comments_.SetAt(idx * kNumberOfEntries + kCommentEntry, comment);
17586}
17587
// Wraps an existing comments array; |string_| caches the last-read comment.
Code::Comments::Comments(const Array& comments)
    : comments_(comments), string_(String::Handle()) {}
17590#endif // defined(INCLUDE_IL_PRINTER)
17591
17593 switch (kind) {
17594 case EntryKind::kNormal:
17595 return "Normal";
17596 case EntryKind::kUnchecked:
17597 return "Unchecked";
17598 case EntryKind::kMonomorphic:
17599 return "Monomorphic";
17600 case EntryKind::kMonomorphicUnchecked:
17601 return "MonomorphicUnchecked";
17602 default:
17603 UNREACHABLE();
17604 return nullptr;
17605 }
17606}
17607
17608bool Code::ParseEntryKind(const char* str, EntryKind* out) {
17609 if (strcmp(str, "Normal") == 0) {
17611 return true;
17612 } else if (strcmp(str, "Unchecked") == 0) {
17613 *out = EntryKind::kUnchecked;
17614 return true;
17615 } else if (strcmp(str, "Monomorphic") == 0) {
17616 *out = EntryKind::kMonomorphic;
17617 return true;
17618 } else if (strcmp(str, "MonomorphicUnchecked") == 0) {
17619 *out = EntryKind::kMonomorphicUnchecked;
17620 return true;
17621 }
17622 return false;
17623}
17624
// Returns the local variable descriptors, computing them lazily for
// unoptimized code.
// NOTE(review): the declaration/initialization of |v| and the line that
// computes the descriptors are not visible in this excerpt — confirm against
// the full file.
LocalVarDescriptorsPtr Code::GetLocalVarDescriptors() const {
  if (v.IsNull()) {
    ASSERT(!is_optimized());
    const Function& f = Function::Handle(function());
    ASSERT(!f.IsIrregexpFunction());  // Not yet implemented.
  }
  return var_descriptors();
}
17635
17636void Code::set_owner(const Object& owner) const {
17637#if defined(DEBUG)
17638 const auto& unwrapped_owner =
17640 ASSERT(unwrapped_owner.IsFunction() || unwrapped_owner.IsClass() ||
17641 unwrapped_owner.IsAbstractType());
17642#endif
17643 untag()->set_owner(owner.ptr());
17644}
17645
17646void Code::set_state_bits(intptr_t bits) const {
17647 StoreNonPointer(&untag()->state_bits_, bits);
17648}
17649
17651 set_state_bits(OptimizedBit::update(value, untag()->state_bits_));
17652}
17653
17655 set_state_bits(ForceOptimizedBit::update(value, untag()->state_bits_));
17656}
17657
17658void Code::set_is_alive(bool value) const {
17659 set_state_bits(AliveBit::update(value, untag()->state_bits_));
17660}
17661
17663 set_state_bits(DiscardedBit::update(value, untag()->state_bits_));
17664}
17665
17667 ASSERT(maps.IsOld());
17668 untag()->set_compressed_stackmaps(maps.ptr());
17669}
17670
#if !defined(DART_PRECOMPILED_RUNTIME)
// In JIT mode the catch_entry slot stores the variable count as a Smi.
intptr_t Code::num_variables() const {
  ASSERT(!FLAG_precompiled_mode);
  return Smi::Value(Smi::RawCast(untag()->catch_entry()));
}
void Code::set_num_variables(intptr_t num_variables) const {
  ASSERT(!FLAG_precompiled_mode);
  untag()->set_catch_entry(Smi::New(num_variables));
}
#endif
17681
#if defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER)
// In AOT mode the same catch_entry slot instead stores the catch-entry-moves
// map as a TypedData.
TypedDataPtr Code::catch_entry_moves_maps() const {
  ASSERT(FLAG_precompiled_mode);
  return TypedData::RawCast(untag()->catch_entry());
}
void Code::set_catch_entry_moves_maps(const TypedData& maps) const {
  ASSERT(FLAG_precompiled_mode);
  untag()->set_catch_entry(maps.ptr());
}
#endif
17692
// JIT-only: attaches the deoptimization info table (must be old-space).
void Code::set_deopt_info_array(const Array& array) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(array.IsOld());
  untag()->set_deopt_info_array(array.ptr());
#endif
}
17701
// NOTE(review): the signature line (set_static_calls_target_table) and the
// assert comparing |left|/|right| inside the DEBUG loop are not visible in
// this excerpt — confirm against the full file.
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  untag()->set_static_calls_target_table(value.ptr());
#endif
#if defined(DEBUG)
  // Check that the table is sorted by pc offsets.
  // FlowGraphCompiler::AddStaticCallTarget adds pc-offsets to the table while
  // emitting assembly. This guarantees that every succeeding pc-offset is
  // larger than the previously added one.
  StaticCallsTable entries(value);
  const intptr_t count = entries.Length();
  for (intptr_t i = 0; i < count - 1; ++i) {
    auto left = Smi::Value(entries[i].Get<kSCallTableKindAndOffset>());
    auto right = Smi::Value(entries[i + 1].Get<kSCallTableKindAndOffset>());
  }
#endif  // DEBUG
}
17722
// Pool to use when executing this code: the isolate group's global pool in
// precompiled mode, otherwise this code object's own pool.
ObjectPoolPtr Code::GetObjectPool() const {
#if defined(DART_PRECOMPILER) || defined(DART_PRECOMPILED_RUNTIME)
  if (FLAG_precompiled_mode) {
    return IsolateGroup::Current()->object_store()->global_object_pool();
  }
#endif
  return object_pool();
}
17731
// NOTE(review): the signature of this predicate and the non-PRODUCT return
// expression are not visible in this excerpt — confirm against the full
// file. In PRODUCT builds it is always false.
#if defined(PRODUCT)
  return false;
#else
#endif
}
17739
// NOTE(review): the leading signature line and the declarations of |instrs|,
// |table|, |length| and |info| are not visible in this excerpt — confirm
// against the full file. Looks up the deopt info entry matching |pc|.
    ICData::DeoptReasonId* deopt_reason,
    uint32_t* deopt_flags) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  return TypedData::null();
#else
  uword code_entry = instrs.PayloadStart();
  if (table.IsNull()) {
    return TypedData::null();
  }
  // Linear search for the PC offset matching the target PC.
  Smi& offset = Smi::Handle();
  Smi& reason_and_flags = Smi::Handle();
  for (intptr_t i = 0; i < length; ++i) {
    DeoptTable::GetEntry(table, i, &offset, &info, &reason_and_flags);
    if (pc == (code_entry + offset.Value())) {
      ASSERT(!info.IsNull());
      *deopt_reason = DeoptTable::ReasonField::decode(reason_and_flags.Value());
      *deopt_flags = DeoptTable::FlagsField::decode(reason_and_flags.Value());
      return info.ptr();
    }
  }
  // No matching entry: reason unknown, no deopt info.
  *deopt_reason = ICData::kDeoptUnknown;
  return TypedData::null();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
17773
17774intptr_t Code::BinarySearchInSCallTable(uword pc) const {
17775#if defined(DART_PRECOMPILED_RUNTIME)
17776 UNREACHABLE();
17777#else
17778 NoSafepointScope no_safepoint;
17780 StaticCallsTable entries(table);
17781 const intptr_t pc_offset = pc - PayloadStart();
17782 intptr_t imin = 0;
17783 intptr_t imax = (table.Length() / kSCallTableEntryLength) - 1;
17784 while (imax >= imin) {
17785 const intptr_t imid = imin + (imax - imin) / 2;
17786 const auto offset = OffsetField::decode(
17787 Smi::Value(entries[imid].Get<kSCallTableKindAndOffset>()));
17788 if (offset < pc_offset) {
17789 imin = imid + 1;
17790 } else if (offset > pc_offset) {
17791 imax = imid - 1;
17792 } else {
17793 return imid;
17794 }
17795 }
17796#endif
17797 return -1;
17798}
17799
// NOTE(review): the signature line and the declaration of |array| are not
// visible in this excerpt — confirm against the full file. Returns the
// function targeted by the static call at |pc|, or null.
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
  return Function::null();
#else
  const intptr_t i = BinarySearchInSCallTable(pc);
  if (i < 0) {
    return Function::null();
  }
  StaticCallsTable entries(array);
  return entries[i].Get<kSCallTableFunctionTarget>();
#endif
}
17814
// NOTE(review): the signature line and the declaration of |array| are not
// visible in this excerpt — confirm against the full file. Patches the code
// target of the static call at |pc|; the entry must already exist.
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  const intptr_t i = BinarySearchInSCallTable(pc);
  ASSERT(i >= 0);
  StaticCallsTable entries(array);
  // The new code (if any) must belong to the same function as the entry.
  ASSERT(code.IsNull() ||
         (code.function() == entries[i].Get<kSCallTableFunctionTarget>()));
  return entries[i].Set<kSCallTableCodeOrTypeTarget>(code);
#endif
}
17828
// NOTE(review): the signature line and the declaration of |array| are not
// visible in this excerpt — confirm against the full file. Patches the
// code/type target of the call entry at |pc|.
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  const intptr_t i = BinarySearchInSCallTable(pc);
  ASSERT(i >= 0);
  StaticCallsTable entries(array);
#if defined(DEBUG)
  // Entries without a function target must be owned by a Class (allocation
  // stubs); otherwise the code must match the entry's function.
  if (entries[i].Get<kSCallTableFunctionTarget>() == Function::null()) {
    ASSERT(!code.IsNull() && Object::Handle(code.owner()).IsClass());
  } else {
    ASSERT(code.IsNull() ||
           (code.function() == entries[i].Get<kSCallTableFunctionTarget>()));
  }
#endif
  return entries[i].Set<kSCallTableCodeOrTypeTarget>(code);
#endif
}
17848
// NOTE(review): the signature line and the formatter-less Disassembler call
// inside the first branch are not visible in this excerpt — confirm against
// the full file.
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
  if (!FLAG_support_disassembler) {
    return;
  }
  const uword start = PayloadStart();
  if (formatter == nullptr) {
  } else {
    Disassembler::Disassemble(start, start + Size(), formatter, *this);
  }
#endif  // !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
}
17862
17863#if defined(INCLUDE_IL_PRINTER)
17864#if defined(PRODUCT)
17865// In PRODUCT builds we don't have space in Code object to store code comments
17866// so we move them into malloced heap (and leak them). This functionality
17867// is only intended to be used in AOT compiler so leaking is fine.
// CodeComments implementation backed by malloced memory instead of the Dart
// heap; used in PRODUCT builds where the Code object has no comments slot.
// Instances (and their copied strings) are deliberately leaked.
class MallocCodeComments final : public CodeComments {
 public:
  // Deep-copies |comments| (pc offsets and comment strings).
  explicit MallocCodeComments(const CodeComments& comments)
      : length_(comments.Length()), comments_(new Comment[comments.Length()]) {
    for (intptr_t i = 0; i < length_; i++) {
      comments_[i].pc_offset = comments.PCOffsetAt(i);
      comments_[i].comment =
          CStringUniquePtr(Utils::StrDup(comments.CommentAt(i)));
    }
  }

  intptr_t Length() const override { return length_; }

  intptr_t PCOffsetAt(intptr_t i) const override {
    return comments_[i].pc_offset;
  }

  const char* CommentAt(intptr_t i) const override {
    return comments_[i].comment.get();
  }

 private:
  // One (pc offset, owned comment string) pair.
  struct Comment {
    intptr_t pc_offset;
    CStringUniquePtr comment;
  };

  intptr_t length_;
  std::unique_ptr<Comment[]> comments_;
};
17898#endif
17899
// Returns this code's comments. In PRODUCT builds they live in malloced
// memory attached as a heap peer; otherwise they wrap the comments array on
// the Code object. The non-PRODUCT wrapper is heap-allocated and leaked on
// purpose (see the PRODUCT note above MallocCodeComments).
const CodeComments& Code::comments() const {
#if defined(PRODUCT)
  auto comments =
      static_cast<CodeComments*>(Thread::Current()->heap()->GetPeer(ptr()));
  return (comments != nullptr) ? *comments : Code::Comments::New(0);
#else
  return *new Code::Comments(Array::Handle(untag()->comments()));
#endif
}
17909
// Attaches |comments| to this code object: stored on the object itself in
// non-PRODUCT builds, or as a malloced heap peer in PRODUCT builds (only
// when code comments are enabled and non-empty).
void Code::set_comments(const CodeComments& comments) const {
#if !defined(PRODUCT)
  auto& wrapper = static_cast<const Code::Comments&>(comments);
  ASSERT(wrapper.comments_.IsOld());
  untag()->set_comments(wrapper.comments_.ptr());
#else
  if (FLAG_code_comments && comments.Length() > 0) {
    Thread::Current()->heap()->SetPeer(ptr(), new MallocCodeComments(comments));
  } else {
    Thread::Current()->heap()->SetPeer(ptr(), nullptr);
  }
#endif
}
17923#endif // defined(INCLUDE_IL_PRINTER)
17924
// Records the prologue offset in return_address_metadata (non-PRODUCT only).
void Code::SetPrologueOffset(intptr_t offset) const {
#if defined(PRODUCT)
  UNREACHABLE();
#else
  ASSERT(offset >= 0);
  untag()->set_return_address_metadata(Smi::New(offset));
#endif
}
17933
17934intptr_t Code::GetPrologueOffset() const {
17935#if defined(PRODUCT)
17936 UNREACHABLE();
17937 return -1;
17938#else
17939 const Object& object = Object::Handle(untag()->return_address_metadata());
17940 // In the future we may put something other than a smi in
17941 // |return_address_metadata_|.
17942 if (object.IsNull() || !object.IsSmi()) {
17943 return -1;
17944 }
17945 return Smi::Cast(object).Value();
17946#endif
17947}
17948
17950 return untag()->inlined_id_to_function();
17951}
17952
17954 ASSERT(value.IsOld());
17955 untag()->set_inlined_id_to_function(value.ptr());
17956}
17957
17958CodePtr Code::New(intptr_t pointer_offsets_length) {
17959 if (pointer_offsets_length < 0 || pointer_offsets_length > kMaxElements) {
17960 // This should be caught before we reach here.
17961 FATAL("Fatal error in Code::New: invalid pointer_offsets_length %" Pd "\n",
17963 }
17965 Code& result = Code::Handle();
17966 {
17967 auto raw = Object::Allocate<Code>(Heap::kOld, pointer_offsets_length);
17968 NoSafepointScope no_safepoint;
17969 result = raw;
17970 ASSERT_EQUAL(result.untag()->state_bits_, 0);
17971 result.set_pointer_offsets_length(pointer_offsets_length);
17972 }
17973 DEBUG_ASSERT(result.compile_timestamp() == 0);
17974#if defined(INCLUDE_IL_PRINTER)
17975 result.set_comments(Comments::New(0));
17976#endif
17977 result.set_pc_descriptors(Object::empty_descriptors());
17978 result.set_compressed_stackmaps(Object::empty_compressed_stackmaps());
17979 return result.ptr();
17980}
17981
17982#if !defined(DART_PRECOMPILED_RUNTIME)
// NOTE(review): the leading signature lines (FinalizeCodeAndNotify taking a
// Function, then a name string), the trailing |stats| parameter lines, and
// the per-overload thread assertions are not visible in this excerpt —
// confirm against the full file. Both overloads finalize the code and then
// notify code observers.
    compiler::Assembler* assembler,
    PoolAttachment pool_attachment,
    bool optimized,
  auto thread = Thread::Current();

  const auto& code = Code::Handle(
      FinalizeCode(compiler, assembler, pool_attachment, optimized, stats));
  NotifyCodeObservers(function, code, optimized);
  return code.ptr();
}

    compiler::Assembler* assembler,
    PoolAttachment pool_attachment,
    bool optimized,
  auto thread = Thread::Current();

  const auto& code = Code::Handle(
      FinalizeCode(compiler, assembler, pool_attachment, optimized, stats));
  NotifyCodeObservers(name, code, optimized);
  return code.ptr();
}
18012
18013#if defined(DART_PRECOMPILER)
18014DECLARE_FLAG(charp, write_v8_snapshot_profile_to);
18015DECLARE_FLAG(charp, trace_precompiler_to);
18016#endif // defined(DART_PRECOMPILER)
18017
// Finalizes assembled code: allocates Code + Instructions, copies and fixes
// up the instruction stream, attaches the object pool, write-protects the
// instructions, and wires up entry points.
// NOTE(review): several lines are not visible in this excerpt — the first
// signature line, the declaration of |object_pool|, both ObjectPool
// construction expressions, and the protection-mode argument of
// VirtualMemory::Protect. Confirm against the full file before editing.
    compiler::Assembler* assembler,
    PoolAttachment pool_attachment,
    bool optimized,
    CodeStatistics* stats /* = nullptr */) {
  auto thread = Thread::Current();

  ASSERT(assembler != nullptr);

  if (pool_attachment == PoolAttachment::kAttachPool) {
    if (assembler->HasObjectPoolBuilder()) {
      object_pool =
    } else {
      object_pool = ObjectPool::empty_object_pool().ptr();
    }
  } else {
#if defined(DART_PRECOMPILER)
    if (assembler->HasObjectPoolBuilder() &&
        assembler->object_pool_builder().HasParent()) {
      // We are not going to write this pool into snapshot, but we will use
      // it to emit references from this code object to other objects in the
      // snapshot that it uses.
      object_pool =
    }
#endif  // defined(DART_PRECOMPILER)
  }

  // Allocate the Code and Instructions objects. Code is allocated first
  // because a GC during allocation of the code will leave the instruction
  // pages read-only.
  intptr_t pointer_offset_count = assembler->CountPointerOffsets();
  Code& code = Code::ZoneHandle(Code::New(pointer_offset_count));
#ifdef TARGET_ARCH_IA32
  assembler->GetSelfHandle() = code.ptr();
#endif
  Instructions& instrs = Instructions::ZoneHandle(Instructions::New(
      assembler->CodeSize(), assembler->has_monomorphic_entry(),
      assembler->should_be_aligned()));

  {
    // Important: if GC is triggered at any point between Instructions::New
    // and here it would write protect instructions object that we are trying
    // to fill in.
    NoSafepointScope no_safepoint;

    // Copy the instructions into the instruction area and apply all fixups.
    // Embedded pointers are still in handles at this point.
    MemoryRegion region(reinterpret_cast<void*>(instrs.PayloadStart()),
                        instrs.Size());
    assembler->FinalizeInstructions(region);

    const auto& pointer_offsets = assembler->GetPointerOffsets();
    ASSERT(pointer_offsets.length() == pointer_offset_count);
    ASSERT(code.pointer_offsets_length() == pointer_offsets.length());

    // Set pointer offsets list in Code object and resolve all handles in
    // the instruction stream to raw objects.
    for (intptr_t i = 0; i < pointer_offsets.length(); i++) {
      intptr_t offset_in_instrs = pointer_offsets[i];
      code.SetPointerOffsetAt(i, offset_in_instrs);
      uword addr = region.start() + offset_in_instrs;
      ASSERT(instrs.PayloadStart() <= addr);
      ASSERT((instrs.PayloadStart() + instrs.Size()) > addr);
      const Object* object = LoadUnaligned(reinterpret_cast<Object**>(addr));
      ASSERT(object->IsOld());
      // N.B. The pointer is embedded in the Instructions object, but visited
      // through the Code object.
      code.StorePointerUnaligned(reinterpret_cast<ObjectPtr*>(addr),
                                 object->ptr(), thread);
    }

    // Write protect instructions and, if supported by OS, use dual mapping
    // for execution.
    if (FLAG_write_protect_code) {
      uword address = UntaggedObject::ToAddr(instrs.ptr());
      VirtualMemory::Protect(reinterpret_cast<void*>(address),
                             instrs.ptr()->untag()->HeapSize(),
    }

    // Hook up Code and Instructions objects.
    const uword unchecked_offset = assembler->UncheckedEntryOffset();
    code.SetActiveInstructions(instrs, unchecked_offset);
    code.set_instructions(instrs);
    NOT_IN_PRECOMPILED(code.set_unchecked_offset(unchecked_offset));
    code.set_is_alive(true);

    // Set object pool in Instructions object.
    if (!object_pool.IsNull()) {
      code.set_object_pool(object_pool.ptr());
    }

#if defined(DART_PRECOMPILER)
    if (stats != nullptr) {
      stats->Finalize();
      instrs.set_stats(stats);
    }
#endif

    CPU::FlushICache(instrs.PayloadStart(), instrs.Size());
  }

#if defined(INCLUDE_IL_PRINTER)
  code.set_comments(CreateCommentsFrom(assembler));
#endif  // defined(INCLUDE_IL_PRINTER)

#ifndef PRODUCT
  code.set_compile_timestamp(OS::GetCurrentMonotonicMicros());
  if (assembler->prologue_offset() >= 0) {
    code.SetPrologueOffset(assembler->prologue_offset());
  } else {
    // No prologue was ever entered, optimistically assume nothing was ever
    // pushed onto the stack.
    code.SetPrologueOffset(assembler->CodeSize());
  }
#endif
  return code.ptr();
}
18140
// Notifies code observers about |code|, preferring the function-based
// overload when the code belongs to a retained function.
// NOTE(review): an opening `if` line (presumably an observers-active guard,
// matching the stray closing brace below) is not visible in this excerpt —
// confirm against the full file.
void Code::NotifyCodeObservers(const Code& code, bool optimized) {
#if !defined(PRODUCT)
  ASSERT(!Thread::Current()->OwnsGCSafepoint());
  if (code.IsFunctionCode()) {
    const auto& function = Function::Handle(code.function());
    if (!function.IsNull()) {
      return NotifyCodeObservers(function, code, optimized);
    }
  }
  NotifyCodeObservers(code.Name(), code, optimized);
  }
#endif
}
18155
// NOTE(review): the first signature line (NotifyCodeObservers taking a
// Function) and an opening guard `if` are not visible in this excerpt —
// confirm against the full file.
    const Code& code,
    bool optimized) {
#if !defined(PRODUCT)
  ASSERT(!function.IsNull());
  ASSERT(!Thread::Current()->OwnsGCSafepoint());
  // Calling ToLibNamePrefixedQualifiedCString is very expensive,
  // try to avoid it.
    const char* name = function.ToLibNamePrefixedQualifiedCString();
    NotifyCodeObservers(name, code, optimized);
  }
#endif
}
18170
// NOTE(review): the first signature line (NotifyCodeObservers taking a
// C-string name) and an opening guard `if` are not visible in this excerpt —
// confirm against the full file. Forwards to CodeObservers::NotifyAll.
    const Code& code,
    bool optimized) {
#if !defined(PRODUCT)
  ASSERT(name != nullptr);
  ASSERT(!code.IsNull());
  ASSERT(!Thread::Current()->OwnsGCSafepoint());
    const auto& instrs = Instructions::Handle(code.instructions());
    CodeObservers::NotifyAll(name, instrs.PayloadStart(),
                             code.GetPrologueOffset(), instrs.Size(), optimized,
                             &code.comments());
  }
#endif
}
18186#endif // !defined(DART_PRECOMPILED_RUNTIME)
18187
// Slow lookup of the Code object whose payload starts exactly at |pc| (and,
// outside PRODUCT builds, whose compile timestamp matches |timestamp|).
CodePtr Code::FindCode(uword pc, int64_t timestamp) {
  class SlowFindCodeVisitor : public ObjectVisitor {
   public:
    SlowFindCodeVisitor(uword pc, int64_t timestamp)
        : pc_(pc), timestamp_(timestamp), result_(Code::null()) {}

    void VisitObject(ObjectPtr obj) {
      if (!obj->IsCode()) return;
      CodePtr code = static_cast<CodePtr>(obj);
      if (Code::PayloadStartOf(code) != pc_) return;
#if !defined(PRODUCT)
      if (code->untag()->compile_timestamp_ != timestamp_) return;
#endif
      // At most one code object may match.
      ASSERT(result_ == Code::null());
      result_ = code;
    }

    CodePtr result() const { return result_; }

   private:
    uword pc_;
    int64_t timestamp_;
    CodePtr result_;
  };

  // NOTE(review): the declaration of |iteration| (a heap-iteration scope) is
  // not visible in this excerpt — confirm against the full file.
  SlowFindCodeVisitor visitor(pc, timestamp);
  iteration.IterateVMIsolateObjects(&visitor);
  iteration.IterateOldObjectsNoImagePages(&visitor);
  return visitor.result();
}
18219
// NOTE(review): the enclosing signature (FindCodeUnsafe), the pc-containment
// check inside VisitObject, the declaration of |group|, and a trailing scope
// line are not visible in this excerpt — confirm against the full file.
// Scans old space (made iterable first) for the code containing |pc|.
  class FindCodeUnsafeVisitor : public ObjectVisitor {
   public:
    explicit FindCodeUnsafeVisitor(uword pc) : pc_(pc), result_(Code::null()) {}

    void VisitObject(ObjectPtr obj) {
      if (obj->IsCode()) {
        CodePtr code = static_cast<CodePtr>(obj);
          result_ = code;
        }
      }
    }

    CodePtr result() { return result_; }

   private:
    uword pc_;
    CodePtr result_;
  };

  PageSpace* old_space = group->heap()->old_space();
  old_space->MakeIterable();
  FindCodeUnsafeVisitor visitor(pc);
  old_space->VisitObjectsUnsafe(&visitor);
  return visitor.result();
}
18249
// NOTE(review): the signature line is not visible in this excerpt — confirm
// against the full file. Maps |pc| back to its token position by scanning
// the PC descriptors for an exact pc-offset match.
  uword pc_offset = pc - PayloadStart();
  const PcDescriptors& descriptors = PcDescriptors::Handle(pc_descriptors());
  PcDescriptors::Iterator iter(descriptors, UntaggedPcDescriptors::kAnyKind);
  while (iter.MoveNext()) {
    if (iter.PcOffset() == pc_offset) {
      return iter.TokenPos();
    }
  }
  return TokenPosition::kNoSource;
}
18261
// NOTE(review): the first signature line and an assertion following the pc
// computation are not visible in this excerpt — confirm against the full
// file. Returns the absolute pc for |deopt_id| of the given kind, or 0.
    UntaggedPcDescriptors::Kind kind) const {
  const PcDescriptors& descriptors = PcDescriptors::Handle(pc_descriptors());
  PcDescriptors::Iterator iter(descriptors, kind);
  while (iter.MoveNext()) {
    if (iter.DeoptId() == deopt_id) {
      uword pc_offset = iter.PcOffset();
      uword pc = PayloadStart() + pc_offset;
      return pc;
    }
  }
  return 0;
}
18276
18277intptr_t Code::GetDeoptIdForOsr(uword pc) const {
18278 uword pc_offset = pc - PayloadStart();
18279 const PcDescriptors& descriptors = PcDescriptors::Handle(pc_descriptors());
18280 PcDescriptors::Iterator iter(descriptors, UntaggedPcDescriptors::kOsrEntry);
18281 while (iter.MoveNext()) {
18282 if (iter.PcOffset() == pc_offset) {
18283 return iter.DeoptId();
18284 }
18285 }
18286 return DeoptId::kNone;
18287}
18288
// NOTE(review): the argument lines of the SCreate call are not visible in
// this excerpt — confirm against the full file.
const char* Code::ToCString() const {
  return OS::SCreate(Thread::Current()->zone(), "Code(%s)",
}
18294
// Stable hash for this code object, derived from its owner's hash.
// NOTE(review): the initializer of |obj| (obtaining/unwrapping the owner) is
// not visible in this excerpt — confirm against the full file.
uint32_t Code::Hash() const {
  // PayloadStart() is a tempting hash as Instructions are not moved by the
  // compactor, but Instructions are effectively moved between the process
  // creating an AppJIT/AOT snapshot and the process loading the snapshot.
  const Object& obj =
  if (obj.IsClass()) {
    return Class::Cast(obj).Hash();
  } else if (obj.IsAbstractType()) {
    return AbstractType::Cast(obj).Hash();
  } else if (obj.IsFunction()) {
    return Function::Cast(obj).Hash();
  } else {
    // E.g., VM stub.
    return 42;
  }
}
18312
// Human-readable name for this code object: "[Stub] ..." for VM and
// allocation/type-test stubs, "[Optimized]/[Unoptimized] <fn>" for Dart
// functions, or "[unknown code]" when the owner was dropped.
// NOTE(review): the initializer of |obj| (obtaining/unwrapping the owner) is
// not visible in this excerpt — confirm against the full file.
const char* Code::Name() const {
  Zone* zone = Thread::Current()->zone();
  if (IsStubCode()) {
    // Regular stub.
    const char* name = StubCode::NameOfStub(EntryPoint());
    if (name == nullptr) {
      return "[unknown stub]";  // Not yet recorded.
    }
    return OS::SCreate(zone, "[Stub] %s", name);
  }
  const auto& obj =
  if (obj.IsClass()) {
    // Allocation stub.
    return OS::SCreate(zone, "[Stub] Allocate %s",
                       Class::Cast(obj).ScrubbedNameCString());
  } else if (obj.IsAbstractType()) {
    // Type test stub.
    return OS::SCreate(zone, "[Stub] Type Test %s",
                       AbstractType::Cast(obj).ToCString());
  } else if (obj.IsFunction()) {
    // Dart function.
    const char* opt = is_optimized() ? "[Optimized]" : "[Unoptimized]";
    const char* function_name = Function::Cast(obj).UserVisibleNameCString();
    return OS::SCreate(zone, "%s %s", opt, function_name);
  } else {
    // --no_retain_function_objects etc
    return "[unknown code]";
  }
}
18343
// NOTE(review): the signature line (a qualified-name printer taking name
// formatting |params|) and the initializer of |obj| are not visible in this
// excerpt — confirm against the full file. Falls back to Name() for
// non-function owners.
  Zone* zone = Thread::Current()->zone();
  const Object& obj =
  if (obj.IsFunction()) {
    ZoneTextBuffer printer(zone);
    printer.AddString(is_optimized() ? "[Optimized] " : "[Unoptimized] ");
    Function::Cast(obj).PrintName(params, &printer);
    return printer.buffer();
  }
  return Name();
}
18356
18357bool Code::IsStubCode() const {
18358 // We should _not_ unwrap any possible WSRs here, as the null value is never
18359 // wrapped by a WSR.
18360 return owner() == Object::null();
18361}
18362
18364 return OwnerClassId() == kClassCid;
18365}
18366
18368 auto const cid = OwnerClassId();
18369 return cid == kAbstractTypeCid || cid == kTypeCid ||
18370 cid == kFunctionTypeCid || cid == kRecordTypeCid ||
18371 cid == kTypeParameterCid;
18372}
18373
18375 return OwnerClassId() == kFunctionCid;
18376}
18377
// NOTE(review): the signature and the first operand of this conjunction are
// not visible in this excerpt — confirm against the full file.
         (code == StubCode::UnknownDartCode().ptr());
}
18382
// NOTE(review): the signature (DisableDartCode) and preceding assertion
// lines are not visible in this excerpt — confirm against the full file.
// Redirects execution to the fix-callers-target stub so the next call
// re-resolves its target.
  const Code& new_code = StubCode::FixCallersTarget();
  SetActiveInstructions(Instructions::Handle(new_code.instructions()),
                        new_code.UncheckedEntryPointOffset());
}
18391
// Redirects an allocation stub to the matching fix-allocation-stub-target
// stub (parameterized variant when the class has type arguments).
// NOTE(review): assertion lines between the signature and the body are not
// visible in this excerpt — confirm against the full file.
void Code::DisableStubCode(bool is_cls_parameterized) const {
  const Code& new_code = is_cls_parameterized
                             ? StubCode::FixParameterizedAllocationStubTarget()
                             : StubCode::FixAllocationStubTarget();
  SetActiveInstructions(Instructions::Handle(new_code.instructions()),
                        new_code.UncheckedEntryPointOffset());
}
18402
// Caches the four entry points derived from |instructions| directly on
// |code|'s raw object.
// NOTE(review): the line computing the monomorphic entry point (and one line
// preceding the entry-point computation) are not visible in this excerpt —
// confirm against the full file.
void Code::InitializeCachedEntryPointsFrom(CodePtr code,
                                           InstructionsPtr instructions,
                                           uint32_t unchecked_offset) {
  const uword entry_point = Instructions::EntryPoint(instructions);
  const uword monomorphic_entry_point =
  code->untag()->entry_point_ = entry_point;
  code->untag()->monomorphic_entry_point_ = monomorphic_entry_point;
  code->untag()->unchecked_entry_point_ = entry_point + unchecked_offset;
  code->untag()->monomorphic_unchecked_entry_point_ =
      monomorphic_entry_point + unchecked_offset;
}
18416
// Installs |instructions| as the active instructions; requires the program
// lock to be held for writing. JIT-only.
void Code::SetActiveInstructions(const Instructions& instructions,
                                 uint32_t unchecked_offset) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  SetActiveInstructionsSafe(instructions, unchecked_offset);
#endif
}
18426
// Lock-free variant of SetActiveInstructions: writes the active instructions
// pointer and refreshes the cached entry points. JIT-only.
void Code::SetActiveInstructionsSafe(const Instructions& instructions,
                                     uint32_t unchecked_offset) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  // RawInstructions are never allocated in New space and hence a
  // store buffer update is not needed here.
  untag()->set_active_instructions(instructions.ptr());
  Code::InitializeCachedEntryPointsFrom(ptr(), instructions.ptr(),
                                        unchecked_offset);
#endif
}
18439
18440void Code::ResetActiveInstructions() const {
18441#if defined(DART_PRECOMPILED_RUNTIME)
18442 UNREACHABLE();
18443#else
18444 SetActiveInstructions(Instructions::Handle(instructions()),
18445 untag()->unchecked_offset_);
18446#endif
18447}
18448
// NOTE(review): the leading signature lines and the declarations of |map|
// and |root| are not visible in this excerpt — confirm against the full
// file. Reads the code source map to report the inlining chain at
// |pc_offset|.
    intptr_t pc_offset,
    GrowableArray<TokenPosition>* token_positions) const {
  if (map.IsNull()) {
    return;  // VM stub, allocation stub, or type testing stub.
  }
  const Array& id_map = Array::Handle(inlined_id_to_function());
  CodeSourceMapReader reader(map, id_map, root);
  reader.GetInlinedFunctionsAt(pc_offset, functions, token_positions);
}
18463
#ifndef PRODUCT
// Emits the inlining intervals of optimized code as JSON into |jsobj|.
void Code::PrintJSONInlineIntervals(JSONObject* jsobj) const {
  if (!is_optimized()) {
    return;  // No inlining.
  }
  const CodeSourceMap& map = CodeSourceMap::Handle(code_source_map());
  const Array& id_map = Array::Handle(inlined_id_to_function());
  const Function& root = Function::Handle(function());
  CodeSourceMapReader reader(map, id_map, root);
  reader.PrintJSONInlineIntervals(jsobj);
}
#endif
18476
// NOTE(review): the signature of this dump helper, the declarations of |map|
// and |root|, and the reader invocation are not visible in this excerpt —
// confirm against the full file.
  if (map.IsNull()) {
    // Stub code.
    return;
  }
  const Array& id_map = Array::Handle(inlined_id_to_function());
  CodeSourceMapReader reader(map, id_map, root);
}
18488
// Dumps source positions from the code source map; offsets are relative when
// |relative_addresses| is set, otherwise based at PayloadStart().
// NOTE(review): the declarations of |map| and |root| are not visible in this
// excerpt — confirm against the full file.
void Code::DumpSourcePositions(bool relative_addresses) const {
  if (map.IsNull()) {
    // Stub code.
    return;
  }
  const Array& id_map = Array::Handle(inlined_id_to_function());
  CodeSourceMapReader reader(map, id_map, root);
  reader.DumpSourcePositions(relative_addresses ? 0 : PayloadStart());
}
18500
18501intptr_t Context::GetLevel() const {
18502 intptr_t level = 0;
18503 Context& parent_ctx = Context::Handle(parent());
18504 while (!parent_ctx.IsNull()) {
18505 level++;
18506 parent_ctx = parent_ctx.parent();
18507 }
18508 return level;
18509}
18510
// Allocates a Context with |num_variables| slots in |space|.
// NOTE(review): the length-validation `if` line and the FATAL argument
// continuation are not visible in this excerpt — confirm against the full
// file.
ContextPtr Context::New(intptr_t num_variables, Heap::Space space) {
  ASSERT(num_variables >= 0);

    // This should be caught before we reach here.
    FATAL("Fatal error in Context::New: invalid num_variables %" Pd "\n",
  }
  auto raw = Object::Allocate<Context>(space, num_variables);
  // No GC between allocation and initialization of the raw field.
  NoSafepointScope no_safepoint;
  raw->untag()->num_variables_ = num_variables;
  return raw;
}
18525
18526const char* Context::ToCString() const {
18527 if (IsNull()) {
18528 return "Context: null";
18529 }
18530 Zone* zone = Thread::Current()->zone();
18531 const Context& parent_ctx = Context::Handle(parent());
18532 if (parent_ctx.IsNull()) {
18533 return zone->PrintToString("Context num_variables: %" Pd "",
18534 num_variables());
18535 } else {
18536 const char* parent_str = parent_ctx.ToCString();
18537 return zone->PrintToString("Context num_variables: %" Pd " parent:{ %s }",
18538 num_variables(), parent_str);
18539 }
18540}
18541
18542static void IndentN(int count) {
18543 for (int i = 0; i < count; i++) {
18544 THR_Print(" ");
18545 }
18546}
18547
// Debug-prints this context's variables (truncating long values) and then
// recursively dumps the parent chain with increased indentation.
void Context::Dump(int indent) const {
  if (IsNull()) {
    IndentN(indent);
    THR_Print("Context@null\n");
    return;
  }

  IndentN(indent);
  THR_Print("Context vars(%" Pd ") {\n", num_variables());
  Object& obj = Object::Handle();
  for (intptr_t i = 0; i < num_variables(); i++) {
    IndentN(indent + 2);
    obj = At(i);
    const char* s = obj.ToCString();
    // Truncate long values to keep the dump readable.
    if (strlen(s) > 50) {
      THR_Print("[%" Pd "] = [first 50 chars:] %.50s...\n", i, s);
    } else {
      THR_Print("[%" Pd "] = %s\n", i, s);
    }
  }

  const Context& parent_ctx = Context::Handle(parent());
  if (!parent_ctx.IsNull()) {
    parent_ctx.Dump(indent + 2);
  }
  IndentN(indent);
  THR_Print("}\n");
}
18576
// Allocates a ContextScope describing |num_variables| captured variables.
// NOTE(review): the declaration of |result| and the FATAL argument
// continuation are not visible in this excerpt — confirm against the full
// file.
ContextScopePtr ContextScope::New(intptr_t num_variables, bool is_implicit) {
  if (num_variables < 0 || num_variables > kMaxElements) {
    // This should be caught before we reach here.
    FATAL("Fatal error in ContextScope::New: invalid num_variables %" Pd "\n",
  }
  {
    auto raw = Object::Allocate<ContextScope>(Heap::kOld, num_variables);
    // No GC between allocation and initialization of the length field.
    NoSafepointScope no_safepoint;
    result = raw;
    result.set_num_variables(num_variables);
  }
  result.set_is_implicit(is_implicit);
  return result.ptr();
}
18594
// Token-position accessors for the variable at |scope_index|; positions are
// stored in serialized Smi form.
// NOTE(review): the `return TokenPosition::Deserialize(` lines and the first
// signature lines of the declaration-position pair are not visible in this
// excerpt — confirm against the full file.
TokenPosition ContextScope::TokenIndexAt(intptr_t scope_index) const {
      Smi::Value(untag()->token_pos_at(scope_index)));
}

void ContextScope::SetTokenIndexAt(intptr_t scope_index,
                                   TokenPosition token_pos) const {
  untag()->set_token_pos_at(scope_index, Smi::New(token_pos.Serialize()));
}

    intptr_t scope_index) const {
      Smi::Value(untag()->declaration_token_pos_at(scope_index)));
}

    intptr_t scope_index,
    TokenPosition declaration_token_pos) const {
  untag()->set_declaration_token_pos_at(
      scope_index, Smi::New(declaration_token_pos.Serialize()));
}
18617
18618StringPtr ContextScope::NameAt(intptr_t scope_index) const {
18619 return untag()->name_at(scope_index);
18620}
18621
18622void ContextScope::SetNameAt(intptr_t scope_index, const String& name) const {
18623 untag()->set_name_at(scope_index, name.ptr());
18624}
18625
18626void ContextScope::ClearFlagsAt(intptr_t scope_index) const {
18627 untag()->set_flags_at(scope_index, Smi::New(0));
18628}
18629
18630bool ContextScope::GetFlagAt(intptr_t scope_index, intptr_t bit_index) const {
18631 const intptr_t mask = 1 << bit_index;
18632 return (Smi::Value(untag()->flags_at(scope_index)) & mask) != 0;
18633}
18634
18635void ContextScope::SetFlagAt(intptr_t scope_index,
18636 intptr_t bit_index,
18637 bool value) const {
18638 const intptr_t mask = 1 << bit_index;
18639 intptr_t flags = Smi::Value(untag()->flags_at(scope_index));
18640 untag()->set_flags_at(scope_index,
18641 Smi::New(value ? flags | mask : flags & ~mask));
18642}
18643
18644#define DEFINE_FLAG_ACCESSORS(Name) \
18645 bool ContextScope::Is##Name##At(intptr_t scope_index) const { \
18646 return GetFlagAt(scope_index, \
18647 UntaggedContextScope::VariableDesc::kIs##Name); \
18648 } \
18649 \
18650 void ContextScope::SetIs##Name##At(intptr_t scope_index, bool value) const { \
18651 SetFlagAt(scope_index, UntaggedContextScope::VariableDesc::kIs##Name, \
18652 value); \
18653 }
18654
18656#undef DEFINE_FLAG_ACCESSORS
18657
18658intptr_t ContextScope::LateInitOffsetAt(intptr_t scope_index) const {
18659 return Smi::Value(untag()->late_init_offset_at(scope_index));
18660}
18661
18662void ContextScope::SetLateInitOffsetAt(intptr_t scope_index,
18663 intptr_t late_init_offset) const {
18664 untag()->set_late_init_offset_at(scope_index, Smi::New(late_init_offset));
18665}
18666
18667AbstractTypePtr ContextScope::TypeAt(intptr_t scope_index) const {
18668 return untag()->type_at(scope_index);
18669}
18670
18671void ContextScope::SetTypeAt(intptr_t scope_index,
18672 const AbstractType& type) const {
18673 untag()->set_type_at(scope_index, type.ptr());
18674}
18675
18676intptr_t ContextScope::CidAt(intptr_t scope_index) const {
18677 return Smi::Value(untag()->cid_at(scope_index));
18678}
18679
18680void ContextScope::SetCidAt(intptr_t scope_index, intptr_t cid) const {
18681 untag()->set_cid_at(scope_index, Smi::New(cid));
18682}
18683
18684intptr_t ContextScope::ContextIndexAt(intptr_t scope_index) const {
18685 return Smi::Value(untag()->context_index_at(scope_index));
18686}
18687
18688void ContextScope::SetContextIndexAt(intptr_t scope_index,
18689 intptr_t context_index) const {
18690 untag()->set_context_index_at(scope_index, Smi::New(context_index));
18691}
18692
18693intptr_t ContextScope::ContextLevelAt(intptr_t scope_index) const {
18694 return Smi::Value(untag()->context_level_at(scope_index));
18695}
18696
18697void ContextScope::SetContextLevelAt(intptr_t scope_index,
18698 intptr_t context_level) const {
18699 untag()->set_context_level_at(scope_index, Smi::New(context_level));
18700}
18701
18702intptr_t ContextScope::KernelOffsetAt(intptr_t scope_index) const {
18703 return Smi::Value(untag()->kernel_offset_at(scope_index));
18704}
18705
18706void ContextScope::SetKernelOffsetAt(intptr_t scope_index,
18707 intptr_t kernel_offset) const {
18708 untag()->set_kernel_offset_at(scope_index, Smi::New(kernel_offset));
18709}
18710
18711const char* ContextScope::ToCString() const {
18712 const char* prev_cstr = "ContextScope:";
18714 for (int i = 0; i < num_variables(); i++) {
18715 name = NameAt(i);
18716 const char* cname = name.ToCString();
18718 intptr_t idx = ContextIndexAt(i);
18719 intptr_t lvl = ContextLevelAt(i);
18720 char* chars =
18721 OS::SCreate(Thread::Current()->zone(),
18722 "%s\nvar %s token-pos %s ctx lvl %" Pd " index %" Pd "",
18723 prev_cstr, cname, pos.ToCString(), lvl, idx);
18724 prev_cstr = chars;
18725 }
18726 return prev_cstr;
18727}
18728
18729SentinelPtr Sentinel::New() {
18730 return Object::Allocate<Sentinel>(Heap::kOld);
18731}
18732
18733const char* Sentinel::ToCString() const {
18734 if (ptr() == Object::sentinel().ptr()) {
18735 return "sentinel";
18736 } else if (ptr() == Object::transition_sentinel().ptr()) {
18737 return "transition_sentinel";
18738 } else if (ptr() == Object::unknown_constant().ptr()) {
18739 return "unknown_constant";
18740 } else if (ptr() == Object::non_constant().ptr()) {
18741 return "non_constant";
18742 } else if (ptr() == Object::optimized_out().ptr()) {
18743 return "<optimized out>";
18744 }
18745 return "Sentinel(unknown)";
18746}
18747
18749 return untag()->buckets();
18750}
18751
18752void MegamorphicCache::set_buckets(const Array& buckets) const {
18753 untag()->set_buckets(buckets.ptr());
18754}
18755
18756// Class IDs in the table are smi-tagged, so we use a smi-tagged mask
18757// and target class ID to avoid untagging (on each iteration of the
18758// test loop) in generated code.
18759intptr_t MegamorphicCache::mask() const {
18760 return Smi::Value(untag()->mask());
18761}
18762
18763void MegamorphicCache::set_mask(intptr_t mask) const {
18764 untag()->set_mask(Smi::New(mask));
18765}
18766
18768 return untag()->filled_entry_count_;
18769}
18770
18772 StoreNonPointer(&untag()->filled_entry_count_, count);
18773}
18774
18775MegamorphicCachePtr MegamorphicCache::New() {
18776 return Object::Allocate<MegamorphicCache>(Heap::kOld);
18777}
18778
18779MegamorphicCachePtr MegamorphicCache::New(const String& target_name,
18780 const Array& arguments_descriptor) {
18781 auto* const zone = Thread::Current()->zone();
18782 const auto& result = MegamorphicCache::Handle(
18783 zone, Object::Allocate<MegamorphicCache>(Heap::kOld));
18784 const intptr_t capacity = kInitialCapacity;
18785 const Array& buckets =
18786 Array::Handle(zone, Array::New(kEntryLength * capacity, Heap::kOld));
18787 const Object& handler = Object::Handle(zone);
18788 for (intptr_t i = 0; i < capacity; ++i) {
18789 SetEntry(buckets, i, smi_illegal_cid(), handler);
18790 }
18791 result.set_buckets(buckets);
18792 result.set_mask(capacity - 1);
18793 result.set_target_name(target_name);
18794 result.set_arguments_descriptor(arguments_descriptor);
18795 result.set_filled_entry_count(0);
18796 return result.ptr();
18797}
18798
18800 const Object& target) const {
18801 SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());
18802
18803 if (LookupLocked(class_id) == Object::null()) {
18804 InsertLocked(class_id, target);
18805 }
18806
18807#if defined(DEBUG)
18808 ASSERT(LookupLocked(class_id) == target.ptr());
18809#endif // define(DEBUG)
18810}
18811
18813 SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());
18814 return LookupLocked(class_id);
18815}
18816
18817ObjectPtr MegamorphicCache::LookupLocked(const Smi& class_id) const {
18818 auto thread = Thread::Current();
18819 auto isolate_group = thread->isolate_group();
18820 auto zone = thread->zone();
18821 ASSERT(thread->IsDartMutatorThread());
18822 ASSERT(isolate_group->type_feedback_mutex()->IsOwnedByCurrentThread());
18823
18824 const auto& backing_array = Array::Handle(zone, buckets());
18825 intptr_t id_mask = mask();
18826 intptr_t index = (class_id.Value() * kSpreadFactor) & id_mask;
18827 intptr_t i = index;
18828 do {
18829 const classid_t current_cid =
18830 Smi::Value(Smi::RawCast(GetClassId(backing_array, i)));
18831 if (current_cid == class_id.Value()) {
18832 return GetTargetFunction(backing_array, i);
18833 } else if (current_cid == kIllegalCid) {
18834 return Object::null();
18835 }
18836 i = (i + 1) & id_mask;
18837 } while (i != index);
18838 UNREACHABLE();
18839}
18840
18841void MegamorphicCache::InsertLocked(const Smi& class_id,
18842 const Object& target) const {
18843 auto isolate_group = IsolateGroup::Current();
18844 ASSERT(isolate_group->type_feedback_mutex()->IsOwnedByCurrentThread());
18845
18846 // As opposed to ICData we are stopping mutator threads from other isolates
18847 // while modifying the megamorphic cache, since updates are not atomic.
18848 //
18849 // NOTE: In the future we might change the megamorphic cache insertions to
18850 // carefully use store-release barriers on the writer as well as
18851 // load-acquire barriers on the reader, ...
18852 isolate_group->RunWithStoppedMutators(
18853 [&]() {
18854 EnsureCapacityLocked();
18855 InsertEntryLocked(class_id, target);
18856 },
18857 /*use_force_growth=*/true);
18858}
18859
18860void MegamorphicCache::EnsureCapacityLocked() const {
18861 auto thread = Thread::Current();
18862 auto zone = thread->zone();
18863 auto isolate_group = thread->isolate_group();
18864 ASSERT(isolate_group->type_feedback_mutex()->IsOwnedByCurrentThread());
18865
18866 intptr_t old_capacity = mask() + 1;
18867 double load_limit = kLoadFactor * static_cast<double>(old_capacity);
18868 if (static_cast<double>(filled_entry_count() + 1) > load_limit) {
18869 const Array& old_buckets = Array::Handle(zone, buckets());
18870 intptr_t new_capacity = old_capacity * 2;
18871 const Array& new_buckets =
18872 Array::Handle(zone, Array::New(kEntryLength * new_capacity));
18873
18874 auto& target = Object::Handle(zone);
18875 for (intptr_t i = 0; i < new_capacity; ++i) {
18876 SetEntry(new_buckets, i, smi_illegal_cid(), target);
18877 }
18878 set_buckets(new_buckets);
18879 set_mask(new_capacity - 1);
18881
18882 // Rehash the valid entries.
18883 Smi& class_id = Smi::Handle(zone);
18884 for (intptr_t i = 0; i < old_capacity; ++i) {
18885 class_id ^= GetClassId(old_buckets, i);
18886 if (class_id.Value() != kIllegalCid) {
18887 target = GetTargetFunction(old_buckets, i);
18888 InsertEntryLocked(class_id, target);
18889 }
18890 }
18891 }
18892}
18893
18894void MegamorphicCache::InsertEntryLocked(const Smi& class_id,
18895 const Object& target) const {
18896 auto thread = Thread::Current();
18897 auto isolate_group = thread->isolate_group();
18898 ASSERT(isolate_group->type_feedback_mutex()->IsOwnedByCurrentThread());
18899
18900 ASSERT(Thread::Current()->IsDartMutatorThread());
18901 ASSERT(static_cast<double>(filled_entry_count() + 1) <=
18902 (kLoadFactor * static_cast<double>(mask() + 1)));
18903 const Array& backing_array = Array::Handle(buckets());
18904 intptr_t id_mask = mask();
18905 intptr_t index = (class_id.Value() * kSpreadFactor) & id_mask;
18906 intptr_t i = index;
18907 do {
18908 if (Smi::Value(Smi::RawCast(GetClassId(backing_array, i))) == kIllegalCid) {
18909 SetEntry(backing_array, i, class_id, target);
18911 return;
18912 }
18913 i = (i + 1) & id_mask;
18914 } while (i != index);
18915 UNREACHABLE();
18916}
18917
18918const char* MegamorphicCache::ToCString() const {
18919 const String& name = String::Handle(target_name());
18920 return OS::SCreate(Thread::Current()->zone(), "MegamorphicCache(%s)",
18921 name.ToCString());
18922}
18923
18924SubtypeTestCachePtr SubtypeTestCache::New(intptr_t num_inputs) {
18926 ASSERT(num_inputs >= 1);
18928 // SubtypeTestCache objects are long living objects, allocate them in the
18929 // old generation.
18930 const auto& result =
18931 SubtypeTestCache::Handle(Object::Allocate<SubtypeTestCache>(Heap::kOld));
18932 ASSERT_EQUAL(result.num_occupied(), 0);
18933 result.untag()->num_inputs_ = num_inputs;
18934 result.set_cache(Object::empty_subtype_test_cache_array());
18935 return result.ptr();
18936}
18937
18938ArrayPtr SubtypeTestCache::cache() const {
18939 return untag()->cache<std::memory_order_acquire>();
18940}
18941
18942void SubtypeTestCache::set_cache(const Array& value) const {
18943 // We have to ensure that initializing stores to the array are available
18944 // when releasing the pointer to the array pointer.
18945 // => We have to use store-release here.
18946 untag()->set_cache<std::memory_order_release>(value.ptr());
18947}
18948
18949void SubtypeTestCache::set_num_occupied(intptr_t value) const {
18951 untag()->num_occupied_ = value;
18952}
18953
18955 ASSERT(!IsNull());
18956 return num_occupied();
18957}
18958
18960 ASSERT(!IsNull());
18962}
18963
18964intptr_t SubtypeTestCache::NumEntries(const Array& array) {
18966 return table.Length();
18967}
18968
// NOTE(review): this predicate's signature line (18969) and one body line
// (18971) were lost in extraction, and the remaining text retains fused
// doxygen line numbers; restore the whole function from upstream
// runtime/vm/object.cc. As shown it returns false for a null cache.
18970 if (IsNull()) return false;
18972}
18973
18974bool SubtypeTestCache::IsHash(const Array& array) {
18975 return array.Length() > kMaxLinearCacheSize;
18976}
18977
18979 const Object& instance_class_id_or_signature,
18980 const AbstractType& destination_type,
18981 const TypeArguments& instance_type_arguments,
18982 const TypeArguments& instantiator_type_arguments,
18983 const TypeArguments& function_type_arguments,
18984 const TypeArguments& instance_parent_function_type_arguments,
18985 const TypeArguments& instance_delayed_type_arguments,
18986 const Bool& test_result) const {
18988 ->isolate_group()
18989 ->subtype_test_cache_mutex()
18990 ->IsOwnedByCurrentThread());
18991 ASSERT(!test_result.IsNull());
18992 ASSERT(Smi::New(kRecordCid) != instance_class_id_or_signature.ptr());
18993
18994 const intptr_t old_num = NumberOfChecks();
18995 Zone* const zone = Thread::Current()->zone();
18996 Array& data = Array::Handle(zone, cache());
18997 bool was_grown;
18998 data = EnsureCapacity(zone, data, old_num + 1, &was_grown);
18999 ASSERT(data.ptr() != Object::empty_subtype_test_cache_array().ptr());
19000
19001 const auto& loc = FindKeyOrUnused(
19002 data, num_inputs(), instance_class_id_or_signature, destination_type,
19003 instance_type_arguments, instantiator_type_arguments,
19004 function_type_arguments, instance_parent_function_type_arguments,
19005 instance_delayed_type_arguments);
19006 SubtypeTestCacheTable entries(data);
19007 const auto& entry = entries[loc.entry];
19008 if (loc.present) {
19009 if (entry.Get<kTestResult>() != test_result.ptr()) {
19010 const auto& old_result = Bool::Handle(zone, entry.Get<kTestResult>());
19011 FATAL("Existing subtype test cache entry has result %s, not %s",
19012 old_result.ToCString(), test_result.ToCString());
19013 }
19014 return loc.entry;
19015 }
19016
19017 // Set the used elements in the entry in reverse order, so that the instance
19018 // cid or signature is last, then increment the number of entries.
19019 entry.Set<kTestResult>(test_result);
19020 switch (num_inputs()) {
19021 case 7:
19022 entry.Set<kDestinationType>(destination_type);
19024 case 6:
19026 instance_delayed_type_arguments);
19028 case 5:
19030 instance_parent_function_type_arguments);
19032 case 4:
19033 entry.Set<kFunctionTypeArguments>(function_type_arguments);
19035 case 3:
19036 entry.Set<kInstantiatorTypeArguments>(instantiator_type_arguments);
19038 case 2:
19039 entry.Set<kInstanceTypeArguments>(instance_type_arguments);
19041 case 1:
19042 // If this is a new backing array, we don't need store-release barriers,
19043 // as no reader has access to the array until it is set as the backing
19044 // store (which is done with a store-release barrier).
19045 //
19046 // Otherwise, the instance cid or signature must be set last with a
19047 // store-release barrier, so concurrent readers can depend on a non-null
19048 // value meaning the rest of the entry is safe to load without barriers.
19049 if (was_grown) {
19050 entry.Set<kInstanceCidOrSignature>(instance_class_id_or_signature);
19051 } else {
19052 entry.Set<kInstanceCidOrSignature, std::memory_order_release>(
19053 instance_class_id_or_signature);
19054 }
19055 break;
19056 default:
19057 UNREACHABLE();
19058 }
19059 set_num_occupied(old_num + 1);
19060 if (was_grown) {
19061 set_cache(data);
19062 }
19063 return loc.entry;
19064}
19065
19068 intptr_t num_inputs,
19069 const Object& instance_class_id_or_signature,
19070 const AbstractType& destination_type,
19071 const TypeArguments& instance_type_arguments,
19072 const TypeArguments& instantiator_type_arguments,
19073 const TypeArguments& function_type_arguments,
19074 const TypeArguments& instance_parent_function_type_arguments,
19075 const TypeArguments& instance_delayed_type_arguments) {
19076 switch (num_inputs) {
19077 case 7:
19079 destination_type.ptr()) {
19080 return false;
19081 }
19083 case 6:
19085 instance_delayed_type_arguments.ptr()) {
19086 return false;
19087 }
19089 case 5:
19091 instance_parent_function_type_arguments.ptr()) {
19092 return false;
19093 }
19095 case 4:
19097 function_type_arguments.ptr()) {
19098 return false;
19099 }
19101 case 3:
19103 instantiator_type_arguments.ptr()) {
19104 return false;
19105 }
19107 case 2:
19109 instance_type_arguments.ptr()) {
19110 return false;
19111 }
19113 case 1:
19114 // We don't need to perform load-acquire semantics when re-retrieving
19115 // the kInstanceCidOrSignature field, as this is performed only if the
19116 // entry is occupied, and occupied entries never change.
19118 instance_class_id_or_signature.ptr();
19119 default:
19120 UNREACHABLE();
19121 }
19122}
19123
19124SubtypeTestCache::KeyLocation SubtypeTestCache::FindKeyOrUnused(
19125 const Array& array,
19126 intptr_t num_inputs,
19127 const Object& instance_class_id_or_signature,
19128 const AbstractType& destination_type,
19129 const TypeArguments& instance_type_arguments,
19130 const TypeArguments& instantiator_type_arguments,
19131 const TypeArguments& function_type_arguments,
19132 const TypeArguments& instance_parent_function_type_arguments,
19133 const TypeArguments& instance_delayed_type_arguments) {
19134 // Fast case for empty STCs.
19135 if (array.ptr() == Object::empty_subtype_test_cache_array().ptr()) {
19136 return {0, false};
19137 }
19138 const bool is_hash = IsHash(array);
19140 const intptr_t num_entries = table.Length();
19141 // For a linear cache, start at the first entry and probe linearly. This can
19142 // be done because a linear cache always has at least one unoccupied entry
19143 // after all the occupied ones.
19144 intptr_t probe = 0;
19145 intptr_t probe_distance = 1;
19146 if (is_hash) {
19147 // For a hash-based cache, instead start at an entry determined by the hash
19148 // of the keys.
19149 //
19150 // If we have an instance cid, then just use that as our starting hash.
19151 uint32_t hash =
19152 instance_class_id_or_signature.IsFunctionType()
19153 ? FunctionType::Cast(instance_class_id_or_signature).Hash()
19154 : Smi::Cast(instance_class_id_or_signature).Value();
19155 switch (num_inputs) {
19156 case 7:
19157 hash = CombineHashes(hash, destination_type.Hash());
19159 case 6:
19160 hash = CombineHashes(hash, instance_delayed_type_arguments.Hash());
19162 case 5:
19163 hash =
19164 CombineHashes(hash, instance_parent_function_type_arguments.Hash());
19166 case 4:
19167 hash = CombineHashes(hash, function_type_arguments.Hash());
19169 case 3:
19170 hash = CombineHashes(hash, instantiator_type_arguments.Hash());
19172 case 2:
19173 hash = CombineHashes(hash, instance_type_arguments.Hash());
19175 case 1:
19176 break;
19177 default:
19178 UNREACHABLE();
19179 }
19181 probe = hash & (num_entries - 1);
19182 }
19183 while (true) {
19184 const auto& tuple = table.At(probe);
19185 if (tuple.Get<kInstanceCidOrSignature, std::memory_order_acquire>() ==
19186 Object::null()) {
19187 break;
19188 }
19190 tuple, num_inputs, instance_class_id_or_signature, destination_type,
19191 instance_type_arguments, instantiator_type_arguments,
19192 function_type_arguments, instance_parent_function_type_arguments,
19193 instance_delayed_type_arguments)) {
19194 return {probe, true};
19195 }
19196 // Advance probe by the current probing distance.
19197 probe = probe + probe_distance;
19198 if (is_hash) {
19199 // Wrap around if the probe goes off the end of the entries array.
19200 probe = probe & (num_entries - 1);
19201 // We had a collision, so increase the probe distance. See comment in
19202 // EnsureCapacityLocked for an explanation of how this hits all slots.
19203 probe_distance++;
19204 }
19205 }
19206 return {probe, false};
19207}
19208
19209ArrayPtr SubtypeTestCache::EnsureCapacity(Zone* zone,
19210 const Array& array,
19211 intptr_t new_occupied,
19212 bool* was_grown) const {
19213 ASSERT(new_occupied > NumberOfChecks());
19214 ASSERT(was_grown != nullptr);
19215 // How many entries are in the current array (including unoccupied entries).
19216 const intptr_t current_capacity = NumEntries(array);
19217
19218 // Early returns for cases where no growth is needed.
19219 *was_grown = false;
19220 const bool is_linear = IsLinear(array);
19221 if (is_linear) {
19222 // We need at least one unoccupied entry in addition to the occupied ones.
19223 if (current_capacity > new_occupied) return array.ptr();
19224 } else {
19225 if (LoadFactor(new_occupied, current_capacity) < kMaxLoadFactor) {
19226 return array.ptr();
19227 }
19228 }
19229
19230 // Every path from here should result in a new backing array.
19231 *was_grown = true;
19232 // Initially null for initializing unoccupied entries.
19233 auto& instance_cid_or_signature = Object::Handle(zone);
19234 if (new_occupied <= kMaxLinearCacheEntries) {
19236 // Not enough room for both the new entry and at least one unoccupied
19237 // entry, so grow the tuple capacity of the linear cache by about 50%,
19238 // ensuring that space for at least one new tuple is added, capping the
19239 // total number of occupied entries to the max allowed.
19240 const intptr_t new_capacity =
19241 Utils::Minimum(current_capacity + (current_capacity >> 1),
19243 1;
19244 const intptr_t cache_size = new_capacity * kTestEntryLength;
19245 ASSERT(cache_size <= kMaxLinearCacheSize);
19246 const auto& new_data =
19247 Array::Handle(zone, Array::Grow(array, cache_size, Heap::kOld));
19248 ASSERT(!new_data.IsNull());
19249 // No need to adjust old entries, as they are copied over by Array::Grow.
19250 // Just mark any new entries as unoccupied.
19251 SubtypeTestCacheTable table(new_data);
19252 for (intptr_t i = current_capacity; i < new_capacity; i++) {
19253 const auto& tuple = table.At(i);
19254 tuple.Set<kInstanceCidOrSignature>(instance_cid_or_signature);
19255 }
19256 return new_data.ptr();
19257 }
19258
19259 // Either we're converting a linear cache into a hash-based cache, or the
19260 // load factor of the hash-based cache has increased to the point where we
19261 // need to grow it.
19262 const intptr_t new_capacity =
19263 is_linear ? kNumInitialHashCacheEntries : 2 * current_capacity;
19264 // Because we use quadratic (actually triangle number) probing it is
19265 // important that the size is a power of two (otherwise we could fail to
19266 // find an empty slot). This is described in Knuth's The Art of Computer
19267 // Programming Volume 2, Chapter 6.4, exercise 20 (solution in the
19268 // appendix, 2nd edition).
19269 //
19270 // This is also important because when we do hash probing, we take the
19271 // calculated hash from the inputs and then calculate (hash % capacity) to get
19272 // the initial probe index. To ensure this is a fast calculation in the stubs,
19273 // we ensure the capacity is a power of 2, which allows (hash % capacity) to
19274 // be calculated as (hash & (capacity - 1)).
19275 ASSERT(Utils::IsPowerOfTwo(new_capacity));
19276 ASSERT(LoadFactor(new_occupied, new_capacity) < kMaxLoadFactor);
19277 const intptr_t new_size = new_capacity * kTestEntryLength;
19278 const auto& new_data =
19280 ASSERT(!new_data.IsNull());
19281 // Mark all the entries in new_data as unoccupied.
19282 SubtypeTestCacheTable to_table(new_data);
19283 for (const auto& tuple : to_table) {
19284 tuple.Set<kInstanceCidOrSignature>(instance_cid_or_signature);
19285 }
19286 // Finally, copy over the entries.
19287 auto& destination_type = AbstractType::Handle(zone);
19288 auto& instance_type_arguments = TypeArguments::Handle(zone);
19289 auto& instantiator_type_arguments = TypeArguments::Handle(zone);
19290 auto& function_type_arguments = TypeArguments::Handle(zone);
19291 auto& instance_parent_function_type_arguments = TypeArguments::Handle(zone);
19292 auto& instance_delayed_type_arguments = TypeArguments::Handle(zone);
19293 auto& test_result = Bool::Handle(zone);
19294 const SubtypeTestCacheTable from_table(array);
19295 const intptr_t used_inputs = num_inputs();
19296 for (intptr_t i = 0; i < current_capacity; i++) {
19297 const auto& from_tuple = from_table.At(i);
19298 // Skip unoccupied entries.
19299 if (from_tuple.Get<kInstanceCidOrSignature>() == Object::null()) continue;
19300 GetCheckFromArray(array, used_inputs, i, &instance_cid_or_signature,
19301 &destination_type, &instance_type_arguments,
19302 &instantiator_type_arguments, &function_type_arguments,
19303 &instance_parent_function_type_arguments,
19304 &instance_delayed_type_arguments, &test_result);
19305 // Since new_data has a different total capacity, we can't use the old
19306 // entry indexes, but must recalculate them.
19307 auto loc = FindKeyOrUnused(
19308 new_data, used_inputs, instance_cid_or_signature, destination_type,
19309 instance_type_arguments, instantiator_type_arguments,
19310 function_type_arguments, instance_parent_function_type_arguments,
19311 instance_delayed_type_arguments);
19312 ASSERT(!loc.present);
19313 const auto& to_tuple = to_table.At(loc.entry);
19314 to_tuple.Set<kTestResult>(test_result);
19315 switch (used_inputs) {
19316 case 7:
19317 to_tuple.Set<kDestinationType>(destination_type);
19319 case 6:
19321 instance_delayed_type_arguments);
19323 case 5:
19325 instance_parent_function_type_arguments);
19327 case 4:
19328 to_tuple.Set<kFunctionTypeArguments>(function_type_arguments);
19330 case 3:
19331 to_tuple.Set<kInstantiatorTypeArguments>(instantiator_type_arguments);
19333 case 2:
19334 to_tuple.Set<kInstanceTypeArguments>(instance_type_arguments);
19336 case 1:
19337 to_tuple.Set<kInstanceCidOrSignature>(instance_cid_or_signature);
19338 break;
19339 default:
19340 UNREACHABLE();
19341 }
19342 }
19343 return new_data.ptr();
19344}
19345
19347 intptr_t ix,
19348 Object* instance_class_id_or_signature,
19349 AbstractType* destination_type,
19350 TypeArguments* instance_type_arguments,
19351 TypeArguments* instantiator_type_arguments,
19352 TypeArguments* function_type_arguments,
19353 TypeArguments* instance_parent_function_type_arguments,
19354 TypeArguments* instance_delayed_type_arguments,
19355 Bool* test_result) const {
19357 ->isolate_group()
19358 ->subtype_test_cache_mutex()
19359 ->IsOwnedByCurrentThread());
19360 GetCurrentCheck(ix, instance_class_id_or_signature, destination_type,
19361 instance_type_arguments, instantiator_type_arguments,
19362 function_type_arguments,
19363 instance_parent_function_type_arguments,
19364 instance_delayed_type_arguments, test_result);
19365}
19366
19368 intptr_t ix,
19369 Object* instance_class_id_or_signature,
19370 AbstractType* destination_type,
19371 TypeArguments* instance_type_arguments,
19372 TypeArguments* instantiator_type_arguments,
19373 TypeArguments* function_type_arguments,
19374 TypeArguments* instance_parent_function_type_arguments,
19375 TypeArguments* instance_delayed_type_arguments,
19376 Bool* test_result) const {
19377 const Array& array = Array::Handle(cache());
19378 GetCheckFromArray(array, num_inputs(), ix, instance_class_id_or_signature,
19379 destination_type, instance_type_arguments,
19380 instantiator_type_arguments, function_type_arguments,
19381 instance_parent_function_type_arguments,
19382 instance_delayed_type_arguments, test_result);
19383}
19384
19385void SubtypeTestCache::GetCheckFromArray(
19386 const Array& array,
19387 intptr_t num_inputs,
19388 intptr_t ix,
19389 Object* instance_class_id_or_signature,
19390 AbstractType* destination_type,
19391 TypeArguments* instance_type_arguments,
19392 TypeArguments* instantiator_type_arguments,
19393 TypeArguments* function_type_arguments,
19394 TypeArguments* instance_parent_function_type_arguments,
19395 TypeArguments* instance_delayed_type_arguments,
19396 Bool* test_result) {
19397 ASSERT(array.ptr() != Object::empty_subtype_test_cache_array().ptr());
19398 SubtypeTestCacheTable entries(array);
19399 auto entry = entries[ix];
19400 // First get the field that determines occupancy. We have to do this with
19401 // load-acquire because some callers may not have the subtype test cache lock.
19402 *instance_class_id_or_signature =
19403 entry.Get<kInstanceCidOrSignature, std::memory_order_acquire>();
19404 // We should not be retrieving unoccupied entries.
19405 ASSERT(!instance_class_id_or_signature->IsNull());
19406 switch (num_inputs) {
19407 case 7:
19408 *destination_type = entry.Get<kDestinationType>();
19410 case 6:
19411 *instance_delayed_type_arguments =
19414 case 5:
19415 *instance_parent_function_type_arguments =
19418 case 4:
19419 *function_type_arguments = entry.Get<kFunctionTypeArguments>();
19421 case 3:
19422 *instantiator_type_arguments = entry.Get<kInstantiatorTypeArguments>();
19424 case 2:
19425 *instance_type_arguments = entry.Get<kInstanceTypeArguments>();
19427 case 1:
19428 break;
19429 default:
19430 UNREACHABLE();
19431 }
19432 *test_result = entry.Get<kTestResult>();
19433}
19434
19436 intptr_t* ix,
19437 Object* instance_class_id_or_signature,
19438 AbstractType* destination_type,
19439 TypeArguments* instance_type_arguments,
19440 TypeArguments* instantiator_type_arguments,
19441 TypeArguments* function_type_arguments,
19442 TypeArguments* instance_parent_function_type_arguments,
19443 TypeArguments* instance_delayed_type_arguments,
19444 Bool* test_result) const {
19445 ASSERT(ix != nullptr);
19446 for (intptr_t i = *ix; i < NumEntries(); i++) {
19448 ->isolate_group()
19449 ->subtype_test_cache_mutex()
19450 ->IsOwnedByCurrentThread());
19451 if (IsOccupied(i)) {
19452 GetCurrentCheck(i, instance_class_id_or_signature, destination_type,
19453 instance_type_arguments, instantiator_type_arguments,
19454 function_type_arguments,
19455 instance_parent_function_type_arguments,
19456 instance_delayed_type_arguments, test_result);
19457 *ix = i + 1;
19458 return true;
19459 }
19460 }
19461 return false;
19462}
19463
19465 const Object& instance_class_id_or_signature,
19466 const AbstractType& destination_type,
19467 const TypeArguments& instance_type_arguments,
19468 const TypeArguments& instantiator_type_arguments,
19469 const TypeArguments& function_type_arguments,
19470 const TypeArguments& instance_parent_function_type_arguments,
19471 const TypeArguments& instance_delayed_type_arguments,
19472 intptr_t* index,
19473 Bool* result) const {
19474 const auto& data = Array::Handle(cache());
19475 auto loc = FindKeyOrUnused(
19476 data, num_inputs(), instance_class_id_or_signature, destination_type,
19477 instance_type_arguments, instantiator_type_arguments,
19478 function_type_arguments, instance_parent_function_type_arguments,
19479 instance_delayed_type_arguments);
19480 if (loc.present) {
19481 if (index != nullptr) {
19482 *index = loc.entry;
19483 }
19484 if (result != nullptr) {
19485 SubtypeTestCacheTable entries(data);
19486 const auto& entry = entries[loc.entry];
19487 // A positive result from FindKeyOrUnused means that load-acquire is not
19488 // needed, as an occupied entry never changes for a given backing array.
19489 *result = entry.Get<kTestResult>();
19490 ASSERT(!result->IsNull());
19491 }
19492 }
19493 return loc.present;
19494}
19495
19498 intptr_t index,
19499 const char* line_prefix) const {
19501 ->isolate_group()
19502 ->subtype_test_cache_mutex()
19503 ->IsOwnedByCurrentThread());
19504 WriteCurrentEntryToBuffer(zone, buffer, index, line_prefix);
19505}
19506
19509 const char* line_prefix) const {
19511 ->isolate_group()
19512 ->subtype_test_cache_mutex()
19513 ->IsOwnedByCurrentThread());
19514 WriteToBufferUnlocked(zone, buffer, line_prefix);
19515}
19516
// Writes a human-readable description of the cache entry at |index|: first
// the raw entry words, then a labeled breakdown of each non-null input and
// the cached test result. With a null |line_prefix| fields are separated by
// ", " on one line; otherwise each field starts a new prefixed line.
void SubtypeTestCache::WriteCurrentEntryToBuffer(
    Zone* zone,
    // NOTE(review): a parameter line (presumably `BaseTextBuffer* buffer,`)
    // is missing from this listing (extraction artifact).
    intptr_t index,
    const char* line_prefix) const {
  const char* separator =
      line_prefix == nullptr ? ", " : OS::SCreate(zone, "\n%s", line_prefix);
  auto& instance_class_id_or_signature = Object::Handle(zone);
  auto& destination_type = AbstractType::Handle(zone);
  auto& instance_type_arguments = TypeArguments::Handle(zone);
  auto& instantiator_type_arguments = TypeArguments::Handle(zone);
  auto& function_type_arguments = TypeArguments::Handle(zone);
  auto& instance_parent_function_type_arguments = TypeArguments::Handle(zone);
  auto& instance_delayed_type_arguments = TypeArguments::Handle(zone);
  auto& result = Bool::Handle(zone);
  GetCurrentCheck(index, &instance_class_id_or_signature, &destination_type,
                  &instance_type_arguments, &instantiator_type_arguments,
                  &function_type_arguments,
                  &instance_parent_function_type_arguments,
                  &instance_delayed_type_arguments, &result);
  // Raw words of the entry (inputs followed by destination type and result).
  buffer->Printf(
      "%" Pd ": [ %#" Px ", %#" Px ", %#" Px ", %#" Px ", %#" Px ", %#" Px
      ", %#" Px ", %#" Px " ]",
      index, static_cast<uword>(instance_class_id_or_signature.ptr()),
      static_cast<uword>(instance_type_arguments.ptr()),
      static_cast<uword>(instantiator_type_arguments.ptr()),
      static_cast<uword>(function_type_arguments.ptr()),
      static_cast<uword>(instance_parent_function_type_arguments.ptr()),
      static_cast<uword>(instance_delayed_type_arguments.ptr()),
      static_cast<uword>(destination_type.ptr()),
      static_cast<uword>(result.ptr()));
  // A Smi first input is a class id; otherwise it is a closure signature.
  if (instance_class_id_or_signature.IsSmi()) {
    buffer->Printf("%sclass id: %" Pd "", separator,
                   Smi::Cast(instance_class_id_or_signature).Value());
  } else {
    buffer->Printf(
        "%ssignature: %s", separator,
        FunctionType::Cast(instance_class_id_or_signature).ToCString());
  }
  if (!instance_type_arguments.IsNull()) {
    if (instance_class_id_or_signature.IsSmi()) {
      buffer->Printf("%sinstance type arguments: %s", separator,
                     instance_type_arguments.ToCString());
    } else {
      ASSERT(instance_class_id_or_signature.IsFunctionType());
      buffer->Printf("%sclosure instantiator function type arguments: %s",
                     separator, instance_type_arguments.ToCString());
    }
  }
  if (!instantiator_type_arguments.IsNull()) {
    buffer->Printf("%sinstantiator type arguments: %s", separator,
                   instantiator_type_arguments.ToCString());
  }
  if (!function_type_arguments.IsNull()) {
    buffer->Printf("%sfunction type arguments: %s", separator,
                   function_type_arguments.ToCString());
  }
  if (!instance_parent_function_type_arguments.IsNull()) {
    buffer->Printf("%sclosure parent function type arguments: %s", separator,
                   instance_parent_function_type_arguments.ToCString());
  }
  if (!instance_delayed_type_arguments.IsNull()) {
    buffer->Printf("%sclosure delayed function type arguments: %s", separator,
                   instance_delayed_type_arguments.ToCString());
  }
  if (!destination_type.IsNull()) {
    buffer->Printf("%sdestination type: %s", separator,
                   destination_type.ToCString());
    if (!destination_type.IsInstantiated()) {
      // Also show what the destination type instantiates to with this
      // entry's type-argument inputs.
      AbstractType& test_type = AbstractType::Handle(
          zone, destination_type.InstantiateFrom(instantiator_type_arguments,
                                                 function_type_arguments,
      // NOTE(review): a line is missing here (the remaining InstantiateFrom
      // arguments); verify against upstream object.cc.
      const auto type_class_id = test_type.type_class_id();
      buffer->Printf("%sinstantiated type: %s", separator,
                     test_type.ToCString());
      buffer->Printf("%sinstantiated type class id: %d", separator,
                     type_class_id);
    }
  }
  buffer->Printf("%sresult: %s", separator, result.ToCString());
}
19599
19600void SubtypeTestCache::WriteToBufferUnlocked(Zone* zone,
19601 BaseTextBuffer* buffer,
19602 const char* line_prefix) const {
19603 const char* separator =
19604 line_prefix == nullptr ? " " : OS::SCreate(zone, "\n%s", line_prefix);
19605 const char* internal_line_prefix =
19606 line_prefix == nullptr
19607 ? nullptr
19608 : OS::SCreate(zone, "%s%s", line_prefix, line_prefix);
19609 const intptr_t num_entries = NumEntries();
19610 buffer->Printf("SubtypeTestCache(%" Pd ", %" Pd "", num_inputs(),
19611 num_occupied());
19612 for (intptr_t i = 0; i < num_entries; i++) {
19613 if (!IsOccupied(i)) continue;
19614 buffer->Printf(",%s{", separator);
19615 WriteCurrentEntryToBuffer(zone, buffer, i, internal_line_prefix);
19616 buffer->Printf(line_prefix != nullptr ? "}" : " }");
19617 }
19618 buffer->AddString(line_prefix != nullptr && num_entries != 0 ? "\n)" : ")");
19619}
19620
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this body empties the cache: occupancy is zeroed and the
// backing store is reset to the shared empty subtype-test-cache array.
  set_num_occupied(0);
  set_cache(Object::empty_subtype_test_cache_array());
}
19625
// NOTE(review): the signature and leading assert line(s) are missing from
// this listing (extraction artifact); this appears to compare two caches
// for equal contents while the cache mutex is held.
      ->isolate_group()
      ->subtype_test_cache_mutex()
      ->IsOwnedByCurrentThread());
  // Identical objects are trivially equal.
  if (ptr() == other.ptr()) {
    return true;
  }
  if (num_inputs() != other.num_inputs()) return false;
  if (num_occupied() != other.num_occupied()) return false;
  // Same shape and occupancy: fall back to element-wise array comparison.
  return Array::Handle(cache()).Equals(Array::Handle(other.cache()));
}
19638
// Returns a copy of this cache with its own copy of the backing array.
SubtypeTestCachePtr SubtypeTestCache::Copy(Thread* thread) const {
  ASSERT(thread->isolate_group()
  // NOTE(review): line(s) missing here (rest of the mutex-held assertion);
  // verify against upstream object.cc.
  if (IsNull()) {
    return SubtypeTestCache::null();
  }
  Zone* const zone = thread->zone();
  // STC caches are only copied on write if there are not enough unoccupied
  // entries to store a new one, so we need to copy the array.
  const auto& result =
  // NOTE(review): a line is missing here (allocation of the new
  // SubtypeTestCache handle); verify against upstream object.cc.
  auto& entry_cache = Array::Handle(zone, cache());
  entry_cache = entry_cache.Copy();
  result.set_cache(entry_cache);
  result.set_num_occupied(num_occupied());
  return result.ptr();
}
19657
// Returns whether the entry at |index| is in use, i.e., whether its slot in
// the backing array is non-null. The load uses acquire ordering so that a
// concurrently published entry is observed fully initialized.
bool SubtypeTestCache::IsOccupied(intptr_t index) const {
  ASSERT(!IsNull());
  ASSERT(index < NumEntries());
  const intptr_t cache_index =
  // NOTE(review): a line is missing here (conversion of |index| into a flat
  // backing-array index); verify against upstream object.cc.
  NoSafepointScope no_safepoint;
  return cache()->untag()->element<std::memory_order_acquire>(cache_index) !=
         Object::null();
}
19667
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this computes how many cache inputs are needed for a check
// against the given type.
  if (type.IsType()) {
    if (type.IsInstantiated()) return 2;
    if (type.IsInstantiated(kFunctions)) return 3;
    return 4;
  }
  // Default to all inputs except for the destination type, which must be
  // statically known, otherwise this method wouldn't be called.
  static_assert(kDestinationType == kMaxInputs - 1,
                "destination type is not last input");
  return kMaxInputs - 1;
}
19680
19681const char* SubtypeTestCache::ToCString() const {
19682 auto const zone = Thread::Current()->zone();
19683 ZoneTextBuffer buffer(zone);
19684 WriteToBufferUnlocked(zone, &buffer);
19685 return buffer.buffer();
19686}
19687
// Allocates a new LoadingUnit with the given |id| and |parent|.
LoadingUnitPtr LoadingUnit::New(intptr_t id, const LoadingUnit& parent) {
  // NOTE(review): a line is missing here (extraction artifact); verify
  // against upstream object.cc.
  // LoadingUnit objects are long living objects, allocate them in the
  // old generation.
  auto result = Object::Allocate<LoadingUnit>(Heap::kOld);
  NoSafepointScope scope;
  // NOTE(review): a line is missing here (additional field initialization);
  // verify against upstream object.cc.
  result->untag()->packed_fields_.Update<UntaggedLoadingUnit::IdBits>(id);
  result->untag()->set_parent(parent.ptr());
  return result;
}
19699
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this body stores |value| as the unit's base objects array.
  untag()->set_base_objects(value.ptr());
}
19703
19704const char* LoadingUnit::ToCString() const {
19705 return "LoadingUnit";
19706}
19707
19711}
19712
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this appears to mark the unit loaded on success (null error
// message) and then notify Dart library code via _completeLoads.
    bool transient_error) const {
  // A null |error_message| means the load succeeded.
  set_loaded(error_message.IsNull());

  // NOTE(review): a line is missing here (extraction artifact).
  const String& sel = String::Handle(String::New("_completeLoads"));
  // NOTE(review): a line is missing here (resolution of |func| from the
  // selector above); verify against upstream object.cc.
  ASSERT(!func.IsNull());
  const Array& args = Array::Handle(Array::New(3));
  args.SetAt(0, Smi::Handle(Smi::New(id())));
  args.SetAt(1, error_message);
  args.SetAt(2, Bool::Get(transient_error));
  return DartEntry::InvokeFunction(func, args);
}
19727
// The assignment to loading units here must match that in
// AssignLoadingUnitsCodeVisitor, which runs after compilation is done.
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this resolves a Function's loading unit via its owning class
// and library, FATALing if none is assigned.
  Thread* thread = Thread::Current();
  // NOTE(review): line(s) missing here (extraction artifact).

  Class& cls = thread->ClassHandle();
  Library& lib = thread->LibraryHandle();
  LoadingUnit& unit = thread->LoadingUnitHandle();

  cls = function.Owner();
  lib = cls.library();
  unit = lib.loading_unit();
  if (unit.IsNull()) {
    FATAL("Unable to find loading unit of %s (class %s, library %s)",
          function.ToFullyQualifiedCString(), cls.ToCString(), lib.ToCString());
  }
  return unit.id();
}
19749
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this resolves a Code object's loading unit.
  // Stubs are always attributed to the root loading unit.
  if (code.IsStubCode() || code.IsTypeTestStubCode() ||
      code.IsAllocationStubCode()) {
    return LoadingUnit::kRootId;
  } else {
    Thread* thread = Thread::Current();
    // NOTE(review): line(s) missing here (extraction artifact).

    Class& cls = thread->ClassHandle();
    Library& lib = thread->LibraryHandle();
    LoadingUnit& unit = thread->LoadingUnitHandle();
    Function& func = thread->FunctionHandle();

    if (code.IsFunctionCode()) {
      // Function code inherits the loading unit of its owner's library.
      func ^= code.function();
      cls = func.Owner();
      lib = cls.library();
      unit = lib.loading_unit();
      ASSERT(!unit.IsNull());
      return unit.id();
    } else {
      UNREACHABLE();
      // NOTE(review): a line is missing here (extraction artifact).
    }
  }
}
19779
19780const char* Error::ToErrorCString() const {
19781 if (IsNull()) {
19782 return "Error: null";
19783 }
19784 UNREACHABLE();
19785 return "Error";
19786}
19787
19788const char* Error::ToCString() const {
19789 if (IsNull()) {
19790 return "Error: null";
19791 }
19792 // Error is an abstract class. We should never reach here.
19793 UNREACHABLE();
19794 return "Error";
19795}
19796
// Allocates an empty ApiError in old space.
ApiErrorPtr ApiError::New() {
  // NOTE(review): a line is missing here (extraction artifact); verify
  // against upstream object.cc.
  return Object::Allocate<ApiError>(Heap::kOld);
}
19801
// Allocates an ApiError in |space| carrying |message|. In non-product
// builds, optionally dumps a stack trace at the point of creation when
// FLAG_print_stacktrace_at_api_error is set.
ApiErrorPtr ApiError::New(const String& message, Heap::Space space) {
#ifndef PRODUCT
  if (FLAG_print_stacktrace_at_api_error) {
    OS::PrintErr("ApiError: %s\n", message.ToCString());
    Profiler::DumpStackTrace(false /* for_crash */);
  }
#endif  // !PRODUCT

  // NOTE(review): a line is missing here (extraction artifact); verify
  // against upstream object.cc.
  const auto& result = ApiError::Handle(Object::Allocate<ApiError>(space));
  result.set_message(message);
  return result.ptr();
}
19815
19816void ApiError::set_message(const String& message) const {
19817 untag()->set_message(message.ptr());
19818}
19819
19820const char* ApiError::ToErrorCString() const {
19821 const String& msg_str = String::Handle(message());
19822 return msg_str.ToCString();
19823}
19824
19825const char* ApiError::ToCString() const {
19826 return "ApiError";
19827}
19828
// Allocates an empty LanguageError in old space.
LanguageErrorPtr LanguageError::New() {
  // NOTE(review): a line is missing here (extraction artifact); verify
  // against upstream object.cc.
  return Object::Allocate<LanguageError>(Heap::kOld);
}
19833
// Allocates a LanguageError in |space| from a printf-style |format|/|args|
// pair, recording the previous error, originating script position, whether
// to report after the token, and the report kind.
LanguageErrorPtr LanguageError::NewFormattedV(const Error& prev_error,
                                              const Script& script,
                                              TokenPosition token_pos,
                                              bool report_after_token,
                                              Report::Kind kind,
                                              Heap::Space space,
                                              const char* format,
                                              va_list args) {
  // NOTE(review): a line is missing here (extraction artifact); verify
  // against upstream object.cc.
  const auto& result =
      LanguageError::Handle(Object::Allocate<LanguageError>(space));
  result.set_previous_error(prev_error);
  result.set_script(script);
  result.set_token_pos(token_pos);
  result.set_report_after_token(report_after_token);
  result.set_kind(kind);
  result.set_message(
  // NOTE(review): a line is missing here (the formatted String argument to
  // set_message); verify against upstream object.cc.
  return result.ptr();
}
19854
19855LanguageErrorPtr LanguageError::NewFormatted(const Error& prev_error,
19856 const Script& script,
19857 TokenPosition token_pos,
19858 bool report_after_token,
19859 Report::Kind kind,
19860 Heap::Space space,
19861 const char* format,
19862 ...) {
19863 va_list args;
19865 LanguageErrorPtr result = LanguageError::NewFormattedV(
19866 prev_error, script, token_pos, report_after_token, kind, space, format,
19867 args);
19868 NoSafepointScope no_safepoint;
19869 va_end(args);
19870 return result;
19871}
19872
// Allocates a LanguageError in |space| with an already-formatted message.
LanguageErrorPtr LanguageError::New(const String& formatted_message,
                                    Report::Kind kind,
                                    Heap::Space space) {
  // NOTE(review): a line is missing here (extraction artifact); verify
  // against upstream object.cc.
  const auto& result =
      LanguageError::Handle(Object::Allocate<LanguageError>(space));
  result.set_formatted_message(formatted_message);
  result.set_kind(kind);
  return result.ptr();
}
19883
19884void LanguageError::set_previous_error(const Error& value) const {
19885 untag()->set_previous_error(value.ptr());
19886}
19887
19888void LanguageError::set_script(const Script& value) const {
19889 untag()->set_script(value.ptr());
19890}
19891
// Stores the token position at which this error was reported.
void LanguageError::set_token_pos(TokenPosition token_pos) const {
  // NOTE(review): a line is missing here (extraction artifact); verify
  // against upstream object.cc.
  StoreNonPointer(&untag()->token_pos_, token_pos);
}
19896
19897void LanguageError::set_report_after_token(bool value) const {
19898 StoreNonPointer(&untag()->report_after_token_, value);
19899}
19900
19901void LanguageError::set_kind(uint8_t value) const {
19902 StoreNonPointer(&untag()->kind_, value);
19903}
19904
19905void LanguageError::set_message(const String& value) const {
19906 untag()->set_message(value.ptr());
19907}
19908
19909void LanguageError::set_formatted_message(const String& value) const {
19910 untag()->set_formatted_message(value.ptr());
19911}
19912
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this lazily builds, caches and returns the formatted message.
  // Return the cached text if already computed.
  if (formatted_message() != String::null()) {
    return formatted_message();
  }
  // NOTE(review): line(s) missing here (construction of |result| from the
  // script/token position and message); verify against upstream object.cc.
      report_after_token(), String::Handle(message())));
  // Prepend previous error message.
  const Error& prev_error = Error::Handle(previous_error());
  if (!prev_error.IsNull()) {
    // NOTE(review): line(s) missing here (concatenation with the previous
    // error's text); verify against upstream object.cc.
  }
  set_formatted_message(result);
  return result.ptr();
}
19929
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); the error text is the lazily formatted message.
  const String& msg_str = String::Handle(FormatMessage());
  return msg_str.ToCString();
}
19934
19935const char* LanguageError::ToCString() const {
19936 return "LanguageError";
19937}
19938
// Allocates an UnhandledException in |space| wrapping |exception| and its
// |stacktrace|.
UnhandledExceptionPtr UnhandledException::New(const Instance& exception,
                                              const Instance& stacktrace,
                                              Heap::Space space) {
  // NOTE(review): a line is missing here (extraction artifact); verify
  // against upstream object.cc.
  const auto& result =
      UnhandledException::Handle(Object::Allocate<UnhandledException>(space));
  result.set_exception(exception);
  result.set_stacktrace(stacktrace);
  return result.ptr();
}
19949
// Allocates an empty UnhandledException in |space|.
UnhandledExceptionPtr UnhandledException::New(Heap::Space space) {
  // NOTE(review): a line is missing here (extraction artifact); verify
  // against upstream object.cc.
  return Object::Allocate<UnhandledException>(space);
}
19954
19955void UnhandledException::set_exception(const Instance& exception) const {
19956 untag()->set_exception(exception.ptr());
19957}
19958
19959void UnhandledException::set_stacktrace(const Instance& stacktrace) const {
19960 untag()->set_stacktrace(stacktrace.ptr());
19961}
19962
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this renders the wrapped exception and stack trace as
// "Unhandled exception:\n<exception>\n<stack>".
  Thread* thread = Thread::Current();
  auto isolate_group = thread->isolate_group();
  // Dart code (toString) is invoked below; block reload for the duration.
  NoReloadScope no_reload_scope(thread);
  HANDLESCOPE(thread);
  Object& strtmp = Object::Handle();
  const char* exc_str;
  // The object store's canonical OOM and stack-overflow exceptions get
  // fixed strings rather than a Dart toString() call.
  if (exception() == isolate_group->object_store()->out_of_memory()) {
    exc_str = "Out of Memory";
  } else if (exception() == isolate_group->object_store()->stack_overflow()) {
    exc_str = "Stack Overflow";
  } else {
    const Instance& exc = Instance::Handle(exception());
    strtmp = DartLibraryCalls::ToString(exc);
    if (!strtmp.IsError()) {
      exc_str = strtmp.ToCString();
    } else {
      // toString() itself failed; fall back to a placeholder.
      exc_str = "<Received error while converting exception to string>";
    }
  }
  const Instance& stack = Instance::Handle(stacktrace());
  const char* stack_str;
  if (stack.IsNull()) {
    stack_str = "null";
  } else if (stack.IsStackTrace()) {
    stack_str = StackTrace::Cast(stack).ToCString();
  } else {
    // The stored "stacktrace" may be an arbitrary instance; stringify it
    // via Dart toString().
    strtmp = DartLibraryCalls::ToString(stack);
    if (!strtmp.IsError()) {
      stack_str = strtmp.ToCString();
    } else {
      stack_str = "<Received error while converting stack trace to string>";
    }
  }
  return OS::SCreate(thread->zone(), "Unhandled exception:\n%s\n%s", exc_str,
                     stack_str);
}
20000
20001const char* UnhandledException::ToCString() const {
20002 return "UnhandledException";
20003}
20004
// Allocates an UnwindError in |space| carrying |message|; the
// is_user_initiated flag starts out false.
UnwindErrorPtr UnwindError::New(const String& message, Heap::Space space) {
  // NOTE(review): a line is missing here (extraction artifact); verify
  // against upstream object.cc.
  const auto& result =
      UnwindError::Handle(Object::Allocate<UnwindError>(space));
  result.set_message(message);
  ASSERT_EQUAL(result.is_user_initiated(), false);
  return result.ptr();
}
20013
20014void UnwindError::set_message(const String& message) const {
20015 untag()->set_message(message.ptr());
20016}
20017
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this body stores the is_user_initiated flag.
  StoreNonPointer(&untag()->is_user_initiated_, value);
}
20021
20022const char* UnwindError::ToErrorCString() const {
20023 const String& msg_str = String::Handle(message());
20024 return msg_str.ToCString();
20025}
20026
20027const char* UnwindError::ToCString() const {
20028 return "UnwindError";
20029}
20030
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this appears to resolve and invoke the getter for a name on
// this instance, falling back to method extraction in precompiled mode.
    bool respect_reflectable,
    bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  Class& klass = Class::Handle(zone, clazz());
  CHECK_ERROR(klass.EnsureIsFinalized(thread));
  const auto& inst_type_args =
      klass.NumTypeArguments() > 0
      // NOTE(review): a line is missing here (the true branch of this
      // ternary); verify against upstream object.cc.
          : Object::null_type_arguments();

  const String& internal_getter_name =
      String::Handle(zone, Field::GetterName(getter_name));
  // NOTE(review): a line is missing here (declaration of |function|);
  // verify against upstream object.cc.
      zone,
      Resolver::ResolveDynamicAnyArgs(zone, klass, internal_getter_name,
                                      /*allow_add=*/!FLAG_precompiled_mode));

  if (!function.IsNull() && check_is_entrypoint) {
    // The getter must correspond to either an entry-point field or a getter
    // method explicitly marked.
    Field& field = Field::Handle(zone);
    if (function.kind() == UntaggedFunction::kImplicitGetter) {
      field = function.accessor_field();
    }
    if (!field.IsNull()) {
      // NOTE(review): a line is missing here (the entry-point check on
      // |field|); verify against upstream object.cc.
    } else {
      CHECK_ERROR(function.VerifyCallEntryPoint());
    }
  }

  // Check for method extraction when method extractors are not lazily created.
  if (function.IsNull() && FLAG_precompiled_mode) {
    function = Resolver::ResolveDynamicAnyArgs(zone, klass, getter_name,
                                               /*allow_add=*/false);

    if (!function.IsNull() && check_is_entrypoint) {
      CHECK_ERROR(function.VerifyClosurizedEntryPoint());
    }

    if (!function.IsNull() && function.SafeToClosurize()) {
      // Return an implicit closure of the found method.
      const Function& closure_function =
          Function::Handle(zone, function.ImplicitClosureFunction());
      return closure_function.ImplicitInstanceClosure(*this);
    }
  }

  const int kTypeArgsLen = 0;
  const int kNumArgs = 1;
  const Array& args = Array::Handle(zone, Array::New(kNumArgs));
  args.SetAt(0, *this);
  const Array& args_descriptor = Array::Handle(
      zone,
      ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(), Heap::kNew));

  return InvokeInstanceFunction(thread, *this, function, internal_getter_name,
                                args, args_descriptor, respect_reflectable,
                                inst_type_args);
}
20093
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this appears to resolve and invoke the setter for a name on
// this instance with |value| as the argument.
    const Instance& value,
    bool respect_reflectable,
    bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  const Class& klass = Class::Handle(zone, clazz());
  CHECK_ERROR(klass.EnsureIsFinalized(thread));
  const auto& inst_type_args =
      klass.NumTypeArguments() > 0
      // NOTE(review): a line is missing here (the true branch of this
      // ternary); verify against upstream object.cc.
          : Object::null_type_arguments();

  const String& internal_setter_name =
      String::Handle(zone, Field::SetterName(setter_name));
  const Function& setter = Function::Handle(
      zone,
      Resolver::ResolveDynamicAnyArgs(zone, klass, internal_setter_name,
                                      /*allow_add=*/!FLAG_precompiled_mode));

  if (check_is_entrypoint) {
    // The setter must correspond to either an entry-point field or a setter
    // method explicitly marked.
    Field& field = Field::Handle(zone);
    if (setter.kind() == UntaggedFunction::kImplicitSetter) {
      field = setter.accessor_field();
    }
    if (!field.IsNull()) {
      // NOTE(review): a line is missing here (the entry-point check on
      // |field|); verify against upstream object.cc.
    } else if (!setter.IsNull()) {
      // NOTE(review): a line is missing here (the entry-point check on
      // |setter|); verify against upstream object.cc.
    }
  }

  const int kTypeArgsLen = 0;
  const int kNumArgs = 2;
  const Array& args = Array::Handle(zone, Array::New(kNumArgs));
  args.SetAt(0, *this);
  args.SetAt(1, value);
  const Array& args_descriptor = Array::Handle(
      zone,
      ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(), Heap::kNew));

  return InvokeInstanceFunction(thread, *this, setter, internal_setter_name,
                                args, args_descriptor, respect_reflectable,
                                inst_type_args);
}
20142
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this appears to resolve and invoke an instance method by name,
// falling back to invoking call() on the result of a matching getter.
    const Array& args,
    const Array& arg_names,
    bool respect_reflectable,
    bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Class& klass = Class::Handle(zone, clazz());
  CHECK_ERROR(klass.EnsureIsFinalized(thread));

  // NOTE(review): line(s) missing here (declaration of |function| and the
  // start of the ResolveDynamicAnyArgs call); verify upstream object.cc.
      zone,
      /*allow_add=*/!FLAG_precompiled_mode));

  if (!function.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(function.VerifyCallEntryPoint());
  }

  // We don't pass any explicit type arguments, which will be understood as
  // using dynamic for any function type arguments by lower layers.
  const int kTypeArgsLen = 0;
  const Array& args_descriptor = Array::Handle(
      zone, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(),
                                          arg_names, Heap::kNew));

  const auto& inst_type_args =
      klass.NumTypeArguments() > 0
      // NOTE(review): a line is missing here (the true branch of this
      // ternary); verify against upstream object.cc.
          : Object::null_type_arguments();

  if (function.IsNull()) {
    // Didn't find a method: try to find a getter and invoke call on its result.
    const String& getter_name =
    // NOTE(review): a line is missing here (construction of the getter
    // name); verify against upstream object.cc.
    function =
        Resolver::ResolveDynamicAnyArgs(zone, klass, getter_name,
                                        /*allow_add=*/!FLAG_precompiled_mode);
    if (!function.IsNull()) {
      if (check_is_entrypoint) {
        // NOTE(review): a line is missing here (entry-point verification);
        // verify against upstream object.cc.
      }
      ASSERT(function.kind() != UntaggedFunction::kMethodExtractor);
      // Invoke the getter.
      const int kNumArgs = 1;
      const Array& getter_args = Array::Handle(zone, Array::New(kNumArgs));
      getter_args.SetAt(0, *this);
      const Array& getter_args_descriptor = Array::Handle(
      // NOTE(review): a line is missing here (start of the NewBoxed call);
      // verify against upstream object.cc.
          kTypeArgsLen, getter_args.Length(), Heap::kNew));
      const Object& getter_result = Object::Handle(
          zone, InvokeInstanceFunction(thread, *this, function, getter_name,
                                       getter_args, getter_args_descriptor,
                                       respect_reflectable, inst_type_args));
      if (getter_result.IsError()) {
        return getter_result.ptr();
      }
      // Replace the closure as the receiver in the arguments list.
      args.SetAt(0, getter_result);
      return DartEntry::InvokeClosure(thread, args, args_descriptor);
    }
  }

  // Found an ordinary method.
  return InvokeInstanceFunction(thread, *this, function, function_name, args,
                                args_descriptor, respect_reflectable,
                                inst_type_args);
}
20211
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this body delegates hashCode to Dart library code.
  // TODO(koda): Optimize for all builtin classes and all classes
  // that do not override hashCode.
  return DartLibraryCalls::HashCode(*this);
}
20217
// Keep in sync with AsmIntrinsifier::Object_getHash.
// Returns (computing and caching if needed) this instance's identity hash.
// Integers hash to themselves; null/bool/double get deterministic hashes;
// everything else gets a random non-zero hash cached on the object/heap.
IntegerPtr Instance::IdentityHashCode(Thread* thread) const {
  if (IsInteger()) return Integer::Cast(*this).ptr();

#if defined(HASH_IN_OBJECT_HEADER)
  intptr_t hash = Object::GetCachedHash(ptr());
#else
  intptr_t hash = thread->heap()->GetHash(ptr());
#endif
  if (hash == 0) {
    if (IsNull()) {
      // NOTE(review): a line is missing here (the hash assignment for
      // null); verify against upstream object.cc.
    } else if (IsBool()) {
      hash = Bool::Cast(*this).value() ? kTrueIdentityHash : kFalseIdentityHash;
    } else if (IsDouble()) {
      double val = Double::Cast(*this).value();
      // Doubles with an exact int64 value hash like that integer.
      if ((val >= kMinInt64RepresentableAsDouble) &&
      // NOTE(review): a line is missing here (the matching upper-bound
      // comparison); verify against upstream object.cc.
        int64_t ival = static_cast<int64_t>(val);
        if (static_cast<double>(ival) == val) {
          return Integer::New(ival);
        }
      }

      // Otherwise fold the raw bit pattern into a Smi-ranged hash.
      uint64_t uval = bit_cast<uint64_t>(val);
      hash = ((uval >> 32) ^ (uval)) & kSmiMax;
    } else {
      // Random non-zero hash for ordinary instances.
      do {
        hash = thread->random()->NextUInt32() & 0x3FFFFFFF;
      } while (hash == 0);
    }

    // Publish the hash; if another thread won the race, adopt its value.
#if defined(HASH_IN_OBJECT_HEADER)
    hash = Object::SetCachedHashIfNotSet(ptr(), hash);
#else
    hash = thread->heap()->SetHashIfNotSet(ptr(), hash);
#endif
  }
  return Smi::New(hash);
}
20258
// Structural equality used during canonicalization: identical object, or
// same class, same size, and bit-identical field contents compared
// compressed-word by compressed-word.
bool Instance::CanonicalizeEquals(const Instance& other) const {
  if (this->ptr() == other.ptr()) {
    return true;  // "===".
  }

  if (other.IsNull() || (this->clazz() != other.clazz())) {
    return false;
  }

  {
    NoSafepointScope no_safepoint;
    // Raw bits compare.
    const intptr_t instance_size = SizeFromClass();
    ASSERT(instance_size != 0);
    const intptr_t other_instance_size = other.SizeFromClass();
    ASSERT(other_instance_size != 0);
    if (instance_size != other_instance_size) {
      return false;
    }
    uword this_addr = reinterpret_cast<uword>(this->untag());
    uword other_addr = reinterpret_cast<uword>(other.untag());
    for (intptr_t offset = Instance::NextFieldOffset(); offset < instance_size;
    // NOTE(review): the loop-increment line is missing here (extraction
    // artifact); verify against upstream object.cc.
      if ((reinterpret_cast<CompressedObjectPtr*>(this_addr + offset)
               ->Decompress(untag()->heap_base())) !=
          (reinterpret_cast<CompressedObjectPtr*>(other_addr + offset)
               ->Decompress(untag()->heap_base()))) {
        return false;
      }
    }
  }
  return true;
}
20292
20293bool Symbol::IsSymbolCid(Thread* thread, classid_t class_id) {
20294 auto object_store = thread->isolate_group()->object_store();
20295 return Class::GetClassId(object_store->symbol_class()) == class_id;
20296}
20297
// Must be kept in sync with Symbol.hashCode in symbol_patch.dart
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this hashes a Symbol instance via its _name field.
  ASSERT(IsSymbolCid(thread, instance.GetClassId()));

  auto zone = thread->zone();
  auto object_store = thread->isolate_group()->object_store();

  const auto& symbol_name_field =
      Field::Handle(zone, object_store->symbol_name_field());
  ASSERT(!symbol_name_field.IsNull());

  // Keep in sync with sdk/lib/_internal/vm/lib/symbol_patch.dart.
  const auto& name =
      String::Cast(Object::Handle(zone, instance.GetField(symbol_name_field)));
  const uint32_t arbitrary_prime = 664597;
  return 0x1fffffff & (arbitrary_prime * name.CanonicalizeHash());
}
20315
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this computes (and caches in the heap) a structural hash used
// for canonical-instance lookup.
  if (GetClassId() == kNullCid) {
    return kNullIdentityHash;
  }
  Thread* thread = Thread::Current();
  // Return the cached hash if one was already computed.
  uint32_t hash = thread->heap()->GetCanonicalHash(ptr());
  if (hash != 0) {
    return hash;
  }
  Zone* zone = thread->zone();
  const Class& cls = Class::Handle(zone, clazz());
  const bool is_symbol = Symbol::IsSymbolCid(thread, cls.id());

  NoSafepointScope no_safepoint(thread);

  if (is_symbol) {
    hash = Symbol::CanonicalizeHash(thread, *this);
  } else {
    // Fold the class id and every field into the hash: raw 32-bit chunks
    // for unboxed fields, recursive CanonicalizeHash for boxed ones.
    const intptr_t class_id = cls.id();
    ASSERT(class_id != 0);
    hash = class_id;
    uword this_addr = reinterpret_cast<uword>(this->untag());
    Object& obj = Object::Handle(zone);
    // NOTE(review): a line is missing here (presumably the declaration of
    // the |instance| handle used below); verify against upstream object.cc.

    const auto unboxed_fields_bitmap =
    // NOTE(review): a line is missing here (fetch of the unboxed-fields map
    // from the class table); verify against upstream object.cc.
        GetClassId());

    for (intptr_t offset = Instance::NextFieldOffset();
    // NOTE(review): a line is missing here (loop bound and increment);
    // verify against upstream object.cc.
      if (unboxed_fields_bitmap.Get(offset / kCompressedWordSize)) {
        if (kCompressedWordSize == 8) {
          // NOTE(review): CombineHashes call heads are missing in this
          // region (extraction artifact); verify against upstream.
            hash, *reinterpret_cast<uint32_t*>(this_addr + offset));
            hash, *reinterpret_cast<uint32_t*>(this_addr + offset + 4));
        } else {
            hash, *reinterpret_cast<uint32_t*>(this_addr + offset));
        }
      } else {
        obj = reinterpret_cast<CompressedObjectPtr*>(this_addr + offset)
                  ->Decompress(untag()->heap_base());
        if (obj.IsSentinel()) {
          hash = CombineHashes(hash, 11);
        } else {
          instance ^= obj.ptr();
          hash = CombineHashes(hash, instance.CanonicalizeHash());
        }
      }
    }
    // NOTE(review): a line is missing here (presumably hash finalization);
    // verify against upstream object.cc.
  }
  thread->heap()->SetCanonicalHash(ptr(), hash);
  return hash;
}
20373
#if defined(DEBUG)
// Debug-only visitor that records whether any (compressed) object-pointer
// slots were visited at all; used below to assert that predefined-cid
// instances have no pointer fields that canonicalization would miss.
class CheckForPointers : public ObjectPointerVisitor {
 public:
  explicit CheckForPointers(IsolateGroup* isolate_group)
      : ObjectPointerVisitor(isolate_group) {}

  bool has_pointers() const { return has_pointers_; }

  void VisitPointers(ObjectPtr* first, ObjectPtr* last) override {
    // A non-empty slot range means at least one pointer was visited.
    if (first <= last) {
      has_pointers_ = true;
    }
  }

#if defined(DART_COMPRESSED_POINTERS)
  void VisitCompressedPointers(uword heap_base,
                               CompressedObjectPtr* first,
                               CompressedObjectPtr* last) override {
    if (first <= last) {
      has_pointers_ = true;
    }
  }
#endif

 private:
  bool has_pointers_ = false;

  DISALLOW_COPY_AND_ASSIGN(CheckForPointers);
};
#endif  // DEBUG
20404
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this canonicalizes the boxed fields of a user-defined-class
// instance in place.
  const intptr_t class_id = GetClassId();
  if (class_id >= kNumPredefinedCids) {
    // Iterate over all fields, canonicalize numbers and strings, expect all
    // other instances to be canonical otherwise report error (return false).
    Zone* zone = thread->zone();
    Object& obj = Object::Handle(zone);
    const intptr_t instance_size = SizeFromClass();
    ASSERT(instance_size != 0);
    const auto unboxed_fields_bitmap =
        thread->isolate_group()->class_table()->GetUnboxedFieldsMapAt(class_id);
    for (intptr_t offset = Instance::NextFieldOffset(); offset < instance_size;
    // NOTE(review): the loop-increment line is missing here (extraction
    // artifact); verify against upstream object.cc.
      if (unboxed_fields_bitmap.Get(offset / kCompressedWordSize)) {
        // Unboxed (raw) fields hold no object references to canonicalize.
        continue;
      }
      obj = this->FieldAddrAtOffset(offset)->Decompress(untag()->heap_base());
      if (obj.IsInstance()) {
        obj = Instance::Cast(obj).CanonicalizeLocked(thread);
        this->SetFieldAtOffset(offset, obj);
      } else {
        ASSERT(obj.IsNull() || obj.IsSentinel());
      }
    }
  } else {
#if defined(DEBUG) && !defined(DART_COMPRESSED_POINTERS)
    // Make sure that we are not missing any fields.
    // NOTE(review): a line is missing here (presumably obtaining the
    // IsolateGroup |group| passed to the visitor); verify upstream.
    CheckForPointers has_pointers(group);
    this->ptr()->untag()->VisitPointersPrecise(&has_pointers);
    ASSERT(!has_pointers.has_pointers());
#endif  // DEBUG
  }
}
20439
InstancePtr Instance::CopyShallowToOldSpace(Thread* thread) const {
  // NOTE(review): the body line is missing from this listing (extraction
  // artifact); presumably a shallow clone into old space -- verify against
  // upstream object.cc.
}
20443
InstancePtr Instance::Canonicalize(Thread* thread) const {
  // NOTE(review): line(s) missing here (likely an ASSERT and acquisition of
  // the canonicalization lock); verify against upstream object.cc.
  return CanonicalizeLocked(thread);
}
20449
// Returns the canonical version of this instance: an existing canonical
// instance of its class if one matches, otherwise this object (cloned into
// old space if new), marked canonical and inserted into the class's
// canonical-constants table.
InstancePtr Instance::CanonicalizeLocked(Thread* thread) const {
  // Immediate (non-heap) values and already-canonical objects are their own
  // canonical form.
  if (!this->ptr()->IsHeapObject() || this->IsCanonical()) {
    return this->ptr();
  }
  ASSERT(!IsNull());
  // NOTE(review): a line is missing here (extraction artifact); verify
  // against upstream object.cc.
  Zone* zone = thread->zone();
  const Class& cls = Class::Handle(zone, this->clazz());
  Instance& result =
      Instance::Handle(zone, cls.LookupCanonicalInstance(zone, *this));
  if (!result.IsNull()) {
    return result.ptr();
  }
  if (IsNew()) {
    ASSERT((thread->isolate() == Dart::vm_isolate()) || !InVMIsolateHeap());
    // Create a canonical object in old space.
    result ^= Object::Clone(*this, Heap::kOld);
  } else {
    result = this->ptr();
  }
  ASSERT(result.IsOld());
  result.SetCanonical();
  return cls.InsertCanonicalConstant(zone, result);
}
20474
// NOTE(review): the signature line is missing from this listing (extraction
// artifact); this reads |field| from this instance, boxing unboxed values
// according to the field's guarded cid.
  if (field.is_unboxed()) {
    switch (field.guarded_cid()) {
      case kDoubleCid:
        return Double::New(*reinterpret_cast<double_t*>(FieldAddr(field)));
      case kFloat32x4Cid:
        return Float32x4::New(
            *reinterpret_cast<simd128_value_t*>(FieldAddr(field)));
      case kFloat64x2Cid:
        return Float64x2::New(
            *reinterpret_cast<simd128_value_t*>(FieldAddr(field)));
      default:
        // Any other unboxed field holds a raw int64.
        return Integer::New(*reinterpret_cast<int64_t*>(FieldAddr(field)));
    }
  } else {
    return FieldAddr(field)->Decompress(untag()->heap_base());
  }
}
20493
20494void Instance::SetField(const Field& field, const Object& value) const {
20495 if (field.is_unboxed()) {
20496 switch (field.guarded_cid()) {
20497 case kDoubleCid:
20498 StoreNonPointer(reinterpret_cast<double_t*>(FieldAddr(field)),
20499 Double::Cast(value).value());
20500 break;
20501 case kFloat32x4Cid:
20502 StoreNonPointer(reinterpret_cast<simd128_value_t*>(FieldAddr(field)),
20503 Float32x4::Cast(value).value());
20504 break;
20505 case kFloat64x2Cid:
20506 StoreNonPointer(reinterpret_cast<simd128_value_t*>(FieldAddr(field)),
20507 Float64x2::Cast(value).value());
20508 break;
20509 default:
20510 StoreNonPointer(reinterpret_cast<int64_t*>(FieldAddr(field)),
20511 Integer::Cast(value).AsInt64Value());
20512 break;
20513 }
20514 } else {
20515 field.RecordStore(value);
20516 StoreCompressedPointer(FieldAddr(field), value.ptr());
20517 }
20518}
20519
// Returns the runtime type of this instance: the Null type for null, the
// instantiated signature for closures, the record type for records, and
// otherwise a finalized, canonical Type for the instance's class and type
// arguments.
AbstractTypePtr Instance::GetType(Heap::Space space) const {
  if (IsNull()) {
    return Type::NullType();
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Class& cls = Class::Handle(zone, clazz());
  if (!cls.is_finalized()) {
    // Various predefined classes can be instantiated by the VM or
    // Dart_NewString/Integer/TypedData/... before the class is finalized.
    ASSERT(cls.is_prefinalized());
    // NOTE(review): a line is missing here (extraction artifact); verify
    // against upstream object.cc.
  }
  if (cls.IsClosureClass()) {
    // NOTE(review): a line is missing here (presumably the declaration of
    // |signature|); verify against upstream object.cc.
        zone, Closure::Cast(*this).GetInstantiatedSignature(zone));
    if (!signature.IsFinalized()) {
      signature.SetIsFinalized();
    }
    signature ^= signature.Canonicalize(thread);
    return signature.ptr();
  }
  if (IsRecord()) {
    ASSERT(cls.IsRecordClass());
    auto& record_type =
        RecordType::Handle(zone, Record::Cast(*this).GetRecordType());
    ASSERT(record_type.IsFinalized());
    ASSERT(record_type.IsCanonical());
    return record_type.ptr();
  }
  Type& type = Type::Handle(zone);
  if (!cls.IsGeneric()) {
    // Non-generic classes can reuse the cached declaration type.
    type = cls.DeclarationType();
  }
  if (type.IsNull()) {
    TypeArguments& type_arguments = TypeArguments::Handle(zone);
    const intptr_t num_type_arguments = cls.NumTypeArguments();
    if (num_type_arguments > 0) {
      type_arguments = GetTypeArguments();
      if (!type_arguments.IsNull()) {
        type_arguments = type_arguments.FromInstanceTypeArguments(thread, cls);
      }
    }
    type = Type::New(cls, type_arguments, Nullability::kNonNullable, space);
    type.SetIsFinalized();
    type ^= type.Canonicalize(thread);
  }
  return type.ptr();
}
20569
20570TypeArgumentsPtr Instance::GetTypeArguments() const {
20571 ASSERT(!IsType());
20572 const Class& cls = Class::Handle(clazz());
20573 intptr_t field_offset = cls.host_type_arguments_field_offset();
20574 ASSERT(field_offset != Class::kNoTypeArguments);
20575 TypeArguments& type_arguments = TypeArguments::Handle();
20576 type_arguments ^=
20577 FieldAddrAtOffset(field_offset)->Decompress(untag()->heap_base());
20578 return type_arguments.ptr();
20579}
20580
20582 ASSERT(!IsType());
20583 ASSERT(value.IsNull() || value.IsCanonical());
20584 const Class& cls = Class::Handle(clazz());
20585 intptr_t field_offset = cls.host_type_arguments_field_offset();
20586 ASSERT(field_offset != Class::kNoTypeArguments);
20587 SetFieldAtOffset(field_offset, value);
20588}
20589
20590/*
20591Specification of instance checks (e is T) and casts (e as T), where e evaluates
20592to a value v and v has runtime type S:
20593
20594Instance checks (e is T) in weak checking mode in a legacy or opted-in library:
20595 If v == null and T is a legacy type
20596 return LEGACY_SUBTYPE(T, Null) || LEGACY_SUBTYPE(Object, T)
20597 If v == null and T is not a legacy type, return NNBD_SUBTYPE(Null, T)
20598 Otherwise return LEGACY_SUBTYPE(S, T)
20599
20600Instance checks (e is T) in strong checking mode in a legacy or opted-in lib:
20601 If v == null and T is a legacy type
20602 return LEGACY_SUBTYPE(T, Null) || LEGACY_SUBTYPE(Object, T)
20603 Otherwise return NNBD_SUBTYPE(S, T)
20604
20605Casts (e as T) in weak checking mode in a legacy or opted-in library:
20606 If LEGACY_SUBTYPE(S, T) then e as T evaluates to v.
20607 Otherwise a TypeError is thrown.
20608
20609Casts (e as T) in strong checking mode in a legacy or opted-in library:
20610 If NNBD_SUBTYPE(S, T) then e as T evaluates to v.
20611 Otherwise a TypeError is thrown.
20612*/
20613
20615 const AbstractType& other,
20616 const TypeArguments& other_instantiator_type_arguments,
20617 const TypeArguments& other_function_type_arguments) const {
20618 ASSERT(!other.IsDynamicType());
20619 if (IsNull()) {
20620 return Instance::NullIsInstanceOf(other, other_instantiator_type_arguments,
20621 other_function_type_arguments);
20622 }
20623 // In strong mode, compute NNBD_SUBTYPE(runtimeType, other).
20624 // In weak mode, compute LEGACY_SUBTYPE(runtimeType, other).
20625 return RuntimeTypeIsSubtypeOf(other, other_instantiator_type_arguments,
20626 other_function_type_arguments);
20627}
20628
20630 const AbstractType& other,
20631 const TypeArguments& other_instantiator_type_arguments,
20632 const TypeArguments& other_function_type_arguments) const {
20633 ASSERT(!other.IsDynamicType());
20634 // In strong mode, compute NNBD_SUBTYPE(runtimeType, other).
20635 // In weak mode, compute LEGACY_SUBTYPE(runtimeType, other).
20636 return RuntimeTypeIsSubtypeOf(other, other_instantiator_type_arguments,
20637 other_function_type_arguments);
20638}
20639
20640// If 'other' type (once instantiated) is a legacy type:
20641// return LEGACY_SUBTYPE(other, Null) || LEGACY_SUBTYPE(Object, other).
20642// Otherwise return NNBD_SUBTYPE(Null, T).
20643// The value of the strong flag is ignored.
20644bool Instance::NullIsInstanceOf(
20645 const AbstractType& other,
20646 const TypeArguments& other_instantiator_type_arguments,
20647 const TypeArguments& other_function_type_arguments) {
20648 ASSERT(other.IsFinalized());
20649 if (other.IsNullable()) {
20650 // This case includes top types (void, dynamic, Object?).
20651 // The uninstantiated nullable type will remain nullable after
20652 // instantiation.
20653 return true;
20654 }
20655 if (other.IsFutureOrType()) {
20656 const auto& type = AbstractType::Handle(other.UnwrapFutureOr());
20657 return NullIsInstanceOf(type, other_instantiator_type_arguments,
20658 other_function_type_arguments);
20659 }
20660 // No need to instantiate type, unless it is a type parameter.
20661 // Note that a typeref cannot refer to a type parameter.
20662 if (other.IsTypeParameter()) {
20664 other_instantiator_type_arguments, other_function_type_arguments,
20666 return Instance::NullIsInstanceOf(type, Object::null_type_arguments(),
20667 Object::null_type_arguments());
20668 }
20669 return false;
20670}
20671
20672// Must be kept in sync with GenerateNullIsAssignableToType in
20673// stub_code_compiler.cc if any changes are made.
20675 // "Left Null" rule: null is assignable when destination type is
20676 // nullable. Otherwise it is not assignable or we cannot tell
20677 // without instantiating type parameter.
20678 if (other.IsNullable()) {
20679 return true;
20680 }
20681 if (other.IsFutureOrType()) {
20683 }
20684 // Since the TAVs are not available, for non-nullable type parameters
20685 // this returns a conservative approximation of "not assignable" .
20686 return false;
20687}
20688
20689// Must be kept in sync with GenerateNullIsAssignableToType in
20690// stub_code_compiler.cc if any changes are made.
20692 const AbstractType& other,
20693 const TypeArguments& other_instantiator_type_arguments,
20694 const TypeArguments& other_function_type_arguments) {
20695 // Do checks that don't require instantiation first.
20696 if (NullIsAssignableTo(other)) return true;
20697 if (!other.IsTypeParameter()) return false;
20698 const auto& type = AbstractType::Handle(other.InstantiateFrom(
20699 other_instantiator_type_arguments, other_function_type_arguments,
20701 return NullIsAssignableTo(type);
20702}
20703
20704bool Instance::RuntimeTypeIsSubtypeOf(
20705 const AbstractType& other,
20706 const TypeArguments& other_instantiator_type_arguments,
20707 const TypeArguments& other_function_type_arguments) const {
20708 ASSERT(other.IsFinalized());
20709 ASSERT(ptr() != Object::sentinel().ptr());
20710 // Instance may not have runtimeType dynamic, void, or Never.
20711 if (other.IsTopTypeForSubtyping()) {
20712 return true;
20713 }
20714 Thread* thread = Thread::Current();
20715 Zone* zone = thread->zone();
20716 const Class& cls = Class::Handle(zone, clazz());
20717 if (cls.IsClosureClass()) {
20718 if (other.IsDartFunctionType() || other.IsDartClosureType() ||
20719 other.IsObjectType()) {
20720 return true;
20721 }
20722 AbstractType& instantiated_other = AbstractType::Handle(zone, other.ptr());
20723 if (!other.IsInstantiated()) {
20724 instantiated_other = other.InstantiateFrom(
20725 other_instantiator_type_arguments, other_function_type_arguments,
20727 if (instantiated_other.IsTopTypeForSubtyping() ||
20728 instantiated_other.IsObjectType() ||
20729 instantiated_other.IsDartFunctionType()) {
20730 return true;
20731 }
20732 }
20733 if (RuntimeTypeIsSubtypeOfFutureOr(zone, instantiated_other)) {
20734 return true;
20735 }
20736 if (!instantiated_other.IsFunctionType()) {
20737 return false;
20738 }
20739 const FunctionType& sig = FunctionType::Handle(
20740 Closure::Cast(*this).GetInstantiatedSignature(zone));
20741 return sig.IsSubtypeOf(FunctionType::Cast(instantiated_other), Heap::kOld);
20742 }
20743 if (cls.IsRecordClass()) {
20744 if (other.IsDartRecordType() || other.IsObjectType()) {
20745 return true;
20746 }
20747 AbstractType& instantiated_other = AbstractType::Handle(zone, other.ptr());
20748 if (!other.IsInstantiated()) {
20749 instantiated_other = other.InstantiateFrom(
20750 other_instantiator_type_arguments, other_function_type_arguments,
20752 if (instantiated_other.IsTopTypeForSubtyping() ||
20753 instantiated_other.IsObjectType() ||
20754 instantiated_other.IsDartRecordType()) {
20755 return true;
20756 }
20757 }
20758 if (RuntimeTypeIsSubtypeOfFutureOr(zone, instantiated_other)) {
20759 return true;
20760 }
20761 if (!instantiated_other.IsRecordType()) {
20762 return false;
20763 }
20764 const Record& record = Record::Cast(*this);
20765 const RecordType& record_type = RecordType::Cast(instantiated_other);
20766 if (record.shape() != record_type.shape()) {
20767 return false;
20768 }
20769 Instance& field_value = Instance::Handle(zone);
20770 AbstractType& field_type = AbstractType::Handle(zone);
20771 const intptr_t num_fields = record.num_fields();
20772 for (intptr_t i = 0; i < num_fields; ++i) {
20773 field_value ^= record.FieldAt(i);
20774 field_type = record_type.FieldTypeAt(i);
20775 if (!field_value.RuntimeTypeIsSubtypeOf(field_type,
20776 Object::null_type_arguments(),
20777 Object::null_type_arguments())) {
20778 return false;
20779 }
20780 }
20781 return true;
20782 }
20783 TypeArguments& type_arguments = TypeArguments::Handle(zone);
20784 const intptr_t num_type_arguments = cls.NumTypeArguments();
20785 if (num_type_arguments > 0) {
20786 type_arguments = GetTypeArguments();
20787 ASSERT(type_arguments.IsNull() || type_arguments.IsCanonical());
20788 // The number of type arguments in the instance must be greater or equal to
20789 // the number of type arguments expected by the instance class.
20790 // A discrepancy is allowed for closures, which borrow the type argument
20791 // vector of their instantiator, which may be of a subclass of the class
20792 // defining the closure. Truncating the vector to the correct length on
20793 // instantiation is unnecessary. The vector may therefore be longer.
20794 // Also, an optimization reuses the type argument vector of the instantiator
20795 // of generic instances when its layout is compatible.
20796 ASSERT(type_arguments.IsNull() ||
20797 (type_arguments.Length() >= num_type_arguments));
20798 }
20799 AbstractType& instantiated_other = AbstractType::Handle(zone, other.ptr());
20800 if (!other.IsInstantiated()) {
20801 instantiated_other = other.InstantiateFrom(
20802 other_instantiator_type_arguments, other_function_type_arguments,
20804 if (instantiated_other.IsTopTypeForSubtyping()) {
20805 return true;
20806 }
20807 }
20808 if (IsNull()) {
20809 if (instantiated_other.IsNullType()) {
20810 return true;
20811 }
20812 if (RuntimeTypeIsSubtypeOfFutureOr(zone, instantiated_other)) {
20813 return true;
20814 }
20815 // At this point, instantiated_other can be a function type.
20816 return !instantiated_other.IsNonNullable();
20817 }
20818 if (!instantiated_other.IsType()) {
20819 return false;
20820 }
20821 // RuntimeType of non-null instance is non-nullable, so there is no need to
20822 // check nullability of other type.
20823 return Class::IsSubtypeOf(cls, type_arguments, Nullability::kNonNullable,
20824 instantiated_other, Heap::kOld);
20825}
20826
20827bool Instance::RuntimeTypeIsSubtypeOfFutureOr(Zone* zone,
20828 const AbstractType& other) const {
20829 if (other.IsFutureOrType()) {
20830 const TypeArguments& other_type_arguments =
20831 TypeArguments::Handle(zone, other.arguments());
20832 const AbstractType& other_type_arg =
20833 AbstractType::Handle(zone, other_type_arguments.TypeAtNullSafe(0));
20834 if (other_type_arg.IsTopTypeForSubtyping()) {
20835 return true;
20836 }
20837 if (Class::Handle(zone, clazz()).IsFutureClass()) {
20838 const TypeArguments& type_arguments =
20840 const AbstractType& type_arg =
20841 AbstractType::Handle(zone, type_arguments.TypeAtNullSafe(0));
20842 if (type_arg.IsSubtypeOf(other_type_arg, Heap::kOld)) {
20843 return true;
20844 }
20845 }
20846 // Retry RuntimeTypeIsSubtypeOf after unwrapping type arg of FutureOr.
20847 if (RuntimeTypeIsSubtypeOf(other_type_arg, Object::null_type_arguments(),
20848 Object::null_type_arguments())) {
20849 return true;
20850 }
20851 }
20852 return false;
20853}
20854
20855bool Instance::OperatorEquals(const Instance& other) const {
20856 // TODO(koda): Optimize for all builtin classes and all classes
20857 // that do not override operator==.
20858 return DartLibraryCalls::Equals(*this, other) == Object::bool_true().ptr();
20859}
20860
20861bool Instance::IsIdenticalTo(const Instance& other) const {
20862 if (ptr() == other.ptr()) return true;
20863 if (IsInteger() && other.IsInteger()) {
20864 return Integer::Cast(*this).Equals(other);
20865 }
20866 if (IsDouble() && other.IsDouble()) {
20867 double other_value = Double::Cast(other).value();
20868 return Double::Cast(*this).BitwiseEqualsToDouble(other_value);
20869 }
20870 return false;
20871}
20872
20874 ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
20875 TypedDataPtr native_fields = static_cast<TypedDataPtr>(
20876 NativeFieldsAddr()->Decompress(untag()->heap_base()));
20877 if (native_fields == TypedData::null()) {
20878 return nullptr;
20879 }
20880 return reinterpret_cast<intptr_t*>(native_fields->untag()->data());
20881}
20882
20883void Instance::SetNativeField(int index, intptr_t value) const {
20884 ASSERT(IsValidNativeIndex(index));
20885 Object& native_fields =
20886 Object::Handle(NativeFieldsAddr()->Decompress(untag()->heap_base()));
20887 if (native_fields.IsNull()) {
20888 // Allocate backing storage for the native fields.
20889 native_fields = TypedData::New(kIntPtrCid, NumNativeFields());
20890 StoreCompressedPointer(NativeFieldsAddr(), native_fields.ptr());
20891 }
20892 intptr_t byte_offset = index * sizeof(intptr_t);
20893 TypedData::Cast(native_fields).SetIntPtr(byte_offset, value);
20894}
20895
20896void Instance::SetNativeFields(uint16_t num_native_fields,
20897 const intptr_t* field_values) const {
20898 ASSERT(num_native_fields == NumNativeFields());
20899 ASSERT(field_values != nullptr);
20900 Object& native_fields =
20901 Object::Handle(NativeFieldsAddr()->Decompress(untag()->heap_base()));
20902 if (native_fields.IsNull()) {
20903 // Allocate backing storage for the native fields.
20904 native_fields = TypedData::New(kIntPtrCid, NumNativeFields());
20905 StoreCompressedPointer(NativeFieldsAddr(), native_fields.ptr());
20906 }
20907 for (uint16_t i = 0; i < num_native_fields; i++) {
20908 intptr_t byte_offset = i * sizeof(intptr_t);
20909 TypedData::Cast(native_fields).SetIntPtr(byte_offset, field_values[i]);
20910 }
20911}
20912
20914 Class& cls = Class::Handle(clazz());
20915 if (cls.IsClosureClass()) {
20916 if (function != nullptr) {
20917 *function = Closure::Cast(*this).function();
20918 }
20919 return true;
20920 }
20921 // Try to resolve a "call" method.
20922 Zone* zone = Thread::Current()->zone();
20923 Function& call_function = Function::Handle(
20924 zone, Resolver::ResolveDynamicAnyArgs(zone, cls, Symbols::DynamicCall(),
20925 /*allow_add=*/false));
20926 if (call_function.IsNull()) {
20927 return false;
20928 }
20929 if (function != nullptr) {
20930 *function = call_function.ptr();
20931 }
20932 return true;
20933}
20934
20935InstancePtr Instance::New(const Class& cls, Heap::Space space) {
20936 Thread* thread = Thread::Current();
20937 if (cls.EnsureIsAllocateFinalized(thread) != Error::null()) {
20938 return Instance::null();
20939 }
20940 return NewAlreadyFinalized(cls, space);
20941}
20942
20943InstancePtr Instance::NewAlreadyFinalized(const Class& cls, Heap::Space space) {
20945 intptr_t instance_size = cls.host_instance_size();
20946 ASSERT(instance_size > 0);
20947 // Initialize everything after the object header with Object::null(), since
20948 // this isn't a predefined class.
20949 const uword ptr_field_end_offset =
20950 instance_size - (Instance::ContainsCompressedPointers()
20952 : kWordSize);
20953 return static_cast<InstancePtr>(Object::Allocate(
20954 cls.id(), instance_size, space, Instance::ContainsCompressedPointers(),
20955 from_offset<Instance>(), ptr_field_end_offset));
20956}
20957
20958bool Instance::IsValidFieldOffset(intptr_t offset) const {
20959 Thread* thread = Thread::Current();
20961 Class& cls = thread->ClassHandle();
20962 cls = clazz();
20963 return (offset >= 0 &&
20965}
20966
20967intptr_t Instance::ElementSizeFor(intptr_t cid) {
20971 }
20972 switch (cid) {
20973 case kArrayCid:
20974 case kImmutableArrayCid:
20976 case kTypeArgumentsCid:
20978 case kOneByteStringCid:
20980 case kTwoByteStringCid:
20982 default:
20983 UNIMPLEMENTED();
20984 return 0;
20985 }
20986}
20987
20988intptr_t Instance::DataOffsetFor(intptr_t cid) {
20990 // Elements start at offset 0 of the external data.
20991 return 0;
20992 }
20993 if (IsTypedDataClassId(cid)) {
20995 }
20996 switch (cid) {
20997 case kArrayCid:
20998 case kImmutableArrayCid:
20999 return Array::data_offset();
21000 case kTypeArgumentsCid:
21002 case kOneByteStringCid:
21004 case kTwoByteStringCid:
21006 case kRecordCid:
21007 return Record::field_offset(0);
21008 default:
21009 UNIMPLEMENTED();
21010 return Array::data_offset();
21011 }
21012}
21013
21014const char* Instance::ToCString() const {
21015 if (IsNull()) {
21016 return "null";
21017 } else if (Thread::Current()->no_safepoint_scope_depth() > 0) {
21018 // Can occur when running disassembler.
21019 return "Instance";
21020 } else {
21021 if (IsClosure()) {
21022 return Closure::Cast(*this).ToCString();
21023 }
21024 // Background compiler disassembly of instructions referring to pool objects
21025 // calls this function and requires allocation of Type in old space.
21026 const AbstractType& type = AbstractType::Handle(GetType(Heap::kOld));
21027 const String& type_name = String::Handle(type.UserVisibleName());
21028 return OS::SCreate(Thread::Current()->zone(), "Instance of '%s'",
21029 type_name.ToCString());
21030 }
21031}
21032
21034 // All subclasses should implement this appropriately, so the only value that
21035 // should reach this implementation should be the null value.
21036 ASSERT(IsNull());
21037 // AbstractType is an abstract class.
21038 UNREACHABLE();
21039 return kIllegalCid;
21040}
21041
21043 // All subclasses should implement this appropriately, so the only value that
21044 // should reach this implementation should be the null value.
21045 ASSERT(IsNull());
21046 // AbstractType is an abstract class.
21047 UNREACHABLE();
21048 return Class::null();
21049}
21050
// Type-argument accessor stub on the abstract base class. Concrete
// subclasses override this, so only the null AbstractType can reach it.
21051TypeArgumentsPtr AbstractType::arguments() const {
21052 // All subclasses should implement this appropriately, so the only value that
21053 // should reach this implementation should be the null value.
21054 ASSERT(IsNull());
21055 // AbstractType is an abstract class.
21056 UNREACHABLE();
21057 return nullptr;
21058}
21059
21061 // Null can be assigned to legacy and nullable types.
21062 if (!IsNonNullable()) {
21063 return false;
21064 }
21065
21066 Thread* thread = Thread::Current();
21067 Zone* zone = thread->zone();
21068
21069 if (IsTypeParameter()) {
21070 const auto& bound =
21071 AbstractType::Handle(zone, TypeParameter::Cast(*this).bound());
21072 ASSERT(!bound.IsNull());
21073 return bound.IsStrictlyNonNullable();
21074 }
21075 if (IsFutureOrType()) {
21076 return AbstractType::Handle(zone, UnwrapFutureOr()).IsStrictlyNonNullable();
21077 }
21078 return true;
21079}
21080
21082 const TypeParameter& type_param,
21083 Heap::Space space) const {
21084 Nullability result_nullability;
21085 const Nullability arg_nullability = nullability();
21086 const Nullability var_nullability = type_param.nullability();
21087 // Adjust nullability of result 'arg' instantiated from 'var'.
21088 // arg/var ! ?
21089 // ! ! ?
21090 // ? ? ?
21091 if (var_nullability == Nullability::kNullable) {
21092 result_nullability = Nullability::kNullable;
21093 } else {
21094 // Keep arg nullability.
21095 return ptr();
21096 }
21097 if (arg_nullability == result_nullability) {
21098 return ptr();
21099 }
21100 if (IsType()) {
21101 return Type::Cast(*this).ToNullability(result_nullability, space);
21102 }
21103 if (IsFunctionType()) {
21104 return FunctionType::Cast(*this).ToNullability(result_nullability, space);
21105 }
21106 if (IsRecordType()) {
21107 return RecordType::Cast(*this).ToNullability(result_nullability, space);
21108 }
21109 if (IsTypeParameter()) {
21110 return TypeParameter::Cast(*this).ToNullability(result_nullability, space);
21111 }
21112 UNREACHABLE();
21113}
21114
21116 if (IsFutureOrType()) {
21117 Zone* zone = Thread::Current()->zone();
21118 const AbstractType& unwrapped_type =
21120 const classid_t cid = unwrapped_type.type_class_id();
21121 if (cid == kDynamicCid || cid == kVoidCid) {
21122 return unwrapped_type.ptr();
21123 }
21124 if (cid == kInstanceCid) {
21125 if (IsNonNullable()) {
21126 return unwrapped_type.ptr();
21127 }
21128 ASSERT(IsNullable());
21129 return Type::Cast(unwrapped_type)
21130 .ToNullability(Nullability::kNullable, space);
21131 }
21132 if (cid == kNeverCid && unwrapped_type.IsNonNullable()) {
21133 ObjectStore* object_store = IsolateGroup::Current()->object_store();
21134 const Type& future_never_type =
21135 Type::Handle(zone, object_store->non_nullable_future_never_type());
21136 ASSERT(!future_never_type.IsNull());
21137 return future_never_type.ToNullability(nullability(), space);
21138 }
21139 if (cid == kNullCid) {
21140 ObjectStore* object_store = IsolateGroup::Current()->object_store();
21141 ASSERT(object_store->nullable_future_null_type() != Type::null());
21142 return object_store->nullable_future_null_type();
21143 }
21144 if (IsNullable() && unwrapped_type.IsNullable()) {
21145 return Type::Cast(*this).ToNullability(Nullability::kNonNullable, space);
21146 }
21147 }
21148 return ptr();
21149}
21150
21152 intptr_t num_free_fun_type_params) const {
21153 // All subclasses should implement this appropriately, so the only value that
21154 // should reach this implementation should be the null value.
21155 ASSERT(IsNull());
21156 // AbstractType is an abstract class.
21157 UNREACHABLE();
21158 return false;
21159}
21160
21162 ASSERT(!IsFinalized());
21166}
21167
// Raw writer for the packed flags bit field in the untagged layout
// (used by the finalization/nullability setters nearby).
21168void AbstractType::set_flags(uint32_t value) const {
21169 untag()->set_flags(value);
21170}
21171
21173 ASSERT(!IsCanonical());
21174 set_flags(
21176}
21177
21179 ASSERT(!IsCanonical());
21181 static_cast<uint8_t>(value), untag()->flags()));
21182}
21183
21185 const Instance& other,
21186 TypeEquality kind,
21187 FunctionTypeMapping* function_type_equivalence) const {
21188 // All subclasses should implement this appropriately, so the only value that
21189 // should reach this implementation should be the null value.
21190 ASSERT(IsNull());
21191 // AbstractType is an abstract class.
21192 UNREACHABLE();
21193 return false;
21194}
21195
21197 const AbstractType& other_type,
21198 TypeEquality kind) const {
21199 Nullability this_type_nullability = nullability();
21200 Nullability other_type_nullability = other_type.nullability();
21201 if (kind == TypeEquality::kInSubtypeTest) {
21202 if (this_type_nullability == Nullability::kNullable &&
21203 other_type_nullability == Nullability::kNonNullable) {
21204 return false;
21205 }
21206 } else {
21208 (kind == TypeEquality::kCanonical));
21209 if (this_type_nullability != other_type_nullability) {
21210 return false;
21211 }
21212 }
21213 return true;
21214}
21215
21217 const TypeArguments& instantiator_type_arguments,
21218 const TypeArguments& function_type_arguments,
21219 intptr_t num_free_fun_type_params,
21220 Heap::Space space,
21221 FunctionTypeMapping* function_type_mapping,
21222 intptr_t num_parent_type_args_adjustment) const {
21223 // All subclasses should implement this appropriately, so the only value that
21224 // should reach this implementation should be the null value.
21225 ASSERT(IsNull());
21226 // AbstractType is an abstract class.
21227 UNREACHABLE();
21228 return nullptr;
21229}
21230
21232 intptr_t num_parent_type_args_adjustment,
21233 intptr_t num_free_fun_type_params,
21234 Heap::Space space,
21235 FunctionTypeMapping* function_type_mapping) const {
21236 UNREACHABLE();
21237 return nullptr;
21238}
21239
// Canonicalization stub on the abstract base class; concrete subclasses
// provide the real implementation, so only the null value reaches here.
21240AbstractTypePtr AbstractType::Canonicalize(Thread* thread) const {
21241 // All subclasses should implement this appropriately, so the only value that
21242 // should reach this implementation should be the null value.
21243 ASSERT(IsNull());
21244 // AbstractType is an abstract class.
21245 UNREACHABLE();
21246 return nullptr;
21247}
21248
21250 // All subclasses should implement this appropriately, so the only value that
21251 // should reach this implementation should be the null value.
21252 ASSERT(IsNull());
21253 // AbstractType is an abstract class.
21254 UNREACHABLE();
21255}
21256
21257void AbstractType::AddURI(URIs* uris, const String& name, const String& uri) {
21258 ASSERT(uris != nullptr);
21259 const intptr_t len = uris->length();
21260 ASSERT((len % 3) == 0);
21261 bool print_uri = false;
21262 for (intptr_t i = 0; i < len; i += 3) {
21263 if (uris->At(i).Equals(name)) {
21264 if (uris->At(i + 1).Equals(uri)) {
21265 // Same name and same URI: no need to add this already listed URI.
21266 return; // No state change is possible.
21267 } else {
21268 // Same name and different URI: the name is ambiguous, print both URIs.
21269 print_uri = true;
21270 uris->SetAt(i + 2, Symbols::print());
21271 }
21272 }
21273 }
21274 uris->Add(name);
21275 uris->Add(uri);
21276 if (print_uri) {
21277 uris->Add(Symbols::print());
21278 } else {
21279 uris->Add(Symbols::Empty());
21280 }
21281}
21282
21284 ASSERT(uris != nullptr);
21285 Thread* thread = Thread::Current();
21286 Zone* zone = thread->zone();
21287 const intptr_t len = uris->length();
21288 ASSERT((len % 3) == 0);
21289 GrowableHandlePtrArray<const String> pieces(zone, 5 * (len / 3));
21290 for (intptr_t i = 0; i < len; i += 3) {
21291 // Only print URIs that have been marked.
21292 if (uris->At(i + 2).ptr() == Symbols::print().ptr()) {
21293 pieces.Add(Symbols::TwoSpaces());
21294 pieces.Add(uris->At(i));
21295 pieces.Add(Symbols::SpaceIsFromSpace());
21296 pieces.Add(uris->At(i + 1));
21297 pieces.Add(Symbols::NewLine());
21298 }
21299 }
21300 return Symbols::FromConcatAll(thread, pieces);
21301}
21302
21304 NameVisibility name_visibility) const {
21305 if (IsDynamicType() || IsVoidType() || IsNullType()) {
21306 // Hide nullable suffix.
21307 return "";
21308 }
21309 // Keep in sync with Nullability enum in runtime/vm/object.h.
21310 switch (nullability()) {
21312 return "?";
21314 return "";
21315 default:
21316 UNREACHABLE();
21317 }
21318}
21319
21320StringPtr AbstractType::Name() const {
21322}
21323
21324const char* AbstractType::NameCString() const {
21325 Thread* thread = Thread::Current();
21326 ZoneTextBuffer printer(thread->zone());
21327 PrintName(kInternalName, &printer);
21328 return printer.buffer();
21329}
21330
21333}
21334
21336 Thread* thread = Thread::Current();
21337 ZoneTextBuffer printer(thread->zone());
21338 PrintName(kUserVisibleName, &printer);
21339 return printer.buffer();
21340}
21341
21344}
21345
21347 Thread* thread = Thread::Current();
21348 ZoneTextBuffer printer(thread->zone());
21349 PrintName(kScrubbedName, &printer);
21350 return printer.buffer();
21351}
21352
21354 BaseTextBuffer* printer) const {
21355 // All subclasses should implement this appropriately, so the only value that
21356 // should reach this implementation should be the null value.
21357 ASSERT(IsNull());
21358 // AbstractType is an abstract class.
21359 UNREACHABLE();
21360}
21361
21362StringPtr AbstractType::ClassName() const {
21363 ASSERT(!IsFunctionType() && !IsRecordType());
21364 return Class::Handle(type_class()).Name();
21365}
21366
21368 return type_class_id() == kNullCid;
21369}
21370
21372 return type_class_id() == kNeverCid;
21373}
21374
21376 return type_class_id() == kSentinelCid;
21377}
21378
21380 const classid_t cid = type_class_id();
21381 if (cid == kDynamicCid || cid == kVoidCid) {
21382 return true;
21383 }
21384 if (cid == kInstanceCid) { // Object type.
21385 return IsNullable();
21386 }
21387 if (cid == kFutureOrCid) {
21388 // FutureOr<T> where T is a top type behaves as a top type.
21389 return AbstractType::Handle(UnwrapFutureOr()).IsTopTypeForInstanceOf();
21390 }
21391 return false;
21392}
21393
21394// Must be kept in sync with GenerateTypeIsTopTypeForSubtyping in
21395// stub_code_compiler.cc if any changes are made.
21397 const classid_t cid = type_class_id();
21398 if (cid == kDynamicCid || cid == kVoidCid) {
21399 return true;
21400 }
21401 if (cid == kInstanceCid) { // Object type.
21402 return !IsNonNullable();
21403 }
21404 if (cid == kFutureOrCid) {
21405 // FutureOr<T> where T is a top type behaves as a top type.
21406 return AbstractType::Handle(UnwrapFutureOr()).IsTopTypeForSubtyping();
21407 }
21408 return false;
21409}
21410
21412 return HasTypeClass() &&
21413 (type_class() == Type::Handle(Type::IntType()).type_class());
21414}
21415
21417 return HasTypeClass() &&
21419 ->object_store()
21420 ->integer_implementation_class());
21421}
21422
21424 return HasTypeClass() &&
21425 (type_class() == Type::Handle(Type::Double()).type_class());
21426}
21427
21429 // kFloat32x4Cid refers to the private class and cannot be used here.
21430 return HasTypeClass() &&
21431 (type_class() == Type::Handle(Type::Float32x4()).type_class());
21432}
21433
21435 // kFloat64x2Cid refers to the private class and cannot be used here.
21436 return HasTypeClass() &&
21437 (type_class() == Type::Handle(Type::Float64x2()).type_class());
21438}
21439
21441 // kInt32x4Cid refers to the private class and cannot be used here.
21442 return HasTypeClass() &&
21443 (type_class() == Type::Handle(Type::Int32x4()).type_class());
21444}
21445
21447 return HasTypeClass() &&
21448 (type_class() == Type::Handle(Type::StringType()).type_class());
21449}
21450
21452 return HasTypeClass() &&
21453 (type_class() == Type::Handle(Type::DartFunctionType()).type_class());
21454}
21455
21457 return (type_class_id() == kClosureCid);
21458}
21459
21461 if (!HasTypeClass()) return false;
21462 const auto cid = type_class_id();
21463 return ((cid == kRecordCid) ||
21464 (cid == Class::Handle(
21465 IsolateGroup::Current()->object_store()->record_class())
21466 .id()));
21467}
21468
21470 return HasTypeClass() && type_class_id() == kPointerCid;
21471}
21472
21474 if (!HasTypeClass()) return false;
21475
21476 intptr_t cid = type_class_id();
21477
21478 if (cid == kBoolCid) return true;
21479 if (cid == kDynamicCid) return true;
21480 if (cid == kInstanceCid) return true; // Object.
21481 if (cid == kNeverCid) return true;
21482 if (cid == kNullCid) return true;
21483 if (cid == kVoidCid) return true;
21484
21485 // These are not constant CID checks because kDoubleCid refers to _Double
21486 // not double, etc.
21487 ObjectStore* object_store = IsolateGroup::Current()->object_store();
21488 Type& candidate_type = Type::Handle();
21489 candidate_type = object_store->int_type();
21490 if (cid == candidate_type.type_class_id()) return true;
21491 candidate_type = object_store->double_type();
21492 if (cid == candidate_type.type_class_id()) return true;
21493 candidate_type = object_store->number_type();
21494 if (cid == candidate_type.type_class_id()) return true;
21495 candidate_type = object_store->string_type();
21496 if (cid == candidate_type.type_class_id()) return true;
21497
21498 Class& candidate_cls = Class::Handle();
21499 candidate_cls = object_store->list_class();
21500 if (cid == candidate_cls.id()) return true;
21501 candidate_cls = object_store->map_class();
21502 if (cid == candidate_cls.id()) return true;
21503 candidate_cls = object_store->set_class();
21504 if (cid == candidate_cls.id()) return true;
21505 candidate_cls = object_store->capability_class();
21506 if (cid == candidate_cls.id()) return true;
21507 candidate_cls = object_store->send_port_class();
21508 if (cid == candidate_cls.id()) return true;
21509 candidate_cls = object_store->transferable_class();
21510 if (cid == candidate_cls.id()) return true;
21511
21512 const auto& typed_data_lib =
21513 Library::Handle(object_store->typed_data_library());
21514
21515#define IS_CHECK(name) \
21516 candidate_cls = typed_data_lib.LookupClass(Symbols::name##List()); \
21517 if (cid == candidate_cls.id()) { \
21518 return true; \
21519 }
21521#undef IS_CHECK
21522
21523 return false;
21524}
21525
21526AbstractTypePtr AbstractType::UnwrapFutureOr() const {
21527 if (!IsFutureOrType()) {
21528 return ptr();
21529 }
21530 if (arguments() == TypeArguments::null()) {
21531 return Type::dynamic_type().ptr();
21532 }
21533 Thread* thread = Thread::Current();
21535 TypeArguments& type_args = thread->TypeArgumentsHandle();
21536 type_args = arguments();
21538 AbstractType& type_arg = thread->AbstractTypeHandle();
21539 type_arg = type_args.TypeAt(0);
21540 while (type_arg.IsFutureOrType()) {
21541 if (type_arg.arguments() == TypeArguments::null()) {
21542 return Type::dynamic_type().ptr();
21543 }
21544 type_args = type_arg.arguments();
21545 type_arg = type_args.TypeAt(0);
21546 }
21547 return type_arg.ptr();
21548}
21549
21551 const AbstractType& other,
21552 Heap::Space space,
21553 FunctionTypeMapping* function_type_equivalence) const {
21554 TRACE_TYPE_CHECKS_VERBOSE(" AbstractType::IsSubtypeOf(%s, %s)\n",
21555 ToCString(), other.ToCString());
21557 ASSERT(other.IsFinalized());
21558 // Reflexivity.
21559 if (ptr() == other.ptr()) {
21560 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (same types)\n");
21561 return true;
21562 }
21563 // Right top type.
21564 if (other.IsTopTypeForSubtyping()) {
21565 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (right is top)\n");
21566 return true;
21567 }
21568 // Left bottom type.
21569 // Any form of Never in weak mode maps to Null and Null is a bottom type in
21570 // weak mode. In strong mode, Never and Never* are bottom types. Therefore,
21571 // Never and Never* are bottom types regardless of weak/strong mode.
21572 // Note that we cannot encounter Never?, as it is normalized to Null.
21573 if (IsNeverType()) {
21574 ASSERT(!IsNullable());
21575 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (left is Never)\n");
21576 return true;
21577 }
21578 // Left top type.
21579 if (IsDynamicType() || IsVoidType()) {
21580 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (left is top)\n");
21581 return false;
21582 }
21583 // Left Null type.
21584 if (IsNullType()) {
21585 const bool result = Instance::NullIsAssignableTo(other);
21586 TRACE_TYPE_CHECKS_VERBOSE(" - result: %s (left is Null)\n",
21587 (result ? "true" : "false"));
21588 return result;
21589 }
21590 Thread* thread = Thread::Current();
21591 Zone* zone = thread->zone();
21592 // Type parameters cannot be handled by Class::IsSubtypeOf().
21593 // When comparing two uninstantiated function types, one returning type
21594 // parameter K, the other returning type parameter V, we cannot assume that
21595 // K is a subtype of V, or vice versa. We only return true if K equals V, as
21596 // defined by TypeParameter::Equals.
21597 // The same rule applies when checking the upper bound of a still
21598 // uninstantiated type at compile time. Returning false will defer the test
21599 // to run time.
21600 // There are however some cases that can be decided at compile time.
21601 // For example, with class A<K, V extends K>, new A<T, T> called from within
21602 // a class B<T> will never require a run time bound check, even if T is
21603 // uninstantiated at compile time.
21604 if (IsTypeParameter()) {
21605 const TypeParameter& type_param = TypeParameter::Cast(*this);
21606 if (other.IsTypeParameter()) {
21607 const TypeParameter& other_type_param = TypeParameter::Cast(other);
21608 if (type_param.IsEquivalent(other_type_param,
21610 function_type_equivalence)) {
21612 " - result: true (equivalent type parameters)\n");
21613 return true;
21614 }
21615 }
21616 const AbstractType& bound = AbstractType::Handle(zone, type_param.bound());
21617 ASSERT(bound.IsFinalized());
21618 if (bound.IsSubtypeOf(other, space, function_type_equivalence)) {
21619 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (bound is a subtype)\n");
21620 return true;
21621 }
21622 // Apply additional subtyping rules if 'other' is 'FutureOr'.
21623 if (IsSubtypeOfFutureOr(zone, other, space, function_type_equivalence)) {
21625 " - result: true (type parameter is a subtype of FutureOr)\n");
21626 return true;
21627 }
21629 " - result: false (left is a type parameter)\n");
21630 return false;
21631 }
21632 if (other.IsTypeParameter()) {
21634 " - result: false (right is a type parameter)\n");
21635 return false;
21636 }
21637 // Function types cannot be handled by Class::IsSubtypeOf().
21638 if (IsFunctionType()) {
21639 // Any type that can be the type of a closure is a subtype of Function or
21640 // non-nullable Object.
21641 if (other.IsObjectType() || other.IsDartFunctionType()) {
21642 const bool result = !IsNullable() || !other.IsNonNullable();
21643 TRACE_TYPE_CHECKS_VERBOSE(" - result: %s (function vs non-function)\n",
21644 (result ? "true" : "false"));
21645 return result;
21646 }
21647 if (other.IsFunctionType()) {
21648 // Check for two function types.
21649 if (IsNullable() && other.IsNonNullable()) {
21651 " - result: false (function nullability)\n");
21652 return false;
21653 }
21654 const bool result = FunctionType::Cast(*this).IsSubtypeOf(
21655 FunctionType::Cast(other), space, function_type_equivalence);
21656 TRACE_TYPE_CHECKS_VERBOSE(" - result: %s (function types)\n",
21657 (result ? "true" : "false"));
21658 return result;
21659 }
21660 // Apply additional subtyping rules if 'other' is 'FutureOr'.
21661 if (IsSubtypeOfFutureOr(zone, other, space, function_type_equivalence)) {
21663 " - result: true (function type is a subtype of FutureOr)\n");
21664 return true;
21665 }
21666 // All possible supertypes for FunctionType have been checked.
21667 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (function type)\n");
21668 return false;
21669 } else if (other.IsFunctionType()) {
21670 // FunctionTypes can only be subtyped by other FunctionTypes, so don't
21671 // fall through to class-based type tests.
21673 " - result: false (right is a function type)\n");
21674 return false;
21675 }
21676 // Record types cannot be handled by Class::IsSubtypeOf().
21677 if (IsRecordType()) {
21678 if (other.IsObjectType() || other.IsDartRecordType()) {
21679 const bool result = !IsNullable() || !other.IsNonNullable();
21680 TRACE_TYPE_CHECKS_VERBOSE(" - result: %s (record vs non-record)\n",
21681 (result ? "true" : "false"));
21682 return result;
21683 }
21684 if (other.IsRecordType()) {
21685 // Check for two record types.
21686 if (IsNullable() && other.IsNonNullable()) {
21687 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (record nullability)\n");
21688 return false;
21689 }
21690 const bool result = RecordType::Cast(*this).IsSubtypeOf(
21691 RecordType::Cast(other), space, function_type_equivalence);
21692 TRACE_TYPE_CHECKS_VERBOSE(" - result: %s (record types)\n",
21693 (result ? "true" : "false"));
21694 return result;
21695 }
21696 // Apply additional subtyping rules if 'other' is 'FutureOr'.
21697 if (IsSubtypeOfFutureOr(zone, other, space, function_type_equivalence)) {
21699 " - result: true (record type is a subtype of FutureOr)\n");
21700 return true;
21701 }
21702 // All possible supertypes for record type have been checked.
21703 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (record type)\n");
21704 return false;
21705 } else if (other.IsRecordType()) {
21706 // RecordTypes can only be subtyped by other RecordTypes, so don't
21707 // fall through to class-based type tests.
21708 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (right is a record type)\n");
21709 return false;
21710 }
21711 ASSERT(IsType());
21712 const Class& type_cls = Class::Handle(zone, type_class());
21713 const bool result = Class::IsSubtypeOf(
21714 type_cls,
21715 TypeArguments::Handle(zone, Type::Cast(*this).GetInstanceTypeArguments(
21716 thread, /*canonicalize=*/false)),
21717 nullability(), other, space, function_type_equivalence);
21718 TRACE_TYPE_CHECKS_VERBOSE(" - result: %s (class type check)\n",
21719 (result ? "true" : "false"));
21720 return result;
21721}
21722
21723bool AbstractType::IsSubtypeOfFutureOr(
21724 Zone* zone,
21725 const AbstractType& other,
21726 Heap::Space space,
21727 FunctionTypeMapping* function_type_equivalence) const {
21728 if (other.IsFutureOrType()) {
21729 // This function is only called with a receiver that is either a function
21730 // type, record type, or an uninstantiated type parameter.
21731 // Therefore, it cannot be of class Future and we can spare the check.
21732 ASSERT(IsFunctionType() || IsRecordType() || IsTypeParameter());
21733 const TypeArguments& other_type_arguments =
21734 TypeArguments::Handle(zone, other.arguments());
21735 const AbstractType& other_type_arg =
21736 AbstractType::Handle(zone, other_type_arguments.TypeAtNullSafe(0));
21737 if (other_type_arg.IsTopTypeForSubtyping()) {
21738 return true;
21739 }
21740 // Retry the IsSubtypeOf check after unwrapping type arg of FutureOr.
21741 if (IsSubtypeOf(other_type_arg, space, function_type_equivalence)) {
21742 return true;
21743 }
21744 }
21745 return false;
21746}
21747
21749 // AbstractType is an abstract class.
21750 UNREACHABLE();
21751 return 0;
21752}
21753
21754const char* AbstractType::ToCString() const {
21755 // All subclasses should implement this appropriately, so the only value that
21756 // should reach this implementation should be the null value.
21757 ASSERT(IsNull());
21758 return "AbstractType: null";
21759}
21760
21762 if (stub.IsNull()) {
21764 return;
21765 }
21766
21767 auto& old = Code::Handle(Thread::Current()->zone());
21768 while (true) {
21769 // We load the old TTS and it's entrypoint.
21770 old = untag()->type_test_stub<std::memory_order_acquire>();
21771 uword old_entry_point = old.IsNull() ? 0 : old.EntryPoint();
21772
21773 // If we can successfully update the entrypoint of the TTS, we will
21774 // unconditionally also set the [Code] of the TTS.
21775 //
21776 // Any competing writer would do the same, lose the compare-exchange, loop
21777 // around and continue loading the old [Code] TTS and continue to lose the
21778 // race until we have finally also updated the [Code] TTS.
21779 if (untag()->type_test_stub_entry_point_.compare_exchange_strong(
21780 old_entry_point, stub.EntryPoint())) {
21781 untag()->set_type_test_stub<std::memory_order_release>(stub.ptr());
21782 return;
21783 }
21784 }
21785}
21786
21788 if (stub.IsNull()) {
21789 // This only happens during bootstrapping when creating Type objects before
21790 // we have the instructions.
21792 StoreNonPointer(&untag()->type_test_stub_entry_point_, 0);
21793 untag()->set_type_test_stub(stub.ptr());
21794 return;
21795 }
21796
21797 StoreNonPointer(&untag()->type_test_stub_entry_point_, stub.EntryPoint());
21798 untag()->set_type_test_stub(stub.ptr());
21799}
21800
21801TypePtr Type::NullType() {
21802 return IsolateGroup::Current()->object_store()->null_type();
21803}
21804
21805TypePtr Type::DynamicType() {
21806 return Object::dynamic_type().ptr();
21807}
21808
21809TypePtr Type::VoidType() {
21810 return Object::void_type().ptr();
21811}
21812
21813TypePtr Type::NeverType() {
21814 return IsolateGroup::Current()->object_store()->never_type();
21815}
21816
21817TypePtr Type::ObjectType() {
21818 return IsolateGroup::Current()->object_store()->object_type();
21819}
21820
21821TypePtr Type::BoolType() {
21822 return IsolateGroup::Current()->object_store()->bool_type();
21823}
21824
21825TypePtr Type::IntType() {
21826 return IsolateGroup::Current()->object_store()->int_type();
21827}
21828
21829TypePtr Type::NullableIntType() {
21830 return IsolateGroup::Current()->object_store()->nullable_int_type();
21831}
21832
21833TypePtr Type::SmiType() {
21834 return IsolateGroup::Current()->object_store()->smi_type();
21835}
21836
21837TypePtr Type::MintType() {
21838 return IsolateGroup::Current()->object_store()->mint_type();
21839}
21840
21841TypePtr Type::Double() {
21842 return IsolateGroup::Current()->object_store()->double_type();
21843}
21844
21845TypePtr Type::NullableDouble() {
21846 return IsolateGroup::Current()->object_store()->nullable_double_type();
21847}
21848
21849TypePtr Type::Float32x4() {
21850 return IsolateGroup::Current()->object_store()->float32x4_type();
21851}
21852
21853TypePtr Type::Float64x2() {
21854 return IsolateGroup::Current()->object_store()->float64x2_type();
21855}
21856
21857TypePtr Type::Int32x4() {
21858 return IsolateGroup::Current()->object_store()->int32x4_type();
21859}
21860
21861TypePtr Type::Number() {
21862 return IsolateGroup::Current()->object_store()->number_type();
21863}
21864
21865TypePtr Type::NullableNumber() {
21866 return IsolateGroup::Current()->object_store()->nullable_number_type();
21867}
21868
21869TypePtr Type::StringType() {
21870 return IsolateGroup::Current()->object_store()->string_type();
21871}
21872
21873TypePtr Type::ArrayType() {
21874 return IsolateGroup::Current()->object_store()->array_type();
21875}
21876
21877TypePtr Type::DartFunctionType() {
21878 return IsolateGroup::Current()->object_store()->function_type();
21879}
21880
21881TypePtr Type::DartTypeType() {
21882 return IsolateGroup::Current()->object_store()->type_type();
21883}
21884
21885TypePtr Type::NewNonParameterizedType(const Class& type_class) {
21886 ASSERT(type_class.NumTypeArguments() == 0);
21887 if (type_class.IsNullClass()) {
21888 return Type::NullType();
21889 }
21890 if (type_class.IsDynamicClass()) {
21891 return Type::DynamicType();
21892 }
21893 if (type_class.IsVoidClass()) {
21894 return Type::VoidType();
21895 }
21896 // It is too early to use the class finalizer, as type_class may not be named
21897 // yet, so do not call DeclarationType().
21898 Type& type = Type::Handle(type_class.declaration_type());
21899 if (type.IsNull()) {
21901 Object::null_type_arguments(), Nullability::kNonNullable);
21902 type.SetIsFinalized();
21903 type ^= type.Canonicalize(Thread::Current());
21904 type_class.set_declaration_type(type);
21905 }
21906 ASSERT(type.IsFinalized());
21907 return type.ptr();
21908}
21909
21910TypePtr Type::ToNullability(Nullability value, Heap::Space space) const {
21911 if (nullability() == value) {
21912 return ptr();
21913 }
21914 // Type parameter instantiation may request a nullability change, which should
21915 // be ignored for types dynamic and void. Type Null cannot be the result of
21916 // instantiating a non-nullable type parameter (TypeError thrown).
21917 const classid_t cid = type_class_id();
21918 if (cid == kDynamicCid || cid == kVoidCid || cid == kNullCid) {
21919 return ptr();
21920 }
21922 // Normalize Never? to Null.
21923 return Type::NullType();
21924 }
21925 // Clone type and set new nullability.
21926 Type& type = Type::Handle();
21927 // Always cloning in old space and removing space parameter would not satisfy
21928 // currently existing requests for type instantiation in new space.
21929 // Load with relaxed atomics to prevent data race with updating type
21930 // testing stub.
21931 type ^= Object::Clone(*this, space, /*load_with_relaxed_atomics=*/true);
21932 type.set_nullability(value);
21933 type.SetHash(0);
21934 type.InitializeTypeTestingStubNonAtomic(
21936 if (IsCanonical()) {
21937 // Object::Clone does not clone canonical bit.
21938 ASSERT(!type.IsCanonical());
21939 type ^= type.Canonicalize(Thread::Current());
21940 }
21941 return type.ptr();
21942}
21943
21945 Heap::Space space) const {
21946 if (nullability() == value) {
21947 return ptr();
21948 }
21949 // Clone function type and set new nullability.
21951 type.set_nullability(value);
21952 type.SetHash(0);
21953 type.InitializeTypeTestingStubNonAtomic(
21955 if (IsCanonical()) {
21956 // Object::Clone does not clone canonical bit.
21957 ASSERT(!type.IsCanonical());
21958 type ^= type.Canonicalize(Thread::Current());
21959 }
21960 return type.ptr();
21961}
21962
21964 return untag()->type_class_id();
21965}
21966
21967ClassPtr Type::type_class() const {
21969}
21970
21971bool Type::IsInstantiated(Genericity genericity,
21972 intptr_t num_free_fun_type_params) const {
21974 return true;
21975 }
21976 if ((genericity == kAny) && (num_free_fun_type_params == kAllFree) &&
21978 return false;
21979 }
21980 if (arguments() == TypeArguments::null()) {
21981 return true;
21982 }
21984 return args.IsSubvectorInstantiated(0, args.Length(), genericity,
21985 num_free_fun_type_params);
21986}
21987
21988AbstractTypePtr Type::InstantiateFrom(
21989 const TypeArguments& instantiator_type_arguments,
21990 const TypeArguments& function_type_arguments,
21991 intptr_t num_free_fun_type_params,
21992 Heap::Space space,
21993 FunctionTypeMapping* function_type_mapping,
21994 intptr_t num_parent_type_args_adjustment) const {
21995 Zone* zone = Thread::Current()->zone();
21998 // Note that the type class has to be resolved at this time, but not
21999 // necessarily finalized yet. We may be checking bounds at compile time or
22000 // finalizing the type argument vector of a recursive type.
22001 const Class& cls = Class::Handle(zone, type_class());
22002 TypeArguments& type_arguments = TypeArguments::Handle(zone, arguments());
22003 ASSERT(type_arguments.Length() == cls.NumTypeParameters());
22004 type_arguments = type_arguments.InstantiateFrom(
22005 instantiator_type_arguments, function_type_arguments,
22006 num_free_fun_type_params, space, function_type_mapping,
22007 num_parent_type_args_adjustment);
22008 // A returned empty_type_arguments indicates a failed instantiation in dead
22009 // code that must be propagated up to the caller, the optimizing compiler.
22010 if (type_arguments.ptr() == Object::empty_type_arguments().ptr()) {
22011 return Type::null();
22012 }
22013 // This uninstantiated type is not modified, as it can be instantiated
22014 // with different instantiators. Allocate a new instantiated version of it.
22015 const Type& instantiated_type =
22016 Type::Handle(zone, Type::New(cls, type_arguments, nullability(), space));
22017 instantiated_type.SetIsFinalized();
22018 // Canonicalization is not part of instantiation.
22019 return instantiated_type.NormalizeFutureOrType(space);
22020}
22021
22022AbstractTypePtr Type::UpdateFunctionTypes(
22023 intptr_t num_parent_type_args_adjustment,
22024 intptr_t num_free_fun_type_params,
22025 Heap::Space space,
22026 FunctionTypeMapping* function_type_mapping) const {
22028 ASSERT(num_parent_type_args_adjustment >= 0);
22029 if (arguments() == Object::null()) {
22030 return ptr();
22031 }
22032 Zone* zone = Thread::Current()->zone();
22033 const auto& type_args = TypeArguments::Handle(zone, arguments());
22034 const auto& updated_type_args = TypeArguments::Handle(
22035 zone, type_args.UpdateFunctionTypes(num_parent_type_args_adjustment,
22036 num_free_fun_type_params, space,
22037 function_type_mapping));
22038 if (type_args.ptr() == updated_type_args.ptr()) {
22039 return ptr();
22040 }
22041 const Class& cls = Class::Handle(zone, type_class());
22042 const Type& new_type = Type::Handle(
22043 zone, Type::New(cls, updated_type_args, nullability(), space));
22044 new_type.SetIsFinalized();
22045 return new_type.ptr();
22046}
22047
22048// Certain built-in classes are treated as syntactically equivalent.
22050 if (IsIntegerClassId(cid)) {
22051 return Type::Handle(Type::IntType()).type_class_id();
22052 } else if (IsStringClassId(cid)) {
22053 return Type::Handle(Type::StringType()).type_class_id();
22054 } else if (cid == kDoubleCid) {
22055 return Type::Handle(Type::Double()).type_class_id();
22056 } else if (IsTypeClassId(cid)) {
22057 return Type::Handle(Type::DartTypeType()).type_class_id();
22058 } else if (IsArrayClassId(cid)) {
22059 return Class::Handle(IsolateGroup::Current()->object_store()->list_class())
22060 .id();
22061 }
22062 return cid;
22063}
22064
22065bool Type::IsEquivalent(const Instance& other,
22066 TypeEquality kind,
22067 FunctionTypeMapping* function_type_equivalence) const {
22068 ASSERT(!IsNull());
22069 if (ptr() == other.ptr()) {
22070 return true;
22071 }
22072 if (!other.IsType()) {
22073 return false;
22074 }
22075 const Type& other_type = Type::Cast(other);
22076 const classid_t type_cid = type_class_id();
22077 const classid_t other_type_cid = other_type.type_class_id();
22078 if (type_cid != other_type_cid) {
22079 if ((kind != TypeEquality::kSyntactical) ||
22082 return false;
22083 }
22084 }
22085 Thread* thread = Thread::Current();
22086 Zone* zone = thread->zone();
22087 ASSERT(
22088 Class::Handle(zone, type_class()).NumTypeParameters(thread) ==
22089 Class::Handle(zone, other_type.type_class()).NumTypeParameters(thread));
22090
22091 if (!IsNullabilityEquivalent(thread, other_type, kind)) {
22092 return false;
22093 }
22094 if (!IsFinalized() || !other_type.IsFinalized()) {
22096 return false; // Too early to decide if equal.
22097 }
22098 if (arguments() == other_type.arguments()) {
22099 return true;
22100 }
22101 const TypeArguments& type_args =
22102 TypeArguments::Handle(zone, this->arguments());
22103 const TypeArguments& other_type_args =
22104 TypeArguments::Handle(zone, other_type.arguments());
22105 return type_args.IsEquivalent(other_type_args, kind,
22106 function_type_equivalence);
22107}
22108
22110 const Instance& other,
22111 TypeEquality kind,
22112 FunctionTypeMapping* function_type_equivalence) const {
22113 ASSERT(!IsNull());
22114 if (ptr() == other.ptr()) {
22115 return true;
22116 }
22117 if (!other.IsFunctionType()) {
22118 return false;
22119 }
22120 const FunctionType& other_type = FunctionType::Cast(other);
22121 if ((packed_parameter_counts() != other_type.packed_parameter_counts()) ||
22123 other_type.packed_type_parameter_counts())) {
22124 // Different number of type parameters or parameters.
22125 return false;
22126 }
22127 Thread* thread = Thread::Current();
22128 Zone* zone = thread->zone();
22129 if (!IsNullabilityEquivalent(thread, other_type, kind)) {
22130 return false;
22131 }
22132 if (!IsFinalized() || !other_type.IsFinalized()) {
22134 return false; // Too early to decide if equal.
22135 }
22136 FunctionTypeMapping scope(zone, &function_type_equivalence, *this,
22137 other_type);
22138
22139 // Equal function types must have equal signature types and equal optional
22140 // named arguments.
22141 // Compare function type parameters and their bounds.
22142 // Check the type parameters and bounds of generic functions.
22143 if (!HasSameTypeParametersAndBounds(other_type, kind,
22144 function_type_equivalence)) {
22145 return false;
22146 }
22147 AbstractType& param_type = Type::Handle(zone);
22148 AbstractType& other_param_type = Type::Handle(zone);
22149 // Check the result type.
22150 param_type = result_type();
22151 other_param_type = other_type.result_type();
22152 if (!param_type.IsEquivalent(other_param_type, kind,
22153 function_type_equivalence)) {
22154 return false;
22155 }
22156 // Check the types of all parameters.
22157 const intptr_t num_params = NumParameters();
22158 ASSERT(other_type.NumParameters() == num_params);
22159 for (intptr_t i = 0; i < num_params; i++) {
22160 param_type = ParameterTypeAt(i);
22161 other_param_type = other_type.ParameterTypeAt(i);
22162 // Use contravariant order in case we test for subtyping.
22163 if (!other_param_type.IsEquivalent(param_type, kind,
22164 function_type_equivalence)) {
22165 return false;
22166 }
22167 }
22169 ASSERT(other_type.HasOptionalNamedParameters()); // Same packed counts.
22170 for (intptr_t i = num_fixed_parameters(); i < num_params; i++) {
22171 if (ParameterNameAt(i) != other_type.ParameterNameAt(i)) {
22172 return false;
22173 }
22174 if (IsRequiredAt(i) != other_type.IsRequiredAt(i)) {
22175 return false;
22176 }
22177 }
22178 }
22179 return true;
22180}
22181
22182bool Type::IsDeclarationTypeOf(const Class& cls) const {
22183 ASSERT(type_class() == cls.ptr());
22184 if (cls.IsNullClass()) {
22185 return true;
22186 }
22187 if (cls.IsGeneric() || cls.IsClosureClass()) {
22188 return false;
22189 }
22191}
22192
22193// Keep in sync with TypeSerializationCluster::IsInCanonicalSet.
22194AbstractTypePtr Type::Canonicalize(Thread* thread) const {
22195 Zone* zone = thread->zone();
22197 if (IsCanonical()) {
22198#ifdef DEBUG
22199 TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
22200 ASSERT(type_args.IsCanonical());
22201 ASSERT(type_args.IsOld());
22202#endif
22203 return this->ptr();
22204 }
22205 auto isolate_group = thread->isolate_group();
22206 const classid_t cid = type_class_id();
22207 if (cid == kDynamicCid) {
22208 ASSERT(Object::dynamic_type().IsCanonical());
22209 return Object::dynamic_type().ptr();
22210 }
22211
22212 if (cid == kVoidCid) {
22213 ASSERT(Object::void_type().IsCanonical());
22214 return Object::void_type().ptr();
22215 }
22216
22217 const Class& cls = Class::Handle(zone, type_class());
22218
22219 // Fast canonical lookup/registry for simple types.
22220 if (IsDeclarationTypeOf(cls)) {
22221 ASSERT(!cls.IsNullClass() || IsNullable());
22222 Type& type = Type::Handle(zone, cls.declaration_type());
22223 if (type.IsNull()) {
22224 ASSERT(!cls.ptr()->untag()->InVMIsolateHeap() ||
22225 (isolate_group == Dart::vm_isolate_group()));
22226 // Canonicalize the type arguments of the supertype, if any.
22227 TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
22228 type_args = type_args.Canonicalize(thread);
22229 set_arguments(type_args);
22230 type = cls.declaration_type();
22231 // May be set while canonicalizing type args.
22232 if (type.IsNull()) {
22233 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
22234 // Recheck if type exists.
22235 type = cls.declaration_type();
22236 if (type.IsNull()) {
22237 if (this->IsNew()) {
22238 type ^= Object::Clone(*this, Heap::kOld);
22239 } else {
22240 type = this->ptr();
22241 }
22242 ASSERT(type.IsOld());
22243 type.ComputeHash();
22244 type.SetCanonical();
22245 cls.set_declaration_type(type);
22246 return type.ptr();
22247 }
22248 }
22249 }
22250 ASSERT(this->Equals(type));
22251 ASSERT(type.IsOld());
22252 if (type.IsCanonical()) {
22253 return type.ptr();
22254 }
22255 }
22256
22257 Type& type = Type::Handle(zone);
22258 ObjectStore* object_store = isolate_group->object_store();
22259 {
22260 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
22261 CanonicalTypeSet table(zone, object_store->canonical_types());
22262 type ^= table.GetOrNull(CanonicalTypeKey(*this));
22263 ASSERT(object_store->canonical_types() == table.Release().ptr());
22264 }
22265 if (type.IsNull()) {
22266 // The type was not found in the table. It is not canonical yet.
22267
22268 // Canonicalize the type arguments.
22269 TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
22270 ASSERT(type_args.IsNull() ||
22271 (type_args.Length() == cls.NumTypeParameters()));
22272 type_args = type_args.Canonicalize(thread);
22273 set_arguments(type_args);
22274 ASSERT(type_args.IsNull() || type_args.IsOld());
22275
22276 // Check to see if the type got added to canonical table as part of the
22277 // type arguments canonicalization.
22278 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
22279 CanonicalTypeSet table(zone, object_store->canonical_types());
22280 type ^= table.GetOrNull(CanonicalTypeKey(*this));
22281 if (type.IsNull()) {
22282 // Add this type into the canonical table of types.
22283 if (this->IsNew()) {
22284 type ^= Object::Clone(*this, Heap::kOld);
22285 } else {
22286 type = this->ptr();
22287 }
22288 ASSERT(type.IsOld());
22289 type.SetCanonical(); // Mark object as being canonical.
22290 bool present = table.Insert(type);
22291 ASSERT(!present);
22292 }
22293 object_store->set_canonical_types(table.Release());
22294 }
22295 return type.ptr();
22296}
22297
22298void Type::EnumerateURIs(URIs* uris) const {
22299 if (IsDynamicType() || IsVoidType() || IsNeverType()) {
22300 return;
22301 }
22302 Thread* thread = Thread::Current();
22303 Zone* zone = thread->zone();
22304 const Class& cls = Class::Handle(zone, type_class());
22305 const String& name = String::Handle(zone, cls.UserVisibleName());
22306 const Library& library = Library::Handle(zone, cls.library());
22307 const String& uri = String::Handle(zone, library.url());
22308 AddURI(uris, name, uri);
22309 const TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
22310 type_args.EnumerateURIs(uris);
22311}
22312
22313void Type::PrintName(NameVisibility name_visibility,
22314 BaseTextBuffer* printer) const {
22315 Thread* thread = Thread::Current();
22316 Zone* zone = thread->zone();
22317 const Class& cls = Class::Handle(zone, type_class());
22318 const TypeParameters& params =
22319 TypeParameters::Handle(zone, cls.type_parameters());
22320 printer->AddString(cls.NameCString(name_visibility));
22322 intptr_t num_type_params = 0;
22323 if (cls.is_declaration_loaded()) {
22324 num_type_params = cls.NumTypeParameters(thread);
22325 } else if (!args.IsNull() || args.ptr() != params.defaults()) {
22326 num_type_params = args.Length();
22327 }
22328 if (num_type_params == 0) {
22329 // Do nothing.
22330 } else {
22331 args.PrintSubvectorName(0, num_type_params, name_visibility, printer);
22332 }
22333 printer->AddString(NullabilitySuffix(name_visibility));
22334 // The name is only used for type checking and debugging purposes.
22335 // Unless profiling data shows otherwise, it is not worth caching the name in
22336 // the type.
22337}
22338
22339uword Type::ComputeHash() const {
22341 uint32_t result = type_class_id();
22342 result = CombineHashes(result, static_cast<uint32_t>(nullability()));
22343 uint32_t type_args_hash = TypeArguments::kAllDynamicHash;
22344 if (arguments() != TypeArguments::null()) {
22346 type_args_hash = args.Hash();
22347 }
22348 result = CombineHashes(result, type_args_hash);
22350 SetHash(result);
22351 return result;
22352}
22353
22356 uint32_t result =
22358 result = CombineHashes(result, static_cast<uint32_t>(nullability()));
22360 const intptr_t num_type_params = NumTypeParameters();
22361 if (num_type_params > 0) {
22362 const TypeParameters& type_params =
22364 const TypeArguments& bounds = TypeArguments::Handle(type_params.bounds());
22365 result = CombineHashes(result, bounds.Hash());
22366 // Since the default arguments are ignored when comparing two generic
22367 // function types for type equality, the hash does not depend on them.
22368 }
22369 type = result_type();
22370 result = CombineHashes(result, type.Hash());
22371 const intptr_t num_params = NumParameters();
22372 for (intptr_t i = 0; i < num_params; i++) {
22374 result = CombineHashes(result, type.Hash());
22375 }
22377 String& param_name = String::Handle();
22378 for (intptr_t i = num_fixed_parameters(); i < num_params; i++) {
22379 param_name = ParameterNameAt(i);
22380 result = CombineHashes(result, param_name.Hash());
22381 }
22382 // Required flag is not hashed, see comment above about legacy type.
22383 }
22385 SetHash(result);
22386 return result;
22387}
22388
22389void Type::set_type_class(const Class& value) const {
22390 ASSERT(!value.IsNull());
22391 set_type_class_id(value.id());
22392}
22393
// Sets the direct type argument vector of this type.
// Only legal before canonicalization. A null vector (all dynamic) is
// always accepted; otherwise the vector length must match the class's
// type parameter count, with the relaxations noted in the assert below.
void Type::set_arguments(const TypeArguments& value) const {
  ASSERT(!IsCanonical());
  ASSERT(value.IsNull() ||
         // Do not attempt to query number of type parameters
         // before class declaration is fully loaded.
         !Class::Handle(type_class()).is_declaration_loaded() ||
         // Relax assertion in order to support invalid generic types
         // created in ClosureMirror_function.
         (type_class_id() == kInstanceCid) ||
         value.Length() == Class::Handle(type_class()).NumTypeParameters());
  untag()->set_arguments(value.ptr());
}
22406
22407TypeArgumentsPtr Type::GetInstanceTypeArguments(Thread* thread,
22408 bool canonicalize) const {
22409 Zone* zone = thread->zone();
22410 const auto& cls = Class::Handle(zone, type_class());
22411 const auto& args = TypeArguments::Handle(zone, arguments());
22412 return cls.GetInstanceTypeArguments(thread, args, canonicalize);
22413}
22414
// Allocates a new, uninitialized Type in |space|.
TypePtr Type::New(Heap::Space space) {
  return Object::Allocate<Type>(space);
}
22418
// Allocates a new Type for |clazz| with the given direct type |arguments|
// and |nullability| in |space|. Hash and flags start cleared; the type
// testing stub is initialized before returning.
TypePtr Type::New(const Class& clazz,
                  const TypeArguments& arguments,
                  Nullability nullability,
                  Heap::Space space) {
  Zone* Z = Thread::Current()->zone();
  const Type& result = Type::Handle(Z, Type::New(space));
  result.SetHash(0);
  result.set_flags(0);
  result.set_nullability(nullability);
  result.set_type_class(clazz);
  result.set_arguments(arguments);

  // NOTE(review): the argument line of this call appears to have been
  // elided in this copy of the source; restore from upstream.
  result.InitializeTypeTestingStubNonAtomic(
  return result.ptr();
}
22436
// Records the concrete class id backing this type.
// NOTE(review): some ASSERT lines appear to have been elided in this copy
// of the source; restore from upstream.
void Type::set_type_class_id(intptr_t id) const {
  // We should never need a Type object for a top-level class.
  ASSERT(id != kIllegalCid);
  untag()->set_type_class_id(id);
}
22445
22446const char* Type::ToCString() const {
22447 if (IsNull()) {
22448 return "Type: null";
22449 }
22450 Zone* zone = Thread::Current()->zone();
22451 ZoneTextBuffer args(zone);
22452 const TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
22453 const char* args_cstr = "";
22454 if (!type_args.IsNull()) {
22455 type_args.PrintSubvectorName(0, type_args.Length(), kInternalName, &args);
22456 args_cstr = args.buffer();
22457 }
22458 const Class& cls = Class::Handle(zone, type_class());
22459 const char* class_name;
22460 const String& name = String::Handle(zone, cls.Name());
22461 class_name = name.IsNull() ? "<null>" : name.ToCString();
22462 const char* suffix = NullabilitySuffix(kInternalName);
22463 return OS::SCreate(zone, "Type: %s%s%s", class_name, args_cstr, suffix);
22464}
22465
// Returns the canonical version of this function type.
// Fast path: an already-canonical signature is returned as-is (with debug
// verification that all of its components are old-space and canonical).
// Slow path: the signature is looked up in the isolate group's
// canonical_function_types table under the type canonicalization mutex;
// on a miss it is cloned to old space if needed, its component types are
// canonicalized, and it is inserted into the table.
//
// NOTE(review): several handle-declaration lines appear to have been
// elided in this copy of the source (e.g. the initializers of
// |type_params|, |type|, |sig| and the canonical |table|); restore from
// upstream before compiling.
AbstractTypePtr FunctionType::Canonicalize(Thread* thread) const {
  Zone* zone = thread->zone();
  if (IsCanonical()) {
#ifdef DEBUG
    // Verify that all fields are allocated in old space and are canonical.
    if (IsGeneric()) {
      const TypeParameters& type_params =
      ASSERT(type_params.IsOld());
      TypeArguments& type_args = TypeArguments::Handle(zone);
      type_args = type_params.bounds();
      ASSERT(type_args.IsOld());
      ASSERT(type_args.IsCanonical());
      type_args = type_params.defaults();
      ASSERT(type_args.IsOld());
      ASSERT(type_args.IsCanonical());
    }
    type = result_type();
    ASSERT(type.IsOld());
    ASSERT(type.IsCanonical());
    const intptr_t num_params = NumParameters();
    for (intptr_t i = 0; i < num_params; i++) {
      ASSERT(type.IsOld());
      ASSERT(type.IsCanonical());
    }
#endif
    return ptr();
  }
  auto isolate_group = thread->isolate_group();
  ObjectStore* object_store = isolate_group->object_store();
  {
    // Read-only lookup: the table is released unmodified here, so the
    // mutex is only held for the duration of the probe.
    SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
        object_store->canonical_function_types());
    sig ^= table.GetOrNull(CanonicalFunctionTypeKey(*this));
    ASSERT(object_store->canonical_function_types() == table.Release().ptr());
  }
  if (sig.IsNull()) {
    // The function type was not found in the table. It is not canonical yet.
    // Canonicalize its type parameters and types.

    // Clone this function type to the old heap and update
    // owners of type parameters.
    FunctionType& new_sig = FunctionType::Handle(zone);
    if (this->IsNew()) {
      new_sig ^= FunctionType::Clone(*this, Heap::kOld);
    } else {
      new_sig ^= this->ptr();
    }
    ASSERT(new_sig.IsOld());

    // Canonicalize the bounds and default arguments of a generic signature.
    if (new_sig.IsGeneric()) {
      const TypeParameters& type_params =
          TypeParameters::Handle(zone, new_sig.type_parameters());
      ASSERT(type_params.IsOld());
      TypeArguments& type_args = TypeArguments::Handle(zone);
      type_args = type_params.bounds();
      if (!type_args.IsCanonical()) {
        type_args = type_args.Canonicalize(thread);
        type_params.set_bounds(type_args);
      }
      type_args = type_params.defaults();
      if (!type_args.IsCanonical()) {
        type_args = type_args.Canonicalize(thread);
        type_params.set_defaults(type_args);
      }
    }
    type = new_sig.result_type();
    if (!type.IsCanonical()) {
      type = type.Canonicalize(thread);
      new_sig.set_result_type(type);
    }
    ASSERT(Array::Handle(zone, new_sig.parameter_types()).IsOld());
    ASSERT(Array::Handle(zone, new_sig.named_parameter_names()).IsOld());
    const intptr_t num_params = new_sig.NumParameters();
    for (intptr_t i = 0; i < num_params; i++) {
      type = new_sig.ParameterTypeAt(i);
      if (!type.IsCanonical()) {
        type = type.Canonicalize(thread);
        new_sig.SetParameterTypeAt(i, type);
      }
    }
    // Check to see if the function type got added to canonical table
    // during canonicalization of its signature types.
    SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
        object_store->canonical_function_types());
    sig ^= table.GetOrNull(CanonicalFunctionTypeKey(new_sig));
    if (sig.IsNull()) {
      // Add this function type into the canonical table of function types.
      sig = new_sig.ptr();
      ASSERT(sig.IsOld());
      sig.SetCanonical();  // Mark object as being canonical.
      bool present = table.Insert(sig);
      ASSERT(!present);
    }
    object_store->set_canonical_function_types(table.Release());
  }
  return sig.ptr();
}
22573
22575 Thread* thread = Thread::Current();
22576 Zone* zone = thread->zone();
22578 const intptr_t num_params = NumParameters();
22579 for (intptr_t i = 0; i < num_params; i++) {
22581 type.EnumerateURIs(uris);
22582 }
22583 // Handle result type last, since it appears last in the user visible name.
22584 type = result_type();
22585 type.EnumerateURIs(uris);
22586}
22587
22589 BaseTextBuffer* printer) const {
22590 const char* suffix = NullabilitySuffix(name_visibility);
22591 if (suffix[0] != '\0') {
22592 printer->AddString("(");
22593 }
22594 FunctionType::Cast(*this).Print(name_visibility, printer);
22595 if (suffix[0] != '\0') {
22596 printer->AddString(")");
22597 printer->AddString(suffix);
22598 }
22599}
22600
22602 Heap::Space space) const {
22603 if (nullability() == value) {
22604 return ptr();
22605 }
22606 // Clone type parameter and set new nullability.
22607 TypeParameter& type_parameter = TypeParameter::Handle();
22608 type_parameter ^= Object::Clone(*this, space);
22609 type_parameter.set_nullability(value);
22610 type_parameter.SetHash(0);
22613 if (IsCanonical()) {
22614 // Object::Clone does not clone canonical bit.
22615 ASSERT(!type_parameter.IsCanonical());
22617 ASSERT(type_parameter.IsFinalized());
22618 type_parameter ^= type_parameter.Canonicalize(Thread::Current());
22619 }
22620 return type_parameter.ptr();
22621}
22622
22624 intptr_t num_free_fun_type_params) const {
22625 // Bounds of class type parameters are ignored in the VM.
22626 if (IsClassTypeParameter()) {
22627 return genericity == kFunctions;
22628 }
22630 return (genericity == kCurrentClass) || (index() >= num_free_fun_type_params);
22631}
22632
22634 const Instance& other,
22635 TypeEquality kind,
22636 FunctionTypeMapping* function_type_equivalence) const {
22637 TRACE_TYPE_CHECKS_VERBOSE(" TypeParameter::IsEquivalent(%s, %s, kind %d)\n",
22638 ToCString(), other.ToCString(),
22639 static_cast<int>(kind));
22640 if (ptr() == other.ptr()) {
22641 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (same types)\n");
22642 return true;
22643 }
22644 if (!other.IsTypeParameter()) {
22646 " - result: false (other is not a type parameter)\n");
22647 return false;
22648 }
22649 const TypeParameter& other_type_param = TypeParameter::Cast(other);
22650 ASSERT(IsFinalized() && other_type_param.IsFinalized());
22651 // Compare index, base and owner.
22653 if (!other_type_param.IsFunctionTypeParameter()) {
22655 " - result: false (other is not a function type parameter)\n");
22656 return false;
22657 }
22659 other_type_param.parameterized_function_type()) &&
22660 ((function_type_equivalence == nullptr) ||
22661 !function_type_equivalence->ContainsOwnersOfTypeParameters(
22662 *this, other_type_param))) {
22664 " - result: false (owners are not equivalent)\n");
22665 return false;
22666 }
22667 } else {
22668 if (!other_type_param.IsClassTypeParameter()) {
22670 " - result: false (other is not a class type parameter)\n");
22671 return false;
22672 }
22673 if (parameterized_class_id() != other_type_param.parameterized_class_id()) {
22675 " - result: false (parameterized class id)\n");
22676 return false;
22677 }
22678 }
22679 if (base() != other_type_param.base() ||
22680 index() != other_type_param.index()) {
22681 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (mismatch base/index)\n");
22682 return false;
22683 }
22684 if (!IsNullabilityEquivalent(Thread::Current(), other_type_param, kind)) {
22685 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (mismatch nullability)\n");
22686 return false;
22687 }
22688 TRACE_TYPE_CHECKS_VERBOSE(" - result: true\n");
22689 return true;
22690}
22691
22692void TypeParameter::set_owner(const Object& value) const {
22693 ASSERT((IsFunctionTypeParameter() && value.IsFunctionType()) ||
22694 (IsClassTypeParameter() && value.IsSmi()));
22695 untag()->set_owner(value.ptr());
22696}
22697
22699 if (IsClassTypeParameter()) {
22700 return Smi::Value(Smi::RawCast(untag()->owner()));
22701 } else {
22702 return kFunctionCid;
22703 }
22704}
22707 untag()->set_owner(Smi::New(value));
22708}
22709
22711 if (IsClassTypeParameter()) {
22713 if (cid != kIllegalCid) {
22715 }
22716 }
22717 return Class::null();
22718}
22719
22722 return FunctionType::RawCast(untag()->owner());
22723}
22724
22725void TypeParameter::set_base(intptr_t value) const {
22726 ASSERT(value >= 0);
22728 StoreNonPointer(&untag()->base_, value);
22729}
22730
22731void TypeParameter::set_index(intptr_t value) const {
22732 ASSERT(value >= 0);
22734 StoreNonPointer(&untag()->index_, value);
22735}
22736
22737AbstractTypePtr TypeParameter::bound() const {
22740 const auto& type_parameters =
22741 TypeParameters::Handle(owner.type_parameters());
22742 return type_parameters.BoundAt(index() - base());
22743 } else {
22744 const auto& owner = Class::Handle(parameterized_class());
22745 if (owner.IsNull()) {
22746 return IsolateGroup::Current()->object_store()->nullable_object_type();
22747 }
22748 const auto& type_parameters =
22749 TypeParameters::Handle(owner.type_parameters());
22750 return type_parameters.BoundAt(index() - base());
22751 }
22752}
22753
22755 const TypeArguments& instantiator_type_arguments,
22756 const TypeArguments& function_type_arguments) const {
22758 const TypeArguments& type_args = IsFunctionTypeParameter()
22759 ? function_type_arguments
22760 : instantiator_type_arguments;
22761 return type_args.TypeAtNullSafe(index());
22762}
22763
22765 const TypeArguments& instantiator_type_arguments,
22766 const TypeArguments& function_type_arguments,
22767 intptr_t num_free_fun_type_params,
22768 Heap::Space space,
22769 FunctionTypeMapping* function_type_mapping,
22770 intptr_t num_parent_type_args_adjustment) const {
22771 Zone* zone = Thread::Current()->zone();
22773 bool substituted = false;
22776 if (index() >= num_free_fun_type_params) {
22777 // Do not instantiate the function type parameter.
22778 // Get a replacement from the updated function type.
22779 ASSERT(function_type_mapping != nullptr);
22780 result = function_type_mapping->MapTypeParameter(*this);
22781 ASSERT(TypeParameter::Cast(result).index() ==
22782 index() - num_free_fun_type_params);
22783 ASSERT(TypeParameter::Cast(result).base() ==
22784 base() - num_free_fun_type_params);
22785 ASSERT(TypeParameter::Cast(result).nullability() == nullability());
22786 AbstractType& upper_bound = AbstractType::Handle(zone, bound());
22787 if (!upper_bound.IsInstantiated()) {
22788 upper_bound = upper_bound.InstantiateFrom(
22789 instantiator_type_arguments, function_type_arguments,
22790 num_free_fun_type_params, space, function_type_mapping,
22791 num_parent_type_args_adjustment);
22792 }
22793 if (upper_bound.ptr() == Type::NeverType()) {
22794 // Normalize 'X extends Never' to 'Never'.
22796 }
22797 } else if (function_type_arguments.IsNull()) {
22798 return Type::DynamicType();
22799 } else {
22800 result = function_type_arguments.TypeAt(index());
22801 substituted = true;
22802 }
22803 } else {
22806 if (instantiator_type_arguments.IsNull()) {
22807 return Type::DynamicType();
22808 }
22809 if (instantiator_type_arguments.Length() <= index()) {
22810 // InstantiateFrom can be invoked from a compilation pipeline with
22811 // mismatching type arguments vector. This can only happen for
22812 // a dynamically unreachable code - which compiler can't remove
22813 // statically for some reason.
22814 // To prevent crashes we return AbstractType::null(), understood by caller
22815 // (see AssertAssignableInstr::Canonicalize).
22816 return AbstractType::null();
22817 }
22818 result = instantiator_type_arguments.TypeAt(index());
22819 substituted = true;
22820 // Instantiating a class type parameter cannot result in a
22821 // function type parameter.
22822 // Bounds of class type parameters are ignored in the VM.
22823 }
22824 result = result.SetInstantiatedNullability(*this, space);
22825 if (substituted && (num_parent_type_args_adjustment != 0)) {
22826 // This type parameter is used inside a generic function type.
22827 // A type being substituted can have nested function types,
22828 // whose number of parent function type arguments should be adjusted
22829 // after the substitution.
22830 result = result.UpdateFunctionTypes(num_parent_type_args_adjustment,
22831 kAllFree, space, function_type_mapping);
22832 }
22833 // Canonicalization is not part of instantiation.
22834 return result.NormalizeFutureOrType(space);
22835}
22836
22838 intptr_t num_parent_type_args_adjustment,
22839 intptr_t num_free_fun_type_params,
22840 Heap::Space space,
22841 FunctionTypeMapping* function_type_mapping) const {
22843 ASSERT(num_parent_type_args_adjustment >= 0);
22844 if (IsFunctionTypeParameter() && (index() >= num_free_fun_type_params)) {
22845 Zone* zone = Thread::Current()->zone();
22846 ASSERT(function_type_mapping != nullptr);
22847 const auto& new_tp = TypeParameter::Handle(
22848 zone, function_type_mapping->MapTypeParameter(*this));
22849 ASSERT(new_tp.base() == base() + num_parent_type_args_adjustment);
22850 ASSERT(new_tp.index() == index() + num_parent_type_args_adjustment);
22851 ASSERT(new_tp.nullability() == nullability());
22852 ASSERT(new_tp.IsFinalized());
22853 return new_tp.ptr();
22854 } else {
22855 return ptr();
22856 }
22857}
22858
// Returns the canonical version of this type parameter, inserting it into
// the isolate group's canonical_type_parameters table if not yet present.
// New-space instances are cloned to old space before being marked
// canonical; the whole probe-and-insert runs under the type
// canonicalization mutex.
//
// NOTE(review): a few lines appear to have been elided in this copy of the
// source (debug-check body and the canonical |table| declaration); restore
// from upstream before compiling.
AbstractTypePtr TypeParameter::Canonicalize(Thread* thread) const {
  Zone* zone = thread->zone();
  if (IsCanonical()) {
#ifdef DEBUG
    }
#endif
    return this->ptr();
  }
  auto isolate_group = thread->isolate_group();
  ObjectStore* object_store = isolate_group->object_store();
  TypeParameter& type_parameter = TypeParameter::Handle(zone);
  {
    SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
        object_store->canonical_type_parameters());
    type_parameter ^= table.GetOrNull(CanonicalTypeParameterKey(*this));
    if (type_parameter.IsNull()) {
      // Add this type parameter into the canonical table of type parameters.
      if (this->IsNew()) {
        type_parameter ^= Object::Clone(*this, Heap::kOld);
      } else {
        type_parameter = this->ptr();
      }
      ASSERT(type_parameter.IsOld());
      type_parameter.SetCanonical();  // Mark object as being canonical.
      bool present = table.Insert(type_parameter);
      ASSERT(!present);
    }
    object_store->set_canonical_type_parameters(table.Release());
  }
  return type_parameter.ptr();
}
22894
22896 BaseTextBuffer* printer) const {
22897 const TypeParameter& type_param = TypeParameter::Cast(*this);
22898 // Type parameter names are meaningless after canonicalization.
22899 printer->AddString(type_param.CanonicalNameCString());
22900 printer->AddString(NullabilitySuffix(name_visibility));
22901}
22902
22905 uint32_t result = parameterized_class_id();
22908 result = CombineHashes(result, static_cast<uint32_t>(nullability()));
22910 SetHash(result);
22911 return result;
22912}
22913
// Allocates a new, uninitialized TypeParameter in old space.
TypeParameterPtr TypeParameter::New() {
  return Object::Allocate<TypeParameter>(Heap::kOld);
}
22917
// Allocates a new TypeParameter.
// |owner| is a FunctionType for function type parameters, a Class (stored
// by id) for class type parameters, or null (stored as kIllegalCid).
// |base| and |index| locate the parameter in the flattened vector of the
// declaring entity; the hash starts cleared and the type testing stub is
// initialized before returning.
TypeParameterPtr TypeParameter::New(const Object& owner,
                                    intptr_t base,
                                    intptr_t index,
                                    Nullability nullability) {
  ASSERT(owner.IsNull() || owner.IsClass() || owner.IsFunctionType());
  const bool is_function_type_parameter = owner.IsFunctionType();
  // NOTE(review): the line computing |flags| appears to have been elided in
  // this copy of the source (only its continuation remains below); restore
  // from upstream.
                                          is_function_type_parameter);
  Zone* Z = Thread::Current()->zone();
  const TypeParameter& result = TypeParameter::Handle(Z, TypeParameter::New());
  result.set_flags(flags);
  if (is_function_type_parameter) {
    result.set_owner(owner);
  } else {
    result.set_parameterized_class_id(owner.IsNull() ? kIllegalCid
                                                     : Class::Cast(owner).id());
  }
  result.set_base(base);
  result.set_index(index);
  result.SetHash(0);
  result.set_nullability(nullability);

  // NOTE(review): the argument line of this call appears to have been
  // elided in this copy of the source; restore from upstream.
  result.InitializeTypeTestingStubNonAtomic(
  return result.ptr();
}
22945
22946const char* TypeParameter::CanonicalNameCString(bool is_class_type_parameter,
22947 intptr_t base,
22948 intptr_t index) {
22949 Thread* thread = Thread::Current();
22950 ZoneTextBuffer printer(thread->zone());
22951 const char* base_fmt = is_class_type_parameter ? "C%" Pd : "F%" Pd;
22952 const char* index_fmt = is_class_type_parameter ? "X%" Pd : "Y%" Pd;
22953 if (base != 0) {
22954 printer.Printf(base_fmt, base);
22955 }
22956 printer.Printf(index_fmt, index - base);
22957 return printer.buffer();
22958}
22959
22960const char* TypeParameter::ToCString() const {
22961 if (IsNull()) {
22962 return "TypeParameter: null";
22963 }
22964 Thread* thread = Thread::Current();
22965 ZoneTextBuffer printer(thread->zone());
22966 printer.Printf("TypeParameter: ");
22967 printer.AddString(CanonicalNameCString());
22968 printer.AddString(NullabilitySuffix(kInternalName));
22969 return printer.buffer();
22970}
22971
const char* Number::ToCString() const {
  // Number is an interface. No instances of Number should exist.
  UNREACHABLE();
  return "Number";
}
22977
const char* Integer::ToCString() const {
  // Integer is an interface. No instances of Integer should exist except
  // null; concrete printing lives in Smi and Mint.
  ASSERT(IsNull());
  return "nullptr Integer";
}
22983
22984IntegerPtr Integer::New(const String& str, Heap::Space space) {
22985 // We are not supposed to have integers represented as two byte strings.
22986 ASSERT(str.IsOneByteString());
22987 if (str.IsNull() || (str.Length() == 0)) {
22988 return Integer::null();
22989 }
22990 int64_t value = 0;
22991 const char* cstr = str.ToCString();
22992 if (!OS::StringToInt64(cstr, &value)) {
22993 // Out of range.
22994 return Integer::null();
22995 }
22996 return Integer::New(value, space);
22997}
22998
22999IntegerPtr Integer::NewCanonical(const String& str) {
23000 // We are not supposed to have integers represented as two byte strings.
23001 ASSERT(str.IsOneByteString());
23002 int64_t value = 0;
23003 const char* cstr = str.ToCString();
23004 if (!OS::StringToInt64(cstr, &value)) {
23005 // Out of range.
23006 return Integer::null();
23007 }
23008 return NewCanonical(value);
23009}
23010
23011IntegerPtr Integer::NewCanonical(int64_t value) {
23012 if (Smi::IsValid(value)) {
23013 return Smi::New(static_cast<intptr_t>(value));
23014 }
23015 return Mint::NewCanonical(value);
23016}
23017
23018IntegerPtr Integer::New(int64_t value, Heap::Space space) {
23019 const bool is_smi = Smi::IsValid(value);
23020 if (is_smi) {
23021 return Smi::New(static_cast<intptr_t>(value));
23022 }
23023 return Mint::New(value, space);
23024}
23025
// Reinterprets |value| as a signed 64-bit integer (two's complement, so
// values above kMaxInt64 become negative) and allocates the result.
IntegerPtr Integer::NewFromUint64(uint64_t value, Heap::Space space) {
  return Integer::New(static_cast<int64_t>(value), space);
}
23029
23031 return (value <= static_cast<uint64_t>(Mint::kMaxValue));
23032}
23033
bool Integer::Equals(const Instance& other) const {
  // Integer is an abstract class; equality is implemented by Smi and Mint.
  UNREACHABLE();
  return false;
}
23039
bool Integer::IsZero() const {
  // Integer is an abstract class; concrete behavior lives in subclasses.
  UNREACHABLE();
  return false;
}
23045
23047 // Integer is an abstract class.
23048 UNREACHABLE();
23049 return false;
23050}
23051
23053 // Integer is an abstract class.
23054 UNREACHABLE();
23055 return 0.0;
23056}
23057
int64_t Integer::AsInt64Value() const {
  // Integer is an abstract class; Smi and Mint provide the real value.
  UNREACHABLE();
  return 0;
}
23063
23065 // Integer is an abstract class.
23066 UNREACHABLE();
23067 return 0;
23068}
23069
23071 // Integer is an abstract class.
23072 UNREACHABLE();
23073 return false;
23074}
23075
int Integer::CompareWith(const Integer& other) const {
  // Integer is an abstract class; Smi and Mint provide the comparison.
  UNREACHABLE();
  return 0;
}
23081
23083 return Multiply64Hash(AsInt64Value());
23084}
23085
23086IntegerPtr Integer::AsValidInteger() const {
23087 if (IsSmi()) return ptr();
23088 if (IsMint()) {
23089 Mint& mint = Mint::Handle();
23090 mint ^= ptr();
23091 if (Smi::IsValid(mint.value())) {
23092 return Smi::New(static_cast<intptr_t>(mint.value()));
23093 } else {
23094 return ptr();
23095 }
23096 }
23097 return ptr();
23098}
23099
23100const char* Integer::ToHexCString(Zone* zone) const {
23101 ASSERT(IsSmi() || IsMint());
23102 int64_t value = AsInt64Value();
23103 if (value < 0) {
23104 return OS::SCreate(zone, "-0x%" PX64, -static_cast<uint64_t>(value));
23105 } else {
23106 return OS::SCreate(zone, "0x%" PX64, static_cast<uint64_t>(value));
23107 }
23108}
23109
23111 const Integer& other,
23112 Heap::Space space) const {
23113 // In 32-bit mode, the result of any operation between two Smis will fit in a
23114 // 32-bit signed result, except the product of two Smis, which will be 64-bit.
23115 // In 64-bit mode, the result of any operation between two Smis will fit in a
23116 // 64-bit signed result, except the product of two Smis (see below).
23117 if (IsSmi() && other.IsSmi()) {
23118 const intptr_t left_value = Smi::Value(Smi::RawCast(ptr()));
23119 const intptr_t right_value = Smi::Value(Smi::RawCast(other.ptr()));
23120 switch (operation) {
23121 case Token::kADD:
23122 return Integer::New(left_value + right_value, space);
23123 case Token::kSUB:
23124 return Integer::New(left_value - right_value, space);
23125 case Token::kMUL:
23126 return Integer::New(
23127 Utils::MulWithWrapAround(static_cast<int64_t>(left_value),
23128 static_cast<int64_t>(right_value)),
23129 space);
23130 case Token::kTRUNCDIV:
23131 return Integer::New(left_value / right_value, space);
23132 case Token::kMOD: {
23133 const intptr_t remainder = left_value % right_value;
23134 if (remainder < 0) {
23135 if (right_value < 0) {
23136 return Integer::New(remainder - right_value, space);
23137 } else {
23138 return Integer::New(remainder + right_value, space);
23139 }
23140 }
23141 return Integer::New(remainder, space);
23142 }
23143 default:
23144 UNIMPLEMENTED();
23145 }
23146 }
23147 const int64_t left_value = AsInt64Value();
23148 const int64_t right_value = other.AsInt64Value();
23149 switch (operation) {
23150 case Token::kADD:
23151 return Integer::New(Utils::AddWithWrapAround(left_value, right_value),
23152 space);
23153
23154 case Token::kSUB:
23155 return Integer::New(Utils::SubWithWrapAround(left_value, right_value),
23156 space);
23157
23158 case Token::kMUL:
23159 return Integer::New(Utils::MulWithWrapAround(left_value, right_value),
23160 space);
23161
23162 case Token::kTRUNCDIV:
23163 if ((left_value == Mint::kMinValue) && (right_value == -1)) {
23164 // Division special case: overflow in int64_t.
23165 // MIN_VALUE / -1 = (MAX_VALUE + 1), which wraps around to MIN_VALUE
23166 return Integer::New(Mint::kMinValue, space);
23167 }
23168 return Integer::New(left_value / right_value, space);
23169
23170 case Token::kMOD: {
23171 if ((left_value == Mint::kMinValue) && (right_value == -1)) {
23172 // Modulo special case: overflow in int64_t.
23173 // MIN_VALUE % -1 = 0 for reason given above.
23174 return Integer::New(0, space);
23175 }
23176 const int64_t remainder = left_value % right_value;
23177 if (remainder < 0) {
23178 if (right_value < 0) {
23179 return Integer::New(remainder - right_value, space);
23180 } else {
23181 return Integer::New(remainder + right_value, space);
23182 }
23183 }
23184 return Integer::New(remainder, space);
23185 }
23186 default:
23187 UNIMPLEMENTED();
23188 return Integer::null();
23189 }
23190}
23191
23193 const Integer& other,
23194 Heap::Space space) const {
23195 if (IsSmi() && other.IsSmi()) {
23196 intptr_t op1_value = Smi::Value(Smi::RawCast(ptr()));
23197 intptr_t op2_value = Smi::Value(Smi::RawCast(other.ptr()));
23198 intptr_t result = 0;
23199 switch (kind) {
23200 case Token::kBIT_AND:
23201 result = op1_value & op2_value;
23202 break;
23203 case Token::kBIT_OR:
23204 result = op1_value | op2_value;
23205 break;
23206 case Token::kBIT_XOR:
23207 result = op1_value ^ op2_value;
23208 break;
23209 default:
23210 UNIMPLEMENTED();
23211 }
23213 return Smi::New(result);
23214 } else {
23215 int64_t a = AsInt64Value();
23216 int64_t b = other.AsInt64Value();
23217 switch (kind) {
23218 case Token::kBIT_AND:
23219 return Integer::New(a & b, space);
23220 case Token::kBIT_OR:
23221 return Integer::New(a | b, space);
23222 case Token::kBIT_XOR:
23223 return Integer::New(a ^ b, space);
23224 default:
23225 UNIMPLEMENTED();
23226 return Integer::null();
23227 }
23228 }
23229}
23230
23232 const Integer& other,
23233 Heap::Space space) const {
23234 int64_t a = AsInt64Value();
23235 int64_t b = other.AsInt64Value();
23236 ASSERT(b >= 0);
23237 switch (kind) {
23238 case Token::kSHL:
23240 case Token::kSHR:
23241 return Integer::New(a >> Utils::Minimum<int64_t>(b, Mint::kBits), space);
23242 case Token::kUSHR:
23243 return Integer::New(
23244 (b >= kBitsPerInt64) ? 0 : static_cast<uint64_t>(a) >> b, space);
23245 default:
23246 UNIMPLEMENTED();
23247 return Integer::null();
23248 }
23249}
23250
23251bool Smi::Equals(const Instance& other) const {
23252 if (other.IsNull() || !other.IsSmi()) {
23253 return false;
23254 }
23255 return (this->Value() == Smi::Cast(other).Value());
23256}
23257
// Returns the Smi payload converted to a double.
double Smi::AsDoubleValue() const {
  return static_cast<double>(this->Value());
}
23261
// Returns the Smi payload widened to 64 bits.
int64_t Smi::AsInt64Value() const {
  return this->Value();
}
23265
23267 return this->Value() & 0xFFFFFFFF;
23268}
23269
23270int Smi::CompareWith(const Integer& other) const {
23271 if (other.IsSmi()) {
23272 const Smi& other_smi = Smi::Cast(other);
23273 if (this->Value() < other_smi.Value()) {
23274 return -1;
23275 } else if (this->Value() > other_smi.Value()) {
23276 return 1;
23277 } else {
23278 return 0;
23279 }
23280 }
23281 ASSERT(!other.FitsIntoSmi());
23282 if (other.IsMint()) {
23283 if (this->IsNegative() == other.IsNegative()) {
23284 return this->IsNegative() ? 1 : -1;
23285 }
23286 return this->IsNegative() ? -1 : 1;
23287 }
23288 UNREACHABLE();
23289 return 0;
23290}
23291
// Decimal representation, allocated in the current zone.
const char* Smi::ToCString() const {
  return OS::SCreate(Thread::Current()->zone(), "%" Pd "", Value());
}
23295
// Returns the Smi class from the current isolate group's object store.
ClassPtr Smi::Class() {
  return IsolateGroup::Current()->object_store()->smi_class();
}
23299
// Stores the raw 64-bit payload into the untagged object.
void Mint::set_value(int64_t value) const {
  StoreNonPointer(&untag()->value_, value);
}
23303
23304MintPtr Mint::New(int64_t val, Heap::Space space) {
23305 // Do not allocate a Mint if Smi would do.
23306 ASSERT(!Smi::IsValid(val));
23307 ASSERT(IsolateGroup::Current()->object_store()->mint_class() !=
23308 Class::null());
23309 const auto& result = Mint::Handle(Object::Allocate<Mint>(space));
23310 result.set_value(val);
23311 return result.ptr();
23312}
23313
23314MintPtr Mint::NewCanonical(int64_t value) {
23315 Thread* thread = Thread::Current();
23316 Mint& mint = Mint::Handle(thread->zone(), Mint::New(value, Heap::kOld));
23317 mint ^= mint.Canonicalize(thread);
23318 return mint.ptr();
23319}
23320
23321bool Mint::Equals(const Instance& other) const {
23322 if (this->ptr() == other.ptr()) {
23323 // Both handles point to the same raw instance.
23324 return true;
23325 }
23326 if (!other.IsMint() || other.IsNull()) {
23327 return false;
23328 }
23329 return value() == Mint::Cast(other).value();
23330}
23331
// Returns the 64-bit payload converted to a double (may round).
double Mint::AsDoubleValue() const {
  return static_cast<double>(this->value());
}
23335
// Returns the raw 64-bit payload.
int64_t Mint::AsInt64Value() const {
  return this->value();
}
23339
23341 return this->value() & 0xFFFFFFFF;
23342}
23343
// True if the payload is within Smi range.
bool Mint::FitsIntoSmi() const {
  return Smi::IsValid(AsInt64Value());
}
23347
23348int Mint::CompareWith(const Integer& other) const {
23349 ASSERT(!FitsIntoSmi());
23350 ASSERT(other.IsMint() || other.IsSmi());
23351 int64_t a = AsInt64Value();
23352 int64_t b = other.AsInt64Value();
23353 if (a < b) {
23354 return -1;
23355 } else if (a > b) {
23356 return 1;
23357 } else {
23358 return 0;
23359 }
23360}
23361
// Decimal representation, allocated in the current zone.
const char* Mint::ToCString() const {
  return OS::SCreate(Thread::Current()->zone(), "%" Pd64 "", value());
}
23365
// Stores the raw double payload into the untagged object.
void Double::set_value(double value) const {
  StoreNonPointer(&untag()->value_, value);
}
23369
23372 void* this_addr = reinterpret_cast<void*>(
23373 reinterpret_cast<uword>(this->untag()) + value_offset);
23374 void* other_addr = reinterpret_cast<void*>(&value);
23375 return (memcmp(this_addr, other_addr, sizeof(value)) == 0);
23376}
23377
23378bool Double::OperatorEquals(const Instance& other) const {
23379 if (this->IsNull() || other.IsNull()) {
23380 return (this->IsNull() && other.IsNull());
23381 }
23382 if (!other.IsDouble()) {
23383 return false;
23384 }
23385 return this->value() == Double::Cast(other).value();
23386}
23387
23388bool Double::CanonicalizeEquals(const Instance& other) const {
23389 if (this->ptr() == other.ptr()) {
23390 return true; // "===".
23391 }
23392 if (other.IsNull() || !other.IsDouble()) {
23393 return false;
23394 }
23395 return BitwiseEqualsToDouble(Double::Cast(other).value());
23396}
23397
23399 return Hash64To32(bit_cast<uint64_t>(value()));
23400}
23401
23402DoublePtr Double::New(double d, Heap::Space space) {
23403 ASSERT(IsolateGroup::Current()->object_store()->double_class() !=
23404 Class::null());
23405 const auto& result = Double::Handle(Object::Allocate<Double>(space));
23406 result.set_value(d);
23407 return result.ptr();
23408}
23409
23410DoublePtr Double::New(const String& str, Heap::Space space) {
23411 double double_value;
23412 if (!CStringToDouble(str.ToCString(), str.Length(), &double_value)) {
23413 return Double::Handle().ptr();
23414 }
23415 return New(double_value, space);
23416}
23417
// Returns the canonical Double for |value|.
DoublePtr Double::NewCanonical(double value) {
  Thread* thread = Thread::Current();
  // NOTE(review): the declaration of the |dbl| handle appears to have been
  // elided in this copy of the source; restore from upstream.
  dbl ^= dbl.Canonicalize(thread);
  return dbl.ptr();
}
23424
23425DoublePtr Double::NewCanonical(const String& str) {
23426 double double_value;
23427 if (!CStringToDouble(str.ToCString(), str.Length(), &double_value)) {
23428 return Double::Handle().ptr();
23429 }
23430 return NewCanonical(double_value);
23431}
23432
23433StringPtr Number::ToString(Heap::Space space) const {
23434 // Refactoring can avoid Zone::Alloc and strlen, but gains are insignificant.
23435 const char* cstr = ToCString();
23436 intptr_t len = strlen(cstr);
23437// Resulting string is ASCII ...
23438#ifdef DEBUG
23439 for (intptr_t i = 0; i < len; ++i) {
23440 ASSERT(static_cast<uint8_t>(cstr[i]) < 128);
23441 }
23442#endif // DEBUG
23443 // ... which is a subset of Latin-1.
23444 return String::FromLatin1(reinterpret_cast<const uint8_t*>(cstr), len, space);
23445}
23446
23447const char* Double::ToCString() const {
23448 if (isnan(value())) {
23449 return "NaN";
23450 }
23451 if (isinf(value())) {
23452 return value() < 0 ? "-Infinity" : "Infinity";
23453 }
23454 const int kBufferSize = 128;
23455 char* buffer = Thread::Current()->zone()->Alloc<char>(kBufferSize);
23456 buffer[kBufferSize - 1] = '\0';
23458 return buffer;
23459}
23460
23461void StringHasher::Add(const String& str, intptr_t begin_index, intptr_t len) {
23462 ASSERT(begin_index >= 0);
23463 ASSERT(len >= 0);
23464 ASSERT((begin_index + len) <= str.Length());
23465 if (len == 0) {
23466 return;
23467 }
23468 if (str.IsOneByteString()) {
23469 NoSafepointScope no_safepoint;
23470 Add(OneByteString::CharAddr(str, begin_index), len);
23471 } else if (str.IsTwoByteString()) {
23472 NoSafepointScope no_safepoint;
23473 Add(TwoByteString::CharAddr(str, begin_index), len);
23474 } else {
23475 UNREACHABLE();
23476 }
23477}
23478
23479uword String::Hash(const String& str, intptr_t begin_index, intptr_t len) {
23480 StringHasher hasher;
23481 hasher.Add(str, begin_index, len);
23482 return hasher.Finalize();
23483}
23484
23485uword String::HashConcat(const String& str1, const String& str2) {
23486 StringHasher hasher;
23487 hasher.Add(str1, 0, str1.Length());
23488 hasher.Add(str2, 0, str2.Length());
23489 return hasher.Finalize();
23490}
23491
23492uword String::Hash(StringPtr raw) {
23493 StringHasher hasher;
23494 uword length = Smi::Value(raw->untag()->length());
23495 if (raw->IsOneByteString()) {
23496 const uint8_t* data = static_cast<OneByteStringPtr>(raw)->untag()->data();
23497 return String::Hash(data, length);
23498 } else {
23499 const uint16_t* data = static_cast<TwoByteStringPtr>(raw)->untag()->data();
23500 return String::Hash(data, length);
23501 }
23502}
23503
23504uword String::Hash(const char* characters, intptr_t len) {
23505 StringHasher hasher;
23506 hasher.Add(reinterpret_cast<const uint8_t*>(characters), len);
23507 return hasher.Finalize();
23508}
23509
23510uword String::Hash(const uint8_t* characters, intptr_t len) {
23511 StringHasher hasher;
23512 hasher.Add(characters, len);
23513 return hasher.Finalize();
23514}
23515
23516uword String::Hash(const uint16_t* characters, intptr_t len) {
23517 StringHasher hasher;
23518 hasher.Add(characters, len);
23519 return hasher.Finalize();
23520}
23521
23522intptr_t String::CharSize() const {
23523 intptr_t class_id = ptr()->GetClassId();
23524 if (class_id == kOneByteStringCid) {
23525 return kOneByteChar;
23526 }
23527 ASSERT(class_id == kTwoByteStringCid);
23528 return kTwoByteChar;
23529}
23530
23531bool String::Equals(const Instance& other) const {
23532 if (this->ptr() == other.ptr()) {
23533 // Both handles point to the same raw instance.
23534 return true;
23535 }
23536
23537 if (!other.IsString()) {
23538 return false;
23539 }
23540
23541 const String& other_string = String::Cast(other);
23542 return Equals(other_string);
23543}
23544
23545bool String::Equals(const String& str,
23546 intptr_t begin_index,
23547 intptr_t len) const {
23548 ASSERT(begin_index >= 0);
23549 ASSERT((begin_index == 0) || (begin_index < str.Length()));
23550 ASSERT(len >= 0);
23551 ASSERT(len <= str.Length());
23552 if (len != this->Length()) {
23553 return false; // Lengths don't match.
23554 }
23555
23556 for (intptr_t i = 0; i < len; i++) {
23557 if (CharAt(i) != str.CharAt(begin_index + i)) {
23558 return false;
23559 }
23560 }
23561
23562 return true;
23563}
23564
23565bool String::Equals(const char* cstr) const {
23566 ASSERT(cstr != nullptr);
23567 CodePointIterator it(*this);
23568 intptr_t len = strlen(cstr);
23569 while (it.Next()) {
23570 if (*cstr == '\0') {
23571 // Lengths don't match.
23572 return false;
23573 }
23574 int32_t ch;
23575 intptr_t consumed =
23576 Utf8::Decode(reinterpret_cast<const uint8_t*>(cstr), len, &ch);
23577 if (consumed == 0 || it.Current() != ch) {
23578 return false;
23579 }
23580 cstr += consumed;
23581 len -= consumed;
23582 }
23583 return *cstr == '\0';
23584}
23585
23586bool String::Equals(const uint8_t* latin1_array, intptr_t len) const {
23587 if (len != this->Length()) {
23588 // Lengths don't match.
23589 return false;
23590 }
23591
23592 for (intptr_t i = 0; i < len; i++) {
23593 if (this->CharAt(i) != latin1_array[i]) {
23594 return false;
23595 }
23596 }
23597 return true;
23598}
23599
23600bool String::Equals(const uint16_t* utf16_array, intptr_t len) const {
23601 if (len != this->Length()) {
23602 // Lengths don't match.
23603 return false;
23604 }
23605
23606 for (intptr_t i = 0; i < len; i++) {
23607 if (this->CharAt(i) != LoadUnaligned(&utf16_array[i])) {
23608 return false;
23609 }
23610 }
23611 return true;
23612}
23613
// Compares this string's UTF-16 code units against a UTF-32 array, encoding
// each supplementary code point into a surrogate pair on the fly.
bool String::Equals(const int32_t* utf32_array, intptr_t len) const {
  if (len < 0) return false;
  intptr_t j = 0;  // Index into this string's UTF-16 code units.
  for (intptr_t i = 0; i < len; ++i) {
    if (Utf::IsSupplementary(utf32_array[i])) {
      // Supplementary code points occupy two UTF-16 code units.
      uint16_t encoded[2];
      Utf16::Encode(utf32_array[i], &encoded[0]);
      if (j + 1 >= Length()) return false;
      if (CharAt(j++) != encoded[0]) return false;
      if (CharAt(j++) != encoded[1]) return false;
    } else {
      if (j >= Length()) return false;
      if (CharAt(j++) != utf32_array[i]) return false;
    }
  }
  // Equal only if this string has no code units left over.
  return j == Length();
}
23631
23632bool String::EqualsConcat(const String& str1, const String& str2) const {
23633 return (Length() == str1.Length() + str2.Length()) &&
23634 str1.Equals(*this, 0, str1.Length()) &&
23635 str2.Equals(*this, str1.Length(), str2.Length());
23636}
23637
23638intptr_t String::CompareTo(const String& other) const {
23639 const intptr_t this_len = this->Length();
23640 const intptr_t other_len = other.IsNull() ? 0 : other.Length();
23641 const intptr_t len = (this_len < other_len) ? this_len : other_len;
23642 for (intptr_t i = 0; i < len; i++) {
23643 uint16_t this_code_unit = this->CharAt(i);
23644 uint16_t other_code_unit = other.CharAt(i);
23645 if (this_code_unit < other_code_unit) {
23646 return -1;
23647 }
23648 if (this_code_unit > other_code_unit) {
23649 return 1;
23650 }
23651 }
23652 if (this_len < other_len) return -1;
23653 if (this_len > other_len) return 1;
23654 return 0;
23655}
23656
23657bool String::StartsWith(StringPtr str, StringPtr prefix) {
23658 if (prefix == String::null()) return false;
23659
23660 const intptr_t length = String::LengthOf(str);
23661 const intptr_t prefix_length = String::LengthOf(prefix);
23662 if (prefix_length > length) return false;
23663
23664 for (intptr_t i = 0; i < prefix_length; i++) {
23665 if (String::CharAt(str, i) != String::CharAt(prefix, i)) {
23666 return false;
23667 }
23668 }
23669 return true;
23670}
23671
23672bool String::EndsWith(const String& other) const {
23673 if (other.IsNull()) {
23674 return false;
23675 }
23676 const intptr_t len = this->Length();
23677 const intptr_t other_len = other.Length();
23678 const intptr_t offset = len - other_len;
23679
23680 if ((other_len == 0) || (other_len > len)) {
23681 return false;
23682 }
23683 for (int i = offset; i < len; i++) {
23684 if (this->CharAt(i) != other.CharAt(i - offset)) {
23685 return false;
23686 }
23687 }
23688 return true;
23689}
23690
23691InstancePtr String::CanonicalizeLocked(Thread* thread) const {
23692 if (IsCanonical()) {
23693 return this->ptr();
23694 }
23695 return Symbols::New(Thread::Current(), *this);
23696}
23697
23698StringPtr String::New(const char* cstr, Heap::Space space) {
23699 ASSERT(cstr != nullptr);
23700 intptr_t array_len = strlen(cstr);
23701 const uint8_t* utf8_array = reinterpret_cast<const uint8_t*>(cstr);
23702 return String::FromUTF8(utf8_array, array_len, space);
23703}
23704
23705StringPtr String::FromUTF8(const uint8_t* utf8_array,
23706 intptr_t array_len,
23707 Heap::Space space) {
23709 intptr_t len = Utf8::CodeUnitCount(utf8_array, array_len, &type);
23710 if (type == Utf8::kLatin1) {
23711 const String& strobj = String::Handle(OneByteString::New(len, space));
23712 if (len > 0) {
23713 NoSafepointScope no_safepoint;
23714 if (!Utf8::DecodeToLatin1(utf8_array, array_len,
23715 OneByteString::DataStart(strobj), len)) {
23716 Utf8::ReportInvalidByte(utf8_array, array_len, len);
23717 return String::null();
23718 }
23719 }
23720 return strobj.ptr();
23721 }
23723 const String& strobj = String::Handle(TwoByteString::New(len, space));
23724 NoSafepointScope no_safepoint;
23725 if (!Utf8::DecodeToUTF16(utf8_array, array_len,
23726 TwoByteString::DataStart(strobj), len)) {
23727 Utf8::ReportInvalidByte(utf8_array, array_len, len);
23728 return String::null();
23729 }
23730 return strobj.ptr();
23731}
23732
// Latin-1 code points each fit in a single byte, so the result is always a
// OneByteString of the same length as the input.
StringPtr String::FromLatin1(const uint8_t* latin1_array,
                             intptr_t array_len,
                             Heap::Space space) {
  return OneByteString::New(latin1_array, array_len, space);
}
23738
23739StringPtr String::FromUTF16(const uint16_t* utf16_array,
23740 intptr_t array_len,
23741 Heap::Space space) {
23742 bool is_one_byte_string = true;
23743 for (intptr_t i = 0; i < array_len; ++i) {
23744 if (!Utf::IsLatin1(LoadUnaligned(&utf16_array[i]))) {
23745 is_one_byte_string = false;
23746 break;
23747 }
23748 }
23749 if (is_one_byte_string) {
23750 return OneByteString::New(utf16_array, array_len, space);
23751 }
23752 return TwoByteString::New(utf16_array, array_len, space);
23753}
23754
23755StringPtr String::FromUTF32(const int32_t* utf32_array,
23756 intptr_t array_len,
23757 Heap::Space space) {
23758 bool is_one_byte_string = true;
23759 intptr_t utf16_len = array_len;
23760 for (intptr_t i = 0; i < array_len; ++i) {
23761 if (!Utf::IsLatin1(utf32_array[i])) {
23762 is_one_byte_string = false;
23763 if (Utf::IsSupplementary(utf32_array[i])) {
23764 utf16_len += 1;
23765 }
23766 }
23767 }
23768 if (is_one_byte_string) {
23769 return OneByteString::New(utf32_array, array_len, space);
23770 }
23771 return TwoByteString::New(utf16_len, utf32_array, array_len, space);
23772}
23773
23774StringPtr String::New(const String& str, Heap::Space space) {
23775 // Currently this just creates a copy of the string in the correct space.
23776 // Once we have external string support, this will also create a heap copy of
23777 // the string if necessary. Some optimizations are possible, such as not
23778 // copying internal strings into the same space.
23779 intptr_t len = str.Length();
23781 intptr_t char_size = str.CharSize();
23782 if (char_size == kOneByteChar) {
23783 result = OneByteString::New(len, space);
23784 } else {
23785 ASSERT(char_size == kTwoByteChar);
23786 result = TwoByteString::New(len, space);
23787 }
23788 String::Copy(result, 0, str, 0, len);
23789 return result.ptr();
23790}
23791
23793 intptr_t dst_offset,
23794 const uint8_t* characters,
23795 intptr_t len) {
23796 ASSERT(dst_offset >= 0);
23797 ASSERT(len >= 0);
23798 ASSERT(len <= (dst.Length() - dst_offset));
23799 if (dst.IsOneByteString()) {
23800 NoSafepointScope no_safepoint;
23801 if (len > 0) {
23802 memmove(OneByteString::CharAddr(dst, dst_offset), characters, len);
23803 }
23804 } else if (dst.IsTwoByteString()) {
23805 for (intptr_t i = 0; i < len; ++i) {
23806 *TwoByteString::CharAddr(dst, i + dst_offset) = characters[i];
23807 }
23808 }
23809}
23810
23812 intptr_t dst_offset,
23813 const uint16_t* utf16_array,
23814 intptr_t array_len) {
23815 ASSERT(dst_offset >= 0);
23816 ASSERT(array_len >= 0);
23817 ASSERT(array_len <= (dst.Length() - dst_offset));
23818 if (dst.IsOneByteString()) {
23819 NoSafepointScope no_safepoint;
23820 for (intptr_t i = 0; i < array_len; ++i) {
23821 ASSERT(Utf::IsLatin1(LoadUnaligned(&utf16_array[i])));
23822 *OneByteString::CharAddr(dst, i + dst_offset) = utf16_array[i];
23823 }
23824 } else {
23825 ASSERT(dst.IsTwoByteString());
23826 NoSafepointScope no_safepoint;
23827 if (array_len > 0) {
23828 memmove(TwoByteString::CharAddr(dst, dst_offset), utf16_array,
23829 array_len * 2);
23830 }
23831 }
23832}
23833
23835 intptr_t dst_offset,
23836 const String& src,
23837 intptr_t src_offset,
23838 intptr_t len) {
23839 ASSERT(dst_offset >= 0);
23840 ASSERT(src_offset >= 0);
23841 ASSERT(len >= 0);
23842 ASSERT(len <= (dst.Length() - dst_offset));
23843 ASSERT(len <= (src.Length() - src_offset));
23844 if (len > 0) {
23845 intptr_t char_size = src.CharSize();
23846 if (char_size == kOneByteChar) {
23847 ASSERT(src.IsOneByteString());
23848 NoSafepointScope no_safepoint;
23849 String::Copy(dst, dst_offset, OneByteString::CharAddr(src, src_offset),
23850 len);
23851 } else {
23852 ASSERT(char_size == kTwoByteChar);
23853 ASSERT(src.IsTwoByteString());
23854 NoSafepointScope no_safepoint;
23855 String::Copy(dst, dst_offset, TwoByteString::CharAddr(src, src_offset),
23856 len);
23857 }
23858 }
23859}
23860
23862 if (str.IsOneByteString()) {
23864 }
23865 ASSERT(str.IsTwoByteString());
23867}
23868
23869static bool IsPercent(int32_t c) {
23870 return c == '%';
23871}
23872
23873static bool IsHexCharacter(int32_t c) {
23874 if (c >= '0' && c <= '9') {
23875 return true;
23876 }
23877 if (c >= 'A' && c <= 'F') {
23878 return true;
23879 }
23880 return false;
23881}
23882
23883static bool IsURISafeCharacter(int32_t c) {
23884 if ((c >= '0') && (c <= '9')) {
23885 return true;
23886 }
23887 if ((c >= 'a') && (c <= 'z')) {
23888 return true;
23889 }
23890 if ((c >= 'A') && (c <= 'Z')) {
23891 return true;
23892 }
23893 return (c == '-') || (c == '_') || (c == '.') || (c == '~');
23894}
23895
23896static int32_t GetHexCharacter(int32_t c) {
23897 ASSERT(c >= 0);
23898 ASSERT(c < 16);
23899 const char* hex = "0123456789ABCDEF";
23900 return hex[c];
23901}
23902
23903static int32_t GetHexValue(int32_t c) {
23904 if (c >= '0' && c <= '9') {
23905 return c - '0';
23906 }
23907 if (c >= 'A' && c <= 'F') {
23908 return c - 'A' + 10;
23909 }
23910 UNREACHABLE();
23911 return 0;
23912}
23913
23914static int32_t MergeHexCharacters(int32_t c1, int32_t c2) {
23915 return GetHexValue(c1) << 4 | GetHexValue(c2);
23916}
23917
23918const char* String::EncodeIRI(const String& str) {
23919 const intptr_t len = Utf8::Length(str);
23920 Zone* zone = Thread::Current()->zone();
23921 uint8_t* utf8 = zone->Alloc<uint8_t>(len);
23922 str.ToUTF8(utf8, len);
23923 intptr_t num_escapes = 0;
23924 for (int i = 0; i < len; ++i) {
23925 uint8_t byte = utf8[i];
23926 if (!IsURISafeCharacter(byte)) {
23927 num_escapes += 2;
23928 }
23929 }
23930 intptr_t cstr_len = len + num_escapes + 1;
23931 char* cstr = zone->Alloc<char>(cstr_len);
23932 intptr_t index = 0;
23933 for (int i = 0; i < len; ++i) {
23934 uint8_t byte = utf8[i];
23935 if (!IsURISafeCharacter(byte)) {
23936 cstr[index++] = '%';
23937 cstr[index++] = GetHexCharacter(byte >> 4);
23938 cstr[index++] = GetHexCharacter(byte & 0xF);
23939 } else {
23940 ASSERT(byte <= 127);
23941 cstr[index++] = byte;
23942 }
23943 }
23944 cstr[index] = '\0';
23945 return cstr;
23946}
23947
23948StringPtr String::DecodeIRI(const String& str) {
23949 CodePointIterator cpi(str);
23950 intptr_t num_escapes = 0;
23951 intptr_t len = str.Length();
23952 {
23953 CodePointIterator cpi(str);
23954 while (cpi.Next()) {
23955 int32_t code_point = cpi.Current();
23956 if (IsPercent(code_point)) {
23957 // Verify that the two characters following the % are hex digits.
23958 if (!cpi.Next()) {
23959 return String::null();
23960 }
23961 int32_t code_point = cpi.Current();
23962 if (!IsHexCharacter(code_point)) {
23963 return String::null();
23964 }
23965 if (!cpi.Next()) {
23966 return String::null();
23967 }
23968 code_point = cpi.Current();
23969 if (!IsHexCharacter(code_point)) {
23970 return String::null();
23971 }
23972 num_escapes += 2;
23973 }
23974 }
23975 }
23976 intptr_t utf8_len = len - num_escapes;
23977 ASSERT(utf8_len >= 0);
23978 Zone* zone = Thread::Current()->zone();
23979 uint8_t* utf8 = zone->Alloc<uint8_t>(utf8_len);
23980 {
23981 intptr_t index = 0;
23982 CodePointIterator cpi(str);
23983 while (cpi.Next()) {
23984 ASSERT(index < utf8_len);
23985 int32_t code_point = cpi.Current();
23986 if (IsPercent(code_point)) {
23987 cpi.Next();
23988 int32_t ch1 = cpi.Current();
23989 cpi.Next();
23990 int32_t ch2 = cpi.Current();
23991 int32_t merged = MergeHexCharacters(ch1, ch2);
23992 ASSERT(merged >= 0 && merged < 256);
23993 utf8[index] = static_cast<uint8_t>(merged);
23994 } else {
23995 ASSERT(code_point >= 0 && code_point < 256);
23996 utf8[index] = static_cast<uint8_t>(code_point);
23997 }
23998 index++;
23999 }
24000 }
24001 return FromUTF8(utf8, utf8_len);
24002}
24003
24004StringPtr String::NewFormatted(const char* format, ...) {
24005 va_list args;
24007 StringPtr result = NewFormattedV(format, args);
24008 NoSafepointScope no_safepoint;
24009 va_end(args);
24010 return result;
24011}
24012
24013StringPtr String::NewFormatted(Heap::Space space, const char* format, ...) {
24014 va_list args;
24016 StringPtr result = NewFormattedV(format, args, space);
24017 NoSafepointScope no_safepoint;
24018 va_end(args);
24019 return result;
24020}
24021
24022StringPtr String::NewFormattedV(const char* format,
24023 va_list args,
24024 Heap::Space space) {
24025 va_list args_copy;
24026 va_copy(args_copy, args);
24027 intptr_t len = Utils::VSNPrint(nullptr, 0, format, args_copy);
24028 va_end(args_copy);
24029
24030 Zone* zone = Thread::Current()->zone();
24031 char* buffer = zone->Alloc<char>(len + 1);
24033
24034 return String::New(buffer, space);
24035}
24036
24037StringPtr String::Concat(const String& str1,
24038 const String& str2,
24039 Heap::Space space) {
24040 ASSERT(!str1.IsNull() && !str2.IsNull());
24041 intptr_t char_size = Utils::Maximum(str1.CharSize(), str2.CharSize());
24042 if (char_size == kTwoByteChar) {
24043 return TwoByteString::Concat(str1, str2, space);
24044 }
24045 return OneByteString::Concat(str1, str2, space);
24046}
24047
// Concatenates every string in |strings|; equivalent to ConcatAllRange over
// the entire array.
StringPtr String::ConcatAll(const Array& strings, Heap::Space space) {
  return ConcatAllRange(strings, 0, strings.Length(), space);
}
24051
24052StringPtr String::ConcatAllRange(const Array& strings,
24053 intptr_t start,
24054 intptr_t end,
24055 Heap::Space space) {
24056 ASSERT(!strings.IsNull());
24057 ASSERT(start >= 0);
24058 ASSERT(end <= strings.Length());
24059 intptr_t result_len = 0;
24060 String& str = String::Handle();
24061 intptr_t char_size = kOneByteChar;
24062 // Compute 'char_size' and 'result_len'.
24063 for (intptr_t i = start; i < end; i++) {
24064 str ^= strings.At(i);
24065 const intptr_t str_len = str.Length();
24066 if ((kMaxElements - result_len) < str_len) {
24068 UNREACHABLE();
24069 }
24070 result_len += str_len;
24071 char_size = Utils::Maximum(char_size, str.CharSize());
24072 }
24073 if (char_size == kOneByteChar) {
24074 return OneByteString::ConcatAll(strings, start, end, result_len, space);
24075 }
24076 ASSERT(char_size == kTwoByteChar);
24077 return TwoByteString::ConcatAll(strings, start, end, result_len, space);
24078}
24079
24080StringPtr String::SubString(const String& str,
24081 intptr_t begin_index,
24082 Heap::Space space) {
24083 ASSERT(!str.IsNull());
24084 if (begin_index >= str.Length()) {
24085 return String::null();
24086 }
24087 return String::SubString(str, begin_index, (str.Length() - begin_index),
24088 space);
24089}
24090
24091StringPtr String::SubString(Thread* thread,
24092 const String& str,
24093 intptr_t begin_index,
24094 intptr_t length,
24095 Heap::Space space) {
24096 ASSERT(!str.IsNull());
24097 ASSERT(begin_index >= 0);
24098 ASSERT(length >= 0);
24099 if (begin_index <= str.Length() && length == 0) {
24100 return Symbols::Empty().ptr();
24101 }
24102 if (begin_index > str.Length()) {
24103 return String::null();
24104 }
24105 bool is_one_byte_string = true;
24106 intptr_t char_size = str.CharSize();
24107 if (char_size == kTwoByteChar) {
24108 for (intptr_t i = begin_index; i < begin_index + length; ++i) {
24109 if (!Utf::IsLatin1(str.CharAt(i))) {
24110 is_one_byte_string = false;
24111 break;
24112 }
24113 }
24114 }
24116 String& result = thread->StringHandle();
24117 if (is_one_byte_string) {
24119 } else {
24121 }
24122 String::Copy(result, 0, str, begin_index, length);
24123 return result.ptr();
24124}
24125
24126const char* String::ToCString() const {
24127 if (IsNull()) {
24128 return "String: null";
24129 }
24130 const intptr_t len = Utf8::Length(*this);
24131 Zone* zone = Thread::Current()->zone();
24132 uint8_t* result = zone->Alloc<uint8_t>(len + 1);
24133 ToUTF8(result, len);
24134 result[len] = 0;
24135 return reinterpret_cast<const char*>(result);
24136}
24137
24139 const intptr_t len = Utf8::Length(*this);
24140 uint8_t* result = reinterpret_cast<uint8_t*>(malloc(len + 1));
24141 ToUTF8(result, len);
24142 result[len] = 0;
24143 return reinterpret_cast<char*>(result);
24144}
24145
// Encodes this string as UTF-8 into |utf8_array|, which must be at least
// Utf8::Length(*this) bytes. Callers that need a terminating NUL append it
// themselves (as ToCString above does).
void String::ToUTF8(uint8_t* utf8_array, intptr_t array_len) const {
  ASSERT(array_len >= Utf8::Length(*this));
  Utf8::Encode(*this, reinterpret_cast<char*>(utf8_array), array_len);
}
24150
24151const char* String::ToCString(Thread* thread, StringPtr ptr) {
24152 if (ptr == nullptr) return nullptr;
24154 String& str = reused_string_handle.Handle();
24155 str = ptr;
24156 return str.ToCString();
24157}
24158
24160 void* peer,
24162 intptr_t external_size) {
24163 ASSERT(callback != nullptr);
24164 FinalizablePersistentHandle* finalizable_ref =
24166 callback, external_size,
24167 /*auto_delete=*/true);
24168 ASSERT(finalizable_ref != nullptr);
24169 return finalizable_ref;
24170}
24171
24172StringPtr String::Transform(int32_t (*mapping)(int32_t ch),
24173 const String& str,
24174 Heap::Space space) {
24175 ASSERT(!str.IsNull());
24176 bool has_mapping = false;
24177 int32_t dst_max = 0;
24178 CodePointIterator it(str);
24179 while (it.Next()) {
24180 int32_t src = it.Current();
24181 int32_t dst = mapping(src);
24182 if (src != dst) {
24183 has_mapping = true;
24184 }
24185 dst_max = Utils::Maximum(dst_max, dst);
24186 }
24187 if (!has_mapping) {
24188 return str.ptr();
24189 }
24190 if (Utf::IsLatin1(dst_max)) {
24191 return OneByteString::Transform(mapping, str, space);
24192 }
24193 ASSERT(Utf::IsBmp(dst_max) || Utf::IsSupplementary(dst_max));
24194 return TwoByteString::Transform(mapping, str, space);
24195}
24196
// Upper-cases |str| via CaseMapping::ToUpper; returns the original string
// unchanged when no code point maps differently (see Transform).
StringPtr String::ToUpperCase(const String& str, Heap::Space space) {
  // TODO(cshapiro): create a fast-path for OneByteString instances.
  return Transform(CaseMapping::ToUpper, str, space);
}
24201
// Lower-cases |str| via CaseMapping::ToLower; returns the original string
// unchanged when no code point maps differently (see Transform).
StringPtr String::ToLowerCase(const String& str, Heap::Space space) {
  // TODO(cshapiro): create a fast-path for OneByteString instances.
  return Transform(CaseMapping::ToLower, str, space);
}
24206
24208 intptr_t start,
24209 intptr_t end,
24210 double* result) {
24211 ASSERT(0 <= start);
24212 ASSERT(start <= end);
24213 ASSERT(end <= str.Length());
24214 intptr_t length = end - start;
24215 NoSafepointScope no_safepoint;
24216 const uint8_t* startChar;
24217 if (str.IsOneByteString()) {
24218 startChar = OneByteString::CharAddr(str, start);
24219 } else {
24220 uint8_t* chars = Thread::Current()->zone()->Alloc<uint8_t>(length);
24221 for (intptr_t i = 0; i < length; i++) {
24222 int32_t ch = str.CharAt(start + i);
24223 if (ch < 128) {
24224 chars[i] = ch;
24225 } else {
24226 return false; // Not ASCII, so definitely not valid double numeral.
24227 }
24228 }
24229 startChar = chars;
24230 }
24231 return CStringToDouble(reinterpret_cast<const char*>(startChar), length,
24232 result);
24233}
24234
24235// Check to see if 'str1' matches 'str2' as is or
24236// once the private key separator is stripped from str2.
24237//
24238// Things are made more complicated by the fact that constructors are
24239// added *after* the private suffix, so "foo@123.named" should match
24240// "foo.named".
24241//
24242// Also, the private suffix can occur more than once in the name, as in:
24243//
24244// _ReceivePortImpl@6be832b._internal@6be832b
24245//
24246template <typename T1, typename T2>
24247static bool EqualsIgnoringPrivateKey(const String& str1, const String& str2) {
24248 intptr_t len = str1.Length();
24249 intptr_t str2_len = str2.Length();
24250 if (len == str2_len) {
24251 for (intptr_t i = 0; i < len; i++) {
24252 if (T1::CharAt(str1, i) != T2::CharAt(str2, i)) {
24253 return false;
24254 }
24255 }
24256 return true;
24257 }
24258 if (len < str2_len) {
24259 return false; // No way they can match.
24260 }
24261 intptr_t pos = 0;
24262 intptr_t str2_pos = 0;
24263 while (pos < len) {
24264 int32_t ch = T1::CharAt(str1, pos);
24265 pos++;
24266
24267 if ((str2_pos < str2_len) && (ch == T2::CharAt(str2, str2_pos))) {
24268 str2_pos++;
24269 continue;
24270 }
24271
24273 // Consume a private key separator if str1 has it but str2 does not.
24274 while ((pos < len) && (T1::CharAt(str1, pos) != '.') &&
24275 (T1::CharAt(str1, pos) != '&')) {
24276 pos++;
24277 }
24278 // Resume matching characters.
24279 continue;
24280 }
24281
24282 return false;
24283 }
24284
24285 // We have reached the end of mangled_name string.
24286 ASSERT(pos == len);
24287 return (str2_pos == str2_len);
24288}
24289
// Dispatches on |class_id| (the class id of |str2|) so the template above is
// instantiated with the correct character accessors for the second operand;
// |type| supplies the accessors for the first. Every string case returns, so
// control only reaches the trailing UNREACHABLE() for a non-string class id.
#define EQUALS_IGNORING_PRIVATE_KEY(class_id, type, str1, str2) \
  switch (class_id) { \
    case kOneByteStringCid: \
      return dart::EqualsIgnoringPrivateKey<type, OneByteString>(str1, str2); \
    case kTwoByteStringCid: \
      return dart::EqualsIgnoringPrivateKey<type, TwoByteString>(str1, str2); \
  } \
  UNREACHABLE();
24298
// Compares two strings while ignoring private-key suffixes in |str1|; see
// the comment above the EqualsIgnoringPrivateKey template. Dispatches on the
// class ids of both strings to select the right template instantiation.
bool String::EqualsIgnoringPrivateKey(const String& str1, const String& str2) {
  if (str1.ptr() == str2.ptr()) {
    return true;  // Both handles point to the same raw instance.
  }
  // Raw class-id access below must not cross a safepoint.
  NoSafepointScope no_safepoint;
  intptr_t str1_class_id = str1.ptr()->GetClassId();
  intptr_t str2_class_id = str2.ptr()->GetClassId();
  switch (str1_class_id) {
    case kOneByteStringCid:
      // The macro returns for every string class id; break is unreachable.
      EQUALS_IGNORING_PRIVATE_KEY(str2_class_id, OneByteString, str1, str2);
      break;
    case kTwoByteStringCid:
      EQUALS_IGNORING_PRIVATE_KEY(str2_class_id, TwoByteString, str1, str2);
      break;
  }
  UNREACHABLE();
  return false;
}
24317
24319 ASSERT(index_ >= -1);
24320 intptr_t length = Utf16::Length(ch_);
24321 if (index_ < (end_ - length)) {
24322 index_ += length;
24323 ch_ = str_.CharAt(index_);
24324 if (Utf16::IsLeadSurrogate(ch_) && (index_ < (end_ - 1))) {
24325 int32_t ch2 = str_.CharAt(index_ + 1);
24326 if (Utf16::IsTrailSurrogate(ch2)) {
24327 ch_ = Utf16::Decode(ch_, ch2);
24328 }
24329 }
24330 return true;
24331 }
24332 index_ = end_;
24333 return false;
24334}
24335
24337 intptr_t len = str.Length();
24338 if (len > 0) {
24339 intptr_t num_escapes = 0;
24340 for (intptr_t i = 0; i < len; i++) {
24341 num_escapes += EscapeOverhead(CharAt(str, i));
24342 }
24343 const String& dststr =
24345 intptr_t index = 0;
24346 for (intptr_t i = 0; i < len; i++) {
24347 uint8_t ch = CharAt(str, i);
24348 if (IsSpecialCharacter(ch)) {
24349 SetCharAt(dststr, index, '\\');
24350 SetCharAt(dststr, index + 1, SpecialCharacter(ch));
24351 index += 2;
24352 } else if (IsAsciiNonprintable(ch)) {
24353 SetCharAt(dststr, index, '\\');
24354 SetCharAt(dststr, index + 1, 'x');
24355 SetCharAt(dststr, index + 2, GetHexCharacter(ch >> 4));
24356 SetCharAt(dststr, index + 3, GetHexCharacter(ch & 0xF));
24357 index += 4;
24358 } else {
24359 SetCharAt(dststr, index, ch);
24360 index += 1;
24361 }
24362 }
24363 return OneByteString::raw(dststr);
24364 }
24365 return OneByteString::raw(Symbols::Empty());
24366}
24367
24368OneByteStringPtr OneByteString::New(intptr_t len, Heap::Space space) {
24370 ((IsolateGroup::Current()->object_store() != nullptr) &&
24371 (IsolateGroup::Current()->object_store()->one_byte_string_class() !=
24372 Class::null())));
24373 if (len < 0 || len > kMaxElements) {
24374 // This should be caught before we reach here.
24375 FATAL("Fatal error in OneByteString::New: invalid len %" Pd "\n", len);
24376 }
24377 auto result = Object::Allocate<OneByteString>(space, len);
24378 NoSafepointScope no_safepoint;
24379 result->untag()->set_length(Smi::New(len));
24380#if !defined(HASH_IN_OBJECT_HEADER)
24381 result->untag()->set_hash(Smi::New(0));
24382#endif
24384 ASSERT(size <= result->untag()->HeapSize());
24385 memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(result) + size), 0,
24386 result->untag()->HeapSize() - size);
24387 return result;
24388}
24389
24390OneByteStringPtr OneByteString::New(const uint8_t* characters,
24391 intptr_t len,
24392 Heap::Space space) {
24394 if (len > 0) {
24395 NoSafepointScope no_safepoint;
24396 memmove(DataStart(result), characters, len);
24397 }
24398 return OneByteString::raw(result);
24399}
24400
24401OneByteStringPtr OneByteString::New(const uint16_t* characters,
24402 intptr_t len,
24403 Heap::Space space) {
24405 NoSafepointScope no_safepoint;
24406 for (intptr_t i = 0; i < len; ++i) {
24407 ASSERT(Utf::IsLatin1(characters[i]));
24408 *CharAddr(result, i) = characters[i];
24409 }
24410 return OneByteString::raw(result);
24411}
24412
24413OneByteStringPtr OneByteString::New(const int32_t* characters,
24414 intptr_t len,
24415 Heap::Space space) {
24417 NoSafepointScope no_safepoint;
24418 for (intptr_t i = 0; i < len; ++i) {
24419 ASSERT(Utf::IsLatin1(characters[i]));
24420 *CharAddr(result, i) = characters[i];
24421 }
24422 return OneByteString::raw(result);
24423}
24424
24425OneByteStringPtr OneByteString::New(const String& str, Heap::Space space) {
24426 intptr_t len = str.Length();
24428 String::Copy(result, 0, str, 0, len);
24429 return OneByteString::raw(result);
24430}
24431
24432OneByteStringPtr OneByteString::New(const String& other_one_byte_string,
24433 intptr_t other_start_index,
24434 intptr_t other_len,
24435 Heap::Space space) {
24436 const String& result = String::Handle(OneByteString::New(other_len, space));
24437 ASSERT(other_one_byte_string.IsOneByteString());
24438 if (other_len > 0) {
24439 NoSafepointScope no_safepoint;
24440 memmove(OneByteString::DataStart(result),
24441 OneByteString::CharAddr(other_one_byte_string, other_start_index),
24442 other_len);
24443 }
24444 return OneByteString::raw(result);
24445}
24446
24447OneByteStringPtr OneByteString::New(const TypedDataBase& other_typed_data,
24448 intptr_t other_start_index,
24449 intptr_t other_len,
24450 Heap::Space space) {
24451 const String& result = String::Handle(OneByteString::New(other_len, space));
24452 ASSERT(other_typed_data.ElementSizeInBytes() == 1);
24453 if (other_len > 0) {
24454 NoSafepointScope no_safepoint;
24455 memmove(OneByteString::DataStart(result),
24456 other_typed_data.DataAddr(other_start_index), other_len);
24457 }
24458 return OneByteString::raw(result);
24459}
24460
// Concatenates two strings into a new one-byte string.
// NOTE(review): the listing extraction dropped original line 24467, which
// presumably allocated 'result' via OneByteString::New(len, space) — verify
// against the original source.
24461 OneByteStringPtr OneByteString::Concat(const String& str1,
24462 const String& str2,
24463 Heap::Space space) {
24464 intptr_t len1 = str1.Length();
24465 intptr_t len2 = str2.Length();
24466 intptr_t len = len1 + len2;
24468 String::Copy(result, 0, str1, 0, len1);
24469 String::Copy(result, len1, str2, 0, len2);
24470 return OneByteString::raw(result);
24471 }
24472
// Concatenates strings[start..end) into a new one-byte string of total
// length len.
// NOTE(review): line 24481 (allocation of 'result') was dropped by the
// extraction — verify against the original source.
24473 OneByteStringPtr OneByteString::ConcatAll(const Array& strings,
24474 intptr_t start,
24475 intptr_t end,
24476 intptr_t len,
24477 Heap::Space space) {
24478 ASSERT(!strings.IsNull());
24479 ASSERT(start >= 0);
24480 ASSERT(end <= strings.Length());
24482 String& str = String::Handle();
24483 intptr_t pos = 0;
24484 for (intptr_t i = start; i < end; i++) {
24485 str ^= strings.At(i);
24486 const intptr_t str_len = str.Length();
24487 String::Copy(result, pos, str, 0, str_len);
// Overflow check: running position must stay within element bounds.
24488 ASSERT((kMaxElements - pos) >= str_len);
24489 pos += str_len;
24490 }
24491 return OneByteString::raw(result);
24492 }
24493
// Applies 'mapping' to every character; each mapped value must still be
// Latin-1 (asserted below).
// NOTE(review): line 24499 (allocation of 'result') was dropped by the
// extraction — verify against the original source.
24494 OneByteStringPtr OneByteString::Transform(int32_t (*mapping)(int32_t ch),
24495 const String& str,
24496 Heap::Space space) {
24497 ASSERT(!str.IsNull());
24498 intptr_t len = str.Length();
24500 NoSafepointScope no_safepoint;
24501 for (intptr_t i = 0; i < len; ++i) {
24502 int32_t ch = mapping(str.CharAt(i));
24503 ASSERT(Utf::IsLatin1(ch));
24504 *CharAddr(result, i) = ch;
24505 }
24506 return OneByteString::raw(result);
24507 }
24508
// Returns the substring [begin_index, begin_index + length) of a one-byte
// string. "Unchecked": range validity is enforced only by ASSERTs.
24509 OneByteStringPtr OneByteString::SubStringUnchecked(const String& str,
24510 intptr_t begin_index,
24511 intptr_t length,
24512 Heap::Space space) {
24513 ASSERT(!str.IsNull() && str.IsOneByteString());
24514 ASSERT(begin_index >= 0);
24515 ASSERT(length >= 0);
// An empty result shares the canonical empty symbol instead of allocating.
24516 if (begin_index <= str.Length() && length == 0) {
24517 return OneByteString::raw(Symbols::Empty());
24518 }
24519 ASSERT(begin_index < str.Length());
24520 OneByteStringPtr result = OneByteString::New(length, space);
// Raw pointers into both strings are live below; no safepoint allowed.
24521 NoSafepointScope no_safepoint;
24522 if (length > 0) {
24523 uint8_t* dest = &result->untag()->data()[0];
24524 const uint8_t* src = &untag(str)->data()[begin_index];
24525 memmove(dest, src, length);
24526 }
24527 return result;
24528 }
24529
// NOTE(review): the extraction dropped the signature line (original 24530) —
// from the body this appears to be a TwoByteString escape routine (likely
// EscapeSpecialCharacters) — and line 24538, which presumably allocated
// 'dststr' with length len + num_escapes. Verify against the original source.
// Special characters become "\c"; non-printable ASCII becomes "\xNN".
24531 intptr_t len = str.Length();
24532 if (len > 0) {
// First pass: count extra characters needed for escapes.
24533 intptr_t num_escapes = 0;
24534 for (intptr_t i = 0; i < len; i++) {
24535 num_escapes += EscapeOverhead(CharAt(str, i));
24536 }
24537 const String& dststr =
// Second pass: emit characters, expanding escapes.
24539 intptr_t index = 0;
24540 for (intptr_t i = 0; i < len; i++) {
24541 uint16_t ch = CharAt(str, i);
24542 if (IsSpecialCharacter(ch)) {
24543 SetCharAt(dststr, index, '\\');
24544 SetCharAt(dststr, index + 1, SpecialCharacter(ch));
24545 index += 2;
24546 } else if (IsAsciiNonprintable(ch)) {
24547 SetCharAt(dststr, index, '\\');
24548 SetCharAt(dststr, index + 1, 'x');
24549 SetCharAt(dststr, index + 2, GetHexCharacter(ch >> 4));
24550 SetCharAt(dststr, index + 3, GetHexCharacter(ch & 0xF));
24551 index += 4;
24552 } else {
24553 SetCharAt(dststr, index, ch);
24554 index += 1;
24555 }
24556 }
24557 return TwoByteString::raw(dststr);
24558 }
// Empty input: return a fresh empty two-byte string.
24559 return TwoByteString::New(0, Heap::kNew);
24560 }
24561
// Allocates an uninitialized two-byte string of the given length and zeroes
// the slack between the payload and the heap-rounded object size.
// NOTE(review): the extraction dropped original line 24575, which presumably
// declared 'size' (likely TwoByteString::InstanceSize(len)) — verify against
// the original source.
24562 TwoByteStringPtr TwoByteString::New(intptr_t len, Heap::Space space) {
24563 ASSERT(IsolateGroup::Current()->object_store()->two_byte_string_class() !=
24564 nullptr);
24565 if (len < 0 || len > kMaxElements) {
24566 // This should be caught before we reach here.
24567 FATAL("Fatal error in TwoByteString::New: invalid len %" Pd "\n", len);
24568 }
24569 auto s = Object::Allocate<TwoByteString>(space, len);
24570 NoSafepointScope no_safepoint;
24571 s->untag()->set_length(Smi::New(len));
24572#if !defined(HASH_IN_OBJECT_HEADER)
24573 s->untag()->set_hash(Smi::New(0));
24574#endif
24576 ASSERT(size <= s->untag()->HeapSize());
// Zero the tail so the GC never observes uninitialized memory.
24577 memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(s) + size), 0,
24578 s->untag()->HeapSize() - size);
24579 return s;
24580 }
24581
24582TwoByteStringPtr TwoByteString::New(const uint16_t* utf16_array,
24583 intptr_t array_len,
24584 Heap::Space space) {
24585 ASSERT(array_len > 0);
24586 const String& result = String::Handle(TwoByteString::New(array_len, space));
24587 {
24588 NoSafepointScope no_safepoint;
24589 memmove(reinterpret_cast<void*>(DataStart(result)),
24590 reinterpret_cast<const void*>(utf16_array), (array_len * 2));
24591 }
24592 return TwoByteString::raw(result);
24593}
24594
24595TwoByteStringPtr TwoByteString::New(intptr_t utf16_len,
24596 const int32_t* utf32_array,
24597 intptr_t array_len,
24598 Heap::Space space) {
24599 ASSERT((array_len > 0) && (utf16_len >= array_len));
24600 const String& result = String::Handle(TwoByteString::New(utf16_len, space));
24601 {
24602 NoSafepointScope no_safepoint;
24603 intptr_t j = 0;
24604 for (intptr_t i = 0; i < array_len; ++i) {
24605 if (Utf::IsSupplementary(utf32_array[i])) {
24606 ASSERT(j < (utf16_len - 1));
24607 Utf16::Encode(utf32_array[i], CharAddr(result, j));
24608 j += 2;
24609 } else {
24610 ASSERT(j < utf16_len);
24611 *CharAddr(result, j) = utf32_array[i];
24612 j += 1;
24613 }
24614 }
24615 }
24616 return TwoByteString::raw(result);
24617}
24618
// Copies an arbitrary string into a new two-byte string.
// NOTE(review): the extraction dropped original line 24621, which presumably
// allocated 'result' via TwoByteString::New(len, space) — verify against the
// original source.
24619 TwoByteStringPtr TwoByteString::New(const String& str, Heap::Space space) {
24620 intptr_t len = str.Length();
24622 String::Copy(result, 0, str, 0, len);
24623 return TwoByteString::raw(result);
24624 }
24625
24626TwoByteStringPtr TwoByteString::New(const TypedDataBase& other_typed_data,
24627 intptr_t other_start_index,
24628 intptr_t other_len,
24629 Heap::Space space) {
24630 const String& result = String::Handle(TwoByteString::New(other_len, space));
24631 if (other_len > 0) {
24632 NoSafepointScope no_safepoint;
24633 memmove(TwoByteString::DataStart(result),
24634 other_typed_data.DataAddr(other_start_index),
24635 other_len * sizeof(uint16_t));
24636 }
24637 return TwoByteString::raw(result);
24638}
24639
// Concatenates two strings into a new two-byte string.
// NOTE(review): the extraction dropped original line 24646, which presumably
// allocated 'result' via TwoByteString::New(len, space) — verify against the
// original source.
24640 TwoByteStringPtr TwoByteString::Concat(const String& str1,
24641 const String& str2,
24642 Heap::Space space) {
24643 intptr_t len1 = str1.Length();
24644 intptr_t len2 = str2.Length();
24645 intptr_t len = len1 + len2;
24647 String::Copy(result, 0, str1, 0, len1);
24648 String::Copy(result, len1, str2, 0, len2);
24649 return TwoByteString::raw(result);
24650 }
24651
// Concatenates strings[start..end) into a new two-byte string of total
// length len.
// NOTE(review): line 24660 (allocation of 'result') was dropped by the
// extraction — verify against the original source.
24652 TwoByteStringPtr TwoByteString::ConcatAll(const Array& strings,
24653 intptr_t start,
24654 intptr_t end,
24655 intptr_t len,
24656 Heap::Space space) {
24657 ASSERT(!strings.IsNull());
24658 ASSERT(start >= 0);
24659 ASSERT(end <= strings.Length());
24661 String& str = String::Handle();
24662 intptr_t pos = 0;
24663 for (intptr_t i = start; i < end; i++) {
24664 str ^= strings.At(i);
24665 const intptr_t str_len = str.Length();
24666 String::Copy(result, pos, str, 0, str_len);
24667 ASSERT((kMaxElements - pos) >= str_len);
24668 pos += str_len;
24669 }
24670 return TwoByteString::raw(result);
24671 }
24672
// Applies 'mapping' to every code point of str, writing UTF-16 (with
// surrogate pairs where needed) into the result.
// NOTE(review): lines 24678-24679 were dropped by the extraction — they
// presumably allocated 'result' and declared the code-point iterator 'it'
// over str. Verify against the original source.
24673 TwoByteStringPtr TwoByteString::Transform(int32_t (*mapping)(int32_t ch),
24674 const String& str,
24675 Heap::Space space) {
24676 ASSERT(!str.IsNull());
24677 intptr_t len = str.Length();
24680 intptr_t i = 0;
24681 NoSafepointScope no_safepoint;
24682 while (it.Next()) {
24683 int32_t src = it.Current();
24684 int32_t dst = mapping(src);
// Mapped value must be a valid Unicode code point.
24685 ASSERT(dst >= 0 && dst <= 0x10FFFF);
// Note: this inner 'len' (1 or 2 code units) shadows the outer 'len'.
24686 intptr_t len = Utf16::Length(dst);
24687 if (len == 1) {
24688 *CharAddr(result, i) = dst;
24689 } else {
24690 ASSERT(len == 2);
24691 Utf16::Encode(dst, CharAddr(result, i));
24692 }
24693 i += len;
24694 }
24695 return TwoByteString::raw(result);
24696 }
24697
24698const char* Bool::ToCString() const {
24699 return value() ? "true" : "false";
24700}
24701
// Deep equality used for canonicalization: identical raw pointer, or same
// length, identical element pointers, and equal type arguments.
// NOTE(review): the extraction dropped original lines 24731 and 24733, which
// presumably declared the 'type_args' and completed the 'other_type_args'
// handle initializations — verify against the original source.
24702 bool Array::CanonicalizeEquals(const Instance& other) const {
24703 if (this->ptr() == other.ptr()) {
24704 // Both handles point to the same raw instance.
24705 return true;
24706 }
24707
24708 // An Array may be compared to an ImmutableArray.
24709 if (!other.IsArray() || other.IsNull()) {
24710 return false;
24711 }
24712
24713 // First check if both arrays have the same length and elements.
24714 const Array& other_arr = Array::Cast(other);
24715
24716 intptr_t len = this->Length();
24717 if (len != other_arr.Length()) {
24718 return false;
24719 }
24720
// Element comparison is by raw pointer identity, not recursive equality.
24721 for (intptr_t i = 0; i < len; i++) {
24722 if (this->At(i) != other_arr.At(i)) {
24723 return false;
24724 }
24725 }
24726
24727 // Now check if both arrays have the same type arguments.
24728 if (GetTypeArguments() == other.GetTypeArguments()) {
24729 return true;
24730 }
24732 const TypeArguments& other_type_args =
24734 if (!type_args.Equals(other_type_args)) {
24735 return false;
24736 }
24737 return true;
24738 }
24739
// Hash consistent with CanonicalizeEquals; cached in the heap's canonical
// hash table (0 means "not yet computed").
// NOTE(review): lines 24751-24752, 24755, and 24757 were dropped by the
// extraction — they presumably declared the 'member' handle and combined/
// finalized the hash — verify against the original source.
24740 uint32_t Array::CanonicalizeHash() const {
24741 intptr_t len = Length();
24742 if (len == 0) {
24743 return 1;
24744 }
24745 Thread* thread = Thread::Current();
24746 uint32_t hash = thread->heap()->GetCanonicalHash(ptr());
24747 if (hash != 0) {
24748 return hash;
24749 }
24750 hash = len;
24753 for (intptr_t i = 0; i < len; i++) {
24754 member ^= At(i);
24756 }
24758 thread->heap()->SetCanonicalHash(ptr(), hash);
24759 return hash;
24760 }
24761
// Allocates an Array typed as List<element_type> (dynamic gets no type
// arguments).
// NOTE(review): line 24767, presumably allocating the 'type_args' handle via
// TypeArguments::New(1), was dropped by the extraction — verify against the
// original source.
24762 ArrayPtr Array::New(intptr_t len,
24763 const AbstractType& element_type,
24764 Heap::Space space) {
24765 const Array& result = Array::Handle(Array::New(len, space));
24766 if (!element_type.IsDynamicType()) {
24768 type_args.SetTypeAt(0, element_type);
24769 type_args = type_args.Canonicalize(Thread::Current());
24770 result.SetTypeArguments(type_args);
24771 }
24772 return result.ptr();
24773 }
24774
// Allocates an array of the given class id (Array or ImmutableArray variant)
// with its length set but elements left uninitialized by this function.
// Large arrays additionally get the card-remembered bit set for the GC.
24775 ArrayPtr Array::NewUninitialized(intptr_t class_id,
24776 intptr_t len,
24777 Heap::Space space) {
24778 if (!IsValidLength(len)) {
24779 // This should be caught before we reach here.
24780 FATAL("Fatal error in Array::New: invalid len %" Pd "\n", len);
24781 }
24782 auto raw = Object::AllocateVariant<Array>(class_id, space, len);
// No safepoint between allocation and header initialization.
24783 NoSafepointScope no_safepoint;
24784 raw->untag()->set_length(Smi::New(len));
24785 if (UseCardMarkingForAllocation(len)) {
// Card marking is only used for old-space objects.
24786 ASSERT(raw->IsOldObject());
24787 raw->untag()->SetCardRememberedBitUnsynchronized();
24788 }
24789 return raw;
24790 }
24791
24792ArrayPtr Array::New(intptr_t class_id, intptr_t len, Heap::Space space) {
24793 if (!UseCardMarkingForAllocation(len)) {
24794 return NewUninitialized(class_id, len, space);
24795 }
24796
24797 Thread* thread = Thread::Current();
24798 Array& result =
24799 Array::Handle(thread->zone(), NewUninitialized(class_id, len, space));
24800 result.SetTypeArguments(Object::null_type_arguments());
24801 for (intptr_t i = 0; i < len; i++) {
24802 result.SetAt(i, Object::null_object(), thread);
24803 if (((i + 1) % kSlotsPerInterruptCheck) == 0) {
24804 thread->CheckForSafepoint();
24805 }
24806 }
24807 return result.ptr();
24808}
24809
// Copies elements [start, start + count) into a new array, optionally
// propagating this array's type arguments.
// NOTE(review): the extraction dropped original line 24815, which presumably
// allocated 'dest' (an Array handle of length count) — verify against the
// original source.
24810 ArrayPtr Array::Slice(intptr_t start,
24811 intptr_t count,
24812 bool with_type_argument) const {
24813 Thread* thread = Thread::Current();
24814 Zone* zone = thread->zone();
24816 if (with_type_argument) {
24817 dest.SetTypeArguments(TypeArguments::Handle(zone, GetTypeArguments()));
24818 } else {
24819 dest.SetTypeArguments(Object::null_type_arguments());
24820 }
24821 if (!UseCardMarkingForAllocation(count)) {
// Small copy: done entirely under a no-safepoint scope.
24822 NoSafepointScope no_safepoint(thread);
24823 for (int i = 0; i < count; i++) {
24824 dest.untag()->set_element(i, untag()->element(i + start), thread);
24825 }
24826 } else {
// Large copy: poll for safepoints periodically instead.
24827 for (int i = 0; i < count; i++) {
24828 dest.untag()->set_element(i, untag()->element(i + start), thread);
24829 if (((i + 1) % kSlotsPerInterruptCheck) == 0) {
24830 thread->CheckForSafepoint();
24831 }
24832 }
24833 }
24834 return dest.ptr();
24835 }
24836
// NOTE(review): the signature line (original 24837) was dropped by the
// extraction; this appears to be Array::MakeImmutable(), which retags the
// object with the immutable-array class id. Verify against the original
// source.
24838 if (IsImmutable()) return;
24839 ASSERT(!IsCanonical());
24840 untag()->SetClassId(kImmutableArrayCid);
24841 }
24842
24843const char* Array::ToCString() const {
24844 if (IsNull()) {
24845 return IsImmutable() ? "_ImmutableList nullptr" : "_List nullptr";
24846 }
24847 Zone* zone = Thread::Current()->zone();
24848 const char* format =
24849 IsImmutable() ? "_ImmutableList len:%" Pd : "_List len:%" Pd;
24850 return zone->PrintToString(format, Length());
24851}
24852
// Returns a new array of new_length containing source's elements followed by
// nulls. Order of operations relative to NoSafepointScope and the safepoint
// polling is load-bearing; do not reorder.
24853 ArrayPtr Array::Grow(const Array& source,
24854 intptr_t new_length,
24855 Heap::Space space) {
24856 Thread* thread = Thread::Current();
24857 Zone* zone = thread->zone();
24858 const Array& result =
24859 Array::Handle(zone, Array::NewUninitialized(new_length, space));
24860 intptr_t old_length = 0;
24861 if (!source.IsNull()) {
24862 old_length = source.Length();
24863 result.SetTypeArguments(
24864 TypeArguments::Handle(zone, source.GetTypeArguments()));
24865 } else {
24866 result.SetTypeArguments(Object::null_type_arguments());
24867 }
24868 ASSERT(new_length > old_length); // Unnecessary copying of array.
24869 if (!UseCardMarkingForAllocation(new_length)) {
// Small array: copy under a no-safepoint scope; tail is already null.
24870 NoSafepointScope no_safepoint(thread);
24871 for (intptr_t i = 0; i < old_length; i++) {
24872 result.untag()->set_element(i, source.untag()->element(i), thread);
24873 }
24874 for (intptr_t i = old_length; i < new_length; i++) {
24875 ASSERT(result.untag()->element(i) == Object::null());
24876 }
24877 } else {
// Large array: explicit null-fill with periodic safepoint polling.
24878 for (intptr_t i = 0; i < old_length; i++) {
24879 result.untag()->set_element(i, source.untag()->element(i), thread);
24880 if (((i + 1) % kSlotsPerInterruptCheck) == 0) {
24881 thread->CheckForSafepoint();
24882 }
24883 }
24884 for (intptr_t i = old_length; i < new_length; i++) {
24885 result.untag()->set_element(i, Object::null(), thread);
24886 if (((i + 1) % kSlotsPerInterruptCheck) == 0) {
24887 thread->CheckForSafepoint();
24888 }
24889 }
24890 }
24891 return result.ptr();
24892 }
24893
// Shrinks this array in place to new_len, making the freed tail of the
// object traversable by the GC, then atomically shrinking the size in the
// header. The CAS loop below coordinates with the concurrent sweeper.
24894 void Array::Truncate(intptr_t new_len) const {
24895 if (IsNull()) {
24896 return;
24897 }
24898 Thread* thread = Thread::Current();
24899 Zone* zone = thread->zone();
24900 const Array& array = Array::Handle(zone, this->ptr());
24901
24902 intptr_t old_len = array.Length();
24903 ASSERT(new_len <= old_len);
24904 if (old_len == new_len) {
24905 return;
24906 }
24907 intptr_t old_size = Array::InstanceSize(old_len);
24908 intptr_t new_size = Array::InstanceSize(new_len);
24909
24910 NoSafepointScope no_safepoint;
24911
24912 // If there is any left over space fill it with either an Array object or
24913 // just a plain object (depending on the amount of left over space) so
24914 // that it can be traversed over successfully during garbage collection.
24915 Object::MakeUnusedSpaceTraversable(array, old_size, new_size);
24916
24917 // Update the size in the header field and length of the array object.
24918 // These release operations are balanced by acquire operations in the
24919 // concurrent sweeper.
24920 uword old_tags = array.untag()->tags_;
24921 uword new_tags;
24922 ASSERT(kArrayCid == UntaggedObject::ClassIdTag::decode(old_tags));
24923 do {
24924 new_tags = UntaggedObject::SizeTag::update(new_size, old_tags);
24925 } while (!array.untag()->tags_.compare_exchange_weak(
24926 old_tags, new_tags, std::memory_order_release));
24927
24928 // Between the CAS of the header above and the SetLength below, the array is
24929 // temporarily in an inconsistent state. The header is considered the
24930 // overriding source of object size by UntaggedObject::HeapSize, but the
24931 // ASSERTs in UntaggedObject::HeapSizeFromClass must handle this special case.
24932 array.SetLengthRelease(new_len);
24933 }
24934
// Steals the backing array of a GrowableObjectArray, truncates it to the
// used length, and returns it as a fixed-length List. The growable array is
// reset to empty.
// NOTE(review): the extraction dropped original line 24953, which presumably
// declared 'space' for the empty-array allocation — verify against the
// original source.
24935 ArrayPtr Array::MakeFixedLength(const GrowableObjectArray& growable_array,
24936 bool unique) {
24937 ASSERT(!growable_array.IsNull());
24938 Thread* thread = Thread::Current();
24939 Zone* zone = thread->zone();
24940 intptr_t used_len = growable_array.Length();
24941 // Get the type arguments and prepare to copy them.
24942 const TypeArguments& type_arguments =
24943 TypeArguments::Handle(growable_array.GetTypeArguments());
24944 if (used_len == 0) {
24945 if (type_arguments.IsNull() && !unique) {
24946 // This is a raw List (as in no type arguments), so we can return the
24947 // simple empty array.
24948 return Object::empty_array().ptr();
24949 }
24950
24951 // The backing array may be a shared instance, or may not have correct
24952 // type parameters. Create a new empty array.
24954 Array& array = Array::Handle(zone, Array::New(0, space));
24955 array.SetTypeArguments(type_arguments);
24956 return array.ptr();
24957 }
24958 const Array& array = Array::Handle(zone, growable_array.data());
24959 ASSERT(array.IsArray());
24960 array.SetTypeArguments(type_arguments);
24961
24962 // Null the GrowableObjectArray, we are removing its backing array.
24963 growable_array.SetLength(0);
24964 growable_array.SetData(Object::empty_array());
24965
24966 // Truncate the old backing array and return it.
24967 array.Truncate(used_len);
24968 return array.ptr();
24969 }
24970
// NOTE(review): the signature lines (original 24971-24972) were dropped by
// the extraction; from the body this appears to be
// Array::CanonicalizeFieldsLocked(Thread* thread), which canonicalizes each
// element in place. Verify against the original source.
24973 intptr_t len = Length();
24974 if (len > 0) {
24975 Zone* zone = thread->zone();
24976 Instance& obj = Instance::Handle(zone);
24977 for (intptr_t i = 0; i < len; i++) {
24978 obj ^= At(i);
24979 obj = obj.CanonicalizeLocked(thread);
24980 this->SetAt(i, obj);
24981 }
24982 }
24983 }
24984
24985ImmutableArrayPtr ImmutableArray::New(intptr_t len, Heap::Space space) {
24986 ASSERT(IsolateGroup::Current()->object_store()->immutable_array_class() !=
24987 Class::null());
24988 return static_cast<ImmutableArrayPtr>(Array::New(kClassId, len, space));
24989}
24990
// NOTE(review): the signature line (original 24991) was dropped by the
// extraction; from the body this appears to be
// GrowableObjectArray::Add(const Object& value, Heap::Space space), which
// appends 'value', growing the backing store when full. Verify against the
// original source. Line 24997 (inside the overflow branch) was also dropped.
24992 ASSERT(!IsNull());
24993 if (Length() == Capacity()) {
24994 // Grow from 0 to 3, and then double + 1.
24995 intptr_t new_capacity = (Capacity() * 2) | 3;
// Capacity overflow is fatal.
24996 if (new_capacity <= Capacity()) {
24998 UNREACHABLE();
24999 }
25000 Grow(new_capacity, space);
25001 }
25002 ASSERT(Length() < Capacity());
25003 intptr_t index = Length();
25004 SetLength(index + 1);
25005 SetAt(index, value);
25006 }
25007
25008void GrowableObjectArray::Grow(intptr_t new_capacity, Heap::Space space) const {
25009 ASSERT(new_capacity > Capacity());
25010 const Array& contents = Array::Handle(data());
25011 const Array& new_contents =
25012 Array::Handle(Array::Grow(contents, new_capacity, space));
25013 untag()->set_data(new_contents.ptr());
25014}
25015
// NOTE(review): the signature line (original 25016) was dropped by the
// extraction; from the body this appears to be
// GrowableObjectArray::RemoveLast(), which pops and returns the final
// element, clearing its slot so the GC does not retain it. Verify against
// the original source.
25017 ASSERT(!IsNull());
25018 ASSERT(Length() > 0);
25019 intptr_t index = Length() - 1;
25020 const Array& contents = Array::Handle(data());
25021 const PassiveObject& obj = PassiveObject::Handle(contents.At(index));
// Clear the vacated slot before shrinking the logical length.
25022 contents.SetAt(index, Object::null_object());
25023 SetLength(index);
25024 return obj.ptr();
25025 }
25026
25027GrowableObjectArrayPtr GrowableObjectArray::New(intptr_t capacity,
25028 Heap::Space space) {
25029 ArrayPtr raw_data = (capacity == 0) ? Object::empty_array().ptr()
25030 : Array::New(capacity, space);
25031 const Array& data = Array::Handle(raw_data);
25032 return New(data, space);
25033}
25034
25035GrowableObjectArrayPtr GrowableObjectArray::New(const Array& array,
25036 Heap::Space space) {
25037 ASSERT(
25038 IsolateGroup::Current()->object_store()->growable_object_array_class() !=
25039 Class::null());
25040 const auto& result =
25041 GrowableObjectArray::Handle(Object::Allocate<GrowableObjectArray>(space));
25042 result.SetLength(0);
25043 result.SetData(array);
25044 return result.ptr();
25045}
25046
25047const char* GrowableObjectArray::ToCString() const {
25048 if (IsNull()) {
25049 return "_GrowableList: null";
25050 }
25051 return OS::SCreate(Thread::Current()->zone(),
25052 "Instance(length:%" Pd ") of '_GrowableList'", Length());
25053}
25054
25055 // Equivalent to Dart's operator "==" and hashCode.
// NOTE(review): the class header line (original 25056, presumably
// "class DefaultHashTraits {") was dropped by the extraction — verify
// against the original source.
25057 public:
25058 static const char* Name() { return "DefaultHashTraits"; }
25059 static bool ReportStats() { return false; }
25060
// Null matches only null; otherwise defer to Dart-level operator==.
25061 static bool IsMatch(const Object& a, const Object& b) {
25062 if (a.IsNull() || b.IsNull()) {
25063 return (a.IsNull() && b.IsNull());
25064 } else {
25065 return Instance::Cast(a).OperatorEquals(Instance::Cast(b));
25066 }
25067 }
// Hash via Dart-level hashCode, truncated to 32 bits; non-integer hash
// codes degrade to 0 (all collide).
// NOTE(review): line 25075 (presumably a reusable-handle scope macro) was
// dropped by the extraction — verify against the original source.
25068 static uword Hash(const Object& obj) {
25069 if (obj.IsNull()) {
25070 return 0;
25071 }
25072 // TODO(koda): Ensure VM classes only produce Smi hash codes, and remove
25073 // non-Smi cases once Dart-side implementation is complete.
25074 Thread* thread = Thread::Current();
25076 Instance& hash_code = thread->InstanceHandle();
25077 hash_code ^= Instance::Cast(obj).HashCode();
25078 if (hash_code.IsSmi()) {
25079 // May waste some bits on 64-bit, to ensure consistency with non-Smi case.
25080 return static_cast<uword>(Smi::Cast(hash_code).AsTruncatedUint32Value());
25081 } else if (hash_code.IsInteger()) {
25082 return static_cast<uword>(
25083 Integer::Cast(hash_code).AsTruncatedUint32Value());
25084 } else {
25085 return 0;
25086 }
25087 }
25088 };
25089
25090MapPtr Map::NewDefault(intptr_t class_id, Heap::Space space) {
25091 const Array& data = Array::Handle(Array::New(kInitialIndexSize, space));
25092 const TypedData& index = TypedData::Handle(
25093 TypedData::New(kTypedDataUint32ArrayCid, kInitialIndexSize, space));
25094 // On 32-bit, the top bits are wasted to avoid Mint allocation.
25095 const intptr_t kAvailableBits = (kSmiBits >= 32) ? 32 : kSmiBits;
25096 const intptr_t kInitialHashMask =
25097 (1 << (kAvailableBits - kInitialIndexBits)) - 1;
25098 return Map::New(class_id, data, index, kInitialHashMask, 0, 0, space);
25099}
25100
25101MapPtr Map::New(intptr_t class_id,
25102 const Array& data,
25103 const TypedData& index,
25104 intptr_t hash_mask,
25105 intptr_t used_data,
25106 intptr_t deleted_keys,
25107 Heap::Space space) {
25108 ASSERT(class_id == kMapCid || class_id == kConstMapCid);
25109 ASSERT(IsolateGroup::Current()->object_store()->map_impl_class() !=
25110 Class::null());
25111 Map& result = Map::Handle(Map::NewUninitialized(class_id, space));
25112 result.set_data(data);
25113 result.set_index(index);
25114 result.set_hash_mask(hash_mask);
25115 result.set_used_data(used_data);
25116 result.set_deleted_keys(deleted_keys);
25117 return result.ptr();
25118}
25119
25120MapPtr Map::NewUninitialized(intptr_t class_id, Heap::Space space) {
25121 ASSERT(IsolateGroup::Current()->object_store()->map_impl_class() !=
25122 Class::null());
25123 return Object::AllocateVariant<Map>(class_id, space);
25124}
25125
25126const char* Map::ToCString() const {
25127 Zone* zone = Thread::Current()->zone();
25128 return zone->PrintToString(
25129 "%s len:%" Pd, GetClassId() == kConstMapCid ? "_ConstMap" : "_Map",
25130 Length());
25131}
25132
// NOTE(review): the signature lines (original 25133-25134) were dropped by
// the extraction; from the body this appears to be a LinkedHashBase method
// (likely ComputeAndSetHashMask) that sizes the index from the data array
// and stores the derived hash mask. Verify against the original source.
25135 ASSERT_EQUAL(Smi::Value(deleted_keys()), 0);
25136 Thread* const thread = Thread::Current();
25137 Zone* const zone = thread->zone();
25138
// Index size: next power of two of the data length, doubled for sets,
// but never below the initial index size.
25139 const auto& data_array = Array::Handle(zone, data());
25140 const intptr_t data_length = Utils::RoundUpToPowerOfTwo(data_array.Length());
25141 const intptr_t index_size_mult = IsMap() ? 1 : 2;
25142 const intptr_t index_size = Utils::Maximum(LinkedHashBase::kInitialIndexSize,
25143 data_length * index_size_mult);
25144 ASSERT(Utils::IsPowerOfTwo(index_size));
25145
25146 const intptr_t hash_mask = IndexSizeToHashMask(index_size);
25147 set_hash_mask(hash_mask);
25148 }
25149
// NOTE(review): the signature lines (original 25150-25151) were dropped by
// the extraction; from the body this appears to be
// LinkedHashBase::CanonicalizeEquals(const Instance& other) const. Verify
// against the original source.
25152
25153 if (this->ptr() == other.ptr()) {
25154 // Both handles point to the same raw instance.
25155 return true;
25156 }
25157 if (other.IsNull()) {
25158 return false;
25159 }
// Map never equals Set, Const never equals non-Const, etc.
25160 if (GetClassId() != other.GetClassId()) {
25161 return false;
25162 }
25163
25164 Zone* zone = Thread::Current()->zone();
25165
25166 const LinkedHashBase& other_map = LinkedHashBase::Cast(other);
25167
25168 if (!Smi::Handle(zone, used_data())
25169 .Equals(Smi::Handle(zone, other_map.used_data()))) {
25170 return false;
25171 }
25172
25173 // Immutable maps and sets do not have deleted keys.
25174 ASSERT_EQUAL(RawSmiValue(deleted_keys()), 0);
25175
// Element-wise comparison of the backing data arrays.
25176 if (!Array::Handle(zone, data())
25177 .CanonicalizeEquals(Array::Handle(zone, other_map.data()))) {
25178 return false;
25179 }
25180
25181 if (GetTypeArguments() == other.GetTypeArguments()) {
25182 return true;
25183 }
// NOTE(review): lines 25185 and 25187 (handle initializations for the two
// type-argument comparands) were dropped by the extraction — verify against
// the original source.
25184 const TypeArguments& type_args =
25186 const TypeArguments& other_type_args =
25188 return type_args.Equals(other_type_args);
25189 }
25190
// NOTE(review): the signature lines (original 25191-25192) were dropped by
// the extraction; from the body this appears to be
// LinkedHashBase::CanonicalizeHash() const, caching its result in the
// heap's canonical hash table. Line 25210 (final hash combine/finalize)
// was also dropped. Verify against the original source.
25193
25194 Thread* thread = Thread::Current();
25195 uint32_t hash = thread->heap()->GetCanonicalHash(ptr());
25196 if (hash != 0) {
25197 return hash;
25198 }
25199
25200 // Immutable maps and sets do not have deleted keys.
25201 ASSERT_EQUAL(RawSmiValue(deleted_keys()), 0);
25202
// Hash is combined from type arguments, data array, and used_data count.
25203 Zone* zone = thread->zone();
25204 auto& member = Instance::Handle(zone, GetTypeArguments());
25205 hash = member.CanonicalizeHash();
25206 member = data();
25207 hash = CombineHashes(hash, member.CanonicalizeHash());
25208 member = used_data();
25209 hash = CombineHashes(hash, member.CanonicalizeHash());
25211 thread->heap()->SetCanonicalHash(ptr(), hash);
25212 return hash;
25213 }
25214
// NOTE(review): the signature lines (original 25215-25216) were dropped by
// the extraction; from the body this appears to be
// LinkedHashBase::CanonicalizeFieldsLocked(Thread* thread) const, and line
// 25220 presumably initialized the 'type_args' handle. Verify against the
// original source.
25217
25218 Zone* zone = thread->zone();
25219
25221 if (!type_args.IsNull()) {
25222 type_args = type_args.Canonicalize(thread);
25223 SetTypeArguments(type_args);
25224 }
25225
// The backing array is made immutable and canonicalized in place.
25226 auto& data_array = Array::Handle(zone, data());
25227 data_array.MakeImmutable();
25228 data_array ^= data_array.CanonicalizeLocked(thread);
25229 set_data(data_array);
25230
25231 // Ignoring index. It will be initially null, created on first use, and
25232 // possibly non-null here if we are rehashing.
25233 }
25234
// NOTE(review): the signature line (original 25235) was dropped by the
// extraction; this appears to be ConstMap::NewDefault(Heap::Space space),
// delegating to Map::NewDefault with the ConstMap class id. Verify against
// the original source.
25236 ASSERT(IsolateGroup::Current()->object_store()->const_map_impl_class() !=
25237 Class::null());
25238 return static_cast<ConstMapPtr>(Map::NewDefault(kClassId, space));
25239 }
25240
// NOTE(review): the signature line (original 25241) was dropped by the
// extraction; this appears to be ConstMap::NewUninitialized(Heap::Space
// space). Verify against the original source.
25242 ASSERT(IsolateGroup::Current()->object_store()->const_map_impl_class() !=
25243 Class::null());
25244 return static_cast<ConstMapPtr>(Map::NewUninitialized(kClassId, space));
25245 }
25246
25247SetPtr Set::New(intptr_t class_id,
25248 const Array& data,
25249 const TypedData& index,
25250 intptr_t hash_mask,
25251 intptr_t used_data,
25252 intptr_t deleted_keys,
25253 Heap::Space space) {
25254 ASSERT(class_id == kSetCid || class_id == kConstSetCid);
25255 ASSERT(IsolateGroup::Current()->object_store()->set_impl_class() !=
25256 Class::null());
25257 Set& result = Set::Handle(Set::NewUninitialized(class_id, space));
25258 result.set_data(data);
25259 result.set_index(index);
25260 result.set_hash_mask(hash_mask);
25261 result.set_used_data(used_data);
25262 result.set_deleted_keys(deleted_keys);
25263 return result.ptr();
25264}
25265
25266SetPtr Set::NewDefault(intptr_t class_id, Heap::Space space) {
25267 const Array& data = Array::Handle(Array::New(kInitialIndexSize, space));
25268 const TypedData& index = TypedData::Handle(
25269 TypedData::New(kTypedDataUint32ArrayCid, kInitialIndexSize, space));
25270 // On 32-bit, the top bits are wasted to avoid Mint allocation.
25271 const intptr_t kAvailableBits = (kSmiBits >= 32) ? 32 : kSmiBits;
25272 const intptr_t kInitialHashMask =
25273 (1 << (kAvailableBits - kInitialIndexBits)) - 1;
25274 return Set::New(class_id, data, index, kInitialHashMask, 0, 0, space);
25275}
25276
25277SetPtr Set::NewUninitialized(intptr_t class_id, Heap::Space space) {
25278 ASSERT(IsolateGroup::Current()->object_store()->set_impl_class() !=
25279 Class::null());
25280 return Object::AllocateVariant<Set>(class_id, space);
25281}
25282
// NOTE(review): the signature line (original 25283) was dropped by the
// extraction; this appears to be ConstSet::NewDefault(Heap::Space space),
// delegating to Set::NewDefault with the ConstSet class id. Verify against
// the original source.
25284 ASSERT(IsolateGroup::Current()->object_store()->const_set_impl_class() !=
25285 Class::null());
25286 return static_cast<ConstSetPtr>(Set::NewDefault(kClassId, space));
25287 }
25288
// NOTE(review): the signature line (original 25289) was dropped by the
// extraction; this appears to be ConstSet::NewUninitialized(Heap::Space
// space). Verify against the original source.
25290 ASSERT(IsolateGroup::Current()->object_store()->const_set_impl_class() !=
25291 Class::null());
25292 return static_cast<ConstSetPtr>(Set::NewUninitialized(kClassId, space));
25293 }
25294
25295const char* Set::ToCString() const {
25296 Zone* zone = Thread::Current()->zone();
25297 return zone->PrintToString(
25298 "%s len:%" Pd, GetClassId() == kConstSetCid ? "_ConstSet" : "_Set",
25299 Length());
25300}
25301
// Never called: FutureOr is abstract and has no instances.
25302 const char* FutureOr::ToCString() const {
25303 // FutureOr is an abstract class.
25304 UNREACHABLE();
25305 }
25306
25307Float32x4Ptr Float32x4::New(float v0,
25308 float v1,
25309 float v2,
25310 float v3,
25311 Heap::Space space) {
25312 ASSERT(IsolateGroup::Current()->object_store()->float32x4_class() !=
25313 Class::null());
25314 const auto& result = Float32x4::Handle(Object::Allocate<Float32x4>(space));
25315 result.set_x(v0);
25316 result.set_y(v1);
25317 result.set_z(v2);
25318 result.set_w(v3);
25319 return result.ptr();
25320}
25321
// NOTE(review): the signature line (original 25322) was dropped by the
// extraction; this appears to be Float32x4::New(simd128_value_t value,
// Heap::Space space). Verify against the original source.
25323 ASSERT(IsolateGroup::Current()->object_store()->float32x4_class() !=
25324 Class::null());
25325 const auto& result = Float32x4::Handle(Object::Allocate<Float32x4>(space));
25326 result.set_value(value);
25327 return result.ptr();
25328 }
25329
// NOTE(review): signature (original 25330) dropped; appears to be
// Float32x4::value() const, reading the payload with an unaligned load.
25331 return LoadUnaligned(
25332 reinterpret_cast<const simd128_value_t*>(&untag()->value_));
25333 }
25334
// NOTE(review): signature (original 25335) dropped; appears to be
// Float32x4::set_value(simd128_value_t value) const.
25336 StoreUnaligned(reinterpret_cast<simd128_value_t*>(&ptr()->untag()->value_),
25337 value);
25338 }
25339
// Per-lane setters.
25340 void Float32x4::set_x(float value) const {
25341 StoreNonPointer(&untag()->value_[0], value);
25342 }
25343
25344 void Float32x4::set_y(float value) const {
25345 StoreNonPointer(&untag()->value_[1], value);
25346 }
25347
25348 void Float32x4::set_z(float value) const {
25349 StoreNonPointer(&untag()->value_[2], value);
25350 }
25351
25352 void Float32x4::set_w(float value) const {
25353 StoreNonPointer(&untag()->value_[3], value);
25354 }
25355
// Per-lane getters.
25356 float Float32x4::x() const {
25357 return untag()->value_[0];
25358 }
25359
25360 float Float32x4::y() const {
25361 return untag()->value_[1];
25362 }
25363
25364 float Float32x4::z() const {
25365 return untag()->value_[2];
25366 }
25367
25368 float Float32x4::w() const {
25369 return untag()->value_[3];
25370 }
25371
// Bitwise payload comparison (note: NaN payloads compare by bits, and
// -0.0 != +0.0 here, unlike float ==).
25372 bool Float32x4::CanonicalizeEquals(const Instance& other) const {
25373 return memcmp(&untag()->value_, Float32x4::Cast(other).untag()->value_,
25374 sizeof(simd128_value_t)) == 0;
25375 }
25376
// NOTE(review): signature (original 25377) dropped; appears to be
// Float32x4::CanonicalizeHash() const, hashing the raw payload bytes.
25378 return HashBytes(reinterpret_cast<const uint8_t*>(&untag()->value_),
25379 sizeof(simd128_value_t));
25380 }
25381
25382const char* Float32x4::ToCString() const {
25383 float _x = x();
25384 float _y = y();
25385 float _z = z();
25386 float _w = w();
25387 return OS::SCreate(Thread::Current()->zone(), "[%f, %f, %f, %f]", _x, _y, _z,
25388 _w);
25389}
25390
25391Int32x4Ptr Int32x4::New(int32_t v0,
25392 int32_t v1,
25393 int32_t v2,
25394 int32_t v3,
25395 Heap::Space space) {
25396 ASSERT(IsolateGroup::Current()->object_store()->int32x4_class() !=
25397 Class::null());
25398 const auto& result = Int32x4::Handle(Object::Allocate<Int32x4>(space));
25399 result.set_x(v0);
25400 result.set_y(v1);
25401 result.set_z(v2);
25402 result.set_w(v3);
25403 return result.ptr();
25404}
25405
// NOTE(review): the signature line (original 25406) was dropped by the
// extraction; this appears to be Int32x4::New(simd128_value_t value,
// Heap::Space space). Verify against the original source.
25407 ASSERT(IsolateGroup::Current()->object_store()->int32x4_class() !=
25408 Class::null());
25409 const auto& result = Int32x4::Handle(Object::Allocate<Int32x4>(space));
25410 result.set_value(value);
25411 return result.ptr();
25412 }
25413
// Per-lane setters.
25414 void Int32x4::set_x(int32_t value) const {
25415 StoreNonPointer(&untag()->value_[0], value);
25416 }
25417
25418 void Int32x4::set_y(int32_t value) const {
25419 StoreNonPointer(&untag()->value_[1], value);
25420 }
25421
25422 void Int32x4::set_z(int32_t value) const {
25423 StoreNonPointer(&untag()->value_[2], value);
25424 }
25425
25426 void Int32x4::set_w(int32_t value) const {
25427 StoreNonPointer(&untag()->value_[3], value);
25428 }
25429
// Per-lane getters.
25430 int32_t Int32x4::x() const {
25431 return untag()->value_[0];
25432 }
25433
25434 int32_t Int32x4::y() const {
25435 return untag()->value_[1];
25436 }
25437
25438 int32_t Int32x4::z() const {
25439 return untag()->value_[2];
25440 }
25441
25442 int32_t Int32x4::w() const {
25443 return untag()->value_[3];
25444 }
25445
// NOTE(review): signature (original 25446) dropped; appears to be
// Int32x4::value() const (unaligned payload load).
25447 return LoadUnaligned(
25448 reinterpret_cast<const simd128_value_t*>(&untag()->value_));
25449 }
25450
// NOTE(review): signature (original 25451) dropped; appears to be
// Int32x4::set_value(simd128_value_t value) const.
25452 StoreUnaligned(reinterpret_cast<simd128_value_t*>(&ptr()->untag()->value_),
25453 value);
25454 }
25455
// Bitwise payload comparison for canonicalization.
25456 bool Int32x4::CanonicalizeEquals(const Instance& other) const {
25457 return memcmp(&untag()->value_, Int32x4::Cast(other).untag()->value_,
25458 sizeof(simd128_value_t)) == 0;
25459 }
25460
// NOTE(review): signature (original 25461) dropped; appears to be
// Int32x4::CanonicalizeHash() const, hashing the raw payload bytes.
25462 return HashBytes(reinterpret_cast<const uint8_t*>(&untag()->value_),
25463 sizeof(simd128_value_t));
25464 }
25465
25466const char* Int32x4::ToCString() const {
25467 int32_t _x = x();
25468 int32_t _y = y();
25469 int32_t _z = z();
25470 int32_t _w = w();
25471 return OS::SCreate(Thread::Current()->zone(), "[%08x, %08x, %08x, %08x]", _x,
25472 _y, _z, _w);
25473}
25474
25475Float64x2Ptr Float64x2::New(double value0, double value1, Heap::Space space) {
25476 ASSERT(IsolateGroup::Current()->object_store()->float64x2_class() !=
25477 Class::null());
25478 const auto& result = Float64x2::Handle(Object::Allocate<Float64x2>(space));
25479 result.set_x(value0);
25480 result.set_y(value1);
25481 return result.ptr();
25482}
25483
25485 ASSERT(IsolateGroup::Current()->object_store()->float64x2_class() !=
25486 Class::null());
25487 const auto& result = Float64x2::Handle(Object::Allocate<Float64x2>(space));
25488 result.set_value(value);
25489 return result.ptr();
25490}
25491
25492double Float64x2::x() const {
25493 return untag()->value_[0];
25494}
25495
25496double Float64x2::y() const {
25497 return untag()->value_[1];
25498}
25499
25500void Float64x2::set_x(double x) const {
25501 StoreNonPointer(&untag()->value_[0], x);
25502}
25503
25504void Float64x2::set_y(double y) const {
25505 StoreNonPointer(&untag()->value_[1], y);
25506}
25507
25509 return simd128_value_t().readFrom(&untag()->value_[0]);
25510}
25511
25513 StoreSimd128(&untag()->value_[0], value);
25514}
25515
25516bool Float64x2::CanonicalizeEquals(const Instance& other) const {
25517 return memcmp(&untag()->value_, Float64x2::Cast(other).untag()->value_,
25518 sizeof(simd128_value_t)) == 0;
25519}
25520
25522 return HashBytes(reinterpret_cast<const uint8_t*>(&untag()->value_),
25523 sizeof(simd128_value_t));
25524}
25525
25526const char* Float64x2::ToCString() const {
25527 double _x = x();
25528 double _y = y();
25529 return OS::SCreate(Thread::Current()->zone(), "[%f, %f]", _x, _y);
25530}
25531
// Size in bytes of one element for each typed-data class id. Entries must
// stay in the same order as the corresponding kTypedData*Cid class ids.
const intptr_t
    TypedDataBase::element_size_table[TypedDataBase::kNumElementSizes] = {
        1,   // kTypedDataInt8ArrayCid.
        1,   // kTypedDataUint8ArrayCid.
        1,   // kTypedDataUint8ClampedArrayCid.
        2,   // kTypedDataInt16ArrayCid.
        2,   // kTypedDataUint16ArrayCid.
        4,   // kTypedDataInt32ArrayCid.
        4,   // kTypedDataUint32ArrayCid.
        8,   // kTypedDataInt64ArrayCid.
        8,   // kTypedDataUint64ArrayCid.
        4,   // kTypedDataFloat32ArrayCid.
        8,   // kTypedDataFloat64ArrayCid.
        16,  // kTypedDataFloat32x4ArrayCid.
        16,  // kTypedDataInt32x4ArrayCid.
        16,  // kTypedDataFloat64x2ArrayCid.
};
25549
25550bool TypedData::CanonicalizeEquals(const Instance& other) const {
25551 if (this->ptr() == other.ptr()) {
25552 // Both handles point to the same raw instance.
25553 return true;
25554 }
25555
25556 if (!other.IsTypedData() || other.IsNull()) {
25557 return false;
25558 }
25559
25560 const TypedData& other_typed_data = TypedData::Cast(other);
25561
25562 if (this->ElementType() != other_typed_data.ElementType()) {
25563 return false;
25564 }
25565
25566 const intptr_t len = this->LengthInBytes();
25567 if (len != other_typed_data.LengthInBytes()) {
25568 return false;
25569 }
25570 NoSafepointScope no_safepoint;
25571 return (len == 0) ||
25572 (memcmp(DataAddr(0), other_typed_data.DataAddr(0), len) == 0);
25573}
25574
  const intptr_t len = this->LengthInBytes();
  if (len == 0) {
    // Canonical hash of an empty typed-data object.
    return 1;
  }
  uint32_t hash = len;
  for (intptr_t i = 0; i < len; i++) {
    // NOTE(review): the running 'hash' is overwritten, not folded in —
    // CombineHashes(len, ...) ignores the accumulator, so only the length
    // and the final byte influence the result. Looks like a bug; confirm
    // against upstream before changing, as canonical hashes may be relied
    // on across snapshots.
    hash = CombineHashes(len, GetUint8(i));
  }
  return FinalizeHash(hash, kHashBits);
25586
25587TypedDataPtr TypedData::New(intptr_t class_id,
25588 intptr_t len,
25589 Heap::Space space) {
25590 if (len < 0 || len > TypedData::MaxElements(class_id)) {
25591 FATAL("Fatal error in TypedData::New: invalid len %" Pd "\n", len);
25592 }
25593 auto raw = Object::AllocateVariant<TypedData>(
25594 class_id, space, len * ElementSizeInBytes(class_id));
25595 NoSafepointScope no_safepoint;
25596 raw->untag()->set_length(Smi::New(len));
25597 raw->untag()->RecomputeDataField();
25598 return raw;
25599}
25600
25601TypedDataPtr TypedData::Grow(const TypedData& current,
25602 intptr_t len,
25603 Heap::Space space) {
25604 ASSERT(len > current.Length());
25605 const auto& new_td =
25606 TypedData::Handle(TypedData::New(current.GetClassId(), len, space));
25607 {
25608 NoSafepointScope no_safepoint_scope;
25609 memcpy(new_td.DataAddr(0), current.DataAddr(0), current.LengthInBytes());
25610 }
25611 return new_td.ptr();
25612}
25613
25614const char* TypedData::ToCString() const {
25615 const Class& cls = Class::Handle(clazz());
25616 return cls.ScrubbedNameCString();
25617}
25618
25620 void* peer,
25622 intptr_t external_size) const {
25623 return dart::AddFinalizer(*this, peer, callback, external_size);
25624}
25625
ExternalTypedDataPtr ExternalTypedData::New(
    intptr_t class_id,
    uint8_t* data,
    intptr_t len,
    Heap::Space space,
    bool perform_eager_msan_initialization_check) {
  // Wraps caller-owned memory 'data' ('len' elements) in an external typed
  // data object. The buffer's lifetime must exceed the object's unless a
  // finalizer is attached (see NewFinalizeWithFree).
  if (len < 0 || len > ExternalTypedData::MaxElements(class_id)) {
    FATAL("Fatal error in ExternalTypedData::New: invalid len %" Pd "\n", len);
  }

  if (perform_eager_msan_initialization_check) {
    // Once the TypedData is created, Dart might read this memory. Check for
    // initialization at construction to make it easier to track the source.
    // NOTE(review): the MSAN check call itself appears elided in this view
    // of the file — confirm an msan-initialization check belongs here.
  }

  const auto& result = ExternalTypedData::Handle(
      Object::AllocateVariant<ExternalTypedData>(class_id, space));
  result.SetLength(len);
  result.SetData(data);
  return result.ptr();
}
25648
25649ExternalTypedDataPtr ExternalTypedData::NewFinalizeWithFree(uint8_t* data,
25650 intptr_t len) {
25652 kExternalTypedDataUint8ArrayCid, data, len, Heap::kOld));
25653 result.AddFinalizer(
25654 data, [](void* isolate_callback_data, void* data) { free(data); }, len);
25655 return result.ptr();
25656}
25657
25658TypedDataViewPtr TypedDataView::New(intptr_t class_id, Heap::Space space) {
25659 return Object::AllocateVariant<TypedDataView>(class_id, space);
25660}
25661
25662TypedDataViewPtr TypedDataView::New(intptr_t class_id,
25663 const TypedDataBase& typed_data,
25664 intptr_t offset_in_bytes,
25665 intptr_t length,
25666 Heap::Space space) {
25667 auto& result = TypedDataView::Handle(TypedDataView::New(class_id, space));
25668 result.InitializeWith(typed_data, offset_in_bytes, length);
25669 return result.ptr();
25670}
25671
25673 if (IsExternalTypedData()) return true;
25674 if (IsTypedDataView()) {
25675 const auto& backing =
25676 TypedDataBase::Handle(TypedDataView::Cast(*this).typed_data());
25677 return backing.IsExternalTypedData();
25678 }
25679 return false;
25680}
25681
TypedDataViewPtr TypedDataBase::ViewFromTo(intptr_t start,
                                           intptr_t end,
                                           Heap::Space space) const {
  // Creates a view over elements [start, end) of this typed data object.
  // NOTE(review): the 'space' parameter is unused here — every view below is
  // allocated in old space; confirm that is intended.
  const intptr_t len = end - start;
  ASSERT(0 <= len);
  ASSERT(start < Length());
  ASSERT((start + len) <= Length());

  const intptr_t cid = GetClassId();

  if (IsTypedDataView()) {
    // Views of views are flattened: re-base onto the backing store, folding
    // this view's byte offset into the requested start.
    const auto& view = TypedDataView::Cast(*this);
    const auto& td = TypedDataBase::Handle(view.typed_data());
    const intptr_t view_offset = Smi::Value(view.offset_in_bytes());
    return TypedDataView::New(cid, ExternalTypedData::Cast(td),
                              view_offset + start, len, Heap::kOld);
  } else if (IsExternalTypedData()) {
    // Assumes the view cid directly precedes the external cid — TODO confirm
    // against the class-id ordering in class_id.h.
    return TypedDataView::New(cid - 1, *this, start, len, Heap::kOld);
  }
  RELEASE_ASSERT(IsTypedData());
  // Assumes the view cid directly follows the internal cid — TODO confirm
  // against the class-id ordering in class_id.h.
  return TypedDataView::New(cid + 1, *this, start, len, Heap::kOld);
}
25709
const char* TypedDataBase::ToCString() const {
  // There are no instances of UntaggedTypedDataBase; this method exists only
  // to satisfy the abstract interface and must never actually run.
  UNREACHABLE();
  return nullptr;  // Unreachable; keeps the compiler happy.
}
25715
25716const char* TypedDataView::ToCString() const {
25717 const Class& cls = Class::Handle(clazz());
25718 return cls.ScrubbedNameCString();
25719}
25720
25721const char* ExternalTypedData::ToCString() const {
25722 const Class& cls = Class::Handle(clazz());
25723 return cls.ScrubbedNameCString();
25724}
25725
PointerPtr Pointer::New(uword native_address, Heap::Space space) {
  // Boxes a raw native address as an ffi Pointer whose type argument is the
  // cached <Never> vector from the object store.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  const auto& type_args = TypeArguments::Handle(
      zone, IsolateGroup::Current()->object_store()->type_argument_never());

  // NOTE(review): 'cls' is not referenced below in this view of the code —
  // presumably it backs an elided ASSERT/finalization check; confirm.
  const Class& cls =
      Class::Handle(IsolateGroup::Current()->class_table()->At(kPointerCid));

  const auto& result = Pointer::Handle(zone, Object::Allocate<Pointer>(space));
  result.SetTypeArguments(type_args);
  result.SetNativeAddress(native_address);

  return result.ptr();
}
25743
25744const char* Pointer::ToCString() const {
25745 return OS::SCreate(Thread::Current()->zone(), "Pointer: address=0x%" Px,
25746 NativeAddress());
25747}
25748
25749DynamicLibraryPtr DynamicLibrary::New(void* handle,
25750 bool canBeClosed,
25751 Heap::Space space) {
25752 const auto& result =
25753 DynamicLibrary::Handle(Object::Allocate<DynamicLibrary>(space));
25754 ASSERT_EQUAL(result.IsClosed(), false);
25755 result.SetHandle(handle);
25756 result.SetCanBeClosed(canBeClosed);
25757 return result.ptr();
25758}
25759
25761 return IsFfiPointerClassId(obj.ptr()->GetClassId());
25762}
25763
25765 return Pointer::IsPointer(*this);
25766}
25767
25768const char* DynamicLibrary::ToCString() const {
25769 return OS::SCreate(Thread::Current()->zone(), "DynamicLibrary: handle=0x%" Px,
25770 reinterpret_cast<uintptr_t>(GetHandle()));
25771}
25772
25773CapabilityPtr Capability::New(uint64_t id, Heap::Space space) {
25774 const auto& result = Capability::Handle(Object::Allocate<Capability>(space));
25775 result.StoreNonPointer(&result.untag()->id_, id);
25776 return result.ptr();
25777}
25778
const char* Capability::ToCString() const {
  // Capabilities carry no printable state beyond their type name.
  return "Capability";
}
25782
25783ReceivePortPtr ReceivePort::New(Dart_Port id,
25784 const String& debug_name,
25785 Heap::Space space) {
25786 ASSERT(id != ILLEGAL_PORT);
25787 Thread* thread = Thread::Current();
25788 Zone* zone = thread->zone();
25789 const SendPort& send_port =
25790 SendPort::Handle(zone, SendPort::New(id, thread->isolate()->origin_id()));
25791#if !defined(PRODUCT)
25792 const StackTrace& allocation_location_ =
25794#endif // !defined(PRODUCT)
25795
25796 const auto& result =
25797 ReceivePort::Handle(zone, Object::Allocate<ReceivePort>(space));
25798 result.untag()->set_send_port(send_port.ptr());
25799 result.untag()->set_bitfield(
25801#if !defined(PRODUCT)
25802 result.untag()->set_debug_name(debug_name.ptr());
25803 result.untag()->set_allocation_location(allocation_location_.ptr());
25804#endif // !defined(PRODUCT)
25805 return result.ptr();
25806}
25807
const char* ReceivePort::ToCString() const {
  // Receive ports print only their type name.
  return "ReceivePort";
}
25811
25812SendPortPtr SendPort::New(Dart_Port id, Heap::Space space) {
25813 return New(id, ILLEGAL_PORT, space);
25814}
25815
25817 Dart_Port origin_id,
25818 Heap::Space space) {
25819 ASSERT(id != ILLEGAL_PORT);
25820 const auto& result = SendPort::Handle(Object::Allocate<SendPort>(space));
25821 result.StoreNonPointer(&result.untag()->id_, id);
25822 result.StoreNonPointer(&result.untag()->origin_id_, origin_id);
25823 return result.ptr();
25824}
25825
const char* SendPort::ToCString() const {
  // Send ports print only their type name.
  return "SendPort";
}
25829
25830static void TransferableTypedDataFinalizer(void* isolate_callback_data,
25831 void* peer) {
25832 delete (reinterpret_cast<TransferableTypedDataPeer*>(peer));
25833}
25834
25835TransferableTypedDataPtr TransferableTypedData::New(uint8_t* data,
25836 intptr_t length) {
25837 auto* const peer = new TransferableTypedDataPeer(data, length);
25838
25839 Thread* thread = Thread::Current();
25840 const auto& result =
25841 TransferableTypedData::Handle(Object::Allocate<TransferableTypedData>(
25842 thread->heap()->SpaceForExternal(length)));
25843 thread->heap()->SetPeer(result.ptr(), peer);
25844
25845 // Set up finalizer so it frees allocated memory if handle is
25846 // garbage-collected.
25847 FinalizablePersistentHandle* finalizable_ref =
25850 /*auto_delete=*/true);
25851 ASSERT(finalizable_ref != nullptr);
25852 peer->set_handle(finalizable_ref);
25853
25854 return result.ptr();
25855}
25856
const char* TransferableTypedData::ToCString() const {
  // Transferables print only their type name.
  return "TransferableTypedData";
}
25860
25861bool Closure::CanonicalizeEquals(const Instance& other) const {
25862 if (!other.IsClosure()) return false;
25863
25864 const Closure& other_closure = Closure::Cast(other);
25865 return (instantiator_type_arguments() ==
25866 other_closure.instantiator_type_arguments()) &&
25867 (function_type_arguments() ==
25868 other_closure.function_type_arguments()) &&
25869 (delayed_type_arguments() == other_closure.delayed_type_arguments()) &&
25870 (function() == other_closure.function()) &&
25871 (RawContext() == other_closure.RawContext());
25872}
25873
25875 TypeArguments& type_args = TypeArguments::Handle();
25876 type_args = instantiator_type_arguments();
25877 if (!type_args.IsNull()) {
25878 type_args = type_args.Canonicalize(thread);
25879 set_instantiator_type_arguments(type_args);
25880 }
25881 type_args = function_type_arguments();
25882 if (!type_args.IsNull()) {
25883 type_args = type_args.Canonicalize(thread);
25884 set_function_type_arguments(type_args);
25885 }
25886 type_args = delayed_type_arguments();
25887 if (!type_args.IsNull()) {
25888 type_args = type_args.Canonicalize(thread);
25889 set_delayed_type_arguments(type_args);
25890 }
25891 // Ignore function, context, hash.
25892}
25893
const char* Closure::ToCString() const {
  // Returns a zone-allocated, human-readable description of this closure.
  auto const thread = Thread::Current();
  auto const zone = thread->zone();
  ZoneTextBuffer buffer(zone);
  buffer.AddString("Closure: ");
  const Function& fun = Function::Handle(zone, function());
  // NOTE(review): 'sig' is not referenced below in this view of the code —
  // presumably an elided line prints the instantiated signature; confirm.
  const FunctionType& sig =
      FunctionType::Handle(zone, GetInstantiatedSignature(zone));
  if (fun.IsImplicitClosureFunction()) {
    // Implicit closures (method tear-offs) also name the original function.
    buffer.Printf(" from %s", fun.ToCString());
  }
  return buffer.buffer();
}
25908
25910 Thread* thread = Thread::Current();
25911 DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame());
25912 Zone* zone = thread->zone();
25913 const Function& func = Function::Handle(zone, function());
25914 uint32_t result = 0;
25915 if (func.IsImplicitClosureFunction() || func.IsGeneric()) {
25916 // Combine function's hash code, delayed type arguments hash code
25917 // (if generic), and identityHashCode of cached receiver (if implicit
25918 // instance closure).
25919 result = static_cast<uint32_t>(func.Hash());
25920 if (func.IsGeneric()) {
25921 const TypeArguments& delayed_type_args =
25922 TypeArguments::Handle(zone, delayed_type_arguments());
25923 result = CombineHashes(result, delayed_type_args.Hash());
25924 }
25926 const Instance& receiver =
25927 Instance::Handle(zone, GetImplicitClosureReceiver());
25928 const Integer& receiverHash =
25929 Integer::Handle(zone, receiver.IdentityHashCode(thread));
25931 }
25932 } else {
25933 // Non-implicit closures of non-generic functions are unique,
25934 // so identityHashCode of closure object is good enough.
25935 const Integer& identityHash =
25936 Integer::Handle(zone, this->IdentityHashCode(thread));
25937 result = identityHash.AsTruncatedUint32Value();
25938 }
25940}
25941
25942ClosurePtr Closure::New(const TypeArguments& instantiator_type_arguments,
25943 const TypeArguments& function_type_arguments,
25944 const Function& function,
25945 const Object& context,
25946 Heap::Space space) {
25947 // We store null delayed type arguments, not empty ones, in closures with
25948 // non-generic functions a) to make method extraction slightly faster and
25949 // b) to make the Closure::IsGeneric check fast.
25950 // Keep in sync with StubCodeCompiler::GenerateAllocateClosureStub.
25951 return Closure::New(instantiator_type_arguments, function_type_arguments,
25952 function.IsGeneric() ? Object::empty_type_arguments()
25953 : Object::null_type_arguments(),
25954 function, context, space);
25955}
25956
ClosurePtr Closure::New(const TypeArguments& instantiator_type_arguments,
                        const TypeArguments& function_type_arguments,
                        const TypeArguments& delayed_type_arguments,
                        const Function& function,
                        const Object& context,
                        Heap::Space space) {
  // Allocates a closure. All type-argument vectors must already be
  // canonical, so CanonicalizeEquals can compare them by raw pointer.
  ASSERT(instantiator_type_arguments.IsCanonical());
  ASSERT(function_type_arguments.IsCanonical());
  ASSERT(delayed_type_arguments.IsCanonical());
  // The context kind must match the function kind: implicit instance
  // closures capture a receiver, ordinary closures capture a Context.
  ASSERT(
      (function.IsImplicitInstanceClosureFunction() && context.IsInstance()) ||
      (function.IsNonImplicitClosureFunction() && context.IsContext()) ||
      context.IsNull());
  const auto& result = Closure::Handle(Object::Allocate<Closure>(space));
  result.untag()->set_instantiator_type_arguments(
      instantiator_type_arguments.ptr());
  result.untag()->set_function_type_arguments(function_type_arguments.ptr());
  result.untag()->set_delayed_type_arguments(delayed_type_arguments.ptr());
  result.untag()->set_function(function.ptr());
  result.untag()->set_context(context.ptr());
#if defined(DART_PRECOMPILED_RUNTIME)
  // AOT caches the function's entry point directly on the closure.
  result.set_entry_point(function.entry_point());
#endif
  return result.ptr();
}
25983
25984FunctionTypePtr Closure::GetInstantiatedSignature(Zone* zone) const {
25985 const Function& fun = Function::Handle(zone, function());
25986 FunctionType& sig = FunctionType::Handle(zone, fun.signature());
25987 TypeArguments& fn_type_args =
25988 TypeArguments::Handle(zone, function_type_arguments());
25989 const TypeArguments& delayed_type_args =
25990 TypeArguments::Handle(zone, delayed_type_arguments());
25991 const TypeArguments& inst_type_args =
25992 TypeArguments::Handle(zone, instantiator_type_arguments());
25993
25994 // We detect the case of a partial tearoff type application and substitute the
25995 // type arguments for the type parameters of the function.
25996 intptr_t num_free_params;
25997 if (!IsGeneric() && fun.IsGeneric()) {
25998 num_free_params = kCurrentAndEnclosingFree;
25999 fn_type_args = delayed_type_args.Prepend(
26000 zone, fn_type_args, sig.NumParentTypeArguments(),
26002 } else {
26003 num_free_params = kAllFree;
26004 }
26005 if (num_free_params == kCurrentAndEnclosingFree || !sig.IsInstantiated()) {
26006 sig ^= sig.InstantiateFrom(inst_type_args, fn_type_args, num_free_params,
26007 Heap::kOld);
26008 }
26009 return sig.ptr();
26010}
26011
26013 return untag()->skip_sync_start_in_parent_stack;
26014}
26015
26017 StoreNonPointer(&untag()->skip_sync_start_in_parent_stack, value);
26018}
26019
26020intptr_t StackTrace::Length() const {
26021 const Array& code_array = Array::Handle(untag()->code_array());
26022 return code_array.Length();
26023}
26024
26025ObjectPtr StackTrace::CodeAtFrame(intptr_t frame_index) const {
26026 const Array& code_array = Array::Handle(untag()->code_array());
26027 return code_array.At(frame_index);
26028}
26029
26030void StackTrace::SetCodeAtFrame(intptr_t frame_index,
26031 const Object& code) const {
26032 const Array& code_array = Array::Handle(untag()->code_array());
26033 code_array.SetAt(frame_index, code);
26034}
26035
26036uword StackTrace::PcOffsetAtFrame(intptr_t frame_index) const {
26037 const TypedData& pc_offset_array =
26038 TypedData::Handle(untag()->pc_offset_array());
26039 return pc_offset_array.GetUintPtr(frame_index * kWordSize);
26040}
26041
26042void StackTrace::SetPcOffsetAtFrame(intptr_t frame_index,
26043 uword pc_offset) const {
26044 const TypedData& pc_offset_array =
26045 TypedData::Handle(untag()->pc_offset_array());
26046 pc_offset_array.SetUintPtr(frame_index * kWordSize, pc_offset);
26047}
26048
void StackTrace::set_async_link(const StackTrace& async_link) const {
  // Stores the next StackTrace in the asynchronous chain (see the New
  // overload taking an async_link).
  untag()->set_async_link(async_link.ptr());
}
26052
void StackTrace::set_code_array(const Array& code_array) const {
  // One element per frame; read back via CodeAtFrame.
  untag()->set_code_array(code_array.ptr());
}
26056
void StackTrace::set_pc_offset_array(const TypedData& pc_offset_array) const {
  // One machine word per frame; read back via PcOffsetAtFrame.
  untag()->set_pc_offset_array(pc_offset_array.ptr());
}
26060
26062 StoreNonPointer(&untag()->expand_inlined_, value);
26063}
26064
bool StackTrace::expand_inlined() const {
  // Whether ToCString expands inlined frames; StackTrace::New sets this to
  // true by default.
  return untag()->expand_inlined_;
}
26068
26069StackTracePtr StackTrace::New(const Array& code_array,
26070 const TypedData& pc_offset_array,
26071 Heap::Space space) {
26072 const auto& result = StackTrace::Handle(Object::Allocate<StackTrace>(space));
26073 result.set_code_array(code_array);
26074 result.set_pc_offset_array(pc_offset_array);
26075 result.set_expand_inlined(true); // default.
26076 ASSERT_EQUAL(result.skip_sync_start_in_parent_stack(), false);
26077 return result.ptr();
26078}
26079
26080StackTracePtr StackTrace::New(const Array& code_array,
26081 const TypedData& pc_offset_array,
26082 const StackTrace& async_link,
26083 bool skip_sync_start_in_parent_stack,
26084 Heap::Space space) {
26085 const auto& result = StackTrace::Handle(Object::Allocate<StackTrace>(space));
26086 result.set_async_link(async_link);
26087 result.set_code_array(code_array);
26088 result.set_pc_offset_array(pc_offset_array);
26089 result.set_expand_inlined(true); // default.
26090 result.set_skip_sync_start_in_parent_stack(skip_sync_start_in_parent_stack);
26091 return result.ptr();
26092}
26093
26094#if defined(DART_PRECOMPILED_RUNTIME)
26095static bool TryPrintNonSymbolicStackFrameBodyRelative(
26097 uword call_addr,
26098 uword instructions,
26099 bool vm,
26100 LoadingUnit* unit = nullptr) {
26101 const Image image(reinterpret_cast<const uint8_t*>(instructions));
26102 if (!image.contains(call_addr)) return false;
26103 if (unit != nullptr) {
26104 ASSERT(!unit->IsNull());
26105 // Add the unit ID to the stack frame, so the correct loading unit
26106 // information from the header can be checked.
26107 buffer->Printf(" unit %" Pd "", unit->id());
26108 }
26109 auto const offset = call_addr - instructions;
26110 // Only print the relocated address of the call when we know the saved
26111 // debugging information (if any) will have the same relocated address.
26112 // Also only print 'virt' fields for isolate addresses.
26113 if (!vm && image.compiled_to_elf()) {
26114 const uword relocated_section_start =
26115 image.instructions_relocated_address();
26116 buffer->Printf(" virt %" Pp "", relocated_section_start + offset);
26117 }
26118 const char* symbol = vm ? kVmSnapshotInstructionsAsmSymbol
26120 buffer->Printf(" %s+0x%" Px "\n", symbol, offset);
26121 return true;
26122}
26123
// Prints the best representation(s) for the call address.
static void PrintNonSymbolicStackFrameBody(BaseTextBuffer* buffer,
                                           uword call_addr,
                                           uword isolate_instructions,
                                           uword vm_instructions,
                                           const Array& loading_units,
                                           LoadingUnit* unit) {
  // Try the VM snapshot's instructions image first.
  if (TryPrintNonSymbolicStackFrameBodyRelative(buffer, call_addr,
                                                vm_instructions,
                                                /*vm=*/true)) {
    return;
  }

  if (!loading_units.IsNull()) {
    // All non-VM stack frames should include the loading unit id.
    // 'unit' is a caller-provided reusable handle and is clobbered here.
    const intptr_t unit_count = loading_units.Length();
    for (intptr_t i = LoadingUnit::kRootId; i < unit_count; i++) {
      *unit ^= loading_units.At(i);
      if (!unit->has_instructions_image()) continue;
      auto const instructions =
          reinterpret_cast<uword>(unit->instructions_image());
      if (TryPrintNonSymbolicStackFrameBodyRelative(buffer, call_addr,
                                                    instructions,
                                                    /*vm=*/false, unit)) {
        return;
      }
    }
  } else {
    // No loading units: the isolate instructions image is the only
    // remaining candidate.
    if (TryPrintNonSymbolicStackFrameBodyRelative(buffer, call_addr,
                                                  isolate_instructions,
                                                  /*vm=*/false)) {
      return;
    }
  }

  // The stack trace printer should never end up here, since these are not
  // addresses within a loading unit or the VM or app isolate instructions
  // sections. Thus, make it easy to notice when looking at the stack trace.
  buffer->Printf(" <invalid Dart instruction address>\n");
}
26164#endif
26165
26167 intptr_t frame_index) {
26168 buffer->Printf("#%-6" Pd "", frame_index);
26169}
26170
26172 const char* function_name,
26173 const char* url,
26174 intptr_t line = -1,
26175 intptr_t column = -1) {
26176 buffer->Printf(" %s (%s", function_name, url);
26177 if (line >= 0) {
26178 buffer->Printf(":%" Pd "", line);
26179 if (column >= 0) {
26180 buffer->Printf(":%" Pd "", column);
26181 }
26182 }
26183 buffer->Printf(")\n");
26184}
26185
26188 const Function& function,
26189 TokenPosition token_pos_or_line,
26190 intptr_t frame_index,
26191 bool is_line = false) {
26192 ASSERT(!function.IsNull());
26193 const auto& script = Script::Handle(zone, function.script());
26194 const char* function_name = function.QualifiedUserVisibleNameCString();
26195 const char* url = script.IsNull()
26196 ? "Kernel"
26197 : String::Handle(zone, script.url()).ToCString();
26198
26199 // If the URI starts with "data:application/dart;" this is a URI encoded
26200 // script so we shouldn't print the entire URI because it could be very long.
26201 if (strstr(url, "data:application/dart;") == url) {
26202 url = "<data:application/dart>";
26203 }
26204
26205 intptr_t line = -1;
26206 intptr_t column = -1;
26207 if (is_line) {
26208 ASSERT(token_pos_or_line.IsNoSource() || token_pos_or_line.IsReal());
26209 if (token_pos_or_line.IsReal()) {
26210 line = token_pos_or_line.Pos();
26211 }
26212 } else {
26213 ASSERT(!script.IsNull());
26214 script.GetTokenLocation(token_pos_or_line, &line, &column);
26215 }
26218}
26219
26221 if (function.is_visible()) {
26222 return true;
26223 }
26224
26225 if (function.IsImplicitClosureFunction()) {
26226 return function.parent_function() == Function::null() ||
26227 Function::is_visible(function.parent_function());
26228 }
26229
26230 return false;
26231}
26232
26233#if defined(DART_PRECOMPILED_RUNTIME)
26234static void WriteImageBuildId(BaseTextBuffer* buffer,
26235 const char* prefix,
26236 uword image_address) {
26237 const auto& build_id = OS::GetAppBuildId(image_address);
26238 if (build_id.data != nullptr) {
26239 ASSERT(build_id.len > 0);
26240 buffer->AddString(prefix);
26241 buffer->AddString("'");
26242 for (intptr_t i = 0; i < build_id.len; i++) {
26243 buffer->Printf("%2.2x", build_id.data[i]);
26244 }
26245 buffer->AddString("'");
26246 }
26247}
26248
void WriteStackTraceHeaderLoadingUnitEntry(BaseTextBuffer* buffer,
                                           intptr_t id,
                                           uword dso_base,
                                           uword instructions) {
  // Emits one stack-trace header line per loading unit: the unit id, the
  // image's build id (when present), and the addresses needed to symbolize
  // non-symbolic frames offline.
  buffer->Printf("loading_unit: %" Pd "", id);
  WriteImageBuildId(buffer, ", build_id: ", instructions);
  buffer->Printf(", dso_base: %" Px ", instructions: %" Px "\n", dso_base,
                 instructions);
}
26258#endif
26259
26260const char* StackTrace::ToCString() const {
26261 auto const T = Thread::Current();
26262 auto const zone = T->zone();
26263 auto& stack_trace = StackTrace::Handle(zone, this->ptr());
26264 auto& owner = Object::Handle(zone);
26265 auto& function = Function::Handle(zone);
26266 auto& code_object = Object::Handle(zone);
26267 auto& code = Code::Handle(zone);
26268
26269#if defined(DART_PRECOMPILED_RUNTIME)
26270 const Array& loading_units =
26271 Array::Handle(T->isolate_group()->object_store()->loading_units());
26272 auto* const unit =
26273 loading_units.IsNull() ? nullptr : &LoadingUnit::Handle(zone);
26274#endif
26275
26276 NoSafepointScope no_allocation;
26277 GrowableArray<const Function*> inlined_functions;
26278 GrowableArray<TokenPosition> inlined_token_positions;
26279
26280#if defined(DART_PRECOMPILED_RUNTIME)
26281 GrowableArray<void*> addresses(10);
26282 const bool have_footnote_callback =
26283 FLAG_dwarf_stack_traces_mode &&
26285#endif
26286
26287 ZoneTextBuffer buffer(zone, 1024);
26288
26289#if defined(DART_PRECOMPILED_RUNTIME)
26290 auto const isolate_instructions = reinterpret_cast<uword>(
26291 T->isolate_group()->source()->snapshot_instructions);
26292#if defined(DEBUG)
26293 if (!loading_units.IsNull()) {
26294 *unit ^= loading_units.At(LoadingUnit::kRootId);
26295 ASSERT(!unit->IsNull());
26296 ASSERT(unit->has_instructions_image());
26297 ASSERT(reinterpret_cast<uword>(unit->instructions_image()) ==
26298 isolate_instructions);
26299 }
26300#endif
26301 auto const vm_instructions = reinterpret_cast<uword>(
26303 if (FLAG_dwarf_stack_traces_mode) {
26304 // This prologue imitates Android's debuggerd to make it possible to paste
26305 // the stack trace into ndk-stack.
26306 buffer.Printf(
26307 "*** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***\n");
26308 OSThread* thread = OSThread::Current();
26309 buffer.Printf("pid: %" Pd ", tid: %" Pd ", name %s\n", OS::ProcessId(),
26310 OSThread::ThreadIdToIntPtr(thread->id()), thread->name());
26311#if defined(DART_COMPRESSED_POINTERS)
26312 const char kCompressedPointers[] = "yes";
26313#else
26314 const char kCompressedPointers[] = "no";
26315#endif
26316#if defined(USING_SIMULATOR)
26317 const char kUsingSimulator[] = "yes";
26318#else
26319 const char kUsingSimulator[] = "no";
26320#endif
26321 buffer.Printf("os: %s arch: %s comp: %s sim: %s\n",
26322 kHostOperatingSystemName, kTargetArchitectureName,
26323 kCompressedPointers, kUsingSimulator);
26324 WriteImageBuildId(&buffer, "build_id: ", isolate_instructions);
26325 buffer.AddString("\n");
26326 if (!loading_units.IsNull()) {
26327 const intptr_t unit_count = loading_units.Length();
26328 for (intptr_t i = LoadingUnit::kRootId; i < unit_count; i++) {
26329 *unit ^= loading_units.At(i);
26330 if (!unit->has_instructions_image()) continue;
26331 const uword instructions =
26332 reinterpret_cast<uword>(unit->instructions_image());
26333 const uword dso_base = OS::GetAppDSOBase(instructions);
26334 WriteStackTraceHeaderLoadingUnitEntry(&buffer, i, dso_base,
26335 instructions);
26336 }
26337 }
26338 // Print the dso_base of the VM and isolate_instructions. We print both here
26339 // as the VM and isolate may be loaded from different snapshot images.
26340 const uword isolate_dso_base = OS::GetAppDSOBase(isolate_instructions);
26341 buffer.Printf("isolate_dso_base: %" Px "", isolate_dso_base);
26342 const uword vm_dso_base = OS::GetAppDSOBase(vm_instructions);
26343 buffer.Printf(", vm_dso_base: %" Px "\n", vm_dso_base);
26344 buffer.Printf("isolate_instructions: %" Px "", isolate_instructions);
26345 buffer.Printf(", vm_instructions: %" Px "\n", vm_instructions);
26346 }
26347#endif
26348
26349 // Iterate through the stack frames and create C string description
26350 // for each frame.
26351 intptr_t frame_index = 0;
26352 uint32_t frame_skip = 0;
26353 // If we're already in a gap, don't print multiple gap markers.
26354 bool in_gap = false;
26355 do {
26356 for (intptr_t i = frame_skip; i < stack_trace.Length(); i++) {
26357 code_object = stack_trace.CodeAtFrame(i);
26358 if (code_object.IsNull()) {
26359 // Check for a null function, which indicates a gap in a StackOverflow
26360 // or OutOfMemory trace.
26361 if ((i < (stack_trace.Length() - 1)) &&
26362 (stack_trace.CodeAtFrame(i + 1) != Code::null())) {
26363 buffer.AddString("...\n...\n");
26364 // To account for gap frames.
26365 frame_index += stack_trace.PcOffsetAtFrame(i);
26366 }
26367 continue;
26368 }
26369
26370 if (code_object.ptr() == StubCode::AsynchronousGapMarker().ptr()) {
26371 if (!in_gap) {
26372 buffer.AddString("<asynchronous suspension>\n");
26373 }
26374 in_gap = true;
26375 continue;
26376 }
26377
26378 const uword pc_offset = stack_trace.PcOffsetAtFrame(i);
26379 ASSERT(code_object.IsCode());
26380 code ^= code_object.ptr();
26381 ASSERT(code.IsFunctionCode());
26382 owner = code.owner();
26383 if (owner.IsFunction()) {
26384 function ^= owner.ptr();
26385 } else {
26387 }
26388 const uword pc = code.PayloadStart() + pc_offset;
26389
26390 const bool is_future_listener =
26392
26393 // A visible frame ends any gap we might be in.
26394 in_gap = false;
26395
26396#if defined(DART_PRECOMPILED_RUNTIME)
26397 // When printing non-symbolic frames, we normally print call
26398 // addresses, not return addresses, by subtracting one from the PC to
26399 // get an address within the preceding instruction.
26400 //
26401 // The one exception is a normal closure registered as a listener on a
26402 // future. In this case, the returned pc_offset will be pointing to the
26403 // entry pooint of the function, which will be invoked when the future
26404 // completes. To make things more uniform stack unwinding code offets
26405 // pc_offset by 1 for such cases.
26406 const uword call_addr = pc - 1;
26407
26408 if (FLAG_dwarf_stack_traces_mode) {
26409 if (have_footnote_callback) {
26410 addresses.Add(reinterpret_cast<void*>(call_addr));
26411 }
26412
26413 // This output is formatted like Android's debuggerd. Note debuggerd
26414 // prints call addresses instead of return addresses.
26415 buffer.Printf(" #%02" Pd " abs %" Pp "", frame_index, call_addr);
26416 PrintNonSymbolicStackFrameBody(&buffer, call_addr, isolate_instructions,
26417 vm_instructions, loading_units, unit);
26418 frame_index++;
26419 continue;
26420 }
26421
26422 if (function.IsNull()) {
26423 in_gap = false;
26424 // We can't print the symbolic information since the owner was not
26425 // retained, so instead print the static symbol + offset like the
26426 // non-symbolic stack traces.
26427 PrintSymbolicStackFrameIndex(&buffer, frame_index);
26428 PrintNonSymbolicStackFrameBody(&buffer, call_addr, isolate_instructions,
26429 vm_instructions, loading_units, unit);
26430 frame_index++;
26431 continue;
26432 }
26433#endif
26434
26435 if (code.is_optimized() && stack_trace.expand_inlined() &&
26436 (FLAG_precompiled_mode || !is_future_listener)) {
26437 // Note: In AOT mode EmitFunctionEntrySourcePositionDescriptorIfNeeded
26438 // will take care of emitting a descriptor that would allow us to
26439 // symbolize stack frame with 0 offset.
26440 code.GetInlinedFunctionsAtReturnAddress(
26441 is_future_listener ? 0 : pc_offset, &inlined_functions,
26442 &inlined_token_positions);
26443 ASSERT(inlined_functions.length() >= 1);
26444 for (intptr_t j = inlined_functions.length() - 1; j >= 0; j--) {
26445 function = inlined_functions[j]->ptr();
26446 auto const pos = inlined_token_positions[j];
26447 if (is_future_listener && function.IsImplicitClosureFunction()) {
26448 function = function.parent_function();
26449 }
26450 if (FLAG_show_invisible_frames || function.is_visible()) {
26451 PrintSymbolicStackFrame(zone, &buffer, function, pos, frame_index,
26452 /*is_line=*/FLAG_precompiled_mode);
26453 frame_index++;
26454 }
26455 }
26456 continue;
26457 }
26458
26459 if (FLAG_show_invisible_frames || function.is_visible() ||
26460 (is_future_listener && IsVisibleAsFutureListener(function))) {
26461 auto const pos = is_future_listener ? function.token_pos()
26462 : code.GetTokenIndexOfPC(pc);
26463 PrintSymbolicStackFrame(zone, &buffer, function, pos, frame_index);
26464 frame_index++;
26465 }
26466 }
26467
26468 // Follow the link.
26469 frame_skip = stack_trace.skip_sync_start_in_parent_stack()
26471 : 0;
26472 stack_trace = stack_trace.async_link();
26473 } while (!stack_trace.IsNull());
26474
26475#if defined(DART_PRECOMPILED_RUNTIME)
26476 if (have_footnote_callback) {
26478 &addresses[0], addresses.length());
26479 if (footnote != nullptr) {
26480 buffer.AddString(footnote);
26481 free(footnote);
26482 }
26483 }
26484#endif
26485
26486 return buffer.buffer();
26487}
26488
// Flag handler for --dwarf-stack-traces: records the value and, in PRODUCT
// builds, allows dropping function/code objects that are only needed to
// symbolize stack traces at runtime.
// NOTE(review): the handler's signature line (26489) is missing from this
// extraction — presumably `static void DwarfStackTracesHandler(bool value) {`;
// confirm against upstream object.cc.
26490 FLAG_dwarf_stack_traces_mode = value;
26491
26492#if defined(PRODUCT)
26493 // We can safely remove function objects in precompiled snapshots if the
26494 // runtime will generate DWARF stack traces and we don't have runtime
26495 // debugging options like the observatory available.
26496 if (value) {
26497 FLAG_retain_function_objects = false;
26498 FLAG_retain_code_objects = false;
26499 }
26500#endif
26501}
26502
// Registers the handler above for the --dwarf_stack_traces flag.
// NOTE(review): the DEFINE_FLAG_HANDLER(DwarfStackTracesHandler, ... opening
// line (26503) is missing from this extraction; only the flag name and help
// text arguments remain below.
26504 dwarf_stack_traces,
26505 "Omit CodeSourceMaps in precompiled snapshots and don't "
26506 "symbolize stack traces in the precompiled runtime.");
26507
26508SuspendStatePtr SuspendState::New(intptr_t frame_size,
26509 const Instance& function_data,
26510 Heap::Space space) {
26511 ASSERT(frame_size >= 0);
26512 const intptr_t num_elements = frame_size + SuspendState::FrameSizeGrowthGap();
26513#if !defined(DART_PRECOMPILED_RUNTIME)
26514 // Include heap object alignment overhead into the frame capacity.
26515 const intptr_t instance_size = SuspendState::InstanceSize(num_elements);
26516 const intptr_t frame_capacity =
26517 instance_size - SuspendState::payload_offset();
26518 ASSERT(SuspendState::InstanceSize(frame_capacity) == instance_size);
26519 ASSERT(frame_size <= frame_capacity);
26520#endif
26521 auto raw = Object::Allocate<SuspendState>(space, num_elements);
26522 NoSafepointScope no_safepoint;
26523 ASSERT_EQUAL(raw->untag()->pc_, 0);
26524#if !defined(DART_PRECOMPILED_RUNTIME)
26525 raw->untag()->frame_capacity_ = frame_capacity;
26526#endif
26527 raw->untag()->frame_size_ = frame_size;
26528 raw->untag()->set_function_data(function_data.ptr());
26529 return raw;
26530}
26531
// Makes a deep-enough copy of `src`: a new SuspendState with the same frame
// bytes, callbacks and pc, fixing up the frame's :suspend_state slot to point
// at the copy.
// NOTE(review): original lines 26538 (the `dst` Handle declaration) and 26550
// (part of the :suspend_state slot address computation) are missing from this
// extraction; verify against upstream object.cc before editing.
26532SuspendStatePtr SuspendState::Clone(Thread* thread,
26533 const SuspendState& src,
26534 Heap::Space space) {
26535 ASSERT(src.pc() != 0);
26536 Zone* zone = thread->zone();
26537 const intptr_t frame_size = src.frame_size();
26539 zone,
26540 SuspendState::New(frame_size, Instance::Handle(zone, src.function_data()),
26541 space));
26542 dst.set_then_callback(Closure::Handle(zone, src.then_callback()));
26543 dst.set_error_callback(Closure::Handle(zone, src.error_callback()));
26544 {
26545 NoSafepointScope no_safepoint;
26546 memmove(dst.payload(), src.payload(), frame_size);
26547 // Update value of :suspend_state variable in the copied frame.
26548 const uword fp = reinterpret_cast<uword>(dst.payload() + frame_size);
26549 *reinterpret_cast<ObjectPtr*>(
26551 kSuspendStateVarIndex))) = dst.ptr();
26552 dst.set_pc(src.pc());
26553 // Trigger write barrier if needed.
26554 if (dst.ptr()->IsOldObject()) {
26555 dst.untag()->EnsureInRememberedSet(thread);
26556 }
26557 if (thread->is_marking()) {
26558 thread->DeferredMarkingStackAddObject(dst.ptr());
26559 }
26560 }
26561 return dst.ptr();
26562}
26563
26564#if !defined(DART_PRECOMPILED_RUNTIME)
26565void SuspendState::set_frame_capacity(intptr_t frame_capcity) const {
26566 ASSERT(frame_capcity >= 0);
26567 StoreNonPointer(&untag()->frame_capacity_, frame_capcity);
26568}
26569#endif
26570
26571void SuspendState::set_frame_size(intptr_t frame_size) const {
26572 ASSERT(frame_size >= 0);
26573 StoreNonPointer(&untag()->frame_size_, frame_size);
26574}
26575
26576void SuspendState::set_pc(uword pc) const {
26577 StoreNonPointer(&untag()->pc_, pc);
26578}
26579
26580void SuspendState::set_function_data(const Instance& function_data) const {
26581 untag()->set_function_data(function_data.ptr());
26582}
26583
26584void SuspendState::set_then_callback(const Closure& then_callback) const {
26585 untag()->set_then_callback(then_callback.ptr());
26586}
26587
26588void SuspendState::set_error_callback(const Closure& error_callback) const {
26589 untag()->set_error_callback(error_callback.ptr());
26590}
26591
26592const char* SuspendState::ToCString() const {
26593 return "SuspendState";
26594}
26595
// Returns the Code object of the suspended frame: via reverse-pc lookup in
// AOT, or from the saved slot just past the frame payload in JIT mode.
// NOTE(review): the signature line (26596, presumably
// `CodePtr SuspendState::GetCodeObject() const {`) and lines 26600/26607 are
// missing from this extraction; verify against upstream object.cc.
26597 ASSERT(pc() != 0);
26598#if defined(DART_PRECOMPILED_RUNTIME)
26599 NoSafepointScope no_safepoint;
26601 /*is_return_address=*/true);
26602 ASSERT(code != Code::null());
26603 return code;
26604#else
26605 ObjectPtr code = *(reinterpret_cast<ObjectPtr*>(
26606 untag()->payload() + untag()->frame_size_ +
26608 return Code::RawCast(code);
26609#endif // defined(DART_PRECOMPILED_RUNTIME)
26610}
26611
26612void RegExp::set_pattern(const String& pattern) const {
26613 untag()->set_pattern(pattern.ptr());
26614}
26615
// Stores a specialized match function for the given string cid and stickiness.
// NOTE(review): the signature line (26616, presumably
// `void RegExp::set_function(intptr_t cid,`) is missing from this extraction;
// verify against upstream object.cc.
26617 bool sticky,
26618 const Function& value) const {
26619 if (sticky) {
26620 switch (cid) {
26621 case kOneByteStringCid:
26622 return untag()->set_one_byte_sticky(value.ptr());
26623 case kTwoByteStringCid:
26624 return untag()->set_two_byte_sticky(value.ptr());
26625 }
26626 } else {
26627 switch (cid) {
26628 case kOneByteStringCid:
26629 return untag()->set_one_byte(value.ptr());
26630 case kTwoByteStringCid:
26631 return untag()->set_two_byte(value.ptr());
26632 }
26633 }
26634}
26635
26636void RegExp::set_bytecode(bool is_one_byte,
26637 bool sticky,
26638 const TypedData& bytecode) const {
26639 if (sticky) {
26640 if (is_one_byte) {
26641 untag()->set_one_byte_sticky<std::memory_order_release>(bytecode.ptr());
26642 } else {
26643 untag()->set_two_byte_sticky<std::memory_order_release>(bytecode.ptr());
26644 }
26645 } else {
26646 if (is_one_byte) {
26647 untag()->set_one_byte<std::memory_order_release>(bytecode.ptr());
26648 } else {
26649 untag()->set_two_byte<std::memory_order_release>(bytecode.ptr());
26650 }
26651 }
26652}
26653
// Records the number of capturing bracket expressions in the pattern.
// NOTE(review): the signature line (26654) is missing from this extraction;
// verify against upstream object.cc.
26655 untag()->num_bracket_expressions_ = value;
26656}
26657
26658void RegExp::set_capture_name_map(const Array& array) const {
26659 untag()->set_capture_name_map(array.ptr());
26660}
26661
26662RegExpPtr RegExp::New(Zone* zone, Heap::Space space) {
26663 const auto& result = RegExp::Handle(Object::Allocate<RegExp>(space));
26664 ASSERT_EQUAL(result.type(), kUninitialized);
26665 ASSERT(result.flags() == RegExpFlags());
26666 result.set_num_bracket_expressions(-1);
26667 result.set_num_registers(/*is_one_byte=*/false, -1);
26668 result.set_num_registers(/*is_one_byte=*/true, -1);
26669
26670 if (!FLAG_interpret_irregexp) {
26671 auto thread = Thread::Current();
26672 const Library& lib = Library::Handle(zone, Library::CoreLibrary());
26673 const Class& owner =
26674 Class::Handle(zone, lib.LookupClass(Symbols::RegExp()));
26675
26676 for (intptr_t cid = kOneByteStringCid; cid <= kTwoByteStringCid; cid++) {
26677 CreateSpecializedFunction(thread, zone, result, cid, /*sticky=*/false,
26678 owner);
26679 CreateSpecializedFunction(thread, zone, result, cid, /*sticky=*/true,
26680 owner);
26681 }
26682 }
26683 return result.ptr();
26684}
26685
26686const char* RegExpFlags::ToCString() const {
26687 switch (value_ & ~kGlobal) {
26688 case kIgnoreCase | kMultiLine | kDotAll | kUnicode:
26689 return "imsu";
26690 case kIgnoreCase | kMultiLine | kDotAll:
26691 return "ims";
26692 case kIgnoreCase | kMultiLine | kUnicode:
26693 return "imu";
26694 case kIgnoreCase | kUnicode | kDotAll:
26695 return "ius";
26696 case kMultiLine | kDotAll | kUnicode:
26697 return "msu";
26698 case kIgnoreCase | kMultiLine:
26699 return "im";
26700 case kIgnoreCase | kDotAll:
26701 return "is";
26702 case kIgnoreCase | kUnicode:
26703 return "iu";
26704 case kMultiLine | kDotAll:
26705 return "ms";
26706 case kMultiLine | kUnicode:
26707 return "mu";
26708 case kDotAll | kUnicode:
26709 return "su";
26710 case kIgnoreCase:
26711 return "i";
26712 case kMultiLine:
26713 return "m";
26714 case kDotAll:
26715 return "s";
26716 case kUnicode:
26717 return "u";
26718 default:
26719 break;
26720 }
26721 return "";
26722}
26723
26724bool RegExp::CanonicalizeEquals(const Instance& other) const {
26725 if (this->ptr() == other.ptr()) {
26726 return true; // "===".
26727 }
26728 if (other.IsNull() || !other.IsRegExp()) {
26729 return false;
26730 }
26731 const RegExp& other_js = RegExp::Cast(other);
26732 // Match the pattern.
26733 const String& str1 = String::Handle(pattern());
26734 const String& str2 = String::Handle(other_js.pattern());
26735 if (!str1.Equals(str2)) {
26736 return false;
26737 }
26738 // Match the flags.
26739 if (flags() != other_js.flags()) {
26740 return false;
26741 }
26742 return true;
26743}
26744
// Hash over (pattern, flags), kept in sync with RegExpKey::Hash so canonical
// table lookups agree with CanonicalizeEquals above.
// NOTE(review): the signature line (26745) is missing from this extraction;
// verify against upstream object.cc.
26746 // Must agree with RegExpKey::Hash.
26747 return CombineHashes(String::Hash(pattern()), flags().value());
26748}
26749
26750const char* RegExp::ToCString() const {
26751 const String& str = String::Handle(pattern());
26752 return OS::SCreate(Thread::Current()->zone(), "RegExp: pattern=%s flags=%s",
26753 str.ToCString(), flags().ToCString());
26754}
26755
26756WeakPropertyPtr WeakProperty::New(Heap::Space space) {
26757 ASSERT(IsolateGroup::Current()->object_store()->weak_property_class() !=
26758 Class::null());
26759 return Object::Allocate<WeakProperty>(space);
26760}
26761
26762const char* WeakProperty::ToCString() const {
26763 return "_WeakProperty";
26764}
26765
26766WeakReferencePtr WeakReference::New(Heap::Space space) {
26767 ASSERT(IsolateGroup::Current()->object_store()->weak_reference_class() !=
26768 Class::null());
26769 return Object::Allocate<WeakReference>(space);
26770}
// Debug description including the user-visible type arguments.
// NOTE(review): line 26772 (presumably the `type_args` Handle declaration)
// is missing from this extraction; verify against upstream object.cc.
26771const char* WeakReference::ToCString() const {
26773 String& type_args_name = String::Handle(type_args.UserVisibleName());
26774 return OS::SCreate(Thread::Current()->zone(), "_WeakReference%s",
26775 type_args_name.ToCString());
26776}
26777
26778const char* FinalizerBase::ToCString() const {
26779 return "FinalizerBase";
26780}
26781
26782FinalizerPtr Finalizer::New(Heap::Space space) {
26783 ASSERT(IsolateGroup::Current()->object_store()->finalizer_class() !=
26784 Class::null());
26785 ASSERT(
26786 Class::Handle(IsolateGroup::Current()->object_store()->finalizer_class())
26787 .EnsureIsAllocateFinalized(Thread::Current()) == Error::null());
26788 return Object::Allocate<Finalizer>(space);
26789}
26790
// Debug description including the user-visible type arguments.
// NOTE(review): line 26792 (presumably the `type_args` Handle declaration)
// is missing from this extraction; verify against upstream object.cc.
26791const char* Finalizer::ToCString() const {
26793 String& type_args_name = String::Handle(type_args.UserVisibleName());
26794 return OS::SCreate(Thread::Current()->zone(), "_FinalizerImpl%s",
26795 type_args_name.ToCString());
26796}
26797
// Allocates a NativeFinalizer; the class must be registered and
// allocate-finalized.
// NOTE(review): line 26801 (the opening of the second ASSERT, presumably
// `ASSERT(Class::Handle(`) is missing from this extraction; verify against
// upstream object.cc.
26798NativeFinalizerPtr NativeFinalizer::New(Heap::Space space) {
26799 ASSERT(IsolateGroup::Current()->object_store()->native_finalizer_class() !=
26800 Class::null());
26802 IsolateGroup::Current()->object_store()->native_finalizer_class())
26803 .EnsureIsAllocateFinalized(Thread::Current()) == Error::null());
26804 return Object::Allocate<NativeFinalizer>(space);
26805}
26806
26807// Runs the finalizer if not detached, detaches the value, and sets the
26808// external size to 0.
26809// TODO(http://dartbug.com/47777): Can this be merged with
26810// RunNativeFinalizerCallback?
// Runs the native callback for `entry` unless the entry was already detached,
// then self-tokens the entry (marking it detached) and returns its tracked
// external size to the heap accounting.
// NOTE(review): the signature's first line (26811, presumably
// `void NativeFinalizer::RunCallback(const FinalizerEntry& entry,`) is
// missing from this extraction; verify against upstream object.cc.
 26812 const char* trace_context) const {
 26813 Thread* const thread = Thread::Current();
 26814 Zone* const zone = thread->zone();
 26815 IsolateGroup* const group = thread->isolate_group();
 26816 const intptr_t external_size = entry.external_size();
 26817 const auto& token_object = Object::Handle(zone, entry.token());
 26818 const auto& callback_pointer = Pointer::Handle(zone, this->callback());
 26819 const auto callback = reinterpret_cast<NativeFinalizer::Callback>(
 26820 callback_pointer.NativeAddress());
// A token that is the entry itself means "already detached": skip the call.
 26821 if (token_object.IsFinalizerEntry()) {
 26822 // Detached from Dart code.
 26823 ASSERT(token_object.ptr() == entry.ptr());
 26824 ASSERT(external_size == 0);
 26825 if (FLAG_trace_finalizers) {
 26826 THR_Print(
 26827 "%s: Not running native finalizer %p callback %p, "
 26828 "detached\n",
 26829 trace_context, ptr()->untag(), callback);
 26830 }
 26831 } else {
 26832 const auto& token = Pointer::Cast(token_object);
 26833 void* peer = reinterpret_cast<void*>(token.NativeAddress());
 26834 if (FLAG_trace_finalizers) {
 26835 THR_Print(
 26836 "%s: Running native finalizer %p callback %p "
 26837 "with token %p\n",
 26838 trace_context, ptr()->untag(), callback, peer);
 26839 }
// Mark detached (token := entry) before invoking the callback.
 26840 entry.set_token(entry);
 26841 callback(peer);
 26842 if (external_size > 0) {
 26843 ASSERT(!entry.value()->IsSmi());
 26844 Heap::Space space =
 26845 entry.value()->IsOldObject() ? Heap::kOld : Heap::kNew;
 26846 if (FLAG_trace_finalizers) {
 26847 THR_Print("%s: Clearing external size %" Pd " bytes in %s space\n",
 26848 trace_context, external_size, space == 0 ? "new" : "old");
 26849 }
 26850 group->heap()->FreedExternal(external_size, space);
 26851 entry.set_external_size(0);
 26852 }
 26853 }
 26854}
26855
26856const char* NativeFinalizer::ToCString() const {
26857 const auto& pointer = Pointer::Handle(callback());
26858 return OS::SCreate(Thread::Current()->zone(), "_NativeFinalizer %s",
26859 pointer.ToCString());
26860}
26861
26862FinalizerEntryPtr FinalizerEntry::New(const FinalizerBase& finalizer,
26863 Heap::Space space) {
26864 ASSERT(IsolateGroup::Current()->object_store()->finalizer_entry_class() !=
26865 Class::null());
26866 const auto& entry =
26867 FinalizerEntry::Handle(Object::Allocate<FinalizerEntry>(space));
26868 ASSERT_EQUAL(entry.external_size(), 0);
26869 entry.set_finalizer(finalizer);
26870 return entry.ptr();
26871}
26872
// Stores the back-reference to the owning finalizer.
// NOTE(review): the signature line (26873) is missing from this extraction;
// verify against upstream object.cc.
26874 untag()->set_finalizer(value.ptr());
26875}
26876
26877const char* FinalizerEntry::ToCString() const {
26878 return "FinalizerEntry";
26879}
26880
// Returns the referent, asserting it is an AbstractType.
// NOTE(review): the signature line (26881) is missing from this extraction.
26882 ASSERT(Object::Handle(referent()).IsAbstractType());
26883 return AbstractType::Cast(Object::Handle(referent())).ptr();
26884}
26885
// Returns the referent, asserting it is a Class.
// NOTE(review): the signature line (26886) is missing from this extraction.
26887 ASSERT(Object::Handle(referent()).IsClass());
26888 return Class::Cast(Object::Handle(referent())).ptr();
26889}
26890
// Returns the referent, asserting it is a Field.
// NOTE(review): the signature line (26891) is missing from this extraction.
26892 ASSERT(Object::Handle(referent()).IsField());
26893 return Field::Cast(Object::Handle(referent())).ptr();
26894}
26895
// Returns the referent, asserting it is a Function.
// NOTE(review): the signature line (26896) is missing from this extraction.
26897 ASSERT(Object::Handle(referent()).IsFunction());
26898 return Function::Cast(Object::Handle(referent())).ptr();
26899}
26900
// Returns the referent, asserting it is a FunctionType.
// NOTE(review): the signature line (26901) is missing from this extraction.
26902 ASSERT(Object::Handle(referent()).IsFunctionType());
26903 return FunctionType::Cast(Object::Handle(referent())).ptr();
26904}
26905
// Returns the referent, asserting it is a Library.
// NOTE(review): the signature line (26906) is missing from this extraction.
26907 ASSERT(Object::Handle(referent()).IsLibrary());
26908 return Library::Cast(Object::Handle(referent())).ptr();
26909}
26910
// Returns the referent, asserting it is a TypeParameter.
// NOTE(review): the signature line (26911) is missing from this extraction.
26912 ASSERT(Object::Handle(referent()).IsTypeParameter());
26913 return TypeParameter::Cast(Object::Handle(referent())).ptr();
26914}
26915
26916MirrorReferencePtr MirrorReference::New(const Object& referent,
26917 Heap::Space space) {
26918 const auto& result =
26919 MirrorReference::Handle(Object::Allocate<MirrorReference>(space));
26920 result.set_referent(referent);
26921 return result.ptr();
26922}
26923
26924const char* MirrorReference::ToCString() const {
26925 return "_MirrorReference";
26926}
26927
// Makes this tag the isolate's current tag and returns the previously active
// tag; outside PRODUCT builds it also notifies VM service profiler clients.
// NOTE(review): lines 26937 and 26942 (presumably the ServiceEvent
// construction and its dispatch) are missing from this extraction; verify
// against upstream object.cc.
26928UserTagPtr UserTag::MakeActive() const {
26929 Isolate* isolate = Isolate::Current();
26930 ASSERT(isolate != nullptr);
26931 UserTag& old = UserTag::Handle(isolate->current_tag());
26932 isolate->set_current_tag(*this);
26933
26934#if !defined(PRODUCT)
26935 // Notify VM service clients that the current UserTag has changed.
26936 if (Service::profiler_stream.enabled()) {
26938 String& name = String::Handle(old.label());
26939 event.set_previous_tag(name.ToCString());
26940 name ^= label();
26941 event.set_updated_tag(name.ToCString());
26943 }
26944#endif // !defined(PRODUCT)
26945
26946 return old.ptr();
26947}
26948
// Returns the canonical UserTag for `label` in the current isolate, creating
// and registering a new one if necessary; throws if the per-isolate tag
// limit is reached.
// NOTE(review): lines 26952, 26960 and 26964 (an ASSERT, the error-String
// construction and the exception throw) are missing from this extraction;
// verify against upstream object.cc.
26949UserTagPtr UserTag::New(const String& label, Heap::Space space) {
26950 Thread* thread = Thread::Current();
26951 Isolate* isolate = thread->isolate();
26953 // Canonicalize by name.
26954 UserTag& result = UserTag::Handle(FindTagInIsolate(thread, label));
26955 if (!result.IsNull()) {
26956 // Tag already exists, return existing instance.
26957 return result.ptr();
26958 }
26959 if (TagTableIsFull(thread)) {
26961 "UserTag instance limit (%" Pd ") reached.", UserTags::kMaxUserTags));
26962 const Array& args = Array::Handle(Array::New(1));
26963 args.SetAt(0, error);
26965 }
26966 // No tag with label exists, create and register with isolate tag table.
26967 result = Object::Allocate<UserTag>(space);
26968 result.set_label(label);
26969 result.set_streamable(UserTags::IsTagNameStreamable(label.ToCString()));
26970 AddTagToIsolate(thread, result);
26971 return result.ptr();
26972}
26973
// Returns the isolate's "Default" tag, creating and caching it on first use.
// NOTE(review): the signature line (26974, presumably
// `UserTagPtr UserTag::DefaultTag() {`) and line 26986 are missing from this
// extraction; verify against upstream object.cc.
26975 Thread* thread = Thread::Current();
26976 Zone* zone = thread->zone();
26977 Isolate* isolate = thread->isolate();
26978 ASSERT(isolate != nullptr);
26979 if (isolate->default_tag() != UserTag::null()) {
26980 // Already created.
26981 return isolate->default_tag();
26982 }
26983 // Create default tag.
26984 const UserTag& result =
26985 UserTag::Handle(zone, UserTag::New(Symbols::Default()));
26987 isolate->set_default_tag(result);
26988 return result.ptr();
26989}
26990
// Linear search of the isolate's tag table for a tag whose label equals
// `label`; returns null if the table does not exist or holds no match.
// NOTE(review): the signature's first line (26991, presumably
// `UserTagPtr UserTag::FindTagInIsolate(Isolate* isolate,`) is missing from
// this extraction; verify against upstream object.cc.
26992 Thread* thread,
26993 const String& label) {
26994 Zone* zone = thread->zone();
26995 if (isolate->tag_table() == GrowableObjectArray::null()) {
26996 return UserTag::null();
26997 }
26998 const GrowableObjectArray& tag_table =
26999 GrowableObjectArray::Handle(zone, isolate->tag_table());
27000 UserTag& other = UserTag::Handle(zone);
27001 String& tag_label = String::Handle(zone);
27002 for (intptr_t i = 0; i < tag_table.Length(); i++) {
27003 other ^= tag_table.At(i);
27004 ASSERT(!other.IsNull());
27005 tag_label = other.label();
27006 ASSERT(!tag_label.IsNull());
27007 if (tag_label.Equals(label)) {
27008 return other.ptr();
27009 }
27010 }
27011 return UserTag::null();
27012}
27013
27014UserTagPtr UserTag::FindTagInIsolate(Thread* thread, const String& label) {
27015 Isolate* isolate = thread->isolate();
27016 return FindTagInIsolate(isolate, thread, label);
27017}
27018
// Assigns `tag` a fresh tag id (table length + kUserTagIdOffset) and appends
// it to the isolate's tag table; the table must exist and not be full.
// NOTE(review): lines 27038-27039 (presumably ASSERTs bounding tag_id) are
// missing from this extraction; verify against upstream object.cc.
27019void UserTag::AddTagToIsolate(Thread* thread, const UserTag& tag) {
27020 Isolate* isolate = thread->isolate();
27021 Zone* zone = thread->zone();
27022 ASSERT(isolate->tag_table() != GrowableObjectArray::null());
27023 const GrowableObjectArray& tag_table =
27024 GrowableObjectArray::Handle(zone, isolate->tag_table());
27025 ASSERT(!TagTableIsFull(thread));
27026#if defined(DEBUG)
27027 // Verify that no existing tag has the same tag id.
27028 UserTag& other = UserTag::Handle(thread->zone());
27029 for (intptr_t i = 0; i < tag_table.Length(); i++) {
27030 other ^= tag_table.At(i);
27031 ASSERT(!other.IsNull());
27032 ASSERT(tag.tag() != other.tag());
27033 }
27034#endif
27035 // Generate the UserTag tag id by taking the length of the isolate's
27036 // tag table + kUserTagIdOffset.
27037 uword tag_id = tag_table.Length() + UserTags::kUserTagIdOffset;
27040 tag.set_tag(tag_id);
27041 tag_table.Add(tag);
27042}
27043
// True when the isolate's tag table has reached kMaxUserTags entries.
// NOTE(review): the signature line (27044, presumably
// `bool UserTag::TagTableIsFull(Thread* thread) {`) and line 27046 are
// missing from this extraction; verify against upstream object.cc.
27045 Isolate* isolate = thread->isolate();
27047 const GrowableObjectArray& tag_table =
27048 GrowableObjectArray::Handle(thread->zone(), isolate->tag_table());
27049 ASSERT(tag_table.Length() <= UserTags::kMaxUserTags);
27050 return tag_table.Length() == UserTags::kMaxUserTags;
27051}
27052
// Linear search of the isolate's tag table by numeric tag id; returns null
// when no tag carries `tag_id`.
// NOTE(review): line 27057 is missing from this extraction; verify against
// upstream object.cc.
27053UserTagPtr UserTag::FindTagById(const Isolate* isolate, uword tag_id) {
27054 ASSERT(isolate != nullptr);
27055 Thread* thread = Thread::Current();
27056 Zone* zone = thread->zone();
27058 const GrowableObjectArray& tag_table =
27059 GrowableObjectArray::Handle(zone, isolate->tag_table());
27060 UserTag& tag = UserTag::Handle(zone);
27061 for (intptr_t i = 0; i < tag_table.Length(); i++) {
27062 tag ^= tag_table.At(i);
27063 if (tag.tag() == tag_id) {
27064 return tag.ptr();
27065 }
27066 }
27067 return UserTag::null();
27068}
27069
27070const char* UserTag::ToCString() const {
27071 const String& tag_label = String::Handle(label());
27072 return tag_label.ToCString();
27073}
27074
27075void DumpTypeTable(Isolate* isolate) {
27076 OS::PrintErr("canonical types:\n");
27077 CanonicalTypeSet table(isolate->group()->object_store()->canonical_types());
27078 table.Dump();
27079 table.Release();
27080}
27081
// Dumps the canonical function-types table to stderr (debugging aid).
// NOTE(review): the signature line (27082) and the table-construction line
// (27084) are missing from this extraction; verify against upstream.
27083 OS::PrintErr("canonical function types:\n");
27085 isolate->group()->object_store()->canonical_function_types());
27086 table.Dump();
27087 table.Release();
27088}
27089
// Dumps the canonical record-types table to stderr (debugging aid).
// NOTE(review): the signature line (27090) and the table-construction line
// (27092) are missing from this extraction; verify against upstream.
27091 OS::PrintErr("canonical record types:\n");
27093 isolate->group()->object_store()->canonical_record_types());
27094 table.Dump();
27095 table.Release();
27096}
27097
// Dumps the canonical type-parameters table to stderr (debugging aid).
// NOTE(review): the signature line (27098) and the table-construction line
// (27100) are missing from this extraction; verify against upstream.
27099 OS::PrintErr("canonical type parameters (cloned from declarations):\n");
27101 isolate->group()->object_store()->canonical_type_parameters());
27102 table.Dump();
27103 table.Release();
27104}
27105
// Dumps the canonical type-arguments table to stderr (debugging aid).
// NOTE(review): the signature line (27106) and the table-construction line
// (27108) are missing from this extraction; verify against upstream.
27107 OS::PrintErr("canonical type arguments:\n");
27109 isolate->group()->object_store()->canonical_type_arguments());
27110 table.Dump();
27111 table.Release();
27112}
27113
// Scans `metadata` for a pragma instance named vm:entry-point (or the
// dyn-module variants) and classifies its options value (true/null, "get",
// "set", "call") into an EntryPointPragma kind.
// NOTE(review): the signature's first line (27114) and the return-statement
// lines (27134, 27138, 27141, 27144, 27147 — the per-kind `return` results)
// are missing from this extraction; verify against upstream object.cc.
27115 const Array& metadata,
27116 Field* reusable_field_handle,
27117 Object* pragma) {
27118 for (intptr_t i = 0; i < metadata.Length(); i++) {
27119 *pragma = metadata.At(i);
27120 if (pragma->clazz() != IG->object_store()->pragma_class()) {
27121 continue;
27122 }
27123 *reusable_field_handle = IG->object_store()->pragma_name();
27124 const auto pragma_name =
27125 Instance::Cast(*pragma).GetField(*reusable_field_handle);
27126 if ((pragma_name != Symbols::vm_entry_point().ptr()) &&
27127 (pragma_name != Symbols::dyn_module_callable().ptr()) &&
27128 (pragma_name != Symbols::dyn_module_extendable().ptr())) {
27129 continue;
27130 }
27131 *reusable_field_handle = IG->object_store()->pragma_options();
27132 *pragma = Instance::Cast(*pragma).GetField(*reusable_field_handle);
27133 if (pragma->ptr() == Bool::null() || pragma->ptr() == Bool::True().ptr()) {
27135 break;
27136 }
27137 if (pragma->ptr() == Symbols::get().ptr()) {
27139 }
27140 if (pragma->ptr() == Symbols::set().ptr()) {
27142 }
27143 if (pragma->ptr() == Symbols::call().ptr()) {
27145 }
27146 }
27148}
27149
// Checks whether `member` may legally be accessed through the Dart C API:
// in AOT it falls back to has_pragma() (annotations are discarded); otherwise
// it inspects the metadata for an entry-point pragma of an allowed kind.
// Returns Error::null() on success, an invocation error otherwise.
// NOTE(review): the signature's opening lines (27150-27151, presumably
// `static ErrorPtr VerifyEntryPoint(`) and line 27178 (the trailing
// FindEntryPointPragma arguments) are missing from this extraction; verify
// against upstream object.cc.
27152 const Library& lib,
27153 const Object& member,
27154 const Object& annotated,
27155 std::initializer_list<EntryPointPragma> allowed_kinds) {
27156#if defined(DART_PRECOMPILED_RUNTIME)
27157 // Annotations are discarded in the AOT snapshot, so we can't determine
27158 // precisely if this member was marked as an entry-point. Instead, we use
27159 // "has_pragma()" as a proxy, since that bit is usually retained.
27160 bool is_marked_entrypoint = true;
27161 if (annotated.IsClass() && !Class::Cast(annotated).has_pragma()) {
27162 is_marked_entrypoint = false;
27163 } else if (annotated.IsField() && !Field::Cast(annotated).has_pragma()) {
27164 is_marked_entrypoint = false;
27165 } else if (annotated.IsFunction() &&
27166 !Function::Cast(annotated).has_pragma()) {
27167 is_marked_entrypoint = false;
27168 }
27169#else
27170 Object& metadata = Object::Handle(Object::empty_array().ptr());
27171 if (!annotated.IsNull()) {
27172 metadata = lib.GetMetadata(annotated);
27173 }
27174 if (metadata.IsError()) return Error::RawCast(metadata.ptr());
27175 ASSERT(!metadata.IsNull() && metadata.IsArray());
27176 EntryPointPragma pragma =
27177 FindEntryPointPragma(IsolateGroup::Current(), Array::Cast(metadata),
27179 bool is_marked_entrypoint = pragma == EntryPointPragma::kAlways;
27180 if (!is_marked_entrypoint) {
27181 for (const auto allowed_kind : allowed_kinds) {
27182 if (pragma == allowed_kind) {
27183 is_marked_entrypoint = true;
27184 break;
27185 }
27186 }
27187 }
27188#endif
27189 if (!is_marked_entrypoint) {
27190 return EntryPointMemberInvocationError(member);
27191 }
27192 return Error::null();
27193}
27194
27196ErrorPtr EntryPointFieldInvocationError(const String& getter_name) {
27197 if (!FLAG_verify_entry_points) return Error::null();
27198
27199 char const* error = OS::SCreate(
27200 Thread::Current()->zone(),
27201 "ERROR: Entry-points do not allow invoking fields "
27202 "(failure to resolve '%s')\n"
27203 "ERROR: See "
27204 "https://github.com/dart-lang/sdk/blob/master/runtime/docs/compiler/"
27205 "aot/entry_point_pragma.md\n",
27206 getter_name.ToCString());
27207 OS::PrintErr("%s", error);
27208 return ApiError::New(String::Handle(String::New(error)));
27209}
27210
// Reports illegal C-API access to `member`: a warning (and no error) when
// entry-point verification is disabled, otherwise an ApiError.
// NOTE(review): the signature lines (27211-27212, presumably
// `ErrorPtr EntryPointMemberInvocationError(const Object& member) {`) are
// missing from this extraction; verify against upstream object.cc.
27213 const char* member_cstring =
27214 member.IsFunction()
27215 ? OS::SCreate(
27216 Thread::Current()->zone(), "%s (kind %s)",
27217 Function::Cast(member).ToLibNamePrefixedQualifiedCString(),
27218 Function::KindToCString(Function::Cast(member).kind()))
27219 : member.ToCString();
27220 if (!FLAG_verify_entry_points) {
27221 // Print a warning, but do not return an error.
27222 char const* warning = OS::SCreate(
27223 Thread::Current()->zone(),
27224 "WARNING: '%s' is accessed through Dart C API without being marked as "
27225 "an entry point; its tree-shaken signature cannot be verified.\n"
27226 "WARNING: See "
27227 "https://github.com/dart-lang/sdk/blob/master/runtime/docs/compiler/"
27228 "aot/entry_point_pragma.md\n",
27229 member_cstring);
27230 OS::PrintErr("%s", warning);
27231 return Error::null();
27232 }
27233 char const* error = OS::SCreate(
27234 Thread::Current()->zone(),
27235 "ERROR: It is illegal to access '%s' through Dart C API.\n"
27236 "ERROR: See "
27237 "https://github.com/dart-lang/sdk/blob/master/runtime/docs/compiler/"
27238 "aot/entry_point_pragma.md\n",
27239 member_cstring);
27240 OS::PrintErr("%s", error);
27241 return ApiError::New(String::Handle(String::New(error)));
27242}
27243
27244#if !defined(DART_PRECOMPILED_RUNTIME)
27245// Note: see also [NeedsDynamicInvocationForwarder] which ensures that we
27246// never land in a function which expects parameters in registers from a
27247// dynamic call site.
// Computes how many leading fixed parameters this function may receive in
// registers under the AOT register calling convention; returns 0 whenever
// the stack convention must be used.
// NOTE(review): the signature line (27248, presumably
// `intptr_t Function::MaxNumberOfParametersInRegisters(Zone* zone) const {`)
// is missing from this extraction; verify against upstream object.cc.
27249#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_ARM64) || \
27250 defined(TARGET_ARCH_ARM)
27251 if (!FLAG_precompiled_mode) {
27252 return 0;
27253 }
27254
27255 if (!FLAG_use_register_cc) {
27256 return 0;
27257 }
27258
27259 if (IsGeneric()) {
27260 return 0;
27261 }
27262
27263 switch (kind()) {
27264 case UntaggedFunction::kClosureFunction:
27265 case UntaggedFunction::kImplicitClosureFunction:
27266 case UntaggedFunction::kNoSuchMethodDispatcher:
27267 case UntaggedFunction::kInvokeFieldDispatcher:
27268 case UntaggedFunction::kDynamicInvocationForwarder:
27269 case UntaggedFunction::kMethodExtractor:
27270 case UntaggedFunction::kFfiTrampoline:
27271 case UntaggedFunction::kFieldInitializer:
27272 case UntaggedFunction::kIrregexpFunction:
27273 return 0;
27274
27275 default:
27276 break;
27277 }
27278
27279 const auto unboxing_metadata = kernel::UnboxingInfoMetadataOf(*this, zone);
27280 if (unboxing_metadata != nullptr &&
27281 unboxing_metadata->must_use_stack_calling_convention) {
27282 return 0;
27283 }
27284
27285 // Getters and setters have fixed signatures.
27286 switch (kind()) {
27287 case UntaggedFunction::kGetterFunction:
27288 case UntaggedFunction::kImplicitGetter:
27289 case UntaggedFunction::kSetterFunction:
27290 case UntaggedFunction::kImplicitSetter:
27291 return num_fixed_parameters();
27292
27293 default:
27294 break;
27295 }
27296
27297 if (unboxing_metadata != nullptr &&
27298 unboxing_metadata->has_overrides_with_less_direct_parameters) {
27299 // Receiver (`this`) can always be passed in the register because it is
27300 // never an optional or named parameter.
27301 return unboxing_metadata->unboxed_args_info.length() + 1;
27302 }
27303
27304 return num_fixed_parameters();
27305#endif
27306 return 0;
27307}
27308#endif // !defined(DART_PRECOMPILED_RUNTIME)
27309
// Verifies this function may be accessed via the Dart C API, dispatching on
// the function kind to check the appropriate annotated object (itself, its
// accessor field, or an extracted method closure).
// NOTE(review): the signature line (27310) and the per-case allowed-kind
// argument lines (27320, 27323, 27325, 27329, 27333) are missing from this
// extraction; verify against upstream object.cc.
27311 if (!FLAG_verify_entry_points) return Error::null();
27312
27313 const Class& cls = Class::Handle(Owner());
27314 const Library& lib = Library::Handle(cls.library());
27315 switch (kind()) {
27316 case UntaggedFunction::kRegularFunction:
27317 case UntaggedFunction::kSetterFunction:
27318 case UntaggedFunction::kConstructor:
27319 return dart::VerifyEntryPoint(lib, *this, *this,
27321 break;
27322 case UntaggedFunction::kGetterFunction:
27324 lib, *this, *this,
27326 break;
27327 case UntaggedFunction::kImplicitGetter:
27328 return dart::VerifyEntryPoint(lib, *this, Field::Handle(accessor_field()),
27330 break;
27331 case UntaggedFunction::kImplicitSetter:
27332 return dart::VerifyEntryPoint(lib, *this, Field::Handle(accessor_field()),
27334 case UntaggedFunction::kMethodExtractor:
27335 return Function::Handle(extracted_method_closure())
27336 .VerifyClosurizedEntryPoint();
27337 break;
27338 default:
27339 return dart::VerifyEntryPoint(lib, *this, Object::Handle(), {});
27340 break;
27341 }
27342}
27343
// Verifies a closurized function (tear-off) may be accessed via the Dart C
// API; implicit closures are checked against their parent function.
// NOTE(review): the signature line (27344) and the allowed-kind argument
// lines (27352, 27356) are missing from this extraction; verify against
// upstream object.cc.
27345 if (!FLAG_verify_entry_points) return Error::null();
27346
27347 const Class& cls = Class::Handle(Owner());
27348 const Library& lib = Library::Handle(cls.library());
27349 switch (kind()) {
27350 case UntaggedFunction::kRegularFunction:
27351 return dart::VerifyEntryPoint(lib, *this, *this,
27353 case UntaggedFunction::kImplicitClosureFunction: {
27354 const Function& parent = Function::Handle(parent_function());
27355 return dart::VerifyEntryPoint(lib, parent, parent,
27357 }
27358 default:
27359 UNREACHABLE();
27360 }
27361}
27362
// Verifies this member may be accessed via the Dart C API for the given
// pragma kind.
// NOTE(review): the signature line (27363, presumably
// `ErrorPtr Field::VerifyEntryPoint(EntryPointPragma pragma) const {` given
// the `pragma` parameter below) is missing from this extraction; verify
// against upstream object.cc.
27364 if (!FLAG_verify_entry_points) return Error::null();
27365 const Class& cls = Class::Handle(Owner());
27366 const Library& lib = Library::Handle(cls.library());
27367 return dart::VerifyEntryPoint(lib, *this, *this, {pragma});
27368}
27369
27370ErrorPtr Class::VerifyEntryPoint() const {
27371 if (!FLAG_verify_entry_points) return Error::null();
27372 const Library& lib = Library::Handle(library());
27373 if (!lib.IsNull()) {
27374 return dart::VerifyEntryPoint(lib, *this, *this, {});
27375 } else {
27376 return Error::null();
27377 }
27378}
27379
27380AbstractTypePtr RecordType::FieldTypeAt(intptr_t index) const {
27381 const Array& field_types = Array::Handle(untag()->field_types());
27382 return AbstractType::RawCast(field_types.At(index));
27383}
27384
27385void RecordType::SetFieldTypeAt(intptr_t index,
27386 const AbstractType& value) const {
27387 ASSERT(!value.IsNull());
27388 const Array& field_types = Array::Handle(untag()->field_types());
27389 field_types.SetAt(index, value);
27390}
27391
27392void RecordType::set_field_types(const Array& value) const {
27393 ASSERT(!value.IsNull());
27394 untag()->set_field_types(value.ptr());
27395}
27396
27397void RecordType::set_shape(RecordShape shape) const {
27398 untag()->set_shape(shape.AsSmi());
27399}
27400
27401ArrayPtr RecordType::GetFieldNames(Thread* thread) const {
27402 return shape().GetFieldNames(thread);
27403}
27404
// NOTE(review): extraction artifacts — the first signature line (original
// 27405, presumably "void RecordType::Print(NameVisibility name_visibility,")
// and line 27413 (presumably the AbstractType handle declaration for |type|)
// are missing from this capture — TODO confirm against upstream object.cc.
// Prints this record type as "(T1, T2, {T3 name3, ...})" followed by the
// nullability suffix; prints "null" for a null type.
27406 BaseTextBuffer* printer) const {
27407 if (IsNull()) {
27408 printer->AddString("null");
27409 return;
27410 }
27411 Thread* thread = Thread::Current();
27412 Zone* zone = thread->zone();
27414 String& name = String::Handle(zone);
27415 const intptr_t num_fields = NumFields();
27416 const Array& field_names = Array::Handle(zone, GetFieldNames(thread));
// Positional fields come first; the remainder map 1:1 onto field_names.
27417 const intptr_t num_positional_fields = num_fields - field_names.Length();
27418 printer->AddString("(");
27419 for (intptr_t i = 0; i < num_fields; ++i) {
27420 if (i != 0) {
27421 printer->AddString(", ");
27422 }
// Open the named-field group when transitioning past the positional fields.
27423 if (i == num_positional_fields) {
27424 printer->AddString("{");
27425 }
27426 type = FieldTypeAt(i);
27427 type.PrintName(name_visibility, printer);
27428 if (i >= num_positional_fields) {
27429 printer->AddString(" ");
27430 name ^= field_names.At(i - num_positional_fields);
27431 printer->AddString(name.ToCString());
27432 }
27433 }
27434 if (num_positional_fields < num_fields) {
27435 printer->AddString("}");
27436 }
27437 printer->AddString(")");
27438 printer->AddString(NullabilitySuffix(name_visibility));
27439}
27440
27441const char* RecordType::ToCString() const {
27442 Zone* zone = Thread::Current()->zone();
27443 ZoneTextBuffer printer(zone);
27444 Print(kInternalName, &printer);
27445 return printer.buffer();
27446}
27447
// NOTE(review): extraction artifacts — the first signature line (original
// 27448, presumably "bool RecordType::IsInstantiated(Genericity genericity,")
// and line 27450 (presumably the AbstractType handle declaration for |type|)
// are missing from this capture — TODO confirm.
// A record type is instantiated iff every one of its field types is.
27449 intptr_t num_free_fun_type_params) const {
27451 const intptr_t num_fields = NumFields();
27452 for (intptr_t i = 0; i < num_fields; ++i) {
27453 type = FieldTypeAt(i);
27454 if (!type.IsInstantiated(genericity, num_free_fun_type_params)) {
27455 return false;
27456 }
27457 }
27458 return true;
27459}
27460
27461RecordTypePtr RecordType::New(Heap::Space space) {
27462 return Object::Allocate<RecordType>(space);
27463}
27464
// Allocates and initializes a new, not-yet-finalized RecordType with the
// given shape, field types, and nullability.
// NOTE(review): extraction artifacts — original lines 27470 (presumably the
// handle declaration of |result|), 27476, and 27478 (presumably the
// type-testing-stub argument) are missing from this capture — TODO confirm.
27465 RecordTypePtr RecordType::New(RecordShape shape,
27466 const Array& field_types,
27467 Nullability nullability,
27468 Heap::Space space) {
27469 Zone* Z = Thread::Current()->zone();
27471 result.set_shape(shape);
27472 result.set_field_types(field_types);
// Hash is computed lazily; 0 marks it as not yet computed.
27473 result.SetHash(0);
27474 result.set_flags(0);
27475 result.set_nullability(nullability);
27477 result.InitializeTypeTestingStubNonAtomic(
27479 return result.ptr();
27480}
27481
// NOTE(review): extraction artifacts — original lines 27482 (presumably
// "RecordTypePtr RecordType::ToNullability(Nullability value,"), 27490
// (presumably "Thread* T = Thread::Current();"), and 27492 (presumably the
// handle declaration of |type|) are missing from this capture — TODO confirm.
// Returns this type if it already has the requested nullability; otherwise
// clones it with the new nullability, re-finalizing/canonicalizing as needed.
27483 Heap::Space space) const {
27484 if (nullability() == value) {
27485 return ptr();
27486 }
27487 // Clone record type and set new nullability.
27488 // Always cloning in old space and removing space parameter would not satisfy
27489 // currently existing requests for type instantiation in new space.
27491 Zone* Z = T->zone();
27493 Z,
27494 RecordType::New(shape(), Array::Handle(Z, field_types()), value, space));
27495 if (IsFinalized()) {
27496 type.SetIsFinalized();
// Preserve canonical-ness of the original on the clone.
27497 if (IsCanonical()) {
27498 type ^= type.Canonicalize(T);
27499 }
27500 }
27501 return RecordType::Cast(type).ptr();
27502}
27503
// NOTE(review): extraction artifact — the first signature line (original
// 27504, presumably "bool RecordType::IsEquivalent(") is missing from this
// capture — TODO confirm.
// Two record types are equivalent iff they have the same shape, equivalent
// nullability, and pairwise-equivalent field types.
27505 const Instance& other,
27506 TypeEquality kind,
27507 FunctionTypeMapping* function_type_equivalence) const {
27508 ASSERT(!IsNull());
27509 if (ptr() == other.ptr()) {
27510 return true;
27511 }
27512 if (!other.IsRecordType()) {
27513 return false;
27514 }
27515 const RecordType& other_type = RecordType::Cast(other);
27516 // Equal record types must have the same shape
27517 // (number of fields and named fields).
27518 if (shape() != other_type.shape()) {
27519 return false;
27520 }
27521 Thread* thread = Thread::Current();
27522 Zone* zone = thread->zone();
27523 if (!IsNullabilityEquivalent(thread, other_type, kind)) {
27524 return false;
27525 }
27526 // Equal record types must have equal field types.
27527 AbstractType& field_type = Type::Handle(zone);
27528 AbstractType& other_field_type = Type::Handle(zone);
27529 const intptr_t num_fields = NumFields();
27530 for (intptr_t i = 0; i < num_fields; ++i) {
27531 field_type = FieldTypeAt(i);
27532 other_field_type = other_type.FieldTypeAt(i);
27533 if (!field_type.IsEquivalent(other_field_type, kind,
27534 function_type_equivalence)) {
27535 return false;
27536 }
27537 }
27538 return true;
27539}
27540
// NOTE(review): extraction artifacts — original lines 27541 (presumably
// "uword RecordType::ComputeHash() const {"), 27546 (presumably the
// AbstractType handle declaration for |type|), and 27552 (presumably a
// FinalizeHash call) are missing from this capture — TODO confirm.
// Computes and caches a hash from nullability, shape, and all field types.
27542 ASSERT(IsFinalized());
27543 uint32_t result = 0;
27544 result = CombineHashes(result, static_cast<uint32_t>(nullability()));
27545 result = CombineHashes(result, static_cast<uint32_t>(shape().AsInt()));
27547 const intptr_t num_fields = NumFields();
27548 for (intptr_t i = 0; i < num_fields; ++i) {
27549 type = FieldTypeAt(i);
27550 result = CombineHashes(result, type.Hash());
27551 }
// Cache the result so subsequent Hash() calls are O(1).
27553 SetHash(result);
27554 return result;
27555}
27556
// Returns the canonical representative of this record type, inserting it
// into the isolate group's canonical-record-types table if absent.
// NOTE(review): extraction artifact — original line 27560 (presumably
// "AbstractType& type = AbstractType::Handle(zone);") is missing from this
// capture — TODO confirm.
27557 AbstractTypePtr RecordType::Canonicalize(Thread* thread) const {
27558 ASSERT(IsFinalized());
27559 Zone* zone = thread->zone();
27561 if (IsCanonical()) {
27562#ifdef DEBUG
27563 // Verify that all fields are allocated in old space and are canonical.
27564 ASSERT(Array::Handle(zone, field_types()).IsOld());
27565 const intptr_t num_fields = NumFields();
27566 for (intptr_t i = 0; i < num_fields; ++i) {
27567 type = FieldTypeAt(i);
27568 ASSERT(type.IsOld());
27569 ASSERT(type.IsCanonical());
27570 }
27571#endif
27572 return ptr();
27573 }
27574 auto isolate_group = thread->isolate_group();
27575 ObjectStore* object_store = isolate_group->object_store();
27576 RecordType& rec = RecordType::Handle(zone);
// Fast path: look up an existing canonical entry under the mutex.
27577 {
27578 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
27579 CanonicalRecordTypeSet table(zone, object_store->canonical_record_types());
27580 rec ^= table.GetOrNull(CanonicalRecordTypeKey(*this));
27581 ASSERT(object_store->canonical_record_types() == table.Release().ptr());
27582 }
27583 if (rec.IsNull()) {
// Canonicalize all field types first (outside the lock).
27584 ASSERT(Array::Handle(zone, field_types()).IsOld());
27585 const intptr_t num_fields = NumFields();
27586 for (intptr_t i = 0; i < num_fields; ++i) {
27587 type = FieldTypeAt(i);
27588 if (!type.IsCanonical()) {
27589 type = type.Canonicalize(thread);
27590 SetFieldTypeAt(i, type);
27591 }
27592 }
27593 // Check to see if the record type got added to canonical table as part
27594 // of the canonicalization of its signature types.
27595 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
27596 CanonicalRecordTypeSet table(zone, object_store->canonical_record_types());
27597 rec ^= table.GetOrNull(CanonicalRecordTypeKey(*this));
27598 if (rec.IsNull()) {
27599 // Add this record type into the canonical table of record types.
27600 if (this->IsNew()) {
27601 rec ^= Object::Clone(*this, Heap::kOld);
27602 } else {
27603 rec = this->ptr();
27604 }
27605 ASSERT(rec.IsOld());
27606 rec.SetCanonical(); // Mark object as being canonical.
27607 bool present = table.Insert(rec);
27608 ASSERT(!present);
27609 }
27610 object_store->set_canonical_record_types(table.Release());
27611 }
27612 return rec.ptr();
27613}
27614
// NOTE(review): extraction artifacts — original lines 27615 (presumably
// "void RecordType::EnumerateURIs(URIs* uris) const {") and 27616
// (presumably the AbstractType handle declaration for |type|) are missing
// from this capture — TODO confirm.
// Collects the library URIs referenced by every field type.
27617 const intptr_t num_fields = NumFields();
27618 for (intptr_t i = 0; i < num_fields; ++i) {
27619 type = FieldTypeAt(i);
27620 type.EnumerateURIs(uris);
27621 }
27622}
27623
// NOTE(review): extraction artifact — the first signature line (original
// 27624, presumably "void RecordType::PrintName(NameVisibility
// name_visibility,") is missing from this capture — TODO confirm.
// Delegates name printing to RecordType::Print.
27625 BaseTextBuffer* printer) const {
27626 RecordType::Cast(*this).Print(name_visibility, printer);
27627}
27628
// NOTE(review): extraction artifacts — original lines 27629 (presumably
// "AbstractTypePtr RecordType::InstantiateFrom(") and 27643 (presumably
// the AbstractType handle declaration for |type|) are missing from this
// capture — TODO confirm.
// Instantiates each uninstantiated field type and returns a new finalized
// (but not canonicalized) record type; returns null on failed instantiation.
27630 const TypeArguments& instantiator_type_arguments,
27631 const TypeArguments& function_type_arguments,
27632 intptr_t num_free_fun_type_params,
27633 Heap::Space space,
27634 FunctionTypeMapping* function_type_mapping,
27635 intptr_t num_parent_type_args_adjustment) const {
27636 ASSERT(IsFinalized());
27637 Zone* zone = Thread::Current()->zone();
27638
27639 const intptr_t num_fields = NumFields();
27640 const Array& old_field_types = Array::Handle(zone, field_types());
27641 const Array& new_field_types =
27642 Array::Handle(zone, Array::New(num_fields, space));
27644 for (intptr_t i = 0; i < num_fields; ++i) {
27645 type ^= old_field_types.At(i);
// Already-instantiated field types are reused as-is.
27646 if (!type.IsInstantiated()) {
27647 type = type.InstantiateFrom(
27648 instantiator_type_arguments, function_type_arguments,
27649 num_free_fun_type_params, space, function_type_mapping,
27650 num_parent_type_args_adjustment);
27651 // A returned null type indicates a failed instantiation in dead code that
27652 // must be propagated up to the caller, the optimizing compiler.
27653 if (type.IsNull()) {
27654 return RecordType::null();
27655 }
27656 }
27657 new_field_types.SetAt(i, type);
27658 }
27659
27660 const auto& rec = RecordType::Handle(
27661 zone, RecordType::New(shape(), new_field_types, nullability(), space));
27662
27663 rec.SetIsFinalized();
27664
27665 // Canonicalization is not part of instantiation.
27666 return rec.ptr();
27667}
27668
// NOTE(review): extraction artifact — the first signature line (original
// 27669, presumably "AbstractTypePtr RecordType::UpdateFunctionTypes(") is
// missing from this capture — TODO confirm.
// Rewrites any field types changed by UpdateFunctionTypes; returns this
// type unchanged if no field changed, otherwise a new finalized record type.
// The updated-fields array is allocated lazily, only on the first change.
27670 intptr_t num_parent_type_args_adjustment,
27671 intptr_t num_free_fun_type_params,
27672 Heap::Space space,
27673 FunctionTypeMapping* function_type_mapping) const {
27674 ASSERT(IsFinalized());
27675 ASSERT(num_parent_type_args_adjustment >= 0);
27676 Zone* zone = Thread::Current()->zone();
27677 const auto& types = Array::Handle(zone, field_types());
27678 Array* updated_types = nullptr;
27679 auto& type = AbstractType::Handle(zone);
27680 auto& updated = AbstractType::Handle(zone);
27681 for (intptr_t i = 0, n = NumFields(); i < n; ++i) {
27682 type ^= types.At(i);
27683 updated = type.UpdateFunctionTypes(num_parent_type_args_adjustment,
27684 num_free_fun_type_params, space,
27685 function_type_mapping);
27686 if (type.ptr() != updated.ptr()) {
27687 if (updated_types == nullptr) {
// First change: allocate the copy and back-fill the unchanged prefix.
27688 updated_types = &Array::Handle(zone, Array::New(n, space));
27689 for (intptr_t j = 0; j < i; ++j) {
27690 type ^= types.At(j);
27691 updated_types->SetAt(j, type);
27692 }
27693 }
27694 }
27695 if (updated_types != nullptr) {
27696 updated_types->SetAt(i, updated);
27697 }
27698 }
27699 if (updated_types == nullptr) {
27700 return ptr();
27701 }
27702 const auto& new_rt = RecordType::Handle(
27703 zone, RecordType::New(shape(), *updated_types, nullability(), space));
27704 new_rt.SetIsFinalized();
27705 return new_rt.ptr();
27706}
27707
// NOTE(review): extraction artifact — the first signature line (original
// 27708, presumably "bool RecordType::IsSubtypeOf(") is missing from this
// capture — TODO confirm.
// A record type is a subtype of another iff shapes match, nullability is
// compatible, and each field type is a subtype of the corresponding one.
27709 const RecordType& other,
27710 Heap::Space space,
27711 FunctionTypeMapping* function_type_equivalence) const {
27712 if (ptr() == other.ptr()) {
27713 return true;
27714 }
27715 ASSERT(IsFinalized());
27716 ASSERT(other.IsFinalized());
27717 const intptr_t num_fields = NumFields();
27718 if (shape() != other.shape()) {
27719 // Different number of fields or different named fields.
27720 return false;
27721 }
27722 Thread* const thread = Thread::Current();
27723 if (!IsNullabilityEquivalent(thread, other, TypeEquality::kInSubtypeTest)) {
27724 return false;
27725 }
27726 // Check subtyping of record field types.
27727 Zone* const zone = thread->zone();
27728 AbstractType& field_type = Type::Handle(zone);
27729 AbstractType& other_field_type = Type::Handle(zone);
27730 for (intptr_t i = 0; i < num_fields; ++i) {
27731 field_type = FieldTypeAt(i);
27732 other_field_type = other.FieldTypeAt(i);
27733 if (!field_type.IsSubtypeOf(other_field_type, space,
27734 function_type_equivalence)) {
27735 return false;
27736 }
27737 }
27738 return true;
27739}
27740
27741RecordPtr Record::New(RecordShape shape, Heap::Space space) {
27742 const intptr_t num_fields = shape.num_fields();
27743 ASSERT(num_fields >= 0);
27744 auto raw = Object::Allocate<Record>(space, num_fields);
27745 NoSafepointScope no_safepoint;
27746 raw->untag()->set_shape(shape.AsSmi());
27747 return raw;
27748}
27749
27750const char* Record::ToCString() const {
27751 if (IsNull()) {
27752 return "Record: null";
27753 }
27754 Thread* thread = Thread::Current();
27755 Zone* zone = thread->zone();
27756 ZoneTextBuffer printer(zone);
27757 const intptr_t num_fields = this->num_fields();
27758 const Array& field_names = Array::Handle(zone, GetFieldNames(thread));
27759 const intptr_t num_positional_fields = num_fields - field_names.Length();
27760 Object& obj = Object::Handle(zone);
27761 printer.AddString("Record (");
27762 for (intptr_t i = 0; i < num_fields; ++i) {
27763 if (i != 0) {
27764 printer.AddString(", ");
27765 }
27766 if (i >= num_positional_fields) {
27767 obj = field_names.At(i - num_positional_fields);
27768 printer.AddString(obj.ToCString());
27769 printer.AddString(": ");
27770 }
27771 obj = FieldAt(i);
27772 printer.AddString(obj.ToCString());
27773 }
27774 printer.AddString(")");
27775 return printer.buffer();
27776}
27777
27778bool Record::CanonicalizeEquals(const Instance& other) const {
27779 if (this->ptr() == other.ptr()) {
27780 return true;
27781 }
27782
27783 if (!other.IsRecord() || other.IsNull()) {
27784 return false;
27785 }
27786
27787 const Record& other_rec = Record::Cast(other);
27788 if (shape() != other_rec.shape()) {
27789 return false;
27790 }
27791
27792 const intptr_t num_fields = this->num_fields();
27793 for (intptr_t i = 0; i < num_fields; ++i) {
27794 if (this->FieldAt(i) != other_rec.FieldAt(i)) {
27795 return false;
27796 }
27797 }
27798 return true;
27799}
27800
// NOTE(review): extraction artifacts — original lines 27801 (presumably
// "uint32_t Record::CanonicalizeHash() const {"), 27812 (presumably
// combining element.CanonicalizeHash() into |hash|), and 27814 (presumably
// a FinalizeHash call) are missing from this capture — TODO confirm.
// Computes a hash over shape and field values, cached in the heap's
// canonical-hash table.
27802 Thread* thread = Thread::Current();
27803 uint32_t hash = thread->heap()->GetCanonicalHash(ptr());
27804 if (hash != 0) {
// Cached hash available — reuse it.
27805 return hash;
27806 }
27807 hash = shape().AsInt();
27808 Instance& element = Instance::Handle();
27809 const intptr_t num_fields = this->num_fields();
27810 for (intptr_t i = 0; i < num_fields; ++i) {
27811 element ^= FieldAt(i);
27813 }
27815 thread->heap()->SetCanonicalHash(ptr(), hash);
27816 return hash;
27817}
27818
// NOTE(review): extraction artifact — the signature line (original 27819,
// presumably "void Record::CanonicalizeFieldsLocked(Thread* thread) const {")
// is missing from this capture — TODO confirm.
// Replaces every field value with its canonical representative.
27820 Zone* zone = thread->zone();
27821 Instance& obj = Instance::Handle(zone);
27822 const intptr_t num_fields = this->num_fields();
27823 for (intptr_t i = 0; i < num_fields; ++i) {
27824 obj ^= FieldAt(i);
27825 obj = obj.CanonicalizeLocked(thread);
27826 SetFieldAt(i, obj);
27827 }
27828}
27829
// Computes the runtime RecordType of this record from the runtime types of
// its field values.
// NOTE(review): extraction artifacts — original lines 27836 (presumably the
// AbstractType handle declaration for |type|) and 27843 (presumably a
// ClassFinalizer::FinalizeType call on the new type) are missing from this
// capture — TODO confirm.
27830 RecordTypePtr Record::GetRecordType() const {
27831 Zone* const zone = Thread::Current()->zone();
27832 const intptr_t num_fields = this->num_fields();
27833 const Array& field_types =
27834 Array::Handle(zone, Array::New(num_fields, Heap::kOld));
27835 Instance& obj = Instance::Handle(zone);
27837 for (intptr_t i = 0; i < num_fields; ++i) {
27838 obj ^= FieldAt(i);
27839 type = obj.GetType(Heap::kNew);
27840 field_types.SetAt(i, type);
27841 }
27842 type = RecordType::New(shape(), field_types, Nullability::kNonNullable);
27844 return RecordType::Cast(type).ptr();
27845}
27846
// NOTE(review): extraction artifact — the first signature line (original
// 27847, presumably "intptr_t Record::GetPositionalFieldIndexFromFieldName(")
// is missing from this capture — TODO confirm.
// Maps a field name of the form "$<k>" (k >= 1) to the zero-based
// positional index k-1; returns -1 if the name is not of that form or out
// of range.
27848 const String& field_name) {
27849 if (field_name.IsOneByteString() && field_name.Length() >= 1 &&
27850 field_name.CharAt(0) == '$') {
27851 int64_t value = 0;
27852 const char* cstr = field_name.ToCString();
27853 if (OS::StringToInt64(cstr + 1 /* skip '$' */, &value)) {
27854 if (value >= 1 && value < kMaxElements) {
27855 return static_cast<intptr_t>(value - 1);
27856 }
27857 }
27858 }
27859 return -1;
27860}
27861
// NOTE(review): extraction artifacts — original lines 27862 (presumably
// "intptr_t Record::GetFieldIndexByName(Thread* thread,") and 27866
// (presumably the GetPositionalFieldIndexFromFieldName(field_name) call
// completing the initializer) are missing from this capture — TODO confirm.
// Resolves a (symbol) field name to its field index: first as a "$k"
// positional reference, then by searching the named-field list; -1 if absent.
27863 const String& field_name) const {
27864 ASSERT(field_name.IsSymbol());
27865 const intptr_t field_index =
27867 const Array& field_names = Array::Handle(GetFieldNames(thread));
27868 const intptr_t num_positional_fields = num_fields() - field_names.Length();
27869 if ((field_index >= 0) && (field_index < num_positional_fields)) {
27870 return field_index;
27871 } else {
// Symbols are canonical, so pointer comparison suffices here.
27872 for (intptr_t i = 0, n = field_names.Length(); i < n; ++i) {
27873 if (field_names.At(i) == field_name.ptr()) {
27874 return num_positional_fields + i;
27875 }
27876 }
27877 }
27878 return -1;
27879}
27880
// NOTE(review): extraction artifacts — original lines 27881 (presumably
// "class RecordFieldNamesMapTraits {") and 27896 (presumably the typedef
// defining RecordFieldNamesMap over these traits) are missing from this
// capture — TODO confirm.
// Hash-table traits keyed by field-name arrays, using structural
// (CanonicalizeEquals/CanonicalizeHash) comparison.
27882 public:
27883 static const char* Name() { return "RecordFieldNamesMapTraits"; }
27884 static bool ReportStats() { return false; }
27885
27886 static bool IsMatch(const Object& a, const Object& b) {
27887 return Array::Cast(a).CanonicalizeEquals(Array::Cast(b));
27888 }
27889
27890 static uword Hash(const Object& key) {
27891 return Array::Cast(key).CanonicalizeHash();
27892 }
27893
27894 static ObjectPtr NewKey(const Array& arr) { return arr.ptr(); }
27895};
27897
// NOTE(review): extraction artifacts — original lines 27898 (presumably
// "RecordShape RecordShape::Register(Thread* thread,"), 27916 (presumably
// the declaration of |map| via RecordFieldNamesMap), and 27931 (presumably
// compiler::target::RecordShape::kMaxFieldNamesIndex) are missing from this
// capture — TODO confirm.
// Interns |field_names| into the isolate group's field-names table and
// returns a RecordShape combining |num_fields| with the table index.
// Index 0 is reserved for records without named fields.
27899 intptr_t num_fields,
27900 const Array& field_names) {
27901 ASSERT(!field_names.IsNull());
27902 ASSERT(field_names.IsImmutable());
27903 ASSERT(field_names.ptr() == Object::empty_array().ptr() ||
27904 field_names.Length() > 0);
27905
27906 Zone* zone = thread->zone();
27907 IsolateGroup* isolate_group = thread->isolate_group();
27908 ObjectStore* object_store = isolate_group->object_store();
27909
// Double-checked initialization of the shared table: acquire-load outside
// the lock, re-check under the program lock, release-store when published.
27910 if (object_store->record_field_names<std::memory_order_acquire>() ==
27911 Array::null()) {
27912 // First-time initialization.
27913 SafepointWriteRwLocker ml(thread, isolate_group->program_lock());
27914 if (object_store->record_field_names() == Array::null()) {
27915 // Reserve record field names index 0 for records without named fields.
27917 HashTables::New<RecordFieldNamesMap>(16, Heap::kOld));
27918 map.InsertOrGetValue(Object::empty_array(),
27919 Smi::Handle(zone, Smi::New(0)));
27920 ASSERT(map.NumOccupied() == 1);
27921 object_store->set_record_field_names_map(map.Release());
27922 const auto& table = Array::Handle(zone, Array::New(16));
27923 table.SetAt(0, Object::empty_array());
27924 object_store->set_record_field_names<std::memory_order_release>(table);
27925 }
27926 }
27927
27928#if defined(DART_PRECOMPILER)
27929 const intptr_t kMaxNumFields = compiler::target::RecordShape::kMaxNumFields;
27930 const intptr_t kMaxFieldNamesIndex =
27932#else
27933 const intptr_t kMaxNumFields = RecordShape::kMaxNumFields;
27934 const intptr_t kMaxFieldNamesIndex = RecordShape::kMaxFieldNamesIndex;
27935#endif
27936
27937 if (num_fields > kMaxNumFields) {
27938 FATAL("Too many record fields");
27939 }
27940 if (field_names.ptr() == Object::empty_array().ptr()) {
27941 return RecordShape::ForUnnamed(num_fields);
27942 }
27943
// Fast path: look up an existing index under the read lock.
27944 {
27945 SafepointReadRwLocker ml(thread, isolate_group->program_lock());
27946 RecordFieldNamesMap map(object_store->record_field_names_map());
27947 Smi& index = Smi::Handle(zone);
27948 index ^= map.GetOrNull(field_names);
27949 ASSERT(map.Release().ptr() == object_store->record_field_names_map());
27950 if (!index.IsNull()) {
27951 return RecordShape(num_fields, index.Value());
27952 }
27953 }
27954
// Slow path: insert a new index under the write lock. InsertOrGetValue
// handles the race where another thread inserted first.
27955 SafepointWriteRwLocker ml(thread, isolate_group->program_lock());
27956 RecordFieldNamesMap map(object_store->record_field_names_map());
27957 const intptr_t new_index = map.NumOccupied();
27958 if (new_index > kMaxFieldNamesIndex) {
27959 FATAL("Too many record shapes");
27960 }
27961
27962 const intptr_t index = Smi::Value(Smi::RawCast(map.InsertOrGetValue(
27963 field_names, Smi::Handle(zone, Smi::New(new_index)))));
27964 ASSERT(index > 0);
27965
27966 if (index == new_index) {
// We won the insertion: grow the index->names table if needed and record
// the names at the new slot.
27967 ASSERT(map.NumOccupied() == (new_index + 1));
27968 Array& table = Array::Handle(zone, object_store->record_field_names());
27969 intptr_t capacity = table.Length();
27970 if (index >= table.Length()) {
27971 capacity = capacity + (capacity >> 2);
27972 table = Array::Grow(table, capacity);
27973 object_store->set_record_field_names(table);
27974 }
27975 table.SetAt(index, field_names);
27976 } else {
27977 ASSERT(index < new_index);
27978 }
27979 object_store->set_record_field_names_map(map.Release());
27980
27981 const RecordShape shape(num_fields, index);
27982 ASSERT(shape.GetFieldNames(thread) == field_names.ptr());
27983 ASSERT(shape.num_fields() == num_fields);
27984 return shape;
27985}
27986
27987ArrayPtr RecordShape::GetFieldNames(Thread* thread) const {
27988 ObjectStore* object_store = thread->isolate_group()->object_store();
27989 Array& table =
27990 Array::Handle(thread->zone(), object_store->record_field_names());
27991 ASSERT(!table.IsNull());
27992 return Array::RawCast(table.At(field_names_index()));
27993}
27994
27995} // namespace dart
const char * options
static void info(const char *fmt,...) SK_PRINTF_LIKE(1
Definition: DM.cpp:213
static struct Initializer initializer
int count
Definition: FontMgrTest.cpp:50
SkPoint pos
static float next(float f)
static void Union(SkRegion *rgn, const SkIRect &rect)
Definition: RegionTest.cpp:27
static constexpr size_t kHeaderSize
SkIDChangeListener::List List
static void operation(T operation, uint32_t &a, uint32_t b, uint32_t c, uint32_t d, uint32_t x, uint8_t s, uint32_t t)
Definition: SkMD5.cpp:144
static void encode(uint8_t output[16], const uint32_t input[4])
Definition: SkMD5.cpp:240
static uint32_t hash(const SkShaderBase::GradientInfo &v)
bool equals(SkDrawable *a, SkDrawable *b)
static const size_t kBufferSize
Definition: SkString.cpp:27
static bool is_valid(SkISize dim)
Type
Definition: SortBench.cpp:56
SI void store(P *ptr, const T &val)
SI F table(const skcms_Curve *curve, F v)
Vec2Value v2
#define IG
#define UNREACHABLE()
Definition: assert.h:248
#define OUT_OF_MEMORY()
Definition: assert.h:250
#define DEBUG_ASSERT(cond)
Definition: assert.h:321
#define ASSERT_EQUAL(expected, actual)
Definition: assert.h:309
#define RELEASE_ASSERT(cond)
Definition: assert.h:327
#define ASSERT_NOTNULL(ptr)
Definition: assert.h:323
#define Z
GLenum type
#define CLASS_LIST_NO_OBJECT_NOR_STRING_NOR_ARRAY_NOR_MAP(V)
Definition: class_id.h:113
#define CLASS_LIST_MAPS(V)
Definition: class_id.h:116
#define CLASS_LIST_STRINGS(V)
Definition: class_id.h:132
#define CLASS_LIST_SETS(V)
Definition: class_id.h:120
#define CLASS_LIST_FIXED_LENGTH_ARRAYS(V)
Definition: class_id.h:124
#define CLASS_LIST_FFI_TYPE_MARKER(V)
Definition: class_id.h:165
#define CLASS_LIST_TYPED_DATA(V)
Definition: class_id.h:137
#define DART_CLASS_LIST_TYPED_DATA(V)
Definition: class_id.h:177
bool IsSubtypeOf(const AbstractType &other, Heap::Space space, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition: object.cc:21550
bool IsNonNullable() const
Definition: object.h:9071
void SetTypeTestingStub(const Code &stub) const
Definition: object.cc:21761
UntaggedAbstractType::TypeState type_state() const
Definition: object.h:9351
virtual AbstractTypePtr UpdateFunctionTypes(intptr_t num_parent_type_args_adjustment, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping) const
Definition: object.cc:21231
virtual classid_t type_class_id() const
Definition: object.cc:21033
virtual const char * NullabilitySuffix(NameVisibility name_visibility) const
Definition: object.cc:21303
bool IsDartFunctionType() const
Definition: object.cc:21451
bool IsTopTypeForSubtyping() const
Definition: object.cc:21396
StringPtr UserVisibleName() const
Definition: object.cc:21331
bool IsStringType() const
Definition: object.cc:21446
bool IsNullabilityEquivalent(Thread *thread, const AbstractType &other_type, TypeEquality kind) const
Definition: object.cc:21196
bool IsFinalized() const
Definition: object.h:9053
virtual AbstractTypePtr Canonicalize(Thread *thread) const
Definition: object.cc:21240
bool IsIntegerImplementationType() const
Definition: object.cc:21416
void set_flags(uint32_t value) const
Definition: object.cc:21168
virtual bool HasTypeClass() const
Definition: object.h:9083
virtual AbstractTypePtr SetInstantiatedNullability(const TypeParameter &type_param, Heap::Space space) const
Definition: object.cc:21081
void SetHash(intptr_t value) const
Definition: object.h:13386
virtual bool IsEquivalent(const Instance &other, TypeEquality kind, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition: object.cc:21184
virtual TypeArgumentsPtr arguments() const
Definition: object.cc:21051
bool IsDartRecordType() const
Definition: object.cc:21460
bool IsVoidType() const
Definition: object.h:9189
virtual AbstractTypePtr InstantiateFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping=nullptr, intptr_t num_parent_type_args_adjustment=0) const
Definition: object.cc:21216
const char * ScrubbedNameCString() const
Definition: object.cc:21346
Nullability nullability() const
Definition: object.h:9060
virtual bool Equals(const Instance &other) const
Definition: object.h:9094
bool IsFfiPointerType() const
Definition: object.cc:21469
bool IsInt32x4Type() const
Definition: object.cc:21440
bool IsObjectType() const
Definition: object.h:9201
virtual void PrintName(NameVisibility visibility, BaseTextBuffer *printer) const
Definition: object.cc:21353
StringPtr Name() const
Definition: object.cc:21320
virtual AbstractTypePtr NormalizeFutureOrType(Heap::Space space) const
Definition: object.cc:21115
static void AddURI(URIs *uris, const String &name, const String &uri)
Definition: object.cc:21257
bool IsTopTypeForInstanceOf() const
Definition: object.cc:21379
static StringPtr PrintURIs(URIs *uris)
Definition: object.cc:21283
bool IsFloat64x2Type() const
Definition: object.cc:21434
bool IsFutureOrType() const
Definition: object.h:9267
virtual bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition: object.cc:21151
StringPtr ScrubbedName() const
Definition: object.cc:21342
static bool InstantiateAndTestSubtype(AbstractType *subtype, AbstractType *supertype, const TypeArguments &instantiator_type_args, const TypeArguments &function_type_args)
Definition: object.cc:4287
bool IsDartClosureType() const
Definition: object.cc:21456
virtual ClassPtr type_class() const
Definition: object.cc:21042
bool IsNullable() const
Definition: object.h:9066
bool IsTypeClassAllowedBySpawnUri() const
Definition: object.cc:21473
const char * NameCString() const
Definition: object.cc:21324
bool IsNullType() const
Definition: object.cc:21367
StringPtr ClassName() const
Definition: object.cc:21362
bool IsDynamicType() const
Definition: object.h:9186
void SetIsFinalized() const
Definition: object.cc:21161
bool IsSentinelType() const
Definition: object.cc:21375
bool IsDoubleType() const
Definition: object.cc:21423
const char * UserVisibleNameCString() const
Definition: object.cc:21335
bool IsStrictlyNonNullable() const
Definition: object.cc:21060
AbstractTypePtr UnwrapFutureOr() const
Definition: object.cc:21526
virtual uword ComputeHash() const
Definition: object.cc:21748
virtual void EnumerateURIs(URIs *uris) const
Definition: object.cc:21249
void set_nullability(Nullability value) const
Definition: object.cc:21178
bool IsFloat32x4Type() const
Definition: object.cc:21428
void InitializeTypeTestingStubNonAtomic(const Code &stub) const
Definition: object.cc:21787
void set_type_state(UntaggedAbstractType::TypeState value) const
Definition: object.cc:21172
bool IsIntType() const
Definition: object.cc:21411
bool IsNeverType() const
Definition: object.cc:21371
StringPtr message() const
Definition: object.h:8059
virtual const char * ToErrorCString() const
Definition: object.cc:19820
intptr_t PositionalCount() const
Definition: dart_entry.cc:371
intptr_t NamedCount() const
Definition: dart_entry.h:43
intptr_t Count() const
Definition: dart_entry.cc:363
void PrintTo(BaseTextBuffer *buffer, bool show_named_positions=false) const
Definition: dart_entry.cc:414
intptr_t SizeWithTypeArgs() const
Definition: dart_entry.h:41
static ArrayPtr NewBoxed(intptr_t type_args_len, intptr_t num_arguments, const Array &optional_arguments_names, Heap::Space space=Heap::kOld)
Definition: dart_entry.h:83
intptr_t CountWithTypeArgs() const
Definition: dart_entry.h:38
intptr_t FirstArgIndex() const
Definition: dart_entry.h:37
intptr_t Size() const
Definition: dart_entry.cc:367
intptr_t TypeArgsLen() const
Definition: dart_entry.cc:359
intptr_t PositionAt(intptr_t i) const
Definition: dart_entry.cc:383
StringPtr NameAt(intptr_t i) const
Definition: dart_entry.cc:375
std::tuple_element< kElement, TupleT >::type::ObjectPtrType Get() const
Definition: object.h:13475
intptr_t Length() const
Definition: object.h:13525
static intptr_t type_arguments_offset()
Definition: object.h:10928
static intptr_t InstanceSize()
Definition: object.h:10936
ArrayPtr Slice(intptr_t start, intptr_t count, bool with_type_argument) const
Definition: object.cc:24810
static bool Equals(ArrayPtr a, ArrayPtr b)
Definition: object.h:10854
virtual uint32_t CanonicalizeHash() const
Definition: object.cc:24740
ObjectPtr AtAcquire(intptr_t index) const
Definition: object.h:10891
static ArrayPtr New(intptr_t len, Heap::Space space=Heap::kNew)
Definition: object.h:10959
void Truncate(intptr_t new_length) const
Definition: object.cc:24894
static constexpr intptr_t kBytesPerElement
Definition: object.h:10923
virtual bool CanonicalizeEquals(const Instance &other) const
Definition: object.cc:24702
void MakeImmutable() const
Definition: object.cc:24837
bool IsImmutable() const
Definition: object.h:10900
virtual void SetTypeArguments(const TypeArguments &value) const
Definition: object.h:10908
static intptr_t LengthOf(const ArrayPtr array)
Definition: object.h:10830
ObjectPtr At(intptr_t index) const
Definition: object.h:10875
intptr_t Length() const
Definition: object.h:10829
static ArrayPtr NewUninitialized(intptr_t len, Heap::Space space=Heap::kNew)
Definition: object.h:10964
static ArrayPtr MakeFixedLength(const GrowableObjectArray &growable_array, bool unique=false)
Definition: object.cc:24935
static intptr_t data_offset()
Definition: object.h:10835
void SetAt(intptr_t index, const Object &value) const
Definition: object.h:10880
static intptr_t length_offset()
Definition: object.h:10834
static ArrayPtr Grow(const Array &source, intptr_t new_length, Heap::Space space=Heap::kNew)
Definition: object.cc:24853
virtual void CanonicalizeFieldsLocked(Thread *thread) const
Definition: object.cc:24971
void Add(const T &value)
const T & At(intptr_t index) const
void SetLength(intptr_t new_length)
void Sort(int compare(const T *, const T *))
intptr_t length() const
void SetAt(intptr_t index, const T &t)
const T & At(intptr_t index) const
void AddString(const char *s)
Definition: text_buffer.cc:263
intptr_t Printf(const char *format,...) PRINTF_ATTRIBUTE(2
Definition: text_buffer.cc:14
char * buffer() const
Definition: text_buffer.h:35
static constexpr int bitsize()
Definition: bitfield.h:162
static constexpr ClassIdTagType decode(uword value)
Definition: bitfield.h:171
static constexpr S update(T value, S original)
Definition: bitfield.h:188
static constexpr intptr_t encode(bool value)
Definition: bitfield.h:165
static constexpr bool is_valid(T value)
Definition: bitfield.h:146
void Add(intptr_t i)
Definition: bit_vector.h:63
bool Contains(intptr_t i) const
Definition: bit_vector.h:91
static intptr_t InstanceSize()
Definition: object.h:10793
static const Bool & Get(bool value)
Definition: object.h:10801
static const Bool & True()
Definition: object.h:10797
static ErrorPtr DoBootstrapping(const uint8_t *kernel_buffer, intptr_t kernel_buffer_size)
Definition: bootstrap.cc:219
static constexpr bool ContainsCompressedPointers()
Definition: object.h:11859
virtual void ReportSwitchingCode(const Code &code)
Definition: object.cc:4375
virtual void UpdateArrayTo(const WeakArray &value)
Definition: object.cc:4362
virtual void ReportDeoptimization(const Code &code)
Definition: object.cc:4367
CHACodeArray(const Class &cls)
Definition: object.cc:4358
static void FlushICache(uword start, uword size)
StringPtr target_name() const
Definition: object.h:2372
ArrayPtr arguments_descriptor() const
Definition: object.h:2373
static CapabilityPtr New(uint64_t id, Heap::Space space=Heap::kNew)
Definition: object.cc:25773
static int32_t ToLower(int32_t code_point)
Definition: unicode.h:179
static int32_t ToUpper(int32_t code_point)
Definition: unicode.h:174
static AbstractTypePtr FinalizeType(const AbstractType &type, FinalizationKind finalization=kCanonicalize)
static void VerifyBootstrapClasses()
static ErrorPtr AllocateFinalizeClass(const Class &cls)
static ErrorPtr LoadClassMembers(const Class &cls)
static bool IsMatch(const FunctionName &name, const Object &obj)
Definition: object.cc:3254
static bool ReportStats()
Definition: object.cc:3246
static uword Hash(const FunctionName &name)
Definition: object.cc:3260
static bool IsMatch(const Object &a, const Object &b)
Definition: object.cc:3249
static uword Hash(const Object &key)
Definition: object.cc:3257
static const char * Name()
Definition: object.cc:3245
void Register(const Class &cls)
Definition: class_table.cc:65
void CopySizesFromClassObjects()
Definition: class_table.cc:146
ClassPtr At(intptr_t cid) const
Definition: class_table.h:362
bool ShouldTraceAllocationFor(intptr_t cid)
Definition: class_table.h:399
UnboxedFieldBitmap GetUnboxedFieldsMapAt(intptr_t cid) const
Definition: class_table.h:388
static bool IsTopLevelCid(intptr_t cid)
Definition: class_table.h:496
void AddFields(const GrowableArray< const Field * > &fields) const
Definition: object.cc:5013
TypeArgumentsPtr DefaultTypeArguments(Zone *zone) const
Definition: object.cc:3658
void AddFunction(const Function &function) const
Definition: object.cc:3296
void set_is_implemented_unsafe() const
Definition: object.cc:5618
void set_num_type_arguments(intptr_t value) const
Definition: object.cc:3109
void set_is_transformed_mixin_application() const
Definition: object.cc:5665
void set_is_implemented() const
Definition: object.cc:5613
FieldPtr LookupInstanceField(const String &name) const
Definition: object.cc:6344
void set_has_pragma(bool value) const
Definition: object.cc:3129
const char * NameCString(NameVisibility name_visibility) const
Definition: object.cc:3006
void set_is_finalized_unsafe() const
Definition: object.cc:5720
intptr_t FindImplicitClosureFunctionIndex(const Function &needle) const
Definition: object.cc:3364
intptr_t NumTypeParameters() const
Definition: object.h:1344
const char * ScrubbedNameCString() const
Definition: object.cc:2985
FunctionPtr LookupFunctionAllowPrivate(const String &name) const
Definition: object.cc:6167
ObjectPtr InvokeSetter(const String &selector, const Instance &argument, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition: object.cc:4577
CodePtr allocation_stub() const
Definition: object.h:1800
void set_is_deeply_immutable(bool value) const
Definition: object.cc:3144
FunctionPtr LookupDynamicFunctionAllowPrivate(const String &name) const
Definition: object.cc:6133
void set_instance_size(intptr_t host_value_in_bytes, intptr_t target_value_in_bytes) const
Definition: object.h:1167
LibraryPtr library() const
Definition: object.h:1333
void set_is_isolate_unsendable_due_to_pragma(bool value) const
Definition: object.cc:3138
void set_super_type(const Type &value) const
Definition: object.cc:3684
bool TraceAllocation(IsolateGroup *isolate_group) const
Definition: object.cc:4434
FunctionPtr InvocationDispatcherFunctionFromIndex(intptr_t idx) const
Definition: object.cc:3419
void set_is_allocate_finalized() const
Definition: object.cc:5725
FunctionPtr LookupConstructorAllowPrivate(const String &name) const
Definition: object.cc:6153
static ClassPtr NewTypedDataViewClass(intptr_t class_id, IsolateGroup *isolate_group)
Definition: object.cc:5278
void set_is_enum_class() const
Definition: object.cc:5655
void set_is_synthesized_class() const
Definition: object.cc:5646
ObjectPtr Invoke(const String &selector, const Array &arguments, const Array &argument_names, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition: object.cc:4684
FunctionPtr LookupGetterFunction(const String &name) const
Definition: object.cc:6308
void Finalize() const
Definition: object.cc:4307
bool IsRecordClass() const
Definition: object.h:1583
bool IsInFullSnapshot() const
Definition: object.cc:3030
ObjectPtr InvokeGetter(const String &selector, bool throw_nsm_if_absent, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition: object.cc:4517
FieldPtr LookupInstanceFieldAllowPrivate(const String &name) const
Definition: object.cc:6434
void set_is_allocated_unsafe(bool value) const
Definition: object.cc:5705
void AddDirectImplementor(const Class &subclass, bool is_mixin) const
Definition: object.cc:5746
TypePtr GetInstantiationOf(Zone *zone, const Class &cls) const
Definition: object.cc:12728
bool is_declaration_loaded() const
Definition: object.h:1703
intptr_t target_type_arguments_field_offset() const
Definition: object.h:1384
uint32_t Hash() const
Definition: object.cc:5597
void SetFields(const Array &value) const
Definition: object.cc:4984
void set_is_prefinalized() const
Definition: object.cc:5732
bool is_const() const
Definition: object.h:1745
void set_is_interface_class() const
Definition: object.cc:5685
bool IsDartFunctionClass() const
Definition: object.cc:5903
StringPtr ScrubbedName() const
Definition: object.cc:2981
static intptr_t InstanceSize()
Definition: object.h:1685
GrowableObjectArrayPtr direct_subclasses() const
Definition: object.h:1537
TypeArgumentsPtr GetDeclarationInstanceTypeArguments() const
Definition: object.cc:3476
TypePtr super_type() const
Definition: object.h:1431
FunctionPtr GetInvocationDispatcher(const String &target_name, const Array &args_desc, UntaggedFunction::Kind kind, bool create_if_absent) const
Definition: object.cc:3847
intptr_t host_next_field_offset() const
Definition: object.h:1190
FunctionPtr LookupStaticFunction(const String &name) const
Definition: object.cc:6137
intptr_t id() const
Definition: object.h:1233
static intptr_t UnboxedFieldSizeInBytesByCid(intptr_t cid)
Definition: object.cc:3702
void set_is_declaration_loaded() const
Definition: object.cc:5627
static ClassPtr NewExternalTypedDataClass(intptr_t class_id, IsolateGroup *isolate)
Definition: object.cc:5318
void set_num_type_arguments_unsafe(intptr_t value) const
Definition: object.cc:3125
intptr_t target_instance_size() const
Definition: object.h:1147
void set_is_fields_marked_nullable() const
Definition: object.cc:5695
intptr_t NumTypeArguments() const
Definition: object.cc:3640
void set_is_abstract() const
Definition: object.cc:5622
void set_is_mixin_class() const
Definition: object.cc:5675
FunctionPtr LookupConstructor(const String &name) const
Definition: object.cc:6147
void set_type_arguments_field_offset_in_words(intptr_t host_value, intptr_t target_value) const
Definition: object.h:1414
void set_num_native_fields(uint16_t value) const
Definition: object.h:1789
static ClassPtr NewTypedDataClass(intptr_t class_id, IsolateGroup *isolate_group)
Definition: object.cc:5259
void set_type_arguments_field_offset(intptr_t host_value_in_bytes, intptr_t target_value_in_bytes) const
Definition: object.h:1397
WeakArrayPtr dependent_code() const
Definition: object.cc:4420
void set_dependent_code(const WeakArray &array) const
Definition: object.cc:4426
intptr_t target_next_field_offset() const
Definition: object.h:1193
ObjectPtr EvaluateCompiledExpression(const ExternalTypedData &kernel_buffer, const Array &type_definitions, const Array &param_values, const TypeArguments &type_param_values) const
Definition: object.cc:4821
FunctionPtr LookupSetterFunction(const String &name) const
Definition: object.cc:6312
bool IsPrivate() const
Definition: object.cc:6125
FunctionPtr GetRecordFieldGetter(const String &getter_name) const
Definition: object.cc:4078
void set_is_sealed() const
Definition: object.cc:5670
TypePtr RareType() const
Definition: object.cc:3036
void set_direct_subclasses(const GrowableObjectArray &subclasses) const
Definition: object.cc:5799
intptr_t host_type_arguments_field_offset() const
Definition: object.h:1375
ArrayPtr interfaces() const
Definition: object.h:1447
bool IsObjectClass() const
Definition: object.h:1565
bool InjectCIDFields() const
Definition: object.cc:5059
void set_interfaces(const Array &value) const
Definition: object.cc:5739
bool is_type_finalized() const
Definition: object.h:1709
void SetUserVisibleNameInClassTable()
Definition: object.cc:5383
InstancePtr InsertCanonicalConstant(Zone *zone, const Instance &constant) const
Definition: object.cc:6485
void set_script(const Script &value) const
Definition: object.cc:5555
ArrayPtr fields() const
Definition: object.h:1615
static bool IsSubtypeOf(const Class &cls, const TypeArguments &type_arguments, Nullability nullability, const AbstractType &other, Heap::Space space, FunctionTypeMapping *function_type_equivalence=nullptr)
Definition: object.cc:5920
void DisableAllocationStub() const
Definition: object.cc:5879
void SetTraceAllocation(bool trace_allocation) const
Definition: object.cc:4443
ArrayPtr constants() const
Definition: object.cc:5806
bool HasInstanceFields() const
Definition: object.cc:3211
void set_is_future_subtype(bool value) const
Definition: object.cc:3149
void set_is_declaration_loaded_unsafe() const
Definition: object.cc:5632
InstancePtr LookupCanonicalInstance(Zone *zone, const Instance &value) const
Definition: object.cc:6472
uint16_t num_native_fields() const
Definition: object.h:1788
intptr_t implementor_cid() const
Definition: object.h:1245
void set_is_finalized() const
Definition: object.cc:5714
ArrayPtr OffsetToFieldMap(ClassTable *class_table=nullptr) const
Definition: object.cc:3183
intptr_t host_instance_size() const
Definition: object.h:1143
static bool IsDeeplyImmutable(ClassPtr clazz)
Definition: object.h:2177
void DisableAllCHAOptimizedCode()
Definition: object.cc:4416
FunctionPtr LookupFunctionReadLocked(const String &name) const
Definition: object.cc:6171
void AddField(const Field &field) const
Definition: object.cc:5002
void DisableCHAOptimizedCode(const Class &subclass)
Definition: object.cc:4402
bool HasCompressedPointers() const
Definition: object.cc:2946
void set_is_allocated(bool value) const
Definition: object.cc:5700
void set_allocation_stub(const Code &value) const
Definition: object.cc:5870
int32_t SourceFingerprint() const
Definition: object.cc:5604
FunctionPtr LookupDynamicFunctionUnsafe(const String &name) const
Definition: object.cc:6129
bool is_abstract() const
Definition: object.h:1696
bool IsDynamicClass() const
Definition: object.h:1556
bool IsGeneric() const
Definition: object.h:1358
bool IsClosureClass() const
Definition: object.h:1577
StringPtr Name() const
Definition: object.cc:2977
static constexpr intptr_t kNoTypeArguments
Definition: object.h:1374
FunctionPtr LookupStaticFunctionAllowPrivate(const String &name) const
Definition: object.cc:6143
void set_token_pos(TokenPosition value) const
Definition: object.cc:5565
TypePtr DeclarationType() const
Definition: object.cc:5827
FieldPtr LookupStaticFieldAllowPrivate(const String &name) const
Definition: object.cc:6442
FunctionPtr LookupFactory(const String &name) const
Definition: object.cc:6157
TokenPosition token_pos() const
Definition: object.h:1279
void set_instance_size_in_words(intptr_t host_value, intptr_t target_value) const
Definition: object.h:1174
ErrorPtr EnsureIsFinalized(Thread *thread) const
Definition: object.cc:4924
bool IsVoidClass() const
Definition: object.h:1559
void set_is_synthesized_class_unsafe() const
Definition: object.cc:5651
bool FindInstantiationOf(Zone *zone, const Class &cls, GrowableArray< const Type * > *path, bool consider_only_super_classes=false) const
Definition: object.cc:12675
void set_is_base_class() const
Definition: object.cc:5680
static ClassPtr New(IsolateGroup *isolate_group, bool register_class=true)
Definition: object.cc:3053
bool is_prefinalized() const
Definition: object.h:1738
void RegisterCHACode(const Code &code)
Definition: object.cc:4390
bool IsFutureClass() const
Definition: object.cc:5907
KernelProgramInfoPtr KernelProgramInfo() const
Definition: object.cc:5560
FieldPtr LookupField(const String &name) const
Definition: object.cc:6352
void set_library(const Library &value) const
Definition: object.cc:3438
void set_end_token_pos(TokenPosition value) const
Definition: object.cc:5570
ErrorPtr EnsureIsAllocateFinalized(Thread *thread) const
Definition: object.cc:4954
FunctionPtr LookupFactoryAllowPrivate(const String &name) const
Definition: object.cc:6163
ClassPtr SuperClass(ClassTable *class_table=nullptr) const
Definition: object.cc:3665
void set_is_loaded(bool value) const
Definition: object.cc:5709
static ClassPtr NewStringClass(intptr_t class_id, IsolateGroup *isolate_group)
Definition: object.cc:5232
void set_constants(const Array &value) const
Definition: object.cc:5810
intptr_t FindFieldIndex(const Field &needle) const
Definition: object.cc:5030
StringPtr UserVisibleName() const
Definition: object.cc:2989
void AddInvocationDispatcher(const String &target_name, const Array &args_desc, const Function &dispatcher) const
Definition: object.cc:3831
void AddDirectSubclass(const Class &subclass) const
Definition: object.cc:5778
void set_id(intptr_t value) const
Definition: object.h:1234
bool IsTopLevel() const
Definition: object.cc:6121
FieldPtr FieldFromIndex(intptr_t idx) const
Definition: object.cc:5051
void set_has_dynamically_extendable_subtypes(bool value) const
Definition: object.cc:3164
bool NoteImplementor(const Class &implementor) const
Definition: object.cc:5580
TypeParametersPtr type_parameters() const
Definition: object.h:1338
bool IsNullClass() const
Definition: object.h:1553
void set_is_type_finalized() const
Definition: object.cc:5638
const char * UserVisibleNameCString() const
Definition: object.cc:2998
void set_is_final() const
Definition: object.cc:5690
GrowableObjectArrayPtr direct_implementors() const
Definition: object.h:1520
intptr_t FindFunctionIndex(const Function &needle) const
Definition: object.cc:3321
bool is_implemented() const
Definition: object.h:1692
TypeParameterPtr TypeParameterAt(intptr_t index, Nullability nullability=Nullability::kNonNullable) const
Definition: object.cc:3689
static ClassPtr NewPointerClass(intptr_t class_id, IsolateGroup *isolate_group)
Definition: object.cc:5338
void set_next_field_offset(intptr_t host_value_in_bytes, intptr_t target_value_in_bytes) const
Definition: object.h:1201
void set_is_const() const
Definition: object.cc:5660
bool is_allocate_finalized() const
Definition: object.h:1732
bool is_transformed_mixin_application() const
Definition: object.h:1754
FunctionPtr ImplicitClosureFunctionFromIndex(intptr_t idx) const
Definition: object.cc:3354
FunctionPtr FunctionFromIndex(intptr_t idx) const
Definition: object.cc:3343
static ClassPtr NewNativeWrapper(const Library &library, const String &name, int num_fields)
Definition: object.cc:5189
void SetFunctions(const Array &value) const
Definition: object.cc:3264
void set_type_parameters(const TypeParameters &value) const
Definition: object.cc:3442
static ClassPtr NewUnmodifiableTypedDataViewClass(intptr_t class_id, IsolateGroup *isolate_group)
Definition: object.cc:5298
DART_WARN_UNUSED_RESULT ErrorPtr VerifyEntryPoint() const
Definition: object.cc:27370
FieldPtr LookupFieldAllowPrivate(const String &name, bool instance_only=false) const
Definition: object.cc:6403
intptr_t NumTypeParameters(Thread *thread) const
Definition: object.cc:3555
ClassPtr Mixin() const
Definition: object.cc:3020
bool is_isolate_unsendable_due_to_pragma() const
Definition: object.h:2169
void set_direct_implementors(const GrowableObjectArray &implementors) const
Definition: object.cc:5772
intptr_t FindInvocationDispatcherFunctionIndex(const Function &needle) const
Definition: object.cc:3392
bool is_finalized() const
Definition: object.h:1723
ArrayPtr current_functions() const
Definition: object.h:1641
ArrayPtr functions() const
Definition: object.h:1646
FieldPtr LookupStaticField(const String &name) const
Definition: object.cc:6348
ScriptPtr script() const
Definition: object.h:1272
void EnsureDeclarationLoaded() const
Definition: object.cc:4913
TypeArgumentsPtr GetInstanceTypeArguments(Thread *thread, const TypeArguments &type_arguments, bool canonicalize=true) const
Definition: object.cc:3524
void set_is_isolate_unsendable(bool value) const
Definition: object.cc:3133
void set_is_dynamically_extendable(bool value) const
Definition: object.cc:3159
void set_can_be_future(bool value) const
Definition: object.cc:3154
static void ForAllClosureFunctions(std::function< bool(const Function &)> callback)
ObjectPtr RawContext() const
Definition: object.h:12358
virtual bool CanonicalizeEquals(const Instance &other) const
Definition: object.cc:25861
static ClosurePtr New(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const Function &function, const Object &context, Heap::Space space=Heap::kNew)
Definition: object.cc:25942
FunctionTypePtr GetInstantiatedSignature(Zone *zone) const
Definition: object.cc:25984
TypeArgumentsPtr instantiator_type_arguments() const
Definition: object.h:12320
TypeArgumentsPtr delayed_type_arguments() const
Definition: object.h:12340
TypeArgumentsPtr function_type_arguments() const
Definition: object.h:12330
FunctionPtr function() const
Definition: object.h:12350
virtual void CanonicalizeFieldsLocked(Thread *thread) const
Definition: object.cc:25874
uword ComputeHash() const
Definition: object.cc:25909
static void NotifyAll(const char *name, uword base, uword prologue_offset, uword size, bool optimized, const CodeComments *comments)
static bool AreActive()
void DumpInlineIntervals(uword start)
void GetInlinedFunctionsAt(int32_t pc_offset, GrowableArray< const Function * > *function_stack, GrowableArray< TokenPosition > *token_positions)
void DumpSourcePositions(uword start)
static intptr_t UnroundedSize(CodeSourceMapPtr map)
Definition: object.h:6225
void set_is_force_optimized(bool value) const
Definition: object.cc:17654
void DisableDartCode() const
Definition: object.cc:18383
FunctionPtr function() const
Definition: object.h:7130
uword EntryPoint() const
Definition: object.h:6864
static CodePtr FindCode(uword pc, int64_t timestamp)
Definition: object.cc:18188
static bool ParseEntryKind(const char *str, EntryKind *out)
Definition: object.cc:17608
LocalVarDescriptorsPtr var_descriptors() const
Definition: object.h:7096
void Enable() const
Definition: object.h:7252
ArrayPtr deopt_info_array() const
Definition: object.h:6943
bool HasBreakpoint() const
Definition: object.cc:17732
uword Size() const
Definition: object.h:6903
void set_compressed_stackmaps(const CompressedStackMaps &maps) const
Definition: object.cc:17666
void SetStaticCallTargetCodeAt(uword pc, const Code &code) const
Definition: object.cc:17815
void SetPrologueOffset(intptr_t offset) const
Definition: object.cc:17925
PcDescriptorsPtr pc_descriptors() const
Definition: object.h:6927
void set_inlined_id_to_function(const Array &value) const
Definition: object.cc:17953
uword GetPcForDeoptId(intptr_t deopt_id, UntaggedPcDescriptors::Kind kind) const
Definition: object.cc:18262
bool is_optimized() const
Definition: object.h:6817
classid_t OwnerClassId() const
Definition: object.h:7140
CodeSourceMapPtr code_source_map() const
Definition: object.h:6933
intptr_t GetDeoptIdForOsr(uword pc) const
Definition: object.cc:18277
intptr_t GetPrologueOffset() const
Definition: object.cc:17934
void SetStubCallTargetCodeAt(uword pc, const Code &code) const
Definition: object.cc:17829
ObjectPoolPtr object_pool() const
Definition: object.h:6808
static CodePtr FindCodeUnsafe(uword pc)
Definition: object.cc:18220
ArrayPtr inlined_id_to_function() const
Definition: object.cc:17949
void set_num_variables(intptr_t num_variables) const
Definition: object.cc:17676
bool ContainsInstructionAt(uword addr) const
Definition: object.h:6915
const char * QualifiedName(const NameFormattingParams &params) const
Definition: object.cc:18344
bool IsDisabled() const
Definition: object.h:7257
static CodePtr FinalizeCodeAndNotify(const Function &function, FlowGraphCompiler *compiler, compiler::Assembler *assembler, PoolAttachment pool_attachment, bool optimized=false, CodeStatistics *stats=nullptr)
Definition: object.cc:17983
static uword PayloadStartOf(const CodePtr code)
Definition: object.h:6851
uint32_t Hash() const
Definition: object.cc:18295
PoolAttachment
Definition: object.h:6987
void DumpInlineIntervals() const
Definition: object.cc:18477
bool IsStubCode() const
Definition: object.cc:18357
LocalVarDescriptorsPtr GetLocalVarDescriptors() const
Definition: object.cc:17625
ObjectPtr owner() const
Definition: object.h:7135
void DumpSourcePositions(bool relative_addresses=false) const
Definition: object.cc:18489
ArrayPtr static_calls_target_table() const
Definition: object.h:6999
void Disassemble(DisassemblyFormatter *formatter=nullptr) const
Definition: object.cc:17849
const char * Name() const
Definition: object.cc:18313
void set_is_alive(bool value) const
Definition: object.cc:17658
static void NotifyCodeObservers(const Code &code, bool optimized)
Definition: object.cc:18141
bool IsTypeTestStubCode() const
Definition: object.cc:18367
bool IsAllocationStubCode() const
Definition: object.cc:18363
@ kSCallTableEntryLength
Definition: object.h:6984
@ kSCallTableFunctionTarget
Definition: object.h:6983
@ kSCallTableCodeOrTypeTarget
Definition: object.h:6982
ObjectPoolPtr GetObjectPool() const
Definition: object.cc:17723
static constexpr intptr_t kMaxElements
Definition: object.h:7156
void DisableStubCode(bool is_cls_parameterized) const
Definition: object.cc:18392
void set_static_calls_target_table(const Array &value) const
Definition: object.cc:17702
bool IsFunctionCode() const
Definition: object.cc:18374
static CodePtr FinalizeCode(FlowGraphCompiler *compiler, compiler::Assembler *assembler, PoolAttachment pool_attachment, bool optimized, CodeStatistics *stats)
Definition: object.cc:18018
TokenPosition GetTokenIndexOfPC(uword pc) const
Definition: object.cc:18250
intptr_t pointer_offsets_length() const
Definition: object.h:6813
InstructionsPtr active_instructions() const
Definition: object.h:6764
intptr_t num_variables() const
Definition: object.cc:17672
ObjectPtr return_address_metadata() const
Definition: object.h:7055
void set_is_discarded(bool value) const
Definition: object.cc:17662
FunctionPtr GetStaticCallTargetFunctionAt(uword pc) const
Definition: object.cc:17800
bool IsUnknownDartCode() const
Definition: object.h:7245
InstructionsPtr instructions() const
Definition: object.h:6774
void set_is_optimized(bool value) const
Definition: object.cc:17650
static const char * EntryKindToCString(EntryKind kind)
Definition: object.cc:17592
uword PayloadStart() const
Definition: object.h:6850
TypedDataPtr GetDeoptInfoAtPc(uword pc, ICData::DeoptReasonId *deopt_reason, uint32_t *deopt_flags) const
Definition: object.cc:17740
void set_deopt_info_array(const Array &array) const
Definition: object.cc:17693
void set_owner(const Object &owner) const
Definition: object.cc:17636
void GetInlinedFunctionsAtInstruction(intptr_t pc_offset, GrowableArray< const Function * > *functions, GrowableArray< TokenPosition > *token_positions) const
Definition: object.cc:18449
static CompilerState & Current()
static bool IsBackgroundCompilation()
Definition: compiler.cc:298
static ErrorPtr EnsureUnoptimizedCode(Thread *thread, const Function &function)
Definition: compiler.cc:854
static ObjectPtr CompileFunction(Thread *thread, const Function &function)
Definition: compiler.cc:824
static ErrorPtr CompileAllFunctions(const Class &cls)
Definition: compiler.cc:948
static void ComputeLocalVarDescriptors(const Code &code)
Definition: compiler.cc:909
static intptr_t InstanceSize()
Definition: object.h:6299
static intptr_t UnroundedSize(CompressedStackMapsPtr maps)
Definition: object.h:6293
static ConstMapPtr NewUninitialized(Heap::Space space=Heap::kNew)
Definition: object.cc:25241
static ConstMapPtr NewDefault(Heap::Space space=Heap::kNew)
Definition: object.cc:25235
static ConstSetPtr NewUninitialized(Heap::Space space=Heap::kNew)
Definition: object.cc:25289
static ConstSetPtr NewDefault(Heap::Space space=Heap::kNew)
Definition: object.cc:25283
intptr_t KernelOffsetAt(intptr_t scope_index) const
Definition: object.cc:18702
StringPtr NameAt(intptr_t scope_index) const
Definition: object.cc:18618
AbstractTypePtr TypeAt(intptr_t scope_index) const
Definition: object.cc:18667
intptr_t LateInitOffsetAt(intptr_t scope_index) const
Definition: object.cc:18658
static intptr_t InstanceSize()
Definition: object.h:7535
intptr_t ContextLevelAt(intptr_t scope_index) const
Definition: object.cc:18693
void SetCidAt(intptr_t scope_index, intptr_t cid) const
Definition: object.cc:18680
TokenPosition DeclarationTokenIndexAt(intptr_t scope_index) const
Definition: object.cc:18605
intptr_t CidAt(intptr_t scope_index) const
Definition: object.cc:18676
void ClearFlagsAt(intptr_t scope_index) const
Definition: object.cc:18626
intptr_t ContextIndexAt(intptr_t scope_index) const
Definition: object.cc:18684
TokenPosition TokenIndexAt(intptr_t scope_index) const
Definition: object.cc:18595
void SetContextLevelAt(intptr_t scope_index, intptr_t context_level) const
Definition: object.cc:18697
void SetTypeAt(intptr_t scope_index, const AbstractType &type) const
Definition: object.cc:18671
void SetContextIndexAt(intptr_t scope_index, intptr_t context_index) const
Definition: object.cc:18688
void SetTokenIndexAt(intptr_t scope_index, TokenPosition token_pos) const
Definition: object.cc:18600
static constexpr intptr_t kMaxElements
Definition: object.h:7526
void SetLateInitOffsetAt(intptr_t scope_index, intptr_t late_init_offset) const
Definition: object.cc:18662
void SetNameAt(intptr_t scope_index, const String &name) const
Definition: object.cc:18622
intptr_t num_variables() const
Definition: object.h:7484
void SetKernelOffsetAt(intptr_t scope_index, intptr_t kernel_offset) const
Definition: object.cc:18706
void SetDeclarationTokenIndexAt(intptr_t scope_index, TokenPosition declaration_token_pos) const
Definition: object.cc:18611
static ContextScopePtr New(intptr_t num_variables, bool is_implicit)
Definition: object.cc:18577
void Dump(int indent=0) const
Definition: object.cc:18548
intptr_t GetLevel() const
Definition: object.cc:18501
static ContextPtr New(intptr_t num_variables, Heap::Space space=Heap::kNew)
Definition: object.cc:18511
static bool IsValidLength(intptr_t len)
Definition: object.h:7444
ObjectPtr At(intptr_t context_index) const
Definition: object.h:7422
intptr_t num_variables() const
Definition: object.h:7414
ContextPtr parent() const
Definition: object.h:7406
static ObjectPtr InvokeNoSuchMethod(Thread *thread, const Instance &receiver, const String &target_name, const Array &arguments, const Array &arguments_descriptor)
Definition: dart_entry.cc:307
static ObjectPtr InvokeClosure(Thread *thread, const Array &arguments)
Definition: dart_entry.cc:282
static ObjectPtr InvokeFunction(const Function &function, const Array &arguments)
Definition: dart_entry.cc:31
static ObjectPtr HashCode(const Instance &receiver)
Definition: dart_entry.cc:631
static ObjectPtr Equals(const Instance &left, const Instance &right)
Definition: dart_entry.cc:647
static ObjectPtr ToString(const Instance &receiver)
Definition: dart_entry.cc:615
static bool IsReadOnlyHandle(uword address)
Definition: dart.cc:1147
static IsolateGroup * vm_isolate_group()
Definition: dart.h:69
static Dart_DwarfStackTraceFootnoteCallback dwarf_stacktrace_footnote_callback()
Definition: dart.h:145
static Isolate * vm_isolate()
Definition: dart.h:68
static Snapshot::Kind vm_snapshot_kind()
Definition: dart.h:95
static const char * Name()
Definition: object.cc:25058
static uword Hash(const Object &obj)
Definition: object.cc:25068
static bool IsMatch(const Object &a, const Object &b)
Definition: object.cc:25061
static bool ReportStats()
Definition: object.cc:25059
static constexpr intptr_t kNone
Definition: deopt_id.h:27
static void GetEntry(const Array &table, intptr_t index, Smi *offset, TypedData *info, Smi *reason_and_flags)
static intptr_t GetLength(const Array &table)
friend class ClassDictionaryIterator
Definition: object.h:5061
DictionaryIterator(const Library &library)
Definition: object.cc:13502
bool HasNext() const
Definition: object.h:5049
static void Disassemble(uword start, uword end, DisassemblyFormatter *formatter, const Code &code, const CodeComments *comments=nullptr)
static intptr_t value_offset()
Definition: object.h:10139
virtual bool OperatorEquals(const Instance &other) const
Definition: object.cc:23378
virtual bool CanonicalizeEquals(const Instance &other) const
Definition: object.cc:23388
bool BitwiseEqualsToDouble(double value) const
Definition: object.cc:23370
static DoublePtr New(double d, Heap::Space space=Heap::kNew)
Definition: object.cc:23402
virtual uint32_t CanonicalizeHash() const
Definition: object.cc:23398
double value() const
Definition: object.h:10115
static DoublePtr NewCanonical(double d)
Definition: object.cc:23418
static DynamicLibraryPtr New(void *handle, bool canBeClosed, Heap::Space space=Heap::kNew)
Definition: object.cc:25749
static intptr_t InstanceSize()
Definition: object.h:11926
virtual const char * ToErrorCString() const
Definition: object.cc:19780
static intptr_t InstanceSize()
Definition: object.h:6606
static DART_NORETURN void ThrowByType(ExceptionType type, const Array &arguments)
Definition: exceptions.cc:1052
static DART_NORETURN void ThrowOOM()
Definition: exceptions.cc:1066
static DART_NORETURN void ThrowLateFieldAssignedDuringInitialization(const String &name)
Definition: exceptions.cc:1124
@ kCyclicInitializationError
Definition: exceptions.h:70
static DART_NORETURN void ThrowLateFieldNotInitialized(const String &name)
Definition: exceptions.cc:1118
static DART_NORETURN void ThrowCompileTimeError(const LanguageError &error)
Definition: exceptions.cc:1112
static DART_NORETURN void PropagateError(const Error &error)
Definition: exceptions.cc:1003
static intptr_t InstanceSize()
Definition: object.h:11740
static ExternalTypedDataPtr New(intptr_t class_id, uint8_t *data, intptr_t len, Heap::Space space=Heap::kNew, bool perform_eager_msan_initialization_check=true)
Definition: object.cc:25626
static intptr_t MaxElements(intptr_t class_id)
Definition: object.h:11744
FinalizablePersistentHandle * AddFinalizer(void *peer, Dart_HandleFinalizer callback, intptr_t external_size) const
Definition: object.cc:25619
static ExternalTypedDataPtr NewFinalizeWithFree(uint8_t *data, intptr_t len)
Definition: object.cc:25649
FieldDependentArray(const Field &field)
Definition: object.cc:12229
virtual void UpdateArrayTo(const WeakArray &value)
Definition: object.cc:12233
virtual void ReportSwitchingCode(const Code &code)
Definition: object.cc:12245
virtual void ReportDeoptimization(const Code &code)
Definition: object.cc:12237
FieldGuardUpdater(const Field *field, const Object &value)
Definition: object.cc:12997
void SetAt(intptr_t index, ObjectPtr raw_instance, bool concurrent_use=false)
Definition: field_table.h:76
ObjectPtr At(intptr_t index, bool concurrent_use=false) const
Definition: field_table.h:62
void SetFieldType(const AbstractType &value) const
Definition: object.cc:11943
static StringPtr GetterSymbol(const String &field_name)
Definition: object.cc:11796
DART_WARN_UNUSED_RESULT ErrorPtr InitializeInstance(const Instance &instance) const
Definition: object.cc:12339
bool is_final() const
Definition: object.h:4442
InstancePtr SetterClosure() const
Definition: object.cc:12210
static StringPtr NameFromSetter(const String &setter_name)
Definition: object.cc:11821
const char * UserVisibleNameCString() const
Definition: object.cc:12082
ClassPtr Owner() const
Definition: object.cc:11860
ObjectPtr StaticConstFieldValue() const
Definition: object.cc:12419
bool is_unboxed() const
Definition: object.h:4712
void InheritKernelOffsetFrom(const Field &src) const
Definition: object.cc:11898
StringPtr UserVisibleName() const
Definition: object.cc:12091
static bool IsInitName(const String &function_name)
Definition: object.cc:11839
bool IsOriginal() const
Definition: object.h:4418
bool is_nullable() const
Definition: object.cc:11770
FieldPtr CloneFromOriginal() const
Definition: object.cc:11735
intptr_t KernelLibraryOffset() const
Definition: object.cc:11912
uint32_t Hash() const
Definition: object.cc:11894
void SetOriginal(const Field &value) const
Definition: object.cc:11786
void RegisterDependentCode(const Code &code) const
Definition: object.cc:12260
FieldPtr Original() const
Definition: object.cc:11739
ScriptPtr Script() const
Definition: object.cc:11871
bool HasInitializerFunction() const
Definition: object.cc:12335
bool IsUninitialized() const
Definition: object.cc:12289
ObjectPtr RawOwner() const
Definition: object.cc:11849
static bool IsGetterName(const String &function_name)
Definition: object.cc:11831
void SetInitializerFunction(const Function &initializer) const
Definition: object.cc:12319
bool is_reflectable() const
Definition: object.h:4454
InstancePtr GetterClosure() const
Definition: object.cc:12206
@ kUnknownFixedLength
Definition: object.h:4728
@ kUnknownLengthOffset
Definition: object.h:4727
@ kNoFixedLength
Definition: object.h:4729
DART_WARN_UNUSED_RESULT ErrorPtr InitializeStatic() const
Definition: object.cc:12377
void set_static_type_exactness_state(StaticTypeExactnessState state) const
Definition: object.h:4639
bool is_static() const
Definition: object.h:4440
static bool IsSetterName(const String &function_name)
Definition: object.cc:11835
void InitializeGuardedListLengthInObjectOffset(bool unsafe=false) const
Definition: object.cc:12540
static StringPtr NameFromInit(const String &init_name)
Definition: object.cc:11826
StaticTypeExactnessState static_type_exactness_state() const
Definition: object.h:4633
FieldPtr Clone(const Field &original) const
Definition: object.cc:12054
bool NeedsSetter() const
Definition: object.cc:12123
intptr_t guarded_list_length_in_object_offset() const
Definition: object.cc:12110
StringPtr name() const
Definition: object.h:4430
void set_guarded_list_length_in_object_offset_unsafe(intptr_t offset) const
Definition: object.cc:12114
static StringPtr LookupSetterSymbol(const String &field_name)
Definition: object.cc:11812
DART_WARN_UNUSED_RESULT ObjectPtr EvaluateInitializer() const
Definition: object.cc:12465
bool IsConsistentWith(const Field &field) const
Definition: object.cc:12279
const char * GuardedPropertiesAsCString() const
Definition: object.cc:12504
void set_guarded_cid(intptr_t cid) const
Definition: object.h:4660
StringPtr InitializingExpression() const
Definition: object.cc:12077
void SetStaticConstFieldValue(const Instance &value, bool assert_initializing_store=true) const
Definition: object.cc:12448
void set_guarded_list_length_in_object_offset(intptr_t offset) const
Definition: object.h:4688
ObjectPtr StaticValue() const
Definition: object.h:13279
static StringPtr LookupGetterSymbol(const String &field_name)
Definition: object.cc:11800
TypedDataViewPtr KernelLibrary() const
Definition: object.cc:11907
intptr_t guarded_cid() const
Definition: object.cc:11749
static FieldPtr NewTopLevel(const String &name, bool is_final, bool is_const, bool is_late, const Object &owner, TokenPosition token_pos, TokenPosition end_token_pos)
Definition: object.cc:12039
void set_is_nullable(bool val) const
Definition: object.h:4753
FunctionPtr EnsureInitializerFunction() const
Definition: object.cc:12297
void set_dependent_code(const WeakArray &array) const
Definition: object.cc:12220
void DeoptimizeDependentCode(bool are_mutators_stopped=false) const
Definition: object.cc:12268
intptr_t guarded_list_length() const
Definition: object.cc:12101
InstancePtr AccessorClosure(bool make_setter) const
Definition: object.cc:12178
static StringPtr GetterName(const String &field_name)
Definition: object.cc:11792
AbstractTypePtr type() const
Definition: object.h:4550
static StringPtr SetterName(const String &field_name)
Definition: object.cc:11804
bool NeedsGetter() const
Definition: object.cc:12145
void SetStaticValue(const Object &value) const
Definition: object.cc:12770
void SetFieldTypeSafe(const AbstractType &value) const
Definition: object.cc:11934
WeakArrayPtr dependent_code() const
Definition: object.cc:12214
DART_WARN_UNUSED_RESULT ErrorPtr VerifyEntryPoint(EntryPointPragma kind) const
Definition: object.cc:27363
static StringPtr NameFromGetter(const String &getter_name)
Definition: object.cc:11816
intptr_t KernelLibraryIndex() const
Definition: object.cc:11919
void set_guarded_list_length_unsafe(intptr_t list_length) const
Definition: object.cc:12105
void RecordStore(const Object &value) const
Definition: object.cc:13027
void set_guarded_list_length(intptr_t list_length) const
Definition: object.h:4678
static StringPtr SetterSymbol(const String &field_name)
Definition: object.cc:11808
TokenPosition token_pos() const
Definition: object.h:4589
KernelProgramInfoPtr KernelProgramInfo() const
Definition: object.cc:11885
int32_t SourceFingerprint() const
Definition: object.cc:12068
static FinalizablePersistentHandle * New(IsolateGroup *isolate_group, const Object &object, void *peer, Dart_HandleFinalizer callback, intptr_t external_size, bool auto_delete)
void VisitObject(ObjectPtr obj)
Definition: object.cc:1420
ObjectPtr token() const
Definition: object.h:12984
void set_token(const Object &value) const
Definition: object.h:12985
ObjectPtr value() const
Definition: object.h:12970
void set_finalizer(const FinalizerBase &value) const
Definition: object.cc:26873
void set_external_size(intptr_t value) const
Definition: object.h:13005
intptr_t external_size() const
Definition: object.h:13004
static FinalizerEntryPtr New(const FinalizerBase &finalizer, Heap::Space space=Heap::kNew)
Definition: object.cc:26862
static intptr_t type_arguments_offset()
Definition: object.h:13072
static FinalizerPtr New(Heap::Space space=Heap::kNew)
Definition: object.cc:26782
static Float32x4Ptr New(float value0, float value1, float value2, float value3, Heap::Space space=Heap::kNew)
Definition: object.cc:25307
void set_w(float w) const
Definition: object.cc:25352
float x() const
Definition: object.cc:25356
void set_y(float y) const
Definition: object.cc:25344
float z() const
Definition: object.cc:25364
virtual uint32_t CanonicalizeHash() const
Definition: object.cc:25377
virtual bool CanonicalizeEquals(const Instance &other) const
Definition: object.cc:25372
void set_z(float z) const
Definition: object.cc:25348
simd128_value_t value() const
Definition: object.cc:25330
void set_x(float x) const
Definition: object.cc:25340
void set_value(simd128_value_t value) const
Definition: object.cc:25335
float w() const
Definition: object.cc:25368
float y() const
Definition: object.cc:25360
void set_y(double y) const
Definition: object.cc:25504
void set_value(simd128_value_t value) const
Definition: object.cc:25512
virtual uint32_t CanonicalizeHash() const
Definition: object.cc:25521
simd128_value_t value() const
Definition: object.cc:25508
static Float64x2Ptr New(double value0, double value1, Heap::Space space=Heap::kNew)
Definition: object.cc:25475
void set_x(double x) const
Definition: object.cc:25500
double y() const
Definition: object.cc:25496
virtual bool CanonicalizeEquals(const Instance &other) const
Definition: object.cc:25516
double x() const
Definition: object.cc:25492
static FreeListElement * AsElementNew(uword addr, intptr_t size)
Definition: freelist.cc:43
static FreeListElement * AsElement(uword addr, intptr_t size)
Definition: freelist.cc:16
bool Matches(const Function &function) const
Definition: object.cc:3227
intptr_t Hash() const
Definition: object.cc:3235
FunctionName(const String &name, String *tmp_string)
Definition: object.cc:3225
FunctionTypeMapping(Zone *zone, FunctionTypeMapping **mapping, const FunctionType &from, const FunctionType &to)
Definition: object.cc:6509
TypeParameterPtr MapTypeParameter(const TypeParameter &type_param) const
Definition: object.cc:6531
const FunctionType * Find(const Object &from) const
Definition: object.cc:6518
bool ContainsOwnersOfTypeParameters(const TypeParameter &p1, const TypeParameter &p2) const
Definition: object.cc:6542
void set_packed_parameter_counts(uint32_t packed_parameter_counts) const
Definition: object.cc:11547
static intptr_t NumOptionalParametersOf(FunctionTypePtr ptr)
Definition: object.h:9621
TypeParametersPtr type_parameters() const
Definition: object.h:9727
void SetIsRequiredAt(intptr_t index) const
Definition: object.cc:8771
void set_result_type(const AbstractType &value) const
Definition: object.cc:8575
intptr_t NumOptionalNamedParameters() const
Definition: object.h:9641
intptr_t num_implicit_parameters() const
Definition: object.h:9585
void SetParameterTypeAt(intptr_t index, const AbstractType &value) const
Definition: object.cc:8590
AbstractTypePtr ParameterTypeAt(intptr_t index) const
Definition: object.cc:8585
bool ContainsHandles() const
Definition: object.cc:8292
TypeParameterPtr TypeParameterAt(intptr_t index, Nullability nullability=Nullability::kNonNullable) const
Definition: object.cc:8560
const char * ToUserVisibleCString() const
Definition: object.cc:11661
bool HasRequiredNamedParameters() const
Definition: object.cc:8808
StringPtr ParameterNameAt(intptr_t index) const
Definition: object.cc:8645
void set_num_fixed_parameters(intptr_t value) const
Definition: object.cc:11608
void set_num_implicit_parameters(intptr_t value) const
Definition: object.cc:11557
AbstractTypePtr result_type() const
Definition: object.h:9670
intptr_t NumOptionalPositionalParameters() const
Definition: object.h:9634
virtual void PrintName(NameVisibility visibility, BaseTextBuffer *printer) const
Definition: object.cc:22588
virtual AbstractTypePtr UpdateFunctionTypes(intptr_t num_parent_type_args_adjustment, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping) const
Definition: object.cc:9903
bool IsRequiredAt(intptr_t index) const
Definition: object.cc:8755
bool IsSubtypeOf(const FunctionType &other, Heap::Space space, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition: object.cc:10089
void PrintParameters(Thread *thread, Zone *zone, NameVisibility name_visibility, BaseTextBuffer *printer) const
Definition: object.cc:10642
void set_named_parameter_names(const Array &value) const
Definition: object.cc:8672
ArrayPtr named_parameter_names() const
Definition: object.h:9689
static FunctionTypePtr Clone(const FunctionType &orig, Heap::Space space)
Definition: object.cc:11650
void SetParameterNameAt(intptr_t index, const String &value) const
Definition: object.cc:8656
void CreateNameArrayIncludingFlags(Heap::Space space=Heap::kOld) const
Definition: object.cc:8692
static intptr_t NumFixedParametersOf(FunctionTypePtr ptr)
Definition: object.h:9591
virtual AbstractTypePtr InstantiateFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping=nullptr, intptr_t num_parent_type_args_adjustment=0) const
Definition: object.cc:9783
intptr_t NumParentTypeArguments() const
Definition: object.h:9570
void set_packed_type_parameter_counts(uint16_t packed_parameter_counts) const
Definition: object.cc:11552
void set_parameter_types(const Array &value) const
Definition: object.cc:8597
virtual void EnumerateURIs(URIs *uris) const
Definition: object.cc:22574
static intptr_t NumParentTypeArgumentsOf(FunctionTypePtr ptr)
Definition: object.h:9565
virtual uword ComputeHash() const
Definition: object.cc:22354
bool HasSameTypeParametersAndBounds(const FunctionType &other, TypeEquality kind, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition: object.cc:10001
static intptr_t NumTypeArgumentsOf(FunctionTypePtr ptr)
Definition: object.h:9580
bool HasOptionalNamedParameters() const
Definition: object.h:9609
bool HasOptionalParameters() const
Definition: object.h:9603
intptr_t GetRequiredFlagIndex(intptr_t index, intptr_t *flag_mask) const
Definition: object.cc:8715
void FinalizeNameArray() const
Definition: object.cc:8786
bool HasOptionalPositionalParameters() const
Definition: object.h:9617
void SetNumOptionalParameters(intptr_t num_optional_parameters, bool are_optional_positional) const
Definition: object.cc:11617
void SetNumParentTypeArguments(intptr_t value) const
Definition: object.cc:8835
static intptr_t NumParametersOf(FunctionTypePtr ptr)
Definition: object.h:9645
bool IsContravariantParameter(intptr_t parameter_position, const FunctionType &other, intptr_t other_parameter_position, Heap::Space space, FunctionTypeMapping *function_type_equivalence) const
Definition: object.cc:9984
virtual bool IsEquivalent(const Instance &other, TypeEquality kind, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition: object.cc:22109
intptr_t NumParameters() const
Definition: object.h:9648
static intptr_t NumOptionalNamedParametersOf(FunctionTypePtr ptr)
Definition: object.h:9638
StringPtr ToUserVisibleString() const
Definition: object.cc:11668
void Print(NameVisibility name_visibility, BaseTextBuffer *printer) const
Definition: object.cc:10755
static bool HasOptionalNamedParameters(FunctionTypePtr ptr)
Definition: object.h:9605
static bool IsGeneric(FunctionTypePtr ptr)
Definition: object.h:9744
intptr_t num_fixed_parameters() const
Definition: object.h:9595
static FunctionTypePtr New(intptr_t num_parent_type_arguments=0, Nullability nullability=Nullability::kNonNullable, Heap::Space space=Heap::kOld)
Definition: object.cc:11631
bool IsGeneric() const
Definition: object.h:9747
static intptr_t NumTypeParametersOf(FunctionTypePtr ptr)
Definition: object.h:9574
intptr_t NumTypeParameters() const
Definition: object.h:9578
virtual AbstractTypePtr Canonicalize(Thread *thread) const
Definition: object.cc:22466
FunctionTypePtr ToNullability(Nullability value, Heap::Space space) const
Definition: object.cc:21944
virtual bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition: object.cc:10792
uint16_t packed_type_parameter_counts() const
Definition: object.h:9657
void SetTypeParameters(const TypeParameters &value) const
Definition: object.cc:8826
ArrayPtr parameter_types() const
Definition: object.h:9678
uint32_t packed_parameter_counts() const
Definition: object.h:9650
static intptr_t NumOptionalPositionalParametersOf(FunctionTypePtr ptr)
Definition: object.h:9631
void SetIsOptimizable(bool value) const
Definition: object.cc:8948
intptr_t KernelLibraryIndex() const
Definition: object.cc:10948
void set_owner(const Object &value) const
Definition: object.cc:8464
FunctionPtr GetDynamicInvocationForwarder(const String &mangled_name) const
Definition: object.cc:4248
void set_context_scope(const ContextScope &value) const
Definition: object.cc:8057
StringPtr ParameterNameAt(intptr_t index) const
Definition: object.cc:8602
void set_accessor_field(const Field &value) const
Definition: object.cc:8157
intptr_t MaxNumberOfParametersInRegisters(Zone *zone) const
Definition: object.cc:27248
FunctionPtr CreateMethodExtractor(const String &getter_name) const
Definition: object.cc:3967
bool HasOptionalNamedParameters() const
Definition: object.cc:8862
FunctionPtr ForwardingTarget() const
Definition: object.cc:8423
InstantiationMode default_type_arguments_instantiation_mode() const
Definition: object.cc:8188
void SetFfiCallbackTarget(const Function &target) const
Definition: object.cc:8384
bool IsImplicitConstructor() const
Definition: object.cc:10218
void SetFfiCallbackKind(FfiCallbackKind value) const
Definition: object.cc:8412
StringPtr GetSource() const
Definition: object.cc:11119
bool PrologueNeedsArgumentsDescriptor() const
Definition: object.cc:11437
const char * QualifiedUserVisibleNameCString() const
Definition: object.cc:11030
bool IsRequiredAt(intptr_t index) const
Definition: object.cc:8741
intptr_t NumParentTypeArguments() const
Definition: object.cc:8850
StringPtr UserVisibleName() const
Definition: object.cc:10999
ObjectPtr DoArgumentTypesMatch(const Array &args, const ArgumentsDescriptor &arg_names) const
Definition: object.cc:9505
void AssignFfiCallbackId(int32_t callback_id) const
Definition: object.cc:8344
ICDataPtr FindICData(intptr_t deopt_id) const
Definition: object.cc:11280
void set_default_type_arguments_instantiation_mode(InstantiationMode value) const
Definition: object.cc:8196
bool IsImplicitInstanceClosureFunction() const
Definition: object.h:3924
FfiCallbackKind GetFfiCallbackKind() const
Definition: object.cc:8405
intptr_t SourceSize() const
Definition: object.cc:11453
void SetFfiCSignature(const FunctionType &sig) const
Definition: object.cc:8257
bool IsImplicitClosureFunction() const
Definition: object.h:3903
bool IsPrivate() const
Definition: object.cc:10837
bool IsPreferInline() const
Definition: object.cc:9035
bool is_old_native() const
Definition: object.cc:8538
FunctionPtr CreateDynamicInvocationForwarder(const String &mangled_name) const
Definition: object.cc:4210
bool is_sticky_specialization() const
Definition: object.cc:8486
void set_modifier(UntaggedFunction::AsyncModifier value) const
Definition: object.cc:8891
static bool IsDynamicInvocationForwarderName(const String &name)
Definition: object.cc:4190
bool IsNoSuchMethodDispatcher() const
Definition: object.h:3288
bool SafeToClosurize() const
Definition: object.cc:10368
int32_t FfiCallbackId() const
Definition: object.cc:8332
bool IsFfiCallClosure() const
Definition: object.cc:9067
StringPtr native_name() const
Definition: object.cc:8506
static bool is_visible(FunctionPtr f)
Definition: object.h:4185
void set_end_token_pos(TokenPosition value) const
Definition: object.h:3462
void EnsureHasCompiledUnoptimizedCode() const
Definition: object.cc:7968
bool IsDynamicClosureCallDispatcher() const
Definition: object.h:3308
void InheritKernelOffsetFrom(const Function &src) const
Definition: object.cc:10851
bool IsIdempotent() const
Definition: object.cc:9042
intptr_t NumOptionalParameters() const
Definition: object.cc:8868
bool IsCachableIdempotent() const
Definition: object.cc:9055
bool IsTypedDataViewFactory() const
Definition: object.cc:8957
bool HasOptionalParameters() const
Definition: object.cc:8859
void SetKernelLibraryAndEvalScript(const Script &script, const class KernelProgramInfo &kernel_program_info, intptr_t index) const
Definition: object.cc:10867
void CreateNameArray(Heap::Space space=Heap::kOld) const
Definition: object.cc:8677
InstancePtr GetFfiCallClosurePragmaValue() const
Definition: object.cc:9074
bool FfiCSignatureContainsHandles() const
Definition: object.cc:8287
void set_awaiter_link(AwaiterLink link) const
Definition: object.cc:8077
void SetForwardingTarget(const Function &target) const
Definition: object.cc:8428
ObjectPtr RawOwner() const
Definition: object.h:3083
const char * ToFullyQualifiedCString() const
Definition: object.cc:9762
InstancePtr FfiCallbackExceptionalReturn() const
Definition: object.cc:8391
bool HasCode() const
Definition: object.cc:7936
bool CanBeInlined() const
Definition: object.cc:9199
static const char * KindToCString(UntaggedFunction::Kind kind)
Definition: object.cc:8419
StringPtr UserVisibleSignature() const
Definition: object.cc:10629
FunctionPtr parent_function() const
Definition: object.cc:8167
ClosurePtr ImplicitInstanceClosure(const Instance &receiver) const
Definition: object.cc:10722
static FunctionPtr New(const FunctionType &signature, const String &name, UntaggedFunction::Kind kind, bool is_static, bool is_const, bool is_abstract, bool is_external, bool is_native, const Object &owner, TokenPosition token_pos, Heap::Space space=Heap::kOld)
Definition: object.cc:10243
StringPtr InternalSignature() const
Definition: object.cc:10616
intptr_t NumTypeArguments() const
Definition: object.cc:8853
virtual uword Hash() const
Definition: object.cc:7879
intptr_t string_specialization_cid() const
Definition: object.cc:8480
void set_token_pos(TokenPosition value) const
Definition: object.cc:8901
StringPtr name() const
Definition: object.h:2992
bool AreValidArguments(intptr_t num_type_arguments, intptr_t num_arguments, const Array &argument_names, String *error_message) const
Definition: object.cc:9323
TokenPosition token_pos() const
Definition: object.h:3446
static StringPtr DemangleDynamicInvocationForwarderName(const String &name)
Definition: object.cc:4198
void SetParameterNameAt(intptr_t index, const String &value) const
Definition: object.cc:8623
intptr_t KernelLibraryOffset() const
Definition: object.cc:10941
bool AreValidArgumentCounts(intptr_t num_type_arguments, intptr_t num_arguments, intptr_t num_named_arguments, String *error_message) const
Definition: object.cc:9250
DART_WARN_UNUSED_RESULT ErrorPtr VerifyCallEntryPoint() const
Definition: object.cc:27310
bool ForceOptimize() const
Definition: object.cc:9017
void ClearCodeSafe() const
Definition: object.cc:7958
bool HasRequiredNamedParameters() const
Definition: object.cc:8731
DART_WARN_UNUSED_RESULT ErrorPtr VerifyClosurizedEntryPoint() const
Definition: object.cc:27344
void PrintName(const NameFormattingParams &params, BaseTextBuffer *printer) const
Definition: object.cc:11109
void DropUncompiledImplicitClosureFunction() const
Definition: object.cc:10607
FunctionPtr ImplicitClosureFunction() const
Definition: object.cc:10385
const char * QualifiedScrubbedNameCString() const
Definition: object.cc:11016
bool HasOptimizedCode() const
Definition: object.cc:10974
int32_t SourceFingerprint() const
Definition: object.cc:11169
void set_unoptimized_code(const Code &value) const
Definition: object.cc:8038
intptr_t NumOptionalNamedParameters() const
Definition: object.cc:8874
bool CheckSourceFingerprint(int32_t fp, const char *kind=nullptr) const
Definition: object.cc:11304
void set_extracted_method_closure(const Function &function) const
Definition: object.cc:8127
void set_native_name(const String &name) const
Definition: object.cc:8513
static FunctionPtr NewClosureFunctionWithKind(UntaggedFunction::Kind kind, const String &name, const Function &parent, bool is_static, TokenPosition token_pos, const Object &owner)
Definition: object.cc:10324
intptr_t NumOptionalPositionalParameters() const
Definition: object.cc:8871
bool IsOptimizable() const
Definition: object.cc:8930
FunctionPtr FfiCallbackTarget() const
Definition: object.cc:8377
void SetInstructions(const Code &value) const
Definition: object.cc:7909
intptr_t NumTypeParameters() const
Definition: object.cc:8847
void RestoreICDataMap(ZoneGrowableArray< const ICData * > *deopt_id_to_ic_data, bool clone_ic_data) const
Definition: object.cc:11217
void SetDeoptReasonForAll(intptr_t deopt_id, ICData::DeoptReasonId reason)
Definition: object.cc:11292
void SwitchToUnoptimizedCode() const
Definition: object.cc:7982
bool HasOptionalPositionalParameters() const
Definition: object.cc:8865
ScriptPtr script() const
Definition: object.cc:10881
FunctionPtr GetOutermostFunction() const
Definition: object.cc:8207
void SaveICDataMap(const ZoneGrowableArray< const ICData * > &deopt_id_to_ic_data, const Array &edge_counters_array, const Array &coverage_array) const
Definition: object.cc:11178
StringPtr QualifiedScrubbedName() const
Definition: object.cc:11009
void SetFfiCallbackExceptionalReturn(const Instance &value) const
Definition: object.cc:8398
bool HasInstantiatedSignature(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition: object.cc:10785
bool IsInvokeFieldDispatcher() const
Definition: object.h:3296
ArrayPtr ic_data_array() const
Definition: object.cc:11272
void reset_unboxed_parameters_and_return() const
Definition: object.h:3709
void InstallOptimizedCode(const Code &code) const
Definition: object.cc:7899
void set_saved_args_desc(const Array &array) const
Definition: object.cc:8142
static StringPtr CreateDynamicInvocationForwarderName(const String &name)
Definition: object.cc:4205
FieldPtr accessor_field() const
Definition: object.cc:8149
bool IsNonImplicitClosureFunction() const
Definition: object.h:3911
bool HasSavedArgumentsDescriptor() const
Definition: object.h:3273
TypeArgumentsPtr DefaultTypeArguments(Zone *zone) const
Definition: object.cc:8181
const char * NameCString(NameVisibility name_visibility) const
Definition: object.cc:10978
bool IsImplicitStaticClosureFunction() const
Definition: object.h:3917
bool IsMethodExtractor() const
Definition: object.h:3284
intptr_t NumImplicitParameters() const
Definition: object.cc:9228
bool is_ffi_native() const
Definition: object.cc:8542
ArrayPtr GetCoverageArray() const
Definition: object.cc:11260
static FunctionPtr NewClosureFunction(const String &name, const Function &parent, TokenPosition token_pos)
Definition: object.cc:10348
bool HasBreakpoint() const
Definition: object.cc:7890
bool MayHaveUncheckedEntryPoint() const
Definition: object.cc:11448
FunctionPtr extracted_method_closure() const
Definition: object.cc:8120
TypedDataViewPtr KernelLibrary() const
Definition: object.cc:10936
bool IsGeneric() const
Definition: object.cc:8844
void set_packed_fields(uint32_t packed_fields) const
Definition: object.cc:8922
CodePtr EnsureHasCode() const
Definition: object.cc:11338
static FunctionPtr NewImplicitClosureFunction(const String &name, const Function &parent, TokenPosition token_pos)
Definition: object.cc:10358
bool IsUnmodifiableTypedDataViewFactory() const
Definition: object.cc:8980
KernelProgramInfoPtr KernelProgramInfo() const
Definition: object.cc:10919
void ClearCode() const
Definition: object.cc:7948
void SetSignature(const FunctionType &value) const
Definition: object.cc:8546
bool FfiCSignatureReturnsStruct() const
Definition: object.cc:8306
AbstractTypePtr ParameterTypeAt(intptr_t index) const
Definition: object.cc:8580
AwaiterLink awaiter_link() const
Definition: object.cc:8067
ClassPtr Owner() const
Definition: object.cc:10841
intptr_t num_fixed_parameters() const
Definition: object.cc:8856
bool FfiIsLeaf() const
Definition: object.cc:8355
bool NeedsMonomorphicCheckedEntry(Zone *zone) const
Definition: object.cc:11364
const char * UserVisibleNameCString() const
Definition: object.cc:10990
const char * ToQualifiedCString() const
Definition: object.cc:9776
StringPtr QualifiedUserVisibleName() const
Definition: object.cc:11023
RegExpPtr regexp() const
Definition: object.cc:8469
void SwitchToLazyCompiledUnoptimizedCode() const
Definition: object.cc:8007
bool HasDynamicCallers(Zone *zone) const
Definition: object.cc:11409
void SetInstructionsSafe(const Code &value) const
Definition: object.cc:7920
UntaggedFunction::Kind kind() const
Definition: object.h:3349
intptr_t NumParameters() const
Definition: object.cc:8877
FunctionPtr GetMethodExtractor(const String &getter_name) const
Definition: object.cc:4013
bool RecognizedKindForceOptimize() const
Definition: object.cc:9086
ClosurePtr ImplicitStaticClosure() const
Definition: object.cc:10700
bool IsConstructor() const
Definition: object.h:3360
ArrayPtr saved_args_desc() const
Definition: object.cc:8133
ContextScopePtr context_scope() const
Definition: object.cc:8048
bool HasImplicitClosureFunction() const
Definition: object.h:3326
InstancePtr GetNativeAnnotation() const
Definition: object.cc:8520
void AttachCode(const Code &value) const
Definition: object.cc:7927
FunctionPtr ImplicitClosureTarget(Zone *zone) const
Definition: object.cc:10734
void SetRegExpData(const RegExp &regexp, intptr_t string_specialization_cid, bool sticky) const
Definition: object.cc:8492
const char * ToLibNamePrefixedQualifiedCString() const
Definition: object.cc:9769
void ClearICDataArray() const
Definition: object.cc:11276
FunctionTypePtr FfiCSignature() const
Definition: object.cc:8264
TypeParameterPtr TypeParameterAt(intptr_t index, Nullability nullability=Nullability::kNonNullable) const
Definition: object.cc:8881
void set_recognized_kind(MethodRecognizer::Kind value) const
Definition: object.cc:8895
static intptr_t type_arguments_offset()
Definition: object.h:13207
bool HasBreakpointInCode(const Code &code)
Definition: debugger.cc:438
bool HasBreakpoint(Thread *thread, const Function &function)
Definition: debugger.cc:3464
void SetData(const Array &value) const
Definition: object.h:11083
void Add(const Object &value, Heap::Space space=Heap::kNew) const
Definition: object.cc:24991
void SetLength(intptr_t value) const
Definition: object.h:11076
ObjectPtr RemoveLast() const
Definition: object.cc:25016
static GrowableObjectArrayPtr New(Heap::Space space=Heap::kNew)
Definition: object.h:11144
static intptr_t type_arguments_offset()
Definition: object.h:11129
virtual TypeArgumentsPtr GetTypeArguments() const
Definition: object.h:11104
intptr_t Length() const
Definition: object.h:11072
ObjectPtr At(intptr_t index) const
Definition: object.h:11085
ArrayPtr data() const
Definition: object.h:11082
void Grow(intptr_t new_capacity, Heap::Space space=Heap::kNew) const
Definition: object.cc:25008
bool UpdateOrInsert(const Object &key, const Object &value) const
Definition: hash_table.h:713
bool Insert(const Object &key)
Definition: hash_table.h:801
ObjectPtr GetOrNull(const Key &key, bool *present=nullptr) const
Definition: hash_table.h:840
ObjectPtr GetPayload(intptr_t entry, intptr_t component) const
Definition: hash_table.h:352
StorageTraits::ArrayHandle & Release()
Definition: hash_table.h:195
ObjectPtr GetKey(intptr_t entry) const
Definition: hash_table.h:348
void IterateVMIsolateObjects(ObjectVisitor *visitor) const
Definition: heap.cc:354
void IterateOldObjectsNoImagePages(ObjectVisitor *visitor) const
Definition: heap.cc:349
bool HasOutstandingSample() const
Definition: sampler.h:124
void * InvokeCallbackForLastSample(intptr_t cid)
Definition: sampler.cc:200
uword Allocate(Thread *thread, intptr_t size, Space space)
Definition: heap.h:65
@ kNew
Definition: heap.h:38
@ kOld
Definition: heap.h:39
void SetHeapSamplingData(ObjectPtr obj, void *data)
Definition: heap.h:213
intptr_t SetHashIfNotSet(ObjectPtr raw_obj, intptr_t hash)
Definition: heap.h:175
void * GetPeer(ObjectPtr raw_obj) const
Definition: heap.h:167
PageSpace * old_space()
Definition: heap.h:63
intptr_t GetHash(ObjectPtr raw_obj) const
Definition: heap.h:178
void SetPeer(ObjectPtr raw_obj, void *peer)
Definition: heap.h:164
void SetCanonicalHash(ObjectPtr raw_obj, intptr_t hash)
Definition: heap.h:183
intptr_t UsedInWords(Space space) const
Definition: heap.cc:802
intptr_t GetCanonicalHash(ObjectPtr raw_obj) const
Definition: heap.h:186
Space SpaceForExternal(intptr_t size) const
Definition: heap.cc:1141
intptr_t deopt_id() const
Definition: object.h:2468
intptr_t NumArgsTested() const
Definition: object.cc:16471
ArrayPtr entries() const
Definition: object.h:2783
RebindRule rebind_rule() const
Definition: object.cc:16547
uint32_t DeoptReasons() const
Definition: object.cc:16505
void SetOriginal(const ICData &value) const
Definition: object.cc:16447
void AddDeoptReason(ICData::DeoptReasonId reason) const
Definition: object.cc:16518
AbstractTypePtr receivers_static_type() const
Definition: object.h:2480
static ICDataPtr Clone(const ICData &from)
Definition: object.cc:17439
FunctionPtr Owner() const
Definition: object.cc:16423
static ImmutableArrayPtr New(intptr_t len, Heap::Space space=Heap::kNew)
Definition: object.cc:24985
InstancePtr CopyShallowToOldSpace(Thread *thread) const
Definition: object.cc:20440
void SetNativeFields(uint16_t num_fields, const intptr_t *field_values) const
Definition: object.cc:20896
ObjectPtr Invoke(const String &selector, const Array &arguments, const Array &argument_names, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition: object.cc:20143
static intptr_t DataOffsetFor(intptr_t cid)
Definition: object.cc:20988
static intptr_t NextFieldOffset()
Definition: object.h:8355
uint16_t NumNativeFields() const
Definition: object.h:8280
static intptr_t InstanceSize()
Definition: object.h:8337
bool IsCallable(Function *function) const
Definition: object.cc:20913
ObjectPtr InvokeSetter(const String &selector, const Instance &argument, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition: object.cc:20094
virtual InstancePtr CanonicalizeLocked(Thread *thread) const
Definition: object.cc:20450
ObjectPtr GetField(const Field &field) const
Definition: object.cc:20475
virtual bool CanonicalizeEquals(const Instance &other) const
Definition: object.cc:20259
bool IsInstanceOf(const AbstractType &other, const TypeArguments &other_instantiator_type_arguments, const TypeArguments &other_function_type_arguments) const
Definition: object.cc:20614
AbstractTypePtr GetType(Heap::Space space) const
Definition: object.cc:20520
friend class Closure
Definition: object.h:8434
bool IsIdenticalTo(const Instance &other) const
Definition: object.cc:20861
static intptr_t ElementSizeFor(intptr_t cid)
Definition: object.cc:20967
virtual void SetTypeArguments(const TypeArguments &value) const
Definition: object.cc:20581
bool IsAssignableTo(const AbstractType &other, const TypeArguments &other_instantiator_type_arguments, const TypeArguments &other_function_type_arguments) const
Definition: object.cc:20629
intptr_t SizeFromClass() const
Definition: object.h:8216
virtual bool OperatorEquals(const Instance &other) const
Definition: object.cc:20855
void SetNativeField(int index, intptr_t value) const
Definition: object.cc:20883
ObjectPtr InvokeGetter(const String &selector, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition: object.cc:20031
InstancePtr Canonicalize(Thread *thread) const
Definition: object.cc:20444
virtual TypeArgumentsPtr GetTypeArguments() const
Definition: object.cc:20570
virtual ObjectPtr HashCode() const
Definition: object.cc:20212
bool IsValidNativeIndex(int index) const
Definition: object.h:8270
IntegerPtr IdentityHashCode(Thread *thread) const
Definition: object.cc:20219
static bool NullIsAssignableTo(const AbstractType &other)
Definition: object.cc:20674
intptr_t * NativeFieldsDataAddr() const
Definition: object.cc:20873
ObjectPtr EvaluateCompiledExpression(const Class &klass, const ExternalTypedData &kernel_buffer, const Array &type_definitions, const Array &arguments, const TypeArguments &type_arguments) const
Definition: object.cc:4833
void SetField(const Field &field, const Object &value) const
Definition: object.cc:20494
friend class Class
Definition: object.h:8433
virtual bool IsPointer() const
Definition: object.cc:25764
static InstancePtr NewAlreadyFinalized(const Class &cls, Heap::Space space=Heap::kNew)
Definition: object.cc:20943
virtual uint32_t CanonicalizeHash() const
Definition: object.cc:20316
static InstancePtr New(const Class &cls, Heap::Space space=Heap::kNew)
Definition: object.cc:20935
virtual void CanonicalizeFieldsLocked(Thread *thread) const
Definition: object.cc:20405
void set_stats(CodeStatistics *stats) const
Definition: object.cc:15486
uword MonomorphicEntryPoint() const
Definition: object.h:5746
uword EntryPoint() const
Definition: object.h:5747
uword PayloadStart() const
Definition: object.h:5745
intptr_t Size() const
Definition: object.h:5740
int32_t x() const
Definition: object.cc:25430
int32_t y() const
Definition: object.cc:25434
int32_t w() const
Definition: object.cc:25442
void set_z(int32_t z) const
Definition: object.cc:25422
virtual bool CanonicalizeEquals(const Instance &other) const
Definition: object.cc:25456
static Int32x4Ptr New(int32_t value0, int32_t value1, int32_t value2, int32_t value3, Heap::Space space=Heap::kNew)
Definition: object.cc:25391
void set_value(simd128_value_t value) const
Definition: object.cc:25451
void set_y(int32_t y) const
Definition: object.cc:25418
virtual uint32_t CanonicalizeHash() const
Definition: object.cc:25461
void set_x(int32_t x) const
Definition: object.cc:25414
simd128_value_t value() const
Definition: object.cc:25446
int32_t z() const
Definition: object.cc:25438
void set_w(int32_t w) const
Definition: object.cc:25426
IntegerPtr ShiftOp(Token::Kind operation, const Integer &other, Heap::Space space=Heap::kNew) const
Definition: object.cc:23231
static IntegerPtr New(const String &str, Heap::Space space=Heap::kNew)
Definition: object.cc:22984
virtual bool FitsIntoSmi() const
Definition: object.cc:23070
virtual int CompareWith(const Integer &other) const
Definition: object.cc:23076
IntegerPtr AsValidInteger() const
Definition: object.cc:23086
virtual double AsDoubleValue() const
Definition: object.cc:23052
static IntegerPtr NewCanonical(const String &str)
Definition: object.cc:22999
IntegerPtr ArithmeticOp(Token::Kind operation, const Integer &other, Heap::Space space=Heap::kNew) const
Definition: object.cc:23110
const char * ToHexCString(Zone *zone) const
Definition: object.cc:23100
IntegerPtr BitOp(Token::Kind operation, const Integer &other, Heap::Space space=Heap::kNew) const
Definition: object.cc:23192
virtual bool IsNegative() const
Definition: object.cc:23046
static IntegerPtr NewFromUint64(uint64_t value, Heap::Space space=Heap::kNew)
Definition: object.cc:23026
virtual int64_t AsInt64Value() const
Definition: object.cc:23058
static bool IsValueInRange(uint64_t value)
Definition: object.cc:23030
virtual bool IsZero() const
Definition: object.cc:23040
virtual bool Equals(const Instance &other) const
Definition: object.cc:23034
virtual uint32_t CanonicalizeHash() const
Definition: object.cc:23082
virtual uint32_t AsTruncatedUint32Value() const
Definition: object.cc:23064
static int EncodeType(Level level, Kind kind)
const uint8_t * snapshot_instructions
Definition: isolate.h:194
GroupDebugger * debugger() const
Definition: isolate.h:315
Mutex * kernel_data_class_cache_mutex()
Definition: isolate.h:524
void RegisterClass(const Class &cls)
Definition: isolate.cc:769
bool all_classes_finalized() const
Definition: isolate.h:713
Heap * heap() const
Definition: isolate.h:296
ObjectStore * object_store() const
Definition: isolate.h:510
SafepointRwLock * program_lock()
Definition: isolate.h:537
static IsolateGroup * Current()
Definition: isolate.h:539
ClassTable * class_table() const
Definition: isolate.h:496
Mutex * type_arguments_canonicalization_mutex()
Definition: isolate.h:513
Mutex * initializer_functions_mutex()
Definition: isolate.h:534
FieldTable * initial_field_table() const
Definition: isolate.h:742
IsolateGroupSource * source() const
Definition: isolate.h:286
Mutex * constant_canonicalization_mutex()
Definition: isolate.h:520
Mutex * kernel_data_lib_cache_mutex()
Definition: isolate.h:523
Mutex * subtype_test_cache_mutex()
Definition: isolate.h:516
UserTagPtr current_tag() const
Definition: isolate.h:1348
FieldTable * field_table() const
Definition: isolate.h:1000
static Isolate * Current()
Definition: isolate.h:986
UserTagPtr default_tag() const
Definition: isolate.h:1351
ObjectPtr CallDeferredLoadHandler(intptr_t id)
Definition: isolate.cc:1941
GrowableObjectArrayPtr tag_table() const
Definition: isolate.h:1345
IsolateGroup * group() const
Definition: isolate.h:1037
void set_current_tag(const UserTag &tag)
Definition: isolate.cc:3222
void set_default_tag(const UserTag &tag)
Definition: isolate.cc:3229
Dart_Port origin_id()
Definition: isolate.cc:1990
void set_constants(const Array &constants) const
Definition: object.cc:15089
intptr_t KernelLibraryStartOffset(intptr_t library_index) const
Definition: object.cc:15093
intptr_t KernelLibraryEndOffset(intptr_t library_index) const
Definition: object.cc:15114
LibraryPtr LookupLibrary(Thread *thread, const Smi &name_index) const
Definition: object.cc:15134
void set_scripts(const Array &scripts) const
Definition: object.cc:15085
ArrayPtr libraries_cache() const
Definition: object.h:5526
TypedDataViewPtr KernelLibrary(intptr_t library_index) const
Definition: object.cc:15106
ClassPtr LookupClass(Thread *thread, const Smi &name_index) const
Definition: object.cc:15183
ArrayPtr constants() const
Definition: object.h:5521
ArrayPtr scripts() const
Definition: object.h:5518
void set_classes_cache(const Array &cache) const
Definition: object.cc:15179
TypedDataViewPtr constants_table() const
Definition: object.h:5512
TypedDataPtr string_offsets() const
Definition: object.h:5491
TypedDataViewPtr string_data() const
Definition: object.h:5496
ArrayPtr classes_cache() const
Definition: object.h:5533
LibraryPtr InsertLibrary(Thread *thread, const Smi &name_index, const Library &lib) const
Definition: object.cc:15156
TypedDataViewPtr metadata_mappings() const
Definition: object.h:5504
void set_constants_table(const TypedDataView &value) const
Definition: object.cc:15126
TypedDataPtr canonical_names() const
Definition: object.h:5498
ScriptPtr ScriptAt(intptr_t index) const
Definition: object.cc:15079
void set_libraries_cache(const Array &cache) const
Definition: object.cc:15130
TypedDataBasePtr kernel_component() const
Definition: object.h:5493
ClassPtr InsertClass(Thread *thread, const Smi &name_index, const Class &klass) const
Definition: object.cc:15205
TypedDataViewPtr metadata_payloads() const
Definition: object.h:5500
StringPtr FormatMessage() const
Definition: object.cc:19913
Report::Kind kind() const
Definition: object.h:8080
virtual const char * ToErrorCString() const
Definition: object.cc:19930
static LanguageErrorPtr NewFormattedV(const Error &prev_error, const Script &script, TokenPosition token_pos, bool report_after_token, Report::Kind kind, Heap::Space space, const char *format, va_list args)
Definition: object.cc:19834
static LanguageErrorPtr NewFormatted(const Error &prev_error, const Script &script, TokenPosition token_pos, bool report_after_token, Report::Kind kind, Heap::Space space, const char *format,...) PRINTF_ATTRIBUTE(7, 8)
Definition: object.cc:19855
TokenPosition token_pos() const
Definition: object.h:8116
static bool ReportStats()
Definition: object.cc:14582
static uword Hash(const Object &key)
Definition: object.cc:14592
static ObjectPtr NewKey(const String &str)
Definition: object.cc:14594
static bool IsMatch(const Object &a, const Object &b)
Definition: object.cc:14584
static const char * Name()
Definition: object.cc:14581
bool is_deferred_load() const
Definition: object.h:8461
void AddImport(const Namespace &import) const
Definition: object.cc:14850
intptr_t num_imports() const
Definition: object.h:8455
StringPtr name() const
Definition: object.h:8451
ArrayPtr imports() const
Definition: object.h:8454
LibraryPtr importer() const
Definition: object.h:8456
LibraryPtr GetLibrary(int index) const
Definition: object.cc:14840
static LibraryPtr CoreLibrary()
Definition: object.cc:14787
static LibraryPtr VMServiceLibrary()
Definition: object.cc:14829
void SetLoadInProgress() const
Definition: object.cc:13622
friend class Object
Definition: object.h:5440
StringPtr PrivateName(const String &name) const
Definition: object.cc:14704
ScriptPtr LookupScript(const String &url, bool useResolvedUri=false) const
Definition: object.cc:14009
void Register(Thread *thread) const
Definition: object.cc:14731
ObjectPtr LookupReExport(const String &name, ZoneGrowableArray< intptr_t > *visited=nullptr) const
Definition: object.cc:13855
intptr_t num_imports() const
Definition: object.h:5219
static LibraryPtr MirrorsLibrary()
Definition: object.cc:14816
bool Loaded() const
Definition: object.h:5111
static bool IsPrivate(const String &name)
Definition: object.cc:14619
bool LoadRequested() const
Definition: object.h:5103
ObjectPtr InvokeGetter(const String &selector, bool throw_nsm_if_absent, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition: object.cc:14342
static LibraryPtr IsolateLibrary()
Definition: object.cc:14807
GrowableObjectArrayPtr used_scripts() const
Definition: object.h:5211
void SetName(const String &name) const
Definition: object.cc:13616
void DropDependenciesAndCaches() const
Definition: object.cc:14162
ArrayPtr LoadedScripts() const
Definition: object.cc:13940
static ClassPtr LookupCoreClass(const String &class_name)
Definition: object.cc:14689
void AddObject(const Object &obj, const String &name) const
Definition: object.cc:13815
ObjectPtr InvokeSetter(const String &selector, const Instance &argument, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition: object.cc:14410
ObjectPtr LookupLocalOrReExportObject(const String &name) const
Definition: object.cc:14065
void EnsureTopLevelClassIsFinalized() const
Definition: object.cc:14044
TypedDataViewPtr KernelLibrary() const
Definition: object.cc:13601
static LibraryPtr ConvertLibrary()
Definition: object.cc:14783
void AddMetadata(const Object &declaration, intptr_t kernel_offset) const
Definition: object.cc:13640
ClassPtr LookupClassAllowPrivate(const String &name) const
Definition: object.cc:14113
LibraryPtr ImportLibraryAt(intptr_t index) const
Definition: object.cc:14146
ObjectPtr GetMetadata(const Object &declaration) const
Definition: object.cc:13654
void AddClass(const Class &cls) const
Definition: object.cc:13914
ObjectPtr Invoke(const String &selector, const Array &arguments, const Array &argument_names, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition: object.cc:14473
ObjectPtr EvaluateCompiledExpression(const ExternalTypedData &kernel_buffer, const Array &type_definitions, const Array &param_values, const TypeArguments &type_param_values) const
Definition: object.cc:4811
FunctionPtr LookupFunctionAllowPrivate(const String &name) const
Definition: object.cc:14084
ClassPtr toplevel_class() const
Definition: object.h:5208
static LibraryPtr NativeWrappersLibrary()
Definition: object.cc:14821
NamespacePtr ImportAt(intptr_t index) const
Definition: object.cc:14154
static LibraryPtr CollectionLibrary()
Definition: object.cc:14791
void SetLoadRequested() const
Definition: object.cc:13628
void EvaluatePragmas()
Definition: object.cc:13720
static LibraryPtr GetLibrary(intptr_t index)
Definition: object.cc:14716
friend class Namespace
Definition: object.h:5439
static bool FindPragma(Thread *T, bool only_core, const Object &object, const String &pragma_name, bool multiple=false, Object *options=nullptr)
Definition: object.cc:4151
StringPtr name() const
Definition: object.h:5094
void set_loading_unit(const LoadingUnit &value) const
Definition: object.cc:13612
static LibraryPtr AsyncLibrary()
Definition: object.cc:14779
intptr_t index() const
Definition: object.h:5270
ClassPtr LookupClass(const String &name) const
Definition: object.cc:14105
static LibraryPtr LookupLibrary(Thread *thread, const String &url)
Definition: object.cc:14599
static const String & PrivateCoreLibName(const String &member)
Definition: object.cc:14674
void AddImport(const Namespace &ns) const
Definition: object.cc:14174
ArrayPtr exports() const
Definition: object.h:5217
LoadingUnitPtr loading_unit() const
Definition: object.h:5116
static ErrorPtr CompileAll(bool ignore_error=false)
Definition: object.cc:15228
void set_dependencies(const Array &deps) const
Definition: object.cc:14134
void SetLoaded() const
Definition: object.cc:13634
static const char kPrivateIdentifierStart
Definition: object.h:5380
void set_index(intptr_t value) const
Definition: object.h:5271
static LibraryPtr InternalLibrary()
Definition: object.cc:14803
static void InitNativeWrappersLibrary(IsolateGroup *isolate_group, bool is_kernel_file)
Definition: object.cc:14543
FieldPtr LookupFieldAllowPrivate(const String &name) const
Definition: object.cc:14075
static LibraryPtr FfiLibrary()
Definition: object.cc:14799
static void RegisterLibraries(Thread *thread, const GrowableObjectArray &libs)
Definition: object.cc:14759
LibraryPrefixPtr LookupLocalLibraryPrefix(const String &name) const
Definition: object.cc:14121
static bool IsPrivateCoreLibName(const String &name, const String &member)
Definition: object.cc:14680
StringPtr private_key() const
Definition: object.h:5099
static LibraryPtr MathLibrary()
Definition: object.cc:14811
StringPtr url() const
Definition: object.h:5097
bool LoadInProgress() const
Definition: object.h:5106
void CopyPragmas(const Library &old_lib)
Definition: object.cc:13745
intptr_t kernel_library_index() const
Definition: object.h:5309
intptr_t KernelLibraryOffset() const
Definition: object.cc:13606
KernelProgramInfoPtr kernel_program_info() const
Definition: object.h:5301
ArrayPtr imports() const
Definition: object.h:5216
static const char kPrivateKeySeparator
Definition: object.h:5384
void set_toplevel_class(const Class &value) const
Definition: object.cc:14129
bool IsAnyCoreLibrary() const
Definition: object.cc:13569
static void InitCoreLibrary(IsolateGroup *isolate_group)
Definition: object.cc:14302
void AddExport(const Namespace &ns) const
Definition: object.cc:14196
static LibraryPtr DeveloperLibrary()
Definition: object.cc:14795
static LibraryPtr TypedDataLibrary()
Definition: object.cc:14825
void set_kernel_program_info(const KernelProgramInfo &info) const
Definition: object.cc:13597
void ComputeAndSetHashMask() const
Definition: object.cc:25133
virtual void CanonicalizeFieldsLocked(Thread *thread) const
Definition: object.cc:25215
virtual uint32_t CanonicalizeHash() const
Definition: object.cc:25191
ArrayPtr data() const
Definition: object.h:12038
virtual bool CanonicalizeEquals(const Instance &other) const
Definition: object.cc:25150
static constexpr intptr_t kInitialIndexSize
Definition: object.h:12081
static const LinkedHashBase & Cast(const Object &obj)
Definition: object.h:12010
static intptr_t type_arguments_offset()
Definition: object.h:11986
SmiPtr used_data() const
Definition: object.h:12046
intptr_t id() const
Definition: object.h:7985
static LoadingUnitPtr New(intptr_t id, const LoadingUnit &parent)
Definition: object.cc:19688
LoadingUnitPtr parent() const
Definition: object.h:7980
static intptr_t LoadingUnitOf(const Function &function)
Definition: object.cc:19730
void set_loaded(bool value) const
Definition: object.h:7995
ObjectPtr CompleteLoad(const String &error_message, bool transient_error) const
Definition: object.cc:19713
static constexpr intptr_t kIllegalId
Definition: object.h:7967
static constexpr intptr_t kRootId
Definition: object.h:7969
void set_base_objects(const Array &value) const
Definition: object.cc:19700
ObjectPtr IssueLoad() const
Definition: object.cc:19708
void set_load_outstanding() const
Definition: object.h:8013
static ContextScopePtr CreateImplicitClosureScope(const Function &func)
Definition: scopes.cc:580
static intptr_t InstanceSize()
Definition: object.h:6055
static MapPtr NewDefault(intptr_t class_id=kMapCid, Heap::Space space=Heap::kNew)
Definition: object.cc:25090
static MapPtr New(intptr_t class_id, const Array &data, const TypedData &index, intptr_t hash_mask, intptr_t used_data, intptr_t deleted_keys, Heap::Space space=Heap::kNew)
Definition: object.cc:25101
void EnsureContains(const Smi &class_id, const Object &target) const
Definition: object.cc:18799
intptr_t mask() const
Definition: object.cc:18759
static constexpr intptr_t kSpreadFactor
Definition: object.h:7600
void set_mask(intptr_t mask) const
Definition: object.cc:18763
intptr_t filled_entry_count() const
Definition: object.cc:18767
void set_filled_entry_count(intptr_t num) const
Definition: object.cc:18771
ObjectPtr Lookup(const Smi &class_id) const
Definition: object.cc:18812
ArrayPtr buckets() const
Definition: object.cc:18748
void set_buckets(const Array &buckets) const
Definition: object.cc:18752
static constexpr double kLoadFactor
Definition: object.h:7601
static constexpr intptr_t kInitialCapacity
Definition: object.h:7599
static bool IsMarkedAsRecognized(const Function &function, const char *kind=nullptr)
static MintPtr NewCanonical(int64_t value)
Definition: object.cc:23314
virtual int CompareWith(const Integer &other) const
Definition: object.cc:23348
virtual int64_t AsInt64Value() const
Definition: object.cc:23336
virtual uint32_t AsTruncatedUint32Value() const
Definition: object.cc:23340
static constexpr intptr_t kBits
Definition: object.h:10067
virtual bool FitsIntoSmi() const
Definition: object.cc:23344
static constexpr int64_t kMinValue
Definition: object.h:10070
virtual bool Equals(const Instance &other) const
Definition: object.cc:23321
int64_t value() const
Definition: object.h:10073
static constexpr int64_t kMaxValue
Definition: object.h:10068
static MintPtr New(int64_t value, Heap::Space space=Heap::kNew)
Definition: object.cc:23304
virtual double AsDoubleValue() const
Definition: object.cc:23332
FieldPtr GetFieldReferent() const
Definition: object.cc:26891
TypeParameterPtr GetTypeParameterReferent() const
Definition: object.cc:26911
LibraryPtr GetLibraryReferent() const
Definition: object.cc:26906
static MirrorReferencePtr New(const Object &referent, Heap::Space space=Heap::kNew)
Definition: object.cc:26916
AbstractTypePtr GetAbstractTypeReferent() const
Definition: object.cc:26881
FunctionTypePtr GetFunctionTypeReferent() const
Definition: object.cc:26901
ClassPtr GetClassReferent() const
Definition: object.cc:26886
FunctionPtr GetFunctionReferent() const
Definition: object.cc:26896
bool IsOwnedByCurrentThread() const
Definition: os_thread.h:402
ArrayPtr hide_names() const
Definition: object.h:5450
bool HidesName(const String &name) const
Definition: object.cc:14918
ObjectPtr Lookup(const String &name, ZoneGrowableArray< intptr_t > *trail=nullptr) const
Definition: object.cc:14964
ArrayPtr show_names() const
Definition: object.h:5449
LibraryPtr target() const
Definition: object.h:5448
LibraryPtr owner() const
Definition: object.h:5451
void RunCallback(const FinalizerEntry &entry, const char *trace_context) const
Definition: object.cc:26811
void(* Callback)(void *)
Definition: object.h:13094
static NativeFinalizerPtr New(Heap::Space space=Heap::kNew)
Definition: object.cc:26798
StringPtr ToString(Heap::Space space) const
Definition: object.cc:23433
static OSThread * Current()
Definition: os_thread.h:179
static intptr_t ThreadIdToIntPtr(ThreadId id)
static int64_t GetCurrentTimeMillis()
static int64_t GetCurrentMonotonicMicros()
static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1, 2)
static bool StringToInt64(const char *str, int64_t *value)
static BuildId GetAppBuildId(const uint8_t *snapshot_instructions)
static const uint8_t * GetAppDSOBase(const uint8_t *snapshot_instructions)
Definition: os.cc:13
static intptr_t ProcessId()
static char * SCreate(Zone *zone, const char *format,...) PRINTF_ATTRIBUTE(2, 3)
IsolateGroup * isolate_group() const
Definition: visitor.h:25
void VisitCompressedPointers(uword heap_base, CompressedObjectPtr *first, CompressedObjectPtr *last)
Definition: visitor.h:43
static intptr_t InstanceSize()
Definition: object.h:5649
static ObjectPoolPtr NewFromBuilder(const compiler::ObjectPoolBuilder &builder)
Definition: object.cc:15676
bool IsFreeListElement() const
ObjectPtr Decompress(uword heap_base) const
UntaggedObject * untag() const
bool IsStringInstance() const
bool IsForwardingCorpse() const
intptr_t GetClassId() const
Definition: raw_object.h:885
intptr_t GetClassIdMayBeSmi() const
void set_bootstrap_library(BootstrapLibraryId index, const Library &value)
Definition: object_store.h:544
bool IsImmutable() const
Definition: object.h:338
static ObjectPtr Clone(const Object &orig, Heap::Space space, bool load_with_relaxed_atomics=false)
Definition: object.cc:2899
static ObjectPtr Allocate(intptr_t cls_id, intptr_t size, Heap::Space space, bool compressed, uword ptr_field_start_offset, uword ptr_field_end_offset)
Definition: object.cc:2759
void StoreNonPointer(const FieldType *addr, ValueType value) const
Definition: object.h:819
static ClassPtr loadingunit_class()
Definition: object.h:566
static ClassPtr type_parameters_class()
Definition: object.h:516
@ kUserVisibleName
Definition: object.h:645
@ kInternalName
Definition: object.h:622
@ kScrubbedName
Definition: object.h:633
static void Cleanup()
Definition: object.cc:1364
static ClassPtr ffi_trampoline_data_class()
Definition: object.h:521
static ClassPtr class_class()
Definition: object.h:513
static void InitNullAndBool(IsolateGroup *isolate_group)
Definition: object.cc:554
static ClassPtr namespace_class()
Definition: object.h:527
static const ClassId kClassId
Definition: object.h:606
void StoreCompressedPointer(compressed_type const *addr, type value) const
Definition: object.h:782
static ObjectPtr null()
Definition: object.h:433
intptr_t GetClassId() const
Definition: object.h:341
ObjectPtr ptr() const
Definition: object.h:332
ObjectPtr ptr_
Definition: object.h:870
static ClassPtr language_error_class()
Definition: object.h:553
void CheckHandle() const
Definition: object.cc:2747
bool InVMIsolateHeap() const
Definition: object.h:395
virtual StringPtr DictionaryName() const
Definition: object.cc:2624
static ClassPtr unhandled_exception_class()
Definition: object.h:554
void Print() const
Definition: object.cc:2620
static bool ShouldHaveImmutabilityBitSet(classid_t class_id)
Definition: object.cc:2628
bool IsCanonical() const
Definition: object.h:335
static void FinishInit(IsolateGroup *isolate_group)
Definition: object.cc:1351
bool IsNew() const
Definition: object.h:390
bool IsOld() const
Definition: object.h:391
static void FinalizeVMIsolate(IsolateGroup *isolate_group)
Definition: object.cc:1470
uword raw_value() const
Definition: object.h:670
static ClassPtr context_class()
Definition: object.h:549
static ClassPtr patch_class_class()
Definition: object.h:518
static ClassPtr code_class()
Definition: object.h:531
static void InitVtables()
Definition: object.cc:615
static void MakeUnusedSpaceTraversable(const Object &obj, intptr_t original_size, intptr_t used_size)
Definition: object.cc:1610
static void set_vm_isolate_snapshot_object_table(const Array &table)
Definition: object.cc:1601
void SetCanonical() const
Definition: object.h:336
static void FinalizeReadOnlyObject(ObjectPtr object)
Definition: object.cc:1556
virtual const char * ToCString() const
Definition: object.h:366
static constexpr bool ContainsCompressedPointers()
Definition: object.h:329
static ClassPtr library_class()
Definition: object.h:526
static ClassPtr unwind_error_class()
Definition: object.h:557
cpp_vtable vtable() const
Definition: object.h:700
CLASS_LIST(STORE_NON_POINTER_ILLEGAL_TYPE)
static ClassPtr field_class()
Definition: object.h:524
static constexpr intptr_t RoundedAllocationSize(intptr_t size)
Definition: object.h:758
static ClassPtr script_class()
Definition: object.h:525
bool IsNull() const
Definition: object.h:363
static ClassPtr context_scope_class()
Definition: object.h:550
static Object & Handle()
Definition: object.h:407
static ObjectPtr RawCast(ObjectPtr obj)
Definition: object.h:325
static Object & ZoneHandle()
Definition: object.h:419
static ClassPtr function_class()
Definition: object.h:519
friend class Thread
Definition: object.h:1025
static void Init(IsolateGroup *isolate_group)
Definition: object.cc:721
static constexpr intptr_t kHashBits
Definition: object.h:323
static ClassPtr closure_data_class()
Definition: object.h:520
static ClassPtr api_error_class()
Definition: object.h:552
void StoreSimd128(const FieldType *addr, simd128_value_t value) const
Definition: object.h:800
static ClassPtr subtypetestcache_class()
Definition: object.h:565
static void VerifyBuiltinVtables()
Definition: object.cc:1634
ClassPtr clazz() const
Definition: object.h:13218
static intptr_t UnroundedSize(OneByteStringPtr str)
Definition: object.h:10558
static OneByteStringPtr New(intptr_t len, Heap::Space space)
Definition: object.cc:24368
static OneByteStringPtr ConcatAll(const Array &strings, intptr_t start, intptr_t end, intptr_t len, Heap::Space space)
Definition: object.cc:24473
static intptr_t data_offset()
Definition: object.h:10554
static intptr_t InstanceSize()
Definition: object.h:10564
static OneByteStringPtr Transform(int32_t(*mapping)(int32_t ch), const String &str, Heap::Space space)
Definition: object.cc:24494
static OneByteStringPtr EscapeSpecialCharacters(const String &str)
Definition: object.cc:24336
static OneByteStringPtr SubStringUnchecked(const String &str, intptr_t begin_index, intptr_t length, Heap::Space space)
Definition: object.cc:24509
static OneByteStringPtr Concat(const String &str1, const String &str2, Heap::Space space)
Definition: object.cc:24461
static constexpr intptr_t kBytesPerElement
Definition: object.h:10542
void AllocateBlack(intptr_t size)
Definition: pages.h:271
void VisitObjectsUnsafe(ObjectVisitor *visitor) const
Definition: pages.cc:673
static PassiveObject & Handle()
Definition: object.h:1077
void set_kernel_program_info(const KernelProgramInfo &info) const
Definition: object.cc:7870
intptr_t YieldIndex() const
Definition: object.h:6160
TokenPosition TokenPos() const
Definition: object.h:6156
intptr_t DeoptId() const
Definition: object.h:6155
intptr_t TryIndex() const
Definition: object.h:6159
UntaggedPcDescriptors::Kind Kind() const
Definition: object.h:6161
static intptr_t UnroundedSize(PcDescriptorsPtr desc)
Definition: object.h:6085
static intptr_t InstanceSize()
Definition: object.h:6089
static intptr_t type_arguments_offset()
Definition: object.h:11902
static PointerPtr New(uword native_address, Heap::Space space=Heap::kNew)
Definition: object.cc:25726
static intptr_t InstanceSize()
Definition: object.h:11887
static void DumpStackTrace(void *context)
Definition: profiler.cc:417
static void SampleAllocation(Thread *thread, intptr_t cid, uint32_t identity_hash)
Definition: profiler.cc:1271
uint32_t NextUInt32()
Definition: random.cc:73
static ReceivePortPtr New(Dart_Port id, const String &debug_name, Heap::Space space=Heap::kNew)
Definition: object.cc:25783
static bool IsMatch(const Object &a, const Object &b)
Definition: object.cc:27886
static uword Hash(const Object &key)
Definition: object.cc:27890
static const char * Name()
Definition: object.cc:27883
static ObjectPtr NewKey(const Array &arr)
Definition: object.cc:27894
static RecordShape ForUnnamed(intptr_t num_fields)
Definition: object.h:11308
ArrayPtr GetFieldNames(Thread *thread) const
Definition: object.cc:27987
static constexpr intptr_t kMaxFieldNamesIndex
Definition: object.h:11297
intptr_t num_fields() const
Definition: object.h:11314
static constexpr intptr_t kMaxNumFields
Definition: object.h:11292
SmiPtr AsSmi() const
Definition: object.h:11320
static RecordShape Register(Thread *thread, intptr_t num_fields, const Array &field_names)
Definition: object.cc:27898
AbstractTypePtr FieldTypeAt(intptr_t index) const
Definition: object.cc:27380
virtual void EnumerateURIs(URIs *uris) const
Definition: object.cc:27615
RecordTypePtr ToNullability(Nullability value, Heap::Space space) const
Definition: object.cc:27482
RecordShape shape() const
Definition: object.h:11388
virtual uword ComputeHash() const
Definition: object.cc:27541
ArrayPtr GetFieldNames(Thread *thread) const
Definition: object.cc:27401
virtual bool IsEquivalent(const Instance &other, TypeEquality kind, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition: object.cc:27504
static RecordTypePtr New(RecordShape shape, const Array &field_types, Nullability nullability=Nullability::kNonNullable, Heap::Space space=Heap::kOld)
Definition: object.cc:27465
virtual AbstractTypePtr Canonicalize(Thread *thread) const
Definition: object.cc:27557
virtual AbstractTypePtr UpdateFunctionTypes(intptr_t num_parent_type_args_adjustment, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping) const
Definition: object.cc:27669
virtual AbstractTypePtr InstantiateFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping=nullptr, intptr_t num_parent_type_args_adjustment=0) const
Definition: object.cc:27629
bool IsSubtypeOf(const RecordType &other, Heap::Space space, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition: object.cc:27708
void SetFieldTypeAt(intptr_t index, const AbstractType &value) const
Definition: object.cc:27385
virtual void PrintName(NameVisibility visibility, BaseTextBuffer *printer) const
Definition: object.cc:27624
virtual bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition: object.cc:27448
virtual uint32_t CanonicalizeHash() const
Definition: object.cc:27801
RecordShape shape() const
Definition: object.h:11430
static intptr_t GetPositionalFieldIndexFromFieldName(const String &field_name)
Definition: object.cc:27847
virtual void CanonicalizeFieldsLocked(Thread *thread) const
Definition: object.cc:27819
RecordTypePtr GetRecordType() const
Definition: object.cc:27830
virtual bool CanonicalizeEquals(const Instance &other) const
Definition: object.cc:27778
static RecordPtr New(RecordShape shape, Heap::Space space=Heap::kNew)
Definition: object.cc:27741
static intptr_t field_offset(intptr_t index)
Definition: object.h:11448
intptr_t GetFieldIndexByName(Thread *thread, const String &field_name) const
Definition: object.cc:27862
ObjectPtr FieldAt(intptr_t field_index) const
Definition: object.h:11433
const char * ToCString() const
Definition: object.cc:26686
void set_pattern(const String &pattern) const
Definition: object.cc:26612
StringPtr pattern() const
Definition: object.h:12797
virtual uint32_t CanonicalizeHash() const
Definition: object.cc:26745
virtual bool CanonicalizeEquals(const Instance &other) const
Definition: object.cc:26724
void set_function(intptr_t cid, bool sticky, const Function &value) const
Definition: object.cc:26616
static RegExpPtr New(Zone *zone, Heap::Space space=Heap::kNew)
Definition: object.cc:26662
void set_num_bracket_expressions(SmiPtr value) const
void set_capture_name_map(const Array &array) const
Definition: object.cc:26658
void set_bytecode(bool is_one_byte, bool sticky, const TypedData &bytecode) const
Definition: object.cc:26636
RegExpFlags flags() const
Definition: object.h:12891
static DART_NORETURN void LongJump(const Error &error)
Definition: report.cc:86
static void MessageF(Kind kind, const Script &script, TokenPosition token_pos, bool report_after_token, const char *format,...) PRINTF_ATTRIBUTE(5
Definition: report.cc:123
@ kBailout
Definition: report.h:26
static constexpr bool AtLocation
Definition: report.h:29
static StringPtr PrependSnippet(Kind kind, const Script &script, TokenPosition token_pos, bool report_after_token, const String &message)
Definition: report.cc:20
static FunctionPtr ResolveDynamicAnyArgs(Zone *zone, const Class &receiver_class, const String &function_name, bool allow_add)
Definition: resolver.cc:185
static FunctionPtr ResolveDynamicFunction(Zone *zone, const Class &receiver_class, const String &function_name)
Definition: resolver.cc:176
static FunctionPtr ResolveFunction(Zone *zone, const Class &receiver_class, const String &function_name)
Definition: resolver.cc:167
static CodePtr Lookup(IsolateGroup *group, uword pc, bool is_return_address)
bool IsCurrentThreadWriter()
Definition: lockers.h:354
intptr_t GetTokenLength(const TokenPosition &token_pos) const
Definition: object.cc:13301
StringPtr Source() const
Definition: object.cc:13093
TypedDataPtr line_starts() const
Definition: object.cc:13218
bool IsPartOfDartColonLibrary() const
Definition: object.cc:13097
GrowableObjectArrayPtr GenerateLineNumberArray() const
Definition: object.cc:13125
void CollectTokenPositionsFor() const
Definition: kernel.cc:223
TokenPosition MaxPosition() const
Definition: object.cc:13176
intptr_t col_offset() const
Definition: object.h:4944
void LoadSourceFromKernel(const uint8_t *kernel_buffer, intptr_t kernel_buffer_len) const
Definition: object.cc:13104
LibraryPtr FindLibrary() const
Definition: object.cc:13482
StringPtr resolved_url() const
Definition: object.cc:13080
bool HasSource() const
Definition: object.cc:13089
void InitializeFromKernel(const KernelProgramInfo &info, intptr_t script_index, const TypedData &line_starts, const TypedDataView &constant_coverage) const
Definition: object.cc:13112
void set_url(const String &value) const
Definition: object.cc:13196
StringPtr GetSnippet(intptr_t from_line, intptr_t from_column, intptr_t to_line, intptr_t to_column) const
Definition: object.cc:13431
TypedDataViewPtr constant_coverage() const
Definition: object.cc:13209
bool TokenRangeAtLine(intptr_t line_number, TokenPosition *first_token_index, TokenPosition *last_token_index) const
Definition: object.cc:13322
StringPtr url() const
Definition: object.h:4932
bool IsValidTokenPosition(TokenPosition token_pos) const
Definition: object.cc:13256
static ScriptPtr New(const String &url, const String &source)
Definition: object.cc:13451
StringPtr GetLine(intptr_t line_number, Heap::Space space=Heap::kNew) const
Definition: object.cc:13411
intptr_t line_offset() const
Definition: object.h:4943
bool GetTokenLocation(const TokenPosition &token_pos, intptr_t *line, intptr_t *column=nullptr) const
Definition: object.cc:13283
static SendPortPtr New(Dart_Port id, Heap::Space space=Heap::kNew)
Definition: object.cc:25812
static SentinelPtr New()
Definition: object.cc:18729
static void HandleEvent(ServiceEvent *event, bool enter_safepoint=true)
Definition: service.cc:1206
static StreamInfo profiler_stream
Definition: service.h:188
static SetPtr New(intptr_t class_id, const Array &data, const TypedData &index, intptr_t hash_mask, intptr_t used_data, intptr_t deleted_keys, Heap::Space space=Heap::kNew)
Definition: object.cc:25247
static SetPtr NewDefault(intptr_t class_id=kSetCid, Heap::Space space=Heap::kNew)
Definition: object.cc:25266
static SmiPtr New(intptr_t value)
Definition: object.h:10006
intptr_t Value() const
Definition: object.h:9990
virtual bool Equals(const Instance &other) const
Definition: object.cc:23251
virtual int64_t AsInt64Value() const
Definition: object.cc:23262
virtual int CompareWith(const Integer &other) const
Definition: object.cc:23270
virtual uint32_t AsTruncatedUint32Value() const
Definition: object.cc:23266
friend class Class
Definition: object.h:10047
static bool IsValid(int64_t value)
Definition: object.h:10026
virtual double AsDoubleValue() const
Definition: object.cc:23258
virtual bool IsNegative() const
Definition: object.h:9994
static bool IsPossibleAwaiterLink(const Class &cls)
Definition: stack_trace.cc:711
static constexpr uword kFutureListenerPcOffset
Definition: stack_trace.h:19
intptr_t Length() const
Definition: object.cc:26020
void SetPcOffsetAtFrame(intptr_t frame_index, uword pc_offset) const
Definition: object.cc:26042
bool skip_sync_start_in_parent_stack() const
Definition: object.cc:26012
void set_skip_sync_start_in_parent_stack(bool value) const
Definition: object.cc:26016
void SetCodeAtFrame(intptr_t frame_index, const Object &code) const
Definition: object.cc:26030
static constexpr intptr_t kSyncAsyncCroppedFrames
Definition: object.h:12589
void set_async_link(const StackTrace &async_link) const
Definition: object.cc:26049
uword PcOffsetAtFrame(intptr_t frame_index) const
Definition: object.cc:26036
static StackTracePtr New(const Array &code_array, const TypedData &pc_offset_array, Heap::Space space=Heap::kNew)
Definition: object.cc:26069
ObjectPtr CodeAtFrame(intptr_t frame_index) const
Definition: object.cc:26025
void set_expand_inlined(bool value) const
Definition: object.cc:26061
static StaticTypeExactnessState TriviallyExact(intptr_t type_arguments_offset_in_bytes)
static bool CanRepresentAsTriviallyExact(intptr_t type_arguments_offset_in_bytes)
static StaticTypeExactnessState HasExactSuperType()
static StaticTypeExactnessState NotExact()
static StaticTypeExactnessState NotTracking()
const char * ToCString() const
Definition: object.cc:12924
static StaticTypeExactnessState Compute(const Type &static_type, const Instance &value, bool print_trace=false)
Definition: object.cc:12800
static StaticTypeExactnessState HasExactSuperClass()
void Add(uint16_t code_unit)
Definition: object.h:10501
intptr_t Finalize()
Definition: object.h:10517
static StringPtr FromLatin1(const uint8_t *latin1_array, intptr_t array_len, Heap::Space space=Heap::kNew)
Definition: object.cc:23733
static StringPtr ScrubNameRetainPrivate(const String &name, bool is_extension=false)
Definition: object.cc:427
static constexpr intptr_t kMaxElements
Definition: object.h:10173
static void Copy(const String &dst, intptr_t dst_offset, const uint8_t *characters, intptr_t len)
Definition: object.cc:23792
static StringPtr NewFormatted(const char *format,...) PRINTF_ATTRIBUTE(1
Definition: object.cc:24004
bool EndsWith(const String &other) const
Definition: object.cc:23672
static StringPtr RemovePrivateKey(const String &name)
Definition: object.cc:233
bool IsOneByteString() const
Definition: object.h:10311
static uint32_t SetCachedHashIfNotSet(StringPtr obj, uint32_t hash)
Definition: object.h:10449
intptr_t Length() const
Definition: object.h:10210
static uword HashRawSymbol(const StringPtr symbol)
Definition: object.h:10247
static bool ParseDouble(const String &str, intptr_t start, intptr_t end, double *result)
Definition: object.cc:24207
char * ToMallocCString() const
Definition: object.cc:24138
static StringPtr ToLowerCase(const String &str, Heap::Space space=Heap::kNew)
Definition: object.cc:24202
bool HasHash() const
Definition: object.h:10229
static StringPtr static StringPtr static StringPtr NewFormattedV(const char *format, va_list args, Heap::Space space=Heap::kNew)
Definition: object.cc:24022
static uword HashConcat(const String &str1, const String &str2)
Definition: object.cc:23485
static StringPtr ConcatAllRange(const Array &strings, intptr_t start, intptr_t end, Heap::Space space=Heap::kNew)
Definition: object.cc:24052
static StringPtr ConcatAll(const Array &strings, Heap::Space space=Heap::kNew)
Definition: object.cc:24048
void SetHash(intptr_t value) const
Definition: object.h:10477
static const char * ScrubName(const String &name, bool is_extension=false)
Definition: object.cc:287
void SetLength(intptr_t value) const
Definition: object.h:10471
bool Equals(const String &str) const
Definition: object.h:13337
static constexpr intptr_t kOneByteChar
Definition: object.h:10160
static StringPtr New(const char *cstr, Heap::Space space=Heap::kNew)
Definition: object.cc:23698
static constexpr intptr_t kTwoByteChar
Definition: object.h:10161
intptr_t CompareTo(const String &other) const
Definition: object.cc:23638
intptr_t CharSize() const
Definition: object.cc:23522
void ToUTF8(uint8_t *utf8_array, intptr_t array_len) const
Definition: object.cc:24146
bool IsTwoByteString() const
Definition: object.h:10315
uint16_t CharAt(intptr_t index) const
Definition: object.h:10259
static StringPtr SubString(const String &str, intptr_t begin_index, Heap::Space space=Heap::kNew)
Definition: object.cc:24080
bool StartsWith(const String &other) const
Definition: object.h:10298
static StringPtr EscapeSpecialCharacters(const String &str)
Definition: object.cc:23861
static StringPtr Concat(const String &str1, const String &str2, Heap::Space space=Heap::kNew)
Definition: object.cc:24037
virtual InstancePtr CanonicalizeLocked(Thread *thread) const
Definition: object.cc:23691
static StringPtr DecodeIRI(const String &str)
Definition: object.cc:23948
static const char * ToCString(Thread *thread, StringPtr ptr)
Definition: object.cc:24126
static StringPtr Transform(int32_t(*mapping)(int32_t ch), const String &str, Heap::Space space=Heap::kNew)
Definition: object.cc:24172
static StringPtr ToUpperCase(const String &str, Heap::Space space=Heap::kNew)
Definition: object.cc:24197
bool IsSymbol() const
Definition: object.h:10309
static StringPtr FromUTF16(const uint16_t *utf16_array, intptr_t array_len, Heap::Space space=Heap::kNew)
Definition: object.cc:23739
uword Hash() const
Definition: object.h:10216
virtual bool CanonicalizeEquals(const Instance &other) const
Definition: object.h:10290
bool EqualsConcat(const String &str1, const String &str2) const
Definition: object.cc:23632
friend class Class
Definition: object.h:10484
static bool EqualsIgnoringPrivateKey(const String &str1, const String &str2)
Definition: object.cc:24299
static const char * EncodeIRI(const String &str)
Definition: object.cc:23918
static uint32_t GetCachedHash(const StringPtr obj)
Definition: object.h:10445
static StringPtr FromUTF32(const int32_t *utf32_array, intptr_t array_len, Heap::Space space=Heap::kNew)
Definition: object.cc:23755
static StringPtr FromUTF8(const uint8_t *utf8_array, intptr_t array_len, Heap::Space space=Heap::kNew)
Definition: object.cc:23705
static intptr_t LengthOf(StringPtr obj)
Definition: object.h:10211
static const char * NameOfStub(uword entry_point)
Definition: stub_code.cc:330
static bool HasBeenInitialized()
Definition: stub_code.h:41
void GetCurrentCheck(intptr_t ix, Object *instance_class_id_or_signature, AbstractType *destination_type, TypeArguments *instance_type_arguments, TypeArguments *instantiator_type_arguments, TypeArguments *function_type_arguments, TypeArguments *instance_parent_function_type_arguments, TypeArguments *instance_delayed_type_arguments, Bool *test_result) const
Definition: object.cc:19367
void WriteEntryToBuffer(Zone *zone, BaseTextBuffer *buffer, intptr_t index, const char *line_prefix=nullptr) const
Definition: object.cc:19496
intptr_t num_occupied() const
Definition: object.h:7821
static constexpr intptr_t kMaxLinearCacheSize
Definition: object.h:7912
intptr_t NumEntries() const
Definition: object.cc:18959
bool Equals(const SubtypeTestCache &other) const
Definition: object.cc:19626
static SubtypeTestCachePtr New(intptr_t num_inputs)
Definition: object.cc:18924
void Reset() const
Definition: object.cc:19621
static constexpr intptr_t kMaxInputs
Definition: object.h:7705
void GetCheck(intptr_t ix, Object *instance_class_id_or_signature, AbstractType *destination_type, TypeArguments *instance_type_arguments, TypeArguments *instantiator_type_arguments, TypeArguments *function_type_arguments, TypeArguments *instance_parent_function_type_arguments, TypeArguments *instance_delayed_type_arguments, Bool *test_result) const
Definition: object.cc:19346
SubtypeTestCachePtr Copy(Thread *thread) const
Definition: object.cc:19639
ArrayPtr cache() const
Definition: object.cc:18938
static intptr_t UsedInputsForType(const AbstractType &type)
Definition: object.cc:19668
intptr_t num_inputs() const
Definition: object.h:7819
intptr_t NumberOfChecks() const
Definition: object.cc:18954
bool GetNextCheck(intptr_t *ix, Object *instance_class_id_or_signature, AbstractType *destination_type, TypeArguments *instance_type_arguments, TypeArguments *instantiator_type_arguments, TypeArguments *function_type_arguments, TypeArguments *instance_parent_function_type_arguments, TypeArguments *instance_delayed_type_arguments, Bool *test_result) const
Definition: object.cc:19435
bool IsHash() const
Definition: object.cc:18969
intptr_t AddCheck(const Object &instance_class_id_or_signature, const AbstractType &destination_type, const TypeArguments &instance_type_arguments, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const TypeArguments &instance_parent_function_type_arguments, const TypeArguments &instance_delayed_type_arguments, const Bool &test_result) const
Definition: object.cc:18978
bool HasCheck(const Object &instance_class_id_or_signature, const AbstractType &destination_type, const TypeArguments &instance_type_arguments, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const TypeArguments &instance_parent_function_type_arguments, const TypeArguments &instance_delayed_type_arguments, intptr_t *index, Bool *result) const
Definition: object.cc:19464
@ kInstanceDelayedFunctionTypeArguments
Definition: object.h:7694
@ kInstanceParentFunctionTypeArguments
Definition: object.h:7693
static constexpr intptr_t kMaxLinearCacheEntries
Definition: object.h:7830
void WriteToBuffer(Zone *zone, BaseTextBuffer *buffer, const char *line_prefix=nullptr) const
Definition: object.cc:19507
bool IsOccupied(intptr_t index) const
Definition: object.cc:19658
static SuspendStatePtr Clone(Thread *thread, const SuspendState &src, Heap::Space space=Heap::kNew)
Definition: object.cc:26532
static intptr_t FrameSizeGrowthGap()
Definition: object.h:12637
static intptr_t payload_offset()
Definition: object.h:12659
CodePtr GetCodeObject() const
Definition: object.cc:26596
static intptr_t InstanceSize()
Definition: object.h:12626
static SuspendStatePtr New(intptr_t frame_size, const Instance &function_data, Heap::Space space=Heap::kNew)
Definition: object.cc:26508
static bool IsSymbolCid(Thread *thread, classid_t class_id)
Definition: object.cc:20293
static uint32_t CanonicalizeHash(Thread *thread, const Instance &instance)
Definition: object.cc:20299
static const String & HashMark()
Definition: symbols.h:672
static const String & NewLine()
Definition: symbols.h:651
static StringPtr LookupFromGet(Thread *thread, const String &str)
Definition: symbols.cc:424
static StringPtr FromConcat(Thread *thread, const String &str1, const String &str2)
Definition: symbols.cc:235
static StringPtr LookupFromSet(Thread *thread, const String &str)
Definition: symbols.cc:428
static const String & Library()
Definition: symbols.h:690
static StringPtr FromGet(Thread *thread, const String &str)
Definition: symbols.cc:247
static StringPtr FromSet(Thread *thread, const String &str)
Definition: symbols.cc:251
static void SetupSymbolTable(IsolateGroup *isolate_group)
Definition: symbols.cc:168
static StringPtr FromConcatAll(Thread *thread, const GrowableHandlePtrArray< const String > &strs)
Definition: symbols.cc:262
static const String & Void()
Definition: symbols.h:694
static const String & This()
Definition: symbols.h:692
static const String & Empty()
Definition: symbols.h:688
static StringPtr New(Thread *thread, const char *cstr)
Definition: symbols.h:723
static const String & Dot()
Definition: symbols.h:613
static const String & Equals()
Definition: symbols.h:614
Zone * zone() const
Definition: thread_state.h:37
LongJumpScope * long_jump_base() const
Definition: thread_state.h:47
void DeferredMarkingStackAddObject(ObjectPtr obj)
Definition: thread.cc:871
int32_t no_callback_scope_depth() const
Definition: thread.h:623
static Thread * Current()
Definition: thread.h:362
int32_t no_safepoint_scope_depth() const
Definition: thread.h:718
bool is_marking() const
Definition: thread.h:676
void IncrementNoCallbackScopeDepth()
Definition: thread.h:624
Heap * heap() const
Definition: thread.cc:943
void CheckForSafepoint()
Definition: thread.h:1104
void ClearStickyError()
Definition: thread.cc:241
uword top_exit_frame_info() const
Definition: thread.h:691
bool OwnsDeoptSafepoint() const
Definition: thread.cc:1357
bool IsDartMutatorThread() const
Definition: thread.h:551
Random * random()
Definition: thread.h:1133
ExecutionState execution_state() const
Definition: thread.h:1040
Isolate * isolate() const
Definition: thread.h:534
IsolateGroup * isolate_group() const
Definition: thread.h:541
void DecrementNoCallbackScopeDepth()
Definition: thread.h:628
HeapProfileSampler & heap_sampler()
Definition: thread.h:1141
bool IsNoSource() const
intptr_t Pos() const
const char * ToCString() const
int32_t Serialize() const
static TokenPosition Deserialize(int32_t value)
static constexpr int32_t kMaxSourcePos
static const TokenPosition kMinSource
bool IsClassifying() const
static TransferableTypedDataPtr New(uint8_t *data, intptr_t len)
Definition: object.cc:25835
static intptr_t UnroundedSize(TwoByteStringPtr str)
Definition: object.h:10698
static TwoByteStringPtr New(intptr_t len, Heap::Space space)
Definition: object.cc:24562
static TwoByteStringPtr Transform(int32_t(*mapping)(int32_t ch), const String &str, Heap::Space space)
Definition: object.cc:24673
static constexpr intptr_t kBytesPerElement
Definition: object.h:10683
static TwoByteStringPtr ConcatAll(const Array &strings, intptr_t start, intptr_t end, intptr_t len, Heap::Space space)
Definition: object.cc:24652
static intptr_t InstanceSize()
Definition: object.h:10704
static intptr_t data_offset()
Definition: object.h:10695
static TwoByteStringPtr Concat(const String &str1, const String &str2, Heap::Space space)
Definition: object.cc:24640
static TwoByteStringPtr EscapeSpecialCharacters(const String &str)
Definition: object.cc:24530
bool IsOccupied(intptr_t entry) const
Definition: object.cc:7056
KeyLocation FindKeyOrUnused(const TypeArguments &instantiator_tav, const TypeArguments &function_tav) const
Definition: object.h:8865
Cache(Zone *zone, const TypeArguments &source)
Definition: object.cc:6977
static SmiPtr Sentinel()
Definition: object.cc:7173
TypeArgumentsPtr Retrieve(intptr_t entry) const
Definition: object.cc:7062
static const Array & EmptyStorage()
Definition: object.h:8891
intptr_t NumOccupied() const
Definition: object.h:8853
KeyLocation AddEntry(intptr_t entry, const TypeArguments &instantiator_tav, const TypeArguments &function_tav, const TypeArguments &instantiated_tav) const
Definition: object.cc:7114
intptr_t NumEntries() const
Definition: object.h:8925
friend class Object
Definition: object.h:9039
bool IsEquivalent(const TypeArguments &other, TypeEquality kind, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition: object.h:8691
void EnumerateURIs(URIs *uris) const
Definition: object.cc:7819
void PrintSubvectorName(intptr_t from_index, intptr_t len, NameVisibility name_visibility, BaseTextBuffer *printer) const
Definition: object.cc:6883
static constexpr intptr_t kNullabilityBitsPerType
Definition: object.h:8619
StringPtr Name() const
Definition: object.cc:6869
bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition: object.h:8707
uword Hash() const
Definition: object.h:13396
TypeArgumentsPtr ToInstantiatorTypeArguments(Thread *thread, const Class &cls) const
Definition: object.cc:7795
intptr_t Length() const
Definition: object.cc:7294
intptr_t nullability() const
Definition: object.cc:7301
uword HashForRange(intptr_t from_index, intptr_t len) const
Definition: object.cc:6778
static intptr_t types_offset()
Definition: object.h:8589
static constexpr intptr_t kNullableBit
Definition: object.h:8623
TypeArgumentsPtr TruncatedTo(intptr_t length) const
Definition: object.cc:7514
bool Equals(const TypeArguments &other) const
Definition: object.h:8686
static intptr_t InstanceSize()
Definition: object.h:8988
bool IsSubvectorInstantiated(intptr_t from_index, intptr_t len, Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition: object.cc:7329
bool CanShareFunctionTypeArguments(const Function &function, bool *with_runtime_check=nullptr) const
Definition: object.cc:7472
static constexpr intptr_t kMaxElements
Definition: object.h:8986
void PrintTo(BaseTextBuffer *printer) const
Definition: object.cc:6907
bool CanShareInstantiatorTypeArguments(const Class &instantiator_class, bool *with_runtime_check=nullptr) const
Definition: object.cc:7384
InstantiationMode GetInstantiationMode(Zone *zone, const Function *function=nullptr, const Class *cls=nullptr) const
Definition: object.cc:6847
TypeArgumentsPtr InstantiateAndCanonicalizeFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments) const
Definition: object.cc:7620
bool IsUninstantiatedIdentity() const
Definition: object.cc:7352
TypeArgumentsPtr FromInstanceTypeArguments(Thread *thread, const Class &cls) const
Definition: object.cc:7768
AbstractTypePtr TypeAtNullSafe(intptr_t index) const
Definition: object.cc:7314
bool IsSubvectorEquivalent(const TypeArguments &other, intptr_t from_index, intptr_t len, TypeEquality kind, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition: object.cc:6920
bool IsFinalized() const
Definition: object.cc:7527
bool HasInstantiations() const
Definition: object.cc:7269
TypeArgumentsPtr ConcatenateTypeParameters(Zone *zone, const TypeArguments &other) const
Definition: object.cc:6827
StringPtr UserVisibleName() const
Definition: object.cc:6876
static constexpr intptr_t kNonNullableBit
Definition: object.h:8622
bool HasCount(intptr_t count) const
Definition: object.cc:7287
static constexpr intptr_t kNullabilityMaxTypes
Definition: object.h:8620
static constexpr intptr_t kAllDynamicHash
Definition: object.h:8578
void SetTypeAt(intptr_t index, const AbstractType &value) const
Definition: object.cc:7323
friend class Class
Definition: object.h:9037
TypeArgumentsPtr InstantiateFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping=nullptr, intptr_t num_parent_type_args_adjustment=0) const
Definition: object.cc:7540
TypeArgumentsPtr Canonicalize(Thread *thread) const
Definition: object.cc:7703
bool IsRaw(intptr_t from_index, intptr_t len) const
Definition: object.h:8646
TypeArgumentsPtr UpdateFunctionTypes(intptr_t num_parent_type_args_adjustment, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping) const
Definition: object.cc:7581
static TypeArgumentsPtr New(intptr_t len, Heap::Space space=Heap::kOld)
Definition: object.cc:7675
AbstractTypePtr TypeAt(intptr_t index) const
Definition: object.cc:7308
TypeArgumentsPtr Prepend(Zone *zone, const TypeArguments &other, intptr_t other_length, intptr_t total_length) const
Definition: object.cc:6800
virtual uword ComputeHash() const
Definition: object.cc:22903
bool IsClassTypeParameter() const
Definition: object.h:9817
void set_parameterized_class_id(classid_t value) const
Definition: object.cc:22705
virtual AbstractTypePtr UpdateFunctionTypes(intptr_t num_parent_type_args_adjustment, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping) const
Definition: object.cc:22837
intptr_t index() const
Definition: object.h:9821
TypeParameterPtr ToNullability(Nullability value, Heap::Space space) const
Definition: object.cc:22601
virtual void PrintName(NameVisibility visibility, BaseTextBuffer *printer) const
Definition: object.cc:22895
virtual bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition: object.cc:22623
void set_index(intptr_t value) const
Definition: object.cc:22731
classid_t parameterized_class_id() const
Definition: object.cc:22698
ClassPtr parameterized_class() const
Definition: object.cc:22710
AbstractTypePtr bound() const
Definition: object.cc:22737
AbstractTypePtr GetFromTypeArguments(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments) const
Definition: object.cc:22754
virtual bool IsEquivalent(const Instance &other, TypeEquality kind, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition: object.cc:22633
virtual AbstractTypePtr InstantiateFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping=nullptr, intptr_t num_parent_type_args_adjustment=0) const
Definition: object.cc:22764
void set_base(intptr_t value) const
Definition: object.cc:22725
intptr_t base() const
Definition: object.h:9819
const char * CanonicalNameCString() const
Definition: object.h:9869
bool IsFunctionTypeParameter() const
Definition: object.h:9813
FunctionTypePtr parameterized_function_type() const
Definition: object.cc:22720
virtual AbstractTypePtr Canonicalize(Thread *thread) const
Definition: object.cc:22859
void SetNameAt(intptr_t index, const String &value) const
Definition: object.cc:6579
void SetBoundAt(intptr_t index, const AbstractType &value) const
Definition: object.cc:6599
void Print(Thread *thread, Zone *zone, bool are_class_type_parameters, intptr_t base, NameVisibility name_visibility, BaseTextBuffer *printer) const
Definition: object.cc:6671
intptr_t Length() const
Definition: object.cc:6564
static constexpr intptr_t kFlagsPerSmiShift
Definition: object.h:8525
static constexpr intptr_t kFlagsPerSmiMask
Definition: object.h:8531
StringPtr NameAt(intptr_t index) const
Definition: object.cc:6574
AbstractTypePtr DefaultAt(intptr_t index) const
Definition: object.cc:6614
bool IsGenericCovariantImplAt(intptr_t index) const
Definition: object.cc:6652
static TypeParametersPtr New(Heap::Space space=Heap::kOld)
Definition: object.cc:6723
bool AllDynamicDefaults() const
Definition: object.cc:6626
bool AllDynamicBounds() const
Definition: object.cc:6605
AbstractTypePtr BoundAt(intptr_t index) const
Definition: object.cc:6593
void SetDefaultAt(intptr_t index, const AbstractType &value) const
Definition: object.cc:6620
void SetIsGenericCovariantImplAt(intptr_t index, bool value) const
Definition: object.cc:6659
static CodePtr DefaultCodeForType(const AbstractType &type, bool lazy_specialize=true)
virtual void EnumerateURIs(URIs *uris) const
static TypePtr IntType()
TypeArgumentsPtr GetInstanceTypeArguments(Thread *thread, bool canonicalize=true) const
static TypePtr VoidType()
static TypePtr NullableIntType()
static TypePtr Double()
static TypePtr Number()
friend class TypeArguments
Definition: object.h:9507
virtual classid_t type_class_id() const
virtual void PrintName(NameVisibility visibility, BaseTextBuffer *printer) const
TypePtr ToNullability(Nullability value, Heap::Space space) const
static TypePtr ArrayType()
static TypePtr NullableDouble()
virtual ClassPtr type_class() const
bool IsDeclarationTypeOf(const Class &cls) const
static TypePtr MintType()
static TypePtr NullType()
virtual bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
virtual TypeArgumentsPtr arguments() const
Definition: object.h:9381
static TypePtr StringType()
static TypePtr BoolType()
static TypePtr DartFunctionType()
static TypePtr ObjectType()
virtual AbstractTypePtr UpdateFunctionTypes(intptr_t num_parent_type_args_adjustment, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping) const
void set_type_class(const Class &value) const
static TypePtr NeverType()
static TypePtr Int32x4()
virtual uword ComputeHash() const
void set_arguments(const TypeArguments &value) const
static TypePtr SmiType()
virtual AbstractTypePtr InstantiateFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping=nullptr, intptr_t num_parent_type_args_adjustment=0) const
static TypePtr Float64x2()
static TypePtr DynamicType()
virtual AbstractTypePtr Canonicalize(Thread *thread) const
friend class Class
Definition: object.h:9506
static TypePtr Float32x4()
static TypePtr New(const Class &clazz, const TypeArguments &arguments, Nullability nullability=Nullability::kNonNullable, Heap::Space space=Heap::kOld)
static TypePtr NullableNumber()
static TypePtr NewNonParameterizedType(const Class &type_class)
static TypePtr DartTypeType()
virtual bool IsEquivalent(const Instance &other, TypeEquality kind, FunctionTypeMapping *function_type_equivalence=nullptr) const
TypedDataElementType ElementType() const
Definition: object.h:11527
intptr_t Length() const
Definition: object.h:11518
TypedDataViewPtr ViewFromTo(intptr_t start, intptr_t end, Heap::Space space=Heap::kNew) const
Definition: object.cc:25682
intptr_t ElementSizeInBytes() const
Definition: object.h:11531
static intptr_t length_offset()
Definition: object.h:11512
intptr_t LengthInBytes() const
Definition: object.h:11523
bool IsExternalOrExternalView() const
Definition: object.cc:25672
void * DataAddr(intptr_t byte_offset) const
Definition: object.h:11571
static TypedDataViewPtr New(intptr_t class_id, Heap::Space space=Heap::kNew)
Definition: object.cc:25658
static intptr_t InstanceSize()
Definition: object.h:11793
static intptr_t payload_offset()
Definition: object.h:11669
static TypedDataPtr Grow(const TypedData &current, intptr_t len, Heap::Space space=Heap::kNew)
Definition: object.cc:25601
virtual bool CanonicalizeEquals(const Instance &other) const
Definition: object.cc:25550
virtual uint32_t CanonicalizeHash() const
Definition: object.cc:25575
static intptr_t MaxElements(intptr_t class_id)
Definition: object.h:11684
static TypedDataPtr New(intptr_t class_id, intptr_t len, Heap::Space space=Heap::kNew)
Definition: object.cc:25587
static intptr_t InstanceSize()
Definition: object.h:11673
static DART_FORCE_INLINE constexpr intptr_t Length()
Definition: class_table.h:67
virtual const char * ToErrorCString() const
Definition: object.cc:19963
InstancePtr exception() const
Definition: object.h:8146
static UnhandledExceptionPtr New(const Instance &exception, const Instance &stacktrace, Heap::Space space=Heap::kNew)
Definition: object.cc:19939
InstancePtr stacktrace() const
Definition: object.h:8151
bool can_patch_to_monomorphic() const
Definition: object.h:2405
static const char * KindToCString(Kind k)
Definition: raw_object.h:1283
static constexpr uword update(intptr_t size, uword tag)
Definition: raw_object.h:212
static bool IsMarked(uword tags)
Definition: raw_object.h:303
static uword ToAddr(const UntaggedObject *raw_obj)
Definition: raw_object.h:522
intptr_t HeapSize() const
Definition: raw_object.h:401
bool InVMIsolateHeap() const
Definition: raw_object.cc:20
void VisitPointersPrecise(ObjectPointerVisitor *visitor)
Definition: raw_object.cc:371
intptr_t VisitPointers(ObjectPointerVisitor *visitor)
Definition: raw_object.h:447
void SetMarkBitUnsynchronized()
Definition: raw_object.h:309
void set_is_user_initiated(bool value) const
Definition: object.cc:20018
static UnwindErrorPtr New(const String &message, Heap::Space space=Heap::kNew)
Definition: object.cc:20005
StringPtr message() const
Definition: object.h:8182
virtual const char * ToErrorCString() const
Definition: object.cc:20022
static UserTagPtr FindTagById(const Isolate *isolate, uword tag_id)
Definition: object.cc:27053
static UserTagPtr New(const String &label, Heap::Space space=Heap::kOld)
Definition: object.cc:26949
uword tag() const
Definition: object.h:13154
static UserTagPtr FindTagInIsolate(Isolate *isolate, Thread *thread, const String &label)
Definition: object.cc:26991
UserTagPtr MakeActive() const
Definition: object.cc:26928
static UserTagPtr DefaultTag()
Definition: object.cc:26974
static bool TagTableIsFull(Thread *thread)
Definition: object.cc:27044
StringPtr label() const
Definition: object.h:13168
static constexpr intptr_t kMaxUserTags
Definition: tags.h:110
static bool IsTagNameStreamable(const char *tag)
Definition: tags.cc:177
static constexpr uword kUserTagIdOffset
Definition: tags.h:111
static constexpr uword kDefaultUserTag
Definition: tags.h:112
static int32_t Decode(uint16_t lead, uint16_t trail)
Definition: unicode.h:151
static void Encode(int32_t codepoint, uint16_t *dst)
Definition: unicode.cc:273
static bool IsLeadSurrogate(uint32_t ch)
Definition: unicode.h:126
static intptr_t Length(int32_t ch)
Definition: unicode.h:118
static bool IsTrailSurrogate(uint32_t ch)
Definition: unicode.h:131
static intptr_t Length(int32_t ch)
Definition: unicode.cc:98
@ kSupplementary
Definition: unicode.h:46
@ kLatin1
Definition: unicode.h:44
static intptr_t CodeUnitCount(const uint8_t *utf8_array, intptr_t array_len, Type *type)
Definition: unicode.cc:46
static intptr_t ReportInvalidByte(const uint8_t *utf8_array, intptr_t array_len, intptr_t len)
Definition: unicode.cc:163
static bool DecodeToUTF16(const uint8_t *utf8_array, intptr_t array_len, uint16_t *dst, intptr_t len)
Definition: unicode.cc:217
static intptr_t Decode(const uint8_t *utf8_array, intptr_t array_len, int32_t *ch)
Definition: unicode.cc:135
static bool DecodeToLatin1(const uint8_t *utf8_array, intptr_t array_len, uint8_t *dst, intptr_t len)
Definition: unicode.cc:194
static intptr_t Encode(int32_t ch, char *dst)
Definition: unicode.cc:110
static bool IsSupplementary(int32_t code_point)
Definition: unicode.h:31
static bool IsBmp(int32_t code_point)
Definition: unicode.h:27
static bool IsLatin1(int32_t code_point)
Definition: unicode.h:23
static bool IsInt(intptr_t N, T value)
Definition: utils.h:313
static constexpr uintptr_t RoundUpToPowerOfTwo(uintptr_t x)
Definition: utils.h:135
static T MulWithWrapAround(T a, T b)
Definition: utils.h:449
static constexpr T Maximum(T x, T y)
Definition: utils.h:41
static int SNPrint(char *str, size_t size, const char *format,...) PRINTF_ATTRIBUTE(3
static constexpr int ShiftForPowerOfTwo(T x)
Definition: utils.h:81
static char * StrDup(const char *s)
static int static int VSNPrint(char *str, size_t size, const char *format, va_list args)
static T Minimum(T x, T y)
Definition: utils.h:36
static T AddWithWrapAround(T a, T b)
Definition: utils.h:431
static T SubWithWrapAround(T a, T b)
Definition: utils.h:440
static uint32_t BigEndianToHost32(uint32_t be_value)
Definition: utils.h:518
static bool IsUint(intptr_t N, T value)
Definition: utils.h:328
static constexpr bool IsAligned(T x, uintptr_t alignment, uintptr_t offset=0)
Definition: utils.h:92
static int64_t ShiftLeftWithTruncation(int64_t a, int64_t b)
Definition: utils.h:464
static constexpr bool IsPowerOfTwo(T x)
Definition: utils.h:76
Definition: il.h:75
static void Protect(void *address, intptr_t size, Protection mode)
static WeakPropertyPtr New(Heap::Space space=Heap::kNew)
Definition: object.cc:26756
static intptr_t type_arguments_offset()
Definition: object.h:12952
static WeakReferencePtr New(Heap::Space space=Heap::kNew)
Definition: object.cc:26766
static ObjectPtr Unwrap(ObjectPtr obj)
Definition: object.h:6667
static ObjectPtr UnwrapIfTarget(ObjectPtr obj)
Definition: object.h:6676
void VisitPointers(ObjectPtr *from, ObjectPtr *to) override
Definition: object.cc:2838
WriteBarrierUpdateVisitor(Thread *thread, ObjectPtr obj)
Definition: object.cc:2831
char * PrintToString(const char *format,...) PRINTF_ATTRIBUTE(2
Definition: zone.cc:313
ElementType * Alloc(intptr_t length)
intptr_t CountPointerOffsets() const
ObjectPoolBuilder & object_pool_builder()
intptr_t UncheckedEntryOffset() const
const ZoneGrowableArray< intptr_t > & GetPointerOffsets() const
void FinalizeInstructions(const MemoryRegion &region)
intptr_t prologue_offset() const
Object & GetSelfHandle() const
static word type_arguments_offset()
static const word kNoTypeArguments
Definition: runtime_api.h:486
static const word kMaxFieldNamesIndex
Definition: runtime_api.h:613
DART_WARN_UNUSED_RESULT bool LocationForPosition(intptr_t position, intptr_t *line, intptr_t *col=nullptr) const
Definition: kernel.cc:43
DART_WARN_UNUSED_RESULT bool TokenRangeAtLine(intptr_t line_number, dart::TokenPosition *first_token_index, dart::TokenPosition *last_token_index) const
Definition: kernel.cc:71
uint32_t At(intptr_t index) const
Definition: kernel.h:137
ObjectPtr LoadExpressionEvaluationFunction(const String &library_url, const String &klass)
static StringPtr FindSourceForScript(const uint8_t *kernel_buffer, intptr_t kernel_buffer_length, const String &url)
static uint32_t CalculateFunctionFingerprint(const Function &func)
static uint32_t CalculateFieldFingerprint(const Field &field)
static uint32_t CalculateClassFingerprint(const Class &klass)
static std::unique_ptr< Program > ReadFromTypedData(const ExternalTypedData &typed_data, const char **error=nullptr)
#define THR_Print(format,...)
Definition: log.h:20
#define kIsolateSnapshotInstructionsAsmSymbol
Definition: dart_api.h:3968
#define ILLEGAL_PORT
Definition: dart_api.h:1535
int64_t Dart_Port
Definition: dart_api.h:1525
#define DART_WARN_UNUSED_RESULT
Definition: dart_api.h:66
#define kVmSnapshotInstructionsAsmSymbol
Definition: dart_api.h:3965
void(* Dart_HandleFinalizer)(void *isolate_callback_data, void *peer)
Definition: dart_api.h:265
#define UNIMPLEMENTED
const EmbeddedViewParams * params
#define ASSERT(E)
VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE auto & d
Definition: main.cc:19
VkInstance instance
Definition: main.cc:48
SkBitmap source
Definition: examples.cpp:28
static bool b
struct MyStruct s
struct MyStruct a[10]
#define FATAL(error)
FlutterSemanticsFlag flag
AtkStateType state
FlutterSemanticsFlag flags
glong glong end
G_BEGIN_DECLS G_MODULE_EXPORT FlValue * args
FlKeyEvent uint64_t FlKeyResponderAsyncCallback callback
FlKeyEvent * event
const uint8_t uint32_t uint32_t GError ** error
uint8_t value
GAsyncResult * result
uint32_t uint32_t * format
uint32_t * target
const char * charp
Definition: flags.h:12
Dart_NativeFunction function
Definition: fuchsia.cc:51
void Init()
#define HANDLESCOPE(thread)
Definition: handles.h:321
static float max(float r, float g, float b)
Definition: hsl.cpp:49
#define TIR_Print(format,...)
size_t length
Win32Message message
#define MSAN_CHECK_INITIALIZED(ptr, len)
double y
double x
SK_API std::unique_ptr< SkCodec > Decode(std::unique_ptr< SkStream >, SkCodec::Result *, SkCodecs::DecodeContext=nullptr)
Optional< SkRect > bounds
Definition: SkRecords.h:189
sk_sp< const SkImage > image
Definition: SkRecords.h:269
ClipOpAndAA opAA SkRegion region
Definition: SkRecords.h:238
SK_API sk_sp< SkSurface > Null(int width, int height)
va_start(args, format)
va_end(args)
void Decompress(const uint8_t *input, intptr_t input_len, uint8_t **output, intptr_t *output_length)
Definition: gzip.cc:15
void UnboxFieldIfSupported(const dart::Field &field, const dart::AbstractType &type)
static constexpr intptr_t kWordSize
Definition: runtime_api.h:274
bool IsDouble(const dart::Object &a)
Definition: runtime_api.cc:974
static constexpr intptr_t kCompressedWordSize
Definition: runtime_api.h:286
bool IsSmi(int64_t v)
Definition: runtime_api.cc:31
static constexpr intptr_t kNumParameterFlagsPerElement
Definition: runtime_api.h:328
intptr_t RoundedAllocationSize(intptr_t size)
Definition: runtime_api.h:333
bool NeedsDynamicInvocationForwarder(const Function &function)
Definition: kernel.cc:657
void ReadParameterCovariance(const Function &function, BitVector *is_covariant, BitVector *is_generic_covariant_impl)
Definition: kernel.cc:599
static ProcedureAttributesMetadata ProcedureAttributesOf(Zone *zone, const KernelProgramInfo &kernel_program_info, const TypedDataView &kernel_data, intptr_t kernel_data_program_offset, intptr_t kernel_offset)
Definition: kernel.cc:740
FunctionPtr CreateFieldInitializerFunction(Thread *thread, Zone *zone, const Field &field)
ObjectPtr EvaluateMetadata(const Library &library, intptr_t kernel_offset, bool is_annotations_offset)
Definition: kernel.cc:467
static UnboxingInfoMetadata * UnboxingInfoMetadataOf(Zone *zone, const KernelProgramInfo &kernel_program_info, const TypedDataView &kernel_data, intptr_t kernel_data_program_offset, intptr_t kernel_offset)
Definition: kernel.cc:778
ObjectPtr EvaluateStaticConstFieldInitializer(const Field &field)
Definition: kernel.cc:381
def link(from_root, to_root)
Definition: dart_pkg.py:44
Definition: dart_vm.cc:33
static constexpr intptr_t kNullIdentityHash
Definition: object.h:10784
bool IsTypedDataViewClassId(intptr_t index)
Definition: class_id.h:439
ArrayOfTuplesView< TypeArguments::Cache::Entry, std::tuple< Object, TypeArguments, TypeArguments >, TypeArguments::Cache::kHeaderSize > InstantiationsCacheTable
Definition: object.h:13566
static ArrayPtr CreateCallableArgumentsFromStatic(Zone *zone, const Instance &receiver, const Array &static_args, const Array &arg_names, const ArgumentsDescriptor &static_args_descriptor)
Definition: object.cc:4652
bool IsTypedDataClassId(intptr_t index)
Definition: class_id.h:433
static void AppendSubString(BaseTextBuffer *buffer, const char *name, intptr_t start_pos, intptr_t len)
Definition: object.cc:193
static constexpr intptr_t kFalseIdentityHash
Definition: object.h:10786
const intptr_t kSmiBits
Definition: globals.h:24
static bool IsIdentChar(int32_t c)
Definition: object.cc:13278
static void TransferableTypedDataFinalizer(void *isolate_callback_data, void *peer)
Definition: object.cc:25830
static void ReportTooManyTypeArguments(const Class &cls)
Definition: object.cc:3099
void DoubleToCString(double d, char *buffer, int buffer_size)
bool CStringToDouble(const char *str, intptr_t length, double *result)
static constexpr intptr_t kOldObjectAlignmentOffset
static bool EvaluationFunctionNeedsReceiver(Thread *thread, Zone *zone, const Function &eval_function)
Definition: object.cc:4773
static const char *const names[]
Definition: symbols.cc:24
InstantiationMode
Definition: raw_object.h:1467
static constexpr intptr_t kNewObjectAlignmentOffset
static type SpecialCharacter(type value)
Definition: object.cc:530
ZoneGrowableHandlePtrArray< const String > URIs
Definition: object.h:1110
const char *const name
static constexpr intptr_t kCompressedWordSizeLog2
Definition: globals.h:43
static bool EqualsIgnoringPrivateKey(const String &str1, const String &str2)
Definition: object.cc:24247
const intptr_t kSmiMax
Definition: globals.h:28
CAllocUniquePtr< char > CStringUniquePtr
Definition: utils.h:31
bool IsTypedDataBaseClassId(intptr_t index)
Definition: class_id.h:429
constexpr intptr_t kBitsPerWord
Definition: globals.h:514
uword cpp_vtable
Definition: globals.h:163
static constexpr intptr_t kBoolValueMask
static const intptr_t kGetterPrefixLength
Definition: object.cc:116
static constexpr intptr_t kFalseOffsetFromNull
static int32_t GetHexCharacter(int32_t c)
Definition: object.cc:23896
@ kOld
Definition: heap_test.cc:892
static StaticTypeExactnessState TrivialTypeExactnessFor(const Class &cls)
Definition: object.cc:12784
static intptr_t GetListLength(const Object &value)
Definition: object.cc:12478
Nullability
Definition: object.h:1112
static int32_t EscapeOverhead(int32_t c)
Definition: object.cc:520
DART_WARN_UNUSED_RESULT ErrorPtr EntryPointFieldInvocationError(const String &getter_name)
Definition: object.cc:27196
QualifiedFunctionLibKind
Definition: object.cc:9696
@ kQualifiedFunctionLibKindLibUrl
Definition: object.cc:9697
@ kQualifiedFunctionLibKindLibName
Definition: object.cc:9698
DART_EXPORT bool IsNull(Dart_Handle object)
static constexpr intptr_t kBoolVsNullMask
bool IsTypeClassId(intptr_t index)
Definition: class_id.h:370
intptr_t RawSmiValue(const SmiPtr raw_value)
static bool SubtypeTestCacheEntryMatches(const SubtypeTestCacheTable::TupleView &t, intptr_t num_inputs, const Object &instance_class_id_or_signature, const AbstractType &destination_type, const TypeArguments &instance_type_arguments, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const TypeArguments &instance_parent_function_type_arguments, const TypeArguments &instance_delayed_type_arguments)
Definition: object.cc:19066
static ObjectPtr EvaluateCompiledExpressionHelper(Zone *zone, const Function &eval_function, const Array &type_definitions, const Array &arguments, const TypeArguments &type_arguments)
Definition: object.cc:4782
void * malloc(size_t size)
Definition: allocation.cc:19
uint32_t CombineHashes(uint32_t hash, uint32_t other_hash)
Definition: hash.h:12
const char *const class_name
FfiCallbackKind
Definition: object.h:2984
bool IsArrayClassId(intptr_t index)
Definition: class_id.h:358
int32_t classid_t
Definition: globals.h:524
static bool IsPercent(int32_t c)
Definition: object.cc:23869
static constexpr intptr_t kTrueOffsetFromNull
DART_WARN_UNUSED_RESULT ErrorPtr EntryPointMemberInvocationError(const Object &member)
Definition: object.cc:27212
bool IsUnmodifiableTypedDataViewClassId(intptr_t index)
Definition: class_id.h:453
@ kForwardingCorpse
Definition: class_id.h:225
@ kIllegalCid
Definition: class_id.h:214
@ kNullCid
Definition: class_id.h:252
@ kNumPredefinedCids
Definition: class_id.h:257
@ kByteDataViewCid
Definition: class_id.h:244
@ kVoidCid
Definition: class_id.h:254
@ kByteBufferCid
Definition: class_id.h:247
@ kDynamicCid
Definition: class_id.h:253
@ kNeverCid
Definition: class_id.h:255
@ kFreeListElement
Definition: class_id.h:224
@ kUnmodifiableByteDataViewCid
Definition: class_id.h:245
static bool IsHexCharacter(int32_t c)
Definition: object.cc:23873
static bool IsSpecialCharacter(type value)
Definition: object.cc:510
static TypeArgumentsPtr RetrieveInstantiatorTypeArguments(Zone *zone, const Function &function, const Instance &receiver)
Definition: object.cc:9487
bool IsFfiTypeClassId(intptr_t index)
Definition: class_id.h:513
constexpr uint32_t kMaxUint32
Definition: globals.h:484
static const char *const kInitPrefix
Definition: object.cc:119
EntryPointPragma FindEntryPointPragma(IsolateGroup *IG, const Array &metadata, Field *reusable_field_handle, Object *pragma)
Definition: object.cc:27114
static bool IsAsciiNonprintable(int32_t c)
Definition: object.cc:516
static int PrintVarInfo(char *buffer, int len, intptr_t i, const String &var_name, const UntaggedLocalVarDescriptors::VarInfo &info)
Definition: object.cc:16053
FunctionPtr GetFunction(const Library &lib, const char *name)
static void PrintSymbolicStackFrameBody(BaseTextBuffer *buffer, const char *function_name, const char *url, intptr_t line=-1, intptr_t column=-1)
Definition: object.cc:26171
ArrayOfTuplesView< SubtypeTestCache::Entries, std::tuple< Object, TypeArguments, TypeArguments, TypeArguments, TypeArguments, TypeArguments, AbstractType, Bool > > SubtypeTestCacheTable
Definition: object.h:13558
DART_WARN_UNUSED_RESULT ErrorPtr VerifyEntryPoint(const Library &lib, const Object &member, const Object &annotated, std::initializer_list< EntryPointPragma > allowed_kinds)
Definition: object.cc:27151
uintptr_t uword
Definition: globals.h:501
static bool IsURISafeCharacter(int32_t c)
Definition: object.cc:23883
static void FunctionPrintNameHelper(const Function &fun, const NameFormattingParams &params, BaseTextBuffer *printer)
Definition: object.cc:11037
static int32_t MergeHexCharacters(int32_t c1, int32_t c2)
Definition: object.cc:23914
constexpr uword kBreakInstructionFiller
TypeEquality
Definition: object.h:1118
static void IndentN(int count)
Definition: object.cc:18542
static void ReportTooManyImports(const Library &lib)
Definition: object.cc:13561
uint32_t Multiply64Hash(int64_t ivalue)
Definition: integers.cc:276
const uint32_t fp
EntryPointPragma
Definition: object.h:4366
void CreateSpecializedFunction(Thread *thread, Zone *zone, const RegExp &regexp, intptr_t specialization_cid, bool sticky, const Object &owner)
Definition: regexp.cc:5523
bool IsAllocatableInNewSpace(intptr_t size)
Definition: spaces.h:57
UnorderedHashSet< CanonicalTypeTraits > CanonicalTypeSet
static const char * Concat(const char *a, const char *b)
Definition: file_test.cc:86
static constexpr intptr_t kSlotsPerInterruptCheck
Definition: page.h:359
uintptr_t compressed_uword
Definition: globals.h:44
static bool MatchesAccessorName(const String &name, const char *prefix, intptr_t prefix_length, const String &accessor_name)
Definition: object.cc:6176
static void DwarfStackTracesHandler(bool value)
Definition: object.cc:26489
UnorderedHashSet< ClassFunctionsTraits > ClassFunctionsSet
Definition: object.cc:3262
static ObjectPtr ThrowTypeError(const TokenPosition token_pos, const Instance &src_value, const AbstractType &dst_type, const String &dst_name)
Definition: object.cc:4496
static bool IsDecimalDigit(int32_t c)
Definition: object.cc:13270
UnorderedHashMap< LibraryLookupTraits > LibraryLookupMap
Definition: object.cc:14596
bool ShouldHaveImmutabilityBitSetCid(intptr_t predefined_cid)
Definition: class_id.h:507
static intptr_t ConstructFunctionFullyQualifiedCString(const Function &function, char **chars, intptr_t reserve_len, bool with_lib, QualifiedFunctionLibKind lib_kind)
Definition: object.cc:9701
uint32_t HashBytes(const uint8_t *bytes, intptr_t size)
Definition: hash.h:31
static void USE(T &&)
Definition: globals.h:618
static const char *const kGetterPrefix
Definition: object.cc:115
static classid_t NormalizeClassIdForSyntacticalTypeEquality(classid_t cid)
Definition: object.cc:22049
bool FindPragmaInMetadata(Thread *T, const Object &metadata_obj, const String &pragma_name, bool multiple, Object *options)
Definition: object.cc:4092
static void PrintSymbolicStackFrame(Zone *zone, BaseTextBuffer *buffer, const Function &function, TokenPosition token_pos_or_line, intptr_t frame_index, bool is_line=false)
Definition: object.cc:26186
DEFINE_FLAG(bool, print_cluster_information, false, "Print information about clusters written to snapshot")
bool HasStack()
Definition: stacktrace.cc:106
static T LoadUnaligned(const T *ptr)
Definition: unaligned.h:14
const intptr_t cid
static constexpr intptr_t kTrueIdentityHash
Definition: object.h:10785
static bool IsVisibleAsFutureListener(const Function &function)
Definition: object.cc:26220
static constexpr intptr_t kCompressedWordSize
Definition: globals.h:42
raw_obj untag() -> num_entries()) VARIABLE_COMPRESSED_VISITOR(Array, Smi::Value(raw_obj->untag() ->length())) VARIABLE_COMPRESSED_VISITOR(TypedData, TypedData::ElementSizeInBytes(raw_obj->GetClassId()) *Smi::Value(raw_obj->untag() ->length())) VARIABLE_COMPRESSED_VISITOR(Record, RecordShape(raw_obj->untag() ->shape()).num_fields()) VARIABLE_NULL_VISITOR(CompressedStackMaps, CompressedStackMaps::PayloadSizeOf(raw_obj)) VARIABLE_NULL_VISITOR(OneByteString, Smi::Value(raw_obj->untag() ->length())) VARIABLE_NULL_VISITOR(TwoByteString, Smi::Value(raw_obj->untag() ->length())) intptr_t UntaggedField::VisitFieldPointers(FieldPtr raw_obj, ObjectPointerVisitor *visitor)
Definition: raw_object.cc:558
uint32_t FinalizeHash(uint32_t hash, intptr_t hashbits=kBitsPerInt32)
Definition: hash.h:20
static const intptr_t kSetterPrefixLength
Definition: object.cc:118
static uword Hash64To32(uint64_t v)
Definition: object.cc:6462
DEFINE_FLAG_HANDLER(PrecompilationModeHandler, precompilation, "Precompilation mode")
void DumpFunctionTypeTable(Isolate *isolate)
Definition: object.cc:27082
FrameLayout runtime_frame_layout
Definition: stack_frame.cc:81
static void StoreUnaligned(T *ptr, T value)
Definition: unaligned.h:22
static void ReportTooManyTypeParameters(const FunctionType &sig)
Definition: object.cc:8817
static uint32_t Hash(uint32_t key)
Definition: hashmap_test.cc:65
void DumpTypeTable(Isolate *isolate)
Definition: object.cc:27075
static DART_FORCE_INLINE uword LocalVarAddress(uword fp, intptr_t index)
Definition: stack_frame.h:429
bool TESTING_runtime_fail_on_existing_cache_entry
Definition: object.cc:7617
bool IsFfiPointerClassId(intptr_t index)
Definition: class_id.h:541
constexpr int32_t kMaxInt32
Definition: globals.h:483
static const intptr_t kInitPrefixLength
Definition: object.cc:120
constexpr intptr_t kWordSize
Definition: globals.h:509
const StackTrace & GetCurrentStackTrace(int skip_frames)
Definition: stacktrace.cc:94
static ObjectPtr InvokeInstanceFunction(Thread *thread, const Instance &receiver, const Function &function, const String &target_name, const Array &args, const Array &args_descriptor_array, bool respect_reflectable, const TypeArguments &instantiator_type_args)
Definition: object.cc:14316
static constexpr intptr_t kObjectAlignment
static bool HasPragma(const Object &declaration)
Definition: object.cc:13713
static ArrayPtr NewDictionary(intptr_t initial_size)
Definition: object.cc:14204
static void ThrowNoSuchMethod(const Instance &receiver, const String &function_name, const Array &arguments, const Array &argument_names, const InvocationMirror::Level level, const InvocationMirror::Kind kind)
Definition: mirrors.cc:49
static void AddScriptIfUnique(const GrowableObjectArray &scripts, const Script &candidate)
Definition: object.cc:13922
Genericity
Definition: object.h:2248
@ kFunctions
Definition: object.h:2251
@ kCurrentClass
Definition: object.h:2250
@ kAny
Definition: object.h:2249
constexpr int64_t kMinInt64RepresentableAsDouble
Definition: globals.h:493
bool IsIntegerClassId(intptr_t index)
Definition: class_id.h:340
UnorderedHashMap< RecordFieldNamesMapTraits > RecordFieldNamesMap
Definition: object.cc:27896
bool IsInternalOnlyClassId(intptr_t index)
Definition: class_id.h:299
@ kIsolate
Definition: marker.cc:734
void DumpTypeParameterTable(Isolate *isolate)
Definition: object.cc:27098
const char *const function_name
static int8_t data[kExtLength]
static int32_t GetHexValue(int32_t c)
Definition: object.cc:23903
static void PrintSymbolicStackFrameIndex(BaseTextBuffer *buffer, intptr_t frame_index)
Definition: object.cc:26166
constexpr int64_t kMaxInt64RepresentableAsDouble
Definition: globals.h:494
static bool ShouldBePrivate(const String &name)
Definition: object.cc:13771
bool IsDeeplyImmutableCid(intptr_t predefined_cid)
Definition: class_id.h:485
bool IsAllocatableViaFreeLists(intptr_t size)
Definition: spaces.h:60
static int kLength
NOT_IN_PRODUCT(LibraryPtr ReloadTestScript(const char *script))
static ObjectPtr LoadExpressionEvaluationFunction(Zone *zone, const ExternalTypedData &kernel_buffer, const String &library_url, const String &klass)
Definition: object.cc:4749
@ kHeapObjectTag
@ kSmiTagMask
void DumpTypeArgumentsTable(Isolate *isolate)
Definition: object.cc:27106
static const char * SafeTypeArgumentsToCString(const TypeArguments &args)
Definition: object.cc:12796
static bool IsIdentStartChar(int32_t c)
Definition: object.cc:13274
static bool IsLetter(int32_t c)
Definition: object.cc:13266
static FinalizablePersistentHandle * AddFinalizer(const Object &referent, void *peer, Dart_HandleFinalizer callback, intptr_t external_size)
Definition: object.cc:24159
static const char *const kSetterPrefix
Definition: object.cc:117
static TypeArgumentsPtr RetrieveFunctionTypeArguments(Thread *thread, Zone *zone, const Function &function, const Instance &receiver, const TypeArguments &instantiator_type_args, const Array &args, const ArgumentsDescriptor &args_desc)
Definition: object.cc:9409
bool IsBuiltinListClassId(intptr_t index)
Definition: class_id.h:364
static int NumEntries(const FinalizerEntry &entry, intptr_t acc=0)
constexpr intptr_t kBitsPerInt64
Definition: globals.h:467
bool IsExternalTypedDataClassId(intptr_t index)
Definition: class_id.h:447
COMPILE_ASSERT(kUnreachableReference==WeakTable::kNoValue)
constexpr intptr_t kIntptrMax
Definition: globals.h:557
bool IsStringClassId(intptr_t index)
Definition: class_id.h:350
@ kCurrentAndEnclosingFree
Definition: object.h:2937
@ kAllFree
Definition: object.h:2940
@ kNoneFree
Definition: object.h:2926
static bool InVmTests(const Function &function)
Definition: object.cc:9003
static intptr_t GetRelativeSourceIndex(const String &src, intptr_t line, intptr_t line_offset=0, intptr_t column=1, intptr_t column_offset=0, intptr_t starting_index=0)
Definition: object.cc:13363
ObjectPtr CompressedObjectPtr
void DumpRecordTypeTable(Isolate *isolate)
Definition: object.cc:27090
static intptr_t GetListLengthOffset(intptr_t cid)
Definition: object.cc:12490
DECLARE_FLAG(bool, show_invisible_frames)
def call(args)
Definition: dom.py:159
bool Equals(const T *a, const T *b)
Definition: dl_comparable.h:19
@ kNone
Definition: layer.h:53
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified vm service A custom Dart VM Service port The default is to pick a randomly available open port disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode disable vm service Disable mDNS Dart VM Service publication Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set endless trace Enable an endless trace buffer The default is a ring buffer This is useful when very old events need to viewed For during application launch Memory usage will continue to grow indefinitely however Start app with an specific route defined on the framework flutter assets Path to the Flutter assets directory enable service port Allow the VM service to fallback to automatic port selection if binding to a specified port fails trace Trace early application lifecycle Automatically switches to an endless trace buffer trace skia Filters out all Skia trace event categories except those that are specified in this comma separated list dump skp on shader Automatically dump the skp that triggers new shader compilations This is useful for writing custom ShaderWarmUp to reduce jank By this is not enabled to reduce the overhead purge persistent cache
Definition: switches.h:191
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir path
Definition: switches.h:57
DEF_SWITCHES_START aot vmservice shared library name
Definition: switches.h:32
struct PathData * Data(SkPath *path)
Definition: path_ops.cc:52
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified vm service A custom Dart VM Service port The default is to pick a randomly available open port disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode disable vm service Disable mDNS Dart VM Service publication Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set endless trace buffer
Definition: switches.h:126
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive mode
Definition: switches.h:228
it will be possible to load the file into Perfetto s trace viewer disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive keep the shell running after the Dart script has completed enable serial On low power devices with low core running concurrent GC tasks on threads can cause them to contend with the UI thread which could potentially lead to jank This option turns off all concurrent GC activities domain network JSON encoded network policy per domain This overrides the DisallowInsecureConnections switch Embedder can specify whether to allow or disallow insecure connections at a domain level old gen heap size
Definition: switches.h:259
static Dart_Handle InvokeFunction(Dart_Handle builtin_library, const char *name)
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not set
Definition: switches.h:76
std::function< void()> closure
Definition: closure.h:14
def matches(file)
Definition: gen_manifest.py:38
dst
Definition: cp.py:12
dictionary stats
Definition: malisc.py:20
const myers::Point & get(const myers::Segment &)
def print(*args, **kwargs)
Definition: run_tests.py:49
string root
Definition: scale_cpu.py:20
static bool is_linear(SkPoint p0, SkPoint p1, SkPoint p2)
SI auto map(std::index_sequence< I... >, Fn &&fn, const Args &... args) -> skvx::Vec< sizeof...(I), decltype(fn(args[0]...))>
Definition: SkVx.h:680
dest
Definition: zip.py:79
#define SHARED_READONLY_HANDLES_LIST(V)
Definition: object.h:457
#define FOR_EACH_REBIND_RULE(V)
Definition: object.h:2537
SkScalar w
#define Pp
Definition: globals.h:425
#define FALL_THROUGH
Definition: globals.h:15
#define Px
Definition: globals.h:410
#define PX64
Definition: globals.h:419
#define DEBUG_ONLY(code)
Definition: globals.h:141
#define Pu
Definition: globals.h:409
#define UNLIKELY(cond)
Definition: globals.h:261
#define Pd64
Definition: globals.h:416
#define Pd
Definition: globals.h:408
#define DISALLOW_COPY_AND_ASSIGN(TypeName)
Definition: globals.h:581
static DecodeResult decode(std::string path)
Definition: png_codec.cpp:124
#define T
Definition: precompiler.cc:65
#define CONTEXT_SCOPE_VARIABLE_DESC_FLAG_LIST(V)
Definition: raw_object.h:2439
#define CORE_LIB_INTRINSIC_LIST(V)
#define GRAPH_TYPED_DATA_INTRINSICS_LIST(V)
#define INTERNAL_LIB_INTRINSIC_LIST(V)
#define CORE_INTEGER_LIB_INTRINSIC_LIST(V)
#define POLYMORPHIC_TARGET_LIST(V)
#define GRAPH_CORE_INTRINSICS_LIST(V)
#define DEVELOPER_LIB_INTRINSIC_LIST(V)
#define RECOGNIZED_LIST_FACTORY_LIST(V)
#define OTHER_RECOGNIZED_LIST(V)
#define REUSABLE_CLASS_HANDLESCOPE(thread)
#define REUSABLE_LOADING_UNIT_HANDLESCOPE(thread)
#define REUSABLE_INSTANCE_HANDLESCOPE(thread)
#define REUSABLE_ARRAY_HANDLESCOPE(thread)
#define REUSABLE_SMI_HANDLESCOPE(thread)
#define REUSABLE_STRING_HANDLESCOPE(thread)
#define REUSABLE_TYPE_PARAMETERS_HANDLESCOPE(thread)
#define REUSABLE_FUNCTION_HANDLESCOPE(thread)
#define REUSABLE_FIELD_HANDLESCOPE(thread)
#define REUSABLE_LIBRARY_HANDLESCOPE(thread)
#define REUSABLE_ABSTRACT_TYPE_HANDLESCOPE(thread)
#define REUSABLE_OBJECT_HANDLESCOPE(thread)
#define REUSABLE_TYPE_ARGUMENTS_HANDLESCOPE(thread)
SeparatedVector2 offset
intptr_t count
Definition: object.cc:17143
intptr_t cid
Definition: object.cc:17142
CidCount(intptr_t cid_, intptr_t count_, Function *f_)
Definition: object.cc:17137
Function * function
Definition: object.cc:17144
intptr_t code_from_fp
Definition: frame_layout.h:52
intptr_t FrameSlotForVariableIndex(intptr_t index) const
Definition: stack_frame.cc:89
static constexpr intptr_t kElementSize
Definition: object.h:8602
simd128_value_t & readFrom(const float *v)
Definition: globals.h:153
std::shared_ptr< const fml::Mapping > data
Definition: texture_gles.cc:63
#define TIMELINE_DURATION(thread, stream, name)
Definition: timeline.h:39
#define NOT_IN_PRECOMPILED_RUNTIME(code)
Definition: globals.h:113
#define ARRAY_SIZE(array)
Definition: globals.h:72
#define NOT_IN_PRECOMPILED(code)
Definition: globals.h:100
#define EQUALS_IGNORING_PRIVATE_KEY(class_id, type, str1, str2)
Definition: object.cc:24290
#define IS_CHECK(name)
#define CLASS_LIST_WITH_NULL(V)
#define ADD_SET_FIELD(clazz)
#define REGISTER_TYPED_DATA_CLASS(clazz)
#define REGISTER_FFI_CLASS(clazz)
#define CHECK_ERROR(error)
Definition: object.cc:134
#define INITIALIZE_SHARED_READONLY_HANDLE(Type, name)
#define REGISTER_EXT_TYPED_DATA_CLASS(clazz)
#define DEFINE_FLAG_ACCESSORS(Name)
Definition: object.cc:18644
#define PRECOMPILER_WSR_FIELD_DEFINITION(Class, Type, Name)
Definition: object.cc:209
#define RAW_NULL
Definition: object.cc:132
#define REGISTER_TYPED_DATA_VIEW_CLASS(clazz)
#define TRACE_TYPE_CHECKS_VERBOSE(format,...)
Definition: object.cc:226
#define REGISTER_FFI_TYPE_MARKER(clazz)
#define HANDLE_CASE(clazz)
#define SET_CLASS_NAME(class_name, name)
Definition: object.cc:1466
#define TYPED_DATA_GET_INDEXED_CASES(clazz)
#define RULE_CASE(Name)
#define INIT_VTABLE(clazz)
#define DEFINE_SHARED_READONLY_HANDLE(Type, name)
Definition: object.cc:142