Flutter Engine
The Flutter Engine
Loading...
Searching...
No Matches
object.cc
Go to the documentation of this file.
1// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/object.h"
6
7#include <memory>
8
10#include "include/dart_api.h"
11#include "lib/integers.h"
12#include "lib/stacktrace.h"
13#include "platform/assert.h"
15#include "platform/unaligned.h"
16#include "platform/unicode.h"
17#include "vm/bit_vector.h"
18#include "vm/bootstrap.h"
19#include "vm/canonical_tables.h"
20#include "vm/class_finalizer.h"
21#include "vm/class_id.h"
23#include "vm/code_comments.h"
24#include "vm/code_descriptors.h"
25#include "vm/code_observers.h"
29#include "vm/cpu.h"
30#include "vm/dart.h"
31#include "vm/dart_api_state.h"
32#include "vm/dart_entry.h"
33#include "vm/datastream.h"
34#include "vm/debugger.h"
37#include "vm/elf.h"
38#include "vm/exceptions.h"
39#include "vm/growable_array.h"
40#include "vm/hash.h"
41#include "vm/hash_table.h"
42#include "vm/heap/become.h"
43#include "vm/heap/heap.h"
44#include "vm/heap/sampler.h"
45#include "vm/heap/weak_code.h"
46#include "vm/image_snapshot.h"
47#include "vm/isolate_reload.h"
48#include "vm/kernel.h"
49#include "vm/kernel_binary.h"
50#include "vm/kernel_isolate.h"
51#include "vm/kernel_loader.h"
52#include "vm/log.h"
53#include "vm/native_symbol.h"
54#include "vm/object_graph.h"
55#include "vm/object_store.h"
56#include "vm/os.h"
57#include "vm/parser.h"
58#include "vm/profiler.h"
59#include "vm/regexp.h"
60#include "vm/resolver.h"
61#include "vm/reusable_handles.h"
63#include "vm/runtime_entry.h"
64#include "vm/scopes.h"
65#include "vm/stack_frame.h"
66#include "vm/stub_code.h"
67#include "vm/symbols.h"
68#include "vm/tags.h"
69#include "vm/thread_registry.h"
70#include "vm/timeline.h"
72#include "vm/zone_text_buffer.h"
73
74#if !defined(DART_PRECOMPILED_RUNTIME)
82#endif // !defined(DART_PRECOMPILED_RUNTIME)
83
84namespace dart {
85
86DEFINE_FLAG(uint64_t,
87 huge_method_cutoff_in_code_size,
88 200000,
89 "Huge method cutoff in unoptimized code size (in bytes).");
91 bool,
92 show_internal_names,
93 false,
94 "Show names of internal classes (e.g. \"OneByteString\") in error messages "
95 "instead of showing the corresponding interface names (e.g. \"String\"). "
96 "Also show legacy nullability in type names.");
97
99 remove_script_timestamps_for_test,
100 false,
101 "Remove script timestamps to allow for deterministic testing.");
102
103#if !defined(DART_PRECOMPILED_RUNTIME)
104DEFINE_FLAG(bool, use_register_cc, true, "Use register calling conventions");
105#endif
106
107DECLARE_FLAG(bool, intrinsify);
108DECLARE_FLAG(bool, trace_deoptimization);
109DECLARE_FLAG(bool, trace_deoptimization_verbose);
110DECLARE_FLAG(bool, trace_reload);
111DECLARE_FLAG(bool, write_protect_code);
112DECLARE_FLAG(bool, precompiled_mode);
113DECLARE_FLAG(int, max_polymorphic_checks);
114
// Prefixes the VM uses to mangle the names of synthetic entry points:
// "get:name" / "set:name" mark getter and setter functions, and
// "init:name" marks a field-initializer function. The lengths are
// precomputed once so prefix tests can use memcmp-style comparisons.
static const char* const kGetterPrefix = "get:";
static const intptr_t kGetterPrefixLength = strlen(kGetterPrefix);
static const char* const kSetterPrefix = "set:";
static const intptr_t kSetterPrefixLength = strlen(kSetterPrefix);
static const char* const kInitPrefix = "init:";
static const intptr_t kInitPrefixLength = strlen(kInitPrefix);
121
122// A cache of VM heap allocated preinitialized empty ic data entry arrays.
123ArrayPtr ICData::cached_icdata_arrays_[kCachedICDataArrayCount];
124
125cpp_vtable Object::builtin_vtables_[kNumPredefinedCids] = {};
126
127// These are initialized to a value that will force an illegal memory access if
128// they are being used.
129#if defined(RAW_NULL)
130#error RAW_NULL should not be defined.
131#endif
132#define RAW_NULL static_cast<uword>(kHeapObjectTag)
133
// Evaluates |error| exactly once; if it yields a real error object
// (i.e. not Error::null()), propagates it immediately by returning it
// from the enclosing function. Used to chain fallible setup steps.
#define CHECK_ERROR(error)                                                     \
  {                                                                            \
    ErrorPtr err = (error);                                                    \
    if (err != Error::null()) {                                                \
      return err;                                                              \
    }                                                                          \
  }
141
142#define DEFINE_SHARED_READONLY_HANDLE(Type, name) \
143 Type* Object::name##_ = nullptr;
145#undef DEFINE_SHARED_READONLY_HANDLE
146
147ObjectPtr Object::null_ = static_cast<ObjectPtr>(RAW_NULL);
148BoolPtr Object::true_ = static_cast<BoolPtr>(RAW_NULL);
149BoolPtr Object::false_ = static_cast<BoolPtr>(RAW_NULL);
150ClassPtr Object::class_class_ = static_cast<ClassPtr>(RAW_NULL);
151ClassPtr Object::dynamic_class_ = static_cast<ClassPtr>(RAW_NULL);
152ClassPtr Object::void_class_ = static_cast<ClassPtr>(RAW_NULL);
153ClassPtr Object::type_parameters_class_ = static_cast<ClassPtr>(RAW_NULL);
154ClassPtr Object::type_arguments_class_ = static_cast<ClassPtr>(RAW_NULL);
155ClassPtr Object::patch_class_class_ = static_cast<ClassPtr>(RAW_NULL);
156ClassPtr Object::function_class_ = static_cast<ClassPtr>(RAW_NULL);
157ClassPtr Object::closure_data_class_ = static_cast<ClassPtr>(RAW_NULL);
158ClassPtr Object::ffi_trampoline_data_class_ = static_cast<ClassPtr>(RAW_NULL);
159ClassPtr Object::field_class_ = static_cast<ClassPtr>(RAW_NULL);
160ClassPtr Object::script_class_ = static_cast<ClassPtr>(RAW_NULL);
161ClassPtr Object::library_class_ = static_cast<ClassPtr>(RAW_NULL);
162ClassPtr Object::namespace_class_ = static_cast<ClassPtr>(RAW_NULL);
163ClassPtr Object::kernel_program_info_class_ = static_cast<ClassPtr>(RAW_NULL);
164ClassPtr Object::code_class_ = static_cast<ClassPtr>(RAW_NULL);
165ClassPtr Object::instructions_class_ = static_cast<ClassPtr>(RAW_NULL);
166ClassPtr Object::instructions_section_class_ = static_cast<ClassPtr>(RAW_NULL);
167ClassPtr Object::instructions_table_class_ = static_cast<ClassPtr>(RAW_NULL);
168ClassPtr Object::object_pool_class_ = static_cast<ClassPtr>(RAW_NULL);
169ClassPtr Object::pc_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
170ClassPtr Object::code_source_map_class_ = static_cast<ClassPtr>(RAW_NULL);
171ClassPtr Object::compressed_stackmaps_class_ = static_cast<ClassPtr>(RAW_NULL);
172ClassPtr Object::var_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
173ClassPtr Object::exception_handlers_class_ = static_cast<ClassPtr>(RAW_NULL);
174ClassPtr Object::context_class_ = static_cast<ClassPtr>(RAW_NULL);
175ClassPtr Object::context_scope_class_ = static_cast<ClassPtr>(RAW_NULL);
176ClassPtr Object::sentinel_class_ = static_cast<ClassPtr>(RAW_NULL);
177ClassPtr Object::singletargetcache_class_ = static_cast<ClassPtr>(RAW_NULL);
178ClassPtr Object::unlinkedcall_class_ = static_cast<ClassPtr>(RAW_NULL);
179ClassPtr Object::monomorphicsmiablecall_class_ =
180 static_cast<ClassPtr>(RAW_NULL);
181ClassPtr Object::icdata_class_ = static_cast<ClassPtr>(RAW_NULL);
182ClassPtr Object::megamorphic_cache_class_ = static_cast<ClassPtr>(RAW_NULL);
183ClassPtr Object::subtypetestcache_class_ = static_cast<ClassPtr>(RAW_NULL);
184ClassPtr Object::loadingunit_class_ = static_cast<ClassPtr>(RAW_NULL);
185ClassPtr Object::api_error_class_ = static_cast<ClassPtr>(RAW_NULL);
186ClassPtr Object::language_error_class_ = static_cast<ClassPtr>(RAW_NULL);
187ClassPtr Object::unhandled_exception_class_ = static_cast<ClassPtr>(RAW_NULL);
188ClassPtr Object::unwind_error_class_ = static_cast<ClassPtr>(RAW_NULL);
189ClassPtr Object::weak_serialization_reference_class_ =
190 static_cast<ClassPtr>(RAW_NULL);
191ClassPtr Object::weak_array_class_ = static_cast<ClassPtr>(RAW_NULL);
192
194 const char* name,
195 intptr_t start_pos,
196 intptr_t len) {
197 buffer->Printf("%.*s", static_cast<int>(len), &name[start_pos]);
198}
199
200// Used to define setters and getters for untagged object fields that are
201// defined with the WSR_COMPRESSED_POINTER_FIELD macro. See
202// PRECOMPILER_WSR_FIELD_DECLARATION in object.h for more information.
203#if defined(DART_PRECOMPILER)
204#define PRECOMPILER_WSR_FIELD_DEFINITION(Class, Type, Name) \
205 Type##Ptr Class::Name() const { \
206 return Type::RawCast(WeakSerializationReference::Unwrap(untag()->Name())); \
207 }
208#else
209#define PRECOMPILER_WSR_FIELD_DEFINITION(Class, Type, Name) \
210 void Class::set_##Name(const Type& value) const { \
211 untag()->set_##Name(value.ptr()); \
212 }
213#endif
214
215PRECOMPILER_WSR_FIELD_DEFINITION(ClosureData, Function, parent_function)
216PRECOMPILER_WSR_FIELD_DEFINITION(Function, FunctionType, signature)
217
218#undef PRECOMPILER_WSR_FIELD_DEFINITION
219
// Emits a verbose type-check trace line when FLAG_trace_type_checks_verbose
// is set. Two variants are required because of preprocessor differences:
// MSVC silently drops the trailing comma when __VA_ARGS__ is empty, whereas
// GCC/Clang need the "##__VA_ARGS__" extension to swallow that comma when
// the macro is invoked with a format string and no additional arguments.
#if defined(_MSC_VER)
#define TRACE_TYPE_CHECKS_VERBOSE(format, ...)                                 \
  if (FLAG_trace_type_checks_verbose) {                                        \
    OS::PrintErr(format, __VA_ARGS__);                                         \
  }
#else
#define TRACE_TYPE_CHECKS_VERBOSE(format, ...)                                 \
  if (FLAG_trace_type_checks_verbose) {                                        \
    OS::PrintErr(format, ##__VA_ARGS__);                                       \
  }
#endif
231
232// Remove private keys, but retain getter/setter/constructor/mixin manglings.
234 ASSERT(name.IsOneByteString());
235 GrowableArray<uint8_t> without_key(name.Length());
236 intptr_t i = 0;
237 while (i < name.Length()) {
238 while (i < name.Length()) {
239 uint8_t c = name.CharAt(i++);
240 if (c == '@') break;
241 without_key.Add(c);
242 }
243 while (i < name.Length()) {
244 uint8_t c = name.CharAt(i);
245 if ((c < '0') || (c > '9')) break;
246 i++;
247 }
248 }
249
250 return String::FromLatin1(without_key.data(), without_key.length());
251}
252
253// Takes a vm internal name and makes it suitable for external user.
254//
255// Examples:
256//
257// Internal getter and setter prefixes are changed:
258//
259// get:foo -> foo
260// set:foo -> foo=
261//
262// Private name mangling is removed, possibly multiple times:
263//
264// _ReceivePortImpl@709387912 -> _ReceivePortImpl
265// _ReceivePortImpl@709387912._internal@709387912 ->
266// _ReceivePortImpl._internal
267// _C@6328321&_E@6328321&_F@6328321 -> _C&_E&_F
268//
269// The trailing . on the default constructor name is dropped:
270//
271// List. -> List
272//
273// And so forth:
274//
275// get:foo@6328321 -> foo
276// _MyClass@6328321. -> _MyClass
277// _MyClass@6328321.named -> _MyClass.named
278//
279// For extension methods the following demangling is done
280// ext|func -> ext.func (instance extension method)
281// ext|get#prop -> ext.prop (instance extension getter)
282// ext|set#prop -> ext.prop= (instance extension setter)
283// ext|sfunc -> ext.sfunc (static extension method)
284// get:ext|sprop -> ext.sprop (static extension getter)
285// set:ext|sprop -> ext.sprop= (static extension setter)
286//
const char* String::ScrubName(const String& name, bool is_extension) {
  Thread* thread = Thread::Current();
  NoSafepointScope no_safepoint(thread);
  Zone* zone = thread->zone();
  // All intermediate buffers are zone-allocated; the returned C string
  // lives in (and is owned by) the current thread's zone.
  ZoneTextBuffer printer(zone);

#if !defined(DART_PRECOMPILED_RUNTIME)
  if (name.Equals(Symbols::TopLevel())) {
    // Name of invisible top-level class.
    return "";
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  const char* cname = name.ToCString();
  ASSERT(strlen(cname) == static_cast<size_t>(name.Length()));
  const intptr_t name_len = name.Length();
  // Phase 1: first remove all private name mangling ("@<digits>" keys) and,
  // if 'is_extension' is true, substitute the first '|' character with '.'.
  // Kept segments are appended to 'printer'; 'sum_segment_len' tracks the
  // total length of the text kept so far.
  intptr_t start_pos = 0;
  intptr_t sum_segment_len = 0;
  for (intptr_t i = 0; i < name_len; i++) {
    // An '@' starts a private key only when immediately followed by a digit.
    if ((cname[i] == '@') && ((i + 1) < name_len) && (cname[i + 1] >= '0') &&
        (cname[i + 1] <= '9')) {
      // Append the current segment to the unmangled name.
      const intptr_t segment_len = i - start_pos;
      sum_segment_len += segment_len;
      AppendSubString(&printer, cname, start_pos, segment_len);
      // Advance until past the name mangling. The private keys are only
      // numbers so we skip until the first non-number.
      i++;  // Skip the '@'.
      while ((i < name.Length()) && (name.CharAt(i) >= '0') &&
             (name.CharAt(i) <= '9')) {
        i++;
      }
      start_pos = i;
      i--;  // Account for for-loop increment.
    } else if (is_extension && cname[i] == '|') {
      // Append the current segment to the unmangled name.
      const intptr_t segment_len = i - start_pos;
      AppendSubString(&printer, cname, start_pos, segment_len);
      // Append the '.' character (replaces '|' with '.').
      AppendSubString(&printer, ".", 0, 1);
      start_pos = i + 1;
      // Account for length of segments added so far.
      sum_segment_len += (segment_len + 1);
    }
  }

  const char* unmangled_name = nullptr;
  if (start_pos == 0) {
    // No name unmangling needed, reuse the name that was passed in.
    unmangled_name = cname;
    sum_segment_len = name_len;
  } else if (name.Length() != start_pos) {
    // Append the last segment.
    const intptr_t segment_len = name.Length() - start_pos;
    sum_segment_len += segment_len;
    AppendSubString(&printer, cname, start_pos, segment_len);
  }
  if (unmangled_name == nullptr) {
    // Merge unmangled_segments.
    unmangled_name = printer.buffer();
  }

  // Phase 2: interpret the de-privatized name. 'printer' is reused for the
  // final output; 'unmangled_name' is scanned in place from here on.
  printer.Clear();
  intptr_t start = 0;
  intptr_t len = sum_segment_len;
  bool is_setter = false;
  if (is_extension) {
    // For extension members, first scan till we see the '.' character
    // (the extension-name/member-name separator produced in phase 1) and
    // copy the "ext." part straight through; also strip a leading
    // "get:"/"set:" prefix that precedes the extension name.
    for (intptr_t i = 0; i < len; i++) {
      if (unmangled_name[i] == '.') {
        intptr_t slen = i + 1;
        intptr_t plen = slen - start;
        AppendSubString(&printer, unmangled_name, start, plen);
        // Continue scanning only the member-name part.
        unmangled_name += slen;
        len -= slen;
        break;
      } else if (unmangled_name[i] == ':') {
        if (start != 0) {
          // Reset and break.
          start = 0;
          is_setter = false;
          break;
        }
        // "set:..." marks a setter; "get:..." leaves is_setter false.
        if (unmangled_name[0] == 's') {
          is_setter = true;
        }
        start = i + 1;
      }
    }
  }
  // Phase 3: strip a "get:"/"set:" prefix (or a '#'-separated extension
  // accessor prefix) and locate the single permitted '.' separator.
  intptr_t dot_pos = -1;  // Position of '.' in the name, if any.
  start = 0;
  for (intptr_t i = start; i < len; i++) {
    if (unmangled_name[i] == ':' ||
        (is_extension && unmangled_name[i] == '#')) {
      if (start != 0) {
        // Reset and break.
        start = 0;
        dot_pos = -1;
        break;
      }
      ASSERT(start == 0);  // Only one : is possible in getters or setters.
      if (unmangled_name[0] == 's') {
        ASSERT(!is_setter);
        is_setter = true;
      }
      start = i + 1;
    } else if (unmangled_name[i] == '.') {
      if (dot_pos != -1) {
        // Reset and break.
        start = 0;
        dot_pos = -1;
        break;
      }
      ASSERT(dot_pos == -1);  // Only one dot is supported.
      dot_pos = i;
    }
  }

  if (!is_extension && (start == 0) && (dot_pos == -1)) {
    // This unmangled_name is fine as it is.
    return unmangled_name;
  }

  // Drop the trailing dot if needed (default constructor names like "List.").
  intptr_t end = ((dot_pos + 1) == len) ? dot_pos : len;

  intptr_t substr_len = end - start;
  AppendSubString(&printer, unmangled_name, start, substr_len);
  if (is_setter) {
    // Setters are rendered with a trailing '=' (e.g. "set:foo" -> "foo=").
    const char* equals = Symbols::Equals().ToCString();
    const intptr_t equals_len = strlen(equals);
    AppendSubString(&printer, equals, 0, equals_len);
  }

  return printer.buffer();
}
426
428 bool is_extension) {
429#if !defined(DART_PRECOMPILED_RUNTIME)
430 intptr_t len = name.Length();
431 intptr_t start = 0;
432 intptr_t at_pos = -1; // Position of '@' in the name, if any.
433 bool is_setter = false;
434
436
437 // If extension strip out the leading prefix e.g" ext|func would strip out
438 // 'ext|'.
439 if (is_extension) {
440 // First scan till we see the '|' character.
441 for (intptr_t i = 0; i < len; i++) {
442 if (name.CharAt(i) == '|') {
445 start = i + 1;
446 break;
447 } else if (name.CharAt(i) == ':') {
448 if (start != 0) {
449 // Reset and break.
450 start = 0;
451 is_setter = false;
452 break;
453 }
454 if (name.CharAt(0) == 's') {
455 is_setter = true;
456 }
457 start = i + 1;
458 }
459 }
460 }
461
462 for (intptr_t i = start; i < len; i++) {
463 if (name.CharAt(i) == ':' || (is_extension && name.CharAt(i) == '#')) {
464 // Only one : is possible in getters or setters.
465 ASSERT(is_extension || start == 0);
466 if (name.CharAt(start) == 's') {
467 is_setter = true;
468 }
469 start = i + 1;
470 } else if (name.CharAt(i) == '@') {
471 // Setters should have only one @ so we know where to put the =.
472 ASSERT(!is_setter || (at_pos == -1));
473 at_pos = i;
474 }
475 }
476
477 if (start == 0) {
478 // This unmangled_name is fine as it is.
479 return name.ptr();
480 }
481
482 if (is_extension) {
483 const String& fname =
485 result = String::Concat(result, fname);
486 } else {
488 }
489
490 if (is_setter) {
491 // Setters need to end with '='.
492 if (at_pos == -1) {
494 } else {
495 const String& pre_at =
496 String::Handle(String::SubString(result, 0, at_pos - 4));
497 const String& post_at =
498 String::Handle(String::SubString(name, at_pos, len - at_pos));
500 result = String::Concat(result, post_at);
501 }
502 }
503
504 return result.ptr();
505#endif // !defined(DART_PRECOMPILED_RUNTIME)
506 return name.ptr(); // In AOT, return argument unchanged.
507}
508
// Returns true for characters that must be written with a backslash escape
// when a string is rendered as Dart source: the double quote, the common
// control-character escapes, the backslash itself, and '$' (which would
// otherwise start string interpolation).
template <typename type>
static bool IsSpecialCharacter(type value) {
  switch (value) {
    case '"':
    case '\n':
    case '\f':
    case '\b':
    case '\t':
    case '\v':
    case '\r':
    case '\\':
    case '$':
      return true;
    default:
      return false;
  }
}
515
516static inline bool IsAsciiNonprintable(int32_t c) {
517 return ((0 <= c) && (c < 32)) || (c == 127);
518}
519
520static int32_t EscapeOverhead(int32_t c) {
521 if (IsSpecialCharacter(c)) {
522 return 1; // 1 additional byte for the backslash.
523 } else if (IsAsciiNonprintable(c)) {
524 return 3; // 3 additional bytes to encode c as \x00.
525 }
526 return 0;
527}
528
529template <typename type>
531 if (value == '"') {
532 return '"';
533 } else if (value == '\n') {
534 return 'n';
535 } else if (value == '\f') {
536 return 'f';
537 } else if (value == '\b') {
538 return 'b';
539 } else if (value == '\t') {
540 return 't';
541 } else if (value == '\v') {
542 return 'v';
543 } else if (value == '\r') {
544 return 'r';
545 } else if (value == '\\') {
546 return '\\';
547 } else if (value == '$') {
548 return '$';
549 }
550 UNREACHABLE();
551 return '\0';
552}
553
  // Should only be run by the vm isolate.
  ASSERT(isolate_group == Dart::vm_isolate_group());
  Thread* thread = Thread::Current();
  auto heap = isolate_group->heap();

  // TODO(iposva): NoSafepointScope needs to be added here.
  ASSERT(class_class() == null_);

  // Allocate and initialize the null instance.
  // 'null_' must be the first object allocated as it is used in allocation to
  // clear the pointer fields of objects.
  {
    uword address =
        heap->Allocate(thread, Instance::InstanceSize(), Heap::kOld);
    // Old-space addresses are untagged; add kHeapObjectTag to form the
    // tagged object pointer.
    null_ = static_cast<InstancePtr>(address + kHeapObjectTag);
    InitializeObjectVariant<Instance>(address, kNullCid);
    null_->untag()->SetCanonical();
  }

  // Allocate and initialize the bool instances.
  // These must be allocated such that at kBoolValueBitPosition, the address
  // of true is 0 and the address of false is 1, and their addresses are
  // otherwise identical.
  {
    // Allocate a dummy bool object to give true the desired alignment.
    // The dummy itself is never referenced; it only consumes one bool-sized
    // slot so that 'true_' lands at the required address parity.
    uword address = heap->Allocate(thread, Bool::InstanceSize(), Heap::kOld);
    InitializeObject<Bool>(address);
    static_cast<BoolPtr>(address + kHeapObjectTag)->untag()->value_ = false;
  }
  {
    // Allocate true.
    uword address = heap->Allocate(thread, Bool::InstanceSize(), Heap::kOld);
    true_ = static_cast<BoolPtr>(address + kHeapObjectTag);
    InitializeObject<Bool>(address);
    true_->untag()->value_ = true;
    true_->untag()->SetCanonical();
  }
  {
    // Allocate false.
    uword address = heap->Allocate(thread, Bool::InstanceSize(), Heap::kOld);
    false_ = static_cast<BoolPtr>(address + kHeapObjectTag);
    InitializeObject<Bool>(address);
    false_->untag()->value_ = false;
    false_->untag()->SetCanonical();
  }

  // Check that the objects have been allocated at appropriate addresses.
  // These bit-pattern invariants let generated code convert between
  // null/true/false with masking instead of comparisons.
  ASSERT(static_cast<uword>(true_) ==
         static_cast<uword>(null_) + kTrueOffsetFromNull);
  ASSERT(static_cast<uword>(false_) ==
         static_cast<uword>(null_) + kFalseOffsetFromNull);
  ASSERT((static_cast<uword>(true_) & kBoolValueMask) == 0);
  ASSERT((static_cast<uword>(false_) & kBoolValueMask) != 0);
  ASSERT(static_cast<uword>(false_) ==
         (static_cast<uword>(true_) | kBoolValueMask));
  ASSERT((static_cast<uword>(null_) & kBoolVsNullMask) == 0);
  ASSERT((static_cast<uword>(true_) & kBoolVsNullMask) != 0);
  ASSERT((static_cast<uword>(false_) & kBoolVsNullMask) != 0);
}
614
616 {
617 Object fake_handle;
618 builtin_vtables_[kObjectCid] = fake_handle.vtable();
619 }
620
621#define INIT_VTABLE(clazz) \
622 { \
623 clazz fake_handle; \
624 builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \
625 }
628#undef INIT_VTABLE
629
630#define INIT_VTABLE(clazz) \
631 { \
632 Map fake_handle; \
633 builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \
634 }
636#undef INIT_VTABLE
637
638#define INIT_VTABLE(clazz) \
639 { \
640 Set fake_handle; \
641 builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \
642 }
644#undef INIT_VTABLE
645
646#define INIT_VTABLE(clazz) \
647 { \
648 Array fake_handle; \
649 builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \
650 }
652#undef INIT_VTABLE
653
654#define INIT_VTABLE(clazz) \
655 { \
656 String fake_handle; \
657 builtin_vtables_[k##clazz##Cid] = fake_handle.vtable(); \
658 }
660#undef INIT_VTABLE
661
662 {
663 Instance fake_handle;
664 builtin_vtables_[kFfiNativeTypeCid] = fake_handle.vtable();
665 }
666
667#define INIT_VTABLE(clazz) \
668 { \
669 Instance fake_handle; \
670 builtin_vtables_[kFfi##clazz##Cid] = fake_handle.vtable(); \
671 }
673#undef INIT_VTABLE
674
675 {
676 Instance fake_handle;
677 builtin_vtables_[kFfiNativeFunctionCid] = fake_handle.vtable();
678 }
679
680 {
681 Pointer fake_handle;
682 builtin_vtables_[kPointerCid] = fake_handle.vtable();
683 }
684
685 {
686 DynamicLibrary fake_handle;
687 builtin_vtables_[kDynamicLibraryCid] = fake_handle.vtable();
688 }
689
690#define INIT_VTABLE(clazz) \
691 { \
692 TypedData fake_internal_handle; \
693 builtin_vtables_[kTypedData##clazz##Cid] = fake_internal_handle.vtable(); \
694 TypedDataView fake_view_handle; \
695 builtin_vtables_[kTypedData##clazz##ViewCid] = fake_view_handle.vtable(); \
696 builtin_vtables_[kUnmodifiableTypedData##clazz##ViewCid] = \
697 fake_view_handle.vtable(); \
698 ExternalTypedData fake_external_handle; \
699 builtin_vtables_[kExternalTypedData##clazz##Cid] = \
700 fake_external_handle.vtable(); \
701 }
703#undef INIT_VTABLE
704
705 {
706 TypedDataView fake_handle;
707 builtin_vtables_[kByteDataViewCid] = fake_handle.vtable();
708 builtin_vtables_[kUnmodifiableByteDataViewCid] = fake_handle.vtable();
709 }
710
711 {
712 Instance fake_handle;
713 builtin_vtables_[kByteBufferCid] = fake_handle.vtable();
714 builtin_vtables_[kNullCid] = fake_handle.vtable();
715 builtin_vtables_[kDynamicCid] = fake_handle.vtable();
716 builtin_vtables_[kVoidCid] = fake_handle.vtable();
717 builtin_vtables_[kNeverCid] = fake_handle.vtable();
718 }
719}
720
721void Object::Init(IsolateGroup* isolate_group) {
722 // Should only be run by the vm isolate.
723 ASSERT(isolate_group == Dart::vm_isolate_group());
724 Heap* heap = isolate_group->heap();
725 Thread* thread = Thread::Current();
726 ASSERT(thread != nullptr);
727 // Ensure lock checks in setters are happy.
728 SafepointWriteRwLocker ml(thread, isolate_group->program_lock());
729
730 InitVtables();
731
732// Allocate the read only object handles here.
733#define INITIALIZE_SHARED_READONLY_HANDLE(Type, name) \
734 name##_ = Type::ReadOnlyHandle();
736#undef INITIALIZE_SHARED_READONLY_HANDLE
737
738 *null_object_ = Object::null();
739 *null_class_ = Class::null();
740 *null_array_ = Array::null();
741 *null_string_ = String::null();
742 *null_instance_ = Instance::null();
743 *null_function_ = Function::null();
744 *null_function_type_ = FunctionType::null();
745 *null_record_type_ = RecordType::null();
746 *null_type_arguments_ = TypeArguments::null();
747 *null_closure_ = Closure::null();
748 *empty_type_arguments_ = TypeArguments::null();
749 *null_abstract_type_ = AbstractType::null();
750 *null_compressed_stackmaps_ = CompressedStackMaps::null();
751 *bool_true_ = true_;
752 *bool_false_ = false_;
753
754 // Initialize the empty array and empty instantiations cache array handles to
755 // null_ in order to be able to check if the empty and zero arrays were
756 // allocated (RAW_NULL is not available).
757 *empty_array_ = Array::null();
758 *empty_instantiations_cache_array_ = Array::null();
759 *empty_subtype_test_cache_array_ = Array::null();
760
761 Class& cls = Class::Handle();
762
763 // Allocate and initialize the class class.
764 {
765 intptr_t size = Class::InstanceSize();
766 uword address = heap->Allocate(thread, size, Heap::kOld);
767 class_class_ = static_cast<ClassPtr>(address + kHeapObjectTag);
768 InitializeObject<Class>(address);
769
770 Class fake;
771 // Initialization from Class::New<Class>.
772 // Directly set ptr_ to break a circular dependency: SetRaw will attempt
773 // to lookup class class in the class table where it is not registered yet.
774 cls.ptr_ = class_class_;
775 ASSERT(builtin_vtables_[kClassCid] == fake.vtable());
778 compiler::target::RoundedAllocationSize(RTN::Class::InstanceSize()));
779 const intptr_t host_next_field_offset = Class::NextFieldOffset();
780 const intptr_t target_next_field_offset = RTN::Class::NextFieldOffset();
781 cls.set_next_field_offset(host_next_field_offset, target_next_field_offset);
783 cls.set_state_bits(0);
788 RTN::Class::kNoTypeArguments);
791 cls.InitEmptyFields();
792 isolate_group->class_table()->Register(cls);
793 }
794
795 // Allocate and initialize the null class.
796 cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate_group);
798 isolate_group->object_store()->set_null_class(cls);
799
800 // Allocate and initialize Never class.
801 cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate_group);
806 isolate_group->object_store()->set_never_class(cls);
807
808 // Allocate and initialize the free list element class.
810 RTN::FreeListElement::FakeInstance>(kFreeListElement,
811 isolate_group);
816
817 // Allocate and initialize the forwarding corpse class.
819 RTN::ForwardingCorpse::FakeInstance>(kForwardingCorpse,
820 isolate_group);
825
826 // Allocate and initialize Sentinel class.
827 cls = Class::New<Sentinel, RTN::Sentinel>(isolate_group);
828 sentinel_class_ = cls.ptr();
829
830 // Allocate and initialize the sentinel values.
831 {
832 *sentinel_ ^= Sentinel::New();
833 *transition_sentinel_ ^= Sentinel::New();
834 }
835
836 // Allocate and initialize optimizing compiler constants.
837 {
838 *unknown_constant_ ^= Sentinel::New();
839 *non_constant_ ^= Sentinel::New();
840 *optimized_out_ ^= Sentinel::New();
841 }
842
843 // Allocate the remaining VM internal classes.
844 cls = Class::New<TypeParameters, RTN::TypeParameters>(isolate_group);
845 type_parameters_class_ = cls.ptr();
846
847 cls = Class::New<TypeArguments, RTN::TypeArguments>(isolate_group);
848 type_arguments_class_ = cls.ptr();
849
850 cls = Class::New<PatchClass, RTN::PatchClass>(isolate_group);
851 patch_class_class_ = cls.ptr();
852
853 cls = Class::New<Function, RTN::Function>(isolate_group);
854 function_class_ = cls.ptr();
855
856 cls = Class::New<ClosureData, RTN::ClosureData>(isolate_group);
857 closure_data_class_ = cls.ptr();
858
859 cls = Class::New<FfiTrampolineData, RTN::FfiTrampolineData>(isolate_group);
860 ffi_trampoline_data_class_ = cls.ptr();
861
862 cls = Class::New<Field, RTN::Field>(isolate_group);
863 field_class_ = cls.ptr();
864
865 cls = Class::New<Script, RTN::Script>(isolate_group);
866 script_class_ = cls.ptr();
867
868 cls = Class::New<Library, RTN::Library>(isolate_group);
869 library_class_ = cls.ptr();
870
871 cls = Class::New<Namespace, RTN::Namespace>(isolate_group);
872 namespace_class_ = cls.ptr();
873
874 cls = Class::New<KernelProgramInfo, RTN::KernelProgramInfo>(isolate_group);
875 kernel_program_info_class_ = cls.ptr();
876
877 cls = Class::New<Code, RTN::Code>(isolate_group);
878 code_class_ = cls.ptr();
879
880 cls = Class::New<Instructions, RTN::Instructions>(isolate_group);
881 instructions_class_ = cls.ptr();
882
883 cls =
884 Class::New<InstructionsSection, RTN::InstructionsSection>(isolate_group);
885 instructions_section_class_ = cls.ptr();
886
887 cls = Class::New<InstructionsTable, RTN::InstructionsTable>(isolate_group);
888 instructions_table_class_ = cls.ptr();
889
890 cls = Class::New<ObjectPool, RTN::ObjectPool>(isolate_group);
891 object_pool_class_ = cls.ptr();
892
893 cls = Class::New<PcDescriptors, RTN::PcDescriptors>(isolate_group);
894 pc_descriptors_class_ = cls.ptr();
895
896 cls = Class::New<CodeSourceMap, RTN::CodeSourceMap>(isolate_group);
897 code_source_map_class_ = cls.ptr();
898
899 cls =
900 Class::New<CompressedStackMaps, RTN::CompressedStackMaps>(isolate_group);
901 compressed_stackmaps_class_ = cls.ptr();
902
903 cls =
904 Class::New<LocalVarDescriptors, RTN::LocalVarDescriptors>(isolate_group);
905 var_descriptors_class_ = cls.ptr();
906
907 cls = Class::New<ExceptionHandlers, RTN::ExceptionHandlers>(isolate_group);
908 exception_handlers_class_ = cls.ptr();
909
910 cls = Class::New<Context, RTN::Context>(isolate_group);
911 context_class_ = cls.ptr();
912
913 cls = Class::New<ContextScope, RTN::ContextScope>(isolate_group);
914 context_scope_class_ = cls.ptr();
915
916 cls = Class::New<SingleTargetCache, RTN::SingleTargetCache>(isolate_group);
917 singletargetcache_class_ = cls.ptr();
918
919 cls = Class::New<UnlinkedCall, RTN::UnlinkedCall>(isolate_group);
920 unlinkedcall_class_ = cls.ptr();
921
922 cls = Class::New<MonomorphicSmiableCall, RTN::MonomorphicSmiableCall>(
923 isolate_group);
924 monomorphicsmiablecall_class_ = cls.ptr();
925
926 cls = Class::New<ICData, RTN::ICData>(isolate_group);
927 icdata_class_ = cls.ptr();
928
929 cls = Class::New<MegamorphicCache, RTN::MegamorphicCache>(isolate_group);
930 megamorphic_cache_class_ = cls.ptr();
931
932 cls = Class::New<SubtypeTestCache, RTN::SubtypeTestCache>(isolate_group);
933 subtypetestcache_class_ = cls.ptr();
934
935 cls = Class::New<LoadingUnit, RTN::LoadingUnit>(isolate_group);
936 loadingunit_class_ = cls.ptr();
937
938 cls = Class::New<ApiError, RTN::ApiError>(isolate_group);
939 api_error_class_ = cls.ptr();
940
941 cls = Class::New<LanguageError, RTN::LanguageError>(isolate_group);
942 language_error_class_ = cls.ptr();
943
944 cls = Class::New<UnhandledException, RTN::UnhandledException>(isolate_group);
945 unhandled_exception_class_ = cls.ptr();
946
947 cls = Class::New<UnwindError, RTN::UnwindError>(isolate_group);
948 unwind_error_class_ = cls.ptr();
949
950 cls = Class::New<WeakSerializationReference, RTN::WeakSerializationReference>(
951 isolate_group);
952 weak_serialization_reference_class_ = cls.ptr();
953
954 cls = Class::New<WeakArray, RTN::WeakArray>(isolate_group);
955 weak_array_class_ = cls.ptr();
956
957 ASSERT(class_class() != null_);
958
959 // Pre-allocate classes in the vm isolate so that we can for example create a
960 // symbol table and populate it with some frequently used strings as symbols.
961 cls = Class::New<Array, RTN::Array>(isolate_group);
962 isolate_group->object_store()->set_array_class(cls);
964 RTN::Array::type_arguments_offset());
966 cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate_group);
967 isolate_group->object_store()->set_immutable_array_class(cls);
969 RTN::Array::type_arguments_offset());
971 // In order to be able to canonicalize arguments descriptors early.
973 cls =
974 Class::New<GrowableObjectArray, RTN::GrowableObjectArray>(isolate_group);
975 isolate_group->object_store()->set_growable_object_array_class(cls);
978 RTN::GrowableObjectArray::type_arguments_offset());
980 cls = Class::NewStringClass(kOneByteStringCid, isolate_group);
981 isolate_group->object_store()->set_one_byte_string_class(cls);
982 cls = Class::NewStringClass(kTwoByteStringCid, isolate_group);
983 isolate_group->object_store()->set_two_byte_string_class(cls);
984 cls = Class::New<Mint, RTN::Mint>(isolate_group);
985 isolate_group->object_store()->set_mint_class(cls);
986 cls = Class::New<Double, RTN::Double>(isolate_group);
987 isolate_group->object_store()->set_double_class(cls);
988 cls = Class::New<Float32x4, RTN::Float32x4>(isolate_group);
989 isolate_group->object_store()->set_float32x4_class(cls);
990 cls = Class::New<Float64x2, RTN::Float64x2>(isolate_group);
991 isolate_group->object_store()->set_float64x2_class(cls);
992 cls = Class::New<Int32x4, RTN::Int32x4>(isolate_group);
993 isolate_group->object_store()->set_int32x4_class(cls);
994
995 // Ensure that class kExternalTypedDataUint8ArrayCid is registered as we
996 // need it when reading in the token stream of bootstrap classes in the VM
997 // isolate.
998 Class::NewExternalTypedDataClass(kExternalTypedDataUint8ArrayCid,
999 isolate_group);
1000
1001 // Needed for object pools of VM isolate stubs.
1002 Class::NewTypedDataClass(kTypedDataInt8ArrayCid, isolate_group);
1003
1004 // Allocate and initialize the empty_array instance.
1005 {
1006 uword address = heap->Allocate(thread, Array::InstanceSize(0), Heap::kOld);
1007 InitializeObjectVariant<Array>(address, kImmutableArrayCid, 0);
1008 Array::initializeHandle(empty_array_,
1009 static_cast<ArrayPtr>(address + kHeapObjectTag));
1010 empty_array_->untag()->set_length(Smi::New(0));
1011 empty_array_->SetCanonical();
1012 }
1013
1014 Smi& smi = Smi::Handle();
1015 // Allocate and initialize the empty instantiations cache array instance,
1016 // which contains metadata as the first element and a sentinel value
1017 // at the start of the first entry.
1018 {
1019 const intptr_t array_size =
1021 uword address =
1022 heap->Allocate(thread, Array::InstanceSize(array_size), Heap::kOld);
1023 InitializeObjectVariant<Array>(address, kImmutableArrayCid, array_size);
1024 Array::initializeHandle(empty_instantiations_cache_array_,
1025 static_cast<ArrayPtr>(address + kHeapObjectTag));
1026 empty_instantiations_cache_array_->untag()->set_length(
1027 Smi::New(array_size));
1028 // The empty cache has no occupied entries and is not a hash-based cache.
1029 smi = Smi::New(0);
1030 empty_instantiations_cache_array_->SetAt(
1032 // Make the first (and only) entry unoccupied by setting its first element
1033 // to the sentinel value.
1035 InstantiationsCacheTable table(*empty_instantiations_cache_array_);
1037 // The other contents of the array are immaterial.
1038 empty_instantiations_cache_array_->SetCanonical();
1039 }
1040
1041 // Allocate and initialize the empty subtype test cache array instance,
1042 // which contains a single unoccupied entry.
1043 {
1044 const intptr_t array_size = SubtypeTestCache::kTestEntryLength;
1045 uword address =
1046 heap->Allocate(thread, Array::InstanceSize(array_size), Heap::kOld);
1047 InitializeObjectVariant<Array>(address, kImmutableArrayCid, array_size);
1048 Array::initializeHandle(empty_subtype_test_cache_array_,
1049 static_cast<ArrayPtr>(address + kHeapObjectTag));
1050 empty_subtype_test_cache_array_->untag()->set_length(Smi::New(array_size));
1051 // Make the first (and only) entry unoccupied by setting its first element
1052 // to the null value.
1053 empty_subtype_test_cache_array_->SetAt(
1054 SubtypeTestCache::kInstanceCidOrSignature, Object::null_object());
1056 SubtypeTestCacheTable table(*empty_subtype_test_cache_array_);
1058 Object::null_object());
1059 // The other contents of the array are immaterial.
1060 empty_subtype_test_cache_array_->SetCanonical();
1061 }
1062
1063 // Allocate and initialize the canonical empty context scope object.
1064 {
1065 uword address =
1067 InitializeObject<ContextScope>(address, 0);
1068 ContextScope::initializeHandle(
1069 empty_context_scope_,
1070 static_cast<ContextScopePtr>(address + kHeapObjectTag));
1071 empty_context_scope_->StoreNonPointer(
1072 &empty_context_scope_->untag()->num_variables_, 0);
1073 empty_context_scope_->StoreNonPointer(
1074 &empty_context_scope_->untag()->is_implicit_, true);
1075 empty_context_scope_->SetCanonical();
1076 }
1077
1078 // Allocate and initialize the canonical empty object pool object.
1079 {
1080 uword address =
1081 heap->Allocate(thread, ObjectPool::InstanceSize(0), Heap::kOld);
1082 InitializeObject<ObjectPool>(address, 0);
1083 ObjectPool::initializeHandle(
1084 empty_object_pool_,
1085 static_cast<ObjectPoolPtr>(address + kHeapObjectTag));
1086 empty_object_pool_->StoreNonPointer(&empty_object_pool_->untag()->length_,
1087 0);
1088 empty_object_pool_->SetCanonical();
1089 }
1090
1091 // Allocate and initialize the empty_compressed_stackmaps instance.
1092 {
1093 const intptr_t instance_size = CompressedStackMaps::InstanceSize(0);
1094 uword address = heap->Allocate(thread, instance_size, Heap::kOld);
1095 InitializeObject<CompressedStackMaps>(address, 0);
1096 CompressedStackMaps::initializeHandle(
1097 empty_compressed_stackmaps_,
1098 static_cast<CompressedStackMapsPtr>(address + kHeapObjectTag));
1099 empty_compressed_stackmaps_->untag()->payload()->set_flags_and_size(0);
1100 empty_compressed_stackmaps_->SetCanonical();
1101 }
1102
1103 // Allocate and initialize the empty_descriptors instance.
1104 {
1105 uword address =
1107 InitializeObject<PcDescriptors>(address, 0);
1108 PcDescriptors::initializeHandle(
1109 empty_descriptors_,
1110 static_cast<PcDescriptorsPtr>(address + kHeapObjectTag));
1111 empty_descriptors_->StoreNonPointer(&empty_descriptors_->untag()->length_,
1112 0);
1113 empty_descriptors_->SetCanonical();
1114 }
1115
1116 // Allocate and initialize the canonical empty variable descriptor object.
1117 {
1118 uword address = heap->Allocate(thread, LocalVarDescriptors::InstanceSize(0),
1119 Heap::kOld);
1120 InitializeObject<LocalVarDescriptors>(address, 0);
1121 LocalVarDescriptors::initializeHandle(
1122 empty_var_descriptors_,
1123 static_cast<LocalVarDescriptorsPtr>(address + kHeapObjectTag));
1124 empty_var_descriptors_->StoreNonPointer(
1125 &empty_var_descriptors_->untag()->num_entries_, 0);
1126 empty_var_descriptors_->SetCanonical();
1127 }
1128
1129 // Allocate and initialize the canonical empty exception handler info object.
1130 // The vast majority of all functions do not contain an exception handler
1131 // and can share this canonical descriptor.
1132 {
1133 uword address =
1135 InitializeObject<ExceptionHandlers>(address, 0);
1136 ExceptionHandlers::initializeHandle(
1137 empty_exception_handlers_,
1138 static_cast<ExceptionHandlersPtr>(address + kHeapObjectTag));
1139 empty_exception_handlers_->StoreNonPointer(
1140 &empty_exception_handlers_->untag()->packed_fields_, 0);
1141 empty_exception_handlers_->SetCanonical();
1142 }
1143
1144 // Empty exception handlers for async/async* functions.
1145 {
1146 uword address =
1148 InitializeObject<ExceptionHandlers>(address, 0);
1149 ExceptionHandlers::initializeHandle(
1150 empty_async_exception_handlers_,
1151 static_cast<ExceptionHandlersPtr>(address + kHeapObjectTag));
1152 empty_async_exception_handlers_->StoreNonPointer(
1153 &empty_async_exception_handlers_->untag()->packed_fields_,
1155 empty_async_exception_handlers_->SetCanonical();
1156 }
1157
1158 // Allocate and initialize the canonical empty type arguments object.
1159 {
1160 uword address =
1162 InitializeObject<TypeArguments>(address, 0);
1163 TypeArguments::initializeHandle(
1164 empty_type_arguments_,
1165 static_cast<TypeArgumentsPtr>(address + kHeapObjectTag));
1166 empty_type_arguments_->untag()->set_length(Smi::New(0));
1167 empty_type_arguments_->untag()->set_hash(Smi::New(0));
1168 empty_type_arguments_->ComputeHash();
1169 empty_type_arguments_->SetCanonical();
1170 }
1171
1172 // The VM isolate snapshot object table is initialized to an empty array
1173 // as we do not have any VM isolate snapshot at this time.
1174 *vm_isolate_snapshot_object_table_ = Object::empty_array().ptr();
1175
1176 cls = Class::New<Instance, RTN::Instance>(kDynamicCid, isolate_group);
1177 cls.set_is_abstract();
1182 dynamic_class_ = cls.ptr();
1183
1184 cls = Class::New<Instance, RTN::Instance>(kVoidCid, isolate_group);
1189 void_class_ = cls.ptr();
1190
1191 cls = Class::New<Type, RTN::Type>(isolate_group);
1195
1196 cls = Class::New<FunctionType, RTN::FunctionType>(isolate_group);
1200
1201 cls = Class::New<RecordType, RTN::RecordType>(isolate_group);
1205
1206 cls = dynamic_class_;
1207 *dynamic_type_ =
1208 Type::New(cls, Object::null_type_arguments(), Nullability::kNullable);
1209 dynamic_type_->SetIsFinalized();
1210 dynamic_type_->ComputeHash();
1211 dynamic_type_->SetCanonical();
1212
1213 cls = void_class_;
1214 *void_type_ =
1215 Type::New(cls, Object::null_type_arguments(), Nullability::kNullable);
1216 void_type_->SetIsFinalized();
1217 void_type_->ComputeHash();
1218 void_type_->SetCanonical();
1219
1220 // Since TypeArguments objects are passed as function arguments, make them
1221 // behave as Dart instances, although they are just VM objects.
1222 // Note that we cannot set the super type to ObjectType, which does not live
1223 // in the vm isolate. See special handling in Class::SuperClass().
1224 cls = type_arguments_class_;
1225 cls.set_interfaces(Object::empty_array());
1226 cls.SetFields(Object::empty_array());
1227 cls.SetFunctions(Object::empty_array());
1228
1229 cls = Class::New<Bool, RTN::Bool>(isolate_group);
1230 isolate_group->object_store()->set_bool_class(cls);
1231
1232 *smi_illegal_cid_ = Smi::New(kIllegalCid);
1233 *smi_zero_ = Smi::New(0);
1234
1235 String& error_str = String::Handle();
1236 error_str = String::New(
1237 "Callbacks into the Dart VM are currently prohibited. Either there are "
1238 "outstanding pointers from Dart_TypedDataAcquireData that have not been "
1239 "released with Dart_TypedDataReleaseData, or a finalizer is running.",
1240 Heap::kOld);
1241 *no_callbacks_error_ = ApiError::New(error_str, Heap::kOld);
1242 error_str = String::New(
1243 "No api calls are allowed while unwind is in progress", Heap::kOld);
1244 *unwind_in_progress_error_ = UnwindError::New(error_str, Heap::kOld);
1245 error_str = String::New("SnapshotWriter Error", Heap::kOld);
1246 *snapshot_writer_error_ =
1247 LanguageError::New(error_str, Report::kError, Heap::kOld);
1248 error_str = String::New("Branch offset overflow", Heap::kOld);
1249 *branch_offset_error_ =
1250 LanguageError::New(error_str, Report::kBailout, Heap::kOld);
1251 error_str = String::New("Speculative inlining failed", Heap::kOld);
1252 *speculative_inlining_error_ =
1253 LanguageError::New(error_str, Report::kBailout, Heap::kOld);
1254 error_str = String::New("Background Compilation Failed", Heap::kOld);
1255 *background_compilation_error_ =
1256 LanguageError::New(error_str, Report::kBailout, Heap::kOld);
1257 error_str = String::New("Out of memory", Heap::kOld);
1258 *out_of_memory_error_ =
1259 LanguageError::New(error_str, Report::kError, Heap::kOld);
1260
1261 // Allocate the parameter types and names for synthetic getters.
1262 *synthetic_getter_parameter_types_ = Array::New(1, Heap::kOld);
1263 synthetic_getter_parameter_types_->SetAt(0, Object::dynamic_type());
1264 *synthetic_getter_parameter_names_ = Array::New(1, Heap::kOld);
1265 // Fill in synthetic_getter_parameter_names_ later, after symbols are
1266 // initialized (in Object::FinalizeVMIsolate).
1267 // synthetic_getter_parameter_names_ object needs to be created earlier as
1268 // VM isolate snapshot reader references it before Object::FinalizeVMIsolate.
1269
1270 // Some thread fields need to be reinitialized as null constants have not been
1271 // initialized until now.
1272 thread->ClearStickyError();
1273
1274 ASSERT(!null_object_->IsSmi());
1275 ASSERT(!null_class_->IsSmi());
1276 ASSERT(null_class_->IsClass());
1277 ASSERT(!null_array_->IsSmi());
1278 ASSERT(null_array_->IsArray());
1279 ASSERT(!null_string_->IsSmi());
1280 ASSERT(null_string_->IsString());
1281 ASSERT(!null_instance_->IsSmi());
1282 ASSERT(null_instance_->IsInstance());
1283 ASSERT(!null_function_->IsSmi());
1284 ASSERT(null_function_->IsFunction());
1285 ASSERT(!null_function_type_->IsSmi());
1286 ASSERT(null_function_type_->IsFunctionType());
1287 ASSERT(!null_record_type_->IsSmi());
1288 ASSERT(null_record_type_->IsRecordType());
1289 ASSERT(!null_type_arguments_->IsSmi());
1290 ASSERT(null_type_arguments_->IsTypeArguments());
1291 ASSERT(!null_compressed_stackmaps_->IsSmi());
1292 ASSERT(null_compressed_stackmaps_->IsCompressedStackMaps());
1293 ASSERT(!empty_array_->IsSmi());
1294 ASSERT(empty_array_->IsArray());
1295 ASSERT(!empty_instantiations_cache_array_->IsSmi());
1296 ASSERT(empty_instantiations_cache_array_->IsArray());
1297 ASSERT(!empty_subtype_test_cache_array_->IsSmi());
1298 ASSERT(empty_subtype_test_cache_array_->IsArray());
1299 ASSERT(!empty_type_arguments_->IsSmi());
1300 ASSERT(empty_type_arguments_->IsTypeArguments());
1301 ASSERT(!empty_context_scope_->IsSmi());
1302 ASSERT(empty_context_scope_->IsContextScope());
1303 ASSERT(!empty_compressed_stackmaps_->IsSmi());
1304 ASSERT(empty_compressed_stackmaps_->IsCompressedStackMaps());
1305 ASSERT(!empty_descriptors_->IsSmi());
1306 ASSERT(empty_descriptors_->IsPcDescriptors());
1307 ASSERT(!empty_var_descriptors_->IsSmi());
1308 ASSERT(empty_var_descriptors_->IsLocalVarDescriptors());
1309 ASSERT(!empty_exception_handlers_->IsSmi());
1310 ASSERT(empty_exception_handlers_->IsExceptionHandlers());
1311 ASSERT(!empty_async_exception_handlers_->IsSmi());
1312 ASSERT(empty_async_exception_handlers_->IsExceptionHandlers());
1313 ASSERT(!sentinel_->IsSmi());
1314 ASSERT(sentinel_->IsSentinel());
1315 ASSERT(!transition_sentinel_->IsSmi());
1316 ASSERT(transition_sentinel_->IsSentinel());
1317 ASSERT(!unknown_constant_->IsSmi());
1318 ASSERT(unknown_constant_->IsSentinel());
1319 ASSERT(!non_constant_->IsSmi());
1320 ASSERT(non_constant_->IsSentinel());
1321 ASSERT(!optimized_out_->IsSmi());
1322 ASSERT(optimized_out_->IsSentinel());
1323 ASSERT(!bool_true_->IsSmi());
1324 ASSERT(bool_true_->IsBool());
1325 ASSERT(!bool_false_->IsSmi());
1326 ASSERT(bool_false_->IsBool());
1327 ASSERT(smi_illegal_cid_->IsSmi());
1328 ASSERT(smi_zero_->IsSmi());
1329 ASSERT(!no_callbacks_error_->IsSmi());
1330 ASSERT(no_callbacks_error_->IsApiError());
1331 ASSERT(!unwind_in_progress_error_->IsSmi());
1332 ASSERT(unwind_in_progress_error_->IsUnwindError());
1333 ASSERT(!snapshot_writer_error_->IsSmi());
1334 ASSERT(snapshot_writer_error_->IsLanguageError());
1335 ASSERT(!branch_offset_error_->IsSmi());
1336 ASSERT(branch_offset_error_->IsLanguageError());
1337 ASSERT(!speculative_inlining_error_->IsSmi());
1338 ASSERT(speculative_inlining_error_->IsLanguageError());
1339 ASSERT(!background_compilation_error_->IsSmi());
1340 ASSERT(background_compilation_error_->IsLanguageError());
1341 ASSERT(!out_of_memory_error_->IsSmi());
1342 ASSERT(out_of_memory_error_->IsLanguageError());
1343 ASSERT(!vm_isolate_snapshot_object_table_->IsSmi());
1344 ASSERT(vm_isolate_snapshot_object_table_->IsArray());
1345 ASSERT(!synthetic_getter_parameter_types_->IsSmi());
1346 ASSERT(synthetic_getter_parameter_types_->IsArray());
1347 ASSERT(!synthetic_getter_parameter_names_->IsSmi());
1348 ASSERT(synthetic_getter_parameter_names_->IsArray());
1349}
1350
1351 void Object::FinishInit(IsolateGroup* isolate_group) {
1352   // The type testing stubs we initialize in AbstractType objects for the
1353   // canonical type of kDynamicCid/kVoidCid need to be set in this
1354   // method, which is called after StubCode::InitOnce().
1355   Code& code = Code::Handle();
1356
1357   code = TypeTestingStubGenerator::DefaultCodeForType(*dynamic_type_);
1358   dynamic_type_->InitializeTypeTestingStubNonAtomic(code);
1359
// NOTE(review): this listing jumps from line 1359 to 1361 — the statement
// that regenerates `code` for *void_type_ (presumably another
// TypeTestingStubGenerator::DefaultCodeForType call) appears to have been
// dropped by the renderer. Verify against the upstream source; as shown,
// void_type_ would reuse dynamic_type_'s stub.
1361   void_type_->InitializeTypeTestingStubNonAtomic(code);
1362 }
1363
// Resets every VM-isolate singleton pointer back to RAW_NULL so that a later
// shutdown/startup cycle cannot observe stale class pointers.
// NOTE(review): the enclosing function signature (likely `void
// Object::Cleanup()`) is not visible in this listing — the renderer dropped
// the line preceding 1365. Confirm against upstream before editing.
1365   null_ = static_cast<ObjectPtr>(RAW_NULL);
1366   true_ = static_cast<BoolPtr>(RAW_NULL);
1367   false_ = static_cast<BoolPtr>(RAW_NULL);
1368   class_class_ = static_cast<ClassPtr>(RAW_NULL);
1369   dynamic_class_ = static_cast<ClassPtr>(RAW_NULL);
1370   void_class_ = static_cast<ClassPtr>(RAW_NULL);
1371   type_parameters_class_ = static_cast<ClassPtr>(RAW_NULL);
1372   type_arguments_class_ = static_cast<ClassPtr>(RAW_NULL);
1373   patch_class_class_ = static_cast<ClassPtr>(RAW_NULL);
1374   function_class_ = static_cast<ClassPtr>(RAW_NULL);
1375   closure_data_class_ = static_cast<ClassPtr>(RAW_NULL);
1376   ffi_trampoline_data_class_ = static_cast<ClassPtr>(RAW_NULL);
1377   field_class_ = static_cast<ClassPtr>(RAW_NULL);
1378   script_class_ = static_cast<ClassPtr>(RAW_NULL);
1379   library_class_ = static_cast<ClassPtr>(RAW_NULL);
1380   namespace_class_ = static_cast<ClassPtr>(RAW_NULL);
1381   kernel_program_info_class_ = static_cast<ClassPtr>(RAW_NULL);
1382   code_class_ = static_cast<ClassPtr>(RAW_NULL);
1383   instructions_class_ = static_cast<ClassPtr>(RAW_NULL);
1384   instructions_section_class_ = static_cast<ClassPtr>(RAW_NULL);
1385   instructions_table_class_ = static_cast<ClassPtr>(RAW_NULL);
1386   object_pool_class_ = static_cast<ClassPtr>(RAW_NULL);
1387   pc_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
1388   code_source_map_class_ = static_cast<ClassPtr>(RAW_NULL);
1389   compressed_stackmaps_class_ = static_cast<ClassPtr>(RAW_NULL);
1390   var_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
1391   exception_handlers_class_ = static_cast<ClassPtr>(RAW_NULL);
1392   context_class_ = static_cast<ClassPtr>(RAW_NULL);
1393   context_scope_class_ = static_cast<ClassPtr>(RAW_NULL);
1394   singletargetcache_class_ = static_cast<ClassPtr>(RAW_NULL);
1395   unlinkedcall_class_ = static_cast<ClassPtr>(RAW_NULL);
1396   monomorphicsmiablecall_class_ = static_cast<ClassPtr>(RAW_NULL);
1397   icdata_class_ = static_cast<ClassPtr>(RAW_NULL);
1398   megamorphic_cache_class_ = static_cast<ClassPtr>(RAW_NULL);
1399   subtypetestcache_class_ = static_cast<ClassPtr>(RAW_NULL);
1400   loadingunit_class_ = static_cast<ClassPtr>(RAW_NULL);
1401   api_error_class_ = static_cast<ClassPtr>(RAW_NULL);
1402   language_error_class_ = static_cast<ClassPtr>(RAW_NULL);
1403   unhandled_exception_class_ = static_cast<ClassPtr>(RAW_NULL);
1404   unwind_error_class_ = static_cast<ClassPtr>(RAW_NULL);
1405 }
1406
1407 // An object visitor which will mark all visited objects. This is used to
1408 // premark all objects in the vm_isolate_ heap. Also precalculates hash
1409 // codes so that we can get the identity hash code of objects in the read-
1410 // only VM isolate.
// NOTE(review): the class declaration line (likely `class
// FinalizeVMIsolateVisitor : public ObjectVisitor {`), the constructor name
// line, and the VisitObject signature line are missing from this listing —
// the renderer dropped them. Comments below are anchored to the visible body.
1412  public:
1414 #if defined(HASH_IN_OBJECT_HEADER)
// Arbitrary non-zero seed for the identity-hash counter below.
1415       : counter_(1337)
1416 #endif
1417   {
1418   }
1419
1421     // Free list elements should never be marked.
1422     ASSERT(!obj->untag()->IsMarked());
1423     // No forwarding corpses in the VM isolate.
1424     ASSERT(!obj->IsForwardingCorpse());
1425     if (!obj->IsFreeListElement()) {
1428 #if defined(HASH_IN_OBJECT_HEADER)
1429       // These objects end up in the read-only VM isolate which is shared
1430       // between isolates, so we have to prepopulate them with identity hash
1431       // codes, since we can't add hash codes later.
1432       if (Object::GetCachedHash(obj) == 0) {
1433         // Some classes have identity hash codes that depend on their contents,
1434         // not per object.
1435         ASSERT(!obj->IsStringInstance());
1436         if (obj == Object::null()) {
1437           Object::SetCachedHashIfNotSet(obj, kNullIdentityHash);
1438         } else if (obj == Object::bool_true().ptr()) {
1439           Object::SetCachedHashIfNotSet(obj, kTrueIdentityHash);
1440         } else if (obj == Object::bool_false().ptr()) {
1441           Object::SetCachedHashIfNotSet(obj, kFalseIdentityHash);
1442         } else if (!obj->IsMint() && !obj->IsDouble()) {
1443           counter_ += 2011;  // The year Dart was announced and a prime.
1444           counter_ &= 0x3fffffff;
// 0 means "hash not yet set" (see GetCachedHash check above), so skip it.
1445           if (counter_ == 0) counter_++;
1446           Object::SetCachedHashIfNotSet(obj, counter_);
1447         }
1448       }
1449 #endif
1450 #if !defined(DART_PRECOMPILED_RUNTIME)
1451       if (obj->IsClass()) {
1452         // Won't be able to update read-only VM isolate classes if implementors
1453         // are discovered later.
1454         static_cast<ClassPtr>(obj)->untag()->implementor_cid_ = kDynamicCid;
1455       }
1456 #endif
1457     }
1458   }
1459
1460  private:
1461 #if defined(HASH_IN_OBJECT_HEADER)
// Rolling source of identity hashes, masked to 30 bits and kept non-zero.
1462   int32_t counter_;
1463 #endif
1464 };
1465
// Helper for FinalizeVMIsolate below: fetches the VM singleton class
// `<class_name>_class()` into the local `cls` handle and assigns it the
// canonical symbol `Symbols::<name>()`. Requires a `Class& cls` in scope.
1466 #define SET_CLASS_NAME(class_name, name)                                       \
1467   cls = class_name##_class();                                                  \
1468   cls.set_name(Symbols::name());
1469
// NOTE(review): the function signature line (likely `void
// Object::FinalizeVMIsolate(IsolateGroup* isolate_group) {`) is missing from
// this listing — the renderer dropped it. Body below: assigns human-readable
// names to all VM singleton classes, then premarks every object in the VM
// isolate heap so the read-only isolate never needs GC marking.
1471   // Should only be run by the vm isolate.
1472   ASSERT(isolate_group == Dart::vm_isolate_group());
1473
1474   // Finish initialization of synthetic_getter_parameter_names_ which was
1475   // started in Object::InitOnce().
1476   synthetic_getter_parameter_names_->SetAt(0, Symbols::This());
1477
1478   // Set up names for all VM singleton classes.
1479   Class& cls = Class::Handle();
1480
1481   SET_CLASS_NAME(class, Class);
1482   SET_CLASS_NAME(dynamic, Dynamic);
1483   SET_CLASS_NAME(void, Void);
1484   SET_CLASS_NAME(type_parameters, TypeParameters);
1485   SET_CLASS_NAME(type_arguments, TypeArguments);
1486   SET_CLASS_NAME(patch_class, PatchClass);
// NOTE(review): line 1487 (presumably `SET_CLASS_NAME(function, Function);`)
// was dropped by the renderer — confirm against upstream.
1488   SET_CLASS_NAME(closure_data, ClosureData);
1489   SET_CLASS_NAME(ffi_trampoline_data, FfiTrampolineData);
1490   SET_CLASS_NAME(field, Field);
1491   SET_CLASS_NAME(script, Script);
1492   SET_CLASS_NAME(library, LibraryClass);
1493   SET_CLASS_NAME(namespace, Namespace);
1494   SET_CLASS_NAME(kernel_program_info, KernelProgramInfo);
1495   SET_CLASS_NAME(weak_serialization_reference, WeakSerializationReference);
1496   SET_CLASS_NAME(weak_array, WeakArray);
1497   SET_CLASS_NAME(code, Code);
1498   SET_CLASS_NAME(instructions, Instructions);
1499   SET_CLASS_NAME(instructions_section, InstructionsSection);
1500   SET_CLASS_NAME(instructions_table, InstructionsTable);
1501   SET_CLASS_NAME(object_pool, ObjectPool);
1502   SET_CLASS_NAME(code_source_map, CodeSourceMap);
1503   SET_CLASS_NAME(pc_descriptors, PcDescriptors);
1504   SET_CLASS_NAME(compressed_stackmaps, CompressedStackMaps);
1505   SET_CLASS_NAME(var_descriptors, LocalVarDescriptors);
1506   SET_CLASS_NAME(exception_handlers, ExceptionHandlers);
1507   SET_CLASS_NAME(context, Context);
1508   SET_CLASS_NAME(context_scope, ContextScope);
1509   SET_CLASS_NAME(sentinel, Sentinel);
1510   SET_CLASS_NAME(singletargetcache, SingleTargetCache);
1511   SET_CLASS_NAME(unlinkedcall, UnlinkedCall);
1512   SET_CLASS_NAME(monomorphicsmiablecall, MonomorphicSmiableCall);
1513   SET_CLASS_NAME(icdata, ICData);
1514   SET_CLASS_NAME(megamorphic_cache, MegamorphicCache);
1515   SET_CLASS_NAME(subtypetestcache, SubtypeTestCache);
1516   SET_CLASS_NAME(loadingunit, LoadingUnit);
1517   SET_CLASS_NAME(api_error, ApiError);
1518   SET_CLASS_NAME(language_error, LanguageError);
1519   SET_CLASS_NAME(unhandled_exception, UnhandledException);
1520   SET_CLASS_NAME(unwind_error, UnwindError);
1521
1522   // Set up names for classes which are also pre-allocated in the vm isolate.
1523   cls = isolate_group->object_store()->array_class();
1524   cls.set_name(Symbols::_List());
1525   cls = isolate_group->object_store()->one_byte_string_class();
1526   cls.set_name(Symbols::OneByteString());
1527   cls = isolate_group->object_store()->never_class();
1528   cls.set_name(Symbols::Never());
1529
1530   // Set up names for the pseudo-classes for free list elements and forwarding
1531   // corpses. Mainly this makes VM debugging easier.
1532   cls = isolate_group->class_table()->At(kFreeListElement);
1533   cls.set_name(Symbols::FreeListElement());
1534   cls = isolate_group->class_table()->At(kForwardingCorpse);
1535   cls.set_name(Symbols::ForwardingCorpse());
1536
1537 #if defined(DART_PRECOMPILER)
1538   const auto& function =
1539       Function::Handle(StubCode::UnknownDartCode().function());
1540   function.set_name(Symbols::OptimizedOut());
1541 #endif  // defined(DART_PRECOMPILER)
1542
1543   {
1544     ASSERT(isolate_group == Dart::vm_isolate_group());
1545     Thread* thread = Thread::Current();
// Temporarily make the (otherwise read-only) VM isolate heap writable while
// the premarking visitor runs.
1546     WritableVMIsolateScope scope(thread);
1547     HeapIterationScope iteration(thread);
1548     FinalizeVMIsolateVisitor premarker;
1549     ASSERT(isolate_group->heap()->UsedInWords(Heap::kNew) == 0);
1550     iteration.IterateOldObjectsNoImagePages(&premarker);
1551     // Make the VM isolate read-only again after setting all objects as marked.
1552     // Note objects in image pages are already pre-marked.
1553   }
1554 }
1555
// NOTE(review): the function signature line (likely `void
// Object::FinalizeReadOnlyObject(ObjectPtr object) {`) is missing from this
// listing. For variable-length read-only objects (strings, code metadata)
// this zeroes the unused tail between the unrounded payload size and the
// rounded heap size — presumably so the read-only pages have deterministic
// bytes; confirm intent against upstream.
1557   NoSafepointScope no_safepoint;
1558   intptr_t cid = object->GetClassId();
1559   if (cid == kOneByteStringCid) {
1560     OneByteStringPtr str = static_cast<OneByteStringPtr>(object);
1561     if (String::GetCachedHash(str) == 0) {
1562       intptr_t hash = String::Hash(str);
// NOTE(review): line 1563 (presumably `String::SetCachedHash(str, hash);`)
// was dropped by the renderer — as shown, `hash` would be computed and
// discarded. Verify against upstream.
1564     }
1565     intptr_t size = OneByteString::UnroundedSize(str);
1566     ASSERT(size <= str->untag()->HeapSize());
1567     memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(str) + size), 0,
1568            str->untag()->HeapSize() - size);
1569   } else if (cid == kTwoByteStringCid) {
1570     TwoByteStringPtr str = static_cast<TwoByteStringPtr>(object);
1571     if (String::GetCachedHash(str) == 0) {
1572       intptr_t hash = String::Hash(str);
// NOTE(review): line 1573 (presumably `String::SetCachedHash(str, hash);`)
// was dropped by the renderer; the ASSERT below expects the hash to be set.
1574     }
1575     ASSERT(String::GetCachedHash(str) != 0);
1576     intptr_t size = TwoByteString::UnroundedSize(str);
1577     ASSERT(size <= str->untag()->HeapSize());
1578     memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(str) + size), 0,
1579            str->untag()->HeapSize() - size);
1580   } else if (cid == kCodeSourceMapCid) {
1581     CodeSourceMapPtr map = CodeSourceMap::RawCast(object);
1582     intptr_t size = CodeSourceMap::UnroundedSize(map);
1583     ASSERT(size <= map->untag()->HeapSize());
1584     memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(map) + size), 0,
1585            map->untag()->HeapSize() - size);
1586   } else if (cid == kCompressedStackMapsCid) {
1587     CompressedStackMapsPtr maps = CompressedStackMaps::RawCast(object);
1588     intptr_t size = CompressedStackMaps::UnroundedSize(maps);
1589     ASSERT(size <= maps->untag()->HeapSize());
1590     memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(maps) + size), 0,
1591            maps->untag()->HeapSize() - size);
1592   } else if (cid == kPcDescriptorsCid) {
1593     PcDescriptorsPtr desc = PcDescriptors::RawCast(object);
1594     intptr_t size = PcDescriptors::UnroundedSize(desc);
1595     ASSERT(size <= desc->untag()->HeapSize());
1596     memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(desc) + size), 0,
1597            desc->untag()->HeapSize() - size);
1598   }
1599 }
1600
// Installs the given array as the VM isolate snapshot object table.
// NOTE(review): the function signature lines (likely `void
// Object::set_vm_isolate_snapshot_object_table(const Array& table) {`) are
// missing from this listing — the renderer dropped them.
1603   *vm_isolate_snapshot_object_table_ = table.ptr();
1604 }
1605
1606 // Make unused space in an object whose type has been transformed safe
1607 // for traversing during GC.
1608 // The unused part of the transformed object is marked as a TypedDataInt8Array
1609 // object.
// NOTE(review): the signature line (likely `void
// Object::MakeUnusedSpaceTraversable(const Object& obj,`) is missing from
// this listing; only the trailing parameters are visible below.
1611                                          intptr_t original_size,
1612                                          intptr_t used_size) {
1613   ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
1614   ASSERT(!obj.IsNull());
1615   ASSERT(original_size >= used_size);
1616   if (original_size > used_size) {
1617     intptr_t leftover_size = original_size - used_size;
1618
1619     uword addr = UntaggedObject::ToAddr(obj.ptr()) + used_size;
1620     if (leftover_size >= TypedData::InstanceSize(0)) {
1621       // Update the leftover space as a TypedDataInt8Array object.
1622       TypedDataPtr raw =
1623           static_cast<TypedDataPtr>(UntaggedObject::FromAddr(addr));
1624       uword new_tags =
1625           UntaggedObject::ClassIdTag::update(kTypedDataInt8ArrayCid, 0);
1626       new_tags = UntaggedObject::SizeTag::update(leftover_size, new_tags);
1627       const bool is_old = obj.ptr()->IsOldObject();
1628       new_tags = UntaggedObject::AlwaysSetBit::update(true, new_tags);
1629       new_tags = UntaggedObject::NotMarkedBit::update(true, new_tags);
1630       new_tags =
// NOTE(review): the continuation of this assignment (line 1631, presumably
// an Old/remembered-bit update using `is_old`) was dropped by the renderer.
1632       new_tags = UntaggedObject::NewBit::update(!is_old, new_tags);
1633       // On architectures with a relaxed memory model, the concurrent marker may
1634       // observe the write of the filler object's header before observing the
1635       // new array length, and so treat it as a pointer. Ensure it is a Smi so
1636       // the marker won't dereference it.
1637       ASSERT((new_tags & kSmiTagMask) == kSmiTag);
1638
1639       intptr_t leftover_len = (leftover_size - TypedData::InstanceSize(0));
1640       ASSERT(TypedData::InstanceSize(leftover_len) == leftover_size);
// Publish the length before the header (release order) so a concurrent
// reader that sees the new header also sees a valid length.
1641       raw->untag()->set_length<std::memory_order_release>(
1642           Smi::New(leftover_len));
1643       raw->untag()->tags_ = new_tags;
1644       raw->untag()->RecomputeDataField();
1645     } else {
1646       // Update the leftover space as a basic object.
1647       ASSERT(leftover_size == Object::InstanceSize());
1648       ObjectPtr raw = static_cast<ObjectPtr>(UntaggedObject::FromAddr(addr));
1649       uword new_tags = UntaggedObject::ClassIdTag::update(kInstanceCid, 0);
1650       new_tags = UntaggedObject::SizeTag::update(leftover_size, new_tags);
1651       const bool is_old = obj.ptr()->IsOldObject();
1652       new_tags = UntaggedObject::AlwaysSetBit::update(true, new_tags);
1653       new_tags = UntaggedObject::NotMarkedBit::update(true, new_tags);
1654       new_tags =
// NOTE(review): the continuation of this assignment (line 1655) was dropped
// by the renderer — same elision as in the branch above.
1656       new_tags = UntaggedObject::NewBit::update(!is_old, new_tags);
1657       // On architectures with a relaxed memory model, the concurrent marker may
1658       // observe the write of the filler object's header before observing the
1659       // new array length, and so treat it as a pointer. Ensure it is a Smi so
1660       // the marker won't dereference it.
1661       ASSERT((new_tags & kSmiTagMask) == kSmiTag);
1662
1663       // The array might have an uninitialized alignment gap since the visitors
1664       // for Arrays are precise based on element count, but the visitors for
1665       // Instance are based on the size rounded to the allocation unit, so we
1666       // need to ensure the alignment gap is initialized.
1667       for (intptr_t offset = Instance::UnroundedSize();
1668            offset < Instance::InstanceSize(); offset += sizeof(uword)) {
1669         reinterpret_cast<std::atomic<uword>*>(addr + offset)
1670             ->store(0, std::memory_order_release);
1671       }
1672       raw->untag()->tags_ = new_tags;
1673     }
1674   }
1675 }
1676
// Debug-only sanity check that every valid predefined class id has a
// registered builtin vtable, and that pseudo/illegal cids have none.
// NOTE(review): the function signature line (likely `void
// Object::VerifyBuiltinVtables() {`) and the declaration of `table` (line
// 1682, presumably obtained from the class table) are missing from this
// listing — the renderer dropped them.
1678 #if defined(DEBUG)
1679   ASSERT(builtin_vtables_[kIllegalCid] == 0);
1680   ASSERT(builtin_vtables_[kFreeListElement] == 0);
1681   ASSERT(builtin_vtables_[kForwardingCorpse] == 0);
1683   for (intptr_t cid = kObjectCid; cid < kNumPredefinedCids; cid++) {
1684     if (table->HasValidClassAt(cid)) {
1685       ASSERT(builtin_vtables_[cid] != 0);
1686     }
1687   }
1688 #endif
1689 }
1690
// Registers a public class under its unmangled `name` in `lib`.
// The name must be non-empty and must not start with '_' — that prefix marks
// library-private names, which go through RegisterPrivateClass instead.
1691 void Object::RegisterClass(const Class& cls,
1692                            const String& name,
1693                            const Library& lib) {
1694   ASSERT(name.Length() > 0);
1695   ASSERT(name.CharAt(0) != '_');
1696   cls.set_name(name);
1697   lib.AddClass(cls);
1698 }
1699
1700void Object::RegisterPrivateClass(const Class& cls,
1701 const String& public_class_name,
1702 const Library& lib) {
1703 ASSERT(public_class_name.Length() > 0);
1704 ASSERT(public_class_name.CharAt(0) == '_');
1705 String& str = String::Handle();
1706 str = lib.PrivateName(public_class_name);
1707 cls.set_name(str);
1708 lib.AddClass(cls);
1709}
1710
1711// Initialize a new isolate from source or from a snapshot.
1712//
1713// There are three possibilities:
1714// 1. Running a Kernel binary. This function will bootstrap from the KERNEL
1715// file.
1716// 2. There is no vm snapshot. This function will bootstrap from source.
1717// 3. There is a vm snapshot. The caller should initialize from the snapshot.
1718//
1719// A non-null kernel argument indicates (1).
1720// A nullptr kernel indicates (2) or (3).
1721ErrorPtr Object::Init(IsolateGroup* isolate_group,
1722 const uint8_t* kernel_buffer,
1723 intptr_t kernel_buffer_size) {
1724 Thread* thread = Thread::Current();
1725 Zone* zone = thread->zone();
1726 ASSERT(isolate_group == thread->isolate_group());
1727 TIMELINE_DURATION(thread, Isolate, "Object::Init");
1728
1729#if defined(DART_PRECOMPILED_RUNTIME)
1730 const bool bootstrapping = false;
1731#else
1732 const bool is_kernel = (kernel_buffer != nullptr);
1733 const bool bootstrapping =
1734 (Dart::vm_snapshot_kind() == Snapshot::kNone) || is_kernel;
1735#endif // defined(DART_PRECOMPILED_RUNTIME).
1736
1737 if (bootstrapping) {
1738#if !defined(DART_PRECOMPILED_RUNTIME)
1739 // Object::Init version when we are bootstrapping from source or from a
1740 // Kernel binary.
1741 // This will initialize isolate group object_store, shared by all isolates
1742 // running in the isolate group.
1743 ObjectStore* object_store = isolate_group->object_store();
1744 SafepointWriteRwLocker ml(thread, isolate_group->program_lock());
1745
1746 Class& cls = Class::Handle(zone);
1747 Type& type = Type::Handle(zone);
1748 Array& array = Array::Handle(zone);
1749 WeakArray& weak_array = WeakArray::Handle(zone);
1750 Library& lib = Library::Handle(zone);
1751 TypeArguments& type_args = TypeArguments::Handle(zone);
1752
1753 // All RawArray fields will be initialized to an empty array, therefore
1754 // initialize array class first.
1755 cls = Class::New<Array, RTN::Array>(isolate_group);
1756 ASSERT(object_store->array_class() == Class::null());
1757 object_store->set_array_class(cls);
1758
1759 // VM classes that are parameterized (Array, ImmutableArray,
1760 // GrowableObjectArray, Map, ConstMap,
1761 // Set, ConstSet) are also pre-finalized, so
1762 // CalculateFieldOffsets() is not called, so we need to set the offset
1763 // of their type_arguments_ field, which is explicitly
1764 // declared in their respective Raw* classes.
1766 RTN::Array::type_arguments_offset());
1768
1769 // Set up the growable object array class (Has to be done after the array
1770 // class is setup as one of its field is an array object).
1771 cls = Class::New<GrowableObjectArray, RTN::GrowableObjectArray>(
1772 isolate_group);
1773 object_store->set_growable_object_array_class(cls);
1776 RTN::GrowableObjectArray::type_arguments_offset());
1778
1779 // Initialize hash set for regexp_table_.
1780 const intptr_t kInitialCanonicalRegExpSize = 4;
1781 weak_array = HashTables::New<CanonicalRegExpSet>(
1782 kInitialCanonicalRegExpSize, Heap::kOld);
1783 object_store->set_regexp_table(weak_array);
1784
1785 // Initialize hash set for canonical types.
1786 const intptr_t kInitialCanonicalTypeSize = 16;
1787 array = HashTables::New<CanonicalTypeSet>(kInitialCanonicalTypeSize,
1788 Heap::kOld);
1789 object_store->set_canonical_types(array);
1790
1791 // Initialize hash set for canonical function types.
1792 const intptr_t kInitialCanonicalFunctionTypeSize = 16;
1793 array = HashTables::New<CanonicalFunctionTypeSet>(
1794 kInitialCanonicalFunctionTypeSize, Heap::kOld);
1795 object_store->set_canonical_function_types(array);
1796
1797 // Initialize hash set for canonical record types.
1798 const intptr_t kInitialCanonicalRecordTypeSize = 16;
1799 array = HashTables::New<CanonicalRecordTypeSet>(
1800 kInitialCanonicalRecordTypeSize, Heap::kOld);
1801 object_store->set_canonical_record_types(array);
1802
1803 // Initialize hash set for canonical type parameters.
1804 const intptr_t kInitialCanonicalTypeParameterSize = 4;
1805 array = HashTables::New<CanonicalTypeParameterSet>(
1806 kInitialCanonicalTypeParameterSize, Heap::kOld);
1807 object_store->set_canonical_type_parameters(array);
1808
1809 // Initialize hash set for canonical_type_arguments_.
1810 const intptr_t kInitialCanonicalTypeArgumentsSize = 4;
1811 array = HashTables::New<CanonicalTypeArgumentsSet>(
1812 kInitialCanonicalTypeArgumentsSize, Heap::kOld);
1813 object_store->set_canonical_type_arguments(array);
1814
1815 // Setup type class early in the process.
1816 const Class& type_cls =
1817 Class::Handle(zone, Class::New<Type, RTN::Type>(isolate_group));
1818 const Class& function_type_cls = Class::Handle(
1819 zone, Class::New<FunctionType, RTN::FunctionType>(isolate_group));
1820 const Class& record_type_cls = Class::Handle(
1821 zone, Class::New<RecordType, RTN::RecordType>(isolate_group));
1822 const Class& type_parameter_cls = Class::Handle(
1823 zone, Class::New<TypeParameter, RTN::TypeParameter>(isolate_group));
1824 const Class& library_prefix_cls = Class::Handle(
1825 zone, Class::New<LibraryPrefix, RTN::LibraryPrefix>(isolate_group));
1826
1827 // Pre-allocate the OneByteString class needed by the symbol table.
1828 cls = Class::NewStringClass(kOneByteStringCid, isolate_group);
1829 object_store->set_one_byte_string_class(cls);
1830
1831 // Pre-allocate the TwoByteString class needed by the symbol table.
1832 cls = Class::NewStringClass(kTwoByteStringCid, isolate_group);
1833 object_store->set_two_byte_string_class(cls);
1834
1835 // Setup the symbol table for the symbols created in the isolate.
1836 Symbols::SetupSymbolTable(isolate_group);
1837
1838 // Set up the libraries array before initializing the core library.
1839 const GrowableObjectArray& libraries =
1841 object_store->set_libraries(libraries);
1842
1843 // Pre-register the core library.
1844 Library::InitCoreLibrary(isolate_group);
1845
1846 // Basic infrastructure has been setup, initialize the class dictionary.
1847 const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
1848 ASSERT(!core_lib.IsNull());
1849
1850 const GrowableObjectArray& pending_classes =
1852 object_store->set_pending_classes(pending_classes);
1853
1854 // Now that the symbol table is initialized and that the core dictionary as
1855 // well as the core implementation dictionary have been setup, preallocate
1856 // remaining classes and register them by name in the dictionaries.
1857 String& name = String::Handle(zone);
1858 cls = object_store->array_class(); // Was allocated above.
1859 RegisterPrivateClass(cls, Symbols::_List(), core_lib);
1860 pending_classes.Add(cls);
1861 // We cannot use NewNonParameterizedType(), because Array is
1862 // parameterized. Warning: class _List has not been patched yet. Its
1863 // declared number of type parameters is still 0. It will become 1 after
1864 // patching. The array type allocated below represents the raw type _List
1865 // and not _List<E> as we could expect. Use with caution.
1866 type = Type::New(Class::Handle(zone, cls.ptr()),
1867 Object::null_type_arguments(), Nullability::kNonNullable);
1868 type.SetIsFinalized();
1869 type ^= type.Canonicalize(thread);
1870 object_store->set_array_type(type);
1871
1872 cls = object_store->growable_object_array_class(); // Was allocated above.
1873 RegisterPrivateClass(cls, Symbols::_GrowableList(), core_lib);
1874 pending_classes.Add(cls);
1875
1876 cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate_group);
1877 object_store->set_immutable_array_class(cls);
1879 RTN::Array::type_arguments_offset());
1881 ASSERT(object_store->immutable_array_class() !=
1882 object_store->array_class());
1883 cls.set_is_prefinalized();
1884 RegisterPrivateClass(cls, Symbols::_ImmutableList(), core_lib);
1885 pending_classes.Add(cls);
1886
1887 cls = object_store->one_byte_string_class(); // Was allocated above.
1888 RegisterPrivateClass(cls, Symbols::OneByteString(), core_lib);
1889 pending_classes.Add(cls);
1890
1891 cls = object_store->two_byte_string_class(); // Was allocated above.
1892 RegisterPrivateClass(cls, Symbols::TwoByteString(), core_lib);
1893 pending_classes.Add(cls);
1894
1895 // Pre-register the isolate library so the native class implementations can
1896 // be hooked up before compiling it.
1897 Library& isolate_lib = Library::Handle(
1898 zone, Library::LookupLibrary(thread, Symbols::DartIsolate()));
1899 if (isolate_lib.IsNull()) {
1900 isolate_lib = Library::NewLibraryHelper(Symbols::DartIsolate(), true);
1901 isolate_lib.SetLoadRequested();
1902 isolate_lib.Register(thread);
1903 }
1904 object_store->set_bootstrap_library(ObjectStore::kIsolate, isolate_lib);
1905 ASSERT(!isolate_lib.IsNull());
1906 ASSERT(isolate_lib.ptr() == Library::IsolateLibrary());
1907
1908 cls = Class::New<Capability, RTN::Capability>(isolate_group);
1909 RegisterPrivateClass(cls, Symbols::_Capability(), isolate_lib);
1910 pending_classes.Add(cls);
1911
1912 cls = Class::New<ReceivePort, RTN::ReceivePort>(isolate_group);
1913 RegisterPrivateClass(cls, Symbols::_RawReceivePort(), isolate_lib);
1914 pending_classes.Add(cls);
1915
1916 cls = Class::New<SendPort, RTN::SendPort>(isolate_group);
1917 RegisterPrivateClass(cls, Symbols::_SendPort(), isolate_lib);
1918 pending_classes.Add(cls);
1919
1920 cls = Class::New<TransferableTypedData, RTN::TransferableTypedData>(
1921 isolate_group);
1922 RegisterPrivateClass(cls, Symbols::_TransferableTypedDataImpl(),
1923 isolate_lib);
1924 pending_classes.Add(cls);
1925
1926 const Class& stacktrace_cls = Class::Handle(
1927 zone, Class::New<StackTrace, RTN::StackTrace>(isolate_group));
1928 RegisterPrivateClass(stacktrace_cls, Symbols::_StackTrace(), core_lib);
1929 pending_classes.Add(stacktrace_cls);
1930 // Super type set below, after Object is allocated.
1931
1932 cls = Class::New<RegExp, RTN::RegExp>(isolate_group);
1933 RegisterPrivateClass(cls, Symbols::_RegExp(), core_lib);
1934 pending_classes.Add(cls);
1935
1936 // Initialize the base interfaces used by the core VM classes.
1937
1938 // Allocate and initialize the pre-allocated classes in the core library.
1939 // The script and token index of these pre-allocated classes is set up when
1940 // the corelib script is compiled.
1941 cls = Class::New<Instance, RTN::Instance>(kInstanceCid, isolate_group);
1942 object_store->set_object_class(cls);
1943 cls.set_name(Symbols::Object());
1945 cls.set_is_prefinalized();
1946 cls.set_is_const();
1947 core_lib.AddClass(cls);
1948 pending_classes.Add(cls);
1950 ASSERT(type.IsCanonical());
1951 object_store->set_object_type(type);
1952 type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
1953 ASSERT(type.IsCanonical());
1954 object_store->set_legacy_object_type(type);
1956 ASSERT(type.IsCanonical());
1957 object_store->set_non_nullable_object_type(type);
1958 type = type.ToNullability(Nullability::kNullable, Heap::kOld);
1959 ASSERT(type.IsCanonical());
1960 object_store->set_nullable_object_type(type);
1961
1962 cls = Class::New<Bool, RTN::Bool>(isolate_group);
1963 object_store->set_bool_class(cls);
1964 RegisterClass(cls, Symbols::Bool(), core_lib);
1965 pending_classes.Add(cls);
1966
1967 cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate_group);
1968 object_store->set_null_class(cls);
1970 cls.set_is_prefinalized();
1971 RegisterClass(cls, Symbols::Null(), core_lib);
1972 pending_classes.Add(cls);
1973
1974 cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate_group);
1979 cls.set_name(Symbols::Never());
1980 object_store->set_never_class(cls);
1981
1982 ASSERT(!library_prefix_cls.IsNull());
1983 RegisterPrivateClass(library_prefix_cls, Symbols::_LibraryPrefix(),
1984 core_lib);
1985 pending_classes.Add(library_prefix_cls);
1986
1987 RegisterPrivateClass(type_cls, Symbols::_Type(), core_lib);
1988 pending_classes.Add(type_cls);
1989
1990 RegisterPrivateClass(function_type_cls, Symbols::_FunctionType(), core_lib);
1991 pending_classes.Add(function_type_cls);
1992
1993 RegisterPrivateClass(record_type_cls, Symbols::_RecordType(), core_lib);
1994 pending_classes.Add(record_type_cls);
1995
1996 RegisterPrivateClass(type_parameter_cls, Symbols::_TypeParameter(),
1997 core_lib);
1998 pending_classes.Add(type_parameter_cls);
1999
2000 cls = Class::New<Integer, RTN::Integer>(isolate_group);
2001 object_store->set_integer_implementation_class(cls);
2002 RegisterPrivateClass(cls, Symbols::_IntegerImplementation(), core_lib);
2003 pending_classes.Add(cls);
2004
2005 cls = Class::New<Smi, RTN::Smi>(isolate_group);
2006 object_store->set_smi_class(cls);
2007 RegisterPrivateClass(cls, Symbols::_Smi(), core_lib);
2008 pending_classes.Add(cls);
2009
2010 cls = Class::New<Mint, RTN::Mint>(isolate_group);
2011 object_store->set_mint_class(cls);
2012 RegisterPrivateClass(cls, Symbols::_Mint(), core_lib);
2013 pending_classes.Add(cls);
2014
2015 cls = Class::New<Double, RTN::Double>(isolate_group);
2016 object_store->set_double_class(cls);
2017 RegisterPrivateClass(cls, Symbols::_Double(), core_lib);
2018 pending_classes.Add(cls);
2019
2020 // Class that represents the Dart class _Closure and C++ class Closure.
2021 cls = Class::New<Closure, RTN::Closure>(isolate_group);
2022 object_store->set_closure_class(cls);
2023 RegisterPrivateClass(cls, Symbols::_Closure(), core_lib);
2024 pending_classes.Add(cls);
2025
2026 cls = Class::New<Record, RTN::Record>(isolate_group);
2027 RegisterPrivateClass(cls, Symbols::_Record(), core_lib);
2028 pending_classes.Add(cls);
2029
2030 cls = Class::New<WeakProperty, RTN::WeakProperty>(isolate_group);
2031 object_store->set_weak_property_class(cls);
2032 RegisterPrivateClass(cls, Symbols::_WeakProperty(), core_lib);
2033
2034 cls = Class::New<WeakReference, RTN::WeakReference>(isolate_group);
2037 RTN::WeakReference::type_arguments_offset());
2039 object_store->set_weak_reference_class(cls);
2040 RegisterPrivateClass(cls, Symbols::_WeakReference(), core_lib);
2041
2042 // Pre-register the mirrors library so we can place the vm class
2043 // MirrorReference there rather than the core library.
2044 lib = Library::LookupLibrary(thread, Symbols::DartMirrors());
2045 if (lib.IsNull()) {
2046 lib = Library::NewLibraryHelper(Symbols::DartMirrors(), true);
2047 lib.SetLoadRequested();
2048 lib.Register(thread);
2049 }
2050 object_store->set_bootstrap_library(ObjectStore::kMirrors, lib);
2051 ASSERT(!lib.IsNull());
2053
2054 cls = Class::New<MirrorReference, RTN::MirrorReference>(isolate_group);
2055 RegisterPrivateClass(cls, Symbols::_MirrorReference(), lib);
2056
2057 // Pre-register the collection library so we can place the vm class
2058 // Map there rather than the core library.
2059 lib = Library::LookupLibrary(thread, Symbols::DartCollection());
2060 if (lib.IsNull()) {
2061 lib = Library::NewLibraryHelper(Symbols::DartCollection(), true);
2062 lib.SetLoadRequested();
2063 lib.Register(thread);
2064 }
2065
2066 object_store->set_bootstrap_library(ObjectStore::kCollection, lib);
2067 ASSERT(!lib.IsNull());
2069 cls = Class::New<Map, RTN::Map>(isolate_group);
2070 object_store->set_map_impl_class(cls);
2072 RTN::Map::type_arguments_offset());
2074 RegisterPrivateClass(cls, Symbols::_Map(), lib);
2075 pending_classes.Add(cls);
2076
2077 cls = Class::New<Map, RTN::Map>(kConstMapCid, isolate_group);
2078 object_store->set_const_map_impl_class(cls);
2080 RTN::Map::type_arguments_offset());
2082 cls.set_is_prefinalized();
2083 RegisterPrivateClass(cls, Symbols::_ConstMap(), lib);
2084 pending_classes.Add(cls);
2085
2086 cls = Class::New<Set, RTN::Set>(isolate_group);
2087 object_store->set_set_impl_class(cls);
2089 RTN::Set::type_arguments_offset());
2091 RegisterPrivateClass(cls, Symbols::_Set(), lib);
2092 pending_classes.Add(cls);
2093
2094 cls = Class::New<Set, RTN::Set>(kConstSetCid, isolate_group);
2095 object_store->set_const_set_impl_class(cls);
2097 RTN::Set::type_arguments_offset());
2099 cls.set_is_prefinalized();
2100 RegisterPrivateClass(cls, Symbols::_ConstSet(), lib);
2101 pending_classes.Add(cls);
2102
2103 // Pre-register the async library so we can place the vm class
2104 // FutureOr there rather than the core library.
2105 lib = Library::LookupLibrary(thread, Symbols::DartAsync());
2106 if (lib.IsNull()) {
2107 lib = Library::NewLibraryHelper(Symbols::DartAsync(), true);
2108 lib.SetLoadRequested();
2109 lib.Register(thread);
2110 }
2111 object_store->set_bootstrap_library(ObjectStore::kAsync, lib);
2112 ASSERT(!lib.IsNull());
2113 ASSERT(lib.ptr() == Library::AsyncLibrary());
2114 cls = Class::New<FutureOr, RTN::FutureOr>(isolate_group);
2116 RTN::FutureOr::type_arguments_offset());
2118 RegisterClass(cls, Symbols::FutureOr(), lib);
2119 pending_classes.Add(cls);
2120 object_store->set_future_or_class(cls);
2121
2122 cls = Class::New<SuspendState, RTN::SuspendState>(isolate_group);
2123 RegisterPrivateClass(cls, Symbols::_SuspendState(), lib);
2124 pending_classes.Add(cls);
2125
2126 // Pre-register the developer library so we can place the vm class
2127 // UserTag there rather than the core library.
2128 lib = Library::LookupLibrary(thread, Symbols::DartDeveloper());
2129 if (lib.IsNull()) {
2130 lib = Library::NewLibraryHelper(Symbols::DartDeveloper(), true);
2131 lib.SetLoadRequested();
2132 lib.Register(thread);
2133 }
2134 object_store->set_bootstrap_library(ObjectStore::kDeveloper, lib);
2135 ASSERT(!lib.IsNull());
2137 cls = Class::New<UserTag, RTN::UserTag>(isolate_group);
2138 RegisterPrivateClass(cls, Symbols::_UserTag(), lib);
2139 pending_classes.Add(cls);
2140
2141 // Setup some default native field classes which can be extended for
2142 // specifying native fields in dart classes.
2143 Library::InitNativeWrappersLibrary(isolate_group, is_kernel);
2144 ASSERT(object_store->native_wrappers_library() != Library::null());
2145
2146 // Pre-register the typed_data library so the native class implementations
2147 // can be hooked up before compiling it.
2148 lib = Library::LookupLibrary(thread, Symbols::DartTypedData());
2149 if (lib.IsNull()) {
2150 lib = Library::NewLibraryHelper(Symbols::DartTypedData(), true);
2151 lib.SetLoadRequested();
2152 lib.Register(thread);
2153 }
2154 object_store->set_bootstrap_library(ObjectStore::kTypedData, lib);
2155 ASSERT(!lib.IsNull());
2157#define REGISTER_TYPED_DATA_CLASS(clazz) \
2158 cls = Class::NewTypedDataClass(kTypedData##clazz##ArrayCid, isolate_group); \
2159 RegisterPrivateClass(cls, Symbols::_##clazz##List(), lib);
2160
2162#undef REGISTER_TYPED_DATA_CLASS
2163#define REGISTER_TYPED_DATA_VIEW_CLASS(clazz) \
2164 cls = \
2165 Class::NewTypedDataViewClass(kTypedData##clazz##ViewCid, isolate_group); \
2166 RegisterPrivateClass(cls, Symbols::_##clazz##View(), lib); \
2167 pending_classes.Add(cls); \
2168 cls = Class::NewUnmodifiableTypedDataViewClass( \
2169 kUnmodifiableTypedData##clazz##ViewCid, isolate_group); \
2170 RegisterPrivateClass(cls, Symbols::_Unmodifiable##clazz##View(), lib); \
2171 pending_classes.Add(cls);
2172
2174
2175 cls = Class::NewTypedDataViewClass(kByteDataViewCid, isolate_group);
2176 RegisterPrivateClass(cls, Symbols::_ByteDataView(), lib);
2177 pending_classes.Add(cls);
2179 isolate_group);
2180 RegisterPrivateClass(cls, Symbols::_UnmodifiableByteDataView(), lib);
2181 pending_classes.Add(cls);
2182
2183#undef REGISTER_TYPED_DATA_VIEW_CLASS
2184#define REGISTER_EXT_TYPED_DATA_CLASS(clazz) \
2185 cls = Class::NewExternalTypedDataClass(kExternalTypedData##clazz##Cid, \
2186 isolate_group); \
2187 RegisterPrivateClass(cls, Symbols::_External##clazz(), lib);
2188
2189 cls = Class::New<Instance, RTN::Instance>(kByteBufferCid, isolate_group,
2190 /*register_class=*/false);
2191 cls.set_instance_size(0, 0);
2192 cls.set_next_field_offset(-kWordSize, -compiler::target::kWordSize);
2193 isolate_group->class_table()->Register(cls);
2194 RegisterPrivateClass(cls, Symbols::_ByteBuffer(), lib);
2195 pending_classes.Add(cls);
2196
2198#undef REGISTER_EXT_TYPED_DATA_CLASS
2199 // Register Float32x4, Int32x4, and Float64x2 in the object store.
2200 cls = Class::New<Float32x4, RTN::Float32x4>(isolate_group);
2201 RegisterPrivateClass(cls, Symbols::_Float32x4(), lib);
2202 pending_classes.Add(cls);
2203 object_store->set_float32x4_class(cls);
2204
2205 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2206 /*register_class=*/true,
2207 /*is_abstract=*/true);
2208 RegisterClass(cls, Symbols::Float32x4(), lib);
2210 cls.set_is_prefinalized();
2212 object_store->set_float32x4_type(type);
2213
2214 cls = Class::New<Int32x4, RTN::Int32x4>(isolate_group);
2215 RegisterPrivateClass(cls, Symbols::_Int32x4(), lib);
2216 pending_classes.Add(cls);
2217 object_store->set_int32x4_class(cls);
2218
2219 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2220 /*register_class=*/true,
2221 /*is_abstract=*/true);
2222 RegisterClass(cls, Symbols::Int32x4(), lib);
2224 cls.set_is_prefinalized();
2226 object_store->set_int32x4_type(type);
2227
2228 cls = Class::New<Float64x2, RTN::Float64x2>(isolate_group);
2229 RegisterPrivateClass(cls, Symbols::_Float64x2(), lib);
2230 pending_classes.Add(cls);
2231 object_store->set_float64x2_class(cls);
2232
2233 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2234 /*register_class=*/true,
2235 /*is_abstract=*/true);
2236 RegisterClass(cls, Symbols::Float64x2(), lib);
2238 cls.set_is_prefinalized();
2240 object_store->set_float64x2_type(type);
2241
2242 // Set the super type of class StackTrace to Object type so that the
2243 // 'toString' method is implemented.
2244 type = object_store->object_type();
2245 stacktrace_cls.set_super_type(type);
2246
2247 // Abstract class that represents the Dart class Type.
2248 // Note that this class is implemented by Dart class _AbstractType.
2249 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2250 /*register_class=*/true,
2251 /*is_abstract=*/true);
2253 cls.set_is_prefinalized();
2254 RegisterClass(cls, Symbols::Type(), core_lib);
2255 pending_classes.Add(cls);
2257 object_store->set_type_type(type);
2258
2259 // Abstract class that represents the Dart class Function.
2260 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2261 /*register_class=*/true,
2262 /*is_abstract=*/true);
2264 cls.set_is_prefinalized();
2265 RegisterClass(cls, Symbols::Function(), core_lib);
2266 pending_classes.Add(cls);
2268 object_store->set_function_type(type);
2269
2270 // Abstract class that represents the Dart class Record.
2271 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2272 /*register_class=*/true,
2273 /*is_abstract=*/true);
2274 RegisterClass(cls, Symbols::Record(), core_lib);
2275 pending_classes.Add(cls);
2276 object_store->set_record_class(cls);
2277
2278 cls = Class::New<Number, RTN::Number>(isolate_group);
2279 RegisterClass(cls, Symbols::Number(), core_lib);
2280 pending_classes.Add(cls);
2282 object_store->set_number_type(type);
2283 type = type.ToNullability(Nullability::kNullable, Heap::kOld);
2284 object_store->set_nullable_number_type(type);
2285
2286 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2287 /*register_class=*/true,
2288 /*is_abstract=*/true);
2289 RegisterClass(cls, Symbols::Int(), core_lib);
2291 cls.set_is_prefinalized();
2292 pending_classes.Add(cls);
2294 object_store->set_int_type(type);
2295 type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
2296 object_store->set_legacy_int_type(type);
2298 object_store->set_non_nullable_int_type(type);
2299 type = type.ToNullability(Nullability::kNullable, Heap::kOld);
2300 object_store->set_nullable_int_type(type);
2301
2302 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2303 /*register_class=*/true,
2304 /*is_abstract=*/true);
2305 RegisterClass(cls, Symbols::Double(), core_lib);
2307 cls.set_is_prefinalized();
2308 pending_classes.Add(cls);
2310 object_store->set_double_type(type);
2311 type = type.ToNullability(Nullability::kNullable, Heap::kOld);
2312 object_store->set_nullable_double_type(type);
2313
2314 name = Symbols::_String().ptr();
2315 cls = Class::New<Instance, RTN::Instance>(kIllegalCid, isolate_group,
2316 /*register_class=*/true,
2317 /*is_abstract=*/true);
2318 RegisterClass(cls, name, core_lib);
2320 cls.set_is_prefinalized();
2321 pending_classes.Add(cls);
2323 object_store->set_string_type(type);
2324 type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
2325 object_store->set_legacy_string_type(type);
2326
2327 cls = object_store->bool_class();
2329 object_store->set_bool_type(type);
2330
2331 cls = object_store->smi_class();
2333 object_store->set_smi_type(type);
2334 type = type.ToNullability(Nullability::kLegacy, Heap::kOld);
2335
2336 cls = object_store->mint_class();
2338 object_store->set_mint_type(type);
2339
2340 // The classes 'void' and 'dynamic' are phony classes to make type checking
2341 // more regular; they live in the VM isolate. The class 'void' is not
2342 // registered in the class dictionary because its name is a reserved word.
2343 // The class 'dynamic' is registered in the class dictionary because its
2344 // name is a built-in identifier (this is wrong). The corresponding types
2345 // are stored in the object store.
2346 cls = object_store->null_class();
2347 type =
2348 Type::New(cls, Object::null_type_arguments(), Nullability::kNullable);
2349 type.SetIsFinalized();
2350 type ^= type.Canonicalize(thread);
2351 object_store->set_null_type(type);
2352 cls.set_declaration_type(type);
2353 ASSERT(type.IsNullable());
2354
2355 // Consider removing when/if Null becomes an ordinary class.
2356 type = object_store->object_type();
2357 cls.set_super_type(type);
2358
2359 cls = object_store->never_class();
2360 type = Type::New(cls, Object::null_type_arguments(),
2362 type.SetIsFinalized();
2363 type ^= type.Canonicalize(thread);
2364 object_store->set_never_type(type);
2365 type_args = TypeArguments::New(1);
2366 type_args.SetTypeAt(0, type);
2367 type_args = type_args.Canonicalize(thread);
2368 object_store->set_type_argument_never(type_args);
2369
2370 // Create and cache commonly used type arguments <int>, <double>,
2371 // <String>, <String, dynamic> and <String, String>.
2372 type_args = TypeArguments::New(1);
2373 type = object_store->int_type();
2374 type_args.SetTypeAt(0, type);
2375 type_args = type_args.Canonicalize(thread);
2376 object_store->set_type_argument_int(type_args);
2377 type_args = TypeArguments::New(1);
2378 type = object_store->legacy_int_type();
2379 type_args.SetTypeAt(0, type);
2380 type_args = type_args.Canonicalize(thread);
2381 object_store->set_type_argument_legacy_int(type_args);
2382
2383 type_args = TypeArguments::New(1);
2384 type = object_store->double_type();
2385 type_args.SetTypeAt(0, type);
2386 type_args = type_args.Canonicalize(thread);
2387 object_store->set_type_argument_double(type_args);
2388
2389 type_args = TypeArguments::New(1);
2390 type = object_store->string_type();
2391 type_args.SetTypeAt(0, type);
2392 type_args = type_args.Canonicalize(thread);
2393 object_store->set_type_argument_string(type_args);
2394 type_args = TypeArguments::New(1);
2395 type = object_store->legacy_string_type();
2396 type_args.SetTypeAt(0, type);
2397 type_args = type_args.Canonicalize(thread);
2398 object_store->set_type_argument_legacy_string(type_args);
2399
2400 type_args = TypeArguments::New(2);
2401 type = object_store->string_type();
2402 type_args.SetTypeAt(0, type);
2403 type_args.SetTypeAt(1, Object::dynamic_type());
2404 type_args = type_args.Canonicalize(thread);
2405 object_store->set_type_argument_string_dynamic(type_args);
2406
2407 type_args = TypeArguments::New(2);
2408 type = object_store->string_type();
2409 type_args.SetTypeAt(0, type);
2410 type_args.SetTypeAt(1, type);
2411 type_args = type_args.Canonicalize(thread);
2412 object_store->set_type_argument_string_string(type_args);
2413
2414 lib = Library::LookupLibrary(thread, Symbols::DartFfi());
2415 if (lib.IsNull()) {
2416 lib = Library::NewLibraryHelper(Symbols::DartFfi(), true);
2417 lib.SetLoadRequested();
2418 lib.Register(thread);
2419 }
2420 object_store->set_bootstrap_library(ObjectStore::kFfi, lib);
2421
2422 cls = Class::New<Instance, RTN::Instance>(kFfiNativeTypeCid, isolate_group);
2424 cls.set_is_prefinalized();
2425 pending_classes.Add(cls);
2426 object_store->set_ffi_native_type_class(cls);
2427 RegisterClass(cls, Symbols::FfiNativeType(), lib);
2428
2429#define REGISTER_FFI_TYPE_MARKER(clazz) \
2430 cls = Class::New<Instance, RTN::Instance>(kFfi##clazz##Cid, isolate_group); \
2431 cls.set_num_type_arguments_unsafe(0); \
2432 cls.set_is_prefinalized(); \
2433 pending_classes.Add(cls); \
2434 RegisterClass(cls, Symbols::Ffi##clazz(), lib);
2436#undef REGISTER_FFI_TYPE_MARKER
2437
2438 cls = Class::New<Instance, RTN::Instance>(kFfiNativeFunctionCid,
2439 isolate_group);
2441 RTN::Instance::NextFieldOffset());
2443 cls.set_is_prefinalized();
2444 pending_classes.Add(cls);
2445 RegisterClass(cls, Symbols::FfiNativeFunction(), lib);
2446
2447 cls = Class::NewPointerClass(kPointerCid, isolate_group);
2448 object_store->set_ffi_pointer_class(cls);
2449 pending_classes.Add(cls);
2450 RegisterClass(cls, Symbols::FfiPointer(), lib);
2451
2452 cls = Class::New<DynamicLibrary, RTN::DynamicLibrary>(kDynamicLibraryCid,
2453 isolate_group);
2455 compiler::target::RoundedAllocationSize(
2456 RTN::DynamicLibrary::InstanceSize()));
2457 cls.set_is_prefinalized();
2458 pending_classes.Add(cls);
2459 RegisterClass(cls, Symbols::FfiDynamicLibrary(), lib);
2460
2461 cls = Class::New<NativeFinalizer, RTN::NativeFinalizer>(isolate_group);
2462 object_store->set_native_finalizer_class(cls);
2463 RegisterPrivateClass(cls, Symbols::_NativeFinalizer(), lib);
2464
2465 cls = Class::New<Finalizer, RTN::Finalizer>(isolate_group);
2468 RTN::Finalizer::type_arguments_offset());
2470 object_store->set_finalizer_class(cls);
2471 pending_classes.Add(cls);
2472 RegisterPrivateClass(cls, Symbols::_FinalizerImpl(), core_lib);
2473
2474 // Pre-register the internal library so we can place the vm class
2475 // FinalizerEntry there rather than the core library.
2476 lib = Library::LookupLibrary(thread, Symbols::DartInternal());
2477 if (lib.IsNull()) {
2478 lib = Library::NewLibraryHelper(Symbols::DartInternal(), true);
2479 lib.SetLoadRequested();
2480 lib.Register(thread);
2481 }
2482 object_store->set_bootstrap_library(ObjectStore::kInternal, lib);
2483 ASSERT(!lib.IsNull());
2485
2486 cls = Class::New<FinalizerEntry, RTN::FinalizerEntry>(isolate_group);
2487 object_store->set_finalizer_entry_class(cls);
2488 pending_classes.Add(cls);
2489 RegisterClass(cls, Symbols::FinalizerEntry(), lib);
2490
2491 // Finish the initialization by compiling the bootstrap scripts containing
2492 // the base interfaces and the implementation of the internal classes.
2493 const Error& error = Error::Handle(
2494 zone, Bootstrap::DoBootstrapping(kernel_buffer, kernel_buffer_size));
2495 if (!error.IsNull()) {
2496 return error.ptr();
2497 }
2498
2499 isolate_group->class_table()->CopySizesFromClassObjects();
2500
2502
2503 // Set up the intrinsic state of all functions (core, math and typed data).
2505
2506 // Adds static const fields (class ids) to the class 'ClassID');
2507 lib = Library::LookupLibrary(thread, Symbols::DartInternal());
2508 ASSERT(!lib.IsNull());
2509 cls = lib.LookupClassAllowPrivate(Symbols::ClassID());
2510 ASSERT(!cls.IsNull());
2511 const bool injected = cls.InjectCIDFields();
2512 ASSERT(injected);
2513
2514 // Set up recognized state of all functions (core, math and typed data).
2516#endif // !defined(DART_PRECOMPILED_RUNTIME)
2517 } else {
2518 // Object::Init version when we are running in a version of dart that has a
2519 // full snapshot linked in and an isolate is initialized using the full
2520 // snapshot.
2521 ObjectStore* object_store = isolate_group->object_store();
2522 SafepointWriteRwLocker ml(thread, isolate_group->program_lock());
2523
2524 Class& cls = Class::Handle(zone);
2525
2526 // Set up empty classes in the object store, these will get initialized
2527 // correctly when we read from the snapshot. This is done to allow
2528 // bootstrapping of reading classes from the snapshot. Some classes are not
2529 // stored in the object store. Yet we still need to create their Class
2530 // object so that they get put into the class_table (as a side effect of
2531 // Class::New()).
2532 cls = Class::New<Instance, RTN::Instance>(kInstanceCid, isolate_group);
2533 object_store->set_object_class(cls);
2534
2535 cls = Class::New<LibraryPrefix, RTN::LibraryPrefix>(isolate_group);
2536 cls = Class::New<Type, RTN::Type>(isolate_group);
2537 cls = Class::New<FunctionType, RTN::FunctionType>(isolate_group);
2538 cls = Class::New<RecordType, RTN::RecordType>(isolate_group);
2539 cls = Class::New<TypeParameter, RTN::TypeParameter>(isolate_group);
2540
2541 cls = Class::New<Array, RTN::Array>(isolate_group);
2542 object_store->set_array_class(cls);
2543
2544 cls = Class::New<Array, RTN::Array>(kImmutableArrayCid, isolate_group);
2545 object_store->set_immutable_array_class(cls);
2546
2547 cls = Class::New<GrowableObjectArray, RTN::GrowableObjectArray>(
2548 isolate_group);
2549 object_store->set_growable_object_array_class(cls);
2550
2551 cls = Class::New<Map, RTN::Map>(isolate_group);
2552 object_store->set_map_impl_class(cls);
2553
2554 cls = Class::New<Map, RTN::Map>(kConstMapCid, isolate_group);
2555 object_store->set_const_map_impl_class(cls);
2556
2557 cls = Class::New<Set, RTN::Set>(isolate_group);
2558 object_store->set_set_impl_class(cls);
2559
2560 cls = Class::New<Set, RTN::Set>(kConstSetCid, isolate_group);
2561 object_store->set_const_set_impl_class(cls);
2562
2563 cls = Class::New<Float32x4, RTN::Float32x4>(isolate_group);
2564 object_store->set_float32x4_class(cls);
2565
2566 cls = Class::New<Int32x4, RTN::Int32x4>(isolate_group);
2567 object_store->set_int32x4_class(cls);
2568
2569 cls = Class::New<Float64x2, RTN::Float64x2>(isolate_group);
2570 object_store->set_float64x2_class(cls);
2571
2572#define REGISTER_TYPED_DATA_CLASS(clazz) \
2573 cls = Class::NewTypedDataClass(kTypedData##clazz##Cid, isolate_group);
2575#undef REGISTER_TYPED_DATA_CLASS
2576#define REGISTER_TYPED_DATA_VIEW_CLASS(clazz) \
2577 cls = \
2578 Class::NewTypedDataViewClass(kTypedData##clazz##ViewCid, isolate_group); \
2579 cls = Class::NewUnmodifiableTypedDataViewClass( \
2580 kUnmodifiableTypedData##clazz##ViewCid, isolate_group);
2582#undef REGISTER_TYPED_DATA_VIEW_CLASS
2583 cls = Class::NewTypedDataViewClass(kByteDataViewCid, isolate_group);
2585 isolate_group);
2586#define REGISTER_EXT_TYPED_DATA_CLASS(clazz) \
2587 cls = Class::NewExternalTypedDataClass(kExternalTypedData##clazz##Cid, \
2588 isolate_group);
2590#undef REGISTER_EXT_TYPED_DATA_CLASS
2591
2592 cls = Class::New<Instance, RTN::Instance>(kFfiNativeTypeCid, isolate_group);
2593 object_store->set_ffi_native_type_class(cls);
2594
2595#define REGISTER_FFI_CLASS(clazz) \
2596 cls = Class::New<Instance, RTN::Instance>(kFfi##clazz##Cid, isolate_group);
2598#undef REGISTER_FFI_CLASS
2599
2600 cls = Class::New<Instance, RTN::Instance>(kFfiNativeFunctionCid,
2601 isolate_group);
2602
2603 cls = Class::NewPointerClass(kPointerCid, isolate_group);
2604 object_store->set_ffi_pointer_class(cls);
2605
2606 cls = Class::New<DynamicLibrary, RTN::DynamicLibrary>(kDynamicLibraryCid,
2607 isolate_group);
2608
2609 cls = Class::New<Instance, RTN::Instance>(kByteBufferCid, isolate_group,
2610 /*register_isolate_group=*/false);
2612 isolate_group->class_table()->Register(cls);
2613
2614 cls = Class::New<Integer, RTN::Integer>(isolate_group);
2615 object_store->set_integer_implementation_class(cls);
2616
2617 cls = Class::New<Smi, RTN::Smi>(isolate_group);
2618 object_store->set_smi_class(cls);
2619
2620 cls = Class::New<Mint, RTN::Mint>(isolate_group);
2621 object_store->set_mint_class(cls);
2622
2623 cls = Class::New<Double, RTN::Double>(isolate_group);
2624 object_store->set_double_class(cls);
2625
2626 cls = Class::New<Closure, RTN::Closure>(isolate_group);
2627 object_store->set_closure_class(cls);
2628
2629 cls = Class::New<Record, RTN::Record>(isolate_group);
2630
2631 cls = Class::NewStringClass(kOneByteStringCid, isolate_group);
2632 object_store->set_one_byte_string_class(cls);
2633
2634 cls = Class::NewStringClass(kTwoByteStringCid, isolate_group);
2635 object_store->set_two_byte_string_class(cls);
2636
2637 cls = Class::New<Bool, RTN::Bool>(isolate_group);
2638 object_store->set_bool_class(cls);
2639
2640 cls = Class::New<Instance, RTN::Instance>(kNullCid, isolate_group);
2641 object_store->set_null_class(cls);
2642
2643 cls = Class::New<Instance, RTN::Instance>(kNeverCid, isolate_group);
2644 object_store->set_never_class(cls);
2645
2646 cls = Class::New<Capability, RTN::Capability>(isolate_group);
2647 cls = Class::New<ReceivePort, RTN::ReceivePort>(isolate_group);
2648 cls = Class::New<SendPort, RTN::SendPort>(isolate_group);
2649 cls = Class::New<StackTrace, RTN::StackTrace>(isolate_group);
2650 cls = Class::New<SuspendState, RTN::SuspendState>(isolate_group);
2651 cls = Class::New<RegExp, RTN::RegExp>(isolate_group);
2652 cls = Class::New<Number, RTN::Number>(isolate_group);
2653
2654 cls = Class::New<WeakProperty, RTN::WeakProperty>(isolate_group);
2655 object_store->set_weak_property_class(cls);
2656 cls = Class::New<WeakReference, RTN::WeakReference>(isolate_group);
2657 object_store->set_weak_reference_class(cls);
2658 cls = Class::New<Finalizer, RTN::Finalizer>(isolate_group);
2659 object_store->set_finalizer_class(cls);
2660 cls = Class::New<NativeFinalizer, RTN::NativeFinalizer>(isolate_group);
2661 object_store->set_native_finalizer_class(cls);
2662 cls = Class::New<FinalizerEntry, RTN::FinalizerEntry>(isolate_group);
2663 object_store->set_finalizer_entry_class(cls);
2664
2665 cls = Class::New<MirrorReference, RTN::MirrorReference>(isolate_group);
2666 cls = Class::New<UserTag, RTN::UserTag>(isolate_group);
2667 cls = Class::New<FutureOr, RTN::FutureOr>(isolate_group);
2668 object_store->set_future_or_class(cls);
2669 cls = Class::New<TransferableTypedData, RTN::TransferableTypedData>(
2670 isolate_group);
2671 }
2672 return Error::null();
2673}
2674
#if defined(DEBUG)
// Debug-only: returns whether this object's raw pointer lives in the
// read-only VM isolate heap (delegates to the untagged layout's check).
bool Object::InVMIsolateHeap() const {
  auto* const untagged = ptr()->untag();
  return untagged->InVMIsolateHeap();
}
#endif  // DEBUG
2680
2681void Object::Print() const {
2682 THR_Print("%s\n", ToCString());
2683}
2684
2685StringPtr Object::DictionaryName() const {
  // Base objects are not named entries in a library dictionary; subclasses
  // that can be looked up by name override this to return their name.
2686 return String::null();
2687}
2688
2690 if (class_id < kNumPredefinedCids) {
   // Predefined cids use the static table-driven predicate.
2691 return ShouldHaveImmutabilityBitSetCid(class_id);
2692 } else {
   // NOTE(review): the function signature (orig. line 2689) and the `return`
   // head of this call (orig. line 2693) are missing — extraction artifact.
   // Restore from upstream runtime/vm/object.cc before compiling. The visible
   // code consults the current isolate group's class table for user cids.
2694 IsolateGroup::Current()->class_table()->At(class_id));
2695 }
2696}
2697
// Initializes a freshly allocated heap object at `address`: zero-fills
// non-pointer words, writes Object::null() into the pointer-field range
// [ptr_field_start_offset, ptr_field_end_offset] (inclusive end), and finally
// composes and stores the header tag word. The header word itself is written
// last (see note below about the concurrent marker).
2698void Object::InitializeObject(uword address,
2699 intptr_t class_id,
2700 intptr_t size,
2701 bool compressed,
2702 uword ptr_field_start_offset,
2703 uword ptr_field_end_offset) {
2704 // Note: we skip the header word here to avoid a racy read in the concurrent
2705 // marker from observing the null object when it reads into a heap page
2706 // allocated after marking started.
2707 uword cur = address + sizeof(UntaggedObject);
2708 uword ptr_field_start = address + ptr_field_start_offset;
2709 uword ptr_field_end = address + ptr_field_end_offset;
2710 uword end = address + size;
2711 // The start of pointer fields should always be past the object header, even
2712 // if there are no pointer fields (ptr_field_end < ptr_field_start).
2713 ASSERT(cur <= ptr_field_start);
2714 // The start of pointer fields can be at the end for empty payload objects.
2715 ASSERT(ptr_field_start <= end);
2716 // The end of pointer fields should always be before the end, as the end of
2717 // pointer fields is inclusive (the address of the last field to initialize).
2718 ASSERT(ptr_field_end < end);
2719 bool needs_init = true;
2720 if (IsTypedDataBaseClassId(class_id) || class_id == kArrayCid) {
2721 // If the size is greater than both kNewAllocatableSize and
2722 // kAllocatablePageSize, the object must have been allocated to a new
2723 // large page, which must already have been zero initialized by the OS.
2724 // Note that zero is a GC-safe value.
2725 //
2726 // For arrays, the caller will then initialize the fields to null with
2727 // safepoint checks to avoid blocking for the full duration of
2728 // initializing this array.
2729 needs_init =
   // NOTE(review): the right-hand side of this assignment (orig. line 2730,
   // the size-threshold comparison described by the comment above) is
   // missing — extraction artifact; restore from upstream object.cc.
2731 }
2732 if (needs_init) {
2733 // Initialize the memory prior to any pointer fields with 0. (This loop
2734 // and the next will be a no-op if the object has no pointer fields.)
2735 uword initial_value = 0;
2736 while (cur < ptr_field_start) {
2737 *reinterpret_cast<uword*>(cur) = initial_value;
2738 cur += kWordSize;
2739 }
2740 // Initialize any pointer fields with Object::null().
2741 initial_value = static_cast<uword>(null_);
2742#if defined(DART_COMPRESSED_POINTERS)
   // Duplicate the 32-bit compressed null into both halves of the word so the
   // word-granularity fill loops below also work for compressed fields.
2743 if (compressed) {
2744 initial_value &= 0xFFFFFFFF;
2745 initial_value |= initial_value << 32;
2746 }
2747 const bool has_pointer_fields = ptr_field_start <= ptr_field_end;
2748 // If there are compressed pointer fields and the first compressed pointer
2749 // field is not at a word start, then initialize it to Object::null().
2750 if (compressed && has_pointer_fields &&
2751 (ptr_field_start % kWordSize != 0)) {
2752 *reinterpret_cast<compressed_uword*>(ptr_field_start) = initial_value;
2753 }
2754#endif
2755 while (cur <= ptr_field_end) {
2756 *reinterpret_cast<uword*>(cur) = initial_value;
2757 cur += kWordSize;
2758 }
2759 // Initialize the memory after any pointer fields with 0, unless this is
2760 // an instructions object in which case we use the break instruction.
2761 initial_value = class_id == kInstructionsCid ? kBreakInstructionFiller : 0;
2762#if defined(DART_COMPRESSED_POINTERS)
2763 // If there are compressed pointer fields and the last compressed pointer
2764 // field is the start of a word, then initialize the other part of the word
2765 // to the new initial value.
2766 //
2767 // (We're guaranteed there's always space in the object after the last
2768 // pointer field in this case since objects are allocated in multiples of
2769 // the word size.)
2770 if (compressed && has_pointer_fields && (ptr_field_end % kWordSize == 0)) {
2771 *reinterpret_cast<compressed_uword*>(ptr_field_end +
2772 kCompressedWordSize) = initial_value;
2773 }
2774#endif
2775 while (cur < end) {
2776 *reinterpret_cast<uword*>(cur) = initial_value;
2777 cur += kWordSize;
2778 }
2779 } else {
2780 // Check that MemorySanitizer understands this is initialized.
2781 MSAN_CHECK_INITIALIZED(reinterpret_cast<void*>(address), size);
2782#if defined(DEBUG)
   // In debug builds, verify the OS really did hand us zeroed memory.
2783 const uword initial_value = 0;
2784 while (cur < end) {
2785 ASSERT_EQUAL(*reinterpret_cast<uword*>(cur), initial_value);
2786 cur += kWordSize;
2787 }
2788#endif
2789 }
 // Compose the header tag word and publish it last.
2790 uword tags = 0;
2791 ASSERT(class_id != kIllegalCid);
2792 tags = UntaggedObject::ClassIdTag::update(class_id, tags);
2793 tags = UntaggedObject::SizeTag::update(size, tags);
2794 const bool is_old =
   // NOTE(review): the initializer of `is_old` (orig. line 2795) and the
   // statements at orig. lines 2798 and 2800 (OldAndNotRemembered /
   // ImmutableBit updates) are missing — extraction artifact; restore from
   // upstream object.cc.
2796 tags = UntaggedObject::AlwaysSetBit::update(true, tags);
2797 tags = UntaggedObject::NotMarkedBit::update(true, tags);
2799 tags = UntaggedObject::NewBit::update(!is_old, tags);
2801 Object::ShouldHaveImmutabilityBitSet(class_id), tags);
2802#if defined(HASH_IN_OBJECT_HEADER)
2803 tags = UntaggedObject::HashTag::update(0, tags);
2804#endif
2805 reinterpret_cast<UntaggedObject*>(address)->tags_ = tags;
2806}
2807
2809#if defined(DEBUG)
 // NOTE(review): the enclosing function's signature (orig. line 2808) is
 // missing — extraction artifact. The body verifies that a non-null handle's
 // vtable matches the builtin vtable recorded for its class id (user-defined
 // cids all map to kInstanceCid).
2810 if (ptr_ != Object::null()) {
2811 intptr_t cid = ptr_->GetClassIdMayBeSmi();
2812 if (cid >= kNumPredefinedCids) {
2813 cid = kInstanceCid;
2814 }
2815 ASSERT(vtable() == builtin_vtables_[cid]);
2816 }
2817#endif
2818}
2819
2821 intptr_t size,
2822 Heap::Space space,
2823 bool compressed,
2824 uword ptr_field_start_offset,
2825 uword ptr_field_end_offset) {
 // NOTE(review): the first signature line (orig. line 2820) and the lines at
 // orig. 2826/2828 are missing — extraction artifact; restore from upstream
 // object.cc. Allocates raw heap memory, initializes it via
 // InitializeObject(), and returns the tagged pointer.
2827 Thread* thread = Thread::Current();
2829 ASSERT(thread->no_safepoint_scope_depth() == 0);
2830 ASSERT(thread->no_callback_scope_depth() == 0);
2831 Heap* heap = thread->heap();
2832
2833 uword address = heap->Allocate(thread, size, space);
 // Allocation failure handling: report OOM through the closest viable
 // mechanism without allocating anything new.
2834 if (UNLIKELY(address == 0)) {
2835 // SuspendLongJumpScope during Dart entry ensures that if a longjmp base is
2836 // available, it is the innermost error handler, so check for a longjmp base
2837 // before checking for an exit frame.
2838 if (thread->long_jump_base() != nullptr) {
2839 Report::LongJump(Object::out_of_memory_error());
2840 UNREACHABLE();
2841 } else if (thread->top_exit_frame_info() != 0) {
2842 // Use the preallocated out of memory exception to avoid calling
2843 // into dart code or allocating any code.
   // NOTE(review): the Exceptions::ThrowOOM call (orig. line 2844) is
   // missing here — extraction artifact.
2845 UNREACHABLE();
2846 } else {
2847 // Nowhere to propagate an exception to.
2848 OUT_OF_MEMORY();
2849 }
2850 }
2851
2852 ObjectPtr raw_obj;
2853 NoSafepointScope no_safepoint(thread);
2854 InitializeObject(address, cls_id, size, compressed, ptr_field_start_offset,
2855 ptr_field_end_offset);
2856 raw_obj = static_cast<ObjectPtr>(address + kHeapObjectTag);
2857 ASSERT(cls_id == UntaggedObject::ClassIdTag::decode(raw_obj->untag()->tags_));
2858 if (raw_obj->IsOldObject() && UNLIKELY(thread->is_marking())) {
2859 // Black allocation. Prevents a data race between the mutator and
2860 // concurrent marker on ARM and ARM64 (the marker may observe a
2861 // publishing store of this object before the stores that initialize its
2862 // slots), and helps the collection to finish sooner.
2863 // release: Setting the mark bit must not be ordered after a publishing
2864 // store of this object. Compare Scavenger::ScavengePointer.
2865 raw_obj->untag()->SetMarkBitRelease();
2866 heap->old_space()->AllocateBlack(size);
2867 }
2868
2869#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
 // Heap-profiler sampling hook: attach user data for a pending sample.
 // NOTE(review): lines at orig. 2872/2875 are missing — extraction artifact.
2870 HeapProfileSampler& heap_sampler = thread->heap_sampler();
2871 if (heap_sampler.HasOutstandingSample()) {
2873 void* data = heap_sampler.InvokeCallbackForLastSample(cls_id);
2874 heap->SetHeapSamplingData(raw_obj, data);
2876 }
2877#endif // !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
2878
2879#if !defined(PRODUCT)
 // Allocation tracing for the profiler, keyed by class id.
2880 auto class_table = thread->isolate_group()->class_table();
2881 if (class_table->ShouldTraceAllocationFor(cls_id)) {
2882 uint32_t hash =
2883 HeapSnapshotWriter::GetHeapSnapshotIdentityHash(thread, raw_obj);
2884 Profiler::SampleAllocation(thread, cls_id, hash);
2885 }
2886#endif // !defined(PRODUCT)
2887 return raw_obj;
2888}
2889
2891 public:
 // NOTE(review): the class declaration line (orig. 2890) and the constructor
 // head (orig. 2892-2893) are missing — extraction artifact; restore from
 // upstream object.cc. The visitor re-applies write-barrier checks for every
 // heap-object slot of an old-space object (used after cloning).
2894 thread_(thread),
2895 old_obj_(obj) {
2896 ASSERT(old_obj_->IsOldObject());
2897 }
2898
2899 void VisitPointers(ObjectPtr* from, ObjectPtr* to) override {
   // Arrays use the array-specific (remembered-card capable) store check;
   // everything else uses the plain heap-object store check.
2900 if (old_obj_->IsArray()) {
2901 for (ObjectPtr* slot = from; slot <= to; ++slot) {
2902 ObjectPtr value = *slot;
2903 if (value->IsHeapObject()) {
2904 old_obj_->untag()->CheckArrayPointerStore(slot, value, thread_);
2905 }
2906 }
2907 } else {
2908 for (ObjectPtr* slot = from; slot <= to; ++slot) {
2909 ObjectPtr value = *slot;
2910 if (value->IsHeapObject()) {
2911 old_obj_->untag()->CheckHeapPointerStore(value, thread_);
2912 }
2913 }
2914 }
2915 }
2916
2917#if defined(DART_COMPRESSED_POINTERS)
 // Same as VisitPointers, but slots hold compressed pointers that must be
 // decompressed against the heap base first.
2918 void VisitCompressedPointers(uword heap_base,
2919 CompressedObjectPtr* from,
2920 CompressedObjectPtr* to) override {
2921 if (old_obj_->IsArray()) {
2922 for (CompressedObjectPtr* slot = from; slot <= to; ++slot) {
2923 ObjectPtr value = slot->Decompress(heap_base);
2924 if (value->IsHeapObject()) {
2925 old_obj_->untag()->CheckArrayPointerStore(slot, value, thread_);
2926 }
2927 }
2928 } else {
2929 for (CompressedObjectPtr* slot = from; slot <= to; ++slot) {
2930 ObjectPtr value = slot->Decompress(heap_base);
2931 if (value->IsHeapObject()) {
2932 old_obj_->untag()->CheckHeapPointerStore(value, thread_);
2933 }
2934 }
2935 }
2936 }
2937#endif
2938
2939 private:
2940 Thread* thread_;
2941 ObjectPtr old_obj_;
2942
2943 DISALLOW_COPY_AND_ASSIGN(WriteBarrierUpdateVisitor);
2944};
2945
2946#if defined(DEBUG)
2947bool Object::IsZoneHandle() const {
2948 return VMHandles::IsZoneHandle(reinterpret_cast<uword>(this));
2949}
2950
2951bool Object::IsReadOnlyHandle() const {
2952 return Dart::IsReadOnlyHandle(reinterpret_cast<uword>(this));
2953}
2954
2955bool Object::IsNotTemporaryScopedHandle() const {
2956 return (IsZoneHandle() || IsReadOnlyHandle());
2957}
2958#endif
2959
2961 Heap::Space space,
2962 bool load_with_relaxed_atomics) {
 // NOTE(review): the first signature line (orig. 2960, taking the object to
 // clone) is missing — extraction artifact; restore from upstream object.cc.
 // Allocates a same-size object of the same class and copies the body over
 // before any safepoint can be reached.
2963 // Generic function types should be cloned with FunctionType::Clone.
2964 ASSERT(!orig.IsFunctionType() || !FunctionType::Cast(orig).IsGeneric());
2965 const Class& cls = Class::Handle(orig.clazz());
2966 intptr_t size = orig.ptr()->untag()->HeapSize();
2967 // All fields (including non-SmiPtr fields) will be initialized with Smi 0,
2968 // but the contents of the original object are copied over before the thread
2969 // is allowed to reach a safepoint.
2970 ObjectPtr raw_clone =
2971 Object::Allocate(cls.id(), size, space, cls.HasCompressedPointers(),
2972 from_offset<Object>(), to_offset<Object>());
2973 NoSafepointScope no_safepoint;
2974 // Copy the body of the original into the clone.
2975 uword orig_addr = UntaggedObject::ToAddr(orig.ptr());
2976 uword clone_addr = UntaggedObject::ToAddr(raw_clone);
2977 const intptr_t kHeaderSizeInBytes = sizeof(UntaggedObject);
2978 if (load_with_relaxed_atomics) {
   // Word-by-word relaxed-atomic loads: used when another thread may be
   // concurrently mutating the source object.
2979 auto orig_atomics_ptr = reinterpret_cast<std::atomic<uword>*>(orig_addr);
2980 auto clone_ptr = reinterpret_cast<uword*>(clone_addr);
2981 for (intptr_t i = kHeaderSizeInBytes / kWordSize; i < size / kWordSize;
2982 i++) {
2983 *(clone_ptr + i) =
2984 (orig_atomics_ptr + i)->load(std::memory_order_relaxed);
2985 }
2986 } else {
2987 memmove(reinterpret_cast<uint8_t*>(clone_addr + kHeaderSizeInBytes),
2988 reinterpret_cast<uint8_t*>(orig_addr + kHeaderSizeInBytes),
2989 size - kHeaderSizeInBytes);
2990 }
2991
 // Typed-data objects cache an interior data pointer that must be recomputed
 // for the clone's address.
2992 if (IsTypedDataClassId(raw_clone->GetClassId())) {
2993 auto raw_typed_data = TypedData::RawCast(raw_clone);
2994 raw_typed_data.untag()->RecomputeDataField();
2995 }
2996
2997 // Add clone to store buffer, if needed.
2998 if (!raw_clone->IsOldObject()) {
2999 // No need to remember an object in new space.
3000 return raw_clone;
3001 }
3002 WriteBarrierUpdateVisitor visitor(Thread::Current(), raw_clone);
3003 raw_clone->untag()->VisitPointers(&visitor);
3004 return raw_clone;
3005}
3006
3008 const intptr_t cid = id();
 // NOTE(review): the function signature (orig. 3007) and several macro
 // invocation / return lines (orig. 3011, 3015, 3025, 3029, 3034) are
 // missing — extraction artifact; restore from upstream object.cc. The
 // switch maps each predefined cid to its C++ class's
 // ContainsCompressedPointers() answer.
3009 switch (cid) {
3010 case kByteBufferCid:
3012#define HANDLE_CASE(clazz) \
3013 case k##clazz##Cid: \
3014 return dart::clazz::ContainsCompressedPointers();
3016#undef HANDLE_CASE
3017#define HANDLE_CASE(clazz) \
3018 case kTypedData##clazz##Cid: \
3019 return dart::TypedData::ContainsCompressedPointers(); \
3020 case kTypedData##clazz##ViewCid: \
3021 case kUnmodifiableTypedData##clazz##ViewCid: \
3022 return dart::TypedDataView::ContainsCompressedPointers(); \
3023 case kExternalTypedData##clazz##Cid: \
3024 return dart::ExternalTypedData::ContainsCompressedPointers();
3026#undef HANDLE_CASE
3027 default:
3028 if (cid >= kNumPredefinedCids) {
3030 }
3031 }
 // Reaching here means an unmapped predefined cid — fail loudly.
3032 FATAL("Unsupported class for compressed pointers translation: %s (id=%" Pd
3033 ", kNumPredefinedCids=%" Pd ")\n",
3035 return false;
3036}
3037
3038StringPtr Class::Name() const {
3039 return untag()->name();
3040}
3041
3042StringPtr Class::ScrubbedName() const {
 // NOTE(review): the body (orig. line 3043, presumably delegating to the
 // scrubbed-name symbol helper) is missing — extraction artifact; restore
 // from upstream object.cc.
3044}
3045
3046const char* Class::ScrubbedNameCString() const {
 // NOTE(review): the body (orig. line 3047) is missing — extraction
 // artifact; restore from upstream object.cc.
3048}
3049
// Returns the user-visible class name. In non-PRODUCT builds the name is
// cached in the untagged layout and must already be populated; in PRODUCT
// builds it is regenerated on every call (the early return above is
// compiled out, so control reaches the Symbols::New line).
3050StringPtr Class::UserVisibleName() const {
3051#if !defined(PRODUCT)
3052 ASSERT(untag()->user_name() != String::null());
3053 return untag()->user_name();
3054#endif // !defined(PRODUCT)
3055 // No caching in PRODUCT, regenerate.
3056 return Symbols::New(Thread::Current(), GenerateUserVisibleName());
3057}
3058
3060#if !defined(PRODUCT)
 // NOTE(review): the function signature (orig. line 3059,
 // Class::UserVisibleNameCString) is missing — extraction artifact. Mirrors
 // UserVisibleName(): cached in non-PRODUCT, regenerated in PRODUCT.
3061 ASSERT(untag()->user_name() != String::null());
3062 return String::Handle(untag()->user_name()).ToCString();
3063#endif // !defined(PRODUCT)
3064 return GenerateUserVisibleName(); // No caching in PRODUCT, regenerate.
3065}
3066
// Returns this class's name as a C string, formatted per the requested
// visibility.
3067const char* Class::NameCString(NameVisibility name_visibility) const {
3068 switch (name_visibility) {
   // NOTE(review): the three case labels (orig. lines 3069, 3071, 3073 —
   // kInternalName / kScrubbedName / kUserVisibleName) are missing —
   // extraction artifact; restore from upstream object.cc.
3070 return String::Handle(Name()).ToCString();
3072 return ScrubbedNameCString();
3074 return UserVisibleNameCString();
3075 default:
3076 UNREACHABLE();
3077 return nullptr;
3078 }
3079}
3080
3081ClassPtr Class::Mixin() const {
 // NOTE(review): the guard condition (orig. line 3082) and the initializer
 // tail of mixin_type (orig. line 3085, indexing the interfaces array) are
 // missing — extraction artifact; restore from upstream object.cc. When the
 // class is a mixin application, returns the mixin's class; otherwise this
 // class itself.
3083 const Array& interfaces = Array::Handle(this->interfaces());
3084 const Type& mixin_type =
3086 return mixin_type.type_class();
3087 }
3088 return ptr();
3089}
3090
3092 NoSafepointScope no_safepoint;
 // NOTE(review): the function signature (orig. 3091) and the head of this
 // return expression (orig. 3093) are missing — extraction artifact. The
 // visible code reads a flag bit out of the owning library's flags_ word
 // without allowing a safepoint.
3094 untag()->library()->untag()->flags_);
3095}
3096
// Returns the "rare type" for this class: its declaration type when the
// class is not generic, otherwise a non-nullable Type instantiated to
// bounds.
3097TypePtr Class::RareType() const {
3098 if (!IsGeneric()) {
3099 return DeclarationType();
3100 }
 // NOTE(review): lines at orig. 3101, 3105, and 3109 (an assert, the
 // instantiated-to-bounds initializer tail, and the canonicalization call)
 // are missing — extraction artifact; restore from upstream object.cc.
3102 Thread* const thread = Thread::Current();
3103 Zone* const zone = thread->zone();
3104 const auto& inst_to_bounds =
3106 ASSERT(inst_to_bounds.ptr() != Object::empty_type_arguments().ptr());
3107 auto& type = Type::Handle(
3108 zone, Type::New(*this, inst_to_bounds, Nullability::kNonNullable));
3110 return type.ptr();
3111}
3112
// Creates the vm-internal Class object backing the C++ FakeObject type,
// sizing it from both host and compiler-target layouts and (optionally)
// registering it in the isolate group's class table.
3113template <class FakeObject, class TargetFakeObject>
3114ClassPtr Class::New(IsolateGroup* isolate_group, bool register_class) {
 // NOTE(review): a line at orig. 3115 (likely an ASSERT about initialization
 // state) and the second argument line of set_next_field_offset (orig. 3128)
 // are missing — extraction artifact; restore from upstream object.cc.
3116 const auto& result = Class::Handle(Object::Allocate<Class>(Heap::kOld));
3117 Object::VerifyBuiltinVtable<FakeObject>(FakeObject::kClassId);
3118 NOT_IN_PRECOMPILED(result.set_token_pos(TokenPosition::kNoSource));
3119 NOT_IN_PRECOMPILED(result.set_end_token_pos(TokenPosition::kNoSource));
3120 result.set_instance_size(FakeObject::InstanceSize(),
3121 compiler::target::RoundedAllocationSize(
3122 TargetFakeObject::InstanceSize()));
3123 result.set_type_arguments_field_offset_in_words(kNoTypeArguments,
3124 RTN::Class::kNoTypeArguments);
3125 const intptr_t host_next_field_offset = FakeObject::NextFieldOffset();
3126 const intptr_t target_next_field_offset = TargetFakeObject::NextFieldOffset();
3127 result.set_next_field_offset(host_next_field_offset,
3129 COMPILE_ASSERT((FakeObject::kClassId != kInstanceCid));
3130 result.set_id(FakeObject::kClassId);
3131 NOT_IN_PRECOMPILED(result.set_implementor_cid(kIllegalCid));
3132 result.set_num_type_arguments_unsafe(0);
3133 result.set_num_native_fields(0);
3134 result.set_state_bits(0);
3135 if (IsInternalOnlyClassId(FakeObject::kClassId) ||
3136 (FakeObject::kClassId == kTypeArgumentsCid)) {
3137 // VM internal classes are done. There is no finalization needed or
3138 // possible in this case.
3139 result.set_is_declaration_loaded();
3140 result.set_is_type_finalized();
3141 result.set_is_allocate_finalized();
3142 } else if (FakeObject::kClassId != kClosureCid) {
3143 // VM backed classes are almost ready: run checks and resolve class
3144 // references, but do not recompute size.
3145 result.set_is_prefinalized();
3146 }
3147 if (FakeObject::kClassId < kNumPredefinedCids &&
3148 IsDeeplyImmutableCid(FakeObject::kClassId)) {
3149 result.set_is_deeply_immutable(true);
3150 }
3151 NOT_IN_PRECOMPILED(result.set_kernel_offset(0));
3152 result.InitEmptyFields();
3153 if (register_class) {
3154 isolate_group->class_table()->Register(result);
3155 }
3156 return result.ptr();
3157}
3158
3159#if !defined(DART_PRECOMPILED_RUNTIME)
// Reports a fatal compile-time error when a class's cumulative type-argument
// count exceeds the representable range; never returns.
3160static void ReportTooManyTypeArguments(const Class& cls) {
 // NOTE(review): the Report::MessageF(...) call head (orig. lines 3161-3162)
 // is missing — extraction artifact; restore from upstream object.cc.
3163 "too many type parameters declared in class '%s' or in its "
3164 "super classes",
3165 String::Handle(cls.Name()).ToCString());
3166 UNREACHABLE();
3167}
3168#endif // !defined(DART_PRECOMPILED_RUNTIME)
3169
// Stores the (cumulative) number of type arguments. Must fit in 16 bits;
// concurrent stores of the same computed value are tolerated by design.
3170void Class::set_num_type_arguments(intptr_t value) const {
3171#if defined(DART_PRECOMPILED_RUNTIME)
3172 UNREACHABLE();
3173#else
3174 if (!Utils::IsInt(16, value)) {
   // NOTE(review): the ReportTooManyTypeArguments(*this) call (orig. line
   // 3175) is missing — extraction artifact; restore from upstream.
3176 }
3177 // We allow concurrent calculation of the number of type arguments. If two
3178 // threads perform this operation it doesn't matter which one wins.
3179 DEBUG_ONLY(intptr_t old_value = num_type_arguments());
3180 DEBUG_ASSERT(old_value == kUnknownNumTypeArguments || old_value == value);
3181 StoreNonPointer<int16_t, int16_t, std::memory_order_relaxed>(
3182 &untag()->num_type_arguments_, value);
3183#endif // defined(DART_PRECOMPILED_RUNTIME)
3184}
3185
3186void Class::set_num_type_arguments_unsafe(intptr_t value) const {
3187 StoreNonPointer(&untag()->num_type_arguments_, value);
3188}
3189
3190void Class::set_has_pragma(bool value) const {
3191 set_state_bits(HasPragmaBit::update(value, state_bits()));
3192}
3193
3194void Class::set_is_isolate_unsendable(bool value) const {
3195 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
3196 set_state_bits(IsIsolateUnsendableBit::update(value, state_bits()));
3197}
3198
3200 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 // NOTE(review): the function signature (orig. 3199) and the bit-update
 // argument of this set_state_bits call (orig. 3202) are missing —
 // extraction artifact; restore from upstream object.cc.
3201 set_state_bits(
3203}
3204
3205void Class::set_is_deeply_immutable(bool value) const {
3206 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
3207 set_state_bits(IsDeeplyImmutableBit::update(value, state_bits()));
3208}
3209
3210void Class::set_is_future_subtype(bool value) const {
3211 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
3212 set_state_bits(IsFutureSubtypeBit::update(value, state_bits()));
3213}
3214
3215void Class::set_can_be_future(bool value) const {
3216 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
3217 set_state_bits(CanBeFutureBit::update(value, state_bits()));
3218}
3219
3220// Initialize class fields of type Array with empty array.
3221void Class::InitEmptyFields() const {
3222 if (Object::empty_array().ptr() == Array::null()) {
3223 // The empty array has not been initialized yet.
3224 return;
3225 }
3226 untag()->set_interfaces(Object::empty_array().ptr());
3227 untag()->set_constants(Object::null_array().ptr());
3228 set_functions(Object::empty_array());
3229 set_fields(Object::empty_array());
3230 set_invocation_dispatcher_cache(Object::empty_array());
3231}
3232
3234 ClassTable* class_table /* = nullptr */) const {
 // NOTE(review): the first signature line (orig. 3233,
 // Class::OffsetToFieldMap) and lines at orig. 3235, 3241, 3243 (including
 // the array/fields handle declarations) are missing — extraction artifact;
 // restore from upstream object.cc. Lazily builds and caches an array
 // mapping compressed-word offsets to instance Fields, walking the
 // superclass chain.
3236 if (untag()->offset_in_words_to_field<std::memory_order_acquire>() ==
3237 Array::null()) {
3238 // Even if multiple threads are calling this concurrently, all of them would
3239 // compute the same array, so we intentionally don't acquire any locks here.
3240 const intptr_t length = untag()->host_instance_size_in_words_;
3242 Class& cls = Class::Handle(this->ptr());
3244 Field& f = Field::Handle();
3245 while (!cls.IsNull()) {
3246 fields = cls.fields();
3247 for (intptr_t i = 0; i < fields.Length(); ++i) {
3248 f ^= fields.At(i);
3249 if (f.is_instance()) {
3250 array.SetAt(f.HostOffset() >> kCompressedWordSizeLog2, f);
3251 }
3252 }
3253 cls = cls.SuperClass(class_table);
3254 }
   // Release store publishes the fully built array; paired with the acquire
   // loads above and below.
3255 untag()->set_offset_in_words_to_field<std::memory_order_release>(
3256 array.ptr());
3257 }
3258 return untag()->offset_in_words_to_field<std::memory_order_acquire>();
3259}
3260
3262 const Array& field_array = Array::Handle(fields());
 // NOTE(review): the function signature (orig. 3261, presumably
 // Class::HasInstanceFields) is missing — extraction artifact. The body
 // returns true iff any declared field is non-static.
3263 Field& field = Field::Handle();
3264 for (intptr_t i = 0; i < field_array.Length(); ++i) {
3265 field ^= field_array.At(i);
3266 if (!field.is_static()) {
3267 return true;
3268 }
3269 }
3270 return false;
3271}
3272
3274 public:
 // NOTE(review): the class declaration line (orig. 3273, `class FunctionName
 // ...`) is missing — extraction artifact. Lookup key wrapping a function
 // name; `tmp_string` is scratch storage used for non-symbol comparisons.
3275 FunctionName(const String& name, String* tmp_string)
3276 : name_(name), tmp_string_(tmp_string) {}
3277 bool Matches(const Function& function) const {
   // Symbols are canonical, so pointer identity suffices; otherwise fall
   // back to a content comparison via the scratch handle.
3278 if (name_.IsSymbol()) {
3279 return name_.ptr() == function.name();
3280 } else {
3281 *tmp_string_ = function.name();
3282 return name_.Equals(*tmp_string_);
3283 }
3284 }
3285 intptr_t Hash() const { return name_.Hash(); }
3286
3287 private:
3288 const String& name_;
3289 String* tmp_string_;
3290};
3291
3292// Traits for looking up Functions by name.
3294 public:
 // NOTE(review): the class declaration line (orig. 3293) and the trailing
 // ClassFunctionsSet typedef (orig. 3312) are missing — extraction artifact;
 // restore from upstream object.cc. Hash-table traits keyed either by a
 // canonical Function or by a FunctionName probe.
3295 static const char* Name() { return "ClassFunctionsTraits"; }
3296 static bool ReportStats() { return false; }
3297
3298 // Called when growing the table.
3299 static bool IsMatch(const Object& a, const Object& b) {
3300 ASSERT(a.IsFunction() && b.IsFunction());
3301 // Function objects are always canonical.
3302 return a.ptr() == b.ptr();
3303 }
3304 static bool IsMatch(const FunctionName& name, const Object& obj) {
3305 return name.Matches(Function::Cast(obj));
3306 }
3307 static uword Hash(const Object& key) {
3308 return String::HashRawSymbol(Function::Cast(key).name());
3309 }
3310 static uword Hash(const FunctionName& name) { return name.Hash(); }
3311};
3313
// Replaces this class's functions array. Above the lookup threshold a hash
// table keyed by function name is (re)built; below it the table is cleared
// and lookups fall back to a linear scan.
3314void Class::SetFunctions(const Array& value) const {
3315 ASSERT(!value.IsNull());
3316 const intptr_t len = value.Length();
3317#if defined(DEBUG)
 // NOTE(review): lines at orig. 3319 and 3321 (a thread/lock assertion and
 // the Function handle declaration) are missing — extraction artifact;
 // restore from upstream object.cc.
3318 Thread* thread = Thread::Current();
3320 if (is_finalized()) {
3322 FunctionType& signature = FunctionType::Handle();
3323 for (intptr_t i = 0; i < len; ++i) {
3324 function ^= value.At(i);
3325 signature = function.signature();
3326 ASSERT(signature.IsFinalized());
3327 }
3328 }
3329#endif
3330 set_functions(value);
3331 if (len >= kFunctionLookupHashThreshold) {
3332 ClassFunctionsSet set(HashTables::New<ClassFunctionsSet>(len, Heap::kOld));
3333 Function& func = Function::Handle();
3334 for (intptr_t i = 0; i < len; ++i) {
3335 func ^= value.At(i);
3336 // Verify that all the functions in the array have this class as owner.
3337 ASSERT(func.Owner() == ptr());
3338 set.Insert(func);
3339 }
3340 untag()->set_functions_hash_table(set.Release().ptr());
3341 } else {
3342 untag()->set_functions_hash_table(Array::null());
3343 }
3344}
3345
3347#if defined(DEBUG)
 // NOTE(review): the function signature (orig. 3346, Class::AddFunction) and
 // a line at orig. 3350 are missing — extraction artifact; restore from
 // upstream object.cc. Appends one function to the functions array, and
 // keeps the name hash table in sync (building it exactly when the length
 // crosses the threshold).
3348 Thread* thread = Thread::Current();
3349 ASSERT(thread->IsDartMutatorThread());
3351 ASSERT(!is_finalized() ||
3352 FunctionType::Handle(function.signature()).IsFinalized());
3353#endif
3354 const Array& arr = Array::Handle(functions());
3355 const Array& new_array =
3356 Array::Handle(Array::Grow(arr, arr.Length() + 1, Heap::kOld));
3357 new_array.SetAt(arr.Length(), function);
3358 set_functions(new_array);
3359 // Add to hash table, if any.
3360 const intptr_t new_len = new_array.Length();
3361 if (new_len == kFunctionLookupHashThreshold) {
3362 // Transition to using hash table.
3363 SetFunctions(new_array);
3364 } else if (new_len > kFunctionLookupHashThreshold) {
3365 ClassFunctionsSet set(untag()->functions_hash_table());
3366 set.Insert(function);
3367 untag()->set_functions_hash_table(set.Release().ptr());
3368 }
3369}
3370
// Returns the index of `needle` in this class's functions array, or -1 if
// absent (or if finalization fails).
3371intptr_t Class::FindFunctionIndex(const Function& needle) const {
3372 Thread* thread = Thread::Current();
3373 if (EnsureIsFinalized(thread) != Error::null()) {
3374 return -1;
3375 }
 // NOTE(review): the REUSABLE_*_HANDLESCOPE lines (orig. 3376-3377) that
 // make the reusable handles below valid are missing — extraction artifact;
 // restore from upstream object.cc.
3378 Array& funcs = thread->ArrayHandle();
3379 Function& function = thread->FunctionHandle();
3380 funcs = current_functions();
3381 ASSERT(!funcs.IsNull());
3382 const intptr_t len = funcs.Length();
3383 for (intptr_t i = 0; i < len; i++) {
3384 function ^= funcs.At(i);
3385 if (needle.ptr() == function.ptr()) {
3386 return i;
3387 }
3388 }
3389 // No function found.
3390 return -1;
3391}
3392
3393FunctionPtr Class::FunctionFromIndex(intptr_t idx) const {
3394 const Array& funcs = Array::Handle(current_functions());
3395 if ((idx < 0) || (idx >= funcs.Length())) {
3396 return Function::null();
3397 }
3398 Function& func = Function::Handle();
3399 func ^= funcs.At(idx);
3400 ASSERT(!func.IsNull());
3401 return func.ptr();
3402}
3403
// Returns the implicit closure function of the function at `idx`, or null if
// the index is invalid or no implicit closure exists.
3404FunctionPtr Class::ImplicitClosureFunctionFromIndex(intptr_t idx) const {
 // NOTE(review): the declaration of `func` (orig. line 3405, initialized
 // from FunctionFromIndex(idx)) is missing — extraction artifact; restore
 // from upstream object.cc.
3406 if (func.IsNull() || !func.HasImplicitClosureFunction()) {
3407 return Function::null();
3408 }
3409 func = func.ImplicitClosureFunction();
3410 ASSERT(!func.IsNull());
3411 return func.ptr();
3412}
3413
3415 Thread* thread = Thread::Current();
 // NOTE(review): the function signature (orig. 3414,
 // Class::FindImplicitClosureFunctionIndex) and the
 // REUSABLE_*_HANDLESCOPE lines (orig. 3419-3420) are missing — extraction
 // artifact; restore from upstream object.cc. Scans every function's
 // implicit closure for a pointer match with `needle`.
3416 if (EnsureIsFinalized(thread) != Error::null()) {
3417 return -1;
3418 }
3421 Array& funcs = thread->ArrayHandle();
3422 Function& function = thread->FunctionHandle();
3423 funcs = current_functions();
3424 ASSERT(!funcs.IsNull());
3425 Function& implicit_closure = Function::Handle(thread->zone());
3426 const intptr_t len = funcs.Length();
3427 for (intptr_t i = 0; i < len; i++) {
3428 function ^= funcs.At(i);
3429 implicit_closure = function.implicit_closure_function();
3430 if (implicit_closure.IsNull()) {
3431 // Skip non-implicit closure functions.
3432 continue;
3433 }
3434 if (needle.ptr() == implicit_closure.ptr()) {
3435 return i;
3436 }
3437 }
3438 // No function found.
3439 return -1;
3440}
3441
3443 const Function& needle) const {
 // NOTE(review): the first signature line (orig. 3442) and the
 // REUSABLE_*_HANDLESCOPE lines (orig. 3448-3449) are missing — extraction
 // artifact; restore from upstream object.cc. Searches the invocation
 // dispatcher cache for a function entry equal to `needle`.
3444 Thread* thread = Thread::Current();
3445 if (EnsureIsFinalized(thread) != Error::null()) {
3446 return -1;
3447 }
3450 Array& funcs = thread->ArrayHandle();
3451 Object& object = thread->ObjectHandle();
3452 funcs = invocation_dispatcher_cache();
3453 ASSERT(!funcs.IsNull());
3454 const intptr_t len = funcs.Length();
3455 for (intptr_t i = 0; i < len; i++) {
3456 object = funcs.At(i);
3457 // The invocation_dispatcher_cache is a table with some entries that
3458 // are functions.
3459 if (object.IsFunction()) {
3460 if (Function::Cast(object).ptr() == needle.ptr()) {
3461 return i;
3462 }
3463 }
3464 }
3465 // No function found.
3466 return -1;
3467}
3468
// Returns the invocation-dispatcher-cache entry at `idx` if it is a
// function, else null (the cache also holds non-function entries).
3469FunctionPtr Class::InvocationDispatcherFunctionFromIndex(intptr_t idx) const {
3470 Thread* thread = Thread::Current();
 // NOTE(review): the REUSABLE_*_HANDLESCOPE lines (orig. 3471-3472) are
 // missing — extraction artifact; restore from upstream object.cc.
3473 Array& dispatcher_cache = thread->ArrayHandle();
3474 Object& object = thread->ObjectHandle();
3475 dispatcher_cache = invocation_dispatcher_cache();
3476 object = dispatcher_cache.At(idx);
3477 if (!object.IsFunction()) {
3478 return Function::null();
3479 }
3480 return Function::Cast(object).ptr();
3481}
3482
3483void Class::set_state_bits(intptr_t bits) const {
3484 StoreNonPointer<uint32_t, uint32_t, std::memory_order_release>(
3485 &untag()->state_bits_, static_cast<uint32_t>(bits));
3486}
3487
3488void Class::set_library(const Library& value) const {
3489 untag()->set_library(value.ptr());
3490}
3491
3493 ASSERT((num_type_arguments() == kUnknownNumTypeArguments) ||
 // NOTE(review): the function signature (orig. 3492, presumably
 // Class::set_type_parameters) is missing — extraction artifact; restore
 // from upstream object.cc. May only be set before the type-argument count
 // is computed, or on prefinalized classes.
3494 is_prefinalized());
3495 untag()->set_type_parameters(value.ptr());
3496}
3497
3498void Class::set_functions(const Array& value) const {
3499 // Ensure all writes to the [Function]s are visible by the time the array
3500 // is visible.
3501 untag()->set_functions<std::memory_order_release>(value.ptr());
3502}
3503
3504void Class::set_fields(const Array& value) const {
3505 // Ensure all writes to the [Field]s are visible by the time the array
3506 // is visible.
3507 untag()->set_fields<std::memory_order_release>(value.ptr());
3508}
3509
3510void Class::set_invocation_dispatcher_cache(const Array& cache) const {
3511 // Ensure all writes to the cache are visible by the time the array
3512 // is visible.
3513 untag()->set_invocation_dispatcher_cache<std::memory_order_release>(
3514 cache.ptr());
3515}
3516
3517void Class::set_declaration_instance_type_arguments(
3518 const TypeArguments& value) const {
3519 ASSERT(value.IsNull() || (value.IsCanonical() && value.IsOld()));
3520 ASSERT((declaration_instance_type_arguments() == TypeArguments::null()) ||
3521 (declaration_instance_type_arguments() == value.ptr()));
3522 untag()->set_declaration_instance_type_arguments<std::memory_order_release>(
3523 value.ptr());
3524}
3525
  // Computes (and caches) the instance type argument vector for this class's
  // own declaration, canonicalized in old space.
  const intptr_t num_type_arguments = NumTypeArguments();
  if (num_type_arguments == 0) {
    return TypeArguments::null();
  }
  // Fast path: return the cached vector if already computed.
  if (declaration_instance_type_arguments() != TypeArguments::null()) {
    return declaration_instance_type_arguments();
  }
  Thread* thread = Thread::Current();
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  // Re-check under the program lock: another thread may have won the race.
  if (declaration_instance_type_arguments() != TypeArguments::null()) {
    return declaration_instance_type_arguments();
  }
  Zone* zone = thread->zone();
  auto& args = TypeArguments::Handle(zone);
  auto& type = AbstractType::Handle(zone);
  const intptr_t num_type_parameters = NumTypeParameters(thread);
  if (num_type_arguments == num_type_parameters) {
    args = Type::Cast(type).arguments();
  } else {
    // Build the full vector from the super type's instance type arguments
    // followed by this class's own declared type arguments.
    type = super_type();
    const auto& super_args = TypeArguments::Handle(
        zone, Type::Cast(type).GetInstanceTypeArguments(thread));
    if ((num_type_parameters == 0) ||
        (!super_args.IsNull() && (super_args.Length() == num_type_arguments))) {
      // The superclass vector already covers every position, so reuse it.
      args = super_args.ptr();
    } else {
      args = TypeArguments::New(num_type_arguments);
      const intptr_t offset = num_type_arguments - num_type_parameters;
      for (intptr_t i = 0; i < offset; ++i) {
        type = super_args.TypeAtNullSafe(i);
        args.SetTypeAt(i, type);
      }
      const auto& decl_args =
          TypeArguments::Handle(zone, Type::Cast(type).arguments());
      for (intptr_t i = 0; i < num_type_parameters; ++i) {
        type = decl_args.TypeAt(i);
        args.SetTypeAt(offset + i, type);
      }
    }
  }
  args = args.Canonicalize(thread);
  set_declaration_instance_type_arguments(args);
  return args.ptr();
}
3573
    Thread* thread,
    const TypeArguments& type_arguments,
    bool canonicalize) const {
  // Expands [type_arguments] (the vector of this class's declared type
  // arguments) into the full instance type argument vector, which also
  // covers superclass positions.
  const intptr_t num_type_arguments = NumTypeArguments();
  if (num_type_arguments == 0) {
    return TypeArguments::null();
  }
  Zone* zone = thread->zone();
  auto& args = TypeArguments::Handle(zone);
  const intptr_t num_type_parameters = NumTypeParameters(thread);
  ASSERT(type_arguments.IsNull() ||
         type_arguments.Length() == num_type_parameters);
  if (num_type_arguments == num_type_parameters) {
    // No superclass positions to fill: the vector can be used as-is.
    args = type_arguments.ptr();
  } else {
    if (num_type_parameters == 0) {
      return args.ptr();
    }
    args = args.InstantiateFrom(
        zone, type_arguments.ToInstantiatorTypeArguments(thread, *this)),
        Object::null_type_arguments(), kAllFree, Heap::kOld);
  }
  if (canonicalize) {
    args = args.Canonicalize(thread);
  }
  return args.ptr();
}
3604
intptr_t Class::NumTypeParameters(Thread* thread) const {
  // Returns the number of type parameters declared by this class.
  if (!is_declaration_loaded()) {
    // Declaration not loaded yet: only the well-known list classes are
    // reported as generic.
    const intptr_t cid = id();
    if ((cid == kArrayCid) || (cid == kImmutableArrayCid) ||
        (cid == kGrowableObjectArrayCid)) {
      return 1;  // List's type parameter may not have been parsed yet.
    }
    return 0;
  }
    return 0;
  }
  TypeParameters& type_params = thread->TypeParametersHandle();
  type_params = type_parameters();
  return type_params.Length();
}
3623
intptr_t Class::ComputeNumTypeArguments() const {
  // Computes the length of the full instance type argument vector: the
  // superclass's count plus this class's own type parameters, minus any
  // overlap shared with the super type's trailing type arguments.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  auto isolate_group = thread->isolate_group();
  const intptr_t num_type_params = NumTypeParameters();

  if ((super_type() == AbstractType::null()) ||
      (super_type() == isolate_group->object_store()->object_type())) {
    // No superclass (or Object): only our own type parameters contribute.
    return num_type_params;
  }

  const auto& sup_type = Type::Handle(zone, super_type());
  const auto& sup_class = Class::Handle(zone, sup_type.type_class());
  const intptr_t sup_class_num_type_args = sup_class.NumTypeArguments();
  if (num_type_params == 0) {
    return sup_class_num_type_args;
  }

  const auto& sup_type_args = TypeArguments::Handle(zone, sup_type.arguments());
  if (sup_type_args.IsNull()) {
    // The super type is raw or the super class is non generic.
    // In either case, overlapping is not possible.
    return sup_class_num_type_args + num_type_params;
  }

  const intptr_t sup_type_args_length = sup_type_args.Length();
  // Determine the maximum overlap of a prefix of the vector consisting of the
  // type parameters of this class with a suffix of the vector consisting of the
  // type arguments of the super type of this class.
  // The number of own type arguments of this class is the number of its type
  // parameters minus the number of type arguments in the overlap.
  // Attempt to overlap the whole vector of type parameters; reduce the size
  // of the vector (keeping the first type parameter) until it fits or until
  // its size is zero.
  auto& sup_type_arg = AbstractType::Handle(zone);
  for (intptr_t num_overlapping_type_args =
           (num_type_params < sup_type_args_length) ? num_type_params
                                                    : sup_type_args_length;
       num_overlapping_type_args > 0; num_overlapping_type_args--) {
    intptr_t i = 0;
    for (; i < num_overlapping_type_args; i++) {
      sup_type_arg = sup_type_args.TypeAt(sup_type_args_length -
                                          num_overlapping_type_args + i);
      ASSERT(!sup_type_arg.IsNull());
      if (!sup_type_arg.IsTypeParameter()) break;
      // The only type parameters appearing in the type arguments of the super
      // type are those declared by this class. Their finalized indices depend
      // on the number of type arguments being computed here. Therefore, they
      // cannot possibly be finalized yet.
      ASSERT(!TypeParameter::Cast(sup_type_arg).IsFinalized());
      if (TypeParameter::Cast(sup_type_arg).index() != i ||
          TypeParameter::Cast(sup_type_arg).IsNullable()) {
        break;
      }
    }
    if (i == num_overlapping_type_args) {
      // Overlap found.
      return sup_class_num_type_args + num_type_params -
             num_overlapping_type_args;
    }
  }
  // No overlap found.
  return sup_class_num_type_args + num_type_params;
}
3689
3690intptr_t Class::NumTypeArguments() const {
3691 // Return cached value if already calculated.
3692 intptr_t num_type_args = num_type_arguments();
3693 if (num_type_args != kUnknownNumTypeArguments) {
3694 return num_type_args;
3695 }
3696
3697#if defined(DART_PRECOMPILED_RUNTIME)
3698 UNREACHABLE();
3699 return 0;
3700#else
3701 num_type_args = ComputeNumTypeArguments();
3702 ASSERT(num_type_args != kUnknownNumTypeArguments);
3703 set_num_type_arguments(num_type_args);
3704 return num_type_args;
3705#endif // defined(DART_PRECOMPILED_RUNTIME)
3706}
3707
TypeArgumentsPtr Class::DefaultTypeArguments(Zone* zone) const {
    // No declared type parameters: the empty vector is the default.
    return Object::empty_type_arguments().ptr();
  }
  return TypeParameters::Handle(zone, type_parameters()).defaults();
}
3714
3715ClassPtr Class::SuperClass(ClassTable* class_table /* = nullptr */) const {
3716 Thread* thread = Thread::Current();
3717 Zone* zone = thread->zone();
3718 if (class_table == nullptr) {
3719 class_table = thread->isolate_group()->class_table();
3720 }
3721
3722 if (super_type() == AbstractType::null()) {
3723 if (id() == kTypeArgumentsCid) {
3724 // Pretend TypeArguments objects are Dart instances.
3725 return class_table->At(kInstanceCid);
3726 }
3727 return Class::null();
3728 }
3729 const AbstractType& sup_type = AbstractType::Handle(zone, super_type());
3730 const intptr_t type_class_id = sup_type.type_class_id();
3731 return class_table->At(type_class_id);
3732}
3733
3734void Class::set_super_type(const Type& value) const {
3735 ASSERT(value.IsNull() || !value.IsDynamicType());
3736 untag()->set_super_type(value.ptr());
3737}
3738
3739TypeParameterPtr Class::TypeParameterAt(intptr_t index,
3740 Nullability nullability) const {
3741 ASSERT(index >= 0 && index < NumTypeParameters());
3742 TypeParameter& type_param =
3743 TypeParameter::Handle(TypeParameter::New(*this, 0, index, nullability));
3744 // Finalize type parameter only if its declaring class is
3745 // finalized and available in the current class table.
3746 if (is_type_finalized() && (type_param.parameterized_class() == ptr())) {
3747 type_param ^= ClassFinalizer::FinalizeType(type_param);
3748 }
3749 return type_param.ptr();
3750}
3751
  switch (cid) {
    // Size in bytes of the unboxed payload for each supported guarded cid.
    case kDoubleCid:
      return sizeof(UntaggedDouble::value_);
    case kFloat32x4Cid:
      return sizeof(UntaggedFloat32x4::value_);
    case kFloat64x2Cid:
      return sizeof(UntaggedFloat64x2::value_);
    default:
      // All remaining unboxed fields use a 64-bit integer (Mint) payload.
      return sizeof(UntaggedMint::value_);
  }
}
3764
// Assigns host/target offsets to every instance field, allocating a
// type_arguments slot when this class is the first generic one in its super
// chain, and returns the bitmap marking which instance words hold unboxed
// (non-pointer) data.
UnboxedFieldBitmap Class::CalculateFieldOffsets() const {
  Array& flds = Array::Handle(fields());
  const Class& super = Class::Handle(SuperClass());
  intptr_t host_offset = 0;
  UnboxedFieldBitmap host_bitmap{};
  // Target offsets might differ if the word size are different
  intptr_t target_offset = 0;
  intptr_t host_type_args_field_offset = kNoTypeArguments;
  intptr_t target_type_args_field_offset = RTN::Class::kNoTypeArguments;
  if (super.IsNull()) {
    // Root of the hierarchy: start laying out past the Instance header.
    host_offset = Instance::NextFieldOffset();
    target_offset = RTN::Instance::NextFieldOffset();
    ASSERT(host_offset > 0);
    ASSERT(target_offset > 0);
  } else {
    ASSERT(super.is_finalized() || super.is_prefinalized());
    // Continue where the superclass's layout left off, inheriting its
    // type_arguments slot (if it has one).
    host_type_args_field_offset = super.host_type_arguments_field_offset();
    target_type_args_field_offset = super.target_type_arguments_field_offset();
    host_offset = super.host_next_field_offset();
    ASSERT(host_offset > 0);
    target_offset = super.target_next_field_offset();
    ASSERT(target_offset > 0);
    // We should never call CalculateFieldOffsets for native wrapper
    // classes, assert this.
    ASSERT(num_native_fields() == 0);
    const intptr_t num_native_fields = super.num_native_fields();
    }

        super.id());
  }
  // If the super class is parameterized, use the same type_arguments field,
  // otherwise, if this class is the first in the super chain to be
  // parameterized, introduce a new type_arguments field.
  if (host_type_args_field_offset == kNoTypeArguments) {
    ASSERT(target_type_args_field_offset == RTN::Class::kNoTypeArguments);
    if (IsGeneric()) {
      // The instance needs a type_arguments field.
      host_type_args_field_offset = host_offset;
      target_type_args_field_offset = target_offset;
      host_offset += kCompressedWordSize;
      target_offset += compiler::target::kCompressedWordSize;
    }
  } else {
    ASSERT(target_type_args_field_offset != RTN::Class::kNoTypeArguments);
  }

  set_type_arguments_field_offset(host_type_args_field_offset,
                                  target_type_args_field_offset);
  ASSERT(host_offset > 0);
  ASSERT(target_offset > 0);
  Field& field = Field::Handle();
  const intptr_t len = flds.Length();
  for (intptr_t i = 0; i < len; i++) {
    field ^= flds.At(i);
    // Offset is computed only for instance fields.
    if (!field.is_static()) {
      ASSERT(field.HostOffset() == 0);
      ASSERT(field.TargetOffset() == 0);
      field.SetOffset(host_offset, target_offset);

      if (field.is_unboxed()) {
        const intptr_t field_size =
            UnboxedFieldSizeInBytesByCid(field.guarded_cid());

        const intptr_t host_num_words = field_size / kCompressedWordSize;
        const intptr_t host_next_offset = host_offset + field_size;
        const intptr_t host_next_position =
            host_next_offset / kCompressedWordSize;

        const intptr_t target_next_offset = target_offset + field_size;
        const intptr_t target_next_position =
            target_next_offset / compiler::target::kCompressedWordSize;

        // The bitmap has fixed length. Checks if the offset position is smaller
        // than its length. If it is not, than the field should be boxed
        if (host_next_position <= UnboxedFieldBitmap::Length() &&
            target_next_position <= UnboxedFieldBitmap::Length()) {
          for (intptr_t j = 0; j < host_num_words; j++) {
            // Activate the respective bit in the bitmap, indicating that the
            // content is not a pointer
            host_bitmap.Set(host_offset / kCompressedWordSize);
            host_offset += kCompressedWordSize;
          }

          ASSERT(host_offset == host_next_offset);
          target_offset = target_next_offset;
        } else {
          // Make the field boxed
          field.set_is_unboxed(false);
          host_offset += kCompressedWordSize;
          target_offset += compiler::target::kCompressedWordSize;
        }
      } else {
        // Boxed (pointer) field: occupies one compressed word.
        host_offset += kCompressedWordSize;
        target_offset += compiler::target::kCompressedWordSize;
      }
    }
  }

  const intptr_t host_instance_size = RoundedAllocationSize(host_offset);
  const intptr_t target_instance_size =
      compiler::target::RoundedAllocationSize(target_offset);
    // Many parts of the compiler assume offsets can be represented with
    // int32_t.
    FATAL("Too many fields in %s\n", UserVisibleNameCString());
  }
  set_next_field_offset(host_offset, target_offset);
  return host_bitmap;
}
3880
                                    const Array& args_desc,
                                    const Function& dispatcher) const {
  auto thread = Thread::Current();

  ASSERT(target_name.ptr() == dispatcher.name());

  // Insert [dispatcher] into the dispatcher set backing the cache, creating
  // a fresh old-space table if the cache is still the shared empty array.
  DispatcherSet dispatchers(invocation_dispatcher_cache() ==
                                    Array::empty_array().ptr()
                                ? HashTables::New<DispatcherSet>(4, Heap::kOld)
                                : invocation_dispatcher_cache());
  dispatchers.Insert(dispatcher);
  set_invocation_dispatcher_cache(dispatchers.Release());
}
3896
FunctionPtr Class::GetInvocationDispatcher(const String& target_name,
                                           const Array& args_desc,
                                           bool create_if_absent) const {
  // Looks up the dispatcher for [target_name]/[args_desc] of the given kind,
  // creating it under the program write lock when absent and
  // [create_if_absent] is set.
  ASSERT(kind == UntaggedFunction::kNoSuchMethodDispatcher ||
         kind == UntaggedFunction::kInvokeFieldDispatcher ||
         kind == UntaggedFunction::kDynamicInvocationForwarder);
  auto thread = Thread::Current();
  auto Z = thread->zone();
  auto& function = Function::Handle(Z);

  // First we'll try to find it without using locks.
  DispatcherKey key(target_name, args_desc, kind);
  if (invocation_dispatcher_cache() != Array::empty_array().ptr()) {
    DispatcherSet dispatchers(Z, invocation_dispatcher_cache());
    function ^= dispatchers.GetOrNull(key);
    dispatchers.Release();
  }
  if (!function.IsNull() || !create_if_absent) {
    return function.ptr();
  }

  // If we failed to find it and possibly need to create it, use a write lock.
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());

  // Try to find it again & return if it was added in the meantime.
  if (invocation_dispatcher_cache() != Array::empty_array().ptr()) {
    DispatcherSet dispatchers(Z, invocation_dispatcher_cache());
    function ^= dispatchers.GetOrNull(key);
    dispatchers.Release();
  }
  if (!function.IsNull()) return function.ptr();

  // Otherwise create it & add it.
  function = CreateInvocationDispatcher(target_name, args_desc, kind);
  AddInvocationDispatcher(target_name, args_desc, function);
  return function.ptr();
}
3935
// Builds a synthetic dispatcher function whose signature matches [args_desc]:
// all parameter and result types are `dynamic`, and the function is hidden
// from debugging/reflection.
FunctionPtr Class::CreateInvocationDispatcher(
    const String& target_name,
    const Array& args_desc,
    UntaggedFunction::Kind kind) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Function& invocation = Function::Handle(
      zone, Function::New(
                signature,
                String::Handle(zone, Symbols::New(thread, target_name)), kind,
                false,  // Not static.
                false,  // Not const.
                false,  // Not abstract.
                false,  // Not external.
                false,  // Not native.
  ArgumentsDescriptor desc(args_desc);
  const intptr_t type_args_len = desc.TypeArgsLen();
  if (type_args_len > 0) {
    // Make dispatcher function generic, since type arguments are passed.
    const auto& type_parameters =
        TypeParameters::Handle(zone, TypeParameters::New(type_args_len));
    // Allow any type, as any type checking is compiled into the dispatcher.
    auto& bound = Type::Handle(
        zone, IsolateGroup::Current()->object_store()->nullable_object_type());
    for (intptr_t i = 0; i < type_args_len; i++) {
      // The name of the type parameter does not matter, as a type error using
      // it should never be thrown.
      type_parameters.SetNameAt(i, Symbols::OptimizedOut());
      type_parameters.SetBoundAt(i, bound);
      // Type arguments will always be provided, so the default is not used.
      type_parameters.SetDefaultAt(i, Object::dynamic_type());
    }
  }

  signature.set_num_fixed_parameters(desc.PositionalCount());
  signature.SetNumOptionalParameters(desc.NamedCount(),
                                     false);  // Not positional.
  signature.set_parameter_types(
      Array::Handle(zone, Array::New(desc.Count(), Heap::kOld)));
  invocation.CreateNameArray();
  // Receiver.
  signature.SetParameterTypeAt(0, Object::dynamic_type());
  invocation.SetParameterNameAt(0, Symbols::This());
  // Remaining positional parameters.
  for (intptr_t i = 1; i < desc.PositionalCount(); i++) {
    signature.SetParameterTypeAt(i, Object::dynamic_type());
    // Positional parameters get synthetic names ":p1", ":p2", ...
    char name[64];
    Utils::SNPrint(name, 64, ":p%" Pd, i);
    invocation.SetParameterNameAt(
        i, String::Handle(zone, Symbols::New(thread, name)));
  }

  // Named parameters.
  for (intptr_t i = 0; i < desc.NamedCount(); i++) {
    const intptr_t param_index = desc.PositionAt(i);
    const auto& param_name = String::Handle(zone, desc.NameAt(i));
    signature.SetParameterTypeAt(param_index, Object::dynamic_type());
    signature.SetParameterNameAt(param_index, param_name);
  }
  signature.FinalizeNameArray();
  signature.set_result_type(Object::dynamic_type());
  invocation.set_is_debuggable(false);
  invocation.set_is_visible(false);
  invocation.set_is_reflectable(false);
  invocation.set_saved_args_desc(args_desc);

  signature ^= ClassFinalizer::FinalizeType(signature);
  invocation.SetSignature(signature);

  return invocation.ptr();
}
4011
4012// Method extractors are used to create implicit closures from methods.
4013// When an expression obj.M is evaluated for the first time and receiver obj
4014// does not have a getter called M but has a method called M then an extractor
4015// is created and injected as a getter (under the name get:M) into the class
4016// owning method M.
FunctionPtr Function::CreateMethodExtractor(const String& getter_name) const {
  // Builds the synthetic getter (get:M) that extracts an implicit closure
  // for this method, registers it on the owning class, and returns it.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(Field::IsGetterName(getter_name));
  const Function& closure_function =

  const Class& owner = Class::Handle(zone, closure_function.Owner());
  const Function& extractor = Function::Handle(
      zone,
      Function::New(signature,
                    String::Handle(zone, Symbols::New(thread, getter_name)),
                    UntaggedFunction::kMethodExtractor,
                    false,  // Not static.
                    false,  // Not const.
                    is_abstract(),
                    false,  // Not external.
                    false,  // Not native.
                    owner, TokenPosition::kMethodExtractor));

  // Initialize signature: receiver is a single fixed parameter.
  const intptr_t kNumParameters = 1;
  signature.set_num_fixed_parameters(kNumParameters);
  signature.SetNumOptionalParameters(0, false);
  signature.set_parameter_types(Object::synthetic_getter_parameter_types());
#if !defined(DART_PRECOMPILED_RUNTIME)
  extractor.set_positional_parameter_names(
      Object::synthetic_getter_parameter_names());
#endif
  signature.set_result_type(Object::dynamic_type());

  extractor.InheritKernelOffsetFrom(*this);

  // Link the extractor to the closure function it extracts, and hide it
  // from the debugger and stack traces.
  extractor.set_extracted_method_closure(closure_function);
  extractor.set_is_debuggable(false);
  extractor.set_is_visible(false);

  signature ^= ClassFinalizer::FinalizeType(signature);
  extractor.SetSignature(signature);

  owner.AddFunction(extractor);

  return extractor.ptr();
}
4062
FunctionPtr Function::GetMethodExtractor(const String& getter_name) const {
  // Returns the method extractor for [getter_name], creating it under the
  // program write lock when it does not exist yet.
  ASSERT(Field::IsGetterName(getter_name));
  const Function& closure_function =
  const Class& owner = Class::Handle(closure_function.Owner());
  Thread* thread = Thread::Current();
  if (owner.EnsureIsFinalized(thread) != Error::null()) {
    return Function::null();
  }
  IsolateGroup* group = thread->isolate_group();
      Resolver::ResolveDynamicFunction(thread->zone(), owner, getter_name));
  if (result.IsNull()) {
    // Not found: re-check under the write lock and create if still absent.
    SafepointWriteRwLocker ml(thread, group->program_lock());
    result = owner.LookupDynamicFunctionUnsafe(getter_name);
    if (result.IsNull()) {
      result = CreateMethodExtractor(getter_name);
    }
  }
  ASSERT(result.kind() == UntaggedFunction::kMethodExtractor);
  return result.ptr();
}
4085
4086// Record field getters are used to access fields of arbitrary
4087// record instances dynamically.
FunctionPtr Class::CreateRecordFieldGetter(const String& getter_name) const {
  // Builds a synthetic dynamic getter used to read a record field, adds it
  // to this class, and returns it.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(Field::IsGetterName(getter_name));
  const Function& getter = Function::Handle(
      zone,
      Function::New(signature,
                    String::Handle(zone, Symbols::New(thread, getter_name)),
                    UntaggedFunction::kRecordFieldGetter,
                    false,  // Not static.
                    false,  // Not const.
                    false,  // Not abstract.
                    false,  // Not external.
                    false,  // Not native.

  // Initialize signature: receiver is a single fixed parameter.
  const intptr_t kNumParameters = 1;
  signature.set_num_fixed_parameters(kNumParameters);
  signature.SetNumOptionalParameters(0, false);
  signature.set_parameter_types(Object::synthetic_getter_parameter_types());
#if !defined(DART_PRECOMPILED_RUNTIME)
  getter.set_positional_parameter_names(
      Object::synthetic_getter_parameter_names());
#endif
  signature.set_result_type(Object::dynamic_type());

  // Hidden from the debugger and stack traces.
  getter.set_is_debuggable(false);
  getter.set_is_visible(false);

  signature ^= ClassFinalizer::FinalizeType(signature);
  getter.SetSignature(signature);

  AddFunction(getter);

  return getter.ptr();
}
4127
FunctionPtr Class::GetRecordFieldGetter(const String& getter_name) const {
  // Returns the record field getter for [getter_name], creating it under the
  // program write lock if it has not been created yet.
  ASSERT(Field::IsGetterName(getter_name));
  Thread* thread = Thread::Current();
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  Function& result = Function::Handle(thread->zone(),
                                      LookupDynamicFunctionUnsafe(getter_name));
  if (result.IsNull()) {
    result = CreateRecordFieldGetter(getter_name);
  }
  ASSERT(result.kind() == UntaggedFunction::kRecordFieldGetter);
  return result.ptr();
}
4141
                                 const Object& metadata_obj,
                                 const String& pragma_name,
                                 bool multiple,
                                 Object* options) {
  // Scans [metadata_obj] (an annotation array) for @pragma instances whose
  // name field equals [pragma_name]; optionally returns their options.
  auto IG = T->isolate_group();
  auto Z = T->zone();

  // If there is a compile-time error while evaluating the metadata, we will
  // simply claim there was no @pragma annotation.
  if (metadata_obj.IsNull() || metadata_obj.IsLanguageError()) {
    return false;
  }
  ASSERT(metadata_obj.IsArray());

  auto& metadata = Array::Cast(metadata_obj);
  auto& pragma_class = Class::Handle(Z, IG->object_store()->pragma_class());
  if (pragma_class.IsNull()) {
    // Precompiler may drop pragma class.
    return false;
  }
  auto& pragma_name_field =
      Field::Handle(Z, pragma_class.LookupField(Symbols::name()));
  auto& pragma_options_field =
      Field::Handle(Z, pragma_class.LookupField(Symbols::options()));

  auto& pragma = Object::Handle(Z);
  bool found = false;
  auto& options_value = Object::Handle(Z);
  auto& results = GrowableObjectArray::Handle(Z);
  if (multiple) {
    ASSERT(options != nullptr);
    results ^= GrowableObjectArray::New(1);
  }
  for (intptr_t i = 0; i < metadata.Length(); ++i) {
    pragma = metadata.At(i);
    // Skip annotations that are not pragma instances with a matching name.
    if (pragma.clazz() != pragma_class.ptr() ||
        Instance::Cast(pragma).GetField(pragma_name_field) !=
            pragma_name.ptr()) {
      continue;
    }
    options_value = Instance::Cast(pragma).GetField(pragma_options_field);
    found = true;
    if (multiple) {
      // Collect every matching pragma's options and keep scanning.
      results.Add(options_value);
      continue;
    }
    if (options != nullptr) {
      *options = options_value.ptr();
    }
    return true;
  }

  if (found && options != nullptr) {
    *options = results.ptr();
  }
  // NOTE(review): in `multiple` mode this returns false even when matching
  // pragmas were collected into *options above — confirm callers in that
  // mode rely only on the collected options, not the return value.
  return false;
}
4200
                         bool only_core,
                         const Object& obj,
                         const String& pragma_name,
                         bool multiple,
                         Object* options) {
  auto Z = T->zone();
  auto& lib = Library::Handle(Z);

  // Determine the library that owns [obj]; bail out early when the object's
  // has_pragma bit already says there is nothing to find.
  if (obj.IsLibrary()) {
    lib = Library::Cast(obj).ptr();
  } else if (obj.IsClass()) {
    auto& klass = Class::Cast(obj);
    if (!klass.has_pragma()) return false;
    lib = klass.library();
  } else if (obj.IsFunction()) {
    auto& function = Function::Cast(obj);
    if (!function.has_pragma()) return false;
    lib = Class::Handle(Z, function.Owner()).library();
  } else if (obj.IsField()) {
    auto& field = Field::Cast(obj);
    if (!field.has_pragma()) return false;
    lib = Class::Handle(Z, field.Owner()).library();
  } else {
    UNREACHABLE();
  }

  if (only_core && !lib.IsAnyCoreLibrary()) {
    return false;
  }

  // Evaluating metadata may need to unwind (e.g. during hot reload).
  Object& metadata_obj = Object::Handle(Z, lib.GetMetadata(obj));
  if (metadata_obj.IsUnwindError()) {
    Report::LongJump(UnwindError::Cast(metadata_obj));
  }

  return FindPragmaInMetadata(T, metadata_obj, pragma_name, multiple, options);
}
4239
4243
  // Forwarder names carry the Symbols::DynamicPrefix() ("dyn:") prefix.
  return String::StartsWith(name, Symbols::DynamicPrefix().ptr());
}
4247
  const intptr_t kDynamicPrefixLength = 4;  // "dyn:"
  ASSERT(Symbols::DynamicPrefix().Length() == kDynamicPrefixLength);
  // Strip the "dyn:" prefix to recover the original member name.
  return Symbols::New(Thread::Current(), name, kDynamicPrefixLength,
                      name.Length() - kDynamicPrefixLength);
}
4254
  // Prepend the dynamic-invocation prefix to [name].
  return Symbols::FromConcat(Thread::Current(), Symbols::DynamicPrefix(), name);
}
4258
4259#if !defined(DART_PRECOMPILED_RUNTIME)
    const String& mangled_name) const {
  // Builds the forwarder as a clone of this function, then rewrites its
  // name, kind and compilation state.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  Function& forwarder = Function::Handle(zone);
  forwarder ^= Object::Clone(*this, Heap::kOld);


  forwarder.set_name(mangled_name);
  forwarder.set_is_native(false);
  // TODO(dartbug.com/37737): Currently, we intentionally keep the recognized
  // kind when creating the dynamic invocation forwarder.
  forwarder.set_kind(UntaggedFunction::kDynamicInvocationForwarder);
  forwarder.set_is_debuggable(false);

  // TODO(vegorov) for error reporting reasons it is better to make this
  // function visible and instead use a TailCall to invoke the target.
  // Our TailCall instruction is not ready for such usage though it
  // blocks inlining and can't take Function-s only Code objects.
  forwarder.set_is_visible(false);

  // Reset all compilation/optimization state inherited from the clone.
  forwarder.ClearICDataArray();
  forwarder.ClearCode();
  forwarder.set_usage_counter(0);
  forwarder.set_deoptimization_counter(0);
  forwarder.set_optimized_instruction_count(0);
  forwarder.set_inlining_depth(0);
  forwarder.set_optimized_call_site_count(0);

  forwarder.InheritKernelOffsetFrom(*this);
  forwarder.SetForwardingTarget(*this);

  return forwarder.ptr();
}
4297
    const String& mangled_name,
    bool allow_add /*=true*/) const {
  // Returns the dynamic invocation forwarder for [mangled_name]: a cached
  // one, this function itself when no forwarder is needed, or (when
  // [allow_add]) a newly created one registered under the write lock.
  auto thread = Thread::Current();
  auto zone = thread->zone();
  const Class& owner = Class::Handle(zone, Owner());

  // First we'll try to find it without using locks.
      mangled_name, Array::null_array(),
      UntaggedFunction::kDynamicInvocationForwarder,
      /*create_if_absent=*/false);
  if (!result.IsNull()) return result.ptr();

  const bool needs_dyn_forwarder =
      kernel::NeedsDynamicInvocationForwarder(*this);
  if (!needs_dyn_forwarder) {
    return ptr();
  }

  if (!allow_add) {
    return Function::null();
  }

  // If we failed to find it and possibly need to create it, use a write lock.
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());

  // Try to find it again & return if it was added in the mean time.
      mangled_name, Array::null_array(),
      UntaggedFunction::kDynamicInvocationForwarder,
      /*create_if_absent=*/false);
  if (!result.IsNull()) return result.ptr();

  // Otherwise create it & add it.
  owner.AddInvocationDispatcher(mangled_name, Array::null_array(), result);
  return result.ptr();
}
4339
4340#endif
4341
    AbstractType* subtype,
    AbstractType* supertype,
    const TypeArguments& instantiator_type_args,
    const TypeArguments& function_type_args) {
  // Instantiates both types in place (if not yet instantiated), then checks
  // whether [subtype] is a subtype of [supertype].
  if (!subtype->IsInstantiated()) {
    *subtype = subtype->InstantiateFrom(
        instantiator_type_args, function_type_args, kAllFree, Heap::kOld);
  }
  if (!supertype->IsInstantiated()) {
    *supertype = supertype->InstantiateFrom(
        instantiator_type_args, function_type_args, kAllFree, Heap::kOld);
  }
  return subtype->IsSubtypeOf(*supertype, Heap::kOld);
}
4357
4358ArrayPtr Class::invocation_dispatcher_cache() const {
4359 return untag()->invocation_dispatcher_cache<std::memory_order_acquire>();
4360}
4361
// Finalizes this class's field layout and, when the class is registered in
// the class table, publishes its size and unboxed-fields bitmap there.
void Class::Finalize() const {
  auto thread = Thread::Current();
  auto isolate_group = thread->isolate_group();
  ASSERT(!is_finalized());
  // Prefinalized classes have a VM internal representation and no Dart fields.
  // Their instance size is precomputed and field offsets are known.
  if (!is_prefinalized()) {
    // Compute offsets of instance fields, instance size and bitmap for unboxed
    // fields.
    const auto host_bitmap = CalculateFieldOffsets();
    if (ptr() == isolate_group->class_table()->At(id())) {
      if (!ClassTable::IsTopLevelCid(id())) {
        // Unless class is top-level, which don't get instantiated,
        // sets the new size in the class table.
        isolate_group->class_table()->UpdateClassSize(id(), ptr());
        isolate_group->class_table()->SetUnboxedFieldsMapAt(id(), host_bitmap);
      }
    }
  }

#if defined(DEBUG)
  if (is_const()) {
    // Double-check that all fields are final (CFE should guarantee that if it
    // marks the class as having a constant constructor).
    auto Z = thread->zone();
    const auto& super_class = Class::Handle(Z, SuperClass());
    ASSERT(super_class.IsNull() || super_class.is_const());
    const auto& fields = Array::Handle(Z, this->fields());
    auto& field = Field::Handle(Z);
    for (intptr_t i = 0; i < fields.Length(); ++i) {
      field ^= fields.At(i);
      ASSERT(field.is_static() || field.is_final());
    }
  }
#endif

}
4401
4402#if defined(DEBUG)
4403static bool IsMutatorOrAtDeoptSafepoint() {
4404 Thread* thread = Thread::Current();
4405 return thread->IsDartMutatorThread() || thread->OwnsDeoptSafepoint();
4406}
4407#endif
4408
4409#if !defined(DART_PRECOMPILED_RUNTIME)
4410
 public:
  // Wraps the class's dependent-code weak array so that WeakCodeReferences
  // can invalidate code compiled under CHA assumptions about [cls].
  explicit CHACodeArray(const Class& cls)
      : WeakCodeReferences(WeakArray::Handle(cls.dependent_code())),
        cls_(cls) {}

  virtual void UpdateArrayTo(const WeakArray& value) {
    // TODO(fschneider): Fails for classes in the VM isolate.
  }

  // Traces a deoptimization caused by a CHA invalidation (flag-gated).
  virtual void ReportDeoptimization(const Code& code) {
    if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
      Function& function = Function::Handle(code.function());
      THR_Print("Deoptimizing %s because CHA optimized (%s).\n",
                function.ToFullyQualifiedCString(), cls_.ToCString());
    }
  }

  // Traces a switch back to unoptimized code for the affected function
  // (flag-gated).
  virtual void ReportSwitchingCode(const Code& code) {
    if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
      Function& function = Function::Handle(code.function());
      THR_Print(
          "Switching %s to unoptimized code because CHA invalid"
          " (%s)\n",
          function.ToFullyQualifiedCString(), cls_.ToCString());
    }
  }

 private:
  // Class whose CHA-dependent code this array tracks.
  const Class& cls_;
};
4444
4445void Class::RegisterCHACode(const Code& code) {
4446 if (FLAG_trace_cha) {
4447 THR_Print("RegisterCHACode '%s' depends on class '%s'\n",
4448 Function::Handle(code.function()).ToQualifiedCString(),
4449 ToCString());
4450 }
4451 DEBUG_ASSERT(IsMutatorOrAtDeoptSafepoint());
4452 ASSERT(code.is_optimized());
4453 CHACodeArray a(*this);
4454 a.Register(code);
4455}
4456
      IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  // Disable every code object registered as CHA-dependent on this class.
  CHACodeArray a(*this);
  if (FLAG_trace_deoptimization && a.HasCodes()) {
    if (subclass.IsNull()) {
      THR_Print("Deopt for CHA (all)\n");
    } else {
      THR_Print("Deopt for CHA (new subclass %s)\n", subclass.ToCString());
    }
  }
  // Mutators are running; DisableCode must coordinate accordingly.
  a.DisableCode(/*are_mutators_stopped=*/false);
}
4470
4474
// Returns the weak array of code objects that depend on CHA facts about this
// class. Caller must hold the program lock as a reader (asserted below).
WeakArrayPtr Class::dependent_code() const {
      IsolateGroup::Current()->program_lock()->IsCurrentThreadReader());
  return untag()->dependent_code();
}
4480
// Installs a new dependent-code weak array. Caller must hold the program lock
// as a writer (asserted below).
void Class::set_dependent_code(const WeakArray& array) const {
      IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  untag()->set_dependent_code(array.ptr());
}
4486
4487#endif // !defined(DART_PRECOMPILED_RUNTIME)
4488
4489bool Class::TraceAllocation(IsolateGroup* isolate_group) const {
4490#ifndef PRODUCT
4491 auto class_table = isolate_group->class_table();
4492 return class_table->ShouldTraceAllocationFor(id());
4493#else
4494 return false;
4495#endif
4496}
4497
// Enables or disables allocation tracing for this class in the class table.
// Only available outside PRODUCT builds.
void Class::SetTraceAllocation(bool trace_allocation) const {
#ifndef PRODUCT
  auto isolate_group = IsolateGroup::Current();
  // Only touch the class table when the flag actually changes.
  const bool changed = trace_allocation != this->TraceAllocation(isolate_group);
  if (changed) {
    auto class_table = isolate_group->class_table();
    class_table->SetTraceAllocationFor(id(), trace_allocation);
#ifdef TARGET_ARCH_IA32
#endif
  }
#else
  UNREACHABLE();
#endif
}
4513
// Conventions:
// * For throwing a NSM in a library or top-level class (i.e., level is
//   kTopLevel), if a method was found but was incompatible, we pass the
//   signature of the found method as a string, otherwise the null instance.
// * Otherwise, for throwing a NSM in a class klass we use its runtime type as
//   receiver, i.e., klass.RareType().
//
// Builds the 7-element argument vector expected by
// NoSuchMethodError._throwNew in dart:core and invokes it; returns the
// (error) result of that invocation.
static ObjectPtr ThrowNoSuchMethod(const Instance& receiver,
                                   const String& function_name,
                                   const Array& arguments,
                                   const Array& argument_names,
                                   const InvocationMirror::Level level,
                                   const InvocationMirror::Kind kind) {
  // NOTE(review): initializer not visible in this excerpt — presumably
  // encodes (level, kind) into a Smi; confirm against upstream source.
  const Smi& invocation_type =

  ASSERT(!receiver.IsNull() || level == InvocationMirror::Level::kTopLevel);
  ASSERT(level != InvocationMirror::Level::kTopLevel || receiver.IsString());
  const Array& args = Array::Handle(Array::New(7));
  args.SetAt(0, receiver);
  args.SetAt(1, function_name);
  args.SetAt(2, invocation_type);
  args.SetAt(3, Object::smi_zero());  // Type arguments length.
  args.SetAt(4, Object::null_type_arguments());
  args.SetAt(5, arguments);
  args.SetAt(6, argument_names);

  const Library& libcore = Library::Handle(Library::CoreLibrary());
  const Class& cls =
      Class::Handle(libcore.LookupClass(Symbols::NoSuchMethodError()));
  ASSERT(!cls.IsNull());
  const auto& error = cls.EnsureIsFinalized(Thread::Current());
  ASSERT(error == Error::null());
  const Function& throwNew =
      Function::Handle(cls.LookupFunctionAllowPrivate(Symbols::ThrowNew()));
  return DartEntry::InvokeFunction(throwNew, args);
}
4550
                                const Instance& src_value,
                                const AbstractType& dst_type,
                                const String& dst_name) {
  // Package (position, value, expected type, name) for _TypeError._throwNew.
  const Array& args = Array::Handle(Array::New(4));
  const Smi& pos = Smi::Handle(Smi::New(token_pos.Serialize()));
  args.SetAt(0, pos);
  args.SetAt(1, src_value);
  args.SetAt(2, dst_type);
  args.SetAt(3, dst_name);

  const Library& libcore = Library::Handle(Library::CoreLibrary());
  const Class& cls =
      Class::Handle(libcore.LookupClassAllowPrivate(Symbols::TypeError()));
  const auto& error = cls.EnsureIsFinalized(Thread::Current());
  ASSERT(error == Error::null());
  const Function& throwNew =
      Function::Handle(cls.LookupFunctionAllowPrivate(Symbols::ThrowNew()));
  return DartEntry::InvokeFunction(throwNew, args);
}
4571
                              bool throw_nsm_if_absent,
                              bool respect_reflectable,
                              bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  // Note static fields do not have implicit getters.
  const Field& field = Field::Handle(zone, LookupStaticField(getter_name));

  if (!field.IsNull() && check_is_entrypoint) {
  }

  // No (initialized) field: fall back to a user-defined static getter, or to
  // closurizing a regular static method of the same name.
  if (field.IsNull() || field.IsUninitialized()) {
    const String& internal_getter_name =
        String::Handle(zone, Field::GetterName(getter_name));
    Function& getter =
        Function::Handle(zone, LookupStaticFunction(internal_getter_name));

    if (field.IsNull() && !getter.IsNull() && check_is_entrypoint) {
    }

    if (getter.IsNull() || (respect_reflectable && !getter.is_reflectable())) {
      if (getter.IsNull()) {
        getter = LookupStaticFunction(getter_name);
        if (!getter.IsNull()) {
          if (check_is_entrypoint) {
          }
          if (getter.SafeToClosurize()) {
            // Looking for a getter but found a regular method: closurize it.
            const Function& closure_function =
            return closure_function.ImplicitStaticClosure();
          }
        }
      }
      if (throw_nsm_if_absent) {
        return ThrowNoSuchMethod(
            AbstractType::Handle(zone, RareType()), getter_name,
            Object::null_array(), Object::null_array(),
      }
      // Fall through case: Indicate that we didn't find any function or field
      // using a special null instance. This is different from a field being
      // null. Callers make sure that this null does not leak into Dartland.
      return Object::sentinel().ptr();
    }

    // Invoke the getter and return the result.
    return DartEntry::InvokeFunction(getter, Object::empty_array());
  }

  return field.StaticValue();
}
4631
                              const Instance& value,
                              bool respect_reflectable,
                              bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  // Check for real fields and user-defined setters.
  const Field& field = Field::Handle(zone, LookupStaticField(setter_name));
  const String& internal_setter_name =
      String::Handle(zone, Field::SetterName(setter_name));

  if (!field.IsNull() && check_is_entrypoint) {
  }

  AbstractType& parameter_type = AbstractType::Handle(zone);
  // No field of that name: try a user-defined static setter instead.
  if (field.IsNull()) {
    const Function& setter =
        Function::Handle(zone, LookupStaticFunction(internal_setter_name));
    if (!setter.IsNull() && check_is_entrypoint) {
    }
    const int kNumArgs = 1;
    const Array& args = Array::Handle(zone, Array::New(kNumArgs));
    args.SetAt(0, value);
    if (setter.IsNull() || (respect_reflectable && !setter.is_reflectable())) {
        internal_setter_name, args, Object::null_array(),
    }
    // Dynamically check the assigned value against the setter's parameter
    // type before invoking it.
    parameter_type = setter.ParameterTypeAt(0);
    if (!value.RuntimeTypeIsSubtypeOf(parameter_type,
                                      Object::null_type_arguments(),
                                      Object::null_type_arguments())) {
      const String& argument_name =
          String::Handle(zone, setter.ParameterNameAt(0));
      return ThrowTypeError(setter.token_pos(), value, parameter_type,
                            argument_name);
    }
    // Invoke the setter and return the result.
    return DartEntry::InvokeFunction(setter, args);
  }

  // A final (or non-reflectable, when respected) field cannot be assigned.
  if (field.is_final() || (respect_reflectable && !field.is_reflectable())) {
    const int kNumArgs = 1;
    const Array& args = Array::Handle(zone, Array::New(kNumArgs));
    args.SetAt(0, value);
        internal_setter_name, args, Object::null_array(),
  }

  // Type-check the value against the field's declared type before storing.
  parameter_type = field.type();
  if (!value.RuntimeTypeIsSubtypeOf(parameter_type,
                                    Object::null_type_arguments(),
                                    Object::null_type_arguments())) {
    const String& argument_name = String::Handle(zone, field.name());
    return ThrowTypeError(field.token_pos(), value, parameter_type,
                          argument_name);
  }
  field.SetStaticValue(value);
  return value.ptr();
}
4700
// Creates a new array of boxed arguments suitable for invoking the callable
// from the original boxed arguments for a static call. Also sets the contents
// of the handle pointed to by [callable_args_desc_array_out] to an appropriate
// arguments descriptor array for the new arguments.
//
// Assumes [arg_names] are consistent with [static_args_descriptor].
    Zone* zone,
    const Instance& receiver,
    const Array& static_args,
    const Array& arg_names,
    const ArgumentsDescriptor& static_args_descriptor) {
  const intptr_t num_static_type_args = static_args_descriptor.TypeArgsLen();
  const intptr_t num_static_args = static_args_descriptor.Count();
  // Double check that the static args descriptor expects boxed arguments
  // and the static args descriptor is consistent with the static arguments.
  ASSERT_EQUAL(static_args_descriptor.Size(), num_static_args);
  ASSERT_EQUAL(static_args.Length(),
               num_static_args + (num_static_type_args > 0 ? 1 : 0));
  // Add an additional slot to store the callable as the receiver.
  const auto& callable_args =
      Array::Handle(zone, Array::New(static_args.Length() + 1));
  const intptr_t first_arg_index = static_args_descriptor.FirstArgIndex();
  auto& temp = Object::Handle(zone);
  // Copy the static args into the corresponding slots of the callable args.
  // Slot 0 (the type-arguments vector) stays at index 0 when present.
  if (num_static_type_args > 0) {
    temp = static_args.At(0);
    callable_args.SetAt(0, temp);
  }
  // All positional/named args shift right by one to make room for receiver.
  for (intptr_t i = first_arg_index; i < static_args.Length(); i++) {
    temp = static_args.At(i);
    callable_args.SetAt(i + 1, temp);
  }
  // Set the receiver slot in the callable args.
  callable_args.SetAt(first_arg_index, receiver);
  return callable_args.ptr();
}
4738
                        const Array& args,
                        const Array& arg_names,
                        bool respect_reflectable,
                        bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  // We don't pass any explicit type arguments, which will be understood as
  // using dynamic for any function type arguments by lower layers.
  const int kTypeArgsLen = 0;
  const Array& args_descriptor_array = Array::Handle(
      zone, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(),
                                          arg_names, Heap::kNew));
  ArgumentsDescriptor args_descriptor(args_descriptor_array);

  if (!function.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(function.VerifyCallEntryPoint());
  }

  if (function.IsNull()) {
    // Didn't find a method: try to find a getter and invoke call on its result.
    const Object& getter_result = Object::Handle(
        zone, InvokeGetter(function_name, false, respect_reflectable,
                           check_is_entrypoint));
    if (getter_result.ptr() != Object::sentinel().ptr()) {
      if (check_is_entrypoint) {
      }
      // Treat the getter result as a callable: prepend it as receiver and
      // re-describe the arguments accordingly.
      const auto& call_args_descriptor_array = Array::Handle(
          zone, ArgumentsDescriptor::NewBoxed(args_descriptor.TypeArgsLen(),
                                              args_descriptor.Count() + 1,
                                              arg_names, Heap::kNew));
      const auto& call_args = Array::Handle(
          zone,
          CreateCallableArgumentsFromStatic(zone, Instance::Cast(getter_result),
                                            args, arg_names, args_descriptor));
      return DartEntry::InvokeClosure(thread, call_args,
                                      call_args_descriptor_array);
    }
  }

  if (function.IsNull() ||
      !function.AreValidArguments(args_descriptor, nullptr) ||
      (respect_reflectable && !function.is_reflectable())) {
    return ThrowNoSuchMethod(
        AbstractType::Handle(zone, RareType()), function_name, args, arg_names,
  }
  // This is a static function, so we pass an empty instantiator tav.
  ASSERT(function.is_static());
  ObjectPtr type_error = function.DoArgumentTypesMatch(
      args, args_descriptor, Object::empty_type_arguments());
  if (type_error != Error::null()) {
    return type_error;
  }
  return DartEntry::InvokeFunction(function, args, args_descriptor_array);
}
4801
4802#if !defined(DART_PRECOMPILED_RUNTIME)
4803
    Zone* zone,
    const ExternalTypedData& kernel_buffer,
    const String& library_url,
    const String& klass) {
  // Parse the kernel blob produced by the frontend for the expression.
  std::unique_ptr<kernel::Program> kernel_pgm =
      kernel::Program::ReadFromTypedData(kernel_buffer);

  if (kernel_pgm == nullptr) {
    return ApiError::New(String::Handle(
        zone, String::New("Kernel isolate returned ill-formed kernel.")));
  }

  auto& result = Object::Handle(zone);
  {
    kernel::KernelLoader loader(kernel_pgm.get(),
                                /*uri_to_source_table=*/nullptr);
    result = loader.LoadExpressionEvaluationFunction(library_url, klass);
    // Release the program eagerly; the loaded function no longer needs it.
    kernel_pgm.reset();
  }
  if (result.IsError()) return result.ptr();
  return Function::Cast(result).ptr();
}
4827
                                            Zone* zone,
                                            const Function& eval_function) {
  // Build scopes from kernel so we can ask whether `this` is actually read
  // by the evaluation function's body.
  auto parsed_function = new ParsedFunction(
      thread, Function::ZoneHandle(zone, eval_function.ptr()));
  parsed_function->EnsureKernelScopes();
  return parsed_function->is_receiver_used();
}
4836
    Zone* zone,
    const Function& eval_function,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) {
  // type_arguments is null if all type arguments are dynamic.
  if (type_definitions.Length() == 0 || type_arguments.IsNull()) {
    return DartEntry::InvokeFunction(eval_function, arguments);
  }

  // Prepend the type-arguments vector as the first (implicit) argument.
  intptr_t num_type_args = type_arguments.Length();
  const auto& real_arguments =
      Array::Handle(zone, Array::New(arguments.Length() + 1));
  real_arguments.SetAt(0, type_arguments);
  Object& arg = Object::Handle(zone);
  for (intptr_t i = 0; i < arguments.Length(); ++i) {
    arg = arguments.At(i);
    real_arguments.SetAt(i + 1, arg);
  }

  const Array& args_desc =
          num_type_args, arguments.Length(), Heap::kNew));
  return DartEntry::InvokeFunction(eval_function, real_arguments, args_desc);
}
4863
4864#endif // !defined(DART_PRECOMPILED_RUNTIME)
4865
    const ExternalTypedData& kernel_buffer,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) const {
  // Evaluate in the context of this library's top-level (pseudo) class.
  const auto& klass = Class::Handle(toplevel_class());
  return klass.EvaluateCompiledExpression(kernel_buffer, type_definitions,
                                          arguments, type_arguments);
}
4875
    const ExternalTypedData& kernel_buffer,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) const {
  auto thread = Thread::Current();
  const auto& library = Library::Handle(thread->zone(), this->library());
  // Static context: no receiver instance is supplied.
      thread, Instance::null_object(), library, *this, kernel_buffer,
      type_definitions, arguments, type_arguments);
}
4887
    const Class& klass,
    const ExternalTypedData& kernel_buffer,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) const {
  auto thread = Thread::Current();
  auto zone = thread->zone();
  const auto& library = Library::Handle(zone, klass.library());
  // Instance context: `*this` is forwarded as the receiver.
  return Instance::EvaluateCompiledExpression(thread, *this, library, klass,
                                              kernel_buffer, type_definitions,
                                              arguments, type_arguments);
}
4901
    Thread* thread,
    const Object& receiver,
    const Library& library,
    const Class& klass,
    const ExternalTypedData& kernel_buffer,
    const Array& type_definitions,
    const Array& arguments,
    const TypeArguments& type_arguments) {
  auto zone = Thread::Current()->zone();
#if defined(DART_PRECOMPILED_RUNTIME)
  // AOT has no frontend; expression evaluation cannot work.
  const auto& error_str = String::Handle(
      zone,
      String::New("Expression evaluation not available in precompiled mode."));
  return ApiError::New(error_str);
#else
  if (IsInternalOnlyClassId(klass.id()) || (klass.id() == kTypeArgumentsCid)) {
    const auto& exception = Instance::Handle(
        zone, String::New("Expressions can be evaluated only with regular Dart "
                          "instances/classes."));
    return UnhandledException::New(exception, StackTrace::null_instance());
  }

  const auto& url = String::Handle(zone, library.url());
  const auto& klass_name = klass.IsTopLevel()
                               ? String::null_string()
                               : String::Handle(zone, klass.UserVisibleName());

  // Load the synthetic evaluation function from the kernel blob.
  const auto& result = Object::Handle(
      zone,
      LoadExpressionEvaluationFunction(zone, kernel_buffer, url, klass_name));
  if (result.IsError()) return result.ptr();

  const auto& eval_function = Function::Cast(result);

#if defined(DEBUG)
  for (intptr_t i = 0; i < arguments.Length(); ++i) {
    ASSERT(arguments.At(i) != Object::optimized_out().ptr());
  }
#endif  // defined(DEBUG)

  auto& all_arguments = Array::Handle(zone, arguments.ptr());
  if (!eval_function.is_static()) {
    // `this` may be optimized out (e.g. not accessible from breakpoint due to
    // not being captured by closure). We allow this as long as the evaluation
    // function doesn't actually need `this`.
    if (receiver.IsNull() || receiver.ptr() == Object::optimized_out().ptr()) {
      if (EvaluationFunctionNeedsReceiver(thread, zone, eval_function)) {
        return Object::optimized_out().ptr();
      }
    }

    // Instance evaluation: prepend the receiver to the argument list.
    all_arguments = Array::New(1 + arguments.Length());
    auto& param = PassiveObject::Handle();
    all_arguments.SetAt(0, receiver);
    for (intptr_t i = 0; i < arguments.Length(); i++) {
      param = arguments.At(i);
      all_arguments.SetAt(i + 1, param);
    }
  }

  return EvaluateCompiledExpressionHelper(zone, eval_function, type_definitions,
                                          all_arguments, type_arguments);
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
4967
  // Fail loudly if the class declaration has not been loaded yet; in AOT this
  // state is impossible by construction.
  if (!is_declaration_loaded()) {
#if defined(DART_PRECOMPILED_RUNTIME)
    UNREACHABLE();
#else
    FATAL("Unable to use class %s which is not loaded yet.", ToCString());
#endif
  }
}
4977
// Ensure that top level parsing of the class has been done.
// Uses a double-checked pattern under the program lock: a fast unlocked
// check, then a re-check after acquiring the write lock.
ErrorPtr Class::EnsureIsFinalized(Thread* thread) const {
  ASSERT(!IsNull());
  if (is_finalized()) {
    return Error::null();
  }
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
  return Error::null();
#else
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  if (is_finalized()) {
    return Error::null();
  }
  LeaveCompilerScope ncs(thread);
  ASSERT(thread != nullptr);
  const Error& error =
  if (!error.IsNull()) {
    ASSERT(thread == Thread::Current());
    // If a long-jump context is installed, propagate rather than return.
    if (thread->long_jump_base() != nullptr) {
      UNREACHABLE();
    }
  }
  return error.ptr();
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
5006
// Ensure that code outdated by finalized class is cleaned up, new instance of
// this class is ready to be allocated.
  ASSERT(!IsNull());
  // Fast path: already allocate-finalized.
  if (is_allocate_finalized()) {
    return Error::null();
  }
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  if (is_allocate_finalized()) {
    return Error::null();
  }
  ASSERT(thread != nullptr);
  // Finalization of members must happen first.
  Error& error = Error::Handle(thread->zone(), EnsureIsFinalized(thread));
  if (!error.IsNull()) {
    ASSERT(thread == Thread::Current());
    if (thread->long_jump_base() != nullptr) {
      UNREACHABLE();
    }
  }
  // May be allocate-finalized recursively during EnsureIsFinalized.
  if (is_allocate_finalized()) {
    return Error::null();
  }
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
#endif  // defined(DART_PRECOMPILED_RUNTIME)
  return error.ptr();
}
5038
// Replaces this class's field array wholesale. In DEBUG builds, verifies
// that every entry is an original field owned by this class.
void Class::SetFields(const Array& value) const {
  ASSERT(!value.IsNull());
#if defined(DEBUG)
  Thread* thread = Thread::Current();
  // Verify that all the fields in the array have this class as owner.
  Field& field = Field::Handle();
  intptr_t len = value.Length();
  for (intptr_t i = 0; i < len; i++) {
    field ^= value.At(i);
    ASSERT(field.IsOriginal());
    ASSERT(field.Owner() == ptr());
  }
#endif
  // The value of static fields is already initialized to null.
  set_fields(value);
}
5056
// Appends a single field by growing the field array by one slot.
void Class::AddField(const Field& field) const {
#if defined(DEBUG)
  Thread* thread = Thread::Current();
#endif
  const Array& arr = Array::Handle(fields());
  const Array& new_arr = Array::Handle(Array::Grow(arr, arr.Length() + 1));
  new_arr.SetAt(arr.Length(), field);
  SetFields(new_arr);
}
5067
// Appends a batch of fields in one grow operation (old-space allocation).
void Class::AddFields(const GrowableArray<const Field*>& new_fields) const {
#if defined(DEBUG)
  Thread* thread = Thread::Current();
#endif
  const intptr_t num_new_fields = new_fields.length();
  if (num_new_fields == 0) return;
  const Array& arr = Array::Handle(fields());
  const intptr_t num_old_fields = arr.Length();
  const Array& new_arr = Array::Handle(
      Array::Grow(arr, num_old_fields + num_new_fields, Heap::kOld));
  for (intptr_t i = 0; i < num_new_fields; i++) {
    new_arr.SetAt(i + num_old_fields, *new_fields.At(i));
  }
  SetFields(new_arr);
}
5084
// Returns the index of [needle] in this class's field array by identity, or
// -1 when absent (or when the class cannot be finalized).
intptr_t Class::FindFieldIndex(const Field& needle) const {
  Thread* thread = Thread::Current();
  if (EnsureIsFinalized(thread) != Error::null()) {
    return -1;
  }
  Array& fields = thread->ArrayHandle();
  Field& field = thread->FieldHandle();
  fields = this->fields();
  ASSERT(!fields.IsNull());
  // Linear identity scan over the field array.
  for (intptr_t i = 0, n = fields.Length(); i < n; ++i) {
    field ^= fields.At(i);
    if (needle.ptr() == field.ptr()) {
      return i;
    }
  }
  // Not found.
  return -1;
}
5105
5106FieldPtr Class::FieldFromIndex(intptr_t idx) const {
5107 Array& fields = Array::Handle(this->fields());
5108 if ((idx < 0) || (idx >= fields.Length())) {
5109 return Field::null();
5110 }
5111 return Field::RawCast(fields.At(idx));
5112}
5113
  // Only the dart:_internal ClassID class receives the injected cid fields.
  if (library() != Library::InternalLibrary() ||
      Name() != Symbols::ClassID().ptr()) {
    return false;
  }

  auto thread = Thread::Current();
  auto isolate_group = thread->isolate_group();
  auto zone = thread->zone();
  Field& field = Field::Handle(zone);
  Smi& value = Smi::Handle(zone);
  String& field_name = String::Handle(zone);

  // Static table of (field name, class id) pairs generated from the VM's
  // class lists via the macros below.
  // clang-format off
  static const struct {
    const char* const field_name;
    const intptr_t cid;
  } cid_fields[] = {
#define CLASS_LIST_WITH_NULL(V)                                                \
  V(Null)                                                                      \
  CLASS_LIST_NO_OBJECT(V)
#define ADD_SET_FIELD(clazz)                                                   \
  {"cid" #clazz, k##clazz##Cid},
#undef ADD_SET_FIELD
#undef CLASS_LIST_WITH_NULL
#define ADD_SET_FIELD(clazz)                                                   \
  {"cid" #clazz, kTypedData##clazz##Cid},                                      \
  {"cid" #clazz "View", kTypedData##clazz##ViewCid},                           \
  {"cidExternal" #clazz, kExternalTypedData##clazz##Cid},                      \
  {"cidUnmodifiable" #clazz "View", kUnmodifiableTypedData##clazz##ViewCid},   \
  CLASS_LIST_TYPED_DATA(ADD_SET_FIELD)
#undef ADD_SET_FIELD
  // Used in const hashing to determine whether we're dealing with a
  // user-defined const. See lib/_internal/vm/lib/compact_hash.dart.
  {"numPredefinedCids", kNumPredefinedCids},
  };
  // clang-format on

  // Create one const static int field per table entry and register it.
  const AbstractType& field_type = Type::Handle(zone, Type::IntType());
  for (size_t i = 0; i < ARRAY_SIZE(cid_fields); i++) {
    field_name = Symbols::New(thread, cid_fields[i].field_name);
    field = Field::New(field_name, /* is_static = */ true,
                       /* is_final = */ false,
                       /* is_const = */ true,
                       /* is_reflectable = */ false,
                       /* is_late = */ false, *this, field_type,
    value = Smi::New(cid_fields[i].cid);
    isolate_group->RegisterStaticField(field, value);
    AddField(field);
  }

  return true;
}
5169
// Shared allocation path for new Class objects: allocates the Class in old
// space and initializes layout/state fields from the given fake-instance
// types (host and compilation-target variants).
template <class FakeInstance, class TargetFakeInstance>
ClassPtr Class::NewCommon(intptr_t index) {
  const auto& result = Class::Handle(Object::Allocate<Class>(Heap::kOld));
  // Here kIllegalCid means not-yet-assigned.
  Object::VerifyBuiltinVtable<FakeInstance>(index == kIllegalCid ? kInstanceCid
                                                                 : index);
  NOT_IN_PRECOMPILED(result.set_token_pos(TokenPosition::kNoSource));
  NOT_IN_PRECOMPILED(result.set_end_token_pos(TokenPosition::kNoSource));
  const intptr_t host_instance_size = FakeInstance::InstanceSize();
  const intptr_t target_instance_size = compiler::target::RoundedAllocationSize(
      TargetFakeInstance::InstanceSize());
  result.set_type_arguments_field_offset_in_words(kNoTypeArguments,
                                                  RTN::Class::kNoTypeArguments);
  const intptr_t host_next_field_offset = FakeInstance::NextFieldOffset();
  const intptr_t target_next_field_offset =
      TargetFakeInstance::NextFieldOffset();
  result.set_next_field_offset(host_next_field_offset,
  result.set_id(index);
  NOT_IN_PRECOMPILED(result.set_implementor_cid(kIllegalCid));
  result.set_num_type_arguments_unsafe(kUnknownNumTypeArguments);
  result.set_num_native_fields(0);
  result.set_state_bits(0);
  NOT_IN_PRECOMPILED(result.set_kernel_offset(0));
  result.InitEmptyFields();
  return result.ptr();
}
5199
5200template <class FakeInstance, class TargetFakeInstance>
5201ClassPtr Class::New(intptr_t index,
5202 IsolateGroup* isolate_group,
5203 bool register_class,
5204 bool is_abstract) {
5205 Class& result =
5206 Class::Handle(NewCommon<FakeInstance, TargetFakeInstance>(index));
5207 if (is_abstract) {
5208 result.set_is_abstract();
5209 }
5210 if (register_class) {
5211 isolate_group->class_table()->Register(result);
5212 }
5213 return result.ptr();
5214}
5215
// Creates a user-visible class shell (cid assigned later) attached to [lib],
// named [name], with source position info from [script]/[token_pos].
ClassPtr Class::New(const Library& lib,
                    const String& name,
                    const Script& script,
                    TokenPosition token_pos,
                    bool register_class) {
  Class& result =
      Class::Handle(NewCommon<Instance, RTN::Instance>(kIllegalCid));
  result.set_library(lib);
  result.set_name(name);
  result.set_script(script);
  NOT_IN_PRECOMPILED(result.set_token_pos(token_pos));

  // The size gets initialized to 0. Once the class gets finalized the class
  // finalizer will set the correct size.
  ASSERT(!result.is_finalized() && !result.is_prefinalized());
  result.set_instance_size_in_words(0, 0);

  if (register_class) {
  }
  return result.ptr();
}
5238
// Convenience factory for a plain instance class with a not-yet-assigned cid.
ClassPtr Class::NewInstanceClass() {
  return Class::New<Instance, RTN::Instance>(kIllegalCid,
}
5243
// Creates a native-wrapper class named [name] in [library] with
// [field_count] native fields. Returns Class::null() if a class with that
// name already exists.
ClassPtr Class::NewNativeWrapper(const Library& library,
                                 const String& name,
                                 int field_count) {
  Class& cls = Class::Handle(library.LookupClass(name));
  if (cls.IsNull()) {
    cls = New(library, name, Script::Handle(), TokenPosition::kNoSource);
    cls.SetFields(Object::empty_array());
    cls.SetFunctions(Object::empty_array());
    // Set super class to Object.
    // Compute instance size. First word contains a pointer to a properly
    // sized typed array once the first native field has been set.
    const intptr_t host_instance_size =
#if defined(DART_PRECOMPILER)
    const intptr_t target_instance_size =
        compiler::target::Instance::InstanceSize() +
        compiler::target::kCompressedWordSize;
#else
    const intptr_t target_instance_size =
        sizeof(UntaggedInstance) + compiler::target::kCompressedWordSize;
#endif
        compiler::target::RoundedAllocationSize(target_instance_size));
    cls.set_num_native_fields(field_count);
    // The signature of the constructor yet to be added to this class will have
    // to be finalized explicitly, since the class is prematurely marked as
    // 'is_allocate_finalized' and finalization of member types will not occur.
    cls.set_is_isolate_unsendable(true);
    NOT_IN_PRECOMPILED(cls.set_implementor_cid(kDynamicCid));
    library.AddClass(cls);
    return cls.ptr();
  } else {
    return Class::null();
  }
}
5286
// Creates the VM-internal class object for one of the two string
// representations (one-byte or two-byte) and registers it.
ClassPtr Class::NewStringClass(intptr_t class_id, IsolateGroup* isolate_group) {
  if (class_id == kOneByteStringCid) {
    target_instance_size = compiler::target::RoundedAllocationSize(
        RTN::OneByteString::InstanceSize());
  } else {
    ASSERT(class_id == kTwoByteStringCid);
    target_instance_size = compiler::target::RoundedAllocationSize(
        RTN::TwoByteString::InstanceSize());
  }
  Class& result = Class::Handle(New<String, RTN::String>(
      class_id, isolate_group, /*register_class=*/false));

  const intptr_t host_next_field_offset = String::NextFieldOffset();
  const intptr_t target_next_field_offset = RTN::String::NextFieldOffset();
  result.set_next_field_offset(host_next_field_offset,
  result.set_is_prefinalized();
  // Strings are deeply immutable and therefore freely sharable.
  ASSERT(IsDeeplyImmutableCid(class_id));
  result.set_is_deeply_immutable(true);
  isolate_group->class_table()->Register(result);
  return result.ptr();
}
5313
// Creates and registers the internal class object for a TypedData cid.
ClassPtr Class::NewTypedDataClass(intptr_t class_id,
                                  IsolateGroup* isolate_group) {
  ASSERT(IsTypedDataClassId(class_id));
  const intptr_t target_instance_size =
      compiler::target::RoundedAllocationSize(RTN::TypedData::InstanceSize());
  Class& result = Class::Handle(New<TypedData, RTN::TypedData>(
      class_id, isolate_group, /*register_class=*/false));

  const intptr_t host_next_field_offset = TypedData::NextFieldOffset();
  const intptr_t target_next_field_offset = RTN::TypedData::NextFieldOffset();
  result.set_next_field_offset(host_next_field_offset,
  result.set_is_prefinalized();
  isolate_group->class_table()->Register(result);
  return result.ptr();
}
5332
// Creates and registers the internal class object for a TypedData view cid.
ClassPtr Class::NewTypedDataViewClass(intptr_t class_id,
                                      IsolateGroup* isolate_group) {
  ASSERT(IsTypedDataViewClassId(class_id));
  const intptr_t target_instance_size = compiler::target::RoundedAllocationSize(
      RTN::TypedDataView::InstanceSize());
  Class& result = Class::Handle(New<TypedDataView, RTN::TypedDataView>(
      class_id, isolate_group, /*register_class=*/false));

  const intptr_t host_next_field_offset = TypedDataView::NextFieldOffset();
  const intptr_t target_next_field_offset =
      RTN::TypedDataView::NextFieldOffset();
  result.set_next_field_offset(host_next_field_offset,
  result.set_is_prefinalized();
  isolate_group->class_table()->Register(result);
  return result.ptr();
}
5352
5354 IsolateGroup* isolate_group) {
5357 const intptr_t target_instance_size = compiler::target::RoundedAllocationSize(
5358 RTN::TypedDataView::InstanceSize());
5359 Class& result = Class::Handle(New<TypedDataView, RTN::TypedDataView>(
5360 class_id, isolate_group, /*register_class=*/false));
5362
5363 const intptr_t host_next_field_offset = TypedDataView::NextFieldOffset();
5364 const intptr_t target_next_field_offset =
5365 RTN::TypedDataView::NextFieldOffset();
5366 result.set_next_field_offset(host_next_field_offset,
5368 result.set_is_prefinalized();
5369 isolate_group->class_table()->Register(result);
5370 return result.ptr();
5371}
5372
// Allocates and registers a pre-finalized Class for an ExternalTypedData cid.
// NOTE(review): embedded-number jumps (5375-5376, 5385, 5387) mark source
// lines lost in extraction — confirm against upstream object.cc.
5373ClassPtr Class::NewExternalTypedDataClass(intptr_t class_id,
 5374 IsolateGroup* isolate_group) {
 5377 const intptr_t target_instance_size = compiler::target::RoundedAllocationSize(
 5378 RTN::ExternalTypedData::InstanceSize());
 5379 Class& result = Class::Handle(New<ExternalTypedData, RTN::ExternalTypedData>(
 5380 class_id, isolate_group, /*register_class=*/false));
 5381
 5382 const intptr_t host_next_field_offset = ExternalTypedData::NextFieldOffset();
 5383 const intptr_t target_next_field_offset =
 5384 RTN::ExternalTypedData::NextFieldOffset();
 5386 result.set_next_field_offset(host_next_field_offset,
 5388 result.set_is_prefinalized();
 5389 isolate_group->class_table()->Register(result);
 5390 return result.ptr();
 5391}
5392
// Allocates and registers a pre-finalized Class for an FFI Pointer cid.
// Unlike the typed-data classes above, Pointer is generic, so the
// type-arguments field offset is also recorded.
// NOTE(review): embedded-number jumps (5396, 5401, 5409) mark source lines
// lost in extraction — confirm against upstream object.cc.
5393ClassPtr Class::NewPointerClass(intptr_t class_id,
 5394 IsolateGroup* isolate_group) {
 5395 ASSERT(IsFfiPointerClassId(class_id));
 5397 intptr_t target_instance_size =
 5398 compiler::target::RoundedAllocationSize(RTN::Pointer::InstanceSize());
 5399 Class& result = Class::Handle(New<Pointer, RTN::Pointer>(
 5400 class_id, isolate_group, /*register_class=*/false));
 5402 result.set_type_arguments_field_offset(Pointer::type_arguments_offset(),
 5403 RTN::Pointer::type_arguments_offset());
 5404
 5405 const intptr_t host_next_field_offset = Pointer::NextFieldOffset();
 5406 const intptr_t target_next_field_offset = RTN::Pointer::NextFieldOffset();
 5407
 5408 result.set_next_field_offset(host_next_field_offset,
 5410 result.set_is_prefinalized();
 5411 isolate_group->class_table()->Register(result);
 5412 return result.ptr();
 5413}
5414
5415void Class::set_name(const String& value) const {
5416 ASSERT(untag()->name() == String::null());
5417 ASSERT(value.IsSymbol());
5418 untag()->set_name(value.ptr());
5419#if !defined(PRODUCT)
5420 if (untag()->user_name() == String::null()) {
5421 // TODO(johnmccutchan): Eagerly set user name for VM isolate classes,
5422 // lazily set user name for the other classes.
5423 // Generate and set user_name.
5424 const String& user_name = String::Handle(
5425 Symbols::New(Thread::Current(), GenerateUserVisibleName()));
5426 set_user_name(user_name);
5427 }
5428#endif // !defined(PRODUCT)
5429}
5430
5431#if !defined(PRODUCT)
5432void Class::set_user_name(const String& value) const {
5433 untag()->set_user_name(value.ptr());
5434}
5435#endif // !defined(PRODUCT)
5436
// Publishes this class's user-visible name into the shared class table (used
// outside PRODUCT and by the sampling heap profiler), only if none is set yet.
// NOTE(review): embedded-number jumps (5438 — presumably the function
// signature — and 5442 — presumably the local `name`) mark source lines lost
// in extraction; confirm against upstream object.cc.
5437#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
 5439 IsolateGroup* isolate_group = IsolateGroup::Current();
 5440 auto class_table = isolate_group->class_table();
 5441 if (class_table->UserVisibleNameFor(id()) == nullptr) {
 5443 class_table->SetUserVisibleNameFor(id(), name.ToMallocCString());
 5444 }
 5445}
5446#endif  // !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
5447
// Maps internal VM class ids to the names users should see (e.g. all typed
// data variants collapse to their public list names, Smi/Mint collapse to
// "int"). With --show-internal-names the raw class name is returned instead.
// NOTE(review): embedded-number jumps near the end (5602, 5604) mark source
// lines lost in extraction — the final _Future check is incomplete here;
// confirm against upstream object.cc.
5448const char* Class::GenerateUserVisibleName() const {
 5449 if (FLAG_show_internal_names) {
 5450 return String::Handle(Name()).ToCString();
 5451 }
 5452 switch (id()) {
 5453 case kFloat32x4Cid:
 5454 return Symbols::Float32x4().ToCString();
 5455 case kFloat64x2Cid:
 5456 return Symbols::Float64x2().ToCString();
 5457 case kInt32x4Cid:
 5458 return Symbols::Int32x4().ToCString();
 5459 case kTypedDataInt8ArrayCid:
 5460 case kExternalTypedDataInt8ArrayCid:
 5461 return Symbols::Int8List().ToCString();
 5462 case kTypedDataUint8ArrayCid:
 5463 case kExternalTypedDataUint8ArrayCid:
 5464 return Symbols::Uint8List().ToCString();
 5465 case kTypedDataUint8ClampedArrayCid:
 5466 case kExternalTypedDataUint8ClampedArrayCid:
 5467 return Symbols::Uint8ClampedList().ToCString();
 5468 case kTypedDataInt16ArrayCid:
 5469 case kExternalTypedDataInt16ArrayCid:
 5470 return Symbols::Int16List().ToCString();
 5471 case kTypedDataUint16ArrayCid:
 5472 case kExternalTypedDataUint16ArrayCid:
 5473 return Symbols::Uint16List().ToCString();
 5474 case kTypedDataInt32ArrayCid:
 5475 case kExternalTypedDataInt32ArrayCid:
 5476 return Symbols::Int32List().ToCString();
 5477 case kTypedDataUint32ArrayCid:
 5478 case kExternalTypedDataUint32ArrayCid:
 5479 return Symbols::Uint32List().ToCString();
 5480 case kTypedDataInt64ArrayCid:
 5481 case kExternalTypedDataInt64ArrayCid:
 5482 return Symbols::Int64List().ToCString();
 5483 case kTypedDataUint64ArrayCid:
 5484 case kExternalTypedDataUint64ArrayCid:
 5485 return Symbols::Uint64List().ToCString();
 5486 case kTypedDataInt32x4ArrayCid:
 5487 case kExternalTypedDataInt32x4ArrayCid:
 5488 return Symbols::Int32x4List().ToCString();
 5489 case kTypedDataFloat32x4ArrayCid:
 5490 case kExternalTypedDataFloat32x4ArrayCid:
 5491 return Symbols::Float32x4List().ToCString();
 5492 case kTypedDataFloat64x2ArrayCid:
 5493 case kExternalTypedDataFloat64x2ArrayCid:
 5494 return Symbols::Float64x2List().ToCString();
 5495 case kTypedDataFloat32ArrayCid:
 5496 case kExternalTypedDataFloat32ArrayCid:
 5497 return Symbols::Float32List().ToCString();
 5498 case kTypedDataFloat64ArrayCid:
 5499 case kExternalTypedDataFloat64ArrayCid:
 5500 return Symbols::Float64List().ToCString();
 5501 case kPointerCid:
 5502 return Symbols::FfiPointer().ToCString();
 5503 case kDynamicLibraryCid:
 5504 return Symbols::FfiDynamicLibrary().ToCString();
 5505 case kNullCid:
 5506 return Symbols::Null().ToCString();
 5507 case kDynamicCid:
 5508 return Symbols::Dynamic().ToCString();
 5509 case kVoidCid:
 5510 return Symbols::Void().ToCString();
 5511 case kNeverCid:
 5512 return Symbols::Never().ToCString();
 5513 case kClassCid:
 5514 return Symbols::Class().ToCString();
 5515 case kTypeParametersCid:
 5516 return Symbols::TypeParameters().ToCString();
 5517 case kTypeArgumentsCid:
 5518 return Symbols::TypeArguments().ToCString();
 5519 case kPatchClassCid:
 5520 return Symbols::PatchClass().ToCString();
 5521 case kFunctionCid:
 5522 return Symbols::Function().ToCString();
 5523 case kClosureDataCid:
 5524 return Symbols::ClosureData().ToCString();
 5525 case kFfiTrampolineDataCid:
 5526 return Symbols::FfiTrampolineData().ToCString();
 5527 case kFieldCid:
 5528 return Symbols::Field().ToCString();
 5529 case kScriptCid:
 5530 return Symbols::Script().ToCString();
 5531 case kLibraryCid:
 5532 return Symbols::Library().ToCString();
 5533 case kLibraryPrefixCid:
 5534 return Symbols::LibraryPrefix().ToCString();
 5535 case kNamespaceCid:
 5536 return Symbols::Namespace().ToCString();
 5537 case kKernelProgramInfoCid:
 5538 return Symbols::KernelProgramInfo().ToCString();
 5539 case kWeakSerializationReferenceCid:
 5540 return Symbols::WeakSerializationReference().ToCString();
 5541 case kWeakArrayCid:
 5542 return Symbols::WeakArray().ToCString();
 5543 case kCodeCid:
 5544 return Symbols::Code().ToCString();
 5545 case kInstructionsCid:
 5546 return Symbols::Instructions().ToCString();
 5547 case kInstructionsSectionCid:
 5548 return Symbols::InstructionsSection().ToCString();
 5549 case kInstructionsTableCid:
 5550 return Symbols::InstructionsTable().ToCString();
 5551 case kObjectPoolCid:
 5552 return Symbols::ObjectPool().ToCString();
 5553 case kCodeSourceMapCid:
 5554 return Symbols::CodeSourceMap().ToCString();
 5555 case kPcDescriptorsCid:
 5556 return Symbols::PcDescriptors().ToCString();
 5557 case kCompressedStackMapsCid:
 5558 return Symbols::CompressedStackMaps().ToCString();
 5559 case kLocalVarDescriptorsCid:
 5560 return Symbols::LocalVarDescriptors().ToCString();
 5561 case kExceptionHandlersCid:
 5562 return Symbols::ExceptionHandlers().ToCString();
 5563 case kContextCid:
 5564 return Symbols::Context().ToCString();
 5565 case kContextScopeCid:
 5566 return Symbols::ContextScope().ToCString();
 5567 case kSentinelCid:
 5568 return Symbols::Sentinel().ToCString();
 5569 case kSingleTargetCacheCid:
 5570 return Symbols::SingleTargetCache().ToCString();
 5571 case kICDataCid:
 5572 return Symbols::ICData().ToCString();
 5573 case kMegamorphicCacheCid:
 5574 return Symbols::MegamorphicCache().ToCString();
 5575 case kSubtypeTestCacheCid:
 5576 return Symbols::SubtypeTestCache().ToCString();
 5577 case kLoadingUnitCid:
 5578 return Symbols::LoadingUnit().ToCString();
 5579 case kApiErrorCid:
 5580 return Symbols::ApiError().ToCString();
 5581 case kLanguageErrorCid:
 5582 return Symbols::LanguageError().ToCString();
 5583 case kUnhandledExceptionCid:
 5584 return Symbols::UnhandledException().ToCString();
 5585 case kUnwindErrorCid:
 5586 return Symbols::UnwindError().ToCString();
 5587 case kIntegerCid:
 5588 case kSmiCid:
 5589 case kMintCid:
 5590 return Symbols::Int().ToCString();
 5591 case kDoubleCid:
 5592 return Symbols::Double().ToCString();
 5593 case kOneByteStringCid:
 5594 case kTwoByteStringCid:
 5595 return Symbols::_String().ToCString();
 5596 case kArrayCid:
 5597 case kImmutableArrayCid:
 5598 case kGrowableObjectArrayCid:
 5599 return Symbols::List().ToCString();
 5600 }
 5601 String& name = String::Handle(Name());
 5603 if (name.ptr() == Symbols::_Future().ptr() &&
 5605 return Symbols::Future().ToCString();
 5606 }
 5607 return name.ToCString();
 5608}
5609
5610void Class::set_script(const Script& value) const {
5611 untag()->set_script(value.ptr());
5612}
5613
5614#if !defined(DART_PRECOMPILED_RUNTIME)
5615KernelProgramInfoPtr Class::KernelProgramInfo() const {
5616 const auto& lib = Library::Handle(library());
5617 return lib.kernel_program_info();
5618}
5619
// NOTE(review): extraction gap — the signature (presumably
// Class::set_token_pos) and any preceding assert are missing; only the raw
// store of the token position field survives.
 5622 StoreNonPointer(&untag()->token_pos_, token_pos);
 5623}
5624
// NOTE(review): extraction gap — the signature (presumably
// Class::set_end_token_pos) is missing; only the raw store of the
// end-token-position field survives.
 5627 StoreNonPointer(&untag()->end_token_pos_, token_pos);
 5628}
5629
5630void Class::set_implementor_cid(intptr_t value) const {
5631 ASSERT(value >= 0 && value < std::numeric_limits<classid_t>::max());
5632 StoreNonPointer(&untag()->implementor_cid_, value);
5633}
5634
5635bool Class::NoteImplementor(const Class& implementor) const {
5636 ASSERT(!implementor.is_abstract());
5637 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5638 if (implementor_cid() == kDynamicCid) {
5639 return false;
5640 } else if (implementor_cid() == implementor.id()) {
5641 return false;
5642 } else if (implementor_cid() == kIllegalCid) {
5643 set_implementor_cid(implementor.id());
5644 return true; // None -> One
5645 } else {
5646 set_implementor_cid(kDynamicCid);
5647 return true; // One -> Many
5648 }
5649}
5650#endif // !defined(DART_PRECOMPILED_RUNTIME)
5651
5652uint32_t Class::Hash() const {
5653 return Class::Hash(ptr());
5654}
5655uint32_t Class::Hash(ClassPtr obj) {
5656 return String::HashRawSymbol(obj.untag()->name());
5657}
5658
// NOTE(review): extraction gap — the signature (presumably
// Class::SourceFingerprint) and the fingerprint-helper call (line 5661) are
// missing. In precompiled runtimes the fingerprint is a constant 0.
5660#if !defined(DART_PRECOMPILED_RUNTIME)
 5662 *this);
5663#else
 5664 return 0;
5665#endif  // !defined(DART_PRECOMPILED_RUNTIME)
 5666}
5667
// NOTE(review): extraction gap — signature (5668) and body (5670) missing;
// only the program-lock-writer assertion of this state-bit setter survives.
 5669 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5671}
5672
// NOTE(review): extraction gap — signature line missing. Body sets the
// Implemented state bit without a lock assertion ("unsafe" variant).
 5674 set_state_bits(ImplementedBit::update(true, state_bits()));
 5675}
5676
// NOTE(review): extraction gap — signature line missing. Sets the Abstract
// state bit; requires the program lock in write mode.
 5678 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5679 set_state_bits(AbstractBit::update(true, state_bits()));
 5680}
5681
// NOTE(review): extraction gap — signature (5682) and body (5684) missing;
// only the program-lock-writer assertion of this state-bit setter survives.
 5683 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5685}
5686
5692
// NOTE(review): extraction gap — signature (5693), additional asserts
// (5695-5696), and the set_state_bits argument (5698) are missing; confirm
// against upstream object.cc.
 5694 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5697 set_state_bits(
 5699}
5700
// NOTE(review): extraction gap — signature (5701) and body (5703) missing;
// only the program-lock-writer assertion survives.
 5702 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5704}
5705
// NOTE(review): extraction gap — signature line missing. Body sets the
// SynthesizedClass state bit without a lock assertion ("unsafe" variant).
 5707 set_state_bits(SynthesizedClassBit::update(true, state_bits()));
 5708}
5709
// NOTE(review): extraction gap — signature line missing. Sets the Enum state
// bit; requires the program lock in write mode.
 5711 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5712 set_state_bits(EnumBit::update(true, state_bits()));
 5713}
5714
// NOTE(review): extraction gap — signature line missing. Sets the Const state
// bit; requires the program lock in write mode.
 5716 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5717 set_state_bits(ConstBit::update(true, state_bits()));
 5718}
5719
// NOTE(review): extraction gap — signature line missing. Sets the
// TransformedMixinApplication state bit; requires the program lock in write
// mode.
 5721 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5722 set_state_bits(TransformedMixinApplicationBit::update(true, state_bits()));
 5723}
5724
// NOTE(review): extraction gap — signature line missing. Sets the Sealed
// state bit; requires the program lock in write mode.
 5726 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5727 set_state_bits(SealedBit::update(true, state_bits()));
 5728}
5729
// NOTE(review): extraction gap — signature line missing. Sets the MixinClass
// state bit; requires the program lock in write mode.
 5731 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5732 set_state_bits(MixinClassBit::update(true, state_bits()));
 5733}
5734
// NOTE(review): extraction gap — signature line missing. Sets the BaseClass
// state bit; requires the program lock in write mode.
 5736 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5737 set_state_bits(BaseClassBit::update(true, state_bits()));
 5738}
5739
// NOTE(review): extraction gap — signature line missing. Sets the
// InterfaceClass state bit; requires the program lock in write mode.
 5741 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5742 set_state_bits(InterfaceClassBit::update(true, state_bits()));
 5743}
5744
// NOTE(review): extraction gap — signature line missing. Sets the Final state
// bit; requires the program lock in write mode.
 5746 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5747 set_state_bits(FinalBit::update(true, state_bits()));
 5748}
5749
// NOTE(review): extraction gap — signature line missing. Sets the
// FieldsMarkedNullable state bit; requires the program lock in write mode.
 5751 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5752 set_state_bits(FieldsMarkedNullableBit::update(true, state_bits()));
 5753}
5754
// Locked variant of the IsAllocated setter.
// NOTE(review): extraction gap — the body line (5757, presumably delegating
// to set_is_allocated_unsafe) is missing; confirm against upstream object.cc.
5755void Class::set_is_allocated(bool value) const {
 5756 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5758}
5759
5760void Class::set_is_allocated_unsafe(bool value) const {
5761 set_state_bits(IsAllocatedBit::update(value, state_bits()));
5762}
5763
5764void Class::set_is_loaded(bool value) const {
5765 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
5766 set_state_bits(IsLoadedBit::update(value, state_bits()));
5767}
5768
// NOTE(review): extraction gap — signature (5769) and body (5772) missing;
// the surviving asserts require the program-lock writer and a not-yet-
// finalized class.
 5770 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5771 ASSERT(!is_finalized());
 5773}
5774
// NOTE(review): extraction gap — signature (5775) and the set_state_bits
// argument (5777) are missing; confirm against upstream object.cc.
 5776 set_state_bits(
 5778}
5779
// NOTE(review): extraction gap — signature (5780) and the start of the
// set_state_bits call (5782-5783) are missing; only the writer assertion and
// the trailing argument survive.
 5781 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5784 state_bits()));
 5785}
5786
// NOTE(review): extraction gap — signature (5787) and the set_state_bits
// argument (5791) are missing. Callers elsewhere in this file suggest this is
// set_is_prefinalized — confirm against upstream object.cc.
 5788 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5789 ASSERT(!is_finalized());
 5790 set_state_bits(
 5792}
5793
5794void Class::set_interfaces(const Array& value) const {
5795 ASSERT(!value.IsNull());
5796 untag()->set_interfaces(value.ptr());
5797}
5798
5799#if !defined(DART_PRECOMPILED_RUNTIME)
5800
// Appends `implementor` to this class's direct-implementors list, creating
// the list on first use. Requires the program lock in write mode.
// NOTE(review): embedded-number jumps (5804, 5806-5807, 5809) mark source
// lines lost in extraction (including the declaration of
// `direct_implementors`); confirm against upstream object.cc.
5801void Class::AddDirectImplementor(const Class& implementor,
 5802 bool is_mixin) const {
 5803 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5805 ASSERT(!implementor.IsNull());
 5808 if (direct_implementors.IsNull()) {
 5810 untag()->set_direct_implementors(direct_implementors.ptr());
 5811 }
5812#if defined(DEBUG)
 5813 // Verify that the same class is not added twice.
 5814 // The only exception is mixins: when mixin application is transformed,
 5815 // mixin is added to the end of interfaces list and may be duplicated:
 5816 // class X = A with B implements B;
 5817 // This is rare and harmless.
 5818 if (!is_mixin) {
 5819 for (intptr_t i = 0; i < direct_implementors.Length(); i++) {
 5820 ASSERT(direct_implementors.At(i) != implementor.ptr());
 5821 }
 5822 }
5823#endif
 5824 direct_implementors.Add(implementor, Heap::kOld);
 5825}
5826
// NOTE(review): extraction gap — the first signature line (5827) is missing.
// Replaces the direct-implementors list wholesale; requires the program lock
// in write mode.
 5828 const GrowableObjectArray& implementors) const {
 5829 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5830 untag()->set_direct_implementors(implementors.ptr());
 5831}
5832
// Appends `subclass` (whose superclass must be this class) to the
// direct-subclasses list, creating it on first use. Requires the program lock
// in write mode.
// NOTE(review): embedded-number jumps (5838-5840, 5842) mark source lines
// lost in extraction (including the declaration of `direct_subclasses`);
// confirm against upstream object.cc.
5833void Class::AddDirectSubclass(const Class& subclass) const {
 5834 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5835 ASSERT(!subclass.IsNull());
 5836 ASSERT(subclass.SuperClass() == ptr());
 5837 // Do not keep track of the direct subclasses of class Object.
 5841 if (direct_subclasses.IsNull()) {
 5843 untag()->set_direct_subclasses(direct_subclasses.ptr());
 5844 }
5845#if defined(DEBUG)
 5846 // Verify that the same class is not added twice.
 5847 for (intptr_t i = 0; i < direct_subclasses.Length(); i++) {
 5848 ASSERT(direct_subclasses.At(i) != subclass.ptr());
 5849 }
5850#endif
 5851 direct_subclasses.Add(subclass, Heap::kOld);
 5852}
5853
// NOTE(review): extraction gap — the signature line (5854) is missing.
// Replaces the direct-subclasses list wholesale; requires the program lock in
// write mode.
 5855 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
 5856 untag()->set_direct_subclasses(subclasses.ptr());
 5857}
5858
5859#endif // !defined(DART_PRECOMPILED_RUNTIME)
5860
5861ArrayPtr Class::constants() const {
5862 return untag()->constants();
5863}
5864
5865void Class::set_constants(const Array& value) const {
5866 untag()->set_constants(value.ptr());
5867}
5868
// Caches the canonical declaration type. Write-once (null -> value) and
// published with a release store so readers racing on DeclarationType() see a
// fully-initialized Type.
5869void Class::set_declaration_type(const Type& value) const {
 5870 ASSERT(id() != kDynamicCid && id() != kVoidCid);
 5871 ASSERT(!value.IsNull() && value.IsCanonical() && value.IsOld());
 5872 ASSERT((declaration_type() == Object::null()) ||
 5873 (declaration_type() == value.ptr()));  // Set during own finalization.
 5874 // Since DeclarationType is used as the runtime type of instances of a
 5875 // non-generic class, its nullability must be kNonNullable.
 5876 // The exception is DeclarationType of Null which is kNullable.
 5877 ASSERT(value.type_class_id() != kNullCid || value.IsNullable());
 5878 ASSERT(value.type_class_id() == kNullCid || value.IsNonNullable());
 5879 untag()->set_declaration_type<std::memory_order_release>(value.ptr());
}
5881
// Returns (building and caching on first use) the type of this class as
// declared, parameterized with its own type parameters. Null/dynamic/void get
// their fixed singleton types; otherwise a double-checked pattern under the
// program write lock fills the cache.
// NOTE(review): embedded-number jumps (5883, 5917-5918 — the Type::New
// arguments) mark source lines lost in extraction; confirm upstream.
5882TypePtr Class::DeclarationType() const {
 5884 if (IsNullClass()) {
 5885 return Type::NullType();
 5886 }
 5887 if (IsDynamicClass()) {
 5888 return Type::DynamicType();
 5889 }
 5890 if (IsVoidClass()) {
 5891 return Type::VoidType();
 5892 }
 5893 if (declaration_type() != Type::null()) {
 5894 return declaration_type();
 5895 }
 5896 {
 5897 auto thread = Thread::Current();
 5898 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
 5899 if (declaration_type() != Type::null()) {
 5900 return declaration_type();
 5901 }
 5902 // For efficiency, the runtimeType intrinsic returns the type cached by
 5903 // DeclarationType without checking its nullability. Therefore, we
 5904 // consistently cache the kNonNullable version of the type.
 5905 // The exception is type Null which is stored as kNullable.
 5906 TypeArguments& type_args = TypeArguments::Handle();
 5907 const intptr_t num_type_params = NumTypeParameters();
 5908 if (num_type_params > 0) {
 5909 type_args = TypeArguments::New(num_type_params);
 5910 TypeParameter& type_param = TypeParameter::Handle();
 5911 for (intptr_t i = 0; i < num_type_params; i++) {
 5912 type_param = TypeParameterAt(i);
 5913 type_args.SetTypeAt(i, type_param);
 5914 }
 5915 }
 5916 Type& type =
 5919 set_declaration_type(type);
 5920 return type.ptr();
 5921 }
 5922}
5923
// Installs the allocation stub for this class (JIT only).
// NOTE(review): extraction gap — line 5929 (presumably an assert that no stub
// is installed yet) is missing; confirm against upstream object.cc.
5924#if !defined(DART_PRECOMPILED_RUNTIME)
5925void Class::set_allocation_stub(const Code& value) const {
 5926 // Never clear the stub as it may still be a target, but will be GC-d if
 5927 // not referenced.
 5928 ASSERT(!value.IsNull());
 5930 untag()->set_allocation_stub(value.ptr());
 5931}
5932#endif  // !defined(DART_PRECOMPILED_RUNTIME)
5933
// NOTE(review): extraction gap — the signature line (5934, presumably
// Class::DisableAllocationStub) is missing. Disables and detaches this
// class's allocation stub so the next allocation regenerates it; double-
// checks under the program write lock after a fast unlocked null check.
5935#if defined(DART_PRECOMPILED_RUNTIME)
 5936 UNREACHABLE();
5937#else
 5938 {
 5939 const Code& existing_stub = Code::Handle(allocation_stub());
 5940 if (existing_stub.IsNull()) {
 5941 return;
 5942 }
 5943 }
 5944 auto thread = Thread::Current();
 5945 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
 5946 const Code& existing_stub = Code::Handle(allocation_stub());
 5947 if (existing_stub.IsNull()) {
 5948 return;
 5949 }
 5950 ASSERT(!existing_stub.IsDisabled());
 5951 // Change the stub so that the next caller will regenerate the stub.
 5952 existing_stub.DisableStubCode(NumTypeParameters() > 0);
 5953 // Disassociate the existing stub from class.
 5954 untag()->set_allocation_stub(Code::null());
5955#endif  // defined(DART_PRECOMPILED_RUNTIME)
 5956}
5957
// NOTE(review): extraction gap — the signature line (5958, presumably
// Class::IsDartFunctionClass) is missing. Tests identity with the class of
// the Dart Function type.
 5959 return ptr() == Type::Handle(Type::DartFunctionType()).type_class();
 5960}
5961
// NOTE(review): extraction gap — the signature line (5962, presumably
// Class::IsFutureClass) and the second operand of the && (5967, presumably a
// library check) are missing; confirm against upstream object.cc.
 5963 // Looking up future_class in the object store would not work, because
 5964 // this function is called during class finalization, before the object store
 5965 // field would be initialized by InitKnownObjects().
 5966 return (Name() == Symbols::Future().ptr()) &&
 5968}
5969
5970// Checks if type T0 is a subtype of type T1.
5971// Type T0 is specified by class 'cls' parameterized with 'type_arguments' and
5972// by 'nullability', and type T1 is specified by 'other' and must have a type
5973// class.
5974// [type_arguments] should be a flattened instance type arguments vector.
// NOTE(review): this listing has extraction gaps — jumps in the embedded line
// numbers (5975 signature, 6001, 6058, 6069, 6080, 6090, 6096, 6103, 6115,
// 6122) mark physical source lines lost, mostly TRACE macro openings and two
// local declarations; confirm against upstream object.cc. Algorithm: iterate
// up the superclass chain (instead of recursing on the receiver), applying
// FutureOr rules on both sides, reflexivity with covariant type arguments,
// the _Closure <: Function rule, and the implemented-interfaces check.
 5976 const TypeArguments& type_arguments,
 5977 Nullability nullability,
 5978 const AbstractType& other,
 5979 Heap::Space space,
 5980 FunctionTypeMapping* function_type_equivalence) {
 5981 TRACE_TYPE_CHECKS_VERBOSE(" Class::IsSubtypeOf(%s %s, %s)\n",
 5982 cls.ToCString(), type_arguments.ToCString(),
 5983 other.ToCString());
 5984 // This function does not support Null, Never, dynamic, or void as type T0.
 5985 classid_t this_cid = cls.id();
 5986 ASSERT(this_cid != kNullCid && this_cid != kNeverCid &&
 5987 this_cid != kDynamicCid && this_cid != kVoidCid);
 5988 ASSERT(type_arguments.IsNull() ||
 5989 (type_arguments.Length() >= cls.NumTypeArguments()));
 5990 // Type T1 must have a type class (e.g. not a type param or a function type).
 5991 ASSERT(other.HasTypeClass());
 5992 const classid_t other_cid = other.type_class_id();
 5993 if (other_cid == kDynamicCid || other_cid == kVoidCid) {
 5994 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (right is top)\n");
 5995 return true;
 5996 }
 5997 // Left nullable:
 5998 // if T0 is S0? then:
 5999 // T0 <: T1 iff S0 <: T1 and Null <: T1
 6000 if ((nullability == Nullability::kNullable) &&
 6002 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (nullability)\n");
 6003 return false;
 6004 }
 6005
 6006 // Right Object.
 6007 if (other_cid == kObjectCid) {
 6008 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (right is Object)\n");
 6009 return true;
 6010 }
 6011
 6012 Thread* thread = Thread::Current();
 6013 Zone* zone = thread->zone();
 6014 const Class& other_class = Class::Handle(zone, other.type_class());
 6015 const TypeArguments& other_type_arguments =
 6016 TypeArguments::Handle(zone, other.arguments());
 6017 // Use the 'this_class' object as if it was the receiver of this method, but
 6018 // instead of recursing, reset it to the super class and loop.
 6019 Class& this_class = Class::Handle(zone, cls.ptr());
 6020 while (true) {
 6021 // Apply additional subtyping rules if T0 or T1 are 'FutureOr'.
 6022
 6023 // Left FutureOr:
 6024 // if T0 is FutureOr<S0> then:
 6025 // T0 <: T1 iff Future<S0> <: T1 and S0 <: T1
 6026 if (this_cid == kFutureOrCid) {
 6027 // Check Future<S0> <: T1.
 6028 ObjectStore* object_store = IsolateGroup::Current()->object_store();
 6029 const Class& future_class =
 6030 Class::Handle(zone, object_store->future_class());
 6031 ASSERT(!future_class.IsNull() && future_class.NumTypeParameters() == 1 &&
 6032 this_class.NumTypeParameters() == 1);
 6033 ASSERT(type_arguments.IsNull() || type_arguments.Length() >= 1);
 6034 if (Class::IsSubtypeOf(future_class, type_arguments,
 6035 Nullability::kNonNullable, other, space,
 6036 function_type_equivalence)) {
 6037 // Check S0 <: T1.
 6038 const AbstractType& type_arg =
 6039 AbstractType::Handle(zone, type_arguments.TypeAtNullSafe(0));
 6040 if (type_arg.IsSubtypeOf(other, space, function_type_equivalence)) {
 6041 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (left is FutureOr)\n");
 6042 return true;
 6043 }
 6044 }
 6045 }
 6046
 6047 // Right FutureOr:
 6048 // if T1 is FutureOr<S1> then:
 6049 // T0 <: T1 iff any of the following hold:
 6050 // either T0 <: Future<S1>
 6051 // or T0 <: S1
 6052 // or T0 is X0 and X0 has bound S0 and S0 <: T1 (checked elsewhere)
 6053 if (other_cid == kFutureOrCid) {
 6054 const AbstractType& other_type_arg =
 6055 AbstractType::Handle(zone, other_type_arguments.TypeAtNullSafe(0));
 6056 // Check if S1 is a top type.
 6057 if (other_type_arg.IsTopTypeForSubtyping()) {
 6059 " - result: true (right is FutureOr top)\n");
 6060 return true;
 6061 }
 6062 // Check T0 <: Future<S1> when T0 is Future<S0>.
 6063 if (this_class.IsFutureClass()) {
 6064 const AbstractType& type_arg =
 6065 AbstractType::Handle(zone, type_arguments.TypeAtNullSafe(0));
 6066 // If T0 is Future<S0>, then T0 <: Future<S1>, iff S0 <: S1.
 6067 if (type_arg.IsSubtypeOf(other_type_arg, space,
 6068 function_type_equivalence)) {
 6070 " - result: true (left is Future, right is FutureOr)\n");
 6071 return true;
 6072 }
 6073 }
 6074 // Check T0 <: Future<S1> when T0 is FutureOr<S0> is already done.
 6075 // Check T0 <: S1.
 6076 if (other_type_arg.HasTypeClass() &&
 6077 Class::IsSubtypeOf(this_class, type_arguments, nullability,
 6078 other_type_arg, space,
 6079 function_type_equivalence)) {
 6081 " - result: true (right is FutureOr, subtype of arg)\n");
 6082 return true;
 6083 }
 6084 }
 6085
 6086 // Check for reflexivity.
 6087 if (this_class.ptr() == other_class.ptr()) {
 6088 const intptr_t num_type_params = this_class.NumTypeParameters();
 6089 if (num_type_params == 0) {
 6091 " - result: true (same non-generic class)\n");
 6092 return true;
 6093 }
 6094 // Check for covariance.
 6095 if (other_type_arguments.IsNull()) {
 6097 " - result: true (same class, dynamic type args)\n");
 6098 return true;
 6099 }
 6100 const intptr_t num_type_args = this_class.NumTypeArguments();
 6101 const intptr_t from_index = num_type_args - num_type_params;
 6102 ASSERT(other_type_arguments.Length() == num_type_params);
 6104 AbstractType& other_type = AbstractType::Handle(zone);
 6105 for (intptr_t i = 0; i < num_type_params; ++i) {
 6106 type = type_arguments.TypeAtNullSafe(from_index + i);
 6107 other_type = other_type_arguments.TypeAt(i);
 6108 ASSERT(!type.IsNull() && !other_type.IsNull());
 6109 if (!type.IsSubtypeOf(other_type, space, function_type_equivalence)) {
 6111 " - result: false (same class, type args mismatch)\n");
 6112 return false;
 6113 }
 6114 }
 6116 " - result: true (same class, matching type args)\n");
 6117 return true;
 6118 }
 6119
 6120 // _Closure <: Function
 6121 if (this_class.IsClosureClass() && other_class.IsDartFunctionClass()) {
 6123 " - result: true (left is closure, right is Function)\n");
 6124 return true;
 6125 }
 6126
 6127 // Check for 'direct super type' specified in the implements clause
 6128 // and check for transitivity at the same time.
 6129 Array& interfaces = Array::Handle(zone, this_class.interfaces());
 6130 Type& interface = Type::Handle(zone);
 6131 Class& interface_class = Class::Handle(zone);
 6132 TypeArguments& interface_args = TypeArguments::Handle(zone);
 6133 for (intptr_t i = 0; i < interfaces.Length(); i++) {
 6134 interface ^= interfaces.At(i);
 6135 ASSERT(interface.IsFinalized());
 6136 interface_class = interface.type_class();
 6137 interface_args = interface.arguments();
 6138 if (!interface_args.IsNull() && !interface_args.IsInstantiated()) {
 6139 // This type class implements an interface that is parameterized with
 6140 // generic type(s), e.g. it implements List<T>.
 6141 // The uninstantiated type T must be instantiated using the type
 6142 // parameters of this type before performing the type test.
 6143 // The type arguments of this type that are referred to by the type
 6144 // parameters of the interface are at the end of the type vector,
 6145 // after the type arguments of the super type of this type.
 6146 // The index of the type parameters is adjusted upon finalization.
 6147 interface_args = interface_args.InstantiateFrom(
 6148 type_arguments, Object::null_type_arguments(), kNoneFree, space);
 6149 }
 6150 interface_args = interface_class.GetInstanceTypeArguments(
 6151 thread, interface_args, /*canonicalize=*/false);
 6152 // In Dart 2, implementing Function has no meaning.
 6153 // TODO(regis): Can we encounter and skip Object as well?
 6154 if (interface_class.IsDartFunctionClass()) {
 6155 continue;
 6156 }
 6157 if (Class::IsSubtypeOf(interface_class, interface_args,
 6158 Nullability::kNonNullable, other, space,
 6159 function_type_equivalence)) {
 6160 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (interface found)\n");
 6161 return true;
 6162 }
 6163 }
 6164 // "Recurse" up the class hierarchy until we have reached the top.
 6165 this_class = this_class.SuperClass();
 6166 if (this_class.IsNull()) {
 6167 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (supertype not found)\n");
 6168 return false;
 6169 }
 6170 this_cid = this_class.id();
 6171 }
 6172 UNREACHABLE();
 6173 return false;
 6174}
6175
6176bool Class::IsTopLevel() const {
6177 return Name() == Symbols::TopLevel().ptr();
6178}
6179
// NOTE(review): extraction gap — the body line (6181) of this predicate is
// missing; confirm against upstream object.cc.
6180bool Class::IsPrivate() const {
 6182}
6183
// NOTE(review): extraction gap — signature line (6184) missing; delegates an
// instance-member lookup to the read-locked path.
 6185 return LookupFunctionReadLocked(name, kInstance);
 6186}
6187
// NOTE(review): extraction gap — signature line (6188) missing; delegates an
// instance-member lookup to the private-key-insensitive path.
 6189 return LookupFunctionAllowPrivate(name, kInstance);
 6190}
6191
6192FunctionPtr Class::LookupStaticFunction(const String& name) const {
6193 Thread* thread = Thread::Current();
6194 SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
6195 return LookupFunctionReadLocked(name, kStatic);
6196}
6197
// NOTE(review): extraction gap — signature line (6198) missing; delegates a
// static-member lookup to the private-key-insensitive path.
 6199 return LookupFunctionAllowPrivate(name, kStatic);
 6200}
6201
6202FunctionPtr Class::LookupConstructor(const String& name) const {
6203 Thread* thread = Thread::Current();
6204 SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
6205 return LookupFunctionReadLocked(name, kConstructor);
6206}
6207
// NOTE(review): extraction gap — signature line (6208) missing; delegates a
// constructor lookup to the private-key-insensitive path.
 6209 return LookupFunctionAllowPrivate(name, kConstructor);
 6210}
6211
6212FunctionPtr Class::LookupFactory(const String& name) const {
6213 Thread* thread = Thread::Current();
6214 SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
6215 return LookupFunctionReadLocked(name, kFactory);
6216}
6217
// NOTE(review): extraction gap — signature line (6218) missing; delegates a
// factory lookup to the private-key-insensitive path.
 6219 return LookupFunctionAllowPrivate(name, kFactory);
 6220}
6221
// NOTE(review): extraction gap — signature line (6222) missing; looks up a
// member of any kind, ignoring private keys.
 6223 return LookupFunctionAllowPrivate(name, kAny);
 6224}
6225
// NOTE(review): extraction gap — signature line (6226) missing; looks up a
// member of any kind; caller must hold the program lock for reading.
 6227 return LookupFunctionReadLocked(name, kAny);
 6228}
6229
6230// Returns true if 'prefix' and 'accessor_name' match 'name'.
// NOTE(review): extraction gap — the first signature line (6231, presumably
// `static bool MatchesAccessorName(const String& name,`) is missing.
// Checks length first, then compares the prefix characters, then the
// accessor-name characters, without building a temporary concatenation.
 6232 const char* prefix,
 6233 intptr_t prefix_length,
 6234 const String& accessor_name) {
 6235 intptr_t name_len = name.Length();
 6236 intptr_t accessor_name_len = accessor_name.Length();
 6237
 6238 if (name_len != (accessor_name_len + prefix_length)) {
 6239 return false;
 6240 }
 6241 for (intptr_t i = 0; i < prefix_length; i++) {
 6242 if (name.CharAt(i) != prefix[i]) {
 6243 return false;
 6244 }
 6245 }
 6246 for (intptr_t i = 0, j = prefix_length; i < accessor_name_len; i++, j++) {
 6247 if (name.CharAt(j) != accessor_name.CharAt(i)) {
 6248 return false;
 6249 }
 6250 }
 6251 return true;
 6252}
6253
6254FunctionPtr Class::CheckFunctionType(const Function& func, MemberKind kind) {
6255 if ((kind == kInstance) || (kind == kInstanceAllowAbstract)) {
6256 if (func.IsDynamicFunction(kind == kInstanceAllowAbstract)) {
6257 return func.ptr();
6258 }
6259 } else if (kind == kStatic) {
6260 if (func.IsStaticFunction()) {
6261 return func.ptr();
6262 }
6263 } else if (kind == kConstructor) {
6264 if (func.IsGenerativeConstructor()) {
6265 ASSERT(!func.is_static());
6266 return func.ptr();
6267 }
6268 } else if (kind == kFactory) {
6269 if (func.IsFactory()) {
6270 ASSERT(func.is_static());
6271 return func.ptr();
6272 }
6273 } else if (kind == kAny) {
6274 return func.ptr();
6275 }
6276 return Function::null();
6277}
6278
// Looks up a member function by name while the caller holds the program lock
// (read or write). Uses the functions hash table when the class has many
// functions, a raw-pointer symbol compare when `name` is a symbol, and a
// string-equality scan otherwise.
// NOTE(review): embedded-number jumps (6283, 6289-6291, 6305, 6323) mark
// source lines lost in extraction — presumably reusable-handle scope macros;
// confirm against upstream object.cc.
6279FunctionPtr Class::LookupFunctionReadLocked(const String& name,
 6280 MemberKind kind) const {
 6281 ASSERT(!IsNull());
 6282 Thread* thread = Thread::Current();
 6284 // Caller needs to ensure they grab program_lock because this method
 6285 // can be invoked with either ReadRwLock or WriteRwLock.
6286#if defined(DEBUG)
 6287 ASSERT(thread->isolate_group()->program_lock()->IsCurrentThreadReader());
6288#endif
 6292 Array& funcs = thread->ArrayHandle();
 6293 funcs = functions();
 6294 const intptr_t len = funcs.Length();
 6295 Function& function = thread->FunctionHandle();
 6296 if (len >= kFunctionLookupHashThreshold) {
 6297 // TODO(dartbug.com/36097): We require currently a read lock in the resolver
 6298 // to avoid read-write race access to this hash table.
 6299 // If we want to increase resolver speed by avoiding the need for read lock,
 6300 // we could make change this hash table to be lock-free for the reader.
 6301 const Array& hash_table =
 6302 Array::Handle(thread->zone(), untag()->functions_hash_table());
 6303 if (!hash_table.IsNull()) {
 6304 ClassFunctionsSet set(hash_table.ptr());
 6306 function ^= set.GetOrNull(FunctionName(name, &(thread->StringHandle())));
 6307 // No mutations.
 6308 ASSERT(set.Release().ptr() == hash_table.ptr());
 6309 return function.IsNull() ? Function::null()
 6310 : CheckFunctionType(function, kind);
 6311 }
 6312 }
 6313 if (name.IsSymbol()) {
 6314 // Quick Symbol compare.
 6315 NoSafepointScope no_safepoint;
 6316 for (intptr_t i = 0; i < len; i++) {
 6317 function ^= funcs.At(i);
 6318 if (function.name() == name.ptr()) {
 6319 return CheckFunctionType(function, kind);
 6320 }
 6321 }
 6322 } else {
 6324 String& function_name = thread->StringHandle();
 6325 for (intptr_t i = 0; i < len; i++) {
 6326 function ^= funcs.At(i);
 6327 function_name = function.name();
 6328 if (function_name.Equals(name)) {
 6329 return CheckFunctionType(function, kind);
 6330 }
 6331 }
 6332 }
 6333 // No function found.
 6334 return Function::null();
 6335}
6336
6337FunctionPtr Class::LookupFunctionAllowPrivate(const String& name,
6338 MemberKind kind) const {
6339 ASSERT(!IsNull());
6340 Thread* thread = Thread::Current();
6342 SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
6346 Array& funcs = thread->ArrayHandle();
6347 funcs = current_functions();
6348 ASSERT(!funcs.IsNull());
6349 const intptr_t len = funcs.Length();
6350 Function& function = thread->FunctionHandle();
6351 String& function_name = thread->StringHandle();
6352 for (intptr_t i = 0; i < len; i++) {
6353 function ^= funcs.At(i);
6354 function_name = function.name();
6356 return CheckFunctionType(function, kind);
6357 }
6358 }
6359 // No function found.
6360 return Function::null();
6361}
6362
6363FunctionPtr Class::LookupGetterFunction(const String& name) const {
6364 return LookupAccessorFunction(kGetterPrefix, kGetterPrefixLength, name);
6365}
6366
6367FunctionPtr Class::LookupSetterFunction(const String& name) const {
6368 return LookupAccessorFunction(kSetterPrefix, kSetterPrefixLength, name);
6369}
6370
6371FunctionPtr Class::LookupAccessorFunction(const char* prefix,
6372 intptr_t prefix_length,
6373 const String& name) const {
6374 ASSERT(!IsNull());
6375 Thread* thread = Thread::Current();
6376 if (EnsureIsFinalized(thread) != Error::null()) {
6377 return Function::null();
6378 }
6382 Array& funcs = thread->ArrayHandle();
6383 funcs = current_functions();
6384 intptr_t len = funcs.Length();
6385 Function& function = thread->FunctionHandle();
6386 String& function_name = thread->StringHandle();
6387 for (intptr_t i = 0; i < len; i++) {
6388 function ^= funcs.At(i);
6389 function_name = function.name();
6390 if (MatchesAccessorName(function_name, prefix, prefix_length, name)) {
6391 return function.ptr();
6392 }
6393 }
6394
6395 // No function found.
6396 return Function::null();
6397}
6398
6400 return LookupField(name, kInstance);
6401}
6402
6403FieldPtr Class::LookupStaticField(const String& name) const {
6404 return LookupField(name, kStatic);
6405}
6406
6407FieldPtr Class::LookupField(const String& name) const {
6408 return LookupField(name, kAny);
6409}
6410
6411FieldPtr Class::LookupField(const String& name, MemberKind kind) const {
6412 ASSERT(!IsNull());
6413 Thread* thread = Thread::Current();
6414 if (EnsureIsFinalized(thread) != Error::null()) {
6415 return Field::null();
6416 }
6420 Array& flds = thread->ArrayHandle();
6421 flds = fields();
6422 ASSERT(!flds.IsNull());
6423 intptr_t len = flds.Length();
6424 Field& field = thread->FieldHandle();
6425 if (name.IsSymbol()) {
6426 // Use fast raw pointer string compare for symbols.
6427 for (intptr_t i = 0; i < len; i++) {
6428 field ^= flds.At(i);
6429 if (name.ptr() == field.name()) {
6430 if (kind == kInstance) {
6431 return field.is_static() ? Field::null() : field.ptr();
6432 } else if (kind == kStatic) {
6433 return field.is_static() ? field.ptr() : Field::null();
6434 }
6435 ASSERT(kind == kAny);
6436 return field.ptr();
6437 }
6438 }
6439 } else {
6440 String& field_name = thread->StringHandle();
6441 for (intptr_t i = 0; i < len; i++) {
6442 field ^= flds.At(i);
6443 field_name = field.name();
6444 if (name.Equals(field_name)) {
6445 if (kind == kInstance) {
6446 return field.is_static() ? Field::null() : field.ptr();
6447 } else if (kind == kStatic) {
6448 return field.is_static() ? field.ptr() : Field::null();
6449 }
6450 ASSERT(kind == kAny);
6451 return field.ptr();
6452 }
6453 }
6454 }
6455 return Field::null();
6456}
6457
6459 bool instance_only) const {
6460 ASSERT(!IsNull());
6461 // Use slow string compare, ignoring privacy name mangling.
6462 Thread* thread = Thread::Current();
6463 if (EnsureIsFinalized(thread) != Error::null()) {
6464 return Field::null();
6465 }
6469 Array& flds = thread->ArrayHandle();
6470 flds = fields();
6471 ASSERT(!flds.IsNull());
6472 intptr_t len = flds.Length();
6473 Field& field = thread->FieldHandle();
6474 String& field_name = thread->StringHandle();
6475 for (intptr_t i = 0; i < len; i++) {
6476 field ^= flds.At(i);
6477 field_name = field.name();
6478 if (field.is_static() && instance_only) {
6479 // If we only care about instance fields, skip statics.
6480 continue;
6481 }
6482 if (String::EqualsIgnoringPrivateKey(field_name, name)) {
6483 return field.ptr();
6484 }
6485 }
6486 return Field::null();
6487}
6488
6491 if (!field.IsNull() && !field.is_static()) {
6492 return field.ptr();
6493 }
6494 return Field::null();
6495}
6496
6499 if (!field.IsNull() && field.is_static()) {
6500 return field.ptr();
6501 }
6502 return Field::null();
6503}
6504
6505const char* Class::ToCString() const {
6506 NoSafepointScope no_safepoint;
6507 const Library& lib = Library::Handle(library());
6508 const char* library_name = lib.IsNull() ? "" : lib.ToCString();
6509 const char* class_name = String::Handle(Name()).ToCString();
6510 return OS::SCreate(Thread::Current()->zone(), "%s Class: %s", library_name,
6511 class_name);
6512}
6513
6514// Thomas Wang, Integer Hash Functions.
6515// https://gist.github.com/badboy/6267743
6516// "64 bit to 32 bit Hash Functions"
6517static uword Hash64To32(uint64_t v) {
6518 v = ~v + (v << 18);
6519 v = v ^ (v >> 31);
6520 v = v * 21;
6521 v = v ^ (v >> 11);
6522 v = v + (v << 6);
6523 v = v ^ (v >> 22);
6524 return static_cast<uint32_t>(v);
6525}
6526
6528 const Instance& value) const {
6529 ASSERT(this->ptr() == value.clazz());
6531 Instance& canonical_value = Instance::Handle(zone);
6532 if (this->constants() != Array::null()) {
6534 canonical_value ^= constants.GetOrNull(CanonicalInstanceKey(value));
6535 this->set_constants(constants.Release());
6536 }
6537 return canonical_value.ptr();
6538}
6539
6541 const Instance& constant) const {
6542 ASSERT(constant.IsCanonical());
6543 ASSERT(this->ptr() == constant.clazz());
6544 Instance& canonical_value = Instance::Handle(zone);
6545 if (this->constants() == Array::null()) {
6547 HashTables::New<CanonicalInstancesSet>(128, Heap::kOld));
6548 canonical_value ^= constants.InsertNewOrGet(CanonicalInstanceKey(constant));
6549 this->set_constants(constants.Release());
6550 } else {
6552 this->constants());
6553 canonical_value ^= constants.InsertNewOrGet(CanonicalInstanceKey(constant));
6554 this->set_constants(constants.Release());
6555 }
6556 return canonical_value.ptr();
6557}
6558
6559// Scoped mapping FunctionType -> FunctionType.
6560// Used for tracking and updating nested generic function types
6561// and their type parameters.
6563 public:
6565 FunctionTypeMapping** mapping,
6566 const FunctionType& from,
6567 const FunctionType& to)
6568 : zone_(zone), parent_(*mapping), from_(from), to_(to) {
6569 // Add self to the linked list.
6570 *mapping = this;
6571 }
6572
6573 const FunctionType* Find(const Object& from) const {
6574 if (!from.IsFunctionType()) {
6575 return nullptr;
6576 }
6577 for (const FunctionTypeMapping* scope = this; scope != nullptr;
6578 scope = scope->parent_) {
6579 if (scope->from_.ptr() == from.ptr()) {
6580 return &(scope->to_);
6581 }
6582 }
6583 return nullptr;
6584 }
6585
6586 TypeParameterPtr MapTypeParameter(const TypeParameter& type_param) const {
6587 ASSERT(type_param.IsFunctionTypeParameter());
6588 const FunctionType* new_owner = Find(
6590 if (new_owner != nullptr) {
6591 return new_owner->TypeParameterAt(type_param.index() - type_param.base(),
6592 type_param.nullability());
6593 }
6594 return type_param.ptr();
6595 }
6596
6598 const TypeParameter& p2) const {
6599 auto& from = FunctionType::Handle(zone_, p1.parameterized_function_type());
6600 const FunctionType* to = Find(from);
6601 if (to != nullptr) {
6602 return to->ptr() == p2.parameterized_function_type();
6603 }
6604 from = p2.parameterized_function_type();
6605 to = Find(from);
6606 if (to != nullptr) {
6607 return to->ptr() == p1.parameterized_function_type();
6608 }
6609 return false;
6610 }
6611
6612 private:
6613 Zone* zone_;
6614 const FunctionTypeMapping* const parent_;
6615 const FunctionType& from_;
6616 const FunctionType& to_;
6617};
6618
6619intptr_t TypeParameters::Length() const {
6620 if (IsNull() || untag()->names() == Array::null()) return 0;
6621 return Smi::Value(untag()->names()->untag()->length());
6622}
6623
6624void TypeParameters::set_names(const Array& value) const {
6625 ASSERT(!value.IsNull());
6626 untag()->set_names(value.ptr());
6627}
6628
6629StringPtr TypeParameters::NameAt(intptr_t index) const {
6630 const Array& names_array = Array::Handle(names());
6631 return String::RawCast(names_array.At(index));
6632}
6633
6634void TypeParameters::SetNameAt(intptr_t index, const String& value) const {
6635 const Array& names_array = Array::Handle(names());
6636 names_array.SetAt(index, value);
6637}
6638
6639void TypeParameters::set_flags(const Array& value) const {
6640 untag()->set_flags(value.ptr());
6641}
6642
6643void TypeParameters::set_bounds(const TypeArguments& value) const {
6644 // A null value represents a vector of dynamic.
6645 untag()->set_bounds(value.ptr());
6646}
6647
6648AbstractTypePtr TypeParameters::BoundAt(intptr_t index) const {
6649 const TypeArguments& upper_bounds = TypeArguments::Handle(bounds());
6650 return upper_bounds.IsNull() ? Type::DynamicType()
6651 : upper_bounds.TypeAt(index);
6652}
6653
6654void TypeParameters::SetBoundAt(intptr_t index,
6655 const AbstractType& value) const {
6656 const TypeArguments& upper_bounds = TypeArguments::Handle(bounds());
6657 upper_bounds.SetTypeAt(index, value);
6658}
6659
6661 return bounds() == TypeArguments::null();
6662}
6663
6664void TypeParameters::set_defaults(const TypeArguments& value) const {
6665 // The null value represents a vector of dynamic.
6666 untag()->set_defaults(value.ptr());
6667}
6668
6669AbstractTypePtr TypeParameters::DefaultAt(intptr_t index) const {
6670 const TypeArguments& default_type_args = TypeArguments::Handle(defaults());
6671 return default_type_args.IsNull() ? Type::DynamicType()
6672 : default_type_args.TypeAt(index);
6673}
6674
6676 const AbstractType& value) const {
6677 const TypeArguments& default_type_args = TypeArguments::Handle(defaults());
6678 default_type_args.SetTypeAt(index, value);
6679}
6680
6682 return defaults() == TypeArguments::null();
6683}
6684
6685void TypeParameters::AllocateFlags(Heap::Space space) const {
6686 const intptr_t len = (Length() + kFlagsPerSmiMask) >> kFlagsPerSmiShift;
6687 const Array& flags_array = Array::Handle(Array::New(len, space));
6688 // Initialize flags to 0.
6689 const Smi& zero = Smi::Handle(Smi::New(0));
6690 for (intptr_t i = 0; i < len; i++) {
6691 flags_array.SetAt(i, zero);
6692 }
6693 set_flags(flags_array);
6694}
6695
6696void TypeParameters::OptimizeFlags() const {
6697 if (untag()->flags() == Array::null()) return; // Already optimized.
6698 const intptr_t len = (Length() + kFlagsPerSmiMask) >> kFlagsPerSmiShift;
6699 const Array& flags_array = Array::Handle(flags());
6700 const Smi& zero = Smi::Handle(Smi::New(0));
6701 for (intptr_t i = 0; i < len; i++) {
6702 if (flags_array.At(i) != zero.ptr()) return;
6703 }
6704 set_flags(Object::null_array());
6705}
6706
6708 if (untag()->flags() == Array::null()) return false;
6709 const intptr_t flag = Smi::Value(
6710 Smi::RawCast(Array::Handle(flags()).At(index >> kFlagsPerSmiShift)));
6711 return (flag >> (index & kFlagsPerSmiMask)) != 0;
6712}
6713
6715 bool value) const {
6716 const Array& flg = Array::Handle(flags());
6717 intptr_t flag = Smi::Value(Smi::RawCast(flg.At(index >> kFlagsPerSmiShift)));
6718 if (value) {
6719 flag |= 1 << (index % kFlagsPerSmiMask);
6720 } else {
6721 flag &= ~(1 << (index % kFlagsPerSmiMask));
6722 }
6724}
6725
6727 Zone* zone,
6728 bool are_class_type_parameters,
6729 intptr_t base,
6730 NameVisibility name_visibility,
6731 BaseTextBuffer* printer) const {
6732 String& name = String::Handle(zone);
6734 const intptr_t num_type_params = Length();
6735 for (intptr_t i = 0; i < num_type_params; i++) {
6736 if (are_class_type_parameters) {
6737 name = NameAt(i);
6738 printer->AddString(name.ToCString());
6739 } else {
6741 are_class_type_parameters, base, base + i));
6742 }
6743 if (FLAG_show_internal_names || !AllDynamicBounds()) {
6744 type = BoundAt(i);
6745 // Do not print default bound.
6746 if (!type.IsNull() && (FLAG_show_internal_names || !type.IsObjectType() ||
6747 type.IsNonNullable())) {
6748 printer->AddString(" extends ");
6749 type.PrintName(name_visibility, printer);
6750 if (FLAG_show_internal_names && !AllDynamicDefaults()) {
6751 type = DefaultAt(i);
6752 if (!type.IsNull() &&
6753 (FLAG_show_internal_names || !type.IsDynamicType())) {
6754 printer->AddString(" defaults to ");
6755 type.PrintName(name_visibility, printer);
6756 }
6757 }
6758 }
6759 }
6760 if (i != num_type_params - 1) {
6761 printer->AddString(", ");
6762 }
6763 }
6764}
6765
6766const char* TypeParameters::ToCString() const {
6767 if (IsNull()) {
6768 return "TypeParameters: null";
6769 }
6770 auto thread = Thread::Current();
6771 auto zone = thread->zone();
6772 ZoneTextBuffer buffer(zone);
6773 buffer.AddString("TypeParameters: ");
6774 Print(thread, zone, true, 0, kInternalName, &buffer);
6775 return buffer.buffer();
6776}
6777
6778TypeParametersPtr TypeParameters::New(Heap::Space space) {
6780 return Object::Allocate<TypeParameters>(space);
6781}
6782
6783TypeParametersPtr TypeParameters::New(intptr_t count, Heap::Space space) {
6784 const TypeParameters& result =
6786 // Create an [ Array ] of [ String ] objects to represent the names.
6787 // Create a [ TypeArguments ] vector representing the bounds.
6788 // Create a [ TypeArguments ] vector representing the defaults.
6789 // Create an [ Array ] of [ Smi] objects to represent the flags.
6790 const Array& names_array = Array::Handle(Array::New(count, space));
6791 result.set_names(names_array);
6792 TypeArguments& type_args = TypeArguments::Handle();
6793 type_args = TypeArguments::New(count, Heap::kNew); // Will get canonicalized.
6794 result.set_bounds(type_args);
6795 type_args = TypeArguments::New(count, Heap::kNew); // Will get canonicalized.
6796 result.set_defaults(type_args);
6797 result.AllocateFlags(space); // Will get optimized.
6798 return result.ptr();
6799}
6800
6801intptr_t TypeArguments::ComputeNullability() const {
6802 if (IsNull()) return 0;
6803 const intptr_t num_types = Length();
6804 intptr_t result = 0;
6805 if (num_types <= kNullabilityMaxTypes) {
6807 for (intptr_t i = 0; i < num_types; i++) {
6808 type = TypeAt(i);
6809 intptr_t type_bits = 0;
6810 if (!type.IsNull()) {
6811 switch (type.nullability()) {
6813 type_bits = kNullableBits;
6814 break;
6816 type_bits = kNonNullableBits;
6817 break;
6819 type_bits = kLegacyBits;
6820 break;
6821 default:
6822 UNREACHABLE();
6823 }
6824 }
6825 result |= (type_bits << (i * kNullabilityBitsPerType));
6826 }
6827 }
6828 set_nullability(result);
6829 return result;
6830}
6831
6832void TypeArguments::set_nullability(intptr_t value) const {
6833 untag()->set_nullability(Smi::New(value));
6834}
6835
6836uword TypeArguments::HashForRange(intptr_t from_index, intptr_t len) const {
6837 if (IsNull()) return kAllDynamicHash;
6838 if (IsRaw(from_index, len)) return kAllDynamicHash;
6839 uint32_t result = 0;
6841 for (intptr_t i = 0; i < len; i++) {
6842 type = TypeAt(from_index + i);
6843 ASSERT(!type.IsNull());
6844 result = CombineHashes(result, type.Hash());
6845 }
6847 return result;
6848}
6849
6850uword TypeArguments::ComputeHash() const {
6851 if (IsNull()) return kAllDynamicHash;
6852 const uword result = HashForRange(0, Length());
6853 ASSERT(result != 0);
6854 SetHash(result);
6855 return result;
6856}
6857
6858TypeArgumentsPtr TypeArguments::Prepend(Zone* zone,
6859 const TypeArguments& other,
6860 intptr_t other_length,
6861 intptr_t total_length) const {
6862 if (other_length == 0) {
6864 return ptr();
6865 } else if (other_length == total_length) {
6866 ASSERT(other.IsCanonical());
6867 return other.ptr();
6868 } else if (IsNull() && other.IsNull()) {
6869 return TypeArguments::null();
6870 }
6871 const TypeArguments& result =
6874 for (intptr_t i = 0; i < other_length; i++) {
6875 type = other.IsNull() ? Type::DynamicType() : other.TypeAt(i);
6876 result.SetTypeAt(i, type);
6877 }
6878 for (intptr_t i = other_length; i < total_length; i++) {
6879 type = IsNull() ? Type::DynamicType() : TypeAt(i - other_length);
6880 result.SetTypeAt(i, type);
6881 }
6882 return result.Canonicalize(Thread::Current());
6883}
6884
6886 Zone* zone,
6887 const TypeArguments& other) const {
6888 ASSERT(!IsNull() && !other.IsNull());
6889 const intptr_t this_len = Length();
6890 const intptr_t other_len = other.Length();
6891 const auto& result = TypeArguments::Handle(
6892 zone, TypeArguments::New(this_len + other_len, Heap::kNew));
6893 auto& type = AbstractType::Handle(zone);
6894 for (intptr_t i = 0; i < this_len; ++i) {
6895 type = TypeAt(i);
6896 result.SetTypeAt(i, type);
6897 }
6898 for (intptr_t i = 0; i < other_len; ++i) {
6899 type = other.TypeAt(i);
6900 result.SetTypeAt(this_len + i, type);
6901 }
6902 return result.ptr();
6903}
6904
6906 const Function* function,
6907 const Class* cls) const {
6908 if (IsNull() || IsInstantiated()) {
6910 }
6911 if (function != nullptr) {
6914 }
6915 if (cls == nullptr) {
6916 cls = &Class::Handle(zone, function->Owner());
6917 }
6918 }
6919 if (cls != nullptr) {
6922 }
6923 }
6925}
6926
6927StringPtr TypeArguments::Name() const {
6928 Thread* thread = Thread::Current();
6929 ZoneTextBuffer printer(thread->zone());
6930 PrintSubvectorName(0, Length(), kInternalName, &printer);
6931 return Symbols::New(thread, printer.buffer());
6932}
6933
6935 Thread* thread = Thread::Current();
6936 ZoneTextBuffer printer(thread->zone());
6938 return Symbols::New(thread, printer.buffer());
6939}
6940
6941void TypeArguments::PrintSubvectorName(intptr_t from_index,
6942 intptr_t len,
6943 NameVisibility name_visibility,
6944 BaseTextBuffer* printer) const {
6945 printer->AddString("<");
6947 for (intptr_t i = 0; i < len; i++) {
6948 if (from_index + i < Length()) {
6949 type = TypeAt(from_index + i);
6950 if (type.IsNull()) {
6951 printer->AddString("null"); // Unfinalized vector.
6952 } else {
6953 type.PrintName(name_visibility, printer);
6954 }
6955 } else {
6956 printer->AddString("dynamic");
6957 }
6958 if (i < len - 1) {
6959 printer->AddString(", ");
6960 }
6961 }
6962 printer->AddString(">");
6963}
6964
6966 buffer->AddString("TypeArguments: ");
6967 if (IsNull()) {
6968 return buffer->AddString("null");
6969 }
6970 buffer->Printf("(H%" Px ")", Smi::Value(untag()->hash()));
6971 auto& type_at = AbstractType::Handle();
6972 for (intptr_t i = 0; i < Length(); i++) {
6973 type_at = TypeAt(i);
6974 buffer->Printf(" [%s]", type_at.IsNull() ? "null" : type_at.ToCString());
6975 }
6976}
6977
6979 const TypeArguments& other,
6980 intptr_t from_index,
6981 intptr_t len,
6982 TypeEquality kind,
6983 FunctionTypeMapping* function_type_equivalence) const {
6984 if (this->ptr() == other.ptr()) {
6985 return true;
6986 }
6987 if (kind == TypeEquality::kCanonical) {
6988 if (IsNull() || other.IsNull()) {
6989 return false;
6990 }
6991 if (Length() != other.Length()) {
6992 return false;
6993 }
6994 }
6996 AbstractType& other_type = AbstractType::Handle();
6997 for (intptr_t i = from_index; i < from_index + len; i++) {
6998 type = IsNull() ? Type::DynamicType() : TypeAt(i);
6999 ASSERT(!type.IsNull());
7000 other_type = other.IsNull() ? Type::DynamicType() : other.TypeAt(i);
7001 ASSERT(!other_type.IsNull());
7002 if (!type.IsEquivalent(other_type, kind, function_type_equivalence)) {
7003 return false;
7004 }
7005 }
7006 return true;
7007}
7008
7009bool TypeArguments::IsDynamicTypes(bool raw_instantiated,
7010 intptr_t from_index,
7011 intptr_t len) const {
7012 ASSERT(Length() >= (from_index + len));
7014 Class& type_class = Class::Handle();
7015 for (intptr_t i = 0; i < len; i++) {
7016 type = TypeAt(from_index + i);
7017 if (type.IsNull()) {
7018 return false;
7019 }
7020 if (!type.HasTypeClass()) {
7021 if (raw_instantiated && type.IsTypeParameter()) {
7022 // An uninstantiated type parameter is equivalent to dynamic.
7023 continue;
7024 }
7025 return false;
7026 }
7027 type_class = type.type_class();
7028 if (!type_class.IsDynamicClass()) {
7029 return false;
7030 }
7031 }
7032 return true;
7033}
7034
7036 : zone_(ASSERT_NOTNULL(zone)),
7037 cache_container_(&source),
7038 data_(Array::Handle(source.instantiations())),
7039 smi_handle_(Smi::Handle(zone)) {
7041 ->type_arguments_canonicalization_mutex()
7042 ->IsOwnedByCurrentThread());
7043}
7044
7046 : zone_(ASSERT_NOTNULL(zone)),
7047 cache_container_(nullptr),
7048 data_(Array::Handle(array.ptr())),
7049 smi_handle_(Smi::Handle(zone)) {
7051 ->type_arguments_canonicalization_mutex()
7052 ->IsOwnedByCurrentThread());
7053}
7054
7055bool TypeArguments::Cache::IsHash(const Array& array) {
7056 return array.Length() > kMaxLinearCacheSize;
7057}
7058
7059intptr_t TypeArguments::Cache::NumOccupied(const Array& array) {
7060 return NumOccupiedBits::decode(
7061 RawSmiValue(Smi::RawCast(array.AtAcquire(kMetadataIndex))));
7062}
7063
#if defined(DEBUG)
// Structural sanity check of cache storage; caller must hold the
// canonicalization mutex.
// NOTE(review): the first assertion line was dropped by the doc extraction
// and restored by inference — verify.
bool TypeArguments::Cache::IsValidStorageLocked(const Array& array) {
  // We only require the mutex be held so we don't need to use acquire/release
  // semantics to access and set the number of occupied entries in the header.
  ASSERT(IsolateGroup::Current()
             ->type_arguments_canonicalization_mutex()
             ->IsOwnedByCurrentThread());
  // Quick check against the empty linear cache.
  if (array.ptr() == EmptyStorage().ptr()) return true;
  const intptr_t num_occupied = NumOccupied(array);
  // We should be using the same shared value for an empty cache.
  if (num_occupied == 0) return false;
  const intptr_t storage_len = array.Length();
  // All caches have the metadata followed by a series of entries.
  if ((storage_len % kEntrySize) != kHeaderSize) return false;
  const intptr_t num_entries = NumEntries(array);
  // Linear caches contain at least one unoccupied entry, and hash-based caches
  // grow prior to hitting 100% occupancy.
  if (num_occupied >= num_entries) return false;
  // In a linear cache, all entries with indexes smaller than [num_occupied]
  // should be occupied and ones greater than or equal should be unoccupied.
  const bool is_linear_cache = IsLinear(array);
  // The capacity of a hash-based cache must be a power of two (see
  // EnsureCapacityLocked as to why).
  if (!is_linear_cache) {
    if (!Utils::IsPowerOfTwo(num_entries)) return false;
    const intptr_t metadata =
        RawSmiValue(Smi::RawCast(array.AtAcquire(kMetadataIndex)));
    if ((1 << EntryCountLog2Bits::decode(metadata)) != num_entries) {
      return false;
    }
  }
  for (intptr_t i = 0; i < num_entries; i++) {
    const intptr_t index = kHeaderSize + i * kEntrySize;
    if (array.At(index + kSentinelIndex) == Sentinel()) {
      if (is_linear_cache && i < num_occupied) return false;
      continue;
    }
    if (is_linear_cache && i >= num_occupied) return false;
    // The elements of an occupied entry are all TypeArguments values.
    for (intptr_t j = index; j < index + kEntrySize; j++) {
      if (!array.At(j)->IsHeapObject()) return false;
      if (array.At(j) == Object::null()) continue;  // null is a valid TAV.
      if (!array.At(j)->IsTypeArguments()) return false;
    }
  }
  return true;
}
#endif
7113
7114bool TypeArguments::Cache::IsOccupied(intptr_t entry) const {
7116 ASSERT(entry >= 0 && entry < table.Length());
7117 return table.At(entry).Get<kSentinelIndex>() != Sentinel();
7118}
7119
7120TypeArgumentsPtr TypeArguments::Cache::Retrieve(intptr_t entry) const {
7121 ASSERT(IsOccupied(entry));
7123 return table.At(entry).Get<kInstantiatedTypeArgsIndex>();
7124}
7125
7126intptr_t TypeArguments::Cache::NumEntries(const Array& array) {
7128 return table.Length();
7129}
7130
7132 const Array& array,
7133 const TypeArguments& instantiator_tav,
7134 const TypeArguments& function_tav) {
7135 const bool is_hash = IsHash(array);
7137 const intptr_t num_entries = table.Length();
7138 // For a linear cache, start at the first entry and probe linearly. This can
7139 // be done because a linear cache always has at least one unoccupied entry
7140 // after all the occupied ones.
7141 intptr_t probe = 0;
7142 intptr_t probe_distance = 1;
7143 if (is_hash) {
7144 // For a hash-based cache, instead start at an entry determined by the hash
7145 // of the keys.
7146 auto hash = FinalizeHash(
7147 CombineHashes(instantiator_tav.Hash(), function_tav.Hash()));
7148 probe = hash & (num_entries - 1);
7149 }
7150 while (true) {
7151 const auto& tuple = table.At(probe);
7152 if (tuple.Get<kSentinelIndex>() == Sentinel()) break;
7153 if ((tuple.Get<kInstantiatorTypeArgsIndex>() == instantiator_tav.ptr()) &&
7154 (tuple.Get<kFunctionTypeArgsIndex>() == function_tav.ptr())) {
7155 return {probe, true};
7156 }
7157 // Advance probe by the current probing distance.
7158 probe = probe + probe_distance;
7159 if (is_hash) {
7160 // Wrap around if the probe goes off the end of the entries array.
7161 probe = probe & (num_entries - 1);
7162 // We had a collision, so increase the probe distance. See comment in
7163 // EnsureCapacityLocked for an explanation of how this hits all slots.
7164 probe_distance++;
7165 }
7166 }
7167 // We should always get the next slot for a linear cache.
7168 ASSERT(is_hash || probe == NumOccupied(array));
7169 return {probe, false};
7170}
7171
7173 intptr_t entry,
7174 const TypeArguments& instantiator_tav,
7175 const TypeArguments& function_tav,
7176 const TypeArguments& instantiated_tav) const {
7177 // We don't do mutating operations in tests without a TypeArguments object.
7178 ASSERT(cache_container_ != nullptr);
7179#if defined(DEBUG)
7180 auto loc = FindKeyOrUnused(instantiator_tav, function_tav);
7181 ASSERT_EQUAL(loc.entry, entry);
7182 ASSERT(!loc.present);
7183#endif
7184 // Double-check we got the expected entry index when adding to a linear array.
7185 ASSERT(!IsLinear() || entry == NumOccupied());
7186 const intptr_t new_occupied = NumOccupied() + 1;
7187 const bool storage_changed = EnsureCapacity(new_occupied);
7188 // Note that this call to IsLinear() may return a different result than the
7189 // earlier, since EnsureCapacity() may have swapped to hash-based storage.
7190 if (storage_changed && !IsLinear()) {
7191 // The capacity of the array has changed, and the capacity is used when
7192 // probing further into the array due to collisions. Thus, we need to redo
7193 // the entry index calculation.
7194 auto loc = FindKeyOrUnused(instantiator_tav, function_tav);
7195 ASSERT(!loc.present);
7196 entry = loc.entry;
7197 }
7198
7199 // Go ahead and increment the number of occupied entries prior to adding the
7200 // entry. Use a store-release barrier in case of concurrent readers.
7201 const intptr_t metadata = RawSmiValue(Smi::RawCast(data_.At(kMetadataIndex)));
7202 smi_handle_ = Smi::New(NumOccupiedBits::update(new_occupied, metadata));
7203 data_.SetAtRelease(kMetadataIndex, smi_handle_);
7204
7206 const auto& tuple = table.At(entry);
7207 // The parts of the tuple that aren't used for sentinel checking are only
7208 // retrieved if the entry is occupied. Entries in the cache are never deleted,
7209 // so once the entry is marked as occupied, the contents of that entry never
7210 // change. Thus, we don't need store-release barriers here.
7211 tuple.Set<kFunctionTypeArgsIndex>(function_tav);
7212 tuple.Set<kInstantiatedTypeArgsIndex>(instantiated_tav);
7213 // For the sentinel position, though, we do.
7214 static_assert(
7215 kSentinelIndex == kInstantiatorTypeArgsIndex,
7216 "the sentinel position is not protected with a store-release barrier");
7217 tuple.Set<kInstantiatorTypeArgsIndex, std::memory_order_release>(
7218 instantiator_tav);
7219
7220 if (storage_changed) {
7221 // Only check for validity on growth, just to keep the overhead on DEBUG
7222 // builds down.
7223 DEBUG_ASSERT(IsValidStorageLocked(data_));
7224 // Update the container of the original cache to point to the new one.
7225 cache_container_->set_instantiations(data_);
7226 }
7227
7228 return {entry, true};
7229}
7230
7232 return Smi::New(kSentinelValue);
7233}
7234
7235bool TypeArguments::Cache::EnsureCapacity(intptr_t new_occupied) const {
7236 ASSERT(new_occupied > NumOccupied());
7237 // How many entries are in the current array (including unoccupied entries).
7238 const intptr_t current_capacity = NumEntries();
7239
7240 // Early returns for cases where no growth is needed.
7241 const bool is_linear = IsLinear();
7242 if (is_linear) {
7243 // We need at least one unoccupied entry in addition to the occupied ones.
7244 if (current_capacity > new_occupied) return false;
7245 } else {
7246 if (LoadFactor(new_occupied, current_capacity) < kMaxLoadFactor) {
7247 return false;
7248 }
7249 }
7250
7251 if (new_occupied <= kMaxLinearCacheEntries) {
7252 ASSERT(is_linear);
7253 // Not enough room for both the new entry and at least one unoccupied
7254 // entry, so grow the tuple capacity of the linear cache by about 50%,
7255 // ensuring that space for at least one new tuple is added, capping the
7256 // total number of occupied entries to the max allowed.
7257 const intptr_t new_capacity =
7258 Utils::Minimum(current_capacity + (current_capacity >> 1),
7259 kMaxLinearCacheEntries) +
7260 1;
7261 const intptr_t cache_size = kHeaderSize + new_capacity * kEntrySize;
7262 ASSERT(cache_size <= kMaxLinearCacheSize);
7263 data_ = Array::Grow(data_, cache_size, Heap::kOld);
7264 ASSERT(!data_.IsNull());
7265 // No need to adjust the number of occupied entries or old entries, as they
7266 // are copied over by Array::Grow. Just mark any new entries as unoccupied.
7267 smi_handle_ = Sentinel();
7269 for (intptr_t i = current_capacity; i < new_capacity; i++) {
7270 const auto& tuple = table.At(i);
7271 tuple.Set<kSentinelIndex>(smi_handle_);
7272 }
7273 return true;
7274 }
7275
7276 // Either we're converting a linear cache into a hash-based cache, or the
7277 // load factor of the hash-based cache has increased to the point where we
7278 // need to grow it.
7279 const intptr_t new_capacity =
7280 is_linear ? kNumInitialHashCacheEntries : 2 * current_capacity;
7281 // Because we use quadratic (actually triangle number) probing it is
7282 // important that the size is a power of two (otherwise we could fail to
7283 // find an empty slot). This is described in Knuth's The Art of Computer
7284 // Programming Volume 2, Chapter 6.4, exercise 20 (solution in the
7285 // appendix, 2nd edition).
7286 ASSERT(Utils::IsPowerOfTwo(new_capacity));
7287 ASSERT(LoadFactor(new_occupied, new_capacity) < kMaxLoadFactor);
7288 const intptr_t new_size = kHeaderSize + new_capacity * kEntrySize;
7289 const auto& new_data =
7291 ASSERT(!new_data.IsNull());
7292 // First set up the metadata in new_data.
7293 const intptr_t metadata = RawSmiValue(Smi::RawCast(data_.At(kMetadataIndex)));
7294 smi_handle_ = Smi::New(EntryCountLog2Bits::update(
7295 Utils::ShiftForPowerOfTwo(new_capacity), metadata));
7296 new_data.SetAt(kMetadataIndex, smi_handle_);
7297 // Then mark all the entries in new_data as unoccupied.
7298 smi_handle_ = Sentinel();
7299 InstantiationsCacheTable to_table(new_data);
7300 for (const auto& tuple : to_table) {
7301 tuple.Set<kSentinelIndex>(smi_handle_);
7302 }
7303 // Finally, copy over the entries.
7304 auto& instantiator_tav = TypeArguments::Handle(zone_);
7305 auto& function_tav = TypeArguments::Handle(zone_);
7306 auto& result_tav = TypeArguments::Handle(zone_);
7307 const InstantiationsCacheTable from_table(data_);
7308 for (const auto& from_tuple : from_table) {
7309 // Skip unoccupied entries.
7310 if (from_tuple.Get<kSentinelIndex>() == Sentinel()) continue;
7311 instantiator_tav ^= from_tuple.Get<kInstantiatorTypeArgsIndex>();
7312 function_tav = from_tuple.Get<kFunctionTypeArgsIndex>();
7313 result_tav = from_tuple.Get<kInstantiatedTypeArgsIndex>();
7314 // Since new_data has a different total capacity, we can't use the old
7315 // entry indexes, but must recalculate them.
7316 auto loc = FindKeyOrUnused(new_data, instantiator_tav, function_tav);
7317 ASSERT(!loc.present);
7318 const auto& to_tuple = to_table.At(loc.entry);
7319 to_tuple.Set<kInstantiatorTypeArgsIndex>(instantiator_tav);
7320 to_tuple.Set<kFunctionTypeArgsIndex>(function_tav);
7321 to_tuple.Set<kInstantiatedTypeArgsIndex>(result_tav);
7322 }
7323 data_ = new_data.ptr();
7324 return true;
7325}
7326
7328 return instantiations() != Cache::EmptyStorage().ptr();
7329}
7330
// Returns the array backing this vector's instantiations cache.
// The writer side publishes the array with a store-release (see
// set_instantiations below), so any loads from the returned array are
// dependent loads and need no explicit load-acquire barrier here.
ArrayPtr TypeArguments::instantiations() const {
  // We rely on the fact that any loads from the array are dependent loads and
  // avoid the load-acquire barrier here.
  return untag()->instantiations();
}
7336
// Installs a new instantiations-cache array. Pairs with the dependent-load
// reader in instantiations() above.
void TypeArguments::set_instantiations(const Array& value) const {
  // We have to ensure that initializing stores to the array are available
  // when releasing the pointer to the array pointer.
  // => We have to use store-release here.
  ASSERT(!value.IsNull());
  untag()->set_instantiations<std::memory_order_release>(value.ptr());
}
7344
7345bool TypeArguments::HasCount(intptr_t count) const {
7346 if (IsNull()) {
7347 return true;
7348 }
7349 return Length() == count;
7350}
7351
7352intptr_t TypeArguments::Length() const {
7353 if (IsNull()) {
7354 return 0;
7355 }
7356 return Smi::Value(untag()->length());
7357}
7358
7360 if (IsNull()) {
7361 return 0;
7362 }
7363 return Smi::Value(untag()->nullability());
7364}
7365
// Returns the type at [index]. The receiver must be non-null and the index
// must be in bounds; use TypeAtNullSafe when the receiver may be null.
AbstractTypePtr TypeArguments::TypeAt(intptr_t index) const {
  ASSERT(!IsNull());
  ASSERT((index >= 0) && (index < Length()));
  return untag()->element(index);
}
7371
7372AbstractTypePtr TypeArguments::TypeAtNullSafe(intptr_t index) const {
7373 if (IsNull()) {
7374 // null vector represents infinite list of dynamics
7375 return Type::dynamic_type().ptr();
7376 }
7377 ASSERT((index >= 0) && (index < Length()));
7378 return TypeAt(index);
7379}
7380
// Stores [value] at [index]. Canonical vectors are immutable, so the
// receiver must not yet be canonicalized.
void TypeArguments::SetTypeAt(intptr_t index, const AbstractType& value) const {
  ASSERT(!IsCanonical());
  ASSERT((index >= 0) && (index < Length()));
  return untag()->set_element(index, value.ptr());
}
7386
7388 intptr_t from_index,
7389 intptr_t len,
7390 Genericity genericity,
7391 intptr_t num_free_fun_type_params) const {
7392 ASSERT(!IsNull());
7394 for (intptr_t i = 0; i < len; i++) {
7395 type = TypeAt(from_index + i);
7396 // If this type argument T is null, the type A containing T in its flattened
7397 // type argument vector V is recursive and is still being finalized.
7398 // T is the type argument of a super type of A. T is being instantiated
7399 // during finalization of V, which is also the instantiator. T depends
7400 // solely on the type parameters of A and will be replaced by a non-null
7401 // type before A is marked as finalized.
7402 if (!type.IsNull() &&
7403 !type.IsInstantiated(genericity, num_free_fun_type_params)) {
7404 return false;
7405 }
7406 }
7407 return true;
7408}
7409
7412 const intptr_t num_types = Length();
7413 for (intptr_t i = 0; i < num_types; i++) {
7414 type = TypeAt(i);
7415 if (type.IsNull()) {
7416 return false; // Still unfinalized, too early to tell.
7417 }
7418 if (!type.IsTypeParameter()) {
7419 return false;
7420 }
7421 const TypeParameter& type_param = TypeParameter::Cast(type);
7422 ASSERT(type_param.IsFinalized());
7423 if ((type_param.index() != i) || type_param.IsFunctionTypeParameter()) {
7424 return false;
7425 }
7426 // Instantiating nullable and legacy type parameters may change
7427 // nullability of a type, so type arguments vector containing such type
7428 // parameters cannot be substituted with instantiator type arguments.
7429 if (type_param.IsNullable() || type_param.IsLegacy()) {
7430 return false;
7431 }
7432 }
7433 return true;
7434 // Note that it is not necessary to verify at runtime that the instantiator
7435 // type vector is long enough, since this uninstantiated vector contains as
7436 // many different type parameters as it is long.
7437}
7438
7439// Return true if this uninstantiated type argument vector, once instantiated
7440// at runtime, is a prefix of the type argument vector of its instantiator.
7441// A runtime check may be required, as indicated by with_runtime_check.
7443 const Class& instantiator_class,
7444 bool* with_runtime_check) const {
7446 if (with_runtime_check != nullptr) {
7447 *with_runtime_check = false;
7448 }
7449 const intptr_t num_type_args = Length();
7450 const intptr_t num_instantiator_type_args =
7451 instantiator_class.NumTypeArguments();
7452 if (num_type_args > num_instantiator_type_args) {
7453 // This vector cannot be a prefix of a shorter vector.
7454 return false;
7455 }
7456 const intptr_t num_instantiator_type_params =
7457 instantiator_class.NumTypeParameters();
7458 const intptr_t first_type_param_offset =
7459 num_instantiator_type_args - num_instantiator_type_params;
7460 // At compile time, the type argument vector of the instantiator consists of
7461 // the type argument vector of its super type, which may refer to the type
7462 // parameters of the instantiator class, followed by (or overlapping partially
7463 // or fully with) the type parameters of the instantiator class in declaration
7464 // order.
7465 // In other words, the only variables are the type parameters of the
7466 // instantiator class.
7467 // This uninstantiated type argument vector is also expressed in terms of the
7468 // type parameters of the instantiator class. Therefore, in order to be a
7469 // prefix once instantiated at runtime, every one of its type argument must be
7470 // equal to the type argument of the instantiator vector at the same index.
7471
7472 // As a first requirement, the last num_instantiator_type_params type
7473 // arguments of this type argument vector must refer to the corresponding type
7474 // parameters of the instantiator class.
7475 AbstractType& type_arg = AbstractType::Handle();
7476 for (intptr_t i = first_type_param_offset; i < num_type_args; i++) {
7477 type_arg = TypeAt(i);
7478 if (!type_arg.IsTypeParameter()) {
7479 return false;
7480 }
7481 const TypeParameter& type_param = TypeParameter::Cast(type_arg);
7482 ASSERT(type_param.IsFinalized());
7483 if ((type_param.index() != i) || type_param.IsFunctionTypeParameter()) {
7484 return false;
7485 }
7486 // Instantiating nullable and legacy type parameters may change nullability
7487 // of a type, so type arguments vector containing such type parameters
7488 // cannot be substituted with instantiator type arguments, unless we check
7489 // at runtime the nullability of the first 1 or 2 type arguments of the
7490 // instantiator.
7491 // Note that the presence of non-overlapping super type arguments (i.e.
7492 // first_type_param_offset > 0) will prevent this optimization.
7493 if (type_param.IsNullable() || type_param.IsLegacy()) {
7494 if (with_runtime_check == nullptr || i >= kNullabilityMaxTypes) {
7495 return false;
7496 }
7497 *with_runtime_check = true;
7498 }
7499 }
7500 // As a second requirement, the type arguments corresponding to the super type
7501 // must be identical. Overlapping ones have already been checked starting at
7502 // first_type_param_offset.
7503 if (first_type_param_offset == 0) {
7504 return true;
7505 }
7506 Type& super_type = Type::Handle(instantiator_class.super_type());
7507 const TypeArguments& super_type_args =
7509 Thread::Current(), /*canonicalize=*/false));
7510 if (super_type_args.IsNull()) {
7512 return false;
7513 }
7514 AbstractType& super_type_arg = AbstractType::Handle();
7515 for (intptr_t i = 0; (i < first_type_param_offset) && (i < num_type_args);
7516 i++) {
7517 type_arg = TypeAt(i);
7518 super_type_arg = super_type_args.TypeAt(i);
7519 if (!type_arg.Equals(super_type_arg)) {
7521 return false;
7522 }
7523 }
7524 return true;
7525}
7526
7527// Return true if this uninstantiated type argument vector, once instantiated
7528// at runtime, is a prefix of the enclosing function type arguments.
7529// A runtime check may be required, as indicated by with_runtime_check.
7531 const Function& function,
7532 bool* with_runtime_check) const {
7534 if (with_runtime_check != nullptr) {
7535 *with_runtime_check = false;
7536 }
7537 const intptr_t num_type_args = Length();
7538 const intptr_t num_parent_type_args = function.NumParentTypeArguments();
7539 const intptr_t num_function_type_params = function.NumTypeParameters();
7540 const intptr_t num_function_type_args =
7541 num_parent_type_args + num_function_type_params;
7542 if (num_type_args > num_function_type_args) {
7543 // This vector cannot be a prefix of a shorter vector.
7544 return false;
7545 }
7546 AbstractType& type_arg = AbstractType::Handle();
7547 for (intptr_t i = 0; i < num_type_args; i++) {
7548 type_arg = TypeAt(i);
7549 if (!type_arg.IsTypeParameter()) {
7550 return false;
7551 }
7552 const TypeParameter& type_param = TypeParameter::Cast(type_arg);
7553 ASSERT(type_param.IsFinalized());
7554 if ((type_param.index() != i) || !type_param.IsFunctionTypeParameter()) {
7555 return false;
7556 }
7557 // Instantiating nullable and legacy type parameters may change nullability
7558 // of a type, so type arguments vector containing such type parameters
7559 // cannot be substituted with the enclosing function type arguments, unless
7560 // we check at runtime the nullability of the first 1 or 2 type arguments of
7561 // the enclosing function type arguments.
7562 if (type_param.IsNullable() || type_param.IsLegacy()) {
7563 if (with_runtime_check == nullptr || i >= kNullabilityMaxTypes) {
7564 return false;
7565 }
7566 *with_runtime_check = true;
7567 }
7568 }
7569 return true;
7570}
7571
7572TypeArgumentsPtr TypeArguments::TruncatedTo(intptr_t length) const {
7573 Thread* thread = Thread::Current();
7574 Zone* zone = thread->zone();
7575 const TypeArguments& result =
7578 for (intptr_t i = 0; i < length; i++) {
7579 type = TypeAt(i);
7580 result.SetTypeAt(i, type);
7581 }
7582 return result.Canonicalize(thread);
7583}
7584
7586 ASSERT(!IsNull());
7588 const intptr_t num_types = Length();
7589 for (intptr_t i = 0; i < num_types; i++) {
7590 type = TypeAt(i);
7591 if (!type.IsFinalized()) {
7592 return false;
7593 }
7594 }
7595 return true;
7596}
7597
7599 const TypeArguments& instantiator_type_arguments,
7600 const TypeArguments& function_type_arguments,
7601 intptr_t num_free_fun_type_params,
7602 Heap::Space space,
7603 FunctionTypeMapping* function_type_mapping,
7604 intptr_t num_parent_type_args_adjustment) const {
7606 if ((instantiator_type_arguments.IsNull() ||
7607 instantiator_type_arguments.Length() == Length()) &&
7609 return instantiator_type_arguments.ptr();
7610 }
7611 const intptr_t num_types = Length();
7612 TypeArguments& instantiated_array =
7613 TypeArguments::Handle(TypeArguments::New(num_types, space));
7615 for (intptr_t i = 0; i < num_types; i++) {
7616 type = TypeAt(i);
7617 // If this type argument T is null, the type A containing T in its flattened
7618 // type argument vector V is recursive and is still being finalized.
7619 // T is the type argument of a super type of A. T is being instantiated
7620 // during finalization of V, which is also the instantiator. T depends
7621 // solely on the type parameters of A and will be replaced by a non-null
7622 // type before A is marked as finalized.
7623 if (!type.IsNull() && !type.IsInstantiated()) {
7624 type = type.InstantiateFrom(
7625 instantiator_type_arguments, function_type_arguments,
7626 num_free_fun_type_params, space, function_type_mapping,
7627 num_parent_type_args_adjustment);
7628 // A returned null type indicates a failed instantiation in dead code that
7629 // must be propagated up to the caller, the optimizing compiler.
7630 if (type.IsNull()) {
7631 return Object::empty_type_arguments().ptr();
7632 }
7633 }
7634 instantiated_array.SetTypeAt(i, type);
7635 }
7636 return instantiated_array.ptr();
7637}
7638
7640 intptr_t num_parent_type_args_adjustment,
7641 intptr_t num_free_fun_type_params,
7642 Heap::Space space,
7643 FunctionTypeMapping* function_type_mapping) const {
7644 Zone* zone = Thread::Current()->zone();
7645 TypeArguments* updated_args = nullptr;
7647 AbstractType& updated = AbstractType::Handle(zone);
7648 for (intptr_t i = 0, n = Length(); i < n; ++i) {
7649 type = TypeAt(i);
7650 updated = type.UpdateFunctionTypes(num_parent_type_args_adjustment,
7651 num_free_fun_type_params, space,
7652 function_type_mapping);
7653 if (type.ptr() != updated.ptr()) {
7654 if (updated_args == nullptr) {
7655 updated_args =
7656 &TypeArguments::Handle(zone, TypeArguments::New(n, space));
7657 for (intptr_t j = 0; j < i; ++j) {
7658 type = TypeAt(j);
7659 updated_args->SetTypeAt(j, type);
7660 }
7661 }
7662 }
7663 if (updated_args != nullptr) {
7664 updated_args->SetTypeAt(i, updated);
7665 }
7666 }
7667 return (updated_args != nullptr) ? updated_args->ptr() : ptr();
7668}
7669
7670#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
7671// A local flag used only in object_test.cc that, when true, causes a failure
7672// when a cache entry for the given instantiator and function type arguments
7673// already exists. Used to check that the InstantiateTypeArguments stub found
7674// the cache entry instead of calling the runtime.
7676#endif
7677
7679 const TypeArguments& instantiator_type_arguments,
7680 const TypeArguments& function_type_arguments) const {
7681 auto thread = Thread::Current();
7682 auto zone = thread->zone();
7685
7687 ASSERT(instantiator_type_arguments.IsNull() ||
7688 instantiator_type_arguments.IsCanonical());
7689 ASSERT(function_type_arguments.IsNull() ||
7690 function_type_arguments.IsCanonical());
7691 // Lookup instantiators and if found, return instantiated result.
7692 Cache cache(zone, *this);
7693 auto const loc = cache.FindKeyOrUnused(instantiator_type_arguments,
7694 function_type_arguments);
7695 if (loc.present) {
7696#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
7698 TextBuffer buffer(1024);
7699 buffer.Printf("for\n");
7700 buffer.Printf(" * uninstantiated type arguments %s\n", ToCString());
7701 buffer.Printf(" * instantiation type arguments: %s (hash: %" Pu ")\n",
7702 instantiator_type_arguments.ToCString(),
7703 instantiator_type_arguments.Hash());
7704 buffer.Printf(" * function type arguments: %s (hash: %" Pu ")\n",
7705 function_type_arguments.ToCString(),
7706 function_type_arguments.Hash());
7707 buffer.Printf(" * number of occupied entries in cache: %" Pd "\n",
7708 cache.NumOccupied());
7709 buffer.Printf(" * number of total entries in cache: %" Pd "\n",
7710 cache.NumEntries());
7711 buffer.Printf("expected to find entry %" Pd
7712 " of cache in stub, but reached runtime",
7713 loc.entry);
7714 FATAL("%s", buffer.buffer());
7715 }
7716#endif
7717 return cache.Retrieve(loc.entry);
7718 }
7719 // Cache lookup failed. Instantiate the type arguments.
7721 result = InstantiateFrom(instantiator_type_arguments, function_type_arguments,
7723 // Canonicalize type arguments.
7724 result = result.Canonicalize(thread);
7725 // InstantiateAndCanonicalizeFrom is not reentrant. It cannot have been called
7726 // indirectly, so the prior_instantiations array cannot have grown.
7727 ASSERT(cache.data_.ptr() == instantiations());
7728 cache.AddEntry(loc.entry, instantiator_type_arguments,
7729 function_type_arguments, result);
7730 return result.ptr();
7731}
7732
7733TypeArgumentsPtr TypeArguments::New(intptr_t len, Heap::Space space) {
7734 if (len < 0 || len > kMaxElements) {
7735 // This should be caught before we reach here.
7736 FATAL("Fatal error in TypeArguments::New: invalid len %" Pd "\n", len);
7737 }
7739 {
7740 auto raw = Object::Allocate<TypeArguments>(space, len);
7741 NoSafepointScope no_safepoint;
7742 result = raw;
7743 // Length must be set before we start storing into the array.
7744 result.SetLength(len);
7745 result.SetHash(0);
7746 result.set_nullability(0);
7747 }
7748 // The array used as storage for an empty linear cache should be initialized.
7750 result.set_instantiations(Cache::EmptyStorage());
7751 return result.ptr();
7752}
7753
// Sets the length field. Only valid before canonicalization, since canonical
// vectors are immutable.
void TypeArguments::SetLength(intptr_t value) const {
  ASSERT(!IsCanonical());
  // This is only safe because we create a new Smi, which does not cause
  // heap allocation.
  untag()->set_length(Smi::New(value));
}
7760
7761TypeArgumentsPtr TypeArguments::Canonicalize(Thread* thread) const {
7762 if (IsNull() || IsCanonical()) {
7763 ASSERT(IsOld());
7764 return this->ptr();
7765 }
7766 const intptr_t num_types = Length();
7767 if (num_types == 0) {
7768 return TypeArguments::empty_type_arguments().ptr();
7769 } else if (IsRaw(0, num_types)) {
7770 return TypeArguments::null();
7771 }
7772 Zone* zone = thread->zone();
7773 auto isolate_group = thread->isolate_group();
7774 ObjectStore* object_store = isolate_group->object_store();
7776 {
7777 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
7779 object_store->canonical_type_arguments());
7780 result ^= table.GetOrNull(CanonicalTypeArgumentsKey(*this));
7781 object_store->set_canonical_type_arguments(table.Release());
7782 }
7783 if (result.IsNull()) {
7784 // Canonicalize each type argument.
7785 AbstractType& type_arg = AbstractType::Handle(zone);
7786 GrowableHandlePtrArray<const AbstractType> canonicalized_types(zone,
7787 num_types);
7788 for (intptr_t i = 0; i < num_types; i++) {
7789 type_arg = TypeAt(i);
7790 type_arg = type_arg.Canonicalize(thread);
7791 canonicalized_types.Add(type_arg);
7792 }
7793 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
7795 object_store->canonical_type_arguments());
7796 // Since we canonicalized some type arguments above we need to lookup
7797 // in the table again to make sure we don't already have an equivalent
7798 // canonical entry.
7799 result ^= table.GetOrNull(CanonicalTypeArgumentsKey(*this));
7800 if (result.IsNull()) {
7801 for (intptr_t i = 0; i < num_types; i++) {
7802 SetTypeAt(i, canonicalized_types.At(i));
7803 }
7804 // Make sure we have an old space object and add it to the table.
7805 if (this->IsNew()) {
7806 result ^= Object::Clone(*this, Heap::kOld);
7807 } else {
7808 result = this->ptr();
7809 }
7810 ASSERT(result.IsOld());
7811 result.ComputeNullability();
7812 result.SetCanonical(); // Mark object as being canonical.
7813 // Now add this TypeArgument into the canonical list of type arguments.
7814 bool present = table.Insert(result);
7815 ASSERT(!present);
7816 }
7817 object_store->set_canonical_type_arguments(table.Release());
7818 }
7819 ASSERT(result.Equals(*this));
7820 ASSERT(!result.IsNull());
7821 ASSERT(result.IsTypeArguments());
7822 ASSERT(result.IsCanonical());
7823 return result.ptr();
7824}
7825
7827 Thread* thread,
7828 const Class& cls) const {
7829 if (IsNull()) {
7830 return ptr();
7831 }
7832 const intptr_t num_type_arguments = cls.NumTypeArguments();
7833 const intptr_t num_type_parameters = cls.NumTypeParameters(thread);
7834 ASSERT(Length() >= num_type_arguments);
7835 if (Length() == num_type_parameters) {
7836 return ptr();
7837 }
7838 if (num_type_parameters == 0) {
7839 return TypeArguments::null();
7840 }
7841 Zone* zone = thread->zone();
7842 const auto& args =
7843 TypeArguments::Handle(zone, TypeArguments::New(num_type_parameters));
7844 const intptr_t offset = num_type_arguments - num_type_parameters;
7845 auto& type = AbstractType::Handle(zone);
7846 for (intptr_t i = 0; i < num_type_parameters; ++i) {
7847 type = TypeAt(offset + i);
7848 args.SetTypeAt(i, type);
7849 }
7850 return args.ptr();
7851}
7852
7854 Thread* thread,
7855 const Class& cls) const {
7856 if (IsNull()) {
7857 return ptr();
7858 }
7859 const intptr_t num_type_arguments = cls.NumTypeArguments();
7860 const intptr_t num_type_parameters = cls.NumTypeParameters(thread);
7861 ASSERT(Length() == num_type_parameters);
7862 if (num_type_arguments == num_type_parameters) {
7863 return ptr();
7864 }
7865 Zone* zone = thread->zone();
7866 const auto& args =
7867 TypeArguments::Handle(zone, TypeArguments::New(num_type_arguments));
7868 const intptr_t offset = num_type_arguments - num_type_parameters;
7869 auto& type = AbstractType::Handle(zone);
7870 for (intptr_t i = 0; i < num_type_parameters; ++i) {
7871 type = TypeAt(i);
7872 args.SetTypeAt(offset + i, type);
7873 }
7874 return args.ptr();
7875}
7876
7878 if (IsNull()) {
7879 return;
7880 }
7881 Thread* thread = Thread::Current();
7882 Zone* zone = thread->zone();
7884 const intptr_t num_types = Length();
7885 for (intptr_t i = 0; i < num_types; i++) {
7886 type = TypeAt(i);
7887 type.EnumerateURIs(uris);
7888 }
7889}
7890
7891const char* TypeArguments::ToCString() const {
7892 if (IsNull()) {
7893 return "TypeArguments: null"; // Optimizing the frequent case.
7894 }
7895 ZoneTextBuffer buffer(Thread::Current()->zone());
7896 PrintTo(&buffer);
7897 return buffer.buffer();
7898}
7899
7900const char* PatchClass::ToCString() const {
7901 const Class& cls = Class::Handle(wrapped_class());
7902 const char* cls_name = cls.ToCString();
7903 return OS::SCreate(Thread::Current()->zone(), "PatchClass for %s", cls_name);
7904}
7905
7906PatchClassPtr PatchClass::New(const Class& wrapped_class,
7907 const KernelProgramInfo& info,
7908 const Script& script) {
7909 const PatchClass& result = PatchClass::Handle(PatchClass::New());
7910 result.set_wrapped_class(wrapped_class);
7912 result.untag()->set_kernel_program_info(info.ptr()));
7913 result.set_script(script);
7914 result.set_kernel_library_index(-1);
7915 return result.ptr();
7916}
7917
7918PatchClassPtr PatchClass::New() {
7920 return Object::Allocate<PatchClass>(Heap::kOld);
7921}
7922
// Sets the class this patch class wraps.
void PatchClass::set_wrapped_class(const Class& value) const {
  untag()->set_wrapped_class(value.ptr());
}
7926
7927#if !defined(DART_PRECOMPILED_RUNTIME)
7929 untag()->set_kernel_program_info(info.ptr());
7930}
7931#endif
7932
// Sets the script associated with this patch class.
void PatchClass::set_script(const Script& value) const {
  untag()->set_script(value.ptr());
}
7936
7939 if (IsClosureFunction()) {
7940 hash = hash ^ token_pos().Hash();
7941 }
7942 if (Owner()->IsClass()) {
7943 hash = hash ^ Class::Hash(Class::RawCast(Owner()));
7944 }
7945 return hash;
7946}
7947
7949#if defined(PRODUCT)
7950 return false;
7951#else
7952 auto thread = Thread::Current();
7953 return thread->isolate_group()->debugger()->HasBreakpoint(thread, *this);
7954#endif
7955}
7956
7957void Function::InstallOptimizedCode(const Code& code) const {
7958 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
7959 // We may not have previous code if FLAG_precompile is set.
7960 // Hot-reload may have already disabled the current code.
7961 if (HasCode() && !Code::Handle(CurrentCode()).IsDisabled()) {
7962 Code::Handle(CurrentCode()).DisableDartCode();
7963 }
7964 AttachCode(code);
7965}
7966
7967void Function::SetInstructions(const Code& value) const {
7968 // Ensure that nobody is executing this function when we install it.
7969 if (untag()->code() != Code::null() && HasCode()) {
7971 SetInstructionsSafe(value);
7972 } else {
7973 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
7974 SetInstructionsSafe(value);
7975 }
7976}
7977
// Installs [value] as this function's code and mirrors its entry points into
// the function's cached entry-point fields. Unlike SetInstructions, performs
// no locking checks of its own; callers are responsible for safety.
void Function::SetInstructionsSafe(const Code& value) const {
  // Set the code object first, then the raw entry points derived from it.
  untag()->set_code(value.ptr());
  StoreNonPointer(&untag()->entry_point_, value.EntryPoint());
  StoreNonPointer(&untag()->unchecked_entry_point_,
                  value.UncheckedEntryPoint());
}
7984
// Attaches [value] as this function's code, linking the code back to the
// function before activating it. Caller must hold the program lock as a
// writer.
void Function::AttachCode(const Code& value) const {
  ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  // Finish setting up code before activating it.
  value.set_owner(*this);
  SetInstructions(value);
  ASSERT(Function::Handle(value.function()).IsNull() ||
         (value.function() == this->ptr()));
}
7993
7994bool Function::HasCode() const {
7995 NoSafepointScope no_safepoint;
7996 ASSERT(untag()->code() != Code::null());
7997 return untag()->code() != StubCode::LazyCompile().ptr();
7998}
7999
// Static variant of HasCode for use with a raw FunctionPtr: true when the
// function's code is anything other than the lazy-compile stub.
bool Function::HasCode(FunctionPtr function) {
  NoSafepointScope no_safepoint;
  ASSERT(function->untag()->code() != Code::null());
  return function->untag()->code() != StubCode::LazyCompile().ptr();
}
8005
8007#if defined(DART_PRECOMPILED_RUNTIME)
8008 UNREACHABLE();
8009#else
8010 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
8011 untag()->set_unoptimized_code(Code::null());
8012 SetInstructions(StubCode::LazyCompile());
8013#endif // defined(DART_PRECOMPILED_RUNTIME)
8014}
8015
8017#if defined(DART_PRECOMPILED_RUNTIME)
8018 UNREACHABLE();
8019#else
8020 untag()->set_unoptimized_code(Code::null());
8021
8022 SetInstructionsSafe(StubCode::LazyCompile());
8023#endif // defined(DART_PRECOMPILED_RUNTIME)
8024}
8025
8027 ASSERT(!ForceOptimize());
8028 Thread* thread = Thread::Current();
8029 ASSERT(thread->IsDartMutatorThread());
8030 // TODO(35224): DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame());
8031 Zone* zone = thread->zone();
8032
8033 const Error& error =
8034 Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, *this));
8035 if (!error.IsNull()) {
8037 }
8038}
8039
8041 ASSERT(HasOptimizedCode());
8042 ASSERT(!ForceOptimize());
8043 Thread* thread = Thread::Current();
8046 Zone* zone = thread->zone();
8047 // TODO(35224): DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame());
8048 const Code& current_code = Code::Handle(zone, CurrentCode());
8049
8050 if (FLAG_trace_deoptimization_verbose) {
8051 THR_Print("Disabling optimized code: '%s' entry: %#" Px "\n",
8052 ToFullyQualifiedCString(), current_code.EntryPoint());
8053 }
8054 current_code.DisableDartCode();
8055 const Error& error =
8056 Error::Handle(zone, Compiler::EnsureUnoptimizedCode(thread, *this));
8057 if (!error.IsNull()) {
8059 }
8060 const Code& unopt_code = Code::Handle(zone, unoptimized_code());
8061 unopt_code.Enable();
8062 AttachCode(unopt_code);
8063}
8064
8066#if defined(DART_PRECOMPILED_RUNTIME)
8067 UNREACHABLE();
8068#else
8069 if (!HasOptimizedCode()) {
8070 return;
8071 }
8072
8073 Thread* thread = Thread::Current();
8074 Zone* zone = thread->zone();
8075 ASSERT(thread->IsDartMutatorThread());
8076
8077 const Code& current_code = Code::Handle(zone, CurrentCode());
8078 TIR_Print("Disabling optimized code for %s\n", ToCString());
8079 current_code.DisableDartCode();
8080
8081 const Code& unopt_code = Code::Handle(zone, unoptimized_code());
8082 if (unopt_code.IsNull()) {
8083 // Set the lazy compile stub code.
8084 TIR_Print("Switched to lazy compile stub for %s\n", ToCString());
8085 SetInstructions(StubCode::LazyCompile());
8086 return;
8087 }
8088
8089 TIR_Print("Switched to unoptimized code for %s\n", ToCString());
8090
8091 AttachCode(unopt_code);
8092 unopt_code.Enable();
8093#endif
8094}
8095
// Sets this function's unoptimized code. Only meaningful in JIT mode; the
// value must be null or unoptimized code.
void Function::set_unoptimized_code(const Code& value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  DEBUG_ASSERT(IsMutatorOrAtDeoptSafepoint());
  ASSERT(value.IsNull() || !value.is_optimized());
  untag()->set_unoptimized_code(value.ptr());
#endif
}
8105
8106ContextScopePtr Function::context_scope() const {
8107 if (IsClosureFunction()) {
8108 const Object& obj = Object::Handle(untag()->data());
8109 ASSERT(!obj.IsNull());
8110 return ClosureData::Cast(obj).context_scope();
8111 }
8112 return ContextScope::null();
8113}
8114
8116 if (IsClosureFunction()) {
8117 const Object& obj = Object::Handle(untag()->data());
8118 ASSERT(!obj.IsNull());
8119 ClosureData::Cast(obj).set_context_scope(value);
8120 return;
8121 }
8122 UNREACHABLE();
8123}
8124
8126 if (IsClosureFunction()) {
8127 const Object& obj = Object::Handle(untag()->data());
8128 ASSERT(!obj.IsNull());
8129 return ClosureData::Cast(obj).awaiter_link();
8130 }
8131 UNREACHABLE();
8132 return {};
8133}
8134
8136 if (IsClosureFunction()) {
8137 const Object& obj = Object::Handle(untag()->data());
8138 ASSERT(!obj.IsNull());
8139 ClosureData::Cast(obj).set_awaiter_link(link);
8140 return;
8141 }
8142 UNREACHABLE();
8143}
8144
8145ClosurePtr Function::implicit_static_closure() const {
8146 if (IsImplicitStaticClosureFunction()) {
8147 const Object& obj = Object::Handle(untag()->data());
8148 ASSERT(!obj.IsNull());
8149 return ClosureData::Cast(obj).implicit_static_closure();
8150 }
8151 return Closure::null();
8152}
8153
8154void Function::set_implicit_static_closure(const Closure& closure) const {
8155 if (IsImplicitStaticClosureFunction()) {
8156 const Object& obj = Object::Handle(untag()->data());
8157 ASSERT(!obj.IsNull());
8158 ClosureData::Cast(obj).set_implicit_static_closure(closure);
8159 return;
8160 }
8161 UNREACHABLE();
8162}
8163
8164ScriptPtr Function::eval_script() const {
8165 const Object& obj = Object::Handle(untag()->data());
8166 if (obj.IsScript()) {
8167 return Script::Cast(obj).ptr();
8168 }
8169 return Script::null();
8170}
8171
// Stores [script] in this function's data slot. The slot must still be empty
// and the function must have the minimal source token position.
void Function::set_eval_script(const Script& script) const {
  ASSERT(token_pos() == TokenPosition::kMinSource);
  ASSERT(untag()->data() == Object::null());
  set_data(script);
}
8177
8179 ASSERT(kind() == UntaggedFunction::kMethodExtractor);
8180 const Object& obj = Object::Handle(untag()->data());
8181 ASSERT(obj.IsFunction());
8182 return Function::Cast(obj).ptr();
8183}
8184
8186 ASSERT(kind() == UntaggedFunction::kMethodExtractor);
8187 ASSERT(untag()->data() == Object::null());
8188 set_data(value);
8189}
8190
8192 if (kind() == UntaggedFunction::kDynamicInvocationForwarder) {
8193 return Array::null();
8194 }
8195 ASSERT(kind() == UntaggedFunction::kNoSuchMethodDispatcher ||
8196 kind() == UntaggedFunction::kInvokeFieldDispatcher);
8197 return Array::RawCast(untag()->data());
8198}
8199
// Stores the saved arguments descriptor for a noSuchMethod or invoke-field
// dispatcher. The data slot must still be empty.
void Function::set_saved_args_desc(const Array& value) const {
  ASSERT(kind() == UntaggedFunction::kNoSuchMethodDispatcher ||
         kind() == UntaggedFunction::kInvokeFieldDispatcher);
  ASSERT(untag()->data() == Object::null());
  set_data(value);
}
8206
8208 ASSERT(kind() == UntaggedFunction::kImplicitGetter ||
8209 kind() == UntaggedFunction::kImplicitSetter ||
8210 kind() == UntaggedFunction::kImplicitStaticGetter ||
8211 kind() == UntaggedFunction::kFieldInitializer);
8212 return Field::RawCast(untag()->data());
8213}
8214
// Associates the field backing this implicit accessor or field initializer.
// Re-setting the same field is allowed (see comment below).
void Function::set_accessor_field(const Field& value) const {
  ASSERT(kind() == UntaggedFunction::kImplicitGetter ||
         kind() == UntaggedFunction::kImplicitSetter ||
         kind() == UntaggedFunction::kImplicitStaticGetter ||
         kind() == UntaggedFunction::kFieldInitializer);
  // Top level classes may be finalized multiple times.
  ASSERT(untag()->data() == Object::null() || untag()->data() == value.ptr());
  set_data(value);
}
8224
8225FunctionPtr Function::parent_function() const {
8226 if (!IsClosureFunction()) return Function::null();
8227 Object& obj = Object::Handle(untag()->data());
8228 ASSERT(!obj.IsNull());
8229 return ClosureData::Cast(obj).parent_function();
8230}
8231
8232void Function::set_parent_function(const Function& value) const {
8233 ASSERT(IsClosureFunction());
8234 const Object& obj = Object::Handle(untag()->data());
8235 ASSERT(!obj.IsNull());
8236 ClosureData::Cast(obj).set_parent_function(value);
8237}
8238
8239TypeArgumentsPtr Function::DefaultTypeArguments(Zone* zone) const {
8240 if (type_parameters() == TypeParameters::null()) {
8241 return Object::empty_type_arguments().ptr();
8242 }
8243 return TypeParameters::Handle(zone, type_parameters()).defaults();
8244}
8245
8247 if (!IsClosureFunction()) {
8248 UNREACHABLE();
8249 }
8250 return ClosureData::DefaultTypeArgumentsInstantiationMode(
8252}
8253
8255 InstantiationMode value) const {
8256 if (!IsClosureFunction()) {
8257 UNREACHABLE();
8258 }
8259 const auto& closure_data = ClosureData::Handle(ClosureData::RawCast(data()));
8260 ASSERT(!closure_data.IsNull());
8261 closure_data.set_default_type_arguments_instantiation_mode(value);
8262}
8263
8264// Enclosing outermost function of this local function.
8266 FunctionPtr parent = parent_function();
8267 if (parent == Object::null()) {
8268 return ptr();
8269 }
8271 do {
8272 function = parent;
8273 parent = function.parent_function();
8274 } while (parent != Object::null());
8275 return function.ptr();
8276}
8277
8278FunctionPtr Function::implicit_closure_function() const {
8279 if (IsClosureFunction() || IsDispatcherOrImplicitAccessor() ||
8280 IsFieldInitializer() || IsFfiCallbackTrampoline() ||
8281 IsMethodExtractor()) {
8282 return Function::null();
8283 }
8284 const Object& obj = Object::Handle(data());
8285 ASSERT(obj.IsNull() || obj.IsScript() || obj.IsFunction() || obj.IsArray());
8286 if (obj.IsNull() || obj.IsScript()) {
8287 return Function::null();
8288 }
8289 if (obj.IsFunction()) {
8290 return Function::Cast(obj).ptr();
8291 }
8292 ASSERT(is_native());
8293 ASSERT(obj.IsArray());
8294 const Object& res = Object::Handle(Array::Cast(obj).AtAcquire(1));
8295 return res.IsNull() ? Function::null() : Function::Cast(res).ptr();
8296}
8297
8298void Function::set_implicit_closure_function(const Function& value) const {
8300 IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
8301 ASSERT(!IsClosureFunction());
8302 const Object& old_data = Object::Handle(data());
8303 if (is_old_native()) {
8304 ASSERT(old_data.IsArray());
8305 const auto& pair = Array::Cast(old_data);
8306 ASSERT(pair.AtAcquire(NativeFunctionData::kTearOff) == Object::null() ||
8307 value.IsNull());
8308 pair.SetAtRelease(NativeFunctionData::kTearOff, value);
8309 } else {
8310 ASSERT(old_data.IsNull() || value.IsNull());
8311 set_data(value);
8312 }
8313}
8314
8316 ASSERT(IsFfiCallbackTrampoline());
8317 const Object& obj = Object::Handle(data());
8318 ASSERT(!obj.IsNull());
8319 FfiTrampolineData::Cast(obj).set_c_signature(sig);
8320}
8321
// Returns the C signature (as a FunctionType) of this FFI function.
//
// For callback trampolines the signature is cached on the trampoline's
// FfiTrampolineData; for native functions and FFI call closures it is
// recovered from the single type argument of the respective pragma value.
FunctionTypePtr Function::FfiCSignature() const {
  auto* const zone = Thread::Current()->zone();
  if (IsFfiCallbackTrampoline()) {
    const Object& obj = Object::Handle(zone, data());
    ASSERT(!obj.IsNull());
    return FfiTrampolineData::Cast(obj).c_signature();
  }
  auto& pragma_value = Instance::Handle(zone);
  if (is_ffi_native()) {
    pragma_value = GetNativeAnnotation();
  } else if (IsFfiCallClosure()) {
    pragma_value = GetFfiCallClosurePragmaValue();
  } else {
    // Only FFI-related functions have a C signature.
    UNREACHABLE();
  }
  // The pragma instance carries exactly one type argument: the native
  // function type.
  const auto& type_args =
      TypeArguments::Handle(zone, pragma_value.GetTypeArguments());
  ASSERT(type_args.Length() == 1);
  const auto& native_type =
      FunctionType::Cast(AbstractType::ZoneHandle(zone, type_args.TypeAt(0)));
  return native_type.ptr();
}
8344
8346 const FunctionType& c_signature = FunctionType::Handle(FfiCSignature());
8347 return c_signature.ContainsHandles();
8348}
8349
8351 const intptr_t num_params = num_fixed_parameters();
8352 for (intptr_t i = 0; i < num_params; i++) {
8353 const bool is_handle =
8354 AbstractType::Handle(ParameterTypeAt(i)).type_class_id() ==
8355 kFfiHandleCid;
8356 if (is_handle) {
8357 return true;
8358 }
8359 }
8360 return AbstractType::Handle(result_type()).type_class_id() == kFfiHandleCid;
8361}
8362
8363// Keep consistent with BaseMarshaller::IsCompound.
8365 ASSERT(IsFfiCallbackTrampoline());
8366 Zone* zone = Thread::Current()->zone();
8367 const auto& c_signature = FunctionType::Handle(zone, FfiCSignature());
8368 const auto& type = AbstractType::Handle(zone, c_signature.result_type());
8369 if (IsFfiTypeClassId(type.type_class_id())) {
8370 return false;
8371 }
8372 const auto& cls = Class::Handle(zone, type.type_class());
8373 const auto& superClass = Class::Handle(zone, cls.SuperClass());
8374 const bool is_abi_specific_int =
8375 String::Handle(zone, superClass.UserVisibleName())
8376 .Equals(Symbols::AbiSpecificInteger());
8377 if (is_abi_specific_int) {
8378 return false;
8379 }
8380#ifdef DEBUG
8381 const bool is_struct = String::Handle(zone, superClass.UserVisibleName())
8382 .Equals(Symbols::Struct());
8383 const bool is_union = String::Handle(zone, superClass.UserVisibleName())
8384 .Equals(Symbols::Union());
8385 ASSERT(is_struct || is_union);
8386#endif
8387 return true;
8388}
8389
8391 ASSERT(IsFfiCallbackTrampoline());
8392
8393 const auto& obj = Object::Handle(data());
8394 ASSERT(!obj.IsNull());
8395 const auto& trampoline_data = FfiTrampolineData::Cast(obj);
8396
8397 ASSERT(trampoline_data.callback_id() != -1);
8398
8399 return trampoline_data.callback_id();
8400}
8401
8402void Function::AssignFfiCallbackId(int32_t callback_id) const {
8403 ASSERT(IsFfiCallbackTrampoline());
8404
8405 const auto& obj = Object::Handle(data());
8406 ASSERT(!obj.IsNull());
8407 const auto& trampoline_data = FfiTrampolineData::Cast(obj);
8408
8409 ASSERT(trampoline_data.callback_id() == -1);
8410 trampoline_data.set_callback_id(callback_id);
8411}
8412
8414 Zone* zone = Thread::Current()->zone();
8415 auto& pragma_value = Instance::Handle(zone);
8416 if (is_ffi_native()) {
8417 pragma_value = GetNativeAnnotation();
8418 } else if (IsFfiCallClosure()) {
8419 pragma_value = GetFfiCallClosurePragmaValue();
8420 } else {
8421 UNREACHABLE();
8422 }
8423 const auto& pragma_value_class = Class::Handle(zone, pragma_value.clazz());
8424 const auto& pragma_value_fields =
8425 Array::Handle(zone, pragma_value_class.fields());
8426 ASSERT(pragma_value_fields.Length() >= 1);
8427 const auto& is_leaf_field = Field::Handle(
8428 zone,
8429 Field::RawCast(pragma_value_fields.At(pragma_value_fields.Length() - 1)));
8430 ASSERT(is_leaf_field.name() == Symbols::isLeaf().ptr());
8431 return Bool::Handle(zone, Bool::RawCast(pragma_value.GetField(is_leaf_field)))
8432 .value();
8433}
8434
8435FunctionPtr Function::FfiCallbackTarget() const {
8436 ASSERT(IsFfiCallbackTrampoline());
8437 const Object& obj = Object::Handle(data());
8438 ASSERT(!obj.IsNull());
8439 return FfiTrampolineData::Cast(obj).callback_target();
8440}
8441
8443 ASSERT(IsFfiCallbackTrampoline());
8444 const Object& obj = Object::Handle(data());
8445 ASSERT(!obj.IsNull());
8446 FfiTrampolineData::Cast(obj).set_callback_target(target);
8447}
8448
8450 ASSERT(IsFfiCallbackTrampoline());
8451 const Object& obj = Object::Handle(data());
8452 ASSERT(!obj.IsNull());
8453 return FfiTrampolineData::Cast(obj).callback_exceptional_return();
8454}
8455
8457 ASSERT(IsFfiCallbackTrampoline());
8458 const Object& obj = Object::Handle(data());
8459 ASSERT(!obj.IsNull());
8460 FfiTrampolineData::Cast(obj).set_callback_exceptional_return(value);
8461}
8462
8464 ASSERT(IsFfiCallbackTrampoline());
8465 const Object& obj = Object::Handle(data());
8466 ASSERT(!obj.IsNull());
8467 return FfiTrampolineData::Cast(obj).ffi_function_kind();
8468}
8469
8471 ASSERT(IsFfiCallbackTrampoline());
8472 const Object& obj = Object::Handle(data());
8473 ASSERT(!obj.IsNull());
8474 FfiTrampolineData::Cast(obj).set_ffi_function_kind(value);
8475}
8476
8480
8481FunctionPtr Function::ForwardingTarget() const {
8482 ASSERT(kind() == UntaggedFunction::kDynamicInvocationForwarder);
8484}
8485
8487 ASSERT(kind() == UntaggedFunction::kDynamicInvocationForwarder);
8488 set_data(target);
8489}
8490
8491// This field is heavily overloaded:
8492// kernel eval function: Array[0] = Script
8493// Array[1] = KernelProgramInfo
8494// Array[2] = Kernel index of enclosing library
8495// method extractor: Function extracted closure function
8496// implicit getter: Field
8497// implicit setter: Field
8498 // impl. static final getter: Field
8499// field initializer: Field
8500// noSuchMethod dispatcher: Array arguments descriptor
8501// invoke-field dispatcher: Array arguments descriptor
8502// closure function: ClosureData
8503// irregexp function: Array[0] = RegExp
8504// Array[1] = Smi string specialization cid
8505// native function: Array[0] = String native name
8506// Array[1] = Function implicit closure function
8507// regular function: Function for implicit closure function
8508// constructor, factory: Function for implicit closure function
8509// ffi trampoline function: FfiTrampolineData (Dart->C)
8510// dyn inv forwarder: Forwarding target, a WSR pointing to it or null
8511// (null can only occur if forwarding target was
8512// dropped)
// Stores [value] into the overloaded data field (see the table above for
// what it holds per function kind). The release ordering pairs with acquire
// loads by concurrent readers so they observe a fully initialized object.
void Function::set_data(const Object& value) const {
  untag()->set_data<std::memory_order_release>(value.ptr());
}
8516
// Sets the function's name. Names must be interned symbols so they can be
// compared by pointer.
void Function::set_name(const String& value) const {
  ASSERT(value.IsSymbol());
  untag()->set_name(value.ptr());
}
8521
// Sets the owner of this function. Every function has an owner, so null is
// rejected.
void Function::set_owner(const Object& value) const {
  ASSERT(!value.IsNull());
  untag()->set_owner(value.ptr());
}
8526
8527RegExpPtr Function::regexp() const {
8528 ASSERT(kind() == UntaggedFunction::kIrregexpFunction);
8529 const Array& pair = Array::Cast(Object::Handle(data()));
8530 return RegExp::RawCast(pair.At(0));
8531}
8532
// Bit 0 of the irregexp specialization Smi: whether the regexp is sticky.
class StickySpecialization : public BitField<intptr_t, bool, 0, 1> {};
8535 : public BitField<intptr_t, intptr_t, 1, UntaggedObject::kClassIdTagSize> {
8536};
8537
8539 ASSERT(kind() == UntaggedFunction::kIrregexpFunction);
8540 const Array& pair = Array::Cast(Object::Handle(data()));
8542}
8543
8545 ASSERT(kind() == UntaggedFunction::kIrregexpFunction);
8546 const Array& pair = Array::Cast(Object::Handle(data()));
8548}
8549
8551 intptr_t string_specialization_cid,
8552 bool sticky) const {
8553 ASSERT(kind() == UntaggedFunction::kIrregexpFunction);
8554 ASSERT(IsStringClassId(string_specialization_cid));
8555 ASSERT(data() == Object::null());
8556 const Array& pair = Array::Handle(Array::New(2, Heap::kOld));
8557 pair.SetAt(0, regexp);
8560 string_specialization_cid))));
8561 set_data(pair);
8562}
8563
8564StringPtr Function::native_name() const {
8565 ASSERT(is_native());
8566 const Object& obj = Object::Handle(data());
8567 ASSERT(obj.IsArray());
8568 return String::RawCast(Array::Cast(obj).At(0));
8569}
8570
8571void Function::set_native_name(const String& value) const {
8572 ASSERT(is_native());
8573 const auto& pair = Array::Cast(Object::Handle(data()));
8574 ASSERT(pair.At(0) == Object::null());
8575 pair.SetAt(NativeFunctionData::kNativeName, value);
8576}
8577
8579 ASSERT(is_ffi_native());
8580 Zone* zone = Thread::Current()->zone();
8581 auto& pragma_value = Object::Handle(zone);
8582 Library::FindPragma(dart::Thread::Current(), /*only_core=*/false,
8583 Object::Handle(zone, ptr()),
8584 String::Handle(zone, Symbols::vm_ffi_native().ptr()),
8585 /*multiple=*/false, &pragma_value);
8586 auto const& native_instance = Instance::Cast(pragma_value);
8587 ASSERT(!native_instance.IsNull());
8588#if defined(DEBUG)
8589 const auto& native_class = Class::Handle(zone, native_instance.clazz());
8590 ASSERT(String::Handle(zone, native_class.UserVisibleName())
8591 .Equals(Symbols::FfiNative()));
8592#endif
8593 return native_instance.ptr();
8594}
8595
8597 return is_native() && !is_external();
8598}
8599
8601 return is_native() && is_external();
8602}
8603
// Installs [value] as this function's signature and, for generic closure
// functions, precomputes and caches how their default type arguments can be
// instantiated.
void Function::SetSignature(const FunctionType& value) const {
  set_signature(value);
  ASSERT(NumImplicitParameters() == value.num_implicit_parameters());
  if (IsClosureFunction() && value.IsGeneric()) {
    Zone* zone = Thread::Current()->zone();
    const TypeParameters& type_params =
        TypeParameters::Handle(zone, value.type_parameters());
    const TypeArguments& defaults =
        TypeArguments::Handle(zone, type_params.defaults());
    // Cache the mode so closure calls need not recompute it.
    auto mode = defaults.GetInstantiationMode(zone, this);
    set_default_type_arguments_instantiation_mode(mode);
  }
}
8617
8618TypeParameterPtr FunctionType::TypeParameterAt(intptr_t index,
8619 Nullability nullability) const {
8620 ASSERT(index >= 0 && index < NumTypeParameters());
8621 Thread* thread = Thread::Current();
8622 Zone* zone = thread->zone();
8624 zone, TypeParameter::New(*this, NumParentTypeArguments(),
8625 NumParentTypeArguments() + index, nullability));
8626 type_param.SetIsFinalized();
8627 if (IsFinalized()) {
8628 type_param ^= type_param.Canonicalize(thread);
8629 }
8630 return type_param.ptr();
8631}
8632
8634 ASSERT(!value.IsNull());
8635 untag()->set_result_type(value.ptr());
8636}
8637
8638AbstractTypePtr Function::ParameterTypeAt(intptr_t index) const {
8639 const Array& types = Array::Handle(parameter_types());
8640 return AbstractType::RawCast(types.At(index));
8641}
8642
8643AbstractTypePtr FunctionType::ParameterTypeAt(intptr_t index) const {
8644 const Array& parameter_types = Array::Handle(untag()->parameter_types());
8645 return AbstractType::RawCast(parameter_types.At(index));
8646}
8647
8649 const AbstractType& value) const {
8650 ASSERT(!value.IsNull());
8651 const Array& parameter_types = Array::Handle(untag()->parameter_types());
8652 parameter_types.SetAt(index, value);
8653}
8654
8656 ASSERT(value.IsNull() || value.Length() > 0);
8657 untag()->set_parameter_types(value.ptr());
8658}
8659
// Returns the name of the parameter at [index].
//
// Positional parameter names are stored on the Function itself; named
// parameter names live on the signature. In the precompiled runtime names
// may have been dropped, in which case a placeholder symbol is returned.
StringPtr Function::ParameterNameAt(intptr_t index) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  if (signature() == FunctionType::null()) {
    // Without the signature, we're guaranteed not to have any name
    // information.
    return Symbols::OptimizedOut().ptr();
  }
#endif
  const intptr_t num_fixed = num_fixed_parameters();
  if (HasOptionalNamedParameters() && index >= num_fixed) {
    // Named parameters: stored on the signature, offset past the fixed ones.
    const Array& parameter_names =
        Array::Handle(signature()->untag()->named_parameter_names());
    return String::RawCast(parameter_names.At(index - num_fixed));
  }
#if defined(DART_PRECOMPILED_RUNTIME)
  // Positional parameter names are not retained in the precompiled runtime.
  return Symbols::OptimizedOut().ptr();
#else
  const Array& names = Array::Handle(untag()->positional_parameter_names());
  return String::RawCast(names.At(index));
#endif
}
8680
// Sets the name of the positional parameter at [index]. Named parameter
// names must be set on the signature instead; names are never written in
// the precompiled runtime.
void Function::SetParameterNameAt(intptr_t index, const String& value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(!value.IsNull() && value.IsSymbol());
  if (HasOptionalNamedParameters() && index >= num_fixed_parameters()) {
    // These should be set on the signature, not the function.
    UNREACHABLE();
  }
  const Array& parameter_names =
      Array::Handle(untag()->positional_parameter_names());
  parameter_names.SetAt(index, value);
#endif
}
8695
8696#if !defined(DART_PRECOMPILED_RUNTIME)
// Installs the positional parameter name array. When there are no names the
// canonical empty array must be used rather than a zero-length allocation.
void Function::set_positional_parameter_names(const Array& value) const {
  ASSERT(value.ptr() == Object::empty_array().ptr() || value.Length() > 0);
  untag()->set_positional_parameter_names(value.ptr());
}
8701#endif
8702
8703StringPtr FunctionType::ParameterNameAt(intptr_t index) const {
8704 const intptr_t num_fixed = num_fixed_parameters();
8705 if (!HasOptionalNamedParameters() || index < num_fixed) {
8706 // The positional parameter names are stored on the function, not here.
8707 UNREACHABLE();
8708 }
8709 const Array& parameter_names =
8710 Array::Handle(untag()->named_parameter_names());
8711 return String::RawCast(parameter_names.At(index - num_fixed));
8712}
8713
8715 const String& value) const {
8716#if defined(DART_PRECOMPILED_RUNTIME)
8717 UNREACHABLE();
8718#else
8719 ASSERT(!value.IsNull() && value.IsSymbol());
8720 const intptr_t num_fixed = num_fixed_parameters();
8721 if (!HasOptionalNamedParameters() || index < num_fixed) {
8722 UNREACHABLE();
8723 }
8724 const Array& parameter_names =
8725 Array::Handle(untag()->named_parameter_names());
8726 parameter_names.SetAt(index - num_fixed, value);
8727#endif
8728}
8729
8731 ASSERT(value.ptr() == Object::empty_array().ptr() || value.Length() > 0);
8732 untag()->set_named_parameter_names(value.ptr());
8733}
8734
8736#if defined(DART_PRECOMPILED_RUNTIME)
8737 UNREACHABLE();
8738#else
8739 const intptr_t num_positional_params =
8740 num_fixed_parameters() + NumOptionalPositionalParameters();
8741 if (num_positional_params == 0) {
8742 set_positional_parameter_names(Object::empty_array());
8743 } else {
8744 set_positional_parameter_names(
8745 Array::Handle(Array::New(num_positional_params, space)));
8746 }
8747#endif
8748}
8749
8751#if defined(DART_PRECOMPILED_RUNTIME)
8752 UNREACHABLE();
8753#else
8754 const intptr_t num_named_parameters = NumOptionalNamedParameters();
8755 if (num_named_parameters == 0) {
8756 return set_named_parameter_names(Object::empty_array());
8757 }
8758 // Currently, we only store flags for named parameters.
8759 const intptr_t last_index = (num_named_parameters - 1) /
8760 compiler::target::kNumParameterFlagsPerElement;
8761 const intptr_t num_flag_slots = last_index + 1;
8762 intptr_t num_total_slots = num_named_parameters + num_flag_slots;
8763 auto& array = Array::Handle(Array::New(num_total_slots, space));
8764 // Set flag slots to Smi 0 before handing off.
8765 auto& empty_flags_smi = Smi::Handle(Smi::New(0));
8766 for (intptr_t i = num_named_parameters; i < num_total_slots; i++) {
8767 array.SetAt(i, empty_flags_smi);
8768 }
8769 set_named_parameter_names(array);
8770#endif
8771}
8772
8774 intptr_t* flag_mask) const {
8775 // If these calculations change, also change
8776 // FlowGraphBuilder::BuildClosureCallHasRequiredNamedArgumentsCheck.
8777 ASSERT(HasOptionalNamedParameters());
8778 ASSERT(flag_mask != nullptr);
8779 ASSERT(index >= num_fixed_parameters());
8780 index -= num_fixed_parameters();
8781 *flag_mask = (1 << compiler::target::kRequiredNamedParameterFlag)
8782 << ((static_cast<uintptr_t>(index) %
8783 compiler::target::kNumParameterFlagsPerElement) *
8784 compiler::target::kNumParameterFlags);
8785 return NumOptionalNamedParameters() +
8786 index / compiler::target::kNumParameterFlagsPerElement;
8787}
8788
8790#if defined(DART_PRECOMPILED_RUNTIME)
8791 if (signature() == FunctionType::null()) {
8792 // Signatures for functions with required named parameters are not dropped.
8793 return false;
8794 }
8795#endif
8796 return FunctionType::Handle(signature()).HasRequiredNamedParameters();
8797}
8798
8799bool Function::IsRequiredAt(intptr_t index) const {
8800#if defined(DART_PRECOMPILED_RUNTIME)
8801 if (signature() == FunctionType::null()) {
8802 // Signature is not dropped in aot when any named parameter is required.
8803 return false;
8804 }
8805#endif
8806 if (!HasOptionalNamedParameters() || index < num_fixed_parameters()) {
8807 return false;
8808 }
8809 const FunctionType& sig = FunctionType::Handle(signature());
8810 return sig.IsRequiredAt(index);
8811}
8812
8813bool FunctionType::IsRequiredAt(intptr_t index) const {
8814 if (!HasOptionalNamedParameters() || index < num_fixed_parameters()) {
8815 return false;
8816 }
8817 intptr_t flag_mask;
8818 const intptr_t flag_index = GetRequiredFlagIndex(index, &flag_mask);
8819 const Array& parameter_names =
8820 Array::Handle(untag()->named_parameter_names());
8821 if (flag_index >= parameter_names.Length()) {
8822 return false;
8823 }
8824 const intptr_t flags =
8825 Smi::Value(Smi::RawCast(parameter_names.At(flag_index)));
8826 return (flags & flag_mask) != 0;
8827}
8828
8829void FunctionType::SetIsRequiredAt(intptr_t index) const {
8830#if defined(DART_PRECOMPILER_RUNTIME)
8831 UNREACHABLE();
8832#else
8833 intptr_t flag_mask;
8834 const intptr_t flag_index = GetRequiredFlagIndex(index, &flag_mask);
8835 const Array& parameter_names =
8836 Array::Handle(untag()->named_parameter_names());
8837 ASSERT(flag_index < parameter_names.Length());
8838 const intptr_t flags =
8839 Smi::Value(Smi::RawCast(parameter_names.At(flag_index)));
8840 parameter_names.SetAt(flag_index, Smi::Handle(Smi::New(flags | flag_mask)));
8841#endif
8842}
8843
8845#if defined(DART_PRECOMPILER_RUNTIME)
8846 UNREACHABLE();
8847#else
8848 const intptr_t num_named_parameters = NumOptionalNamedParameters();
8849 if (num_named_parameters == 0) {
8850 ASSERT(untag()->named_parameter_names() == Object::empty_array().ptr());
8851 return;
8852 }
8853 const Array& parameter_names =
8854 Array::Handle(untag()->named_parameter_names());
8855 // Truncate the parameter names array to remove unused flags from the end.
8856 intptr_t last_used = parameter_names.Length() - 1;
8857 for (; last_used >= num_named_parameters; --last_used) {
8858 if (Smi::Value(Smi::RawCast(parameter_names.At(last_used))) != 0) {
8859 break;
8860 }
8861 }
8862 parameter_names.Truncate(last_used + 1);
8863#endif
8864}
8865
8867 const intptr_t num_named_params = NumOptionalNamedParameters();
8868 if (num_named_params == 0) return false;
8869 // Check for flag slots in the named parameter names array.
8870 const auto& parameter_names = Array::Handle(named_parameter_names());
8871 ASSERT(!parameter_names.IsNull());
8872 return parameter_names.Length() > num_named_params;
8873}
8874
8876 Report::MessageF(Report::kError, Script::Handle(), TokenPosition::kNoSource,
8878 "too many type parameters declared in signature '%s' or in "
8879 "its enclosing signatures",
8880 sig.ToUserVisibleCString());
8881 UNREACHABLE();
8882}
8883
8885 untag()->set_type_parameters(value.ptr());
8886 const intptr_t count = value.Length();
8889 }
8890 untag()->packed_type_parameter_counts_.Update<PackedNumTypeParameters>(count);
8891}
8892
8894 ASSERT(value >= 0);
8895 if (!PackedNumParentTypeArguments::is_valid(value)) {
8897 }
8898 untag()->packed_type_parameter_counts_.Update<PackedNumParentTypeArguments>(
8899 value);
8900}
8901
8903 return FunctionType::IsGeneric(signature());
8904}
8906 return FunctionType::NumTypeParametersOf(signature());
8907}
8909 return FunctionType::NumParentTypeArgumentsOf(signature());
8910}
8912 return FunctionType::NumTypeArgumentsOf(signature());
8913}
8915 return FunctionType::NumFixedParametersOf(signature());
8916}
8918 return FunctionType::HasOptionalParameters(signature());
8919}
8927 return FunctionType::NumOptionalParametersOf(signature());
8928}
// Returns the total number of parameters (fixed plus optional), per the
// signature.
intptr_t Function::NumParameters() const {
  return FunctionType::NumParametersOf(signature());
}
8938
8939TypeParameterPtr Function::TypeParameterAt(intptr_t index,
8940 Nullability nullability) const {
8941 const FunctionType& sig = FunctionType::Handle(signature());
8942 return sig.TypeParameterAt(index, nullability);
8943}
8944
// Stores the function kind into its bit field within kind_tag_.
void Function::set_kind(UntaggedFunction::Kind value) const {
  untag()->kind_tag_.Update<KindBits>(value);
}
8948
8950 untag()->kind_tag_.Update<ModifierBits>(value);
8951}
8952
8954 // Prevent multiple settings of kind.
8955 ASSERT((value == MethodRecognizer::kUnknown) || !IsRecognized());
8956 untag()->kind_tag_.Update<RecognizedBits>(value);
8957}
8958
8960#if defined(DART_PRECOMPILED_RUNTIME)
8961 UNREACHABLE();
8962#else
8963 ASSERT(!token_pos.IsClassifying() || IsMethodExtractor());
8964 StoreNonPointer(&untag()->token_pos_, token_pos);
8965#endif
8966}
8967
// Overwrites the entire packed kind/flags word at once.
void Function::set_kind_tag(uint32_t value) const {
  untag()->kind_tag_ = value;
}
8971
8972bool Function::is_eval_function() const {
8973 if (data()->IsArray()) {
8974 const intptr_t len = Array::LengthOf(Array::RawCast(data()));
8975 return len == static_cast<intptr_t>(EvalFunctionData::kLength);
8976 }
8977 return false;
8978}
8979
// Overwrites the packed parameter-count fields. Never written in the
// precompiled runtime.
void Function::set_packed_fields(uint32_t packed_fields) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  StoreNonPointer(&untag()->packed_fields_, packed_fields);
#endif
}
8987
8989 if (FLAG_precompiled_mode) {
8990 return true;
8991 }
8992 if (ForceOptimize()) return true;
8993 if (is_old_native()) {
8994 // Native methods don't need to be optimized.
8995 return false;
8996 }
8997 if (is_optimizable() && (script() != Script::null())) {
8998 // Additional check needed for implicit getters.
8999 return (unoptimized_code() == Object::null()) ||
9000 (Code::Handle(unoptimized_code()).Size() <
9001 FLAG_huge_method_cutoff_in_code_size);
9002 }
9003 return false;
9004}
9005
9006void Function::SetIsOptimizable(bool value) const {
9007 ASSERT(!is_native());
9008 set_is_optimizable(value);
9009 if (!value) {
9010 set_is_inlinable(false);
9011 set_usage_counter(INT32_MIN);
9012 }
9013}
9014
9016 switch (recognized_kind()) {
9017 case MethodRecognizer::kTypedData_ByteDataView_factory:
9018 case MethodRecognizer::kTypedData_Int8ArrayView_factory:
9019 case MethodRecognizer::kTypedData_Uint8ArrayView_factory:
9020 case MethodRecognizer::kTypedData_Uint8ClampedArrayView_factory:
9021 case MethodRecognizer::kTypedData_Int16ArrayView_factory:
9022 case MethodRecognizer::kTypedData_Uint16ArrayView_factory:
9023 case MethodRecognizer::kTypedData_Int32ArrayView_factory:
9024 case MethodRecognizer::kTypedData_Uint32ArrayView_factory:
9025 case MethodRecognizer::kTypedData_Int64ArrayView_factory:
9026 case MethodRecognizer::kTypedData_Uint64ArrayView_factory:
9027 case MethodRecognizer::kTypedData_Float32ArrayView_factory:
9028 case MethodRecognizer::kTypedData_Float64ArrayView_factory:
9029 case MethodRecognizer::kTypedData_Float32x4ArrayView_factory:
9030 case MethodRecognizer::kTypedData_Int32x4ArrayView_factory:
9031 case MethodRecognizer::kTypedData_Float64x2ArrayView_factory:
9032 return true;
9033 default:
9034 return false;
9035 }
9036}
9037
9039 switch (recognized_kind()) {
9040 case MethodRecognizer::kTypedData_UnmodifiableByteDataView_factory:
9041 case MethodRecognizer::kTypedData_UnmodifiableInt8ArrayView_factory:
9042 case MethodRecognizer::kTypedData_UnmodifiableUint8ArrayView_factory:
9043 case MethodRecognizer::kTypedData_UnmodifiableUint8ClampedArrayView_factory:
9044 case MethodRecognizer::kTypedData_UnmodifiableInt16ArrayView_factory:
9045 case MethodRecognizer::kTypedData_UnmodifiableUint16ArrayView_factory:
9046 case MethodRecognizer::kTypedData_UnmodifiableInt32ArrayView_factory:
9047 case MethodRecognizer::kTypedData_UnmodifiableUint32ArrayView_factory:
9048 case MethodRecognizer::kTypedData_UnmodifiableInt64ArrayView_factory:
9049 case MethodRecognizer::kTypedData_UnmodifiableUint64ArrayView_factory:
9050 case MethodRecognizer::kTypedData_UnmodifiableFloat32ArrayView_factory:
9051 case MethodRecognizer::kTypedData_UnmodifiableFloat64ArrayView_factory:
9052 case MethodRecognizer::kTypedData_UnmodifiableFloat32x4ArrayView_factory:
9053 case MethodRecognizer::kTypedData_UnmodifiableInt32x4ArrayView_factory:
9054 case MethodRecognizer::kTypedData_UnmodifiableFloat64x2ArrayView_factory:
9055 return true;
9056 default:
9057 return false;
9058 }
9059}
9060
9061static bool InVmTests(const Function& function) {
9062#if defined(TESTING)
9063 return true;
9064#else
9065 auto* zone = Thread::Current()->zone();
9066 const auto& cls = Class::Handle(zone, function.Owner());
9067 const auto& lib = Library::Handle(zone, cls.library());
9068 const auto& url = String::Handle(zone, lib.url());
9069 const bool in_vm_tests =
9070 strstr(url.ToCString(), "runtime/tests/vm/") != nullptr;
9071 return in_vm_tests;
9072#endif
9073}
9074
9076 if (RecognizedKindForceOptimize() || IsFfiCallClosure() ||
9077 IsFfiCallbackTrampoline() || is_ffi_native() ||
9078 IsTypedDataViewFactory() || IsUnmodifiableTypedDataViewFactory()) {
9079 return true;
9080 }
9081
9082 if (!has_pragma()) return false;
9083
9084 const bool has_vm_pragma = Library::FindPragma(
9085 Thread::Current(), false, *this, Symbols::vm_force_optimize());
9086 if (!has_vm_pragma) return false;
9087
9088 // For run_vm_tests and runtime/tests/vm allow marking arbitrary functions as
9089 // force-optimize via `@pragma('vm:force-optimize')`.
9090 return InVmTests(*this);
9091}
9092
9094 if (!has_pragma()) return false;
9095
9096 return Library::FindPragma(Thread::Current(), /*only_core=*/false, *this,
9097 Symbols::vm_prefer_inline());
9098}
9099
9101 if (!has_pragma()) return false;
9102
9103#if defined(TESTING)
9104 const bool kAllowOnlyForCoreLibFunctions = false;
9105#else
9106 const bool kAllowOnlyForCoreLibFunctions = true;
9107#endif // defined(TESTING)
9108
9109 return Library::FindPragma(Thread::Current(), kAllowOnlyForCoreLibFunctions,
9110 *this, Symbols::vm_idempotent());
9111}
9112
9114 if (!has_pragma()) return false;
9115
9116 const bool has_vm_pragma =
9117 Library::FindPragma(Thread::Current(), /*only_core=*/false, *this,
9118 Symbols::vm_cachable_idempotent());
9119 if (!has_vm_pragma) return false;
9120
9121 // For run_vm_tests and runtime/tests/vm allow marking arbitrary functions.
9122 return InVmTests(*this);
9123}
9124
9126 if (!IsNonImplicitClosureFunction()) return false;
9127 if (!has_pragma()) return false;
9128 return Library::FindPragma(Thread::Current(), /*only_core=*/false, *this,
9129 Symbols::vm_ffi_call_closure());
9130}
9131
9133 ASSERT(IsFfiCallClosure());
9134 Thread* thread = Thread::Current();
9135 Zone* zone = thread->zone();
9136 auto& pragma_value = Object::Handle(zone);
9137 Library::FindPragma(thread, /*only_core=*/false, *this,
9138 Symbols::vm_ffi_call_closure(),
9139 /*multiple=*/false, &pragma_value);
9140 ASSERT(!pragma_value.IsNull());
9141 return Instance::Cast(pragma_value).ptr();
9142}
9143
9145 switch (recognized_kind()) {
9146 // Uses unboxed/untagged data not supported in unoptimized, or uses
9147 // LoadIndexed/StoreIndexed/MemoryCopy instructions with typed data
9148 // arrays, which requires optimization for payload extraction.
9149 case MethodRecognizer::kObjectArrayGetIndexed:
9150 case MethodRecognizer::kGrowableArrayGetIndexed:
9151#define TYPED_DATA_GET_INDEXED_CASES(clazz) \
9152 case MethodRecognizer::k##clazz##ArrayGetIndexed: \
9153 FALL_THROUGH; \
9154 case MethodRecognizer::kExternal##clazz##ArrayGetIndexed: \
9155 FALL_THROUGH; \
9156 case MethodRecognizer::k##clazz##ArrayViewGetIndexed: \
9157 FALL_THROUGH;
9159#undef TYPED_DATA_GET_INDEXED_CASES
9160 case MethodRecognizer::kCopyRangeFromUint8ListToOneByteString:
9161 case MethodRecognizer::kFinalizerBase_getIsolateFinalizers:
9162 case MethodRecognizer::kFinalizerBase_setIsolate:
9163 case MethodRecognizer::kFinalizerBase_setIsolateFinalizers:
9164 case MethodRecognizer::kFinalizerEntry_getExternalSize:
9165 case MethodRecognizer::kExtensionStreamHasListener:
9166 case MethodRecognizer::kFfiLoadInt8:
9167 case MethodRecognizer::kFfiLoadInt16:
9168 case MethodRecognizer::kFfiLoadInt32:
9169 case MethodRecognizer::kFfiLoadInt64:
9170 case MethodRecognizer::kFfiLoadUint8:
9171 case MethodRecognizer::kFfiLoadUint16:
9172 case MethodRecognizer::kFfiLoadUint32:
9173 case MethodRecognizer::kFfiLoadUint64:
9174 case MethodRecognizer::kFfiLoadFloat:
9175 case MethodRecognizer::kFfiLoadFloatUnaligned:
9176 case MethodRecognizer::kFfiLoadDouble:
9177 case MethodRecognizer::kFfiLoadDoubleUnaligned:
9178 case MethodRecognizer::kFfiLoadPointer:
9179 case MethodRecognizer::kFfiStoreInt8:
9180 case MethodRecognizer::kFfiStoreInt16:
9181 case MethodRecognizer::kFfiStoreInt32:
9182 case MethodRecognizer::kFfiStoreInt64:
9183 case MethodRecognizer::kFfiStoreUint8:
9184 case MethodRecognizer::kFfiStoreUint16:
9185 case MethodRecognizer::kFfiStoreUint32:
9186 case MethodRecognizer::kFfiStoreUint64:
9187 case MethodRecognizer::kFfiStoreFloat:
9188 case MethodRecognizer::kFfiStoreFloatUnaligned:
9189 case MethodRecognizer::kFfiStoreDouble:
9190 case MethodRecognizer::kFfiStoreDoubleUnaligned:
9191 case MethodRecognizer::kFfiStorePointer:
9192 case MethodRecognizer::kFfiFromAddress:
9193 case MethodRecognizer::kFfiGetAddress:
9194 case MethodRecognizer::kFfiAsExternalTypedDataInt8:
9195 case MethodRecognizer::kFfiAsExternalTypedDataInt16:
9196 case MethodRecognizer::kFfiAsExternalTypedDataInt32:
9197 case MethodRecognizer::kFfiAsExternalTypedDataInt64:
9198 case MethodRecognizer::kFfiAsExternalTypedDataUint8:
9199 case MethodRecognizer::kFfiAsExternalTypedDataUint16:
9200 case MethodRecognizer::kFfiAsExternalTypedDataUint32:
9201 case MethodRecognizer::kFfiAsExternalTypedDataUint64:
9202 case MethodRecognizer::kFfiAsExternalTypedDataFloat:
9203 case MethodRecognizer::kFfiAsExternalTypedDataDouble:
9204 case MethodRecognizer::kGetNativeField:
9205 case MethodRecognizer::kRecord_fieldNames:
9206 case MethodRecognizer::kRecord_numFields:
9207 case MethodRecognizer::kStringBaseCodeUnitAt:
9208 case MethodRecognizer::kUtf8DecoderScan:
9209 case MethodRecognizer::kDouble_hashCode:
9210 case MethodRecognizer::kTypedList_GetInt8:
9211 case MethodRecognizer::kTypedList_SetInt8:
9212 case MethodRecognizer::kTypedList_GetUint8:
9213 case MethodRecognizer::kTypedList_SetUint8:
9214 case MethodRecognizer::kTypedList_GetInt16:
9215 case MethodRecognizer::kTypedList_SetInt16:
9216 case MethodRecognizer::kTypedList_GetUint16:
9217 case MethodRecognizer::kTypedList_SetUint16:
9218 case MethodRecognizer::kTypedList_GetInt32:
9219 case MethodRecognizer::kTypedList_SetInt32:
9220 case MethodRecognizer::kTypedList_GetUint32:
9221 case MethodRecognizer::kTypedList_SetUint32:
9222 case MethodRecognizer::kTypedList_GetInt64:
9223 case MethodRecognizer::kTypedList_SetInt64:
9224 case MethodRecognizer::kTypedList_GetUint64:
9225 case MethodRecognizer::kTypedList_SetUint64:
9226 case MethodRecognizer::kTypedList_GetFloat32:
9227 case MethodRecognizer::kTypedList_SetFloat32:
9228 case MethodRecognizer::kTypedList_GetFloat64:
9229 case MethodRecognizer::kTypedList_SetFloat64:
9230 case MethodRecognizer::kTypedList_GetInt32x4:
9231 case MethodRecognizer::kTypedList_SetInt32x4:
9232 case MethodRecognizer::kTypedList_GetFloat32x4:
9233 case MethodRecognizer::kTypedList_SetFloat32x4:
9234 case MethodRecognizer::kTypedList_GetFloat64x2:
9235 case MethodRecognizer::kTypedList_SetFloat64x2:
9236 case MethodRecognizer::kTypedData_memMove1:
9237 case MethodRecognizer::kTypedData_memMove2:
9238 case MethodRecognizer::kTypedData_memMove4:
9239 case MethodRecognizer::kTypedData_memMove8:
9240 case MethodRecognizer::kTypedData_memMove16:
9241 case MethodRecognizer::kMemCopy:
9242 // Prevent the GC from running so that the operation is atomic from
9243 // a GC point of view. Always double check implementation in
9244 // kernel_to_il.cc that no GC can happen in between the relevant IL
9245 // instructions.
9246 // TODO(https://dartbug.com/48527): Support inlining.
9247 case MethodRecognizer::kFinalizerBase_exchangeEntriesCollectedWithNull:
9248 // Both unboxed/untagged data and atomic-to-GC operation.
9249 case MethodRecognizer::kFinalizerEntry_allocate:
9250 return true;
9251 default:
9252 return false;
9253 }
9254}
9255
9256#if !defined(DART_PRECOMPILED_RUNTIME)
9258 if (ForceOptimize()) {
9259 if (IsFfiCallClosure() || IsFfiCallbackTrampoline() || is_ffi_native()) {
9260 // We currently don't support inlining FFI trampolines. Some of them
9261 // are naturally non-inlinable because they contain a try/catch block,
9262 // but this condition is broader than strictly necessary.
9263 // The work necessary for inlining FFI trampolines is tracked by
9264 // http://dartbug.com/45055.
9265 return false;
9266 }
9267 if (CompilerState::Current().is_aot()) {
9268 return true;
9269 }
9270 // Inlining of force-optimized functions requires target function to be
 9271  // idempotent because if deoptimization is needed in inlined body, the
 9272  // execution of the force-optimized function will be restarted at the
9273 // the function.
9274 ASSERT(!IsPreferInline() || IsIdempotent());
9275 return IsIdempotent();
9276 }
9277
9278 if (HasBreakpoint()) {
9279 return false;
9280 }
9281
9282 return is_inlinable();
9283}
9284#endif // !defined(DART_PRECOMPILED_RUNTIME)
9285
9287 const UntaggedFunction::Kind k = kind();
9288 if (k == UntaggedFunction::kConstructor) {
9289 // Type arguments for factory; instance for generative constructor.
9290 return 1;
9291 }
9292 if ((k == UntaggedFunction::kClosureFunction) ||
9293 (k == UntaggedFunction::kImplicitClosureFunction) ||
9294 (k == UntaggedFunction::kFfiTrampoline)) {
9295 return 1; // Closure object.
9296 }
9297 if (!is_static()) {
9298 // Closure functions defined inside instance (i.e. non-static) functions are
9299 // marked as non-static, but they do not have a receiver.
9300 // Closures are handled above.
9301 ASSERT((k != UntaggedFunction::kClosureFunction) &&
9302 (k != UntaggedFunction::kImplicitClosureFunction));
9303 return 1; // Receiver.
9304 }
9305 return 0; // No implicit parameters.
9306}
9307
9308bool Function::AreValidArgumentCounts(intptr_t num_type_arguments,
9309 intptr_t num_arguments,
9310 intptr_t num_named_arguments,
9311 String* error_message) const {
9312 if ((num_type_arguments != 0) &&
9313 (num_type_arguments != NumTypeParameters())) {
9314 if (error_message != nullptr) {
9315 const intptr_t kMessageBufferSize = 64;
9316 char message_buffer[kMessageBufferSize];
9317 Utils::SNPrint(message_buffer, kMessageBufferSize,
9318 "%" Pd " type arguments passed, but %" Pd " expected",
9319 num_type_arguments, NumTypeParameters());
9320 // Allocate in old space because it can be invoked in background
9321 // optimizing compilation.
9322 *error_message = String::New(message_buffer, Heap::kOld);
9323 }
9324 return false; // Too many type arguments.
9325 }
9326 if (num_named_arguments > NumOptionalNamedParameters()) {
9327 if (error_message != nullptr) {
9328 const intptr_t kMessageBufferSize = 64;
9329 char message_buffer[kMessageBufferSize];
9330 Utils::SNPrint(message_buffer, kMessageBufferSize,
9331 "%" Pd " named passed, at most %" Pd " expected",
9332 num_named_arguments, NumOptionalNamedParameters());
9333 // Allocate in old space because it can be invoked in background
9334 // optimizing compilation.
9335 *error_message = String::New(message_buffer, Heap::kOld);
9336 }
9337 return false; // Too many named arguments.
9338 }
9339 const intptr_t num_pos_args = num_arguments - num_named_arguments;
9340 const intptr_t num_opt_pos_params = NumOptionalPositionalParameters();
9341 const intptr_t num_pos_params = num_fixed_parameters() + num_opt_pos_params;
9342 if (num_pos_args > num_pos_params) {
9343 if (error_message != nullptr) {
9344 const intptr_t kMessageBufferSize = 64;
9345 char message_buffer[kMessageBufferSize];
9346 // Hide implicit parameters to the user.
9347 const intptr_t num_hidden_params = NumImplicitParameters();
9348 Utils::SNPrint(message_buffer, kMessageBufferSize,
9349 "%" Pd "%s passed, %s%" Pd " expected",
9350 num_pos_args - num_hidden_params,
9351 num_opt_pos_params > 0 ? " positional" : "",
9352 num_opt_pos_params > 0 ? "at most " : "",
9353 num_pos_params - num_hidden_params);
9354 // Allocate in old space because it can be invoked in background
9355 // optimizing compilation.
9356 *error_message = String::New(message_buffer, Heap::kOld);
9357 }
9358 return false; // Too many fixed and/or positional arguments.
9359 }
9360 if (num_pos_args < num_fixed_parameters()) {
9361 if (error_message != nullptr) {
9362 const intptr_t kMessageBufferSize = 64;
9363 char message_buffer[kMessageBufferSize];
9364 // Hide implicit parameters to the user.
9365 const intptr_t num_hidden_params = NumImplicitParameters();
9366 Utils::SNPrint(message_buffer, kMessageBufferSize,
9367 "%" Pd "%s passed, %s%" Pd " expected",
9368 num_pos_args - num_hidden_params,
9369 num_opt_pos_params > 0 ? " positional" : "",
9370 num_opt_pos_params > 0 ? "at least " : "",
9371 num_fixed_parameters() - num_hidden_params);
9372 // Allocate in old space because it can be invoked in background
9373 // optimizing compilation.
9374 *error_message = String::New(message_buffer, Heap::kOld);
9375 }
9376 return false; // Too few fixed and/or positional arguments.
9377 }
9378 return true;
9379}
9380
9381bool Function::AreValidArguments(intptr_t num_type_arguments,
9382 intptr_t num_arguments,
9383 const Array& argument_names,
9384 String* error_message) const {
9385 const Array& args_desc_array = Array::Handle(ArgumentsDescriptor::NewBoxed(
9386 num_type_arguments, num_arguments, argument_names, Heap::kNew));
9387 ArgumentsDescriptor args_desc(args_desc_array);
9388 return AreValidArguments(args_desc, error_message);
9389}
9390
9392 String* error_message) const {
9393 const intptr_t num_type_arguments = args_desc.TypeArgsLen();
9394 const intptr_t num_arguments = args_desc.Count();
9395 const intptr_t num_named_arguments = args_desc.NamedCount();
9396
9397 if (!AreValidArgumentCounts(num_type_arguments, num_arguments,
9398 num_named_arguments, error_message)) {
9399 return false;
9400 }
9401 // Verify that all argument names are valid parameter names.
9402 Thread* thread = Thread::Current();
9403 Zone* zone = thread->zone();
9404 String& argument_name = String::Handle(zone);
9405 String& parameter_name = String::Handle(zone);
9406 const intptr_t num_positional_args = num_arguments - num_named_arguments;
9407 const intptr_t num_parameters = NumParameters();
9408 for (intptr_t i = 0; i < num_named_arguments; i++) {
9409 argument_name = args_desc.NameAt(i);
9410 ASSERT(argument_name.IsSymbol());
9411 bool found = false;
9412 for (intptr_t j = num_positional_args; j < num_parameters; j++) {
9413 parameter_name = ParameterNameAt(j);
9414 ASSERT(parameter_name.IsSymbol());
9415 if (argument_name.Equals(parameter_name)) {
9416 found = true;
9417 break;
9418 }
9419 }
9420 if (!found) {
9421 if (error_message != nullptr) {
9422 const intptr_t kMessageBufferSize = 64;
9423 char message_buffer[kMessageBufferSize];
9424 Utils::SNPrint(message_buffer, kMessageBufferSize,
9425 "no optional formal parameter named '%s'",
9426 argument_name.ToCString());
9427 *error_message = String::New(message_buffer);
9428 }
9429 return false;
9430 }
9431 }
9432 // Verify that all required named parameters are filled.
9433 for (intptr_t j = num_parameters - NumOptionalNamedParameters();
9434 j < num_parameters; j++) {
9435 if (IsRequiredAt(j)) {
9436 parameter_name = ParameterNameAt(j);
9437 ASSERT(parameter_name.IsSymbol());
9438 bool found = false;
9439 for (intptr_t i = 0; i < num_named_arguments; i++) {
9440 argument_name = args_desc.NameAt(i);
9441 ASSERT(argument_name.IsSymbol());
9442 if (argument_name.Equals(parameter_name)) {
9443 found = true;
9444 break;
9445 }
9446 }
9447 if (!found) {
9448 if (error_message != nullptr) {
9449 const intptr_t kMessageBufferSize = 64;
9450 char message_buffer[kMessageBufferSize];
9451 Utils::SNPrint(message_buffer, kMessageBufferSize,
9452 "missing required named parameter '%s'",
9453 parameter_name.ToCString());
9454 *error_message = String::New(message_buffer);
9455 }
9456 return false;
9457 }
9458 }
9459 }
9460 return true;
9461}
9462
9463// Retrieves the function type arguments, if any. This could be explicitly
9464// passed type from the arguments array, delayed type arguments in closures,
9465// or instantiated bounds for the type parameters if no other source for
9466// function type arguments are found.
9467static TypeArgumentsPtr RetrieveFunctionTypeArguments(
9468 Thread* thread,
9469 Zone* zone,
9470 const Function& function,
9471 const Instance& receiver,
9472 const TypeArguments& instantiator_type_args,
9473 const Array& args,
9474 const ArgumentsDescriptor& args_desc) {
9475 ASSERT(!function.IsNull());
9476
9477 const intptr_t kNumCurrentTypeArgs = function.NumTypeParameters();
9478 const intptr_t kNumParentTypeArgs = function.NumParentTypeArguments();
9479 const intptr_t kNumTypeArgs = kNumCurrentTypeArgs + kNumParentTypeArgs;
9480 // Non-generic functions don't receive type arguments.
9481 if (kNumTypeArgs == 0) return Object::empty_type_arguments().ptr();
9482 // Closure functions require that the receiver be provided (and is a closure).
9483 ASSERT(!function.IsClosureFunction() || receiver.IsClosure());
9484
9485 // Only closure functions should have possibly generic parents.
9486 ASSERT(function.IsClosureFunction() || kNumParentTypeArgs == 0);
9487 const auto& parent_type_args =
9488 function.IsClosureFunction()
9490 zone, Closure::Cast(receiver).function_type_arguments())
9491 : Object::empty_type_arguments();
9492 // We don't try to instantiate the parent type parameters to their bounds
9493 // if not provided or check any closed-over type arguments against the parent
9494 // type parameter bounds (since they have been type checked already).
9495 if (kNumCurrentTypeArgs == 0) return parent_type_args.ptr();
9496
9497 auto& function_type_args = TypeArguments::Handle(zone);
9498 // First check for delayed type arguments before using either provided or
9499 // default type arguments.
9500 bool has_delayed_type_args = false;
9501 if (function.IsClosureFunction()) {
9502 const auto& closure = Closure::Cast(receiver);
9503 function_type_args = closure.delayed_type_arguments();
9504 has_delayed_type_args =
9505 function_type_args.ptr() != Object::empty_type_arguments().ptr();
9506 }
9507
9508 if (args_desc.TypeArgsLen() > 0) {
9509 // We should never end up here when the receiver is a closure with delayed
9510 // type arguments unless this dynamically called closure function was
9511 // retrieved directly from the closure instead of going through
9512 // DartEntry::ResolveCallable, which appropriately checks for this case.
9513 ASSERT(!has_delayed_type_args);
9514 function_type_args ^= args.At(0);
9515 } else if (!has_delayed_type_args) {
9516 // We have no explicitly provided function type arguments, so instantiate
9517 // the type parameters to bounds or replace as appropriate.
9518 function_type_args = function.DefaultTypeArguments(zone);
9519 auto const mode =
9520 function.IsClosureFunction()
9521 ? function.default_type_arguments_instantiation_mode()
9522 : function_type_args.GetInstantiationMode(zone, &function);
9523 switch (mode) {
9525 // Nothing left to do.
9526 break;
9528 function_type_args = function_type_args.InstantiateAndCanonicalizeFrom(
9529 instantiator_type_args, parent_type_args);
9530 break;
9532 function_type_args = instantiator_type_args.ptr();
9533 break;
9535 function_type_args = parent_type_args.ptr();
9536 break;
9537 }
9538 }
9539
9540 return function_type_args.Prepend(zone, parent_type_args, kNumParentTypeArgs,
9541 kNumTypeArgs);
9542}
9543
9544// Retrieves the instantiator type arguments, if any, from the receiver.
9546 Zone* zone,
9547 const Function& function,
9548 const Instance& receiver) {
9549 if (function.IsClosureFunction()) {
9550 ASSERT(receiver.IsClosure());
9551 const auto& closure = Closure::Cast(receiver);
9552 return closure.instantiator_type_arguments();
9553 }
9554 if (!receiver.IsNull()) {
9555 const auto& cls = Class::Handle(zone, receiver.clazz());
9556 if (cls.NumTypeArguments() > 0) {
9557 return receiver.GetTypeArguments();
9558 }
9559 }
9560 return Object::empty_type_arguments().ptr();
9561}
9562
9564 const Array& args,
9565 const ArgumentsDescriptor& args_desc) const {
9566#if defined(DART_PRECOMPILED_RUNTIME)
9567 if (signature() == FunctionType::null()) {
9568 // Precompiler deleted signature because of missing entry point pragma.
9569 return EntryPointMemberInvocationError(*this);
9570 }
9571#endif
9572 Thread* thread = Thread::Current();
9573 Zone* zone = thread->zone();
9574
9575 auto& receiver = Instance::Handle(zone);
9576 if (IsClosureFunction() || HasThisParameter()) {
9577 receiver ^= args.At(args_desc.FirstArgIndex());
9578 }
9579 const auto& instantiator_type_arguments = TypeArguments::Handle(
9580 zone, RetrieveInstantiatorTypeArguments(zone, *this, receiver));
9581 return Function::DoArgumentTypesMatch(args, args_desc,
9582 instantiator_type_arguments);
9583}
9584
9586 const Array& args,
9587 const ArgumentsDescriptor& args_desc,
9588 const TypeArguments& instantiator_type_arguments) const {
9589#if defined(DART_PRECOMPILED_RUNTIME)
9590 if (signature() == FunctionType::null()) {
9591 // Precompiler deleted signature because of missing entry point pragma.
9592 return EntryPointMemberInvocationError(*this);
9593 }
9594#endif
9595 Thread* thread = Thread::Current();
9596 Zone* zone = thread->zone();
9597
9598 auto& receiver = Instance::Handle(zone);
9599 if (IsClosureFunction() || HasThisParameter()) {
9600 receiver ^= args.At(args_desc.FirstArgIndex());
9601 }
9602
9603 const auto& function_type_arguments = TypeArguments::Handle(
9604 zone, RetrieveFunctionTypeArguments(thread, zone, *this, receiver,
9605 instantiator_type_arguments, args,
9606 args_desc));
9608 args, args_desc, instantiator_type_arguments, function_type_arguments);
9609}
9610
9612 const Array& args,
9613 const ArgumentsDescriptor& args_desc,
9614 const TypeArguments& instantiator_type_arguments,
9615 const TypeArguments& function_type_arguments) const {
9616#if defined(DART_PRECOMPILED_RUNTIME)
9617 if (signature() == FunctionType::null()) {
9618 // Precompiler deleted signature because of missing entry point pragma.
9619 return EntryPointMemberInvocationError(*this);
9620 }
9621#endif
9622 Thread* thread = Thread::Current();
9623 Zone* zone = thread->zone();
9624
9625 // Perform any non-covariant bounds checks on the provided function type
9626 // arguments to make sure they are appropriate subtypes of the bounds.
9627 const intptr_t kNumLocalTypeArgs = NumTypeParameters();
9628 if (kNumLocalTypeArgs > 0) {
9629 const intptr_t kNumParentTypeArgs = NumParentTypeArguments();
9630 ASSERT(function_type_arguments.HasCount(kNumParentTypeArgs +
9631 kNumLocalTypeArgs));
9632 const auto& params = TypeParameters::Handle(zone, type_parameters());
9633 // No checks are needed if all bounds are dynamic.
9634 if (!params.AllDynamicBounds()) {
9635 auto& param = AbstractType::Handle(zone);
9636 auto& bound = AbstractType::Handle(zone);
9637 for (intptr_t i = 0; i < kNumLocalTypeArgs; i++) {
9638 bound = params.BoundAt(i);
9639 // Only perform non-covariant checks where the bound is not
9640 // the top type.
9641 if (params.IsGenericCovariantImplAt(i) ||
9642 bound.IsTopTypeForSubtyping()) {
9643 continue;
9644 }
9645 param = TypeParameterAt(i);
9647 &param, &bound, instantiator_type_arguments,
9648 function_type_arguments)) {
9649 const auto& names = Array::Handle(zone, params.names());
9650 auto& name = String::Handle(zone);
9651 name ^= names.At(i);
9652 return Error::RawCast(
9653 ThrowTypeError(token_pos(), param, bound, name));
9654 }
9655 }
9656 }
9657 } else {
9658 ASSERT(function_type_arguments.HasCount(NumParentTypeArguments()));
9659 }
9660
9662 Instance& argument = Instance::Handle(zone);
9663
9664 auto check_argument = [](const Instance& argument, const AbstractType& type,
9665 const TypeArguments& instantiator_type_args,
9666 const TypeArguments& function_type_args) -> bool {
9667 // If the argument type is the top type, no need to check.
9668 if (type.IsTopTypeForSubtyping()) return true;
9669 if (argument.IsNull()) {
9670 return Instance::NullIsAssignableTo(type, instantiator_type_args,
9671 function_type_args);
9672 }
9673 return argument.IsAssignableTo(type, instantiator_type_args,
9674 function_type_args);
9675 };
9676
9677 // Check types of the provided arguments against the expected parameter types.
9678 const intptr_t arg_offset = args_desc.FirstArgIndex();
9679 // Only check explicit arguments.
9680 const intptr_t arg_start = arg_offset + NumImplicitParameters();
9681 const intptr_t end_positional_args = arg_offset + args_desc.PositionalCount();
9682 for (intptr_t arg_index = arg_start; arg_index < end_positional_args;
9683 ++arg_index) {
9684 argument ^= args.At(arg_index);
9685 // Adjust for type arguments when they're present.
9686 const intptr_t param_index = arg_index - arg_offset;
9687 type = ParameterTypeAt(param_index);
9688 if (!check_argument(argument, type, instantiator_type_arguments,
9689 function_type_arguments)) {
9690 auto& name = String::Handle(zone, ParameterNameAt(param_index));
9691 if (!type.IsInstantiated()) {
9692 type =
9693 type.InstantiateFrom(instantiator_type_arguments,
9694 function_type_arguments, kAllFree, Heap::kNew);
9695 }
9696 return ThrowTypeError(token_pos(), argument, type, name);
9697 }
9698 }
9699
9700 const intptr_t num_named_arguments = args_desc.NamedCount();
9701 if (num_named_arguments == 0) {
9702 return Error::null();
9703 }
9704
9705 const int num_parameters = NumParameters();
9706 const int num_fixed_params = num_fixed_parameters();
9707
9708 String& argument_name = String::Handle(zone);
9709 String& parameter_name = String::Handle(zone);
9710
9711 // Check types of named arguments against expected parameter type.
9712 for (intptr_t named_index = 0; named_index < num_named_arguments;
9713 named_index++) {
9714 argument_name = args_desc.NameAt(named_index);
9715 ASSERT(argument_name.IsSymbol());
9716 argument ^= args.At(arg_offset + args_desc.PositionAt(named_index));
9717
9718 // Try to find the named parameter that matches the provided argument.
9719 // Even when annotated with @required, named parameters are still stored
9720 // as if they were optional and so come after the fixed parameters.
9721 // Currently O(n^2) as there's no guarantee from either the CFE or the
9722 // VM that named parameters and named arguments are sorted in the same way.
9723 intptr_t param_index = num_fixed_params;
9724 for (; param_index < num_parameters; param_index++) {
9725 parameter_name = ParameterNameAt(param_index);
9726 ASSERT(parameter_name.IsSymbol());
9727
9728 if (!parameter_name.Equals(argument_name)) continue;
9729
9730 type = ParameterTypeAt(param_index);
9731 if (!check_argument(argument, type, instantiator_type_arguments,
9732 function_type_arguments)) {
9733 auto& name = String::Handle(zone, ParameterNameAt(param_index));
9734 if (!type.IsInstantiated()) {
9735 type = type.InstantiateFrom(instantiator_type_arguments,
9736 function_type_arguments, kAllFree,
9737 Heap::kNew);
9738 }
9739 return ThrowTypeError(token_pos(), argument, type, name);
9740 }
9741 break;
9742 }
 9743    // This should only fail if AreValidArguments returned a false positive.
9744 ASSERT(param_index < num_parameters);
9745 }
9746 return Error::null();
9747}
9748
9749// Helper allocating a C string buffer in the zone, printing the fully qualified
9750// name of a function in it, and replacing ':' by '_' to make sure the
9751// constructed name is a valid C++ identifier for debugging purpose.
9752// Set 'chars' to allocated buffer and return number of written characters.
9753
9758
9760 const Function& function,
9761 char** chars,
9762 intptr_t reserve_len,
9763 bool with_lib,
9764 QualifiedFunctionLibKind lib_kind) {
9765 Zone* zone = Thread::Current()->zone();
9766 const char* name = String::Handle(zone, function.name()).ToCString();
9767 const char* function_format = (reserve_len == 0) ? "%s" : "%s_";
9768 reserve_len += Utils::SNPrint(nullptr, 0, function_format, name);
9769 const Function& parent = Function::Handle(zone, function.parent_function());
9770 intptr_t written = 0;
9771 if (parent.IsNull()) {
9772 const Class& function_class = Class::Handle(zone, function.Owner());
9773 ASSERT(!function_class.IsNull());
9774 const char* class_name =
9775 String::Handle(zone, function_class.Name()).ToCString();
9776 ASSERT(class_name != nullptr);
9777 const char* library_name = nullptr;
9778 const char* lib_class_format = nullptr;
9779 if (with_lib) {
9780 const Library& library = Library::Handle(zone, function_class.library());
9781 ASSERT(!library.IsNull());
9782 switch (lib_kind) {
9784 library_name = String::Handle(zone, library.url()).ToCString();
9785 break;
9787 library_name = String::Handle(zone, library.name()).ToCString();
9788 break;
9789 default:
9790 UNREACHABLE();
9791 }
9792 ASSERT(library_name != nullptr);
9793 lib_class_format = (library_name[0] == '\0') ? "%s%s_" : "%s_%s_";
9794 } else {
9795 library_name = "";
9796 lib_class_format = "%s%s.";
9797 }
9798 reserve_len +=
9799 Utils::SNPrint(nullptr, 0, lib_class_format, library_name, class_name);
9800 ASSERT(chars != nullptr);
9801 *chars = zone->Alloc<char>(reserve_len + 1);
9802 written = Utils::SNPrint(*chars, reserve_len + 1, lib_class_format,
9803 library_name, class_name);
9804 } else {
9805 written = ConstructFunctionFullyQualifiedCString(parent, chars, reserve_len,
9806 with_lib, lib_kind);
9807 }
9808 ASSERT(*chars != nullptr);
9809 char* next = *chars + written;
9810 written += Utils::SNPrint(next, reserve_len + 1, function_format, name);
9811 // Replace ":" with "_".
9812 while (true) {
9813 next = strchr(next, ':');
9814 if (next == nullptr) break;
9815 *next = '_';
9816 }
9817 return written;
9818}
9819
9821 char* chars = nullptr;
9822 ConstructFunctionFullyQualifiedCString(*this, &chars, 0, true,
9824 return chars;
9825}
9826
9828 char* chars = nullptr;
9829 ConstructFunctionFullyQualifiedCString(*this, &chars, 0, true,
9831 return chars;
9832}
9833
9834const char* Function::ToQualifiedCString() const {
9835 char* chars = nullptr;
9836 ConstructFunctionFullyQualifiedCString(*this, &chars, 0, false,
9838 return chars;
9839}
9840
9842 const TypeArguments& instantiator_type_arguments,
9843 const TypeArguments& function_type_arguments,
9844 intptr_t num_free_fun_type_params,
9845 Heap::Space space,
9846 FunctionTypeMapping* function_type_mapping,
9847 intptr_t num_parent_type_args_adjustment) const {
9849 Zone* zone = Thread::Current()->zone();
9850 const intptr_t num_parent_type_args = NumParentTypeArguments();
9851 bool delete_type_parameters = false;
9852 if (num_free_fun_type_params == kCurrentAndEnclosingFree) {
9853 // See the comment on kCurrentAndEnclosingFree to understand why we don't
9854 // adjust 'num_free_fun_type_params' downward in this case.
9855 num_free_fun_type_params = kAllFree;
9856 delete_type_parameters = true;
9857 } else {
9858 ASSERT(!IsInstantiated(kAny, num_free_fun_type_params));
9859 // We only consider the function type parameters declared by the parents
9860 // of this signature function as free.
9861 if (num_parent_type_args < num_free_fun_type_params) {
9862 num_free_fun_type_params = num_parent_type_args;
9863 }
9864 }
9865
9866 // The number of parent type parameters that remain uninstantiated.
9867 const intptr_t remaining_parent_type_params =
9868 num_free_fun_type_params < num_parent_type_args
9869 ? num_parent_type_args - num_free_fun_type_params
9870 : 0;
9871
9872 // Adjust number of parent type arguments for all nested substituted types.
9873 num_parent_type_args_adjustment =
9874 remaining_parent_type_params +
9875 (delete_type_parameters ? 0 : NumTypeParameters());
9876
9878 FunctionType::New(remaining_parent_type_params, nullability(), space));
9880
9881 FunctionTypeMapping scope(zone, &function_type_mapping, *this, sig);
9882
9883 // Copy the type parameters and instantiate their bounds and defaults.
9884 if (!delete_type_parameters) {
9885 const TypeParameters& type_params =
9886 TypeParameters::Handle(zone, type_parameters());
9887 if (!type_params.IsNull()) {
9888 const TypeParameters& sig_type_params =
9890 // No need to set names that are ignored in a signature, however, the
9891 // length of the names array defines the number of type parameters.
9892 sig_type_params.set_names(Array::Handle(zone, type_params.names()));
9893 sig_type_params.set_flags(Array::Handle(zone, type_params.flags()));
9894 sig.SetTypeParameters(sig_type_params);
9895 TypeArguments& type_args = TypeArguments::Handle(zone);
9896 type_args = type_params.bounds();
9897 if (!type_args.IsNull() && !type_args.IsInstantiated()) {
9898 type_args = type_args.InstantiateFrom(
9899 instantiator_type_arguments, function_type_arguments,
9900 num_free_fun_type_params, space, function_type_mapping,
9901 num_parent_type_args_adjustment);
9902 }
9903 sig_type_params.set_bounds(type_args);
9904 type_args = type_params.defaults();
9905 if (!type_args.IsNull() && !type_args.IsInstantiated()) {
9906 type_args = type_args.InstantiateFrom(
9907 instantiator_type_arguments, function_type_arguments,
9908 num_free_fun_type_params, space, function_type_mapping,
9909 num_parent_type_args_adjustment);
9910 }
9911 sig_type_params.set_defaults(type_args);
9912 }
9913 }
9914
9915 type = result_type();
9916 if (!type.IsInstantiated()) {
9917 type = type.InstantiateFrom(
9918 instantiator_type_arguments, function_type_arguments,
9919 num_free_fun_type_params, space, function_type_mapping,
9920 num_parent_type_args_adjustment);
9921 // A returned null type indicates a failed instantiation in dead code that
9922 // must be propagated up to the caller, the optimizing compiler.
9923 if (type.IsNull()) {
9924 return FunctionType::null();
9925 }
9926 }
9927 sig.set_result_type(type);
9928 const intptr_t num_params = NumParameters();
9929 sig.set_num_implicit_parameters(num_implicit_parameters());
9930 sig.set_num_fixed_parameters(num_fixed_parameters());
9931 sig.SetNumOptionalParameters(NumOptionalParameters(),
9932 HasOptionalPositionalParameters());
9933 sig.set_parameter_types(Array::Handle(Array::New(num_params, space)));
9934 for (intptr_t i = 0; i < num_params; i++) {
9935 type = ParameterTypeAt(i);
9936 if (!type.IsInstantiated()) {
9937 type = type.InstantiateFrom(
9938 instantiator_type_arguments, function_type_arguments,
9939 num_free_fun_type_params, space, function_type_mapping,
9940 num_parent_type_args_adjustment);
9941 // A returned null type indicates a failed instantiation in dead code that
9942 // must be propagated up to the caller, the optimizing compiler.
9943 if (type.IsNull()) {
9944 return FunctionType::null();
9945 }
9946 }
9947 sig.SetParameterTypeAt(i, type);
9948 }
9949 sig.set_named_parameter_names(Array::Handle(zone, named_parameter_names()));
9950
9951 if (delete_type_parameters) {
9952 ASSERT(sig.IsInstantiated(kFunctions));
9953 }
9954
9955 sig.SetIsFinalized();
9956
9957 // Canonicalization is not part of instantiation.
9958 return sig.ptr();
9959}
9960
9962 intptr_t num_parent_type_args_adjustment,
9963 intptr_t num_free_fun_type_params,
9964 Heap::Space space,
9965 FunctionTypeMapping* function_type_mapping) const {
9966 ASSERT(num_parent_type_args_adjustment >= 0);
9968 Zone* zone = Thread::Current()->zone();
9969
9970 const intptr_t old_num_parent_type_args = NumParentTypeArguments();
9971 // From now on, adjust all type parameter types
9972 // which belong to this or nested function types.
9973 if (num_free_fun_type_params > old_num_parent_type_args) {
9974 num_free_fun_type_params = old_num_parent_type_args;
9975 }
9976
9978 zone, FunctionType::New(
9979 NumParentTypeArguments() + num_parent_type_args_adjustment,
9980 nullability(), space));
9982
9983 FunctionTypeMapping scope(zone, &function_type_mapping, *this, new_type);
9984
9985 const TypeParameters& type_params =
9986 TypeParameters::Handle(zone, type_parameters());
9987 if (!type_params.IsNull()) {
9988 const TypeParameters& new_type_params =
9990 // No need to set names that are ignored in a signature, however, the
9991 // length of the names array defines the number of type parameters.
9992 new_type_params.set_names(Array::Handle(zone, type_params.names()));
9993 new_type_params.set_flags(Array::Handle(zone, type_params.flags()));
9994 TypeArguments& type_args = TypeArguments::Handle(zone);
9995 type_args = type_params.bounds();
9996 if (!type_args.IsNull()) {
9997 type_args = type_args.UpdateFunctionTypes(num_parent_type_args_adjustment,
9998 num_free_fun_type_params, space,
9999 function_type_mapping);
10000 }
10001 new_type_params.set_bounds(type_args);
10002 type_args = type_params.defaults();
10003 if (!type_args.IsNull()) {
10004 type_args = type_args.UpdateFunctionTypes(num_parent_type_args_adjustment,
10005 num_free_fun_type_params, space,
10006 function_type_mapping);
10007 }
10008 new_type_params.set_defaults(type_args);
10009 new_type.SetTypeParameters(new_type_params);
10010 }
10011
10012 type = result_type();
10013 type = type.UpdateFunctionTypes(num_parent_type_args_adjustment,
10014 num_free_fun_type_params, space,
10015 function_type_mapping);
10016 new_type.set_result_type(type);
10017
10018 const intptr_t num_params = NumParameters();
10019 new_type.set_num_implicit_parameters(num_implicit_parameters());
10020 new_type.set_num_fixed_parameters(num_fixed_parameters());
10021 new_type.SetNumOptionalParameters(NumOptionalParameters(),
10022 HasOptionalPositionalParameters());
10023 new_type.set_parameter_types(Array::Handle(Array::New(num_params, space)));
10024 for (intptr_t i = 0; i < num_params; i++) {
10025 type = ParameterTypeAt(i);
10026 type = type.UpdateFunctionTypes(num_parent_type_args_adjustment,
10027 num_free_fun_type_params, space,
10028 function_type_mapping);
10029 new_type.SetParameterTypeAt(i, type);
10030 }
10032 Array::Handle(zone, named_parameter_names()));
10033 new_type.SetIsFinalized();
10034
10035 return new_type.ptr();
10036}
10037
10038// Checks if the type of the specified parameter of this signature is a
10039// supertype of the type of the specified parameter of the other signature
10040// (i.e. check parameter contravariance).
10041// Note that types marked as covariant are already dealt with in the front-end.
10043 intptr_t parameter_position,
10044 const FunctionType& other,
10045 intptr_t other_parameter_position,
10046 Heap::Space space,
10047 FunctionTypeMapping* function_type_equivalence) const {
10048 const AbstractType& param_type =
10049 AbstractType::Handle(ParameterTypeAt(parameter_position));
10050 if (param_type.IsTopTypeForSubtyping()) {
10051 return true;
10052 }
10053 const AbstractType& other_param_type =
10054 AbstractType::Handle(other.ParameterTypeAt(other_parameter_position));
10055 return other_param_type.IsSubtypeOf(param_type, space,
10056 function_type_equivalence);
10057}
10058
10060 const FunctionType& other,
10061 TypeEquality kind,
10062 FunctionTypeMapping* function_type_equivalence) const {
10063 Zone* const zone = Thread::Current()->zone();
10065 " FunctionType::HasSameTypeParametersAndBounds(%s, %s)\n", ToCString(),
10066 other.ToCString());
10067
10068 const intptr_t num_type_params = NumTypeParameters();
10069 if (num_type_params != other.NumTypeParameters()) {
10071 " - result: false (number of type parameters)\n");
10072 return false;
10073 }
10074 if (num_type_params > 0) {
10075 const TypeParameters& type_params =
10076 TypeParameters::Handle(zone, type_parameters());
10077 ASSERT(!type_params.IsNull());
10078 const TypeParameters& other_type_params =
10080 ASSERT(!other_type_params.IsNull());
10081 if (kind == TypeEquality::kInSubtypeTest) {
10082 if (!type_params.AllDynamicBounds() ||
10083 !other_type_params.AllDynamicBounds()) {
10084 AbstractType& bound = AbstractType::Handle(zone);
10085 AbstractType& other_bound = AbstractType::Handle(zone);
10086 for (intptr_t i = 0; i < num_type_params; i++) {
10087 bound = type_params.BoundAt(i);
10088 other_bound = other_type_params.BoundAt(i);
10089 // Bounds that are mutual subtypes are considered equal.
10090 if (!bound.IsSubtypeOf(other_bound, Heap::kOld,
10091 function_type_equivalence) ||
10092 !other_bound.IsSubtypeOf(bound, Heap::kOld,
10093 function_type_equivalence)) {
10095 " - result: false (bounds are not mutual subtypes)\n");
10096 return false;
10097 }
10098 }
10099 }
10100 } else {
10101 if (NumParentTypeArguments() != other.NumParentTypeArguments()) {
10103 " - result: false (mismatch in number of type arguments)\n");
10104 return false;
10105 }
10106 const TypeArguments& bounds =
10107 TypeArguments::Handle(zone, type_params.bounds());
10108 const TypeArguments& other_bounds =
10109 TypeArguments::Handle(zone, other_type_params.bounds());
10110 if (!bounds.IsEquivalent(other_bounds, kind, function_type_equivalence)) {
10112 " - result: false (bounds are not equivalent)\n");
10113 return false;
10114 }
10115 if (kind == TypeEquality::kCanonical) {
10116 // Compare default arguments.
10117 const TypeArguments& defaults =
10118 TypeArguments::Handle(zone, type_params.defaults());
10119 const TypeArguments& other_defaults =
10120 TypeArguments::Handle(zone, other_type_params.defaults());
10121 if (defaults.IsNull()) {
10122 if (!other_defaults.IsNull()) {
10124 " - result: false (mismatch in defaults)\n");
10125 return false;
10126 }
10127 } else if (!defaults.IsEquivalent(other_defaults, kind,
10128 function_type_equivalence)) {
10130 " - result: false (default types are not equivalent)\n");
10131 return false;
10132 }
10133 }
10134 }
10135 if (kind != TypeEquality::kInSubtypeTest) {
10136 // Compare flags (IsGenericCovariantImpl).
10137 if (!Array::Equals(type_params.flags(), other_type_params.flags())) {
10138 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (flags are not equal)\n");
10139 return false;
10140 }
10141 }
10142 }
10143 TRACE_TYPE_CHECKS_VERBOSE(" - result: true\n");
10144 return true;
10145}
10146
10148 const FunctionType& other,
10149 Heap::Space space,
10150 FunctionTypeMapping* function_type_equivalence) const {
10151 TRACE_TYPE_CHECKS_VERBOSE(" FunctionType::IsSubtypeOf(%s, %s)\n",
10152 ToCString(), other.ToCString());
10153 const intptr_t num_fixed_params = num_fixed_parameters();
10154 const intptr_t num_opt_pos_params = NumOptionalPositionalParameters();
10155 const intptr_t num_opt_named_params = NumOptionalNamedParameters();
10156 const intptr_t other_num_fixed_params = other.num_fixed_parameters();
10157 const intptr_t other_num_opt_pos_params =
10159 const intptr_t other_num_opt_named_params =
10161 // This signature requires the same arguments or less and accepts the same
10162 // arguments or more. We can ignore implicit parameters.
10163 const intptr_t num_ignored_params = num_implicit_parameters();
10164 const intptr_t other_num_ignored_params = other.num_implicit_parameters();
10165 if (((num_fixed_params - num_ignored_params) >
10166 (other_num_fixed_params - other_num_ignored_params)) ||
10167 ((num_fixed_params - num_ignored_params + num_opt_pos_params) <
10168 (other_num_fixed_params - other_num_ignored_params +
10169 other_num_opt_pos_params)) ||
10170 (num_opt_named_params < other_num_opt_named_params)) {
10172 " - result: false (mismatch in number of parameters)\n");
10173 return false;
10174 }
10175 Thread* thread = Thread::Current();
10176 Zone* zone = thread->zone();
10177 FunctionTypeMapping scope(zone, &function_type_equivalence, *this, other);
10178
10179 // Check the type parameters and bounds of generic functions.
10180 if (!HasSameTypeParametersAndBounds(other, TypeEquality::kInSubtypeTest,
10181 function_type_equivalence)) {
10183 " - result: false (mismatch in type parameters)\n");
10184 return false;
10185 }
10186 // Check the result type.
10187 const AbstractType& other_res_type =
10188 AbstractType::Handle(zone, other.result_type());
10189 // 'void Function()' is a subtype of 'Object Function()'.
10190 if (!other_res_type.IsTopTypeForSubtyping()) {
10191 const AbstractType& res_type = AbstractType::Handle(zone, result_type());
10192 if (!res_type.IsSubtypeOf(other_res_type, space,
10193 function_type_equivalence)) {
10194 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (result type)\n");
10195 return false;
10196 }
10197 }
10198 // Check the types of fixed and optional positional parameters.
10199 for (intptr_t i = 0; i < (other_num_fixed_params - other_num_ignored_params +
10200 other_num_opt_pos_params);
10201 i++) {
10202 if (!IsContravariantParameter(i + num_ignored_params, other,
10203 i + other_num_ignored_params, space,
10204 function_type_equivalence)) {
10205 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (parameter type)\n");
10206 return false;
10207 }
10208 }
10209 // Check that for each optional named parameter of type T of the other
10210 // function type, there exists an optional named parameter of this function
10211 // type with an identical name and with a type S that is a supertype of T.
10212 // Note that SetParameterNameAt() guarantees that names are symbols, so we
10213 // can compare their raw pointers.
10214 const int num_params = num_fixed_params + num_opt_named_params;
10215 const int other_num_params =
10216 other_num_fixed_params + other_num_opt_named_params;
10217 bool found_param_name;
10218 String& other_param_name = String::Handle(zone);
10219 for (intptr_t i = other_num_fixed_params; i < other_num_params; i++) {
10220 other_param_name = other.ParameterNameAt(i);
10221 ASSERT(other_param_name.IsSymbol());
10222 found_param_name = false;
10223 for (intptr_t j = num_fixed_params; j < num_params; j++) {
10224 ASSERT(String::Handle(zone, ParameterNameAt(j)).IsSymbol());
10225 if (ParameterNameAt(j) == other_param_name.ptr()) {
10226 found_param_name = true;
10227 if (!IsContravariantParameter(j, other, i, space,
10228 function_type_equivalence)) {
10230 " - result: false (optional parameter type)\n");
10231 return false;
10232 }
10233 break;
10234 }
10235 }
10236 if (!found_param_name) {
10238 " - result: false (named parameter not found)\n");
10239 return false;
10240 }
10241 }
10242 // Check that for each required named parameter in this function, there's a
10243 // corresponding required named parameter in the other function.
10244 String& param_name = other_param_name;
10245 for (intptr_t j = num_params - num_opt_named_params; j < num_params; j++) {
10246 if (IsRequiredAt(j)) {
10247 param_name = ParameterNameAt(j);
10248 ASSERT(param_name.IsSymbol());
10249 bool found = false;
10250 for (intptr_t i = other_num_fixed_params; i < other_num_params; i++) {
10251 ASSERT(String::Handle(zone, other.ParameterNameAt(i)).IsSymbol());
10252 if (other.ParameterNameAt(i) == param_name.ptr()) {
10253 found = true;
10254 if (!other.IsRequiredAt(i)) {
10256 " - result: false (mismatch in required named "
10257 "parameters)\n");
10258 return false;
10259 }
10260 }
10261 }
10262 if (!found) {
10264 " - result: false (required named parameter not found)\n");
10265 return false;
10266 }
10267 }
10268 }
10269 TRACE_TYPE_CHECKS_VERBOSE(" - result: true\n");
10270 return true;
10271}
10272
10273// The compiler generates an implicit constructor if a class definition
10274// does not contain an explicit constructor or factory. The implicit
10275// constructor has the same token position as the owner class.
10277 return IsGenerativeConstructor() && (token_pos() == end_token_pos());
10278}
10279
10281 NoSafepointScope no_safepoint;
10282 uint32_t kind_tag = func->untag()->kind_tag_.load(std::memory_order_relaxed);
10283 return (KindBits::decode(kind_tag) ==
10284 UntaggedFunction::kImplicitClosureFunction) &&
10285 StaticBit::decode(kind_tag);
10286}
10287
10289 NoSafepointScope no_safepoint;
10290 uint32_t kind_tag = func->untag()->kind_tag_.load(std::memory_order_relaxed);
10291 return (KindBits::decode(kind_tag) ==
10292 UntaggedFunction::kImplicitClosureFunction) &&
10293 !StaticBit::decode(kind_tag);
10294}
10295
// Allocates a raw, uninitialized Function object in |space|.
// Callers are expected to initialize every field afterwards (see the
// fully-parameterized Function::New overload, which does exactly that).
FunctionPtr Function::New(Heap::Space space) {
  return Object::Allocate<Function>(space);
}
10300
// Allocates and fully initializes a new Function object.
//
// All attribute bits are written explicitly (kind_tag is zeroed twice, once
// before and once after set_name) so the uninitialized bit pattern is
// deterministic. is_reflectable / is_visible / is_debuggable start out true
// and are recomputed later by callers.
//
// NOTE(review): this extracted view appears to be missing a few lines of the
// original (e.g. the `kind` parameter declaration and the declaration of
// `result`) — confirm against the full source before relying on the exact
// signature.
FunctionPtr Function::New(const FunctionType& signature,
                          const String& name,
                          bool is_static,
                          bool is_const,
                          bool is_abstract,
                          bool is_external,
                          bool is_native,
                          const Object& owner,
                          TokenPosition token_pos,
                          Heap::Space space) {
  ASSERT(!owner.IsNull());
  ASSERT(!signature.IsNull());
  result.set_kind_tag(0);
  result.set_packed_fields(0);
  result.set_name(name);
  result.set_kind_tag(0);  // Ensure determinism of uninitialized bits.
  result.set_kind(kind);
  result.set_recognized_kind(MethodRecognizer::kUnknown);
  result.set_is_static(is_static);
  result.set_is_const(is_const);
  result.set_is_abstract(is_abstract);
  result.set_is_external(is_external);
  result.set_is_native(is_native);
  result.set_is_reflectable(true);  // Will be computed later.
  result.set_is_visible(true);      // Will be computed later.
  result.set_is_debuggable(true);   // Will be computed later.
  result.set_is_intrinsic(false);
  result.set_has_pragma(false);
  result.set_is_polymorphic_target(false);
  result.set_is_synthetic(false);
  // The following state only exists outside precompiled (AOT) builds, per the
  // NOT_IN_PRECOMPILED wrapper: token positions, usage/deopt counters, and
  // kernel offsets are JIT-only.
  NOT_IN_PRECOMPILED(result.set_state_bits(0));
  result.set_owner(owner);
  NOT_IN_PRECOMPILED(result.set_token_pos(token_pos));
  NOT_IN_PRECOMPILED(result.set_end_token_pos(token_pos));
  NOT_IN_PRECOMPILED(result.set_usage_counter(0));
  NOT_IN_PRECOMPILED(result.set_deoptimization_counter(0));
  NOT_IN_PRECOMPILED(result.set_optimized_instruction_count(0));
  NOT_IN_PRECOMPILED(result.set_optimized_call_site_count(0));
  NOT_IN_PRECOMPILED(result.set_inlining_depth(0));
  NOT_IN_PRECOMPILED(result.set_kernel_offset(0));
  // Native functions start out non-optimizable.
  result.set_is_optimizable(is_native ? false : true);
  result.set_is_inlinable(true);
  result.reset_unboxed_parameters_and_return();
  // Newly created functions point at the lazy-compile stub until compiled.
  result.SetInstructionsSafe(StubCode::LazyCompile());

  // See Function::set_data() for more information.
  if (kind == UntaggedFunction::kClosureFunction ||
      kind == UntaggedFunction::kImplicitClosureFunction) {
    ASSERT(space == Heap::kOld);
    const ClosureData& data = ClosureData::Handle(ClosureData::New());
    data.set_awaiter_link({});
    result.set_data(data);
  } else if (kind == UntaggedFunction::kFfiTrampoline) {
    const FfiTrampolineData& data =
        FfiTrampolineData::Handle(FfiTrampolineData::New());
    result.set_data(data);
  } else if (result.is_old_native()) {
    const auto& data =
        Array::Handle(Array::New(NativeFunctionData::kLength, Heap::kOld));
    result.set_data(data);
  } else {
    // Functions other than signature functions have no reason to be allocated
    // in new space.
    ASSERT(space == Heap::kOld);
  }

  // Force-optimized functions are not debuggable because they cannot
  // deoptimize.
  if (result.ForceOptimize()) {
    result.set_is_debuggable(false);
  }
  // The implicit-parameter count depends on the kind set above, so it can
  // only be transferred to the signature now.
  signature.set_num_implicit_parameters(result.NumImplicitParameters());
  result.SetSignature(signature);
  result.set_positional_parameter_names(Object::empty_array()));
  return result.ptr();
}
10381
10383 const String& name,
10384 const Function& parent,
10385 bool is_static,
10386 TokenPosition token_pos,
10387 const Object& owner) {
10388 ASSERT((kind == UntaggedFunction::kClosureFunction) ||
10389 (kind == UntaggedFunction::kImplicitClosureFunction));
10390 ASSERT(!parent.IsNull());
10391 ASSERT(!owner.IsNull());
10393 kind == UntaggedFunction::kClosureFunction ? parent.NumTypeArguments()
10394 : 0));
10396 Function::New(signature, name, kind,
10397 /* is_static = */ is_static,
10398 /* is_const = */ false,
10399 /* is_abstract = */ false,
10400 /* is_external = */ false,
10401 /* is_native = */ false, owner, token_pos));
10402 result.set_parent_function(parent);
10403 return result.ptr();
10404}
10405
10407 const Function& parent,
10408 TokenPosition token_pos) {
10409 // Use the owner defining the parent function and not the class containing it.
10410 const Object& parent_owner = Object::Handle(parent.RawOwner());
10411 return NewClosureFunctionWithKind(UntaggedFunction::kClosureFunction, name,
10412 parent, parent.is_static(), token_pos,
10413 parent_owner);
10414}
10415
10417 const Function& parent,
10418 TokenPosition token_pos) {
10419 // Use the owner defining the parent function and not the class containing it.
10420 const Object& parent_owner = Object::Handle(parent.RawOwner());
10421 return NewClosureFunctionWithKind(
10422 UntaggedFunction::kImplicitClosureFunction, name, parent,
10423 parent.is_static() || parent.IsConstructor(), token_pos, parent_owner);
10424}
10425
10427#if defined(DART_PRECOMPILED_RUNTIME)
10428 return HasImplicitClosureFunction();
10429#else
10430 return true;
10431#endif
10432}
10433
10435 if (!IsInvokeFieldDispatcher()) return false;
10436 if (thread->isolate_group()->object_store()->closure_class() != Owner()) {
10437 return false;
10438 }
10439 const auto& handle = String::Handle(thread->zone(), name());
10440 return handle.Equals(Symbols::DynamicCall());
10441}
10442
10444 // Return the existing implicit closure function if any.
10445 if (implicit_closure_function() != Function::null()) {
10446 return implicit_closure_function();
10447 }
10448
10449#if defined(DART_PRECOMPILED_RUNTIME)
10450 // In AOT mode all implicit closures are pre-created.
10451 FATAL("Cannot create implicit closure in AOT!");
10452 return Function::null();
10453#else
10454 ASSERT(!IsClosureFunction());
10455 Thread* thread = Thread::Current();
10456 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
10457
10458 if (implicit_closure_function() != Function::null()) {
10459 return implicit_closure_function();
10460 }
10461
10462 // Create closure function.
10463 Zone* zone = thread->zone();
10464 const String& closure_name = String::Handle(zone, name());
10465 const Function& closure_function = Function::Handle(
10466 zone, NewImplicitClosureFunction(closure_name, *this, token_pos()));
10467
10468 // Set closure function's context scope.
10469 if (is_static() || IsConstructor()) {
10470 closure_function.set_context_scope(Object::empty_context_scope());
10471 } else {
10472 const ContextScope& context_scope = ContextScope::Handle(
10474 closure_function.set_context_scope(context_scope);
10475 }
10476
10477 FunctionType& closure_signature =
10478 FunctionType::Handle(zone, closure_function.signature());
10479
10480 const auto& cls = Class::Handle(zone, Owner());
10481
10482 if (!is_static() && !IsConstructor() &&
10484 closure_function.set_awaiter_link({0, 0});
10485 }
10486
10487 const intptr_t num_type_params =
10488 IsConstructor() ? cls.NumTypeParameters() : NumTypeParameters();
10489
10490 TypeArguments& instantiator_type_arguments = TypeArguments::Handle(zone);
10491 TypeArguments& function_type_arguments = TypeArguments::Handle(zone);
10492
10493 FunctionTypeMapping* function_type_mapping = nullptr;
10494 FunctionTypeMapping scope(zone, &function_type_mapping,
10495 FunctionType::Handle(zone, signature()),
10496 closure_signature);
10497
10498 auto transform_type = [&](AbstractType& type) {
10499 if (num_type_params > 0) {
10500 if (IsConstructor()) {
10501 type = type.UpdateFunctionTypes(num_type_params, kAllFree, Heap::kOld,
10502 nullptr);
10503 if (!type.IsInstantiated(kCurrentClass)) {
10504 type = type.InstantiateFrom(
10505 instantiator_type_arguments, function_type_arguments,
10506 kNoneFree /* avoid truncating parent type args */, Heap::kOld);
10507 }
10508 } else {
10509 type = type.UpdateFunctionTypes(0, kNoneFree, Heap::kOld,
10510 function_type_mapping);
10511 }
10512 }
10513 };
10514
10515 auto transform_type_args = [&](TypeArguments& type_args) {
10516 ASSERT(num_type_params > 0);
10517 if (!type_args.IsNull()) {
10518 if (IsConstructor()) {
10519 type_args = type_args.UpdateFunctionTypes(num_type_params, kAllFree,
10520 Heap::kOld, nullptr);
10521 if (!type_args.IsInstantiated(kCurrentClass)) {
10522 type_args = type_args.InstantiateFrom(
10523 instantiator_type_arguments, function_type_arguments,
10524 kNoneFree /* avoid truncating parent type args */, Heap::kOld);
10525 }
10526 } else {
10527 type_args = type_args.UpdateFunctionTypes(0, kNoneFree, Heap::kOld,
10528 function_type_mapping);
10529 }
10530 }
10531 };
10532
10533 // Set closure function's type parameters.
10534 if (num_type_params > 0) {
10535 const TypeParameters& old_type_params = TypeParameters::Handle(
10536 zone, IsConstructor() ? cls.type_parameters() : type_parameters());
10537 const TypeParameters& new_type_params =
10539 // No need to set names that are ignored in a signature, however, the
10540 // length of the names array defines the number of type parameters.
10541 new_type_params.set_names(Array::Handle(zone, old_type_params.names()));
10542 new_type_params.set_flags(Array::Handle(zone, old_type_params.flags()));
10543
10544 closure_signature.SetTypeParameters(new_type_params);
10545 ASSERT(closure_signature.NumTypeParameters() == num_type_params);
10546
10547 TypeArguments& type_args = TypeArguments::Handle(zone);
10548 type_args = TypeArguments::New(num_type_params);
10549 TypeParameter& type_param = TypeParameter::Handle(zone);
10550 for (intptr_t i = 0; i < num_type_params; i++) {
10551 type_param = closure_signature.TypeParameterAt(i);
10552 type_args.SetTypeAt(i, type_param);
10553 }
10554
10555 if (IsConstructor()) {
10556 instantiator_type_arguments =
10557 type_args.ToInstantiatorTypeArguments(thread, cls);
10558 } else {
10559 ASSERT(NumTypeArguments() == type_args.Length());
10560 function_type_arguments = type_args.ptr();
10561 }
10562
10563 type_args = old_type_params.bounds();
10564 transform_type_args(type_args);
10565 new_type_params.set_bounds(type_args);
10566
10567 type_args = old_type_params.defaults();
10568 transform_type_args(type_args);
10569 new_type_params.set_defaults(type_args);
10570 }
10571
10572 // Set closure function's result type.
10573 AbstractType& result_type = AbstractType::Handle(zone);
10574 if (IsConstructor()) {
10575 result_type = cls.DeclarationType();
10576 } else {
10577 result_type = this->result_type();
10578 }
10579 transform_type(result_type);
10580 closure_signature.set_result_type(result_type);
10581
10582 // Set closure function's end token to this end token.
10583 closure_function.set_end_token_pos(end_token_pos());
10584
10585 // The closurized method stub just calls into the original method and should
10586 // therefore be skipped by the debugger and in stack traces.
10587 closure_function.set_is_debuggable(false);
10588 closure_function.set_is_visible(false);
10589
10590 // Set closure function's formal parameters to this formal parameters,
10591 // removing the receiver if this is an instance method and adding the closure
10592 // object as first parameter.
10593 const int kClosure = 1;
10594 const int num_implicit_params = NumImplicitParameters();
10595 const int num_fixed_params =
10596 kClosure - num_implicit_params + num_fixed_parameters();
10597 const int num_opt_params = NumOptionalParameters();
10598 const bool has_opt_pos_params = HasOptionalPositionalParameters();
10599 const int num_params = num_fixed_params + num_opt_params;
10600 const int num_pos_params = has_opt_pos_params ? num_params : num_fixed_params;
10601 closure_signature.set_num_fixed_parameters(num_fixed_params);
10602 closure_signature.SetNumOptionalParameters(num_opt_params,
10603 has_opt_pos_params);
10604 closure_signature.set_parameter_types(
10605 Array::Handle(zone, Array::New(num_params, Heap::kOld)));
10606 closure_function.CreateNameArray();
10607 closure_signature.CreateNameArrayIncludingFlags();
10608 AbstractType& param_type = AbstractType::Handle(zone);
10609 String& param_name = String::Handle(zone);
10610 // Add implicit closure object parameter.
10611 param_type = Type::DynamicType();
10612 closure_signature.SetParameterTypeAt(0, param_type);
10613 closure_function.SetParameterNameAt(0, Symbols::ClosureParameter());
10614 for (int i = kClosure; i < num_pos_params; i++) {
10615 param_type = ParameterTypeAt(num_implicit_params - kClosure + i);
10616 transform_type(param_type);
10617 closure_signature.SetParameterTypeAt(i, param_type);
10618 param_name = ParameterNameAt(num_implicit_params - kClosure + i);
10619 // Set the name in the function for positional parameters.
10620 closure_function.SetParameterNameAt(i, param_name);
10621 }
10622 for (int i = num_pos_params; i < num_params; i++) {
10623 param_type = ParameterTypeAt(num_implicit_params - kClosure + i);
10624 transform_type(param_type);
10625 closure_signature.SetParameterTypeAt(i, param_type);
10626 param_name = ParameterNameAt(num_implicit_params - kClosure + i);
10627 // Set the name in the signature for named parameters.
10628 closure_signature.SetParameterNameAt(i, param_name);
10629 if (IsRequiredAt(num_implicit_params - kClosure + i)) {
10630 closure_signature.SetIsRequiredAt(i);
10631 }
10632 }
10633 closure_signature.FinalizeNameArray();
10634 closure_function.InheritKernelOffsetFrom(*this);
10635
10636 if (!is_static() && !IsConstructor()) {
10637 // Change covariant parameter types to Object?.
10638 BitVector is_covariant(zone, NumParameters());
10639 BitVector is_generic_covariant_impl(zone, NumParameters());
10640 kernel::ReadParameterCovariance(*this, &is_covariant,
10641 &is_generic_covariant_impl);
10642
10643 ObjectStore* object_store = IsolateGroup::Current()->object_store();
10644 const auto& object_type =
10645 Type::Handle(zone, object_store->nullable_object_type());
10646 ASSERT(object_type.IsCanonical());
10647 for (intptr_t i = kClosure; i < num_params; ++i) {
10648 const intptr_t original_param_index = num_implicit_params - kClosure + i;
10649 if (is_covariant.Contains(original_param_index) ||
10650 is_generic_covariant_impl.Contains(original_param_index)) {
10651 closure_signature.SetParameterTypeAt(i, object_type);
10652 }
10653 }
10654 }
10655 ASSERT(!closure_signature.IsFinalized());
10656 closure_signature ^= ClassFinalizer::FinalizeType(closure_signature);
10657 closure_function.SetSignature(closure_signature);
10658 set_implicit_closure_function(closure_function);
10659 ASSERT(closure_function.IsImplicitClosureFunction());
10660 ASSERT(HasImplicitClosureFunction());
10661 return closure_function.ptr();
10662#endif // defined(DART_PRECOMPILED_RUNTIME)
10663}
10664
10666 if (implicit_closure_function() != Function::null()) {
10667 const Function& func = Function::Handle(implicit_closure_function());
10668 if (!func.HasCode()) {
10669 set_implicit_closure_function(Function::Handle());
10670 }
10671 }
10672}
10673
10675#if defined(DART_PRECOMPILED_RUNTIME)
10676 if (signature() == FunctionType::null()) {
10677 return String::null();
10678 }
10679#endif
10680 Thread* thread = Thread::Current();
10681 ZoneTextBuffer printer(thread->zone());
10682 const FunctionType& sig = FunctionType::Handle(signature());
10683 sig.Print(kInternalName, &printer);
10684 return Symbols::New(thread, printer.buffer());
10685}
10686
10688#if defined(DART_PRECOMPILED_RUNTIME)
10689 if (signature() == FunctionType::null()) {
10690 return String::null();
10691 }
10692#endif
10693 Thread* thread = Thread::Current();
10694 ZoneTextBuffer printer(thread->zone());
10695 const FunctionType& sig = FunctionType::Handle(signature());
10696 sig.Print(kUserVisibleName, &printer);
10697 return Symbols::New(thread, printer.buffer());
10698}
10699
10701 Zone* zone,
10702 NameVisibility name_visibility,
10703 BaseTextBuffer* printer) const {
10704 AbstractType& param_type = AbstractType::Handle(zone);
10705 const intptr_t num_params = NumParameters();
10706 const intptr_t num_fixed_params = num_fixed_parameters();
10707 const intptr_t num_opt_pos_params = NumOptionalPositionalParameters();
10708 const intptr_t num_opt_named_params = NumOptionalNamedParameters();
10709 const intptr_t num_opt_params = num_opt_pos_params + num_opt_named_params;
10710 ASSERT((num_fixed_params + num_opt_params) == num_params);
10711 intptr_t i = 0;
10712 if (name_visibility == kUserVisibleName) {
10713 // Hide implicit parameters.
10714 i = num_implicit_parameters();
10715 }
10716 String& name = String::Handle(zone);
10717 while (i < num_fixed_params) {
10718 param_type = ParameterTypeAt(i);
10719 ASSERT(!param_type.IsNull());
10720 param_type.PrintName(name_visibility, printer);
10721 if (i != (num_params - 1)) {
10722 printer->AddString(", ");
10723 }
10724 i++;
10725 }
10726 if (num_opt_params > 0) {
10727 if (num_opt_pos_params > 0) {
10728 printer->AddString("[");
10729 } else {
10730 printer->AddString("{");
10731 }
10732 for (intptr_t i = num_fixed_params; i < num_params; i++) {
10733 if (num_opt_named_params > 0 && IsRequiredAt(i)) {
10734 printer->AddString("required ");
10735 }
10736 param_type = ParameterTypeAt(i);
10737 ASSERT(!param_type.IsNull());
10738 param_type.PrintName(name_visibility, printer);
10739 // The parameter name of an optional positional parameter does not need
10740 // to be part of the signature, since it is not used.
10741 if (num_opt_named_params > 0) {
10742 name = ParameterNameAt(i);
10743 printer->AddString(" ");
10744 printer->AddString(name.ToCString());
10745 }
10746 if (i != (num_params - 1)) {
10747 printer->AddString(", ");
10748 }
10749 }
10750 if (num_opt_pos_params > 0) {
10751 printer->AddString("]");
10752 } else {
10753 printer->AddString("}");
10754 }
10755 }
10756}
10757
10759 ASSERT(IsImplicitStaticClosureFunction());
10760 if (implicit_static_closure() != Closure::null()) {
10761 return implicit_static_closure();
10762 }
10763
10764 auto thread = Thread::Current();
10765 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
10766
10767 if (implicit_static_closure() != Closure::null()) {
10768 return implicit_static_closure();
10769 }
10770
10771 Zone* zone = thread->zone();
10772 const auto& closure =
10773 Closure::Handle(zone, Closure::New(Object::null_type_arguments(),
10774 Object::null_type_arguments(), *this,
10775 Object::null_object(), Heap::kOld));
10776 set_implicit_static_closure(closure);
10777 return implicit_static_closure();
10778}
10779
10780ClosurePtr Function::ImplicitInstanceClosure(const Instance& receiver) const {
10781 ASSERT(IsImplicitClosureFunction());
10782 Zone* zone = Thread::Current()->zone();
10783 TypeArguments& instantiator_type_arguments = TypeArguments::Handle(zone);
10784 if (!HasInstantiatedSignature(kCurrentClass)) {
10785 instantiator_type_arguments = receiver.GetTypeArguments();
10786 }
10787 ASSERT(!HasGenericParent()); // No generic parent function.
10788 return Closure::New(instantiator_type_arguments,
10789 Object::null_type_arguments(), *this, receiver);
10790}
10791
// Returns the function this implicit closure forwards to.
//
// The target is re-resolved by name in the parent function's owner class
// instead of being taken from parent_function() directly: after a reload the
// stored parent Function object may be stale, and the by-name lookup yields
// the current program's version.
FunctionPtr Function::ImplicitClosureTarget(Zone* zone) const {
  const auto& parent = Function::Handle(zone, parent_function());
  const auto& func_name = String::Handle(zone, parent.name());
  const auto& owner = Class::Handle(zone, parent.Owner());
  Thread* thread = Thread::Current();
  // Members can only be resolved on a finalized class.
  const auto& error = owner.EnsureIsFinalized(thread);
  ASSERT(error == Error::null());
  auto& target =
      Function::Handle(zone, Resolver::ResolveFunction(zone, owner, func_name));

  if (!target.IsNull() && (target.ptr() != parent.ptr())) {
    // Resolving to a different function object should only happen after a
    // reload has been attempted (debug-checked below).
    DEBUG_ASSERT(IsolateGroup::Current()->HasAttemptedReload());
    if ((target.is_static() != parent.is_static()) ||
        (target.kind() != parent.kind())) {
      // NOTE(review): the body of this branch appears to be elided in this
      // view; presumably an incompatible resolution is discarded here —
      // confirm against the full source.
    }
  }

  return target.ptr();
}
10812
10814 BaseTextBuffer* printer) const {
10815 if (IsNull()) {
10816 printer->AddString("null"); // Signature optimized out in precompiler.
10817 return;
10818 }
10819 Thread* thread = Thread::Current();
10820 Zone* zone = thread->zone();
10821 const TypeParameters& type_params =
10822 TypeParameters::Handle(zone, type_parameters());
10823 if (!type_params.IsNull()) {
10824 printer->AddString("<");
10825 const intptr_t base = NumParentTypeArguments();
10826 const bool kIsClassTypeParameter = false;
10827 // Type parameter names are meaningless after canonicalization.
10828 type_params.Print(thread, zone, kIsClassTypeParameter, base,
10829 name_visibility, printer);
10830 printer->AddString(">");
10831 }
10832 printer->AddString("(");
10833 PrintParameters(thread, zone, name_visibility, printer);
10834 printer->AddString(") => ");
10835 const AbstractType& res_type = AbstractType::Handle(zone, result_type());
10836 if (!res_type.IsNull()) {
10837 res_type.PrintName(name_visibility, printer);
10838 } else {
10839 printer->AddString("null");
10840 }
10841}
10842
10844 Genericity genericity,
10845 intptr_t num_free_fun_type_params) const {
10846 return FunctionType::Handle(signature())
10847 .IsInstantiated(genericity, num_free_fun_type_params);
10848}
10849
10851 intptr_t num_free_fun_type_params) const {
10852 if (num_free_fun_type_params == kCurrentAndEnclosingFree) {
10853 num_free_fun_type_params = kAllFree;
10854 } else if (genericity != kCurrentClass) {
10855 const intptr_t num_parent_type_args = NumParentTypeArguments();
10856 if (num_parent_type_args > 0 && num_free_fun_type_params > 0) {
10857 // The number of parent type arguments is cached in the FunctionType, so
10858 // we can't consider any FunctionType with free parent type arguments as
10859 // fully instantiated. Instead, the FunctionType must be instantiated to
10860 // reduce the number of parent type arguments, even if they're unused in
10861 // its component types.
10862 return false;
10863 }
10864 // Don't consider local function type parameters as free.
10865 if (num_free_fun_type_params > num_parent_type_args) {
10866 num_free_fun_type_params = num_parent_type_args;
10867 }
10868 }
10869 AbstractType& type = AbstractType::Handle(result_type());
10870 if (!type.IsInstantiated(genericity, num_free_fun_type_params)) {
10871 return false;
10872 }
10873 const intptr_t num_parameters = NumParameters();
10874 for (intptr_t i = 0; i < num_parameters; i++) {
10875 type = ParameterTypeAt(i);
10876 if (!type.IsInstantiated(genericity, num_free_fun_type_params)) {
10877 return false;
10878 }
10879 }
10880 const intptr_t num_type_params = NumTypeParameters();
10881 if (num_type_params > 0) {
10882 TypeParameters& type_params = TypeParameters::Handle(type_parameters());
10883 if (!type_params.AllDynamicBounds()) {
10884 for (intptr_t i = 0; i < type_params.Length(); ++i) {
10885 type = type_params.BoundAt(i);
10886 if (!type.IsInstantiated(genericity, num_free_fun_type_params)) {
10887 return false;
10888 }
10889 }
10890 }
10891 }
10892 return true;
10893}
10894
10897}
10898
10899ClassPtr Function::Owner() const {
10900 ASSERT(untag()->owner() != Object::null());
10901 if (untag()->owner()->IsClass()) {
10902 return Class::RawCast(untag()->owner());
10903 }
10904 const Object& obj = Object::Handle(untag()->owner());
10905 ASSERT(obj.IsPatchClass());
10906 return PatchClass::Cast(obj).wrapped_class();
10907}
10908
10910#if defined(DART_PRECOMPILED_RUNTIME)
10911 UNREACHABLE();
10912#else
10913 StoreNonPointer(&untag()->kernel_offset_, src.untag()->kernel_offset_);
10914#endif
10915}
10916
10918#if defined(DART_PRECOMPILED_RUNTIME)
10919 UNREACHABLE();
10920#else
10921 set_kernel_offset(src.kernel_offset());
10922#endif
10923}
10924
    // NOTE(review): the leading signature line of this member was lost in
    // extraction. From the body it is a Function setter that records the
    // eval-function attributes (script, kernel program info, kernel library
    // index) read back by script()/KernelProgramInfo()/KernelLibraryIndex()
    // above — confirm its exact name against the original object.cc.
    const Script& script,
    const class KernelProgramInfo& kernel_program_info,
    intptr_t index) const {
  // Bundle the three eval attributes into a fixed-shape array stored in the
  // function's data slot, indexed by the EvalFunctionData enum.
  Array& data_field = Array::Handle(
      Array::New(static_cast<intptr_t>(EvalFunctionData::kLength)));
  data_field.SetAt(static_cast<intptr_t>(EvalFunctionData::kScript), script);
  data_field.SetAt(static_cast<intptr_t>(EvalFunctionData::kKernelProgramInfo),
                   kernel_program_info);
  data_field.SetAt(static_cast<intptr_t>(EvalFunctionData::kKernelLibraryIndex),
                   Smi::Handle(Smi::New(index)));
  set_data(data_field);
}
10938
10939ScriptPtr Function::script() const {
10940 // NOTE(turnidge): If you update this function, you probably want to
10941 // update Class::PatchFieldsAndFunctions() at the same time.
10942 if (IsDynamicInvocationForwarder()) {
10943 const Function& target = Function::Handle(ForwardingTarget());
10944 return target.IsNull() ? Script::null() : target.script();
10945 }
10946 if (IsImplicitGetterOrSetter()) {
10947 const auto& field = Field::Handle(accessor_field());
10948 return field.IsNull() ? Script::null() : field.Script();
10949 }
10950 if (is_eval_function()) {
10951 const auto& fdata = Array::Handle(Array::RawCast(data()));
10952 return Script::RawCast(
10953 fdata.At(static_cast<intptr_t>(EvalFunctionData::kScript)));
10954 }
10955 if (token_pos() == TokenPosition::kMinSource) {
10956 // Testing for position 0 is an optimization that relies on temporary
10957 // eval functions having token position 0.
10958 const Script& script = Script::Handle(eval_script());
10959 if (!script.IsNull()) {
10960 return script.ptr();
10961 }
10962 }
10963 const Object& obj = Object::Handle(untag()->owner());
10964 if (obj.IsPatchClass()) {
10965 return PatchClass::Cast(obj).script();
10966 }
10967 if (IsClosureFunction()) {
10968 const Function& function = Function::Handle(parent_function());
10969 if (function.IsNull()) return Script::null();
10970 return function.script();
10971 }
10972 ASSERT(obj.IsClass());
10973 return Class::Cast(obj).script();
10974}
10975
10976#if !defined(DART_PRECOMPILED_RUNTIME)
10977KernelProgramInfoPtr Function::KernelProgramInfo() const {
10978 if (is_eval_function()) {
10979 const auto& fdata = Array::Handle(Array::RawCast(data()));
10981 fdata.At(static_cast<intptr_t>(EvalFunctionData::kKernelProgramInfo)));
10982 }
10983 if (IsClosureFunction()) {
10984 const auto& parent = Function::Handle(parent_function());
10985 return parent.KernelProgramInfo();
10986 }
10987 const auto& owner = Object::Handle(RawOwner());
10988 if (owner.IsClass()) {
10989 return Class::Cast(owner).KernelProgramInfo();
10990 }
10991 return PatchClass::Cast(owner).kernel_program_info();
10992}
10993
10994TypedDataViewPtr Function::KernelLibrary() const {
10996 return info.KernelLibrary(KernelLibraryIndex());
10997}
10998
11000 const intptr_t kernel_library_index = KernelLibraryIndex();
11001 if (kernel_library_index == -1) return 0;
11003 return info.KernelLibraryStartOffset(kernel_library_index);
11004}
11005
11007 if (IsNoSuchMethodDispatcher() || IsInvokeFieldDispatcher() ||
11008 IsFfiCallbackTrampoline()) {
11009 return -1;
11010 }
11011 if (is_eval_function()) {
11012 const auto& fdata = Array::Handle(Array::RawCast(data()));
11013 return Smi::Value(static_cast<SmiPtr>(fdata.At(
11014 static_cast<intptr_t>(EvalFunctionData::kKernelLibraryIndex))));
11015 }
11016 if (IsClosureFunction()) {
11017 const auto& parent = Function::Handle(parent_function());
11018 ASSERT(!parent.IsNull());
11019 return parent.KernelLibraryIndex();
11020 }
11021
11022 const auto& obj = Object::Handle(untag()->owner());
11023 if (obj.IsClass()) {
11024 const auto& lib = Library::Handle(Class::Cast(obj).library());
11025 return lib.kernel_library_index();
11026 }
11027 ASSERT(obj.IsPatchClass());
11028 return PatchClass::Cast(obj).kernel_library_index();
11029}
11030#endif
11031
11033 return HasCode() && Code::Handle(CurrentCode()).is_optimized();
11034}
11035
11036const char* Function::NameCString(NameVisibility name_visibility) const {
11037 switch (name_visibility) {
11038 case kInternalName:
11039 return String::Handle(name()).ToCString();
11040 case kScrubbedName:
11041 case kUserVisibleName:
11042 return UserVisibleNameCString();
11043 }
11044 UNREACHABLE();
11045 return nullptr;
11046}
11047
11049 if (FLAG_show_internal_names) {
11050 return String::Handle(name()).ToCString();
11051 }
11052 is_extension_type_member();
11054 is_extension_member() || is_extension_type_member());
11055}
11056
11057StringPtr Function::UserVisibleName() const {
11058 if (FLAG_show_internal_names) {
11059 return name();
11060 }
11061 return Symbols::New(
11064 is_extension_member() || is_extension_type_member()));
11065}
11066
11068 Thread* thread = Thread::Current();
11069 ZoneTextBuffer printer(thread->zone());
11070 PrintName(NameFormattingParams(kScrubbedName), &printer);
11071 return Symbols::New(thread, printer.buffer());
11072}
11073
11075 Thread* thread = Thread::Current();
11076 ZoneTextBuffer printer(thread->zone());
11077 PrintName(NameFormattingParams(kScrubbedName), &printer);
11078 return printer.buffer();
11079}
11080
11082 Thread* thread = Thread::Current();
11083 ZoneTextBuffer printer(thread->zone());
11084 PrintName(NameFormattingParams(kUserVisibleName), &printer);
11085 return Symbols::New(thread, printer.buffer());
11086}
11087
11089 Thread* thread = Thread::Current();
11090 ZoneTextBuffer printer(thread->zone());
11091 PrintName(NameFormattingParams(kUserVisibleName), &printer);
11092 return printer.buffer();
11093}
11094
11095static void FunctionPrintNameHelper(const Function& fun,
11097 BaseTextBuffer* printer) {
11098 if (fun.IsNonImplicitClosureFunction()) {
11099 if (params.include_parent_name) {
11100 const auto& parent = Function::Handle(fun.parent_function());
11101 if (parent.IsNull()) {
11102 printer->AddString(Symbols::OptimizedOut().ToCString());
11103 } else {
11104 parent.PrintName(params, printer);
11105 }
11106 // A function's scrubbed name and its user visible name are identical.
11107 printer->AddString(".");
11108 }
11109 if (params.disambiguate_names &&
11110 fun.name() == Symbols::AnonymousClosure().ptr()) {
11111 if (fun.token_pos().IsReal()) {
11112 printer->Printf("<anonymous closure @%" Pd ">", fun.token_pos().Pos());
11113 } else {
11114 printer->Printf("<anonymous closure @no position>");
11115 }
11116 } else {
11117 printer->AddString(fun.NameCString(params.name_visibility));
11118 if (params.disambiguate_names) {
11119 if (fun.token_pos().IsReal()) {
11120 printer->Printf("@<%" Pd ">", fun.token_pos().Pos());
11121 } else {
11122 printer->Printf("@<no position>");
11123 }
11124 }
11125 }
11126 return;
11127 }
11128 if (params.disambiguate_names) {
11129 if (fun.IsInvokeFieldDispatcher()) {
11130 printer->AddString("[invoke-field] ");
11131 }
11132 if (fun.IsNoSuchMethodDispatcher()) {
11133 printer->AddString("[no-such-method] ");
11134 }
11135 if (fun.IsImplicitClosureFunction()) {
11136 printer->AddString("[tear-off] ");
11137 }
11138 if (fun.IsMethodExtractor()) {
11139 printer->AddString("[tear-off-extractor] ");
11140 }
11141 }
11142
11143 if (fun.kind() == UntaggedFunction::kConstructor) {
11144 printer->AddString("new ");
11145 } else if (params.include_class_name) {
11146 const Class& cls = Class::Handle(fun.Owner());
11147 if (!cls.IsTopLevel()) {
11148 const Class& mixin = Class::Handle(cls.Mixin());
11149 printer->AddString(params.name_visibility == Object::kUserVisibleName
11150 ? mixin.UserVisibleNameCString()
11151 : cls.NameCString(params.name_visibility));
11152 printer->AddString(".");
11153 }
11154 }
11155
11156 printer->AddString(fun.NameCString(params.name_visibility));
11157
11158 // Dispatchers that are created with an arguments descriptor need both the
11159 // name and the saved arguments descriptor to disambiguate.
11160 if (params.disambiguate_names && fun.HasSavedArgumentsDescriptor()) {
11161 const auto& args_desc_array = Array::Handle(fun.saved_args_desc());
11162 const ArgumentsDescriptor args_desc(args_desc_array);
11163 args_desc.PrintTo(printer);
11164 }
11165}
11166
11168 BaseTextBuffer* printer) const {
11169 if (!IsLocalFunction()) {
11170 FunctionPrintNameHelper(*this, params, printer);
11171 return;
11172 }
11173 auto& fun = Function::Handle(ptr());
11174 FunctionPrintNameHelper(fun, params, printer);
11175}
11176
// Returns the source text of this function as a snippet of its script, or
// String::null() when no source/location information is available. The end
// token may be trimmed for closures (see the special cases below).
StringPtr Function::GetSource() const {
  if (IsImplicitConstructor() || is_synthetic()) {
    // We may need to handle more cases when the restrictions on mixins are
    // relaxed. In particular we might start associating some source with the
    // forwarding constructors when it becomes possible to specify a particular
    // constructor from the mixin to use.
    return String::null();
  }
  Zone* zone = Thread::Current()->zone();
  const Script& func_script = Script::Handle(zone, script());

  // Resolve start and end token positions to (line, column); bail out with
  // null if either position cannot be mapped back to source.
  intptr_t from_line, from_col;
  if (!func_script.GetTokenLocation(token_pos(), &from_line, &from_col)) {
    return String::null();
  }
  intptr_t to_line, to_col;
  if (!func_script.GetTokenLocation(end_token_pos(), &to_line, &to_col)) {
    return String::null();
  }
  intptr_t to_length = func_script.GetTokenLength(end_token_pos());
  if (to_length < 0) {
    return String::null();
  }

  if (to_length == 1) {
    // Handle special cases for end tokens of closures (where we exclude the
    // last token):
    // (1) "foo(() => null, bar);": End token is `,', but we don't print it.
    // (2) "foo(() => null);": End token is ')`, but we don't print it.
    // (3) "var foo = () => null;": End token is `;', but in this case the
    //     token semicolon belongs to the assignment so we skip it.
    const String& src = String::Handle(func_script.Source());
    if (src.IsNull() || src.Length() == 0) {
      // Source was stripped (e.g. by the precompiler).
      return Symbols::OptimizedOut().ptr();
    }
    uint16_t end_char = src.CharAt(end_token_pos().Pos());
    if ((end_char == ',') ||  // Case 1.
        (end_char == ')') ||  // Case 2.
        (end_char == ';' && String::Handle(zone, name())
                                .Equals("<anonymous closure>"))) {  // Case 3.
      to_length = 0;
    }
  }

  return func_script.GetSnippet(from_line, from_col, to_line,
                                to_col + to_length);
}
11224
11225// Construct fingerprint from token stream. The token stream contains also
11226// arguments.
11228#if !defined(DART_PRECOMPILED_RUNTIME)
11230 *this);
11231#else
11232 return 0;
11233#endif // !defined(DART_PRECOMPILED_RUNTIME)
11234}
11235
11237 const ZoneGrowableArray<const ICData*>& deopt_id_to_ic_data,
11238 const Array& edge_counters_array,
11239 const Array& coverage_array) const {
11240#if !defined(DART_PRECOMPILED_RUNTIME)
11241 // Already installed nothing to do.
11242 if (ic_data_array() != Array::null()) {
11243 ASSERT(coverage_array.ptr() == GetCoverageArray());
11244 return;
11245 }
11246
11247 // Compute number of ICData objects to save.
11248 intptr_t count = 0;
11249 for (intptr_t i = 0; i < deopt_id_to_ic_data.length(); i++) {
11250 if (deopt_id_to_ic_data[i] != nullptr) {
11251 count++;
11252 }
11253 }
11254
11255 // Compress sparse deopt_id_to_ic_data mapping into a linear sequence of
11256 // ICData objects.
11257 const Array& array = Array::Handle(
11258 Array::New(ICDataArrayIndices::kFirstICData + count, Heap::kOld));
11259 for (intptr_t i = 0, pos = ICDataArrayIndices::kFirstICData;
11260 i < deopt_id_to_ic_data.length(); i++) {
11261 if (deopt_id_to_ic_data[i] != nullptr) {
11262 ASSERT(i == deopt_id_to_ic_data[i]->deopt_id());
11263 array.SetAt(pos++, *deopt_id_to_ic_data[i]);
11264 }
11265 }
11266 array.SetAt(ICDataArrayIndices::kEdgeCounters, edge_counters_array);
11267 // Preserve coverage_array which is stored early after graph construction.
11268 array.SetAt(ICDataArrayIndices::kCoverageData, coverage_array);
11269 set_ic_data_array(array);
11270#else // DART_PRECOMPILED_RUNTIME
11271 UNREACHABLE();
11272#endif // DART_PRECOMPILED_RUNTIME
11273}
11274
11276 ZoneGrowableArray<const ICData*>* deopt_id_to_ic_data,
11277 bool clone_ic_data) const {
11278#if !defined(DART_PRECOMPILED_RUNTIME)
11279 if (FLAG_force_clone_compiler_objects) {
11280 clone_ic_data = true;
11281 }
11282 ASSERT(deopt_id_to_ic_data->is_empty());
11283 Zone* zone = Thread::Current()->zone();
11284 const Array& saved_ic_data = Array::Handle(zone, ic_data_array());
11285 if (saved_ic_data.IsNull()) {
11286 // Could happen with not-yet compiled unoptimized code or force-optimized
11287 // functions.
11288 return;
11289 }
11290 const intptr_t saved_length = saved_ic_data.Length();
11291 ASSERT(saved_length > 0);
11292 if (saved_length > ICDataArrayIndices::kFirstICData) {
11293 const intptr_t restored_length =
11294 ICData::Cast(Object::Handle(zone, saved_ic_data.At(saved_length - 1)))
11295 .deopt_id() +
11296 1;
11297 deopt_id_to_ic_data->SetLength(restored_length);
11298 for (intptr_t i = 0; i < restored_length; i++) {
11299 (*deopt_id_to_ic_data)[i] = nullptr;
11300 }
11301 for (intptr_t i = ICDataArrayIndices::kFirstICData; i < saved_length; i++) {
11302 ICData& ic_data = ICData::ZoneHandle(zone);
11303 ic_data ^= saved_ic_data.At(i);
11304 if (clone_ic_data) {
11305 const ICData& original_ic_data = ICData::Handle(zone, ic_data.ptr());
11306 ic_data = ICData::Clone(ic_data);
11307 ic_data.SetOriginal(original_ic_data);
11308 }
11309 ASSERT(deopt_id_to_ic_data->At(ic_data.deopt_id()) == nullptr);
11310 (*deopt_id_to_ic_data)[ic_data.deopt_id()] = &ic_data;
11311 }
11312 }
11313#else // DART_PRECOMPILED_RUNTIME
11314 UNREACHABLE();
11315#endif // DART_PRECOMPILED_RUNTIME
11316}
11317
11319 const Array& arr = Array::Handle(ic_data_array());
11320 if (arr.IsNull()) {
11321 return Array::null();
11322 }
11323 return Array::RawCast(arr.At(ICDataArrayIndices::kCoverageData));
11324}
11325
11326void Function::set_ic_data_array(const Array& value) const {
11327 untag()->set_ic_data_array<std::memory_order_release>(value.ptr());
11328}
11329
11330ArrayPtr Function::ic_data_array() const {
11331 return untag()->ic_data_array<std::memory_order_acquire>();
11332}
11333
11335 set_ic_data_array(Array::null_array());
11336}
11337
11338ICDataPtr Function::FindICData(intptr_t deopt_id) const {
11339 const Array& array = Array::Handle(ic_data_array());
11340 ICData& ic_data = ICData::Handle();
11341 for (intptr_t i = ICDataArrayIndices::kFirstICData; i < array.Length(); i++) {
11342 ic_data ^= array.At(i);
11343 if (ic_data.deopt_id() == deopt_id) {
11344 return ic_data.ptr();
11345 }
11346 }
11347 return ICData::null();
11348}
11349
11350void Function::SetDeoptReasonForAll(intptr_t deopt_id,
11351 ICData::DeoptReasonId reason) {
11352 const Array& array = Array::Handle(ic_data_array());
11353 ICData& ic_data = ICData::Handle();
11354 for (intptr_t i = ICDataArrayIndices::kFirstICData; i < array.Length(); i++) {
11355 ic_data ^= array.At(i);
11356 if (ic_data.deopt_id() == deopt_id) {
11357 ic_data.AddDeoptReason(reason);
11358 }
11359 }
11360}
11361
11362bool Function::CheckSourceFingerprint(int32_t fp, const char* kind) const {
11363#if !defined(DEBUG)
11364 return true; // Only check on debug.
11365#endif
11366
11367#if !defined(DART_PRECOMPILED_RUNTIME)
11368 // Check that the function is marked as recognized via the vm:recognized
11369 // pragma. This is so that optimizations that change the signature will know
11370 // not to touch it.
11371 if (kind != nullptr && !MethodRecognizer::IsMarkedAsRecognized(*this, kind)) {
11373 "Recognized method %s should be marked with: "
11374 "@pragma(\"vm:recognized\", \"%s\")\n",
11375 ToQualifiedCString(), kind);
11376 return false;
11377 }
11378#endif
11379
11380 if (IsolateGroup::Current()->obfuscate() || FLAG_precompiled_mode ||
11382 return true; // The kernel structure has been altered, skip checking.
11383 }
11384
11385 if (SourceFingerprint() != fp) {
11386 // This output can be copied into a file, then used with sed
11387 // to replace the old values.
11388 // sed -i.bak -f /tmp/newkeys \
11389 // runtime/vm/compiler/recognized_methods_list.h
11390 THR_Print("s/0x%08x/0x%08x/\n", fp, SourceFingerprint());
11391 return false;
11392 }
11393 return true;
11394}
11395
11397 if (HasCode()) return CurrentCode();
11398 Thread* thread = Thread::Current();
11399 ASSERT(thread->IsDartMutatorThread());
11400 DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame());
11401 Zone* zone = thread->zone();
11402 const Object& result =
11403 Object::Handle(zone, Compiler::CompileFunction(thread, *this));
11404 if (result.IsError()) {
11405 if (result.ptr() == Object::out_of_memory_error().ptr()) {
11407 UNREACHABLE();
11408 }
11409 if (result.IsLanguageError()) {
11410 Exceptions::ThrowCompileTimeError(LanguageError::Cast(result));
11411 UNREACHABLE();
11412 }
11413 Exceptions::PropagateError(Error::Cast(result));
11414 UNREACHABLE();
11415 }
11416 // Compiling in unoptimized mode should never fail if there are no errors.
11417 RELEASE_ASSERT(HasCode());
11418 ASSERT(ForceOptimize() || unoptimized_code() == result.ptr());
11419 return CurrentCode();
11420}
11421
11423#if !defined(DART_PRECOMPILED_RUNTIME)
11424 if (!IsDynamicFunction()) {
11425 return false;
11426 }
11427
11428 // For functions which need an args descriptor the switchable call sites will
11429 // transition directly to calling via a stub (and therefore never call the
11430 // monomorphic entry).
11431 //
11432 // See runtime_entry.cc:DEFINE_RUNTIME_ENTRY(UnlinkedCall)
11433 if (PrologueNeedsArgumentsDescriptor()) {
11434 return false;
11435 }
11436
11437 // All dyn:* forwarders are called via SwitchableCalls and all except the ones
11438 // with `PrologueNeedsArgumentsDescriptor()` transition into monomorphic
11439 // state.
11441 return true;
11442 }
11443
11444 // AOT mode uses table dispatch.
11445 // In JIT mode all instance calls use switchable calls.
11446 if (!FLAG_precompiled_mode) {
11447 return true;
11448 }
11449
11450 // Only if there are dynamic callers and if we didn't create a dyn:* forwarder
11451 // for it do we need the monomorphic checked entry.
11452 return HasDynamicCallers(zone) &&
11453 !kernel::NeedsDynamicInvocationForwarder(*this);
11454#else
11455 UNREACHABLE();
11456 return true;
11457#endif
11458}
11459
11461#if !defined(DART_PRECOMPILED_RUNTIME)
11462 // Issue(dartbug.com/42719):
11463 // Right now the metadata of _Closure.call says there are no dynamic callers -
11464 // even though there can be. To be conservative we return true.
11465 if ((name() == Symbols::GetCall().ptr() || name() == Symbols::call().ptr()) &&
11466 Class::IsClosureClass(Owner())) {
11467 return true;
11468 }
11469
11470 // Use the results of TFA to determine whether this function is ever
11471 // called dynamically, i.e. using switchable calls.
11473 metadata = kernel::ProcedureAttributesOf(*this, zone);
11474 if (IsGetterFunction() || IsImplicitGetterFunction() || IsMethodExtractor()) {
11475 // Dynamic method call through field/getter involves dynamic call of
11476 // the field/getter.
11477 return metadata.getter_called_dynamically ||
11479 } else {
11481 }
11482#else
11483 UNREACHABLE();
11484 return true;
11485#endif
11486}
11487
11489 // These functions have a saved compile-time arguments descriptor that is
11490 // used in lieu of the runtime arguments descriptor in generated IL.
11491 if (HasSavedArgumentsDescriptor()) {
11492 return false;
11493 }
11494 // The prologue of those functions need to examine the arg descriptor for
11495 // various purposes.
11496 return IsGeneric() || HasOptionalParameters();
11497}
11498
11500 return FLAG_enable_multiple_entrypoints &&
11501 (NeedsTypeArgumentTypeChecks() || NeedsArgumentTypeChecks());
11502}
11503
11504intptr_t Function::SourceSize() const {
11505 const TokenPosition& start = token_pos();
11506 const TokenPosition& end = end_token_pos();
11507 if (!end.IsReal() || start.IsNoSource() || start.IsClassifying()) {
11508 // No source information, so just return 0.
11509 return 0;
11510 }
11511 if (start.IsSynthetic()) {
11512 // Try and approximate the source size using the parent's source size.
11513 const auto& parent = Function::Handle(parent_function());
11514 ASSERT(!parent.IsNull());
11515 const intptr_t parent_size = parent.SourceSize();
11516 if (parent_size == 0) {
11517 return parent_size;
11518 }
11519 // Parent must have a real ending position.
11520 return parent_size - (parent.end_token_pos().Pos() - end.Pos());
11521 }
11522 return end.Pos() - start.Pos();
11523}
11524
// Debug/logging description of this function: its name followed by modifier
// and kind tags. The exact strings are load-bearing for log consumers, so
// this block is documented rather than restyled.
const char* Function::ToCString() const {
  if (IsNull()) {
    return "Function: null";
  }
  Zone* zone = Thread::Current()->zone();
  ZoneTextBuffer buffer(zone);
  buffer.Printf("Function '%s':", String::Handle(zone, name()).ToCString());
  if (is_static()) {
    buffer.AddString(" static");
  }
  if (is_abstract()) {
    buffer.AddString(" abstract");
  }
  // Append a tag describing the function kind; plain functions, closures and
  // explicit getters/setters get no extra tag.
  switch (kind()) {
    case UntaggedFunction::kRegularFunction:
    case UntaggedFunction::kClosureFunction:
    case UntaggedFunction::kImplicitClosureFunction:
    case UntaggedFunction::kGetterFunction:
    case UntaggedFunction::kSetterFunction:
      break;
    case UntaggedFunction::kConstructor:
      // A static constructor is a factory.
      buffer.AddString(is_static() ? " factory" : " constructor");
      break;
    case UntaggedFunction::kImplicitGetter:
      buffer.AddString(" getter");
      break;
    case UntaggedFunction::kImplicitSetter:
      buffer.AddString(" setter");
      break;
    case UntaggedFunction::kImplicitStaticGetter:
      buffer.AddString(" static-getter");
      break;
    case UntaggedFunction::kFieldInitializer:
      buffer.AddString(" field-initializer");
      break;
    case UntaggedFunction::kMethodExtractor:
      buffer.AddString(" method-extractor");
      break;
    case UntaggedFunction::kNoSuchMethodDispatcher:
      buffer.AddString(" no-such-method-dispatcher");
      break;
    case UntaggedFunction::kDynamicInvocationForwarder:
      buffer.AddString(" dynamic-invocation-forwarder");
      break;
    case UntaggedFunction::kInvokeFieldDispatcher:
      buffer.AddString(" invoke-field-dispatcher");
      break;
    case UntaggedFunction::kIrregexpFunction:
      buffer.AddString(" irregexp-function");
      break;
    case UntaggedFunction::kFfiTrampoline:
      buffer.AddString(" ffi-trampoline-function");
      break;
    case UntaggedFunction::kRecordFieldGetter:
      buffer.AddString(" record-field-getter");
      break;
    default:
      UNREACHABLE();
  }
  // Dispatchers carry their saved arguments descriptor for disambiguation.
  if (HasSavedArgumentsDescriptor()) {
    const auto& args_desc_array = Array::Handle(zone, saved_args_desc());
    const ArgumentsDescriptor args_desc(args_desc_array);
    buffer.AddChar('[');
    args_desc.PrintTo(&buffer);
    buffer.AddChar(']');
  }
  if (is_const()) {
    buffer.AddString(" const");
  }
  buffer.AddChar('.');
  return buffer.buffer();
}
11597
11599 uint32_t packed_parameter_counts) const {
11600 untag()->packed_parameter_counts_ = packed_parameter_counts;
11601}
11602
11604 uint16_t packed_type_parameter_counts) const {
11605 untag()->packed_type_parameter_counts_ = packed_type_parameter_counts;
11606}
11607
11609 ASSERT(value >= 0);
11610 untag()->packed_parameter_counts_.Update<PackedNumImplicitParameters>(value);
11611}
11612
11613void ClosureData::set_default_type_arguments_instantiation_mode(
11614 InstantiationMode value) const {
11615 untag()->packed_fields_.Update<PackedInstantiationMode>(value);
11616}
11617
11618Function::AwaiterLink ClosureData::awaiter_link() const {
11619 const uint8_t depth =
11620 untag()
11621 ->packed_fields_.Read<UntaggedClosureData::PackedAwaiterLinkDepth>();
11622 const uint8_t index =
11623 untag()
11624 ->packed_fields_.Read<UntaggedClosureData::PackedAwaiterLinkIndex>();
11625 return {depth, index};
11626}
11627
11628void ClosureData::set_awaiter_link(Function::AwaiterLink link) const {
11629 untag()->packed_fields_.Update<UntaggedClosureData::PackedAwaiterLinkDepth>(
11630 link.depth);
11631 untag()->packed_fields_.Update<UntaggedClosureData::PackedAwaiterLinkIndex>(
11632 link.index);
11633}
11634
11635ClosureDataPtr ClosureData::New() {
11637 return Object::Allocate<ClosureData>(Heap::kOld);
11638}
11639
11640const char* ClosureData::ToCString() const {
11641 if (IsNull()) {
11642 return "ClosureData: null";
11643 }
11644 auto const zone = Thread::Current()->zone();
11645 ZoneTextBuffer buffer(zone);
11646 buffer.Printf("ClosureData: context_scope: 0x%" Px "",
11647 static_cast<uword>(context_scope()));
11648 buffer.AddString(" parent_function: ");
11649 if (parent_function() == Object::null()) {
11650 buffer.AddString("null");
11651 } else {
11652 buffer.AddString(Object::Handle(parent_function()).ToCString());
11653 }
11654 buffer.Printf(" implicit_static_closure: 0x%" Px "",
11655 static_cast<uword>(implicit_static_closure()));
11656 return buffer.buffer();
11657}
11658
11659void FunctionType::set_num_fixed_parameters(intptr_t value) const {
11660 ASSERT(value >= 0);
11661 untag()->packed_parameter_counts_.Update<PackedNumFixedParameters>(value);
11662}
11663
11664void FfiTrampolineData::set_callback_target(const Function& value) const {
11665 untag()->set_callback_target(value.ptr());
11666}
11667
11669 intptr_t value,
11670 bool are_optional_positional) const {
11671 // HasOptionalNamedParameters only checks this bit, so only set it if there
11672 // are actual named parameters.
11673 untag()->packed_parameter_counts_.Update<PackedHasNamedOptionalParameters>(
11674 (value > 0) && !are_optional_positional);
11675 untag()->packed_parameter_counts_.Update<PackedNumOptionalParameters>(value);
11676}
11677
11678FunctionTypePtr FunctionType::New(Heap::Space space) {
11679 return Object::Allocate<FunctionType>(space);
11680}
11681
11682FunctionTypePtr FunctionType::New(intptr_t num_parent_type_arguments,
11684 Heap::Space space) {
11685 Zone* Z = Thread::Current()->zone();
11686 const FunctionType& result =
11688 result.set_packed_parameter_counts(0);
11689 result.set_packed_type_parameter_counts(0);
11690 result.set_named_parameter_names(Object::empty_array());
11691 result.SetNumParentTypeArguments(num_parent_type_arguments);
11692 result.SetHash(0);
11693 result.set_flags(0);
11694 result.set_nullability(nullability);
11696 result.InitializeTypeTestingStubNonAtomic(
11698 return result.ptr();
11699}
11700
11701FunctionTypePtr FunctionType::Clone(const FunctionType& orig,
11702 Heap::Space space) {
11703 if (orig.IsGeneric()) {
11704 // Need a deep clone in order to update owners of type parameters.
11705 return FunctionType::RawCast(
11706 orig.UpdateFunctionTypes(0, kAllFree, space, nullptr));
11707 } else {
11708 return FunctionType::RawCast(Object::Clone(orig, space));
11709 }
11710}
11711
11713 Zone* zone = Thread::Current()->zone();
11714 ZoneTextBuffer printer(zone);
11715 Print(kUserVisibleName, &printer);
11716 return printer.buffer();
11717}
11718
11720 Thread* thread = Thread::Current();
11721 ZoneTextBuffer printer(thread->zone());
11722 Print(kUserVisibleName, &printer);
11723 return Symbols::New(thread, printer.buffer());
11724}
11725
11726const char* FunctionType::ToCString() const {
11727 if (IsNull()) {
11728 return "FunctionType: null";
11729 }
11730 Zone* zone = Thread::Current()->zone();
11731 ZoneTextBuffer printer(zone);
11732 const char* suffix = NullabilitySuffix(kInternalName);
11733 if (suffix[0] != '\0') {
11734 printer.AddString("(");
11735 }
11736 Print(kInternalName, &printer);
11737 if (suffix[0] != '\0') {
11738 printer.AddString(")");
11739 printer.AddString(suffix);
11740 }
11741 return printer.buffer();
11742}
11743
11744void ClosureData::set_context_scope(const ContextScope& value) const {
11745 untag()->set_context_scope(value.ptr());
11746}
11747
11748void ClosureData::set_implicit_static_closure(const Closure& closure) const {
11749 ASSERT(!closure.IsNull());
11750 ASSERT(untag()->closure() == Closure::null());
11751 untag()->set_closure<std::memory_order_release>(closure.ptr());
11752}
11753
11754void FfiTrampolineData::set_c_signature(const FunctionType& value) const {
11755 untag()->set_c_signature(value.ptr());
11756}
11757
11758void FfiTrampolineData::set_callback_id(int32_t callback_id) const {
11759 StoreNonPointer(&untag()->callback_id_, callback_id);
11760}
11761
11762void FfiTrampolineData::set_callback_exceptional_return(
11763 const Instance& value) const {
11764 untag()->set_callback_exceptional_return(value.ptr());
11765}
11766
11767void FfiTrampolineData::set_ffi_function_kind(FfiCallbackKind kind) const {
11768 StoreNonPointer(&untag()->ffi_function_kind_, static_cast<uint8_t>(kind));
11769}
11770
11771FfiTrampolineDataPtr FfiTrampolineData::New() {
11773 const auto& data = FfiTrampolineData::Handle(
11774 Object::Allocate<FfiTrampolineData>(Heap::kOld));
11775 data.set_callback_id(-1);
11776 return data.ptr();
11777}
11778
11779const char* FfiTrampolineData::ToCString() const {
11780 const FunctionType& c_sig = FunctionType::Handle(c_signature());
11781 return OS::SCreate(Thread::Current()->zone(),
11782 "TrampolineData: c_signature=%s",
11783 c_sig.ToUserVisibleCString());
11784}
11785
11787 return this->Clone(*this);
11788}
11789
11790FieldPtr Field::Original() const {
11791 if (IsNull()) {
11792 return Field::null();
11793 }
11794 if (untag()->owner()->IsField()) {
11795 return static_cast<FieldPtr>(untag()->owner());
11796 }
11797 return this->ptr();
11798}
11799
// Returns the class id guard recorded for this field's stored values (read
// with relaxed ordering). Debug builds assert the reader may legitimately
// observe this value; the ifdef structure is load-bearing, so this block is
// documented rather than restyled.
intptr_t Field::guarded_cid() const {
#if defined(DEBUG)
  // This assertion ensures that the cid seen by the background compiler is
  // consistent. So the assertion passes if the field is a clone. It also
  // passes if the field is static, because we don't use field guards on
  // static fields. It also passes if we're compiling unoptimized
  // code (in which case the caller might get different answers if it obtains
  // the guarded cid multiple times).
  Thread* thread = Thread::Current();
#if defined(DART_PRECOMPILED_RUNTIME)
  ASSERT(!thread->IsInsideCompiler() || is_static());
#else
  // Inside the compiler, the field must be a clone exactly when field cloning
  // is enabled (or be static).
  ASSERT(!thread->IsInsideCompiler() ||
         ((CompilerState::Current().should_clone_fields() == !IsOriginal())) ||
         is_static());
#endif
#endif
  return LoadNonPointer<ClassIdTagType, std::memory_order_relaxed>(
      &untag()->guarded_cid_);
}
11820
// NOTE(review): the signature line is not visible in this chunk
// (presumably `bool Field::is_nullable() const {`).
#if defined(DEBUG)
  // Same assert as guarded_cid(), because is_nullable() also needs to be
  // consistent for the background compiler.
  Thread* thread = Thread::Current();
#if defined(DART_PRECOMPILED_RUNTIME)
  ASSERT(!thread->IsInsideCompiler() || is_static());
#else
  ASSERT(!thread->IsInsideCompiler() ||
         ((CompilerState::Current().should_clone_fields() == !IsOriginal())) ||
         is_static());
#endif
#endif
  return is_nullable_unsafe();
}
11836
11837void Field::SetOriginal(const Field& value) const {
11838 ASSERT(value.IsOriginal());
11839 ASSERT(!value.IsNull());
11840 untag()->set_owner(static_cast<ObjectPtr>(value.ptr()));
11841}
11842
11843StringPtr Field::GetterName(const String& field_name) {
11844 return String::Concat(Symbols::GetterPrefix(), field_name);
11845}
11846
11847StringPtr Field::GetterSymbol(const String& field_name) {
11848 return Symbols::FromGet(Thread::Current(), field_name);
11849}
11850
11851StringPtr Field::LookupGetterSymbol(const String& field_name) {
11852 return Symbols::LookupFromGet(Thread::Current(), field_name);
11853}
11854
11855StringPtr Field::SetterName(const String& field_name) {
11856 return String::Concat(Symbols::SetterPrefix(), field_name);
11857}
11858
11859StringPtr Field::SetterSymbol(const String& field_name) {
11860 return Symbols::FromSet(Thread::Current(), field_name);
11861}
11862
11863StringPtr Field::LookupSetterSymbol(const String& field_name) {
11864 return Symbols::LookupFromSet(Thread::Current(), field_name);
11865}
11866
11867StringPtr Field::NameFromGetter(const String& getter_name) {
11868 return Symbols::New(Thread::Current(), getter_name, kGetterPrefixLength,
11869 getter_name.Length() - kGetterPrefixLength);
11870}
11871
11872StringPtr Field::NameFromSetter(const String& setter_name) {
11873 return Symbols::New(Thread::Current(), setter_name, kSetterPrefixLength,
11874 setter_name.Length() - kSetterPrefixLength);
11875}
11876
11877StringPtr Field::NameFromInit(const String& init_name) {
11878 return Symbols::New(Thread::Current(), init_name, kInitPrefixLength,
11879 init_name.Length() - kInitPrefixLength);
11880}
11881
  // True when |function_name| starts with the getter prefix symbol.
  // NOTE(review): the signature line is not visible in this chunk.
  return function_name.StartsWith(Symbols::GetterPrefix());
}
11885
  // True when |function_name| starts with the setter prefix symbol.
  // NOTE(review): the signature line is not visible in this chunk.
  return function_name.StartsWith(Symbols::SetterPrefix());
}
11889
  // True when |function_name| starts with the init prefix symbol.
  // NOTE(review): the signature line is not visible in this chunk.
  return function_name.StartsWith(Symbols::InitPrefix());
}
11893
11894void Field::set_name(const String& value) const {
11895 ASSERT(value.IsSymbol());
11896 ASSERT(IsOriginal());
11897 untag()->set_name(value.ptr());
11898}
11899
  // Returns the raw owner. For a clone the owner slot points at the original
  // field, so forward to the original's owner slot instead.
  // NOTE(review): the signature line is not visible in this chunk.
  if (IsOriginal()) {
    return untag()->owner();
  } else {
    const Field& field = Field::Handle(Original());
    ASSERT(field.IsOriginal());
    // An original's owner slot never points at another Field.
    ASSERT(!Object::Handle(field.untag()->owner()).IsField());
    return field.untag()->owner();
  }
}
11910
11911ClassPtr Field::Owner() const {
11912 const Field& field = Field::Handle(Original());
11913 ASSERT(field.IsOriginal());
11914 const Object& obj = Object::Handle(field.untag()->owner());
11915 if (obj.IsClass()) {
11916 return Class::Cast(obj).ptr();
11917 }
11918 ASSERT(obj.IsPatchClass());
11919 return PatchClass::Cast(obj).wrapped_class();
11920}
11921
11922ScriptPtr Field::Script() const {
11923 // NOTE(turnidge): If you update this function, you probably want to
11924 // update Class::PatchFieldsAndFunctions() at the same time.
11925 const Field& field = Field::Handle(Original());
11926 ASSERT(field.IsOriginal());
11927 const Object& obj = Object::Handle(field.untag()->owner());
11928 if (obj.IsClass()) {
11929 return Class::Cast(obj).script();
11930 }
11931 ASSERT(obj.IsPatchClass());
11932 return PatchClass::Cast(obj).script();
11933}
11934
#if !defined(DART_PRECOMPILED_RUNTIME)
// Returns the kernel program info of this field's library, reached through
// either the owning Class or PatchClass.
KernelProgramInfoPtr Field::KernelProgramInfo() const {
  const auto& owner = Object::Handle(RawOwner());
  if (owner.IsClass()) {
    return Class::Cast(owner).KernelProgramInfo();
  }
  return PatchClass::Cast(owner).kernel_program_info();
}
#endif
11944
11945uint32_t Field::Hash() const {
11946 return String::HashRawSymbol(name());
11947}
11948
// NOTE(review): the signature line is not visible in this chunk; this body
// copies the kernel offset from |src| (used when cloning fields).
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  StoreNonPointer(&untag()->kernel_offset_, src.untag()->kernel_offset_);
#endif
}
11956
11957#if !defined(DART_PRECOMPILED_RUNTIME)
// Returns the kernel library blob this field belongs to.
TypedDataViewPtr Field::KernelLibrary() const {
  // NOTE(review): the declaration of |info| is not visible in this chunk;
  // presumably a KernelProgramInfo handle from KernelProgramInfo().
  return info.KernelLibrary(KernelLibraryIndex());
}
11962
  // NOTE(review): the signature line and the declaration of |info| are not
  // visible in this chunk.
  const intptr_t kernel_library_index = KernelLibraryIndex();
  // -1 means this field has no kernel library (see KernelLibraryIndex).
  if (kernel_library_index == -1) return 0;
  return info.KernelLibraryStartOffset(kernel_library_index);
}
11969
  // Returns the kernel library index, resolved through the owner chain.
  // NOTE(review): the signature line is not visible in this chunk.
  const Object& obj = Object::Handle(untag()->owner());
  // During background JIT compilation field objects are copied
  // and copy points to the original field via the owner field.
  if (obj.IsField()) {
    return Field::Cast(obj).KernelLibraryIndex();
  } else if (obj.IsClass()) {
    const auto& lib = Library::Handle(Class::Cast(obj).library());
    return lib.kernel_library_index();
  }
  ASSERT(obj.IsPatchClass());
  return PatchClass::Cast(obj).kernel_library_index();
}
11983#endif // !defined(DART_PRECOMPILED_RUNTIME)
11984
11985void Field::SetFieldTypeSafe(const AbstractType& value) const {
11986 ASSERT(IsOriginal());
11987 ASSERT(!value.IsNull());
11988 if (value.ptr() != type()) {
11989 untag()->set_type(value.ptr());
11990 }
11991}
11992
// Called at finalization time. Requires the program lock to be held for
// writing (see the assertion argument below).
// NOTE(review): the DEBUG_ASSERT(...) opening line is not visible in this
// chunk; the dangling argument below belongs to it.
void Field::SetFieldType(const AbstractType& value) const {
      IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  SetFieldTypeSafe(value);
}
11999
// Allocates a raw, uninitialized Field in old space.
// NOTE(review): one line of this function is not visible in this chunk
// (presumably a class-initialization assertion).
FieldPtr Field::New() {
  return Object::Allocate<Field>(Heap::kOld);
}
12004
// Initializes a freshly allocated field: flags, owner, token positions and
// the initial guard state (guarded cid, nullability, list length).
void Field::InitializeNew(const Field& result,
                          const String& name,
                          bool is_static,
                          bool is_final,
                          bool is_const,
                          bool is_reflectable,
                          bool is_late,
                          const Object& owner,
                          TokenPosition token_pos,
                          TokenPosition end_token_pos) {
  result.set_kind_bits(0);
  result.set_name(name);
  result.set_is_static(is_static);
  if (is_static) {
    // Static fields get a field-table id assigned later; -1 = unassigned.
    result.set_field_id_unsafe(-1);
  } else {
    result.SetOffset(0, 0);
  }
  result.set_is_final(is_final);
  result.set_is_const(is_const);
  result.set_is_reflectable(is_reflectable);
  result.set_is_late(is_late);
  result.set_owner(owner);
  result.set_token_pos(token_pos);
  result.set_end_token_pos(end_token_pos);
  result.set_has_nontrivial_initializer_unsafe(false);
  result.set_has_initializer_unsafe(false);
  // We will make unboxing decision once we read static type or
  // in KernelLoader::ReadInferredType.
  result.set_is_unboxed_unsafe(false);
  result.set_initializer_changed_after_initialization(false);
  NOT_IN_PRECOMPILED(result.set_kernel_offset(0));
  result.set_has_pragma(false);
  // NOTE(review): the argument line of this call is not visible in this
  // chunk.
  result.set_static_type_exactness_state_unsafe(
  auto isolate_group = IsolateGroup::Current();

// Use field guards if they are enabled and the isolate has never reloaded.
// TODO(johnmccutchan): The reload case assumes the worst case (everything is
// dynamic and possibly null). Attempt to relax this later.
//
// Do not use field guards for late fields as late field initialization
// doesn't update guarded cid and length.
#if defined(PRODUCT)
  const bool use_guarded_cid =
      FLAG_precompiled_mode || (isolate_group->use_field_guards() && !is_late);
#else
  const bool use_guarded_cid =
      FLAG_precompiled_mode ||
      (isolate_group->use_field_guards() &&
       !isolate_group->HasAttemptedReload() && !is_late);
#endif  // !defined(PRODUCT)
  result.set_guarded_cid_unsafe(use_guarded_cid ? kIllegalCid : kDynamicCid);
  result.set_is_nullable_unsafe(use_guarded_cid ? false : true);
  // NOTE(review): the argument line of this call is not visible in this
  // chunk.
  result.set_guarded_list_length_in_object_offset_unsafe(
  // Presently, we only attempt to remember the list length for final fields.
  if (is_final && use_guarded_cid) {
    result.set_guarded_list_length_unsafe(Field::kUnknownFixedLength);
  } else {
    result.set_guarded_list_length_unsafe(Field::kNoFixedLength);
  }
}
12068
// Allocates and initializes a field with the given attributes and static
// type.
FieldPtr Field::New(const String& name,
                    bool is_static,
                    bool is_final,
                    bool is_const,
                    bool is_reflectable,
                    bool is_late,
                    const Object& owner,
                    const AbstractType& type,
                    TokenPosition token_pos,
                    TokenPosition end_token_pos) {
  ASSERT(!owner.IsNull());
  const Field& result = Field::Handle(Field::New());
  InitializeNew(result, name, is_static, is_final, is_const, is_reflectable,
                is_late, owner, token_pos, end_token_pos);
  result.SetFieldTypeSafe(type);
#if !defined(DART_PRECOMPILED_RUNTIME)
  // NOTE(review): a statement on this JIT-only path is not visible in this
  // chunk.
#endif
  return result.ptr();
}
12089
                    // NOTE(review): the first signature line is not visible
                    // in this chunk (presumably Field::NewTopLevel with a
                    // |name| parameter). Top-level fields are created
                    // static and reflectable (see the call below).
                    bool is_final,
                    bool is_const,
                    bool is_late,
                    const Object& owner,
                    TokenPosition token_pos,
                    TokenPosition end_token_pos) {
  ASSERT(!owner.IsNull());
  const Field& result = Field::Handle(Field::New());
  InitializeNew(result, name, true, /* is_static */
                is_final, is_const, true, /* is_reflectable */
                is_late, owner, token_pos, end_token_pos);
  return result.ptr();
}
12104
// Clones this field for the background compiler; the clone records |original|
// via its owner slot (see SetOriginal) and inherits the kernel offset.
FieldPtr Field::Clone(const Field& original) const {
  if (original.IsNull()) {
    return Field::null();
  }
  ASSERT(original.IsOriginal());
  Field& clone = Field::Handle();
  // Using relaxed loading is fine because concurrent fields changes are all
  // guarded, will be reconciled during optimized code installation.
  clone ^= Object::Clone(*this, Heap::kOld, /*load_with_relaxed_atomics=*/true);
  clone.SetOriginal(original);
  clone.InheritKernelOffsetFrom(original);
  return clone.ptr();
}
12118
// NOTE(review): the signature and the opening of the kernel:: call are not
// visible in this chunk; the dangling "*this)" argument below belongs to
// that call. In precompiled mode the result is always 0.
#if !defined(DART_PRECOMPILED_RUNTIME)
      *this);
#else
  return 0;
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
}
12127
  // NOTE(review): the signature line is not visible in this chunk; this
  // path is never taken and exists to satisfy the return type.
  UNREACHABLE();
  return String::null();
}
12132
  // Returns the user-visible (scrubbed) field name as a C string, or the
  // raw internal name when --show-internal-names is set.
  // NOTE(review): the signature line and the opening of the scrubbing call
  // are not visible in this chunk.
  NoSafepointScope no_safepoint;
  if (FLAG_show_internal_names) {
    return String::Handle(name()).ToCString();
  }
      is_extension_member() || is_extension_type_member());
}
12141
// Returns the user-visible (scrubbed) field name as a symbol, or the raw
// internal name when --show-internal-names is set.
StringPtr Field::UserVisibleName() const {
  if (FLAG_show_internal_names) {
    return name();
  }
  // NOTE(review): intermediate argument lines of this call are not visible
  // in this chunk.
  return Symbols::New(
      is_extension_member() || is_extension_type_member()));
}
12151
  // Guarded list length (kUnknownFixedLength / kNoFixedLength sentinels are
  // also stored here). NOTE(review): signature line not visible here.
  return Smi::Value(untag()->guarded_list_length());
}
12155
12156void Field::set_guarded_list_length_unsafe(intptr_t list_length) const {
12157 ASSERT(IsOriginal());
12158 untag()->set_guarded_list_length(Smi::New(list_length));
12159}
12160
  // The offset is stored biased by -kHeapObjectTag (see the setter below);
  // re-add the tag here. NOTE(review): signature line not visible here.
  return untag()->guarded_list_length_in_object_offset_ + kHeapObjectTag;
}
12164
    // NOTE(review): the first signature line is not visible in this chunk.
    intptr_t list_length_offset) const {
  ASSERT(IsOriginal());
  // Stored as an int8_t with kHeapObjectTag subtracted; the trailing assert
  // checks the value round-trips through the narrowing store.
  StoreNonPointer<int8_t, int8_t, std::memory_order_relaxed>(
      &untag()->guarded_list_length_in_object_offset_,
      static_cast<int8_t>(list_length_offset - kHeapObjectTag));
  ASSERT(guarded_list_length_in_object_offset() == list_length_offset);
}
12173
  // Whether a setter must be generated for this field.
  // NOTE(review): the signature line is not visible in this chunk.
  //
  // According to the Dart language specification, final fields don't have
  // a setter, except late final fields without initializer.
  if (is_final()) {
    // Late final fields without initializer always need a setter to check
    // if they are already initialized.
    if (is_late() && !has_initializer()) {
      return true;
    }
    return false;
  }

  // Instance non-final fields always need a setter.
  if (!is_static()) {
    return true;
  }

  // Otherwise, setters for static fields can be omitted
  // and fields can be accessed directly.
  return false;
}
12195
  // Whether a getter must be generated for this field.
  // NOTE(review): the signature line is not visible in this chunk.
  //
  // All instance fields need a getter.
  if (!is_static()) return true;

  // Static fields also need a getter if they have a non-trivial initializer,
  // because it needs to be initialized lazily.
  if (has_nontrivial_initializer()) return true;

  // Static late fields with no initializer also need a getter, to check if it's
  // been initialized.
  return is_late() && !has_initializer();
}
12208
12209const char* Field::ToCString() const {
12210 NoSafepointScope no_safepoint;
12211 if (IsNull()) {
12212 return "Field: null";
12213 }
12214 const char* kF0 = is_static() ? " static" : "";
12215 const char* kF1 = is_late() ? " late" : "";
12216 const char* kF2 = is_final() ? " final" : "";
12217 const char* kF3 = is_const() ? " const" : "";
12218 const char* field_name = String::Handle(name()).ToCString();
12219 const Class& cls = Class::Handle(Owner());
12220 const char* cls_name = String::Handle(cls.Name()).ToCString();
12221 return OS::SCreate(Thread::Current()->zone(), "Field <%s.%s>:%s%s%s%s",
12222 cls_name, field_name, kF0, kF1, kF2, kF3);
12223}
12224
12225// Build a closure object that gets (or sets) the contents of a static
12226// field f and cache the closure in a newly created static field
12227// named #f (or #f= in case of a setter).
12228InstancePtr Field::AccessorClosure(bool make_setter) const {
12229 Thread* thread = Thread::Current();
12230 Zone* zone = thread->zone();
12231 ASSERT(is_static());
12232 const Class& field_owner = Class::Handle(zone, Owner());
12233
12234 String& closure_name = String::Handle(zone, this->name());
12235 closure_name = Symbols::FromConcat(thread, Symbols::HashMark(), closure_name);
12236 if (make_setter) {
12237 closure_name =
12238 Symbols::FromConcat(thread, Symbols::HashMark(), closure_name);
12239 }
12240
12241 Field& closure_field = Field::Handle(zone);
12242 closure_field = field_owner.LookupStaticField(closure_name);
12243 if (!closure_field.IsNull()) {
12244 ASSERT(closure_field.is_static());
12245 const Instance& closure =
12246 Instance::Handle(zone, Instance::RawCast(closure_field.StaticValue()));
12247 ASSERT(!closure.IsNull());
12248 ASSERT(closure.IsClosure());
12249 return closure.ptr();
12250 }
12251
12252 UNREACHABLE();
12253 return Instance::null();
12254}
12255
12256InstancePtr Field::GetterClosure() const {
12257 return AccessorClosure(false);
12258}
12259
12260InstancePtr Field::SetterClosure() const {
12261 return AccessorClosure(true);
12262}
12263
// Returns the weak array of optimized Code depending on this field's guard.
// NOTE(review): the DEBUG_ASSERT(...) opening line is not visible in this
// chunk; the dangling argument below belongs to it (reader lock required).
WeakArrayPtr Field::dependent_code() const {
      IsolateGroup::Current()->program_lock()->IsCurrentThreadReader());
  return untag()->dependent_code();
}
12269
// Replaces the dependent-code array; only valid on the original field.
// NOTE(review): the DEBUG_ASSERT(...) opening line is not visible in this
// chunk; the dangling argument below belongs to it (writer lock required).
void Field::set_dependent_code(const WeakArray& array) const {
  ASSERT(IsOriginal());
      IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  untag()->set_dependent_code(array.ptr());
}
12276
// Bridges a field's dependent-code array to the WeakCodeReferences
// machinery, so optimized code can be deoptimized when the field guard is
// violated. NOTE(review): the class header line is not visible in this
// chunk.
 public:
  explicit FieldDependentArray(const Field& field)
      : WeakCodeReferences(WeakArray::Handle(field.dependent_code())),
        field_(field) {}

  // Writes back the (possibly grown) dependent-code array.
  virtual void UpdateArrayTo(const WeakArray& value) {
    field_.set_dependent_code(value);
  }

  virtual void ReportDeoptimization(const Code& code) {
    if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
      Function& function = Function::Handle(code.function());
      THR_Print("Deoptimizing %s because guard on field %s failed.\n",
                function.ToFullyQualifiedCString(), field_.ToCString());
    }
  }

  virtual void ReportSwitchingCode(const Code& code) {
    if (FLAG_trace_deoptimization || FLAG_trace_deoptimization_verbose) {
      Function& function = Function::Handle(code.function());
      THR_Print(
          "Switching '%s' to unoptimized code because guard"
          " on field '%s' was violated.\n",
          function.ToFullyQualifiedCString(), field_.ToCString());
    }
  }

 private:
  const Field& field_;
};
12309
12310void Field::RegisterDependentCode(const Code& code) const {
12311 ASSERT(IsOriginal());
12312 DEBUG_ASSERT(IsMutatorOrAtDeoptSafepoint());
12313 ASSERT(code.is_optimized());
12314 FieldDependentArray a(*this);
12315 a.Register(code);
12316}
12317
// Disables all optimized code depending on this field's guard.
// NOTE(review): the assertion opener is not visible in this chunk; the
// dangling argument below belongs to it (writer lock required).
void Field::DeoptimizeDependentCode(bool are_mutators_stopped) const {
      IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
  ASSERT(IsOriginal());
  FieldDependentArray a(*this);
  if (FLAG_trace_deoptimization && a.HasCodes()) {
    THR_Print("Deopt for field guard (field %s)\n", ToCString());
  }
  a.DisableCode(are_mutators_stopped);
}
12328
// Compares the guard state (cid, nullability, list length, unboxing and
// exactness encoding) of this field against |other| (typically a clone).
// NOTE(review): the final comparison operand line is not visible in this
// chunk; the dangling "==" below pairs with it.
bool Field::IsConsistentWith(const Field& other) const {
  return (untag()->guarded_cid_ == other.untag()->guarded_cid_) &&
         (untag()->is_nullable_ == other.untag()->is_nullable_) &&
         (untag()->guarded_list_length() ==
          other.untag()->guarded_list_length()) &&
         (is_unboxed() == other.is_unboxed()) &&
         (static_type_exactness_state().Encode() ==
}
12338
  // True when the static field still holds the uninitialized sentinel.
  // NOTE(review): the signature line is not visible in this chunk.
  Thread* thread = Thread::Current();
  const FieldTable* field_table = thread->isolate()->field_table();
  const ObjectPtr raw_value = field_table->At(field_id());
  // Must never observe the in-progress (transition) sentinel here.
  ASSERT(raw_value != Object::transition_sentinel().ptr());
  return raw_value == Object::sentinel().ptr();
}
12346
  // Lazily creates (JIT only) and returns the initializer function.
  // NOTE(review): the signature, lock-acquisition and creation lines are not
  // visible in this chunk.
  ASSERT(has_nontrivial_initializer());
  ASSERT(IsOriginal());
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Function& initializer = Function::Handle(zone, InitializerFunction());
  if (initializer.IsNull()) {
#if defined(DART_PRECOMPILED_RUNTIME)
    UNREACHABLE();
#else
    // Double check after grabbing the lock.
    initializer = InitializerFunction();
    if (initializer.IsNull()) {
    }
#endif
  }
  return initializer.ptr();
}
12368
// NOTE(review): the signature and the mutex-ownership assertion opener are
// not visible in this chunk; the dangling call chain below belongs to that
// assertion (the initializer-functions mutex must be held).
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(IsOriginal());
      ->initializer_functions_mutex()
      ->IsOwnedByCurrentThread());
  // We have to ensure that all stores into the initializer function object
  // happen before releasing the pointer to the initializer as it may be
  // accessed without grabbing the lock.
  untag()->set_initializer_function<std::memory_order_release>(
      initializer.ptr());
#endif
}
12384
  // True once SetInitializerFunction() has published an initializer.
  // NOTE(review): the signature line is not visible in this chunk.
  return untag()->initializer_function() != Function::null();
}
12388
  // Initializes this late instance field on |instance|, evaluating the
  // non-trivial initializer if there is one (trivial initializer == null).
  // NOTE(review): the signature, the |value| handle declaration, the
  // initializer invocation and the throw statements are not visible in this
  // chunk; dangling fragments below belong to those lines.
  ASSERT(IsOriginal());
  ASSERT(is_instance());
  ASSERT(instance.GetField(*this) == Object::sentinel().ptr());

  if (has_nontrivial_initializer()) {
    const Function& initializer = Function::Handle(EnsureInitializerFunction());
    const Array& args = Array::Handle(Array::New(1));
    args.SetAt(0, instance);
    if (!value.IsNull() && value.IsError()) {
      return Error::Cast(value).ptr();
    }
  } else {
    if (is_late() && !has_initializer()) {
      UNREACHABLE();
    }
#if defined(DART_PRECOMPILED_RUNTIME)
    UNREACHABLE();
#else
    // Our trivial initializer is `null`. Any non-`null` initializer is
    // non-trivial (see `KernelLoader::CheckForInitializer()`).
    value = Object::null();
#endif
  }
  ASSERT(value.IsNull() || value.IsInstance());
  // A late final field must not have been initialized concurrently.
  if (is_late() && is_final() &&
      (instance.GetField(*this) != Object::sentinel().ptr())) {
        String::Handle(name()));
    UNREACHABLE();
  }
  instance.SetField(*this, value);
  return Error::null();
}
12426
// Lazily initializes a static field, handling late fields, cyclic
// initialization (detected via the transition sentinel) and initializer
// errors. NOTE(review): the throw statements are not visible in this chunk;
// dangling fragments below belong to those lines.
ErrorPtr Field::InitializeStatic() const {
  ASSERT(IsOriginal());
  ASSERT(is_static());
  if (StaticValue() == Object::sentinel().ptr()) {
    auto& value = Object::Handle();
    if (is_late()) {
      if (!has_initializer()) {
        UNREACHABLE();
      }
      value = EvaluateInitializer();
      if (value.IsError()) {
        return Error::Cast(value).ptr();
      }
      // A late final field must not have been initialized reentrantly.
      if (is_final() && (StaticValue() != Object::sentinel().ptr())) {
            String::Handle(name()));
        UNREACHABLE();
      }
    } else {
      // Mark as in-progress so reentrant initialization is detected below.
      SetStaticValue(Object::transition_sentinel());
      value = EvaluateInitializer();
      if (value.IsError()) {
        SetStaticValue(Object::null_instance());
        return Error::Cast(value).ptr();
      }
    }
    ASSERT(value.IsNull() || value.IsInstance());
    SetStaticValue(value.IsNull() ? Instance::null_instance()
                                  : Instance::Cast(value));
    return Error::null();
  } else if (StaticValue() == Object::transition_sentinel().ptr()) {
    // Cyclic initialization detected.
    ASSERT(!is_late());
    const Array& ctor_args = Array::Handle(Array::New(1));
    const String& field_name = String::Handle(name());
    ctor_args.SetAt(0, field_name);
    UNREACHABLE();
  }
  return Error::null();
}
12468
  // Returns (and caches in the initial field table) the value of a static
  // const / trivially initialized final field.
  // NOTE(review): the signature line is not visible in this chunk.
  ASSERT(is_static() &&
         (is_const() || (is_final() && has_trivial_initializer())));

  auto thread = Thread::Current();
  auto zone = thread->zone();
  auto initial_field_table = thread->isolate_group()->initial_field_table();

  // We can safely cache the value of the static const field in the initial
  // field table.
  auto& value = Object::Handle(
      zone, initial_field_table->At(field_id(), /*concurrent_use=*/true));
  if (value.ptr() == Object::sentinel().ptr()) {
    // Fields with trivial initializers get their initial value
    // eagerly when they are registered.
    ASSERT(is_const());
    ASSERT(has_initializer());
    ASSERT(has_nontrivial_initializer());
    value = EvaluateInitializer();
    if (!value.IsError()) {
      ASSERT(value.IsNull() || value.IsInstance());
      SetStaticConstFieldValue(value.IsNull() ? Instance::null_instance()
                                              : Instance::Cast(value));
    }
  }
  return value.ptr();
}
12496
    // NOTE(review): the first signature line is not visible in this chunk.
    bool assert_initializing_store) const {
  ASSERT(is_static());
  auto thread = Thread::Current();
  auto initial_field_table = thread->isolate_group()->initial_field_table();

  // Writers of the initial field table must hold the program lock.
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  // Only an initializing store (or an idempotent re-store) is expected,
  // unless the caller opted out of the check.
  ASSERT(initial_field_table->At(field_id()) == Object::sentinel().ptr() ||
         initial_field_table->At(field_id()) == value.ptr() ||
         !assert_initializing_store);
  initial_field_table->SetAt(field_id(),
                             value.IsNull() ? Instance::null_instance().ptr()
                                            : Instance::Cast(value).ptr(),
                             /*concurrent_use=*/true);
}
12512
  // Runs this field's initializer and returns the resulting value or error.
  // NOTE(review): the signature line is not visible in this chunk.
  ASSERT(Thread::Current()->IsDartMutatorThread());

#if !defined(DART_PRECOMPILED_RUNTIME)
  // Static const fields go through the kernel constant evaluator.
  if (is_static() && is_const()) {
    return kernel::EvaluateStaticConstFieldInitializer(*this);
  }
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

  const Function& initializer = Function::Handle(EnsureInitializerFunction());
  return DartEntry::InvokeFunction(initializer, Object::empty_array());
}
12525
12526static intptr_t GetListLength(const Object& value) {
12527 if (value.IsTypedDataBase()) {
12528 return TypedDataBase::Cast(value).Length();
12529 } else if (value.IsArray()) {
12530 return Array::Cast(value).Length();
12531 } else if (value.IsGrowableObjectArray()) {
12532 // List length is variable.
12533 return Field::kNoFixedLength;
12534 }
12535 return Field::kNoFixedLength;
12536}
12537
// Returns the in-object offset of the length slot for the given list cid.
// NOTE(review): the leading class-id check lines and the trailing
// return/unreachable lines are not visible in this chunk.
static intptr_t GetListLengthOffset(intptr_t cid) {
    return TypedData::length_offset();
  } else if (cid == kArrayCid || cid == kImmutableArrayCid) {
    return Array::length_offset();
  } else if (cid == kGrowableObjectArrayCid) {
    // List length is variable.
  }
}
12551
  // Debug string describing this field's guard state: "<?>" = no value seen
  // yet, "<*>" = guard abandoned (dynamic), otherwise class / nullability /
  // length / exactness details.
  // NOTE(review): the signature line is not visible in this chunk.
  if (guarded_cid() == kIllegalCid) {
    return "<?>";
  } else if (guarded_cid() == kDynamicCid) {
    ASSERT(!static_type_exactness_state().IsExactOrUninitialized());
    return "<*>";
  }

  Zone* zone = Thread::Current()->zone();

  const char* exactness = "";
  if (static_type_exactness_state().IsTracking()) {
    exactness =
        zone->PrintToString(" {%s}", static_type_exactness_state().ToCString());
  }

  const Class& cls =
      Class::Handle(IsolateGroup::Current()->class_table()->At(guarded_cid()));
  const char* class_name = String::Handle(cls.Name()).ToCString();

  if (IsBuiltinListClassId(guarded_cid()) && !is_nullable() && is_final()) {
    ASSERT(guarded_list_length() != kUnknownFixedLength);
    if (guarded_list_length() == kNoFixedLength) {
      return zone->PrintToString("<%s [*]%s>", class_name, exactness);
    } else {
      return zone->PrintToString(
          "<%s [%" Pd " @%" Pd "]%s>", class_name, guarded_list_length(),
          guarded_list_length_in_object_offset(), exactness);
    }
  }

  return zone->PrintToString("<%s %s%s>",
                             is_nullable() ? "nullable" : "not-nullable",
                             class_name, exactness);
}
12587
  // Initializes the guarded list-length offset via the member-function
  // pointer |setter|. NOTE(review): the signature and setter-selection
  // lines, plus one line in the if-branch, are not visible in this chunk.
  ASSERT(IsOriginal());
  if (needs_length_check() &&
      (guarded_list_length() != Field::kUnknownFixedLength)) {
    const intptr_t offset = GetListLengthOffset(guarded_cid());
    (this->*setter)(offset);
  } else {
    (this->*setter)(Field::kUnknownLengthOffset);
  }
}
12601
// Computes pending updates to a field's guard state (guarded cid,
// nullability, list length/offset, exactness) for an assigned value; each
// private setter records the new value plus a dirty bit, applied later in
// DoUpdate(). NOTE(review): the class header line and the predicate's
// declaration line are not visible in this chunk.
 public:
  FieldGuardUpdater(const Field* field, const Object& value);

    return does_guarded_cid_need_update_ || does_is_nullable_need_update_ ||
           does_list_length_and_offset_need_update_ ||
           does_static_type_exactness_state_need_update_;
  }
  void DoUpdate();

 private:
  void ReviewExactnessState();
  void ReviewGuards();

  intptr_t guarded_cid() { return guarded_cid_; }
  void set_guarded_cid(intptr_t guarded_cid) {
    guarded_cid_ = guarded_cid;
    does_guarded_cid_need_update_ = true;
  }

  bool is_nullable() { return is_nullable_; }
  void set_is_nullable(bool is_nullable) {
    is_nullable_ = is_nullable;
    does_is_nullable_need_update_ = true;
  }

  intptr_t guarded_list_length() { return list_length_; }
  void set_guarded_list_length_and_offset(
      intptr_t list_length,
      intptr_t list_length_in_object_offset) {
    list_length_ = list_length;
    list_length_in_object_offset_ = list_length_in_object_offset;
    does_list_length_and_offset_need_update_ = true;
  }

  StaticTypeExactnessState static_type_exactness_state() {
    return static_type_exactness_state_;
  }
  void set_static_type_exactness_state(StaticTypeExactnessState state) {
    static_type_exactness_state_ = state;
    does_static_type_exactness_state_need_update_ = true;
  }

  const Field* field_;
  const Object& value_;

  // Pending (possibly updated) guard state, mirrored from the field.
  intptr_t guarded_cid_;
  bool is_nullable_;
  intptr_t list_length_;
  intptr_t list_length_in_object_offset_;
  StaticTypeExactnessState static_type_exactness_state_;

  bool does_guarded_cid_need_update_ = false;
  bool does_is_nullable_need_update_ = false;
  bool does_list_length_and_offset_need_update_ = false;
  bool does_static_type_exactness_state_need_update_ = false;
};
12660
// Reviews the cid/nullability/length guards against the assigned value and
// records the required updates via the recording setters.
// NOTE(review): the second-argument lines of the three
// set_guarded_list_length_and_offset calls are not visible in this chunk.
void FieldGuardUpdater::ReviewGuards() {
  ASSERT(field_->IsOriginal());
  const intptr_t cid = value_.GetClassId();

  if (guarded_cid() == kIllegalCid) {
    // First observed value: adopt its cid and nullability.
    set_guarded_cid(cid);
    set_is_nullable(cid == kNullCid);

    // Start tracking length if needed.
    ASSERT((guarded_list_length() == Field::kUnknownFixedLength) ||
           (guarded_list_length() == Field::kNoFixedLength));
    if (field_->needs_length_check()) {
      ASSERT(guarded_list_length() == Field::kUnknownFixedLength);
      set_guarded_list_length_and_offset(GetListLength(value_),
    }

    if (FLAG_trace_field_guards) {
      THR_Print(" => %s\n", field_->GuardedPropertiesAsCString());
    }
    return;
  }

  if ((cid == guarded_cid()) || ((cid == kNullCid) && is_nullable())) {
    // Class id of the assigned value matches expected class id and nullability.

    // If we are tracking length check if it has matches.
    if (field_->needs_length_check() &&
        (guarded_list_length() != GetListLength(value_))) {
      ASSERT(guarded_list_length() != Field::kUnknownFixedLength);
      set_guarded_list_length_and_offset(Field::kNoFixedLength,
      return;
    }

    // Everything matches.
    return;
  }

  if ((cid == kNullCid) && !is_nullable()) {
    // Assigning null value to a non-nullable field makes it nullable.
    set_is_nullable(true);
  } else if ((cid != kNullCid) && (guarded_cid() == kNullCid)) {
    // Assigning non-null value to a field that previously contained only null
    // turns it into a nullable field with the given class id.
    ASSERT(is_nullable());
    set_guarded_cid(cid);
  } else {
    // Give up on tracking class id of values contained in this field.
    ASSERT(guarded_cid() != cid);
    set_guarded_cid(kDynamicCid);
    set_is_nullable(true);
  }

  // If we were tracking length drop collected feedback.
  if (field_->needs_length_check()) {
    ASSERT(guarded_list_length() != Field::kUnknownFixedLength);
    set_guarded_list_length_and_offset(Field::kNoFixedLength,
  }
}
12722
    // Depth-first search for an instantiation of |cls| among this class's
    // super types (and, unless restricted, its interfaces); when found,
    // |path| (if given) holds the chain of types from this class to |cls|.
    // NOTE(review): the leading signature lines (including the |path|
    // parameter) are not visible in this chunk.
    const Class& cls,
    bool consider_only_super_classes) const {
  if (cls.ptr() == ptr()) {
    return true;  // Found instantiation.
  }

  Class& cls2 = Class::Handle(zone);
  Type& super = Type::Handle(zone, super_type());
  if (!super.IsNull() && !super.IsObjectType()) {
    cls2 = super.type_class();
    if (path != nullptr) {
      path->Add(&super);
    }
    if (cls2.FindInstantiationOf(zone, cls, path,
                                 consider_only_super_classes)) {
      return true;  // Found instantiation.
    }
    if (path != nullptr) {
      // Backtrack: this super type did not lead to |cls|.
      path->RemoveLast();
    }
  }

  if (!consider_only_super_classes) {
    Array& super_interfaces = Array::Handle(zone, interfaces());
    for (intptr_t i = 0; i < super_interfaces.Length(); i++) {
      super ^= super_interfaces.At(i);
      cls2 = super.type_class();
      if (path != nullptr) {
        path->Add(&super);
      }
      if (cls2.FindInstantiationOf(zone, cls, path)) {
        return true;  // Found instantiation.
      }
      if (path != nullptr) {
        path->RemoveLast();
      }
    }
  }

  return false;  // Not found.
}
12767
    // Convenience overload: searches via |type|'s class.
    // NOTE(review): the leading signature lines are not visible in this
    // chunk.
    const Type& type,
    bool consider_only_super_classes) const {
  return FindInstantiationOf(zone, Class::Handle(zone, type.type_class()), path,
                             consider_only_super_classes);
}
12775
// Returns the Type by which this class implements |cls|, walking the path
// of super types / interfaces found by FindInstantiationOf and instantiating
// each step with the accumulated instantiator type arguments; Type::null()
// when this class does not implement |cls|.
TypePtr Class::GetInstantiationOf(Zone* zone, const Class& cls) const {
  if (ptr() == cls.ptr()) {
    return DeclarationType();
  }
  if (FindInstantiationOf(zone, cls, /*consider_only_super_classes=*/true)) {
    // Since [cls] is a superclass of [this], use [cls]'s declaration type.
    return cls.DeclarationType();
  }
  const auto& decl_type = Type::Handle(zone, DeclarationType());
  GrowableArray<const Type*> path(zone, 0);
  if (!FindInstantiationOf(zone, cls, &path)) {
    return Type::null();
  }
  Thread* thread = Thread::Current();
  ASSERT(!path.is_empty());
  auto& calculated_type = Type::Handle(zone, decl_type.ptr());
  auto& calculated_type_class =
      Class::Handle(zone, calculated_type.type_class());
  auto& calculated_type_args =
      TypeArguments::Handle(zone, calculated_type.arguments());
  calculated_type_args = calculated_type_args.ToInstantiatorTypeArguments(
      thread, calculated_type_class);
  for (auto* const type : path) {
    calculated_type ^= type->ptr();
    // Instantiate each step with the arguments accumulated so far.
    if (!calculated_type.IsInstantiated()) {
      calculated_type ^= calculated_type.InstantiateFrom(
          calculated_type_args, Object::null_type_arguments(), kAllFree,
          Heap::kNew);
    }
    calculated_type_class = calculated_type.type_class();
    calculated_type_args = calculated_type.arguments();
    calculated_type_args = calculated_type_args.ToInstantiatorTypeArguments(
        thread, calculated_type_class);
  }
  ASSERT_EQUAL(calculated_type.type_class_id(), cls.id());
  return calculated_type.ptr();
}
12813
12814TypePtr Class::GetInstantiationOf(Zone* zone, const Type& type) const {
12815 return GetInstantiationOf(zone, Class::Handle(zone, type.type_class()));
12816}
12817
12818void Field::SetStaticValue(const Object& value) const {
12819 auto thread = Thread::Current();
12820 ASSERT(thread->IsDartMutatorThread());
12821 ASSERT(value.IsNull() || value.IsSentinel() || value.IsInstance());
12822
12823 ASSERT(is_static()); // Valid only for static dart fields.
12824 const intptr_t id = field_id();
12825 ASSERT(id >= 0);
12826
12827 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
12828 thread->isolate()->field_table()->SetAt(id, value.ptr());
12829}
12830
// Builds the "trivially exact" exactness state for [cls] keyed by the word
// offset of its type-arguments field, falling back when the offset cannot
// be represented.
// NOTE(review): the signature line and the branch conditions are elided in
// this excerpt — confirm against the full source.
12832 const intptr_t type_arguments_offset = cls.host_type_arguments_field_offset();
12833 ASSERT(type_arguments_offset != Class::kNoTypeArguments);
12835 type_arguments_offset / kCompressedWordSize)) {
12836 return StaticTypeExactnessState::TriviallyExact(type_arguments_offset /
12838 } else {
12840 }
12841}
12842
// Renders [args] for tracing output; a null type-arguments vector prints
// as "<null>" instead of dereferencing null.
12844 return (args.ptr() == TypeArguments::null()) ? "<null>" : args.ToCString();
12845}
12846
// Computes the static-type exactness state for storing [value] into a
// location of declared type [static_type]: locates [static_type]'s class in
// the value's class hierarchy and compares type arguments along the way.
// NOTE(review): the leading signature line and several statements are
// elided in this excerpt.
12848 const Type& static_type,
12849 const Instance& value,
12850 bool print_trace /* = false */) {
12851 ASSERT(!value.IsNull()); // Should be handled by the caller.
12852 ASSERT(value.ptr() != Object::sentinel().ptr());
12853 ASSERT(value.ptr() != Object::transition_sentinel().ptr());
12854
12855 Thread* thread = Thread::Current();
12856 Zone* const zone = thread->zone();
12857 const TypeArguments& static_type_args =
12858 TypeArguments::Handle(zone, static_type.GetInstanceTypeArguments(thread));
12859
12861
12862 ASSERT(static_type.IsFinalized());
12863 const Class& cls = Class::Handle(zone, value.clazz());
12865
// Prefer the superclass chain; fall back to interfaces when the static
// type's class is only implemented, not extended.
12866 bool is_super_class = true;
12867 if (!cls.FindInstantiationOf(zone, static_type, &path,
12868 /*consider_only_super_classes=*/true)) {
12869 is_super_class = false;
12870 bool found_super_interface =
12871 cls.FindInstantiationOf(zone, static_type, &path);
12872 ASSERT(found_super_interface);
12873 }
12874
12875 // Trivial case: field has type G<T0, ..., Tn> and value has type
12876 // G<U0, ..., Un>. Check if type arguments match.
12877 if (path.is_empty()) {
12878 ASSERT(cls.ptr() == static_type.type_class());
12879 args = value.GetTypeArguments();
12880 // TODO(dartbug.com/34170) Evaluate if comparing relevant subvectors (that
12881 // disregards superclass own arguments) improves precision of the
12882 // tracking.
12883 if (args.ptr() == static_type_args.ptr()) {
12884 return TrivialTypeExactnessFor(cls);
12885 }
12886
12887 if (print_trace) {
12888 THR_Print(" expected %s got %s type arguments\n",
12889 SafeTypeArgumentsToCString(static_type_args),
12891 }
12893 }
12894
12895 // Value has type C<U0, ..., Un> and field has type G<T0, ..., Tn> and G != C.
12896 // Compute C<X0, ..., Xn> at G (Xi are free type arguments).
12897 // Path array contains a chain of immediate supertypes S0 <: S1 <: ... Sn,
12898 // such that S0 is an immediate supertype of C and Sn is G<...>.
12899 // Each Si might depend on type parameters of the previous supertype S{i-1}.
12900 // To compute C<X0, ..., Xn> at G we walk the chain backwards and
12901 // instantiate Si using type parameters of S{i-1} which gives us a type
12902 // depending on type parameters of S{i-2}.
12903 Type& type = Type::Handle(zone, path.Last()->ptr());
12904 for (intptr_t i = path.length() - 2; (i >= 0) && !type.IsInstantiated();
12905 i--) {
12906 args = path[i]->GetInstanceTypeArguments(thread, /*canonicalize=*/false);
12907 type ^= type.InstantiateFrom(args, TypeArguments::null_type_arguments(),
12909 }
12910
12911 if (type.IsInstantiated()) {
12912 // C<X0, ..., Xn> at G is fully instantiated and does not depend on
12913 // Xi. In this case just check if type arguments match.
12914 args = type.GetInstanceTypeArguments(thread, /*canonicalize=*/false);
12915 if (args.Equals(static_type_args)) {
12916 return is_super_class ? StaticTypeExactnessState::HasExactSuperClass()
12918 }
12919
12920 if (print_trace) {
12921 THR_Print(" expected %s got %s type arguments\n",
12922 SafeTypeArgumentsToCString(static_type_args),
12924 }
12925
12927 }
12928
12929 // The most complicated case: C<X0, ..., Xn> at G depends on
12930 // Xi values. To compare type arguments we would need to instantiate
12931 // it fully from value's type arguments and compare with <U0, ..., Un>.
12932 // However this would complicate fast path in the native code. To avoid this
12933 // complication we would optimize for the trivial case: we check if
12934 // C<X0, ..., Xn> at G is exactly G<X0, ..., Xn> which means we can simply
12935 // compare values type arguments (<T0, ..., Tn>) to fields type arguments
12936 // (<U0, ..., Un>) to establish if field type is exact.
12937 ASSERT(cls.IsGeneric());
12938 const intptr_t num_type_params = cls.NumTypeParameters();
12939 bool trivial_case =
12940 (num_type_params ==
12941 Class::Handle(zone, static_type.type_class()).NumTypeParameters()) &&
12942 (value.GetTypeArguments() == static_type_args.ptr());
12943 if (!trivial_case && FLAG_trace_field_guards) {
12944 THR_Print("Not a simple case: %" Pd " vs %" Pd
12945 " type parameters, %s vs %s type arguments\n",
12946 num_type_params,
12947 Class::Handle(zone, static_type.type_class()).NumTypeParameters(),
12949 TypeArguments::Handle(zone, value.GetTypeArguments())),
12950 SafeTypeArgumentsToCString(static_type_args));
12951 }
12952
// The trivial case additionally requires each argument to be the type
// parameter with the matching index (i.e. the identity instantiation).
12953 AbstractType& type_arg = AbstractType::Handle(zone);
12954 args = type.GetInstanceTypeArguments(thread, /*canonicalize=*/false);
12955 for (intptr_t i = 0; (i < num_type_params) && trivial_case; i++) {
12956 type_arg = args.TypeAt(i);
12957 if (!type_arg.IsTypeParameter() ||
12958 (TypeParameter::Cast(type_arg).index() != i)) {
12959 if (FLAG_trace_field_guards) {
12960 THR_Print(" => encountered %s at index % " Pd "\n",
12961 type_arg.ToCString(), i);
12962 }
12963 trivial_case = false;
12964 }
12965 }
12966
12967 return trivial_case ? TrivialTypeExactnessFor(cls)
12969}
12970
// Human-readable name of this exactness state, for tracing and debugging.
// NOTE(review): the signature line is elided in this excerpt.
12972 if (!IsTracking()) {
12973 return "not-tracking";
12974 } else if (!IsExactOrUninitialized()) {
12975 return "not-exact";
12976 } else if (IsTriviallyExact()) {
12977 return Thread::Current()->zone()->PrintToString(
12978 "trivially-exact(%hhu)", GetTypeArgumentsOffsetInWords());
12979 } else if (IsHasExactSuperType()) {
12980 return "has-exact-super-type";
12981 } else if (IsHasExactSuperClass()) {
12982 return "has-exact-super-class";
12983 } else {
12984 ASSERT(IsUninitialized());
12985 return "uninitialized-exactness";
12986 }
12987}
12988
// Re-evaluates the field's static-type exactness state for the pending
// store of value_: switches exactness tracking off when it can no longer
// hold, and computes the initial state on the first non-null store.
12989void FieldGuardUpdater::ReviewExactnessState() {
12990 if (!static_type_exactness_state().IsExactOrUninitialized()) {
12991 // Nothing to update.
12992 return;
12993 }
12994
12995 if (guarded_cid() == kDynamicCid) {
12996 if (FLAG_trace_field_guards) {
12997 THR_Print(
12998 " => switching off exactness tracking because guarded cid is "
12999 "dynamic\n");
13000 }
13001 set_static_type_exactness_state(StaticTypeExactnessState::NotExact());
13002 return;
13003 }
13004
13005 // If we are storing null into a field or we have an exact super type
13006 // then there is nothing to do.
13007 if (value_.IsNull() || static_type_exactness_state().IsHasExactSuperType() ||
13008 static_type_exactness_state().IsHasExactSuperClass()) {
13009 return;
13010 }
13011
13012 // If we are storing a non-null value into a field that is considered
13013 // to be trivially exact then we need to check if value has an appropriate
13014 // type.
13015 ASSERT(guarded_cid() != kNullCid);
13016
13017 const Type& field_type = Type::Cast(AbstractType::Handle(field_->type()));
13018 const Instance& instance = Instance::Cast(value_);
13019
13020 if (static_type_exactness_state().IsTriviallyExact()) {
// Trivially exact means the value's type arguments must be pointer-equal
// to the field type's instance type arguments.
13021 const TypeArguments& args =
13022 TypeArguments::Handle(instance.GetTypeArguments());
13023 const TypeArguments& field_type_args = TypeArguments::Handle(
13024 field_type.GetInstanceTypeArguments(Thread::Current()));
13025 if (args.ptr() == field_type_args.ptr()) {
13026 return;
13027 }
13028
13029 if (FLAG_trace_field_guards) {
13030 THR_Print(" expected %s got %s type arguments\n",
13031 field_type_args.ToCString(), args.ToCString());
13032 }
13033
13034 set_static_type_exactness_state(StaticTypeExactnessState::NotExact());
13035 return;
13036 }
13037
// First non-null store: compute the state from the value's actual type.
13038 ASSERT(static_type_exactness_state().IsUninitialized());
13039 set_static_type_exactness_state(StaticTypeExactnessState::Compute(
13040 field_type, instance, FLAG_trace_field_guards));
13041 return;
13042}
13043
// Snapshots the field's current guard state (cid, nullability, list length,
// exactness) and immediately reviews it against the value being stored.
// NOTE(review): the constructor's signature line is elided in this excerpt.
13045 : field_(field),
13046 value_(value),
13047 guarded_cid_(field->guarded_cid()),
13048 is_nullable_(field->is_nullable()),
13049 list_length_(field->guarded_list_length()),
13050 list_length_in_object_offset_(
13051 field->guarded_list_length_in_object_offset()),
13052 static_type_exactness_state_(field->static_type_exactness_state()) {
13053 ReviewGuards();
13054 ReviewExactnessState();
13055}
13056
// Writes back only the guard properties that the review pass flagged as
// needing an update.
// NOTE(review): the signature line and one statement are elided in this
// excerpt.
13058 if (does_guarded_cid_need_update_) {
13059 field_->set_guarded_cid(guarded_cid_);
13060 }
13061 if (does_is_nullable_need_update_) {
13062 field_->set_is_nullable(is_nullable_);
13063 }
13064 if (does_list_length_and_offset_need_update_) {
13065 field_->set_guarded_list_length(list_length_);
13067 list_length_in_object_offset_);
13068 }
13069 if (does_static_type_exactness_state_need_update_) {
13070 field_->set_static_type_exactness_state(static_type_exactness_state_);
13071 }
13072}
13073
// Updates this field's guard state to account for storing [value] into it.
// No-op when field guards are disabled, the field is already dynamic, or a
// null is stored into an already-nullable field. Any actual guard change is
// applied with mutators stopped and dependent code deoptimized.
13074void Field::RecordStore(const Object& value) const {
13075 ASSERT(IsOriginal());
13076 Thread* const thread = Thread::Current();
13077 if (!thread->isolate_group()->use_field_guards()) {
13078 return;
13079 }
13080
13081 // We should never try to record a sentinel.
13082 ASSERT(value.ptr() != Object::sentinel().ptr());
13083
13084 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
13085 if ((guarded_cid() == kDynamicCid) ||
13086 (is_nullable() && value.ptr() == Object::null())) {
13087 // Nothing to do: the field is not guarded or we are storing null into
13088 // a nullable field.
13089 return;
13090 }
13091
13092 if (FLAG_trace_field_guards) {
13093 THR_Print("Store %s %s <- %s\n", ToCString(), GuardedPropertiesAsCString(),
13094 value.ToCString());
13095 }
13096
13097 FieldGuardUpdater updater(this, value);
13098 if (updater.IsUpdateNeeded()) {
13099 if (FLAG_trace_field_guards) {
13101 }
13102 // Nobody else could have updated guard state since we are holding write
13103 // program lock. But we need to ensure we stop mutators as we update
13104 // guard state as we can't have optimized code running with updated fields.
13105 auto isolate_group = IsolateGroup::Current();
13106 isolate_group->RunWithStoppedMutators([&]() {
13107 updater.DoUpdate();
13108 DeoptimizeDependentCode(/*are_mutators_stopped=*/true);
13109 });
13110 }
13111}
13112
// Widens this field's guards to the most permissive state (dynamic cid,
// nullable, no fixed list length) and stops exactness tracking, then drops
// code that depended on the previous guards.
// NOTE(review): several statements are elided in this excerpt.
13113void Field::ForceDynamicGuardedCidAndLength() const {
13114 if (!is_unboxed()) {
13116 set_is_nullable(true);
13117 }
13120 if (static_type_exactness_state().IsTracking()) {
13122 }
13123 // Drop any code that relied on the above assumptions.
13125}
13126
// Returns the script's resolved url. Under the precompiler the stored value
// may be wrapped, so it is unwrapped/cast before returning.
// NOTE(review): the precompiler branch's middle line is elided in this
// excerpt.
13127StringPtr Script::resolved_url() const {
13128#if defined(DART_PRECOMPILER)
13129 return String::RawCast(
13131#else
13132 return untag()->resolved_url();
13133#endif
13134}
13135
13136bool Script::HasSource() const {
13137 return untag()->source() != String::null();
13138}
13139
13140StringPtr Script::Source() const {
13141 return untag()->source();
13142}
13143
// Whether this script belongs to a "dart:" (or private "dart:_") library,
// judged by its url scheme.
// NOTE(review): the signature line is elided in this excerpt.
13145 const String& script_url = String::Handle(url());
13146 return (script_url.StartsWith(Symbols::DartScheme()) ||
13147 script_url.StartsWith(Symbols::DartSchemePrivate()));
13148}
13149
13150#if !defined(DART_PRECOMPILED_RUNTIME)
// Recovers this script's source text from the given kernel binary and
// attaches it via set_source().
// NOTE(review): the lines constructing the uri/source handles are elided in
// this excerpt.
13151void Script::LoadSourceFromKernel(const uint8_t* kernel_buffer,
13152 intptr_t kernel_buffer_len) const {
13155 kernel_buffer, kernel_buffer_len, uri));
13156 set_source(source);
13157}
13158
// Wires this script to its kernel program info: records the script's index
// in the kernel file, its line-starts table, and (outside product mode) its
// constant coverage; debug positions start out as the empty array.
// NOTE(review): the leading signature line is elided in this excerpt.
13160 const KernelProgramInfo& info,
13161 intptr_t script_index,
13162 const TypedData& line_starts,
13163 const TypedDataView& constant_coverage) const {
13164 StoreNonPointer(&untag()->kernel_script_index_, script_index);
13165 untag()->set_kernel_program_info(info.ptr());
13166 untag()->set_line_starts(line_starts.ptr());
13167 untag()->set_debug_positions(Array::null_array().ptr());
13168 NOT_IN_PRODUCT(untag()->set_constant_coverage(constant_coverage.ptr()));
13169}
13170#endif
13171
// Builds the service-protocol line number table: a flat array of
// [null, line, tokenPos, column, tokenPos, column, ...] groups, one group
// per line that has debug positions. Returns just the leading null when no
// line starts are available (AOT).
// NOTE(review): a few statements (e.g. the declaration of the result array
// and of `end`) are elided in this excerpt.
13172GrowableObjectArrayPtr Script::GenerateLineNumberArray() const {
13173 Zone* zone = Thread::Current()->zone();
13174 const GrowableObjectArray& info =
13176 const Object& line_separator = Object::Handle(zone);
13177 if (line_starts() == TypedData::null()) {
13178 // Scripts in the AOT snapshot do not have a line starts array.
13179 // A well-formed line number array has a leading null.
13180 info.Add(line_separator); // New line.
13181 return info.ptr();
13182 }
13183#if !defined(DART_PRECOMPILED_RUNTIME)
13184 Smi& value = Smi::Handle(zone);
13185 const TypedData& line_starts_data = TypedData::Handle(zone, line_starts());
13186 intptr_t line_count = line_starts_data.Length();
13187 const Array& debug_positions_array = Array::Handle(debug_positions());
13188 intptr_t token_count = debug_positions_array.Length();
13189 int token_index = 0;
13190
13191 kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone);
13192 for (int line_index = 0; line_index < line_count; ++line_index) {
13193 intptr_t start = line_starts_reader.At(line_index);
13194 // Output the rest of the tokens if we have no next line.
13196 if (line_index + 1 < line_count) {
13197 end = line_starts_reader.At(line_index + 1);
13198 }
13199 bool first = true;
13200 while (token_index < token_count) {
13201 value ^= debug_positions_array.At(token_index);
13202 intptr_t debug_position = value.Value();
// Positions at or past the next line's start belong to a later line.
13203 if (debug_position >= end) break;
13204
13205 if (first) {
13206 info.Add(line_separator); // New line.
13207 value = Smi::New(line_index + 1); // Line number.
13208 info.Add(value);
13209 first = false;
13210 }
13211
13212 value ^= debug_positions_array.At(token_index);
13213 info.Add(value); // Token position.
13214 value = Smi::New(debug_position - start + 1); // Column.
13215 info.Add(value);
13216 ++token_index;
13217 }
13218 }
13219#endif // !defined(DART_PRECOMPILED_RUNTIME)
13220 return info.ptr();
13221}
13222
// Returns the maximum token position in this script, computing and caching
// it from the line-starts table on first use. Yields kNoSource when no line
// starts are available (e.g. precompiled runtime).
// NOTE(review): the signature line and the cached-value return statement
// are elided in this excerpt.
13224#if !defined(DART_PRECOMPILED_RUNTIME)
13225 if (HasCachedMaxPosition()) {
13228 untag()->flags_and_max_position_));
13229 }
13230 auto const zone = Thread::Current()->zone();
13231 if (!HasCachedMaxPosition() && line_starts() != TypedData::null()) {
13232 const auto& starts = TypedData::Handle(zone, line_starts());
13233 kernel::KernelLineStartsReader reader(starts, zone);
13234 const intptr_t max_position = reader.MaxPosition();
13235 SetCachedMaxPosition(max_position);
13236 SetHasCachedMaxPosition(true);
13237 return TokenPosition::Deserialize(max_position);
13238 }
13239#endif
13240 return TokenPosition::kNoSource;
13241}
13242
13243void Script::set_url(const String& value) const {
13244 untag()->set_url(value.ptr());
13245}
13246
13247void Script::set_resolved_url(const String& value) const {
13248 untag()->set_resolved_url(value.ptr());
13249}
13250
13251void Script::set_source(const String& value) const {
13252 untag()->set_source(value.ptr());
13253}
13254
13255#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
13256TypedDataViewPtr Script::constant_coverage() const {
13257 return untag()->constant_coverage();
13258}
13259#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
13260
13261void Script::set_debug_positions(const Array& value) const {
13262 untag()->set_debug_positions(value.ptr());
13263}
13264
13265TypedDataPtr Script::line_starts() const {
13266 return untag()->line_starts();
13267}
13268
// Returns the array of token positions that carry debug information,
// creating it on demand outside the precompiled runtime.
// NOTE(review): the statement that populates the lazily-created array is
// elided in this excerpt.
13269ArrayPtr Script::debug_positions() const {
13270#if !defined(DART_PRECOMPILED_RUNTIME)
13271 Array& debug_positions_array = Array::Handle(untag()->debug_positions());
13272 if (debug_positions_array.IsNull()) {
13273 // This is created lazily. Now we need it.
13275 }
13276#endif // !defined(DART_PRECOMPILED_RUNTIME)
13277 return untag()->debug_positions();
13278}
13279
13280#if !defined(DART_PRECOMPILED_RUNTIME)
// Whether the maximum token position has already been computed and cached
// in flags_and_max_position_.
// NOTE(review): the bit-field decode expression is elided in this excerpt.
13281bool Script::HasCachedMaxPosition() const {
13283 untag()->flags_and_max_position_);
13284}
13285
// Sets the "max position is cached" flag bit inside
// flags_and_max_position_.
// NOTE(review): the bit-field update expression is elided in this excerpt.
13286void Script::SetHasCachedMaxPosition(bool value) const {
13287 StoreNonPointer(&untag()->flags_and_max_position_,
13289 value, untag()->flags_and_max_position_));
13290}
13291
// Stores the computed maximum token position into the packed
// flags_and_max_position_ field.
// NOTE(review): the bit-field update expression is elided in this excerpt.
13292void Script::SetCachedMaxPosition(intptr_t value) const {
13293 StoreNonPointer(&untag()->flags_and_max_position_,
13295 value, untag()->flags_and_max_position_));
13296}
13297#endif
13298
13299void Script::set_load_timestamp(int64_t value) const {
13300 StoreNonPointer(&untag()->load_timestamp_, value);
13301}
13302
// Whether [token_pos] can plausibly belong to this script: real positions
// must not exceed the script's maximum position, and anything is accepted
// when no usable maximum exists.
// NOTE(review): the signature line is elided in this excerpt.
13304 const TokenPosition& max_position = MaxPosition();
13305 // We may end up with scripts that have the empty string as a source file
13306 // in testing and the like, so allow any token position when the max position
13307 // is 0 as well as when it is kNoSource.
13308 return !max_position.IsReal() || !token_pos.IsReal() ||
13309 max_position.Pos() == 0 || token_pos <= max_position;
13310}
13311
13312#if !defined(DART_PRECOMPILED_RUNTIME)
// Returns true iff [c] is an ASCII letter (A-Z or a-z).
static bool IsLetter(int32_t c) {
  const int32_t folded = c | 0x20;  // Maps ASCII uppercase onto lowercase.
  return ('a' <= folded) && (folded <= 'z');
}
13316
// Returns true iff [c] is an ASCII decimal digit (0-9).
static bool IsDecimalDigit(int32_t c) {
  return !((c < '0') || (c > '9'));
}
13320
13321static bool IsIdentStartChar(int32_t c) {
13322 return IsLetter(c) || (c == '_') || (c == '$');
13323}
13324
13325static bool IsIdentChar(int32_t c) {
13326 return IsLetter(c) || IsDecimalDigit(c) || (c == '_') || (c == '$');
13327}
13328#endif // !defined(DART_PRECOMPILED_RUNTIME)
13329
// Maps [token_pos] to a 1-based line (and, when requested, column) using
// the kernel line-starts table. Returns false when no mapping is possible
// (AOT, unreal positions, or no line starts).
// NOTE(review): the leading signature line is elided in this excerpt.
13331 intptr_t* line,
13332 intptr_t* column) const {
13333 ASSERT(line != nullptr);
13334#if defined(DART_PRECOMPILED_RUNTIME)
13335 // Scripts in the AOT snapshot do not have a line starts array.
13336 return false;
13337#else
13338 if (!token_pos.IsReal()) return false;
13339
13340 auto const zone = Thread::Current()->zone();
13341 const TypedData& line_starts_data = TypedData::Handle(zone, line_starts());
13342 if (line_starts_data.IsNull()) return false;
13343 kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone);
13344 return line_starts_reader.LocationForPosition(token_pos.Pos(), line, column);
13345#endif // defined(DART_PRECOMPILED_RUNTIME)
13346}
13347
13348intptr_t Script::GetTokenLength(const TokenPosition& token_pos) const {
13349#if defined(DART_PRECOMPILED_RUNTIME)
13350 // Scripts in the AOT snapshot do not have their source.
13351 return -1;
13352#else
13353 if (!HasSource() || !token_pos.IsReal()) return -1;
13354 auto const zone = Thread::Current()->zone();
13355 // We don't explicitly save this data: Load the source and find it from there.
13356 const String& source = String::Handle(zone, Source());
13357 const intptr_t start = token_pos.Pos();
13358 if (start >= source.Length()) return -1; // Can't determine token_len.
13359 intptr_t end = start;
13360 if (IsIdentStartChar(source.CharAt(end++))) {
13361 for (; end < source.Length(); ++end) {
13362 if (!IsIdentChar(source.CharAt(end))) break;
13363 }
13364 }
13365 return end - start;
13366#endif
13367}
13368
13369bool Script::TokenRangeAtLine(intptr_t line_number,
13370 TokenPosition* first_token_index,
13371 TokenPosition* last_token_index) const {
13372 ASSERT(first_token_index != nullptr && last_token_index != nullptr);
13373#if defined(DART_PRECOMPILED_RUNTIME)
13374 // Scripts in the AOT snapshot do not have a line starts array.
13375 return false;
13376#else
13377 // Line numbers are 1-indexed.
13378 if (line_number <= 0) return false;
13379 Zone* zone = Thread::Current()->zone();
13380 const TypedData& line_starts_data = TypedData::Handle(zone, line_starts());
13381 kernel::KernelLineStartsReader line_starts_reader(line_starts_data, zone);
13382 if (!line_starts_reader.TokenRangeAtLine(line_number, first_token_index,
13383 last_token_index)) {
13384 return false;
13385 }
13386#if defined(DEBUG)
13387 intptr_t source_length;
13388 if (!HasSource()) {
13389 Smi& value = Smi::Handle(zone);
13390 const Array& debug_positions_array = Array::Handle(zone, debug_positions());
13391 value ^= debug_positions_array.At(debug_positions_array.Length() - 1);
13392 source_length = value.Value();
13393 } else {
13394 const String& source = String::Handle(zone, Source());
13395 source_length = source.Length();
13396 }
13397 ASSERT(last_token_index->Serialize() <= source_length);
13398#endif
13399 return true;
13400#endif // !defined(DART_PRECOMPILED_RUNTIME)
13401}
13402
13403// Returns the index in the given source string for the given (1-based) absolute
13404// line and column numbers. The line and column offsets are used to calculate
13405// the absolute line and column number for the starting index in the source.
13406//
13407// If the given line number is outside the range of lines represented by the
13408// source, the given column number invalid for the given line, or a negative
13409// starting index is given, a negative value is returned to indicate failure.
13410static intptr_t GetRelativeSourceIndex(const String& src,
13411 intptr_t line,
13412 intptr_t line_offset = 0,
13413 intptr_t column = 1,
13414 intptr_t column_offset = 0,
13415 intptr_t starting_index = 0) {
// Reject positions that lie before the region described by the offsets.
13416 if (starting_index < 0 || line < 1 || column < 1 || line <= line_offset ||
13417 (line == line_offset + 1 && column <= column_offset)) {
13418 return -1;
13419 }
13420 intptr_t len = src.Length();
13421 intptr_t current_line = line_offset + 1;
13422 intptr_t current_index = starting_index;
// Phase 1: scan forward to the start of the requested line, treating
// '\n', '\r', and the two-character '\r\n' each as one terminator.
13423 for (; current_index < len; current_index++) {
13424 if (current_line == line) {
13425 break;
13426 }
13427 const uint16_t c = src.CharAt(current_index);
13428 if (c == '\n' || c == '\r') {
13429 current_line++;
13430 }
13431 if (c == '\r' && current_index + 1 < len &&
13432 src.CharAt(current_index + 1) == '\n') {
13433 // \r\n is treated as a single line terminator.
13434 current_index++;
13435 }
13436 }
13437 if (current_line != line) {
13438 return -1;
13439 }
13440 // Only adjust with column offset when still on the first line.
13441 intptr_t current_column = 1 + (line == line_offset + 1 ? column_offset : 0);
// Phase 2: advance to the requested column, stopping at a line terminator.
13442 for (; current_index < len; current_index++, current_column++) {
13443 if (current_column == column) {
13444 return current_index;
13445 }
13446 const uint16_t c = src.CharAt(current_index);
13447 if (c == '\n' || c == '\r') {
13448 break;
13449 }
13450 }
13451 // Check for a column value representing the source's end.
13452 if (current_column == column) {
13453 return current_index;
13454 }
13455 return -1;
13456}
13457
13458StringPtr Script::GetLine(intptr_t line_number, Heap::Space space) const {
13459 if (!HasSource()) {
13460 return Symbols::OptimizedOut().ptr();
13461 }
13462 const String& src = String::Handle(Source());
13463 const intptr_t start =
13464 GetRelativeSourceIndex(src, line_number, line_offset());
13465 if (start < 0) {
13466 return Symbols::Empty().ptr();
13467 }
13468 intptr_t end = start;
13469 for (; end < src.Length(); end++) {
13470 const uint16_t c = src.CharAt(end);
13471 if (c == '\n' || c == '\r') {
13472 break;
13473 }
13474 }
13475 return String::SubString(src, start, end - start, space);
13476}
13477
13478StringPtr Script::GetSnippet(intptr_t from_line,
13479 intptr_t from_column,
13480 intptr_t to_line,
13481 intptr_t to_column) const {
13482 if (!HasSource()) {
13483 return Symbols::OptimizedOut().ptr();
13484 }
13485 const String& src = String::Handle(Source());
13486 const intptr_t start = GetRelativeSourceIndex(src, from_line, line_offset(),
13487 from_column, col_offset());
13488 // Lines and columns are 1-based, so need to subtract one to get offsets.
13489 const intptr_t end = GetRelativeSourceIndex(
13490 src, to_line, from_line - 1, to_column, from_column - 1, start);
13491 // Only need to check end, because a negative start results in a negative end.
13492 if (end < 0) {
13493 return String::null();
13494 }
13495 return String::SubString(src, start, end - start);
13496}
13497
13498ScriptPtr Script::New(const String& url, const String& source) {
13499 return Script::New(url, url, source);
13500}
13501
// Allocates a new old-space Script with interned url strings and the given
// source; the load timestamp is recorded unless suppressed for tests.
// NOTE(review): one statement after the signature is elided in this
// excerpt.
13502ScriptPtr Script::New(const String& url,
13503 const String& resolved_url,
13504 const String& source) {
13506 Thread* thread = Thread::Current();
13507 Zone* zone = thread->zone();
13508 const Script& result =
13509 Script::Handle(zone, Object::Allocate<Script>(Heap::kOld));
13510 result.set_url(String::Handle(zone, Symbols::New(thread, url)));
13511 result.set_resolved_url(
13512 String::Handle(zone, Symbols::New(thread, resolved_url)));
13513 result.set_source(source);
13514 NOT_IN_PRECOMPILED(ASSERT_EQUAL(result.HasCachedMaxPosition(), false));
13515 ASSERT_EQUAL(result.kernel_script_index(), 0);
13516 if (FLAG_remove_script_timestamps_for_test) {
13517 ASSERT_EQUAL(result.load_timestamp(), 0);
13518 } else {
13519 result.set_load_timestamp(OS::GetCurrentTimeMillis());
13520 }
13521 return result.ptr();
13522}
13523
13524const char* Script::ToCString() const {
13525 const String& name = String::Handle(url());
13526 return OS::SCreate(Thread::Current()->zone(), "Script(%s)", name.ToCString());
13527}
13528
// Linearly searches all libraries in the isolate group's object store for
// the one whose loaded scripts contain this script; returns null when none
// does.
// NOTE(review): the declaration of the libraries handle is elided in this
// excerpt.
13529LibraryPtr Script::FindLibrary() const {
13530 Thread* thread = Thread::Current();
13531 Zone* zone = thread->zone();
13532 auto isolate_group = thread->isolate_group();
13534 zone, isolate_group->object_store()->libraries());
13535 Library& lib = Library::Handle(zone);
13536 Array& scripts = Array::Handle(zone);
13537 for (intptr_t i = 0; i < libs.Length(); i++) {
13538 lib ^= libs.At(i);
13539 scripts = lib.LoadedScripts();
13540 for (intptr_t j = 0; j < scripts.Length(); j++) {
13541 if (scripts.At(j) == ptr()) {
13542 return lib.ptr();
13543 }
13544 }
13545 }
13546 return Library::null();
13547}
13548
// Iterates over a library's dictionary array. The trailing element of the
// dictionary is a Smi holding the used-entry count, so it is excluded from
// size_; the cursor is advanced past any leading empty slots.
// NOTE(review): the constructor's signature line is elided in this excerpt.
13550 : array_(Array::Handle(library.dictionary())),
13551 // Last element in array is a Smi indicating the number of entries used.
13552 size_(Array::Handle(library.dictionary()).Length() - 1),
13553 next_ix_(0) {
13554 MoveToNextObject();
13555}
13556
// Returns the current dictionary entry and advances to the next non-null
// one; must only be called while HasNext() is true.
// NOTE(review): the signature line is elided in this excerpt.
13558 ASSERT(HasNext());
13559 int ix = next_ix_++;
13560 MoveToNextObject();
13561 ASSERT(array_.At(ix) != Object::null());
13562 return array_.At(ix);
13563}
13564
13565void DictionaryIterator::MoveToNextObject() {
13566 Object& obj = Object::Handle(array_.At(next_ix_));
13567 while (obj.IsNull() && HasNext()) {
13568 next_ix_++;
13569 obj = array_.At(next_ix_);
13570 }
13571}
13572
// Iterates over the classes in a library's dictionary; when kIteratePrivate
// is requested the library's top-level class is yielded as well (last).
// NOTE(review): the constructor's signature line is elided in this excerpt.
13574 IterationKind kind)
13575 : DictionaryIterator(library),
13576 toplevel_class_(Class::Handle((kind == kIteratePrivate)
13577 ? library.toplevel_class()
13578 : Class::null())) {
13579 MoveToNextClass();
13580}
13581
// Returns the next class from the dictionary; once the dictionary is
// exhausted the (pending) top-level class is returned exactly once.
// NOTE(review): the signature line is elided in this excerpt.
13583 ASSERT(HasNext());
13584 Class& cls = Class::Handle();
13585 if (next_ix_ < size_) {
13586 int ix = next_ix_++;
13587 cls ^= array_.At(ix);
13588 MoveToNextClass();
13589 return cls.ptr();
13590 }
// Dictionary exhausted: hand out the top-level class and clear it so
// HasNext() becomes false afterwards.
13591 ASSERT(!toplevel_class_.IsNull());
13592 cls = toplevel_class_.ptr();
13593 toplevel_class_ = Class::null();
13594 return cls.ptr();
13595}
13596
13597void ClassDictionaryIterator::MoveToNextClass() {
13598 Object& obj = Object::Handle();
13599 while (next_ix_ < size_) {
13600 obj = array_.At(next_ix_);
13601 if (obj.IsClass()) {
13602 return;
13603 }
13604 next_ix_++;
13605 }
13606}
13607
// Reports a compile-time error for a library whose import count no longer
// fits in the 16-bit num_imports_ field; does not return.
// NOTE(review): the first line of the Report call is elided in this
// excerpt.
13608static void ReportTooManyImports(const Library& lib) {
13609 const String& url = String::Handle(lib.url());
13611 TokenPosition::kNoSource, Report::AtLocation,
13612 "too many imports in library '%s'", url.ToCString());
13613 UNREACHABLE();
13614}
13615
// Whether this library's url uses the "dart:" (or private "dart:_") scheme.
// NOTE(review): the signature line is elided in this excerpt — confirm the
// method name in object.h.
13617 String& url_str = Thread::Current()->StringHandle();
13618 url_str = url();
13619 return url_str.StartsWith(Symbols::DartScheme()) ||
13620 url_str.StartsWith(Symbols::DartSchemePrivate());
13621}
13622
13623void Library::set_num_imports(intptr_t value) const {
13624 if (!Utils::IsUint(16, value)) {
13625 ReportTooManyImports(*this);
13626 }
13627 StoreNonPointer(&untag()->num_imports_, value);
13628}
13629
13630void Library::set_name(const String& name) const {
13631 ASSERT(name.IsSymbol());
13632 untag()->set_name(name.ptr());
13633}
13634
13635void Library::set_url(const String& url) const {
13636 untag()->set_url(url.ptr());
13637}
13638
13639void Library::set_private_key(const String& key) const {
13640 untag()->set_private_key(key.ptr());
13641}
13642
13643#if !defined(DART_PRECOMPILED_RUNTIME)
13645 untag()->set_kernel_program_info(info.ptr());
13646}
13647
// Returns the kernel binary blob for this library, looked up by its index
// in the kernel program info.
// NOTE(review): the line obtaining the program info handle is elided in
// this excerpt.
13648TypedDataViewPtr Library::KernelLibrary() const {
13650 return info.KernelLibrary(kernel_library_index());
13651}
13652
// Returns this library's start offset within the kernel binary.
// NOTE(review): the signature and the program-info lookup lines are elided
// in this excerpt.
13655 return info.KernelLibraryStartOffset(kernel_library_index());
13656}
13657#endif
13658
13659void Library::set_loading_unit(const LoadingUnit& value) const {
13660 untag()->set_loading_unit(value.ptr());
13661}
13662
13663void Library::SetName(const String& name) const {
13664 // Only set name once.
13665 ASSERT(!Loaded());
13666 set_name(name);
13667}
13668
// Transitions the library's load state to kLoadInProgress.
// NOTE(review): the signature line is elided in this excerpt.
13670 // Must not already be in the process of being loaded.
13671 ASSERT(untag()->load_state_ <= UntaggedLibrary::kLoadRequested);
13672 StoreNonPointer(&untag()->load_state_, UntaggedLibrary::kLoadInProgress);
13673}
13674
// Transitions the library's load state from kAllocated to kLoadRequested.
// NOTE(review): the signature line is elided in this excerpt.
13676 // Must not be already loaded.
13677 ASSERT(untag()->load_state_ == UntaggedLibrary::kAllocated);
13678 StoreNonPointer(&untag()->load_state_, UntaggedLibrary::kLoadRequested);
13679}
13680
// Transitions the library's load state to kLoaded.
// NOTE(review): the signature and assertion lines are elided in this
// excerpt.
13682 // Should not be already loaded or just allocated.
13684 StoreNonPointer(&untag()->load_state_, UntaggedLibrary::kLoaded);
13685}
13686
// Records the kernel offset of [declaration]'s (still unevaluated)
// annotations in this library's metadata map; evaluation happens lazily in
// GetMetadata. Not available in the precompiled runtime.
// NOTE(review): one statement after obtaining the thread is elided in this
// excerpt.
13687void Library::AddMetadata(const Object& declaration,
13688 intptr_t kernel_offset) const {
13689#if defined(DART_PRECOMPILED_RUNTIME)
13690 UNREACHABLE();
13691#else
13692 Thread* thread = Thread::Current();
13694
13695 MetadataMap map(metadata());
13696 map.UpdateOrInsert(declaration, Smi::Handle(Smi::New(kernel_offset)));
13697 set_metadata(map.Release());
13698#endif // defined(DART_PRECOMPILED_RUNTIME)
13699}
13700
// Returns the evaluated metadata annotations for [declaration]. A Smi in
// the metadata map is an unevaluated kernel offset: it is evaluated here
// and the result cached back (guarded against racing evaluators). Returns
// the empty array when no metadata exists or in the precompiled runtime.
13701ObjectPtr Library::GetMetadata(const Object& declaration) const {
13702#if defined(DART_PRECOMPILED_RUNTIME)
13703 return Object::empty_array().ptr();
13704#else
13705 RELEASE_ASSERT(declaration.IsClass() || declaration.IsField() ||
13706 declaration.IsFunction() || declaration.IsLibrary() ||
13707 declaration.IsTypeParameter() || declaration.IsNamespace());
13708
13709 auto thread = Thread::Current();
13710 auto zone = thread->zone();
13711
13712 if (declaration.IsLibrary()) {
13713 // Ensure top-level class is loaded as it may contain annotations of
13714 // a library.
13715 const auto& cls = Class::Handle(zone, toplevel_class());
13716 if (!cls.IsNull()) {
13717 cls.EnsureDeclarationLoaded();
13718 }
13719 }
// Look up under the read lock only; evaluation happens outside the lock.
13720 Object& value = Object::Handle(zone);
13721 {
13722 SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
13723 MetadataMap map(metadata());
13724 value = map.GetOrNull(declaration);
13725 set_metadata(map.Release());
13726 }
13727 if (value.IsNull()) {
13728 // There is no metadata for this object.
13729 return Object::empty_array().ptr();
13730 }
13731 if (!value.IsSmi()) {
13732 // Metadata is already evaluated.
13733 ASSERT(value.IsArray());
13734 return value.ptr();
13735 }
13736 const auto& smi_value = Smi::Cast(value);
13737 intptr_t kernel_offset = smi_value.Value();
13738 ASSERT(kernel_offset > 0);
13739 const auto& evaluated_value = Object::Handle(
13740 zone, kernel::EvaluateMetadata(
13741 *this, kernel_offset,
13742 /* is_annotations_offset = */ declaration.IsLibrary() ||
13743 declaration.IsNamespace()));
13744 if (evaluated_value.IsArray() || evaluated_value.IsNull()) {
13745 ASSERT(evaluated_value.ptr() != Object::empty_array().ptr());
13746 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
13747 MetadataMap map(metadata());
// Only cache if no other thread evaluated and stored a result first.
13748 if (map.GetOrNull(declaration) == smi_value.ptr()) {
13749 map.UpdateOrInsert(declaration, evaluated_value);
13750 } else {
13751 ASSERT(map.GetOrNull(declaration) == evaluated_value.ptr());
13752 }
13753 set_metadata(map.Release());
13754 }
13755 return evaluated_value.ptr();
13756#endif // defined(DART_PRECOMPILED_RUNTIME)
13757}
13758
13759#if !defined(DART_PRECOMPILED_RUNTIME)
13760static bool HasPragma(const Object& declaration) {
13761 return (declaration.IsClass() && Class::Cast(declaration).has_pragma()) ||
13762 (declaration.IsFunction() &&
13763 Function::Cast(declaration).has_pragma()) ||
13764 (declaration.IsField() && Field::Cast(declaration).has_pragma());
13765}
13766
  Object& declaration = Object::Handle();
  // NOTE(review): this excerpt is missing the enclosing signature (likely
  // `void Library::EvaluatePragmas() {`) and the initializer of
  // `declarations` (likely
  // `GrowableObjectArray::Handle(GrowableObjectArray::New());`) —
  // confirm against upstream object.cc.
  const GrowableObjectArray& declarations =
  {
    // Collect, under the program read lock, every metadata key that carries
    // a pragma; evaluation happens after the lock is released since it may
    // run Dart code.
    auto thread = Thread::Current();
    SafepointReadRwLocker ml(thread, thread->isolate_group()->program_lock());
    MetadataMap map(metadata());
    MetadataMap::Iterator it(&map);
    while (it.MoveNext()) {
      const intptr_t entry = it.Current();
      ASSERT(entry != -1);
      declaration = map.GetKey(entry);
      if (HasPragma(declaration)) {
        declarations.Add(declaration);
      }
    }
    set_metadata(map.Release());
  }
  // Evaluate (and cache) the metadata of each pragma-bearing declaration.
  for (intptr_t i = 0; i < declarations.Length(); ++i) {
    declaration = declarations.At(i);
    GetMetadata(declaration);
  }
}
13791
// Copies all (already evaluated) pragma annotations from `old_lib`'s
// metadata map into this library's map. Used during hot reload.
void Library::CopyPragmas(const Library& old_lib) {
  auto thread = Thread::Current();
  SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
  MetadataMap new_map(metadata());
  MetadataMap old_map(old_lib.metadata());
  Object& declaration = Object::Handle();
  // NOTE(review): a line is missing from this excerpt here (likely
  // `Object& value = Object::Handle();`) — confirm against upstream.
  MetadataMap::Iterator it(&old_map);
  while (it.MoveNext()) {
    const intptr_t entry = it.Current();
    ASSERT(entry != -1);
    declaration = old_map.GetKey(entry);
    if (HasPragma(declaration)) {
      value = old_map.GetPayload(entry, 0);
      ASSERT(!value.IsNull());
      // Pragmas should be evaluated during hot reload phase 1
      // (when checkpointing libraries).
      ASSERT(!value.IsSmi());
      new_map.UpdateOrInsert(declaration, value);
    }
  }
  old_lib.set_metadata(old_map.Release());
  set_metadata(new_map.Release());
}
13816#endif // !defined(DART_PRECOMPILED_RUNTIME)
13817
13818static bool ShouldBePrivate(const String& name) {
13819 return (name.Length() >= 1 && name.CharAt(0) == '_') ||
13820 (name.Length() >= 5 &&
13821 (name.CharAt(4) == '_' &&
13822 (name.CharAt(0) == 'g' || name.CharAt(0) == 's') &&
13823 name.CharAt(1) == 'e' && name.CharAt(2) == 't' &&
13824 name.CharAt(3) == ':'));
13825}
13826
13827void Library::RehashDictionary(const Array& old_dict,
13828 intptr_t new_dict_size) const {
13829 intptr_t old_dict_size = old_dict.Length() - 1;
13830 const Array& new_dict =
13831 Array::Handle(Array::New(new_dict_size + 1, Heap::kOld));
13832 // Rehash all elements from the original dictionary
13833 // to the newly allocated array.
13834 Object& entry = Class::Handle();
13835 String& entry_name = String::Handle();
13836 Object& new_entry = Object::Handle();
13837 intptr_t used = 0;
13838 for (intptr_t i = 0; i < old_dict_size; i++) {
13839 entry = old_dict.At(i);
13840 if (!entry.IsNull()) {
13841 entry_name = entry.DictionaryName();
13842 ASSERT(!entry_name.IsNull());
13843 const intptr_t hash = entry_name.Hash();
13844 intptr_t index = hash % new_dict_size;
13845 new_entry = new_dict.At(index);
13846 while (!new_entry.IsNull()) {
13847 index = (index + 1) % new_dict_size; // Move to next element.
13848 new_entry = new_dict.At(index);
13849 }
13850 new_dict.SetAt(index, entry);
13851 used++;
13852 }
13853 }
13854 // Set used count.
13855 ASSERT(used < new_dict_size); // Need at least one empty slot.
13856 new_entry = Smi::New(used);
13857 new_dict.SetAt(new_dict_size, new_entry);
13858 // Remember the new dictionary now.
13859 untag()->set_dictionary(new_dict.ptr());
13860}
13861
13862void Library::AddObject(const Object& obj, const String& name) const {
13863 ASSERT(Thread::Current()->IsDartMutatorThread());
13864 ASSERT(obj.IsClass() || obj.IsFunction() || obj.IsField() ||
13865 obj.IsLibraryPrefix());
13866 ASSERT(name.Equals(String::Handle(obj.DictionaryName())));
13867 ASSERT(LookupLocalObject(name) == Object::null());
13868 const Array& dict = Array::Handle(dictionary());
13869 intptr_t dict_size = dict.Length() - 1;
13870 intptr_t index = name.Hash() % dict_size;
13871
13872 Object& entry = Object::Handle();
13873 entry = dict.At(index);
13874 // An empty spot will be found because we keep the hash set at most 75% full.
13875 while (!entry.IsNull()) {
13876 index = (index + 1) % dict_size;
13877 entry = dict.At(index);
13878 }
13879
13880 // Insert the object at the empty slot.
13881 dict.SetAt(index, obj);
13882 // One more element added.
13883 intptr_t used_elements = Smi::Value(Smi::RawCast(dict.At(dict_size))) + 1;
13884 const Smi& used = Smi::Handle(Smi::New(used_elements));
13885 dict.SetAt(dict_size, used); // Update used count.
13886
13887 // Rehash if symbol_table is 75% full.
13888 if (used_elements > ((dict_size / 4) * 3)) {
13889 // TODO(iposva): Avoid exponential growth.
13890 RehashDictionary(dict, 2 * dict_size);
13891 }
13892
13893 // Invalidate the cache of loaded scripts.
13894 if (loaded_scripts() != Array::null()) {
13895 untag()->set_loaded_scripts(Array::null());
13896 }
13897}
13898
// Lookup a name in the library's re-export namespace.
// This lookup can occur from two different threads: background compiler and
// mutator thread.
// NOTE(review): the first signature line (likely
// `ObjectPtr Library::LookupReExport(const String& name,`) and the
// declaration of the `ns` handle below are missing from this excerpt —
// confirm against upstream object.cc.
                               ZoneGrowableArray<intptr_t>* trail) const {
  if (!HasExports()) {
    return Object::null();
  }

  // The trail of library ids guards against cycles in the export graph.
  if (trail == nullptr) {
    trail = new ZoneGrowableArray<intptr_t>();
  }
  Object& obj = Object::Handle();

  const intptr_t lib_id = this->index();
  ASSERT(lib_id >= 0);  // We use -1 to indicate that a cycle was found.
  trail->Add(lib_id);
  const Array& exports = Array::Handle(this->exports());
  // NOTE(review): missing line here (likely `Namespace& ns =
  // Namespace::Handle();`) — confirm against upstream.
  for (int i = 0; i < exports.Length(); i++) {
    ns ^= exports.At(i);
    obj = ns.Lookup(name, trail);
    if (!obj.IsNull()) {
      // The Lookup call above may return a setter x= when we are looking
      // for the name x. Make sure we only return when a matching name
      // is found.
      String& obj_name = String::Handle(obj.DictionaryName());
      if (Field::IsSetterName(obj_name) == Field::IsSetterName(name)) {
        break;
      }
    }
  }
  trail->RemoveLast();
  return obj.ptr();
}
13934
// Looks up `name` in this library's dictionary via linear probing.
// Returns the matching entry (and its slot in *index) or null when absent.
ObjectPtr Library::LookupEntry(const String& name, intptr_t* index) const {
  ASSERT(!IsNull());
  Thread* thread = Thread::Current();
  // NOTE(review): reusable-handle scope macro lines appear to be missing
  // from this excerpt here — confirm against upstream object.cc.
  Array& dict = thread->ArrayHandle();
  dict = dictionary();
  intptr_t dict_size = dict.Length() - 1;
  *index = name.Hash() % dict_size;
  Object& entry = thread->ObjectHandle();
  String& entry_name = thread->StringHandle();
  entry = dict.At(*index);
  // Search the entry in the hash set.
  while (!entry.IsNull()) {
    entry_name = entry.DictionaryName();
    ASSERT(!entry_name.IsNull());
    if (entry_name.Equals(name)) {
      return entry.ptr();
    }
    *index = (*index + 1) % dict_size;
    entry = dict.At(*index);
  }
  return Object::null();
}
13960
// Adds `cls` to this library's dictionary under its own name and links the
// class back to the library.
void Library::AddClass(const Class& cls) const {
  // NOTE(review): a line is missing from this excerpt here (likely a
  // program_lock writer assertion) — confirm against upstream object.cc.
  const String& class_name = String::Handle(cls.Name());
  AddObject(cls, class_name);
  // Link class to this library.
  cls.set_library(*this);
}
13968
                              const Script& candidate) {
  // NOTE(review): the first signature line (likely
  // `static void AddScriptIfUnique(const GrowableObjectArray& scripts,`)
  // is missing from this excerpt — confirm against upstream object.cc.
  // Null candidates are silently ignored.
  if (candidate.IsNull()) {
    return;
  }
  Script& script_obj = Script::Handle();

  // Linear scan: append only if this exact script is not yet present.
  for (int i = 0; i < scripts.Length(); i++) {
    script_obj ^= scripts.At(i);
    if (script_obj.ptr() == candidate.ptr()) {
      // We already have a reference to this script.
      return;
    }
  }
  // Add script to the list of scripts.
  scripts.Add(candidate);
}
13986
// Lazily builds (and caches in loaded_scripts_) the deduplicated array of
// all scripts referenced by this library's dictionary, its patches, and
// its top-level class.
ArrayPtr Library::LoadedScripts() const {
  // We compute the list of loaded scripts lazily. The result is
  // cached in loaded_scripts_.
  if (loaded_scripts() == Array::null()) {
    // TODO(jensj): This can be cleaned up.
    // It really should just return the content of `used_scripts`, and there
    // should be no need to do the O(n) call to `AddScriptIfUnique` per script.

    // Iterate over the library dictionary and collect all scripts.
    // NOTE(review): the declaration of the `scripts` growable array is
    // missing from this excerpt — confirm against upstream object.cc.
    Object& entry = Object::Handle();
    Class& cls = Class::Handle();
    Script& owner_script = Script::Handle();
    DictionaryIterator it(*this);
    while (it.HasNext()) {
      entry = it.GetNext();
      if (entry.IsClass()) {
        owner_script = Class::Cast(entry).script();
      } else if (entry.IsFunction()) {
        owner_script = Function::Cast(entry).script();
      } else if (entry.IsField()) {
        owner_script = Field::Cast(entry).Script();
      } else {
        continue;
      }
      AddScriptIfUnique(scripts, owner_script);
    }

    // Add all scripts from patch classes.
    // NOTE(review): the declaration of `patches` is missing from this
    // excerpt — confirm against upstream object.cc.
    for (intptr_t i = 0; i < patches.Length(); i++) {
      entry = patches.At(i);
      if (entry.IsClass()) {
        owner_script = Class::Cast(entry).script();
      } else {
        ASSERT(entry.IsScript());
        owner_script = Script::Cast(entry).ptr();
      }
      AddScriptIfUnique(scripts, owner_script);
    }

    cls = toplevel_class();
    if (!cls.IsNull()) {
      owner_script = cls.script();
      AddScriptIfUnique(scripts, owner_script);
      // Special case: Scripts that only contain external top-level functions
      // are not included above, but can be referenced through a library's
      // anonymous classes. Example: dart-core:identical.dart.
      Function& func = Function::Handle();
      Array& functions = Array::Handle(cls.current_functions());
      for (intptr_t j = 0; j < functions.Length(); j++) {
        func ^= functions.At(j);
        if (func.is_external()) {
          owner_script = func.script();
          AddScriptIfUnique(scripts, owner_script);
        }
      }
    }

    // Create the array of scripts and cache it in loaded_scripts_.
    const Array& scripts_array = Array::Handle(Array::MakeFixedLength(scripts));
    untag()->set_loaded_scripts(scripts_array.ptr());
  }
  return loaded_scripts();
}
14053
// TODO(hausner): we might want to add a script dictionary to the
// library class to make this lookup faster.
// Finds a loaded script whose url (or resolved url when `useResolvedUri`)
// equals `url`, or ends with `url` at a path-separator boundary.
ScriptPtr Library::LookupScript(const String& url,
                                bool useResolvedUri /* = false */) const {
  const intptr_t url_length = url.Length();
  if (url_length == 0) {
    return Script::null();
  }
  // NOTE(review): the declaration of `scripts` (likely
  // `const Array& scripts = Array::Handle(LoadedScripts());`) is missing
  // from this excerpt — confirm against upstream object.cc.
  Script& script = Script::Handle();
  String& script_url = String::Handle();
  const intptr_t num_scripts = scripts.Length();
  for (int i = 0; i < num_scripts; i++) {
    script ^= scripts.At(i);
    if (useResolvedUri) {
      // Use for urls with 'org-dartlang-sdk:' or 'file:' schemes
      script_url = script.resolved_url();
    } else {
      // Use for urls with 'dart:', 'package:', or 'file:' schemes
      script_url = script.url();
    }
    const intptr_t start_idx = script_url.Length() - url_length;
    if ((start_idx == 0) && url.Equals(script_url)) {
      return script.ptr();
    } else if (start_idx > 0) {
      // If we do a suffix match, only match if the partial path
      // starts at or immediately after the path separator.
      if (((url.CharAt(0) == '/') ||
           (script_url.CharAt(start_idx - 1) == '/')) &&
          url.Equals(script_url, start_idx, url_length)) {
        return script.ptr();
      }
    }
  }
  return Script::null();
}
14090
  if (toplevel_class() == Object::null()) {
    // No top-level class: nothing to finalize.
    return;
  }
  // NOTE(review): the enclosing signature line (likely
  // `void Library::EnsureTopLevelClassIsFinalized() const {`) is missing
  // from this excerpt — confirm against upstream object.cc.
  Thread* thread = Thread::Current();
  const Class& cls = Class::Handle(thread->zone(), toplevel_class());
  if (cls.is_finalized()) {
    return;
  }
  const Error& error =
      Error::Handle(thread->zone(), cls.EnsureIsFinalized(thread));
  if (!error.IsNull()) {
    // NOTE(review): a line is missing here (likely error propagation) —
    // confirm against upstream.
  }
}
14106
14107ObjectPtr Library::LookupLocalObject(const String& name) const {
14108 intptr_t index;
14109 return LookupEntry(name, &index);
14110}
14111
  intptr_t index;
  // NOTE(review): the enclosing signature (likely
  // `ObjectPtr Library::LookupLocalOrReExportObject(const String& name)
  // const {`) and one statement (likely
  // `EnsureTopLevelClassIsFinalized();`) are missing from this excerpt —
  // confirm against upstream object.cc.
  const Object& result = Object::Handle(LookupEntry(name, &index));
  if (!result.IsNull() && !result.IsLibraryPrefix()) {
    return result.ptr();
  }
  // Fall back to names re-exported by this library.
  return LookupReExport(name);
}
14121
  Object& obj = Object::Handle(LookupLocalObjectAllowPrivate(name));
  // NOTE(review): the enclosing signature line (likely
  // `FieldPtr Library::LookupFieldAllowPrivate(const String& name) const
  // {`) is missing from this excerpt — confirm against upstream.
  if (obj.IsField()) {
    return Field::Cast(obj).ptr();
  }
  // Not a field (or not found at all).
  return Field::null();
}
14130
  Object& obj = Object::Handle(LookupLocalObjectAllowPrivate(name));
  // NOTE(review): the enclosing signature line (likely
  // `FunctionPtr Library::LookupFunctionAllowPrivate(const String& name)
  // const {`) is missing from this excerpt — confirm against upstream.
  if (obj.IsFunction()) {
    return Function::Cast(obj).ptr();
  }
  // Not a function (or not found at all).
  return Function::null();
}
14139
14140ObjectPtr Library::LookupLocalObjectAllowPrivate(const String& name) const {
14141 Thread* thread = Thread::Current();
14142 Zone* zone = thread->zone();
14143 Object& obj = Object::Handle(zone, Object::null());
14144 obj = LookupLocalObject(name);
14145 if (obj.IsNull() && ShouldBePrivate(name)) {
14146 String& private_name = String::Handle(zone, PrivateName(name));
14147 obj = LookupLocalObject(private_name);
14148 }
14149 return obj.ptr();
14150}
14151
14152ClassPtr Library::LookupClass(const String& name) const {
14153 Object& obj = Object::Handle(LookupLocalObject(name));
14154 if (obj.IsClass()) {
14155 return Class::Cast(obj).ptr();
14156 }
14157 return Class::null();
14158}
14159
  Object& obj = Object::Handle(LookupLocalObjectAllowPrivate(name));
  // NOTE(review): the enclosing signature line (likely
  // `ClassPtr Library::LookupClassAllowPrivate(const String& name) const
  // {`) is missing from this excerpt — confirm against upstream.
  if (obj.IsClass()) {
    return Class::Cast(obj).ptr();
  }
  // Not a class (or not found at all).
  return Class::null();
}
14167
14168LibraryPrefixPtr Library::LookupLocalLibraryPrefix(const String& name) const {
14169 const Object& obj = Object::Handle(LookupLocalObject(name));
14170 if (obj.IsLibraryPrefix()) {
14171 return LibraryPrefix::Cast(obj).ptr();
14172 }
14173 return LibraryPrefix::null();
14174}
14175
// Installs the synthetic top-level class that holds this library's
// top-level declarations.
void Library::set_toplevel_class(const Class& value) const {
  // NOTE(review): a line is missing from this excerpt here (likely an
  // ASSERT that the top-level class is set only once) — confirm upstream.
  untag()->set_toplevel_class(value.ptr());
}
14180
// Stores the array describing this library's dependencies.
void Library::set_dependencies(const Array& deps) const {
  untag()->set_dependencies(deps.ptr());
}
14184
// Replaces the metadata map backing store; writes require the program lock
// in writer mode.
void Library::set_metadata(const Array& value) const {
  if (untag()->metadata() != value.ptr()) {
    // NOTE(review): a line is missing from this excerpt here (likely the
    // opening `ASSERT(` of the writer-lock check below) — confirm upstream.
        IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
    untag()->set_metadata(value.ptr());
  }
}
14192
14193LibraryPtr Library::ImportLibraryAt(intptr_t index) const {
14194 Namespace& import = Namespace::Handle(ImportAt(index));
14195 if (import.IsNull()) {
14196 return Library::null();
14197 }
14198 return import.target();
14199}
14200
14201NamespacePtr Library::ImportAt(intptr_t index) const {
14202 if ((index < 0) || index >= num_imports()) {
14203 return Namespace::null();
14204 }
14205 const Array& import_list = Array::Handle(imports());
14206 return Namespace::RawCast(import_list.At(index));
14207}
14208
  untag()->set_imports(Object::empty_array().ptr());
  // NOTE(review): the enclosing signature line of this function is missing
  // from this excerpt (the body clears imports/exports, dependencies, and
  // cached script state) — confirm against upstream object.cc.
  untag()->set_exports(Object::empty_array().ptr());
  StoreNonPointer(&untag()->num_imports_, 0);
  untag()->set_loaded_scripts(Array::null());
  untag()->set_dependencies(Array::null());
#if defined(PRODUCT)
  // used_scripts is only used by vm-service.
  untag()->set_used_scripts(GrowableObjectArray::null());
#endif
}
14220
14221void Library::AddImport(const Namespace& ns) const {
14222 Array& imports = Array::Handle(this->imports());
14223 intptr_t capacity = imports.Length();
14224 if (num_imports() == capacity) {
14225 capacity = capacity + kImportsCapacityIncrement + (capacity >> 2);
14226 imports = Array::Grow(imports, capacity);
14227 untag()->set_imports(imports.ptr());
14228 }
14229 intptr_t index = num_imports();
14230 imports.SetAt(index, ns);
14231 set_num_imports(index + 1);
14232}
14233
// Convenience function to determine whether the export list is
// non-empty.
bool Library::HasExports() const {
  // An empty export list is canonically the shared empty array.
  return exports() != Object::empty_array().ptr();
}
14239
14240// We add one namespace at a time to the exports array and don't
14241// pre-allocate any unused capacity. The assumption is that
14242// re-exports are quite rare.
14243void Library::AddExport(const Namespace& ns) const {
14244 Array& exports = Array::Handle(this->exports());
14245 intptr_t num_exports = exports.Length();
14246 exports = Array::Grow(exports, num_exports + 1);
14247 untag()->set_exports(exports.ptr());
14248 exports.SetAt(num_exports, ns);
14249}
14250
14251static ArrayPtr NewDictionary(intptr_t initial_size) {
14252 const Array& dict = Array::Handle(Array::New(initial_size + 1, Heap::kOld));
14253 // The last element of the dictionary specifies the number of in use slots.
14254 dict.SetAt(initial_size, Object::smi_zero());
14255 return dict.ptr();
14256}
14257
// Allocates the initial (empty) dictionary used for this library's
// top-level declaration lookups.
void Library::InitClassDictionary() const {
  Thread* thread = Thread::Current();
  ASSERT(thread->IsDartMutatorThread());
  // NOTE(review): a line is missing from this excerpt here (likely a
  // reusable-handle scope macro) — confirm against upstream object.cc.
  Array& dictionary = thread->ArrayHandle();
  // TODO(iposva): Find reasonable initial size.
  const int kInitialElementCount = 16;
  dictionary = NewDictionary(kInitialElementCount);
  untag()->set_dictionary(dictionary.ptr());
}
14268
14269void Library::InitImportList() const {
14270 const Array& imports =
14271 Array::Handle(Array::New(kInitialImportsCapacity, Heap::kOld));
14272 untag()->set_imports(imports.ptr());
14273 StoreNonPointer(&untag()->num_imports_, 0);
14274}
14275
// Allocates an uninitialized Library object in old space.
LibraryPtr Library::New() {
  // NOTE(review): a line is missing from this excerpt here (likely an
  // ASSERT that the Library VM class is initialized) — confirm upstream.
  return Object::Allocate<Library>(Heap::kOld);
}
14280
// Allocates and fully initializes a new Library for `url`, optionally
// importing dart:core. Sets defaults for flags, debuggability, and the
// dictionary/import/export structures.
LibraryPtr Library::NewLibraryHelper(const String& url, bool import_core_lib) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  ASSERT(thread->IsDartMutatorThread());
  // Force the url to have a hash code.
  url.Hash();
  const bool dart_scheme = url.StartsWith(Symbols::DartScheme());
  const Library& result = Library::Handle(zone, Library::New());
  result.untag()->set_name(Symbols::Empty().ptr());
  result.untag()->set_url(url.ptr());
  result.untag()->set_dictionary(Object::empty_array().ptr());
  Array& array = Array::Handle(zone);
  array = HashTables::New<MetadataMap>(4, Heap::kOld);
  result.untag()->set_metadata(array.ptr());
  result.untag()->set_toplevel_class(Class::null());
  GrowableObjectArray& list = GrowableObjectArray::Handle(zone);
  list = GrowableObjectArray::New(Object::empty_array(), Heap::kOld);
  result.untag()->set_used_scripts(list.ptr());
  result.untag()->set_imports(Object::empty_array().ptr());
  result.untag()->set_exports(Object::empty_array().ptr());
  // NOTE(review): a wrapper line (likely `NOT_IN_PRODUCT(`) is missing
  // from this excerpt before the next statement, which explains its
  // trailing extra ')' — confirm against upstream object.cc.
      result.untag()->set_kernel_program_info(KernelProgramInfo::null()));
  result.untag()->set_loaded_scripts(Array::null());
  result.set_native_entry_resolver(nullptr);
  result.set_native_entry_symbol_resolver(nullptr);
  result.set_ffi_native_resolver(nullptr);
  result.set_flags(0);
  result.set_is_in_fullsnapshot(false);
  // This logic is also in the DAP debug adapter in DDS to avoid needing
  // to call setLibraryDebuggable for every library for every isolate.
  // If these defaults change, the same should be done there in
  // dap/IsolateManager._getIsLibraryDebuggableByDefault.
  if (dart_scheme) {
    // Only debug dart: libraries if we have been requested to show invisible
    // frames.
    result.set_debuggable(FLAG_show_invisible_frames);
  } else {
    // Default to debuggable for all other libraries.
    result.set_debuggable(true);
  }
  result.set_is_dart_scheme(dart_scheme);
  // NOTE(review): a wrapper line (likely `NOT_IN_PRODUCT(`) is missing
  // from this excerpt before the next statement as well — confirm upstream.
      result.StoreNonPointer(&result.untag()->kernel_library_index_, -1));
  result.StoreNonPointer(&result.untag()->load_state_,
                         UntaggedLibrary::kAllocated);
  result.StoreNonPointer(&result.untag()->index_, -1);
  result.InitClassDictionary();
  result.InitImportList();
  result.AllocatePrivateKey();
  if (import_core_lib) {
    const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
    ASSERT(!core_lib.IsNull());
    const Namespace& ns =
        Namespace::Handle(zone, Namespace::New(core_lib, Object::null_array(),
                                               Object::null_array(), result));
    result.AddImport(ns);
  }
  return result.ptr();
}
14340
// Creates a new library for `url` without importing dart:core.
LibraryPtr Library::New(const String& url) {
  return NewLibraryHelper(url, false);
}
14344
// Overwrites this library's full flag byte.
void Library::set_flags(uint8_t flags) const {
  StoreNonPointer(&untag()->flags_, flags);
}
14348
  Thread* thread = Thread::Current();
  // NOTE(review): the enclosing signature line (likely
  // `void Library::InitCoreLibrary(IsolateGroup* isolate_group) {`) is
  // missing from this excerpt — confirm against upstream object.cc.
  Zone* zone = thread->zone();
  const String& core_lib_url = Symbols::DartCore();
  const Library& core_lib =
      Library::Handle(zone, Library::NewLibraryHelper(core_lib_url, false));
  core_lib.SetLoadRequested();
  core_lib.Register(thread);
  isolate_group->object_store()->set_bootstrap_library(ObjectStore::kCore,
                                                       core_lib);
  // The root library is set later, once user code is loaded.
  isolate_group->object_store()->set_root_library(Library::Handle());
}
14361
// Invoke the function, or noSuchMethod if it is null.
// NOTE(review): the first signature line (likely
// `static ObjectPtr InvokeInstanceFunction(`) is missing from this
// excerpt — confirm against upstream object.cc.
    Thread* thread,
    const Instance& receiver,
    const Function& function,
    const String& target_name,
    const Array& args,
    const Array& args_descriptor_array,
    bool respect_reflectable,
    const TypeArguments& instantiator_type_args) {
  // Note "args" is already the internal arguments with the receiver as the
  // first element.
  ArgumentsDescriptor args_descriptor(args_descriptor_array);
  // Fall back to noSuchMethod when the target is missing, the arguments do
  // not fit the signature, or the member is non-reflectable while
  // reflectability is respected.
  if (function.IsNull() ||
      !function.AreValidArguments(args_descriptor, nullptr) ||
      (respect_reflectable && !function.is_reflectable())) {
    return DartEntry::InvokeNoSuchMethod(thread, receiver, target_name, args,
                                         args_descriptor_array);
  }
  // Dynamic type checks of the actual arguments.
  ObjectPtr type_error = function.DoArgumentTypesMatch(args, args_descriptor,
                                                       instantiator_type_args);
  if (type_error != Error::null()) {
    return type_error;
  }
  return DartEntry::InvokeFunction(function, args, args_descriptor_array);
}
14388
                                bool throw_nsm_if_absent,
                                bool respect_reflectable,
                                bool check_is_entrypoint) const {
  // NOTE(review): several lines are missing from this excerpt: the first
  // signature line (likely `ObjectPtr Library::InvokeGetter(const String&
  // getter_name,`), the initial lookup populating `obj`, CHECK_ERROR
  // entry-point checks inside the empty `if` bodies below, and the final
  // InvocationMirror arguments of ThrowNoSuchMethod — confirm upstream.
  Function& getter = Function::Handle();
  if (obj.IsField()) {
    const Field& field = Field::Cast(obj);
    if (check_is_entrypoint) {
    }
    if (!field.IsUninitialized()) {
      return field.StaticValue();
    }
    // An uninitialized field was found. Check for a getter in the field's
    // owner class.
    const Class& klass = Class::Handle(field.Owner());
    const String& internal_getter_name =
        String::Handle(Field::GetterName(getter_name));
    getter = klass.LookupStaticFunction(internal_getter_name);
  } else {
    // No field found. Check for a getter in the lib.
    const String& internal_getter_name =
        String::Handle(Field::GetterName(getter_name));
    obj = LookupLocalOrReExportObject(internal_getter_name);
    if (obj.IsFunction()) {
      getter = Function::Cast(obj).ptr();
      if (check_is_entrypoint) {
      }
    } else {
      obj = LookupLocalOrReExportObject(getter_name);
      // Normally static top-level methods cannot be closurized through the
      // native API even if they are marked as entry-points, with the one
      // exception of "main".
      if (obj.IsFunction() && check_is_entrypoint) {
        if (!getter_name.Equals(String::Handle(String::New("main"))) ||
            ptr() != IsolateGroup::Current()->object_store()->root_library()) {
          CHECK_ERROR(Function::Cast(obj).VerifyClosurizedEntryPoint());
        }
      }
      if (obj.IsFunction() && Function::Cast(obj).SafeToClosurize()) {
        // Looking for a getter but found a regular method: closurize it.
        const Function& closure_function =
            Function::Handle(Function::Cast(obj).ImplicitClosureFunction());
        return closure_function.ImplicitStaticClosure();
      }
    }
  }

  if (getter.IsNull() || (respect_reflectable && !getter.is_reflectable())) {
    if (throw_nsm_if_absent) {
      return ThrowNoSuchMethod(Object::null_string(), getter_name,
                               Object::null_array(), Object::null_array(),
    }

    // Fall through case: Indicate that we didn't find any function or field
    // using a special null instance. This is different from a field being null.
    // Callers make sure that this null does not leak into Dartland.
    return Object::sentinel().ptr();
  }

  // Invoke the getter and return the result.
  return DartEntry::InvokeFunction(getter, Object::empty_array());
}
14456
                            const Instance& value,
                            bool respect_reflectable,
                            bool check_is_entrypoint) const {
  // NOTE(review): several lines are missing from this excerpt: the first
  // signature line (likely `ObjectPtr Library::InvokeSetter(const String&
  // setter_name,`), the initial lookup populating `obj`, CHECK_ERROR
  // entry-point checks inside the empty `if` bodies, and the final
  // InvocationMirror arguments of the ThrowNoSuchMethod calls — confirm
  // against upstream object.cc.
  const String& internal_setter_name =
      String::Handle(Field::SetterName(setter_name));
  AbstractType& setter_type = AbstractType::Handle();
  AbstractType& argument_type = AbstractType::Handle(value.GetType(Heap::kOld));
  if (obj.IsField()) {
    const Field& field = Field::Cast(obj);
    if (check_is_entrypoint) {
    }
    setter_type = field.type();
    // Dynamic type check of the assigned value against the field type.
    if (!argument_type.IsNullType() && !setter_type.IsDynamicType() &&
        !value.IsInstanceOf(setter_type, Object::null_type_arguments(),
                            Object::null_type_arguments())) {
      return ThrowTypeError(field.token_pos(), value, setter_type, setter_name);
    }
    if (field.is_final() || (respect_reflectable && !field.is_reflectable())) {
      const int kNumArgs = 1;
      const Array& args = Array::Handle(Array::New(kNumArgs));
      args.SetAt(0, value);

      return ThrowNoSuchMethod(Object::null_string(), internal_setter_name,
                               args, Object::null_array(),
    }
    field.SetStaticValue(value);
    return value.ptr();
  }

  // No field: look for a top-level setter function.
  Function& setter = Function::Handle();
  obj = LookupLocalOrReExportObject(internal_setter_name);
  if (obj.IsFunction()) {
    setter ^= obj.ptr();
  }

  if (!setter.IsNull() && check_is_entrypoint) {
  }

  const int kNumArgs = 1;
  const Array& args = Array::Handle(Array::New(kNumArgs));
  args.SetAt(0, value);
  if (setter.IsNull() || (respect_reflectable && !setter.is_reflectable())) {
    return ThrowNoSuchMethod(Object::null_string(), internal_setter_name, args,
                             Object::null_array(), InvocationMirror::kTopLevel,
  }

  // Dynamic type check of the assigned value against the setter parameter.
  setter_type = setter.ParameterTypeAt(0);
  if (!argument_type.IsNullType() && !setter_type.IsDynamicType() &&
      !value.IsInstanceOf(setter_type, Object::null_type_arguments(),
                          Object::null_type_arguments())) {
    return ThrowTypeError(setter.token_pos(), value, setter_type, setter_name);
  }

  return DartEntry::InvokeFunction(setter, args);
}
14519
                        const Array& args,
                        const Array& arg_names,
                        bool respect_reflectable,
                        bool check_is_entrypoint) const {
  // NOTE(review): several lines are missing from this excerpt: the first
  // signature line (likely `ObjectPtr Library::Invoke(const String&
  // function_name,`), the lookup initializing `result`, a CHECK_ERROR in
  // the empty `if` body, and the final InvocationMirror arguments of the
  // ThrowNoSuchMethod calls — confirm against upstream object.cc.
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  // We don't pass any explicit type arguments, which will be understood as
  // using dynamic for any function type arguments by lower layers.
  const int kTypeArgsLen = 0;
  const Array& args_descriptor_array = Array::Handle(
      zone, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(),
                                          arg_names, Heap::kNew));
  ArgumentsDescriptor args_descriptor(args_descriptor_array);

  auto& function = Function::Handle(zone);
  auto& result =
  if (result.IsFunction()) {
    function ^= result.ptr();
  }

  if (!function.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(function.VerifyCallEntryPoint());
  }

  if (function.IsNull()) {
    // Didn't find a method: try to find a getter and invoke call on its result.
    const Object& getter_result = Object::Handle(
        zone, InvokeGetter(function_name, false, respect_reflectable,
                           check_is_entrypoint));
    if (getter_result.ptr() != Object::sentinel().ptr()) {
      if (check_is_entrypoint) {
      }
      // Treat the getter result as a callable and invoke it with the
      // original arguments (receiver prepended).
      const auto& call_args_descriptor_array = Array::Handle(
          zone, ArgumentsDescriptor::NewBoxed(args_descriptor.TypeArgsLen(),
                                              args_descriptor.Count() + 1,
                                              arg_names, Heap::kNew));
      const auto& call_args = Array::Handle(
          zone,
          CreateCallableArgumentsFromStatic(zone, Instance::Cast(getter_result),
                                            args, arg_names, args_descriptor));
      return DartEntry::InvokeClosure(thread, call_args,
                                      call_args_descriptor_array);
    }
  }

  if (function.IsNull() ||
      (respect_reflectable && !function.is_reflectable())) {
    return ThrowNoSuchMethod(Object::null_string(), function_name, args,
                             arg_names, InvocationMirror::kTopLevel,
  }
  if (!function.AreValidArguments(args_descriptor, nullptr)) {
    return ThrowNoSuchMethod(
        String::Handle(function.UserVisibleSignature()), function_name, args,
  }
  // This is a static function, so we pass an empty instantiator tav.
  ASSERT(function.is_static());
  ObjectPtr type_error = function.DoArgumentTypesMatch(
      args, args_descriptor, Object::empty_type_arguments());
  if (type_error != Error::null()) {
    return type_error;
  }
  return DartEntry::InvokeFunction(function, args, args_descriptor_array);
}
14589
                                         bool is_kernel) {
  // NOTE(review): the first signature line (likely
  // `void Library::InitNativeWrappersLibrary(IsolateGroup* isolate_group,`)
  // is missing from this excerpt — confirm against upstream object.cc.
  // Creates the dart:nativewrappers library and its NativeFieldWrapperClass1
  // through NativeFieldWrapperClass4 classes.
  const int kNumNativeWrappersClasses = 4;
  COMPILE_ASSERT((kNumNativeWrappersClasses > 0) &&
                 (kNumNativeWrappersClasses < 10));
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const String& native_flds_lib_url = Symbols::DartNativeWrappers();
  const Library& native_flds_lib = Library::Handle(
      zone, Library::NewLibraryHelper(native_flds_lib_url, false));
  const String& native_flds_lib_name = Symbols::DartNativeWrappersLibName();
  native_flds_lib.SetName(native_flds_lib_name);
  native_flds_lib.SetLoadRequested();
  native_flds_lib.Register(thread);
  native_flds_lib.SetLoadInProgress();
  isolate_group->object_store()->set_native_wrappers_library(native_flds_lib);
  const char* const kNativeWrappersClass = "NativeFieldWrapperClass";
  const int kNameLength = 25;
  ASSERT(kNameLength == (strlen(kNativeWrappersClass) + 1 + 1));
  char name_buffer[kNameLength];
  String& cls_name = String::Handle(zone);
  for (int fld_cnt = 1; fld_cnt <= kNumNativeWrappersClasses; fld_cnt++) {
    Utils::SNPrint(name_buffer, kNameLength, "%s%d", kNativeWrappersClass,
                   fld_cnt);
    cls_name = Symbols::New(thread, name_buffer);
    Class::NewNativeWrapper(native_flds_lib, cls_name, fld_cnt);
  }
  // NOTE: If we bootstrap from a Kernel IR file we want to generate the
  // synthetic constructors for the native wrapper classes. We leave this up to
  // the [KernelLoader] who will take care of it later.
  if (!is_kernel) {
    native_flds_lib.SetLoaded();
  }
}
14624
14625// LibraryLookupSet maps URIs to libraries.
14627 public:
14628 static const char* Name() { return "LibraryLookupTraits"; }
14629 static bool ReportStats() { return false; }
14630
14631 static bool IsMatch(const Object& a, const Object& b) {
14632 const String& a_str = String::Cast(a);
14633 const String& b_str = String::Cast(b);
14634
14635 ASSERT(a_str.HasHash() && b_str.HasHash());
14636 return a_str.Equals(b_str);
14637 }
14638
14639 static uword Hash(const Object& key) { return String::Cast(key).Hash(); }
14640
14641 static ObjectPtr NewKey(const String& str) { return str.ptr(); }
14642};
14644
14645// Returns library with given url in current isolate, or nullptr.
14646LibraryPtr Library::LookupLibrary(Thread* thread, const String& url) {
14647 Zone* zone = thread->zone();
14648 ObjectStore* object_store = thread->isolate_group()->object_store();
14649
14650 // Make sure the URL string has an associated hash code
14651 // to speed up the repeated equality checks.
14652 url.Hash();
14653
14654 // Use the libraries map to lookup the library by URL.
14655 Library& lib = Library::Handle(zone);
14656 if (object_store->libraries_map() == Array::null()) {
14657 return Library::null();
14658 } else {
14659 LibraryLookupMap map(object_store->libraries_map());
14660 lib ^= map.GetOrNull(url);
14661 ASSERT(map.Release().ptr() == object_store->libraries_map());
14662 }
14663 return lib.ptr();
14664}
14665
14667 if (ShouldBePrivate(name)) return true;
14668 // Factory names: List._fromLiteral.
14669 for (intptr_t i = 1; i < name.Length() - 1; i++) {
14670 if (name.CharAt(i) == '.') {
14671 if (name.CharAt(i + 1) == '_') {
14672 return true;
14673 }
14674 }
14675 }
14676 return false;
14677}
14678
14679// Create a private key for this library. It is based on the hash of the
14680// library URI and the sequence number of the library to guarantee unique
14681// private keys without having to verify.
14682void Library::AllocatePrivateKey() const {
14683 Thread* thread = Thread::Current();
14684 Zone* zone = thread->zone();
14685 auto isolate_group = thread->isolate_group();
14686
14687#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
14688 if (isolate_group->IsReloading()) {
14689 // When reloading, we need to make sure we use the original private key
14690 // if this library previously existed.
14691 ProgramReloadContext* program_reload_context =
14692 isolate_group->program_reload_context();
14693 const String& original_key =
14694 String::Handle(program_reload_context->FindLibraryPrivateKey(*this));
14695 if (!original_key.IsNull()) {
14696 untag()->set_private_key(original_key.ptr());
14697 return;
14698 }
14699 }
14700#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
14701
14702 // Format of the private key is: "@<sequence number><6 digits of hash>
14703 const intptr_t hash_mask = 0x7FFFF;
14704
14705 const String& url = String::Handle(zone, this->url());
14706 intptr_t hash_value = url.Hash() & hash_mask;
14707
14708 const GrowableObjectArray& libs = GrowableObjectArray::Handle(
14709 zone, isolate_group->object_store()->libraries());
14710 intptr_t sequence_value = libs.Length();
14711
14712 char private_key[32];
14713 Utils::SNPrint(private_key, sizeof(private_key), "%c%" Pd "%06" Pd "",
14714 kPrivateKeySeparator, sequence_value, hash_value);
14715 const String& key =
14717 key.Hash(); // This string may end up in the VM isolate.
14718 untag()->set_private_key(key.ptr());
14719}
14720
14722 const Library& core_lib = Library::Handle(Library::CoreLibrary());
14723 const String& private_name = String::ZoneHandle(core_lib.PrivateName(member));
14724 return private_name;
14725}
14726
14728 Zone* zone = Thread::Current()->zone();
14729 const auto& core_lib = Library::Handle(zone, Library::CoreLibrary());
14730 const auto& private_key = String::Handle(zone, core_lib.private_key());
14731
14732 ASSERT(core_lib.IsPrivate(member));
14733 return name.EqualsConcat(member, private_key);
14734}
14735
14737 Thread* thread = Thread::Current();
14738 Zone* zone = thread->zone();
14739 const Library& core_lib = Library::Handle(zone, Library::CoreLibrary());
14740 String& name = String::Handle(zone, class_name.ptr());
14741 if (class_name.CharAt(0) == kPrivateIdentifierStart) {
14742 // Private identifiers are mangled on a per library basis.
14743 name = Symbols::FromConcat(thread, name,
14744 String::Handle(zone, core_lib.private_key()));
14745 }
14746 return core_lib.LookupClass(name);
14747}
14748
14749// Cannot handle qualified names properly as it only appends private key to
14750// the end (e.g. _Alfa.foo -> _Alfa.foo@...).
14751StringPtr Library::PrivateName(const String& name) const {
14752 Thread* thread = Thread::Current();
14753 Zone* zone = thread->zone();
14755 // ASSERT(strchr(name, '@') == nullptr);
14756 String& str = String::Handle(zone);
14757 str = name.ptr();
14758 str = Symbols::FromConcat(thread, str,
14759 String::Handle(zone, this->private_key()));
14760 return str.ptr();
14761}
14762
14763LibraryPtr Library::GetLibrary(intptr_t index) {
14764 Thread* thread = Thread::Current();
14765 Zone* zone = thread->zone();
14766 auto isolate_group = thread->isolate_group();
14768 zone, isolate_group->object_store()->libraries());
14769 ASSERT(!libs.IsNull());
14770 if ((0 <= index) && (index < libs.Length())) {
14771 Library& lib = Library::Handle(zone);
14772 lib ^= libs.At(index);
14773 return lib.ptr();
14774 }
14775 return Library::null();
14776}
14777
void Library::Register(Thread* thread) const {
  Zone* zone = thread->zone();
  auto isolate_group = thread->isolate_group();
  ObjectStore* object_store = isolate_group->object_store();

  // A library is "registered" in two places:
  // - A growable array mapping from index to library.
  const String& lib_url = String::Handle(zone, url());
  // Must not already be registered under this URL.
  ASSERT(Library::LookupLibrary(thread, lib_url) == Library::null());
  // The URL is expected to already carry a cached hash (LookupLibrary
  // computes it before the lookup above).
  ASSERT(lib_url.HasHash());
  GrowableObjectArray& libs =
      GrowableObjectArray::Handle(zone, object_store->libraries());
  ASSERT(!libs.IsNull());
  // The library's index is its position in the growable array.
  set_index(libs.Length());
  libs.Add(*this);

  // - A map from URL string to library.
  if (object_store->libraries_map() == Array::null()) {
    // Lazily create the lookup map on first registration.
    LibraryLookupMap map(HashTables::New<LibraryLookupMap>(16, Heap::kOld));
    object_store->set_libraries_map(map.Release());
  }

  LibraryLookupMap map(object_store->libraries_map());
  bool present = map.UpdateOrInsert(lib_url, *this);
  ASSERT(!present);
  // The table may have been grown/rehashed; store the (possibly new)
  // backing array back into the object store.
  object_store->set_libraries_map(map.Release());
}
14805
14807 const GrowableObjectArray& libs) {
14808 Zone* zone = thread->zone();
14809 auto isolate_group = thread->isolate_group();
14810 Library& lib = Library::Handle(zone);
14811 String& lib_url = String::Handle(zone);
14812
14813 LibraryLookupMap map(HashTables::New<LibraryLookupMap>(16, Heap::kOld));
14814
14815 intptr_t len = libs.Length();
14816 for (intptr_t i = 0; i < len; i++) {
14817 lib ^= libs.At(i);
14818 lib_url = lib.url();
14819 map.InsertNewOrGetValue(lib_url, lib);
14820 }
14821 // Now remember these in the isolate's object store.
14822 isolate_group->object_store()->set_libraries(libs);
14823 isolate_group->object_store()->set_libraries_map(map.Release());
14824}
14825
14827 return IsolateGroup::Current()->object_store()->async_library();
14828}
14829
14831 return IsolateGroup::Current()->object_store()->convert_library();
14832}
14833
14835 return IsolateGroup::Current()->object_store()->core_library();
14836}
14837
14839 return IsolateGroup::Current()->object_store()->collection_library();
14840}
14841
14843 return IsolateGroup::Current()->object_store()->developer_library();
14844}
14845
14847 return IsolateGroup::Current()->object_store()->ffi_library();
14848}
14849
14851 return IsolateGroup::Current()->object_store()->_internal_library();
14852}
14853
14855 return IsolateGroup::Current()->object_store()->isolate_library();
14856}
14857
14859 return IsolateGroup::Current()->object_store()->math_library();
14860}
14861
14862#if !defined(DART_PRECOMPILED_RUNTIME)
14864 return IsolateGroup::Current()->object_store()->mirrors_library();
14865}
14866#endif
14867
14869 return IsolateGroup::Current()->object_store()->native_wrappers_library();
14870}
14871
14873 return IsolateGroup::Current()->object_store()->typed_data_library();
14874}
14875
14877 return IsolateGroup::Current()->object_store()->_vmservice_library();
14878}
14879
14880const char* Library::ToCString() const {
14881 NoSafepointScope no_safepoint;
14882 const String& name = String::Handle(url());
14883 return OS::SCreate(Thread::Current()->zone(), "Library:'%s'",
14884 name.ToCString());
14885}
14886
14887LibraryPtr LibraryPrefix::GetLibrary(int index) const {
14888 if ((index >= 0) || (index < num_imports())) {
14889 const Array& imports = Array::Handle(this->imports());
14890 Namespace& import = Namespace::Handle();
14891 import ^= imports.At(index);
14892 return import.target();
14893 }
14894 return Library::null();
14895}
14896
14897void LibraryPrefix::AddImport(const Namespace& import) const {
14898 intptr_t num_current_imports = num_imports();
14899
14900 // Prefixes with deferred libraries can only contain one library.
14901 ASSERT((num_current_imports == 0) || !is_deferred_load());
14902
14903 // The library needs to be added to the list.
14904 Array& imports = Array::Handle(this->imports());
14905 const intptr_t length = (imports.IsNull()) ? 0 : imports.Length();
14906 // Grow the list if it is full.
14907 if (num_current_imports >= length) {
14908 const intptr_t new_length = length + kIncrementSize + (length >> 2);
14909 imports = Array::Grow(imports, new_length, Heap::kOld);
14910 set_imports(imports);
14911 }
14912 imports.SetAt(num_current_imports, import);
14913 set_num_imports(num_current_imports + 1);
14914}
14915
// Allocates an uninitialized LibraryPrefix in old space; the four-argument
// overload below populates its fields.
LibraryPrefixPtr LibraryPrefix::New() {
  return Object::Allocate<LibraryPrefix>(Heap::kOld);
}
14919
14920LibraryPrefixPtr LibraryPrefix::New(const String& name,
14921 const Namespace& import,
14922 bool deferred_load,
14923 const Library& importer) {
14924 const LibraryPrefix& result = LibraryPrefix::Handle(LibraryPrefix::New());
14925 result.set_name(name);
14926 result.set_num_imports(0);
14927 result.set_importer(importer);
14928 result.StoreNonPointer(&result.untag()->is_deferred_load_, deferred_load);
14929 result.set_imports(Array::Handle(Array::New(kInitialSize)));
14930 result.AddImport(import);
14931 return result.ptr();
14932}
14933
// Sets the prefix name; the value must already be an interned Symbol.
void LibraryPrefix::set_name(const String& value) const {
  ASSERT(value.IsSymbol());
  untag()->set_name(value.ptr());
}
14938
// Replaces the backing array holding this prefix's imported namespaces.
void LibraryPrefix::set_imports(const Array& value) const {
  untag()->set_imports(value.ptr());
}
14942
14943void LibraryPrefix::set_num_imports(intptr_t value) const {
14944 if (!Utils::IsUint(16, value)) {
14946 }
14947 StoreNonPointer(&untag()->num_imports_, value);
14948}
14949
// Records the library in which this prefix is declared.
void LibraryPrefix::set_importer(const Library& value) const {
  untag()->set_importer(value.ptr());
}
14953
14954const char* LibraryPrefix::ToCString() const {
14955 const String& prefix = String::Handle(name());
14956 return prefix.ToCString();
14957}
14958
14959const char* Namespace::ToCString() const {
14960 const Library& lib = Library::Handle(target());
14961 return OS::SCreate(Thread::Current()->zone(), "Namespace for library '%s'",
14962 lib.ToCString());
14963}
14964
14966 // Quick check for common case with no combinators.
14967 if (hide_names() == show_names()) {
14969 return false;
14970 }
14971 const String* plain_name = &name;
14974 } else if (Field::IsSetterName(name)) {
14976 }
14977 // Check whether the name is in the list of explicitly hidden names.
14978 if (hide_names() != Array::null()) {
14979 const Array& names = Array::Handle(hide_names());
14980 String& hidden = String::Handle();
14981 intptr_t num_names = names.Length();
14982 for (intptr_t i = 0; i < num_names; i++) {
14983 hidden ^= names.At(i);
14984 if (plain_name->Equals(hidden)) {
14985 return true;
14986 }
14987 }
14988 }
14989 // The name is not explicitly hidden. Now check whether it is in the
14990 // list of explicitly visible names, if there is one.
14991 if (show_names() != Array::null()) {
14992 const Array& names = Array::Handle(show_names());
14993 String& shown = String::Handle();
14994 intptr_t num_names = names.Length();
14995 for (intptr_t i = 0; i < num_names; i++) {
14996 shown ^= names.At(i);
14997 if (plain_name->Equals(shown)) {
14998 return false;
14999 }
15000 }
15001 // There is a list of visible names. The name we're looking for is not
15002 // contained in the list, so it is hidden.
15003 return true;
15004 }
15005 // The name is not filtered out.
15006 return false;
15007}
15008
15009// Look up object with given name in library and filter out hidden
15010// names. Also look up getters and setters.
15012 ZoneGrowableArray<intptr_t>* trail) const {
15013 Zone* zone = Thread::Current()->zone();
15014 const Library& lib = Library::Handle(zone, target());
15015
15016 if (trail != nullptr) {
15017 // Look for cycle in reexport graph.
15018 for (int i = 0; i < trail->length(); i++) {
15019 if (trail->At(i) == lib.index()) {
15020 for (int j = i + 1; j < trail->length(); j++) {
15021 (*trail)[j] = -1;
15022 }
15023 return Object::null();
15024 }
15025 }
15026 }
15027
15029
15030 intptr_t ignore = 0;
15031 // Lookup the name in the library's symbols.
15032 Object& obj = Object::Handle(zone, lib.LookupEntry(name, &ignore));
15034 (obj.IsNull() || obj.IsLibraryPrefix())) {
15035 String& accessor_name = String::Handle(zone);
15036 accessor_name = Field::LookupGetterSymbol(name);
15037 if (!accessor_name.IsNull()) {
15038 obj = lib.LookupEntry(accessor_name, &ignore);
15039 }
15040 if (obj.IsNull()) {
15041 accessor_name = Field::LookupSetterSymbol(name);
15042 if (!accessor_name.IsNull()) {
15043 obj = lib.LookupEntry(accessor_name, &ignore);
15044 }
15045 }
15046 }
15047
15048 // Library prefixes are not exported.
15049 if (obj.IsNull() || obj.IsLibraryPrefix()) {
15050 // Lookup in the re-exported symbols.
15051 obj = lib.LookupReExport(name, trail);
15052 if (obj.IsNull() && !Field::IsSetterName(name)) {
15053 // LookupReExport() only returns objects that match the given name.
15054 // If there is no field/func/getter, try finding a setter.
15055 const String& setter_name =
15057 if (!setter_name.IsNull()) {
15058 obj = lib.LookupReExport(setter_name, trail);
15059 }
15060 }
15061 }
15062 if (obj.IsNull() || HidesName(name) || obj.IsLibraryPrefix()) {
15063 return Object::null();
15064 }
15065 return obj.ptr();
15066}
15067
15068NamespacePtr Namespace::New() {
15070 return Object::Allocate<Namespace>(Heap::kOld);
15071}
15072
15073NamespacePtr Namespace::New(const Library& target,
15074 const Array& show_names,
15075 const Array& hide_names,
15076 const Library& owner) {
15077 ASSERT(show_names.IsNull() || (show_names.Length() > 0));
15078 ASSERT(hide_names.IsNull() || (hide_names.Length() > 0));
15079 const Namespace& result = Namespace::Handle(Namespace::New());
15080 result.untag()->set_target(target.ptr());
15081 result.untag()->set_show_names(show_names.ptr());
15082 result.untag()->set_hide_names(hide_names.ptr());
15083 result.untag()->set_owner(owner.ptr());
15084 return result.ptr();
15085}
15086
// Allocates an uninitialized KernelProgramInfo in old space; the
// multi-argument overload below populates its fields.
KernelProgramInfoPtr KernelProgramInfo::New() {
  return Object::Allocate<KernelProgramInfo>(Heap::kOld);
}
15090
15091KernelProgramInfoPtr KernelProgramInfo::New(
15092 const TypedDataBase& kernel_component,
15093 const TypedDataView& string_data,
15094 const TypedDataView& metadata_payloads,
15095 const TypedDataView& metadata_mappings,
15096 const TypedDataView& constants_table,
15097 const TypedData& string_offsets,
15098 const TypedData& canonical_names,
15099 const Array& scripts,
15100 const Array& libraries_cache,
15101 const Array& classes_cache) {
15102 ASSERT(kernel_component.IsExternalOrExternalView());
15103 ASSERT(string_data.IsExternalOrExternalView());
15104 ASSERT(metadata_payloads.IsExternalOrExternalView());
15105 ASSERT(metadata_mappings.IsExternalOrExternalView());
15106 ASSERT(constants_table.IsExternalOrExternalView());
15107
15108 const auto& info = KernelProgramInfo::Handle(KernelProgramInfo::New());
15109 info.untag()->set_kernel_component(kernel_component.ptr());
15110 info.untag()->set_string_offsets(string_offsets.ptr());
15111 info.untag()->set_string_data(string_data.ptr());
15112 info.untag()->set_canonical_names(canonical_names.ptr());
15113 info.untag()->set_metadata_payloads(metadata_payloads.ptr());
15114 info.untag()->set_metadata_mappings(metadata_mappings.ptr());
15115 info.untag()->set_scripts(scripts.ptr());
15116 info.untag()->set_constants_table(constants_table.ptr());
15117 info.untag()->set_libraries_cache(libraries_cache.ptr());
15118 info.untag()->set_classes_cache(classes_cache.ptr());
15119 return info.ptr();
15120}
15121
// Fixed debug string; instances are not distinguished when printed.
const char* KernelProgramInfo::ToCString() const {
  return "[KernelProgramInfo]";
}
15125
15126ScriptPtr KernelProgramInfo::ScriptAt(intptr_t index) const {
15127 const Array& all_scripts = Array::Handle(scripts());
15128 ObjectPtr script = all_scripts.At(index);
15129 return Script::RawCast(script);
15130}
15131
15133 untag()->set_scripts(scripts.ptr());
15134}
15135
// Stores the constants array for this kernel program.
void KernelProgramInfo::set_constants(const Array& constants) const {
  untag()->set_constants(constants.ptr());
}
15139
15141 intptr_t library_index) const {
15142 const auto& blob = TypedDataBase::Handle(kernel_component());
15143 const intptr_t library_count =
15144 Utils::BigEndianToHost32(LoadUnaligned(reinterpret_cast<uint32_t*>(
15145 blob.DataAddr(blob.LengthInBytes() - 2 * 4))));
15146 const intptr_t library_start =
15147 Utils::BigEndianToHost32(LoadUnaligned(reinterpret_cast<uint32_t*>(
15148 blob.DataAddr(blob.LengthInBytes() -
15149 (2 + 1 + (library_count - library_index)) * 4))));
15150 return library_start;
15151}
15152
15154 intptr_t library_index) const {
15155 const intptr_t start_offset = KernelLibraryStartOffset(library_index);
15156 const intptr_t end_offset = KernelLibraryEndOffset(library_index);
15157 const auto& component = TypedDataBase::Handle(kernel_component());
15158 return component.ViewFromTo(start_offset, end_offset);
15159}
15160
15162 intptr_t library_index) const {
15163 const auto& blob = TypedDataBase::Handle(kernel_component());
15164 const intptr_t library_count =
15165 Utils::BigEndianToHost32(LoadUnaligned(reinterpret_cast<uint32_t*>(
15166 blob.DataAddr(blob.LengthInBytes() - 2 * 4))));
15167 const intptr_t library_end = Utils::BigEndianToHost32(
15168 LoadUnaligned(reinterpret_cast<uint32_t*>(blob.DataAddr(
15169 blob.LengthInBytes() - (2 + (library_count - library_index)) * 4))));
15170 return library_end;
15171}
15172
15174 untag()->set_constants_table(value.ptr());
15175}
15176
15178 untag()->set_libraries_cache(cache.ptr());
15179}
15180
15182 const Smi& name_index) const {
15187 Array& data = thread->ArrayHandle();
15188 Library& result = thread->LibraryHandle();
15189 Object& key = thread->ObjectHandle();
15190 Smi& value = thread->SmiHandle();
15191 {
15195 ASSERT(!data.IsNull());
15197 result ^= table.GetOrNull(name_index);
15198 table.Release();
15199 }
15200 return result.ptr();
15201}
15202
15204 const Smi& name_index,
15205 const Library& lib) const {
15210 Array& data = thread->ArrayHandle();
15211 Library& result = thread->LibraryHandle();
15212 Object& key = thread->ObjectHandle();
15213 Smi& value = thread->SmiHandle();
15214 {
15218 ASSERT(!data.IsNull());
15220 result ^= table.InsertOrGetValue(name_index, lib);
15221 set_libraries_cache(table.Release());
15222 }
15223 return result.ptr();
15224}
15225
15227 untag()->set_classes_cache(cache.ptr());
15228}
15229
15231 const Smi& name_index) const {
15236 Array& data = thread->ArrayHandle();
15237 Class& result = thread->ClassHandle();
15238 Object& key = thread->ObjectHandle();
15239 Smi& value = thread->SmiHandle();
15240 {
15243 data = classes_cache();
15244 ASSERT(!data.IsNull());
15246 result ^= table.GetOrNull(name_index);
15247 table.Release();
15248 }
15249 return result.ptr();
15250}
15251
15253 const Smi& name_index,
15254 const Class& klass) const {
15259 Array& data = thread->ArrayHandle();
15260 Class& result = thread->ClassHandle();
15261 Object& key = thread->ObjectHandle();
15262 Smi& value = thread->SmiHandle();
15263 {
15266 data = classes_cache();
15267 ASSERT(!data.IsNull());
15269 result ^= table.InsertOrGetValue(name_index, klass);
15270 set_classes_cache(table.Release());
15271 }
15272 return result.ptr();
15273}
15274
15275ErrorPtr Library::CompileAll(bool ignore_error /* = false */) {
15276 Thread* thread = Thread::Current();
15277 Zone* zone = thread->zone();
15278 Error& error = Error::Handle(zone);
15280 IsolateGroup::Current()->object_store()->libraries());
15281 Library& lib = Library::Handle(zone);
15282 Class& cls = Class::Handle(zone);
15283 for (int i = 0; i < libs.Length(); i++) {
15284 lib ^= libs.At(i);
15286 while (it.HasNext()) {
15287 cls = it.GetNextClass();
15288 error = cls.EnsureIsFinalized(thread);
15289 if (!error.IsNull()) {
15290 if (ignore_error) continue;
15291 return error.ptr();
15292 }
15294 if (!error.IsNull()) {
15295 if (ignore_error) continue;
15296 return error.ptr();
15297 }
15298 }
15299 }
15300
15301 Object& result = Object::Handle(zone);
15303 if (!func.HasCode()) {
15304 result = Compiler::CompileFunction(thread, func);
15305 if (result.IsError()) {
15306 error = Error::Cast(result).ptr();
15307 return false; // Stop iteration.
15308 }
15309 }
15310 return true; // Continue iteration.
15311 });
15312 return error.ptr();
15313}
15314
15315#if !defined(DART_PRECOMPILED_RUNTIME)
15316
15317ErrorPtr Library::FinalizeAllClasses() {
15318 Thread* thread = Thread::Current();
15319 ASSERT(thread->IsDartMutatorThread());
15320 Zone* zone = thread->zone();
15321 Error& error = Error::Handle(zone);
15322 const GrowableObjectArray& libs = GrowableObjectArray::Handle(
15323 IsolateGroup::Current()->object_store()->libraries());
15324 Library& lib = Library::Handle(zone);
15325 Class& cls = Class::Handle(zone);
15326 for (int i = 0; i < libs.Length(); i++) {
15327 lib ^= libs.At(i);
15328 if (!lib.Loaded()) {
15329 String& uri = String::Handle(zone, lib.url());
15330 String& msg = String::Handle(
15331 zone,
15332 String::NewFormatted("Library '%s' is not loaded. "
15333 "Did you forget to call Dart_FinalizeLoading?",
15334 uri.ToCString()));
15335 return ApiError::New(msg);
15336 }
15337 ClassDictionaryIterator it(lib, ClassDictionaryIterator::kIteratePrivate);
15338 while (it.HasNext()) {
15339 cls = it.GetNextClass();
15340 error = cls.EnsureIsFinalized(thread);
15341 if (!error.IsNull()) {
15342 return error.ptr();
15343 }
15344 }
15345 }
15346 return Error::null();
15347}
15348
15349#endif // !defined(DART_PRECOMPILED_RUNTIME)
15350
15351// Return Function::null() if function does not exist in libs.
15352FunctionPtr Library::GetFunction(const GrowableArray<Library*>& libs,
15353 const char* class_name,
15354 const char* function_name) {
15355 Thread* thread = Thread::Current();
15356 Zone* zone = thread->zone();
15357 Function& func = Function::Handle(zone);
15358 String& class_str = String::Handle(zone);
15359 String& func_str = String::Handle(zone);
15360 Class& cls = Class::Handle(zone);
15361 for (intptr_t l = 0; l < libs.length(); l++) {
15362 const Library& lib = *libs[l];
15363 if (strcmp(class_name, "::") == 0) {
15364 cls = lib.toplevel_class();
15365 } else {
15366 class_str = String::New(class_name);
15367 cls = lib.LookupClassAllowPrivate(class_str);
15368 }
15369 if (!cls.IsNull()) {
15370 if (cls.EnsureIsFinalized(thread) == Error::null()) {
15371 func_str = String::New(function_name);
15372 if (function_name[0] == '.') {
15373 func_str = String::Concat(class_str, func_str);
15374 }
15375 func = cls.LookupFunctionAllowPrivate(func_str);
15376 }
15377 }
15378 if (!func.IsNull()) {
15379 return func.ptr();
15380 }
15381 }
15382 return Function::null();
15383}
15384
15385ObjectPtr Library::GetFunctionClosure(const String& name) const {
15386 Thread* thread = Thread::Current();
15387 Zone* zone = thread->zone();
15388 Function& func = Function::Handle(zone, LookupFunctionAllowPrivate(name));
15389 if (func.IsNull()) {
15390 // Check whether the function is reexported into the library.
15391 const Object& obj = Object::Handle(zone, LookupReExport(name));
15392 if (obj.IsFunction()) {
15393 func ^= obj.ptr();
15394 } else {
15395 // Check if there is a getter of 'name', in which case invoke it
15396 // and return the result.
15397 const String& getter_name = String::Handle(zone, Field::GetterName(name));
15398 func = LookupFunctionAllowPrivate(getter_name);
15399 if (func.IsNull()) {
15400 return Closure::null();
15401 }
15402 // Invoke the getter and return the result.
15403 return DartEntry::InvokeFunction(func, Object::empty_array());
15404 }
15405 }
15406 func = func.ImplicitClosureFunction();
15407 return func.ImplicitStaticClosure();
15408}
15409
15410#if defined(DEBUG) && !defined(DART_PRECOMPILED_RUNTIME)
// Verifies that every VM-recognized method and factory still has the source
// fingerprint recorded for it; FATALs on any mismatch or missing function.
// NOTE(review): comments cannot be placed inside the backslash-continued
// macro bodies below without breaking the line continuations.
void Library::CheckFunctionFingerprints() {
  GrowableArray<Library*> all_libs;
  Function& func = Function::Handle();
  bool fingerprints_match = true;

#define CHECK_FINGERPRINTS_INNER(class_name, function_name, dest, fp, kind) \
  func = GetFunction(all_libs, #class_name, #function_name); \
  if (func.IsNull()) { \
    fingerprints_match = false; \
    OS::PrintErr("Function not found %s.%s\n", #class_name, #function_name); \
  } else { \
    fingerprints_match = \
        func.CheckSourceFingerprint(fp, kind) && fingerprints_match; \
  }

// Each variant passes a different |kind| tag through to
// CheckSourceFingerprint.
#define CHECK_FINGERPRINTS(class_name, function_name, dest, fp) \
  CHECK_FINGERPRINTS_INNER(class_name, function_name, dest, fp, nullptr)
#define CHECK_FINGERPRINTS_ASM_INTRINSIC(class_name, function_name, dest, fp) \
  CHECK_FINGERPRINTS_INNER(class_name, function_name, dest, fp, "asm-intrinsic")
#define CHECK_FINGERPRINTS_GRAPH_INTRINSIC(class_name, function_name, dest, \
                                           fp) \
  CHECK_FINGERPRINTS_INNER(class_name, function_name, dest, fp, \
                           "graph-intrinsic")
#define CHECK_FINGERPRINTS_OTHER(class_name, function_name, dest, fp) \
  CHECK_FINGERPRINTS_INNER(class_name, function_name, dest, fp, "other")

  // Core-library-only lists are checked while |all_libs| holds only
  // dart:core.
  all_libs.Add(&Library::ZoneHandle(Library::CoreLibrary()));
  CORE_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS_ASM_INTRINSIC);
  CORE_INTEGER_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS_ASM_INTRINSIC);
  GRAPH_CORE_INTRINSICS_LIST(CHECK_FINGERPRINTS_GRAPH_INTRINSIC);

  // The remaining lists may reference methods from any bootstrap library.
  all_libs.Add(&Library::ZoneHandle(Library::AsyncLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::MathLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::TypedDataLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::CollectionLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::ConvertLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::InternalLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::IsolateLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::FfiLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::NativeWrappersLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::DeveloperLibrary()));
  INTERNAL_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS_ASM_INTRINSIC);
  OTHER_RECOGNIZED_LIST(CHECK_FINGERPRINTS_OTHER);
  POLYMORPHIC_TARGET_LIST(CHECK_FINGERPRINTS);
  GRAPH_TYPED_DATA_INTRINSICS_LIST(CHECK_FINGERPRINTS_GRAPH_INTRINSIC);

  all_libs.Clear();
  all_libs.Add(&Library::ZoneHandle(Library::DeveloperLibrary()));
  DEVELOPER_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS_ASM_INTRINSIC);

#undef CHECK_FINGERPRINTS_INNER
#undef CHECK_FINGERPRINTS
#undef CHECK_FINGERPRINTS_ASM_INTRINSIC
#undef CHECK_FINGERPRINTS_GRAPH_INTRINSIC
#undef CHECK_FINGERPRINTS_OTHER

// Factory fingerprints are checked without a |kind| tag.
#define CHECK_FACTORY_FINGERPRINTS(symbol, class_name, factory_name, cid, fp) \
  func = GetFunction(all_libs, #class_name, #factory_name); \
  if (func.IsNull()) { \
    fingerprints_match = false; \
    OS::PrintErr("Function not found %s.%s\n", #class_name, #factory_name); \
  } else { \
    fingerprints_match = \
        func.CheckSourceFingerprint(fp) && fingerprints_match; \
  }

  all_libs.Clear();
  all_libs.Add(&Library::ZoneHandle(Library::CoreLibrary()));
  all_libs.Add(&Library::ZoneHandle(Library::TypedDataLibrary()));
  RECOGNIZED_LIST_FACTORY_LIST(CHECK_FACTORY_FINGERPRINTS);

#undef CHECK_FACTORY_FINGERPRINTS

  if (!fingerprints_match) {
    // Private names are mangled. Mangling depends on Library::private_key_.
    // If registering a new bootstrap library, add at the end.
    FATAL(
        "FP mismatch while recognizing methods. If the behavior of "
        "these functions has changed, then changes are also needed in "
        "the VM's compiler. Otherwise the fingerprint can simply be "
        "updated in recognized_methods_list.h\n");
  }
}
15494#endif // defined(DEBUG) && !defined(DART_PRECOMPILED_RUNTIME).
15495
// Allocates a new Instructions object of |size| payload bytes in code space.
InstructionsPtr Instructions::New(intptr_t size,
                                  bool has_monomorphic_entry,
                                  bool should_be_aligned) {
  ASSERT(size >= 0);
  ASSERT(Object::instructions_class() != Class::null());
  if (size < 0 || size > kMaxElements) {
    // This should be caught before we reach here.
    FATAL("Fatal error in Instructions::New: invalid size %" Pd "\n", size);
  }
  Instructions& result = Instructions::Handle();
  {
    auto raw = Object::Allocate<Instructions>(Heap::kCode, size);
    NoSafepointScope no_safepoint;
    result = raw;
    result.SetSize(size);
    // Set this within the NoSafepointScope as well since it is contained in
    // the same bitfield as the size.
    result.SetHasMonomorphicEntry(has_monomorphic_entry);
    result.SetShouldBeAligned(should_be_aligned);
  }
  // A freshly allocated object must not have statistics attached yet.
  ASSERT(result.stats() == nullptr);
  return result.ptr();
}
15519
// Fixed debug string; individual Instructions are not distinguished.
const char* Instructions::ToCString() const {
  return "Instructions";
}
15523
// Returns the CodeStatistics attached to this object through the heap peer
// table (precompiler builds only); nullptr in all other configurations.
CodeStatistics* Instructions::stats() const {
#if defined(DART_PRECOMPILER)
  return reinterpret_cast<CodeStatistics*>(
      Thread::Current()->heap()->GetPeer(ptr()));
#else
  return nullptr;
#endif
}
15532
// Attaches |stats| to this object through the heap peer table (precompiler
// builds only); a no-op elsewhere.
void Instructions::set_stats(CodeStatistics* stats) const {
#if defined(DART_PRECOMPILER)
  Thread::Current()->heap()->SetPeer(ptr(), stats);
#endif
}
15538
// Fixed debug string; individual sections are not distinguished.
const char* InstructionsSection::ToCString() const {
  return "InstructionsSection";
}
15542
// Stores the entry count; a raw (non-heap-reference) field, hence
// StoreNonPointer.
void InstructionsTable::set_length(intptr_t value) const {
  StoreNonPointer(&untag()->length_, value);
}
15546
// Stores the lowest PC covered by this table (raw word, not a heap
// reference).
void InstructionsTable::set_start_pc(uword value) const {
  StoreNonPointer(&untag()->start_pc_, value);
}
15550
// Stores the upper-bound PC of this table (raw word, not a heap reference).
void InstructionsTable::set_end_pc(uword value) const {
  StoreNonPointer(&untag()->end_pc_, value);
}
15554
// Replaces the array of Code objects owned by this table.
void InstructionsTable::set_code_objects(const Array& value) const {
  untag()->set_code_objects(value.ptr());
}
15558
// Stores the address of the read-only data payload, reinterpreted as a
// pointer to its Data layout.
void InstructionsTable::set_rodata(uword value) const {
  StoreNonPointer(
      &untag()->rodata_,
      reinterpret_cast<const UntaggedInstructionsTable::Data*>(value));
}
15564
15565InstructionsTablePtr InstructionsTable::New(intptr_t length,
15566 uword start_pc,
15567 uword end_pc,
15568 uword rodata) {
15569 ASSERT(Object::instructions_table_class() != Class::null());
15570 ASSERT(length >= 0);
15571 ASSERT(start_pc <= end_pc);
15572 auto* const zone = Thread::Current()->zone();
15573 const Array& code_objects =
15574 (length == 0) ? Object::empty_array()
15575 : Array::Handle(zone, Array::New(length, Heap::kOld));
15576 const auto& result = InstructionsTable::Handle(
15577 zone, Object::Allocate<InstructionsTable>(Heap::kOld));
15578 result.set_code_objects(code_objects);
15579 result.set_length(length);
15580 result.set_start_pc(start_pc);
15581 result.set_end_pc(end_pc);
15582 result.set_rodata(rodata);
15583 return result.ptr();
15584}
15585
// Writes |code| into slot |index| of the code-objects array, bypassing
// handle allocation by going straight through the untagged layout.
void InstructionsTable::SetCodeAt(intptr_t index, CodePtr code) const {
  ASSERT((0 <= index) &&
         (index < Smi::Value(code_objects()->untag()->length())));
  code_objects()->untag()->set_element(index, code);
}
15591
15592bool InstructionsTable::ContainsPc(InstructionsTablePtr table, uword pc) {
15593 return (InstructionsTable::start_pc(table) <= pc) &&
15594 (pc < InstructionsTable::end_pc(table));
15595}
15596
// Converts an absolute |pc| inside |table| into a 32-bit offset relative to
// the table's start pc. The round-trip assert guards against the offset
// being truncated by the narrowing cast.
uint32_t InstructionsTable::ConvertPcToOffset(InstructionsTablePtr table,
                                              uword pc) {
  ASSERT(InstructionsTable::ContainsPc(table, pc));
  const uint32_t pc_offset =
      static_cast<uint32_t>(pc - InstructionsTable::start_pc(table));
  ASSERT(InstructionsTable::start_pc(table) + pc_offset == pc);  // No overflow.
  return pc_offset;
}
15605
// Binary-searches the rodata entries (sorted by ascending pc_offset) for the
// entry whose range [entries[i].pc_offset, entries[i+1].pc_offset) contains
// |pc|, starting the search at |start_index|. Returns the entry index, or -1
// if |pc| is outside the table or below every candidate entry.
intptr_t InstructionsTable::FindEntry(InstructionsTablePtr table,
                                      uword pc,
                                      intptr_t start_index /* = 0 */) {
  // This can run in the middle of GC and must not allocate handles.
  NoSafepointScope no_safepoint;
  if (!InstructionsTable::ContainsPc(table, pc)) return -1;
  const uint32_t pc_offset = InstructionsTable::ConvertPcToOffset(table, pc);

  const auto rodata = table.untag()->rodata_;
  const auto entries = rodata->entries();
  intptr_t lo = start_index;
  intptr_t hi = rodata->length - 1;
  while (lo <= hi) {
    // Midpoint biased upward so the (mid != hi) probe below stays in range.
    intptr_t mid = (hi - lo + 1) / 2 + lo;
    ASSERT(mid >= lo);
    ASSERT(mid <= hi);
    if (pc_offset < entries[mid].pc_offset) {
      hi = mid - 1;
    } else if ((mid != hi) && (pc_offset >= entries[mid + 1].pc_offset)) {
      lo = mid + 1;
    } else {
      // entries[mid].pc_offset <= pc_offset < entries[mid + 1].pc_offset
      // (or mid is the last entry).
      return mid;
    }
  }
  return -1;
}
15632
15633const UntaggedCompressedStackMaps::Payload*
15634InstructionsTable::GetCanonicalStackMap(InstructionsTablePtr table) {
15635 const auto rodata = table.untag()->rodata_;
15636 return rodata->canonical_stack_map_entries_offset != 0
15637 ? rodata->StackMapAt(rodata->canonical_stack_map_entries_offset)
15638 : nullptr;
15639}
15640
15641const UntaggedCompressedStackMaps::Payload* InstructionsTable::FindStackMap(
15642 InstructionsTablePtr table,
15643 uword pc,
15644 uword* start_pc) {
15645 // This can run in the middle of GC and must not allocate handles.
15646 NoSafepointScope no_safepoint;
15647 const intptr_t idx = FindEntry(table, pc);
15648 if (idx != -1) {
15649 const auto rodata = table.untag()->rodata_;
15650 const auto entries = rodata->entries();
15651 *start_pc = InstructionsTable::start_pc(table) + entries[idx].pc_offset;
15652 return rodata->StackMapAt(entries[idx].stack_map_offset);
15653 }
15654 return nullptr;
15655}
15656
15657CodePtr InstructionsTable::FindCode(InstructionsTablePtr table, uword pc) {
15658 // This can run in the middle of GC and must not allocate handles.
15659 NoSafepointScope no_safepoint;
15660 if (!InstructionsTable::ContainsPc(table, pc)) return Code::null();
15661
15662 const auto rodata = table.untag()->rodata_;
15663
15664 const auto pc_offset = InstructionsTable::ConvertPcToOffset(table, pc);
15665
15666 if (pc_offset <= rodata->entries()[rodata->first_entry_with_code].pc_offset) {
15667 return StubCode::UnknownDartCode().ptr();
15668 }
15669
15670 const auto idx =
15671 FindEntry(table, pc, table.untag()->rodata_->first_entry_with_code);
15672 if (idx != -1) {
15673 const intptr_t code_index = idx - rodata->first_entry_with_code;
15674 ASSERT(code_index >= 0);
15675 ASSERT(code_index <
15676 Smi::Value(table.untag()->code_objects()->untag()->length()));
15678 table.untag()->code_objects()->untag()->element(code_index);
15679 ASSERT(result->IsCode());
15680 // Note: can't use Code::RawCast(...) here because it allocates handles
15681 // in DEBUG mode.
15682 return static_cast<CodePtr>(result);
15683 }
15684
15685 return Code::null();
15686}
15687
// Returns the absolute entry pc of the entry at |code_index|: the table's
// start pc plus the entry's recorded pc offset.
uword InstructionsTable::EntryPointAt(intptr_t code_index) const {
  ASSERT(0 <= code_index);
  ASSERT(code_index < static_cast<intptr_t>(rodata()->length));
  return InstructionsTable::start_pc(this->ptr()) +
         rodata()->entries()[code_index].pc_offset;
}
15694
// InstructionsTable has no concise printable payload; name the class.
const char* InstructionsTable::ToCString() const {
  return "InstructionsTable";
}
15698
// Allocates an ObjectPool with |len| entries in old space. Entries come back
// from allocation as patchable immediate zeros (set by InitializeObject());
// the DEBUG block re-verifies that expectation.
ObjectPoolPtr ObjectPool::New(intptr_t len) {
  ASSERT(Object::object_pool_class() != Class::null());
  if (len < 0 || len > kMaxElements) {
    // This should be caught before we reach here.
    FATAL("Fatal error in ObjectPool::New: invalid length %" Pd "\n", len);
  }
  // We only verify the entry bits in DEBUG, so only allocate a handle there.
  DEBUG_ONLY(auto& result = ObjectPool::Handle());
  auto raw = Object::Allocate<ObjectPool>(Heap::kOld, len);
  NoSafepointScope no_safepoint;
  raw->untag()->length_ = len;
#if defined(DEBUG)
  result = raw;
  for (intptr_t i = 0; i < len; i++) {
    // Verify that InitializeObject() already set the payload as expected.
    ASSERT_EQUAL(result.PatchableAt(i), ObjectPool::Patchability::kPatchable);
    ASSERT_EQUAL(result.TypeAt(i), ObjectPool::EntryType::kImmediate);
    ASSERT_EQUAL(result.RawValueAt(i), 0);
  }
#endif
  return raw;
}
15721
15722#if !defined(DART_PRECOMPILED_RUNTIME)
// Materializes an ObjectPool from the compiler's ObjectPoolBuilder, copying
// each builder entry's type/patchability/snapshot flags plus either its
// tagged object or raw immediate. An empty builder maps to the canonical
// empty pool.
ObjectPoolPtr ObjectPool::NewFromBuilder(
    const compiler::ObjectPoolBuilder& builder) {
  const intptr_t len = builder.CurrentLength();
  if (len == 0) {
    return Object::empty_object_pool().ptr();
  }
  const ObjectPool& result = ObjectPool::Handle(ObjectPool::New(len));
  for (intptr_t i = 0; i < len; i++) {
    auto entry = builder.EntryAt(i);
    auto type = entry.type();
    auto patchable = entry.patchable();
    auto snapshot_behavior = entry.snapshot_behavior();
    // Set the entry metadata before the payload so the payload write is
    // interpreted with the correct type bits.
    result.SetTypeAt(i, type, patchable, snapshot_behavior);
    if (type == EntryType::kTaggedObject) {
      result.SetObjectAt(i, *entry.obj_);
    } else {
      // Wide immediates cannot be represented in a single pool slot on
      // 32-bit targets (64-bit) or any target (128-bit).
#if defined(TARGET_ARCH_IS_32_BIT)
      ASSERT(type != EntryType::kImmediate64);
#endif
      ASSERT(type != EntryType::kImmediate128);
      result.SetRawValueAt(i, entry.imm_);
    }
  }
  return result.ptr();
}
15748
// Inverse of NewFromBuilder: replays this pool's entries into an empty
// ObjectPoolBuilder, preserving entry type, patchability, and snapshot
// behavior for each slot.
void ObjectPool::CopyInto(compiler::ObjectPoolBuilder* builder) const {
  ASSERT(builder->CurrentLength() == 0);
  for (intptr_t i = 0; i < Length(); i++) {
    auto type = TypeAt(i);
    auto patchable = PatchableAt(i);
    auto snapshot_behavior = SnapshotBehaviorAt(i);
    switch (type) {
      case compiler::ObjectPoolBuilderEntry::kTaggedObject: {
        // Objects must be zone-allocated handles since the builder keeps
        // the pointer.
        compiler::ObjectPoolBuilderEntry entry(&Object::ZoneHandle(ObjectAt(i)),
                                               patchable, snapshot_behavior);
        builder->AddObject(entry);
        break;
      }
      case compiler::ObjectPoolBuilderEntry::kImmediate:
      case compiler::ObjectPoolBuilderEntry::kNativeFunction: {
        compiler::ObjectPoolBuilderEntry entry(RawValueAt(i), type, patchable,
                                               snapshot_behavior);
        builder->AddObject(entry);
        break;
      }
      default:
        UNREACHABLE();
    }
  }
  ASSERT(builder->CurrentLength() == Length());
}
15775#endif
15776
// Short summary string: class name plus entry count, zone-allocated.
const char* ObjectPool::ToCString() const {
  Zone* zone = Thread::Current()->zone();
  return zone->PrintToString("ObjectPool len:%" Pd, Length());
}
15781
// Dumps every pool entry to the debug log, one line per slot, formatted with
// the architecture-appropriate PP-relative addressing syntax. Native
// function entries are symbolized when possible; other raw values print as
// hex.
void ObjectPool::DebugPrint() const {
  THR_Print("ObjectPool len:%" Pd " {\n", Length());
  for (intptr_t i = 0; i < Length(); i++) {
    // In AOT runtime the host layout is the target layout; in JIT use the
    // compiler's notion of the target offset.
#if defined(DART_PRECOMPILED_RUNTIME)
    intptr_t offset = ObjectPool::element_offset(i);
#else
    intptr_t offset = compiler::target::ObjectPool::element_offset(i);
#endif
#if defined(TARGET_ARCH_RISCV32) || defined(TARGET_ARCH_RISCV64)
    THR_Print("  %" Pd "(pp)  ", offset);  // PP is untagged
#elif defined(TARGET_ARCH_ARM64)
    THR_Print("  [pp, #%" Pd "]  ", offset);  // PP is untagged
#elif defined(TARGET_ARCH_ARM32)
    THR_Print("  [pp, #%" Pd "]  ", offset - kHeapObjectTag);  // PP is tagged
#else
    THR_Print("  [pp+0x%" Px "]  ", offset - kHeapObjectTag);  // PP is tagged
#endif
    if (TypeAt(i) == EntryType::kTaggedObject) {
      const Object& obj = Object::Handle(ObjectAt(i));
      THR_Print("%s (obj)\n", obj.ToCString());
    } else if (TypeAt(i) == EntryType::kNativeFunction) {
      uword pc = RawValueAt(i);
      uintptr_t start = 0;
      char* name = NativeSymbolResolver::LookupSymbolName(pc, &start);
      char* dso_name;
      uword dso_base;
      if (name != nullptr) {
        THR_Print("%s (native function)\n", name);
        NativeSymbolResolver::FreeSymbolName(name);
      } else if (NativeSymbolResolver::LookupSharedObject(pc, &dso_base,
                                                          &dso_name)) {
        // No symbol: fall back to "<dso>+<offset>" form.
        uword dso_offset = pc - dso_base;
        THR_Print("%s+0x%" Px " (native function)\n", dso_name, dso_offset);
        NativeSymbolResolver::FreeSymbolName(dso_name);
      } else {
        THR_Print("0x%" Px " (native function)\n", pc);
      }
    } else {
      THR_Print("0x%" Px " (raw)\n", RawValueAt(i));
    }
  }
  THR_Print("}\n");
}
15825
// Size in bytes of the delta-encoded descriptor data.
intptr_t PcDescriptors::Length() const {
  return untag()->length_;
}
15829
// Records the data length (non-GC-visible field).
void PcDescriptors::SetLength(intptr_t value) const {
  StoreNonPointer(&untag()->length_, value);
}
15833
// Copies |size| bytes of encoded descriptor data into this object's payload.
// Performed under NoSafepointScope so the raw payload pointer stays valid.
void PcDescriptors::CopyData(const void* bytes, intptr_t size) {
  NoSafepointScope no_safepoint;
  uint8_t* data = UnsafeMutableNonPointer(&untag()->data()[0]);
  // We're guaranteed these memory spaces do not overlap.
  memcpy(data, bytes, size);  // NOLINT
}
15840
// Allocates a PcDescriptors object in old space and fills it with |size|
// bytes of already-delta-encoded data.
PcDescriptorsPtr PcDescriptors::New(const void* delta_encoded_data,
                                    intptr_t size) {
  ASSERT(Object::pc_descriptors_class() != Class::null());
  Thread* thread = Thread::Current();
  PcDescriptors& result = PcDescriptors::Handle(thread->zone());
  {
    auto raw = Object::Allocate<PcDescriptors>(Heap::kOld, size);
    // Keep the freshly allocated object from moving while its header
    // fields are initialized.
    NoSafepointScope no_safepoint;
    result = raw;
    result.SetLength(size);
  }
  result.CopyData(delta_encoded_data, size);
  return result.ptr();
}
15855
// Allocates an uninitialized PcDescriptors object of |length| bytes in old
// space; the caller is responsible for filling the payload.
PcDescriptorsPtr PcDescriptors::New(intptr_t length) {
  ASSERT(Object::pc_descriptors_class() != Class::null());
  Thread* thread = Thread::Current();
  PcDescriptors& result = PcDescriptors::Handle(thread->zone());
  {
    auto raw = Object::Allocate<PcDescriptors>(Heap::kOld, length);
    NoSafepointScope no_safepoint;
    result = raw;
    result.SetLength(length);
  }
  return result.ptr();
}
15868
// Human-readable name for a descriptor kind, used by WriteToBuffer below.
// kAnyKind is a query mask, never a stored kind, hence UNREACHABLE.
const char* PcDescriptors::KindAsStr(UntaggedPcDescriptors::Kind kind) {
  switch (kind) {
    case UntaggedPcDescriptors::kDeopt:
      return "deopt        ";
    case UntaggedPcDescriptors::kIcCall:
      return "ic-call";
    case UntaggedPcDescriptors::kUnoptStaticCall:
      return "unopt-call";
    case UntaggedPcDescriptors::kRuntimeCall:
      return "runtime-call";
    case UntaggedPcDescriptors::kOsrEntry:
      return "osr-entry";
    case UntaggedPcDescriptors::kRewind:
      return "rewind";
    case UntaggedPcDescriptors::kBSSRelocation:
      return "bss reloc";
    case UntaggedPcDescriptors::kOther:
      return "other";
    case UntaggedPcDescriptors::kAnyKind:
      UNREACHABLE();
      break;
  }
  UNREACHABLE();
  return "";
}
15894
// Writes a tabular dump of every descriptor to |buffer|, with pcs printed
// relative to |base|.
void PcDescriptors::WriteToBuffer(BaseTextBuffer* buffer, uword base) const {
  // 4 bits per hex digit.
  const int addr_width = kBitsPerWord / 4;
  // "*" in a printf format specifier tells it to read the field width from
  // the printf argument list.
  buffer->Printf(
      "%-*s  kind    deopt-id  tok-ix    try-ix  yield-idx\n",
      addr_width, "pc");
  Iterator iter(*this, UntaggedPcDescriptors::kAnyKind);
  while (iter.MoveNext()) {
    buffer->Printf("%#-*" Px "  %-13s %8" Pd "  %-10s  %8" Pd "  %8" Pd
                   "\n",
                   addr_width, base + iter.PcOffset(), KindAsStr(iter.Kind()),
                   iter.DeoptId(), iter.TokenPos().ToCString(), iter.TryIndex(),
                   iter.YieldIndex());
  }
}
15912
// Zone-allocated dump of all descriptors (base pc of 0), or a fixed string
// when there are none.
const char* PcDescriptors::ToCString() const {
  if (Length() == 0) {
    return "empty PcDescriptors";
  }
  ZoneTextBuffer buffer(Thread::Current()->zone());
  WriteToBuffer(&buffer, /*base=*/0);
  return buffer.buffer();
}
15921
15922// Verify assumptions (in debug mode only).
15923// - No two deopt descriptors have the same deoptimization id.
15924// - No two ic-call descriptors have the same deoptimization id (type feedback).
15925// A function without unique ids is marked as non-optimizable (e.g., because of
15926// finally blocks).
void PcDescriptors::Verify(const Function& function) const {
#if defined(DEBUG)
  // Only check ids for unoptimized code that is optimizable.
  if (!function.IsOptimizable()) {
    return;
  }
  // First pass: find the largest deopt id so the bit vectors below can be
  // sized to hold every id.
  intptr_t max_deopt_id = 0;
  Iterator max_iter(
      *this, UntaggedPcDescriptors::kDeopt | UntaggedPcDescriptors::kIcCall);
  while (max_iter.MoveNext()) {
    if (max_iter.DeoptId() > max_deopt_id) {
      max_deopt_id = max_iter.DeoptId();
    }
  }

  // Second pass: assert uniqueness per kind using one bit vector for deopt
  // descriptors and one for ic-call descriptors.
  Zone* zone = Thread::Current()->zone();
  BitVector* deopt_ids = new (zone) BitVector(zone, max_deopt_id + 1);
  BitVector* iccall_ids = new (zone) BitVector(zone, max_deopt_id + 1);
  Iterator iter(*this,
                UntaggedPcDescriptors::kDeopt | UntaggedPcDescriptors::kIcCall);
  while (iter.MoveNext()) {
    // 'deopt_id' is set for kDeopt and kIcCall and must be unique for one kind.
    if (DeoptId::IsDeoptAfter(iter.DeoptId())) {
      // TODO(vegorov): some instructions contain multiple calls and have
      // multiple "after" targets recorded. Right now it is benign but might
      // lead to issues in the future. Fix that and enable verification.
      continue;
    }
    if (iter.Kind() == UntaggedPcDescriptors::kDeopt) {
      ASSERT(!deopt_ids->Contains(iter.DeoptId()));
      deopt_ids->Add(iter.DeoptId());
    } else {
      ASSERT(!iccall_ids->Contains(iter.DeoptId()));
      iccall_ids->Add(iter.DeoptId());
    }
  }
#endif  // DEBUG
}
15965
// Records the byte length of the source-map data (non-GC-visible field).
void CodeSourceMap::SetLength(intptr_t value) const {
  StoreNonPointer(&untag()->length_, value);
}
15969
// Allocates an uninitialized CodeSourceMap of |length| bytes in old space.
CodeSourceMapPtr CodeSourceMap::New(intptr_t length) {
  ASSERT(Object::code_source_map_class() != Class::null());
  Thread* thread = Thread::Current();
  CodeSourceMap& result = CodeSourceMap::Handle(thread->zone());
  {
    auto raw = Object::Allocate<CodeSourceMap>(Heap::kOld, length);
    // Initialize the length while the raw object cannot move.
    NoSafepointScope no_safepoint;
    result = raw;
    result.SetLength(length);
  }
  return result.ptr();
}
15982
// CodeSourceMap bytes are not decoded here; just name the class.
const char* CodeSourceMap::ToCString() const {
  return "CodeSourceMap";
}
15986
15987uword CompressedStackMaps::Hash() const {
15988 NoSafepointScope scope;
15989 uint8_t* data = UnsafeMutableNonPointer(&untag()->payload()->data()[0]);
15990 uint8_t* end = data + payload_size();
15991 uint32_t hash = payload_size();
15992 for (uint8_t* cursor = data; cursor < end; cursor++) {
15993 hash = CombineHashes(hash, *cursor);
15994 }
15995 return FinalizeHash(hash, kHashBits);
15996}
15997
15998void CompressedStackMaps::WriteToBuffer(BaseTextBuffer* buffer,
15999 uword base,
16000 const char* separator) const {
16001 auto it = iterator(Thread::Current());
16002 bool first_entry = true;
16003 while (it.MoveNext()) {
16004 if (!first_entry) {
16005 buffer->AddString(separator);
16006 }
16007 buffer->Printf("0x%.8" Px ": ", base + it.pc_offset());
16008 for (intptr_t i = 0, n = it.Length(); i < n; i++) {
16009 buffer->AddString(it.IsObject(i) ? "1" : "0");
16010 }
16011 first_entry = false;
16012 }
16013}
16014
16016CompressedStackMaps::iterator(Thread* thread) const {
16018 *this, CompressedStackMaps::Handle(
16019 thread->zone(), thread->isolate_group()
16020 ->object_store()
16021 ->canonicalized_stack_map_entries()));
16022}
16023
// Allocates a CompressedStackMaps object in old space and copies |size|
// bytes of |payload| into it. |is_global_table| marks the isolate group's
// shared entry table; |uses_global_table| marks per-code maps that index
// into it — the two are mutually exclusive.
CompressedStackMapsPtr CompressedStackMaps::New(const void* payload,
                                                intptr_t size,
                                                bool is_global_table,
                                                bool uses_global_table) {
  ASSERT(Object::compressed_stackmaps_class() != Class::null());
  // We don't currently allow both flags to be true.
  ASSERT(!is_global_table || !uses_global_table);
  // The canonical empty instance should be used instead.
  ASSERT(size != 0);

  if (!UntaggedCompressedStackMaps::SizeField::is_valid(size)) {
    FATAL(
        "Fatal error in CompressedStackMaps::New: "
        "invalid payload size %" Pu "\n",
        size);
  }

  auto& result = CompressedStackMaps::Handle();
  {
    // CompressedStackMaps data objects are associated with a code object,
    // allocate them in old generation.
    auto raw = Object::Allocate<CompressedStackMaps>(Heap::kOld, size);
    NoSafepointScope no_safepoint;
    result = raw;
    // Flags and size share one packed header word.
    result.untag()->payload()->set_flags_and_size(
        UntaggedCompressedStackMaps::GlobalTableBit::encode(is_global_table) |
        UntaggedCompressedStackMaps::UsesTableBit::encode(uses_global_table) |
        UntaggedCompressedStackMaps::SizeField::encode(size));
    // Perform the copy under the NoSafepointScope since it uses a raw pointer
    // to the payload, and so the object should not move during the copy.
    auto cursor =
        result.UnsafeMutableNonPointer(result.untag()->payload()->data());
    memcpy(cursor, payload, size);  // NOLINT
  }

  ASSERT(!result.IsGlobalTable() || !result.UsesGlobalTable());

  return result.ptr();
}
16063
// Zone-allocated dump of all entries. Not valid on the global table itself
// (its entries are only meaningful via a referencing map).
const char* CompressedStackMaps::ToCString() const {
  ASSERT(!IsGlobalTable());
  if (payload_size() == 0) {
    return "CompressedStackMaps()";
  }
  auto const t = Thread::Current();
  ZoneTextBuffer buffer(t->zone(), 100);
  buffer.AddString("CompressedStackMaps(");
  WriteToBuffer(&buffer, /*base=*/0, ", ");
  buffer.AddString(")");
  return buffer.buffer();
}
16076
// Returns the name recorded for the variable at |var_index|.
StringPtr LocalVarDescriptors::GetName(intptr_t var_index) const {
  ASSERT(var_index < Length());
  ASSERT(Object::Handle(ptr()->untag()->name(var_index)).IsString());
  return ptr()->untag()->name(var_index);
}
16082
16083void LocalVarDescriptors::SetVar(
16084 intptr_t var_index,
16085 const String& name,
16087 ASSERT(var_index < Length());
16088 ASSERT(!name.IsNull());
16089 ptr()->untag()->set_name(var_index, name.ptr());
16090 ptr()->untag()->data()[var_index] = *info;
16091}
16092
16093void LocalVarDescriptors::GetInfo(
16094 intptr_t var_index,
16096 ASSERT(var_index < Length());
16097 *info = ptr()->untag()->data()[var_index];
16098}
16099
16100static int PrintVarInfo(char* buffer,
16101 int len,
16102 intptr_t i,
16103 const String& var_name,
16106 const int32_t index = info.index();
16107 if (kind == UntaggedLocalVarDescriptors::kContextLevel) {
16108 return Utils::SNPrint(buffer, len,
16109 "%2" Pd
16110 " %-13s level=%-3d"
16111 " begin=%-3d end=%d\n",
16112 i, LocalVarDescriptors::KindToCString(kind), index,
16113 static_cast<int>(info.begin_pos.Pos()),
16114 static_cast<int>(info.end_pos.Pos()));
16115 } else if (kind == UntaggedLocalVarDescriptors::kContextVar) {
16116 return Utils::SNPrint(
16117 buffer, len,
16118 "%2" Pd
16119 " %-13s level=%-3d index=%-3d"
16120 " begin=%-3d end=%-3d name=%s\n",
16121 i, LocalVarDescriptors::KindToCString(kind), info.scope_id, index,
16122 static_cast<int>(info.begin_pos.Pos()),
16123 static_cast<int>(info.end_pos.Pos()), var_name.ToCString());
16124 } else {
16125 return Utils::SNPrint(
16126 buffer, len,
16127 "%2" Pd
16128 " %-13s scope=%-3d index=%-3d"
16129 " begin=%-3d end=%-3d name=%s\n",
16130 i, LocalVarDescriptors::KindToCString(kind), info.scope_id, index,
16131 static_cast<int>(info.begin_pos.Pos()),
16132 static_cast<int>(info.end_pos.Pos()), var_name.ToCString());
16133 }
16134}
16135
// Zone-allocated dump of all variable descriptors. Uses a two-pass scheme:
// first measure each row with a null buffer, then allocate once and format
// for real.
const char* LocalVarDescriptors::ToCString() const {
  if (IsNull()) {
    return "LocalVarDescriptors: null";
  }
  if (Length() == 0) {
    return "empty LocalVarDescriptors";
  }
  intptr_t len = 1;  // Trailing '\0'.
  String& var_name = String::Handle();
  // Pass 1: measure.
  for (intptr_t i = 0; i < Length(); i++) {
    UntaggedLocalVarDescriptors::VarInfo info;
    var_name = GetName(i);
    GetInfo(i, &info);
    len += PrintVarInfo(nullptr, 0, i, var_name, info);
  }
  char* buffer = Thread::Current()->zone()->Alloc<char>(len + 1);
  buffer[0] = '\0';
  intptr_t num_chars = 0;
  // Pass 2: format into the exactly-sized buffer.
  for (intptr_t i = 0; i < Length(); i++) {
    UntaggedLocalVarDescriptors::VarInfo info;
    var_name = GetName(i);
    GetInfo(i, &info);
    num_chars += PrintVarInfo((buffer + num_chars), (len - num_chars), i,
                              var_name, info);
  }
  return buffer;
}
16163
16164const char* LocalVarDescriptors::KindToCString(
16166 switch (kind) {
16167 case UntaggedLocalVarDescriptors::kStackVar:
16168 return "StackVar";
16169 case UntaggedLocalVarDescriptors::kContextVar:
16170 return "ContextVar";
16171 case UntaggedLocalVarDescriptors::kContextLevel:
16172 return "ContextLevel";
16173 case UntaggedLocalVarDescriptors::kSavedCurrentContext:
16174 return "CurrentCtx";
16175 default:
16176 UNIMPLEMENTED();
16177 return nullptr;
16178 }
16179}
16180
// Allocates a LocalVarDescriptors object with room for |num_variables|
// entries in old space.
LocalVarDescriptorsPtr LocalVarDescriptors::New(intptr_t num_variables) {
  ASSERT(Object::var_descriptors_class() != Class::null());
  if (num_variables < 0 || num_variables > kMaxElements) {
    // This should be caught before we reach here.
    FATAL(
        "Fatal error in LocalVarDescriptors::New: "
        "invalid num_variables %" Pd ". Maximum is: %d\n",
        num_variables, UntaggedLocalVarDescriptors::kMaxIndex);
  }
  auto raw = Object::Allocate<LocalVarDescriptors>(Heap::kOld, num_variables);
  // Initialize the entry count while the raw object cannot move.
  NoSafepointScope no_safepoint;
  raw->untag()->num_entries_ = num_variables;
  return raw;
}
16195
// Number of variable entries in this descriptor table.
intptr_t LocalVarDescriptors::Length() const {
  return untag()->num_entries_;
}
16199
// Number of try-index entries in this handler table.
intptr_t ExceptionHandlers::num_entries() const {
  return untag()->num_entries();
}
16203
// Whether the async-handler flag is set in the packed fields.
bool ExceptionHandlers::has_async_handler() const {
  return UntaggedExceptionHandlers::AsyncHandlerBit::decode(
      untag()->packed_fields_);
}
16208
// Updates only the async-handler bit, preserving the other packed fields.
void ExceptionHandlers::set_has_async_handler(bool value) const {
  StoreNonPointer(&untag()->packed_fields_,
                  UntaggedExceptionHandlers::AsyncHandlerBit::update(
                      value, untag()->packed_fields_));
}
16214
16215void ExceptionHandlers::SetHandlerInfo(intptr_t try_index,
16216 intptr_t outer_try_index,
16217 uword handler_pc_offset,
16218 bool needs_stacktrace,
16219 bool has_catch_all,
16220 bool is_generated) const {
16221 ASSERT((try_index >= 0) && (try_index < num_entries()));
16222 NoSafepointScope no_safepoint;
16224 UnsafeMutableNonPointer(&untag()->data()[try_index]);
16225 info->outer_try_index = outer_try_index;
16226 // Some C compilers warn about the comparison always being true when using <=
16227 // due to limited range of data type.
16228 ASSERT((handler_pc_offset == static_cast<uword>(kMaxUint32)) ||
16229 (handler_pc_offset < static_cast<uword>(kMaxUint32)));
16230 info->handler_pc_offset = handler_pc_offset;
16231 info->needs_stacktrace = static_cast<int8_t>(needs_stacktrace);
16232 info->has_catch_all = static_cast<int8_t>(has_catch_all);
16233 info->is_generated = static_cast<int8_t>(is_generated);
16234}
16235
// Copies the handler entry for |try_index| into |*info|.
void ExceptionHandlers::GetHandlerInfo(intptr_t try_index,
                                       ExceptionHandlerInfo* info) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  ASSERT(info != nullptr);
  *info = untag()->data()[try_index];
}
16242
// Pc offset of the handler for |try_index|, relative to the code's entry.
uword ExceptionHandlers::HandlerPCOffset(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return untag()->data()[try_index].handler_pc_offset;
}
16247
// Try index of the enclosing try block for |try_index|.
intptr_t ExceptionHandlers::OuterTryIndex(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return untag()->data()[try_index].outer_try_index;
}
16252
// Whether the handler at |try_index| requires a stack trace object.
bool ExceptionHandlers::NeedsStackTrace(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return untag()->data()[try_index].needs_stacktrace != 0;
}
16257
// Whether the handler at |try_index| was compiler-generated.
bool ExceptionHandlers::IsGenerated(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return untag()->data()[try_index].is_generated != 0;
}
16262
// Whether the handler at |try_index| catches all exception types.
bool ExceptionHandlers::HasCatchAll(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  return untag()->data()[try_index].has_catch_all != 0;
}
16267
// Stores the array of types handled by the entry at |try_index|.
void ExceptionHandlers::SetHandledTypes(intptr_t try_index,
                                        const Array& handled_types) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  ASSERT(!handled_types.IsNull());
  const Array& handled_types_data =
      Array::Handle(untag()->handled_types_data());
  handled_types_data.SetAt(try_index, handled_types);
}
16276
// Returns the array of types handled by the entry at |try_index|.
ArrayPtr ExceptionHandlers::GetHandledTypes(intptr_t try_index) const {
  ASSERT((try_index >= 0) && (try_index < num_entries()));
  Array& array = Array::Handle(untag()->handled_types_data());
  array ^= array.At(try_index);
  return array.ptr();
}
16283
// Installs the per-entry handled-types array (GC-visible store).
void ExceptionHandlers::set_handled_types_data(const Array& value) const {
  untag()->set_handled_types_data(value.ptr());
}
16287
// Allocates a handler table with |num_handlers| entries, creating the
// backing handled-types array (or reusing the canonical empty array) and
// delegating to the Array-taking overload below.
ExceptionHandlersPtr ExceptionHandlers::New(intptr_t num_handlers) {
  ASSERT(Object::exception_handlers_class() != Class::null());
  if ((num_handlers < 0) || (num_handlers >= kMaxHandlers)) {
    FATAL(
        "Fatal error in ExceptionHandlers::New(): "
        "invalid num_handlers %" Pd "\n",
        num_handlers);
  }
  const Array& handled_types_data =
      (num_handlers == 0) ? Object::empty_array()
                          : Array::Handle(Array::New(num_handlers, Heap::kOld));
  return ExceptionHandlers::New(handled_types_data);
}
16301
// Allocates a handler table sized to |handled_types_data| and installs the
// array; one table entry per array element.
ExceptionHandlersPtr ExceptionHandlers::New(const Array& handled_types_data) {
  ASSERT(Object::exception_handlers_class() != Class::null());
  const intptr_t num_handlers = handled_types_data.Length();
  if ((num_handlers < 0) || (num_handlers >= kMaxHandlers)) {
    FATAL(
        "Fatal error in ExceptionHandlers::New(): "
        "invalid num_handlers %" Pd "\n",
        num_handlers);
  }
  ExceptionHandlers& result = ExceptionHandlers::Handle();
  {
    auto raw = Object::Allocate<ExceptionHandlers>(Heap::kOld, num_handlers);
    // Initialize the packed entry count while the raw object cannot move.
    NoSafepointScope no_safepoint;
    result = raw;
    result.untag()->packed_fields_ =
        UntaggedExceptionHandlers::NumEntriesBits::encode(num_handlers);
  }
  result.set_handled_types_data(handled_types_data);
  return result.ptr();
}
16322
16323void ExceptionHandlers::WriteToBuffer(BaseTextBuffer* buffer,
16324 uword base) const {
16325 auto& handled_types = Array::Handle();
16326 auto& type = AbstractType::Handle();
16328 for (intptr_t i = 0; i < num_entries(); i++) {
16329 GetHandlerInfo(i, &info);
16330 handled_types = GetHandledTypes(i);
16331 const intptr_t num_types =
16332 handled_types.IsNull() ? 0 : handled_types.Length();
16333 buffer->Printf("%" Pd " => %#" Px " (%" Pd " types) (outer %d)%s%s\n", i,
16334 base + info.handler_pc_offset, num_types,
16335 info.outer_try_index,
16336 ((info.needs_stacktrace != 0) ? " (needs stack trace)" : ""),
16337 ((info.is_generated != 0) ? " (generated)" : ""));
16338 for (int k = 0; k < num_types; k++) {
16339 type ^= handled_types.At(k);
16340 ASSERT(!type.IsNull());
16341 buffer->Printf(" %d. %s\n", k, type.ToCString());
16342 }
16343 }
16344 if (has_async_handler()) {
16345 buffer->AddString("<async handler>\n");
16346 }
16347}
16348
// Zone-allocated dump of the handler table (base pc of 0), with fixed
// strings for the empty cases.
const char* ExceptionHandlers::ToCString() const {
  if (num_entries() == 0) {
    return has_async_handler()
               ? "empty ExceptionHandlers (with <async handler>)"
               : "empty ExceptionHandlers";
  }
  ZoneTextBuffer buffer(Thread::Current()->zone());
  WriteToBuffer(&buffer, /*base=*/0);
  return buffer.buffer();
}
16359
// Installs the target Code object (GC-visible store).
void SingleTargetCache::set_target(const Code& value) const {
  untag()->set_target(value.ptr());
}
16363
// SingleTargetCache has no concise printable payload; name the class.
const char* SingleTargetCache::ToCString() const {
  return "SingleTargetCache";
}
16367
// Allocates an empty SingleTargetCache in old space.
SingleTargetCachePtr SingleTargetCache::New() {
  return Object::Allocate<SingleTargetCache>(Heap::kOld);
}
16371
// Records whether this call site may be patched to a monomorphic call
// (non-GC-visible field).
void UnlinkedCall::set_can_patch_to_monomorphic(bool value) const {
  StoreNonPointer(&untag()->can_patch_to_monomorphic_, value);
}
16375
// Hash for canonicalization tables: derived from the target name only.
uword UnlinkedCall::Hash() const {
  return String::Handle(target_name()).Hash();
}
16379
16380bool UnlinkedCall::Equals(const UnlinkedCall& other) const {
16381 return (target_name() == other.target_name()) &&
16382 (arguments_descriptor() == other.arguments_descriptor()) &&
16383 (can_patch_to_monomorphic() == other.can_patch_to_monomorphic());
16384}
16385
// UnlinkedCall has no concise printable payload; name the class.
const char* UnlinkedCall::ToCString() const {
  return "UnlinkedCall";
}
16389
// Allocates an UnlinkedCall in old space. Monomorphic patching is enabled
// by default except in precompiled mode.
UnlinkedCallPtr UnlinkedCall::New() {
  const auto& result =
      UnlinkedCall::Handle(Object::Allocate<UnlinkedCall>(Heap::kOld));
  result.set_can_patch_to_monomorphic(!FLAG_precompiled_mode);
  return result.ptr();
}
16396
// Allocates a MonomorphicSmiableCall recording the expected receiver cid and
// the target's entry point (both stored as non-GC fields).
MonomorphicSmiableCallPtr MonomorphicSmiableCall::New(classid_t expected_cid,
                                                      const Code& target) {
  const auto& result = MonomorphicSmiableCall::Handle(
      Object::Allocate<MonomorphicSmiableCall>(Heap::kOld));
  result.StoreNonPointer(&result.untag()->expected_cid_, expected_cid);
  result.StoreNonPointer(&result.untag()->entrypoint_, target.EntryPoint());
  return result.ptr();
}
16405
// MonomorphicSmiableCall has no concise printable payload; name the class.
const char* MonomorphicSmiableCall::ToCString() const {
  return "MonomorphicSmiableCall";
}
16409
const char* CallSiteData::ToCString() const {
  // CallSiteData is an abstract class. We should never reach here.
  UNREACHABLE();
  return "CallSiteData";
}
16415
// Installs the (canonical, non-null) selector name for this call site.
void CallSiteData::set_target_name(const String& value) const {
  ASSERT(!value.IsNull());
  ASSERT(value.IsCanonical());
  untag()->set_target_name(value.ptr());
}
16421
// Installs the (non-null) arguments descriptor array for this call site.
void CallSiteData::set_arguments_descriptor(const Array& value) const {
  ASSERT(!value.IsNull());
  untag()->set_args_descriptor(value.ptr());
}
16426
16427#if !defined(DART_PRECOMPILED_RUNTIME)
// Records the receiver's static type, and enables exactness tracking when
// the type is a fully instantiated generic class type (excluding FutureOr)
// and exactly one argument is tested.
void ICData::SetReceiversStaticType(const AbstractType& type) const {
  untag()->set_receivers_static_type(type.ptr());

  if (!type.IsNull() && type.HasTypeClass() && (NumArgsTested() == 1) &&
      type.IsInstantiated() && !type.IsFutureOrType()) {
    const Class& cls = Class::Handle(type.type_class());
    if (cls.IsGeneric()) {
      set_tracking_exactness(true);
    }
  }
}
16439#endif
16440
// Writes the call target into an ICData entry starting at |data_pos|. JIT
// stores just the Function; AOT additionally stores the target's Code and
// keeps the Function in the entry-point slot.
void ICData::SetTargetAtPos(const Array& data,
                            intptr_t data_pos,
                            intptr_t num_args_tested,
                            const Function& target) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  // JIT
  data.SetAt(data_pos + TargetIndexFor(num_args_tested), target);
#else
  // AOT
  ASSERT(target.HasCode());
  const Code& code = Code::Handle(target.CurrentCode());
  data.SetAt(data_pos + CodeIndexFor(num_args_tested), code);
  data.SetAt(data_pos + EntryPointIndexFor(num_args_tested), target);
#endif
}
16456
// Hash combining the selector symbol's hash with the deopt id, so distinct
// call sites for the same selector hash differently.
uword ICData::Hash() const {
  return String::HashRawSymbol(target_name()) ^ deopt_id();
}
16460
16461const char* ICData::ToCString() const {
16462 Zone* zone = Thread::Current()->zone();
16463 const String& name = String::Handle(zone, target_name());
16464 return zone->PrintToString("ICData(%s num-args: %" Pd " num-checks: %" Pd
16465 " type-args-len: %" Pd ", deopt-id: %" Pd ")",
16466 name.ToCString(), NumArgsTested(),
16467 NumberOfChecks(), TypeArgsLen(), deopt_id());
16468}
16469
// Returns the function that owns this ICData. The owner slot may hold either
// the owning Function or the original ICData (for cloned ICData); in the
// latter case the lookup is forwarded to the original. A null owner is only
// legal in full-AOT snapshots where the function was dropped.
FunctionPtr ICData::Owner() const {
  Object& obj = Object::Handle(untag()->owner());
  if (obj.IsNull()) {
    ASSERT(Dart::vm_snapshot_kind() == Snapshot::kFullAOT);
    return Function::null();
  } else if (obj.IsFunction()) {
    return Function::Cast(obj).ptr();
  } else {
    // Cloned ICData: delegate to the original ICData's owner.
    ICData& original = ICData::Handle();
    original ^= obj.ptr();
    return original.Owner();
  }
}
16483
// Returns the original ICData for a clone (the owner slot then holds an
// ICData), or this ICData itself if it is not a clone.
ICDataPtr ICData::Original() const {
  if (IsNull()) {
    return ICData::null();
  }
  if (untag()->owner()->IsICData()) {
    return static_cast<ICDataPtr>(untag()->owner());
  }
  return this->ptr();
}
16493
// Marks this ICData as a clone of `value` by storing the original in the
// owner slot. Only non-clone ICData may be installed as an original.
void ICData::SetOriginal(const ICData& value) const {
  ASSERT(value.IsOriginal());
  ASSERT(!value.IsNull());
  untag()->set_owner(static_cast<ObjectPtr>(value.ptr()));
}
16499
// Stores the owning function (see Owner() for the slot's dual use).
void ICData::set_owner(const Function& value) const {
  untag()->set_owner(static_cast<ObjectPtr>(value.ptr()));
}
16503
// Stores the deopt id (JIT only; the field does not exist in AOT).
void ICData::set_deopt_id(intptr_t value) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(value <= kMaxInt32);
  StoreNonPointer(&untag()->deopt_id_, value);
#endif
}
16512
// Publishes a new backing store. Release ordering pairs with concurrent
// readers loading entries() without a lock.
void ICData::set_entries(const Array& value) const {
  ASSERT(!value.IsNull());
  untag()->set_entries<std::memory_order_release>(value.ptr());
}
16517
// Number of leading arguments whose cids are recorded per check entry.
intptr_t ICData::NumArgsTested() const {
  return untag()->state_bits_.Read<NumArgsTestedBits>();
}
16521
// Sets the tested-argument count; the bit field holds at most 2 bits.
void ICData::SetNumArgsTested(intptr_t value) const {
  ASSERT(Utils::IsUint(2, value));
  untag()->state_bits_.Update<NumArgsTestedBits>(value);
}
16526
// Number of type arguments passed at the call site (from the descriptor).
intptr_t CallSiteData::TypeArgsLen() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.TypeArgsLen();
}
16531
// Argument count including the type-arguments vector, if passed.
intptr_t CallSiteData::CountWithTypeArgs() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.CountWithTypeArgs();
}
16536
// Positional+named argument count, excluding any type-arguments vector.
intptr_t CallSiteData::CountWithoutTypeArgs() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.Count();
}
16541
// Stack size of the arguments, excluding the type-arguments vector.
intptr_t CallSiteData::SizeWithoutTypeArgs() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.Size();
}
16546
// Stack size of the arguments, including the type-arguments vector.
intptr_t CallSiteData::SizeWithTypeArgs() const {
  ArgumentsDescriptor args_desc(Array::Handle(arguments_descriptor()));
  return args_desc.SizeWithTypeArgs();
}
16551
// Bit set of DeoptReasonId values recorded at this call site.
uint32_t ICData::DeoptReasons() const {
  return untag()->state_bits_.Read<DeoptReasonBits>();
}
16555
// Overwrites the recorded deopt-reason bit set.
void ICData::SetDeoptReasons(uint32_t reasons) const {
  untag()->state_bits_.Update<DeoptReasonBits>(reasons);
}
16559
// Whether `reason` has been recorded; only reasons up to
// kLastRecordedDeoptReason fit in the bit field.
bool ICData::HasDeoptReason(DeoptReasonId reason) const {
  ASSERT(reason <= kLastRecordedDeoptReason);
  return (DeoptReasons() & (1 << reason)) != 0;
}
16564
// Records `reason` with an atomic or; reasons beyond the recordable range
// are silently ignored.
void ICData::AddDeoptReason(DeoptReasonId reason) const {
  if (reason <= kLastRecordedDeoptReason) {
    untag()->state_bits_.FetchOr<DeoptReasonBits>(1 << reason);
  }
}
16570
16571const char* ICData::RebindRuleToCString(RebindRule r) {
16572 switch (r) {
16573#define RULE_CASE(Name) \
16574 case RebindRule::k##Name: \
16575 return #Name;
16577#undef RULE_CASE
16578 default:
16579 return nullptr;
16580 }
16581}
16582
16583bool ICData::ParseRebindRule(const char* str, RebindRule* out) {
16584#define RULE_CASE(Name) \
16585 if (strcmp(str, #Name) == 0) { \
16586 *out = RebindRule::k##Name; \
16587 return true; \
16588 }
16590#undef RULE_CASE
16591 return false;
16592}
16593
// Reads the rebind rule from the packed state bits.
ICData::RebindRule ICData::rebind_rule() const {
  return RebindRule(untag()->state_bits_.Read<RebindRuleBits>());
}
16597
// Stores the rebind rule into the packed state bits.
void ICData::set_rebind_rule(uint32_t rebind_rule) const {
  untag()->state_bits_.Update<ICData::RebindRuleBits>(rebind_rule);
}
16601
// True for any call site other than an instance call.
bool ICData::is_static_call() const {
  return rebind_rule() != kInstance;
}
16605
// Resets all packed state bits (num-args, deopt reasons, rebind rule, ...).
void ICData::clear_state_bits() const {
  untag()->state_bits_ = 0;
}
16609
16610intptr_t ICData::TestEntryLengthFor(intptr_t num_args,
16611 bool tracking_exactness) {
16612 return num_args + 1 /* target function*/ + 1 /* frequency */ +
16613 (tracking_exactness ? 1 : 0) /* exactness state */;
16614}
16615
// Per-entry slot count for this ICData's configuration.
intptr_t ICData::TestEntryLength() const {
  return TestEntryLengthFor(NumArgsTested(), is_tracking_exactness());
}
16619
// Number of entries in the backing store, including the trailing sentinel.
intptr_t ICData::Length() const {
  return (Smi::Value(entries()->untag()->length()) / TestEntryLength());
}
16623
// Number of real check entries (Length() minus the sentinel entry).
intptr_t ICData::NumberOfChecks() const {
  DEBUG_ONLY(AssertInvariantsAreSatisfied());
  return Length() - 1;
}
16628
// Convenience predicate over NumberOfChecks().
bool ICData::NumberOfChecksIs(intptr_t n) const {
  DEBUG_ONLY(AssertInvariantsAreSatisfied());
  return NumberOfChecks() == n;
}
16633
#if defined(DEBUG)
// Verifies the layout invariants of the entries array (see the [ICData]
// class comment in object.h): cid slots hold non-sentinel Smis, target slots
// hold heap objects, the final entry is a sentinel whose last slot is either
// a back-reference to this ICData or (for the shared empty arrays) a Smi.
void ICData::AssertInvariantsAreSatisfied() const {
  // See layout and invariant of [ICData] in class comment in object.h.
  //
  // This method can be called without holding any locks, it will grab a
  // snapshot of `entries()` and do its verification logic on that.
  auto zone = Thread::Current()->zone();
  const auto& array = Array::Handle(zone, entries());

  const intptr_t entry_length = TestEntryLength();
  const intptr_t num_checks = array.Length() / entry_length - 1;
  const intptr_t num_args = NumArgsTested();

  /// Backing store must be multiple of entry length.
  ASSERT((array.Length() % entry_length) == 0);

  /// Entries must be valid.
  for (intptr_t i = 0; i < num_checks; ++i) {
    // Should be valid entry.
    const intptr_t start = entry_length * i;
    // NOTE: the inner index was renamed from `i` to `k` — it previously
    // shadowed the outer check index, which is error-prone (-Wshadow).
    for (intptr_t k = 0; k < num_args; ++k) {
      ASSERT(!array.At(start + k)->IsHeapObject());
      ASSERT(array.At(start + k) != smi_illegal_cid().ptr());
    }
    ASSERT(array.At(start + TargetIndexFor(num_args))->IsHeapObject());
    if (is_tracking_exactness()) {
      ASSERT(!array.At(start + ExactnessIndexFor(num_args))->IsHeapObject());
    }
  }

  /// Sentinel at end must be valid.
  const intptr_t sentinel_start = num_checks * entry_length;
  for (intptr_t i = 0; i < entry_length - 1; ++i) {
    ASSERT(array.At(sentinel_start + i) == smi_illegal_cid().ptr());
  }
  if (num_checks == 0) {
    ASSERT(array.At(sentinel_start + entry_length - 1) ==
           smi_illegal_cid().ptr());
    ASSERT(ICData::CachedEmptyICDataArray(num_args, is_tracking_exactness()) ==
           array.ptr());
  } else {
    ASSERT(array.At(sentinel_start + entry_length - 1) == ptr());
  }

  // Invariants for ICData of static calls.
  if (num_args == 0) {
    ASSERT(Length() == 2);
    ASSERT(TestEntryLength() == 2);
  }
}
#endif  // defined(DEBUG)
16685
16686// Discounts any checks with usage of zero.
16687intptr_t ICData::NumberOfUsedChecks() const {
16688 const intptr_t n = NumberOfChecks();
16689 intptr_t count = 0;
16690 for (intptr_t i = 0; i < n; i++) {
16691 if (GetCountAt(i) > 0) {
16692 count++;
16693 }
16694 }
16695 return count;
16696}
16697
// Writes the sentinel entry into the last `test_entry_length` slots of
// `data`: all slots get the illegal-cid Smi except the final one, which
// holds `back_ref` (the owning ICData, or a Smi for shared empty arrays).
void ICData::WriteSentinel(const Array& data,
                           intptr_t test_entry_length,
                           const Object& back_ref) {
  ASSERT(!data.IsNull());
  RELEASE_ASSERT(smi_illegal_cid().Value() == kIllegalCid);
  const intptr_t entry_start = data.Length() - test_entry_length;
  for (intptr_t i = 0; i < test_entry_length - 1; i++) {
    data.SetAt(entry_start + i, smi_illegal_cid());
  }
  data.SetAt(entry_start + test_entry_length - 1, back_ref);
}
16709
#if defined(DEBUG)
// Used in asserts to verify that a check is not added twice.
bool ICData::HasCheck(const GrowableArray<intptr_t>& cids) const {
  return FindCheck(cids) != -1;
}
#endif  // DEBUG
16716
16717intptr_t ICData::FindCheck(const GrowableArray<intptr_t>& cids) const {
16718 const intptr_t len = NumberOfChecks();
16719 GrowableArray<intptr_t> class_ids;
16720 for (intptr_t i = 0; i < len; i++) {
16721 GetClassIdsAt(i, &class_ids);
16722 bool matches = true;
16723 for (intptr_t k = 0; k < class_ids.length(); k++) {
16724 ASSERT(class_ids[k] != kIllegalCid);
16725 if (class_ids[k] != cids[k]) {
16726 matches = false;
16727 break;
16728 }
16729 }
16730 if (matches) {
16731 return i;
16732 }
16733 }
16734 return -1;
16735}
16736
16737void ICData::TruncateTo(intptr_t num_checks,
16738 const CallSiteResetter& proof_of_reload) const {
16739 USE(proof_of_reload); // This method can only be called during reload.
16740
16741 DEBUG_ONLY(AssertInvariantsAreSatisfied());
16742 ASSERT(num_checks <= NumberOfChecks());
16743
16744 // Nothing to do.
16745 if (NumberOfChecks() == num_checks) return;
16746
16747 auto thread = Thread::Current();
16749 auto& array = thread->ArrayHandle();
16750
16751 // If we make the ICData empty, use the pre-allocated shared backing stores.
16752 const intptr_t num_args = NumArgsTested();
16753 if (num_checks == 0) {
16754 array = ICData::CachedEmptyICDataArray(num_args, is_tracking_exactness());
16755 set_entries(array);
16756 return;
16757 }
16758
16759 // Otherwise truncate array and initialize sentinel.
16760 // Use kSmiCid for all slots in the entry except the last, which is a backref
16761 // to ICData.
16762 const intptr_t entry_length = TestEntryLength();
16763 array = entries();
16764 array.Truncate((num_checks + 1) * entry_length);
16765 WriteSentinel(array, entry_length, *this);
16766}
16767
// Zeroes the usage count of the check at `index`. Only legal during reload.
void ICData::ClearCountAt(intptr_t index,
                          const CallSiteResetter& proof_of_reload) const {
  USE(proof_of_reload);  // This method can only be called during reload.

  ASSERT(index >= 0);
  ASSERT(index < NumberOfChecks());
  SetCountAt(index, 0);
}
16776
16777void ICData::ClearAndSetStaticTarget(
16778 const Function& func,
16779 const CallSiteResetter& proof_of_reload) const {
16780 USE(proof_of_reload); // This method can only be called during reload.
16781
16782 // The final entry is always the sentinel.
16783 DEBUG_ONLY(AssertInvariantsAreSatisfied());
16784
16785 if (IsImmutable()) return;
16786 if (NumberOfChecks() == 0) return;
16787
16788 // Leave one entry.
16789 TruncateTo(/*num_checks=*/1, proof_of_reload);
16790
16791 // Reinitialize the one and only entry.
16792 const intptr_t num_args = NumArgsTested();
16793 Thread* thread = Thread::Current();
16795 Array& data = thread->ArrayHandle();
16796 data = entries();
16797 const Smi& object_cid = Smi::Handle(Smi::New(kObjectCid));
16798 for (intptr_t i = 0; i < num_args; i++) {
16799 data.SetAt(i, object_cid);
16800 }
16801 data.SetAt(TargetIndexFor(num_args), func);
16802 data.SetAt(CountIndexFor(num_args), Object::smi_zero());
16803}
16804
// Debug helper: checks that `target` is a legitimate interceptor for this
// call site — either a dynamic-invocation forwarder matching the demangled
// selector, or one of the _simpleInstanceOf helper functions.
bool ICData::ValidateInterceptor(const Function& target) const {
#if !defined(DART_PRECOMPILED_RUNTIME)
  const String& name = String::Handle(target_name());
  if (Function::IsDynamicInvocationForwarderName(name)) {
    return Function::DemangleDynamicInvocationForwarderName(name) ==
           target.name();
  }
#endif
  ObjectStore* store = IsolateGroup::Current()->object_store();
  ASSERT((target.ptr() == store->simple_instance_of_true_function()) ||
         (target.ptr() == store->simple_instance_of_false_function()));
  const String& instance_of_name = String::Handle(
      Library::PrivateCoreLibName(Symbols::_simpleInstanceOf()).ptr());
  ASSERT(target_name() == instance_of_name.ptr());
  return true;
}
16821
// Adds a check for `class_ids` -> `target` unless an identical check is
// already present. Serialized via the isolate group's type-feedback mutex.
void ICData::EnsureHasCheck(const GrowableArray<intptr_t>& class_ids,
                            const Function& target,
                            intptr_t count) const {
  SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());

  if (FindCheck(class_ids) != -1) return;
  AddCheckInternal(class_ids, target, count);
}
16830
// Unconditionally adds a check (caller guarantees no duplicate exists).
// Serialized via the isolate group's type-feedback mutex.
void ICData::AddCheck(const GrowableArray<intptr_t>& class_ids,
                      const Function& target,
                      intptr_t count) const {
  SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());
  AddCheckInternal(class_ids, target, count);
}
16837
// Appends a multi-argument check entry. Caller must hold the type-feedback
// mutex. For static calls with two tested args, the initial dummy
// (kObjectCid, kObjectCid) entry is overwritten in place instead of growing.
void ICData::AddCheckInternal(const GrowableArray<intptr_t>& class_ids,
                              const Function& target,
                              intptr_t count) const {
  ASSERT(
      IsolateGroup::Current()->type_feedback_mutex()->IsOwnedByCurrentThread());

  ASSERT(!is_tracking_exactness());
  ASSERT(!target.IsNull());
  ASSERT((target.name() == target_name()) || ValidateInterceptor(target));
  DEBUG_ASSERT(!HasCheck(class_ids));
  ASSERT(NumArgsTested() > 1);  // Otherwise use 'AddReceiverCheck'.
  const intptr_t num_args_tested = NumArgsTested();
  ASSERT(class_ids.length() == num_args_tested);
  const intptr_t old_num = NumberOfChecks();
  Array& data = Array::Handle(entries());

  // ICData of static calls with NumArgsTested() > 0 have initially a
  // dummy set of cids entered (see ICData::NewForStaticCall). That entry is
  // overwritten by first real type feedback data.
  if (old_num == 1 && num_args_tested == 2) {
    const bool has_dummy_entry =
        Smi::Value(Smi::RawCast(data.At(0))) == kObjectCid &&
        Smi::Value(Smi::RawCast(data.At(1))) == kObjectCid;
    if (has_dummy_entry) {
      ASSERT(target.ptr() == data.At(TargetIndexFor(num_args_tested)));
      // Replace dummy entry.
      Smi& value = Smi::Handle();
      for (intptr_t i = 0; i < NumArgsTested(); i++) {
        ASSERT(class_ids[i] != kIllegalCid);
        value = Smi::New(class_ids[i]);
        data.SetAt(i, value);
      }
      return;
    }
  }
  intptr_t index = -1;
  // Grow() appends a fresh sentinel entry; `index` is the slot freed for the
  // new check.
  data = Grow(&index);
  ASSERT(!data.IsNull());
  intptr_t data_pos = index * TestEntryLength();
  Smi& value = Smi::Handle();
  for (intptr_t i = 0; i < class_ids.length(); i++) {
    // kIllegalCid is used as terminating value, do not add it.
    ASSERT(class_ids[i] != kIllegalCid);
    value = Smi::New(class_ids[i]);
    data.SetAt(data_pos + i, value);
  }
  ASSERT(!target.IsNull());
  data.SetAt(data_pos + TargetIndexFor(num_args_tested), target);
  value = Smi::New(count);
  data.SetAt(data_pos + CountIndexFor(num_args_tested), value);
  // Multithreaded access to ICData requires setting of array to be the last
  // operation.
  set_entries(data);
}
16892
// Grows the entries array by one entry and rewrites the sentinel at the new
// end. Returns the new array; *index is set to the slot index freed for the
// caller to fill (the previous sentinel's position).
ArrayPtr ICData::Grow(intptr_t* index) const {
  DEBUG_ONLY(AssertInvariantsAreSatisfied());

  *index = NumberOfChecks();
  Array& data = Array::Handle(entries());
  const intptr_t new_len = data.Length() + TestEntryLength();
  data = Array::Grow(data, new_len, Heap::kOld);
  WriteSentinel(data, TestEntryLength(), *this);
  return data.ptr();
}
16903
16904void ICData::DebugDump() const {
16905 const Function& owner = Function::Handle(Owner());
16906 THR_Print("ICData::DebugDump\n");
16907 THR_Print("Owner = %s [deopt=%" Pd "]\n", owner.ToCString(), deopt_id());
16908 THR_Print("NumArgsTested = %" Pd "\n", NumArgsTested());
16909 THR_Print("Length = %" Pd "\n", Length());
16910 THR_Print("NumberOfChecks = %" Pd "\n", NumberOfChecks());
16911
16912 GrowableArray<intptr_t> class_ids;
16913 for (intptr_t i = 0; i < NumberOfChecks(); i++) {
16914 THR_Print("Check[%" Pd "]:", i);
16915 GetClassIdsAt(i, &class_ids);
16916 for (intptr_t c = 0; c < class_ids.length(); c++) {
16917 THR_Print(" %" Pd "", class_ids[c]);
16918 }
16919 THR_Print("--- %" Pd " hits\n", GetCountAt(i));
16920 }
16921}
16922
// Adds a single-receiver check unless one for this cid already exists.
// Serialized via the isolate group's type-feedback mutex.
void ICData::EnsureHasReceiverCheck(intptr_t receiver_class_id,
                                    const Function& target,
                                    intptr_t count,
                                    StaticTypeExactnessState exactness) const {
  SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());

  GrowableArray<intptr_t> class_ids(1);
  class_ids.Add(receiver_class_id);
  if (FindCheck(class_ids) != -1) return;

  AddReceiverCheckInternal(receiver_class_id, target, count, exactness);
}
16935
// Unconditionally adds a single-receiver check (caller guarantees no
// duplicate). Serialized via the isolate group's type-feedback mutex.
void ICData::AddReceiverCheck(intptr_t receiver_class_id,
                              const Function& target,
                              intptr_t count,
                              StaticTypeExactnessState exactness) const {
  SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());
  AddReceiverCheckInternal(receiver_class_id, target, count, exactness);
}
16943
// Appends a single-argument (receiver) check entry. If the new cid is kSmiCid
// and other entries exist, the new entry is swapped into position 0 so the
// Smi check is always tested first by generated code.
void ICData::AddReceiverCheckInternal(
    intptr_t receiver_class_id,
    const Function& target,
    intptr_t count,
    StaticTypeExactnessState exactness) const {
#if defined(DEBUG)
  GrowableArray<intptr_t> class_ids(1);
  class_ids.Add(receiver_class_id);
  ASSERT(!HasCheck(class_ids));
#endif  // DEBUG
  ASSERT(!target.IsNull());
  const intptr_t kNumArgsTested = 1;
  ASSERT(NumArgsTested() == kNumArgsTested);  // Otherwise use 'AddCheck'.
  ASSERT(receiver_class_id != kIllegalCid);

  intptr_t index = -1;
  Array& data = Array::Handle(Grow(&index));
  intptr_t data_pos = index * TestEntryLength();
  if ((receiver_class_id == kSmiCid) && (data_pos > 0)) {
    ASSERT(GetReceiverClassIdAt(0) != kSmiCid);
    // Move class occupying position 0 to the data_pos.
    for (intptr_t i = 0; i < TestEntryLength(); i++) {
      data.SetAt(data_pos + i, Object::Handle(data.At(i)));
    }
    // Insert kSmiCid in position 0.
    data_pos = 0;
  }
  data.SetAt(data_pos, Smi::Handle(Smi::New(receiver_class_id)));
  SetTargetAtPos(data, data_pos, kNumArgsTested, target);

#if !defined(DART_PRECOMPILED_RUNTIME)
  // Count and exactness slots only exist in JIT mode.
  data.SetAt(data_pos + CountIndexFor(kNumArgsTested),
             Smi::Handle(Smi::New(count)));
  if (is_tracking_exactness()) {
    data.SetAt(data_pos + ExactnessIndexFor(kNumArgsTested),
               Smi::Handle(Smi::New(exactness.Encode())));
  }
#endif

  // Multithreaded access to ICData requires setting of array to be the last
  // operation.
  set_entries(data);
}
16987
16988StaticTypeExactnessState ICData::GetExactnessAt(intptr_t index) const {
16989 if (!is_tracking_exactness()) {
16990 return StaticTypeExactnessState::NotTracking();
16991 }
16992 Thread* thread = Thread::Current();
16994 Array& data = thread->ArrayHandle();
16995 data = entries();
16996 intptr_t data_pos =
16997 index * TestEntryLength() + ExactnessIndexFor(NumArgsTested());
16998 return StaticTypeExactnessState::Decode(
16999 Smi::Value(Smi::RawCast(data.At(data_pos))));
17000}
17001
17002void ICData::GetCheckAt(intptr_t index,
17003 GrowableArray<intptr_t>* class_ids,
17004 Function* target) const {
17005 ASSERT(index < NumberOfChecks());
17006 ASSERT(class_ids != nullptr);
17007 ASSERT(target != nullptr);
17008 class_ids->Clear();
17009 Thread* thread = Thread::Current();
17011 Array& data = thread->ArrayHandle();
17012 data = entries();
17013 intptr_t data_pos = index * TestEntryLength();
17014 for (intptr_t i = 0; i < NumArgsTested(); i++) {
17015 class_ids->Add(Smi::Value(Smi::RawCast(data.At(data_pos + i))));
17016 }
17017 (*target) ^= data.At(data_pos + TargetIndexFor(NumArgsTested()));
17018}
17019
17020void ICData::GetClassIdsAt(intptr_t index,
17021 GrowableArray<intptr_t>* class_ids) const {
17022 ASSERT(index < Length());
17023 ASSERT(class_ids != nullptr);
17024 ASSERT(IsValidEntryIndex(index));
17025 class_ids->Clear();
17026 Thread* thread = Thread::Current();
17028 Array& data = thread->ArrayHandle();
17029 data = entries();
17030 intptr_t data_pos = index * TestEntryLength();
17031 for (intptr_t i = 0; i < NumArgsTested(); i++) {
17032 class_ids->Add(Smi::Value(Smi::RawCast(data.At(data_pos++))));
17033 }
17034}
17035
17036void ICData::GetOneClassCheckAt(intptr_t index,
17037 intptr_t* class_id,
17038 Function* target) const {
17039 ASSERT(class_id != nullptr);
17040 ASSERT(target != nullptr);
17041 ASSERT(NumArgsTested() == 1);
17042 Thread* thread = Thread::Current();
17044 Array& data = thread->ArrayHandle();
17045 data = entries();
17046 const intptr_t data_pos = index * TestEntryLength();
17047 *class_id = Smi::Value(Smi::RawCast(data.At(data_pos)));
17048 *target ^= data.At(data_pos + TargetIndexFor(NumArgsTested()));
17049}
17050
17051intptr_t ICData::GetCidAt(intptr_t index) const {
17052 ASSERT(NumArgsTested() == 1);
17053 Thread* thread = Thread::Current();
17055 Array& data = thread->ArrayHandle();
17056 data = entries();
17057 const intptr_t data_pos = index * TestEntryLength();
17058 return Smi::Value(Smi::RawCast(data.At(data_pos)));
17059}
17060
// Returns the cid of tested argument `arg_nr` in the check entry at `index`.
intptr_t ICData::GetClassIdAt(intptr_t index, intptr_t arg_nr) const {
  GrowableArray<intptr_t> class_ids;
  GetClassIdsAt(index, &class_ids);
  return class_ids[arg_nr];
}
17066
// Returns the receiver (first) cid of the check entry at `index`, reading
// the raw array directly under a NoSafepointScope to avoid handle overhead.
intptr_t ICData::GetReceiverClassIdAt(intptr_t index) const {
  ASSERT(index < Length());
  ASSERT(IsValidEntryIndex(index));
  const intptr_t data_pos = index * TestEntryLength();
  NoSafepointScope no_safepoint;
  ArrayPtr raw_data = entries();
  return Smi::Value(Smi::RawCast(raw_data->untag()->element(data_pos)));
}
17075
// Returns the target function of the check entry at `index` (JIT only; the
// target slot layout differs in AOT, where this is unreachable).
FunctionPtr ICData::GetTargetAt(intptr_t index) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
  return nullptr;
#else
  const intptr_t data_pos =
      index * TestEntryLength() + TargetIndexFor(NumArgsTested());
  ASSERT(Object::Handle(Array::Handle(entries()).At(data_pos)).IsFunction());

  NoSafepointScope no_safepoint;
  ArrayPtr raw_data = entries();
  return static_cast<FunctionPtr>(raw_data->untag()->element(data_pos));
#endif
}
17090
// Adds `value` to the usage count at `index`, saturating at Smi::kMaxValue.
void ICData::IncrementCountAt(intptr_t index, intptr_t value) const {
  ASSERT(0 <= value);
  ASSERT(value <= Smi::kMaxValue);
  SetCountAt(index, Utils::Minimum(GetCountAt(index) + value, Smi::kMaxValue));
}
17096
17097void ICData::SetCountAt(intptr_t index, intptr_t value) const {
17098 ASSERT(0 <= value);
17099 ASSERT(value <= Smi::kMaxValue);
17100
17101 Thread* thread = Thread::Current();
17103 Array& data = thread->ArrayHandle();
17104 data = entries();
17105 const intptr_t data_pos =
17106 index * TestEntryLength() + CountIndexFor(NumArgsTested());
17107 data.SetAt(data_pos, Smi::Handle(Smi::New(value)));
17108}
17109
17110intptr_t ICData::GetCountAt(intptr_t index) const {
17111#if defined(DART_PRECOMPILED_RUNTIME)
17112 UNREACHABLE();
17113 return 0;
17114#else
17115 Thread* thread = Thread::Current();
17117 Array& data = thread->ArrayHandle();
17118 data = entries();
17119 const intptr_t data_pos =
17120 index * TestEntryLength() + CountIndexFor(NumArgsTested());
17121 intptr_t value = Smi::Value(Smi::RawCast(data.At(data_pos)));
17122 if (value >= 0) return value;
17123
17124 // The counter very rarely overflows to a negative value, but if it does, we
17125 // would rather just reset it to zero.
17126 SetCountAt(index, 0);
17127 return 0;
17128#endif
17129}
17130
17131intptr_t ICData::AggregateCount() const {
17132 if (IsNull()) return 0;
17133 const intptr_t len = NumberOfChecks();
17134 intptr_t count = 0;
17135 for (intptr_t i = 0; i < len; i++) {
17136 count += GetCountAt(i);
17137 }
17138 return count;
17139}
17140
#if !defined(DART_PRECOMPILED_RUNTIME)
// Projects this ICData onto a single tested argument: returns a fresh
// 1-argument ICData whose checks are keyed by the cid of argument `arg_nr`,
// merging counts of entries that collapse to the same cid. Zero-count
// entries are dropped. Returns this ICData unchanged in the common
// already-unary receiver case.
ICDataPtr ICData::AsUnaryClassChecksForArgNr(intptr_t arg_nr) const {
  ASSERT(!IsNull());
  ASSERT(NumArgsTested() > arg_nr);
  if ((arg_nr == 0) && (NumArgsTested() == 1)) {
    // Frequent case.
    return ptr();
  }
  const intptr_t kNumArgsTested = 1;
  ICData& result = ICData::Handle(ICData::NewFrom(*this, kNumArgsTested));
  const intptr_t len = NumberOfChecks();
  for (intptr_t i = 0; i < len; i++) {
    const intptr_t class_id = GetClassIdAt(i, arg_nr);
    const intptr_t count = GetCountAt(i);
    if (count == 0) {
      continue;
    }
    // Merge with an existing entry for the same cid, if any.
    intptr_t duplicate_class_id = -1;
    const intptr_t result_len = result.NumberOfChecks();
    for (intptr_t k = 0; k < result_len; k++) {
      if (class_id == result.GetReceiverClassIdAt(k)) {
        duplicate_class_id = k;
        break;
      }
    }
    if (duplicate_class_id >= 0) {
      // This check is valid only when checking the receiver.
      ASSERT((arg_nr != 0) ||
             (result.GetTargetAt(duplicate_class_id) == GetTargetAt(i)));
      result.IncrementCountAt(duplicate_class_id, count);
    } else {
      // This will make sure that Smi is first if it exists.
      result.AddReceiverCheckInternal(class_id,
                                      Function::Handle(GetTargetAt(i)), count,
                                      StaticTypeExactnessState::NotTracking());
    }
  }

  return result.ptr();
}
17181
17182// (cid, count) tuple used to sort ICData by count.
17183struct CidCount {
17184 CidCount(intptr_t cid_, intptr_t count_, Function* f_)
17185 : cid(cid_), count(count_), function(f_) {}
17186
17187 static int HighestCountFirst(const CidCount* a, const CidCount* b);
17188
17189 intptr_t cid;
17190 intptr_t count;
17192};
17193
17194int CidCount::HighestCountFirst(const CidCount* a, const CidCount* b) {
17195 if (a->count > b->count) {
17196 return -1;
17197 }
17198 return (a->count < b->count) ? 1 : 0;
17199}
17200
// Returns a fresh 1-argument ICData whose checks are aggregated per receiver
// cid (counts summed, zero-count checks dropped) and sorted by descending
// count, so generated code tests the most frequent receiver class first.
ICDataPtr ICData::AsUnaryClassChecksSortedByCount() const {
  ASSERT(!IsNull());
  const intptr_t kNumArgsTested = 1;
  const intptr_t len = NumberOfChecks();
  if (len <= 1) {
    // No sorting needed.
    return AsUnaryClassChecks();
  }
  // Aggregate counts per cid, keeping the first-seen target for each cid.
  GrowableArray<CidCount> aggregate;
  for (intptr_t i = 0; i < len; i++) {
    const intptr_t class_id = GetClassIdAt(i, 0);
    const intptr_t count = GetCountAt(i);
    if (count == 0) {
      continue;
    }
    bool found = false;
    for (intptr_t r = 0; r < aggregate.length(); r++) {
      if (aggregate[r].cid == class_id) {
        aggregate[r].count += count;
        found = true;
        break;
      }
    }
    if (!found) {
      aggregate.Add(
          CidCount(class_id, count, &Function::ZoneHandle(GetTargetAt(i))));
    }
  }
  aggregate.Sort(CidCount::HighestCountFirst);

  ICData& result = ICData::Handle(ICData::NewFrom(*this, kNumArgsTested));
  ASSERT(result.NumberOfChecksIs(0));
  // Room for all entries and the sentinel.
  const intptr_t data_len = result.TestEntryLength() * (aggregate.length() + 1);
  // Allocate the array but do not assign it to result until we have populated
  // it with the aggregate data and the terminating sentinel.
  const Array& data = Array::Handle(Array::New(data_len, Heap::kOld));
  intptr_t pos = 0;
  for (intptr_t i = 0; i < aggregate.length(); i++) {
    data.SetAt(pos + 0, Smi::Handle(Smi::New(aggregate[i].cid)));
    data.SetAt(pos + TargetIndexFor(1), *aggregate[i].function);
    data.SetAt(pos + CountIndexFor(1),
               Smi::Handle(Smi::New(aggregate[i].count)));

    pos += result.TestEntryLength();
  }
  WriteSentinel(data, result.TestEntryLength(), result);
  // Publish last so concurrent readers never observe a half-filled array.
  result.set_entries(data);
  ASSERT(result.NumberOfChecksIs(aggregate.length()));
  return result.ptr();
}
17252
// Converts this (unary, non-exactness-tracking) ICData into an UnlinkedCall
// carrying the same selector and arguments descriptor. In precompiled mode
// patching to monomorphic is only allowed when the receiver cannot be a Smi.
UnlinkedCallPtr ICData::AsUnlinkedCall() const {
  ASSERT(NumArgsTested() == 1);
  ASSERT(!is_tracking_exactness());
  const UnlinkedCall& result = UnlinkedCall::Handle(UnlinkedCall::New());
  result.set_target_name(String::Handle(target_name()));
  result.set_arguments_descriptor(Array::Handle(arguments_descriptor()));
  result.set_can_patch_to_monomorphic(!FLAG_precompiled_mode ||
                                      receiver_cannot_be_smi());
  return result.ptr();
}
17263
17264bool ICData::HasReceiverClassId(intptr_t class_id) const {
17265 ASSERT(NumArgsTested() > 0);
17266 const intptr_t len = NumberOfChecks();
17267 for (intptr_t i = 0; i < len; i++) {
17268 if (IsUsedAt(i)) {
17269 const intptr_t test_class_id = GetReceiverClassIdAt(i);
17270 if (test_class_id == class_id) {
17271 return true;
17272 }
17273 }
17274 }
17275 return false;
17276}
17277#endif
17278
17279bool ICData::IsUsedAt(intptr_t i) const {
17280 if (GetCountAt(i) <= 0) {
17281 // Do not mistake unoptimized static call ICData for unused.
17282 // See ICData::AddTarget.
17283 // TODO(srdjan): Make this test more robust.
17284 if (NumArgsTested() > 0) {
17285 const intptr_t cid = GetReceiverClassIdAt(i);
17286 if (cid == kObjectCid) {
17287 return true;
17288 }
17289 }
17290 return false;
17291 }
17292 return true;
17293}
17294
// VM startup: pre-allocates the shared immutable empty backing stores — one
// per tested-argument count without exactness tracking, plus one for the
// single-argument-with-exactness configuration.
void ICData::Init() {
  for (int i = 0; i <= kCachedICDataMaxArgsTestedWithoutExactnessTracking;
       i++) {
    cached_icdata_arrays_
        [kCachedICDataZeroArgTestedWithoutExactnessTrackingIdx + i] =
            ICData::NewNonCachedEmptyICDataArray(i, false);
  }
  cached_icdata_arrays_[kCachedICDataOneArgWithExactnessTrackingIdx] =
      ICData::NewNonCachedEmptyICDataArray(1, true);
}
17305
17306void ICData::Cleanup() {
17307 for (int i = 0; i < kCachedICDataArrayCount; ++i) {
17308 cached_icdata_arrays_[i] = nullptr;
17309 }
17310}
17311
// Allocates an immutable old-space array holding only a sentinel entry, used
// as the shared backing store of empty ICData.
ArrayPtr ICData::NewNonCachedEmptyICDataArray(intptr_t num_args_tested,
                                              bool tracking_exactness) {
  // IC data array must be null terminated (sentinel entry).
  const intptr_t len = TestEntryLengthFor(num_args_tested, tracking_exactness);
  const Array& array = Array::Handle(Array::New(len, Heap::kOld));
  // Only empty [ICData]s are allowed to have a non-ICData backref.
  WriteSentinel(array, len, /*back_ref=*/smi_illegal_cid());
  array.MakeImmutable();
  return array.ptr();
}
17322
// Returns the shared pre-allocated empty backing store for the given
// configuration (see ICData::Init()).
ArrayPtr ICData::CachedEmptyICDataArray(intptr_t num_args_tested,
                                        bool tracking_exactness) {
  if (tracking_exactness) {
    // Only the 1-argument exactness-tracking configuration is cached.
    ASSERT(num_args_tested == 1);
    return cached_icdata_arrays_[kCachedICDataOneArgWithExactnessTrackingIdx];
  } else {
    ASSERT(num_args_tested >= 0);
    ASSERT(num_args_tested <=
           kCachedICDataMaxArgsTestedWithoutExactnessTracking);
    return cached_icdata_arrays_
        [kCachedICDataZeroArgTestedWithoutExactnessTrackingIdx +
         num_args_tested];
  }
}
17337
17338bool ICData::IsCachedEmptyEntry(const Array& array) {
17339 for (int i = 0; i < kCachedICDataArrayCount; ++i) {
17340 if (cached_icdata_arrays_[i] == array.ptr()) return true;
17341 }
17342 return false;
17343}
17344
// Does not initialize ICData array.
// Allocates an old-space ICData and fills in every field except the entries
// array; the caller installs a backing store afterwards.
ICDataPtr ICData::NewDescriptor(Zone* zone,
                                const Function& owner,
                                const String& target_name,
                                const Array& arguments_descriptor,
                                intptr_t deopt_id,
                                intptr_t num_args_tested,
                                RebindRule rebind_rule,
                                const AbstractType& receivers_static_type) {
#if !defined(DART_PRECOMPILED_RUNTIME)
  // We should only have null owners in the precompiled runtime, if the
  // owning function for a Code object was optimized out.
  ASSERT(!owner.IsNull());
#endif
  ASSERT(!target_name.IsNull());
  ASSERT(!arguments_descriptor.IsNull());
  ASSERT(Object::icdata_class() != Class::null());
  ASSERT(num_args_tested >= 0);
  // IC data objects are long living objects, allocate them in old generation.
  const auto& result =
      ICData::Handle(zone, Object::Allocate<ICData>(Heap::kOld));
  result.set_owner(owner);
  result.set_target_name(target_name);
  result.set_arguments_descriptor(arguments_descriptor);
  NOT_IN_PRECOMPILED(result.set_deopt_id(deopt_id));
  // Allocation zero-initializes; the bit-field setters below rely on that.
  ASSERT_EQUAL(result.untag()->state_bits_, 0);
  result.set_rebind_rule(rebind_rule);
  result.SetNumArgsTested(num_args_tested);
  NOT_IN_PRECOMPILED(result.SetReceiversStaticType(receivers_static_type));
  return result.ptr();
}
17376
17377bool ICData::IsImmutable() const {
17378 return entries()->IsImmutableArray();
17379}
17380
17381ICDataPtr ICData::New() {
17382 // IC data objects are long living objects, allocate them in old generation.
17383 const auto& result = ICData::Handle(Object::Allocate<ICData>(Heap::kOld));
17384 ASSERT_EQUAL(result.untag()->state_bits_, 0);
17385 result.set_deopt_id(DeoptId::kNone);
17386 return result.ptr();
17387}
17388
// Creates an empty ICData for the given call site. The entries array starts
// out as the shared cached empty array for this shape.
ICDataPtr ICData::New(const Function& owner,
                      const String& target_name,
                      const Array& arguments_descriptor,
                      intptr_t deopt_id,
                      intptr_t num_args_tested,
                      RebindRule rebind_rule,
                      const AbstractType& receivers_static_type) {
  Zone* zone = Thread::Current()->zone();
  const ICData& result = ICData::Handle(
      zone,
      NewDescriptor(zone, owner, target_name, arguments_descriptor, deopt_id,
                    num_args_tested, rebind_rule, receivers_static_type));
  result.set_entries(Array::Handle(
      zone,
      CachedEmptyICDataArray(num_args_tested, result.is_tracking_exactness())));
  return result.ptr();
}
17406
// Creates an ICData whose entries array is pre-populated with a single
// check (the given cids resolving to `target`) followed by the sentinel.
ICDataPtr ICData::NewWithCheck(const Function& owner,
                               const String& target_name,
                               const Array& arguments_descriptor,
                               intptr_t deopt_id,
                               intptr_t num_args_tested,
                               RebindRule rebind_rule,
                               const Function& target,
                               const AbstractType& receiver_type) {
  ASSERT((cids != nullptr) && !target.IsNull());
  ASSERT(cids->length() == num_args_tested);

  Zone* zone = Thread::Current()->zone();
  const auto& result = ICData::Handle(
      zone,
      NewDescriptor(zone, owner, target_name, arguments_descriptor, deopt_id,
                    num_args_tested, rebind_rule, receiver_type));

  const intptr_t kNumEntries = 2;  // 1 entry and a sentinel.
  const intptr_t entry_len =
      TestEntryLengthFor(num_args_tested, result.is_tracking_exactness());
  const auto& array =
      Array::Handle(zone, Array::New(kNumEntries * entry_len, Heap::kOld));

  // Fill in the checked class ids of the first (and only) entry.
  auto& cid = Smi::Handle(zone);
  for (intptr_t i = 0; i < num_args_tested; ++i) {
    cid = Smi::New((*cids)[i]);
    array.SetAt(i, cid);
  }

  SetTargetAtPos(array, 0, num_args_tested, target);
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Outside the precompiled runtime each entry also carries a count slot.
  array.SetAt(CountIndexFor(num_args_tested), Object::smi_zero());
#endif
  WriteSentinel(array, entry_len, result);

  result.set_entries(array);

  return result.ptr();
}
17447
17448ICDataPtr ICData::NewForStaticCall(const Function& owner,
17449 const Function& target,
17450 const Array& arguments_descriptor,
17451 intptr_t deopt_id,
17452 intptr_t num_args_tested,
17453 RebindRule rebind_rule) {
17454 // See `MethodRecognizer::NumArgsCheckedForStaticCall`.
17455 ASSERT(num_args_tested == 0 || num_args_tested == 2);
17456 ASSERT(!target.IsNull());
17457
17458 Zone* zone = Thread::Current()->zone();
17459 const auto& target_name = String::Handle(zone, target.name());
17460 GrowableArray<intptr_t> cids(num_args_tested);
17461 if (num_args_tested == 2) {
17462 cids.Add(kObjectCid);
17463 cids.Add(kObjectCid);
17464 }
17465 return ICData::NewWithCheck(owner, target_name, arguments_descriptor,
17466 deopt_id, num_args_tested, rebind_rule, &cids,
17467 target, Object::null_abstract_type());
17468}
17469
17470#if !defined(DART_PRECOMPILED_RUNTIME)
// Creates a fresh, empty ICData carrying the same call-site metadata as
// `from` but (possibly) a different number of checked arguments. The
// recorded entries are NOT copied.
ICDataPtr ICData::NewFrom(const ICData& from, intptr_t num_args_tested) {
  // See comment in [ICData::Clone] why we access the megamorphic bit first.
  const bool is_megamorphic = from.is_megamorphic();

  const ICData& result = ICData::Handle(ICData::New(
      Function::Handle(from.Owner()), String::Handle(from.target_name()),
      Array::Handle(from.arguments_descriptor()), from.deopt_id(),
      num_args_tested, from.rebind_rule(),
      AbstractType::Handle(from.receivers_static_type())));
  // Copy deoptimization reasons.
  result.SetDeoptReasons(from.DeoptReasons());
  result.set_is_megamorphic(is_megamorphic);
  return result.ptr();
}
17485
// Produces a copy of `from`, cloning its entries array as well (the shared
// cached empty arrays are immutable and are reused rather than copied).
ICDataPtr ICData::Clone(const ICData& from) {
  Zone* zone = Thread::Current()->zone();

  // We have to check the megamorphic bit before accessing the entries of the
  // ICData to ensure all writes to the entries have been flushed and are
  // visible at this point.
  //
  // This will allow us to maintain the invariant that if the megamorphic bit is
  // set, the number of entries in the ICData have reached the limit.
  const bool is_megamorphic = from.is_megamorphic();

  const ICData& result = ICData::Handle(
      zone, ICData::NewDescriptor(
                zone, Function::Handle(zone, from.Owner()),
                String::Handle(zone, from.target_name()),
                Array::Handle(zone, from.arguments_descriptor()),
                from.deopt_id(), from.NumArgsTested(), from.rebind_rule(),
                AbstractType::Handle(zone, from.receivers_static_type())));
  // Clone entry array.
  const Array& from_array = Array::Handle(zone, from.entries());
  if (ICData::IsCachedEmptyEntry(from_array)) {
    // Shared immutable empty array: safe to reference directly.
    result.set_entries(from_array);
  } else {
    const intptr_t len = from_array.Length();
    const Array& cloned_array =
        Array::Handle(zone, Array::New(len, Heap::kOld));
    Object& obj = Object::Handle(zone);
    for (intptr_t i = 0; i < len; i++) {
      obj = from_array.At(i);
      cloned_array.SetAt(i, obj);
    }
    // Update backref in our clone.
    cloned_array.SetAt(cloned_array.Length() - 1, result);
    result.set_entries(cloned_array);
  }
  // Copy deoptimization reasons.
  result.SetDeoptReasons(from.DeoptReasons());
  result.set_is_megamorphic(is_megamorphic);

  // If the megamorphic bit was visible, the copied entries must already have
  // reached the polymorphic limit (see ordering comment above).
  RELEASE_ASSERT(!is_megamorphic ||
                 result.NumberOfChecks() >= FLAG_max_polymorphic_checks);

  DEBUG_ONLY(result.AssertInvariantsAreSatisfied());

  return result.ptr();
}
17532#endif
17533
17534ICDataPtr ICData::ICDataOfEntriesArray(const Array& array) {
17535 const auto& back_ref = Object::Handle(array.At(array.Length() - 1));
17536 if (back_ref.ptr() == smi_illegal_cid().ptr()) {
17537 ASSERT(IsCachedEmptyEntry(array));
17538 return ICData::null();
17539 }
17540
17541 const auto& ic_data = ICData::Cast(back_ref);
17542 DEBUG_ONLY(ic_data.AssertInvariantsAreSatisfied());
17543 return ic_data.ptr();
17544}
17545
17546const char* WeakSerializationReference::ToCString() const {
17547 return Object::Handle(target()).ToCString();
17548}
17549
// Returns a WeakSerializationReference wrapping `target`, or `target`
// itself when wrapping is unnecessary (VM-isolate-heap objects, or a WSR
// that already uses the requested replacement).
ObjectPtr WeakSerializationReference::New(const Object& target,
                                          const Object& replacement) {
  ASSERT(Object::weak_serialization_reference_class() != Class::null());
  // Don't wrap any object in the VM heap, as all objects in the VM isolate
  // heap are currently serialized.
  //
  // Note that we _do_ wrap Smis if requested. Smis are serialized in the Mint
  // cluster, and so dropping them if not strongly referenced saves space in
  // the snapshot.
  if (target.ptr()->IsHeapObject() && target.InVMIsolateHeap()) {
    return target.ptr();
  }
  // If the target is a WSR that already uses the replacement, then return it.
  if (target.IsWeakSerializationReference() &&
      WeakSerializationReference::Cast(target).replacement() ==
          replacement.ptr()) {
    return target.ptr();
  }
  // WSRs are only created during precompilation; allocate in old space.
  const auto& result = WeakSerializationReference::Handle(
      Object::Allocate<WeakSerializationReference>(Heap::kOld));
  // Don't nest WSRs, instead just use the old WSR's target.
  result.untag()->set_target(target.IsWeakSerializationReference()
                                 ? WeakSerializationReference::Unwrap(target)
                                 : target.ptr());
  result.untag()->set_replacement(replacement.ptr());
  return result.ptr();
}
17577
17578const char* WeakArray::ToCString() const {
17579 return Thread::Current()->zone()->PrintToString("WeakArray len:%" Pd,
17580 Length());
17581}
17582
// Allocates a WeakArray of `length` elements in the given space.
WeakArrayPtr WeakArray::New(intptr_t length, Heap::Space space) {
  ASSERT(Object::weak_array_class() != Class::null());
  if (!IsValidLength(length)) {
    // This should be caught before we reach here.
    FATAL("Fatal error in WeakArray::New: invalid len %" Pd "\n", length);
  }
  auto raw = Object::Allocate<WeakArray>(space, length);
  // Prevent a safepoint between allocation and initializing the length
  // field, so the object is never observed half-initialized.
  NoSafepointScope no_safepoint;
  raw->untag()->set_length(Smi::New(length));
  return raw;
}
17594
17595#if defined(INCLUDE_IL_PRINTER)
// Allocates a Comments wrapper backed by an Array with `count` groups of
// kNumberOfEntries slots each. The wrapper itself is heap-allocated and
// returned by reference; it is deliberately never freed here.
Code::Comments& Code::Comments::New(intptr_t count) {
  Comments* comments;
  if (count < 0 || count > (kIntptrMax / kNumberOfEntries)) {
    // This should be caught before we reach here.
    FATAL("Fatal error in Code::Comments::New: invalid count %" Pd "\n", count);
  }
  if (count == 0) {
    // No comments: share the canonical empty array instead of allocating.
    comments = new Comments(Object::empty_array());
  } else {
    const Array& data =
        Array::Handle(Array::New(count * kNumberOfEntries, Heap::kOld));
    comments = new Comments(data);
  }
  return *comments;
}
17611
17612intptr_t Code::Comments::Length() const {
17613 if (comments_.IsNull()) {
17614 return 0;
17615 }
17616 return comments_.Length() / kNumberOfEntries;
17617}
17618
17619intptr_t Code::Comments::PCOffsetAt(intptr_t idx) const {
17620 return Smi::Value(
17621 Smi::RawCast(comments_.At(idx * kNumberOfEntries + kPCOffsetEntry)));
17622}
17623
17624void Code::Comments::SetPCOffsetAt(intptr_t idx, intptr_t pc) {
17625 comments_.SetAt(idx * kNumberOfEntries + kPCOffsetEntry,
17626 Smi::Handle(Smi::New(pc)));
17627}
17628
// Returns the comment text at `idx`, materialized via the reused `string_`
// handle (which is why this const method mutates a mutable member).
const char* Code::Comments::CommentAt(intptr_t idx) const {
  string_ ^= comments_.At(idx * kNumberOfEntries + kCommentEntry);
  return string_.ToCString();
}
17633
17634void Code::Comments::SetCommentAt(intptr_t idx, const String& comment) {
17635 comments_.SetAt(idx * kNumberOfEntries + kCommentEntry, comment);
17636}
17637
// Wraps a backing array laid out as consecutive (pc-offset, comment) groups.
Code::Comments::Comments(const Array& comments)
    : comments_(comments), string_(String::Handle()) {}
17640#endif // defined(INCLUDE_IL_PRINTER)
17641
17643 switch (kind) {
17644 case EntryKind::kNormal:
17645 return "Normal";
17646 case EntryKind::kUnchecked:
17647 return "Unchecked";
17648 case EntryKind::kMonomorphic:
17649 return "Monomorphic";
17650 case EntryKind::kMonomorphicUnchecked:
17651 return "MonomorphicUnchecked";
17652 default:
17653 UNREACHABLE();
17654 return nullptr;
17655 }
17656}
17657
17658bool Code::ParseEntryKind(const char* str, EntryKind* out) {
17659 if (strcmp(str, "Normal") == 0) {
17660 *out = EntryKind::kNormal;
17661 return true;
17662 } else if (strcmp(str, "Unchecked") == 0) {
17663 *out = EntryKind::kUnchecked;
17664 return true;
17665 } else if (strcmp(str, "Monomorphic") == 0) {
17666 *out = EntryKind::kMonomorphic;
17667 return true;
17668 } else if (strcmp(str, "MonomorphicUnchecked") == 0) {
17669 *out = EntryKind::kMonomorphicUnchecked;
17670 return true;
17671 }
17672 return false;
17673}
17674
// Returns the local variable descriptors for this code.
// NOTE(review): descriptors appear to be computed on demand when absent for
// unoptimized, non-irregexp function code — confirm against full source.
LocalVarDescriptorsPtr Code::GetLocalVarDescriptors() const {
  if (v.IsNull()) {
    // Only unoptimized code may be missing its var descriptors here.
    ASSERT(!is_optimized());
    const Function& f = Function::Handle(function());
    ASSERT(!f.IsIrregexpFunction());  // Not yet implemented.
  }
  return var_descriptors();
}
17685
// Installs the owner of this code object. Per the debug check below, the
// (unwrapped) owner must be a Function, a Class, or an AbstractType.
void Code::set_owner(const Object& owner) const {
#if defined(DEBUG)
  const auto& unwrapped_owner =
  ASSERT(unwrapped_owner.IsFunction() || unwrapped_owner.IsClass() ||
         unwrapped_owner.IsAbstractType());
#endif
  untag()->set_owner(owner.ptr());
}
17695
// Raw write of the packed state bits (optimized / force-optimized / alive /
// discarded flags); prefer the typed setters below.
void Code::set_state_bits(intptr_t bits) const {
  StoreNonPointer(&untag()->state_bits_, bits);
}
17699
// Updates the 'optimized' flag within state_bits_.
void Code::set_is_optimized(bool value) const {
  set_state_bits(OptimizedBit::update(value, untag()->state_bits_));
}
17703
// Updates the 'force optimized' flag within state_bits_.
void Code::set_is_force_optimized(bool value) const {
  set_state_bits(ForceOptimizedBit::update(value, untag()->state_bits_));
}
17707
// Updates the 'alive' flag within state_bits_.
void Code::set_is_alive(bool value) const {
  set_state_bits(AliveBit::update(value, untag()->state_bits_));
}
17711
// Updates the 'discarded' flag within state_bits_.
void Code::set_is_discarded(bool value) const {
  set_state_bits(DiscardedBit::update(value, untag()->state_bits_));
}
17715
17717 ASSERT(maps.IsOld());
17718 untag()->set_compressed_stackmaps(maps.ptr());
17719}
17720
17721#if !defined(DART_PRECOMPILED_RUNTIME)
// In JIT mode the catch_entry slot holds a Smi with the variable count
// (in AOT the same slot holds catch-entry-moves TypedData, see below).
intptr_t Code::num_variables() const {
  ASSERT(!FLAG_precompiled_mode);
  return Smi::Value(Smi::RawCast(untag()->catch_entry()));
}
// JIT-only: stores the variable count as a Smi in the catch_entry slot.
void Code::set_num_variables(intptr_t num_variables) const {
  ASSERT(!FLAG_precompiled_mode);
  untag()->set_catch_entry(Smi::New(num_variables));
}
17730#endif
17731
17732#if defined(DART_PRECOMPILED_RUNTIME) || defined(DART_PRECOMPILER)
// AOT-only: the catch_entry slot holds the catch-entry-moves TypedData
// (in JIT the same slot holds the variable count, see num_variables).
TypedDataPtr Code::catch_entry_moves_maps() const {
  ASSERT(FLAG_precompiled_mode);
  return TypedData::RawCast(untag()->catch_entry());
}
// AOT-only: installs the catch-entry-moves maps in the catch_entry slot.
void Code::set_catch_entry_moves_maps(const TypedData& maps) const {
  ASSERT(FLAG_precompiled_mode);
  untag()->set_catch_entry(maps.ptr());
}
17741#endif
17742
// Installs the deoptimization info array; unreachable in the precompiled
// runtime, where code never deoptimizes.
void Code::set_deopt_info_array(const Array& array) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  ASSERT(array.IsOld());
  untag()->set_deopt_info_array(array.ptr());
#endif
}
17751
17753#if defined(DART_PRECOMPILED_RUNTIME)
17754 UNREACHABLE();
17755#else
17756 untag()->set_static_calls_target_table(value.ptr());
17757#endif
17758#if defined(DEBUG)
17759 // Check that the table is sorted by pc offsets.
17760 // FlowGraphCompiler::AddStaticCallTarget adds pc-offsets to the table while
17761 // emitting assembly. This guarantees that every succeeding pc-offset is
17762 // larger than the previously added one.
17763 StaticCallsTable entries(value);
17764 const intptr_t count = entries.Length();
17765 for (intptr_t i = 0; i < count - 1; ++i) {
17766 auto left = Smi::Value(entries[i].Get<kSCallTableKindAndOffset>());
17767 auto right = Smi::Value(entries[i + 1].Get<kSCallTableKindAndOffset>());
17769 }
17770#endif // DEBUG
17771}
17772
// Returns the pool this code reads constants from: the isolate group's
// global object pool in precompiled mode, otherwise this code's own pool.
ObjectPoolPtr Code::GetObjectPool() const {
#if defined(DART_PRECOMPILER) || defined(DART_PRECOMPILED_RUNTIME)
  if (FLAG_precompiled_mode) {
    return IsolateGroup::Current()->object_store()->global_object_pool();
  }
#endif
  return object_pool();
}
17781
17783#if defined(PRODUCT)
17784 return false;
17785#else
17787#endif
17788}
17789
17791 ICData::DeoptReasonId* deopt_reason,
17792 uint32_t* deopt_flags) const {
17793#if defined(DART_PRECOMPILED_RUNTIME)
17795 return TypedData::null();
17796#else
17799 uword code_entry = instrs.PayloadStart();
17801 if (table.IsNull()) {
17803 return TypedData::null();
17804 }
17805 // Linear search for the PC offset matching the target PC.
17807 Smi& offset = Smi::Handle();
17808 Smi& reason_and_flags = Smi::Handle();
17810 for (intptr_t i = 0; i < length; ++i) {
17811 DeoptTable::GetEntry(table, i, &offset, &info, &reason_and_flags);
17812 if (pc == (code_entry + offset.Value())) {
17813 ASSERT(!info.IsNull());
17814 *deopt_reason = DeoptTable::ReasonField::decode(reason_and_flags.Value());
17815 *deopt_flags = DeoptTable::FlagsField::decode(reason_and_flags.Value());
17816 return info.ptr();
17817 }
17818 }
17819 *deopt_reason = ICData::kDeoptUnknown;
17820 return TypedData::null();
17821#endif // defined(DART_PRECOMPILED_RUNTIME)
17822}
17823
// Returns the index of the static-call-table entry whose pc-offset equals
// `pc`'s offset into this code, or -1 when absent. Entries are sorted by
// pc-offset (see the debug check in set_static_calls_target_table), which
// makes binary search valid.
intptr_t Code::BinarySearchInSCallTable(uword pc) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  NoSafepointScope no_safepoint;
  StaticCallsTable entries(table);
  const intptr_t pc_offset = pc - PayloadStart();
  intptr_t imin = 0;
  intptr_t imax = (table.Length() / kSCallTableEntryLength) - 1;
  // Classic inclusive-bounds binary search over the sorted offsets.
  while (imax >= imin) {
    const intptr_t imid = imin + (imax - imin) / 2;
    const auto offset = OffsetField::decode(
        Smi::Value(entries[imid].Get<kSCallTableKindAndOffset>()));
    if (offset < pc_offset) {
      imin = imid + 1;
    } else if (offset > pc_offset) {
      imax = imid - 1;
    } else {
      return imid;
    }
  }
#endif
  return -1;
}
17849
17851#if defined(DART_PRECOMPILED_RUNTIME)
17852 UNREACHABLE();
17853 return Function::null();
17854#else
17855 const intptr_t i = BinarySearchInSCallTable(pc);
17856 if (i < 0) {
17857 return Function::null();
17858 }
17860 StaticCallsTable entries(array);
17861 return entries[i].Get<kSCallTableFunctionTarget>();
17862#endif
17863}
17864
// Patches the code/type target of the static-call-table entry for `pc`.
// The entry must exist, and any function target it carries must agree with
// the code being installed.
void Code::SetStaticCallTargetCodeAt(uword pc, const Code& code) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  const intptr_t i = BinarySearchInSCallTable(pc);
  ASSERT(i >= 0);
  StaticCallsTable entries(array);
  ASSERT(code.IsNull() ||
         (code.function() == entries[i].Get<kSCallTableFunctionTarget>()));
  return entries[i].Set<kSCallTableCodeOrTypeTarget>(code);
#endif
}
17878
// Like SetStaticCallTargetCodeAt, but also accepts entries whose function
// target is null (stub calls, whose code owner is a Class).
void Code::SetStubCallTargetCodeAt(uword pc, const Code& code) const {
#if defined(DART_PRECOMPILED_RUNTIME)
  UNREACHABLE();
#else
  const intptr_t i = BinarySearchInSCallTable(pc);
  ASSERT(i >= 0);
  StaticCallsTable entries(array);
#if defined(DEBUG)
  if (entries[i].Get<kSCallTableFunctionTarget>() == Function::null()) {
    // Stub call: the installed code must be a class-owned (allocation) stub.
    ASSERT(!code.IsNull() && Object::Handle(code.owner()).IsClass());
  } else {
    ASSERT(code.IsNull() ||
           (code.function() == entries[i].Get<kSCallTableFunctionTarget>()));
  }
#endif
  return entries[i].Set<kSCallTableCodeOrTypeTarget>(code);
#endif
}
17898
17900#if !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
17901 if (!FLAG_support_disassembler) {
17902 return;
17903 }
17904 const uword start = PayloadStart();
17905 if (formatter == nullptr) {
17907 } else {
17908 Disassembler::Disassemble(start, start + Size(), formatter, *this);
17909 }
17910#endif // !defined(PRODUCT) || defined(FORCE_INCLUDE_DISASSEMBLER)
17911}
17912
17913#if defined(INCLUDE_IL_PRINTER)
17914#if defined(PRODUCT)
// In PRODUCT builds we don't have space in Code object to store code comments
// so we move them into malloced heap (and leak them). This functionality
// is only intended to be used in AOT compiler so leaking is fine.
class MallocCodeComments final : public CodeComments {
 public:
  // Deep-copies every (pc-offset, comment) pair out of the GC heap.
  explicit MallocCodeComments(const CodeComments& comments)
      : length_(comments.Length()), comments_(new Comment[comments.Length()]) {
    for (intptr_t i = 0; i < length_; i++) {
      comments_[i].pc_offset = comments.PCOffsetAt(i);
      comments_[i].comment =
          Utils::CreateCStringUniquePtr(Utils::StrDup(comments.CommentAt(i)));
    }
  }

  intptr_t Length() const override { return length_; }

  intptr_t PCOffsetAt(intptr_t i) const override {
    return comments_[i].pc_offset;
  }

  const char* CommentAt(intptr_t i) const override {
    return comments_[i].comment.get();
  }

 private:
  // One comment plus the instruction offset it annotates.
  struct Comment {
    intptr_t pc_offset;
    Utils::CStringUniquePtr comment{nullptr, std::free};
  };

  intptr_t length_;
  std::unique_ptr<Comment[]> comments_;
};
17948#endif
17949
// Returns this code's comments. In PRODUCT mode they live in malloced
// memory attached via the heap peer table (see set_comments); absent a
// peer an empty Comments object is returned.
const CodeComments& Code::comments() const {
#if defined(PRODUCT)
  auto comments =
      static_cast<CodeComments*>(Thread::Current()->heap()->GetPeer(ptr()));
  return (comments != nullptr) ? *comments : Code::Comments::New(0);
#else
  // NOTE: allocates a fresh (never-freed) wrapper around the stored array.
  return *new Code::Comments(Array::Handle(untag()->comments()));
#endif
}
17959
// Attaches comments to this code object. Outside PRODUCT the backing array
// is stored directly on the Code; in PRODUCT there is no field, so a
// malloced copy is stashed in (or cleared from) the heap's peer table.
void Code::set_comments(const CodeComments& comments) const {
#if !defined(PRODUCT)
  auto& wrapper = static_cast<const Code::Comments&>(comments);
  ASSERT(wrapper.comments_.IsOld());
  untag()->set_comments(wrapper.comments_.ptr());
#else
  if (FLAG_code_comments && comments.Length() > 0) {
    Thread::Current()->heap()->SetPeer(ptr(), new MallocCodeComments(comments));
  } else {
    Thread::Current()->heap()->SetPeer(ptr(), nullptr);
  }
#endif
}
17973#endif // defined(INCLUDE_IL_PRINTER)
17974
// Records the prologue offset; stored as a Smi in the
// return_address_metadata slot. Not available in PRODUCT builds.
void Code::SetPrologueOffset(intptr_t offset) const {
#if defined(PRODUCT)
  UNREACHABLE();
#else
  ASSERT(offset >= 0);
  untag()->set_return_address_metadata(Smi::New(offset));
#endif
}
17983
// Returns the recorded prologue offset, or -1 when none is stored.
intptr_t Code::GetPrologueOffset() const {
#if defined(PRODUCT)
  UNREACHABLE();
  return -1;
#else
  const Object& object = Object::Handle(untag()->return_address_metadata());
  // In the future we may put something other than a smi in
  // |return_address_metadata_|.
  if (object.IsNull() || !object.IsSmi()) {
    return -1;
  }
  return Smi::Cast(object).Value();
#endif
}
17998
18000 return untag()->inlined_id_to_function();
18001}
18002
// Installs the inlining-id -> function mapping; only old-space arrays
// may be stored on a Code object.
void Code::set_inlined_id_to_function(const Array& value) const {
  ASSERT(value.IsOld());
  untag()->set_inlined_id_to_function(value.ptr());
}
18007
// Allocates a bare Code object with room for `pointer_offsets_length`
// embedded-pointer offsets; pc descriptors and stackmaps start out empty.
CodePtr Code::New(intptr_t pointer_offsets_length) {
  if (pointer_offsets_length < 0 || pointer_offsets_length > kMaxElements) {
    // This should be caught before we reach here.
    FATAL("Fatal error in Code::New: invalid pointer_offsets_length %" Pd "\n",
  }
  Code& result = Code::Handle();
  {
    auto raw = Object::Allocate<Code>(Heap::kOld, pointer_offsets_length);
    // Initialize header fields before any safepoint can observe the object.
    NoSafepointScope no_safepoint;
    result = raw;
    ASSERT_EQUAL(result.untag()->state_bits_, 0);
    result.set_pointer_offsets_length(pointer_offsets_length);
  }
  DEBUG_ASSERT(result.compile_timestamp() == 0);
#if defined(INCLUDE_IL_PRINTER)
  result.set_comments(Comments::New(0));
#endif
  result.set_pc_descriptors(Object::empty_descriptors());
  result.set_compressed_stackmaps(Object::empty_compressed_stackmaps());
  return result.ptr();
}
18031
18032#if !defined(DART_PRECOMPILED_RUNTIME)
18035 compiler::Assembler* assembler,
18036 PoolAttachment pool_attachment,
18037 bool optimized,
18038 CodeStatistics* stats) {
18039 auto thread = Thread::Current();
18041
18042 const auto& code = Code::Handle(
18043 FinalizeCode(compiler, assembler, pool_attachment, optimized, stats));
18044 NotifyCodeObservers(function, code, optimized);
18045 return code.ptr();
18046}
18047
18050 compiler::Assembler* assembler,
18051 PoolAttachment pool_attachment,
18052 bool optimized,
18053 CodeStatistics* stats) {
18054 auto thread = Thread::Current();
18056
18057 const auto& code = Code::Handle(
18058 FinalizeCode(compiler, assembler, pool_attachment, optimized, stats));
18059 NotifyCodeObservers(name, code, optimized);
18060 return code.ptr();
18061}
18062
18063#if defined(DART_PRECOMPILER)
18064DECLARE_FLAG(charp, write_v8_snapshot_profile_to);
18065DECLARE_FLAG(charp, trace_precompiler_to);
18066#endif // defined(DART_PRECOMPILER)
18067
18069 compiler::Assembler* assembler,
18070 PoolAttachment pool_attachment,
18071 bool optimized,
18072 CodeStatistics* stats /* = nullptr */) {
18073 auto thread = Thread::Current();
18075
18076 ASSERT(assembler != nullptr);
18078
18079 if (pool_attachment == PoolAttachment::kAttachPool) {
18080 if (assembler->HasObjectPoolBuilder()) {
18081 object_pool =
18083 } else {
18084 object_pool = ObjectPool::empty_object_pool().ptr();
18085 }
18086 } else {
18087#if defined(DART_PRECOMPILER)
18088 if (assembler->HasObjectPoolBuilder() &&
18089 assembler->object_pool_builder().HasParent()) {
18090 // We are not going to write this pool into snapshot, but we will use
18091 // it to emit references from this code object to other objects in the
18092 // snapshot that it uses.
18093 object_pool =
18095 }
18096#endif // defined(DART_PRECOMPILER)
18097 }
18098
18099 // Allocate the Code and Instructions objects. Code is allocated first
18100 // because a GC during allocation of the code will leave the instruction
18101 // pages read-only.
18102 intptr_t pointer_offset_count = assembler->CountPointerOffsets();
18103 Code& code = Code::ZoneHandle(Code::New(pointer_offset_count));
18104#ifdef TARGET_ARCH_IA32
18105 assembler->GetSelfHandle() = code.ptr();
18106#endif
18107 Instructions& instrs = Instructions::ZoneHandle(Instructions::New(
18108 assembler->CodeSize(), assembler->has_monomorphic_entry(),
18109 assembler->should_be_aligned()));
18110
18111 {
18112 // Important: if GC is triggered at any point between Instructions::New
18113 // and here it would write protect instructions object that we are trying
18114 // to fill in.
18115 NoSafepointScope no_safepoint;
18116
18117 // Copy the instructions into the instruction area and apply all fixups.
18118 // Embedded pointers are still in handles at this point.
18119 MemoryRegion region(reinterpret_cast<void*>(instrs.PayloadStart()),
18120 instrs.Size());
18121 assembler->FinalizeInstructions(region);
18122
18123 const auto& pointer_offsets = assembler->GetPointerOffsets();
18124 ASSERT(pointer_offsets.length() == pointer_offset_count);
18125 ASSERT(code.pointer_offsets_length() == pointer_offsets.length());
18126
18127 // Set pointer offsets list in Code object and resolve all handles in
18128 // the instruction stream to raw objects.
18129 for (intptr_t i = 0; i < pointer_offsets.length(); i++) {
18130 intptr_t offset_in_instrs = pointer_offsets[i];
18131 code.SetPointerOffsetAt(i, offset_in_instrs);
18132 uword addr = region.start() + offset_in_instrs;
18133 ASSERT(instrs.PayloadStart() <= addr);
18134 ASSERT((instrs.PayloadStart() + instrs.Size()) > addr);
18135 const Object* object = LoadUnaligned(reinterpret_cast<Object**>(addr));
18136 ASSERT(object->IsOld());
18137 // N.B. The pointer is embedded in the Instructions object, but visited
18138 // through the Code object.
18139 code.StorePointerUnaligned(reinterpret_cast<ObjectPtr*>(addr),
18140 object->ptr(), thread);
18141 }
18142
18143 // Write protect instructions and, if supported by OS, use dual mapping
18144 // for execution.
18145 if (FLAG_write_protect_code) {
18146 uword address = UntaggedObject::ToAddr(instrs.ptr());
18147 VirtualMemory::Protect(reinterpret_cast<void*>(address),
18148 instrs.ptr()->untag()->HeapSize(),
18150 }
18151
18152 // Hook up Code and Instructions objects.
18153 const uword unchecked_offset = assembler->UncheckedEntryOffset();
18154 code.SetActiveInstructions(instrs, unchecked_offset);
18155 code.set_instructions(instrs);
18156 NOT_IN_PRECOMPILED(code.set_unchecked_offset(unchecked_offset));
18157 code.set_is_alive(true);
18158
18159 // Set object pool in Instructions object.
18160 if (!object_pool.IsNull()) {
18161 code.set_object_pool(object_pool.ptr());
18162 }
18163
18164#if defined(DART_PRECOMPILER)
18165 if (stats != nullptr) {
18166 stats->Finalize();
18167 instrs.set_stats(stats);
18168 }
18169#endif
18170
18171 CPU::FlushICache(instrs.PayloadStart(), instrs.Size());
18172 }
18173
18174#if defined(INCLUDE_IL_PRINTER)
18175 code.set_comments(CreateCommentsFrom(assembler));
18176#endif // defined(INCLUDE_IL_PRINTER)
18177
18178#ifndef PRODUCT
18179 code.set_compile_timestamp(OS::GetCurrentMonotonicMicros());
18180 if (assembler->prologue_offset() >= 0) {
18181 code.SetPrologueOffset(assembler->prologue_offset());
18182 } else {
18183 // No prologue was ever entered, optimistically assume nothing was ever
18184 // pushed onto the stack.
18185 code.SetPrologueOffset(assembler->CodeSize());
18186 }
18187#endif
18188 return code.ptr();
18189}
18190
18191void Code::NotifyCodeObservers(const Code& code, bool optimized) {
18192#if !defined(PRODUCT)
18193 ASSERT(!Thread::Current()->OwnsGCSafepoint());
18195 if (code.IsFunctionCode()) {
18196 const auto& function = Function::Handle(code.function());
18197 if (!function.IsNull()) {
18198 return NotifyCodeObservers(function, code, optimized);
18199 }
18200 }
18201 NotifyCodeObservers(code.Name(), code, optimized);
18202 }
18203#endif
18204}
18205
18207 const Code& code,
18208 bool optimized) {
18209#if !defined(PRODUCT)
18210 ASSERT(!function.IsNull());
18211 ASSERT(!Thread::Current()->OwnsGCSafepoint());
18212 // Calling ToLibNamePrefixedQualifiedCString is very expensive,
18213 // try to avoid it.
18215 const char* name = function.ToLibNamePrefixedQualifiedCString();
18216 NotifyCodeObservers(name, code, optimized);
18217 }
18218#endif
18219}
18220
18222 const Code& code,
18223 bool optimized) {
18224#if !defined(PRODUCT)
18225 ASSERT(name != nullptr);
18226 ASSERT(!code.IsNull());
18227 ASSERT(!Thread::Current()->OwnsGCSafepoint());
18229 const auto& instrs = Instructions::Handle(code.instructions());
18230 CodeObservers::NotifyAll(name, instrs.PayloadStart(),
18231 code.GetPrologueOffset(), instrs.Size(), optimized,
18232 &code.comments());
18233 }
18234#endif
18235}
18236#endif // !defined(DART_PRECOMPILED_RUNTIME)
18237
// Walks the heap looking for the Code object whose payload starts exactly
// at `pc` (and, outside PRODUCT, whose compile timestamp matches).
CodePtr Code::FindCode(uword pc, int64_t timestamp) {
  class SlowFindCodeVisitor : public ObjectVisitor {
   public:
    SlowFindCodeVisitor(uword pc, int64_t timestamp)
        : pc_(pc), timestamp_(timestamp), result_(Code::null()) {}

    void VisitObject(ObjectPtr obj) {
      if (!obj->IsCode()) return;
      CodePtr code = static_cast<CodePtr>(obj);
      if (Code::PayloadStartOf(code) != pc_) return;
#if !defined(PRODUCT)
      if (code->untag()->compile_timestamp_ != timestamp_) return;
#endif
      // At most one Code object may match the (pc, timestamp) pair.
      ASSERT(result_ == Code::null());
      result_ = code;
    }

    CodePtr result() const { return result_; }

   private:
    uword pc_;
    int64_t timestamp_;
    CodePtr result_;
  };

  SlowFindCodeVisitor visitor(pc, timestamp);
  iteration.IterateVMIsolateObjects(&visitor);
  iteration.IterateOldObjectsNoImagePages(&visitor);
  return visitor.result();
}
18269
18271 class FindCodeUnsafeVisitor : public ObjectVisitor {
18272 public:
18273 explicit FindCodeUnsafeVisitor(uword pc) : pc_(pc), result_(Code::null()) {}
18274
18275 void VisitObject(ObjectPtr obj) {
18276 if (obj->IsCode()) {
18277 CodePtr code = static_cast<CodePtr>(obj);
18278 if (Code::ContainsInstructionAt(code, pc_)) {
18279 result_ = code;
18280 }
18281 }
18282 }
18283
18284 CodePtr result() { return result_; }
18285
18286 private:
18287 uword pc_;
18288 CodePtr result_;
18289 };
18290
18292 PageSpace* old_space = group->heap()->old_space();
18293 old_space->MakeIterable();
18294 FindCodeUnsafeVisitor visitor(pc);
18295 old_space->VisitObjectsUnsafe(&visitor);
18297 return visitor.result();
18298}
18299
18301 uword pc_offset = pc - PayloadStart();
18302 const PcDescriptors& descriptors = PcDescriptors::Handle(pc_descriptors());
18303 PcDescriptors::Iterator iter(descriptors, UntaggedPcDescriptors::kAnyKind);
18304 while (iter.MoveNext()) {
18305 if (iter.PcOffset() == pc_offset) {
18306 return iter.TokenPos();
18307 }
18308 }
18309 return TokenPosition::kNoSource;
18310}
18311
18313 UntaggedPcDescriptors::Kind kind) const {
18314 const PcDescriptors& descriptors = PcDescriptors::Handle(pc_descriptors());
18315 PcDescriptors::Iterator iter(descriptors, kind);
18316 while (iter.MoveNext()) {
18317 if (iter.DeoptId() == deopt_id) {
18318 uword pc_offset = iter.PcOffset();
18319 uword pc = PayloadStart() + pc_offset;
18321 return pc;
18322 }
18323 }
18324 return 0;
18325}
18326
18327intptr_t Code::GetDeoptIdForOsr(uword pc) const {
18328 uword pc_offset = pc - PayloadStart();
18329 const PcDescriptors& descriptors = PcDescriptors::Handle(pc_descriptors());
18330 PcDescriptors::Iterator iter(descriptors, UntaggedPcDescriptors::kOsrEntry);
18331 while (iter.MoveNext()) {
18332 if (iter.PcOffset() == pc_offset) {
18333 return iter.DeoptId();
18334 }
18335 }
18336 return DeoptId::kNone;
18337}
18338
// Debug representation of the form "Code(<name>)".
const char* Code::ToCString() const {
  // NOTE(review): the argument lines supplying the "%s" value appear to
  // have been dropped from this copy of the file -- restore before building.
  return OS::SCreate(Thread::Current()->zone(), "Code(%s)",
}
18344
// Hash for Code objects, derived from the code's owner rather than its
// payload address (see the comment below for why).
uint32_t Code::Hash() const {
  // PayloadStart() is a tempting hash as Instructions are not moved by the
  // compactor, but Instructions are effectively moved between the process
  // creating an AppJIT/AOT snapshot and the process loading the snapshot.
  // NOTE(review): the initializer of |obj| (presumably derived from owner())
  // appears to have been dropped from this copy -- restore before building.
  const Object& obj =
  if (obj.IsClass()) {
    // Owner is a class: hash by the class.
    return Class::Cast(obj).Hash();
  } else if (obj.IsAbstractType()) {
    // Owner is a type: hash by the type.
    return AbstractType::Cast(obj).Hash();
  } else if (obj.IsFunction()) {
    // Owner is a Dart function: hash by the function.
    return Function::Cast(obj).Hash();
  } else {
    // E.g., VM stub.
    return 42;
  }
}
18362
// Human-readable name for this code object: "[Stub] ..." for VM,
// allocation, and type-test stubs, "[Optimized]/[Unoptimized] <fn>" for
// Dart functions, or "[unknown code]" when the owner is unavailable.
const char* Code::Name() const {
  Zone* zone = Thread::Current()->zone();
  if (IsStubCode()) {
    // Regular stub.
    const char* name = StubCode::NameOfStub(EntryPoint());
    if (name == nullptr) {
      return "[unknown stub]";  // Not yet recorded.
    }
    return OS::SCreate(zone, "[Stub] %s", name);
  }
  // NOTE(review): the initializer of |obj| (presumably derived from owner())
  // appears to have been dropped from this copy -- restore before building.
  const auto& obj =
  if (obj.IsClass()) {
    // Allocation stub.
    return OS::SCreate(zone, "[Stub] Allocate %s",
                       Class::Cast(obj).ScrubbedNameCString());
  } else if (obj.IsAbstractType()) {
    // Type test stub.
    return OS::SCreate(zone, "[Stub] Type Test %s",
                       AbstractType::Cast(obj).ToCString());
  } else if (obj.IsFunction()) {
    // Dart function.
    const char* opt = is_optimized() ? "[Optimized]" : "[Unoptimized]";
    const char* function_name = Function::Cast(obj).UserVisibleNameCString();
    return OS::SCreate(zone, "%s %s", opt, function_name);
  } else {
    // --no_retain_function_objects etc
    return "[unknown code]";
  }
}
18393
18395 Zone* zone = Thread::Current()->zone();
18396 const Object& obj =
18398 if (obj.IsFunction()) {
18399 ZoneTextBuffer printer(zone);
18400 printer.AddString(is_optimized() ? "[Optimized] " : "[Unoptimized] ");
18401 Function::Cast(obj).PrintName(params, &printer);
18402 return printer.buffer();
18403 }
18404 return Name();
18405}
18406
18407bool Code::IsStubCode() const {
18408 // We should _not_ unwrap any possible WSRs here, as the null value is never
18409 // wrapped by a WSR.
18410 return owner() == Object::null();
18411}
18412
18414 return OwnerClassId() == kClassCid;
18415}
18416
18418 auto const cid = OwnerClassId();
18419 return cid == kAbstractTypeCid || cid == kTypeCid ||
18420 cid == kFunctionTypeCid || cid == kRecordTypeCid ||
18421 cid == kTypeParameterCid;
18422}
18423
18425 return OwnerClassId() == kFunctionCid;
18426}
18427
// Whether |code| is the canonical placeholder stub used when the actual
// Dart code cannot be determined.
bool Code::IsUnknownDartCode(CodePtr code) {
  // NOTE(review): the first operand of this disjunction appears to have
  // been dropped from this copy -- as shown the expression is incomplete.
         (code == StubCode::UnknownDartCode().ptr());
}
18432
18437 const Code& new_code = StubCode::FixCallersTarget();
18438 SetActiveInstructions(Instructions::Handle(new_code.instructions()),
18439 new_code.UncheckedEntryPointOffset());
18440}
18441
// Redirects an allocation stub's active instructions to the appropriate
// fix-target stub (parameterized or not), disabling the current stub.
void Code::DisableStubCode(bool is_cls_parameterized) const {
  // NOTE(review): assertion lines preceding this statement appear to have
  // been dropped from this copy of the file.
  const Code& new_code = is_cls_parameterized
                             ? StubCode::FixParameterizedAllocationStubTarget()
                             : StubCode::FixAllocationStubTarget();
  SetActiveInstructions(Instructions::Handle(new_code.instructions()),
                        new_code.UncheckedEntryPointOffset());
}
18452
// Copies the entry points of |instructions| into |code|'s cached entry
// point fields, offsetting the unchecked variants by |unchecked_offset|.
void Code::InitializeCachedEntryPointsFrom(CodePtr code,
                                           InstructionsPtr instructions,
                                           uint32_t unchecked_offset) {
  // NOTE(review): a line appears to have been dropped here, and the
  // initializer of |monomorphic_entry_point| below is likewise truncated --
  // restore both before building.
  const uword entry_point = Instructions::EntryPoint(instructions);
  const uword monomorphic_entry_point =
  code->untag()->entry_point_ = entry_point;
  code->untag()->monomorphic_entry_point_ = monomorphic_entry_point;
  code->untag()->unchecked_entry_point_ = entry_point + unchecked_offset;
  code->untag()->monomorphic_unchecked_entry_point_ =
      monomorphic_entry_point + unchecked_offset;
}
18466
18467void Code::SetActiveInstructions(const Instructions& instructions,
18468 uint32_t unchecked_offset) const {
18469#if defined(DART_PRECOMPILED_RUNTIME)
18470 UNREACHABLE();
18471#else
18472 ASSERT(IsolateGroup::Current()->program_lock()->IsCurrentThreadWriter());
18473 SetActiveInstructionsSafe(instructions, unchecked_offset);
18474#endif
18475}
18476
18477void Code::SetActiveInstructionsSafe(const Instructions& instructions,
18478 uint32_t unchecked_offset) const {
18479#if defined(DART_PRECOMPILED_RUNTIME)
18480 UNREACHABLE();
18481#else
18482 // RawInstructions are never allocated in New space and hence a
18483 // store buffer update is not needed here.
18484 untag()->set_active_instructions(instructions.ptr());
18485 Code::InitializeCachedEntryPointsFrom(ptr(), instructions.ptr(),
18486 unchecked_offset);
18487#endif
18488}
18489
18490void Code::ResetActiveInstructions() const {
18491#if defined(DART_PRECOMPILED_RUNTIME)
18492 UNREACHABLE();
18493#else
18494 SetActiveInstructions(Instructions::Handle(instructions()),
18495 untag()->unchecked_offset_);
18496#endif
18497}
18498
18500 intptr_t pc_offset,
18502 GrowableArray<TokenPosition>* token_positions) const {
18504 if (map.IsNull()) {
18506 return; // VM stub, allocation stub, or type testing stub.
18507 }
18508 const Array& id_map = Array::Handle(inlined_id_to_function());
18509 const Function& root = Function::Handle(function());
18510 CodeSourceMapReader reader(map, id_map, root);
18511 reader.GetInlinedFunctionsAt(pc_offset, functions, token_positions);
18512}
18513
18514#ifndef PRODUCT
18515void Code::PrintJSONInlineIntervals(JSONObject* jsobj) const {
18516 if (!is_optimized()) {
18517 return; // No inlining.
18518 }
18519 const CodeSourceMap& map = CodeSourceMap::Handle(code_source_map());
18520 const Array& id_map = Array::Handle(inlined_id_to_function());
18521 const Function& root = Function::Handle(function());
18522 CodeSourceMapReader reader(map, id_map, root);
18523 reader.PrintJSONInlineIntervals(jsobj);
18524}
18525#endif
18526
18529 if (map.IsNull()) {
18530 // Stub code.
18531 return;
18532 }
18533 const Array& id_map = Array::Handle(inlined_id_to_function());
18534 const Function& root = Function::Handle(function());
18535 CodeSourceMapReader reader(map, id_map, root);
18537}
18538
// Dumps this code's source positions, using payload-relative addresses
// when |relative_addresses| is true and absolute pcs otherwise.
void Code::DumpSourcePositions(bool relative_addresses) const {
  // NOTE(review): the declaration of |map| (a CodeSourceMap handle)
  // appears to have been dropped from this copy -- restore before building.
  if (map.IsNull()) {
    // Stub code.
    return;
  }
  const Array& id_map = Array::Handle(inlined_id_to_function());
  const Function& root = Function::Handle(function());
  CodeSourceMapReader reader(map, id_map, root);
  reader.DumpSourcePositions(relative_addresses ? 0 : PayloadStart());
}
18550
18551intptr_t Context::GetLevel() const {
18552 intptr_t level = 0;
18553 Context& parent_ctx = Context::Handle(parent());
18554 while (!parent_ctx.IsNull()) {
18555 level++;
18556 parent_ctx = parent_ctx.parent();
18557 }
18558 return level;
18559}
18560
// Allocates a new Context with |num_variables| slots in |space|.
ContextPtr Context::New(intptr_t num_variables, Heap::Space space) {
  ASSERT(num_variables >= 0);

  // NOTE(review): the enclosing length-validity check (an "if (...) {"
  // line) and the FATAL's argument line appear to have been dropped from
  // this copy -- restore before building.
    // This should be caught before we reach here.
    FATAL("Fatal error in Context::New: invalid num_variables %" Pd "\n",
  }
  auto raw = Object::Allocate<Context>(space, num_variables);
  // Guard the raw pointer below against GC: no safepoint may intervene
  // between allocation and header initialization.
  NoSafepointScope no_safepoint;
  raw->untag()->num_variables_ = num_variables;
  return raw;
}
18575
18576const char* Context::ToCString() const {
18577 if (IsNull()) {
18578 return "Context: null";
18579 }
18580 Zone* zone = Thread::Current()->zone();
18581 const Context& parent_ctx = Context::Handle(parent());
18582 if (parent_ctx.IsNull()) {
18583 return zone->PrintToString("Context num_variables: %" Pd "",
18584 num_variables());
18585 } else {
18586 const char* parent_str = parent_ctx.ToCString();
18587 return zone->PrintToString("Context num_variables: %" Pd " parent:{ %s }",
18588 num_variables(), parent_str);
18589 }
18590}
18591
18592static void IndentN(int count) {
18593 for (int i = 0; i < count; i++) {
18594 THR_Print(" ");
18595 }
18596}
18597
18598void Context::Dump(int indent) const {
18599 if (IsNull()) {
18600 IndentN(indent);
18601 THR_Print("Context@null\n");
18602 return;
18603 }
18604
18605 IndentN(indent);
18606 THR_Print("Context vars(%" Pd ") {\n", num_variables());
18607 Object& obj = Object::Handle();
18608 for (intptr_t i = 0; i < num_variables(); i++) {
18609 IndentN(indent + 2);
18610 obj = At(i);
18611 const char* s = obj.ToCString();
18612 if (strlen(s) > 50) {
18613 THR_Print("[%" Pd "] = [first 50 chars:] %.50s...\n", i, s);
18614 } else {
18615 THR_Print("[%" Pd "] = %s\n", i, s);
18616 }
18617 }
18618
18619 const Context& parent_ctx = Context::Handle(parent());
18620 if (!parent_ctx.IsNull()) {
18621 parent_ctx.Dump(indent + 2);
18622 }
18623 IndentN(indent);
18624 THR_Print("}\n");
18625}
18626
// Allocates a ContextScope describing |num_variables| variables; always
// allocated in old space.
ContextScopePtr ContextScope::New(intptr_t num_variables, bool is_implicit) {
  if (num_variables < 0 || num_variables > kMaxElements) {
    // This should be caught before we reach here.
    FATAL("Fatal error in ContextScope::New: invalid num_variables %" Pd "\n",
  }
  // NOTE(review): the FATAL's argument line above and the declaration of
  // |result| (a ContextScope handle) appear to have been dropped from this
  // copy -- restore before building.
  {
    auto raw = Object::Allocate<ContextScope>(Heap::kOld, num_variables);
    // No safepoint between allocation and initialization of the raw object.
    NoSafepointScope no_safepoint;
    result = raw;
    result.set_num_variables(num_variables);
  }
  result.set_is_implicit(is_implicit);
  return result.ptr();
}
18644
// Token position recorded for the variable at |scope_index| (stored
// serialized as a Smi).
TokenPosition ContextScope::TokenIndexAt(intptr_t scope_index) const {
  // NOTE(review): the "return TokenPosition::Deserialize(" line appears to
  // have been dropped from this copy -- restore before building.
      Smi::Value(untag()->token_pos_at(scope_index)));
}
18649
18650void ContextScope::SetTokenIndexAt(intptr_t scope_index,
18651 TokenPosition token_pos) const {
18652 untag()->set_token_pos_at(scope_index, Smi::New(token_pos.Serialize()));
18653}
18654
18656 intptr_t scope_index) const {
18658 Smi::Value(untag()->declaration_token_pos_at(scope_index)));
18659}
18660
18662 intptr_t scope_index,
18663 TokenPosition declaration_token_pos) const {
18664 untag()->set_declaration_token_pos_at(
18665 scope_index, Smi::New(declaration_token_pos.Serialize()));
18666}
18667
18668StringPtr ContextScope::NameAt(intptr_t scope_index) const {
18669 return untag()->name_at(scope_index);
18670}
18671
18672void ContextScope::SetNameAt(intptr_t scope_index, const String& name) const {
18673 untag()->set_name_at(scope_index, name.ptr());
18674}
18675
18676void ContextScope::ClearFlagsAt(intptr_t scope_index) const {
18677 untag()->set_flags_at(scope_index, Smi::New(0));
18678}
18679
18680bool ContextScope::GetFlagAt(intptr_t scope_index, intptr_t bit_index) const {
18681 const intptr_t mask = 1 << bit_index;
18682 return (Smi::Value(untag()->flags_at(scope_index)) & mask) != 0;
18683}
18684
18685void ContextScope::SetFlagAt(intptr_t scope_index,
18686 intptr_t bit_index,
18687 bool value) const {
18688 const intptr_t mask = 1 << bit_index;
18689 intptr_t flags = Smi::Value(untag()->flags_at(scope_index));
18690 untag()->set_flags_at(scope_index,
18691 Smi::New(value ? flags | mask : flags & ~mask));
18692}
18693
// Generates IsXxxAt/SetIsXxxAt accessor pairs for each per-variable flag,
// delegating to GetFlagAt/SetFlagAt with the flag's bit index.
#define DEFINE_FLAG_ACCESSORS(Name) \
  bool ContextScope::Is##Name##At(intptr_t scope_index) const { \
    return GetFlagAt(scope_index, \
                     UntaggedContextScope::VariableDesc::kIs##Name); \
  } \
 \
  void ContextScope::SetIs##Name##At(intptr_t scope_index, bool value) const { \
    SetFlagAt(scope_index, UntaggedContextScope::VariableDesc::kIs##Name, \
              value); \
  }

// NOTE(review): the flag-list invocation that expands this macro appears
// to have been dropped from this copy -- restore before building.
#undef DEFINE_FLAG_ACCESSORS
18707
18708intptr_t ContextScope::LateInitOffsetAt(intptr_t scope_index) const {
18709 return Smi::Value(untag()->late_init_offset_at(scope_index));
18710}
18711
18712void ContextScope::SetLateInitOffsetAt(intptr_t scope_index,
18713 intptr_t late_init_offset) const {
18714 untag()->set_late_init_offset_at(scope_index, Smi::New(late_init_offset));
18715}
18716
18717AbstractTypePtr ContextScope::TypeAt(intptr_t scope_index) const {
18718 return untag()->type_at(scope_index);
18719}
18720
18721void ContextScope::SetTypeAt(intptr_t scope_index,
18722 const AbstractType& type) const {
18723 untag()->set_type_at(scope_index, type.ptr());
18724}
18725
18726intptr_t ContextScope::CidAt(intptr_t scope_index) const {
18727 return Smi::Value(untag()->cid_at(scope_index));
18728}
18729
18730void ContextScope::SetCidAt(intptr_t scope_index, intptr_t cid) const {
18731 untag()->set_cid_at(scope_index, Smi::New(cid));
18732}
18733
18734intptr_t ContextScope::ContextIndexAt(intptr_t scope_index) const {
18735 return Smi::Value(untag()->context_index_at(scope_index));
18736}
18737
18738void ContextScope::SetContextIndexAt(intptr_t scope_index,
18739 intptr_t context_index) const {
18740 untag()->set_context_index_at(scope_index, Smi::New(context_index));
18741}
18742
18743intptr_t ContextScope::ContextLevelAt(intptr_t scope_index) const {
18744 return Smi::Value(untag()->context_level_at(scope_index));
18745}
18746
18747void ContextScope::SetContextLevelAt(intptr_t scope_index,
18748 intptr_t context_level) const {
18749 untag()->set_context_level_at(scope_index, Smi::New(context_level));
18750}
18751
18752intptr_t ContextScope::KernelOffsetAt(intptr_t scope_index) const {
18753 return Smi::Value(untag()->kernel_offset_at(scope_index));
18754}
18755
18756void ContextScope::SetKernelOffsetAt(intptr_t scope_index,
18757 intptr_t kernel_offset) const {
18758 untag()->set_kernel_offset_at(scope_index, Smi::New(kernel_offset));
18759}
18760
// Multi-line debug listing of every variable described by this scope.
const char* ContextScope::ToCString() const {
  const char* prev_cstr = "ContextScope:";
  // NOTE(review): the declarations of |name| (a String handle) and |pos|
  // (used below) appear to have been dropped from this copy -- restore
  // before building.
  for (int i = 0; i < num_variables(); i++) {
    name = NameAt(i);
    const char* cname = name.ToCString();
    intptr_t idx = ContextIndexAt(i);
    intptr_t lvl = ContextLevelAt(i);
    // Each iteration rebuilds the whole string with one more line appended.
    char* chars =
        OS::SCreate(Thread::Current()->zone(),
                    "%s\nvar %s token-pos %s ctx lvl %" Pd " index %" Pd "",
                    prev_cstr, cname, pos.ToCString(), lvl, idx);
    prev_cstr = chars;
  }
  return prev_cstr;
}
18778
18779SentinelPtr Sentinel::New() {
18780 return Object::Allocate<Sentinel>(Heap::kOld);
18781}
18782
18783const char* Sentinel::ToCString() const {
18784 if (ptr() == Object::sentinel().ptr()) {
18785 return "sentinel";
18786 } else if (ptr() == Object::transition_sentinel().ptr()) {
18787 return "transition_sentinel";
18788 } else if (ptr() == Object::unknown_constant().ptr()) {
18789 return "unknown_constant";
18790 } else if (ptr() == Object::non_constant().ptr()) {
18791 return "non_constant";
18792 } else if (ptr() == Object::optimized_out().ptr()) {
18793 return "<optimized out>";
18794 }
18795 return "Sentinel(unknown)";
18796}
18797
18799 return untag()->buckets();
18800}
18801
18802void MegamorphicCache::set_buckets(const Array& buckets) const {
18803 untag()->set_buckets(buckets.ptr());
18804}
18805
18806// Class IDs in the table are smi-tagged, so we use a smi-tagged mask
18807// and target class ID to avoid untagging (on each iteration of the
18808// test loop) in generated code.
18809intptr_t MegamorphicCache::mask() const {
18810 return Smi::Value(untag()->mask());
18811}
18812
18813void MegamorphicCache::set_mask(intptr_t mask) const {
18814 untag()->set_mask(Smi::New(mask));
18815}
18816
18818 return untag()->filled_entry_count_;
18819}
18820
18822 StoreNonPointer(&untag()->filled_entry_count_, count);
18823}
18824
18825MegamorphicCachePtr MegamorphicCache::New() {
18826 return Object::Allocate<MegamorphicCache>(Heap::kOld);
18827}
18828
18829MegamorphicCachePtr MegamorphicCache::New(const String& target_name,
18830 const Array& arguments_descriptor) {
18831 auto* const zone = Thread::Current()->zone();
18832 const auto& result = MegamorphicCache::Handle(
18833 zone, Object::Allocate<MegamorphicCache>(Heap::kOld));
18834 const intptr_t capacity = kInitialCapacity;
18835 const Array& buckets =
18836 Array::Handle(zone, Array::New(kEntryLength * capacity, Heap::kOld));
18837 const Object& handler = Object::Handle(zone);
18838 for (intptr_t i = 0; i < capacity; ++i) {
18839 SetEntry(buckets, i, smi_illegal_cid(), handler);
18840 }
18841 result.set_buckets(buckets);
18842 result.set_mask(capacity - 1);
18843 result.set_target_name(target_name);
18844 result.set_arguments_descriptor(arguments_descriptor);
18845 result.set_filled_entry_count(0);
18846 return result.ptr();
18847}
18848
18850 const Object& target) const {
18851 SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());
18852
18853 if (LookupLocked(class_id) == Object::null()) {
18854 InsertLocked(class_id, target);
18855 }
18856
18857#if defined(DEBUG)
18858 ASSERT(LookupLocked(class_id) == target.ptr());
18859#endif // define(DEBUG)
18860}
18861
18863 SafepointMutexLocker ml(IsolateGroup::Current()->type_feedback_mutex());
18864 return LookupLocked(class_id);
18865}
18866
// Probes the cache for |class_id| and returns the cached target, or null
// if the class id is not present. Caller must hold type_feedback_mutex.
ObjectPtr MegamorphicCache::LookupLocked(const Smi& class_id) const {
  auto thread = Thread::Current();
  auto isolate_group = thread->isolate_group();
  auto zone = thread->zone();
  ASSERT(thread->IsDartMutatorThread());
  ASSERT(isolate_group->type_feedback_mutex()->IsOwnedByCurrentThread());

  const auto& backing_array = Array::Handle(zone, buckets());
  intptr_t id_mask = mask();
  // Start at a slot derived from the class id, then probe linearly.
  intptr_t index = (class_id.Value() * kSpreadFactor) & id_mask;
  intptr_t i = index;
  do {
    const classid_t current_cid =
        Smi::Value(Smi::RawCast(GetClassId(backing_array, i)));
    if (current_cid == class_id.Value()) {
      return GetTargetFunction(backing_array, i);
    } else if (current_cid == kIllegalCid) {
      // An illegal cid marks an unoccupied slot: the key is absent.
      return Object::null();
    }
    i = (i + 1) & id_mask;
  } while (i != index);
  // Growth keeps the table below full, so the probe always reaches an
  // unoccupied slot before wrapping around.
  UNREACHABLE();
}
18890
// Inserts |class_id| -> |target|, growing the table first if needed.
// Caller must hold type_feedback_mutex.
void MegamorphicCache::InsertLocked(const Smi& class_id,
                                    const Object& target) const {
  auto isolate_group = IsolateGroup::Current();
  ASSERT(isolate_group->type_feedback_mutex()->IsOwnedByCurrentThread());

  // As opposed to ICData we are stopping mutator threads from other isolates
  // while modifying the megamorphic cache, since updates are not atomic.
  //
  // NOTE: In the future we might change the megamorphic cache insertions to
  // carefully use store-release barriers on the writer as well as
  // load-acquire barriers on the reader, ...
  isolate_group->RunWithStoppedMutators(
      [&]() {
        EnsureCapacityLocked();
        InsertEntryLocked(class_id, target);
      },
      /*use_force_growth=*/true);
}
18909
// Doubles the bucket array when inserting one more entry would push the
// load factor past kLoadFactor, then rehashes all occupied entries.
// Caller must hold type_feedback_mutex.
void MegamorphicCache::EnsureCapacityLocked() const {
  auto thread = Thread::Current();
  auto zone = thread->zone();
  auto isolate_group = thread->isolate_group();
  ASSERT(isolate_group->type_feedback_mutex()->IsOwnedByCurrentThread());

  intptr_t old_capacity = mask() + 1;
  double load_limit = kLoadFactor * static_cast<double>(old_capacity);
  if (static_cast<double>(filled_entry_count() + 1) > load_limit) {
    const Array& old_buckets = Array::Handle(zone, buckets());
    intptr_t new_capacity = old_capacity * 2;
    const Array& new_buckets =
        Array::Handle(zone, Array::New(kEntryLength * new_capacity));

    // Mark every new slot unoccupied (illegal cid, null target).
    auto& target = Object::Handle(zone);
    for (intptr_t i = 0; i < new_capacity; ++i) {
      SetEntry(new_buckets, i, smi_illegal_cid(), target);
    }
    set_buckets(new_buckets);
    set_mask(new_capacity - 1);
    // NOTE(review): a line appears to have been dropped here -- presumably
    // resetting the filled entry count before the rehash below. Confirm
    // against upstream before building.

    // Rehash the valid entries.
    Smi& class_id = Smi::Handle(zone);
    for (intptr_t i = 0; i < old_capacity; ++i) {
      class_id ^= GetClassId(old_buckets, i);
      if (class_id.Value() != kIllegalCid) {
        target = GetTargetFunction(old_buckets, i);
        InsertEntryLocked(class_id, target);
      }
    }
  }
}
18943
// Writes |class_id| -> |target| into the first unoccupied slot along the
// probe sequence. Precondition: the table has spare capacity (checked by
// the load-factor assert below). Caller must hold type_feedback_mutex.
void MegamorphicCache::InsertEntryLocked(const Smi& class_id,
                                         const Object& target) const {
  auto thread = Thread::Current();
  auto isolate_group = thread->isolate_group();
  ASSERT(isolate_group->type_feedback_mutex()->IsOwnedByCurrentThread());

  ASSERT(Thread::Current()->IsDartMutatorThread());
  ASSERT(static_cast<double>(filled_entry_count() + 1) <=
         (kLoadFactor * static_cast<double>(mask() + 1)));
  const Array& backing_array = Array::Handle(buckets());
  intptr_t id_mask = mask();
  intptr_t index = (class_id.Value() * kSpreadFactor) & id_mask;
  intptr_t i = index;
  do {
    if (Smi::Value(Smi::RawCast(GetClassId(backing_array, i))) == kIllegalCid) {
      SetEntry(backing_array, i, class_id, target);
      // NOTE(review): a line appears to have been dropped here -- presumably
      // incrementing the filled entry count. Confirm against upstream.
      return;
    }
    i = (i + 1) & id_mask;
  } while (i != index);
  UNREACHABLE();
}
18967
18968const char* MegamorphicCache::ToCString() const {
18969 const String& name = String::Handle(target_name());
18970 return OS::SCreate(Thread::Current()->zone(), "MegamorphicCache(%s)",
18971 name.ToCString());
18972}
18973
// Allocates an empty SubtypeTestCache whose entries are keyed on
// |num_inputs| inputs.
SubtypeTestCachePtr SubtypeTestCache::New(intptr_t num_inputs) {
  // NOTE(review): assertion lines adjacent to this one appear to have been
  // dropped from this copy of the file.
  ASSERT(num_inputs >= 1);
  // SubtypeTestCache objects are long living objects, allocate them in the
  // old generation.
  const auto& result =
      SubtypeTestCache::Handle(Object::Allocate<SubtypeTestCache>(Heap::kOld));
  ASSERT_EQUAL(result.num_occupied(), 0);
  result.untag()->num_inputs_ = num_inputs;
  // Starts out pointing at the shared empty backing array; replaced on
  // first growth (see set_cache).
  result.set_cache(Object::empty_subtype_test_cache_array());
  return result.ptr();
}
18987
18988ArrayPtr SubtypeTestCache::cache() const {
18989 return untag()->cache<std::memory_order_acquire>();
18990}
18991
18992void SubtypeTestCache::set_cache(const Array& value) const {
18993 // We have to ensure that initializing stores to the array are available
18994 // when releasing the pointer to the array pointer.
18995 // => We have to use store-release here.
18996 untag()->set_cache<std::memory_order_release>(value.ptr());
18997}
18998
18999void SubtypeTestCache::set_num_occupied(intptr_t value) const {
19000 ASSERT(Utils::IsUint(32, value));
19001 untag()->num_occupied_ = value;
19002}
19003
19005 ASSERT(!IsNull());
19006 return num_occupied();
19007}
19008
19010 ASSERT(!IsNull());
19012}
19013
// Total number of entries (occupied or not) in the backing |array|.
intptr_t SubtypeTestCache::NumEntries(const Array& array) {
  // NOTE(review): the declaration of |table| (a SubtypeTestCacheTable view
  // over |array|) appears to have been dropped from this copy.
  return table.Length();
}
19018
19020 if (IsNull()) return false;
19022}
19023
19024bool SubtypeTestCache::IsHash(const Array& array) {
19025 return array.Length() > kMaxLinearCacheSize;
19026}
19027
19029 const Object& instance_class_id_or_signature,
19030 const AbstractType& destination_type,
19031 const TypeArguments& instance_type_arguments,
19032 const TypeArguments& instantiator_type_arguments,
19033 const TypeArguments& function_type_arguments,
19034 const TypeArguments& instance_parent_function_type_arguments,
19035 const TypeArguments& instance_delayed_type_arguments,
19036 const Bool& test_result) const {
19038 ->isolate_group()
19039 ->subtype_test_cache_mutex()
19040 ->IsOwnedByCurrentThread());
19041 ASSERT(!test_result.IsNull());
19042 ASSERT(Smi::New(kRecordCid) != instance_class_id_or_signature.ptr());
19043
19044 const intptr_t old_num = NumberOfChecks();
19045 Zone* const zone = Thread::Current()->zone();
19046 Array& data = Array::Handle(zone, cache());
19047 bool was_grown;
19048 data = EnsureCapacity(zone, data, old_num + 1, &was_grown);
19049 ASSERT(data.ptr() != Object::empty_subtype_test_cache_array().ptr());
19050
19051 const auto& loc = FindKeyOrUnused(
19052 data, num_inputs(), instance_class_id_or_signature, destination_type,
19053 instance_type_arguments, instantiator_type_arguments,
19054 function_type_arguments, instance_parent_function_type_arguments,
19055 instance_delayed_type_arguments);
19056 SubtypeTestCacheTable entries(data);
19057 const auto& entry = entries[loc.entry];
19058 if (loc.present) {
19059 if (entry.Get<kTestResult>() != test_result.ptr()) {
19060 const auto& old_result = Bool::Handle(zone, entry.Get<kTestResult>());
19061 FATAL("Existing subtype test cache entry has result %s, not %s",
19062 old_result.ToCString(), test_result.ToCString());
19063 }
19064 return loc.entry;
19065 }
19066
19067 // Set the used elements in the entry in reverse order, so that the instance
19068 // cid or signature is last, then increment the number of entries.
19069 entry.Set<kTestResult>(test_result);
19070 switch (num_inputs()) {
19071 case 7:
19072 entry.Set<kDestinationType>(destination_type);
19074 case 6:
19076 instance_delayed_type_arguments);
19078 case 5:
19080 instance_parent_function_type_arguments);
19082 case 4:
19083 entry.Set<kFunctionTypeArguments>(function_type_arguments);
19085 case 3:
19086 entry.Set<kInstantiatorTypeArguments>(instantiator_type_arguments);
19088 case 2:
19089 entry.Set<kInstanceTypeArguments>(instance_type_arguments);
19091 case 1:
19092 // If this is a new backing array, we don't need store-release barriers,
19093 // as no reader has access to the array until it is set as the backing
19094 // store (which is done with a store-release barrier).
19095 //
19096 // Otherwise, the instance cid or signature must be set last with a
19097 // store-release barrier, so concurrent readers can depend on a non-null
19098 // value meaning the rest of the entry is safe to load without barriers.
19099 if (was_grown) {
19100 entry.Set<kInstanceCidOrSignature>(instance_class_id_or_signature);
19101 } else {
19102 entry.Set<kInstanceCidOrSignature, std::memory_order_release>(
19103 instance_class_id_or_signature);
19104 }
19105 break;
19106 default:
19107 UNREACHABLE();
19108 }
19109 set_num_occupied(old_num + 1);
19110 if (was_grown) {
19111 set_cache(data);
19112 }
19113 return loc.entry;
19114}
19115
19118 intptr_t num_inputs,
19119 const Object& instance_class_id_or_signature,
19120 const AbstractType& destination_type,
19121 const TypeArguments& instance_type_arguments,
19122 const TypeArguments& instantiator_type_arguments,
19123 const TypeArguments& function_type_arguments,
19124 const TypeArguments& instance_parent_function_type_arguments,
19125 const TypeArguments& instance_delayed_type_arguments) {
19126 switch (num_inputs) {
19127 case 7:
19129 destination_type.ptr()) {
19130 return false;
19131 }
19133 case 6:
19135 instance_delayed_type_arguments.ptr()) {
19136 return false;
19137 }
19139 case 5:
19141 instance_parent_function_type_arguments.ptr()) {
19142 return false;
19143 }
19145 case 4:
19147 function_type_arguments.ptr()) {
19148 return false;
19149 }
19151 case 3:
19153 instantiator_type_arguments.ptr()) {
19154 return false;
19155 }
19157 case 2:
19159 instance_type_arguments.ptr()) {
19160 return false;
19161 }
19163 case 1:
19164 // We don't need to perform load-acquire semantics when re-retrieving
19165 // the kInstanceCidOrSignature field, as this is performed only if the
19166 // entry is occupied, and occupied entries never change.
19168 instance_class_id_or_signature.ptr();
19169 default:
19170 UNREACHABLE();
19171 }
19172}
19173
// Searches |array| for an entry matching the given inputs. Returns the
// entry's index and whether it was found; when not found, the index is
// the unoccupied slot at which the key would be inserted.
SubtypeTestCache::KeyLocation SubtypeTestCache::FindKeyOrUnused(
    const Array& array,
    intptr_t num_inputs,
    const Object& instance_class_id_or_signature,
    const AbstractType& destination_type,
    const TypeArguments& instance_type_arguments,
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    const TypeArguments& instance_parent_function_type_arguments,
    const TypeArguments& instance_delayed_type_arguments) {
  // Fast case for empty STCs.
  if (array.ptr() == Object::empty_subtype_test_cache_array().ptr()) {
    return {0, false};
  }
  const bool is_hash = IsHash(array);
  // NOTE(review): the declaration of |table| (a SubtypeTestCacheTable view
  // over |array|) appears to have been dropped from this copy.
  const intptr_t num_entries = table.Length();
  // For a linear cache, start at the first entry and probe linearly. This can
  // be done because a linear cache always has at least one unoccupied entry
  // after all the occupied ones.
  intptr_t probe = 0;
  intptr_t probe_distance = 1;
  if (is_hash) {
    // For a hash-based cache, instead start at an entry determined by the hash
    // of the keys.
    //
    // If we have an instance cid, then just use that as our starting hash.
    uint32_t hash =
        instance_class_id_or_signature.IsFunctionType()
            ? FunctionType::Cast(instance_class_id_or_signature).Hash()
            : Smi::Cast(instance_class_id_or_signature).Value();
    // NOTE(review): the fall-through annotations between the cases below
    // appear to have been dropped from this copy; each case combines its
    // input into the hash and intentionally continues into the next.
    switch (num_inputs) {
      case 7:
        hash = CombineHashes(hash, destination_type.Hash());
      case 6:
        hash = CombineHashes(hash, instance_delayed_type_arguments.Hash());
      case 5:
        hash =
            CombineHashes(hash, instance_parent_function_type_arguments.Hash());
      case 4:
        hash = CombineHashes(hash, function_type_arguments.Hash());
      case 3:
        hash = CombineHashes(hash, instantiator_type_arguments.Hash());
      case 2:
        hash = CombineHashes(hash, instance_type_arguments.Hash());
      case 1:
        break;
      default:
        UNREACHABLE();
    }
    probe = hash & (num_entries - 1);
  }
  while (true) {
    const auto& tuple = table.At(probe);
    // A null first input marks an unoccupied slot (acquire pairs with the
    // release store performed when an entry is published).
    if (tuple.Get<kInstanceCidOrSignature, std::memory_order_acquire>() ==
        Object::null()) {
      break;
    }
    // NOTE(review): the line opening this condition (presumably a call to
    // the entry-matching predicate) appears to have been dropped from this
    // copy -- restore before building.
        tuple, num_inputs, instance_class_id_or_signature, destination_type,
        instance_type_arguments, instantiator_type_arguments,
        function_type_arguments, instance_parent_function_type_arguments,
        instance_delayed_type_arguments)) {
      return {probe, true};
    }
    // Advance probe by the current probing distance.
    probe = probe + probe_distance;
    if (is_hash) {
      // Wrap around if the probe goes off the end of the entries array.
      probe = probe & (num_entries - 1);
      // We had a collision, so increase the probe distance. See comment in
      // EnsureCapacityLocked for an explanation of how this hits all slots.
      probe_distance++;
    }
  }
  return {probe, false};
}
19258
// Returns a backing array with room for |new_occupied| entries, growing (and
// possibly converting a linear cache into a hash-based cache) as needed.
// Sets *was_grown when a new backing array was allocated.
ArrayPtr SubtypeTestCache::EnsureCapacity(Zone* zone,
                                          const Array& array,
                                          intptr_t new_occupied,
                                          bool* was_grown) const {
  ASSERT(new_occupied > NumberOfChecks());
  ASSERT(was_grown != nullptr);
  // How many entries are in the current array (including unoccupied entries).
  const intptr_t current_capacity = NumEntries(array);

  // Early returns for cases where no growth is needed.
  *was_grown = false;
  const bool is_linear = IsLinear(array);
  if (is_linear) {
    // We need at least one unoccupied entry in addition to the occupied ones.
    if (current_capacity > new_occupied) return array.ptr();
  } else {
    if (LoadFactor(new_occupied, current_capacity) < kMaxLoadFactor) {
      return array.ptr();
    }
  }

  // Every path from here should result in a new backing array.
  *was_grown = true;
  // Initially null for initializing unoccupied entries.
  auto& instance_cid_or_signature = Object::Handle(zone);
  if (new_occupied <= kMaxLinearCacheEntries) {
    ASSERT(is_linear);
    // Not enough room for both the new entry and at least one unoccupied
    // entry, so grow the tuple capacity of the linear cache by about 50%,
    // ensuring that space for at least one new tuple is added, capping the
    // total number of occupied entries to the max allowed.
    const intptr_t new_capacity =
        Utils::Minimum(current_capacity + (current_capacity >> 1),
        // NOTE(review): a continuation line is missing from this copy here
        // (presumably the kMaxLinearCacheEntries cap argument followed by
        // ") +") — verify against the upstream object.cc.
        1;
    const intptr_t cache_size = new_capacity * kTestEntryLength;
    ASSERT(cache_size <= kMaxLinearCacheSize);
    const auto& new_data =
        Array::Handle(zone, Array::Grow(array, cache_size, Heap::kOld));
    ASSERT(!new_data.IsNull());
    // No need to adjust old entries, as they are copied over by Array::Grow.
    // Just mark any new entries as unoccupied.
    SubtypeTestCacheTable table(new_data);
    for (intptr_t i = current_capacity; i < new_capacity; i++) {
      const auto& tuple = table.At(i);
      tuple.Set<kInstanceCidOrSignature>(instance_cid_or_signature);
    }
    return new_data.ptr();
  }

  // Either we're converting a linear cache into a hash-based cache, or the
  // load factor of the hash-based cache has increased to the point where we
  // need to grow it.
  const intptr_t new_capacity =
      is_linear ? kNumInitialHashCacheEntries : 2 * current_capacity;
  // Because we use quadratic (actually triangle number) probing it is
  // important that the size is a power of two (otherwise we could fail to
  // find an empty slot). This is described in Knuth's The Art of Computer
  // Programming Volume 2, Chapter 6.4, exercise 20 (solution in the
  // appendix, 2nd edition).
  //
  // This is also important because when we do hash probing, we take the
  // calculated hash from the inputs and then calculate (hash % capacity) to get
  // the initial probe index. To ensure this is a fast calculation in the stubs,
  // we ensure the capacity is a power of 2, which allows (hash % capacity) to
  // be calculated as (hash & (capacity - 1)).
  ASSERT(Utils::IsPowerOfTwo(new_capacity));
  ASSERT(LoadFactor(new_occupied, new_capacity) < kMaxLoadFactor);
  const intptr_t new_size = new_capacity * kTestEntryLength;
  const auto& new_data =
      // NOTE(review): the allocation expression is missing from this copy
      // (expected something like Array::Handle(zone, Array::New(new_size,
      // Heap::kOld))) — verify against upstream.
  ASSERT(!new_data.IsNull());
  // Mark all the entries in new_data as unoccupied.
  SubtypeTestCacheTable to_table(new_data);
  for (const auto& tuple : to_table) {
    tuple.Set<kInstanceCidOrSignature>(instance_cid_or_signature);
  }
  // Finally, copy over the entries.
  auto& destination_type = AbstractType::Handle(zone);
  auto& instance_type_arguments = TypeArguments::Handle(zone);
  auto& instantiator_type_arguments = TypeArguments::Handle(zone);
  auto& function_type_arguments = TypeArguments::Handle(zone);
  auto& instance_parent_function_type_arguments = TypeArguments::Handle(zone);
  auto& instance_delayed_type_arguments = TypeArguments::Handle(zone);
  auto& test_result = Bool::Handle(zone);
  const SubtypeTestCacheTable from_table(array);
  const intptr_t used_inputs = num_inputs();
  for (intptr_t i = 0; i < current_capacity; i++) {
    const auto& from_tuple = from_table.At(i);
    // Skip unoccupied entries.
    if (from_tuple.Get<kInstanceCidOrSignature>() == Object::null()) continue;
    GetCheckFromArray(array, used_inputs, i, &instance_cid_or_signature,
                      &destination_type, &instance_type_arguments,
                      &instantiator_type_arguments, &function_type_arguments,
                      &instance_parent_function_type_arguments,
                      &instance_delayed_type_arguments, &test_result);
    // Since new_data has a different total capacity, we can't use the old
    // entry indexes, but must recalculate them.
    auto loc = FindKeyOrUnused(
        new_data, used_inputs, instance_cid_or_signature, destination_type,
        instance_type_arguments, instantiator_type_arguments,
        function_type_arguments, instance_parent_function_type_arguments,
        instance_delayed_type_arguments);
    ASSERT(!loc.present);
    const auto& to_tuple = to_table.At(loc.entry);
    to_tuple.Set<kTestResult>(test_result);
    // Cases fall through deliberately: a cache with N used inputs stores
    // inputs 1..N. NOTE(review): the FALL_THROUGH markers and the left-hand
    // Set<> calls for cases 6 and 5 are missing from this copy — verify
    // against upstream.
    switch (used_inputs) {
      case 7:
        to_tuple.Set<kDestinationType>(destination_type);
      case 6:
            instance_delayed_type_arguments);
      case 5:
            instance_parent_function_type_arguments);
      case 4:
        to_tuple.Set<kFunctionTypeArguments>(function_type_arguments);
      case 3:
        to_tuple.Set<kInstantiatorTypeArguments>(instantiator_type_arguments);
      case 2:
        to_tuple.Set<kInstanceTypeArguments>(instance_type_arguments);
      case 1:
        to_tuple.Set<kInstanceCidOrSignature>(instance_cid_or_signature);
        break;
      default:
        UNREACHABLE();
    }
  }
  return new_data.ptr();
}
19395
// NOTE(review): the signature line is missing from this copy — given the
// mutex assertion plus the forward to GetCurrentCheck, this is the locked
// SubtypeTestCache::GetCheck entry point; verify against upstream.
    intptr_t ix,
    Object* instance_class_id_or_signature,
    AbstractType* destination_type,
    TypeArguments* instance_type_arguments,
    TypeArguments* instantiator_type_arguments,
    TypeArguments* function_type_arguments,
    TypeArguments* instance_parent_function_type_arguments,
    TypeArguments* instance_delayed_type_arguments,
    Bool* test_result) const {
  // Caller must hold the subtype-test-cache mutex.
  // NOTE(review): the "ASSERT(Thread::Current()" line is missing here.
      ->isolate_group()
      ->subtype_test_cache_mutex()
      ->IsOwnedByCurrentThread());
  GetCurrentCheck(ix, instance_class_id_or_signature, destination_type,
                  instance_type_arguments, instantiator_type_arguments,
                  function_type_arguments,
                  instance_parent_function_type_arguments,
                  instance_delayed_type_arguments, test_result);
}
19416
// NOTE(review): the signature line is missing from this copy — this is
// SubtypeTestCache::GetCurrentCheck, which reads entry |ix| out of the
// current backing array via GetCheckFromArray. Verify against upstream.
    intptr_t ix,
    Object* instance_class_id_or_signature,
    AbstractType* destination_type,
    TypeArguments* instance_type_arguments,
    TypeArguments* instantiator_type_arguments,
    TypeArguments* function_type_arguments,
    TypeArguments* instance_parent_function_type_arguments,
    TypeArguments* instance_delayed_type_arguments,
    Bool* test_result) const {
  const Array& array = Array::Handle(cache());
  GetCheckFromArray(array, num_inputs(), ix, instance_class_id_or_signature,
                    destination_type, instance_type_arguments,
                    instantiator_type_arguments, function_type_arguments,
                    instance_parent_function_type_arguments,
                    instance_delayed_type_arguments, test_result);
}
19434
// Decodes entry |ix| of |array| into the given out-parameters. Static: does
// not require a cache instance, only the backing array and its arity.
void SubtypeTestCache::GetCheckFromArray(
    const Array& array,
    intptr_t num_inputs,
    intptr_t ix,
    Object* instance_class_id_or_signature,
    AbstractType* destination_type,
    TypeArguments* instance_type_arguments,
    TypeArguments* instantiator_type_arguments,
    TypeArguments* function_type_arguments,
    TypeArguments* instance_parent_function_type_arguments,
    TypeArguments* instance_delayed_type_arguments,
    Bool* test_result) {
  ASSERT(array.ptr() != Object::empty_subtype_test_cache_array().ptr());
  SubtypeTestCacheTable entries(array);
  auto entry = entries[ix];
  // First get the field that determines occupancy. We have to do this with
  // load-acquire because some callers may not have the subtype test cache lock.
  *instance_class_id_or_signature =
      entry.Get<kInstanceCidOrSignature, std::memory_order_acquire>();
  // We should not be retrieving unoccupied entries.
  ASSERT(!instance_class_id_or_signature->IsNull());
  // Cases fall through: a cache with N inputs populates outputs 1..N.
  // NOTE(review): the FALL_THROUGH markers and the right-hand sides for
  // cases 6 and 5 are missing from this copy — verify against upstream.
  switch (num_inputs) {
    case 7:
      *destination_type = entry.Get<kDestinationType>();
    case 6:
      *instance_delayed_type_arguments =
    case 5:
      *instance_parent_function_type_arguments =
    case 4:
      *function_type_arguments = entry.Get<kFunctionTypeArguments>();
    case 3:
      *instantiator_type_arguments = entry.Get<kInstantiatorTypeArguments>();
    case 2:
      *instance_type_arguments = entry.Get<kInstanceTypeArguments>();
    case 1:
      break;
    default:
      UNREACHABLE();
  }
  *test_result = entry.Get<kTestResult>();
}
19484
// NOTE(review): the signature line is missing from this copy — by shape this
// is SubtypeTestCache::GetNextCheck(intptr_t* ix, ...), which scans forward
// from *ix for the next occupied entry and returns whether one was found.
// Verify against upstream.
    intptr_t* ix,
    Object* instance_class_id_or_signature,
    AbstractType* destination_type,
    TypeArguments* instance_type_arguments,
    TypeArguments* instantiator_type_arguments,
    TypeArguments* function_type_arguments,
    TypeArguments* instance_parent_function_type_arguments,
    TypeArguments* instance_delayed_type_arguments,
    Bool* test_result) const {
  ASSERT(ix != nullptr);
  for (intptr_t i = *ix; i < NumEntries(); i++) {
    // NOTE(review): the "ASSERT(Thread::Current()" line is missing here.
        ->isolate_group()
        ->subtype_test_cache_mutex()
        ->IsOwnedByCurrentThread());
    if (IsOccupied(i)) {
      GetCurrentCheck(i, instance_class_id_or_signature, destination_type,
                      instance_type_arguments, instantiator_type_arguments,
                      function_type_arguments,
                      instance_parent_function_type_arguments,
                      instance_delayed_type_arguments, test_result);
      // Resume after this entry on the next call.
      *ix = i + 1;
      return true;
    }
  }
  return false;
}
19513
// NOTE(review): the signature line is missing from this copy — by shape this
// is SubtypeTestCache::HasCheck(...), which looks up an entry by its input
// tuple and optionally reports its index and cached result. Verify upstream.
    const Object& instance_class_id_or_signature,
    const AbstractType& destination_type,
    const TypeArguments& instance_type_arguments,
    const TypeArguments& instantiator_type_arguments,
    const TypeArguments& function_type_arguments,
    const TypeArguments& instance_parent_function_type_arguments,
    const TypeArguments& instance_delayed_type_arguments,
    intptr_t* index,
    Bool* result) const {
  const auto& data = Array::Handle(cache());
  auto loc = FindKeyOrUnused(
      data, num_inputs(), instance_class_id_or_signature, destination_type,
      instance_type_arguments, instantiator_type_arguments,
      function_type_arguments, instance_parent_function_type_arguments,
      instance_delayed_type_arguments);
  if (loc.present) {
    if (index != nullptr) {
      *index = loc.entry;
    }
    if (result != nullptr) {
      SubtypeTestCacheTable entries(data);
      const auto& entry = entries[loc.entry];
      // A positive result from FindKeyOrUnused means that load-acquire is not
      // needed, as an occupied entry never changes for a given backing array.
      *result = entry.Get<kTestResult>();
      ASSERT(!result->IsNull());
    }
  }
  return loc.present;
}
19545
// NOTE(review): the first lines are missing from this copy — this is
// SubtypeTestCache::WriteEntryToBuffer(Zone*, BaseTextBuffer*, ...), the
// lock-asserting wrapper around WriteCurrentEntryToBuffer. Verify upstream.
    intptr_t index,
    const char* line_prefix) const {
  // NOTE(review): the "ASSERT(Thread::Current()" line is missing here.
      ->isolate_group()
      ->subtype_test_cache_mutex()
      ->IsOwnedByCurrentThread());
  WriteCurrentEntryToBuffer(zone, buffer, index, line_prefix);
}
19556
// NOTE(review): the first lines are missing from this copy — this is
// SubtypeTestCache::WriteToBuffer(Zone*, BaseTextBuffer*, ...), the
// lock-asserting wrapper around WriteToBufferUnlocked. Verify upstream.
    const char* line_prefix) const {
  // NOTE(review): the "ASSERT(Thread::Current()" line is missing here.
      ->isolate_group()
      ->subtype_test_cache_mutex()
      ->IsOwnedByCurrentThread());
  WriteToBufferUnlocked(zone, buffer, line_prefix);
}
19566
// Pretty-prints entry |index| into |buffer|: first a raw pointer dump of the
// tuple, then one labeled field per non-null input plus the cached result.
void SubtypeTestCache::WriteCurrentEntryToBuffer(
    Zone* zone,
    // NOTE(review): the BaseTextBuffer* buffer parameter line is missing
    // from this copy — verify against upstream.
    intptr_t index,
    const char* line_prefix) const {
  const char* separator =
      line_prefix == nullptr ? ", " : OS::SCreate(zone, "\n%s", line_prefix);
  // Handles for the decoded entry inputs and result.
  auto& instance_class_id_or_signature = Object::Handle(zone);
  auto& destination_type = AbstractType::Handle(zone);
  auto& instance_type_arguments = TypeArguments::Handle(zone);
  auto& instantiator_type_arguments = TypeArguments::Handle(zone);
  auto& function_type_arguments = TypeArguments::Handle(zone);
  auto& instance_parent_function_type_arguments = TypeArguments::Handle(zone);
  auto& instance_delayed_type_arguments = TypeArguments::Handle(zone);
  auto& result = Bool::Handle(zone);
  GetCurrentCheck(index, &instance_class_id_or_signature, &destination_type,
                  &instance_type_arguments, &instantiator_type_arguments,
                  &function_type_arguments,
                  &instance_parent_function_type_arguments,
                  &instance_delayed_type_arguments, &result);
  buffer->Printf(
      "%" Pd ": [ %#" Px ", %#" Px ", %#" Px ", %#" Px ", %#" Px ", %#" Px
      ", %#" Px ", %#" Px " ]",
      index, static_cast<uword>(instance_class_id_or_signature.ptr()),
      static_cast<uword>(instance_type_arguments.ptr()),
      static_cast<uword>(instantiator_type_arguments.ptr()),
      static_cast<uword>(function_type_arguments.ptr()),
      static_cast<uword>(instance_parent_function_type_arguments.ptr()),
      static_cast<uword>(instance_delayed_type_arguments.ptr()),
      static_cast<uword>(destination_type.ptr()),
      static_cast<uword>(result.ptr()));
  // A Smi key is a class id; otherwise the key is a closure's FunctionType.
  if (instance_class_id_or_signature.IsSmi()) {
    buffer->Printf("%sclass id: %" Pd "", separator,
                   Smi::Cast(instance_class_id_or_signature).Value());
  } else {
    buffer->Printf(
        "%ssignature: %s", separator,
        FunctionType::Cast(instance_class_id_or_signature).ToCString());
  }
  if (!instance_type_arguments.IsNull()) {
    if (instance_class_id_or_signature.IsSmi()) {
      buffer->Printf("%sinstance type arguments: %s", separator,
                     instance_type_arguments.ToCString());
    } else {
      ASSERT(instance_class_id_or_signature.IsFunctionType());
      buffer->Printf("%sclosure instantiator function type arguments: %s",
                     separator, instance_type_arguments.ToCString());
    }
  }
  if (!instantiator_type_arguments.IsNull()) {
    buffer->Printf("%sinstantiator type arguments: %s", separator,
                   instantiator_type_arguments.ToCString());
  }
  if (!function_type_arguments.IsNull()) {
    buffer->Printf("%sfunction type arguments: %s", separator,
                   function_type_arguments.ToCString());
  }
  if (!instance_parent_function_type_arguments.IsNull()) {
    buffer->Printf("%sclosure parent function type arguments: %s", separator,
                   instance_parent_function_type_arguments.ToCString());
  }
  if (!instance_delayed_type_arguments.IsNull()) {
    buffer->Printf("%sclosure delayed function type arguments: %s", separator,
                   instance_delayed_type_arguments.ToCString());
  }
  if (!destination_type.IsNull()) {
    buffer->Printf("%sdestination type: %s", separator,
                   destination_type.ToCString());
    if (!destination_type.IsInstantiated()) {
      AbstractType& test_type = AbstractType::Handle(
          zone, destination_type.InstantiateFrom(instantiator_type_arguments,
                                                 function_type_arguments,
          // NOTE(review): one or more trailing arguments to InstantiateFrom
          // are missing from this copy — verify against upstream.
      const auto type_class_id = test_type.type_class_id();
      buffer->Printf("%sinstantiated type: %s", separator,
                     test_type.ToCString());
      buffer->Printf("%sinstantiated type class id: %d", separator,
                     type_class_id);
    }
  }
  buffer->Printf("%sresult: %s", separator, result.ToCString());
}
19649
19650void SubtypeTestCache::WriteToBufferUnlocked(Zone* zone,
19651 BaseTextBuffer* buffer,
19652 const char* line_prefix) const {
19653 const char* separator =
19654 line_prefix == nullptr ? " " : OS::SCreate(zone, "\n%s", line_prefix);
19655 const char* internal_line_prefix =
19656 line_prefix == nullptr
19657 ? nullptr
19658 : OS::SCreate(zone, "%s%s", line_prefix, line_prefix);
19659 const intptr_t num_entries = NumEntries();
19660 buffer->Printf("SubtypeTestCache(%" Pd ", %" Pd "", num_inputs(),
19661 num_occupied());
19662 for (intptr_t i = 0; i < num_entries; i++) {
19663 if (!IsOccupied(i)) continue;
19664 buffer->Printf(",%s{", separator);
19665 WriteCurrentEntryToBuffer(zone, buffer, i, internal_line_prefix);
19666 buffer->Printf(line_prefix != nullptr ? "}" : " }");
19667 }
19668 buffer->AddString(line_prefix != nullptr && num_entries != 0 ? "\n)" : ")");
19669}
19670
  // NOTE(review): the signature line is missing from this copy — this is
  // SubtypeTestCache::Reset(), which empties the cache by zeroing occupancy
  // and restoring the shared empty backing array. Verify against upstream.
  set_num_occupied(0);
  set_cache(Object::empty_subtype_test_cache_array());
}
19675
// NOTE(review): the opening lines are missing from this copy — by shape this
// is SubtypeTestCache::Equals(const SubtypeTestCache& other), beginning with
// a subtype-test-cache mutex ownership assertion. Verify against upstream.
      ->isolate_group()
      ->subtype_test_cache_mutex()
      ->IsOwnedByCurrentThread());
  // Identical objects are trivially equal.
  if (ptr() == other.ptr()) {
    return true;
  }
  // Caches differ if arity or occupancy differ; otherwise compare contents.
  if (num_inputs() != other.num_inputs()) return false;
  if (num_occupied() != other.num_occupied()) return false;
  return Array::Handle(cache()).Equals(Array::Handle(other.cache()));
}
19688
// Returns a deep copy (new STC object plus a copied backing array).
SubtypeTestCachePtr SubtypeTestCache::Copy(Thread* thread) const {
  // Caller must hold the subtype-test-cache mutex.
  ASSERT(thread->isolate_group()
  // NOTE(review): the remainder of this assertion is missing from this copy
  // (presumably ->subtype_test_cache_mutex()->IsOwnedByCurrentThread()).
  if (IsNull()) {
    return SubtypeTestCache::null();
  }
  Zone* const zone = thread->zone();
  // STC caches are only copied on write if there are not enough unoccupied
  // entries to store a new one, so we need to copy the array.
  const auto& result =
      // NOTE(review): the allocation of the new SubtypeTestCache is missing
      // from this copy — verify against upstream.
  auto& entry_cache = Array::Handle(zone, cache());
  entry_cache = entry_cache.Copy();
  result.set_cache(entry_cache);
  result.set_num_occupied(num_occupied());
  return result.ptr();
}
19707
// True iff entry |index| holds a check. Occupancy is keyed off the
// cid-or-signature slot, read with load-acquire since callers may not hold
// the cache lock.
bool SubtypeTestCache::IsOccupied(intptr_t index) const {
  ASSERT(!IsNull());
  ASSERT(index < NumEntries());
  const intptr_t cache_index =
      // NOTE(review): the index expression is missing from this copy
      // (presumably index * kTestEntryLength + kInstanceCidOrSignature).
  NoSafepointScope no_safepoint;
  return cache()->untag()->element<std::memory_order_acquire>(cache_index) !=
         Object::null();
}
19717
// NOTE(review): the signature line is missing from this copy — by shape this
// is SubtypeTestCache::UsedInputsForType(const AbstractType& type), returning
// how many cache inputs a check against |type| requires. Verify upstream.
  if (type.IsType()) {
    // More-instantiated interface types need fewer inputs.
    if (type.IsInstantiated()) return 2;
    if (type.IsInstantiated(kFunctions)) return 3;
    return 4;
  }
  // Default to all inputs except for the destination type, which must be
  // statically known, otherwise this method wouldn't be called.
  static_assert(kDestinationType == kMaxInputs - 1,
                "destination type is not last input");
  return kMaxInputs - 1;
}
19730
19731const char* SubtypeTestCache::ToCString() const {
19732 auto const zone = Thread::Current()->zone();
19733 ZoneTextBuffer buffer(zone);
19734 WriteToBufferUnlocked(zone, &buffer);
19735 return buffer.buffer();
19736}
19737
// Allocates a new LoadingUnit with the given id and parent unit.
LoadingUnitPtr LoadingUnit::New(intptr_t id, const LoadingUnit& parent) {
  // NOTE(review): a line is missing from this copy here (presumably an
  // ASSERT) — verify against upstream.
  // LoadingUnit objects are long living objects, allocate them in the
  // old generation.
  auto result = Object::Allocate<LoadingUnit>(Heap::kOld);
  NoSafepointScope scope;
  // NOTE(review): a line is missing here (id validation?) — verify upstream.
  result->untag()->packed_fields_.Update<UntaggedLoadingUnit::IdBits>(id);
  result->untag()->set_parent(parent.ptr());
  return result;
}
19749
19750void LoadingUnit::set_base_objects(const Array& value) const {
19751 untag()->set_base_objects(value.ptr());
19752}
19753
19754const char* LoadingUnit::ToCString() const {
19755 return "LoadingUnit";
19756}
19757
19762
// NOTE(review): the opening lines are missing from this copy — by shape this
// is LoadingUnit::CompleteLoad(const String& error_message, bool
// transient_error), which invokes _completeLoads in Dart. Verify upstream.
                                   bool transient_error) const {
  // A null error message means the unit loaded successfully.
  set_loaded(error_message.IsNull());

  // NOTE(review): a line is missing here (presumably the library lookup).
  const String& sel = String::Handle(String::New("_completeLoads"));
  // NOTE(review): the resolution of |func| is missing from this copy.
  ASSERT(!func.IsNull());
  // Arguments: (unit id, error message or null, transient flag).
  const Array& args = Array::Handle(Array::New(3));
  args.SetAt(0, Smi::Handle(Smi::New(id())));
  args.SetAt(1, error_message);
  args.SetAt(2, Bool::Get(transient_error));
  return DartEntry::InvokeFunction(func, args);
}
19777
// The assignment to loading units here must match that in
// AssignLoadingUnitsCodeVisitor, which runs after compilation is done.
// NOTE(review): the signature line is missing from this copy — this is
// LoadingUnit::LoadingUnitOf(const Function&); the reusable-handle scope
// lines also appear to be missing. Verify against upstream.
  Thread* thread = Thread::Current();

  Class& cls = thread->ClassHandle();
  Library& lib = thread->LibraryHandle();
  LoadingUnit& unit = thread->LoadingUnitHandle();

  // A function's loading unit is the unit of its owning class's library.
  cls = function.Owner();
  lib = cls.library();
  unit = lib.loading_unit();
  if (unit.IsNull()) {
    FATAL("Unable to find loading unit of %s (class %s, library %s)",
          function.ToFullyQualifiedCString(), cls.ToCString(), lib.ToCString());
  }
  return unit.id();
}
19799
// Returns the loading-unit id for |code|; stubs belong to the root unit,
// function code to the unit of its owning library.
intptr_t LoadingUnit::LoadingUnitOf(const Code& code) {
  if (code.IsStubCode() || code.IsTypeTestStubCode() ||
      code.IsAllocationStubCode()) {
    return LoadingUnit::kRootId;
  } else {
    Thread* thread = Thread::Current();
    // NOTE(review): several lines are missing from this copy here
    // (presumably reusable-handle scope macros) — verify against upstream.

    Class& cls = thread->ClassHandle();
    Library& lib = thread->LibraryHandle();
    LoadingUnit& unit = thread->LoadingUnitHandle();
    Function& func = thread->FunctionHandle();

    if (code.IsFunctionCode()) {
      func ^= code.function();
      cls = func.Owner();
      lib = cls.library();
      unit = lib.loading_unit();
      ASSERT(!unit.IsNull());
      return unit.id();
    } else {
      UNREACHABLE();
      // NOTE(review): a line is missing here (fallback return?) — verify
      // against upstream.
    }
  }
}
19829
19830const char* Error::ToErrorCString() const {
19831 if (IsNull()) {
19832 return "Error: null";
19833 }
19834 UNREACHABLE();
19835 return "Error";
19836}
19837
19838const char* Error::ToCString() const {
19839 if (IsNull()) {
19840 return "Error: null";
19841 }
19842 // Error is an abstract class. We should never reach here.
19843 UNREACHABLE();
19844 return "Error";
19845}
19846
// Allocates an empty (message-less) ApiError in old space.
ApiErrorPtr ApiError::New() {
  // NOTE(review): a line is missing from this copy here (presumably a class
  // ASSERT) — verify against upstream.
  return Object::Allocate<ApiError>(Heap::kOld);
}
19851
// Allocates an ApiError carrying |message| in |space|.
ApiErrorPtr ApiError::New(const String& message, Heap::Space space) {
#ifndef PRODUCT
  // Optionally dump a native stack trace to help locate API misuse.
  if (FLAG_print_stacktrace_at_api_error) {
    OS::PrintErr("ApiError: %s\n", message.ToCString());
    Profiler::DumpStackTrace(false /* for_crash */);
  }
#endif  // !PRODUCT

  // NOTE(review): a line is missing from this copy here (presumably a class
  // ASSERT) — verify against upstream.
  const auto& result = ApiError::Handle(Object::Allocate<ApiError>(space));
  result.set_message(message);
  return result.ptr();
}
19865
19866void ApiError::set_message(const String& message) const {
19867 untag()->set_message(message.ptr());
19868}
19869
19870const char* ApiError::ToErrorCString() const {
19871 const String& msg_str = String::Handle(message());
19872 return msg_str.ToCString();
19873}
19874
19875const char* ApiError::ToCString() const {
19876 return "ApiError";
19877}
19878
// Allocates an empty LanguageError in old space.
LanguageErrorPtr LanguageError::New() {
  // NOTE(review): a line is missing from this copy here (presumably a class
  // ASSERT) — verify against upstream.
  return Object::Allocate<LanguageError>(Heap::kOld);
}
19883
// Allocates a LanguageError whose message is built from |format| and |args|,
// chained onto |prev_error| and attributed to |script| at |token_pos|.
LanguageErrorPtr LanguageError::NewFormattedV(const Error& prev_error,
                                              const Script& script,
                                              TokenPosition token_pos,
                                              bool report_after_token,
                                              Report::Kind kind,
                                              Heap::Space space,
                                              const char* format,
                                              va_list args) {
  // NOTE(review): a line is missing from this copy here (presumably a class
  // ASSERT) — verify against upstream.
  const auto& result =
      LanguageError::Handle(Object::Allocate<LanguageError>(space));
  result.set_previous_error(prev_error);
  result.set_script(script);
  result.set_token_pos(token_pos);
  result.set_report_after_token(report_after_token);
  result.set_kind(kind);
  result.set_message(
      // NOTE(review): the formatted-message argument is missing from this
      // copy (presumably String::Handle(String::NewFormattedV(format,
      // args)) plus the space argument) — verify against upstream.
  return result.ptr();
}
19904
// Varargs front-end for NewFormattedV; see that function for the actual
// allocation and formatting.
LanguageErrorPtr LanguageError::NewFormatted(const Error& prev_error,
                                             const Script& script,
                                             TokenPosition token_pos,
                                             bool report_after_token,
                                             Report::Kind kind,
                                             Heap::Space space,
                                             const char* format,
                                             ...) {
  va_list args;
  va_start(args, format);
  LanguageErrorPtr result = LanguageError::NewFormattedV(
      prev_error, script, token_pos, report_after_token, kind, space, format,
      args);
  // |result| is a raw pointer, so no safepoint may occur between here and
  // the return; the scope also covers the va_end cleanup below.
  NoSafepointScope no_safepoint;
  va_end(args);
  return result;
}
19922
// Allocates a LanguageError from an already-formatted message.
LanguageErrorPtr LanguageError::New(const String& formatted_message,
                                    Report::Kind kind,
                                    Heap::Space space) {
  // NOTE(review): a line is missing from this copy here (presumably a class
  // ASSERT) — verify against upstream.
  const auto& result =
      LanguageError::Handle(Object::Allocate<LanguageError>(space));
  result.set_formatted_message(formatted_message);
  result.set_kind(kind);
  return result.ptr();
}
19933
19934void LanguageError::set_previous_error(const Error& value) const {
19935 untag()->set_previous_error(value.ptr());
19936}
19937
19938void LanguageError::set_script(const Script& value) const {
19939 untag()->set_script(value.ptr());
19940}
19941
// Records the source position this error is attributed to.
void LanguageError::set_token_pos(TokenPosition token_pos) const {
  // NOTE(review): a line is missing from this copy here (presumably an
  // ASSERT on token_pos) — verify against upstream.
  StoreNonPointer(&untag()->token_pos_, token_pos);
}
19946
19947void LanguageError::set_report_after_token(bool value) const {
19948 StoreNonPointer(&untag()->report_after_token_, value);
19949}
19950
19951void LanguageError::set_kind(uint8_t value) const {
19952 StoreNonPointer(&untag()->kind_, value);
19953}
19954
19955void LanguageError::set_message(const String& value) const {
19956 untag()->set_message(value.ptr());
19957}
19958
19959void LanguageError::set_formatted_message(const String& value) const {
19960 untag()->set_formatted_message(value.ptr());
19961}
19962
// NOTE(review): the signature line is missing from this copy — this is
// LanguageError::FormatMessage(), which lazily builds and caches the
// formatted message string. Verify against upstream.
  // Return the cached formatted message if it was already built.
  if (formatted_message() != String::null()) {
    return formatted_message();
  }
  // NOTE(review): lines are missing here (presumably the call that produces
  // |result| from kind/script/token position and message()).
      report_after_token(), String::Handle(message())));
  // Prepend previous error message.
  const Error& prev_error = Error::Handle(previous_error());
  if (!prev_error.IsNull()) {
    // NOTE(review): the concatenation lines are missing from this copy.
  }
  // Cache the result so later calls hit the early return above.
  set_formatted_message(result);
  return result.ptr();
}
19979
// NOTE(review): the signature line is missing from this copy — this is
// LanguageError::ToErrorCString(). Verify against upstream.
  const String& msg_str = String::Handle(FormatMessage());
  return msg_str.ToCString();
}
19984
19985const char* LanguageError::ToCString() const {
19986 return "LanguageError";
19987}
19988
// Allocates an UnhandledException wrapping |exception| and |stacktrace|.
UnhandledExceptionPtr UnhandledException::New(const Instance& exception,
                                              const Instance& stacktrace,
                                              Heap::Space space) {
  // NOTE(review): a line is missing from this copy here (presumably a class
  // ASSERT) — verify against upstream.
  const auto& result =
      UnhandledException::Handle(Object::Allocate<UnhandledException>(space));
  result.set_exception(exception);
  result.set_stacktrace(stacktrace);
  return result.ptr();
}
19999
// Allocates an UnhandledException with no exception/stacktrace set yet.
UnhandledExceptionPtr UnhandledException::New(Heap::Space space) {
  // NOTE(review): a line is missing from this copy here (presumably a class
  // ASSERT) — verify against upstream.
  return Object::Allocate<UnhandledException>(space);
}
20004
20005void UnhandledException::set_exception(const Instance& exception) const {
20006 untag()->set_exception(exception.ptr());
20007}
20008
20009void UnhandledException::set_stacktrace(const Instance& stacktrace) const {
20010 untag()->set_stacktrace(stacktrace.ptr());
20011}
20012
// NOTE(review): the signature line is missing from this copy — this is
// UnhandledException::ToErrorCString(). Verify against upstream.
  Thread* thread = Thread::Current();
  auto isolate_group = thread->isolate_group();
  // Block isolate reload while we call back into Dart for toString below.
  NoReloadScope no_reload_scope(thread);
  HANDLESCOPE(thread);
  Object& strtmp = Object::Handle();
  const char* exc_str;
  // The preallocated OOM and stack-overflow exceptions get fixed strings
  // rather than a Dart toString call.
  if (exception() == isolate_group->object_store()->out_of_memory()) {
    exc_str = "Out of Memory";
  } else if (exception() == isolate_group->object_store()->stack_overflow()) {
    exc_str = "Stack Overflow";
  } else {
    const Instance& exc = Instance::Handle(exception());
    strtmp = DartLibraryCalls::ToString(exc);
    if (!strtmp.IsError()) {
      exc_str = strtmp.ToCString();
    } else {
      exc_str = "<Received error while converting exception to string>";
    }
  }
  const Instance& stack = Instance::Handle(stacktrace());
  const char* stack_str;
  if (stack.IsNull()) {
    stack_str = "null";
  } else if (stack.IsStackTrace()) {
    stack_str = StackTrace::Cast(stack).ToCString();
  } else {
    strtmp = DartLibraryCalls::ToString(stack);
    if (!strtmp.IsError()) {
      stack_str = strtmp.ToCString();
    } else {
      stack_str = "<Received error while converting stack trace to string>";
    }
  }
  return OS::SCreate(thread->zone(), "Unhandled exception:\n%s\n%s", exc_str,
                     stack_str);
}
20050
20051const char* UnhandledException::ToCString() const {
20052 return "UnhandledException";
20053}
20054
// Allocates an UnwindError carrying |message|; the user-initiated flag
// starts out false.
UnwindErrorPtr UnwindError::New(const String& message, Heap::Space space) {
  // NOTE(review): a line is missing from this copy here (presumably a class
  // ASSERT) — verify against upstream.
  const auto& result =
      UnwindError::Handle(Object::Allocate<UnwindError>(space));
  result.set_message(message);
  ASSERT_EQUAL(result.is_user_initiated(), false);
  return result.ptr();
}
20063
20064void UnwindError::set_message(const String& message) const {
20065 untag()->set_message(message.ptr());
20066}
20067
// NOTE(review): the signature line is missing from this copy — by the field
// name this is UnwindError::set_is_user_initiated(bool value). Verify
// against upstream.
  StoreNonPointer(&untag()->is_user_initiated_, value);
}
20071
20072const char* UnwindError::ToErrorCString() const {
20073 const String& msg_str = String::Handle(message());
20074 return msg_str.ToCString();
20075}
20076
20077const char* UnwindError::ToCString() const {
20078 return "UnwindError";
20079}
20080
// NOTE(review): the signature line is missing from this copy — by shape this
// is Instance::InvokeGetter(const String& getter_name, ...). Verify upstream.
                                bool respect_reflectable,
                                bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  Class& klass = Class::Handle(zone, clazz());
  CHECK_ERROR(klass.EnsureIsFinalized(thread));
  const auto& inst_type_args =
      klass.NumTypeArguments() > 0
          // NOTE(review): the true branch of this conditional is missing
          // from this copy — verify against upstream.
          : Object::null_type_arguments();

  const String& internal_getter_name =
      String::Handle(zone, Field::GetterName(getter_name));
  // NOTE(review): the declaration of |function| is missing from this copy.
      zone, Resolver::ResolveDynamicAnyArgs(zone, klass, internal_getter_name));

  if (!function.IsNull() && check_is_entrypoint) {
    // The getter must correspond to either an entry-point field or a getter
    // method explicitly marked.
    Field& field = Field::Handle(zone);
    if (function.kind() == UntaggedFunction::kImplicitGetter) {
      field = function.accessor_field();
    }
    if (!field.IsNull()) {
      // NOTE(review): a line is missing here (presumably the field
      // entry-point verification) — verify against upstream.
    } else {
      CHECK_ERROR(function.VerifyCallEntryPoint());
    }
  }

  // Check for method extraction when method extractors are not created.
  if (function.IsNull() && !FLAG_lazy_dispatchers) {
    function = Resolver::ResolveDynamicAnyArgs(zone, klass, getter_name);

    if (!function.IsNull() && check_is_entrypoint) {
      CHECK_ERROR(function.VerifyClosurizedEntryPoint());
    }

    if (!function.IsNull() && function.SafeToClosurize()) {
      const Function& closure_function =
          Function::Handle(zone, function.ImplicitClosureFunction());
      return closure_function.ImplicitInstanceClosure(*this);
    }
  }

  // Invoke the getter with the receiver as the sole argument.
  const int kTypeArgsLen = 0;
  const int kNumArgs = 1;
  const Array& args = Array::Handle(zone, Array::New(kNumArgs));
  args.SetAt(0, *this);
  const Array& args_descriptor = Array::Handle(
      zone,
      ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(), Heap::kNew));

  return InvokeInstanceFunction(thread, *this, function, internal_getter_name,
                                args, args_descriptor, respect_reflectable,
                                inst_type_args);
}
20140
// NOTE(review): the signature line is missing from this copy — by shape this
// is Instance::InvokeSetter(const String& setter_name, ...). Verify upstream.
                                const Instance& value,
                                bool respect_reflectable,
                                bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  const Class& klass = Class::Handle(zone, clazz());
  CHECK_ERROR(klass.EnsureIsFinalized(thread));
  const auto& inst_type_args =
      klass.NumTypeArguments() > 0
          // NOTE(review): the true branch of this conditional is missing
          // from this copy — verify against upstream.
          : Object::null_type_arguments();

  const String& internal_setter_name =
      String::Handle(zone, Field::SetterName(setter_name));
  const Function& setter = Function::Handle(
      zone, Resolver::ResolveDynamicAnyArgs(zone, klass, internal_setter_name));

  if (check_is_entrypoint) {
    // The setter must correspond to either an entry-point field or a setter
    // method explicitly marked.
    Field& field = Field::Handle(zone);
    if (setter.kind() == UntaggedFunction::kImplicitSetter) {
      field = setter.accessor_field();
    }
    if (!field.IsNull()) {
      // NOTE(review): a line is missing here (presumably the field
      // entry-point verification) — verify against upstream.
    } else if (!setter.IsNull()) {
      // NOTE(review): a line is missing here (presumably
      // CHECK_ERROR(setter.VerifyCallEntryPoint())) — verify upstream.
    }
  }

  // Invoke the setter with (receiver, value) as arguments.
  const int kTypeArgsLen = 0;
  const int kNumArgs = 2;
  const Array& args = Array::Handle(zone, Array::New(kNumArgs));
  args.SetAt(0, *this);
  args.SetAt(1, value);
  const Array& args_descriptor = Array::Handle(
      zone,
      ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(), Heap::kNew));

  return InvokeInstanceFunction(thread, *this, setter, internal_setter_name,
                                args, args_descriptor, respect_reflectable,
                                inst_type_args);
}
20187
// NOTE(review): the signature line is missing from this copy — by shape this
// is Instance::Invoke(const String& function_name, ...). Verify upstream.
                          const Array& args,
                          const Array& arg_names,
                          bool respect_reflectable,
                          bool check_is_entrypoint) const {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  Class& klass = Class::Handle(zone, clazz());
  CHECK_ERROR(klass.EnsureIsFinalized(thread));

  // NOTE(review): the resolution of |function| is missing from this copy
  // (presumably Function::Handle + Resolver::ResolveDynamicAnyArgs).

  if (!function.IsNull() && check_is_entrypoint) {
    CHECK_ERROR(function.VerifyCallEntryPoint());
  }

  // We don't pass any explicit type arguments, which will be understood as
  // using dynamic for any function type arguments by lower layers.
  const int kTypeArgsLen = 0;
  const Array& args_descriptor = Array::Handle(
      zone, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, args.Length(),
                                          arg_names, Heap::kNew));

  const auto& inst_type_args =
      klass.NumTypeArguments() > 0
          // NOTE(review): the true branch of this conditional is missing
          // from this copy — verify against upstream.
          : Object::null_type_arguments();

  if (function.IsNull()) {
    // Didn't find a method: try to find a getter and invoke call on its result.
    const String& getter_name =
        // NOTE(review): the getter-name expression is missing from this copy.
    function = Resolver::ResolveDynamicAnyArgs(zone, klass, getter_name);
    if (!function.IsNull()) {
      if (check_is_entrypoint) {
        // NOTE(review): a line is missing here (presumably an entry-point
        // check) — verify against upstream.
      }
      ASSERT(function.kind() != UntaggedFunction::kMethodExtractor);
      // Invoke the getter.
      const int kNumArgs = 1;
      const Array& getter_args = Array::Handle(zone, Array::New(kNumArgs));
      getter_args.SetAt(0, *this);
      const Array& getter_args_descriptor = Array::Handle(
          // NOTE(review): a line is missing here (presumably "zone,
          // ArgumentsDescriptor::NewBoxed(") — verify against upstream.
          kTypeArgsLen, getter_args.Length(), Heap::kNew));
      const Object& getter_result = Object::Handle(
          zone, InvokeInstanceFunction(thread, *this, function, getter_name,
                                       getter_args, getter_args_descriptor,
                                       respect_reflectable, inst_type_args));
      if (getter_result.IsError()) {
        return getter_result.ptr();
      }
      // Replace the closure as the receiver in the arguments list.
      args.SetAt(0, getter_result);
      return DartEntry::InvokeClosure(thread, args, args_descriptor);
    }
  }

  // Found an ordinary method.
  return InvokeInstanceFunction(thread, *this, function, function_name, args,
                                args_descriptor, respect_reflectable,
                                inst_type_args);
}
20252
  // Delegates to the Dart-level hashCode via the core library.
  // NOTE(review): the enclosing declaration line is outside this excerpt.
  // TODO(koda): Optimize for all builtin classes and all classes
  // that do not override hashCode.
  return DartLibraryCalls::HashCode(*this);
}
20258
// Keep in sync with AsmIntrinsifier::Object_getHash.
// Returns the identity hash of this instance as an Integer, computing and
// caching one lazily (in the header or a heap side table, per build config).
IntegerPtr Instance::IdentityHashCode(Thread* thread) const {
  // Integers are their own identity hash.
  if (IsInteger()) return Integer::Cast(*this).ptr();

#if defined(HASH_IN_OBJECT_HEADER)
  intptr_t hash = Object::GetCachedHash(ptr());
#else
  intptr_t hash = thread->heap()->GetHash(ptr());
#endif
  if (hash == 0) {
    // No cached hash yet: derive one deterministically for well-known values,
    // otherwise pick a random non-zero value.
    if (IsNull()) {
    } else if (IsBool()) {
      hash = Bool::Cast(*this).value() ? kTrueIdentityHash : kFalseIdentityHash;
    } else if (IsDouble()) {
      double val = Double::Cast(*this).value();
      // Doubles that exactly represent an int64 hash like that integer.
      if ((val >= kMinInt64RepresentableAsDouble) &&
        int64_t ival = static_cast<int64_t>(val);
        if (static_cast<double>(ival) == val) {
          return Integer::New(ival);
        }
      }

      // Fold the raw 64-bit pattern down to a Smi-sized hash.
      uint64_t uval = bit_cast<uint64_t>(val);
      hash = ((uval >> 32) ^ (uval)) & kSmiMax;
    } else {
      do {
        hash = thread->random()->NextUInt32() & 0x3FFFFFFF;
      } while (hash == 0);
    }

    // Publish atomically; another thread may have won the race, in which
    // case the previously-set value is returned.
#if defined(HASH_IN_OBJECT_HEADER)
    hash = Object::SetCachedHashIfNotSet(ptr(), hash);
#else
    hash = thread->heap()->SetHashIfNotSet(ptr(), hash);
#endif
  }
  return Smi::New(hash);
}
20299
// Structural equality used during canonicalization: identical pointers, or
// same class, same size, and bitwise-equal (decompressed) field contents.
bool Instance::CanonicalizeEquals(const Instance& other) const {
  if (this->ptr() == other.ptr()) {
    return true;  // "===".
  }

  if (other.IsNull() || (this->clazz() != other.clazz())) {
    return false;
  }

  {
    // No allocation may happen while raw addresses are being compared.
    NoSafepointScope no_safepoint;
    // Raw bits compare.
    const intptr_t instance_size = SizeFromClass();
    ASSERT(instance_size != 0);
    const intptr_t other_instance_size = other.SizeFromClass();
    ASSERT(other_instance_size != 0);
    if (instance_size != other_instance_size) {
      return false;
    }
    uword this_addr = reinterpret_cast<uword>(this->untag());
    uword other_addr = reinterpret_cast<uword>(other.untag());
    // Compare every field slot after the header.
    for (intptr_t offset = Instance::NextFieldOffset(); offset < instance_size;
      if ((reinterpret_cast<CompressedObjectPtr*>(this_addr + offset)
               ->Decompress(untag()->heap_base())) !=
          (reinterpret_cast<CompressedObjectPtr*>(other_addr + offset)
               ->Decompress(untag()->heap_base()))) {
        return false;
      }
    }
  }
  return true;
}
20333
20334bool Symbol::IsSymbolCid(Thread* thread, classid_t class_id) {
20335 auto object_store = thread->isolate_group()->object_store();
20336 return Class::GetClassId(object_store->symbol_class()) == class_id;
20337}
20338
// Must be kept in sync with Symbol.hashCode in symbol_patch.dart
// Computes the canonical hash of a Symbol instance from its name field.
// NOTE(review): the function's declaration line is outside this excerpt.
  ASSERT(IsSymbolCid(thread, instance.GetClassId()));

  auto zone = thread->zone();
  auto object_store = thread->isolate_group()->object_store();

  const auto& symbol_name_field =
      Field::Handle(zone, object_store->symbol_name_field());
  ASSERT(!symbol_name_field.IsNull());

  // Keep in sync with sdk/lib/_internal/vm/lib/symbol_patch.dart.
  const auto& name =
      String::Cast(Object::Handle(zone, instance.GetField(symbol_name_field)));
  // Mix the name's hash with an arbitrary prime and mask to a positive Smi.
  const uint32_t arbitrary_prime = 664597;
  return 0x1fffffff & (arbitrary_prime * name.CanonicalizeHash());
}
20356
// Computes (and caches in the heap side table) a structural hash consistent
// with CanonicalizeEquals: class id combined with the canonical hash of every
// field, with special handling for symbols, unboxed fields, and sentinels.
// NOTE(review): the declaration line and some helper/loop lines are outside
// this excerpt — confirm against the full source.
  if (GetClassId() == kNullCid) {
    return kNullIdentityHash;
  }
  Thread* thread = Thread::Current();
  uint32_t hash = thread->heap()->GetCanonicalHash(ptr());
  if (hash != 0) {
    // Already computed and cached.
    return hash;
  }
  Zone* zone = thread->zone();
  const Class& cls = Class::Handle(zone, clazz());
  const bool is_symbol = Symbol::IsSymbolCid(thread, cls.id());

  // Raw addresses are read below; no allocation may move this object.
  NoSafepointScope no_safepoint(thread);

  if (is_symbol) {
    hash = Symbol::CanonicalizeHash(thread, *this);
  } else {
    const intptr_t class_id = cls.id();
    ASSERT(class_id != 0);
    hash = class_id;
    uword this_addr = reinterpret_cast<uword>(this->untag());
    Object& obj = Object::Handle(zone);

    const auto unboxed_fields_bitmap =
        GetClassId());

    for (intptr_t offset = Instance::NextFieldOffset();
      if (unboxed_fields_bitmap.Get(offset / kCompressedWordSize)) {
        // Unboxed field: fold the raw words into the hash.
        if (kCompressedWordSize == 8) {
            hash, *reinterpret_cast<uint32_t*>(this_addr + offset));
            hash, *reinterpret_cast<uint32_t*>(this_addr + offset + 4));
        } else {
            hash, *reinterpret_cast<uint32_t*>(this_addr + offset));
        }
      } else {
        // Boxed field: recurse into its canonical hash.
        obj = reinterpret_cast<CompressedObjectPtr*>(this_addr + offset)
                  ->Decompress(untag()->heap_base());
        if (obj.IsSentinel()) {
          hash = CombineHashes(hash, 11);
        } else {
          instance ^= obj.ptr();
          hash = CombineHashes(hash, instance.CanonicalizeHash());
        }
      }
    }
  }
  thread->heap()->SetCanonicalHash(ptr(), hash);
  return hash;
}
20414
#if defined(DEBUG)
// Debug-only visitor used to assert that an object contains no pointer
// fields: visiting any non-empty pointer range latches the flag.
class CheckForPointers : public ObjectPointerVisitor {
 public:
  explicit CheckForPointers(IsolateGroup* isolate_group)
      : ObjectPointerVisitor(isolate_group) {}

  // True iff any pointer range was visited.
  bool has_pointers() const { return found_; }

  void VisitPointers(ObjectPtr* first, ObjectPtr* last) override {
    found_ = found_ || (last >= first);
  }

#if defined(DART_COMPRESSED_POINTERS)
  void VisitCompressedPointers(uword heap_base,
                               CompressedObjectPtr* first,
                               CompressedObjectPtr* last) override {
    found_ = found_ || (last >= first);
  }
#endif

 private:
  bool found_ = false;

  DISALLOW_COPY_AND_ASSIGN(CheckForPointers);
};
#endif  // DEBUG
20445
// Canonicalizes all boxed instance fields in place. Predefined-cid instances
// are expected to already be canonical-field-free (debug-checked below).
// NOTE(review): the declaration line and loop-increment line are outside this
// excerpt — confirm against the full source.
  const intptr_t class_id = GetClassId();
  if (class_id >= kNumPredefinedCids) {
    // Iterate over all fields, canonicalize numbers and strings, expect all
    // other instances to be canonical otherwise report error (return false).
    Zone* zone = thread->zone();
    Object& obj = Object::Handle(zone);
    const intptr_t instance_size = SizeFromClass();
    ASSERT(instance_size != 0);
    const auto unboxed_fields_bitmap =
        thread->isolate_group()->class_table()->GetUnboxedFieldsMapAt(class_id);
    for (intptr_t offset = Instance::NextFieldOffset(); offset < instance_size;
      if (unboxed_fields_bitmap.Get(offset / kCompressedWordSize)) {
        // Unboxed fields hold raw values, nothing to canonicalize.
        continue;
      }
      obj = this->FieldAddrAtOffset(offset)->Decompress(untag()->heap_base());
      if (obj.IsInstance()) {
        obj = Instance::Cast(obj).CanonicalizeLocked(thread);
        this->SetFieldAtOffset(offset, obj);
      } else {
        ASSERT(obj.IsNull() || obj.IsSentinel());
      }
    }
  } else {
#if defined(DEBUG) && !defined(DART_COMPRESSED_POINTERS)
    // Make sure that we are not missing any fields.
    CheckForPointers has_pointers(group);
    this->ptr()->untag()->VisitPointersPrecise(&has_pointers);
    ASSERT(!has_pointers.has_pointers());
#endif  // DEBUG
  }
}
20480
// Returns a shallow old-space copy of this instance.
// NOTE(review): the body line is outside this excerpt — confirm against the
// full source (presumably a shallow Object::Clone into Heap::kOld).
InstancePtr Instance::CopyShallowToOldSpace(Thread* thread) const {
}
20484
// Canonicalizes this instance; forwards to CanonicalizeLocked.
// NOTE(review): interior lines (presumably safepoint/locking asserts) are
// outside this excerpt — confirm against the full source.
InstancePtr Instance::Canonicalize(Thread* thread) const {
  return CanonicalizeLocked(thread);
}
20490
// Returns the canonical object equal to this instance, inserting a (cloned,
// old-space) copy into the class's canonical-constants table if needed.
// Caller must hold the relevant lock; see Canonicalize.
InstancePtr Instance::CanonicalizeLocked(Thread* thread) const {
  // Smis and already-canonical objects are their own canonical form.
  if (!this->ptr()->IsHeapObject() || this->IsCanonical()) {
    return this->ptr();
  }
  ASSERT(!IsNull());
  Zone* zone = thread->zone();
  const Class& cls = Class::Handle(zone, this->clazz());
  Instance& result =
      Instance::Handle(zone, cls.LookupCanonicalInstance(zone, *this));
  if (!result.IsNull()) {
    // An equal canonical instance already exists.
    return result.ptr();
  }
  if (IsNew()) {
    ASSERT((thread->isolate() == Dart::vm_isolate()) || !InVMIsolateHeap());
    // Create a canonical object in old space.
    result ^= Object::Clone(*this, Heap::kOld);
  } else {
    result = this->ptr();
  }
  ASSERT(result.IsOld());
  result.SetCanonical();
  return cls.InsertCanonicalConstant(zone, result);
}
20515
// Reads |field| from this instance, boxing unboxed values (double, simd,
// int64) into fresh heap objects.
// NOTE(review): the declaration line is outside this excerpt.
  if (field.is_unboxed()) {
    switch (field.guarded_cid()) {
      case kDoubleCid:
        return Double::New(*reinterpret_cast<double_t*>(FieldAddr(field)));
      case kFloat32x4Cid:
        return Float32x4::New(
            *reinterpret_cast<simd128_value_t*>(FieldAddr(field)));
      case kFloat64x2Cid:
        return Float64x2::New(
            *reinterpret_cast<simd128_value_t*>(FieldAddr(field)));
      default:
        // Remaining unboxed fields hold raw 64-bit integers.
        return Integer::New(*reinterpret_cast<int64_t*>(FieldAddr(field)));
    }
  } else {
    return FieldAddr(field)->Decompress(untag()->heap_base());
  }
}
20534
20535void Instance::SetField(const Field& field, const Object& value) const {
20536 if (field.is_unboxed()) {
20537 switch (field.guarded_cid()) {
20538 case kDoubleCid:
20539 StoreNonPointer(reinterpret_cast<double_t*>(FieldAddr(field)),
20540 Double::Cast(value).value());
20541 break;
20542 case kFloat32x4Cid:
20543 StoreNonPointer(reinterpret_cast<simd128_value_t*>(FieldAddr(field)),
20544 Float32x4::Cast(value).value());
20545 break;
20546 case kFloat64x2Cid:
20547 StoreNonPointer(reinterpret_cast<simd128_value_t*>(FieldAddr(field)),
20548 Float64x2::Cast(value).value());
20549 break;
20550 default:
20551 StoreNonPointer(reinterpret_cast<int64_t*>(FieldAddr(field)),
20552 Integer::Cast(value).AsInt64Value());
20553 break;
20554 }
20555 } else {
20556 field.RecordStore(value);
20557 StoreCompressedPointer(FieldAddr(field), value.ptr());
20558 }
20559}
20560
// Returns the canonicalized runtime type of this instance, allocating any
// newly built Type in |space|.
// NOTE(review): a couple of lines (e.g. the closure-signature handle
// declaration) are outside this excerpt — confirm against the full source.
AbstractTypePtr Instance::GetType(Heap::Space space) const {
  if (IsNull()) {
    return Type::NullType();
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Class& cls = Class::Handle(zone, clazz());
  if (!cls.is_finalized()) {
    // Various predefined classes can be instantiated by the VM or
    // Dart_NewString/Integer/TypedData/... before the class is finalized.
    ASSERT(cls.is_prefinalized());
  }
  // A closure's type is its instantiated function signature.
  if (cls.IsClosureClass()) {
      zone, Closure::Cast(*this).GetInstantiatedSignature(zone));
    if (!signature.IsFinalized()) {
      signature.SetIsFinalized();
    }
    signature ^= signature.Canonicalize(thread);
    return signature.ptr();
  }
  // A record's type is its (already canonical) record type.
  if (IsRecord()) {
    ASSERT(cls.IsRecordClass());
    auto& record_type =
        RecordType::Handle(zone, Record::Cast(*this).GetRecordType());
    ASSERT(record_type.IsFinalized());
    ASSERT(record_type.IsCanonical());
    return record_type.ptr();
  }
  Type& type = Type::Handle(zone);
  // Non-generic classes can reuse their cached declaration type.
  if (!cls.IsGeneric()) {
    type = cls.DeclarationType();
  }
  if (type.IsNull()) {
    TypeArguments& type_arguments = TypeArguments::Handle(zone);
    const intptr_t num_type_arguments = cls.NumTypeArguments();
    if (num_type_arguments > 0) {
      type_arguments = GetTypeArguments();
      if (!type_arguments.IsNull()) {
        type_arguments = type_arguments.FromInstanceTypeArguments(thread, cls);
      }
    }
    type = Type::New(cls, type_arguments, Nullability::kNonNullable, space);
    type.SetIsFinalized();
    type ^= type.Canonicalize(thread);
  }
  return type.ptr();
}
20610
20611TypeArgumentsPtr Instance::GetTypeArguments() const {
20612 ASSERT(!IsType());
20613 const Class& cls = Class::Handle(clazz());
20614 intptr_t field_offset = cls.host_type_arguments_field_offset();
20615 ASSERT(field_offset != Class::kNoTypeArguments);
20616 TypeArguments& type_arguments = TypeArguments::Handle();
20617 type_arguments ^=
20618 FieldAddrAtOffset(field_offset)->Decompress(untag()->heap_base());
20619 return type_arguments.ptr();
20620}
20621
// Stores a (canonical or null) type argument vector into the class's
// type-arguments field slot.
// NOTE(review): the declaration line is outside this excerpt.
  ASSERT(!IsType());
  ASSERT(value.IsNull() || value.IsCanonical());
  const Class& cls = Class::Handle(clazz());
  intptr_t field_offset = cls.host_type_arguments_field_offset();
  ASSERT(field_offset != Class::kNoTypeArguments);
  SetFieldAtOffset(field_offset, value);
}
20630
20631/*
20632Specification of instance checks (e is T) and casts (e as T), where e evaluates
20633to a value v and v has runtime type S:
20634
20635Instance checks (e is T) in weak checking mode in a legacy or opted-in library:
20636 If v == null and T is a legacy type
20637 return LEGACY_SUBTYPE(T, Null) || LEGACY_SUBTYPE(Object, T)
20638 If v == null and T is not a legacy type, return NNBD_SUBTYPE(Null, T)
20639 Otherwise return LEGACY_SUBTYPE(S, T)
20640
20641Instance checks (e is T) in strong checking mode in a legacy or opted-in lib:
20642 If v == null and T is a legacy type
20643 return LEGACY_SUBTYPE(T, Null) || LEGACY_SUBTYPE(Object, T)
20644 Otherwise return NNBD_SUBTYPE(S, T)
20645
20646Casts (e as T) in weak checking mode in a legacy or opted-in library:
20647 If LEGACY_SUBTYPE(S, T) then e as T evaluates to v.
20648 Otherwise a TypeError is thrown.
20649
20650Casts (e as T) in strong checking mode in a legacy or opted-in library:
20651 If NNBD_SUBTYPE(S, T) then e as T evaluates to v.
20652 Otherwise a TypeError is thrown.
20653*/
20654
// Instance-check (`e is T`): null receivers use the null-specific rules,
// all others reduce to a runtime-type subtype check.
// NOTE(review): the declaration line is outside this excerpt.
    const AbstractType& other,
    const TypeArguments& other_instantiator_type_arguments,
    const TypeArguments& other_function_type_arguments) const {
  ASSERT(!other.IsDynamicType());
  if (IsNull()) {
    return Instance::NullIsInstanceOf(other, other_instantiator_type_arguments,
                                      other_function_type_arguments);
  }
  // In strong mode, compute NNBD_SUBTYPE(runtimeType, other).
  // In weak mode, compute LEGACY_SUBTYPE(runtimeType, other).
  return RuntimeTypeIsSubtypeOf(other, other_instantiator_type_arguments,
                                other_function_type_arguments);
}
20669
// Assignability check: reduces to a runtime-type subtype check (no special
// null branch here, unlike IsInstanceOf).
// NOTE(review): the declaration line is outside this excerpt.
    const AbstractType& other,
    const TypeArguments& other_instantiator_type_arguments,
    const TypeArguments& other_function_type_arguments) const {
  ASSERT(!other.IsDynamicType());
  // In strong mode, compute NNBD_SUBTYPE(runtimeType, other).
  // In weak mode, compute LEGACY_SUBTYPE(runtimeType, other).
  return RuntimeTypeIsSubtypeOf(other, other_instantiator_type_arguments,
                                other_function_type_arguments);
}
20680
// If 'other' type (once instantiated) is a legacy type:
// return LEGACY_SUBTYPE(other, Null) || LEGACY_SUBTYPE(Object, other).
// Otherwise return NNBD_SUBTYPE(Null, T).
// Ignore value of strong flag value.
bool Instance::NullIsInstanceOf(
    const AbstractType& other,
    const TypeArguments& other_instantiator_type_arguments,
    const TypeArguments& other_function_type_arguments) {
  ASSERT(other.IsFinalized());
  if (other.IsNullable()) {
    // This case includes top types (void, dynamic, Object?).
    // The uninstantiated nullable type will remain nullable after
    // instantiation.
    return true;
  }
  if (other.IsFutureOrType()) {
    // FutureOr<T> accepts null iff T does.
    const auto& type = AbstractType::Handle(other.UnwrapFutureOr());
    return NullIsInstanceOf(type, other_instantiator_type_arguments,
                            other_function_type_arguments);
  }
  // No need to instantiate type, unless it is a type parameter.
  // Note that a typeref cannot refer to a type parameter.
  // NOTE(review): the instantiation lines here are outside this excerpt.
  if (other.IsTypeParameter()) {
        other_instantiator_type_arguments, other_function_type_arguments,
    return Instance::NullIsInstanceOf(type, Object::null_type_arguments(),
                                      Object::null_type_arguments());
  }
  return other.IsLegacy() && (other.IsObjectType() || other.IsNeverType());
}
20712
// Must be kept in sync with GenerateNullIsAssignableToType in
// stub_code_compiler.cc if any changes are made.
// NOTE(review): the declaration line and the FutureOr recursion line are
// outside this excerpt.
  // "Left Null" rule: null is assignable when destination type is either
  // legacy or nullable. Otherwise it is not assignable or we cannot tell
  // without instantiating type parameter.
  if (other.IsLegacy() || other.IsNullable()) {
    return true;
  }
  if (other.IsFutureOrType()) {
  }
  // Since the TAVs are not available, for non-nullable type parameters
  // this returns a conservative approximation of "not assignable" .
  return false;
}
20729
// Must be kept in sync with GenerateNullIsAssignableToType in
// stub_code_compiler.cc if any changes are made.
// Overload with type-argument vectors: instantiates type parameters before
// re-checking null assignability.
// NOTE(review): the declaration line and InstantiateFrom's trailing argument
// line are outside this excerpt.
    const AbstractType& other,
    const TypeArguments& other_instantiator_type_arguments,
    const TypeArguments& other_function_type_arguments) {
  // Do checks that don't require instantiation first.
  if (NullIsAssignableTo(other)) return true;
  if (!other.IsTypeParameter()) return false;
  const auto& type = AbstractType::Handle(other.InstantiateFrom(
      other_instantiator_type_arguments, other_function_type_arguments,
  return NullIsAssignableTo(type);
}
20744
// Returns whether the runtime type of this instance is a subtype of |other|,
// instantiating |other| from the supplied type-argument vectors when needed.
// Closures are checked against function types, records field-wise against
// record types; all other instances via Class::IsSubtypeOf.
// NOTE(review): the trailing argument lines of the InstantiateFrom calls are
// outside this excerpt.
bool Instance::RuntimeTypeIsSubtypeOf(
    const AbstractType& other,
    const TypeArguments& other_instantiator_type_arguments,
    const TypeArguments& other_function_type_arguments) const {
  ASSERT(other.IsFinalized());
  ASSERT(ptr() != Object::sentinel().ptr());
  // Instance may not have runtimeType dynamic, void, or Never.
  if (other.IsTopTypeForSubtyping()) {
    return true;
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Class& cls = Class::Handle(zone, clazz());
  if (cls.IsClosureClass()) {
    if (other.IsDartFunctionType() || other.IsDartClosureType() ||
        other.IsObjectType()) {
      return true;
    }
    AbstractType& instantiated_other = AbstractType::Handle(zone, other.ptr());
    if (!other.IsInstantiated()) {
      instantiated_other = other.InstantiateFrom(
          other_instantiator_type_arguments, other_function_type_arguments,
      if (instantiated_other.IsTopTypeForSubtyping() ||
          instantiated_other.IsObjectType() ||
          instantiated_other.IsDartFunctionType()) {
        return true;
      }
    }
    if (RuntimeTypeIsSubtypeOfFutureOr(zone, instantiated_other)) {
      return true;
    }
    if (!instantiated_other.IsFunctionType()) {
      return false;
    }
    // Compare the closure's instantiated signature against the target
    // function type.
    const FunctionType& sig = FunctionType::Handle(
        Closure::Cast(*this).GetInstantiatedSignature(zone));
    return sig.IsSubtypeOf(FunctionType::Cast(instantiated_other), Heap::kOld);
  }
  if (cls.IsRecordClass()) {
    if (other.IsDartRecordType() || other.IsObjectType()) {
      return true;
    }
    AbstractType& instantiated_other = AbstractType::Handle(zone, other.ptr());
    if (!other.IsInstantiated()) {
      instantiated_other = other.InstantiateFrom(
          other_instantiator_type_arguments, other_function_type_arguments,
      if (instantiated_other.IsTopTypeForSubtyping() ||
          instantiated_other.IsObjectType() ||
          instantiated_other.IsDartRecordType()) {
        return true;
      }
    }
    if (RuntimeTypeIsSubtypeOfFutureOr(zone, instantiated_other)) {
      return true;
    }
    if (!instantiated_other.IsRecordType()) {
      return false;
    }
    // Records match only record types of the same shape, with every field's
    // runtime type a subtype of the corresponding field type.
    const Record& record = Record::Cast(*this);
    const RecordType& record_type = RecordType::Cast(instantiated_other);
    if (record.shape() != record_type.shape()) {
      return false;
    }
    Instance& field_value = Instance::Handle(zone);
    AbstractType& field_type = AbstractType::Handle(zone);
    const intptr_t num_fields = record.num_fields();
    for (intptr_t i = 0; i < num_fields; ++i) {
      field_value ^= record.FieldAt(i);
      field_type = record_type.FieldTypeAt(i);
      if (!field_value.RuntimeTypeIsSubtypeOf(field_type,
                                              Object::null_type_arguments(),
                                              Object::null_type_arguments())) {
        return false;
      }
    }
    return true;
  }
  TypeArguments& type_arguments = TypeArguments::Handle(zone);
  const intptr_t num_type_arguments = cls.NumTypeArguments();
  if (num_type_arguments > 0) {
    type_arguments = GetTypeArguments();
    ASSERT(type_arguments.IsNull() || type_arguments.IsCanonical());
    // The number of type arguments in the instance must be greater or equal to
    // the number of type arguments expected by the instance class.
    // A discrepancy is allowed for closures, which borrow the type argument
    // vector of their instantiator, which may be of a subclass of the class
    // defining the closure. Truncating the vector to the correct length on
    // instantiation is unnecessary. The vector may therefore be longer.
    // Also, an optimization reuses the type argument vector of the instantiator
    // of generic instances when its layout is compatible.
    ASSERT(type_arguments.IsNull() ||
           (type_arguments.Length() >= num_type_arguments));
  }
  AbstractType& instantiated_other = AbstractType::Handle(zone, other.ptr());
  if (!other.IsInstantiated()) {
    instantiated_other = other.InstantiateFrom(
        other_instantiator_type_arguments, other_function_type_arguments,
    if (instantiated_other.IsTopTypeForSubtyping()) {
      return true;
    }
  }
  if (IsNull()) {
    if (instantiated_other.IsNullType()) {
      return true;
    }
    if (RuntimeTypeIsSubtypeOfFutureOr(zone, instantiated_other)) {
      return true;
    }
    // At this point, instantiated_other can be a function type.
    return !instantiated_other.IsNonNullable();
  }
  if (!instantiated_other.IsType()) {
    return false;
  }
  // RuntimeType of non-null instance is non-nullable, so there is no need to
  // check nullability of other type.
  return Class::IsSubtypeOf(cls, type_arguments, Nullability::kNonNullable,
                            instantiated_other, Heap::kOld);
}
20867
// Handles the FutureOr<T> cases of RuntimeTypeIsSubtypeOf: true when T is a
// top type, when this is a Future whose type argument is a subtype of T, or
// when this instance's runtime type is itself a subtype of T.
// NOTE(review): one handle-initialization line is outside this excerpt.
bool Instance::RuntimeTypeIsSubtypeOfFutureOr(Zone* zone,
                                              const AbstractType& other) const {
  if (other.IsFutureOrType()) {
    const TypeArguments& other_type_arguments =
        TypeArguments::Handle(zone, other.arguments());
    const AbstractType& other_type_arg =
        AbstractType::Handle(zone, other_type_arguments.TypeAtNullSafe(0));
    if (other_type_arg.IsTopTypeForSubtyping()) {
      return true;
    }
    if (Class::Handle(zone, clazz()).IsFutureClass()) {
      const TypeArguments& type_arguments =
      const AbstractType& type_arg =
          AbstractType::Handle(zone, type_arguments.TypeAtNullSafe(0));
      if (type_arg.IsSubtypeOf(other_type_arg, Heap::kOld)) {
        return true;
      }
    }
    // Retry RuntimeTypeIsSubtypeOf after unwrapping type arg of FutureOr.
    if (RuntimeTypeIsSubtypeOf(other_type_arg, Object::null_type_arguments(),
                               Object::null_type_arguments())) {
      return true;
    }
  }
  return false;
}
20895
20896bool Instance::OperatorEquals(const Instance& other) const {
20897 // TODO(koda): Optimize for all builtin classes and all classes
20898 // that do not override operator==.
20899 return DartLibraryCalls::Equals(*this, other) == Object::bool_true().ptr();
20900}
20901
20902bool Instance::IsIdenticalTo(const Instance& other) const {
20903 if (ptr() == other.ptr()) return true;
20904 if (IsInteger() && other.IsInteger()) {
20905 return Integer::Cast(*this).Equals(other);
20906 }
20907 if (IsDouble() && other.IsDouble()) {
20908 double other_value = Double::Cast(other).value();
20909 return Double::Cast(*this).BitwiseEqualsToDouble(other_value);
20910 }
20911 return false;
20912}
20913
// Returns a raw pointer to the native-fields backing store, or nullptr if
// none has been allocated yet. Caller must be inside a NoSafepointScope.
// NOTE(review): the declaration line is outside this excerpt.
  ASSERT(Thread::Current()->no_safepoint_scope_depth() > 0);
  TypedDataPtr native_fields = static_cast<TypedDataPtr>(
      NativeFieldsAddr()->Decompress(untag()->heap_base()));
  if (native_fields == TypedData::null()) {
    return nullptr;
  }
  return reinterpret_cast<intptr_t*>(native_fields->untag()->data());
}
20923
20924void Instance::SetNativeField(int index, intptr_t value) const {
20925 ASSERT(IsValidNativeIndex(index));
20926 Object& native_fields =
20927 Object::Handle(NativeFieldsAddr()->Decompress(untag()->heap_base()));
20928 if (native_fields.IsNull()) {
20929 // Allocate backing storage for the native fields.
20930 native_fields = TypedData::New(kIntPtrCid, NumNativeFields());
20931 StoreCompressedPointer(NativeFieldsAddr(), native_fields.ptr());
20932 }
20933 intptr_t byte_offset = index * sizeof(intptr_t);
20934 TypedData::Cast(native_fields).SetIntPtr(byte_offset, value);
20935}
20936
20937void Instance::SetNativeFields(uint16_t num_native_fields,
20938 const intptr_t* field_values) const {
20939 ASSERT(num_native_fields == NumNativeFields());
20940 ASSERT(field_values != nullptr);
20941 Object& native_fields =
20942 Object::Handle(NativeFieldsAddr()->Decompress(untag()->heap_base()));
20943 if (native_fields.IsNull()) {
20944 // Allocate backing storage for the native fields.
20945 native_fields = TypedData::New(kIntPtrCid, NumNativeFields());
20946 StoreCompressedPointer(NativeFieldsAddr(), native_fields.ptr());
20947 }
20948 for (uint16_t i = 0; i < num_native_fields; i++) {
20949 intptr_t byte_offset = i * sizeof(intptr_t);
20950 TypedData::Cast(native_fields).SetIntPtr(byte_offset, field_values[i]);
20951 }
20952}
20953
// Returns whether this instance is callable: a closure, or an object whose
// class resolves a dynamic "call" method. On success optionally returns the
// callable function through |function|.
// NOTE(review): the declaration line is outside this excerpt.
  Class& cls = Class::Handle(clazz());
  if (cls.IsClosureClass()) {
    if (function != nullptr) {
      *function = Closure::Cast(*this).function();
    }
    return true;
  }
  // Try to resolve a "call" method.
  Zone* zone = Thread::Current()->zone();
  Function& call_function = Function::Handle(
      zone, Resolver::ResolveDynamicAnyArgs(zone, cls, Symbols::DynamicCall(),
                                            /*allow_add=*/false));
  if (call_function.IsNull()) {
    return false;
  }
  if (function != nullptr) {
    *function = call_function.ptr();
  }
  return true;
}
20975
20976InstancePtr Instance::New(const Class& cls, Heap::Space space) {
20977 Thread* thread = Thread::Current();
20978 if (cls.EnsureIsAllocateFinalized(thread) != Error::null()) {
20979 return Instance::null();
20980 }
20981 return NewAlreadyFinalized(cls, space);
20982}
20983
// Allocates an instance of an already-finalized class, null-initializing all
// pointer fields after the header.
// NOTE(review): an assert line and one branch of the ternary are outside this
// excerpt.
InstancePtr Instance::NewAlreadyFinalized(const Class& cls, Heap::Space space) {
  intptr_t instance_size = cls.host_instance_size();
  ASSERT(instance_size > 0);
  // Initialize everything after the object header with Object::null(), since
  // this isn't a predefined class.
  const uword ptr_field_end_offset =
      instance_size - (Instance::ContainsCompressedPointers()
                           : kWordSize);
  return static_cast<InstancePtr>(Object::Allocate(
      cls.id(), instance_size, space, Instance::ContainsCompressedPointers(),
      from_offset<Instance>(), ptr_field_end_offset));
}
20998
// Returns whether |offset| lies within this instance's field area.
// NOTE(review): the reusable-handle scope line and the upper-bound clause of
// the range check are outside this excerpt.
bool Instance::IsValidFieldOffset(intptr_t offset) const {
  Thread* thread = Thread::Current();
  Class& cls = thread->ClassHandle();
  cls = clazz();
  return (offset >= 0 &&
}
21007
// Returns the per-element size in bytes for variable-length classes.
// NOTE(review): the typed-data fast path and the per-cid return lines are
// outside this excerpt — confirm against the full source.
intptr_t Instance::ElementSizeFor(intptr_t cid) {
  }
  switch (cid) {
    case kArrayCid:
    case kImmutableArrayCid:
    case kTypeArgumentsCid:
    case kOneByteStringCid:
    case kTwoByteStringCid:
    default:
      UNIMPLEMENTED();
      return 0;
  }
}
21028
// Returns the offset of the first element for variable-length classes.
// NOTE(review): the external-typed-data predicate and several per-cid return
// lines are outside this excerpt — confirm against the full source.
intptr_t Instance::DataOffsetFor(intptr_t cid) {
    // Elements start at offset 0 of the external data.
    return 0;
  }
  if (IsTypedDataClassId(cid)) {
  }
  switch (cid) {
    case kArrayCid:
    case kImmutableArrayCid:
      return Array::data_offset();
    case kTypeArgumentsCid:
    case kOneByteStringCid:
    case kTwoByteStringCid:
    case kRecordCid:
      return Record::field_offset(0);
    default:
      UNIMPLEMENTED();
      return Array::data_offset();
  }
}
21054
21055const char* Instance::ToCString() const {
21056 if (IsNull()) {
21057 return "null";
21058 } else if (Thread::Current()->no_safepoint_scope_depth() > 0) {
21059 // Can occur when running disassembler.
21060 return "Instance";
21061 } else {
21062 if (IsClosure()) {
21063 return Closure::Cast(*this).ToCString();
21064 }
21065 // Background compiler disassembly of instructions referring to pool objects
21066 // calls this function and requires allocation of Type in old space.
21067 const AbstractType& type = AbstractType::Handle(GetType(Heap::kOld));
21068 const String& type_name = String::Handle(type.UserVisibleName());
21069 return OS::SCreate(Thread::Current()->zone(), "Instance of '%s'",
21070 type_name.ToCString());
21071 }
21072}
21073
  // Default implementation reachable only via the null AbstractType; every
  // concrete subclass overrides it.
  // NOTE(review): the declaration line is outside this excerpt.
  // All subclasses should implement this appropriately, so the only value that
  // should reach this implementation should be the null value.
  ASSERT(IsNull());
  // AbstractType is an abstract class.
  UNREACHABLE();
  return kIllegalCid;
}
21082
  // Default implementation reachable only via the null AbstractType; every
  // concrete subclass overrides it.
  // NOTE(review): the declaration line is outside this excerpt.
  // All subclasses should implement this appropriately, so the only value that
  // should reach this implementation should be the null value.
  ASSERT(IsNull());
  // AbstractType is an abstract class.
  UNREACHABLE();
  return Class::null();
}
21091
21092TypeArgumentsPtr AbstractType::arguments() const {
21093 // All subclasses should implement this appropriately, so the only value that
21094 // should reach this implementation should be the null value.
21095 ASSERT(IsNull());
21096 // AbstractType is an abstract class.
21097 UNREACHABLE();
21098 return nullptr;
21099}
21100
  // Returns true iff null can never inhabit this type: the type must be
  // non-nullable, and for type parameters / FutureOr the check recurses into
  // the bound / wrapped type.
  // NOTE(review): the declaration line is outside this excerpt.
  // Null can be assigned to legacy and nullable types.
  if (!IsNonNullable()) {
    return false;
  }

  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();

  if (IsTypeParameter()) {
    const auto& bound =
        AbstractType::Handle(zone, TypeParameter::Cast(*this).bound());
    ASSERT(!bound.IsNull());
    return bound.IsStrictlyNonNullable();
  }
  if (IsFutureOrType()) {
    return AbstractType::Handle(zone, UnwrapFutureOr()).IsStrictlyNonNullable();
  }
  return true;
}
21121
// Adjusts this type's nullability after instantiation from |type_param|,
// per the combination table below, dispatching to the subclass-specific
// ToNullability when a change is required.
// NOTE(review): the declaration line is outside this excerpt.
    const TypeParameter& type_param,
    Heap::Space space) const {
  Nullability result_nullability;
  const Nullability arg_nullability = nullability();
  const Nullability var_nullability = type_param.nullability();
  // Adjust nullability of result 'arg' instantiated from 'var'.
  // arg/var ! ? *
  //  !      ! ? *
  //  ?      ? ? ?
  //  *      * ? *
  if (var_nullability == Nullability::kNullable ||
      arg_nullability == Nullability::kNullable) {
    result_nullability = Nullability::kNullable;
  } else if (var_nullability == Nullability::kLegacy ||
             arg_nullability == Nullability::kLegacy) {
    result_nullability = Nullability::kLegacy;
  } else {
    // Keep arg nullability.
    return ptr();
  }
  if (arg_nullability == result_nullability) {
    return ptr();
  }
  if (IsType()) {
    return Type::Cast(*this).ToNullability(result_nullability, space);
  }
  if (IsFunctionType()) {
    return FunctionType::Cast(*this).ToNullability(result_nullability, space);
  }
  if (IsRecordType()) {
    return RecordType::Cast(*this).ToNullability(result_nullability, space);
  }
  if (IsTypeParameter()) {
    return TypeParameter::Cast(*this).ToNullability(result_nullability, space);
  }
  UNREACHABLE();
}
21160
// Normalizes FutureOr<T> per the language's normalization rules (top types,
// Object, Never, Null, and nullability folding); non-FutureOr types are
// returned unchanged.
// NOTE(review): the declaration line and the unwrapped-type initializer line
// are outside this excerpt — confirm against the full source.
  if (IsFutureOrType()) {
    Zone* zone = Thread::Current()->zone();
    const AbstractType& unwrapped_type =
    const classid_t cid = unwrapped_type.type_class_id();
    if (cid == kDynamicCid || cid == kVoidCid) {
      // FutureOr of a top type normalizes to that top type.
      return unwrapped_type.ptr();
    }
    if (cid == kInstanceCid) {
      if (IsNonNullable()) {
        return unwrapped_type.ptr();
      }
      if (IsNullable() || unwrapped_type.IsNullable()) {
        return Type::Cast(unwrapped_type)
            .ToNullability(Nullability::kNullable, space);
      }
      return Type::Cast(unwrapped_type)
          .ToNullability(Nullability::kLegacy, space);
    }
    if (cid == kNeverCid && unwrapped_type.IsNonNullable()) {
      // FutureOr<Never> normalizes to Future<Never>.
      ObjectStore* object_store = IsolateGroup::Current()->object_store();
      const Type& future_never_type =
          Type::Handle(zone, object_store->non_nullable_future_never_type());
      ASSERT(!future_never_type.IsNull());
      return future_never_type.ToNullability(nullability(), space);
    }
    if (cid == kNullCid) {
      // FutureOr<Null> normalizes to Future<Null>?.
      ObjectStore* object_store = IsolateGroup::Current()->object_store();
      ASSERT(object_store->nullable_future_null_type() != Type::null());
      return object_store->nullable_future_null_type();
    }
    if (IsNullable() && unwrapped_type.IsNullable()) {
      return Type::Cast(*this).ToNullability(Nullability::kNonNullable, space);
    }
  }
  return ptr();
}
21199
21201 intptr_t num_free_fun_type_params) const {
21202 // All subclasses should implement this appropriately, so the only value that
21203 // should reach this implementation should be the null value.
21204 ASSERT(IsNull());
21205 // AbstractType is an abstract class.
21206 UNREACHABLE();
21207 return false;
21208}
21209
21216
21217void AbstractType::set_flags(uint32_t value) const {
21218 untag()->set_flags(value);
21219}
21220
21226
21228 ASSERT(!IsCanonical());
21230 static_cast<uint8_t>(value), untag()->flags()));
21231}
21232
21234 const Instance& other,
21235 TypeEquality kind,
21236 FunctionTypeMapping* function_type_equivalence) const {
21237 // All subclasses should implement this appropriately, so the only value that
21238 // should reach this implementation should be the null value.
21239 ASSERT(IsNull());
21240 // AbstractType is an abstract class.
21241 UNREACHABLE();
21242 return false;
21243}
21244
21246 const AbstractType& other_type,
21247 TypeEquality kind) const {
21248 Nullability this_type_nullability = nullability();
21249 Nullability other_type_nullability = other_type.nullability();
21250 if (kind == TypeEquality::kInSubtypeTest) {
21251 if (this_type_nullability == Nullability::kNullable &&
21252 other_type_nullability == Nullability::kNonNullable) {
21253 return false;
21254 }
21255 } else {
21256 if (kind == TypeEquality::kSyntactical) {
21257 if (this_type_nullability == Nullability::kLegacy) {
21258 this_type_nullability = Nullability::kNonNullable;
21259 }
21260 if (other_type_nullability == Nullability::kLegacy) {
21261 other_type_nullability = Nullability::kNonNullable;
21262 }
21263 } else {
21265 }
21266 if (this_type_nullability != other_type_nullability) {
21267 return false;
21268 }
21269 }
21270 return true;
21271}
21272
21274 const TypeArguments& instantiator_type_arguments,
21275 const TypeArguments& function_type_arguments,
21276 intptr_t num_free_fun_type_params,
21277 Heap::Space space,
21278 FunctionTypeMapping* function_type_mapping,
21279 intptr_t num_parent_type_args_adjustment) const {
21280 // All subclasses should implement this appropriately, so the only value that
21281 // should reach this implementation should be the null value.
21282 ASSERT(IsNull());
21283 // AbstractType is an abstract class.
21284 UNREACHABLE();
21285 return nullptr;
21286}
21287
21289 intptr_t num_parent_type_args_adjustment,
21290 intptr_t num_free_fun_type_params,
21291 Heap::Space space,
21292 FunctionTypeMapping* function_type_mapping) const {
21293 UNREACHABLE();
21294 return nullptr;
21295}
21296
// Returns the canonical form of this type. AbstractType is abstract, so
// every concrete subclass overrides this; only the null AbstractType value
// can reach this default implementation, which must never actually run.
AbstractTypePtr AbstractType::Canonicalize(Thread* thread) const {
  // All subclasses should implement this appropriately, so the only value that
  // should reach this implementation should be the null value.
  ASSERT(IsNull());
  // AbstractType is an abstract class.
  UNREACHABLE();
  return nullptr;
}
21305
21307 // All subclasses should implement this appropriately, so the only value that
21308 // should reach this implementation should be the null value.
21309 ASSERT(IsNull());
21310 // AbstractType is an abstract class.
21311 UNREACHABLE();
21312}
21313
21314void AbstractType::AddURI(URIs* uris, const String& name, const String& uri) {
21315 ASSERT(uris != nullptr);
21316 const intptr_t len = uris->length();
21317 ASSERT((len % 3) == 0);
21318 bool print_uri = false;
21319 for (intptr_t i = 0; i < len; i += 3) {
21320 if (uris->At(i).Equals(name)) {
21321 if (uris->At(i + 1).Equals(uri)) {
21322 // Same name and same URI: no need to add this already listed URI.
21323 return; // No state change is possible.
21324 } else {
21325 // Same name and different URI: the name is ambiguous, print both URIs.
21326 print_uri = true;
21327 uris->SetAt(i + 2, Symbols::print());
21328 }
21329 }
21330 }
21331 uris->Add(name);
21332 uris->Add(uri);
21333 if (print_uri) {
21334 uris->Add(Symbols::print());
21335 } else {
21336 uris->Add(Symbols::Empty());
21337 }
21338}
21339
21341 ASSERT(uris != nullptr);
21342 Thread* thread = Thread::Current();
21343 Zone* zone = thread->zone();
21344 const intptr_t len = uris->length();
21345 ASSERT((len % 3) == 0);
21346 GrowableHandlePtrArray<const String> pieces(zone, 5 * (len / 3));
21347 for (intptr_t i = 0; i < len; i += 3) {
21348 // Only print URIs that have been marked.
21349 if (uris->At(i + 2).ptr() == Symbols::print().ptr()) {
21350 pieces.Add(Symbols::TwoSpaces());
21351 pieces.Add(uris->At(i));
21352 pieces.Add(Symbols::SpaceIsFromSpace());
21353 pieces.Add(uris->At(i + 1));
21354 pieces.Add(Symbols::NewLine());
21355 }
21356 }
21357 return Symbols::FromConcatAll(thread, pieces);
21358}
21359
21361 NameVisibility name_visibility) const {
21362 if (IsDynamicType() || IsVoidType() || IsNullType()) {
21363 // Hide nullable suffix.
21364 return "";
21365 }
21366 // Keep in sync with Nullability enum in runtime/vm/object.h.
21367 switch (nullability()) {
21369 return "?";
21371 return "";
21373 return (FLAG_show_internal_names || name_visibility != kUserVisibleName)
21374 ? "*"
21375 : "";
21376 default:
21377 UNREACHABLE();
21378 }
21379}
21380
21381StringPtr AbstractType::Name() const {
21383}
21384
21385const char* AbstractType::NameCString() const {
21386 Thread* thread = Thread::Current();
21387 ZoneTextBuffer printer(thread->zone());
21388 PrintName(kInternalName, &printer);
21389 return printer.buffer();
21390}
21391
21395
21397 Thread* thread = Thread::Current();
21398 ZoneTextBuffer printer(thread->zone());
21399 PrintName(kUserVisibleName, &printer);
21400 return printer.buffer();
21401}
21402
21405}
21406
21408 Thread* thread = Thread::Current();
21409 ZoneTextBuffer printer(thread->zone());
21410 PrintName(kScrubbedName, &printer);
21411 return printer.buffer();
21412}
21413
21415 BaseTextBuffer* printer) const {
21416 // All subclasses should implement this appropriately, so the only value that
21417 // should reach this implementation should be the null value.
21418 ASSERT(IsNull());
21419 // AbstractType is an abstract class.
21420 UNREACHABLE();
21421}
21422
// Returns the name of this type's class. Only valid for class-based types;
// function and record types have no single underlying class.
StringPtr AbstractType::ClassName() const {
  ASSERT(!IsFunctionType() && !IsRecordType());
  return Class::Handle(type_class()).Name();
}
21427
21429 return type_class_id() == kNullCid;
21430}
21431
21433 return type_class_id() == kNeverCid;
21434}
21435
21437 return type_class_id() == kSentinelCid;
21438}
21439
21441 const classid_t cid = type_class_id();
21442 if (cid == kDynamicCid || cid == kVoidCid) {
21443 return true;
21444 }
21445 if (cid == kInstanceCid) { // Object type.
21446 return !IsNonNullable(); // kLegacy or kNullable.
21447 }
21448 if (cid == kFutureOrCid) {
21449 // FutureOr<T> where T is a top type behaves as a top type.
21450 return AbstractType::Handle(UnwrapFutureOr()).IsTopTypeForInstanceOf();
21451 }
21452 return false;
21453}
21454
21455// Must be kept in sync with GenerateTypeIsTopTypeForSubtyping in
21456// stub_code_compiler.cc if any changes are made.
21458 const classid_t cid = type_class_id();
21459 if (cid == kDynamicCid || cid == kVoidCid) {
21460 return true;
21461 }
21462 if (cid == kInstanceCid) { // Object type.
21463 return !IsNonNullable();
21464 }
21465 if (cid == kFutureOrCid) {
21466 // FutureOr<T> where T is a top type behaves as a top type.
21467 return AbstractType::Handle(UnwrapFutureOr()).IsTopTypeForSubtyping();
21468 }
21469 return false;
21470}
21471
21473 return HasTypeClass() &&
21474 (type_class() == Type::Handle(Type::IntType()).type_class());
21475}
21476
21478 return HasTypeClass() &&
21480 ->object_store()
21481 ->integer_implementation_class());
21482}
21483
21485 return HasTypeClass() &&
21486 (type_class() == Type::Handle(Type::Double()).type_class());
21487}
21488
21490 // kFloat32x4Cid refers to the private class and cannot be used here.
21491 return HasTypeClass() &&
21492 (type_class() == Type::Handle(Type::Float32x4()).type_class());
21493}
21494
21496 // kFloat64x2Cid refers to the private class and cannot be used here.
21497 return HasTypeClass() &&
21498 (type_class() == Type::Handle(Type::Float64x2()).type_class());
21499}
21500
21502 // kInt32x4Cid refers to the private class and cannot be used here.
21503 return HasTypeClass() &&
21504 (type_class() == Type::Handle(Type::Int32x4()).type_class());
21505}
21506
21508 return HasTypeClass() &&
21509 (type_class() == Type::Handle(Type::StringType()).type_class());
21510}
21511
21513 return HasTypeClass() &&
21514 (type_class() == Type::Handle(Type::DartFunctionType()).type_class());
21515}
21516
21518 return (type_class_id() == kClosureCid);
21519}
21520
21522 if (!HasTypeClass()) return false;
21523 const auto cid = type_class_id();
21524 return ((cid == kRecordCid) ||
21525 (cid == Class::Handle(
21526 IsolateGroup::Current()->object_store()->record_class())
21527 .id()));
21528}
21529
21531 return HasTypeClass() && type_class_id() == kPointerCid;
21532}
21533
21535 if (!HasTypeClass()) return false;
21536
21537 intptr_t cid = type_class_id();
21538
21539 if (cid == kBoolCid) return true;
21540 if (cid == kDynamicCid) return true;
21541 if (cid == kInstanceCid) return true; // Object.
21542 if (cid == kNeverCid) return true;
21543 if (cid == kNullCid) return true;
21544 if (cid == kVoidCid) return true;
21545
21546 // These are not constant CID checks because kDoubleCid refers to _Double
21547 // not double, etc.
21548 ObjectStore* object_store = IsolateGroup::Current()->object_store();
21549 Type& candidate_type = Type::Handle();
21550 candidate_type = object_store->int_type();
21551 if (cid == candidate_type.type_class_id()) return true;
21552 candidate_type = object_store->double_type();
21553 if (cid == candidate_type.type_class_id()) return true;
21554 candidate_type = object_store->number_type();
21555 if (cid == candidate_type.type_class_id()) return true;
21556 candidate_type = object_store->string_type();
21557 if (cid == candidate_type.type_class_id()) return true;
21558
21559 Class& candidate_cls = Class::Handle();
21560 candidate_cls = object_store->list_class();
21561 if (cid == candidate_cls.id()) return true;
21562 candidate_cls = object_store->map_class();
21563 if (cid == candidate_cls.id()) return true;
21564 candidate_cls = object_store->set_class();
21565 if (cid == candidate_cls.id()) return true;
21566 candidate_cls = object_store->capability_class();
21567 if (cid == candidate_cls.id()) return true;
21568 candidate_cls = object_store->send_port_class();
21569 if (cid == candidate_cls.id()) return true;
21570 candidate_cls = object_store->transferable_class();
21571 if (cid == candidate_cls.id()) return true;
21572
21573 const auto& typed_data_lib =
21574 Library::Handle(object_store->typed_data_library());
21575
21576#define IS_CHECK(name) \
21577 candidate_cls = typed_data_lib.LookupClass(Symbols::name##List()); \
21578 if (cid == candidate_cls.id()) { \
21579 return true; \
21580 }
21582#undef IS_CHECK
21583
21584 return false;
21585}
21586
// If this type is FutureOr<T>, returns T, unwrapping nested layers
// (FutureOr<FutureOr<T>> yields T). A FutureOr with no type argument
// vector is treated as FutureOr<dynamic>. Any other type is returned
// unchanged.
AbstractTypePtr AbstractType::UnwrapFutureOr() const {
  if (!IsFutureOrType()) {
    return ptr();
  }
  if (arguments() == TypeArguments::null()) {
    // Missing type arguments means FutureOr<dynamic>.
    return Type::dynamic_type().ptr();
  }
  Thread* thread = Thread::Current();
  // NOTE(review): the reusable-handle scope macros that normally guard
  // TypeArgumentsHandle()/AbstractTypeHandle() appear to have been elided
  // from this listing — confirm against the upstream source.
  TypeArguments& type_args = thread->TypeArgumentsHandle();
  type_args = arguments();
  AbstractType& type_arg = thread->AbstractTypeHandle();
  type_arg = type_args.TypeAt(0);
  // Keep unwrapping while the type argument itself is a FutureOr.
  while (type_arg.IsFutureOrType()) {
    if (type_arg.arguments() == TypeArguments::null()) {
      return Type::dynamic_type().ptr();
    }
    type_args = type_arg.arguments();
    type_arg = type_args.TypeAt(0);
  }
  return type_arg.ptr();
}
21610
21612 const AbstractType& other,
21613 Heap::Space space,
21614 FunctionTypeMapping* function_type_equivalence) const {
21615 TRACE_TYPE_CHECKS_VERBOSE(" AbstractType::IsSubtypeOf(%s, %s)\n",
21616 ToCString(), other.ToCString());
21618 ASSERT(other.IsFinalized());
21619 // Reflexivity.
21620 if (ptr() == other.ptr()) {
21621 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (same types)\n");
21622 return true;
21623 }
21624 // Right top type.
21625 if (other.IsTopTypeForSubtyping()) {
21626 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (right is top)\n");
21627 return true;
21628 }
21629 // Left bottom type.
21630 // Any form of Never in weak mode maps to Null and Null is a bottom type in
21631 // weak mode. In strong mode, Never and Never* are bottom types. Therefore,
21632 // Never and Never* are bottom types regardless of weak/strong mode.
21633 // Note that we cannot encounter Never?, as it is normalized to Null.
21634 if (IsNeverType()) {
21635 ASSERT(!IsNullable());
21636 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (left is Never)\n");
21637 return true;
21638 }
21639 // Left top type.
21640 if (IsDynamicType() || IsVoidType()) {
21641 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (left is top)\n");
21642 return false;
21643 }
21644 // Left Null type.
21645 if (IsNullType()) {
21646 const bool result = Instance::NullIsAssignableTo(other);
21647 TRACE_TYPE_CHECKS_VERBOSE(" - result: %s (left is Null)\n",
21648 (result ? "true" : "false"));
21649 return result;
21650 }
21651 Thread* thread = Thread::Current();
21652 Zone* zone = thread->zone();
21653 // Type parameters cannot be handled by Class::IsSubtypeOf().
21654 // When comparing two uninstantiated function types, one returning type
21655 // parameter K, the other returning type parameter V, we cannot assume that
21656 // K is a subtype of V, or vice versa. We only return true if K equals V, as
21657 // defined by TypeParameter::Equals.
21658 // The same rule applies when checking the upper bound of a still
21659 // uninstantiated type at compile time. Returning false will defer the test
21660 // to run time.
21661 // There are however some cases that can be decided at compile time.
21662 // For example, with class A<K, V extends K>, new A<T, T> called from within
21663 // a class B<T> will never require a run time bound check, even if T is
21664 // uninstantiated at compile time.
21665 if (IsTypeParameter()) {
21666 const TypeParameter& type_param = TypeParameter::Cast(*this);
21667 if (other.IsTypeParameter()) {
21668 const TypeParameter& other_type_param = TypeParameter::Cast(other);
21669 if (type_param.IsEquivalent(other_type_param,
21671 function_type_equivalence)) {
21673 " - result: true (equivalent type parameters)\n");
21674 return true;
21675 }
21676 }
21677 const AbstractType& bound = AbstractType::Handle(zone, type_param.bound());
21678 ASSERT(bound.IsFinalized());
21679 if (bound.IsSubtypeOf(other, space, function_type_equivalence)) {
21680 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (bound is a subtype)\n");
21681 return true;
21682 }
21683 // Apply additional subtyping rules if 'other' is 'FutureOr'.
21684 if (IsSubtypeOfFutureOr(zone, other, space, function_type_equivalence)) {
21686 " - result: true (type parameter is a subtype of FutureOr)\n");
21687 return true;
21688 }
21690 " - result: false (left is a type parameter)\n");
21691 return false;
21692 }
21693 if (other.IsTypeParameter()) {
21695 " - result: false (right is a type parameter)\n");
21696 return false;
21697 }
21698 // Function types cannot be handled by Class::IsSubtypeOf().
21699 if (IsFunctionType()) {
21700 // Any type that can be the type of a closure is a subtype of Function or
21701 // non-nullable Object.
21702 if (other.IsObjectType() || other.IsDartFunctionType()) {
21703 const bool result = !IsNullable() || !other.IsNonNullable();
21704 TRACE_TYPE_CHECKS_VERBOSE(" - result: %s (function vs non-function)\n",
21705 (result ? "true" : "false"));
21706 return result;
21707 }
21708 if (other.IsFunctionType()) {
21709 // Check for two function types.
21710 if (IsNullable() && other.IsNonNullable()) {
21712 " - result: false (function nullability)\n");
21713 return false;
21714 }
21715 const bool result = FunctionType::Cast(*this).IsSubtypeOf(
21716 FunctionType::Cast(other), space, function_type_equivalence);
21717 TRACE_TYPE_CHECKS_VERBOSE(" - result: %s (function types)\n",
21718 (result ? "true" : "false"));
21719 return result;
21720 }
21721 // Apply additional subtyping rules if 'other' is 'FutureOr'.
21722 if (IsSubtypeOfFutureOr(zone, other, space, function_type_equivalence)) {
21724 " - result: true (function type is a subtype of FutureOr)\n");
21725 return true;
21726 }
21727 // All possible supertypes for FunctionType have been checked.
21728 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (function type)\n");
21729 return false;
21730 } else if (other.IsFunctionType()) {
21731 // FunctionTypes can only be subtyped by other FunctionTypes, so don't
21732 // fall through to class-based type tests.
21734 " - result: false (right is a function type)\n");
21735 return false;
21736 }
21737 // Record types cannot be handled by Class::IsSubtypeOf().
21738 if (IsRecordType()) {
21739 if (other.IsObjectType() || other.IsDartRecordType()) {
21740 const bool result = !IsNullable() || !other.IsNonNullable();
21741 TRACE_TYPE_CHECKS_VERBOSE(" - result: %s (record vs non-record)\n",
21742 (result ? "true" : "false"));
21743 return result;
21744 }
21745 if (other.IsRecordType()) {
21746 // Check for two record types.
21747 if (IsNullable() && other.IsNonNullable()) {
21748 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (record nullability)\n");
21749 return false;
21750 }
21751 const bool result = RecordType::Cast(*this).IsSubtypeOf(
21752 RecordType::Cast(other), space, function_type_equivalence);
21753 TRACE_TYPE_CHECKS_VERBOSE(" - result: %s (record types)\n",
21754 (result ? "true" : "false"));
21755 return result;
21756 }
21757 // Apply additional subtyping rules if 'other' is 'FutureOr'.
21758 if (IsSubtypeOfFutureOr(zone, other, space, function_type_equivalence)) {
21760 " - result: true (record type is a subtype of FutureOr)\n");
21761 return true;
21762 }
21763 // All possible supertypes for record type have been checked.
21764 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (record type)\n");
21765 return false;
21766 } else if (other.IsRecordType()) {
21767 // RecordTypes can only be subtyped by other RecordTypes, so don't
21768 // fall through to class-based type tests.
21769 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (right is a record type)\n");
21770 return false;
21771 }
21772 ASSERT(IsType());
21773 const Class& type_cls = Class::Handle(zone, type_class());
21774 const bool result = Class::IsSubtypeOf(
21775 type_cls,
21776 TypeArguments::Handle(zone, Type::Cast(*this).GetInstanceTypeArguments(
21777 thread, /*canonicalize=*/false)),
21778 nullability(), other, space, function_type_equivalence);
21779 TRACE_TYPE_CHECKS_VERBOSE(" - result: %s (class type check)\n",
21780 (result ? "true" : "false"));
21781 return result;
21782}
21783
21784bool AbstractType::IsSubtypeOfFutureOr(
21785 Zone* zone,
21786 const AbstractType& other,
21787 Heap::Space space,
21788 FunctionTypeMapping* function_type_equivalence) const {
21789 if (other.IsFutureOrType()) {
21790 // This function is only called with a receiver that is either a function
21791 // type, record type, or an uninstantiated type parameter.
21792 // Therefore, it cannot be of class Future and we can spare the check.
21793 ASSERT(IsFunctionType() || IsRecordType() || IsTypeParameter());
21794 const TypeArguments& other_type_arguments =
21795 TypeArguments::Handle(zone, other.arguments());
21796 const AbstractType& other_type_arg =
21797 AbstractType::Handle(zone, other_type_arguments.TypeAtNullSafe(0));
21798 if (other_type_arg.IsTopTypeForSubtyping()) {
21799 return true;
21800 }
21801 // Retry the IsSubtypeOf check after unwrapping type arg of FutureOr.
21802 if (IsSubtypeOf(other_type_arg, space, function_type_equivalence)) {
21803 return true;
21804 }
21805 }
21806 return false;
21807}
21808
21810 // AbstractType is an abstract class.
21811 UNREACHABLE();
21812 return 0;
21813}
21814
// Debug/diagnostic string for this type. Every concrete subclass overrides
// this; only the null AbstractType value reaches this default.
const char* AbstractType::ToCString() const {
  // All subclasses should implement this appropriately, so the only value that
  // should reach this implementation should be the null value.
  ASSERT(IsNull());
  return "AbstractType: null";
}
21821
21823 if (stub.IsNull()) {
21825 return;
21826 }
21827
21828 auto& old = Code::Handle(Thread::Current()->zone());
21829 while (true) {
21830 // We load the old TTS and it's entrypoint.
21831 old = untag()->type_test_stub<std::memory_order_acquire>();
21832 uword old_entry_point = old.IsNull() ? 0 : old.EntryPoint();
21833
21834 // If we can successfully update the entrypoint of the TTS, we will
21835 // unconditionally also set the [Code] of the TTS.
21836 //
21837 // Any competing writer would do the same, lose the compare-exchange, loop
21838 // around and continue loading the old [Code] TTS and continue to lose the
21839 // race until we have finally also updated the [Code] TTS.
21840 if (untag()->type_test_stub_entry_point_.compare_exchange_strong(
21841 old_entry_point, stub.EntryPoint())) {
21842 untag()->set_type_test_stub<std::memory_order_release>(stub.ptr());
21843 return;
21844 }
21845 }
21846}
21847
21849 if (stub.IsNull()) {
21850 // This only happens during bootstrapping when creating Type objects before
21851 // we have the instructions.
21853 StoreNonPointer(&untag()->type_test_stub_entry_point_, 0);
21854 untag()->set_type_test_stub(stub.ptr());
21855 return;
21856 }
21857
21858 StoreNonPointer(&untag()->type_test_stub_entry_point_, stub.EntryPoint());
21859 untag()->set_type_test_stub(stub.ptr());
21860}
21861
21863 return IsolateGroup::Current()->object_store()->null_type();
21864}
21865
21867 return Object::dynamic_type().ptr();
21868}
21869
21871 return Object::void_type().ptr();
21872}
21873
21875 return IsolateGroup::Current()->object_store()->never_type();
21876}
21877
21879 return IsolateGroup::Current()->object_store()->object_type();
21880}
21881
21883 return IsolateGroup::Current()->object_store()->bool_type();
21884}
21885
// Returns the cached, non-nullable 'int' type from the object store.
TypePtr Type::IntType() {
  return IsolateGroup::Current()->object_store()->int_type();
}
21889
21891 return IsolateGroup::Current()->object_store()->nullable_int_type();
21892}
21893
// Returns the cached _Smi (small integer) type from the object store.
TypePtr Type::SmiType() {
  return IsolateGroup::Current()->object_store()->smi_type();
}
21897
21899 return IsolateGroup::Current()->object_store()->mint_type();
21900}
21901
// Returns the cached, non-nullable 'double' type from the object store.
TypePtr Type::Double() {
  return IsolateGroup::Current()->object_store()->double_type();
}
21905
21907 return IsolateGroup::Current()->object_store()->nullable_double_type();
21908}
21909
21911 return IsolateGroup::Current()->object_store()->float32x4_type();
21912}
21913
21915 return IsolateGroup::Current()->object_store()->float64x2_type();
21916}
21917
// Returns the cached 'Int32x4' SIMD type from the object store.
TypePtr Type::Int32x4() {
  return IsolateGroup::Current()->object_store()->int32x4_type();
}
21921
// Returns the cached, non-nullable 'num' type from the object store.
TypePtr Type::Number() {
  return IsolateGroup::Current()->object_store()->number_type();
}
21925
21927 return IsolateGroup::Current()->object_store()->nullable_number_type();
21928}
21929
21931 return IsolateGroup::Current()->object_store()->string_type();
21932}
21933
21935 return IsolateGroup::Current()->object_store()->array_type();
21936}
21937
21939 return IsolateGroup::Current()->object_store()->function_type();
21940}
21941
21943 return IsolateGroup::Current()->object_store()->type_type();
21944}
21945
21946TypePtr Type::NewNonParameterizedType(const Class& type_class) {
21947 ASSERT(type_class.NumTypeArguments() == 0);
21948 if (type_class.IsNullClass()) {
21949 return Type::NullType();
21950 }
21951 if (type_class.IsDynamicClass()) {
21952 return Type::DynamicType();
21953 }
21954 if (type_class.IsVoidClass()) {
21955 return Type::VoidType();
21956 }
21957 // It is too early to use the class finalizer, as type_class may not be named
21958 // yet, so do not call DeclarationType().
21959 Type& type = Type::Handle(type_class.declaration_type());
21960 if (type.IsNull()) {
21962 Object::null_type_arguments(), Nullability::kNonNullable);
21963 type.SetIsFinalized();
21964 type ^= type.Canonicalize(Thread::Current());
21965 type_class.set_declaration_type(type);
21966 }
21967 ASSERT(type.IsFinalized());
21968 return type.ptr();
21969}
21970
21971TypePtr Type::ToNullability(Nullability value, Heap::Space space) const {
21972 if (nullability() == value) {
21973 return ptr();
21974 }
21975 // Type parameter instantiation may request a nullability change, which should
21976 // be ignored for types dynamic and void. Type Null cannot be the result of
21977 // instantiating a non-nullable type parameter (TypeError thrown).
21978 const classid_t cid = type_class_id();
21979 if (cid == kDynamicCid || cid == kVoidCid || cid == kNullCid) {
21980 return ptr();
21981 }
21983 // Normalize Never? to Null.
21984 return Type::NullType();
21985 }
21986 // Clone type and set new nullability.
21987 Type& type = Type::Handle();
21988 // Always cloning in old space and removing space parameter would not satisfy
21989 // currently existing requests for type instantiation in new space.
21990 // Load with relaxed atomics to prevent data race with updating type
21991 // testing stub.
21992 type ^= Object::Clone(*this, space, /*load_with_relaxed_atomics=*/true);
21993 type.set_nullability(value);
21994 type.SetHash(0);
21995 type.InitializeTypeTestingStubNonAtomic(
21997 if (IsCanonical()) {
21998 // Object::Clone does not clone canonical bit.
21999 ASSERT(!type.IsCanonical());
22000 type ^= type.Canonicalize(Thread::Current());
22001 }
22002 return type.ptr();
22003}
22004
22006 Heap::Space space) const {
22007 if (nullability() == value) {
22008 return ptr();
22009 }
22010 // Clone function type and set new nullability.
22012 type.set_nullability(value);
22013 type.SetHash(0);
22014 type.InitializeTypeTestingStubNonAtomic(
22016 if (IsCanonical()) {
22017 // Object::Clone does not clone canonical bit.
22018 ASSERT(!type.IsCanonical());
22019 type ^= type.Canonicalize(Thread::Current());
22020 }
22021 return type.ptr();
22022}
22023
22025 return untag()->type_class_id();
22026}
22027
22028ClassPtr Type::type_class() const {
22030}
22031
22033 intptr_t num_free_fun_type_params) const {
22035 return true;
22036 }
22037 if ((genericity == kAny) && (num_free_fun_type_params == kAllFree) &&
22039 return false;
22040 }
22041 if (arguments() == TypeArguments::null()) {
22042 return true;
22043 }
22045 return args.IsSubvectorInstantiated(0, args.Length(), genericity,
22046 num_free_fun_type_params);
22047}
22048
22050 const TypeArguments& instantiator_type_arguments,
22051 const TypeArguments& function_type_arguments,
22052 intptr_t num_free_fun_type_params,
22053 Heap::Space space,
22054 FunctionTypeMapping* function_type_mapping,
22055 intptr_t num_parent_type_args_adjustment) const {
22056 Zone* zone = Thread::Current()->zone();
22059 // Note that the type class has to be resolved at this time, but not
22060 // necessarily finalized yet. We may be checking bounds at compile time or
22061 // finalizing the type argument vector of a recursive type.
22062 const Class& cls = Class::Handle(zone, type_class());
22063 TypeArguments& type_arguments = TypeArguments::Handle(zone, arguments());
22064 ASSERT(type_arguments.Length() == cls.NumTypeParameters());
22065 type_arguments = type_arguments.InstantiateFrom(
22066 instantiator_type_arguments, function_type_arguments,
22067 num_free_fun_type_params, space, function_type_mapping,
22068 num_parent_type_args_adjustment);
22069 // A returned empty_type_arguments indicates a failed instantiation in dead
22070 // code that must be propagated up to the caller, the optimizing compiler.
22071 if (type_arguments.ptr() == Object::empty_type_arguments().ptr()) {
22072 return Type::null();
22073 }
22074 // This uninstantiated type is not modified, as it can be instantiated
22075 // with different instantiators. Allocate a new instantiated version of it.
22076 const Type& instantiated_type =
22077 Type::Handle(zone, Type::New(cls, type_arguments, nullability(), space));
22078 instantiated_type.SetIsFinalized();
22079 // Canonicalization is not part of instantiation.
22080 return instantiated_type.NormalizeFutureOrType(space);
22081}
22082
22084 intptr_t num_parent_type_args_adjustment,
22085 intptr_t num_free_fun_type_params,
22086 Heap::Space space,
22087 FunctionTypeMapping* function_type_mapping) const {
22089 ASSERT(num_parent_type_args_adjustment >= 0);
22090 if (arguments() == Object::null()) {
22091 return ptr();
22092 }
22093 Zone* zone = Thread::Current()->zone();
22094 const auto& type_args = TypeArguments::Handle(zone, arguments());
22095 const auto& updated_type_args = TypeArguments::Handle(
22096 zone, type_args.UpdateFunctionTypes(num_parent_type_args_adjustment,
22097 num_free_fun_type_params, space,
22098 function_type_mapping));
22099 if (type_args.ptr() == updated_type_args.ptr()) {
22100 return ptr();
22101 }
22102 const Class& cls = Class::Handle(zone, type_class());
22103 const Type& new_type = Type::Handle(
22104 zone, Type::New(cls, updated_type_args, nullability(), space));
22105 new_type.SetIsFinalized();
22106 return new_type.ptr();
22107}
22108
22109// Certain built-in classes are treated as syntactically equivalent.
22111 if (IsIntegerClassId(cid)) {
22112 return Type::Handle(Type::IntType()).type_class_id();
22113 } else if (IsStringClassId(cid)) {
22114 return Type::Handle(Type::StringType()).type_class_id();
22115 } else if (cid == kDoubleCid) {
22116 return Type::Handle(Type::Double()).type_class_id();
22117 } else if (IsTypeClassId(cid)) {
22118 return Type::Handle(Type::DartTypeType()).type_class_id();
22119 } else if (IsArrayClassId(cid)) {
22120 return Class::Handle(IsolateGroup::Current()->object_store()->list_class())
22121 .id();
22122 }
22123 return cid;
22124}
22125
// Type::IsEquivalent: equality/equivalence check between this type and
// `other` under the given TypeEquality `kind`.
// Fast paths: identical pointers -> true; non-Type `other` -> false.
// NOTE(review): the opening signature line (22126) and part of the
// cid-mismatch condition (lines 22141-22142) are elided in this listing;
// the elided condition presumably compares scrubbed cids for
// TypeEquality::kSyntactical -- confirm against the full source.
22127 TypeEquality kind,
22128 FunctionTypeMapping* function_type_equivalence) const {
22129 ASSERT(!IsNull());
22130 if (ptr() == other.ptr()) {
22131 return true;
22132 }
22133 if (!other.IsType()) {
22134 return false;
22135 }
22136 const Type& other_type = Type::Cast(other);
22137 const classid_t type_cid = type_class_id();
22138 const classid_t other_type_cid = other_type.type_class_id();
22139 if (type_cid != other_type_cid) {
22140 if ((kind != TypeEquality::kSyntactical) ||
22143 return false;
22144 }
22145 }
22146 Thread* thread = Thread::Current();
22147 Zone* zone = thread->zone();
22148 ASSERT(
22149 Class::Handle(zone, type_class()).NumTypeParameters(thread) ==
22150 Class::Handle(zone, other_type.type_class()).NumTypeParameters(thread));
22151
22152 if (!IsNullabilityEquivalent(thread, other_type, kind)) {
22153 return false;
22154 }
22155 if (!IsFinalized() || !other_type.IsFinalized()) {
22157 return false; // Too early to decide if equal.
22158 }
// Identical (raw) argument vectors imply equal types.
22159 if (arguments() == other_type.arguments()) {
22160 return true;
22161 }
// Otherwise defer to element-wise type-argument equivalence.
22162 const TypeArguments& type_args =
22163 TypeArguments::Handle(zone, this->arguments());
22164 const TypeArguments& other_type_args =
22165 TypeArguments::Handle(zone, other_type.arguments());
22166 return type_args.IsEquivalent(other_type_args, kind,
22167 function_type_equivalence);
22168}
22169
// FunctionType::IsEquivalent: checks this function type against `other` for
// equivalence under `kind` -- packed parameter counts, nullability,
// type parameters/bounds, result type, parameter types, and (for named
// parameters) names and required-ness must all match.
// NOTE(review): the signature line (22170), part of the packed-counts
// condition (22183), and the `if (HasOptionalNamedParameters()) {` guard
// (22229) appear elided in this listing -- confirm against full source.
22171 const Instance& other,
22172 TypeEquality kind,
22173 FunctionTypeMapping* function_type_equivalence) const {
22174 ASSERT(!IsNull());
22175 if (ptr() == other.ptr()) {
22176 return true;
22177 }
22178 if (!other.IsFunctionType()) {
22179 return false;
22180 }
22181 const FunctionType& other_type = FunctionType::Cast(other);
22182 if ((packed_parameter_counts() != other_type.packed_parameter_counts()) ||
22184 other_type.packed_type_parameter_counts())) {
22185 // Different number of type parameters or parameters.
22186 return false;
22187 }
22188 Thread* thread = Thread::Current();
22189 Zone* zone = thread->zone();
22190 if (!IsNullabilityEquivalent(thread, other_type, kind)) {
22191 return false;
22192 }
22193 if (!IsFinalized() || !other_type.IsFinalized()) {
22195 return false; // Too early to decide if equal.
22196 }
// Maps this signature's type parameters to other_type's for the duration
// of the comparison (scope object pushes onto function_type_equivalence).
22197 FunctionTypeMapping scope(zone, &function_type_equivalence, *this,
22198 other_type);
22199
22200 // Equal function types must have equal signature types and equal optional
22201 // named arguments.
22202 // Compare function type parameters and their bounds.
22203 // Check the type parameters and bounds of generic functions.
22204 if (!HasSameTypeParametersAndBounds(other_type, kind,
22205 function_type_equivalence)) {
22206 return false;
22207 }
22208 AbstractType& param_type = Type::Handle(zone);
22209 AbstractType& other_param_type = Type::Handle(zone);
22210 // Check the result type.
22211 param_type = result_type();
22212 other_param_type = other_type.result_type();
22213 if (!param_type.IsEquivalent(other_param_type, kind,
22214 function_type_equivalence)) {
22215 return false;
22216 }
22217 // Check the types of all parameters.
22218 const intptr_t num_params = NumParameters();
22219 ASSERT(other_type.NumParameters() == num_params);
22220 for (intptr_t i = 0; i < num_params; i++) {
22221 param_type = ParameterTypeAt(i);
22222 other_param_type = other_type.ParameterTypeAt(i);
22223 // Use contravariant order in case we test for subtyping.
22224 if (!other_param_type.IsEquivalent(param_type, kind,
22225 function_type_equivalence)) {
22226 return false;
22227 }
22228 }
22230 ASSERT(other_type.HasOptionalNamedParameters()); // Same packed counts.
22231 for (intptr_t i = num_fixed_parameters(); i < num_params; i++) {
22232 if (ParameterNameAt(i) != other_type.ParameterNameAt(i)) {
22233 return false;
22234 }
22235 if (IsRequiredAt(i) != other_type.IsRequiredAt(i)) {
22236 return false;
22237 }
22238 }
22239 }
22240 return true;
22241}
22242
// Returns whether this type is the declaration type of `cls`, i.e. eligible
// for the per-class declaration_type fast canonicalization path: Null is
// always a declaration type; generic and closure classes never are.
// NOTE(review): the final statement (line 22251, presumably `return true;`
// possibly guarded by a nullability check) is elided in this listing --
// confirm against the full source.
22243bool Type::IsDeclarationTypeOf(const Class& cls) const {
22244 ASSERT(type_class() == cls.ptr());
22245 if (cls.IsNullClass()) {
22246 return true;
22247 }
22248 if (cls.IsGeneric() || cls.IsClosureClass()) {
22249 return false;
22250 }
22252}
22253
// Type::Canonicalize: returns the canonical representative of this type,
// cloning it to old space and inserting it into the isolate group's
// canonical-types table (or the per-class declaration_type slot) if needed.
// Fast paths: already-canonical, dynamic, void. All table mutation happens
// under the type_canonicalization_mutex; lookups recheck after taking the
// lock because canonicalizing type arguments can recursively canonicalize
// this very type.
22254// Keep in sync with TypeSerializationCluster::IsInCanonicalSet.
22255AbstractTypePtr Type::Canonicalize(Thread* thread) const {
22256 Zone* zone = thread->zone();
22258 if (IsCanonical()) {
22259#ifdef DEBUG
22260 TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
22261 ASSERT(type_args.IsCanonical());
22262 ASSERT(type_args.IsOld());
22263#endif
22264 return this->ptr();
22265 }
22266 auto isolate_group = thread->isolate_group();
22267 const classid_t cid = type_class_id();
22268 if (cid == kDynamicCid) {
22269 ASSERT(Object::dynamic_type().IsCanonical());
22270 return Object::dynamic_type().ptr();
22271 }
22272
22273 if (cid == kVoidCid) {
22274 ASSERT(Object::void_type().IsCanonical());
22275 return Object::void_type().ptr();
22276 }
22277
22278 const Class& cls = Class::Handle(zone, type_class());
22279
22280 // Fast canonical lookup/registry for simple types.
22281 if (IsDeclarationTypeOf(cls)) {
22282 ASSERT(!cls.IsNullClass() || IsNullable());
22283 Type& type = Type::Handle(zone, cls.declaration_type());
22284 if (type.IsNull()) {
22285 ASSERT(!cls.ptr()->untag()->InVMIsolateHeap() ||
22286 (isolate_group == Dart::vm_isolate_group()));
22287 // Canonicalize the type arguments of the supertype, if any.
22288 TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
22289 type_args = type_args.Canonicalize(thread);
22290 set_arguments(type_args);
22291 type = cls.declaration_type();
22292 // May be set while canonicalizing type args.
22293 if (type.IsNull()) {
// Double-checked locking: recheck declaration_type under the mutex.
22294 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
22295 // Recheck if type exists.
22296 type = cls.declaration_type();
22297 if (type.IsNull()) {
22298 if (this->IsNew()) {
22299 type ^= Object::Clone(*this, Heap::kOld);
22300 } else {
22301 type = this->ptr();
22302 }
22303 ASSERT(type.IsOld());
22304 type.ComputeHash();
22305 type.SetCanonical();
22306 cls.set_declaration_type(type);
22307 return type.ptr();
22308 }
22309 }
22310 }
22311 ASSERT(this->Equals(type));
22312 ASSERT(type.IsOld());
22313 if (type.IsCanonical()) {
22314 return type.ptr();
22315 }
22316 }
22317
22318 Type& type = Type::Handle(zone);
22319 ObjectStore* object_store = isolate_group->object_store();
22320 {
22321 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
22322 CanonicalTypeSet table(zone, object_store->canonical_types());
22323 type ^= table.GetOrNull(CanonicalTypeKey(*this));
22324 ASSERT(object_store->canonical_types() == table.Release().ptr());
22325 }
22326 if (type.IsNull()) {
22327 // The type was not found in the table. It is not canonical yet.
22328
22329 // Canonicalize the type arguments.
22330 TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
22331 ASSERT(type_args.IsNull() ||
22332 (type_args.Length() == cls.NumTypeParameters()));
22333 type_args = type_args.Canonicalize(thread);
22334 set_arguments(type_args);
22335 ASSERT(type_args.IsNull() || type_args.IsOld());
22336
22337 // Check to see if the type got added to canonical table as part of the
22338 // type arguments canonicalization.
22339 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
22340 CanonicalTypeSet table(zone, object_store->canonical_types());
22341 type ^= table.GetOrNull(CanonicalTypeKey(*this));
22342 if (type.IsNull()) {
22343 // Add this type into the canonical table of types.
22344 if (this->IsNew()) {
22345 type ^= Object::Clone(*this, Heap::kOld);
22346 } else {
22347 type = this->ptr();
22348 }
22349 ASSERT(type.IsOld());
22350 type.SetCanonical(); // Mark object as being canonical.
22351 bool present = table.Insert(type);
22352 ASSERT(!present);
22353 }
22354 object_store->set_canonical_types(table.Release());
22355 }
22356 return type.ptr();
22357}
22358
22359void Type::EnumerateURIs(URIs* uris) const {
22360 if (IsDynamicType() || IsVoidType() || IsNeverType()) {
22361 return;
22362 }
22363 Thread* thread = Thread::Current();
22364 Zone* zone = thread->zone();
22365 const Class& cls = Class::Handle(zone, type_class());
22366 const String& name = String::Handle(zone, cls.UserVisibleName());
22367 const Library& library = Library::Handle(zone, cls.library());
22368 const String& uri = String::Handle(zone, library.url());
22369 AddURI(uris, name, uri);
22370 const TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
22371 type_args.EnumerateURIs(uris);
22372}
22373
// Prints this type's name (class name, optional type-argument subvector,
// nullability suffix) into `printer` under the given visibility.
// NOTE(review): lines 22380 and 22382 (the initializers for `params` and
// the `args` handle) are elided in this listing -- `args` is presumably
// `TypeArguments::Handle(zone, arguments())`; confirm against full source.
22374void Type::PrintName(NameVisibility name_visibility,
22375 BaseTextBuffer* printer) const {
22376 Thread* thread = Thread::Current();
22377 Zone* zone = thread->zone();
22378 const Class& cls = Class::Handle(zone, type_class());
22379 const TypeParameters& params =
22381 printer->AddString(cls.NameCString(name_visibility));
22383 intptr_t num_type_params = 0;
22384 if (cls.is_declaration_loaded()) {
22385 num_type_params = cls.NumTypeParameters(thread);
22386 } else if (!args.IsNull() || args.ptr() != params.defaults()) {
22387 num_type_params = args.Length();
22388 }
22389 if (num_type_params == 0) {
22390 // Do nothing.
22391 } else {
22392 args.PrintSubvectorName(0, num_type_params, name_visibility, printer);
22393 }
22394 printer->AddString(NullabilitySuffix(name_visibility));
22395 // The name is only used for type checking and debugging purposes.
22396 // Unless profiling data shows otherwise, it is not worth caching the name in
22397 // the type.
22398}
22399
// Type::ComputeHash: combines class id, (legacy-normalized) nullability and
// the type-argument hash, stores the result via SetHash and returns it.
// NOTE(review): the signature lines (22400-22401) and the `args` handle
// initializer (22412) are elided in this listing.
22402 uint32_t result = type_class_id();
22403 // A legacy type should have the same hash as its non-nullable version to be
22404 // consistent with the definition of type equality in Dart code.
22405 Nullability type_nullability = nullability();
22406 if (type_nullability == Nullability::kLegacy) {
22407 type_nullability = Nullability::kNonNullable;
22408 }
22409 result = CombineHashes(result, static_cast<uint32_t>(type_nullability));
22410 uint32_t type_args_hash = TypeArguments::kAllDynamicHash;
22411 if (arguments() != TypeArguments::null()) {
22413 type_args_hash = args.Hash();
22414 }
22415 result = CombineHashes(result, type_args_hash);
22417 SetHash(result);
22418 return result;
22419}
22420
// FunctionType::ComputeHash: hashes packed counts, nullability, generic
// type-parameter bounds, result type, all parameter types and (for named
// parameters) parameter names; the required flag and default type
// arguments are deliberately excluded (see inline comments).
// NOTE(review): the signature (22421-22422), the initializer on 22424, the
// `type` handle declaration, and the named-parameter guard line (22449)
// are elided in this listing.
22423 uint32_t result =
22425 // A legacy type should have the same hash as its non-nullable version to be
22426 // consistent with the definition of type equality in Dart code.
22427 Nullability type_nullability = nullability();
22428 if (type_nullability == Nullability::kLegacy) {
22429 type_nullability = Nullability::kNonNullable;
22430 }
22431 result = CombineHashes(result, static_cast<uint32_t>(type_nullability));
22433 const intptr_t num_type_params = NumTypeParameters();
22434 if (num_type_params > 0) {
22435 const TypeParameters& type_params =
22437 const TypeArguments& bounds = TypeArguments::Handle(type_params.bounds());
22438 result = CombineHashes(result, bounds.Hash());
22439 // Since the default arguments are ignored when comparing two generic
22440 // function types for type equality, the hash does not depend on them.
22441 }
22442 type = result_type();
22443 result = CombineHashes(result, type.Hash());
22444 const intptr_t num_params = NumParameters();
22445 for (intptr_t i = 0; i < num_params; i++) {
22446 type = ParameterTypeAt(i);
22447 result = CombineHashes(result, type.Hash());
22448 }
22450 String& param_name = String::Handle();
22451 for (intptr_t i = num_fixed_parameters(); i < num_params; i++) {
22452 param_name = ParameterNameAt(i);
22453 result = CombineHashes(result, param_name.Hash());
22454 }
22455 // Required flag is not hashed, see comment above about legacy type.
22456 }
22458 SetHash(result);
22459 return result;
22460}
22461
22462void Type::set_type_class(const Class& value) const {
22463 ASSERT(!value.IsNull());
22464 set_type_class_id(value.id());
22465}
22466
// Type::set_arguments: stores the type-argument vector; asserts the vector
// length matches the class's type-parameter count unless the class is not
// fully loaded or this is the kInstanceCid escape hatch (see inline notes).
22467void Type::set_arguments(const TypeArguments& value) const {
22468 ASSERT(!IsCanonical());
22469 ASSERT(value.IsNull() ||
22470 // Do not attempt to query number of type parameters
22471 // before class declaration is fully loaded.
22472 !Class::Handle(type_class()).is_declaration_loaded() ||
22473 // Relax assertion in order to support invalid generic types
22474 // created in ClosureMirror_function.
22475 (type_class_id() == kInstanceCid) ||
22476 value.Length() == Class::Handle(type_class()).NumTypeParameters());
22477 untag()->set_arguments(value.ptr());
22478}
22479
// GetInstanceTypeArguments: delegates to the class to expand this type's
// argument vector into full instance type arguments.
// NOTE(review): the signature line (22480) is elided in this listing.
22481 bool canonicalize) const {
22482 Zone* zone = thread->zone();
22483 const auto& cls = Class::Handle(zone, type_class());
22484 const auto& args = TypeArguments::Handle(zone, arguments());
22485 return cls.GetInstanceTypeArguments(thread, args, canonicalize);
22486}
22487
// Allocates an uninitialized Type in the given heap space.
22488TypePtr Type::New(Heap::Space space) {
22489 return Object::Allocate<Type>(space);
22490}
22491
// Allocates and initializes a Type for `clazz` with the given arguments and
// nullability. Hash starts at 0 (computed lazily); the type-testing stub is
// initialized before returning.
// NOTE(review): lines 22501 and 22506 are elided in this listing (22506 is
// presumably the DefaultCodeForType argument to the stub initializer).
22492TypePtr Type::New(const Class& clazz,
22493 const TypeArguments& arguments,
22494 Nullability nullability,
22495 Heap::Space space) {
22496 Zone* Z = Thread::Current()->zone();
22497 const Type& result = Type::Handle(Z, Type::New(space));
22498 result.SetHash(0);
22499 result.set_flags(0);
22500 result.set_nullability(nullability);
22502 result.set_type_class(clazz);
22503 result.set_arguments(arguments);
22504
22505 result.InitializeTypeTestingStubNonAtomic(
22507 return result.ptr();
22508}
22509
// Stores the class id backing this type; kIllegalCid is never valid.
// NOTE(review): lines 22511, 22513 and 22515 are elided in this listing
// (likely additional ASSERTs).
22510void Type::set_type_class_id(intptr_t id) const {
22512 // We should never need a Type object for a top-level class.
22514 ASSERT(id != kIllegalCid);
22516 untag()->set_type_class_id(id);
22517}
22518
22519const char* Type::ToCString() const {
22520 if (IsNull()) {
22521 return "Type: null";
22522 }
22523 Zone* zone = Thread::Current()->zone();
22524 ZoneTextBuffer args(zone);
22525 const TypeArguments& type_args = TypeArguments::Handle(zone, arguments());
22526 const char* args_cstr = "";
22527 if (!type_args.IsNull()) {
22528 type_args.PrintSubvectorName(0, type_args.Length(), kInternalName, &args);
22529 args_cstr = args.buffer();
22530 }
22531 const Class& cls = Class::Handle(zone, type_class());
22532 const char* class_name;
22533 const String& name = String::Handle(zone, cls.Name());
22534 class_name = name.IsNull() ? "<null>" : name.ToCString();
22535 const char* suffix = NullabilitySuffix(kInternalName);
22536 return OS::SCreate(zone, "Type: %s%s%s", class_name, args_cstr, suffix);
22537}
22538
// FunctionType::Canonicalize: returns the canonical representative of this
// function type. If not already canonical, clones it to old space,
// canonicalizes its type-parameter bounds/defaults, result type and
// parameter types, then inserts it into the canonical-function-types table
// under the type_canonicalization_mutex (rechecking the table first, since
// canonicalizing component types may have inserted it already).
// NOTE(review): several lines are elided in this listing (22540, 22547,
// 22557, 22561-22562, 22574, 22577, 22612, 22631) -- mostly handle
// declarations/initializers such as `sig`, `type` and the
// CanonicalFunctionTypeSet table constructor; confirm against full source.
22539AbstractTypePtr FunctionType::Canonicalize(Thread* thread) const {
22541 Zone* zone = thread->zone();
22542 if (IsCanonical()) {
22543#ifdef DEBUG
22544 // Verify that all fields are allocated in old space and are canonical.
22545 if (IsGeneric()) {
22546 const TypeParameters& type_params =
22548 ASSERT(type_params.IsOld());
22549 TypeArguments& type_args = TypeArguments::Handle(zone);
22550 type_args = type_params.bounds();
22551 ASSERT(type_args.IsOld());
22552 ASSERT(type_args.IsCanonical());
22553 type_args = type_params.defaults();
22554 ASSERT(type_args.IsOld());
22555 ASSERT(type_args.IsCanonical());
22556 }
22558 type = result_type();
22559 ASSERT(type.IsOld());
22560 ASSERT(type.IsCanonical());
22563 const intptr_t num_params = NumParameters();
22564 for (intptr_t i = 0; i < num_params; i++) {
22565 type = ParameterTypeAt(i);
22566 ASSERT(type.IsOld());
22567 ASSERT(type.IsCanonical());
22568 }
22569#endif
22570 return ptr();
22571 }
22572 auto isolate_group = thread->isolate_group();
22573 ObjectStore* object_store = isolate_group->object_store();
22575 {
22576 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
22578 object_store->canonical_function_types());
22579 sig ^= table.GetOrNull(CanonicalFunctionTypeKey(*this));
22580 ASSERT(object_store->canonical_function_types() == table.Release().ptr());
22581 }
22582 if (sig.IsNull()) {
22583 // The function type was not found in the table. It is not canonical yet.
22584 // Canonicalize its type parameters and types.
22585
22586 // Clone this function type to the old heap and update
22587 // owners of type parameters.
22588 FunctionType& new_sig = FunctionType::Handle(zone);
22589 if (this->IsNew()) {
22590 new_sig ^= FunctionType::Clone(*this, Heap::kOld);
22591 } else {
22592 new_sig ^= this->ptr();
22593 }
22594 ASSERT(new_sig.IsOld());
22595
22596 if (new_sig.IsGeneric()) {
22597 const TypeParameters& type_params =
22598 TypeParameters::Handle(zone, new_sig.type_parameters());
22599 ASSERT(type_params.IsOld());
22600 TypeArguments& type_args = TypeArguments::Handle(zone);
22601 type_args = type_params.bounds();
22602 if (!type_args.IsCanonical()) {
22603 type_args = type_args.Canonicalize(thread);
22604 type_params.set_bounds(type_args);
22605 }
22606 type_args = type_params.defaults();
22607 if (!type_args.IsCanonical()) {
22608 type_args = type_args.Canonicalize(thread);
22609 type_params.set_defaults(type_args);
22610 }
22611 }
22613 type = new_sig.result_type();
22614 if (!type.IsCanonical()) {
22615 type = type.Canonicalize(thread);
22616 new_sig.set_result_type(type);
22617 }
22618 ASSERT(Array::Handle(zone, new_sig.parameter_types()).IsOld());
22619 ASSERT(Array::Handle(zone, new_sig.named_parameter_names()).IsOld());
22620 const intptr_t num_params = new_sig.NumParameters();
22621 for (intptr_t i = 0; i < num_params; i++) {
22622 type = new_sig.ParameterTypeAt(i);
22623 if (!type.IsCanonical()) {
22624 type = type.Canonicalize(thread);
22625 new_sig.SetParameterTypeAt(i, type);
22626 }
22627 }
22628 // Check to see if the function type got added to canonical table
22629 // during canonicalization of its signature types.
22630 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
22632 object_store->canonical_function_types());
22633 sig ^= table.GetOrNull(CanonicalFunctionTypeKey(new_sig));
22634 if (sig.IsNull()) {
22635 // Add this function type into the canonical table of function types.
22636 sig = new_sig.ptr();
22637 ASSERT(sig.IsOld());
22638 sig.SetCanonical(); // Mark object as being canonical.
22639 bool present = table.Insert(sig);
22640 ASSERT(!present);
22641 }
22642 object_store->set_canonical_function_types(table.Release());
22643 }
22644 return sig.ptr();
22645}
22646
// FunctionType URI enumeration: recurses into each parameter type and then
// the result type.
// NOTE(review): the signature (22647) and the `type` handle declaration
// (22650) are elided in this listing.
22648 Thread* thread = Thread::Current();
22649 Zone* zone = thread->zone();
22651 const intptr_t num_params = NumParameters();
22652 for (intptr_t i = 0; i < num_params; i++) {
22653 type = ParameterTypeAt(i);
22654 type.EnumerateURIs(uris);
22655 }
22656 // Handle result type last, since it appears last in the user visible name.
22657 type = result_type();
22658 type.EnumerateURIs(uris);
22659}
22660
// Prints a function type, wrapping it in parentheses when a nullability
// suffix follows so the suffix binds to the whole signature.
// NOTE(review): the signature line (22661) is elided in this listing.
22662 BaseTextBuffer* printer) const {
22663 const char* suffix = NullabilitySuffix(name_visibility);
22664 if (suffix[0] != '\0') {
22665 printer->AddString("(");
22666 }
22667 FunctionType::Cast(*this).Print(name_visibility, printer);
22668 if (suffix[0] != '\0') {
22669 printer->AddString(")");
22670 printer->AddString(suffix);
22671 }
22672}
22673
// Returns a copy of this type parameter with the requested nullability,
// cloning (and re-canonicalizing if this one was canonical) only when the
// nullability actually changes.
// NOTE(review): the signature line (22674) and lines 22684-22685, 22689 are
// elided in this listing.
22675 Heap::Space space) const {
22676 if (nullability() == value) {
22677 return ptr();
22678 }
22679 // Clone type parameter and set new nullability.
22680 TypeParameter& type_parameter = TypeParameter::Handle();
22681 type_parameter ^= Object::Clone(*this, space);
22682 type_parameter.set_nullability(value);
22683 type_parameter.SetHash(0);
22686 if (IsCanonical()) {
22687 // Object::Clone does not clone canonical bit.
22688 ASSERT(!type_parameter.IsCanonical());
22690 ASSERT(type_parameter.IsFinalized());
22691 type_parameter ^= type_parameter.Canonicalize(Thread::Current());
22692 }
22693 return type_parameter.ptr();
22694}
22695
// IsInstantiated check for type parameters: class type parameters are
// instantiated w.r.t. functions only; function type parameters are
// instantiated for kCurrentClass genericity or when beyond the free range.
// NOTE(review): the signature (22696) and line 22702 are elided.
22697 intptr_t num_free_fun_type_params) const {
22698 // Bounds of class type parameters are ignored in the VM.
22699 if (IsClassTypeParameter()) {
22700 return genericity == kFunctions;
22701 }
22703 return (genericity == kCurrentClass) || (index() >= num_free_fun_type_params);
22704}
22705
// TypeParameter::IsEquivalent: two type parameters are equivalent when
// their owners correspond (same parameterized class id, or function-type
// owners matched directly or through `function_type_equivalence`), their
// base/index agree, and their nullability is equivalent under `kind`.
// NOTE(review): the signature line (22706) and several elided lines
// (22718, 22725, 22727, 22731, 22736, 22742, 22747 -- mostly
// TRACE_TYPE_CHECKS_VERBOSE openers and an owner-comparison condition) are
// missing from this listing.
22707 const Instance& other,
22708 TypeEquality kind,
22709 FunctionTypeMapping* function_type_equivalence) const {
22710 TRACE_TYPE_CHECKS_VERBOSE(" TypeParameter::IsEquivalent(%s, %s, kind %d)\n",
22711 ToCString(), other.ToCString(),
22712 static_cast<int>(kind));
22713 if (ptr() == other.ptr()) {
22714 TRACE_TYPE_CHECKS_VERBOSE(" - result: true (same types)\n");
22715 return true;
22716 }
22717 if (!other.IsTypeParameter()) {
22719 " - result: false (other is not a type parameter)\n");
22720 return false;
22721 }
22722 const TypeParameter& other_type_param = TypeParameter::Cast(other);
22723 ASSERT(IsFinalized() && other_type_param.IsFinalized());
22724 // Compare index, base and owner.
22726 if (!other_type_param.IsFunctionTypeParameter()) {
22728 " - result: false (other is not a function type parameter)\n");
22729 return false;
22730 }
22732 other_type_param.parameterized_function_type()) &&
22733 ((function_type_equivalence == nullptr) ||
22734 !function_type_equivalence->ContainsOwnersOfTypeParameters(
22735 *this, other_type_param))) {
22737 " - result: false (owners are not equivalent)\n");
22738 return false;
22739 }
22740 } else {
22741 if (!other_type_param.IsClassTypeParameter()) {
22743 " - result: false (other is not a class type parameter)\n");
22744 return false;
22745 }
22746 if (parameterized_class_id() != other_type_param.parameterized_class_id()) {
22748 " - result: false (parameterized class id)\n");
22749 return false;
22750 }
22751 }
22752 if (base() != other_type_param.base() ||
22753 index() != other_type_param.index()) {
22754 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (mismatch base/index)\n");
22755 return false;
22756 }
22757 if (!IsNullabilityEquivalent(Thread::Current(), other_type_param, kind)) {
22758 TRACE_TYPE_CHECKS_VERBOSE(" - result: false (mismatch nullability)\n");
22759 return false;
22760 }
22761 TRACE_TYPE_CHECKS_VERBOSE(" - result: true\n");
22762 return true;
22763}
22764
// --- TypeParameter accessors ---
// The owner slot holds either a FunctionType (function type parameter) or
// a Smi-encoded class id (class type parameter); see set_owner's ASSERT.
// NOTE(review): several signature lines are elided in this listing
// (22771 parameterized_class_id, 22778-22779 set_parameterized_class_id,
// 22783 parameterized_class, 22793-22794 parameterized_function_type,
// 22811-22812 the function-owner branch of bound()); bodies below are
// annotated accordingly.
22765void TypeParameter::set_owner(const Object& value) const {
22766 ASSERT((IsFunctionTypeParameter() && value.IsFunctionType()) ||
22767 (IsClassTypeParameter() && value.IsSmi()));
22768 untag()->set_owner(value.ptr());
22769}
22770
// parameterized_class_id: decodes the Smi owner for class type parameters;
// function type parameters report kFunctionCid.
22772 if (IsClassTypeParameter()) {
22773 return Smi::Value(Smi::RawCast(untag()->owner()));
22774 } else {
22775 return kFunctionCid;
22776 }
22777}
// set_parameterized_class_id: stores the class id as a Smi owner.
22780 untag()->set_owner(Smi::New(value));
22781}
22782
// parameterized_class: resolves the owning class from its id, or null
// (lines 22785 and 22787 -- the cid load and class lookup -- are elided).
22784 if (IsClassTypeParameter()) {
22786 if (cid != kIllegalCid) {
22788 }
22789 }
22790 return Class::null();
22791}
22792
// parameterized_function_type: the owner slot cast to a FunctionType.
22795 return FunctionType::RawCast(untag()->owner());
22796}
22797
22798void TypeParameter::set_base(intptr_t value) const {
22799 ASSERT(value >= 0);
22801 StoreNonPointer(&untag()->base_, value);
22802}
22803
22804void TypeParameter::set_index(intptr_t value) const {
22805 ASSERT(value >= 0);
22807 StoreNonPointer(&untag()->index_, value);
22808}
22809
// bound(): looks up this parameter's bound in its owner's TypeParameters
// vector at (index - base); a null owning class yields Object?.
22810AbstractTypePtr TypeParameter::bound() const {
22813 const auto& type_parameters =
22814 TypeParameters::Handle(owner.type_parameters());
22815 return type_parameters.BoundAt(index() - base());
22816 } else {
22817 const auto& owner = Class::Handle(parameterized_class());
22818 if (owner.IsNull()) {
22819 return IsolateGroup::Current()->object_store()->nullable_object_type();
22820 }
22821 const auto& type_parameters =
22822 TypeParameters::Handle(owner.type_parameters());
22823 return type_parameters.BoundAt(index() - base());
22824 }
22825}
22826
// GetFromTypeArguments: selects function vs instantiator vector by owner
// kind and returns the type at this parameter's index (null-safe).
22828 const TypeArguments& instantiator_type_arguments,
22829 const TypeArguments& function_type_arguments) const {
22831 const TypeArguments& type_args = IsFunctionTypeParameter()
22832 ? function_type_arguments
22833 : instantiator_type_arguments;
22834 return type_args.TypeAtNullSafe(index());
22835}
22836
// TypeParameter::InstantiateFrom: substitutes this type parameter using the
// instantiator/function type-argument vectors. Function type parameters at
// or beyond num_free_fun_type_params are remapped (not substituted) via
// function_type_mapping; free ones are looked up in
// function_type_arguments. Class type parameters are looked up in
// instantiator_type_arguments, returning null on an out-of-range index to
// tolerate dynamically-unreachable code (see inline comment).
// NOTE(review): the signature (22836-22837), the `result` handle
// declaration (22845), branch openers 22847-22848 and 22877-22878, and the
// Never-normalization return (22868) are elided in this listing.
22838 const TypeArguments& instantiator_type_arguments,
22839 const TypeArguments& function_type_arguments,
22840 intptr_t num_free_fun_type_params,
22841 Heap::Space space,
22842 FunctionTypeMapping* function_type_mapping,
22843 intptr_t num_parent_type_args_adjustment) const {
22844 Zone* zone = Thread::Current()->zone();
22846 bool substituted = false;
22849 if (index() >= num_free_fun_type_params) {
22850 // Do not instantiate the function type parameter.
22851 // Get a replacement from the updated function type.
22852 ASSERT(function_type_mapping != nullptr);
22853 result = function_type_mapping->MapTypeParameter(*this);
22854 ASSERT(TypeParameter::Cast(result).index() ==
22855 index() - num_free_fun_type_params);
22856 ASSERT(TypeParameter::Cast(result).base() ==
22857 base() - num_free_fun_type_params);
22858 ASSERT(TypeParameter::Cast(result).nullability() == nullability());
22859 AbstractType& upper_bound = AbstractType::Handle(zone, bound());
22860 if (!upper_bound.IsInstantiated()) {
22861 upper_bound = upper_bound.InstantiateFrom(
22862 instantiator_type_arguments, function_type_arguments,
22863 num_free_fun_type_params, space, function_type_mapping,
22864 num_parent_type_args_adjustment);
22865 }
22866 if (upper_bound.ptr() == Type::NeverType()) {
22867 // Normalize 'X extends Never' to 'Never'.
22869 }
22870 } else if (function_type_arguments.IsNull()) {
22871 return Type::DynamicType();
22872 } else {
22873 result = function_type_arguments.TypeAt(index());
22874 substituted = true;
22875 }
22876 } else {
22879 if (instantiator_type_arguments.IsNull()) {
22880 return Type::DynamicType();
22881 }
22882 if (instantiator_type_arguments.Length() <= index()) {
22883 // InstantiateFrom can be invoked from a compilation pipeline with
22884 // mismatching type arguments vector. This can only happen for
22885 // a dynamically unreachable code - which compiler can't remove
22886 // statically for some reason.
22887 // To prevent crashes we return AbstractType::null(), understood by caller
22888 // (see AssertAssignableInstr::Canonicalize).
22889 return AbstractType::null();
22890 }
22891 result = instantiator_type_arguments.TypeAt(index());
22892 substituted = true;
22893 // Instantiating a class type parameter cannot result in a
22894 // function type parameter.
22895 // Bounds of class type parameters are ignored in the VM.
22896 }
22897 result = result.SetInstantiatedNullability(*this, space);
22898 if (substituted && (num_parent_type_args_adjustment != 0)) {
22899 // This type parameter is used inside a generic function type.
22900 // A type being substituted can have nested function types,
22901 // whose number of parent function type arguments should be adjusted
22902 // after the substitution.
22903 result = result.UpdateFunctionTypes(num_parent_type_args_adjustment,
22904 kAllFree, space, function_type_mapping);
22905 }
22906 // Canonicalization is not part of instantiation.
22907 return result.NormalizeFutureOrType(space);
22908}
22909
// UpdateFunctionTypes: remaps bound function type parameters (index beyond
// the free range) through function_type_mapping, shifting base/index by
// num_parent_type_args_adjustment; everything else returns unchanged.
// NOTE(review): the signature line (22910) and line 22915 are elided.
22911 intptr_t num_parent_type_args_adjustment,
22912 intptr_t num_free_fun_type_params,
22913 Heap::Space space,
22914 FunctionTypeMapping* function_type_mapping) const {
22916 ASSERT(num_parent_type_args_adjustment >= 0);
22917 if (IsFunctionTypeParameter() && (index() >= num_free_fun_type_params)) {
22918 Zone* zone = Thread::Current()->zone();
22919 ASSERT(function_type_mapping != nullptr);
22920 const auto& new_tp = TypeParameter::Handle(
22921 zone, function_type_mapping->MapTypeParameter(*this));
22922 ASSERT(new_tp.base() == base() + num_parent_type_args_adjustment);
22923 ASSERT(new_tp.index() == index() + num_parent_type_args_adjustment);
22924 ASSERT(new_tp.nullability() == nullability());
22925 ASSERT(new_tp.IsFinalized());
22926 return new_tp.ptr();
22927 } else {
22928 return ptr();
22929 }
22930}
22931
// TypeParameter::Canonicalize: returns the canonical representative,
// inserting a (possibly old-space-cloned) copy into the canonical
// type-parameters table under the canonicalization mutex if absent.
// NOTE(review): lines 22933, 22937-22938 (DEBUG checks) and 22948 (the
// CanonicalTypeParameterSet table constructor) are elided in this listing.
22932AbstractTypePtr TypeParameter::Canonicalize(Thread* thread) const {
22934 Zone* zone = thread->zone();
22935 if (IsCanonical()) {
22936#ifdef DEBUG
22939 }
22940#endif
22941 return this->ptr();
22942 }
22943 auto isolate_group = thread->isolate_group();
22944 ObjectStore* object_store = isolate_group->object_store();
22945 TypeParameter& type_parameter = TypeParameter::Handle(zone);
22946 {
22947 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
22949 object_store->canonical_type_parameters());
22950 type_parameter ^= table.GetOrNull(CanonicalTypeParameterKey(*this));
22951 if (type_parameter.IsNull()) {
22952 // Add this type parameter into the canonical table of type parameters.
22953 if (this->IsNew()) {
22954 type_parameter ^= Object::Clone(*this, Heap::kOld);
22955 } else {
22956 type_parameter = this->ptr();
22957 }
22958 ASSERT(type_parameter.IsOld());
22959 type_parameter.SetCanonical(); // Mark object as being canonical.
22960 bool present = table.Insert(type_parameter);
22961 ASSERT(!present);
22962 }
22963 object_store->set_canonical_type_parameters(table.Release());
22964 }
22965 return type_parameter.ptr();
22966}
22967
// Prints a type parameter's canonical name plus nullability suffix.
// NOTE(review): the signature line (22968) is elided.
22969 BaseTextBuffer* printer) const {
22970 const TypeParameter& type_param = TypeParameter::Cast(*this);
22971 // Type parameter names are meaningless after canonicalization.
22972 printer->AddString(type_param.CanonicalNameCString());
22973 printer->AddString(NullabilitySuffix(name_visibility));
22974}
22975
// ComputeHash for type parameters: combines parameterized class id,
// (legacy-normalized) nullability and -- per the elided lines 22979-22980
// and 22988 -- presumably base/index; confirm against the full source.
22978 uint32_t result = parameterized_class_id();
22981 // A legacy type should have the same hash as its non-nullable version to be
22982 // consistent with the definition of type equality in Dart code.
22983 Nullability type_param_nullability = nullability();
22984 if (type_param_nullability == Nullability::kLegacy) {
22985 type_param_nullability = Nullability::kNonNullable;
22986 }
22987 result = CombineHashes(result, static_cast<uint32_t>(type_param_nullability));
22989 SetHash(result);
22990 return result;
22991}
22992
// Allocates an uninitialized TypeParameter in old space.
22993TypeParameterPtr TypeParameter::New() {
22994 return Object::Allocate<TypeParameter>(Heap::kOld);
22995}
22996
// Allocates and initializes a TypeParameter. The owner is either a
// FunctionType (stored directly) or a Class (stored as its id; null owner
// stores kIllegalCid). Hash starts at 0; the type-testing stub is
// initialized before returning.
// NOTE(review): lines 23003 (flags computation), 23018 and 23021 (the stub
// code argument) are elided in this listing.
22997TypeParameterPtr TypeParameter::New(const Object& owner,
22998 intptr_t base,
22999 intptr_t index,
23000 Nullability nullability) {
23001 ASSERT(owner.IsNull() || owner.IsClass() || owner.IsFunctionType());
23002 const bool is_function_type_parameter = owner.IsFunctionType();
23004 is_function_type_parameter);
23005 Zone* Z = Thread::Current()->zone();
23006 const TypeParameter& result = TypeParameter::Handle(Z, TypeParameter::New());
23007 result.set_flags(flags);
23008 if (is_function_type_parameter) {
23009 result.set_owner(owner);
23010 } else {
23011 result.set_parameterized_class_id(owner.IsNull() ? kIllegalCid
23012 : Class::Cast(owner).id());
23013 }
23014 result.set_base(base);
23015 result.set_index(index);
23016 result.SetHash(0);
23017 result.set_nullability(nullability);
23019
23020 result.InitializeTypeTestingStubNonAtomic(
23022 return result.ptr();
23023}
23024
23025const char* TypeParameter::CanonicalNameCString(bool is_class_type_parameter,
23026 intptr_t base,
23027 intptr_t index) {
23028 Thread* thread = Thread::Current();
23029 ZoneTextBuffer printer(thread->zone());
23030 const char* base_fmt = is_class_type_parameter ? "C%" Pd : "F%" Pd;
23031 const char* index_fmt = is_class_type_parameter ? "X%" Pd : "Y%" Pd;
23032 if (base != 0) {
23033 printer.Printf(base_fmt, base);
23034 }
23035 printer.Printf(index_fmt, index - base);
23036 return printer.buffer();
23037}
23038
23039const char* TypeParameter::ToCString() const {
23040 if (IsNull()) {
23041 return "TypeParameter: null";
23042 }
23043 Thread* thread = Thread::Current();
23044 ZoneTextBuffer printer(thread->zone());
23045 printer.Printf("TypeParameter: ");
23046 printer.AddString(CanonicalNameCString());
23047 printer.AddString(NullabilitySuffix(kInternalName));
23048 return printer.buffer();
23049}
23050
// Number and Integer are abstract interfaces; these stubs exist only to
// satisfy the vtable and must never be reached on a real instance.
23051const char* Number::ToCString() const {
23052 // Number is an interface. No instances of Number should exist.
23053 UNREACHABLE();
23054 return "Number";
23055}
23056
23057const char* Integer::ToCString() const {
23058 // Integer is an interface. No instances of Integer should exist except null.
23059 ASSERT(IsNull());
23060 return "nullptr Integer";
23061}
23062
23063IntegerPtr Integer::New(const String& str, Heap::Space space) {
23064 // We are not supposed to have integers represented as two byte strings.
23065 ASSERT(str.IsOneByteString());
23066 if (str.IsNull() || (str.Length() == 0)) {
23067 return Integer::null();
23068 }
23069 int64_t value = 0;
23070 const char* cstr = str.ToCString();
23071 if (!OS::StringToInt64(cstr, &value)) {
23072 // Out of range.
23073 return Integer::null();
23074 }
23075 return Integer::New(value, space);
23076}
23077
23078IntegerPtr Integer::NewCanonical(const String& str) {
23079 // We are not supposed to have integers represented as two byte strings.
23080 ASSERT(str.IsOneByteString());
23081 int64_t value = 0;
23082 const char* cstr = str.ToCString();
23083 if (!OS::StringToInt64(cstr, &value)) {
23084 // Out of range.
23085 return Integer::null();
23086 }
23087 return NewCanonical(value);
23088}
23089
23090IntegerPtr Integer::NewCanonical(int64_t value) {
23091 if (Smi::IsValid(value)) {
23092 return Smi::New(static_cast<intptr_t>(value));
23093 }
23094 return Mint::NewCanonical(value);
23095}
23096
23097IntegerPtr Integer::New(int64_t value, Heap::Space space) {
23098 const bool is_smi = Smi::IsValid(value);
23099 if (is_smi) {
23100 return Smi::New(static_cast<intptr_t>(value));
23101 }
23102 return Mint::New(value, space);
23103}
23104
23105IntegerPtr Integer::NewFromUint64(uint64_t value, Heap::Space space) {
23106 return Integer::New(static_cast<int64_t>(value), space);
23107}
23108
23109bool Integer::IsValueInRange(uint64_t value) {
23110 return (value <= static_cast<uint64_t>(Mint::kMaxValue));
23111}
23112
23113bool Integer::Equals(const Instance& other) const {
23114 // Integer is an abstract class.
23115 UNREACHABLE();
23116 return false;
23117}
23118
23119bool Integer::IsZero() const {
23120 // Integer is an abstract class.
23121 UNREACHABLE();
23122 return false;
23123}
23124
23126 // Integer is an abstract class.
23127 UNREACHABLE();
23128 return false;
23129}
23130
23132 // Integer is an abstract class.
23133 UNREACHABLE();
23134 return 0.0;
23135}
23136
23137int64_t Integer::AsInt64Value() const {
23138 // Integer is an abstract class.
23139 UNREACHABLE();
23140 return 0;
23141}
23142
23144 // Integer is an abstract class.
23145 UNREACHABLE();
23146 return 0;
23147}
23148
23150 // Integer is an abstract class.
23151 UNREACHABLE();
23152 return false;
23153}
23154
23155int Integer::CompareWith(const Integer& other) const {
23156 // Integer is an abstract class.
23157 UNREACHABLE();
23158 return 0;
23159}
23160
23162 return Multiply64Hash(AsInt64Value());
23163}
23164
23165IntegerPtr Integer::AsValidInteger() const {
23166 if (IsSmi()) return ptr();
23167 if (IsMint()) {
23168 Mint& mint = Mint::Handle();
23169 mint ^= ptr();
23170 if (Smi::IsValid(mint.value())) {
23171 return Smi::New(static_cast<intptr_t>(mint.value()));
23172 } else {
23173 return ptr();
23174 }
23175 }
23176 return ptr();
23177}
23178
23179const char* Integer::ToHexCString(Zone* zone) const {
23180 ASSERT(IsSmi() || IsMint());
23181 int64_t value = AsInt64Value();
23182 if (value < 0) {
23183 return OS::SCreate(zone, "-0x%" PX64, -static_cast<uint64_t>(value));
23184 } else {
23185 return OS::SCreate(zone, "0x%" PX64, static_cast<uint64_t>(value));
23186 }
23187}
23188
23190 const Integer& other,
23191 Heap::Space space) const {
23192 // In 32-bit mode, the result of any operation between two Smis will fit in a
23193 // 32-bit signed result, except the product of two Smis, which will be 64-bit.
23194 // In 64-bit mode, the result of any operation between two Smis will fit in a
23195 // 64-bit signed result, except the product of two Smis (see below).
23196 if (IsSmi() && other.IsSmi()) {
23197 const intptr_t left_value = Smi::Value(Smi::RawCast(ptr()));
23198 const intptr_t right_value = Smi::Value(Smi::RawCast(other.ptr()));
23199 switch (operation) {
23200 case Token::kADD:
23201 return Integer::New(left_value + right_value, space);
23202 case Token::kSUB:
23203 return Integer::New(left_value - right_value, space);
23204 case Token::kMUL:
23205 return Integer::New(
23206 Utils::MulWithWrapAround(static_cast<int64_t>(left_value),
23207 static_cast<int64_t>(right_value)),
23208 space);
23209 case Token::kTRUNCDIV:
23210 return Integer::New(left_value / right_value, space);
23211 case Token::kMOD: {
23212 const intptr_t remainder = left_value % right_value;
23213 if (remainder < 0) {
23214 if (right_value < 0) {
23215 return Integer::New(remainder - right_value, space);
23216 } else {
23217 return Integer::New(remainder + right_value, space);
23218 }
23219 }
23220 return Integer::New(remainder, space);
23221 }
23222 default:
23223 UNIMPLEMENTED();
23224 }
23225 }
23226 const int64_t left_value = AsInt64Value();
23227 const int64_t right_value = other.AsInt64Value();
23228 switch (operation) {
23229 case Token::kADD:
23230 return Integer::New(Utils::AddWithWrapAround(left_value, right_value),
23231 space);
23232
23233 case Token::kSUB:
23234 return Integer::New(Utils::SubWithWrapAround(left_value, right_value),
23235 space);
23236
23237 case Token::kMUL:
23238 return Integer::New(Utils::MulWithWrapAround(left_value, right_value),
23239 space);
23240
23241 case Token::kTRUNCDIV:
23242 if ((left_value == Mint::kMinValue) && (right_value == -1)) {
23243 // Division special case: overflow in int64_t.
23244 // MIN_VALUE / -1 = (MAX_VALUE + 1), which wraps around to MIN_VALUE
23245 return Integer::New(Mint::kMinValue, space);
23246 }
23247 return Integer::New(left_value / right_value, space);
23248
23249 case Token::kMOD: {
23250 if ((left_value == Mint::kMinValue) && (right_value == -1)) {
23251 // Modulo special case: overflow in int64_t.
23252 // MIN_VALUE % -1 = 0 for reason given above.
23253 return Integer::New(0, space);
23254 }
23255 const int64_t remainder = left_value % right_value;
23256 if (remainder < 0) {
23257 if (right_value < 0) {
23258 return Integer::New(remainder - right_value, space);
23259 } else {
23260 return Integer::New(remainder + right_value, space);
23261 }
23262 }
23263 return Integer::New(remainder, space);
23264 }
23265 default:
23266 UNIMPLEMENTED();
23267 return Integer::null();
23268 }
23269}
23270
23272 const Integer& other,
23273 Heap::Space space) const {
23274 if (IsSmi() && other.IsSmi()) {
23275 intptr_t op1_value = Smi::Value(Smi::RawCast(ptr()));
23276 intptr_t op2_value = Smi::Value(Smi::RawCast(other.ptr()));
23277 intptr_t result = 0;
23278 switch (kind) {
23279 case Token::kBIT_AND:
23280 result = op1_value & op2_value;
23281 break;
23282 case Token::kBIT_OR:
23283 result = op1_value | op2_value;
23284 break;
23285 case Token::kBIT_XOR:
23286 result = op1_value ^ op2_value;
23287 break;
23288 default:
23289 UNIMPLEMENTED();
23290 }
23292 return Smi::New(result);
23293 } else {
23294 int64_t a = AsInt64Value();
23295 int64_t b = other.AsInt64Value();
23296 switch (kind) {
23297 case Token::kBIT_AND:
23298 return Integer::New(a & b, space);
23299 case Token::kBIT_OR:
23300 return Integer::New(a | b, space);
23301 case Token::kBIT_XOR:
23302 return Integer::New(a ^ b, space);
23303 default:
23304 UNIMPLEMENTED();
23305 return Integer::null();
23306 }
23307 }
23308}
23309
23311 const Integer& other,
23312 Heap::Space space) const {
23313 int64_t a = AsInt64Value();
23314 int64_t b = other.AsInt64Value();
23315 ASSERT(b >= 0);
23316 switch (kind) {
23317 case Token::kSHL:
23319 case Token::kSHR:
23320 return Integer::New(a >> Utils::Minimum<int64_t>(b, Mint::kBits), space);
23321 case Token::kUSHR:
23322 return Integer::New(
23323 (b >= kBitsPerInt64) ? 0 : static_cast<uint64_t>(a) >> b, space);
23324 default:
23325 UNIMPLEMENTED();
23326 return Integer::null();
23327 }
23328}
23329
23330bool Smi::Equals(const Instance& other) const {
23331 if (other.IsNull() || !other.IsSmi()) {
23332 return false;
23333 }
23334 return (this->Value() == Smi::Cast(other).Value());
23335}
23336
// Converts the Smi's value to a double.
double Smi::AsDoubleValue() const {
  return static_cast<double>(this->Value());
}
23340
// Every Smi fits in 64 bits, so this is a plain widening read.
int64_t Smi::AsInt64Value() const {
  return this->Value();
}
23344
23346 return this->Value() & 0xFFFFFFFF;
23347}
23348
23349int Smi::CompareWith(const Integer& other) const {
23350 if (other.IsSmi()) {
23351 const Smi& other_smi = Smi::Cast(other);
23352 if (this->Value() < other_smi.Value()) {
23353 return -1;
23354 } else if (this->Value() > other_smi.Value()) {
23355 return 1;
23356 } else {
23357 return 0;
23358 }
23359 }
23360 ASSERT(!other.FitsIntoSmi());
23361 if (other.IsMint()) {
23362 if (this->IsNegative() == other.IsNegative()) {
23363 return this->IsNegative() ? 1 : -1;
23364 }
23365 return this->IsNegative() ? -1 : 1;
23366 }
23367 UNREACHABLE();
23368 return 0;
23369}
23370
// Prints the value in decimal into the current thread's zone.
const char* Smi::ToCString() const {
  return OS::SCreate(Thread::Current()->zone(), "%" Pd "", Value());
}
23374
// The Smi class object lives in the isolate group's object store.
ClassPtr Smi::Class() {
  return IsolateGroup::Current()->object_store()->smi_class();
}
23378
// Stores the boxed 64-bit value into the raw object. The field holds no
// object pointer, so no write barrier is required.
void Mint::set_value(int64_t value) const {
  StoreNonPointer(&untag()->value_, value);
}
23382
23383MintPtr Mint::New(int64_t val, Heap::Space space) {
23384 // Do not allocate a Mint if Smi would do.
23385 ASSERT(!Smi::IsValid(val));
23386 ASSERT(IsolateGroup::Current()->object_store()->mint_class() !=
23387 Class::null());
23388 const auto& result = Mint::Handle(Object::Allocate<Mint>(space));
23389 result.set_value(val);
23390 return result.ptr();
23391}
23392
23393MintPtr Mint::NewCanonical(int64_t value) {
23394 Thread* thread = Thread::Current();
23395 Mint& mint = Mint::Handle(thread->zone(), Mint::New(value, Heap::kOld));
23396 mint ^= mint.Canonicalize(thread);
23397 return mint.ptr();
23398}
23399
23400bool Mint::Equals(const Instance& other) const {
23401 if (this->ptr() == other.ptr()) {
23402 // Both handles point to the same raw instance.
23403 return true;
23404 }
23405 if (!other.IsMint() || other.IsNull()) {
23406 return false;
23407 }
23408 return value() == Mint::Cast(other).value();
23409}
23410
// Converts to double; values beyond 2^53 may round (C++ conversion rules).
double Mint::AsDoubleValue() const {
  return static_cast<double>(this->value());
}
23414
// The boxed value is already a 64-bit integer.
int64_t Mint::AsInt64Value() const {
  return this->value();
}
23418
23420 return this->value() & 0xFFFFFFFF;
23421}
23422
23423bool Mint::FitsIntoSmi() const {
23424 return Smi::IsValid(AsInt64Value());
23425}
23426
23427int Mint::CompareWith(const Integer& other) const {
23428 ASSERT(!FitsIntoSmi());
23429 ASSERT(other.IsMint() || other.IsSmi());
23430 int64_t a = AsInt64Value();
23431 int64_t b = other.AsInt64Value();
23432 if (a < b) {
23433 return -1;
23434 } else if (a > b) {
23435 return 1;
23436 } else {
23437 return 0;
23438 }
23439}
23440
// Prints the value in decimal into the current thread's zone.
const char* Mint::ToCString() const {
  return OS::SCreate(Thread::Current()->zone(), "%" Pd64 "", value());
}
23444
// Stores the boxed double into the raw object. The field holds no object
// pointer, so no write barrier is required.
void Double::set_value(double value) const {
  StoreNonPointer(&untag()->value_, value);
}
23448
23449bool Double::BitwiseEqualsToDouble(double value) const {
23451 void* this_addr = reinterpret_cast<void*>(
23452 reinterpret_cast<uword>(this->untag()) + value_offset);
23453 void* other_addr = reinterpret_cast<void*>(&value);
23454 return (memcmp(this_addr, other_addr, sizeof(value)) == 0);
23455}
23456
23457bool Double::OperatorEquals(const Instance& other) const {
23458 if (this->IsNull() || other.IsNull()) {
23459 return (this->IsNull() && other.IsNull());
23460 }
23461 if (!other.IsDouble()) {
23462 return false;
23463 }
23464 return this->value() == Double::Cast(other).value();
23465}
23466
23467bool Double::CanonicalizeEquals(const Instance& other) const {
23468 if (this->ptr() == other.ptr()) {
23469 return true; // "===".
23470 }
23471 if (other.IsNull() || !other.IsDouble()) {
23472 return false;
23473 }
23474 return BitwiseEqualsToDouble(Double::Cast(other).value());
23475}
23476
23478 return Hash64To32(bit_cast<uint64_t>(value()));
23479}
23480
23481DoublePtr Double::New(double d, Heap::Space space) {
23482 ASSERT(IsolateGroup::Current()->object_store()->double_class() !=
23483 Class::null());
23484 const auto& result = Double::Handle(Object::Allocate<Double>(space));
23485 result.set_value(d);
23486 return result.ptr();
23487}
23488
23489DoublePtr Double::New(const String& str, Heap::Space space) {
23490 double double_value;
23491 if (!CStringToDouble(str.ToCString(), str.Length(), &double_value)) {
23492 return Double::Handle().ptr();
23493 }
23494 return New(double_value, space);
23495}
23496
23497DoublePtr Double::NewCanonical(double value) {
23498 Thread* thread = Thread::Current();
23500 dbl ^= dbl.Canonicalize(thread);
23501 return dbl.ptr();
23502}
23503
23504DoublePtr Double::NewCanonical(const String& str) {
23505 double double_value;
23506 if (!CStringToDouble(str.ToCString(), str.Length(), &double_value)) {
23507 return Double::Handle().ptr();
23508 }
23509 return NewCanonical(double_value);
23510}
23511
23512StringPtr Number::ToString(Heap::Space space) const {
23513 // Refactoring can avoid Zone::Alloc and strlen, but gains are insignificant.
23514 const char* cstr = ToCString();
23515 intptr_t len = strlen(cstr);
23516// Resulting string is ASCII ...
23517#ifdef DEBUG
23518 for (intptr_t i = 0; i < len; ++i) {
23519 ASSERT(static_cast<uint8_t>(cstr[i]) < 128);
23520 }
23521#endif // DEBUG
23522 // ... which is a subset of Latin-1.
23523 return String::FromLatin1(reinterpret_cast<const uint8_t*>(cstr), len, space);
23524}
23525
23526const char* Double::ToCString() const {
23527 if (isnan(value())) {
23528 return "NaN";
23529 }
23530 if (isinf(value())) {
23531 return value() < 0 ? "-Infinity" : "Infinity";
23532 }
23533 const int kBufferSize = 128;
23534 char* buffer = Thread::Current()->zone()->Alloc<char>(kBufferSize);
23535 buffer[kBufferSize - 1] = '\0';
23537 return buffer;
23538}
23539
23540void StringHasher::Add(const String& str, intptr_t begin_index, intptr_t len) {
23541 ASSERT(begin_index >= 0);
23542 ASSERT(len >= 0);
23543 ASSERT((begin_index + len) <= str.Length());
23544 if (len == 0) {
23545 return;
23546 }
23547 if (str.IsOneByteString()) {
23548 NoSafepointScope no_safepoint;
23549 Add(OneByteString::CharAddr(str, begin_index), len);
23550 } else if (str.IsTwoByteString()) {
23551 NoSafepointScope no_safepoint;
23552 Add(TwoByteString::CharAddr(str, begin_index), len);
23553 } else {
23554 UNREACHABLE();
23555 }
23556}
23557
// Hashes |len| code units of |str| starting at |begin_index|.
uword String::Hash(const String& str, intptr_t begin_index, intptr_t len) {
  StringHasher hasher;
  hasher.Add(str, begin_index, len);
  return hasher.Finalize();
}
23563
// Hash of the logical concatenation str1 + str2, without materializing it.
uword String::HashConcat(const String& str1, const String& str2) {
  StringHasher hasher;
  hasher.Add(str1, 0, str1.Length());
  hasher.Add(str2, 0, str2.Length());
  return hasher.Finalize();
}
23570
23571uword String::Hash(StringPtr raw) {
23572 StringHasher hasher;
23573 uword length = Smi::Value(raw->untag()->length());
23574 if (raw->IsOneByteString()) {
23575 const uint8_t* data = static_cast<OneByteStringPtr>(raw)->untag()->data();
23576 return String::Hash(data, length);
23577 } else {
23578 const uint16_t* data = static_cast<TwoByteStringPtr>(raw)->untag()->data();
23579 return String::Hash(data, length);
23580 }
23581}
23582
// Hashes |len| bytes of |characters| (treated as one-byte code units).
uword String::Hash(const char* characters, intptr_t len) {
  StringHasher hasher;
  hasher.Add(reinterpret_cast<const uint8_t*>(characters), len);
  return hasher.Finalize();
}
23588
// Hashes |len| one-byte code units.
uword String::Hash(const uint8_t* characters, intptr_t len) {
  StringHasher hasher;
  hasher.Add(characters, len);
  return hasher.Finalize();
}
23594
// Hashes |len| two-byte (UTF-16) code units.
uword String::Hash(const uint16_t* characters, intptr_t len) {
  StringHasher hasher;
  hasher.Add(characters, len);
  return hasher.Finalize();
}
23600
23601intptr_t String::CharSize() const {
23602 intptr_t class_id = ptr()->GetClassId();
23603 if (class_id == kOneByteStringCid) {
23604 return kOneByteChar;
23605 }
23606 ASSERT(class_id == kTwoByteStringCid);
23607 return kTwoByteChar;
23608}
23609
23610bool String::Equals(const Instance& other) const {
23611 if (this->ptr() == other.ptr()) {
23612 // Both handles point to the same raw instance.
23613 return true;
23614 }
23615
23616 if (!other.IsString()) {
23617 return false;
23618 }
23619
23620 const String& other_string = String::Cast(other);
23621 return Equals(other_string);
23622}
23623
23624bool String::Equals(const String& str,
23625 intptr_t begin_index,
23626 intptr_t len) const {
23627 ASSERT(begin_index >= 0);
23628 ASSERT((begin_index == 0) || (begin_index < str.Length()));
23629 ASSERT(len >= 0);
23630 ASSERT(len <= str.Length());
23631 if (len != this->Length()) {
23632 return false; // Lengths don't match.
23633 }
23634
23635 for (intptr_t i = 0; i < len; i++) {
23636 if (CharAt(i) != str.CharAt(begin_index + i)) {
23637 return false;
23638 }
23639 }
23640
23641 return true;
23642}
23643
23644bool String::Equals(const char* cstr) const {
23645 ASSERT(cstr != nullptr);
23646 CodePointIterator it(*this);
23647 intptr_t len = strlen(cstr);
23648 while (it.Next()) {
23649 if (*cstr == '\0') {
23650 // Lengths don't match.
23651 return false;
23652 }
23653 int32_t ch;
23654 intptr_t consumed =
23655 Utf8::Decode(reinterpret_cast<const uint8_t*>(cstr), len, &ch);
23656 if (consumed == 0 || it.Current() != ch) {
23657 return false;
23658 }
23659 cstr += consumed;
23660 len -= consumed;
23661 }
23662 return *cstr == '\0';
23663}
23664
23665bool String::Equals(const uint8_t* latin1_array, intptr_t len) const {
23666 if (len != this->Length()) {
23667 // Lengths don't match.
23668 return false;
23669 }
23670
23671 for (intptr_t i = 0; i < len; i++) {
23672 if (this->CharAt(i) != latin1_array[i]) {
23673 return false;
23674 }
23675 }
23676 return true;
23677}
23678
23679bool String::Equals(const uint16_t* utf16_array, intptr_t len) const {
23680 if (len != this->Length()) {
23681 // Lengths don't match.
23682 return false;
23683 }
23684
23685 for (intptr_t i = 0; i < len; i++) {
23686 if (this->CharAt(i) != LoadUnaligned(&utf16_array[i])) {
23687 return false;
23688 }
23689 }
23690 return true;
23691}
23692
23693bool String::Equals(const int32_t* utf32_array, intptr_t len) const {
23694 if (len < 0) return false;
23695 intptr_t j = 0;
23696 for (intptr_t i = 0; i < len; ++i) {
23697 if (Utf::IsSupplementary(utf32_array[i])) {
23698 uint16_t encoded[2];
23699 Utf16::Encode(utf32_array[i], &encoded[0]);
23700 if (j + 1 >= Length()) return false;
23701 if (CharAt(j++) != encoded[0]) return false;
23702 if (CharAt(j++) != encoded[1]) return false;
23703 } else {
23704 if (j >= Length()) return false;
23705 if (CharAt(j++) != utf32_array[i]) return false;
23706 }
23707 }
23708 return j == Length();
23709}
23710
// True if this string is exactly str1 followed by str2.
bool String::EqualsConcat(const String& str1, const String& str2) const {
  return (Length() == str1.Length() + str2.Length()) &&
         str1.Equals(*this, 0, str1.Length()) &&
         str2.Equals(*this, str1.Length(), str2.Length());
}
23716
23717intptr_t String::CompareTo(const String& other) const {
23718 const intptr_t this_len = this->Length();
23719 const intptr_t other_len = other.IsNull() ? 0 : other.Length();
23720 const intptr_t len = (this_len < other_len) ? this_len : other_len;
23721 for (intptr_t i = 0; i < len; i++) {
23722 uint16_t this_code_unit = this->CharAt(i);
23723 uint16_t other_code_unit = other.CharAt(i);
23724 if (this_code_unit < other_code_unit) {
23725 return -1;
23726 }
23727 if (this_code_unit > other_code_unit) {
23728 return 1;
23729 }
23730 }
23731 if (this_len < other_len) return -1;
23732 if (this_len > other_len) return 1;
23733 return 0;
23734}
23735
23736bool String::StartsWith(StringPtr str, StringPtr prefix) {
23737 if (prefix == String::null()) return false;
23738
23739 const intptr_t length = String::LengthOf(str);
23740 const intptr_t prefix_length = String::LengthOf(prefix);
23741 if (prefix_length > length) return false;
23742
23743 for (intptr_t i = 0; i < prefix_length; i++) {
23744 if (String::CharAt(str, i) != String::CharAt(prefix, i)) {
23745 return false;
23746 }
23747 }
23748 return true;
23749}
23750
23751bool String::EndsWith(const String& other) const {
23752 if (other.IsNull()) {
23753 return false;
23754 }
23755 const intptr_t len = this->Length();
23756 const intptr_t other_len = other.Length();
23757 const intptr_t offset = len - other_len;
23758
23759 if ((other_len == 0) || (other_len > len)) {
23760 return false;
23761 }
23762 for (int i = offset; i < len; i++) {
23763 if (this->CharAt(i) != other.CharAt(i - offset)) {
23764 return false;
23765 }
23766 }
23767 return true;
23768}
23769
23770InstancePtr String::CanonicalizeLocked(Thread* thread) const {
23771 if (IsCanonical()) {
23772 return this->ptr();
23773 }
23774 return Symbols::New(Thread::Current(), *this);
23775}
23776
23777StringPtr String::New(const char* cstr, Heap::Space space) {
23778 ASSERT(cstr != nullptr);
23779 intptr_t array_len = strlen(cstr);
23780 const uint8_t* utf8_array = reinterpret_cast<const uint8_t*>(cstr);
23781 return String::FromUTF8(utf8_array, array_len, space);
23782}
23783
23784StringPtr String::FromUTF8(const uint8_t* utf8_array,
23785 intptr_t array_len,
23786 Heap::Space space) {
23788 intptr_t len = Utf8::CodeUnitCount(utf8_array, array_len, &type);
23789 if (type == Utf8::kLatin1) {
23790 const String& strobj = String::Handle(OneByteString::New(len, space));
23791 if (len > 0) {
23792 NoSafepointScope no_safepoint;
23793 if (!Utf8::DecodeToLatin1(utf8_array, array_len,
23794 OneByteString::DataStart(strobj), len)) {
23795 Utf8::ReportInvalidByte(utf8_array, array_len, len);
23796 return String::null();
23797 }
23798 }
23799 return strobj.ptr();
23800 }
23802 const String& strobj = String::Handle(TwoByteString::New(len, space));
23803 NoSafepointScope no_safepoint;
23804 if (!Utf8::DecodeToUTF16(utf8_array, array_len,
23805 TwoByteString::DataStart(strobj), len)) {
23806 Utf8::ReportInvalidByte(utf8_array, array_len, len);
23807 return String::null();
23808 }
23809 return strobj.ptr();
23810}
23811
// Creates a one-byte String from |array_len| Latin-1 code units.
StringPtr String::FromLatin1(const uint8_t* latin1_array,
                             intptr_t array_len,
                             Heap::Space space) {
  return OneByteString::New(latin1_array, array_len, space);
}
23817
23818StringPtr String::FromUTF16(const uint16_t* utf16_array,
23819 intptr_t array_len,
23820 Heap::Space space) {
23821 bool is_one_byte_string = true;
23822 for (intptr_t i = 0; i < array_len; ++i) {
23823 if (!Utf::IsLatin1(LoadUnaligned(&utf16_array[i]))) {
23824 is_one_byte_string = false;
23825 break;
23826 }
23827 }
23828 if (is_one_byte_string) {
23829 return OneByteString::New(utf16_array, array_len, space);
23830 }
23831 return TwoByteString::New(utf16_array, array_len, space);
23832}
23833
23834StringPtr String::FromUTF32(const int32_t* utf32_array,
23835 intptr_t array_len,
23836 Heap::Space space) {
23837 bool is_one_byte_string = true;
23838 intptr_t utf16_len = array_len;
23839 for (intptr_t i = 0; i < array_len; ++i) {
23840 if (!Utf::IsLatin1(utf32_array[i])) {
23841 is_one_byte_string = false;
23842 if (Utf::IsSupplementary(utf32_array[i])) {
23843 utf16_len += 1;
23844 }
23845 }
23846 }
23847 if (is_one_byte_string) {
23848 return OneByteString::New(utf32_array, array_len, space);
23849 }
23850 return TwoByteString::New(utf16_len, utf32_array, array_len, space);
23851}
23852
23853StringPtr String::New(const String& str, Heap::Space space) {
23854 // Currently this just creates a copy of the string in the correct space.
23855 // Once we have external string support, this will also create a heap copy of
23856 // the string if necessary. Some optimizations are possible, such as not
23857 // copying internal strings into the same space.
23858 intptr_t len = str.Length();
23860 intptr_t char_size = str.CharSize();
23861 if (char_size == kOneByteChar) {
23862 result = OneByteString::New(len, space);
23863 } else {
23864 ASSERT(char_size == kTwoByteChar);
23865 result = TwoByteString::New(len, space);
23866 }
23867 String::Copy(result, 0, str, 0, len);
23868 return result.ptr();
23869}
23870
// Copies |len| one-byte characters into |dst| at |dst_offset|, widening each
// byte when |dst| is a two-byte string.
void String::Copy(const String& dst,
                  intptr_t dst_offset,
                  const uint8_t* characters,
                  intptr_t len) {
  ASSERT(dst_offset >= 0);
  ASSERT(len >= 0);
  ASSERT(len <= (dst.Length() - dst_offset));
  if (dst.IsOneByteString()) {
    // Bulk copy; the raw interior pointer must stay stable.
    NoSafepointScope no_safepoint;
    if (len > 0) {
      memmove(OneByteString::CharAddr(dst, dst_offset), characters, len);
    }
  } else if (dst.IsTwoByteString()) {
    // Widen byte-by-byte; CharAddr is re-derived each iteration.
    for (intptr_t i = 0; i < len; ++i) {
      *TwoByteString::CharAddr(dst, i + dst_offset) = characters[i];
    }
  }
}
23889
// Copies |array_len| UTF-16 code units into |dst| at |dst_offset|. When
// |dst| is one-byte, every unit must already be Latin-1 (debug-checked).
void String::Copy(const String& dst,
                  intptr_t dst_offset,
                  const uint16_t* utf16_array,
                  intptr_t array_len) {
  ASSERT(dst_offset >= 0);
  ASSERT(array_len >= 0);
  ASSERT(array_len <= (dst.Length() - dst_offset));
  if (dst.IsOneByteString()) {
    NoSafepointScope no_safepoint;
    for (intptr_t i = 0; i < array_len; ++i) {
      // Narrowing store: only valid for Latin-1 code units.
      ASSERT(Utf::IsLatin1(LoadUnaligned(&utf16_array[i])));
      *OneByteString::CharAddr(dst, i + dst_offset) = utf16_array[i];
    }
  } else {
    ASSERT(dst.IsTwoByteString());
    NoSafepointScope no_safepoint;
    if (array_len > 0) {
      // Same width: bulk copy (2 bytes per code unit).
      memmove(TwoByteString::CharAddr(dst, dst_offset), utf16_array,
              array_len * 2);
    }
  }
}
23912
// Copies |len| code units from |src| (starting at |src_offset|) into |dst|
// (starting at |dst_offset|), dispatching on the source representation.
void String::Copy(const String& dst,
                  intptr_t dst_offset,
                  const String& src,
                  intptr_t src_offset,
                  intptr_t len) {
  ASSERT(dst_offset >= 0);
  ASSERT(src_offset >= 0);
  ASSERT(len >= 0);
  ASSERT(len <= (dst.Length() - dst_offset));
  ASSERT(len <= (src.Length() - src_offset));
  if (len > 0) {
    intptr_t char_size = src.CharSize();
    if (char_size == kOneByteChar) {
      ASSERT(src.IsOneByteString());
      // The scope keeps the raw source pointer valid across the copy.
      NoSafepointScope no_safepoint;
      String::Copy(dst, dst_offset, OneByteString::CharAddr(src, src_offset),
                   len);
    } else {
      ASSERT(char_size == kTwoByteChar);
      ASSERT(src.IsTwoByteString());
      NoSafepointScope no_safepoint;
      String::Copy(dst, dst_offset, TwoByteString::CharAddr(src, src_offset),
                   len);
    }
  }
}
23939
23941 if (str.IsOneByteString()) {
23943 }
23944 ASSERT(str.IsTwoByteString());
23946}
23947
// True if code point |c| is the IRI escape marker '%'.
static bool IsPercent(int32_t c) {
  return c == '%';
}
23951
// True for the hex digits produced by EncodeIRI: '0'-'9' and UPPERCASE
// 'A'-'F' only; lowercase digits are not accepted.
static bool IsHexCharacter(int32_t c) {
  return (c >= '0' && c <= '9') || (c >= 'A' && c <= 'F');
}
23961
// True for characters that may appear unescaped in an encoded IRI: ASCII
// alphanumerics plus '-', '_', '.', '~'.
static bool IsURISafeCharacter(int32_t c) {
  if ((c >= '0') && (c <= '9')) return true;
  if ((c >= 'a') && (c <= 'z')) return true;
  if ((c >= 'A') && (c <= 'Z')) return true;
  return (c == '-') || (c == '_') || (c == '.') || (c == '~');
}
23974
23975static int32_t GetHexCharacter(int32_t c) {
23976 ASSERT(c >= 0);
23977 ASSERT(c < 16);
23978 const char* hex = "0123456789ABCDEF";
23979 return hex[c];
23980}
23981
23982static int32_t GetHexValue(int32_t c) {
23983 if (c >= '0' && c <= '9') {
23984 return c - '0';
23985 }
23986 if (c >= 'A' && c <= 'F') {
23987 return c - 'A' + 10;
23988 }
23989 UNREACHABLE();
23990 return 0;
23991}
23992
// Combines two hex digits (|c1| is the high nibble) into a byte value.
static int32_t MergeHexCharacters(int32_t c1, int32_t c2) {
  return GetHexValue(c1) << 4 | GetHexValue(c2);
}
23996
23997const char* String::EncodeIRI(const String& str) {
23998 const intptr_t len = Utf8::Length(str);
23999 Zone* zone = Thread::Current()->zone();
24000 uint8_t* utf8 = zone->Alloc<uint8_t>(len);
24001 str.ToUTF8(utf8, len);
24002 intptr_t num_escapes = 0;
24003 for (int i = 0; i < len; ++i) {
24004 uint8_t byte = utf8[i];
24005 if (!IsURISafeCharacter(byte)) {
24006 num_escapes += 2;
24007 }
24008 }
24009 intptr_t cstr_len = len + num_escapes + 1;
24010 char* cstr = zone->Alloc<char>(cstr_len);
24011 intptr_t index = 0;
24012 for (int i = 0; i < len; ++i) {
24013 uint8_t byte = utf8[i];
24014 if (!IsURISafeCharacter(byte)) {
24015 cstr[index++] = '%';
24016 cstr[index++] = GetHexCharacter(byte >> 4);
24017 cstr[index++] = GetHexCharacter(byte & 0xF);
24018 } else {
24019 ASSERT(byte <= 127);
24020 cstr[index++] = byte;
24021 }
24022 }
24023 cstr[index] = '\0';
24024 return cstr;
24025}
24026
24027StringPtr String::DecodeIRI(const String& str) {
24028 CodePointIterator cpi(str);
24029 intptr_t num_escapes = 0;
24030 intptr_t len = str.Length();
24031 {
24032 CodePointIterator cpi(str);
24033 while (cpi.Next()) {
24034 int32_t code_point = cpi.Current();
24035 if (IsPercent(code_point)) {
24036 // Verify that the two characters following the % are hex digits.
24037 if (!cpi.Next()) {
24038 return String::null();
24039 }
24040 int32_t code_point = cpi.Current();
24041 if (!IsHexCharacter(code_point)) {
24042 return String::null();
24043 }
24044 if (!cpi.Next()) {
24045 return String::null();
24046 }
24047 code_point = cpi.Current();
24048 if (!IsHexCharacter(code_point)) {
24049 return String::null();
24050 }
24051 num_escapes += 2;
24052 }
24053 }
24054 }
24055 intptr_t utf8_len = len - num_escapes;
24056 ASSERT(utf8_len >= 0);
24057 Zone* zone = Thread::Current()->zone();
24058 uint8_t* utf8 = zone->Alloc<uint8_t>(utf8_len);
24059 {
24060 intptr_t index = 0;
24061 CodePointIterator cpi(str);
24062 while (cpi.Next()) {
24063 ASSERT(index < utf8_len);
24064 int32_t code_point = cpi.Current();
24065 if (IsPercent(code_point)) {
24066 cpi.Next();
24067 int32_t ch1 = cpi.Current();
24068 cpi.Next();
24069 int32_t ch2 = cpi.Current();
24070 int32_t merged = MergeHexCharacters(ch1, ch2);
24071 ASSERT(merged >= 0 && merged < 256);
24072 utf8[index] = static_cast<uint8_t>(merged);
24073 } else {
24074 ASSERT(code_point >= 0 && code_point < 256);
24075 utf8[index] = static_cast<uint8_t>(code_point);
24076 }
24077 index++;
24078 }
24079 }
24080 return FromUTF8(utf8, utf8_len);
24081}
24082
// Creates a String from a printf-style format (default space).
StringPtr String::NewFormatted(const char* format, ...) {
  va_list args;
  va_start(args, format);
  StringPtr result = NewFormattedV(format, args);
  // Keep the raw result from moving between formatting and return.
  NoSafepointScope no_safepoint;
  va_end(args);
  return result;
}
24091
// Creates a String from a printf-style format, allocated in |space|.
StringPtr String::NewFormatted(Heap::Space space, const char* format, ...) {
  va_list args;
  va_start(args, format);
  StringPtr result = NewFormattedV(format, args, space);
  // Keep the raw result from moving between formatting and return.
  NoSafepointScope no_safepoint;
  va_end(args);
  return result;
}
24100
24101StringPtr String::NewFormattedV(const char* format,
24102 va_list args,
24103 Heap::Space space) {
24104 va_list args_copy;
24105 va_copy(args_copy, args);
24106 intptr_t len = Utils::VSNPrint(nullptr, 0, format, args_copy);
24107 va_end(args_copy);
24108
24109 Zone* zone = Thread::Current()->zone();
24110 char* buffer = zone->Alloc<char>(len + 1);
24111 Utils::VSNPrint(buffer, (len + 1), format, args);
24112
24113 return String::New(buffer, space);
24114}
24115
24116StringPtr String::Concat(const String& str1,
24117 const String& str2,
24118 Heap::Space space) {
24119 ASSERT(!str1.IsNull() && !str2.IsNull());
24120 intptr_t char_size = Utils::Maximum(str1.CharSize(), str2.CharSize());
24121 if (char_size == kTwoByteChar) {
24122 return TwoByteString::Concat(str1, str2, space);
24123 }
24124 return OneByteString::Concat(str1, str2, space);
24125}
24126
// Concatenates every string in |strings|.
StringPtr String::ConcatAll(const Array& strings, Heap::Space space) {
  return ConcatAllRange(strings, 0, strings.Length(), space);
}
24130
24131StringPtr String::ConcatAllRange(const Array& strings,
24132 intptr_t start,
24133 intptr_t end,
24134 Heap::Space space) {
24135 ASSERT(!strings.IsNull());
24136 ASSERT(start >= 0);
24137 ASSERT(end <= strings.Length());
24138 intptr_t result_len = 0;
24139 String& str = String::Handle();
24140 intptr_t char_size = kOneByteChar;
24141 // Compute 'char_size' and 'result_len'.
24142 for (intptr_t i = start; i < end; i++) {
24143 str ^= strings.At(i);
24144 const intptr_t str_len = str.Length();
24145 if ((kMaxElements - result_len) < str_len) {
24147 UNREACHABLE();
24148 }
24149 result_len += str_len;
24150 char_size = Utils::Maximum(char_size, str.CharSize());
24151 }
24152 if (char_size == kOneByteChar) {
24153 return OneByteString::ConcatAll(strings, start, end, result_len, space);
24154 }
24155 ASSERT(char_size == kTwoByteChar);
24156 return TwoByteString::ConcatAll(strings, start, end, result_len, space);
24157}
24158
24159StringPtr String::SubString(const String& str,
24160 intptr_t begin_index,
24161 Heap::Space space) {
24162 ASSERT(!str.IsNull());
24163 if (begin_index >= str.Length()) {
24164 return String::null();
24165 }
24166 return String::SubString(str, begin_index, (str.Length() - begin_index),
24167 space);
24168}
24169
// Returns str[begin_index, begin_index + length) allocated in |space|.
// A two-byte source is narrowed to a one-byte result when the requested
// range contains only Latin-1 code units.
StringPtr String::SubString(Thread* thread,
                            const String& str,
                            intptr_t begin_index,
                            intptr_t length,
                            Heap::Space space) {
  ASSERT(!str.IsNull());
  ASSERT(begin_index >= 0);
  ASSERT(length >= 0);
  // An empty in-range slice maps to the canonical empty symbol.
  if (begin_index <= str.Length() && length == 0) {
    return Symbols::Empty().ptr();
  }
  if (begin_index > str.Length()) {
    return String::null();
  }
  bool is_one_byte_string = true;
  intptr_t char_size = str.CharSize();
  if (char_size == kTwoByteChar) {
    // Scan the slice; any non-Latin-1 code unit forces a two-byte result.
    for (intptr_t i = begin_index; i < begin_index + length; ++i) {
      if (!Utf::IsLatin1(str.CharAt(i))) {
        is_one_byte_string = false;
        break;
      }
    }
  }
  String& result = thread->StringHandle();
  if (is_one_byte_string) {
    // NOTE(review): result allocation elided in this view — presumably a
    // OneByteString of |length|; confirm against upstream source.
  } else {
    // NOTE(review): result allocation elided in this view — presumably a
    // TwoByteString of |length|; confirm against upstream source.
  }
  String::Copy(result, 0, str, begin_index, length);
  return result.ptr();
}
24204
24205const char* String::ToCString() const {
24206 if (IsNull()) {
24207 return "String: null";
24208 }
24209 const intptr_t len = Utf8::Length(*this);
24210 Zone* zone = Thread::Current()->zone();
24211 uint8_t* result = zone->Alloc<uint8_t>(len + 1);
24212 ToUTF8(result, len);
24213 result[len] = 0;
24214 return reinterpret_cast<const char*>(result);
24215}
24216
24218 const intptr_t len = Utf8::Length(*this);
24219 uint8_t* result = reinterpret_cast<uint8_t*>(malloc(len + 1));
24220 ToUTF8(result, len);
24221 result[len] = 0;
24222 return reinterpret_cast<char*>(result);
24223}
24224
// Encodes this string as UTF-8 into |utf8_array|, which must hold at least
// Utf8::Length(*this) bytes.  No NUL terminator is written.
void String::ToUTF8(uint8_t* utf8_array, intptr_t array_len) const {
  ASSERT(array_len >= Utf8::Length(*this));
  Utf8::Encode(*this, reinterpret_cast<char*>(utf8_array), array_len);
}
24229
// Static convenience overload: renders |ptr| via a reusable handle owned by
// |thread|; a null pointer maps to nullptr rather than "String: null".
const char* String::ToCString(Thread* thread, StringPtr ptr) {
  if (ptr == nullptr) return nullptr;
  // NOTE(review): the reusable-handle scope macro line is elided in this view;
  // |reused_string_handle| is introduced by it.
  String& str = reused_string_handle.Handle();
  str = ptr;
  return str.ToCString();
}
24237
24239 void* peer,
24241 intptr_t external_size) {
24242 ASSERT(callback != nullptr);
24243 FinalizablePersistentHandle* finalizable_ref =
24245 callback, external_size,
24246 /*auto_delete=*/true);
24247 ASSERT(finalizable_ref != nullptr);
24248 return finalizable_ref;
24249}
24250
24251StringPtr String::Transform(int32_t (*mapping)(int32_t ch),
24252 const String& str,
24253 Heap::Space space) {
24254 ASSERT(!str.IsNull());
24255 bool has_mapping = false;
24256 int32_t dst_max = 0;
24257 CodePointIterator it(str);
24258 while (it.Next()) {
24259 int32_t src = it.Current();
24260 int32_t dst = mapping(src);
24261 if (src != dst) {
24262 has_mapping = true;
24263 }
24264 dst_max = Utils::Maximum(dst_max, dst);
24265 }
24266 if (!has_mapping) {
24267 return str.ptr();
24268 }
24269 if (Utf::IsLatin1(dst_max)) {
24270 return OneByteString::Transform(mapping, str, space);
24271 }
24272 ASSERT(Utf::IsBmp(dst_max) || Utf::IsSupplementary(dst_max));
24273 return TwoByteString::Transform(mapping, str, space);
24274}
24275
// Returns |str| upper-cased via the generic code-point transform; the
// original is shared when no character changes.
StringPtr String::ToUpperCase(const String& str, Heap::Space space) {
  // TODO(cshapiro): create a fast-path for OneByteString instances.
  return Transform(CaseMapping::ToUpper, str, space);
}
24280
// Returns |str| lower-cased via the generic code-point transform; the
// original is shared when no character changes.
StringPtr String::ToLowerCase(const String& str, Heap::Space space) {
  // TODO(cshapiro): create a fast-path for OneByteString instances.
  return Transform(CaseMapping::ToLower, str, space);
}
24285
24287 intptr_t start,
24288 intptr_t end,
24289 double* result) {
24290 ASSERT(0 <= start);
24291 ASSERT(start <= end);
24292 ASSERT(end <= str.Length());
24293 intptr_t length = end - start;
24294 NoSafepointScope no_safepoint;
24295 const uint8_t* startChar;
24296 if (str.IsOneByteString()) {
24297 startChar = OneByteString::CharAddr(str, start);
24298 } else {
24299 uint8_t* chars = Thread::Current()->zone()->Alloc<uint8_t>(length);
24300 for (intptr_t i = 0; i < length; i++) {
24301 int32_t ch = str.CharAt(start + i);
24302 if (ch < 128) {
24303 chars[i] = ch;
24304 } else {
24305 return false; // Not ASCII, so definitely not valid double numeral.
24306 }
24307 }
24308 startChar = chars;
24309 }
24310 return CStringToDouble(reinterpret_cast<const char*>(startChar), length,
24311 result);
24312}
24313
// Check to see if 'str1' matches 'str2' as is or
// once the private key separator is stripped from str2.
//
// Things are made more complicated by the fact that constructors are
// added *after* the private suffix, so "foo@123.named" should match
// "foo.named".
//
// Also, the private suffix can occur more than once in the name, as in:
//
//    _ReceivePortImpl@6be832b._internal@6be832b
//
// T1/T2 select the concrete CharAt accessor (OneByteString/TwoByteString)
// for each operand so mixed-width comparisons avoid virtual dispatch.
template <typename T1, typename T2>
static bool EqualsIgnoringPrivateKey(const String& str1, const String& str2) {
  intptr_t len = str1.Length();
  intptr_t str2_len = str2.Length();
  if (len == str2_len) {
    // Equal lengths: no private suffix can be present, compare directly.
    for (intptr_t i = 0; i < len; i++) {
      if (T1::CharAt(str1, i) != T2::CharAt(str2, i)) {
        return false;
      }
    }
    return true;
  }
  if (len < str2_len) {
    return false;  // No way they can match.
  }
  intptr_t pos = 0;
  intptr_t str2_pos = 0;
  while (pos < len) {
    int32_t ch = T1::CharAt(str1, pos);
    pos++;

    if ((str2_pos < str2_len) && (ch == T2::CharAt(str2, str2_pos))) {
      str2_pos++;
      continue;
    }

    // NOTE(review): the separator check guarding this branch is elided in
    // this view; the skip below consumes a private-key suffix in str1.
      // Consume a private key separator if str1 has it but str2 does not.
      while ((pos < len) && (T1::CharAt(str1, pos) != '.') &&
             (T1::CharAt(str1, pos) != '&')) {
        pos++;
      }
      // Resume matching characters.
      continue;
    }

    return false;
  }

  // We have reached the end of mangled_name string.
  ASSERT(pos == len);
  return (str2_pos == str2_len);
}
24368
24369#define EQUALS_IGNORING_PRIVATE_KEY(class_id, type, str1, str2) \
24370 switch (class_id) { \
24371 case kOneByteStringCid: \
24372 return dart::EqualsIgnoringPrivateKey<type, OneByteString>(str1, str2); \
24373 case kTwoByteStringCid: \
24374 return dart::EqualsIgnoringPrivateKey<type, TwoByteString>(str1, str2); \
24375 } \
24376 UNREACHABLE();
24377
// Compares two strings for equality, ignoring any private-key suffixes
// ("@<key>") present in |str1|.  Dispatches on the concrete class ids of
// both operands via the EQUALS_IGNORING_PRIVATE_KEY macro so the templated
// comparator uses the right CharAt accessor for each.
bool String::EqualsIgnoringPrivateKey(const String& str1, const String& str2) {
  if (str1.ptr() == str2.ptr()) {
    return true;  // Both handles point to the same raw instance.
  }
  // Raw class ids are read below; no allocation may intervene.
  NoSafepointScope no_safepoint;
  intptr_t str1_class_id = str1.ptr()->GetClassId();
  intptr_t str2_class_id = str2.ptr()->GetClassId();
  switch (str1_class_id) {
    case kOneByteStringCid:
      EQUALS_IGNORING_PRIVATE_KEY(str2_class_id, OneByteString, str1, str2);
      break;
    case kTwoByteStringCid:
      EQUALS_IGNORING_PRIVATE_KEY(str2_class_id, TwoByteString, str1, str2);
      break;
  }
  // All string class-id combinations return from the macro above.
  UNREACHABLE();
  return false;
}
24396
24398 ASSERT(index_ >= -1);
24399 intptr_t length = Utf16::Length(ch_);
24400 if (index_ < (end_ - length)) {
24401 index_ += length;
24402 ch_ = str_.CharAt(index_);
24403 if (Utf16::IsLeadSurrogate(ch_) && (index_ < (end_ - 1))) {
24404 int32_t ch2 = str_.CharAt(index_ + 1);
24405 if (Utf16::IsTrailSurrogate(ch2)) {
24406 ch_ = Utf16::Decode(ch_, ch2);
24407 }
24408 }
24409 return true;
24410 }
24411 index_ = end_;
24412 return false;
24413}
24414
24416 intptr_t len = str.Length();
24417 if (len > 0) {
24418 intptr_t num_escapes = 0;
24419 for (intptr_t i = 0; i < len; i++) {
24420 num_escapes += EscapeOverhead(CharAt(str, i));
24421 }
24422 const String& dststr =
24423 String::Handle(OneByteString::New(len + num_escapes, Heap::kNew));
24424 intptr_t index = 0;
24425 for (intptr_t i = 0; i < len; i++) {
24426 uint8_t ch = CharAt(str, i);
24427 if (IsSpecialCharacter(ch)) {
24428 SetCharAt(dststr, index, '\\');
24429 SetCharAt(dststr, index + 1, SpecialCharacter(ch));
24430 index += 2;
24431 } else if (IsAsciiNonprintable(ch)) {
24432 SetCharAt(dststr, index, '\\');
24433 SetCharAt(dststr, index + 1, 'x');
24434 SetCharAt(dststr, index + 2, GetHexCharacter(ch >> 4));
24435 SetCharAt(dststr, index + 3, GetHexCharacter(ch & 0xF));
24436 index += 4;
24437 } else {
24438 SetCharAt(dststr, index, ch);
24439 index += 1;
24440 }
24441 }
24442 return OneByteString::raw(dststr);
24443 }
24444 return OneByteString::raw(Symbols::Empty());
24445}
24446
// Allocates an uninitialized OneByteString of |len| characters in |space|.
// Only the header, length and hash are set; the character payload is left
// for the caller to fill (the rounded-up tail is zeroed for the GC).
OneByteStringPtr OneByteString::New(intptr_t len, Heap::Space space) {
      // NOTE(review): the opening ASSERT( line is elided in this view.
      ((IsolateGroup::Current()->object_store() != nullptr) &&
       (IsolateGroup::Current()->object_store()->one_byte_string_class() !=
        Class::null())));
  if (len < 0 || len > kMaxElements) {
    // This should be caught before we reach here.
    FATAL("Fatal error in OneByteString::New: invalid len %" Pd "\n", len);
  }
  auto result = Object::Allocate<OneByteString>(space, len);
  // The object is in a partially-initialized state until the header fields
  // below are written; no safepoint may intervene.
  NoSafepointScope no_safepoint;
  result->untag()->set_length(Smi::New(len));
#if !defined(HASH_IN_OBJECT_HEADER)
  result->untag()->set_hash(Smi::New(0));
#endif
  intptr_t size = OneByteString::UnroundedSize(result);
  ASSERT(size <= result->untag()->HeapSize());
  // Zero the padding between the actual payload and the rounded-up heap size.
  memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(result) + size), 0,
         result->untag()->HeapSize() - size);
  return result;
}
24468
24469OneByteStringPtr OneByteString::New(const uint8_t* characters,
24470 intptr_t len,
24471 Heap::Space space) {
24472 const String& result = String::Handle(OneByteString::New(len, space));
24473 if (len > 0) {
24474 NoSafepointScope no_safepoint;
24475 memmove(DataStart(result), characters, len);
24476 }
24477 return OneByteString::raw(result);
24478}
24479
24480OneByteStringPtr OneByteString::New(const uint16_t* characters,
24481 intptr_t len,
24482 Heap::Space space) {
24483 const String& result = String::Handle(OneByteString::New(len, space));
24484 NoSafepointScope no_safepoint;
24485 for (intptr_t i = 0; i < len; ++i) {
24486 ASSERT(Utf::IsLatin1(characters[i]));
24487 *CharAddr(result, i) = characters[i];
24488 }
24489 return OneByteString::raw(result);
24490}
24491
24492OneByteStringPtr OneByteString::New(const int32_t* characters,
24493 intptr_t len,
24494 Heap::Space space) {
24495 const String& result = String::Handle(OneByteString::New(len, space));
24496 NoSafepointScope no_safepoint;
24497 for (intptr_t i = 0; i < len; ++i) {
24498 ASSERT(Utf::IsLatin1(characters[i]));
24499 *CharAddr(result, i) = characters[i];
24500 }
24501 return OneByteString::raw(result);
24502}
24503
24504OneByteStringPtr OneByteString::New(const String& str, Heap::Space space) {
24505 intptr_t len = str.Length();
24506 const String& result = String::Handle(OneByteString::New(len, space));
24507 String::Copy(result, 0, str, 0, len);
24508 return OneByteString::raw(result);
24509}
24510
24511OneByteStringPtr OneByteString::New(const String& other_one_byte_string,
24512 intptr_t other_start_index,
24513 intptr_t other_len,
24514 Heap::Space space) {
24515 const String& result = String::Handle(OneByteString::New(other_len, space));
24516 ASSERT(other_one_byte_string.IsOneByteString());
24517 if (other_len > 0) {
24518 NoSafepointScope no_safepoint;
24519 memmove(OneByteString::DataStart(result),
24520 OneByteString::CharAddr(other_one_byte_string, other_start_index),
24521 other_len);
24522 }
24523 return OneByteString::raw(result);
24524}
24525
24526OneByteStringPtr OneByteString::New(const TypedDataBase& other_typed_data,
24527 intptr_t other_start_index,
24528 intptr_t other_len,
24529 Heap::Space space) {
24530 const String& result = String::Handle(OneByteString::New(other_len, space));
24531 ASSERT(other_typed_data.ElementSizeInBytes() == 1);
24532 if (other_len > 0) {
24533 NoSafepointScope no_safepoint;
24534 memmove(OneByteString::DataStart(result),
24535 other_typed_data.DataAddr(other_start_index), other_len);
24536 }
24537 return OneByteString::raw(result);
24538}
24539
24540OneByteStringPtr OneByteString::Concat(const String& str1,
24541 const String& str2,
24542 Heap::Space space) {
24543 intptr_t len1 = str1.Length();
24544 intptr_t len2 = str2.Length();
24545 intptr_t len = len1 + len2;
24546 const String& result = String::Handle(OneByteString::New(len, space));
24547 String::Copy(result, 0, str1, 0, len1);
24548 String::Copy(result, len1, str2, 0, len2);
24549 return OneByteString::raw(result);
24550}
24551
24552OneByteStringPtr OneByteString::ConcatAll(const Array& strings,
24553 intptr_t start,
24554 intptr_t end,
24555 intptr_t len,
24556 Heap::Space space) {
24557 ASSERT(!strings.IsNull());
24558 ASSERT(start >= 0);
24559 ASSERT(end <= strings.Length());
24560 const String& result = String::Handle(OneByteString::New(len, space));
24561 String& str = String::Handle();
24562 intptr_t pos = 0;
24563 for (intptr_t i = start; i < end; i++) {
24564 str ^= strings.At(i);
24565 const intptr_t str_len = str.Length();
24566 String::Copy(result, pos, str, 0, str_len);
24567 ASSERT((kMaxElements - pos) >= str_len);
24568 pos += str_len;
24569 }
24570 return OneByteString::raw(result);
24571}
24572
24573OneByteStringPtr OneByteString::Transform(int32_t (*mapping)(int32_t ch),
24574 const String& str,
24575 Heap::Space space) {
24576 ASSERT(!str.IsNull());
24577 intptr_t len = str.Length();
24578 const String& result = String::Handle(OneByteString::New(len, space));
24579 NoSafepointScope no_safepoint;
24580 for (intptr_t i = 0; i < len; ++i) {
24581 int32_t ch = mapping(str.CharAt(i));
24582 ASSERT(Utf::IsLatin1(ch));
24583 *CharAddr(result, i) = ch;
24584 }
24585 return OneByteString::raw(result);
24586}
24587
// Copies str[begin_index, begin_index + length) into a new OneByteString
// without re-validating the range (callers must guarantee it is in bounds).
OneByteStringPtr OneByteString::SubStringUnchecked(const String& str,
                                                   intptr_t begin_index,
                                                   intptr_t length,
                                                   Heap::Space space) {
  ASSERT(!str.IsNull() && str.IsOneByteString());
  ASSERT(begin_index >= 0);
  ASSERT(length >= 0);
  // An empty in-range slice maps to the canonical empty symbol.
  if (begin_index <= str.Length() && length == 0) {
    return OneByteString::raw(Symbols::Empty());
  }
  ASSERT(begin_index < str.Length());
  OneByteStringPtr result = OneByteString::New(length, space);
  // Raw pointers into both strings are held below; no allocation may
  // intervene between taking them and the memmove.
  NoSafepointScope no_safepoint;
  if (length > 0) {
    uint8_t* dest = &result->untag()->data()[0];
    const uint8_t* src = &untag(str)->data()[begin_index];
    memmove(dest, src, length);
  }
  return result;
}
24608
24610 intptr_t len = str.Length();
24611 if (len > 0) {
24612 intptr_t num_escapes = 0;
24613 for (intptr_t i = 0; i < len; i++) {
24614 num_escapes += EscapeOverhead(CharAt(str, i));
24615 }
24616 const String& dststr =
24617 String::Handle(TwoByteString::New(len + num_escapes, Heap::kNew));
24618 intptr_t index = 0;
24619 for (intptr_t i = 0; i < len; i++) {
24620 uint16_t ch = CharAt(str, i);
24621 if (IsSpecialCharacter(ch)) {
24622 SetCharAt(dststr, index, '\\');
24623 SetCharAt(dststr, index + 1, SpecialCharacter(ch));
24624 index += 2;
24625 } else if (IsAsciiNonprintable(ch)) {
24626 SetCharAt(dststr, index, '\\');
24627 SetCharAt(dststr, index + 1, 'x');
24628 SetCharAt(dststr, index + 2, GetHexCharacter(ch >> 4));
24629 SetCharAt(dststr, index + 3, GetHexCharacter(ch & 0xF));
24630 index += 4;
24631 } else {
24632 SetCharAt(dststr, index, ch);
24633 index += 1;
24634 }
24635 }
24636 return TwoByteString::raw(dststr);
24637 }
24638 return TwoByteString::New(0, Heap::kNew);
24639}
24640
// Allocates an uninitialized TwoByteString of |len| code units in |space|.
// Only the header, length and hash are set; the payload is left for the
// caller to fill (the rounded-up tail is zeroed for the GC).
TwoByteStringPtr TwoByteString::New(intptr_t len, Heap::Space space) {
  ASSERT(IsolateGroup::Current()->object_store()->two_byte_string_class() !=
         nullptr);
  if (len < 0 || len > kMaxElements) {
    // This should be caught before we reach here.
    FATAL("Fatal error in TwoByteString::New: invalid len %" Pd "\n", len);
  }
  auto s = Object::Allocate<TwoByteString>(space, len);
  // The object is partially initialized until the fields below are written;
  // no safepoint may intervene.
  NoSafepointScope no_safepoint;
  s->untag()->set_length(Smi::New(len));
#if !defined(HASH_IN_OBJECT_HEADER)
  s->untag()->set_hash(Smi::New(0));
#endif
  intptr_t size = TwoByteString::UnroundedSize(s);
  ASSERT(size <= s->untag()->HeapSize());
  // Zero the padding between the payload and the rounded-up heap size.
  memset(reinterpret_cast<void*>(UntaggedObject::ToAddr(s) + size), 0,
         s->untag()->HeapSize() - size);
  return s;
}
24660
24661TwoByteStringPtr TwoByteString::New(const uint16_t* utf16_array,
24662 intptr_t array_len,
24663 Heap::Space space) {
24664 ASSERT(array_len > 0);
24665 const String& result = String::Handle(TwoByteString::New(array_len, space));
24666 {
24667 NoSafepointScope no_safepoint;
24668 memmove(reinterpret_cast<void*>(DataStart(result)),
24669 reinterpret_cast<const void*>(utf16_array), (array_len * 2));
24670 }
24671 return TwoByteString::raw(result);
24672}
24673
// Creates a TwoByteString of |utf16_len| code units from |array_len| UTF-32
// code points; supplementary-plane code points expand to surrogate pairs.
TwoByteStringPtr TwoByteString::New(intptr_t utf16_len,
                                    const int32_t* utf32_array,
                                    intptr_t array_len,
                                    Heap::Space space) {
  // utf16_len may exceed array_len because of surrogate-pair expansion.
  ASSERT((array_len > 0) && (utf16_len >= array_len));
  const String& result = String::Handle(TwoByteString::New(utf16_len, space));
  {
    NoSafepointScope no_safepoint;
    intptr_t j = 0;  // Write cursor in UTF-16 code units.
    for (intptr_t i = 0; i < array_len; ++i) {
      if (Utf::IsSupplementary(utf32_array[i])) {
        // Needs a surrogate pair: two code units.
        ASSERT(j < (utf16_len - 1));
        Utf16::Encode(utf32_array[i], CharAddr(result, j));
        j += 2;
      } else {
        ASSERT(j < utf16_len);
        *CharAddr(result, j) = utf32_array[i];
        j += 1;
      }
    }
  }
  return TwoByteString::raw(result);
}
24697
24698TwoByteStringPtr TwoByteString::New(const String& str, Heap::Space space) {
24699 intptr_t len = str.Length();
24700 const String& result = String::Handle(TwoByteString::New(len, space));
24701 String::Copy(result, 0, str, 0, len);
24702 return TwoByteString::raw(result);
24703}
24704
24705TwoByteStringPtr TwoByteString::New(const TypedDataBase& other_typed_data,
24706 intptr_t other_start_index,
24707 intptr_t other_len,
24708 Heap::Space space) {
24709 const String& result = String::Handle(TwoByteString::New(other_len, space));
24710 if (other_len > 0) {
24711 NoSafepointScope no_safepoint;
24712 memmove(TwoByteString::DataStart(result),
24713 other_typed_data.DataAddr(other_start_index),
24714 other_len * sizeof(uint16_t));
24715 }
24716 return TwoByteString::raw(result);
24717}
24718
24719TwoByteStringPtr TwoByteString::Concat(const String& str1,
24720 const String& str2,
24721 Heap::Space space) {
24722 intptr_t len1 = str1.Length();
24723 intptr_t len2 = str2.Length();
24724 intptr_t len = len1 + len2;
24725 const String& result = String::Handle(TwoByteString::New(len, space));
24726 String::Copy(result, 0, str1, 0, len1);
24727 String::Copy(result, len1, str2, 0, len2);
24728 return TwoByteString::raw(result);
24729}
24730
24731TwoByteStringPtr TwoByteString::ConcatAll(const Array& strings,
24732 intptr_t start,
24733 intptr_t end,
24734 intptr_t len,
24735 Heap::Space space) {
24736 ASSERT(!strings.IsNull());
24737 ASSERT(start >= 0);
24738 ASSERT(end <= strings.Length());
24739 const String& result = String::Handle(TwoByteString::New(len, space));
24740 String& str = String::Handle();
24741 intptr_t pos = 0;
24742 for (intptr_t i = start; i < end; i++) {
24743 str ^= strings.At(i);
24744 const intptr_t str_len = str.Length();
24745 String::Copy(result, pos, str, 0, str_len);
24746 ASSERT((kMaxElements - pos) >= str_len);
24747 pos += str_len;
24748 }
24749 return TwoByteString::raw(result);
24750}
24751
// Applies |mapping| code point by code point, writing one or two UTF-16
// code units per result code point.
TwoByteStringPtr TwoByteString::Transform(int32_t (*mapping)(int32_t ch),
                                          const String& str,
                                          Heap::Space space) {
  ASSERT(!str.IsNull());
  intptr_t len = str.Length();
  const String& result = String::Handle(TwoByteString::New(len, space));
  // NOTE(review): the declaration of the code-point iterator |it| is elided
  // in this view.
  intptr_t i = 0;
  NoSafepointScope no_safepoint;
  while (it.Next()) {
    int32_t src = it.Current();
    int32_t dst = mapping(src);
    ASSERT(dst >= 0 && dst <= 0x10FFFF);
    // NOTE(review): this inner |len| (code units for one code point) shadows
    // the outer string length above.
    intptr_t len = Utf16::Length(dst);
    if (len == 1) {
      *CharAddr(result, i) = dst;
    } else {
      ASSERT(len == 2);
      // Supplementary code point: encode as a surrogate pair.
      Utf16::Encode(dst, CharAddr(result, i));
    }
    i += len;
  }
  return TwoByteString::raw(result);
}
24776
24777const char* Bool::ToCString() const {
24778 return value() ? "true" : "false";
24779}
24780
// Structural equality used for canonicalization: same length, identical
// elements (by raw pointer), and equal type arguments.
bool Array::CanonicalizeEquals(const Instance& other) const {
  if (this->ptr() == other.ptr()) {
    // Both handles point to the same raw instance.
    return true;
  }

  // An Array may be compared to an ImmutableArray.
  if (!other.IsArray() || other.IsNull()) {
    return false;
  }

  // First check if both arrays have the same length and elements.
  const Array& other_arr = Array::Cast(other);

  intptr_t len = this->Length();
  if (len != other_arr.Length()) {
    return false;
  }

  for (intptr_t i = 0; i < len; i++) {
    // Raw-pointer comparison: elements must be canonical for this to imply
    // structural equality.
    if (this->At(i) != other_arr.At(i)) {
      return false;
    }
  }

  // Now check if both arrays have the same type arguments.
  if (GetTypeArguments() == other.GetTypeArguments()) {
    return true;
  }
  // NOTE(review): the handle declarations for type_args/other_type_args are
  // elided in this view.
  const TypeArguments& other_type_args =
  if (!type_args.Equals(other_type_args)) {
    return false;
  }
  return true;
}
24818
// Computes (and caches in the heap's side table) a structural hash for
// canonicalization; an empty array hashes to 1.
uint32_t Array::CanonicalizeHash() const {
  intptr_t len = Length();
  if (len == 0) {
    return 1;
  }
  Thread* thread = Thread::Current();
  // Reuse a previously computed hash when present (0 means "not cached").
  uint32_t hash = thread->heap()->GetCanonicalHash(ptr());
  if (hash != 0) {
    return hash;
  }
  hash = len;
  // NOTE(review): the |member| handle declaration and the per-element hash
  // combining/finalizing lines are elided in this view.
  for (intptr_t i = 0; i < len; i++) {
    member ^= At(i);
  }
  thread->heap()->SetCanonicalHash(ptr(), hash);
  return hash;
}
24840
// Allocates an Array of |len| elements typed as List<element_type>;
// a dynamic element type leaves the type arguments unset.
ArrayPtr Array::New(intptr_t len,
                    const AbstractType& element_type,
                    Heap::Space space) {
  const Array& result = Array::Handle(Array::New(len, space));
  if (!element_type.IsDynamicType()) {
    // NOTE(review): the |type_args| handle declaration is elided in this view.
    type_args.SetTypeAt(0, element_type);
    // Canonicalize so identical element types share one vector.
    type_args = type_args.Canonicalize(Thread::Current());
    result.SetTypeArguments(type_args);
  }
  return result.ptr();
}
24853
// Allocates an Array (or a variant class id, e.g. ImmutableArray) of |len|
// slots with only the length header set; elements are NOT initialized.
ArrayPtr Array::NewUninitialized(intptr_t class_id,
                                 intptr_t len,
                                 Heap::Space space) {
  if (!IsValidLength(len)) {
    // This should be caught before we reach here.
    FATAL("Fatal error in Array::New: invalid len %" Pd "\n", len);
  }
  auto raw = Object::AllocateVariant<Array>(class_id, space, len);
  // Header writes below must complete without an intervening safepoint.
  NoSafepointScope no_safepoint;
  raw->untag()->set_length(Smi::New(len));
  if (UseCardMarkingForAllocation(len)) {
    // Large arrays are swept with card tables instead of remembered sets.
    ASSERT(raw->IsOldObject());
    raw->untag()->SetCardRememberedBitUnsynchronized();
  }
  return raw;
}
24870
24871ArrayPtr Array::New(intptr_t class_id, intptr_t len, Heap::Space space) {
24872 if (!UseCardMarkingForAllocation(len)) {
24873 return NewUninitialized(class_id, len, space);
24874 }
24875
24876 Thread* thread = Thread::Current();
24877 Array& result =
24878 Array::Handle(thread->zone(), NewUninitialized(class_id, len, space));
24879 result.SetTypeArguments(Object::null_type_arguments());
24880 for (intptr_t i = 0; i < len; i++) {
24881 result.SetAt(i, Object::null_object(), thread);
24882 if (((i + 1) % KB) == 0) {
24883 thread->CheckForSafepoint();
24884 }
24885 }
24886 return result.ptr();
24887}
24888
24889ArrayPtr Array::Slice(intptr_t start,
24890 intptr_t count,
24891 bool with_type_argument) const {
24892 Thread* thread = Thread::Current();
24893 Zone* zone = thread->zone();
24894 const Array& dest = Array::Handle(zone, Array::NewUninitialized(count));
24895 if (with_type_argument) {
24896 dest.SetTypeArguments(TypeArguments::Handle(zone, GetTypeArguments()));
24897 } else {
24898 dest.SetTypeArguments(Object::null_type_arguments());
24899 }
24900 if (!UseCardMarkingForAllocation(count)) {
24901 NoSafepointScope no_safepoint(thread);
24902 for (int i = 0; i < count; i++) {
24903 dest.untag()->set_element(i, untag()->element(i + start), thread);
24904 }
24905 } else {
24906 for (int i = 0; i < count; i++) {
24907 dest.untag()->set_element(i, untag()->element(i + start), thread);
24908 if (((i + 1) % KB) == 0) {
24909 thread->CheckForSafepoint();
24910 }
24911 }
24912 }
24913 return dest.ptr();
24914}
24915
24917 if (IsImmutable()) return;
24918 ASSERT(!IsCanonical());
24919 untag()->SetClassId(kImmutableArrayCid);
24920}
24921
24922const char* Array::ToCString() const {
24923 if (IsNull()) {
24924 return IsImmutable() ? "_ImmutableList nullptr" : "_List nullptr";
24925 }
24926 Zone* zone = Thread::Current()->zone();
24927 const char* format =
24928 IsImmutable() ? "_ImmutableList len:%" Pd : "_List len:%" Pd;
24929 return zone->PrintToString(format, Length());
24930}
24931
// Returns a new array of |new_length| slots containing |source|'s elements
// followed by nulls.  |source| may be null (treated as empty).
ArrayPtr Array::Grow(const Array& source,
                     intptr_t new_length,
                     Heap::Space space) {
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Array& result =
      Array::Handle(zone, Array::NewUninitialized(new_length, space));
  intptr_t old_length = 0;
  if (!source.IsNull()) {
    old_length = source.Length();
    result.SetTypeArguments(
        TypeArguments::Handle(zone, source.GetTypeArguments()));
  } else {
    result.SetTypeArguments(Object::null_type_arguments());
  }
  ASSERT(new_length > old_length);  // Unnecessary copying of array.
  if (!UseCardMarkingForAllocation(new_length)) {
    // Small array: NewUninitialized left the tail nulled, so only copy.
    NoSafepointScope no_safepoint;
    for (intptr_t i = 0; i < old_length; i++) {
      result.untag()->set_element(i, source.untag()->element(i), thread);
    }
    for (intptr_t i = old_length; i < new_length; i++) {
      ASSERT(result.untag()->element(i) == Object::null());
    }
  } else {
    // Large (card-marked) array: copy and null explicitly, yielding to the
    // GC every KB elements.
    for (intptr_t i = 0; i < old_length; i++) {
      result.untag()->set_element(i, source.untag()->element(i), thread);
      if (((i + 1) % KB) == 0) {
        thread->CheckForSafepoint();
      }
    }
    for (intptr_t i = old_length; i < new_length; i++) {
      result.untag()->set_element(i, Object::null(), thread);
      if (((i + 1) % KB) == 0) {
        thread->CheckForSafepoint();
      }
    }
  }
  return result.ptr();
}
24972
// Shrinks this array in place to |new_len| elements, converting the freed
// tail into a traversable filler object and atomically updating the size
// bits in the header for the concurrent sweeper.
void Array::Truncate(intptr_t new_len) const {
  if (IsNull()) {
    return;
  }
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  const Array& array = Array::Handle(zone, this->ptr());

  intptr_t old_len = array.Length();
  ASSERT(new_len <= old_len);
  if (old_len == new_len) {
    return;  // Nothing to truncate.
  }
  intptr_t old_size = Array::InstanceSize(old_len);
  intptr_t new_size = Array::InstanceSize(new_len);

  NoSafepointScope no_safepoint;

  // If there is any left over space fill it with either an Array object or
  // just a plain object (depending on the amount of left over space) so
  // that it can be traversed over successfully during garbage collection.
  Object::MakeUnusedSpaceTraversable(array, old_size, new_size);

  // Update the size in the header field and length of the array object.
  // These release operations are balanced by acquire operations in the
  // concurrent sweeper.
  uword old_tags = array.untag()->tags_;
  uword new_tags;
  ASSERT(kArrayCid == UntaggedObject::ClassIdTag::decode(old_tags));
  do {
    new_tags = UntaggedObject::SizeTag::update(new_size, old_tags);
  } while (!array.untag()->tags_.compare_exchange_weak(
      old_tags, new_tags, std::memory_order_release));

  // Between the CAS of the header above and the SetLength below, the array is
  // temporarily in an inconsistent state. The header is considered the
  // overriding source of object size by UntaggedObject::HeapSize, but the
  // ASSERTs in UntaggedObject::HeapSizeFromClass must handle this special case.
  array.SetLengthRelease(new_len);
}
25013
// Steals the backing store of |growable_array|, truncates it to the used
// length and returns it as a fixed-length Array.  The growable array is
// reset to empty.  |unique| forces a fresh array even for the empty case.
ArrayPtr Array::MakeFixedLength(const GrowableObjectArray& growable_array,
                                bool unique) {
  ASSERT(!growable_array.IsNull());
  Thread* thread = Thread::Current();
  Zone* zone = thread->zone();
  intptr_t used_len = growable_array.Length();
  // Get the type arguments and prepare to copy them.
  const TypeArguments& type_arguments =
      TypeArguments::Handle(growable_array.GetTypeArguments());
  if (used_len == 0) {
    if (type_arguments.IsNull() && !unique) {
      // This is a raw List (as in no type arguments), so we can return the
      // simple empty array.
      return Object::empty_array().ptr();
    }

    // The backing array may be a shared instance, or may not have correct
    // type parameters. Create a new empty array.
    // NOTE(review): the line computing |space| is elided in this view.
    Array& array = Array::Handle(zone, Array::New(0, space));
    array.SetTypeArguments(type_arguments);
    return array.ptr();
  }
  const Array& array = Array::Handle(zone, growable_array.data());
  ASSERT(array.IsArray());
  array.SetTypeArguments(type_arguments);

  // Null the GrowableObjectArray, we are removing its backing array.
  growable_array.SetLength(0);
  growable_array.SetData(Object::empty_array());

  // Truncate the old backing array and return it.
  array.Truncate(used_len);
  return array.ptr();
}
25049
25052 intptr_t len = Length();
25053 if (len > 0) {
25054 Zone* zone = thread->zone();
25055 Instance& obj = Instance::Handle(zone);
25056 for (intptr_t i = 0; i < len; i++) {
25057 obj ^= At(i);
25058 obj = obj.CanonicalizeLocked(thread);
25059 this->SetAt(i, obj);
25060 }
25061 }
25062}
25063
25064ImmutableArrayPtr ImmutableArray::New(intptr_t len, Heap::Space space) {
25065 ASSERT(IsolateGroup::Current()->object_store()->immutable_array_class() !=
25066 Class::null());
25067 return static_cast<ImmutableArrayPtr>(Array::New(kClassId, len, space));
25068}
25069
// Appends |value|, growing the backing array first when it is full.
void GrowableObjectArray::Add(const Object& value, Heap::Space space) const {
  ASSERT(!IsNull());
  if (Length() == Capacity()) {
    // Grow from 0 to 3, and then double + 1.
    intptr_t new_capacity = (Capacity() * 2) | 3;
    // Overflow of the doubling computation.
    if (new_capacity <= Capacity()) {
      UNREACHABLE();
    }
    Grow(new_capacity, space);
  }
  ASSERT(Length() < Capacity());
  intptr_t index = Length();
  // Bump the length before the store so the slot is in range.
  SetLength(index + 1);
  SetAt(index, value);
}
25086
25087void GrowableObjectArray::Grow(intptr_t new_capacity, Heap::Space space) const {
25088 ASSERT(new_capacity > Capacity());
25089 const Array& contents = Array::Handle(data());
25090 const Array& new_contents =
25091 Array::Handle(Array::Grow(contents, new_capacity, space));
25092 untag()->set_data(new_contents.ptr());
25093}
25094
25096 ASSERT(!IsNull());
25097 ASSERT(Length() > 0);
25098 intptr_t index = Length() - 1;
25099 const Array& contents = Array::Handle(data());
25100 const PassiveObject& obj = PassiveObject::Handle(contents.At(index));
25101 contents.SetAt(index, Object::null_object());
25102 SetLength(index);
25103 return obj.ptr();
25104}
25105
25106GrowableObjectArrayPtr GrowableObjectArray::New(intptr_t capacity,
25107 Heap::Space space) {
25108 ArrayPtr raw_data = (capacity == 0) ? Object::empty_array().ptr()
25109 : Array::New(capacity, space);
25110 const Array& data = Array::Handle(raw_data);
25111 return New(data, space);
25112}
25113
25114GrowableObjectArrayPtr GrowableObjectArray::New(const Array& array,
25115 Heap::Space space) {
25116 ASSERT(
25117 IsolateGroup::Current()->object_store()->growable_object_array_class() !=
25118 Class::null());
25119 const auto& result =
25120 GrowableObjectArray::Handle(Object::Allocate<GrowableObjectArray>(space));
25121 result.SetLength(0);
25122 result.SetData(array);
25123 return result.ptr();
25124}
25125
25126const char* GrowableObjectArray::ToCString() const {
25127 if (IsNull()) {
25128 return "_GrowableList: null";
25129 }
25130 return OS::SCreate(Thread::Current()->zone(),
25131 "Instance(length:%" Pd ") of '_GrowableList'", Length());
25132}
25133
// Equivalent to Dart's operator "==" and hashCode.
// Hash-table traits using Instance::OperatorEquals / HashCode.
// NOTE(review): the class declaration line is elided in this view.
 public:
  static const char* Name() { return "DefaultHashTraits"; }
  static bool ReportStats() { return false; }

  // Dart "==": two nulls match; otherwise defer to OperatorEquals.
  static bool IsMatch(const Object& a, const Object& b) {
    if (a.IsNull() || b.IsNull()) {
      return (a.IsNull() && b.IsNull());
    } else {
      return Instance::Cast(a).OperatorEquals(Instance::Cast(b));
    }
  }
  // Dart hashCode truncated to 32 bits; null hashes to 0.
  static uword Hash(const Object& obj) {
    if (obj.IsNull()) {
      return 0;
    }
    // TODO(koda): Ensure VM classes only produce Smi hash codes, and remove
    // non-Smi cases once Dart-side implementation is complete.
    Thread* thread = Thread::Current();
    // NOTE(review): the reusable-handle scope macro line is elided in this
    // view; it introduces the handle used below.
    Instance& hash_code = thread->InstanceHandle();
    hash_code ^= Instance::Cast(obj).HashCode();
    if (hash_code.IsSmi()) {
      // May waste some bits on 64-bit, to ensure consistency with non-Smi case.
      return static_cast<uword>(Smi::Cast(hash_code).AsTruncatedUint32Value());
    } else if (hash_code.IsInteger()) {
      return static_cast<uword>(
          Integer::Cast(hash_code).AsTruncatedUint32Value());
    } else {
      // Non-integer hashCode (not yet supported): treat as 0.
      return 0;
    }
  }
};
25168
25169MapPtr Map::NewDefault(intptr_t class_id, Heap::Space space) {
25170 const Array& data = Array::Handle(Array::New(kInitialIndexSize, space));
25171 const TypedData& index = TypedData::Handle(
25172 TypedData::New(kTypedDataUint32ArrayCid, kInitialIndexSize, space));
25173 // On 32-bit, the top bits are wasted to avoid Mint allocation.
25174 const intptr_t kAvailableBits = (kSmiBits >= 32) ? 32 : kSmiBits;
25175 const intptr_t kInitialHashMask =
25176 (1 << (kAvailableBits - kInitialIndexBits)) - 1;
25177 return Map::New(class_id, data, index, kInitialHashMask, 0, 0, space);
25178}
25179
25180MapPtr Map::New(intptr_t class_id,
25181 const Array& data,
25182 const TypedData& index,
25183 intptr_t hash_mask,
25184 intptr_t used_data,
25185 intptr_t deleted_keys,
25186 Heap::Space space) {
25187 ASSERT(class_id == kMapCid || class_id == kConstMapCid);
25188 ASSERT(IsolateGroup::Current()->object_store()->map_impl_class() !=
25189 Class::null());
25190 Map& result = Map::Handle(Map::NewUninitialized(class_id, space));
25191 result.set_data(data);
25192 result.set_index(index);
25193 result.set_hash_mask(hash_mask);
25194 result.set_used_data(used_data);
25195 result.set_deleted_keys(deleted_keys);
25196 return result.ptr();
25197}
25198
// Allocates a raw Map instance of the given class id; callers must
// initialize all fields before the object escapes.
25199MapPtr Map::NewUninitialized(intptr_t class_id, Heap::Space space) {
25200  ASSERT(IsolateGroup::Current()->object_store()->map_impl_class() !=
25201         Class::null());
25202  return Object::AllocateVariant<Map>(class_id, space);
25203}
25204
25205const char* Map::ToCString() const {
25206 Zone* zone = Thread::Current()->zone();
25207 return zone->PrintToString(
25208 "%s len:%" Pd, GetClassId() == kConstMapCid ? "_ConstMap" : "_Map",
25209 Length());
25210}
25211
// NOTE(review): the doc extraction dropped the declaration lines (doc
// lines 25212-25213); judging by the body this is
// LinkedHashBase::ComputeAndSetHashMask() — it sizes the index to the
// (power-of-two) data length and stores the matching hash mask.
// TODO: confirm against upstream object.cc.
25214  ASSERT_EQUAL(Smi::Value(deleted_keys()), 0);
25215  Thread* const thread = Thread::Current();
25216  Zone* const zone = thread->zone();
25217
25218  const auto& data_array = Array::Handle(zone, data());
25219  const intptr_t data_length = Utils::RoundUpToPowerOfTwo(data_array.Length());
// Maps store key+value pairs per entry; sets store only keys, so the
// index is sized with a factor of 2 relative to the data length.
25220  const intptr_t index_size_mult = IsMap() ? 1 : 2;
25221  const intptr_t index_size = Utils::Maximum(LinkedHashBase::kInitialIndexSize,
25222                                             data_length * index_size_mult);
25223  ASSERT(Utils::IsPowerOfTwo(index_size));
25224
25225  const intptr_t hash_mask = IndexSizeToHashMask(index_size);
25226  set_hash_mask(hash_mask);
25227}
25228
25231
// NOTE(review): the declaration lines above (doc lines 25229-25230) were
// dropped; this is LinkedHashBase::CanonicalizeEquals(const Instance&) —
// structural equality used during canonicalization.
25232  if (this->ptr() == other.ptr()) {
25233    // Both handles point to the same raw instance.
25234    return true;
25235  }
25236  if (other.IsNull()) {
25237    return false;
25238  }
25239  if (GetClassId() != other.GetClassId()) {
25240    return false;
25241  }
25242
25243  Zone* zone = Thread::Current()->zone();
25244
25245  const LinkedHashBase& other_map = LinkedHashBase::Cast(other);
25246
25247  if (!Smi::Handle(zone, used_data())
25248           .Equals(Smi::Handle(zone, other_map.used_data()))) {
25249    return false;
25250  }
25251
25252  // Immutable maps and sets do not have deleted keys.
25253  ASSERT_EQUAL(RawSmiValue(deleted_keys()), 0);
25254
25255  if (!Array::Handle(zone, data())
25256           .CanonicalizeEquals(Array::Handle(zone, other_map.data()))) {
25257    return false;
25258  }
25259
25260  if (GetTypeArguments() == other.GetTypeArguments()) {
25261    return true;
25262  }
// NOTE(review): the initializer lines for the next two declarations (doc
// lines 25264 and 25266) were dropped by extraction — presumably
// TypeArguments::Handle(zone, GetTypeArguments()) and
// TypeArguments::Handle(zone, other.GetTypeArguments()). TODO: confirm.
25263  const TypeArguments& type_args =
25265  const TypeArguments& other_type_args =
25267  return type_args.Equals(other_type_args);
25268}
25269
25272
// NOTE(review): dropped declaration lines above (doc lines 25270-25271);
// this is LinkedHashBase::CanonicalizeHash() — hash combining the type
// arguments, data array, and used_data, memoized in the heap.
25273  Thread* thread = Thread::Current();
// A cached value of 0 means "not yet computed".
25274  uint32_t hash = thread->heap()->GetCanonicalHash(ptr());
25275  if (hash != 0) {
25276    return hash;
25277  }
25278
25279  // Immutable maps and sets do not have deleted keys.
25280  ASSERT_EQUAL(RawSmiValue(deleted_keys()), 0);
25281
25282  Zone* zone = thread->zone();
25283  auto& member = Instance::Handle(zone, GetTypeArguments());
25284  hash = member.CanonicalizeHash();
25285  member = data();
25286  hash = CombineHashes(hash, member.CanonicalizeHash());
25287  member = used_data();
25288  hash = CombineHashes(hash, member.CanonicalizeHash());
// NOTE(review): one line (doc line 25289) is missing here — likely a
// FinalizeHash(...) call on |hash| before caching. TODO: confirm.
25290  thread->heap()->SetCanonicalHash(ptr(), hash);
25291  return hash;
25292}
25293
25296
// NOTE(review): dropped declaration lines above (doc lines 25294-25295);
// this is LinkedHashBase::CanonicalizeFieldsLocked(Thread*) — it
// canonicalizes type arguments and the (immutable) data array.
25297  Zone* zone = thread->zone();
25298
// NOTE(review): the declaration of |type_args| (doc line 25299) was
// dropped — presumably TypeArguments::Handle(zone, GetTypeArguments()).
25300  if (!type_args.IsNull()) {
25301    type_args = type_args.Canonicalize(thread);
25302    SetTypeArguments(type_args);
25303  }
25304
25305  auto& data_array = Array::Handle(zone, data());
25306  data_array.MakeImmutable();
25307  data_array ^= data_array.CanonicalizeLocked(thread);
25308  set_data(data_array);
25309
25310  // Ignoring index. It will be initially null, created on first use, and
25311  // possibly non-null here if we are rehashing.
25312}
25313
// NOTE(review): dropped signature line (doc line 25314) — this is
// ConstMap::NewDefault(Heap::Space), delegating to Map::NewDefault with
// the const-map class id.
25315  ASSERT(IsolateGroup::Current()->object_store()->const_map_impl_class() !=
25316         Class::null());
25317  return static_cast<ConstMapPtr>(Map::NewDefault(kClassId, space));
25318}
25319
// NOTE(review): dropped signature line (doc line 25320) — this is
// ConstMap::NewUninitialized(Heap::Space).
25321  ASSERT(IsolateGroup::Current()->object_store()->const_map_impl_class() !=
25322         Class::null());
25323  return static_cast<ConstMapPtr>(Map::NewUninitialized(kClassId, space));
25324}
25325
25326SetPtr Set::New(intptr_t class_id,
25327 const Array& data,
25328 const TypedData& index,
25329 intptr_t hash_mask,
25330 intptr_t used_data,
25331 intptr_t deleted_keys,
25332 Heap::Space space) {
25333 ASSERT(class_id == kSetCid || class_id == kConstSetCid);
25334 ASSERT(IsolateGroup::Current()->object_store()->set_impl_class() !=
25335 Class::null());
25336 Set& result = Set::Handle(Set::NewUninitialized(class_id, space));
25337 result.set_data(data);
25338 result.set_index(index);
25339 result.set_hash_mask(hash_mask);
25340 result.set_used_data(used_data);
25341 result.set_deleted_keys(deleted_keys);
25342 return result.ptr();
25343}
25344
25345SetPtr Set::NewDefault(intptr_t class_id, Heap::Space space) {
25346 const Array& data = Array::Handle(Array::New(kInitialIndexSize, space));
25347 const TypedData& index = TypedData::Handle(
25348 TypedData::New(kTypedDataUint32ArrayCid, kInitialIndexSize, space));
25349 // On 32-bit, the top bits are wasted to avoid Mint allocation.
25350 const intptr_t kAvailableBits = (kSmiBits >= 32) ? 32 : kSmiBits;
25351 const intptr_t kInitialHashMask =
25352 (1 << (kAvailableBits - kInitialIndexBits)) - 1;
25353 return Set::New(class_id, data, index, kInitialHashMask, 0, 0, space);
25354}
25355
// Allocates a raw Set instance of the given class id; callers must
// initialize all fields before the object escapes.
25356SetPtr Set::NewUninitialized(intptr_t class_id, Heap::Space space) {
25357  ASSERT(IsolateGroup::Current()->object_store()->set_impl_class() !=
25358         Class::null());
25359  return Object::AllocateVariant<Set>(class_id, space);
25360}
25361
// NOTE(review): the doc extraction dropped the signature line above (doc
// line 25362) — this is ConstSet::NewDefault(Heap::Space), delegating to
// Set::NewDefault with the const-set class id. TODO: confirm upstream.
25363  ASSERT(IsolateGroup::Current()->object_store()->const_set_impl_class() !=
25364         Class::null());
25365  return static_cast<ConstSetPtr>(Set::NewDefault(kClassId, space));
25366}
25367
// NOTE(review): dropped signature line (doc line 25368) — this is
// ConstSet::NewUninitialized(Heap::Space).
25369  ASSERT(IsolateGroup::Current()->object_store()->const_set_impl_class() !=
25370         Class::null());
25371  return static_cast<ConstSetPtr>(Set::NewUninitialized(kClassId, space));
25372}
25373
25374const char* Set::ToCString() const {
25375 Zone* zone = Thread::Current()->zone();
25376 return zone->PrintToString(
25377 "%s len:%" Pd, GetClassId() == kConstSetCid ? "_ConstSet" : "_Set",
25378 Length());
25379}
25380
// No FutureOr instances are ever materialized, so this is never reached.
25381const char* FutureOr::ToCString() const {
25382  // FutureOr is an abstract class.
25383  UNREACHABLE();
25384}
25385
25386Float32x4Ptr Float32x4::New(float v0,
25387 float v1,
25388 float v2,
25389 float v3,
25390 Heap::Space space) {
25391 ASSERT(IsolateGroup::Current()->object_store()->float32x4_class() !=
25392 Class::null());
25393 const auto& result = Float32x4::Handle(Object::Allocate<Float32x4>(space));
25394 result.set_x(v0);
25395 result.set_y(v1);
25396 result.set_z(v2);
25397 result.set_w(v3);
25398 return result.ptr();
25399}
25400
25401Float32x4Ptr Float32x4::New(simd128_value_t value, Heap::Space space) {
25402 ASSERT(IsolateGroup::Current()->object_store()->float32x4_class() !=
25403 Class::null());
25404 const auto& result = Float32x4::Handle(Object::Allocate<Float32x4>(space));
25405 result.set_value(value);
25406 return result.ptr();
25407}
25408
// NOTE(review): the doc extraction dropped the signature line above (doc
// line 25409) — this is Float32x4::value(), reading the payload with an
// unaligned load. TODO: confirm upstream.
25410  return LoadUnaligned(
25411      reinterpret_cast<const simd128_value_t*>(&untag()->value_));
25412}
25413
// NOTE(review): dropped signature line (doc line 25414) — this is
// Float32x4::set_value(simd128_value_t), the unaligned-store counterpart.
25415  StoreUnaligned(reinterpret_cast<simd128_value_t*>(&ptr()->untag()->value_),
25416                 value);
25417}
25418
// Lane setters/getters: value_[0..3] hold lanes x, y, z, w.
25419void Float32x4::set_x(float value) const {
25420  StoreNonPointer(&untag()->value_[0], value);
25421}
25422
25423void Float32x4::set_y(float value) const {
25424  StoreNonPointer(&untag()->value_[1], value);
25425}
25426
25427void Float32x4::set_z(float value) const {
25428  StoreNonPointer(&untag()->value_[2], value);
25429}
25430
25431void Float32x4::set_w(float value) const {
25432  StoreNonPointer(&untag()->value_[3], value);
25433}
25434
25435float Float32x4::x() const {
25436  return untag()->value_[0];
25437}
25438
25439float Float32x4::y() const {
25440  return untag()->value_[1];
25441}
25442
25443float Float32x4::z() const {
25444  return untag()->value_[2];
25445}
25446
25447float Float32x4::w() const {
25448  return untag()->value_[3];
25449}
25450
// Canonical equality is bitwise over the 16-byte payload (so e.g. two
// NaN bit patterns compare equal only when bit-identical).
25451bool Float32x4::CanonicalizeEquals(const Instance& other) const {
25452  return memcmp(&untag()->value_, Float32x4::Cast(other).untag()->value_,
25453                sizeof(simd128_value_t)) == 0;
25454}
25455
// NOTE(review): dropped signature line (doc line 25456) — this is
// Float32x4::CanonicalizeHash(), hashing the raw payload bytes.
25457  return HashBytes(reinterpret_cast<const uint8_t*>(&untag()->value_),
25458                   sizeof(simd128_value_t));
25459}
25460
25461const char* Float32x4::ToCString() const {
25462 float _x = x();
25463 float _y = y();
25464 float _z = z();
25465 float _w = w();
25466 return OS::SCreate(Thread::Current()->zone(), "[%f, %f, %f, %f]", _x, _y, _z,
25467 _w);
25468}
25469
25470Int32x4Ptr Int32x4::New(int32_t v0,
25471 int32_t v1,
25472 int32_t v2,
25473 int32_t v3,
25474 Heap::Space space) {
25475 ASSERT(IsolateGroup::Current()->object_store()->int32x4_class() !=
25476 Class::null());
25477 const auto& result = Int32x4::Handle(Object::Allocate<Int32x4>(space));
25478 result.set_x(v0);
25479 result.set_y(v1);
25480 result.set_z(v2);
25481 result.set_w(v3);
25482 return result.ptr();
25483}
25484
25485Int32x4Ptr Int32x4::New(simd128_value_t value, Heap::Space space) {
25486 ASSERT(IsolateGroup::Current()->object_store()->int32x4_class() !=
25487 Class::null());
25488 const auto& result = Int32x4::Handle(Object::Allocate<Int32x4>(space));
25489 result.set_value(value);
25490 return result.ptr();
25491}
25492
// Lane setters/getters: value_[0..3] hold lanes x, y, z, w.
25493void Int32x4::set_x(int32_t value) const {
25494  StoreNonPointer(&untag()->value_[0], value);
25495}
25496
25497void Int32x4::set_y(int32_t value) const {
25498  StoreNonPointer(&untag()->value_[1], value);
25499}
25500
25501void Int32x4::set_z(int32_t value) const {
25502  StoreNonPointer(&untag()->value_[2], value);
25503}
25504
25505void Int32x4::set_w(int32_t value) const {
25506  StoreNonPointer(&untag()->value_[3], value);
25507}
25508
25509int32_t Int32x4::x() const {
25510  return untag()->value_[0];
25511}
25512
25513int32_t Int32x4::y() const {
25514  return untag()->value_[1];
25515}
25516
25517int32_t Int32x4::z() const {
25518  return untag()->value_[2];
25519}
25520
25521int32_t Int32x4::w() const {
25522  return untag()->value_[3];
25523}
25524
// NOTE(review): dropped signature line (doc line 25525) — this is
// Int32x4::value(), reading the payload with an unaligned load.
25526  return LoadUnaligned(
25527      reinterpret_cast<const simd128_value_t*>(&untag()->value_));
25528}
25529
// NOTE(review): dropped signature line (doc line 25530) — this is
// Int32x4::set_value(simd128_value_t), the unaligned-store counterpart.
25531  StoreUnaligned(reinterpret_cast<simd128_value_t*>(&ptr()->untag()->value_),
25532                 value);
25533}
25534
// Canonical equality is bitwise over the 16-byte payload.
25535bool Int32x4::CanonicalizeEquals(const Instance& other) const {
25536  return memcmp(&untag()->value_, Int32x4::Cast(other).untag()->value_,
25537                sizeof(simd128_value_t)) == 0;
25538}
25539
// NOTE(review): dropped signature line (doc line 25540) — this is
// Int32x4::CanonicalizeHash(), hashing the raw payload bytes.
25541  return HashBytes(reinterpret_cast<const uint8_t*>(&untag()->value_),
25542                   sizeof(simd128_value_t));
25543}
25544
25545const char* Int32x4::ToCString() const {
25546 int32_t _x = x();
25547 int32_t _y = y();
25548 int32_t _z = z();
25549 int32_t _w = w();
25550 return OS::SCreate(Thread::Current()->zone(), "[%08x, %08x, %08x, %08x]", _x,
25551 _y, _z, _w);
25552}
25553
25554Float64x2Ptr Float64x2::New(double value0, double value1, Heap::Space space) {
25555 ASSERT(IsolateGroup::Current()->object_store()->float64x2_class() !=
25556 Class::null());
25557 const auto& result = Float64x2::Handle(Object::Allocate<Float64x2>(space));
25558 result.set_x(value0);
25559 result.set_y(value1);
25560 return result.ptr();
25561}
25562
25563Float64x2Ptr Float64x2::New(simd128_value_t value, Heap::Space space) {
25564 ASSERT(IsolateGroup::Current()->object_store()->float64x2_class() !=
25565 Class::null());
25566 const auto& result = Float64x2::Handle(Object::Allocate<Float64x2>(space));
25567 result.set_value(value);
25568 return result.ptr();
25569}
25570
// Lane getters/setters: value_[0] is x, value_[1] is y.
25571double Float64x2::x() const {
25572  return untag()->value_[0];
25573}
25574
25575double Float64x2::y() const {
25576  return untag()->value_[1];
25577}
25578
25579void Float64x2::set_x(double x) const {
25580  StoreNonPointer(&untag()->value_[0], x);
25581}
25582
25583void Float64x2::set_y(double y) const {
25584  StoreNonPointer(&untag()->value_[1], y);
25585}
25586
// NOTE(review): dropped signature line (doc line 25587) — this is
// Float64x2::value(), reading both lanes into a simd128 value.
25588  return simd128_value_t().readFrom(&untag()->value_[0]);
25589}
25590
// NOTE(review): dropped signature line (doc line 25591) — this is
// Float64x2::set_value(simd128_value_t).
25592  StoreSimd128(&untag()->value_[0], value);
25593}
25594
// Canonical equality is bitwise over the 16-byte payload.
25595bool Float64x2::CanonicalizeEquals(const Instance& other) const {
25596  return memcmp(&untag()->value_, Float64x2::Cast(other).untag()->value_,
25597                sizeof(simd128_value_t)) == 0;
25598}
25599
// NOTE(review): dropped signature line (doc line 25600) — this is
// Float64x2::CanonicalizeHash(), hashing the raw payload bytes.
25601  return HashBytes(reinterpret_cast<const uint8_t*>(&untag()->value_),
25602                   sizeof(simd128_value_t));
25603}
25604
25605const char* Float64x2::ToCString() const {
25606 double _x = x();
25607 double _y = y();
25608 return OS::SCreate(Thread::Current()->zone(), "[%f, %f]", _x, _y);
25609}
25610
// Per-class-id element sizes in bytes; indexed by the typed-data class
// id sequence (see the per-entry comments below).
25611const intptr_t
25612    TypedDataBase::element_size_table[TypedDataBase::kNumElementSizes] = {
25613        1,   // kTypedDataInt8ArrayCid.
25614        1,   // kTypedDataUint8ArrayCid.
25615        1,   // kTypedDataUint8ClampedArrayCid.
25616        2,   // kTypedDataInt16ArrayCid.
25617        2,   // kTypedDataUint16ArrayCid.
25618        4,   // kTypedDataInt32ArrayCid.
25619        4,   // kTypedDataUint32ArrayCid.
25620        8,   // kTypedDataInt64ArrayCid.
25621        8,   // kTypedDataUint64ArrayCid.
25622        4,   // kTypedDataFloat32ArrayCid.
25623        8,   // kTypedDataFloat64ArrayCid.
25624        16,  // kTypedDataFloat32x4ArrayCid.
25625        16,  // kTypedDataInt32x4ArrayCid.
25626        16,  // kTypedDataFloat64x2ArrayCid,
25627    };
25628
25629bool TypedData::CanonicalizeEquals(const Instance& other) const {
25630 if (this->ptr() == other.ptr()) {
25631 // Both handles point to the same raw instance.
25632 return true;
25633 }
25634
25635 if (!other.IsTypedData() || other.IsNull()) {
25636 return false;
25637 }
25638
25639 const TypedData& other_typed_data = TypedData::Cast(other);
25640
25641 if (this->ElementType() != other_typed_data.ElementType()) {
25642 return false;
25643 }
25644
25645 const intptr_t len = this->LengthInBytes();
25646 if (len != other_typed_data.LengthInBytes()) {
25647 return false;
25648 }
25649 NoSafepointScope no_safepoint;
25650 return (len == 0) ||
25651 (memcmp(DataAddr(0), other_typed_data.DataAddr(0), len) == 0);
25652}
25653
25655 const intptr_t len = this->LengthInBytes();
25656 if (len == 0) {
25657 return 1;
25658 }
25659 uint32_t hash = len;
25660 for (intptr_t i = 0; i < len; i++) {
25661 hash = CombineHashes(len, GetUint8(i));
25662 }
25663 return FinalizeHash(hash, kHashBits);
25664}
25665
25666TypedDataPtr TypedData::New(intptr_t class_id,
25667 intptr_t len,
25668 Heap::Space space) {
25669 if (len < 0 || len > TypedData::MaxElements(class_id)) {
25670 FATAL("Fatal error in TypedData::New: invalid len %" Pd "\n", len);
25671 }
25672 auto raw = Object::AllocateVariant<TypedData>(
25673 class_id, space, len * ElementSizeInBytes(class_id));
25674 NoSafepointScope no_safepoint;
25675 raw->untag()->set_length(Smi::New(len));
25676 raw->untag()->RecomputeDataField();
25677 return raw;
25678}
25679
25680TypedDataPtr TypedData::Grow(const TypedData& current,
25681 intptr_t len,
25682 Heap::Space space) {
25683 ASSERT(len > current.Length());
25684 const auto& new_td =
25685 TypedData::Handle(TypedData::New(current.GetClassId(), len, space));
25686 {
25687 NoSafepointScope no_safepoint_scope;
25688 memcpy(new_td.DataAddr(0), current.DataAddr(0), current.LengthInBytes());
25689 }
25690 return new_td.ptr();
25691}
25692
25693const char* TypedData::ToCString() const {
25694 const Class& cls = Class::Handle(clazz());
25695 return cls.ScrubbedNameCString();
25696}
25697
// NOTE(review): the doc extraction dropped the declaration lines here
// (doc lines 25698 and 25700) — this appears to be an AddFinalizer
// member delegating to dart::AddFinalizer, with a Dart_HandleFinalizer
// |callback| parameter on the dropped line. TODO: confirm upstream.
25699    void* peer,
25701    intptr_t external_size) const {
25702  return dart::AddFinalizer(*this, peer, callback, external_size);
25703}
25704
// Wraps externally owned memory |data| of |len| elements as an external
// typed data object. FATALs on a negative or too-large length.
25705ExternalTypedDataPtr ExternalTypedData::New(
25706    intptr_t class_id,
25707    uint8_t* data,
25708    intptr_t len,
25709    Heap::Space space,
25710    bool perform_eager_msan_initialization_check) {
25711  if (len < 0 || len > ExternalTypedData::MaxElements(class_id)) {
25712    FATAL("Fatal error in ExternalTypedData::New: invalid len %" Pd "\n", len);
25713  }
25714
25715  if (perform_eager_msan_initialization_check) {
25716    // Once the TypedData is created, Dart might read this memory. Check for
25717    // initialization at construction to make it easier to track the source.
    // NOTE(review): the actual check call (doc line 25718) was dropped by
    // extraction — likely an MSAN initialization check over data/len.
25719  }
25720
25721  const auto& result = ExternalTypedData::Handle(
25722      Object::AllocateVariant<ExternalTypedData>(class_id, space));
25723  result.SetLength(len);
25724  result.SetData(data);
25725  return result.ptr();
25726}
25727
// Wraps malloc'ed |data| as an old-space Uint8 external typed data and
// attaches a finalizer that free()s the memory on collection.
25728ExternalTypedDataPtr ExternalTypedData::NewFinalizeWithFree(uint8_t* data,
25729                                                            intptr_t len) {
// NOTE(review): the line declaring |result| (doc line 25730) was dropped
// by extraction — presumably
// "const auto& result = ExternalTypedData::Handle(ExternalTypedData::New("
// continuing into the next line. TODO: confirm upstream.
25731      kExternalTypedDataUint8ArrayCid, data, len, Heap::kOld));
25732  result.AddFinalizer(
25733      data, [](void* isolate_callback_data, void* data) { free(data); }, len);
25734  return result.ptr();
25735}
25736
// Allocates an uninitialized typed-data view of the given class id.
25737TypedDataViewPtr TypedDataView::New(intptr_t class_id, Heap::Space space) {
25738  return Object::AllocateVariant<TypedDataView>(class_id, space);
25739}
25740
25741TypedDataViewPtr TypedDataView::New(intptr_t class_id,
25742 const TypedDataBase& typed_data,
25743 intptr_t offset_in_bytes,
25744 intptr_t length,
25745 Heap::Space space) {
25746 auto& result = TypedDataView::Handle(TypedDataView::New(class_id, space));
25747 result.InitializeWith(typed_data, offset_in_bytes, length);
25748 return result.ptr();
25749}
25750
// NOTE(review): dropped signature line above (doc line 25751) — judging
// by the body this is TypedDataBase::IsExternalOrExternalView(): true
// for external typed data, or for a view whose backing store is
// external. TODO: confirm upstream.
25752  if (IsExternalTypedData()) return true;
25753  if (IsTypedDataView()) {
25754    const auto& backing =
25755        TypedDataBase::Handle(TypedDataView::Cast(*this).typed_data());
25756    return backing.IsExternalTypedData();
25757  }
25758  return false;
25759}
25760
// Creates an old-space view covering elements [start, end) of this
// typed-data object, collapsing views-of-views onto the backing store.
25761TypedDataViewPtr TypedDataBase::ViewFromTo(intptr_t start,
25762                                           intptr_t end,
25763                                           Heap::Space space) const {
25764  const intptr_t len = end - start;
25765  ASSERT(0 <= len);
25766  ASSERT(start < Length());
25767  ASSERT((start + len) <= Length());
25768
25769  const intptr_t cid = GetClassId();
25770
25771  if (IsTypedDataView()) {
25772    const auto& view = TypedDataView::Cast(*this);
25773    const auto& td = TypedDataBase::Handle(view.typed_data());
25774    const intptr_t view_offset = Smi::Value(view.offset_in_bytes());
    // NOTE(review): one line (doc line 25775) was dropped here — likely a
    // comment or assert about the backing store. TODO: confirm upstream.
25776    return TypedDataView::New(cid, ExternalTypedData::Cast(td),
25777                              view_offset + start, len, Heap::kOld);
25778  } else if (IsExternalTypedData()) {
    // NOTE(review): lines 25779-25780 (dropped) likely explained the cid
    // arithmetic: the view cid adjacent to an external typed data cid.
25781    return TypedDataView::New(cid - 1, *this, start, len, Heap::kOld);
25782  }
25783  RELEASE_ASSERT(IsTypedData());
  // NOTE(review): lines 25784-25785 (dropped) likely explained the
  // internal-typed-data cid-to-view-cid mapping.
25786  return TypedDataView::New(cid + 1, *this, start, len, Heap::kOld);
25787}
25788
25789const char* TypedDataBase::ToCString() const {
25790  // There are no instances of UntaggedTypedDataBase.
25791  UNREACHABLE();
25792  return nullptr;
25793}
25794
25795const char* TypedDataView::ToCString() const {
25796 const Class& cls = Class::Handle(clazz());
25797 return cls.ScrubbedNameCString();
25798}
25799
25800const char* ExternalTypedData::ToCString() const {
25801 const Class& cls = Class::Handle(clazz());
25802 return cls.ScrubbedNameCString();
25803}
25804
// Allocates an ffi Pointer<Never> wrapping |native_address|.
25805PointerPtr Pointer::New(uword native_address, Heap::Space space) {
25806  Thread* thread = Thread::Current();
25807  Zone* zone = thread->zone();
25808
25809  const auto& type_args = TypeArguments::Handle(
25810      zone, IsolateGroup::Current()->object_store()->type_argument_never());
25811
25812  const Class& cls =
25813      Class::Handle(IsolateGroup::Current()->class_table()->At(kPointerCid));
// NOTE(review): one line (doc line 25814) using |cls| was dropped by
// extraction — likely an EnsureIsAllocateFinalized/type-finalization
// call on the Pointer class. TODO: confirm upstream.
25815
25816  const auto& result = Pointer::Handle(zone, Object::Allocate<Pointer>(space));
25817  result.SetTypeArguments(type_args);
25818  result.SetNativeAddress(native_address);
25819
25820  return result.ptr();
25821}
25822
25823const char* Pointer::ToCString() const {
25824 return OS::SCreate(Thread::Current()->zone(), "Pointer: address=0x%" Px,
25825 NativeAddress());
25826}
25827
25828DynamicLibraryPtr DynamicLibrary::New(void* handle,
25829 bool canBeClosed,
25830 Heap::Space space) {
25831 const auto& result =
25832 DynamicLibrary::Handle(Object::Allocate<DynamicLibrary>(space));
25833 ASSERT_EQUAL(result.IsClosed(), false);
25834 result.SetHandle(handle);
25835 result.SetCanBeClosed(canBeClosed);
25836 return result.ptr();
25837}
25838
// NOTE(review): dropped signature line above (doc line 25839) — this is
// Pointer::IsPointer(const Instance&), testing the instance's class id
// against the ffi pointer class-id range. TODO: confirm upstream.
25840  return IsFfiPointerClassId(obj.ptr()->GetClassId());
25841}
25842
// NOTE(review): dropped signature line (doc line 25843) — this is
// Instance::IsPointer(), delegating to the static check above.
25844  return Pointer::IsPointer(*this);
25845}
25846
25847const char* DynamicLibrary::ToCString() const {
25848 return OS::SCreate(Thread::Current()->zone(), "DynamicLibrary: handle=0x%" Px,
25849 reinterpret_cast<uintptr_t>(GetHandle()));
25850}
25851
25852CapabilityPtr Capability::New(uint64_t id, Heap::Space space) {
25853 const auto& result = Capability::Handle(Object::Allocate<Capability>(space));
25854 result.StoreNonPointer(&result.untag()->id_, id);
25855 return result.ptr();
25856}
25857
25858const char* Capability::ToCString() const {
25859  return "Capability";
25860}
25861
// Allocates a receive port for |id|, paired with a send port that shares
// the isolate's origin id. Ports start open and keep the isolate alive.
25862ReceivePortPtr ReceivePort::New(Dart_Port id,
25863                                const String& debug_name,
25864                                Heap::Space space) {
25865  ASSERT(id != ILLEGAL_PORT);
25866  Thread* thread = Thread::Current();
25867  Zone* zone = thread->zone();
25868  const SendPort& send_port =
25869      SendPort::Handle(zone, SendPort::New(id, thread->isolate()->origin_id()));
25870#if !defined(PRODUCT)
25871  const StackTrace& allocation_location_ =
// NOTE(review): the initializer line (doc line 25872) was dropped by
// extraction — presumably capturing the current stack trace for
// debugging. TODO: confirm upstream.
25873#endif  // !defined(PRODUCT)
25874
25875  const auto& result =
25876      ReceivePort::Handle(zone, Object::Allocate<ReceivePort>(space));
25877  result.untag()->set_send_port(send_port.ptr());
25878  result.untag()->set_bitfield(
25879      Smi::New(IsOpen::encode(true) | IsKeepIsolateAlive::encode(true)));
25880#if !defined(PRODUCT)
25881  result.untag()->set_debug_name(debug_name.ptr());
25882  result.untag()->set_allocation_location(allocation_location_.ptr());
25883#endif  // !defined(PRODUCT)
25884  return result.ptr();
25885}
25886
25887const char* ReceivePort::ToCString() const {
25888  return "ReceivePort";
25889}
25890
// Convenience overload: a send port without an origin id.
25891SendPortPtr SendPort::New(Dart_Port id, Heap::Space space) {
25892  return New(id, ILLEGAL_PORT, space);
25893}
25894
// NOTE(review): the first line of this signature (doc line 25895) was
// dropped — this is SendPort::New(Dart_Port id, Dart_Port origin_id,
// Heap::Space space). TODO: confirm upstream.
25896                      Dart_Port origin_id,
25897                      Heap::Space space) {
25898  ASSERT(id != ILLEGAL_PORT);
25899  const auto& result = SendPort::Handle(Object::Allocate<SendPort>(space));
25900  result.StoreNonPointer(&result.untag()->id_, id);
25901  result.StoreNonPointer(&result.untag()->origin_id_, origin_id);
25902  return result.ptr();
25903}
25904
25905const char* SendPort::ToCString() const {
25906  return "SendPort";
25907}
25908
25909static void TransferableTypedDataFinalizer(void* isolate_callback_data,
25910 void* peer) {
25911 delete (reinterpret_cast<TransferableTypedDataPeer*>(peer));
25912}
25913
// Wraps |data| (|length| bytes of externally allocated memory) in a
// transferable whose peer frees the memory when garbage collected.
25914TransferableTypedDataPtr TransferableTypedData::New(uint8_t* data,
25915                                                    intptr_t length) {
25916  auto* const peer = new TransferableTypedDataPeer(data, length);
25917
25918  Thread* thread = Thread::Current();
25919  const auto& result =
25920      TransferableTypedData::Handle(Object::Allocate<TransferableTypedData>(
25921          thread->heap()->SpaceForExternal(length)));
25922  thread->heap()->SetPeer(result.ptr(), peer);
25923
25924  // Set up finalizer so it frees allocated memory if handle is
25925  // garbage-collected.
25926  FinalizablePersistentHandle* finalizable_ref =
// NOTE(review): the initializer lines (doc lines 25927-25928) were
// dropped by extraction — presumably FinalizablePersistentHandle::New(
// isolate group, result, peer, TransferableTypedDataFinalizer, length,
// ...). TODO: confirm upstream.
25929          /*auto_delete=*/true);
25930  ASSERT(finalizable_ref != nullptr);
25931  peer->set_handle(finalizable_ref);
25932
25933  return result.ptr();
25934}
25935
25936const char* TransferableTypedData::ToCString() const {
25937  return "TransferableTypedData";
25938}
25939
25940bool Closure::CanonicalizeEquals(const Instance& other) const {
25941 if (!other.IsClosure()) return false;
25942
25943 const Closure& other_closure = Closure::Cast(other);
25944 return (instantiator_type_arguments() ==
25945 other_closure.instantiator_type_arguments()) &&
25946 (function_type_arguments() ==
25947 other_closure.function_type_arguments()) &&
25948 (delayed_type_arguments() == other_closure.delayed_type_arguments()) &&
25949 (function() == other_closure.function()) &&
25950 (RawContext() == other_closure.RawContext());
25951}
25952
// NOTE(review): dropped signature line above (doc line 25953) — this is
// Closure::CanonicalizeFieldsLocked(Thread*), canonicalizing each of the
// three type-argument vectors in place. TODO: confirm upstream.
25954  TypeArguments& type_args = TypeArguments::Handle();
25955  type_args = instantiator_type_arguments();
25956  if (!type_args.IsNull()) {
25957    type_args = type_args.Canonicalize(thread);
25958    set_instantiator_type_arguments(type_args);
25959  }
25960  type_args = function_type_arguments();
25961  if (!type_args.IsNull()) {
25962    type_args = type_args.Canonicalize(thread);
25963    set_function_type_arguments(type_args);
25964  }
25965  type_args = delayed_type_arguments();
25966  if (!type_args.IsNull()) {
25967    type_args = type_args.Canonicalize(thread);
25968    set_delayed_type_arguments(type_args);
25969  }
25970  // Ignore function, context, hash.
25971}
25972
// Debug description: "Closure: <signature>", plus the function name for
// implicit (tear-off) closures.
25973const char* Closure::ToCString() const {
25974  auto const thread = Thread::Current();
25975  auto const zone = thread->zone();
25976  ZoneTextBuffer buffer(zone);
25977  buffer.AddString("Closure: ");
25978  const Function& fun = Function::Handle(zone, function());
25979  const FunctionType& sig =
25980      FunctionType::Handle(zone, GetInstantiatedSignature(zone));
// NOTE(review): one line (doc line 25981) using |sig| was dropped by
// extraction — likely printing the signature into |buffer|.
25982  if (fun.IsImplicitClosureFunction()) {
25983    buffer.Printf(" from %s", fun.ToCString());
25984  }
25985  return buffer.buffer();
25986}
25987
// NOTE(review): dropped signature line (doc line 25988) — this is
// Closure::ComputeHash(). TODO: confirm upstream.
25989  Thread* thread = Thread::Current();
25990  DEBUG_ASSERT(thread->TopErrorHandlerIsExitFrame());
25991  Zone* zone = thread->zone();
25992  const Function& func = Function::Handle(zone, function());
25993  uint32_t result = 0;
25994  if (func.IsImplicitClosureFunction() || func.IsGeneric()) {
25995    // Combine function's hash code, delayed type arguments hash code
25996    // (if generic), and identityHashCode of cached receiver (if implicit
25997    // instance closure).
25998    result = static_cast<uint32_t>(func.Hash());
25999    if (func.IsGeneric()) {
26000      const TypeArguments& delayed_type_args =
26001          TypeArguments::Handle(zone, delayed_type_arguments());
26002      result = CombineHashes(result, delayed_type_args.Hash());
26003    }
    // NOTE(review): one line (doc line 26004) was dropped here — likely
    // "if (func.IsImplicitClosureFunction()) {" guarding the receiver
    // hashing below. TODO: confirm upstream.
26005      const Instance& receiver =
26006          Instance::Handle(zone, GetImplicitClosureReceiver());
26007      const Integer& receiverHash =
26008          Integer::Handle(zone, receiver.IdentityHashCode(thread));
      // NOTE(review): one line (doc line 26009) was dropped — likely
      // folding receiverHash into |result| via CombineHashes.
26010    }
26011  } else {
26012    // Non-implicit closures of non-generic functions are unique,
26013    // so identityHashCode of closure object is good enough.
26014    const Integer& identityHash =
26015        Integer::Handle(zone, this->IdentityHashCode(thread));
26016    result = identityHash.AsTruncatedUint32Value();
26017  }
// NOTE(review): one line (doc line 26018) was dropped — likely
// "return FinalizeHash(result, ...);". TODO: confirm upstream.
26019}
26020
26021ClosurePtr Closure::New(const TypeArguments& instantiator_type_arguments,
26022 const TypeArguments& function_type_arguments,
26023 const Function& function,
26024 const Object& context,
26025 Heap::Space space) {
26026 // We store null delayed type arguments, not empty ones, in closures with
26027 // non-generic functions a) to make method extraction slightly faster and
26028 // b) to make the Closure::IsGeneric check fast.
26029 // Keep in sync with StubCodeCompiler::GenerateAllocateClosureStub.
26030 return Closure::New(instantiator_type_arguments, function_type_arguments,
26031 function.IsGeneric() ? Object::empty_type_arguments()
26032 : Object::null_type_arguments(),
26033 function, context, space);
26034}
26035
// Allocates a closure from fully specified, canonical type-argument
// vectors, its function, and its captured context.
26036ClosurePtr Closure::New(const TypeArguments& instantiator_type_arguments,
26037                        const TypeArguments& function_type_arguments,
26038                        const TypeArguments& delayed_type_arguments,
26039                        const Function& function,
26040                        const Object& context,
26041                        Heap::Space space) {
26042  ASSERT(instantiator_type_arguments.IsCanonical());
26043  ASSERT(function_type_arguments.IsCanonical());
26044  ASSERT(delayed_type_arguments.IsCanonical());
// NOTE(review): one line (doc line 26045) was dropped by extraction —
// likely another ASSERT on |function|. TODO: confirm upstream.
26046  ASSERT(
26047      (function.IsImplicitInstanceClosureFunction() && context.IsInstance()) ||
26048      (function.IsNonImplicitClosureFunction() && context.IsContext()) ||
26049      context.IsNull());
26050  const auto& result = Closure::Handle(Object::Allocate<Closure>(space));
26051  result.untag()->set_instantiator_type_arguments(
26052      instantiator_type_arguments.ptr());
26053  result.untag()->set_function_type_arguments(function_type_arguments.ptr());
26054  result.untag()->set_delayed_type_arguments(delayed_type_arguments.ptr());
26055  result.untag()->set_function(function.ptr());
26056  result.untag()->set_context(context.ptr());
26057#if defined(DART_PRECOMPILED_RUNTIME)
26058  result.set_entry_point(function.entry_point());
26059#endif
26060  return result.ptr();
26061}
26062
// Returns this closure's signature with its type parameters instantiated
// from the closure's captured type arguments.
26063FunctionTypePtr Closure::GetInstantiatedSignature(Zone* zone) const {
26064  const Function& fun = Function::Handle(zone, function());
26065  FunctionType& sig = FunctionType::Handle(zone, fun.signature());
26066  TypeArguments& fn_type_args =
26067      TypeArguments::Handle(zone, function_type_arguments());
26068  const TypeArguments& delayed_type_args =
26069      TypeArguments::Handle(zone, delayed_type_arguments());
26070  const TypeArguments& inst_type_args =
26071      TypeArguments::Handle(zone, instantiator_type_arguments());
26072
26073  // We detect the case of a partial tearoff type application and substitute the
26074  // type arguments for the type parameters of the function.
26075  intptr_t num_free_params;
26076  if (!IsGeneric() && fun.IsGeneric()) {
26077    num_free_params = kCurrentAndEnclosingFree;
26078    fn_type_args = delayed_type_args.Prepend(
26079        zone, fn_type_args, sig.NumParentTypeArguments(),
    // NOTE(review): the final argument line of this Prepend call (doc
    // line 26080) was dropped by extraction. TODO: confirm upstream.
26081  } else {
26082    num_free_params = kAllFree;
26083  }
26084  if (num_free_params == kCurrentAndEnclosingFree || !sig.IsInstantiated()) {
26085    sig ^= sig.InstantiateFrom(inst_type_args, fn_type_args, num_free_params,
26086                               Heap::kOld);
26087  }
26088  return sig.ptr();
26089}
26090
// NOTE(review): dropped signature line (doc line 26091) — reading the
// skip_sync_start_in_parent_stack flag (a StackTrace accessor, judging
// by the surrounding StackTrace code). TODO: confirm upstream.
26092  return untag()->skip_sync_start_in_parent_stack;
26093}
26094
// NOTE(review): dropped signature line (doc line 26095) — the setter
// counterpart of the accessor above.
26096  StoreNonPointer(&untag()->skip_sync_start_in_parent_stack, value);
26097}
26098
26099intptr_t StackTrace::Length() const {
26100 const Array& code_array = Array::Handle(untag()->code_array());
26101 return code_array.Length();
26102}
26103
26104ObjectPtr StackTrace::CodeAtFrame(intptr_t frame_index) const {
26105 const Array& code_array = Array::Handle(untag()->code_array());
26106 return code_array.At(frame_index);
26107}
26108
26109void StackTrace::SetCodeAtFrame(intptr_t frame_index,
26110 const Object& code) const {
26111 const Array& code_array = Array::Handle(untag()->code_array());
26112 code_array.SetAt(frame_index, code);
26113}
26114
26115uword StackTrace::PcOffsetAtFrame(intptr_t frame_index) const {
26116 const TypedData& pc_offset_array =
26117 TypedData::Handle(untag()->pc_offset_array());
26118 return pc_offset_array.GetUintPtr(frame_index * kWordSize);
26119}
26120
26121void StackTrace::SetPcOffsetAtFrame(intptr_t frame_index,
26122 uword pc_offset) const {
26123 const TypedData& pc_offset_array =
26124 TypedData::Handle(untag()->pc_offset_array());
26125 pc_offset_array.SetUintPtr(frame_index * kWordSize, pc_offset);
26126}
26127
26128void StackTrace::set_async_link(const StackTrace& async_link) const {
26129 untag()->set_async_link(async_link.ptr());
26130}
26131
26132void StackTrace::set_code_array(const Array& code_array) const {
26133 untag()->set_code_array(code_array.ptr());
26134}
26135
26136void StackTrace::set_pc_offset_array(const TypedData& pc_offset_array) const {
26137 untag()->set_pc_offset_array(pc_offset_array.ptr());
26138}
26139
26140void StackTrace::set_expand_inlined(bool value) const {
26141 StoreNonPointer(&untag()->expand_inlined_, value);
26142}
26143
26144bool StackTrace::expand_inlined() const {
26145 return untag()->expand_inlined_;
26146}
26147
26148StackTracePtr StackTrace::New(const Array& code_array,
26149 const TypedData& pc_offset_array,
26150 Heap::Space space) {
26151 const auto& result = StackTrace::Handle(Object::Allocate<StackTrace>(space));
26152 result.set_code_array(code_array);
26153 result.set_pc_offset_array(pc_offset_array);
26154 result.set_expand_inlined(true); // default.
26155 ASSERT_EQUAL(result.skip_sync_start_in_parent_stack(), false);
26156 return result.ptr();
26157}
26158
26159StackTracePtr StackTrace::New(const Array& code_array,
26160 const TypedData& pc_offset_array,
26161 const StackTrace& async_link,
26162 bool skip_sync_start_in_parent_stack,
26163 Heap::Space space) {
26164 const auto& result = StackTrace::Handle(Object::Allocate<StackTrace>(space));
26165 result.set_async_link(async_link);
26166 result.set_code_array(code_array);
26167 result.set_pc_offset_array(pc_offset_array);
26168 result.set_expand_inlined(true); // default.
26169 result.set_skip_sync_start_in_parent_stack(skip_sync_start_in_parent_stack);
26170 return result.ptr();
26171}
26172
26173#if defined(DART_PRECOMPILED_RUNTIME)
// Appends the body of one non-symbolic stack frame line when |call_addr|
// falls inside the instructions image that starts at |instructions|; returns
// false (writing nothing beyond the optional unit id) otherwise.
// NOTE(review): this extracted listing is missing the first parameter line
// (the output BaseTextBuffer*) and the second arm of the symbol-selection
// ternary near the end -- confirm against the full source.
static bool TryPrintNonSymbolicStackFrameBodyRelative(
    uword call_addr,
    uword instructions,
    bool vm,
    LoadingUnit* unit = nullptr) {
  const Image image(reinterpret_cast<const uint8_t*>(instructions));
  // Bail out when the address is not covered by this image.
  if (!image.contains(call_addr)) return false;
  if (unit != nullptr) {
    ASSERT(!unit->IsNull());
    // Add the unit ID to the stack frame, so the correct loading unit
    // information from the header can be checked.
    buffer->Printf(" unit %" Pd "", unit->id());
  }
  auto const offset = call_addr - instructions;
  // Only print the relocated address of the call when we know the saved
  // debugging information (if any) will have the same relocated address.
  // Also only print 'virt' fields for isolate addresses.
  if (!vm && image.compiled_to_elf()) {
    const uword relocated_section_start =
        image.instructions_relocated_address();
    buffer->Printf(" virt %" Pp "", relocated_section_start + offset);
  }
  const char* symbol = vm ? kVmSnapshotInstructionsAsmSymbol
  buffer->Printf(" %s+0x%" Px "\n", symbol, offset);
  return true;
}
26202
// Prints the best representation(s) for the call address.
// Tries, in order: the VM instructions image, then each loading unit's
// instructions image (when loading units exist), otherwise the isolate
// instructions image; falls back to an explicit marker line when the
// address is in none of them. |unit| is a caller-provided reusable handle
// (may be nullptr only when |loading_units| is null).
static void PrintNonSymbolicStackFrameBody(BaseTextBuffer* buffer,
                                           uword call_addr,
                                           uword isolate_instructions,
                                           uword vm_instructions,
                                           const Array& loading_units,
                                           LoadingUnit* unit) {
  if (TryPrintNonSymbolicStackFrameBodyRelative(buffer, call_addr,
                                                vm_instructions,
                                                /*vm=*/true)) {
    return;
  }

  if (!loading_units.IsNull()) {
    // All non-VM stack frames should include the loading unit id.
    const intptr_t unit_count = loading_units.Length();
    for (intptr_t i = LoadingUnit::kRootId; i < unit_count; i++) {
      *unit ^= loading_units.At(i);
      // Units whose code was never loaded have no instructions image.
      if (!unit->has_instructions_image()) continue;
      auto const instructions =
          reinterpret_cast<uword>(unit->instructions_image());
      if (TryPrintNonSymbolicStackFrameBodyRelative(buffer, call_addr,
                                                    instructions,
                                                    /*vm=*/false, unit)) {
        return;
      }
    }
  } else {
    if (TryPrintNonSymbolicStackFrameBodyRelative(buffer, call_addr,
                                                  isolate_instructions,
                                                  /*vm=*/false)) {
      return;
    }
  }

  // The stack trace printer should never end up here, since these are not
  // addresses within a loading unit or the VM or app isolate instructions
  // sections. Thus, make it easy to notice when looking at the stack trace.
  buffer->Printf(" <invalid Dart instruction address>\n");
}
26243#endif
26244
26246 intptr_t frame_index) {
26247 buffer->Printf("#%-6" Pd "", frame_index);
26248}
26249
26251 const char* function_name,
26252 const char* url,
26253 intptr_t line = -1,
26254 intptr_t column = -1) {
26255 buffer->Printf(" %s (%s", function_name, url);
26256 if (line >= 0) {
26257 buffer->Printf(":%" Pd "", line);
26258 if (column >= 0) {
26259 buffer->Printf(":%" Pd "", column);
26260 }
26261 }
26262 buffer->Printf(")\n");
26263}
26264
26267 const Function& function,
26268 TokenPosition token_pos_or_line,
26269 intptr_t frame_index,
26270 bool is_line = false) {
26271 ASSERT(!function.IsNull());
26272 const auto& script = Script::Handle(zone, function.script());
26273 const char* function_name = function.QualifiedUserVisibleNameCString();
26274 const char* url = script.IsNull()
26275 ? "Kernel"
26276 : String::Handle(zone, script.url()).ToCString();
26277
26278 // If the URI starts with "data:application/dart;" this is a URI encoded
26279 // script so we shouldn't print the entire URI because it could be very long.
26280 if (strstr(url, "data:application/dart;") == url) {
26281 url = "<data:application/dart>";
26282 }
26283
26284 intptr_t line = -1;
26285 intptr_t column = -1;
26286 if (is_line) {
26287 ASSERT(token_pos_or_line.IsNoSource() || token_pos_or_line.IsReal());
26288 if (token_pos_or_line.IsReal()) {
26289 line = token_pos_or_line.Pos();
26290 }
26291 } else {
26292 ASSERT(!script.IsNull());
26293 script.GetTokenLocation(token_pos_or_line, &line, &column);
26294 }
26297}
26298
  // A function is visible as a future listener when it is itself visible,
  // or when it is an implicit closure whose parent function is visible
  // (or absent).
  if (function.is_visible()) {
    return true;
  }

  if (function.IsImplicitClosureFunction()) {
    // An implicit closure inherits visibility from its parent function.
    return function.parent_function() == Function::null() ||
           Function::is_visible(function.parent_function());
  }

  return false;
}
26311
26312#if defined(DART_PRECOMPILED_RUNTIME)
26313static void WriteImageBuildId(BaseTextBuffer* buffer,
26314 const char* prefix,
26315 uword image_address) {
26316 const auto& build_id = OS::GetAppBuildId(image_address);
26317 if (build_id.data != nullptr) {
26318 ASSERT(build_id.len > 0);
26319 buffer->AddString(prefix);
26320 buffer->AddString("'");
26321 for (intptr_t i = 0; i < build_id.len; i++) {
26322 buffer->Printf("%2.2x", build_id.data[i]);
26323 }
26324 buffer->AddString("'");
26325 }
26326}
26327
26328void WriteStackTraceHeaderLoadingUnitEntry(BaseTextBuffer* buffer,
26329 intptr_t id,
26330 uword dso_base,
26331 uword instructions) {
26332 buffer->Printf("loading_unit: %" Pd "", id);
26333 WriteImageBuildId(buffer, ", build_id: ", instructions);
26334 buffer->Printf(", dso_base: %" Px ", instructions: %" Px "\n", dso_base,
26335 instructions);
26336}
26337#endif
26338
const char* StackTrace::ToCString() const {
  // Renders this stack trace, following async links, into a zone-allocated
  // multi-line string. In precompiled runtimes with --dwarf-stack-traces
  // this emits non-symbolic (address-based) frames; otherwise symbolic ones.
  // NOTE(review): several lines are missing from this extracted listing
  // (marked inline below) -- confirm against the full source.
  auto const T = Thread::Current();
  auto const zone = T->zone();
  auto& stack_trace = StackTrace::Handle(zone, this->ptr());
  auto& owner = Object::Handle(zone);
  auto& function = Function::Handle(zone);
  auto& code_object = Object::Handle(zone);
  auto& code = Code::Handle(zone);

#if defined(DART_PRECOMPILED_RUNTIME)
  const Array& loading_units =
      Array::Handle(T->isolate_group()->object_store()->loading_units());
  // Reusable handle for iterating loading units (nullptr when there are none).
  auto* const unit =
      loading_units.IsNull() ? nullptr : &LoadingUnit::Handle(zone);
#endif

  // No allocation (hence no GC) may happen while frame data is walked below.
  NoSafepointScope no_allocation;
  GrowableArray<const Function*> inlined_functions;
  GrowableArray<TokenPosition> inlined_token_positions;

#if defined(DART_PRECOMPILED_RUNTIME)
  GrowableArray<void*> addresses(10);
  // NOTE(review): the second operand of this && is missing from the listing.
  const bool have_footnote_callback =
      FLAG_dwarf_stack_traces_mode &&
#endif

  ZoneTextBuffer buffer(zone, 1024);

#if defined(DART_PRECOMPILED_RUNTIME)
  auto const isolate_instructions = reinterpret_cast<uword>(
      T->isolate_group()->source()->snapshot_instructions);
#if defined(DEBUG)
  // The root loading unit's image must be the isolate instructions image.
  if (!loading_units.IsNull()) {
    *unit ^= loading_units.At(LoadingUnit::kRootId);
    ASSERT(!unit->IsNull());
    ASSERT(unit->has_instructions_image());
    ASSERT(reinterpret_cast<uword>(unit->instructions_image()) ==
           isolate_instructions);
  }
#endif
  // NOTE(review): the argument of this reinterpret_cast is missing from the
  // listing.
  auto const vm_instructions = reinterpret_cast<uword>(
  if (FLAG_dwarf_stack_traces_mode) {
    // This prologue imitates Android's debuggerd to make it possible to paste
    // the stack trace into ndk-stack.
    buffer.Printf(
        "*** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***\n");
    OSThread* thread = OSThread::Current();
    buffer.Printf("pid: %" Pd ", tid: %" Pd ", name %s\n", OS::ProcessId(),
                  OSThread::ThreadIdToIntPtr(thread->id()), thread->name());
#if defined(DART_COMPRESSED_POINTERS)
    const char kCompressedPointers[] = "yes";
#else
    const char kCompressedPointers[] = "no";
#endif
#if defined(USING_SIMULATOR)
    const char kUsingSimulator[] = "yes";
#else
    const char kUsingSimulator[] = "no";
#endif
    buffer.Printf("os: %s arch: %s comp: %s sim: %s\n",
                  kHostOperatingSystemName, kTargetArchitectureName,
                  kCompressedPointers, kUsingSimulator);
    WriteImageBuildId(&buffer, "build_id: ", isolate_instructions);
    buffer.AddString("\n");
    // One header line per loading unit that actually has code loaded.
    if (!loading_units.IsNull()) {
      const intptr_t unit_count = loading_units.Length();
      for (intptr_t i = LoadingUnit::kRootId; i < unit_count; i++) {
        *unit ^= loading_units.At(i);
        if (!unit->has_instructions_image()) continue;
        const uword instructions =
            reinterpret_cast<uword>(unit->instructions_image());
        const uword dso_base = OS::GetAppDSOBase(instructions);
        WriteStackTraceHeaderLoadingUnitEntry(&buffer, i, dso_base,
                                              instructions);
      }
    }
    // Print the dso_base of the VM and isolate_instructions. We print both here
    // as the VM and isolate may be loaded from different snapshot images.
    const uword isolate_dso_base = OS::GetAppDSOBase(isolate_instructions);
    buffer.Printf("isolate_dso_base: %" Px "", isolate_dso_base);
    const uword vm_dso_base = OS::GetAppDSOBase(vm_instructions);
    buffer.Printf(", vm_dso_base: %" Px "\n", vm_dso_base);
    buffer.Printf("isolate_instructions: %" Px "", isolate_instructions);
    buffer.Printf(", vm_instructions: %" Px "\n", vm_instructions);
  }
#endif

  // Iterate through the stack frames and create C string description
  // for each frame.
  intptr_t frame_index = 0;
  uint32_t frame_skip = 0;
  // If we're already in a gap, don't print multiple gap markers.
  bool in_gap = false;
  do {
    for (intptr_t i = frame_skip; i < stack_trace.Length(); i++) {
      code_object = stack_trace.CodeAtFrame(i);
      if (code_object.IsNull()) {
        // Check for a null function, which indicates a gap in a StackOverflow
        // or OutOfMemory trace.
        if ((i < (stack_trace.Length() - 1)) &&
            (stack_trace.CodeAtFrame(i + 1) != Code::null())) {
          buffer.AddString("...\n...\n");
          // To account for gap frames.
          frame_index += stack_trace.PcOffsetAtFrame(i);
        }
        continue;
      }

      // A stub marker frame denotes an asynchronous suspension point.
      if (code_object.ptr() == StubCode::AsynchronousGapMarker().ptr()) {
        if (!in_gap) {
          buffer.AddString("<asynchronous suspension>\n");
        }
        in_gap = true;
        continue;
      }

      const uword pc_offset = stack_trace.PcOffsetAtFrame(i);
      ASSERT(code_object.IsCode());
      code ^= code_object.ptr();
      ASSERT(code.IsFunctionCode());
      owner = code.owner();
      if (owner.IsFunction()) {
        function ^= owner.ptr();
      } else {
        // NOTE(review): the statement in this else-branch is missing from
        // the listing (presumably it clears |function|).
      }
      const uword pc = code.PayloadStart() + pc_offset;

      // NOTE(review): the initializer of this constant is missing from the
      // listing.
      const bool is_future_listener =

      // A visible frame ends any gap we might be in.
      in_gap = false;

#if defined(DART_PRECOMPILED_RUNTIME)
      // When printing non-symbolic frames, we normally print call
      // addresses, not return addresses, by subtracting one from the PC to
      // get an address within the preceding instruction.
      //
      // The one exception is a normal closure registered as a listener on a
      // future. In this case, the returned pc_offset will be pointing to the
      // entry pooint of the function, which will be invoked when the future
      // completes. To make things more uniform stack unwinding code offets
      // pc_offset by 1 for such cases.
      const uword call_addr = pc - 1;

      if (FLAG_dwarf_stack_traces_mode) {
        if (have_footnote_callback) {
          addresses.Add(reinterpret_cast<void*>(call_addr));
        }

        // This output is formatted like Android's debuggerd. Note debuggerd
        // prints call addresses instead of return addresses.
        buffer.Printf(" #%02" Pd " abs %" Pp "", frame_index, call_addr);
        PrintNonSymbolicStackFrameBody(&buffer, call_addr, isolate_instructions,
                                       vm_instructions, loading_units, unit);
        frame_index++;
        continue;
      }

      if (function.IsNull()) {
        in_gap = false;
        // We can't print the symbolic information since the owner was not
        // retained, so instead print the static symbol + offset like the
        // non-symbolic stack traces.
        PrintSymbolicStackFrameIndex(&buffer, frame_index);
        PrintNonSymbolicStackFrameBody(&buffer, call_addr, isolate_instructions,
                                       vm_instructions, loading_units, unit);
        frame_index++;
        continue;
      }
#endif

      if (code.is_optimized() && stack_trace.expand_inlined() &&
          (FLAG_precompiled_mode || !is_future_listener)) {
        // Note: In AOT mode EmitFunctionEntrySourcePositionDescriptorIfNeeded
        // will take care of emitting a descriptor that would allow us to
        // symbolize stack frame with 0 offset.
        code.GetInlinedFunctionsAtReturnAddress(
            is_future_listener ? 0 : pc_offset, &inlined_functions,
            &inlined_token_positions);
        ASSERT(inlined_functions.length() >= 1);
        // Emit innermost-last: walk the inlining chain outermost-first.
        for (intptr_t j = inlined_functions.length() - 1; j >= 0; j--) {
          function = inlined_functions[j]->ptr();
          auto const pos = inlined_token_positions[j];
          if (is_future_listener && function.IsImplicitClosureFunction()) {
            function = function.parent_function();
          }
          if (FLAG_show_invisible_frames || function.is_visible()) {
            PrintSymbolicStackFrame(zone, &buffer, function, pos, frame_index,
                                    /*is_line=*/FLAG_precompiled_mode);
            frame_index++;
          }
        }
        continue;
      }

      if (FLAG_show_invisible_frames || function.is_visible() ||
          (is_future_listener && IsVisibleAsFutureListener(function))) {
        auto const pos = is_future_listener ? function.token_pos()
                                            : code.GetTokenIndexOfPC(pc);
        PrintSymbolicStackFrame(zone, &buffer, function, pos, frame_index);
        frame_index++;
      }
    }

    // Follow the link.
    // NOTE(review): the true-branch of this conditional expression is
    // missing from the listing.
    frame_skip = stack_trace.skip_sync_start_in_parent_stack()
                     : 0;
    stack_trace = stack_trace.async_link();
  } while (!stack_trace.IsNull());

#if defined(DART_PRECOMPILED_RUNTIME)
  if (have_footnote_callback) {
    // NOTE(review): the callback invocation line is missing from the listing.
        &addresses[0], addresses.length());
    if (footnote != nullptr) {
      buffer.AddString(footnote);
      // The footnote string is malloc-allocated by the callback.
      free(footnote);
    }
  }
#endif

  return buffer.buffer();
}
26567
// Flag handler: toggling dwarf_stack_traces also controls (in PRODUCT
// builds) whether function and code objects may be dropped from
// precompiled snapshots.
static void DwarfStackTracesHandler(bool value) {
  FLAG_dwarf_stack_traces_mode = value;

#if defined(PRODUCT)
  // We can safely remove function objects in precompiled snapshots if the
  // runtime will generate DWARF stack traces and we don't have runtime
  // debugging options like the observatory available.
  if (value) {
    FLAG_retain_function_objects = false;
    FLAG_retain_code_objects = false;
  }
#endif
}
26581
26583 dwarf_stack_traces,
26584 "Omit CodeSourceMaps in precompiled snapshots and don't "
26585 "symbolize stack traces in the precompiled runtime.");
26586
SuspendStatePtr SuspendState::New(intptr_t frame_size,
                                 const Instance& function_data,
                                 Heap::Space space) {
  // Allocates a SuspendState whose payload can hold a copied frame of
  // |frame_size| bytes (plus growth slack), bound to |function_data|.
  ASSERT(frame_size >= 0);
  const intptr_t num_elements = frame_size + SuspendState::FrameSizeGrowthGap();
#if !defined(DART_PRECOMPILED_RUNTIME)
  // Include heap object alignment overhead into the frame capacity.
  const intptr_t instance_size = SuspendState::InstanceSize(num_elements);
  const intptr_t frame_capacity =
      instance_size - SuspendState::payload_offset();
  ASSERT(SuspendState::InstanceSize(frame_capacity) == instance_size);
  ASSERT(frame_size <= frame_capacity);
#endif
  auto raw = Object::Allocate<SuspendState>(space, num_elements);
  // Initialize the raw object fields without allowing a safepoint/GC.
  NoSafepointScope no_safepoint;
  ASSERT_EQUAL(raw->untag()->pc_, 0);  // Freshly allocated => not suspended.
#if !defined(DART_PRECOMPILED_RUNTIME)
  raw->untag()->frame_capacity_ = frame_capacity;
#endif
  raw->untag()->frame_size_ = frame_size;
  raw->untag()->set_function_data(function_data.ptr());
  return raw;
}
26610
SuspendStatePtr SuspendState::Clone(Thread* thread,
                                    const SuspendState& src,
                                    Heap::Space space) {
  // Copies a suspended frame, then fixes up the :suspend_state slot inside
  // the copied frame so it refers to the clone instead of |src|.
  ASSERT(src.pc() != 0);  // Only a suspended state can be cloned.
  Zone* zone = thread->zone();
  const intptr_t frame_size = src.frame_size();
  // NOTE(review): the declaration line of |dst| is missing from this
  // extracted listing (presumably a SuspendState::Handle).
      zone,
      SuspendState::New(frame_size, Instance::Handle(zone, src.function_data()),
                        space));
  dst.set_then_callback(Closure::Handle(zone, src.then_callback()));
  dst.set_error_callback(Closure::Handle(zone, src.error_callback()));
  {
    // Raw memory copy plus pointer fix-ups must not be interrupted by GC.
    NoSafepointScope no_safepoint;
    memmove(dst.payload(), src.payload(), frame_size);
    // Update value of :suspend_state variable in the copied frame.
    const uword fp = reinterpret_cast<uword>(dst.payload() + frame_size);
    // NOTE(review): part of this slot-address expression is missing from
    // the listing.
    *reinterpret_cast<ObjectPtr*>(
        kSuspendStateVarIndex))) = dst.ptr();
    dst.set_pc(src.pc());
    // Trigger write barrier if needed.
    if (dst.ptr()->IsOldObject()) {
      dst.untag()->EnsureInRememberedSet(thread);
    }
    if (thread->is_marking()) {
      thread->DeferredMarkingStackAddObject(dst.ptr());
    }
  }
  return dst.ptr();
}
26642
26643#if !defined(DART_PRECOMPILED_RUNTIME)
26644void SuspendState::set_frame_capacity(intptr_t frame_capcity) const {
26645 ASSERT(frame_capcity >= 0);
26646 StoreNonPointer(&untag()->frame_capacity_, frame_capcity);
26647}
26648#endif
26649
26650void SuspendState::set_frame_size(intptr_t frame_size) const {
26651 ASSERT(frame_size >= 0);
26652 StoreNonPointer(&untag()->frame_size_, frame_size);
26653}
26654
26655void SuspendState::set_pc(uword pc) const {
26656 StoreNonPointer(&untag()->pc_, pc);
26657}
26658
26659void SuspendState::set_function_data(const Instance& function_data) const {
26660 untag()->set_function_data(function_data.ptr());
26661}
26662
26663void SuspendState::set_then_callback(const Closure& then_callback) const {
26664 untag()->set_then_callback(then_callback.ptr());
26665}
26666
26667void SuspendState::set_error_callback(const Closure& error_callback) const {
26668 untag()->set_error_callback(error_callback.ptr());
26669}
26670
const char* SuspendState::ToCString() const {
  // SuspendStates carry no printable payload; just name the type.
  return "SuspendState";
}
26674
  // NOTE(review): the signature line of this method is missing from the
  // extracted listing (presumably CodePtr SuspendState::GetCodeObject()
  // const). Returns the Code object for the suspended pc.
  ASSERT(pc() != 0);
#if defined(DART_PRECOMPILED_RUNTIME)
  NoSafepointScope no_safepoint;
  // In AOT, map the pc back to its Code via the reverse-pc lookup table.
  CodePtr code = ReversePc::Lookup(IsolateGroup::Current(), pc(),
                                   /*is_return_address=*/true);
  ASSERT(code != Code::null());
  return code;
#else
  // In JIT, read the code object stored in the copied frame just past the
  // payload. NOTE(review): part of the address expression is missing from
  // this listing.
  ObjectPtr code = *(reinterpret_cast<ObjectPtr*>(
      untag()->payload() + untag()->frame_size_ +
  return Code::RawCast(code);
#endif  // defined(DART_PRECOMPILED_RUNTIME)
}
26690
26691void RegExp::set_pattern(const String& pattern) const {
26692 untag()->set_pattern(pattern.ptr());
26693}
26694
26696 bool sticky,
26697 const Function& value) const {
26698 if (sticky) {
26699 switch (cid) {
26700 case kOneByteStringCid:
26701 return untag()->set_one_byte_sticky(value.ptr());
26702 case kTwoByteStringCid:
26703 return untag()->set_two_byte_sticky(value.ptr());
26704 }
26705 } else {
26706 switch (cid) {
26707 case kOneByteStringCid:
26708 return untag()->set_one_byte(value.ptr());
26709 case kTwoByteStringCid:
26710 return untag()->set_two_byte(value.ptr());
26711 }
26712 }
26713}
26714
26715void RegExp::set_bytecode(bool is_one_byte,
26716 bool sticky,
26717 const TypedData& bytecode) const {
26718 if (sticky) {
26719 if (is_one_byte) {
26720 untag()->set_one_byte_sticky<std::memory_order_release>(bytecode.ptr());
26721 } else {
26722 untag()->set_two_byte_sticky<std::memory_order_release>(bytecode.ptr());
26723 }
26724 } else {
26725 if (is_one_byte) {
26726 untag()->set_one_byte<std::memory_order_release>(bytecode.ptr());
26727 } else {
26728 untag()->set_two_byte<std::memory_order_release>(bytecode.ptr());
26729 }
26730 }
26731}
26732
void RegExp::set_num_bracket_expressions(intptr_t value) const {
  // Plain integer field; RegExp::New initializes it to -1 ("unknown").
  untag()->num_bracket_expressions_ = value;
}
26736
26737void RegExp::set_capture_name_map(const Array& array) const {
26738 untag()->set_capture_name_map(array.ptr());
26739}
26740
26741RegExpPtr RegExp::New(Zone* zone, Heap::Space space) {
26742 const auto& result = RegExp::Handle(Object::Allocate<RegExp>(space));
26743 ASSERT_EQUAL(result.type(), kUninitialized);
26744 ASSERT(result.flags() == RegExpFlags());
26745 result.set_num_bracket_expressions(-1);
26746 result.set_num_registers(/*is_one_byte=*/false, -1);
26747 result.set_num_registers(/*is_one_byte=*/true, -1);
26748
26749 if (!FLAG_interpret_irregexp) {
26750 auto thread = Thread::Current();
26751 const Library& lib = Library::Handle(zone, Library::CoreLibrary());
26752 const Class& owner =
26753 Class::Handle(zone, lib.LookupClass(Symbols::RegExp()));
26754
26755 for (intptr_t cid = kOneByteStringCid; cid <= kTwoByteStringCid; cid++) {
26756 CreateSpecializedFunction(thread, zone, result, cid, /*sticky=*/false,
26757 owner);
26758 CreateSpecializedFunction(thread, zone, result, cid, /*sticky=*/true,
26759 owner);
26760 }
26761 }
26762 return result.ptr();
26763}
26764
26765const char* RegExpFlags::ToCString() const {
26766 switch (value_ & ~kGlobal) {
26767 case kIgnoreCase | kMultiLine | kDotAll | kUnicode:
26768 return "imsu";
26769 case kIgnoreCase | kMultiLine | kDotAll:
26770 return "ims";
26771 case kIgnoreCase | kMultiLine | kUnicode:
26772 return "imu";
26773 case kIgnoreCase | kUnicode | kDotAll:
26774 return "ius";
26775 case kMultiLine | kDotAll | kUnicode:
26776 return "msu";
26777 case kIgnoreCase | kMultiLine:
26778 return "im";
26779 case kIgnoreCase | kDotAll:
26780 return "is";
26781 case kIgnoreCase | kUnicode:
26782 return "iu";
26783 case kMultiLine | kDotAll:
26784 return "ms";
26785 case kMultiLine | kUnicode:
26786 return "mu";
26787 case kDotAll | kUnicode:
26788 return "su";
26789 case kIgnoreCase:
26790 return "i";
26791 case kMultiLine:
26792 return "m";
26793 case kDotAll:
26794 return "s";
26795 case kUnicode:
26796 return "u";
26797 default:
26798 break;
26799 }
26800 return "";
26801}
26802
26803bool RegExp::CanonicalizeEquals(const Instance& other) const {
26804 if (this->ptr() == other.ptr()) {
26805 return true; // "===".
26806 }
26807 if (other.IsNull() || !other.IsRegExp()) {
26808 return false;
26809 }
26810 const RegExp& other_js = RegExp::Cast(other);
26811 // Match the pattern.
26812 const String& str1 = String::Handle(pattern());
26813 const String& str2 = String::Handle(other_js.pattern());
26814 if (!str1.Equals(str2)) {
26815 return false;
26816 }
26817 // Match the flags.
26818 if (flags() != other_js.flags()) {
26819 return false;
26820 }
26821 return true;
26822}
26823
  // Combines the pattern's string hash with the flag bits.
  // Must agree with RegExpKey::Hash.
  return CombineHashes(String::Hash(pattern()), flags().value());
}
26828
26829const char* RegExp::ToCString() const {
26830 const String& str = String::Handle(pattern());
26831 return OS::SCreate(Thread::Current()->zone(), "RegExp: pattern=%s flags=%s",
26832 str.ToCString(), flags().ToCString());
26833}
26834
WeakPropertyPtr WeakProperty::New(Heap::Space space) {
  // The _WeakProperty class must have been registered in the object store
  // before instances can be allocated.
  ASSERT(IsolateGroup::Current()->object_store()->weak_property_class() !=
         Class::null());
  return Object::Allocate<WeakProperty>(space);
}
26840
const char* WeakProperty::ToCString() const {
  // Debug name; matches the core-library class _WeakProperty.
  return "_WeakProperty";
}
26844
WeakReferencePtr WeakReference::New(Heap::Space space) {
  // The _WeakReference class must have been registered in the object store
  // before instances can be allocated.
  ASSERT(IsolateGroup::Current()->object_store()->weak_reference_class() !=
         Class::null());
  return Object::Allocate<WeakReference>(space);
}
const char* WeakReference::ToCString() const {
  // Debug name with the instance's type arguments appended.
  // NOTE(review): the declaration line of |type_args| is missing from this
  // extracted listing.
  String& type_args_name = String::Handle(type_args.UserVisibleName());
  return OS::SCreate(Thread::Current()->zone(), "_WeakReference%s",
                     type_args_name.ToCString());
}
26856
const char* FinalizerBase::ToCString() const {
  // Fallback debug name for the shared finalizer base class.
  return "FinalizerBase";
}
26860
FinalizerPtr Finalizer::New(Heap::Space space) {
  // The finalizer class must be registered and allocate-finalized before
  // instances can be created.
  ASSERT(IsolateGroup::Current()->object_store()->finalizer_class() !=
         Class::null());
  ASSERT(
      Class::Handle(IsolateGroup::Current()->object_store()->finalizer_class())
          .EnsureIsAllocateFinalized(Thread::Current()) == Error::null());
  return Object::Allocate<Finalizer>(space);
}
26869
const char* Finalizer::ToCString() const {
  // Debug name with the instance's type arguments appended.
  // NOTE(review): the declaration line of |type_args| is missing from this
  // extracted listing.
  String& type_args_name = String::Handle(type_args.UserVisibleName());
  return OS::SCreate(Thread::Current()->zone(), "_FinalizerImpl%s",
                     type_args_name.ToCString());
}
26876
NativeFinalizerPtr NativeFinalizer::New(Heap::Space space) {
  // The native-finalizer class must be registered and allocate-finalized
  // before instances can be created.
  ASSERT(IsolateGroup::Current()->object_store()->native_finalizer_class() !=
         Class::null());
  // NOTE(review): the opening line of this ASSERT expression is missing
  // from the extracted listing.
      IsolateGroup::Current()->object_store()->native_finalizer_class())
          .EnsureIsAllocateFinalized(Thread::Current()) == Error::null());
  return Object::Allocate<NativeFinalizer>(space);
}
26885
// Runs the finalizer if not detached, detaches the value and set external size
// to 0.
// TODO(http://dartbug.com/47777): Can this be merged with
// RunNativeFinalizerCallback?
// NOTE(review): the signature line (method name and the FinalizerEntry
// parameter) is missing from this extracted listing.
                                  const char* trace_context) const {
  Thread* const thread = Thread::Current();
  Zone* const zone = thread->zone();
  IsolateGroup* const group = thread->isolate_group();
  const intptr_t external_size = entry.external_size();
  const auto& token_object = Object::Handle(zone, entry.token());
  const auto& callback_pointer = Pointer::Handle(zone, this->callback());
  const auto callback = reinterpret_cast<NativeFinalizer::Callback>(
      callback_pointer.NativeAddress());
  if (token_object.IsFinalizerEntry()) {
    // Detached from Dart code.
    ASSERT(token_object.ptr() == entry.ptr());
    ASSERT(external_size == 0);
    if (FLAG_trace_finalizers) {
      THR_Print(
          "%s: Not running native finalizer %p callback %p, "
          "detached\n",
          trace_context, ptr()->untag(), callback);
    }
  } else {
    // Still attached: the token is the Pointer carrying the native peer.
    const auto& token = Pointer::Cast(token_object);
    void* peer = reinterpret_cast<void*>(token.NativeAddress());
    if (FLAG_trace_finalizers) {
      THR_Print(
          "%s: Running native finalizer %p callback %p "
          "with token %p\n",
          trace_context, ptr()->untag(), callback, peer);
    }
    // Mark the entry detached by making its token refer to itself.
    entry.set_token(entry);
    callback(peer);
    if (external_size > 0) {
      ASSERT(!entry.value()->IsSmi());
      Heap::Space space =
          entry.value()->IsOldObject() ? Heap::kOld : Heap::kNew;
      if (FLAG_trace_finalizers) {
        THR_Print("%s: Clearing external size %" Pd " bytes in %s space\n",
                  trace_context, external_size, space == 0 ? "new" : "old");
      }
      // The external allocation no longer counts against the heap.
      group->heap()->FreedExternal(external_size, space);
      entry.set_external_size(0);
    }
  }
}
26934
26935const char* NativeFinalizer::ToCString() const {
26936 const auto& pointer = Pointer::Handle(callback());
26937 return OS::SCreate(Thread::Current()->zone(), "_NativeFinalizer %s",
26938 pointer.ToCString());
26939}
26940
26941FinalizerEntryPtr FinalizerEntry::New(const FinalizerBase& finalizer,
26942 Heap::Space space) {
26943 ASSERT(IsolateGroup::Current()->object_store()->finalizer_entry_class() !=
26944 Class::null());
26945 const auto& entry =
26946 FinalizerEntry::Handle(Object::Allocate<FinalizerEntry>(space));
26947 ASSERT_EQUAL(entry.external_size(), 0);
26948 entry.set_finalizer(finalizer);
26949 return entry.ptr();
26950}
26951
26953 untag()->set_finalizer(value.ptr());
26954}
26955
const char* FinalizerEntry::ToCString() const {
  // Entries carry no printable payload; just name the type.
  return "FinalizerEntry";
}
26959
26961 ASSERT(Object::Handle(referent()).IsAbstractType());
26962 return AbstractType::Cast(Object::Handle(referent())).ptr();
26963}
26964
26966 ASSERT(Object::Handle(referent()).IsClass());
26967 return Class::Cast(Object::Handle(referent())).ptr();
26968}
26969
26971 ASSERT(Object::Handle(referent()).IsField());
26972 return Field::Cast(Object::Handle(referent())).ptr();
26973}
26974
26976 ASSERT(Object::Handle(referent()).IsFunction());
26977 return Function::Cast(Object::Handle(referent())).ptr();
26978}
26979
26981 ASSERT(Object::Handle(referent()).IsFunctionType());
26982 return FunctionType::Cast(Object::Handle(referent())).ptr();
26983}
26984
26986 ASSERT(Object::Handle(referent()).IsLibrary());
26987 return Library::Cast(Object::Handle(referent())).ptr();
26988}
26989
26991 ASSERT(Object::Handle(referent()).IsTypeParameter());
26992 return TypeParameter::Cast(Object::Handle(referent())).ptr();
26993}
26994
26995MirrorReferencePtr MirrorReference::New(const Object& referent,
26996 Heap::Space space) {
26997 const auto& result =
26998 MirrorReference::Handle(Object::Allocate<MirrorReference>(space));
26999 result.set_referent(referent);
27000 return result.ptr();
27001}
27002
const char* MirrorReference::ToCString() const {
  // Debug name; matches the core-library class _MirrorReference.
  return "_MirrorReference";
}
27006
UserTagPtr UserTag::MakeActive() const {
  // Makes this tag the isolate's current tag; returns the previously
  // active tag.
  Isolate* isolate = Isolate::Current();
  ASSERT(isolate != nullptr);
  UserTag& old = UserTag::Handle(isolate->current_tag());
  isolate->set_current_tag(*this);

#if !defined(PRODUCT)
  // Notify VM service clients that the current UserTag has changed.
  if (Service::profiler_stream.enabled()) {
    // NOTE(review): the construction line of |event| is missing from this
    // extracted listing.
    String& name = String::Handle(old.label());
    event.set_previous_tag(name.ToCString());
    name ^= label();
    event.set_updated_tag(name.ToCString());
    // NOTE(review): the event-dispatch line is missing from this listing.
  }
#endif  // !defined(PRODUCT)

  return old.ptr();
}
27027
UserTagPtr UserTag::New(const String& label, Heap::Space space) {
  // Returns the canonical UserTag for |label|, creating and registering it
  // in the isolate's tag table when no tag with that label exists yet.
  Thread* thread = Thread::Current();
  Isolate* isolate = thread->isolate();
  // NOTE(review): a line is missing here in this extracted listing.
  // Canonicalize by name.
  UserTag& result = UserTag::Handle(FindTagInIsolate(thread, label));
  if (!result.IsNull()) {
    // Tag already exists, return existing instance.
    return result.ptr();
  }
  if (TagTableIsFull(thread)) {
    // NOTE(review): the error-string construction line is missing here.
        "UserTag instance limit (%" Pd ") reached.", UserTags::kMaxUserTags));
    const Array& args = Array::Handle(Array::New(1));
    args.SetAt(0, error);
    // NOTE(review): the throw line is missing from this listing.
  }
  // No tag with label exists, create and register with isolate tag table.
  result = Object::Allocate<UserTag>(space);
  result.set_label(label);
  result.set_streamable(UserTags::IsTagNameStreamable(label.ToCString()));
  AddTagToIsolate(thread, result);
  return result.ptr();
}
27052
27054 Thread* thread = Thread::Current();
27055 Zone* zone = thread->zone();
27056 Isolate* isolate = thread->isolate();
27057 ASSERT(isolate != nullptr);
27058 if (isolate->default_tag() != UserTag::null()) {
27059 // Already created.
27060 return isolate->default_tag();
27061 }
27062 // Create default tag.
27063 const UserTag& result =
27064 UserTag::Handle(zone, UserTag::New(Symbols::Default()));
27066 isolate->set_default_tag(result);
27067 return result.ptr();
27068}
27069
27071 Thread* thread,
27072 const String& label) {
27073 Zone* zone = thread->zone();
27074 if (isolate->tag_table() == GrowableObjectArray::null()) {
27075 return UserTag::null();
27076 }
27077 const GrowableObjectArray& tag_table =
27078 GrowableObjectArray::Handle(zone, isolate->tag_table());
27079 UserTag& other = UserTag::Handle(zone);
27080 String& tag_label = String::Handle(zone);
27081 for (intptr_t i = 0; i < tag_table.Length(); i++) {
27082 other ^= tag_table.At(i);
27083 ASSERT(!other.IsNull());
27084 tag_label = other.label();
27085 ASSERT(!tag_label.IsNull());
27086 if (tag_label.Equals(label)) {
27087 return other.ptr();
27088 }
27089 }
27090 return UserTag::null();
27091}
27092
27093UserTagPtr UserTag::FindTagInIsolate(Thread* thread, const String& label) {
27094 Isolate* isolate = thread->isolate();
27095 return FindTagInIsolate(isolate, thread, label);
27096}
27097
// UserTag::AddTagToIsolate: assigns |tag| the next tag id (table length +
// kUserTagIdOffset) and appends it to the isolate's tag table.
// NOTE(review): lines 27117-27118 were lost in extraction — presumably
// ASSERTs bounding tag_id.
27098void UserTag::AddTagToIsolate(Thread* thread, const UserTag& tag) {
 27099 Isolate* isolate = thread->isolate();
 27100 Zone* zone = thread->zone();
 27101 ASSERT(isolate->tag_table() != GrowableObjectArray::null());
 27102 const GrowableObjectArray& tag_table =
 27103 GrowableObjectArray::Handle(zone, isolate->tag_table());
 27104 ASSERT(!TagTableIsFull(thread));
 27105#if defined(DEBUG)
 27106 // Verify that no existing tag has the same tag id.
 27107 UserTag& other = UserTag::Handle(thread->zone());
 27108 for (intptr_t i = 0; i < tag_table.Length(); i++) {
 27109 other ^= tag_table.At(i);
 27110 ASSERT(!other.IsNull());
 27111 ASSERT(tag.tag() != other.tag());
 27112 }
 27113#endif
 27114 // Generate the UserTag tag id by taking the length of the isolate's
 27115 // tag table + kUserTagIdOffset.
 27116 uword tag_id = tag_table.Length() + UserTags::kUserTagIdOffset;
 27119 tag.set_tag(tag_id);
 27120 tag_table.Add(tag);
 27121}
 27122
// UserTag::TagTableIsFull (declaration line lost in extraction): true when
// the isolate's tag table has reached UserTags::kMaxUserTags entries.
 27124 Isolate* isolate = thread->isolate();
 27126 const GrowableObjectArray& tag_table =
 27127 GrowableObjectArray::Handle(thread->zone(), isolate->tag_table());
 27128 ASSERT(tag_table.Length() <= UserTags::kMaxUserTags);
 27129 return tag_table.Length() == UserTags::kMaxUserTags;
 27130}
 27131
// UserTag::FindTagById: linear scan of |isolate|'s tag table for the tag with
// numeric id |tag_id|; returns null when absent.
27132UserTagPtr UserTag::FindTagById(const Isolate* isolate, uword tag_id) {
 27133 ASSERT(isolate != nullptr);
 27134 Thread* thread = Thread::Current();
 27135 Zone* zone = thread->zone();
 27137 const GrowableObjectArray& tag_table =
 27138 GrowableObjectArray::Handle(zone, isolate->tag_table());
 27139 UserTag& tag = UserTag::Handle(zone);
 27140 for (intptr_t i = 0; i < tag_table.Length(); i++) {
 27141 tag ^= tag_table.At(i);
 27142 if (tag.tag() == tag_id) {
 27143 return tag.ptr();
 27144 }
 27145 }
 27146 return UserTag::null();
 27147}
27148
27149const char* UserTag::ToCString() const {
27150 const String& tag_label = String::Handle(label());
27151 return tag_label.ToCString();
27152}
27153
27154void DumpTypeTable(Isolate* isolate) {
27155 OS::PrintErr("canonical types:\n");
27156 CanonicalTypeSet table(isolate->group()->object_store()->canonical_types());
27157 table.Dump();
27158 table.Release();
27159}
27160
// Debug dumps of the canonical function-type / record-type / type-parameter /
// type-arguments tables.  NOTE(review): each function's signature line and
// its table-constructor line were lost in extraction; the bodies below are
// incomplete as shown.
 27162 OS::PrintErr("canonical function types:\n");
 27164 isolate->group()->object_store()->canonical_function_types());
 27165 table.Dump();
 27166 table.Release();
 27167}
 27168
 27170 OS::PrintErr("canonical record types:\n");
 27172 isolate->group()->object_store()->canonical_record_types());
 27173 table.Dump();
 27174 table.Release();
 27175}
 27176
 27178 OS::PrintErr("canonical type parameters (cloned from declarations):\n");
 27180 isolate->group()->object_store()->canonical_type_parameters());
 27181 table.Dump();
 27182 table.Release();
 27183}
 27184
 27186 OS::PrintErr("canonical type arguments:\n");
 27188 isolate->group()->object_store()->canonical_type_arguments());
 27189 table.Dump();
 27190 table.Release();
 27191}
27192
// FindEntryPointPragma (declaration lost in extraction): scans |metadata| for
// a `pragma` instance named 'vm:entry-point' and classifies its options value
// (true/null, 'get', 'set', 'call') into an EntryPointPragma kind.
// NOTE(review): the `return` statements for each kind (lines 27210, 27214,
// 27217, 27220, 27223) were lost in extraction.
 27194 const Array& metadata,
 27195 Field* reusable_field_handle,
 27196 Object* pragma) {
 27197 for (intptr_t i = 0; i < metadata.Length(); i++) {
 27198 *pragma = metadata.At(i);
 27199 if (pragma->clazz() != IG->object_store()->pragma_class()) {
 27200 continue;
 27201 }
 27202 *reusable_field_handle = IG->object_store()->pragma_name();
 27203 if (Instance::Cast(*pragma).GetField(*reusable_field_handle) !=
 27204 Symbols::vm_entry_point().ptr()) {
 27205 continue;
 27206 }
 27207 *reusable_field_handle = IG->object_store()->pragma_options();
 27208 *pragma = Instance::Cast(*pragma).GetField(*reusable_field_handle);
 27209 if (pragma->ptr() == Bool::null() || pragma->ptr() == Bool::True().ptr()) {
 27211 break;
 27212 }
 27213 if (pragma->ptr() == Symbols::get().ptr()) {
 27215 }
 27216 if (pragma->ptr() == Symbols::set().ptr()) {
 27218 }
 27219 if (pragma->ptr() == Symbols::call().ptr()) {
 27221 }
 27222 }
 27224}
 27225
// dart::VerifyEntryPoint (signature lines lost in extraction): checks that
// |member| of |lib| carries an entry-point pragma matching kAlways or one of
// |allowed_kinds|.  In the AOT runtime, annotations are discarded, so
// has_pragma() is used as an approximation instead.
 27228 const Library& lib,
 27229 const Object& member,
 27230 const Object& annotated,
 27231 std::initializer_list<EntryPointPragma> allowed_kinds) {
 27232#if defined(DART_PRECOMPILED_RUNTIME)
 27233 // Annotations are discarded in the AOT snapshot, so we can't determine
 27234 // precisely if this member was marked as an entry-point. Instead, we use
 27235 // "has_pragma()" as a proxy, since that bit is usually retained.
 27236 bool is_marked_entrypoint = true;
 27237 if (annotated.IsClass() && !Class::Cast(annotated).has_pragma()) {
 27238 is_marked_entrypoint = false;
 27239 } else if (annotated.IsField() && !Field::Cast(annotated).has_pragma()) {
 27240 is_marked_entrypoint = false;
 27241 } else if (annotated.IsFunction() &&
 27242 !Function::Cast(annotated).has_pragma()) {
 27243 is_marked_entrypoint = false;
 27244 }
 27245#else
 27246 Object& metadata = Object::Handle(Object::empty_array().ptr());
 27247 if (!annotated.IsNull()) {
 27248 metadata = lib.GetMetadata(annotated);
 27249 }
 27250 if (metadata.IsError()) return Error::RawCast(metadata.ptr());
 27251 ASSERT(!metadata.IsNull() && metadata.IsArray());
 27252 EntryPointPragma pragma =
 27253 FindEntryPointPragma(IsolateGroup::Current(), Array::Cast(metadata),
 27255 bool is_marked_entrypoint = pragma == EntryPointPragma::kAlways;
 27256 if (!is_marked_entrypoint) {
 27257 for (const auto allowed_kind : allowed_kinds) {
 27258 if (pragma == allowed_kind) {
 27259 is_marked_entrypoint = true;
 27260 break;
 27261 }
 27262 }
 27263 }
 27264#endif
 27265 if (!is_marked_entrypoint) {
 27266 return EntryPointMemberInvocationError(member);
 27267 }
 27268 return Error::null();
 27269}
27270
27272ErrorPtr EntryPointFieldInvocationError(const String& getter_name) {
27273 if (!FLAG_verify_entry_points) return Error::null();
27274
27275 char const* error = OS::SCreate(
27276 Thread::Current()->zone(),
27277 "ERROR: Entry-points do not allow invoking fields "
27278 "(failure to resolve '%s')\n"
27279 "ERROR: See "
27280 "https://github.com/dart-lang/sdk/blob/master/runtime/docs/compiler/"
27281 "aot/entry_point_pragma.md\n",
27282 getter_name.ToCString());
27283 OS::PrintErr("%s", error);
27284 return ApiError::New(String::Handle(String::New(error)));
27285}
27286
// EntryPointMemberInvocationError (signature lines lost in extraction):
// reports C-API access to |member| when it is not marked as an entry point.
// Prints a warning and returns null unless --verify-entry-points is set, in
// which case it prints and returns an ApiError.
 27289 const char* member_cstring =
 27290 member.IsFunction()
 27291 ? OS::SCreate(
 27292 Thread::Current()->zone(), "%s (kind %s)",
 27293 Function::Cast(member).ToLibNamePrefixedQualifiedCString(),
 27294 Function::KindToCString(Function::Cast(member).kind()))
 27295 : member.ToCString();
 27296 if (!FLAG_verify_entry_points) {
 27297 // Print a warning, but do not return an error.
 27298 char const* warning = OS::SCreate(
 27299 Thread::Current()->zone(),
 27300 "WARNING: '%s' is accessed through Dart C API without being marked as "
 27301 "an entry point; its tree-shaken signature cannot be verified.\n"
 27302 "WARNING: See "
 27303 "https://github.com/dart-lang/sdk/blob/master/runtime/docs/compiler/"
 27304 "aot/entry_point_pragma.md\n",
 27305 member_cstring);
 27306 OS::PrintErr("%s", warning);
 27307 return Error::null();
 27308 }
 27309 char const* error = OS::SCreate(
 27310 Thread::Current()->zone(),
 27311 "ERROR: It is illegal to access '%s' through Dart C API.\n"
 27312 "ERROR: See "
 27313 "https://github.com/dart-lang/sdk/blob/master/runtime/docs/compiler/"
 27314 "aot/entry_point_pragma.md\n",
 27315 member_cstring);
 27316 OS::PrintErr("%s", error);
 27317 return ApiError::New(String::Handle(String::New(error)));
 27318}
27319
27320#if !defined(DART_PRECOMPILED_RUNTIME)
27321// Note: see also [NeedsDynamicInvocationForwarder] which ensures that we
27322// never land in a function which expects parameters in registers from a
27323// dynamic call site.
// NOTE(review): the function's signature line was lost in extraction.  From
// the body: it returns how many leading parameters may use the register
// calling convention (0 disables it), restricted to AOT + --use-register-cc
// on x64/arm64/arm, and 0 for generic functions and VM-dispatch kinds.
27325#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_ARM64) || \
 27326 defined(TARGET_ARCH_ARM)
 27327 if (!FLAG_precompiled_mode) {
 27328 return 0;
 27329 }
 27330
 27331 if (!FLAG_use_register_cc) {
 27332 return 0;
 27333 }
 27334
 27335 if (IsGeneric()) {
 27336 return 0;
 27337 }
 27338
 27339 switch (kind()) {
 27340 case UntaggedFunction::kClosureFunction:
 27341 case UntaggedFunction::kImplicitClosureFunction:
 27342 case UntaggedFunction::kNoSuchMethodDispatcher:
 27343 case UntaggedFunction::kInvokeFieldDispatcher:
 27344 case UntaggedFunction::kDynamicInvocationForwarder:
 27345 case UntaggedFunction::kMethodExtractor:
 27346 case UntaggedFunction::kFfiTrampoline:
 27347 case UntaggedFunction::kFieldInitializer:
 27348 case UntaggedFunction::kIrregexpFunction:
 27349 return 0;
 27350
 27351 default:
 27352 break;
 27353 }
 27354
 27355 const auto unboxing_metadata = kernel::UnboxingInfoMetadataOf(*this, zone);
 27356 if (unboxing_metadata != nullptr &&
 27357 unboxing_metadata->must_use_stack_calling_convention) {
 27358 return 0;
 27359 }
 27360
 27361 // Getters and setters have fixed signatures.
 27362 switch (kind()) {
 27363 case UntaggedFunction::kGetterFunction:
 27364 case UntaggedFunction::kImplicitGetter:
 27365 case UntaggedFunction::kSetterFunction:
 27366 case UntaggedFunction::kImplicitSetter:
 27367 return num_fixed_parameters();
 27368
 27369 default:
 27370 break;
 27371 }
 27372
 27373 if (unboxing_metadata != nullptr &&
 27374 unboxing_metadata->has_overrides_with_less_direct_parameters) {
 27375 // Receiver (`this`) can always be passed in the register because it is
 27376 // never an optional or named parameter.
 27377 return unboxing_metadata->unboxed_args_info.length() + 1;
 27378 }
 27379
 27380 return num_fixed_parameters();
 27381#endif
 27382 return 0;
 27383}
27384#endif // !defined(DART_PRECOMPILED_RUNTIME)
27385
// Function::VerifyEntryPoint (signature line lost in extraction): dispatches
// on the function kind to dart::VerifyEntryPoint with the appropriate
// annotated object (the function itself, or the backing field for implicit
// accessors).  NOTE(review): the allowed-kind argument lines (27396,
// 27399/27401, 27405, 27409) were lost in extraction.
 27387 if (!FLAG_verify_entry_points) return Error::null();
 27388
 27389 const Class& cls = Class::Handle(Owner());
 27390 const Library& lib = Library::Handle(cls.library());
 27391 switch (kind()) {
 27392 case UntaggedFunction::kRegularFunction:
 27393 case UntaggedFunction::kSetterFunction:
 27394 case UntaggedFunction::kConstructor:
 27395 return dart::VerifyEntryPoint(lib, *this, *this,
 27397 break;
 27398 case UntaggedFunction::kGetterFunction:
 27400 lib, *this, *this,
 27402 break;
 27403 case UntaggedFunction::kImplicitGetter:
 27404 return dart::VerifyEntryPoint(lib, *this, Field::Handle(accessor_field()),
 27406 break;
 27407 case UntaggedFunction::kImplicitSetter:
 27408 return dart::VerifyEntryPoint(lib, *this, Field::Handle(accessor_field()),
 27410 case UntaggedFunction::kMethodExtractor:
 27411 return Function::Handle(extracted_method_closure())
 27412 .VerifyClosurizedEntryPoint();
 27413 break;
 27414 default:
 27415 return dart::VerifyEntryPoint(lib, *this, Object::Handle(), {});
 27416 break;
 27417 }
 27418}
 27419
// Function::VerifyClosurizedEntryPoint (signature lost in extraction):
// verifies closurized access; implicit closure functions check their parent
// function instead.  NOTE(review): allowed-kind argument lines (27428,
// 27432) were lost in extraction.
 27421 if (!FLAG_verify_entry_points) return Error::null();
 27422
 27423 const Class& cls = Class::Handle(Owner());
 27424 const Library& lib = Library::Handle(cls.library());
 27425 switch (kind()) {
 27426 case UntaggedFunction::kRegularFunction:
 27427 return dart::VerifyEntryPoint(lib, *this, *this,
 27429 case UntaggedFunction::kImplicitClosureFunction: {
 27430 const Function& parent = Function::Handle(parent_function());
 27431 return dart::VerifyEntryPoint(lib, parent, parent,
 27433 }
 27434 default:
 27435 UNREACHABLE();
 27436 }
 27437}
 27438
// Field::VerifyEntryPoint (signature lost in extraction): verifies this field
// against the single allowed |pragma| kind.
 27440 if (!FLAG_verify_entry_points) return Error::null();
 27441 const Class& cls = Class::Handle(Owner());
 27442 const Library& lib = Library::Handle(cls.library());
 27443 return dart::VerifyEntryPoint(lib, *this, *this, {pragma});
 27444}
27445
27446ErrorPtr Class::VerifyEntryPoint() const {
27447 if (!FLAG_verify_entry_points) return Error::null();
27448 const Library& lib = Library::Handle(library());
27449 if (!lib.IsNull()) {
27450 return dart::VerifyEntryPoint(lib, *this, *this, {});
27451 } else {
27452 return Error::null();
27453 }
27454}
27455
27456AbstractTypePtr RecordType::FieldTypeAt(intptr_t index) const {
27457 const Array& field_types = Array::Handle(untag()->field_types());
27458 return AbstractType::RawCast(field_types.At(index));
27459}
27460
27461void RecordType::SetFieldTypeAt(intptr_t index,
27462 const AbstractType& value) const {
27463 ASSERT(!value.IsNull());
27464 const Array& field_types = Array::Handle(untag()->field_types());
27465 field_types.SetAt(index, value);
27466}
27467
27468void RecordType::set_field_types(const Array& value) const {
27469 ASSERT(!value.IsNull());
27470 untag()->set_field_types(value.ptr());
27471}
27472
27473void RecordType::set_shape(RecordShape shape) const {
27474 untag()->set_shape(shape.AsSmi());
27475}
27476
27477ArrayPtr RecordType::GetFieldNames(Thread* thread) const {
27478 return shape().GetFieldNames(thread);
27479}
27480
// RecordType::Print (signature line and the AbstractType handle declaration
// at 27489 were lost in extraction): renders "(T1, T2, {T3 name})" plus a
// nullability suffix into |printer|.
 27482 BaseTextBuffer* printer) const {
 27483 if (IsNull()) {
 27484 printer->AddString("null");
 27485 return;
 27486 }
 27487 Thread* thread = Thread::Current();
 27488 Zone* zone = thread->zone();
 27490 String& name = String::Handle(zone);
 27491 const intptr_t num_fields = NumFields();
 27492 const Array& field_names = Array::Handle(zone, GetFieldNames(thread));
 27493 const intptr_t num_positional_fields = num_fields - field_names.Length();
 27494 printer->AddString("(");
 27495 for (intptr_t i = 0; i < num_fields; ++i) {
 27496 if (i != 0) {
 27497 printer->AddString(", ");
 27498 }
 27499 if (i == num_positional_fields) {
 27500 printer->AddString("{");
 27501 }
 27502 type = FieldTypeAt(i);
 27503 type.PrintName(name_visibility, printer);
 27504 if (i >= num_positional_fields) {
 27505 printer->AddString(" ");
 27506 name ^= field_names.At(i - num_positional_fields);
 27507 printer->AddString(name.ToCString());
 27508 }
 27509 }
 27510 if (num_positional_fields < num_fields) {
 27511 printer->AddString("}");
 27512 }
 27513 printer->AddString(")");
 27514 printer->AddString(NullabilitySuffix(name_visibility));
 27515}
27516
27517const char* RecordType::ToCString() const {
27518 Zone* zone = Thread::Current()->zone();
27519 ZoneTextBuffer printer(zone);
27520 Print(kInternalName, &printer);
27521 return printer.buffer();
27522}
27523
// RecordType::IsInstantiated (signature line and type-handle declaration lost
// in extraction): a record type is instantiated iff all its field types are.
 27525 intptr_t num_free_fun_type_params) const {
 27527 const intptr_t num_fields = NumFields();
 27528 for (intptr_t i = 0; i < num_fields; ++i) {
 27529 type = FieldTypeAt(i);
 27530 if (!type.IsInstantiated(genericity, num_free_fun_type_params)) {
 27531 return false;
 27532 }
 27533 }
 27534 return true;
 27535}
27536
27537RecordTypePtr RecordType::New(Heap::Space space) {
27538 return Object::Allocate<RecordType>(space);
27539}
27540
// RecordType::New(shape, field_types, nullability, space): allocates and
// initializes a new, not-yet-finalized record type.  NOTE(review): the
// result-handle allocation and the type-testing-stub argument lines (27546,
// 27552, 27554) were lost in extraction.
27541RecordTypePtr RecordType::New(RecordShape shape,
 27542 const Array& field_types,
 27543 Nullability nullability,
 27544 Heap::Space space) {
 27545 Zone* Z = Thread::Current()->zone();
 27547 result.set_shape(shape);
 27548 result.set_field_types(field_types);
 27549 result.SetHash(0);
 27550 result.set_flags(0);
 27551 result.set_nullability(nullability);
 27553 result.InitializeTypeTestingStubNonAtomic(
 27555 return result.ptr();
 27556}
 27557
// RecordType::ToNullability (signature lost in extraction): returns this type
// with the requested nullability |value|, cloning and — when this type is
// finalized/canonical — re-finalizing/canonicalizing the clone.
 27559 Heap::Space space) const {
 27560 if (nullability() == value) {
 27561 return ptr();
 27562 }
 27563 // Clone record type and set new nullability.
 27564 // Always cloning in old space and removing space parameter would not satisfy
 27565 // currently existing requests for type instantiation in new space.
 27567 Zone* Z = T->zone();
 27569 Z,
 27570 RecordType::New(shape(), Array::Handle(Z, field_types()), value, space));
 27571 if (IsFinalized()) {
 27572 type.SetIsFinalized();
 27573 if (IsCanonical()) {
 27574 type ^= type.Canonicalize(T);
 27575 }
 27576 }
 27577 return RecordType::Cast(type).ptr();
 27578}
 27579
// RecordType::IsEquivalent (signature lost in extraction): equivalent iff the
// other instance is a record type with the same shape, compatible
// nullability, and pairwise-equivalent field types.
 27581 const Instance& other,
 27582 TypeEquality kind,
 27583 FunctionTypeMapping* function_type_equivalence) const {
 27584 ASSERT(!IsNull());
 27585 if (ptr() == other.ptr()) {
 27586 return true;
 27587 }
 27588 if (!other.IsRecordType()) {
 27589 return false;
 27590 }
 27591 const RecordType& other_type = RecordType::Cast(other);
 27592 // Equal record types must have the same shape
 27593 // (number of fields and named fields).
 27594 if (shape() != other_type.shape()) {
 27595 return false;
 27596 }
 27597 Thread* thread = Thread::Current();
 27598 Zone* zone = thread->zone();
 27599 if (!IsNullabilityEquivalent(thread, other_type, kind)) {
 27600 return false;
 27601 }
 27602 // Equal record types must have equal field types.
 27603 AbstractType& field_type = Type::Handle(zone);
 27604 AbstractType& other_field_type = Type::Handle(zone);
 27605 const intptr_t num_fields = NumFields();
 27606 for (intptr_t i = 0; i < num_fields; ++i) {
 27607 field_type = FieldTypeAt(i);
 27608 other_field_type = other_type.FieldTypeAt(i);
 27609 if (!field_type.IsEquivalent(other_field_type, kind,
 27610 function_type_equivalence)) {
 27611 return false;
 27612 }
 27613 }
 27614 return true;
 27615}
 27616
// RecordType::ComputeHash (signature and handle-declaration/finalize lines
// lost in extraction): combines nullability (legacy folded into
// non-nullable), shape, and field-type hashes, then caches via SetHash.
 27618 ASSERT(IsFinalized());
 27619 uint32_t result = 0;
 27620 // A legacy type should have the same hash as its non-nullable version to be
 27621 // consistent with the definition of type equality in Dart code.
 27622 Nullability type_nullability = nullability();
 27623 if (type_nullability == Nullability::kLegacy) {
 27624 type_nullability = Nullability::kNonNullable;
 27625 }
 27626 result = CombineHashes(result, static_cast<uint32_t>(type_nullability));
 27627 result = CombineHashes(result, static_cast<uint32_t>(shape().AsInt()));
 27629 const intptr_t num_fields = NumFields();
 27630 for (intptr_t i = 0; i < num_fields; ++i) {
 27631 type = FieldTypeAt(i);
 27632 result = CombineHashes(result, type.Hash());
 27633 }
 27635 SetHash(result);
 27636 return result;
 27637}
27638
// RecordType::Canonicalize: returns the canonical representative of this
// record type.  Field types are canonicalized first; insertion into the
// isolate group's canonical-record-type table happens under the
// type-canonicalization mutex, with a re-check because canonicalizing field
// types may have inserted this type already.  NOTE(review): the AbstractType
// handle declaration (line 27642) was lost in extraction.
27639AbstractTypePtr RecordType::Canonicalize(Thread* thread) const {
 27640 ASSERT(IsFinalized());
 27641 Zone* zone = thread->zone();
 27643 if (IsCanonical()) {
 27644#ifdef DEBUG
 27645 // Verify that all fields are allocated in old space and are canonical.
 27646 ASSERT(Array::Handle(zone, field_types()).IsOld());
 27647 const intptr_t num_fields = NumFields();
 27648 for (intptr_t i = 0; i < num_fields; ++i) {
 27649 type = FieldTypeAt(i);
 27650 ASSERT(type.IsOld());
 27651 ASSERT(type.IsCanonical());
 27652 }
 27653#endif
 27654 return ptr();
 27655 }
 27656 auto isolate_group = thread->isolate_group();
 27657 ObjectStore* object_store = isolate_group->object_store();
 27658 RecordType& rec = RecordType::Handle(zone);
 27659 {
 27660 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
 27661 CanonicalRecordTypeSet table(zone, object_store->canonical_record_types());
 27662 rec ^= table.GetOrNull(CanonicalRecordTypeKey(*this));
 27663 ASSERT(object_store->canonical_record_types() == table.Release().ptr());
 27664 }
 27665 if (rec.IsNull()) {
 27666 ASSERT(Array::Handle(zone, field_types()).IsOld());
 27667 const intptr_t num_fields = NumFields();
 27668 for (intptr_t i = 0; i < num_fields; ++i) {
 27669 type = FieldTypeAt(i);
 27670 if (!type.IsCanonical()) {
 27671 type = type.Canonicalize(thread);
 27672 SetFieldTypeAt(i, type);
 27673 }
 27674 }
 27675 // Check to see if the record type got added to canonical table as part
 27676 // of the canonicalization of its signature types.
 27677 SafepointMutexLocker ml(isolate_group->type_canonicalization_mutex());
 27678 CanonicalRecordTypeSet table(zone, object_store->canonical_record_types());
 27679 rec ^= table.GetOrNull(CanonicalRecordTypeKey(*this));
 27680 if (rec.IsNull()) {
 27681 // Add this record type into the canonical table of record types.
 27682 if (this->IsNew()) {
 27683 rec ^= Object::Clone(*this, Heap::kOld);
 27684 } else {
 27685 rec = this->ptr();
 27686 }
 27687 ASSERT(rec.IsOld());
 27688 rec.SetCanonical(); // Mark object as being canonical.
 27689 bool present = table.Insert(rec);
 27690 ASSERT(!present);
 27691 }
 27692 object_store->set_canonical_record_types(table.Release());
 27693 }
 27694 return rec.ptr();
 27695}
 27696
// RecordType::EnumerateURIs (signature and handle declaration lost in
// extraction): forwards the URI enumeration to every field type.
 27699 const intptr_t num_fields = NumFields();
 27700 for (intptr_t i = 0; i < num_fields; ++i) {
 27701 type = FieldTypeAt(i);
 27702 type.EnumerateURIs(uris);
 27703 }
 27704}
 27705
// RecordType::PrintName (signature lost in extraction): delegates to Print.
 27707 BaseTextBuffer* printer) const {
 27708 RecordType::Cast(*this).Print(name_visibility, printer);
 27709}
27710
// RecordType::InstantiateFrom (signature line lost in extraction):
// instantiates every non-instantiated field type; a null field instantiation
// (dead code in the optimizing compiler) propagates as a null result.  The
// returned type is finalized but deliberately not canonicalized.
 27712 const TypeArguments& instantiator_type_arguments,
 27713 const TypeArguments& function_type_arguments,
 27714 intptr_t num_free_fun_type_params,
 27715 Heap::Space space,
 27716 FunctionTypeMapping* function_type_mapping,
 27717 intptr_t num_parent_type_args_adjustment) const {
 27718 ASSERT(IsFinalized());
 27719 Zone* zone = Thread::Current()->zone();
 27720
 27721 const intptr_t num_fields = NumFields();
 27722 const Array& old_field_types = Array::Handle(zone, field_types());
 27723 const Array& new_field_types =
 27724 Array::Handle(zone, Array::New(num_fields, space));
 27726 for (intptr_t i = 0; i < num_fields; ++i) {
 27727 type ^= old_field_types.At(i);
 27728 if (!type.IsInstantiated()) {
 27729 type = type.InstantiateFrom(
 27730 instantiator_type_arguments, function_type_arguments,
 27731 num_free_fun_type_params, space, function_type_mapping,
 27732 num_parent_type_args_adjustment);
 27733 // A returned null type indicates a failed instantiation in dead code that
 27734 // must be propagated up to the caller, the optimizing compiler.
 27735 if (type.IsNull()) {
 27736 return RecordType::null();
 27737 }
 27738 }
 27739 new_field_types.SetAt(i, type);
 27740 }
 27741
 27742 const auto& rec = RecordType::Handle(
 27743 zone, RecordType::New(shape(), new_field_types, nullability(), space));
 27744
 27745 rec.SetIsFinalized();
 27746
 27747 // Canonicalization is not part of instantiation.
 27748 return rec.ptr();
 27749}
 27750
// RecordType::UpdateFunctionTypes (signature lost in extraction): rewrites
// field types lazily — a copy of the field-type array is allocated only when
// some field actually changes; otherwise this type is returned unchanged.
 27752 intptr_t num_parent_type_args_adjustment,
 27753 intptr_t num_free_fun_type_params,
 27754 Heap::Space space,
 27755 FunctionTypeMapping* function_type_mapping) const {
 27756 ASSERT(IsFinalized());
 27757 ASSERT(num_parent_type_args_adjustment >= 0);
 27758 Zone* zone = Thread::Current()->zone();
 27759 const auto& types = Array::Handle(zone, field_types());
 27760 Array* updated_types = nullptr;
 27761 auto& type = AbstractType::Handle(zone);
 27762 auto& updated = AbstractType::Handle(zone);
 27763 for (intptr_t i = 0, n = NumFields(); i < n; ++i) {
 27764 type ^= types.At(i);
 27765 updated = type.UpdateFunctionTypes(num_parent_type_args_adjustment,
 27766 num_free_fun_type_params, space,
 27767 function_type_mapping);
 27768 if (type.ptr() != updated.ptr()) {
 27769 if (updated_types == nullptr) {
 27770 updated_types = &Array::Handle(zone, Array::New(n, space));
 27771 for (intptr_t j = 0; j < i; ++j) {
 27772 type ^= types.At(j);
 27773 updated_types->SetAt(j, type);
 27774 }
 27775 }
 27776 }
 27777 if (updated_types != nullptr) {
 27778 updated_types->SetAt(i, updated);
 27779 }
 27780 }
 27781 if (updated_types == nullptr) {
 27782 return ptr();
 27783 }
 27784 const auto& new_rt = RecordType::Handle(
 27785 zone, RecordType::New(shape(), *updated_types, nullability(), space));
 27786 new_rt.SetIsFinalized();
 27787 return new_rt.ptr();
 27788}
 27789
// RecordType::IsSubtypeOf (signature lost in extraction): subtype iff shapes
// match, nullability is compatible, and each field type is a subtype of the
// corresponding field type of |other| (record fields are covariant).
 27791 const RecordType& other,
 27792 Heap::Space space,
 27793 FunctionTypeMapping* function_type_equivalence) const {
 27794 if (ptr() == other.ptr()) {
 27795 return true;
 27796 }
 27797 ASSERT(IsFinalized());
 27798 ASSERT(other.IsFinalized());
 27799 const intptr_t num_fields = NumFields();
 27800 if (shape() != other.shape()) {
 27801 // Different number of fields or different named fields.
 27802 return false;
 27803 }
 27804 Thread* const thread = Thread::Current();
 27805 if (!IsNullabilityEquivalent(thread, other, TypeEquality::kInSubtypeTest)) {
 27806 return false;
 27807 }
 27808 // Check subtyping of record field types.
 27809 Zone* const zone = thread->zone();
 27810 AbstractType& field_type = Type::Handle(zone);
 27811 AbstractType& other_field_type = Type::Handle(zone);
 27812 for (intptr_t i = 0; i < num_fields; ++i) {
 27813 field_type = FieldTypeAt(i);
 27814 other_field_type = other.FieldTypeAt(i);
 27815 if (!field_type.IsSubtypeOf(other_field_type, space,
 27816 function_type_equivalence)) {
 27817 return false;
 27818 }
 27819 }
 27820 return true;
 27821}
27822
// Record::New: allocates a record with |shape| (field count + names index) in
// |space|.  The shape is written under a NoSafepointScope so a GC cannot
// observe the object before its shape is set.
27823RecordPtr Record::New(RecordShape shape, Heap::Space space) {
 27824 const intptr_t num_fields = shape.num_fields();
 27825 ASSERT(num_fields >= 0);
 27826 auto raw = Object::Allocate<Record>(space, num_fields);
 27827 NoSafepointScope no_safepoint;
 27828 raw->untag()->set_shape(shape.AsSmi());
 27829 return raw;
 27830}
27831
27832const char* Record::ToCString() const {
27833 if (IsNull()) {
27834 return "Record: null";
27835 }
27836 Thread* thread = Thread::Current();
27837 Zone* zone = thread->zone();
27838 ZoneTextBuffer printer(zone);
27839 const intptr_t num_fields = this->num_fields();
27840 const Array& field_names = Array::Handle(zone, GetFieldNames(thread));
27841 const intptr_t num_positional_fields = num_fields - field_names.Length();
27842 Object& obj = Object::Handle(zone);
27843 printer.AddString("Record (");
27844 for (intptr_t i = 0; i < num_fields; ++i) {
27845 if (i != 0) {
27846 printer.AddString(", ");
27847 }
27848 if (i >= num_positional_fields) {
27849 obj = field_names.At(i - num_positional_fields);
27850 printer.AddString(obj.ToCString());
27851 printer.AddString(": ");
27852 }
27853 obj = FieldAt(i);
27854 printer.AddString(obj.ToCString());
27855 }
27856 printer.AddString(")");
27857 return printer.buffer();
27858}
27859
27860bool Record::CanonicalizeEquals(const Instance& other) const {
27861 if (this->ptr() == other.ptr()) {
27862 return true;
27863 }
27864
27865 if (!other.IsRecord() || other.IsNull()) {
27866 return false;
27867 }
27868
27869 const Record& other_rec = Record::Cast(other);
27870 if (shape() != other_rec.shape()) {
27871 return false;
27872 }
27873
27874 const intptr_t num_fields = this->num_fields();
27875 for (intptr_t i = 0; i < num_fields; ++i) {
27876 if (this->FieldAt(i) != other_rec.FieldAt(i)) {
27877 return false;
27878 }
27879 }
27880 return true;
27881}
27882
// Record::CanonicalizeHash (signature lost in extraction; the per-field hash
// combine and the final FinalizeHash lines — 27894, 27896 — were also
// dropped): computes a structural hash and caches it in the heap's
// canonical-hash table.
 27884 Thread* thread = Thread::Current();
 27885 uint32_t hash = thread->heap()->GetCanonicalHash(ptr());
 27886 if (hash != 0) {
 27887 return hash;
 27888 }
 27889 hash = shape().AsInt();
 27890 Instance& element = Instance::Handle();
 27891 const intptr_t num_fields = this->num_fields();
 27892 for (intptr_t i = 0; i < num_fields; ++i) {
 27893 element ^= FieldAt(i);
 27895 }
 27897 thread->heap()->SetCanonicalHash(ptr(), hash);
 27898 return hash;
 27899}
 27900
// Record::CanonicalizeFieldsLocked (signature lost in extraction):
// canonicalizes each field value in place.  Name suggests the caller holds
// the canonicalization lock — TODO confirm against the declaration.
 27902 Zone* zone = thread->zone();
 27903 Instance& obj = Instance::Handle(zone);
 27904 const intptr_t num_fields = this->num_fields();
 27905 for (intptr_t i = 0; i < num_fields; ++i) {
 27906 obj ^= FieldAt(i);
 27907 obj = obj.CanonicalizeLocked(thread);
 27908 SetFieldAt(i, obj);
 27909 }
 27910}
 27911
// Record::GetRecordType: builds this record's runtime type from the runtime
// types of its current field values.  NOTE(review): the type-handle
// declaration and a line between New and the return (27918, 27925 —
// presumably finalization) were lost in extraction.
27912RecordTypePtr Record::GetRecordType() const {
 27913 Zone* const zone = Thread::Current()->zone();
 27914 const intptr_t num_fields = this->num_fields();
 27915 const Array& field_types =
 27916 Array::Handle(zone, Array::New(num_fields, Heap::kOld));
 27917 Instance& obj = Instance::Handle(zone);
 27919 for (intptr_t i = 0; i < num_fields; ++i) {
 27920 obj ^= FieldAt(i);
 27921 type = obj.GetType(Heap::kNew);
 27922 field_types.SetAt(i, type);
 27923 }
 27924 type = RecordType::New(shape(), field_types, Nullability::kNonNullable);
 27926 return RecordType::Cast(type).ptr();
 27927}
 27928
// Record::GetPositionalFieldIndexFromFieldName (signature lost in
// extraction): maps "$1".."$n" to positional index 0..n-1; -1 otherwise.
 27930 const String& field_name) {
 27931 if (field_name.IsOneByteString() && field_name.Length() >= 1 &&
 27932 field_name.CharAt(0) == '$') {
 27933 int64_t value = 0;
 27934 const char* cstr = field_name.ToCString();
 27935 if (OS::StringToInt64(cstr + 1 /* skip '$' */, &value)) {
 27936 if (value >= 1 && value < kMaxElements) {
 27937 return static_cast<intptr_t>(value - 1);
 27938 }
 27939 }
 27940 }
 27941 return -1;
 27942}
 27943
// Record::GetFieldIndexByName (signature and the expression initializing
// field_index — line 27948 — were lost in extraction): resolves either a
// positional "$k" name or a declared named field to its index; -1 if absent.
 27945 const String& field_name) const {
 27946 ASSERT(field_name.IsSymbol());
 27947 const intptr_t field_index =
 27949 const Array& field_names = Array::Handle(GetFieldNames(thread));
 27950 const intptr_t num_positional_fields = num_fields() - field_names.Length();
 27951 if ((field_index >= 0) && (field_index < num_positional_fields)) {
 27952 return field_index;
 27953 } else {
 27954 for (intptr_t i = 0, n = field_names.Length(); i < n; ++i) {
 27955 if (field_names.At(i) == field_name.ptr()) {
 27956 return num_positional_fields + i;
 27957 }
 27958 }
 27959 }
 27960 return -1;
 27961}
27962
// RecordFieldNamesMapTraits (the class-header line was lost in extraction):
// hash-table traits for the map interning record field-name arrays, keyed by
// structural array equality / canonical hash.
 27964 public:
 27965 static const char* Name() { return "RecordFieldNamesMapTraits"; }
 27966 static bool ReportStats() { return false; }
 27967
 27968 static bool IsMatch(const Object& a, const Object& b) {
 27969 return Array::Cast(a).CanonicalizeEquals(Array::Cast(b));
 27970 }
 27971
 27972 static uword Hash(const Object& key) {
 27973 return Array::Cast(key).CanonicalizeHash();
 27974 }
 27975
 27976 static ObjectPtr NewKey(const Array& arr) { return arr.ptr(); }
 27977};
27979
// RecordShape::Register (signature line lost in extraction): interns
// |field_names| in the isolate group's shared map and side table, returning a
// RecordShape of (num_fields, field_names_index).  Index 0 is reserved for
// records without named fields.  NOTE(review): the map-construction line
// (27998) in the first-time-initialization branch was lost in extraction.
 27981 intptr_t num_fields,
 27982 const Array& field_names) {
 27983 ASSERT(!field_names.IsNull());
 27984 ASSERT(field_names.IsImmutable());
 27985 ASSERT(field_names.ptr() == Object::empty_array().ptr() ||
 27986 field_names.Length() > 0);
 27987
 27988 Zone* zone = thread->zone();
 27989 IsolateGroup* isolate_group = thread->isolate_group();
 27990 ObjectStore* object_store = isolate_group->object_store();
 27991
 27992 if (object_store->record_field_names<std::memory_order_acquire>() ==
 27993 Array::null()) {
 27994 // First-time initialization.
 27995 SafepointWriteRwLocker ml(thread, isolate_group->program_lock());
 27996 if (object_store->record_field_names() == Array::null()) {
 27997 // Reserve record field names index 0 for records without named fields.
 27999 HashTables::New<RecordFieldNamesMap>(16, Heap::kOld));
 28000 map.InsertOrGetValue(Object::empty_array(),
 28001 Smi::Handle(zone, Smi::New(0)));
 28002 ASSERT(map.NumOccupied() == 1);
 28003 object_store->set_record_field_names_map(map.Release());
 28004 const auto& table = Array::Handle(zone, Array::New(16));
 28005 table.SetAt(0, Object::empty_array());
 28006 object_store->set_record_field_names<std::memory_order_release>(table);
 28007 }
 28008 }
 28009
 28010#if defined(DART_PRECOMPILER)
 28011 const intptr_t kMaxNumFields = compiler::target::RecordShape::kMaxNumFields;
 28012 const intptr_t kMaxFieldNamesIndex =
 28013 compiler::target::RecordShape::kMaxFieldNamesIndex;
 28014#else
 28015 const intptr_t kMaxNumFields = RecordShape::kMaxNumFields;
 28016 const intptr_t kMaxFieldNamesIndex = RecordShape::kMaxFieldNamesIndex;
 28017#endif
 28018
 28019 if (num_fields > kMaxNumFields) {
 28020 FATAL("Too many record fields");
 28021 }
 28022 if (field_names.ptr() == Object::empty_array().ptr()) {
 28023 return RecordShape::ForUnnamed(num_fields);
 28024 }
 28025
 28026 {
 // Fast path: read-locked lookup of an already-interned name list.
 28027 SafepointReadRwLocker ml(thread, isolate_group->program_lock());
 28028 RecordFieldNamesMap map(object_store->record_field_names_map());
 28029 Smi& index = Smi::Handle(zone);
 28030 index ^= map.GetOrNull(field_names);
 28031 ASSERT(map.Release().ptr() == object_store->record_field_names_map());
 28032 if (!index.IsNull()) {
 28033 return RecordShape(num_fields, index.Value());
 28034 }
 28035 }
 28036
 // Slow path: write-locked insert; may grow the side table of name arrays.
 28037 SafepointWriteRwLocker ml(thread, isolate_group->program_lock());
 28038 RecordFieldNamesMap map(object_store->record_field_names_map());
 28039 const intptr_t new_index = map.NumOccupied();
 28040 if (new_index > kMaxFieldNamesIndex) {
 28041 FATAL("Too many record shapes");
 28042 }
 28043
 28044 const intptr_t index = Smi::Value(Smi::RawCast(map.InsertOrGetValue(
 28045 field_names, Smi::Handle(zone, Smi::New(new_index)))));
 28046 ASSERT(index > 0);
 28047
 28048 if (index == new_index) {
 28049 ASSERT(map.NumOccupied() == (new_index + 1));
 28050 Array& table = Array::Handle(zone, object_store->record_field_names());
 28051 intptr_t capacity = table.Length();
 28052 if (index >= table.Length()) {
 28053 capacity = capacity + (capacity >> 2);
 28054 table = Array::Grow(table, capacity);
 28055 object_store->set_record_field_names(table);
 28056 }
 28057 table.SetAt(index, field_names);
 28058 } else {
 28059 ASSERT(index < new_index);
 28060 }
 28061 object_store->set_record_field_names_map(map.Release());
 28062
 28063 const RecordShape shape(num_fields, index);
 28064 ASSERT(shape.GetFieldNames(thread) == field_names.ptr());
 28065 ASSERT(shape.num_fields() == num_fields);
 28066 return shape;
 28067}
28068
28069ArrayPtr RecordShape::GetFieldNames(Thread* thread) const {
28070 ObjectStore* object_store = thread->isolate_group()->object_store();
28071 Array& table =
28072 Array::Handle(thread->zone(), object_store->record_field_names());
28073 ASSERT(!table.IsNull());
28074 return Array::RawCast(table.At(field_names_index()));
28075}
28076
28077} // namespace dart
const char * options
static void info(const char *fmt,...) SK_PRINTF_LIKE(1
Definition DM.cpp:213
static struct Initializer initializer
int count
SkPoint pos
static float next(float f)
static constexpr size_t kHeaderSize
static void operation(T operation, uint32_t &a, uint32_t b, uint32_t c, uint32_t d, uint32_t x, uint8_t s, uint32_t t)
Definition SkMD5.cpp:144
static uint32_t hash(const SkShaderBase::GradientInfo &v)
static bool equals(T *a, T *b)
static bool left(const SkPoint &p0, const SkPoint &p1)
static bool right(const SkPoint &p0, const SkPoint &p1)
static const size_t kBufferSize
Definition SkString.cpp:27
SI void store(P *ptr, const T &val)
SI F table(const skcms_Curve *curve, F v)
Vec2Value v2
#define IG
#define UNREACHABLE()
Definition assert.h:248
#define OUT_OF_MEMORY()
Definition assert.h:250
#define DEBUG_ASSERT(cond)
Definition assert.h:321
#define ASSERT_EQUAL(expected, actual)
Definition assert.h:309
#define RELEASE_ASSERT(cond)
Definition assert.h:327
#define COMPILE_ASSERT(expr)
Definition assert.h:339
#define ASSERT_NOTNULL(ptr)
Definition assert.h:323
#define Z
#define CLASS_LIST_NO_OBJECT_NOR_STRING_NOR_ARRAY_NOR_MAP(V)
Definition class_id.h:113
#define CLASS_LIST_MAPS(V)
Definition class_id.h:116
#define CLASS_LIST_STRINGS(V)
Definition class_id.h:132
#define CLASS_LIST_SETS(V)
Definition class_id.h:120
#define CLASS_LIST_FIXED_LENGTH_ARRAYS(V)
Definition class_id.h:124
#define CLASS_LIST_FFI_TYPE_MARKER(V)
Definition class_id.h:165
#define CLASS_LIST_TYPED_DATA(V)
Definition class_id.h:137
#define DART_CLASS_LIST_TYPED_DATA(V)
Definition class_id.h:177
#define CLASS_LIST(V)
Definition class_id.h:208
bool IsSubtypeOf(const AbstractType &other, Heap::Space space, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition object.cc:21611
bool IsNonNullable() const
Definition object.h:9048
void SetTypeTestingStub(const Code &stub) const
Definition object.cc:21822
UntaggedAbstractType::TypeState type_state() const
Definition object.h:9331
virtual AbstractTypePtr UpdateFunctionTypes(intptr_t num_parent_type_args_adjustment, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping) const
Definition object.cc:21288
virtual classid_t type_class_id() const
Definition object.cc:21074
virtual const char * NullabilitySuffix(NameVisibility name_visibility) const
Definition object.cc:21360
bool IsDartFunctionType() const
Definition object.cc:21512
bool IsTopTypeForSubtyping() const
Definition object.cc:21457
StringPtr UserVisibleName() const
Definition object.cc:21392
bool IsStringType() const
Definition object.cc:21507
bool IsNullabilityEquivalent(Thread *thread, const AbstractType &other_type, TypeEquality kind) const
Definition object.cc:21245
bool IsFinalized() const
Definition object.h:9030
virtual AbstractTypePtr Canonicalize(Thread *thread) const
Definition object.cc:21297
bool IsIntegerImplementationType() const
Definition object.cc:21477
void set_flags(uint32_t value) const
Definition object.cc:21217
virtual bool HasTypeClass() const
Definition object.h:9063
virtual AbstractTypePtr SetInstantiatedNullability(const TypeParameter &type_param, Heap::Space space) const
Definition object.cc:21122
void SetHash(intptr_t value) const
Definition object.h:13360
virtual bool IsEquivalent(const Instance &other, TypeEquality kind, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition object.cc:21233
virtual TypeArgumentsPtr arguments() const
Definition object.cc:21092
bool IsDartRecordType() const
Definition object.cc:21521
bool IsVoidType() const
Definition object.h:9169
virtual AbstractTypePtr InstantiateFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping=nullptr, intptr_t num_parent_type_args_adjustment=0) const
Definition object.cc:21273
const char * ScrubbedNameCString() const
Definition object.cc:21407
Nullability nullability() const
Definition object.h:9037
virtual bool Equals(const Instance &other) const
Definition object.h:9074
bool IsFfiPointerType() const
Definition object.cc:21530
bool IsInt32x4Type() const
Definition object.cc:21501
bool IsObjectType() const
Definition object.h:9181
virtual void PrintName(NameVisibility visibility, BaseTextBuffer *printer) const
Definition object.cc:21414
StringPtr Name() const
Definition object.cc:21381
virtual AbstractTypePtr NormalizeFutureOrType(Heap::Space space) const
Definition object.cc:21161
static void AddURI(URIs *uris, const String &name, const String &uri)
Definition object.cc:21314
bool IsTopTypeForInstanceOf() const
Definition object.cc:21440
static StringPtr PrintURIs(URIs *uris)
Definition object.cc:21340
bool IsFloat64x2Type() const
Definition object.cc:21495
bool IsFutureOrType() const
Definition object.h:9247
virtual bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition object.cc:21200
StringPtr ScrubbedName() const
Definition object.cc:21403
static bool InstantiateAndTestSubtype(AbstractType *subtype, AbstractType *supertype, const TypeArguments &instantiator_type_args, const TypeArguments &function_type_args)
Definition object.cc:4342
bool IsDartClosureType() const
Definition object.cc:21517
virtual ClassPtr type_class() const
Definition object.cc:21083
bool IsNullable() const
Definition object.h:9043
bool IsTypeClassAllowedBySpawnUri() const
Definition object.cc:21534
const char * NameCString() const
Definition object.cc:21385
bool IsNullType() const
Definition object.cc:21428
StringPtr ClassName() const
Definition object.cc:21423
bool IsDynamicType() const
Definition object.h:9166
void SetIsFinalized() const
Definition object.cc:21210
bool IsSentinelType() const
Definition object.cc:21436
bool IsDoubleType() const
Definition object.cc:21484
const char * UserVisibleNameCString() const
Definition object.cc:21396
bool IsStrictlyNonNullable() const
Definition object.cc:21101
AbstractTypePtr UnwrapFutureOr() const
Definition object.cc:21587
virtual uword ComputeHash() const
Definition object.cc:21809
virtual void EnumerateURIs(URIs *uris) const
Definition object.cc:21306
void set_nullability(Nullability value) const
Definition object.cc:21227
bool IsFloat32x4Type() const
Definition object.cc:21489
void InitializeTypeTestingStubNonAtomic(const Code &stub) const
Definition object.cc:21848
void set_type_state(UntaggedAbstractType::TypeState value) const
Definition object.cc:21221
bool IsIntType() const
Definition object.cc:21472
bool IsNeverType() const
Definition object.cc:21432
bool IsLegacy() const
Definition object.h:9053
StringPtr message() const
Definition object.h:8030
virtual const char * ToErrorCString() const
Definition object.cc:19870
intptr_t PositionalCount() const
intptr_t NamedCount() const
Definition dart_entry.h:43
intptr_t Count() const
void PrintTo(BaseTextBuffer *buffer, bool show_named_positions=false) const
intptr_t SizeWithTypeArgs() const
Definition dart_entry.h:41
static ArrayPtr NewBoxed(intptr_t type_args_len, intptr_t num_arguments, const Array &optional_arguments_names, Heap::Space space=Heap::kOld)
Definition dart_entry.h:83
intptr_t CountWithTypeArgs() const
Definition dart_entry.h:38
intptr_t FirstArgIndex() const
Definition dart_entry.h:37
intptr_t Size() const
intptr_t TypeArgsLen() const
intptr_t PositionAt(intptr_t i) const
StringPtr NameAt(intptr_t i) const
std::tuple_element< kElement, TupleT >::type::ObjectPtrType Get() const
Definition object.h:13449
intptr_t Length() const
Definition object.h:13499
static intptr_t type_arguments_offset()
Definition object.h:10902
static intptr_t InstanceSize()
Definition object.h:10910
ArrayPtr Slice(intptr_t start, intptr_t count, bool with_type_argument) const
Definition object.cc:24889
static bool Equals(ArrayPtr a, ArrayPtr b)
Definition object.h:10833
virtual uint32_t CanonicalizeHash() const
Definition object.cc:24819
ObjectPtr AtAcquire(intptr_t index) const
Definition object.h:10867
static ArrayPtr New(intptr_t len, Heap::Space space=Heap::kNew)
Definition object.h:10933
void Truncate(intptr_t new_length) const
Definition object.cc:24973
static constexpr intptr_t kBytesPerElement
Definition object.h:10897
virtual bool CanonicalizeEquals(const Instance &other) const
Definition object.cc:24781
void MakeImmutable() const
Definition object.cc:24916
bool IsImmutable() const
Definition object.h:10874
virtual void SetTypeArguments(const TypeArguments &value) const
Definition object.h:10882
static intptr_t LengthOf(const ArrayPtr array)
Definition object.h:10809
ObjectPtr At(intptr_t index) const
Definition object.h:10854
intptr_t Length() const
Definition object.h:10808
static ArrayPtr NewUninitialized(intptr_t len, Heap::Space space=Heap::kNew)
Definition object.h:10938
static ArrayPtr MakeFixedLength(const GrowableObjectArray &growable_array, bool unique=false)
Definition object.cc:25014
static intptr_t data_offset()
Definition object.h:10814
void SetAt(intptr_t index, const Object &value) const
Definition object.h:10858
static intptr_t length_offset()
Definition object.h:10813
static ArrayPtr Grow(const Array &source, intptr_t new_length, Heap::Space space=Heap::kNew)
Definition object.cc:24932
virtual void CanonicalizeFieldsLocked(Thread *thread) const
Definition object.cc:25050
void Add(const T &value)
const T & At(intptr_t index) const
void SetLength(intptr_t new_length)
void Sort(int compare(const T *, const T *))
intptr_t length() const
void SetAt(intptr_t index, const T &t)
const T & At(intptr_t index) const
void AddString(const char *s)
intptr_t Printf(const char *format,...) PRINTF_ATTRIBUTE(2
char * buffer() const
Definition text_buffer.h:35
static constexpr int bitsize()
Definition bitfield.h:164
static constexpr S update(T value, S original)
Definition bitfield.h:190
static constexpr intptr_t encode(bool value)
Definition bitfield.h:167
static constexpr bool is_valid(T value)
Definition bitfield.h:148
void Add(intptr_t i)
Definition bit_vector.h:63
bool Contains(intptr_t i) const
Definition bit_vector.h:91
static intptr_t InstanceSize()
Definition object.h:10772
static const Bool & Get(bool value)
Definition object.h:10780
static const Bool & True()
Definition object.h:10776
static ErrorPtr DoBootstrapping(const uint8_t *kernel_buffer, intptr_t kernel_buffer_size)
Definition bootstrap.cc:219
static constexpr bool ContainsCompressedPointers()
Definition object.h:11833
virtual void ReportSwitchingCode(const Code &code)
Definition object.cc:4430
virtual void UpdateArrayTo(const WeakArray &value)
Definition object.cc:4417
virtual void ReportDeoptimization(const Code &code)
Definition object.cc:4422
CHACodeArray(const Class &cls)
Definition object.cc:4413
static void FlushICache(uword start, uword size)
StringPtr target_name() const
Definition object.h:2352
ArrayPtr arguments_descriptor() const
Definition object.h:2353
static CapabilityPtr New(uint64_t id, Heap::Space space=Heap::kNew)
Definition object.cc:25852
static int32_t ToLower(int32_t code_point)
Definition unicode.h:179
static int32_t ToUpper(int32_t code_point)
Definition unicode.h:174
static AbstractTypePtr FinalizeType(const AbstractType &type, FinalizationKind finalization=kCanonicalize)
static void VerifyBootstrapClasses()
static ErrorPtr AllocateFinalizeClass(const Class &cls)
static ErrorPtr LoadClassMembers(const Class &cls)
static bool IsMatch(const FunctionName &name, const Object &obj)
Definition object.cc:3304
static bool ReportStats()
Definition object.cc:3296
static uword Hash(const FunctionName &name)
Definition object.cc:3310
static bool IsMatch(const Object &a, const Object &b)
Definition object.cc:3299
static uword Hash(const Object &key)
Definition object.cc:3307
static const char * Name()
Definition object.cc:3295
void Register(const Class &cls)
void CopySizesFromClassObjects()
ClassPtr At(intptr_t cid) const
bool ShouldTraceAllocationFor(intptr_t cid)
UnboxedFieldBitmap GetUnboxedFieldsMapAt(intptr_t cid) const
static bool IsTopLevelCid(intptr_t cid)
void AddFields(const GrowableArray< const Field * > &fields) const
Definition object.cc:5068
TypeArgumentsPtr DefaultTypeArguments(Zone *zone) const
Definition object.cc:3708
void AddFunction(const Function &function) const
Definition object.cc:3346
void set_is_implemented_unsafe() const
Definition object.cc:5673
void set_num_type_arguments(intptr_t value) const
Definition object.cc:3170
void set_is_transformed_mixin_application() const
Definition object.cc:5720
void set_is_implemented() const
Definition object.cc:5668
FieldPtr LookupInstanceField(const String &name) const
Definition object.cc:6399
void set_has_pragma(bool value) const
Definition object.cc:3190
const char * NameCString(NameVisibility name_visibility) const
Definition object.cc:3067
void set_is_finalized_unsafe() const
Definition object.cc:5775
intptr_t FindImplicitClosureFunctionIndex(const Function &needle) const
Definition object.cc:3414
intptr_t NumTypeParameters() const
Definition object.h:1346
const char * ScrubbedNameCString() const
Definition object.cc:3046
FunctionPtr LookupFunctionAllowPrivate(const String &name) const
Definition object.cc:6222
ObjectPtr InvokeSetter(const String &selector, const Instance &argument, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition object.cc:4632
CodePtr allocation_stub() const
Definition object.h:1802
void set_is_deeply_immutable(bool value) const
Definition object.cc:3205
FunctionPtr LookupDynamicFunctionAllowPrivate(const String &name) const
Definition object.cc:6188
void set_instance_size(intptr_t host_value_in_bytes, intptr_t target_value_in_bytes) const
Definition object.h:1169
LibraryPtr library() const
Definition object.h:1335
void set_is_isolate_unsendable_due_to_pragma(bool value) const
Definition object.cc:3199
void set_super_type(const Type &value) const
Definition object.cc:3734
bool TraceAllocation(IsolateGroup *isolate_group) const
Definition object.cc:4489
FunctionPtr InvocationDispatcherFunctionFromIndex(intptr_t idx) const
Definition object.cc:3469
void set_is_allocate_finalized() const
Definition object.cc:5780
FunctionPtr LookupConstructorAllowPrivate(const String &name) const
Definition object.cc:6208
static ClassPtr NewTypedDataViewClass(intptr_t class_id, IsolateGroup *isolate_group)
Definition object.cc:5333
void set_is_enum_class() const
Definition object.cc:5710
void set_is_synthesized_class() const
Definition object.cc:5701
ObjectPtr Invoke(const String &selector, const Array &arguments, const Array &argument_names, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition object.cc:4739
FunctionPtr LookupGetterFunction(const String &name) const
Definition object.cc:6363
void Finalize() const
Definition object.cc:4362
bool IsRecordClass() const
Definition object.h:1585
bool IsInFullSnapshot() const
Definition object.cc:3091
ObjectPtr InvokeGetter(const String &selector, bool throw_nsm_if_absent, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition object.cc:4572
FieldPtr LookupInstanceFieldAllowPrivate(const String &name) const
Definition object.cc:6489
void set_is_allocated_unsafe(bool value) const
Definition object.cc:5760
void AddDirectImplementor(const Class &subclass, bool is_mixin) const
Definition object.cc:5801
TypePtr GetInstantiationOf(Zone *zone, const Class &cls) const
Definition object.cc:12776
bool is_declaration_loaded() const
Definition object.h:1705
intptr_t target_type_arguments_field_offset() const
Definition object.h:1386
uint32_t Hash() const
Definition object.cc:5652
void SetFields(const Array &value) const
Definition object.cc:5039
void set_is_prefinalized() const
Definition object.cc:5787
bool is_const() const
Definition object.h:1747
void set_is_interface_class() const
Definition object.cc:5740
bool IsDartFunctionClass() const
Definition object.cc:5958
StringPtr ScrubbedName() const
Definition object.cc:3042
static intptr_t InstanceSize()
Definition object.h:1687
GrowableObjectArrayPtr direct_subclasses() const
Definition object.h:1539
TypeArgumentsPtr GetDeclarationInstanceTypeArguments() const
Definition object.cc:3526
TypePtr super_type() const
Definition object.h:1433
FunctionPtr GetInvocationDispatcher(const String &target_name, const Array &args_desc, UntaggedFunction::Kind kind, bool create_if_absent) const
Definition object.cc:3897
intptr_t host_next_field_offset() const
Definition object.h:1192
FunctionPtr LookupStaticFunction(const String &name) const
Definition object.cc:6192
intptr_t id() const
Definition object.h:1235
static intptr_t UnboxedFieldSizeInBytesByCid(intptr_t cid)
Definition object.cc:3752
void set_is_declaration_loaded() const
Definition object.cc:5682
static ClassPtr NewExternalTypedDataClass(intptr_t class_id, IsolateGroup *isolate)
Definition object.cc:5373
void set_num_type_arguments_unsafe(intptr_t value) const
Definition object.cc:3186
intptr_t target_instance_size() const
Definition object.h:1149
void set_is_fields_marked_nullable() const
Definition object.cc:5750
intptr_t NumTypeArguments() const
Definition object.cc:3690
void set_is_abstract() const
Definition object.cc:5677
void set_is_mixin_class() const
Definition object.cc:5730
FunctionPtr LookupConstructor(const String &name) const
Definition object.cc:6202
void set_type_arguments_field_offset_in_words(intptr_t host_value, intptr_t target_value) const
Definition object.h:1416
void set_num_native_fields(uint16_t value) const
Definition object.h:1791
static ClassPtr NewTypedDataClass(intptr_t class_id, IsolateGroup *isolate_group)
Definition object.cc:5314
void set_type_arguments_field_offset(intptr_t host_value_in_bytes, intptr_t target_value_in_bytes) const
Definition object.h:1399
WeakArrayPtr dependent_code() const
Definition object.cc:4475
void set_dependent_code(const WeakArray &array) const
Definition object.cc:4481
intptr_t target_next_field_offset() const
Definition object.h:1195
ObjectPtr EvaluateCompiledExpression(const ExternalTypedData &kernel_buffer, const Array &type_definitions, const Array &param_values, const TypeArguments &type_param_values) const
Definition object.cc:4876
FunctionPtr LookupSetterFunction(const String &name) const
Definition object.cc:6367
bool IsPrivate() const
Definition object.cc:6180
FunctionPtr GetRecordFieldGetter(const String &getter_name) const
Definition object.cc:4128
void set_is_sealed() const
Definition object.cc:5725
TypePtr RareType() const
Definition object.cc:3097
void set_direct_subclasses(const GrowableObjectArray &subclasses) const
Definition object.cc:5854
intptr_t host_type_arguments_field_offset() const
Definition object.h:1377
ArrayPtr interfaces() const
Definition object.h:1449
bool IsObjectClass() const
Definition object.h:1567
bool InjectCIDFields() const
Definition object.cc:5114
void set_interfaces(const Array &value) const
Definition object.cc:5794
bool is_type_finalized() const
Definition object.h:1711
void SetUserVisibleNameInClassTable()
Definition object.cc:5438
InstancePtr InsertCanonicalConstant(Zone *zone, const Instance &constant) const
Definition object.cc:6540
void set_script(const Script &value) const
Definition object.cc:5610
ArrayPtr fields() const
Definition object.h:1617
static bool IsSubtypeOf(const Class &cls, const TypeArguments &type_arguments, Nullability nullability, const AbstractType &other, Heap::Space space, FunctionTypeMapping *function_type_equivalence=nullptr)
Definition object.cc:5975
void DisableAllocationStub() const
Definition object.cc:5934
void SetTraceAllocation(bool trace_allocation) const
Definition object.cc:4498
ArrayPtr constants() const
Definition object.cc:5861
bool HasInstanceFields() const
Definition object.cc:3261
void set_is_future_subtype(bool value) const
Definition object.cc:3210
void set_is_declaration_loaded_unsafe() const
Definition object.cc:5687
InstancePtr LookupCanonicalInstance(Zone *zone, const Instance &value) const
Definition object.cc:6527
uint16_t num_native_fields() const
Definition object.h:1790
intptr_t implementor_cid() const
Definition object.h:1247
void set_is_finalized() const
Definition object.cc:5769
ArrayPtr OffsetToFieldMap(ClassTable *class_table=nullptr) const
Definition object.cc:3233
intptr_t host_instance_size() const
Definition object.h:1145
static bool IsDeeplyImmutable(ClassPtr clazz)
Definition object.h:2167
void DisableAllCHAOptimizedCode()
Definition object.cc:4471
FunctionPtr LookupFunctionReadLocked(const String &name) const
Definition object.cc:6226
void AddField(const Field &field) const
Definition object.cc:5057
void DisableCHAOptimizedCode(const Class &subclass)
Definition object.cc:4457
bool HasCompressedPointers() const
Definition object.cc:3007
void set_is_allocated(bool value) const
Definition object.cc:5755
void set_allocation_stub(const Code &value) const
Definition object.cc:5925
int32_t SourceFingerprint() const
Definition object.cc:5659
FunctionPtr LookupDynamicFunctionUnsafe(const String &name) const
Definition object.cc:6184
bool is_abstract() const
Definition object.h:1698
bool IsDynamicClass() const
Definition object.h:1558
bool IsGeneric() const
Definition object.h:1360
bool IsClosureClass() const
Definition object.h:1579
StringPtr Name() const
Definition object.cc:3038
static constexpr intptr_t kNoTypeArguments
Definition object.h:1376
FunctionPtr LookupStaticFunctionAllowPrivate(const String &name) const
Definition object.cc:6198
void set_token_pos(TokenPosition value) const
Definition object.cc:5620
TypePtr DeclarationType() const
Definition object.cc:5882
FieldPtr LookupStaticFieldAllowPrivate(const String &name) const
Definition object.cc:6497
FunctionPtr LookupFactory(const String &name) const
Definition object.cc:6212
TokenPosition token_pos() const
Definition object.h:1281
void set_instance_size_in_words(intptr_t host_value, intptr_t target_value) const
Definition object.h:1176
ErrorPtr EnsureIsFinalized(Thread *thread) const
Definition object.cc:4979
bool IsVoidClass() const
Definition object.h:1561
void set_is_synthesized_class_unsafe() const
Definition object.cc:5706
bool FindInstantiationOf(Zone *zone, const Class &cls, GrowableArray< const Type * > *path, bool consider_only_super_classes=false) const
Definition object.cc:12723
void set_is_base_class() const
Definition object.cc:5735
static ClassPtr New(IsolateGroup *isolate_group, bool register_class=true)
Definition object.cc:3114
bool is_prefinalized() const
Definition object.h:1740
void RegisterCHACode(const Code &code)
Definition object.cc:4445
bool IsFutureClass() const
Definition object.cc:5962
KernelProgramInfoPtr KernelProgramInfo() const
Definition object.cc:5615
FieldPtr LookupField(const String &name) const
Definition object.cc:6407
void set_library(const Library &value) const
Definition object.cc:3488
void set_end_token_pos(TokenPosition value) const
Definition object.cc:5625
ErrorPtr EnsureIsAllocateFinalized(Thread *thread) const
Definition object.cc:5009
FunctionPtr LookupFactoryAllowPrivate(const String &name) const
Definition object.cc:6218
ClassPtr SuperClass(ClassTable *class_table=nullptr) const
Definition object.cc:3715
void set_is_loaded(bool value) const
Definition object.cc:5764
static ClassPtr NewStringClass(intptr_t class_id, IsolateGroup *isolate_group)
Definition object.cc:5287
void set_constants(const Array &value) const
Definition object.cc:5865
intptr_t FindFieldIndex(const Field &needle) const
Definition object.cc:5085
StringPtr UserVisibleName() const
Definition object.cc:3050
void AddInvocationDispatcher(const String &target_name, const Array &args_desc, const Function &dispatcher) const
Definition object.cc:3881
void AddDirectSubclass(const Class &subclass) const
Definition object.cc:5833
void set_id(intptr_t value) const
Definition object.h:1236
bool IsTopLevel() const
Definition object.cc:6176
FieldPtr FieldFromIndex(intptr_t idx) const
Definition object.cc:5106
bool NoteImplementor(const Class &implementor) const
Definition object.cc:5635
TypeParametersPtr type_parameters() const
Definition object.h:1340
bool IsNullClass() const
Definition object.h:1555
void set_is_type_finalized() const
Definition object.cc:5693
const char * UserVisibleNameCString() const
Definition object.cc:3059
void set_is_final() const
Definition object.cc:5745
GrowableObjectArrayPtr direct_implementors() const
Definition object.h:1522
intptr_t FindFunctionIndex(const Function &needle) const
Definition object.cc:3371
bool is_implemented() const
Definition object.h:1694
TypeParameterPtr TypeParameterAt(intptr_t index, Nullability nullability=Nullability::kNonNullable) const
Definition object.cc:3739
static ClassPtr NewPointerClass(intptr_t class_id, IsolateGroup *isolate_group)
Definition object.cc:5393
void set_next_field_offset(intptr_t host_value_in_bytes, intptr_t target_value_in_bytes) const
Definition object.h:1203
void set_is_const() const
Definition object.cc:5715
bool is_allocate_finalized() const
Definition object.h:1734
bool is_transformed_mixin_application() const
Definition object.h:1756
FunctionPtr ImplicitClosureFunctionFromIndex(intptr_t idx) const
Definition object.cc:3404
FunctionPtr FunctionFromIndex(intptr_t idx) const
Definition object.cc:3393
static ClassPtr NewNativeWrapper(const Library &library, const String &name, int num_fields)
Definition object.cc:5244
void SetFunctions(const Array &value) const
Definition object.cc:3314
void set_type_parameters(const TypeParameters &value) const
Definition object.cc:3492
static ClassPtr NewUnmodifiableTypedDataViewClass(intptr_t class_id, IsolateGroup *isolate_group)
Definition object.cc:5353
DART_WARN_UNUSED_RESULT ErrorPtr VerifyEntryPoint() const
Definition object.cc:27446
FieldPtr LookupFieldAllowPrivate(const String &name, bool instance_only=false) const
Definition object.cc:6458
intptr_t NumTypeParameters(Thread *thread) const
Definition object.cc:3605
ClassPtr Mixin() const
Definition object.cc:3081
bool is_isolate_unsendable_due_to_pragma() const
Definition object.h:2159
void set_direct_implementors(const GrowableObjectArray &implementors) const
Definition object.cc:5827
intptr_t FindInvocationDispatcherFunctionIndex(const Function &needle) const
Definition object.cc:3442
bool is_finalized() const
Definition object.h:1725
ArrayPtr current_functions() const
Definition object.h:1643
ArrayPtr functions() const
Definition object.h:1648
FieldPtr LookupStaticField(const String &name) const
Definition object.cc:6403
ScriptPtr script() const
Definition object.h:1274
void EnsureDeclarationLoaded() const
Definition object.cc:4968
TypeArgumentsPtr GetInstanceTypeArguments(Thread *thread, const TypeArguments &type_arguments, bool canonicalize=true) const
Definition object.cc:3574
void set_is_isolate_unsendable(bool value) const
Definition object.cc:3194
void set_can_be_future(bool value) const
Definition object.cc:3215
static void ForAllClosureFunctions(std::function< bool(const Function &)> callback)
ObjectPtr RawContext() const
Definition object.h:12332
virtual bool CanonicalizeEquals(const Instance &other) const
Definition object.cc:25940
static ClosurePtr New(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const Function &function, const Object &context, Heap::Space space=Heap::kNew)
Definition object.cc:26021
FunctionTypePtr GetInstantiatedSignature(Zone *zone) const
Definition object.cc:26063
TypeArgumentsPtr instantiator_type_arguments() const
Definition object.h:12294
TypeArgumentsPtr delayed_type_arguments() const
Definition object.h:12314
TypeArgumentsPtr function_type_arguments() const
Definition object.h:12304
FunctionPtr function() const
Definition object.h:12324
virtual void CanonicalizeFieldsLocked(Thread *thread) const
Definition object.cc:25953
uword ComputeHash() const
Definition object.cc:25988
static void NotifyAll(const char *name, uword base, uword prologue_offset, uword size, bool optimized, const CodeComments *comments)
static bool AreActive()
void DumpInlineIntervals(uword start)
void GetInlinedFunctionsAt(int32_t pc_offset, GrowableArray< const Function * > *function_stack, GrowableArray< TokenPosition > *token_positions)
void DumpSourcePositions(uword start)
static intptr_t UnroundedSize(CodeSourceMapPtr map)
Definition object.h:6198
void set_is_force_optimized(bool value) const
Definition object.cc:17704
void DisableDartCode() const
Definition object.cc:18433
FunctionPtr function() const
Definition object.h:7101
uword EntryPoint() const
Definition object.h:6837
static CodePtr FindCode(uword pc, int64_t timestamp)
Definition object.cc:18238
static bool ParseEntryKind(const char *str, EntryKind *out)
Definition object.cc:17658
LocalVarDescriptorsPtr var_descriptors() const
Definition object.h:7067
void Enable() const
Definition object.h:7223
ArrayPtr deopt_info_array() const
Definition object.h:6916
bool HasBreakpoint() const
Definition object.cc:17782
uword Size() const
Definition object.h:6876
void set_compressed_stackmaps(const CompressedStackMaps &maps) const
Definition object.cc:17716
void SetStaticCallTargetCodeAt(uword pc, const Code &code) const
Definition object.cc:17865
void SetPrologueOffset(intptr_t offset) const
Definition object.cc:17975
PcDescriptorsPtr pc_descriptors() const
Definition object.h:6900
void set_inlined_id_to_function(const Array &value) const
Definition object.cc:18003
uword GetPcForDeoptId(intptr_t deopt_id, UntaggedPcDescriptors::Kind kind) const
Definition object.cc:18312
bool is_optimized() const
Definition object.h:6790
classid_t OwnerClassId() const
Definition object.h:7111
CodeSourceMapPtr code_source_map() const
Definition object.h:6906
intptr_t GetDeoptIdForOsr(uword pc) const
Definition object.cc:18327
intptr_t GetPrologueOffset() const
Definition object.cc:17984
void SetStubCallTargetCodeAt(uword pc, const Code &code) const
Definition object.cc:17879
ObjectPoolPtr object_pool() const
Definition object.h:6781
static CodePtr FindCodeUnsafe(uword pc)
Definition object.cc:18270
ArrayPtr inlined_id_to_function() const
Definition object.cc:17999
void set_num_variables(intptr_t num_variables) const
Definition object.cc:17726
bool ContainsInstructionAt(uword addr) const
Definition object.h:6888
const char * QualifiedName(const NameFormattingParams &params) const
Definition object.cc:18394
bool IsDisabled() const
Definition object.h:7228
static CodePtr FinalizeCodeAndNotify(const Function &function, FlowGraphCompiler *compiler, compiler::Assembler *assembler, PoolAttachment pool_attachment, bool optimized=false, CodeStatistics *stats=nullptr)
Definition object.cc:18033
uint32_t Hash() const
Definition object.cc:18345
void DumpInlineIntervals() const
Definition object.cc:18527
bool IsStubCode() const
Definition object.cc:18407
LocalVarDescriptorsPtr GetLocalVarDescriptors() const
Definition object.cc:17675
ObjectPtr owner() const
Definition object.h:7106
void DumpSourcePositions(bool relative_addresses=false) const
Definition object.cc:18539
ArrayPtr static_calls_target_table() const
Definition object.h:6972
void Disassemble(DisassemblyFormatter *formatter=nullptr) const
Definition object.cc:17899
const char * Name() const
Definition object.cc:18363
void set_is_alive(bool value) const
Definition object.cc:17708
static void NotifyCodeObservers(const Code &code, bool optimized)
Definition object.cc:18191
bool IsTypeTestStubCode() const
Definition object.cc:18417
bool IsAllocationStubCode() const
Definition object.cc:18413
@ kSCallTableEntryLength
Definition object.h:6957
@ kSCallTableFunctionTarget
Definition object.h:6956
@ kSCallTableCodeOrTypeTarget
Definition object.h:6955
ObjectPoolPtr GetObjectPool() const
Definition object.cc:17773
static constexpr intptr_t kMaxElements
Definition object.h:7127
void DisableStubCode(bool is_cls_parameterized) const
Definition object.cc:18442
void set_static_calls_target_table(const Array &value) const
Definition object.cc:17752
bool IsFunctionCode() const
Definition object.cc:18424
static CodePtr FinalizeCode(FlowGraphCompiler *compiler, compiler::Assembler *assembler, PoolAttachment pool_attachment, bool optimized, CodeStatistics *stats)
Definition object.cc:18068
TokenPosition GetTokenIndexOfPC(uword pc) const
Definition object.cc:18300
intptr_t pointer_offsets_length() const
Definition object.h:6786
InstructionsPtr active_instructions() const
Definition object.h:6737
intptr_t num_variables() const
Definition object.cc:17722
ObjectPtr return_address_metadata() const
Definition object.h:7026
void set_is_discarded(bool value) const
Definition object.cc:17712
FunctionPtr GetStaticCallTargetFunctionAt(uword pc) const
Definition object.cc:17850
bool IsUnknownDartCode() const
Definition object.h:7216
InstructionsPtr instructions() const
Definition object.h:6747
void set_is_optimized(bool value) const
Definition object.cc:17700
static const char * EntryKindToCString(EntryKind kind)
Definition object.cc:17642
uword PayloadStart() const
Definition object.h:6823
TypedDataPtr GetDeoptInfoAtPc(uword pc, ICData::DeoptReasonId *deopt_reason, uint32_t *deopt_flags) const
Definition object.cc:17790
void set_deopt_info_array(const Array &array) const
Definition object.cc:17743
void set_owner(const Object &owner) const
Definition object.cc:17686
void GetInlinedFunctionsAtInstruction(intptr_t pc_offset, GrowableArray< const Function * > *functions, GrowableArray< TokenPosition > *token_positions) const
Definition object.cc:18499
static CompilerState & Current()
static bool IsBackgroundCompilation()
Definition compiler.cc:299
static ErrorPtr EnsureUnoptimizedCode(Thread *thread, const Function &function)
Definition compiler.cc:855
static ObjectPtr CompileFunction(Thread *thread, const Function &function)
Definition compiler.cc:825
static ErrorPtr CompileAllFunctions(const Class &cls)
Definition compiler.cc:949
static void ComputeLocalVarDescriptors(const Code &code)
Definition compiler.cc:910
static intptr_t InstanceSize()
Definition object.h:6272
static intptr_t UnroundedSize(CompressedStackMapsPtr maps)
Definition object.h:6266
static ConstMapPtr NewUninitialized(Heap::Space space=Heap::kNew)
Definition object.cc:25320
static ConstMapPtr NewDefault(Heap::Space space=Heap::kNew)
Definition object.cc:25314
static ConstSetPtr NewUninitialized(Heap::Space space=Heap::kNew)
Definition object.cc:25368
static ConstSetPtr NewDefault(Heap::Space space=Heap::kNew)
Definition object.cc:25362
intptr_t KernelOffsetAt(intptr_t scope_index) const
Definition object.cc:18752
StringPtr NameAt(intptr_t scope_index) const
Definition object.cc:18668
AbstractTypePtr TypeAt(intptr_t scope_index) const
Definition object.cc:18717
intptr_t LateInitOffsetAt(intptr_t scope_index) const
Definition object.cc:18708
static intptr_t InstanceSize()
Definition object.h:7506
intptr_t ContextLevelAt(intptr_t scope_index) const
Definition object.cc:18743
void SetCidAt(intptr_t scope_index, intptr_t cid) const
Definition object.cc:18730
TokenPosition DeclarationTokenIndexAt(intptr_t scope_index) const
Definition object.cc:18655
intptr_t CidAt(intptr_t scope_index) const
Definition object.cc:18726
void ClearFlagsAt(intptr_t scope_index) const
Definition object.cc:18676
intptr_t ContextIndexAt(intptr_t scope_index) const
Definition object.cc:18734
TokenPosition TokenIndexAt(intptr_t scope_index) const
Definition object.cc:18645
void SetContextLevelAt(intptr_t scope_index, intptr_t context_level) const
Definition object.cc:18747
void SetTypeAt(intptr_t scope_index, const AbstractType &type) const
Definition object.cc:18721
void SetContextIndexAt(intptr_t scope_index, intptr_t context_index) const
Definition object.cc:18738
void SetTokenIndexAt(intptr_t scope_index, TokenPosition token_pos) const
Definition object.cc:18650
static constexpr intptr_t kMaxElements
Definition object.h:7497
void SetLateInitOffsetAt(intptr_t scope_index, intptr_t late_init_offset) const
Definition object.cc:18712
void SetNameAt(intptr_t scope_index, const String &name) const
Definition object.cc:18672
intptr_t num_variables() const
Definition object.h:7455
void SetKernelOffsetAt(intptr_t scope_index, intptr_t kernel_offset) const
Definition object.cc:18756
void SetDeclarationTokenIndexAt(intptr_t scope_index, TokenPosition declaration_token_pos) const
Definition object.cc:18661
static ContextScopePtr New(intptr_t num_variables, bool is_implicit)
Definition object.cc:18627
void Dump(int indent=0) const
Definition object.cc:18598
intptr_t GetLevel() const
Definition object.cc:18551
static ContextPtr New(intptr_t num_variables, Heap::Space space=Heap::kNew)
Definition object.cc:18561
static bool IsValidLength(intptr_t len)
Definition object.h:7415
ObjectPtr At(intptr_t context_index) const
Definition object.h:7393
intptr_t num_variables() const
Definition object.h:7385
ContextPtr parent() const
Definition object.h:7377
static ObjectPtr InvokeNoSuchMethod(Thread *thread, const Instance &receiver, const String &target_name, const Array &arguments, const Array &arguments_descriptor)
static ObjectPtr InvokeClosure(Thread *thread, const Array &arguments)
static ObjectPtr InvokeFunction(const Function &function, const Array &arguments)
Definition dart_entry.cc:31
static ObjectPtr HashCode(const Instance &receiver)
static ObjectPtr Equals(const Instance &left, const Instance &right)
static ObjectPtr ToString(const Instance &receiver)
static bool IsReadOnlyHandle(uword address)
Definition dart.cc:1139
static IsolateGroup * vm_isolate_group()
Definition dart.h:69
static Dart_DwarfStackTraceFootnoteCallback dwarf_stacktrace_footnote_callback()
Definition dart.h:145
static Isolate * vm_isolate()
Definition dart.h:68
static Snapshot::Kind vm_snapshot_kind()
Definition dart.h:95
static const char * Name()
Definition object.cc:25137
static uword Hash(const Object &obj)
Definition object.cc:25147
static bool IsMatch(const Object &a, const Object &b)
Definition object.cc:25140
static bool ReportStats()
Definition object.cc:25138
static constexpr intptr_t kNone
Definition deopt_id.h:27
static void GetEntry(const Array &table, intptr_t index, Smi *offset, TypedData *info, Smi *reason_and_flags)
static intptr_t GetLength(const Array &table)
friend class ClassDictionaryIterator
Definition object.h:5032
DictionaryIterator(const Library &library)
Definition object.cc:13549
bool HasNext() const
Definition object.h:5020
static void Disassemble(uword start, uword end, DisassemblyFormatter *formatter, const Code &code, const CodeComments *comments=nullptr)
static intptr_t value_offset()
Definition object.h:10118
virtual bool OperatorEquals(const Instance &other) const
Definition object.cc:23457
virtual bool CanonicalizeEquals(const Instance &other) const
Definition object.cc:23467
bool BitwiseEqualsToDouble(double value) const
Definition object.cc:23449
static DoublePtr New(double d, Heap::Space space=Heap::kNew)
Definition object.cc:23481
virtual uint32_t CanonicalizeHash() const
Definition object.cc:23477
double value() const
Definition object.h:10094
static DoublePtr NewCanonical(double d)
Definition object.cc:23497
static DynamicLibraryPtr New(void *handle, bool canBeClosed, Heap::Space space=Heap::kNew)
Definition object.cc:25828
static intptr_t InstanceSize()
Definition object.h:11900
virtual const char * ToErrorCString() const
Definition object.cc:19830
static intptr_t InstanceSize()
Definition object.h:6579
static DART_NORETURN void ThrowByType(ExceptionType type, const Array &arguments)
static DART_NORETURN void ThrowOOM()
static DART_NORETURN void ThrowLateFieldAssignedDuringInitialization(const String &name)
static DART_NORETURN void ThrowLateFieldNotInitialized(const String &name)
static DART_NORETURN void ThrowCompileTimeError(const LanguageError &error)
static DART_NORETURN void PropagateError(const Error &error)
static intptr_t InstanceSize()
Definition object.h:11714
static ExternalTypedDataPtr New(intptr_t class_id, uint8_t *data, intptr_t len, Heap::Space space=Heap::kNew, bool perform_eager_msan_initialization_check=true)
Definition object.cc:25705
static intptr_t MaxElements(intptr_t class_id)
Definition object.h:11718
FinalizablePersistentHandle * AddFinalizer(void *peer, Dart_HandleFinalizer callback, intptr_t external_size) const
Definition object.cc:25698
static ExternalTypedDataPtr NewFinalizeWithFree(uint8_t *data, intptr_t len)
Definition object.cc:25728
FieldDependentArray(const Field &field)
Definition object.cc:12279
virtual void UpdateArrayTo(const WeakArray &value)
Definition object.cc:12283
virtual void ReportSwitchingCode(const Code &code)
Definition object.cc:12295
virtual void ReportDeoptimization(const Code &code)
Definition object.cc:12287
FieldGuardUpdater(const Field *field, const Object &value)
Definition object.cc:13044
void SetAt(intptr_t index, ObjectPtr raw_instance, bool concurrent_use=false)
Definition field_table.h:75
ObjectPtr At(intptr_t index, bool concurrent_use=false) const
Definition field_table.h:61
void SetFieldType(const AbstractType &value) const
Definition object.cc:11994
static StringPtr GetterSymbol(const String &field_name)
Definition object.cc:11847
DART_WARN_UNUSED_RESULT ErrorPtr InitializeInstance(const Instance &instance) const
Definition object.cc:12389
bool is_final() const
Definition object.h:4420
InstancePtr SetterClosure() const
Definition object.cc:12260
static StringPtr NameFromSetter(const String &setter_name)
Definition object.cc:11872
const char * UserVisibleNameCString() const
Definition object.cc:12133
ClassPtr Owner() const
Definition object.cc:11911
ObjectPtr StaticConstFieldValue() const
Definition object.cc:12469
bool is_unboxed() const
Definition object.h:4685
void InheritKernelOffsetFrom(const Field &src) const
Definition object.cc:11949
StringPtr UserVisibleName() const
Definition object.cc:12142
static bool IsInitName(const String &function_name)
Definition object.cc:11890
bool IsOriginal() const
Definition object.h:4396
bool is_nullable() const
Definition object.cc:11821
FieldPtr CloneFromOriginal() const
Definition object.cc:11786
intptr_t KernelLibraryOffset() const
Definition object.cc:11963
uint32_t Hash() const
Definition object.cc:11945
void SetOriginal(const Field &value) const
Definition object.cc:11837
void RegisterDependentCode(const Code &code) const
Definition object.cc:12310
FieldPtr Original() const
Definition object.cc:11790
ScriptPtr Script() const
Definition object.cc:11922
bool HasInitializerFunction() const
Definition object.cc:12385
bool IsUninitialized() const
Definition object.cc:12339
ObjectPtr RawOwner() const
Definition object.cc:11900
static bool IsGetterName(const String &function_name)
Definition object.cc:11882
void SetInitializerFunction(const Function &initializer) const
Definition object.cc:12369
bool is_reflectable() const
Definition object.h:4432
InstancePtr GetterClosure() const
Definition object.cc:12256
@ kUnknownFixedLength
Definition object.h:4701
@ kUnknownLengthOffset
Definition object.h:4700
@ kNoFixedLength
Definition object.h:4702
DART_WARN_UNUSED_RESULT ErrorPtr InitializeStatic() const
Definition object.cc:12427
void set_static_type_exactness_state(StaticTypeExactnessState state) const
Definition object.h:4612
bool is_static() const
Definition object.h:4418
static bool IsSetterName(const String &function_name)
Definition object.cc:11886
void InitializeGuardedListLengthInObjectOffset(bool unsafe=false) const
Definition object.cc:12588
static StringPtr NameFromInit(const String &init_name)
Definition object.cc:11877
StaticTypeExactnessState static_type_exactness_state() const
Definition object.h:4606
FieldPtr Clone(const Field &original) const
Definition object.cc:12105
bool NeedsSetter() const
Definition object.cc:12174
intptr_t guarded_list_length_in_object_offset() const
Definition object.cc:12161
StringPtr name() const
Definition object.h:4408
void set_guarded_list_length_in_object_offset_unsafe(intptr_t offset) const
Definition object.cc:12165
static StringPtr LookupSetterSymbol(const String &field_name)
Definition object.cc:11863
DART_WARN_UNUSED_RESULT ObjectPtr EvaluateInitializer() const
Definition object.cc:12513
bool IsConsistentWith(const Field &field) const
Definition object.cc:12329
const char * GuardedPropertiesAsCString() const
Definition object.cc:12552
void set_guarded_cid(intptr_t cid) const
Definition object.h:4633
StringPtr InitializingExpression() const
Definition object.cc:12128
void SetStaticConstFieldValue(const Instance &value, bool assert_initializing_store=true) const
Definition object.cc:12497
void set_guarded_list_length_in_object_offset(intptr_t offset) const
Definition object.h:4661
ObjectPtr StaticValue() const
Definition object.h:13253
static StringPtr LookupGetterSymbol(const String &field_name)
Definition object.cc:11851
TypedDataViewPtr KernelLibrary() const
Definition object.cc:11958
intptr_t guarded_cid() const
Definition object.cc:11800
static FieldPtr NewTopLevel(const String &name, bool is_final, bool is_const, bool is_late, const Object &owner, TokenPosition token_pos, TokenPosition end_token_pos)
Definition object.cc:12090
void set_is_nullable(bool val) const
Definition object.h:4726
FunctionPtr EnsureInitializerFunction() const
Definition object.cc:12347
void set_dependent_code(const WeakArray &array) const
Definition object.cc:12270
void DeoptimizeDependentCode(bool are_mutators_stopped=false) const
Definition object.cc:12318
intptr_t guarded_list_length() const
Definition object.cc:12152
InstancePtr AccessorClosure(bool make_setter) const
Definition object.cc:12228
static StringPtr GetterName(const String &field_name)
Definition object.cc:11843
AbstractTypePtr type() const
Definition object.h:4523
static StringPtr SetterName(const String &field_name)
Definition object.cc:11855
bool NeedsGetter() const
Definition object.cc:12196
void SetStaticValue(const Object &value) const
Definition object.cc:12818
void SetFieldTypeSafe(const AbstractType &value) const
Definition object.cc:11985
WeakArrayPtr dependent_code() const
Definition object.cc:12264
DART_WARN_UNUSED_RESULT ErrorPtr VerifyEntryPoint(EntryPointPragma kind) const
Definition object.cc:27439
static StringPtr NameFromGetter(const String &getter_name)
Definition object.cc:11867
intptr_t KernelLibraryIndex() const
Definition object.cc:11970
void set_guarded_list_length_unsafe(intptr_t list_length) const
Definition object.cc:12156
void RecordStore(const Object &value) const
Definition object.cc:13074
void set_guarded_list_length(intptr_t list_length) const
Definition object.h:4651
static StringPtr SetterSymbol(const String &field_name)
Definition object.cc:11859
TokenPosition token_pos() const
Definition object.h:4562
KernelProgramInfoPtr KernelProgramInfo() const
Definition object.cc:11936
int32_t SourceFingerprint() const
Definition object.cc:12119
static FinalizablePersistentHandle * New(IsolateGroup *isolate_group, const Object &object, void *peer, Dart_HandleFinalizer callback, intptr_t external_size, bool auto_delete)
void VisitObject(ObjectPtr obj)
Definition object.cc:1420
ObjectPtr token() const
Definition object.h:12958
void set_token(const Object &value) const
Definition object.h:12959
ObjectPtr value() const
Definition object.h:12944
void set_finalizer(const FinalizerBase &value) const
Definition object.cc:26952
void set_external_size(intptr_t value) const
Definition object.h:12979
intptr_t external_size() const
Definition object.h:12978
static FinalizerEntryPtr New(const FinalizerBase &finalizer, Heap::Space space=Heap::kNew)
Definition object.cc:26941
static intptr_t type_arguments_offset()
Definition object.h:13046
static FinalizerPtr New(Heap::Space space=Heap::kNew)
Definition object.cc:26861
static Float32x4Ptr New(float value0, float value1, float value2, float value3, Heap::Space space=Heap::kNew)
Definition object.cc:25386
void set_w(float w) const
Definition object.cc:25431
float x() const
Definition object.cc:25435
void set_y(float y) const
Definition object.cc:25423
float z() const
Definition object.cc:25443
virtual uint32_t CanonicalizeHash() const
Definition object.cc:25456
virtual bool CanonicalizeEquals(const Instance &other) const
Definition object.cc:25451
void set_z(float z) const
Definition object.cc:25427
simd128_value_t value() const
Definition object.cc:25409
void set_x(float x) const
Definition object.cc:25419
void set_value(simd128_value_t value) const
Definition object.cc:25414
float w() const
Definition object.cc:25447
float y() const
Definition object.cc:25439
void set_y(double y) const
Definition object.cc:25583
void set_value(simd128_value_t value) const
Definition object.cc:25591
virtual uint32_t CanonicalizeHash() const
Definition object.cc:25600
simd128_value_t value() const
Definition object.cc:25587
static Float64x2Ptr New(double value0, double value1, Heap::Space space=Heap::kNew)
Definition object.cc:25554
void set_x(double x) const
Definition object.cc:25579
double y() const
Definition object.cc:25575
virtual bool CanonicalizeEquals(const Instance &other) const
Definition object.cc:25595
double x() const
Definition object.cc:25571
bool Matches(const Function &function) const
Definition object.cc:3277
intptr_t Hash() const
Definition object.cc:3285
FunctionName(const String &name, String *tmp_string)
Definition object.cc:3275
FunctionTypeMapping(Zone *zone, FunctionTypeMapping **mapping, const FunctionType &from, const FunctionType &to)
Definition object.cc:6564
TypeParameterPtr MapTypeParameter(const TypeParameter &type_param) const
Definition object.cc:6586
const FunctionType * Find(const Object &from) const
Definition object.cc:6573
bool ContainsOwnersOfTypeParameters(const TypeParameter &p1, const TypeParameter &p2) const
Definition object.cc:6597
void set_packed_parameter_counts(uint32_t packed_parameter_counts) const
Definition object.cc:11598
static intptr_t NumOptionalParametersOf(FunctionTypePtr ptr)
Definition object.h:9601
TypeParametersPtr type_parameters() const
Definition object.h:9707
void SetIsRequiredAt(intptr_t index) const
Definition object.cc:8829
void set_result_type(const AbstractType &value) const
Definition object.cc:8633
intptr_t NumOptionalNamedParameters() const
Definition object.h:9621
intptr_t num_implicit_parameters() const
Definition object.h:9565
void SetParameterTypeAt(intptr_t index, const AbstractType &value) const
Definition object.cc:8648
AbstractTypePtr ParameterTypeAt(intptr_t index) const
Definition object.cc:8643
bool ContainsHandles() const
Definition object.cc:8350
TypeParameterPtr TypeParameterAt(intptr_t index, Nullability nullability=Nullability::kNonNullable) const
Definition object.cc:8618
const char * ToUserVisibleCString() const
Definition object.cc:11712
bool HasRequiredNamedParameters() const
Definition object.cc:8866
StringPtr ParameterNameAt(intptr_t index) const
Definition object.cc:8703
void set_num_fixed_parameters(intptr_t value) const
Definition object.cc:11659
void set_num_implicit_parameters(intptr_t value) const
Definition object.cc:11608
AbstractTypePtr result_type() const
Definition object.h:9650
intptr_t NumOptionalPositionalParameters() const
Definition object.h:9614
virtual void PrintName(NameVisibility visibility, BaseTextBuffer *printer) const
Definition object.cc:22661
virtual AbstractTypePtr UpdateFunctionTypes(intptr_t num_parent_type_args_adjustment, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping) const
Definition object.cc:9961
bool IsRequiredAt(intptr_t index) const
Definition object.cc:8813
bool IsSubtypeOf(const FunctionType &other, Heap::Space space, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition object.cc:10147
void PrintParameters(Thread *thread, Zone *zone, NameVisibility name_visibility, BaseTextBuffer *printer) const
Definition object.cc:10700
void set_named_parameter_names(const Array &value) const
Definition object.cc:8730
ArrayPtr named_parameter_names() const
Definition object.h:9669
static FunctionTypePtr Clone(const FunctionType &orig, Heap::Space space)
Definition object.cc:11701
void SetParameterNameAt(intptr_t index, const String &value) const
Definition object.cc:8714
void CreateNameArrayIncludingFlags(Heap::Space space=Heap::kOld) const
Definition object.cc:8750
static intptr_t NumFixedParametersOf(FunctionTypePtr ptr)
Definition object.h:9571
virtual AbstractTypePtr InstantiateFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping=nullptr, intptr_t num_parent_type_args_adjustment=0) const
Definition object.cc:9841
intptr_t NumParentTypeArguments() const
Definition object.h:9550
void set_packed_type_parameter_counts(uint16_t packed_parameter_counts) const
Definition object.cc:11603
void set_parameter_types(const Array &value) const
Definition object.cc:8655
virtual void EnumerateURIs(URIs *uris) const
Definition object.cc:22647
static intptr_t NumParentTypeArgumentsOf(FunctionTypePtr ptr)
Definition object.h:9545
virtual uword ComputeHash() const
Definition object.cc:22421
bool HasSameTypeParametersAndBounds(const FunctionType &other, TypeEquality kind, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition object.cc:10059
static intptr_t NumTypeArgumentsOf(FunctionTypePtr ptr)
Definition object.h:9560
bool HasOptionalNamedParameters() const
Definition object.h:9589
bool HasOptionalParameters() const
Definition object.h:9583
intptr_t GetRequiredFlagIndex(intptr_t index, intptr_t *flag_mask) const
Definition object.cc:8773
void FinalizeNameArray() const
Definition object.cc:8844
bool HasOptionalPositionalParameters() const
Definition object.h:9597
void SetNumOptionalParameters(intptr_t num_optional_parameters, bool are_optional_positional) const
Definition object.cc:11668
void SetNumParentTypeArguments(intptr_t value) const
Definition object.cc:8893
static intptr_t NumParametersOf(FunctionTypePtr ptr)
Definition object.h:9625
bool IsContravariantParameter(intptr_t parameter_position, const FunctionType &other, intptr_t other_parameter_position, Heap::Space space, FunctionTypeMapping *function_type_equivalence) const
Definition object.cc:10042
virtual bool IsEquivalent(const Instance &other, TypeEquality kind, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition object.cc:22170
intptr_t NumParameters() const
Definition object.h:9628
static intptr_t NumOptionalNamedParametersOf(FunctionTypePtr ptr)
Definition object.h:9618
StringPtr ToUserVisibleString() const
Definition object.cc:11719
void Print(NameVisibility name_visibility, BaseTextBuffer *printer) const
Definition object.cc:10813
static bool HasOptionalNamedParameters(FunctionTypePtr ptr)
Definition object.h:9585
static bool IsGeneric(FunctionTypePtr ptr)
Definition object.h:9724
intptr_t num_fixed_parameters() const
Definition object.h:9575
bool IsGeneric() const
Definition object.h:9727
static intptr_t NumTypeParametersOf(FunctionTypePtr ptr)
Definition object.h:9554
intptr_t NumTypeParameters() const
Definition object.h:9558
virtual AbstractTypePtr Canonicalize(Thread *thread) const
Definition object.cc:22539
FunctionTypePtr ToNullability(Nullability value, Heap::Space space) const
Definition object.cc:22005
virtual bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition object.cc:10850
uint16_t packed_type_parameter_counts() const
Definition object.h:9637
void SetTypeParameters(const TypeParameters &value) const
Definition object.cc:8884
static FunctionTypePtr New(intptr_t num_parent_type_arguments=0, Nullability nullability=Nullability::kLegacy, Heap::Space space=Heap::kOld)
Definition object.cc:11682
ArrayPtr parameter_types() const
Definition object.h:9658
uint32_t packed_parameter_counts() const
Definition object.h:9630
static intptr_t NumOptionalPositionalParametersOf(FunctionTypePtr ptr)
Definition object.h:9611
void SetIsOptimizable(bool value) const
Definition object.cc:9006
intptr_t KernelLibraryIndex() const
Definition object.cc:11006
void set_owner(const Object &value) const
Definition object.cc:8522
void set_context_scope(const ContextScope &value) const
Definition object.cc:8115
StringPtr ParameterNameAt(intptr_t index) const
Definition object.cc:8660
void set_accessor_field(const Field &value) const
Definition object.cc:8215
intptr_t MaxNumberOfParametersInRegisters(Zone *zone) const
Definition object.cc:27324
FunctionPtr CreateMethodExtractor(const String &getter_name) const
Definition object.cc:4017
bool HasOptionalNamedParameters() const
Definition object.cc:8920
FunctionPtr ForwardingTarget() const
Definition object.cc:8481
InstantiationMode default_type_arguments_instantiation_mode() const
Definition object.cc:8246
void SetFfiCallbackTarget(const Function &target) const
Definition object.cc:8442
bool IsImplicitConstructor() const
Definition object.cc:10276
void SetFfiCallbackKind(FfiCallbackKind value) const
Definition object.cc:8470
StringPtr GetSource() const
Definition object.cc:11177
bool PrologueNeedsArgumentsDescriptor() const
Definition object.cc:11488
const char * QualifiedUserVisibleNameCString() const
Definition object.cc:11088
bool IsRequiredAt(intptr_t index) const
Definition object.cc:8799
intptr_t NumParentTypeArguments() const
Definition object.cc:8908
StringPtr UserVisibleName() const
Definition object.cc:11057
ObjectPtr DoArgumentTypesMatch(const Array &args, const ArgumentsDescriptor &arg_names) const
Definition object.cc:9563
void AssignFfiCallbackId(int32_t callback_id) const
Definition object.cc:8402
ICDataPtr FindICData(intptr_t deopt_id) const
Definition object.cc:11338
void set_default_type_arguments_instantiation_mode(InstantiationMode value) const
Definition object.cc:8254
bool IsImplicitInstanceClosureFunction() const
Definition object.h:3904
FfiCallbackKind GetFfiCallbackKind() const
Definition object.cc:8463
intptr_t SourceSize() const
Definition object.cc:11504
void SetFfiCSignature(const FunctionType &sig) const
Definition object.cc:8315
bool IsImplicitClosureFunction() const
Definition object.h:3883
bool IsPrivate() const
Definition object.cc:10895
bool IsPreferInline() const
Definition object.cc:9093
bool is_old_native() const
Definition object.cc:8596
FunctionPtr CreateDynamicInvocationForwarder(const String &mangled_name) const
Definition object.cc:4260
bool is_sticky_specialization() const
Definition object.cc:8544
void set_modifier(UntaggedFunction::AsyncModifier value) const
Definition object.cc:8949
static bool IsDynamicInvocationForwarderName(const String &name)
Definition object.cc:4240
bool IsNoSuchMethodDispatcher() const
Definition object.h:3268
FunctionPtr GetDynamicInvocationForwarder(const String &mangled_name, bool allow_add=true) const
Definition object.cc:4298
bool SafeToClosurize() const
Definition object.cc:10426
int32_t FfiCallbackId() const
Definition object.cc:8390
bool IsFfiCallClosure() const
Definition object.cc:9125
StringPtr native_name() const
Definition object.cc:8564
static bool is_visible(FunctionPtr f)
Definition object.h:4163
void set_end_token_pos(TokenPosition value) const
Definition object.h:3442
void EnsureHasCompiledUnoptimizedCode() const
Definition object.cc:8026
bool IsDynamicClosureCallDispatcher() const
Definition object.h:3288
void InheritKernelOffsetFrom(const Function &src) const
Definition object.cc:10909
bool IsIdempotent() const
Definition object.cc:9100
intptr_t NumOptionalParameters() const
Definition object.cc:8926
bool IsCachableIdempotent() const
Definition object.cc:9113
bool IsTypedDataViewFactory() const
Definition object.cc:9015
bool HasOptionalParameters() const
Definition object.cc:8917
void SetKernelLibraryAndEvalScript(const Script &script, const class KernelProgramInfo &kernel_program_info, intptr_t index) const
Definition object.cc:10925
void CreateNameArray(Heap::Space space=Heap::kOld) const
Definition object.cc:8735
InstancePtr GetFfiCallClosurePragmaValue() const
Definition object.cc:9132
bool FfiCSignatureContainsHandles() const
Definition object.cc:8345
void set_awaiter_link(AwaiterLink link) const
Definition object.cc:8135
void SetForwardingTarget(const Function &target) const
Definition object.cc:8486
ObjectPtr RawOwner() const
Definition object.h:3063
const char * ToFullyQualifiedCString() const
Definition object.cc:9820
InstancePtr FfiCallbackExceptionalReturn() const
Definition object.cc:8449
bool HasCode() const
Definition object.cc:7994
bool CanBeInlined() const
Definition object.cc:9257
static const char * KindToCString(UntaggedFunction::Kind kind)
Definition object.cc:8477
StringPtr UserVisibleSignature() const
Definition object.cc:10687
FunctionPtr parent_function() const
Definition object.cc:8225
ClosurePtr ImplicitInstanceClosure(const Instance &receiver) const
Definition object.cc:10780
static FunctionPtr New(const FunctionType &signature, const String &name, UntaggedFunction::Kind kind, bool is_static, bool is_const, bool is_abstract, bool is_external, bool is_native, const Object &owner, TokenPosition token_pos, Heap::Space space=Heap::kOld)
Definition object.cc:10301
StringPtr InternalSignature() const
Definition object.cc:10674
intptr_t NumTypeArguments() const
Definition object.cc:8911
virtual uword Hash() const
Definition object.cc:7937
intptr_t string_specialization_cid() const
Definition object.cc:8538
void set_token_pos(TokenPosition value) const
Definition object.cc:8959
StringPtr name() const
Definition object.h:2972
bool AreValidArguments(intptr_t num_type_arguments, intptr_t num_arguments, const Array &argument_names, String *error_message) const
Definition object.cc:9381
TokenPosition token_pos() const
Definition object.h:3426
static StringPtr DemangleDynamicInvocationForwarderName(const String &name)
Definition object.cc:4248
void SetParameterNameAt(intptr_t index, const String &value) const
Definition object.cc:8681
intptr_t KernelLibraryOffset() const
Definition object.cc:10999
bool AreValidArgumentCounts(intptr_t num_type_arguments, intptr_t num_arguments, intptr_t num_named_arguments, String *error_message) const
Definition object.cc:9308
DART_WARN_UNUSED_RESULT ErrorPtr VerifyCallEntryPoint() const
Definition object.cc:27386
bool ForceOptimize() const
Definition object.cc:9075
void ClearCodeSafe() const
Definition object.cc:8016
bool HasRequiredNamedParameters() const
Definition object.cc:8789
DART_WARN_UNUSED_RESULT ErrorPtr VerifyClosurizedEntryPoint() const
Definition object.cc:27420
void PrintName(const NameFormattingParams &params, BaseTextBuffer *printer) const
Definition object.cc:11167
void DropUncompiledImplicitClosureFunction() const
Definition object.cc:10665
FunctionPtr ImplicitClosureFunction() const
Definition object.cc:10443
const char * QualifiedScrubbedNameCString() const
Definition object.cc:11074
bool HasOptimizedCode() const
Definition object.cc:11032
int32_t SourceFingerprint() const
Definition object.cc:11227
void set_unoptimized_code(const Code &value) const
Definition object.cc:8096
intptr_t NumOptionalNamedParameters() const
Definition object.cc:8932
bool CheckSourceFingerprint(int32_t fp, const char *kind=nullptr) const
Definition object.cc:11362
void set_extracted_method_closure(const Function &function) const
Definition object.cc:8185
void set_native_name(const String &name) const
Definition object.cc:8571
static FunctionPtr NewClosureFunctionWithKind(UntaggedFunction::Kind kind, const String &name, const Function &parent, bool is_static, TokenPosition token_pos, const Object &owner)
Definition object.cc:10382
intptr_t NumOptionalPositionalParameters() const
Definition object.cc:8929
bool IsOptimizable() const
Definition object.cc:8988
FunctionPtr FfiCallbackTarget() const
Definition object.cc:8435
void SetInstructions(const Code &value) const
Definition object.cc:7967
intptr_t NumTypeParameters() const
Definition object.cc:8905
void RestoreICDataMap(ZoneGrowableArray< const ICData * > *deopt_id_to_ic_data, bool clone_ic_data) const
Definition object.cc:11275
void SetDeoptReasonForAll(intptr_t deopt_id, ICData::DeoptReasonId reason)
Definition object.cc:11350
void SwitchToUnoptimizedCode() const
Definition object.cc:8040
bool HasOptionalPositionalParameters() const
Definition object.cc:8923
ScriptPtr script() const
Definition object.cc:10939
FunctionPtr GetOutermostFunction() const
Definition object.cc:8265
void SaveICDataMap(const ZoneGrowableArray< const ICData * > &deopt_id_to_ic_data, const Array &edge_counters_array, const Array &coverage_array) const
Definition object.cc:11236
StringPtr QualifiedScrubbedName() const
Definition object.cc:11067
void SetFfiCallbackExceptionalReturn(const Instance &value) const
Definition object.cc:8456
bool HasInstantiatedSignature(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition object.cc:10843
bool IsInvokeFieldDispatcher() const
Definition object.h:3276
ArrayPtr ic_data_array() const
Definition object.cc:11330
void reset_unboxed_parameters_and_return() const
Definition object.h:3689
void InstallOptimizedCode(const Code &code) const
Definition object.cc:7957
void set_saved_args_desc(const Array &array) const
Definition object.cc:8200
static StringPtr CreateDynamicInvocationForwarderName(const String &name)
Definition object.cc:4255
FieldPtr accessor_field() const
Definition object.cc:8207
bool IsNonImplicitClosureFunction() const
Definition object.h:3891
bool HasSavedArgumentsDescriptor() const
Definition object.h:3253
TypeArgumentsPtr DefaultTypeArguments(Zone *zone) const
Definition object.cc:8239
const char * NameCString(NameVisibility name_visibility) const
Definition object.cc:11036
bool IsImplicitStaticClosureFunction() const
Definition object.h:3897
bool IsMethodExtractor() const
Definition object.h:3264
intptr_t NumImplicitParameters() const
Definition object.cc:9286
bool is_ffi_native() const
Definition object.cc:8600
ArrayPtr GetCoverageArray() const
Definition object.cc:11318
static FunctionPtr NewClosureFunction(const String &name, const Function &parent, TokenPosition token_pos)
Definition object.cc:10406
bool HasBreakpoint() const
Definition object.cc:7948
bool MayHaveUncheckedEntryPoint() const
Definition object.cc:11499
FunctionPtr extracted_method_closure() const
Definition object.cc:8178
TypedDataViewPtr KernelLibrary() const
Definition object.cc:10994
bool IsGeneric() const
Definition object.cc:8902
void set_packed_fields(uint32_t packed_fields) const
Definition object.cc:8980
CodePtr EnsureHasCode() const
Definition object.cc:11396
static FunctionPtr NewImplicitClosureFunction(const String &name, const Function &parent, TokenPosition token_pos)
Definition object.cc:10416
bool IsUnmodifiableTypedDataViewFactory() const
Definition object.cc:9038
KernelProgramInfoPtr KernelProgramInfo() const
Definition object.cc:10977
void ClearCode() const
Definition object.cc:8006
void SetSignature(const FunctionType &value) const
Definition object.cc:8604
bool FfiCSignatureReturnsStruct() const
Definition object.cc:8364
AbstractTypePtr ParameterTypeAt(intptr_t index) const
Definition object.cc:8638
AwaiterLink awaiter_link() const
Definition object.cc:8125
ClassPtr Owner() const
Definition object.cc:10899
intptr_t num_fixed_parameters() const
Definition object.cc:8914
bool FfiIsLeaf() const
Definition object.cc:8413
bool NeedsMonomorphicCheckedEntry(Zone *zone) const
Definition object.cc:11422
const char * UserVisibleNameCString() const
Definition object.cc:11048
const char * ToQualifiedCString() const
Definition object.cc:9834
StringPtr QualifiedUserVisibleName() const
Definition object.cc:11081
RegExpPtr regexp() const
Definition object.cc:8527
void SwitchToLazyCompiledUnoptimizedCode() const
Definition object.cc:8065
bool HasDynamicCallers(Zone *zone) const
Definition object.cc:11460
void SetInstructionsSafe(const Code &value) const
Definition object.cc:7978
UntaggedFunction::Kind kind() const
Definition object.h:3329
intptr_t NumParameters() const
Definition object.cc:8935
FunctionPtr GetMethodExtractor(const String &getter_name) const
Definition object.cc:4063
bool RecognizedKindForceOptimize() const
Definition object.cc:9144
ClosurePtr ImplicitStaticClosure() const
Definition object.cc:10758
bool IsConstructor() const
Definition object.h:3340
ArrayPtr saved_args_desc() const
Definition object.cc:8191
ContextScopePtr context_scope() const
Definition object.cc:8106
bool HasImplicitClosureFunction() const
Definition object.h:3306
InstancePtr GetNativeAnnotation() const
Definition object.cc:8578
void AttachCode(const Code &value) const
Definition object.cc:7985
FunctionPtr ImplicitClosureTarget(Zone *zone) const
Definition object.cc:10792
void SetRegExpData(const RegExp &regexp, intptr_t string_specialization_cid, bool sticky) const
Definition object.cc:8550
const char * ToLibNamePrefixedQualifiedCString() const
Definition object.cc:9827
void ClearICDataArray() const
Definition object.cc:11334
FunctionTypePtr FfiCSignature() const
Definition object.cc:8322
TypeParameterPtr TypeParameterAt(intptr_t index, Nullability nullability=Nullability::kNonNullable) const
Definition object.cc:8939
void set_recognized_kind(MethodRecognizer::Kind value) const
Definition object.cc:8953
static intptr_t type_arguments_offset()
Definition object.h:13181
bool HasBreakpointInCode(const Code &code)
Definition debugger.cc:438
bool HasBreakpoint(Thread *thread, const Function &function)
Definition debugger.cc:3441
void SetData(const Array &value) const
Definition object.h:11057
void Add(const Object &value, Heap::Space space=Heap::kNew) const
Definition object.cc:25070
void SetLength(intptr_t value) const
Definition object.h:11050
ObjectPtr RemoveLast() const
Definition object.cc:25095
static GrowableObjectArrayPtr New(Heap::Space space=Heap::kNew)
Definition object.h:11118
static intptr_t type_arguments_offset()
Definition object.h:11103
virtual TypeArgumentsPtr GetTypeArguments() const
Definition object.h:11078
intptr_t Length() const
Definition object.h:11046
ObjectPtr At(intptr_t index) const
Definition object.h:11059
ArrayPtr data() const
Definition object.h:11056
void Grow(intptr_t new_capacity, Heap::Space space=Heap::kNew) const
Definition object.cc:25087
bool UpdateOrInsert(const Object &key, const Object &value) const
Definition hash_table.h:713
bool Insert(const Object &key)
Definition hash_table.h:801
ObjectPtr GetOrNull(const Key &key, bool *present=nullptr) const
Definition hash_table.h:840
ObjectPtr GetPayload(intptr_t entry, intptr_t component) const
Definition hash_table.h:352
StorageTraits::ArrayHandle & Release()
Definition hash_table.h:195
ObjectPtr GetKey(intptr_t entry) const
Definition hash_table.h:348
void IterateVMIsolateObjects(ObjectVisitor *visitor) const
Definition heap.cc:353
void IterateOldObjectsNoImagePages(ObjectVisitor *visitor) const
Definition heap.cc:348
bool HasOutstandingSample() const
Definition sampler.h:124
void * InvokeCallbackForLastSample(intptr_t cid)
Definition sampler.cc:200
uword Allocate(Thread *thread, intptr_t size, Space space)
Definition heap.h:65
@ kNew
Definition heap.h:38
@ kOld
Definition heap.h:39
void SetHeapSamplingData(ObjectPtr obj, void *data)
Definition heap.h:213
intptr_t SetHashIfNotSet(ObjectPtr raw_obj, intptr_t hash)
Definition heap.h:175
void * GetPeer(ObjectPtr raw_obj) const
Definition heap.h:167
PageSpace * old_space()
Definition heap.h:63
intptr_t GetHash(ObjectPtr raw_obj) const
Definition heap.h:178
void SetPeer(ObjectPtr raw_obj, void *peer)
Definition heap.h:164
void SetCanonicalHash(ObjectPtr raw_obj, intptr_t hash)
Definition heap.h:183
intptr_t UsedInWords(Space space) const
Definition heap.cc:791
intptr_t GetCanonicalHash(ObjectPtr raw_obj) const
Definition heap.h:186
Space SpaceForExternal(intptr_t size) const
Definition heap.cc:1130
intptr_t deopt_id() const
Definition object.h:2448
intptr_t NumArgsTested() const
Definition object.cc:16518
ArrayPtr entries() const
Definition object.h:2763
RebindRule rebind_rule() const
Definition object.cc:16594
uint32_t DeoptReasons() const
Definition object.cc:16552
void SetOriginal(const ICData &value) const
Definition object.cc:16494
void AddDeoptReason(ICData::DeoptReasonId reason) const
Definition object.cc:16565
AbstractTypePtr receivers_static_type() const
Definition object.h:2460
static ICDataPtr Clone(const ICData &from)
Definition object.cc:17486
FunctionPtr Owner() const
Definition object.cc:16470
static ImmutableArrayPtr New(intptr_t len, Heap::Space space=Heap::kNew)
Definition object.cc:25064
InstancePtr CopyShallowToOldSpace(Thread *thread) const
Definition object.cc:20481
void SetNativeFields(uint16_t num_fields, const intptr_t *field_values) const
Definition object.cc:20937
ObjectPtr Invoke(const String &selector, const Array &arguments, const Array &argument_names, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition object.cc:20188
static intptr_t DataOffsetFor(intptr_t cid)
Definition object.cc:21029
static intptr_t NextFieldOffset()
Definition object.h:8326
uint16_t NumNativeFields() const
Definition object.h:8251
static intptr_t InstanceSize()
Definition object.h:8308
bool IsCallable(Function *function) const
Definition object.cc:20954
ObjectPtr InvokeSetter(const String &selector, const Instance &argument, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition object.cc:20141
virtual InstancePtr CanonicalizeLocked(Thread *thread) const
Definition object.cc:20491
ObjectPtr GetField(const Field &field) const
Definition object.cc:20516
virtual bool CanonicalizeEquals(const Instance &other) const
Definition object.cc:20300
bool IsInstanceOf(const AbstractType &other, const TypeArguments &other_instantiator_type_arguments, const TypeArguments &other_function_type_arguments) const
Definition object.cc:20655
AbstractTypePtr GetType(Heap::Space space) const
Definition object.cc:20561
friend class Closure
Definition object.h:8405
bool IsIdenticalTo(const Instance &other) const
Definition object.cc:20902
static intptr_t ElementSizeFor(intptr_t cid)
Definition object.cc:21008
static intptr_t UnroundedSize()
Definition object.h:8307
virtual void SetTypeArguments(const TypeArguments &value) const
Definition object.cc:20622
bool IsAssignableTo(const AbstractType &other, const TypeArguments &other_instantiator_type_arguments, const TypeArguments &other_function_type_arguments) const
Definition object.cc:20670
intptr_t SizeFromClass() const
Definition object.h:8187
virtual bool OperatorEquals(const Instance &other) const
Definition object.cc:20896
void SetNativeField(int index, intptr_t value) const
Definition object.cc:20924
ObjectPtr InvokeGetter(const String &selector, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition object.cc:20081
InstancePtr Canonicalize(Thread *thread) const
Definition object.cc:20485
virtual TypeArgumentsPtr GetTypeArguments() const
Definition object.cc:20611
virtual ObjectPtr HashCode() const
Definition object.cc:20253
bool IsValidNativeIndex(int index) const
Definition object.h:8241
IntegerPtr IdentityHashCode(Thread *thread) const
Definition object.cc:20260
static bool NullIsAssignableTo(const AbstractType &other)
Definition object.cc:20715
intptr_t * NativeFieldsDataAddr() const
Definition object.cc:20914
ObjectPtr EvaluateCompiledExpression(const Class &klass, const ExternalTypedData &kernel_buffer, const Array &type_definitions, const Array &arguments, const TypeArguments &type_arguments) const
Definition object.cc:4888
void SetField(const Field &field, const Object &value) const
Definition object.cc:20535
friend class Class
Definition object.h:8404
virtual bool IsPointer() const
Definition object.cc:25843
static InstancePtr NewAlreadyFinalized(const Class &cls, Heap::Space space=Heap::kNew)
Definition object.cc:20984
virtual uint32_t CanonicalizeHash() const
Definition object.cc:20357
static InstancePtr New(const Class &cls, Heap::Space space=Heap::kNew)
Definition object.cc:20976
virtual void CanonicalizeFieldsLocked(Thread *thread) const
Definition object.cc:20446
void set_stats(CodeStatistics *stats) const
Definition object.cc:15533
uword MonomorphicEntryPoint() const
Definition object.h:5717
uword EntryPoint() const
Definition object.h:5718
uword PayloadStart() const
Definition object.h:5716
intptr_t Size() const
Definition object.h:5711
int32_t x() const
Definition object.cc:25509
int32_t y() const
Definition object.cc:25513
int32_t w() const
Definition object.cc:25521
void set_z(int32_t z) const
Definition object.cc:25501
virtual bool CanonicalizeEquals(const Instance &other) const
Definition object.cc:25535
static Int32x4Ptr New(int32_t value0, int32_t value1, int32_t value2, int32_t value3, Heap::Space space=Heap::kNew)
Definition object.cc:25470
void set_value(simd128_value_t value) const
Definition object.cc:25530
void set_y(int32_t y) const
Definition object.cc:25497
virtual uint32_t CanonicalizeHash() const
Definition object.cc:25540
void set_x(int32_t x) const
Definition object.cc:25493
simd128_value_t value() const
Definition object.cc:25525
int32_t z() const
Definition object.cc:25517
void set_w(int32_t w) const
Definition object.cc:25505
IntegerPtr ShiftOp(Token::Kind operation, const Integer &other, Heap::Space space=Heap::kNew) const
Definition object.cc:23310
static IntegerPtr New(const String &str, Heap::Space space=Heap::kNew)
Definition object.cc:23063
virtual bool FitsIntoSmi() const
Definition object.cc:23149
virtual int CompareWith(const Integer &other) const
Definition object.cc:23155
IntegerPtr AsValidInteger() const
Definition object.cc:23165
virtual double AsDoubleValue() const
Definition object.cc:23131
static IntegerPtr NewCanonical(const String &str)
Definition object.cc:23078
IntegerPtr ArithmeticOp(Token::Kind operation, const Integer &other, Heap::Space space=Heap::kNew) const
Definition object.cc:23189
const char * ToHexCString(Zone *zone) const
Definition object.cc:23179
IntegerPtr BitOp(Token::Kind operation, const Integer &other, Heap::Space space=Heap::kNew) const
Definition object.cc:23271
virtual bool IsNegative() const
Definition object.cc:23125
static IntegerPtr NewFromUint64(uint64_t value, Heap::Space space=Heap::kNew)
Definition object.cc:23105
virtual int64_t AsInt64Value() const
Definition object.cc:23137
static bool IsValueInRange(uint64_t value)
Definition object.cc:23109
virtual bool IsZero() const
Definition object.cc:23119
virtual bool Equals(const Instance &other) const
Definition object.cc:23113
virtual uint32_t CanonicalizeHash() const
Definition object.cc:23161
virtual uint32_t AsTruncatedUint32Value() const
Definition object.cc:23143
static int EncodeType(Level level, Kind kind)
const uint8_t * snapshot_instructions
Definition isolate.h:193
GroupDebugger * debugger() const
Definition isolate.h:314
Mutex * kernel_data_class_cache_mutex()
Definition isolate.h:519
void RegisterClass(const Class &cls)
Definition isolate.cc:763
bool all_classes_finalized() const
Definition isolate.h:706
Heap * heap() const
Definition isolate.h:295
ObjectStore * object_store() const
Definition isolate.h:505
SafepointRwLock * program_lock()
Definition isolate.h:532
static IsolateGroup * Current()
Definition isolate.h:534
ClassTable * class_table() const
Definition isolate.h:491
Mutex * type_arguments_canonicalization_mutex()
Definition isolate.h:508
Mutex * initializer_functions_mutex()
Definition isolate.h:529
FieldTable * initial_field_table() const
Definition isolate.h:728
IsolateGroupSource * source() const
Definition isolate.h:285
Mutex * constant_canonicalization_mutex()
Definition isolate.h:515
Mutex * kernel_data_lib_cache_mutex()
Definition isolate.h:518
Mutex * subtype_test_cache_mutex()
Definition isolate.h:511
UserTagPtr current_tag() const
Definition isolate.h:1301
FieldTable * field_table() const
Definition isolate.h:953
static Isolate * Current()
Definition isolate.h:939
UserTagPtr default_tag() const
Definition isolate.h:1304
ObjectPtr CallDeferredLoadHandler(intptr_t id)
Definition isolate.cc:1905
GrowableObjectArrayPtr tag_table() const
Definition isolate.h:1298
IsolateGroup * group() const
Definition isolate.h:990
void set_current_tag(const UserTag &tag)
Definition isolate.cc:3176
void set_default_tag(const UserTag &tag)
Definition isolate.cc:3183
Dart_Port origin_id()
Definition isolate.cc:1954
void set_constants(const Array &constants) const
Definition object.cc:15136
intptr_t KernelLibraryStartOffset(intptr_t library_index) const
Definition object.cc:15140
intptr_t KernelLibraryEndOffset(intptr_t library_index) const
Definition object.cc:15161
LibraryPtr LookupLibrary(Thread *thread, const Smi &name_index) const
Definition object.cc:15181
void set_scripts(const Array &scripts) const
Definition object.cc:15132
ArrayPtr libraries_cache() const
Definition object.h:5497
TypedDataViewPtr KernelLibrary(intptr_t library_index) const
Definition object.cc:15153
ClassPtr LookupClass(Thread *thread, const Smi &name_index) const
Definition object.cc:15230
ArrayPtr constants() const
Definition object.h:5492
ArrayPtr scripts() const
Definition object.h:5489
void set_classes_cache(const Array &cache) const
Definition object.cc:15226
TypedDataViewPtr constants_table() const
Definition object.h:5483
TypedDataPtr string_offsets() const
Definition object.h:5462
TypedDataViewPtr string_data() const
Definition object.h:5467
ArrayPtr classes_cache() const
Definition object.h:5504
LibraryPtr InsertLibrary(Thread *thread, const Smi &name_index, const Library &lib) const
Definition object.cc:15203
TypedDataViewPtr metadata_mappings() const
Definition object.h:5475
void set_constants_table(const TypedDataView &value) const
Definition object.cc:15173
TypedDataPtr canonical_names() const
Definition object.h:5469
ScriptPtr ScriptAt(intptr_t index) const
Definition object.cc:15126
void set_libraries_cache(const Array &cache) const
Definition object.cc:15177
TypedDataBasePtr kernel_component() const
Definition object.h:5464
ClassPtr InsertClass(Thread *thread, const Smi &name_index, const Class &klass) const
Definition object.cc:15252
TypedDataViewPtr metadata_payloads() const
Definition object.h:5471
StringPtr FormatMessage() const
Definition object.cc:19963
Report::Kind kind() const
Definition object.h:8051
virtual const char * ToErrorCString() const
Definition object.cc:19980
static LanguageErrorPtr static LanguageErrorPtr NewFormattedV(const Error &prev_error, const Script &script, TokenPosition token_pos, bool report_after_token, Report::Kind kind, Heap::Space space, const char *format, va_list args)
Definition object.cc:19884
static LanguageErrorPtr NewFormatted(const Error &prev_error, const Script &script, TokenPosition token_pos, bool report_after_token, Report::Kind kind, Heap::Space space, const char *format,...) PRINTF_ATTRIBUTE(7
Definition object.cc:19905
TokenPosition token_pos() const
Definition object.h:8087
static bool ReportStats()
Definition object.cc:14629
static uword Hash(const Object &key)
Definition object.cc:14639
static ObjectPtr NewKey(const String &str)
Definition object.cc:14641
static bool IsMatch(const Object &a, const Object &b)
Definition object.cc:14631
static const char * Name()
Definition object.cc:14628
bool is_deferred_load() const
Definition object.h:8432
void AddImport(const Namespace &import) const
Definition object.cc:14897
intptr_t num_imports() const
Definition object.h:8426
StringPtr name() const
Definition object.h:8422
ArrayPtr imports() const
Definition object.h:8425
LibraryPtr importer() const
Definition object.h:8427
LibraryPtr GetLibrary(int index) const
Definition object.cc:14887
static LibraryPtr CoreLibrary()
Definition object.cc:14834
static LibraryPtr VMServiceLibrary()
Definition object.cc:14876
void SetLoadInProgress() const
Definition object.cc:13669
friend class Object
Definition object.h:5411
StringPtr PrivateName(const String &name) const
Definition object.cc:14751
ScriptPtr LookupScript(const String &url, bool useResolvedUri=false) const
Definition object.cc:14056
void Register(Thread *thread) const
Definition object.cc:14778
ObjectPtr LookupReExport(const String &name, ZoneGrowableArray< intptr_t > *visited=nullptr) const
Definition object.cc:13902
intptr_t num_imports() const
Definition object.h:5190
static LibraryPtr MirrorsLibrary()
Definition object.cc:14863
bool Loaded() const
Definition object.h:5082
static bool IsPrivate(const String &name)
Definition object.cc:14666
bool LoadRequested() const
Definition object.h:5074
ObjectPtr InvokeGetter(const String &selector, bool throw_nsm_if_absent, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition object.cc:14389
static LibraryPtr IsolateLibrary()
Definition object.cc:14854
GrowableObjectArrayPtr used_scripts() const
Definition object.h:5182
void SetName(const String &name) const
Definition object.cc:13663
void DropDependenciesAndCaches() const
Definition object.cc:14209
ArrayPtr LoadedScripts() const
Definition object.cc:13987
static ClassPtr LookupCoreClass(const String &class_name)
Definition object.cc:14736
void AddObject(const Object &obj, const String &name) const
Definition object.cc:13862
ObjectPtr InvokeSetter(const String &selector, const Instance &argument, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition object.cc:14457
ObjectPtr LookupLocalOrReExportObject(const String &name) const
Definition object.cc:14112
void EnsureTopLevelClassIsFinalized() const
Definition object.cc:14091
TypedDataViewPtr KernelLibrary() const
Definition object.cc:13648
static LibraryPtr ConvertLibrary()
Definition object.cc:14830
void AddMetadata(const Object &declaration, intptr_t kernel_offset) const
Definition object.cc:13687
ClassPtr LookupClassAllowPrivate(const String &name) const
Definition object.cc:14160
LibraryPtr ImportLibraryAt(intptr_t index) const
Definition object.cc:14193
ObjectPtr GetMetadata(const Object &declaration) const
Definition object.cc:13701
void AddClass(const Class &cls) const
Definition object.cc:13961
ObjectPtr Invoke(const String &selector, const Array &arguments, const Array &argument_names, bool respect_reflectable=true, bool check_is_entrypoint=false) const
Definition object.cc:14520
ObjectPtr EvaluateCompiledExpression(const ExternalTypedData &kernel_buffer, const Array &type_definitions, const Array &param_values, const TypeArguments &type_param_values) const
Definition object.cc:4866
FunctionPtr LookupFunctionAllowPrivate(const String &name) const
Definition object.cc:14131
ClassPtr toplevel_class() const
Definition object.h:5179
static LibraryPtr NativeWrappersLibrary()
Definition object.cc:14868
NamespacePtr ImportAt(intptr_t index) const
Definition object.cc:14201
static LibraryPtr CollectionLibrary()
Definition object.cc:14838
void SetLoadRequested() const
Definition object.cc:13675
void EvaluatePragmas()
Definition object.cc:13767
static LibraryPtr GetLibrary(intptr_t index)
Definition object.cc:14763
friend class Namespace
Definition object.h:5410
static bool FindPragma(Thread *T, bool only_core, const Object &object, const String &pragma_name, bool multiple=false, Object *options=nullptr)
Definition object.cc:4201
StringPtr name() const
Definition object.h:5065
void set_loading_unit(const LoadingUnit &value) const
Definition object.cc:13659
static LibraryPtr AsyncLibrary()
Definition object.cc:14826
intptr_t index() const
Definition object.h:5241
ClassPtr LookupClass(const String &name) const
Definition object.cc:14152
static LibraryPtr LookupLibrary(Thread *thread, const String &url)
Definition object.cc:14646
static const String & PrivateCoreLibName(const String &member)
Definition object.cc:14721
void AddImport(const Namespace &ns) const
Definition object.cc:14221
ArrayPtr exports() const
Definition object.h:5188
LoadingUnitPtr loading_unit() const
Definition object.h:5087
static ErrorPtr CompileAll(bool ignore_error=false)
Definition object.cc:15275
void set_dependencies(const Array &deps) const
Definition object.cc:14181
void SetLoaded() const
Definition object.cc:13681
static const char kPrivateIdentifierStart
Definition object.h:5351
void set_index(intptr_t value) const
Definition object.h:5242
static LibraryPtr InternalLibrary()
Definition object.cc:14850
static void InitNativeWrappersLibrary(IsolateGroup *isolate_group, bool is_kernel_file)
Definition object.cc:14590
FieldPtr LookupFieldAllowPrivate(const String &name) const
Definition object.cc:14122
static LibraryPtr FfiLibrary()
Definition object.cc:14846
static void RegisterLibraries(Thread *thread, const GrowableObjectArray &libs)
Definition object.cc:14806
LibraryPrefixPtr LookupLocalLibraryPrefix(const String &name) const
Definition object.cc:14168
static bool IsPrivateCoreLibName(const String &name, const String &member)
Definition object.cc:14727
StringPtr private_key() const
Definition object.h:5070
static LibraryPtr MathLibrary()
Definition object.cc:14858
StringPtr url() const
Definition object.h:5068
bool LoadInProgress() const
Definition object.h:5077
void CopyPragmas(const Library &old_lib)
Definition object.cc:13792
intptr_t kernel_library_index() const
Definition object.h:5280
intptr_t KernelLibraryOffset() const
Definition object.cc:13653
KernelProgramInfoPtr kernel_program_info() const
Definition object.h:5272
ArrayPtr imports() const
Definition object.h:5187
static const char kPrivateKeySeparator
Definition object.h:5355
void set_toplevel_class(const Class &value) const
Definition object.cc:14176
bool IsAnyCoreLibrary() const
Definition object.cc:13616
static void InitCoreLibrary(IsolateGroup *isolate_group)
Definition object.cc:14349
void AddExport(const Namespace &ns) const
Definition object.cc:14243
static LibraryPtr DeveloperLibrary()
Definition object.cc:14842
static LibraryPtr TypedDataLibrary()
Definition object.cc:14872
void set_kernel_program_info(const KernelProgramInfo &info) const
Definition object.cc:13644
void ComputeAndSetHashMask() const
Definition object.cc:25212
virtual void CanonicalizeFieldsLocked(Thread *thread) const
Definition object.cc:25294
virtual uint32_t CanonicalizeHash() const
Definition object.cc:25270
ArrayPtr data() const
Definition object.h:12012
virtual bool CanonicalizeEquals(const Instance &other) const
Definition object.cc:25229
static constexpr intptr_t kInitialIndexSize
Definition object.h:12055
static const LinkedHashBase & Cast(const Object &obj)
Definition object.h:11984
static intptr_t type_arguments_offset()
Definition object.h:11960
SmiPtr used_data() const
Definition object.h:12020
intptr_t id() const
Definition object.h:7956
static LoadingUnitPtr New(intptr_t id, const LoadingUnit &parent)
Definition object.cc:19738
LoadingUnitPtr parent() const
Definition object.h:7951
static intptr_t LoadingUnitOf(const Function &function)
Definition object.cc:19780
void set_loaded(bool value) const
Definition object.h:7966
ObjectPtr CompleteLoad(const String &error_message, bool transient_error) const
Definition object.cc:19763
static constexpr intptr_t kIllegalId
Definition object.h:7938
static constexpr intptr_t kRootId
Definition object.h:7940
void set_base_objects(const Array &value) const
Definition object.cc:19750
ObjectPtr IssueLoad() const
Definition object.cc:19758
void set_load_outstanding() const
Definition object.h:7984
static ContextScopePtr CreateImplicitClosureScope(const Function &func)
Definition scopes.cc:580
static intptr_t InstanceSize()
Definition object.h:6026
static MapPtr NewDefault(intptr_t class_id=kMapCid, Heap::Space space=Heap::kNew)
Definition object.cc:25169
static MapPtr New(intptr_t class_id, const Array &data, const TypedData &index, intptr_t hash_mask, intptr_t used_data, intptr_t deleted_keys, Heap::Space space=Heap::kNew)
Definition object.cc:25180
void EnsureContains(const Smi &class_id, const Object &target) const
Definition object.cc:18849
intptr_t mask() const
Definition object.cc:18809
static constexpr intptr_t kSpreadFactor
Definition object.h:7571
void set_mask(intptr_t mask) const
Definition object.cc:18813
intptr_t filled_entry_count() const
Definition object.cc:18817
void set_filled_entry_count(intptr_t num) const
Definition object.cc:18821
ObjectPtr Lookup(const Smi &class_id) const
Definition object.cc:18862
ArrayPtr buckets() const
Definition object.cc:18798
void set_buckets(const Array &buckets) const
Definition object.cc:18802
static constexpr double kLoadFactor
Definition object.h:7572
static constexpr intptr_t kInitialCapacity
Definition object.h:7570
static bool IsMarkedAsRecognized(const Function &function, const char *kind=nullptr)
static MintPtr NewCanonical(int64_t value)
Definition object.cc:23393
virtual int CompareWith(const Integer &other) const
Definition object.cc:23427
virtual int64_t AsInt64Value() const
Definition object.cc:23415
virtual uint32_t AsTruncatedUint32Value() const
Definition object.cc:23419
static constexpr intptr_t kBits
Definition object.h:10046
virtual bool FitsIntoSmi() const
Definition object.cc:23423
static constexpr int64_t kMinValue
Definition object.h:10049
virtual bool Equals(const Instance &other) const
Definition object.cc:23400
int64_t value() const
Definition object.h:10052
static constexpr int64_t kMaxValue
Definition object.h:10047
static MintPtr New(int64_t value, Heap::Space space=Heap::kNew)
Definition object.cc:23383
virtual double AsDoubleValue() const
Definition object.cc:23411
FieldPtr GetFieldReferent() const
Definition object.cc:26970
TypeParameterPtr GetTypeParameterReferent() const
Definition object.cc:26990
LibraryPtr GetLibraryReferent() const
Definition object.cc:26985
static MirrorReferencePtr New(const Object &referent, Heap::Space space=Heap::kNew)
Definition object.cc:26995
AbstractTypePtr GetAbstractTypeReferent() const
Definition object.cc:26960
FunctionTypePtr GetFunctionTypeReferent() const
Definition object.cc:26980
ClassPtr GetClassReferent() const
Definition object.cc:26965
FunctionPtr GetFunctionReferent() const
Definition object.cc:26975
bool IsOwnedByCurrentThread() const
Definition os_thread.h:401
ArrayPtr hide_names() const
Definition object.h:5421
bool HidesName(const String &name) const
Definition object.cc:14965
ObjectPtr Lookup(const String &name, ZoneGrowableArray< intptr_t > *trail=nullptr) const
Definition object.cc:15011
ArrayPtr show_names() const
Definition object.h:5420
LibraryPtr target() const
Definition object.h:5419
LibraryPtr owner() const
Definition object.h:5422
void RunCallback(const FinalizerEntry &entry, const char *trace_context) const
Definition object.cc:26890
void(* Callback)(void *)
Definition object.h:13068
static NativeFinalizerPtr New(Heap::Space space=Heap::kNew)
Definition object.cc:26877
StringPtr ToString(Heap::Space space) const
Definition object.cc:23512
static OSThread * Current()
Definition os_thread.h:175
static intptr_t ThreadIdToIntPtr(ThreadId id)
static int64_t GetCurrentTimeMillis()
static int64_t GetCurrentMonotonicMicros()
static void static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
static bool StringToInt64(const char *str, int64_t *value)
static BuildId GetAppBuildId(const uint8_t *snapshot_instructions)
static const uint8_t * GetAppDSOBase(const uint8_t *snapshot_instructions)
Definition os.cc:13
static intptr_t ProcessId()
static char * SCreate(Zone *zone, const char *format,...) PRINTF_ATTRIBUTE(2
IsolateGroup * isolate_group() const
Definition visitor.h:25
void VisitCompressedPointers(uword heap_base, CompressedObjectPtr *first, CompressedObjectPtr *last)
Definition visitor.h:43
static intptr_t InstanceSize()
Definition object.h:5620
static ObjectPoolPtr NewFromBuilder(const compiler::ObjectPoolBuilder &builder)
Definition object.cc:15723
bool IsFreeListElement() const
ObjectPtr Decompress(uword heap_base) const
UntaggedObject * untag() const
bool IsStringInstance() const
bool IsForwardingCorpse() const
intptr_t GetClassId() const
Definition raw_object.h:864
intptr_t GetClassIdMayBeSmi() const
void set_bootstrap_library(BootstrapLibraryId index, const Library &value)
bool IsImmutable() const
Definition object.h:338
static ObjectPtr Clone(const Object &orig, Heap::Space space, bool load_with_relaxed_atomics=false)
Definition object.cc:2960
static ObjectPtr Allocate(intptr_t cls_id, intptr_t size, Heap::Space space, bool compressed, uword ptr_field_start_offset, uword ptr_field_end_offset)
Definition object.cc:2820
void StoreNonPointer(const FieldType *addr, ValueType value) const
Definition object.h:819
static ClassPtr loadingunit_class()
Definition object.h:566
static ClassPtr type_parameters_class()
Definition object.h:516
@ kUserVisibleName
Definition object.h:645
@ kInternalName
Definition object.h:622
@ kScrubbedName
Definition object.h:633
static void Cleanup()
Definition object.cc:1364
static ClassPtr ffi_trampoline_data_class()
Definition object.h:521
static ClassPtr class_class()
Definition object.h:513
static void InitNullAndBool(IsolateGroup *isolate_group)
Definition object.cc:554
static ClassPtr namespace_class()
Definition object.h:527
static const ClassId kClassId
Definition object.h:606
void StoreCompressedPointer(compressed_type const *addr, type value) const
Definition object.h:782
static ObjectPtr null()
Definition object.h:433
intptr_t GetClassId() const
Definition object.h:341
ObjectPtr ptr() const
Definition object.h:332
ObjectPtr ptr_
Definition object.h:870
static ClassPtr language_error_class()
Definition object.h:553
void CheckHandle() const
Definition object.cc:2808
bool InVMIsolateHeap() const
Definition object.h:395
virtual StringPtr DictionaryName() const
Definition object.cc:2685
static ClassPtr unhandled_exception_class()
Definition object.h:554
friend void ClassTable::Register(const Class &cls)
void Print() const
Definition object.cc:2681
static bool ShouldHaveImmutabilityBitSet(classid_t class_id)
Definition object.cc:2689
bool IsCanonical() const
Definition object.h:335
static void FinishInit(IsolateGroup *isolate_group)
Definition object.cc:1351
bool IsNew() const
Definition object.h:390
bool IsOld() const
Definition object.h:391
static void FinalizeVMIsolate(IsolateGroup *isolate_group)
Definition object.cc:1470
uword raw_value() const
Definition object.h:670
static ClassPtr context_class()
Definition object.h:549
static ClassPtr patch_class_class()
Definition object.h:518
static ClassPtr code_class()
Definition object.h:531
static void InitVtables()
Definition object.cc:615
static void MakeUnusedSpaceTraversable(const Object &obj, intptr_t original_size, intptr_t used_size)
Definition object.cc:1610
static void set_vm_isolate_snapshot_object_table(const Array &table)
Definition object.cc:1601
void SetCanonical() const
Definition object.h:336
static void FinalizeReadOnlyObject(ObjectPtr object)
Definition object.cc:1556
virtual const char * ToCString() const
Definition object.h:366
static constexpr bool ContainsCompressedPointers()
Definition object.h:329
static ClassPtr library_class()
Definition object.h:526
static ClassPtr unwind_error_class()
Definition object.h:557
cpp_vtable vtable() const
Definition object.h:700
static ClassPtr field_class()
Definition object.h:524
static constexpr intptr_t RoundedAllocationSize(intptr_t size)
Definition object.h:758
static ClassPtr script_class()
Definition object.h:525
bool IsNull() const
Definition object.h:363
static intptr_t InstanceSize()
Definition object.h:592
static ClassPtr context_scope_class()
Definition object.h:550
static Object & Handle()
Definition object.h:407
static ObjectPtr RawCast(ObjectPtr obj)
Definition object.h:325
static Object & ZoneHandle()
Definition object.h:419
static ClassPtr function_class()
Definition object.h:519
friend class Thread
Definition object.h:1025
static void Init(IsolateGroup *isolate_group)
Definition object.cc:721
static constexpr intptr_t kHashBits
Definition object.h:323
static ClassPtr closure_data_class()
Definition object.h:520
static ClassPtr api_error_class()
Definition object.h:552
void StoreSimd128(const FieldType *addr, simd128_value_t value) const
Definition object.h:800
static ClassPtr subtypetestcache_class()
Definition object.h:565
static void VerifyBuiltinVtables()
Definition object.cc:1677
ClassPtr clazz() const
Definition object.h:13192
static intptr_t UnroundedSize(OneByteStringPtr str)
Definition object.h:10537
static OneByteStringPtr New(intptr_t len, Heap::Space space)
Definition object.cc:24447
static OneByteStringPtr ConcatAll(const Array &strings, intptr_t start, intptr_t end, intptr_t len, Heap::Space space)
Definition object.cc:24552
static intptr_t data_offset()
Definition object.h:10533
static intptr_t InstanceSize()
Definition object.h:10543
static OneByteStringPtr Transform(int32_t(*mapping)(int32_t ch), const String &str, Heap::Space space)
Definition object.cc:24573
static OneByteStringPtr EscapeSpecialCharacters(const String &str)
Definition object.cc:24415
static OneByteStringPtr SubStringUnchecked(const String &str, intptr_t begin_index, intptr_t length, Heap::Space space)
Definition object.cc:24588
static OneByteStringPtr Concat(const String &str1, const String &str2, Heap::Space space)
Definition object.cc:24540
static constexpr intptr_t kBytesPerElement
Definition object.h:10521
void AllocateBlack(intptr_t size)
Definition pages.h:266
void VisitObjectsUnsafe(ObjectVisitor *visitor) const
Definition pages.cc:667
static PassiveObject & Handle()
Definition object.h:1077
void set_kernel_program_info(const KernelProgramInfo &info) const
Definition object.cc:7928
intptr_t YieldIndex() const
Definition object.h:6133
TokenPosition TokenPos() const
Definition object.h:6129
intptr_t DeoptId() const
Definition object.h:6128
intptr_t TryIndex() const
Definition object.h:6132
UntaggedPcDescriptors::Kind Kind() const
Definition object.h:6134
static intptr_t UnroundedSize(PcDescriptorsPtr desc)
Definition object.h:6056
static intptr_t InstanceSize()
Definition object.h:6060
static intptr_t type_arguments_offset()
Definition object.h:11876
static PointerPtr New(uword native_address, Heap::Space space=Heap::kNew)
Definition object.cc:25805
static intptr_t InstanceSize()
Definition object.h:11861
static void DumpStackTrace(void *context)
Definition profiler.cc:409
static void SampleAllocation(Thread *thread, intptr_t cid, uint32_t identity_hash)
Definition profiler.cc:1265
uint32_t NextUInt32()
Definition random.cc:73
static ReceivePortPtr New(Dart_Port id, const String &debug_name, Heap::Space space=Heap::kNew)
Definition object.cc:25862
static bool IsMatch(const Object &a, const Object &b)
Definition object.cc:27968
static uword Hash(const Object &key)
Definition object.cc:27972
static const char * Name()
Definition object.cc:27965
static ObjectPtr NewKey(const Array &arr)
Definition object.cc:27976
static RecordShape ForUnnamed(intptr_t num_fields)
Definition object.h:11282
ArrayPtr GetFieldNames(Thread *thread) const
Definition object.cc:28069
static constexpr intptr_t kMaxFieldNamesIndex
Definition object.h:11271
intptr_t num_fields() const
Definition object.h:11288
static constexpr intptr_t kMaxNumFields
Definition object.h:11266
SmiPtr AsSmi() const
Definition object.h:11294
static RecordShape Register(Thread *thread, intptr_t num_fields, const Array &field_names)
Definition object.cc:27980
AbstractTypePtr FieldTypeAt(intptr_t index) const
Definition object.cc:27456
virtual void EnumerateURIs(URIs *uris) const
Definition object.cc:27697
RecordTypePtr ToNullability(Nullability value, Heap::Space space) const
Definition object.cc:27558
RecordShape shape() const
Definition object.h:11362
virtual uword ComputeHash() const
Definition object.cc:27617
ArrayPtr GetFieldNames(Thread *thread) const
Definition object.cc:27477
virtual bool IsEquivalent(const Instance &other, TypeEquality kind, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition object.cc:27580
virtual AbstractTypePtr Canonicalize(Thread *thread) const
Definition object.cc:27639
virtual AbstractTypePtr UpdateFunctionTypes(intptr_t num_parent_type_args_adjustment, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping) const
Definition object.cc:27751
virtual AbstractTypePtr InstantiateFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping=nullptr, intptr_t num_parent_type_args_adjustment=0) const
Definition object.cc:27711
bool IsSubtypeOf(const RecordType &other, Heap::Space space, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition object.cc:27790
void SetFieldTypeAt(intptr_t index, const AbstractType &value) const
Definition object.cc:27461
virtual void PrintName(NameVisibility visibility, BaseTextBuffer *printer) const
Definition object.cc:27706
static RecordTypePtr New(RecordShape shape, const Array &field_types, Nullability nullability=Nullability::kLegacy, Heap::Space space=Heap::kOld)
Definition object.cc:27541
virtual bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition object.cc:27524
virtual uint32_t CanonicalizeHash() const
Definition object.cc:27883
RecordShape shape() const
Definition object.h:11404
static intptr_t GetPositionalFieldIndexFromFieldName(const String &field_name)
Definition object.cc:27929
virtual void CanonicalizeFieldsLocked(Thread *thread) const
Definition object.cc:27901
RecordTypePtr GetRecordType() const
Definition object.cc:27912
virtual bool CanonicalizeEquals(const Instance &other) const
Definition object.cc:27860
static RecordPtr New(RecordShape shape, Heap::Space space=Heap::kNew)
Definition object.cc:27823
static intptr_t field_offset(intptr_t index)
Definition object.h:11422
intptr_t GetFieldIndexByName(Thread *thread, const String &field_name) const
Definition object.cc:27944
ObjectPtr FieldAt(intptr_t field_index) const
Definition object.h:11407
const char * ToCString() const
Definition object.cc:26765
void set_pattern(const String &pattern) const
Definition object.cc:26691
StringPtr pattern() const
Definition object.h:12771
virtual uint32_t CanonicalizeHash() const
Definition object.cc:26824
virtual bool CanonicalizeEquals(const Instance &other) const
Definition object.cc:26803
void set_function(intptr_t cid, bool sticky, const Function &value) const
Definition object.cc:26695
static RegExpPtr New(Zone *zone, Heap::Space space=Heap::kNew)
Definition object.cc:26741
void set_num_bracket_expressions(SmiPtr value) const
void set_capture_name_map(const Array &array) const
Definition object.cc:26737
void set_bytecode(bool is_one_byte, bool sticky, const TypedData &bytecode) const
Definition object.cc:26715
RegExpFlags flags() const
Definition object.h:12865
static DART_NORETURN void LongJump(const Error &error)
Definition report.cc:86
static void MessageF(Kind kind, const Script &script, TokenPosition token_pos, bool report_after_token, const char *format,...) PRINTF_ATTRIBUTE(5
Definition report.cc:123
static constexpr bool AtLocation
Definition report.h:29
static StringPtr PrependSnippet(Kind kind, const Script &script, TokenPosition token_pos, bool report_after_token, const String &message)
Definition report.cc:20
static FunctionPtr ResolveDynamicFunction(Zone *zone, const Class &receiver_class, const String &function_name)
Definition resolver.cc:189
static FunctionPtr ResolveFunction(Zone *zone, const Class &receiver_class, const String &function_name)
Definition resolver.cc:180
static FunctionPtr ResolveDynamicAnyArgs(Zone *zone, const Class &receiver_class, const String &function_name, bool allow_add=true)
Definition resolver.cc:198
static CodePtr Lookup(IsolateGroup *group, uword pc, bool is_return_address)
bool IsCurrentThreadWriter()
Definition lockers.h:354
intptr_t GetTokenLength(const TokenPosition &token_pos) const
Definition object.cc:13348
StringPtr Source() const
Definition object.cc:13140
TypedDataPtr line_starts() const
Definition object.cc:13265
bool IsPartOfDartColonLibrary() const
Definition object.cc:13144
GrowableObjectArrayPtr GenerateLineNumberArray() const
Definition object.cc:13172
void CollectTokenPositionsFor() const
Definition kernel.cc:223
TokenPosition MaxPosition() const
Definition object.cc:13223
intptr_t col_offset() const
Definition object.h:4915
void LoadSourceFromKernel(const uint8_t *kernel_buffer, intptr_t kernel_buffer_len) const
Definition object.cc:13151
LibraryPtr FindLibrary() const
Definition object.cc:13529
StringPtr resolved_url() const
Definition object.cc:13127
bool HasSource() const
Definition object.cc:13136
void InitializeFromKernel(const KernelProgramInfo &info, intptr_t script_index, const TypedData &line_starts, const TypedDataView &constant_coverage) const
Definition object.cc:13159
void set_url(const String &value) const
Definition object.cc:13243
StringPtr GetSnippet(intptr_t from_line, intptr_t from_column, intptr_t to_line, intptr_t to_column) const
Definition object.cc:13478
TypedDataViewPtr constant_coverage() const
Definition object.cc:13256
bool TokenRangeAtLine(intptr_t line_number, TokenPosition *first_token_index, TokenPosition *last_token_index) const
Definition object.cc:13369
StringPtr url() const
Definition object.h:4903
bool IsValidTokenPosition(TokenPosition token_pos) const
Definition object.cc:13303
static ScriptPtr New(const String &url, const String &source)
Definition object.cc:13498
StringPtr GetLine(intptr_t line_number, Heap::Space space=Heap::kNew) const
Definition object.cc:13458
intptr_t line_offset() const
Definition object.h:4914
bool GetTokenLocation(const TokenPosition &token_pos, intptr_t *line, intptr_t *column=nullptr) const
Definition object.cc:13330
static SendPortPtr New(Dart_Port id, Heap::Space space=Heap::kNew)
Definition object.cc:25891
static SentinelPtr New()
Definition object.cc:18779
static void HandleEvent(ServiceEvent *event, bool enter_safepoint=true)
Definition service.cc:1206
static StreamInfo profiler_stream
Definition service.h:188
static SetPtr New(intptr_t class_id, const Array &data, const TypedData &index, intptr_t hash_mask, intptr_t used_data, intptr_t deleted_keys, Heap::Space space=Heap::kNew)
Definition object.cc:25326
static SetPtr NewDefault(intptr_t class_id=kSetCid, Heap::Space space=Heap::kNew)
Definition object.cc:25345
static SmiPtr New(intptr_t value)
Definition object.h:9985
intptr_t Value() const
Definition object.h:9969
virtual bool Equals(const Instance &other) const
Definition object.cc:23330
virtual int64_t AsInt64Value() const
Definition object.cc:23341
virtual int CompareWith(const Integer &other) const
Definition object.cc:23349
virtual uint32_t AsTruncatedUint32Value() const
Definition object.cc:23345
friend class Class
Definition object.h:10026
static bool IsValid(int64_t value)
Definition object.h:10005
virtual double AsDoubleValue() const
Definition object.cc:23337
virtual bool IsNegative() const
Definition object.h:9973
static bool IsPossibleAwaiterLink(const Class &cls)
static constexpr uword kFutureListenerPcOffset
Definition stack_trace.h:19
intptr_t Length() const
Definition object.cc:26099
void SetPcOffsetAtFrame(intptr_t frame_index, uword pc_offset) const
Definition object.cc:26121
bool skip_sync_start_in_parent_stack() const
Definition object.cc:26091
void set_skip_sync_start_in_parent_stack(bool value) const
Definition object.cc:26095
void SetCodeAtFrame(intptr_t frame_index, const Object &code) const
Definition object.cc:26109
static constexpr intptr_t kSyncAsyncCroppedFrames
Definition object.h:12563
void set_async_link(const StackTrace &async_link) const
Definition object.cc:26128
uword PcOffsetAtFrame(intptr_t frame_index) const
Definition object.cc:26115
static StackTracePtr New(const Array &code_array, const TypedData &pc_offset_array, Heap::Space space=Heap::kNew)
Definition object.cc:26148
ObjectPtr CodeAtFrame(intptr_t frame_index) const
Definition object.cc:26104
void set_expand_inlined(bool value) const
Definition object.cc:26140
static StaticTypeExactnessState TriviallyExact(intptr_t type_arguments_offset_in_bytes)
static bool CanRepresentAsTriviallyExact(intptr_t type_arguments_offset_in_bytes)
static StaticTypeExactnessState HasExactSuperType()
static StaticTypeExactnessState NotExact()
static StaticTypeExactnessState NotTracking()
const char * ToCString() const
Definition object.cc:12971
static StaticTypeExactnessState Compute(const Type &static_type, const Instance &value, bool print_trace=false)
Definition object.cc:12847
static StaticTypeExactnessState HasExactSuperClass()
void Add(uint16_t code_unit)
Definition object.h:10480
intptr_t Finalize()
Definition object.h:10496
static StringPtr FromLatin1(const uint8_t *latin1_array, intptr_t array_len, Heap::Space space=Heap::kNew)
Definition object.cc:23812
static StringPtr ScrubNameRetainPrivate(const String &name, bool is_extension=false)
Definition object.cc:427
static constexpr intptr_t kMaxElements
Definition object.h:10152
static void Copy(const String &dst, intptr_t dst_offset, const uint8_t *characters, intptr_t len)
Definition object.cc:23871
static StringPtr NewFormatted(const char *format,...) PRINTF_ATTRIBUTE(1
Definition object.cc:24083
bool EndsWith(const String &other) const
Definition object.cc:23751
static StringPtr RemovePrivateKey(const String &name)
Definition object.cc:233
bool IsOneByteString() const
Definition object.h:10290
static uint32_t SetCachedHashIfNotSet(StringPtr obj, uint32_t hash)
Definition object.h:10428
intptr_t Length() const
Definition object.h:10189
static uword HashRawSymbol(const StringPtr symbol)
Definition object.h:10226
static bool ParseDouble(const String &str, intptr_t start, intptr_t end, double *result)
Definition object.cc:24286
char * ToMallocCString() const
Definition object.cc:24217
static StringPtr ToLowerCase(const String &str, Heap::Space space=Heap::kNew)
Definition object.cc:24281
bool HasHash() const
Definition object.h:10208
static StringPtr static StringPtr static StringPtr NewFormattedV(const char *format, va_list args, Heap::Space space=Heap::kNew)
Definition object.cc:24101
static uword HashConcat(const String &str1, const String &str2)
Definition object.cc:23564
static StringPtr ConcatAllRange(const Array &strings, intptr_t start, intptr_t end, Heap::Space space=Heap::kNew)
Definition object.cc:24131
static StringPtr ConcatAll(const Array &strings, Heap::Space space=Heap::kNew)
Definition object.cc:24127
void SetHash(intptr_t value) const
Definition object.h:10456
static const char * ScrubName(const String &name, bool is_extension=false)
Definition object.cc:287
void SetLength(intptr_t value) const
Definition object.h:10450
bool Equals(const String &str) const
Definition object.h:13311
static constexpr intptr_t kOneByteChar
Definition object.h:10139
static StringPtr New(const char *cstr, Heap::Space space=Heap::kNew)
Definition object.cc:23777
static constexpr intptr_t kTwoByteChar
Definition object.h:10140
intptr_t CompareTo(const String &other) const
Definition object.cc:23717
intptr_t CharSize() const
Definition object.cc:23601
void ToUTF8(uint8_t *utf8_array, intptr_t array_len) const
Definition object.cc:24225
bool IsTwoByteString() const
Definition object.h:10294
uint16_t CharAt(intptr_t index) const
Definition object.h:10238
static StringPtr SubString(const String &str, intptr_t begin_index, Heap::Space space=Heap::kNew)
Definition object.cc:24159
bool StartsWith(const String &other) const
Definition object.h:10277
static StringPtr EscapeSpecialCharacters(const String &str)
Definition object.cc:23940
static StringPtr Concat(const String &str1, const String &str2, Heap::Space space=Heap::kNew)
Definition object.cc:24116
virtual InstancePtr CanonicalizeLocked(Thread *thread) const
Definition object.cc:23770
static StringPtr DecodeIRI(const String &str)
Definition object.cc:24027
static const char * ToCString(Thread *thread, StringPtr ptr)
Definition object.cc:24205
static StringPtr Transform(int32_t(*mapping)(int32_t ch), const String &str, Heap::Space space=Heap::kNew)
Definition object.cc:24251
static StringPtr ToUpperCase(const String &str, Heap::Space space=Heap::kNew)
Definition object.cc:24276
bool IsSymbol() const
Definition object.h:10288
static StringPtr FromUTF16(const uint16_t *utf16_array, intptr_t array_len, Heap::Space space=Heap::kNew)
Definition object.cc:23818
uword Hash() const
Definition object.h:10195
virtual bool CanonicalizeEquals(const Instance &other) const
Definition object.h:10269
bool EqualsConcat(const String &str1, const String &str2) const
Definition object.cc:23711
friend class Class
Definition object.h:10463
static bool EqualsIgnoringPrivateKey(const String &str1, const String &str2)
Definition object.cc:24378
static const char * EncodeIRI(const String &str)
Definition object.cc:23997
static uint32_t GetCachedHash(const StringPtr obj)
Definition object.h:10424
static StringPtr FromUTF32(const int32_t *utf32_array, intptr_t array_len, Heap::Space space=Heap::kNew)
Definition object.cc:23834
static StringPtr FromUTF8(const uint8_t *utf8_array, intptr_t array_len, Heap::Space space=Heap::kNew)
Definition object.cc:23784
static intptr_t LengthOf(StringPtr obj)
Definition object.h:10190
static const char * NameOfStub(uword entry_point)
Definition stub_code.cc:330
static bool HasBeenInitialized()
Definition stub_code.h:41
void GetCurrentCheck(intptr_t ix, Object *instance_class_id_or_signature, AbstractType *destination_type, TypeArguments *instance_type_arguments, TypeArguments *instantiator_type_arguments, TypeArguments *function_type_arguments, TypeArguments *instance_parent_function_type_arguments, TypeArguments *instance_delayed_type_arguments, Bool *test_result) const
Definition object.cc:19417
void WriteEntryToBuffer(Zone *zone, BaseTextBuffer *buffer, intptr_t index, const char *line_prefix=nullptr) const
Definition object.cc:19546
intptr_t num_occupied() const
Definition object.h:7792
static constexpr intptr_t kMaxLinearCacheSize
Definition object.h:7883
intptr_t NumEntries() const
Definition object.cc:19009
bool Equals(const SubtypeTestCache &other) const
Definition object.cc:19676
static SubtypeTestCachePtr New(intptr_t num_inputs)
Definition object.cc:18974
static constexpr intptr_t kMaxInputs
Definition object.h:7676
void GetCheck(intptr_t ix, Object *instance_class_id_or_signature, AbstractType *destination_type, TypeArguments *instance_type_arguments, TypeArguments *instantiator_type_arguments, TypeArguments *function_type_arguments, TypeArguments *instance_parent_function_type_arguments, TypeArguments *instance_delayed_type_arguments, Bool *test_result) const
Definition object.cc:19396
SubtypeTestCachePtr Copy(Thread *thread) const
Definition object.cc:19689
ArrayPtr cache() const
Definition object.cc:18988
static intptr_t UsedInputsForType(const AbstractType &type)
Definition object.cc:19718
intptr_t num_inputs() const
Definition object.h:7790
intptr_t NumberOfChecks() const
Definition object.cc:19004
bool GetNextCheck(intptr_t *ix, Object *instance_class_id_or_signature, AbstractType *destination_type, TypeArguments *instance_type_arguments, TypeArguments *instantiator_type_arguments, TypeArguments *function_type_arguments, TypeArguments *instance_parent_function_type_arguments, TypeArguments *instance_delayed_type_arguments, Bool *test_result) const
Definition object.cc:19485
bool IsHash() const
Definition object.cc:19019
intptr_t AddCheck(const Object &instance_class_id_or_signature, const AbstractType &destination_type, const TypeArguments &instance_type_arguments, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const TypeArguments &instance_parent_function_type_arguments, const TypeArguments &instance_delayed_type_arguments, const Bool &test_result) const
Definition object.cc:19028
bool HasCheck(const Object &instance_class_id_or_signature, const AbstractType &destination_type, const TypeArguments &instance_type_arguments, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const TypeArguments &instance_parent_function_type_arguments, const TypeArguments &instance_delayed_type_arguments, intptr_t *index, Bool *result) const
Definition object.cc:19514
@ kInstanceDelayedFunctionTypeArguments
Definition object.h:7665
@ kInstanceParentFunctionTypeArguments
Definition object.h:7664
static constexpr intptr_t kMaxLinearCacheEntries
Definition object.h:7801
void WriteToBuffer(Zone *zone, BaseTextBuffer *buffer, const char *line_prefix=nullptr) const
Definition object.cc:19557
bool IsOccupied(intptr_t index) const
Definition object.cc:19708
static SuspendStatePtr Clone(Thread *thread, const SuspendState &src, Heap::Space space=Heap::kNew)
Definition object.cc:26611
static intptr_t FrameSizeGrowthGap()
Definition object.h:12611
static intptr_t payload_offset()
Definition object.h:12633
CodePtr GetCodeObject() const
Definition object.cc:26675
static intptr_t InstanceSize()
Definition object.h:12600
static SuspendStatePtr New(intptr_t frame_size, const Instance &function_data, Heap::Space space=Heap::kNew)
Definition object.cc:26587
static bool IsSymbolCid(Thread *thread, classid_t class_id)
Definition object.cc:20334
static uint32_t CanonicalizeHash(Thread *thread, const Instance &instance)
Definition object.cc:20340
static const String & HashMark()
Definition symbols.h:671
static const String & NewLine()
Definition symbols.h:650
static StringPtr LookupFromGet(Thread *thread, const String &str)
Definition symbols.cc:424
static StringPtr FromConcat(Thread *thread, const String &str1, const String &str2)
Definition symbols.cc:235
static StringPtr LookupFromSet(Thread *thread, const String &str)
Definition symbols.cc:428
static const String & Library()
Definition symbols.h:689
static StringPtr FromGet(Thread *thread, const String &str)
Definition symbols.cc:247
static StringPtr FromSet(Thread *thread, const String &str)
Definition symbols.cc:251
static void SetupSymbolTable(IsolateGroup *isolate_group)
Definition symbols.cc:168
static StringPtr FromConcatAll(Thread *thread, const GrowableHandlePtrArray< const String > &strs)
Definition symbols.cc:262
static const String & Void()
Definition symbols.h:693
static const String & This()
Definition symbols.h:691
static const String & Empty()
Definition symbols.h:687
static StringPtr New(Thread *thread, const char *cstr)
Definition symbols.h:722
static const String & Dot()
Definition symbols.h:612
static const String & Equals()
Definition symbols.h:613
Zone * zone() const
LongJumpScope * long_jump_base() const
void DeferredMarkingStackAddObject(ObjectPtr obj)
Definition thread.cc:833
int32_t no_callback_scope_depth() const
Definition thread.h:618
static Thread * Current()
Definition thread.h:361
int32_t no_safepoint_scope_depth() const
Definition thread.h:705
bool is_marking() const
Definition thread.h:669
void IncrementNoCallbackScopeDepth()
Definition thread.h:619
Heap * heap() const
Definition thread.cc:876
void CheckForSafepoint()
Definition thread.h:1091
void ClearStickyError()
Definition thread.cc:239
uword top_exit_frame_info() const
Definition thread.h:678
bool OwnsDeoptSafepoint() const
Definition thread.cc:1291
bool IsDartMutatorThread() const
Definition thread.h:546
Random * random()
Definition thread.h:1120
ExecutionState execution_state() const
Definition thread.h:1027
Isolate * isolate() const
Definition thread.h:533
IsolateGroup * isolate_group() const
Definition thread.h:540
void DecrementNoCallbackScopeDepth()
Definition thread.h:623
HeapProfileSampler & heap_sampler()
Definition thread.h:1128
bool IsNoSource() const
intptr_t Pos() const
const char * ToCString() const
int32_t Serialize() const
static TokenPosition Deserialize(int32_t value)
static constexpr int32_t kMaxSourcePos
static const TokenPosition kMinSource
bool IsClassifying() const
static TransferableTypedDataPtr New(uint8_t *data, intptr_t len)
Definition object.cc:25914
static intptr_t UnroundedSize(TwoByteStringPtr str)
Definition object.h:10677
static TwoByteStringPtr New(intptr_t len, Heap::Space space)
Definition object.cc:24641
static TwoByteStringPtr Transform(int32_t(*mapping)(int32_t ch), const String &str, Heap::Space space)
Definition object.cc:24752
static constexpr intptr_t kBytesPerElement
Definition object.h:10662
static TwoByteStringPtr ConcatAll(const Array &strings, intptr_t start, intptr_t end, intptr_t len, Heap::Space space)
Definition object.cc:24731
static intptr_t InstanceSize()
Definition object.h:10683
static intptr_t data_offset()
Definition object.h:10674
static TwoByteStringPtr Concat(const String &str1, const String &str2, Heap::Space space)
Definition object.cc:24719
static TwoByteStringPtr EscapeSpecialCharacters(const String &str)
Definition object.cc:24609
bool IsOccupied(intptr_t entry) const
Definition object.cc:7114
KeyLocation FindKeyOrUnused(const TypeArguments &instantiator_tav, const TypeArguments &function_tav) const
Definition object.h:8839
Cache(Zone *zone, const TypeArguments &source)
Definition object.cc:7035
static SmiPtr Sentinel()
Definition object.cc:7231
TypeArgumentsPtr Retrieve(intptr_t entry) const
Definition object.cc:7120
static const Array & EmptyStorage()
Definition object.h:8865
intptr_t NumOccupied() const
Definition object.h:8827
KeyLocation AddEntry(intptr_t entry, const TypeArguments &instantiator_tav, const TypeArguments &function_tav, const TypeArguments &instantiated_tav) const
Definition object.cc:7172
intptr_t NumEntries() const
Definition object.h:8899
friend class Object
Definition object.h:9016
bool IsEquivalent(const TypeArguments &other, TypeEquality kind, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition object.h:8665
void EnumerateURIs(URIs *uris) const
Definition object.cc:7877
void PrintSubvectorName(intptr_t from_index, intptr_t len, NameVisibility name_visibility, BaseTextBuffer *printer) const
Definition object.cc:6941
static constexpr intptr_t kNullabilityBitsPerType
Definition object.h:8592
StringPtr Name() const
Definition object.cc:6927
static constexpr intptr_t kNullableBits
Definition object.h:8596
bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition object.h:8681
uword Hash() const
Definition object.h:13370
TypeArgumentsPtr ToInstantiatorTypeArguments(Thread *thread, const Class &cls) const
Definition object.cc:7853
intptr_t Length() const
Definition object.cc:7352
intptr_t nullability() const
Definition object.cc:7359
uword HashForRange(intptr_t from_index, intptr_t len) const
Definition object.cc:6836
static intptr_t types_offset()
Definition object.h:8560
TypeArgumentsPtr TruncatedTo(intptr_t length) const
Definition object.cc:7572
bool Equals(const TypeArguments &other) const
Definition object.h:8660
static intptr_t InstanceSize()
Definition object.h:8962
static constexpr intptr_t kLegacyBits
Definition object.h:8597
bool IsSubvectorInstantiated(intptr_t from_index, intptr_t len, Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition object.cc:7387
bool CanShareFunctionTypeArguments(const Function &function, bool *with_runtime_check=nullptr) const
Definition object.cc:7530
static constexpr intptr_t kMaxElements
Definition object.h:8960
void PrintTo(BaseTextBuffer *printer) const
Definition object.cc:6965
bool CanShareInstantiatorTypeArguments(const Class &instantiator_class, bool *with_runtime_check=nullptr) const
Definition object.cc:7442
InstantiationMode GetInstantiationMode(Zone *zone, const Function *function=nullptr, const Class *cls=nullptr) const
Definition object.cc:6905
TypeArgumentsPtr InstantiateAndCanonicalizeFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments) const
Definition object.cc:7678
bool IsUninstantiatedIdentity() const
Definition object.cc:7410
TypeArgumentsPtr FromInstanceTypeArguments(Thread *thread, const Class &cls) const
Definition object.cc:7826
AbstractTypePtr TypeAtNullSafe(intptr_t index) const
Definition object.cc:7372
bool IsSubvectorEquivalent(const TypeArguments &other, intptr_t from_index, intptr_t len, TypeEquality kind, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition object.cc:6978
bool IsFinalized() const
Definition object.cc:7585
bool HasInstantiations() const
Definition object.cc:7327
TypeArgumentsPtr ConcatenateTypeParameters(Zone *zone, const TypeArguments &other) const
Definition object.cc:6885
static constexpr intptr_t kNonNullableBits
Definition object.h:8595
StringPtr UserVisibleName() const
Definition object.cc:6934
bool HasCount(intptr_t count) const
Definition object.cc:7345
static constexpr intptr_t kNullabilityMaxTypes
Definition object.h:8593
static constexpr intptr_t kAllDynamicHash
Definition object.h:8549
void SetTypeAt(intptr_t index, const AbstractType &value) const
Definition object.cc:7381
friend class Class
Definition object.h:9014
TypeArgumentsPtr InstantiateFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping=nullptr, intptr_t num_parent_type_args_adjustment=0) const
Definition object.cc:7598
TypeArgumentsPtr Canonicalize(Thread *thread) const
Definition object.cc:7761
bool IsRaw(intptr_t from_index, intptr_t len) const
Definition object.h:8620
TypeArgumentsPtr UpdateFunctionTypes(intptr_t num_parent_type_args_adjustment, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping) const
Definition object.cc:7639
static TypeArgumentsPtr New(intptr_t len, Heap::Space space=Heap::kOld)
Definition object.cc:7733
AbstractTypePtr TypeAt(intptr_t index) const
Definition object.cc:7366
TypeArgumentsPtr Prepend(Zone *zone, const TypeArguments &other, intptr_t other_length, intptr_t total_length) const
Definition object.cc:6858
virtual uword ComputeHash() const
Definition object.cc:22976
bool IsClassTypeParameter() const
Definition object.h:9796
void set_parameterized_class_id(classid_t value) const
Definition object.cc:22778
virtual AbstractTypePtr UpdateFunctionTypes(intptr_t num_parent_type_args_adjustment, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping) const
Definition object.cc:22910
intptr_t index() const
Definition object.h:9800
TypeParameterPtr ToNullability(Nullability value, Heap::Space space) const
Definition object.cc:22674
virtual void PrintName(NameVisibility visibility, BaseTextBuffer *printer) const
Definition object.cc:22968
virtual bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition object.cc:22696
void set_index(intptr_t value) const
Definition object.cc:22804
classid_t parameterized_class_id() const
Definition object.cc:22771
ClassPtr parameterized_class() const
Definition object.cc:22783
AbstractTypePtr bound() const
Definition object.cc:22810
AbstractTypePtr GetFromTypeArguments(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments) const
Definition object.cc:22827
virtual bool IsEquivalent(const Instance &other, TypeEquality kind, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition object.cc:22706
virtual AbstractTypePtr InstantiateFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping=nullptr, intptr_t num_parent_type_args_adjustment=0) const
Definition object.cc:22837
void set_base(intptr_t value) const
Definition object.cc:22798
intptr_t base() const
Definition object.h:9798
const char * CanonicalNameCString() const
Definition object.h:9848
bool IsFunctionTypeParameter() const
Definition object.h:9792
FunctionTypePtr parameterized_function_type() const
Definition object.cc:22793
virtual AbstractTypePtr Canonicalize(Thread *thread) const
Definition object.cc:22932
void SetNameAt(intptr_t index, const String &value) const
Definition object.cc:6634
void SetBoundAt(intptr_t index, const AbstractType &value) const
Definition object.cc:6654
void Print(Thread *thread, Zone *zone, bool are_class_type_parameters, intptr_t base, NameVisibility name_visibility, BaseTextBuffer *printer) const
Definition object.cc:6726
intptr_t Length() const
Definition object.cc:6619
static constexpr intptr_t kFlagsPerSmiShift
Definition object.h:8496
static constexpr intptr_t kFlagsPerSmiMask
Definition object.h:8502
StringPtr NameAt(intptr_t index) const
Definition object.cc:6629
AbstractTypePtr DefaultAt(intptr_t index) const
Definition object.cc:6669
bool IsGenericCovariantImplAt(intptr_t index) const
Definition object.cc:6707
static TypeParametersPtr New(Heap::Space space=Heap::kOld)
Definition object.cc:6778
bool AllDynamicDefaults() const
Definition object.cc:6681
bool AllDynamicBounds() const
Definition object.cc:6660
AbstractTypePtr BoundAt(intptr_t index) const
Definition object.cc:6648
void SetDefaultAt(intptr_t index, const AbstractType &value) const
Definition object.cc:6675
void SetIsGenericCovariantImplAt(intptr_t index, bool value) const
Definition object.cc:6714
static CodePtr DefaultCodeForType(const AbstractType &type, bool lazy_specialize=true)
virtual AbstractTypePtr UpdateFunctionTypes(intptr_t num_parent_type_args_adjustment, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping) const
Definition object.cc:22083
TypeArgumentsPtr GetInstanceTypeArguments(Thread *thread, bool canonicalize=true) const
Definition object.cc:22480
static TypePtr Float32x4()
Definition object.cc:21910
static TypePtr VoidType()
Definition object.cc:21870
virtual void PrintName(NameVisibility visibility, BaseTextBuffer *printer) const
Definition object.cc:22374
friend class TypeArguments
Definition object.h:9487
static TypePtr SmiType()
Definition object.cc:21894
static TypePtr BoolType()
Definition object.cc:21882
TypePtr ToNullability(Nullability value, Heap::Space space) const
Definition object.cc:21971
static TypePtr New(const Class &clazz, const TypeArguments &arguments, Nullability nullability=Nullability::kLegacy, Heap::Space space=Heap::kOld)
Definition object.cc:22492
bool IsDeclarationTypeOf(const Class &cls) const
Definition object.cc:22243
virtual AbstractTypePtr InstantiateFrom(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, intptr_t num_free_fun_type_params, Heap::Space space, FunctionTypeMapping *function_type_mapping=nullptr, intptr_t num_parent_type_args_adjustment=0) const
Definition object.cc:22049
virtual AbstractTypePtr Canonicalize(Thread *thread) const
Definition object.cc:22255
static TypePtr NullableDouble()
Definition object.cc:21906
static TypePtr Float64x2()
Definition object.cc:21914
static TypePtr DartTypeType()
Definition object.cc:21942
static TypePtr NullableNumber()
Definition object.cc:21926
static TypePtr NeverType()
Definition object.cc:21874
virtual bool IsInstantiated(Genericity genericity=kAny, intptr_t num_free_fun_type_params=kAllFree) const
Definition object.cc:22032
virtual TypeArgumentsPtr arguments() const
Definition object.h:9361
virtual bool IsEquivalent(const Instance &other, TypeEquality kind, FunctionTypeMapping *function_type_equivalence=nullptr) const
Definition object.cc:22126
static TypePtr Number()
Definition object.cc:21922
static TypePtr ObjectType()
Definition object.cc:21878
void set_type_class(const Class &value) const
Definition object.cc:22462
static TypePtr NullableIntType()
Definition object.cc:21890
void set_arguments(const TypeArguments &value) const
Definition object.cc:22467
static TypePtr Double()
Definition object.cc:21902
static TypePtr DynamicType()
Definition object.cc:21866
static TypePtr DartFunctionType()
Definition object.cc:21938
static TypePtr ArrayType()
Definition object.cc:21934
virtual classid_t type_class_id() const
Definition object.cc:22024
virtual uword ComputeHash() const
Definition object.cc:22400
static TypePtr StringType()
Definition object.cc:21930
friend class Class
Definition object.h:9486
static TypePtr MintType()
Definition object.cc:21898
virtual void EnumerateURIs(URIs *uris) const
Definition object.cc:22359
static TypePtr IntType()
Definition object.cc:21886
static TypePtr NullType()
Definition object.cc:21862
virtual ClassPtr type_class() const
Definition object.cc:22028
static TypePtr NewNonParameterizedType(const Class &type_class)
Definition object.cc:21946
static TypePtr Int32x4()
Definition object.cc:21918
TypedDataElementType ElementType() const
Definition object.h:11501
intptr_t Length() const
Definition object.h:11492
TypedDataViewPtr ViewFromTo(intptr_t start, intptr_t end, Heap::Space space=Heap::kNew) const
Definition object.cc:25761
intptr_t ElementSizeInBytes() const
Definition object.h:11505
static intptr_t length_offset()
Definition object.h:11486
intptr_t LengthInBytes() const
Definition object.h:11497
bool IsExternalOrExternalView() const
Definition object.cc:25751
void * DataAddr(intptr_t byte_offset) const
Definition object.h:11545
static TypedDataViewPtr New(intptr_t class_id, Heap::Space space=Heap::kNew)
Definition object.cc:25737
static intptr_t InstanceSize()
Definition object.h:11767
static intptr_t payload_offset()
Definition object.h:11643
static TypedDataPtr Grow(const TypedData &current, intptr_t len, Heap::Space space=Heap::kNew)
Definition object.cc:25680
virtual bool CanonicalizeEquals(const Instance &other) const
Definition object.cc:25629
virtual uint32_t CanonicalizeHash() const
Definition object.cc:25654
static intptr_t MaxElements(intptr_t class_id)
Definition object.h:11658
static TypedDataPtr New(intptr_t class_id, intptr_t len, Heap::Space space=Heap::kNew)
Definition object.cc:25666
static intptr_t InstanceSize()
Definition object.h:11647
static DART_FORCE_INLINE constexpr intptr_t Length()
Definition class_table.h:67
virtual const char * ToErrorCString() const
Definition object.cc:20013
InstancePtr exception() const
Definition object.h:8117
static UnhandledExceptionPtr New(const Instance &exception, const Instance &stacktrace, Heap::Space space=Heap::kNew)
Definition object.cc:19989
InstancePtr stacktrace() const
Definition object.h:8122
bool can_patch_to_monomorphic() const
Definition object.h:2385
static const char * KindToCString(Kind k)
static constexpr uword update(intptr_t size, uword tag)
Definition raw_object.h:209
static ObjectPtr FromAddr(uword addr)
Definition raw_object.h:495
static bool IsMarked(uword tags)
Definition raw_object.h:298
static uword ToAddr(const UntaggedObject *raw_obj)
Definition raw_object.h:501
intptr_t HeapSize() const
Definition raw_object.h:380
bool InVMIsolateHeap() const
Definition raw_object.cc:20
void VisitPointersPrecise(ObjectPointerVisitor *visitor)
intptr_t VisitPointers(ObjectPointerVisitor *visitor)
Definition raw_object.h:426
void SetMarkBitUnsynchronized()
Definition raw_object.h:307
void set_is_user_initiated(bool value) const
Definition object.cc:20068
static UnwindErrorPtr New(const String &message, Heap::Space space=Heap::kNew)
Definition object.cc:20055
StringPtr message() const
Definition object.h:8153
virtual const char * ToErrorCString() const
Definition object.cc:20072
static UserTagPtr FindTagById(const Isolate *isolate, uword tag_id)
Definition object.cc:27132
static UserTagPtr New(const String &label, Heap::Space space=Heap::kOld)
Definition object.cc:27028
uword tag() const
Definition object.h:13128
static UserTagPtr FindTagInIsolate(Isolate *isolate, Thread *thread, const String &label)
Definition object.cc:27070
UserTagPtr MakeActive() const
Definition object.cc:27007
static UserTagPtr DefaultTag()
Definition object.cc:27053
static bool TagTableIsFull(Thread *thread)
Definition object.cc:27123
StringPtr label() const
Definition object.h:13142
static constexpr intptr_t kMaxUserTags
Definition tags.h:110
static bool IsTagNameStreamable(const char *tag)
Definition tags.cc:177
static constexpr uword kUserTagIdOffset
Definition tags.h:111
static constexpr uword kDefaultUserTag
Definition tags.h:112
static int32_t Decode(uint16_t lead, uint16_t trail)
Definition unicode.h:151
static void Encode(int32_t codepoint, uint16_t *dst)
Definition unicode.cc:273
static bool IsLeadSurrogate(uint32_t ch)
Definition unicode.h:126
static intptr_t Length(int32_t ch)
Definition unicode.h:118
static bool IsTrailSurrogate(uint32_t ch)
Definition unicode.h:131
static intptr_t Length(int32_t ch)
Definition unicode.cc:98
@ kSupplementary
Definition unicode.h:46
static intptr_t CodeUnitCount(const uint8_t *utf8_array, intptr_t array_len, Type *type)
Definition unicode.cc:46
static intptr_t ReportInvalidByte(const uint8_t *utf8_array, intptr_t array_len, intptr_t len)
Definition unicode.cc:163
static bool DecodeToUTF16(const uint8_t *utf8_array, intptr_t array_len, uint16_t *dst, intptr_t len)
Definition unicode.cc:217
static intptr_t Decode(const uint8_t *utf8_array, intptr_t array_len, int32_t *ch)
Definition unicode.cc:135
static bool DecodeToLatin1(const uint8_t *utf8_array, intptr_t array_len, uint8_t *dst, intptr_t len)
Definition unicode.cc:194
static intptr_t Encode(int32_t ch, char *dst)
Definition unicode.cc:110
static bool IsSupplementary(int32_t code_point)
Definition unicode.h:31
static bool IsBmp(int32_t code_point)
Definition unicode.h:27
static bool IsLatin1(int32_t code_point)
Definition unicode.h:23
static bool IsInt(intptr_t N, T value)
Definition utils.h:298
static constexpr uintptr_t RoundUpToPowerOfTwo(uintptr_t x)
Definition utils.h:120
static T MulWithWrapAround(T a, T b)
Definition utils.h:434
static constexpr T Maximum(T x, T y)
Definition utils.h:26
static int SNPrint(char *str, size_t size, const char *format,...) PRINTF_ATTRIBUTE(3
static constexpr int ShiftForPowerOfTwo(T x)
Definition utils.h:66
static int static int VSNPrint(char *str, size_t size, const char *format, va_list args)
static T Minimum(T x, T y)
Definition utils.h:21
static T AddWithWrapAround(T a, T b)
Definition utils.h:416
static T SubWithWrapAround(T a, T b)
Definition utils.h:425
static uint32_t BigEndianToHost32(uint32_t be_value)
Definition utils.h:503
static bool IsUint(intptr_t N, T value)
Definition utils.h:313
static constexpr bool IsAligned(T x, uintptr_t alignment, uintptr_t offset=0)
Definition utils.h:77
static int64_t ShiftLeftWithTruncation(int64_t a, int64_t b)
Definition utils.h:449
static constexpr bool IsPowerOfTwo(T x)
Definition utils.h:61
static void Protect(void *address, intptr_t size, Protection mode)
static WeakPropertyPtr New(Heap::Space space=Heap::kNew)
Definition object.cc:26835
static intptr_t type_arguments_offset()
Definition object.h:12926
static WeakReferencePtr New(Heap::Space space=Heap::kNew)
Definition object.cc:26845
static ObjectPtr Unwrap(ObjectPtr obj)
Definition object.h:6640
static ObjectPtr UnwrapIfTarget(ObjectPtr obj)
Definition object.h:6649
void VisitPointers(ObjectPtr *from, ObjectPtr *to) override
Definition object.cc:2899
WriteBarrierUpdateVisitor(Thread *thread, ObjectPtr obj)
Definition object.cc:2892
char * PrintToString(const char *format,...) PRINTF_ATTRIBUTE(2
Definition zone.cc:313
ElementType * Alloc(intptr_t length)
ObjectPoolBuilder & object_pool_builder()
intptr_t UncheckedEntryOffset() const
const ZoneGrowableArray< intptr_t > & GetPointerOffsets() const
void FinalizeInstructions(const MemoryRegion &region)
Object & GetSelfHandle() const
ObjectPtr LoadExpressionEvaluationFunction(const String &library_url, const String &klass)
static StringPtr FindSourceForScript(const uint8_t *kernel_buffer, intptr_t kernel_buffer_length, const String &url)
static uint32_t CalculateFunctionFingerprint(const Function &func)
static uint32_t CalculateFieldFingerprint(const Field &field)
static uint32_t CalculateClassFingerprint(const Class &klass)
#define THR_Print(format,...)
Definition log.h:20
#define kIsolateSnapshotInstructionsAsmSymbol
Definition dart_api.h:3911
#define ILLEGAL_PORT
Definition dart_api.h:1530
int64_t Dart_Port
Definition dart_api.h:1524
#define DART_WARN_UNUSED_RESULT
Definition dart_api.h:66
#define kVmSnapshotInstructionsAsmSymbol
Definition dart_api.h:3908
void(* Dart_HandleFinalizer)(void *isolate_callback_data, void *peer)
Definition dart_api.h:265
#define UNIMPLEMENTED
const EmbeddedViewParams * params
#define ASSERT(E)
VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE auto & d
Definition main.cc:19
VkInstance instance
Definition main.cc:48
sk_sp< SkImage > image
Definition examples.cpp:29
SkBitmap source
Definition examples.cpp:28
static bool b
struct MyStruct s
struct MyStruct a[10]
#define FATAL(error)
FlutterSemanticsFlag flag
AtkStateType state
FlutterSemanticsFlag flags
glong glong end
G_BEGIN_DECLS G_MODULE_EXPORT FlValue * args
FlKeyEvent uint64_t FlKeyResponderAsyncCallback callback
FlKeyEvent * event
static const uint8_t buffer[]
const uint8_t uint32_t uint32_t GError ** error
uint8_t value
GAsyncResult * result
uint32_t uint32_t * format
uint32_t * target
const char * charp
Definition flags.h:12
#define DECLARE_FLAG(type, name)
Definition flags.h:14
#define DEFINE_FLAG(type, name, default_value, comment)
Definition flags.h:16
#define DEFINE_FLAG_HANDLER(handler, name, comment)
Definition flags.h:20
Dart_NativeFunction function
Definition fuchsia.cc:51
const char * name
Definition fuchsia.cc:50
#define HANDLESCOPE(thread)
Definition handles.h:321
#define TIR_Print(format,...)
#define TYPED_DATA_GET_INDEXED_CASES(clazz)
size_t length
Win32Message message
#define MSAN_CHECK_INITIALIZED(ptr, len)
double y
double x
void UnboxFieldIfSupported(const dart::Field &field, const dart::AbstractType &type)
FunctionPtr CreateFieldInitializerFunction(Thread *thread, Zone *zone, const Field &field)
link(from_root, to_root)
Definition dart_pkg.py:44
static constexpr intptr_t kNullIdentityHash
Definition object.h:10763
bool IsTypedDataViewClassId(intptr_t index)
Definition class_id.h:439
ArrayOfTuplesView< TypeArguments::Cache::Entry, std::tuple< Object, TypeArguments, TypeArguments >, TypeArguments::Cache::kHeaderSize > InstantiationsCacheTable
Definition object.h:13540
static ArrayPtr CreateCallableArgumentsFromStatic(Zone *zone, const Instance &receiver, const Array &static_args, const Array &arg_names, const ArgumentsDescriptor &static_args_descriptor)
Definition object.cc:4707
bool IsTypedDataClassId(intptr_t index)
Definition class_id.h:433
static void AppendSubString(BaseTextBuffer *buffer, const char *name, intptr_t start_pos, intptr_t len)
Definition object.cc:193
static constexpr intptr_t kFalseIdentityHash
Definition object.h:10765
const intptr_t kSmiBits
Definition globals.h:24
static bool IsIdentChar(int32_t c)
Definition object.cc:13325
static void TransferableTypedDataFinalizer(void *isolate_callback_data, void *peer)
Definition object.cc:25909
static void ReportTooManyTypeArguments(const Class &cls)
Definition object.cc:3160
void DoubleToCString(double d, char *buffer, int buffer_size)
bool CStringToDouble(const char *str, intptr_t length, double *result)
static constexpr intptr_t kOldObjectAlignmentOffset
static bool EvaluationFunctionNeedsReceiver(Thread *thread, Zone *zone, const Function &eval_function)
Definition object.cc:4828
static const char *const names[]
Definition symbols.cc:24
InstantiationMode
static constexpr intptr_t kNewObjectAlignmentOffset
static type SpecialCharacter(type value)
Definition object.cc:530
const char *const name
static constexpr intptr_t kCompressedWordSizeLog2
Definition globals.h:43
static bool EqualsIgnoringPrivateKey(const String &str1, const String &str2)
Definition object.cc:24326
const intptr_t kSmiMax
Definition globals.h:28
bool IsTypedDataBaseClassId(intptr_t index)
Definition class_id.h:429
constexpr intptr_t kBitsPerWord
Definition globals.h:514
uword cpp_vtable
Definition globals.h:163
static constexpr intptr_t kBoolValueMask
static const intptr_t kGetterPrefixLength
Definition object.cc:116
static constexpr intptr_t kFalseOffsetFromNull
static int32_t GetHexCharacter(int32_t c)
Definition object.cc:23975
static StaticTypeExactnessState TrivialTypeExactnessFor(const Class &cls)
Definition object.cc:12831
static intptr_t GetListLength(const Object &value)
Definition object.cc:12526
Nullability
Definition object.h:1112
static int32_t EscapeOverhead(int32_t c)
Definition object.cc:520
DART_WARN_UNUSED_RESULT ErrorPtr EntryPointFieldInvocationError(const String &getter_name)
Definition object.cc:27272
QualifiedFunctionLibKind
Definition object.cc:9754
@ kQualifiedFunctionLibKindLibUrl
Definition object.cc:9755
@ kQualifiedFunctionLibKindLibName
Definition object.cc:9756
DART_EXPORT bool IsNull(Dart_Handle object)
static constexpr intptr_t kBoolVsNullMask
bool IsTypeClassId(intptr_t index)
Definition class_id.h:370
intptr_t RawSmiValue(const SmiPtr raw_value)
static bool SubtypeTestCacheEntryMatches(const SubtypeTestCacheTable::TupleView &t, intptr_t num_inputs, const Object &instance_class_id_or_signature, const AbstractType &destination_type, const TypeArguments &instance_type_arguments, const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const TypeArguments &instance_parent_function_type_arguments, const TypeArguments &instance_delayed_type_arguments)
Definition object.cc:19116
static ObjectPtr EvaluateCompiledExpressionHelper(Zone *zone, const Function &eval_function, const Array &type_definitions, const Array &arguments, const TypeArguments &type_arguments)
Definition object.cc:4837
void * malloc(size_t size)
Definition allocation.cc:19
uint32_t CombineHashes(uint32_t hash, uint32_t other_hash)
Definition hash.h:12
const char *const class_name
FfiCallbackKind
Definition object.h:2964
bool IsArrayClassId(intptr_t index)
Definition class_id.h:358
int32_t classid_t
Definition globals.h:524
static bool IsPercent(int32_t c)
Definition object.cc:23948
static constexpr intptr_t kTrueOffsetFromNull
DART_WARN_UNUSED_RESULT ErrorPtr EntryPointMemberInvocationError(const Object &member)
Definition object.cc:27288
bool IsUnmodifiableTypedDataViewClassId(intptr_t index)
Definition class_id.h:453
@ kForwardingCorpse
Definition class_id.h:225
@ kIllegalCid
Definition class_id.h:214
@ kNullCid
Definition class_id.h:252
@ kNumPredefinedCids
Definition class_id.h:257
@ kByteDataViewCid
Definition class_id.h:244
@ kVoidCid
Definition class_id.h:254
@ kByteBufferCid
Definition class_id.h:247
@ kDynamicCid
Definition class_id.h:253
@ kNeverCid
Definition class_id.h:255
@ kFreeListElement
Definition class_id.h:224
@ kUnmodifiableByteDataViewCid
Definition class_id.h:245
static bool IsHexCharacter(int32_t c)
Definition object.cc:23952
static bool IsSpecialCharacter(type value)
Definition object.cc:510
static TypeArgumentsPtr RetrieveInstantiatorTypeArguments(Zone *zone, const Function &function, const Instance &receiver)
Definition object.cc:9545
bool IsFfiTypeClassId(intptr_t index)
Definition class_id.h:513
constexpr uint32_t kMaxUint32
Definition globals.h:484
static const char *const kInitPrefix
Definition object.cc:119
EntryPointPragma FindEntryPointPragma(IsolateGroup *IG, const Array &metadata, Field *reusable_field_handle, Object *pragma)
Definition object.cc:27193
static bool IsAsciiNonprintable(int32_t c)
Definition object.cc:516
static int PrintVarInfo(char *buffer, int len, intptr_t i, const String &var_name, const UntaggedLocalVarDescriptors::VarInfo &info)
Definition object.cc:16100
static void PrintSymbolicStackFrameBody(BaseTextBuffer *buffer, const char *function_name, const char *url, intptr_t line=-1, intptr_t column=-1)
Definition object.cc:26250
@ kHeapObjectTag
ArrayOfTuplesView< SubtypeTestCache::Entries, std::tuple< Object, TypeArguments, TypeArguments, TypeArguments, TypeArguments, TypeArguments, AbstractType, Bool > > SubtypeTestCacheTable
Definition object.h:13532
constexpr intptr_t KB
Definition globals.h:528
DART_WARN_UNUSED_RESULT ErrorPtr VerifyEntryPoint(const Library &lib, const Object &member, const Object &annotated, std::initializer_list< EntryPointPragma > allowed_kinds)
Definition object.cc:27227
uintptr_t uword
Definition globals.h:501
static bool IsURISafeCharacter(int32_t c)
Definition object.cc:23962
static void FunctionPrintNameHelper(const Function &fun, const NameFormattingParams &params, BaseTextBuffer *printer)
Definition object.cc:11095
static int32_t MergeHexCharacters(int32_t c1, int32_t c2)
Definition object.cc:23993
constexpr uword kBreakInstructionFiller
TypeEquality
Definition object.h:1120
static void IndentN(int count)
Definition object.cc:18592
static void ReportTooManyImports(const Library &lib)
Definition object.cc:13608
uint32_t Multiply64Hash(int64_t ivalue)
Definition integers.cc:276
const uint32_t fp
EntryPointPragma
Definition object.h:4344
void CreateSpecializedFunction(Thread *thread, Zone *zone, const RegExp &regexp, intptr_t specialization_cid, bool sticky, const Object &owner)
Definition regexp.cc:5523
bool IsAllocatableInNewSpace(intptr_t size)
Definition spaces.h:57
uintptr_t compressed_uword
Definition globals.h:44
static bool MatchesAccessorName(const String &name, const char *prefix, intptr_t prefix_length, const String &accessor_name)
Definition object.cc:6231
static void DwarfStackTracesHandler(bool value)
Definition object.cc:26568
UnorderedHashSet< ClassFunctionsTraits > ClassFunctionsSet
Definition object.cc:3312
static ObjectPtr ThrowTypeError(const TokenPosition token_pos, const Instance &src_value, const AbstractType &dst_type, const String &dst_name)
Definition object.cc:4551
static bool IsDecimalDigit(int32_t c)
Definition object.cc:13317
UnorderedHashMap< LibraryLookupTraits > LibraryLookupMap
Definition object.cc:14643
bool ShouldHaveImmutabilityBitSetCid(intptr_t predefined_cid)
Definition class_id.h:507
static intptr_t ConstructFunctionFullyQualifiedCString(const Function &function, char **chars, intptr_t reserve_len, bool with_lib, QualifiedFunctionLibKind lib_kind)
Definition object.cc:9759
uint32_t HashBytes(const uint8_t *bytes, intptr_t size)
Definition hash.h:31
static const char *const kGetterPrefix
Definition object.cc:115
@ kCurrentAndEnclosingFree
Definition object.h:2917
@ kAllFree
Definition object.h:2920
@ kNoneFree
Definition object.h:2906
static classid_t NormalizeClassIdForSyntacticalTypeEquality(classid_t cid)
Definition object.cc:22110
bool FindPragmaInMetadata(Thread *T, const Object &metadata_obj, const String &pragma_name, bool multiple, Object *options)
Definition object.cc:4142
static void PrintSymbolicStackFrame(Zone *zone, BaseTextBuffer *buffer, const Function &function, TokenPosition token_pos_or_line, intptr_t frame_index, bool is_line=false)
Definition object.cc:26265
bool HasStack()
static T LoadUnaligned(const T *ptr)
Definition unaligned.h:14
const intptr_t cid
static constexpr intptr_t kTrueIdentityHash
Definition object.h:10764
static bool IsVisibleAsFutureListener(const Function &function)
Definition object.cc:26299
static constexpr intptr_t kCompressedWordSize
Definition globals.h:42
raw_obj untag() -> num_entries()) VARIABLE_COMPRESSED_VISITOR(Array, Smi::Value(raw_obj->untag() ->length())) VARIABLE_COMPRESSED_VISITOR(TypedData, TypedData::ElementSizeInBytes(raw_obj->GetClassId()) *Smi::Value(raw_obj->untag() ->length())) VARIABLE_COMPRESSED_VISITOR(Record, RecordShape(raw_obj->untag() ->shape()).num_fields()) VARIABLE_NULL_VISITOR(CompressedStackMaps, CompressedStackMaps::PayloadSizeOf(raw_obj)) VARIABLE_NULL_VISITOR(OneByteString, Smi::Value(raw_obj->untag() ->length())) VARIABLE_NULL_VISITOR(TwoByteString, Smi::Value(raw_obj->untag() ->length())) intptr_t UntaggedField::VisitFieldPointers(FieldPtr raw_obj, ObjectPointerVisitor *visitor)
uint32_t FinalizeHash(uint32_t hash, intptr_t hashbits=kBitsPerInt32)
Definition hash.h:20
static const intptr_t kSetterPrefixLength
Definition object.cc:118
static uword Hash64To32(uint64_t v)
Definition object.cc:6517
void DumpFunctionTypeTable(Isolate *isolate)
Definition object.cc:27161
FrameLayout runtime_frame_layout
static void StoreUnaligned(T *ptr, T value)
Definition unaligned.h:22
static void ReportTooManyTypeParameters(const FunctionType &sig)
Definition object.cc:8875
void DumpTypeTable(Isolate *isolate)
Definition object.cc:27154
static DART_FORCE_INLINE uword LocalVarAddress(uword fp, intptr_t index)
bool TESTING_runtime_fail_on_existing_cache_entry
Definition object.cc:7675
bool IsFfiPointerClassId(intptr_t index)
Definition class_id.h:541
static const intptr_t kInitPrefixLength
Definition object.cc:120
constexpr intptr_t kWordSize
Definition globals.h:509
const StackTrace & GetCurrentStackTrace(int skip_frames)
Definition stacktrace.cc:94
static ObjectPtr InvokeInstanceFunction(Thread *thread, const Instance &receiver, const Function &function, const String &target_name, const Array &args, const Array &args_descriptor_array, bool respect_reflectable, const TypeArguments &instantiator_type_args)
Definition object.cc:14363
static constexpr intptr_t kObjectAlignment
static bool HasPragma(const Object &declaration)
Definition object.cc:13760
static ArrayPtr NewDictionary(intptr_t initial_size)
Definition object.cc:14251
static void ThrowNoSuchMethod(const Instance &receiver, const String &function_name, const Array &arguments, const Array &argument_names, const InvocationMirror::Level level, const InvocationMirror::Kind kind)
Definition mirrors.cc:49
static void AddScriptIfUnique(const GrowableObjectArray &scripts, const Script &candidate)
Definition object.cc:13969
Genericity
Definition object.h:2228
@ kFunctions
Definition object.h:2231
@ kCurrentClass
Definition object.h:2230
@ kAny
Definition object.h:2229
constexpr int64_t kMinInt64RepresentableAsDouble
Definition globals.h:493
bool IsIntegerClassId(intptr_t index)
Definition class_id.h:340
UnorderedHashMap< RecordFieldNamesMapTraits > RecordFieldNamesMap
Definition object.cc:27978
bool IsInternalOnlyClassId(intptr_t index)
Definition class_id.h:299
void DumpTypeParameterTable(Isolate *isolate)
Definition object.cc:27177
const char *const function_name
static int8_t data[kExtLength]
static int32_t GetHexValue(int32_t c)
Definition object.cc:23982
static void PrintSymbolicStackFrameIndex(BaseTextBuffer *buffer, intptr_t frame_index)
Definition object.cc:26245
constexpr int64_t kMaxInt64RepresentableAsDouble
Definition globals.h:494
static bool ShouldBePrivate(const String &name)
Definition object.cc:13818
bool IsDeeplyImmutableCid(intptr_t predefined_cid)
Definition class_id.h:485
bool IsAllocatableViaFreeLists(intptr_t size)
Definition spaces.h:60
static ObjectPtr LoadExpressionEvaluationFunction(Zone *zone, const ExternalTypedData &kernel_buffer, const String &library_url, const String &klass)
Definition object.cc:4804
void DumpTypeArgumentsTable(Isolate *isolate)
Definition object.cc:27185
static const char * SafeTypeArgumentsToCString(const TypeArguments &args)
Definition object.cc:12843
static bool IsIdentStartChar(int32_t c)
Definition object.cc:13321
static bool IsLetter(int32_t c)
Definition object.cc:13313
static FinalizablePersistentHandle * AddFinalizer(const Object &referent, void *peer, Dart_HandleFinalizer callback, intptr_t external_size)
Definition object.cc:24238
static const char *const kSetterPrefix
Definition object.cc:117
static TypeArgumentsPtr RetrieveFunctionTypeArguments(Thread *thread, Zone *zone, const Function &function, const Instance &receiver, const TypeArguments &instantiator_type_args, const Array &args, const ArgumentsDescriptor &args_desc)
Definition object.cc:9467
bool IsBuiltinListClassId(intptr_t index)
Definition class_id.h:364
static int NumEntries(const FinalizerEntry &entry, intptr_t acc=0)
constexpr intptr_t kBitsPerInt64
Definition globals.h:467
bool IsExternalTypedDataClassId(intptr_t index)
Definition class_id.h:447
bool IsStringClassId(intptr_t index)
Definition class_id.h:350
static bool InVmTests(const Function &function)
Definition object.cc:9061
static intptr_t GetRelativeSourceIndex(const String &src, intptr_t line, intptr_t line_offset=0, intptr_t column=1, intptr_t column_offset=0, intptr_t starting_index=0)
Definition object.cc:13410
ObjectPtr CompressedObjectPtr
void DumpRecordTypeTable(Isolate *isolate)
Definition object.cc:27169
static intptr_t GetListLengthOffset(intptr_t cid)
Definition object.cc:12538
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified vm service A custom Dart VM Service port The default is to pick a randomly available open port disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode disable vm service Disable mDNS Dart VM Service publication Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set endless trace Enable an endless trace buffer The default is a ring buffer This is useful when very old events need to viewed For during application launch Memory usage will continue to grow indefinitely however Start app with an specific route defined on the framework flutter assets Path to the Flutter assets directory enable service port Allow the VM service to fallback to automatic port selection if binding to a specified port fails trace Trace early application lifecycle Automatically switches to an endless trace buffer trace skia Filters out all Skia trace event categories except those that are specified in this comma separated list dump skp on shader Automatically dump the skp that triggers new shader compilations This is useful for writing custom ShaderWarmUp to reduce jank By this is not enabled to reduce the overhead purge persistent cache
Definition switches.h:191
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot data
Definition switches.h:41
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not set
Definition switches.h:76
std::function< void()> closure
Definition closure.h:14
#define SHARED_READONLY_HANDLES_LIST(V)
Definition object.h:457
#define FOR_EACH_REBIND_RULE(V)
Definition object.h:2517
SkScalar w
#define Pp
Definition globals.h:425
#define FALL_THROUGH
Definition globals.h:15
#define Px
Definition globals.h:410
#define PX64
Definition globals.h:419
#define DEBUG_ONLY(code)
Definition globals.h:141
#define Pu
Definition globals.h:409
#define UNLIKELY(cond)
Definition globals.h:261
#define Pd64
Definition globals.h:416
#define Pd
Definition globals.h:408
#define DISALLOW_COPY_AND_ASSIGN(TypeName)
Definition globals.h:581
#define T
#define CONTEXT_SCOPE_VARIABLE_DESC_FLAG_LIST(V)
#define CORE_LIB_INTRINSIC_LIST(V)
#define GRAPH_TYPED_DATA_INTRINSICS_LIST(V)
#define INTERNAL_LIB_INTRINSIC_LIST(V)
#define CORE_INTEGER_LIB_INTRINSIC_LIST(V)
#define POLYMORPHIC_TARGET_LIST(V)
#define GRAPH_CORE_INTRINSICS_LIST(V)
#define DEVELOPER_LIB_INTRINSIC_LIST(V)
#define RECOGNIZED_LIST_FACTORY_LIST(V)
#define OTHER_RECOGNIZED_LIST(V)
#define REUSABLE_CLASS_HANDLESCOPE(thread)
#define REUSABLE_LOADING_UNIT_HANDLESCOPE(thread)
#define REUSABLE_INSTANCE_HANDLESCOPE(thread)
#define REUSABLE_ARRAY_HANDLESCOPE(thread)
#define REUSABLE_SMI_HANDLESCOPE(thread)
#define REUSABLE_STRING_HANDLESCOPE(thread)
#define REUSABLE_TYPE_PARAMETERS_HANDLESCOPE(thread)
#define REUSABLE_FUNCTION_HANDLESCOPE(thread)
#define REUSABLE_FIELD_HANDLESCOPE(thread)
#define REUSABLE_LIBRARY_HANDLESCOPE(thread)
#define REUSABLE_ABSTRACT_TYPE_HANDLESCOPE(thread)
#define REUSABLE_OBJECT_HANDLESCOPE(thread)
#define REUSABLE_TYPE_ARGUMENTS_HANDLESCOPE(thread)
#define HANDLE_CASE(clazz)
Point offset
intptr_t count
Definition object.cc:17190
intptr_t cid
Definition object.cc:17189
CidCount(intptr_t cid_, intptr_t count_, Function *f_)
Definition object.cc:17184
Function * function
Definition object.cc:17191
intptr_t code_from_fp
intptr_t FrameSlotForVariableIndex(intptr_t index) const
static constexpr intptr_t kElementSize
Definition object.h:8573
simd128_value_t & readFrom(const float *v)
Definition globals.h:153
#define CHECK_ERROR(value, message)
#define TIMELINE_DURATION(thread, stream, name)
Definition timeline.h:39
#define NOT_IN_PRECOMPILED_RUNTIME(code)
Definition globals.h:113
#define ARRAY_SIZE(array)
Definition globals.h:72
#define NOT_IN_PRECOMPILED(code)
Definition globals.h:100
#define NOT_IN_PRODUCT(code)
Definition globals.h:84
#define EQUALS_IGNORING_PRIVATE_KEY(class_id, type, str1, str2)
Definition object.cc:24369
#define IS_CHECK(name)
#define CLASS_LIST_WITH_NULL(V)
#define ADD_SET_FIELD(clazz)
#define REGISTER_TYPED_DATA_CLASS(clazz)
#define REGISTER_FFI_CLASS(clazz)
#define INITIALIZE_SHARED_READONLY_HANDLE(Type, name)
#define REGISTER_EXT_TYPED_DATA_CLASS(clazz)
#define DEFINE_FLAG_ACCESSORS(Name)
Definition object.cc:18694
#define PRECOMPILER_WSR_FIELD_DEFINITION(Class, Type, Name)
Definition object.cc:209
#define RAW_NULL
Definition object.cc:132
#define REGISTER_TYPED_DATA_VIEW_CLASS(clazz)
#define TRACE_TYPE_CHECKS_VERBOSE(format,...)
Definition object.cc:226
#define REGISTER_FFI_TYPE_MARKER(clazz)
#define SET_CLASS_NAME(class_name, name)
Definition object.cc:1466
#define RULE_CASE(Name)
#define INIT_VTABLE(clazz)
#define DEFINE_SHARED_READONLY_HANDLE(Type, name)
Definition object.cc:142