type_testing_stubs.cc
// Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include <functional>

#include "platform/globals.h"
#include "vm/class_id.h"
#include "vm/compiler/runtime_api.h"
#include "vm/exceptions.h"
#include "vm/hash_map.h"
#include "vm/longjump.h"
#include "vm/object_store.h"
#include "vm/stub_code.h"
#include "vm/timeline.h"
#include "vm/type_testing_stubs.h"
#include "vm/zone_text_buffer.h"

#if !defined(DART_PRECOMPILED_RUNTIME)
#include "vm/compiler/assembler/disassembler.h"
#include "vm/compiler/backend/flow_graph_compiler.h"
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

#define __ assembler->

namespace dart {

TypeTestingStubNamer::TypeTestingStubNamer()
    : lib_(Library::Handle()),
      klass_(Class::Handle()),
      type_(AbstractType::Handle()),
      string_(String::Handle()) {}

const char* TypeTestingStubNamer::StubNameForType(
    const AbstractType& type) const {
  ZoneTextBuffer buffer(Thread::Current()->zone());
  WriteStubNameForTypeTo(&buffer, type);
  return buffer.buffer();
}

void TypeTestingStubNamer::WriteStubNameForTypeTo(
    BaseTextBuffer* buffer,
    const AbstractType& type) const {
  buffer->AddString("TypeTestingStub_");
  StringifyTypeTo(buffer, type);
}

void TypeTestingStubNamer::StringifyTypeTo(BaseTextBuffer* buffer,
                                           const AbstractType& type) const {
  NoSafepointScope no_safepoint;
  if (type.IsType()) {
    const intptr_t cid = Type::Cast(type).type_class_id();
    ClassTable* class_table = IsolateGroup::Current()->class_table();
    klass_ = class_table->At(cid);
    ASSERT(!klass_.IsNull());

    lib_ = klass_.library();
    if (!lib_.IsNull()) {
      string_ = lib_.url();
      buffer->AddString(string_.ToCString());
    } else {
      buffer->Printf("nolib%" Pd "_", nonce_++);
    }

    buffer->AddString("_");
    buffer->AddString(klass_.ScrubbedNameCString());

    auto& type_arguments = TypeArguments::Handle(Type::Cast(type).arguments());
    if (!type_arguments.IsNull()) {
      for (intptr_t i = 0, n = type_arguments.Length(); i < n; ++i) {
        type_ = type_arguments.TypeAt(i);
        buffer->AddString("__");
        StringifyTypeTo(buffer, type_);
      }
    }
  } else if (type.IsTypeParameter()) {
    buffer->AddString(TypeParameter::Cast(type).CanonicalNameCString());
  } else if (type.IsRecordType()) {
    const RecordType& rec = RecordType::Cast(type);
    buffer->AddString("Record");
    const intptr_t num_fields = rec.NumFields();
    const auto& field_names =
        Array::Handle(rec.GetFieldNames(Thread::Current()));
    const intptr_t num_positional_fields = num_fields - field_names.Length();
    const auto& field_types = Array::Handle(rec.field_types());
    for (intptr_t i = 0; i < num_fields; ++i) {
      buffer->AddString("__");
      type_ ^= field_types.At(i);
      StringifyTypeTo(buffer, type_);
      if (i >= num_positional_fields) {
        buffer->AddString("_");
        string_ ^= field_names.At(i - num_positional_fields);
        buffer->AddString(string_.ToCString());
      }
    }
  } else {
    buffer->AddString(type.ToCString());
  }
  MakeNameAssemblerSafe(buffer);
}

void TypeTestingStubNamer::MakeNameAssemblerSafe(BaseTextBuffer* buffer) {
  char* cursor = buffer->buffer();
  while (*cursor != '\0') {
    char c = *cursor;
    if (!((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
          (c >= '0' && c <= '9') || (c == '_'))) {
      *cursor = '_';
    }
    cursor++;
  }
}
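
// Illustrative example (assumed output shape, not taken verbatim from the
// VM): for a core-library type such as `List<int>`, the pieces written above
// are "dart:core", "_", "List", and "__" plus each stringified type argument;
// MakeNameAssemblerSafe then rewrites every character outside [a-zA-Z0-9_]
// to '_', yielding an assembler-safe symbol roughly like
// `TypeTestingStub_dart_core_List__dart_core_int`.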

CodePtr TypeTestingStubGenerator::DefaultCodeForType(
    const AbstractType& type,
    bool lazy_specialize /* = true */) {
  // During bootstrapping we have no access to stubs yet, so we'll just return
  // `null` and patch these later in `Object::FinishInit()`.
  if (!StubCode::HasBeenInitialized()) {
    ASSERT(type.IsType());
    const classid_t cid = type.type_class_id();
    ASSERT(cid == kDynamicCid || cid == kVoidCid);
    return Code::null();
  }

  if (type.IsTopTypeForSubtyping()) {
    return StubCode::TopTypeTypeTest().ptr();
  }
  if (type.IsTypeParameter()) {
    const bool nullable = Instance::NullIsAssignableTo(type);
    if (nullable) {
      return StubCode::NullableTypeParameterTypeTest().ptr();
    } else {
      return StubCode::TypeParameterTypeTest().ptr();
    }
  }

  if (type.IsFunctionType()) {
    const bool nullable = Instance::NullIsAssignableTo(type);
    return nullable ? StubCode::DefaultNullableTypeTest().ptr()
                    : StubCode::DefaultTypeTest().ptr();
  }

  if (type.IsType() || type.IsRecordType()) {
    const bool should_specialize = !FLAG_precompiled_mode && lazy_specialize;
    const bool nullable = Instance::NullIsAssignableTo(type);
    if (should_specialize) {
      return nullable ? StubCode::LazySpecializeNullableTypeTest().ptr()
                      : StubCode::LazySpecializeTypeTest().ptr();
    } else {
      return nullable ? StubCode::DefaultNullableTypeTest().ptr()
                      : StubCode::DefaultTypeTest().ptr();
    }
  }

  return StubCode::UnreachableTypeTest().ptr();
}
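
// For example (JIT, illustrative): a canonical `String` type initially
// receives StubCode::LazySpecializeTypeTest(), which triggers building a
// specialized stub via OptimizedCodeForType() below on first use, while in
// AOT mode (FLAG_precompiled_mode) the same type would receive
// StubCode::DefaultTypeTest() up front.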

#if !defined(DART_PRECOMPILED_RUNTIME)
CodePtr TypeTestingStubGenerator::SpecializeStubFor(Thread* thread,
                                                    const AbstractType& type) {
  HierarchyInfo hi(thread);
  TypeTestingStubGenerator generator;
  return generator.OptimizedCodeForType(type);
}
#endif

TypeTestingStubGenerator::TypeTestingStubGenerator()
    : object_store_(IsolateGroup::Current()->object_store()) {}

CodePtr TypeTestingStubGenerator::OptimizedCodeForType(
    const AbstractType& type) {
#if !defined(TARGET_ARCH_IA32)
  ASSERT(StubCode::HasBeenInitialized());

  if (type.IsTypeParameter()) {
    return TypeTestingStubGenerator::DefaultCodeForType(
        type, /*lazy_specialize=*/false);
  }

  if (type.IsTopTypeForSubtyping()) {
    return StubCode::TopTypeTypeTest().ptr();
  }

  if (type.IsCanonical()) {
    // When adding any new types that can have specialized TTSes, also update
    // CollectTypes::VisitObject appropriately.
    if (type.IsType() || type.IsRecordType()) {
#if !defined(DART_PRECOMPILED_RUNTIME)
      const Code& code =
          Code::Handle(TypeTestingStubGenerator::BuildCodeForType(type));
      if (!code.IsNull()) {
        return code.ptr();
      }
      const Error& error = Error::Handle(Thread::Current()->StealStickyError());
      if (!error.IsNull()) {
        if (error.ptr() == Object::out_of_memory_error().ptr()) {
          Exceptions::ThrowOOM();
        } else {
          UNREACHABLE();
        }
      }

      // Fall back to default.
#else
      // In the precompiled runtime we cannot lazily create new optimized type
      // testing stubs, so if we cannot find one, we'll just return the default
      // one.
#endif  // !defined(DART_PRECOMPILED_RUNTIME)
    }
  }
#endif  // !defined(TARGET_ARCH_IA32)
  return TypeTestingStubGenerator::DefaultCodeForType(
      type, /*lazy_specialize=*/false);
}

#if !defined(TARGET_ARCH_IA32)
#if !defined(DART_PRECOMPILED_RUNTIME)

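// Runs [fun] with a fresh assembler, bumping the far-branch level and
// retrying whenever code generation bails out with a branch_offset_error;
// out-of-memory is re-attached as a sticky error and surfaced as Code::null().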
static CodePtr RetryCompilationWithFarBranches(
    Thread* thread,
    std::function<CodePtr(compiler::Assembler&)> fun) {
  volatile intptr_t far_branch_level = 0;
  while (true) {
    LongJumpScope jump;
    if (setjmp(*jump.Set()) == 0) {
      // To use the already-defined __ macro!
      compiler::Assembler assembler(nullptr, far_branch_level);
      return fun(assembler);
    } else {
      // We bailed out or we encountered an error.
      const Error& error = Error::Handle(thread->StealStickyError());
      if (error.ptr() == Object::branch_offset_error().ptr()) {
        ASSERT(far_branch_level < 2);
        far_branch_level++;
      } else if (error.ptr() == Object::out_of_memory_error().ptr()) {
        thread->set_sticky_error(error);
        return Code::null();
      } else {
        UNREACHABLE();
      }
    }
  }
}

CodePtr TypeTestingStubGenerator::BuildCodeForType(const AbstractType& type) {
  auto thread = Thread::Current();
  auto zone = thread->zone();
  HierarchyInfo* hi = thread->hierarchy_info();
  ASSERT(hi != nullptr);

  if (!hi->CanUseSubtypeRangeCheckFor(type) &&
      !hi->CanUseGenericSubtypeRangeCheckFor(type) &&
      !hi->CanUseRecordSubtypeRangeCheckFor(type)) {
    return Code::null();
  }

  auto& slow_tts_stub = Code::ZoneHandle(zone);
  if (FLAG_precompiled_mode) {
    slow_tts_stub = thread->isolate_group()->object_store()->slow_tts_stub();
  }

  CompilerState compiler_state(thread, /*is_aot=*/FLAG_precompiled_mode,
                               /*is_optimizing=*/false);

  const Code& code = Code::Handle(
      thread->zone(),
      RetryCompilationWithFarBranches(
          thread, [&](compiler::Assembler& assembler) {
            compiler::UnresolvedPcRelativeCalls unresolved_calls;
            BuildOptimizedTypeTestStub(&assembler, &unresolved_calls,
                                       slow_tts_stub, hi, type);

            const auto& static_calls_table = Array::Handle(
                zone, compiler::StubCodeCompiler::BuildStaticCallsTable(
                          zone, &unresolved_calls));

            const char* name = namer_.StubNameForType(type);
            const auto pool_attachment =
                FLAG_precompiled_mode ? Code::PoolAttachment::kNotAttachPool
                                      : Code::PoolAttachment::kAttachPool;

            Code& code = Code::Handle(thread->zone());
            auto install_code_fun = [&]() {
              code = Code::FinalizeCode(nullptr, &assembler, pool_attachment,
                                        /*optimized=*/false, /*stats=*/nullptr);
              if (!static_calls_table.IsNull()) {
                code.set_static_calls_target_table(static_calls_table);
              }
            };

            // We have to ensure no mutators are running, because:
            //
            //   a) We allocate an instructions object, which might cause us to
            //      temporarily flip page protections (RX -> RW -> RX).
            //
            SafepointWriteRwLocker ml(thread,
                                      thread->isolate_group()->program_lock());
            thread->isolate_group()->RunWithStoppedMutators(
                install_code_fun,
                /*use_force_growth=*/true);

            Code::NotifyCodeObservers(name, code, /*optimized=*/false);

            code.set_owner(type);
#ifndef PRODUCT
            if (FLAG_support_disassembler && FLAG_disassemble_stubs) {
              LogBlock lb;
              THR_Print("Code for stub '%s' (type = %s): {\n", name,
                        type.ToCString());
              DisassembleToStdout formatter;
              code.Disassemble(&formatter);
              THR_Print("}\n");
              const ObjectPool& object_pool =
                  ObjectPool::Handle(code.object_pool());
              if (!object_pool.IsNull()) {
                object_pool.DebugPrint();
              }
            }
#endif  // !PRODUCT
            return code.ptr();
          }));

  return code.ptr();
}

void TypeTestingStubGenerator::BuildOptimizedTypeTestStub(
    compiler::Assembler* assembler,
    compiler::UnresolvedPcRelativeCalls* unresolved_calls,
    const Code& slow_type_test_stub,
    HierarchyInfo* hi,
    const AbstractType& type) {
  BuildOptimizedTypeTestStubFastCases(assembler, hi, type);
  __ Jump(compiler::Address(
      THR, compiler::target::Thread::slow_type_test_entry_point_offset()));
}

void TypeTestingStubGenerator::BuildOptimizedTypeTestStubFastCases(
    compiler::Assembler* assembler,
    HierarchyInfo* hi,
    const AbstractType& type) {
  // These are handled via the TopTypeTypeTestStub!
  ASSERT(!type.IsTopTypeForSubtyping());

  if (type.IsObjectType()) {
    ASSERT(type.IsNonNullable());
    compiler::Label is_null;
    __ CompareObject(TypeTestABI::kInstanceReg, Object::null_object());
    __ BranchIf(EQUAL, &is_null, compiler::Assembler::kNearJump);
    __ Ret();
    __ Bind(&is_null);
    return;  // No further checks needed.
  }

  // Fast case for 'int' and '_Smi' (which can appear in core libraries).
  if (type.IsIntType() || type.IsSmiType()) {
    compiler::Label non_smi_value;
    __ BranchIfNotSmi(TypeTestABI::kInstanceReg, &non_smi_value,
                      compiler::Assembler::kNearJump);
    __ Ret();
    __ Bind(&non_smi_value);
  } else {
    // TODO(kustermann): Make more fast cases, e.g. Type::Number()
    // is implemented by Smi.
  }

  // Check the cid ranges which are a subtype of [type].
  if (hi->CanUseSubtypeRangeCheckFor(type)) {
    const Class& type_class = Class::Handle(type.type_class());
    ASSERT(!type_class.IsNull());
    const CidRangeVector& ranges = hi->SubtypeRangesForClass(
        type_class,
        /*include_abstract=*/false,
        /*exclude_null=*/!Instance::NullIsAssignableTo(type));

    compiler::Label is_subtype, is_not_subtype;
    const bool smi_is_ok =
        Type::Handle(Type::SmiType()).IsSubtypeOf(type, Heap::kNew);
    if (smi_is_ok) {
      __ LoadClassIdMayBeSmi(TTSInternalRegs::kScratchReg,
                             TypeTestABI::kInstanceReg);
    } else {
      __ BranchIfSmi(TypeTestABI::kInstanceReg, &is_not_subtype);
      __ LoadClassId(TTSInternalRegs::kScratchReg, TypeTestABI::kInstanceReg);
    }
    BuildOptimizedSubtypeRangeCheck(assembler, ranges,
                                    TTSInternalRegs::kScratchReg, &is_subtype,
                                    &is_not_subtype);
    __ Bind(&is_subtype);
    __ Ret();
    __ Bind(&is_not_subtype);
  } else if (hi->CanUseGenericSubtypeRangeCheckFor(type)) {
    const Class& type_class = Class::Handle(type.type_class());
    ASSERT(!type_class.IsNull());
    BuildOptimizedSubclassRangeCheckWithTypeArguments(
        assembler, hi, Type::Cast(type), type_class);
  } else if (hi->CanUseRecordSubtypeRangeCheckFor(type)) {
    BuildOptimizedRecordSubtypeRangeCheck(assembler, hi,
                                          RecordType::Cast(type));
  } else {
    UNREACHABLE();
  }

  if (Instance::NullIsAssignableTo(type)) {
    // Fast case for 'null'.
    compiler::Label non_null;
    __ CompareObject(TypeTestABI::kInstanceReg, Object::null_object());
    __ BranchIf(NOT_EQUAL, &non_null, compiler::Assembler::kNearJump);
    __ Ret();
    __ Bind(&non_null);
  }
}
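
// Together with the jump emitted by BuildOptimizedTypeTestStub above, the
// generated fast path is roughly (an illustrative sketch, not literal stub
// output):
//
//   [optional] if instance is a Smi: return      // int/_Smi fast case
//   cid-range / type-argument / record checks    // one of the three branches
//   if instance == null and null is assignable: return
//   jump slow_type_test_entry                    // STC/runtime fallback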

static void CommentCheckedClasses(compiler::Assembler* assembler,
                                  const CidRangeVector& ranges) {
  if (!assembler->EmittingComments()) return;
  Thread* const thread = Thread::Current();
  ClassTable* const class_table = thread->isolate_group()->class_table();
  Zone* const zone = thread->zone();
  if (ranges.is_empty()) {
    __ Comment("No valid cids to check");
    return;
  }
  if ((ranges.length() == 1) && ranges[0].IsSingleCid()) {
    const auto& cls = Class::Handle(zone, class_table->At(ranges[0].cid_start));
    __ Comment("Checking for cid %" Pd " (%s)", cls.id(),
               cls.ScrubbedNameCString());
    return;
  }
  __ Comment("Checking for concrete finalized classes:");
  auto& cls = Class::Handle(zone);
  for (const auto& range : ranges) {
    ASSERT(!range.IsIllegalRange());
    for (classid_t cid = range.cid_start; cid <= range.cid_end; cid++) {
      // Invalid entries can be included to keep range count low.
      if (!class_table->HasValidClassAt(cid)) continue;
      cls = class_table->At(cid);
      if (cls.is_abstract()) continue;  // Only output concrete classes.
      __ Comment("  * %" Pd32 " (%s)", cid, cls.ScrubbedNameCString());
    }
  }
}

// Represents the following needs for runtime checks to see if an instance of
// [cls] is a subtype of [type] that has type class [type_class]:
//
// * kCannotBeChecked: Instances of [cls] cannot be checked with any of the
//   currently implemented runtime checks, so must fall back on the runtime.
//
// * kNotSubtype: A [cls] instance is guaranteed to not be a subtype of [type]
//   regardless of any instance type arguments.
//
// * kCidCheckOnly: A [cls] instance is guaranteed to be a subtype of [type]
//   regardless of any instance type arguments.
//
// * kNeedsFinalization: Checking that an instance of [cls] is a subtype of
//   [type] requires instance type arguments, but [cls] is not finalized, and
//   so the appropriate type arguments field offset cannot be determined.
//
// * kInstanceTypeArgumentsAreSubtypes: [cls] implements a fully uninstantiated
//   type with type class [type_class] which can be directly instantiated with
//   the instance type arguments. Thus, each type argument of [type] should be
//   compared with the corresponding (index-wise) instance type argument.
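//
// For illustration (hypothetical Dart code, not from the VM sources):
//
//   class A<T> {}
//   class B extends A<int> {}   // => kCidCheckOnly against A<int>
//   class C<T> extends A<T> {}  // => kInstanceTypeArgumentsAreSubtypes
//
// Every B is an A<int> regardless of type arguments, so a cid check suffices,
// while a C instance is an A<int> exactly when its instance type argument
// vector matches <int>.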
enum class CheckType {
  kCannotBeChecked,
  kNotSubtype,
  kCidCheckOnly,
  kNeedsFinalization,
  kInstanceTypeArgumentsAreSubtypes,
};

// Returns a CheckType describing how to check instances of [to_check] as
// subtypes of [type].
static CheckType SubtypeChecksForClass(Zone* zone,
                                       const Type& type,
                                       const Class& type_class,
                                       const Class& to_check) {
  ASSERT_EQUAL(type.type_class_id(), type_class.id());
  ASSERT(type_class.is_type_finalized());
  ASSERT(!to_check.is_abstract());
  ASSERT(to_check.is_type_finalized());
  ASSERT(AbstractType::Handle(zone, to_check.RareType())
             .IsSubtypeOf(AbstractType::Handle(zone, type_class.RareType()),
                          Heap::kNew));
  if (!type_class.IsGeneric()) {
    // All instances of [to_check] are subtypes of [type].
    return CheckType::kCidCheckOnly;
  }
  if (to_check.FindInstantiationOf(zone, type_class,
                                   /*only_super_classes=*/true)) {
    // No need to check for type argument consistency, as [to_check] is the
    // same as or a subclass of [type_class].
    return to_check.is_finalized()
               ? CheckType::kInstanceTypeArgumentsAreSubtypes
               : CheckType::kNeedsFinalization;
  }
  auto& calculated_type =
      Type::Handle(zone, to_check.GetInstantiationOf(zone, type_class));
  if (calculated_type.IsInstantiated()) {
    if (type.IsInstantiated()) {
      return calculated_type.IsSubtypeOf(type, Heap::kNew)
                 ? CheckType::kCidCheckOnly
                 : CheckType::kNotSubtype;
    }
    // TODO(dartbug.com/46920): Requires walking both types, checking
    // corresponding instantiated parts at compile time (assuming
    // uninstantiated parts check successfully) and then creating appropriate
    // runtime checks for uninstantiated parts of [type].
    return CheckType::kCannotBeChecked;
  }
  if (!to_check.is_finalized()) {
    return CheckType::kNeedsFinalization;
  }
  ASSERT(to_check.NumTypeArguments() > 0);
  ASSERT(compiler::target::Class::TypeArgumentsFieldOffset(to_check) !=
         compiler::target::Class::kNoTypeArguments);
  // If the calculated type arguments are a prefix of the declaration type
  // arguments, then we can just treat the instance type arguments as if they
  // were used to instantiate the type class during checking.
  const auto& decl_type_args = TypeArguments::Handle(
      zone, to_check.GetDeclarationInstanceTypeArguments());
  const auto& calculated_type_args = TypeArguments::Handle(
      zone, calculated_type.GetInstanceTypeArguments(Thread::Current(),
                                                     /*canonicalize=*/false));
  const bool type_args_consistent = calculated_type_args.IsSubvectorEquivalent(
      decl_type_args, 0, type_class.NumTypeArguments(),
      TypeEquality::kCanonical);
  // TODO(dartbug.com/46920): Currently we require subtyping to be checkable
  // by comparing the instance type arguments against the type arguments of
  // [type] piecewise, but we could check other cases as well.
  return type_args_consistent ? CheckType::kInstanceTypeArgumentsAreSubtypes
                              : CheckType::kCannotBeChecked;
}

static void CommentSkippedClasses(compiler::Assembler* assembler,
                                  const Type& type,
                                  const Class& type_class,
                                  const CidRangeVector& ranges) {
  if (!assembler->EmittingComments() || ranges.is_empty()) return;
  if (ranges.is_empty()) return;
  ASSERT(type_class.is_implemented());
  __ Comment("Not checking the following concrete implementors of %s:",
             type_class.ScrubbedNameCString());
  Thread* const thread = Thread::Current();
  auto* const class_table = thread->isolate_group()->class_table();
  Zone* const zone = thread->zone();
  auto& cls = Class::Handle(zone);
  auto& calculated_type = Type::Handle(zone);
  for (const auto& range : ranges) {
    ASSERT(!range.IsIllegalRange());
    for (classid_t cid = range.cid_start; cid <= range.cid_end; cid++) {
      // Invalid entries can be included to keep range count low.
      if (!class_table->HasValidClassAt(cid)) continue;
      cls = class_table->At(cid);
      if (cls.is_abstract()) continue;  // Only output concrete classes.
      ASSERT(cls.is_type_finalized());
      TextBuffer buffer(128);
      buffer.Printf("  * %" Pd32 "(%s): ", cid, cls.ScrubbedNameCString());
      switch (SubtypeChecksForClass(zone, type, type_class, cls)) {
        case CheckType::kCannotBeChecked:
          calculated_type = cls.GetInstantiationOf(zone, type_class);
          buffer.AddString("cannot check that ");
          calculated_type.PrintName(Object::kScrubbedName, &buffer);
          buffer.AddString(" is a subtype of ");
          type.PrintName(Object::kScrubbedName, &buffer);
          break;
        case CheckType::kNotSubtype:
          calculated_type = cls.GetInstantiationOf(zone, type_class);
          calculated_type.PrintName(Object::kScrubbedName, &buffer);
          buffer.AddString(" is not a subtype of ");
          type.PrintName(Object::kScrubbedName, &buffer);
          break;
        case CheckType::kNeedsFinalization:
          buffer.AddString("is not finalized");
          break;
        case CheckType::kInstanceTypeArgumentsAreSubtypes:
          buffer.AddString("was not finalized during class splitting");
          break;
        default:
          // Either the CheckType was kCidCheckOnly, which should never happen
          // since it only requires type finalization, or a new CheckType has
          // been added.
          UNREACHABLE();
          break;
      }
      __ Comment("%s", buffer.buffer());
    }
  }
}

// Builds a cid range check for the concrete subclasses and implementors of
// type. Falls through or jumps to check_succeeded if the range contains the
// cid, else jumps to check_failed.
//
// Returns whether class_id_reg is clobbered.
bool TypeTestingStubGenerator::BuildOptimizedSubtypeRangeCheck(
    compiler::Assembler* assembler,
    const CidRangeVector& ranges,
    Register class_id_reg,
    compiler::Label* check_succeeded,
    compiler::Label* check_failed) {
  CommentCheckedClasses(assembler, ranges);
  return FlowGraphCompiler::GenerateCidRangesCheck(
      assembler, class_id_reg, ranges, check_succeeded, check_failed, true);
}

void TypeTestingStubGenerator::
    BuildOptimizedSubclassRangeCheckWithTypeArguments(
        compiler::Assembler* assembler,
        HierarchyInfo* hi,
        const Type& type,
        const Class& type_class) {
  ASSERT(hi->CanUseGenericSubtypeRangeCheckFor(type));
  compiler::Label check_failed, load_succeeded;
  // a) First we perform subtype cid-range checks and load the instance type
  // arguments based on which check succeeded.
  if (BuildLoadInstanceTypeArguments(assembler, hi, type, type_class,
                                     TTSInternalRegs::kScratchReg,
                                     TTSInternalRegs::kInstanceTypeArgumentsReg,
                                     &load_succeeded, &check_failed)) {
    // Only build type argument checking if any checked cid ranges require it.
    __ Bind(&load_succeeded);

    // The rare type of the class is guaranteed to be a supertype of the
    // runtime type of any instance.
    const Type& rare_type = Type::Handle(type_class.RareType());
    // If the rare type is a subtype of the type being checked, then the
    // runtime type of the instance is also a subtype and we shouldn't need to
    // perform checks for the instance type arguments.
    ASSERT(!rare_type.IsSubtypeOf(type, Heap::kNew));
    // b) We check if the type arguments of the rare type are all dynamic
    // (that is, the type arguments vector is null).
    if (rare_type.arguments() == TypeArguments::null()) {
      // If it is, then the instance could have a null instance TAV. However,
      // if the instance TAV is null, then the runtime type of the instance is
      // the rare type, which means it cannot be a subtype of the checked type.
      __ CompareObject(TTSInternalRegs::kInstanceTypeArgumentsReg,
                       Object::null_object());
      __ BranchIf(EQUAL, &check_failed);
    } else {
      // If the TAV of the rare type is not null, at least one type argument
      // of the rare type is a non-top type. This means no instance can have
      // a null instance TAV, as the dynamic type cannot be a subtype of
      // a non-top type and each type argument of an instance must be
      // a subtype of the corresponding type argument for the rare type.
#if defined(DEBUG)
      // Add the check for null in DEBUG mode, but instead of failing, create a
      // breakpoint to make it obvious that the assumption above has failed.
      __ CompareObject(TTSInternalRegs::kInstanceTypeArgumentsReg,
                       Object::null_object());
      compiler::Label check_instance_tav;
      __ BranchIf(NOT_EQUAL, &check_instance_tav,
                  compiler::Assembler::kNearJump);
      __ Breakpoint();
      __ Bind(&check_instance_tav);
#endif
    }

    // c) Then we'll check each value of the type argument.
    compiler::Label pop_saved_registers_on_failure;
    const RegisterSet saved_registers(
        TTSInternalRegs::kSavedTypeArgumentRegisters);
    __ PushRegisters(saved_registers);

    AbstractType& type_arg = AbstractType::Handle();
    const TypeArguments& ta = TypeArguments::Handle(type.arguments());
    const intptr_t num_type_parameters = type_class.NumTypeParameters();
    const intptr_t num_type_arguments = type_class.NumTypeArguments();
    ASSERT(ta.Length() == num_type_parameters);
    for (intptr_t i = 0; i < num_type_parameters; ++i) {
      const intptr_t type_param_value_offset_i =
          num_type_arguments - num_type_parameters + i;

      type_arg = ta.TypeAt(i);
      ASSERT(type_arg.IsTypeParameter() ||
             hi->CanUseSubtypeRangeCheckFor(type_arg));

      if (type_arg.IsTypeParameter()) {
        BuildOptimizedTypeParameterArgumentValueCheck(
            assembler, hi, TypeParameter::Cast(type_arg),
            type_param_value_offset_i, &pop_saved_registers_on_failure);
      } else {
        BuildOptimizedTypeArgumentValueCheck(
            assembler, hi, Type::Cast(type_arg), type_param_value_offset_i,
            &pop_saved_registers_on_failure);
      }
    }
    __ PopRegisters(saved_registers);
    __ Ret();
    __ Bind(&pop_saved_registers_on_failure);
    __ PopRegisters(saved_registers);
  }

  // If anything fails.
  __ Bind(&check_failed);
}

void TypeTestingStubGenerator::BuildOptimizedRecordSubtypeRangeCheck(
    compiler::Assembler* assembler,
    HierarchyInfo* hi,
    const RecordType& type) {
  compiler::Label is_subtype, is_not_subtype;
  Zone* zone = Thread::Current()->zone();

  __ BranchIfSmi(TypeTestABI::kInstanceReg, &is_not_subtype);
  __ LoadClassId(TTSInternalRegs::kScratchReg, TypeTestABI::kInstanceReg);

  if (Instance::NullIsAssignableTo(type)) {
    __ CompareImmediate(TTSInternalRegs::kScratchReg, kNullCid);
    __ BranchIf(EQUAL, &is_subtype);
  }
  __ CompareImmediate(TTSInternalRegs::kScratchReg, kRecordCid);
  __ BranchIf(NOT_EQUAL, &is_not_subtype);

  __ LoadCompressedSmi(
      TTSInternalRegs::kScratchReg,
      compiler::FieldAddress(TypeTestABI::kInstanceReg,
                             compiler::target::Record::shape_offset()));
  __ CompareImmediate(TTSInternalRegs::kScratchReg,
                      Smi::RawValue(type.shape().AsInt()));
  __ BranchIf(NOT_EQUAL, &is_not_subtype);

  auto& field_type = AbstractType::Handle(zone);
  auto& field_type_class = Class::Handle(zone);
  const auto& smi_type = Type::Handle(zone, Type::SmiType());
  for (intptr_t i = 0, n = type.NumFields(); i < n; ++i) {
    compiler::Label next;

    field_type = type.FieldTypeAt(i);
    ASSERT(hi->CanUseSubtypeRangeCheckFor(field_type));

    __ LoadCompressedFieldFromOffset(TTSInternalRegs::kScratchReg,
                                     TypeTestABI::kInstanceReg,
                                     compiler::target::Record::field_offset(i));

    field_type_class = field_type.type_class();
    ASSERT(!field_type_class.IsNull());

    const CidRangeVector& ranges = hi->SubtypeRangesForClass(
        field_type_class,
        /*include_abstract=*/false,
        /*exclude_null=*/!Instance::NullIsAssignableTo(field_type));

    const bool smi_is_ok = smi_type.IsSubtypeOf(field_type, Heap::kNew);
    __ BranchIfSmi(TTSInternalRegs::kScratchReg,
                   smi_is_ok ? &next : &is_not_subtype);
    __ LoadClassId(TTSInternalRegs::kScratchReg, TTSInternalRegs::kScratchReg);

    BuildOptimizedSubtypeRangeCheck(assembler, ranges,
                                    TTSInternalRegs::kScratchReg, &next,
                                    &is_not_subtype);
    __ Bind(&next);
  }

  __ Bind(&is_subtype);
  __ Ret();
  __ Bind(&is_not_subtype);
}
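
// Schematically, the record check above is (an illustrative sketch):
//
//   if instance is a Smi: not a subtype
//   if cid(instance) == kNullCid and null is assignable: subtype
//   if cid(instance) != kRecordCid: not a subtype
//   if shape(instance) != shape(type): not a subtype
//   for each field i: if cid(field_i) outside the ranges for field type i:
//     not a subtype
//   otherwise: subtype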

// Splits [ranges] into multiple ranges in [output], where the concrete,
// finalized classes in each range share the same type arguments field offset.
//
// The first range in [output] contains [type_class], if any do, and otherwise
// prioritizes ranges that include predefined cids before ranges that only
// contain user-defined classes.
//
// Any cids that do not have valid class table entries, correspond to abstract
// or unfinalized classes, or have no TAV field offset are treated as don't
// cares, in that the cid may appear in any of the CidRangeVectors as needed to
// reduce the number of ranges.
//
// Note that CidRangeVectors are MallocGrowableArrays, so the elements in
// output must be freed after use!
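//
// For illustration (hypothetical cids and offsets): if cids 10..12 store
// their TAV at offset 8 while cids 13..14 store it at offset 16, the single
// range [10, 14] is split into two CidRangeVectors, one per offset, so that
// each generated type-arguments load can use a single fixed offset.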
static void SplitByTypeArgumentsFieldOffset(
    Thread* T,
    const Class& type_class,
    const CidRangeVector& ranges,
    GrowableArray<CidRangeVector*>* output) {
  ASSERT(output != nullptr);
  ASSERT(!ranges.is_empty());

  Zone* const Z = T->zone();
  ClassTable* const class_table = T->isolate_group()->class_table();
  IntMap<CidRangeVector*> offset_map(Z);
  IntMap<intptr_t> predefined_offsets(Z);
  IntMap<intptr_t> user_defined_offsets(Z);

  auto add_to_vector = [&](intptr_t tav_offset, const CidRange& range) {
    if (range.cid_start == -1) return;
    ASSERT(tav_offset != compiler::target::Class::kNoTypeArguments);
    if (CidRangeVector* vector = offset_map.Lookup(tav_offset)) {
      vector->Add(range);
    } else {
      vector = new CidRangeVector(1);
      vector->Add(range);
      offset_map.Insert(tav_offset, vector);
    }
  };

  auto increment_count = [&](intptr_t cid, intptr_t tav_offset) {
    if (cid <= kNumPredefinedCids) {
      predefined_offsets.Update(
          {tav_offset, predefined_offsets.Lookup(tav_offset) + 1});
    } else if (auto* const kv = predefined_offsets.LookupPair(tav_offset)) {
      predefined_offsets.Update({kv->key, kv->value + 1});
    } else {
      user_defined_offsets.Update(
          {tav_offset, user_defined_offsets.Lookup(tav_offset) + 1});
    }
  };

  // First populate offset_map.
  auto& cls = Class::Handle(Z);
  for (const auto& range : ranges) {
    intptr_t last_offset = compiler::target::Class::kNoTypeArguments;
    intptr_t cid_start = -1;
    intptr_t cid_end = -1;
    for (intptr_t cid = range.cid_start; cid <= range.cid_end; cid++) {
      if (!class_table->HasValidClassAt(cid)) continue;
      cls = class_table->At(cid);
      if (cls.is_abstract()) continue;
      // Only finalized concrete classes are present due to the conditions on
      // returning kInstanceTypeArgumentsAreSubtypes in SubtypeChecksForClass.
      ASSERT(cls.is_finalized());
      const intptr_t tav_offset =
          compiler::target::Class::TypeArgumentsFieldOffset(cls);
      if (tav_offset == compiler::target::Class::kNoTypeArguments) continue;
      if (tav_offset == last_offset && cid_start >= 0) {
        cid_end = cid;
        increment_count(cid, tav_offset);
        continue;
      }
      add_to_vector(last_offset, {cid_start, cid_end});
      last_offset = tav_offset;
      cid_start = cid_end = cid;
      increment_count(cid, tav_offset);
    }
    add_to_vector(last_offset, {cid_start, cid_end});
  }

  ASSERT(!offset_map.IsEmpty());

  // Add the CidRangeVector for the type_class's offset, if it has one.
  if (!type_class.is_abstract() && type_class.is_finalized()) {
    const intptr_t type_class_offset =
        compiler::target::Class::TypeArgumentsFieldOffset(type_class);
    ASSERT(predefined_offsets.LookupPair(type_class_offset) != nullptr ||
           user_defined_offsets.LookupPair(type_class_offset) != nullptr);
    CidRangeVector* const vector = offset_map.Lookup(type_class_offset);
    ASSERT(vector != nullptr);
    output->Add(vector);
    // Remove this CidRangeVector from consideration in the following loops.
    predefined_offsets.Remove(type_class_offset);
    user_defined_offsets.Remove(type_class_offset);
  }
  // Now add CidRangeVectors that include predefined cids.
  // For now, we do this in an arbitrary order, but we could use the counts
  // to prioritize offsets that are more shared if desired.
  auto predefined_it = predefined_offsets.GetIterator();
  while (auto* const kv = predefined_it.Next()) {
    CidRangeVector* const vector = offset_map.Lookup(kv->key);
    ASSERT(vector != nullptr);
    output->Add(vector);
  }
  // Finally, add CidRangeVectors that only include user-defined cids.
  // For now, we do this in an arbitrary order, but we could use the counts
  // to prioritize offsets that are more shared if desired.
  auto user_defined_it = user_defined_offsets.GetIterator();
  while (auto* const kv = user_defined_it.Next()) {
    CidRangeVector* const vector = offset_map.Lookup(kv->key);
    ASSERT(vector != nullptr);
    output->Add(vector);
  }
  ASSERT(output->length() > 0);
}

// Given [type], its type class [type_class], and a CidRangeVector [ranges],
// populates the output CidRangeVectors from cids in [ranges], based on what
// runtime checks are needed to determine whether the runtime type of
// an instance is a subtype of [type].
//
// Concrete, type finalized classes whose cids are added to [cid_check_only]
// implement a particular instantiation of [type_class] that is guaranteed to
// be a subtype of [type]. Thus, these instances do not require any checking
// of type arguments.
//
// Concrete, finalized classes whose cids are added to [type_argument_checks]
// implement a fully uninstantiated version of [type_class] that can be
// directly instantiated with the type arguments of the class's instance.
// Thus, each type argument of [type] should be checked against the
// corresponding instance type argument.
//
// Classes whose cids are in [not_checked]:
// * Instances of the class are guaranteed to not be a subtype of [type].
// * The class is not finalized.
// * The subtype relation cannot be checked with our current approach and
//   thus the stub must fall back to the STC/VM runtime.
//
// Any cids that do not have valid class table entries or correspond to
// abstract classes are treated as don't cares, in that the cid may or may not
// appear as needed to reduce the number of ranges.
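//
// For illustration (hypothetical classes, checking against `A<int>`):
//
//   class B extends A<int> {}     // -> [cid_check_only]
//   class C<T> extends A<T> {}    // -> [type_argument_checks]
//   class D extends A<String> {}  // -> [not_checked] (kNotSubtype)
//
// The stub then emits a bare cid test for B's range, a type-argument
// comparison for C's range, and leaves D to the STC/runtime slow path.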
static void SplitOnTypeArgumentTests(HierarchyInfo* hi,
                                     const Type& type,
                                     const Class& type_class,
                                     const CidRangeVector& ranges,
                                     CidRangeVector* cid_check_only,
                                     CidRangeVector* type_argument_checks,
                                     CidRangeVector* not_checked) {
  ASSERT(type_class.is_implemented());  // No need to split if not implemented.
  ASSERT(cid_check_only->is_empty());
  ASSERT(type_argument_checks->is_empty());
  ASSERT(not_checked->is_empty());
  ClassTable* const class_table = hi->thread()->isolate_group()->class_table();
  Zone* const zone = hi->thread()->zone();
  auto& to_check = Class::Handle(zone);
  auto add_cid_range = [&](CheckType check, const CidRange& range) {
    if (range.cid_start == -1) return;
    switch (check) {
      case CheckType::kCidCheckOnly:
        cid_check_only->Add(range);
        break;
      case CheckType::kInstanceTypeArgumentsAreSubtypes:
        type_argument_checks->Add(range);
        break;
      default:
        not_checked->Add(range);
    }
  };
  for (const auto& range : ranges) {
    CheckType last_check = CheckType::kCannotBeChecked;
    classid_t cid_start = -1, cid_end = -1;
    for (classid_t cid = range.cid_start; cid <= range.cid_end; cid++) {
      // Invalid entries can be included to keep range count low.
      if (!class_table->HasValidClassAt(cid)) continue;
      to_check = class_table->At(cid);
      if (to_check.is_abstract()) continue;
      const CheckType current_check =
          SubtypeChecksForClass(zone, type, type_class, to_check);
      ASSERT(current_check != CheckType::kInstanceTypeArgumentsAreSubtypes ||
             to_check.is_finalized());
      if (last_check == current_check && cid_start >= 0) {
        cid_end = cid;
        continue;
      }
      add_cid_range(last_check, {cid_start, cid_end});
      last_check = current_check;
      cid_start = cid_end = cid;
    }
    add_cid_range(last_check, {cid_start, cid_end});
  }
}
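
// Emits the cid-range dispatch for [type_class]: ranges that are subtypes
// regardless of type arguments return directly, ranges that need their type
// arguments checked load the instance TAV into [instance_type_args_reg] and
// continue at [load_succeeded], and everything else jumps to [load_failed].
// Returns whether any type argument checks were generated.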
bool TypeTestingStubGenerator::BuildLoadInstanceTypeArguments(
    compiler::Assembler* assembler,
    HierarchyInfo* hi,
    const Type& type,
    const Class& type_class,
    const Register class_id_reg,
    const Register instance_type_args_reg,
    compiler::Label* load_succeeded,
    compiler::Label* load_failed) {
  const CidRangeVector& ranges =
      hi->SubtypeRangesForClass(type_class, /*include_abstract=*/false,
                                /*exclude_null=*/!Instance::NullIsAssignableTo(type));
  if (ranges.is_empty()) {
    // Fall through and signal type argument checks should not be generated.
    CommentCheckedClasses(assembler, ranges);
    return false;
  }
  if (!type_class.is_implemented()) {
    ASSERT(type_class.is_finalized());
    const intptr_t tav_offset =
        compiler::target::Class::TypeArgumentsFieldOffset(type_class);
    compiler::Label is_subtype;
    __ LoadClassIdMayBeSmi(class_id_reg, TypeTestABI::kInstanceReg);
    BuildOptimizedSubtypeRangeCheck(assembler, ranges, class_id_reg,
                                    &is_subtype, load_failed);
    __ Bind(&is_subtype);
    if (tav_offset != compiler::target::Class::kNoTypeArguments) {
      // The class and its subclasses have trivially consistent type arguments.
      __ LoadCompressedFieldFromOffset(instance_type_args_reg,
                                       TypeTestABI::kInstanceReg, tav_offset);
      return true;
    } else {
      // Not a generic type, so cid checks are sufficient.
      __ Ret();
      return false;
    }
  }
  Thread* const T = hi->thread();
  Zone* const Z = T->zone();
  CidRangeVector cid_checks_only, type_argument_checks, not_checked;
  SplitOnTypeArgumentTests(hi, type, type_class, ranges, &cid_checks_only,
                           &type_argument_checks, &not_checked);
  ASSERT(!CidRangeVectorUtils::ContainsCid(type_argument_checks, kSmiCid));
  const bool smi_valid =
      CidRangeVectorUtils::ContainsCid(cid_checks_only, kSmiCid);
  // If we'll generate any cid checks and Smi isn't a valid subtype, then
  // do a single Smi check here, since each generated check requires a fresh
  // load of the class id. Otherwise, we'll generate the Smi check as part of
  // the cid checks only block.
  if (!smi_valid &&
      (!cid_checks_only.is_empty() || !type_argument_checks.is_empty())) {
    __ BranchIfSmi(TypeTestABI::kInstanceReg, load_failed);
  }
  // Ensure that if the cid checks only block is skipped, the first iteration
  // of the type arguments check will generate a cid load.
  bool cid_needs_reload = true;
  if (!cid_checks_only.is_empty()) {
    compiler::Label is_subtype, keep_looking;
    compiler::Label* check_failed =
        type_argument_checks.is_empty() ? load_failed : &keep_looking;
    if (smi_valid) {
      __ LoadClassIdMayBeSmi(class_id_reg, TypeTestABI::kInstanceReg);
    } else {
      __ LoadClassId(class_id_reg, TypeTestABI::kInstanceReg);
    }
    cid_needs_reload = BuildOptimizedSubtypeRangeCheck(
        assembler, cid_checks_only, class_id_reg, &is_subtype, check_failed);
    __ Bind(&is_subtype);
    __ Ret();
    __ Bind(&keep_looking);
  }
  if (!type_argument_checks.is_empty()) {
    GrowableArray<CidRangeVector*> vectors;
    SplitByTypeArgumentsFieldOffset(T, type_class, type_argument_checks,
                                    &vectors);
    ASSERT(vectors.length() > 0);
    ClassTable* const class_table = T->isolate_group()->class_table();
    auto& cls = Class::Handle(Z);
    for (intptr_t i = 0; i < vectors.length(); i++) {
      CidRangeVector* const vector = vectors[i];
      ASSERT(!vector->is_empty());
      const intptr_t first_cid = vector->At(0).cid_start;
      ASSERT(class_table->HasValidClassAt(first_cid));
      cls = class_table->At(first_cid);
      ASSERT(cls.is_finalized());
      const intptr_t tav_offset =
          compiler::target::Class::TypeArgumentsFieldOffset(cls);
      compiler::Label load_tav, keep_looking;
      // For the last vector, just jump to load_failed if the check fails
      // and avoid emitting a jump to load_succeeded.
      compiler::Label* check_failed =
          i < vectors.length() - 1 ? &keep_looking : load_failed;
      if (cid_needs_reload) {
        __ LoadClassId(class_id_reg, TypeTestABI::kInstanceReg);
      }
      cid_needs_reload = BuildOptimizedSubtypeRangeCheck(
          assembler, *vector, class_id_reg, &load_tav, check_failed);
      __ Bind(&load_tav);
      __ LoadCompressedFieldFromOffset(instance_type_args_reg,
                                       TypeTestABI::kInstanceReg, tav_offset);
      if (i < vectors.length() - 1) {
        __ Jump(load_succeeded);
        __ Bind(&keep_looking);
      }
      // Free the CidRangeVector allocated by SplitByTypeArgumentsFieldOffset.
      delete vector;
    }
  }
  if (!not_checked.is_empty()) {
    CommentSkippedClasses(assembler, type, type_class, not_checked);
  }
  return !type_argument_checks.is_empty();
}

void TypeTestingStubGenerator::BuildOptimizedTypeParameterArgumentValueCheck(
    compiler::Assembler* assembler,
    HierarchyInfo* hi,
    const TypeParameter& type_param,
    intptr_t type_param_value_offset_i,
    compiler::Label* check_failed) {
  if (assembler->EmittingComments()) {
    TextBuffer buffer(128);
    buffer.Printf("Generating check for type argument %" Pd ": ",
                  type_param_value_offset_i);
    type_param.PrintName(Object::kScrubbedName, &buffer);
    __ Comment("%s", buffer.buffer());
  }

  const Register kTypeArgumentsReg =
      type_param.IsClassTypeParameter()
          ? TypeTestABI::kInstantiatorTypeArgumentsReg
          : TypeTestABI::kFunctionTypeArgumentsReg;

  compiler::Label is_subtype;
  // TODO(dartbug.com/46920): Currently only canonical equality (identity)
  // and some top and bottom types are checked.
  __ CompareObject(kTypeArgumentsReg, Object::null_object());
  __ BranchIf(EQUAL, &is_subtype);

  __ LoadCompressedFieldFromOffset(
      TTSInternalRegs::kSuperTypeArgumentReg, kTypeArgumentsReg,
      compiler::target::TypeArguments::type_at_offset(type_param.index()));
  __ LoadCompressedFieldFromOffset(
      TTSInternalRegs::kSubTypeArgumentReg,
      TTSInternalRegs::kInstanceTypeArgumentsReg,
      compiler::target::TypeArguments::type_at_offset(
          type_param_value_offset_i));
  __ CompareRegisters(TTSInternalRegs::kSuperTypeArgumentReg,
                      TTSInternalRegs::kSubTypeArgumentReg);
  __ BranchIf(EQUAL, &is_subtype);

  __ Comment("Checking instantiated type parameter for possible top types");
  compiler::Label check_subtype_type_class_ids;
  __ LoadClassId(TTSInternalRegs::kScratchReg,
                 TTSInternalRegs::kSuperTypeArgumentReg);
  __ CompareImmediate(TTSInternalRegs::kScratchReg, kTypeCid);
  __ BranchIf(NOT_EQUAL, &check_subtype_type_class_ids);
  __ LoadTypeClassId(TTSInternalRegs::kScratchReg,
                     TTSInternalRegs::kSuperTypeArgumentReg);
  __ CompareImmediate(TTSInternalRegs::kScratchReg, kDynamicCid);
  __ BranchIf(EQUAL, &is_subtype);
  __ CompareImmediate(TTSInternalRegs::kScratchReg, kVoidCid);
  __ BranchIf(EQUAL, &is_subtype);
  __ CompareImmediate(TTSInternalRegs::kScratchReg, kInstanceCid);
  __ BranchIf(NOT_EQUAL, &check_subtype_type_class_ids);
  // If non-nullable Object, then the subtype must be legacy or non-nullable.
  __ CompareAbstractTypeNullabilityWith(
      TTSInternalRegs::kSuperTypeArgumentReg,
      static_cast<int8_t>(Nullability::kNonNullable),
      TTSInternalRegs::kScratchReg);
  __ BranchIf(NOT_EQUAL, &is_subtype);
  __ Comment("Checking for legacy or non-nullable instance type argument");
  __ CompareAbstractTypeNullabilityWith(
      TTSInternalRegs::kSubTypeArgumentReg,
      static_cast<int8_t>(Nullability::kNullable),
      TTSInternalRegs::kScratchReg);
  __ BranchIf(EQUAL, check_failed);
  __ Jump(&is_subtype);

  __ Bind(&check_subtype_type_class_ids);
  __ Comment("Checking instance type argument for possible bottom types");
  // Nothing else to check for non-Types, so fall back to the slow stub.
  __ LoadClassId(TTSInternalRegs::kScratchReg,
                 TTSInternalRegs::kSubTypeArgumentReg);
  __ CompareImmediate(TTSInternalRegs::kScratchReg, kTypeCid);
  __ BranchIf(NOT_EQUAL, check_failed);
  __ LoadTypeClassId(TTSInternalRegs::kScratchReg,
                     TTSInternalRegs::kSubTypeArgumentReg);
  __ CompareImmediate(TTSInternalRegs::kScratchReg, kNeverCid);
  __ BranchIf(EQUAL, &is_subtype);
  __ CompareImmediate(TTSInternalRegs::kScratchReg, kNullCid);
  // Last possible check, so fall back to slow stub on failure.
  __ BranchIf(NOT_EQUAL, check_failed);
  // Only nullable or legacy types can be a supertype of Null.
  __ Comment("Checking for legacy or nullable instantiated type parameter");
  __ CompareAbstractTypeNullabilityWith(
      TTSInternalRegs::kSuperTypeArgumentReg,
      static_cast<int8_t>(Nullability::kNonNullable),
      TTSInternalRegs::kScratchReg);
  __ BranchIf(EQUAL, check_failed);

  __ Bind(&is_subtype);
}

// Generate code to verify that instance's type argument is a subtype of
// 'type_arg'.
void TypeTestingStubGenerator::BuildOptimizedTypeArgumentValueCheck(
    compiler::Assembler* assembler,
    HierarchyInfo* hi,
    const Type& type,
    intptr_t type_param_value_offset_i,
    compiler::Label* check_failed) {
  ASSERT(type.IsInstantiated());
  if (type.IsTopTypeForSubtyping()) {
    return;
  }

  ASSERT(!type.IsObjectType() || type.IsNonNullable());

  if (assembler->EmittingComments()) {
    TextBuffer buffer(128);
    buffer.Printf("Generating check for type argument %" Pd ": ",
                  type_param_value_offset_i);
    type.PrintName(Object::kScrubbedName, &buffer);
    __ Comment("%s", buffer.buffer());
  }

  compiler::Label is_subtype, sub_is_type;
  __ LoadCompressedFieldFromOffset(
      TTSInternalRegs::kSubTypeArgumentReg,
      TTSInternalRegs::kInstanceTypeArgumentsReg,
      compiler::target::TypeArguments::type_at_offset(
          type_param_value_offset_i));
  __ LoadClassId(TTSInternalRegs::kScratchReg,
                 TTSInternalRegs::kSubTypeArgumentReg);
  if (type.IsObjectType() || type.IsDartFunctionType() ||
      type.IsDartRecordType()) {
    __ CompareImmediate(TTSInternalRegs::kScratchReg, kTypeCid);
    __ BranchIf(EQUAL, &sub_is_type);
    if (type.IsDartFunctionType()) {
      __ Comment("Checks for Function type");
      __ CompareImmediate(TTSInternalRegs::kScratchReg, kFunctionTypeCid);
      __ BranchIf(NOT_EQUAL, check_failed);
    } else if (type.IsDartRecordType()) {
      __ Comment("Checks for Record type");
      __ CompareImmediate(TTSInternalRegs::kScratchReg, kRecordTypeCid);
      __ BranchIf(NOT_EQUAL, check_failed);
    } else {
      __ Comment("Checks for Object type");
    }
    if (type.IsNonNullable()) {
      // Nullable types cannot be a subtype of a non-nullable type.
      __ CompareAbstractTypeNullabilityWith(
          TTSInternalRegs::kSubTypeArgumentReg,
          static_cast<int8_t>(Nullability::kNullable),
          TTSInternalRegs::kScratchReg);
      __ BranchIf(EQUAL, check_failed);
    }
    // No further checks needed for non-nullable Object, Function or Record.
    __ Jump(&is_subtype, compiler::Assembler::kNearJump);
  } else {
    // Don't fall back to cid tests for record and function types. Instead,
    // just let the STC/runtime handle any possible false negatives here.
    __ CompareImmediate(TTSInternalRegs::kScratchReg, kTypeCid);
    __ BranchIf(NOT_EQUAL, check_failed);
  }

  __ Comment("Checks for Type");
  __ Bind(&sub_is_type);
  if (type.IsNonNullable()) {
    // Nullable types cannot be a subtype of a non-nullable type in strict
    // mode.
    __ CompareAbstractTypeNullabilityWith(
        TTSInternalRegs::kSubTypeArgumentReg,
        static_cast<int8_t>(Nullability::kNullable),
        TTSInternalRegs::kScratchReg);
    __ BranchIf(EQUAL, check_failed);
    // Fall through to bottom type checks.
  }

  // No further checks needed for non-nullable Object.
  if (!type.IsObjectType()) {
    __ LoadTypeClassId(TTSInternalRegs::kScratchReg,
                       TTSInternalRegs::kSubTypeArgumentReg);

    const bool null_is_assignable = Instance::NullIsAssignableTo(type);
    // Check bottom types.
    __ CompareImmediate(TTSInternalRegs::kScratchReg, kNeverCid);
    __ BranchIf(EQUAL, &is_subtype);
    if (null_is_assignable) {
      __ CompareImmediate(TTSInternalRegs::kScratchReg, kNullCid);
      __ BranchIf(EQUAL, &is_subtype);
    }

    // Not a bottom type, so check cid ranges.
    const Class& type_class = Class::Handle(type.type_class());
    const CidRangeVector& ranges =
        hi->SubtypeRangesForClass(type_class,
                                  /*include_abstract=*/true,
                                  /*exclude_null=*/!null_is_assignable);
    BuildOptimizedSubtypeRangeCheck(assembler, ranges,
                                    TTSInternalRegs::kScratchReg, &is_subtype,
                                    check_failed);
  }

  __ Bind(&is_subtype);
}

void RegisterTypeArgumentsUse(const Function& function,
                              TypeUsageInfo* type_usage_info,
                              const Class& klass,
                              Definition* type_arguments) {
  // The [type_arguments] can, in the general case, be any kind of [Definition]
  // but generally (in order of expected frequency)
  //
  // Case a)
  //    type_arguments <- Constant(#null)
  //    type_arguments <- Constant(#TypeArguments: [ ... ])
  //
  // Case b)
  //    type_arguments <- InstantiateTypeArguments(ita, fta, uta)
  //    (where uta may not be a constant non-null TypeArguments object)
  //
  // Case c)
  //    type_arguments <- LoadField(vx)
  //    type_arguments <- LoadField(vx T{_ABC})
  //    type_arguments <- LoadField(vx T{Type: class: '_ABC'})
  //
  // Case d, e)
  //    type_arguments <- LoadIndexedUnsafe(rbp[vx + 16]))
  //    type_arguments <- Parameter(0)

  if (ConstantInstr* constant = type_arguments->AsConstant()) {
    const Object& object = constant->value();
    ASSERT(object.IsNull() || object.IsTypeArguments());
    const TypeArguments& type_arguments =
        TypeArguments::Handle(TypeArguments::RawCast(object.ptr()));
    type_usage_info->UseTypeArgumentsInInstanceCreation(klass, type_arguments);
  } else if (InstantiateTypeArgumentsInstr* instantiate =
                 type_arguments->AsInstantiateTypeArguments()) {
    if (instantiate->type_arguments()->BindsToConstant() &&
        !instantiate->type_arguments()->BoundConstant().IsNull()) {
      const auto& ta =
          TypeArguments::Cast(instantiate->type_arguments()->BoundConstant());
      type_usage_info->UseTypeArgumentsInInstanceCreation(klass, ta);
    }
  } else if (LoadFieldInstr* load_field = type_arguments->AsLoadField()) {
    Definition* instance = load_field->instance()->definition();
    intptr_t cid = instance->Type()->ToNullableCid();
    if (cid == kDynamicCid) {
      // This is an approximation: If we only know the type, but not the cid,
      // we might have a this-dispatch where we know it's either this class or
      // any subclass.
      // We try to strengthen this assumption further down by checking the
      // offset of the type argument vector, but generally speaking this could
      // be a false-positive, which is still ok!
      const AbstractType& type = *instance->Type()->ToAbstractType();
      if (type.IsType()) {
        const Class& type_class = Class::Handle(type.type_class());
        if (type_class.NumTypeArguments() >= klass.NumTypeArguments()) {
          cid = type_class.id();
        }
      }
    }
    if (cid != kDynamicCid) {
      const Class& instance_klass =
          Class::Handle(IsolateGroup::Current()->class_table()->At(cid));
      if (load_field->slot().IsTypeArguments() && instance_klass.IsGeneric() &&
          compiler::target::Class::TypeArgumentsFieldOffset(instance_klass) ==
              load_field->slot().offset_in_bytes()) {
        // This is a subset of Case c) above, namely forwarding the type
        // argument vector.
        //
        // We use the declaration type arguments for the instance creation,
        // which is a non-instantiated, expanded, type arguments vector.
        TypeArguments& declaration_type_args = TypeArguments::Handle(
            instance_klass.GetDeclarationInstanceTypeArguments());
        type_usage_info->UseTypeArgumentsInInstanceCreation(
            klass, declaration_type_args);
      }
    }
  } else if (type_arguments->IsParameter() ||
             type_arguments->IsLoadIndexedUnsafe()) {
    // This happens in constructors with non-optional/optional parameters
    // where we forward the type argument vector to object allocation.
    //
    // Theoretically this could be a false-positive, which is still ok, but
    // practically it's guaranteed that this is a forward of a type argument
    // vector passed in by the caller.
    if (function.IsFactory()) {
      const Class& enclosing_class = Class::Handle(function.Owner());
      TypeArguments& declaration_type_args = TypeArguments::Handle(
          enclosing_class.GetDeclarationInstanceTypeArguments());
      type_usage_info->UseTypeArgumentsInInstanceCreation(
          klass, declaration_type_args);
    }
  } else {
    // It can also be a phi node where the inputs are any of the above,
    // or it could be the result of _prependTypeArguments call.
    ASSERT(type_arguments->IsPhi() || type_arguments->IsStaticCall());
  }
}

#endif  // !defined(DART_PRECOMPILED_RUNTIME)

#else  // !defined(TARGET_ARCH_IA32)

#if !defined(DART_PRECOMPILED_RUNTIME)
void RegisterTypeArgumentsUse(const Function& function,
                              TypeUsageInfo* type_usage_info,
                              const Class& klass,
                              Definition* type_arguments) {
  // We only have a [TypeUsageInfo] object available during AOT compilation.
  UNREACHABLE();
}
#endif  // !defined(DART_PRECOMPILED_RUNTIME)

#endif  // !defined(TARGET_ARCH_IA32)

#undef __

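// TypeUsageInfo is only available during AOT compilation (see
// RegisterTypeArgumentsUse above): it records, per class id, the type
// argument vectors used at instance creation sites as well as the types
// tested in AssertAssignable, which later informs TTS specialization.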
TypeUsageInfo::TypeUsageInfo(Thread* thread)
    : ThreadStackResource(thread),
      zone_(thread->zone()),
      assert_assignable_types_(),
      instance_creation_arguments_(
          new TypeArgumentsSet
              [thread->isolate_group()->class_table()->NumCids()]),
      klass_(Class::Handle(zone_)) {
  thread->set_type_usage_info(this);
}

TypeUsageInfo::~TypeUsageInfo() {
  thread()->set_type_usage_info(nullptr);
  delete[] instance_creation_arguments_;
}

void TypeUsageInfo::UseTypeInAssertAssignable(const AbstractType& type) {
  if (!assert_assignable_types_.HasKey(&type)) {
    AddTypeToSet(&assert_assignable_types_, &type);
  }
}

void TypeUsageInfo::UseTypeArgumentsInInstanceCreation(
    const Class& klass,
    const TypeArguments& ta) {
  if (ta.IsNull() || ta.IsCanonical()) {
    // The Dart VM performs an optimization where it re-uses type argument
    // vectors if the use-site needs a prefix of an already-existent type
    // arguments vector.
    //
    // For example:
    //
    //    class Foo<K, V> {
    //      foo() => new Bar<K>();
    //    }
    //
    // So the length of the type arguments vector can be longer than the number
    // of type arguments the class expects.
    ASSERT(ta.IsNull() || klass.NumTypeArguments() <= ta.Length());

    // If this is a non-instantiated [TypeArguments] object, then it refers to
    // type parameters. We need to ensure the type parameters in [ta] only
    // refer to type parameters in the class.
    if (!ta.IsNull() && !ta.IsInstantiated()) {
      return;
    }

    klass_ = klass.ptr();
    while (klass_.NumTypeArguments() > 0) {
      const intptr_t cid = klass_.id();
      TypeArgumentsSet& set = instance_creation_arguments_[cid];
      if (!set.HasKey(&ta)) {
        set.Insert(&TypeArguments::ZoneHandle(zone_, ta.ptr()));
      }
      klass_ = klass_.SuperClass();
    }
  }
}

void TypeUsageInfo::BuildTypeUsageInformation() {
  ClassTable* class_table = thread()->isolate_group()->class_table();
  const intptr_t cid_count = class_table->NumCids();

  // Step 1) Collect the type parameters we're interested in.
  TypeParameterSet parameters_tested_against;
  CollectTypeParametersUsedInAssertAssignable(&parameters_tested_against);

  // Step 2) Add all types which flow into a type parameter we test against to
  // the set of types tested against.
  UpdateAssertAssignableTypes(class_table, cid_count,
                              &parameters_tested_against);
}

void TypeUsageInfo::CollectTypeParametersUsedInAssertAssignable(
    TypeParameterSet* set) {
  TypeParameter& param = TypeParameter::Handle(zone_);
  auto it = assert_assignable_types_.GetIterator();
  for (const AbstractType** type = it.Next(); type != nullptr;
       type = it.Next()) {
    AddToSetIfParameter(set, *type, &param);
  }
}

void TypeUsageInfo::UpdateAssertAssignableTypes(
    ClassTable* class_table,
    intptr_t cid_count,
    TypeParameterSet* parameters_tested_against) {
  Class& klass = Class::Handle(zone_);
  TypeParameter& param = TypeParameter::Handle(zone_);
  AbstractType& type = AbstractType::Handle(zone_);

  // Because Object/dynamic are common values for type parameters, we add them
  // eagerly and avoid doing it down inside the loop.
  type = Type::DynamicType();
  AddTypeToSet(&assert_assignable_types_, &type);
  type = Type::ObjectType();  // TODO(regis): Add nullable Object?
  AddTypeToSet(&assert_assignable_types_, &type);

  for (intptr_t cid = 0; cid < cid_count; ++cid) {
    if (!class_table->IsValidIndex(cid) || !class_table->HasValidClassAt(cid)) {
      continue;
    }
    klass = class_table->At(cid);
    if (klass.NumTypeArguments() <= 0) {
      continue;
    }

    const intptr_t num_parameters = klass.NumTypeParameters();
    for (intptr_t i = 0; i < num_parameters; ++i) {
      param = klass.TypeParameterAt(i);
      if (parameters_tested_against->HasKey(&param)) {
        TypeArgumentsSet& ta_set = instance_creation_arguments_[cid];
        auto it = ta_set.GetIterator();
        for (const TypeArguments** ta = it.Next(); ta != nullptr;
             ta = it.Next()) {
          // We only add instantiated types to the set (and dynamic/Object
          // were already handled above).
          if (!(*ta)->IsNull()) {
            type = (*ta)->TypeAt(i);
            if (type.IsInstantiated()) {
              AddTypeToSet(&assert_assignable_types_, &type);
            }
          }
        }
      }
    }
  }
}

void TypeUsageInfo::AddToSetIfParameter(TypeParameterSet* set,
                                        const AbstractType* type,
                                        TypeParameter* param) {
  if (type->IsTypeParameter()) {
    *param ^= type->ptr();
    if (!param->IsNull() && !set->HasKey(param)) {
      set->Insert(&TypeParameter::Handle(zone_, param->ptr()));
    }
  }
}

void TypeUsageInfo::AddTypeToSet(TypeSet* set, const AbstractType* type) {
  if (!set->HasKey(type)) {
    set->Insert(&AbstractType::ZoneHandle(zone_, type->ptr()));
  }
}

bool TypeUsageInfo::IsUsedInTypeTest(const AbstractType& type) {
  if (type.IsFinalized()) {
    return assert_assignable_types_.HasKey(&type);
  }
  return false;
}

#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)

void DeoptimizeTypeTestingStubs() {
  class CollectTypes : public ObjectVisitor {
   public:
    CollectTypes(Zone* zone, GrowableArray<AbstractType*>* types)
        : zone_(zone), types_(types), cache_(SubtypeTestCache::Handle(zone)) {}

    void VisitObject(ObjectPtr object) {
      // Only types and record types may have optimized TTSes,
      // see TypeTestingStubGenerator::OptimizedCodeForType.
      if (object->IsType() || object->IsRecordType()) {
        types_->Add(&AbstractType::CheckedHandle(zone_, object));
      } else if (object->IsSubtypeTestCache()) {
        cache_ ^= object;
        cache_.Reset();
      }
    }

   private:
    Zone* const zone_;
    GrowableArray<AbstractType*>* const types_;
    TypeTestingStubGenerator generator_;
    SubtypeTestCache& cache_;
  };

  Thread* thread = Thread::Current();
  TIMELINE_DURATION(thread, Isolate, "DeoptimizeTypeTestingStubs");
  HANDLESCOPE(thread);
  Zone* zone = thread->zone();
  GrowableArray<AbstractType*> types(zone, 0);
  {
    HeapIterationScope iter(thread);
    CollectTypes visitor(zone, &types);
    iter.IterateObjects(&visitor);
  }
  auto& stub = Code::Handle(zone);
  for (auto* const type : types) {
    stub = TypeTestingStubGenerator::DefaultCodeForType(*type);
    type->SetTypeTestingStub(stub);
  }
}

#endif  // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)

}  // namespace dart
static word field_offset(intptr_t index)
static word slow_type_test_entry_point_offset()
static word type_at_offset(intptr_t i)
#define THR_Print(format,...)
Definition: log.h:20
#define ASSERT(E)
VkInstance instance
Definition: main.cc:48
const uint8_t uint32_t uint32_t GError ** error
Dart_NativeFunction function
Definition: fuchsia.cc:51
#define HANDLESCOPE(thread)
Definition: handles.h:321
GrowableArray< UnresolvedPcRelativeCall * > UnresolvedPcRelativeCalls
Definition: dart_vm.cc:33
static void CommentSkippedClasses(compiler::Assembler *assembler, const Type &type, const Class &type_class, const CidRangeVector &ranges)
static CodePtr RetryCompilationWithFarBranches(Thread *thread, std::function< CodePtr(compiler::Assembler &)> fun)
const Register THR
@ kInstanceTypeArgumentsAreSubtypes
const char *const name
DART_EXPORT bool IsNull(Dart_Handle object)
static CheckType SubtypeChecksForClass(Zone *zone, const Type &type, const Class &type_class, const Class &to_check)
int32_t classid_t
Definition: globals.h:524
@ kNullCid
Definition: class_id.h:252
@ kNumPredefinedCids
Definition: class_id.h:257
@ kVoidCid
Definition: class_id.h:254
@ kDynamicCid
Definition: class_id.h:253
@ kNeverCid
Definition: class_id.h:255
void DeoptimizeTypeTestingStubs()
MallocGrowableArray< CidRangeValue > CidRangeVector
Definition: il.h:253
@ NOT_EQUAL
void RegisterTypeArgumentsUse(const Function &function, TypeUsageInfo *type_usage_info, const Class &klass, Definition *type_arguments)
static void SplitOnTypeArgumentTests(HierarchyInfo *hi, const Type &type, const Class &type_class, const CidRangeVector &ranges, CidRangeVector *cid_check_only, CidRangeVector *type_argument_checks, CidRangeVector *not_checked)
DirectChainedHashMap< TypeParameterKeyValueTrait > TypeParameterSet
Definition: precompiler.h:165
const intptr_t cid
static void CommentCheckedClasses(compiler::Assembler *assembler, const CidRangeVector &ranges)
static void SplitByTypeArgumentsFieldOffset(Thread *T, const Class &type_class, const CidRangeVector &ranges, GrowableArray< CidRangeVector * > *output)
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified vm service A custom Dart VM Service port The default is to pick a randomly available open port disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode disable vm service Disable mDNS Dart VM Service publication Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set endless trace buffer
Definition: switches.h:126
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not set
Definition: switches.h:76
static bool Bind(PassBindingsCacheMTL &pass, ShaderStage stage, size_t bind_index, const BufferView &view)
#define Pd
Definition: globals.h:408
#define Pd32
Definition: globals.h:412
#define T
Definition: precompiler.cc:65
static constexpr intptr_t kSavedTypeArgumentRegisters
static constexpr Register kSuperTypeArgumentReg
static constexpr Register kSubTypeArgumentReg
static constexpr Register kInstanceTypeArgumentsReg
static constexpr Register kScratchReg
static constexpr Register kInstanceReg
static constexpr Register kFunctionTypeArgumentsReg
static constexpr Register kInstantiatorTypeArgumentsReg
#define TIMELINE_DURATION(thread, stream, name)
Definition: timeline.h:39
#define __