70 : public ObjectVisitor {
71 public:
72 explicit ObjectLocator(IsolateGroupReloadContext* context)
73 : context_(context), count_(0) {}
74
75 void VisitObject(ObjectPtr obj) override {
76 InstanceMorpher* morpher =
77 context_->instance_morpher_by_cid_.LookupValue(obj->GetClassId());
78 if (morpher != nullptr) {
79 morpher->AddObject(obj);
80 count_++;
81 }
82 }
83
84
85 intptr_t
count() {
return count_; }
86
87 private:
88 IsolateGroupReloadContext* context_;
89 intptr_t count_;
90};
91
93 MonitorLocker ml(heap->old_space()->tasks_lock());
94 return heap->old_space()->tasks() == 0;
95}
96
// Builds an InstanceMorpher describing how to copy instances of `from` (the
// pre-reload class, looked up via the heap-walk class table) into instances
// of `to` (the post-reload class): matching fields (by name) are mapped
// offset-to-offset, fields only present in `to` are recorded separately.
// NOTE(review): this excerpt is truncated — original lines 102-103, 111-112
// and 155-156 are missing (they presumably declare `mapping`,
// `new_fields_offsets` and reset `from_box_cid`/`to_box_cid`); confirm
// against the upstream source before editing.
97InstanceMorpher* InstanceMorpher::CreateFromClassDescriptors(
98 Zone* zone,
99 ClassTable* class_table,
100 const Class& from,
101 const Class& to) {
104
105 if (from.NumTypeArguments() > 0) {
// Both classes must carry a type-arguments vector; assert its slot exists.
106
107 intptr_t from_offset = from.host_type_arguments_field_offset();
108 ASSERT(from_offset != Class::kNoTypeArguments);
109 intptr_t to_offset = to.host_type_arguments_field_offset();
110 ASSERT(to_offset != Class::kNoTypeArguments);
113 }
114
115
116
// The old field layout must come from the heap-walk class table, which still
// describes objects as they currently exist on the heap.
117 const Array& from_fields = Array::Handle(
118 from.OffsetToFieldMap(IsolateGroup::Current()->heap_walk_class_table()));
119 const Array& to_fields = Array::Handle(to.OffsetToFieldMap());
120 Field& from_field = Field::Handle();
121 Field& to_field = Field::Handle();
122 String& from_name = String::Handle();
123 String& to_name = String::Handle();
124
// Force `field` to be boxed and re-checked by a load guard on next access.
125 auto ensure_boxed_and_guarded = [&](const Field& field) {
126 field.set_needs_load_guard(true);
127 if (field.is_unboxed()) {
128 to.MarkFieldBoxedDuringReload(class_table, field);
129 }
130 };
131
// For each new-class instance field, find the old-class field of the same
// name; unmatched fields are "new" and get a sentinel value later.
132
133 for (intptr_t i = 0; i < to_fields.Length(); i++) {
134 if (to_fields.At(i) == Field::null()) {
135 continue;
136 }
137
138
139 to_field = Field::RawCast(to_fields.At(i));
140 ASSERT(to_field.is_instance());
141 to_name = to_field.name();
142
143
144 bool new_field = true;
145
146
147 for (intptr_t j = 0; j < from_fields.Length(); j++) {
148 if (from_fields.At(j) == Field::null()) {
149 continue;
150 }
151 from_field = Field::RawCast(from_fields.At(j));
152 ASSERT(from_field.is_instance());
153 from_name = from_field.name();
154 if (from_name.Equals(to_name)) {
157
// If the unboxing state or static type changed, the new field must be
// boxed and guarded so its representation is re-derived from actual values.
158
159 if ((from_field.is_unboxed() && from_field.type() != to_field.type()) ||
160 (from_field.is_unboxed() != to_field.is_unboxed())) {
161
162
163 ensure_boxed_and_guarded(to_field);
164 }
165
// Classify the old field's unboxed representation by its guarded cid.
166 if (from_field.is_unboxed()) {
167 const auto field_cid = from_field.guarded_cid();
168 switch (field_cid) {
169 case kDoubleCid:
170 case kFloat32x4Cid:
171 case kFloat64x2Cid:
172 from_box_cid = field_cid;
173 break;
174 default:
175 from_box_cid = kIntegerCid;
176 break;
177 }
178 }
179
// Same classification for the new field.
180 if (to_field.is_unboxed()) {
181 const auto field_cid = to_field.guarded_cid();
182 switch (field_cid) {
183 case kDoubleCid:
184 case kFloat32x4Cid:
185 case kFloat64x2Cid:
186 to_box_cid = field_cid;
187 break;
188 default:
189 to_box_cid = kIntegerCid;
190 break;
191 }
192 }
193
// An unboxed destination requires an unboxed source (see guard above).
194
195 ASSERT(from_box_cid != kIllegalCid || to_box_cid == kIllegalCid);
196
// Record the (offset, representation) pair for source and destination.
197
198 mapping->Add({from_field.HostOffset(), from_box_cid});
199 mapping->Add({to_field.HostOffset(), to_box_cid});
200
201
202 new_field = false;
203 break;
204 }
205 }
206
207 if (new_field) {
208 ensure_boxed_and_guarded(to_field);
209 new_fields_offsets->Add(to_field.HostOffset());
210 }
211 }
212
213 ASSERT(from.id() == to.id());
214 return new (zone)
215 InstanceMorpher(zone, to.id(), from, to, mapping, new_fields_offsets);
216}
217
// Constructor: captures the old/new class pair, the field offset mapping and
// the offsets of newly-added fields; `before_` collects instances to morph.
// NOTE(review): original line 225 is missing from this excerpt (presumably
// the `cid_(cid)` initializer) — confirm against the upstream source.
218InstanceMorpher::InstanceMorpher(Zone* zone,
219 classid_t cid,
220 const Class& old_class,
221 const Class& new_class,
222 FieldMappingArray* mapping,
223 FieldOffsetArray* new_fields_offsets)
224 : zone_(zone),
226 old_class_(Class::Handle(zone, old_class.ptr())),
227 new_class_(Class::Handle(zone, new_class.ptr())),
228 mapping_(mapping),
229 new_fields_offsets_(new_fields_offsets),
230 before_(zone, 16) {}
231
// Records one heap object of this morpher's cid for later morphing.
// NOTE(review): original line 235 is missing from this excerpt (presumably
// `before_.Add(&instance);`) — confirm against the upstream source.
232void InstanceMorpher::AddObject(ObjectPtr object) {
233 ASSERT(object->GetClassId() == cid_);
234 const Instance&
instance = Instance::Cast(Object::Handle(
Z,
object));
236}
237
// For every collected instance, allocates a fresh instance of the new class,
// copies fields according to `mapping_` (boxing/unboxing as needed), seeds
// new fields with the sentinel, and registers a before->after forwarding
// pair with `become`. NOTE(review): excerpt is truncated in places (e.g.
// original lines 280-281 missing) — the code itself appears intact.
238void InstanceMorpher::CreateMorphedCopies(Become* become) {
239 Instance& after = Instance::Handle(
Z);
240 Object&
value = Object::Handle(
Z);
241 for (intptr_t i = 0; i < before_.length(); i++) {
242 const Instance& before = *before_.At(i);
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
// Canonical instances stay canonical and live in old space.
262 const bool is_canonical = before.IsCanonical();
263 const Heap::Space space = is_canonical ? Heap::kOld : Heap::kNew;
264 after = Instance::NewAlreadyFinalized(new_class_, space);
265
266
267
268 if (is_canonical) {
269 after.SetCanonical();
270 }
// Preserve the identity hash across the morph so user-visible hash codes
// do not change under reload.
271#if defined(HASH_IN_OBJECT_HEADER)
272 const uint32_t
hash = Object::GetCachedHash(before.ptr());
273 Object::SetCachedHashIfNotSet(after.ptr(),
hash);
274#endif
275
// Mapping entries come in (from, to) pairs; box_cid == kIllegalCid means
// the slot holds a tagged pointer rather than an unboxed value.
276
277 for (intptr_t i = 0; i < mapping_->length(); i += 2) {
278 const auto& from = mapping_->At(i);
279 const auto& to = mapping_->At(i + 1);
282 if (from.box_cid == kIllegalCid) {
// Boxed -> boxed: plain pointer copy.
283
284 ASSERT(to.box_cid == kIllegalCid);
285
286
287 ObjectPtr raw_value = before.RawGetFieldAtOffset(from.offset);
288 after.RawSetFieldAtOffset(to.offset, raw_value);
289 } else if (to.box_cid == kIllegalCid) {
// Unboxed -> boxed: allocate a box object for the raw value.
290
291 switch (from.box_cid) {
292 case kDoubleCid: {
293 const auto unboxed_value =
294 before.RawGetUnboxedFieldAtOffset<double>(from.offset);
295 value = Double::New(unboxed_value);
296 break;
297 }
298 case kFloat32x4Cid: {
299 const auto unboxed_value =
300 before.RawGetUnboxedFieldAtOffset<simd128_value_t>(from.offset);
301 value = Float32x4::New(unboxed_value);
302 break;
303 }
304 case kFloat64x2Cid: {
305 const auto unboxed_value =
306 before.RawGetUnboxedFieldAtOffset<simd128_value_t>(from.offset);
307 value = Float64x2::New(unboxed_value);
308 break;
309 }
310 case kIntegerCid: {
311 const auto unboxed_value =
312 before.RawGetUnboxedFieldAtOffset<int64_t>(from.offset);
313 value = Integer::New(unboxed_value);
314 break;
315 }
316 }
// Fields of canonical instances must hold canonical values.
317 if (is_canonical) {
318 value = Instance::Cast(value).Canonicalize(Thread::Current());
319 }
320 after.RawSetFieldAtOffset(to.offset, value);
321 } else {
// Unboxed -> unboxed: raw bit copy of the same representation.
322
323 ASSERT(to.box_cid == from.box_cid);
324 switch (from.box_cid) {
325 case kDoubleCid: {
326 const auto unboxed_value =
327 before.RawGetUnboxedFieldAtOffset<double>(from.offset);
328 after.RawSetUnboxedFieldAtOffset<double>(to.offset, unboxed_value);
329 break;
330 }
331 case kFloat32x4Cid:
332 case kFloat64x2Cid: {
333 const auto unboxed_value =
334 before.RawGetUnboxedFieldAtOffset<simd128_value_t>(from.offset);
335 after.RawSetUnboxedFieldAtOffset<simd128_value_t>(to.offset,
336 unboxed_value);
337 break;
338 }
339 case kIntegerCid: {
340 const auto unboxed_value =
341 before.RawGetUnboxedFieldAtOffset<int64_t>(from.offset);
342 after.RawSetUnboxedFieldAtOffset<int64_t>(to.offset, unboxed_value);
343 break;
344 }
345 }
346 }
347 }
348
// Fields that only exist in the new class start out as the sentinel value.
349 for (intptr_t i = 0; i < new_fields_offsets_->length(); i++) {
350 const auto& field_offset = new_fields_offsets_->At(i);
351 after.RawSetFieldAtOffset(field_offset, Object::sentinel());
352 }
353
354
355
// Neutralize the old instance, then forward all references to the copy.
356
357 Become::MakeDummyObject(before);
358
359 become->Add(before, after);
360 }
361}
362
364 switch (box_cid) {
365 case kDoubleCid:
366 return "double";
367 case kFloat32x4Cid:
368 return "float32x4";
369 case kFloat64x2Cid:
370 return "float64x2";
371 case kIntegerCid:
372 return "int64";
373 }
374 return "?";
375}
376
// Debug aid: prints this morpher's cid and its field mapping pairs.
// NOTE(review): original lines 383, 386, 388 and 391 are missing from this
// excerpt (additional THR_Print calls) — confirm against upstream.
377void InstanceMorpher::Dump() const {
378 LogBlock blocker;
379 THR_Print(
"Morphing objects with cid: %d via this mapping: ", cid_);
380 for (int i = 0; i < mapping_->length(); i += 2) {
381 const auto& from = mapping_->At(i);
382 const auto& to = mapping_->At(i + 1);
384 THR_Print(
" (%" Pd " -> %" Pd ")", from.box_cid, to.box_cid);
385 if (to.box_cid == kIllegalCid && from.box_cid != kIllegalCid) {
387 } else if (to.box_cid != kIllegalCid) {
389 }
390 }
392}
393
// Emits this morpher as a "ShapeChangeMapping" JSON object: class id,
// morphed instance count, and the (from, to) field offset pairs.
// NOTE(review): original lines 408 and 410 are missing from this excerpt
// (likely extra pair.AddValue calls for box cids) — confirm upstream.
394void InstanceMorpher::AppendTo(JSONArray* array) {
395 JSONObject jsobj(array);
396 jsobj.AddProperty("type", "ShapeChangeMapping");
397 jsobj.AddProperty64("class-id", cid_);
398 jsobj.AddProperty("instanceCount", before_.length());
399 JSONArray
map(&jsobj,
"fieldOffsetMappings");
400 for (int i = 0; i < mapping_->length(); i += 2) {
401 const auto& from = mapping_->At(i);
402 const auto& to = mapping_->At(i + 1);
403
404 JSONArray pair(&map);
405 pair.AddValue(from.offset);
406 pair.AddValue(to.offset);
407 if (to.box_cid == kIllegalCid && from.box_cid != kIllegalCid) {
409 } else if (to.box_cid != kIllegalCid) {
411 }
412 }
413}
414
415void ReasonForCancelling::Report(IsolateGroupReloadContext* context) {
416 const Error&
error = Error::Handle(ToError());
417 context->ReportError(
error);
418}
419
// Default conversion of a cancellation reason to an Error.
// NOTE(review): original line 422 is missing from this excerpt (presumably
// `const String& message = String::Handle(ToString());`) — confirm upstream.
420ErrorPtr ReasonForCancelling::ToError() {
421
423 return LanguageError::New(
message);
424}
425
// Base-class stub; subclasses provide the actual message.
// NOTE(review): original line 427 is missing from this excerpt (likely an
// UNREACHABLE() marker) — confirm against the upstream source.
426StringPtr ReasonForCancelling::ToString() {
428 return nullptr;
429}
430
// Emits this reason as a "ReasonForCancelling" JSON object with its message.
// NOTE(review): original line 434 is missing from this excerpt (presumably
// the declaration of `message`, via ToString()) — confirm upstream.
431void ReasonForCancelling::AppendTo(JSONArray* array) {
432 JSONObject jsobj(array);
433 jsobj.AddProperty("type", "ReasonForCancelling");
435 jsobj.AddProperty(
"message",
message.ToCString());
436}
437
438ClassReasonForCancelling::ClassReasonForCancelling(Zone* zone,
439 const Class& from,
440 const Class& to)
441 : ReasonForCancelling(zone),
442 from_(Class::ZoneHandle(zone, from.ptr())),
443 to_(Class::ZoneHandle(zone, to.ptr())) {}
444
// Emits this reason as JSON, including the old class and the message.
// NOTE(review): original line 449 is missing from this excerpt (presumably
// the declaration of `message`) — confirm against upstream.
445void ClassReasonForCancelling::AppendTo(JSONArray* array) {
446 JSONObject jsobj(array);
447 jsobj.AddProperty("type", "ReasonForCancelling");
448 jsobj.AddProperty("class", from_);
450 jsobj.AddProperty(
"message",
message.ToCString());
451}
452
453ErrorPtr IsolateGroupReloadContext::error() const {
454 ASSERT(!reasons_to_cancel_reload_.is_empty());
455
456 return reasons_to_cancel_reload_.At(0)->ToError();
457}
458
459class ScriptUrlSetTraits {
460 public:
461 static bool ReportStats() { return false; }
462 static const char*
Name() {
return "ScriptUrlSetTraits"; }
463
464 static bool IsMatch(
const Object&
a,
const Object&
b) {
465 if (!
a.IsString() || !
b.IsString()) {
466 return false;
467 }
468
469 return String::Cast(
a).Equals(String::Cast(
b));
470 }
471
472 static uword Hash(
const Object& obj) {
return String::Cast(obj).Hash(); }
473};
474
// Hash-map traits keyed by Class, matching classes across a reload via
// ProgramReloadContext::IsSameClass (same name + same library private key).
// NOTE(review): original lines 493-494 are missing from this excerpt
// (presumably `return FinalizeHash(CombineHashes(class_name_hash, ...` in
// Hash) — confirm against the upstream source.
475class ClassMapTraits {
476 public:
477 static bool ReportStats() { return false; }
478 static const char*
Name() {
return "ClassMapTraits"; }
479
480 static bool IsMatch(
const Object&
a,
const Object&
b) {
481 if (!
a.IsClass() || !
b.IsClass()) {
482 return false;
483 }
484 return ProgramReloadContext::IsSameClass(Class::Cast(
a), Class::Cast(
b));
485 }
486
// Hash combines the class name symbol hash with the library's private key
// hash; a library-less class hashes on its name alone.
487 static uword Hash(
const Object& obj) {
488 uword class_name_hash = String::HashRawSymbol(Class::Cast(obj).
Name());
489 LibraryPtr raw_library = Class::Cast(obj).library();
490 if (raw_library == Library::null()) {
491 return class_name_hash;
492 }
495 String::Hash(Library::Handle(raw_library).private_key())),
496 30);
497 }
498};
499
// Hash-map traits keyed by Library, matching libraries across a reload via
// ProgramReloadContext::IsSameLibrary (same URL), hashing on the URL hash.
// NOTE(review): original line 510 is missing from this excerpt (the second
// argument of IsSameLibrary, presumably `Library::Cast(b));`) — confirm
// against the upstream source.
500class LibraryMapTraits {
501 public:
502 static bool ReportStats() { return false; }
503 static const char*
Name() {
return "LibraryMapTraits"; }
504
505 static bool IsMatch(
const Object&
a,
const Object&
b) {
506 if (!
a.IsLibrary() || !
b.IsLibrary()) {
507 return false;
508 }
509 return ProgramReloadContext::IsSameLibrary(Library::Cast(
a),
511 }
512
513 static uword Hash(
const Object& obj) {
return Library::Cast(obj).UrlHash(); }
514};
515
516bool ProgramReloadContext::IsSameClass(
const Class&
a,
const Class&
b) {
517
518
519
520 const String& a_name = String::Handle(
a.Name());
521 const String& b_name = String::Handle(
b.Name());
522
523 if (!a_name.Equals(b_name)) {
524 return false;
525 }
526
527 const Library& a_lib = Library::Handle(
a.library());
528 const Library& b_lib = Library::Handle(
b.library());
529
530 if (a_lib.IsNull() || b_lib.IsNull()) {
531 return a_lib.ptr() == b_lib.ptr();
532 }
533 return (a_lib.private_key() == b_lib.private_key());
534}
535
536bool ProgramReloadContext::IsSameLibrary(const Library& a_lib,
537 const Library& b_lib) {
538 const String& a_lib_url =
539 String::Handle(a_lib.IsNull() ? String::null() : a_lib.url());
540 const String& b_lib_url =
541 String::Handle(b_lib.IsNull() ? String::null() : b_lib.url());
542 return a_lib_url.Equals(b_lib_url);
543}
544
// Per-isolate-group reload context: records timing, the class table in use,
// and the bookkeeping containers for morphers and cancellation reasons.
// NOTE(review): original line 554 is missing from this excerpt (presumably
// the `js_(js)` initializer) — confirm against the upstream source.
545IsolateGroupReloadContext::IsolateGroupReloadContext(
546 IsolateGroup* isolate_group,
547 ClassTable* class_table,
548 JSONStream* js)
549 : zone_(Thread::Current()->zone()),
550 isolate_group_(isolate_group),
551 class_table_(class_table),
552 start_time_micros_(OS::GetCurrentMonotonicMicros()),
553 reload_timestamp_(OS::GetCurrentTimeMillis()),
555 instance_morphers_(zone_, 0),
556 reasons_to_cancel_reload_(zone_, 0),
557 instance_morpher_by_cid_(zone_),
558 root_lib_url_(String::Handle(
Z, String::null())),
559 root_url_prefix_(String::null()),
560 old_root_url_prefix_(String::null()) {}
// Trivial destructor: all members are zone- or handle-managed.
561IsolateGroupReloadContext::~IsolateGroupReloadContext() {}
562
// Per-program reload context: holds the storage arrays for the class/library
// maps and sets populated during checkpointing; all start out null.
// NOTE(review): original line 579 is missing from this excerpt (body content
// inside the constructor) — confirm against the upstream source.
563ProgramReloadContext::ProgramReloadContext(
564 std::shared_ptr<IsolateGroupReloadContext> group_reload_context,
565 IsolateGroup* isolate_group)
566 : zone_(Thread::Current()->zone()),
567 group_reload_context_(group_reload_context),
568 isolate_group_(isolate_group),
569 old_classes_set_storage_(Array::null()),
570 class_map_storage_(Array::null()),
571 removed_class_set_storage_(Array::null()),
572 old_libraries_set_storage_(Array::null()),
573 library_map_storage_(Array::null()),
574 saved_root_library_(Library::null()),
575 saved_libraries_(GrowableObjectArray::null()) {
576
577
578
580}
581
582ProgramReloadContext::~ProgramReloadContext() {
583 ASSERT(zone_ == Thread::Current()->zone());
584 ASSERT(
IG->class_table() ==
IG->heap_walk_class_table());
585}
586
// Publishes a reload-failure service event carrying `error`; system isolate
// groups are exempt from service reporting.
// NOTE(review): original line 592 is missing from this excerpt (likely a
// TIR_Print of the error) — confirm against the upstream source.
587void IsolateGroupReloadContext::ReportError(
const Error&
error) {
588 IsolateGroup* isolate_group = IsolateGroup::Current();
589 if (IsolateGroup::IsSystemIsolateGroup(isolate_group)) {
590 return;
591 }
593 ServiceEvent service_event(isolate_group, ServiceEvent::kIsolateReload);
594 service_event.set_reload_error(&
error);
595 Service::HandleEvent(&service_event);
596}
597
598void IsolateGroupReloadContext::ReportSuccess() {
599 IsolateGroup* isolate_group = IsolateGroup::Current();
600 if (IsolateGroup::IsSystemIsolateGroup(isolate_group)) {
601 return;
602 }
603 ServiceEvent service_event(isolate_group, ServiceEvent::kIsolateReload);
604 Service::HandleEvent(&service_event);
605}
606
// Cancellation reason wrapping a concrete Error (e.g. a compilation failure)
// that aborted the reload.
// NOTE(review): original line 617 is missing from this excerpt (presumably
// the `StringPtr ToString() {` signature) — confirm against upstream.
607class Aborted : public ReasonForCancelling {
608 public:
609 Aborted(Zone* zone,
const Error&
error)
610 : ReasonForCancelling(zone),
611 error_(Error::ZoneHandle(zone,
error.ptr())) {}
612
613 private:
614 const Error& error_;
615
616 ErrorPtr ToError() { return error_.ptr(); }
618 return String::NewFormatted("%s", error_.ToErrorCString());
619 }
620};
621
623 const intptr_t a_length = strlen(
a);
624 const intptr_t b_length = strlen(
b);
625 intptr_t a_cursor = a_length;
626 intptr_t b_cursor = b_length;
627
628 while ((a_cursor >= 0) && (b_cursor >= 0)) {
629 if (
a[a_cursor] !=
b[b_cursor]) {
630 break;
631 }
632 a_cursor--;
633 b_cursor--;
634 }
635
636 ASSERT((a_length - a_cursor) == (b_length - b_cursor));
637 return (a_length - a_cursor);
638}
639
641 TransitionVMToNative transition(thread);
646 "An error occurred while accepting the most recent"
647 " compilation results: %s",
649 }
651 "An error occurred while accepting the most recent"
652 " compilation results: %s",
654 Zone* zone = thread->zone();
655 const auto& error_str = String::Handle(zone, String::New(
result.error));
657 return ApiError::New(error_str);
658 }
659 return Object::null();
660}
661
663 TransitionVMToNative transition(thread);
668 "An error occurred while rejecting the most recent"
669 " compilation results: %s",
671 }
673 "An error occurred while rejecting the most recent"
674 " compilation results: %s",
676 Zone* zone = thread->zone();
677 const auto& error_str = String::Handle(zone, String::New(
result.error));
679 return ApiError::New(error_str);
680 }
681 return Object::null();
682}
683
684
// Top-level hot-reload driver. Phases: obtain kernel (from file, caller
// buffer, or by compiling modified sources); compute the transitive set of
// modified libraries; checkpoint; load the new program; then either commit
// (possibly morphing instances whose classes changed shape) or roll back.
// Returns true on success.
// NOTE(review): this excerpt is heavily truncated — many original lines
// (e.g. 690, 735, 741, 754, 784-786, 797, 835, 850, 854, 857, 864, 878-881,
// 887, 907-908, 919, 928, 966, 969, 1008) are missing, including the
// declarations several later uses depend on (`result`, `error`). Confirm
// every claim below against the upstream source before editing.
685bool IsolateGroupReloadContext::Reload(bool force_reload,
686 const char* root_script_url,
687 const char* packages_url,
688 const uint8_t* kernel_buffer,
689 intptr_t kernel_buffer_size) {
691
692 Thread* thread = Thread::Current();
693 ASSERT(thread->OwnsReloadSafepoint());
694
695 Heap* heap =
IG->heap();
696 num_old_libs_ =
697 GrowableObjectArray::Handle(
Z,
IG->object_store()->libraries()).Length();
698
// Resolve the root library URL for this reload.
699
700 GetRootLibUrl(root_script_url);
701
702 std::unique_ptr<kernel::Program> kernel_program;
703
704
705 num_received_libs_ = 0;
706 bytes_received_libs_ = 0;
707 num_received_classes_ = 0;
708 num_received_procedures_ = 0;
709
710 bool did_kernel_compilation = false;
711 bool skip_reload = false;
712 {
713
714 intptr_t* p_num_received_classes = nullptr;
715 intptr_t* p_num_received_procedures = nullptr;
716
// Prefer a precompiled kernel file at the root URL; otherwise use the
// caller-provided buffer, or compile the modified sources ourselves.
717
718
719
720
721 kernel_program = kernel::Program::ReadFromFile(root_script_url);
722 if (kernel_program != nullptr) {
723 num_received_libs_ = kernel_program->library_count();
724 bytes_received_libs_ = kernel_program->binary().LengthInBytes();
725 p_num_received_classes = &num_received_classes_;
726 p_num_received_procedures = &num_received_procedures_;
727 } else {
728 if (kernel_buffer == nullptr || kernel_buffer_size == 0) {
729 char*
error = CompileToKernel(force_reload, packages_url,
730 &kernel_buffer, &kernel_buffer_size);
731 did_kernel_compilation = true;
732 if (
error !=
nullptr) {
// Compilation failed: record the reason and abort the reload.
733 TIR_Print(
"---- LOAD FAILED, ABORTING RELOAD\n");
734 const auto& error_str = String::Handle(
Z, String::New(
error));
736 const ApiError&
error = ApiError::Handle(
Z, ApiError::New(error_str));
737 AddReasonForCancelling(
new Aborted(
Z,
error));
738 ReportReasonsForCancelling();
739 CommonFinalizeTail(num_old_libs_);
740
742 return false;
743 }
744 }
745 const auto& typed_data = ExternalTypedData::Handle(
746 Z, ExternalTypedData::NewFinalizeWithFree(
747 const_cast<uint8_t*>(kernel_buffer), kernel_buffer_size));
748 kernel_program = kernel::Program::ReadFromTypedData(typed_data);
749 }
750
751 NoActiveIsolateScope no_active_isolate_scope;
752
753 IsolateGroupSource*
source = IsolateGroup::Current()->source();
755 ExternalTypedData::Cast(kernel_program->binary()));
756
// Determine which libraries changed, and close over their importers.
757 modified_libs_ =
new (
Z) BitVector(
Z, num_old_libs_);
758 kernel::KernelLoader::FindModifiedLibraries(
759 kernel_program.get(),
IG, modified_libs_, force_reload, &skip_reload,
760 p_num_received_classes, p_num_received_procedures);
761 modified_libs_transitive_ =
new (
Z) BitVector(
Z, num_old_libs_);
762 BuildModifiedLibrariesClosure(modified_libs_);
763
764 ASSERT(num_saved_libs_ == -1);
765 num_saved_libs_ = 0;
766 for (intptr_t i = 0; i < modified_libs_->length(); i++) {
767 if (!modified_libs_->Contains(i)) {
768 num_saved_libs_++;
769 }
770 }
771 }
772
773 NoActiveIsolateScope no_active_isolate_scope;
774
// Nothing changed: optionally accept the compilation, then bail out early.
775 if (skip_reload) {
776 ASSERT(modified_libs_->IsEmpty());
777 reload_skipped_ = true;
778 ReportOnJSON(js_, num_old_libs_);
779
780
781
782
783 if (did_kernel_compilation) {
787 AddReasonForCancelling(
new Aborted(
Z,
error));
788 ReportReasonsForCancelling();
789 CommonFinalizeTail(num_old_libs_);
790 return false;
791 }
792 }
793 TIR_Print(
"---- SKIPPING RELOAD (No libraries were modified)\n");
794 return false;
795 }
796
798
799 intptr_t number_of_isolates = 0;
800 isolate_group_->ForEachIsolate(
801 [&](Isolate* isolate) { number_of_isolates++; });
802
// Disable concurrent marking for the duration of the reload.
803
804
805
806
807 const bool old_concurrent_mark_flag =
808 heap->old_space()->enable_concurrent_mark();
809 if (old_concurrent_mark_flag) {
810 heap->WaitForMarkerTasks(thread);
811 heap->old_space()->set_enable_concurrent_mark(false);
812 }
813
// Prepare: deoptimize, then checkpoint current program state so a failed
// reload can roll back.
814
815
816
817
818
819 IG->program_reload_context()->EnsuredUnoptimizedCodeForStack();
820 IG->program_reload_context()->DeoptimizeDependentCode();
821 IG->program_reload_context()->ReloadPhase1AllocateStorageMapsAndCheckpoint();
822
823
824 modified_libs_ = nullptr;
825 modified_libs_transitive_ = nullptr;
826
827 if (FLAG_gc_during_reload) {
828
829
830 heap->CollectAllGarbage(GCReason::kDebugging, true);
831 }
832
833
834 {
836 IG->program_reload_context()->CheckpointClasses();
837 }
838
839 if (FLAG_gc_during_reload) {
840
841
842 heap->CollectAllGarbage(GCReason::kDebugging, true);
843 }
844
// Load the new program from kernel.
845
846
847
848
849
851 Object::Handle(
Z,
IG->program_reload_context()->ReloadPhase2LoadKernel(
852 kernel_program.get(), root_lib_url_));
853
855 TIR_Print(
"---- LOAD FAILED, ABORTING RELOAD\n");
856
858 AddReasonForCancelling(
new Aborted(
Z,
error));
859
860 IG->program_reload_context()->ReloadPhase4Rollback();
861 CommonFinalizeTail(num_old_libs_);
862 } else {
863 ASSERT(!reload_skipped_ && !reload_finalized_);
865
866 IG->program_reload_context()->ReloadPhase3FinalizeLoading();
867
868 if (FLAG_gc_during_reload) {
869
870
871 heap->CollectAllGarbage(GCReason::kDebugging, true);
872 }
873
874
875
876
877 if (did_kernel_compilation) {
882 AddReasonForCancelling(
new Aborted(
Z,
error));
883 }
884 }
885
// Commit path: optionally morph instances whose classes changed shape.
886 if (!FLAG_reload_force_rollback && !HasReasonsForCancelling()) {
888 isolate_group_->program_reload_context()->ReloadPhase4CommitPrepare();
889 bool discard_class_tables = true;
890 if (HasInstanceMorphers()) {
// Collect every live instance whose class requires morphing.
891
892
893 ObjectLocator locator(this);
894 {
896 HeapIterationScope iteration(thread);
897 iteration.IterateObjects(&locator);
898 }
899
900
901 if (FLAG_gc_during_reload) {
902
903
904 heap->CollectAllGarbage(GCReason::kDebugging, true);
905 }
906 const intptr_t
count = locator.count();
909
// Morphing must not trigger GC-driven class-table access mid-swap.
910
911
912
913
914
915
916 ForceGrowthScope force_growth(thread);
917
918
920
921 MorphInstancesPhase1Allocate(&locator,
IG->become());
922 {
923
924
925
926
927
929
930
931
932 IG->DropOriginalClassTable();
933 }
934 MorphInstancesPhase2Become(
IG->become());
935
936 discard_class_tables = false;
937 }
938
939 if (FLAG_gc_during_reload) {
940
941
942 heap->CollectAllGarbage(GCReason::kDebugging, true);
943 }
944 }
// Identity-reload sanity checks: nothing should have changed shape.
945 if (FLAG_identity_reload) {
946 if (!discard_class_tables) {
947 TIR_Print(
"Identity reload failed! Some instances were morphed\n");
948 }
949 if (
IG->heap_walk_class_table()->NumCids() !=
950 IG->class_table()->NumCids()) {
951 TIR_Print(
"Identity reload failed! B#C=%" Pd " A#C=%" Pd "\n",
952 IG->heap_walk_class_table()->NumCids(),
953 IG->class_table()->NumCids());
954 }
955 if (
IG->heap_walk_class_table()->NumTopLevelCids() !=
956 IG->class_table()->NumTopLevelCids()) {
957 TIR_Print(
"Identity reload failed! B#TLC=%" Pd " A#TLC=%" Pd "\n",
958 IG->heap_walk_class_table()->NumTopLevelCids(),
959 IG->class_table()->NumTopLevelCids());
960 }
961 }
962 if (discard_class_tables) {
963 IG->DropOriginalClassTable();
964 }
965 isolate_group_->program_reload_context()->ReloadPhase4CommitFinish();
967 isolate_group_->set_last_reload_timestamp(reload_timestamp_);
968 } else {
970 isolate_group_->program_reload_context()->ReloadPhase4Rollback();
971 }
972
// Subclass links must be rebuilt whether we committed or rolled back.
973
974
975
976 {
977 SafepointWriteRwLocker ml(thread,
IG->program_lock());
978 IG->program_reload_context()->RebuildDirectSubclasses();
979 }
980 const intptr_t final_library_count =
981 GrowableObjectArray::Handle(
Z,
IG->object_store()->libraries())
982 .Length();
983 CommonFinalizeTail(final_library_count);
984 }
985
// Restore the concurrent-marking setting saved above.
986
987 if (old_concurrent_mark_flag) {
988 heap->old_space()->set_enable_concurrent_mark(true);
989 }
990
991 bool success;
992 if (!
result.IsError() || HasReasonsForCancelling()) {
993 ReportSuccess();
994 success = true;
995 } else {
996 ReportReasonsForCancelling();
997 success = false;
998 }
999
// Drop the URI resolution caches; they may reference stale libraries.
1000 Array& null_array = Array::Handle(
Z);
1001
1002 IG->object_store()->set_uri_to_resolved_uri_map(null_array);
1003 IG->object_store()->set_resolved_uri_to_uri_map(null_array);
1004
// An unwind error terminates the isolates rather than being reported.
1005
1006
1007 if (
result.IsUnwindError()) {
1009 ForEachIsolate([&](Isolate* isolate) {
1010 Isolate::KillIfExists(isolate,
error.is_user_initiated()
1011 ? Isolate::kKillMsg
1012 : Isolate::kInternalKillMsg);
1013 });
1014 }
1015
1016 return success;
1017}
1018
1019
1021 const ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>* imported_by,
1022 intptr_t lib_index,
1023 BitVector* modified_libs) {
1024 ZoneGrowableArray<intptr_t>* dep_libs = (*imported_by)[lib_index];
1025 for (intptr_t i = 0; i < dep_libs->length(); i++) {
1026 intptr_t dep_lib_index = (*dep_libs)[i];
1027 if (!modified_libs->Contains(dep_lib_index)) {
1028 modified_libs->Add(dep_lib_index);
1030 }
1031 }
1032}
1033
1034
// Computes `modified_libs_transitive_`: the set of directly modified
// libraries plus (via the inverted `imported_by` graph) everything that
// imports, exports, or prefix-imports them. dart: scheme libraries are
// excluded. NOTE(review): this excerpt is truncated — original lines 1067,
// 1078, 1090, 1096 (presumably the `target = ns.library();` /
// `prefix ^= entry.ptr();` assignments) and 1114 (the propagation call,
// likely PropagateLibraryModified) are missing; confirm against upstream.
1035void IsolateGroupReloadContext::BuildModifiedLibrariesClosure(
1036 BitVector* modified_libs) {
1037 const GrowableObjectArray&
libs =
1038 GrowableObjectArray::Handle(
IG->object_store()->libraries());
1039 Library& lib = Library::Handle();
1040 intptr_t num_libs =
libs.Length();
1041
// Build the reverse dependency graph: imported_by[target] = importers.
1042
1043 ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>* imported_by = new (zone_)
1044 ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>(zone_, num_libs);
1045 imported_by->SetLength(num_libs);
1046 for (intptr_t i = 0; i < num_libs; i++) {
1047 (*imported_by)[i] = new (zone_) ZoneGrowableArray<intptr_t>(zone_, 0);
1048 }
1049 Array& ports = Array::Handle();
1050 Namespace& ns = Namespace::Handle();
1051 Library&
target = Library::Handle();
1052 String& target_url = String::Handle();
1053
1054 for (intptr_t lib_idx = 0; lib_idx < num_libs; lib_idx++) {
1055 lib ^=
libs.At(lib_idx);
1056 ASSERT(lib_idx == lib.index());
1057 if (lib.is_dart_scheme()) {
// dart: libraries cannot be reloaded, so their edges are irrelevant.
1058
1059 continue;
1060 }
1061
// Record direct imports.
1062
1063 ports = lib.imports();
1064 for (intptr_t import_idx = 0; import_idx < ports.Length(); import_idx++) {
1065 ns ^= ports.At(import_idx);
1066 if (!ns.IsNull()) {
1068 target_url =
target.url();
1069 (*imported_by)[
target.index()]->Add(lib.index());
1070 }
1071 }
1072
// Record exports.
1073
1074 ports = lib.exports();
1075 for (intptr_t export_idx = 0; export_idx < ports.Length(); export_idx++) {
1076 ns ^= ports.At(export_idx);
1077 if (!ns.IsNull()) {
1079 (*imported_by)[
target.index()]->Add(lib.index());
1080 }
1081 }
1082
// Record prefixed imports found in the library dictionary.
1083
1084 DictionaryIterator entries(lib);
1085 Object& entry = Object::Handle();
1086 LibraryPrefix&
prefix = LibraryPrefix::Handle();
1087 while (entries.HasNext()) {
1088 entry = entries.GetNext();
1089 if (entry.IsLibraryPrefix()) {
1091 ports =
prefix.imports();
1092 for (intptr_t import_idx = 0; import_idx < ports.Length();
1093 import_idx++) {
1094 ns ^= ports.At(import_idx);
1095 if (!ns.IsNull()) {
1097 (*imported_by)[
target.index()]->Add(lib.index());
1098 }
1099 }
1100 }
1101 }
1102 }
1103
// Seed the transitive set from the directly modified libraries.
1104 for (intptr_t lib_idx = 0; lib_idx < num_libs; lib_idx++) {
1105 lib ^=
libs.At(lib_idx);
1106 if (lib.is_dart_scheme() || modified_libs_transitive_->Contains(lib_idx)) {
1107
1108
1109
1110 continue;
1111 }
1112 if (modified_libs->Contains(lib_idx)) {
1113 modified_libs_transitive_->Add(lib_idx);
1115 }
1116 }
1117}
1118
// Determines the root library URL for the reload (caller-supplied URL wins,
// else the current root library's URL). If the URL changed, records the
// old/new URL prefixes up to their common suffix for later URL rewriting.
// NOTE(review): original line 1137 is missing from this excerpt (presumably
// a call computing the common suffix length from old_root_library_url_c and
// root_library_url_c) — confirm against the upstream source.
1119void IsolateGroupReloadContext::GetRootLibUrl(const char* root_script_url) {
1120 const auto& old_root_lib =
1121 Library::Handle(
IG->object_store()->root_library());
1122 ASSERT(!old_root_lib.IsNull());
1123 const auto& old_root_lib_url = String::Handle(old_root_lib.url());
1124
1125
1126 if (root_script_url != nullptr) {
1127 root_lib_url_ = String::New(root_script_url);
1128 } else {
1129 root_lib_url_ = old_root_lib_url.ptr();
1130 }
1131
1132
1133 if (!old_root_lib_url.Equals(root_lib_url_)) {
1134 const char* old_root_library_url_c = old_root_lib_url.ToCString();
1135 const char* root_library_url_c = root_lib_url_.ToCString();
1136 const intptr_t common_suffix_length =
1138 root_url_prefix_ = String::SubString(
1139 root_lib_url_, 0, root_lib_url_.Length() - common_suffix_length + 1);
1140 old_root_url_prefix_ =
1141 String::SubString(old_root_lib_url, 0,
1142 old_root_lib_url.Length() - common_suffix_length + 1);
1143 }
1144}
1145
// Compiles the modified source files to a kernel buffer via the kernel
// isolate. Returns nullptr on success (out-params receive the buffer) or an
// error message string on failure.
// NOTE(review): this excerpt is truncated — original lines 1150 (declaration
// of `modified_scripts`), 1155 (declaration of `retval`), 1168, 1170 and
// 1175 are missing; confirm against the upstream source.
1146char* IsolateGroupReloadContext::CompileToKernel(bool force_reload,
1147 const char* packages_url,
1148 const uint8_t** kernel_buffer,
1149 intptr_t* kernel_buffer_size) {
1151 intptr_t modified_scripts_count = 0;
1152 FindModifiedSources(force_reload, &modified_scripts, &modified_scripts_count,
1153 packages_url);
1154
1156 {
1157 const char* root_lib_url = root_lib_url_.ToCString();
// Compilation happens outside the VM (native) state.
1158 TransitionVMToNative transition(Thread::Current());
1159 retval = KernelIsolate::CompileToKernel(
1160 root_lib_url, nullptr, 0, modified_scripts_count, modified_scripts,
1161 true,
1162 false,
1163 true,
1164 nullptr,
1165 nullptr,
1166 nullptr);
1167 }
1169 if (retval.
kernel !=
nullptr) {
1171 }
1172 return retval.
error;
1173 }
1174 *kernel_buffer = retval.
kernel;
1176 return nullptr;
1177}
1178
// Phase 1: allocates the hash tables that map old classes/libraries to new
// ones across the reload, then checkpoints the current libraries so a
// failed reload can roll back.
// NOTE(review): original line 1195 is missing from this excerpt (likely a
// timeline/scope marker around CheckpointLibraries) — confirm upstream.
1179void ProgramReloadContext::ReloadPhase1AllocateStorageMapsAndCheckpoint() {
1180
1181 old_classes_set_storage_ =
1182 HashTables::New<UnorderedHashSet<ClassMapTraits> >(4);
1183 class_map_storage_ = HashTables::New<UnorderedHashMap<ClassMapTraits> >(4);
1184 removed_class_set_storage_ =
1185 HashTables::New<UnorderedHashSet<ClassMapTraits> >(4);
1186 old_libraries_set_storage_ =
1187 HashTables::New<UnorderedHashSet<LibraryMapTraits> >(4);
1188 library_map_storage_ =
1189 HashTables::New<UnorderedHashMap<LibraryMapTraits> >(4);
1190
1191
1192
1193
1194 {
1196 CheckpointLibraries();
1197 }
1198}
1199
1200ObjectPtr ProgramReloadContext::ReloadPhase2LoadKernel(
1201 kernel::Program* program,
1202 const String& root_lib_url) {
1203 Thread* thread = Thread::Current();
1204
1205 LongJumpScope jump;
1206 if (setjmp(*jump.Set()) == 0) {
1207 const Object& tmp = kernel::KernelLoader::LoadEntireProgram(program);
1208 if (tmp.IsError()) {
1209 return tmp.ptr();
1210 }
1211
1212
1213
1214
1215 auto& lib = Library::Handle(Library::RawCast(tmp.ptr()));
1216 if (lib.IsNull()) {
1217 lib = Library::LookupLibrary(thread, root_lib_url);
1218 }
1219 IG->object_store()->set_root_library(lib);
1220 return Object::null();
1221 } else {
1222 return thread->StealStickyError();
1223 }
1224}
1225
1226void ProgramReloadContext::ReloadPhase3FinalizeLoading() {
1227 BuildLibraryMapping();
1228 BuildRemovedClassesSet();
1229 ValidateReload();
1230}
1231
1232void ProgramReloadContext::ReloadPhase4CommitPrepare() {
1233 CommitBeforeInstanceMorphing();
1234}
1235
1236void ProgramReloadContext::ReloadPhase4CommitFinish() {
1237 CommitAfterInstanceMorphing();
1238 PostCommit();
1239}
1240
1241void ProgramReloadContext::ReloadPhase4Rollback() {
1242 IG->RestoreOriginalClassTable();
1243 RollbackLibraries();
1244}
1245
// Registers a newly loaded class: if it replaces an existing class it takes
// over that class's id (copying canonical constants and declaration type,
// and recording a become-mapping); otherwise it is registered as brand new.
// NOTE(review): original line 1258 is missing from this excerpt (inside the
// identity-reload branch) — confirm against the upstream source.
1246void ProgramReloadContext::RegisterClass(const Class& new_cls) {
1247 const Class& old_cls = Class::Handle(OldClassOrNull(new_cls));
1248 if (old_cls.IsNull()) {
// No predecessor: allocate a fresh class-table slot.
1249 if (new_cls.IsTopLevel()) {
1250 IG->class_table()->RegisterTopLevel(new_cls);
1251 } else {
1252 IG->class_table()->Register(new_cls);
1253 }
1254
// Under an identity reload every class should have a predecessor.
1255 if (FLAG_identity_reload) {
1256 TIR_Print(
"Could not find replacement class for %s\n",
1257 new_cls.ToCString());
1259 }
1260
1261
1262 AddClassMapping(new_cls, new_cls);
1263 return;
1264 }
1265 VTIR_Print(
"Registering class: %s\n", new_cls.ToCString());
1266 new_cls.set_id(old_cls.id());
1267 IG->class_table()->SetAt(old_cls.id(), new_cls.ptr());
1268 new_cls.CopyCanonicalConstants(old_cls);
1269 new_cls.CopyDeclarationType(old_cls);
1270 AddBecomeMapping(old_cls, new_cls);
1271 AddClassMapping(new_cls, old_cls);
1272}
1273
// Final step shared by all reload outcomes: emits the JSON report and marks
// the reload as finalized.
// NOTE(review): original line 1276 is missing from this excerpt (likely an
// assertion that the reload was not already finalized) — confirm upstream.
1274void IsolateGroupReloadContext::CommonFinalizeTail(
1275 intptr_t final_library_count) {
1277 ReportOnJSON(js_, final_library_count);
1278 reload_finalized_ = true;
1279}
1280
1281void IsolateGroupReloadContext::ReportOnJSON(JSONStream* stream,
1282 intptr_t final_library_count) {
1283 JSONObject jsobj(stream);
1284 jsobj.AddProperty("type", "ReloadReport");
1285 jsobj.AddProperty("success", reload_skipped_ || !HasReasonsForCancelling());
1286 {
1287 if (HasReasonsForCancelling()) {
1288
1289 JSONArray array(&jsobj, "notices");
1290 for (intptr_t i = 0; i < reasons_to_cancel_reload_.length(); i++) {
1291 ReasonForCancelling* reason = reasons_to_cancel_reload_.At(i);
1292 reason->AppendTo(&array);
1293 }
1294 return;
1295 }
1296
1297 JSONObject details(&jsobj, "details");
1298 details.AddProperty("finalLibraryCount", final_library_count);
1299 details.AddProperty("receivedLibraryCount", num_received_libs_);
1300 details.AddProperty("receivedLibrariesBytes", bytes_received_libs_);
1301 details.AddProperty("receivedClassesCount", num_received_classes_);
1302 details.AddProperty("receivedProceduresCount", num_received_procedures_);
1303 if (reload_skipped_) {
1304
1305 details.AddProperty("savedLibraryCount", final_library_count);
1306 details.AddProperty("loadedLibraryCount", static_cast<intptr_t>(0));
1307 } else {
1308
1309 const intptr_t loaded_library_count =
1310 final_library_count - num_saved_libs_;
1311 details.AddProperty("savedLibraryCount", num_saved_libs_);
1312 details.AddProperty("loadedLibraryCount", loaded_library_count);
1313 JSONArray array(&jsobj, "shapeChangeMappings");
1314 for (intptr_t i = 0; i < instance_morphers_.length(); i++) {
1315 instance_morphers_.At(i)->AppendTo(&array);
1316 }
1317 }
1318 }
1319}
1320
1321void ProgramReloadContext::EnsuredUnoptimizedCodeForStack() {
1323
1324 IG->ForEachIsolate([](Isolate* isolate) {
1325 auto thread = isolate->mutator_thread();
1326 if (thread == nullptr) {
1327 return;
1328 }
1329 StackFrameIterator it(ValidationPolicy::kDontValidateFrames, thread,
1330 StackFrameIterator::kAllowCrossThreadIteration);
1331
1332 Function& func = Function::Handle();
1333 while (it.HasNextFrame()) {
1334 StackFrame*
frame = it.NextFrame();
1335 if (
frame->IsDartFrame()) {
1336 func =
frame->LookupDartFunction();
1338
1339
1340 if (!func.ForceOptimize()) {
1341 func.EnsureHasCompiledUnoptimizedCode();
1342 }
1343 }
1344 }
1345 });
1346}
1347
1348void ProgramReloadContext::DeoptimizeDependentCode() {
1350 ClassTable* class_table =
IG->class_table();
1351
1352 const intptr_t bottom = Dart::vm_isolate_group()->class_table()->NumCids();
1353 const intptr_t top =
IG->class_table()->NumCids();
1354 Class& cls = Class::Handle();
1355 Array& fields = Array::Handle();
1356 Field& field = Field::Handle();
1357 Thread* thread = Thread::Current();
1358 SafepointWriteRwLocker ml(thread,
IG->program_lock());
1359 for (intptr_t cls_idx = bottom; cls_idx < top; cls_idx++) {
1360 if (!class_table->HasValidClassAt(cls_idx)) {
1361
1362 continue;
1363 }
1364
1365
1366 cls = class_table->At(cls_idx);
1368
1369 cls.DisableAllCHAOptimizedCode();
1370
1371
1372 fields = cls.fields();
1373 ASSERT(!fields.IsNull());
1374 for (intptr_t field_idx = 0; field_idx < fields.Length(); field_idx++) {
1375 field = Field::RawCast(fields.At(field_idx));
1377 field.DeoptimizeDependentCode();
1378 }
1379 }
1380
1382
1383
1384}
1385
1386void ProgramReloadContext::CheckpointClasses() {
1387 TIR_Print(
"---- CHECKPOINTING CLASSES\n");
1388
1389
1390
1391
1392
1393
1394
1395
1396
1398
1399 IG->CloneClassTableForReload();
1400
1401
1403
1404 ClassTable* class_table =
IG->class_table();
1405
1406
1407
1408
1409
1410 Class& cls = Class::Handle();
1411 UnorderedHashSet<ClassMapTraits> old_classes_set(old_classes_set_storage_);
1412 for (intptr_t i = 0; i < class_table->NumCids(); i++) {
1413 if (class_table->IsValidIndex(i) && class_table->HasValidClassAt(i)) {
1414 if (i != kFreeListElement && i != kForwardingCorpse) {
1415 cls = class_table->At(i);
1416 bool already_present = old_classes_set.Insert(cls);
1417 ASSERT(!already_present);
1418 }
1419 }
1420 }
1421 for (intptr_t i = 0; i < class_table->NumTopLevelCids(); i++) {
1422 const intptr_t
cid = ClassTable::CidFromTopLevelIndex(i);
1423 if (class_table->IsValidIndex(cid) && class_table->HasValidClassAt(cid)) {
1424 cls = class_table->At(cid);
1425 bool already_present = old_classes_set.Insert(cls);
1426 ASSERT(!already_present);
1427 }
1428 }
1429 old_classes_set_storage_ = old_classes_set.Release().ptr();
1431 class_table->NumCids() + class_table->NumTopLevelCids());
1432}
1433
1435 nullptr;
1436
1437bool IsolateGroupReloadContext::ScriptModifiedSince(const Script& script,
1438 int64_t since) {
1439 if (IsolateGroupReloadContext::file_modified_callback_ == nullptr) {
1440 return true;
1441 }
1442
1443 const String& url = String::Handle(
script.resolved_url());
1444 const char* url_chars = url.ToCString();
1445 return (*IsolateGroupReloadContext::file_modified_callback_)(url_chars,
1446 since);
1447}
1448
1450 const char* uri) {
1451 for (intptr_t i = 0; i < seen_uris.length(); i++) {
1452 const char* seen_uri = seen_uris.At(i);
1453 size_t seen_len = strlen(seen_uri);
1454 if (seen_len != strlen(uri)) {
1455 continue;
1456 } else if (strncmp(seen_uri, uri, seen_len) == 0) {
1457 return true;
1458 }
1459 }
1460 return false;
1461}
1462
1463void IsolateGroupReloadContext::FindModifiedSources(
1464 bool force_reload,
1467 const char* packages_url) {
1468 const int64_t last_reload = isolate_group_->last_reload_timestamp();
1469 GrowableArray<const char*> modified_sources_uris;
1471 GrowableObjectArray::Handle(
IG->object_store()->libraries());
1472 Library& lib = Library::Handle(
Z);
1474 Script&
script = Script::Handle(
Z);
1475 String& uri = String::Handle(
Z);
1476
1477 for (intptr_t lib_idx = 0; lib_idx <
libs.Length(); lib_idx++) {
1478 lib ^=
libs.At(lib_idx);
1479 if (lib.is_dart_scheme()) {
1480
1481 continue;
1482 }
1483 scripts = lib.LoadedScripts();
1484 for (intptr_t script_idx = 0; script_idx <
scripts.Length(); script_idx++) {
1487 const bool dart_scheme = uri.StartsWith(Symbols::DartScheme());
1488 if (dart_scheme) {
1489
1490
1491
1492 continue;
1493 }
1495
1496 continue;
1497 }
1498
1499 if (force_reload || ScriptModifiedSince(script, last_reload)) {
1500 modified_sources_uris.Add(uri.ToCString());
1501 }
1502 }
1503 }
1504
1505
1506
1507 if (packages_url != nullptr) {
1508 if (IsolateGroupReloadContext::file_modified_callback_ == nullptr ||
1509 (*IsolateGroupReloadContext::file_modified_callback_)(packages_url,
1510 last_reload)) {
1511 modified_sources_uris.Add(packages_url);
1512 }
1513 }
1514
1515 *
count = modified_sources_uris.length();
1517 return;
1518 }
1519
1521 for (intptr_t i = 0; i < *
count; ++i) {
1522 (*modified_sources)[i].
uri = modified_sources_uris[i];
1523 (*modified_sources)[i].source = nullptr;
1524 }
1525}
1526
1527void ProgramReloadContext::CheckpointLibraries() {
1529 TIR_Print(
"---- CHECKPOINTING LIBRARIES\n");
1530
1531 const Library& root_lib = Library::Handle(object_store()->root_library());
1532 saved_root_library_ = root_lib.ptr();
1533
1534
1535 const GrowableObjectArray&
libs =
1536 GrowableObjectArray::Handle(object_store()->libraries());
1537 saved_libraries_ =
libs.ptr();
1538
1539
1540
1541 const GrowableObjectArray& new_libs =
1542 GrowableObjectArray::Handle(GrowableObjectArray::New(Heap::kOld));
1543 Library& lib = Library::Handle();
1544 UnorderedHashSet<LibraryMapTraits> old_libraries_set(
1545 old_libraries_set_storage_);
1546
1547 group_reload_context_->saved_libs_transitive_updated_ =
new (
Z)
1548 BitVector(
Z, group_reload_context_->modified_libs_transitive_->length());
1549 for (intptr_t i = 0; i <
libs.Length(); i++) {
1551 if (group_reload_context_->modified_libs_->Contains(i)) {
1552
1553 lib.set_index(-1);
1554 } else {
1555
1556 lib.set_index(new_libs.Length());
1557 new_libs.Add(lib, Heap::kOld);
1558
1559 if (group_reload_context_->modified_libs_transitive_->Contains(i)) {
1560
1561 group_reload_context_->saved_libs_transitive_updated_->Add(lib.index());
1562 }
1563 }
1564
1565 bool already_present = old_libraries_set.Insert(lib);
1566 ASSERT(!already_present);
1567
1568 lib.EvaluatePragmas();
1569 }
1570 old_libraries_set_storage_ = old_libraries_set.Release().ptr();
1571
1572
1573 Library::RegisterLibraries(Thread::Current(), new_libs);
1574
1575 object_store()->set_root_library(Library::Handle());
1576}
1577
1578void ProgramReloadContext::RollbackLibraries() {
1579 TIR_Print(
"---- ROLLING BACK LIBRARY CHANGES\n");
1580 Thread* thread = Thread::Current();
1581 Library& lib = Library::Handle();
1582 const auto& saved_libs = GrowableObjectArray::Handle(
Z, saved_libraries_);
1583 if (!saved_libs.IsNull()) {
1584 for (intptr_t i = 0; i < saved_libs.Length(); i++) {
1585 lib = Library::RawCast(saved_libs.At(i));
1586
1587 lib.set_index(i);
1588 }
1589
1590
1591 Library::RegisterLibraries(thread, saved_libs);
1592 }
1593
1594 Library& saved_root_lib = Library::Handle(
Z, saved_root_library_);
1595 if (!saved_root_lib.IsNull()) {
1596 object_store()->set_root_library(saved_root_lib);
1597 }
1598
1599 saved_root_library_ = Library::null();
1600 saved_libraries_ = GrowableObjectArray::null();
1601}
1602
#ifdef DEBUG
// Debug-only consistency check: verifies no two distinct old classes map to
// the same new class.
void ProgramReloadContext::VerifyMaps() {
  Class& cls = Class::Handle();
  Class& new_cls = Class::Handle();
  Class& cls2 = Class::Handle();

  // Build the reverse map (old -> new) and fail if an old class is seen for
  // two different new classes.
  UnorderedHashMap<ClassMapTraits> class_map(class_map_storage_);
  UnorderedHashMap<ClassMapTraits> reverse_class_map(
      HashTables::New<UnorderedHashMap<ClassMapTraits> >(
          class_map.NumOccupied()));
  {
    UnorderedHashMap<ClassMapTraits>::Iterator it(&class_map);
    while (it.MoveNext()) {
      const intptr_t entry = it.Current();
      new_cls = Class::RawCast(class_map.GetKey(entry));
      cls = Class::RawCast(class_map.GetPayload(entry, 0));
      cls2 ^= reverse_class_map.GetOrNull(new_cls);
      if (!cls2.IsNull()) {
        OS::PrintErr(
            "Classes '%s' and '%s' are distinct classes but both map "
            " to class '%s'\n",
            cls.ToCString(), cls2.ToCString(), new_cls.ToCString());
        // NOTE(review): restored from upstream — duplicate mapping is fatal.
        UNREACHABLE();
      }
      bool update = reverse_class_map.UpdateOrInsert(cls, new_cls);
      // NOTE(review): restored from upstream.
      ASSERT(!update);
    }
  }
  // Release both handle-backed tables (writes back their storage arrays).
  class_map.Release();
  reverse_class_map.Release();
}
#endif
1638
1639void ProgramReloadContext::CommitBeforeInstanceMorphing() {
1641
1642#ifdef DEBUG
1643 VerifyMaps();
1644#endif
1645
1646
1647
1648 {
1650 Library& lib = Library::Handle();
1651 Library& new_lib = Library::Handle();
1652
1653 UnorderedHashMap<LibraryMapTraits> lib_map(library_map_storage_);
1654
1655 {
1656
1657 UnorderedHashMap<LibraryMapTraits>::Iterator it(&lib_map);
1658
1659 while (it.MoveNext()) {
1660 const intptr_t entry = it.Current();
1662 new_lib = Library::RawCast(lib_map.GetKey(entry));
1663 lib = Library::RawCast(lib_map.GetPayload(entry, 0));
1664 new_lib.set_debuggable(lib.IsDebuggable());
1665
1666 new_lib.set_native_entry_resolver(lib.native_entry_resolver());
1667 new_lib.set_native_entry_symbol_resolver(
1668 lib.native_entry_symbol_resolver());
1669 new_lib.set_ffi_native_resolver(lib.ffi_native_resolver());
1670 new_lib.CopyPragmas(lib);
1671 }
1672 }
1673
1674
1675 lib_map.Release();
1676 }
1677
1678 {
1680
1681
1682
1683 Class& old_cls = Class::Handle();
1684 Class& new_cls = Class::Handle();
1685 UnorderedHashMap<ClassMapTraits> class_map(class_map_storage_);
1686
1687 {
1688 UnorderedHashMap<ClassMapTraits>::Iterator it(&class_map);
1689 while (it.MoveNext()) {
1690 const intptr_t entry = it.Current();
1691 new_cls = Class::RawCast(class_map.GetKey(entry));
1692 old_cls = Class::RawCast(class_map.GetPayload(entry, 0));
1693 if (new_cls.ptr() != old_cls.ptr()) {
1694 ASSERT(new_cls.is_enum_class() == old_cls.is_enum_class());
1695 new_cls.CopyStaticFieldValues(this, old_cls);
1696 old_cls.PatchFieldsAndFunctions();
1697 old_cls.MigrateImplicitStaticClosures(this, new_cls);
1698 }
1699 }
1700 }
1701
1702 class_map.Release();
1703
1704 {
1705 UnorderedHashSet<ClassMapTraits> removed_class_set(
1706 removed_class_set_storage_);
1707 UnorderedHashSet<ClassMapTraits>::Iterator it(&removed_class_set);
1708 while (it.MoveNext()) {
1709 const intptr_t entry = it.Current();
1710 old_cls ^= removed_class_set.GetKey(entry);
1711 old_cls.PatchFieldsAndFunctions();
1712 }
1713 removed_class_set.Release();
1714 }
1715 }
1716
1717 {
1719
1720 Library& lib = Library::Handle();
1721 const GrowableObjectArray&
libs =
1722 GrowableObjectArray::Handle(
IG->object_store()->libraries());
1723 for (intptr_t i = 0; i <
libs.Length(); i++) {
1724 lib = Library::RawCast(
libs.At(i));
1725 VTIR_Print(
"Lib '%s' at index %" Pd "\n", lib.ToCString(), i);
1726 lib.set_index(i);
1727 }
1728
1729
1730 library_infos_.SetLength(
libs.Length());
1731 for (intptr_t i = 0; i <
libs.Length(); i++) {
1732 lib = Library::RawCast(
libs.At(i));
1733
1734 library_infos_[i].dirty =
1735 i >= group_reload_context_->num_saved_libs_ ||
1736 group_reload_context_->saved_libs_transitive_updated_->Contains(
1737 lib.index());
1738 }
1739 }
1740}
1741
1742void ProgramReloadContext::CommitAfterInstanceMorphing() {
1743
1744
1745 {
1747 IG->RehashConstants(&become_);
1748 }
1749 {
1751 become_.Forward();
1752 }
1753
1754 if (FLAG_identity_reload) {
1755 const auto& saved_libs = GrowableObjectArray::Handle(saved_libraries_);
1756 const GrowableObjectArray&
libs =
1757 GrowableObjectArray::Handle(
IG->object_store()->libraries());
1758 if (saved_libs.Length() !=
libs.Length()) {
1759 TIR_Print(
"Identity reload failed! B#L=%" Pd " A#L=%" Pd "\n",
1760 saved_libs.Length(),
libs.Length());
1761 }
1762 }
1763}
1764
1765bool ProgramReloadContext::IsDirty(const Library& lib) {
1766 const intptr_t index = lib.index();
1767 if (index ==
static_cast<classid_t>(-1)) {
1768
1769 return true;
1770 }
1771 ASSERT((index >= 0) && (index < library_infos_.length()));
1772 return library_infos_[index].dirty;
1773}
1774
1775void ProgramReloadContext::PostCommit() {
1777 saved_root_library_ = Library::null();
1778 saved_libraries_ = GrowableObjectArray::null();
1779 InvalidateWorld();
1780}
1781
1782void IsolateGroupReloadContext::AddReasonForCancelling(
1783 ReasonForCancelling* reason) {
1784 reasons_to_cancel_reload_.Add(reason);
1785}
1786
1787void IsolateGroupReloadContext::EnsureHasInstanceMorpherFor(
1788 classid_t cid,
1789 InstanceMorpher* instance_morpher) {
1790 for (intptr_t i = 0; i < instance_morphers_.length(); ++i) {
1791 if (instance_morphers_[i]->
cid() == cid) {
1792 return;
1793 }
1794 }
1795 instance_morphers_.Add(instance_morpher);
1796 instance_morpher_by_cid_.Insert(instance_morpher);
1797 ASSERT(instance_morphers_[instance_morphers_.length() - 1]->cid() == cid);
1798}
1799
1800void IsolateGroupReloadContext::ReportReasonsForCancelling() {
1801 ASSERT(FLAG_reload_force_rollback || HasReasonsForCancelling());
1802 for (int i = 0; i < reasons_to_cancel_reload_.length(); i++) {
1803 reasons_to_cancel_reload_.At(i)->Report(this);
1804 }
1805}
1806
1807void IsolateGroupReloadContext::MorphInstancesPhase1Allocate(
1808 ObjectLocator* locator,
1809 Become* become) {
1810 ASSERT(HasInstanceMorphers());
1811
1812 if (FLAG_trace_reload) {
1813 LogBlock blocker;
1815 for (intptr_t i = 0; i < instance_morphers_.length(); i++) {
1816 instance_morphers_.At(i)->Dump();
1817 }
1818 }
1819
1820 const intptr_t
count = locator->count();
1822 (
count > 1) ?
"s" :
"");
1823
1824 for (intptr_t i = 0; i < instance_morphers_.length(); i++) {
1825 instance_morphers_.At(i)->CreateMorphedCopies(become);
1826 }
1827}
1828
1829void IsolateGroupReloadContext::MorphInstancesPhase2Become(Become* become) {
1830 ASSERT(HasInstanceMorphers());
1831
1832 become->Forward();
1833
1834
1835}
1836
1837void IsolateGroupReloadContext::ForEachIsolate(
1838 std::function<
void(Isolate*)>
callback) {
1839 isolate_group_->ForEachIsolate(
callback);
1840}
1841
1842void ProgramReloadContext::ValidateReload() {
1844
1846
1847
1848 {
1849 ASSERT(library_map_storage_ != Array::null());
1850 UnorderedHashMap<LibraryMapTraits>
map(library_map_storage_);
1851 UnorderedHashMap<LibraryMapTraits>::Iterator it(&map);
1852 Library& lib = Library::Handle();
1853 Library& new_lib = Library::Handle();
1854 while (it.MoveNext()) {
1855 const intptr_t entry = it.Current();
1856 new_lib = Library::RawCast(
map.GetKey(entry));
1857 lib = Library::RawCast(
map.GetPayload(entry, 0));
1858 if (new_lib.ptr() != lib.ptr()) {
1859 lib.CheckReload(new_lib, this);
1860 }
1861 }
1863 }
1864
1865
1866 {
1867 ASSERT(class_map_storage_ != Array::null());
1868 UnorderedHashMap<ClassMapTraits>
map(class_map_storage_);
1869 UnorderedHashMap<ClassMapTraits>::Iterator it(&map);
1870 Class& cls = Class::Handle();
1871 Class& new_cls = Class::Handle();
1872 while (it.MoveNext()) {
1873 const intptr_t entry = it.Current();
1874 new_cls = Class::RawCast(
map.GetKey(entry));
1875 cls = Class::RawCast(
map.GetPayload(entry, 0));
1876 if (new_cls.ptr() != cls.ptr()) {
1877 cls.CheckReload(new_cls, this);
1878 }
1879 }
1881 }
1882}
1883
1884void IsolateGroupReloadContext::VisitObjectPointers(
1885 ObjectPointerVisitor* visitor) {
1886 visitor->VisitPointers(from(), to());
1887}
1888
1889void ProgramReloadContext::VisitObjectPointers(ObjectPointerVisitor* visitor) {
1890 visitor->VisitPointers(from(), to());
1891}
1892
1893ObjectStore* ProgramReloadContext::object_store() {
1894 return IG->object_store();
1895}
1896
1897void ProgramReloadContext::ResetUnoptimizedICsOnStack() {
1898 Thread* thread = Thread::Current();
1899 StackZone stack_zone(thread);
1900 Zone* zone = stack_zone.GetZone();
1901 Code&
code = Code::Handle(zone);
1902 Function& function = Function::Handle(zone);
1903 CallSiteResetter resetter(zone);
1904
1905 IG->ForEachIsolate([&](Isolate* isolate) {
1906 if (isolate->mutator_thread() == nullptr) {
1907 return;
1908 }
1909 DartFrameIterator iterator(isolate->mutator_thread(),
1910 StackFrameIterator::kAllowCrossThreadIteration);
1911 StackFrame*
frame = iterator.NextFrame();
1912 while (
frame !=
nullptr) {
1914 if (
code.is_optimized() && !
code.is_force_optimized()) {
1915
1916
1917
1918 function =
code.function();
1919 code =
function.unoptimized_code();
1921 resetter.ResetSwitchableCalls(code);
1922 resetter.ResetCaches(code);
1923 } else {
1924 resetter.ResetSwitchableCalls(code);
1925 resetter.ResetCaches(code);
1926 }
1927 frame = iterator.NextFrame();
1928 }
1929 });
1930}
1931
1932void ProgramReloadContext::ResetMegamorphicCaches() {
1933 object_store()->set_megamorphic_cache_table(GrowableObjectArray::Handle());
1934
1935
1936
1937
1938}
1939
1940class InvalidationCollector : public ObjectVisitor {
1941 public:
1942 InvalidationCollector(Zone* zone,
1943 GrowableArray<const Function*>* functions,
1944 GrowableArray<const KernelProgramInfo*>* kernel_infos,
1945 GrowableArray<const Field*>* fields,
1946 GrowableArray<const SuspendState*>* suspend_states,
1947 GrowableArray<const Instance*>* instances)
1948 : zone_(zone),
1949 functions_(functions),
1950 kernel_infos_(kernel_infos),
1951 fields_(fields),
1952 suspend_states_(suspend_states),
1953 instances_(instances) {}
1954 virtual ~InvalidationCollector() {}
1955
1956 void VisitObject(ObjectPtr obj) override {
1957 intptr_t
cid = obj->GetClassId();
1958 if (cid == kFunctionCid) {
1959 const Function& func =
1960 Function::Handle(zone_, static_cast<FunctionPtr>(obj));
1961 functions_->Add(&func);
1962 } else if (cid == kKernelProgramInfoCid) {
1963 kernel_infos_->Add(&KernelProgramInfo::Handle(
1964 zone_, static_cast<KernelProgramInfoPtr>(obj)));
1965 } else if (cid == kFieldCid) {
1966 fields_->Add(&Field::Handle(zone_, static_cast<FieldPtr>(obj)));
1967 } else if (cid == kSuspendStateCid) {
1968 const auto& suspend_state =
1969 SuspendState::Handle(zone_, static_cast<SuspendStatePtr>(obj));
1970 if (suspend_state.pc() != 0) {
1971 suspend_states_->Add(&suspend_state);
1972 }
1973 } else if (cid > kNumPredefinedCids) {
1974 instances_->Add(&Instance::Handle(zone_, static_cast<InstancePtr>(obj)));
1975 }
1976 }
1977
1978 private:
1979 Zone* const zone_;
1980 GrowableArray<const Function*>* const functions_;
1981 GrowableArray<const KernelProgramInfo*>* const kernel_infos_;
1982 GrowableArray<const Field*>* const fields_;
1983 GrowableArray<const SuspendState*>* const suspend_states_;
1984 GrowableArray<const Instance*>* const instances_;
1985};
1986
1987void ProgramReloadContext::RunInvalidationVisitors() {
1988 TIR_Print(
"---- RUNNING INVALIDATION HEAP VISITORS\n");
1989 Thread* thread = Thread::Current();
1990 StackZone stack_zone(thread);
1991 Zone* zone = stack_zone.GetZone();
1992
1993 GrowableArray<const Function*> functions(4 * KB);
1994 GrowableArray<const KernelProgramInfo*> kernel_infos(KB);
1995 GrowableArray<const Field*> fields(4 * KB);
1996 GrowableArray<const SuspendState*> suspend_states(4 * KB);
1997 GrowableArray<const Instance*> instances(4 * KB);
1998
1999 {
2001 HeapIterationScope iteration(thread);
2002 InvalidationCollector visitor(zone, &functions, &kernel_infos, &fields,
2003 &suspend_states, &instances);
2004 iteration.IterateObjects(&visitor);
2005 }
2006
2007 InvalidateKernelInfos(zone, kernel_infos);
2008 InvalidateSuspendStates(zone, suspend_states);
2009 InvalidateFields(zone, fields, instances);
2010
2011
2012
2013 InvalidateFunctions(zone, functions);
2014}
2015
2016void ProgramReloadContext::InvalidateKernelInfos(
2017 Zone* zone,
2018 const GrowableArray<const KernelProgramInfo*>& kernel_infos) {
2021
2022 Array&
data = Array::Handle(zone);
2023 Object&
key = Object::Handle(zone);
2024 Smi&
value = Smi::Handle(zone);
2025 for (intptr_t i = 0; i < kernel_infos.length(); i++) {
2026 const KernelProgramInfo&
info = *kernel_infos[i];
2027
2028 {
2033 info.set_libraries_cache(
table.Release());
2034 }
2035
2036 {
2041 info.set_classes_cache(
table.Release());
2042 }
2043 }
2044}
2045
2046void ProgramReloadContext::InvalidateFunctions(
2047 Zone* zone,
2048 const GrowableArray<const Function*>& functions) {
2050 auto thread = Thread::Current();
2052
2053 CallSiteResetter resetter(zone);
2054
2055 Class& owning_class = Class::Handle(zone);
2056 Library& owning_lib = Library::Handle(zone);
2057 Code&
code = Code::Handle(zone);
2058 Field& field = Field::Handle(zone);
2059 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
2060 for (intptr_t i = 0; i < functions.length(); i++) {
2061 const Function& func = *functions[i];
2062
2063
2064 if (func.ForceOptimize()) continue;
2065
2066
2067 func.SwitchToLazyCompiledUnoptimizedCode();
2068
2069
2070 code = func.CurrentCode();
2072
2073
2074
2075
2076 bool recompile_for_load_guard = false;
2077 if (func.IsImplicitGetterFunction() ||
2078 func.IsImplicitStaticGetterFunction()) {
2079 field = func.accessor_field();
2080 recompile_for_load_guard = field.needs_load_guard();
2081 }
2082
2083 owning_class = func.Owner();
2084 owning_lib = owning_class.library();
2085 const bool clear_unoptimized_code =
2086 IsDirty(owning_lib) || recompile_for_load_guard;
2087 const bool stub_code =
code.IsStubCode();
2088
2089
2090
2091 resetter.ZeroEdgeCounters(func);
2092
2093 if (stub_code) {
2094
2095 } else if (clear_unoptimized_code) {
2096 VTIR_Print(
"Marking %s for recompilation, clearing code\n",
2097 func.ToCString());
2098
2099 func.ClearICDataArray();
2100 func.ClearCode();
2101 func.SetWasCompiled(false);
2102 } else {
2103
2104
2105 resetter.ResetSwitchableCalls(code);
2106 resetter.ResetCaches(code);
2107 }
2108
2109
2110 func.set_usage_counter(0);
2111 func.set_deoptimization_counter(0);
2112 func.set_optimized_instruction_count(0);
2113 func.set_optimized_call_site_count(0);
2114 }
2115}
2116
2117void ProgramReloadContext::InvalidateSuspendStates(
2118 Zone* zone,
2119 const GrowableArray<const SuspendState*>& suspend_states) {
2121 auto thread = Thread::Current();
2123
2124 CallSiteResetter resetter(zone);
2125 Code&
code = Code::Handle(zone);
2126 Function& function = Function::Handle(zone);
2127
2128 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
2129 for (intptr_t i = 0, n = suspend_states.length(); i < n; ++i) {
2130 const SuspendState& suspend_state = *suspend_states[i];
2131 ASSERT(suspend_state.pc() != 0);
2132 code = suspend_state.GetCodeObject();
2134 if (
code.is_optimized() && !
code.is_force_optimized()) {
2135 function =
code.function();
2136
2137
2138 function.SwitchToLazyCompiledUnoptimizedCode();
2139
2140
2141
2142 if (!
code.IsDisabled()) {
2143 code.DisableDartCode();
2144 }
2145
2146
2147
2148 code =
function.unoptimized_code();
2149 if (!
code.IsNull()) {
2150 resetter.ResetSwitchableCalls(code);
2151 resetter.ResetCaches(code);
2152 }
2153 } else {
2154 function =
code.function();
2155
2156
2157
2158
2159 function.EnsureHasCompiledUnoptimizedCode();
2160 resetter.ResetSwitchableCalls(code);
2161 resetter.ResetCaches(code);
2162 }
2163 }
2164}
2165
2166
2167
2168
2169
2170class FieldInvalidator {
2171 public:
2172 explicit FieldInvalidator(Zone* zone)
2173 : zone_(zone),
2174 cls_(Class::Handle(zone)),
2175 cls_fields_(Array::Handle(zone)),
2176 entry_(Object::Handle(zone)),
2177 value_(Object::Handle(zone)),
2178 instance_(Instance::Handle(zone)),
2179 type_(AbstractType::Handle(zone)),
2180 cache_(SubtypeTestCache::Handle(zone)),
2181 result_(Bool::Handle(zone)),
2182 closure_function_(Function::Handle(zone)),
2183 instantiator_type_arguments_(TypeArguments::Handle(zone)),
2184 function_type_arguments_(TypeArguments::Handle(zone)),
2185 instance_cid_or_signature_(Object::Handle(zone)),
2186 instance_type_arguments_(TypeArguments::Handle(zone)),
2187 parent_function_type_arguments_(TypeArguments::Handle(zone)),
2188 delayed_function_type_arguments_(TypeArguments::Handle(zone)) {}
2189
2190 void CheckStatics(const GrowableArray<const Field*>& fields) {
2191 Thread* thread = Thread::Current();
2193 instantiator_type_arguments_ = TypeArguments::null();
2194 for (intptr_t i = 0; i < fields.length(); i++) {
2195 const Field& field = *fields[i];
2196 if (!field.is_static()) {
2197 continue;
2198 }
2199 if (field.needs_load_guard()) {
2200 continue;
2201 }
2202 const intptr_t field_id = field.field_id();
2203 thread->isolate_group()->ForEachIsolate([&](Isolate* isolate) {
2204 auto field_table = isolate->field_table();
2205
2206
2207
2208 if (field_table->IsReadyToUse()) {
2209 value_ = field_table->At(field_id);
2210 if ((value_.ptr() != Object::sentinel().ptr()) &&
2211 (value_.ptr() != Object::transition_sentinel().ptr())) {
2212 CheckValueType(value_, field);
2213 }
2214 }
2215 });
2216 }
2217 }
2218
2219 void CheckInstances(const GrowableArray<const Instance*>& instances) {
2220 Thread* thread = Thread::Current();
2222 for (intptr_t i = 0; i < instances.length(); i++) {
2223 CheckInstance(*instances[i]);
2224 }
2225 }
2226
2227 private:
2228 DART_FORCE_INLINE
2229 void CheckInstance(
const Instance&
instance) {
2231 if (cls_.NumTypeArguments() > 0) {
2232 instantiator_type_arguments_ =
instance.GetTypeArguments();
2233 } else {
2234 instantiator_type_arguments_ = TypeArguments::null();
2235 }
2236 cls_fields_ = cls_.OffsetToFieldMap();
2237 for (intptr_t i = 0; i < cls_fields_.Length(); i++) {
2238 entry_ = cls_fields_.At(i);
2239 if (!entry_.IsField()) {
2240 continue;
2241 }
2242 const Field& field = Field::Cast(entry_);
2243 CheckInstanceField(
instance, field);
2244 }
2245 }
2246
2247 DART_FORCE_INLINE
2248 void CheckInstanceField(
const Instance&
instance,
const Field& field) {
2249 if (field.needs_load_guard()) {
2250 return;
2251 }
2252 if (field.is_unboxed()) {
2253
2254 return;
2255 }
2257 if (value_.ptr() == Object::sentinel().ptr()) {
2258 if (field.is_late()) {
2259
2260 return;
2261 }
2262
2263 ASSERT(!FLAG_identity_reload);
2264 field.set_needs_load_guard(true);
2265 return;
2266 }
2267 CheckValueType(value_, field);
2268 }
2269
2270 DART_FORCE_INLINE
2271 bool CheckAssignabilityUsingCache(const Object& value,
2272 const AbstractType&
type) {
2274 if (
type.IsDynamicType()) {
2275 return true;
2276 }
2277
2278 if (
type.IsRecordType()) {
2279 return CheckAssignabilityForRecordType(value, RecordType::Cast(
type));
2280 }
2281
2282 cls_ =
value.clazz();
2283 const intptr_t
cid = cls_.id();
2284 if (cid == kClosureCid) {
2285 const auto&
closure = Closure::Cast(value);
2286 closure_function_ =
closure.function();
2287 instance_cid_or_signature_ = closure_function_.signature();
2288 instance_type_arguments_ =
closure.instantiator_type_arguments();
2289 parent_function_type_arguments_ =
closure.function_type_arguments();
2290 delayed_function_type_arguments_ =
closure.delayed_type_arguments();
2291 } else {
2292 instance_cid_or_signature_ = Smi::New(cid);
2293 if (cls_.NumTypeArguments() > 0) {
2294 instance_type_arguments_ = Instance::Cast(value).GetTypeArguments();
2295 } else {
2296 instance_type_arguments_ = TypeArguments::null();
2297 }
2298 parent_function_type_arguments_ = TypeArguments::null();
2299 delayed_function_type_arguments_ = TypeArguments::null();
2300 }
2301
2302 if (cache_.IsNull()) {
2303
2304 cache_ = SubtypeTestCache::New(SubtypeTestCache::kMaxInputs);
2305 }
2306 if (cache_.HasCheck(
2307 instance_cid_or_signature_,
type, instance_type_arguments_,
2308 instantiator_type_arguments_, function_type_arguments_,
2309 parent_function_type_arguments_, delayed_function_type_arguments_,
2310 nullptr, &result_)) {
2311 return result_.value();
2312 }
2313
2314 instance_ ^=
value.ptr();
2315 if (instance_.IsAssignableTo(
type, instantiator_type_arguments_,
2316 function_type_arguments_)) {
2317
2318
2319 if (cid != kRecordCid) {
2320 cache_.AddCheck(instance_cid_or_signature_,
type,
2321 instance_type_arguments_, instantiator_type_arguments_,
2322 function_type_arguments_,
2323 parent_function_type_arguments_,
2324 delayed_function_type_arguments_, Bool::True());
2325 }
2326 return true;
2327 }
2328
2329 return false;
2330 }
2331
2332 bool CheckAssignabilityForRecordType(const Object& value,
2333 const RecordType&
type) {
2334 if (!
value.IsRecord()) {
2335 return false;
2336 }
2337
2338 const Record& record = Record::Cast(value);
2339 if (record.shape() !=
type.shape()) {
2340 return false;
2341 }
2342
2343
2344 auto& field_value = Object::Handle(zone_);
2345 auto& field_type = AbstractType::Handle(zone_);
2346 const intptr_t num_fields = record.num_fields();
2347 for (intptr_t i = 0; i < num_fields; ++i) {
2348 field_value = record.FieldAt(i);
2349 field_type =
type.FieldTypeAt(i);
2350 if (!CheckAssignabilityUsingCache(field_value, field_type)) {
2351 return false;
2352 }
2353 }
2354 return true;
2355 }
2356
2357 DART_FORCE_INLINE
2358 void CheckValueType(const Object& value, const Field& field) {
2360 type_ = field.type();
2361 if (!CheckAssignabilityUsingCache(value, type_)) {
2362
2363
2364
2365#ifdef DEBUG
2366 if (FLAG_identity_reload && !
value.IsNull()) {
2368 "Type check failed during identity hot reload.\n"
2369 " field: %s\n"
2370 " type: %s\n"
2371 " value: %s\n",
2372 field.ToCString(), type_.ToCString(),
value.ToCString());
2373 }
2374#endif
2375 field.set_needs_load_guard(true);
2376 }
2377 }
2378
2379 Zone* zone_;
2380 Class& cls_;
2381 Array& cls_fields_;
2382 Object& entry_;
2383 Object& value_;
2384 Instance& instance_;
2385 AbstractType& type_;
2386 SubtypeTestCache& cache_;
2387 Bool& result_;
2388 Function& closure_function_;
2389 TypeArguments& instantiator_type_arguments_;
2390 TypeArguments& function_type_arguments_;
2391 Object& instance_cid_or_signature_;
2392 TypeArguments& instance_type_arguments_;
2393 TypeArguments& parent_function_type_arguments_;
2394 TypeArguments& delayed_function_type_arguments_;
2395};
2396
2397void ProgramReloadContext::InvalidateFields(
2398 Zone* zone,
2399 const GrowableArray<const Field*>& fields,
2400 const GrowableArray<const Instance*>& instances) {
2402 SafepointMutexLocker ml(
IG->subtype_test_cache_mutex());
2403 FieldInvalidator invalidator(zone);
2404 invalidator.CheckStatics(fields);
2405 invalidator.CheckInstances(instances);
2406}
2407
2408void ProgramReloadContext::InvalidateWorld() {
2411 ResetMegamorphicCaches();
2412 if (FLAG_trace_deoptimization) {
2414 }
2416 ResetUnoptimizedICsOnStack();
2417 RunInvalidationVisitors();
2418}
2419
2420ClassPtr ProgramReloadContext::OldClassOrNull(const Class& replacement_or_new) {
2421 UnorderedHashSet<ClassMapTraits> old_classes_set(old_classes_set_storage_);
2422 Class& cls = Class::Handle();
2423 cls ^= old_classes_set.GetOrNull(replacement_or_new);
2424 old_classes_set_storage_ = old_classes_set.Release().ptr();
2425 return cls.ptr();
2426}
2427
2428StringPtr ProgramReloadContext::FindLibraryPrivateKey(
2429 const Library& replacement_or_new) {
2430 const Library& old = Library::Handle(OldLibraryOrNull(replacement_or_new));
2431 if (old.IsNull()) {
2432 return String::null();
2433 }
2434#if defined(DEBUG)
2435 VTIR_Print(
"`%s` is getting `%s`'s private key.\n",
2436 String::Handle(replacement_or_new.url()).ToCString(),
2437 String::Handle(old.url()).ToCString());
2438#endif
2439 return old.private_key();
2440}
2441
2442LibraryPtr ProgramReloadContext::OldLibraryOrNull(
2443 const Library& replacement_or_new) {
2444 UnorderedHashSet<LibraryMapTraits> old_libraries_set(
2445 old_libraries_set_storage_);
2446 Library& lib = Library::Handle();
2447 lib ^= old_libraries_set.GetOrNull(replacement_or_new);
2448 old_libraries_set.Release();
2449
2450 if (lib.IsNull() &&
2451 (group_reload_context_->root_url_prefix_ != String::null()) &&
2452 (group_reload_context_->old_root_url_prefix_ != String::null())) {
2453 return OldLibraryOrNullBaseMoved(replacement_or_new);
2454 }
2455 return lib.ptr();
2456}
2457
2458
2459
2460LibraryPtr ProgramReloadContext::OldLibraryOrNullBaseMoved(
2461 const Library& replacement_or_new) {
2462 const String& url_prefix =
2463 String::Handle(group_reload_context_->root_url_prefix_);
2464 const String& old_url_prefix =
2465 String::Handle(group_reload_context_->old_root_url_prefix_);
2466 const intptr_t prefix_length = url_prefix.Length();
2467 const intptr_t old_prefix_length = old_url_prefix.Length();
2468 const String& new_url = String::Handle(replacement_or_new.url());
2470 String::Handle(String::SubString(new_url, prefix_length));
2471 if (!new_url.StartsWith(url_prefix)) {
2472 return Library::null();
2473 }
2474 Library& old = Library::Handle();
2475 String& old_url = String::Handle();
2476 String& old_suffix = String::Handle();
2477 const auto& saved_libs = GrowableObjectArray::Handle(saved_libraries_);
2478 ASSERT(!saved_libs.IsNull());
2479 for (intptr_t i = 0; i < saved_libs.Length(); i++) {
2480 old = Library::RawCast(saved_libs.At(i));
2481 old_url = old.url();
2482 if (!old_url.StartsWith(old_url_prefix)) {
2483 continue;
2484 }
2485 old_suffix = String::SubString(old_url, old_prefix_length);
2486 if (old_suffix.IsNull()) {
2487 continue;
2488 }
2489 if (old_suffix.Equals(suffix)) {
2490 TIR_Print(
"`%s` is moving to `%s`\n", old_url.ToCString(),
2491 new_url.ToCString());
2492 return old.ptr();
2493 }
2494 }
2495 return Library::null();
2496}
2497
2498void ProgramReloadContext::BuildLibraryMapping() {
2499 const GrowableObjectArray&
libs =
2500 GrowableObjectArray::Handle(object_store()->libraries());
2501
2502 Library& replacement_or_new = Library::Handle();
2503 Library& old = Library::Handle();
2504 for (intptr_t i = group_reload_context_->num_saved_libs_; i <
libs.Length();
2505 i++) {
2506 replacement_or_new = Library::RawCast(
libs.At(i));
2507 old = OldLibraryOrNull(replacement_or_new);
2508 if (old.IsNull()) {
2509 if (FLAG_identity_reload) {
2510 TIR_Print(
"Could not find original library for %s\n",
2511 replacement_or_new.ToCString());
2513 }
2514
2515 AddLibraryMapping(replacement_or_new, replacement_or_new);
2516 } else {
2517 ASSERT(!replacement_or_new.is_dart_scheme());
2518
2519 AddLibraryMapping(replacement_or_new, old);
2520
2521 AddBecomeMapping(old, replacement_or_new);
2522 }
2523 }
2524}
2525
2526
2527
2528
2529
2530
2531
2532
2533
2534void ProgramReloadContext::BuildRemovedClassesSet() {
2535
2536 UnorderedHashMap<ClassMapTraits> class_map(class_map_storage_);
2537 UnorderedHashSet<ClassMapTraits> mapped_old_classes_set(
2538 HashTables::New<UnorderedHashSet<ClassMapTraits> >(
2539 class_map.NumOccupied()));
2540 {
2541 UnorderedHashMap<ClassMapTraits>::Iterator it(&class_map);
2542 Class& cls = Class::Handle();
2543 Class& new_cls = Class::Handle();
2544 while (it.MoveNext()) {
2545 const intptr_t entry = it.Current();
2546 new_cls = Class::RawCast(class_map.GetKey(entry));
2547 cls = Class::RawCast(class_map.GetPayload(entry, 0));
2548 mapped_old_classes_set.InsertOrGet(cls);
2549 }
2550 }
2551 class_map.Release();
2552
2553
2554 UnorderedHashMap<LibraryMapTraits> library_map(library_map_storage_);
2555 UnorderedHashMap<LibraryMapTraits>::Iterator it_library(&library_map);
2556 UnorderedHashSet<LibraryMapTraits> mapped_old_library_set(
2557 HashTables::New<UnorderedHashSet<LibraryMapTraits> >(
2558 library_map.NumOccupied()));
2559 {
2560 Library& old_library = Library::Handle();
2561 Library& new_library = Library::Handle();
2562 while (it_library.MoveNext()) {
2563 const intptr_t entry = it_library.Current();
2564 new_library ^= library_map.GetKey(entry);
2565 old_library ^= library_map.GetPayload(entry, 0);
2566 if (new_library.ptr() != old_library.ptr()) {
2567 mapped_old_library_set.InsertOrGet(old_library);
2568 }
2569 }
2570 }
2571
2572
2573
2574
2575 UnorderedHashSet<ClassMapTraits> old_classes_set(old_classes_set_storage_);
2576 UnorderedHashSet<ClassMapTraits>::Iterator it(&old_classes_set);
2577 UnorderedHashSet<ClassMapTraits> removed_class_set(
2578 removed_class_set_storage_);
2579 Class& old_cls = Class::Handle();
2580 Class& new_cls = Class::Handle();
2581 Library& old_library = Library::Handle();
2582 Library& mapped_old_library = Library::Handle();
2583 while (it.MoveNext()) {
2584 const intptr_t entry = it.Current();
2585 old_cls ^= Class::RawCast(old_classes_set.GetKey(entry));
2586 old_library = old_cls.library();
2587 if (old_library.IsNull()) {
2588 continue;
2589 }
2590 mapped_old_library ^= mapped_old_library_set.GetOrNull(old_library);
2591 if (!mapped_old_library.IsNull()) {
2592 new_cls ^= mapped_old_classes_set.GetOrNull(old_cls);
2593 if (new_cls.IsNull()) {
2594 removed_class_set.InsertOrGet(old_cls);
2595 }
2596 }
2597 }
2598 removed_class_set_storage_ = removed_class_set.Release().ptr();
2599
2600 old_classes_set.Release();
2601 mapped_old_classes_set.Release();
2602 mapped_old_library_set.Release();
2603 library_map.Release();
2604}
2605
2606void ProgramReloadContext::AddClassMapping(const Class& replacement_or_new,
2607 const Class& original) {
2608 UnorderedHashMap<ClassMapTraits>
map(class_map_storage_);
2609 bool update =
map.UpdateOrInsert(replacement_or_new, original);
2611
2612
2613 class_map_storage_ =
map.Release().ptr();
2614}
2615
2616void ProgramReloadContext::AddLibraryMapping(const Library& replacement_or_new,
2617 const Library& original) {
2618 UnorderedHashMap<LibraryMapTraits>
map(library_map_storage_);
2619 bool update =
map.UpdateOrInsert(replacement_or_new, original);
2621
2622
2623 library_map_storage_ =
map.Release().ptr();
2624}
2625
2626void ProgramReloadContext::AddStaticFieldMapping(const Field& old_field,
2627 const Field& new_field) {
2628 ASSERT(old_field.is_static());
2629 ASSERT(new_field.is_static());
2630 AddBecomeMapping(old_field, new_field);
2631}
2632
2633void ProgramReloadContext::AddBecomeMapping(const Object& old,
2634 const Object& neu) {
2635 become_.Add(old, neu);
2636}
2637
2638void ProgramReloadContext::RebuildDirectSubclasses() {
2639 ClassTable* class_table =
IG->class_table();
2640 intptr_t num_cids = class_table->NumCids();
2641
2642
2643 Class& cls = Class::Handle();
2644 const GrowableObjectArray& null_list = GrowableObjectArray::Handle();
2645 for (intptr_t i = 1; i < num_cids; i++) {
2646 if (class_table->HasValidClassAt(i)) {
2647 cls = class_table->At(i);
2648 if (!cls.is_declaration_loaded()) {
2649 continue;
2650 }
2651
2652
2653 if (cls.direct_subclasses() != GrowableObjectArray::null()) {
2654 cls.set_direct_subclasses(null_list);
2655 }
2656 if (cls.direct_implementors() != GrowableObjectArray::null()) {
2657 cls.set_direct_implementors(null_list);
2658 }
2659 }
2660 }
2661
2662
2663
2664 AbstractType& super_type = AbstractType::Handle();
2665 Class& super_cls = Class::Handle();
2666
2667 Array& interface_types = Array::Handle();
2668 AbstractType& interface_type = AbstractType::Handle();
2669 Class& interface_class = Class::Handle();
2670
2671 for (intptr_t i = 1; i < num_cids; i++) {
2672 if (class_table->HasValidClassAt(i)) {
2673 cls = class_table->At(i);
2674 if (!cls.is_declaration_loaded()) {
2675 continue;
2676 }
2677 super_type = cls.super_type();
2678 if (!super_type.IsNull() && !super_type.IsObjectType()) {
2679 super_cls = cls.SuperClass();
2680 ASSERT(!super_cls.IsNull());
2681 super_cls.AddDirectSubclass(cls);
2682 }
2683
2684 interface_types = cls.interfaces();
2685 if (!interface_types.IsNull()) {
2686 const intptr_t mixin_index = cls.is_transformed_mixin_application()
2687 ? interface_types.Length() - 1
2688 : -1;
2689 for (intptr_t j = 0; j < interface_types.Length(); ++j) {
2690 interface_type ^= interface_types.At(j);
2691 interface_class = interface_type.type_class();
2692 interface_class.AddDirectImplementor(
2693 cls, i == mixin_index);
2694 }
2695 }
2696 }
2697 }
2698}
2699
2700#endif
2701
2702}
static void info(const char *fmt,...) SK_PRINTF_LIKE(1
static uint32_t hash(const SkShaderBase::GradientInfo &v)
#define RELEASE_ASSERT(cond)
#define THR_Print(format,...)
@ Dart_KernelCompilationStatus_MsgFailed
@ Dart_KernelCompilationStatus_Ok
FlKeyEvent uint64_t FlKeyResponderAsyncCallback callback
const uint8_t uint32_t uint32_t GError ** error
#define HANDLESCOPE(thread)
#define TIMELINE_SCOPE(name)
#define VTIR_Print(format,...)
#define TIR_Print(format,...)
ZoneGrowableArray< FieldMapping > FieldMappingArray
void DeoptimizeFunctionsOnStack()
static bool ContainsScriptUri(const GrowableArray< const char * > &seen_uris, const char *uri)
static ObjectPtr RejectCompilation(Thread *thread)
uint32_t CombineHashes(uint32_t hash, uint32_t other_hash)
static void PropagateLibraryModified(const ZoneGrowableArray< ZoneGrowableArray< intptr_t > * > *imported_by, intptr_t lib_index, BitVector *modified_libs)
Copied in from https://dart-review.googlesource.com/c/sdk/+/77722.
ZoneGrowableArray< intptr_t > FieldOffsetArray
void DeoptimizeTypeTestingStubs()
UnorderedHashMap< SmiTraits > IntHashMap
uint32_t FinalizeHash(uint32_t hash, intptr_t hashbits=kBitsPerInt32)
static ObjectPtr AcceptCompilation(Thread *thread)
static bool HasNoTasks(Heap *heap)
static const char * BoxCidToCString(intptr_t box_cid)
static intptr_t CommonSuffixLength(const char *a, const char *b)
@ ApiError
The Dart error code for an API error.
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot data
std::function< void()> closure
SI auto map(std::index_sequence< I... >, Fn &&fn, const Args &... args) -> skvx::Vec< sizeof...(I), decltype(fn(args[0]...))>
const char * ToString(ax::mojom::Event event)
Dart_KernelCompilationStatus status