Flutter Engine
The Flutter Engine
Classes | Namespaces | Macros | Functions | Variables
isolate_reload.cc File Reference
#include "vm/isolate_reload.h"
#include <memory>
#include "vm/bit_vector.h"
#include "vm/compiler/jit/compiler.h"
#include "vm/dart_api_impl.h"
#include "vm/hash.h"
#include "vm/hash_table.h"
#include "vm/heap/become.h"
#include "vm/heap/safepoint.h"
#include "vm/isolate.h"
#include "vm/kernel_isolate.h"
#include "vm/kernel_loader.h"
#include "vm/log.h"
#include "vm/longjump.h"
#include "vm/object.h"
#include "vm/object_store.h"
#include "vm/parser.h"
#include "vm/runtime_entry.h"
#include "vm/service_event.h"
#include "vm/stack_frame.h"
#include "vm/thread.h"
#include "vm/timeline.h"
#include "vm/type_testing_stubs.h"
#include "vm/visitor.h"

Go to the source code of this file.

Classes

class  dart::ObjectLocator
 
class  dart::ScriptUrlSetTraits
 
class  dart::ClassMapTraits
 
class  dart::LibraryMapTraits
 
class  dart::Aborted
 
class  dart::InvalidationCollector
 
class  dart::FieldInvalidator
 

Namespaces

namespace  dart
 

Macros

#define IG   (isolate_group())
 
#define Z   zone_
 
#define TIMELINE_SCOPE(name)
 

Functions

 dart::DEFINE_FLAG (int, reload_every, 0, "Reload every N stack overflow checks.")
 
 dart::DEFINE_FLAG (bool, trace_reload, false, "Trace isolate reloading")
 
 dart::DEFINE_FLAG (bool, trace_reload_verbose, false, "trace isolate reloading verbose")
 
 dart::DEFINE_FLAG (bool, identity_reload, false, "Enable checks for identity reload.")
 
 dart::DEFINE_FLAG (bool, reload_every_optimized, true, "Only from optimized code.")
 
 dart::DEFINE_FLAG (bool, reload_every_back_off, false, "Double the --reload-every value after each reload.")
 
 dart::DEFINE_FLAG (bool, reload_force_rollback, false, "Force all reloads to fail and rollback.")
 
 dart::DEFINE_FLAG (bool, check_reloaded, false, "Assert that an isolate has reloaded at least once.")
 
 dart::DEFINE_FLAG (bool, gc_during_reload, false, "Cause GC during reload.")
 
 dart::DECLARE_FLAG (bool, trace_deoptimization)
 
static bool dart::HasNoTasks (Heap *heap)
 
static const char * dart::BoxCidToCString (intptr_t box_cid)
 
static intptr_t dart::CommonSuffixLength (const char *a, const char *b)
 
static ObjectPtr dart::AcceptCompilation (Thread *thread)
 
static ObjectPtr dart::RejectCompilation (Thread *thread)
 
static void dart::PropagateLibraryModified (const ZoneGrowableArray< ZoneGrowableArray< intptr_t > * > *imported_by, intptr_t lib_index, BitVector *modified_libs)
 Copied in from https://dart-review.googlesource.com/c/sdk/+/77722.
 
static bool dart::ContainsScriptUri (const GrowableArray< const char * > &seen_uris, const char *uri)
 

Variables

 dart::gc_during_reload
 
 Cause GC during reload.
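
The DEFINE_FLAG entries above declare VM command-line flags; each declaration also produces a FLAG_<name> global that the reload code in this file consults. A minimal illustrative sketch of that pattern (not part of this file; both the flag and the check shown appear in the listing below):

// Declaration, as listed under Functions above: defines the global FLAG_identity_reload.
DEFINE_FLAG(bool, identity_reload, false, "Enable checks for identity reload.");

// Use, as in IsolateGroupReloadContext::Reload below:
if (FLAG_identity_reload) {
  // Extra consistency checks run when an identical program is reloaded.
}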
 

Macro Definition Documentation

◆ IG

#define IG   (isolate_group())

Definition at line 62 of file isolate_reload.cc.

◆ TIMELINE_SCOPE

#define TIMELINE_SCOPE (   name)
Value:
TimelineBeginEndScope tbes##name(Thread::Current(), \
Timeline::GetIsolateStream(), #name)

Definition at line 65 of file isolate_reload.cc.
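
For orientation, a short illustrative sketch (not part of isolate_reload.cc) of how the IG, Z, and TIMELINE_SCOPE shorthands are used by the code listed below. ExampleStep is a hypothetical member function; the individual calls mirror ones that appear in the listing:

void ProgramReloadContext::ExampleStep() {
  TIMELINE_SCOPE(ExampleStep);  // RAII TimelineBeginEndScope on the isolate timeline stream
  // IG expands to (isolate_group()): the IsolateGroup this context belongs to.
  Heap* heap = IG->heap();
  heap->WaitForMarkerTasks(Thread::Current());
  // Z expands to zone_: handles allocated with Z live in the context's zone.
  const Library& root_lib = Library::Handle(Z, object_store()->root_library());
  TIR_Print("Root library: %s\n", root_lib.ToCString());
}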

70 class ObjectLocator : public ObjectVisitor {
71 public:
72 explicit ObjectLocator(IsolateGroupReloadContext* context)
73 : context_(context), count_(0) {}
74
75 void VisitObject(ObjectPtr obj) override {
76 InstanceMorpher* morpher =
77 context_->instance_morpher_by_cid_.LookupValue(obj->GetClassId());
78 if (morpher != nullptr) {
79 morpher->AddObject(obj);
80 count_++;
81 }
82 }
83
84 // Return the number of located objects for morphing.
85 intptr_t count() { return count_; }
86
87 private:
88 IsolateGroupReloadContext* context_;
89 intptr_t count_;
90};
91
92static bool HasNoTasks(Heap* heap) {
93 MonitorLocker ml(heap->old_space()->tasks_lock());
94 return heap->old_space()->tasks() == 0;
95}
96
97InstanceMorpher* InstanceMorpher::CreateFromClassDescriptors(
98 Zone* zone,
99 ClassTable* class_table,
100 const Class& from,
101 const Class& to) {
102 auto mapping = new (zone) FieldMappingArray();
103 auto new_fields_offsets = new (zone) FieldOffsetArray();
104
105 if (from.NumTypeArguments() > 0) {
106 // Add copying of the optional type argument field.
107 intptr_t from_offset = from.host_type_arguments_field_offset();
108 ASSERT(from_offset != Class::kNoTypeArguments);
109 intptr_t to_offset = to.host_type_arguments_field_offset();
110 ASSERT(to_offset != Class::kNoTypeArguments);
111 mapping->Add({from_offset, kIllegalCid});
112 mapping->Add({to_offset, kIllegalCid});
113 }
114
115 // Add copying of the instance fields if matching by name.
116 // Note: currently the type of the fields are ignored.
117 const Array& from_fields = Array::Handle(
118 from.OffsetToFieldMap(IsolateGroup::Current()->heap_walk_class_table()));
119 const Array& to_fields = Array::Handle(to.OffsetToFieldMap());
120 Field& from_field = Field::Handle();
121 Field& to_field = Field::Handle();
122 String& from_name = String::Handle();
123 String& to_name = String::Handle();
124
125 auto ensure_boxed_and_guarded = [&](const Field& field) {
126 field.set_needs_load_guard(true);
127 if (field.is_unboxed()) {
128 to.MarkFieldBoxedDuringReload(class_table, field);
129 }
130 };
131
132 // Scan across all the fields in the new class definition.
133 for (intptr_t i = 0; i < to_fields.Length(); i++) {
134 if (to_fields.At(i) == Field::null()) {
135 continue; // Ignore non-fields.
136 }
137
138 // Grab the field's name.
139 to_field = Field::RawCast(to_fields.At(i));
140 ASSERT(to_field.is_instance());
141 to_name = to_field.name();
142
143 // Did this field not exist in the old class definition?
144 bool new_field = true;
145
146 // Find this field in the old class.
147 for (intptr_t j = 0; j < from_fields.Length(); j++) {
148 if (from_fields.At(j) == Field::null()) {
149 continue; // Ignore non-fields.
150 }
151 from_field = Field::RawCast(from_fields.At(j));
152 ASSERT(from_field.is_instance());
153 from_name = from_field.name();
154 if (from_name.Equals(to_name)) {
155 intptr_t from_box_cid = kIllegalCid;
156 intptr_t to_box_cid = kIllegalCid;
157
158 // Check if either of the fields are unboxed.
159 if ((from_field.is_unboxed() && from_field.type() != to_field.type()) ||
160 (from_field.is_unboxed() != to_field.is_unboxed())) {
161 // For simplicity we just migrate to boxed fields if such
162 // situation occurs.
163 ensure_boxed_and_guarded(to_field);
164 }
165
166 if (from_field.is_unboxed()) {
167 const auto field_cid = from_field.guarded_cid();
168 switch (field_cid) {
169 case kDoubleCid:
170 case kFloat32x4Cid:
171 case kFloat64x2Cid:
172 from_box_cid = field_cid;
173 break;
174 default:
175 from_box_cid = kIntegerCid;
176 break;
177 }
178 }
179
180 if (to_field.is_unboxed()) {
181 const auto field_cid = to_field.guarded_cid();
182 switch (field_cid) {
183 case kDoubleCid:
184 case kFloat32x4Cid:
185 case kFloat64x2Cid:
186 to_box_cid = field_cid;
187 break;
188 default:
189 to_box_cid = kIntegerCid;
190 break;
191 }
192 }
193
194 // Field can't become unboxed if it was boxed.
195 ASSERT(from_box_cid != kIllegalCid || to_box_cid == kIllegalCid);
196
197 // Success
198 mapping->Add({from_field.HostOffset(), from_box_cid});
199 mapping->Add({to_field.HostOffset(), to_box_cid});
200
201 // Field did exist in old class definition.
202 new_field = false;
203 break;
204 }
205 }
206
207 if (new_field) {
208 ensure_boxed_and_guarded(to_field);
209 new_fields_offsets->Add(to_field.HostOffset());
210 }
211 }
212
213 ASSERT(from.id() == to.id());
214 return new (zone)
215 InstanceMorpher(zone, to.id(), from, to, mapping, new_fields_offsets);
216}
217
218InstanceMorpher::InstanceMorpher(Zone* zone,
219 classid_t cid,
220 const Class& old_class,
221 const Class& new_class,
222 FieldMappingArray* mapping,
223 FieldOffsetArray* new_fields_offsets)
224 : zone_(zone),
225 cid_(cid),
226 old_class_(Class::Handle(zone, old_class.ptr())),
227 new_class_(Class::Handle(zone, new_class.ptr())),
228 mapping_(mapping),
229 new_fields_offsets_(new_fields_offsets),
230 before_(zone, 16) {}
231
232void InstanceMorpher::AddObject(ObjectPtr object) {
233 ASSERT(object->GetClassId() == cid_);
234 const Instance& instance = Instance::Cast(Object::Handle(Z, object));
235 before_.Add(&instance);
236}
237
238void InstanceMorpher::CreateMorphedCopies(Become* become) {
239 Instance& after = Instance::Handle(Z);
240 Object& value = Object::Handle(Z);
241 for (intptr_t i = 0; i < before_.length(); i++) {
242 const Instance& before = *before_.At(i);
243
244 // Code can reference constants / canonical objects either directly in the
245 // instruction stream (ia32) or via an object pool.
246 //
247 // We have the following invariants:
248 //
249 // a) Those canonical objects don't change state (i.e. are not mutable):
250 // our optimizer can e.g. execute loads of such constants at
251 // compile-time.
252 //
253 // => We ensure that const-classes with live constants cannot be
254 // reloaded to become non-const classes (see Class::CheckReload).
255 //
256 // b) Those canonical objects live in old space: e.g. on ia32 the
257 // scavenger does not make the RX pages writable and therefore cannot
258 // update pointers embedded in the instruction stream.
259 //
260 // In order to maintain these invariants we ensure to always morph canonical
261 // objects to old space.
262 const bool is_canonical = before.IsCanonical();
263 const Heap::Space space = is_canonical ? Heap::kOld : Heap::kNew;
264 after = Instance::NewAlreadyFinalized(new_class_, space);
265
266 // We preserve the canonical bit of the object, since this object is present
267 // in the class's constants.
268 if (is_canonical) {
269 after.SetCanonical();
270 }
271#if defined(HASH_IN_OBJECT_HEADER)
272 const uint32_t hash = Object::GetCachedHash(before.ptr());
273 Object::SetCachedHashIfNotSet(after.ptr(), hash);
274#endif
275
276 // Morph the context from [before] to [after] using mapping_.
277 for (intptr_t i = 0; i < mapping_->length(); i += 2) {
278 const auto& from = mapping_->At(i);
279 const auto& to = mapping_->At(i + 1);
280 ASSERT(from.offset > 0);
281 ASSERT(to.offset > 0);
282 if (from.box_cid == kIllegalCid) {
283 // Boxed to boxed field migration.
284 ASSERT(to.box_cid == kIllegalCid);
285 // No handle: raw_value might be a ForwardingCorpse for an object
286 // processed earlier in instance morphing
287 ObjectPtr raw_value = before.RawGetFieldAtOffset(from.offset);
288 after.RawSetFieldAtOffset(to.offset, raw_value);
289 } else if (to.box_cid == kIllegalCid) {
290 // Unboxed to boxed field migration.
291 switch (from.box_cid) {
292 case kDoubleCid: {
293 const auto unboxed_value =
294 before.RawGetUnboxedFieldAtOffset<double>(from.offset);
295 value = Double::New(unboxed_value);
296 break;
297 }
298 case kFloat32x4Cid: {
299 const auto unboxed_value =
300 before.RawGetUnboxedFieldAtOffset<simd128_value_t>(from.offset);
301 value = Float32x4::New(unboxed_value);
302 break;
303 }
304 case kFloat64x2Cid: {
305 const auto unboxed_value =
306 before.RawGetUnboxedFieldAtOffset<simd128_value_t>(from.offset);
307 value = Float64x2::New(unboxed_value);
308 break;
309 }
310 case kIntegerCid: {
311 const auto unboxed_value =
312 before.RawGetUnboxedFieldAtOffset<int64_t>(from.offset);
313 value = Integer::New(unboxed_value);
314 break;
315 }
316 }
317 if (is_canonical) {
318 value = Instance::Cast(value).Canonicalize(Thread::Current());
319 }
320 after.RawSetFieldAtOffset(to.offset, value);
321 } else {
322 // Unboxed to unboxed field migration.
323 ASSERT(to.box_cid == from.box_cid);
324 switch (from.box_cid) {
325 case kDoubleCid: {
326 const auto unboxed_value =
327 before.RawGetUnboxedFieldAtOffset<double>(from.offset);
328 after.RawSetUnboxedFieldAtOffset<double>(to.offset, unboxed_value);
329 break;
330 }
331 case kFloat32x4Cid:
332 case kFloat64x2Cid: {
333 const auto unboxed_value =
334 before.RawGetUnboxedFieldAtOffset<simd128_value_t>(from.offset);
335 after.RawSetUnboxedFieldAtOffset<simd128_value_t>(to.offset,
336 unboxed_value);
337 break;
338 }
339 case kIntegerCid: {
340 const auto unboxed_value =
341 before.RawGetUnboxedFieldAtOffset<int64_t>(from.offset);
342 after.RawSetUnboxedFieldAtOffset<int64_t>(to.offset, unboxed_value);
343 break;
344 }
345 }
346 }
347 }
348
349 for (intptr_t i = 0; i < new_fields_offsets_->length(); i++) {
350 const auto& field_offset = new_fields_offsets_->At(i);
351 after.RawSetFieldAtOffset(field_offset, Object::sentinel());
352 }
353
354 // Convert the old instance into a filler object. We will switch to the
355 // new class table before the next heap walk, so there must be no
356 // instances of any class with the old size.
357 Become::MakeDummyObject(before);
358
359 become->Add(before, after);
360 }
361}
362
363static const char* BoxCidToCString(intptr_t box_cid) {
364 switch (box_cid) {
365 case kDoubleCid:
366 return "double";
367 case kFloat32x4Cid:
368 return "float32x4";
369 case kFloat64x2Cid:
370 return "float64x2";
371 case kIntegerCid:
372 return "int64";
373 }
374 return "?";
375}
376
377void InstanceMorpher::Dump() const {
378 LogBlock blocker;
379 THR_Print("Morphing objects with cid: %d via this mapping: ", cid_);
380 for (int i = 0; i < mapping_->length(); i += 2) {
381 const auto& from = mapping_->At(i);
382 const auto& to = mapping_->At(i + 1);
383 THR_Print(" %" Pd "->%" Pd "", from.offset, to.offset);
384 THR_Print(" (%" Pd " -> %" Pd ")", from.box_cid, to.box_cid);
385 if (to.box_cid == kIllegalCid && from.box_cid != kIllegalCid) {
386 THR_Print("[box %s]", BoxCidToCString(from.box_cid));
387 } else if (to.box_cid != kIllegalCid) {
388 THR_Print("[%s]", BoxCidToCString(from.box_cid));
389 }
390 }
391 THR_Print("\n");
392}
393
394void InstanceMorpher::AppendTo(JSONArray* array) {
395 JSONObject jsobj(array);
396 jsobj.AddProperty("type", "ShapeChangeMapping");
397 jsobj.AddProperty64("class-id", cid_);
398 jsobj.AddProperty("instanceCount", before_.length());
399 JSONArray map(&jsobj, "fieldOffsetMappings");
400 for (int i = 0; i < mapping_->length(); i += 2) {
401 const auto& from = mapping_->At(i);
402 const auto& to = mapping_->At(i + 1);
403
404 JSONArray pair(&map);
405 pair.AddValue(from.offset);
406 pair.AddValue(to.offset);
407 if (to.box_cid == kIllegalCid && from.box_cid != kIllegalCid) {
408 pair.AddValueF("box %s", BoxCidToCString(from.box_cid));
409 } else if (to.box_cid != kIllegalCid) {
410 pair.AddValueF("%s", BoxCidToCString(from.box_cid));
411 }
412 }
413}
414
415void ReasonForCancelling::Report(IsolateGroupReloadContext* context) {
416 const Error& error = Error::Handle(ToError());
417 context->ReportError(error);
418}
419
420ErrorPtr ReasonForCancelling::ToError() {
421 // By default create the error returned from ToString.
422 const String& message = String::Handle(ToString());
423 return LanguageError::New(message);
424}
425
426StringPtr ReasonForCancelling::ToString() {
427 UNREACHABLE();
428 return nullptr;
429}
430
431void ReasonForCancelling::AppendTo(JSONArray* array) {
432 JSONObject jsobj(array);
433 jsobj.AddProperty("type", "ReasonForCancelling");
434 const String& message = String::Handle(ToString());
435 jsobj.AddProperty("message", message.ToCString());
436}
437
438ClassReasonForCancelling::ClassReasonForCancelling(Zone* zone,
439 const Class& from,
440 const Class& to)
441 : ReasonForCancelling(zone),
442 from_(Class::ZoneHandle(zone, from.ptr())),
443 to_(Class::ZoneHandle(zone, to.ptr())) {}
444
445void ClassReasonForCancelling::AppendTo(JSONArray* array) {
446 JSONObject jsobj(array);
447 jsobj.AddProperty("type", "ReasonForCancelling");
448 jsobj.AddProperty("class", from_);
449 const String& message = String::Handle(ToString());
450 jsobj.AddProperty("message", message.ToCString());
451}
452
453ErrorPtr IsolateGroupReloadContext::error() const {
454 ASSERT(!reasons_to_cancel_reload_.is_empty());
455 // Report the first error to the surroundings.
456 return reasons_to_cancel_reload_.At(0)->ToError();
457}
458
459class ScriptUrlSetTraits {
460 public:
461 static bool ReportStats() { return false; }
462 static const char* Name() { return "ScriptUrlSetTraits"; }
463
464 static bool IsMatch(const Object& a, const Object& b) {
465 if (!a.IsString() || !b.IsString()) {
466 return false;
467 }
468
469 return String::Cast(a).Equals(String::Cast(b));
470 }
471
472 static uword Hash(const Object& obj) { return String::Cast(obj).Hash(); }
473};
474
475class ClassMapTraits {
476 public:
477 static bool ReportStats() { return false; }
478 static const char* Name() { return "ClassMapTraits"; }
479
480 static bool IsMatch(const Object& a, const Object& b) {
481 if (!a.IsClass() || !b.IsClass()) {
482 return false;
483 }
484 return ProgramReloadContext::IsSameClass(Class::Cast(a), Class::Cast(b));
485 }
486
487 static uword Hash(const Object& obj) {
488 uword class_name_hash = String::HashRawSymbol(Class::Cast(obj).Name());
489 LibraryPtr raw_library = Class::Cast(obj).library();
490 if (raw_library == Library::null()) {
491 return class_name_hash;
492 }
493 return FinalizeHash(
494 CombineHashes(class_name_hash,
495 String::Hash(Library::Handle(raw_library).private_key())),
496 /* hashbits= */ 30);
497 }
498};
499
500class LibraryMapTraits {
501 public:
502 static bool ReportStats() { return false; }
503 static const char* Name() { return "LibraryMapTraits"; }
504
505 static bool IsMatch(const Object& a, const Object& b) {
506 if (!a.IsLibrary() || !b.IsLibrary()) {
507 return false;
508 }
509 return ProgramReloadContext::IsSameLibrary(Library::Cast(a),
510 Library::Cast(b));
511 }
512
513 static uword Hash(const Object& obj) { return Library::Cast(obj).UrlHash(); }
514};
515
516bool ProgramReloadContext::IsSameClass(const Class& a, const Class& b) {
517 // TODO(turnidge): We need to look at generic type arguments for
518 // synthetic mixin classes. Their names are not necessarily unique
519 // currently.
520 const String& a_name = String::Handle(a.Name());
521 const String& b_name = String::Handle(b.Name());
522
523 if (!a_name.Equals(b_name)) {
524 return false;
525 }
526
527 const Library& a_lib = Library::Handle(a.library());
528 const Library& b_lib = Library::Handle(b.library());
529
530 if (a_lib.IsNull() || b_lib.IsNull()) {
531 return a_lib.ptr() == b_lib.ptr();
532 }
533 return (a_lib.private_key() == b_lib.private_key());
534}
535
536bool ProgramReloadContext::IsSameLibrary(const Library& a_lib,
537 const Library& b_lib) {
538 const String& a_lib_url =
539 String::Handle(a_lib.IsNull() ? String::null() : a_lib.url());
540 const String& b_lib_url =
541 String::Handle(b_lib.IsNull() ? String::null() : b_lib.url());
542 return a_lib_url.Equals(b_lib_url);
543}
544
545IsolateGroupReloadContext::IsolateGroupReloadContext(
546 IsolateGroup* isolate_group,
547 ClassTable* class_table,
548 JSONStream* js)
549 : zone_(Thread::Current()->zone()),
550 isolate_group_(isolate_group),
551 class_table_(class_table),
552 start_time_micros_(OS::GetCurrentMonotonicMicros()),
553 reload_timestamp_(OS::GetCurrentTimeMillis()),
554 js_(js),
555 instance_morphers_(zone_, 0),
556 reasons_to_cancel_reload_(zone_, 0),
557 instance_morpher_by_cid_(zone_),
558 root_lib_url_(String::Handle(Z, String::null())),
559 root_url_prefix_(String::null()),
560 old_root_url_prefix_(String::null()) {}
561IsolateGroupReloadContext::~IsolateGroupReloadContext() {}
562
563ProgramReloadContext::ProgramReloadContext(
564 std::shared_ptr<IsolateGroupReloadContext> group_reload_context,
565 IsolateGroup* isolate_group)
566 : zone_(Thread::Current()->zone()),
567 group_reload_context_(group_reload_context),
568 isolate_group_(isolate_group),
569 old_classes_set_storage_(Array::null()),
570 class_map_storage_(Array::null()),
571 removed_class_set_storage_(Array::null()),
572 old_libraries_set_storage_(Array::null()),
573 library_map_storage_(Array::null()),
574 saved_root_library_(Library::null()),
575 saved_libraries_(GrowableObjectArray::null()) {
576 // NOTE: DO NOT ALLOCATE ANY RAW OBJECTS HERE. The ProgramReloadContext is not
577 // associated with the isolate yet and if a GC is triggered here the raw
578 // objects will not be properly accounted for.
579 ASSERT(zone_ != nullptr);
580}
581
582ProgramReloadContext::~ProgramReloadContext() {
583 ASSERT(zone_ == Thread::Current()->zone());
584 ASSERT(IG->class_table() == IG->heap_walk_class_table());
585}
586
587void IsolateGroupReloadContext::ReportError(const Error& error) {
588 IsolateGroup* isolate_group = IsolateGroup::Current();
589 if (IsolateGroup::IsSystemIsolateGroup(isolate_group)) {
590 return;
591 }
592 TIR_Print("ISO-RELOAD: Error: %s\n", error.ToErrorCString());
593 ServiceEvent service_event(isolate_group, ServiceEvent::kIsolateReload);
594 service_event.set_reload_error(&error);
595 Service::HandleEvent(&service_event);
596}
597
598void IsolateGroupReloadContext::ReportSuccess() {
599 IsolateGroup* isolate_group = IsolateGroup::Current();
600 if (IsolateGroup::IsSystemIsolateGroup(isolate_group)) {
601 return;
602 }
603 ServiceEvent service_event(isolate_group, ServiceEvent::kIsolateReload);
604 Service::HandleEvent(&service_event);
605}
606
607class Aborted : public ReasonForCancelling {
608 public:
609 Aborted(Zone* zone, const Error& error)
610 : ReasonForCancelling(zone),
611 error_(Error::ZoneHandle(zone, error.ptr())) {}
612
613 private:
614 const Error& error_;
615
616 ErrorPtr ToError() { return error_.ptr(); }
617 StringPtr ToString() {
618 return String::NewFormatted("%s", error_.ToErrorCString());
619 }
620};
621
622static intptr_t CommonSuffixLength(const char* a, const char* b) {
623 const intptr_t a_length = strlen(a);
624 const intptr_t b_length = strlen(b);
625 intptr_t a_cursor = a_length;
626 intptr_t b_cursor = b_length;
627
628 while ((a_cursor >= 0) && (b_cursor >= 0)) {
629 if (a[a_cursor] != b[b_cursor]) {
630 break;
631 }
632 a_cursor--;
633 b_cursor--;
634 }
635
636 ASSERT((a_length - a_cursor) == (b_length - b_cursor));
637 return (a_length - a_cursor);
638}
639
640static ObjectPtr AcceptCompilation(Thread* thread) {
641 TransitionVMToNative transition(thread);
642 Dart_KernelCompilationResult result = KernelIsolate::AcceptCompilation();
643 if (result.status != Dart_KernelCompilationStatus_Ok) {
644 if (result.status != Dart_KernelCompilationStatus_MsgFailed) {
645 FATAL(
646 "An error occurred while accepting the most recent"
647 " compilation results: %s",
648 result.error);
649 }
650 TIR_Print(
651 "An error occurred while accepting the most recent"
652 " compilation results: %s",
653 result.error);
654 Zone* zone = thread->zone();
655 const auto& error_str = String::Handle(zone, String::New(result.error));
656 free(result.error);
657 return ApiError::New(error_str);
658 }
659 return Object::null();
660}
661
662static ObjectPtr RejectCompilation(Thread* thread) {
663 TransitionVMToNative transition(thread);
664 Dart_KernelCompilationResult result = KernelIsolate::RejectCompilation();
665 if (result.status != Dart_KernelCompilationStatus_Ok) {
666 if (result.status != Dart_KernelCompilationStatus_MsgFailed) {
667 FATAL(
668 "An error occurred while rejecting the most recent"
669 " compilation results: %s",
670 result.error);
671 }
672 TIR_Print(
673 "An error occurred while rejecting the most recent"
674 " compilation results: %s",
675 result.error);
676 Zone* zone = thread->zone();
677 const auto& error_str = String::Handle(zone, String::New(result.error));
678 free(result.error);
679 return ApiError::New(error_str);
680 }
681 return Object::null();
682}
683
684// If [root_script_url] is null, attempt to load from [kernel_buffer].
685bool IsolateGroupReloadContext::Reload(bool force_reload,
686 const char* root_script_url,
687 const char* packages_url,
688 const uint8_t* kernel_buffer,
689 intptr_t kernel_buffer_size) {
690 TIMELINE_SCOPE(Reload);
691
692 Thread* thread = Thread::Current();
693 ASSERT(thread->OwnsReloadSafepoint());
694
695 Heap* heap = IG->heap();
696 num_old_libs_ =
697 GrowableObjectArray::Handle(Z, IG->object_store()->libraries()).Length();
698
699 // Grab root library before calling CheckpointBeforeReload.
700 GetRootLibUrl(root_script_url);
701
702 std::unique_ptr<kernel::Program> kernel_program;
703
704 // Reset stats.
705 num_received_libs_ = 0;
706 bytes_received_libs_ = 0;
707 num_received_classes_ = 0;
708 num_received_procedures_ = 0;
709
710 bool did_kernel_compilation = false;
711 bool skip_reload = false;
712 {
713 // Load the kernel program and figure out the modified libraries.
714 intptr_t* p_num_received_classes = nullptr;
715 intptr_t* p_num_received_procedures = nullptr;
716
717 // ReadKernelFromFile checks to see if the file at
718 // root_script_url is a valid .dill file. If that's the case, a Program*
719 // is returned. Otherwise, this is likely a source file that needs to be
720 // compiled, so ReadKernelFromFile returns nullptr.
721 kernel_program = kernel::Program::ReadFromFile(root_script_url);
722 if (kernel_program != nullptr) {
723 num_received_libs_ = kernel_program->library_count();
724 bytes_received_libs_ = kernel_program->binary().LengthInBytes();
725 p_num_received_classes = &num_received_classes_;
726 p_num_received_procedures = &num_received_procedures_;
727 } else {
728 if (kernel_buffer == nullptr || kernel_buffer_size == 0) {
729 char* error = CompileToKernel(force_reload, packages_url,
730 &kernel_buffer, &kernel_buffer_size);
731 did_kernel_compilation = true;
732 if (error != nullptr) {
733 TIR_Print("---- LOAD FAILED, ABORTING RELOAD\n");
734 const auto& error_str = String::Handle(Z, String::New(error));
735 free(error);
736 const ApiError& error = ApiError::Handle(Z, ApiError::New(error_str));
737 AddReasonForCancelling(new Aborted(Z, error));
738 ReportReasonsForCancelling();
739 CommonFinalizeTail(num_old_libs_);
740
741 RejectCompilation(thread);
742 return false;
743 }
744 }
745 const auto& typed_data = ExternalTypedData::Handle(
746 Z, ExternalTypedData::NewFinalizeWithFree(
747 const_cast<uint8_t*>(kernel_buffer), kernel_buffer_size));
748 kernel_program = kernel::Program::ReadFromTypedData(typed_data);
749 }
750
751 NoActiveIsolateScope no_active_isolate_scope;
752
753 IsolateGroupSource* source = IsolateGroup::Current()->source();
754 source->add_loaded_blob(Z,
755 ExternalTypedData::Cast(kernel_program->binary()));
756
757 modified_libs_ = new (Z) BitVector(Z, num_old_libs_);
758 kernel::KernelLoader::FindModifiedLibraries(
759 kernel_program.get(), IG, modified_libs_, force_reload, &skip_reload,
760 p_num_received_classes, p_num_received_procedures);
761 modified_libs_transitive_ = new (Z) BitVector(Z, num_old_libs_);
762 BuildModifiedLibrariesClosure(modified_libs_);
763
764 ASSERT(num_saved_libs_ == -1);
765 num_saved_libs_ = 0;
766 for (intptr_t i = 0; i < modified_libs_->length(); i++) {
767 if (!modified_libs_->Contains(i)) {
768 num_saved_libs_++;
769 }
770 }
771 }
772
773 NoActiveIsolateScope no_active_isolate_scope;
774
775 if (skip_reload) {
776 ASSERT(modified_libs_->IsEmpty());
777 reload_skipped_ = true;
778 ReportOnJSON(js_, num_old_libs_);
779
780 // If we use the CFE and performed a compilation, we need to notify that
781 // we have accepted the compilation to clear some state in the incremental
782 // compiler.
783 if (did_kernel_compilation) {
784 const auto& result = Object::Handle(Z, AcceptCompilation(thread));
785 if (result.IsError()) {
786 const auto& error = Error::Cast(result);
787 AddReasonForCancelling(new Aborted(Z, error));
788 ReportReasonsForCancelling();
789 CommonFinalizeTail(num_old_libs_);
790 return false;
791 }
792 }
793 TIR_Print("---- SKIPPING RELOAD (No libraries were modified)\n");
794 return false;
795 }
796
797 TIR_Print("---- STARTING RELOAD\n");
798
799 intptr_t number_of_isolates = 0;
800 isolate_group_->ForEachIsolate(
801 [&](Isolate* isolate) { number_of_isolates++; });
802
803 // Wait for any concurrent marking tasks to finish and turn off the
804 // concurrent marker during reload as we might be allocating new instances
805 // (constants) when loading the new kernel file and this could cause
806 // inconsistency between the saved class table and the new class table.
807 const bool old_concurrent_mark_flag =
808 heap->old_space()->enable_concurrent_mark();
809 if (old_concurrent_mark_flag) {
810 heap->WaitForMarkerTasks(thread);
811 heap->old_space()->set_enable_concurrent_mark(false);
812 }
813
814 // Ensure all functions on the stack have unoptimized code.
815 // Deoptimize all code that had optimizing decisions that are dependent on
816 // assumptions from field guards or CHA or deferred library prefixes.
817 // TODO(johnmccutchan): Deoptimizing dependent code here (before the reload)
818 // is paranoid. This likely can be moved to the commit phase.
819 IG->program_reload_context()->EnsuredUnoptimizedCodeForStack();
820 IG->program_reload_context()->DeoptimizeDependentCode();
821 IG->program_reload_context()->ReloadPhase1AllocateStorageMapsAndCheckpoint();
822
823 // Renumbering the libraries has invalidated this.
824 modified_libs_ = nullptr;
825 modified_libs_transitive_ = nullptr;
826
827 if (FLAG_gc_during_reload) {
828 // We force the GC to compact, which is more likely to discover untracked
829 // pointers (and other issues, like incorrect class table).
830 heap->CollectAllGarbage(GCReason::kDebugging, /*compact=*/true);
831 }
832
833 // Clone the class table.
834 {
835 TIMELINE_SCOPE(CheckpointClasses);
836 IG->program_reload_context()->CheckpointClasses();
837 }
838
839 if (FLAG_gc_during_reload) {
840 // We force the GC to compact, which is more likely to discover untracked
841 // pointers (and other issues, like incorrect class table).
842 heap->CollectAllGarbage(GCReason::kDebugging, /*compact=*/true);
843 }
844
845 // We synchronously load the hot-reload kernel diff (which includes changed
846 // libraries and any libraries transitively depending on them).
847 //
848 // If loading the hot-reload diff succeeded we'll finalize the loading, which
849 // will either commit or reject the reload request.
850 const auto& result =
851 Object::Handle(Z, IG->program_reload_context()->ReloadPhase2LoadKernel(
852 kernel_program.get(), root_lib_url_));
853
854 if (result.IsError()) {
855 TIR_Print("---- LOAD FAILED, ABORTING RELOAD\n");
856
857 const auto& error = Error::Cast(result);
858 AddReasonForCancelling(new Aborted(Z, error));
859
860 IG->program_reload_context()->ReloadPhase4Rollback();
861 CommonFinalizeTail(num_old_libs_);
862 } else {
863 ASSERT(!reload_skipped_ && !reload_finalized_);
864 TIR_Print("---- LOAD SUCCEEDED\n");
865
866 IG->program_reload_context()->ReloadPhase3FinalizeLoading();
867
868 if (FLAG_gc_during_reload) {
869 // We force the GC to compact, which is more likely to discover untracked
870 // pointers (and other issues, like incorrect class table).
871 heap->CollectAllGarbage(GCReason::kDebugging, /*compact=*/true);
872 }
873
874 // If we use the CFE and performed a compilation, we need to notify that
875 // we have accepted the compilation to clear some state in the incremental
876 // compiler.
877 if (did_kernel_compilation) {
878 TIMELINE_SCOPE(AcceptCompilation);
879 const auto& result = Object::Handle(Z, AcceptCompilation(thread));
880 if (result.IsError()) {
881 const auto& error = Error::Cast(result);
882 AddReasonForCancelling(new Aborted(Z, error));
883 }
884 }
885
886 if (!FLAG_reload_force_rollback && !HasReasonsForCancelling()) {
887 TIR_Print("---- COMMITTING RELOAD\n");
888 isolate_group_->program_reload_context()->ReloadPhase4CommitPrepare();
889 bool discard_class_tables = true;
890 if (HasInstanceMorphers()) {
891 // Find all objects that need to be morphed (reallocated to a new
892 // layout).
893 ObjectLocator locator(this);
894 {
895 TIMELINE_SCOPE(CollectInstances);
896 HeapIterationScope iteration(thread);
897 iteration.IterateObjects(&locator);
898 }
899
900 // We are still using the old class table at this point.
901 if (FLAG_gc_during_reload) {
902 // We force the GC to compact, which is more likely to discover
903 // untracked pointers (and other issues, like incorrect class table).
904 heap->CollectAllGarbage(GCReason::kDebugging, /*compact=*/true);
905 }
906 const intptr_t count = locator.count();
907 if (count > 0) {
908 TIMELINE_SCOPE(MorphInstances);
909
910 // While we are reallocating instances to their new layout, the heap
911 // will contain a mix of instances with the old and new layouts that
912 // have the same cid. This makes the heap unwalkable until the
913 // "become" operation below replaces all the instances of the old
914 // layout with forwarding corpses. Force heap growth to prevent layout
915 // confusion during this period.
916 ForceGrowthScope force_growth(thread);
917 // The HeapIterationScope above ensures no other GC tasks can be
918 // active.
919 ASSERT(HasNoTasks(heap));
920
921 MorphInstancesPhase1Allocate(&locator, IG->become());
922 {
923 // Apply the new class table before "become". Become will replace
924 // all the instances of the old layout with forwarding corpses, then
925 // perform a heap walk to fix references to the forwarding corpses.
926 // During this heap walk, it will encounter instances of the new
927 // layout, so it requires the new class table.
928 ASSERT(HasNoTasks(heap));
929
930 // We accepted the hot-reload and morphed instances. So now we can
931 // commit to the changed class table and delete the saved one.
932 IG->DropOriginalClassTable();
933 }
934 MorphInstancesPhase2Become(IG->become());
935
936 discard_class_tables = false;
937 }
938 // We are using the new class table now.
939 if (FLAG_gc_during_reload) {
940 // We force the GC to compact, which is more likely to discover
941 // untracked pointers (and other issues, like incorrect class table).
942 heap->CollectAllGarbage(GCReason::kDebugging, /*compact=*/true);
943 }
944 }
945 if (FLAG_identity_reload) {
946 if (!discard_class_tables) {
947 TIR_Print("Identity reload failed! Some instances were morphed\n");
948 }
949 if (IG->heap_walk_class_table()->NumCids() !=
950 IG->class_table()->NumCids()) {
951 TIR_Print("Identity reload failed! B#C=%" Pd " A#C=%" Pd "\n",
952 IG->heap_walk_class_table()->NumCids(),
953 IG->class_table()->NumCids());
954 }
955 if (IG->heap_walk_class_table()->NumTopLevelCids() !=
956 IG->class_table()->NumTopLevelCids()) {
957 TIR_Print("Identity reload failed! B#TLC=%" Pd " A#TLC=%" Pd "\n",
958 IG->heap_walk_class_table()->NumTopLevelCids(),
959 IG->class_table()->NumTopLevelCids());
960 }
961 }
962 if (discard_class_tables) {
963 IG->DropOriginalClassTable();
964 }
965 isolate_group_->program_reload_context()->ReloadPhase4CommitFinish();
966 TIR_Print("---- DONE COMMIT\n");
967 isolate_group_->set_last_reload_timestamp(reload_timestamp_);
968 } else {
969 TIR_Print("---- ROLLING BACK");
970 isolate_group_->program_reload_context()->ReloadPhase4Rollback();
971 }
972
973 // ValidateReload mutates the direct subclass information and does
974 // not remove dead subclasses. Rebuild the direct subclass
975 // information from scratch.
976 {
977 SafepointWriteRwLocker ml(thread, IG->program_lock());
978 IG->program_reload_context()->RebuildDirectSubclasses();
979 }
980 const intptr_t final_library_count =
981 GrowableObjectArray::Handle(Z, IG->object_store()->libraries())
982 .Length();
983 CommonFinalizeTail(final_library_count);
984 }
985
986 // Reenable concurrent marking if it was initially on.
987 if (old_concurrent_mark_flag) {
988 heap->old_space()->set_enable_concurrent_mark(true);
989 }
990
991 bool success;
992 if (!result.IsError() || HasReasonsForCancelling()) {
993 ReportSuccess();
994 success = true;
995 } else {
996 ReportReasonsForCancelling();
997 success = false;
998 }
999
1000 Array& null_array = Array::Handle(Z);
1001 // Invalidate the URI mapping caches.
1002 IG->object_store()->set_uri_to_resolved_uri_map(null_array);
1003 IG->object_store()->set_resolved_uri_to_uri_map(null_array);
1004
1005 // Re-queue any shutdown requests so they can inform each isolate's own thread
1006 // to shut down.
1007 if (result.IsUnwindError()) {
1008 const auto& error = UnwindError::Cast(result);
1009 ForEachIsolate([&](Isolate* isolate) {
1010 Isolate::KillIfExists(isolate, error.is_user_initiated()
1011 ? Isolate::kKillMsg
1012 : Isolate::kInternalKillMsg);
1013 });
1014 }
1015
1016 return success;
1017}
1018
1019/// Copied in from https://dart-review.googlesource.com/c/sdk/+/77722.
1020static void PropagateLibraryModified(
1021 const ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>* imported_by,
1022 intptr_t lib_index,
1023 BitVector* modified_libs) {
1024 ZoneGrowableArray<intptr_t>* dep_libs = (*imported_by)[lib_index];
1025 for (intptr_t i = 0; i < dep_libs->length(); i++) {
1026 intptr_t dep_lib_index = (*dep_libs)[i];
1027 if (!modified_libs->Contains(dep_lib_index)) {
1028 modified_libs->Add(dep_lib_index);
1029 PropagateLibraryModified(imported_by, dep_lib_index, modified_libs);
1030 }
1031 }
1032}
1033
1034/// Copied in from https://dart-review.googlesource.com/c/sdk/+/77722.
1035void IsolateGroupReloadContext::BuildModifiedLibrariesClosure(
1036 BitVector* modified_libs) {
1037 const GrowableObjectArray& libs =
1038 GrowableObjectArray::Handle(IG->object_store()->libraries());
1039 Library& lib = Library::Handle();
1040 intptr_t num_libs = libs.Length();
1041
1042 // Construct the imported-by graph.
1043 ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>* imported_by = new (zone_)
1044 ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>(zone_, num_libs);
1045 imported_by->SetLength(num_libs);
1046 for (intptr_t i = 0; i < num_libs; i++) {
1047 (*imported_by)[i] = new (zone_) ZoneGrowableArray<intptr_t>(zone_, 0);
1048 }
1049 Array& ports = Array::Handle();
1050 Namespace& ns = Namespace::Handle();
1051 Library& target = Library::Handle();
1052 String& target_url = String::Handle();
1053
1054 for (intptr_t lib_idx = 0; lib_idx < num_libs; lib_idx++) {
1055 lib ^= libs.At(lib_idx);
1056 ASSERT(lib_idx == lib.index());
1057 if (lib.is_dart_scheme()) {
1058 // We don't care about imports among dart scheme libraries.
1059 continue;
1060 }
1061
1062 // Add imports to the import-by graph.
1063 ports = lib.imports();
1064 for (intptr_t import_idx = 0; import_idx < ports.Length(); import_idx++) {
1065 ns ^= ports.At(import_idx);
1066 if (!ns.IsNull()) {
1067 target = ns.target();
1068 target_url = target.url();
1069 (*imported_by)[target.index()]->Add(lib.index());
1070 }
1071 }
1072
1073 // Add exports to the import-by graph.
1074 ports = lib.exports();
1075 for (intptr_t export_idx = 0; export_idx < ports.Length(); export_idx++) {
1076 ns ^= ports.At(export_idx);
1077 if (!ns.IsNull()) {
1078 target = ns.target();
1079 (*imported_by)[target.index()]->Add(lib.index());
1080 }
1081 }
1082
1083 // Add prefixed imports to the import-by graph.
1084 DictionaryIterator entries(lib);
1085 Object& entry = Object::Handle();
1086 LibraryPrefix& prefix = LibraryPrefix::Handle();
1087 while (entries.HasNext()) {
1088 entry = entries.GetNext();
1089 if (entry.IsLibraryPrefix()) {
1090 prefix ^= entry.ptr();
1091 ports = prefix.imports();
1092 for (intptr_t import_idx = 0; import_idx < ports.Length();
1093 import_idx++) {
1094 ns ^= ports.At(import_idx);
1095 if (!ns.IsNull()) {
1096 target = ns.target();
1097 (*imported_by)[target.index()]->Add(lib.index());
1098 }
1099 }
1100 }
1101 }
1102 }
1103
1104 for (intptr_t lib_idx = 0; lib_idx < num_libs; lib_idx++) {
1105 lib ^= libs.At(lib_idx);
1106 if (lib.is_dart_scheme() || modified_libs_transitive_->Contains(lib_idx)) {
1107 // We don't consider dart scheme libraries during reload. If
1108 // the modified libs set already contains this library, then we
1109 // have already visited it.
1110 continue;
1111 }
1112 if (modified_libs->Contains(lib_idx)) {
1113 modified_libs_transitive_->Add(lib_idx);
1114 PropagateLibraryModified(imported_by, lib_idx, modified_libs_transitive_);
1115 }
1116 }
1117}
1118
1119void IsolateGroupReloadContext::GetRootLibUrl(const char* root_script_url) {
1120 const auto& old_root_lib =
1121 Library::Handle(IG->object_store()->root_library());
1122 ASSERT(!old_root_lib.IsNull());
1123 const auto& old_root_lib_url = String::Handle(old_root_lib.url());
1124
1125 // Root library url.
1126 if (root_script_url != nullptr) {
1127 root_lib_url_ = String::New(root_script_url);
1128 } else {
1129 root_lib_url_ = old_root_lib_url.ptr();
1130 }
1131
1132 // Check to see if the base url of the loaded libraries has moved.
1133 if (!old_root_lib_url.Equals(root_lib_url_)) {
1134 const char* old_root_library_url_c = old_root_lib_url.ToCString();
1135 const char* root_library_url_c = root_lib_url_.ToCString();
1136 const intptr_t common_suffix_length =
1137 CommonSuffixLength(root_library_url_c, old_root_library_url_c);
1138 root_url_prefix_ = String::SubString(
1139 root_lib_url_, 0, root_lib_url_.Length() - common_suffix_length + 1);
1140 old_root_url_prefix_ =
1141 String::SubString(old_root_lib_url, 0,
1142 old_root_lib_url.Length() - common_suffix_length + 1);
1143 }
1144}
1145
1146char* IsolateGroupReloadContext::CompileToKernel(bool force_reload,
1147 const char* packages_url,
1148 const uint8_t** kernel_buffer,
1149 intptr_t* kernel_buffer_size) {
1150 Dart_SourceFile* modified_scripts = nullptr;
1151 intptr_t modified_scripts_count = 0;
1152 FindModifiedSources(force_reload, &modified_scripts, &modified_scripts_count,
1153 packages_url);
1154
1155 Dart_KernelCompilationResult retval = {};
1156 {
1157 const char* root_lib_url = root_lib_url_.ToCString();
1158 TransitionVMToNative transition(Thread::Current());
1159 retval = KernelIsolate::CompileToKernel(
1160 root_lib_url, nullptr, 0, modified_scripts_count, modified_scripts,
1161 /*incremental_compile=*/true,
1162 /*snapshot_compile=*/false,
1163 /*embed_sources=*/true,
1164 /*package_config=*/nullptr,
1165 /*multiroot_filepaths=*/nullptr,
1166 /*multiroot_scheme=*/nullptr);
1167 }
1168 if (retval.status != Dart_KernelCompilationStatus_Ok) {
1169 if (retval.kernel != nullptr) {
1170 free(retval.kernel);
1171 }
1172 return retval.error;
1173 }
1174 *kernel_buffer = retval.kernel;
1175 *kernel_buffer_size = retval.kernel_size;
1176 return nullptr;
1177}
1178
1179void ProgramReloadContext::ReloadPhase1AllocateStorageMapsAndCheckpoint() {
1180 // Preallocate storage for maps.
1181 old_classes_set_storage_ =
1182 HashTables::New<UnorderedHashSet<ClassMapTraits> >(4);
1183 class_map_storage_ = HashTables::New<UnorderedHashMap<ClassMapTraits> >(4);
1184 removed_class_set_storage_ =
1185 HashTables::New<UnorderedHashSet<ClassMapTraits> >(4);
1186 old_libraries_set_storage_ =
1187 HashTables::New<UnorderedHashSet<LibraryMapTraits> >(4);
1188 library_map_storage_ =
1189 HashTables::New<UnorderedHashMap<LibraryMapTraits> >(4);
1190
1191 // While reloading everything we do must be reversible so that we can abort
1192 // safely if the reload fails. This function stashes things to the side and
1193 // prepares the isolate for the reload attempt.
1194 {
1195 TIMELINE_SCOPE(Checkpoint);
1196 CheckpointLibraries();
1197 }
1198}
1199
1200ObjectPtr ProgramReloadContext::ReloadPhase2LoadKernel(
1201 kernel::Program* program,
1202 const String& root_lib_url) {
1203 Thread* thread = Thread::Current();
1204
1205 LongJumpScope jump;
1206 if (setjmp(*jump.Set()) == 0) {
1207 const Object& tmp = kernel::KernelLoader::LoadEntireProgram(program);
1208 if (tmp.IsError()) {
1209 return tmp.ptr();
1210 }
1211
1212 // If the main method disappeared or was not there to begin with, then
1213 // KernelLoader will return null. In this case, look up the library by
1214 // URL.
1215 auto& lib = Library::Handle(Library::RawCast(tmp.ptr()));
1216 if (lib.IsNull()) {
1217 lib = Library::LookupLibrary(thread, root_lib_url);
1218 }
1219 IG->object_store()->set_root_library(lib);
1220 return Object::null();
1221 } else {
1222 return thread->StealStickyError();
1223 }
1224}
1225
1226void ProgramReloadContext::ReloadPhase3FinalizeLoading() {
1227 BuildLibraryMapping();
1228 BuildRemovedClassesSet();
1229 ValidateReload();
1230}
1231
1232void ProgramReloadContext::ReloadPhase4CommitPrepare() {
1233 CommitBeforeInstanceMorphing();
1234}
1235
1236void ProgramReloadContext::ReloadPhase4CommitFinish() {
1237 CommitAfterInstanceMorphing();
1238 PostCommit();
1239}
1240
1241void ProgramReloadContext::ReloadPhase4Rollback() {
1242 IG->RestoreOriginalClassTable();
1243 RollbackLibraries();
1244}
1245
1246void ProgramReloadContext::RegisterClass(const Class& new_cls) {
1247 const Class& old_cls = Class::Handle(OldClassOrNull(new_cls));
1248 if (old_cls.IsNull()) {
1249 if (new_cls.IsTopLevel()) {
1250 IG->class_table()->RegisterTopLevel(new_cls);
1251 } else {
1252 IG->class_table()->Register(new_cls);
1253 }
1254
1255 if (FLAG_identity_reload) {
1256 TIR_Print("Could not find replacement class for %s\n",
1257 new_cls.ToCString());
1258 UNREACHABLE();
1259 }
1260
1261 // New class maps to itself.
1262 AddClassMapping(new_cls, new_cls);
1263 return;
1264 }
1265 VTIR_Print("Registering class: %s\n", new_cls.ToCString());
1266 new_cls.set_id(old_cls.id());
1267 IG->class_table()->SetAt(old_cls.id(), new_cls.ptr());
1268 new_cls.CopyCanonicalConstants(old_cls);
1269 new_cls.CopyDeclarationType(old_cls);
1270 AddBecomeMapping(old_cls, new_cls);
1271 AddClassMapping(new_cls, old_cls);
1272}
1273
1274void IsolateGroupReloadContext::CommonFinalizeTail(
1275 intptr_t final_library_count) {
1276 RELEASE_ASSERT(!reload_finalized_);
1277 ReportOnJSON(js_, final_library_count);
1278 reload_finalized_ = true;
1279}
1280
1281void IsolateGroupReloadContext::ReportOnJSON(JSONStream* stream,
1282 intptr_t final_library_count) {
1283 JSONObject jsobj(stream);
1284 jsobj.AddProperty("type", "ReloadReport");
1285 jsobj.AddProperty("success", reload_skipped_ || !HasReasonsForCancelling());
1286 {
1287 if (HasReasonsForCancelling()) {
1288 // Reload was rejected.
1289 JSONArray array(&jsobj, "notices");
1290 for (intptr_t i = 0; i < reasons_to_cancel_reload_.length(); i++) {
1291 ReasonForCancelling* reason = reasons_to_cancel_reload_.At(i);
1292 reason->AppendTo(&array);
1293 }
1294 return;
1295 }
1296
1297 JSONObject details(&jsobj, "details");
1298 details.AddProperty("finalLibraryCount", final_library_count);
1299 details.AddProperty("receivedLibraryCount", num_received_libs_);
1300 details.AddProperty("receivedLibrariesBytes", bytes_received_libs_);
1301 details.AddProperty("receivedClassesCount", num_received_classes_);
1302 details.AddProperty("receivedProceduresCount", num_received_procedures_);
1303 if (reload_skipped_) {
1304 // Reload was skipped.
1305 details.AddProperty("savedLibraryCount", final_library_count);
1306 details.AddProperty("loadedLibraryCount", static_cast<intptr_t>(0));
1307 } else {
1308 // Reload was successful.
1309 const intptr_t loaded_library_count =
1310 final_library_count - num_saved_libs_;
1311 details.AddProperty("savedLibraryCount", num_saved_libs_);
1312 details.AddProperty("loadedLibraryCount", loaded_library_count);
1313 JSONArray array(&jsobj, "shapeChangeMappings");
1314 for (intptr_t i = 0; i < instance_morphers_.length(); i++) {
1315 instance_morphers_.At(i)->AppendTo(&array);
1316 }
1317 }
1318 }
1319}
1320
1321void ProgramReloadContext::EnsuredUnoptimizedCodeForStack() {
1322 TIMELINE_SCOPE(EnsuredUnoptimizedCodeForStack);
1323
1324 IG->ForEachIsolate([](Isolate* isolate) {
1325 auto thread = isolate->mutator_thread();
1326 if (thread == nullptr) {
1327 return;
1328 }
1329 StackFrameIterator it(ValidationPolicy::kDontValidateFrames, thread,
1330 StackFrameIterator::kAllowCrossThreadIteration);
1331
1332 Function& func = Function::Handle();
1333 while (it.HasNextFrame()) {
1334 StackFrame* frame = it.NextFrame();
1335 if (frame->IsDartFrame()) {
1336 func = frame->LookupDartFunction();
1337 ASSERT(!func.IsNull());
1338 // Force-optimized functions don't need unoptimized code because their
1339 // optimized code cannot deopt.
1340 if (!func.ForceOptimize()) {
1341 func.EnsureHasCompiledUnoptimizedCode();
1342 }
1343 }
1344 }
1345 });
1346}
1347
1348void ProgramReloadContext::DeoptimizeDependentCode() {
1349 TIMELINE_SCOPE(DeoptimizeDependentCode);
1350 ClassTable* class_table = IG->class_table();
1351
1352 const intptr_t bottom = Dart::vm_isolate_group()->class_table()->NumCids();
1353 const intptr_t top = IG->class_table()->NumCids();
1354 Class& cls = Class::Handle();
1355 Array& fields = Array::Handle();
1356 Field& field = Field::Handle();
1357 Thread* thread = Thread::Current();
1358 SafepointWriteRwLocker ml(thread, IG->program_lock());
1359 for (intptr_t cls_idx = bottom; cls_idx < top; cls_idx++) {
1360 if (!class_table->HasValidClassAt(cls_idx)) {
1361 // Skip.
1362 continue;
1363 }
1364
1365 // Deoptimize CHA code.
1366 cls = class_table->At(cls_idx);
1367 ASSERT(!cls.IsNull());
1368
1369 cls.DisableAllCHAOptimizedCode();
1370
1371 // Deoptimize field guard code.
1372 fields = cls.fields();
1373 ASSERT(!fields.IsNull());
1374 for (intptr_t field_idx = 0; field_idx < fields.Length(); field_idx++) {
1375 field = Field::RawCast(fields.At(field_idx));
1376 ASSERT(!field.IsNull());
1377 field.DeoptimizeDependentCode();
1378 }
1379 }
1380
1381 DeoptimizeTypeTestingStubs();
1382
1383 // TODO(rmacnak): Also call LibraryPrefix::InvalidateDependentCode.
1384}
1385
1386void ProgramReloadContext::CheckpointClasses() {
1387 TIR_Print("---- CHECKPOINTING CLASSES\n");
1388 // Checkpoint classes before a reload.
1389
1390 // Before this operation class table which is used for heap scanning and
1391 // the class table used for program loading are the same. After this step
1392 // they will become different until reload is committed (or rolled back).
1393 //
1394 // Note that because GC is always reading from heap_walk_class_table and
1395 // we are not changing that, there is no reason to wait for sweeping
1396 // threads or marking to complete.
1397 RELEASE_ASSERT(IG->class_table() == IG->heap_walk_class_table());
1398
1399 IG->CloneClassTableForReload();
1400
1401 // IG->class_table() is now the clone of heap_walk_class_table.
1402 RELEASE_ASSERT(IG->class_table() != IG->heap_walk_class_table());
1403
1404 ClassTable* class_table = IG->class_table();
1405
1406 // For efficiency, we build a set of classes before the reload. This set
1407 // is used to pair new classes with old classes.
1408 // Add classes to the set. Set is stored in the Array, so adding an element
1409 // may allocate Dart object on the heap and trigger GC.
1410 Class& cls = Class::Handle();
1411 UnorderedHashSet<ClassMapTraits> old_classes_set(old_classes_set_storage_);
1412 for (intptr_t i = 0; i < class_table->NumCids(); i++) {
1413 if (class_table->IsValidIndex(i) && class_table->HasValidClassAt(i)) {
1414 if (i != kFreeListElement && i != kForwardingCorpse) {
1415 cls = class_table->At(i);
1416 bool already_present = old_classes_set.Insert(cls);
1417 ASSERT(!already_present);
1418 }
1419 }
1420 }
1421 for (intptr_t i = 0; i < class_table->NumTopLevelCids(); i++) {
1422 const intptr_t cid = ClassTable::CidFromTopLevelIndex(i);
1423 if (class_table->IsValidIndex(cid) && class_table->HasValidClassAt(cid)) {
1424 cls = class_table->At(cid);
1425 bool already_present = old_classes_set.Insert(cls);
1426 ASSERT(!already_present);
1427 }
1428 }
1429 old_classes_set_storage_ = old_classes_set.Release().ptr();
1430 TIR_Print("---- System had %" Pd " classes\n",
1431 class_table->NumCids() + class_table->NumTopLevelCids());
1432}
1433
1434Dart_FileModifiedCallback IsolateGroupReloadContext::file_modified_callback_ =
1435 nullptr;
1436
1437bool IsolateGroupReloadContext::ScriptModifiedSince(const Script& script,
1438 int64_t since) {
1439 if (IsolateGroupReloadContext::file_modified_callback_ == nullptr) {
1440 return true;
1441 }
1442 // We use the resolved url to determine if the script has been modified.
1443 const String& url = String::Handle(script.resolved_url());
1444 const char* url_chars = url.ToCString();
1445 return (*IsolateGroupReloadContext::file_modified_callback_)(url_chars,
1446 since);
1447}
1448
1449static bool ContainsScriptUri(const GrowableArray<const char*>& seen_uris,
1450 const char* uri) {
1451 for (intptr_t i = 0; i < seen_uris.length(); i++) {
1452 const char* seen_uri = seen_uris.At(i);
1453 size_t seen_len = strlen(seen_uri);
1454 if (seen_len != strlen(uri)) {
1455 continue;
1456 } else if (strncmp(seen_uri, uri, seen_len) == 0) {
1457 return true;
1458 }
1459 }
1460 return false;
1461}
1462
1463void IsolateGroupReloadContext::FindModifiedSources(
1464 bool force_reload,
1465 Dart_SourceFile** modified_sources,
1466 intptr_t* count,
1467 const char* packages_url) {
1468 const int64_t last_reload = isolate_group_->last_reload_timestamp();
1469 GrowableArray<const char*> modified_sources_uris;
1470 const auto& libs =
1471 GrowableObjectArray::Handle(IG->object_store()->libraries());
1472 Library& lib = Library::Handle(Z);
1473 Array& scripts = Array::Handle(Z);
1474 Script& script = Script::Handle(Z);
1475 String& uri = String::Handle(Z);
1476
1477 for (intptr_t lib_idx = 0; lib_idx < libs.Length(); lib_idx++) {
1478 lib ^= libs.At(lib_idx);
1479 if (lib.is_dart_scheme()) {
1480 // We don't consider dart scheme libraries during reload.
1481 continue;
1482 }
1483 scripts = lib.LoadedScripts();
1484 for (intptr_t script_idx = 0; script_idx < scripts.Length(); script_idx++) {
1485 script ^= scripts.At(script_idx);
1486 uri = script.url();
1487 const bool dart_scheme = uri.StartsWith(Symbols::DartScheme());
1488 if (dart_scheme) {
1489 // If a user-defined class mixes in a mixin from dart:*, its list of
1490 // scripts will have a dart:* script as well. We don't consider those
1491 // during reload.
1492 continue;
1493 }
1494 if (ContainsScriptUri(modified_sources_uris, uri.ToCString())) {
1495 // We've already accounted for this script in a prior library.
1496 continue;
1497 }
1498
1499 if (force_reload || ScriptModifiedSince(script, last_reload)) {
1500 modified_sources_uris.Add(uri.ToCString());
1501 }
1502 }
1503 }
1504
1505 // In addition to all sources, we need to check if the .packages file
1506 // contents have been modified.
1507 if (packages_url != nullptr) {
1508 if (IsolateGroupReloadContext::file_modified_callback_ == nullptr ||
1509 (*IsolateGroupReloadContext::file_modified_callback_)(packages_url,
1510 last_reload)) {
1511 modified_sources_uris.Add(packages_url);
1512 }
1513 }
1514
1515 *count = modified_sources_uris.length();
1516 if (*count == 0) {
1517 return;
1518 }
1519
1520 *modified_sources = Z->Alloc<Dart_SourceFile>(*count);
1521 for (intptr_t i = 0; i < *count; ++i) {
1522 (*modified_sources)[i].uri = modified_sources_uris[i];
1523 (*modified_sources)[i].source = nullptr;
1524 }
1525}
1526
1527void ProgramReloadContext::CheckpointLibraries() {
1528 TIMELINE_SCOPE(CheckpointLibraries);
1529 TIR_Print("---- CHECKPOINTING LIBRARIES\n");
1530 // Save the root library in case we abort the reload.
1531 const Library& root_lib = Library::Handle(object_store()->root_library());
1532 saved_root_library_ = root_lib.ptr();
1533
1534 // Save the old libraries array in case we abort the reload.
1535 const GrowableObjectArray& libs =
1536 GrowableObjectArray::Handle(object_store()->libraries());
1537 saved_libraries_ = libs.ptr();
1538
1539 // Make a filtered copy of the old libraries array. Keep "clean" libraries
1540 // that we will use instead of reloading.
1541 const GrowableObjectArray& new_libs =
1542 GrowableObjectArray::Handle(GrowableObjectArray::New(Heap::kOld));
1543 Library& lib = Library::Handle();
1544 UnorderedHashSet<LibraryMapTraits> old_libraries_set(
1545 old_libraries_set_storage_);
1546
1547 group_reload_context_->saved_libs_transitive_updated_ = new (Z)
1548 BitVector(Z, group_reload_context_->modified_libs_transitive_->length());
1549 for (intptr_t i = 0; i < libs.Length(); i++) {
1550 lib ^= libs.At(i);
1551 if (group_reload_context_->modified_libs_->Contains(i)) {
1552 // We are going to reload this library. Clear the index.
1553 lib.set_index(-1);
1554 } else {
1555 // We are preserving this library across the reload, assign its new index
1556 lib.set_index(new_libs.Length());
1557 new_libs.Add(lib, Heap::kOld);
1558
1559 if (group_reload_context_->modified_libs_transitive_->Contains(i)) {
1560 // Remember the new index.
1561 group_reload_context_->saved_libs_transitive_updated_->Add(lib.index());
1562 }
1563 }
1564 // Add old library to old libraries set.
1565 bool already_present = old_libraries_set.Insert(lib);
1566 ASSERT(!already_present);
1567
1568 lib.EvaluatePragmas();
1569 }
1570 old_libraries_set_storage_ = old_libraries_set.Release().ptr();
1571
1572 // Reset the registered libraries to the filtered array.
1573 Library::RegisterLibraries(Thread::Current(), new_libs);
1574 // Reset the root library to null.
1575 object_store()->set_root_library(Library::Handle());
1576}
1577
1578void ProgramReloadContext::RollbackLibraries() {
1579 TIR_Print("---- ROLLING BACK LIBRARY CHANGES\n");
1580 Thread* thread = Thread::Current();
1581 Library& lib = Library::Handle();
1582 const auto& saved_libs = GrowableObjectArray::Handle(Z, saved_libraries_);
1583 if (!saved_libs.IsNull()) {
1584 for (intptr_t i = 0; i < saved_libs.Length(); i++) {
1585 lib = Library::RawCast(saved_libs.At(i));
1586 // Restore indexes that were modified in CheckpointLibraries.
1587 lib.set_index(i);
1588 }
1589
1590 // Reset the registered libraries to the filtered array.
1591 Library::RegisterLibraries(thread, saved_libs);
1592 }
1593
1594 Library& saved_root_lib = Library::Handle(Z, saved_root_library_);
1595 if (!saved_root_lib.IsNull()) {
1596 object_store()->set_root_library(saved_root_lib);
1597 }
1598
1599 saved_root_library_ = Library::null();
1600 saved_libraries_ = GrowableObjectArray::null();
1601}
1602
1603#ifdef DEBUG
1604void ProgramReloadContext::VerifyMaps() {
1605 TIMELINE_SCOPE(VerifyMaps);
1606 Class& cls = Class::Handle();
1607 Class& new_cls = Class::Handle();
1608 Class& cls2 = Class::Handle();
1609
1610 // Verify that two old classes aren't both mapped to the same new
1611 // class. This could happen if the IsSameClass function is broken.
1612 UnorderedHashMap<ClassMapTraits> class_map(class_map_storage_);
1613 UnorderedHashMap<ClassMapTraits> reverse_class_map(
1614 HashTables::New<UnorderedHashMap<ClassMapTraits> >(
1615 class_map.NumOccupied()));
1616 {
1617 UnorderedHashMap<ClassMapTraits>::Iterator it(&class_map);
1618 while (it.MoveNext()) {
1619 const intptr_t entry = it.Current();
1620 new_cls = Class::RawCast(class_map.GetKey(entry));
1621 cls = Class::RawCast(class_map.GetPayload(entry, 0));
1622 cls2 ^= reverse_class_map.GetOrNull(new_cls);
1623 if (!cls2.IsNull()) {
1624 OS::PrintErr(
1625 "Classes '%s' and '%s' are distinct classes but both map "
1626 " to class '%s'\n",
1627 cls.ToCString(), cls2.ToCString(), new_cls.ToCString());
1628 UNREACHABLE();
1629 }
1630 bool update = reverse_class_map.UpdateOrInsert(cls, new_cls);
1631 ASSERT(!update);
1632 }
1633 }
1634 class_map.Release();
1635 reverse_class_map.Release();
1636}
1637#endif
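The debug check above is essentially a one-to-one test on the class map: no two classes may end up paired with the same partner. A standalone illustration of the same idea, using hypothetical string keys in place of Class objects and a std::unordered_map in place of the VM's hash tables:

#include <cassert>
#include <cstdio>
#include <string>
#include <unordered_map>

int main() {
  // Hypothetical class map in the same shape as class_map_storage_:
  // key = new (post-reload) class, value = old (pre-reload) class.
  std::unordered_map<std::string, std::string> class_map = {
      {"A@v2", "A@v1"}, {"B@v2", "B@v1"}};

  // Reverse map used purely as a sanity check, mirroring reverse_class_map:
  // seeing the same old class twice would mean the matching logic paired two
  // distinct classes with one partner.
  std::unordered_map<std::string, std::string> reverse;
  for (const auto& entry : class_map) {
    const bool inserted = reverse.emplace(entry.second, entry.first).second;
    assert(inserted && "two classes were mapped to the same partner");
    (void)inserted;
  }
  std::printf("class map is one-to-one over %zu entries\n", class_map.size());
  return 0;
}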
1638
1639void ProgramReloadContext::CommitBeforeInstanceMorphing() {
1640 TIMELINE_SCOPE(Commit);
1641
1642#ifdef DEBUG
1643 VerifyMaps();
1644#endif
1645
1646 // Copy over certain properties of libraries, e.g. is the library
1647 // debuggable?
1648 {
1649 TIMELINE_SCOPE(CopyLibraryBits);
1650 Library& lib = Library::Handle();
1651 Library& new_lib = Library::Handle();
1652
1653 UnorderedHashMap<LibraryMapTraits> lib_map(library_map_storage_);
1654
1655 {
1656 // Reload existing libraries.
1657 UnorderedHashMap<LibraryMapTraits>::Iterator it(&lib_map);
1658
1659 while (it.MoveNext()) {
1660 const intptr_t entry = it.Current();
1661 ASSERT(entry != -1);
1662 new_lib = Library::RawCast(lib_map.GetKey(entry));
1663 lib = Library::RawCast(lib_map.GetPayload(entry, 0));
1664 new_lib.set_debuggable(lib.IsDebuggable());
1665 // Native extension support.
1666 new_lib.set_native_entry_resolver(lib.native_entry_resolver());
1667 new_lib.set_native_entry_symbol_resolver(
1668 lib.native_entry_symbol_resolver());
1669 new_lib.set_ffi_native_resolver(lib.ffi_native_resolver());
1670 new_lib.CopyPragmas(lib);
1671 }
1672 }
1673
1674 // Release the library map.
1675 lib_map.Release();
1676 }
1677
1678 {
1679 TIMELINE_SCOPE(CopyStaticFieldsAndPatchFieldsAndFunctions);
1680 // Copy static field values from the old classes to the new classes.
1681 // Patch fields and functions in the old classes so that they retain
1682 // the old script.
1683 Class& old_cls = Class::Handle();
1684 Class& new_cls = Class::Handle();
1685 UnorderedHashMap<ClassMapTraits> class_map(class_map_storage_);
1686
1687 {
1688 UnorderedHashMap<ClassMapTraits>::Iterator it(&class_map);
1689 while (it.MoveNext()) {
1690 const intptr_t entry = it.Current();
1691 new_cls = Class::RawCast(class_map.GetKey(entry));
1692 old_cls = Class::RawCast(class_map.GetPayload(entry, 0));
1693 if (new_cls.ptr() != old_cls.ptr()) {
1694 ASSERT(new_cls.is_enum_class() == old_cls.is_enum_class());
1695 new_cls.CopyStaticFieldValues(this, old_cls);
1696 old_cls.PatchFieldsAndFunctions();
1697 old_cls.MigrateImplicitStaticClosures(this, new_cls);
1698 }
1699 }
1700 }
1701
1702 class_map.Release();
1703
1704 {
1705 UnorderedHashSet<ClassMapTraits> removed_class_set(
1706 removed_class_set_storage_);
1707 UnorderedHashSet<ClassMapTraits>::Iterator it(&removed_class_set);
1708 while (it.MoveNext()) {
1709 const intptr_t entry = it.Current();
1710 old_cls ^= removed_class_set.GetKey(entry);
1711 old_cls.PatchFieldsAndFunctions();
1712 }
1713 removed_class_set.Release();
1714 }
1715 }
1716
1717 {
1718 TIMELINE_SCOPE(UpdateLibrariesArray);
1719 // Update the libraries array.
1720 Library& lib = Library::Handle();
1721 const GrowableObjectArray& libs =
1722 GrowableObjectArray::Handle(IG->object_store()->libraries());
1723 for (intptr_t i = 0; i < libs.Length(); i++) {
1724 lib = Library::RawCast(libs.At(i));
1725 VTIR_Print("Lib '%s' at index %" Pd "\n", lib.ToCString(), i);
1726 lib.set_index(i);
1727 }
1728
1729 // Initialize library side table.
1730 library_infos_.SetLength(libs.Length());
1731 for (intptr_t i = 0; i < libs.Length(); i++) {
1732 lib = Library::RawCast(libs.At(i));
1733 // Mark the library dirty if it comes after the libraries we saved.
1734 library_infos_[i].dirty =
1735 i >= group_reload_context_->num_saved_libs_ ||
1736 group_reload_context_->saved_libs_transitive_updated_->Contains(
1737 lib.index());
1738 }
1739 }
1740}
1741
1742void ProgramReloadContext::CommitAfterInstanceMorphing() {
1743 // Rehash constants map for all classes. Constants are hashed by content, and
1744 // content may have changed from fields being added or removed.
1745 {
1746 TIMELINE_SCOPE(RehashConstants);
1747 IG->RehashConstants(&become_);
1748 }
1749 {
1750 TIMELINE_SCOPE(ForwardEnums);
1751 become_.Forward();
1752 }
1753
1754 if (FLAG_identity_reload) {
1755 const auto& saved_libs = GrowableObjectArray::Handle(saved_libraries_);
1756 const GrowableObjectArray& libs =
1757 GrowableObjectArray::Handle(IG->object_store()->libraries());
1758 if (saved_libs.Length() != libs.Length()) {
1759 TIR_Print("Identity reload failed! B#L=%" Pd " A#L=%" Pd "\n",
1760 saved_libs.Length(), libs.Length());
1761 }
1762 }
1763}
1764
1765bool ProgramReloadContext::IsDirty(const Library& lib) {
1766 const intptr_t index = lib.index();
1767 if (index == static_cast<classid_t>(-1)) {
1768 // Treat deleted libraries as dirty.
1769 return true;
1770 }
1771 ASSERT((index >= 0) && (index < library_infos_.length()));
1772 return library_infos_[index].dirty;
1773}
1774
1775void ProgramReloadContext::PostCommit() {
1776 TIMELINE_SCOPE(PostCommit);
1777 saved_root_library_ = Library::null();
1778 saved_libraries_ = GrowableObjectArray::null();
1779 InvalidateWorld();
1780}
1781
1782void IsolateGroupReloadContext::AddReasonForCancelling(
1783 ReasonForCancelling* reason) {
1784 reasons_to_cancel_reload_.Add(reason);
1785}
1786
1787void IsolateGroupReloadContext::EnsureHasInstanceMorpherFor(
1788 classid_t cid,
1789 InstanceMorpher* instance_morpher) {
1790 for (intptr_t i = 0; i < instance_morphers_.length(); ++i) {
1791 if (instance_morphers_[i]->cid() == cid) {
1792 return;
1793 }
1794 }
1795 instance_morphers_.Add(instance_morpher);
1796 instance_morpher_by_cid_.Insert(instance_morpher);
1797 ASSERT(instance_morphers_[instance_morphers_.length() - 1]->cid() == cid);
1798}
1799
1800void IsolateGroupReloadContext::ReportReasonsForCancelling() {
1801 ASSERT(FLAG_reload_force_rollback || HasReasonsForCancelling());
1802 for (int i = 0; i < reasons_to_cancel_reload_.length(); i++) {
1803 reasons_to_cancel_reload_.At(i)->Report(this);
1804 }
1805}
1806
1807void IsolateGroupReloadContext::MorphInstancesPhase1Allocate(
1808 ObjectLocator* locator,
1809 Become* become) {
1810 ASSERT(HasInstanceMorphers());
1811
1812 if (FLAG_trace_reload) {
1813 LogBlock blocker;
1814 TIR_Print("MorphInstance: \n");
1815 for (intptr_t i = 0; i < instance_morphers_.length(); i++) {
1816 instance_morphers_.At(i)->Dump();
1817 }
1818 }
1819
1820 const intptr_t count = locator->count();
1821 TIR_Print("Found %" Pd " object%s subject to morphing.\n", count,
1822 (count > 1) ? "s" : "");
1823
1824 for (intptr_t i = 0; i < instance_morphers_.length(); i++) {
1825 instance_morphers_.At(i)->CreateMorphedCopies(become);
1826 }
1827}
1828
1829void IsolateGroupReloadContext::MorphInstancesPhase2Become(Become* become) {
1830 ASSERT(HasInstanceMorphers());
1831
1832 become->Forward();
1833 // The heap now contains only instances with the new layout.
1834 // Ordinary GC is safe again.
1835}
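Morphing is split into two phases: first allocate a morphed copy for every located instance, then forward all references at once so the heap never mixes old and new layouts while ordinary GC is running. A toy standalone version of that split, with hypothetical PointV1/PointV2 layouts standing in for a reloaded class:

#include <cstdio>
#include <memory>
#include <vector>

struct PointV1 { int x, y; };
struct PointV2 { int x, y, z; };  // a field was added by the reload

int main() {
  std::vector<std::shared_ptr<PointV1>> old_instances = {
      std::make_shared<PointV1>(PointV1{1, 2}),
      std::make_shared<PointV1>(PointV1{3, 4})};

  // Phase 1 (CreateMorphedCopies): allocate a new-layout copy per instance,
  // carrying over surviving fields and defaulting the new one. No references
  // are rewritten yet.
  std::vector<std::shared_ptr<PointV2>> morphed;
  morphed.reserve(old_instances.size());
  for (const auto& old_obj : old_instances) {
    morphed.push_back(std::make_shared<PointV2>(PointV2{old_obj->x, old_obj->y, 0}));
  }

  // Phase 2 (become->Forward()): every reference to an old instance is swapped
  // for its morphed copy in one step; here we just report the pairing.
  for (size_t i = 0; i < morphed.size(); ++i) {
    std::printf("(%d, %d) -> (%d, %d, %d)\n", old_instances[i]->x, old_instances[i]->y,
                morphed[i]->x, morphed[i]->y, morphed[i]->z);
  }
  return 0;
}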
1836
1837void IsolateGroupReloadContext::ForEachIsolate(
1838 std::function<void(Isolate*)> callback) {
1839 isolate_group_->ForEachIsolate(callback);
1840}
1841
1842void ProgramReloadContext::ValidateReload() {
1843 TIMELINE_SCOPE(ValidateReload);
1844
1845 TIR_Print("---- VALIDATING RELOAD\n");
1846
1847 // Validate libraries.
1848 {
1849 ASSERT(library_map_storage_ != Array::null());
1850 UnorderedHashMap<LibraryMapTraits> map(library_map_storage_);
1851 UnorderedHashMap<LibraryMapTraits>::Iterator it(&map);
1852 Library& lib = Library::Handle();
1853 Library& new_lib = Library::Handle();
1854 while (it.MoveNext()) {
1855 const intptr_t entry = it.Current();
1856 new_lib = Library::RawCast(map.GetKey(entry));
1857 lib = Library::RawCast(map.GetPayload(entry, 0));
1858 if (new_lib.ptr() != lib.ptr()) {
1859 lib.CheckReload(new_lib, this);
1860 }
1861 }
1862 map.Release();
1863 }
1864
1865 // Validate classes.
1866 {
1867 ASSERT(class_map_storage_ != Array::null());
1868 UnorderedHashMap<ClassMapTraits> map(class_map_storage_);
1869 UnorderedHashMap<ClassMapTraits>::Iterator it(&map);
1870 Class& cls = Class::Handle();
1871 Class& new_cls = Class::Handle();
1872 while (it.MoveNext()) {
1873 const intptr_t entry = it.Current();
1874 new_cls = Class::RawCast(map.GetKey(entry));
1875 cls = Class::RawCast(map.GetPayload(entry, 0));
1876 if (new_cls.ptr() != cls.ptr()) {
1877 cls.CheckReload(new_cls, this);
1878 }
1879 }
1880 map.Release();
1881 }
1882}
1883
1884void IsolateGroupReloadContext::VisitObjectPointers(
1885 ObjectPointerVisitor* visitor) {
1886 visitor->VisitPointers(from(), to());
1887}
1888
1889void ProgramReloadContext::VisitObjectPointers(ObjectPointerVisitor* visitor) {
1890 visitor->VisitPointers(from(), to());
1891}
1892
1893ObjectStore* ProgramReloadContext::object_store() {
1894 return IG->object_store();
1895}
1896
1897void ProgramReloadContext::ResetUnoptimizedICsOnStack() {
1898 Thread* thread = Thread::Current();
1899 StackZone stack_zone(thread);
1900 Zone* zone = stack_zone.GetZone();
1901 Code& code = Code::Handle(zone);
1902 Function& function = Function::Handle(zone);
1903 CallSiteResetter resetter(zone);
1904
1905 IG->ForEachIsolate([&](Isolate* isolate) {
1906 if (isolate->mutator_thread() == nullptr) {
1907 return;
1908 }
1909 DartFrameIterator iterator(isolate->mutator_thread(),
1910 StackFrameIterator::kAllowCrossThreadIteration);
1911 StackFrame* frame = iterator.NextFrame();
1912 while (frame != nullptr) {
1913 code = frame->LookupDartCode();
1914 if (code.is_optimized() && !code.is_force_optimized()) {
1915 // If this code is optimized, we need to reset the ICs in the
1916 // corresponding unoptimized code, which will be executed when the stack
1917 // unwinds to the optimized code.
1918 function = code.function();
1919 code = function.unoptimized_code();
1920 ASSERT(!code.IsNull());
1921 resetter.ResetSwitchableCalls(code);
1922 resetter.ResetCaches(code);
1923 } else {
1924 resetter.ResetSwitchableCalls(code);
1925 resetter.ResetCaches(code);
1926 }
1927 frame = iterator.NextFrame();
1928 }
1929 });
1930}
1931
1932void ProgramReloadContext::ResetMegamorphicCaches() {
1933 object_store()->set_megamorphic_cache_table(GrowableObjectArray::Handle());
1934 // Since any current optimized code will not make any more calls, it may be
1935 // better to clear the table instead of clearing each of the caches, allowing
1936 // the current megamorphic caches to be GC'd and letting any new optimized code
1937 // allocate new ones.
1938}
1939
1940class InvalidationCollector : public ObjectVisitor {
1941 public:
1942 InvalidationCollector(Zone* zone,
1943 GrowableArray<const Function*>* functions,
1944 GrowableArray<const KernelProgramInfo*>* kernel_infos,
1945 GrowableArray<const Field*>* fields,
1946 GrowableArray<const SuspendState*>* suspend_states,
1947 GrowableArray<const Instance*>* instances)
1948 : zone_(zone),
1949 functions_(functions),
1950 kernel_infos_(kernel_infos),
1951 fields_(fields),
1952 suspend_states_(suspend_states),
1953 instances_(instances) {}
1954 virtual ~InvalidationCollector() {}
1955
1956 void VisitObject(ObjectPtr obj) override {
1957 intptr_t cid = obj->GetClassId();
1958 if (cid == kFunctionCid) {
1959 const Function& func =
1960 Function::Handle(zone_, static_cast<FunctionPtr>(obj));
1961 functions_->Add(&func);
1962 } else if (cid == kKernelProgramInfoCid) {
1963 kernel_infos_->Add(&KernelProgramInfo::Handle(
1964 zone_, static_cast<KernelProgramInfoPtr>(obj)));
1965 } else if (cid == kFieldCid) {
1966 fields_->Add(&Field::Handle(zone_, static_cast<FieldPtr>(obj)));
1967 } else if (cid == kSuspendStateCid) {
1968 const auto& suspend_state =
1969 SuspendState::Handle(zone_, static_cast<SuspendStatePtr>(obj));
1970 if (suspend_state.pc() != 0) {
1971 suspend_states_->Add(&suspend_state);
1972 }
1973 } else if (cid > kNumPredefinedCids) {
1974 instances_->Add(&Instance::Handle(zone_, static_cast<InstancePtr>(obj)));
1975 }
1976 }
1977
1978 private:
1979 Zone* const zone_;
1980 GrowableArray<const Function*>* const functions_;
1981 GrowableArray<const KernelProgramInfo*>* const kernel_infos_;
1982 GrowableArray<const Field*>* const fields_;
1983 GrowableArray<const SuspendState*>* const suspend_states_;
1984 GrowableArray<const Instance*>* const instances_;
1985};
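The collector above makes a single pass over the heap and buckets objects by class id into work lists that the later Invalidate* passes consume. A compact standalone analogue of that bucketing, with a hypothetical Kind enum in place of real class ids:

#include <cstdio>
#include <vector>

enum Kind { kFunctionKind, kFieldKind, kInstanceKind };
struct Obj {
  Kind kind;
  int id;
};

int main() {
  std::vector<Obj> heap = {{kFunctionKind, 1}, {kFieldKind, 2},
                           {kInstanceKind, 3}, {kFunctionKind, 4}};
  std::vector<const Obj*> functions;
  std::vector<const Obj*> fields;
  std::vector<const Obj*> instances;
  for (const Obj& obj : heap) {  // stands in for HeapIterationScope + VisitObject
    switch (obj.kind) {
      case kFunctionKind: functions.push_back(&obj); break;
      case kFieldKind:    fields.push_back(&obj);    break;
      case kInstanceKind: instances.push_back(&obj); break;
    }
  }
  std::printf("%zu functions, %zu fields, %zu instances collected\n",
              functions.size(), fields.size(), instances.size());
  return 0;
}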
1986
1987void ProgramReloadContext::RunInvalidationVisitors() {
1988 TIR_Print("---- RUNNING INVALIDATION HEAP VISITORS\n");
1989 Thread* thread = Thread::Current();
1990 StackZone stack_zone(thread);
1991 Zone* zone = stack_zone.GetZone();
1992
1993 GrowableArray<const Function*> functions(4 * KB);
1994 GrowableArray<const KernelProgramInfo*> kernel_infos(KB);
1995 GrowableArray<const Field*> fields(4 * KB);
1996 GrowableArray<const SuspendState*> suspend_states(4 * KB);
1997 GrowableArray<const Instance*> instances(4 * KB);
1998
1999 {
2000 TIMELINE_SCOPE(CollectInvalidations);
2001 HeapIterationScope iteration(thread);
2002 InvalidationCollector visitor(zone, &functions, &kernel_infos, &fields,
2003 &suspend_states, &instances);
2004 iteration.IterateObjects(&visitor);
2005 }
2006
2007 InvalidateKernelInfos(zone, kernel_infos);
2008 InvalidateSuspendStates(zone, suspend_states);
2009 InvalidateFields(zone, fields, instances);
2010
2011 // After InvalidateFields in order to invalidate
2012 // implicit getters which need load guards.
2013 InvalidateFunctions(zone, functions);
2014}
2015
2016void ProgramReloadContext::InvalidateKernelInfos(
2017 Zone* zone,
2018 const GrowableArray<const KernelProgramInfo*>& kernel_infos) {
2019 TIMELINE_SCOPE(InvalidateKernelInfos);
2020 HANDLESCOPE(Thread::Current());
2021
2022 Array& data = Array::Handle(zone);
2023 Object& key = Object::Handle(zone);
2024 Smi& value = Smi::Handle(zone);
2025 for (intptr_t i = 0; i < kernel_infos.length(); i++) {
2026 const KernelProgramInfo& info = *kernel_infos[i];
2027 // Clear the libraries cache.
2028 {
2029 data = info.libraries_cache();
2030 ASSERT(!data.IsNull());
2031 IntHashMap table(&key, &value, &data);
2032 table.Clear();
2033 info.set_libraries_cache(table.Release());
2034 }
2035 // Clear the classes cache.
2036 {
2037 data = info.classes_cache();
2038 ASSERT(!data.IsNull());
2039 IntHashMap table(&key, &value, &data);
2040 table.Clear();
2041 info.set_classes_cache(table.Release());
2042 }
2043 }
2044}
2045
2046void ProgramReloadContext::InvalidateFunctions(
2047 Zone* zone,
2048 const GrowableArray<const Function*>& functions) {
2049 TIMELINE_SCOPE(InvalidateFunctions);
2050 auto thread = Thread::Current();
2051 HANDLESCOPE(thread);
2052
2053 CallSiteResetter resetter(zone);
2054
2055 Class& owning_class = Class::Handle(zone);
2056 Library& owning_lib = Library::Handle(zone);
2057 Code& code = Code::Handle(zone);
2058 Field& field = Field::Handle(zone);
2059 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
2060 for (intptr_t i = 0; i < functions.length(); i++) {
2061 const Function& func = *functions[i];
2062
2063 // Force-optimized functions cannot deoptimize.
2064 if (func.ForceOptimize()) continue;
2065
2066 // Switch to unoptimized code or the lazy compilation stub.
2067 func.SwitchToLazyCompiledUnoptimizedCode();
2068
2069 // Grab the current code.
2070 code = func.CurrentCode();
2071 ASSERT(!code.IsNull());
2072
2073 // Force recompilation of unoptimized code of implicit getters
2074 // in order to add load guards. This is needed for future
2075 // deoptimizations, which will expect a load guard in the unoptimized code.
2076 bool recompile_for_load_guard = false;
2077 if (func.IsImplicitGetterFunction() ||
2078 func.IsImplicitStaticGetterFunction()) {
2079 field = func.accessor_field();
2080 recompile_for_load_guard = field.needs_load_guard();
2081 }
2082
2083 owning_class = func.Owner();
2084 owning_lib = owning_class.library();
2085 const bool clear_unoptimized_code =
2086 IsDirty(owning_lib) || recompile_for_load_guard;
2087 const bool stub_code = code.IsStubCode();
2088
2089 // Zero edge counters, before clearing the ICDataArray, since that's where
2090 // they're held.
2091 resetter.ZeroEdgeCounters(func);
2092
2093 if (stub_code) {
2094 // Nothing to reset.
2095 } else if (clear_unoptimized_code) {
2096 VTIR_Print("Marking %s for recompilation, clearing code\n",
2097 func.ToCString());
2098 // Null out the ICData array and code.
2099 func.ClearICDataArray();
2100 func.ClearCode();
2101 func.SetWasCompiled(false);
2102 } else {
2103 // We are preserving the unoptimized code, reset instance calls and type
2104 // test caches.
2105 resetter.ResetSwitchableCalls(code);
2106 resetter.ResetCaches(code);
2107 }
2108
2109 // Clear counters.
2110 func.set_usage_counter(0);
2111 func.set_deoptimization_counter(0);
2112 func.set_optimized_instruction_count(0);
2113 func.set_optimized_call_site_count(0);
2114 }
2115}
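InvalidateFunctions above chooses, per function, between skipping it, zeroing counters only, clearing its code for lazy recompilation, or keeping the unoptimized code and just resetting call sites and caches. That decision reduces to a small predicate; the following standalone sketch uses illustrative flag names, not VM API:

#include <cstdio>

struct FuncState {
  bool force_optimized;   // cannot deoptimize, so it is skipped entirely
  bool is_stub;           // running a stub: only counters are zeroed
  bool owner_lib_dirty;   // its library was part of the reload
  bool needs_load_guard;  // implicit getter must be recompiled with a guard
};

const char* InvalidationAction(const FuncState& f) {
  if (f.force_optimized) return "skip";
  if (f.is_stub) return "zero edge counters only";
  if (f.owner_lib_dirty || f.needs_load_guard) return "clear code; recompile lazily";
  return "keep unoptimized code; reset switchable calls and caches";
}

int main() {
  std::printf("%s\n", InvalidationAction({false, false, true, false}));
  std::printf("%s\n", InvalidationAction({false, false, false, false}));
  return 0;
}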
2116
2117void ProgramReloadContext::InvalidateSuspendStates(
2118 Zone* zone,
2119 const GrowableArray<const SuspendState*>& suspend_states) {
2120 TIMELINE_SCOPE(InvalidateSuspendStates);
2121 auto thread = Thread::Current();
2122 HANDLESCOPE(thread);
2123
2124 CallSiteResetter resetter(zone);
2125 Code& code = Code::Handle(zone);
2126 Function& function = Function::Handle(zone);
2127
2128 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
2129 for (intptr_t i = 0, n = suspend_states.length(); i < n; ++i) {
2130 const SuspendState& suspend_state = *suspend_states[i];
2131 ASSERT(suspend_state.pc() != 0);
2132 code = suspend_state.GetCodeObject();
2133 ASSERT(!code.IsNull());
2134 if (code.is_optimized() && !code.is_force_optimized()) {
2135 function = code.function();
2136 // Before disabling [code], function needs to
2137 // switch to unoptimized code first.
2138 function.SwitchToLazyCompiledUnoptimizedCode();
2139 // Disable [code] in order to trigger lazy deoptimization.
2140 // Unless [code] is compiled for OSR, it may be already
2141 // disabled in SwitchToLazyCompiledUnoptimizedCode.
2142 if (!code.IsDisabled()) {
2143 code.DisableDartCode();
2144 }
2145 // Reset switchable calls and caches for unoptimized
2146 // code (if any), as it is going to be used to continue
2147 // execution of the suspended function.
2148 code = function.unoptimized_code();
2149 if (!code.IsNull()) {
2150 resetter.ResetSwitchableCalls(code);
2151 resetter.ResetCaches(code);
2152 }
2153 } else {
2154 function = code.function();
2155 // ResetSwitchableCalls uses the ICData array, which
2156 // may have been cleared along with the code in InvalidateFunctions
2157 // during a previous hot reload.
2158 // Rebuild the unoptimized code in order to recreate the ICData array.
2159 function.EnsureHasCompiledUnoptimizedCode();
2160 resetter.ResetSwitchableCalls(code);
2161 resetter.ResetCaches(code);
2162 }
2163 }
2164}
2165
2166 // Finds fields that are not yet initialized or whose value does not conform to
2167// the field's static type, setting Field::needs_load_guard(). Accessors for
2168// such fields are compiled with additional checks to handle lazy initialization
2169// and to preserve type soundness.
2170class FieldInvalidator {
2171 public:
2172 explicit FieldInvalidator(Zone* zone)
2173 : zone_(zone),
2174 cls_(Class::Handle(zone)),
2175 cls_fields_(Array::Handle(zone)),
2176 entry_(Object::Handle(zone)),
2177 value_(Object::Handle(zone)),
2178 instance_(Instance::Handle(zone)),
2179 type_(AbstractType::Handle(zone)),
2180 cache_(SubtypeTestCache::Handle(zone)),
2181 result_(Bool::Handle(zone)),
2182 closure_function_(Function::Handle(zone)),
2183 instantiator_type_arguments_(TypeArguments::Handle(zone)),
2184 function_type_arguments_(TypeArguments::Handle(zone)),
2185 instance_cid_or_signature_(Object::Handle(zone)),
2186 instance_type_arguments_(TypeArguments::Handle(zone)),
2187 parent_function_type_arguments_(TypeArguments::Handle(zone)),
2188 delayed_function_type_arguments_(TypeArguments::Handle(zone)) {}
2189
2190 void CheckStatics(const GrowableArray<const Field*>& fields) {
2191 Thread* thread = Thread::Current();
2192 HANDLESCOPE(thread);
2193 instantiator_type_arguments_ = TypeArguments::null();
2194 for (intptr_t i = 0; i < fields.length(); i++) {
2195 const Field& field = *fields[i];
2196 if (!field.is_static()) {
2197 continue;
2198 }
2199 if (field.needs_load_guard()) {
2200 continue; // Already guarding.
2201 }
2202 const intptr_t field_id = field.field_id();
2203 thread->isolate_group()->ForEachIsolate([&](Isolate* isolate) {
2204 auto field_table = isolate->field_table();
2205 // The isolate might've just been created and is now participating in
2206 // the reload request inside `IsolateGroup::RegisterIsolate()`.
2207 // At that point it doesn't have the field table setup yet.
2208 if (field_table->IsReadyToUse()) {
2209 value_ = field_table->At(field_id);
2210 if ((value_.ptr() != Object::sentinel().ptr()) &&
2211 (value_.ptr() != Object::transition_sentinel().ptr())) {
2212 CheckValueType(value_, field);
2213 }
2214 }
2215 });
2216 }
2217 }
2218
2219 void CheckInstances(const GrowableArray<const Instance*>& instances) {
2220 Thread* thread = Thread::Current();
2221 HANDLESCOPE(thread);
2222 for (intptr_t i = 0; i < instances.length(); i++) {
2223 CheckInstance(*instances[i]);
2224 }
2225 }
2226
2227 private:
2228 DART_FORCE_INLINE
2229 void CheckInstance(const Instance& instance) {
2230 cls_ = instance.clazz();
2231 if (cls_.NumTypeArguments() > 0) {
2232 instantiator_type_arguments_ = instance.GetTypeArguments();
2233 } else {
2234 instantiator_type_arguments_ = TypeArguments::null();
2235 }
2236 cls_fields_ = cls_.OffsetToFieldMap();
2237 for (intptr_t i = 0; i < cls_fields_.Length(); i++) {
2238 entry_ = cls_fields_.At(i);
2239 if (!entry_.IsField()) {
2240 continue;
2241 }
2242 const Field& field = Field::Cast(entry_);
2243 CheckInstanceField(instance, field);
2244 }
2245 }
2246
2247 DART_FORCE_INLINE
2248 void CheckInstanceField(const Instance& instance, const Field& field) {
2249 if (field.needs_load_guard()) {
2250 return; // Already guarding.
2251 }
2252 if (field.is_unboxed()) {
2253 // Unboxed fields are guaranteed to match.
2254 return;
2255 }
2256 value_ = instance.GetField(field);
2257 if (value_.ptr() == Object::sentinel().ptr()) {
2258 if (field.is_late()) {
2259 // Late fields already have lazy initialization logic.
2260 return;
2261 }
2262 // Needs guard for initialization.
2263 ASSERT(!FLAG_identity_reload);
2264 field.set_needs_load_guard(true);
2265 return;
2266 }
2267 CheckValueType(value_, field);
2268 }
2269
2270 DART_FORCE_INLINE
2271 bool CheckAssignabilityUsingCache(const Object& value,
2272 const AbstractType& type) {
2273 ASSERT(!value.IsSentinel());
2274 if (type.IsDynamicType()) {
2275 return true;
2276 }
2277
2278 if (type.IsRecordType()) {
2279 return CheckAssignabilityForRecordType(value, RecordType::Cast(type));
2280 }
2281
2282 cls_ = value.clazz();
2283 const intptr_t cid = cls_.id();
2284 if (cid == kClosureCid) {
2285 const auto& closure = Closure::Cast(value);
2286 closure_function_ = closure.function();
2287 instance_cid_or_signature_ = closure_function_.signature();
2288 instance_type_arguments_ = closure.instantiator_type_arguments();
2289 parent_function_type_arguments_ = closure.function_type_arguments();
2290 delayed_function_type_arguments_ = closure.delayed_type_arguments();
2291 } else {
2292 instance_cid_or_signature_ = Smi::New(cid);
2293 if (cls_.NumTypeArguments() > 0) {
2294 instance_type_arguments_ = Instance::Cast(value).GetTypeArguments();
2295 } else {
2296 instance_type_arguments_ = TypeArguments::null();
2297 }
2298 parent_function_type_arguments_ = TypeArguments::null();
2299 delayed_function_type_arguments_ = TypeArguments::null();
2300 }
2301
2302 if (cache_.IsNull()) {
2303 // Use a cache that will check all inputs.
2304 cache_ = SubtypeTestCache::New(SubtypeTestCache::kMaxInputs);
2305 }
2306 if (cache_.HasCheck(
2307 instance_cid_or_signature_, type, instance_type_arguments_,
2308 instantiator_type_arguments_, function_type_arguments_,
2309 parent_function_type_arguments_, delayed_function_type_arguments_,
2310 /*index=*/nullptr, &result_)) {
2311 return result_.value();
2312 }
2313
2314 instance_ ^= value.ptr();
2315 if (instance_.IsAssignableTo(type, instantiator_type_arguments_,
2316 function_type_arguments_)) {
2317 // Do not add record instances to cache as they don't have a valid
2318 // key (type of a record depends on types of all its fields).
2319 if (cid != kRecordCid) {
2320 cache_.AddCheck(instance_cid_or_signature_, type,
2321 instance_type_arguments_, instantiator_type_arguments_,
2322 function_type_arguments_,
2323 parent_function_type_arguments_,
2324 delayed_function_type_arguments_, Bool::True());
2325 }
2326 return true;
2327 }
2328
2329 return false;
2330 }
2331
2332 bool CheckAssignabilityForRecordType(const Object& value,
2333 const RecordType& type) {
2334 if (!value.IsRecord()) {
2335 return false;
2336 }
2337
2338 const Record& record = Record::Cast(value);
2339 if (record.shape() != type.shape()) {
2340 return false;
2341 }
2342
2343 // This method can be called recursively, so cannot reuse handles.
2344 auto& field_value = Object::Handle(zone_);
2345 auto& field_type = AbstractType::Handle(zone_);
2346 const intptr_t num_fields = record.num_fields();
2347 for (intptr_t i = 0; i < num_fields; ++i) {
2348 field_value = record.FieldAt(i);
2349 field_type = type.FieldTypeAt(i);
2350 if (!CheckAssignabilityUsingCache(field_value, field_type)) {
2351 return false;
2352 }
2353 }
2354 return true;
2355 }
2356
2357 DART_FORCE_INLINE
2358 void CheckValueType(const Object& value, const Field& field) {
2359 ASSERT(!value.IsSentinel());
2360 type_ = field.type();
2361 if (!CheckAssignabilityUsingCache(value, type_)) {
2362 // Even if doing an identity reload, type check can fail if hot reload
2363 // happens while constructor is still running and field is not
2364 // initialized yet, so it has a null value.
2365#ifdef DEBUG
2366 if (FLAG_identity_reload && !value.IsNull()) {
2367 FATAL(
2368 "Type check failed during identity hot reload.\n"
2369 " field: %s\n"
2370 " type: %s\n"
2371 " value: %s\n",
2372 field.ToCString(), type_.ToCString(), value.ToCString());
2373 }
2374#endif
2375 field.set_needs_load_guard(true);
2376 }
2377 }
2378
2379 Zone* zone_;
2380 Class& cls_;
2381 Array& cls_fields_;
2382 Object& entry_;
2383 Object& value_;
2384 Instance& instance_;
2385 AbstractType& type_;
2386 SubtypeTestCache& cache_;
2387 Bool& result_;
2388 Function& closure_function_;
2389 TypeArguments& instantiator_type_arguments_;
2390 TypeArguments& function_type_arguments_;
2391 Object& instance_cid_or_signature_;
2392 TypeArguments& instance_type_arguments_;
2393 TypeArguments& parent_function_type_arguments_;
2394 TypeArguments& delayed_function_type_arguments_;
2395};
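CheckAssignabilityUsingCache above consults a SubtypeTestCache before falling back to the slow IsAssignableTo check and then records the verdict for reuse. A standalone sketch of the same memoization pattern, using a std::map keyed on hypothetical class/type names instead of the VM's cache; the toy assignability rule is only a placeholder:

#include <cstdio>
#include <map>
#include <string>
#include <utility>

static bool SlowIsAssignable(const std::string& cls, const std::string& type) {
  // Toy rule standing in for Instance::IsAssignableTo.
  return cls == type || type == "dynamic" || (cls == "int" && type == "num");
}

static bool CachedIsAssignable(
    const std::string& cls, const std::string& type,
    std::map<std::pair<std::string, std::string>, bool>* cache) {
  const auto key = std::make_pair(cls, type);
  const auto it = cache->find(key);
  if (it != cache->end()) return it->second;  // corresponds to cache_.HasCheck(...)
  const bool result = SlowIsAssignable(cls, type);
  cache->emplace(key, result);                // corresponds to cache_.AddCheck(...)
  return result;
}

int main() {
  std::map<std::pair<std::string, std::string>, bool> cache;
  std::printf("%d\n", CachedIsAssignable("int", "num", &cache));  // slow path, then cached
  std::printf("%d\n", CachedIsAssignable("int", "num", &cache));  // cache hit
  return 0;
}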
2396
2397void ProgramReloadContext::InvalidateFields(
2398 Zone* zone,
2399 const GrowableArray<const Field*>& fields,
2400 const GrowableArray<const Instance*>& instances) {
2401 TIMELINE_SCOPE(InvalidateFields);
2402 SafepointMutexLocker ml(IG->subtype_test_cache_mutex());
2403 FieldInvalidator invalidator(zone);
2404 invalidator.CheckStatics(fields);
2405 invalidator.CheckInstances(instances);
2406}
2407
2408void ProgramReloadContext::InvalidateWorld() {
2409 TIMELINE_SCOPE(InvalidateWorld);
2410 TIR_Print("---- INVALIDATING WORLD\n");
2411 ResetMegamorphicCaches();
2412 if (FLAG_trace_deoptimization) {
2413 THR_Print("Deopt for reload\n");
2414 }
2415 DeoptimizeFunctionsOnStack();
2416 ResetUnoptimizedICsOnStack();
2417 RunInvalidationVisitors();
2418}
2419
2420ClassPtr ProgramReloadContext::OldClassOrNull(const Class& replacement_or_new) {
2421 UnorderedHashSet<ClassMapTraits> old_classes_set(old_classes_set_storage_);
2422 Class& cls = Class::Handle();
2423 cls ^= old_classes_set.GetOrNull(replacement_or_new);
2424 old_classes_set_storage_ = old_classes_set.Release().ptr();
2425 return cls.ptr();
2426}
2427
2428StringPtr ProgramReloadContext::FindLibraryPrivateKey(
2429 const Library& replacement_or_new) {
2430 const Library& old = Library::Handle(OldLibraryOrNull(replacement_or_new));
2431 if (old.IsNull()) {
2432 return String::null();
2433 }
2434#if defined(DEBUG)
2435 VTIR_Print("`%s` is getting `%s`'s private key.\n",
2436 String::Handle(replacement_or_new.url()).ToCString(),
2437 String::Handle(old.url()).ToCString());
2438#endif
2439 return old.private_key();
2440}
2441
2442LibraryPtr ProgramReloadContext::OldLibraryOrNull(
2443 const Library& replacement_or_new) {
2444 UnorderedHashSet<LibraryMapTraits> old_libraries_set(
2445 old_libraries_set_storage_);
2446 Library& lib = Library::Handle();
2447 lib ^= old_libraries_set.GetOrNull(replacement_or_new);
2448 old_libraries_set.Release();
2449
2450 if (lib.IsNull() &&
2451 (group_reload_context_->root_url_prefix_ != String::null()) &&
2452 (group_reload_context_->old_root_url_prefix_ != String::null())) {
2453 return OldLibraryOrNullBaseMoved(replacement_or_new);
2454 }
2455 return lib.ptr();
2456}
2457
2458// Attempt to find the pair to |replacement_or_new| with the knowledge that
2459// the base url prefix has moved.
2460LibraryPtr ProgramReloadContext::OldLibraryOrNullBaseMoved(
2461 const Library& replacement_or_new) {
2462 const String& url_prefix =
2463 String::Handle(group_reload_context_->root_url_prefix_);
2464 const String& old_url_prefix =
2465 String::Handle(group_reload_context_->old_root_url_prefix_);
2466 const intptr_t prefix_length = url_prefix.Length();
2467 const intptr_t old_prefix_length = old_url_prefix.Length();
2468 const String& new_url = String::Handle(replacement_or_new.url());
2469 const String& suffix =
2470 String::Handle(String::SubString(new_url, prefix_length));
2471 if (!new_url.StartsWith(url_prefix)) {
2472 return Library::null();
2473 }
2474 Library& old = Library::Handle();
2475 String& old_url = String::Handle();
2476 String& old_suffix = String::Handle();
2477 const auto& saved_libs = GrowableObjectArray::Handle(saved_libraries_);
2478 ASSERT(!saved_libs.IsNull());
2479 for (intptr_t i = 0; i < saved_libs.Length(); i++) {
2480 old = Library::RawCast(saved_libs.At(i));
2481 old_url = old.url();
2482 if (!old_url.StartsWith(old_url_prefix)) {
2483 continue;
2484 }
2485 old_suffix = String::SubString(old_url, old_prefix_length);
2486 if (old_suffix.IsNull()) {
2487 continue;
2488 }
2489 if (old_suffix.Equals(suffix)) {
2490 TIR_Print("`%s` is moving to `%s`\n", old_url.ToCString(),
2491 new_url.ToCString());
2492 return old.ptr();
2493 }
2494 }
2495 return Library::null();
2496}
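OldLibraryOrNullBaseMoved pairs a new library with its old version by stripping the respective root URL prefixes and comparing the remaining suffixes. A standalone sketch of that matching, using hypothetical file URLs and std::string in place of the VM's String handles:

#include <cstdio>
#include <string>
#include <vector>

static bool StartsWith(const std::string& s, const std::string& prefix) {
  return s.compare(0, prefix.size(), prefix) == 0;
}

int main() {
  const std::string old_prefix = "file:///old/app/";
  const std::string new_prefix = "file:///new/app/";
  const std::vector<std::string> saved_urls = {"file:///old/app/lib/a.dart",
                                               "file:///old/app/lib/b.dart"};
  const std::string new_url = "file:///new/app/lib/b.dart";

  if (!StartsWith(new_url, new_prefix)) return 0;
  const std::string suffix = new_url.substr(new_prefix.size());
  for (const std::string& old_url : saved_urls) {
    if (!StartsWith(old_url, old_prefix)) continue;
    if (old_url.substr(old_prefix.size()) == suffix) {
      std::printf("`%s` is moving to `%s`\n", old_url.c_str(), new_url.c_str());
    }
  }
  return 0;
}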
2497
2498void ProgramReloadContext::BuildLibraryMapping() {
2499 const GrowableObjectArray& libs =
2500 GrowableObjectArray::Handle(object_store()->libraries());
2501
2502 Library& replacement_or_new = Library::Handle();
2503 Library& old = Library::Handle();
2504 for (intptr_t i = group_reload_context_->num_saved_libs_; i < libs.Length();
2505 i++) {
2506 replacement_or_new = Library::RawCast(libs.At(i));
2507 old = OldLibraryOrNull(replacement_or_new);
2508 if (old.IsNull()) {
2509 if (FLAG_identity_reload) {
2510 TIR_Print("Could not find original library for %s\n",
2511 replacement_or_new.ToCString());
2512 UNREACHABLE();
2513 }
2514 // New library.
2515 AddLibraryMapping(replacement_or_new, replacement_or_new);
2516 } else {
2517 ASSERT(!replacement_or_new.is_dart_scheme());
2518 // Replaced library.
2519 AddLibraryMapping(replacement_or_new, old);
2520
2521 AddBecomeMapping(old, replacement_or_new);
2522 }
2523 }
2524}
2525
2526// Find classes that have been removed from the program.
2527// Instances of these classes may still be referenced from variables, so the
2528 // functions of these classes may still execute in the future, and they need to
2529 // be given patch class owners so that they correctly reference their (old) kernel
2530// data even after the library's kernel data is updated.
2531//
2532// Note that all such classes must belong to a library that has either been
2533// changed or removed.
2534void ProgramReloadContext::BuildRemovedClassesSet() {
2535 // Find all old classes [mapped_old_classes_set].
2536 UnorderedHashMap<ClassMapTraits> class_map(class_map_storage_);
2537 UnorderedHashSet<ClassMapTraits> mapped_old_classes_set(
2538 HashTables::New<UnorderedHashSet<ClassMapTraits> >(
2539 class_map.NumOccupied()));
2540 {
2541 UnorderedHashMap<ClassMapTraits>::Iterator it(&class_map);
2542 Class& cls = Class::Handle();
2543 Class& new_cls = Class::Handle();
2544 while (it.MoveNext()) {
2545 const intptr_t entry = it.Current();
2546 new_cls = Class::RawCast(class_map.GetKey(entry));
2547 cls = Class::RawCast(class_map.GetPayload(entry, 0));
2548 mapped_old_classes_set.InsertOrGet(cls);
2549 }
2550 }
2551 class_map.Release();
2552
2553 // Find all reloaded libraries [mapped_old_library_set].
2554 UnorderedHashMap<LibraryMapTraits> library_map(library_map_storage_);
2555 UnorderedHashMap<LibraryMapTraits>::Iterator it_library(&library_map);
2556 UnorderedHashSet<LibraryMapTraits> mapped_old_library_set(
2557 HashTables::New<UnorderedHashSet<LibraryMapTraits> >(
2558 library_map.NumOccupied()));
2559 {
2560 Library& old_library = Library::Handle();
2561 Library& new_library = Library::Handle();
2562 while (it_library.MoveNext()) {
2563 const intptr_t entry = it_library.Current();
2564 new_library ^= library_map.GetKey(entry);
2565 old_library ^= library_map.GetPayload(entry, 0);
2566 if (new_library.ptr() != old_library.ptr()) {
2567 mapped_old_library_set.InsertOrGet(old_library);
2568 }
2569 }
2570 }
2571
2572 // For every old class, check if its library was reloaded and if
2573 // the class was mapped. If the class wasn't mapped, add it to
2574 // [removed_class_set].
2575 UnorderedHashSet<ClassMapTraits> old_classes_set(old_classes_set_storage_);
2576 UnorderedHashSet<ClassMapTraits>::Iterator it(&old_classes_set);
2577 UnorderedHashSet<ClassMapTraits> removed_class_set(
2578 removed_class_set_storage_);
2579 Class& old_cls = Class::Handle();
2580 Class& new_cls = Class::Handle();
2581 Library& old_library = Library::Handle();
2582 Library& mapped_old_library = Library::Handle();
2583 while (it.MoveNext()) {
2584 const intptr_t entry = it.Current();
2585 old_cls ^= Class::RawCast(old_classes_set.GetKey(entry));
2586 old_library = old_cls.library();
2587 if (old_library.IsNull()) {
2588 continue;
2589 }
2590 mapped_old_library ^= mapped_old_library_set.GetOrNull(old_library);
2591 if (!mapped_old_library.IsNull()) {
2592 new_cls ^= mapped_old_classes_set.GetOrNull(old_cls);
2593 if (new_cls.IsNull()) {
2594 removed_class_set.InsertOrGet(old_cls);
2595 }
2596 }
2597 }
2598 removed_class_set_storage_ = removed_class_set.Release().ptr();
2599
2600 old_classes_set.Release();
2601 mapped_old_classes_set.Release();
2602 mapped_old_library_set.Release();
2603 library_map.Release();
2604}
2605
2606void ProgramReloadContext::AddClassMapping(const Class& replacement_or_new,
2607 const Class& original) {
2608 UnorderedHashMap<ClassMapTraits> map(class_map_storage_);
2609 bool update = map.UpdateOrInsert(replacement_or_new, original);
2610 ASSERT(!update);
2611 // The storage given to the map may have been reallocated, remember the new
2612 // address.
2613 class_map_storage_ = map.Release().ptr();
2614}
2615
2616void ProgramReloadContext::AddLibraryMapping(const Library& replacement_or_new,
2617 const Library& original) {
2618 UnorderedHashMap<LibraryMapTraits> map(library_map_storage_);
2619 bool update = map.UpdateOrInsert(replacement_or_new, original);
2620 ASSERT(!update);
2621 // The storage given to the map may have been reallocated, remember the new
2622 // address.
2623 library_map_storage_ = map.Release().ptr();
2624}
2625
2626void ProgramReloadContext::AddStaticFieldMapping(const Field& old_field,
2627 const Field& new_field) {
2628 ASSERT(old_field.is_static());
2629 ASSERT(new_field.is_static());
2630 AddBecomeMapping(old_field, new_field);
2631}
2632
2633void ProgramReloadContext::AddBecomeMapping(const Object& old,
2634 const Object& neu) {
2635 become_.Add(old, neu);
2636}
2637
2638void ProgramReloadContext::RebuildDirectSubclasses() {
2639 ClassTable* class_table = IG->class_table();
2640 intptr_t num_cids = class_table->NumCids();
2641
2642 // Clear the direct subclasses for all classes.
2643 Class& cls = Class::Handle();
2644 const GrowableObjectArray& null_list = GrowableObjectArray::Handle();
2645 for (intptr_t i = 1; i < num_cids; i++) {
2646 if (class_table->HasValidClassAt(i)) {
2647 cls = class_table->At(i);
2648 if (!cls.is_declaration_loaded()) {
2649 continue; // Can't have any subclasses or implementors yet.
2650 }
2651 // Testing for null to prevent attempting to write to read-only classes
2652 // in the VM isolate.
2653 if (cls.direct_subclasses() != GrowableObjectArray::null()) {
2654 cls.set_direct_subclasses(null_list);
2655 }
2656 if (cls.direct_implementors() != GrowableObjectArray::null()) {
2657 cls.set_direct_implementors(null_list);
2658 }
2659 }
2660 }
2661
2662 // Recompute the direct subclasses / implementors.
2663
2664 AbstractType& super_type = AbstractType::Handle();
2665 Class& super_cls = Class::Handle();
2666
2667 Array& interface_types = Array::Handle();
2668 AbstractType& interface_type = AbstractType::Handle();
2669 Class& interface_class = Class::Handle();
2670
2671 for (intptr_t i = 1; i < num_cids; i++) {
2672 if (class_table->HasValidClassAt(i)) {
2673 cls = class_table->At(i);
2674 if (!cls.is_declaration_loaded()) {
2675 continue; // Will register itself later when loaded.
2676 }
2677 super_type = cls.super_type();
2678 if (!super_type.IsNull() && !super_type.IsObjectType()) {
2679 super_cls = cls.SuperClass();
2680 ASSERT(!super_cls.IsNull());
2681 super_cls.AddDirectSubclass(cls);
2682 }
2683
2684 interface_types = cls.interfaces();
2685 if (!interface_types.IsNull()) {
2686 const intptr_t mixin_index = cls.is_transformed_mixin_application()
2687 ? interface_types.Length() - 1
2688 : -1;
2689 for (intptr_t j = 0; j < interface_types.Length(); ++j) {
2690 interface_type ^= interface_types.At(j);
2691 interface_class = interface_type.type_class();
2692 interface_class.AddDirectImplementor(
2693 cls, /* is_mixin = */ j == mixin_index);
2694 }
2695 }
2696 }
2697 }
2698}
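RebuildDirectSubclasses clears every class's subclass and implementor lists, then re-derives them in one pass over the class table from each class's super type and interfaces. A standalone sketch of the subclass half, using a hypothetical name-based hierarchy instead of the VM's class table:

#include <cstdio>
#include <map>
#include <string>
#include <vector>

int main() {
  // Hypothetical class hierarchy: class name -> superclass name.
  const std::map<std::string, std::string> super_of = {
      {"B", "A"}, {"C", "A"}, {"D", "B"}};

  // Rebuild the direct-subclass index from scratch: old lists are discarded,
  // then re-derived in a single pass (mirrors AddDirectSubclass above).
  std::map<std::string, std::vector<std::string>> direct_subclasses;
  for (const auto& entry : super_of) {
    direct_subclasses[entry.second].push_back(entry.first);
  }
  for (const auto& entry : direct_subclasses) {
    std::printf("%s has %zu direct subclasses\n", entry.first.c_str(),
                entry.second.size());
  }
  return 0;
}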
2699
2700#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
2701
2702} // namespace dart

◆ Z

#define Z   zone_

Definition at line 63 of file isolate_reload.cc.