isolate_reload.cc
1// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
5#include "vm/isolate_reload.h"
6
7#include <memory>
8
9#include "vm/bit_vector.h"
10#include "vm/compiler/jit/compiler.h"
11#include "vm/dart_api_impl.h"
12#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
13#include "vm/hash.h"
14#endif
15#include "vm/hash_table.h"
16#include "vm/heap/become.h"
17#include "vm/heap/safepoint.h"
18#include "vm/isolate.h"
19#include "vm/kernel_isolate.h"
20#include "vm/kernel_loader.h"
21#include "vm/log.h"
22#include "vm/longjump.h"
23#include "vm/object.h"
24#include "vm/object_store.h"
25#include "vm/parser.h"
26#include "vm/runtime_entry.h"
27#include "vm/service_event.h"
28#include "vm/stack_frame.h"
29#include "vm/thread.h"
30#include "vm/timeline.h"
31#include "vm/type_testing_stubs.h"
32#include "vm/visitor.h"
33
34namespace dart {
35
36DEFINE_FLAG(int, reload_every, 0, "Reload every N stack overflow checks.");
37DEFINE_FLAG(bool, trace_reload, false, "Trace isolate reloading");
38
39#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
40DEFINE_FLAG(bool,
41 trace_reload_verbose,
42 false,
43 "trace isolate reloading verbose");
44DEFINE_FLAG(bool, identity_reload, false, "Enable checks for identity reload.");
45DEFINE_FLAG(bool, reload_every_optimized, true, "Only from optimized code.");
46DEFINE_FLAG(bool,
47 reload_every_back_off,
48 false,
49 "Double the --reload-every value after each reload.");
50DEFINE_FLAG(bool,
51 reload_force_rollback,
52 false,
53 "Force all reloads to fail and rollback.");
54DEFINE_FLAG(bool,
55 check_reloaded,
56 false,
57 "Assert that an isolate has reloaded at least once.")
58DEFINE_FLAG(bool, gc_during_reload, false, "Cause explicit GC during reload.");
59
60DECLARE_FLAG(bool, trace_deoptimization);
61
62#define IG (isolate_group())
63#define Z zone_
64
65#define TIMELINE_SCOPE(name) \
66 TimelineBeginEndScope tbes##name(Thread::Current(), \
67 Timeline::GetIsolateStream(), #name)
68
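// Aside: an illustrative sketch (not part of the original source) of the
// RAII pattern TIMELINE_SCOPE relies on. TimelineBeginEndScope records a
// begin event in its constructor and the matching end event in its
// destructor; the ## paste gives each scope a unique variable name.
// ScopedEvent and SCOPED_EVENT below are hypothetical stand-ins.

#include <cstdio>

class ScopedEvent {
 public:
  explicit ScopedEvent(const char* label) : label_(label) {
    std::printf("[timeline] begin %s\n", label_);
  }
  ~ScopedEvent() { std::printf("[timeline] end %s\n", label_); }

 private:
  const char* const label_;
};

#define SCOPED_EVENT(name) ScopedEvent event##name(#name)

void CheckpointExample() {
  SCOPED_EVENT(Checkpoint);  // prints "begin Checkpoint" ... "end Checkpoint"
}
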
69// The ObjectLocator is used for collecting instances that
70// need to be morphed.
71class ObjectLocator : public ObjectVisitor {
72 public:
73 explicit ObjectLocator(IsolateGroupReloadContext* context)
74 : context_(context), count_(0) {}
75
76 void VisitObject(ObjectPtr obj) override {
77 InstanceMorpher* morpher =
78 context_->instance_morpher_by_cid_.LookupValue(obj->GetClassId());
79 if (morpher != nullptr) {
80 morpher->AddObject(obj);
81 count_++;
82 }
83 }
84
85 // Return the number of located objects for morphing.
86 intptr_t count() { return count_; }
87
88 private:
89 IsolateGroupReloadContext* context_;
90 intptr_t count_;
91};
92
93static bool HasNoTasks(Heap* heap) {
94 MonitorLocker ml(heap->old_space()->tasks_lock());
95 return heap->old_space()->tasks() == 0;
96}
97
98InstanceMorpher* InstanceMorpher::CreateFromClassDescriptors(
99 Zone* zone,
100 ClassTable* class_table,
101 const Class& from,
102 const Class& to) {
103 auto mapping = new (zone) FieldMappingArray();
104 auto new_fields_offsets = new (zone) FieldOffsetArray();
105
106 if (from.NumTypeArguments() > 0) {
107 // Add copying of the optional type argument field.
108 intptr_t from_offset = from.host_type_arguments_field_offset();
109 ASSERT(from_offset != Class::kNoTypeArguments);
110 intptr_t to_offset = to.host_type_arguments_field_offset();
111 ASSERT(to_offset != Class::kNoTypeArguments);
112 mapping->Add({from_offset, kIllegalCid});
113 mapping->Add({to_offset, kIllegalCid});
114 }
115
116 // Add copying of the instance fields if matching by name.
117 // Note: currently the types of the fields are ignored.
118 const Array& from_fields = Array::Handle(
119 from.OffsetToFieldMap());
120 const Array& to_fields = Array::Handle(to.OffsetToFieldMap());
121 Field& from_field = Field::Handle();
122 Field& to_field = Field::Handle();
123 String& from_name = String::Handle();
124 String& to_name = String::Handle();
125
126 auto ensure_boxed_and_guarded = [&](const Field& field) {
127 field.set_needs_load_guard(true);
128 if (field.is_unboxed()) {
129 to.MarkFieldBoxedDuringReload(class_table, field);
130 }
131 };
132
133 // Scan across all the fields in the new class definition.
134 for (intptr_t i = 0; i < to_fields.Length(); i++) {
135 if (to_fields.At(i) == Field::null()) {
136 continue; // Ignore non-fields.
137 }
138
139 // Grab the field's name.
140 to_field = Field::RawCast(to_fields.At(i));
141 ASSERT(to_field.is_instance());
142 to_name = to_field.name();
143
144 // Did this field not exist in the old class definition?
145 bool new_field = true;
146
147 // Find this field in the old class.
148 for (intptr_t j = 0; j < from_fields.Length(); j++) {
149 if (from_fields.At(j) == Field::null()) {
150 continue; // Ignore non-fields.
151 }
152 from_field = Field::RawCast(from_fields.At(j));
153 ASSERT(from_field.is_instance());
154 from_name = from_field.name();
155 if (from_name.Equals(to_name)) {
156 intptr_t from_box_cid = kIllegalCid;
157 intptr_t to_box_cid = kIllegalCid;
158
159 // Check if either of the fields is unboxed.
160 if ((from_field.is_unboxed() && from_field.type() != to_field.type()) ||
161 (from_field.is_unboxed() != to_field.is_unboxed())) {
162 // For simplicity we just migrate to boxed fields if such a
163 // situation occurs.
164 ensure_boxed_and_guarded(to_field);
165 }
166
167 if (from_field.is_unboxed()) {
168 const auto field_cid = from_field.guarded_cid();
169 switch (field_cid) {
170 case kDoubleCid:
171 case kFloat32x4Cid:
172 case kFloat64x2Cid:
173 from_box_cid = field_cid;
174 break;
175 default:
176 from_box_cid = kIntegerCid;
177 break;
178 }
179 }
180
181 if (to_field.is_unboxed()) {
182 const auto field_cid = to_field.guarded_cid();
183 switch (field_cid) {
184 case kDoubleCid:
185 case kFloat32x4Cid:
186 case kFloat64x2Cid:
187 to_box_cid = field_cid;
188 break;
189 default:
190 to_box_cid = kIntegerCid;
191 break;
192 }
193 }
194
195 // Field can't become unboxed if it was boxed.
196 ASSERT(from_box_cid != kIllegalCid || to_box_cid == kIllegalCid);
197
198 // Success
199 mapping->Add({from_field.HostOffset(), from_box_cid});
200 mapping->Add({to_field.HostOffset(), to_box_cid});
201
202 // Field did exist in old class definition.
203 new_field = false;
204 break;
205 }
206 }
207
208 if (new_field) {
209 ensure_boxed_and_guarded(to_field);
210 new_fields_offsets->Add(to_field.HostOffset());
211 }
212 }
213
214 ASSERT(from.id() == to.id());
215 return new (zone)
216 InstanceMorpher(zone, to.id(), from, to, mapping, new_fields_offsets);
217}
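
// Worked example (illustration, not from the original source): reloading
//   class A { int x; double y; }  =>  class A { double y; String z; }
// pairs fields by name, so the loop above emits one (from, to) pair for y,
// e.g. {offset_of(old y), <box cid>} -> {offset_of(new y), <box cid>}.
// The new field z has no old counterpart, so its offset is appended to
// new_fields_offsets and CreateMorphedCopies later initializes it to the
// sentinel. The removed field x contributes no pair, so its value is
// simply not carried over.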
218
219InstanceMorpher::InstanceMorpher(Zone* zone,
220 classid_t cid,
221 const Class& old_class,
222 const Class& new_class,
223 FieldMappingArray* mapping,
224 FieldOffsetArray* new_fields_offsets)
225 : zone_(zone),
226 cid_(cid),
227 old_class_(Class::Handle(zone, old_class.ptr())),
228 new_class_(Class::Handle(zone, new_class.ptr())),
229 mapping_(mapping),
230 new_fields_offsets_(new_fields_offsets),
231 before_(zone, 16) {}
232
233void InstanceMorpher::AddObject(ObjectPtr object) {
234 ASSERT(object->GetClassId() == cid_);
235 const Instance& instance = Instance::Cast(Object::Handle(Z, object));
236 before_.Add(&instance);
237}
238
239void InstanceMorpher::CreateMorphedCopies(Become* become) {
240 Instance& after = Instance::Handle(Z);
241 Object& value = Object::Handle(Z);
242 for (intptr_t i = 0; i < before_.length(); i++) {
243 const Instance& before = *before_.At(i);
244
245 // Code can reference constants / canonical objects either directly in the
246 // instruction stream (ia32) or via an object pool.
247 //
248 // We have the following invariants:
249 //
250 // a) Those canonical objects don't change state (i.e. are not mutable):
251 // our optimizer can e.g. execute loads of such constants at
252 // compile-time.
253 //
254 // => We ensure that const-classes with live constants cannot be
255 // reloaded to become non-const classes (see Class::CheckReload).
256 //
257 // b) Those canonical objects live in old space: e.g. on ia32 the
258 // scavenger does not make the RX pages writable and therefore cannot
259 // update pointers embedded in the instruction stream.
260 //
261 // In order to maintain these invariants we always morph canonical
262 // objects to old space.
263 const bool is_canonical = before.IsCanonical();
264 const Heap::Space space = is_canonical ? Heap::kOld : Heap::kNew;
265 after = Instance::NewAlreadyFinalized(new_class_, space);
266
267 // We preserve the canonical bit of the object, since this object is present
268 // in the class's constants.
269 if (is_canonical) {
270 after.SetCanonical();
271 }
272#if defined(HASH_IN_OBJECT_HEADER)
273 const uint32_t hash = Object::GetCachedHash(before.ptr());
274 Object::SetCachedHashIfNotSet(after.ptr(), hash);
275#endif
276
277 // Morph the context from [before] to [after] using mapping_.
278 for (intptr_t i = 0; i < mapping_->length(); i += 2) {
279 const auto& from = mapping_->At(i);
280 const auto& to = mapping_->At(i + 1);
281 ASSERT(from.offset > 0);
282 ASSERT(to.offset > 0);
283 if (from.box_cid == kIllegalCid) {
284 // Boxed to boxed field migration.
285 ASSERT(to.box_cid == kIllegalCid);
286 // No handle: raw_value might be a ForwardingCorpse for an object
287 // processed earlier in instance morphing
288 ObjectPtr raw_value = before.RawGetFieldAtOffset(from.offset);
289 after.RawSetFieldAtOffset(to.offset, raw_value);
290 } else if (to.box_cid == kIllegalCid) {
291 // Unboxed to boxed field migration.
292 switch (from.box_cid) {
293 case kDoubleCid: {
294 const auto unboxed_value =
295 before.RawGetUnboxedFieldAtOffset<double>(from.offset);
296 value = Double::New(unboxed_value);
297 break;
298 }
299 case kFloat32x4Cid: {
300 const auto unboxed_value =
301 before.RawGetUnboxedFieldAtOffset<simd128_value_t>(from.offset);
302 value = Float32x4::New(unboxed_value);
303 break;
304 }
305 case kFloat64x2Cid: {
306 const auto unboxed_value =
307 before.RawGetUnboxedFieldAtOffset<simd128_value_t>(from.offset);
308 value = Float64x2::New(unboxed_value);
309 break;
310 }
311 case kIntegerCid: {
312 const auto unboxed_value =
313 before.RawGetUnboxedFieldAtOffset<int64_t>(from.offset);
314 value = Integer::New(unboxed_value);
315 break;
316 }
317 }
318 if (is_canonical) {
319 value = Instance::Cast(value).Canonicalize(Thread::Current());
320 }
321 after.RawSetFieldAtOffset(to.offset, value);
322 } else {
323 // Unboxed to unboxed field migration.
324 ASSERT(to.box_cid == from.box_cid);
325 switch (from.box_cid) {
326 case kDoubleCid: {
327 const auto unboxed_value =
328 before.RawGetUnboxedFieldAtOffset<double>(from.offset);
329 after.RawSetUnboxedFieldAtOffset<double>(to.offset, unboxed_value);
330 break;
331 }
332 case kFloat32x4Cid:
333 case kFloat64x2Cid: {
334 const auto unboxed_value =
335 before.RawGetUnboxedFieldAtOffset<simd128_value_t>(from.offset);
336 after.RawSetUnboxedFieldAtOffset<simd128_value_t>(to.offset,
337 unboxed_value);
338 break;
339 }
340 case kIntegerCid: {
341 const auto unboxed_value =
342 before.RawGetUnboxedFieldAtOffset<int64_t>(from.offset);
343 after.RawSetUnboxedFieldAtOffset<int64_t>(to.offset, unboxed_value);
344 break;
345 }
346 }
347 }
348 }
349
350 for (intptr_t i = 0; i < new_fields_offsets_->length(); i++) {
351 const auto& field_offset = new_fields_offsets_->At(i);
352 after.RawSetFieldAtOffset(field_offset, Object::sentinel());
353 }
354
355 // Convert the old instance into a filler object. We will switch to the
356 // new class table before the next heap walk, so there must be no
357 // instances of any class with the old size.
358 Become::MakeDummyObject(before);
359
360 become->Add(before, after);
361 }
362}
363
364static const char* BoxCidToCString(intptr_t box_cid) {
365 switch (box_cid) {
366 case kDoubleCid:
367 return "double";
368 case kFloat32x4Cid:
369 return "float32x4";
370 case kFloat64x2Cid:
371 return "float64x2";
372 case kIntegerCid:
373 return "int64";
374 }
375 return "?";
376}
377
378void InstanceMorpher::Dump() const {
379 LogBlock blocker;
380 THR_Print("Morphing objects with cid: %d via this mapping: ", cid_);
381 for (int i = 0; i < mapping_->length(); i += 2) {
382 const auto& from = mapping_->At(i);
383 const auto& to = mapping_->At(i + 1);
384 THR_Print(" %" Pd "->%" Pd "", from.offset, to.offset);
385 THR_Print(" (%" Pd " -> %" Pd ")", from.box_cid, to.box_cid);
386 if (to.box_cid == kIllegalCid && from.box_cid != kIllegalCid) {
387 THR_Print("[box %s]", BoxCidToCString(from.box_cid));
388 } else if (to.box_cid != kIllegalCid) {
389 THR_Print("[%s]", BoxCidToCString(from.box_cid));
390 }
391 }
392 THR_Print("\n");
393}
394
395void InstanceMorpher::AppendTo(JSONArray* array) {
396 JSONObject jsobj(array);
397 jsobj.AddProperty("type", "ShapeChangeMapping");
398 jsobj.AddProperty64("class-id", cid_);
399 jsobj.AddProperty("instanceCount", before_.length());
400 JSONArray map(&jsobj, "fieldOffsetMappings");
401 for (int i = 0; i < mapping_->length(); i += 2) {
402 const auto& from = mapping_->At(i);
403 const auto& to = mapping_->At(i + 1);
404
405 JSONArray pair(&map);
406 pair.AddValue(from.offset);
407 pair.AddValue(to.offset);
408 if (to.box_cid == kIllegalCid && from.box_cid != kIllegalCid) {
409 pair.AddValueF("box %s", BoxCidToCString(from.box_cid));
410 } else if (to.box_cid != kIllegalCid) {
411 pair.AddValueF("%s", BoxCidToCString(from.box_cid));
412 }
413 }
414}
415
416void ReasonForCancelling::Report(IsolateGroupReloadContext* context) {
417 const Error& error = Error::Handle(ToError());
418 context->ReportError(error);
419}
420
421ErrorPtr ReasonForCancelling::ToError() {
422 // By default create the error returned from ToString.
423 const String& message = String::Handle(ToString());
424 return LanguageError::New(message);
425}
426
427StringPtr ReasonForCancelling::ToString() {
428 UNREACHABLE();
429 return nullptr;
430}
431
432void ReasonForCancelling::AppendTo(JSONArray* array) {
433 JSONObject jsobj(array);
434 jsobj.AddProperty("type", "ReasonForCancelling");
436 jsobj.AddProperty("message", message.ToCString());
437}
438
439ClassReasonForCancelling::ClassReasonForCancelling(Zone* zone,
440 const Class& from,
441 const Class& to)
442 : ReasonForCancelling(zone),
443 from_(Class::ZoneHandle(zone, from.ptr())),
444 to_(Class::ZoneHandle(zone, to.ptr())) {}
445
446void ClassReasonForCancelling::AppendTo(JSONArray* array) {
447 JSONObject jsobj(array);
448 jsobj.AddProperty("type", "ReasonForCancelling");
449 jsobj.AddProperty("class", from_);
451 jsobj.AddProperty("message", message.ToCString());
452}
453
454ErrorPtr IsolateGroupReloadContext::error() const {
455 ASSERT(!reasons_to_cancel_reload_.is_empty());
456 // Report the first error to the surroundings.
457 return reasons_to_cancel_reload_.At(0)->ToError();
458}
459
460class ScriptUrlSetTraits {
461 public:
462 static bool ReportStats() { return false; }
463 static const char* Name() { return "ScriptUrlSetTraits"; }
464
465 static bool IsMatch(const Object& a, const Object& b) {
466 if (!a.IsString() || !b.IsString()) {
467 return false;
468 }
469
470 return String::Cast(a).Equals(String::Cast(b));
471 }
472
473 static uword Hash(const Object& obj) { return String::Cast(obj).Hash(); }
474};
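
// Aside: a self-contained sketch (not part of the original source) of the
// traits idea. These *Traits classes plug Name/IsMatch/Hash into the VM's
// template hash tables; the standard-library analogue is parameterizing a
// container with functor types:

#include <string>
#include <unordered_set>

struct UrlHash {
  size_t operator()(const std::string& url) const {
    return std::hash<std::string>{}(url);
  }
};

struct UrlEq {
  bool operator()(const std::string& a, const std::string& b) const {
    return a == b;  // mirrors ScriptUrlSetTraits::IsMatch
  }
};

using ScriptUrlSet = std::unordered_set<std::string, UrlHash, UrlEq>;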
475
476class ClassMapTraits {
477 public:
478 static bool ReportStats() { return false; }
479 static const char* Name() { return "ClassMapTraits"; }
480
481 static bool IsMatch(const Object& a, const Object& b) {
482 if (!a.IsClass() || !b.IsClass()) {
483 return false;
484 }
485 return ProgramReloadContext::IsSameClass(Class::Cast(a), Class::Cast(b));
486 }
487
488 static uword Hash(const Object& obj) {
489 uword class_name_hash = String::HashRawSymbol(Class::Cast(obj).Name());
490 LibraryPtr raw_library = Class::Cast(obj).library();
491 if (raw_library == Library::null()) {
492 return class_name_hash;
493 }
494 return FinalizeHash(
495 CombineHashes(class_name_hash,
496 String::Hash(Library::Handle(raw_library).private_key())),
497 /* hashbits= */ 30);
498 }
499};
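
// Aside: ClassMapTraits::Hash pairs an old class with its reloaded
// replacement by (class name, defining library's private key), so a class
// keeps its identity across a reload even though a brand-new Class object
// was loaded. A sketch of Jenkins-style mixing in the spirit of vm/hash.h
// (the VM's exact shift constants may differ):

#include <cstdint>

inline uint32_t CombineHashesSketch(uint32_t hash, uint32_t other) {
  hash += other;
  hash += hash << 10;
  hash ^= hash >> 6;
  return hash;
}

inline uint32_t FinalizeHashSketch(uint32_t hash, int hashbits) {
  hash += hash << 3;
  hash ^= hash >> 11;
  hash += hash << 15;
  hash &= (1u << hashbits) - 1;  // truncate to the requested bit width
  return hash == 0 ? 1 : hash;   // keep hashes non-zero
}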
500
501class LibraryMapTraits {
502 public:
503 static bool ReportStats() { return false; }
504 static const char* Name() { return "LibraryMapTraits"; }
505
506 static bool IsMatch(const Object& a, const Object& b) {
507 if (!a.IsLibrary() || !b.IsLibrary()) {
508 return false;
509 }
510 return ProgramReloadContext::IsSameLibrary(Library::Cast(a),
511 Library::Cast(b));
512 }
513
514 static uword Hash(const Object& obj) { return Library::Cast(obj).UrlHash(); }
515};
516
517bool ProgramReloadContext::IsSameClass(const Class& a, const Class& b) {
518 // TODO(turnidge): We need to look at generic type arguments for
519 // synthetic mixin classes. Their names are not necessarily unique
520 // currently.
521 const String& a_name = String::Handle(a.Name());
522 const String& b_name = String::Handle(b.Name());
523
524 if (!a_name.Equals(b_name)) {
525 return false;
526 }
527
528 const Library& a_lib = Library::Handle(a.library());
529 const Library& b_lib = Library::Handle(b.library());
530
531 if (a_lib.IsNull() || b_lib.IsNull()) {
532 return a_lib.ptr() == b_lib.ptr();
533 }
534 return (a_lib.private_key() == b_lib.private_key());
535}
536
537bool ProgramReloadContext::IsSameLibrary(const Library& a_lib,
538 const Library& b_lib) {
539 const String& a_lib_url =
540 String::Handle(a_lib.IsNull() ? String::null() : a_lib.url());
541 const String& b_lib_url =
542 String::Handle(b_lib.IsNull() ? String::null() : b_lib.url());
543 return a_lib_url.Equals(b_lib_url);
544}
545
546IsolateGroupReloadContext::IsolateGroupReloadContext(
547 IsolateGroup* isolate_group,
548 ClassTable* class_table,
549 JSONStream* js)
550 : zone_(Thread::Current()->zone()),
551 isolate_group_(isolate_group),
552 class_table_(class_table),
553 start_time_micros_(OS::GetCurrentMonotonicMicros()),
554 reload_timestamp_(OS::GetCurrentTimeMillis()),
555 js_(js),
556 instance_morphers_(zone_, 0),
557 reasons_to_cancel_reload_(zone_, 0),
558 instance_morpher_by_cid_(zone_),
559 root_lib_url_(String::Handle(Z, String::null())),
560 root_url_prefix_(String::null()),
561 old_root_url_prefix_(String::null()) {}
562IsolateGroupReloadContext::~IsolateGroupReloadContext() {}
563
564ProgramReloadContext::ProgramReloadContext(
565 std::shared_ptr<IsolateGroupReloadContext> group_reload_context,
566 IsolateGroup* isolate_group)
567 : zone_(Thread::Current()->zone()),
568 group_reload_context_(group_reload_context),
569 isolate_group_(isolate_group),
570 old_classes_set_storage_(Array::null()),
571 class_map_storage_(Array::null()),
572 removed_class_set_storage_(Array::null()),
573 old_libraries_set_storage_(Array::null()),
574 library_map_storage_(Array::null()),
575 saved_root_library_(Library::null()),
576 saved_libraries_(GrowableObjectArray::null()) {
577 // NOTE: DO NOT ALLOCATE ANY RAW OBJECTS HERE. The ProgramReloadContext is not
578 // associated with the isolate yet and if a GC is triggered here the raw
579 // objects will not be properly accounted for.
580 ASSERT(zone_ != nullptr);
581}
582
583ProgramReloadContext::~ProgramReloadContext() {
584 ASSERT(zone_ == Thread::Current()->zone());
585 ASSERT(IG->class_table() == IG->heap_walk_class_table());
586}
587
588void IsolateGroupReloadContext::ReportError(const Error& error) {
589 IsolateGroup* isolate_group = IsolateGroup::Current();
590 if (IsolateGroup::IsSystemIsolateGroup(isolate_group)) {
591 return;
592 }
593 TIR_Print("ISO-RELOAD: Error: %s\n", error.ToErrorCString());
594 ServiceEvent service_event(isolate_group, ServiceEvent::kIsolateReload);
595 service_event.set_reload_error(&error);
596 Service::HandleEvent(&service_event);
597}
598
599void IsolateGroupReloadContext::ReportSuccess() {
600 IsolateGroup* isolate_group = IsolateGroup::Current();
601 if (IsolateGroup::IsSystemIsolateGroup(isolate_group)) {
602 return;
603 }
604 ServiceEvent service_event(isolate_group, ServiceEvent::kIsolateReload);
605 Service::HandleEvent(&service_event);
606}
607
608class Aborted : public ReasonForCancelling {
609 public:
610 Aborted(Zone* zone, const Error& error)
611 : ReasonForCancelling(zone),
612 error_(Error::ZoneHandle(zone, error.ptr())) {}
613
614 private:
615 const Error& error_;
616
617 ErrorPtr ToError() { return error_.ptr(); }
618 StringPtr ToString() {
619 return String::NewFormatted("%s", error_.ToErrorCString());
620 }
621};
622
623static intptr_t CommonSuffixLength(const char* a, const char* b) {
624 const intptr_t a_length = strlen(a);
625 const intptr_t b_length = strlen(b);
626 intptr_t a_cursor = a_length;
627 intptr_t b_cursor = b_length;
628
629 while ((a_cursor >= 0) && (b_cursor >= 0)) {
630 if (a[a_cursor] != b[b_cursor]) {
631 break;
632 }
633 a_cursor--;
634 b_cursor--;
635 }
636
637 ASSERT((a_length - a_cursor) == (b_length - b_cursor));
638 return (a_length - a_cursor);
639}
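
// Worked example (not part of the original source): the cursors start at
// the NUL terminators, which always match, so the return value counts the
// common suffix plus one. For
//   a = "file:///work/app/main.dart"   (length 26)
//   b = "file:///home/app/main.dart"   (length 26)
// scanning backwards first mismatches at 'k' vs 'e' (index 11), giving
// 26 - 11 = 15: the 14-character suffix "/app/main.dart" plus the NUL.
// GetRootLibUrl below compensates with its "+ 1" when slicing off the
// prefixes, yielding "file:///work" and "file:///home".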
640
641static ObjectPtr AcceptCompilation(Thread* thread) {
642 TransitionVMToNative transition(thread);
643 Dart_KernelCompilationResult result = KernelIsolate::AcceptCompilation();
644 if (result.status != Dart_KernelCompilationStatus_Ok) {
645 if (result.status != Dart_KernelCompilationStatus_MsgFailed) {
646 FATAL(
647 "An error occurred while accepting the most recent"
648 " compilation results: %s",
649 result.error);
650 }
651 TIR_Print(
652 "An error occurred while accepting the most recent"
653 " compilation results: %s",
654 result.error);
655 Zone* zone = thread->zone();
656 const auto& error_str = String::Handle(zone, String::New(result.error));
657 free(result.error);
658 return ApiError::New(error_str);
659 }
660 return Object::null();
661}
662
663static ObjectPtr RejectCompilation(Thread* thread) {
664 TransitionVMToNative transition(thread);
665 Dart_KernelCompilationResult result = KernelIsolate::RejectCompilation();
666 if (result.status != Dart_KernelCompilationStatus_Ok) {
667 if (result.status != Dart_KernelCompilationStatus_MsgFailed) {
668 FATAL(
669 "An error occurred while rejecting the most recent"
670 " compilation results: %s",
671 result.error);
672 }
673 TIR_Print(
674 "An error occurred while rejecting the most recent"
675 " compilation results: %s",
676 result.error);
677 Zone* zone = thread->zone();
678 const auto& error_str = String::Handle(zone, String::New(result.error));
679 free(result.error);
680 return ApiError::New(error_str);
681 }
682 return Object::null();
683}
684
685// If [root_script_url] is null, attempt to load from [kernel_buffer].
686bool IsolateGroupReloadContext::Reload(bool force_reload,
687 const char* root_script_url,
688 const char* packages_url,
689 const uint8_t* kernel_buffer,
690 intptr_t kernel_buffer_size) {
691 TIMELINE_SCOPE(Reload);
692
693 Thread* thread = Thread::Current();
694 ASSERT(thread->OwnsReloadSafepoint());
695
696 Heap* heap = IG->heap();
697 num_old_libs_ =
698 GrowableObjectArray::Handle(Z, IG->object_store()->libraries()).Length();
699
700 // Grab root library before calling CheckpointBeforeReload.
701 GetRootLibUrl(root_script_url);
702
703 std::unique_ptr<kernel::Program> kernel_program;
704
705 // Reset stats.
706 num_received_libs_ = 0;
707 bytes_received_libs_ = 0;
708 num_received_classes_ = 0;
709 num_received_procedures_ = 0;
710
711 bool did_kernel_compilation = false;
712 bool skip_reload = false;
713 {
714 // Load the kernel program and figure out the modified libraries.
715 intptr_t* p_num_received_classes = nullptr;
716 intptr_t* p_num_received_procedures = nullptr;
717
718 // Check to see if the file at root_script_url is a valid .dill file.
719 // If that's the case, a Program* is returned. Otherwise, this is
720 // likely a source file that needs to be compiled, so ReadFromFile
721 // returns nullptr.
722 kernel_program = kernel::Program::ReadFromFile(root_script_url);
723 if (kernel_program != nullptr) {
724 num_received_libs_ = kernel_program->library_count();
725 bytes_received_libs_ = kernel_program->binary().LengthInBytes();
726 p_num_received_classes = &num_received_classes_;
727 p_num_received_procedures = &num_received_procedures_;
728 } else {
729 if (kernel_buffer == nullptr || kernel_buffer_size == 0) {
730 char* error = CompileToKernel(force_reload, packages_url,
731 &kernel_buffer, &kernel_buffer_size);
732 did_kernel_compilation = true;
733 if (error != nullptr) {
734 TIR_Print("---- LOAD FAILED, ABORTING RELOAD\n");
735 const auto& error_str = String::Handle(Z, String::New(error));
736 free(error);
737 const ApiError& error = ApiError::Handle(Z, ApiError::New(error_str));
738 AddReasonForCancelling(new Aborted(Z, error));
739 ReportReasonsForCancelling();
740 CommonFinalizeTail(num_old_libs_);
741
742 RejectCompilation(thread);
743 return false;
744 }
745 }
746 const auto& typed_data = ExternalTypedData::Handle(
747 Z, ExternalTypedData::NewFinalizeWithFree(
748 const_cast<uint8_t*>(kernel_buffer), kernel_buffer_size));
749 kernel_program = kernel::Program::ReadFromTypedData(typed_data);
750 }
751
752 NoActiveIsolateScope no_active_isolate_scope;
753
754 IsolateGroupSource* source = IG->source();
755 source->add_loaded_blob(Z,
756 ExternalTypedData::Cast(kernel_program->binary()));
757
758 modified_libs_ = new (Z) BitVector(Z, num_old_libs_);
759 kernel::KernelLoader::FindModifiedLibraries(
760 kernel_program.get(), IG, modified_libs_, force_reload, &skip_reload,
761 p_num_received_classes, p_num_received_procedures);
762 modified_libs_transitive_ = new (Z) BitVector(Z, num_old_libs_);
763 BuildModifiedLibrariesClosure(modified_libs_);
764
765 ASSERT(num_saved_libs_ == -1);
766 num_saved_libs_ = 0;
767 for (intptr_t i = 0; i < modified_libs_->length(); i++) {
768 if (!modified_libs_->Contains(i)) {
769 num_saved_libs_++;
770 }
771 }
772 }
773
774 NoActiveIsolateScope no_active_isolate_scope;
775
776 if (skip_reload) {
777 ASSERT(modified_libs_->IsEmpty());
778 reload_skipped_ = true;
779 ReportOnJSON(js_, num_old_libs_);
780
781 // If we use the CFE and performed a compilation, we need to notify that
782 // we have accepted the compilation to clear some state in the incremental
783 // compiler.
784 if (did_kernel_compilation) {
785 const auto& result = Object::Handle(Z, AcceptCompilation(thread));
786 if (result.IsError()) {
787 const auto& error = Error::Cast(result);
788 AddReasonForCancelling(new Aborted(Z, error));
789 ReportReasonsForCancelling();
790 CommonFinalizeTail(num_old_libs_);
791 return false;
792 }
793 }
794 TIR_Print("---- SKIPPING RELOAD (No libraries were modified)\n");
795 return false;
796 }
797
798 TIR_Print("---- STARTING RELOAD\n");
799
800 intptr_t number_of_isolates = 0;
801 isolate_group_->ForEachIsolate(
802 [&](Isolate* isolate) { number_of_isolates++; });
803
804 // Wait for any concurrent marking tasks to finish and turn off the
805 // concurrent marker during reload as we might be allocating new instances
806 // (constants) when loading the new kernel file and this could cause
807 // inconsistency between the saved class table and the new class table.
808 const bool old_concurrent_mark_flag =
809 heap->old_space()->enable_concurrent_mark();
810 if (old_concurrent_mark_flag) {
811 heap->WaitForMarkerTasks(thread);
812 heap->old_space()->set_enable_concurrent_mark(false);
813 }
814
815 // Ensure all functions on the stack have unoptimized code.
816 // Deoptimize all code that had optimizing decisions that are dependent on
817 // assumptions from field guards or CHA or deferred library prefixes.
818 // TODO(johnmccutchan): Deoptimizing dependent code here (before the reload)
819 // is paranoid. This likely can be moved to the commit phase.
820 IG->program_reload_context()->EnsuredUnoptimizedCodeForStack();
821 IG->program_reload_context()->DeoptimizeDependentCode();
822 IG->program_reload_context()->ReloadPhase1AllocateStorageMapsAndCheckpoint();
823
824 // Renumbering the libraries has invalidated this.
825 modified_libs_ = nullptr;
826 modified_libs_transitive_ = nullptr;
827
828 if (FLAG_gc_during_reload) {
829 // We force the GC to compact, which is more likely to discover untracked
830 // pointers (and other issues, like incorrect class table).
831 heap->CollectAllGarbage(GCReason::kDebugging, /*compact=*/true);
832 }
833
834 // Clone the class table.
835 {
836 TIMELINE_SCOPE(CheckpointClasses);
837 IG->program_reload_context()->CheckpointClasses();
838 }
839
840 if (FLAG_gc_during_reload) {
841 // We force the GC to compact, which is more likely to discover untracked
842 // pointers (and other issues, like incorrect class table).
843 heap->CollectAllGarbage(GCReason::kDebugging, /*compact=*/true);
844 }
845
846 // We synchronously load the hot-reload kernel diff (which includes changed
847 // libraries and any libraries transitively depending on them).
848 //
849 // If loading the hot-reload diff succeeded we'll finalize the loading, which
850 // will either commit or reject the reload request.
851 const auto& result =
852 Object::Handle(Z, IG->program_reload_context()->ReloadPhase2LoadKernel(
853 kernel_program.get(), root_lib_url_));
854
855 if (result.IsError()) {
856 TIR_Print("---- LOAD FAILED, ABORTING RELOAD\n");
857
858 const auto& error = Error::Cast(result);
859 AddReasonForCancelling(new Aborted(Z, error));
860
861 IG->program_reload_context()->ReloadPhase4Rollback();
862 CommonFinalizeTail(num_old_libs_);
863 } else {
864 ASSERT(!reload_skipped_ && !reload_finalized_);
865 TIR_Print("---- LOAD SUCCEEDED\n");
866
867 IG->program_reload_context()->ReloadPhase3FinalizeLoading();
868
869 if (FLAG_gc_during_reload) {
870 // We force the GC to compact, which is more likely to discover untracked
871 // pointers (and other issues, like incorrect class table).
872 heap->CollectAllGarbage(GCReason::kDebugging, /*compact=*/true);
873 }
874
875 // If we use the CFE and performed a compilation, we need to notify that
876 // we have accepted the compilation to clear some state in the incremental
877 // compiler.
878 if (did_kernel_compilation) {
879 TIMELINE_SCOPE(AcceptCompilation);
880 const auto& result = Object::Handle(Z, AcceptCompilation(thread));
881 if (result.IsError()) {
882 const auto& error = Error::Cast(result);
883 AddReasonForCancelling(new Aborted(Z, error));
884 }
885 }
886
887 if (!FLAG_reload_force_rollback && !HasReasonsForCancelling()) {
888 TIR_Print("---- COMMITTING RELOAD\n");
889 isolate_group_->program_reload_context()->ReloadPhase4CommitPrepare();
890 bool discard_class_tables = true;
891 if (HasInstanceMorphers()) {
892 // Find all objects that need to be morphed (reallocated to a new
893 // layout).
894 ObjectLocator locator(this);
895 {
896 TIMELINE_SCOPE(CollectInstances);
897 HeapIterationScope iteration(thread);
898 iteration.IterateObjects(&locator);
899 }
900
901 // We are still using the old class table at this point.
902 if (FLAG_gc_during_reload) {
903 // We force the GC to compact, which is more likely to discover
904 // untracked pointers (and other issues, like incorrect class table).
905 heap->CollectAllGarbage(GCReason::kDebugging, /*compact=*/true);
906 }
907 const intptr_t count = locator.count();
908 if (count > 0) {
909 TIMELINE_SCOPE(MorphInstances);
910
911 // While we are reallocating instances to their new layout, the heap
912 // will contain a mix of instances with the old and new layouts that
913 // have the same cid. This makes the heap unwalkable until the
914 // "become" operation below replaces all the instances of the old
915 // layout with forwarding corpses. Force heap growth to prevent layout
916 // confusion during this period.
917 ForceGrowthScope force_growth(thread);
918 // The HeapIterationScope above ensures no other GC tasks can be
919 // active.
920 ASSERT(HasNoTasks(heap));
921
922 MorphInstancesPhase1Allocate(&locator, IG->become());
923 {
924 // Apply the new class table before "become". Become will replace
925 // all the instances of the old layout with forwarding corpses, then
926 // perform a heap walk to fix references to the forwarding corpses.
927 // During this heap walk, it will encounter instances of the new
928 // layout, so it requires the new class table.
929 ASSERT(HasNoTasks(heap));
930
931 // We accepted the hot-reload and morphed instances. So now we can
932 // commit to the changed class table and delete the saved one.
933 IG->DropOriginalClassTable();
934 }
935 MorphInstancesPhase2Become(IG->become());
936
937 discard_class_tables = false;
938 }
939 // We are using the new class table now.
940 if (FLAG_gc_during_reload) {
941 // We force the GC to compact, which is more likely to discover
942 // untracked pointers (and other issues, like incorrect class table).
943 heap->CollectAllGarbage(GCReason::kDebugging, /*compact=*/true);
944 }
945 }
946 if (FLAG_identity_reload) {
947 if (!discard_class_tables) {
948 TIR_Print("Identity reload failed! Some instances were morphed\n");
949 }
950 if (IG->heap_walk_class_table()->NumCids() !=
951 IG->class_table()->NumCids()) {
952 TIR_Print("Identity reload failed! B#C=%" Pd " A#C=%" Pd "\n",
953 IG->heap_walk_class_table()->NumCids(),
954 IG->class_table()->NumCids());
955 }
956 if (IG->heap_walk_class_table()->NumTopLevelCids() !=
957 IG->class_table()->NumTopLevelCids()) {
958 TIR_Print("Identity reload failed! B#TLC=%" Pd " A#TLC=%" Pd "\n",
959 IG->heap_walk_class_table()->NumTopLevelCids(),
960 IG->class_table()->NumTopLevelCids());
961 }
962 }
963 if (discard_class_tables) {
964 IG->DropOriginalClassTable();
965 }
966 isolate_group_->program_reload_context()->ReloadPhase4CommitFinish();
967 TIR_Print("---- DONE COMMIT\n");
968 isolate_group_->set_last_reload_timestamp(reload_timestamp_);
969 } else {
970 TIR_Print("---- ROLLING BACK");
971 isolate_group_->program_reload_context()->ReloadPhase4Rollback();
972 }
973
974 // ValidateReload mutates the direct subclass information and does
975 // not remove dead subclasses. Rebuild the direct subclass
976 // information from scratch.
977 {
978 SafepointWriteRwLocker ml(thread, IG->program_lock());
979 IG->program_reload_context()->RebuildDirectSubclasses();
980 }
981 const intptr_t final_library_count =
982 GrowableObjectArray::Handle(Z, IG->object_store()->libraries())
983 .Length();
984 CommonFinalizeTail(final_library_count);
985 }
986
987 // Re-enable concurrent marking if it was initially on.
988 if (old_concurrent_mark_flag) {
989 heap->old_space()->set_enable_concurrent_mark(true);
990 }
991
992 bool success;
993 if (!result.IsError() || HasReasonsForCancelling()) {
994 ReportSuccess();
995 success = true;
996 } else {
997 ReportReasonsForCancelling();
998 success = false;
999 }
1000
1001 Array& null_array = Array::Handle(Z);
1002 // Invalidate the URI mapping caches.
1003 IG->object_store()->set_uri_to_resolved_uri_map(null_array);
1004 IG->object_store()->set_resolved_uri_to_uri_map(null_array);
1005
1006 // Re-queue any shutdown requests so they can inform each isolate's own thread
1007 // to shut down.
1008 if (result.IsUnwindError()) {
1009 const auto& error = UnwindError::Cast(result);
1010 ForEachIsolate([&](Isolate* isolate) {
1011 Isolate::KillIfExists(isolate, error.is_user_initiated()
1012 ? Isolate::kKillMsg
1013 : Isolate::kInternalKillMsg);
1014 });
1015 }
1016
1017 return success;
1018}
1019
1020/// Copied in from https://dart-review.googlesource.com/c/sdk/+/77722.
1021static void PropagateLibraryModified(
1022 const ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>* imported_by,
1023 intptr_t lib_index,
1024 BitVector* modified_libs) {
1025 ZoneGrowableArray<intptr_t>* dep_libs = (*imported_by)[lib_index];
1026 for (intptr_t i = 0; i < dep_libs->length(); i++) {
1027 intptr_t dep_lib_index = (*dep_libs)[i];
1028 if (!modified_libs->Contains(dep_lib_index)) {
1029 modified_libs->Add(dep_lib_index);
1030 PropagateLibraryModified(imported_by, dep_lib_index, modified_libs);
1031 }
1032 }
1033}
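
// Aside: a self-contained sketch (not part of the original source) of the
// traversal above. It is a depth-first walk of the reverse-import graph:
// every library that transitively imports a modified library is itself
// marked modified.

#include <vector>

// imported_by[i] lists the indices of libraries that import library i.
void MarkTransitiveImporters(const std::vector<std::vector<int>>& imported_by,
                             int lib_index,
                             std::vector<bool>* modified) {
  for (int importer : imported_by[lib_index]) {
    if (!(*modified)[importer]) {
      (*modified)[importer] = true;
      MarkTransitiveImporters(imported_by, importer, modified);
    }
  }
}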
1034
1035/// Copied in from https://dart-review.googlesource.com/c/sdk/+/77722.
1036void IsolateGroupReloadContext::BuildModifiedLibrariesClosure(
1037 BitVector* modified_libs) {
1038 const GrowableObjectArray& libs =
1039 GrowableObjectArray::Handle(IG->object_store()->libraries());
1040 Library& lib = Library::Handle();
1041 intptr_t num_libs = libs.Length();
1042
1043 // Construct the imported-by graph.
1044 ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>* imported_by = new (zone_)
1045 ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>(zone_, num_libs);
1046 imported_by->SetLength(num_libs);
1047 for (intptr_t i = 0; i < num_libs; i++) {
1048 (*imported_by)[i] = new (zone_) ZoneGrowableArray<intptr_t>(zone_, 0);
1049 }
1050 Array& ports = Array::Handle();
1051 Namespace& ns = Namespace::Handle();
1052 Library& target = Library::Handle();
1053 String& target_url = String::Handle();
1054
1055 for (intptr_t lib_idx = 0; lib_idx < num_libs; lib_idx++) {
1056 lib ^= libs.At(lib_idx);
1057 ASSERT(lib_idx == lib.index());
1058 if (lib.is_dart_scheme()) {
1059 // We don't care about imports among dart scheme libraries.
1060 continue;
1061 }
1062
1063 // Add imports to the import-by graph.
1064 ports = lib.imports();
1065 for (intptr_t import_idx = 0; import_idx < ports.Length(); import_idx++) {
1066 ns ^= ports.At(import_idx);
1067 if (!ns.IsNull()) {
1068 target = ns.target();
1069 target_url = target.url();
1070 (*imported_by)[target.index()]->Add(lib.index());
1071 }
1072 }
1073
1074 // Add exports to the import-by graph.
1075 ports = lib.exports();
1076 for (intptr_t export_idx = 0; export_idx < ports.Length(); export_idx++) {
1077 ns ^= ports.At(export_idx);
1078 if (!ns.IsNull()) {
1079 target = ns.target();
1080 (*imported_by)[target.index()]->Add(lib.index());
1081 }
1082 }
1083
1084 // Add prefixed imports to the import-by graph.
1085 DictionaryIterator entries(lib);
1086 Object& entry = Object::Handle();
1087 LibraryPrefix& prefix = LibraryPrefix::Handle();
1088 while (entries.HasNext()) {
1089 entry = entries.GetNext();
1090 if (entry.IsLibraryPrefix()) {
1091 prefix ^= entry.ptr();
1092 ports = prefix.imports();
1093 for (intptr_t import_idx = 0; import_idx < ports.Length();
1094 import_idx++) {
1095 ns ^= ports.At(import_idx);
1096 if (!ns.IsNull()) {
1097 target = ns.target();
1098 (*imported_by)[target.index()]->Add(lib.index());
1099 }
1100 }
1101 }
1102 }
1103 }
1104
1105 for (intptr_t lib_idx = 0; lib_idx < num_libs; lib_idx++) {
1106 lib ^= libs.At(lib_idx);
1107 if (lib.is_dart_scheme() || modified_libs_transitive_->Contains(lib_idx)) {
1108 // We don't consider dart scheme libraries during reload. If
1109 // the modified libs set already contains this library, then we
1110 // have already visited it.
1111 continue;
1112 }
1113 if (modified_libs->Contains(lib_idx)) {
1114 modified_libs_transitive_->Add(lib_idx);
1115 PropagateLibraryModified(imported_by, lib_idx, modified_libs_transitive_);
1116 }
1117 }
1118}
1119
1120void IsolateGroupReloadContext::GetRootLibUrl(const char* root_script_url) {
1121 const auto& old_root_lib =
1122 Library::Handle(IG->object_store()->root_library());
1123 ASSERT(!old_root_lib.IsNull());
1124 const auto& old_root_lib_url = String::Handle(old_root_lib.url());
1125
1126 // Root library url.
1127 if (root_script_url != nullptr) {
1128 root_lib_url_ = String::New(root_script_url);
1129 } else {
1130 root_lib_url_ = old_root_lib_url.ptr();
1131 }
1132
1133 // Check to see if the base url of the loaded libraries has moved.
1134 if (!old_root_lib_url.Equals(root_lib_url_)) {
1135 const char* old_root_library_url_c = old_root_lib_url.ToCString();
1136 const char* root_library_url_c = root_lib_url_.ToCString();
1137 const intptr_t common_suffix_length =
1138 CommonSuffixLength(root_library_url_c, old_root_library_url_c);
1139 root_url_prefix_ = String::SubString(
1140 root_lib_url_, 0, root_lib_url_.Length() - common_suffix_length + 1);
1141 old_root_url_prefix_ =
1142 String::SubString(old_root_lib_url, 0,
1143 old_root_lib_url.Length() - common_suffix_length + 1);
1144 }
1145}
1146
1147char* IsolateGroupReloadContext::CompileToKernel(bool force_reload,
1148 const char* packages_url,
1149 const uint8_t** kernel_buffer,
1150 intptr_t* kernel_buffer_size) {
1151 Dart_SourceFile* modified_scripts = nullptr;
1152 intptr_t modified_scripts_count = 0;
1153 FindModifiedSources(force_reload, &modified_scripts, &modified_scripts_count,
1154 packages_url);
1155
1156 Dart_KernelCompilationResult retval = {};
1157 {
1158 const char* root_lib_url = root_lib_url_.ToCString();
1159 TransitionVMToNative transition(Thread::Current());
1160 retval = KernelIsolate::CompileToKernel(
1161 root_lib_url, nullptr, 0, modified_scripts_count, modified_scripts,
1162 /*incremental_compile=*/true,
1163 /*snapshot_compile=*/false,
1164 /*embed_sources=*/true,
1165 /*package_config=*/nullptr,
1166 /*multiroot_filepaths=*/nullptr,
1167 /*multiroot_scheme=*/nullptr);
1168 }
1169 if (retval.status != Dart_KernelCompilationStatus_Ok) {
1170 if (retval.kernel != nullptr) {
1171 free(retval.kernel);
1172 }
1173 return retval.error;
1174 }
1175 *kernel_buffer = retval.kernel;
1176 *kernel_buffer_size = retval.kernel_size;
1177 return nullptr;
1178}
1179
1180void ProgramReloadContext::ReloadPhase1AllocateStorageMapsAndCheckpoint() {
1181 // Preallocate storage for maps.
1182 old_classes_set_storage_ =
1183 HashTables::New<UnorderedHashSet<ClassMapTraits> >(4);
1184 class_map_storage_ = HashTables::New<UnorderedHashMap<ClassMapTraits> >(4);
1185 removed_class_set_storage_ =
1186 HashTables::New<UnorderedHashSet<ClassMapTraits> >(4);
1187 old_libraries_set_storage_ =
1188 HashTables::New<UnorderedHashSet<LibraryMapTraits> >(4);
1189 library_map_storage_ =
1190 HashTables::New<UnorderedHashMap<LibraryMapTraits> >(4);
1191
1192 // While reloading everything we do must be reversible so that we can abort
1193 // safely if the reload fails. This function stashes things to the side and
1194 // prepares the isolate for the reload attempt.
1195 {
1196 TIMELINE_SCOPE(Checkpoint);
1197 CheckpointLibraries();
1198 }
1199}
1200
1201ObjectPtr ProgramReloadContext::ReloadPhase2LoadKernel(
1202 kernel::Program* program,
1203 const String& root_lib_url) {
1204 Thread* thread = Thread::Current();
1205
1206 LongJumpScope jump;
1207 if (setjmp(*jump.Set()) == 0) {
1208 const Object& tmp = kernel::KernelLoader::LoadEntireProgram(program);
1209 if (tmp.IsError()) {
1210 return tmp.ptr();
1211 }
1212
1213 // If the main method disappeared or was not there to begin with,
1214 // then KernelLoader will return null. In this case, look up the
1215 // library by URL.
1216 auto& lib = Library::Handle(Library::RawCast(tmp.ptr()));
1217 if (lib.IsNull()) {
1218 lib = Library::LookupLibrary(thread, root_lib_url);
1219 }
1220 IG->object_store()->set_root_library(lib);
1221 return Object::null();
1222 } else {
1223 return thread->StealStickyError();
1224 }
1225}
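
// Aside: a minimal sketch (not part of the original source) of the
// setjmp/longjmp error channel used above. LongJumpScope lets the kernel
// loader abandon a half-finished load from deep inside the call tree; the
// else branch then retrieves the pending error, as StealStickyError()
// does here.

#include <csetjmp>

static std::jmp_buf load_env;
static const char* sticky_error = nullptr;

void DeepLoadStep(bool fail) {
  if (fail) {
    sticky_error = "load failed";  // record the error...
    std::longjmp(load_env, 1);     // ...and unwind non-locally
  }
}

const char* TryLoad(bool fail) {
  if (setjmp(load_env) == 0) {
    DeepLoadStep(fail);
    return nullptr;      // success: no error
  }
  return sticky_error;   // failure path, analogous to StealStickyError()
}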
1226
1227void ProgramReloadContext::ReloadPhase3FinalizeLoading() {
1228 BuildLibraryMapping();
1229 BuildRemovedClassesSet();
1230 ValidateReload();
1231}
1232
1233void ProgramReloadContext::ReloadPhase4CommitPrepare() {
1234 CommitBeforeInstanceMorphing();
1235}
1236
1237void ProgramReloadContext::ReloadPhase4CommitFinish() {
1238 CommitAfterInstanceMorphing();
1239 PostCommit();
1240}
1241
1242void ProgramReloadContext::ReloadPhase4Rollback() {
1243 IG->RestoreOriginalClassTable();
1244 RollbackLibraries();
1245}
1246
1247void ProgramReloadContext::RegisterClass(const Class& new_cls) {
1248 const Class& old_cls = Class::Handle(OldClassOrNull(new_cls));
1249 if (old_cls.IsNull()) {
1250 if (new_cls.IsTopLevel()) {
1251 IG->class_table()->RegisterTopLevel(new_cls);
1252 } else {
1253 IG->class_table()->Register(new_cls);
1254 }
1255
1256 if (FLAG_identity_reload) {
1257 TIR_Print("Could not find replacement class for %s\n",
1258 new_cls.ToCString());
1259 UNREACHABLE();
1260 }
1261
1262 // New class maps to itself.
1263 AddClassMapping(new_cls, new_cls);
1264 return;
1265 }
1266 VTIR_Print("Registering class: %s\n", new_cls.ToCString());
1267 new_cls.set_id(old_cls.id());
1268 IG->class_table()->SetAt(old_cls.id(), new_cls.ptr());
1269 new_cls.CopyCanonicalConstants(old_cls);
1270 new_cls.CopyDeclarationType(old_cls);
1271 AddBecomeMapping(old_cls, new_cls);
1272 AddClassMapping(new_cls, old_cls);
1273}
1274
1275void IsolateGroupReloadContext::CommonFinalizeTail(
1276 intptr_t final_library_count) {
1277 RELEASE_ASSERT(!reload_finalized_);
1278 ReportOnJSON(js_, final_library_count);
1279 reload_finalized_ = true;
1280}
1281
1282void IsolateGroupReloadContext::ReportOnJSON(JSONStream* stream,
1283 intptr_t final_library_count) {
1284 JSONObject jsobj(stream);
1285 jsobj.AddProperty("type", "ReloadReport");
1286 jsobj.AddProperty("success", reload_skipped_ || !HasReasonsForCancelling());
1287 {
1288 if (HasReasonsForCancelling()) {
1289 // Reload was rejected.
1290 JSONArray array(&jsobj, "notices");
1291 for (intptr_t i = 0; i < reasons_to_cancel_reload_.length(); i++) {
1292 ReasonForCancelling* reason = reasons_to_cancel_reload_.At(i);
1293 reason->AppendTo(&array);
1294 }
1295 return;
1296 }
1297
1298 JSONObject details(&jsobj, "details");
1299 details.AddProperty("finalLibraryCount", final_library_count);
1300 details.AddProperty("receivedLibraryCount", num_received_libs_);
1301 details.AddProperty("receivedLibrariesBytes", bytes_received_libs_);
1302 details.AddProperty("receivedClassesCount", num_received_classes_);
1303 details.AddProperty("receivedProceduresCount", num_received_procedures_);
1304 if (reload_skipped_) {
1305 // Reload was skipped.
1306 details.AddProperty("savedLibraryCount", final_library_count);
1307 details.AddProperty("loadedLibraryCount", static_cast<intptr_t>(0));
1308 } else {
1309 // Reload was successful.
1310 const intptr_t loaded_library_count =
1311 final_library_count - num_saved_libs_;
1312 details.AddProperty("savedLibraryCount", num_saved_libs_);
1313 details.AddProperty("loadedLibraryCount", loaded_library_count);
1314 JSONArray array(&jsobj, "shapeChangeMappings");
1315 for (intptr_t i = 0; i < instance_morphers_.length(); i++) {
1316 instance_morphers_.At(i)->AppendTo(&array);
1317 }
1318 }
1319 }
1320}
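
// Illustration (not from the original source): with the hypothetical
// counts below, a successful report serialized by the code above looks
// roughly like
//   {"type": "ReloadReport", "success": true,
//    "details": {"finalLibraryCount": 52, "receivedLibraryCount": 2,
//                "receivedLibrariesBytes": 18432,
//                "receivedClassesCount": 7, "receivedProceduresCount": 31,
//                "savedLibraryCount": 50, "loadedLibraryCount": 2,
//                "shapeChangeMappings": [...]}}
// whereas a rejected reload reports "success": false plus a "notices"
// array with one entry per ReasonForCancelling.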
1321
1322void ProgramReloadContext::EnsuredUnoptimizedCodeForStack() {
1323 TIMELINE_SCOPE(EnsuredUnoptimizedCodeForStack);
1324
1325 IG->ForEachIsolate([](Isolate* isolate) {
1326 auto thread = isolate->mutator_thread();
1327 if (thread == nullptr) {
1328 return;
1329 }
1330 StackFrameIterator it(ValidationPolicy::kDontValidateFrames, thread,
1331 StackFrameIterator::kNoCrossThreadIteration);
1332
1333 Function& func = Function::Handle();
1334 while (it.HasNextFrame()) {
1335 StackFrame* frame = it.NextFrame();
1336 if (frame->IsDartFrame()) {
1337 func = frame->LookupDartFunction();
1338 ASSERT(!func.IsNull());
1339 // Force-optimized functions don't need unoptimized code because their
1340 // optimized code cannot deopt.
1341 if (!func.ForceOptimize()) {
1342 func.EnsureHasCompiledUnoptimizedCode();
1343 }
1344 }
1345 }
1346 });
1347}
1348
1349void ProgramReloadContext::DeoptimizeDependentCode() {
1350 TIMELINE_SCOPE(DeoptimizeDependentCode);
1351 ClassTable* class_table = IG->class_table();
1352
1353 const intptr_t bottom = Dart::vm_isolate_group()->class_table()->NumCids();
1354 const intptr_t top = IG->class_table()->NumCids();
1355 Class& cls = Class::Handle();
1356 Array& fields = Array::Handle();
1357 Field& field = Field::Handle();
1358 Thread* thread = Thread::Current();
1359 SafepointWriteRwLocker ml(thread, IG->program_lock());
1360 for (intptr_t cls_idx = bottom; cls_idx < top; cls_idx++) {
1361 if (!class_table->HasValidClassAt(cls_idx)) {
1362 // Skip.
1363 continue;
1364 }
1365
1366 // Deoptimize CHA code.
1367 cls = class_table->At(cls_idx);
1368 ASSERT(!cls.IsNull());
1369
1370 cls.DisableAllCHAOptimizedCode();
1371
1372 // Deoptimize field guard code.
1373 fields = cls.fields();
1374 ASSERT(!fields.IsNull());
1375 for (intptr_t field_idx = 0; field_idx < fields.Length(); field_idx++) {
1376 field = Field::RawCast(fields.At(field_idx));
1377 ASSERT(!field.IsNull());
1378 field.DeoptimizeDependentCode();
1379 }
1380 }
1381
1382 DeoptimizeTypeTestingStubs();
1383
1384 // TODO(rmacnak): Also call LibraryPrefix::InvalidateDependentCode.
1385}
1386
1387void ProgramReloadContext::CheckpointClasses() {
1388 TIR_Print("---- CHECKPOINTING CLASSES\n");
1389 // Checkpoint classes before a reload.
1390
1391 // Before this operation, the class table used for heap scanning and
1392 // the class table used for program loading are the same. After this step
1393 // they will become different until reload is committed (or rolled back).
1394 //
1395 // Note that because GC is always reading from heap_walk_class_table and
1396 // we are not changing that, there is no reason to wait for sweeping
1397 // threads or marking to complete.
1398 RELEASE_ASSERT(IG->class_table() == IG->heap_walk_class_table());
1399
1400 IG->CloneClassTableForReload();
1401
1402 // IG->class_table() is now the clone of heap_walk_class_table.
1403 RELEASE_ASSERT(IG->class_table() != IG->heap_walk_class_table());
1404
1405 ClassTable* class_table = IG->class_table();
1406
1407 // For efficiency, we build a set of classes before the reload. This set
1408 // is used to pair new classes with old classes.
1409 // Add classes to the set. The set is stored in an Array, so adding an
1410 // element may allocate a Dart object on the heap and trigger GC.
1411 Class& cls = Class::Handle();
1412 UnorderedHashSet<ClassMapTraits> old_classes_set(old_classes_set_storage_);
1413 for (intptr_t i = 0; i < class_table->NumCids(); i++) {
1414 if (class_table->IsValidIndex(i) && class_table->HasValidClassAt(i)) {
1415 if (i != kFreeListElement && i != kForwardingCorpse) {
1416 cls = class_table->At(i);
1417 bool already_present = old_classes_set.Insert(cls);
1418 ASSERT(!already_present);
1419 }
1420 }
1421 }
1422 for (intptr_t i = 0; i < class_table->NumTopLevelCids(); i++) {
1423 const intptr_t cid = ClassTable::CidFromTopLevelIndex(i);
1424 if (class_table->IsValidIndex(cid) && class_table->HasValidClassAt(cid)) {
1425 cls = class_table->At(cid);
1426 bool already_present = old_classes_set.Insert(cls);
1427 ASSERT(!already_present);
1428 }
1429 }
1430 old_classes_set_storage_ = old_classes_set.Release().ptr();
1431 TIR_Print("---- System had %" Pd " classes\n",
1432 class_table->NumCids() + class_table->NumTopLevelCids());
1433}
1434
1435Dart_FileModifiedCallback IsolateGroupReloadContext::file_modified_callback_ =
1436 nullptr;
1437
1438bool IsolateGroupReloadContext::ScriptModifiedSince(const Script& script,
1439 int64_t since) {
1440 if (IsolateGroupReloadContext::file_modified_callback_ == nullptr) {
1441 return true;
1442 }
1443 // We use the resolved url to determine if the script has been modified.
1444 const String& url = String::Handle(script.resolved_url());
1445 const char* url_chars = url.ToCString();
1446 return (*IsolateGroupReloadContext::file_modified_callback_)(url_chars,
1447 since);
1448}
1449
1450static bool ContainsScriptUri(const GrowableArray<const char*>& seen_uris,
1451 const char* uri) {
1452 for (intptr_t i = 0; i < seen_uris.length(); i++) {
1453 const char* seen_uri = seen_uris.At(i);
1454 size_t seen_len = strlen(seen_uri);
1455 if (seen_len != strlen(uri)) {
1456 continue;
1457 } else if (strncmp(seen_uri, uri, seen_len) == 0) {
1458 return true;
1459 }
1460 }
1461 return false;
1462}
1463
1464void IsolateGroupReloadContext::FindModifiedSources(
1465 bool force_reload,
1466 Dart_SourceFile** modified_sources,
1467 intptr_t* count,
1468 const char* packages_url) {
1469 const int64_t last_reload = isolate_group_->last_reload_timestamp();
1470 GrowableArray<const char*> modified_sources_uris;
1471 const auto& libs =
1472 GrowableObjectArray::Handle(IG->object_store()->libraries());
1473 Library& lib = Library::Handle(Z);
1474 Array& scripts = Array::Handle(Z);
1475 Script& script = Script::Handle(Z);
1476 String& uri = String::Handle(Z);
1477
1478 for (intptr_t lib_idx = 0; lib_idx < libs.Length(); lib_idx++) {
1479 lib ^= libs.At(lib_idx);
1480 if (lib.is_dart_scheme()) {
1481 // We don't consider dart scheme libraries during reload.
1482 continue;
1483 }
1484 scripts = lib.LoadedScripts();
1485 for (intptr_t script_idx = 0; script_idx < scripts.Length(); script_idx++) {
1486 script ^= scripts.At(script_idx);
1487 uri = script.url();
1488 const bool dart_scheme = uri.StartsWith(Symbols::DartScheme());
1489 if (dart_scheme) {
1490 // If a user-defined class mixes in a mixin from dart:*, its list of
1491 // scripts will have a dart:* script as well. We don't consider those
1492 // during reload.
1493 continue;
1494 }
1495 if (ContainsScriptUri(modified_sources_uris, uri.ToCString())) {
1496 // We've already accounted for this script in a prior library.
1497 continue;
1498 }
1499
1500 if (force_reload || ScriptModifiedSince(script, last_reload)) {
1501 modified_sources_uris.Add(uri.ToCString());
1502 }
1503 }
1504 }
1505
1506 // In addition to all sources, we need to check if the .packages file
1507 // contents have been modified.
1508 if (packages_url != nullptr) {
1509 if (IsolateGroupReloadContext::file_modified_callback_ == nullptr ||
1510 (*IsolateGroupReloadContext::file_modified_callback_)(packages_url,
1511 last_reload)) {
1512 modified_sources_uris.Add(packages_url);
1513 }
1514 }
1515
1516 *count = modified_sources_uris.length();
1517 if (*count == 0) {
1518 return;
1519 }
1520
1521 *modified_sources = Z->Alloc<Dart_SourceFile>(*count);
1522 for (intptr_t i = 0; i < *count; ++i) {
1523 (*modified_sources)[i].uri = modified_sources_uris[i];
1524 (*modified_sources)[i].source = nullptr;
1525 }
1526}
1527
1528void ProgramReloadContext::CheckpointLibraries() {
1529 TIMELINE_SCOPE(CheckpointLibraries);
1530 TIR_Print("---- CHECKPOINTING LIBRARIES\n");
1531 // Save the root library in case we abort the reload.
1532 const Library& root_lib = Library::Handle(object_store()->root_library());
1533 saved_root_library_ = root_lib.ptr();
1534
1535 // Save the old libraries array in case we abort the reload.
1536 const GrowableObjectArray& libs =
1537 GrowableObjectArray::Handle(object_store()->libraries());
1538 saved_libraries_ = libs.ptr();
1539
1540 // Make a filtered copy of the old libraries array. Keep "clean" libraries
1541 // that we will use instead of reloading.
1542 const GrowableObjectArray& new_libs =
1543 GrowableObjectArray::Handle(GrowableObjectArray::New(Heap::kOld));
1544 Library& lib = Library::Handle();
1545 UnorderedHashSet<LibraryMapTraits> old_libraries_set(
1546 old_libraries_set_storage_);
1547
1548 group_reload_context_->saved_libs_transitive_updated_ = new (Z)
1549 BitVector(Z, group_reload_context_->modified_libs_transitive_->length());
1550 for (intptr_t i = 0; i < libs.Length(); i++) {
1551 lib ^= libs.At(i);
1552 if (group_reload_context_->modified_libs_->Contains(i)) {
1553 // We are going to reload this library. Clear the index.
1554 lib.set_index(-1);
1555 } else {
1556 // We are preserving this library across the reload; assign its new index.
1557 lib.set_index(new_libs.Length());
1558 new_libs.Add(lib, Heap::kOld);
1559
1560 if (group_reload_context_->modified_libs_transitive_->Contains(i)) {
1561 // Remember the new index.
1562 group_reload_context_->saved_libs_transitive_updated_->Add(lib.index());
1563 }
1564 }
1565 // Add old library to old libraries set.
1566 bool already_present = old_libraries_set.Insert(lib);
1567 ASSERT(!already_present);
1568
1569 lib.EvaluatePragmas();
1570 }
1571 old_libraries_set_storage_ = old_libraries_set.Release().ptr();
1572
1573 // Reset the registered libraries to the filtered array.
1574 Library::RegisterLibraries(Thread::Current(), new_libs);
1575 // Reset the root library to null.
1576 object_store()->set_root_library(Library::Handle());
1577}
1578
1579void ProgramReloadContext::RollbackLibraries() {
1580 TIR_Print("---- ROLLING BACK LIBRARY CHANGES\n");
1581 Thread* thread = Thread::Current();
1582 Library& lib = Library::Handle();
1583 const auto& saved_libs = GrowableObjectArray::Handle(Z, saved_libraries_);
1584 if (!saved_libs.IsNull()) {
1585 for (intptr_t i = 0; i < saved_libs.Length(); i++) {
1586 lib = Library::RawCast(saved_libs.At(i));
1587 // Restore indexes that were modified in CheckpointLibraries.
1588 lib.set_index(i);
1589 }
1590
1591 // Reset the registered libraries to the filtered array.
1592 Library::RegisterLibraries(thread, saved_libs);
1593 }
1594
1595 Library& saved_root_lib = Library::Handle(Z, saved_root_library_);
1596 if (!saved_root_lib.IsNull()) {
1597 object_store()->set_root_library(saved_root_lib);
1598 }
1599
1600 saved_root_library_ = Library::null();
1601 saved_libraries_ = GrowableObjectArray::null();
1602}
1603
1604#ifdef DEBUG
1605void ProgramReloadContext::VerifyMaps() {
1606 TIMELINE_SCOPE(VerifyMaps);
1607 Class& cls = Class::Handle();
1608 Class& new_cls = Class::Handle();
1609 Class& cls2 = Class::Handle();
1610
1611 // Verify that two old classes aren't both mapped to the same new
1612 // class. This could happen if the IsSameClass function is broken.
1613 UnorderedHashMap<ClassMapTraits> class_map(class_map_storage_);
1614 UnorderedHashMap<ClassMapTraits> reverse_class_map(
1615 HashTables::New<UnorderedHashMap<ClassMapTraits> >(
1616 class_map.NumOccupied()));
1617 {
1618 UnorderedHashMap<ClassMapTraits>::Iterator it(&class_map);
1619 while (it.MoveNext()) {
1620 const intptr_t entry = it.Current();
1621 new_cls = Class::RawCast(class_map.GetKey(entry));
1622 cls = Class::RawCast(class_map.GetPayload(entry, 0));
1623 cls2 ^= reverse_class_map.GetOrNull(new_cls);
1624 if (!cls2.IsNull()) {
1626 "Classes '%s' and '%s' are distinct classes but both map "
1627 " to class '%s'\n",
1628 cls.ToCString(), cls2.ToCString(), new_cls.ToCString());
1629 UNREACHABLE();
1630 }
1631 bool update = reverse_class_map.UpdateOrInsert(cls, new_cls);
1632 ASSERT(!update);
1633 }
1634 }
1635 class_map.Release();
1636 reverse_class_map.Release();
1637}
1638#endif
1639
1640void ProgramReloadContext::CommitBeforeInstanceMorphing() {
1641 TIMELINE_SCOPE(Commit);
1642
1643#ifdef DEBUG
1644 VerifyMaps();
1645#endif
1646
1647 // Copy over certain properties of libraries, e.g. is the library
1648 // debuggable?
1649 {
1650 TIMELINE_SCOPE(CopyLibraryBits);
1651 Library& lib = Library::Handle();
1652 Library& new_lib = Library::Handle();
1653
1654 UnorderedHashMap<LibraryMapTraits> lib_map(library_map_storage_);
1655
1656 {
1657 // Reload existing libraries.
1658 UnorderedHashMap<LibraryMapTraits>::Iterator it(&lib_map);
1659
1660 while (it.MoveNext()) {
1661 const intptr_t entry = it.Current();
1662 ASSERT(entry != -1);
1663 new_lib = Library::RawCast(lib_map.GetKey(entry));
1664 lib = Library::RawCast(lib_map.GetPayload(entry, 0));
1665 new_lib.set_debuggable(lib.IsDebuggable());
1666 // Native extension support.
1667 new_lib.set_native_entry_resolver(lib.native_entry_resolver());
1668 new_lib.set_native_entry_symbol_resolver(
1669 lib.native_entry_symbol_resolver());
1670 new_lib.set_ffi_native_resolver(lib.ffi_native_resolver());
1671 new_lib.CopyPragmas(lib);
1672 }
1673 }
1674
1675 // Release the library map.
1676 lib_map.Release();
1677 }
1678
1679 {
1680 TIMELINE_SCOPE(CopyStaticFieldsAndPatchFieldsAndFunctions);
1681 // Copy static field values from the old classes to the new classes.
1682 // Patch fields and functions in the old classes so that they retain
1683 // the old script.
1684 Class& old_cls = Class::Handle();
1685 Class& new_cls = Class::Handle();
1686 UnorderedHashMap<ClassMapTraits> class_map(class_map_storage_);
1687
1688 {
1689 UnorderedHashMap<ClassMapTraits>::Iterator it(&class_map);
1690 while (it.MoveNext()) {
1691 const intptr_t entry = it.Current();
1692 new_cls = Class::RawCast(class_map.GetKey(entry));
1693 old_cls = Class::RawCast(class_map.GetPayload(entry, 0));
1694 if (new_cls.ptr() != old_cls.ptr()) {
1695 ASSERT(new_cls.is_enum_class() == old_cls.is_enum_class());
1696 new_cls.CopyStaticFieldValues(this, old_cls);
1697 old_cls.PatchFieldsAndFunctions();
1698 old_cls.MigrateImplicitStaticClosures(this, new_cls);
1699 }
1700 }
1701 }
1702
1703 class_map.Release();
1704
1705 {
1706 UnorderedHashSet<ClassMapTraits> removed_class_set(
1707 removed_class_set_storage_);
1708 UnorderedHashSet<ClassMapTraits>::Iterator it(&removed_class_set);
1709 while (it.MoveNext()) {
1710 const intptr_t entry = it.Current();
1711 old_cls ^= removed_class_set.GetKey(entry);
1712 old_cls.PatchFieldsAndFunctions();
1713 }
1714 removed_class_set.Release();
1715 }
1716 }
1717
1718 {
1719 TIMELINE_SCOPE(UpdateLibrariesArray);
1720 // Update the libraries array.
1721 Library& lib = Library::Handle();
1722 const GrowableObjectArray& libs =
1723 GrowableObjectArray::Handle(IG->object_store()->libraries());
1724 for (intptr_t i = 0; i < libs.Length(); i++) {
1725 lib = Library::RawCast(libs.At(i));
1726 VTIR_Print("Lib '%s' at index %" Pd "\n", lib.ToCString(), i);
1727 lib.set_index(i);
1728 }
1729
1730 // Initialize library side table.
1731 library_infos_.SetLength(libs.Length());
1732 for (intptr_t i = 0; i < libs.Length(); i++) {
1733 lib = Library::RawCast(libs.At(i));
1734 // Mark the library dirty if it comes after the libraries we saved.
1735 library_infos_[i].dirty =
1736 i >= group_reload_context_->num_saved_libs_ ||
1737 group_reload_context_->saved_libs_transitive_updated_->Contains(
1738 lib.index());
1739 }
1740 }
1741}
1742
1743void ProgramReloadContext::CommitAfterInstanceMorphing() {
1744 // Rehash constants map for all classes. Constants are hashed by content, and
1745 // content may have changed from fields being added or removed.
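  // Hypothetical Dart example: if a reload adds a field to a class that has
  // `const` instances, each existing constant is morphed to the new layout,
  // its content-based hash changes, and the class's constants table must be
  // rebuilt before canonical lookups can work again.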
1746 {
1747 TIMELINE_SCOPE(RehashConstants);
1748 IG->RehashConstants(&become_);
1749 }
1750 {
1751 TIMELINE_SCOPE(ForwardEnums);
1752 become_.Forward();
1753 }
1754
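  // Under --identity_reload the same program is reloaded onto itself, so the
  // number of libraries before ("B#L") and after ("A#L") must agree; a
  // mismatch indicates a reload bookkeeping bug rather than a problem with
  // the reloaded program.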
1755 if (FLAG_identity_reload) {
1756 const auto& saved_libs = GrowableObjectArray::Handle(saved_libraries_);
1757 const GrowableObjectArray& libs =
1758 GrowableObjectArray::Handle(IG->object_store()->libraries());
1759 if (saved_libs.Length() != libs.Length()) {
1760 TIR_Print("Identity reload failed! B#L=%" Pd " A#L=%" Pd "\n",
1761 saved_libs.Length(), libs.Length());
1762 }
1763 }
1764}
1765
1766bool ProgramReloadContext::IsDirty(const Library& lib) {
1767 const intptr_t index = lib.index();
1768 if (index == static_cast<classid_t>(-1)) {
1769 // Treat deleted libraries as dirty.
1770 return true;
1771 }
1772 ASSERT((index >= 0) && (index < library_infos_.length()));
1773 return library_infos_[index].dirty;
1774}
1775
1776void ProgramReloadContext::PostCommit() {
1777 TIMELINE_SCOPE(PostCommit);
1778 saved_root_library_ = Library::null();
1779 saved_libraries_ = GrowableObjectArray::null();
1780 InvalidateWorld();
1781}
1782
1783void IsolateGroupReloadContext::AddReasonForCancelling(
1784 ReasonForCancelling* reason) {
1785 reasons_to_cancel_reload_.Add(reason);
1786}
1787
1788void IsolateGroupReloadContext::EnsureHasInstanceMorpherFor(
1789 classid_t cid,
1790 InstanceMorpher* instance_morpher) {
1791 for (intptr_t i = 0; i < instance_morphers_.length(); ++i) {
1792 if (instance_morphers_[i]->cid() == cid) {
1793 return;
1794 }
1795 }
1796 instance_morphers_.Add(instance_morpher);
1797 instance_morpher_by_cid_.Insert(instance_morpher);
1798 ASSERT(instance_morphers_[instance_morphers_.length() - 1]->cid() == cid);
1799}
1800
1801void IsolateGroupReloadContext::ReportReasonsForCancelling() {
1802 ASSERT(FLAG_reload_force_rollback || HasReasonsForCancelling());
1803 for (int i = 0; i < reasons_to_cancel_reload_.length(); i++) {
1804 reasons_to_cancel_reload_.At(i)->Report(this);
1805 }
1806}
1807
1808void IsolateGroupReloadContext::MorphInstancesPhase1Allocate(
1809 ObjectLocator* locator,
1810 Become* become) {
1811 ASSERT(HasInstanceMorphers());
1812
1813 if (FLAG_trace_reload) {
1814 LogBlock blocker;
1815 TIR_Print("MorphInstance: \n");
1816 for (intptr_t i = 0; i < instance_morphers_.length(); i++) {
1817 instance_morphers_.At(i)->Dump();
1818 }
1819 }
1820
1821 const intptr_t count = locator->count();
1822 TIR_Print("Found %" Pd " object%s subject to morphing.\n", count,
1823 (count != 1) ? "s" : "");
1824
1825 for (intptr_t i = 0; i < instance_morphers_.length(); i++) {
1826 instance_morphers_.At(i)->CreateMorphedCopies(become);
1827 }
1828}
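// Between phase 1 (allocating morphed copies) and phase 2 (forwarding), the
// heap deliberately holds both old- and new-layout instances of the affected
// classes, so ordinary GC must stay disabled until become->Forward() has
// rewritten all references (see the note in phase 2 below).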
1829
1830void IsolateGroupReloadContext::MorphInstancesPhase2Become(Become* become) {
1831 ASSERT(HasInstanceMorphers());
1832
1833 become->Forward();
1834 // The heap now contains only instances with the new layout.
1835 // Ordinary GC is safe again.
1836}
1837
1838void IsolateGroupReloadContext::ForEachIsolate(
1839 std::function<void(Isolate*)> callback) {
1840 isolate_group_->ForEachIsolate(callback);
1841}
1842
1843void ProgramReloadContext::ValidateReload() {
1844 TIMELINE_SCOPE(ValidateReload);
1845
1846 TIR_Print("---- VALIDATING RELOAD\n");
1847
1848 // Validate libraries.
1849 {
1850 ASSERT(library_map_storage_ != Array::null());
1851 UnorderedHashMap<LibraryMapTraits> map(library_map_storage_);
1852 UnorderedHashMap<LibraryMapTraits>::Iterator it(&map);
1853 Library& lib = Library::Handle();
1854 Library& new_lib = Library::Handle();
1855 while (it.MoveNext()) {
1856 const intptr_t entry = it.Current();
1857 new_lib = Library::RawCast(map.GetKey(entry));
1858 lib = Library::RawCast(map.GetPayload(entry, 0));
1859 if (new_lib.ptr() != lib.ptr()) {
1860 lib.CheckReload(new_lib, this);
1861 }
1862 }
1863 map.Release();
1864 }
1865
1866 // Validate classes.
1867 {
1868 ASSERT(class_map_storage_ != Array::null());
1869 UnorderedHashMap<ClassMapTraits> map(class_map_storage_);
1870 UnorderedHashMap<ClassMapTraits>::Iterator it(&map);
1871 Class& cls = Class::Handle();
1872 Class& new_cls = Class::Handle();
1873 while (it.MoveNext()) {
1874 const intptr_t entry = it.Current();
1875 new_cls = Class::RawCast(map.GetKey(entry));
1876 cls = Class::RawCast(map.GetPayload(entry, 0));
1877 if (new_cls.ptr() != cls.ptr()) {
1878 cls.CheckReload(new_cls, this);
1879 }
1880 }
1881 map.Release();
1882 }
1883}
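// Neither CheckReload() call fails eagerly: library and class checks record
// problems as ReasonForCancelling objects on the group context, and the
// caller decides afterwards whether to commit the reload or roll it back.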
1884
1885void IsolateGroupReloadContext::VisitObjectPointers(
1886 ObjectPointerVisitor* visitor) {
1887 visitor->VisitPointers(from(), to());
1888}
1889
1890void ProgramReloadContext::VisitObjectPointers(ObjectPointerVisitor* visitor) {
1891 visitor->VisitPointers(from(), to());
1892}
1893
1894ObjectStore* ProgramReloadContext::object_store() {
1895 return IG->object_store();
1896}
1897
1898void ProgramReloadContext::ResetUnoptimizedICsOnStack() {
1899 Thread* thread = Thread::Current();
1900 StackZone stack_zone(thread);
1901 Zone* zone = stack_zone.GetZone();
1902 Code& code = Code::Handle(zone);
1903 Function& function = Function::Handle(zone);
1904 CallSiteResetter resetter(zone);
1905
1906 IG->ForEachIsolate([&](Isolate* isolate) {
1907 if (isolate->mutator_thread() == nullptr) {
1908 return;
1909 }
1910 DartFrameIterator iterator(isolate->mutator_thread(),
1911 StackFrameIterator::kAllowCrossThreadIteration);
1912 StackFrame* frame = iterator.NextFrame();
1913 while (frame != nullptr) {
1914 code = frame->LookupDartCode();
1915 if (code.is_optimized() && !code.is_force_optimized()) {
1916 // If this code is optimized, we need to reset the ICs in the
1917 // corresponding unoptimized code, which will be executed when the stack
1918 // unwinds to the optimized code.
1919 function = code.function();
1920 code = function.unoptimized_code();
1921 ASSERT(!code.IsNull());
1922 resetter.ResetSwitchableCalls(code);
1923 resetter.ResetCaches(code);
1924 } else {
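        // This frame is already running unoptimized (or force-optimized)
        // code; reset its switchable calls and caches in place.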
1925 resetter.ResetSwitchableCalls(code);
1926 resetter.ResetCaches(code);
1927 }
1928 frame = iterator.NextFrame();
1929 }
1930 });
1931}
1932
1933void ProgramReloadContext::ResetMegamorphicCaches() {
1934 object_store()->set_megamorphic_cache_table(GrowableObjectArray::Handle());
1935 // Since any current optimized code will not make any more calls, it may be
1936 // better to clear the table instead of clearing each of the caches: this
1937 // allows the current megamorphic caches to be GC'd and lets any new
1938 // optimized code allocate new ones.
1939}
1940
1941class InvalidationCollector : public ObjectVisitor {
1942 public:
1943 InvalidationCollector(Zone* zone,
1944 GrowableArray<const Function*>* functions,
1945 GrowableArray<const KernelProgramInfo*>* kernel_infos,
1946 GrowableArray<const Field*>* fields,
1947 GrowableArray<const SuspendState*>* suspend_states,
1948 GrowableArray<const Instance*>* instances)
1949 : zone_(zone),
1950 functions_(functions),
1951 kernel_infos_(kernel_infos),
1952 fields_(fields),
1953 suspend_states_(suspend_states),
1954 instances_(instances) {}
1955 virtual ~InvalidationCollector() {}
1956
1957 void VisitObject(ObjectPtr obj) override {
1958 intptr_t cid = obj->GetClassId();
1959 if (cid == kFunctionCid) {
1960 const Function& func =
1961 Function::Handle(zone_, static_cast<FunctionPtr>(obj));
1962 functions_->Add(&func);
1963 } else if (cid == kKernelProgramInfoCid) {
1964 kernel_infos_->Add(&KernelProgramInfo::Handle(
1965 zone_, static_cast<KernelProgramInfoPtr>(obj)));
1966 } else if (cid == kFieldCid) {
1967 fields_->Add(&Field::Handle(zone_, static_cast<FieldPtr>(obj)));
1968 } else if (cid == kSuspendStateCid) {
1969 const auto& suspend_state =
1970 SuspendState::Handle(zone_, static_cast<SuspendStatePtr>(obj));
1971 if (suspend_state.pc() != 0) {
1972 suspend_states_->Add(&suspend_state);
1973 }
1974 } else if (cid > kNumPredefinedCids) {
1975 instances_->Add(&Instance::Handle(zone_, static_cast<InstancePtr>(obj)));
1976 }
1977 }
1978
1979 private:
1980 Zone* const zone_;
1981 GrowableArray<const Function*>* const functions_;
1982 GrowableArray<const KernelProgramInfo*>* const kernel_infos_;
1983 GrowableArray<const Field*>* const fields_;
1984 GrowableArray<const SuspendState*>* const suspend_states_;
1985 GrowableArray<const Instance*>* const instances_;
1986};
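// What the collected buckets feed (see the passes below): functions get their
// code and ICData invalidated, KernelProgramInfo objects get their lookup
// caches cleared, fields and user-defined instances (cid > kNumPredefinedCids)
// are re-checked for needed load guards, and suspended SuspendState
// activations get their code deoptimized.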
1987
1988void ProgramReloadContext::RunInvalidationVisitors() {
1989 TIR_Print("---- RUNNING INVALIDATION HEAP VISITORS\n");
1990 Thread* thread = Thread::Current();
1991 StackZone stack_zone(thread);
1992 Zone* zone = stack_zone.GetZone();
1993
1994 GrowableArray<const Function*> functions(4 * KB);
1995 GrowableArray<const KernelProgramInfo*> kernel_infos(KB);
1996 GrowableArray<const Field*> fields(4 * KB);
1997 GrowableArray<const SuspendState*> suspend_states(4 * KB);
1998 GrowableArray<const Instance*> instances(4 * KB);
1999
2000 {
2001 TIMELINE_SCOPE(CollectInvalidations);
2002 HeapIterationScope iteration(thread);
2003 InvalidationCollector visitor(zone, &functions, &kernel_infos, &fields,
2004 &suspend_states, &instances);
2005 iteration.IterateObjects(&visitor);
2006 }
2007
2008 InvalidateKernelInfos(zone, kernel_infos);
2009 InvalidateSuspendStates(zone, suspend_states);
2010 InvalidateFields(zone, fields, instances);
2011
2012 // Runs after InvalidateFields so that the implicit getters of fields
2013 // which now need load guards are invalidated as well.
2014 InvalidateFunctions(zone, functions);
2015}
2016
2017void ProgramReloadContext::InvalidateKernelInfos(
2018 Zone* zone,
2019 const GrowableArray<const KernelProgramInfo*>& kernel_infos) {
2020 TIMELINE_SCOPE(InvalidateKernelInfos);
2021 HANDLESCOPE(Thread::Current());
2022
2023 Array& data = Array::Handle(zone);
2024 Object& key = Object::Handle(zone);
2025 Smi& value = Smi::Handle(zone);
2026 for (intptr_t i = 0; i < kernel_infos.length(); i++) {
2027 const KernelProgramInfo& info = *kernel_infos[i];
2028 // Clear the libraries cache.
2029 {
2030 data = info.libraries_cache();
2031 ASSERT(!data.IsNull());
2032 IntHashMap table(&key, &value, &data);
2033 table.Clear();
2034 info.set_libraries_cache(table.Release());
2035 }
2036 // Clear the classes cache.
2037 {
2038 data = info.classes_cache();
2039 ASSERT(!data.IsNull());
2040 IntHashMap table(&key, &value, &data);
2041 table.Clear();
2042 info.set_classes_cache(table.Release());
2043 }
2044 }
2045}
2046
2047void ProgramReloadContext::InvalidateFunctions(
2048 Zone* zone,
2049 const GrowableArray<const Function*>& functions) {
2050 TIMELINE_SCOPE(InvalidateFunctions);
2051 auto thread = Thread::Current();
2052 HANDLESCOPE(thread);
2053
2054 CallSiteResetter resetter(zone);
2055
2056 Class& owning_class = Class::Handle(zone);
2057 Library& owning_lib = Library::Handle(zone);
2058 Code& code = Code::Handle(zone);
2059 Field& field = Field::Handle(zone);
2060 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
2061 for (intptr_t i = 0; i < functions.length(); i++) {
2062 const Function& func = *functions[i];
2063
2064 // Force-optimized functions cannot deoptimize.
2065 if (func.ForceOptimize()) continue;
2066
2067 // Switch to unoptimized code or the lazy compilation stub.
2068 func.SwitchToLazyCompiledUnoptimizedCode();
2069
2070 // Grab the current code.
2071 code = func.CurrentCode();
2072 ASSERT(!code.IsNull());
2073
2074 // Force recompilation of unoptimized code of implicit getters
2075 // in order to add load guards. This is needed for future
2076 // deoptimizations which will expect load guard in the unoptimized code.
2077 bool recompile_for_load_guard = false;
2078 if (func.IsImplicitGetterFunction() ||
2079 func.IsImplicitStaticGetterFunction()) {
2080 field = func.accessor_field();
2081 recompile_for_load_guard = field.needs_load_guard();
2082 }
2083
2084 owning_class = func.Owner();
2085 owning_lib = owning_class.library();
2086 const bool clear_unoptimized_code =
2087 IsDirty(owning_lib) || recompile_for_load_guard;
2088 const bool stub_code = code.IsStubCode();
2089
2090 // Zero edge counters, before clearing the ICDataArray, since that's where
2091 // they're held.
2092 resetter.ZeroEdgeCounters(func);
2093
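    // Reset policy, in order: stub code has nothing to reset; functions from
    // dirty libraries (or that need a new load guard) drop their code and
    // ICData and recompile lazily; everything else keeps its unoptimized
    // code but has its call sites and caches reset.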
2094 if (stub_code) {
2095 // Nothing to reset.
2096 } else if (clear_unoptimized_code) {
2097 VTIR_Print("Marking %s for recompilation, clearing code\n",
2098 func.ToCString());
2099 // Null out the ICData array and code.
2100 func.ClearICDataArray();
2101 func.ClearCode();
2102 func.SetWasCompiled(false);
2103 } else {
2104 // We are preserving the unoptimized code, reset instance calls and type
2105 // test caches.
2106 resetter.ResetSwitchableCalls(code);
2107 resetter.ResetCaches(code);
2108 }
2109
2110 // Clear counters.
2111 func.set_usage_counter(0);
2112 func.set_deoptimization_counter(0);
2113 func.set_optimized_instruction_count(0);
2114 func.set_optimized_call_site_count(0);
2115 }
2116}
2117
2118void ProgramReloadContext::InvalidateSuspendStates(
2119 Zone* zone,
2120 const GrowableArray<const SuspendState*>& suspend_states) {
2121 TIMELINE_SCOPE(InvalidateSuspendStates);
2122 auto thread = Thread::Current();
2123 HANDLESCOPE(thread);
2124
2125 CallSiteResetter resetter(zone);
2126 Code& code = Code::Handle(zone);
2127 Function& function = Function::Handle(zone);
2128
2129 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
2130 for (intptr_t i = 0, n = suspend_states.length(); i < n; ++i) {
2131 const SuspendState& suspend_state = *suspend_states[i];
2132 ASSERT(suspend_state.pc() != 0);
2133 code = suspend_state.GetCodeObject();
2134 ASSERT(!code.IsNull());
2135 if (code.is_optimized() && !code.is_force_optimized()) {
2136 function = code.function();
2137 // Before disabling [code], function needs to
2138 // switch to unoptimized code first.
2139 function.SwitchToLazyCompiledUnoptimizedCode();
2140 // Disable [code] in order to trigger lazy deoptimization.
2141 // Unless [code] is compiled for OSR, it may be already
2142 // disabled in SwitchToLazyCompiledUnoptimizedCode.
2143 if (!code.IsDisabled()) {
2144 code.DisableDartCode();
2145 }
2146 // Reset switchable calls and caches for unoptimized
2147 // code (if any), as it is going to be used to continue
2148 // execution of the suspended function.
2149 code = function.unoptimized_code();
2150 if (!code.IsNull()) {
2151 resetter.ResetSwitchableCalls(code);
2152 resetter.ResetCaches(code);
2153 }
2154 } else {
2155 function = code.function();
2156 // ResetSwitchableCalls uses ICData array, which
2157 // can be cleared along with the code in InvalidateFunctions
2158 // during previous hot reloads.
2159 // Rebuild an unoptimized code in order to recreate ICData array.
2160 function.EnsureHasCompiledUnoptimizedCode();
2161 resetter.ResetSwitchableCalls(code);
2162 resetter.ResetCaches(code);
2163 }
2164 }
2165}
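// SuspendState objects with a non-zero pc are suspended activations (e.g.
// async functions awaiting a value) that live in the heap rather than on any
// stack, so ResetUnoptimizedICsOnStack() cannot reach them; this pass applies
// the equivalent deoptimization treatment to the code they will resume into.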
2166
2167// Finds fields that are uninitialized or have a value that does not conform to
2168// the field's static type, setting Field::needs_load_guard(). Accessors for
2169// such fields are compiled with additional checks to handle lazy initialization
2170// and to preserve type soundness.
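// Hypothetical Dart example: if a reload changes a field declaration from
// `int x;` to `String x;`, existing instances may still hold an int in that
// slot. Once Field::needs_load_guard() is set, the field's accessors
// re-validate the value on load (and handle a still-uninitialized slot)
// instead of trusting the new static type.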
2171class FieldInvalidator {
2172 public:
2173 explicit FieldInvalidator(Zone* zone)
2174 : zone_(zone),
2175 cls_(Class::Handle(zone)),
2176 cls_fields_(Array::Handle(zone)),
2177 entry_(Object::Handle(zone)),
2178 value_(Object::Handle(zone)),
2179 instance_(Instance::Handle(zone)),
2180 type_(AbstractType::Handle(zone)),
2181 cache_(SubtypeTestCache::Handle(zone)),
2182 result_(Bool::Handle(zone)),
2183 closure_function_(Function::Handle(zone)),
2184 instantiator_type_arguments_(TypeArguments::Handle(zone)),
2185 function_type_arguments_(TypeArguments::Handle(zone)),
2186 instance_cid_or_signature_(Object::Handle(zone)),
2187 instance_type_arguments_(TypeArguments::Handle(zone)),
2188 parent_function_type_arguments_(TypeArguments::Handle(zone)),
2189 delayed_function_type_arguments_(TypeArguments::Handle(zone)) {}
2190
2191 void CheckStatics(const GrowableArray<const Field*>& fields) {
2192 Thread* thread = Thread::Current();
2193 HANDLESCOPE(thread);
2194 instantiator_type_arguments_ = TypeArguments::null();
2195 for (intptr_t i = 0; i < fields.length(); i++) {
2196 const Field& field = *fields[i];
2197 if (!field.is_static()) {
2198 continue;
2199 }
2200 if (field.needs_load_guard()) {
2201 continue; // Already guarding.
2202 }
2203 const intptr_t field_id = field.field_id();
2204 thread->isolate_group()->ForEachIsolate([&](Isolate* isolate) {
2205 auto field_table = isolate->field_table();
2206 // The isolate might've just been created and is now participating in
2207 // the reload request inside `IsolateGroup::RegisterIsolate()`.
2208 // At that point it doesn't have its field table set up yet.
2209 if (field_table->IsReadyToUse()) {
2210 value_ = field_table->At(field_id);
2211 if ((value_.ptr() != Object::sentinel().ptr()) &&
2212 (value_.ptr() != Object::transition_sentinel().ptr())) {
2213 CheckValueType(value_, field);
2214 }
2215 }
2216 });
2217 }
2218 }
2219
2220 void CheckInstances(const GrowableArray<const Instance*>& instances) {
2221 Thread* thread = Thread::Current();
2222 HANDLESCOPE(thread);
2223 for (intptr_t i = 0; i < instances.length(); i++) {
2224 CheckInstance(*instances[i]);
2225 }
2226 }
2227
2228 private:
2229 DART_FORCE_INLINE
2230 void CheckInstance(const Instance& instance) {
2231 cls_ = instance.clazz();
2232 if (cls_.NumTypeArguments() > 0) {
2233 instantiator_type_arguments_ = instance.GetTypeArguments();
2234 } else {
2235 instantiator_type_arguments_ = TypeArguments::null();
2236 }
2237 cls_fields_ = cls_.OffsetToFieldMap();
2238 for (intptr_t i = 0; i < cls_fields_.Length(); i++) {
2239 entry_ = cls_fields_.At(i);
2240 if (!entry_.IsField()) {
2241 continue;
2242 }
2243 const Field& field = Field::Cast(entry_);
2244 CheckInstanceField(instance, field);
2245 }
2246 }
2247
2248 DART_FORCE_INLINE
2249 void CheckInstanceField(const Instance& instance, const Field& field) {
2250 if (field.needs_load_guard()) {
2251 return; // Already guarding.
2252 }
2253 if (field.is_unboxed()) {
2254 // Unboxed fields are guaranteed to match.
2255 return;
2256 }
2257 value_ = instance.GetField(field);
2258 if (value_.ptr() == Object::sentinel().ptr()) {
2259 if (field.is_late()) {
2260 // Late fields already have lazy initialization logic.
2261 return;
2262 }
2263 // Needs guard for initialization.
2264 ASSERT(!FLAG_identity_reload);
2265 field.set_needs_load_guard(true);
2266 return;
2267 }
2268 CheckValueType(value_, field);
2269 }
2270
2271 DART_FORCE_INLINE
2272 bool CheckAssignabilityUsingCache(const Object& value,
2273 const AbstractType& type) {
2274 ASSERT(!value.IsSentinel());
2275 if (type.IsDynamicType()) {
2276 return true;
2277 }
2278
2279 if (type.IsRecordType()) {
2280 return CheckAssignabilityForRecordType(value, RecordType::Cast(type));
2281 }
2282
2283 cls_ = value.clazz();
2284 const intptr_t cid = cls_.id();
2285 if (cid == kClosureCid) {
2286 const auto& closure = Closure::Cast(value);
2287 closure_function_ = closure.function();
2288 instance_cid_or_signature_ = closure_function_.signature();
2289 instance_type_arguments_ = closure.instantiator_type_arguments();
2290 parent_function_type_arguments_ = closure.function_type_arguments();
2291 delayed_function_type_arguments_ = closure.delayed_type_arguments();
2292 } else {
2293 instance_cid_or_signature_ = Smi::New(cid);
2294 if (cls_.NumTypeArguments() > 0) {
2295 instance_type_arguments_ = Instance::Cast(value).GetTypeArguments();
2296 } else {
2297 instance_type_arguments_ = TypeArguments::null();
2298 }
2299 parent_function_type_arguments_ = TypeArguments::null();
2300 delayed_function_type_arguments_ = TypeArguments::null();
2301 }
2302
2303 if (cache_.IsNull()) {
2304 // Use a cache that will check all inputs.
2305 cache_ = SubtypeTestCache::New(SubtypeTestCache::kMaxInputs);
2306 }
2307 if (cache_.HasCheck(
2308 instance_cid_or_signature_, type, instance_type_arguments_,
2309 instantiator_type_arguments_, function_type_arguments_,
2310 parent_function_type_arguments_, delayed_function_type_arguments_,
2311 /*index=*/nullptr, &result_)) {
2312 return result_.value();
2313 }
2314
2315 instance_ ^= value.ptr();
2316 if (instance_.IsAssignableTo(type, instantiator_type_arguments_,
2317 function_type_arguments_)) {
2318 // Do not add record instances to cache as they don't have a valid
2319 // key (type of a record depends on types of all its fields).
2320 if (cid != kRecordCid) {
2321 cache_.AddCheck(instance_cid_or_signature_, type,
2322 instance_type_arguments_, instantiator_type_arguments_,
2323 function_type_arguments_,
2324 parent_function_type_arguments_,
2325 delayed_function_type_arguments_, Bool::True());
2326 }
2327 return true;
2328 }
2329
2330 return false;
2331 }
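 // A hit in cache_ above answers assignability without calling
 // Instance::IsAssignableTo(): the check is keyed on the instance's cid (or
 // closure signature) together with all the type-argument vectors involved,
 // the same inputs a runtime SubtypeTestCache query uses.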
2332
2333 bool CheckAssignabilityForRecordType(const Object& value,
2334 const RecordType& type) {
2335 if (!value.IsRecord()) {
2336 return false;
2337 }
2338
2339 const Record& record = Record::Cast(value);
2340 if (record.shape() != type.shape()) {
2341 return false;
2342 }
2343
2344 // This method can be called recursively, so cannot reuse handles.
2345 auto& field_value = Object::Handle(zone_);
2346 auto& field_type = AbstractType::Handle(zone_);
2347 const intptr_t num_fields = record.num_fields();
2348 for (intptr_t i = 0; i < num_fields; ++i) {
2349 field_value = record.FieldAt(i);
2350 field_type = type.FieldTypeAt(i);
2351 if (!CheckAssignabilityUsingCache(field_value, field_type)) {
2352 return false;
2353 }
2354 }
2355 return true;
2356 }
2357
2358 DART_FORCE_INLINE
2359 void CheckValueType(const Object& value, const Field& field) {
2360 ASSERT(!value.IsSentinel());
2361 type_ = field.type();
2362 if (!CheckAssignabilityUsingCache(value, type_)) {
2363 // Even if doing an identity reload, type check can fail if hot reload
2364 // happens while constructor is still running and field is not
2365 // initialized yet, so it has a null value.
2366#ifdef DEBUG
2367 if (FLAG_identity_reload && !value.IsNull()) {
2368 FATAL(
2369 "Type check failed during identity hot reload.\n"
2370 " field: %s\n"
2371 " type: %s\n"
2372 " value: %s\n",
2373 field.ToCString(), type_.ToCString(), value.ToCString());
2374 }
2375#endif
2376 field.set_needs_load_guard(true);
2377 }
2378 }
2379
2380 Zone* zone_;
2381 Class& cls_;
2382 Array& cls_fields_;
2383 Object& entry_;
2384 Object& value_;
2385 Instance& instance_;
2386 AbstractType& type_;
2387 SubtypeTestCache& cache_;
2388 Bool& result_;
2389 Function& closure_function_;
2390 TypeArguments& instantiator_type_arguments_;
2391 TypeArguments& function_type_arguments_;
2392 Object& instance_cid_or_signature_;
2393 TypeArguments& instance_type_arguments_;
2394 TypeArguments& parent_function_type_arguments_;
2395 TypeArguments& delayed_function_type_arguments_;
2396};
2397
2398void ProgramReloadContext::InvalidateFields(
2399 Zone* zone,
2400 const GrowableArray<const Field*>& fields,
2401 const GrowableArray<const Instance*>& instances) {
2402 TIMELINE_SCOPE(InvalidateFields);
2403 SafepointMutexLocker ml(IG->subtype_test_cache_mutex());
2404 FieldInvalidator invalidator(zone);
2405 invalidator.CheckStatics(fields);
2406 invalidator.CheckInstances(instances);
2407}
2408
2409void ProgramReloadContext::InvalidateWorld() {
2410 TIMELINE_SCOPE(InvalidateWorld);
2411 TIR_Print("---- INVALIDATING WORLD\n");
2412 ResetMegamorphicCaches();
2413 if (FLAG_trace_deoptimization) {
2414 THR_Print("Deopt for reload\n");
2415 }
2416 DeoptimizeFunctionsOnStack();
2417 ResetUnoptimizedICsOnStack();
2418 RunInvalidationVisitors();
2419}
2420
2421ClassPtr ProgramReloadContext::OldClassOrNull(const Class& replacement_or_new) {
2422 UnorderedHashSet<ClassMapTraits> old_classes_set(old_classes_set_storage_);
2423 Class& cls = Class::Handle();
2424 cls ^= old_classes_set.GetOrNull(replacement_or_new);
2425 old_classes_set_storage_ = old_classes_set.Release().ptr();
2426 return cls.ptr();
2427}
2428
2429StringPtr ProgramReloadContext::FindLibraryPrivateKey(
2430 const Library& replacement_or_new) {
2431 const Library& old = Library::Handle(OldLibraryOrNull(replacement_or_new));
2432 if (old.IsNull()) {
2433 return String::null();
2434 }
2435#if defined(DEBUG)
2436 VTIR_Print("`%s` is getting `%s`'s private key.\n",
2437 String::Handle(replacement_or_new.url()).ToCString(),
2438 String::Handle(old.url()).ToCString());
2439#endif
2440 return old.private_key();
2441}
2442
2443LibraryPtr ProgramReloadContext::OldLibraryOrNull(
2444 const Library& replacement_or_new) {
2445 UnorderedHashSet<LibraryMapTraits> old_libraries_set(
2446 old_libraries_set_storage_);
2447 Library& lib = Library::Handle();
2448 lib ^= old_libraries_set.GetOrNull(replacement_or_new);
2449 old_libraries_set.Release();
2450
2451 if (lib.IsNull() &&
2452 (group_reload_context_->root_url_prefix_ != String::null()) &&
2453 (group_reload_context_->old_root_url_prefix_ != String::null())) {
2454 return OldLibraryOrNullBaseMoved(replacement_or_new);
2455 }
2456 return lib.ptr();
2457}
2458
2459// Attempt to find the counterpart of |replacement_or_new| with the knowledge
2460// that the base url prefix has moved.
2461LibraryPtr ProgramReloadContext::OldLibraryOrNullBaseMoved(
2462 const Library& replacement_or_new) {
2463 const String& url_prefix =
2464 String::Handle(group_reload_context_->root_url_prefix_);
2465 const String& old_url_prefix =
2466 String::Handle(group_reload_context_->old_root_url_prefix_);
2467 const intptr_t prefix_length = url_prefix.Length();
2468 const intptr_t old_prefix_length = old_url_prefix.Length();
2469 const String& new_url = String::Handle(replacement_or_new.url());
2470 const String& suffix =
2471 String::Handle(String::SubString(new_url, prefix_length));
2472 if (!new_url.StartsWith(url_prefix)) {
2473 return Library::null();
2474 }
2475 Library& old = Library::Handle();
2476 String& old_url = String::Handle();
2477 String& old_suffix = String::Handle();
2478 const auto& saved_libs = GrowableObjectArray::Handle(saved_libraries_);
2479 ASSERT(!saved_libs.IsNull());
2480 for (intptr_t i = 0; i < saved_libs.Length(); i++) {
2481 old = Library::RawCast(saved_libs.At(i));
2482 old_url = old.url();
2483 if (!old_url.StartsWith(old_url_prefix)) {
2484 continue;
2485 }
2486 old_suffix = String::SubString(old_url, old_prefix_length);
2487 if (old_suffix.IsNull()) {
2488 continue;
2489 }
2490 if (old_suffix.Equals(suffix)) {
2491 TIR_Print("`%s` is moving to `%s`\n", old_url.ToCString(),
2492 new_url.ToCString());
2493 return old.ptr();
2494 }
2495 }
2496 return Library::null();
2497}
2498
2499void ProgramReloadContext::BuildLibraryMapping() {
2500 const GrowableObjectArray& libs =
2501 GrowableObjectArray::Handle(object_store()->libraries());
2502
2503 Library& replacement_or_new = Library::Handle();
2504 Library& old = Library::Handle();
2505 for (intptr_t i = group_reload_context_->num_saved_libs_; i < libs.Length();
2506 i++) {
2507 replacement_or_new = Library::RawCast(libs.At(i));
2508 old = OldLibraryOrNull(replacement_or_new);
2509 if (old.IsNull()) {
2510 if (FLAG_identity_reload) {
2511 TIR_Print("Could not find original library for %s\n",
2512 replacement_or_new.ToCString());
2513 UNREACHABLE();
2514 }
2515 // New library.
2516 AddLibraryMapping(replacement_or_new, replacement_or_new);
2517 } else {
2518 ASSERT(!replacement_or_new.is_dart_scheme());
2519 // Replaced library.
2520 AddLibraryMapping(replacement_or_new, old);
2521
2522 AddBecomeMapping(old, replacement_or_new);
2523 }
2524 }
2525}
2526
2527// Find classes that have been removed from the program.
2528// Instances of these classes may still be referenced from variables, so the
2529// functions of these classes may still execute in the future, and they need to
2530// be given patch class owners so that they correctly reference their (old) kernel
2531// data even after the library's kernel data is updated.
2532//
2533// Note that all such classes must belong to a library that has either been
2534// changed or removed.
2535void ProgramReloadContext::BuildRemovedClassesSet() {
2536 // Find all old classes [mapped_old_classes_set].
2537 UnorderedHashMap<ClassMapTraits> class_map(class_map_storage_);
2538 UnorderedHashSet<ClassMapTraits> mapped_old_classes_set(
2539 HashTables::New<UnorderedHashSet<ClassMapTraits> >(
2540 class_map.NumOccupied()));
2541 {
2542 UnorderedHashMap<ClassMapTraits>::Iterator it(&class_map);
2543 Class& cls = Class::Handle();
2544 Class& new_cls = Class::Handle();
2545 while (it.MoveNext()) {
2546 const intptr_t entry = it.Current();
2547 new_cls = Class::RawCast(class_map.GetKey(entry));
2548 cls = Class::RawCast(class_map.GetPayload(entry, 0));
2549 mapped_old_classes_set.InsertOrGet(cls);
2550 }
2551 }
2552 class_map.Release();
2553
2554 // Find all reloaded libraries [mapped_old_library_set].
2555 UnorderedHashMap<LibraryMapTraits> library_map(library_map_storage_);
2556 UnorderedHashMap<LibraryMapTraits>::Iterator it_library(&library_map);
2557 UnorderedHashSet<LibraryMapTraits> mapped_old_library_set(
2558 HashTables::New<UnorderedHashSet<LibraryMapTraits> >(
2559 library_map.NumOccupied()));
2560 {
2561 Library& old_library = Library::Handle();
2562 Library& new_library = Library::Handle();
2563 while (it_library.MoveNext()) {
2564 const intptr_t entry = it_library.Current();
2565 new_library ^= library_map.GetKey(entry);
2566 old_library ^= library_map.GetPayload(entry, 0);
2567 if (new_library.ptr() != old_library.ptr()) {
2568 mapped_old_library_set.InsertOrGet(old_library);
2569 }
2570 }
2571 }
2572
2573 // For every old class, check whether its library was reloaded and whether
2574 // the class was mapped. If the class wasn't mapped, add it to
2575 // [removed_class_set].
2576 UnorderedHashSet<ClassMapTraits> old_classes_set(old_classes_set_storage_);
2577 UnorderedHashSet<ClassMapTraits>::Iterator it(&old_classes_set);
2578 UnorderedHashSet<ClassMapTraits> removed_class_set(
2579 removed_class_set_storage_);
2580 Class& old_cls = Class::Handle();
2581 Class& new_cls = Class::Handle();
2582 Library& old_library = Library::Handle();
2583 Library& mapped_old_library = Library::Handle();
2584 while (it.MoveNext()) {
2585 const intptr_t entry = it.Current();
2586 old_cls ^= Class::RawCast(old_classes_set.GetKey(entry));
2587 old_library = old_cls.library();
2588 if (old_library.IsNull()) {
2589 continue;
2590 }
2591 mapped_old_library ^= mapped_old_library_set.GetOrNull(old_library);
2592 if (!mapped_old_library.IsNull()) {
2593 new_cls ^= mapped_old_classes_set.GetOrNull(old_cls);
2594 if (new_cls.IsNull()) {
2595 removed_class_set.InsertOrGet(old_cls);
2596 }
2597 }
2598 }
2599 removed_class_set_storage_ = removed_class_set.Release().ptr();
2600
2601 old_classes_set.Release();
2602 mapped_old_classes_set.Release();
2603 mapped_old_library_set.Release();
2604 library_map.Release();
2605}
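// In set terms: removed = { c in old_classes_set : c's library was actually
// replaced and c was not mapped to a new class }. Classes from untouched
// libraries keep their kernel data and need no patching.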
2606
2607void ProgramReloadContext::AddClassMapping(const Class& replacement_or_new,
2608 const Class& original) {
2609 UnorderedHashMap<ClassMapTraits> map(class_map_storage_);
2610 bool update = map.UpdateOrInsert(replacement_or_new, original);
2611 ASSERT(!update);
2612 // The storage given to the map may have been reallocated; remember the new
2613 // address.
2614 class_map_storage_ = map.Release().ptr();
2615}
2616
2617void ProgramReloadContext::AddLibraryMapping(const Library& replacement_or_new,
2618 const Library& original) {
2619 UnorderedHashMap<LibraryMapTraits> map(library_map_storage_);
2620 bool update = map.UpdateOrInsert(replacement_or_new, original);
2621 ASSERT(!update);
2622 // The storage given to the map may have been reallocated; remember the new
2623 // address.
2624 library_map_storage_ = map.Release().ptr();
2625}
2626
2627void ProgramReloadContext::AddStaticFieldMapping(const Field& old_field,
2628 const Field& new_field) {
2629 ASSERT(old_field.is_static());
2630 ASSERT(new_field.is_static());
2631 AddBecomeMapping(old_field, new_field);
2632}
2633
2634void ProgramReloadContext::AddBecomeMapping(const Object& old,
2635 const Object& neu) {
2636 become_.Add(old, neu);
2637}
2638
2639void ProgramReloadContext::RebuildDirectSubclasses() {
2640 ClassTable* class_table = IG->class_table();
2641 intptr_t num_cids = class_table->NumCids();
2642
2643 // Clear the direct subclasses for all classes.
2644 Class& cls = Class::Handle();
2645 const GrowableObjectArray& null_list = GrowableObjectArray::Handle();
2646 for (intptr_t i = 1; i < num_cids; i++) {
2647 if (class_table->HasValidClassAt(i)) {
2648 cls = class_table->At(i);
2649 if (!cls.is_declaration_loaded()) {
2650 continue; // Can't have any subclasses or implementors yet.
2651 }
2652 // Testing for null to prevent attempting to write to read-only classes
2653 // in the VM isolate.
2654 if (cls.direct_subclasses() != GrowableObjectArray::null()) {
2655 cls.set_direct_subclasses(null_list);
2656 }
2657 if (cls.direct_implementors() != GrowableObjectArray::null()) {
2658 cls.set_direct_implementors(null_list);
2659 }
2660 }
2661 }
2662
2663 // Recompute the direct subclasses / implementors.
2664
2665 AbstractType& super_type = AbstractType::Handle();
2666 Class& super_cls = Class::Handle();
2667
2668 Array& interface_types = Array::Handle();
2669 AbstractType& interface_type = AbstractType::Handle();
2670 Class& interface_class = Class::Handle();
2671
2672 for (intptr_t i = 1; i < num_cids; i++) {
2673 if (class_table->HasValidClassAt(i)) {
2674 cls = class_table->At(i);
2675 if (!cls.is_declaration_loaded()) {
2676 continue; // Will register itself later when loaded.
2677 }
2678 super_type = cls.super_type();
2679 if (!super_type.IsNull() && !super_type.IsObjectType()) {
2680 super_cls = cls.SuperClass();
2681 ASSERT(!super_cls.IsNull());
2682 super_cls.AddDirectSubclass(cls);
2683 }
2684
2685 interface_types = cls.interfaces();
2686 if (!interface_types.IsNull()) {
2687 const intptr_t mixin_index = cls.is_transformed_mixin_application()
2688 ? interface_types.Length() - 1
2689 : -1;
2690 for (intptr_t j = 0; j < interface_types.Length(); ++j) {
2691 interface_type ^= interface_types.At(j);
2692 interface_class = interface_type.type_class();
2693 interface_class.AddDirectImplementor(
2694 cls, /* is_mixin = */ j == mixin_index);
2695 }
2696 }
2697 }
2698 }
2699}
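// Rebuilding these links from scratch, rather than patching them, is the
// simpler correct option: a reload may change any class's superclass or
// interface list, so every edge of the old subclass/implementor graph is
// suspect.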
2700
2701#endif // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
2702
2703} // namespace dart