12#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
// Stress-testing flag: when N > 0, trigger an isolate reload every N stack
// overflow (interrupt) checks; 0 disables the periodic-reload stress mode.
DEFINE_FLAG(int, reload_every, 0, "Reload every N stack overflow checks.");
// When set, emit tracing output while an isolate reload is in progress.
DEFINE_FLAG(bool, trace_reload, false, "Trace isolate reloading");
39#if !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
43 "trace isolate reloading verbose");
// Enables extra consistency checks for "identity" reloads (reloading a
// program with an unchanged copy of itself).
DEFINE_FLAG(bool, identity_reload, false, "Enable checks for identity reload.");
// Restricts --reload-every triggering to optimized code only.
DEFINE_FLAG(bool, reload_every_optimized, true, "Only from optimized code.");
47 reload_every_back_off,
49 "Double the --reload-every value after each reload.");
51 reload_force_rollback,
53 "Force all reloads to fail and rollback.");
57 "Assert that an isolate has reloaded at least once.")
// Shorthand for the current object's isolate group; used pervasively below.
#define IG (isolate_group())

// Opens a begin/end event pair named `name` on the isolate timeline stream,
// scoped to the enclosing C++ block via the RAII TimelineBeginEndScope.
#define TIMELINE_SCOPE(name)                                                   \
  TimelineBeginEndScope tbes##name(Thread::Current(),                          \
                                   Timeline::GetIsolateStream(), #name)
74 : context_(context), count_(0) {}
79 if (morpher !=
nullptr) {
  // Read-only accessor for count_ (initialized to 0 in the constructor;
  // presumably incremented as objects are gathered — confirm in full source).
  intptr_t count() { return count_; }
126 auto ensure_boxed_and_guarded = [&](
const Field& field) {
127 field.set_needs_load_guard(
true);
128 if (field.is_unboxed()) {
134 for (intptr_t
i = 0;
i < to_fields.
Length();
i++) {
142 to_name = to_field.
name();
145 bool new_field =
true;
148 for (intptr_t j = 0; j < from_fields.
Length(); j++) {
154 from_name = from_field.
name();
155 if (from_name.
Equals(to_name)) {
164 ensure_boxed_and_guarded(to_field);
173 from_box_cid = field_cid;
176 from_box_cid = kIntegerCid;
187 to_box_cid = field_cid;
190 to_box_cid = kIntegerCid;
199 mapping->Add({from_field.
HostOffset(), from_box_cid});
200 mapping->Add({to_field.
HostOffset(), to_box_cid});
209 ensure_boxed_and_guarded(to_field);
210 new_fields_offsets->Add(to_field.
HostOffset());
221 const Class& old_class,
222 const Class& new_class,
227 old_class_(
Class::Handle(zone, old_class.ptr())),
228 new_class_(
Class::Handle(zone, new_class.ptr())),
230 new_fields_offsets_(new_fields_offsets),
242 for (intptr_t
i = 0;
i < before_.length();
i++) {
272#if defined(HASH_IN_OBJECT_HEADER)
273 const uint32_t
hash = Object::GetCachedHash(before.
ptr());
274 Object::SetCachedHashIfNotSet(after.
ptr(),
hash);
278 for (intptr_t
i = 0;
i < mapping_->
length();
i += 2) {
279 const auto& from = mapping_->
At(
i);
280 const auto& to = mapping_->
At(
i + 1);
288 ObjectPtr raw_value = before.RawGetFieldAtOffset(from.offset);
289 after.RawSetFieldAtOffset(to.offset, raw_value);
292 switch (from.box_cid) {
294 const auto unboxed_value =
295 before.RawGetUnboxedFieldAtOffset<
double>(from.offset);
299 case kFloat32x4Cid: {
300 const auto unboxed_value =
305 case kFloat64x2Cid: {
306 const auto unboxed_value =
312 const auto unboxed_value =
313 before.RawGetUnboxedFieldAtOffset<int64_t>(from.offset);
321 after.RawSetFieldAtOffset(to.offset,
value);
324 ASSERT(to.box_cid == from.box_cid);
325 switch (from.box_cid) {
327 const auto unboxed_value =
328 before.RawGetUnboxedFieldAtOffset<
double>(from.offset);
329 after.RawSetUnboxedFieldAtOffset<
double>(to.offset, unboxed_value);
333 case kFloat64x2Cid: {
334 const auto unboxed_value =
341 const auto unboxed_value =
342 before.RawGetUnboxedFieldAtOffset<int64_t>(from.offset);
343 after.RawSetUnboxedFieldAtOffset<int64_t>(to.offset, unboxed_value);
350 for (intptr_t
i = 0;
i < new_fields_offsets_->
length();
i++) {
351 const auto& field_offset = new_fields_offsets_->
At(
i);
352 after.RawSetFieldAtOffset(field_offset, Object::sentinel());
360 become->
Add(before, after);
380 THR_Print(
"Morphing objects with cid: %d via this mapping: ", cid_);
381 for (
int i = 0;
i < mapping_->
length();
i += 2) {
382 const auto& from = mapping_->
At(
i);
383 const auto& to = mapping_->
At(
i + 1);
385 THR_Print(
" (%" Pd " -> %" Pd ")", from.box_cid, to.box_cid);
399 jsobj.
AddProperty(
"instanceCount", before_.length());
401 for (
int i = 0;
i < mapping_->
length();
i += 2) {
402 const auto& from = mapping_->
At(
i);
403 const auto& to = mapping_->
At(
i + 1);
418 context->ReportError(
error);
424 return LanguageError::New(
message);
443 from_(
Class::ZoneHandle(zone, from.ptr())),
444 to_(
Class::ZoneHandle(zone, to.ptr())) {}
455 ASSERT(!reasons_to_cancel_reload_.is_empty());
457 return reasons_to_cancel_reload_.At(0)->ToError();
463 static const char*
Name() {
return "ScriptUrlSetTraits"; }
466 if (!
a.IsString() || !
b.IsString()) {
470 return String::Cast(
a).Equals(String::Cast(
b));
479 static const char*
Name() {
return "ClassMapTraits"; }
482 if (!
a.IsClass() || !
b.IsClass()) {
490 LibraryPtr raw_library = Class::Cast(obj).library();
492 return class_name_hash;
504 static const char*
Name() {
return "LibraryMapTraits"; }
507 if (!
a.IsLibrary() || !
b.IsLibrary()) {
524 if (!a_name.
Equals(b_name)) {
532 return a_lib.
ptr() == b_lib.
ptr();
543 return a_lib_url.
Equals(b_lib_url);
550 : zone_(
Thread::Current()->zone()),
551 isolate_group_(isolate_group),
552 class_table_(class_table),
553 start_time_micros_(
OS::GetCurrentMonotonicMicros()),
554 reload_timestamp_(
OS::GetCurrentTimeMillis()),
556 instance_morphers_(zone_, 0),
557 reasons_to_cancel_reload_(zone_, 0),
558 instance_morpher_by_cid_(zone_),
560 root_url_prefix_(
String::null()),
561 old_root_url_prefix_(
String::null()) {}
565 std::shared_ptr<IsolateGroupReloadContext> group_reload_context,
567 : zone_(
Thread::Current()->zone()),
568 group_reload_context_(group_reload_context),
569 isolate_group_(isolate_group),
570 old_classes_set_storage_(
Array::null()),
571 class_map_storage_(
Array::null()),
572 removed_class_set_storage_(
Array::null()),
573 old_libraries_set_storage_(
Array::null()),
574 library_map_storage_(
Array::null()),
575 saved_root_library_(
Library::null()),
585 ASSERT(
IG->class_table() ==
IG->heap_walk_class_table());
588void IsolateGroupReloadContext::ReportError(
const Error&
error) {
595 service_event.set_reload_error(&
error);
599void IsolateGroupReloadContext::ReportSuccess() {
612 error_(
Error::ZoneHandle(zone,
error.ptr())) {}
  // Returns the underlying Error stored in error_ (a zone handle wrapping
  // the error this reason-for-cancelling was constructed with).
  ErrorPtr ToError() { return error_.ptr(); }
618 StringPtr ToString() {
624 const intptr_t a_length = strlen(
a);
625 const intptr_t b_length = strlen(
b);
626 intptr_t a_cursor = a_length;
627 intptr_t b_cursor = b_length;
629 while ((a_cursor >= 0) && (b_cursor >= 0)) {
630 if (
a[a_cursor] !=
b[b_cursor]) {
637 ASSERT((a_length - a_cursor) == (b_length - b_cursor));
638 return (a_length - a_cursor);
647 "An error occurred while accepting the most recent"
648 " compilation results: %s",
652 "An error occurred while accepting the most recent"
653 " compilation results: %s",
658 return ApiError::New(error_str);
669 "An error occurred while rejecting the most recent"
670 " compilation results: %s",
674 "An error occurred while rejecting the most recent"
675 " compilation results: %s",
680 return ApiError::New(error_str);
687 const char* root_script_url,
688 const char* packages_url,
689 const uint8_t* kernel_buffer,
690 intptr_t kernel_buffer_size) {
701 GetRootLibUrl(root_script_url);
703 std::unique_ptr<kernel::Program> kernel_program;
706 num_received_libs_ = 0;
707 bytes_received_libs_ = 0;
708 num_received_classes_ = 0;
709 num_received_procedures_ = 0;
711 bool did_kernel_compilation =
false;
712 bool skip_reload =
false;
715 intptr_t* p_num_received_classes =
nullptr;
716 intptr_t* p_num_received_procedures =
nullptr;
723 if (kernel_program !=
nullptr) {
724 num_received_libs_ = kernel_program->library_count();
725 bytes_received_libs_ = kernel_program->binary().LengthInBytes();
726 p_num_received_classes = &num_received_classes_;
727 p_num_received_procedures = &num_received_procedures_;
729 if (kernel_buffer ==
nullptr || kernel_buffer_size == 0) {
730 char*
error = CompileToKernel(force_reload, packages_url,
731 &kernel_buffer, &kernel_buffer_size);
732 did_kernel_compilation =
true;
733 if (
error !=
nullptr) {
734 TIR_Print(
"---- LOAD FAILED, ABORTING RELOAD\n");
739 ReportReasonsForCancelling();
740 CommonFinalizeTail(num_old_libs_);
748 const_cast<uint8_t*
>(kernel_buffer), kernel_buffer_size));
756 ExternalTypedData::Cast(kernel_program->binary()));
758 modified_libs_ =
new (
Z)
BitVector(
Z, num_old_libs_);
760 kernel_program.get(),
IG, modified_libs_, force_reload, &skip_reload,
761 p_num_received_classes, p_num_received_procedures);
762 modified_libs_transitive_ =
new (
Z)
BitVector(
Z, num_old_libs_);
763 BuildModifiedLibrariesClosure(modified_libs_);
765 ASSERT(num_saved_libs_ == -1);
767 for (intptr_t
i = 0;
i < modified_libs_->
length();
i++) {
778 reload_skipped_ =
true;
779 ReportOnJSON(js_, num_old_libs_);
784 if (did_kernel_compilation) {
789 ReportReasonsForCancelling();
790 CommonFinalizeTail(num_old_libs_);
794 TIR_Print(
"---- SKIPPING RELOAD (No libraries were modified)\n");
800 intptr_t number_of_isolates = 0;
802 [&](
Isolate* isolate) { number_of_isolates++; });
808 const bool old_concurrent_mark_flag =
810 if (old_concurrent_mark_flag) {
820 IG->program_reload_context()->EnsuredUnoptimizedCodeForStack();
821 IG->program_reload_context()->DeoptimizeDependentCode();
822 IG->program_reload_context()->ReloadPhase1AllocateStorageMapsAndCheckpoint();
825 modified_libs_ =
nullptr;
826 modified_libs_transitive_ =
nullptr;
828 if (FLAG_gc_during_reload) {
837 IG->program_reload_context()->CheckpointClasses();
840 if (FLAG_gc_during_reload) {
853 kernel_program.get(), root_lib_url_));
856 TIR_Print(
"---- LOAD FAILED, ABORTING RELOAD\n");
861 IG->program_reload_context()->ReloadPhase4Rollback();
862 CommonFinalizeTail(num_old_libs_);
864 ASSERT(!reload_skipped_ && !reload_finalized_);
867 IG->program_reload_context()->ReloadPhase3FinalizeLoading();
869 if (FLAG_gc_during_reload) {
878 if (did_kernel_compilation) {
887 if (!FLAG_reload_force_rollback && !HasReasonsForCancelling()) {
890 bool discard_class_tables =
true;
891 if (HasInstanceMorphers()) {
902 if (FLAG_gc_during_reload) {
922 MorphInstancesPhase1Allocate(&locator,
IG->become());
933 IG->DropOriginalClassTable();
935 MorphInstancesPhase2Become(
IG->become());
937 discard_class_tables =
false;
940 if (FLAG_gc_during_reload) {
946 if (FLAG_identity_reload) {
947 if (!discard_class_tables) {
948 TIR_Print(
"Identity reload failed! Some instances were morphed\n");
950 if (
IG->heap_walk_class_table()->NumCids() !=
951 IG->class_table()->NumCids()) {
952 TIR_Print(
"Identity reload failed! B#C=%" Pd " A#C=%" Pd "\n",
953 IG->heap_walk_class_table()->NumCids(),
954 IG->class_table()->NumCids());
956 if (
IG->heap_walk_class_table()->NumTopLevelCids() !=
957 IG->class_table()->NumTopLevelCids()) {
958 TIR_Print(
"Identity reload failed! B#TLC=%" Pd " A#TLC=%" Pd "\n",
959 IG->heap_walk_class_table()->NumTopLevelCids(),
960 IG->class_table()->NumTopLevelCids());
963 if (discard_class_tables) {
964 IG->DropOriginalClassTable();
979 IG->program_reload_context()->RebuildDirectSubclasses();
981 const intptr_t final_library_count =
984 CommonFinalizeTail(final_library_count);
988 if (old_concurrent_mark_flag) {
993 if (!
result.IsError() || HasReasonsForCancelling()) {
997 ReportReasonsForCancelling();
1003 IG->object_store()->set_uri_to_resolved_uri_map(null_array);
1004 IG->object_store()->set_resolved_uri_to_uri_map(null_array);
1008 if (
result.IsUnwindError()) {
1010 ForEachIsolate([&](
Isolate* isolate) {
1026 for (intptr_t
i = 0;
i < dep_libs->
length();
i++) {
1027 intptr_t dep_lib_index = (*dep_libs)[
i];
1028 if (!modified_libs->
Contains(dep_lib_index)) {
1029 modified_libs->
Add(dep_lib_index);
1036void IsolateGroupReloadContext::BuildModifiedLibrariesClosure(
1037 BitVector* modified_libs) {
1038 const GrowableObjectArray&
libs =
1041 intptr_t num_libs =
libs.Length();
1044 ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>* imported_by =
new (zone_)
1045 ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>(zone_, num_libs);
1046 imported_by->SetLength(num_libs);
1047 for (intptr_t
i = 0;
i < num_libs;
i++) {
1048 (*imported_by)[
i] =
new (zone_) ZoneGrowableArray<intptr_t>(zone_, 0);
1055 for (intptr_t lib_idx = 0; lib_idx < num_libs; lib_idx++) {
1056 lib ^=
libs.At(lib_idx);
1057 ASSERT(lib_idx == lib.index());
1058 if (lib.is_dart_scheme()) {
1064 ports = lib.imports();
1065 for (intptr_t import_idx = 0; import_idx < ports.Length(); import_idx++) {
1066 ns ^= ports.At(import_idx);
1069 target_url =
target.url();
1070 (*imported_by)[
target.index()]->Add(lib.index());
1075 ports = lib.exports();
1076 for (intptr_t export_idx = 0; export_idx < ports.Length(); export_idx++) {
1077 ns ^= ports.At(export_idx);
1080 (*imported_by)[
target.index()]->Add(lib.index());
1085 DictionaryIterator entries(lib);
1088 while (entries.HasNext()) {
1089 entry = entries.GetNext();
1090 if (entry.IsLibraryPrefix()) {
1092 ports =
prefix.imports();
1093 for (intptr_t import_idx = 0; import_idx < ports.Length();
1095 ns ^= ports.At(import_idx);
1098 (*imported_by)[
target.index()]->Add(lib.index());
1105 for (intptr_t lib_idx = 0; lib_idx < num_libs; lib_idx++) {
1106 lib ^=
libs.At(lib_idx);
1107 if (lib.is_dart_scheme() || modified_libs_transitive_->
Contains(lib_idx)) {
1113 if (modified_libs->Contains(lib_idx)) {
1114 modified_libs_transitive_->
Add(lib_idx);
1120void IsolateGroupReloadContext::GetRootLibUrl(
const char* root_script_url) {
1121 const auto& old_root_lib =
1123 ASSERT(!old_root_lib.IsNull());
1124 const auto& old_root_lib_url =
String::Handle(old_root_lib.url());
1127 if (root_script_url !=
nullptr) {
1130 root_lib_url_ = old_root_lib_url.
ptr();
1134 if (!old_root_lib_url.Equals(root_lib_url_)) {
1135 const char* old_root_library_url_c = old_root_lib_url.ToCString();
1136 const char* root_library_url_c = root_lib_url_.
ToCString();
1137 const intptr_t common_suffix_length =
1140 root_lib_url_, 0, root_lib_url_.
Length() - common_suffix_length + 1);
1141 old_root_url_prefix_ =
1143 old_root_lib_url.Length() - common_suffix_length + 1);
1147char* IsolateGroupReloadContext::CompileToKernel(
bool force_reload,
1148 const char* packages_url,
1149 const uint8_t** kernel_buffer,
1150 intptr_t* kernel_buffer_size) {
1152 intptr_t modified_scripts_count = 0;
1153 FindModifiedSources(force_reload, &modified_scripts, &modified_scripts_count,
1158 const char* root_lib_url = root_lib_url_.
ToCString();
1161 root_lib_url,
nullptr, 0, modified_scripts_count, modified_scripts,
1170 if (retval.
kernel !=
nullptr) {
1173 return retval.
error;
1175 *kernel_buffer = retval.
kernel;
1180void ProgramReloadContext::ReloadPhase1AllocateStorageMapsAndCheckpoint() {
1182 old_classes_set_storage_ =
1183 HashTables::New<UnorderedHashSet<ClassMapTraits> >(4);
1184 class_map_storage_ = HashTables::New<UnorderedHashMap<ClassMapTraits> >(4);
1185 removed_class_set_storage_ =
1186 HashTables::New<UnorderedHashSet<ClassMapTraits> >(4);
1187 old_libraries_set_storage_ =
1188 HashTables::New<UnorderedHashSet<LibraryMapTraits> >(4);
1189 library_map_storage_ =
1190 HashTables::New<UnorderedHashMap<LibraryMapTraits> >(4);
1197 CheckpointLibraries();
1201ObjectPtr ProgramReloadContext::ReloadPhase2LoadKernel(
1202 kernel::Program* program,
1203 const String& root_lib_url) {
1207 if (setjmp(*jump.Set()) == 0) {
1209 if (tmp.IsError()) {
1220 IG->object_store()->set_root_library(lib);
1223 return thread->StealStickyError();
1227void ProgramReloadContext::ReloadPhase3FinalizeLoading() {
1228 BuildLibraryMapping();
1229 BuildRemovedClassesSet();
1233void ProgramReloadContext::ReloadPhase4CommitPrepare() {
1234 CommitBeforeInstanceMorphing();
1237void ProgramReloadContext::ReloadPhase4CommitFinish() {
1238 CommitAfterInstanceMorphing();
1242void ProgramReloadContext::ReloadPhase4Rollback() {
1243 IG->RestoreOriginalClassTable();
1244 RollbackLibraries();
1247void ProgramReloadContext::RegisterClass(
const Class& new_cls) {
1249 if (old_cls.IsNull()) {
1250 if (new_cls.IsTopLevel()) {
1251 IG->class_table()->RegisterTopLevel(new_cls);
1253 IG->class_table()->Register(new_cls);
1256 if (FLAG_identity_reload) {
1257 TIR_Print(
"Could not find replacement class for %s\n",
1258 new_cls.ToCString());
1263 AddClassMapping(new_cls, new_cls);
1266 VTIR_Print(
"Registering class: %s\n", new_cls.ToCString());
1267 new_cls.set_id(old_cls.id());
1268 IG->class_table()->SetAt(old_cls.id(), new_cls.ptr());
1269 new_cls.CopyCanonicalConstants(old_cls);
1270 new_cls.CopyDeclarationType(old_cls);
1271 AddBecomeMapping(old_cls, new_cls);
1272 AddClassMapping(new_cls, old_cls);
1275void IsolateGroupReloadContext::CommonFinalizeTail(
1276 intptr_t final_library_count) {
1278 ReportOnJSON(js_, final_library_count);
1279 reload_finalized_ =
true;
1282void IsolateGroupReloadContext::ReportOnJSON(JSONStream*
stream,
1283 intptr_t final_library_count) {
1284 JSONObject jsobj(
stream);
1285 jsobj.AddProperty(
"type",
"ReloadReport");
1286 jsobj.AddProperty(
"success", reload_skipped_ || !HasReasonsForCancelling());
1288 if (HasReasonsForCancelling()) {
1290 JSONArray array(&jsobj,
"notices");
1291 for (intptr_t
i = 0;
i < reasons_to_cancel_reload_.length();
i++) {
1293 reason->AppendTo(&array);
1298 JSONObject details(&jsobj,
"details");
1299 details.AddProperty(
"finalLibraryCount", final_library_count);
1300 details.AddProperty(
"receivedLibraryCount", num_received_libs_);
1301 details.AddProperty(
"receivedLibrariesBytes", bytes_received_libs_);
1302 details.AddProperty(
"receivedClassesCount", num_received_classes_);
1303 details.AddProperty(
"receivedProceduresCount", num_received_procedures_);
1304 if (reload_skipped_) {
1306 details.AddProperty(
"savedLibraryCount", final_library_count);
1307 details.AddProperty(
"loadedLibraryCount",
static_cast<intptr_t
>(0));
1310 const intptr_t loaded_library_count =
1311 final_library_count - num_saved_libs_;
1312 details.AddProperty(
"savedLibraryCount", num_saved_libs_);
1313 details.AddProperty(
"loadedLibraryCount", loaded_library_count);
1314 JSONArray array(&jsobj,
"shapeChangeMappings");
1315 for (intptr_t
i = 0;
i < instance_morphers_.length();
i++) {
1316 instance_morphers_.At(
i)->AppendTo(&array);
1322void ProgramReloadContext::EnsuredUnoptimizedCodeForStack() {
1325 IG->ForEachIsolate([](
Isolate* isolate) {
1326 auto thread = isolate->mutator_thread();
1327 if (thread ==
nullptr) {
1334 while (it.HasNextFrame()) {
1335 StackFrame*
frame = it.NextFrame();
1336 if (
frame->IsDartFrame()) {
1337 func =
frame->LookupDartFunction();
1341 if (!func.ForceOptimize()) {
1342 func.EnsureHasCompiledUnoptimizedCode();
1349void ProgramReloadContext::DeoptimizeDependentCode() {
1351 ClassTable* class_table =
IG->class_table();
1354 const intptr_t top =
IG->class_table()->NumCids();
1359 SafepointWriteRwLocker ml(thread,
IG->program_lock());
1360 for (intptr_t cls_idx = bottom; cls_idx < top; cls_idx++) {
1361 if (!class_table->HasValidClassAt(cls_idx)) {
1367 cls = class_table->At(cls_idx);
1370 cls.DisableAllCHAOptimizedCode();
1373 fields = cls.fields();
1374 ASSERT(!fields.IsNull());
1375 for (intptr_t field_idx = 0; field_idx < fields.Length(); field_idx++) {
1378 field.DeoptimizeDependentCode();
1387void ProgramReloadContext::CheckpointClasses() {
1388 TIR_Print(
"---- CHECKPOINTING CLASSES\n");
1400 IG->CloneClassTableForReload();
1405 ClassTable* class_table =
IG->class_table();
1412 UnorderedHashSet<ClassMapTraits> old_classes_set(old_classes_set_storage_);
1413 for (intptr_t
i = 0;
i < class_table->NumCids();
i++) {
1414 if (class_table->IsValidIndex(
i) && class_table->HasValidClassAt(
i)) {
1416 cls = class_table->At(
i);
1417 bool already_present = old_classes_set.Insert(cls);
1418 ASSERT(!already_present);
1422 for (intptr_t
i = 0;
i < class_table->NumTopLevelCids();
i++) {
1424 if (class_table->IsValidIndex(
cid) && class_table->HasValidClassAt(
cid)) {
1425 cls = class_table->At(
cid);
1426 bool already_present = old_classes_set.Insert(cls);
1427 ASSERT(!already_present);
1430 old_classes_set_storage_ = old_classes_set.Release().ptr();
1432 class_table->NumCids() + class_table->NumTopLevelCids());
1438bool IsolateGroupReloadContext::ScriptModifiedSince(
const Script&
script,
1440 if (IsolateGroupReloadContext::file_modified_callback_ ==
nullptr) {
1445 const char* url_chars = url.ToCString();
1446 return (*IsolateGroupReloadContext::file_modified_callback_)(url_chars,
1452 for (intptr_t
i = 0;
i < seen_uris.
length();
i++) {
1453 const char* seen_uri = seen_uris.
At(
i);
1454 size_t seen_len = strlen(seen_uri);
1455 if (seen_len != strlen(uri)) {
1457 }
else if (strncmp(seen_uri, uri, seen_len) == 0) {
1464void IsolateGroupReloadContext::FindModifiedSources(
1468 const char* packages_url) {
1470 GrowableArray<const char*> modified_sources_uris;
1478 for (intptr_t lib_idx = 0; lib_idx <
libs.Length(); lib_idx++) {
1479 lib ^=
libs.At(lib_idx);
1480 if (lib.is_dart_scheme()) {
1484 scripts = lib.LoadedScripts();
1485 for (intptr_t script_idx = 0; script_idx <
scripts.Length(); script_idx++) {
1488 const bool dart_scheme = uri.StartsWith(Symbols::DartScheme());
1500 if (force_reload || ScriptModifiedSince(
script, last_reload)) {
1501 modified_sources_uris.Add(uri.ToCString());
1508 if (packages_url !=
nullptr) {
1509 if (IsolateGroupReloadContext::file_modified_callback_ ==
nullptr ||
1510 (*IsolateGroupReloadContext::file_modified_callback_)(packages_url,
1512 modified_sources_uris.Add(packages_url);
1516 *
count = modified_sources_uris.length();
1522 for (intptr_t
i = 0;
i < *
count; ++
i) {
1523 (*modified_sources)[
i].
uri = modified_sources_uris[
i];
1524 (*modified_sources)[
i].source =
nullptr;
1528void ProgramReloadContext::CheckpointLibraries() {
1530 TIR_Print(
"---- CHECKPOINTING LIBRARIES\n");
1533 saved_root_library_ = root_lib.ptr();
1536 const GrowableObjectArray&
libs =
1538 saved_libraries_ =
libs.ptr();
1542 const GrowableObjectArray& new_libs =
1545 UnorderedHashSet<LibraryMapTraits> old_libraries_set(
1546 old_libraries_set_storage_);
1548 group_reload_context_->saved_libs_transitive_updated_ =
new (
Z)
1549 BitVector(
Z, group_reload_context_->modified_libs_transitive_->length());
1550 for (intptr_t
i = 0;
i <
libs.Length();
i++) {
1552 if (group_reload_context_->modified_libs_->Contains(
i)) {
1557 lib.set_index(new_libs.Length());
1560 if (group_reload_context_->modified_libs_transitive_->Contains(
i)) {
1562 group_reload_context_->saved_libs_transitive_updated_->Add(lib.index());
1566 bool already_present = old_libraries_set.Insert(lib);
1567 ASSERT(!already_present);
1569 lib.EvaluatePragmas();
1571 old_libraries_set_storage_ = old_libraries_set.Release().ptr();
1579void ProgramReloadContext::RollbackLibraries() {
1580 TIR_Print(
"---- ROLLING BACK LIBRARY CHANGES\n");
1584 if (!saved_libs.IsNull()) {
1585 for (intptr_t
i = 0;
i < saved_libs.Length();
i++) {
1596 if (!saved_root_lib.IsNull()) {
1597 object_store()->set_root_library(saved_root_lib);
1605void ProgramReloadContext::VerifyMaps() {
1613 UnorderedHashMap<ClassMapTraits> class_map(class_map_storage_);
1614 UnorderedHashMap<ClassMapTraits> reverse_class_map(
1616 class_map.NumOccupied()));
1618 UnorderedHashMap<ClassMapTraits>::Iterator it(&class_map);
1619 while (it.MoveNext()) {
1620 const intptr_t entry = it.Current();
1623 cls2 ^= reverse_class_map.GetOrNull(new_cls);
1624 if (!cls2.IsNull()) {
1626 "Classes '%s' and '%s' are distinct classes but both map "
1628 cls.ToCString(), cls2.ToCString(), new_cls.ToCString());
1631 bool update = reverse_class_map.UpdateOrInsert(cls, new_cls);
1635 class_map.Release();
1636 reverse_class_map.Release();
1640void ProgramReloadContext::CommitBeforeInstanceMorphing() {
1654 UnorderedHashMap<LibraryMapTraits> lib_map(library_map_storage_);
1658 UnorderedHashMap<LibraryMapTraits>::Iterator it(&lib_map);
1660 while (it.MoveNext()) {
1661 const intptr_t entry = it.Current();
1665 new_lib.set_debuggable(lib.IsDebuggable());
1667 new_lib.set_native_entry_resolver(lib.native_entry_resolver());
1668 new_lib.set_native_entry_symbol_resolver(
1669 lib.native_entry_symbol_resolver());
1670 new_lib.set_ffi_native_resolver(lib.ffi_native_resolver());
1671 new_lib.CopyPragmas(lib);
1686 UnorderedHashMap<ClassMapTraits> class_map(class_map_storage_);
1689 UnorderedHashMap<ClassMapTraits>::Iterator it(&class_map);
1690 while (it.MoveNext()) {
1691 const intptr_t entry = it.Current();
1694 if (new_cls.ptr() != old_cls.ptr()) {
1695 ASSERT(new_cls.is_enum_class() == old_cls.is_enum_class());
1696 new_cls.CopyStaticFieldValues(
this, old_cls);
1697 old_cls.PatchFieldsAndFunctions();
1698 old_cls.MigrateImplicitStaticClosures(
this, new_cls);
1703 class_map.Release();
1706 UnorderedHashSet<ClassMapTraits> removed_class_set(
1707 removed_class_set_storage_);
1708 UnorderedHashSet<ClassMapTraits>::Iterator it(&removed_class_set);
1709 while (it.MoveNext()) {
1710 const intptr_t entry = it.Current();
1711 old_cls ^= removed_class_set.GetKey(entry);
1712 old_cls.PatchFieldsAndFunctions();
1714 removed_class_set.Release();
1722 const GrowableObjectArray&
libs =
1724 for (intptr_t
i = 0;
i <
libs.Length();
i++) {
1726 VTIR_Print(
"Lib '%s' at index %" Pd "\n", lib.ToCString(),
i);
1732 for (intptr_t
i = 0;
i <
libs.Length();
i++) {
1735 library_infos_[
i].dirty =
1736 i >= group_reload_context_->num_saved_libs_ ||
1737 group_reload_context_->saved_libs_transitive_updated_->Contains(
1743void ProgramReloadContext::CommitAfterInstanceMorphing() {
1748 IG->RehashConstants(&become_);
1755 if (FLAG_identity_reload) {
1757 const GrowableObjectArray&
libs =
1759 if (saved_libs.Length() !=
libs.Length()) {
1760 TIR_Print(
"Identity reload failed! B#L=%" Pd " A#L=%" Pd "\n",
1761 saved_libs.Length(),
libs.Length());
1766bool ProgramReloadContext::IsDirty(
const Library& lib) {
1767 const intptr_t index = lib.index();
1768 if (index ==
static_cast<classid_t>(-1)) {
1772 ASSERT((index >= 0) && (index < library_infos_.
length()));
1773 return library_infos_[index].dirty;
1776void ProgramReloadContext::PostCommit() {
1783void IsolateGroupReloadContext::AddReasonForCancelling(
1784 ReasonForCancelling* reason) {
1785 reasons_to_cancel_reload_.Add(reason);
1788void IsolateGroupReloadContext::EnsureHasInstanceMorpherFor(
1790 InstanceMorpher* instance_morpher) {
1791 for (intptr_t
i = 0;
i < instance_morphers_.length(); ++
i) {
1792 if (instance_morphers_[
i]->
cid() ==
cid) {
1796 instance_morphers_.Add(instance_morpher);
1797 instance_morpher_by_cid_.
Insert(instance_morpher);
1798 ASSERT(instance_morphers_[instance_morphers_.length() - 1]->cid() ==
cid);
1801void IsolateGroupReloadContext::ReportReasonsForCancelling() {
1802 ASSERT(FLAG_reload_force_rollback || HasReasonsForCancelling());
1803 for (
int i = 0;
i < reasons_to_cancel_reload_.length();
i++) {
1804 reasons_to_cancel_reload_.At(
i)->Report(
this);
1808void IsolateGroupReloadContext::MorphInstancesPhase1Allocate(
1809 ObjectLocator* locator,
1811 ASSERT(HasInstanceMorphers());
1813 if (FLAG_trace_reload) {
1816 for (intptr_t
i = 0;
i < instance_morphers_.length();
i++) {
1817 instance_morphers_.At(
i)->Dump();
1821 const intptr_t
count = locator->count();
1823 (
count > 1) ?
"s" :
"");
1825 for (intptr_t
i = 0;
i < instance_morphers_.length();
i++) {
1826 instance_morphers_.At(
i)->CreateMorphedCopies(become);
1830void IsolateGroupReloadContext::MorphInstancesPhase2Become(Become* become) {
1831 ASSERT(HasInstanceMorphers());
1838void IsolateGroupReloadContext::ForEachIsolate(
1843void ProgramReloadContext::ValidateReload() {
1851 UnorderedHashMap<LibraryMapTraits>
map(library_map_storage_);
1852 UnorderedHashMap<LibraryMapTraits>::Iterator it(&
map);
1855 while (it.MoveNext()) {
1856 const intptr_t entry = it.Current();
1859 if (new_lib.ptr() != lib.ptr()) {
1860 lib.CheckReload(new_lib,
this);
1869 UnorderedHashMap<ClassMapTraits>
map(class_map_storage_);
1870 UnorderedHashMap<ClassMapTraits>::Iterator it(&
map);
1873 while (it.MoveNext()) {
1874 const intptr_t entry = it.Current();
1877 if (new_cls.ptr() != cls.ptr()) {
1878 cls.CheckReload(new_cls,
this);
1885void IsolateGroupReloadContext::VisitObjectPointers(
1886 ObjectPointerVisitor* visitor) {
1887 visitor->VisitPointers(from(), to());
1890void ProgramReloadContext::VisitObjectPointers(ObjectPointerVisitor* visitor) {
1891 visitor->VisitPointers(from(), to());
1894ObjectStore* ProgramReloadContext::object_store() {
1895 return IG->object_store();
1898void ProgramReloadContext::ResetUnoptimizedICsOnStack() {
1900 StackZone stack_zone(thread);
1901 Zone*
zone = stack_zone.GetZone();
1904 CallSiteResetter resetter(
zone);
1906 IG->ForEachIsolate([&](
Isolate* isolate) {
1907 if (isolate->mutator_thread() ==
nullptr) {
1910 DartFrameIterator iterator(isolate->mutator_thread(),
1912 StackFrame*
frame = iterator.NextFrame();
1913 while (
frame !=
nullptr) {
1915 if (
code.is_optimized() && !
code.is_force_optimized()) {
1919 function = code.function();
1920 code = function.unoptimized_code();
1921 ASSERT(!code.IsNull());
1922 resetter.ResetSwitchableCalls(code);
1923 resetter.ResetCaches(code);
1925 resetter.ResetSwitchableCalls(code);
1926 resetter.ResetCaches(code);
1928 frame = iterator.NextFrame();
1933void ProgramReloadContext::ResetMegamorphicCaches() {
1950 functions_(functions),
1951 kernel_infos_(kernel_infos),
1953 suspend_states_(suspend_states),
1954 instances_(instances) {}
1959 if (
cid == kFunctionCid) {
1962 functions_->Add(&func);
1963 }
else if (
cid == kKernelProgramInfoCid) {
1965 zone_,
static_cast<KernelProgramInfoPtr
>(obj)));
1966 }
else if (
cid == kFieldCid) {
1967 fields_->Add(&
Field::Handle(zone_,
static_cast<FieldPtr
>(obj)));
1968 }
else if (
cid == kSuspendStateCid) {
1969 const auto& suspend_state =
1971 if (suspend_state.pc() != 0) {
1972 suspend_states_->Add(&suspend_state);
1988void ProgramReloadContext::RunInvalidationVisitors() {
1989 TIR_Print(
"---- RUNNING INVALIDATION HEAP VISITORS\n");
1991 StackZone stack_zone(thread);
1992 Zone*
zone = stack_zone.GetZone();
1994 GrowableArray<const Function*> functions(4 *
KB);
1995 GrowableArray<const KernelProgramInfo*> kernel_infos(
KB);
1996 GrowableArray<const Field*> fields(4 *
KB);
1997 GrowableArray<const SuspendState*> suspend_states(4 *
KB);
1998 GrowableArray<const Instance*> instances(4 *
KB);
2002 HeapIterationScope iteration(thread);
2003 InvalidationCollector visitor(
zone, &functions, &kernel_infos, &fields,
2004 &suspend_states, &instances);
2005 iteration.IterateObjects(&visitor);
2008 InvalidateKernelInfos(
zone, kernel_infos);
2009 InvalidateSuspendStates(
zone, suspend_states);
2010 InvalidateFields(
zone, fields, instances);
2014 InvalidateFunctions(
zone, functions);
2017void ProgramReloadContext::InvalidateKernelInfos(
2019 const GrowableArray<const KernelProgramInfo*>& kernel_infos) {
2026 for (intptr_t
i = 0;
i < kernel_infos.length();
i++) {
2027 const KernelProgramInfo&
info = *kernel_infos[
i];
2034 info.set_libraries_cache(
table.Release());
2042 info.set_classes_cache(
table.Release());
2047void ProgramReloadContext::InvalidateFunctions(
2049 const GrowableArray<const Function*>& functions) {
2054 CallSiteResetter resetter(
zone);
2060 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
2061 for (intptr_t
i = 0;
i < functions.length();
i++) {
2062 const Function& func = *functions[
i];
2065 if (func.ForceOptimize())
continue;
2068 func.SwitchToLazyCompiledUnoptimizedCode();
2071 code = func.CurrentCode();
2077 bool recompile_for_load_guard =
false;
2078 if (func.IsImplicitGetterFunction() ||
2079 func.IsImplicitStaticGetterFunction()) {
2080 field = func.accessor_field();
2081 recompile_for_load_guard = field.needs_load_guard();
2084 owning_class = func.Owner();
2085 owning_lib = owning_class.library();
2086 const bool clear_unoptimized_code =
2087 IsDirty(owning_lib) || recompile_for_load_guard;
2088 const bool stub_code =
code.IsStubCode();
2092 resetter.ZeroEdgeCounters(func);
2096 }
else if (clear_unoptimized_code) {
2097 VTIR_Print(
"Marking %s for recompilation, clearing code\n",
2100 func.ClearICDataArray();
2102 func.SetWasCompiled(
false);
2106 resetter.ResetSwitchableCalls(
code);
2107 resetter.ResetCaches(
code);
2111 func.set_usage_counter(0);
2112 func.set_deoptimization_counter(0);
2113 func.set_optimized_instruction_count(0);
2114 func.set_optimized_call_site_count(0);
2118void ProgramReloadContext::InvalidateSuspendStates(
2120 const GrowableArray<const SuspendState*>& suspend_states) {
2125 CallSiteResetter resetter(
zone);
2129 SafepointWriteRwLocker ml(thread, thread->isolate_group()->program_lock());
2130 for (intptr_t
i = 0, n = suspend_states.length();
i < n; ++
i) {
2131 const SuspendState& suspend_state = *suspend_states[
i];
2132 ASSERT(suspend_state.pc() != 0);
2133 code = suspend_state.GetCodeObject();
2135 if (
code.is_optimized() && !
code.is_force_optimized()) {
2139 function.SwitchToLazyCompiledUnoptimizedCode();
2143 if (!
code.IsDisabled()) {
2144 code.DisableDartCode();
2150 if (!
code.IsNull()) {
2151 resetter.ResetSwitchableCalls(
code);
2152 resetter.ResetCaches(
code);
2160 function.EnsureHasCompiledUnoptimizedCode();
2161 resetter.ResetSwitchableCalls(
code);
2162 resetter.ResetCaches(
code);
2175 cls_(
Class::Handle(zone)),
2176 cls_fields_(
Array::Handle(zone)),
2177 entry_(
Object::Handle(zone)),
2178 value_(
Object::Handle(zone)),
2182 result_(
Bool::Handle(zone)),
2183 closure_function_(
Function::Handle(zone)),
2186 instance_cid_or_signature_(
Object::Handle(zone)),
2188 parent_function_type_arguments_(
TypeArguments::Handle(zone)),
2189 delayed_function_type_arguments_(
TypeArguments::Handle(zone)) {}
2195 for (intptr_t
i = 0;
i < fields.
length();
i++) {
2196 const Field& field = *fields[
i];
2203 const intptr_t field_id = field.
field_id();
2209 if (field_table->IsReadyToUse()) {
2210 value_ = field_table->At(field_id);
2211 if ((value_.ptr() != Object::sentinel().ptr()) &&
2212 (value_.ptr() != Object::transition_sentinel().ptr())) {
2213 CheckValueType(value_, field);
2223 for (intptr_t
i = 0;
i < instances.
length();
i++) {
2224 CheckInstance(*instances[
i]);
2232 if (cls_.NumTypeArguments() > 0) {
2233 instantiator_type_arguments_ =
instance.GetTypeArguments();
2237 cls_fields_ = cls_.OffsetToFieldMap();
2238 for (intptr_t
i = 0;
i < cls_fields_.Length();
i++) {
2239 entry_ = cls_fields_.At(
i);
2240 if (!entry_.IsField()) {
2243 const Field& field = Field::Cast(entry_);
2244 CheckInstanceField(
instance, field);
// Validates one instance field's current value against the field's declared
// type after a reload. NOTE(review): several statements (original lines
// 2251-2252, 2254-2257, 2260-2263, 2266-2267) are missing from this
// extraction -- notably the code that loads the field's value into value_
// is not visible; confirm against the original file.
2249 void CheckInstanceField(
const Instance&
instance,
const Field& field) {
// Field already has a load guard: the runtime re-checks on each load.
2250 if (field.needs_load_guard()) {
2253 if (field.is_unboxed()) {
// An unset value is only legal for late fields; otherwise (which must
// not happen under an identity reload) force a load guard.
2258 if (value_.ptr() == Object::sentinel().ptr()) {
2259 if (field.is_late()) {
2264 ASSERT(!FLAG_identity_reload);
2265 field.set_needs_load_guard(
true);
2268 CheckValueType(value_, field);
// Returns whether [value] is assignable to [type], consulting (and, on a
// positive slow-path result, updating) the shared SubtypeTestCache to avoid
// repeated full subtype checks.
// NOTE(review): several statements are missing from this extraction
// (original lines 2274, 2276-2278, 2281-2282, 2286, 2292-2302, 2304-2306,
// 2313-2314, 2318-2319, 2326+) -- e.g. the early-return bodies and the
// declaration of `closure`; confirm against the original file.
2272 bool CheckAssignabilityUsingCache(
const Object&
value,
2273 const AbstractType&
type) {
// Everything is assignable to dynamic.
2275 if (
type.IsDynamicType()) {
// Record types get a structural, per-field check instead.
2279 if (
type.IsRecordType()) {
2280 return CheckAssignabilityForRecordType(
value, RecordType::Cast(
type));
2283 cls_ =
value.clazz();
2284 const intptr_t
cid = cls_.id();
// Closures are keyed in the cache by their function signature plus all
// three kinds of type arguments (instantiator, parent function, delayed).
2285 if (
cid == kClosureCid) {
2287 closure_function_ =
closure.function();
2288 instance_cid_or_signature_ = closure_function_.signature();
2289 instance_type_arguments_ =
closure.instantiator_type_arguments();
2290 parent_function_type_arguments_ =
closure.function_type_arguments();
2291 delayed_function_type_arguments_ =
closure.delayed_type_arguments();
// Non-closure generic instances contribute their own type arguments.
2294 if (cls_.NumTypeArguments() > 0) {
2295 instance_type_arguments_ = Instance::Cast(
value).GetTypeArguments();
2303 if (cache_.IsNull()) {
// Fast path: an identical check was performed before and is cached.
2307 if (cache_.HasCheck(
2308 instance_cid_or_signature_,
type, instance_type_arguments_,
2309 instantiator_type_arguments_, function_type_arguments_,
2310 parent_function_type_arguments_, delayed_function_type_arguments_,
2311 nullptr, &result_)) {
2312 return result_.value();
// Slow path: run the full assignability check.
2315 instance_ ^=
value.ptr();
2316 if (instance_.IsAssignableTo(
type, instantiator_type_arguments_,
2317 function_type_arguments_)) {
// Positive results are cached -- except for records, presumably because
// record assignability is structural (shape-dependent); confirm.
2320 if (
cid != kRecordCid) {
2321 cache_.AddCheck(instance_cid_or_signature_,
type,
2322 instance_type_arguments_, instantiator_type_arguments_,
2323 function_type_arguments_,
2324 parent_function_type_arguments_,
2325 delayed_function_type_arguments_,
Bool::True());
// Structural assignability check for record values: [value] must be a
// Record, its shape must equal the target RecordType's shape, and every
// field value must itself be assignable to the corresponding field type.
// NOTE(review): the early-return bodies and some statements (original lines
// 2336-2338, 2341-2346, 2352+) are missing from this extraction;
// `field_value` and `field_type` appear to be declared in the missing
// lines -- confirm against the original file.
2333 bool CheckAssignabilityForRecordType(
const Object&
value,
2334 const RecordType&
type) {
2335 if (!
value.IsRecord()) {
2339 const Record& record = Record::Cast(
value);
// Shape mismatch (arity / named-field layout) can never be assignable.
2340 if (record.shape() !=
type.shape()) {
2347 const intptr_t num_fields = record.num_fields();
2348 for (intptr_t
i = 0;
i < num_fields; ++
i) {
2349 field_value = record.FieldAt(
i);
2350 field_type =
type.FieldTypeAt(
i);
2351 if (!CheckAssignabilityUsingCache(field_value, field_type)) {
// Checks [value] against [field]'s declared type. On failure the field is
// marked as needing a load guard so the runtime re-validates on each load.
// NOTE(review): original lines 2360, 2363-2366, 2368, 2370-2372, 2374-2375
// are missing from this extraction -- the failure message below is an
// argument to a call whose name is not visible; confirm against the
// original file.
2359 void CheckValueType(
const Object&
value,
const Field& field) {
2361 type_ = field.type();
2362 if (!CheckAssignabilityUsingCache(
value, type_)) {
// Under an identity reload no non-null value should fail its check.
2367 if (FLAG_identity_reload && !
value.IsNull()) {
2369 "Type check failed during identity hot reload.\n"
2373 field.ToCString(), type_.ToCString(),
value.ToCString());
2376 field.set_needs_load_guard(
true);
// Reusable handles for the check loops above (initialized once in the
// constructor to avoid per-iteration handle allocation).
// NOTE(review): some member declarations (original lines 2384, 2388, 2396+)
// are missing from this extraction; confirm against the original file.
2385 Instance& instance_;
2386 AbstractType& type_;
2387 SubtypeTestCache& cache_;
2389 Function& closure_function_;
2390 TypeArguments& instantiator_type_arguments_;
2391 TypeArguments& function_type_arguments_;
// For closures: the function signature; otherwise the instance class id.
2392 Object& instance_cid_or_signature_;
2393 TypeArguments& instance_type_arguments_;
2394 TypeArguments& parent_function_type_arguments_;
2395 TypeArguments& delayed_function_type_arguments_;
// Re-validates the given static fields and instances after a reload,
// holding the subtype-test-cache mutex for the duration of the checks
// (FieldInvalidator reads and updates the shared SubtypeTestCache).
// NOTE(review): original lines 2399 and 2402 are missing from this
// extraction -- `zone` is used below but its declaration (likely a
// parameter) is not visible; confirm against the original file.
2398void ProgramReloadContext::InvalidateFields(
2400 const GrowableArray<const Field*>& fields,
2401 const GrowableArray<const Instance*>& instances) {
2403 SafepointMutexLocker ml(
IG->subtype_test_cache_mutex());
2404 FieldInvalidator invalidator(zone);
2405 invalidator.CheckStatics(fields);
2406 invalidator.CheckInstances(instances);
// Discards compiled state that may be stale after a reload: megamorphic
// caches, unoptimized ICs on the stack, and everything handled by the
// invalidation visitors. NOTE(review): original lines 2410-2411 and
// 2414-2416 are missing from this extraction -- including the body of the
// trace_deoptimization branch; confirm against the original file.
2409void ProgramReloadContext::InvalidateWorld() {
2412 ResetMegamorphicCaches();
2413 if (FLAG_trace_deoptimization) {
2417 ResetUnoptimizedICsOnStack();
2418 RunInvalidationVisitors();
// Returns the pre-reload class corresponding to [replacement_or_new], or
// null if none is recorded. NOTE(review): the function's tail (original
// lines 2426+, presumably `return cls.ptr();`) is missing from this
// extraction; confirm against the original file.
2421ClassPtr ProgramReloadContext::OldClassOrNull(
const Class& replacement_or_new) {
2422 UnorderedHashSet<ClassMapTraits> old_classes_set(old_classes_set_storage_);
2423 Class& cls = Class::Handle();
2424 cls ^= old_classes_set.GetOrNull(replacement_or_new);
// The set wraps raw storage; store the released backing array back.
2425 old_classes_set_storage_ = old_classes_set.Release().ptr();
// Returns the private key of the pre-reload library matching
// [replacement_or_new] (so the reloaded library keeps the same private-name
// mangling), or null when no old library is found.
// NOTE(review): original lines 2432, 2434-2435 and 2439 are missing from
// this extraction -- the guard condition around the early
// `return String::null()` is not visible; confirm against the original file.
2429StringPtr ProgramReloadContext::FindLibraryPrivateKey(
2430 const Library& replacement_or_new) {
2431 const Library& old = Library::Handle(OldLibraryOrNull(replacement_or_new));
2433 return String::null();
2436 VTIR_Print(
"`%s` is getting `%s`'s private key.\n",
2437 String::Handle(replacement_or_new.url()).ToCString(),
2438 String::Handle(old.url()).ToCString());
2440 return old.private_key();
// Returns the pre-reload library matching [replacement_or_new], falling
// back to prefix-based matching when the root URL moved between reloads.
// NOTE(review): original lines 2450-2451 and 2455+ are missing from this
// extraction -- including the start of the condition that the two
// parenthesized clauses below belong to; confirm against the original file.
2443LibraryPtr ProgramReloadContext::OldLibraryOrNull(
2444 const Library& replacement_or_new) {
2445 UnorderedHashSet<LibraryMapTraits> old_libraries_set(
2446 old_libraries_set_storage_);
2447 Library& lib = Library::Handle();
2448 lib ^= old_libraries_set.GetOrNull(replacement_or_new);
2449 old_libraries_set.Release();
// Base-moved matching only applies when both the new and the old root URL
// prefixes are known.
2452 (group_reload_context_->root_url_prefix_ != String::null()) &&
2453 (group_reload_context_->old_root_url_prefix_ != String::null())) {
2454 return OldLibraryOrNullBaseMoved(replacement_or_new);
// Handles the case where the root script URL moved between reloads: matches
// libraries by comparing URL suffixes after stripping the old and new root
// prefixes. NOTE(review): original line 2470 -- the declaration initialized
// by the String::SubString call below, presumably `suffix` -- and lines
// 2474, 2484-2485, 2488-2489, 2493-2495 are missing from this extraction;
// confirm against the original file.
2461LibraryPtr ProgramReloadContext::OldLibraryOrNullBaseMoved(
2462 const Library& replacement_or_new) {
2463 const String& url_prefix =
2464 String::Handle(group_reload_context_->root_url_prefix_);
2465 const String& old_url_prefix =
2466 String::Handle(group_reload_context_->old_root_url_prefix_);
2467 const intptr_t prefix_length = url_prefix.Length();
2468 const intptr_t old_prefix_length = old_url_prefix.Length();
2469 const String& new_url = String::Handle(replacement_or_new.url());
2471 String::Handle(String::SubString(new_url, prefix_length));
// A library outside the new root prefix cannot have moved with the base.
2472 if (!new_url.StartsWith(url_prefix)) {
2473 return Library::null();
2475 Library& old = Library::Handle();
2476 String& old_url = String::Handle();
2477 String& old_suffix = String::Handle();
2478 const auto& saved_libs = GrowableObjectArray::Handle(saved_libraries_);
2479 ASSERT(!saved_libs.IsNull());
// Scan the saved (pre-reload) libraries for one whose suffix relative to
// the old prefix equals this library's suffix relative to the new prefix.
2480 for (intptr_t
i = 0;
i < saved_libs.Length();
i++) {
2481 old = Library::RawCast(saved_libs.At(
i));
2482 old_url = old.url();
2483 if (!old_url.StartsWith(old_url_prefix)) {
2486 old_suffix = String::SubString(old_url, old_prefix_length);
2487 if (old_suffix.IsNull()) {
2490 if (old_suffix.Equals(
suffix)) {
2491 TIR_Print(
"`%s` is moving to `%s`\n", old_url.ToCString(),
2492 new_url.ToCString());
// No saved library matched: nothing moved here.
2496 return Library::null();
// Pairs each changed or new library with its pre-reload counterpart:
// genuinely new libraries map to themselves; replacements get a library
// mapping (new -> old) plus a become-mapping (old -> new) so old references
// are forwarded. NOTE(review): original lines 2502, 2506, 2509, 2513-2515,
// 2517, 2519, 2521 and 2523+ are missing from this extraction -- e.g. the
// loop's increment and the branch structure around the null-check; confirm
// against the original file.
2499void ProgramReloadContext::BuildLibraryMapping() {
2500 const GrowableObjectArray&
libs =
2501 GrowableObjectArray::Handle(object_store()->libraries());
2503 Library& replacement_or_new = Library::Handle();
2504 Library& old = Library::Handle();
// Only libraries beyond the saved (unmodified) prefix need a mapping.
2505 for (intptr_t
i = group_reload_context_->num_saved_libs_;
i <
libs.Length();
2507 replacement_or_new = Library::RawCast(
libs.At(
i));
2508 old = OldLibraryOrNull(replacement_or_new);
// Under an identity reload every library should have an original.
2510 if (FLAG_identity_reload) {
2511 TIR_Print(
"Could not find original library for %s\n",
2512 replacement_or_new.ToCString());
// Genuinely new library: it is its own "old" version.
2516 AddLibraryMapping(replacement_or_new, replacement_or_new);
2518 ASSERT(!replacement_or_new.is_dart_scheme());
2520 AddLibraryMapping(replacement_or_new, old);
2522 AddBecomeMapping(old, replacement_or_new);
// Computes the set of classes removed by the reload: old classes whose
// library survived (was mapped) but for which no new class exists.
// NOTE(review): various lines (original 2536, 2541, 2550-2551, 2553-2554,
// 2560, 2569-2575, 2589-2590, 2596-2598, 2600, 2605+) are missing from this
// extraction; confirm against the original file.
2535void ProgramReloadContext::BuildRemovedClassesSet() {
// Step 1: collect every old class that was mapped to some new class.
2537 UnorderedHashMap<ClassMapTraits> class_map(class_map_storage_);
2538 UnorderedHashSet<ClassMapTraits> mapped_old_classes_set(
2539 HashTables::New<UnorderedHashSet<ClassMapTraits> >(
2540 class_map.NumOccupied()));
2542 UnorderedHashMap<ClassMapTraits>::Iterator it(&class_map);
2543 Class& cls = Class::Handle();
2544 Class& new_cls = Class::Handle();
2545 while (it.MoveNext()) {
2546 const intptr_t entry = it.Current();
2547 new_cls = Class::RawCast(class_map.GetKey(entry));
2548 cls = Class::RawCast(class_map.GetPayload(entry, 0));
2549 mapped_old_classes_set.InsertOrGet(cls);
2552 class_map.Release();
// Step 2: collect every old library that was mapped to a *different* new
// library (identity mappings denote brand-new libraries).
2555 UnorderedHashMap<LibraryMapTraits> library_map(library_map_storage_);
2556 UnorderedHashMap<LibraryMapTraits>::Iterator it_library(&library_map);
2557 UnorderedHashSet<LibraryMapTraits> mapped_old_library_set(
2558 HashTables::New<UnorderedHashSet<LibraryMapTraits> >(
2559 library_map.NumOccupied()));
2561 Library& old_library = Library::Handle();
2562 Library& new_library = Library::Handle();
2563 while (it_library.MoveNext()) {
2564 const intptr_t entry = it_library.Current();
2565 new_library ^= library_map.GetKey(entry);
2566 old_library ^= library_map.GetPayload(entry, 0);
2567 if (new_library.ptr() != old_library.ptr()) {
2568 mapped_old_library_set.InsertOrGet(old_library);
// Step 3: any old class in a mapped old library with no mapped new class
// was removed by this reload.
2576 UnorderedHashSet<ClassMapTraits> old_classes_set(old_classes_set_storage_);
2577 UnorderedHashSet<ClassMapTraits>::Iterator it(&old_classes_set);
2578 UnorderedHashSet<ClassMapTraits> removed_class_set(
2579 removed_class_set_storage_);
2580 Class& old_cls = Class::Handle();
2581 Class& new_cls = Class::Handle();
2582 Library& old_library = Library::Handle();
2583 Library& mapped_old_library = Library::Handle();
2584 while (it.MoveNext()) {
2585 const intptr_t entry = it.Current();
2586 old_cls ^= Class::RawCast(old_classes_set.GetKey(entry));
2587 old_library = old_cls.library();
2588 if (old_library.IsNull()) {
2591 mapped_old_library ^= mapped_old_library_set.GetOrNull(old_library);
2592 if (!mapped_old_library.IsNull()) {
2593 new_cls ^= mapped_old_classes_set.GetOrNull(old_cls);
2594 if (new_cls.IsNull()) {
2595 removed_class_set.InsertOrGet(old_cls);
// Persist the removed-class set; release the temporary tables.
2599 removed_class_set_storage_ = removed_class_set.Release().ptr();
2601 old_classes_set.Release();
2602 mapped_old_classes_set.Release();
2603 mapped_old_library_set.Release();
2604 library_map.Release();
// Records that [replacement_or_new] corresponds to pre-reload class
// [original] in the class map. NOTE(review): original lines 2611-2613 are
// missing from this extraction -- presumably the use/assertion of `update`;
// confirm against the original file.
2607void ProgramReloadContext::AddClassMapping(
const Class& replacement_or_new,
2608 const Class& original) {
2609 UnorderedHashMap<ClassMapTraits>
map(class_map_storage_);
2610 bool update =
map.UpdateOrInsert(replacement_or_new, original);
// The map wraps raw storage; store the released backing array back.
2614 class_map_storage_ =
map.Release().ptr();
// Records that [replacement_or_new] corresponds to pre-reload library
// [original] in the library map. NOTE(review): original lines 2621-2623 are
// missing from this extraction -- presumably the use/assertion of `update`;
// confirm against the original file.
2617void ProgramReloadContext::AddLibraryMapping(
const Library& replacement_or_new,
2618 const Library& original) {
2619 UnorderedHashMap<LibraryMapTraits>
map(library_map_storage_);
2620 bool update =
map.UpdateOrInsert(replacement_or_new, original);
// The map wraps raw storage; store the released backing array back.
2624 library_map_storage_ =
map.Release().ptr();
2627void ProgramReloadContext::AddStaticFieldMapping(
const Field& old_field,
2628 const Field& new_field) {
2629 ASSERT(old_field.is_static());
2630 ASSERT(new_field.is_static());
2631 AddBecomeMapping(old_field, new_field);
2634void ProgramReloadContext::AddBecomeMapping(
const Object& old,
2635 const Object& neu) {
2636 become_.Add(old, neu);
// Rebuilds every class's direct_subclasses / direct_implementors lists from
// scratch after a reload: first clears the lists of every loaded class,
// then re-registers each loaded class with its superclass and with the
// classes of its implemented interfaces. NOTE(review): several lines are
// missing from this extraction (original 2642-2643, 2650-2653, 2656,
// 2659-2664, 2667, 2671, 2676-2677, 2683-2684, 2689, 2695+) -- including
// the ':' arm of the mixin_index conditional and the function's tail;
// confirm against the original file.
2639void ProgramReloadContext::RebuildDirectSubclasses() {
2640 ClassTable* class_table =
IG->class_table();
2641 intptr_t num_cids = class_table->NumCids();
// Pass 1: clear existing subclass/implementor lists (cid 0 is skipped).
2644 Class& cls = Class::Handle();
2645 const GrowableObjectArray& null_list = GrowableObjectArray::Handle();
2646 for (intptr_t
i = 1;
i < num_cids;
i++) {
2647 if (class_table->HasValidClassAt(
i)) {
2648 cls = class_table->At(
i);
// Classes whose declarations were never loaded have no lists to clear.
2649 if (!cls.is_declaration_loaded()) {
2654 if (cls.direct_subclasses() != GrowableObjectArray::null()) {
2655 cls.set_direct_subclasses(null_list);
2657 if (cls.direct_implementors() != GrowableObjectArray::null()) {
2658 cls.set_direct_implementors(null_list);
// Pass 2: re-register each loaded class with its superclass and with the
// classes of its implemented interfaces.
2665 AbstractType& super_type = AbstractType::Handle();
2666 Class& super_cls = Class::Handle();
2668 Array& interface_types = Array::Handle();
2669 AbstractType& interface_type = AbstractType::Handle();
2670 Class& interface_class = Class::Handle();
2672 for (intptr_t
i = 1;
i < num_cids;
i++) {
2673 if (class_table->HasValidClassAt(
i)) {
2674 cls = class_table->At(
i);
2675 if (!cls.is_declaration_loaded()) {
// Object itself has no superclass to register with.
2678 super_type = cls.super_type();
2679 if (!super_type.IsNull() && !super_type.IsObjectType()) {
2680 super_cls = cls.SuperClass();
2681 ASSERT(!super_cls.IsNull());
2682 super_cls.AddDirectSubclass(cls);
// For transformed mixin applications the last interface entry is the
// mixin; it is flagged via the second AddDirectImplementor argument.
2685 interface_types = cls.interfaces();
2686 if (!interface_types.IsNull()) {
2687 const intptr_t mixin_index = cls.is_transformed_mixin_application()
2688 ? interface_types.Length() - 1
2690 for (intptr_t j = 0; j < interface_types.Length(); ++j) {
2691 interface_type ^= interface_types.At(j);
2692 interface_class = interface_type.type_class();
2693 interface_class.AddDirectImplementor(
2694 cls,
// NOTE(review): the inner loop index is `j`, yet the mixin check uses
// the outer cid index `i` -- looks suspicious; compare with the
// equivalent interface-index test in class finalization and confirm.
i == mixin_index);
static void info(const char *fmt,...) SK_PRINTF_LIKE(1
static uint32_t hash(const SkShaderBase::GradientInfo &v)
#define RELEASE_ASSERT(cond)
Aborted(Zone *zone, const Error &error)
ObjectPtr At(intptr_t index) const
KeyValueTrait::Value LookupValue(typename KeyValueTrait::Key key) const
void Insert(typename KeyValueTrait::Pair kv)
const T & At(intptr_t index) const
void SetLength(intptr_t new_length)
static void MakeDummyObject(const Instance &instance)
void Add(const Object &before, const Object &after)
bool Contains(intptr_t i) const
static const Bool & True()
static uword Hash(const Object &obj)
static bool ReportStats()
static const char * Name()
static bool IsMatch(const Object &a, const Object &b)
void AppendTo(JSONArray *array)
ClassReasonForCancelling(Zone *zone, const Class &from, const Class &to)
static intptr_t CidFromTopLevelIndex(intptr_t index)
intptr_t NumTypeArguments() const
intptr_t host_type_arguments_field_offset() const
ArrayPtr OffsetToFieldMap(ClassTable *class_table=nullptr) const
static constexpr intptr_t kNoTypeArguments
void MarkFieldBoxedDuringReload(ClassTable *class_table, const Field &field) const
static IsolateGroup * vm_isolate_group()
static DoublePtr New(double d, Heap::Space space=Heap::kNew)
virtual const char * ToErrorCString() const
static ExternalTypedDataPtr NewFinalizeWithFree(uint8_t *data, intptr_t len)
void CheckInstances(const GrowableArray< const Instance * > &instances)
FieldInvalidator(Zone *zone)
void CheckStatics(const GrowableArray< const Field * > &fields)
bool needs_load_guard() const
intptr_t guarded_cid() const
intptr_t field_id() const
intptr_t HostOffset() const
AbstractTypePtr type() const
static Float32x4Ptr New(float value0, float value1, float value2, float value3, Heap::Space space=Heap::kNew)
static Float64x2Ptr New(double value0, double value1, Heap::Space space=Heap::kNew)
static GrowableObjectArrayPtr New(Heap::Space space=Heap::kNew)
static Table::Storage::ArrayPtr New(intptr_t initial_capacity, Heap::Space space=Heap::kNew)
void IterateObjects(ObjectVisitor *visitor) const
void WaitForMarkerTasks(Thread *thread)
void CollectAllGarbage(GCReason reason=GCReason::kFull, bool compact=false)
InstanceMorpher(Zone *zone, classid_t cid, const Class &old_class, const Class &new_class, FieldMappingArray *mapping, FieldOffsetArray *new_fields_offsets)
void AddObject(ObjectPtr object)
void AppendTo(JSONArray *array)
static InstanceMorpher * CreateFromClassDescriptors(Zone *zone, ClassTable *class_table, const Class &from, const Class &to)
void CreateMorphedCopies(Become *become)
static InstancePtr NewAlreadyFinalized(const Class &cls, Heap::Space space=Heap::kNew)
static IntegerPtr New(const String &str, Heap::Space space=Heap::kNew)
virtual ~InvalidationCollector()
void VisitObject(ObjectPtr obj) override
InvalidationCollector(Zone *zone, GrowableArray< const Function * > *functions, GrowableArray< const KernelProgramInfo * > *kernel_infos, GrowableArray< const Field * > *fields, GrowableArray< const SuspendState * > *suspend_states, GrowableArray< const Instance * > *instances)
IsolateGroup * isolate_group() const
IsolateGroupReloadContext(IsolateGroup *isolate, ClassTable *class_table, JSONStream *js)
friend class ReasonForCancelling
friend class IsolateGroup
~IsolateGroupReloadContext()
bool Reload(bool force_reload, const char *root_script_url=nullptr, const char *packages_url=nullptr, const uint8_t *kernel_buffer=nullptr, intptr_t kernel_buffer_size=0)
void ForEachIsolate(std::function< void(Isolate *isolate)> function, bool at_safepoint=false)
static bool IsSystemIsolateGroup(const IsolateGroup *group)
static IsolateGroup * Current()
ClassTable * class_table() const
void set_last_reload_timestamp(int64_t value)
IsolateGroupSource * source() const
ClassTable * heap_walk_class_table() const
int64_t last_reload_timestamp() const
ProgramReloadContext * program_reload_context()
FieldTable * field_table() const
static void KillIfExists(Isolate *isolate, LibMsgId msg_id)
void AddValue(bool b) const
void AddValueF(const char *format,...) const PRINTF_ATTRIBUTE(2
void AddProperty64(const char *name, int64_t i) const
void AddProperty(const char *name, bool b) const
static Dart_KernelCompilationResult CompileToKernel(const char *script_uri, const uint8_t *platform_kernel, intptr_t platform_kernel_size, int source_files_count=0, Dart_SourceFile source_files[]=nullptr, bool incremental_compile=true, bool for_snapshot=false, bool embed_sources=true, const char *package_config=nullptr, const char *multiroot_filepaths=nullptr, const char *multiroot_scheme=nullptr, Dart_KernelCompilationVerbosityLevel verbosity=Dart_KernelCompilationVerbosityLevel_All)
static Dart_KernelCompilationResult RejectCompilation()
static Dart_KernelCompilationResult AcceptCompilation()
static bool ReportStats()
static bool IsMatch(const Object &a, const Object &b)
static uword Hash(const Object &obj)
static const char * Name()
static LibraryPtr LookupLibrary(Thread *thread, const String &url)
static void RegisterLibraries(Thread *thread, const GrowableObjectArray &libs)
StringPtr private_key() const
static void static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
ObjectLocator(IsolateGroupReloadContext *context)
void VisitObject(ObjectPtr obj) override
intptr_t GetClassId() const
void SetCanonical() const
static ObjectPtr RawCast(ObjectPtr obj)
bool enable_concurrent_mark() const
Monitor * tasks_lock() const
void set_enable_concurrent_mark(bool enable_concurrent_mark)
static bool IsSameLibrary(const Library &a_lib, const Library &b_lib)
static bool IsSameClass(const Class &a, const Class &b)
ProgramReloadContext(std::shared_ptr< IsolateGroupReloadContext > group_reload_context, IsolateGroup *isolate_group)
void Report(IsolateGroupReloadContext *context)
virtual ErrorPtr ToError()
virtual void AppendTo(JSONArray *array)
virtual StringPtr ToString()
static const char * Name()
static bool IsMatch(const Object &a, const Object &b)
static bool ReportStats()
static uword Hash(const Object &obj)
static void HandleEvent(ServiceEvent *event, bool enter_safepoint=true)
static SmiPtr New(intptr_t value)
@ kAllowCrossThreadIteration
static StringPtr NewFormatted(const char *format,...) PRINTF_ATTRIBUTE(1
static uword HashRawSymbol(const StringPtr symbol)
bool Equals(const String &str) const
static StringPtr New(const char *cstr, Heap::Space space=Heap::kNew)
static StringPtr SubString(const String &str, intptr_t begin_index, Heap::Space space=Heap::kNew)
static const char * ToCString(Thread *thread, StringPtr ptr)
static SubtypeTestCachePtr New(intptr_t num_inputs)
static constexpr intptr_t kMaxInputs
static Thread * Current()
bool OwnsReloadSafepoint() const
IsolateGroup * isolate_group() const
static Object & LoadEntireProgram(Program *program, bool process_pending_classes=true)
static void FindModifiedLibraries(Program *program, IsolateGroup *isolate_group, BitVector *modified_libs, bool force_reload, bool *is_empty_program, intptr_t *p_num_classes, intptr_t *p_num_procedures)
static std::unique_ptr< Program > ReadFromTypedData(const ExternalTypedData &typed_data, const char **error=nullptr)
static std::unique_ptr< Program > ReadFromFile(const char *script_uri, const char **error=nullptr)
#define THR_Print(format,...)
@ Dart_KernelCompilationStatus_MsgFailed
@ Dart_KernelCompilationStatus_Ok
FlKeyEvent uint64_t FlKeyResponderAsyncCallback callback
const uint8_t uint32_t uint32_t GError ** error
Dart_NativeFunction function
#define HANDLESCOPE(thread)
#define TIMELINE_SCOPE(name)
#define VTIR_Print(format,...)
#define TIR_Print(format,...)
ZoneGrowableArray< FieldMapping > FieldMappingArray
void DeoptimizeFunctionsOnStack()
static bool ContainsScriptUri(const GrowableArray< const char * > &seen_uris, const char *uri)
static ObjectPtr RejectCompilation(Thread *thread)
uint32_t CombineHashes(uint32_t hash, uint32_t other_hash)
static void PropagateLibraryModified(const ZoneGrowableArray< ZoneGrowableArray< intptr_t > * > *imported_by, intptr_t lib_index, BitVector *modified_libs)
Copied in from https://dart-review.googlesource.com/c/sdk/+/77722.
ZoneGrowableArray< intptr_t > FieldOffsetArray
void DeoptimizeTypeTestingStubs()
UnorderedHashMap< SmiTraits > IntHashMap
DEFINE_FLAG(bool, print_cluster_information, false, "Print information about clusters written to snapshot")
uint32_t FinalizeHash(uint32_t hash, intptr_t hashbits=kBitsPerInt32)
static ObjectPtr AcceptCompilation(Thread *thread)
static int8_t data[kExtLength]
static bool HasNoTasks(Heap *heap)
static const char * BoxCidToCString(intptr_t box_cid)
static intptr_t CommonSuffixLength(const char *a, const char *b)
DECLARE_FLAG(bool, show_invisible_frames)
std::function< void()> closure
SI auto map(std::index_sequence< I... >, Fn &&fn, const Args &... args) -> skvx::Vec< sizeof...(I), decltype(fn(args[0]...))>
Dart_KernelCompilationStatus status