#if defined(DART_PRECOMPILER)

      zone_(Thread::Current()->zone()),
      thread_(Thread::Current()),

    : parsed_function_(parsed_function),

      zone_(Thread::Current()->zone()),
      thread_(Thread::Current()),

ClassPtr FlowGraphDeserializer::GetClassById(classid_t id) const {

  s->Write<bool>(false);

  if (!d->Read<bool>()) {

  s->Write<intptr_t>(value_);

    : value_(d->Read<intptr_t>()) {}

  TemplateInstruction::WriteTo(s);
  s->Write<intptr_t>(block_id_);
  s->Write<intptr_t>(try_index_);
  s->Write<intptr_t>(stack_depth_);

    : TemplateInstruction(d),
      block_id_(d->Read<intptr_t>()),
      try_index_(d->Read<intptr_t>()),
      stack_depth_(d->Read<intptr_t>()),
      dominated_blocks_(1),
      parallel_move_(d->Read<ParallelMoveInstr*>()) {
  d->set_block(block_id_, this);
  d->set_current_block(this);

void BlockEntryInstr::WriteExtra(FlowGraphSerializer* s) {
  TemplateInstruction::WriteExtra(s);

  if (parallel_move_ != nullptr) {
    parallel_move_->WriteExtra(s);

void BlockEntryInstr::ReadExtra(FlowGraphDeserializer* d) {
  TemplateInstruction::ReadExtra(d);

  if (parallel_move_ != nullptr) {
    parallel_move_->ReadExtra(d);
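
// The WriteTo/deserializing-constructor pair above handles per-instruction
// state that is available up front (block id, try index, stack depth) and
// registers the new block with the deserializer via set_block and
// set_current_block. WriteExtra/ReadExtra run as a second pass and handle
// pieces that reference other instructions, such as the parallel move.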
    s->Write<intptr_t>(-1);

    const intptr_t id = x->block_id();

    s->Write<intptr_t>(id);

  const intptr_t id = d->Read<intptr_t>();

#define INSTRUCTION_REFS_SERIALIZABLE_AS_BLOCK_ENTRY(V) \
  V(CatchBlockEntry, CatchBlockEntryInstr) \
  V(FunctionEntry, FunctionEntryInstr) \
  V(IndirectEntry, IndirectEntryInstr) \
  V(JoinEntry, JoinEntryInstr) \
  V(OsrEntry, OsrEntryInstr) \
  V(TargetEntry, TargetEntryInstr)

#define SERIALIZABLE_AS_BLOCK_ENTRY(name, type) \
  void FlowGraphSerializer::WriteRefTrait<type*>::WriteRef( \
      FlowGraphSerializer* s, type* x) { \
    s->WriteRef<BlockEntryInstr*>(x); \
  type* FlowGraphDeserializer::ReadRefTrait<type*>::ReadRef( \
      FlowGraphDeserializer* d) { \
    BlockEntryInstr* instr = d->ReadRef<BlockEntryInstr*>(); \
    ASSERT((instr == nullptr) || instr->Is##name()); \
    return static_cast<type*>(instr); \

#undef SERIALIZABLE_AS_BLOCK_ENTRY
#undef INSTRUCTION_REFS_SERIALIZABLE_AS_BLOCK_ENTRY
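
// References to the concrete entry subclasses listed above (JoinEntry,
// TargetEntry, etc.) are serialized as plain BlockEntryInstr* refs; the
// generated ReadRef wrappers only add an ASSERT-checked downcast to the
// expected subclass.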
void BlockEntryWithInitialDefs::WriteTo(FlowGraphSerializer* s) {
  BlockEntryInstr::WriteTo(s);
  s->Write<GrowableArray<Definition*>>(initial_definitions_);

    : BlockEntryInstr(d),
      initial_definitions_(d->Read<GrowableArray<Definition*>>()) {
  for (Definition* def : initial_definitions_) {
    def->set_previous(this);
    if (auto par = def->AsParameter()) {
      par->set_block(this);

void BlockEntryWithInitialDefs::WriteExtra(FlowGraphSerializer* s) {
  BlockEntryInstr::WriteExtra(s);
  for (Definition* def : initial_definitions_) {

void BlockEntryWithInitialDefs::ReadExtra(FlowGraphDeserializer* d) {
  BlockEntryInstr::ReadExtra(d);
  for (Definition* def : initial_definitions_) {

  s->stream()->Write<uint8_t>(x ? 1 : 0);

  return (d->stream()->Read<uint8_t>() != 0);

  WriteExtraWithoutInputs(s);
  comparison_->WriteExtra(s);

  ReadExtraWithoutInputs(d);
  comparison_->ReadExtra(d);
  for (intptr_t i = comparison_->InputCount() - 1; i >= 0; --i) {
    comparison_->InputAt(i)->set_instruction(this);

  true_successor_ = d->ReadRef<TargetEntryInstr*>();
  false_successor_ = d->ReadRef<TargetEntryInstr*>();
  constant_target_ = d->ReadRef<TargetEntryInstr*>();
  s->Write<const Function&>(x.dart_signature());

  const char* error = nullptr;
  return *compiler::ffi::CallbackMarshaller::FromFunction(
      d->zone(), dart_signature, &error);

  s->Write<const Function&>(x.dart_signature());
  s->Write<int8_t>(x.dart_signature_params_start_at());

  const intptr_t dart_signature_params_start_at = d->Read<int8_t>();

  const char* error = nullptr;
  return *compiler::ffi::CallMarshaller::FromFunction(
      d->zone(), dart_signature, dart_signature_params_start_at, c_signature,

  const intptr_t len = cid_ranges_.length();
  s->Write<intptr_t>(len);
  for (intptr_t i = 0; i < len; ++i) {

      s->Write<intptr_t>(t->count);

  const intptr_t len = d->Read<intptr_t>();

  for (intptr_t i = 0; i < len; ++i) {
    const intptr_t cid_start = d->Read<intptr_t>();
    const intptr_t cid_end = d->Read<intptr_t>();

      const intptr_t count = d->Read<intptr_t>();
  BlockEntryWithInitialDefs::WriteTo(s);
  s->Write<const Array&>(catch_handler_types_);
  s->Write<intptr_t>(catch_try_index_);
  s->Write<bool>(needs_stacktrace_);
  s->Write<bool>(is_generated_);

    : BlockEntryWithInitialDefs(d),
      graph_entry_(d->graph_entry()),
      predecessor_(nullptr),
      catch_handler_types_(d->Read<const Array&>()),
      catch_try_index_(d->Read<intptr_t>()),
      exception_var_(nullptr),
      stacktrace_var_(nullptr),
      raw_exception_var_(nullptr),
      raw_stacktrace_var_(nullptr),
      needs_stacktrace_(d->Read<bool>()),
      is_generated_(d->Read<bool>()) {}

  const intptr_t len = strlen(x);
  s->Write<intptr_t>(len);
  s->stream()->WriteBytes(x, len);

  const intptr_t len = d->Read<intptr_t>();
  char* str = d->zone()->Alloc<char>(len + 1);
  d->stream()->ReadBytes(str, len);
  comparison_->WriteExtra(s);

  comparison_->ReadExtra(d);
  for (intptr_t i = comparison_->InputCount() - 1; i >= 0; --i) {
    comparison_->InputAt(i)->set_instruction(this);

  s->Write<intptr_t>(x.cid_start);
  s->Write<intptr_t>(x.cid_end);

  const intptr_t cid_start = d->Read<intptr_t>();
  const intptr_t cid_end = d->Read<intptr_t>();

  const intptr_t len = x.length();
  s->Write<intptr_t>(len);
  for (intptr_t i = 0; i < len; ++i) {

  Zone* zone = d->zone();

  const intptr_t len = d->Read<intptr_t>();
  for (intptr_t i = 0; i < len; ++i) {
    const intptr_t cid_start = d->Read<intptr_t>();
    const intptr_t cid_end = d->Read<intptr_t>();

  TemplateDefinition::WriteExtra(s);

  TemplateDefinition::ReadExtra(d);
      s->Write<intptr_t>(i);

  ObjectStore* object_store = s->isolate_group()->object_store();

#define MATCH(member, name) \
  if (object_store->member() == x.ptr()) { \
    s->Write<intptr_t>(index); \

  const intptr_t stub_id = d->Read<intptr_t>();

  ObjectStore* object_store = d->isolate_group()->object_store();

#define MATCH(member, name) \
  if (index == stub_id) { \
    return Code::ZoneHandle(d->zone(), object_store->member()); \

    s->Write<bool>(false);

  s->Write<bool>(true);

  if (!d->Read<bool>()) {

  s->Write<bool>(can_be_null_);
  s->Write<bool>(can_be_sentinel_);

  if (type_ == nullptr) {
    s->Write<bool>(false);

    s->Write<bool>(true);

    : can_be_null_(d->Read<bool>()),
      can_be_sentinel_(d->Read<bool>()),

  if (d->Read<bool>()) {
  Instruction::WriteTo(s);

  s->Write<intptr_t>(temp_index_);
  s->Write<intptr_t>(ssa_temp_index_);

      range_(d->Read<Range*>()),
      temp_index_(d->Read<intptr_t>()),
      ssa_temp_index_(d->Read<intptr_t>()),
      type_(d->Read<CompileType*>()) {

    d->set_definition(ssa_temp_index(), this);

  if (type_ != nullptr) {
    type_->set_owner(this);

  if (!x->HasSSATemp()) {
    if (auto* move_arg = x->AsMoveArgument()) {

      x = move_arg->value()->definition();

  s->Write<intptr_t>(x->ssa_temp_index());

  return d->definition(d->Read<intptr_t>());

  s->stream()->Write<int64_t>(bit_cast<int64_t>(x));

  return bit_cast<double>(d->stream()->Read<int64_t>());
    s->Write<bool>(false);

  s->Write<bool>(true);

  if (!d->Read<bool>()) {

  s->Write<intptr_t>(fixed_parameter_count_);
  s->Write<uintptr_t>(bitfield_);

  if (locations_ == nullptr) {
    s->Write<bool>(false);

    s->Write<bool>(true);
    for (intptr_t i = 0, n = values_.length(); i < n; ++i) {

      fixed_parameter_count_(d->Read<intptr_t>()),
      bitfield_(d->Read<uintptr_t>()),

  for (intptr_t i = 0, n = values_.length(); i < n; ++i) {

  if (d->Read<bool>()) {

    for (intptr_t i = 0, n = values_.length(); i < n; ++i) {
    Write<Instruction*>(block);
    for (auto current : block->instructions()) {
      Write<Instruction*>(current);

  Write<Instruction*>(nullptr);
  Write<const ZoneGrowableArray<Definition*>&>(detached_defs);
  can_write_refs_ = true;

    block->WriteExtra(this);
    for (auto current : block->instructions()) {
      current->WriteExtra(this);

  for (auto* instr : detached_defs) {
    instr->WriteExtra(this);

  Write<intptr_t>(optimized_block_order.length());
  for (intptr_t i = 0, n = optimized_block_order.length(); i < n; ++i) {
    WriteRef<BlockEntryInstr*>(optimized_block_order[i]);

  if (captured_parameters->IsEmpty()) {

    for (intptr_t i = 0, n = captured_parameters->length(); i < n; ++i) {
      if (captured_parameters->Contains(i)) {

    Write<GrowableArray<intptr_t>>(indices);
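
// WriteFlowGraph emits the graph in two passes: first every block and its
// instructions go through Write<Instruction*> (terminated by a nullptr
// sentinel) together with any detached definitions; only after that is
// can_write_refs_ set and WriteExtra run, so instructions may refer to each
// other by id. Roughly, a round trip looks like this (sketch; constructor and
// entry-point names as declared in the corresponding header):
//
//   FlowGraphSerializer s(&write_stream);
//   s.WriteFlowGraph(flow_graph, detached_defs);
//   ...
//   FlowGraphDeserializer d(parsed_function, &read_stream);
//   FlowGraph* copy = d.ReadFlowGraph();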
  const intptr_t current_ssa_temp_index = Read<intptr_t>();
  const intptr_t max_block_id = Read<intptr_t>();
  const intptr_t inlining_id = Read<intptr_t>();
  const Array& coverage_array = Read<const Array&>();
  const PrologueInfo prologue_info(Read<intptr_t>(), Read<intptr_t>());

  definitions_.EnsureLength(current_ssa_temp_index, nullptr);
  blocks_.EnsureLength(max_block_id + 1, nullptr);

  while (Instruction* instr = Read<Instruction*>()) {
    instructions.Add(instr);
    if (!instr->IsBlockEntry()) {

  ASSERT(graph_entry_ != nullptr);
  const auto& detached_defs = Read<const ZoneGrowableArray<Definition*>&>();

    instr->ReadExtra(this);

  for (auto* instr : detached_defs) {
    instr->ReadExtra(this);

  const intptr_t num_blocks = Read<intptr_t>();
  if (num_blocks != 0) {

    for (intptr_t i = 0; i < num_blocks; ++i) {
      codegen_block_order->Add(ReadRef<BlockEntryInstr*>());

  for (intptr_t i : indices) {
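
// ReadFlowGraph mirrors the write order: scalar graph properties first, then
// definitions_ and blocks_ are pre-sized, instructions are materialized until
// the nullptr sentinel, and finally ReadExtra runs over every instruction
// (including detached definitions) before the codegen block order and the
// captured-parameter indices are restored.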
    s->Write<int8_t>(-1);

  Zone* zone = s->zone();
  s->Write<int8_t>(x.kind());

    case UntaggedFunction::kRegularFunction:
    case UntaggedFunction::kGetterFunction:
    case UntaggedFunction::kSetterFunction:
    case UntaggedFunction::kImplicitGetter:
    case UntaggedFunction::kImplicitSetter:
    case UntaggedFunction::kImplicitStaticGetter:
    case UntaggedFunction::kConstructor: {

      const intptr_t function_index = owner.FindFunctionIndex(x);
      ASSERT(function_index >= 0);
      s->Write<intptr_t>(function_index);

    case UntaggedFunction::kImplicitClosureFunction: {

    case UntaggedFunction::kFieldInitializer: {

      s->Write<const Field&>(field);

    case UntaggedFunction::kClosureFunction:

    case UntaggedFunction::kMethodExtractor: {

    case UntaggedFunction::kInvokeFieldDispatcher: {

    case UntaggedFunction::kDynamicInvocationForwarder: {

    case UntaggedFunction::kFfiTrampoline: {
      s->Write<uint8_t>(static_cast<uint8_t>(x.GetFfiCallbackKind()));

#define UNIMPLEMENTED_FUNCTION_KIND(kind) \
  case UntaggedFunction::k##kind: \
    FATAL("Unimplemented WriteTrait<const Function&>::Write for " #kind);

#undef UNIMPLEMENTED_FUNCTION_KIND
  const int8_t raw_kind = d->Read<int8_t>();

    return Object::null_function();

  Zone* zone = d->zone();

    case UntaggedFunction::kRegularFunction:
    case UntaggedFunction::kGetterFunction:
    case UntaggedFunction::kSetterFunction:
    case UntaggedFunction::kImplicitGetter:
    case UntaggedFunction::kImplicitSetter:
    case UntaggedFunction::kImplicitStaticGetter:
    case UntaggedFunction::kConstructor: {

      const intptr_t function_index = d->Read<intptr_t>();
      const auto& owner = Class::Handle(zone, d->GetClassById(owner_class_id));

    case UntaggedFunction::kImplicitClosureFunction: {
      const auto& parent = d->Read<const Function&>();

    case UntaggedFunction::kFieldInitializer: {
      const auto& field = d->Read<const Field&>();

    case UntaggedFunction::kClosureFunction: {
      const intptr_t index = d->Read<intptr_t>();

    case UntaggedFunction::kMethodExtractor: {

    case UntaggedFunction::kInvokeFieldDispatcher: {

          target_name, args_desc, UntaggedFunction::kInvokeFieldDispatcher,

    case UntaggedFunction::kDynamicInvocationForwarder: {

    case UntaggedFunction::kFfiTrampoline: {

          c_signature, callback_target, exceptional_return, kind));

  return Object::null_function();
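
// A Function is written as its kind byte followed by whatever identifies it
// on the reading side: owning class id plus function index for ordinary
// members and constructors, the parent function for implicit closures, the
// field for field initializers, a name and arguments descriptor for
// dispatchers and forwarders, and signature plus callback data for FFI
// trampolines. The reader switches on the same kind byte and reverses each
// case, falling back to the null function.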
  BlockEntryWithInitialDefs::WriteTo(s);

void GraphEntryInstr::WriteTo(FlowGraphSerializer* s) {
  BlockEntryWithInitialDefs::WriteTo(s);
  s->Write<intptr_t>(osr_id_);
  s->Write<intptr_t>(entry_count_);
  s->Write<intptr_t>(spill_slot_count_);
  s->Write<intptr_t>(fixed_slot_count_);
  s->Write<bool>(needs_frame_);

    : BlockEntryWithInitialDefs(d),

      osr_id_(d->Read<intptr_t>()),
      entry_count_(d->Read<intptr_t>()),
      spill_slot_count_(d->Read<intptr_t>()),
      fixed_slot_count_(d->Read<intptr_t>()),
      needs_frame_(d->Read<bool>()) {
  d->set_graph_entry(this);

void GraphEntryInstr::WriteExtra(FlowGraphSerializer* s) {
  BlockEntryWithInitialDefs::WriteExtra(s);
  s->WriteRef<FunctionEntryInstr*>(normal_entry_);
  s->WriteRef<FunctionEntryInstr*>(unchecked_entry_);
  s->WriteRef<OsrEntryInstr*>(osr_entry_);
  s->WriteGrowableArrayOfRefs<CatchBlockEntryInstr*>(catch_entries_);
  s->WriteGrowableArrayOfRefs<IndirectEntryInstr*>(indirect_entries_);

  BlockEntryWithInitialDefs::ReadExtra(d);
  normal_entry_ = d->ReadRef<FunctionEntryInstr*>();
  unchecked_entry_ = d->ReadRef<FunctionEntryInstr*>();
  osr_entry_ = d->ReadRef<OsrEntryInstr*>();
  catch_entries_ = d->ReadGrowableArrayOfRefs<CatchBlockEntryInstr*>();
  indirect_entries_ = d->ReadGrowableArrayOfRefs<IndirectEntryInstr*>();

void GotoInstr::WriteExtra(FlowGraphSerializer* s) {
  TemplateInstruction::WriteExtra(s);
  if (parallel_move_ != nullptr) {
    parallel_move_->WriteExtra(s);

  s->WriteRef<JoinEntryInstr*>(successor_);

  TemplateInstruction::ReadExtra(d);
  if (parallel_move_ != nullptr) {
    parallel_move_->ReadExtra(d);

  successor_ = d->ReadRef<JoinEntryInstr*>();
    s->Write<bool>(false);

  s->Write<bool>(true);

  if (!d->Read<bool>()) {

  return &ICData::Cast(d->Read<const Object&>());

  WriteExtraWithoutInputs(s);
  comparison_->WriteExtra(s);

  ReadExtraWithoutInputs(d);
  comparison_->ReadExtra(d);
  for (intptr_t i = comparison_->InputCount() - 1; i >= 0; --i) {
    comparison_->InputAt(i)->set_instruction(this);

void IndirectGotoInstr::WriteTo(FlowGraphSerializer* s) {
  TemplateInstruction::WriteTo(s);
  s->Write<intptr_t>(offsets_.Length());

    : TemplateInstruction(d),
      offsets_(TypedData::ZoneHandle(d->zone(),
                                     TypedData::New(kTypedDataInt32ArrayCid,
                                                    d->Read<intptr_t>(),

void IndirectGotoInstr::WriteExtra(FlowGraphSerializer* s) {
  TemplateInstruction::WriteExtra(s);
  s->WriteGrowableArrayOfRefs<TargetEntryInstr*>(successors_);

  TemplateInstruction::ReadExtra(d);
  successors_ = d->ReadGrowableArrayOfRefs<TargetEntryInstr*>();
    s->Write<uint8_t>(Instruction::kNumInstructions);

  s->Write<uint8_t>(static_cast<uint8_t>(x->tag()));

  const uint8_t tag = d->Read<uint8_t>();

#define READ_INSTRUCTION(type, attrs) \
  case Instruction::k##type: \
    return new (d->zone()) type##Instr(d);

#undef READ_INSTRUCTION
    case Instruction::kNumInstructions:

  s->Write<intptr_t>(deopt_id_);
  s->Write<intptr_t>(inlining_id_);

    : deopt_id_(d->Read<intptr_t>()), inlining_id_(d->Read<intptr_t>()) {}

void Instruction::WriteExtra(FlowGraphSerializer* s) {
  for (intptr_t i = 0, n = InputCount(); i < n; ++i) {

  WriteExtraWithoutInputs(s);

  for (intptr_t i = 0, n = InputCount(); i < n; ++i) {
    SetInputAt(i, d->Read<Value*>());

  for (intptr_t i = InputCount() - 1; i >= 0; --i) {
    Value* input = InputAt(i);
    input->definition()->AddInputUse(input);

  ReadExtraWithoutInputs(d);

    SetEnvironment(env);
#define INSTRUCTIONS_SERIALIZABLE_AS_INSTRUCTION(V) \
  V(Comparison, ComparisonInstr) \
  V(Constant, ConstantInstr) \
  V(Definition, Definition) \
  V(ParallelMove, ParallelMoveInstr) \

#define SERIALIZABLE_AS_INSTRUCTION(name, type) \
  void FlowGraphSerializer::WriteTrait<type*>::Write(FlowGraphSerializer* s, \
    s->Write<Instruction*>(x); \
  type* FlowGraphDeserializer::ReadTrait<type*>::Read( \
      FlowGraphDeserializer* d) { \
    Instruction* instr = d->Read<Instruction*>(); \
    ASSERT((instr == nullptr) || instr->Is##name()); \
    return static_cast<type*>(instr); \

#undef SERIALIZABLE_AS_INSTRUCTION
#undef INSTRUCTIONS_SERIALIZABLE_AS_INSTRUCTION

  s->stream()->Write<int8_t>(x);

  return d->stream()->Read<int8_t>();

  s->stream()->Write<int16_t>(x);

  return d->stream()->Read<int16_t>();

  s->stream()->Write<int32_t>(x);

  return d->stream()->Read<int32_t>();

  s->stream()->Write<int64_t>(x);

  return d->stream()->Read<int64_t>();
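
// The fixed-width integer traits simply forward to the underlying stream;
// the richer traits in this file ultimately bottom out in these primitives.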
  BlockEntryInstr::WriteExtra(s);
  if (phis_ != nullptr) {

  BlockEntryInstr::ReadExtra(d);
  if (phis_ != nullptr) {
    for (PhiInstr* phi : *phis_) {

  return *d->parsed_function().receiver_var();

  if (IsPairLocation()) {
    s->Write<uword>(value_ & kLocationTagMask);

    pair->At(0).Write(s);
    pair->At(1).Write(s);

    s->Write<uword>(value_ & kLocationTagMask);

  if (value == kPairLocationTag) {

  } else if ((value & kConstantTag) == kConstantTag) {

    ASSERT(instr != nullptr);
    const int pair_index = (value & kPairLocationTag) != 0 ? 1 : 0;
    s->Write<bool>(false);

  s->Write<bool>(true);

  if (!d->Read<bool>()) {

  s->Write<intptr_t>(input_count());
  s->Write<intptr_t>(temp_count());
  s->Write<int8_t>(static_cast<int8_t>(contains_call_));
  live_registers_.Write(s);

  for (intptr_t i = 0, n = input_count(); i < n; ++i) {

  for (intptr_t i = 0, n = temp_count(); i < n; ++i) {

  ASSERT(output_count() == 1);

  if ((stack_bitmap_ != nullptr) && (stack_bitmap_->Length() != 0)) {
    s->Write<int8_t>(1);
    stack_bitmap_->Write(s->stream());

    s->Write<int8_t>(0);

  s->Write<intptr_t>(writable_inputs_);

    : num_inputs_(d->Read<intptr_t>()),
      num_temps_(d->Read<intptr_t>()),

      stack_bitmap_(nullptr),

      live_registers_(d) {
  input_locations_ = d->zone()->Alloc<Location>(num_inputs_);
  for (intptr_t i = 0; i < num_inputs_; ++i) {

  temp_locations_ = d->zone()->Alloc<Location>(num_temps_);
  for (intptr_t i = 0; i < num_temps_; ++i) {

  if (d->Read<int8_t>() != 0) {
    EnsureStackBitmap().Read(d->stream());

  writable_inputs_ = d->Read<intptr_t>();
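
// A LocationSummary round-trips as: input/temp counts, the contains-call
// flag, the live register masks, then the individual Locations in order; the
// stack bitmap is written behind a one-byte presence flag. The reading
// constructor zone-allocates the location arrays before re-reading them in
// the same order.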
  TemplateDefinition::WriteExtra(s);
  null_->WriteExtra(s);

void MakeTempInstr::ReadExtra(FlowGraphDeserializer* d) {
  TemplateDefinition::ReadExtra(d);
  null_->ReadExtra(d);

void MaterializeObjectInstr::WriteExtra(FlowGraphSerializer* s) {
  VariadicDefinition::WriteExtra(s);

    locations_[i].Write(s);

void MaterializeObjectInstr::ReadExtra(FlowGraphDeserializer* d) {
  VariadicDefinition::ReadExtra(d);
  const intptr_t cid = x.GetClassId();

  const intptr_t object_id = s->heap()->GetObjectId(x.ptr());
  if (object_id > 0) {
    const intptr_t object_index = object_id - 1;

    s->Write<intptr_t>(object_index);

  const intptr_t object_index = s->object_counter_++;
  s->heap()->SetObjectId(x.ptr(), object_index + 1);
  s->Write<intptr_t>(cid);
  s->WriteObjectImpl(x, cid, object_index);

  const intptr_t cid = d->Read<intptr_t>();

    const intptr_t object_index = d->Read<intptr_t>();
    return *(d->objects_[object_index]);

  const intptr_t object_index = d->object_counter_;
  d->object_counter_++;

  d->SetObjectAt(object_index, result);

void FlowGraphDeserializer::SetObjectAt(intptr_t object_index,

  objects_.EnsureLength(object_index + 1, &Object::null_object());
  objects_[object_index] = &object;

bool FlowGraphSerializer::IsWritten(const Object& obj) {

  return (object_id != 0);
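
// Object constants are deduplicated: the first time an object is written it
// is assigned the next object_counter_ index (recorded in the heap's
// object-id table) and its class id plus payload follow; every later
// occurrence writes just a back-reference. The deserializer maintains the
// matching objects_ table via SetObjectAt so back-references resolve to the
// handle that was already read.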
bool FlowGraphSerializer::HasEnclosingTypes(const Object& obj) {
  if (num_free_fun_type_params_ == 0) return false;
  if (obj.IsAbstractType()) {
    return !AbstractType::Cast(obj).IsInstantiated(kFunctions,
                                                   num_free_fun_type_params_);
  } else if (obj.IsTypeArguments()) {
    return !TypeArguments::Cast(obj).IsInstantiated(kFunctions,
                                                    num_free_fun_type_params_);

bool FlowGraphSerializer::WriteObjectWithEnclosingTypes(const Object& obj) {
  if (HasEnclosingTypes(obj)) {

    WriteEnclosingTypes(obj, num_free_fun_type_params_);

    const intptr_t saved_num_free_fun_type_params = num_free_fun_type_params_;
    num_free_fun_type_params_ = 0;
    Write<const Object&>(obj);
    num_free_fun_type_params_ = saved_num_free_fun_type_params;

void FlowGraphSerializer::WriteEnclosingTypes(

    intptr_t num_free_fun_type_params) {

    const auto& type = Type::Cast(obj);

      WriteEnclosingTypes(type_args, num_free_fun_type_params);

  } else if (obj.IsRecordType()) {
    const auto& rec = RecordType::Cast(obj);

    for (intptr_t i = 0, n = rec.NumFields(); i < n; ++i) {
      elem = rec.FieldTypeAt(i);
      WriteEnclosingTypes(elem, num_free_fun_type_params);

  } else if (obj.IsFunctionType()) {
    const auto& sig = FunctionType::Cast(obj);
    const intptr_t num_parent_type_args = sig.NumParentTypeArguments();
    if (num_free_fun_type_params > num_parent_type_args) {
      num_free_fun_type_params = num_parent_type_args;

    WriteEnclosingTypes(elem, num_free_fun_type_params);
    for (intptr_t i = 0, n = sig.NumParameters(); i < n; ++i) {
      elem = sig.ParameterTypeAt(i);
      WriteEnclosingTypes(elem, num_free_fun_type_params);

    if (sig.IsGeneric()) {
      const TypeParameters& type_params =

                          num_free_fun_type_params);

  } else if (obj.IsTypeParameter()) {
    const auto& tp = TypeParameter::Cast(obj);
    if (tp.IsFunctionTypeParameter() &&
        (tp.index() < num_free_fun_type_params)) {

      if (!IsWritten(owner)) {

        Write<const Object&>(owner);

  } else if (obj.IsTypeArguments()) {
    const auto& type_args = TypeArguments::Cast(obj);

    for (intptr_t i = 0, n = type_args.Length(); i < n; ++i) {
      elem = type_args.TypeAt(i);
      WriteEnclosingTypes(elem, num_free_fun_type_params);

const Object& FlowGraphDeserializer::ReadObjectWithEnclosingTypes() {

    while (Read<bool>()) {
      Read<const Object&>();

    return Read<const Object&>();

  return Object::null_object();
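
// Types that still mention free function type parameters cannot be read in
// isolation: WriteObjectWithEnclosingTypes first walks the type and emits the
// enclosing FunctionTypes (the owners of such parameters) so that, on the
// reading side, ReadObjectWithEnclosingTypes consumes them and the owners are
// already registered before the type itself is read.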
void FlowGraphSerializer::WriteObjectImpl(const Object& x,

                                          intptr_t object_index) {

    case kImmutableArrayCid: {
      const auto& array = Array::Cast(x);
      const intptr_t len = array.Length();
      Write<intptr_t>(len);
      const auto& type_args =

      Write<const TypeArguments&>(type_args);
      if ((len == 0) && type_args.IsNull()) {

      Write<bool>(array.IsCanonical());

      for (intptr_t i = 0; i < len; ++i) {

        Write<const Object&>(elem);

      Write<bool>(Bool::Cast(x).value());

      const auto& closure = Closure::Cast(x);

      type_args = closure.instantiator_type_arguments();
      Write<const TypeArguments&>(type_args);
      type_args = closure.function_type_arguments();
      Write<const TypeArguments&>(type_args);
      type_args = closure.delayed_type_arguments();
      Write<const TypeArguments&>(type_args);

      Write<double>(Double::Cast(x).value());

      const auto& field = Field::Cast(x);

      Write<classid_t>(owner.id());
      const intptr_t field_index = owner.FindFieldIndex(field);
      ASSERT(field_index >= 0);
      Write<intptr_t>(field_index);

      Write<const Function&>(Function::Cast(x));

    case kFunctionTypeCid: {
      const auto& type = FunctionType::Cast(x);

      if (WriteObjectWithEnclosingTypes(type)) {

      const intptr_t saved_num_free_fun_type_params =
          num_free_fun_type_params_;
      const intptr_t num_parent_type_args = type.NumParentTypeArguments();
      if (num_free_fun_type_params_ > num_parent_type_args) {
        num_free_fun_type_params_ = num_parent_type_args;

      Write<int8_t>(static_cast<int8_t>(type.nullability()));
      Write<uint32_t>(type.packed_parameter_counts());
      Write<uint16_t>(type.packed_type_parameter_counts());
      Write<const TypeParameters&>(

      num_free_fun_type_params_ = saved_num_free_fun_type_params;

      const auto& icdata = ICData::Cast(x);
      Write<int8_t>(static_cast<int8_t>(icdata.rebind_rule()));

      Write<const Array&>(Array::Handle(Z, icdata.arguments_descriptor()));
      Write<intptr_t>(icdata.deopt_id());
      Write<intptr_t>(icdata.NumArgsTested());
      if (icdata.rebind_rule() == ICData::kStatic) {
        ASSERT(icdata.NumberOfChecks() == 1);

      } else if (icdata.rebind_rule() == ICData::kInstance) {
        if (icdata.NumberOfChecks() != 0) {

    case kConstSetCid: {

      Write<const TypeArguments&>(

      if (cid == kConstMapCid) {

      for (intptr_t i = 0; i < used_data; ++i) {

        Write<const Object&>(elem);

    case kLibraryPrefixCid: {
      const auto& prefix = LibraryPrefix::Cast(x);

      Write<classid_t>(Class::Handle(Z, library.toplevel_class()).id());

      Write<int64_t>(Integer::Cast(x).AsInt64Value());
    case kOneByteStringCid: {

      const auto& str = String::Cast(x);
      const intptr_t length = str.Length();

        NoSafepointScope no_safepoint;
        uint8_t* latin1 = OneByteString::DataStart(str);

      const auto& record = Record::Cast(x);
      Write<RecordShape>(record.shape());

      for (intptr_t i = 0, n = record.num_fields(); i < n; ++i) {
        field = record.FieldAt(i);
        Write<const Object&>(field);

    case kRecordTypeCid: {
      const auto& rec = RecordType::Cast(x);
      ASSERT(rec.IsFinalized());
      if (WriteObjectWithEnclosingTypes(rec)) {

      Write<int8_t>(static_cast<int8_t>(rec.nullability()));
      Write<RecordShape>(rec.shape());

      if (x.ptr() == Object::sentinel().ptr()) {

      } else if (x.ptr() == Object::transition_sentinel().ptr()) {

      } else if (x.ptr() == Object::optimized_out().ptr()) {

      Write<intptr_t>(Smi::Cast(x).Value());

    case kTwoByteStringCid: {

      const auto& str = String::Cast(x);
      const intptr_t length = str.Length();

        NoSafepointScope no_safepoint;
        uint16_t* utf16 = TwoByteString::DataStart(str);
        stream_->WriteBytes(reinterpret_cast<const uint8_t*>(utf16),
                            length * sizeof(uint16_t));

      const auto& type = Type::Cast(x);

      if (WriteObjectWithEnclosingTypes(type)) {

      Write<int8_t>(static_cast<int8_t>(type.nullability()));
      Write<classid_t>(type.type_class_id());
      if (cls.IsGeneric()) {

        Write<const TypeArguments&>(type_args);

    case kTypeArgumentsCid: {
      const auto& type_args = TypeArguments::Cast(x);
      ASSERT(type_args.IsFinalized());
      if (WriteObjectWithEnclosingTypes(type_args)) {

      const intptr_t len = type_args.Length();
      Write<intptr_t>(len);

      for (intptr_t i = 0; i < len; ++i) {
        type = type_args.TypeAt(i);
        Write<const AbstractType&>(type);

    case kTypeParameterCid: {
      const auto& tp = TypeParameter::Cast(x);
      ASSERT(tp.IsFinalized());
      if (WriteObjectWithEnclosingTypes(tp)) {

      Write<intptr_t>(tp.base());
      Write<intptr_t>(tp.index());
      Write<int8_t>(static_cast<int8_t>(tp.nullability()));
      if (tp.IsFunctionTypeParameter()) {

        Write<const FunctionType&>(

    case kTypeParametersCid: {
      const auto& tps = TypeParameters::Cast(x);

      const auto& instance = Instance::Cast(x);

      const auto unboxed_fields_bitmap =

      const intptr_t next_field_offset = cls.host_next_field_offset();

            Write<int64_t>(*reinterpret_cast<int64_t*>(

            Write<int32_t>(*reinterpret_cast<int32_t*>(

          Write<const Object&>(obj);

      FATAL("Unimplemented WriteObjectImpl for %s", x.ToCString());
const Object& FlowGraphDeserializer::ReadObjectImpl(intptr_t cid,
                                                    intptr_t object_index) {

    case kImmutableArrayCid: {
      const intptr_t len = Read<intptr_t>();
      const auto& type_args = Read<const TypeArguments&>();
      if ((len == 0) && type_args.IsNull()) {
        return Object::empty_array();

      const bool canonicalize = Read<bool>();

      if (!type_args.IsNull()) {
        array.SetTypeArguments(type_args);

      for (intptr_t i = 0; i < len; ++i) {
        array.SetAt(i, Read<const Object&>());

      if (cid == kImmutableArrayCid) {
        array.MakeImmutable();

        array ^= array.Canonicalize(thread());

      const auto& instantiator_type_arguments = Read<const TypeArguments&>();
      const auto& function_type_arguments = Read<const TypeArguments&>();
      const auto& delayed_type_arguments = Read<const TypeArguments&>();
      const auto& function = Read<const Function&>();

          Z, Closure::New(instantiator_type_arguments, function_type_arguments,

                          Object::null_object()));

      const classid_t owner_class_id = Read<classid_t>();
      const intptr_t field_index = Read<intptr_t>();
      const auto& owner = Class::Handle(Z, GetClassById(owner_class_id));

      return Read<const Function&>();
    case kFunctionTypeCid: {
      const auto& enc_type = ReadObjectWithEnclosingTypes();
      if (!enc_type.IsNull()) {

      SetObjectAt(object_index, result);
      result.set_packed_parameter_counts(Read<uint32_t>());
      result.set_packed_type_parameter_counts(Read<uint16_t>());
      result.SetTypeParameters(Read<const TypeParameters&>());
      result.set_result_type(Read<const AbstractType&>());
      result.set_parameter_types(Read<const Array&>());
      result.set_named_parameter_names(Read<const Array&>());

      const auto& owner = Read<const Function&>();
      const auto& arguments_descriptor = Read<const Array&>();
      const intptr_t deopt_id = Read<intptr_t>();
      const intptr_t num_args_tested = Read<intptr_t>();

      if (rebind_rule == ICData::kStatic) {
        const auto& target = Read<const Function&>();

            deopt_id, num_args_tested, rebind_rule));
      } else if (rebind_rule == ICData::kInstance) {
        const auto& target_name = Read<const String&>();

            Z, ICData::New(owner, target_name, arguments_descriptor, deopt_id,
                           num_args_tested, rebind_rule));

    case kConstSetCid: {
      const intptr_t length = Read<intptr_t>();
      const auto& type_args = Read<const TypeArguments&>();

      if (cid == kConstMapCid) {

        used_data = (length << 1);

      map.SetTypeArguments(type_args);
      map.set_used_data(used_data);

      map.set_deleted_keys(0);
      map.ComputeAndSetHashMask();
      for (intptr_t i = 0; i < used_data; ++i) {
        data.SetAt(i, Read<const Object&>());

    case kLibraryPrefixCid: {
      const Class& toplevel_class =

      const String& name = Read<const String&>();

      const int64_t value = Read<int64_t>();

      return Object::null_object();
    case kOneByteStringCid: {
      const intptr_t length = Read<intptr_t>();
      uint8_t* latin1 = Z->Alloc<uint8_t>(length);

      const RecordShape shape = Read<RecordShape>();

      for (intptr_t i = 0, n = shape.num_fields(); i < n; ++i) {
        record.SetFieldAt(i, Read<const Object&>());

      record ^= record.Canonicalize(thread());

    case kRecordTypeCid: {
      const auto& enc_type = ReadObjectWithEnclosingTypes();
      if (!enc_type.IsNull()) {

      const RecordShape shape = Read<RecordShape>();
      const Array& field_types = Read<const Array&>();

      rec.SetIsFinalized();
      rec ^= rec.Canonicalize(thread());

      switch (Read<uint8_t>()) {

          return Object::sentinel();

          return Object::transition_sentinel();

          return Object::optimized_out();

    case kTwoByteStringCid: {
      const intptr_t length = Read<intptr_t>();
      uint16_t* utf16 = Z->Alloc<uint16_t>(length);
      stream_->ReadBytes(reinterpret_cast<uint8_t*>(utf16),
                         length * sizeof(uint16_t));

      const auto& enc_type = ReadObjectWithEnclosingTypes();
      if (!enc_type.IsNull()) {

      const classid_t type_class_id = Read<classid_t>();

      if (cls.IsGeneric()) {

        SetObjectAt(object_index, result);
        const auto& type_args = Read<const TypeArguments&>();
        result.set_arguments(type_args);

        result = cls.DeclarationType();

    case kTypeArgumentsCid: {
      const auto& enc_type_args = ReadObjectWithEnclosingTypes();
      if (!enc_type_args.IsNull()) {
        return enc_type_args;

      const intptr_t len = Read<intptr_t>();

      SetObjectAt(object_index, type_args);
      for (intptr_t i = 0; i < len; ++i) {
        type_args.SetTypeAt(i, Read<const AbstractType&>());

      type_args ^= type_args.Canonicalize(thread());

    case kTypeParameterCid: {
      const auto& enc_type = ReadObjectWithEnclosingTypes();
      if (!enc_type.IsNull()) {

      const intptr_t base = Read<intptr_t>();
      const intptr_t index = Read<intptr_t>();

      const Object* owner = nullptr;

        owner = &Read<const FunctionType&>();

        owner = &Read<const Class&>();

          Z, TypeParameter::New(*owner, base, index, nullability));
      SetObjectAt(object_index, tp);
      tp.SetIsFinalized();
      tp ^= tp.Canonicalize(thread());

    case kTypeParametersCid: {

      tps.set_names(Read<const Array&>());
      tps.set_flags(Read<const Array&>());
      tps.set_bounds(Read<const TypeArguments&>());
      tps.set_defaults(Read<const TypeArguments&>());

      const auto unboxed_fields_bitmap =

      const intptr_t next_field_offset = cls.host_next_field_offset();

            const int64_t v = Read<int64_t>();
            *reinterpret_cast<int64_t*>(

            const int32_t v = Read<int32_t>();
            *reinterpret_cast<int32_t*>(

          const auto& obj = Read<const Object&>();

  return Object::null_object();
#define HANDLES_SERIALIZABLE_AS_OBJECT(V) \
  V(AbstractType, Object::null_abstract_type()) \
  V(Array, Object::null_array()) \
  V(Field, Field::Handle(d->zone())) \
  V(FunctionType, Object::null_function_type()) \
  V(Instance, Object::null_instance()) \
  V(String, Object::null_string()) \
  V(TypeArguments, Object::null_type_arguments()) \
  V(TypeParameters, TypeParameters::Handle(d->zone()))

#define SERIALIZE_HANDLE_AS_OBJECT(handle, null_handle) \
  void FlowGraphSerializer::WriteTrait<const handle&>::Write( \
      FlowGraphSerializer* s, const handle& x) { \
    s->Write<const Object&>(x); \
  const handle& FlowGraphDeserializer::ReadTrait<const handle&>::Read( \
      FlowGraphDeserializer* d) { \
    const Object& result = d->Read<const Object&>(); \
    if (result.IsNull()) { \
      return null_handle; \
    return handle::Cast(result); \

#undef SERIALIZE_HANDLE_AS_OBJECT
#undef HANDLES_SERIALIZABLE_AS_OBJECT
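
// The typed handle traits above are thin wrappers around the Object trait:
// writing forwards to Write<const Object&>, and reading substitutes the
// listed null handle or performs a checked handle::Cast.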
void OsrEntryInstr::WriteTo(FlowGraphSerializer* s) {
  BlockEntryWithInitialDefs::WriteTo(s);

    : BlockEntryWithInitialDefs(d), graph_entry_(d->graph_entry()) {}

void ParallelMoveInstr::WriteExtra(FlowGraphSerializer* s) {
  Instruction::WriteExtra(s);
  s->Write<GrowableArray<MoveOperands*>>(moves_);
  s->Write<const MoveSchedule*>(move_schedule_);

void ParallelMoveInstr::ReadExtra(FlowGraphDeserializer* d) {
  Instruction::ReadExtra(d);
  moves_ = d->Read<GrowableArray<MoveOperands*>>();
  move_schedule_ = d->Read<const MoveSchedule*>();

void ParameterInstr::WriteExtra(FlowGraphSerializer* s) {
  TemplateDefinition::WriteExtra(s);

void ParameterInstr::ReadExtra(FlowGraphDeserializer* d) {
  TemplateDefinition::ReadExtra(d);

void PhiInstr::WriteTo(FlowGraphSerializer* s) {
  VariadicDefinition::WriteTo(s);

  s->Write<bool>(is_alive_);
  s->Write<int8_t>(is_receiver_);

PhiInstr::PhiInstr(FlowGraphDeserializer* d)
    : VariadicDefinition(d),
      block_(d->current_block()->AsJoinEntry()),

      is_alive_(d->Read<bool>()),
      is_receiver_(d->Read<int8_t>()) {}
void LeafRuntimeCallInstr::WriteTo(FlowGraphSerializer* s) {
  VariadicDefinition::WriteTo(s);

  s->Write<const ZoneGrowableArray<Representation>&>(argument_representations_);

LeafRuntimeCallInstr::LeafRuntimeCallInstr(FlowGraphDeserializer* d)
    : VariadicDefinition(d),

      argument_representations_(

      native_calling_convention_(
          compiler::ffi::NativeCallingConvention::FromSignature(

              *compiler::ffi::NativeFunctionType::FromRepresentations(

                  return_representation_,
                  argument_representations_))) {}

    s->Write<bool>(false);

  s->Write<bool>(true);

  if (!d->Read<bool>()) {

  return new (d->zone()) Range(d);
  s->Write<int8_t>(kind_);
  s->Write<int64_t>(value_);
  s->Write<int64_t>(offset_);

    : kind_(static_cast<Kind>(d->Read<int8_t>())),
      value_(d->Read<int64_t>()),
      offset_(d->Read<int64_t>()) {}

  s->Write<intptr_t>(x.num_fields());

  const intptr_t num_fields = d->Read<intptr_t>();
  const auto& field_names = d->Read<const Array&>();

  s->Write<uintptr_t>(cpu_registers_.data());
  s->Write<uintptr_t>(untagged_cpu_registers_.data());
  s->Write<uintptr_t>(fpu_registers_.data());

    : cpu_registers_(d->Read<uintptr_t>()),
      untagged_cpu_registers_(d->Read<uintptr_t>()),
      fpu_registers_(d->Read<uintptr_t>()) {}
  s->Write<uint8_t>(x);

    s->Write<bool>(false);

  s->Write<bool>(true);

  if (!d->Read<bool>()) {

      s->Write<int8_t>(flags_);
      s->Write<intptr_t>(offset_in_bytes_);

      s->Write<intptr_t>(offset_in_bytes_);

      s->Write<intptr_t>(offset_in_bytes_);

      s->Write<intptr_t>(offset_in_bytes_);

      s->Write<int8_t>(flags_);
      s->Write<intptr_t>(offset_in_bytes_);
      s->Write<const String&>(*DataAs<const String>());

  const void* data = nullptr;

      flags = d->Read<int8_t>();

      data = ":type_arguments";

      data = ":array_element";

      data = ":record_field";

      flags = d->Read<int8_t>();

      return Slot::GetNativeSlot(kind);
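
// Slots round-trip by kind: native slots are recreated via
// Slot::GetNativeSlot, while Dart-field and other offset-based slots carry
// their flags, offset in bytes and, where needed, a name so the reader can
// rebuild an equivalent Slot.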
#if defined(DART_PRECOMPILER)

    s->Write<int32_t>(x->id);

#if defined(DART_PRECOMPILER)
  const int32_t id = d->Read<int32_t>();

  ASSERT(selector != nullptr);
template <intptr_t kExtraInputs>

  VariadicDefinition::WriteTo(s);
  s->Write<intptr_t>(type_args_len_);
  s->Write<const Array&>(argument_names_);

  if (move_arguments_ == nullptr) {
    s->Write<intptr_t>(-1);

    s->Write<intptr_t>(move_arguments_->length());

    for (auto move_arg : *move_arguments_) {
      if (move_arg->next() == nullptr) {
        s->Write<bool>(true);

        s->Write<bool>(false);

template <intptr_t kExtraInputs>

    : VariadicDefinition(d),
      type_args_len_(d->Read<intptr_t>()),
      argument_names_(d->Read<const Array&>()),
      token_pos_(d->Read<TokenPosition>()) {
  const intptr_t num_move_args = d->Read<intptr_t>();
  if (num_move_args >= 0) {

    move_arguments_->EnsureLength(num_move_args, nullptr);
    for (intptr_t i = 0; i < num_move_args; i++) {
      if (d->Read<bool>()) {
        auto move_arg = d->Read<Instruction*>()->AsMoveArgument();
        ASSERT(move_arg != nullptr);
        (*move_arguments_)[i] = move_arg;

template <intptr_t kExtraInputs>
void TemplateDartCall<kExtraInputs>::WriteExtra(FlowGraphSerializer* s) {
  VariadicDefinition::WriteExtra(s);
  if (move_arguments_ != nullptr) {

    for (intptr_t i = move_arguments_->length() - 1; i >= 0; --i) {
      auto move_arg = move_arguments_->At(i);
      if (move_arg->next() == nullptr) {
        move_arg->WriteExtra(s);

template <intptr_t kExtraInputs>

  VariadicDefinition::ReadExtra(d);
  if (move_arguments_ != nullptr) {
    Instruction* cursor = this;
    for (intptr_t i = move_arguments_->length() - 1; i >= 0; --i) {
      if ((*move_arguments_)[i] != nullptr) {
        (*move_arguments_)[i]->ReadExtra(d);

          cursor = cursor->previous();
        } while (!cursor->IsMoveArgument());
        (*move_arguments_)[i] = cursor->AsMoveArgument();

  if (env() != nullptr) {
    RepairArgumentUsesInEnvironment();

template class TemplateDartCall<0>;
template class TemplateDartCall<1>;
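
// TemplateDartCall serializes move_arguments_ as a count plus, per entry, a
// flag saying whether that MoveArgument terminates its chain and is written
// directly; ReadExtra later walks backwards from the call via previous() to
// locate the MoveArgument instructions that were not read inline, and finally
// repairs argument uses in the environment. The explicit instantiations cover
// the 0- and 1-extra-input call templates.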
void MoveArgumentInstr::WriteExtra(FlowGraphSerializer* s) {
  TemplateDefinition::WriteExtra(s);

  TemplateDefinition::ReadExtra(d);

  s->Write<int32_t>(x.Serialize());

  s->stream()->Write<uint8_t>(x);

  return d->stream()->Read<uint8_t>();

  s->stream()->Write<uint16_t>(x);

  return d->stream()->Read<uint16_t>();

  s->stream()->Write<int32_t>(static_cast<int32_t>(x));

  return static_cast<uint32_t>(d->stream()->Read<int32_t>());

  s->stream()->Write<int64_t>(static_cast<int64_t>(x));

  return static_cast<uint64_t>(d->stream()->Read<int64_t>());

  ConstantInstr::WriteTo(s);

      constant_address_(0) {
  if (representation_ == kUnboxedDouble) {

  if ((reaching_type != nullptr) && def->HasType() &&
      (reaching_type == def->Type())) {
    reaching_type = nullptr;

  Definition::WriteTo(s);
  s->Write<intptr_t>(inputs_.length());

  const intptr_t num_inputs = d->Read<intptr_t>();
  inputs_.EnsureLength(num_inputs, nullptr);