Flutter Engine
The Flutter Engine
Loading...
Searching...
No Matches
il_serializer.cc
Go to the documentation of this file.
1// Copyright (c) 2022, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
6
7#include "vm/class_id.h"
9#if defined(DART_PRECOMPILER)
11#endif
16#include "vm/object_store.h"
17#include "vm/parser.h"
18
19#define Z zone_
20
21// This file declares write/read methods for each type,
22// sorted alphabetically by type/class name (case-insensitive).
23// Each "write" method is followed by corresponding "read" method
24// or constructor.
25
26namespace dart {
27
29 : stream_(stream),
30 zone_(Thread::Current()->zone()),
31 thread_(Thread::Current()),
32 isolate_group_(IsolateGroup::Current()),
33 heap_(IsolateGroup::Current()->heap()) {}
34
38
40 const ParsedFunction& parsed_function,
41 ReadStream* stream)
42 : parsed_function_(parsed_function),
43 stream_(stream),
44 zone_(Thread::Current()->zone()),
45 thread_(Thread::Current()),
46 isolate_group_(IsolateGroup::Current()) {}
47
48ClassPtr FlowGraphDeserializer::GetClassById(classid_t id) const {
49 return isolate_group()->class_table()->At(id);
50}
51
52template <>
55 const AbstractType* x) {
56 if (x == nullptr) {
57 s->Write<bool>(false);
58 } else {
59 s->Write<bool>(true);
60 s->Write<const AbstractType&>(*x);
61 }
62}
63
64template <>
67 if (!d->Read<bool>()) {
68 return nullptr;
69 }
70 return &(d->Read<const AbstractType&>());
71}
72
73template <>
79
80template <>
85
87 s->Write<intptr_t>(value_);
88}
89
91 : value_(d->Read<intptr_t>()) {}
92
93void BlockEntryInstr::WriteTo(FlowGraphSerializer* s) {
94 TemplateInstruction::WriteTo(s);
95 s->Write<intptr_t>(block_id_);
96 s->Write<intptr_t>(try_index_);
97 s->Write<intptr_t>(stack_depth_);
98 s->Write<ParallelMoveInstr*>(parallel_move_);
99}
100
101BlockEntryInstr::BlockEntryInstr(FlowGraphDeserializer* d)
102 : TemplateInstruction(d),
103 block_id_(d->Read<intptr_t>()),
104 try_index_(d->Read<intptr_t>()),
105 stack_depth_(d->Read<intptr_t>()),
106 dominated_blocks_(1),
107 parallel_move_(d->Read<ParallelMoveInstr*>()) {
108 d->set_block(block_id_, this);
109 d->set_current_block(this);
110}
111
112void BlockEntryInstr::WriteExtra(FlowGraphSerializer* s) {
113 TemplateInstruction::WriteExtra(s);
114 s->WriteRef<BlockEntryInstr*>(dominator_);
115 s->WriteGrowableArrayOfRefs<BlockEntryInstr*>(dominated_blocks_);
116 if (parallel_move_ != nullptr) {
117 parallel_move_->WriteExtra(s);
118 }
119}
120
121void BlockEntryInstr::ReadExtra(FlowGraphDeserializer* d) {
122 TemplateInstruction::ReadExtra(d);
123 dominator_ = d->ReadRef<BlockEntryInstr*>();
124 dominated_blocks_ = d->ReadGrowableArrayOfRefs<BlockEntryInstr*>();
125 if (parallel_move_ != nullptr) {
126 parallel_move_->ReadExtra(d);
127 }
128}
129
130template <>
134 ASSERT(s->can_write_refs());
135 if (x == nullptr) {
136 s->Write<intptr_t>(-1);
137 return;
138 }
139 const intptr_t id = x->block_id();
140 ASSERT(id >= 0);
141 s->Write<intptr_t>(id);
142}
143
144template <>
147 const intptr_t id = d->Read<intptr_t>();
148 if (id < 0) {
149 return nullptr;
150 }
151 return d->block(id);
152}
153
154#define INSTRUCTION_REFS_SERIALIZABLE_AS_BLOCK_ENTRY(V) \
155 V(CatchBlockEntry, CatchBlockEntryInstr) \
156 V(FunctionEntry, FunctionEntryInstr) \
157 V(IndirectEntry, IndirectEntryInstr) \
158 V(JoinEntry, JoinEntryInstr) \
159 V(OsrEntry, OsrEntryInstr) \
160 V(TargetEntry, TargetEntryInstr)
161
162#define SERIALIZABLE_AS_BLOCK_ENTRY(name, type) \
163 template <> \
164 void FlowGraphSerializer::WriteRefTrait<type*>::WriteRef( \
165 FlowGraphSerializer* s, type* x) { \
166 s->WriteRef<BlockEntryInstr*>(x); \
167 } \
168 template <> \
169 type* FlowGraphDeserializer::ReadRefTrait<type*>::ReadRef( \
170 FlowGraphDeserializer* d) { \
171 BlockEntryInstr* instr = d->ReadRef<BlockEntryInstr*>(); \
172 ASSERT((instr == nullptr) || instr->Is##name()); \
173 return static_cast<type*>(instr); \
174 }
175
177#undef SERIALIZABLE_AS_BLOCK_ENTRY
178#undef INSTRUCTION_REFS_SERIALIZABLE_AS_BLOCK_ENTRY
179
180void BlockEntryWithInitialDefs::WriteTo(FlowGraphSerializer* s) {
181 BlockEntryInstr::WriteTo(s);
182 s->Write<GrowableArray<Definition*>>(initial_definitions_);
183}
184
186 : BlockEntryInstr(d),
187 initial_definitions_(d->Read<GrowableArray<Definition*>>()) {
188 for (Definition* def : initial_definitions_) {
189 def->set_previous(this);
190 if (auto par = def->AsParameter()) {
191 par->set_block(this);
192 }
193 }
194}
195
196void BlockEntryWithInitialDefs::WriteExtra(FlowGraphSerializer* s) {
197 BlockEntryInstr::WriteExtra(s);
198 for (Definition* def : initial_definitions_) {
199 def->WriteExtra(s);
200 }
201}
202
203void BlockEntryWithInitialDefs::ReadExtra(FlowGraphDeserializer* d) {
204 BlockEntryInstr::ReadExtra(d);
205 for (Definition* def : initial_definitions_) {
206 def->ReadExtra(d);
207 }
208}
209
210template <>
212 bool x) {
213 s->stream()->Write<uint8_t>(x ? 1 : 0);
214}
215
216template <>
218 return (d->stream()->Read<uint8_t>() != 0);
219}
220
221void BranchInstr::WriteExtra(FlowGraphSerializer* s) {
222 // Branch reuses inputs from its embedded Comparison.
223 // Instruction::WriteExtra is not called to avoid
224 // writing/reading inputs twice.
225 WriteExtraWithoutInputs(s);
226 comparison_->WriteExtra(s);
227 s->WriteRef<TargetEntryInstr*>(true_successor_);
228 s->WriteRef<TargetEntryInstr*>(false_successor_);
229 s->WriteRef<TargetEntryInstr*>(constant_target_);
230}
231
232void BranchInstr::ReadExtra(FlowGraphDeserializer* d) {
233 ReadExtraWithoutInputs(d);
234 comparison_->ReadExtra(d);
235 for (intptr_t i = comparison_->InputCount() - 1; i >= 0; --i) {
236 comparison_->InputAt(i)->set_instruction(this);
237 }
238 true_successor_ = d->ReadRef<TargetEntryInstr*>();
239 false_successor_ = d->ReadRef<TargetEntryInstr*>();
240 constant_target_ = d->ReadRef<TargetEntryInstr*>();
241}
242
243template <>
248
249template <>
252 const Function& dart_signature = d->Read<const Function&>();
253 const char* error = nullptr;
254 return *compiler::ffi::CallbackMarshaller::FromFunction(
255 d->zone(), dart_signature, &error);
256}
257
258template <>
261 s->Write<const Function&>(x.dart_signature());
262 s->Write<int8_t>(x.dart_signature_params_start_at());
263 s->Write<const FunctionType&>(x.c_signature());
264}
265
266template <>
270 const Function& dart_signature = d->Read<const Function&>();
271 const intptr_t dart_signature_params_start_at = d->Read<int8_t>();
272 const FunctionType& c_signature = d->Read<const FunctionType&>();
273 const char* error = nullptr;
274 return *compiler::ffi::CallMarshaller::FromFunction(
275 d->zone(), dart_signature, dart_signature_params_start_at, c_signature,
276 &error);
277}
278
279template <>
285
286template <>
291
292void CallTargets::Write(FlowGraphSerializer* s) const {
293 const intptr_t len = cid_ranges_.length();
294 s->Write<intptr_t>(len);
295 for (intptr_t i = 0; i < len; ++i) {
296 TargetInfo* t = TargetAt(i);
297 s->Write<intptr_t>(t->cid_start);
298 s->Write<intptr_t>(t->cid_end);
299 s->Write<const Function&>(*(t->target));
300 s->Write<intptr_t>(t->count);
301 s->Write<int8_t>(t->exactness.Encode());
302 }
303}
304
305CallTargets::CallTargets(FlowGraphDeserializer* d) : Cids(d->zone()) {
306 const intptr_t len = d->Read<intptr_t>();
307 cid_ranges_.EnsureLength(len, nullptr);
308 for (intptr_t i = 0; i < len; ++i) {
309 const intptr_t cid_start = d->Read<intptr_t>();
310 const intptr_t cid_end = d->Read<intptr_t>();
311 const Function& target = d->Read<const Function&>();
312 const intptr_t count = d->Read<intptr_t>();
313 const StaticTypeExactnessState exactness =
314 StaticTypeExactnessState::Decode(d->Read<int8_t>());
315 TargetInfo* t = new (d->zone())
316 TargetInfo(cid_start, cid_end, &target, count, exactness);
317 cid_ranges_[i] = t;
318 }
319}
320
321void CatchBlockEntryInstr::WriteTo(FlowGraphSerializer* s) {
322 BlockEntryWithInitialDefs::WriteTo(s);
323 s->Write<const Array&>(catch_handler_types_);
324 s->Write<intptr_t>(catch_try_index_);
325 s->Write<bool>(needs_stacktrace_);
326 s->Write<bool>(is_generated_);
327}
328
329CatchBlockEntryInstr::CatchBlockEntryInstr(FlowGraphDeserializer* d)
330 : BlockEntryWithInitialDefs(d),
331 graph_entry_(d->graph_entry()),
332 predecessor_(nullptr),
333 catch_handler_types_(d->Read<const Array&>()),
334 catch_try_index_(d->Read<intptr_t>()),
335 exception_var_(nullptr),
336 stacktrace_var_(nullptr),
337 raw_exception_var_(nullptr),
338 raw_stacktrace_var_(nullptr),
339 needs_stacktrace_(d->Read<bool>()),
340 is_generated_(d->Read<bool>()) {}
341
342template <>
344 const char* x) {
345 ASSERT(x != nullptr);
346 const intptr_t len = strlen(x);
347 s->Write<intptr_t>(len);
348 s->stream()->WriteBytes(x, len);
349}
350
351template <>
354 const intptr_t len = d->Read<intptr_t>();
355 char* str = d->zone()->Alloc<char>(len + 1);
356 d->stream()->ReadBytes(str, len);
357 str[len] = 0;
358 return str;
359}
360
361void CheckConditionInstr::WriteExtra(FlowGraphSerializer* s) {
362 // CheckCondition reuses inputs from its embedded Comparison.
363 // Instruction::WriteExtra is not called to avoid
364 // writing/reading inputs twice.
366 comparison_->WriteExtra(s);
367}
368
369void CheckConditionInstr::ReadExtra(FlowGraphDeserializer* d) {
371 comparison_->ReadExtra(d);
372 for (intptr_t i = comparison_->InputCount() - 1; i >= 0; --i) {
373 comparison_->InputAt(i)->set_instruction(this);
374 }
375}
376
377template <>
381 s->Write<intptr_t>(x.cid_start);
382 s->Write<intptr_t>(x.cid_end);
383}
384
385template <>
388 const intptr_t cid_start = d->Read<intptr_t>();
389 const intptr_t cid_end = d->Read<intptr_t>();
390 return CidRangeValue(cid_start, cid_end);
391}
392
393template <>
395 const Cids& x) {
396 const intptr_t len = x.length();
397 s->Write<intptr_t>(len);
398 for (intptr_t i = 0; i < len; ++i) {
399 const CidRange* r = x.At(i);
400 s->Write<intptr_t>(r->cid_start);
401 s->Write<intptr_t>(r->cid_end);
402 }
403}
404
405template <>
408 Zone* zone = d->zone();
409 Cids* cids = new (zone) Cids(zone);
410 const intptr_t len = d->Read<intptr_t>();
411 for (intptr_t i = 0; i < len; ++i) {
412 const intptr_t cid_start = d->Read<intptr_t>();
413 const intptr_t cid_end = d->Read<intptr_t>();
414 CidRange* r = new (zone) CidRange(cid_start, cid_end);
415 cids->Add(r);
416 }
417 return *cids;
418}
419
420template <>
423 const Class& x) {
424 if (x.IsNull()) {
425 s->Write<classid_t>(kIllegalCid);
426 return;
427 }
428 s->Write<classid_t>(x.id());
429}
430
431template <>
434 const classid_t cid = d->Read<classid_t>();
435 if (cid == kIllegalCid) {
436 return Class::ZoneHandle(d->zone());
437 }
438 return Class::ZoneHandle(d->zone(), d->GetClassById(cid));
439}
440
441void ConstraintInstr::WriteExtra(FlowGraphSerializer* s) {
442 TemplateDefinition::WriteExtra(s);
443 s->WriteRef<TargetEntryInstr*>(target_);
444}
445
446void ConstraintInstr::ReadExtra(FlowGraphDeserializer* d) {
447 TemplateDefinition::ReadExtra(d);
448 target_ = d->ReadRef<TargetEntryInstr*>();
449}
450
451template <>
453 const Code& x) {
454 ASSERT(!x.IsNull());
455 ASSERT(x.IsStubCode());
456 for (intptr_t i = 0, n = StubCode::NumEntries(); i < n; ++i) {
457 if (StubCode::EntryAt(i).ptr() == x.ptr()) {
458 s->Write<intptr_t>(i);
459 return;
460 }
461 }
462 intptr_t index = StubCode::NumEntries();
463 ObjectStore* object_store = s->isolate_group()->object_store();
464#define MATCH(member, name) \
465 if (object_store->member() == x.ptr()) { \
466 s->Write<intptr_t>(index); \
467 return; \
468 } \
469 ++index;
471#undef MATCH
473}
474
475template <>
478 const intptr_t stub_id = d->Read<intptr_t>();
479 if (stub_id < StubCode::NumEntries()) {
480 return StubCode::EntryAt(stub_id);
481 }
482 intptr_t index = StubCode::NumEntries();
483 ObjectStore* object_store = d->isolate_group()->object_store();
484#define MATCH(member, name) \
485 if (index == stub_id) { \
486 return Code::ZoneHandle(d->zone(), object_store->member()); \
487 } \
488 ++index;
490#undef MATCH
492}
493
494template <>
497 CompileType* x) {
498 if (x == nullptr) {
499 s->Write<bool>(false);
500 } else {
501 s->Write<bool>(true);
502 x->Write(s);
503 }
504}
505
506template <>
509 if (!d->Read<bool>()) {
510 return nullptr;
511 }
512 return new (d->zone()) CompileType(d);
513}
514
516 s->Write<bool>(can_be_null_);
517 s->Write<bool>(can_be_sentinel_);
518 s->Write<classid_t>(cid_);
519 if (type_ == nullptr) {
520 s->Write<bool>(false);
521 } else {
522 s->Write<bool>(true);
523 s->Write<const AbstractType&>(*type_);
524 }
525}
526
528 : can_be_null_(d->Read<bool>()),
529 can_be_sentinel_(d->Read<bool>()),
530 cid_(d->Read<classid_t>()),
531 type_(nullptr) {
532 if (d->Read<bool>()) {
533 type_ = &d->Read<const AbstractType&>();
534 }
535}
536
537void Definition::WriteTo(FlowGraphSerializer* s) {
538 Instruction::WriteTo(s);
539 s->Write<Range*>(range_);
540 s->Write<intptr_t>(temp_index_);
541 s->Write<intptr_t>(ssa_temp_index_);
542 s->Write<CompileType*>(type_);
543}
544
545Definition::Definition(FlowGraphDeserializer* d)
546 : Instruction(d),
547 range_(d->Read<Range*>()),
548 temp_index_(d->Read<intptr_t>()),
549 ssa_temp_index_(d->Read<intptr_t>()),
550 type_(d->Read<CompileType*>()) {
551 if (HasSSATemp()) {
552 d->set_definition(ssa_temp_index(), this);
553 }
554 if (type_ != nullptr) {
555 type_->set_owner(this);
556 }
557}
558
559template <>
562 Definition* x) {
563 if (!x->HasSSATemp()) {
564 if (auto* move_arg = x->AsMoveArgument()) {
565 // Environments of the calls can reference MoveArgument instructions
566 // and they don't have SSA temps.
567 // Write a reference to the original definition.
568 // When reading it is restored using RepairArgumentUsesInEnvironment.
569 x = move_arg->value()->definition();
570 } else {
571 UNREACHABLE();
572 }
573 }
574 ASSERT(x->HasSSATemp());
575 ASSERT(s->can_write_refs());
576 s->Write<intptr_t>(x->ssa_temp_index());
577}
578
579template <>
582 return d->definition(d->Read<intptr_t>());
583}
584
585template <>
587 double x) {
588 s->stream()->Write<int64_t>(bit_cast<int64_t>(x));
589}
590
591template <>
594 return bit_cast<double>(d->stream()->Read<int64_t>());
595}
596
597template <>
600 Environment* x) {
601 ASSERT(s->can_write_refs());
602 if (x == nullptr) {
603 s->Write<bool>(false);
604 } else {
605 s->Write<bool>(true);
606 x->Write(s);
607 }
608}
609
610template <>
613 if (!d->Read<bool>()) {
614 return nullptr;
615 }
616 return new (d->zone()) Environment(d);
617}
618
620 s->Write<GrowableArray<Value*>>(values_);
621 s->Write<intptr_t>(fixed_parameter_count_);
622 s->Write<uintptr_t>(bitfield_);
623 s->Write<const Function&>(function_);
624 s->Write<Environment*>(outer_);
625 if (locations_ == nullptr) {
626 s->Write<bool>(false);
627 } else {
628 s->Write<bool>(true);
629 for (intptr_t i = 0, n = values_.length(); i < n; ++i) {
630 locations_[i].Write(s);
631 }
632 }
633}
634
636 : values_(d->Read<GrowableArray<Value*>>()),
637 locations_(nullptr),
638 fixed_parameter_count_(d->Read<intptr_t>()),
639 bitfield_(d->Read<uintptr_t>()),
640 function_(d->Read<const Function&>()),
641 outer_(d->Read<Environment*>()) {
642 for (intptr_t i = 0, n = values_.length(); i < n; ++i) {
643 Value* value = values_[i];
644 value->definition()->AddEnvUse(value);
645 }
646 if (d->Read<bool>()) {
647 locations_ = d->zone()->Alloc<Location>(values_.length());
648 for (intptr_t i = 0, n = values_.length(); i < n; ++i) {
649 locations_[i] = Location::Read(d);
650 }
651 }
652}
653
655 const FlowGraph& flow_graph,
656 const ZoneGrowableArray<Definition*>& detached_defs) {
657 ASSERT(!flow_graph.is_licm_allowed());
658
659 Write<intptr_t>(flow_graph.current_ssa_temp_index());
660 Write<intptr_t>(flow_graph.max_block_id());
661 Write<intptr_t>(flow_graph.inlining_id());
662 Write<const Array&>(flow_graph.coverage_array());
663
664 PrologueInfo prologue_info = flow_graph.prologue_info();
665 Write<intptr_t>(prologue_info.min_block_id);
666 Write<intptr_t>(prologue_info.max_block_id);
667
668 // Write instructions
669 for (auto block : flow_graph.reverse_postorder()) {
670 Write<Instruction*>(block);
671 for (auto current : block->instructions()) {
672 Write<Instruction*>(current);
673 }
674 }
675 Write<Instruction*>(nullptr);
676 Write<const ZoneGrowableArray<Definition*>&>(detached_defs);
677 can_write_refs_ = true;
678
679 // Write instructions extra info.
680 // It may contain references to other instructions.
681 for (auto block : flow_graph.reverse_postorder()) {
682 block->WriteExtra(this);
683 for (auto current : block->instructions()) {
684 current->WriteExtra(this);
685 }
686 }
687 for (auto* instr : detached_defs) {
688 instr->WriteExtra(this);
689 }
690
691 const auto& optimized_block_order = flow_graph.optimized_block_order();
692 Write<intptr_t>(optimized_block_order.length());
693 for (intptr_t i = 0, n = optimized_block_order.length(); i < n; ++i) {
694 WriteRef<BlockEntryInstr*>(optimized_block_order[i]);
695 }
696
697 const auto* captured_parameters = flow_graph.captured_parameters();
698 if (captured_parameters->IsEmpty()) {
699 Write<bool>(false);
700 } else {
701 Write<bool>(true);
702 // Captured parameters are rare so write their bit numbers
703 // instead of writing BitVector.
704 GrowableArray<intptr_t> indices(Z, 0);
705 for (intptr_t i = 0, n = captured_parameters->length(); i < n; ++i) {
706 if (captured_parameters->Contains(i)) {
707 indices.Add(i);
708 }
709 }
710 Write<GrowableArray<intptr_t>>(indices);
711 }
712}
713
715 const intptr_t current_ssa_temp_index = Read<intptr_t>();
716 const intptr_t max_block_id = Read<intptr_t>();
717 const intptr_t inlining_id = Read<intptr_t>();
718 const Array& coverage_array = Read<const Array&>();
719 const PrologueInfo prologue_info(Read<intptr_t>(), Read<intptr_t>());
720
721 definitions_.EnsureLength(current_ssa_temp_index, nullptr);
722 blocks_.EnsureLength(max_block_id + 1, nullptr);
723
724 // Read/create instructions.
725 ZoneGrowableArray<Instruction*> instructions(16);
726 Instruction* prev = nullptr;
727 while (Instruction* instr = Read<Instruction*>()) {
728 instructions.Add(instr);
729 if (!instr->IsBlockEntry()) {
730 ASSERT(prev != nullptr);
731 prev->LinkTo(instr);
732 }
733 prev = instr;
734 }
735 ASSERT(graph_entry_ != nullptr);
736 const auto& detached_defs = Read<const ZoneGrowableArray<Definition*>&>();
737
738 // Read instructions extra info.
739 // It may contain references to other instructions.
740 for (Instruction* instr : instructions) {
741 instr->ReadExtra(this);
742 }
743 for (auto* instr : detached_defs) {
744 instr->ReadExtra(this);
745 }
746
747 FlowGraph* flow_graph =
748 new (Z) FlowGraph(parsed_function(), graph_entry_, max_block_id,
750 flow_graph->set_current_ssa_temp_index(current_ssa_temp_index);
751 flow_graph->CreateCommonConstants();
752 flow_graph->disallow_licm();
753 flow_graph->set_inlining_id(inlining_id);
754 flow_graph->set_coverage_array(coverage_array);
755
756 {
757 const intptr_t num_blocks = Read<intptr_t>();
758 if (num_blocks != 0) {
759 auto* codegen_block_order = flow_graph->CodegenBlockOrder();
760 ASSERT(codegen_block_order == &flow_graph->optimized_block_order());
761 for (intptr_t i = 0; i < num_blocks; ++i) {
762 codegen_block_order->Add(ReadRef<BlockEntryInstr*>());
763 }
764 }
765 }
766
767 if (Read<bool>()) {
768 GrowableArray<intptr_t> indices = Read<GrowableArray<intptr_t>>();
769 for (intptr_t i : indices) {
770 flow_graph->captured_parameters()->Add(i);
771 }
772 }
773
774 return flow_graph;
775}
776
777template <>
780 const Function& x) {
781 if (x.IsNull()) {
782 s->Write<int8_t>(-1);
783 return;
784 }
785 Zone* zone = s->zone();
786 s->Write<int8_t>(x.kind());
787 switch (x.kind()) {
788 case UntaggedFunction::kRegularFunction:
789 case UntaggedFunction::kGetterFunction:
790 case UntaggedFunction::kSetterFunction:
791 case UntaggedFunction::kImplicitGetter:
792 case UntaggedFunction::kImplicitSetter:
793 case UntaggedFunction::kImplicitStaticGetter:
794 case UntaggedFunction::kConstructor: {
795 const auto& owner = Class::Handle(zone, x.Owner());
796 s->Write<classid_t>(owner.id());
797 const intptr_t function_index = owner.FindFunctionIndex(x);
798 ASSERT(function_index >= 0);
799 s->Write<intptr_t>(function_index);
800 return;
801 }
802 case UntaggedFunction::kImplicitClosureFunction: {
803 const auto& parent = Function::Handle(zone, x.parent_function());
804 s->Write<const Function&>(parent);
805 return;
806 }
807 case UntaggedFunction::kFieldInitializer: {
808 const auto& field = Field::Handle(zone, x.accessor_field());
809 s->Write<const Field&>(field);
810 return;
811 }
812 case UntaggedFunction::kClosureFunction:
813 // TODO(alexmarkov): we cannot rely on ClosureFunctionsCache
814 // as it is lazily populated when compiling functions.
815 // We need to serialize kernel offset and re-create
816 // closure functions when reading as needed.
818 return;
819 case UntaggedFunction::kMethodExtractor: {
820 Function& function = Function::Handle(zone, x.extracted_method_closure());
821 ASSERT(function.IsImplicitClosureFunction());
822 function = function.parent_function();
823 s->Write<const Function&>(function);
824 s->Write<const String&>(String::Handle(zone, x.name()));
825 return;
826 }
827 case UntaggedFunction::kInvokeFieldDispatcher: {
828 s->Write<const Class&>(Class::Handle(zone, x.Owner()));
829 s->Write<const String&>(String::Handle(zone, x.name()));
830 s->Write<const Array&>(Array::Handle(zone, x.saved_args_desc()));
831 return;
832 }
833 case UntaggedFunction::kDynamicInvocationForwarder: {
834 const auto& target = Function::Handle(zone, x.ForwardingTarget());
835 s->Write<const Function&>(target);
836 return;
837 }
838 case UntaggedFunction::kFfiTrampoline: {
839 s->Write<uint8_t>(static_cast<uint8_t>(x.GetFfiCallbackKind()));
840 s->Write<const FunctionType&>(
841 FunctionType::Handle(zone, x.FfiCSignature()));
842 s->Write<const Function&>(Function::Handle(zone, x.FfiCallbackTarget()));
843 s->Write<const Instance&>(
844 Instance::Handle(zone, x.FfiCallbackExceptionalReturn()));
845 return;
846 }
847 default:
848 break;
849 }
850 switch (x.kind()) {
851#define UNIMPLEMENTED_FUNCTION_KIND(kind) \
852 case UntaggedFunction::k##kind: \
853 FATAL("Unimplemented WriteTrait<const Function&>::Write for " #kind);
855#undef UNIMPLEMENTED_FUNCTION_KIND
856 }
857 UNREACHABLE();
858}
859
860template <>
863 const int8_t raw_kind = d->Read<int8_t>();
864 if (raw_kind < 0) {
865 return Object::null_function();
866 }
867 Zone* zone = d->zone();
868 const auto kind = static_cast<UntaggedFunction::Kind>(raw_kind);
869 switch (kind) {
870 case UntaggedFunction::kRegularFunction:
871 case UntaggedFunction::kGetterFunction:
872 case UntaggedFunction::kSetterFunction:
873 case UntaggedFunction::kImplicitGetter:
874 case UntaggedFunction::kImplicitSetter:
875 case UntaggedFunction::kImplicitStaticGetter:
876 case UntaggedFunction::kConstructor: {
877 const classid_t owner_class_id = d->Read<classid_t>();
878 const intptr_t function_index = d->Read<intptr_t>();
879 const auto& owner = Class::Handle(zone, d->GetClassById(owner_class_id));
880 const auto& result =
881 Function::ZoneHandle(zone, owner.FunctionFromIndex(function_index));
882 ASSERT(!result.IsNull());
883 return result;
884 }
885 case UntaggedFunction::kImplicitClosureFunction: {
886 const auto& parent = d->Read<const Function&>();
887 return Function::ZoneHandle(zone, parent.ImplicitClosureFunction());
888 }
889 case UntaggedFunction::kFieldInitializer: {
890 const auto& field = d->Read<const Field&>();
891 return Function::ZoneHandle(zone, field.EnsureInitializerFunction());
892 }
893 case UntaggedFunction::kClosureFunction: {
894 const intptr_t index = d->Read<intptr_t>();
897 }
898 case UntaggedFunction::kMethodExtractor: {
899 const Function& function = d->Read<const Function&>();
900 const String& name = d->Read<const String&>();
901 return Function::ZoneHandle(zone, function.GetMethodExtractor(name));
902 }
903 case UntaggedFunction::kInvokeFieldDispatcher: {
904 const Class& owner = d->Read<const Class&>();
905 const String& target_name = d->Read<const String&>();
906 const Array& args_desc = d->Read<const Array&>();
908 zone,
910 target_name, args_desc, UntaggedFunction::kInvokeFieldDispatcher,
911 /*create_if_absent=*/true));
912 }
913 case UntaggedFunction::kDynamicInvocationForwarder: {
914 const auto& target = d->Read<const Function&>();
915 auto& name = String::Handle(zone, target.name());
917 return Function::ZoneHandle(zone,
918 target.GetDynamicInvocationForwarder(name));
919 }
920 case UntaggedFunction::kFfiTrampoline: {
921 const FfiCallbackKind kind =
922 static_cast<FfiCallbackKind>(d->Read<uint8_t>());
923 const FunctionType& c_signature = d->Read<const FunctionType&>();
924 const Function& callback_target = d->Read<const Function&>();
925 const Instance& exceptional_return = d->Read<const Instance&>();
928 c_signature, callback_target, exceptional_return, kind));
929 }
930 default:
932 return Object::null_function();
933 }
934}
935
936void FunctionEntryInstr::WriteTo(FlowGraphSerializer* s) {
937 BlockEntryWithInitialDefs::WriteTo(s);
938}
939
941 : BlockEntryWithInitialDefs(d), graph_entry_(d->graph_entry()) {}
942
943void GraphEntryInstr::WriteTo(FlowGraphSerializer* s) {
944 BlockEntryWithInitialDefs::WriteTo(s);
945 s->Write<intptr_t>(osr_id_);
946 s->Write<intptr_t>(entry_count_);
947 s->Write<intptr_t>(spill_slot_count_);
948 s->Write<intptr_t>(fixed_slot_count_);
949 s->Write<bool>(needs_frame_);
950}
951
952GraphEntryInstr::GraphEntryInstr(FlowGraphDeserializer* d)
953 : BlockEntryWithInitialDefs(d),
954 parsed_function_(d->parsed_function()),
955 osr_id_(d->Read<intptr_t>()),
956 entry_count_(d->Read<intptr_t>()),
957 spill_slot_count_(d->Read<intptr_t>()),
958 fixed_slot_count_(d->Read<intptr_t>()),
959 needs_frame_(d->Read<bool>()) {
960 d->set_graph_entry(this);
961}
962
963void GraphEntryInstr::WriteExtra(FlowGraphSerializer* s) {
964 BlockEntryWithInitialDefs::WriteExtra(s);
965 s->WriteRef<FunctionEntryInstr*>(normal_entry_);
966 s->WriteRef<FunctionEntryInstr*>(unchecked_entry_);
967 s->WriteRef<OsrEntryInstr*>(osr_entry_);
968 s->WriteGrowableArrayOfRefs<CatchBlockEntryInstr*>(catch_entries_);
969 s->WriteGrowableArrayOfRefs<IndirectEntryInstr*>(indirect_entries_);
970}
971
972void GraphEntryInstr::ReadExtra(FlowGraphDeserializer* d) {
973 BlockEntryWithInitialDefs::ReadExtra(d);
974 normal_entry_ = d->ReadRef<FunctionEntryInstr*>();
975 unchecked_entry_ = d->ReadRef<FunctionEntryInstr*>();
976 osr_entry_ = d->ReadRef<OsrEntryInstr*>();
977 catch_entries_ = d->ReadGrowableArrayOfRefs<CatchBlockEntryInstr*>();
978 indirect_entries_ = d->ReadGrowableArrayOfRefs<IndirectEntryInstr*>();
979}
980
981void GotoInstr::WriteExtra(FlowGraphSerializer* s) {
982 TemplateInstruction::WriteExtra(s);
983 if (parallel_move_ != nullptr) {
984 parallel_move_->WriteExtra(s);
985 }
986 s->WriteRef<JoinEntryInstr*>(successor_);
987}
988
989void GotoInstr::ReadExtra(FlowGraphDeserializer* d) {
990 TemplateInstruction::ReadExtra(d);
991 if (parallel_move_ != nullptr) {
992 parallel_move_->ReadExtra(d);
993 }
994 successor_ = d->ReadRef<JoinEntryInstr*>();
995}
996
997template <>
1000 const ICData* x) {
1001 if (x == nullptr) {
1002 s->Write<bool>(false);
1003 } else {
1004 s->Write<bool>(true);
1005 ASSERT(!x->IsNull());
1006 s->Write<const Object&>(*x);
1007 }
1008}
1009
1010template <>
1013 if (!d->Read<bool>()) {
1014 return nullptr;
1015 }
1016 return &ICData::Cast(d->Read<const Object&>());
1017}
1018
1019void IfThenElseInstr::WriteExtra(FlowGraphSerializer* s) {
1020 // IfThenElse reuses inputs from its embedded Comparison.
1021 // Definition::WriteExtra is not called to avoid
1022 // writing/reading inputs twice.
1023 WriteExtraWithoutInputs(s);
1024 comparison_->WriteExtra(s);
1025}
1026
1027void IfThenElseInstr::ReadExtra(FlowGraphDeserializer* d) {
1028 ReadExtraWithoutInputs(d);
1029 comparison_->ReadExtra(d);
1030 for (intptr_t i = comparison_->InputCount() - 1; i >= 0; --i) {
1031 comparison_->InputAt(i)->set_instruction(this);
1032 }
1033}
1034
1035void IndirectGotoInstr::WriteTo(FlowGraphSerializer* s) {
1036 TemplateInstruction::WriteTo(s);
1037 s->Write<intptr_t>(offsets_.Length());
1038}
1039
1040IndirectGotoInstr::IndirectGotoInstr(FlowGraphDeserializer* d)
1041 : TemplateInstruction(d),
1042 offsets_(TypedData::ZoneHandle(d->zone(),
1043 TypedData::New(kTypedDataInt32ArrayCid,
1044 d->Read<intptr_t>(),
1045 Heap::kOld))) {}
1046
1047void IndirectGotoInstr::WriteExtra(FlowGraphSerializer* s) {
1048 TemplateInstruction::WriteExtra(s);
1049 s->WriteGrowableArrayOfRefs<TargetEntryInstr*>(successors_);
1050}
1051
1052void IndirectGotoInstr::ReadExtra(FlowGraphDeserializer* d) {
1053 TemplateInstruction::ReadExtra(d);
1054 successors_ = d->ReadGrowableArrayOfRefs<TargetEntryInstr*>();
1055}
1056
1057template <>
1060 Instruction* x) {
1061 if (x == nullptr) {
1062 s->Write<uint8_t>(Instruction::kNumInstructions);
1063 } else {
1064 s->Write<uint8_t>(static_cast<uint8_t>(x->tag()));
1065 x->WriteTo(s);
1066 }
1067}
1068
1069template <>
1072 const uint8_t tag = d->Read<uint8_t>();
1073 switch (tag) {
1074#define READ_INSTRUCTION(type, attrs) \
1075 case Instruction::k##type: \
1076 return new (d->zone()) type##Instr(d);
1078#undef READ_INSTRUCTION
1079 case Instruction::kNumInstructions:
1080 return nullptr;
1081 }
1082 UNREACHABLE();
1083 return nullptr;
1084}
1085
1086void Instruction::WriteTo(FlowGraphSerializer* s) {
1087 s->Write<intptr_t>(deopt_id_);
1088 s->Write<intptr_t>(inlining_id_);
1089}
1090
1092 : deopt_id_(d->Read<intptr_t>()), inlining_id_(d->Read<intptr_t>()) {}
1093
1094void Instruction::WriteExtra(FlowGraphSerializer* s) {
1095 for (intptr_t i = 0, n = InputCount(); i < n; ++i) {
1096 s->Write<Value*>(InputAt(i));
1097 }
1098 WriteExtraWithoutInputs(s);
1099}
1100
1101void Instruction::ReadExtra(FlowGraphDeserializer* d) {
1102 for (intptr_t i = 0, n = InputCount(); i < n; ++i) {
1103 SetInputAt(i, d->Read<Value*>());
1104 }
1105 for (intptr_t i = InputCount() - 1; i >= 0; --i) {
1106 Value* input = InputAt(i);
1107 input->definition()->AddInputUse(input);
1108 }
1109 ReadExtraWithoutInputs(d);
1110}
1111
1113 s->Write<Environment*>(env_);
1114 s->Write<LocationSummary*>(locs_);
1115}
1116
1118 Environment* env = d->Read<Environment*>();
1119 SetEnvironment(env);
1120 locs_ = d->Read<LocationSummary*>();
1121}
1122
1123#define INSTRUCTIONS_SERIALIZABLE_AS_INSTRUCTION(V) \
1124 V(Comparison, ComparisonInstr) \
1125 V(Constant, ConstantInstr) \
1126 V(Definition, Definition) \
1127 V(ParallelMove, ParallelMoveInstr) \
1128 V(Phi, PhiInstr)
1129
1130#define SERIALIZABLE_AS_INSTRUCTION(name, type) \
1131 template <> \
1132 void FlowGraphSerializer::WriteTrait<type*>::Write(FlowGraphSerializer* s, \
1133 type* x) { \
1134 s->Write<Instruction*>(x); \
1135 } \
1136 template <> \
1137 type* FlowGraphDeserializer::ReadTrait<type*>::Read( \
1138 FlowGraphDeserializer* d) { \
1139 Instruction* instr = d->Read<Instruction*>(); \
1140 ASSERT((instr == nullptr) || instr->Is##name()); \
1141 return static_cast<type*>(instr); \
1142 }
1143
1145#undef SERIALIZABLE_AS_INSTRUCTION
1146#undef INSTRUCTIONS_SERIALIZABLE_AS_INSTRUCTION
1147
1148template <>
1150 int8_t x) {
1151 s->stream()->Write<int8_t>(x);
1152}
1153
1154template <>
1157 return d->stream()->Read<int8_t>();
1158}
1159
1160template <>
1162 int16_t x) {
1163 s->stream()->Write<int16_t>(x);
1164}
1165
1166template <>
1169 return d->stream()->Read<int16_t>();
1170}
1171
1172template <>
1174 int32_t x) {
1175 s->stream()->Write<int32_t>(x);
1176}
1177
1178template <>
1181 return d->stream()->Read<int32_t>();
1182}
1183
1184template <>
1186 int64_t x) {
1187 s->stream()->Write<int64_t>(x);
1188}
1189
1190template <>
1193 return d->stream()->Read<int64_t>();
1194}
1195
1196void JoinEntryInstr::WriteExtra(FlowGraphSerializer* s) {
1197 BlockEntryInstr::WriteExtra(s);
1198 if (phis_ != nullptr) {
1199 for (PhiInstr* phi : *phis_) {
1200 phi->WriteExtra(s);
1201 }
1202 }
1203}
1204
1205void JoinEntryInstr::ReadExtra(FlowGraphDeserializer* d) {
1206 BlockEntryInstr::ReadExtra(d);
1207 if (phis_ != nullptr) {
1208 for (PhiInstr* phi : *phis_) {
1209 phi->ReadExtra(d);
1210 }
1211 }
1212}
1213
1214template <>
1220
1221template <>
1222const LocalVariable&
1225 UNIMPLEMENTED();
1226 return *d->parsed_function().receiver_var();
1227}
1228
  if (IsPairLocation()) {
    // Pair location: emit only the tag bits, then each half recursively.
    s->Write<uword>(value_ & kLocationTagMask);
    PairLocation* pair = AsPairLocation();
    pair->At(0).Write(s);
    pair->At(1).Write(s);
  } else if (IsConstant()) {
    // Constant location: tag bits plus a reference to the defining
    // ConstantInstr (resolved via WriteRef rather than inline).
    s->Write<uword>(value_ & kLocationTagMask);
    s->WriteRef<Definition*>(constant_instruction());
  } else {
    // All other locations are fully described by their payload word.
    s->Write<uword>(value_);
  }
}
1242
  const uword value = d->Read<uword>();
  if (value == kPairLocationTag) {
    // Mirrors Location::Write: read both halves of a pair location.
    const Location first = Location::Read(d);
    const Location second = Location::Read(d);
    return Location::Pair(first, second);
  } else if ((value & kConstantTag) == kConstantTag) {
    ConstantInstr* instr = d->ReadRef<Definition*>()->AsConstant();
    ASSERT(instr != nullptr);
    // The pair bit within a constant tag selects which half of a pair
    // this constant occupies.
    const int pair_index = (value & kPairLocationTag) != 0 ? 1 : 0;
    return Location::Constant(instr, pair_index);
  } else {
    // Plain location: the payload word is the complete encoding.
    return Location(value);
  }
}
1258
1259template <>
1262 LocationSummary* x) {
1263 ASSERT(s->can_write_refs());
1264 if (x == nullptr) {
1265 s->Write<bool>(false);
1266 } else {
1267 s->Write<bool>(true);
1268 x->Write(s);
1269 }
1270}
1271
1272template <>
1275 if (!d->Read<bool>()) {
1276 return nullptr;
1277 }
1278 return new (d->zone()) LocationSummary(d);
1279}
1280
  // Fixed-shape header: counts, call kind, and the live register set.
  // Must stay in sync with the LocationSummary deserializing constructor.
  s->Write<intptr_t>(input_count());
  s->Write<intptr_t>(temp_count());
  s->Write<int8_t>(static_cast<int8_t>(contains_call_));
  live_registers_.Write(s);

  for (intptr_t i = 0, n = input_count(); i < n; ++i) {
    in(i).Write(s);
  }
  for (intptr_t i = 0, n = temp_count(); i < n; ++i) {
    temp(i).Write(s);
  }
  // Only single-output summaries are supported by this format.
  ASSERT(output_count() == 1);
  out(0).Write(s);

  // The stack bitmap is optional; a one-byte flag records its presence.
  if ((stack_bitmap_ != nullptr) && (stack_bitmap_->Length() != 0)) {
    s->Write<int8_t>(1);
    stack_bitmap_->Write(s->stream());
  } else {
    s->Write<int8_t>(0);
  }

#if defined(DEBUG)
  s->Write<intptr_t>(writable_inputs_);
#endif
}
1307
    // Initializers evaluate in member declaration order, which must match
    // the write order in LocationSummary::Write (counts, call kind,
    // live registers).
    : num_inputs_(d->Read<intptr_t>()),
      num_temps_(d->Read<intptr_t>()),
      output_location_(),  // Filled in below, after inputs and temps.
      stack_bitmap_(nullptr),
      contains_call_(static_cast<ContainsCall>(d->Read<int8_t>())),
      live_registers_(d) {
  input_locations_ = d->zone()->Alloc<Location>(num_inputs_);
  for (intptr_t i = 0; i < num_inputs_; ++i) {
    input_locations_[i] = Location::Read(d);
  }
  temp_locations_ = d->zone()->Alloc<Location>(num_temps_);
  for (intptr_t i = 0; i < num_temps_; ++i) {
    temp_locations_[i] = Location::Read(d);
  }
  output_location_ = Location::Read(d);

  // Optional stack bitmap, present iff the flag byte is nonzero.
  if (d->Read<int8_t>() != 0) {
    EnsureStackBitmap().Read(d->stream());
  }

#if defined(DEBUG)
  writable_inputs_ = d->Read<intptr_t>();
#endif
}
1333
// Writes the definition extras plus the extras of the nested null_
// definition owned by this instruction.
void MakeTempInstr::WriteExtra(FlowGraphSerializer* s) {
  TemplateDefinition::WriteExtra(s);
  null_->WriteExtra(s);
}
1338
// Mirror of MakeTempInstr::WriteExtra: reads the definition extras, then
// the extras of the nested null_ definition.
void MakeTempInstr::ReadExtra(FlowGraphDeserializer* d) {
  TemplateDefinition::ReadExtra(d);
  null_->ReadExtra(d);
}
1343
1344void MaterializeObjectInstr::WriteExtra(FlowGraphSerializer* s) {
1345 VariadicDefinition::WriteExtra(s);
1346 for (intptr_t i = 0, n = InputCount(); i < n; ++i) {
1347 locations_[i].Write(s);
1348 }
1349}
1350
1351void MaterializeObjectInstr::ReadExtra(FlowGraphDeserializer* d) {
1352 VariadicDefinition::ReadExtra(d);
1353 locations_ = d->zone()->Alloc<Location>(InputCount());
1354 for (intptr_t i = 0, n = InputCount(); i < n; ++i) {
1355 locations_[i] = Location::Read(d);
1356 }
1357}
1358
1359template <>
1365
1366template <>
1371
1373 dest().Write(s);
1374 src().Write(s);
1375}
1376
1379
1380template <>
1383 const Object& x) {
1384 const intptr_t cid = x.GetClassId();
1386 // Do not write objects repeatedly.
1387 const intptr_t object_id = s->heap()->GetObjectId(x.ptr());
1388 if (object_id > 0) {
1389 const intptr_t object_index = object_id - 1;
1390 s->Write<intptr_t>(kIllegalCid);
1391 s->Write<intptr_t>(object_index);
1392 return;
1393 }
1394 const intptr_t object_index = s->object_counter_++;
1395 s->heap()->SetObjectId(x.ptr(), object_index + 1);
1396 s->Write<intptr_t>(cid);
1397 s->WriteObjectImpl(x, cid, object_index);
1398}
1399
1400template <>
  // Objects are written at most once and back-referenced afterwards:
  // kIllegalCid marks a back-reference followed by the table index of the
  // previously materialized object.
  const intptr_t cid = d->Read<intptr_t>();
  if (cid == kIllegalCid) {
    const intptr_t object_index = d->Read<intptr_t>();
    return *(d->objects_[object_index]);
  }
  // Claim the next index before reading the payload so nested reads stay
  // in sync with the serializer's numbering.
  const intptr_t object_index = d->object_counter_;
  d->object_counter_++;
  const Object& result = d->ReadObjectImpl(cid, object_index);
  d->SetObjectAt(object_index, result);
  return result;
}
1414
// Records |object| under |object_index| so later back-references can
// resolve to it; grows the table as needed, padding with the null object.
void FlowGraphDeserializer::SetObjectAt(intptr_t object_index,
                                        const Object& object) {
  objects_.EnsureLength(object_index + 1, &Object::null_object());
  objects_[object_index] = &object;
}
1420
1421bool FlowGraphSerializer::IsWritten(const Object& obj) {
1422 const intptr_t object_id = heap()->GetObjectId(obj.ptr());
1423 return (object_id != 0);
1424}
1425
1426bool FlowGraphSerializer::HasEnclosingTypes(const Object& obj) {
1427 if (num_free_fun_type_params_ == 0) return false;
1428 if (obj.IsAbstractType()) {
1429 return !AbstractType::Cast(obj).IsInstantiated(kFunctions,
1430 num_free_fun_type_params_);
1431 } else if (obj.IsTypeArguments()) {
1432 return !TypeArguments::Cast(obj).IsInstantiated(kFunctions,
1433 num_free_fun_type_params_);
1434 } else {
1435 UNREACHABLE();
1436 }
1437}
1438
// If |obj| references free function type parameters, writes the owning
// function types first (see WriteEnclosingTypes) and then |obj| itself,
// returning true. Otherwise emits a 'false' marker and returns false so the
// caller writes |obj|'s payload. The reader-side counterpart is
// ReadObjectWithEnclosingTypes.
bool FlowGraphSerializer::WriteObjectWithEnclosingTypes(const Object& obj) {
  if (HasEnclosingTypes(obj)) {
    Write<bool>(true);
    // Reset assigned object id so it could be written
    // while writing enclosing types.
    heap()->SetObjectId(obj.ptr(), -1);
    WriteEnclosingTypes(obj, num_free_fun_type_params_);
    // Terminator for the reader's enclosing-types loop.
    Write<bool>(false);
    // Can write any type parameters after all enclosing types are written.
    const intptr_t saved_num_free_fun_type_params = num_free_fun_type_params_;
    num_free_fun_type_params_ = 0;
    Write<const Object&>(obj);
    num_free_fun_type_params_ = saved_num_free_fun_type_params;
    return true;
  } else {
    Write<bool>(false);
    return false;
  }
}
1458
// Recursively walks |obj| and writes (each preceded by a 'true' marker)
// every not-yet-written FunctionType that owns a function type parameter
// free within the first |num_free_fun_type_params| indices.
void FlowGraphSerializer::WriteEnclosingTypes(
    const Object& obj,
    intptr_t num_free_fun_type_params) {
  if (obj.IsType()) {
    // An interface type can only reference type parameters through its
    // type arguments.
    const auto& type = Type::Cast(obj);
    if (type.arguments() != TypeArguments::null()) {
      const auto& type_args = TypeArguments::Handle(Z, type.arguments());
      WriteEnclosingTypes(type_args, num_free_fun_type_params);
    }
  } else if (obj.IsRecordType()) {
    const auto& rec = RecordType::Cast(obj);
    auto& elem = AbstractType::Handle(Z);
    for (intptr_t i = 0, n = rec.NumFields(); i < n; ++i) {
      elem = rec.FieldTypeAt(i);
      WriteEnclosingTypes(elem, num_free_fun_type_params);
    }
  } else if (obj.IsFunctionType()) {
    const auto& sig = FunctionType::Cast(obj);
    // Clamp the free count to the parent type arguments: parameters
    // declared by this signature itself are not free inside it.
    const intptr_t num_parent_type_args = sig.NumParentTypeArguments();
    if (num_free_fun_type_params > num_parent_type_args) {
      num_free_fun_type_params = num_parent_type_args;
    }
    AbstractType& elem = AbstractType::Handle(Z, sig.result_type());
    WriteEnclosingTypes(elem, num_free_fun_type_params);
    for (intptr_t i = 0, n = sig.NumParameters(); i < n; ++i) {
      elem = sig.ParameterTypeAt(i);
      WriteEnclosingTypes(elem, num_free_fun_type_params);
    }
    if (sig.IsGeneric()) {
      const TypeParameters& type_params =
          TypeParameters::Handle(Z, sig.type_parameters());
      WriteEnclosingTypes(TypeArguments::Handle(Z, type_params.bounds()),
                          num_free_fun_type_params);
    }
  } else if (obj.IsTypeParameter()) {
    const auto& tp = TypeParameter::Cast(obj);
    if (tp.IsFunctionTypeParameter() &&
        (tp.index() < num_free_fun_type_params)) {
      const auto& owner =
          FunctionType::Handle(Z, tp.parameterized_function_type());
      if (!IsWritten(owner)) {
        // 'true' marker consumed by ReadObjectWithEnclosingTypes' loop.
        Write<bool>(true);
        Write<const Object&>(owner);
      }
    }
  } else if (obj.IsTypeArguments()) {
    const auto& type_args = TypeArguments::Cast(obj);
    auto& elem = AbstractType::Handle(Z);
    for (intptr_t i = 0, n = type_args.Length(); i < n; ++i) {
      elem = type_args.TypeAt(i);
      WriteEnclosingTypes(elem, num_free_fun_type_params);
    }
  }
}
1513
// Reader-side counterpart of WriteObjectWithEnclosingTypes. If the leading
// marker is set, reads the owners of free function type parameters (each
// preceded by a 'true' marker) until the 'false' terminator, then the
// object itself. Returns the null object when nothing was written here.
const Object& FlowGraphDeserializer::ReadObjectWithEnclosingTypes() {
  if (Read<bool>()) {
    while (Read<bool>()) {
      // Reading registers the enclosing type in the object table; the
      // value itself is not needed here.
      Read<const Object&>();
    }
    return Read<const Object&>();
  } else {
    return Object::null_object();
  }
}
1524
// Writes the payload of |x| for class id |cid|. |object_index| is the
// sequence number assigned to |x| by the Object write trait, mirroring the
// reader-side registration slot.
void FlowGraphSerializer::WriteObjectImpl(const Object& x,
                                          intptr_t cid,
                                          intptr_t object_index) {
  switch (cid) {
    case kArrayCid:
    case kImmutableArrayCid: {
      const auto& array = Array::Cast(x);
      const intptr_t len = array.Length();
      Write<intptr_t>(len);
      const auto& type_args =
          TypeArguments::Handle(Z, array.GetTypeArguments());
      Write<const TypeArguments&>(type_args);
      // An empty, untyped array needs no further payload: the reader
      // substitutes the canonical empty array.
      if ((len == 0) && type_args.IsNull()) {
        break;
      }
      Write<bool>(array.IsCanonical());
      auto& elem = Object::Handle(Z);
      for (intptr_t i = 0; i < len; ++i) {
        elem = array.At(i);
        Write<const Object&>(elem);
      }
      break;
    }
1548 case kBoolCid:
1549 Write<bool>(Bool::Cast(x).value());
1550 break;
1551 case kClosureCid: {
1552 const auto& closure = Closure::Cast(x);
1553 if (closure.RawContext() != Object::null()) {
1554 UNIMPLEMENTED();
1555 }
1556 ASSERT(closure.IsCanonical());
1557 auto& type_args = TypeArguments::Handle(Z);
1558 type_args = closure.instantiator_type_arguments();
1559 Write<const TypeArguments&>(type_args);
1560 type_args = closure.function_type_arguments();
1561 Write<const TypeArguments&>(type_args);
1562 type_args = closure.delayed_type_arguments();
1563 Write<const TypeArguments&>(type_args);
1564 Write<const Function&>(Function::Handle(Z, closure.function()));
1565 break;
1566 }
1567 case kDoubleCid:
1568 ASSERT(x.IsCanonical());
1569 Write<double>(Double::Cast(x).value());
1570 break;
1571 case kFieldCid: {
1572 const auto& field = Field::Cast(x);
1573 const auto& owner = Class::Handle(Z, field.Owner());
1574 Write<classid_t>(owner.id());
1575 const intptr_t field_index = owner.FindFieldIndex(field);
1576 ASSERT(field_index >= 0);
1577 Write<intptr_t>(field_index);
1578 break;
1579 }
1580 case kFunctionCid:
1581 Write<const Function&>(Function::Cast(x));
1582 break;
1583 case kFunctionTypeCid: {
1584 const auto& type = FunctionType::Cast(x);
1585 ASSERT(type.IsFinalized());
1586 if (WriteObjectWithEnclosingTypes(type)) {
1587 break;
1588 }
1589 const intptr_t saved_num_free_fun_type_params = num_free_fun_type_params_;
1590 const intptr_t num_parent_type_args = type.NumParentTypeArguments();
1591 if (num_free_fun_type_params_ > num_parent_type_args) {
1592 num_free_fun_type_params_ = num_parent_type_args;
1593 }
1594 Write<int8_t>(static_cast<int8_t>(type.nullability()));
1595 Write<uint32_t>(type.packed_parameter_counts());
1596 Write<uint16_t>(type.packed_type_parameter_counts());
1597 Write<const TypeParameters&>(
1598 TypeParameters::Handle(Z, type.type_parameters()));
1599 Write<const AbstractType&>(AbstractType::Handle(Z, type.result_type()));
1600 Write<const Array&>(Array::Handle(Z, type.parameter_types()));
1601 Write<const Array&>(Array::Handle(Z, type.named_parameter_names()));
1602 num_free_fun_type_params_ = saved_num_free_fun_type_params;
1603 break;
1604 }
1605 case kICDataCid: {
1606 const auto& icdata = ICData::Cast(x);
1607 Write<int8_t>(static_cast<int8_t>(icdata.rebind_rule()));
1608 Write<const Function&>(Function::Handle(Z, icdata.Owner()));
1609 Write<const Array&>(Array::Handle(Z, icdata.arguments_descriptor()));
1610 Write<intptr_t>(icdata.deopt_id());
1611 Write<intptr_t>(icdata.NumArgsTested());
1612 if (icdata.rebind_rule() == ICData::kStatic) {
1613 ASSERT(icdata.NumberOfChecks() == 1);
1614 Write<const Function&>(Function::Handle(Z, icdata.GetTargetAt(0)));
1615 } else if (icdata.rebind_rule() == ICData::kInstance) {
1616 if (icdata.NumberOfChecks() != 0) {
1617 UNIMPLEMENTED();
1618 }
1619 Write<const String&>(String::Handle(Z, icdata.target_name()));
1620 } else {
1621 UNIMPLEMENTED();
1622 }
1623 break;
1624 }
1625 case kConstMapCid:
1626 case kConstSetCid: {
1627 const auto& map = LinkedHashBase::Cast(x);
1628 ASSERT(map.IsCanonical());
1629 const intptr_t length = map.Length();
1630 Write<intptr_t>(length);
1631 Write<const TypeArguments&>(
1632 TypeArguments::Handle(Z, map.GetTypeArguments()));
1633 const auto& data = Array::Handle(Z, map.data());
1634 auto& elem = Object::Handle(Z);
1635 intptr_t used_data;
1636 if (cid == kConstMapCid) {
1637 used_data = length << 1;
1638 } else {
1639 used_data = length;
1640 }
1641 for (intptr_t i = 0; i < used_data; ++i) {
1642 elem = data.At(i);
1643 Write<const Object&>(elem);
1644 }
1645 break;
1646 }
1647 case kLibraryPrefixCid: {
1648 const auto& prefix = LibraryPrefix::Cast(x);
1649 const Library& library = Library::Handle(Z, prefix.importer());
1650 Write<classid_t>(Class::Handle(Z, library.toplevel_class()).id());
1651 Write<const String&>(String::Handle(Z, prefix.name()));
1652 break;
1653 }
1654 case kMintCid:
1655 ASSERT(x.IsCanonical());
1656 Write<int64_t>(Integer::Cast(x).AsInt64Value());
1657 break;
1658 case kNullCid:
1659 break;
1660 case kOneByteStringCid: {
1661 ASSERT(x.IsCanonical());
1662 const auto& str = String::Cast(x);
1663 const intptr_t length = str.Length();
1664 Write<intptr_t>(length);
1665 NoSafepointScope no_safepoint;
1666 uint8_t* latin1 = OneByteString::DataStart(str);
1667 stream_->WriteBytes(latin1, length);
1668 break;
1669 }
1670 case kRecordCid: {
1671 ASSERT(x.IsCanonical());
1672 const auto& record = Record::Cast(x);
1673 Write<RecordShape>(record.shape());
1674 auto& field = Object::Handle(Z);
1675 for (intptr_t i = 0, n = record.num_fields(); i < n; ++i) {
1676 field = record.FieldAt(i);
1677 Write<const Object&>(field);
1678 }
1679 break;
1680 }
1681 case kRecordTypeCid: {
1682 const auto& rec = RecordType::Cast(x);
1683 ASSERT(rec.IsFinalized());
1684 if (WriteObjectWithEnclosingTypes(rec)) {
1685 break;
1686 }
1687 Write<int8_t>(static_cast<int8_t>(rec.nullability()));
1688 Write<RecordShape>(rec.shape());
1689 Write<const Array&>(Array::Handle(Z, rec.field_types()));
1690 break;
1691 }
1692 case kSentinelCid:
1693 if (x.ptr() == Object::sentinel().ptr()) {
1694 Write<uint8_t>(0);
1695 } else if (x.ptr() == Object::transition_sentinel().ptr()) {
1696 Write<uint8_t>(1);
1697 } else if (x.ptr() == Object::optimized_out().ptr()) {
1698 Write<uint8_t>(2);
1699 } else {
1700 UNIMPLEMENTED();
1701 }
1702 break;
1703 case kSmiCid:
1704 Write<intptr_t>(Smi::Cast(x).Value());
1705 break;
1706 case kTwoByteStringCid: {
1707 ASSERT(x.IsCanonical());
1708 const auto& str = String::Cast(x);
1709 const intptr_t length = str.Length();
1710 Write<intptr_t>(length);
1711 NoSafepointScope no_safepoint;
1712 uint16_t* utf16 = TwoByteString::DataStart(str);
1713 stream_->WriteBytes(reinterpret_cast<const uint8_t*>(utf16),
1714 length * sizeof(uint16_t));
1715 break;
1716 }
1717 case kTypeCid: {
1718 const auto& type = Type::Cast(x);
1719 ASSERT(type.IsFinalized());
1720 if (WriteObjectWithEnclosingTypes(type)) {
1721 break;
1722 }
1723 const auto& cls = Class::Handle(Z, type.type_class());
1724 Write<int8_t>(static_cast<int8_t>(type.nullability()));
1725 Write<classid_t>(type.type_class_id());
1726 if (cls.IsGeneric()) {
1727 const auto& type_args = TypeArguments::Handle(Z, type.arguments());
1728 Write<const TypeArguments&>(type_args);
1729 }
1730 break;
1731 }
1732 case kTypeArgumentsCid: {
1733 const auto& type_args = TypeArguments::Cast(x);
1734 ASSERT(type_args.IsFinalized());
1735 if (WriteObjectWithEnclosingTypes(type_args)) {
1736 break;
1737 }
1738 const intptr_t len = type_args.Length();
1739 Write<intptr_t>(len);
1740 auto& type = AbstractType::Handle(Z);
1741 for (intptr_t i = 0; i < len; ++i) {
1742 type = type_args.TypeAt(i);
1743 Write<const AbstractType&>(type);
1744 }
1745 break;
1746 }
1747 case kTypeParameterCid: {
1748 const auto& tp = TypeParameter::Cast(x);
1749 ASSERT(tp.IsFinalized());
1750 if (WriteObjectWithEnclosingTypes(tp)) {
1751 break;
1752 }
1753 Write<intptr_t>(tp.base());
1754 Write<intptr_t>(tp.index());
1755 Write<int8_t>(static_cast<int8_t>(tp.nullability()));
1756 if (tp.IsFunctionTypeParameter()) {
1757 Write<bool>(true);
1758 Write<const FunctionType&>(
1759 FunctionType::Handle(Z, tp.parameterized_function_type()));
1760 } else {
1761 Write<bool>(false);
1762 Write<const Class&>(Class::Handle(Z, tp.parameterized_class()));
1763 }
1764 break;
1765 }
1766 case kTypeParametersCid: {
1767 const auto& tps = TypeParameters::Cast(x);
1768 Write<const Array&>(Array::Handle(Z, tps.names()));
1769 Write<const Array&>(Array::Handle(Z, tps.flags()));
1770 Write<const TypeArguments&>(TypeArguments::Handle(Z, tps.bounds()));
1771 Write<const TypeArguments&>(TypeArguments::Handle(Z, tps.defaults()));
1772 break;
1773 }
1774 default: {
1775 const classid_t cid = x.GetClassId();
1776 if ((cid >= kNumPredefinedCids) || (cid == kInstanceCid)) {
1777 const auto& instance = Instance::Cast(x);
1778 ASSERT(instance.IsCanonical());
1779 const auto& cls =
1780 Class::Handle(Z, isolate_group()->class_table()->At(cid));
1781 const auto unboxed_fields_bitmap =
1783 const intptr_t next_field_offset = cls.host_next_field_offset();
1784 auto& obj = Object::Handle(Z);
1785 for (intptr_t offset = Instance::NextFieldOffset();
1786 offset < next_field_offset; offset += kCompressedWordSize) {
1787 if (unboxed_fields_bitmap.Get(offset / kCompressedWordSize)) {
1788 if (kCompressedWordSize == 8) {
1789 Write<int64_t>(*reinterpret_cast<int64_t*>(
1790 instance.RawFieldAddrAtOffset(offset)));
1791 } else {
1792 Write<int32_t>(*reinterpret_cast<int32_t*>(
1793 instance.RawFieldAddrAtOffset(offset)));
1794 }
1795 } else {
1796 obj = instance.RawGetFieldAtOffset(offset);
1797 Write<const Object&>(obj);
1798 }
1799 }
1800 break;
1801 }
1802 FATAL("Unimplemented WriteObjectImpl for %s", x.ToCString());
1803 }
1804 }
1805}
1806
// Reads back the payload written by WriteObjectImpl for class id |cid|.
// |object_index| is this object's registration slot; cases that can be
// self-referential call SetObjectAt before reading nested objects.
const Object& FlowGraphDeserializer::ReadObjectImpl(intptr_t cid,
                                                    intptr_t object_index) {
  switch (cid) {
    case kArrayCid:
    case kImmutableArrayCid: {
      const intptr_t len = Read<intptr_t>();
      const auto& type_args = Read<const TypeArguments&>();
      // Mirrors the writer's shortcut for empty, untyped arrays.
      if ((len == 0) && type_args.IsNull()) {
        return Object::empty_array();
      }
      const bool canonicalize = Read<bool>();
      auto& array = Array::ZoneHandle(
          Z, Array::New(len, canonicalize ? Heap::kNew : Heap::kOld));
      if (!type_args.IsNull()) {
        array.SetTypeArguments(type_args);
      }
      for (intptr_t i = 0; i < len; ++i) {
        array.SetAt(i, Read<const Object&>());
      }
      if (cid == kImmutableArrayCid) {
        array.MakeImmutable();
      }
      if (canonicalize) {
        array ^= array.Canonicalize(thread());
      }
      return array;
    }
1834 case kBoolCid:
1835 return Bool::Get(Read<bool>());
1836 case kClosureCid: {
1837 const auto& instantiator_type_arguments = Read<const TypeArguments&>();
1838 const auto& function_type_arguments = Read<const TypeArguments&>();
1839 const auto& delayed_type_arguments = Read<const TypeArguments&>();
1840 const auto& function = Read<const Function&>();
1842 Z, Closure::New(instantiator_type_arguments, function_type_arguments,
1843 delayed_type_arguments, function,
1844 Object::null_object()));
1845 closure ^= closure.Canonicalize(thread());
1846 return closure;
1847 }
1848 case kDoubleCid:
1849 return Double::ZoneHandle(Z, Double::NewCanonical(Read<double>()));
1850 case kFieldCid: {
1851 const classid_t owner_class_id = Read<classid_t>();
1852 const intptr_t field_index = Read<intptr_t>();
1853 const auto& owner = Class::Handle(Z, GetClassById(owner_class_id));
1854 auto& result = Field::ZoneHandle(Z, owner.FieldFromIndex(field_index));
1855 ASSERT(!result.IsNull());
1856 return result;
1857 }
1858 case kFunctionCid:
1859 return Read<const Function&>();
1860 case kFunctionTypeCid: {
1861 const auto& enc_type = ReadObjectWithEnclosingTypes();
1862 if (!enc_type.IsNull()) {
1863 return enc_type;
1864 }
1865 const Nullability nullability = static_cast<Nullability>(Read<int8_t>());
1866 auto& result =
1868 SetObjectAt(object_index, result);
1869 result.set_packed_parameter_counts(Read<uint32_t>());
1870 result.set_packed_type_parameter_counts(Read<uint16_t>());
1871 result.SetTypeParameters(Read<const TypeParameters&>());
1872 result.set_result_type(Read<const AbstractType&>());
1873 result.set_parameter_types(Read<const Array&>());
1874 result.set_named_parameter_names(Read<const Array&>());
1875 result.SetIsFinalized();
1876 result ^= result.Canonicalize(thread());
1877 return result;
1878 }
1879 case kICDataCid: {
1880 const ICData::RebindRule rebind_rule =
1881 static_cast<ICData::RebindRule>(Read<int8_t>());
1882 const auto& owner = Read<const Function&>();
1883 const auto& arguments_descriptor = Read<const Array&>();
1884 const intptr_t deopt_id = Read<intptr_t>();
1885 const intptr_t num_args_tested = Read<intptr_t>();
1886
1887 if (rebind_rule == ICData::kStatic) {
1888 const auto& target = Read<const Function&>();
1889 return ICData::ZoneHandle(
1890 Z,
1891 ICData::NewForStaticCall(owner, target, arguments_descriptor,
1892 deopt_id, num_args_tested, rebind_rule));
1893 } else if (rebind_rule == ICData::kInstance) {
1894 const auto& target_name = Read<const String&>();
1895 return ICData::ZoneHandle(
1896 Z, ICData::New(owner, target_name, arguments_descriptor, deopt_id,
1897 num_args_tested, rebind_rule));
1898 } else {
1899 UNIMPLEMENTED();
1900 }
1901 break;
1902 }
1903 case kConstMapCid:
1904 case kConstSetCid: {
1905 const intptr_t length = Read<intptr_t>();
1906 const auto& type_args = Read<const TypeArguments&>();
1907 Instance& result = Instance::ZoneHandle(Z);
1908 intptr_t used_data;
1909 if (cid == kConstMapCid) {
1911 used_data = (length << 1);
1912 } else {
1914 used_data = length;
1915 }
1916 // LinkedHashBase is not a proper handle type, so
1917 // cannot create a LinkedHashBase handle upfront.
1918 const LinkedHashBase& map = LinkedHashBase::Cast(result);
1919 map.SetTypeArguments(type_args);
1920 map.set_used_data(used_data);
1921 const auto& data = Array::Handle(Z, Array::New(used_data));
1922 map.set_data(data);
1923 map.set_deleted_keys(0);
1924 map.ComputeAndSetHashMask();
1925 for (intptr_t i = 0; i < used_data; ++i) {
1926 data.SetAt(i, Read<const Object&>());
1927 }
1928 result ^= result.Canonicalize(thread());
1929 return result;
1930 }
1931 case kLibraryPrefixCid: {
1932 const Class& toplevel_class =
1933 Class::Handle(Z, GetClassById(Read<classid_t>()));
1934 const Library& library = Library::Handle(Z, toplevel_class.library());
1935 const String& name = Read<const String&>();
1936 const auto& prefix =
1937 LibraryPrefix::ZoneHandle(Z, library.LookupLocalLibraryPrefix(name));
1938 ASSERT(!prefix.IsNull());
1939 return prefix;
1940 }
1941 case kMintCid: {
1942 const int64_t value = Read<int64_t>();
1944 }
1945 case kNullCid:
1946 return Object::null_object();
1947 case kOneByteStringCid: {
1948 const intptr_t length = Read<intptr_t>();
1949 uint8_t* latin1 = Z->Alloc<uint8_t>(length);
1950 stream_->ReadBytes(latin1, length);
1951 return String::ZoneHandle(Z,
1952 Symbols::FromLatin1(thread(), latin1, length));
1953 }
1954 case kRecordCid: {
1955 const RecordShape shape = Read<RecordShape>();
1956 auto& record = Record::ZoneHandle(Z, Record::New(shape));
1957 for (intptr_t i = 0, n = shape.num_fields(); i < n; ++i) {
1958 record.SetFieldAt(i, Read<const Object&>());
1959 }
1960 record ^= record.Canonicalize(thread());
1961 return record;
1962 }
1963 case kRecordTypeCid: {
1964 const auto& enc_type = ReadObjectWithEnclosingTypes();
1965 if (!enc_type.IsNull()) {
1966 return enc_type;
1967 }
1968 const Nullability nullability = static_cast<Nullability>(Read<int8_t>());
1969 const RecordShape shape = Read<RecordShape>();
1970 const Array& field_types = Read<const Array&>();
1971 RecordType& rec = RecordType::ZoneHandle(
1972 Z, RecordType::New(shape, field_types, nullability));
1973 rec.SetIsFinalized();
1974 rec ^= rec.Canonicalize(thread());
1975 return rec;
1976 }
1977 case kSentinelCid:
1978 switch (Read<uint8_t>()) {
1979 case 0:
1980 return Object::sentinel();
1981 case 1:
1982 return Object::transition_sentinel();
1983 case 2:
1984 return Object::optimized_out();
1985 default:
1986 UNREACHABLE();
1987 }
1988 case kSmiCid:
1989 return Smi::ZoneHandle(Z, Smi::New(Read<intptr_t>()));
1990 case kTwoByteStringCid: {
1991 const intptr_t length = Read<intptr_t>();
1992 uint16_t* utf16 = Z->Alloc<uint16_t>(length);
1993 stream_->ReadBytes(reinterpret_cast<uint8_t*>(utf16),
1994 length * sizeof(uint16_t));
1996 }
1997 case kTypeCid: {
1998 const auto& enc_type = ReadObjectWithEnclosingTypes();
1999 if (!enc_type.IsNull()) {
2000 return enc_type;
2001 }
2002 const Nullability nullability = static_cast<Nullability>(Read<int8_t>());
2003 const classid_t type_class_id = Read<classid_t>();
2004 const auto& cls = Class::Handle(Z, GetClassById(type_class_id));
2005 auto& result = Type::ZoneHandle(Z);
2006 if (cls.IsGeneric()) {
2007 result = Type::New(cls, Object::null_type_arguments(), nullability);
2008 SetObjectAt(object_index, result);
2009 const auto& type_args = Read<const TypeArguments&>();
2010 result.set_arguments(type_args);
2011 result.SetIsFinalized();
2012 } else {
2013 result = cls.DeclarationType();
2014 result = result.ToNullability(nullability, Heap::kOld);
2015 }
2016 result ^= result.Canonicalize(thread());
2017 return result;
2018 }
2019 case kTypeArgumentsCid: {
2020 const auto& enc_type_args = ReadObjectWithEnclosingTypes();
2021 if (!enc_type_args.IsNull()) {
2022 return enc_type_args;
2023 }
2024 const intptr_t len = Read<intptr_t>();
2025 auto& type_args = TypeArguments::ZoneHandle(Z, TypeArguments::New(len));
2026 SetObjectAt(object_index, type_args);
2027 for (intptr_t i = 0; i < len; ++i) {
2028 type_args.SetTypeAt(i, Read<const AbstractType&>());
2029 }
2030 type_args ^= type_args.Canonicalize(thread());
2031 return type_args;
2032 }
2033 case kTypeParameterCid: {
2034 const auto& enc_type = ReadObjectWithEnclosingTypes();
2035 if (!enc_type.IsNull()) {
2036 return enc_type;
2037 }
2038 const intptr_t base = Read<intptr_t>();
2039 const intptr_t index = Read<intptr_t>();
2040 const Nullability nullability = static_cast<Nullability>(Read<int8_t>());
2041 const Object* owner = nullptr;
2042 if (Read<bool>()) {
2043 owner = &Read<const FunctionType&>();
2044 } else {
2045 owner = &Read<const Class&>();
2046 }
2047 auto& tp = TypeParameter::ZoneHandle(
2048 Z, TypeParameter::New(*owner, base, index, nullability));
2049 SetObjectAt(object_index, tp);
2050 tp.SetIsFinalized();
2051 tp ^= tp.Canonicalize(thread());
2052 return tp;
2053 }
2054 case kTypeParametersCid: {
2055 const auto& tps = TypeParameters::ZoneHandle(Z, TypeParameters::New());
2056 tps.set_names(Read<const Array&>());
2057 tps.set_flags(Read<const Array&>());
2058 tps.set_bounds(Read<const TypeArguments&>());
2059 tps.set_defaults(Read<const TypeArguments&>());
2060 return tps;
2061 }
2062 default:
2063 if ((cid >= kNumPredefinedCids) || (cid == kInstanceCid)) {
2064 const auto& cls = Class::Handle(Z, GetClassById(cid));
2065 const auto unboxed_fields_bitmap =
2067 const intptr_t next_field_offset = cls.host_next_field_offset();
2069 for (intptr_t offset = Instance::NextFieldOffset();
2070 offset < next_field_offset; offset += kCompressedWordSize) {
2071 if (unboxed_fields_bitmap.Get(offset / kCompressedWordSize)) {
2072 if (kCompressedWordSize == 8) {
2073 const int64_t v = Read<int64_t>();
2074 *reinterpret_cast<int64_t*>(
2075 instance.RawFieldAddrAtOffset(offset)) = v;
2076 } else {
2077 const int32_t v = Read<int32_t>();
2078 *reinterpret_cast<int32_t*>(
2079 instance.RawFieldAddrAtOffset(offset)) = v;
2080 }
2081 } else {
2082 const auto& obj = Read<const Object&>();
2083 instance.RawSetFieldAtOffset(offset, obj);
2084 }
2085 }
2086 instance = instance.Canonicalize(thread());
2087 return instance;
2088 }
2089 }
2090 UNIMPLEMENTED();
2091 return Object::null_object();
2092}
2093
// Handle types whose values round-trip through the generic Object traits.
// The second column is the handle returned when the read object is null.
#define HANDLES_SERIALIZABLE_AS_OBJECT(V)                                      \
  V(AbstractType, Object::null_abstract_type())                                \
  V(Array, Object::null_array())                                               \
  V(Field, Field::Handle(d->zone()))                                           \
  V(FunctionType, Object::null_function_type())                                \
  V(Instance, Object::null_instance())                                         \
  V(String, Object::null_string())                                             \
  V(TypeArguments, Object::null_type_arguments())                              \
  V(TypeParameters, TypeParameters::Handle(d->zone()))

// Defines Write/Read traits for 'const handle&' that delegate to the
// Object traits, downcasting (or substituting the null handle) on read.
#define SERIALIZE_HANDLE_AS_OBJECT(handle, null_handle)                        \
  template <>                                                                  \
  void FlowGraphSerializer::WriteTrait<const handle&>::Write(                  \
      FlowGraphSerializer* s, const handle& x) {                               \
    s->Write<const Object&>(x);                                                \
  }                                                                            \
  template <>                                                                  \
  const handle& FlowGraphDeserializer::ReadTrait<const handle&>::Read(         \
      FlowGraphDeserializer* d) {                                              \
    const Object& result = d->Read<const Object&>();                           \
    if (result.IsNull()) {                                                     \
      return null_handle;                                                      \
    }                                                                          \
    return handle::Cast(result);                                               \
  }
2119
2121#undef SERIALIZE_HANDLE_AS_OBJECT
2122#undef HANDLES_SERIALIZABLE_AS_OBJECT
2123
// OSR entries serialize no state beyond their initial definitions; the
// graph entry link is re-established on deserialization.
void OsrEntryInstr::WriteTo(FlowGraphSerializer* s) {
  BlockEntryWithInitialDefs::WriteTo(s);
}
2127
// Re-attaches the deserialized OSR entry to the graph entry currently
// being built by the deserializer.
OsrEntryInstr::OsrEntryInstr(FlowGraphDeserializer* d)
    : BlockEntryWithInitialDefs(d), graph_entry_(d->graph_entry()) {}
2130
// Serializes the move operands and the move schedule after the common
// instruction extras.
void ParallelMoveInstr::WriteExtra(FlowGraphSerializer* s) {
  Instruction::WriteExtra(s);
  s->Write<GrowableArray<MoveOperands*>>(moves_);
  s->Write<const MoveSchedule*>(move_schedule_);
}
2136
// Mirror of WriteExtra: restores moves and schedule in write order.
void ParallelMoveInstr::ReadExtra(FlowGraphDeserializer* d) {
  Instruction::ReadExtra(d);
  moves_ = d->Read<GrowableArray<MoveOperands*>>();
  move_schedule_ = d->Read<const MoveSchedule*>();
}
2142
// Serializes the parameter's assigned Location after the common extras.
void ParameterInstr::WriteExtra(FlowGraphSerializer* s) {
  TemplateDefinition::WriteExtra(s);
  location_.Write(s);
}
2147
// Mirror of WriteExtra: restores the parameter's Location.
void ParameterInstr::ReadExtra(FlowGraphDeserializer* d) {
  TemplateDefinition::ReadExtra(d);
  location_ = Location::Read(d);
}
2152
// Writes phi-specific state on top of the variadic-definition payload;
// must stay in sync with the PhiInstr deserializing constructor.
void PhiInstr::WriteTo(FlowGraphSerializer* s) {
  VariadicDefinition::WriteTo(s);
  s->Write<Representation>(representation_);
  s->Write<bool>(is_alive_);
  s->Write<int8_t>(is_receiver_);
}
2159
// Initializers run in member declaration order, which must match the
// write order in PhiInstr::WriteTo. The owning join block is taken from
// the deserializer's current block rather than the stream.
PhiInstr::PhiInstr(FlowGraphDeserializer* d)
    : VariadicDefinition(d),
      block_(d->current_block()->AsJoinEntry()),
      representation_(d->Read<Representation>()),
      is_alive_(d->Read<bool>()),
      is_receiver_(d->Read<int8_t>()) {}
2166
// Writes return and argument representations; the native calling
// convention is derived from these on deserialization, not serialized.
void LeafRuntimeCallInstr::WriteTo(FlowGraphSerializer* s) {
  VariadicDefinition::WriteTo(s);
  s->Write<Representation>(return_representation_);
  s->Write<const ZoneGrowableArray<Representation>&>(argument_representations_);
}
2172
// Reads the representations in the order written by WriteTo, then rebuilds
// the native calling convention from them.
LeafRuntimeCallInstr::LeafRuntimeCallInstr(FlowGraphDeserializer* d)
    : VariadicDefinition(d),
      return_representation_(d->Read<Representation>()),
      argument_representations_(
          d->Read<const ZoneGrowableArray<Representation>&>()),
      native_calling_convention_(
          compiler::ffi::NativeCallingConvention::FromSignature(
              d->zone(),
              *compiler::ffi::NativeFunctionType::FromRepresentations(
                  d->zone(),
                  return_representation_,
                  argument_representations_))) {}
2185
2186template <>
2188 Range* x) {
2189 if (x == nullptr) {
2190 s->Write<bool>(false);
2191 } else {
2192 s->Write<bool>(true);
2193 x->Write(s);
2194 }
2195}
2196
2197template <>
2200 if (!d->Read<bool>()) {
2201 return nullptr;
2202 }
2203 return new (d->zone()) Range(d);
2204}
2205
2206void Range::Write(FlowGraphSerializer* s) const {
2207 min_.Write(s);
2208 max_.Write(s);
2209}
2210
2213
2215 s->Write<int8_t>(kind_);
2216 s->Write<int64_t>(value_);
2217 s->Write<int64_t>(offset_);
2218}
2219
2221 : kind_(static_cast<Kind>(d->Read<int8_t>())),
2222 value_(d->Read<int64_t>()),
2223 offset_(d->Read<int64_t>()) {}
2224
2225template <>
2227 RecordShape x) {
2228 s->Write<intptr_t>(x.num_fields());
2229 s->Write<const Array&>(
2230 Array::Handle(s->zone(), x.GetFieldNames(s->thread())));
2231}
2232
2233template <>
2236 const intptr_t num_fields = d->Read<intptr_t>();
2237 const auto& field_names = d->Read<const Array&>();
2238 return RecordShape::Register(d->thread(), num_fields, field_names);
2239}
2240
2242 s->Write<uintptr_t>(cpu_registers_.data());
2243 s->Write<uintptr_t>(untagged_cpu_registers_.data());
2244 s->Write<uintptr_t>(fpu_registers_.data());
2245}
2246
2248 : cpu_registers_(d->Read<uintptr_t>()),
2249 untagged_cpu_registers_(d->Read<uintptr_t>()),
2250 fpu_registers_(d->Read<uintptr_t>()) {}
2251
2252template <>
2258
2259template <>
2264
2265template <>
2270
2271template <>
2276
2277template <>
2279 const Slot* x) {
2280 if (x == nullptr) {
2281 s->Write<bool>(false);
2282 return;
2283 }
2284 s->Write<bool>(true);
2285 x->Write(s);
2286}
2287
2288template <>
2291 if (!d->Read<bool>()) {
2292 return nullptr;
2293 }
2294 return &Slot::Read(d);
2295}
2296
      // NOTE(review): this block was damaged when the page was extracted —
      // the Slot::Write signature, the opening of the kind write, and
      // several case labels (rendered as cross-reference links) were
      // dropped. Surviving code is preserved unchanged below; restore the
      // missing lines from the original source before compiling.
      static_cast<serializable_type_t<Kind>>(kind_));

  switch (kind_) {
      // (case label lost in extraction) — writes flags and offset.
      s->Write<int8_t>(flags_);
      s->Write<intptr_t>(offset_in_bytes_);
      break;
      // (case label lost in extraction) — writes offset only.
      s->Write<intptr_t>(offset_in_bytes_);
      break;
      // (case label lost in extraction) — writes offset only.
      s->Write<intptr_t>(offset_in_bytes_);
      break;
    case Kind::kRecordField:
      s->Write<intptr_t>(offset_in_bytes_);
      break;
      // (case label lost in extraction) — writes flags, offset, the slot's
      // name string, and its compile type.
      s->Write<int8_t>(flags_);
      s->Write<intptr_t>(offset_in_bytes_);
      s->Write<const String&>(*DataAs<const String>());
      type_.Write(s);
      break;
    case Kind::kDartField:
      // Dart-field slots serialize the Field object itself.
      s->Write<const Field&>(field());
      break;
    default:
      // Remaining kinds carry no payload beyond the kind tag (see the
      // GetNativeSlot default in Slot::Read).
      break;
  }
}
2328
  // NOTE(review): damaged in extraction — the Slot::Read signature, the
  // declaration of `type`, several case labels, and a few assignment lines
  // were dropped. Surviving code is preserved unchanged; restore the
  // missing lines from the original source before compiling.
  const Kind kind = static_cast<Kind>(d->Read<serializable_type_t<Kind>>());
  int8_t flags = 0;
  intptr_t offset = -1;
  const void* data = nullptr;
  // (declaration/initialization of `type` lost in extraction)

  switch (kind) {
      // (case label lost) — reads flags and offset; fixed name
      // ":type_arguments" with a TypeArguments compile type.
      flags = d->Read<int8_t>();
      offset = d->Read<intptr_t>();
      data = ":type_arguments";
      type = CompileType::FromCid(kTypeArgumentsCid);
      break;
      // (case label lost) — the right-hand side of the `flags` assignment
      // was dropped; offset is then read and the name is ":argument".
      flags =
      offset = d->Read<intptr_t>();
      data = ":argument";
      break;
      // (case label lost) — reads offset; name is ":array_element". The
      // `type` assignment for this case was dropped.
      offset = d->Read<intptr_t>();
      data = ":array_element";
      break;
    case Kind::kRecordField:
      // A line preceding the offset read was dropped for this case too.
      offset = d->Read<intptr_t>();
      data = ":record_field";
      break;
      // (case label lost) — reads flags, offset, the slot's name string,
      // and its compile type.
      flags = d->Read<int8_t>();
      offset = d->Read<intptr_t>();
      data = &d->Read<const String&>();
      type = CompileType(d);
      break;
    case Kind::kDartField: {
      // Dart-field slots are canonicalized through Slot::Get.
      const Field& field = d->Read<const Field&>();
      return Slot::Get(field, &d->parsed_function());
    }
    default:
      // Remaining kinds map to statically defined native slots.
      return Slot::GetNativeSlot(kind);
  }

  // The trailing argument(s) of this call were dropped in extraction.
  return GetCanonicalSlot(d->thread(), kind, flags, offset, data, type,
}
2382
2383template <>
2386 const compiler::TableSelector* x) {
2387#if defined(DART_PRECOMPILER)
2388 ASSERT(x != nullptr);
2389 s->Write<int32_t>(x->id);
2390#else
2391 UNREACHABLE();
2392#endif
2393}
2394
// NOTE(review): damaged in extraction — the trait signature and the
// initializer of `selector` (a selector-map lookup by the deserialized id)
// were dropped. Surviving code preserved unchanged; restore the missing
// lines from the original source.
template <>
#if defined(DART_PRECOMPILER)
  const int32_t id = d->Read<int32_t>();
  const compiler::TableSelector* selector =
  ASSERT(selector != nullptr);
  return selector;
#else
  UNREACHABLE();
#endif
}
2409
2410template <intptr_t kExtraInputs>
2412 VariadicDefinition::WriteTo(s);
2413 s->Write<intptr_t>(type_args_len_);
2414 s->Write<const Array&>(argument_names_);
2415 s->Write<TokenPosition>(token_pos_);
2416 if (move_arguments_ == nullptr) {
2417 s->Write<intptr_t>(-1);
2418 } else {
2419 s->Write<intptr_t>(move_arguments_->length());
2420 // Write detached MoveArgument instructions.
2421 for (auto move_arg : *move_arguments_) {
2422 if (move_arg->next() == nullptr) {
2423 s->Write<bool>(true);
2424 s->Write<Instruction*>(move_arg);
2425 } else {
2426 s->Write<bool>(false);
2427 }
2428 }
2429 }
2430}
2431
2432template <intptr_t kExtraInputs>
2434 : VariadicDefinition(d),
2435 type_args_len_(d->Read<intptr_t>()),
2436 argument_names_(d->Read<const Array&>()),
2437 token_pos_(d->Read<TokenPosition>()) {
2438 const intptr_t num_move_args = d->Read<intptr_t>();
2439 if (num_move_args >= 0) {
2440 move_arguments_ =
2441 new (d->zone()) MoveArgumentsArray(d->zone(), num_move_args);
2442 move_arguments_->EnsureLength(num_move_args, nullptr);
2443 for (intptr_t i = 0; i < num_move_args; i++) {
2444 if (d->Read<bool>()) {
2445 auto move_arg = d->Read<Instruction*>()->AsMoveArgument();
2446 ASSERT(move_arg != nullptr);
2447 (*move_arguments_)[i] = move_arg;
2448 }
2449 }
2450 }
2451}
2452
2453template <intptr_t kExtraInputs>
2454void TemplateDartCall<kExtraInputs>::WriteExtra(FlowGraphSerializer* s) {
2455 VariadicDefinition::WriteExtra(s);
2456 if (move_arguments_ != nullptr) {
2457 // Write extras for detached MoveArgument in reverse order, because
2458 // we are going to read them back in reverse order.
2459 for (intptr_t i = move_arguments_->length() - 1; i >= 0; --i) {
2460 auto move_arg = move_arguments_->At(i);
2461 if (move_arg->next() == nullptr) {
2462 move_arg->WriteExtra(s);
2463 }
2464 }
2465 }
2466}
2467
// Restores extra state for the call's MoveArguments. Entries already
// materialized by the constructor (detached ones) read their own extras;
// null entries are recovered by walking backwards through the instruction
// stream to the nearest preceding MoveArgument.
template <intptr_t kExtraInputs>
void TemplateDartCall<kExtraInputs>::ReadExtra(FlowGraphDeserializer* d) {
  VariadicDefinition::ReadExtra(d);
  if (move_arguments_ != nullptr) {
    Instruction* cursor = this;
    // Iterate in reverse: the backward walk from this call naturally finds
    // the arguments in reverse order.
    for (intptr_t i = move_arguments_->length() - 1; i >= 0; --i) {
      if ((*move_arguments_)[i] != nullptr) {
        (*move_arguments_)[i]->ReadExtra(d);
      } else {
        // Note: IL might be serialized after ParallelMove instructions
        // were inserted between MoveArguments.
        do {
          cursor = cursor->previous();
        } while (!cursor->IsMoveArgument());
        (*move_arguments_)[i] = cursor->AsMoveArgument();
      }
    }
    if (env() != nullptr) {
      RepairArgumentUsesInEnvironment();
    }
  }
}
2490
2491// Explicit template instantiations, needed for the methods above.
2492template class TemplateDartCall<0>;
2493template class TemplateDartCall<1>;
2494
2495void MoveArgumentInstr::WriteExtra(FlowGraphSerializer* s) {
2496 TemplateDefinition::WriteExtra(s);
2497 location_.Write(s);
2498}
2499
2500void MoveArgumentInstr::ReadExtra(FlowGraphDeserializer* d) {
2501 TemplateDefinition::ReadExtra(d);
2502 location_ = Location::Read(d);
2503}
2504
2505template <>
2508 TokenPosition x) {
2509 s->Write<int32_t>(x.Serialize());
2510}
2511
2512template <>
2517
2518template <>
2520 uint8_t x) {
2521 s->stream()->Write<uint8_t>(x);
2522}
2523
2524template <>
2527 return d->stream()->Read<uint8_t>();
2528}
2529
2530template <>
2532 uint16_t x) {
2533 s->stream()->Write<uint16_t>(x);
2534}
2535
2536template <>
2539 return d->stream()->Read<uint16_t>();
2540}
2541
2542template <>
2544 uint32_t x) {
2545 s->stream()->Write<int32_t>(static_cast<int32_t>(x));
2546}
2547
2548template <>
2551 return static_cast<uint32_t>(d->stream()->Read<int32_t>());
2552}
2553
2554template <>
2556 uint64_t x) {
2557 s->stream()->Write<int64_t>(static_cast<int64_t>(x));
2558}
2559
2560template <>
2563 return static_cast<uint64_t>(d->stream()->Read<int64_t>());
2564}
2565
2566void UnboxedConstantInstr::WriteTo(FlowGraphSerializer* s) {
2567 ConstantInstr::WriteTo(s);
2568 s->Write<Representation>(representation_);
2569 // constant_address_ is not written - it is restored when reading.
2570}
2571
2573 : ConstantInstr(d),
2574 representation_(d->Read<Representation>()),
2575 constant_address_(0) {
2576 if (representation_ == kUnboxedDouble) {
2577 ASSERT(value().IsDouble());
2578 constant_address_ = FindDoubleConstant(Double::Cast(value()).value());
2579 }
2580}
2581
2582template <>
2584 Value* x) {
2585 ASSERT(s->can_write_refs());
2586 CompileType* reaching_type = x->reaching_type();
2587 Definition* def = x->definition();
2588 // Omit reaching type if it is the same as definition type.
2589 if ((reaching_type != nullptr) && def->HasType() &&
2590 (reaching_type == def->Type())) {
2591 reaching_type = nullptr;
2592 }
2593 s->Write<CompileType*>(reaching_type);
2594 s->WriteRef<Definition*>(def);
2595}
2596
2597template <>
2600 CompileType* type = d->Read<CompileType*>();
2601 Definition* def = d->ReadRef<Definition*>();
2602 Value* value = new (d->zone()) Value(def);
2603 value->SetReachingType(type);
2604 return value;
2605}
2606
2607void VariadicDefinition::WriteTo(FlowGraphSerializer* s) {
2608 Definition::WriteTo(s);
2609 s->Write<intptr_t>(inputs_.length());
2610}
2611
2613 : Definition(d), inputs_(d->zone(), 0) {
2614 const intptr_t num_inputs = d->Read<intptr_t>();
2615 inputs_.EnsureLength(num_inputs, nullptr);
2616}
2617
2618} // namespace dart
int count
static float prev(float f)
SI F min_(F x, F y)
SI F max_(F x, F y)
#define UNREACHABLE()
Definition assert.h:248
#define Z
void Write(FlowGraphSerializer *s) const
AliasIdentity(const AliasIdentity &other)
Definition il.h:2439
static ArrayPtr New(intptr_t len, Heap::Space space=Heap::kNew)
Definition object.h:10933
void Add(const T &value)
intptr_t length() const
void WriteBytes(const void *addr, intptr_t len)
Definition datastream.h:424
static constexpr S encode(T value)
Definition bitfield.h:167
void Add(intptr_t i)
Definition bit_vector.h:63
void Read(ReadStream *stream)
Definition bitmap.cc:161
BlockEntryInstr(intptr_t block_id, intptr_t try_index, intptr_t deopt_id, intptr_t stack_depth)
Definition il.h:1776
BlockEntryWithInitialDefs(intptr_t block_id, intptr_t try_index, intptr_t deopt_id, intptr_t stack_depth)
Definition il.h:1905
static const Bool & Get(bool value)
Definition object.h:10780
CatchBlockEntryInstr(bool is_generated, intptr_t block_id, intptr_t try_index, GraphEntryInstr *graph_entry, const Array &handler_types, intptr_t catch_try_index, bool needs_stacktrace, intptr_t deopt_id, const LocalVariable *exception_var, const LocalVariable *stacktrace_var, const LocalVariable *raw_exception_var, const LocalVariable *raw_stacktrace_var)
Definition il.h:2310
GrowableArray< CidRange * > cid_ranges_
Definition il.h:768
void Add(CidRange *target)
Definition il.h:746
ClassPtr At(intptr_t cid) const
UnboxedFieldBitmap GetUnboxedFieldsMapAt(intptr_t cid) const
FunctionPtr GetInvocationDispatcher(const String &target_name, const Array &args_desc, UntaggedFunction::Kind kind, bool create_if_absent) const
Definition object.cc:3897
static intptr_t FindClosureIndex(const Function &needle)
static FunctionPtr ClosureFunctionFromIndex(intptr_t idx)
static ClosurePtr New(const TypeArguments &instantiator_type_arguments, const TypeArguments &function_type_arguments, const Function &function, const Object &context, Heap::Space space=Heap::kNew)
Definition object.cc:26021
static CompileType FromCid(intptr_t cid)
static constexpr bool kCannotBeSentinel
CompileType(bool can_be_null, bool can_be_sentinel, intptr_t cid, const AbstractType *type)
static constexpr bool kCannotBeNull
static CompileType Dynamic()
void Write(FlowGraphSerializer *s) const
static CompileType None()
static ConstMapPtr NewUninitialized(Heap::Space space=Heap::kNew)
Definition object.cc:25320
static ConstSetPtr NewUninitialized(Heap::Space space=Heap::kNew)
Definition object.cc:25368
CompileType * Type()
Definition il.h:2503
Range * range_
Definition il.h:2674
Definition(intptr_t deopt_id=DeoptId::kNone)
Definition il.h:2470
bool HasType() const
Definition il.h:2512
static DoublePtr NewCanonical(double d)
Definition object.cc:23497
void Write(FlowGraphSerializer *s) const
Environment(FlowGraphDeserializer *d)
FlowGraphDeserializer(const ParsedFunction &parsed_function, ReadStream *stream)
IsolateGroup * isolate_group() const
const ParsedFunction & parsed_function() const
IsolateGroup * isolate_group() const
FlowGraphSerializer(NonStreamingWriteStream *stream)
void WriteFlowGraph(const FlowGraph &flow_graph, const ZoneGrowableArray< Definition * > &detached_defs)
const GrowableArray< BlockEntryInstr * > & reverse_postorder() const
Definition flow_graph.h:207
PrologueInfo prologue_info() const
Definition flow_graph.h:427
void set_coverage_array(const Array &array)
Definition flow_graph.h:553
intptr_t current_ssa_temp_index() const
Definition flow_graph.h:243
intptr_t inlining_id() const
Definition flow_graph.h:464
void set_current_ssa_temp_index(intptr_t index)
Definition flow_graph.h:244
void CreateCommonConstants()
intptr_t max_block_id() const
Definition flow_graph.h:264
const GrowableArray< BlockEntryInstr * > & optimized_block_order() const
Definition flow_graph.h:210
void set_inlining_id(intptr_t value)
Definition flow_graph.h:465
const Array & coverage_array() const
Definition flow_graph.h:552
BitVector * captured_parameters() const
Definition flow_graph.h:462
bool is_licm_allowed() const
Definition flow_graph.h:404
GrowableArray< BlockEntryInstr * > * CodegenBlockOrder()
void disallow_licm()
Definition flow_graph.h:408
FunctionEntryInstr(GraphEntryInstr *graph_entry, intptr_t block_id, intptr_t try_index, intptr_t deopt_id)
Definition il.h:2177
static FunctionTypePtr New(intptr_t num_parent_type_arguments=0, Nullability nullability=Nullability::kLegacy, Heap::Space space=Heap::kOld)
Definition object.cc:11682
static StringPtr CreateDynamicInvocationForwarderName(const String &name)
Definition object.cc:4255
GraphEntryInstr(const ParsedFunction &parsed_function, intptr_t osr_id)
Definition il.cc:1215
@ kNew
Definition heap.h:38
@ kOld
Definition heap.h:39
void ResetObjectIdTable()
Definition heap.cc:888
intptr_t GetObjectId(ObjectPtr raw_obj) const
Definition heap.h:197
void SetObjectId(ObjectPtr raw_obj, intptr_t object_id)
Definition heap.h:193
static ICDataPtr NewForStaticCall(const Function &owner, const Function &target, const Array &arguments_descriptor, intptr_t deopt_id, intptr_t num_args_tested, RebindRule rebind_rule)
Definition object.cc:17448
IndirectGotoInstr(intptr_t target_count, Value *target_index)
Definition il.h:3780
static intptr_t NextFieldOffset()
Definition object.h:8326
static InstancePtr New(const Class &cls, Heap::Space space=Heap::kNew)
Definition object.cc:20976
Instruction(const InstructionSource &source, intptr_t deopt_id=DeoptId::kNone)
Definition il.h:974
void ReadExtraWithoutInputs(FlowGraphDeserializer *d)
void WriteExtraWithoutInputs(FlowGraphSerializer *s)
static IntegerPtr NewCanonical(const String &str)
Definition object.cc:23078
ClassTable * class_table() const
Definition isolate.h:491
static const LinkedHashBase & Cast(const Object &obj)
Definition object.h:11984
void Write(FlowGraphSerializer *s) const
LocationSummary(Zone *zone, intptr_t input_count, intptr_t temp_count, LocationSummary::ContainsCall contains_call)
Definition locations.cc:170
static Location Pair(Location first, Location second)
Definition locations.cc:271
void Write(FlowGraphSerializer *s) const
static Location Read(FlowGraphDeserializer *d)
static Location Constant(const ConstantInstr *obj, int pair_index=0)
Definition locations.h:294
Location src() const
Definition il.h:1534
void Write(FlowGraphSerializer *s) const
Location dest() const
Definition il.h:1535
MoveOperands(Location dest, Location src)
Definition il.h:1524
static ObjectPtr null()
Definition object.h:433
ObjectPtr ptr() const
Definition object.h:332
static constexpr bool ContainsCompressedPointers()
Definition object.h:329
static Object & Handle()
Definition object.h:407
static Object & ZoneHandle()
Definition object.h:419
OsrEntryInstr(GraphEntryInstr *graph_entry, intptr_t block_id, intptr_t try_index, intptr_t deopt_id, intptr_t stack_depth)
Definition il.h:2250
static Precompiler * Instance()
compiler::SelectorMap * selector_map()
void Write(FlowGraphSerializer *s) const
void ReadBytes(void *addr, intptr_t len)
Definition datastream.h:90
static RecordShape Register(Thread *thread, intptr_t num_fields, const Array &field_names)
Definition object.cc:27980
static RecordTypePtr New(RecordShape shape, const Array &field_types, Nullability nullability=Nullability::kLegacy, Heap::Space space=Heap::kOld)
Definition object.cc:27541
static RecordPtr New(RecordShape shape, Heap::Space space=Heap::kNew)
Definition object.cc:27823
void Write(FlowGraphSerializer *s) const
Kind kind() const
Definition slot.h:502
static const Slot & Get(const Field &field, const ParsedFunction *parsed_function)
Definition slot.cc:351
const Field & field() const
Definition slot.h:540
Representation representation() const
Definition slot.h:519
void Write(FlowGraphSerializer *s) const
static const Slot & Read(FlowGraphDeserializer *d)
CompileType type() const
Definition slot.h:538
uintptr_t data() const
Definition locations.h:658
static SmiPtr New(intptr_t value)
Definition object.h:9985
static StaticTypeExactnessState Decode(int8_t value)
static const Code & EntryAt(intptr_t index)
Definition stub_code.h:101
static intptr_t NumEntries()
Definition stub_code.h:107
static StringPtr FromUTF16(Thread *thread, const uint16_t *utf16_array, intptr_t len)
Definition symbols.cc:229
static StringPtr FromLatin1(Thread *thread, const uint8_t *latin1_array, intptr_t len)
Definition symbols.cc:223
TemplateDartCall(intptr_t deopt_id, intptr_t type_args_len, const Array &argument_names, InputsArray &&inputs, const InstructionSource &source)
Definition il.h:4532
static TokenPosition Deserialize(int32_t value)
static TypeArgumentsPtr New(intptr_t len, Heap::Space space=Heap::kOld)
Definition object.cc:7733
static TypeParametersPtr New(Heap::Space space=Heap::kOld)
Definition object.cc:6778
static TypePtr New(const Class &clazz, const TypeArguments &arguments, Nullability nullability=Nullability::kLegacy, Heap::Space space=Heap::kOld)
Definition object.cc:22492
UnboxedConstantInstr(const Object &value, Representation representation)
Definition il.cc:1169
VariadicDefinition(InputsArray &&inputs, intptr_t deopt_id=DeoptId::kNone)
Definition il.h:2755
intptr_t InputCount() const
Definition il.h:2776
const TableSelector * GetSelector(const Function &interface_target) const
#define UNIMPLEMENTED
#define ASSERT(E)
VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE auto & d
Definition main.cc:19
VkInstance instance
Definition main.cc:48
struct MyStruct s
#define FATAL(error)
FlutterSemanticsFlag flags
const uint8_t uint32_t uint32_t GError ** error
uint8_t value
GAsyncResult * result
uint32_t * target
Dart_NativeFunction function
Definition fuchsia.cc:51
#define FOR_EACH_INSTRUCTION(M)
Definition il.h:405
#define SERIALIZABLE_AS_BLOCK_ENTRY(name, type)
#define MATCH(member, name)
#define UNIMPLEMENTED_FUNCTION_KIND(kind)
#define INSTRUCTIONS_SERIALIZABLE_AS_INSTRUCTION(V)
#define INSTRUCTION_REFS_SERIALIZABLE_AS_BLOCK_ENTRY(V)
#define SERIALIZABLE_AS_INSTRUCTION(name, type)
#define HANDLES_SERIALIZABLE_AS_OBJECT(V)
#define READ_INSTRUCTION(type, attrs)
#define SERIALIZE_HANDLE_AS_OBJECT(handle, null_handle)
size_t length
double x
SK_API bool Read(SkStreamSeekable *src, SkDocumentPage *dstArray, int dstArrayCount, const SkDeserialProcs *=nullptr)
FunctionPtr NativeCallbackFunction(const FunctionType &c_signature, const Function &dart_target, const Instance &exceptional_return, FfiCallbackKind kind)
Definition callback.cc:36
const char *const name
uword FindDoubleConstant(double value)
Nullability
Definition object.h:1112
FfiCallbackKind
Definition object.h:2964
int32_t classid_t
Definition globals.h:524
@ kIllegalCid
Definition class_id.h:214
@ kNullCid
Definition class_id.h:252
@ kNumPredefinedCids
Definition class_id.h:257
@ kDynamicCid
Definition class_id.h:253
Representation
Definition locations.h:66
uintptr_t uword
Definition globals.h:501
const intptr_t cid
static constexpr intptr_t kCompressedWordSize
Definition globals.h:42
ZoneGrowableArray< MoveArgumentInstr * > MoveArgumentsArray
Definition il.h:896
static bool IsConstant(Definition *def, int64_t *val)
Definition loops.cc:123
@ kFunctions
Definition object.h:2231
typename unwrap_enum< std::remove_cv_t< T >, std::is_enum< T >::value >::type serializable_type_t
Definition il.h:633
static int8_t data[kExtLength]
Definition __init__.py:1
std::function< void()> closure
Definition closure.h:14
SI auto map(std::index_sequence< I... >, Fn &&fn, const Args &... args) -> skvx::Vec< sizeof...(I), decltype(fn(args[0]...))>
Definition SkVx.h:680
#define OBJECT_STORE_STUB_CODE_LIST(DO)
#define FOR_EACH_RAW_FUNCTION_KIND(V)
Point offset
intptr_t cid_start
Definition il.h:220
intptr_t cid_end
Definition il.h:221
BlockEntryInstr * ReadRef(FlowGraphDeserializer *d)
const AbstractType * Read(FlowGraphDeserializer *d)
void WriteRef(FlowGraphSerializer *s, BlockEntryInstr *x)
void Write(FlowGraphSerializer *s, const AbstractType *x)
intptr_t max_block_id
Definition flow_graph.h:103
intptr_t min_block_id
Definition flow_graph.h:98
const Function * target
Definition il.h:721
StaticTypeExactnessState exactness
Definition il.h:723
intptr_t count
Definition il.h:722
const uintptr_t id