// NOTE(review): this chunk is a sparse extraction -- the leading integers are
// original source line numbers and many intermediate lines are elided.
// Fragment of a flag definition; per the DEFINE_FLAG signature visible
// elsewhere in this dump, presumably:
//   DEFINE_FLAG(bool, trace_write_barrier_elimination, false, ...) -- confirm.
13 trace_write_barrier_elimination,
15 "Trace WriteBarrierElimination pass.");
// Class-interior declaration: assigns dense indices to definitions (body below).
81 void IndexDefinitions(
Zone* zone);
// Body fragment of IsUsable(Definition*): a definition is tracked by this pass
// iff it is a Phi or an allocation that reports WillAllocateNewOrRemembered()
// -- presumably so stores into it may skip the write barrier; confirm against
// the full source.
98 return def->IsPhi() || (def->IsAllocation() &&
99 def->AsAllocation()->WillAllocateNewOrRemembered());
// Declaration: decides per-Slot whether write-barrier elimination applies
// (definition appears later in this file).
103 static bool SlotEligibleForWBE(
const Slot& slot);
// Number of indexed definitions; sized the BitVectors below.
110 intptr_t definition_count_ = 0;
// Bit i set <=> definition i is NOT a "large" array allocation (see
// IndexDefinitions); intersected into the live set on GC-triggering points.
117 BitVector* large_array_allocations_mask_;
// Two mutually exclusive variants of tracing_ (mutable bool vs. static
// constexpr false) -- presumably selected by elided #if defined(...) /#else
// preprocessor lines; verify against the full file.
138 bool tracing_ =
false;
140 static constexpr bool tracing_ =
false;
// Constructor body fragment (header elided): drives the whole pass --
// 1) index all usable definitions, 2) seed per-block in/out bit-vectors,
// 3) run a worklist fixed point over AnalyzeBlock, 4) do a final pass that
// applies the results via UpdateVectorForBlock(..., true).
146 : flow_graph_(flow_graph), block_order_(&flow_graph->postorder()) {
// Tracing is gated on both the graph's should_print() and the flag; the
// assignment to tracing_ on the elided line presumably follows -- confirm.
148 if (flow_graph->
should_print() && FLAG_trace_write_barrier_elimination) {
// Assign a dense index [0, definition_count_) to every usable definition.
153 IndexDefinitions(zone);
// One in-set and one out-set BitVector per block, all seeded from vector_
// (whose initial contents were set up in IndexDefinitions).
155 for (intptr_t
i = 0;
i < block_order_->length(); ++
i) {
156 usable_allocs_in_.Add(
new (zone)
BitVector(zone, definition_count_));
157 usable_allocs_in_[
i]->CopyFrom(vector_);
159 usable_allocs_out_.Add(
new (zone)
BitVector(zone, definition_count_));
160 usable_allocs_out_[
i]->CopyFrom(vector_);
// Tracks which blocks have been analyzed at least once (see AnalyzeBlock's
// early-bailout condition).
163 processed_blocks_ =
new (zone)
BitVector(zone, block_order_->length());
// Seed the worklist with every block, then iterate to a fixed point:
// whenever a block's out-set changes, its successors are re-enqueued.
167 for (intptr_t
i = 0;
i < block_order_->length(); ++
i) {
168 worklist_.Add(block_order_->At(
i));
171 while (!worklist_.is_empty()) {
172 auto*
const entry = worklist_.RemoveLast();
173 if (AnalyzeBlock(entry)) {
174 for (intptr_t
i = 0;
i < entry->last_instruction()->SuccessorCount();
177 THR_Print(
"Enqueueing block %" Pd "\n", entry->block_id());
179 worklist_.Add(entry->last_instruction()->SuccessorAt(
i));
// Final pass: re-walk every block with the converged in-sets; the `true`
// argument presumably switches UpdateVectorForBlock into "apply changes"
// mode (its parameter name is on an elided line) -- confirm.
186 for (intptr_t
i = 0;
i < block_order_->length(); ++
i) {
188 UpdateVectorForBlock(block_order_->At(
i),
true);
// --- Below: fragment of a separate helper (IsCreateLargeArray per the
// signature visible elsewhere in this dump). A CreateArray counts as "large"
// when its constant element count exceeds a threshold (constant elided;
// presumably kMaxLengthForWriteBarrierElimination -- confirm). The assert
// message indicates large/card-marked arrays must not be treated as usable.
193 if (
auto create_array = defn->AsCreateArray()) {
196 "Invariant restoration code does not handle card marking.");
199 return create_array->GetConstantNumElements() >
// Assigns a dense bit index to every usable definition (phis first per join
// block, then other definitions in forward order) and records which indices
// are "large" array allocations. Also allocates the scratch vector_ and the
// complement mask large_array_allocations_mask_ (bit set <=> NOT large).
205void WriteBarrierElimination::IndexDefinitions(Zone* zone) {
206 BitmapBuilder large_array_allocations;
// Worklist used to propagate "large array"-ness through phis below.
208 GrowableArray<Definition*> create_array_worklist;
210 for (intptr_t
i = 0;
i < block_order_->length(); ++
i) {
211 BlockEntryInstr*
const block = block_order_->At(
i);
// Phis get indices first; a phi starts out as not-large (may be flipped by
// the propagation loop below).
212 if (
auto join_block = block->AsJoinEntry()) {
213 for (PhiIterator it(join_block); !it.Done(); it.Advance()) {
214 large_array_allocations.Set(definition_count_,
false);
215 definition_indices_.
Insert({it.Current(), definition_count_++});
// (elided lines) -- presumably a tracing THR_Print of ssa index -> bit index.
219 it.Current()->ssa_temp_index(), definition_count_ - 1);
// Non-phi usable definitions: index them and seed the large-array worklist
// with large CreateArray instructions (is_create_large_array is computed on
// an elided line, presumably via IsCreateLargeArray -- confirm).
224 for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
225 if (Definition* current = it.Current()->AsDefinition()) {
226 if (IsUsable(current)) {
228 large_array_allocations.Set(definition_count_, is_create_large_array);
229 definition_indices_.
Insert({current, definition_count_++});
230 if (is_create_large_array) {
231 create_array_worklist.Add(current);
236 current->ssa_temp_index(), definition_count_ - 1);
// Transitive closure: any phi that (directly or transitively) uses a large
// array allocation is itself marked large, so stores through it are treated
// conservatively.
244 while (!create_array_worklist.is_empty()) {
245 auto instr = create_array_worklist.RemoveLast();
246 for (Value::Iterator it(instr->input_use_list()); !it.Done();
248 if (
auto phi_use = it.Current()->instruction()->AsPhi()) {
249 const intptr_t index = Index(phi_use);
250 if (!large_array_allocations.Get(index)) {
251 large_array_allocations.Set(index,
253 create_array_worklist.Add(phi_use);
259 vector_ =
new (zone) BitVector(zone, definition_count_);
// Complement of the large-array bitmap: bit set for every definition that is
// NOT a large array. Intersecting with this mask drops large arrays from the
// usable set (used at GC-triggering instructions in UpdateVectorForBlock).
261 large_array_allocations_mask_ =
new (zone) BitVector(zone, definition_count_);
262 for (intptr_t
i = 0;
i < definition_count_; ++
i) {
263 if (!large_array_allocations.Get(
i)) large_array_allocations_mask_->
Add(
i);
// Computes the in-set for `entry` into vector_ from the predecessors'
// out-sets, then fixes up phi bits: a phi is usable only if every input is a
// usable definition contained in the corresponding predecessor's out-set.
// NOTE(review): only AddAll (union) is visible here; the elided lines likely
// contain the i == 0 copy / i > 0 Intersect split of the standard dataflow
// merge -- confirm against the full source.
267void WriteBarrierElimination::MergePredecessors(BlockEntryInstr* entry) {
269 for (intptr_t
i = 0;
i < entry->PredecessorCount(); ++
i) {
270 BitVector* predecessor_set =
271 usable_allocs_out_[Index(entry->PredecessorAt(
i))];
273 vector_->
AddAll(predecessor_set);
// Phi fix-up only applies at join blocks (the only place phis live).
279 if (JoinEntryInstr*
join = entry->AsJoinEntry()) {
281 for (PhiIterator it(
join); !it.Done(); it.Advance()) {
282 PhiInstr* phi = it.Current();
// One phi input per predecessor edge, matched positionally below.
283 ASSERT(phi->InputCount() == entry->PredecessorCount());
284 bool is_usable =
true;
285 for (intptr_t
i = 0;
i < phi->InputCount(); ++
i) {
286 BitVector*
const predecessor_set =
287 usable_allocs_out_[Index(entry->PredecessorAt(
i))];
288 Definition*
const origin = phi->InputAt(
i)->definition();
// Any non-usable or not-live-out input disqualifies the whole phi
// (is_usable is presumably cleared on an elided line here -- confirm).
289 if (!IsUsable(origin) || !predecessor_set->Contains(Index(origin))) {
294 vector_->
Set(Index(phi), is_usable);
// Tracing: dump the per-phi merge result (guard on tracing_ is elided).
299 THR_Print(
"Merge predecessors for %" Pd ".\n", entry->block_id());
300 for (PhiIterator it(
join); !it.Done(); it.Advance()) {
301 PhiInstr* phi = it.Current();
303 vector_->
Contains(Index(phi)) ?
"true" :
"false");
// Runs one dataflow step on `entry`. Returns true iff the block's out-set
// changed (the constructor's worklist loop re-enqueues successors on true;
// the actual return statements are on elided lines -- confirm).
310bool WriteBarrierElimination::AnalyzeBlock(BlockEntryInstr* entry) {
// Merge predecessors' out-sets (and phi usability) into vector_.
312 MergePredecessors(entry);
315 BitVector*
const in_set = usable_allocs_in_[Index(entry)];
// Early bailout: if the merged in-set is unchanged AND the block was already
// processed once, nothing downstream can change.
317 if (vector_->
Equals(*in_set) && processed_blocks_->
Contains(Index(entry))) {
319 THR_Print(
"Bailout of block %" Pd ": inputs didn't change.\n",
323 }
else if (tracing_) {
324 THR_Print(
"Inputs of block %" Pd " changed: ", entry->block_id());
// Commit the new in-set, then walk the block's instructions to compute its
// out-set in vector_ (update_barriers = false: analysis only, no rewriting;
// the parameter name is hedged -- its declaration line is elided).
331 usable_allocs_in_[Index(entry)]->CopyFrom(vector_);
332 UpdateVectorForBlock(entry,
false);
334 processed_blocks_->
Add(Index(entry));
// Second bailout: the computed out-set equals the stored one -> no change to
// propagate to successors.
337 if (vector_->
Equals(*usable_allocs_out_[Index(entry)])) {
339 THR_Print(
"Bailout of block %" Pd ": out-set didn't change.\n",
// Out-set changed: store it; caller will re-enqueue successors.
345 BitVector*
const out_set = usable_allocs_out_[Index(entry)];
347 out_set->CopyFrom(vector_);
349 THR_Print(
"Block %" Pd " changed.\n", entry->block_id());
// Per-slot eligibility for write-barrier elimination. Tagged native Dart
// slots are eligible only when their holder derives from one of the untagged
// layouts whitelisted below; most of the switch (including the default and
// the X-macro list expansions) is on elided lines -- confirm against the
// full source. Do NOT insert anything between the backslash-continued macro
// lines below: line splicing happens before comment removal.
355bool WriteBarrierElimination::SlotEligibleForWBE(
const Slot& slot) {
360 switch (slot.kind()) {
366#define TAGGED_NATIVE_DART_SLOT_CASE(class, underlying_type, field, __, ___) \
367 case Slot::Kind::k##class##_##field: \
368 return std::is_base_of<UntaggedInstance, underlying_type>::value || \
369 std::is_base_of<UntaggedContext, underlying_type>::value || \
370 std::is_base_of<UntaggedUnhandledException, \
371 underlying_type>::value;
374#undef TAGGED_NATIVE_DART_SLOT_CASE
// Non-tagged native slots: case labels only; their shared return/fallthrough
// is on elided lines.
376#define OTHER_NATIVE_SLOT_CASE(class, __, field, ___, ____) \
377 case Slot::Kind::k##class##_##field:
381#undef OTHER_NATIVE_SLOT_CASE
// Walks `entry`'s instructions, transforming vector_ from the block's in-set
// to its out-set. The second (bool) parameter is on an elided line --
// presumably "finalize/update barriers": when true, stores whose target is
// in the usable set have their write barrier removed on the elided lines
// 399-401 / 405-409; confirm against the full source.
390void WriteBarrierElimination::UpdateVectorForBlock(BlockEntryInstr* entry,
392 for (ForwardInstructionIterator it(entry); !it.Done(); it.Advance()) {
393 Instruction*
const current = it.Current();
// Store into a tracked (usable + currently live) container: barrier can go.
396 if (StoreFieldInstr* instr = current->AsStoreField()) {
397 Definition*
const container = instr->instance()->definition();
398 if (IsUsable(container) && vector_->
Contains(Index(container))) {
402 }
else if (StoreIndexedInstr* instr = current->AsStoreIndexed()) {
403 Definition*
const array = instr->array()->definition();
404 if (IsUsable(array) && vector_->
Contains(Index(array))) {
// Calls into Dart invalidate everything (elided line presumably clears
// vector_ -- confirm); a mere GC-triggering instruction only drops large
// array allocations (mask bit set <=> NOT large, so Intersect keeps the
// non-large ones).
410 if (current->CanCallDart()) {
412 }
else if (current->CanTriggerGC()) {
415 vector_->
Intersect(large_array_allocations_mask_);
// A fresh allocation that will be in new-space or remembered becomes usable
// from this point on.
418 if (AllocationInstr*
const alloc = current->AsAllocation()) {
419 if (alloc->WillAllocateNewOrRemembered()) {
420 vector_->
Add(Index(alloc));
#define DEBUG_ASSERT(cond)
static constexpr intptr_t kMaxLengthForWriteBarrierElimination
static constexpr bool UseCardMarkingForAllocation(const intptr_t array_length)
KeyValueTrait::Value LookupValue(typename KeyValueTrait::Key key) const
void Insert(typename KeyValueTrait::Pair kv)
void Set(intptr_t i, bool value)
bool Equals(const BitVector &other) const
bool SubsetOf(const BitVector &other)
bool Contains(intptr_t i) const
bool AddAll(const BitVector *from)
void Intersect(const BitVector *other)
void CopyFrom(const BitVector *other)
intptr_t postorder_number() const
static bool IsKeyEqual(Pair kv, Key key)
static Key KeyOf(Pair kv)
static Value ValueOf(Pair kv)
static uword Hash(Key key)
bool should_print() const
static Thread * Current()
static uint32_t WordHash(intptr_t key)
WriteBarrierElimination(Zone *zone, FlowGraph *flow_graph)
#define THR_Print(format,...)
void EliminateWriteBarriers(FlowGraph *flow_graph)
DirectChainedHashMap< DefinitionIndexPairTrait > DefinitionIndexMap
DEFINE_FLAG(bool, print_cluster_information, false, "Print information about clusters written to snapshot")
static bool IsCreateLargeArray(Definition *defn)
static SkString join(const CommandLineFlags::StringArray &)
#define TAGGED_NATIVE_DART_SLOTS_LIST(V)
#define NOT_TAGGED_NATIVE_DART_SLOTS_LIST(V)
Pair(Definition *definition, intptr_t index)