{
  Zone* zone = graph_->zone();
  BitVector* all_places =
      new (zone) BitVector(zone, aliased_set_->max_place_id());
  all_places->SetAll();

  BitVector* all_aliased_places =
      new (zone) BitVector(zone, aliased_set_->max_place_id());
  {
    const auto& places = aliased_set_->places();
    // Go through all places and identify those which are escaping:
    // inspect the [AliasIdentity] of the instance allocation, which was
    // populated by [AliasedSet::ComputeAliasing]. Places backed by a
    // non-aliased local allocation are skipped; every other place is
    // recorded as aliased.
    for (intptr_t i = 0; i < places.length(); i++) {
      Place* place = places[i];
      if (place->DependsOnInstance()) {
        Definition* instance = place->instance();
        if (Place::IsAllocation(instance) &&
            !instance->Identity().IsAliased()) {
          continue;
        }
      }
      all_aliased_places->Add(i);
    }
  }

  for (BlockIterator block_it = graph_->postorder_iterator();
       !block_it.Done(); block_it.Advance()) {
    BlockEntryInstr* block = block_it.Current();
    const intptr_t postorder_number = block->postorder_number();

    BitVector* kill = kill_[postorder_number];
    BitVector* live_in = live_in_[postorder_number];
    BitVector* live_out = live_out_[postorder_number];

    ZoneGrowableArray<Instruction*>* exposed_stores = nullptr;

    // Iterate backwards starting at the last instruction.
    for (BackwardInstructionIterator instr_it(block); !instr_it.Done();
         instr_it.Advance()) {
      Instruction* instr = instr_it.Current();

      bool is_load = false;
      bool is_store = false;
      Place place(instr, &is_load, &is_store);
      if (place.IsImmutableField()) {
        // Loads/stores of final fields do not participate.
        continue;
      }

      // Handle stores.
      if (is_store) {
        if (kill->Contains(GetPlaceId(instr))) {
          if (!live_in->Contains(GetPlaceId(instr)) &&
              CanEliminateStore(instr)) {
            if (FLAG_trace_optimization && graph_->should_print()) {
              THR_Print("Removing dead store to place %" Pd " in block B%" Pd
                        "\n",
                        GetPlaceId(instr), block->block_id());
            }
            instr_it.RemoveCurrentFromGraph();
          }
        } else if (!live_in->Contains(GetPlaceId(instr))) {
          // Mark this store as downward exposed: such stores are the only
          // candidates for the global store elimination.
          if (exposed_stores == nullptr) {
            const intptr_t kMaxExposedStoresInitialSize = 5;
            exposed_stores = new (zone) ZoneGrowableArray<Instruction*>(
                Utils::Minimum(kMaxExposedStoresInitialSize,
                               aliased_set_->max_place_id()));
          }
          exposed_stores->Add(instr);
        }
        // Interfering stores kill only loads from the same place.
        kill->Add(GetPlaceId(instr));
        live_in->Remove(GetPlaceId(instr));
        continue;
      }

      if (instr->IsThrow() || instr->IsReThrow() || instr->IsReturnBase()) {
        // Initialize live-out for exit blocks since it won't be computed
        // otherwise during the fixed point iteration.
        live_out->CopyFrom(all_places);
      }

      // Handle side effects, deoptimization and function return.
      if (CompilerState::Current().is_aot()) {
        if (instr->HasUnknownSideEffects() || instr->IsReturnBase()) {
          // Instructions that return from the function or that have unknown
          // side effects are treated as loads from all places.
          live_in->CopyFrom(all_places);
          continue;
        } else if (instr->MayThrow()) {
          if (block->try_index() == kInvalidTryIndex) {
            // Outside of a try-catch block an instruction that may throw
            // only needs to be treated as a load from the aliased places.
            live_in->AddAll(all_aliased_places);
          } else {
            // Inside a try-catch block an instruction that may throw is
            // treated as a load from all places.
            live_in->CopyFrom(all_places);
          }
          continue;
        }
      } else {
        // In JIT mode be conservative about deoptimization: an apparently
        // dead store may become observable again if execution continues in
        // unoptimized code, because values referenced from a deoptimization
        // environment are effectively read at the deoptimization point.
        // A more precise treatment would have to account for those
        // environment uses before mirroring the AOT handling above.
        if (instr->HasUnknownSideEffects() || instr->CanDeoptimize() ||
            instr->MayThrow() || instr->IsReturnBase()) {
          // Instructions that return from the function, instructions with
          // unknown side effects and instructions that can deoptimize or
          // throw are treated as loads from all places.
          live_in->CopyFrom(all_places);
          continue;
        }
      }

      // Handle loads.
      Definition* defn = instr->AsDefinition();
      if ((defn != nullptr) && IsLoadEliminationCandidate(defn)) {
        const intptr_t alias = aliased_set_->LookupAliasId(place.ToAlias());
        live_in->AddAll(aliased_set_->GetKilledSet(alias));
        continue;
      }
    }
    exposed_stores_[postorder_number] = exposed_stores;
  }
  if (FLAG_trace_load_optimization && graph_->should_print()) {
    Dump();
    THR_Print("---\n");
  }
}