Flutter Engine
The Flutter Engine
Public Member Functions | Static Public Member Functions | Private Member Functions | List of all members
dart::StoreOptimizer Class Reference
Inheritance diagram for dart::StoreOptimizer:
dart::LivenessAnalysis dart::ValueObject

Public Member Functions

 StoreOptimizer (FlowGraph *graph, AliasedSet *aliased_set, PointerSet< Place > *map)
 
- Public Member Functions inherited from dart::LivenessAnalysis
 LivenessAnalysis (intptr_t variable_count, const GrowableArray< BlockEntryInstr * > &postorder)
 
void Analyze ()
 
virtual ~LivenessAnalysis ()
 
BitVector * GetLiveInSetAt (intptr_t postorder_number) const
 
BitVector * GetLiveOutSetAt (intptr_t postorder_number) const
 
BitVector * GetLiveInSet (BlockEntryInstr *block) const
 
BitVector * GetKillSet (BlockEntryInstr *block) const
 
BitVector * GetLiveOutSet (BlockEntryInstr *block) const
 
void Dump ()
 
- Public Member Functions inherited from dart::ValueObject
 ValueObject ()
 
 ~ValueObject ()
 

Static Public Member Functions

static void OptimizeGraph (FlowGraph *graph)
 

Private Member Functions

virtual void ComputeInitialSets ()
 

Additional Inherited Members

- Protected Member Functions inherited from dart::LivenessAnalysis
virtual void ComputeInitialSets ()=0
 
bool UpdateLiveOut (const BlockEntryInstr &instr)
 
bool UpdateLiveIn (const BlockEntryInstr &instr)
 
void ComputeLiveInAndLiveOutSets ()
 
Zone * zone () const
 
- Protected Attributes inherited from dart::LivenessAnalysis
Zone * zone_
 
const intptr_t variable_count_
 
const GrowableArray< BlockEntryInstr * > & postorder_
 
GrowableArray< BitVector * > live_out_
 
GrowableArray< BitVector * > kill_
 
GrowableArray< BitVector * > live_in_
 

Detailed Description

Definition at line 3012 of file redundancy_elimination.cc.

Constructor & Destructor Documentation

◆ StoreOptimizer()

dart::StoreOptimizer::StoreOptimizer ( FlowGraph *  graph,
AliasedSet *  aliased_set,
PointerSet< Place > *  map 
)
inline

Definition at line 3014 of file redundancy_elimination.cc.

3017 : LivenessAnalysis(aliased_set->max_place_id(), graph->postorder()),
3018 graph_(graph),
3019 map_(map),
3020 aliased_set_(aliased_set),
3021 exposed_stores_(graph_->postorder().length()) {
3022 const intptr_t num_blocks = graph_->postorder().length();
3023 for (intptr_t i = 0; i < num_blocks; i++) {
3024 exposed_stores_.Add(nullptr);
3025 }
3026 }
const GrowableArray< BlockEntryInstr * > & postorder() const
Definition: flow_graph.h:204
LivenessAnalysis(intptr_t variable_count, const GrowableArray< BlockEntryInstr * > &postorder)
Definition: flow_graph.cc:679
SI auto map(std::index_sequence< I... >, Fn &&fn, const Args &... args) -> skvx::Vec< sizeof...(I), decltype(fn(args[0]...))>
Definition: SkVx.h:680

Member Function Documentation

◆ ComputeInitialSets()

virtual void dart::StoreOptimizer::ComputeInitialSets ( )
inlineprivatevirtual

Implements dart::LivenessAnalysis.

Definition at line 3077 of file redundancy_elimination.cc.

3077 {
3078 Zone* zone = graph_->zone();
3079 BitVector* all_places =
3080 new (zone) BitVector(zone, aliased_set_->max_place_id());
3081 all_places->SetAll();
3082
3083 BitVector* all_aliased_places = nullptr;
3084 if (CompilerState::Current().is_aot()) {
3085 all_aliased_places =
3086 new (zone) BitVector(zone, aliased_set_->max_place_id());
3087 }
3088 const auto& places = aliased_set_->places();
3089 for (intptr_t i = 0; i < places.length(); i++) {
3090 Place* place = places[i];
3091 if (place->DependsOnInstance()) {
3092 Definition* instance = place->instance();
3093 // Find escaping places by inspecting definition allocation
3094 // [AliasIdentity] field, which is populated above by
3095 // [AliasedSet::ComputeAliasing].
3096 if ((all_aliased_places != nullptr) &&
3097 (!Place::IsAllocation(instance) ||
3098 instance->Identity().IsAliased())) {
3099 all_aliased_places->Add(i);
3100 }
3101 if (instance != nullptr) {
3102 // Avoid incorrect propagation of "dead" state beyond definition
3103 // of the instance. Otherwise it may eventually reach stores into
3104 // this place via loop backedge.
3105 live_in_[instance->GetBlock()->postorder_number()]->Add(i);
3106 }
3107 } else {
3108 if (all_aliased_places != nullptr) {
3109 all_aliased_places->Add(i);
3110 }
3111 }
3112 if (place->kind() == Place::kIndexed) {
3113 Definition* index = place->index();
3114 if (index != nullptr) {
3115 // Avoid incorrect propagation of "dead" state beyond definition
3116 // of the index. Otherwise it may eventually reach stores into
3117 // this place via loop backedge.
3118 live_in_[index->GetBlock()->postorder_number()]->Add(i);
3119 }
3120 }
3121 }
3122
3123 for (BlockIterator block_it = graph_->postorder_iterator();
3124 !block_it.Done(); block_it.Advance()) {
3125 BlockEntryInstr* block = block_it.Current();
3126 const intptr_t postorder_number = block->postorder_number();
3127
3128 BitVector* kill = kill_[postorder_number];
3129 BitVector* live_in = live_in_[postorder_number];
3130 BitVector* live_out = live_out_[postorder_number];
3131
3132 ZoneGrowableArray<Instruction*>* exposed_stores = nullptr;
3133
3134 // Iterate backwards starting at the last instruction.
3135 for (BackwardInstructionIterator instr_it(block); !instr_it.Done();
3136 instr_it.Advance()) {
3137 Instruction* instr = instr_it.Current();
3138
3139 bool is_load = false;
3140 bool is_store = false;
3141 Place place(instr, &is_load, &is_store);
3142 if (place.IsImmutableField()) {
3143 // Loads/stores of final fields do not participate.
3144 continue;
3145 }
3146
3147 // Handle stores.
3148 if (is_store) {
3149 if (kill->Contains(GetPlaceId(instr))) {
3150 if (!live_in->Contains(GetPlaceId(instr)) &&
3151 CanEliminateStore(instr)) {
3152 if (FLAG_trace_optimization && graph_->should_print()) {
3153 THR_Print("Removing dead store to place %" Pd " in block B%" Pd
3154 "\n",
3155 GetPlaceId(instr), block->block_id());
3156 }
3157 instr_it.RemoveCurrentFromGraph();
3158 }
3159 } else if (!live_in->Contains(GetPlaceId(instr))) {
3160 // Mark this store as down-ward exposed: They are the only
3161 // candidates for the global store elimination.
3162 if (exposed_stores == nullptr) {
3163 const intptr_t kMaxExposedStoresInitialSize = 5;
3164 exposed_stores = new (zone) ZoneGrowableArray<Instruction*>(
3165 Utils::Minimum(kMaxExposedStoresInitialSize,
3166 aliased_set_->max_place_id()));
3167 }
3168 exposed_stores->Add(instr);
3169 }
3170 // Interfering stores kill only loads from the same place.
3171 kill->Add(GetPlaceId(instr));
3172 live_in->Remove(GetPlaceId(instr));
3173 continue;
3174 }
3175
3176 if (instr->IsThrow() || instr->IsReThrow() || instr->IsReturnBase()) {
3177 // Initialize live-out for exit blocks since it won't be computed
3178 // otherwise during the fixed point iteration.
3179 live_out->CopyFrom(all_places);
3180 }
3181
3182 // Handle side effects, deoptimization and function return.
3183 if (CompilerState::Current().is_aot()) {
3184 if (instr->HasUnknownSideEffects() || instr->IsReturnBase()) {
3185 // An instruction that returns or has unknown side effects
3186 // is treated as if it loads from all places.
3187 live_in->CopyFrom(all_places);
3188 continue;
3189 } else if (instr->MayThrow()) {
3190 if (block->try_index() == kInvalidTryIndex) {
3191 // Outside of a try-catch block, an instruction that may throw
3192 // is only treated as if it loads from escaping places.
3193 live_in->AddAll(all_aliased_places);
3194 } else {
3195 // Inside of a try-catch block, an instruction that may throw
3196 // is treated as if it loads from all places.
3197 live_in->CopyFrom(all_places);
3198 }
3199 continue;
3200 }
3201 } else { // jit
3202 // Similar optimization to the above could be implemented in JIT
3203 // as long as deoptimization side-effects are taken into account.
3204 // Conceptually variables in deoptimization environment for
3205 // "MayThrow" instructions have to be also added to the
3206 // [live_in] set as they can be considered as escaping (to
3207 // unoptimized code). However those deoptimization environment
3208 // variables include also non-escaping(not aliased) ones, so
3209 // how to deal with that needs to be figured out.
3210 if (instr->HasUnknownSideEffects() || instr->CanDeoptimize() ||
3211 instr->MayThrow() || instr->IsReturnBase()) {
3212 // Instructions that return from the function, instructions with
3213 // side effects and instructions that can deoptimize are considered
3214 // as loads from all places.
3215 live_in->CopyFrom(all_places);
3216 continue;
3217 }
3218 }
3219
3220 // Handle loads.
3221 Definition* defn = instr->AsDefinition();
3222 if ((defn != nullptr) && IsLoadEliminationCandidate(defn)) {
3223 const intptr_t alias = aliased_set_->LookupAliasId(place.ToAlias());
3224 live_in->AddAll(aliased_set_->GetKilledSet(alias));
3225 continue;
3226 }
3227 }
3228 exposed_stores_[postorder_number] = exposed_stores;
3229 }
3230 if (FLAG_trace_load_optimization && graph_->should_print()) {
3231 Dump();
3232 THR_Print("---\n");
3233 }
3234 }
intptr_t max_place_id() const
intptr_t LookupAliasId(const Place &alias)
BitVector * GetKilledSet(intptr_t alias)
const ZoneGrowableArray< Place * > & places() const
bool Done() const
Definition: flow_graph.h:46
bool is_aot() const
static CompilerState & Current()
bool should_print() const
Definition: flow_graph.h:503
Zone * zone() const
Definition: flow_graph.h:261
BlockIterator postorder_iterator() const
Definition: flow_graph.h:222
GrowableArray< BitVector * > kill_
Definition: flow_graph.h:815
GrowableArray< BitVector * > live_out_
Definition: flow_graph.h:811
Zone * zone() const
Definition: flow_graph.h:799
GrowableArray< BitVector * > live_in_
Definition: flow_graph.h:819
static bool IsAllocation(Definition *defn)
static T Minimum(T x, T y)
Definition: utils.h:36
#define THR_Print(format,...)
Definition: log.h:20
VkInstance instance
Definition: main.cc:48
static DART_FORCE_INLINE intptr_t GetPlaceId(const Instruction *instr)
static bool IsLoadEliminationCandidate(Instruction *instr)
static constexpr intptr_t kInvalidTryIndex
#define Pd
Definition: globals.h:408

◆ OptimizeGraph()

static void dart::StoreOptimizer::OptimizeGraph ( FlowGraph *  graph)
inlinestatic

Definition at line 3028 of file redundancy_elimination.cc.

3028 {
3029 ASSERT(FLAG_load_cse);
3030
3031 // For now, bail out for large functions to avoid OOM situations.
3032 // TODO(fschneider): Fix the memory consumption issue.
3033 if (graph->is_huge_method()) {
3034 return;
3035 }
3036
3037 PointerSet<Place> map;
3038 AliasedSet* aliased_set = NumberPlaces(graph, &map, kOptimizeStores);
3039 if ((aliased_set != nullptr) && !aliased_set->IsEmpty()) {
3040 StoreOptimizer store_optimizer(graph, aliased_set, &map);
3041 store_optimizer.Optimize();
3042 }
3043 }
StoreOptimizer(FlowGraph *graph, AliasedSet *aliased_set, PointerSet< Place > *map)
#define ASSERT(E)
static AliasedSet * NumberPlaces(FlowGraph *graph, PointerSet< Place > *map, CSEMode mode)

The documentation for this class was generated from the following file: