Flutter Engine
dart::StoreOptimizer Class Reference
Inheritance diagram for dart::StoreOptimizer: dart::StoreOptimizer inherits dart::LivenessAnalysis, which inherits dart::ValueObject.

Public Member Functions

 StoreOptimizer (FlowGraph *graph, AliasedSet *aliased_set, PointerSet< Place > *map)
 
- Public Member Functions inherited from dart::LivenessAnalysis
 LivenessAnalysis (intptr_t variable_count, const GrowableArray< BlockEntryInstr * > &postorder)
 
void Analyze ()
 
virtual ~LivenessAnalysis ()
 
BitVector * GetLiveInSetAt (intptr_t postorder_number) const
 
BitVector * GetLiveOutSetAt (intptr_t postorder_number) const
 
BitVector * GetLiveInSet (BlockEntryInstr *block) const
 
BitVector * GetKillSet (BlockEntryInstr *block) const
 
BitVector * GetLiveOutSet (BlockEntryInstr *block) const
 
void Dump ()
 
- Public Member Functions inherited from dart::ValueObject
 ValueObject ()
 
 ~ValueObject ()
 

Static Public Member Functions

static void OptimizeGraph (FlowGraph *graph)
 

Private Member Functions

virtual void ComputeInitialSets ()
 

Additional Inherited Members

- Protected Member Functions inherited from dart::LivenessAnalysis
bool UpdateLiveOut (const BlockEntryInstr &instr)
 
bool UpdateLiveIn (const BlockEntryInstr &instr)
 
void ComputeLiveInAndLiveOutSets ()
 
Zone * zone () const
 
- Protected Attributes inherited from dart::LivenessAnalysis
Zone * zone_
 
const intptr_t variable_count_
 
const GrowableArray< BlockEntryInstr * > & postorder_
 
GrowableArray< BitVector * > live_out_
 
GrowableArray< BitVector * > kill_
 
GrowableArray< BitVector * > live_in_
 

Detailed Description

Definition at line 3012 of file redundancy_elimination.cc.
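
StoreOptimizer is a liveness-based dead store elimination pass over a FlowGraph: it reuses dart::LivenessAnalysis to track, per place, whether a stored value can still be observed by a later load, removes stores that cannot, and records the remaining downward-exposed stores as candidates for global store elimination (see ComputeInitialSets() below).

As a minimal illustration, not taken from the Dart sources, this is the kind of redundancy the pass targets, written here as plain C++ (the Box type and Example function are hypothetical):

struct Box {
  int field;
};

void Example(Box* box) {
  box->field = 1;  // dead store: the same place is overwritten below with no
                   // intervening load, so this store can be removed
  box->field = 2;  // downward-exposed store: its value may still be observed
}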

Constructor & Destructor Documentation

◆ StoreOptimizer()

dart::StoreOptimizer::StoreOptimizer ( FlowGraph *  graph,
AliasedSet *  aliased_set,
PointerSet< Place > *  map 
)
inline

Definition at line 3014 of file redundancy_elimination.cc.

3014 StoreOptimizer(FlowGraph* graph,
3015                AliasedSet* aliased_set,
3016                PointerSet<Place>* map)
3017 : LivenessAnalysis(aliased_set->max_place_id(), graph->postorder()),
3018 graph_(graph),
3019 map_(map),
3020 aliased_set_(aliased_set),
3021 exposed_stores_(graph_->postorder().length()) {
3022 const intptr_t num_blocks = graph_->postorder().length();
3023 for (intptr_t i = 0; i < num_blocks; i++) {
3024 exposed_stores_.Add(nullptr);
3025 }
3026 }

Member Function Documentation

◆ ComputeInitialSets()

virtual void dart::StoreOptimizer::ComputeInitialSets ( )
inline, private, virtual

Implements dart::LivenessAnalysis.

Definition at line 3077 of file redundancy_elimination.cc.

3077 {
3078 Zone* zone = graph_->zone();
3079 BitVector* all_places =
3080 new (zone) BitVector(zone, aliased_set_->max_place_id());
3081 all_places->SetAll();
3082
3083 BitVector* all_aliased_places =
3084 new (zone) BitVector(zone, aliased_set_->max_place_id());
3085 if (CompilerState::Current().is_aot()) {
3086 const auto& places = aliased_set_->places();
3087 // Go through all places and identify those which are escaping.
3088 // We find such places by inspecting definition allocation
3089 // [AliasIdentity] field, which is populated above by
3090 // [AliasedSet::ComputeAliasing].
3091 for (intptr_t i = 0; i < places.length(); i++) {
3092 Place* place = places[i];
3093 if (place->DependsOnInstance()) {
3094 Definition* instance = place->instance();
3095 if (Place::IsAllocation(instance) &&
3096 !instance->Identity().IsAliased()) {
3097 continue;
3098 }
3099 }
3100 all_aliased_places->Add(i);
3101 }
3102 }
3103
3104 for (BlockIterator block_it = graph_->postorder_iterator();
3105 !block_it.Done(); block_it.Advance()) {
3106 BlockEntryInstr* block = block_it.Current();
3107 const intptr_t postorder_number = block->postorder_number();
3108
3109 BitVector* kill = kill_[postorder_number];
3110 BitVector* live_in = live_in_[postorder_number];
3111 BitVector* live_out = live_out_[postorder_number];
3112
3113 ZoneGrowableArray<Instruction*>* exposed_stores = nullptr;
3114
3115 // Iterate backwards starting at the last instruction.
3116 for (BackwardInstructionIterator instr_it(block); !instr_it.Done();
3117 instr_it.Advance()) {
3118 Instruction* instr = instr_it.Current();
3119
3120 bool is_load = false;
3121 bool is_store = false;
3122 Place place(instr, &is_load, &is_store);
3123 if (place.IsImmutableField()) {
3124 // Loads/stores of final fields do not participate.
3125 continue;
3126 }
3127
3128 // Handle stores.
3129 if (is_store) {
3130 if (kill->Contains(GetPlaceId(instr))) {
3131 if (!live_in->Contains(GetPlaceId(instr)) &&
3132 CanEliminateStore(instr)) {
3133 if (FLAG_trace_optimization && graph_->should_print()) {
3134 THR_Print("Removing dead store to place %" Pd " in block B%" Pd
3135 "\n",
3136 GetPlaceId(instr), block->block_id());
3137 }
3138 instr_it.RemoveCurrentFromGraph();
3139 }
3140 } else if (!live_in->Contains(GetPlaceId(instr))) {
3141 // Mark this store as down-ward exposed: They are the only
3142 // candidates for the global store elimination.
3143 if (exposed_stores == nullptr) {
3144 const intptr_t kMaxExposedStoresInitialSize = 5;
3145 exposed_stores = new (zone) ZoneGrowableArray<Instruction*>(
3146 Utils::Minimum(kMaxExposedStoresInitialSize,
3147 aliased_set_->max_place_id()));
3148 }
3149 exposed_stores->Add(instr);
3150 }
3151 // Interfering stores kill only loads from the same place.
3152 kill->Add(GetPlaceId(instr));
3153 live_in->Remove(GetPlaceId(instr));
3154 continue;
3155 }
3156
3157 if (instr->IsThrow() || instr->IsReThrow() || instr->IsReturnBase()) {
3158 // Initialize live-out for exit blocks since it won't be computed
3159 // otherwise during the fixed point iteration.
3160 live_out->CopyFrom(all_places);
3161 }
3162
3163 // Handle side effects, deoptimization and function return.
3164 if (CompilerState::Current().is_aot()) {
3165 if (instr->HasUnknownSideEffects() || instr->IsReturnBase()) {
3166 // An instruction that returns or has unknown side effects
3167 // is treated as if it loads from all places.
3168 live_in->CopyFrom(all_places);
3169 continue;
3170 } else if (instr->MayThrow()) {
3171 if (block->try_index() == kInvalidTryIndex) {
3172 // Outside of a try-catch block, an instruction that may throw
3173 // is only treated as if it loads from escaping places.
3174 live_in->AddAll(all_aliased_places);
3175 } else {
3176 // Inside of a try-catch block, an instruction that may throw
3177 // is treated as if it loads from all places.
3178 live_in->CopyFrom(all_places);
3179 }
3180 continue;
3181 }
3182 } else { // jit
3183 // Similar optimization to the above could be implemented in JIT
3184 // as long as deoptimization side-effects are taken into account.
3185 // Conceptually variables in deoptimization environment for
3186 // "MayThrow" instructions have to be also added to the
3187 // [live_in] set as they can be considered as escaping (to
3188 // unoptimized code). However those deoptimization environment
3189 // variables include also non-escaping(not aliased) ones, so
3190 // how to deal with that needs to be figured out.
3191 if (instr->HasUnknownSideEffects() || instr->CanDeoptimize() ||
3192 instr->MayThrow() || instr->IsReturnBase()) {
3193 // Instructions that return from the function, instructions with
3194 // side effects and instructions that can deoptimize are considered
3195 // as loads from all places.
3196 live_in->CopyFrom(all_places);
3197 continue;
3198 }
3199 }
3200
3201 // Handle loads.
3202 Definition* defn = instr->AsDefinition();
3203 if ((defn != nullptr) && IsLoadEliminationCandidate(defn)) {
3204 const intptr_t alias = aliased_set_->LookupAliasId(place.ToAlias());
3205 live_in->AddAll(aliased_set_->GetKilledSet(alias));
3206 continue;
3207 }
3208 }
3209 exposed_stores_[postorder_number] = exposed_stores;
3210 }
3211 if (FLAG_trace_load_optimization && graph_->should_print()) {
3212 Dump();
3213 THR_Print("---\n");
3214 }
3215 }
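
The per-block live_in, live_out and kill sets initialized above are then resolved by the fixed point iteration inherited from dart::LivenessAnalysis (ComputeLiveInAndLiveOutSets(), listed under the inherited members above). Below is a minimal sketch of that backward dataflow, assuming simplified types (std::set instead of BitVector, a hypothetical Block struct) rather than the actual compiler data structures:

#include <cstdint>
#include <set>
#include <utility>
#include <vector>

using PlaceSet = std::set<intptr_t>;

struct Block {
  std::vector<Block*> successors;
  PlaceSet live_in;   // places whose current value may still be loaded later
  PlaceSet live_out;  // union of live_in over all successors
  PlaceSet kill;      // places overwritten by a store inside this block
};

// Backward liveness fixed point:
//   live_out[B] = union of live_in[S] over successors S of B
//   live_in[B]  = live_in[B] + (live_out[B] - kill[B])
// The "gen" contribution (upward-exposed loads) was already folded into
// live_in[B] by the initial pass sketched above; iteration order only
// affects how quickly this converges.
void ComputeFixedPoint(const std::vector<Block*>& blocks) {
  bool changed = true;
  while (changed) {
    changed = false;
    for (Block* block : blocks) {
      PlaceSet new_out;
      for (Block* succ : block->successors) {
        new_out.insert(succ->live_in.begin(), succ->live_in.end());
      }
      if (new_out != block->live_out) {
        block->live_out = std::move(new_out);
        changed = true;
      }
      PlaceSet new_in = block->live_in;
      for (intptr_t place : block->live_out) {
        if (block->kill.count(place) == 0) {
          new_in.insert(place);
        }
      }
      if (new_in != block->live_in) {
        block->live_in = std::move(new_in);
        changed = true;
      }
    }
  }
}

A store whose place is overwritten later in the same block without an intervening load is removed locally in the loop above; stores that remain downward-exposed are kept in exposed_stores_ for the global store elimination phase.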

◆ OptimizeGraph()

static void dart::StoreOptimizer::OptimizeGraph ( FlowGraph *  graph )
inline, static

Definition at line 3028 of file redundancy_elimination.cc.

3028 {
3029 ASSERT(FLAG_load_cse);
3030
3031 // For now, bail out for large functions to avoid OOM situations.
3032 // TODO(fschneider): Fix the memory consumption issue.
3033 if (graph->is_huge_method()) {
3034 return;
3035 }
3036
3037 PointerSet<Place> map;
3038 AliasedSet* aliased_set = NumberPlaces(graph, &map, kOptimizeStores);
3039 if ((aliased_set != nullptr) && !aliased_set->IsEmpty()) {
3040 StoreOptimizer store_optimizer(graph, aliased_set, &map);
3041 store_optimizer.Optimize();
3042 }
3043 }
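
A minimal sketch, assuming a caller that already owns the FlowGraph (the surrounding function name is hypothetical and not part of this API), of how this static entry point is driven:

// Hypothetical call site: OptimizeGraph() does its own setup (place
// numbering, AliasedSet construction) and runs a StoreOptimizer only when
// the aliased set is non-empty, so a pass just hands it the graph.
// Callers are expected to have FLAG_load_cse enabled, since the method
// asserts it.
void RunStoreElimination(FlowGraph* flow_graph) {
  StoreOptimizer::OptimizeGraph(flow_graph);
}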

The documentation for this class was generated from the following file:
redundancy_elimination.cc