Flutter Engine
The Flutter Engine
constant_propagator_test.cc
Go to the documentation of this file.
// Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
4
#include "vm/compiler/backend/constant_propagator.h"

#include <functional>

#include "vm/compiler/backend/block_builder.h"
#include "vm/compiler/backend/il_printer.h"
#include "vm/compiler/backend/il_test_helper.h"
#include "vm/compiler/backend/type_propagator.h"
#include "vm/unit_test.h"
12
namespace dart {
14
15// Test issue https://github.com/flutter/flutter/issues/53903.
16//
17// If graph contains a cyclic phi which participates in an EqualityCompare
18// or StrictCompare with its input like phi(x, ...) == x then constant
19// propagation might fail to converge by constantly revisiting this phi and
20// its uses (which includes comparison and the phi itself).
21ISOLATE_UNIT_TEST_CASE(ConstantPropagation_PhiUnwrappingAndConvergence) {
23 CompilerState S(thread, /*is_aot=*/false, /*is_optimizing=*/true);
25
26 // We are going to build the following graph:
27 //
28 // B0[graph_entry]
29 // B1[function_entry]:
30 // v0 <- Constant(0)
31 // goto B2
32 // B2:
33 // v1 <- phi(v0, v1)
34 // v2 <- EqualityCompare(v1 == v0)
35 // if v2 == true then B4 else B3
36 // B3:
37 // goto B2
38 // B4:
39 // Return(v1)
40
41 PhiInstr* v1;
42 ConstantInstr* v0 = H.IntConstant(0);
43 auto b1 = H.flow_graph()->graph_entry()->normal_entry();
44 auto b2 = H.JoinEntry();
45 auto b3 = H.TargetEntry();
46 auto b4 = H.TargetEntry();
47
48 {
49 BlockBuilder builder(H.flow_graph(), b1);
50 builder.AddInstruction(new GotoInstr(b2, S.GetNextDeoptId()));
51 }
52
53 {
54 BlockBuilder builder(H.flow_graph(), b2);
55 v1 = H.Phi(b2, {{b1, v0}, {b3, &v1}});
56 builder.AddPhi(v1);
57 auto v2 = builder.AddDefinition(
58 new EqualityCompareInstr(InstructionSource(), Token::kEQ, new Value(v1),
59 new Value(v0), kSmiCid, S.GetNextDeoptId()));
60 builder.AddBranch(new StrictCompareInstr(
61 InstructionSource(), Token::kEQ_STRICT, new Value(v2),
62 new Value(H.flow_graph()->GetConstant(Bool::True())),
63 /*needs_number_check=*/false, S.GetNextDeoptId()),
64 b4, b3);
65 }
66
67 {
68 BlockBuilder builder(H.flow_graph(), b3);
69 builder.AddInstruction(new GotoInstr(b2, S.GetNextDeoptId()));
70 }
71
72 {
73 BlockBuilder builder(H.flow_graph(), b4);
74 builder.AddReturn(new Value(v1));
75 }
76
77 H.FinishGraph();
78
79 // Graph transformations will attempt to copy deopt information from
80 // branches and block entries which we did not assign.
81 // To disable copying we mark graph to disable LICM.
82 H.flow_graph()->disallow_licm();
83
84 FlowGraphPrinter::PrintGraph("Before ConstantPropagator", H.flow_graph());
85 ConstantPropagator::Optimize(H.flow_graph());
86 FlowGraphPrinter::PrintGraph("After ConstantPropagator", H.flow_graph());
87
88 auto& blocks = H.flow_graph()->reverse_postorder();
89 EXPECT_EQ(2, blocks.length());
90 EXPECT_PROPERTY(blocks[0], it.IsGraphEntry());
91 EXPECT_PROPERTY(blocks[1], it.IsFunctionEntry());
92 EXPECT_PROPERTY(blocks[1]->next(), it.IsDartReturn());
93 EXPECT_PROPERTY(blocks[1]->next()->AsDartReturn(),
94 it.value()->definition() == v0);
95}
96
// This test does not work on 32-bit platforms because it requires
// kUnboxedInt64 constants.
#if defined(ARCH_IS_64_BIT)
namespace {
// Expected outcome of constant-folding a binary operation: either the
// operation must stay in the graph (NoFold) or it must be replaced by the
// given constant value (FoldsTo).
struct FoldingResult {
  static FoldingResult NoFold() { return {false, 0}; }

  static FoldingResult FoldsTo(int64_t result) { return {true, result}; }

  // True if the operation is expected to fold to a constant.
  bool should_fold;
  // Expected constant value; only meaningful when |should_fold| is true.
  int64_t result;
};
109
110static void ConstantPropagatorUnboxedOpTest(
111 Thread* thread,
112 int64_t lhs,
113 int64_t rhs,
114 std::function<Definition*(Definition*, Definition*, intptr_t)> make_op,
115 bool redundant_phi,
116 FoldingResult expected) {
117 using compiler::BlockBuilder;
118
119 CompilerState S(thread, /*is_aot=*/false, /*is_optimizing=*/true);
120 FlowGraphBuilderHelper H(/*num_parameters=*/1);
121 H.AddVariable("v0", AbstractType::ZoneHandle(Type::IntType()));
122
123 // We are going to build the following graph:
124 //
125 // B0[graph_entry]
126 // B1[function_entry]:
127 // v0 <- Parameter(0)
128 // if 1 == ${redundant_phi ? 1 : v0} then B2 else B3
129 // B2:
130 // goto B4
131 // B3:
132 // goto B4
133 // B4:
134 // v1 <- Phi(lhs, ${redundant_phi ? -1 : lhs}) repr
135 // v2 <- Constant(rhs)
136 // v3 <- make_op(v1, v2)
137 // Return(v3)
138 //
139 // Note that we test both the case when v1 is fully redundant (has a single
140 // live predecessor) and when it is not redundant but has a constant value.
141 // These two cases are handled by different code paths - so we need to cover
142 // them both to ensure that we properly insert any unbox operations
143 // which are needed.
144
145 auto b1 = H.flow_graph()->graph_entry()->normal_entry();
146 auto b2 = H.TargetEntry();
147 auto b3 = H.TargetEntry();
148 auto b4 = H.JoinEntry();
149 DartReturnInstr* ret;
150
151 {
152 BlockBuilder builder(H.flow_graph(), b1);
153 auto v0 = builder.AddParameter(/*index=*/0, kTagged);
154 builder.AddBranch(
155 new StrictCompareInstr(
156 InstructionSource(), Token::kEQ_STRICT, new Value(H.IntConstant(1)),
157 new Value(redundant_phi ? H.IntConstant(1) : v0),
158 /*needs_number_check=*/false, S.GetNextDeoptId()),
159 b2, b3);
160 }
161
162 {
163 BlockBuilder builder(H.flow_graph(), b2);
164 builder.AddInstruction(new GotoInstr(b4, S.GetNextDeoptId()));
165 }
166
167 {
168 BlockBuilder builder(H.flow_graph(), b3);
169 builder.AddInstruction(new GotoInstr(b4, S.GetNextDeoptId()));
170 }
171
172 PhiInstr* v1;
173 Definition* op;
174 {
175 BlockBuilder builder(H.flow_graph(), b4);
176 v1 = H.Phi(b4, {{b2, H.IntConstant(lhs)},
177 {b3, H.IntConstant(redundant_phi ? -1 : lhs)}});
178 builder.AddPhi(v1);
179 op = builder.AddDefinition(
180 make_op(v1, H.IntConstant(rhs), S.GetNextDeoptId()));
181 ret = builder.AddReturn(new Value(op));
182 }
183
184 H.FinishGraph();
185 FlowGraphPrinter::PrintGraph("Before Optimization", H.flow_graph());
186 FlowGraphTypePropagator::Propagate(H.flow_graph());
187 FlowGraphPrinter::PrintGraph("After Propagate", H.flow_graph());
188 // Force phi unboxing independent of heuristics.
189 v1->set_representation(op->representation());
190 H.flow_graph()->SelectRepresentations();
191 FlowGraphPrinter::PrintGraph("After SelectRepresentations", H.flow_graph());
192 H.flow_graph()->Canonicalize();
193 FlowGraphPrinter::PrintGraph("After Canonicalize", H.flow_graph());
194 if (!expected.should_fold) {
195 EXPECT_PROPERTY(ret->value()->definition(),
196 it.IsBoxInteger() && it.RequiredInputRepresentation(0) ==
197 op->representation());
198 }
199
200 ConstantPropagator::Optimize(H.flow_graph());
201 FlowGraphPrinter::PrintGraph("After ConstantPropagator", H.flow_graph());
202
203 // If |should_fold| then check that resulting graph is
204 //
205 // Return(Constant(result))
206 //
207 // otherwise check that the graph is
208 //
209 // Return(Box(op))
210 //
211 {
212 auto ret_val = ret->value()->definition();
213 if (expected.should_fold) {
214 EXPECT_PROPERTY(ret_val,
215 it.IsConstant() && it.representation() == kTagged);
216 EXPECT_EQ(expected.result,
217 Integer::Cast(ret_val->AsConstant()->value()).AsInt64Value());
218 } else {
219 EXPECT_PROPERTY(ret_val,
220 it.IsBoxInteger() && it.RequiredInputRepresentation(0) ==
221 op->representation());
222 auto boxed_value = ret_val->AsBoxInteger()->value()->definition();
223 EXPECT_PROPERTY(boxed_value, &it == op);
224 }
225 }
226}
227
228void ConstantPropagatorUnboxedOpTest(
229 Thread* thread,
230 int64_t lhs,
231 int64_t rhs,
232 std::function<Definition*(Definition*, Definition*, intptr_t)> make_op,
233 FoldingResult expected) {
234 ConstantPropagatorUnboxedOpTest(thread, lhs, rhs, make_op,
235 /*redundant_phi=*/false, expected);
236 ConstantPropagatorUnboxedOpTest(thread, lhs, rhs, make_op,
237 /*redundant_phi=*/true, expected);
238}
239
}  // namespace
241
242// This test verifies that constant propagation respects representations when
243// replacing unboxed operations.
244ISOLATE_UNIT_TEST_CASE(ConstantPropagator_Regress35371) {
245 auto make_int64_add = [](Definition* lhs, Definition* rhs,
246 intptr_t deopt_id) {
247 return new BinaryInt64OpInstr(Token::kADD, new Value(lhs), new Value(rhs),
249 };
250
251 auto make_int32_add = [](Definition* lhs, Definition* rhs,
252 intptr_t deopt_id) {
253 return new BinaryInt32OpInstr(Token::kADD, new Value(lhs), new Value(rhs),
254 deopt_id);
255 };
256
257 auto make_int32_truncating_add = [](Definition* lhs, Definition* rhs,
258 intptr_t deopt_id) {
259 auto op = new BinaryInt32OpInstr(Token::kADD, new Value(lhs),
260 new Value(rhs), deopt_id);
261 op->mark_truncating();
262 return op;
263 };
264
265 ConstantPropagatorUnboxedOpTest(thread, /*lhs=*/1, /*lhs=*/2, make_int64_add,
266 FoldingResult::FoldsTo(3));
267 ConstantPropagatorUnboxedOpTest(thread, /*lhs=*/kMaxInt64, /*lhs=*/1,
268 make_int64_add,
269 FoldingResult::FoldsTo(kMinInt64));
270
271 ConstantPropagatorUnboxedOpTest(thread, /*lhs=*/1, /*lhs=*/2, make_int32_add,
272 FoldingResult::FoldsTo(3));
273 ConstantPropagatorUnboxedOpTest(thread, /*lhs=*/kMaxInt32 - 1, /*lhs=*/1,
274 make_int32_add,
275 FoldingResult::FoldsTo(kMaxInt32));
276
277 // Overflow of int32 representation and operation is not marked as
278 // truncating.
279 ConstantPropagatorUnboxedOpTest(thread, /*lhs=*/kMaxInt32, /*lhs=*/1,
280 make_int32_add, FoldingResult::NoFold());
281
282 // Overflow of int32 representation and operation is marked as truncating.
283 ConstantPropagatorUnboxedOpTest(thread, /*lhs=*/kMaxInt32, /*lhs=*/1,
284 make_int32_truncating_add,
285 FoldingResult::FoldsTo(kMinInt32));
286}
#endif  // defined(ARCH_IS_64_BIT)
288
290 bool negate,
291 bool non_sentinel_on_left) {
292 const char* kScript = R"(
293 late final int x = 4;
294 )";
295 Zone* const Z = Thread::Current()->zone();
296 const auto& root_library = Library::CheckedHandle(Z, LoadTestScript(kScript));
297 const auto& toplevel = Class::Handle(Z, root_library.toplevel_class());
298 const auto& field_x = Field::Handle(
299 Z, toplevel.LookupStaticField(String::Handle(Z, String::New("x"))));
300
302 CompilerState S(thread, /*is_aot=*/false, /*is_optimizing=*/true);
304
305 auto b1 = H.flow_graph()->graph_entry()->normal_entry();
306
307 {
308 BlockBuilder builder(H.flow_graph(), b1);
309 auto v_load = builder.AddDefinition(new LoadStaticFieldInstr(
310 field_x, {},
311 /*calls_initializer=*/true, S.GetNextDeoptId()));
312 auto v_sentinel = H.flow_graph()->GetConstant(Object::sentinel());
313 Value* const left_value =
314 non_sentinel_on_left ? new Value(v_load) : new Value(v_sentinel);
315 Value* const right_value =
316 non_sentinel_on_left ? new Value(v_sentinel) : new Value(v_load);
317 auto v_compare = builder.AddDefinition(new StrictCompareInstr(
318 {}, negate ? Token::kNE_STRICT : Token::kEQ_STRICT, left_value,
319 right_value,
320 /*needs_number_check=*/false, S.GetNextDeoptId()));
321 builder.AddReturn(new Value(v_compare));
322 }
323
324 H.FinishGraph();
325
326 FlowGraphPrinter::PrintGraph("Before TypePropagator", H.flow_graph());
327 FlowGraphTypePropagator::Propagate(H.flow_graph());
328 FlowGraphPrinter::PrintGraph("After TypePropagator", H.flow_graph());
330 ConstantPropagator::Optimize(H.flow_graph());
331 FlowGraphPrinter::PrintGraph("After ConstantPropagator", H.flow_graph());
332
333 DartReturnInstr* ret = nullptr;
334
335 ILMatcher cursor(H.flow_graph(),
336 H.flow_graph()->graph_entry()->normal_entry(), true);
337 RELEASE_ASSERT(cursor.TryMatch({
338 kMatchAndMoveFunctionEntry,
339 kMatchAndMoveLoadStaticField,
340 // The StrictCompare instruction should be removed.
341 {kMatchDartReturn, &ret},
342 }));
343
344 EXPECT_PROPERTY(ret, it.value()->BindsToConstant());
345 EXPECT_PROPERTY(&ret->value()->BoundConstant(), it.IsBool());
346 EXPECT_PROPERTY(&ret->value()->BoundConstant(),
347 Bool::Cast(it).value() == negate);
348}
349
350ISOLATE_UNIT_TEST_CASE(ConstantPropagator_StrictCompareEqualsSentinelLeft) {
351 StrictCompareSentinel(thread, /*negate=*/false,
352 /*non_sentinel_on_left=*/true);
353}
354
355ISOLATE_UNIT_TEST_CASE(ConstantPropagator_StrictCompareEqualsSentinelRightt) {
356 StrictCompareSentinel(thread, /*negate=*/false,
357 /*non_sentinel_on_left=*/false);
358}
359
360ISOLATE_UNIT_TEST_CASE(ConstantPropagator_StrictCompareNotEqualsSentinelLeft) {
361 StrictCompareSentinel(thread, /*negate=*/true,
362 /*non_sentinel_on_left=*/true);
363}
364
365ISOLATE_UNIT_TEST_CASE(ConstantPropagator_StrictCompareNotEqualsSentinelRight) {
366 StrictCompareSentinel(thread, /*negate=*/true,
367 /*non_sentinel_on_left=*/false);
368}
369
}  // namespace dart
static float next(float f)
Vec2Value v2
#define RELEASE_ASSERT(cond)
Definition: assert.h:327
#define Z
static const Bool & True()
Definition: object.h:10797
static void Optimize(FlowGraph *graph)
static void PrintGraph(const char *phase, FlowGraph *flow_graph)
Definition: il_printer.cc:1706
bool TryMatch(std::initializer_list< MatchCode > match_codes, MatchOpCode insert_before=kInvalidMatchOpCode)
@ kNotSpeculative
Definition: il.h:975
static Object & Handle()
Definition: object.h:407
static Object & ZoneHandle()
Definition: object.h:419
static StringPtr New(const char *cstr, Heap::Space space=Heap::kNew)
Definition: object.cc:23698
Zone * zone() const
Definition: thread_state.h:37
static Thread * Current()
Definition: thread.h:362
static TypePtr IntType()
Definition: il.h:75
#define H
GAsyncResult * result
Dart_NativeFunction function
Definition: fuchsia.cc:51
#define EXPECT_PROPERTY(entity, property)
Definition: dart_vm.cc:33
constexpr int64_t kMaxInt64
Definition: globals.h:486
constexpr int64_t kMinInt64
Definition: globals.h:485
LibraryPtr LoadTestScript(const char *script, Dart_NativeEntryResolver resolver, const char *lib_uri)
constexpr int32_t kMinInt32
Definition: globals.h:482
void StrictCompareSentinel(Thread *thread, bool negate, bool non_sentinel_on_left)
ISOLATE_UNIT_TEST_CASE(StackAllocatedDestruction)
constexpr int32_t kMaxInt32
Definition: globals.h:483
Definition: SkMD5.cpp:130
#define ISOLATE_UNIT_TEST_CASE(name)
Definition: unit_test.h:64