Flutter Engine
The Flutter Engine
Loading...
Searching...
No Matches
redundancy_elimination_test.cc
Go to the documentation of this file.
1// Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
6
7#include <functional>
8#include <utility>
9
19#include "vm/flags.h"
20#include "vm/kernel_isolate.h"
21#include "vm/log.h"
22#include "vm/object.h"
23#include "vm/parser.h"
24#include "vm/symbols.h"
25#include "vm/unit_test.h"
26
27namespace dart {
28
// Native lookup resolver used by the test scripts below: resolves every
// lookup to the no-op native `NoopNative` and disables auto scope setup.
// NOTE(review): Doxygen extraction artifact — original source line numbers are
// fused into each line, and lines 31-32 (presumably the signature
// `static Dart_NativeFunction NoopNativeLookup(...)`) were elided. Confirm
// against the original redundancy_elimination_test.cc in the Dart SDK.
30
33 bool* auto_setup_scope) {
34 ASSERT(auto_setup_scope != nullptr);
35 *auto_setup_scope = false;
36 return NoopNative;
37}
38
// NOTE(review): lines 41 and 43 (the signature — presumably
// `static void FlattenScopeIntoEnvironment(FlowGraph* graph, ..., env)`) were
// elided by the Doxygen text extraction; the fused line numbers are artifacts.
39// Flatten all non-captured LocalVariables from the given scope and its children
40// and siblings into the given array based on their environment index.
42 LocalScope* scope,
// Fill env[EnvIndex(var)] = var for every non-captured variable in this scope.
44 for (intptr_t i = 0; i < scope->num_variables(); i++) {
45 auto var = scope->VariableAt(i);
46 if (var->is_captured()) {
47 continue;
48 }
49
50 auto index = graph->EnvIndex(var);
51 env->EnsureLength(index + 1, nullptr);
52 (*env)[index] = var;
53 }
54
// Recurse over sibling and child scopes so the whole scope tree is flattened.
55 if (scope->sibling() != nullptr) {
56 FlattenScopeIntoEnvironment(graph, scope->sibling(), env);
57 }
58 if (scope->child() != nullptr) {
59 FlattenScopeIntoEnvironment(graph, scope->child(), env);
60 }
61}
62
// NOTE(review): Doxygen extraction elided lines 66 (function signature), 72
// (presumably `Library::Handle(LoadTestScript(script_chars, ...));`), 82
// (presumably `TestPipeline pipeline(function, ...);`) and 91 (presumably the
// `GrowableArray<LocalVariable*> env;` declaration). Fused numbers are artifacts.
63// Run TryCatchAnalyzer optimization on the function foo from the given script
64// and check that the only variables from the given list are synchronized
65// on catch entry.
67 Thread* thread,
68 const char* script_chars,
69 std::initializer_list<const char*> synchronized) {
70 // Load the script and exercise the code once.
71 const auto& root_library =
73 Invoke(root_library, "main");
74
75 // Build the flow graph.
76 std::initializer_list<CompilerPass::Id> passes = {
77 CompilerPass::kComputeSSA, CompilerPass::kTypePropagation,
78 CompilerPass::kApplyICData, CompilerPass::kSelectRepresentations,
79 CompilerPass::kTypePropagation, CompilerPass::kCanonicalize,
80 };
81 const auto& function = Function::Handle(GetFunction(root_library, "foo"));
83 FlowGraph* graph = pipeline.RunPasses(passes);
84
85 // Finally run TryCatchAnalyzer on the graph (in AOT mode).
86 OptimizeCatchEntryStates(graph, /*is_aot=*/true);
87
88 EXPECT_EQ(1, graph->graph_entry()->catch_entries().length());
89 auto scope = graph->parsed_function().scope();
90
92 FlattenScopeIntoEnvironment(graph, scope, &env);
93
// Null out every env slot whose variable is NOT in the expected
// `synchronized` list, so only expected variables survive the check below.
94 for (intptr_t i = 0; i < env.length(); i++) {
95 bool found = false;
96 for (auto name : synchronized) {
97 if (env[i]->name().Equals(name)) {
98 found = true;
99 break;
100 }
101 }
102 if (!found) {
103 env[i] = nullptr;
104 }
105 }
106
107 CatchBlockEntryInstr* catch_entry = graph->graph_entry()->catch_entries()[0];
108
109 // We should only synchronize state for variables from the synchronized list.
110 for (auto defn : *catch_entry->initial_definitions()) {
111 if (ParameterInstr* param = defn->AsParameter()) {
// Register-allocated parameters must be the exception/stacktrace pair.
112 if (param->location().IsRegister()) {
113 EXPECT(param->location().Equals(LocationExceptionLocation()) ||
114 param->location().Equals(LocationStackTraceLocation()));
115 continue;
116 }
117
118 EXPECT(0 <= param->env_index() && param->env_index() < env.length());
119 EXPECT(env[param->env_index()] != nullptr);
120 if (env[param->env_index()] == nullptr) {
121 OS::PrintErr("something is wrong with %s\n", param->ToCString());
122 }
123 }
124 }
125}
126
127//
128// Tests for TryCatchOptimizer.
129//
130
// Catch block uses no variables, so nothing should be synchronized on entry.
// NOTE(review): numeric prefixes (incl. inside the raw string) are Doxygen
// extraction artifacts, not part of the real source.
131ISOLATE_UNIT_TEST_CASE(TryCatchOptimizer_DeadParameterElimination_Simple1) {
132 const char* script_chars = R"(
133 @pragma("vm:external-name", "BlackholeNative")
134 external dynamic blackhole([dynamic val]);
135 foo(int p) {
136 var a = blackhole(), b = blackhole();
137 try {
138 blackhole([a, b]);
139 } catch (e) {
140 // nothing is used
141 }
142 }
143 main() {
144 foo(42);
145 }
146 )";
147
148 TryCatchOptimizerTest(thread, script_chars, /*synchronized=*/{});
149}
150
// Catch block reads `a`, so only `a` must be synchronized on catch entry.
151ISOLATE_UNIT_TEST_CASE(TryCatchOptimizer_DeadParameterElimination_Simple2) {
152 const char* script_chars = R"(
153 @pragma("vm:external-name", "BlackholeNative")
154 external dynamic blackhole([dynamic val]);
155 foo(int p) {
156 var a = blackhole(), b = blackhole();
157 try {
158 blackhole([a, b]);
159 } catch (e) {
160 // a should be synchronized
161 blackhole(a);
162 }
163 }
164 main() {
165 foo(42);
166 }
167 )";
168
169 TryCatchOptimizerTest(thread, script_chars, /*synchronized=*/{"a"});
170}
171
// try/catch inside a loop: loop-carried `a` and loop counter `i` must be
// synchronized (`b` is redefined before the try on every iteration).
172ISOLATE_UNIT_TEST_CASE(TryCatchOptimizer_DeadParameterElimination_Cyclic1) {
173 const char* script_chars = R"(
174 @pragma("vm:external-name", "BlackholeNative")
175 external dynamic blackhole([dynamic val]);
176 foo(int p) {
177 var a = blackhole(), b;
178 for (var i = 0; i < 42; i++) {
179 b = blackhole();
180 try {
181 blackhole([a, b]);
182 } catch (e) {
183 // a and i should be synchronized
184 }
185 }
186 }
187 main() {
188 foo(42);
189 }
190 )";
191
192 TryCatchOptimizerTest(thread, script_chars, /*synchronized=*/{"a", "i"});
193}
194
// Like Cyclic1, but `b` is defined before the loop, so `a`, `b` and `i` all
// flow into the try on the loop back-edge and must be synchronized.
195ISOLATE_UNIT_TEST_CASE(TryCatchOptimizer_DeadParameterElimination_Cyclic2) {
196 const char* script_chars = R"(
197 @pragma("vm:external-name", "BlackholeNative")
198 external dynamic blackhole([dynamic val]);
199 foo(int p) {
200 var a = blackhole(), b = blackhole();
201 for (var i = 0; i < 42; i++) {
202 try {
203 blackhole([a, b]);
204 } catch (e) {
205 // a, b and i should be synchronized
206 }
207 }
208 }
209 main() {
210 foo(42);
211 }
212 )";
213
214 TryCatchOptimizerTest(thread, script_chars, /*synchronized=*/{"a", "b", "i"});
215}
216
217// LoadOptimizer tests
218
219// This family of tests verifies behavior of load forwarding when alias for an
220// allocation A is created by creating a redefinition for it and then
221// letting redefinition escape.
// Driver for the RedefinitionAliasing tests: builds the IL graph documented
// below by hand, runs DominatorBasedCSE, and checks which loads are forwarded.
// NOTE(review): Doxygen extraction elided lines 222 (signature start), 235
// (presumably `Library::Handle(LoadTestScript(...));`), 250/252 (presumably
// `FlowGraphBuilderHelper H;` and related), 270/273 (presumably declarations of
// `v0` and `v4`), and 302/305/320 (presumably the aliased/not-aliased
// `EXPECT_PROPERTY` checks). Fused line numbers are artifacts.
223 Thread* thread,
224 bool make_it_escape,
225 std::function<Definition*(CompilerState* S, FlowGraph*, Definition*)>
226 make_redefinition) {
227 const char* script_chars = R"(
228 @pragma("vm:external-name", "BlackholeNative")
229 external dynamic blackhole([a, b, c, d, e, f]);
230 class K {
231 var field;
232 }
233 )";
234 const Library& lib =
236
237 const Class& cls = Class::ZoneHandle(
238 lib.LookupClass(String::Handle(Symbols::New(thread, "K"))));
239 const Error& err = Error::Handle(cls.EnsureIsFinalized(thread));
240 EXPECT(err.IsNull());
241
242 const Field& original_field = Field::Handle(
243 cls.LookupField(String::Handle(Symbols::New(thread, "field"))));
244 EXPECT(!original_field.IsNull());
245 const Field& field = Field::Handle(original_field.CloneFromOriginal());
246
247 const Function& blackhole =
248 Function::ZoneHandle(GetFunction(lib, "blackhole"));
249
251 CompilerState S(thread, /*is_aot=*/false, /*is_optimizing=*/true);
253
254 // We are going to build the following graph:
255 //
256 // B0[graph_entry]
257 // B1[function_entry]:
258 // v0 <- AllocateObject(class K)
259 // v1 <- LoadField(v0, K.field)
260 // v2 <- make_redefinition(v0)
261 // MoveArgument(v1)
262 // #if make_it_escape
263 // MoveArgument(v2)
264 // #endif
265 // v3 <- StaticCall(blackhole, v1, v2)
266 // v4 <- LoadField(v2, K.field)
267 // Return v4
268
269 auto b1 = H.flow_graph()->graph_entry()->normal_entry();
271 LoadFieldInstr* v1;
272 StaticCallInstr* call;
274 DartReturnInstr* ret;
275
276 {
277 BlockBuilder builder(H.flow_graph(), b1);
278 auto& slot = Slot::Get(field, &H.flow_graph()->parsed_function());
279 v0 = builder.AddDefinition(
280 new AllocateObjectInstr(InstructionSource(), cls, S.GetNextDeoptId()));
281 v1 = builder.AddDefinition(
282 new LoadFieldInstr(new Value(v0), slot, InstructionSource()));
283 auto v2 = builder.AddDefinition(make_redefinition(&S, H.flow_graph(), v0));
284 InputsArray args(2);
285 args.Add(new Value(v1));
286 if (make_it_escape) {
287 args.Add(new Value(v2));
288 }
289 call = builder.AddInstruction(new StaticCallInstr(
290 InstructionSource(), blackhole, 0, Array::empty_array(),
291 std::move(args), S.GetNextDeoptId(), 0, ICData::RebindRule::kStatic));
292 v4 = builder.AddDefinition(
293 new LoadFieldInstr(new Value(v2), slot, InstructionSource()));
294 ret = builder.AddInstruction(new DartReturnInstr(
295 InstructionSource(), new Value(v4), S.GetNextDeoptId()));
296 }
297 H.FinishGraph();
298 DominatorBasedCSE::Optimize(H.flow_graph());
299
300 if (make_it_escape) {
301 // Allocation must be considered aliased.
303 } else {
304 // Allocation must be considered not-aliased.
306 }
307
308 // v1 should have been removed from the graph and replaced with constant_null.
309 EXPECT_PROPERTY(v1, it.next() == nullptr && it.previous() == nullptr);
310 EXPECT_PROPERTY(call, it.ArgumentAt(0) == H.flow_graph()->constant_null());
311
312 if (make_it_escape) {
313 // v4 however should not be removed from the graph, because v0 escapes into
314 // blackhole.
315 EXPECT_PROPERTY(v4, it.next() != nullptr && it.previous() != nullptr);
316 EXPECT_PROPERTY(ret, it.value()->definition() == v4);
317 } else {
318 // If v0 it not aliased then v4 should also be removed from the graph.
319 EXPECT_PROPERTY(v4, it.next() == nullptr && it.previous() == nullptr);
321 ret, it.value()->definition() == H.flow_graph()->constant_null());
322 }
323}
324
// Redefinition factory: wraps |defn| in a CheckNullInstr.
// NOTE(review): line 325 (signature start, presumably
// `static Definition* MakeCheckNull(CompilerState* S,`) was elided by the
// Doxygen extraction; fused line numbers are artifacts.
326 FlowGraph* flow_graph,
327 Definition* defn) {
328 return new CheckNullInstr(new Value(defn), String::ZoneHandle(),
329 S->GetNextDeoptId(), InstructionSource());
330}
331
// Redefinition factory: wraps |defn| in a plain RedefinitionInstr.
// NOTE(review): line 332 (signature start) was elided by the extraction.
333 FlowGraph* flow_graph,
334 Definition* defn) {
335 return new RedefinitionInstr(new Value(defn));
336}
337
// Redefinition factory: wraps |defn| in an AssertAssignableInstr against the
// Object type (always succeeds, but introduces a redefinition).
// NOTE(review): line 338 (signature start) was elided by the extraction.
339 FlowGraph* flow_graph,
340 Definition* defn) {
341 const auto& dst_type = AbstractType::ZoneHandle(Type::ObjectType());
342 return new AssertAssignableInstr(InstructionSource(), new Value(defn),
343 new Value(flow_graph->GetConstant(dst_type)),
344 new Value(flow_graph->constant_null()),
345 new Value(flow_graph->constant_null()),
346 Symbols::Empty(), S->GetNextDeoptId());
347}
348
// Variant: CheckNull redefinition, object does not escape.
349ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedefinitionAliasing_CheckNull_NoEscape) {
350 TestAliasingViaRedefinition(thread, /*make_it_escape=*/false, MakeCheckNull);
351}
352
// Variant: CheckNull redefinition, object escapes into blackhole.
353ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedefinitionAliasing_CheckNull_Escape) {
354 TestAliasingViaRedefinition(thread, /*make_it_escape=*/true, MakeCheckNull);
355}
356
// Variant: plain Redefinition, no escape.
// NOTE(review): lines 357 (`ISOLATE_UNIT_TEST_CASE(` start) and 360
// (presumably `MakeRedefinition);`) were elided by the extraction.
358 LoadOptimizer_RedefinitionAliasing_Redefinition_NoEscape) {
359 TestAliasingViaRedefinition(thread, /*make_it_escape=*/false,
361}
362
// Variant: plain Redefinition, object escapes.
// NOTE(review): line 365 (presumably `MakeRedefinition);`) was elided.
363ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedefinitionAliasing_Redefinition_Escape) {
364 TestAliasingViaRedefinition(thread, /*make_it_escape=*/true,
366}
367
// Variant: AssertAssignable redefinition, no escape.
// NOTE(review): lines 368 (macro start) and 371 (presumably
// `MakeAssertAssignable);`) were elided by the extraction.
369 LoadOptimizer_RedefinitionAliasing_AssertAssignable_NoEscape) {
370 TestAliasingViaRedefinition(thread, /*make_it_escape=*/false,
372}
373
// Variant: AssertAssignable redefinition, object escapes.
// NOTE(review): lines 374 (macro start) and 377 (presumably
// `MakeAssertAssignable);`) were elided by the extraction.
375 LoadOptimizer_RedefinitionAliasing_AssertAssignable_Escape) {
376 TestAliasingViaRedefinition(thread, /*make_it_escape=*/true,
378}
379
380// This family of tests verifies behavior of load forwarding when alias for an
381// allocation A is created by storing it into another object B and then
382// either loaded from it ([make_it_escape] is true) or object B itself
383// escapes ([make_host_escape] is true).
384// We insert redefinition for object B to check that use list traversal
385// correctly discovers all loads and stores from B.
// Driver for the AliasingViaStore tests: object v0 is stored into host object
// v5; aliasing is created either by loading v0 back out of the host
// (make_it_escape) or by letting the host itself escape (make_host_escape).
// NOTE(review): Doxygen extraction elided lines 386 (signature start), 400,
// 415/417 (presumably `FlowGraphBuilderHelper H;` and related), 443/444/447
// (presumably declarations of v0/v5/v4), 460/474 (StoreFieldInstr argument
// continuations), 490/493/497/499 (EXPECT_PROPERTY aliasing checks) and 514
// (`EXPECT_PROPERTY(` start). Fused line numbers are artifacts.
387 Thread* thread,
388 bool make_it_escape,
389 bool make_host_escape,
390 std::function<Definition*(CompilerState* S, FlowGraph*, Definition*)>
391 make_redefinition) {
392 const char* script_chars = R"(
393 @pragma("vm:external-name", "BlackholeNative")
394 external dynamic blackhole([a, b, c, d, e, f]);
395 class K {
396 var field;
397 }
398 )";
399 const Library& lib =
401
402 const Class& cls = Class::ZoneHandle(
403 lib.LookupClass(String::Handle(Symbols::New(thread, "K"))));
404 const Error& err = Error::Handle(cls.EnsureIsFinalized(thread));
405 EXPECT(err.IsNull());
406
407 const Field& original_field = Field::Handle(
408 cls.LookupField(String::Handle(Symbols::New(thread, "field"))));
409 EXPECT(!original_field.IsNull());
410 const Field& field = Field::Handle(original_field.CloneFromOriginal());
411
412 const Function& blackhole =
413 Function::ZoneHandle(GetFunction(lib, "blackhole"));
414
416 CompilerState S(thread, /*is_aot=*/false, /*is_optimizing=*/true);
418
419 // We are going to build the following graph:
420 //
421 // B0[graph_entry]
422 // B1[function_entry]:
423 // v0 <- AllocateObject(class K)
424 // v5 <- AllocateObject(class K)
425 // #if !make_host_escape
426 // StoreField(v5 . K.field = v0)
427 // #endif
428 // v1 <- LoadField(v0, K.field)
429 // v2 <- REDEFINITION(v5)
430 // MoveArgument(v1)
431 // #if make_it_escape
432 // v6 <- LoadField(v2, K.field)
433 // MoveArgument(v6)
434 // #elif make_host_escape
435 // StoreField(v2 . K.field = v0)
436 // MoveArgument(v5)
437 // #endif
438 // v3 <- StaticCall(blackhole, v1, v6)
439 // v4 <- LoadField(v0, K.field)
440 // Return v4
441
442 auto b1 = H.flow_graph()->graph_entry()->normal_entry();
445 LoadFieldInstr* v1;
446 StaticCallInstr* call;
448 DartReturnInstr* ret;
449
450 {
451 BlockBuilder builder(H.flow_graph(), b1);
452 auto& slot = Slot::Get(field, &H.flow_graph()->parsed_function());
453 v0 = builder.AddDefinition(
454 new AllocateObjectInstr(InstructionSource(), cls, S.GetNextDeoptId()));
455 v5 = builder.AddDefinition(
456 new AllocateObjectInstr(InstructionSource(), cls, S.GetNextDeoptId()));
457 if (!make_host_escape) {
458 builder.AddInstruction(
459 new StoreFieldInstr(slot, new Value(v5), new Value(v0),
461 }
462 v1 = builder.AddDefinition(
463 new LoadFieldInstr(new Value(v0), slot, InstructionSource()));
464 auto v2 = builder.AddDefinition(make_redefinition(&S, H.flow_graph(), v5));
465 InputsArray args(2);
466 args.Add(new Value(v1));
467 if (make_it_escape) {
468 auto v6 = builder.AddDefinition(
469 new LoadFieldInstr(new Value(v2), slot, InstructionSource()));
470 args.Add(new Value(v6));
471 } else if (make_host_escape) {
472 builder.AddInstruction(
473 new StoreFieldInstr(slot, new Value(v2), new Value(v0),
475 args.Add(new Value(v5));
476 }
477 call = builder.AddInstruction(new StaticCallInstr(
478 InstructionSource(), blackhole, 0, Array::empty_array(),
479 std::move(args), S.GetNextDeoptId(), 0, ICData::RebindRule::kStatic));
480 v4 = builder.AddDefinition(
481 new LoadFieldInstr(new Value(v0), slot, InstructionSource()));
482 ret = builder.AddInstruction(new DartReturnInstr(
483 InstructionSource(), new Value(v4), S.GetNextDeoptId()));
484 }
485 H.FinishGraph();
486 DominatorBasedCSE::Optimize(H.flow_graph());
487
488 if (make_it_escape || make_host_escape) {
489 // Allocation must be considered aliased.
491 } else {
492 // Allocation must not be considered aliased.
494 }
495
496 if (make_host_escape) {
498 } else {
500 }
501
502 // v1 should have been removed from the graph and replaced with constant_null.
503 EXPECT_PROPERTY(v1, it.next() == nullptr && it.previous() == nullptr);
504 EXPECT_PROPERTY(call, it.ArgumentAt(0) == H.flow_graph()->constant_null());
505
506 if (make_it_escape || make_host_escape) {
507 // v4 however should not be removed from the graph, because v0 escapes into
508 // blackhole.
509 EXPECT_PROPERTY(v4, it.next() != nullptr && it.previous() != nullptr);
510 EXPECT_PROPERTY(ret, it.value()->definition() == v4);
511 } else {
512 // If v0 it not aliased then v4 should also be removed from the graph.
513 EXPECT_PROPERTY(v4, it.next() == nullptr && it.previous() == nullptr);
515 ret, it.value()->definition() == H.flow_graph()->constant_null());
516 }
517}
518
// Variant: CheckNull redefinition, neither object nor host escapes.
519ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_CheckNull_NoEscape) {
520 TestAliasingViaStore(thread, /*make_it_escape=*/false,
521 /* make_host_escape= */ false, MakeCheckNull);
522}
523
// Variant: CheckNull redefinition, object escapes via load from host.
524ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_CheckNull_Escape) {
525 TestAliasingViaStore(thread, /*make_it_escape=*/true,
526 /* make_host_escape= */ false, MakeCheckNull);
527}
528
// Variant: CheckNull redefinition, host object itself escapes.
529ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_CheckNull_EscapeViaHost) {
530 TestAliasingViaStore(thread, /*make_it_escape=*/false,
531 /* make_host_escape= */ true, MakeCheckNull);
532}
533
// Variant: plain Redefinition, no escape.
534ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_Redefinition_NoEscape) {
535 TestAliasingViaStore(thread, /*make_it_escape=*/false,
536 /* make_host_escape= */ false, MakeRedefinition);
537}
538
// Variant: plain Redefinition, object escapes via load from host.
539ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_Redefinition_Escape) {
540 TestAliasingViaStore(thread, /*make_it_escape=*/true,
541 /* make_host_escape= */ false, MakeRedefinition);
542}
543
// Variant: plain Redefinition, host object escapes.
// NOTE(review): line 544 (`ISOLATE_UNIT_TEST_CASE(` start) was elided.
545 LoadOptimizer_AliasingViaStore_Redefinition_EscapeViaHost) {
546 TestAliasingViaStore(thread, /*make_it_escape=*/false,
547 /* make_host_escape= */ true, MakeRedefinition);
548}
549
// Variant: AssertAssignable redefinition, no escape.
// NOTE(review): line 550 (`ISOLATE_UNIT_TEST_CASE(` start) was elided.
551 LoadOptimizer_AliasingViaStore_AssertAssignable_NoEscape) {
552 TestAliasingViaStore(thread, /*make_it_escape=*/false,
553 /* make_host_escape= */ false, MakeAssertAssignable);
554}
555
// Variant: AssertAssignable redefinition, object escapes via load from host.
556ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_AssertAssignable_Escape) {
557 TestAliasingViaStore(thread, /*make_it_escape=*/true,
558 /* make_host_escape= */ false, MakeAssertAssignable);
559}
560
// Variant: AssertAssignable redefinition, host object escapes.
// NOTE(review): line 561 (`ISOLATE_UNIT_TEST_CASE(` start) was elided.
562 LoadOptimizer_AliasingViaStore_AssertAssignable_EscapeViaHost) {
563 TestAliasingViaStore(thread, /*make_it_escape=*/false,
564 /* make_host_escape= */ true, MakeAssertAssignable);
565}
566
567// This is a regression test for
568// https://github.com/flutter/flutter/issues/48114.
// Regression test for flutter/flutter#48114: a store through the untagged
// PointerBase_data inner pointer must block forwarding of a LoadIndexed done
// directly on the typed-data object (both alias the same backing store).
// NOTE(review): Doxygen extraction elided lines 570/572 (presumably
// `FlowGraphBuilderHelper H;` and related), 579 (presumably
// `const Function& function = Function::ZoneHandle(`), 607-610 (presumably
// declarations of v1/v2/store/v3), and 622/628/634/638/644 (trailing
// constructor-argument lines). Fused line numbers are artifacts.
569ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaTypedDataAndUntaggedTypedData) {
571 CompilerState S(thread, /*is_aot=*/false, /*is_optimizing=*/true);
573
574 const auto& lib = Library::Handle(Library::TypedDataLibrary());
575 const Class& cls = Class::Handle(lib.LookupClass(Symbols::Uint32List()));
576 const Error& err = Error::Handle(cls.EnsureIsFinalized(thread));
577 EXPECT(err.IsNull());
578
580 cls.LookupFactory(String::Handle(String::New("Uint32List."))));
581 EXPECT(!function.IsNull());
582
583 auto zone = H.flow_graph()->zone();
584
585 // We are going to build the following graph:
586 //
587 // B0[graph_entry] {
588 // vc0 <- Constant(0)
589 // vc42 <- Constant(42)
590 // }
591 //
592 // B1[function_entry] {
593 // }
594 // array <- StaticCall(...) {_Uint32List}
595 // v1 <- LoadIndexed(array)
596 // v2 <- LoadField(array, Slot::PointerBase_data())
597 // StoreIndexed(v2, index=vc0, value=vc42)
598 // v3 <- LoadIndexed(array)
599 // return v3
600 // }
601
602 auto vc0 = H.flow_graph()->GetConstant(Integer::Handle(Integer::New(0)));
603 auto vc42 = H.flow_graph()->GetConstant(Integer::Handle(Integer::New(42)));
604 auto b1 = H.flow_graph()->graph_entry()->normal_entry();
605
606 StaticCallInstr* array;
611 DartReturnInstr* ret;
612
613 {
614 BlockBuilder builder(H.flow_graph(), b1);
615
616 // array <- StaticCall(...) {_Uint32List}
617 array = builder.AddDefinition(new StaticCallInstr(
618 InstructionSource(), function, 0, Array::empty_array(), InputsArray(),
619 DeoptId::kNone, 0, ICData::kNoRebind));
620 array->UpdateType(CompileType::FromCid(kTypedDataUint32ArrayCid));
621 array->SetResultType(zone, CompileType::FromCid(kTypedDataUint32ArrayCid));
623
624 // v1 <- LoadIndexed(array)
625 v1 = builder.AddDefinition(new LoadIndexedInstr(
626 new Value(array), new Value(vc0), /*index_unboxed=*/false, 1,
627 kTypedDataUint32ArrayCid, kAlignedAccess, DeoptId::kNone,
629
630 // v2 <- LoadField(array, Slot::PointerBase_data())
631 // StoreIndexed(v2, index=0, value=42)
632 v2 = builder.AddDefinition(new LoadFieldInstr(
633 new Value(array), Slot::PointerBase_data(),
635 store = builder.AddInstruction(new StoreIndexedInstr(
636 new Value(v2), new Value(vc0), new Value(vc42), kNoStoreBarrier,
637 /*index_unboxed=*/false, 1, kTypedDataUint32ArrayCid, kAlignedAccess,
639
640 // v3 <- LoadIndexed(array)
641 v3 = builder.AddDefinition(new LoadIndexedInstr(
642 new Value(array), new Value(vc0), /*index_unboxed=*/false, 1,
643 kTypedDataUint32ArrayCid, kAlignedAccess, DeoptId::kNone,
645
646 // return v3
647 ret = builder.AddInstruction(new DartReturnInstr(
648 InstructionSource(), new Value(v3), S.GetNextDeoptId()));
649 }
650 H.FinishGraph();
651
652 DominatorBasedCSE::Optimize(H.flow_graph());
// All six instructions must survive CSE unchanged — nothing may be forwarded.
653 {
654 Instruction* sc = nullptr;
655 Instruction* li = nullptr;
656 Instruction* lf = nullptr;
657 Instruction* s = nullptr;
658 Instruction* li2 = nullptr;
659 Instruction* r = nullptr;
660 ILMatcher cursor(H.flow_graph(), b1, true);
661 RELEASE_ASSERT(cursor.TryMatch({
662 kMatchAndMoveFunctionEntry,
663 {kMatchAndMoveStaticCall, &sc},
664 {kMatchAndMoveLoadIndexed, &li},
665 {kMatchAndMoveLoadField, &lf},
666 {kMatchAndMoveStoreIndexed, &s},
667 {kMatchAndMoveLoadIndexed, &li2},
668 {kMatchDartReturn, &r},
669 }));
670 EXPECT(array == sc);
671 EXPECT(v1 == li);
672 EXPECT(v2 == lf);
673 EXPECT(store == s);
674 EXPECT(v3 == li2);
675 EXPECT(ret == r);
676 }
677}
678
679// This test ensures that a LoadNativeField of the PointerBase data field for
680// a newly allocated TypedData object does not have tagged null forwarded to it,
681// as that's wrong for two reasons: it's an unboxed field, and it is initialized
682// during the allocation stub.
// NOTE(review): Doxygen extraction elided lines 684/686 (presumably
// `FlowGraphBuilderHelper H;` and related) and 717/718/720 (presumably
// declarations of `array`, `view` and `store`). Fused numbers are artifacts.
683ISOLATE_UNIT_TEST_CASE(LoadOptimizer_LoadDataFieldOfNewTypedData) {
685 CompilerState S(thread, /*is_aot=*/false, /*is_optimizing=*/true);
687
688 auto zone = H.flow_graph()->zone();
689
690 // We are going to build the following graph:
691 //
692 // B0[graph_entry] {
693 // vc42 <- Constant(42)
694 // }
695 //
696 // B1[function_entry] {
697 // }
698 // array <- AllocateTypedData(kTypedDataUint8ArrayCid, vc42)
699 // view <- AllocateObject(kTypedDataUint8ArrayViewCid)
700 // v1 <- LoadNativeField(array, Slot::PointerBase_data())
701 // StoreNativeField(Slot::PointerBase_data(), view, v1, kNoStoreBarrier,
702 // kInitalizing)
703 // return view
704 // }
705
706 const auto& lib = Library::Handle(zone, Library::TypedDataLibrary());
707 EXPECT(!lib.IsNull());
708 const Class& view_cls = Class::ZoneHandle(
709 zone, lib.LookupClassAllowPrivate(Symbols::_Uint8ArrayView()));
710 EXPECT(!view_cls.IsNull());
711 const Error& err = Error::Handle(zone, view_cls.EnsureIsFinalized(thread));
712 EXPECT(err.IsNull());
713
714 auto vc42 = H.flow_graph()->GetConstant(Integer::Handle(Integer::New(42)));
715 auto b1 = H.flow_graph()->graph_entry()->normal_entry();
716
719 LoadFieldInstr* v1;
721 DartReturnInstr* ret;
722
723 {
724 BlockBuilder builder(H.flow_graph(), b1);
725
726 // array <- AllocateTypedData(kTypedDataUint8ArrayCid, vc42)
727 array = builder.AddDefinition(
728 new AllocateTypedDataInstr(InstructionSource(), kTypedDataUint8ArrayCid,
729 new (zone) Value(vc42), DeoptId::kNone));
730
731 // view <- AllocateObject(kTypedDataUint8ArrayViewCid, vta)
732 view = builder.AddDefinition(
733 new AllocateObjectInstr(InstructionSource(), view_cls, DeoptId::kNone));
734
735 // v1 <- LoadNativeField(array, Slot::PointerBase_data())
736 v1 = builder.AddDefinition(new LoadFieldInstr(
737 new (zone) Value(array), Slot::PointerBase_data(),
738 InnerPointerAccess::kMayBeInnerPointer, InstructionSource()));
739
740 // StoreNativeField(Slot::PointerBase_data(), view, v1, kNoStoreBarrier,
741 // kInitalizing)
742 store = builder.AddInstruction(new StoreFieldInstr(
743 Slot::PointerBase_data(), new (zone) Value(view), new (zone) Value(v1),
744 kNoStoreBarrier, InnerPointerAccess::kMayBeInnerPointer,
745 InstructionSource(), StoreFieldInstr::Kind::kInitializing));
746
747 // return view
748 ret = builder.AddInstruction(new DartReturnInstr(
749 InstructionSource(), new Value(view), S.GetNextDeoptId()));
750 }
751 H.FinishGraph();
752
753 DominatorBasedCSE::Optimize(H.flow_graph());
// The load of PointerBase_data must NOT be forwarded/removed: all five
// instructions have to survive CSE in their original order.
754 {
755 Instruction* alloc_array = nullptr;
756 Instruction* alloc_view = nullptr;
757 Instruction* lf = nullptr;
758 Instruction* sf = nullptr;
759 Instruction* r = nullptr;
760 ILMatcher cursor(H.flow_graph(), b1, true);
761 RELEASE_ASSERT(cursor.TryMatch({
762 kMatchAndMoveFunctionEntry,
763 {kMatchAndMoveAllocateTypedData, &alloc_array},
764 {kMatchAndMoveAllocateObject, &alloc_view},
765 {kMatchAndMoveLoadField, &lf},
766 {kMatchAndMoveStoreField, &sf},
767 {kMatchDartReturn, &r},
768 }));
769 EXPECT(array == alloc_array);
770 EXPECT(view == alloc_view);
771 EXPECT(v1 == lf);
772 EXPECT(store == sf);
773 EXPECT(ret == r);
774 }
775}
776
777// This test verifies that we correctly alias load/stores into typed array
778// which use different element sizes. This is a regression test for
779// a fix in 836c04f.
// NOTE(review): Doxygen extraction elided lines 802/804 (presumably
// `FlowGraphBuilderHelper H;` and related), 808 (presumably
// `LoadIndexedInstr* load;`) and 836 (trailing LoadFieldInstr argument line).
// Fused line numbers — including inside the raw string — are artifacts.
780ISOLATE_UNIT_TEST_CASE(LoadOptimizer_TypedArrayViewAliasing) {
781 const char* script_chars = R"(
782 import 'dart:typed_data';
783
784 class View {
785 final Float64List data;
786 View(this.data);
787 }
788 )";
789 const Library& lib =
790 Library::Handle(LoadTestScript(script_chars, NoopNativeLookup));
791
792 const Class& view_cls = Class::ZoneHandle(
793 lib.LookupClass(String::Handle(Symbols::New(thread, "View"))));
794 const Error& err = Error::Handle(view_cls.EnsureIsFinalized(thread));
795 EXPECT(err.IsNull());
796
797 const Field& original_field = Field::Handle(
798 view_cls.LookupField(String::Handle(Symbols::New(thread, "data"))));
799 EXPECT(!original_field.IsNull());
800 const Field& field = Field::Handle(original_field.CloneFromOriginal());
801
803 CompilerState S(thread, /*is_aot=*/false, /*is_optimizing=*/true);
805
806 auto b1 = H.flow_graph()->graph_entry()->normal_entry();
807
809 DartReturnInstr* ret;
810
811 {
812 BlockBuilder builder(H.flow_graph(), b1);
813 // array <- AllocateTypedData(1)
814 const auto array = builder.AddDefinition(new AllocateTypedDataInstr(
815 InstructionSource(), kTypedDataFloat64ArrayCid,
816 new Value(H.IntConstant(1)), DeoptId::kNone));
817 // view <- AllocateObject(View)
818 const auto view = builder.AddDefinition(
819 new AllocateObjectInstr(InstructionSource(), view_cls, DeoptId::kNone));
820 // StoreField(view.data = array)
821 builder.AddInstruction(new StoreFieldInstr(
822 field, new Value(view), new Value(array),
823 StoreBarrierType::kNoStoreBarrier, InstructionSource(),
824 &H.flow_graph()->parsed_function()));
825 // StoreIndexed(array <float64>, 0, 1.0)
826 builder.AddInstruction(new StoreIndexedInstr(
827 new Value(array), new Value(H.IntConstant(0)),
828 new Value(H.DoubleConstant(1.0)), StoreBarrierType::kNoStoreBarrier,
829 /*index_unboxed=*/false,
830 /*index_scale=*/Instance::ElementSizeFor(kTypedDataFloat64ArrayCid),
831 kTypedDataFloat64ArrayCid, AlignmentType::kAlignedAccess,
832 DeoptId::kNone, InstructionSource()));
833 // array_alias <- LoadField(view.data)
834 const auto array_alias = builder.AddDefinition(new LoadFieldInstr(
835 new Value(view), Slot::Get(field, &H.flow_graph()->parsed_function()),
837 // StoreIndexed(array_alias <float32>, 1, 2.0)
838 builder.AddInstruction(new StoreIndexedInstr(
839 new Value(array_alias), new Value(H.IntConstant(1)),
840 new Value(H.DoubleConstant(2.0)), StoreBarrierType::kNoStoreBarrier,
841 /*index_unboxed=*/false,
842 /*index_scale=*/Instance::ElementSizeFor(kTypedDataFloat32ArrayCid),
843 kTypedDataFloat32ArrayCid, AlignmentType::kAlignedAccess,
844 DeoptId::kNone, InstructionSource()));
845 // load <- LoadIndexed(array <float64>, 0)
846 load = builder.AddDefinition(new LoadIndexedInstr(
847 new Value(array), new Value(H.IntConstant(0)), /*index_unboxed=*/false,
848 /*index_scale=*/Instance::ElementSizeFor(kTypedDataFloat64ArrayCid),
849 kTypedDataFloat64ArrayCid, AlignmentType::kAlignedAccess,
850 DeoptId::kNone, InstructionSource()));
851 // Return(load)
852 ret = builder.AddReturn(new Value(load));
853 }
854 H.FinishGraph();
855 DominatorBasedCSE::Optimize(H.flow_graph());
856
857 // Check that we do not forward the load in question.
858 EXPECT_PROPERTY(ret, it.value()->definition() == load);
859}
860
861static void CountLoadsStores(FlowGraph* flow_graph,
862 intptr_t* loads,
863 intptr_t* stores) {
864 for (BlockIterator block_it = flow_graph->reverse_postorder_iterator();
865 !block_it.Done(); block_it.Advance()) {
866 for (ForwardInstructionIterator it(block_it.Current()); !it.Done();
867 it.Advance()) {
868 if (it.Current()->IsLoadField()) {
869 (*loads)++;
870 } else if (it.Current()->IsStoreField()) {
871 (*stores)++;
872 }
873 }
874 }
875}
876
// Verifies CSE collapses the redundant stores/loads on `bar.a`: the pipeline
// below produces 2 loads / 4 stores, and CSE must reduce that to 0 / 1.
// NOTE(review): numeric prefixes (incl. inside the raw string) are Doxygen
// extraction artifacts, not part of the real source.
877ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantStoresAndLoads) {
878 const char* kScript = R"(
879 class Bar {
880 Bar() { a = null; }
881 dynamic a;
882 }
883
884 Bar foo() {
885 Bar bar = new Bar();
886 bar.a = null;
887 bar.a = bar;
888 bar.a = bar.a;
889 return bar.a;
890 }
891
892 main() {
893 foo();
894 }
895 )";
896
897 const auto& root_library = Library::Handle(LoadTestScript(kScript));
898 Invoke(root_library, "main");
899 const auto& function = Function::Handle(GetFunction(root_library, "foo"));
900 TestPipeline pipeline(function, CompilerPass::kJIT);
901 FlowGraph* flow_graph = pipeline.RunPasses({
902 CompilerPass::kComputeSSA,
903 CompilerPass::kTypePropagation,
904 CompilerPass::kApplyICData,
905 CompilerPass::kInlining,
906 CompilerPass::kTypePropagation,
907 CompilerPass::kSelectRepresentations,
908 CompilerPass::kCanonicalize,
909 CompilerPass::kConstantPropagation,
910 });
911
912 ASSERT(flow_graph != nullptr);
913
914 // Before CSE, we have 2 loads and 4 stores.
915 intptr_t bef_loads = 0;
916 intptr_t bef_stores = 0;
917 CountLoadsStores(flow_graph, &bef_loads, &bef_stores);
918 EXPECT_EQ(2, bef_loads);
919 EXPECT_EQ(4, bef_stores);
920
921 DominatorBasedCSE::Optimize(flow_graph);
922
923 // After CSE, no load and only one store remains.
924 intptr_t aft_loads = 0;
925 intptr_t aft_stores = 0;
926 CountLoadsStores(flow_graph, &aft_loads, &aft_stores);
927 EXPECT_EQ(0, aft_loads);
928 EXPECT_EQ(1, aft_stores);
929}
930
// `foo()` reads static field `x` twice; after the JIT pipeline only a single
// LoadStaticField should remain in the matched instruction sequence.
// NOTE(review): numeric prefixes are Doxygen extraction artifacts.
931ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantStaticFieldInitialization) {
932 const char* kScript = R"(
933 int getX() => 2;
934 int x = getX();
935
936 foo() => x + x;
937
938 main() {
939 foo();
940 }
941 )";
942
943 const auto& root_library = Library::Handle(LoadTestScript(kScript));
944 Invoke(root_library, "main");
945 const auto& function = Function::Handle(GetFunction(root_library, "foo"));
946 TestPipeline pipeline(function, CompilerPass::kJIT);
947 FlowGraph* flow_graph = pipeline.RunPasses({});
948 ASSERT(flow_graph != nullptr);
949
950 auto entry = flow_graph->graph_entry()->normal_entry();
951 EXPECT(entry != nullptr);
952
953 ILMatcher cursor(flow_graph, entry);
954 RELEASE_ASSERT(cursor.TryMatch({
955 kMatchAndMoveFunctionEntry,
956 kMatchAndMoveCheckStackOverflow,
957 kMatchAndMoveLoadStaticField,
958 kMoveParallelMoves,
959 kMatchAndMoveCheckSmi,
960 kMoveParallelMoves,
961 kMatchAndMoveBinarySmiOp,
962 kMoveParallelMoves,
963 kMatchDartReturn,
964 }));
965}
966
// Both branches of the `if` leave `x` initialized, so the LoadStaticField
// after the join must not call the initializer again.
// NOTE(review): numeric prefixes are Doxygen extraction artifacts.
967ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantInitializerCallAfterIf) {
968 const char* kScript = R"(
969 int x = int.parse('1');
970
971 @pragma('vm:never-inline')
972 use(int arg) {}
973
974 foo(bool condition) {
975 if (condition) {
976 x = 3;
977 } else {
978 use(x);
979 }
980 use(x);
981 }
982
983 main() {
984 foo(true);
985 }
986 )";
987
988 const auto& root_library = Library::Handle(LoadTestScript(kScript));
989 Invoke(root_library, "main");
990 const auto& function = Function::Handle(GetFunction(root_library, "foo"));
991 TestPipeline pipeline(function, CompilerPass::kJIT);
992 FlowGraph* flow_graph = pipeline.RunPasses({});
993 ASSERT(flow_graph != nullptr);
994
995 auto entry = flow_graph->graph_entry()->normal_entry();
996 EXPECT(entry != nullptr);
997
998 LoadStaticFieldInstr* load_static_after_if = nullptr;
999
1000 ILMatcher cursor(flow_graph, entry);
1001 RELEASE_ASSERT(cursor.TryMatch({
1002 kMoveGlob,
1003 kMatchAndMoveBranchTrue,
1004 kMoveGlob,
1005 kMatchAndMoveGoto,
1006 kMatchAndMoveJoinEntry,
1007 kMoveParallelMoves,
1008 {kMatchAndMoveLoadStaticField, &load_static_after_if},
1009 kMoveGlob,
1010 kMatchDartReturn,
1011 }));
1012 EXPECT(!load_static_after_if->calls_initializer());
1013}
1014
// Late-field initializer elimination across a loop: the first in-loop load of
// `obj.x` is dominated by the pre-loop load and needs no initializer call;
// after `obj = next` the receiver changes, so the initializer is needed again.
// NOTE(review): Doxygen extraction elided line 1071 (an ILMatcher entry
// between the two in-loop loads); fused numeric prefixes are artifacts.
1015ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantInitializerCallInLoop) {
1016 if (!TestCase::IsNNBD()) {
1017 return;
1018 }
1019
1020 const char* kScript = R"(
1021 class A {
1022 late int x = int.parse('1');
1023 A? next;
1024 }
1025
1026 @pragma('vm:never-inline')
1027 use(int arg) {}
1028
1029 foo(A obj) {
1030 use(obj.x);
1031 for (;;) {
1032 use(obj.x);
1033 final next = obj.next;
1034 if (next == null) {
1035 break;
1036 }
1037 obj = next;
1038 use(obj.x);
1039 }
1040 }
1041
1042 main() {
1043 foo(A()..next = A());
1044 }
1045 )";
1046
1047 const auto& root_library = Library::Handle(LoadTestScript(kScript));
1048 Invoke(root_library, "main");
1049 const auto& function = Function::Handle(GetFunction(root_library, "foo"));
1050 TestPipeline pipeline(function, CompilerPass::kJIT);
1051 FlowGraph* flow_graph = pipeline.RunPasses({});
1052 ASSERT(flow_graph != nullptr);
1053
1054 auto entry = flow_graph->graph_entry()->normal_entry();
1055 EXPECT(entry != nullptr);
1056
1057 LoadFieldInstr* load_field_before_loop = nullptr;
1058 LoadFieldInstr* load_field_in_loop1 = nullptr;
1059 LoadFieldInstr* load_field_in_loop2 = nullptr;
1060
1061 ILMatcher cursor(flow_graph, entry);
1062 RELEASE_ASSERT(cursor.TryMatch({
1063 kMoveGlob,
1064 {kMatchAndMoveLoadField, &load_field_before_loop},
1065 kMoveGlob,
1066 kMatchAndMoveGoto,
1067 kMatchAndMoveJoinEntry,
1068 kMoveGlob,
1069 {kMatchAndMoveLoadField, &load_field_in_loop1},
1070 kMoveGlob,
1072 kMoveGlob,
1073 {kMatchAndMoveLoadField, &load_field_in_loop2},
1074 }));
1075
1076 EXPECT(load_field_before_loop->calls_initializer());
1077 EXPECT(!load_field_in_loop1->calls_initializer());
1078 EXPECT(load_field_in_loop2->calls_initializer());
1079}
1080
1081#if !defined(TARGET_ARCH_IA32)
1082
1083ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantInitializingStoreAOT) {
1084 const char* kScript = R"(
1085class Vec3 {
1086 final double x, y, z;
1087
1088 @pragma('vm:prefer-inline')
1089 const Vec3(this.x, this.y, this.z);
1090
1091 @override
1092 @pragma('vm:prefer-inline')
1093 String toString() => _vec3ToString(x, y, z);
1094}
1095
1096@pragma('vm:never-inline')
1097String _vec3ToString(double x, double y, double z) => '';
1098
1099// Boxed storage for Vec3.
1100// Fields are unboxed.
1101class Vec3Mut {
1102 double _x = 0.0;
1103 double _y = 0.0;
1104 double _z = 0.0;
1105
1106 Vec3Mut(Vec3 v)
1107 : _x = v.x,
1108 _y = v.y,
1109 _z = v.z;
1110
1111 @override
1112 String toString() => _vec3ToString(_x, _y, _z);
1113
1114 @pragma('vm:prefer-inline')
1115 set vec(Vec3 v) {
1116 _x = v.x;
1117 _y = v.y;
1118 _z = v.z;
1119 }
1120}
1121
1122Vec3Mut main() {
1123 final a = Vec3(3, 4, 5);
1124 final b = Vec3(8, 9, 10);
1125 final c = Vec3(18, 19, 20);
1126 final d = Vec3(180, 190, 200);
1127 final e = Vec3(1800, 1900, 2000);
1128 final v = Vec3Mut(a);
1129 v.vec = b;
1130 v.vec = c;
1131 v.vec = d;
1132 v.vec = e;
1133 return v;
1134}
1135 )";
1136
1137 const auto& root_library = Library::Handle(LoadTestScript(kScript));
1138 const auto& function = Function::Handle(GetFunction(root_library, "main"));
1139
1140 TestPipeline pipeline(function, CompilerPass::kAOT);
1141 FlowGraph* flow_graph = pipeline.RunPasses({});
1142 auto entry = flow_graph->graph_entry()->normal_entry();
1143
1144 AllocateObjectInstr* allocate;
1145 StoreFieldInstr* store1;
1146 StoreFieldInstr* store2;
1147 StoreFieldInstr* store3;
1148
1149 ILMatcher cursor(flow_graph, entry, true, ParallelMovesHandling::kSkip);
1150 RELEASE_ASSERT(cursor.TryMatch({
1151 kMoveGlob,
1152 {kMatchAndMoveAllocateObject, &allocate},
1153 {kMatchAndMoveStoreField, &store1},
1154 {kMatchAndMoveStoreField, &store2},
1155 {kMatchAndMoveStoreField, &store3},
1156 kMatchDartReturn,
1157 }));
1158
1159 EXPECT(store1->instance()->definition() == allocate);
1160 EXPECT(store2->instance()->definition() == allocate);
1161 EXPECT(store3->instance()->definition() == allocate);
1162}
1163
1164ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantStoreAOT) {
1165 const char* kScript = R"(
1166class Foo {
1167 int x = -1;
1168
1169 toString() => "Foo x: $x";
1170}
1171
1172class Bar {}
1173
1174main() {
1175 final foo = Foo();
1176 foo.x = 11;
1177 new Bar();
1178 foo.x = 12;
1179 new Bar();
1180 foo.x = 13;
1181 return foo;
1182}
1183 )";
1184
1185 const auto& root_library = Library::Handle(LoadTestScript(kScript));
1186 Invoke(root_library, "main");
1187 const auto& function = Function::Handle(GetFunction(root_library, "main"));
1188 TestPipeline pipeline(function, CompilerPass::kAOT);
1189 FlowGraph* flow_graph = pipeline.RunPasses({});
1190 auto entry = flow_graph->graph_entry()->normal_entry();
1191
1192 AllocateObjectInstr* allocate;
1193 StoreFieldInstr* store1;
1194
1195 ILMatcher cursor(flow_graph, entry, true, ParallelMovesHandling::kSkip);
1196 RELEASE_ASSERT(cursor.TryMatch({
1197 kMoveGlob,
1198 {kMatchAndMoveAllocateObject, &allocate},
1199 {kMatchAndMoveStoreField, &store1}, // initializing store
1200 kMatchDartReturn,
1201 }));
1202
1203 EXPECT(store1->instance()->definition() == allocate);
1204}
1205
1206#endif // !defined(TARGET_ARCH_IA32)
1207
// Tests that allocation sinking eliminates every allocation in `foo` (the
// Float64List-backed Vector2 temporaries and the generic list `v1`) except
// the array built for string interpolation, whose result feeds the
// `_interpolate` static call directly.
ISOLATE_UNIT_TEST_CASE(AllocationSinking_Arrays) {
  const char* kScript = R"(
import 'dart:typed_data';

class Vector2 {
  final Float64List _v2storage;

  @pragma('vm:prefer-inline')
  Vector2.zero() : _v2storage = Float64List(2);

  @pragma('vm:prefer-inline')
  factory Vector2(double x, double y) => Vector2.zero()..setValues(x, y);

  @pragma('vm:prefer-inline')
  factory Vector2.copy(Vector2 other) => Vector2.zero()..setFrom(other);

  @pragma('vm:prefer-inline')
  Vector2 clone() => Vector2.copy(this);

  @pragma('vm:prefer-inline')
  void setValues(double x_, double y_) {
    _v2storage[0] = x_;
    _v2storage[1] = y_;
  }

  @pragma('vm:prefer-inline')
  void setFrom(Vector2 other) {
    final otherStorage = other._v2storage;
    _v2storage[1] = otherStorage[1];
    _v2storage[0] = otherStorage[0];
  }

  @pragma('vm:prefer-inline')
  Vector2 operator +(Vector2 other) => clone()..add(other);

  @pragma('vm:prefer-inline')
  void add(Vector2 arg) {
    final argStorage = arg._v2storage;
    _v2storage[0] = _v2storage[0] + argStorage[0];
    _v2storage[1] = _v2storage[1] + argStorage[1];
  }

  @pragma('vm:prefer-inline')
  double get x => _v2storage[0];

  @pragma('vm:prefer-inline')
  double get y => _v2storage[1];
}

@pragma('vm:never-inline')
String foo(double x) {
  // All allocations in this function are eliminated by the compiler,
  // except array allocation for string interpolation at the end.
  List v1 = List.filled(2, null);
  v1[0] = 1;
  v1[1] = 'hi';
  Vector2 v2 = new Vector2(1.0, 2.0);
  Vector2 v3 = v2 + Vector2(x, x);
  double sum = v3.x + v3.y;
  return "v1: [${v1[0]},${v1[1]}], v2: [${v2.x},${v2.y}], v3: [${v3.x},${v3.y}], sum: $sum";
}

main() {
  foo(42.0);
}
  )";

  // Run once unoptimized for type feedback, then build the JIT graph.
  const auto& root_library = Library::Handle(LoadTestScript(kScript));
  Invoke(root_library, "main");
  const auto& function = Function::Handle(GetFunction(root_library, "foo"));
  TestPipeline pipeline(function, CompilerPass::kJIT);
  FlowGraph* flow_graph = pipeline.RunPasses({});
  ASSERT(flow_graph != nullptr);

  auto entry = flow_graph->graph_entry()->normal_entry();
  EXPECT(entry != nullptr);

  /* Flow graph to match:

  4: CheckStackOverflow:8(stack=0, loop=0)
  5: ParallelMove rax <- S+2
  6: CheckClass:14(v2 Cids[1: _Double@0150898 etc.  cid 62] nullcheck)
  8:     v312 <- Unbox:14(v2 T{_Double}) T{_Double}
 10: ParallelMove xmm1 <- C
 10:     v221 <- BinaryDoubleOp:22(+, v341, v312) T{_Double}
 11: ParallelMove DS-7 <- xmm1
 12: ParallelMove xmm2 <- C
 12:     v227 <- BinaryDoubleOp:34(+, v342, v312) T{_Double}
 13: ParallelMove DS-6 <- xmm2
 14:     v333 <- Box(v221) T{_Double}
 15: ParallelMove S-4 <- rax
 16:     v334 <- Box(v227) T{_Double}
 17: ParallelMove S-3 <- rcx
 18: ParallelMove xmm0 <- xmm1
 18:     v15 <- BinaryDoubleOp:28(+, v221, v227) T{_Double}
 19: ParallelMove rbx <- C, r10 <- C, DS-5 <- xmm0
 20:     v17 <- CreateArray:30(v0, v16) T{_List}
 21: ParallelMove rcx <- rax
 22:     StoreIndexed(v17, v5, v18, NoStoreBarrier)
 24:     StoreIndexed(v17, v6, v6, NoStoreBarrier)
 26:     StoreIndexed(v17, v3, v20, NoStoreBarrier)
 28:     StoreIndexed(v17, v21, v7, NoStoreBarrier)
 30:     StoreIndexed(v17, v23, v24, NoStoreBarrier)
 32:     StoreIndexed(v17, v25, v8, NoStoreBarrier)
 34:     StoreIndexed(v17, v27, v20, NoStoreBarrier)
 36:     StoreIndexed(v17, v28, v9, NoStoreBarrier)
 38:     StoreIndexed(v17, v30, v31, NoStoreBarrier)
 39: ParallelMove rax <- S-4
 40:     StoreIndexed(v17, v32, v333, NoStoreBarrier)
 42:     StoreIndexed(v17, v34, v20, NoStoreBarrier)
 43: ParallelMove rax <- S-3
 44:     StoreIndexed(v17, v35, v334, NoStoreBarrier)
 46:     StoreIndexed(v17, v37, v38, NoStoreBarrier)
 47: ParallelMove xmm0 <- DS-5
 48:     v335 <- Box(v15) T{_Double}
 49: ParallelMove rdx <- rcx, rax <- rax
 50:     StoreIndexed(v17, v39, v335)
 52:     MoveArgument(v17)
 54:     v40 <- StaticCall:44( _interpolate@0150898<0> v17,
          recognized_kind = StringBaseInterpolate) T{String?}
 56: Return:48(v40)
*/

  // Captured below: the interpolation array and the _interpolate call.
  CreateArrayInstr* create_array = nullptr;
  StaticCallInstr* string_interpolate = nullptr;

  ILMatcher cursor(flow_graph, entry, /*trace=*/true,
                   ParallelMovesHandling::kSkip);
  RELEASE_ASSERT(cursor.TryMatch({
      kMatchAndMoveFunctionEntry,
      kMatchAndMoveCheckStackOverflow,
  }));
  // The rest of the body: only unboxed double arithmetic, the interpolation
  // array and its element stores survive — no Vector2/Float64List
  // allocations remain.
  RELEASE_ASSERT(cursor.TryMatch({
      kMatchAndMoveUnbox,
      kMatchAndMoveBinaryDoubleOp,
      kMatchAndMoveBinaryDoubleOp,
      kMatchAndMoveBox,
      kMatchAndMoveBox,
      kMatchAndMoveBinaryDoubleOp,
      {kMatchAndMoveCreateArray, &create_array},
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveBox,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveMoveArgument,
      {kMatchAndMoveStaticCall, &string_interpolate},
      kMatchDartReturn,
  }));

  // The interpolation call consumes the surviving array directly.
  EXPECT(string_interpolate->ArgumentAt(0) == create_array);
}
1370
// Tests that allocation sinking eliminates record allocations (both the
// literal `(x, true)` and the named-field record from `getRecord`), leaving
// only the string-interpolation array. Also verifies the deopt path: the
// second `main()` call overflows the Smi add, deoptimizes, and still
// produces the correct (wrapped) result from unoptimized code.
ISOLATE_UNIT_TEST_CASE(AllocationSinking_Records) {
  const char* kScript = R"(

@pragma('vm:prefer-inline')
({int field1, String field2}) getRecord(int x, String y) =>
    (field1: x, field2: y);

@pragma('vm:never-inline')
String foo(int x, String y) {
  // All allocations in this function are eliminated by the compiler,
  // except array allocation for string interpolation at the end.
  (int, bool) r1 = (x, true);
  final r2 = getRecord(x, y);
  int sum = r1.$1 + r2.field1;
  return "r1: (${r1.$1}, ${r1.$2}), "
      "r2: (field1: ${r2.field1}, field2: ${r2.field2}), sum: $sum";
}

int count = 0;
main() {
  // Deoptimize on the 2nd run.
  return foo(count++ == 0 ? 42 : 9223372036854775807, 'hey');
}
  )";

  // First run takes the Smi fast path and exercises unoptimized code.
  const auto& root_library = Library::Handle(LoadTestScript(kScript));
  const auto& result1 = Object::Handle(Invoke(root_library, "main"));
  EXPECT(result1.IsString());
  EXPECT_STREQ(result1.ToCString(),
               "r1: (42, true), r2: (field1: 42, field2: hey), sum: 84");
  const auto& function = Function::Handle(GetFunction(root_library, "foo"));
  TestPipeline pipeline(function, CompilerPass::kJIT);
  FlowGraph* flow_graph = pipeline.RunPasses({});
  ASSERT(flow_graph != nullptr);

  auto entry = flow_graph->graph_entry()->normal_entry();
  EXPECT(entry != nullptr);

  /* Flow graph to match:

  2: B1[function entry]:2 {
      v2 <- Parameter(0) [-9223372036854775808, 9223372036854775807] T{int}
      v3 <- Parameter(1) T{String}
}
  4: CheckStackOverflow:8(stack=0, loop=0)
  5: ParallelMove rax <- S+3
  6: CheckSmi:16(v2)
  8: ParallelMove rcx <- rax
  8: v9 <- BinarySmiOp:16(+, v2 T{_Smi}, v2 T{_Smi}) [-4611686018427387904, 4611686018427387903] T{_Smi}
  9: ParallelMove rbx <- C, r10 <- C, S-3 <- rcx
 10: v11 <- CreateArray:18(v0, v10) T{_List}
 11: ParallelMove rax <- rax
 12: StoreIndexed(v11, v12, v13, NoStoreBarrier)
 13: ParallelMove rcx <- S+3
 14: StoreIndexed(v11, v14, v2 T{_Smi}, NoStoreBarrier)
 16: StoreIndexed(v11, v16, v17, NoStoreBarrier)
 18: StoreIndexed(v11, v18, v5, NoStoreBarrier)
 20: StoreIndexed(v11, v20, v21, NoStoreBarrier)
 22: StoreIndexed(v11, v22, v2 T{_Smi}, NoStoreBarrier)
 24: StoreIndexed(v11, v24, v25, NoStoreBarrier)
 25: ParallelMove rcx <- S+2
 26: StoreIndexed(v11, v26, v3, NoStoreBarrier)
 28: StoreIndexed(v11, v28, v29, NoStoreBarrier)
 29: ParallelMove rcx <- S-3
 30: StoreIndexed(v11, v30, v9, NoStoreBarrier)
 32: MoveArgument(v11)
 34: v31 <- StaticCall:20( _interpolate@0150898<0> v11, recognized_kind = StringBaseInterpolate) T{String}
 35: ParallelMove rax <- rax
 36: Return:24(v31)
*/

  // No AllocateRecord / AllocateSmallRecord should appear: just the Smi add
  // and the interpolation array construction.
  ILMatcher cursor(flow_graph, entry, /*trace=*/true,
                   ParallelMovesHandling::kSkip);
  RELEASE_ASSERT(cursor.TryMatch({
      kMatchAndMoveFunctionEntry,
      kMatchAndMoveCheckStackOverflow,
      kMatchAndMoveCheckSmi,
      kMatchAndMoveBinarySmiOp,
      kMatchAndMoveCreateArray,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveMoveArgument,
      kMatchAndMoveStaticCall,
      kMatchDartReturn,
  }));

  // Second run in optimized code overflows the Smi add, deoptimizes at the
  // materialized records, and must still compute the wrapped sum (-2).
  Compiler::CompileOptimizedFunction(thread, function);
  const auto& result2 = Object::Handle(Invoke(root_library, "main"));
  EXPECT(result2.IsString());
  EXPECT_STREQ(result2.ToCString(),
               "r1: (9223372036854775807, true), r2: (field1: "
               "9223372036854775807, field2: hey), sum: -2");
}
1472
1473#if !defined(TARGET_ARCH_IA32)
1474
1475ISOLATE_UNIT_TEST_CASE(DelayAllocations_DelayAcrossCalls) {
1476 const char* kScript = R"(
1477 class A {
1478 dynamic x, y;
1479 A(this.x, this.y);
1480 }
1481
1482 int count = 0;
1483
1484 @pragma("vm:never-inline")
1485 dynamic foo(int i) => count++ < 2 ? i : '$i';
1486
1487 @pragma("vm:never-inline")
1488 dynamic use(v) {}
1489
1490 void test() {
1491 A a = new A(foo(1), foo(2));
1492 use(a);
1493 }
1494 )";
1495
1496 const auto& root_library = Library::Handle(LoadTestScript(kScript));
1497 const auto& function = Function::Handle(GetFunction(root_library, "test"));
1498
1499 // Get fields to kDynamicCid guard
1500 Invoke(root_library, "test");
1501 Invoke(root_library, "test");
1502
1503 TestPipeline pipeline(function, CompilerPass::kAOT);
1504 FlowGraph* flow_graph = pipeline.RunPasses({});
1505 auto entry = flow_graph->graph_entry()->normal_entry();
1506
1507 StaticCallInstr* call1;
1508 StaticCallInstr* call2;
1509 AllocateObjectInstr* allocate;
1510 StoreFieldInstr* store1;
1511 StoreFieldInstr* store2;
1512
1513 ILMatcher cursor(flow_graph, entry, true, ParallelMovesHandling::kSkip);
1514 RELEASE_ASSERT(cursor.TryMatch({
1515 kMoveGlob,
1516 {kMatchAndMoveStaticCall, &call1},
1517 kMoveGlob,
1518 {kMatchAndMoveStaticCall, &call2},
1519 kMoveGlob,
1520 {kMatchAndMoveAllocateObject, &allocate},
1521 {kMatchAndMoveStoreField, &store1},
1522 {kMatchAndMoveStoreField, &store2},
1523 }));
1524
1525 EXPECT(strcmp(call1->function().UserVisibleNameCString(), "foo") == 0);
1526 EXPECT(strcmp(call2->function().UserVisibleNameCString(), "foo") == 0);
1527 EXPECT(store1->instance()->definition() == allocate);
1528 EXPECT(!store1->ShouldEmitStoreBarrier());
1529 EXPECT(store2->instance()->definition() == allocate);
1530 EXPECT(!store2->ShouldEmitStoreBarrier());
1531}
1532
// Tests that the allocation-delaying optimization does NOT sink an
// allocation into a loop: `o` is only used inside the loop, but delaying the
// `new Object()` into the loop body would allocate on every iteration.
ISOLATE_UNIT_TEST_CASE(DelayAllocations_DontDelayIntoLoop) {
  const char* kScript = R"(
    void test() {
      Object o = new Object();
      for (int i = 0; i < 10; i++) {
        use(o);
      }
    }

    @pragma('vm:never-inline')
    void use(Object o) {
      print(o.hashCode);
    }
  )";

  const auto& root_library = Library::Handle(LoadTestScript(kScript));
  const auto& function = Function::Handle(GetFunction(root_library, "test"));

  TestPipeline pipeline(function, CompilerPass::kAOT);
  FlowGraph* flow_graph = pipeline.RunPasses({});
  auto entry = flow_graph->graph_entry()->normal_entry();

  // Filled in by the matchers below (RELEASE_ASSERT guarantees the match).
  AllocateObjectInstr* allocate;
  StaticCallInstr* call;

  ILMatcher cursor(flow_graph, entry, true, ParallelMovesHandling::kSkip);
  RELEASE_ASSERT(cursor.TryMatch({
      kMoveGlob,
      {kMatchAndMoveAllocateObject, &allocate},
      kMoveGlob,
      kMoveGlob,
      {kMatchAndMoveStaticCall, &call},
  }));

  // The allocation appears before the in-loop call to `use`, and that call's
  // first argument is the allocation itself.
  EXPECT(strcmp(call->function().UserVisibleNameCString(), "use") == 0);
  EXPECT(call->Receiver()->definition() == allocate);
}
1571
1572ISOLATE_UNIT_TEST_CASE(CheckStackOverflowElimination_NoInterruptsPragma) {
1573 const char* kScript = R"(
1574 @pragma('vm:prefer-inline')
1575 int bar(int n) {
1576 print(''); // Side-effectful operation
1577 var sum = 0;
1578 for (int i = 0; i < n; i++) {
1579 sum += i;
1580 }
1581 return sum;
1582 }
1583
1584 @pragma('vm:unsafe:no-interrupts')
1585 int test() {
1586 int result = 0;
1587 for (int i = 0; i < 10; i++) {
1588 result ^= bar(i);
1589 }
1590 return result;
1591 }
1592 )";
1593
1594 const auto& root_library = Library::Handle(LoadTestScript(kScript));
1595 const auto& function = Function::Handle(GetFunction(root_library, "test"));
1596
1597 TestPipeline pipeline(function, CompilerPass::kAOT);
1598 auto flow_graph = pipeline.RunPasses({});
1599 for (auto block : flow_graph->postorder()) {
1600 for (auto instr : block->instructions()) {
1601 EXPECT_PROPERTY(instr, !it.IsCheckStackOverflow());
1602 }
1603 }
1604}
1605
1606ISOLATE_UNIT_TEST_CASE(BoundsCheckElimination_Pragma) {
1607 const char* kScript = R"(
1608 import 'dart:typed_data';
1609
1610 @pragma('vm:unsafe:no-bounds-checks')
1611 int test(Uint8List list) {
1612 int result = 0;
1613 for (int i = 0; i < 10; i++) {
1614 result = list[i];
1615 }
1616 return result;
1617 }
1618 )";
1619
1620 const auto& root_library = Library::Handle(LoadTestScript(kScript));
1621 const auto& function = Function::Handle(GetFunction(root_library, "test"));
1622
1623 TestPipeline pipeline(function, CompilerPass::kAOT);
1624 auto flow_graph = pipeline.RunPasses({});
1625 for (auto block : flow_graph->postorder()) {
1626 for (auto instr : block->instructions()) {
1627 EXPECT_PROPERTY(instr, !it.IsCheckBoundBase());
1628 }
1629 }
1630}
1631
// This test checks that CSE unwraps redefinitions when comparing all
// instructions except loads, which are handled specially.
ISOLATE_UNIT_TEST_CASE(CSE_Redefinitions) {
  const char* script_chars = R"(
    @pragma("vm:external-name", "BlackholeNative")
    external dynamic blackhole([a, b, c, d, e, f]);
    class K<T> {
      final T field;
      K(this.field);
    }
  )";
  const Library& lib =
      Library::Handle(LoadTestScript(script_chars, NoopNativeLookup));

  // Look up K and finalize it so its field/slot metadata is available.
  const Class& cls = Class::ZoneHandle(
      lib.LookupClass(String::Handle(Symbols::New(thread, "K"))));
  const Error& err = Error::Handle(cls.EnsureIsFinalized(thread));
  EXPECT(err.IsNull());

  // Use a clone of K.field (the compiler works on cloned fields).
  const Field& original_field = Field::Handle(
      cls.LookupField(String::Handle(Symbols::New(thread, "field"))));
  EXPECT(!original_field.IsNull());
  const Field& field = Field::Handle(original_field.CloneFromOriginal());

  const Function& blackhole =
      Function::ZoneHandle(GetFunction(lib, "blackhole"));

  CompilerState S(thread, /*is_aot=*/false, /*is_optimizing=*/true);
  FlowGraphBuilderHelper H(/*num_parameters=*/2);
  H.AddVariable("v0", AbstractType::ZoneHandle(Type::DynamicType()));
  H.AddVariable("v1", AbstractType::ZoneHandle(Type::DynamicType()));

  auto b1 = H.flow_graph()->graph_entry()->normal_entry();

  // Hand-built IL: two Box instructions fed through distinct redefinitions
  // of the same value, and three LoadFields fed through distinct
  // redefinitions of the same receiver.
  BoxInstr* box0;
  BoxInstr* box1;
  LoadFieldInstr* load0;
  LoadFieldInstr* load1;
  LoadFieldInstr* load2;
  StaticCallInstr* call;
  DartReturnInstr* ret;

  {
    BlockBuilder builder(H.flow_graph(), b1);
    auto& slot = Slot::Get(field, &H.flow_graph()->parsed_function());
    auto param0 = builder.AddParameter(0, kUnboxedDouble);
    auto param1 = builder.AddParameter(1, kTagged);
    // box0 and box1 differ only in which redefinition of param0 they use.
    auto redef0 =
        builder.AddDefinition(new RedefinitionInstr(new Value(param0)));
    auto redef1 =
        builder.AddDefinition(new RedefinitionInstr(new Value(param0)));
    box0 = builder.AddDefinition(
        BoxInstr::Create(kUnboxedDouble, new Value(redef0)));
    box1 = builder.AddDefinition(
        BoxInstr::Create(kUnboxedDouble, new Value(redef1)));

    // load0 uses redef2; load1 and load2 both use redef3, so only those two
    // have exactly matching inputs.
    auto redef2 =
        builder.AddDefinition(new RedefinitionInstr(new Value(param1)));
    auto redef3 =
        builder.AddDefinition(new RedefinitionInstr(new Value(param1)));
    load0 = builder.AddDefinition(
        new LoadFieldInstr(new Value(redef2), slot, InstructionSource()));
    load1 = builder.AddDefinition(
        new LoadFieldInstr(new Value(redef3), slot, InstructionSource()));
    load2 = builder.AddDefinition(
        new LoadFieldInstr(new Value(redef3), slot, InstructionSource()));

    // Keep all three loads alive by passing them to an opaque native call.
    InputsArray args(3);
    args.Add(new Value(load0));
    args.Add(new Value(load1));
    args.Add(new Value(load2));
    call = builder.AddInstruction(new StaticCallInstr(
        InstructionSource(), blackhole, 0, Array::empty_array(),
        std::move(args), S.GetNextDeoptId(), 0, ICData::RebindRule::kStatic));

    ret = builder.AddReturn(new Value(box1));
  }
  H.FinishGraph();

  // Running CSE without load optimization should eliminate redundant boxing
  // but keep loads intact if they don't have exactly matching inputs.
  DominatorBasedCSE::Optimize(H.flow_graph(), /*run_load_optimization=*/false);

  EXPECT_PROPERTY(box1, it.WasEliminated());
  EXPECT_PROPERTY(ret, it.value()->definition() == box0);

  EXPECT_PROPERTY(load0, !it.WasEliminated());
  EXPECT_PROPERTY(load1, !it.WasEliminated());
  EXPECT_PROPERTY(load2, it.WasEliminated());

  EXPECT_PROPERTY(call, it.ArgumentAt(0) == load0);
  EXPECT_PROPERTY(call, it.ArgumentAt(1) == load1);
  EXPECT_PROPERTY(call, it.ArgumentAt(2) == load1);

  // Running load optimization pass should remove the second load but
  // insert a redefinition to prevent code motion because the field
  // has a generic type.
  DominatorBasedCSE::Optimize(H.flow_graph(), /*run_load_optimization=*/true);

  EXPECT_PROPERTY(load0, !it.WasEliminated());
  EXPECT_PROPERTY(load1, it.WasEliminated());
  EXPECT_PROPERTY(load2, it.WasEliminated());

  EXPECT_PROPERTY(call, it.ArgumentAt(0) == load0);
  EXPECT_PROPERTY(call, it.ArgumentAt(1)->IsRedefinition() &&
                            it.ArgumentAt(1)->OriginalDefinition() == load0);
  EXPECT_PROPERTY(call, it.ArgumentAt(2)->IsRedefinition() &&
                            it.ArgumentAt(2)->OriginalDefinition() == load0);
}
1742
// Tests that when a typed-data view allocation is sunk, its materializations
// do NOT capture the view's unsafe raw `data` pointer (PointerBase_data):
// that field must be recomputed during deoptimization rather than
// materialized, since the raw pointer may be stale after a GC.
ISOLATE_UNIT_TEST_CASE(AllocationSinking_NoViewDataMaterialization) {
  auto* const kFunctionName = "unalignedUint16";
  auto* const kInvokeNoDeoptName = "no_deopt";
  auto* const kInvokeDeoptName = "deopt";
  // Build the script with the function names spliced in so the three entry
  // points stay in sync with the constants above.
  auto kScript = Utils::CStringUniquePtr(
      OS::SCreate(nullptr, R"(
        import 'dart:_internal';
        import 'dart:typed_data';

        @pragma("vm:never-inline")
        void check(int x, int y) {
          if (x != y) {
            throw "Doesn't match";
          }
        }

        @pragma("vm:never-inline")
        bool %s(num x) {
          var bytes = new ByteData(64);
          if (x is int) {
            for (var i = 2; i < 4; i++) {
              bytes.setUint16(i, x + 1, Endian.host);
              check(x + 1, bytes.getUint16(i, Endian.host));
            }
          } else {
            // Force a garbage collection after deoptimization. In DEBUG mode,
            // the scavenger tests that the view's data field was set correctly
            // during deoptimization before recomputing it.
            VMInternalsForTesting.collectAllGarbage();
          }
          // Make sure the array is also used on the non-int path.
          check(0, bytes.getUint16(0, Endian.host));
          return x is int;
        }

        bool %s() {
          return %s(0xABCC);
        }

        bool %s() {
          return %s(1.0);
        }
      )",
                  kFunctionName, kInvokeNoDeoptName, kFunctionName,
                  kInvokeDeoptName, kFunctionName),
      std::free);

  const auto& lib =
      Library::Handle(LoadTestScript(kScript.get(), NoopNativeLookup));
  EXPECT(!lib.IsNull());
  if (lib.IsNull()) return;

  const auto& function = Function::ZoneHandle(GetFunction(lib, kFunctionName));
  EXPECT(!function.IsNull());
  if (function.IsNull()) return;

  // Run the unoptimized code.
  auto& result = Object::Handle(Invoke(lib, kInvokeNoDeoptName));
  EXPECT(Bool::Cast(result).value());

  // Run the optimizing pipeline only up to (but not including) allocation
  // sinking, so we can first inspect the not-yet-sunk view allocation.
  TestPipeline pipeline(function, CompilerPass::kJIT);
  FlowGraph* flow_graph = pipeline.RunPasses({
      CompilerPass::kComputeSSA,
      CompilerPass::kApplyICData,
      CompilerPass::kTryOptimizePatterns,
      CompilerPass::kSetOuterInliningId,
      CompilerPass::kTypePropagation,
      CompilerPass::kApplyClassIds,
      CompilerPass::kInlining,
      CompilerPass::kTypePropagation,
      CompilerPass::kApplyClassIds,
      CompilerPass::kTypePropagation,
      CompilerPass::kApplyICData,
      CompilerPass::kCanonicalize,
      CompilerPass::kBranchSimplify,
      CompilerPass::kIfConvert,
      CompilerPass::kCanonicalize,
      CompilerPass::kConstantPropagation,
      CompilerPass::kOptimisticallySpecializeSmiPhis,
      CompilerPass::kTypePropagation,
      CompilerPass::kWidenSmiToInt32,
      CompilerPass::kSelectRepresentations,
      CompilerPass::kCSE,
      CompilerPass::kCanonicalize,
      CompilerPass::kLICM,
      CompilerPass::kTryOptimizePatterns,
      CompilerPass::kSelectRepresentations,
      CompilerPass::kDSE,
      CompilerPass::kTypePropagation,
      CompilerPass::kSelectRepresentations,
      CompilerPass::kEliminateEnvironments,
      CompilerPass::kEliminateDeadPhis,
      CompilerPass::kDCE,
      CompilerPass::kCanonicalize,
      CompilerPass::kOptimizeBranches,
  });

  // Check for the soon-to-be-sunk ByteDataView allocation.

  auto entry = flow_graph->graph_entry()->normal_entry();
  EXPECT(entry != nullptr);

  // Captured below: the backing typed-data allocation, the view allocation,
  // and the stores/load that wire the view to its backing store.
  AllocateTypedDataInstr* alloc_typed_data = nullptr;
  AllocateObjectInstr* alloc_view = nullptr;
  StoreFieldInstr* store_view_typed_data = nullptr;
  StoreFieldInstr* store_view_offset_in_bytes = nullptr;
  StoreFieldInstr* store_view_length = nullptr;
  LoadFieldInstr* load_typed_data_payload = nullptr;
  StoreFieldInstr* store_view_payload = nullptr;

  ILMatcher cursor(flow_graph, entry, true, ParallelMovesHandling::kSkip);
  EXPECT(cursor.TryMatch({
      kMoveGlob,
      {kMatchAndMoveAllocateTypedData, &alloc_typed_data},
      {kMatchAndMoveAllocateObject, &alloc_view},
      {kMatchAndMoveStoreField, &store_view_typed_data},
      {kMatchAndMoveStoreField, &store_view_offset_in_bytes},
      {kMatchAndMoveStoreField, &store_view_length},
      {kMatchAndMoveLoadField, &load_typed_data_payload},
      {kMatchAndMoveStoreField, &store_view_payload},
  }));
  // Bail out (EXPECT above already failed) rather than dereference nullptr.
  if (store_view_payload == nullptr) return;

  // The view's typed_data slot points at the backing allocation.
  EXPECT_EQ(alloc_view, store_view_typed_data->instance()->definition());
  EXPECT(Slot::TypedDataView_typed_data().IsIdentical(
      store_view_typed_data->slot()));
  EXPECT_EQ(alloc_typed_data, store_view_typed_data->value()->definition());

  // The view's length equals the backing store's element count.
  EXPECT_EQ(alloc_view, store_view_length->instance()->definition());
  EXPECT(Slot::TypedDataBase_length().IsIdentical(store_view_length->slot()));
  EXPECT_EQ(alloc_typed_data->num_elements()->definition(),
            store_view_length->value()->definition());

  // The view starts at offset 0 into the backing store.
  EXPECT_EQ(alloc_view, store_view_offset_in_bytes->instance()->definition());
  EXPECT(Slot::TypedDataView_offset_in_bytes().IsIdentical(
      store_view_offset_in_bytes->slot()));
  EXPECT(store_view_offset_in_bytes->value()->BindsToSmiConstant());
  EXPECT_EQ(0, store_view_offset_in_bytes->value()->BoundSmiConstant());

  // The view's raw data pointer is loaded from the backing store's payload.
  EXPECT_EQ(alloc_typed_data,
            load_typed_data_payload->instance()->definition());
  EXPECT(Slot::PointerBase_data().IsIdentical(load_typed_data_payload->slot()));

  EXPECT_EQ(alloc_view, store_view_payload->instance()->definition());
  EXPECT(Slot::PointerBase_data().IsIdentical(store_view_payload->slot()));
  EXPECT_EQ(load_typed_data_payload, store_view_payload->value()->definition());

  // Setting the view data field is the only use of the unsafe payload load.
  EXPECT(load_typed_data_payload->HasOnlyUse(store_view_payload->value()));

  pipeline.RunAdditionalPasses({
      CompilerPass::kAllocationSinking_Sink,
  });

  // After sinking, the view allocation has been removed from the flow graph.
  EXPECT_EQ(nullptr, alloc_view->previous());
  EXPECT_EQ(nullptr, alloc_view->next());
  // There is at least one MaterializeObject instruction created for the view.
  intptr_t mat_count = 0;
  for (auto block_it = flow_graph->reverse_postorder_iterator();
       !block_it.Done(); block_it.Advance()) {
    for (ForwardInstructionIterator it(block_it.Current()); !it.Done();
         it.Advance()) {
      auto* const mat = it.Current()->AsMaterializeObject();
      if (mat == nullptr) continue;
      if (mat->allocation() == alloc_view) {
        ++mat_count;
        for (intptr_t i = 0; i < mat->InputCount(); i++) {
          // No slot of the materialization should correspond to the data field.
          EXPECT(mat->FieldOffsetAt(i) !=
                 Slot::PointerBase_data().offset_in_bytes());
          // No input of the materialization should be a load of the typed
          // data object's payload.
          if (auto* const load = mat->InputAt(i)->definition()->AsLoadField()) {
            if (load->instance()->definition() == alloc_typed_data) {
              EXPECT(!load->slot().IsIdentical(Slot::PointerBase_data()));
            }
          }
        }
      }
    }
  }
  EXPECT(mat_count > 0);
  // There are no uses of the original unsafe payload load. In particular, no
  // MaterializeObject instructions use it.
  EXPECT(!load_typed_data_payload->HasUses());

  // Finish the remaining passes of the pipeline.
  pipeline.RunAdditionalPasses({
      CompilerPass::kEliminateDeadPhis,
      CompilerPass::kDCE,
      CompilerPass::kCanonicalize,
      CompilerPass::kTypePropagation,
      CompilerPass::kSelectRepresentations_Final,
      CompilerPass::kUseTableDispatch,
      CompilerPass::kEliminateStackOverflowChecks,
      CompilerPass::kCanonicalize,
      CompilerPass::kAllocationSinking_DetachMaterializations,
      CompilerPass::kEliminateWriteBarriers,
      CompilerPass::kLoweringAfterCodeMotionDisabled,
      CompilerPass::kFinalizeGraph,
      CompilerPass::kCanonicalize,
      CompilerPass::kReorderBlocks,
      CompilerPass::kAllocateRegisters,
      CompilerPass::kTestILSerialization,
  });

  // Finish the compilation and attach code so we can run it.
  pipeline.CompileGraphAndAttachFunction();

  // Can run optimized code fine without deoptimization.
  result = Invoke(lib, kInvokeNoDeoptName);
  EXPECT(function.HasOptimizedCode());
  EXPECT(Bool::Cast(result).value());

  // Can run code fine with deoptimization.
  result = Invoke(lib, kInvokeDeoptName);
  // Deoptimization has put us back to unoptimized code.
  EXPECT(!function.HasOptimizedCode());
  EXPECT(!Bool::Cast(result).value());
}
1963
1964#endif // !defined(TARGET_ARCH_IA32)
1965
// Regression test for https://github.com/dart-lang/sdk/issues/51220.
// Verifies that deoptimization at the hoisted BinarySmiOp
// doesn't result in the infinite re-optimization loop.
ISOLATE_UNIT_TEST_CASE(LICM_Deopt_Regress51220) {
  // The shift amount is computed so that `(1 << %d) * 1024` is just outside
  // the Smi range on the target, forcing the hoisted Smi op to deoptimize.
  auto kScript =
      Utils::CStringUniquePtr(OS::SCreate(nullptr,
                                          R"(
      int n = int.parse('3');
      main() {
        int x = 0;
        for (int i = 0; i < n; ++i) {
          if (i > ((1 << %d)*1024)) {
            ++x;
          }
        }
        return x;
      }
    )",
                                          static_cast<int>(kSmiBits + 1 - 10)),
                              std::free);

  const auto& root_library = Library::Handle(LoadTestScript(kScript.get()));
  const auto& function = Function::Handle(GetFunction(root_library, "main"));

  // Run unoptimized code.
  Invoke(root_library, "main");
  EXPECT(!function.HasOptimizedCode());

  Compiler::CompileOptimizedFunction(thread, function);
  EXPECT(function.HasOptimizedCode());

  // Only 2 rounds of deoptimization are allowed:
  // * the first round should disable LICM;
  // * the second round should disable BinarySmiOp.
  Invoke(root_library, "main");
  EXPECT(!function.HasOptimizedCode());
  // EXPECT(function.ProhibitsInstructionHoisting());

  Compiler::CompileOptimizedFunction(thread, function);
  EXPECT(function.HasOptimizedCode());

  Invoke(root_library, "main");
  EXPECT(!function.HasOptimizedCode());
  // EXPECT(function.ProhibitsInstructionHoisting());

  Compiler::CompileOptimizedFunction(thread, function);
  EXPECT(function.HasOptimizedCode());

  // Should not deoptimize.
  Invoke(root_library, "main");
  EXPECT(function.HasOptimizedCode());
}
2018
// Regression test for https://github.com/dart-lang/sdk/issues/50245.
// Verifies that deoptimization at the hoisted GuardFieldClass
// doesn't result in the infinite re-optimization loop.
ISOLATE_UNIT_TEST_CASE(LICM_Deopt_Regress50245) {
  const char* kScript = R"(
    class A {
      List<int> foo;
      A(this.foo);
    }

    A obj = A([1, 2, 3]);
    int n = int.parse('3');

    main() {
      // Make sure A.foo= is compiled.
      obj.foo = [];
      int sum = 0;
      for (int i = 0; i < n; ++i) {
        if (int.parse('1') != 1) {
          // Field guard from this unreachable code is moved up
          // and causes repeated deoptimization.
          obj.foo = const [];
        }
        sum += i;
      }
      return sum;
    }
  )";

  const auto& root_library = Library::Handle(LoadTestScript(kScript));
  const auto& function = Function::Handle(GetFunction(root_library, "main"));

  // Run unoptimized code.
  Invoke(root_library, "main");
  EXPECT(!function.HasOptimizedCode());

  Compiler::CompileOptimizedFunction(thread, function);
  EXPECT(function.HasOptimizedCode());

  // LICM should be disabled after the first round of deoptimization.
  Invoke(root_library, "main");
  EXPECT(!function.HasOptimizedCode());
  // EXPECT(function.ProhibitsInstructionHoisting());

  Compiler::CompileOptimizedFunction(thread, function);
  EXPECT(function.HasOptimizedCode());

  // Should not deoptimize.
  Invoke(root_library, "main");
  EXPECT(function.HasOptimizedCode());
}
2070
2071} // namespace dart
static volatile float blackhole[4]
Definition Sk4fBench.cpp:13
static SkV4 v4(SkV3 v, SkScalar w)
Definition SkM44.cpp:329
SI void store(P *ptr, const T &val)
SI T load(const P *ptr)
#define EXPECT(type, expectedAlignment, expectedSize)
Vec2Value v2
#define RELEASE_ASSERT(cond)
Definition assert.h:327
bool IsNotAliased() const
Definition il.h:2434
virtual AliasIdentity Identity() const
Definition il.h:7283
GrowableArray< Definition * > * initial_definitions()
Definition il.h:1911
FunctionPtr LookupFactory(const String &name) const
Definition object.cc:6212
ErrorPtr EnsureIsFinalized(Thread *thread) const
Definition object.cc:4979
FieldPtr LookupField(const String &name) const
Definition object.cc:6407
static CompileType FromCid(intptr_t cid)
Value * value() const
Definition il.h:3468
PRINT_OPERANDS_TO_SUPPORT PRINT_TO_SUPPORT bool UpdateType(CompileType new_type)
Definition il.h:2535
static constexpr intptr_t kNone
Definition deopt_id.h:27
static bool Optimize(FlowGraph *graph, bool run_load_optimization=true)
FieldPtr CloneFromOriginal() const
Definition object.cc:11786
GraphEntryInstr * graph_entry() const
Definition flow_graph.h:268
ConstantInstr * GetConstant(const Object &object, Representation representation=kTagged)
ConstantInstr * constant_null() const
Definition flow_graph.h:270
const ParsedFunction & parsed_function() const
Definition flow_graph.h:129
BlockIterator reverse_postorder_iterator() const
Definition flow_graph.h:219
intptr_t EnvIndex(const LocalVariable *variable) const
Definition flow_graph.h:189
FunctionEntryInstr * normal_entry() const
Definition il.h:1986
const GrowableArray< CatchBlockEntryInstr * > & catch_entries() const
Definition il.h:1997
bool TryMatch(std::initializer_list< MatchCode > match_codes, MatchOpCode insert_before=kInvalidMatchOpCode)
static IntegerPtr New(const String &str, Heap::Space space=Heap::kNew)
Definition object.cc:23063
ClassPtr LookupClass(const String &name) const
Definition object.cc:14152
static LibraryPtr TypedDataLibrary()
Definition object.cc:14872
LocalScope * sibling() const
Definition scopes.h:321
LocalVariable * VariableAt(intptr_t index) const
Definition scopes.h:398
intptr_t num_variables() const
Definition scopes.h:397
LocalScope * child() const
Definition scopes.h:320
static void static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
bool IsNull() const
Definition object.h:363
static Object & Handle()
Definition object.h:407
static Object & ZoneHandle()
Definition object.h:419
LocalScope * scope() const
Definition parser.h:76
static const Slot & Get(const Field &field, const ParsedFunction *parsed_function)
Definition slot.cc:351
void SetResultType(Zone *zone, CompileType new_type)
Definition il.h:5599
void set_is_known_list_constructor(bool value)
Definition il.h:5613
static StringPtr New(const char *cstr, Heap::Space space=Heap::kNew)
Definition object.cc:23777
static const String & Empty()
Definition symbols.h:687
static StringPtr New(Thread *thread, const char *cstr)
Definition symbols.h:722
FlowGraph * RunPasses(std::initializer_list< CompilerPass::Id > passes)
static TypePtr ObjectType()
Definition object.cc:21878
std::unique_ptr< char, decltype(std::free) * > CStringUniquePtr
Definition utils.h:644
Definition * definition() const
Definition il.h:103
#define H
struct _Dart_Handle * Dart_Handle
Definition dart_api.h:258
struct _Dart_NativeArguments * Dart_NativeArguments
Definition dart_api.h:3010
void(* Dart_NativeFunction)(Dart_NativeArguments arguments)
Definition dart_api.h:3198
#define ASSERT(E)
struct MyStruct s
G_BEGIN_DECLS G_MODULE_EXPORT FlValue * args
uint8_t value
GAsyncResult * result
Dart_NativeFunction function
Definition fuchsia.cc:51
int argument_count
Definition fuchsia.cc:52
#define EXPECT_PROPERTY(entity, property)
LibraryPtr LoadTestScript(const char *script, Dart_NativeEntryResolver resolver, const char *lib_uri)
@ kMatchAndMoveBranchFalse
@ kMatchAndMoveBranchTrue
static void FlattenScopeIntoEnvironment(FlowGraph *graph, LocalScope *scope, GrowableArray< LocalVariable * > *env)
const char *const name
void OptimizeCatchEntryStates(FlowGraph *flow_graph, bool is_aot)
static void NoopNative(Dart_NativeArguments args)
Location LocationExceptionLocation()
Definition locations.cc:484
@ kNoStoreBarrier
Definition il.h:6252
@ kEmitStoreBarrier
Definition il.h:6252
GrowableArray< Value * > InputsArray
Definition il.h:895
ObjectPtr Invoke(const Library &lib, const char *name)
FunctionPtr GetFunction(const Library &lib, const char *name)
static void TestAliasingViaStore(Thread *thread, bool make_it_escape, bool make_host_escape, std::function< Definition *(CompilerState *S, FlowGraph *, Definition *)> make_redefinition)
static Definition * MakeCheckNull(CompilerState *S, FlowGraph *flow_graph, Definition *defn)
Location LocationStackTraceLocation()
Definition locations.cc:488
static void TestAliasingViaRedefinition(Thread *thread, bool make_it_escape, std::function< Definition *(CompilerState *S, FlowGraph *, Definition *)> make_redefinition)
static void CountLoadsStores(FlowGraph *flow_graph, intptr_t *loads, intptr_t *stores)
static Definition * MakeRedefinition(CompilerState *S, FlowGraph *flow_graph, Definition *defn)
static void TryCatchOptimizerTest(Thread *thread, const char *script_chars, std::initializer_list< const char * > synchronized)
static Definition * MakeAssertAssignable(CompilerState *S, FlowGraph *flow_graph, Definition *defn)
@ kAlignedAccess
Definition il.h:6722
static Dart_NativeFunction NoopNativeLookup(Dart_Handle name, int argument_count, bool *auto_setup_scope)
Definition __init__.py:1
Definition SkMD5.cpp:130
#define ISOLATE_UNIT_TEST_CASE(name)
Definition unit_test.h:64