Flutter Engine
The Flutter Engine
redundancy_elimination_test.cc
Go to the documentation of this file.
1// Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
2// for details. All rights reserved. Use of this source code is governed by a
3// BSD-style license that can be found in the LICENSE file.
4
6
7#include <functional>
8#include <utility>
9
19#include "vm/flags.h"
20#include "vm/kernel_isolate.h"
21#include "vm/log.h"
22#include "vm/object.h"
23#include "vm/parser.h"
24#include "vm/symbols.h"
25#include "vm/unit_test.h"
26
27namespace dart {
28
30
// NOTE(review): the native resolver's signature lines are missing from this
// extraction; the visible tail disables auto scope setup and resolves every
// lookup to the no-op native entry returned below.
33 bool* auto_setup_scope) {
34 ASSERT(auto_setup_scope != nullptr);
35 *auto_setup_scope = false;
36 return NoopNative;
37}
38
39// Flatten all non-captured LocalVariables from the given scope and its children
40// and siblings into the given array based on their environment index.
// NOTE(review): the signature's opening line is missing from this extraction;
// the recursive calls below show the parameters are (graph, scope, env).
42 LocalScope* scope,
44 for (intptr_t i = 0; i < scope->num_variables(); i++) {
45 auto var = scope->VariableAt(i);
// Captured variables are deliberately excluded from the flattened result.
46 if (var->is_captured()) {
47 continue;
48 }
49
// Grow |env| on demand; slots that never receive a variable stay nullptr.
50 auto index = graph->EnvIndex(var);
51 env->EnsureLength(index + 1, nullptr);
52 (*env)[index] = var;
53 }
54
// Recurse over both the sibling chain and the child chain of the scope tree.
55 if (scope->sibling() != nullptr) {
56 FlattenScopeIntoEnvironment(graph, scope->sibling(), env);
57 }
58 if (scope->child() != nullptr) {
59 FlattenScopeIntoEnvironment(graph, scope->child(), env);
60 }
61}
62
63// Run TryCatchAnalyzer optimization on the function foo from the given script
64// and check that the only variables from the given list are synchronized
65// on catch entry.
// NOTE(review): the signature's opening line, the LoadTestScript call, the
// TestPipeline construction, and the |env| declaration are missing from this
// extraction; the remaining code is kept byte-for-byte.
67 Thread* thread,
68 const char* script_chars,
69 std::initializer_list<const char*> synchronized) {
70 // Load the script and exercise the code once.
71 const auto& root_library =
73 Invoke(root_library, "main");
74
75 // Build the flow graph.
76 std::initializer_list<CompilerPass::Id> passes = {
77 CompilerPass::kComputeSSA, CompilerPass::kTypePropagation,
78 CompilerPass::kApplyICData, CompilerPass::kSelectRepresentations,
79 CompilerPass::kTypePropagation, CompilerPass::kCanonicalize,
80 };
81 const auto& function = Function::Handle(GetFunction(root_library, "foo"));
83 FlowGraph* graph = pipeline.RunPasses(passes);
84
85 // Finally run TryCatchAnalyzer on the graph (in AOT mode).
86 OptimizeCatchEntryStates(graph, /*is_aot=*/true);
87
// Every script used with this helper is expected to have exactly one catch.
88 EXPECT_EQ(1, graph->graph_entry()->catch_entries().length());
89 auto scope = graph->parsed_function().scope();
90
92 FlattenScopeIntoEnvironment(graph, scope, &env);
93
// Null out every environment slot whose variable name is not in
// |synchronized|; the surviving non-null slots are the expected variables.
94 for (intptr_t i = 0; i < env.length(); i++) {
95 bool found = false;
96 for (auto name : synchronized) {
97 if (env[i]->name().Equals(name)) {
98 found = true;
99 break;
100 }
101 }
102 if (!found) {
103 env[i] = nullptr;
104 }
105 }
106
107 CatchBlockEntryInstr* catch_entry = graph->graph_entry()->catch_entries()[0];
108
109 // We should only synchronize state for variables from the synchronized list.
110 for (auto defn : *catch_entry->initial_definitions()) {
111 if (ParameterInstr* param = defn->AsParameter()) {
// Exception/stacktrace values arrive in fixed locations and are not part
// of the synchronized variable environment.
112 if (param->location().IsRegister()) {
113 EXPECT(param->location().Equals(LocationExceptionLocation()) ||
114 param->location().Equals(LocationStackTraceLocation()));
115 continue;
116 }
117
118 EXPECT(0 <= param->env_index() && param->env_index() < env.length());
119 EXPECT(env[param->env_index()] != nullptr);
120 if (env[param->env_index()] == nullptr) {
121 OS::PrintErr("something is wrong with %s\n", param->ToCString());
122 }
123 }
124 }
125}
126
127//
128// Tests for TryCatchOptimizer.
129//
130
131ISOLATE_UNIT_TEST_CASE(TryCatchOptimizer_DeadParameterElimination_Simple1) {
132 const char* script_chars = R"(
133 @pragma("vm:external-name", "BlackholeNative")
134 external dynamic blackhole([dynamic val]);
135 foo(int p) {
136 var a = blackhole(), b = blackhole();
137 try {
138 blackhole([a, b]);
139 } catch (e) {
140 // nothing is used
141 }
142 }
143 main() {
144 foo(42);
145 }
146 )";
147
148 TryCatchOptimizerTest(thread, script_chars, /*synchronized=*/{});
149}
150
151ISOLATE_UNIT_TEST_CASE(TryCatchOptimizer_DeadParameterElimination_Simple2) {
152 const char* script_chars = R"(
153 @pragma("vm:external-name", "BlackholeNative")
154 external dynamic blackhole([dynamic val]);
155 foo(int p) {
156 var a = blackhole(), b = blackhole();
157 try {
158 blackhole([a, b]);
159 } catch (e) {
160 // a should be synchronized
161 blackhole(a);
162 }
163 }
164 main() {
165 foo(42);
166 }
167 )";
168
169 TryCatchOptimizerTest(thread, script_chars, /*synchronized=*/{"a"});
170}
171
172ISOLATE_UNIT_TEST_CASE(TryCatchOptimizer_DeadParameterElimination_Cyclic1) {
173 const char* script_chars = R"(
174 @pragma("vm:external-name", "BlackholeNative")
175 external dynamic blackhole([dynamic val]);
176 foo(int p) {
177 var a = blackhole(), b;
178 for (var i = 0; i < 42; i++) {
179 b = blackhole();
180 try {
181 blackhole([a, b]);
182 } catch (e) {
183 // a and i should be synchronized
184 }
185 }
186 }
187 main() {
188 foo(42);
189 }
190 )";
191
192 TryCatchOptimizerTest(thread, script_chars, /*synchronized=*/{"a", "i"});
193}
194
195ISOLATE_UNIT_TEST_CASE(TryCatchOptimizer_DeadParameterElimination_Cyclic2) {
196 const char* script_chars = R"(
197 @pragma("vm:external-name", "BlackholeNative")
198 external dynamic blackhole([dynamic val]);
199 foo(int p) {
200 var a = blackhole(), b = blackhole();
201 for (var i = 0; i < 42; i++) {
202 try {
203 blackhole([a, b]);
204 } catch (e) {
205 // a, b and i should be synchronized
206 }
207 }
208 }
209 main() {
210 foo(42);
211 }
212 )";
213
214 TryCatchOptimizerTest(thread, script_chars, /*synchronized=*/{"a", "b", "i"});
215}
216
217// LoadOptimizer tests
218
219// This family of tests verifies behavior of load forwarding when alias for an
220// allocation A is created by creating a redefinition for it and then
221// letting redefinition escape.
// NOTE(review): several lines are missing from this extraction (the function
// signature, the LoadTestScript call, the pipeline/helper setup, some local
// declarations, and the bodies of a few EXPECT_PROPERTY checks). The code
// below is kept byte-for-byte; only comments were added.
223 Thread* thread,
224 bool make_it_escape,
226 make_redefinition) {
227 const char* script_chars = R"(
228 @pragma("vm:external-name", "BlackholeNative")
229 external dynamic blackhole([a, b, c, d, e, f]);
230 class K {
231 var field;
232 }
233 )";
234 const Library& lib =
236
237 const Class& cls = Class::ZoneHandle(
238 lib.LookupClass(String::Handle(Symbols::New(thread, "K"))));
239 const Error& err = Error::Handle(cls.EnsureIsFinalized(thread));
240 EXPECT(err.IsNull());
241
242 const Field& original_field = Field::Handle(
243 cls.LookupField(String::Handle(Symbols::New(thread, "field"))));
244 EXPECT(!original_field.IsNull());
// Work with a clone so the original field's state is left untouched.
245 const Field& field = Field::Handle(original_field.CloneFromOriginal());
246
247 const Function& blackhole =
248 Function::ZoneHandle(GetFunction(lib, "blackhole"));
249
251 CompilerState S(thread, /*is_aot=*/false, /*is_optimizing=*/true);
253
254 // We are going to build the following graph:
255 //
256 // B0[graph_entry]
257 // B1[function_entry]:
258 // v0 <- AllocateObject(class K)
259 // v1 <- LoadField(v0, K.field)
260 // v2 <- make_redefinition(v0)
261 // MoveArgument(v1)
262 // #if make_it_escape
263 // MoveArgument(v2)
264 // #endif
265 // v3 <- StaticCall(blackhole, v1, v2)
266 // v4 <- LoadField(v2, K.field)
267 // Return v4
268
269 auto b1 = H.flow_graph()->graph_entry()->normal_entry();
271 LoadFieldInstr* v1;
274 DartReturnInstr* ret;
275
276 {
277 BlockBuilder builder(H.flow_graph(), b1);
278 auto& slot = Slot::Get(field, &H.flow_graph()->parsed_function());
279 v0 = builder.AddDefinition(
280 new AllocateObjectInstr(InstructionSource(), cls, S.GetNextDeoptId()));
281 v1 = builder.AddDefinition(
282 new LoadFieldInstr(new Value(v0), slot, InstructionSource()));
283 auto v2 = builder.AddDefinition(make_redefinition(&S, H.flow_graph(), v0));
284 InputsArray args(2);
285 args.Add(new Value(v1));
286 if (make_it_escape) {
287 args.Add(new Value(v2));
288 }
289 call = builder.AddInstruction(new StaticCallInstr(
290 InstructionSource(), blackhole, 0, Array::empty_array(),
291 std::move(args), S.GetNextDeoptId(), 0, ICData::RebindRule::kStatic));
292 v4 = builder.AddDefinition(
293 new LoadFieldInstr(new Value(v2), slot, InstructionSource()));
294 ret = builder.AddInstruction(new DartReturnInstr(
295 InstructionSource(), new Value(v4), S.GetNextDeoptId()));
296 }
297 H.FinishGraph();
// Run the load-forwarding / CSE pass under test.
298 DominatorBasedCSE::Optimize(H.flow_graph());
299
300 if (make_it_escape) {
301 // Allocation must be considered aliased.
303 } else {
304 // Allocation must be considered not-aliased.
306 }
307
308 // v1 should have been removed from the graph and replaced with constant_null.
309 EXPECT_PROPERTY(v1, it.next() == nullptr && it.previous() == nullptr);
310 EXPECT_PROPERTY(call, it.ArgumentAt(0) == H.flow_graph()->constant_null());
311
312 if (make_it_escape) {
313 // v4 however should not be removed from the graph, because v0 escapes into
314 // blackhole.
315 EXPECT_PROPERTY(v4, it.next() != nullptr && it.previous() != nullptr);
316 EXPECT_PROPERTY(ret, it.value()->definition() == v4);
317 } else {
318 // If v0 is not aliased then v4 should also be removed from the graph.
319 EXPECT_PROPERTY(v4, it.next() == nullptr && it.previous() == nullptr);
321 ret, it.value()->definition() == H.flow_graph()->constant_null());
322 }
323}
324
// Redefinition factory: wraps |defn| in a CheckNull instruction.
// NOTE(review): the signature's opening line is missing from this extraction;
// the body shows a CompilerState* parameter |S| is also taken.
326 FlowGraph* flow_graph,
327 Definition* defn) {
328 return new CheckNullInstr(new Value(defn), String::ZoneHandle(),
329 S->GetNextDeoptId(), InstructionSource());
330}
331
// Redefinition factory: wraps |defn| in a plain RedefinitionInstr.
// NOTE(review): the signature's opening line is missing from this extraction.
333 FlowGraph* flow_graph,
334 Definition* defn) {
335 return new RedefinitionInstr(new Value(defn));
336}
337
// Redefinition factory: wraps |defn| in an AssertAssignable against the
// Object type (with null instantiator/function type arguments).
// NOTE(review): the signature's opening line is missing from this extraction.
339 FlowGraph* flow_graph,
340 Definition* defn) {
341 const auto& dst_type = AbstractType::ZoneHandle(Type::ObjectType());
342 return new AssertAssignableInstr(InstructionSource(), new Value(defn),
343 new Value(flow_graph->GetConstant(dst_type)),
344 new Value(flow_graph->constant_null()),
345 new Value(flow_graph->constant_null()),
346 Symbols::Empty(), S->GetNextDeoptId());
347}
348
349ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedefinitionAliasing_CheckNull_NoEscape) {
350 TestAliasingViaRedefinition(thread, /*make_it_escape=*/false, MakeCheckNull);
351}
352
353ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedefinitionAliasing_CheckNull_Escape) {
354 TestAliasingViaRedefinition(thread, /*make_it_escape=*/true, MakeCheckNull);
355}
356
// NOTE(review): these four test wrappers are missing their
// ISOLATE_UNIT_TEST_CASE( header lines and/or the trailing
// Make{Redefinition,AssertAssignable}); argument lines in this extraction;
// the visible fragments are kept byte-for-byte.
358 LoadOptimizer_RedefinitionAliasing_Redefinition_NoEscape) {
359 TestAliasingViaRedefinition(thread, /*make_it_escape=*/false,
361}
362
363ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedefinitionAliasing_Redefinition_Escape) {
364 TestAliasingViaRedefinition(thread, /*make_it_escape=*/true,
366}
367
369 LoadOptimizer_RedefinitionAliasing_AssertAssignable_NoEscape) {
370 TestAliasingViaRedefinition(thread, /*make_it_escape=*/false,
372}
373
375 LoadOptimizer_RedefinitionAliasing_AssertAssignable_Escape) {
376 TestAliasingViaRedefinition(thread, /*make_it_escape=*/true,
378}
379
380// This family of tests verifies behavior of load forwarding when alias for an
381// allocation A is created by storing it into another object B and then
382// either loaded from it ([make_it_escape] is true) or object B itself
383// escapes ([make_host_escape] is true).
384// We insert redefinition for object B to check that use list traversal
385// correctly discovers all loads and stores from B.
// NOTE(review): as in the sibling helper above, several lines are missing
// from this extraction (signature, setup, trailing constructor arguments,
// and some EXPECT_PROPERTY bodies); the code is kept byte-for-byte and only
// comments were added.
387 Thread* thread,
388 bool make_it_escape,
389 bool make_host_escape,
391 make_redefinition) {
392 const char* script_chars = R"(
393 @pragma("vm:external-name", "BlackholeNative")
394 external dynamic blackhole([a, b, c, d, e, f]);
395 class K {
396 var field;
397 }
398 )";
399 const Library& lib =
401
402 const Class& cls = Class::ZoneHandle(
403 lib.LookupClass(String::Handle(Symbols::New(thread, "K"))));
404 const Error& err = Error::Handle(cls.EnsureIsFinalized(thread));
405 EXPECT(err.IsNull());
406
407 const Field& original_field = Field::Handle(
408 cls.LookupField(String::Handle(Symbols::New(thread, "field"))));
409 EXPECT(!original_field.IsNull());
410 const Field& field = Field::Handle(original_field.CloneFromOriginal());
411
412 const Function& blackhole =
413 Function::ZoneHandle(GetFunction(lib, "blackhole"));
414
416 CompilerState S(thread, /*is_aot=*/false, /*is_optimizing=*/true);
418
419 // We are going to build the following graph:
420 //
421 // B0[graph_entry]
422 // B1[function_entry]:
423 // v0 <- AllocateObject(class K)
424 // v5 <- AllocateObject(class K)
425 // #if !make_host_escape
426 // StoreField(v5 . K.field = v0)
427 // #endif
428 // v1 <- LoadField(v0, K.field)
429 // v2 <- REDEFINITION(v5)
430 // MoveArgument(v1)
431 // #if make_it_escape
432 // v6 <- LoadField(v2, K.field)
433 // MoveArgument(v6)
434 // #elif make_host_escape
435 // StoreField(v2 . K.field = v0)
436 // MoveArgument(v5)
437 // #endif
438 // v3 <- StaticCall(blackhole, v1, v6)
439 // v4 <- LoadField(v0, K.field)
440 // Return v4
441
442 auto b1 = H.flow_graph()->graph_entry()->normal_entry();
445 LoadFieldInstr* v1;
448 DartReturnInstr* ret;
449
450 {
451 BlockBuilder builder(H.flow_graph(), b1);
452 auto& slot = Slot::Get(field, &H.flow_graph()->parsed_function());
453 v0 = builder.AddDefinition(
454 new AllocateObjectInstr(InstructionSource(), cls, S.GetNextDeoptId()));
455 v5 = builder.AddDefinition(
456 new AllocateObjectInstr(InstructionSource(), cls, S.GetNextDeoptId()));
457 if (!make_host_escape) {
458 builder.AddInstruction(
459 new StoreFieldInstr(slot, new Value(v5), new Value(v0),
461 }
462 v1 = builder.AddDefinition(
463 new LoadFieldInstr(new Value(v0), slot, InstructionSource()));
464 auto v2 = builder.AddDefinition(make_redefinition(&S, H.flow_graph(), v5));
465 InputsArray args(2);
466 args.Add(new Value(v1));
467 if (make_it_escape) {
468 auto v6 = builder.AddDefinition(
469 new LoadFieldInstr(new Value(v2), slot, InstructionSource()));
470 args.Add(new Value(v6));
471 } else if (make_host_escape) {
472 builder.AddInstruction(
473 new StoreFieldInstr(slot, new Value(v2), new Value(v0),
475 args.Add(new Value(v5));
476 }
477 call = builder.AddInstruction(new StaticCallInstr(
478 InstructionSource(), blackhole, 0, Array::empty_array(),
479 std::move(args), S.GetNextDeoptId(), 0, ICData::RebindRule::kStatic));
480 v4 = builder.AddDefinition(
481 new LoadFieldInstr(new Value(v0), slot, InstructionSource()));
482 ret = builder.AddInstruction(new DartReturnInstr(
483 InstructionSource(), new Value(v4), S.GetNextDeoptId()));
484 }
485 H.FinishGraph();
// Run the load-forwarding / CSE pass under test.
486 DominatorBasedCSE::Optimize(H.flow_graph());
487
488 if (make_it_escape || make_host_escape) {
489 // Allocation must be considered aliased.
491 } else {
492 // Allocation must not be considered aliased.
494 }
495
496 if (make_host_escape) {
498 } else {
500 }
501
502 // v1 should have been removed from the graph and replaced with constant_null.
503 EXPECT_PROPERTY(v1, it.next() == nullptr && it.previous() == nullptr);
504 EXPECT_PROPERTY(call, it.ArgumentAt(0) == H.flow_graph()->constant_null());
505
506 if (make_it_escape || make_host_escape) {
507 // v4 however should not be removed from the graph, because v0 escapes into
508 // blackhole.
509 EXPECT_PROPERTY(v4, it.next() != nullptr && it.previous() != nullptr);
510 EXPECT_PROPERTY(ret, it.value()->definition() == v4);
511 } else {
512 // If v0 is not aliased then v4 should also be removed from the graph.
513 EXPECT_PROPERTY(v4, it.next() == nullptr && it.previous() == nullptr);
515 ret, it.value()->definition() == H.flow_graph()->constant_null());
516 }
517}
518
519ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_CheckNull_NoEscape) {
520 TestAliasingViaStore(thread, /*make_it_escape=*/false,
521 /* make_host_escape= */ false, MakeCheckNull);
522}
523
524ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_CheckNull_Escape) {
525 TestAliasingViaStore(thread, /*make_it_escape=*/true,
526 /* make_host_escape= */ false, MakeCheckNull);
527}
528
529ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_CheckNull_EscapeViaHost) {
530 TestAliasingViaStore(thread, /*make_it_escape=*/false,
531 /* make_host_escape= */ true, MakeCheckNull);
532}
533
534ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_Redefinition_NoEscape) {
535 TestAliasingViaStore(thread, /*make_it_escape=*/false,
536 /* make_host_escape= */ false, MakeRedefinition);
537}
538
539ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_Redefinition_Escape) {
540 TestAliasingViaStore(thread, /*make_it_escape=*/true,
541 /* make_host_escape= */ false, MakeRedefinition);
542}
543
// NOTE(review): the ISOLATE_UNIT_TEST_CASE( header lines of these two
// wrappers are missing from this extraction; the visible fragments are kept
// byte-for-byte.
545 LoadOptimizer_AliasingViaStore_Redefinition_EscapeViaHost) {
546 TestAliasingViaStore(thread, /*make_it_escape=*/false,
547 /* make_host_escape= */ true, MakeRedefinition);
548}
549
551 LoadOptimizer_AliasingViaStore_AssertAssignable_NoEscape) {
552 TestAliasingViaStore(thread, /*make_it_escape=*/false,
553 /* make_host_escape= */ false, MakeAssertAssignable);
554}
555
556ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaStore_AssertAssignable_Escape) {
557 TestAliasingViaStore(thread, /*make_it_escape=*/true,
558 /* make_host_escape= */ false, MakeAssertAssignable);
559}
560
// NOTE(review): the ISOLATE_UNIT_TEST_CASE( header line is missing from this
// extraction; the visible fragment is kept byte-for-byte.
562 LoadOptimizer_AliasingViaStore_AssertAssignable_EscapeViaHost) {
563 TestAliasingViaStore(thread, /*make_it_escape=*/false,
564 /* make_host_escape= */ true, MakeAssertAssignable);
565}
566
567// This is a regression test for
568// https://github.com/flutter/flutter/issues/48114.
// NOTE(review): several lines are missing from this extraction (the helper /
// pipeline setup, some local declarations, and trailing constructor
// arguments); the code is kept byte-for-byte and only comments were added.
569ISOLATE_UNIT_TEST_CASE(LoadOptimizer_AliasingViaTypedDataAndUntaggedTypedData) {
571 CompilerState S(thread, /*is_aot=*/false, /*is_optimizing=*/true);
573
574 const auto& lib = Library::Handle(Library::TypedDataLibrary());
575 const Class& cls = Class::Handle(lib.LookupClass(Symbols::Uint32List()));
576 const Error& err = Error::Handle(cls.EnsureIsFinalized(thread));
577 EXPECT(err.IsNull());
578
580 cls.LookupFactory(String::Handle(String::New("Uint32List."))));
581 EXPECT(!function.IsNull());
582
583 auto zone = H.flow_graph()->zone();
584
585 // We are going to build the following graph:
586 //
587 // B0[graph_entry] {
588 // vc0 <- Constant(0)
589 // vc42 <- Constant(42)
590 // }
591 //
592 // B1[function_entry] {
593 // }
594 // array <- StaticCall(...) {_Uint32List}
595 // v1 <- LoadIndexed(array)
596 // v2 <- LoadField(array, Slot::PointerBase_data())
597 // StoreIndexed(v2, index=vc0, value=vc42)
598 // v3 <- LoadIndexed(array)
599 // return v3
600 // }
601
602 auto vc0 = H.flow_graph()->GetConstant(Integer::Handle(Integer::New(0)));
603 auto vc42 = H.flow_graph()->GetConstant(Integer::Handle(Integer::New(42)));
604 auto b1 = H.flow_graph()->graph_entry()->normal_entry();
605
606 StaticCallInstr* array;
611 DartReturnInstr* ret;
612
613 {
614 BlockBuilder builder(H.flow_graph(), b1);
615
616 // array <- StaticCall(...) {_Uint32List}
617 array = builder.AddDefinition(new StaticCallInstr(
618 InstructionSource(), function, 0, Array::empty_array(), InputsArray(),
619 DeoptId::kNone, 0, ICData::kNoRebind));
// Pin the call's result type so the optimizer treats |array| as a
// _Uint32List allocation.
620 array->UpdateType(CompileType::FromCid(kTypedDataUint32ArrayCid));
621 array->SetResultType(zone, CompileType::FromCid(kTypedDataUint32ArrayCid));
623
624 // v1 <- LoadIndexed(array)
625 v1 = builder.AddDefinition(new LoadIndexedInstr(
626 new Value(array), new Value(vc0), /*index_unboxed=*/false, 1,
627 kTypedDataUint32ArrayCid, kAlignedAccess, DeoptId::kNone,
629
630 // v2 <- LoadField(array, Slot::PointerBase_data())
631 // StoreIndexed(v2, index=0, value=42)
632 v2 = builder.AddDefinition(new LoadFieldInstr(
633 new Value(array), Slot::PointerBase_data(),
635 store = builder.AddInstruction(new StoreIndexedInstr(
636 new Value(v2), new Value(vc0), new Value(vc42), kNoStoreBarrier,
637 /*index_unboxed=*/false, 1, kTypedDataUint32ArrayCid, kAlignedAccess,
639
640 // v3 <- LoadIndexed(array)
641 v3 = builder.AddDefinition(new LoadIndexedInstr(
642 new Value(array), new Value(vc0), /*index_unboxed=*/false, 1,
643 kTypedDataUint32ArrayCid, kAlignedAccess, DeoptId::kNone,
645
646 // return v3
647 ret = builder.AddInstruction(new DartReturnInstr(
648 InstructionSource(), new Value(v3), S.GetNextDeoptId()));
649 }
650 H.FinishGraph();
651
652 DominatorBasedCSE::Optimize(H.flow_graph());
// The store through the untagged data pointer (v2) must prevent forwarding
// of the second LoadIndexed: all six instructions must survive in order.
653 {
654 Instruction* sc = nullptr;
655 Instruction* li = nullptr;
656 Instruction* lf = nullptr;
657 Instruction* s = nullptr;
658 Instruction* li2 = nullptr;
659 Instruction* r = nullptr;
660 ILMatcher cursor(H.flow_graph(), b1, true);
661 RELEASE_ASSERT(cursor.TryMatch({
662 kMatchAndMoveFunctionEntry,
663 {kMatchAndMoveStaticCall, &sc},
664 {kMatchAndMoveLoadIndexed, &li},
665 {kMatchAndMoveLoadField, &lf},
666 {kMatchAndMoveStoreIndexed, &s},
667 {kMatchAndMoveLoadIndexed, &li2},
668 {kMatchDartReturn, &r},
669 }));
670 EXPECT(array == sc);
671 EXPECT(v1 == li);
672 EXPECT(v2 == lf);
673 EXPECT(store == s);
674 EXPECT(v3 == li2);
675 EXPECT(ret == r);
676 }
677}
678
679// This test ensures that a LoadNativeField of the PointerBase data field for
680// a newly allocated TypedData object does not have tagged null forwarded to it,
681// as that's wrong for two reasons: it's an unboxed field, and it is initialized
682// during the allocation stub.
683ISOLATE_UNIT_TEST_CASE(LoadOptimizer_LoadDataFieldOfNewTypedData) {
// NOTE(review): a few lines are missing from this extraction (helper setup,
// some declarations, and the AllocateObjectInstr construction); the code is
// kept byte-for-byte and only comments were added.
685 CompilerState S(thread, /*is_aot=*/false, /*is_optimizing=*/true);
687
688 auto zone = H.flow_graph()->zone();
689
690 // We are going to build the following graph:
691 //
692 // B0[graph_entry] {
693 // vc42 <- Constant(42)
694 // }
695 //
696 // B1[function_entry] {
697 // }
698 // array <- AllocateTypedData(kTypedDataUint8ArrayCid, vc42)
699 // view <- AllocateObject(kTypedDataUint8ArrayViewCid)
700 // v1 <- LoadNativeField(array, Slot::PointerBase_data())
701 // StoreNativeField(Slot::PointerBase_data(), view, v1, kNoStoreBarrier,
702 // kInitalizing)
703 // return view
704 // }
705
706 const auto& lib = Library::Handle(zone, Library::TypedDataLibrary());
707 EXPECT(!lib.IsNull());
708 const Class& view_cls = Class::ZoneHandle(
709 zone, lib.LookupClassAllowPrivate(Symbols::_Uint8ArrayView()));
710 EXPECT(!view_cls.IsNull());
711 const Error& err = Error::Handle(zone, view_cls.EnsureIsFinalized(thread));
712 EXPECT(err.IsNull());
713
714 auto vc42 = H.flow_graph()->GetConstant(Integer::Handle(Integer::New(42)));
715 auto b1 = H.flow_graph()->graph_entry()->normal_entry();
716
719 LoadFieldInstr* v1;
721 DartReturnInstr* ret;
722
723 {
724 BlockBuilder builder(H.flow_graph(), b1);
725
726 // array <- AllocateTypedData(kTypedDataUint8ArrayCid, vc42)
727 array = builder.AddDefinition(
728 new AllocateTypedDataInstr(InstructionSource(), kTypedDataUint8ArrayCid,
729 new (zone) Value(vc42), DeoptId::kNone));
730
731 // view <- AllocateObject(kTypedDataUint8ArrayViewCid, vta)
732 view = builder.AddDefinition(
734
735 // v1 <- LoadNativeField(array, Slot::PointerBase_data())
736 v1 = builder.AddDefinition(new LoadFieldInstr(
737 new (zone) Value(array), Slot::PointerBase_data(),
738 InnerPointerAccess::kMayBeInnerPointer, InstructionSource()));
739
740 // StoreNativeField(Slot::PointerBase_data(), view, v1, kNoStoreBarrier,
741 // kInitalizing)
742 store = builder.AddInstruction(new StoreFieldInstr(
743 Slot::PointerBase_data(), new (zone) Value(view), new (zone) Value(v1),
744 kNoStoreBarrier, InnerPointerAccess::kMayBeInnerPointer,
745 InstructionSource(), StoreFieldInstr::Kind::kInitializing));
746
747 // return view
748 ret = builder.AddInstruction(new DartReturnInstr(
749 InstructionSource(), new Value(view), S.GetNextDeoptId()));
750 }
751 H.FinishGraph();
752
753 DominatorBasedCSE::Optimize(H.flow_graph());
// After CSE the load and the initializing store must both survive, in the
// original order, with nothing forwarded into the unboxed data field.
754 {
755 Instruction* alloc_array = nullptr;
756 Instruction* alloc_view = nullptr;
757 Instruction* lf = nullptr;
758 Instruction* sf = nullptr;
759 Instruction* r = nullptr;
760 ILMatcher cursor(H.flow_graph(), b1, true);
761 RELEASE_ASSERT(cursor.TryMatch({
762 kMatchAndMoveFunctionEntry,
763 {kMatchAndMoveAllocateTypedData, &alloc_array},
764 {kMatchAndMoveAllocateObject, &alloc_view},
765 {kMatchAndMoveLoadField, &lf},
766 {kMatchAndMoveStoreField, &sf},
767 {kMatchDartReturn, &r},
768 }));
769 EXPECT(array == alloc_array);
770 EXPECT(view == alloc_view);
771 EXPECT(v1 == lf);
772 EXPECT(store == sf);
773 EXPECT(ret == r);
774 }
775}
776
777// This test verifies that we correctly alias load/stores into typed array
778// which use different element sizes. This is a regression test for
779// a fix in 836c04f.
780ISOLATE_UNIT_TEST_CASE(LoadOptimizer_TypedArrayViewAliasing) {
// NOTE(review): a few lines are missing from this extraction (helper setup,
// the |load| declaration, and some trailing constructor arguments); the code
// is kept byte-for-byte and only comments were added.
781 const char* script_chars = R"(
782 import 'dart:typed_data';
783
784 class View {
785 final Float64List data;
786 View(this.data);
787 }
788 )";
789 const Library& lib =
790 Library::Handle(LoadTestScript(script_chars, NoopNativeLookup));
791
792 const Class& view_cls = Class::ZoneHandle(
793 lib.LookupClass(String::Handle(Symbols::New(thread, "View"))));
794 const Error& err = Error::Handle(view_cls.EnsureIsFinalized(thread));
795 EXPECT(err.IsNull());
796
797 const Field& original_field = Field::Handle(
798 view_cls.LookupField(String::Handle(Symbols::New(thread, "data"))));
799 EXPECT(!original_field.IsNull());
800 const Field& field = Field::Handle(original_field.CloneFromOriginal());
801
803 CompilerState S(thread, /*is_aot=*/false, /*is_optimizing=*/true);
805
806 auto b1 = H.flow_graph()->graph_entry()->normal_entry();
807
809 DartReturnInstr* ret;
810
811 {
812 BlockBuilder builder(H.flow_graph(), b1);
813 // array <- AllocateTypedData(1)
814 const auto array = builder.AddDefinition(new AllocateTypedDataInstr(
815 InstructionSource(), kTypedDataFloat64ArrayCid,
816 new Value(H.IntConstant(1)), DeoptId::kNone));
817 // view <- AllocateObject(View)
818 const auto view = builder.AddDefinition(
820 // StoreField(view.data = array)
821 builder.AddInstruction(new StoreFieldInstr(
822 field, new Value(view), new Value(array),
824 &H.flow_graph()->parsed_function()));
825 // StoreIndexed(array <float64>, 0, 1.0)
826 builder.AddInstruction(new StoreIndexedInstr(
827 new Value(array), new Value(H.IntConstant(0)),
828 new Value(H.DoubleConstant(1.0)), StoreBarrierType::kNoStoreBarrier,
829 /*index_unboxed=*/false,
830 /*index_scale=*/Instance::ElementSizeFor(kTypedDataFloat64ArrayCid),
831 kTypedDataFloat64ArrayCid, AlignmentType::kAlignedAccess,
833 // array_alias <- LoadField(view.data)
834 const auto array_alias = builder.AddDefinition(new LoadFieldInstr(
835 new Value(view), Slot::Get(field, &H.flow_graph()->parsed_function()),
837 // StoreIndexed(array_alias <float32>, 1, 2.0)
838 builder.AddInstruction(new StoreIndexedInstr(
839 new Value(array_alias), new Value(H.IntConstant(1)),
840 new Value(H.DoubleConstant(2.0)), StoreBarrierType::kNoStoreBarrier,
841 /*index_unboxed=*/false,
842 /*index_scale=*/Instance::ElementSizeFor(kTypedDataFloat32ArrayCid),
843 kTypedDataFloat32ArrayCid, AlignmentType::kAlignedAccess,
845 // load <- LoadIndexed(array <float64>, 0)
846 load = builder.AddDefinition(new LoadIndexedInstr(
847 new Value(array), new Value(H.IntConstant(0)), /*index_unboxed=*/false,
848 /*index_scale=*/Instance::ElementSizeFor(kTypedDataFloat64ArrayCid),
849 kTypedDataFloat64ArrayCid, AlignmentType::kAlignedAccess,
851 // Return(load)
852 ret = builder.AddReturn(new Value(load));
853 }
854 H.FinishGraph();
855 DominatorBasedCSE::Optimize(H.flow_graph());
856
857 // Check that we do not forward the load in question.
858 EXPECT_PROPERTY(ret, it.value()->definition() == load);
859}
860
861static void CountLoadsStores(FlowGraph* flow_graph,
862 intptr_t* loads,
863 intptr_t* stores) {
864 for (BlockIterator block_it = flow_graph->reverse_postorder_iterator();
865 !block_it.Done(); block_it.Advance()) {
866 for (ForwardInstructionIterator it(block_it.Current()); !it.Done();
867 it.Advance()) {
868 if (it.Current()->IsLoadField()) {
869 (*loads)++;
870 } else if (it.Current()->IsStoreField()) {
871 (*stores)++;
872 }
873 }
874 }
875}
876
877ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantStoresAndLoads) {
878 const char* kScript = R"(
879 class Bar {
880 Bar() { a = null; }
881 dynamic a;
882 }
883
884 Bar foo() {
885 Bar bar = new Bar();
886 bar.a = null;
887 bar.a = bar;
888 bar.a = bar.a;
889 return bar.a;
890 }
891
892 main() {
893 foo();
894 }
895 )";
896
897 const auto& root_library = Library::Handle(LoadTestScript(kScript));
898 Invoke(root_library, "main");
899 const auto& function = Function::Handle(GetFunction(root_library, "foo"));
900 TestPipeline pipeline(function, CompilerPass::kJIT);
901 FlowGraph* flow_graph = pipeline.RunPasses({
902 CompilerPass::kComputeSSA,
903 CompilerPass::kTypePropagation,
904 CompilerPass::kApplyICData,
905 CompilerPass::kInlining,
906 CompilerPass::kTypePropagation,
907 CompilerPass::kSelectRepresentations,
908 CompilerPass::kCanonicalize,
909 CompilerPass::kConstantPropagation,
910 });
911
912 ASSERT(flow_graph != nullptr);
913
914 // Before CSE, we have 2 loads and 4 stores.
915 intptr_t bef_loads = 0;
916 intptr_t bef_stores = 0;
917 CountLoadsStores(flow_graph, &bef_loads, &bef_stores);
918 EXPECT_EQ(2, bef_loads);
919 EXPECT_EQ(4, bef_stores);
920
921 DominatorBasedCSE::Optimize(flow_graph);
922
923 // After CSE, no load and only one store remains.
924 intptr_t aft_loads = 0;
925 intptr_t aft_stores = 0;
926 CountLoadsStores(flow_graph, &aft_loads, &aft_stores);
927 EXPECT_EQ(0, aft_loads);
928 EXPECT_EQ(1, aft_stores);
929}
930
931ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantStaticFieldInitialization) {
932 const char* kScript = R"(
933 int getX() => 2;
934 int x = getX();
935
936 foo() => x + x;
937
938 main() {
939 foo();
940 }
941 )";
942
943 const auto& root_library = Library::Handle(LoadTestScript(kScript));
944 Invoke(root_library, "main");
945 const auto& function = Function::Handle(GetFunction(root_library, "foo"));
946 TestPipeline pipeline(function, CompilerPass::kJIT);
947 FlowGraph* flow_graph = pipeline.RunPasses({});
948 ASSERT(flow_graph != nullptr);
949
950 auto entry = flow_graph->graph_entry()->normal_entry();
951 EXPECT(entry != nullptr);
952
953 ILMatcher cursor(flow_graph, entry);
954 RELEASE_ASSERT(cursor.TryMatch({
955 kMatchAndMoveFunctionEntry,
956 kMatchAndMoveCheckStackOverflow,
957 kMatchAndMoveLoadStaticField,
958 kMoveParallelMoves,
959 kMatchAndMoveCheckSmi,
960 kMoveParallelMoves,
961 kMatchAndMoveBinarySmiOp,
962 kMoveParallelMoves,
963 kMatchDartReturn,
964 }));
965}
966
967ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantInitializerCallAfterIf) {
968 const char* kScript = R"(
969 int x = int.parse('1');
970
971 @pragma('vm:never-inline')
972 use(int arg) {}
973
974 foo(bool condition) {
975 if (condition) {
976 x = 3;
977 } else {
978 use(x);
979 }
980 use(x);
981 }
982
983 main() {
984 foo(true);
985 }
986 )";
987
988 const auto& root_library = Library::Handle(LoadTestScript(kScript));
989 Invoke(root_library, "main");
990 const auto& function = Function::Handle(GetFunction(root_library, "foo"));
991 TestPipeline pipeline(function, CompilerPass::kJIT);
992 FlowGraph* flow_graph = pipeline.RunPasses({});
993 ASSERT(flow_graph != nullptr);
994
995 auto entry = flow_graph->graph_entry()->normal_entry();
996 EXPECT(entry != nullptr);
997
998 LoadStaticFieldInstr* load_static_after_if = nullptr;
999
1000 ILMatcher cursor(flow_graph, entry);
1001 RELEASE_ASSERT(cursor.TryMatch({
1002 kMoveGlob,
1003 kMatchAndMoveBranchTrue,
1004 kMoveGlob,
1005 kMatchAndMoveGoto,
1006 kMatchAndMoveJoinEntry,
1007 kMoveParallelMoves,
1008 {kMatchAndMoveLoadStaticField, &load_static_after_if},
1009 kMoveGlob,
1010 kMatchDartReturn,
1011 }));
1012 EXPECT(!load_static_after_if->calls_initializer());
1013}
1014
1015ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantInitializerCallInLoop) {
// NOTE(review): one matcher entry (original line 1067, between the two
// in-loop LoadField matches) is missing from this extraction; the code is
// kept byte-for-byte and only comments were added.
1016 const char* kScript = R"(
1017 class A {
1018 late int x = int.parse('1');
1019 A? next;
1020 }
1021
1022 @pragma('vm:never-inline')
1023 use(int arg) {}
1024
1025 foo(A obj) {
1026 use(obj.x);
1027 for (;;) {
1028 use(obj.x);
1029 final next = obj.next;
1030 if (next == null) {
1031 break;
1032 }
1033 obj = next;
1034 use(obj.x);
1035 }
1036 }
1037
1038 main() {
1039 foo(A()..next = A());
1040 }
1041 )";
1042
1043 const auto& root_library = Library::Handle(LoadTestScript(kScript));
1044 Invoke(root_library, "main");
1045 const auto& function = Function::Handle(GetFunction(root_library, "foo"));
1046 TestPipeline pipeline(function, CompilerPass::kJIT);
1047 FlowGraph* flow_graph = pipeline.RunPasses({});
1048 ASSERT(flow_graph != nullptr);
1049
1050 auto entry = flow_graph->graph_entry()->normal_entry();
1051 EXPECT(entry != nullptr);
1052
1053 LoadFieldInstr* load_field_before_loop = nullptr;
1054 LoadFieldInstr* load_field_in_loop1 = nullptr;
1055 LoadFieldInstr* load_field_in_loop2 = nullptr;
1056
1057 ILMatcher cursor(flow_graph, entry);
1058 RELEASE_ASSERT(cursor.TryMatch({
1059 kMoveGlob,
1060 {kMatchAndMoveLoadField, &load_field_before_loop},
1061 kMoveGlob,
1062 kMatchAndMoveGoto,
1063 kMatchAndMoveJoinEntry,
1064 kMoveGlob,
1065 {kMatchAndMoveLoadField, &load_field_in_loop1},
1066 kMoveGlob,
1068 kMoveGlob,
1069 {kMatchAndMoveLoadField, &load_field_in_loop2},
1070 }));
1071
// The first load must keep its initializer call, the load it dominates must
// not, and the load of the freshly re-assigned |obj| must call it again.
1072 EXPECT(load_field_before_loop->calls_initializer());
1073 EXPECT(!load_field_in_loop1->calls_initializer());
1074 EXPECT(load_field_in_loop2->calls_initializer());
1075}
1076
1077#if !defined(TARGET_ARCH_IA32)
1078
1079ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantInitializingStoreAOT) {
1080 const char* kScript = R"(
1081class Vec3 {
1082 final double x, y, z;
1083
1084 @pragma('vm:prefer-inline')
1085 const Vec3(this.x, this.y, this.z);
1086
1087 @override
1088 @pragma('vm:prefer-inline')
1089 String toString() => _vec3ToString(x, y, z);
1090}
1091
1092@pragma('vm:never-inline')
1093String _vec3ToString(double x, double y, double z) => '';
1094
1095// Boxed storage for Vec3.
1096// Fields are unboxed.
1097class Vec3Mut {
1098 double _x = 0.0;
1099 double _y = 0.0;
1100 double _z = 0.0;
1101
1102 Vec3Mut(Vec3 v)
1103 : _x = v.x,
1104 _y = v.y,
1105 _z = v.z;
1106
1107 @override
1108 String toString() => _vec3ToString(_x, _y, _z);
1109
1110 @pragma('vm:prefer-inline')
1111 set vec(Vec3 v) {
1112 _x = v.x;
1113 _y = v.y;
1114 _z = v.z;
1115 }
1116}
1117
1118Vec3Mut main() {
1119 final a = Vec3(3, 4, 5);
1120 final b = Vec3(8, 9, 10);
1121 final c = Vec3(18, 19, 20);
1122 final d = Vec3(180, 190, 200);
1123 final e = Vec3(1800, 1900, 2000);
1124 final v = Vec3Mut(a);
1125 v.vec = b;
1126 v.vec = c;
1127 v.vec = d;
1128 v.vec = e;
1129 return v;
1130}
1131 )";
1132
1133 const auto& root_library = Library::Handle(LoadTestScript(kScript));
1134 const auto& function = Function::Handle(GetFunction(root_library, "main"));
1135
1136 TestPipeline pipeline(function, CompilerPass::kAOT);
1137 FlowGraph* flow_graph = pipeline.RunPasses({});
1138 auto entry = flow_graph->graph_entry()->normal_entry();
1139
1140 AllocateObjectInstr* allocate;
1141 StoreFieldInstr* store1;
1143 StoreFieldInstr* store3;
1144
1145 ILMatcher cursor(flow_graph, entry, true, ParallelMovesHandling::kSkip);
1146 RELEASE_ASSERT(cursor.TryMatch({
1147 kMoveGlob,
1148 {kMatchAndMoveAllocateObject, &allocate},
1149 {kMatchAndMoveStoreField, &store1},
1150 {kMatchAndMoveStoreField, &store2},
1151 {kMatchAndMoveStoreField, &store3},
1152 kMatchDartReturn,
1153 }));
1154
1155 EXPECT(store1->instance()->definition() == allocate);
1156 EXPECT(store2->instance()->definition() == allocate);
1157 EXPECT(store3->instance()->definition() == allocate);
1158}
1159
1160ISOLATE_UNIT_TEST_CASE(LoadOptimizer_RedundantStoreAOT) {
1161 const char* kScript = R"(
1162class Foo {
1163 int x = -1;
1164
1165 toString() => "Foo x: $x";
1166}
1167
1168class Bar {}
1169
1170main() {
1171 final foo = Foo();
1172 foo.x = 11;
1173 new Bar();
1174 foo.x = 12;
1175 new Bar();
1176 foo.x = 13;
1177 return foo;
1178}
1179 )";
1180
1181 const auto& root_library = Library::Handle(LoadTestScript(kScript));
1182 Invoke(root_library, "main");
1183 const auto& function = Function::Handle(GetFunction(root_library, "main"));
1184 TestPipeline pipeline(function, CompilerPass::kAOT);
1185 FlowGraph* flow_graph = pipeline.RunPasses({});
1186 auto entry = flow_graph->graph_entry()->normal_entry();
1187
1188 AllocateObjectInstr* allocate;
1189 StoreFieldInstr* store1;
1190
1191 ILMatcher cursor(flow_graph, entry, true, ParallelMovesHandling::kSkip);
1192 RELEASE_ASSERT(cursor.TryMatch({
1193 kMoveGlob,
1194 {kMatchAndMoveAllocateObject, &allocate},
1195 {kMatchAndMoveStoreField, &store1}, // initializing store
1196 kMatchDartReturn,
1197 }));
1198
1199 EXPECT(store1->instance()->definition() == allocate);
1200}
1201
1202#endif // !defined(TARGET_ARCH_IA32)
1203
// Checks that allocation sinking eliminates every temporary allocation in
// "foo" — the two-element List v1 and the Vector2 objects with their
// Float64List storage — leaving only the CreateArray used for string
// interpolation at the end. The exact expected IL is reproduced in the
// comment block below.
ISOLATE_UNIT_TEST_CASE(AllocationSinking_Arrays) {
  const char* kScript = R"(
import 'dart:typed_data';

class Vector2 {
  final Float64List _v2storage;

  @pragma('vm:prefer-inline')
  Vector2.zero() : _v2storage = Float64List(2);

  @pragma('vm:prefer-inline')
  factory Vector2(double x, double y) => Vector2.zero()..setValues(x, y);

  @pragma('vm:prefer-inline')
  factory Vector2.copy(Vector2 other) => Vector2.zero()..setFrom(other);

  @pragma('vm:prefer-inline')
  Vector2 clone() => Vector2.copy(this);

  @pragma('vm:prefer-inline')
  void setValues(double x_, double y_) {
    _v2storage[0] = x_;
    _v2storage[1] = y_;
  }

  @pragma('vm:prefer-inline')
  void setFrom(Vector2 other) {
    final otherStorage = other._v2storage;
    _v2storage[1] = otherStorage[1];
    _v2storage[0] = otherStorage[0];
  }

  @pragma('vm:prefer-inline')
  Vector2 operator +(Vector2 other) => clone()..add(other);

  @pragma('vm:prefer-inline')
  void add(Vector2 arg) {
    final argStorage = arg._v2storage;
    _v2storage[0] = _v2storage[0] + argStorage[0];
    _v2storage[1] = _v2storage[1] + argStorage[1];
  }

  @pragma('vm:prefer-inline')
  double get x => _v2storage[0];

  @pragma('vm:prefer-inline')
  double get y => _v2storage[1];
}

@pragma('vm:never-inline')
String foo(double x) {
  // All allocations in this function are eliminated by the compiler,
  // except array allocation for string interpolation at the end.
  List v1 = List.filled(2, null);
  v1[0] = 1;
  v1[1] = 'hi';
  Vector2 v2 = new Vector2(1.0, 2.0);
  Vector2 v3 = v2 + Vector2(x, x);
  double sum = v3.x + v3.y;
  return "v1: [${v1[0]},${v1[1]}], v2: [${v2.x},${v2.y}], v3: [${v3.x},${v3.y}], sum: $sum";
}

main() {
  foo(42.0);
}
  )";

  // Load the script, warm up "foo" once, then JIT-compile it.
  const auto& root_library = Library::Handle(LoadTestScript(kScript));
  Invoke(root_library, "main");
  const auto& function = Function::Handle(GetFunction(root_library, "foo"));
  TestPipeline pipeline(function, CompilerPass::kJIT);
  FlowGraph* flow_graph = pipeline.RunPasses({});
  ASSERT(flow_graph != nullptr);

  auto entry = flow_graph->graph_entry()->normal_entry();
  EXPECT(entry != nullptr);

  /* Flow graph to match:

  4: CheckStackOverflow:8(stack=0, loop=0)
  5: ParallelMove rax <- S+2
  6: CheckClass:14(v2 Cids[1: _Double@0150898 etc.  cid 62] nullcheck)
  8: v312 <- Unbox:14(v2 T{_Double}) T{_Double}
 10: ParallelMove xmm1 <- C
 10: v221 <- BinaryDoubleOp:22(+, v341, v312) T{_Double}
 11: ParallelMove DS-7 <- xmm1
 12: ParallelMove xmm2 <- C
 12: v227 <- BinaryDoubleOp:34(+, v342, v312) T{_Double}
 13: ParallelMove DS-6 <- xmm2
 14: v333 <- Box(v221) T{_Double}
 15: ParallelMove S-4 <- rax
 16: v334 <- Box(v227) T{_Double}
 17: ParallelMove S-3 <- rcx
 18: ParallelMove xmm0 <- xmm1
 18: v15 <- BinaryDoubleOp:28(+, v221, v227) T{_Double}
 19: ParallelMove rbx <- C, r10 <- C, DS-5 <- xmm0
 20: v17 <- CreateArray:30(v0, v16) T{_List}
 21: ParallelMove rcx <- rax
 22: StoreIndexed(v17, v5, v18, NoStoreBarrier)
 24: StoreIndexed(v17, v6, v6, NoStoreBarrier)
 26: StoreIndexed(v17, v3, v20, NoStoreBarrier)
 28: StoreIndexed(v17, v21, v7, NoStoreBarrier)
 30: StoreIndexed(v17, v23, v24, NoStoreBarrier)
 32: StoreIndexed(v17, v25, v8, NoStoreBarrier)
 34: StoreIndexed(v17, v27, v20, NoStoreBarrier)
 36: StoreIndexed(v17, v28, v9, NoStoreBarrier)
 38: StoreIndexed(v17, v30, v31, NoStoreBarrier)
 39: ParallelMove rax <- S-4
 40: StoreIndexed(v17, v32, v333, NoStoreBarrier)
 42: StoreIndexed(v17, v34, v20, NoStoreBarrier)
 43: ParallelMove rax <- S-3
 44: StoreIndexed(v17, v35, v334, NoStoreBarrier)
 46: StoreIndexed(v17, v37, v38, NoStoreBarrier)
 47: ParallelMove xmm0 <- DS-5
 48: v335 <- Box(v15) T{_Double}
 49: ParallelMove rdx <- rcx, rax <- rax
 50: StoreIndexed(v17, v39, v335)
 52: MoveArgument(v17)
 54: v40 <- StaticCall:44( _interpolate@0150898<0> v17,
        recognized_kind = StringBaseInterpolate) T{String?}
 56: Return:48(v40)
*/

  CreateArrayInstr* create_array = nullptr;
  StaticCallInstr* string_interpolate = nullptr;

  // First anchor the cursor just past the function prologue...
  ILMatcher cursor(flow_graph, entry, /*trace=*/true,
                   ParallelMovesHandling::kSkip);
  RELEASE_ASSERT(cursor.TryMatch({
      kMatchAndMoveFunctionEntry,
      kMatchAndMoveCheckStackOverflow,
  }));
  // ...then require the exact remaining sequence (no glob entries): the
  // double arithmetic, a single CreateArray, its element stores, and the
  // interpolation call — i.e. no other allocation survives.
  RELEASE_ASSERT(cursor.TryMatch({
      kMatchAndMoveUnbox,
      kMatchAndMoveBinaryDoubleOp,
      kMatchAndMoveBinaryDoubleOp,
      kMatchAndMoveBox,
      kMatchAndMoveBox,
      kMatchAndMoveBinaryDoubleOp,
      {kMatchAndMoveCreateArray, &create_array},
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveBox,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveMoveArgument,
      {kMatchAndMoveStaticCall, &string_interpolate},
      kMatchDartReturn,
  }));

  // The surviving array is exactly the argument of the interpolation call.
  EXPECT(string_interpolate->ArgumentAt(0) == create_array);
}
1366
// Checks that allocation sinking eliminates both record allocations in
// "foo" (the positional record r1 and the named-field record from
// getRecord), leaving only the CreateArray for string interpolation, and
// that the sunken records are materialized correctly on deoptimization
// (the second run passes max int, wrapping the Smi '+' — see "sum: -2").
ISOLATE_UNIT_TEST_CASE(AllocationSinking_Records) {
  const char* kScript = R"(

@pragma('vm:prefer-inline')
({int field1, String field2}) getRecord(int x, String y) =>
    (field1: x, field2: y);

@pragma('vm:never-inline')
String foo(int x, String y) {
  // All allocations in this function are eliminated by the compiler,
  // except array allocation for string interpolation at the end.
  (int, bool) r1 = (x, true);
  final r2 = getRecord(x, y);
  int sum = r1.$1 + r2.field1;
  return "r1: (${r1.$1}, ${r1.$2}), "
      "r2: (field1: ${r2.field1}, field2: ${r2.field2}), sum: $sum";
}

int count = 0;
main() {
  // Deoptimize on the 2nd run.
  return foo(count++ == 0 ? 42 : 9223372036854775807, 'hey');
}
  )";

  // First run executes unoptimized code and pins the expected output.
  const auto& root_library = Library::Handle(LoadTestScript(kScript));
  const auto& result1 = Object::Handle(Invoke(root_library, "main"));
  EXPECT(result1.IsString());
  EXPECT_STREQ(result1.ToCString(),
               "r1: (42, true), r2: (field1: 42, field2: hey), sum: 84");
  const auto& function = Function::Handle(GetFunction(root_library, "foo"));
  TestPipeline pipeline(function, CompilerPass::kJIT);
  FlowGraph* flow_graph = pipeline.RunPasses({});
  ASSERT(flow_graph != nullptr);

  auto entry = flow_graph->graph_entry()->normal_entry();
  EXPECT(entry != nullptr);

  /* Flow graph to match:

  2: B1[function entry]:2 {
      v2 <- Parameter(0) [-9223372036854775808, 9223372036854775807] T{int}
      v3 <- Parameter(1) T{String}
}
  4: CheckStackOverflow:8(stack=0, loop=0)
  5: ParallelMove rax <- S+3
  6: CheckSmi:16(v2)
  8: ParallelMove rcx <- rax
  8: v9 <- BinarySmiOp:16(+, v2 T{_Smi}, v2 T{_Smi}) [-4611686018427387904, 4611686018427387903] T{_Smi}
  9: ParallelMove rbx <- C, r10 <- C, S-3 <- rcx
 10: v11 <- CreateArray:18(v0, v10) T{_List}
 11: ParallelMove rax <- rax
 12: StoreIndexed(v11, v12, v13, NoStoreBarrier)
 13: ParallelMove rcx <- S+3
 14: StoreIndexed(v11, v14, v2 T{_Smi}, NoStoreBarrier)
 16: StoreIndexed(v11, v16, v17, NoStoreBarrier)
 18: StoreIndexed(v11, v18, v5, NoStoreBarrier)
 20: StoreIndexed(v11, v20, v21, NoStoreBarrier)
 22: StoreIndexed(v11, v22, v2 T{_Smi}, NoStoreBarrier)
 24: StoreIndexed(v11, v24, v25, NoStoreBarrier)
 25: ParallelMove rcx <- S+2
 26: StoreIndexed(v11, v26, v3, NoStoreBarrier)
 28: StoreIndexed(v11, v28, v29, NoStoreBarrier)
 29: ParallelMove rcx <- S-3
 30: StoreIndexed(v11, v30, v9, NoStoreBarrier)
 32: MoveArgument(v11)
 34: v31 <- StaticCall:20( _interpolate@0150898<0> v11, recognized_kind = StringBaseInterpolate) T{String}
 35: ParallelMove rax <- rax
 36: Return:24(v31)
*/

  // The matcher below has no glob entries, so the sequence is exhaustive:
  // no AllocateRecord (or any other allocation besides CreateArray) remains.
  ILMatcher cursor(flow_graph, entry, /*trace=*/true,
                   ParallelMovesHandling::kSkip);
  RELEASE_ASSERT(cursor.TryMatch({
      kMatchAndMoveFunctionEntry,
      kMatchAndMoveCheckStackOverflow,
      kMatchAndMoveCheckSmi,
      kMatchAndMoveBinarySmiOp,
      kMatchAndMoveCreateArray,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveStoreIndexed,
      kMatchAndMoveMoveArgument,
      kMatchAndMoveStaticCall,
      kMatchDartReturn,
  }));

  // Second run executes the optimized code with the max-int argument,
  // forcing deoptimization; the deoptimizer must rebuild the sunken
  // records so the interpolated result is still correct.
  Compiler::CompileOptimizedFunction(thread, function);
  const auto& result2 = Object::Handle(Invoke(root_library, "main"));
  EXPECT(result2.IsString());
  EXPECT_STREQ(result2.ToCString(),
               "r1: (9223372036854775807, true), r2: (field1: "
               "9223372036854775807, field2: hey), sum: -2");
}
1468
1469#if !defined(TARGET_ARCH_IA32)
1470
1471ISOLATE_UNIT_TEST_CASE(DelayAllocations_DelayAcrossCalls) {
1472 const char* kScript = R"(
1473 class A {
1474 dynamic x, y;
1475 A(this.x, this.y);
1476 }
1477
1478 int count = 0;
1479
1480 @pragma("vm:never-inline")
1481 dynamic foo(int i) => count++ < 2 ? i : '$i';
1482
1483 @pragma("vm:never-inline")
1484 dynamic use(v) {}
1485
1486 void test() {
1487 A a = new A(foo(1), foo(2));
1488 use(a);
1489 }
1490 )";
1491
1492 const auto& root_library = Library::Handle(LoadTestScript(kScript));
1493 const auto& function = Function::Handle(GetFunction(root_library, "test"));
1494
1495 // Get fields to kDynamicCid guard
1496 Invoke(root_library, "test");
1497 Invoke(root_library, "test");
1498
1499 TestPipeline pipeline(function, CompilerPass::kAOT);
1500 FlowGraph* flow_graph = pipeline.RunPasses({});
1501 auto entry = flow_graph->graph_entry()->normal_entry();
1502
1503 StaticCallInstr* call1;
1504 StaticCallInstr* call2;
1505 AllocateObjectInstr* allocate;
1506 StoreFieldInstr* store1;
1508
1509 ILMatcher cursor(flow_graph, entry, true, ParallelMovesHandling::kSkip);
1510 RELEASE_ASSERT(cursor.TryMatch({
1511 kMoveGlob,
1512 {kMatchAndMoveStaticCall, &call1},
1513 kMoveGlob,
1514 {kMatchAndMoveStaticCall, &call2},
1515 kMoveGlob,
1516 {kMatchAndMoveAllocateObject, &allocate},
1517 {kMatchAndMoveStoreField, &store1},
1518 {kMatchAndMoveStoreField, &store2},
1519 }));
1520
1521 EXPECT(strcmp(call1->function().UserVisibleNameCString(), "foo") == 0);
1522 EXPECT(strcmp(call2->function().UserVisibleNameCString(), "foo") == 0);
1523 EXPECT(store1->instance()->definition() == allocate);
1524 EXPECT(!store1->ShouldEmitStoreBarrier());
1525 EXPECT(store2->instance()->definition() == allocate);
1526 EXPECT(!store2->ShouldEmitStoreBarrier());
1527}
1528
1529ISOLATE_UNIT_TEST_CASE(DelayAllocations_DontDelayIntoLoop) {
1530 const char* kScript = R"(
1531 void test() {
1532 Object o = new Object();
1533 for (int i = 0; i < 10; i++) {
1534 use(o);
1535 }
1536 }
1537
1538 @pragma('vm:never-inline')
1539 void use(Object o) {
1540 print(o.hashCode);
1541 }
1542 )";
1543
1544 const auto& root_library = Library::Handle(LoadTestScript(kScript));
1545 const auto& function = Function::Handle(GetFunction(root_library, "test"));
1546
1547 TestPipeline pipeline(function, CompilerPass::kAOT);
1548 FlowGraph* flow_graph = pipeline.RunPasses({});
1549 auto entry = flow_graph->graph_entry()->normal_entry();
1550
1551 AllocateObjectInstr* allocate;
1553
1554 ILMatcher cursor(flow_graph, entry, true, ParallelMovesHandling::kSkip);
1555 RELEASE_ASSERT(cursor.TryMatch({
1556 kMoveGlob,
1557 {kMatchAndMoveAllocateObject, &allocate},
1558 kMoveGlob,
1560 kMoveGlob,
1561 {kMatchAndMoveStaticCall, &call},
1562 }));
1563
1564 EXPECT(strcmp(call->function().UserVisibleNameCString(), "use") == 0);
1565 EXPECT(call->Receiver()->definition() == allocate);
1566}
1567
1568ISOLATE_UNIT_TEST_CASE(CheckStackOverflowElimination_NoInterruptsPragma) {
1569 const char* kScript = R"(
1570 @pragma('vm:prefer-inline')
1571 int bar(int n) {
1572 print(''); // Side-effectful operation
1573 var sum = 0;
1574 for (int i = 0; i < n; i++) {
1575 sum += i;
1576 }
1577 return sum;
1578 }
1579
1580 @pragma('vm:unsafe:no-interrupts')
1581 int test() {
1582 int result = 0;
1583 for (int i = 0; i < 10; i++) {
1584 result ^= bar(i);
1585 }
1586 return result;
1587 }
1588 )";
1589
1590 const auto& root_library = Library::Handle(LoadTestScript(kScript));
1591 const auto& function = Function::Handle(GetFunction(root_library, "test"));
1592
1593 TestPipeline pipeline(function, CompilerPass::kAOT);
1594 auto flow_graph = pipeline.RunPasses({});
1595 for (auto block : flow_graph->postorder()) {
1596 for (auto instr : block->instructions()) {
1597 EXPECT_PROPERTY(instr, !it.IsCheckStackOverflow());
1598 }
1599 }
1600}
1601
1602ISOLATE_UNIT_TEST_CASE(BoundsCheckElimination_Pragma) {
1603 const char* kScript = R"(
1604 import 'dart:typed_data';
1605
1606 @pragma('vm:unsafe:no-bounds-checks')
1607 int test(Uint8List list) {
1608 int result = 0;
1609 for (int i = 0; i < 10; i++) {
1610 result = list[i];
1611 }
1612 return result;
1613 }
1614 )";
1615
1616 const auto& root_library = Library::Handle(LoadTestScript(kScript));
1617 const auto& function = Function::Handle(GetFunction(root_library, "test"));
1618
1619 TestPipeline pipeline(function, CompilerPass::kAOT);
1620 auto flow_graph = pipeline.RunPasses({});
1621 for (auto block : flow_graph->postorder()) {
1622 for (auto instr : block->instructions()) {
1623 EXPECT_PROPERTY(instr, !it.IsCheckBoundBase());
1624 }
1625 }
1626}
1627
1628// This test checks that CSE unwraps redefinitions when comparing all
1629// instructions except loads, which are handled specially.
1630ISOLATE_UNIT_TEST_CASE(CSE_Redefinitions) {
1631 const char* script_chars = R"(
1632 @pragma("vm:external-name", "BlackholeNative")
1633 external dynamic blackhole([a, b, c, d, e, f]);
1634 class K<T> {
1635 final T field;
1636 K(this.field);
1637 }
1638 )";
1639 const Library& lib =
1640 Library::Handle(LoadTestScript(script_chars, NoopNativeLookup));
1641
1642 const Class& cls = Class::ZoneHandle(
1643 lib.LookupClass(String::Handle(Symbols::New(thread, "K"))));
1644 const Error& err = Error::Handle(cls.EnsureIsFinalized(thread));
1645 EXPECT(err.IsNull());
1646
1647 const Field& original_field = Field::Handle(
1648 cls.LookupField(String::Handle(Symbols::New(thread, "field"))));
1649 EXPECT(!original_field.IsNull());
1650 const Field& field = Field::Handle(original_field.CloneFromOriginal());
1651
1652 const Function& blackhole =
1653 Function::ZoneHandle(GetFunction(lib, "blackhole"));
1654
1656 CompilerState S(thread, /*is_aot=*/false, /*is_optimizing=*/true);
1657 FlowGraphBuilderHelper H(/*num_parameters=*/2);
1658 H.AddVariable("v0", AbstractType::ZoneHandle(Type::DynamicType()));
1659 H.AddVariable("v1", AbstractType::ZoneHandle(Type::DynamicType()));
1660
1661 auto b1 = H.flow_graph()->graph_entry()->normal_entry();
1662
1663 BoxInstr* box0;
1664 BoxInstr* box1;
1665 LoadFieldInstr* load0;
1666 LoadFieldInstr* load1;
1669 DartReturnInstr* ret;
1670
1671 {
1672 BlockBuilder builder(H.flow_graph(), b1);
1673 auto& slot = Slot::Get(field, &H.flow_graph()->parsed_function());
1674 auto param0 = builder.AddParameter(0, kUnboxedDouble);
1675 auto param1 = builder.AddParameter(1, kTagged);
1676 auto redef0 =
1677 builder.AddDefinition(new RedefinitionInstr(new Value(param0)));
1678 auto redef1 =
1679 builder.AddDefinition(new RedefinitionInstr(new Value(param0)));
1680 box0 = builder.AddDefinition(
1681 BoxInstr::Create(kUnboxedDouble, new Value(redef0)));
1682 box1 = builder.AddDefinition(
1683 BoxInstr::Create(kUnboxedDouble, new Value(redef1)));
1684
1685 auto redef2 =
1686 builder.AddDefinition(new RedefinitionInstr(new Value(param1)));
1687 auto redef3 =
1688 builder.AddDefinition(new RedefinitionInstr(new Value(param1)));
1689 load0 = builder.AddDefinition(
1690 new LoadFieldInstr(new Value(redef2), slot, InstructionSource()));
1691 load1 = builder.AddDefinition(
1692 new LoadFieldInstr(new Value(redef3), slot, InstructionSource()));
1693 load2 = builder.AddDefinition(
1694 new LoadFieldInstr(new Value(redef3), slot, InstructionSource()));
1695
1696 InputsArray args(3);
1697 args.Add(new Value(load0));
1698 args.Add(new Value(load1));
1699 args.Add(new Value(load2));
1700 call = builder.AddInstruction(new StaticCallInstr(
1701 InstructionSource(), blackhole, 0, Array::empty_array(),
1702 std::move(args), S.GetNextDeoptId(), 0, ICData::RebindRule::kStatic));
1703
1704 ret = builder.AddReturn(new Value(box1));
1705 }
1706 H.FinishGraph();
1707
1708 // Running CSE without load optimization should eliminate redundant boxing
1709 // but keep loads intact if they don't have exactly matching inputs.
1710 DominatorBasedCSE::Optimize(H.flow_graph(), /*run_load_optimization=*/false);
1711
1712 EXPECT_PROPERTY(box1, it.WasEliminated());
1713 EXPECT_PROPERTY(ret, it.value()->definition() == box0);
1714
1715 EXPECT_PROPERTY(load0, !it.WasEliminated());
1716 EXPECT_PROPERTY(load1, !it.WasEliminated());
1717 EXPECT_PROPERTY(load2, it.WasEliminated());
1718
1719 EXPECT_PROPERTY(call, it.ArgumentAt(0) == load0);
1720 EXPECT_PROPERTY(call, it.ArgumentAt(1) == load1);
1721 EXPECT_PROPERTY(call, it.ArgumentAt(2) == load1);
1722
1723 // Running load optimization pass should remove the second load but
1724 // insert a redefinition to prevent code motion because the field
1725 // has a generic type.
1726 DominatorBasedCSE::Optimize(H.flow_graph(), /*run_load_optimization=*/true);
1727
1728 EXPECT_PROPERTY(load0, !it.WasEliminated());
1729 EXPECT_PROPERTY(load1, it.WasEliminated());
1730 EXPECT_PROPERTY(load2, it.WasEliminated());
1731
1732 EXPECT_PROPERTY(call, it.ArgumentAt(0) == load0);
1733 EXPECT_PROPERTY(call, it.ArgumentAt(1)->IsRedefinition() &&
1734 it.ArgumentAt(1)->OriginalDefinition() == load0);
1735 EXPECT_PROPERTY(call, it.ArgumentAt(2)->IsRedefinition() &&
1736 it.ArgumentAt(2)->OriginalDefinition() == load0);
1737}
1738
1739ISOLATE_UNIT_TEST_CASE(AllocationSinking_NoViewDataMaterialization) {
1740 auto* const kFunctionName = "unalignedUint16";
1741 auto* const kInvokeNoDeoptName = "no_deopt";
1742 auto* const kInvokeDeoptName = "deopt";
1743 CStringUniquePtr kScript(OS::SCreate(nullptr, R"(
1744 import 'dart:_internal';
1745 import 'dart:typed_data';
1746
1747 @pragma("vm:never-inline")
1748 void check(int x, int y) {
1749 if (x != y) {
1750 throw "Doesn't match";
1751 }
1752 }
1753
1754 @pragma("vm:never-inline")
1755 bool %s(num x) {
1756 var bytes = new ByteData(64);
1757 if (x is int) {
1758 for (var i = 2; i < 4; i++) {
1759 bytes.setUint16(i, x + 1, Endian.host);
1760 check(x + 1, bytes.getUint16(i, Endian.host));
1761 }
1762 } else {
1763 // Force a garbage collection after deoptimization. In DEBUG mode,
1764 // the scavenger tests that the view's data field was set correctly
1765 // during deoptimization before recomputing it.
1766 VMInternalsForTesting.collectAllGarbage();
1767 }
1768 // Make sure the array is also used on the non-int path.
1769 check(0, bytes.getUint16(0, Endian.host));
1770 return x is int;
1771 }
1772
1773 bool %s() {
1774 return %s(0xABCC);
1775 }
1776
1777 bool %s() {
1778 return %s(1.0);
1779 }
1780 )",
1781 kFunctionName, kInvokeNoDeoptName,
1782 kFunctionName, kInvokeDeoptName,
1783 kFunctionName));
1784
1785 const auto& lib =
1786 Library::Handle(LoadTestScript(kScript.get(), NoopNativeLookup));
1787 EXPECT(!lib.IsNull());
1788 if (lib.IsNull()) return;
1789
1790 const auto& function = Function::ZoneHandle(GetFunction(lib, kFunctionName));
1791 EXPECT(!function.IsNull());
1792 if (function.IsNull()) return;
1793
1794 // Run the unoptimized code.
1795 auto& result = Object::Handle(Invoke(lib, kInvokeNoDeoptName));
1796 EXPECT(Bool::Cast(result).value());
1797
1798 TestPipeline pipeline(function, CompilerPass::kJIT);
1799 FlowGraph* flow_graph = pipeline.RunPasses({
1800 CompilerPass::kComputeSSA,
1801 CompilerPass::kApplyICData,
1802 CompilerPass::kTryOptimizePatterns,
1803 CompilerPass::kSetOuterInliningId,
1804 CompilerPass::kTypePropagation,
1805 CompilerPass::kApplyClassIds,
1806 CompilerPass::kInlining,
1807 CompilerPass::kTypePropagation,
1808 CompilerPass::kApplyClassIds,
1809 CompilerPass::kTypePropagation,
1810 CompilerPass::kApplyICData,
1811 CompilerPass::kCanonicalize,
1812 CompilerPass::kBranchSimplify,
1813 CompilerPass::kIfConvert,
1814 CompilerPass::kCanonicalize,
1815 CompilerPass::kConstantPropagation,
1816 CompilerPass::kOptimisticallySpecializeSmiPhis,
1817 CompilerPass::kTypePropagation,
1818 CompilerPass::kSelectRepresentations,
1819 CompilerPass::kCSE,
1820 CompilerPass::kCanonicalize,
1821 CompilerPass::kLICM,
1822 CompilerPass::kTryOptimizePatterns,
1823 CompilerPass::kSelectRepresentations,
1824 CompilerPass::kDSE,
1825 CompilerPass::kTypePropagation,
1826 CompilerPass::kSelectRepresentations,
1827 CompilerPass::kEliminateEnvironments,
1828 CompilerPass::kEliminateDeadPhis,
1829 CompilerPass::kDCE,
1830 CompilerPass::kCanonicalize,
1831 CompilerPass::kOptimizeBranches,
1832 });
1833
1834 // Check for the soon-to-be-sunk ByteDataView allocation.
1835
1836 auto entry = flow_graph->graph_entry()->normal_entry();
1837 EXPECT(entry != nullptr);
1838
1839 AllocateTypedDataInstr* alloc_typed_data = nullptr;
1840 AllocateObjectInstr* alloc_view = nullptr;
1841 StoreFieldInstr* store_view_typed_data = nullptr;
1842 StoreFieldInstr* store_view_offset_in_bytes = nullptr;
1843 StoreFieldInstr* store_view_length = nullptr;
1844 LoadFieldInstr* load_typed_data_payload = nullptr;
1845 StoreFieldInstr* store_view_payload = nullptr;
1846
1847 ILMatcher cursor(flow_graph, entry, true, ParallelMovesHandling::kSkip);
1848 EXPECT(cursor.TryMatch({
1849 kMoveGlob,
1850 {kMatchAndMoveAllocateTypedData, &alloc_typed_data},
1851 {kMatchAndMoveAllocateObject, &alloc_view},
1852 {kMatchAndMoveStoreField, &store_view_typed_data},
1853 {kMatchAndMoveStoreField, &store_view_offset_in_bytes},
1854 {kMatchAndMoveStoreField, &store_view_length},
1855 {kMatchAndMoveLoadField, &load_typed_data_payload},
1856 {kMatchAndMoveStoreField, &store_view_payload},
1857 }));
1858 if (store_view_payload == nullptr) return;
1859
1860 EXPECT_EQ(alloc_view, store_view_typed_data->instance()->definition());
1862 store_view_typed_data->slot()));
1863 EXPECT_EQ(alloc_typed_data, store_view_typed_data->value()->definition());
1864
1865 EXPECT_EQ(alloc_view, store_view_length->instance()->definition());
1866 EXPECT(Slot::TypedDataBase_length().IsIdentical(store_view_length->slot()));
1867 EXPECT_EQ(alloc_typed_data->num_elements()->definition(),
1868 store_view_length->value()->definition());
1869
1870 EXPECT_EQ(alloc_view, store_view_offset_in_bytes->instance()->definition());
1872 store_view_offset_in_bytes->slot()));
1873 EXPECT(store_view_offset_in_bytes->value()->BindsToSmiConstant());
1874 EXPECT_EQ(0, store_view_offset_in_bytes->value()->BoundSmiConstant());
1875
1876 EXPECT_EQ(alloc_typed_data,
1877 load_typed_data_payload->instance()->definition());
1878 EXPECT(Slot::PointerBase_data().IsIdentical(load_typed_data_payload->slot()));
1879
1880 EXPECT_EQ(alloc_view, store_view_payload->instance()->definition());
1881 EXPECT(Slot::PointerBase_data().IsIdentical(store_view_payload->slot()));
1882 EXPECT_EQ(load_typed_data_payload, store_view_payload->value()->definition());
1883
1884 // Setting the view data field is the only use of the unsafe payload load.
1885 EXPECT(load_typed_data_payload->HasOnlyUse(store_view_payload->value()));
1886
1887 pipeline.RunAdditionalPasses({
1888 CompilerPass::kAllocationSinking_Sink,
1889 });
1890
1891 // After sinking, the view allocation has been removed from the flow graph.
1892 EXPECT_EQ(nullptr, alloc_view->previous());
1893 EXPECT_EQ(nullptr, alloc_view->next());
1894 // There is at least one MaterializeObject instruction created for the view.
1895 intptr_t mat_count = 0;
1896 for (auto block_it = flow_graph->reverse_postorder_iterator();
1897 !block_it.Done(); block_it.Advance()) {
1898 for (ForwardInstructionIterator it(block_it.Current()); !it.Done();
1899 it.Advance()) {
1900 auto* const mat = it.Current()->AsMaterializeObject();
1901 if (mat == nullptr) continue;
1902 if (mat->allocation() == alloc_view) {
1903 ++mat_count;
1904 for (intptr_t i = 0; i < mat->InputCount(); i++) {
1905 // No slot of the materialization should correspond to the data field.
1906 EXPECT(mat->FieldOffsetAt(i) !=
1907 Slot::PointerBase_data().offset_in_bytes());
1908 // No input of the materialization should be a load of the typed
1909 // data object's payload.
1910 if (auto* const load = mat->InputAt(i)->definition()->AsLoadField()) {
1911 if (load->instance()->definition() == alloc_typed_data) {
1912 EXPECT(!load->slot().IsIdentical(Slot::PointerBase_data()));
1913 }
1914 }
1915 }
1916 }
1917 }
1918 }
1919 EXPECT(mat_count > 0);
1920 // There are no uses of the original unsafe payload load. In particular, no
1921 // MaterializeObject instructions use it.
1922 EXPECT(!load_typed_data_payload->HasUses());
1923
1924 pipeline.RunAdditionalPasses({
1925 CompilerPass::kEliminateDeadPhis,
1926 CompilerPass::kDCE,
1927 CompilerPass::kCanonicalize,
1928 CompilerPass::kTypePropagation,
1929 CompilerPass::kSelectRepresentations_Final,
1930 CompilerPass::kUseTableDispatch,
1931 CompilerPass::kEliminateStackOverflowChecks,
1932 CompilerPass::kCanonicalize,
1933 CompilerPass::kAllocationSinking_DetachMaterializations,
1934 CompilerPass::kEliminateWriteBarriers,
1935 CompilerPass::kLoweringAfterCodeMotionDisabled,
1936 CompilerPass::kFinalizeGraph,
1937 CompilerPass::kCanonicalize,
1938 CompilerPass::kReorderBlocks,
1939 CompilerPass::kAllocateRegisters,
1940 CompilerPass::kTestILSerialization,
1941 });
1942
1943 // Finish the compilation and attach code so we can run it.
1944 pipeline.CompileGraphAndAttachFunction();
1945
1946 // Can run optimized code fine without deoptimization.
1947 result = Invoke(lib, kInvokeNoDeoptName);
1948 EXPECT(function.HasOptimizedCode());
1949 EXPECT(Bool::Cast(result).value());
1950
1951 // Can run code fine with deoptimization.
1952 result = Invoke(lib, kInvokeDeoptName);
1953 // Deoptimization has put us back to unoptimized code.
1954 EXPECT(!function.HasOptimizedCode());
1955 EXPECT(!Bool::Cast(result).value());
1956}
1957
1958#endif // !defined(TARGET_ARCH_IA32)
1959
// Regression test for https://github.com/dart-lang/sdk/issues/51220.
// Verifies that deoptimization at the hoisted BinarySmiOp
// doesn't result in the infinite re-optimization loop.
ISOLATE_UNIT_TEST_CASE(LICM_Deopt_Regress51220) {
  // NOTE(review): (1 << (kSmiBits + 1 - 10)) * 1024 equals 2^(kSmiBits+1),
  // which presumably does not fit in a Smi and so triggers the BinarySmiOp
  // deoptimization exercised below — confirm against the linked issue.
  CStringUniquePtr kScript(OS::SCreate(nullptr,
                                       R"(
    int n = int.parse('3');
    main() {
      int x = 0;
      for (int i = 0; i < n; ++i) {
        if (i > ((1 << %d)*1024)) {
          ++x;
        }
      }
      return x;
    }
  )",
                                       static_cast<int>(kSmiBits + 1 - 10)));

  const auto& root_library = Library::Handle(LoadTestScript(kScript.get()));
  const auto& function = Function::Handle(GetFunction(root_library, "main"));

  // Run unoptimized code.
  Invoke(root_library, "main");
  EXPECT(!function.HasOptimizedCode());

  Compiler::CompileOptimizedFunction(thread, function);
  EXPECT(function.HasOptimizedCode());

  // Only 2 rounds of deoptimization are allowed:
  // * the first round should disable LICM;
  // * the second round should disable BinarySmiOp.
  Invoke(root_library, "main");
  EXPECT(!function.HasOptimizedCode());
  // EXPECT(function.ProhibitsInstructionHoisting());
  // (Assertion above was already disabled in the original source.)

  Compiler::CompileOptimizedFunction(thread, function);
  EXPECT(function.HasOptimizedCode());

  Invoke(root_library, "main");
  EXPECT(!function.HasOptimizedCode());
  // EXPECT(function.ProhibitsInstructionHoisting());
  // (Assertion above was already disabled in the original source.)

  Compiler::CompileOptimizedFunction(thread, function);
  EXPECT(function.HasOptimizedCode());

  // Should not deoptimize.
  Invoke(root_library, "main");
  EXPECT(function.HasOptimizedCode());
}
2010
// Regression test for https://github.com/dart-lang/sdk/issues/50245.
// Verifies that deoptimization at the hoisted GuardFieldClass
// doesn't result in the infinite re-optimization loop.
ISOLATE_UNIT_TEST_CASE(LICM_Deopt_Regress50245) {
  const char* kScript = R"(
    class A {
      List<int> foo;
      A(this.foo);
    }

    A obj = A([1, 2, 3]);
    int n = int.parse('3');

    main() {
      // Make sure A.foo= is compiled.
      obj.foo = [];
      int sum = 0;
      for (int i = 0; i < n; ++i) {
        if (int.parse('1') != 1) {
          // Field guard from this unreachable code is moved up
          // and causes repeated deoptimization.
          obj.foo = const [];
        }
        sum += i;
      }
      return sum;
    }
  )";

  const auto& root_library = Library::Handle(LoadTestScript(kScript));
  const auto& function = Function::Handle(GetFunction(root_library, "main"));

  // Run unoptimized code.
  Invoke(root_library, "main");
  EXPECT(!function.HasOptimizedCode());

  Compiler::CompileOptimizedFunction(thread, function);
  EXPECT(function.HasOptimizedCode());

  // LICM should be disabled after the first round of deoptimization.
  Invoke(root_library, "main");
  EXPECT(!function.HasOptimizedCode());
  // EXPECT(function.ProhibitsInstructionHoisting());
  // (Assertion above was already disabled in the original source.)

  // Recompile once more; this time optimized code must stay stable.
  Compiler::CompileOptimizedFunction(thread, function);
  EXPECT(function.HasOptimizedCode());

  // Should not deoptimize.
  Invoke(root_library, "main");
  EXPECT(function.HasOptimizedCode());
}
2062
2063} // namespace dart
static sk_sp< Effect > Create()
Definition: RefCntTest.cpp:117
static volatile float blackhole[4]
Definition: Sk4fBench.cpp:13
static SkV4 v4(SkV3 v, SkScalar w)
Definition: SkM44.cpp:329
SI void store(P *ptr, const T &val)
SI T load(const P *ptr)
Definition: Transform_inl.h:98
#define EXPECT(type, expectedAlignment, expectedSize)
Vec2Value v2
#define RELEASE_ASSERT(cond)
Definition: assert.h:327
bool IsNotAliased() const
Definition: il.h:2452
virtual AliasIdentity Identity() const
Definition: il.h:7322
GrowableArray< Definition * > * initial_definitions()
Definition: il.h:1917
bool Done() const
Definition: flow_graph.h:46
FunctionPtr LookupFactory(const String &name) const
Definition: object.cc:6157
ErrorPtr EnsureIsFinalized(Thread *thread) const
Definition: object.cc:4924
FieldPtr LookupField(const String &name) const
Definition: object.cc:6352
static CompileType FromCid(intptr_t cid)
Value * value() const
Definition: il.h:3486
PRINT_OPERANDS_TO_SUPPORT PRINT_TO_SUPPORT bool UpdateType(CompileType new_type)
Definition: il.h:2553
static constexpr intptr_t kNone
Definition: deopt_id.h:27
static bool Optimize(FlowGraph *graph, bool run_load_optimization=true)
FieldPtr CloneFromOriginal() const
Definition: object.cc:11735
GraphEntryInstr * graph_entry() const
Definition: flow_graph.h:268
ConstantInstr * GetConstant(const Object &object, Representation representation=kTagged)
Definition: flow_graph.cc:187
ConstantInstr * constant_null() const
Definition: flow_graph.h:270
const ParsedFunction & parsed_function() const
Definition: flow_graph.h:129
BlockIterator reverse_postorder_iterator() const
Definition: flow_graph.h:219
intptr_t EnvIndex(const LocalVariable *variable) const
Definition: flow_graph.h:189
FunctionEntryInstr * normal_entry() const
Definition: il.h:2001
const GrowableArray< CatchBlockEntryInstr * > & catch_entries() const
Definition: il.h:2012
bool TryMatch(std::initializer_list< MatchCode > match_codes, MatchOpCode insert_before=kInvalidMatchOpCode)
bool WasEliminated() const
Definition: il.h:1262
Instruction * next() const
Definition: il.h:1093
Instruction * previous() const
Definition: il.h:1087
static IntegerPtr New(const String &str, Heap::Space space=Heap::kNew)
Definition: object.cc:22984
ClassPtr LookupClass(const String &name) const
Definition: object.cc:14105
static LibraryPtr TypedDataLibrary()
Definition: object.cc:14825
LocalScope * sibling() const
Definition: scopes.h:321
LocalVariable * VariableAt(intptr_t index) const
Definition: scopes.h:398
intptr_t num_variables() const
Definition: scopes.h:397
LocalScope * child() const
Definition: scopes.h:320
static void static void PrintErr(const char *format,...) PRINTF_ATTRIBUTE(1
bool IsNull() const
Definition: object.h:363
static Object & Handle()
Definition: object.h:407
static Object & ZoneHandle()
Definition: object.h:419
LocalScope * scope() const
Definition: parser.h:76
static const Slot & Get(const Field &field, const ParsedFunction *parsed_function)
Definition: slot.cc:351
void SetResultType(Zone *zone, CompileType new_type)
Definition: il.h:5648
void set_is_known_list_constructor(bool value)
Definition: il.h:5662
static StringPtr New(const char *cstr, Heap::Space space=Heap::kNew)
Definition: object.cc:23698
static const String & Empty()
Definition: symbols.h:688
static StringPtr New(Thread *thread, const char *cstr)
Definition: symbols.h:723
FlowGraph * RunPasses(std::initializer_list< CompilerPass::Id > passes)
static TypePtr ObjectType()
Definition: il.h:75
Definition * definition() const
Definition: il.h:103
#define H
struct _Dart_Handle * Dart_Handle
Definition: dart_api.h:258
struct _Dart_NativeArguments * Dart_NativeArguments
Definition: dart_api.h:3019
void(* Dart_NativeFunction)(Dart_NativeArguments arguments)
Definition: dart_api.h:3207
#define ASSERT(E)
struct MyStruct s
G_BEGIN_DECLS G_MODULE_EXPORT FlValue * args
uint8_t value
GAsyncResult * result
Dart_NativeFunction function
Definition: fuchsia.cc:51
int argument_count
Definition: fuchsia.cc:52
#define EXPECT_PROPERTY(entity, property)
const GrXPFactory * Get(SkBlendMode mode)
SI void load2(const uint16_t *ptr, U16 *r, U16 *g)
SI void store2(uint16_t *ptr, U16 r, U16 g)
InvalidClass kSmiBits
const Type & DynamicType()
Definition: runtime_api.cc:169
Definition: dart_vm.cc:33
static bool Equals(const Object &expected, const Object &actual)
LibraryPtr LoadTestScript(const char *script, Dart_NativeEntryResolver resolver, const char *lib_uri)
@ kMatchAndMoveBranchFalse
@ kMatchAndMoveBranchTrue
static void FlattenScopeIntoEnvironment(FlowGraph *graph, LocalScope *scope, GrowableArray< LocalVariable * > *env)
const char *const name
void OptimizeCatchEntryStates(FlowGraph *flow_graph, bool is_aot)
@ TypedDataView_offset_in_bytes
Definition: il_test.cc:1251
@ TypedDataBase_length
Definition: il_test.cc:1250
@ TypedDataView_typed_data
Definition: il_test.cc:1252
static void NoopNative(Dart_NativeArguments args)
Location LocationExceptionLocation()
Definition: locations.cc:484
@ kNoStoreBarrier
Definition: il.h:6301
@ kEmitStoreBarrier
Definition: il.h:6301
GrowableArray< Value * > InputsArray
Definition: il.h:901
ObjectPtr Invoke(const Library &lib, const char *name)
FunctionPtr GetFunction(const Library &lib, const char *name)
static void TestAliasingViaStore(Thread *thread, bool make_it_escape, bool make_host_escape, std::function< Definition *(CompilerState *S, FlowGraph *, Definition *)> make_redefinition)
ISOLATE_UNIT_TEST_CASE(StackAllocatedDestruction)
static Definition * MakeCheckNull(CompilerState *S, FlowGraph *flow_graph, Definition *defn)
Location LocationStackTraceLocation()
Definition: locations.cc:488
static void TestAliasingViaRedefinition(Thread *thread, bool make_it_escape, std::function< Definition *(CompilerState *S, FlowGraph *, Definition *)> make_redefinition)
static void CountLoadsStores(FlowGraph *flow_graph, intptr_t *loads, intptr_t *stores)
static Definition * MakeRedefinition(CompilerState *S, FlowGraph *flow_graph, Definition *defn)
static void TryCatchOptimizerTest(Thread *thread, const char *script_chars, std::initializer_list< const char * > synchronized)
static Definition * MakeAssertAssignable(CompilerState *S, FlowGraph *flow_graph, Definition *defn)
@ kAlignedAccess
Definition: il.h:6766
static Dart_NativeFunction NoopNativeLookup(Dart_Handle name, int argument_count, bool *auto_setup_scope)
def call(args)
Definition: dom.py:159
Definition: __init__.py:1
@ kNone
Definition: layer.h:53
Definition: SkMD5.cpp:130
#define ISOLATE_UNIT_TEST_CASE(name)
Definition: unit_test.h:64