graph_intrinsifier.cc
// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
// Class for intrinsifying functions.

#include "vm/compiler/graph_intrinsifier.h"

#include "vm/compiler/backend/block_builder.h"
#include "vm/compiler/backend/flow_graph.h"
#include "vm/compiler/backend/flow_graph_compiler.h"
#include "vm/compiler/backend/il.h"
#include "vm/compiler/backend/il_printer.h"
#include "vm/compiler/compiler_pass.h"
#include "vm/cpu.h"
#include "vm/flag_list.h"

namespace dart {

DECLARE_FLAG(bool, print_flow_graph);
DECLARE_FLAG(bool, print_flow_graph_optimized);

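// Temporarily forces the compiler into optimizing mode while the code for a
// graph intrinsic is emitted, and restores the previous setting when the
// scope is destroyed (see the note in EmitCodeFor below).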
class GraphIntrinsicCodeGenScope {
 public:
  explicit GraphIntrinsicCodeGenScope(FlowGraphCompiler* compiler)
      : compiler_(compiler), old_is_optimizing_(compiler->is_optimizing()) {
    compiler_->is_optimizing_ = true;
  }
  ~GraphIntrinsicCodeGenScope() {
    compiler_->is_optimizing_ = old_is_optimizing_;
  }

 private:
  FlowGraphCompiler* compiler_;
  bool old_is_optimizing_;
};

namespace compiler {

static void EmitCodeFor(FlowGraphCompiler* compiler, FlowGraph* graph) {
  // For graph intrinsics we run the linearscan register allocator, which will
  // pass opt=true for MakeLocationSummary. We therefore also have to ensure
  // `compiler->is_optimizing()` is set to true during EmitNativeCode.
  GraphIntrinsicCodeGenScope optimizing_scope(compiler);

  compiler->assembler()->Comment("Graph intrinsic begin");
  for (intptr_t i = 0; i < graph->reverse_postorder().length(); i++) {
    BlockEntryInstr* block = graph->reverse_postorder()[i];
    if (block->IsGraphEntry()) continue;  // No code for graph entry needed.

    if (block->HasParallelMove()) {
      block->parallel_move()->EmitNativeCode(compiler);
    }

    for (ForwardInstructionIterator it(block); !it.Done(); it.Advance()) {
      Instruction* instr = it.Current();
      if (FLAG_code_comments) compiler->EmitComment(instr);
      // Calls are not supported in intrinsics code.
      ASSERT(instr->IsParallelMove() ||
             (instr->locs() != nullptr && !instr->locs()->always_calls()));
      instr->EmitNativeCode(compiler);
    }
  }
  compiler->assembler()->Comment("Graph intrinsic end");
}

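// Builds a small flow graph for the recognized method (if it has a graph
// intrinsic), runs the graph-intrinsic compiler pipeline over it, and emits
// the resulting code into the current assembler. Returns false when no graph
// intrinsic is available for the function.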
bool GraphIntrinsifier::GraphIntrinsify(const ParsedFunction& parsed_function,
                                        FlowGraphCompiler* compiler) {
  ASSERT(!parsed_function.function().HasOptionalParameters());
  PrologueInfo prologue_info(-1, -1);

  auto graph_entry =
      new GraphEntryInstr(parsed_function, Compiler::kNoOSRDeoptId);

  intptr_t block_id = 1;  // 0 is GraphEntry.
  graph_entry->set_normal_entry(
      new FunctionEntryInstr(graph_entry, block_id, kInvalidTryIndex,
                             CompilerState::Current().GetNextDeoptId()));

  FlowGraph* graph =
      new FlowGraph(parsed_function, graph_entry, block_id, prologue_info,
                    FlowGraph::CompilationModeFrom(compiler->is_optimizing()));
  compiler->set_intrinsic_flow_graph(*graph);

  const Function& function = parsed_function.function();

  switch (function.recognized_kind()) {
#define EMIT_CASE(class_name, function_name, enum_name, fp)                    \
  case MethodRecognizer::k##enum_name:                                         \
    if (!Build_##enum_name(graph)) return false;                               \
    break;

    GRAPH_INTRINSICS_LIST(EMIT_CASE)
#undef EMIT_CASE
    default:
      return false;
  }

  if (FLAG_support_il_printer && FLAG_print_flow_graph &&
      FlowGraphPrinter::ShouldPrint(function)) {
    THR_Print("Intrinsic graph before\n");
    FlowGraphPrinter printer(*graph);
    printer.PrintBlocks();
  }

  // Prepare for register allocation (cf. FinalizeGraph).
  graph->RemoveRedefinitions();

  // Ensure dominators are re-computed. Normally this is done during SSA
  // construction (which we don't do for graph intrinsics).
  GrowableArray<BitVector*> dominance_frontier;
  graph->ComputeDominators(&dominance_frontier);

  CompilerPassState state(parsed_function.thread(), graph,
                          /*speculative_inlining_policy*/ nullptr);
  CompilerPass::RunGraphIntrinsicPipeline(&state);

  if (FLAG_support_il_printer && FLAG_print_flow_graph &&
      FlowGraphPrinter::ShouldPrint(function)) {
    THR_Print("Intrinsic graph after\n");
    FlowGraphPrinter printer(*graph);
    printer.PrintBlocks();
  }
  EmitCodeFor(compiler, graph);
  return true;
}

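// Maps a box class id (e.g. kDoubleCid) to the unboxed representation used
// for its payload.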
static Representation RepresentationForCid(intptr_t cid) {
  switch (cid) {
    case kDoubleCid:
      return kUnboxedDouble;
    case kFloat32x4Cid:
      return kUnboxedFloat32x4;
    case kInt32x4Cid:
      return kUnboxedInt32x4;
    case kFloat64x2Cid:
      return kUnboxedFloat64x2;
    default:
      UNREACHABLE();
      return kNoRepresentation;
  }
}

// Notes about the graph intrinsics:
//
// IR instructions which would jump to a deoptimization sequence on failure
// instead branch to the intrinsic slow path.
//
static Definition* PrepareIndexedOp(FlowGraph* flow_graph,
                                    BlockBuilder* builder,
                                    Definition* array,
                                    Definition* index,
                                    const Slot& length_field) {
  Definition* length = builder->AddDefinition(
      new LoadFieldInstr(new Value(array), length_field, InstructionSource()));
  // Note that the intrinsifier must always use deopting array bound
  // checks, because intrinsics currently don't support calls.
  Definition* safe_index = new CheckArrayBoundInstr(
      new Value(length), new Value(index), DeoptId::kNone);
  builder->AddDefinition(safe_index);
  return safe_index;
}

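// The graph intrinsics below are built against a boxed (tagged) calling
// convention for these parameters; an unboxed parameter here means the
// intrinsic and the function's calling convention disagree.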
static void VerifyParameterIsBoxed(BlockBuilder* builder, intptr_t arg_index) {
  const auto& function = builder->function();
  if (function.is_unboxed_parameter_at(arg_index)) {
    FATAL("Unsupported unboxed parameter %" Pd " in %s", arg_index,
          function.ToFullyQualifiedCString());
  }
}

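// Re-boxes a parameter that is passed unboxed so the intrinsic body can work
// with a tagged value; parameters that are already boxed are returned as-is.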
static Definition* CreateBoxedParameterIfNeeded(BlockBuilder* builder,
                                                Definition* value,
                                                Representation representation,
                                                intptr_t arg_index) {
  const auto& function = builder->function();
  if (function.is_unboxed_parameter_at(arg_index)) {
    return builder->AddDefinition(
        BoxInstr::Create(representation, new Value(value)));
  } else {
    return value;
  }
}

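// Converts an unboxed result to whatever the caller expects: unboxed floats
// are first widened to double, and the value is boxed unless the function
// returns its result unboxed.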
static Definition* CreateBoxedResultIfNeeded(BlockBuilder* builder,
                                             Definition* value,
                                             Representation representation) {
  const auto& function = builder->function();
  ASSERT(!function.has_unboxed_record_return());
  Definition* result = value;
  if (representation == kUnboxedFloat) {
    result = builder->AddDefinition(
        new FloatToDoubleInstr(new Value(result), DeoptId::kNone));
    representation = kUnboxedDouble;
  }
  if (!function.has_unboxed_return()) {
    result = builder->AddDefinition(BoxInstr::Create(
        Boxing::NativeRepresentation(representation), new Value(result)));
  }
  return result;
}

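// Unboxes a tagged result when the function's calling convention expects an
// unboxed return value; otherwise the value is returned unchanged.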
static Definition* CreateUnboxedResultIfNeeded(BlockBuilder* builder,
                                               Definition* value) {
  const auto& function = builder->function();
  ASSERT(!function.has_unboxed_record_return());
  if (function.has_unboxed_return() && value->representation() == kTagged) {
    return builder->AddUnboxInstr(FlowGraph::ReturnRepresentationOf(function),
                                  new Value(value), /* is_checked = */ true);
  } else {
    return value;
  }
}

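// Shared builder for the typed-data []= intrinsics: bound-checks the index,
// checks/unboxes the value as required by the element representation, and
// stores it without a write barrier.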
static bool IntrinsifyArraySetIndexed(FlowGraph* flow_graph,
                                      intptr_t array_cid) {
  GraphEntryInstr* graph_entry = flow_graph->graph_entry();
  auto normal_entry = graph_entry->normal_entry();
  BlockBuilder builder(flow_graph, normal_entry, /*with_frame=*/false);

  Definition* array = builder.AddParameter(0);
  Definition* index = builder.AddParameter(1);
  Definition* value = builder.AddParameter(2);

  VerifyParameterIsBoxed(&builder, 0);
  VerifyParameterIsBoxed(&builder, 2);

  index = CreateBoxedParameterIfNeeded(&builder, index, kUnboxedInt64, 1);
  index = PrepareIndexedOp(flow_graph, &builder, array, index,
                           Slot::GetLengthFieldForArrayCid(array_cid));

  // Value check/conversion.
  auto const rep = RepresentationUtils::RepresentationOfArrayElement(array_cid);
  if (IsClampedTypedDataBaseClassId(array_cid)) {
#if defined(TARGET_ARCH_IS_32_BIT)
    // On 32-bit architectures, clamping operations need the exact value
    // for proper operations. On 64-bit architectures, kUnboxedIntPtr
    // maps to kUnboxedInt64. All other situations get away with
    // truncating even non-smi values.
    builder.AddInstruction(
        new CheckSmiInstr(new Value(value), DeoptId::kNone, builder.Source()));
#endif
  }
  if (RepresentationUtils::IsUnboxedInteger(rep)) {
    // Use same truncating unbox-instruction for int32 and uint32.
    auto const unbox_rep = rep == kUnboxedInt32 ? kUnboxedUint32 : rep;
    value = builder.AddUnboxInstr(unbox_rep, new Value(value),
                                  /* is_checked = */ false);
  } else if (RepresentationUtils::IsUnboxed(rep)) {
    Zone* zone = flow_graph->zone();
    Cids* value_check = Cids::CreateMonomorphic(zone, Boxing::BoxCid(rep));
    builder.AddInstruction(new CheckClassInstr(new Value(value), DeoptId::kNone,
                                               *value_check, builder.Source()));
    value = builder.AddUnboxInstr(rep, new Value(value),
                                  /* is_checked = */ true);
  }

  if (IsExternalTypedDataClassId(array_cid)) {
    array = builder.AddDefinition(new LoadFieldInstr(
        new Value(array), Slot::PointerBase_data(),
        InnerPointerAccess::kCannotBeInnerPointer, builder.Source()));
  }
  // No store barrier.
  ASSERT(IsExternalTypedDataClassId(array_cid) ||
         IsTypedDataClassId(array_cid));
  builder.AddInstruction(new StoreIndexedInstr(
      new Value(array), new Value(index), new Value(value), kNoStoreBarrier,
      /*index_unboxed=*/false,
      /*index_scale=*/target::Instance::ElementSizeFor(array_cid), array_cid,
      kAlignedAccess, DeoptId::kNone, builder.Source()));
  // Return null.
  Definition* null_def = builder.AddNullDefinition();
  builder.AddReturn(new Value(null_def));
  return true;
}

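// The macros below stamp out Build_<Type>SetIndexed for each recognized
// typed-data setter; the float and SIMD variants additionally bail out when
// the target does not support the required unboxed representations.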
#define DEFINE_ARRAY_SETTER_INTRINSIC(enum_name)                               \
  bool GraphIntrinsifier::Build_##enum_name##SetIndexed(                       \
      FlowGraph* flow_graph) {                                                 \
    return IntrinsifyArraySetIndexed(                                          \
        flow_graph, MethodRecognizer::MethodKindToReceiverCid(                 \
                        MethodRecognizer::k##enum_name##SetIndexed));          \
  }

DEFINE_ARRAY_SETTER_INTRINSIC(Int8Array)
DEFINE_ARRAY_SETTER_INTRINSIC(Uint8Array)
DEFINE_ARRAY_SETTER_INTRINSIC(ExternalUint8Array)
DEFINE_ARRAY_SETTER_INTRINSIC(Uint8ClampedArray)
DEFINE_ARRAY_SETTER_INTRINSIC(ExternalUint8ClampedArray)
DEFINE_ARRAY_SETTER_INTRINSIC(Int16Array)
DEFINE_ARRAY_SETTER_INTRINSIC(Uint16Array)
DEFINE_ARRAY_SETTER_INTRINSIC(Int32Array)
DEFINE_ARRAY_SETTER_INTRINSIC(Uint32Array)
DEFINE_ARRAY_SETTER_INTRINSIC(Int64Array)
DEFINE_ARRAY_SETTER_INTRINSIC(Uint64Array)

#undef DEFINE_ARRAY_SETTER_INTRINSIC

#define DEFINE_FLOAT_ARRAY_SETTER_INTRINSIC(enum_name)                         \
  bool GraphIntrinsifier::Build_##enum_name##SetIndexed(                       \
      FlowGraph* flow_graph) {                                                 \
    if (!FlowGraphCompiler::SupportsUnboxedDoubles()) {                        \
      return false;                                                            \
    }                                                                          \
    return IntrinsifyArraySetIndexed(                                          \
        flow_graph, MethodRecognizer::MethodKindToReceiverCid(                 \
                        MethodRecognizer::k##enum_name##SetIndexed));          \
  }

DEFINE_FLOAT_ARRAY_SETTER_INTRINSIC(Float64Array)
DEFINE_FLOAT_ARRAY_SETTER_INTRINSIC(Float32Array)

#undef DEFINE_FLOAT_ARRAY_SETTER_INTRINSIC

#define DEFINE_SIMD_ARRAY_SETTER_INTRINSIC(enum_name)                          \
  bool GraphIntrinsifier::Build_##enum_name##SetIndexed(                       \
      FlowGraph* flow_graph) {                                                 \
    if (!FlowGraphCompiler::SupportsUnboxedSimd128()) {                        \
      return false;                                                            \
    }                                                                          \
    return IntrinsifyArraySetIndexed(                                          \
        flow_graph, MethodRecognizer::MethodKindToReceiverCid(                 \
                        MethodRecognizer::k##enum_name##SetIndexed));          \
  }

DEFINE_SIMD_ARRAY_SETTER_INTRINSIC(Float32x4Array)
DEFINE_SIMD_ARRAY_SETTER_INTRINSIC(Int32x4Array)
DEFINE_SIMD_ARRAY_SETTER_INTRINSIC(Float64x2Array)

#undef DEFINE_SIMD_ARRAY_SETTER_INTRINSIC

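// Builds a binary SIMD arithmetic intrinsic: class-checks the right operand,
// unboxes both operands, applies the SimdOp and boxes the result if needed.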
static bool BuildSimdOp(FlowGraph* flow_graph, intptr_t cid, Token::Kind kind) {
  if (!FlowGraphCompiler::SupportsUnboxedSimd128()) return false;

  auto const rep = RepresentationForCid(cid);

  Zone* zone = flow_graph->zone();
  GraphEntryInstr* graph_entry = flow_graph->graph_entry();
  auto normal_entry = graph_entry->normal_entry();
  BlockBuilder builder(flow_graph, normal_entry, /*with_frame=*/false);

  Definition* left = builder.AddParameter(0);
  Definition* right = builder.AddParameter(1);

  VerifyParameterIsBoxed(&builder, 0);
  VerifyParameterIsBoxed(&builder, 1);

  Cids* value_check = Cids::CreateMonomorphic(zone, cid);
  // Check argument. Receiver (left) is known to be a Float32x4.
  builder.AddInstruction(new CheckClassInstr(new Value(right), DeoptId::kNone,
                                             *value_check, builder.Source()));
  Definition* left_simd = builder.AddUnboxInstr(rep, new Value(left),
                                                /* is_checked = */ true);

  Definition* right_simd = builder.AddUnboxInstr(rep, new Value(right),
                                                 /* is_checked = */ true);

  Definition* unboxed_result = builder.AddDefinition(SimdOpInstr::Create(
      SimdOpInstr::KindForOperator(cid, kind), new Value(left_simd),
      new Value(right_simd), DeoptId::kNone));
  Definition* result = CreateBoxedResultIfNeeded(&builder, unboxed_result, rep);

  builder.AddReturn(new Value(result));
  return true;
}

bool GraphIntrinsifier::Build_Float32x4Mul(FlowGraph* flow_graph) {
  return BuildSimdOp(flow_graph, kFloat32x4Cid, Token::kMUL);
}

bool GraphIntrinsifier::Build_Float32x4Div(FlowGraph* flow_graph) {
  return BuildSimdOp(flow_graph, kFloat32x4Cid, Token::kDIV);
}

bool GraphIntrinsifier::Build_Float32x4Sub(FlowGraph* flow_graph) {
  return BuildSimdOp(flow_graph, kFloat32x4Cid, Token::kSUB);
}

bool GraphIntrinsifier::Build_Float32x4Add(FlowGraph* flow_graph) {
  return BuildSimdOp(flow_graph, kFloat32x4Cid, Token::kADD);
}

bool GraphIntrinsifier::Build_Float64x2Mul(FlowGraph* flow_graph) {
  return BuildSimdOp(flow_graph, kFloat64x2Cid, Token::kMUL);
}

bool GraphIntrinsifier::Build_Float64x2Div(FlowGraph* flow_graph) {
  return BuildSimdOp(flow_graph, kFloat64x2Cid, Token::kDIV);
}

bool GraphIntrinsifier::Build_Float64x2Sub(FlowGraph* flow_graph) {
  return BuildSimdOp(flow_graph, kFloat64x2Cid, Token::kSUB);
}

bool GraphIntrinsifier::Build_Float64x2Add(FlowGraph* flow_graph) {
  return BuildSimdOp(flow_graph, kFloat64x2Cid, Token::kADD);
}

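// Builds the Float32x4 lane getters (x, y, z, w): unboxes the receiver when
// necessary, extracts the lane via a SimdOp, and returns it as a double.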
static bool BuildFloat32x4Get(FlowGraph* flow_graph,
                              MethodRecognizer::Kind kind) {
  if (!FlowGraphCompiler::SupportsUnboxedDoubles() ||
      !FlowGraphCompiler::SupportsUnboxedSimd128()) {
    return false;
  }
  GraphEntryInstr* graph_entry = flow_graph->graph_entry();
  auto normal_entry = graph_entry->normal_entry();
  BlockBuilder builder(flow_graph, normal_entry, /*with_frame=*/false);

  Definition* receiver = builder.AddParameter(0);

  const auto& function = flow_graph->function();
  Definition* unboxed_receiver =
      !function.is_unboxed_parameter_at(0)
          ? builder.AddUnboxInstr(kUnboxedFloat32x4, new Value(receiver),
                                  /* is_checked = */ true)
          : receiver;

  Definition* unboxed_result = builder.AddDefinition(
      SimdOpInstr::Create(kind, new Value(unboxed_receiver), DeoptId::kNone));

  Definition* result =
      CreateBoxedResultIfNeeded(&builder, unboxed_result, kUnboxedDouble);

  builder.AddReturn(new Value(result));
  return true;
}

bool GraphIntrinsifier::Build_Float32x4GetX(FlowGraph* flow_graph) {
  return BuildFloat32x4Get(flow_graph, MethodRecognizer::kFloat32x4GetX);
}

bool GraphIntrinsifier::Build_Float32x4GetY(FlowGraph* flow_graph) {
  return BuildFloat32x4Get(flow_graph, MethodRecognizer::kFloat32x4GetY);
}

bool GraphIntrinsifier::Build_Float32x4GetZ(FlowGraph* flow_graph) {
  return BuildFloat32x4Get(flow_graph, MethodRecognizer::kFloat32x4GetZ);
}

bool GraphIntrinsifier::Build_Float32x4GetW(FlowGraph* flow_graph) {
  return BuildFloat32x4Get(flow_graph, MethodRecognizer::kFloat32x4GetW);
}

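// Length getters are a single LoadField of the given slot, plus an unbox of
// the result when the calling convention asks for it.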
static bool BuildLoadField(FlowGraph* flow_graph, const Slot& field) {
  GraphEntryInstr* graph_entry = flow_graph->graph_entry();
  auto normal_entry = graph_entry->normal_entry();
  BlockBuilder builder(flow_graph, normal_entry, /*with_frame=*/false);

  Definition* array = builder.AddParameter(0);
  VerifyParameterIsBoxed(&builder, 0);

  Definition* length = builder.AddDefinition(
      new LoadFieldInstr(new Value(array), field, builder.Source()));

  length = CreateUnboxedResultIfNeeded(&builder, length);
  builder.AddReturn(new Value(length));
  return true;
}

bool GraphIntrinsifier::Build_ObjectArrayLength(FlowGraph* flow_graph) {
  return BuildLoadField(flow_graph, Slot::Array_length());
}

bool GraphIntrinsifier::Build_GrowableArrayLength(FlowGraph* flow_graph) {
  return BuildLoadField(flow_graph, Slot::GrowableObjectArray_length());
}

bool GraphIntrinsifier::Build_StringBaseLength(FlowGraph* flow_graph) {
  return BuildLoadField(flow_graph, Slot::String_length());
}

bool GraphIntrinsifier::Build_TypedListBaseLength(FlowGraph* flow_graph) {
  return BuildLoadField(flow_graph, Slot::TypedDataBase_length());
}

bool GraphIntrinsifier::Build_ByteDataViewLength(FlowGraph* flow_graph) {
  return BuildLoadField(flow_graph, Slot::TypedDataBase_length());
}

bool GraphIntrinsifier::Build_GrowableArrayCapacity(FlowGraph* flow_graph) {
  GraphEntryInstr* graph_entry = flow_graph->graph_entry();
  auto normal_entry = graph_entry->normal_entry();
  BlockBuilder builder(flow_graph, normal_entry, /*with_frame=*/false);

  Definition* array = builder.AddParameter(0);
  VerifyParameterIsBoxed(&builder, 0);

  Definition* backing_store = builder.AddDefinition(new LoadFieldInstr(
      new Value(array), Slot::GrowableObjectArray_data(), builder.Source()));
  Definition* capacity = builder.AddDefinition(new LoadFieldInstr(
      new Value(backing_store), Slot::Array_length(), builder.Source()));
  capacity = CreateUnboxedResultIfNeeded(&builder, capacity);
  builder.AddReturn(new Value(capacity));
  return true;
}

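// The *SetIndexedUnchecked intrinsics still bound-check the index but rely on
// the caller having checked the stored value, so they perform a plain store
// with a generational write barrier instead of a value class check.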
bool GraphIntrinsifier::Build_ObjectArraySetIndexedUnchecked(
    FlowGraph* flow_graph) {
  GraphEntryInstr* graph_entry = flow_graph->graph_entry();
  auto normal_entry = graph_entry->normal_entry();
  BlockBuilder builder(flow_graph, normal_entry, /*with_frame=*/false);

  Definition* array = builder.AddParameter(0);
  Definition* index = builder.AddParameter(1);
  Definition* value = builder.AddParameter(2);

  VerifyParameterIsBoxed(&builder, 0);
  VerifyParameterIsBoxed(&builder, 2);

  index = CreateBoxedParameterIfNeeded(&builder, index, kUnboxedInt64, 1);
  index = PrepareIndexedOp(flow_graph, &builder, array, index,
                           Slot::Array_length());

  builder.AddInstruction(new StoreIndexedInstr(
      new Value(array), new Value(index), new Value(value), kEmitStoreBarrier,
      /*index_unboxed=*/false,
      /*index_scale=*/target::Instance::ElementSizeFor(kArrayCid), kArrayCid,
      kAlignedAccess, DeoptId::kNone, builder.Source()));
  // Return null.
  Definition* null_def = builder.AddNullDefinition();
  builder.AddReturn(new Value(null_def));
  return true;
}

bool GraphIntrinsifier::Build_GrowableArraySetIndexedUnchecked(
    FlowGraph* flow_graph) {
  GraphEntryInstr* graph_entry = flow_graph->graph_entry();
  auto normal_entry = graph_entry->normal_entry();
  BlockBuilder builder(flow_graph, normal_entry, /*with_frame=*/false);

  Definition* array = builder.AddParameter(0);
  Definition* index = builder.AddParameter(1);
  Definition* value = builder.AddParameter(2);

  VerifyParameterIsBoxed(&builder, 0);
  VerifyParameterIsBoxed(&builder, 2);

  index = CreateBoxedParameterIfNeeded(&builder, index, kUnboxedInt64, 1);
  index = PrepareIndexedOp(flow_graph, &builder, array, index,
                           Slot::GrowableObjectArray_length());

  Definition* backing_store = builder.AddDefinition(new LoadFieldInstr(
      new Value(array), Slot::GrowableObjectArray_data(), builder.Source()));

  builder.AddInstruction(new StoreIndexedInstr(
      new Value(backing_store), new Value(index), new Value(value),
      kEmitStoreBarrier, /*index_unboxed=*/false,
      /*index_scale=*/target::Instance::ElementSizeFor(kArrayCid), kArrayCid,
      kAlignedAccess, DeoptId::kNone, builder.Source()));
  // Return null.
  Definition* null_def = builder.AddNullDefinition();
  builder.AddReturn(new Value(null_def));
  return true;
}

bool GraphIntrinsifier::Build_GrowableArraySetData(FlowGraph* flow_graph) {
  GraphEntryInstr* graph_entry = flow_graph->graph_entry();
  auto normal_entry = graph_entry->normal_entry();
  BlockBuilder builder(flow_graph, normal_entry, /*with_frame=*/false);

  Definition* growable_array = builder.AddParameter(0);
  Definition* data = builder.AddParameter(1);
  Zone* zone = flow_graph->zone();

  VerifyParameterIsBoxed(&builder, 0);
  VerifyParameterIsBoxed(&builder, 1);

  Cids* value_check = Cids::CreateMonomorphic(zone, kArrayCid);
  builder.AddInstruction(new CheckClassInstr(new Value(data), DeoptId::kNone,
                                             *value_check, builder.Source()));

  builder.AddInstruction(new StoreFieldInstr(
      Slot::GrowableObjectArray_data(), new Value(growable_array),
      new Value(data), kEmitStoreBarrier, builder.Source()));
  // Return null.
  Definition* null_def = builder.AddNullDefinition();
  builder.AddReturn(new Value(null_def));
  return true;
}

bool GraphIntrinsifier::Build_GrowableArraySetLength(FlowGraph* flow_graph) {
  GraphEntryInstr* graph_entry = flow_graph->graph_entry();
  auto normal_entry = graph_entry->normal_entry();
  BlockBuilder builder(flow_graph, normal_entry, /*with_frame=*/false);

  Definition* growable_array = builder.AddParameter(0);
  Definition* length = builder.AddParameter(1);

  VerifyParameterIsBoxed(&builder, 0);
  VerifyParameterIsBoxed(&builder, 1);

  builder.AddInstruction(
      new CheckSmiInstr(new Value(length), DeoptId::kNone, builder.Source()));
  builder.AddInstruction(new StoreFieldInstr(
      Slot::GrowableObjectArray_length(), new Value(growable_array),
      new Value(length), kNoStoreBarrier, builder.Source()));
  Definition* null_def = builder.AddNullDefinition();
  builder.AddReturn(new Value(null_def));
  return true;
}

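// Smi arithmetic intrinsics: check that the operands are Smis (branching to
// the intrinsic slow path otherwise) and apply the operation on tagged values.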
static bool BuildUnarySmiOp(FlowGraph* flow_graph, Token::Kind op_kind) {
  ASSERT(!flow_graph->function().has_unboxed_return());
  ASSERT(!flow_graph->function().is_unboxed_parameter_at(0));
  GraphEntryInstr* graph_entry = flow_graph->graph_entry();
  auto normal_entry = graph_entry->normal_entry();
  BlockBuilder builder(flow_graph, normal_entry, /*with_frame=*/false);
  Definition* left = builder.AddParameter(0);
  builder.AddInstruction(
      new CheckSmiInstr(new Value(left), DeoptId::kNone, builder.Source()));
  Definition* result = builder.AddDefinition(
      new UnarySmiOpInstr(op_kind, new Value(left), DeoptId::kNone));
  builder.AddReturn(new Value(result));
  return true;
}

bool GraphIntrinsifier::Build_Smi_bitNegate(FlowGraph* flow_graph) {
  return BuildUnarySmiOp(flow_graph, Token::kBIT_NOT);
}

bool GraphIntrinsifier::Build_Integer_negate(FlowGraph* flow_graph) {
  return BuildUnarySmiOp(flow_graph, Token::kNEGATE);
}

static bool BuildBinarySmiOp(FlowGraph* flow_graph, Token::Kind op_kind) {
  ASSERT(!flow_graph->function().has_unboxed_return());
  ASSERT(!flow_graph->function().is_unboxed_parameter_at(0));
  ASSERT(!flow_graph->function().is_unboxed_parameter_at(1));
  GraphEntryInstr* graph_entry = flow_graph->graph_entry();
  auto normal_entry = graph_entry->normal_entry();
  BlockBuilder builder(flow_graph, normal_entry, /*with_frame=*/false);
  Definition* left = builder.AddParameter(0);
  Definition* right = builder.AddParameter(1);
  builder.AddInstruction(
      new CheckSmiInstr(new Value(left), DeoptId::kNone, builder.Source()));
  builder.AddInstruction(
      new CheckSmiInstr(new Value(right), DeoptId::kNone, builder.Source()));
  Definition* result = builder.AddDefinition(new BinarySmiOpInstr(
      op_kind, new Value(left), new Value(right), DeoptId::kNone));
  builder.AddReturn(new Value(result));
  return true;
}

bool GraphIntrinsifier::Build_Integer_add(FlowGraph* flow_graph) {
  return BuildBinarySmiOp(flow_graph, Token::kADD);
}

bool GraphIntrinsifier::Build_Integer_sub(FlowGraph* flow_graph) {
  return BuildBinarySmiOp(flow_graph, Token::kSUB);
}

bool GraphIntrinsifier::Build_Integer_mul(FlowGraph* flow_graph) {
  return BuildBinarySmiOp(flow_graph, Token::kMUL);
}

bool GraphIntrinsifier::Build_Integer_mod(FlowGraph* flow_graph) {
  return BuildBinarySmiOp(flow_graph, Token::kMOD);
}

bool GraphIntrinsifier::Build_Integer_truncDivide(FlowGraph* flow_graph) {
  return BuildBinarySmiOp(flow_graph, Token::kTRUNCDIV);
}

bool GraphIntrinsifier::Build_Integer_bitAnd(FlowGraph* flow_graph) {
  return BuildBinarySmiOp(flow_graph, Token::kBIT_AND);
}

bool GraphIntrinsifier::Build_Integer_bitOr(FlowGraph* flow_graph) {
  return BuildBinarySmiOp(flow_graph, Token::kBIT_OR);
}

bool GraphIntrinsifier::Build_Integer_bitXor(FlowGraph* flow_graph) {
  return BuildBinarySmiOp(flow_graph, Token::kBIT_XOR);
}

bool GraphIntrinsifier::Build_Integer_sar(FlowGraph* flow_graph) {
  return BuildBinarySmiOp(flow_graph, Token::kSHR);
}

bool GraphIntrinsifier::Build_Integer_shr(FlowGraph* flow_graph) {
  return BuildBinarySmiOp(flow_graph, Token::kUSHR);
}

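// Produces an unboxed double from a parameter regardless of its calling
// convention: pass-through for unboxed doubles, an int64-to-double conversion
// for unboxed integers (64-bit targets only), or an unbox for tagged values.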
static Definition* ConvertOrUnboxDoubleParameter(BlockBuilder* builder,
                                                 Definition* value,
                                                 intptr_t index,
                                                 bool is_checked) {
  const auto& function = builder->function();
  if (function.is_unboxed_double_parameter_at(index)) {
    return value;
  } else if (function.is_unboxed_integer_parameter_at(index)) {
    if (compiler::target::kWordSize == 4) {
      // Int64ToDoubleInstr is not implemented on 32-bit platforms.
      return nullptr;
    }
    auto to_double = new Int64ToDoubleInstr(new Value(value), DeoptId::kNone);
    return builder->AddDefinition(to_double);
  } else {
    ASSERT(!function.is_unboxed_parameter_at(index));
    return builder->AddUnboxInstr(kUnboxedDouble, value, is_checked);
  }
}

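// DoubleFlipSignBit is implemented as a double negation on the unboxed value.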
bool GraphIntrinsifier::Build_DoubleFlipSignBit(FlowGraph* flow_graph) {
  if (!FlowGraphCompiler::SupportsUnboxedDoubles()) {
    return false;
  }
  GraphEntryInstr* graph_entry = flow_graph->graph_entry();
  auto normal_entry = graph_entry->normal_entry();
  BlockBuilder builder(flow_graph, normal_entry, /*with_frame=*/false);

  Definition* receiver = builder.AddParameter(0);
  Definition* unboxed_value = ConvertOrUnboxDoubleParameter(
      &builder, receiver, 0, /* is_checked = */ true);
  if (unboxed_value == nullptr) {
    return false;
  }
  Definition* unboxed_result = builder.AddDefinition(new UnaryDoubleOpInstr(
      Token::kNEGATE, new Value(unboxed_value), DeoptId::kNone));
  Definition* result =
      CreateBoxedResultIfNeeded(&builder, unboxed_result, kUnboxedDouble);
  builder.AddReturn(new Value(result));
  return true;
}

}  // namespace compiler
}  // namespace dart