kernel_to_il.cc
// Copyright (c) 2016, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include "vm/compiler/frontend/kernel_to_il.h"

#include <utility>

#include "platform/assert.h"
#include "platform/globals.h"
#include "vm/class_id.h"
#include "vm/compiler/ffi/abi.h"
#include "vm/kernel_isolate.h"
#include "vm/kernel_loader.h"
#include "vm/log.h"
#include "vm/longjump.h"
#include "vm/native_entry.h"
#include "vm/object_store.h"
#include "vm/report.h"
#include "vm/resolver.h"
#include "vm/runtime_entry.h"
#include "vm/scopes.h"
#include "vm/stack_frame.h"
#include "vm/symbols.h"

namespace dart {

DEFINE_FLAG(bool,
            print_huge_methods,
            false,
            "Print huge methods (less optimized)");

DEFINE_FLAG(int,
            force_switch_dispatch_type,
            -1,
            "Force switch statements to use a particular dispatch type: "
            "-1=auto, 0=linear scan, 1=binary search, 2=jump table");

namespace kernel {

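// Shorthand used throughout this file: Z is the current zone, H the
// translation helper, T the type translator, and I/IG the current isolate
// and isolate group.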
#define Z (zone_)
#define H (translation_helper_)
#define T (type_translator_)
#define I Isolate::Current()
#define IG IsolateGroup::Current()

FlowGraphBuilder::FlowGraphBuilder(
    ParsedFunction* parsed_function,
    ZoneGrowableArray<const ICData*>* ic_data_array,
    ZoneGrowableArray<intptr_t>* context_level_array,
    InlineExitCollector* exit_collector,
    bool optimizing,
    intptr_t osr_id,
    intptr_t first_block_id,
    bool inlining_unchecked_entry)
    : BaseFlowGraphBuilder(parsed_function,
                           first_block_id - 1,
                           osr_id,
                           context_level_array,
                           exit_collector,
                           inlining_unchecked_entry),
      translation_helper_(Thread::Current()),
      thread_(translation_helper_.thread()),
      zone_(translation_helper_.zone()),
      parsed_function_(parsed_function),
      optimizing_(optimizing),
      ic_data_array_(*ic_data_array),
      next_function_id_(0),
      loop_depth_(0),
      try_depth_(0),
      catch_depth_(0),
      block_expression_depth_(0),
      graph_entry_(nullptr),
      scopes_(nullptr),
      breakable_block_(nullptr),
      switch_block_(nullptr),
      try_catch_block_(nullptr),
      try_finally_block_(nullptr),
      catch_block_(nullptr),
      prepend_type_arguments_(Function::ZoneHandle(zone_)) {
  const auto& info = KernelProgramInfo::Handle(
      Z, parsed_function->function().KernelProgramInfo());
  H.InitFromKernelProgramInfo(info);
}

FlowGraphBuilder::~FlowGraphBuilder() {}

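// EnterScope/ExitScope bracket a kernel scope: when the scope owns captured
// variables, EnterScope allocates a fresh Context (chained to the current
// one) and ExitScope pops it again.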
Fragment FlowGraphBuilder::EnterScope(
    intptr_t kernel_offset,
    const LocalScope** context_scope /* = nullptr */) {
  Fragment instructions;
  const LocalScope* scope = scopes_->scopes.Lookup(kernel_offset);
  if (scope->num_context_variables() > 0) {
    instructions += PushContext(scope);
    instructions += Drop();
  }
  if (context_scope != nullptr) {
    *context_scope = scope;
  }
  return instructions;
}

Fragment FlowGraphBuilder::ExitScope(intptr_t kernel_offset) {
  Fragment instructions;
  const intptr_t context_size =
      scopes_->scopes.Lookup(kernel_offset)->num_context_variables();
  if (context_size > 0) {
    instructions += PopContext();
  }
  return instructions;
}

Fragment FlowGraphBuilder::AdjustContextTo(int depth) {
  ASSERT(depth <= context_depth_ && depth >= 0);
  Fragment instructions;
  if (depth < context_depth_) {
    instructions += LoadContextAt(depth);
    instructions += StoreLocal(TokenPosition::kNoSource,
                               parsed_function_->current_context_var());
    instructions += Drop();
    context_depth_ = depth;
  }
  return instructions;
}

Fragment FlowGraphBuilder::PushContext(const LocalScope* scope) {
  ASSERT(scope->num_context_variables() > 0);
  Fragment instructions = AllocateContext(scope->context_slots());
  LocalVariable* context = MakeTemporary();
  instructions += LoadLocal(context);
  instructions += LoadLocal(parsed_function_->current_context_var());
  instructions += StoreNativeField(Slot::Context_parent(),
                                   StoreFieldInstr::Kind::kInitializing);
  instructions += StoreLocal(TokenPosition::kNoSource,
                             parsed_function_->current_context_var());
  ++context_depth_;
  return instructions;
}

Fragment FlowGraphBuilder::PopContext() {
  return AdjustContextTo(context_depth_ - 1);
}

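// Pushes the instantiator type arguments: for factories they come from the
// type-arguments parameter, for instance methods from the receiver's class,
// and otherwise a null constant is pushed.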
Fragment FlowGraphBuilder::LoadInstantiatorTypeArguments() {
  // TODO(27590): We could use `active_class_->IsGeneric()`.
  Fragment instructions;
  if (scopes_ != nullptr && scopes_->type_arguments_variable != nullptr) {
#ifdef DEBUG
    Function& function =
        Function::Handle(Z, parsed_function_->function().ptr());
    while (function.IsClosureFunction()) {
      function = function.parent_function();
    }
    ASSERT(function.IsFactory());
#endif
    instructions += LoadLocal(scopes_->type_arguments_variable);
  } else if (parsed_function_->has_receiver_var() &&
             active_class_.ClassNumTypeArguments() > 0) {
    ASSERT(!parsed_function_->function().IsFactory());
    instructions += LoadLocal(parsed_function_->receiver_var());
    instructions += LoadNativeField(
        Slot::GetTypeArgumentsSlotFor(thread_, *active_class_.klass));
  } else {
    instructions += NullConstant();
  }
  return instructions;
}

// This function is responsible for pushing a type arguments vector which
// contains all type arguments of enclosing functions prepended to the type
// arguments of the current function.
Fragment FlowGraphBuilder::LoadFunctionTypeArguments() {
  Fragment instructions;

  const Function& function = parsed_function_->function();

  if (function.IsGeneric() || function.HasGenericParent()) {
    ASSERT(parsed_function_->function_type_arguments() != nullptr);
    instructions += LoadLocal(parsed_function_->function_type_arguments());
  } else {
    instructions += NullConstant();
  }

  return instructions;
}

Fragment FlowGraphBuilder::TranslateInstantiatedTypeArguments(
    const TypeArguments& type_arguments) {
  Fragment instructions;

  auto const mode = type_arguments.GetInstantiationMode(
      Z, &parsed_function_->function(), active_class_.klass);

  switch (mode) {
    case InstantiationMode::kIsInstantiated:
      // There are no type references to type parameters so we can just take it.
      instructions += Constant(type_arguments);
      break;
    case InstantiationMode::kSharesInstantiatorTypeArguments:
      // If the instantiator type arguments are just passed on, we don't need to
      // resolve the type parameters.
      //
      // This is for example the case here:
      //     class Foo<T> {
      //       newList() => new List<T>();
      //     }
      // We just use the type argument vector from the [Foo] object and pass it
      // directly to the `new List<T>()` factory constructor.
      instructions += LoadInstantiatorTypeArguments();
      break;
    case InstantiationMode::kSharesFunctionTypeArguments:
      instructions += LoadFunctionTypeArguments();
      break;
    case InstantiationMode::kNeedsInstantiation:
      // Otherwise we need to resolve [TypeParameterType]s in the type
      // expression based on the current instantiator type argument vector.
      if (!type_arguments.IsInstantiated(kCurrentClass)) {
        instructions += LoadInstantiatorTypeArguments();
      } else {
        instructions += NullConstant();
      }
      if (!type_arguments.IsInstantiated(kFunctions)) {
        instructions += LoadFunctionTypeArguments();
      } else {
        instructions += NullConstant();
      }
      instructions += InstantiateTypeArguments(type_arguments);
      break;
  }
  return instructions;
}

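// Builds the entry block of a catch handler: copies the incoming exception
// and stacktrace into their (possibly captured) variables and restores
// :current_context_var before the handler body runs.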
Fragment FlowGraphBuilder::CatchBlockEntry(const Array& handler_types,
                                           intptr_t handler_index,
                                           bool needs_stacktrace,
                                           bool is_synthesized) {
  LocalVariable* exception_var = CurrentException();
  LocalVariable* stacktrace_var = CurrentStackTrace();
  LocalVariable* raw_exception_var = CurrentRawException();
  LocalVariable* raw_stacktrace_var = CurrentRawStackTrace();

  CatchBlockEntryInstr* entry = new (Z) CatchBlockEntryInstr(
      is_synthesized,  // whether catch block was synthesized by FE compiler
      AllocateBlockId(), CurrentTryIndex(), graph_entry_, handler_types,
      handler_index, needs_stacktrace, GetNextDeoptId(), exception_var,
      stacktrace_var, raw_exception_var, raw_stacktrace_var);
  graph_entry_->AddCatchEntry(entry);

  Fragment instructions(entry);

  // Auxiliary variables introduced by the try catch can be captured if we are
  // inside a function with yield/resume points. In this case we first need
  // to restore the context to match the context at entry into the closure.
  const bool should_restore_closure_context =
      CurrentException()->is_captured() || CurrentCatchContext()->is_captured();
  LocalVariable* context_variable = parsed_function_->current_context_var();
  if (should_restore_closure_context) {
    ASSERT(parsed_function_->function().IsClosureFunction());

    LocalVariable* closure_parameter = parsed_function_->ParameterVariable(0);
    ASSERT(!closure_parameter->is_captured());
    instructions += LoadLocal(closure_parameter);
    instructions += LoadNativeField(Slot::Closure_context());
    instructions += StoreLocal(TokenPosition::kNoSource, context_variable);
    instructions += Drop();
  }

  if (exception_var->is_captured()) {
    instructions += LoadLocal(context_variable);
    instructions += LoadLocal(raw_exception_var);
    instructions += StoreNativeField(
        Slot::GetContextVariableSlotFor(thread_, *exception_var));
  }
  if (stacktrace_var->is_captured()) {
    instructions += LoadLocal(context_variable);
    instructions += LoadLocal(raw_stacktrace_var);
    instructions += StoreNativeField(
        Slot::GetContextVariableSlotFor(thread_, *stacktrace_var));
  }

  // :saved_try_context_var can be captured in the context of the closure;
  // in this case CatchBlockEntryInstr restores :current_context_var to point
  // to the closure context in the same way as the normal function prologue
  // does. Update the current context depth to reflect that.
  const intptr_t saved_context_depth = context_depth_;
  ASSERT(!CurrentCatchContext()->is_captured() ||
         CurrentCatchContext()->owner()->context_level() == 0);
  context_depth_ = 0;
  instructions += LoadLocal(CurrentCatchContext());
  instructions += StoreLocal(TokenPosition::kNoSource,
                             parsed_function_->current_context_var());
  instructions += Drop();
  context_depth_ = saved_context_depth;

  return instructions;
}

Fragment FlowGraphBuilder::TryCatch(int try_handler_index) {
  // The body of the try needs to have its own block in order to get a new try
  // index.
  //
  // => We therefore create a block for the body (fresh try index) and another
  //    join block (with current try index).
  Fragment body;
  JoinEntryInstr* entry = BuildJoinEntry(try_handler_index);
  body += LoadLocal(parsed_function_->current_context_var());
  body += StoreLocal(TokenPosition::kNoSource, CurrentCatchContext());
  body += Drop();
  body += Goto(entry);
  return Fragment(body.entry, entry);
}

Fragment FlowGraphBuilder::CheckStackOverflowInPrologue(
    TokenPosition position) {
  ASSERT(loop_depth_ == 0);
  return CheckStackOverflow(position, /*stack_depth=*/0, /*loop_depth=*/0);
}

Fragment FlowGraphBuilder::CloneContext(
    const ZoneGrowableArray<const Slot*>& context_slots) {
  LocalVariable* context_variable = parsed_function_->current_context_var();

  Fragment instructions = LoadLocal(context_variable);

  CloneContextInstr* clone_instruction = new (Z) CloneContextInstr(
      InstructionSource(), Pop(), context_slots, GetNextDeoptId());
  instructions <<= clone_instruction;
  Push(clone_instruction);

  instructions += StoreLocal(TokenPosition::kNoSource, context_variable);
  instructions += Drop();
  return instructions;
}

Fragment FlowGraphBuilder::InstanceCall(
    TokenPosition position,
    const String& name,
    Token::Kind kind,
    intptr_t type_args_len,
    intptr_t argument_count,
    const Array& argument_names,
    intptr_t checked_argument_count,
    const Function& interface_target,
    const Function& tearoff_interface_target,
    const InferredTypeMetadata* result_type,
    bool use_unchecked_entry,
    const CallSiteAttributesMetadata* call_site_attrs,
    bool receiver_is_not_smi,
    bool is_call_on_this) {
  const intptr_t total_count = argument_count + (type_args_len > 0 ? 1 : 0);
  InputsArray arguments = GetArguments(total_count);
  InstanceCallInstr* call = new (Z) InstanceCallInstr(
      InstructionSource(position), name, kind, std::move(arguments),
      type_args_len, argument_names, checked_argument_count, ic_data_array_,
      GetNextDeoptId(), interface_target, tearoff_interface_target);
  if ((result_type != nullptr) && !result_type->IsTrivial()) {
    call->SetResultType(Z, result_type->ToCompileType(Z));
  }
  if (use_unchecked_entry) {
    call->set_entry_kind(Code::EntryKind::kUnchecked);
  }
  if (is_call_on_this) {
    call->mark_as_call_on_this();
  }
  if (call_site_attrs != nullptr && call_site_attrs->receiver_type != nullptr &&
      call_site_attrs->receiver_type->IsInstantiated()) {
    call->set_receivers_static_type(call_site_attrs->receiver_type);
  } else if (!interface_target.IsNull()) {
    const Class& owner = Class::Handle(Z, interface_target.Owner());
    const AbstractType& type =
        AbstractType::ZoneHandle(Z, owner.DeclarationType());
    call->set_receivers_static_type(&type);
  }
  call->set_receiver_is_not_smi(receiver_is_not_smi);
  Push(call);
  if (result_type != nullptr && result_type->IsConstant()) {
    Fragment instructions(call);
    instructions += Drop();
    instructions += Constant(result_type->constant_value);
    return instructions;
  }
  return Fragment(call);
}

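// Emits an FFI call described by [marshaller]; the target address and the
// already-marshalled arguments are expected on the expression stack and are
// consumed by the FfiCallInstr.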
Fragment FlowGraphBuilder::FfiCall(
    const compiler::ffi::CallMarshaller& marshaller,
    bool is_leaf) {
  Fragment body;

  const intptr_t num_arguments =
      FfiCallInstr::InputCountForMarshaller(marshaller);
  InputsArray arguments = GetArguments(num_arguments);
  FfiCallInstr* const call = new (Z)
      FfiCallInstr(GetNextDeoptId(), marshaller, is_leaf, std::move(arguments));
  Push(call);
  body <<= call;

  return body;
}

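// Calls a leaf runtime entry: loads the entry's address from the current
// Thread and passes the arguments (+1 for the target address) directly to
// LeafRuntimeCallInstr.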
Fragment FlowGraphBuilder::CallLeafRuntimeEntry(
    const RuntimeEntry& entry,
    Representation return_representation,
    const ZoneGrowableArray<Representation>& argument_representations) {
  Fragment body;

  body += LoadThread();
  body += LoadUntagged(compiler::target::Thread::OffsetFromThread(&entry));

  const intptr_t num_arguments = argument_representations.length() + 1;
  InputsArray arguments = GetArguments(num_arguments);
  auto* const call = LeafRuntimeCallInstr::Make(
      Z, return_representation, argument_representations, std::move(arguments));
  Push(call);
  body <<= call;

  return body;
}

Fragment FlowGraphBuilder::RethrowException(TokenPosition position,
                                            int catch_try_index) {
  Fragment instructions;
  Value* stacktrace = Pop();
  Value* exception = Pop();
  instructions += Fragment(new (Z) ReThrowInstr(
                               InstructionSource(position), catch_try_index,
                               GetNextDeoptId(), exception, stacktrace))
                      .closed();
  // Use its side effect of leaving a constant on the stack (does not change
  // the graph).
  NullConstant();

  return instructions;
}

Fragment FlowGraphBuilder::LoadLocal(LocalVariable* variable) {
  // Captured 'this' is immutable, so within the outer method we don't need to
  // load it from the context.
  const ParsedFunction* pf = parsed_function_;
  if (pf->function().HasThisParameter() && pf->has_receiver_var() &&
      variable == pf->receiver_var()) {
    ASSERT(variable == pf->ParameterVariable(0));
    variable = pf->RawParameterVariable(0);
  }
  if (variable->is_captured()) {
    Fragment instructions;
    instructions += LoadContextAt(variable->owner()->context_level());
    instructions += LoadNativeField(
        Slot::GetContextVariableSlotFor(thread_, *variable));
    return instructions;
  } else {
    return BaseFlowGraphBuilder::LoadLocal(variable);
  }
}

473
474IndirectGotoInstr* FlowGraphBuilder::IndirectGoto(intptr_t target_count) {
475 Value* index = Pop();
476 return new (Z) IndirectGotoInstr(target_count, index);
477}
478
479Fragment FlowGraphBuilder::ThrowLateInitializationError(
480 TokenPosition position,
481 const char* throw_method_name,
482 const String& name) {
483 const auto& dart_internal = Library::Handle(Z, Library::InternalLibrary());
484 const Class& klass =
485 Class::ZoneHandle(Z, dart_internal.LookupClass(Symbols::LateError()));
486 ASSERT(!klass.IsNull());
487
488 const auto& error = klass.EnsureIsFinalized(thread_);
490 const Function& throw_new =
491 Function::ZoneHandle(Z, klass.LookupStaticFunctionAllowPrivate(
492 H.DartSymbolObfuscate(throw_method_name)));
493 ASSERT(!throw_new.IsNull());
494
495 Fragment instructions;
496
497 // Call LateError._throwFoo.
498 instructions += Constant(name);
499 instructions +=
500 StaticCall(TokenPosition::Synthetic(position.Pos()), throw_new,
501 /* argument_count = */ 1, ICData::kStatic);
502 instructions += Drop();
503
504 return instructions;
505}
506
507Fragment FlowGraphBuilder::StoreLateField(const Field& field,
508 LocalVariable* instance,
509 LocalVariable* setter_value) {
510 Fragment instructions;
511 TargetEntryInstr* is_uninitialized;
512 TargetEntryInstr* is_initialized;
513 const TokenPosition position = field.token_pos();
514 const bool is_static = field.is_static();
515 const bool is_final = field.is_final();
516
517 if (is_final) {
518 // Check whether the field has been initialized already.
519 if (is_static) {
520 instructions += LoadStaticField(field, /*calls_initializer=*/false);
521 } else {
522 instructions += LoadLocal(instance);
523 instructions += LoadField(field, /*calls_initializer=*/false);
524 }
525 instructions += Constant(Object::sentinel());
526 instructions += BranchIfStrictEqual(&is_uninitialized, &is_initialized);
527 JoinEntryInstr* join = BuildJoinEntry();
528
529 {
530 // If the field isn't initialized, do nothing.
531 Fragment initialize(is_uninitialized);
532 initialize += Goto(join);
533 }
534
535 {
536 // If the field is already initialized, throw a LateInitializationError.
537 Fragment already_initialized(is_initialized);
538 already_initialized += ThrowLateInitializationError(
539 position, "_throwFieldAlreadyInitialized",
540 String::ZoneHandle(Z, field.name()));
541 already_initialized += Goto(join);
542 }
543
544 instructions = Fragment(instructions.entry, join);
545 }
546
547 if (!is_static) {
548 instructions += LoadLocal(instance);
549 }
550 instructions += LoadLocal(setter_value);
551 if (is_static) {
552 instructions += StoreStaticField(position, field);
553 } else {
554 instructions += StoreFieldGuarded(field);
555 }
556
557 return instructions;
558}
559
Fragment FlowGraphBuilder::NativeCall(const String& name,
                                      const Function& function) {
  InlineBailout("kernel::FlowGraphBuilder::NativeCall");
  // +1 for result placeholder.
  const intptr_t num_args =
      function.NumParameters() + (function.IsGeneric() ? 1 : 0) + 1;

  Fragment instructions;
  instructions += NullConstant();  // Placeholder for the result.

  InputsArray arguments = GetArguments(num_args);
  NativeCallInstr* call = new (Z) NativeCallInstr(
      name, function, FLAG_link_natives_lazily,
      InstructionSource(function.end_token_pos()), std::move(arguments));
  Push(call);
  instructions <<= call;
  return instructions;
}

Fragment FlowGraphBuilder::Return(TokenPosition position,
                                  bool omit_result_type_check) {
  Fragment instructions;
  const Function& function = parsed_function_->function();

  // Emit a type check of the return type in checked mode for all functions
  // and in strong mode for native functions.
  if (!omit_result_type_check && function.is_old_native()) {
    const AbstractType& return_type =
        AbstractType::Handle(Z, function.result_type());
    instructions += CheckAssignable(return_type, Symbols::FunctionResult());
  }

  if (NeedsDebugStepCheck(function, position)) {
    instructions += DebugStepCheck(position);
  }

  instructions += BaseFlowGraphBuilder::Return(position);

  return instructions;
}

Fragment FlowGraphBuilder::StaticCall(TokenPosition position,
                                      const Function& target,
                                      intptr_t argument_count,
                                      ICData::RebindRule rebind_rule) {
  return StaticCall(position, target, argument_count, Array::null_array(),
                    rebind_rule);
}

void FlowGraphBuilder::SetResultTypeForStaticCall(
    StaticCallInstr* call,
    const Function& target,
    intptr_t argument_count,
    const InferredTypeMetadata* result_type) {
  if (call->InitResultType(Z)) {
    ASSERT((result_type == nullptr) || (result_type->cid == kDynamicCid) ||
           (result_type->cid == call->result_cid()));
    return;
  }
  if ((result_type != nullptr) && !result_type->IsTrivial()) {
    call->SetResultType(Z, result_type->ToCompileType(Z));
  }
}

Fragment FlowGraphBuilder::StaticCall(TokenPosition position,
                                      const Function& target,
                                      intptr_t argument_count,
                                      const Array& argument_names,
                                      ICData::RebindRule rebind_rule,
                                      const InferredTypeMetadata* result_type,
                                      intptr_t type_args_count,
                                      bool use_unchecked_entry) {
  const intptr_t total_count = argument_count + (type_args_count > 0 ? 1 : 0);
  InputsArray arguments = GetArguments(total_count);
  StaticCallInstr* call = new (Z) StaticCallInstr(
      InstructionSource(position), target, type_args_count, argument_names,
      std::move(arguments), ic_data_array_, GetNextDeoptId(), rebind_rule);
  SetResultTypeForStaticCall(call, target, argument_count, result_type);
  if (use_unchecked_entry) {
    call->set_entry_kind(Code::EntryKind::kUnchecked);
  }
  Push(call);
  if (result_type != nullptr && result_type->IsConstant()) {
    Fragment instructions(call);
    instructions += Drop();
    instructions += Constant(result_type->constant_value);
    return instructions;
  }
  return Fragment(call);
}

Fragment FlowGraphBuilder::CachableIdempotentCall(TokenPosition position,
                                                  Representation representation,
                                                  const Function& target,
                                                  intptr_t argument_count,
                                                  const Array& argument_names,
                                                  intptr_t type_args_count) {
  const intptr_t total_count = argument_count + (type_args_count > 0 ? 1 : 0);
  InputsArray arguments = GetArguments(total_count);
  CachableIdempotentCallInstr* call = new (Z) CachableIdempotentCallInstr(
      InstructionSource(position), representation, target, type_args_count,
      argument_names, std::move(arguments), GetNextDeoptId());
  Push(call);
  return Fragment(call);
}

Fragment FlowGraphBuilder::StringInterpolateSingle(TokenPosition position) {
  Fragment instructions;
  instructions += StaticCall(
      position, CompilerState::Current().StringBaseInterpolateSingle(),
      /* argument_count = */ 1, ICData::kStatic);
  return instructions;
}

Fragment FlowGraphBuilder::StringInterpolate(TokenPosition position) {
  Fragment instructions;
  instructions +=
      StaticCall(position, CompilerState::Current().StringBaseInterpolate(),
                 /* argument_count = */ 1, ICData::kStatic);
  return instructions;
}

Fragment FlowGraphBuilder::ThrowTypeError() {
  const Class& klass =
      Class::ZoneHandle(Z, Library::LookupCoreClass(Symbols::TypeError()));
  ASSERT(!klass.IsNull());
  GrowableHandlePtrArray<const String> pieces(Z, 3);
  pieces.Add(Symbols::TypeError());
  pieces.Add(Symbols::Dot());
  pieces.Add(H.DartSymbolObfuscate("_create"));

  const Function& constructor = Function::ZoneHandle(
      Z, klass.LookupConstructorAllowPrivate(
             String::ZoneHandle(Z, Symbols::FromConcatAll(thread_, pieces))));
  ASSERT(!constructor.IsNull());

  const String& url = H.DartString(
      parsed_function_->function().ToLibNamePrefixedQualifiedCString(),
      Heap::kOld);

  Fragment instructions;

  // Create an instance of _TypeError.
  instructions += AllocateObject(TokenPosition::kNoSource, klass, 0);
  LocalVariable* instance = MakeTemporary();

  // Call the _TypeError._create constructor.
  instructions += LoadLocal(instance);  // this
  instructions += Constant(url);  // url
  instructions += NullConstant();  // line
  instructions += IntConstant(0);  // column
  instructions += Constant(H.DartSymbolPlain("Malformed type."));  // message

  instructions += StaticCall(TokenPosition::kNoSource, constructor,
                             /* argument_count = */ 5, ICData::kStatic);
  instructions += Drop();

  // Throw the exception.
  instructions += ThrowException(TokenPosition::kNoSource);

  return instructions;
}

Fragment FlowGraphBuilder::ThrowNoSuchMethodError(TokenPosition position,
                                                  const Function& target,
                                                  bool incompatible_arguments,
                                                  bool receiver_pushed) {
  const Class& owner = Class::Handle(Z, target.Owner());
  auto& receiver = Instance::ZoneHandle();
  InvocationMirror::Kind kind = InvocationMirror::Kind::kMethod;
  if (target.IsImplicitGetterFunction() || target.IsGetterFunction() ||
      target.IsRecordFieldGetter()) {
    kind = InvocationMirror::kGetter;
  } else if (target.IsImplicitSetterFunction() || target.IsSetterFunction()) {
    kind = InvocationMirror::kSetter;
  }
  InvocationMirror::Level level;
  if (owner.IsTopLevel()) {
    if (incompatible_arguments) {
      receiver = target.UserVisibleSignature();
    }
    level = InvocationMirror::Level::kTopLevel;
  } else {
    receiver = owner.RareType();
    if (target.kind() == UntaggedFunction::kConstructor) {
      level = InvocationMirror::Level::kConstructor;
    } else if (target.IsRecordFieldGetter()) {
      level = InvocationMirror::Level::kDynamic;
    } else {
      level = InvocationMirror::Level::kStatic;
    }
  }

  Fragment instructions;
  if (!receiver_pushed) {
    instructions += Constant(receiver);  // receiver
  }
  instructions +=
      ThrowNoSuchMethodError(position, String::ZoneHandle(Z, target.name()),
                             level, kind, /*receiver_pushed*/ true);
  return instructions;
}

Fragment FlowGraphBuilder::ThrowNoSuchMethodError(TokenPosition position,
                                                  const String& selector,
                                                  InvocationMirror::Level level,
                                                  InvocationMirror::Kind kind,
                                                  bool receiver_pushed) {
  const Class& klass = Class::ZoneHandle(
      Z, Library::LookupCoreClass(Symbols::NoSuchMethodError()));
  ASSERT(!klass.IsNull());
  const auto& error = klass.EnsureIsFinalized(H.thread());
  ASSERT(error == Error::null());
  const Function& throw_function = Function::ZoneHandle(
      Z, klass.LookupStaticFunctionAllowPrivate(Symbols::ThrowNew()));
  ASSERT(!throw_function.IsNull());

  Fragment instructions;
  if (!receiver_pushed) {
    instructions += NullConstant();  // receiver
  }
  instructions += Constant(selector);
  instructions += IntConstant(InvocationMirror::EncodeType(level, kind));
  instructions += IntConstant(0);  // type arguments length
  instructions += NullConstant();  // type arguments
  instructions += NullConstant();  // arguments
  instructions += NullConstant();  // argumentNames
  instructions += StaticCall(position, throw_function, /* argument_count = */ 7,
                             ICData::kNoRebind);
  return instructions;
}

LocalVariable* FlowGraphBuilder::LookupVariable(intptr_t kernel_offset) {
  LocalVariable* local = scopes_->locals.Lookup(kernel_offset);
  ASSERT(local != nullptr);
  ASSERT(local->kernel_offset() == kernel_offset);
  return local;
}

FlowGraph* FlowGraphBuilder::BuildGraph() {
  const Function& function = parsed_function_->function();

#ifdef DEBUG
  // Check that all functions that are explicitly marked as recognized with the
  // vm:recognized annotation are in fact recognized. The check can't be done on
  // function creation, since the recognized status isn't set until later.
  if ((function.IsRecognized() !=
       MethodRecognizer::IsMarkedAsRecognized(function)) &&
      !function.IsDynamicInvocationForwarder()) {
    if (function.IsRecognized()) {
      FATAL("Recognized method %s is not marked with the vm:recognized pragma.",
            function.ToQualifiedCString());
    } else {
      FATAL("Non-recognized method %s is marked with the vm:recognized pragma.",
            function.ToQualifiedCString());
    }
  }
#endif

  auto& kernel_data = TypedDataView::Handle(Z, function.KernelLibrary());
  intptr_t kernel_data_program_offset = function.KernelLibraryOffset();

  StreamingFlowGraphBuilder streaming_flow_graph_builder(
      this, kernel_data, kernel_data_program_offset);
  auto result = streaming_flow_graph_builder.BuildGraph();

  FinalizeCoverageArray();
  result->set_coverage_array(coverage_array());

  if (streaming_flow_graph_builder.num_ast_nodes() >
      FLAG_huge_method_cutoff_in_ast_nodes) {
    if (FLAG_print_huge_methods) {
      OS::PrintErr(
          "Warning: \'%s\' from \'%s\' is too large. Some optimizations have "
          "been disabled, and the compiler might run out of memory. "
          "Consider refactoring this code into smaller components.\n",
          function.QualifiedUserVisibleNameCString(),
          String::Handle(Z, Library::Handle(
                                Z, Class::Handle(Z, function.Owner()).library())
                                .url())
              .ToCString());
    }
    result->mark_huge_method();
  }

  return result;
}

Fragment FlowGraphBuilder::NativeFunctionBody(const Function& function,
                                              LocalVariable* first_parameter) {
  ASSERT(function.is_old_native());
  RELEASE_ASSERT(!function.IsClosureFunction());  // Not supported.

  Fragment body;
  String& name = String::ZoneHandle(Z, function.native_name());
  if (function.IsGeneric()) {
    body += LoadLocal(parsed_function_->RawTypeArgumentsVariable());
  }
  for (intptr_t i = 0; i < function.NumParameters(); ++i) {
    body += LoadLocal(parsed_function_->RawParameterVariable(i));
  }
  body += NativeCall(name, function);
  // We typecheck results of native calls for type safety.
  body +=
      Return(TokenPosition::kNoSource, /* omit_result_type_check = */ false);
  return body;
}

static bool CanUnboxElements(classid_t cid) {
  switch (RepresentationUtils::RepresentationOfArrayElement(cid)) {
    case kUnboxedFloat:
    case kUnboxedDouble:
      return FlowGraphCompiler::SupportsUnboxedDoubles();
    case kUnboxedInt32x4:
    case kUnboxedFloat32x4:
    case kUnboxedFloat64x2:
      return FlowGraphCompiler::SupportsUnboxedSimd128();
    default:
      return true;
  }
}

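// Returns the typed-list getter function to call when the element
// representation cannot be unboxed on this target (see CanUnboxElements
// above); the Function handles are cached in the CompilerState.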
static const Function& TypedListGetNativeFunction(Thread* thread,
                                                  classid_t cid) {
  auto& state = thread->compiler_state();
  switch (RepresentationUtils::RepresentationOfArrayElement(cid)) {
    case kUnboxedFloat:
      return state.TypedListGetFloat32();
    case kUnboxedDouble:
      return state.TypedListGetFloat64();
    case kUnboxedInt32x4:
      return state.TypedListGetInt32x4();
    case kUnboxedFloat32x4:
      return state.TypedListGetFloat32x4();
    case kUnboxedFloat64x2:
      return state.TypedListGetFloat64x2();
    default:
      UNREACHABLE();
      return Object::null_function();
  }
}

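// X-macro tables mapping recognized getter/setter methods to the native
// field (Slot) they access; they are expanded below via the CASE macro.
// The NO_BARRIER variants store Smi values, so the write barrier can be
// skipped.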
#define LOAD_NATIVE_FIELD(V)                                                   \
  V(ByteDataViewLength, TypedDataBase_length)                                  \
  V(ByteDataViewOffsetInBytes, TypedDataView_offset_in_bytes)                  \
  V(ByteDataViewTypedData, TypedDataView_typed_data)                           \
  V(Finalizer_getCallback, Finalizer_callback)                                 \
  V(FinalizerBase_getAllEntries, FinalizerBase_all_entries)                    \
  V(FinalizerBase_getDetachments, FinalizerBase_detachments)                   \
  V(FinalizerEntry_getDetach, FinalizerEntry_detach)                           \
  V(FinalizerEntry_getNext, FinalizerEntry_next)                               \
  V(FinalizerEntry_getToken, FinalizerEntry_token)                             \
  V(FinalizerEntry_getValue, FinalizerEntry_value)                             \
  V(NativeFinalizer_getCallback, NativeFinalizer_callback)                     \
  V(GrowableArrayLength, GrowableObjectArray_length)                           \
  V(ReceivePort_getSendPort, ReceivePort_send_port)                            \
  V(ReceivePort_getHandler, ReceivePort_handler)                               \
  V(ImmutableLinkedHashBase_getData, ImmutableLinkedHashBase_data)             \
  V(ImmutableLinkedHashBase_getIndex, ImmutableLinkedHashBase_index)           \
  V(LinkedHashBase_getData, LinkedHashBase_data)                               \
  V(LinkedHashBase_getDeletedKeys, LinkedHashBase_deleted_keys)                \
  V(LinkedHashBase_getHashMask, LinkedHashBase_hash_mask)                      \
  V(LinkedHashBase_getIndex, LinkedHashBase_index)                             \
  V(LinkedHashBase_getUsedData, LinkedHashBase_used_data)                      \
  V(ObjectArrayLength, Array_length)                                           \
  V(Record_shape, Record_shape)                                                \
  V(SuspendState_getFunctionData, SuspendState_function_data)                  \
  V(SuspendState_getThenCallback, SuspendState_then_callback)                  \
  V(SuspendState_getErrorCallback, SuspendState_error_callback)                \
  V(TypedDataViewOffsetInBytes, TypedDataView_offset_in_bytes)                 \
  V(TypedDataViewTypedData, TypedDataView_typed_data)                          \
  V(TypedListBaseLength, TypedDataBase_length)                                 \
  V(WeakProperty_getKey, WeakProperty_key)                                     \
  V(WeakProperty_getValue, WeakProperty_value)                                 \
  V(WeakReference_getTarget, WeakReference_target)

#define STORE_NATIVE_FIELD(V)                                                  \
  V(Finalizer_setCallback, Finalizer_callback)                                 \
  V(FinalizerBase_setAllEntries, FinalizerBase_all_entries)                    \
  V(FinalizerBase_setDetachments, FinalizerBase_detachments)                   \
  V(FinalizerEntry_setToken, FinalizerEntry_token)                             \
  V(NativeFinalizer_setCallback, NativeFinalizer_callback)                     \
  V(ReceivePort_setHandler, ReceivePort_handler)                               \
  V(LinkedHashBase_setData, LinkedHashBase_data)                               \
  V(LinkedHashBase_setIndex, LinkedHashBase_index)                             \
  V(SuspendState_setFunctionData, SuspendState_function_data)                  \
  V(SuspendState_setThenCallback, SuspendState_then_callback)                  \
  V(SuspendState_setErrorCallback, SuspendState_error_callback)                \
  V(WeakProperty_setKey, WeakProperty_key)                                     \
  V(WeakProperty_setValue, WeakProperty_value)                                 \
  V(WeakReference_setTarget, WeakReference_target)

#define STORE_NATIVE_FIELD_NO_BARRIER(V)                                       \
  V(LinkedHashBase_setDeletedKeys, LinkedHashBase_deleted_keys)                \
  V(LinkedHashBase_setHashMask, LinkedHashBase_hash_mask)                      \
  V(LinkedHashBase_setUsedData, LinkedHashBase_used_data)

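// Whether the compiler builds a hand-crafted IL graph for [function]
// (see BuildGraphOfRecognizedMethod below) instead of translating its
// kernel body.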
bool FlowGraphBuilder::IsRecognizedMethodForFlowGraph(
    const Function& function) {
  const MethodRecognizer::Kind kind = function.recognized_kind();

  switch (kind) {
#define TYPED_DATA_GET_INDEXED_CASES(clazz)                                    \
  case MethodRecognizer::k##clazz##ArrayGetIndexed:                            \
    FALL_THROUGH;                                                              \
  case MethodRecognizer::kExternal##clazz##ArrayGetIndexed:                    \
    FALL_THROUGH;                                                              \
  case MethodRecognizer::k##clazz##ArrayViewGetIndexed:                        \
    FALL_THROUGH;
    CLASS_LIST_TYPED_DATA(TYPED_DATA_GET_INDEXED_CASES)
#undef TYPED_DATA_GET_INDEXED_CASES
    case MethodRecognizer::kObjectArrayGetIndexed:
    case MethodRecognizer::kGrowableArrayGetIndexed:
    case MethodRecognizer::kRecord_fieldAt:
    case MethodRecognizer::kRecord_fieldNames:
    case MethodRecognizer::kRecord_numFields:
    case MethodRecognizer::kSuspendState_clone:
    case MethodRecognizer::kSuspendState_resume:
    case MethodRecognizer::kTypedList_GetInt8:
    case MethodRecognizer::kTypedList_SetInt8:
    case MethodRecognizer::kTypedList_GetUint8:
    case MethodRecognizer::kTypedList_SetUint8:
    case MethodRecognizer::kTypedList_GetInt16:
    case MethodRecognizer::kTypedList_SetInt16:
    case MethodRecognizer::kTypedList_GetUint16:
    case MethodRecognizer::kTypedList_SetUint16:
    case MethodRecognizer::kTypedList_GetInt32:
    case MethodRecognizer::kTypedList_SetInt32:
    case MethodRecognizer::kTypedList_GetUint32:
    case MethodRecognizer::kTypedList_SetUint32:
    case MethodRecognizer::kTypedList_GetInt64:
    case MethodRecognizer::kTypedList_SetInt64:
    case MethodRecognizer::kTypedList_GetUint64:
    case MethodRecognizer::kTypedList_SetUint64:
    case MethodRecognizer::kTypedList_GetFloat32:
    case MethodRecognizer::kTypedList_SetFloat32:
    case MethodRecognizer::kTypedList_GetFloat64:
    case MethodRecognizer::kTypedList_SetFloat64:
    case MethodRecognizer::kTypedList_GetInt32x4:
    case MethodRecognizer::kTypedList_SetInt32x4:
    case MethodRecognizer::kTypedList_GetFloat32x4:
    case MethodRecognizer::kTypedList_SetFloat32x4:
    case MethodRecognizer::kTypedList_GetFloat64x2:
    case MethodRecognizer::kTypedList_SetFloat64x2:
    case MethodRecognizer::kTypedData_memMove1:
    case MethodRecognizer::kTypedData_memMove2:
    case MethodRecognizer::kTypedData_memMove4:
    case MethodRecognizer::kTypedData_memMove8:
    case MethodRecognizer::kTypedData_memMove16:
    case MethodRecognizer::kTypedData_ByteDataView_factory:
    case MethodRecognizer::kTypedData_Int8ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint8ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint8ClampedArrayView_factory:
    case MethodRecognizer::kTypedData_Int16ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint16ArrayView_factory:
    case MethodRecognizer::kTypedData_Int32ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint32ArrayView_factory:
    case MethodRecognizer::kTypedData_Int64ArrayView_factory:
    case MethodRecognizer::kTypedData_Uint64ArrayView_factory:
    case MethodRecognizer::kTypedData_Float32ArrayView_factory:
    case MethodRecognizer::kTypedData_Float64ArrayView_factory:
    case MethodRecognizer::kTypedData_Float32x4ArrayView_factory:
    case MethodRecognizer::kTypedData_Int32x4ArrayView_factory:
    case MethodRecognizer::kTypedData_Float64x2ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableByteDataView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableInt8ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableUint8ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableUint8ClampedArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableInt16ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableUint16ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableInt32ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableUint32ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableInt64ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableUint64ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableFloat32ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableFloat64ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableFloat32x4ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableInt32x4ArrayView_factory:
    case MethodRecognizer::kTypedData_UnmodifiableFloat64x2ArrayView_factory:
    case MethodRecognizer::kTypedData_Int8Array_factory:
    case MethodRecognizer::kTypedData_Uint8Array_factory:
    case MethodRecognizer::kTypedData_Uint8ClampedArray_factory:
    case MethodRecognizer::kTypedData_Int16Array_factory:
    case MethodRecognizer::kTypedData_Uint16Array_factory:
    case MethodRecognizer::kTypedData_Int32Array_factory:
    case MethodRecognizer::kTypedData_Uint32Array_factory:
    case MethodRecognizer::kTypedData_Int64Array_factory:
    case MethodRecognizer::kTypedData_Uint64Array_factory:
    case MethodRecognizer::kTypedData_Float32Array_factory:
    case MethodRecognizer::kTypedData_Float64Array_factory:
    case MethodRecognizer::kTypedData_Float32x4Array_factory:
    case MethodRecognizer::kTypedData_Int32x4Array_factory:
    case MethodRecognizer::kTypedData_Float64x2Array_factory:
    case MethodRecognizer::kMemCopy:
    case MethodRecognizer::kFfiLoadInt8:
    case MethodRecognizer::kFfiLoadInt16:
    case MethodRecognizer::kFfiLoadInt32:
    case MethodRecognizer::kFfiLoadInt64:
    case MethodRecognizer::kFfiLoadUint8:
    case MethodRecognizer::kFfiLoadUint16:
    case MethodRecognizer::kFfiLoadUint32:
    case MethodRecognizer::kFfiLoadUint64:
    case MethodRecognizer::kFfiLoadFloat:
    case MethodRecognizer::kFfiLoadFloatUnaligned:
    case MethodRecognizer::kFfiLoadDouble:
    case MethodRecognizer::kFfiLoadDoubleUnaligned:
    case MethodRecognizer::kFfiLoadPointer:
    case MethodRecognizer::kFfiNativeCallbackFunction:
    case MethodRecognizer::kFfiNativeAsyncCallbackFunction:
    case MethodRecognizer::kFfiNativeIsolateLocalCallbackFunction:
    case MethodRecognizer::kFfiStoreInt8:
    case MethodRecognizer::kFfiStoreInt16:
    case MethodRecognizer::kFfiStoreInt32:
    case MethodRecognizer::kFfiStoreInt64:
    case MethodRecognizer::kFfiStoreUint8:
    case MethodRecognizer::kFfiStoreUint16:
    case MethodRecognizer::kFfiStoreUint32:
    case MethodRecognizer::kFfiStoreUint64:
    case MethodRecognizer::kFfiStoreFloat:
    case MethodRecognizer::kFfiStoreFloatUnaligned:
    case MethodRecognizer::kFfiStoreDouble:
    case MethodRecognizer::kFfiStoreDoubleUnaligned:
    case MethodRecognizer::kFfiStorePointer:
    case MethodRecognizer::kFfiFromAddress:
    case MethodRecognizer::kFfiGetAddress:
    case MethodRecognizer::kFfiAsExternalTypedDataInt8:
    case MethodRecognizer::kFfiAsExternalTypedDataInt16:
    case MethodRecognizer::kFfiAsExternalTypedDataInt32:
    case MethodRecognizer::kFfiAsExternalTypedDataInt64:
    case MethodRecognizer::kFfiAsExternalTypedDataUint8:
    case MethodRecognizer::kFfiAsExternalTypedDataUint16:
    case MethodRecognizer::kFfiAsExternalTypedDataUint32:
    case MethodRecognizer::kFfiAsExternalTypedDataUint64:
    case MethodRecognizer::kFfiAsExternalTypedDataFloat:
    case MethodRecognizer::kFfiAsExternalTypedDataDouble:
    case MethodRecognizer::kGetNativeField:
    case MethodRecognizer::kFinalizerBase_exchangeEntriesCollectedWithNull:
    case MethodRecognizer::kFinalizerBase_getIsolateFinalizers:
    case MethodRecognizer::kFinalizerBase_setIsolate:
    case MethodRecognizer::kFinalizerBase_setIsolateFinalizers:
    case MethodRecognizer::kFinalizerEntry_allocate:
    case MethodRecognizer::kFinalizerEntry_getExternalSize:
    case MethodRecognizer::kCheckNotDeeplyImmutable:
    case MethodRecognizer::kObjectEquals:
    case MethodRecognizer::kStringBaseCodeUnitAt:
    case MethodRecognizer::kStringBaseLength:
    case MethodRecognizer::kStringBaseIsEmpty:
    case MethodRecognizer::kClassIDgetID:
    case MethodRecognizer::kGrowableArrayAllocateWithData:
    case MethodRecognizer::kGrowableArrayCapacity:
    case MethodRecognizer::kObjectArrayAllocate:
    case MethodRecognizer::kCopyRangeFromUint8ListToOneByteString:
    case MethodRecognizer::kImmutableLinkedHashBase_setIndexStoreRelease:
    case MethodRecognizer::kFfiAbi:
    case MethodRecognizer::kUtf8DecoderScan:
    case MethodRecognizer::kHas63BitSmis:
    case MethodRecognizer::kExtensionStreamHasListener:
    case MethodRecognizer::kSmi_hashCode:
    case MethodRecognizer::kMint_hashCode:
    case MethodRecognizer::kDouble_hashCode:
#define CASE(method, slot) case MethodRecognizer::k##method:
      LOAD_NATIVE_FIELD(CASE)
      STORE_NATIVE_FIELD(CASE)
      STORE_NATIVE_FIELD_NO_BARRIER(CASE)
#undef CASE
      return true;
    case MethodRecognizer::kDoubleToInteger:
    case MethodRecognizer::kDoubleMod:
    case MethodRecognizer::kDoubleRoundToDouble:
    case MethodRecognizer::kDoubleTruncateToDouble:
    case MethodRecognizer::kDoubleFloorToDouble:
    case MethodRecognizer::kDoubleCeilToDouble:
    case MethodRecognizer::kMathDoublePow:
    case MethodRecognizer::kMathSin:
    case MethodRecognizer::kMathCos:
    case MethodRecognizer::kMathTan:
    case MethodRecognizer::kMathAsin:
    case MethodRecognizer::kMathAcos:
    case MethodRecognizer::kMathAtan:
    case MethodRecognizer::kMathAtan2:
    case MethodRecognizer::kMathExp:
    case MethodRecognizer::kMathLog:
    case MethodRecognizer::kMathSqrt:
      return FlowGraphCompiler::SupportsUnboxedDoubles();
    case MethodRecognizer::kDoubleCeilToInt:
    case MethodRecognizer::kDoubleFloorToInt:
      if (!FlowGraphCompiler::SupportsUnboxedDoubles()) return false;
#if defined(TARGET_ARCH_X64)
      return CompilerState::Current().is_aot() || FLAG_target_unknown_cpu;
#elif defined(TARGET_ARCH_ARM64) || defined(TARGET_ARCH_RISCV32) ||            \
    defined(TARGET_ARCH_RISCV64)
      return true;
#else
      return false;
#endif
    default:
      return false;
  }
}

bool FlowGraphBuilder::IsExpressionTempVarUsedInRecognizedMethodFlowGraph(
    const Function& function) {
  ASSERT(IsRecognizedMethodForFlowGraph(function));
  switch (function.recognized_kind()) {
    case MethodRecognizer::kStringBaseCodeUnitAt:
      return true;
    default:
      return false;
  }
}

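// Builds the hand-crafted IL for the recognized methods admitted by
// IsRecognizedMethodForFlowGraph above; each case leaves its result on the
// expression stack.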
1172FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
1173 const Function& function) {
1175
1176 graph_entry_ =
1177 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
1178
1179 auto normal_entry = BuildFunctionEntry(graph_entry_);
1180 graph_entry_->set_normal_entry(normal_entry);
1181
1182 PrologueInfo prologue_info(-1, -1);
1183 BlockEntryInstr* instruction_cursor =
1184 BuildPrologue(normal_entry, &prologue_info);
1185
1186 Fragment body(instruction_cursor);
1187 body += CheckStackOverflowInPrologue(function.token_pos());
1188
1189 if (function.IsDynamicInvocationForwarder()) {
1190 body += BuildDefaultTypeHandling(function);
1191 BuildTypeArgumentTypeChecks(
1193 BuildArgumentTypeChecks(&body, &body, nullptr);
1194 }
1195
1196 const MethodRecognizer::Kind kind = function.recognized_kind();
1197 switch (kind) {
1198#define TYPED_DATA_GET_INDEXED_CASES(clazz) \
1199 case MethodRecognizer::k##clazz##ArrayGetIndexed: \
1200 FALL_THROUGH; \
1201 case MethodRecognizer::kExternal##clazz##ArrayGetIndexed: \
1202 FALL_THROUGH; \
1203 case MethodRecognizer::k##clazz##ArrayViewGetIndexed: \
1204 FALL_THROUGH;
1206#undef TYPED_DATA_GET_INDEXED_CASES
1207 case MethodRecognizer::kObjectArrayGetIndexed:
1208 case MethodRecognizer::kGrowableArrayGetIndexed: {
1209 ASSERT_EQUAL(function.NumParameters(), 2);
1210 intptr_t array_cid = MethodRecognizer::MethodKindToReceiverCid(kind);
1211 const Representation elem_rep =
1213 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1215 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1216 body += GenericCheckBound();
1217 LocalVariable* safe_index = MakeTemporary();
1218 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1219 if (IsTypedDataBaseClassId(array_cid) && !CanUnboxElements(array_cid)) {
1220 const auto& native_function =
1221 TypedListGetNativeFunction(thread_, array_cid);
1222 body += LoadLocal(safe_index);
1223 body += UnboxTruncate(kUnboxedIntPtr);
1226 body += BinaryIntegerOp(Token::kSHL, kUnboxedIntPtr,
1227 /*is_truncating=*/true);
1228 body += StaticCall(TokenPosition::kNoSource, native_function, 2,
1229 ICData::kNoRebind);
1230 } else {
1231 if (kind == MethodRecognizer::kGrowableArrayGetIndexed) {
1232 body += LoadNativeField(Slot::GrowableObjectArray_data());
1233 array_cid = kArrayCid;
1234 } else if (IsExternalTypedDataClassId(array_cid)) {
1235 body += LoadNativeField(Slot::PointerBase_data(),
1237 }
1238 body += LoadLocal(safe_index);
1239 body +=
1240 LoadIndexed(array_cid,
1241 /*index_scale=*/
1242 compiler::target::Instance::ElementSizeFor(array_cid),
1243 /*index_unboxed=*/
1245 if (elem_rep == kUnboxedFloat) {
1246 body += FloatToDouble();
1247 }
1248 }
1249 body += DropTempsPreserveTop(1); // Drop [safe_index], keep result.
1250 break;
1251 }
1252 case MethodRecognizer::kRecord_fieldAt:
1253 ASSERT_EQUAL(function.NumParameters(), 2);
1254 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1255 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1256 body += LoadIndexed(
1257 kRecordCid, /*index_scale*/ compiler::target::kCompressedWordSize);
1258 break;
1259 case MethodRecognizer::kRecord_fieldNames:
1260 body += LoadObjectStore();
1261 body += LoadNativeField(Slot::ObjectStore_record_field_names());
1262 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1263 body += LoadNativeField(Slot::Record_shape());
1264 body += IntConstant(compiler::target::RecordShape::kFieldNamesIndexShift);
1265 body += SmiBinaryOp(Token::kSHR);
1266 body += IntConstant(compiler::target::RecordShape::kFieldNamesIndexMask);
1267 body += SmiBinaryOp(Token::kBIT_AND);
1268 body += LoadIndexed(
1269 kArrayCid, /*index_scale=*/compiler::target::kCompressedWordSize);
1270 break;
1271 case MethodRecognizer::kRecord_numFields:
1272 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1273 body += LoadNativeField(Slot::Record_shape());
1274 body += IntConstant(compiler::target::RecordShape::kNumFieldsMask);
1275 body += SmiBinaryOp(Token::kBIT_AND);
1276 break;
1277 case MethodRecognizer::kSuspendState_clone: {
1278 ASSERT_EQUAL(function.NumParameters(), 1);
1279 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1280 body += Call1ArgStub(TokenPosition::kNoSource,
1282 break;
1283 }
1284 case MethodRecognizer::kSuspendState_resume: {
1285 const Code& resume_stub =
1286 Code::ZoneHandle(Z, IG->object_store()->resume_stub());
1287 body += NullConstant();
1288 body += TailCall(resume_stub);
1289 break;
1290 }
1291 case MethodRecognizer::kTypedList_GetInt8:
1292 body += BuildTypedListGet(function, kTypedDataInt8ArrayCid);
1293 break;
1294 case MethodRecognizer::kTypedList_SetInt8:
1295 body += BuildTypedListSet(function, kTypedDataInt8ArrayCid);
1296 break;
1297 case MethodRecognizer::kTypedList_GetUint8:
1298 body += BuildTypedListGet(function, kTypedDataUint8ArrayCid);
1299 break;
1300 case MethodRecognizer::kTypedList_SetUint8:
1301 body += BuildTypedListSet(function, kTypedDataUint8ArrayCid);
1302 break;
1303 case MethodRecognizer::kTypedList_GetInt16:
1304 body += BuildTypedListGet(function, kTypedDataInt16ArrayCid);
1305 break;
1306 case MethodRecognizer::kTypedList_SetInt16:
1307 body += BuildTypedListSet(function, kTypedDataInt16ArrayCid);
1308 break;
1309 case MethodRecognizer::kTypedList_GetUint16:
1310 body += BuildTypedListGet(function, kTypedDataUint16ArrayCid);
1311 break;
1312 case MethodRecognizer::kTypedList_SetUint16:
1313 body += BuildTypedListSet(function, kTypedDataUint16ArrayCid);
1314 break;
1315 case MethodRecognizer::kTypedList_GetInt32:
1316 body += BuildTypedListGet(function, kTypedDataInt32ArrayCid);
1317 break;
1318 case MethodRecognizer::kTypedList_SetInt32:
1319 body += BuildTypedListSet(function, kTypedDataInt32ArrayCid);
1320 break;
1321 case MethodRecognizer::kTypedList_GetUint32:
1322 body += BuildTypedListGet(function, kTypedDataUint32ArrayCid);
1323 break;
1324 case MethodRecognizer::kTypedList_SetUint32:
1325 body += BuildTypedListSet(function, kTypedDataUint32ArrayCid);
1326 break;
1327 case MethodRecognizer::kTypedList_GetInt64:
1328 body += BuildTypedListGet(function, kTypedDataInt64ArrayCid);
1329 break;
1330 case MethodRecognizer::kTypedList_SetInt64:
1331 body += BuildTypedListSet(function, kTypedDataInt64ArrayCid);
1332 break;
1333 case MethodRecognizer::kTypedList_GetUint64:
1334 body += BuildTypedListGet(function, kTypedDataUint64ArrayCid);
1335 break;
1336 case MethodRecognizer::kTypedList_SetUint64:
1337 body += BuildTypedListSet(function, kTypedDataUint64ArrayCid);
1338 break;
1339 case MethodRecognizer::kTypedList_GetFloat32:
1340 body += BuildTypedListGet(function, kTypedDataFloat32ArrayCid);
1341 break;
1342 case MethodRecognizer::kTypedList_SetFloat32:
1343 body += BuildTypedListSet(function, kTypedDataFloat32ArrayCid);
1344 break;
1345 case MethodRecognizer::kTypedList_GetFloat64:
1346 body += BuildTypedListGet(function, kTypedDataFloat64ArrayCid);
1347 break;
1348 case MethodRecognizer::kTypedList_SetFloat64:
1349 body += BuildTypedListSet(function, kTypedDataFloat64ArrayCid);
1350 break;
1351 case MethodRecognizer::kTypedList_GetInt32x4:
1352 body += BuildTypedListGet(function, kTypedDataInt32x4ArrayCid);
1353 break;
1354 case MethodRecognizer::kTypedList_SetInt32x4:
1355 body += BuildTypedListSet(function, kTypedDataInt32x4ArrayCid);
1356 break;
1357 case MethodRecognizer::kTypedList_GetFloat32x4:
1358 body += BuildTypedListGet(function, kTypedDataFloat32x4ArrayCid);
1359 break;
1360 case MethodRecognizer::kTypedList_SetFloat32x4:
1361 body += BuildTypedListSet(function, kTypedDataFloat32x4ArrayCid);
1362 break;
1363 case MethodRecognizer::kTypedList_GetFloat64x2:
1364 body += BuildTypedListGet(function, kTypedDataFloat64x2ArrayCid);
1365 break;
1366 case MethodRecognizer::kTypedList_SetFloat64x2:
1367 body += BuildTypedListSet(function, kTypedDataFloat64x2ArrayCid);
1368 break;
1369 case MethodRecognizer::kTypedData_memMove1:
1370 body += BuildTypedDataMemMove(function, kTypedDataInt8ArrayCid);
1371 break;
1372 case MethodRecognizer::kTypedData_memMove2:
1373 body += BuildTypedDataMemMove(function, kTypedDataInt16ArrayCid);
1374 break;
1375 case MethodRecognizer::kTypedData_memMove4:
1376 body += BuildTypedDataMemMove(function, kTypedDataInt32ArrayCid);
1377 break;
1378 case MethodRecognizer::kTypedData_memMove8:
1379 body += BuildTypedDataMemMove(function, kTypedDataInt64ArrayCid);
1380 break;
1381 case MethodRecognizer::kTypedData_memMove16:
1382 body += BuildTypedDataMemMove(function, kTypedDataInt32x4ArrayCid);
1383 break;
1384#define CASE(name) \
1385 case MethodRecognizer::kTypedData_##name##_factory: \
1386 body += BuildTypedDataFactoryConstructor(function, kTypedData##name##Cid); \
1387 break; \
1388 case MethodRecognizer::kTypedData_##name##View_factory: \
1389 body += BuildTypedDataViewFactoryConstructor(function, \
1390 kTypedData##name##ViewCid); \
1391 break; \
1392 case MethodRecognizer::kTypedData_Unmodifiable##name##View_factory: \
1393 body += BuildTypedDataViewFactoryConstructor( \
1394 function, kUnmodifiableTypedData##name##ViewCid); \
1395 break;
1397#undef CASE
1398 case MethodRecognizer::kTypedData_ByteDataView_factory:
1399 body += BuildTypedDataViewFactoryConstructor(function, kByteDataViewCid);
1400 break;
1401 case MethodRecognizer::kTypedData_UnmodifiableByteDataView_factory:
1402 body += BuildTypedDataViewFactoryConstructor(
1404 break;
1405 case MethodRecognizer::kObjectEquals:
1406 ASSERT_EQUAL(function.NumParameters(), 2);
1407 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1408 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1409 body += StrictCompare(Token::kEQ_STRICT);
1410 break;
1411 case MethodRecognizer::kStringBaseCodeUnitAt: {
1412 ASSERT_EQUAL(function.NumParameters(), 2);
1413 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1414 body += LoadNativeField(Slot::String_length());
1415 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1416 body += GenericCheckBound();
1417 LocalVariable* safe_index = MakeTemporary();
1418
1419 JoinEntryInstr* done = BuildJoinEntry();
1420 LocalVariable* result = parsed_function_->expression_temp_var();
1421 TargetEntryInstr* one_byte_string;
1422 TargetEntryInstr* two_byte_string;
1423 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1424 body += LoadClassId();
1425 body += IntConstant(kOneByteStringCid);
1426 body += BranchIfEqual(&one_byte_string, &two_byte_string);
1427
1428 body.current = one_byte_string;
1429 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1430 body += LoadLocal(safe_index);
1431 body += LoadIndexed(
1432 kOneByteStringCid,
1433 /*index_scale=*/
1434 compiler::target::Instance::ElementSizeFor(kOneByteStringCid),
1436 body += StoreLocal(TokenPosition::kNoSource, result);
1437 body += Drop();
1438 body += Goto(done);
1439
1440 body.current = two_byte_string;
1441 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1442 body += LoadLocal(safe_index);
1443 body += LoadIndexed(
1444 kTwoByteStringCid,
1445 /*index_scale=*/
1446 compiler::target::Instance::ElementSizeFor(kTwoByteStringCid),
1448 body += StoreLocal(TokenPosition::kNoSource, result);
1449 body += Drop();
1450 body += Goto(done);
1451
1452 body.current = done;
1453 body += DropTemporary(&safe_index);
1454 body += LoadLocal(result);
1455 } break;
1456 case MethodRecognizer::kStringBaseLength:
1457 case MethodRecognizer::kStringBaseIsEmpty:
1458 ASSERT_EQUAL(function.NumParameters(), 1);
1459 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1460 body += LoadNativeField(Slot::String_length());
1461 if (kind == MethodRecognizer::kStringBaseIsEmpty) {
1462 body += IntConstant(0);
1463 body += StrictCompare(Token::kEQ_STRICT);
1464 }
1465 break;
1466 case MethodRecognizer::kClassIDgetID:
1467 ASSERT_EQUAL(function.NumParameters(), 1);
1468 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1469 body += LoadClassId();
1470 break;
1471 case MethodRecognizer::kGrowableArrayAllocateWithData: {
1472 ASSERT(function.IsFactory());
1473 ASSERT_EQUAL(function.NumParameters(), 2);
1474 const Class& cls =
1476 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1477 body += AllocateObject(TokenPosition::kNoSource, cls, 1);
1478 LocalVariable* object = MakeTemporary();
1479 body += LoadLocal(object);
1480 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1481 body += StoreNativeField(Slot::GrowableObjectArray_data(),
1484 body += LoadLocal(object);
1485 body += IntConstant(0);
1486 body += StoreNativeField(Slot::GrowableObjectArray_length(),
1489 break;
1490 }
1491 case MethodRecognizer::kGrowableArrayCapacity:
1492 ASSERT_EQUAL(function.NumParameters(), 1);
1493 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1494 body += LoadNativeField(Slot::GrowableObjectArray_data());
1495 body += LoadNativeField(Slot::Array_length());
1496 break;
1497 case MethodRecognizer::kObjectArrayAllocate:
1498 ASSERT(function.IsFactory() && (function.NumParameters() == 2));
1499 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1500 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1501 body += CreateArray();
1502 break;
1503 case MethodRecognizer::kCopyRangeFromUint8ListToOneByteString:
1504 ASSERT_EQUAL(function.NumParameters(), 5);
1505 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1506 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1507 body += LoadLocal(parsed_function_->RawParameterVariable(2));
1508 body += LoadLocal(parsed_function_->RawParameterVariable(3));
1509 body += LoadLocal(parsed_function_->RawParameterVariable(4));
1510 body += MemoryCopy(kTypedDataUint8ArrayCid, kOneByteStringCid,
1511 /*unboxed_inputs=*/false,
1512 /*can_overlap=*/false);
1513 body += NullConstant();
1514 break;
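// Note: MemoryCopy consumes its inputs as (source, dest, source_start,
// dest_start, length), matching the order the five parameters are pushed
// above (and in the kMemCopy case below).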
1515 case MethodRecognizer::kImmutableLinkedHashBase_setIndexStoreRelease:
1516 ASSERT_EQUAL(function.NumParameters(), 2);
1517 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1518 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1519 // Uses a store-release barrier so that other isolates will see the
1520 // contents of the index after seeing the index itself.
1521 body += StoreNativeField(Slot::ImmutableLinkedHashBase_index(),
1522 StoreFieldInstr::Kind::kOther, kEmitStoreBarrier,
1523 compiler::Assembler::kRelease);
1524 body += NullConstant();
1525 break;
1526 case MethodRecognizer::kUtf8DecoderScan:
1527 ASSERT_EQUAL(function.NumParameters(), 5);
1528 body += LoadLocal(parsed_function_->RawParameterVariable(0)); // decoder
1529 body += LoadLocal(parsed_function_->RawParameterVariable(1)); // bytes
1530 body += LoadLocal(parsed_function_->RawParameterVariable(2)); // start
1532 body += UnboxTruncate(kUnboxedIntPtr);
1533 body += LoadLocal(parsed_function_->RawParameterVariable(3)); // end
1535 body += UnboxTruncate(kUnboxedIntPtr);
1536 body += LoadLocal(parsed_function_->RawParameterVariable(4)); // table
1537 body += Utf8Scan();
1538 body += Box(kUnboxedIntPtr);
1539 break;
1540 case MethodRecognizer::kMemCopy: {
1541 ASSERT_EQUAL(function.NumParameters(), 5);
1542 LocalVariable* arg_target = parsed_function_->RawParameterVariable(0);
1543 LocalVariable* arg_target_offset_in_bytes =
1544 parsed_function_->RawParameterVariable(1);
1545 LocalVariable* arg_source = parsed_function_->RawParameterVariable(2);
1546 LocalVariable* arg_source_offset_in_bytes =
1547 parsed_function_->RawParameterVariable(3);
1548 LocalVariable* arg_length_in_bytes =
1549 parsed_function_->RawParameterVariable(4);
1550 body += LoadLocal(arg_source);
1551 body += LoadLocal(arg_target);
1552 body += LoadLocal(arg_source_offset_in_bytes);
1553 body += UnboxTruncate(kUnboxedIntPtr);
1554 body += LoadLocal(arg_target_offset_in_bytes);
1555 body += UnboxTruncate(kUnboxedIntPtr);
1556 body += LoadLocal(arg_length_in_bytes);
1557 body += UnboxTruncate(kUnboxedIntPtr);
1558 body += MemoryCopy(kTypedDataUint8ArrayCid, kTypedDataUint8ArrayCid,
1559 /*unboxed_inputs=*/true,
1560 /*can_overlap=*/true);
1561 body += NullConstant();
1562 } break;
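// Unlike the one-byte-string copy above, this passes /*can_overlap=*/true,
// so the generated copy must have memmove rather than memcpy semantics.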
1563 case MethodRecognizer::kFfiAbi:
1564 ASSERT_EQUAL(function.NumParameters(), 0);
1565 body += IntConstant(static_cast<int64_t>(compiler::ffi::TargetAbi()));
1566 break;
1567 case MethodRecognizer::kFfiNativeCallbackFunction:
1568 case MethodRecognizer::kFfiNativeAsyncCallbackFunction:
1569 case MethodRecognizer::kFfiNativeIsolateLocalCallbackFunction: {
1570 const auto& error = String::ZoneHandle(
1571 Z, Symbols::New(thread_,
1572 "This function should be handled on call site."));
1573 body += Constant(error);
1574 body += ThrowException(TokenPosition::kNoSource);
1575 break;
1576 }
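// These recognizers are expected to be expanded when the call site itself is
// compiled (see the error text above); an un-expanded invocation simply
// throws at runtime.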
1577 case MethodRecognizer::kFfiLoadInt8:
1578 case MethodRecognizer::kFfiLoadInt16:
1579 case MethodRecognizer::kFfiLoadInt32:
1580 case MethodRecognizer::kFfiLoadInt64:
1581 case MethodRecognizer::kFfiLoadUint8:
1582 case MethodRecognizer::kFfiLoadUint16:
1583 case MethodRecognizer::kFfiLoadUint32:
1584 case MethodRecognizer::kFfiLoadUint64:
1585 case MethodRecognizer::kFfiLoadFloat:
1586 case MethodRecognizer::kFfiLoadFloatUnaligned:
1587 case MethodRecognizer::kFfiLoadDouble:
1588 case MethodRecognizer::kFfiLoadDoubleUnaligned:
1589 case MethodRecognizer::kFfiLoadPointer: {
1590 const classid_t ffi_type_arg_cid =
1591 compiler::ffi::RecognizedMethodTypeArgCid(kind);
1592 const AlignmentType alignment =
1593 compiler::ffi::RecognizedMethodAlignment(kind);
1594 const classid_t typed_data_cid =
1595 compiler::ffi::ElementTypedDataCid(ffi_type_arg_cid);
1596
1597 ASSERT_EQUAL(function.NumParameters(), 2);
1598 // Argument can be a TypedData for loads on struct fields.
1599 LocalVariable* arg_typed_data_base =
1600 parsed_function_->RawParameterVariable(0);
1601 LocalVariable* arg_offset = parsed_function_->RawParameterVariable(1);
1602
1603 body += LoadLocal(arg_typed_data_base);
1605 body += LoadLocal(arg_offset);
1607 body += UnboxTruncate(kUnboxedIntPtr);
1608 body += LoadIndexed(typed_data_cid, /*index_scale=*/1,
1609 /*index_unboxed=*/true, alignment);
1610 if (kind == MethodRecognizer::kFfiLoadPointer) {
1611 const auto& pointer_class =
1612 Class::ZoneHandle(Z, IG->object_store()->ffi_pointer_class());
1613 const auto& type_arguments = TypeArguments::ZoneHandle(
1614 Z, IG->object_store()->type_argument_never());
1615
1616 // We do not reify Pointer type arguments
1617 ASSERT(function.NumTypeParameters() == 1);
1618 LocalVariable* address = MakeTemporary();
1619 body += Constant(type_arguments);
1620 body += AllocateObject(TokenPosition::kNoSource, pointer_class, 1);
1621 LocalVariable* pointer = MakeTemporary();
1622 body += LoadLocal(pointer);
1623 body += LoadLocal(address);
1626 body += ConvertUnboxedToUntagged();
1627 body += StoreNativeField(Slot::PointerBase_data(),
1628 InnerPointerAccess::kCannotBeInnerPointer,
1629 StoreFieldInstr::Kind::kInitializing);
1630 body += DropTempsPreserveTop(1); // Drop [address] keep [pointer].
1631 } else {
1632 // Avoid any unnecessary (and potentially deoptimizing) int
1633 // conversions by using the representation returned from LoadIndexed.
1634 body += Box(LoadIndexedInstr::ReturnRepresentation(typed_data_cid));
1635 }
1636 } break;
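// For example, an FFI load such as the Pointer<Int8> `value` getter bottoms
// out here as an unboxed-index LoadIndexed on the underlying memory; only
// kFfiLoadPointer additionally allocates a Pointer object for its result.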
1637 case MethodRecognizer::kFfiStoreInt8:
1638 case MethodRecognizer::kFfiStoreInt16:
1639 case MethodRecognizer::kFfiStoreInt32:
1640 case MethodRecognizer::kFfiStoreInt64:
1641 case MethodRecognizer::kFfiStoreUint8:
1642 case MethodRecognizer::kFfiStoreUint16:
1643 case MethodRecognizer::kFfiStoreUint32:
1644 case MethodRecognizer::kFfiStoreUint64:
1645 case MethodRecognizer::kFfiStoreFloat:
1646 case MethodRecognizer::kFfiStoreFloatUnaligned:
1647 case MethodRecognizer::kFfiStoreDouble:
1648 case MethodRecognizer::kFfiStoreDoubleUnaligned:
1649 case MethodRecognizer::kFfiStorePointer: {
1650 const classid_t ffi_type_arg_cid =
1651 compiler::ffi::RecognizedMethodTypeArgCid(kind);
1652 const AlignmentType alignment =
1653 compiler::ffi::RecognizedMethodAlignment(kind);
1654 const classid_t typed_data_cid =
1655 compiler::ffi::ElementTypedDataCid(ffi_type_arg_cid);
1656
1657 // Argument can be a TypedData for stores on struct fields.
1658 LocalVariable* arg_typed_data_base =
1659 parsed_function_->RawParameterVariable(0);
1660 LocalVariable* arg_offset = parsed_function_->RawParameterVariable(1);
1661 LocalVariable* arg_value = parsed_function_->RawParameterVariable(2);
1662
1663 ASSERT_EQUAL(function.NumParameters(), 3);
1664
1665 body += LoadLocal(arg_typed_data_base); // Pointer.
1667 body += LoadLocal(arg_offset);
1669 body += UnboxTruncate(kUnboxedIntPtr);
1670 body += LoadLocal(arg_value);
1672 if (kind == MethodRecognizer::kFfiStorePointer) {
1673 // This can only be Pointer, so it is safe to load the data field.
1674 body += LoadNativeField(Slot::PointerBase_data(),
1675 InnerPointerAccess::kCannotBeInnerPointer);
1676 body += ConvertUntaggedToUnboxed();
1679 } else {
1680 // Avoid any unnecessary (and potentially deoptimizing) int
1681 // conversions by using the representation consumed by StoreIndexed.
1682 body += UnboxTruncate(
1683 StoreIndexedInstr::ValueRepresentation(typed_data_cid));
1684 }
1685 body += StoreIndexedTypedData(typed_data_cid, /*index_scale=*/1,
1686 /*index_unboxed=*/true, alignment);
1687 body += NullConstant();
1688 } break;
1689 case MethodRecognizer::kFfiFromAddress: {
1690 const auto& pointer_class =
1691 Class::ZoneHandle(Z, IG->object_store()->ffi_pointer_class());
1692 const auto& type_arguments = TypeArguments::ZoneHandle(
1693 Z, IG->object_store()->type_argument_never());
1694
1695 ASSERT(function.NumTypeParameters() == 1);
1696 ASSERT_EQUAL(function.NumParameters(), 1);
1697 body += Constant(type_arguments);
1698 body += AllocateObject(TokenPosition::kNoSource, pointer_class, 1);
1699 body += LoadLocal(MakeTemporary()); // Duplicate Pointer.
1700 body += LoadLocal(parsed_function_->RawParameterVariable(0)); // Address.
1702 // Use the same representation as FfiGetAddress so that the conversions
1703 // in Pointer.fromAddress(address).address cancel out if the temporary
1704 // Pointer allocation is removed.
1705 body += UnboxTruncate(kUnboxedAddress);
1706 body += ConvertUnboxedToUntagged();
1707 body += StoreNativeField(Slot::PointerBase_data(),
1708 InnerPointerAccess::kCannotBeInnerPointer,
1709 StoreFieldInstr::Kind::kInitializing);
1710 } break;
1711 case MethodRecognizer::kFfiGetAddress: {
1712 ASSERT_EQUAL(function.NumParameters(), 1);
1713 body += LoadLocal(parsed_function_->RawParameterVariable(0)); // Pointer.
1715 // This can only be Pointer, so it is safe to load the data field.
1716 body += LoadNativeField(Slot::PointerBase_data(),
1717 InnerPointerAccess::kCannotBeInnerPointer);
1718 body += ConvertUntaggedToUnboxed();
1719 body += Box(kUnboxedAddress);
1720 } break;
1721 case MethodRecognizer::kHas63BitSmis: {
1722#if defined(HAS_SMI_63_BITS)
1723 body += Constant(Bool::True());
1724#else
1725 body += Constant(Bool::False());
1726#endif // defined(HAS_SMI_63_BITS)
1727 } break;
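// HAS_SMI_63_BITS holds on 64-bit targets without compressed pointers; with
// compressed pointers, Smis keep 31 bits even on 64-bit architectures.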
1728 case MethodRecognizer::kExtensionStreamHasListener: {
1729#ifdef PRODUCT
1730 body += Constant(Bool::False());
1731#else
1732 body += LoadServiceExtensionStream();
1733 body += LoadNativeField(Slot::StreamInfo_enabled());
1734 // StreamInfo::enabled_ is a std::atomic<intptr_t>. This is effectively
1735 // relaxed order access, which is acceptable for this use case.
1736 body += IntToBool();
1737#endif // PRODUCT
1738 } break;
1739 case MethodRecognizer::kSmi_hashCode: {
1740 // TODO(dartbug.com/38985): We should make this LoadLocal+Unbox+
1741 // IntegerHash+Box. Though this would make use of unboxed values on stack
1742 // which isn't allowed in unoptimized mode.
1743 // Once force-optimized functions can be inlined, we should change this
1744 // code to the above.
1745 ASSERT_EQUAL(function.NumParameters(), 1);
1746 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1747 body += BuildIntegerHashCode(/*smi=*/true);
1748 } break;
1749 case MethodRecognizer::kMint_hashCode: {
1750 ASSERT_EQUAL(function.NumParameters(), 1);
1751 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1752 body += BuildIntegerHashCode(/*smi=*/false);
1753 } break;
1754 case MethodRecognizer::kDouble_hashCode: {
1755 ASSERT_EQUAL(function.NumParameters(), 1);
1756 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1757 body += UnboxTruncate(kUnboxedDouble);
1758 body += BuildDoubleHashCode();
1759 body += Box(kUnboxedInt64);
1760 } break;
1761 case MethodRecognizer::kFfiAsExternalTypedDataInt8:
1762 case MethodRecognizer::kFfiAsExternalTypedDataInt16:
1763 case MethodRecognizer::kFfiAsExternalTypedDataInt32:
1764 case MethodRecognizer::kFfiAsExternalTypedDataInt64:
1765 case MethodRecognizer::kFfiAsExternalTypedDataUint8:
1766 case MethodRecognizer::kFfiAsExternalTypedDataUint16:
1767 case MethodRecognizer::kFfiAsExternalTypedDataUint32:
1768 case MethodRecognizer::kFfiAsExternalTypedDataUint64:
1769 case MethodRecognizer::kFfiAsExternalTypedDataFloat:
1770 case MethodRecognizer::kFfiAsExternalTypedDataDouble: {
1771 const classid_t ffi_type_arg_cid =
1772 compiler::ffi::RecognizedMethodTypeArgCid(kind);
1773 const classid_t external_typed_data_cid =
1774 compiler::ffi::ElementExternalTypedDataCid(ffi_type_arg_cid);
1775
1776 auto class_table = thread_->isolate_group()->class_table();
1777 ASSERT(class_table->HasValidClassAt(external_typed_data_cid));
1778 const auto& typed_data_class =
1779 Class::ZoneHandle(H.zone(), class_table->At(external_typed_data_cid));
1780
1781 // We assume that the caller has checked that the arguments are non-null
1782 // and length is in the range [0, kSmiMax/elementSize].
1783 ASSERT_EQUAL(function.NumParameters(), 2);
1784 LocalVariable* arg_pointer = parsed_function_->RawParameterVariable(0);
1785 LocalVariable* arg_length = parsed_function_->RawParameterVariable(1);
1786
1787 body += AllocateObject(TokenPosition::kNoSource, typed_data_class, 0);
1788 LocalVariable* typed_data_object = MakeTemporary();
1789
1790 // Initialize the result's length field.
1791 body += LoadLocal(typed_data_object);
1792 body += LoadLocal(arg_length);
1793 body += StoreNativeField(Slot::TypedDataBase_length(),
1794 StoreFieldInstr::Kind::kInitializing,
1795 kNoStoreBarrier);
1796
1797 // Initialize the result's data pointer field.
1798 body += LoadLocal(typed_data_object);
1799 body += LoadLocal(arg_pointer);
1800 body += LoadNativeField(Slot::PointerBase_data(),
1801 InnerPointerAccess::kCannotBeInnerPointer);
1802 body += StoreNativeField(Slot::PointerBase_data(),
1803 InnerPointerAccess::kCannotBeInnerPointer,
1804 StoreFieldInstr::Kind::kInitializing);
1805 } break;
1806 case MethodRecognizer::kGetNativeField: {
1807 auto& name = String::ZoneHandle(Z, function.name());
1808 // Note: This method is force optimized so we can push untagged, etc.
1809 // Load TypedDataArray from Instance Handle implementing
1810 // NativeFieldWrapper.
1811 body += LoadLocal(parsed_function_->RawParameterVariable(0)); // Object.
1812 body += CheckNullOptimized(name);
1813 body += LoadNativeField(Slot::Instance_native_fields_array()); // Fields.
1814 body += CheckNullOptimized(name);
1815 // Load the native field at index.
1816 body += IntConstant(0); // Index.
1817 body += LoadIndexed(kIntPtrCid);
1818 body += Box(kUnboxedIntPtr);
1819 } break;
1820 case MethodRecognizer::kDoubleToInteger:
1821 case MethodRecognizer::kDoubleCeilToInt:
1822 case MethodRecognizer::kDoubleFloorToInt: {
1823 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1824 body += DoubleToInteger(kind);
1825 } break;
1826 case MethodRecognizer::kDoubleMod:
1827 case MethodRecognizer::kDoubleRoundToDouble:
1828 case MethodRecognizer::kDoubleTruncateToDouble:
1829 case MethodRecognizer::kDoubleFloorToDouble:
1830 case MethodRecognizer::kDoubleCeilToDouble:
1831 case MethodRecognizer::kMathDoublePow:
1832 case MethodRecognizer::kMathSin:
1833 case MethodRecognizer::kMathCos:
1834 case MethodRecognizer::kMathTan:
1835 case MethodRecognizer::kMathAsin:
1836 case MethodRecognizer::kMathAcos:
1837 case MethodRecognizer::kMathAtan:
1838 case MethodRecognizer::kMathAtan2:
1839 case MethodRecognizer::kMathExp:
1840 case MethodRecognizer::kMathLog: {
1841 for (intptr_t i = 0, n = function.NumParameters(); i < n; ++i) {
1842 body += LoadLocal(parsed_function_->RawParameterVariable(i));
1843 }
1844 if (!CompilerState::Current().is_aot() &&
1845 TargetCPUFeatures::double_truncate_round_supported() &&
1846 ((kind == MethodRecognizer::kDoubleTruncateToDouble) ||
1847 (kind == MethodRecognizer::kDoubleFloorToDouble) ||
1848 (kind == MethodRecognizer::kDoubleCeilToDouble))) {
1849 switch (kind) {
1850 case MethodRecognizer::kDoubleTruncateToDouble:
1851 body += UnaryDoubleOp(Token::kTRUNCATE);
1852 break;
1853 case MethodRecognizer::kDoubleFloorToDouble:
1854 body += UnaryDoubleOp(Token::kFLOOR);
1855 break;
1856 case MethodRecognizer::kDoubleCeilToDouble:
1857 body += UnaryDoubleOp(Token::kCEILING);
1858 break;
1859 default:
1860 UNREACHABLE();
1861 }
1862 } else {
1863 body += InvokeMathCFunction(kind, function.NumParameters());
1864 }
1865 } break;
1866 case MethodRecognizer::kMathSqrt: {
1867 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1868 body += UnaryDoubleOp(Token::kSQRT);
1869 } break;
1870 case MethodRecognizer::kFinalizerBase_setIsolate:
1871 ASSERT_EQUAL(function.NumParameters(), 1);
1872 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1873 body += LoadIsolate();
1874 body += StoreNativeField(Slot::FinalizerBase_isolate(),
1875 InnerPointerAccess::kCannotBeInnerPointer);
1876 body += NullConstant();
1877 break;
1878 case MethodRecognizer::kFinalizerBase_getIsolateFinalizers:
1879 ASSERT_EQUAL(function.NumParameters(), 0);
1880 body += LoadIsolate();
1881 body += LoadNativeField(Slot::Isolate_finalizers());
1882 break;
1883 case MethodRecognizer::kFinalizerBase_setIsolateFinalizers:
1884 ASSERT_EQUAL(function.NumParameters(), 1);
1885 body += LoadIsolate();
1886 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1887 body += StoreNativeField(Slot::Isolate_finalizers());
1888 body += NullConstant();
1889 break;
1890 case MethodRecognizer::kFinalizerBase_exchangeEntriesCollectedWithNull:
1891 ASSERT_EQUAL(function.NumParameters(), 1);
1892 ASSERT(this->optimizing_);
1893 // This relies on being force-optimized to do an 'atomic' exchange w.r.t.
1894 // the GC.
1895 // As an alternative design we could introduce an ExchangeNativeFieldInstr
1896 // that uses the same machine code as std::atomic::exchange. Or we could
1897 // use a Native to do that in C.
1898 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1899 // No GC from here until StoreNativeField.
1900 body += LoadNativeField(Slot::FinalizerBase_entries_collected());
1901 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1902 body += NullConstant();
1903 body += StoreNativeField(Slot::FinalizerBase_entries_collected());
1904 break;
1905 case MethodRecognizer::kFinalizerEntry_allocate: {
1906 // Object value, Object token, Object detach, FinalizerBase finalizer
1907 ASSERT_EQUAL(function.NumParameters(), 4);
1908
1909 const auto class_table = thread_->isolate_group()->class_table();
1910 ASSERT(class_table->HasValidClassAt(kFinalizerEntryCid));
1911 const auto& finalizer_entry_class =
1912 Class::ZoneHandle(H.zone(), class_table->At(kFinalizerEntryCid));
1913
1914 body +=
1915 AllocateObject(TokenPosition::kNoSource, finalizer_entry_class, 0);
1916 LocalVariable* const entry = MakeTemporary("entry");
1917 // No GC from here to the end.
1918 body += LoadLocal(entry);
1919 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1920 body += StoreNativeField(Slot::FinalizerEntry_value());
1921 body += LoadLocal(entry);
1922 body += LoadLocal(parsed_function_->RawParameterVariable(1));
1923 body += StoreNativeField(Slot::FinalizerEntry_token());
1924 body += LoadLocal(entry);
1925 body += LoadLocal(parsed_function_->RawParameterVariable(2));
1926 body += StoreNativeField(Slot::FinalizerEntry_detach());
1927 body += LoadLocal(entry);
1928 body += LoadLocal(parsed_function_->RawParameterVariable(3));
1929 body += StoreNativeField(Slot::FinalizerEntry_finalizer());
1930 body += LoadLocal(entry);
1931 body += UnboxedIntConstant(0, kUnboxedInt64);
1932 body += StoreNativeField(Slot::FinalizerEntry_external_size());
1933 break;
1934 }
1935 case MethodRecognizer::kFinalizerEntry_getExternalSize:
1936 ASSERT_EQUAL(function.NumParameters(), 1);
1937 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1938 body += LoadNativeField(Slot::FinalizerEntry_external_size());
1939 body += Box(kUnboxedInt64);
1940 break;
1941 case MethodRecognizer::kCheckNotDeeplyImmutable:
1942 ASSERT_EQUAL(function.NumParameters(), 1);
1943 body += LoadLocal(parsed_function_->RawParameterVariable(0));
1944 body += CheckNotDeeplyImmutable(
1945 CheckWritableInstr::kDeeplyImmutableAttachNativeFinalizer);
1946 body += NullConstant();
1947 break;
1948#define IL_BODY(method, slot) \
1949 case MethodRecognizer::k##method: \
1950 ASSERT_EQUAL(function.NumParameters(), 1); \
1951 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1952 body += LoadNativeField(Slot::slot()); \
1953 break;
1954 LOAD_NATIVE_FIELD(IL_BODY)
1955#undef IL_BODY
1956#define IL_BODY(method, slot) \
1957 case MethodRecognizer::k##method: \
1958 ASSERT_EQUAL(function.NumParameters(), 2); \
1959 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1960 body += LoadLocal(parsed_function_->RawParameterVariable(1)); \
1961 body += StoreNativeField(Slot::slot()); \
1962 body += NullConstant(); \
1963 break;
1964 STORE_NATIVE_FIELD(IL_BODY)
1965#undef IL_BODY
1966#define IL_BODY(method, slot) \
1967 case MethodRecognizer::k##method: \
1968 ASSERT_EQUAL(function.NumParameters(), 2); \
1969 body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1970 body += LoadLocal(parsed_function_->RawParameterVariable(1)); \
1971 body += StoreNativeField(Slot::slot(), StoreFieldInstr::Kind::kOther, \
1972 kNoStoreBarrier); \
1973 body += NullConstant(); \
1974 break;
1975 STORE_NATIVE_FIELD_NO_BARRIER(IL_BODY)
1976#undef IL_BODY
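// The three IL_BODY tables above expand into one-load getters and one-store
// setters for recognized native fields; the last table performs its store
// with kNoStoreBarrier for fields whose values never require a barrier.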
1977 default: {
1978 UNREACHABLE();
1979 break;
1980 }
1981 }
1982
1983 if (body.is_open()) {
1984 body +=
1985 Return(TokenPosition::kNoSource, /* omit_result_type_check = */ true);
1986 }
1987
1988 return new (Z)
1989 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
1990 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
1991}
1992
1993Fragment FlowGraphBuilder::BuildTypedDataViewFactoryConstructor(
1994 const Function& function,
1995 classid_t cid) {
1996 auto token_pos = function.token_pos();
1997 auto class_table = Thread::Current()->isolate_group()->class_table();
1998
1999 ASSERT(class_table->HasValidClassAt(cid));
2000 const auto& view_class = Class::ZoneHandle(H.zone(), class_table->At(cid));
2001
2002 ASSERT(function.IsFactory() && (function.NumParameters() == 4));
2003 LocalVariable* typed_data = parsed_function_->RawParameterVariable(1);
2004 LocalVariable* offset_in_bytes = parsed_function_->RawParameterVariable(2);
2005 LocalVariable* length = parsed_function_->RawParameterVariable(3);
2006
2007 Fragment body;
2008
2009 // Note that we do no input checking here before allocation. The factory is
2010 // private, and only called by other code in the library implementation.
2011 // Thus, either the inputs are checked within Dart code before the factory is
2012 // called (e.g., the implementation of XList.sublistView), or the inputs to
2013 // the factory are retrieved from previously constructed TypedData objects
2014 // and thus already checked (e.g., the implementation of the
2015 // UnmodifiableXListView constructors).
2016
2017 body += AllocateObject(token_pos, view_class, /*arg_count=*/0);
2018 LocalVariable* view_object = MakeTemporary();
2019
2020 body += LoadLocal(view_object);
2021 body += LoadLocal(typed_data);
2022 body += StoreNativeField(token_pos, Slot::TypedDataView_typed_data(),
2023 StoreFieldInstr::Kind::kInitializing);
2024
2025 body += LoadLocal(view_object);
2026 body += LoadLocal(offset_in_bytes);
2027 body +=
2028 StoreNativeField(token_pos, Slot::TypedDataView_offset_in_bytes(),
2029 StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
2030
2031 body += LoadLocal(view_object);
2032 body += LoadLocal(length);
2033 body +=
2034 StoreNativeField(token_pos, Slot::TypedDataBase_length(),
2035 StoreFieldInstr::Kind::kInitializing, kNoStoreBarrier);
2036
2037 // First unbox the offset in bytes prior to the unsafe untagged load to avoid
2038 // any boxes being inserted between the load and its use. While any such box
2039 // is eventually canonicalized away, the FlowGraphChecker runs after every
2040 // pass in DEBUG mode and may see the box before canonicalization happens.
2041 body += LoadLocal(offset_in_bytes);
2042 body += UnboxTruncate(kUnboxedIntPtr);
2043 LocalVariable* unboxed_offset_in_bytes =
2044 MakeTemporary("unboxed_offset_in_bytes");
2045 // Now update the inner pointer.
2046 //
2047 // WARNING: Notice that we assume here no GC happens between the
2048 // LoadNativeField and the StoreNativeField, as the GC expects a properly
2049 // updated data field (see ScavengerVisitorBase::VisitTypedDataViewPointers).
2050 body += LoadLocal(view_object);
2051 body += LoadLocal(typed_data);
2052 body += LoadNativeField(Slot::PointerBase_data(),
2053 InnerPointerAccess::kMayBeInnerPointer);
2055 body += LoadLocal(unboxed_offset_in_bytes);
2056 body += CalculateElementAddress(/*index_scale=*/1);
2057 body += StoreNativeField(Slot::PointerBase_data(),
2058 InnerPointerAccess::kMayBeInnerPointer,
2059 StoreFieldInstr::Kind::kInitializing);
2060 body += DropTemporary(&unboxed_offset_in_bytes);
2061
2062 return body;
2063}
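// Dart-level sketch of one checked caller mentioned above:
//   // Uint8List.sublistView validates buffer/offset/length itself, then
//   // calls the private view factory whose body is built here.
//   final bytes = Uint8List.fromList([1, 2, 3, 4]);
//   final view = Uint8List.sublistView(bytes, 1, 3);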
2064
2065Fragment FlowGraphBuilder::BuildTypedListGet(const Function& function,
2066 classid_t cid) {
2067 const intptr_t kNumParameters = 2;
2068 ASSERT_EQUAL(parsed_function_->function().NumParameters(), kNumParameters);
2069 // Guaranteed to be non-null since it's only called internally from other
2070 // instance methods.
2071 LocalVariable* arg_receiver = parsed_function_->RawParameterVariable(0);
2072 // Guaranteed to be a non-null Smi due to bounds checks prior to call.
2073 LocalVariable* arg_offset_in_bytes =
2074 parsed_function_->RawParameterVariable(1);
2075
2076 Fragment body;
2077 if (CanUnboxElements(cid)) {
2078 body += LoadLocal(arg_receiver);
2079 body += LoadLocal(arg_offset_in_bytes);
2080 body += LoadIndexed(cid, /*index_scale=*/1,
2081 /*index_unboxed=*/false, kUnalignedAccess);
2082 body += Box(LoadIndexedInstr::ReturnRepresentation(cid));
2083 } else {
2084 const auto& native_function = TypedListGetNativeFunction(thread_, cid);
2085 body += LoadLocal(arg_receiver);
2086 body += LoadLocal(arg_offset_in_bytes);
2087 body += StaticCall(TokenPosition::kNoSource, native_function,
2088 kNumParameters, ICData::kNoRebind);
2089 }
2090 return body;
2091}
2092
2093static const Function& TypedListSetNativeFunction(Thread* thread,
2094 classid_t cid) {
2095 auto& state = thread->compiler_state();
2096 switch (StoreIndexedInstr::ValueRepresentation(cid)) {
2097 case kUnboxedFloat:
2098 return state.TypedListSetFloat32();
2099 case kUnboxedDouble:
2100 return state.TypedListSetFloat64();
2101 case kUnboxedInt32x4:
2102 return state.TypedListSetInt32x4();
2103 case kUnboxedFloat32x4:
2104 return state.TypedListSetFloat32x4();
2105 case kUnboxedFloat64x2:
2106 return state.TypedListSetFloat64x2();
2107 default:
2108 UNREACHABLE();
2109 return Object::null_function();
2110 }
2111}
2112
2113Fragment FlowGraphBuilder::BuildTypedListSet(const Function& function,
2114 classid_t cid) {
2115 const intptr_t kNumParameters = 3;
2116 ASSERT_EQUAL(parsed_function_->function().NumParameters(), kNumParameters);
2117 // Guaranteed to be non-null since it's only called internally from other
2118 // instance methods.
2119 LocalVariable* arg_receiver = parsed_function_->RawParameterVariable(0);
2120 // Guaranteed to be a non-null Smi due to bounds checks prior to call.
2121 LocalVariable* arg_offset_in_bytes =
2122 parsed_function_->RawParameterVariable(1);
2123 LocalVariable* arg_value = parsed_function_->RawParameterVariable(2);
2124
2125 Fragment body;
2126 if (CanUnboxElements(cid)) {
2127 body += LoadLocal(arg_receiver);
2128 body += LoadLocal(arg_offset_in_bytes);
2129 body += LoadLocal(arg_value);
2130 body +=
2131 CheckNullOptimized(Symbols::Value(), CheckNullInstr::kArgumentError);
2132 body += UnboxTruncate(StoreIndexedInstr::ValueRepresentation(cid));
2133 body += StoreIndexedTypedData(cid, /*index_scale=*/1,
2134 /*index_unboxed=*/false, kUnalignedAccess);
2135 body += NullConstant();
2136 } else {
2137 const auto& native_function = TypedListSetNativeFunction(thread_, cid);
2138 body += LoadLocal(arg_receiver);
2139 body += LoadLocal(arg_offset_in_bytes);
2140 body += LoadLocal(arg_value);
2141 body += StaticCall(TokenPosition::kNoSource, native_function,
2142 kNumParameters, ICData::kNoRebind);
2143 }
2144 return body;
2145}
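// BuildTypedListGet and BuildTypedListSet form a pair: cids whose elements
// can be unboxed (CanUnboxElements) use LoadIndexed/StoreIndexedTypedData
// inline, while the remaining cids fall back to a StaticCall of the
// recognized native function cached in CompilerState.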
2146
2147Fragment FlowGraphBuilder::BuildTypedDataMemMove(const Function& function,
2148 classid_t cid) {
2149 ASSERT_EQUAL(parsed_function_->function().NumParameters(), 5);
2150 LocalVariable* arg_to = parsed_function_->RawParameterVariable(0);
2151 LocalVariable* arg_to_start = parsed_function_->RawParameterVariable(1);
2152 LocalVariable* arg_count = parsed_function_->RawParameterVariable(2);
2153 LocalVariable* arg_from = parsed_function_->RawParameterVariable(3);
2154 LocalVariable* arg_from_start = parsed_function_->RawParameterVariable(4);
2155
2156 Fragment body;
2157 // If we're copying at least this many elements, calling memmove via CCall
2158 // is faster than using the code currently emitted by MemoryCopy.
2159#if defined(TARGET_ARCH_X64) || defined(TARGET_ARCH_IA32)
2160 // On X86, the break-even point for using CCall instead of generating a
2161 // loop via MemoryCopy() is around the same as the largest benchmark
2162 // (1048576 elements) on the machines we use.
2163 const intptr_t kCopyLengthForCCall = 1024 * 1024;
2164#else
2165 // On other architectures, when the element size is less than a word,
2166 // we copy in word-sized chunks when possible to get back some speed without
2167 // increasing the number of emitted instructions for MemoryCopy too much, but
2168 // memmove is even more aggressive, copying in 64-byte chunks when possible.
2169 // Thus, the break-even point at which a call to memmove is faster is much
2170 // lower for our benchmarks than for X86.
2171 const intptr_t kCopyLengthForCCall = 1024;
2172#endif
2173
2174 JoinEntryInstr* done = BuildJoinEntry();
2175 TargetEntryInstr *is_small_enough, *is_too_large;
2176 body += LoadLocal(arg_count);
2177 body += IntConstant(kCopyLengthForCCall);
2178 body += SmiRelationalOp(Token::kLT);
2179 body += BranchIfTrue(&is_small_enough, &is_too_large);
2180
2181 Fragment use_instruction(is_small_enough);
2182 use_instruction += LoadLocal(arg_from);
2183 use_instruction += LoadLocal(arg_to);
2184 use_instruction += LoadLocal(arg_from_start);
2185 use_instruction += LoadLocal(arg_to_start);
2186 use_instruction += LoadLocal(arg_count);
2187 use_instruction += MemoryCopy(cid, cid,
2188 /*unboxed_inputs=*/false, /*can_overlap=*/true);
2189 use_instruction += Goto(done);
2190
2191 Fragment call_memmove(is_too_large);
2192 const intptr_t element_size = Instance::ElementSizeFor(cid);
2193 auto* const arg_reps =
2194 new (zone_) ZoneGrowableArray<Representation>(zone_, 3);
2195 // First unbox the arguments to avoid any boxes being inserted between unsafe
2196 // untagged loads and their uses. Also adjust the length to be in bytes, since
2197 // that's what memmove expects.
2198 call_memmove += LoadLocal(arg_to_start);
2199 call_memmove += UnboxTruncate(kUnboxedIntPtr);
2200 LocalVariable* to_start_unboxed = MakeTemporary("to_start_unboxed");
2201 call_memmove += LoadLocal(arg_from_start);
2202 call_memmove += UnboxTruncate(kUnboxedIntPtr);
2203 LocalVariable* from_start_unboxed = MakeTemporary("from_start_unboxed");
2204 // Used for length in bytes calculations, since memmove expects a size_t.
2205 const Representation size_rep = kUnboxedUword;
2206 call_memmove += LoadLocal(arg_count);
2207 call_memmove += UnboxTruncate(size_rep);
2208 call_memmove += UnboxedIntConstant(element_size, size_rep);
2209 call_memmove +=
2210 BinaryIntegerOp(Token::kMUL, size_rep, /*is_truncating=*/true);
2211 LocalVariable* length_in_bytes = MakeTemporary("length_in_bytes");
2212 // dest: void*
2213 call_memmove += LoadLocal(arg_to);
2214 call_memmove += LoadNativeField(Slot::PointerBase_data(),
2215 InnerPointerAccess::kMayBeInnerPointer);
2216 call_memmove += LoadLocal(to_start_unboxed);
2217 call_memmove += UnboxedIntConstant(0, kUnboxedIntPtr);
2218 call_memmove += CalculateElementAddress(element_size);
2219 arg_reps->Add(kUntagged);
2220 // src: const void*
2221 call_memmove += LoadLocal(arg_from);
2222 call_memmove += LoadNativeField(Slot::PointerBase_data(),
2223 InnerPointerAccess::kMayBeInnerPointer);
2224 call_memmove += LoadLocal(from_start_unboxed);
2225 call_memmove += UnboxedIntConstant(0, kUnboxedIntPtr);
2226 call_memmove += CalculateElementAddress(element_size);
2227 arg_reps->Add(kUntagged);
2228 // n: size_t
2229 call_memmove += LoadLocal(length_in_bytes);
2230 arg_reps->Add(size_rep);
2231 // memmove(dest, src, n)
2232 call_memmove +=
2233 CallLeafRuntimeEntry(kMemoryMoveRuntimeEntry, kUntagged, *arg_reps);
2234 // The returned address is unused.
2235 call_memmove += Drop();
2236 call_memmove += DropTemporary(&length_in_bytes);
2237 call_memmove += DropTemporary(&from_start_unboxed);
2238 call_memmove += DropTemporary(&to_start_unboxed);
2239 call_memmove += Goto(done);
2240
2241 body.current = done;
2242 body += NullConstant();
2243
2244 return body;
2245}
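// Worked example (non-X86): copying 2000 Float64List elements fails the
// `count < 1024` test, so the starts are unboxed, length_in_bytes becomes
// 2000 * 8, and the memmove leaf runtime entry is called; a 500-element
// copy would take the inline MemoryCopy branch instead.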
2246
2247Fragment FlowGraphBuilder::BuildTypedDataFactoryConstructor(
2248 const Function& function,
2249 classid_t cid) {
2250 const auto token_pos = function.token_pos();
2251 ASSERT(
2252 Thread::Current()->isolate_group()->class_table()->HasValidClassAt(cid));
2253
2254 ASSERT(function.IsFactory() && (function.NumParameters() == 2));
2255 LocalVariable* length = parsed_function_->RawParameterVariable(1);
2256
2257 Fragment instructions;
2258 instructions += LoadLocal(length);
2259 // AllocateTypedData instruction checks that length is valid (a non-negative
2260 // Smi below maximum allowed length).
2261 instructions += AllocateTypedData(token_pos, cid);
2262 return instructions;
2263}
2264
2265Fragment FlowGraphBuilder::BuildImplicitClosureCreation(
2266 TokenPosition position,
2267 const Function& target) {
2268 // The function cannot be local and have parent generic functions.
2269 ASSERT(!target.HasGenericParent());
2270 ASSERT(target.IsImplicitInstanceClosureFunction());
2271
2272 Fragment fragment;
2273 fragment += Constant(target);
2274 fragment += LoadLocal(parsed_function_->receiver_var());
2275 // The function signature can have uninstantiated class type parameters.
2276 const bool has_instantiator_type_args =
2277 !target.HasInstantiatedSignature(kCurrentClass);
2278 if (has_instantiator_type_args) {
2279 fragment += LoadInstantiatorTypeArguments();
2280 }
2281 fragment += AllocateClosure(position, has_instantiator_type_args,
2282 target.IsGeneric(), /*is_tear_off=*/true);
2283
2284 return fragment;
2285}
2286
2287Fragment FlowGraphBuilder::CheckVariableTypeInCheckedMode(
2288 const AbstractType& dst_type,
2289 const String& name_symbol) {
2290 return Fragment();
2291}
2292
2293bool FlowGraphBuilder::NeedsDebugStepCheck(const Function& function,
2294 TokenPosition position) {
2295 return position.IsDebugPause() && !function.is_native() &&
2296 function.is_debuggable();
2297}
2298
2299bool FlowGraphBuilder::NeedsDebugStepCheck(Value* value,
2300 TokenPosition position) {
2301 if (!position.IsDebugPause()) {
2302 return false;
2303 }
2304 Definition* definition = value->definition();
2305 if (definition->IsConstant() || definition->IsLoadStaticField() ||
2306 definition->IsLoadLocal() || definition->IsAssertAssignable() ||
2307 definition->IsAllocateSmallRecord() || definition->IsAllocateRecord()) {
2308 return true;
2309 }
2310 if (auto const alloc = definition->AsAllocateClosure()) {
2311 return !alloc->known_function().IsNull();
2312 }
2313 return false;
2314}
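// Rationale: these definitions produce a value without emitting anything a
// debugger could pause on, so callers insert an explicit DebugStepCheck to
// make single-stepping stop at the corresponding source position.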
2315
2316Fragment FlowGraphBuilder::EvaluateAssertion() {
2317 const Class& klass =
2318 Class::ZoneHandle(Z, Library::LookupCoreClass(Symbols::AssertionError()));
2319 ASSERT(!klass.IsNull());
2320 const auto& error = klass.EnsureIsFinalized(H.thread());
2321 ASSERT(error == Error::null());
2322 const Function& target = Function::ZoneHandle(
2323 Z, klass.LookupStaticFunctionAllowPrivate(Symbols::EvaluateAssertion()));
2324 ASSERT(!target.IsNull());
2325 return StaticCall(TokenPosition::kNoSource, target, /* argument_count = */ 1,
2326 ICData::kStatic);
2327}
2328
2329Fragment FlowGraphBuilder::CheckBoolean(TokenPosition position) {
2330 Fragment instructions;
2331 LocalVariable* top_of_stack = MakeTemporary();
2332 instructions += LoadLocal(top_of_stack);
2333 instructions += AssertBool(position);
2334 instructions += Drop();
2335 return instructions;
2336}
2337
2338Fragment FlowGraphBuilder::CheckAssignable(const AbstractType& dst_type,
2339 const String& dst_name,
2340 AssertAssignableInstr::Kind kind,
2341 TokenPosition token_pos) {
2342 Fragment instructions;
2343 if (!dst_type.IsTopTypeForSubtyping()) {
2344 LocalVariable* top_of_stack = MakeTemporary();
2345 instructions += LoadLocal(top_of_stack);
2346 instructions +=
2347 AssertAssignableLoadTypeArguments(token_pos, dst_type, dst_name, kind);
2348 instructions += Drop();
2349 }
2350 return instructions;
2351}
2352
2353Fragment FlowGraphBuilder::AssertAssignableLoadTypeArguments(
2354 TokenPosition position,
2355 const AbstractType& dst_type,
2356 const String& dst_name,
2357 AssertAssignableInstr::Kind kind) {
2358 Fragment instructions;
2359
2360 instructions += Constant(AbstractType::ZoneHandle(dst_type.ptr()));
2361
2362 if (!dst_type.IsInstantiated(kCurrentClass)) {
2363 instructions += LoadInstantiatorTypeArguments();
2364 } else {
2365 instructions += NullConstant();
2366 }
2367
2368 if (!dst_type.IsInstantiated(kFunctions)) {
2369 instructions += LoadFunctionTypeArguments();
2370 } else {
2371 instructions += NullConstant();
2372 }
2373
2374 instructions += AssertAssignable(position, dst_name, kind);
2375
2376 return instructions;
2377}
2378
2379Fragment FlowGraphBuilder::AssertSubtype(TokenPosition position,
2380 const AbstractType& sub_type_value,
2381 const AbstractType& super_type_value,
2382 const String& dst_name_value) {
2383 Fragment instructions;
2384 instructions += LoadInstantiatorTypeArguments();
2385 instructions += LoadFunctionTypeArguments();
2386 instructions += Constant(AbstractType::ZoneHandle(Z, sub_type_value.ptr()));
2387 instructions += Constant(AbstractType::ZoneHandle(Z, super_type_value.ptr()));
2388 instructions += Constant(String::ZoneHandle(Z, dst_name_value.ptr()));
2389 instructions += AssertSubtype(position);
2390 return instructions;
2391}
2392
2393Fragment FlowGraphBuilder::AssertSubtype(TokenPosition position) {
2394 Fragment instructions;
2395
2396 Value* dst_name = Pop();
2397 Value* super_type = Pop();
2398 Value* sub_type = Pop();
2399 Value* function_type_args = Pop();
2400 Value* instantiator_type_args = Pop();
2401
2402 AssertSubtypeInstr* instr = new (Z) AssertSubtypeInstr(
2403 InstructionSource(position), instantiator_type_args, function_type_args,
2404 sub_type, super_type, dst_name, GetNextDeoptId());
2405 instructions += Fragment(instr);
2406
2407 return instructions;
2408}
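// Note: the Pops above are in reverse order of the pushes performed by the
// five-argument AssertSubtype overload: instantiator type args, function
// type args, sub type, super type, dst name.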
2409
2410void FlowGraphBuilder::BuildTypeArgumentTypeChecks(TypeChecksToBuild mode,
2411 Fragment* implicit_checks) {
2412 const Function& dart_function = parsed_function_->function();
2413
2414 const Function* forwarding_target = nullptr;
2415 if (parsed_function_->is_forwarding_stub()) {
2416 forwarding_target = parsed_function_->forwarding_stub_super_target();
2417 ASSERT(!forwarding_target->IsNull());
2418 }
2419
2420 TypeParameters& type_parameters = TypeParameters::Handle(Z);
2421 if (dart_function.IsFactory()) {
2422 type_parameters = Class::Handle(Z, dart_function.Owner()).type_parameters();
2423 } else {
2424 type_parameters = dart_function.type_parameters();
2425 }
2426 const intptr_t num_type_params = type_parameters.Length();
2427 if (num_type_params == 0) return;
2428 if (forwarding_target != nullptr) {
2429 type_parameters = forwarding_target->type_parameters();
2430 ASSERT(type_parameters.Length() == num_type_params);
2431 }
2432 if (type_parameters.AllDynamicBounds()) {
2433 return; // All bounds are dynamic.
2434 }
2435 TypeParameter& type_param = TypeParameter::Handle(Z);
2436 String& name = String::Handle(Z);
2437 AbstractType& bound = AbstractType::Handle(Z);
2438 Fragment check_bounds;
2439 for (intptr_t i = 0; i < num_type_params; ++i) {
2440 bound = type_parameters.BoundAt(i);
2441 if (bound.IsTopTypeForSubtyping()) {
2442 continue;
2443 }
2444
2445 switch (mode) {
2446 case TypeChecksToBuild::kCheckAllTypeParameterBounds:
2447 break;
2448 case TypeChecksToBuild::kCheckCovariantTypeParameterBounds:
2449 if (!type_parameters.IsGenericCovariantImplAt(i)) {
2450 continue;
2451 }
2452 break;
2453 case TypeChecksToBuild::kCheckNonCovariantTypeParameterBounds:
2454 if (type_parameters.IsGenericCovariantImplAt(i)) {
2455 continue;
2456 }
2457 break;
2458 }
2459
2460 name = type_parameters.NameAt(i);
2461
2462 if (forwarding_target != nullptr) {
2463 type_param = forwarding_target->TypeParameterAt(i);
2464 } else if (dart_function.IsFactory()) {
2465 type_param = Class::Handle(Z, dart_function.Owner()).TypeParameterAt(i);
2466 } else {
2467 type_param = dart_function.TypeParameterAt(i);
2468 }
2469 ASSERT(type_param.IsFinalized());
2470 check_bounds +=
2471 AssertSubtype(TokenPosition::kNoSource, type_param, bound, name);
2472 }
2473
2474 // Type arguments passed through partial instantiation are guaranteed to be
2475 // bounds-checked at the point of partial instantiation, so we don't need to
2476 // check them again at the call-site.
2477 if (dart_function.IsClosureFunction() && !check_bounds.is_empty() &&
2478 FLAG_eliminate_type_checks) {
2479 LocalVariable* closure = parsed_function_->ParameterVariable(0);
2480 *implicit_checks += TestDelayedTypeArgs(closure, /*present=*/{},
2481 /*absent=*/check_bounds);
2482 } else {
2483 *implicit_checks += check_bounds;
2484 }
2485}
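// For example, for `void f<T extends num>() {}` called dynamically, the
// bounds fragment amounts to AssertSubtype(T, num, "T"); type parameters
// whose bound is a top type are skipped entirely.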
2486
2487void FlowGraphBuilder::BuildArgumentTypeChecks(
2488 Fragment* explicit_checks,
2489 Fragment* implicit_checks,
2490 Fragment* implicit_redefinitions) {
2491 const Function& dart_function = parsed_function_->function();
2492
2493 const Function* forwarding_target = nullptr;
2494 if (parsed_function_->is_forwarding_stub()) {
2495 forwarding_target = parsed_function_->forwarding_stub_super_target();
2496 ASSERT(!forwarding_target->IsNull());
2497 }
2498
2499 const intptr_t num_params = dart_function.NumParameters();
2500 for (intptr_t i = dart_function.NumImplicitParameters(); i < num_params;
2501 ++i) {
2502 LocalVariable* param = parsed_function_->ParameterVariable(i);
2503 const String& name = param->name();
2504 if (!param->needs_type_check()) {
2505 continue;
2506 }
2507 if (param->is_captured()) {
2508 param = parsed_function_->RawParameterVariable(i);
2509 }
2510
2511 const AbstractType* target_type = &param->static_type();
2512 if (forwarding_target != nullptr) {
2513 // We add 1 to the parameter index to account for the receiver.
2514 target_type =
2515 &AbstractType::ZoneHandle(Z, forwarding_target->ParameterTypeAt(i));
2516 }
2517
2518 if (target_type->IsTopTypeForSubtyping()) continue;
2519
2520 const bool is_covariant = param->is_explicit_covariant_parameter();
2521 Fragment* checks = is_covariant ? explicit_checks : implicit_checks;
2522
2523 *checks += LoadLocal(param);
2524 *checks += AssertAssignableLoadTypeArguments(
2525 param->token_pos(), *target_type, name,
2526 AssertAssignableInstr::kParameterCheck);
2527 *checks += StoreLocal(param);
2528 *checks += Drop();
2529
2530 if (!is_covariant && implicit_redefinitions != nullptr && optimizing_) {
2531 // We generate slightly different code in optimized vs. un-optimized code,
2532 // which is ok since we don't allocate any deopt ids.
2533 AssertNoDeoptIdsAllocatedScope no_deopt_allocation(thread_);
2534
2535 *implicit_redefinitions += LoadLocal(param);
2536 *implicit_redefinitions += RedefinitionWithType(*target_type);
2537 *implicit_redefinitions += StoreLocal(TokenPosition::kNoSource, param);
2538 *implicit_redefinitions += Drop();
2539 }
2540 }
2541}
2542
2543BlockEntryInstr* FlowGraphBuilder::BuildPrologue(BlockEntryInstr* normal_entry,
2544 PrologueInfo* prologue_info) {
2545 const bool compiling_for_osr = IsCompiledForOsr();
2546
2547 kernel::PrologueBuilder prologue_builder(
2548 parsed_function_, last_used_block_id_, compiling_for_osr, IsInlining());
2549 BlockEntryInstr* instruction_cursor =
2550 prologue_builder.BuildPrologue(normal_entry, prologue_info);
2551
2552 last_used_block_id_ = prologue_builder.last_used_block_id();
2553
2554 return instruction_cursor;
2555}
2556
2557ArrayPtr FlowGraphBuilder::GetOptionalParameterNames(const Function& function) {
2558 if (!function.HasOptionalNamedParameters()) {
2559 return Array::null();
2560 }
2561
2562 const intptr_t num_fixed_params = function.num_fixed_parameters();
2563 const intptr_t num_opt_params = function.NumOptionalNamedParameters();
2564 const auto& names = Array::Handle(Z, Array::New(num_opt_params, Heap::kOld));
2565 auto& name = String::Handle(Z);
2566 for (intptr_t i = 0; i < num_opt_params; ++i) {
2567 name = function.ParameterNameAt(num_fixed_params + i);
2568 names.SetAt(i, name);
2569 }
2570 return names.ptr();
2571}
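// For example, for `void f(int a, {int? b, int? c})` this returns the array
// ["b", "c"]: only the optional named parameter names, in declaration order
// after the fixed parameters.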
2572
2573Fragment FlowGraphBuilder::PushExplicitParameters(
2574 const Function& function,
2575 const Function& target /* = Function::null_function()*/) {
2576 Fragment instructions;
2577 for (intptr_t i = function.NumImplicitParameters(),
2578 n = function.NumParameters();
2579 i < n; ++i) {
2580 Fragment push_param = LoadLocal(parsed_function_->ParameterVariable(i));
2581 if (!target.IsNull() && target.is_unboxed_parameter_at(i)) {
2582 Representation to;
2583 if (target.is_unboxed_integer_parameter_at(i)) {
2584 to = kUnboxedInt64;
2585 } else {
2586 ASSERT(target.is_unboxed_double_parameter_at(i));
2587 to = kUnboxedDouble;
2588 }
2589 const auto unbox = UnboxInstr::Create(to, Pop(), DeoptId::kNone,
2590 UnboxInstr::ValueMode::kHasValidType);
2591 Push(unbox);
2592 push_param += Fragment(unbox);
2593 }
2594 instructions += push_param;
2595 }
2596 return instructions;
2597}
2598
2599FlowGraph* FlowGraphBuilder::BuildGraphOfMethodExtractor(
2600 const Function& method) {
2601 // A method extractor is the implicit getter for a method.
2602 const Function& function =
2603 Function::ZoneHandle(Z, method.extracted_method_closure());
2604
2605 graph_entry_ =
2606 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
2607
2608 auto normal_entry = BuildFunctionEntry(graph_entry_);
2609 graph_entry_->set_normal_entry(normal_entry);
2610
2611 Fragment body(normal_entry);
2612 body += CheckStackOverflowInPrologue(method.token_pos());
2613 body += BuildImplicitClosureCreation(TokenPosition::kNoSource, function);
2614 body += Return(TokenPosition::kNoSource);
2615
2616 // There is no prologue code for a method extractor.
2617 PrologueInfo prologue_info(-1, -1);
2618 return new (Z)
2619 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
2620 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
2621}
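// For example, a tear-off `o.foo` of an instance method compiles to a call
// of the synthetic getter `get:foo`, whose body is built here: it simply
// allocates and returns the closure bound to the receiver.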
2622
2623FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodDispatcher(
2624 const Function& function) {
2625 // This function is specialized for a receiver class, a method name, and
2626 // the arguments descriptor at a call site.
2627 const ArgumentsDescriptor descriptor(saved_args_desc_array());
2628
2629 graph_entry_ =
2630 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
2631
2632 auto normal_entry = BuildFunctionEntry(graph_entry_);
2633 graph_entry_->set_normal_entry(normal_entry);
2634
2635 PrologueInfo prologue_info(-1, -1);
2636 BlockEntryInstr* instruction_cursor =
2637 BuildPrologue(normal_entry, &prologue_info);
2638
2639 Fragment body(instruction_cursor);
2640 body += CheckStackOverflowInPrologue(function.token_pos());
2641
2642 // The receiver is the first argument to noSuchMethod, and it is the first
2643 // argument passed to the dispatcher function.
2644 body += LoadLocal(parsed_function_->ParameterVariable(0));
2645
2646 // The second argument to noSuchMethod is an invocation mirror. Push the
2647 // arguments for allocating the invocation mirror. First, the name.
2648 body += Constant(String::ZoneHandle(Z, function.name()));
2649
2650 // Second, the arguments descriptor.
2651 body += Constant(saved_args_desc_array());
2652
2653 // Third, an array containing the original arguments. Create it and fill
2654 // it in.
2655 const intptr_t receiver_index = descriptor.TypeArgsLen() > 0 ? 1 : 0;
2657 body += IntConstant(receiver_index + descriptor.Size());
2658 body += CreateArray();
2659 LocalVariable* array = MakeTemporary();
2660 if (receiver_index > 0) {
2661 LocalVariable* type_args = parsed_function_->function_type_arguments();
2662 ASSERT(type_args != nullptr);
2663 body += LoadLocal(array);
2664 body += IntConstant(0);
2665 body += LoadLocal(type_args);
2666 body += StoreIndexed(kArrayCid);
2667 }
2668 for (intptr_t i = 0; i < descriptor.PositionalCount(); ++i) {
2669 body += LoadLocal(array);
2670 body += IntConstant(receiver_index + i);
2671 body += LoadLocal(parsed_function_->ParameterVariable(i));
2672 body += StoreIndexed(kArrayCid);
2673 }
2674 String& name = String::Handle(Z);
2675 for (intptr_t i = 0; i < descriptor.NamedCount(); ++i) {
2676 const intptr_t parameter_index = descriptor.PositionAt(i);
2677 name = descriptor.NameAt(i);
2678 name = Symbols::New(H.thread(), name);
2679 body += LoadLocal(array);
2680 body += IntConstant(receiver_index + parameter_index);
2681 body += LoadLocal(parsed_function_->ParameterVariable(parameter_index));
2682 body += StoreIndexed(kArrayCid);
2683 }
2684
2685 // Fourth, false indicating this is not a super NoSuchMethod.
2686 body += Constant(Bool::False());
2687
2688 const Class& mirror_class =
2689 Class::Handle(Z, Library::LookupCoreClass(Symbols::InvocationMirror()));
2690 ASSERT(!mirror_class.IsNull());
2691 const auto& error = mirror_class.EnsureIsFinalized(H.thread());
2692 ASSERT(error == Error::null());
2693 const Function& allocation_function = Function::ZoneHandle(
2694 Z, mirror_class.LookupStaticFunction(
2695 Library::PrivateCoreLibName(Symbols::AllocateInvocationMirror())));
2696 ASSERT(!allocation_function.IsNull());
2697 body += StaticCall(TokenPosition::kMinSource, allocation_function,
2698 /* argument_count = */ 4, ICData::kStatic);
2699
2700 const int kTypeArgsLen = 0;
2701 ArgumentsDescriptor two_arguments(
2702 Array::Handle(Z, ArgumentsDescriptor::NewBoxed(kTypeArgsLen, 2)));
2703 Function& no_such_method =
2704 Function::ZoneHandle(Z, Resolver::ResolveDynamicForReceiverClass(
2705 Class::Handle(Z, function.Owner()),
2706 Symbols::NoSuchMethod(), two_arguments));
2707 if (no_such_method.IsNull()) {
2708 // If noSuchMethod is not found on the receiver class, call
2709 // Object.noSuchMethod.
2710 no_such_method = Resolver::ResolveDynamicForReceiverClass(
2711 Class::Handle(Z, IG->object_store()->object_class()),
2712 Symbols::NoSuchMethod(), two_arguments);
2713 }
2714 body += StaticCall(TokenPosition::kMinSource, no_such_method,
2715 /* argument_count = */ 2, ICData::kNSMDispatch);
2716 body += Return(TokenPosition::kNoSource);
2717
2718 return new (Z)
2719 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
2720 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
2721}
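// In Dart terms the generated dispatcher is roughly:
//   receiver.noSuchMethod(_allocateInvocationMirror(
//       name, argsDescriptor, [...original args...], false));
// falling back to Object.noSuchMethod when the receiver's class does not
// declare its own noSuchMethod.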
2722
2723FlowGraph* FlowGraphBuilder::BuildGraphOfRecordFieldGetter(
2724 const Function& function) {
2725 graph_entry_ =
2726 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
2727
2728 auto normal_entry = BuildFunctionEntry(graph_entry_);
2729 graph_entry_->set_normal_entry(normal_entry);
2730
2731 JoinEntryInstr* nsm = BuildJoinEntry();
2732 JoinEntryInstr* done = BuildJoinEntry();
2733
2734 Fragment body(normal_entry);
2735 body += CheckStackOverflowInPrologue(function.token_pos());
2736
2737 String& name = String::ZoneHandle(Z, function.name());
2738 ASSERT(Field::IsGetterName(name));
2739 name = Field::NameFromGetter(name);
2740
2741 // Get an array of field names.
2742 const Class& cls = Class::Handle(Z, IG->class_table()->At(kRecordCid));
2743 const auto& error = cls.EnsureIsFinalized(thread_);
2744 ASSERT(error == Error::null());
2745 const Function& get_field_names_function = Function::ZoneHandle(
2746 Z, cls.LookupFunctionAllowPrivate(Symbols::Get_fieldNames()));
2747 ASSERT(!get_field_names_function.IsNull());
2748 body += LoadLocal(parsed_function_->receiver_var());
2749 body += StaticCall(TokenPosition::kNoSource, get_field_names_function, 1,
2750 ICData::kNoRebind);
2751 LocalVariable* field_names = MakeTemporary("field_names");
2752
2753 body += LoadLocal(field_names);
2754 body += LoadNativeField(Slot::Array_length());
2755 LocalVariable* num_named = MakeTemporary("num_named");
2756
2757 // num_positional = num_fields - field_names.length
2758 body += LoadLocal(parsed_function_->receiver_var());
2759 body += LoadNativeField(Slot::Record_shape());
2760 body += IntConstant(compiler::target::RecordShape::kNumFieldsMask);
2761 body += SmiBinaryOp(Token::kBIT_AND);
2762 body += LoadLocal(num_named);
2763 body += SmiBinaryOp(Token::kSUB);
2764 LocalVariable* num_positional = MakeTemporary("num_positional");
2765
2766 const intptr_t field_index =
2767 Record::GetPositionalFieldIndexFromFieldName(name);
2768 if (field_index >= 0) {
2769 // Get positional record field by index.
2770 body += IntConstant(field_index);
2771 body += LoadLocal(num_positional);
2772 body += SmiRelationalOp(Token::kLT);
2773 TargetEntryInstr* valid_index;
2774 TargetEntryInstr* invalid_index;
2775 body += BranchIfTrue(&valid_index, &invalid_index);
2776
2777 body.current = valid_index;
2778 body += LoadLocal(parsed_function_->receiver_var());
2779 body += LoadNativeField(Slot::GetRecordFieldSlot(
2780 thread_, compiler::target::Record::field_offset(field_index)));
2781
2782 body += StoreLocal(TokenPosition::kNoSource,
2783 parsed_function_->expression_temp_var());
2784 body += Drop();
2785 body += Goto(done);
2786
2787 body.current = invalid_index;
2788 }
2789
2790 // Search field among named fields.
2791 body += IntConstant(0);
2792 body += LoadLocal(num_named);
2793 body += SmiRelationalOp(Token::kLT);
2794 TargetEntryInstr* has_named_fields;
2795 TargetEntryInstr* no_named_fields;
2796 body += BranchIfTrue(&has_named_fields, &no_named_fields);
2797
2798 Fragment(no_named_fields) + Goto(nsm);
2799 body.current = has_named_fields;
2800
2801 LocalVariable* index = parsed_function_->expression_temp_var();
2802 body += IntConstant(0);
2803 body += StoreLocal(TokenPosition::kNoSource, index);
2804 body += Drop();
2805
2806 JoinEntryInstr* loop = BuildJoinEntry();
2807 body += Goto(loop);
2808 body.current = loop;
2809
2810 body += LoadLocal(field_names);
2811 body += LoadLocal(index);
2812 body += LoadIndexed(kArrayCid,
2813 /*index_scale*/ compiler::target::kCompressedWordSize);
2814 body += Constant(name);
2815 TargetEntryInstr* found;
2816 TargetEntryInstr* continue_search;
2817 body += BranchIfEqual(&found, &continue_search);
2818
2819 body.current = continue_search;
2820 body += LoadLocal(index);
2821 body += IntConstant(1);
2822 body += SmiBinaryOp(Token::kADD);
2823 body += StoreLocal(TokenPosition::kNoSource, index);
2824 body += Drop();
2825
2826 body += LoadLocal(index);
2827 body += LoadLocal(num_named);
2828 body += SmiRelationalOp(Token::kLT);
2829 TargetEntryInstr* has_more_fields;
2830 TargetEntryInstr* no_more_fields;
2831 body += BranchIfTrue(&has_more_fields, &no_more_fields);
2832
2833 Fragment(has_more_fields) + Goto(loop);
2834 Fragment(no_more_fields) + Goto(nsm);
2835
2836 body.current = found;
2837
2838 body += LoadLocal(parsed_function_->receiver_var());
2839
2840 body += LoadLocal(num_positional);
2841 body += LoadLocal(index);
2842 body += SmiBinaryOp(Token::kADD);
2843
2844 body += LoadIndexed(kRecordCid,
2845 /*index_scale*/ compiler::target::kCompressedWordSize);
2846
2847 body += StoreLocal(TokenPosition::kNoSource,
2848 parsed_function_->expression_temp_var());
2849 body += Drop();
2850 body += Goto(done);
2851
2852 body.current = done;
2853
2854 body += LoadLocal(parsed_function_->expression_temp_var());
2855 body += DropTempsPreserveTop(3); // field_names, num_named, num_positional
2856 body += Return(TokenPosition::kNoSource);
2857
2858 Fragment throw_nsm(nsm);
2859 throw_nsm += LoadLocal(parsed_function_->receiver_var());
2860 throw_nsm += ThrowNoSuchMethodError(TokenPosition::kNoSource, function,
2861 /*incompatible_arguments=*/false,
2862 /*receiver_pushed=*/true);
2863 throw_nsm += ThrowException(TokenPosition::kNoSource); // Close graph.
2864
2865 // There is no prologue code for a record field getter.
2866 PrologueInfo prologue_info(-1, -1);
2867 return new (Z)
2868 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
2869 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
2870}
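// For example, with the record `(1, x: 2)`: the getter `$1` takes the
// positional fast path above, the getter `x` goes through the field-name
// search loop, and any other name branches to the noSuchMethod join.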
2871
2872// Information used by the various dynamic closure call fragment builders.
2905
2906Fragment FlowGraphBuilder::TestClosureFunctionGeneric(
2907 const ClosureCallInfo& info,
2908 Fragment generic,
2909 Fragment not_generic) {
2910 JoinEntryInstr* after_branch = BuildJoinEntry();
2911
2912 Fragment check;
2913 check += LoadLocal(info.type_parameters);
2914 TargetEntryInstr* is_not_generic;
2915 TargetEntryInstr* is_generic;
2916 check += BranchIfNull(&is_not_generic, &is_generic);
2917
2918 generic.Prepend(is_generic);
2919 generic += Goto(after_branch);
2920
2921 not_generic.Prepend(is_not_generic);
2922 not_generic += Goto(after_branch);
2923
2924 return Fragment(check.entry, after_branch);
2925}
2926
2927Fragment FlowGraphBuilder::TestClosureFunctionNamedParameterRequired(
2928 const ClosureCallInfo& info,
2929 Fragment set,
2930 Fragment not_set) {
2931 Fragment check_required;
2932 // We calculate the index to dereference in the parameter names array.
2933 check_required += LoadLocal(info.vars->current_param_index);
2934 check_required +=
2935 IntConstant(compiler::target::kNumParameterFlagsPerElementLog2);
2936 check_required += SmiBinaryOp(Token::kSHR);
2937 check_required += LoadLocal(info.num_opt_params);
2938 check_required += SmiBinaryOp(Token::kADD);
2939 LocalVariable* flags_index = MakeTemporary("flags_index"); // Read-only.
2940
2941 // One read-only stack value (flags_index) that must be dropped
2942 // after we rejoin at after_check.
2943 JoinEntryInstr* after_check = BuildJoinEntry();
2944
2945 // Now we check to see if the flags index is within the bounds of the
2946 // parameters names array. If not, it cannot be required.
2947 check_required += LoadLocal(flags_index);
2948 check_required += LoadLocal(info.named_parameter_names);
2949 check_required += LoadNativeField(Slot::Array_length());
2950 check_required += SmiRelationalOp(Token::kLT);
2951 TargetEntryInstr* valid_index;
2952 TargetEntryInstr* invalid_index;
2953 check_required += BranchIfTrue(&valid_index, &invalid_index);
2954
2955 JoinEntryInstr* join_not_set = BuildJoinEntry();
2956
2957 Fragment(invalid_index) + Goto(join_not_set);
2958
2959 // Otherwise, we need to retrieve the value. We're guaranteed the Smis in
2960 // the flag slots are non-null, so after loading we can immediately check
2961 // the required flag bit for the given named parameter.
2962 check_required.current = valid_index;
2963 check_required += LoadLocal(info.named_parameter_names);
2964 check_required += LoadLocal(flags_index);
2965 check_required += LoadIndexed(
2966 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
2967 check_required += LoadLocal(info.vars->current_param_index);
2968 check_required +=
2969 IntConstant(compiler::target::kNumParameterFlagsPerElement - 1);
2970 check_required += SmiBinaryOp(Token::kBIT_AND);
2971 // If the below changes, we'll need to multiply by the number of parameter
2972 // flags before shifting.
2973 static_assert(compiler::target::kNumParameterFlags == 1,
2974 "IL builder assumes only one flag bit per parameter");
2975 check_required += SmiBinaryOp(Token::kSHR);
2976 check_required +=
2977 IntConstant(1 << compiler::target::kRequiredNamedParameterFlag);
2978 check_required += SmiBinaryOp(Token::kBIT_AND);
2979 check_required += IntConstant(0);
2980 TargetEntryInstr* is_not_set;
2981 TargetEntryInstr* is_set;
2982 check_required += BranchIfEqual(&is_not_set, &is_set);
2983
2984 Fragment(is_not_set) + Goto(join_not_set);
2985
2986 set.Prepend(is_set);
2987 set += Goto(after_check);
2988
2989 not_set.Prepend(join_not_set);
2990 not_set += Goto(after_check);
2991
2992 // After rejoining, drop the introduced temporaries.
2993 check_required.current = after_check;
2994 check_required += DropTemporary(&flags_index);
2995 return check_required;
2996}
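// Worked example of the flag packing tested above (one flag bit per
// parameter): the required bit of named parameter i is
//   (flags[num_opt_params + (i >> kNumParameterFlagsPerElementLog2)]
//       >> (i & (kNumParameterFlagsPerElement - 1)))
//     & (1 << kRequiredNamedParameterFlag)
// where flags[] is the tail of the named parameter names array.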
2997
2998Fragment FlowGraphBuilder::BuildClosureCallDefaultTypeHandling(
2999 const ClosureCallInfo& info) {
3000 if (info.descriptor.TypeArgsLen() > 0) {
3001 ASSERT(parsed_function_->function_type_arguments() != nullptr);
3002 // A TAV was provided, so we don't need default type argument handling
3003 // and can just take the arguments we were given.
3004 Fragment store_provided;
3005 store_provided += LoadLocal(parsed_function_->function_type_arguments());
3006 store_provided += StoreLocal(info.vars->function_type_args);
3007 store_provided += Drop();
3008 return store_provided;
3009 }
3010
3011 // Load the defaults, instantiating or replacing them with the other type
3012 // arguments as appropriate.
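// For illustration (Dart): given a generic closure declared as
// `T f<T extends num>() { ... }`, a dynamic call `f()` reaches this path
// and uses the declared default for `T` (here `num`), while `f<int>()`
// was handled by the early return above.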
3013 Fragment store_default;
3014 store_default += LoadLocal(info.closure);
3015 store_default += LoadNativeField(Slot::Closure_function());
3016 store_default += LoadNativeField(Slot::Function_data());
3017 LocalVariable* closure_data = MakeTemporary("closure_data");
3018
3019 store_default += LoadLocal(closure_data);
3020 store_default += BuildExtractUnboxedSlotBitFieldIntoSmi<
3021 ClosureData::PackedInstantiationMode>(Slot::ClosureData_packed_fields());
3022 LocalVariable* default_tav_kind = MakeTemporary("default_tav_kind");
3023
3024 // Two locals to drop after join, closure_data and default_tav_kind.
3025 JoinEntryInstr* done = BuildJoinEntry();
3026
3027 store_default += LoadLocal(default_tav_kind);
3028 TargetEntryInstr* is_instantiated;
3029 TargetEntryInstr* is_not_instantiated;
3030 store_default +=
3031 IntConstant(static_cast<intptr_t>(InstantiationMode::kIsInstantiated));
3032 store_default += BranchIfEqual(&is_instantiated, &is_not_instantiated);
3033 store_default.current = is_not_instantiated; // Check next case.
3034 store_default += LoadLocal(default_tav_kind);
3035 TargetEntryInstr* needs_instantiation;
3036 TargetEntryInstr* can_share;
3037 store_default += IntConstant(
3038 static_cast<intptr_t>(InstantiationMode::kNeedsInstantiation));
3039 store_default += BranchIfEqual(&needs_instantiation, &can_share);
3040 store_default.current = can_share; // Check next case.
3041 store_default += LoadLocal(default_tav_kind);
3042 TargetEntryInstr* can_share_instantiator;
3043 TargetEntryInstr* can_share_function;
3044 store_default += IntConstant(static_cast<intptr_t>(
3045 InstantiationMode::kSharesInstantiatorTypeArguments));
3046 store_default += BranchIfEqual(&can_share_instantiator, &can_share_function);
3047
3048 Fragment instantiated(is_instantiated);
3049 instantiated += LoadLocal(info.type_parameters);
3050 instantiated += LoadNativeField(Slot::TypeParameters_defaults());
3051 instantiated += StoreLocal(info.vars->function_type_args);
3052 instantiated += Drop();
3053 instantiated += Goto(done);
3054
3055 Fragment do_instantiation(needs_instantiation);
3056 // Load the instantiator type arguments.
3057 do_instantiation += LoadLocal(info.instantiator_type_args);
3058 // Load the parent function type arguments. (No local function type arguments
3059 // can be used within the defaults).
3060 do_instantiation += LoadLocal(info.parent_function_type_args);
3061 // Load the default type arguments to instantiate.
3062 do_instantiation += LoadLocal(info.type_parameters);
3063 do_instantiation += LoadNativeField(Slot::TypeParameters_defaults());
3064 do_instantiation += InstantiateDynamicTypeArguments();
3065 do_instantiation += StoreLocal(info.vars->function_type_args);
3066 do_instantiation += Drop();
3067 do_instantiation += Goto(done);
3068
3069 Fragment share_instantiator(can_share_instantiator);
3070 share_instantiator += LoadLocal(info.instantiator_type_args);
3071 share_instantiator += StoreLocal(info.vars->function_type_args);
3072 share_instantiator += Drop();
3073 share_instantiator += Goto(done);
3074
3075 Fragment share_function(can_share_function);
3076 // Since the defaults won't have local type parameters, these must all be
3077 // from the parent function type arguments, so we can just use it.
3078 share_function += LoadLocal(info.parent_function_type_args);
3079 share_function += StoreLocal(info.vars->function_type_args);
3080 share_function += Drop();
3081 share_function += Goto(done);
3082
3083 store_default.current = done; // Return here after branching.
3084 store_default += DropTemporary(&default_tav_kind);
3085 store_default += DropTemporary(&closure_data);
3086
3087 Fragment store_delayed;
3088 store_delayed += LoadLocal(info.closure);
3089 store_delayed += LoadNativeField(Slot::Closure_delayed_type_arguments());
3090 store_delayed += StoreLocal(info.vars->function_type_args);
3091 store_delayed += Drop();
3092
3093 // Use the delayed type args if present, else the default ones.
3094 return TestDelayedTypeArgs(info.closure, store_delayed, store_default);
3095}
3096
3097Fragment FlowGraphBuilder::BuildClosureCallNamedArgumentsCheck(
3098 const ClosureCallInfo& info) {
3099 // When no named arguments are provided, we just need to check for possible
3100 // required named arguments.
3101 if (info.descriptor.NamedCount() == 0) {
3102 // If the below changes, we can no longer assume that flag slots existing
3103 // means there are required parameters.
3104 static_assert(compiler::target::kNumParameterFlags == 1,
3105 "IL builder assumes only one flag bit per parameter");
3106 // No named args were provided, so check for any required named params.
3107 // Here, we assume that the only parameter flag saved is the required bit
3108 // for named parameters. If this changes, we'll need to check each flag
3109 // entry appropriately for any set required bits.
3110 Fragment has_any;
3111 has_any += LoadLocal(info.num_opt_params);
3112 has_any += LoadLocal(info.named_parameter_names);
3113 has_any += LoadNativeField(Slot::Array_length());
3114 TargetEntryInstr* no_required;
3115 TargetEntryInstr* has_required;
3116 has_any += BranchIfEqual(&no_required, &has_required);
3117
3118 Fragment(has_required) + Goto(info.throw_no_such_method);
3119
3120 return Fragment(has_any.entry, no_required);
3121 }
3122
3123 // Otherwise, we need to loop through the parameter names to check the names
3124 // of named arguments for validity (and possibly missing required ones).
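// The generated loop is roughly (pseudocode sketch):
//   current_num_processed = 0;
//   for (i = 0; i < num_opt_params; i++) {
//     name = named_parameter_names[i];
//     if (name matches a provided argument name) {
//       record the parameter index; current_num_processed++;
//     } else if (parameter i is required) goto throw_no_such_method;
//   }
//   if (current_num_processed != NamedCount()) goto throw_no_such_method;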
3125 Fragment check_names;
3126 check_names += LoadLocal(info.vars->current_param_index);
3127 LocalVariable* old_index = MakeTemporary("old_index"); // Read-only.
3128 check_names += LoadLocal(info.vars->current_num_processed);
3129 LocalVariable* old_processed = MakeTemporary("old_processed"); // Read-only.
3130
3131 // Two local stack values (old_index, old_processed) to drop after rejoining
3132 // at done.
3133 JoinEntryInstr* loop = BuildJoinEntry();
3134 JoinEntryInstr* done = BuildJoinEntry();
3135
3136 check_names += IntConstant(0);
3137 check_names += StoreLocal(info.vars->current_num_processed);
3138 check_names += Drop();
3139 check_names += IntConstant(0);
3140 check_names += StoreLocal(info.vars->current_param_index);
3141 check_names += Drop();
3142 check_names += Goto(loop);
3143
3144 Fragment loop_check(loop);
3145 loop_check += LoadLocal(info.vars->current_param_index);
3146 loop_check += LoadLocal(info.num_opt_params);
3147 loop_check += SmiRelationalOp(Token::kLT);
3148 TargetEntryInstr* no_more;
3149 TargetEntryInstr* more;
3150 loop_check += BranchIfTrue(&more, &no_more);
3151
3152 Fragment(no_more) + Goto(done);
3153
3154 Fragment loop_body(more);
3155 // First load the name we need to check against.
3156 loop_body += LoadLocal(info.named_parameter_names);
3157 loop_body += LoadLocal(info.vars->current_param_index);
3158 loop_body += LoadIndexed(
3159 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3160 LocalVariable* param_name = MakeTemporary("param_name"); // Read-only.
3161
3162 // One additional local value on the stack within the loop body (param_name)
3163 // that should be dropped after rejoining at loop_incr.
3164 JoinEntryInstr* loop_incr = BuildJoinEntry();
3165
3166 // Now iterate over the ArgumentsDescriptor names and check for a match.
3167 for (intptr_t i = 0; i < info.descriptor.NamedCount(); i++) {
3168 const auto& name = String::ZoneHandle(Z, info.descriptor.NameAt(i));
3169 loop_body += Constant(name);
3170 loop_body += LoadLocal(param_name);
3171 TargetEntryInstr* match;
3172 TargetEntryInstr* mismatch;
3173 loop_body += BranchIfEqual(&match, &mismatch);
3174 loop_body.current = mismatch;
3175
3176 // We have a match, so go to the next name after storing the corresponding
3177 // parameter index on the stack and incrementing the number of matched
3178 // arguments. (No need to check the required bit for provided parameters.)
3179 Fragment matched(match);
3180 matched += LoadLocal(info.vars->current_param_index);
3181 matched += LoadLocal(info.num_fixed_params);
3182 matched += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3183 matched += StoreLocal(info.vars->named_argument_parameter_indices.At(i));
3184 matched += Drop();
3185 matched += LoadLocal(info.vars->current_num_processed);
3186 matched += IntConstant(1);
3187 matched += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3188 matched += StoreLocal(info.vars->current_num_processed);
3189 matched += Drop();
3190 matched += Goto(loop_incr);
3191 }
3192
3193 // None of the names in the arguments descriptor matched, so check if this
3194 // is a required parameter.
3195 loop_body += TestClosureFunctionNamedParameterRequired(
3196 info,
3197 /*set=*/Goto(info.throw_no_such_method),
3198 /*not_set=*/{});
3199
3200 loop_body += Goto(loop_incr);
3201
3202 Fragment incr_index(loop_incr);
3203 incr_index += DropTemporary(&param_name);
3204 incr_index += LoadLocal(info.vars->current_param_index);
3205 incr_index += IntConstant(1);
3206 incr_index += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3207 incr_index += StoreLocal(info.vars->current_param_index);
3208 incr_index += Drop();
3209 incr_index += Goto(loop);
3210
3211 Fragment check_processed(done);
3212 check_processed += LoadLocal(info.vars->current_num_processed);
3213 check_processed += IntConstant(info.descriptor.NamedCount());
3214 TargetEntryInstr* all_processed;
3215 TargetEntryInstr* bad_name;
3216 check_processed += BranchIfEqual(&all_processed, &bad_name);
3217
3218 // Didn't find a matching parameter name for at least one argument name.
3219 Fragment(bad_name) + Goto(info.throw_no_such_method);
3220
3221 // Drop the temporaries at the end of the fragment.
3222 check_names.current = all_processed;
3223 check_names += LoadLocal(old_processed);
3224 check_names += StoreLocal(info.vars->current_num_processed);
3225 check_names += Drop();
3226 check_names += DropTemporary(&old_processed);
3227 check_names += LoadLocal(old_index);
3228 check_names += StoreLocal(info.vars->current_param_index);
3229 check_names += Drop();
3230 check_names += DropTemporary(&old_index);
3231 return check_names;
3232}
3233
3234Fragment FlowGraphBuilder::BuildClosureCallArgumentsValidCheck(
3235 const ClosureCallInfo& info) {
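// Shape checks built below (sketch):
//   1. If a TAV was provided: the closure must not have delayed type
//      arguments, the function must be generic, and the TAV length must
//      match the number of type parameters.
//   2. Without named parameters: the positional argument count must lie
//      between the fixed and maximum parameter counts.
//   3. With named parameters: the positional count must equal the fixed
//      count, and the provided names are validated separately below.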
3236 Fragment check_entry;
3237 // We only need to check the length of any explicitly provided type arguments.
3238 if (info.descriptor.TypeArgsLen() > 0) {
3239 Fragment check_type_args_length;
3240 check_type_args_length += LoadLocal(info.type_parameters);
3241 TargetEntryInstr* null;
3242 TargetEntryInstr* not_null;
3243 check_type_args_length += BranchIfNull(&null, &not_null);
3244 check_type_args_length.current = not_null; // Continue in non-error case.
3245 check_type_args_length += LoadLocal(info.signature);
3246 check_type_args_length += BuildExtractUnboxedSlotBitFieldIntoSmi<
3247 FunctionType::PackedNumTypeParameters>(
3248 Slot::FunctionType_packed_type_parameter_counts());
3249 check_type_args_length += IntConstant(info.descriptor.TypeArgsLen());
3250 TargetEntryInstr* equal;
3251 TargetEntryInstr* not_equal;
3252 check_type_args_length += BranchIfEqual(&equal, &not_equal);
3253 check_type_args_length.current = equal; // Continue in non-error case.
3254
3255 // The function is not generic.
3256 Fragment(null) + Goto(info.throw_no_such_method);
3257
3258 // An incorrect number of type arguments were passed.
3259 Fragment(not_equal) + Goto(info.throw_no_such_method);
3260
3261 // Type arguments should not be provided if there are delayed type
3262 // arguments, as then the closure itself is not generic.
3263 check_entry += TestDelayedTypeArgs(
3264 info.closure, /*present=*/Goto(info.throw_no_such_method),
3265 /*absent=*/check_type_args_length);
3266 }
3267
3268 check_entry += LoadLocal(info.has_named_params);
3269 TargetEntryInstr* has_named;
3270 TargetEntryInstr* has_positional;
3271 check_entry += BranchIfTrue(&has_named, &has_positional);
3272 JoinEntryInstr* join_after_optional = BuildJoinEntry();
3273 check_entry.current = join_after_optional;
3274
3275 if (info.descriptor.NamedCount() > 0) {
3276 // Named arguments were provided, but this function takes none, so throw.
3277 Fragment(has_positional) + Goto(info.throw_no_such_method);
3278 } else {
3279 Fragment check_pos(has_positional);
3280 check_pos += LoadLocal(info.num_fixed_params);
3281 check_pos += IntConstant(info.descriptor.PositionalCount());
3282 check_pos += SmiRelationalOp(Token::kLTE);
3283 TargetEntryInstr* enough;
3284 TargetEntryInstr* too_few;
3285 check_pos += BranchIfTrue(&enough, &too_few);
3286 check_pos.current = enough;
3287
3288 Fragment(too_few) + Goto(info.throw_no_such_method);
3289
3290 check_pos += IntConstant(info.descriptor.PositionalCount());
3291 check_pos += LoadLocal(info.num_max_params);
3292 check_pos += SmiRelationalOp(Token::kLTE);
3293 TargetEntryInstr* valid;
3294 TargetEntryInstr* too_many;
3295 check_pos += BranchIfTrue(&valid, &too_many);
3296 check_pos.current = valid;
3297
3298 Fragment(too_many) + Goto(info.throw_no_such_method);
3299
3300 check_pos += Goto(join_after_optional);
3301 }
3302
3303 Fragment check_named(has_named);
3304
3305 TargetEntryInstr* same;
3306 TargetEntryInstr* different;
3307 check_named += LoadLocal(info.num_fixed_params);
3308 check_named += IntConstant(info.descriptor.PositionalCount());
3309 check_named += BranchIfEqual(&same, &different);
3310 check_named.current = same;
3311
3312 Fragment(different) + Goto(info.throw_no_such_method);
3313
3314 if (info.descriptor.NamedCount() > 0) {
3315 check_named += IntConstant(info.descriptor.NamedCount());
3316 check_named += LoadLocal(info.num_opt_params);
3317 check_named += SmiRelationalOp(Token::kLTE);
3318 TargetEntryInstr* valid;
3319 TargetEntryInstr* too_many;
3320 check_named += BranchIfTrue(&valid, &too_many);
3321 check_named.current = valid;
3322
3323 Fragment(too_many) + Goto(info.throw_no_such_method);
3324 }
3325
3326 // Check the names for optional arguments. If applicable, also check that all
3327 // required named parameters are provided.
3328 check_named += BuildClosureCallNamedArgumentsCheck(info);
3329 check_named += Goto(join_after_optional);
3330
3331 check_entry.current = join_after_optional;
3332 return check_entry;
3333}
3334
3335Fragment FlowGraphBuilder::BuildClosureCallTypeArgumentsTypeCheck(
3336 const ClosureCallInfo& info) {
3337 JoinEntryInstr* done = BuildJoinEntry();
3338 JoinEntryInstr* loop = BuildJoinEntry();
3339
3340 // We assume that the value stored in :t_type_parameters is not null (i.e.,
3341 // the function stored in :t_function is generic).
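// The loop below checks, for each non-covariant type parameter i, that
//   function_type_args[num_parent_type_args + i] <: bounds[i]
// (a sketch; covariant parameters are skipped via the flags array).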
3342 Fragment loop_init;
3343
3344 // A null bounds vector represents a vector of dynamic bounds, so no check is needed.
3345 loop_init += LoadLocal(info.type_parameters);
3346 loop_init += LoadNativeField(Slot::TypeParameters_bounds());
3347 TargetEntryInstr* null_bounds;
3348 TargetEntryInstr* non_null_bounds;
3349 loop_init += BranchIfNull(&null_bounds, &non_null_bounds);
3350
3351 Fragment(null_bounds) + Goto(done);
3352
3353 loop_init.current = non_null_bounds;
3354 // Loop over the type parameters array.
3355 loop_init += IntConstant(0);
3356 loop_init += StoreLocal(info.vars->current_param_index);
3357 loop_init += Drop();
3358 loop_init += Goto(loop);
3359
3360 Fragment loop_check(loop);
3361 loop_check += LoadLocal(info.vars->current_param_index);
3362 loop_check += LoadLocal(info.num_type_parameters);
3363 loop_check += SmiRelationalOp(Token::kLT);
3364 TargetEntryInstr* more;
3365 TargetEntryInstr* no_more;
3366 loop_check += BranchIfTrue(&more, &no_more);
3367
3368 Fragment(no_more) + Goto(done);
3369
3370 Fragment loop_test_flag(more);
3371 JoinEntryInstr* next = BuildJoinEntry();
3372 JoinEntryInstr* check = BuildJoinEntry();
3373 loop_test_flag += LoadLocal(info.type_parameter_flags);
3374 TargetEntryInstr* null_flags;
3375 TargetEntryInstr* non_null_flags;
3376 loop_test_flag += BranchIfNull(&null_flags, &non_null_flags);
3377
3378 Fragment(null_flags) + Goto(check); // Check type if null (non-covariant).
3379
3380 loop_test_flag.current = non_null_flags; // Test flags if not null.
3381 loop_test_flag += LoadLocal(info.type_parameter_flags);
3382 loop_test_flag += LoadLocal(info.vars->current_param_index);
3383 loop_test_flag += IntConstant(TypeParameters::kFlagsPerSmiShift);
3384 loop_test_flag += SmiBinaryOp(Token::kSHR);
3385 loop_test_flag += LoadIndexed(
3386 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3387 loop_test_flag += LoadLocal(info.vars->current_param_index);
3388 loop_test_flag += IntConstant(TypeParameters::kFlagsPerSmiMask);
3389 loop_test_flag += SmiBinaryOp(Token::kBIT_AND);
3390 loop_test_flag += SmiBinaryOp(Token::kSHR);
3391 loop_test_flag += IntConstant(1);
3392 loop_test_flag += SmiBinaryOp(Token::kBIT_AND);
3393 loop_test_flag += IntConstant(0);
3394 TargetEntryInstr* is_noncovariant;
3395 TargetEntryInstr* is_covariant;
3396 loop_test_flag += BranchIfEqual(&is_noncovariant, &is_covariant);
3397
3398 Fragment(is_covariant) + Goto(next); // Continue if covariant.
3399 Fragment(is_noncovariant) + Goto(check); // Check type if non-covariant.
3400
3401 Fragment loop_prep_type_param(check);
3402 JoinEntryInstr* dynamic_type_param = BuildJoinEntry();
3403 JoinEntryInstr* call = BuildJoinEntry();
3404
3405 // Load type argument already stored in function_type_args if non null.
3406 loop_prep_type_param += LoadLocal(info.vars->function_type_args);
3407 TargetEntryInstr* null_ftav;
3408 TargetEntryInstr* non_null_ftav;
3409 loop_prep_type_param += BranchIfNull(&null_ftav, &non_null_ftav);
3410
3411 Fragment(null_ftav) + Goto(dynamic_type_param);
3412
3413 loop_prep_type_param.current = non_null_ftav;
3414 loop_prep_type_param += LoadLocal(info.vars->function_type_args);
3415 loop_prep_type_param += LoadLocal(info.vars->current_param_index);
3416 loop_prep_type_param += LoadLocal(info.num_parent_type_args);
3417 loop_prep_type_param += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3418 loop_prep_type_param += LoadIndexed(
3419 kTypeArgumentsCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3420 loop_prep_type_param += StoreLocal(info.vars->current_type_param);
3421 loop_prep_type_param += Drop();
3422 loop_prep_type_param += Goto(call);
3423
3424 Fragment loop_dynamic_type_param(dynamic_type_param);
3425 // If function_type_args is null, the instantiated type param is dynamic.
3426 loop_dynamic_type_param += Constant(Type::ZoneHandle(Type::DynamicType()));
3427 loop_dynamic_type_param += StoreLocal(info.vars->current_type_param);
3428 loop_dynamic_type_param += Drop();
3429 loop_dynamic_type_param += Goto(call);
3430
3431 Fragment loop_call_check(call);
3432 // Load instantiators.
3433 loop_call_check += LoadLocal(info.instantiator_type_args);
3434 loop_call_check += LoadLocal(info.vars->function_type_args);
3435 // Load instantiated type parameter.
3436 loop_call_check += LoadLocal(info.vars->current_type_param);
3437 // Load bound from type parameters.
3438 loop_call_check += LoadLocal(info.type_parameters);
3439 loop_call_check += LoadNativeField(Slot::TypeParameters_bounds());
3440 loop_call_check += LoadLocal(info.vars->current_param_index);
3441 loop_call_check += LoadIndexed(
3442 kTypeArgumentsCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3443 // Load (canonicalized) name of type parameter in signature.
3444 loop_call_check += LoadLocal(info.type_parameters);
3445 loop_call_check += LoadNativeField(Slot::TypeParameters_names());
3446 loop_call_check += LoadLocal(info.vars->current_param_index);
3447 loop_call_check += LoadIndexed(
3448 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3449 // Assert that the passed-in type argument is consistent with the bound of
3450 // the corresponding type parameter.
3451 loop_call_check += AssertSubtype(TokenPosition::kNoSource);
3452 loop_call_check += Goto(next);
3453
3454 Fragment loop_incr(next);
3455 loop_incr += LoadLocal(info.vars->current_param_index);
3456 loop_incr += IntConstant(1);
3457 loop_incr += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3458 loop_incr += StoreLocal(info.vars->current_param_index);
3459 loop_incr += Drop();
3460 loop_incr += Goto(loop);
3461
3462 return Fragment(loop_init.entry, done);
3463}
3464
3465Fragment FlowGraphBuilder::BuildClosureCallArgumentTypeCheck(
3466 const ClosureCallInfo& info,
3467 LocalVariable* param_index,
3468 intptr_t arg_index,
3469 const String& arg_name) {
3470 Fragment instructions;
3471
3472 // Load value.
3473 instructions += LoadLocal(parsed_function_->ParameterVariable(arg_index));
3474 // Load destination type.
3475 instructions += LoadLocal(info.parameter_types);
3476 instructions += LoadLocal(param_index);
3477 instructions += LoadIndexed(
3478 kArrayCid, /*index_scale*/ compiler::target::kCompressedWordSize);
3479 // Load instantiator type arguments.
3480 instructions += LoadLocal(info.instantiator_type_args);
3481 // Load the full set of function type arguments.
3482 instructions += LoadLocal(info.vars->function_type_args);
3483 // Check that the value has the right type.
3484 instructions += AssertAssignable(TokenPosition::kNoSource, arg_name,
3485 AssertAssignableInstr::kParameterCheck);
3486 // Make sure to store the result to keep data dependencies accurate.
3487 instructions += StoreLocal(parsed_function_->ParameterVariable(arg_index));
3488 instructions += Drop();
3489
3490 return instructions;
3491}
3492
3493Fragment FlowGraphBuilder::BuildClosureCallArgumentTypeChecks(
3494 const ClosureCallInfo& info) {
3495 Fragment instructions;
3496
3497 // Only check explicit arguments (i.e., skip the receiver), as the receiver
3498 // is always assignable to its type (stored as dynamic).
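// For illustration (Dart): for a closure of static type
// `void Function(int)` invoked dynamically with a String argument, the
// AssertAssignable generated below is what throws the TypeError.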
3499 for (intptr_t i = 1; i < info.descriptor.PositionalCount(); i++) {
3500 instructions += IntConstant(i);
3501 LocalVariable* param_index = MakeTemporary("param_index");
3502 // We don't have a compile-time name, so this symbol signals the runtime
3503 // that it should recreate the type check using info from the stack.
3504 instructions += BuildClosureCallArgumentTypeCheck(
3505 info, param_index, i, Symbols::dynamic_assert_assignable_stc_check());
3506 instructions += DropTemporary(&param_index);
3507 }
3508
3509 for (intptr_t i = 0; i < info.descriptor.NamedCount(); i++) {
3510 const intptr_t arg_index = info.descriptor.PositionAt(i);
3511 auto const param_index = info.vars->named_argument_parameter_indices.At(i);
3512 // We have a compile-time name available, but we still want the runtime to
3513 // detect that the generated AssertAssignable instruction is dynamic.
3514 instructions += BuildClosureCallArgumentTypeCheck(
3515 info, param_index, arg_index,
3516 Symbols::dynamic_assert_assignable_stc_check());
3517 }
3518
3519 return instructions;
3520}
3521
3522Fragment FlowGraphBuilder::BuildDynamicClosureCallChecks(
3523 LocalVariable* closure) {
3524 ClosureCallInfo info(closure, BuildThrowNoSuchMethod(),
3525 saved_args_desc_array(),
3526 parsed_function_->dynamic_closure_call_vars());
3527
3528 Fragment body;
3529 body += LoadLocal(info.closure);
3530 body += LoadNativeField(Slot::Closure_function());
3531 body += LoadNativeField(Slot::Function_signature());
3532 info.signature = MakeTemporary("signature");
3533
3534 body += LoadLocal(info.signature);
3535 body += BuildExtractUnboxedSlotBitFieldIntoSmi<
3536 FunctionType::PackedNumFixedParameters>(
3537 Slot::FunctionType_packed_parameter_counts());
3538 info.num_fixed_params = MakeTemporary("num_fixed_params");
3539
3540 body += LoadLocal(info.signature);
3541 body += BuildExtractUnboxedSlotBitFieldIntoSmi<
3542 FunctionType::PackedNumOptionalParameters>(
3543 Slot::FunctionType_packed_parameter_counts());
3544 info.num_opt_params = MakeTemporary("num_opt_params");
3545
3546 body += LoadLocal(info.num_fixed_params);
3547 body += LoadLocal(info.num_opt_params);
3548 body += SmiBinaryOp(Token::kADD);
3549 info.num_max_params = MakeTemporary("num_max_params");
3550
3551 body += LoadLocal(info.signature);
3552 body += BuildExtractUnboxedSlotBitFieldIntoSmi<
3553 FunctionType::PackedHasNamedOptionalParameters>(
3554 Slot::FunctionType_packed_parameter_counts());
3555
3556 body += IntConstant(0);
3557 body += StrictCompare(Token::kNE_STRICT);
3558 info.has_named_params = MakeTemporary("has_named_params");
3559
3560 body += LoadLocal(info.signature);
3561 body += LoadNativeField(Slot::FunctionType_named_parameter_names());
3562 info.named_parameter_names = MakeTemporary("named_parameter_names");
3563
3564 body += LoadLocal(info.signature);
3565 body += LoadNativeField(Slot::FunctionType_parameter_types());
3566 info.parameter_types = MakeTemporary("parameter_types");
3567
3568 body += LoadLocal(info.signature);
3569 body += LoadNativeField(Slot::FunctionType_type_parameters());
3570 info.type_parameters = MakeTemporary("type_parameters");
3571
3572 body += LoadLocal(info.closure);
3573 body += LoadNativeField(Slot::Closure_instantiator_type_arguments());
3574 info.instantiator_type_args = MakeTemporary("instantiator_type_args");
3575
3576 body += LoadLocal(info.closure);
3577 body += LoadNativeField(Slot::Closure_function_type_arguments());
3578 info.parent_function_type_args = MakeTemporary("parent_function_type_args");
3579
3580 // At this point, all the read-only temporaries stored in the ClosureCallInfo
3581 // should be either loaded or still nullptr, if not needed for this function.
3582 // Now we check that the arguments to the closure call have the right shape.
3583 body += BuildClosureCallArgumentsValidCheck(info);
3584
3585 // If the closure function is not generic, there are no local function type
3586 // args. Thus, use whatever was stored for the parent function type arguments,
3587 // which has already been checked against any parent type parameter bounds.
3588 Fragment not_generic;
3589 not_generic += LoadLocal(info.parent_function_type_args);
3590 not_generic += StoreLocal(info.vars->function_type_args);
3591 not_generic += Drop();
3592
3593 // If the closure function is generic, then we first need to calculate the
3594 // full set of function type arguments, then check the local function type
3595 // arguments against the closure function's type parameter bounds.
3596 Fragment generic;
3597 // Calculate the number of parent type arguments and store them in
3598 // info.num_parent_type_args.
3599 generic += LoadLocal(info.signature);
3600 generic += BuildExtractUnboxedSlotBitFieldIntoSmi<
3601 FunctionType::PackedNumParentTypeArguments>(
3602 Slot::FunctionType_packed_type_parameter_counts());
3603 info.num_parent_type_args = MakeTemporary("num_parent_type_args");
3604
3605 // Hoist number of type parameters.
3606 generic += LoadLocal(info.signature);
3607 generic += BuildExtractUnboxedSlotBitFieldIntoSmi<
3608 FunctionType::PackedNumTypeParameters>(
3609 Slot::FunctionType_packed_type_parameter_counts());
3610 info.num_type_parameters = MakeTemporary("num_type_parameters");
3611
3612 // Hoist type parameter flags.
3613 generic += LoadLocal(info.type_parameters);
3614 generic += LoadNativeField(Slot::TypeParameters_flags());
3615 info.type_parameter_flags = MakeTemporary("type_parameter_flags");
3616
3617 // Calculate the local function type arguments and store them in
3618 // info.vars->function_type_args.
3619 generic += BuildClosureCallDefaultTypeHandling(info);
3620
3621 // Load the local function type args.
3622 generic += LoadLocal(info.vars->function_type_args);
3623 // Load the parent function type args.
3624 generic += LoadLocal(info.parent_function_type_args);
3625 // Load the number of parent type parameters.
3626 generic += LoadLocal(info.num_parent_type_args);
3627 // Load the number of total type parameters.
3628 generic += LoadLocal(info.num_parent_type_args);
3629 generic += LoadLocal(info.num_type_parameters);
3630 generic += SmiBinaryOp(Token::kADD, /*is_truncating=*/true);
3631
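// The four values pushed above are, in order: the local function type
// arguments, the parent function type arguments, the parent count, and
// the total count; they form the four arguments of the static call below.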
3632 // Call the static function for prepending type arguments.
3633 generic += StaticCall(TokenPosition::kNoSource,
3634 PrependTypeArgumentsFunction(), 4, ICData::kStatic);
3635 generic += StoreLocal(info.vars->function_type_args);
3636 generic += Drop();
3637
3638 // Now that we have the full set of function type arguments, check them
3639 // against the type parameter bounds. However, if the local function type
3640 // arguments are delayed type arguments, they have already been checked by
3641 // the type system and need not be checked again at the call site.
3642 auto const check_bounds = BuildClosureCallTypeArgumentsTypeCheck(info);
3643 if (FLAG_eliminate_type_checks) {
3644 generic += TestDelayedTypeArgs(info.closure, /*present=*/{},
3645 /*absent=*/check_bounds);
3646 } else {
3647 generic += check_bounds;
3648 }
3649 generic += DropTemporary(&info.type_parameter_flags);
3650 generic += DropTemporary(&info.num_type_parameters);
3651 generic += DropTemporary(&info.num_parent_type_args);
3652
3653 // Call the appropriate fragment for setting up the function type arguments
3654 // and performing any needed type argument checking.
3655 body += TestClosureFunctionGeneric(info, generic, not_generic);
3656
3657 // Check that the values provided as arguments are assignable to the types
3658 // of the corresponding closure function parameters.
3659 body += BuildClosureCallArgumentTypeChecks(info);
3660
3661 // Drop all the read-only temporaries at the end of the fragment.
3662 body += DropTemporary(&info.parent_function_type_args);
3663 body += DropTemporary(&info.instantiator_type_args);
3664 body += DropTemporary(&info.type_parameters);
3665 body += DropTemporary(&info.parameter_types);
3666 body += DropTemporary(&info.named_parameter_names);
3667 body += DropTemporary(&info.has_named_params);
3668 body += DropTemporary(&info.num_max_params);
3669 body += DropTemporary(&info.num_opt_params);
3670 body += DropTemporary(&info.num_fixed_params);
3671 body += DropTemporary(&info.signature);
3672
3673 return body;
3674}
3675
3676FlowGraph* FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher(
3677 const Function& function) {
3678 const ArgumentsDescriptor descriptor(saved_args_desc_array());
3679 // Find the name of the field we should dispatch to.
3680 const Class& owner = Class::Handle(Z, function.Owner());
3681 ASSERT(!owner.IsNull());
3682 auto& field_name = String::Handle(Z, function.name());
3683 // If the field name has a dyn: tag, then remove it. We don't add dynamic
3684 // invocation forwarders for field getters used for invoking; we just use
3685 // the tag in the name of the invoke field dispatcher to detect dynamic calls.
3686 const bool is_dynamic_call =
3687 Function::IsDynamicInvocationForwarderName(field_name);
3688 if (is_dynamic_call) {
3689 field_name = Function::DemangleDynamicInvocationForwarderName(field_name);
3690 }
3691 const String& getter_name = String::ZoneHandle(
3692 Z, Symbols::New(thread_,
3693 String::Handle(Z, Field::GetterSymbol(field_name))));
3694
3695 // Determine if this is `class Closure { get call => this; }`
3696 const Class& closure_class =
3697 Class::Handle(Z, IG->object_store()->closure_class());
3698 const bool is_closure_call = (owner.ptr() == closure_class.ptr()) &&
3699 field_name.Equals(Symbols::call());
3700
3701 graph_entry_ =
3702 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
3703
3704 auto normal_entry = BuildFunctionEntry(graph_entry_);
3705 graph_entry_->set_normal_entry(normal_entry);
3706
3707 PrologueInfo prologue_info(-1, -1);
3708 BlockEntryInstr* instruction_cursor =
3709 BuildPrologue(normal_entry, &prologue_info);
3710
3711 Fragment body(instruction_cursor);
3712 body += CheckStackOverflowInPrologue(function.token_pos());
3713
3714 // Build any dynamic closure call checks before pushing arguments to the
3715 // final call on the stack to make debugging easier.
3716 LocalVariable* closure = nullptr;
3717 if (is_closure_call) {
3718 closure = parsed_function_->ParameterVariable(0);
3719 if (is_dynamic_call) {
3720 // The whole reason for making this invoke field dispatcher is that
3721 // this closure call needs checking, so we shouldn't inline a call to an
3722 // unchecked entry that can't tail call NSM.
3723 InlineBailout(
3724 "kernel::FlowGraphBuilder::BuildGraphOfInvokeFieldDispatcher");
3725
3726 body += BuildDynamicClosureCallChecks(closure);
3727 }
3728 }
3729
3730 if (descriptor.TypeArgsLen() > 0) {
3731 LocalVariable* type_args = parsed_function_->function_type_arguments();
3732 ASSERT(type_args != nullptr);
3733 body += LoadLocal(type_args);
3734 }
3735
3736 if (is_closure_call) {
3737 // The closure itself is the first argument.
3738 body += LoadLocal(closure);
3739 } else {
3740 // Invoke the getter to get the field value.
3741 body += LoadLocal(parsed_function_->ParameterVariable(0));
3742 const intptr_t kTypeArgsLen = 0;
3743 const intptr_t kNumArgsChecked = 1;
3744 body += InstanceCall(TokenPosition::kMinSource, getter_name, Token::kGET,
3745 kTypeArgsLen, 1, Array::null_array(), kNumArgsChecked);
3746 }
3747
3748 // Push the remaining arguments onto the stack.
3749 for (intptr_t pos = 1; pos < descriptor.Count(); pos++) {
3750 body += LoadLocal(parsed_function_->ParameterVariable(pos));
3751 }
3752
3753 // Construct argument names array if necessary.
3754 const Array* argument_names = &Object::null_array();
3755 if (descriptor.NamedCount() > 0) {
3756 const auto& array_handle =
3757 Array::ZoneHandle(Z, Array::New(descriptor.NamedCount(), Heap::kNew));
3758 String& string_handle = String::Handle(Z);
3759 for (intptr_t i = 0; i < descriptor.NamedCount(); ++i) {
3760 const intptr_t named_arg_index =
3761 descriptor.PositionAt(i) - descriptor.PositionalCount();
3762 string_handle = descriptor.NameAt(i);
3763 array_handle.SetAt(named_arg_index, string_handle);
3764 }
3765 argument_names = &array_handle;
3766 }
3767
3768 if (is_closure_call) {
3769 body += LoadLocal(closure);
3770 if (!FLAG_precompiled_mode) {
3771 // Lookup the function in the closure.
3772 body += LoadNativeField(Slot::Closure_function());
3773 }
3774 body += ClosureCall(Function::null_function(), TokenPosition::kNoSource,
3775 descriptor.TypeArgsLen(), descriptor.Count(),
3776 *argument_names);
3777 } else {
3778 const intptr_t kNumArgsChecked = 1;
3779 body +=
3780 InstanceCall(TokenPosition::kMinSource,
3781 is_dynamic_call ? Symbols::DynamicCall() : Symbols::call(),
3782 Token::kILLEGAL, descriptor.TypeArgsLen(),
3783 descriptor.Count(), *argument_names, kNumArgsChecked);
3784 }
3785
3786 body += Return(TokenPosition::kNoSource);
3787
3788 return new (Z)
3789 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
3790 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
3791}
3792
3793FlowGraph* FlowGraphBuilder::BuildGraphOfNoSuchMethodForwarder(
3794 const Function& function,
3795 bool is_implicit_closure_function,
3796 bool throw_no_such_method_error) {
3797 graph_entry_ =
3798 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
3799
3800 auto normal_entry = BuildFunctionEntry(graph_entry_);
3801 graph_entry_->set_normal_entry(normal_entry);
3802
3803 PrologueInfo prologue_info(-1, -1);
3804 BlockEntryInstr* instruction_cursor =
3805 BuildPrologue(normal_entry, &prologue_info);
3806
3807 Fragment body(instruction_cursor);
3808 body += CheckStackOverflowInPrologue(function.token_pos());
3809
3810 // If we are inside the tearoff wrapper function (implicit closure), we need
3811 // to extract the receiver from the context. We just replace it directly on
3812 // the stack to simplify the rest of the code.
3813 if (is_implicit_closure_function && !function.is_static()) {
3814 if (parsed_function_->has_arg_desc_var()) {
3815 body += LoadArgDescriptor();
3816 body += LoadNativeField(Slot::ArgumentsDescriptor_size());
3817 } else {
3818 ASSERT(function.NumOptionalParameters() == 0);
3819 body += IntConstant(function.NumParameters());
3820 }
3821 body += LoadLocal(parsed_function_->current_context_var());
3822 body += StoreFpRelativeSlot(
3823 kWordSize * compiler::target::frame_layout.param_end_from_fp);
3824 }
3825
3826 if (function.NeedsTypeArgumentTypeChecks()) {
3827 BuildTypeArgumentTypeChecks(TypeChecksToBuild::kCheckAllTypeParameterBounds,
3828 &body);
3829 }
3830
3831 if (function.NeedsArgumentTypeChecks()) {
3832 BuildArgumentTypeChecks(&body, &body, nullptr);
3833 }
3834
3835 body += MakeTemp();
3836 LocalVariable* result = MakeTemporary();
3837
3838 // Do "++argument_count" if any type arguments were passed.
3839 LocalVariable* argument_count_var = parsed_function_->expression_temp_var();
3840 body += IntConstant(0);
3841 body += StoreLocal(TokenPosition::kNoSource, argument_count_var);
3842 body += Drop();
3843 if (function.IsGeneric()) {
3844 Fragment then;
3845 Fragment otherwise;
3846 otherwise += IntConstant(1);
3847 otherwise += StoreLocal(TokenPosition::kNoSource, argument_count_var);
3848 otherwise += Drop();
3849 body += TestAnyTypeArgs(then, otherwise);
3850 }
3851
3852 if (function.HasOptionalParameters()) {
3853 body += LoadArgDescriptor();
3854 body += LoadNativeField(Slot::ArgumentsDescriptor_size());
3855 } else {
3856 body += IntConstant(function.NumParameters());
3857 }
3858 body += LoadLocal(argument_count_var);
3859 body += SmiBinaryOp(Token::kADD, /* truncate= */ true);
3860 LocalVariable* argument_count = MakeTemporary();
3861
3862 // We are generating code like the following:
3863 //
3864 // var arguments = new Array<dynamic>(argument_count);
3865 //
3866 // int i = 0;
3867 // if (any type arguments are passed) {
3868 // arguments[0] = function_type_arguments;
3869 // ++i;
3870 // }
3871 //
3872 // for (; i < argument_count; ++i) {
3873 // arguments[i] = LoadFpRelativeSlot(
3874 // kWordSize * (frame_layout.param_end_from_fp + argument_count - i));
3875 // }
3876 body += Constant(TypeArguments::ZoneHandle(Z, TypeArguments::null()));
3877 body += LoadLocal(argument_count);
3878 body += CreateArray();
3879 LocalVariable* arguments = MakeTemporary();
3880
3881 {
3882 // int i = 0
3883 LocalVariable* index = parsed_function_->expression_temp_var();
3884 body += IntConstant(0);
3885 body += StoreLocal(TokenPosition::kNoSource, index);
3886 body += Drop();
3887
3888 // if (any type arguments are passed) {
3889 // arguments[0] = function_type_arguments;
3890 // i = 1;
3891 // }
3892 if (function.IsGeneric()) {
3893 Fragment store;
3894 store += LoadLocal(arguments);
3895 store += IntConstant(0);
3896 store += LoadFunctionTypeArguments();
3897 store += StoreIndexed(kArrayCid);
3898 store += IntConstant(1);
3899 store += StoreLocal(TokenPosition::kNoSource, index);
3900 store += Drop();
3901 body += TestAnyTypeArgs(store, Fragment());
3902 }
3903
3904 TargetEntryInstr* body_entry;
3905 TargetEntryInstr* loop_exit;
3906
3907 Fragment condition;
3908 // i < argument_count
3909 condition += LoadLocal(index);
3910 condition += LoadLocal(argument_count);
3911 condition += SmiRelationalOp(Token::kLT);
3912 condition += BranchIfTrue(&body_entry, &loop_exit, /*negate=*/false);
3913
3914 Fragment loop_body(body_entry);
3915
3916 // arguments[i] = LoadFpRelativeSlot(
3917 // kWordSize * (frame_layout.param_end_from_fp + argument_count - i));
3918 loop_body += LoadLocal(arguments);
3919 loop_body += LoadLocal(index);
3920 loop_body += LoadLocal(argument_count);
3921 loop_body += LoadLocal(index);
3922 loop_body += SmiBinaryOp(Token::kSUB, /*truncate=*/true);
3923 loop_body +=
3924 LoadFpRelativeSlot(compiler::target::kWordSize *
3925 compiler::target::frame_layout.param_end_from_fp,
3926 CompileType::Dynamic());
3927 loop_body += StoreIndexed(kArrayCid);
3928
3929 // ++i
3930 loop_body += LoadLocal(index);
3931 loop_body += IntConstant(1);
3932 loop_body += SmiBinaryOp(Token::kADD, /*truncate=*/true);
3933 loop_body += StoreLocal(TokenPosition::kNoSource, index);
3934 loop_body += Drop();
3935
3936 JoinEntryInstr* join = BuildJoinEntry();
3937 loop_body += Goto(join);
3938
3939 Fragment loop(join);
3940 loop += condition;
3941
3942 Instruction* entry =
3943 new (Z) GotoInstr(join, CompilerState::Current().GetNextDeoptId());
3944 body += Fragment(entry, loop_exit);
3945 }
3946
3947 // Load receiver.
3948 if (is_implicit_closure_function) {
3949 if (throw_no_such_method_error) {
3950 const Function& parent =
3951 Function::ZoneHandle(Z, function.parent_function());
3952 const Class& owner = Class::ZoneHandle(Z, parent.Owner());
3953 AbstractType& type = AbstractType::ZoneHandle(Z);
3954 type = Type::New(owner, Object::null_type_arguments());
3955 type = ClassFinalizer::FinalizeType(type);
3956 body += Constant(type);
3957 } else {
3958 body += LoadLocal(parsed_function_->current_context_var());
3959 }
3960 } else {
3961 body += LoadLocal(parsed_function_->ParameterVariable(0));
3962 }
3963
3964 body += Constant(String::ZoneHandle(Z, function.name()));
3965
3966 if (!parsed_function_->has_arg_desc_var()) {
3967 // If there is no variable for the arguments descriptor (this function's
3968 // signature doesn't require it), then we need to create one.
3969 Array& args_desc = Array::ZoneHandle(
3970 Z, ArgumentsDescriptor::NewBoxed(0, function.NumParameters()));
3971 body += Constant(args_desc);
3972 } else {
3973 body += LoadArgDescriptor();
3974 }
3975
3976 body += LoadLocal(arguments);
3977
3978 if (throw_no_such_method_error) {
3979 const Function& parent =
3980 Function::ZoneHandle(Z, function.parent_function());
3981 const Class& owner = Class::ZoneHandle(Z, parent.Owner());
3982 InvocationMirror::Level im_level = owner.IsTopLevel()
3983 ? InvocationMirror::kTopLevel
3984 : InvocationMirror::kStatic;
3985 InvocationMirror::Kind im_kind;
3986 if (function.IsImplicitGetterFunction() || function.IsGetterFunction()) {
3987 im_kind = InvocationMirror::kGetter;
3988 } else if (function.IsImplicitSetterFunction() ||
3989 function.IsSetterFunction()) {
3990 im_kind = InvocationMirror::kSetter;
3991 } else {
3992 im_kind = InvocationMirror::kMethod;
3993 }
3994 body += IntConstant(InvocationMirror::EncodeType(im_level, im_kind));
3995 } else {
3996 body += NullConstant();
3997 }
3998
3999 // Push the number of delayed type arguments.
4000 if (function.IsClosureFunction()) {
4001 LocalVariable* closure = parsed_function_->ParameterVariable(0);
4002 Fragment then;
4003 then += IntConstant(function.NumTypeParameters());
4004 then += StoreLocal(TokenPosition::kNoSource, argument_count_var);
4005 then += Drop();
4006 Fragment otherwise;
4007 otherwise += IntConstant(0);
4008 otherwise += StoreLocal(TokenPosition::kNoSource, argument_count_var);
4009 otherwise += Drop();
4010 body += TestDelayedTypeArgs(closure, then, otherwise);
4011 body += LoadLocal(argument_count_var);
4012 } else {
4013 body += IntConstant(0);
4014 }
4015
4016 const Class& mirror_class =
4017 Class::Handle(Z, Library::LookupCoreClass(Symbols::InvocationMirror()));
4018 ASSERT(!mirror_class.IsNull());
4019 const auto& error = mirror_class.EnsureIsFinalized(H.thread());
4020 ASSERT(error == Error::null());
4021 const Function& allocation_function = Function::ZoneHandle(
4022 Z, mirror_class.LookupStaticFunction(Library::PrivateCoreLibName(
4023 Symbols::AllocateInvocationMirrorForClosure())));
4024 ASSERT(!allocation_function.IsNull());
4025 body += StaticCall(TokenPosition::kMinSource, allocation_function,
4026 /* argument_count = */ 5, ICData::kStatic);
4027
4028 if (throw_no_such_method_error) {
4029 const Class& klass = Class::ZoneHandle(
4030 Z, Library::LookupCoreClass(Symbols::NoSuchMethodError()));
4031 ASSERT(!klass.IsNull());
4032 const auto& error = klass.EnsureIsFinalized(H.thread());
4033 ASSERT(error == Error::null());
4034 const Function& throw_function = Function::ZoneHandle(
4035 Z,
4036 klass.LookupStaticFunctionAllowPrivate(Symbols::ThrowNewInvocation()));
4037 ASSERT(!throw_function.IsNull());
4038 body += StaticCall(TokenPosition::kNoSource, throw_function, 2,
4039 ICData::kStatic);
4040 } else {
4041 body += InstanceCall(
4042 TokenPosition::kNoSource, Symbols::NoSuchMethod(), Token::kILLEGAL,
4043 /*type_args_len=*/0, /*argument_count=*/2, Array::null_array(),
4044 /*checked_argument_count=*/1);
4045 }
4046 body += StoreLocal(TokenPosition::kNoSource, result);
4047 body += Drop();
4048
4049 body += Drop(); // arguments
4050 body += Drop(); // argument count
4051
4052 AbstractType& return_type = AbstractType::Handle(function.result_type());
4053 if (!return_type.IsTopTypeForSubtyping()) {
4054 body += AssertAssignableLoadTypeArguments(TokenPosition::kNoSource,
4055 return_type, Symbols::Empty());
4056 }
4057 body += Return(TokenPosition::kNoSource);
4058
4059 return new (Z)
4060 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
4061 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
4062}
4063
4064Fragment FlowGraphBuilder::BuildDefaultTypeHandling(const Function& function) {
4065 Fragment keep_same, use_defaults;
4066
4067 if (!function.IsGeneric()) return keep_same;
4068
4069 const auto& default_types =
4070 TypeArguments::ZoneHandle(Z, function.DefaultTypeArguments(Z));
4071
4072 if (default_types.IsNull()) return keep_same;
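// Past this point the function is generic and has non-null defaults:
// use_defaults computes the default TAV below, and TestAnyTypeArgs at the
// end keeps the caller-provided TAV when one was passed, otherwise stores
// the defaults.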
4073
4074 if (function.IsClosureFunction()) {
4075 // Note that we can't use TranslateInstantiatedTypeArguments here as
4076 // that uses LoadInstantiatorTypeArguments() and LoadFunctionTypeArguments()
4077 // for the instantiator and function type argument vectors, but here we
4078 // load the instantiator and parent function type argument vectors from
4079 // the closure object instead.
4080 LocalVariable* const closure = parsed_function_->ParameterVariable(0);
4081 auto const mode = function.default_type_arguments_instantiation_mode();
4082
4083 switch (mode) {
4084 case InstantiationMode::kIsInstantiated:
4085 use_defaults += Constant(default_types);
4086 break;
4087 case InstantiationMode::kSharesInstantiatorTypeArguments:
4088 use_defaults += LoadLocal(closure);
4089 use_defaults +=
4090 LoadNativeField(Slot::Closure_instantiator_type_arguments());
4091 break;
4092 case InstantiationMode::kSharesFunctionTypeArguments:
4093 use_defaults += LoadLocal(closure);
4094 use_defaults +=
4095 LoadNativeField(Slot::Closure_function_type_arguments());
4096 break;
4097 case InstantiationMode::kNeedsInstantiation:
4098 // Only load the instantiator or function type arguments from the
4099 // closure if they're needed for instantiation.
4100 if (!default_types.IsInstantiated(kCurrentClass)) {
4101 use_defaults += LoadLocal(closure);
4102 use_defaults +=
4103 LoadNativeField(Slot::Closure_instantiator_type_arguments());
4104 } else {
4105 use_defaults += NullConstant();
4106 }
4107 if (!default_types.IsInstantiated(kFunctions)) {
4108 use_defaults += LoadLocal(closure);
4109 use_defaults +=
4110 LoadNativeField(Slot::Closure_function_type_arguments());
4111 } else {
4112 use_defaults += NullConstant();
4113 }
4114 use_defaults += InstantiateTypeArguments(default_types);
4115 break;
4116 }
4117 } else {
4118 use_defaults += TranslateInstantiatedTypeArguments(default_types);
4119 }
4120 use_defaults += StoreLocal(parsed_function_->function_type_arguments());
4121 use_defaults += Drop();
4122
4123 return TestAnyTypeArgs(keep_same, use_defaults);
4124}
4125
4126FunctionEntryInstr* FlowGraphBuilder::BuildSharedUncheckedEntryPoint(
4127 Fragment shared_prologue_linked_in,
4128 Fragment skippable_checks,
4129 Fragment redefinitions_if_skipped,
4130 Fragment body) {
4131 ASSERT(shared_prologue_linked_in.entry == graph_entry_->normal_entry());
4132 ASSERT(parsed_function_->has_entry_points_temp_var());
4133 Instruction* prologue_start = shared_prologue_linked_in.entry->next();
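// Control flow built below (sketch): both entries store an entry-point
// style marker and Goto(join_entry); the shared prologue runs after the
// join, then a branch on the marker runs either skippable_checks or
// redefinitions_if_skipped, and both paths rejoin before body.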
4134
4135 auto* join_entry = BuildJoinEntry();
4136
4137 Fragment normal_entry(shared_prologue_linked_in.entry);
4138 normal_entry +=
4139 IntConstant(static_cast<intptr_t>(UncheckedEntryPointStyle::kNone));
4140 normal_entry += StoreLocal(TokenPosition::kNoSource,
4141 parsed_function_->entry_points_temp_var());
4142 normal_entry += Drop();
4143 normal_entry += Goto(join_entry);
4144
4145 auto* extra_target_entry = BuildFunctionEntry(graph_entry_);
4146 Fragment extra_entry(extra_target_entry);
4147 extra_entry += IntConstant(
4148 static_cast<intptr_t>(UncheckedEntryPointStyle::kSharedWithVariable));
4149 extra_entry += StoreLocal(TokenPosition::kNoSource,
4150 parsed_function_->entry_points_temp_var());
4151 extra_entry += Drop();
4152 extra_entry += Goto(join_entry);
4153
4154 if (prologue_start != nullptr) {
4155 join_entry->LinkTo(prologue_start);
4156 } else {
4157 // Prologue is empty.
4158 shared_prologue_linked_in.current = join_entry;
4159 }
4160
4161 TargetEntryInstr* do_checks;
4162 TargetEntryInstr* skip_checks;
4163 shared_prologue_linked_in +=
4164 LoadLocal(parsed_function_->entry_points_temp_var());
4165 shared_prologue_linked_in += BuildEntryPointsIntrospection();
4166 shared_prologue_linked_in +=
4167 LoadLocal(parsed_function_->entry_points_temp_var());
4168 shared_prologue_linked_in += IntConstant(
4169 static_cast<intptr_t>(UncheckedEntryPointStyle::kSharedWithVariable));
4170 shared_prologue_linked_in +=
4171 BranchIfEqual(&skip_checks, &do_checks, /*negate=*/false);
4172
4173 JoinEntryInstr* rest_entry = BuildJoinEntry();
4174
4175 Fragment(do_checks) + skippable_checks + Goto(rest_entry);
4176 Fragment(skip_checks) + redefinitions_if_skipped + Goto(rest_entry);
4177 Fragment(rest_entry) + body;
4178
4179 return extra_target_entry;
4180}
4181
4182FunctionEntryInstr* FlowGraphBuilder::BuildSeparateUncheckedEntryPoint(
4183 BlockEntryInstr* normal_entry,
4184 Fragment normal_prologue,
4185 Fragment extra_prologue,
4186 Fragment shared_prologue,
4187 Fragment body) {
4188 auto* join_entry = BuildJoinEntry();
4189 auto* extra_entry = BuildFunctionEntry(graph_entry_);
4190
4191 Fragment normal(normal_entry);
4192 normal += IntConstant(static_cast<intptr_t>(UncheckedEntryPointStyle::kNone));
4193 normal += BuildEntryPointsIntrospection();
4194 normal += normal_prologue;
4195 normal += Goto(join_entry);
4196
4197 Fragment extra(extra_entry);
4198 extra +=
4199 IntConstant(static_cast<intptr_t>(UncheckedEntryPointStyle::kSeparate));
4200 extra += BuildEntryPointsIntrospection();
4201 extra += extra_prologue;
4202 extra += Goto(join_entry);
4203
4204 Fragment(join_entry) + shared_prologue + body;
4205 return extra_entry;
4206}
4207
4208FlowGraph* FlowGraphBuilder::BuildGraphOfImplicitClosureFunction(
4209 const Function& function) {
4210 const Function& parent = Function::ZoneHandle(Z, function.parent_function());
4211 Function& target = Function::ZoneHandle(Z, function.ImplicitClosureTarget(Z));
4212
4213 if (target.IsNull() ||
4214 (parent.num_fixed_parameters() != target.num_fixed_parameters())) {
4215 return BuildGraphOfNoSuchMethodForwarder(function, true,
4216 parent.is_static());
4217 }
4218
4219 graph_entry_ =
4220 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
4221
4222 auto normal_entry = BuildFunctionEntry(graph_entry_);
4223 graph_entry_->set_normal_entry(normal_entry);
4224
4225 PrologueInfo prologue_info(-1, -1);
4226 BlockEntryInstr* instruction_cursor =
4227 BuildPrologue(normal_entry, &prologue_info);
4228
4229 Fragment closure(instruction_cursor);
4230 closure += CheckStackOverflowInPrologue(function.token_pos());
4231 closure += BuildDefaultTypeHandling(function);
4232
4233 // For implicit closure functions, any non-covariant checks are either
4234 // performed by the type system or a dynamic invocation layer (dynamic closure
4235 // call dispatcher, mirror, etc.). Static targets never have covariant
4236 // arguments, and for non-static targets, they already perform the covariant
4237 // checks internally. Thus, no checks are needed and we just need to invoke
4238 // the target with the right receiver (unless static).
4239 //
4240 // TODO(dartbug.com/44195): Consider replacing the argument pushes + static
4241 // call with stack manipulation and a tail call instead.
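// For illustration (Dart): for `class C { int m(int x) => x; }`, the
// tear-off `c.m` evaluates to an implicit closure whose graph, built
// here, forwards to C.m with the receiver loaded from the closure
// context.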
4242
4243 intptr_t type_args_len = 0;
4244 if (function.IsGeneric()) {
4245 if (target.IsConstructor()) {
4246 const auto& result_type = AbstractType::Handle(Z, function.result_type());
4247 ASSERT(result_type.IsFinalized());
4248 // Instantiate a flattened type arguments vector which
4249 // includes type arguments corresponding to superclasses.
4250 // TranslateInstantiatedTypeArguments is smart enough to
4251 // avoid instantiation and reuse passed function type arguments
4252 // if there are no extra type arguments in the flattened vector.
4253 const auto& instantiated_type_arguments = TypeArguments::ZoneHandle(
4254 Z, Type::Cast(result_type).GetInstanceTypeArguments(H.thread()));
4255 closure +=
4256 TranslateInstantiatedTypeArguments(instantiated_type_arguments);
4257 } else {
4258 type_args_len = function.NumTypeParameters();
4259 ASSERT(parsed_function_->function_type_arguments() != nullptr);
4260 closure += LoadLocal(parsed_function_->function_type_arguments());
4261 }
4262 } else if (target.IsFactory()) {
4263 // Factories always take an extra implicit argument for
4264 // type arguments even if their classes don't have type parameters.
4265 closure += NullConstant();
4266 }
4267
4268 // Push receiver.
4269 if (target.IsGenerativeConstructor()) {
4270 const Class& cls = Class::ZoneHandle(Z, target.Owner());
4271 if (cls.NumTypeArguments() > 0) {
4272 if (!function.IsGeneric()) {
4273 closure += Constant(TypeArguments::ZoneHandle(
4274 Z, cls.GetDeclarationInstanceTypeArguments()));
4275 }
4276 closure += AllocateObject(function.token_pos(), cls, 1);
4277 } else {
4278 ASSERT(!function.IsGeneric());
4279 closure += AllocateObject(function.token_pos(), cls, 0);
4280 }
4281 LocalVariable* receiver = MakeTemporary();
4282 closure += LoadLocal(receiver);
4283 } else if (!target.is_static()) {
4284 // The closure context is the receiver.
4285 closure += LoadLocal(parsed_function_->ParameterVariable(0));
4286 closure += LoadNativeField(Slot::Closure_context());
4287 }
4288
4289 closure += PushExplicitParameters(function);
4290
4291 // Forward parameters to the target.
4292 intptr_t argument_count = function.NumParameters() -
4293 function.NumImplicitParameters() +
4294 target.NumImplicitParameters();
4295 ASSERT(argument_count == target.NumParameters());
4296
4297 Array& argument_names =
4298 Array::ZoneHandle(Z, GetOptionalParameterNames(function));
4299
4300 closure += StaticCall(function.token_pos(), target, argument_count,
4301 argument_names, ICData::kNoRebind,
4302 /* result_type = */ nullptr, type_args_len);
4303
4304 if (target.IsGenerativeConstructor()) {
4305 // Drop result of constructor invocation, leave receiver
4306 // instance on the stack.
4307 closure += Drop();
4308 }
4309
4310 // Return the result.
4311 closure += Return(function.end_token_pos());
4312
4313 return new (Z)
4314 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
4315 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
4316}
4317
4318FlowGraph* FlowGraphBuilder::BuildGraphOfFieldAccessor(
4319 const Function& function) {
4320 ASSERT(function.IsImplicitGetterOrSetter() ||
4321 function.IsDynamicInvocationForwarder());
4322
4323 // Instead of building a dynamic invocation forwarder that checks the
4324 // argument type and then invokes the original setter, we simply generate
4325 // the type check and an inlined field store. The scope builder takes care
4326 // of setting the correct type check mode in this case.
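// For illustration (Dart): for `class C { int x = 0; }`, the implicit
// setter built here behaves like `set x(int value) { this.x = value; }`,
// with the assignability check emitted only when needed.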
4327 const auto& target = Function::Handle(
4328 Z, function.IsDynamicInvocationForwarder() ? function.ForwardingTarget()
4329 : function.ptr());
4330 ASSERT(target.IsImplicitGetterOrSetter());
4331
4332 const bool is_method = !function.IsStaticFunction();
4333 const bool is_setter = target.IsImplicitSetterFunction();
4334 const bool is_getter = target.IsImplicitGetterFunction() ||
4335 target.IsImplicitStaticGetterFunction();
4336 ASSERT(is_setter || is_getter);
4337
4338 const auto& field = Field::ZoneHandle(Z, target.accessor_field());
4339
4340 graph_entry_ =
4341 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
4342
4343 auto normal_entry = BuildFunctionEntry(graph_entry_);
4344 graph_entry_->set_normal_entry(normal_entry);
4345
4346 Fragment body(normal_entry);
4347 if (is_setter) {
4348 auto const setter_value =
4349 parsed_function_->ParameterVariable(is_method ? 1 : 0);
4350 if (is_method) {
4351 body += LoadLocal(parsed_function_->ParameterVariable(0));
4352 }
4353 body += LoadLocal(setter_value);
4354
4355 // The dyn:* forwarder has to check the parameters that the
4356 // actual target will not check.
4357 // However, here we manually inline the target, so the dyn:* forwarder has
4358 // to check all parameters.
4359 const bool needs_type_check = function.IsDynamicInvocationForwarder() ||
4360 setter_value->needs_type_check();
4361 if (needs_type_check) {
4362 body += CheckAssignable(setter_value->static_type(), setter_value->name(),
4363 AssertAssignableInstr::kParameterCheck,
4364 field.token_pos());
4365 }
4366 if (field.is_late()) {
4367 if (is_method) {
4368 body += Drop();
4369 }
4370 body += Drop();
4371 body += StoreLateField(
4372 field, is_method ? parsed_function_->ParameterVariable(0) : nullptr,
4373 setter_value);
4374 } else {
4375 if (is_method) {
4376 body += StoreFieldGuarded(field, StoreFieldInstr::Kind::kOther);
4377 } else {
4378 body += StoreStaticField(TokenPosition::kNoSource, field);
4379 }
4380 }
4381 body += NullConstant();
4382 } else {
4383 ASSERT(is_getter);
4384 if (is_method) {
4385 body += LoadLocal(parsed_function_->ParameterVariable(0));
4386 body += LoadField(
4387 field, /*calls_initializer=*/field.NeedsInitializationCheckOnLoad());
4388 } else if (field.is_const()) {
4389 const auto& value = Object::Handle(Z, field.StaticConstFieldValue());
4390 if (value.IsError()) {
4391 Report::LongJump(Error::Cast(value));
4392 }
4393 body += Constant(Instance::ZoneHandle(Z, Instance::RawCast(value.ptr())));
4394 } else {
4395 // Static fields
4396 // - with trivial initializer
4397 // - without initializer if they are not late
4398 // are initialized eagerly and do not have implicit getters.
4399 // Static fields with a non-trivial initializer need a getter to perform
4400 // lazy initialization. Late fields without an initializer need a getter
4401 // to make sure they are already initialized.
4402 ASSERT(field.has_nontrivial_initializer() ||
4403 (field.is_late() && !field.has_initializer()));
4404 body += LoadStaticField(field, /*calls_initializer=*/true);
4405 }
4406
4407 if (is_method || !field.is_const()) {
4408#if defined(PRODUCT)
4409 RELEASE_ASSERT(!field.needs_load_guard());
4410#else
4411 // Always build the fragment for the load guard to maintain stable
4412 // deopt_id numbering, but link it into the graph only if the field
4413 // actually needs a load guard.
4414 Fragment load_guard = CheckAssignable(
4415 AbstractType::Handle(Z, field.type()), Symbols::FunctionResult());
4416 if (field.needs_load_guard()) {
4417 ASSERT(IG->HasAttemptedReload());
4418 body += load_guard;
4419 }
4420#endif
4421 }
4422 }
4423 body += Return(TokenPosition::kNoSource);
4424
4425 PrologueInfo prologue_info(-1, -1);
4426 return new (Z)
4427 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
4428 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
4429}
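// For illustration (hypothetical Dart sketch): for `class C { int x = 0; }`
// the implicit getter graph built above is essentially `return this.x;`, and
// the implicit setter graph is `this.x = value as int; return null;`, with
// the late-field and load-guard handling layered on top as shown.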
4430
4431FlowGraph* FlowGraphBuilder::BuildGraphOfDynamicInvocationForwarder(
4432 const Function& function) {
4433 auto& name = String::Handle(Z, function.name());
4434 name = Function::DemangleDynamicInvocationForwarderName(name);
4435 const auto& target = Function::ZoneHandle(Z, function.ForwardingTarget());
4436 ASSERT(!target.IsNull());
4437
4438 if (target.IsImplicitSetterFunction() || target.IsImplicitGetterFunction()) {
4439 return BuildGraphOfFieldAccessor(function);
4440 }
4441 if (target.IsMethodExtractor()) {
4442 return BuildGraphOfMethodExtractor(target);
4443 }
4444 if (IsRecognizedMethodForFlowGraph(function)) {
4445 return BuildGraphOfRecognizedMethod(function);
4446 }
4447
4448 graph_entry_ = new (Z) GraphEntryInstr(*parsed_function_, osr_id_);
4449
4450 auto normal_entry = BuildFunctionEntry(graph_entry_);
4451 graph_entry_->set_normal_entry(normal_entry);
4452
4453 PrologueInfo prologue_info(-1, -1);
4454 auto instruction_cursor = BuildPrologue(normal_entry, &prologue_info);
4455
4456 Fragment body;
4457 if (!function.is_native()) {
4458 body += CheckStackOverflowInPrologue(function.token_pos());
4459 }
4460
4461 ASSERT(parsed_function_->scope()->num_context_variables() == 0);
4462
4463 // We should never build a dynamic invocation forwarder for the equality
4464 // operator.
4465 ASSERT(function.name() != Symbols::EqualOperator().ptr());
4466
4467 // Even if the caller did not pass a type argument vector, we still call
4468 // the target with instantiate-to-bounds type arguments.
4469 body += BuildDefaultTypeHandling(function);
4470
4471 // Build argument type checks that complement those that are emitted in the
4472 // target.
4473 BuildTypeArgumentTypeChecks(
4474 TypeChecksToBuild::kCheckAllTypeParameterBounds, &body);
4475 BuildArgumentTypeChecks(&body, &body, nullptr);
4476
4477 // Push all arguments and invoke the original method.
4478
4479 intptr_t type_args_len = 0;
4480 if (function.IsGeneric()) {
4481 type_args_len = function.NumTypeParameters();
4482 ASSERT(parsed_function_->function_type_arguments() != nullptr);
4483 body += LoadLocal(parsed_function_->function_type_arguments());
4484 }
4485
4486 // Push receiver.
4487 ASSERT(function.NumImplicitParameters() == 1);
4488 body += LoadLocal(parsed_function_->receiver_var());
4489 body += PushExplicitParameters(function, target);
4490
4491 const intptr_t argument_count = function.NumParameters();
4492 const auto& argument_names =
4493 Array::ZoneHandle(Z, GetOptionalParameterNames(function));
4494
4495 body += StaticCall(TokenPosition::kNoSource, target, argument_count,
4496 argument_names, ICData::kNoRebind, nullptr, type_args_len);
4497
4498 if (target.has_unboxed_integer_return()) {
4499 body += Box(kUnboxedInt64);
4500 } else if (target.has_unboxed_double_return()) {
4501 body += Box(kUnboxedDouble);
4502 } else if (target.has_unboxed_record_return()) {
4503 // Handled in SelectRepresentations pass in optimized mode.
4504 ASSERT(optimizing_);
4505 }
4506
4507 // Later optimization passes assume that result of a x.[]=(...) call is not
4508 // used. We must guarantee this invariant because violation will lead to an
4509 // illegal IL once we replace x.[]=(...) with a sequence that does not
4510 // actually produce any value. See http://dartbug.com/29135 for more details.
4511 if (name.ptr() == Symbols::AssignIndexToken().ptr()) {
4512 body += Drop();
4513 body += NullConstant();
4514 }
4515
4516 body += Return(TokenPosition::kNoSource);
4517
4518 instruction_cursor->LinkTo(body.entry);
4519
4520 // When compiling for OSR, use a depth first search to find the OSR
4521 // entry and make graph entry jump to it instead of normal entry.
4522 // Catch entries are always considered reachable, even if they
4523 // become unreachable after OSR.
4524 if (IsCompiledForOsr()) {
4525 graph_entry_->RelinkToOsrEntry(Z, last_used_block_id_ + 1);
4526 }
4527 return new (Z)
4528 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
4529 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
4530}
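// For illustration (hypothetical Dart sketch): for a class declaring
// `operator []=(int index, int value)`, the dyn:[]= forwarder built above
// checks the argument types, calls the target, then drops the call result
// and returns null, preserving the invariant that the result of a
// x.[]=(...) call is never used.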
4531
4532void FlowGraphBuilder::SetConstantRangeOfCurrentDefinition(
4533 const Fragment& fragment,
4534 int64_t min,
4535 int64_t max) {
4536 ASSERT(fragment.current->IsDefinition());
4537 Range range(RangeBoundary::FromConstant(min),
4538 RangeBoundary::FromConstant(max));
4539 fragment.current->AsDefinition()->set_range(range);
4540}
4541
4542static classid_t TypedDataCidUnboxed(Representation unboxed_representation) {
4543 switch (unboxed_representation) {
4544 case kUnboxedFloat:
4545 // Note kTypedDataFloat32ArrayCid loads kUnboxedDouble.
4546 UNREACHABLE();
4547 return kTypedDataFloat32ArrayCid;
4548 case kUnboxedInt32:
4549 return kTypedDataInt32ArrayCid;
4550 case kUnboxedUint32:
4551 return kTypedDataUint32ArrayCid;
4552 case kUnboxedInt64:
4553 return kTypedDataInt64ArrayCid;
4554 case kUnboxedDouble:
4555 return kTypedDataFloat64ArrayCid;
4556 default:
4557 UNREACHABLE();
4558 }
4559 UNREACHABLE();
4560}
4561
4562Fragment FlowGraphBuilder::StoreIndexedTypedDataUnboxed(
4563 Representation unboxed_representation,
4564 intptr_t index_scale,
4565 bool index_unboxed) {
4566 ASSERT(unboxed_representation == kUnboxedInt32 ||
4567 unboxed_representation == kUnboxedUint32 ||
4568 unboxed_representation == kUnboxedInt64 ||
4569 unboxed_representation == kUnboxedFloat ||
4570 unboxed_representation == kUnboxedDouble);
4571 Fragment fragment;
4572 if (unboxed_representation == kUnboxedFloat) {
4573 fragment += BitCast(kUnboxedFloat, kUnboxedInt32);
4574 unboxed_representation = kUnboxedInt32;
4575 }
4576 fragment += StoreIndexedTypedData(TypedDataCidUnboxed(unboxed_representation),
4577 index_scale, index_unboxed);
4578 return fragment;
4579}
4580
4581Fragment FlowGraphBuilder::LoadIndexedTypedDataUnboxed(
4582 Representation unboxed_representation,
4583 intptr_t index_scale,
4584 bool index_unboxed) {
4585 ASSERT(unboxed_representation == kUnboxedInt32 ||
4586 unboxed_representation == kUnboxedUint32 ||
4587 unboxed_representation == kUnboxedInt64 ||
4588 unboxed_representation == kUnboxedFloat ||
4589 unboxed_representation == kUnboxedDouble);
4590 Representation representation_for_load = unboxed_representation;
4591 if (unboxed_representation == kUnboxedFloat) {
4592 representation_for_load = kUnboxedInt32;
4593 }
4594 Fragment fragment;
4595 fragment += LoadIndexed(TypedDataCidUnboxed(representation_for_load),
4596 index_scale, index_unboxed);
4597 if (unboxed_representation == kUnboxedFloat) {
4598 fragment += BitCast(kUnboxedInt32, kUnboxedFloat);
4599 }
4600 return fragment;
4601}
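// Note on the two helpers above: kUnboxedFloat values travel as their
// kUnboxedInt32 bit pattern because a Float32 typed-data access in IL would
// otherwise convert through kUnboxedDouble (see TypedDataCidUnboxed); the
// BitCast pair round-trips the raw bits instead.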
4602
4603Fragment FlowGraphBuilder::UnhandledException() {
4604 const auto class_table = thread_->isolate_group()->class_table();
4605 ASSERT(class_table->HasValidClassAt(kUnhandledExceptionCid));
4606 const auto& klass =
4607 Class::ZoneHandle(H.zone(), class_table->At(kUnhandledExceptionCid));
4608 ASSERT(!klass.IsNull());
4609 Fragment body;
4610 body += AllocateObject(TokenPosition::kNoSource, klass, 0);
4611 LocalVariable* error_instance = MakeTemporary();
4612
4613 body += LoadLocal(error_instance);
4614 body += LoadLocal(CurrentException());
4615 body +=
4616 StoreNativeField(Slot::UnhandledException_exception(),
4617 StoreFieldInstr::Kind::kInitializing);
4618
4619 body += LoadLocal(error_instance);
4620 body += LoadLocal(CurrentStackTrace());
4621 body +=
4622 StoreNativeField(Slot::UnhandledException_stacktrace(),
4623 StoreFieldInstr::Kind::kInitializing);
4624
4625 return body;
4626}
4627
4628Fragment FlowGraphBuilder::UnboxTruncate(Representation to) {
4629 auto const unbox_to = to == kUnboxedFloat ? kUnboxedDouble : to;
4630 Fragment instructions;
4631 auto* unbox = UnboxInstr::Create(unbox_to, Pop(), DeoptId::kNone,
4632 Instruction::kNotSpeculative);
4633 instructions <<= unbox;
4634 Push(unbox);
4635 if (to == kUnboxedFloat) {
4636 instructions += DoubleToFloat();
4637 }
4638 return instructions;
4639}
4640
4641Fragment FlowGraphBuilder::LoadThread() {
4642 LoadThreadInstr* instr = new (Z) LoadThreadInstr();
4643 Push(instr);
4644 return Fragment(instr);
4645}
4646
4647Fragment FlowGraphBuilder::LoadIsolate() {
4648 Fragment body;
4649 body += LoadThread();
4650 body += LoadNativeField(Slot::Thread_isolate());
4651 return body;
4652}
4653
4654Fragment FlowGraphBuilder::LoadIsolateGroup() {
4655 Fragment body;
4656 body += LoadThread();
4657 body += LoadNativeField(Slot::Thread_isolate_group());
4658 return body;
4659}
4660
4661Fragment FlowGraphBuilder::LoadObjectStore() {
4662 Fragment body;
4663 body += LoadIsolateGroup();
4664 body += LoadNativeField(Slot::IsolateGroup_object_store());
4665 return body;
4666}
4667
4668Fragment FlowGraphBuilder::LoadServiceExtensionStream() {
4669 Fragment body;
4670 body += LoadThread();
4671 body += LoadNativeField(Slot::Thread_service_extension_stream());
4672 return body;
4673}
4674
4675// TODO(http://dartbug.com/47487): Support unboxed output value.
4676Fragment FlowGraphBuilder::BoolToInt() {
4677 // TODO(http://dartbug.com/36855) Build IfThenElseInstr directly, instead of
4678 // letting the optimizer turn this diamond into one.
4679
4680 LocalVariable* expression_temp = parsed_function_->expression_temp_var();
4681
4682 Fragment instructions;
4683 TargetEntryInstr* is_true;
4684 TargetEntryInstr* is_false;
4685
4686 instructions += BranchIfTrue(&is_true, &is_false);
4687 JoinEntryInstr* join = BuildJoinEntry();
4688
4689 {
4690 Fragment store_1(is_true);
4691 store_1 += IntConstant(1);
4692 store_1 += StoreLocal(TokenPosition::kNoSource, expression_temp);
4693 store_1 += Drop();
4694 store_1 += Goto(join);
4695 }
4696
4697 {
4698 Fragment store_0(is_false);
4699 store_0 += IntConstant(0);
4700 store_0 += StoreLocal(TokenPosition::kNoSource, expression_temp);
4701 store_0 += Drop();
4702 store_0 += Goto(join);
4703 }
4704
4705 instructions = Fragment(instructions.entry, join);
4706 instructions += LoadLocal(expression_temp);
4707 return instructions;
4708}
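// The diamond built above is equivalent to evaluating `b ? 1 : 0`: both arms
// store into the same expression temp and meet at a join before the final
// LoadLocal, which is exactly the shape the optimizer can later collapse
// into an IfThenElseInstr.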
4709
4710Fragment FlowGraphBuilder::IntToBool() {
4711 Fragment body;
4712 body += IntConstant(0);
4713 body += StrictCompare(Token::kNE_STRICT);
4714 return body;
4715}
4716
4717Fragment FlowGraphBuilder::IntRelationalOp(TokenPosition position,
4718 Token::Kind kind) {
4719 if (CompilerState::Current().is_aot()) {
4720 Value* right = Pop();
4721 Value* left = Pop();
4722 RelationalOpInstr* instr = new (Z) RelationalOpInstr(
4723 InstructionSource(position), kind, left, right, kMintCid,
4724 GetNextDeoptId(), Instruction::kNotSpeculative);
4725 Push(instr);
4726 return Fragment(instr);
4727 }
4728 const String* name = nullptr;
4729 switch (kind) {
4730 case Token::kLT:
4731 name = &Symbols::LAngleBracket();
4732 break;
4733 case Token::kGT:
4734 name = &Symbols::RAngleBracket();
4735 break;
4736 case Token::kLTE:
4737 name = &Symbols::LessEqualOperator();
4738 break;
4739 case Token::kGTE:
4740 name = &Symbols::GreaterEqualOperator();
4741 break;
4742 default:
4743 UNREACHABLE();
4744 }
4745 return InstanceCall(
4746 position, *name, kind, /*type_args_len=*/0, /*argument_count=*/2,
4747 /*argument_names=*/Array::null_array(), /*checked_argument_count=*/2);
4748}
4749
4750Fragment FlowGraphBuilder::NativeReturn(
4751 const compiler::ffi::CallbackMarshaller& marshaller) {
4752 const intptr_t num_return_defs = marshaller.NumReturnDefinitions();
4753 if (num_return_defs == 1) {
4754 auto* instr = new (Z) NativeReturnInstr(Pop(), marshaller);
4755 return Fragment(instr).closed();
4756 }
4757 ASSERT_EQUAL(num_return_defs, 2);
4758 auto* offset = Pop();
4759 auto* typed_data_base = Pop();
4760 auto* instr = new (Z) NativeReturnInstr(typed_data_base, offset, marshaller);
4761 return Fragment(instr).closed();
4762}
4763
4764Fragment FlowGraphBuilder::BitCast(Representation from, Representation to) {
4765 BitCastInstr* instr = new (Z) BitCastInstr(from, to, Pop());
4766 Push(instr);
4767 return Fragment(instr);
4768}
4769
4770Fragment FlowGraphBuilder::Call1ArgStub(TokenPosition position,
4771 Call1ArgStubInstr::StubId stub_id) {
4772 Call1ArgStubInstr* instr = new (Z) Call1ArgStubInstr(
4773 InstructionSource(position), stub_id, Pop(), GetNextDeoptId());
4774 Push(instr);
4775 return Fragment(instr);
4776}
4777
4778Fragment FlowGraphBuilder::Suspend(TokenPosition position,
4779 SuspendInstr::StubId stub_id) {
4780 Value* type_args =
4781 (stub_id == SuspendInstr::StubId::kAwaitWithTypeCheck) ? Pop() : nullptr;
4782 Value* operand = Pop();
4783 SuspendInstr* instr =
4784 new (Z) SuspendInstr(InstructionSource(position), stub_id, operand,
4785 type_args, GetNextDeoptId(), GetNextDeoptId());
4786 Push(instr);
4787 return Fragment(instr);
4788}
4789
4790Fragment FlowGraphBuilder::WrapTypedDataBaseInCompound(
4791 const AbstractType& compound_type) {
4792 const auto& compound_sub_class =
4793 Class::ZoneHandle(Z, compound_type.type_class());
4794 compound_sub_class.EnsureIsFinalized(thread_);
4795
4796 auto& state = thread_->compiler_state();
4797
4798 Fragment body;
4799 LocalVariable* typed_data = MakeTemporary("typed_data_base");
4800 body += AllocateObject(TokenPosition::kNoSource, compound_sub_class, 0);
4801 LocalVariable* compound = MakeTemporary("compound");
4802 body += LoadLocal(compound);
4803 body += LoadLocal(typed_data);
4804 body += StoreField(state.CompoundTypedDataBaseField(),
4805 StoreFieldInstr::Kind::kInitializing);
4806 body += LoadLocal(compound);
4807 body += IntConstant(0);
4808 body += StoreField(state.CompoundOffsetInBytesField(),
4809 StoreFieldInstr::Kind::kInitializing);
4810 body += DropTempsPreserveTop(1); // Drop TypedData.
4811 return body;
4812}
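// For illustration (hypothetical Dart sketch): for an FFI struct such as
// `final class Coord extends Struct { @Double() external double x; }`, this
// wraps a TypedData holding the raw bytes in a fresh Coord instance whose
// typed-data-base field is that TypedData and whose offset-in-bytes field
// is 0.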
4813
4814Fragment FlowGraphBuilder::LoadTypedDataBaseFromCompound() {
4815 Fragment body;
4816 auto& state = thread_->compiler_state();
4817 body += LoadField(state.CompoundTypedDataBaseField(),
4818 /*calls_initializer=*/false);
4819 return body;
4820}
4821
4822Fragment FlowGraphBuilder::LoadOffsetInBytesFromCompound() {
4823 Fragment body;
4824 auto& state = thread_->compiler_state();
4825 body += LoadField(state.CompoundOffsetInBytesField(),
4826 /*calls_initializer=*/false);
4827 return body;
4828}
4829
4830Fragment FlowGraphBuilder::PopFromStackToTypedDataBase(
4831 ZoneGrowableArray<LocalVariable*>* definitions,
4832 const GrowableArray<Representation>& representations) {
4833 Fragment body;
4834 const intptr_t num_defs = representations.length();
4835 ASSERT(definitions->length() == num_defs);
4836
4837 LocalVariable* uint8_list = MakeTemporary("uint8_list");
4838 int offset_in_bytes = 0;
4839 for (intptr_t i = 0; i < num_defs; i++) {
4840 const Representation representation = representations[i];
4841 body += LoadLocal(uint8_list);
4842 body += IntConstant(offset_in_bytes);
4843 body += LoadLocal(definitions->At(i));
4844 body += StoreIndexedTypedDataUnboxed(representation, /*index_scale=*/1,
4845 /*index_unboxed=*/false);
4846 offset_in_bytes += RepresentationUtils::ValueSize(representation);
4847 }
4848 body += DropTempsPreserveTop(num_defs); // Drop chunk defs, keep TypedData.
4849 return body;
4850}
4851
4852static intptr_t chunk_size(intptr_t bytes_left) {
4853 ASSERT(bytes_left >= 1);
4854 if (bytes_left >= 8 && compiler::target::kWordSize == 8) {
4855 return 8;
4856 }
4857 if (bytes_left >= 4) {
4858 return 4;
4859 }
4860 if (bytes_left >= 2) {
4861 return 2;
4862 }
4863 return 1;
4864}
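// For example, a 7-byte remainder is copied in chunks of 4, 2, and 1 bytes;
// on a 64-bit target a 12-byte remainder is copied as 8 + 4.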
4865
4866 static classid_t typed_data_cid(intptr_t chunk_size) {
4867 switch (chunk_size) {
4868 case 8:
4869 return kTypedDataInt64ArrayCid;
4870 case 4:
4871 return kTypedDataInt32ArrayCid;
4872 case 2:
4873 return kTypedDataInt16ArrayCid;
4874 case 1:
4875 return kTypedDataInt8ArrayCid;
4876 }
4877 UNREACHABLE();
4878}
4879
4880// Only for use within FfiCallbackConvertCompoundArgumentToDart and
4881// FfiCallbackConvertCompoundReturnToNative, where we know the "array" being
4882// passed is an untagged pointer coming from C.
4883 static classid_t external_typed_data_cid(intptr_t chunk_size) {
4884 switch (chunk_size) {
4885 case 8:
4886 return kExternalTypedDataInt64ArrayCid;
4887 case 4:
4888 return kExternalTypedDataInt32ArrayCid;
4889 case 2:
4890 return kExternalTypedDataInt16ArrayCid;
4891 case 1:
4892 return kExternalTypedDataInt8ArrayCid;
4893 }
4894 UNREACHABLE();
4895}
4896
4897Fragment FlowGraphBuilder::LoadTail(LocalVariable* variable,
4898 intptr_t size,
4899 intptr_t offset_in_bytes,
4900 Representation representation) {
4901 Fragment body;
4902 if (size == 8 || size == 4) {
4903 body += LoadLocal(variable);
4904 body += LoadTypedDataBaseFromCompound();
4905 body += LoadLocal(variable);
4906 body += LoadOffsetInBytesFromCompound();
4907 body += IntConstant(offset_in_bytes);
4908 body += BinaryIntegerOp(Token::kADD, kTagged, /*is_truncating=*/true);
4909 body += LoadIndexedTypedDataUnboxed(representation, /*index_scale=*/1,
4910 /*index_unboxed=*/false);
4911 return body;
4912 }
4913 ASSERT(representation != kUnboxedFloat);
4914 ASSERT(representation != kUnboxedDouble);
4915 intptr_t shift = 0;
4916 intptr_t remaining = size;
4917 auto step = [&](intptr_t part_bytes, intptr_t part_cid) {
4918 while (remaining >= part_bytes) {
4919 body += LoadLocal(variable);
4920 body += LoadTypedDataBaseFromCompound();
4921 body += LoadLocal(variable);
4922 body += LoadOffsetInBytesFromCompound();
4923 body += IntConstant(offset_in_bytes);
4924 body += BinaryIntegerOp(Token::kADD, kTagged, /*is_truncating=*/true);
4925 body += LoadIndexed(part_cid, /*index_scale*/ 1,
4926 /*index_unboxed=*/false);
4927 if (shift != 0) {
4928 body += IntConstant(shift);
4929 // 64-bit doesn't support kUnboxedInt32 ops.
4930 Representation op_representation = kUnboxedIntPtr;
4931 body += BinaryIntegerOp(Token::kSHL, op_representation,
4932 /*is_truncating*/ true);
4933 body += BinaryIntegerOp(Token::kBIT_OR, op_representation,
4934 /*is_truncating*/ true);
4935 }
4936 offset_in_bytes += part_bytes;
4937 remaining -= part_bytes;
4938 shift += part_bytes * kBitsPerByte;
4939 }
4940 };
4941 step(8, kTypedDataUint64ArrayCid);
4942 step(4, kTypedDataUint32ArrayCid);
4943 step(2, kTypedDataUint16ArrayCid);
4944 step(1, kTypedDataUint8ArrayCid);
4945
4946 // Sigh, LoadIndexed's representation for int8/16 is [u]int64, but the FfiCall
4947 // wants an [u]int32 input. Manually insert a "truncating" conversion so one
4948 // isn't automatically added that thinks it can deopt.
4949 Representation from_representation = Peek(0)->representation();
4950 if (from_representation != representation) {
4951 IntConverterInstr* convert = new IntConverterInstr(
4952 from_representation, representation, Pop(), DeoptId::kNone);
4953 convert->mark_truncating();
4954 Push(convert);
4955 body <<= convert;
4956 }
4957
4958 return body;
4959}
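// For example, a 3-byte tail is assembled (little-endian) as
// uint16_at(offset) | (uint8_at(offset + 2) << 16) by the shift/or sequence
// above.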
4960
4961Fragment FlowGraphBuilder::FfiCallConvertCompoundArgumentToNative(
4962 LocalVariable* variable,
4963 const compiler::ffi::BaseMarshaller& marshaller,
4964 intptr_t arg_index) {
4965 Fragment body;
4966 const auto& native_loc = marshaller.Location(arg_index);
4967 if (native_loc.IsMultiple()) {
4968 const auto& multiple_loc = native_loc.AsMultiple();
4969 intptr_t offset_in_bytes = 0;
4970 for (intptr_t i = 0; i < multiple_loc.locations().length(); i++) {
4971 const auto& loc = *multiple_loc.locations()[i];
4972 Representation representation;
4973 if (loc.container_type().IsInt() && loc.payload_type().IsFloat()) {
4974 // IL can only pass integers to integer Locations, so pass as integer if
4975 // the Location requires it to be an integer.
4976 representation = loc.container_type().AsRepresentationOverApprox(Z);
4977 } else {
4978 // Representations do not support 8 or 16 bit ints; over-approximate to
4979 // 32 bits.
4980 representation = loc.payload_type().AsRepresentationOverApprox(Z);
4981 }
4982 intptr_t size = loc.payload_type().SizeInBytes();
4983 body += LoadTail(variable, size, offset_in_bytes, representation);
4984 offset_in_bytes += size;
4985 }
4986 } else if (native_loc.IsStack()) {
4987 // Break the struct into pieces as separate IL definitions and pass those
4988 // separate definitions into the FFI call.
4989 Representation representation = kUnboxedWord;
4990 intptr_t remaining = native_loc.payload_type().SizeInBytes();
4991 intptr_t offset_in_bytes = 0;
4992 while (remaining >= compiler::target::kWordSize) {
4993 body += LoadTail(variable, compiler::target::kWordSize, offset_in_bytes,
4994 representation);
4995 offset_in_bytes += compiler::target::kWordSize;
4996 remaining -= compiler::target::kWordSize;
4997 }
4998 if (remaining > 0) {
4999 body += LoadTail(variable, remaining, offset_in_bytes, representation);
5000 }
5001 } else {
5002 ASSERT(native_loc.IsPointerToMemory());
5003 // Only load the typed data, do copying in the FFI call machine code.
5004 body += LoadLocal(variable); // User-defined struct.
5005 body += LoadTypedDataBaseFromCompound();
5006 body += LoadLocal(variable); // User-defined struct.
5007 body += LoadOffsetInBytesFromCompound();
5008 body += UnboxTruncate(kUnboxedWord);
5009 }
5010 return body;
5011}
5012
5013Fragment FlowGraphBuilder::FfiCallConvertCompoundReturnToDart(
5014 const compiler::ffi::BaseMarshaller& marshaller,
5015 intptr_t arg_index) {
5016 Fragment body;
5017 // The typed data is allocated before the FFI call, and is populated in
5018 // machine code. So, here, it only has to be wrapped in the struct class.
5019 const auto& compound_type =
5020 AbstractType::Handle(Z, marshaller.CType(arg_index));
5021 body += WrapTypedDataBaseInCompound(compound_type);
5022 return body;
5023}
5024
5025Fragment FlowGraphBuilder::FfiCallbackConvertCompoundArgumentToDart(
5026 const compiler::ffi::BaseMarshaller& marshaller,
5027 intptr_t arg_index,
5028 ZoneGrowableArray<LocalVariable*>* definitions) {
5029 const intptr_t length_in_bytes =
5030 marshaller.Location(arg_index).payload_type().SizeInBytes();
5031
5032 Fragment body;
5033 if (marshaller.Location(arg_index).IsMultiple()) {
5034 body += IntConstant(length_in_bytes);
5035 body +=
5036 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
5037 LocalVariable* uint8_list = MakeTemporary("uint8_list");
5038
5039 const auto& multiple_loc = marshaller.Location(arg_index).AsMultiple();
5040 const intptr_t num_defs = multiple_loc.locations().length();
5041 intptr_t offset_in_bytes = 0;
5042 for (intptr_t i = 0; i < num_defs; i++) {
5043 const auto& loc = *multiple_loc.locations()[i];
5044 Representation representation;
5045 if (loc.container_type().IsInt() && loc.payload_type().IsFloat()) {
5046 // IL can only pass integers to integer Locations, so pass as integer if
5047 // the Location requires it to be an integer.
5048 representation = loc.container_type().AsRepresentationOverApprox(Z);
5049 } else {
5050 // Representations do not support 8 or 16 bit ints; over-approximate to
5051 // 32 bits.
5052 representation = loc.payload_type().AsRepresentationOverApprox(Z);
5053 }
5054 body += LoadLocal(uint8_list);
5055 body += IntConstant(offset_in_bytes);
5056 body += LoadLocal(definitions->At(i));
5057 body += StoreIndexedTypedDataUnboxed(representation, /*index_scale=*/1,
5058 /*index_unboxed=*/false);
5059 offset_in_bytes += loc.payload_type().SizeInBytes();
5060 }
5061
5062 body += DropTempsPreserveTop(num_defs); // Drop chunk defs, keep TypedData.
5063 } else if (marshaller.Location(arg_index).IsStack()) {
5064 // Allocate and populate a TypedData from the individual NativeParameters.
5065 body += IntConstant(length_in_bytes);
5066 body +=
5067 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
5068 GrowableArray<Representation> representations;
5069 marshaller.RepsInFfiCall(arg_index, &representations);
5070 body += PopFromStackToTypedDataBase(definitions, representations);
5071 } else {
5072 ASSERT(marshaller.Location(arg_index).IsPointerToMemory());
5073 // Allocate a TypedData and copy contents pointed to by an address into it.
5074 LocalVariable* address_of_compound = MakeTemporary("address_of_compound");
5075 body += IntConstant(length_in_bytes);
5076 body +=
5077 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
5078 LocalVariable* typed_data_base = MakeTemporary("typed_data_base");
5079 intptr_t offset_in_bytes = 0;
5080 while (offset_in_bytes < length_in_bytes) {
5081 const intptr_t bytes_left = length_in_bytes - offset_in_bytes;
5082 const intptr_t chunk_sizee = chunk_size(bytes_left);
5083
5084 body += LoadLocal(address_of_compound);
5085 body += IntConstant(offset_in_bytes);
5086 body +=
5087 LoadIndexed(external_typed_data_cid(chunk_sizee), /*index_scale=*/1,
5088 /*index_unboxed=*/false);
5089 LocalVariable* chunk_value = MakeTemporary("chunk_value");
5090
5091 body += LoadLocal(typed_data_base);
5092 body += IntConstant(offset_in_bytes);
5093 body += LoadLocal(chunk_value);
5094 body += StoreIndexedTypedData(typed_data_cid(chunk_sizee),
5095 /*index_scale=*/1,
5096 /*index_unboxed=*/false);
5097 body += DropTemporary(&chunk_value);
5098
5099 offset_in_bytes += chunk_sizee;
5100 }
5101 ASSERT(offset_in_bytes == length_in_bytes);
5102 body += DropTempsPreserveTop(1); // Drop address_of_compound.
5103 }
5104 // Wrap typed data in compound class.
5105 const auto& compound_type =
5106 AbstractType::Handle(Z, marshaller.CType(arg_index));
5107 body += WrapTypedDataBaseInCompound(compound_type);
5108 return body;
5109}
5110
5111Fragment FlowGraphBuilder::FfiCallbackConvertCompoundReturnToNative(
5112 const compiler::ffi::CallbackMarshaller& marshaller,
5113 intptr_t arg_index) {
5114 Fragment body;
5115 const auto& native_loc = marshaller.Location(arg_index);
5116 if (native_loc.IsMultiple()) {
5117 // Pass in typed data and offset to native return instruction, and do the
5118 // copying in machine code.
5119 LocalVariable* compound = MakeTemporary("compound");
5120 body += LoadLocal(compound);
5121 body += LoadOffsetInBytesFromCompound();
5122 body += UnboxTruncate(kUnboxedWord);
5123 body += StoreLocal(TokenPosition::kNoSource,
5124 parsed_function_->expression_temp_var());
5125 body += Drop();
5126 body += LoadTypedDataBaseFromCompound();
5127 body += LoadLocal(parsed_function_->expression_temp_var());
5128 } else {
5129 ASSERT(native_loc.IsPointerToMemory());
5130 // We copy the data into the right location in IL.
5131 const intptr_t length_in_bytes =
5132 marshaller.Location(arg_index).payload_type().SizeInBytes();
5133
5134 LocalVariable* compound = MakeTemporary("compound");
5135 body += LoadLocal(compound);
5136 body += LoadTypedDataBaseFromCompound();
5137 LocalVariable* typed_data_base = MakeTemporary("typed_data_base");
5138 body += LoadLocal(compound);
5139 body += LoadOffsetInBytesFromCompound();
5140 LocalVariable* offset = MakeTemporary("offset");
5141
5142 auto* pointer_to_return =
5143 new (Z) NativeParameterInstr(marshaller, compiler::ffi::kResultIndex);
5144 Push(pointer_to_return); // Address where return value should be stored.
5145 body <<= pointer_to_return;
5146 LocalVariable* unboxed_address = MakeTemporary("unboxed_address");
5147
5148 intptr_t offset_in_bytes = 0;
5149 while (offset_in_bytes < length_in_bytes) {
5150 const intptr_t bytes_left = length_in_bytes - offset_in_bytes;
5151 const intptr_t chunk_sizee = chunk_size(bytes_left);
5152
5153 body += LoadLocal(typed_data_base);
5154 body += LoadLocal(offset);
5155 body += IntConstant(offset_in_bytes);
5156 body += BinaryIntegerOp(Token::kADD, kTagged, /*is_truncating=*/true);
5157 body += LoadIndexed(typed_data_cid(chunk_sizee), /*index_scale=*/1,
5158 /*index_unboxed=*/false);
5159 LocalVariable* chunk_value = MakeTemporary("chunk_value");
5160
5161 body += LoadLocal(unboxed_address);
5162 body += IntConstant(offset_in_bytes);
5163 body += LoadLocal(chunk_value);
5164 body += StoreIndexedTypedData(external_typed_data_cid(chunk_sizee),
5165 /*index_scale=*/1,
5166 /*index_unboxed=*/false);
5167 body += DropTemporary(&chunk_value);
5168
5169 offset_in_bytes += chunk_sizee;
5170 }
5171
5172 ASSERT(offset_in_bytes == length_in_bytes);
5173 body += DropTempsPreserveTop(3);
5174 }
5175 return body;
5176}
5177
5178Fragment FlowGraphBuilder::FfiConvertPrimitiveToDart(
5179 const compiler::ffi::BaseMarshaller& marshaller,
5180 intptr_t arg_index) {
5181 ASSERT(!marshaller.IsCompoundCType(arg_index));
5182
5183 Fragment body;
5184 if (marshaller.IsPointerPointer(arg_index)) {
5185 Class& result_class =
5186 Class::ZoneHandle(Z, IG->object_store()->ffi_pointer_class());
5187 // This class might only be instantiated as a return type of ffi calls.
5188 result_class.EnsureIsFinalized(thread_);
5189
5190 TypeArguments& args =
5191 TypeArguments::ZoneHandle(Z, IG->object_store()->type_argument_never());
5192
5193 // A kernel transform for FFI in the front-end ensures that type parameters
5194 // do not appear in the type arguments to any Pointer class in an FFI
5195 // signature.
5196 ASSERT(args.IsNull() || args.IsInstantiated());
5197 args = args.Canonicalize(thread_);
5198
5199 LocalVariable* address = MakeTemporary("address");
5200 LocalVariable* result = parsed_function_->expression_temp_var();
5201
5202 body += Constant(args);
5203 body += AllocateObject(TokenPosition::kNoSource, result_class, 1);
5204 body += StoreLocal(TokenPosition::kNoSource, result);
5205 body += LoadLocal(address);
5206 body += StoreNativeField(Slot::PointerBase_data(),
5207 InnerPointerAccess::kCannotBeInnerPointer,
5208 StoreFieldInstr::Kind::kInitializing);
5209 body += DropTemporary(&address); // address
5210 body += LoadLocal(result);
5211 } else if (marshaller.IsTypedDataPointer(arg_index)) {
5212 UNREACHABLE(); // Only supported for FFI call arguments.
5213 } else if (marshaller.IsCompoundPointer(arg_index)) {
5214 UNREACHABLE(); // Only supported for FFI call arguments.
5215 } else if (marshaller.IsHandleCType(arg_index)) {
5216 // The top of the stack is a Dart_Handle, so retrieve the tagged pointer
5217 // out of it.
5218 body += LoadNativeField(Slot::LocalHandle_ptr());
5219 } else if (marshaller.IsVoid(arg_index)) {
5220 // Ignore whatever value was being returned and return null.
5221 ASSERT_EQUAL(arg_index, compiler::ffi::kResultIndex);
5222 body += Drop();
5223 body += NullConstant();
5224 } else {
5225 if (marshaller.RequiresBitCast(arg_index)) {
5226 body += BitCast(
5227 marshaller.RepInFfiCall(marshaller.FirstDefinitionIndex(arg_index)),
5228 marshaller.RepInDart(arg_index));
5229 }
5230
5231 body += Box(marshaller.RepInDart(arg_index));
5232
5233 if (marshaller.IsBool(arg_index)) {
5234 body += IntToBool();
5235 }
5236 }
5237 return body;
5238}
5239
5240Fragment FlowGraphBuilder::FfiConvertPrimitiveToNative(
5241 const compiler::ffi::BaseMarshaller& marshaller,
5242 intptr_t arg_index,
5243 LocalVariable* variable) {
5244 ASSERT(!marshaller.IsCompoundCType(arg_index));
5245
5246 Fragment body;
5247 if (marshaller.IsPointerPointer(arg_index)) {
5248 // This can only be Pointer, so it is safe to load the data field.
5249 body += LoadNativeField(Slot::PointerBase_data(),
5250 InnerPointerAccess::kCannotBeInnerPointer);
5251 } else if (marshaller.IsTypedDataPointer(arg_index)) {
5252 // Nothing to do. Unwrap in `FfiCallInstr::EmitNativeCode`.
5253 } else if (marshaller.IsCompoundPointer(arg_index)) {
5254 ASSERT(variable != nullptr);
5255 body += LoadTypedDataBaseFromCompound();
5256 body += LoadLocal(variable); // User-defined struct.
5257 body += LoadOffsetInBytesFromCompound();
5258 body += UnboxTruncate(kUnboxedWord);
5259 } else if (marshaller.IsHandleCType(arg_index)) {
5260 // FfiCallInstr specifies all handle locations as Stack, and will pass a
5261 // pointer to the stack slot as the native handle argument. Therefore the
5262 // only handles that need wrapping are function results.
5263 ASSERT_EQUAL(arg_index, compiler::ffi::kResultIndex);
5264 LocalVariable* object = MakeTemporary("object");
5265
5266 auto* const arg_reps =
5267 new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
5268
5269 // Get a reference to the top handle scope.
5270 body += LoadThread();
5271 body += LoadNativeField(Slot::Thread_api_top_scope());
5272 arg_reps->Add(kUntagged);
5273
5274 // Allocate a new handle in the top handle scope.
5275 body +=
5276 CallLeafRuntimeEntry(kAllocateHandleRuntimeEntry, kUntagged, *arg_reps);
5277
5278 LocalVariable* handle = MakeTemporary("handle");
5279
5280 // Store the object address into the handle.
5281 body += LoadLocal(handle);
5282 body += LoadLocal(object);
5283 body += StoreNativeField(Slot::LocalHandle_ptr(),
5284 StoreFieldInstr::Kind::kInitializing);
5285
5286 body += DropTempsPreserveTop(1); // Drop object.
5287 } else if (marshaller.IsVoid(arg_index)) {
5288 ASSERT_EQUAL(arg_index, compiler::ffi::kResultIndex);
5289 // Ignore whatever value was being returned and return nullptr.
5290 body += Drop();
5291 body += UnboxedIntConstant(0, kUnboxedIntPtr);
5292 } else {
5293 if (marshaller.IsBool(arg_index)) {
5294 body += BoolToInt();
5295 }
5296
5297 body += UnboxTruncate(marshaller.RepInDart(arg_index));
5298 }
5299
5300 if (marshaller.RequiresBitCast(arg_index)) {
5301 body += BitCast(
5302 marshaller.RepInDart(arg_index),
5303 marshaller.RepInFfiCall(marshaller.FirstDefinitionIndex(arg_index)));
5304 }
5305
5306 return body;
5307}
5308
5309FlowGraph* FlowGraphBuilder::BuildGraphOfFfiTrampoline(
5310 const Function& function) {
5311 switch (function.GetFfiCallbackKind()) {
5312 case FfiCallbackKind::kIsolateLocalStaticCallback:
5313 case FfiCallbackKind::kIsolateLocalClosureCallback:
5314 return BuildGraphOfSyncFfiCallback(function);
5315 case FfiCallbackKind::kAsyncCallback:
5316 return BuildGraphOfAsyncFfiCallback(function);
5317 }
5318 UNREACHABLE();
5319 return nullptr;
5320}
5321
5322Fragment FlowGraphBuilder::FfiNativeLookupAddress(
5323 const dart::Instance& native) {
5324 const auto& native_class = Class::Handle(Z, native.clazz());
5325 ASSERT(String::Handle(Z, native_class.UserVisibleName())
5326 .Equals(Symbols::FfiNative()));
5327 const auto& native_class_fields = Array::Handle(Z, native_class.fields());
5328 ASSERT(native_class_fields.Length() == 4);
5329 const auto& symbol_field =
5330 Field::Handle(Z, Field::RawCast(native_class_fields.At(1)));
5331 ASSERT(!symbol_field.is_static());
5332 const auto& asset_id_field =
5333 Field::Handle(Z, Field::RawCast(native_class_fields.At(2)));
5334 ASSERT(!asset_id_field.is_static());
5335 const auto& symbol =
5336 String::ZoneHandle(Z, String::RawCast(native.GetField(symbol_field)));
5337 const auto& asset_id =
5338 String::ZoneHandle(Z, String::RawCast(native.GetField(asset_id_field)));
5339 const auto& type_args = TypeArguments::Handle(Z, native.GetTypeArguments());
5340 ASSERT(type_args.Length() == 1);
5341 const auto& native_type = AbstractType::ZoneHandle(Z, type_args.TypeAt(0));
5342 intptr_t arg_n;
5343 if (native_type.IsFunctionType()) {
5344 const auto& native_function_type = FunctionType::Cast(native_type);
5345 arg_n = native_function_type.NumParameters() -
5346 native_function_type.num_implicit_parameters();
5347 } else {
5348 // We're looking up the address of a native field.
5349 arg_n = 0;
5350 }
5351 const auto& ffi_resolver =
5352 Function::ZoneHandle(Z, IG->object_store()->ffi_resolver_function());
5353#if !defined(TARGET_ARCH_IA32)
5354 // We have access to the object pool, so use a cacheable static call.
5355 Fragment body;
5356 body += Constant(asset_id);
5357 body += Constant(symbol);
5358 body += Constant(Smi::ZoneHandle(Smi::New(arg_n)));
5359 body +=
5360 CachableIdempotentCall(TokenPosition::kNoSource, kUntagged, ffi_resolver,
5361 /*argument_count=*/3,
5362 /*argument_names=*/Array::null_array(),
5363 /*type_args_count=*/0);
5364 return body;
5365#else // !defined(TARGET_ARCH_IA32)
5366 // IA32 only has JIT and no object pool. This function will only be
5367 // compiled if it is run immediately afterwards, so do the lookup here.
5368 char* error = nullptr;
5369#if !defined(DART_PRECOMPILER) || defined(TESTING)
5370 const uintptr_t function_address =
5371 FfiResolveInternal(asset_id, symbol, arg_n, &error);
5372#else
5373 const uintptr_t function_address = 0;
5374 UNREACHABLE(); // JIT runtime should not contain AOT code
5375#endif
5376 if (error == nullptr) {
5377 Fragment body;
5378 body += UnboxedIntConstant(function_address, kUnboxedAddress);
5379 body += ConvertUnboxedToUntagged();
5380 return body;
5381 } else {
5382 free(error);
5383 // The lookup failed. To throw an error consistent with AOT, just compile
5384 // into a runtime lookup so that we can throw the error from the same
5385 // error path.
5386 Fragment body;
5387 body += Constant(asset_id);
5388 body += Constant(symbol);
5389 body += Constant(Smi::ZoneHandle(Smi::New(arg_n)));
5390 // Non-cacheable call, this is IA32.
5391 body += StaticCall(TokenPosition::kNoSource, ffi_resolver,
5392 /*argument_count=*/3, ICData::kStatic);
5393 body += UnboxTruncate(kUnboxedAddress);
5394 body += ConvertUnboxedToUntagged();
5395 return body;
5396 }
5397#endif // !defined(TARGET_ARCH_IA32)
5398}
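// For illustration (hypothetical Dart sketch): an annotation such as
// `@Native<Int32 Function(Int32)>(symbol: 'increment', assetId: 'lib')`
// carries the asset id, symbol, and signature read above; the ffi resolver
// maps that triple (plus the argument count) to the native address.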
5399
5400Fragment FlowGraphBuilder::FfiNativeFunctionBody(const Function& function) {
5401 ASSERT(function.is_ffi_native());
5402 ASSERT(!IsRecognizedMethodForFlowGraph(function));
5403 ASSERT(optimizing_);
5404
5405 const auto& c_signature =
5406 FunctionType::ZoneHandle(Z, function.FfiCSignature());
5407 auto const& native_instance =
5408 Instance::Handle(function.GetNativeAnnotation());
5409
5410 Fragment body;
5411 body += FfiNativeLookupAddress(native_instance);
5412 body += FfiCallFunctionBody(function, c_signature,
5413 /*first_argument_parameter_offset=*/0);
5414 return body;
5415}
5416
5417Fragment FlowGraphBuilder::FfiCallFunctionBody(
5418 const Function& function,
5419 const FunctionType& c_signature,
5420 intptr_t first_argument_parameter_offset) {
5421 ASSERT(function.is_ffi_native() || function.IsFfiCallClosure());
5422
5423 LocalVariable* address = MakeTemporary("address");
5424
5425 Fragment body;
5426
5427 const char* error = nullptr;
5428 const auto marshaller_ptr = compiler::ffi::CallMarshaller::FromFunction(
5429 Z, function, first_argument_parameter_offset, c_signature, &error);
5430 // AbiSpecific integers can be incomplete, causing us to not know the
5431 // calling convention; this is caught in asFunction in both JIT and AOT.
5432 RELEASE_ASSERT(error == nullptr);
5433 RELEASE_ASSERT(marshaller_ptr != nullptr);
5434 const auto& marshaller = *marshaller_ptr;
5435
5436 const bool signature_contains_handles = marshaller.ContainsHandles();
5437
5438 // FFI trampolines are accessed via closures, so non-covariant argument types
5439 // and type arguments are either statically checked by the type system or
5440 // dynamically checked via dynamic closure call dispatchers.
5441
5442 // Null check arguments before we go into the try catch, so that we don't
5443 // catch our own null errors.
5444 const intptr_t num_args = marshaller.num_args();
5445 for (intptr_t i = 0; i < num_args; i++) {
5446 if (marshaller.IsHandleCType(i)) {
5447 continue;
5448 }
5449 body += LoadLocal(parsed_function_->ParameterVariable(
5450 first_argument_parameter_offset + i));
5451 // TODO(http://dartbug.com/47486): Support entry without checking for null.
5452 // Check for 'null'.
5453 body += CheckNullOptimized(
5454 String::ZoneHandle(
5455 Z, function.ParameterNameAt(first_argument_parameter_offset + i)),
5456 CheckNullInstr::kArgumentError);
5457 body += StoreLocal(TokenPosition::kNoSource,
5458 parsed_function_->ParameterVariable(
5459 first_argument_parameter_offset + i));
5460 body += Drop();
5461 }
5462
5463 intptr_t try_handler_index = -1;
5464 if (signature_contains_handles) {
5465 // Wrap in Try catch to transition from Native to Generated on a throw from
5466 // the dart_api.
5467 try_handler_index = AllocateTryIndex();
5468 body += TryCatch(try_handler_index);
5469 ++try_depth_;
5470 // TODO(dartbug.com/48989): Remove scope for calls where we don't actually
5471 // need it.
5472 // We no longer need the scope for passing in Handle arguments, but the
5473 // native function might, for instance, be relying on this scope for Dart API.
5474
5475 auto* const arg_reps =
5476 new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
5477
5478 body += LoadThread(); // argument.
5479 arg_reps->Add(kUntagged);
5480
5481 body += CallLeafRuntimeEntry(kEnterHandleScopeRuntimeEntry, kUntagged,
5482 *arg_reps);
5483 }
5484
5485 // Allocate typed data before FfiCall and pass it in to ffi call if needed.
5486 LocalVariable* return_compound_typed_data = nullptr;
5487 if (marshaller.ReturnsCompound()) {
5488 body += IntConstant(marshaller.CompoundReturnSizeInBytes());
5489 body +=
5490 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
5491 return_compound_typed_data = MakeTemporary();
5492 }
5493
5494 // Unbox and push the arguments.
5495 for (intptr_t i = 0; i < marshaller.num_args(); i++) {
5496 if (marshaller.IsCompoundCType(i)) {
5497 body += FfiCallConvertCompoundArgumentToNative(
5498 parsed_function_->ParameterVariable(first_argument_parameter_offset +
5499 i),
5500 marshaller, i);
5501 } else {
5502 body += LoadLocal(parsed_function_->ParameterVariable(
5503 first_argument_parameter_offset + i));
5504 // FfiCallInstr specifies all handle locations as Stack, and will pass a
5505 // pointer to the stack slot as the native handle argument.
5506 // Therefore we do not need to wrap handles.
5507 if (!marshaller.IsHandleCType(i)) {
5508 body += FfiConvertPrimitiveToNative(
5509 marshaller, i,
5510 parsed_function_->ParameterVariable(
5511 first_argument_parameter_offset + i));
5512 }
5513 }
5514 }
5515
5516 body += LoadLocal(address);
5517
5518 if (marshaller.ReturnsCompound()) {
5519 body += LoadLocal(return_compound_typed_data);
5520 }
5521
5522 body += FfiCall(marshaller, function.FfiIsLeaf());
5523
5524 const intptr_t num_defs = marshaller.NumReturnDefinitions();
5525 ASSERT(num_defs >= 1);
5526 auto defs = new (Z) ZoneGrowableArray<LocalVariable*>(Z, num_defs);
5527 LocalVariable* def = MakeTemporary("ffi call result");
5528 defs->Add(def);
5529
5530 if (marshaller.ReturnsCompound()) {
5531 // Drop call result, typed data with contents is already on the stack.
5532 body += DropTemporary(&def);
5533 }
5534
5535 if (marshaller.IsCompoundCType(compiler::ffi::kResultIndex)) {
5536 body += FfiCallConvertCompoundReturnToDart(marshaller,
5537 compiler::ffi::kResultIndex);
5538 } else {
5539 body += FfiConvertPrimitiveToDart(marshaller, compiler::ffi::kResultIndex);
5540 }
5541
5542 auto exit_handle_scope = [&]() -> Fragment {
5543 Fragment code;
5544 auto* const arg_reps =
5545 new (zone_) ZoneGrowableArray<Representation>(zone_, 1);
5546
5547 code += LoadThread(); // argument.
5548 arg_reps->Add(kUntagged);
5549
5550 code += CallLeafRuntimeEntry(kExitHandleScopeRuntimeEntry, kUntagged,
5551 *arg_reps);
5552 code += Drop();
5553 return code;
5554 };
5555
5556 if (signature_contains_handles) {
5557 // TODO(dartbug.com/48989): Remove scope for calls where we don't actually
5558 // need it.
5559 body += DropTempsPreserveTop(1); // Drop api_local_scope.
5560 body += exit_handle_scope();
5561 }
5562
5563 body += DropTempsPreserveTop(1); // Drop address.
5564 body += Return(TokenPosition::kNoSource);
5565
5566 if (signature_contains_handles) {
5567 --try_depth_;
5568 ++catch_depth_;
5569 Fragment catch_body =
5570 CatchBlockEntry(Array::empty_array(), try_handler_index,
5571 /*needs_stacktrace=*/true, /*is_synthesized=*/true);
5572
5573 // TODO(dartbug.com/48989): Remove scope for calls where we don't actually
5574 // need it.
5575 // TODO(41984): If we want to pass in the handle scope, move it out
5576 // of the try catch.
5577 catch_body += exit_handle_scope();
5578
5579 catch_body += LoadLocal(CurrentException());
5580 catch_body += LoadLocal(CurrentStackTrace());
5581 catch_body += RethrowException(TokenPosition::kNoSource, try_handler_index);
5582 --catch_depth_;
5583 }
5584
5585 return body;
5586}
5587
5588Fragment FlowGraphBuilder::LoadNativeArg(
5589 const compiler::ffi::CallbackMarshaller& marshaller,
5590 intptr_t arg_index) {
5591 const intptr_t num_defs = marshaller.NumDefinitions(arg_index);
5592 auto defs = new (Z) ZoneGrowableArray<LocalVariable*>(Z, num_defs);
5593
5594 Fragment fragment;
5595 for (intptr_t j = 0; j < num_defs; j++) {
5596 const intptr_t def_index = marshaller.DefinitionIndex(j, arg_index);
5597 auto* parameter = new (Z) NativeParameterInstr(marshaller, def_index);
5598 Push(parameter);
5599 fragment <<= parameter;
5600 LocalVariable* def = MakeTemporary();
5601 defs->Add(def);
5602 }
5603
5604 if (marshaller.IsCompoundCType(arg_index)) {
5605 fragment +=
5606 FfiCallbackConvertCompoundArgumentToDart(marshaller, arg_index, defs);
5607 } else {
5608 fragment += FfiConvertPrimitiveToDart(marshaller, arg_index);
5609 }
5610 return fragment;
5611}
5612
5613FlowGraph* FlowGraphBuilder::BuildGraphOfSyncFfiCallback(
5614 const Function& function) {
5615 const char* error = nullptr;
5616 const auto marshaller_ptr =
5617 compiler::ffi::CallbackMarshaller::FromFunction(Z, function, &error);
5618 // AbiSpecific integers can be incomplete, causing us to not know the
5619 // calling convention; this is caught in fromFunction in both JIT and AOT.
5620 RELEASE_ASSERT(error == nullptr);
5621 RELEASE_ASSERT(marshaller_ptr != nullptr);
5622 const auto& marshaller = *marshaller_ptr;
5623 const bool is_closure = function.GetFfiCallbackKind() ==
5624 FfiCallbackKind::kIsolateLocalClosureCallback;
5625
5626 graph_entry_ =
5627 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
5628
5629 auto* const native_entry =
5630 new (Z) NativeEntryInstr(marshaller, graph_entry_, AllocateBlockId(),
5631 CurrentTryIndex(), GetNextDeoptId());
5632
5633 graph_entry_->set_normal_entry(native_entry);
5634
5635 Fragment function_body(native_entry);
5636 function_body += CheckStackOverflowInPrologue(function.token_pos());
5637
5638 // Wrap the entire method in a big try/catch. This is important to ensure that
5639 // the VM does not crash if the callback throws an exception.
5640 const intptr_t try_handler_index = AllocateTryIndex();
5641 Fragment body = TryCatch(try_handler_index);
5642 ++try_depth_;
5643
5644 LocalVariable* closure = nullptr;
5645 if (is_closure) {
5646 // Load and unwrap closure persistent handle.
5647 body += LoadThread();
5648 body +=
5649 LoadUntagged(compiler::target::Thread::unboxed_runtime_arg_offset());
5650 body += LoadNativeField(Slot::PersistentHandle_ptr());
5651 closure = MakeTemporary();
5652 }
5653
5654 // Box and push the arguments.
5655 for (intptr_t i = 0; i < marshaller.num_args(); i++) {
5656 body += LoadNativeArg(marshaller, i);
5657 }
5658
5659 if (is_closure) {
5660 // Call the target. The +1 in the argument count is because the closure
5661 // itself is the first argument.
5662 const intptr_t argument_count = marshaller.num_args() + 1;
5663 body += LoadLocal(closure);
5664 if (!FLAG_precompiled_mode) {
5665 // The ClosureCallInstr() takes one explicit input (apart from arguments).
5666 // It uses it to find the target address (in AOT from
5667 // Closure::entry_point, in JIT from Closure::function_::entry_point).
5668 body += LoadNativeField(Slot::Closure_function());
5669 }
5670 body +=
5671 ClosureCall(Function::null_function(), TokenPosition::kNoSource,
5672 /*type_args_len=*/0, argument_count, Array::null_array());
5673 } else {
5674 // Call the target.
5675 //
5676 // TODO(36748): Determine the hot-reload semantics of callbacks and update
5677 // the rebind-rule accordingly.
5678 body += StaticCall(TokenPosition::kNoSource,
5679 Function::ZoneHandle(Z, function.FfiCallbackTarget()),
5680 marshaller.num_args(), Array::empty_array(),
5681 ICData::kNoRebind);
5682 }
5683
5684 if (!marshaller.IsHandleCType(compiler::ffi::kResultIndex)) {
5685 body += CheckNullOptimized(
5686 String::ZoneHandle(Z, Symbols::New(H.thread(), "return_value")),
5687 CheckNullInstr::kArgumentError);
5688 }
5689
5690 if (marshaller.IsCompoundCType(compiler::ffi::kResultIndex)) {
5691 body += FfiCallbackConvertCompoundReturnToNative(
5692 marshaller, compiler::ffi::kResultIndex);
5693 } else {
5694 body +=
5695 FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
5696 }
5697
5698 body += NativeReturn(marshaller);
5699
5700 --try_depth_;
5701 function_body += body;
5702
5703 ++catch_depth_;
5704 Fragment catch_body = CatchBlockEntry(Array::empty_array(), try_handler_index,
5705 /*needs_stacktrace=*/false,
5706 /*is_synthesized=*/true);
5707
5708 // Return the "exceptional return" value given in 'fromFunction'.
5709 if (marshaller.IsVoid(compiler::ffi::kResultIndex)) {
5710 // The exceptional return is always null -- return nullptr instead.
5711 ASSERT(function.FfiCallbackExceptionalReturn() == Object::null());
5712 catch_body += UnboxedIntConstant(0, kUnboxedIntPtr);
5713 } else if (marshaller.IsPointerPointer(compiler::ffi::kResultIndex)) {
5714 // The exceptional return is always null -- return nullptr instead.
5715 ASSERT(function.FfiCallbackExceptionalReturn() == Object::null());
5716 catch_body += UnboxedIntConstant(0, kUnboxedAddress);
5717 catch_body += ConvertUnboxedToUntagged();
5718 } else if (marshaller.IsHandleCType(compiler::ffi::kResultIndex)) {
5719 catch_body += UnhandledException();
5720 catch_body +=
5721 FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
5722 } else if (marshaller.IsCompoundCType(compiler::ffi::kResultIndex)) {
5723 ASSERT(function.FfiCallbackExceptionalReturn() == Object::null());
5724 // Manufacture empty result.
5725 const intptr_t size =
5726 Utils::RoundUp(marshaller.Location(compiler::ffi::kResultIndex)
5727 .payload_type()
5728 .SizeInBytes(),
5729 compiler::target::kWordSize);
5730 catch_body += IntConstant(size);
5731 catch_body +=
5732 AllocateTypedData(TokenPosition::kNoSource, kTypedDataUint8ArrayCid);
5733 catch_body += WrapTypedDataBaseInCompound(
5734 AbstractType::Handle(Z, marshaller.CType(compiler::ffi::kResultIndex)));
5735 catch_body += FfiCallbackConvertCompoundReturnToNative(
5736 marshaller, compiler::ffi::kResultIndex);
5737
5738 } else {
5739 catch_body += Constant(
5740 Instance::ZoneHandle(Z, function.FfiCallbackExceptionalReturn()));
5741 catch_body +=
5742 FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
5743 }
5744
5745 catch_body += NativeReturn(marshaller);
5746 --catch_depth_;
5747
5748 PrologueInfo prologue_info(-1, -1);
5749 return new (Z)
5750 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
5751 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
5752}
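// For illustration (hypothetical Dart sketch): a callback created as
// `Pointer.fromFunction<Int32 Function()>(f, -1)` compiles to the graph
// above; if `f` throws, the catch block returns the exceptional return
// value -1 to the native caller.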
5753
5754FlowGraph* FlowGraphBuilder::BuildGraphOfAsyncFfiCallback(
5755 const Function& function) {
5756 const char* error = nullptr;
5757 const auto marshaller_ptr =
5758 compiler::ffi::CallbackMarshaller::FromFunction(Z, function, &error);
5759 // AbiSpecific integers can be incomplete, causing us to not know the
5760 // calling convention; this is caught in fromFunction in both JIT and AOT.
5761 RELEASE_ASSERT(error == nullptr);
5762 RELEASE_ASSERT(marshaller_ptr != nullptr);
5763 const auto& marshaller = *marshaller_ptr;
5764
5765 // Currently all async FFI callbacks return void. This is enforced by the
5766 // frontend.
5767 ASSERT(marshaller.IsVoid(compiler::ffi::kResultIndex));
5768
5769 graph_entry_ =
5770 new (Z) GraphEntryInstr(*parsed_function_, Compiler::kNoOSRDeoptId);
5771
5772 auto* const native_entry =
5773 new (Z) NativeEntryInstr(marshaller, graph_entry_, AllocateBlockId(),
5774 CurrentTryIndex(), GetNextDeoptId());
5775
5776 graph_entry_->set_normal_entry(native_entry);
5777
5778 Fragment function_body(native_entry);
5779 function_body += CheckStackOverflowInPrologue(function.token_pos());
5780
5781 // Wrap the entire method in a big try/catch. This is important to ensure that
5782 // the VM does not crash if the callback throws an exception.
5783 const intptr_t try_handler_index = AllocateTryIndex();
5784 Fragment body = TryCatch(try_handler_index);
5785 ++try_depth_;
5786
5787 // Box and push the arguments into an array, to be sent to the target.
5788 body += Constant(TypeArguments::ZoneHandle(Z, TypeArguments::null()));
5789 body += IntConstant(marshaller.num_args());
5790 body += CreateArray();
5791 LocalVariable* array = MakeTemporary();
5792 for (intptr_t i = 0; i < marshaller.num_args(); i++) {
5793 body += LoadLocal(array);
5794 body += IntConstant(i);
5795 body += LoadNativeArg(marshaller, i);
5796 body += StoreIndexed(kArrayCid);
5797 }
5798
5799 // Send the arg array to the target. The arg array is still on the stack.
5800 body += Call1ArgStub(TokenPosition::kNoSource,
5801 Call1ArgStubInstr::StubId::kFfiAsyncCallbackSend);
5802
5803 body += FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
5804 ASSERT_EQUAL(marshaller.NumReturnDefinitions(), 1);
5805 body += NativeReturn(marshaller);
5806
5807 --try_depth_;
5808 function_body += body;
5809
5810 ++catch_depth_;
5811 Fragment catch_body = CatchBlockEntry(Array::empty_array(), try_handler_index,
5812 /*needs_stacktrace=*/false,
5813 /*is_synthesized=*/true);
5814
5815 // This catch indicates there's been some sort of error, but async callbacks
5816 // are fire-and-forget, and we don't guarantee delivery.
5817 catch_body += NullConstant();
5818 catch_body +=
5819 FfiConvertPrimitiveToNative(marshaller, compiler::ffi::kResultIndex);
5820 ASSERT_EQUAL(marshaller.NumReturnDefinitions(), 1);
5821 catch_body += NativeReturn(marshaller);
5822 --catch_depth_;
5823
5824 PrologueInfo prologue_info(-1, -1);
5825 return new (Z)
5826 FlowGraph(*parsed_function_, graph_entry_, last_used_block_id_,
5827 prologue_info, FlowGraph::CompilationModeFrom(optimizing_));
5828}
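// For illustration (hypothetical Dart sketch): callbacks created via
// `NativeCallable<Void Function(Int32)>.listener(f)` compile to the graph
// above: the arguments are boxed into an array and posted to the target
// isolate, so the native caller never waits for `f` to run.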
5829
5830void FlowGraphBuilder::SetCurrentTryCatchBlock(TryCatchBlock* try_catch_block) {
5831 try_catch_block_ = try_catch_block;
5832 SetCurrentTryIndex(try_catch_block == nullptr ? kInvalidTryIndex
5833 : try_catch_block->try_index());
5834}
5835
5836const Function& FlowGraphBuilder::PrependTypeArgumentsFunction() {
5837 if (prepend_type_arguments_.IsNull()) {
5838 const auto& dart_internal = Library::Handle(Z, Library::InternalLibrary());
5839 prepend_type_arguments_ = dart_internal.LookupFunctionAllowPrivate(
5840 Symbols::PrependTypeArguments());
5841 ASSERT(!prepend_type_arguments_.IsNull());
5842 }
5843 return prepend_type_arguments_;
5844}
5845
5846Fragment FlowGraphBuilder::BuildIntegerHashCode(bool smi) {
5847 Fragment body;
5848 Value* unboxed_value = Pop();
5849 HashIntegerOpInstr* hash =
5850 new HashIntegerOpInstr(unboxed_value, smi, DeoptId::kNone);
5851 Push(hash);
5852 body <<= hash;
5853 return body;
5854}
5855
5856Fragment FlowGraphBuilder::BuildDoubleHashCode() {
5857 Fragment body;
5858 Value* double_value = Pop();
5859 HashDoubleOpInstr* hash = new HashDoubleOpInstr(double_value, DeoptId::kNone);
5860 Push(hash);
5861 body <<= hash;
5862 body += Box(kUnboxedInt64);
5863 return body;
5864}
5865
5866 SwitchHelper::SwitchHelper(Zone* zone,
5867 TokenPosition position,
5868 bool is_exhaustive,
5869 const AbstractType& expression_type,
5870 SwitchBlock* switch_block,
5871 intptr_t case_count)
5872 : zone_(zone),
5873 position_(position),
5874 is_exhaustive_(is_exhaustive),
5875 expression_type_(expression_type),
5876 switch_block_(switch_block),
5877 case_count_(case_count),
5878 case_bodies_(case_count),
5879 case_expression_counts_(case_count),
5880 expressions_(case_count),
5881 sorted_expressions_(case_count) {
5882 case_expression_counts_.FillWith(0, 0, case_count);
5883
5884 if (expression_type.nullability() == Nullability::kNonNullable) {
5885 if (expression_type.IsIntType() || expression_type.IsSmiType()) {
5886 is_optimizable_ = true;
5887 } else if (expression_type.HasTypeClass() &&
5888 Class::Handle(zone_, expression_type.type_class())
5889 .is_enum_class()) {
5890 is_optimizable_ = true;
5891 is_enum_switch_ = true;
5892 }
5893 }
5894}
5895
5896 int64_t SwitchHelper::ExpressionRange() const {
5897 const int64_t min = expression_min().AsInt64Value();
5898 const int64_t max = expression_max().AsInt64Value();
5899 ASSERT(min <= max);
5900 const uint64_t diff = static_cast<uint64_t>(max) - static_cast<uint64_t>(min);
5901 // Saturate to avoid overflow.
5902 if (diff > static_cast<uint64_t>(kMaxInt64 - 1)) {
5903 return kMaxInt64;
5904 }
5905 return static_cast<int64_t>(diff + 1);
5906}
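// For example, a switch whose expressions span min = -2 and max = 5 has an
// expression range of 8; the saturation above only matters for ranges close
// to the full int64 span.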
5907
5908 bool SwitchHelper::RequiresLowerBoundCheck() const {
5909 if (is_enum_switch()) {
5910 if (expression_min().IsZero()) {
5911 // Enum indexes are always positive.
5912 return false;
5913 }
5914 }
5915 return true;
5916}
5917
5918 bool SwitchHelper::RequiresUpperBoundCheck() const {
5919 if (is_enum_switch()) {
5920 return has_default() || !is_exhaustive();
5921 }
5922 return true;
5923}
5924
5925 SwitchDispatch SwitchHelper::SelectDispatchStrategy() {
5926 // For small to medium-sized switches, binary search is faster than a
5927 // jump table.
5928 // Please update runtime/tests/vm/dart/optimized_switch_test.dart
5929 // when changing this constant.
5930 const intptr_t kJumpTableMinExpressions = 16;
5931 // This limit comes from IndirectGotoInstr.
5932 // Realistically, the current limit should never be hit by any code.
5933 const intptr_t kJumpTableMaxSize = kMaxInt32;
5934 // Sometimes the switch expressions don't cover a contiguous range.
5935 // If the ratio of holes to expressions is too great we fall back to a
5936 // binary search to avoid code size explosion.
5937 const double kJumpTableMaxHolesRatio = 1.0;
5938
5939 if (!is_optimizable() || expressions().is_empty()) {
5940 // The switch is not optimizable, so we can only use linear scan.
5941 return kSwitchDispatchLinearScan;
5942 }
5943
5944 if (!CompilerState::Current().is_aot()) {
5945 // JIT mode supports hot-reload, which currently prevents us from
5946 // enabling optimized switches.
5947 return kSwitchDispatchLinearScan;
5948 }
5949
5950 if (FLAG_force_switch_dispatch_type == kSwitchDispatchLinearScan) {
5951 return kSwitchDispatchLinearScan;
5952 }
5953
5954 PrepareForOptimizedSwitch();
5955
5956 if (!is_optimizable()) {
5957 // While preparing for an optimized switch we might have discovered that
5958 // the switch is not optimizable after all.
5959 return kSwitchDispatchLinearScan;
5960 }
5961
5962 if (FLAG_force_switch_dispatch_type == kSwitchDispatchBinarySearch) {
5963 return kSwitchDispatchBinarySearch;
5964 }
5965
5966 const int64_t range = ExpressionRange();
5967 if (range > kJumpTableMaxSize) {
5968 return kSwitchDispatchBinarySearch;
5969 }
5970
5971 const intptr_t num_expressions = expressions().length();
5972 ASSERT(num_expressions <= range);
5973
5974 const intptr_t max_holes = num_expressions * kJumpTableMaxHolesRatio;
5975 const int64_t holes = range - num_expressions;
5976
5977 if (FLAG_force_switch_dispatch_type != kSwitchDispatchJumpTable) {
5978 if (num_expressions < kJumpTableMinExpressions) {
5979 return kSwitchDispatchBinarySearch;
5980 }
5981
5982 if (holes > max_holes) {
5983 return kSwitchDispatchBinarySearch;
5984 }
5985 }
5986
5987 // After this point we will use a jump table.
5988
5989 // In the general case, bounds checks are required before a jump table
5990 // to handle all possible integer values.
5991 // For enums, the set of possible index values is known and much smaller
5992 // than the set of all possible integer values. A jump table that covers
5993 // either or both bounds of the range of index values requires only one or
5994 // no bounds checks.
5995 // If the expressions of an enum switch don't cover the full range of
5996 // values we can try to extend the jump table to cover the full range, but
5997 // not beyond kJumpTableMaxHolesRatio.
5998 // The count of enum values is not available when the flow graph is
5999 // constructed, so the upper bound cannot be extended. The lower bound is
6000 // always 0, so extending expression_min to 0 still removes its check.
6001 //
6002 // In the case of an integer switch we try to extend expression_min to 0
6003 // for a different reason.
6004 // If the range starts at zero it directly maps to the jump table
6005 // and we don't need to adjust the switch variable before the
6006 // jump table.
6007 if (expression_min().AsInt64Value() > 0) {
6008 const intptr_t holes_budget = Utils::Minimum(
6009 // Holes still available.
6010 max_holes - holes,
6011 // Entries left in the jump table.
6012 kJumpTableMaxSize - range);
6013
6014 const int64_t required_holes = expression_min().AsInt64Value();
6015 if (required_holes <= holes_budget) {
6016 expression_min_ = &Object::smi_zero();
6017 }
6018 }
6019
6020 return kSwitchDispatchJumpTable;
6021}
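// Editor's note: a worked example of the heuristic above. Seventeen case
// values {0..15, 40} give range = 41 and holes = 41 - 17 = 24; with
// kJumpTableMaxHolesRatio = 1.0 the hole budget is 17, so this switch falls
// back to binary search even though it has more than
// kJumpTableMinExpressions cases. A standalone sketch of the core decision
// (AOT, flag, and bounds handling omitted):
#include <cstdint>

enum class DispatchSketch { kLinearScan, kBinarySearch, kJumpTable };

DispatchSketch SelectDispatchSketch(int64_t range, int64_t num_expressions) {
  const int64_t kMinExpressions = 16;  // mirrors kJumpTableMinExpressions
  const double kMaxHolesRatio = 1.0;   // mirrors kJumpTableMaxHolesRatio
  if (num_expressions == 0) return DispatchSketch::kLinearScan;
  const int64_t max_holes =
      static_cast<int64_t>(num_expressions * kMaxHolesRatio);
  const int64_t holes = range - num_expressions;
  if (num_expressions < kMinExpressions || holes > max_holes) {
    return DispatchSketch::kBinarySearch;  // too small or too sparse
  }
  return DispatchSketch::kJumpTable;
}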
6022
6023void SwitchHelper::PrepareForOptimizedSwitch() {
6024 // Find the min and max of integer representations of expressions.
6025 // We also populate SwitchExpressions.integer for later use.
6026 const Field* enum_index_field = nullptr;
6027 for (intptr_t i = 0; i < expressions_.length(); ++i) {
6028 SwitchExpression& expression = expressions_[i];
6029 sorted_expressions_.Add(&expression);
6030
6031 const Instance& value = expression.value();
6032 const Integer* integer = nullptr;
6033 if (is_enum_switch()) {
6034 if (enum_index_field == nullptr) {
6035 enum_index_field =
6036 &Field::Handle(zone_, IG->object_store()->enum_index_field());
6037 }
6038 integer = &Integer::ZoneHandle(
6039 zone_, Integer::RawCast(value.GetField(*enum_index_field)));
6040 } else {
6041 integer = &Integer::Cast(value);
6042 }
6043 expression.set_integer(*integer);
6044 if (i == 0) {
6045 expression_min_ = integer;
6046 expression_max_ = integer;
6047 } else {
6048 if (expression_min_->CompareWith(*integer) > 0) {
6049 expression_min_ = integer;
6050 }
6051 if (expression_max_->CompareWith(*integer) < 0) {
6052 expression_max_ = integer;
6053 }
6054 }
6055 }
6056
6057 // Sort expressions by their integer value.
6058 sorted_expressions_.Sort(
6059 [](SwitchExpression* const* a, SwitchExpression* const* b) {
6060 return (*a)->integer().CompareWith((*b)->integer());
6061 });
6062
6063 // Check that there are no duplicate case expressions.
6064 // Duplicate expressions are allowed in switch statements, but
6065 // optimized switches don't implement them.
6066 for (intptr_t i = 0; i < sorted_expressions_.length() - 1; ++i) {
6067 const SwitchExpression& a = *sorted_expressions_.At(i);
6068 const SwitchExpression& b = *sorted_expressions_.At(i + 1);
6069 if (a.integer().Equals(b.integer())) {
6070 is_optimizable_ = false;
6071 break;
6072 }
6073 }
6074}
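// Editor's note: the duplicate check above relies on the preceding sort --
// equal case values become adjacent, so a single linear pass finds any
// duplicate. The same idea on plain integers:
#include <algorithm>
#include <cstdint>
#include <vector>

bool HasDuplicateCasesSketch(std::vector<int64_t> values) {
  std::sort(values.begin(), values.end());
  for (std::size_t i = 0; i + 1 < values.size(); ++i) {
    if (values[i] == values[i + 1]) {
      return true;  // the caller would clear is_optimizable_ here
    }
  }
  return false;
}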
6075
6076void SwitchHelper::AddExpression(intptr_t case_index,
6077 TokenPosition position,
6078 const Instance& value) {
6079 case_expression_counts_[case_index]++;
6080
6081 expressions_.Add(SwitchExpression(case_index, position, value));
6082
6083 if (is_optimizable_) {
6084 // Check the type of the case expression for use in an optimized switch.
6085 if (!value.IsInstanceOf(expression_type_, Object::null_type_arguments(),
6086 Object::null_type_arguments())) {
6087 is_optimizable_ = false;
6088 }
6089 }
6090}
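// Editor's note: each new case value is checked against the static type of
// the switched expression, so a single case value outside that type (for
// example, a value of a different type reaching a non-nullable int switch)
// flips the whole switch back to linear-scan dispatch.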
6091
6092} // namespace kernel
6093
6094} // namespace dart